feat(tvix/eval): deduplicate overlap between Closure and Thunk

This commit deduplicates the Thunk-like functionality from Closure
and unifies it with Thunk.

Specifically, we now have one and only one way of breaking reference
cycles in the Value-graph: Thunk.  No other variant contains a
RefCell.  This should make it easier to reason about the behavior of
the VM.  InnerClosure and UpvaluesCarrier are no longer necessary.

This refactoring allowed an improvement in code generation:
`Rc<RefCell<>>`s are now created only for closures which do not have
self-references or deferred upvalues, instead of for all closures.
OpClosure has been split into two separate opcodes:

- OpClosure creates non-recursive closures with no deferred
  upvalues.  The VM will not create an `Rc<RefCell<>>` when executing
  this instruction.

- OpThunkClosure is used for closures with self-references or
  deferred upvalues.  The VM will create a Thunk when executing this
  opcode, but the Thunk will start out already in the
  `ThunkRepr::Evaluated` state, rather than in the
  `ThunkRepr::Suspended` state.

To avoid confusion, OpThunk has been renamed OpThunkSuspended.

Thanks to @sterni for suggesting that all this could be done without
adding an additional variant to ThunkRepr.  This does however mean
that there will be mutating accesses to `ThunkRepr::Evaluated`,
which was not previously the case.  The field `is_finalised: bool`
has been added to `Closure` to ensure that these mutating accesses
are performed only on finalised Closures.  Both the check and the
field are present only if `#[cfg(debug_assertions)]`.

Change-Id: I04131501029772f30e28da8281d864427685097f
Signed-off-by: Adam Joseph <adam@westernsemico.com>
Reviewed-on: https://cl.tvl.fyi/c/depot/+/7019
Tested-by: BuildkiteCI
Reviewed-by: tazjin <tazjin@tvl.su>
This commit is contained in:
Adam Joseph 2022-10-15 16:10:10 -07:00 committed by tazjin
parent c91d86ee5c
commit d978b556e6
9 changed files with 206 additions and 152 deletions

View file

@ -151,7 +151,10 @@ pub fn builtins_import(
// Compilation succeeded, we can construct a thunk from whatever it spat // Compilation succeeded, we can construct a thunk from whatever it spat
// out and return that. // out and return that.
Ok(Value::Thunk(Thunk::new(result.lambda, vm.current_span()))) Ok(Value::Thunk(Thunk::new_suspended(
result.lambda,
vm.current_span(),
)))
}, },
) )
} }

View file

@ -1,5 +1,5 @@
use std::io::Write; use std::io::Write;
use std::ops::Index; use std::ops::{Index, IndexMut};
use crate::opcode::{CodeIdx, ConstantIdx, OpCode}; use crate::opcode::{CodeIdx, ConstantIdx, OpCode};
use crate::value::Value; use crate::value::Value;
@ -51,6 +51,12 @@ impl Index<CodeIdx> for Chunk {
} }
} }
impl IndexMut<CodeIdx> for Chunk {
fn index_mut(&mut self, index: CodeIdx) -> &mut Self::Output {
&mut self.code[index.0]
}
}
impl Chunk { impl Chunk {
pub fn push_op(&mut self, data: OpCode, span: codemap::Span) -> CodeIdx { pub fn push_op(&mut self, data: OpCode, span: codemap::Span) -> CodeIdx {
let idx = self.code.len(); let idx = self.code.len();

View file

@ -927,7 +927,7 @@ impl Compiler<'_> {
if lambda.upvalue_count == 0 { if lambda.upvalue_count == 0 {
self.emit_constant( self.emit_constant(
if is_thunk { if is_thunk {
Value::Thunk(Thunk::new(lambda, span)) Value::Thunk(Thunk::new_suspended(lambda, span))
} else { } else {
Value::Closure(Closure::new(lambda)) Value::Closure(Closure::new(lambda))
}, },
@ -942,11 +942,11 @@ impl Compiler<'_> {
// which the result can be constructed. // which the result can be constructed.
let blueprint_idx = self.chunk().push_constant(Value::Blueprint(lambda)); let blueprint_idx = self.chunk().push_constant(Value::Blueprint(lambda));
self.push_op( let code_idx = self.push_op(
if is_thunk { if is_thunk {
OpCode::OpThunk(blueprint_idx) OpCode::OpThunkSuspended(blueprint_idx)
} else { } else {
OpCode::OpClosure(blueprint_idx) OpCode::OpThunkClosure(blueprint_idx)
}, },
node, node,
); );
@ -957,6 +957,23 @@ impl Compiler<'_> {
compiled.scope.upvalues, compiled.scope.upvalues,
compiled.captures_with_stack, compiled.captures_with_stack,
); );
if !is_thunk && !self.scope()[outer_slot].needs_finaliser {
if !self.scope()[outer_slot].must_thunk {
// The closure has upvalues, but is not recursive. Therefore no thunk is required,
// which saves us the overhead of Rc<RefCell<>>
self.chunk()[code_idx] = OpCode::OpClosure(blueprint_idx);
} else {
// This case occurs when a closure has upvalue-references to itself but does not need a
// finaliser. Since no OpFinalise will be emitted later on we synthesize one here.
// It is needed here only to set [`Closure::is_finalised`] which is used for sanity checks.
#[cfg(debug_assertions)]
self.push_op(
OpCode::OpFinalise(self.scope().stack_index(outer_slot)),
&self.span_for(node),
);
}
}
} }
fn compile_apply(&mut self, slot: LocalIdx, node: &ast::Apply) { fn compile_apply(&mut self, slot: LocalIdx, node: &ast::Apply) {
@ -996,6 +1013,10 @@ impl Compiler<'_> {
self.push_op(OpCode::DataDeferredLocal(stack_idx), &upvalue.span); self.push_op(OpCode::DataDeferredLocal(stack_idx), &upvalue.span);
self.scope_mut().mark_needs_finaliser(slot); self.scope_mut().mark_needs_finaliser(slot);
} else { } else {
// a self-reference
if this_depth == target_depth && this_stack_slot == stack_idx {
self.scope_mut().mark_must_thunk(slot);
}
self.push_op(OpCode::DataLocalIdx(stack_idx), &upvalue.span); self.push_op(OpCode::DataLocalIdx(stack_idx), &upvalue.span);
} }
} }

View file

@ -50,6 +50,9 @@ pub struct Local {
/// Does this local need to be finalised after the enclosing scope /// Does this local need to be finalised after the enclosing scope
/// is completely constructed? /// is completely constructed?
pub needs_finaliser: bool, pub needs_finaliser: bool,
/// Does this local's upvalues contain a reference to itself?
pub must_thunk: bool,
} }
impl Local { impl Local {
@ -228,6 +231,7 @@ impl Scope {
name: LocalName::Phantom, name: LocalName::Phantom,
depth: self.scope_depth, depth: self.scope_depth,
needs_finaliser: false, needs_finaliser: false,
must_thunk: false,
used: true, used: true,
}); });
@ -243,6 +247,7 @@ impl Scope {
depth: self.scope_depth, depth: self.scope_depth,
initialised: false, initialised: false,
needs_finaliser: false, needs_finaliser: false,
must_thunk: false,
used: false, used: false,
}); });
@ -259,6 +264,12 @@ impl Scope {
self.locals[idx.0].needs_finaliser = true; self.locals[idx.0].needs_finaliser = true;
} }
/// Mark local as must be wrapped in a thunk. This happens if
/// the local has a reference to itself in its upvalues.
pub fn mark_must_thunk(&mut self, idx: LocalIdx) {
self.locals[idx.0].must_thunk = true;
}
/// Compute the runtime stack index for a given local by /// Compute the runtime stack index for a given local by
/// accounting for uninitialised variables at scopes below this /// accounting for uninitialised variables at scopes below this
/// one. /// one.

View file

@ -144,25 +144,27 @@ pub enum OpCode {
OpCall, OpCall,
OpTailCall, OpTailCall,
OpGetUpvalue(UpvalueIdx), OpGetUpvalue(UpvalueIdx),
// A Closure which has upvalues but no self-references
OpClosure(ConstantIdx), OpClosure(ConstantIdx),
// A Closure which has self-references (direct or via upvalues)
// Thunks OpThunkClosure(ConstantIdx),
OpThunk(ConstantIdx), // A suspended thunk, used to ensure laziness
OpThunkSuspended(ConstantIdx),
OpForce, OpForce,
/// Finalise initialisation of the upvalues of the value in the /// Finalise initialisation of the upvalues of the value in the
/// given stack index after the scope is fully bound. /// given stack index after the scope is fully bound.
OpFinalise(StackIdx), OpFinalise(StackIdx),
// [`OpClosure`] and [`OpThunk`] have a variable number of // [`OpClosure`], [`OpThunkSuspended`], and [`OpThunkClosure`] have a
// arguments to the instruction, which is represented here by // variable number of arguments to the instruction, which is
// making their data part of the opcodes. Each of these two // represented here by making their data part of the opcodes.
// opcodes has a `ConstantIdx`, which must reference a // Each of these opcodes has a `ConstantIdx`, which must
// `Value::Blueprint(Lambda)`. The `upvalue_count` field in // reference a `Value::Blueprint(Lambda)`. The `upvalue_count`
// that `Lambda` indicates the number of arguments it takes, and // field in that `Lambda` indicates the number of arguments it
// the `OpClosure` or `OpThunk` must be followed by exactly this // takes, and the opcode must be followed by exactly this number
// number of `Data*` opcodes. The VM skips over these by // of `Data*` opcodes. The VM skips over these by advancing the
// advancing the instruction pointer. // instruction pointer.
// //
// It is illegal for a `Data*` opcode to appear anywhere else. // It is illegal for a `Data*` opcode to appear anywhere else.
DataLocalIdx(StackIdx), DataLocalIdx(StackIdx),

View file

@ -7,10 +7,7 @@
//! in order to resolve each free variable in the scope to a value. //! in order to resolve each free variable in the scope to a value.
//! "Upvalue" is a term taken from Lua. //! "Upvalue" is a term taken from Lua.
use std::{ use std::ops::Index;
cell::{Ref, RefMut},
ops::Index,
};
use crate::{opcode::UpvalueIdx, Value}; use crate::{opcode::UpvalueIdx, Value};
@ -68,6 +65,16 @@ impl Upvalues {
Some(stack) => stack.len(), Some(stack) => stack.len(),
} }
} }
/// Resolve deferred upvalues from the provided stack slice,
/// mutating them in the internal upvalue slots.
pub fn resolve_deferred_upvalues(&mut self, stack: &[Value]) {
for upvalue in self.static_upvalues.iter_mut() {
if let Value::DeferredUpvalue(update_from_idx) = upvalue {
*upvalue = stack[update_from_idx.0].clone();
}
}
}
} }
impl Index<UpvalueIdx> for Upvalues { impl Index<UpvalueIdx> for Upvalues {
@ -77,29 +84,3 @@ impl Index<UpvalueIdx> for Upvalues {
&self.static_upvalues[index.0] &self.static_upvalues[index.0]
} }
} }
/// `UpvalueCarrier` is implemented by all types that carry upvalues.
pub trait UpvalueCarrier {
fn upvalue_count(&self) -> usize;
/// Read-only accessor for the stored upvalues.
fn upvalues(&self) -> Ref<'_, Upvalues>;
/// Mutable accessor for stored upvalues.
fn upvalues_mut(&self) -> RefMut<'_, Upvalues>;
/// Read an upvalue at the given index.
fn upvalue(&self, idx: UpvalueIdx) -> Ref<'_, Value> {
Ref::map(self.upvalues(), |v| &v.static_upvalues[idx.0])
}
/// Resolve deferred upvalues from the provided stack slice,
/// mutating them in the internal upvalue slots.
fn resolve_deferred_upvalues(&self, stack: &[Value]) {
for upvalue in self.upvalues_mut().static_upvalues.iter_mut() {
if let Value::DeferredUpvalue(idx) = upvalue {
*upvalue = stack[idx.0].clone();
}
}
}
}

View file

@ -1,17 +1,9 @@
//! This module implements the runtime representation of functions. //! This module implements the runtime representation of functions.
use std::{ use std::{collections::HashMap, hash::Hash, rc::Rc};
cell::{Ref, RefCell, RefMut},
collections::HashMap,
hash::Hash,
rc::Rc,
};
use codemap::Span; use codemap::Span;
use crate::{ use crate::{chunk::Chunk, upvalues::Upvalues};
chunk::Chunk,
upvalues::{UpvalueCarrier, Upvalues},
};
use super::NixString; use super::NixString;
@ -42,8 +34,8 @@ impl Formals {
} }
/// The opcodes for a thunk or closure, plus the number of /// The opcodes for a thunk or closure, plus the number of
/// non-executable opcodes which are allowed after an OpClosure or /// non-executable opcodes which are allowed after an OpThunkClosure or
/// OpThunk referencing it. At runtime `Lambda` is usually wrapped /// OpThunkSuspended referencing it. At runtime `Lambda` is usually wrapped
/// in `Rc` to avoid copying the `Chunk` it holds (which can be /// in `Rc` to avoid copying the `Chunk` it holds (which can be
/// quite large). /// quite large).
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
@ -73,42 +65,37 @@ impl Lambda {
} }
#[derive(Clone, Debug, PartialEq)] #[derive(Clone, Debug, PartialEq)]
pub struct InnerClosure { pub struct Closure {
pub lambda: Rc<Lambda>, pub lambda: Rc<Lambda>,
upvalues: Upvalues, pub upvalues: Upvalues,
/// true if all upvalues have been realised
#[cfg(debug_assertions)]
pub is_finalised: bool,
} }
#[repr(transparent)]
#[derive(Clone, Debug, PartialEq)]
pub struct Closure(Rc<RefCell<InnerClosure>>);
impl Closure { impl Closure {
pub fn new(lambda: Rc<Lambda>) -> Self { pub fn new(lambda: Rc<Lambda>) -> Self {
Closure(Rc::new(RefCell::new(InnerClosure { Self::new_with_upvalues(Upvalues::with_capacity(lambda.upvalue_count), lambda)
upvalues: Upvalues::with_capacity(lambda.upvalue_count),
lambda,
})))
} }
pub fn chunk(&self) -> Ref<'_, Chunk> { pub fn new_with_upvalues(upvalues: Upvalues, lambda: Rc<Lambda>) -> Self {
Ref::map(self.0.borrow(), |c| &c.lambda.chunk) Closure {
upvalues,
lambda,
#[cfg(debug_assertions)]
is_finalised: true,
}
}
pub fn chunk(&self) -> &Chunk {
&self.lambda.chunk
} }
pub fn lambda(&self) -> Rc<Lambda> { pub fn lambda(&self) -> Rc<Lambda> {
self.0.borrow().lambda.clone() self.lambda.clone()
} }
}
pub fn upvalues(&self) -> &Upvalues {
impl UpvalueCarrier for Closure { &self.upvalues
fn upvalue_count(&self) -> usize {
self.0.borrow().lambda.upvalue_count
}
fn upvalues(&self) -> Ref<'_, Upvalues> {
Ref::map(self.0.borrow(), |c| &c.upvalues)
}
fn upvalues_mut(&self) -> RefMut<'_, Upvalues> {
RefMut::map(self.0.borrow_mut(), |c| &mut c.upvalues)
} }
} }

View file

@ -28,7 +28,8 @@ use codemap::Span;
use crate::{ use crate::{
errors::{Error, ErrorKind}, errors::{Error, ErrorKind},
upvalues::{UpvalueCarrier, Upvalues}, upvalues::Upvalues,
value::Closure,
vm::VM, vm::VM,
Value, Value,
}; };
@ -36,6 +37,10 @@ use crate::{
use super::Lambda; use super::Lambda;
/// Internal representation of the different states of a thunk. /// Internal representation of the different states of a thunk.
///
/// Upvalues must be finalised before leaving the initial state
/// (Suspended or RecursiveClosure). The [`value()`] function may
/// not be called until the thunk is in the final state (Evaluated).
#[derive(Clone, Debug, PartialEq)] #[derive(Clone, Debug, PartialEq)]
enum ThunkRepr { enum ThunkRepr {
/// Thunk is closed over some values, suspended and awaiting /// Thunk is closed over some values, suspended and awaiting
@ -43,6 +48,7 @@ enum ThunkRepr {
Suspended { Suspended {
lambda: Rc<Lambda>, lambda: Rc<Lambda>,
upvalues: Upvalues, upvalues: Upvalues,
span: Span,
}, },
/// Thunk currently under-evaluation; encountering a blackhole /// Thunk currently under-evaluation; encountering a blackhole
@ -53,21 +59,31 @@ enum ThunkRepr {
Evaluated(Value), Evaluated(Value),
} }
/// A thunk is created for any value which requires non-strict
/// evaluation due to self-reference or lazy semantics (or both).
/// Every reference cycle involving `Value`s will contain at least
/// one `Thunk`.
#[derive(Clone, Debug, PartialEq)] #[derive(Clone, Debug, PartialEq)]
pub struct Thunk { pub struct Thunk(Rc<RefCell<ThunkRepr>>);
inner: Rc<RefCell<ThunkRepr>>,
span: Span,
}
impl Thunk { impl Thunk {
pub fn new(lambda: Rc<Lambda>, span: Span) -> Self { pub fn new_closure(lambda: Rc<Lambda>) -> Self {
Thunk { Thunk(Rc::new(RefCell::new(ThunkRepr::Evaluated(Value::Closure(
inner: Rc::new(RefCell::new(ThunkRepr::Suspended { Closure {
upvalues: Upvalues::with_capacity(lambda.upvalue_count), upvalues: Upvalues::with_capacity(lambda.upvalue_count),
lambda: lambda.clone(), lambda: lambda.clone(),
})), #[cfg(debug_assertions)]
span, is_finalised: false,
},
)))))
} }
pub fn new_suspended(lambda: Rc<Lambda>, span: Span) -> Self {
Thunk(Rc::new(RefCell::new(ThunkRepr::Suspended {
upvalues: Upvalues::with_capacity(lambda.upvalue_count),
lambda: lambda.clone(),
span,
})))
} }
/// Evaluate the content of a thunk, potentially repeatedly, until /// Evaluate the content of a thunk, potentially repeatedly, until
@ -79,11 +95,11 @@ impl Thunk {
/// are replaced. /// are replaced.
pub fn force(&self, vm: &mut VM) -> Result<(), ErrorKind> { pub fn force(&self, vm: &mut VM) -> Result<(), ErrorKind> {
loop { loop {
let mut thunk_mut = self.inner.borrow_mut(); let mut thunk_mut = self.0.borrow_mut();
match *thunk_mut { match *thunk_mut {
ThunkRepr::Evaluated(Value::Thunk(ref inner_thunk)) => { ThunkRepr::Evaluated(Value::Thunk(ref inner_thunk)) => {
let inner_repr = inner_thunk.inner.borrow().clone(); let inner_repr = inner_thunk.0.borrow().clone();
*thunk_mut = inner_repr; *thunk_mut = inner_repr;
} }
@ -91,24 +107,33 @@ impl Thunk {
ThunkRepr::Blackhole => return Err(ErrorKind::InfiniteRecursion), ThunkRepr::Blackhole => return Err(ErrorKind::InfiniteRecursion),
ThunkRepr::Suspended { .. } => { ThunkRepr::Suspended { .. } => {
if let ThunkRepr::Suspended { lambda, upvalues } = if let ThunkRepr::Suspended {
std::mem::replace(&mut *thunk_mut, ThunkRepr::Blackhole) lambda,
upvalues,
span,
} = std::mem::replace(&mut *thunk_mut, ThunkRepr::Blackhole)
{ {
drop(thunk_mut); drop(thunk_mut);
vm.enter_frame(lambda, upvalues, 0).map_err(|e| { vm.enter_frame(lambda, upvalues, 0)
ErrorKind::ThunkForce(Box::new(Error { .map_err(|e| ErrorKind::ThunkForce(Box::new(Error { span, ..e })))?;
span: self.span, (*self.0.borrow_mut()) = ThunkRepr::Evaluated(vm.pop())
..e
}))
})?;
let evaluated = ThunkRepr::Evaluated(vm.pop());
(*self.inner.borrow_mut()) = evaluated;
} }
} }
} }
} }
} }
pub fn finalise(&self, stack: &[Value]) {
self.upvalues_mut().resolve_deferred_upvalues(stack);
#[cfg(debug_assertions)]
{
let inner: &mut ThunkRepr = &mut self.0.as_ref().borrow_mut();
if let ThunkRepr::Evaluated(Value::Closure(c)) = inner {
c.is_finalised = true;
}
}
}
/// Returns a reference to the inner evaluated value of a thunk. /// Returns a reference to the inner evaluated value of a thunk.
/// It is an error to call this on a thunk that has not been /// It is an error to call this on a thunk that has not been
/// forced, or is not otherwise known to be fully evaluated. /// forced, or is not otherwise known to be fully evaluated.
@ -116,35 +141,48 @@ impl Thunk {
// difficult to represent in the type system without impacting the // difficult to represent in the type system without impacting the
// API too much. // API too much.
pub fn value(&self) -> Ref<Value> { pub fn value(&self) -> Ref<Value> {
Ref::map(self.inner.borrow(), |thunk| { Ref::map(self.0.borrow(), |thunk| {
if let ThunkRepr::Evaluated(value) = thunk { if let ThunkRepr::Evaluated(value) = thunk {
#[cfg(debug_assertions)]
if matches!(
value,
Value::Closure(Closure {
is_finalised: false,
..
})
) {
panic!("Thunk::value called on an unfinalised closure");
}
return value; return value;
} }
panic!("Thunk::value called on non-evaluated thunk"); panic!("Thunk::value called on non-evaluated thunk");
}) })
} }
}
impl UpvalueCarrier for Thunk { pub fn upvalues(&self) -> Ref<'_, Upvalues> {
fn upvalue_count(&self) -> usize { Ref::map(self.0.borrow(), |thunk| match thunk {
if let ThunkRepr::Suspended { lambda, .. } = &*self.inner.borrow() {
return lambda.upvalue_count;
}
panic!("upvalues() on non-suspended thunk");
}
fn upvalues(&self) -> Ref<'_, Upvalues> {
Ref::map(self.inner.borrow(), |thunk| match thunk {
ThunkRepr::Suspended { upvalues, .. } => upvalues, ThunkRepr::Suspended { upvalues, .. } => upvalues,
ThunkRepr::Evaluated(Value::Closure(Closure { upvalues, .. })) => upvalues,
_ => panic!("upvalues() on non-suspended thunk"), _ => panic!("upvalues() on non-suspended thunk"),
}) })
} }
fn upvalues_mut(&self) -> RefMut<'_, Upvalues> { pub fn upvalues_mut(&self) -> RefMut<'_, Upvalues> {
RefMut::map(self.inner.borrow_mut(), |thunk| match thunk { RefMut::map(self.0.borrow_mut(), |thunk| match thunk {
ThunkRepr::Suspended { upvalues, .. } => upvalues, ThunkRepr::Suspended { upvalues, .. } => upvalues,
ThunkRepr::Evaluated(Value::Closure(Closure {
upvalues,
#[cfg(debug_assertions)]
is_finalised,
..
})) => {
#[cfg(debug_assertions)]
if *is_finalised {
panic!("Thunk::upvalues_mut() called on a finalised closure");
}
upvalues
}
thunk => panic!("upvalues() on non-suspended thunk: {thunk:?}"), thunk => panic!("upvalues() on non-suspended thunk: {thunk:?}"),
}) })
} }
@ -152,7 +190,7 @@ impl UpvalueCarrier for Thunk {
impl Display for Thunk { impl Display for Thunk {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self.inner.try_borrow() { match self.0.try_borrow() {
Ok(repr) => match &*repr { Ok(repr) => match &*repr {
ThunkRepr::Evaluated(v) => v.fmt(f), ThunkRepr::Evaluated(v) => v.fmt(f),
_ => f.write_str("internal[thunk]"), _ => f.write_str("internal[thunk]"),

View file

@ -1,7 +1,7 @@
//! This module implements the virtual (or abstract) machine that runs //! This module implements the virtual (or abstract) machine that runs
//! Tvix bytecode. //! Tvix bytecode.
use std::{cell::RefMut, path::PathBuf, rc::Rc}; use std::{ops::DerefMut, path::PathBuf, rc::Rc};
use crate::{ use crate::{
chunk::Chunk, chunk::Chunk,
@ -9,7 +9,7 @@ use crate::{
nix_search_path::NixSearchPath, nix_search_path::NixSearchPath,
observer::RuntimeObserver, observer::RuntimeObserver,
opcode::{CodeIdx, Count, JumpOffset, OpCode, StackIdx, UpvalueIdx}, opcode::{CodeIdx, Count, JumpOffset, OpCode, StackIdx, UpvalueIdx},
upvalues::{UpvalueCarrier, Upvalues}, upvalues::Upvalues,
value::{Builtin, Closure, CoercionKind, Lambda, NixAttrs, NixList, Thunk, Value}, value::{Builtin, Closure, CoercionKind, Lambda, NixAttrs, NixList, Thunk, Value},
warnings::{EvalWarning, WarningKind}, warnings::{EvalWarning, WarningKind},
}; };
@ -675,30 +675,37 @@ impl<'o> VM<'o> {
upvalue_count > 0, upvalue_count > 0,
"OpClosure should not be called for plain lambdas" "OpClosure should not be called for plain lambdas"
); );
let mut upvalues = Upvalues::with_capacity(blueprint.upvalue_count);
let closure = Closure::new(blueprint); self.populate_upvalues(upvalue_count, &mut upvalues)?;
let upvalues = closure.upvalues_mut(); self.push(Value::Closure(Closure::new_with_upvalues(
self.push(Value::Closure(closure.clone())); upvalues, blueprint,
)));
// From this point on we internally mutate the
// closure object's upvalues. The closure is
// already in its stack slot, which means that it
// can capture itself as an upvalue for
// self-recursion.
self.populate_upvalues(upvalue_count, upvalues)?;
} }
OpCode::OpThunk(idx) => { OpCode::OpThunkSuspended(idx) | OpCode::OpThunkClosure(idx) => {
let blueprint = match &self.chunk()[idx] { let blueprint = match &self.chunk()[idx] {
Value::Blueprint(lambda) => lambda.clone(), Value::Blueprint(lambda) => lambda.clone(),
_ => panic!("compiler bug: non-blueprint in blueprint slot"), _ => panic!("compiler bug: non-blueprint in blueprint slot"),
}; };
let upvalue_count = blueprint.upvalue_count; let upvalue_count = blueprint.upvalue_count;
let thunk = Thunk::new(blueprint, self.current_span()); let thunk = if matches!(op, OpCode::OpThunkClosure(_)) {
debug_assert!(
upvalue_count > 0,
"OpThunkClosure should not be called for plain lambdas"
);
Thunk::new_closure(blueprint)
} else {
Thunk::new_suspended(blueprint, self.current_span())
};
let upvalues = thunk.upvalues_mut(); let upvalues = thunk.upvalues_mut();
self.push(Value::Thunk(thunk.clone())); self.push(Value::Thunk(thunk.clone()));
// From this point on we internally mutate the
// upvalues. The closure (if `is_closure`) is
// already in its stack slot, which means that it
// can capture itself as an upvalue for
// self-recursion.
self.populate_upvalues(upvalue_count, upvalues)?; self.populate_upvalues(upvalue_count, upvalues)?;
} }
@ -715,13 +722,9 @@ impl<'o> VM<'o> {
OpCode::OpFinalise(StackIdx(idx)) => { OpCode::OpFinalise(StackIdx(idx)) => {
match &self.stack[self.frame().stack_offset + idx] { match &self.stack[self.frame().stack_offset + idx] {
Value::Closure(closure) => { Value::Closure(_) => panic!("attempted to finalise a closure"),
closure.resolve_deferred_upvalues(&self.stack[self.frame().stack_offset..])
}
Value::Thunk(thunk) => { Value::Thunk(thunk) => thunk.finalise(&self.stack[self.frame().stack_offset..]),
thunk.resolve_deferred_upvalues(&self.stack[self.frame().stack_offset..])
}
// In functions with "formals" attributes, it is // In functions with "formals" attributes, it is
// possible for `OpFinalise` to be called on a // possible for `OpFinalise` to be called on a
@ -809,21 +812,23 @@ impl<'o> VM<'o> {
fn populate_upvalues( fn populate_upvalues(
&mut self, &mut self,
count: usize, count: usize,
mut upvalues: RefMut<'_, Upvalues>, mut upvalues: impl DerefMut<Target = Upvalues>,
) -> EvalResult<()> { ) -> EvalResult<()> {
for _ in 0..count { for _ in 0..count {
match self.inc_ip() { match self.inc_ip() {
OpCode::DataLocalIdx(StackIdx(local_idx)) => { OpCode::DataLocalIdx(StackIdx(local_idx)) => {
let idx = self.frame().stack_offset + local_idx; let idx = self.frame().stack_offset + local_idx;
upvalues.push(self.stack[idx].clone()); upvalues.deref_mut().push(self.stack[idx].clone());
} }
OpCode::DataUpvalueIdx(upv_idx) => { OpCode::DataUpvalueIdx(upv_idx) => {
upvalues.push(self.frame().upvalue(upv_idx).clone()); upvalues
.deref_mut()
.push(self.frame().upvalue(upv_idx).clone());
} }
OpCode::DataDeferredLocal(idx) => { OpCode::DataDeferredLocal(idx) => {
upvalues.push(Value::DeferredUpvalue(idx)); upvalues.deref_mut().push(Value::DeferredUpvalue(idx));
} }
OpCode::DataCaptureWith => { OpCode::DataCaptureWith => {
@ -841,7 +846,7 @@ impl<'o> VM<'o> {
captured_with_stack.push(self.stack[*idx].clone()); captured_with_stack.push(self.stack[*idx].clone());
} }
upvalues.set_with_stack(captured_with_stack); upvalues.deref_mut().set_with_stack(captured_with_stack);
} }
_ => panic!("compiler error: missing closure operand"), _ => panic!("compiler error: missing closure operand"),