diff --git a/src/compile/basic_block.rs b/src/compile/basic_block.rs new file mode 100644 index 0000000..00489c2 --- /dev/null +++ b/src/compile/basic_block.rs @@ -0,0 +1,168 @@ +use crate::vm::inst::Inst; +use std::collections::BTreeMap; + +// +// enum BasicBlock +// + +/// A block of code or branch that determines control flow. +#[derive(Debug, Clone, PartialEq)] +pub enum BasicBlock { + /// A linear block of executable instructions. + Block { exit: usize, block: Vec }, + + /// A branch, instructing the basic block where to jump for the true and false blocks. + Branch { + block_true: usize, + block_false: usize, + }, +} + +#[derive(Debug, Default, Clone, PartialEq)] +pub struct BasicBlockList { + blocks: BTreeMap, +} + +impl BasicBlockList { + /// Creates a new `BasicBlockList` wrapper around the specified list. + pub fn new(blocks: BTreeMap) -> Self { + Self { blocks } + } + + /// Flattens a list of blocks into a list of instructions. + pub fn to_vec(self) -> Vec { + let mut body = Vec::new(); + let blocks = self.flatten(); + + // create reverse address of each block index + let mut addr_rev = BTreeMap::new(); + let mut addr = 0; + for (index, block) in blocks.iter().enumerate() { + addr_rev.insert(index, addr); + let inst_len = match block { + // If this block is going to exit to the next block in the sequence, there will + // be no jump at the end of this execution. + BasicBlock::Block { exit, block } if *exit == (index + 1) => block.len(), + // If the block is going to exit to someplace that *isn't* next in the sequence, + // there will be an unconditional jump added. + BasicBlock::Block { block, .. } => block.len() + 1, + // 2 because of: + // - conditional jump if true at start + // - unconditional jump to false block + BasicBlock::Branch { .. 
} => 2, + }; + + addr += inst_len; + } + let final_addr = addr; + drop(addr); + + // build the blocks + for (index, block) in blocks.into_iter().enumerate() { + match block { + BasicBlock::Block { exit, block } => { + body.extend(block); + // if we exit to someplace out-of-order on this block, insert a jump + // instruction + if exit != index + 1 { + let addr = addr_rev[&exit]; + body.push(Inst::Jump(addr)); + } + } + BasicBlock::Branch { block_true, block_false } => { + // insert conditional jump to true statement, and unconditional jump to false + // statement + let addr_true = addr_rev[&block_true]; + let addr_false = addr_rev[&block_false]; + body.push(Inst::JumpTrue(addr_true)); + body.push(Inst::Jump(addr_false)); + } + } + } + + // make sure that we actually got the right number of instructions + assert_eq!( + body.len(), + final_addr, + "predicted instruction length does not match compiled instruction length" + ); + + body + } + + /// Remaps block indices so that there are no holes between blocks, being converted into a flat + /// vector of `BasicBlock` values pointing to vector indices. + fn flatten(self) -> Vec { + let mut blocks = Vec::with_capacity(self.len()); + let mut entry_map = BTreeMap::new(); + // first pass: add blocks to the "blocks" list, and map block indices + for (new_index, (index, block)) in self.blocks.into_iter().enumerate() { + blocks.push(block); + entry_map.insert(index, new_index); + } + // second pass: update blocks in-place with their newly mapped addresses + blocks + .into_iter() + .map(|basic_block| match basic_block { + BasicBlock::Block { exit, block } => BasicBlock::Block { + exit: entry_map[&exit], + block, + }, + BasicBlock::Branch { + block_true, + block_false, + } => BasicBlock::Branch { + block_true: entry_map[&block_true], + block_false: entry_map[&block_false], + }, + }) + .collect() + } + + /// Inserts a `BasicBlock` with the given index, returning the previous `BasicBlock` occupying + /// that index (if any). 
+ pub fn insert(&mut self, index: usize, block: BasicBlock) -> Option { + self.blocks.insert(index, block) + } + + /// Removes and returns a `BasicBlock` with the given index, if it exists. + pub fn remove(&mut self, index: &usize) -> Option { + self.blocks.remove(index) + } + + /// Gets a `BasicBlock` by its index, if it exists. + pub fn get(&self, index: &usize) -> Option<&BasicBlock> { + self.blocks.get(index) + } + + /// Iterates over the basic blocks in this list. + pub fn iter(&self) -> impl Iterator { + self.blocks.iter() + } + + /// Consumes this `BasicBlockList` into an iterator. + pub fn into_iter(self) -> impl Iterator { + self.blocks.into_iter() + } + + /// Gets the number of basic blocks registered. + pub fn len(&self) -> usize { + self.blocks.len() + } + + /// Gets the wrapped value for this list. + pub fn blocks(&self) -> &BTreeMap { + &self.blocks + } + + /// Consumes this `BasicBlockList` into its wrapped type. + pub fn into_blocks(self) -> BTreeMap { + self.blocks + } +} + +impl From> for BasicBlockList { + fn from(other: BTreeMap) -> Self { + BasicBlockList::new(other) + } +} diff --git a/src/compile/locals.rs b/src/compile/locals.rs new file mode 100644 index 0000000..7a2b0d7 --- /dev/null +++ b/src/compile/locals.rs @@ -0,0 +1,56 @@ +use crate::{compile::Compile, obj::prelude::*, syn::{ast::*, visit::*}}; + +/// Collects local names from the given tree. +/// +/// This will *not* attempt to recursively collect locals, and will only stay on the base statement +/// level. +pub struct CollectLocals<'c, 't> { + compile: &'c mut Compile<'t>, +} + +// - Python's LEGB methodology seems to be good. Look up variables in this order: +// - Local +// - Enclosing functions (i.e. inner functions) +// - Global +// - Builtin +// - Resolving names: +// - Scan assignments first. If anything is assigned (even before usage), it's local. 
+// - Everything else should just do a lookup + +impl<'c, 't> CollectLocals<'c, 't> { + pub fn new(compile: &'c mut Compile<'t>) -> Self { + Self { compile } + } + + pub fn collect(&mut self, body: &Body) { + self.visit_body(body) + } +} + +impl Visit for CollectLocals<'_, '_> { + type Out = (); + + fn visit_body(&mut self, body: &Body) -> Self::Out { DefaultAccept::default_accept(body, self); } + fn visit_stmt(&mut self, stmt: &Stmt) -> Self::Out { DefaultAccept::default_accept(stmt, self); } + fn visit_assign_stmt(&mut self, assign: &AssignStmt) -> Self::Out { DefaultAccept::default_accept(assign, self); } + fn visit_lhs_expr(&mut self, lhs_expr: &LhsExpr) -> Self::Out { + match lhs_expr { + LhsExpr::Local(name) => { + let sym = global_sym(name.to_string()); + self.compile.create_local(sym); + } + _ => { /* no op */ } + } + } + fn visit_expr(&mut self, expr: &Expr) -> Self::Out { DefaultAccept::default_accept(expr, self); } + fn visit_bin_expr(&mut self, expr: &BinExpr) -> Self::Out { DefaultAccept::default_accept(expr, self); } + fn visit_un_expr(&mut self, expr: &UnExpr) -> Self::Out { DefaultAccept::default_accept(expr, self); } + fn visit_call_expr(&mut self, expr: &CallExpr) -> Self::Out { DefaultAccept::default_accept(expr, self); } + fn visit_index_expr(&mut self, expr: &IndexExpr) -> Self::Out { DefaultAccept::default_accept(expr, self); } + fn visit_access_expr(&mut self, expr: &AccessExpr) -> Self::Out { DefaultAccept::default_accept(expr, self); } + fn visit_atom(&mut self, atom: &Atom) -> Self::Out { DefaultAccept::default_accept(atom, self); } +} + +pub fn collect_locals(compile: &mut Compile, body: &Body) { + CollectLocals::new(compile).collect(body); +} diff --git a/src/compile/mod.rs b/src/compile/mod.rs index b6f2efd..a309db7 100644 --- a/src/compile/mod.rs +++ b/src/compile/mod.rs @@ -1,17 +1,28 @@ -pub mod thunk; +pub mod basic_block; pub mod error; +mod locals; +pub mod thunk; use crate::{obj::prelude::*, vm::consts::*}; -use 
std::collections::HashMap; +use std::collections::{BTreeMap, HashMap}; pub struct Compile<'t> { text: &'t str, const_data: ConstData, + globals: BTreeMap, + locals: Vec>, + next_local: Local, } impl<'t> Compile<'t> { pub fn new(text: &'t str) -> Self { - Compile { text, const_data: Default::default() } + Compile { + text, + const_data: Default::default(), + globals: Default::default(), + locals: Default::default(), + next_local: Default::default(), + } } pub fn text(&self) -> &'t str { @@ -28,14 +39,62 @@ impl<'t> Compile<'t> { /// Gets or inserts a static int reference. pub fn const_int(&mut self, int: i64) -> (ConstHandle, IntRef) { - self.const_data_mut() - .const_int(int) + self.const_data_mut().const_int(int) } /// Gets or inserts a static string reference. pub fn const_str(&mut self, s: impl AsRef) -> (ConstHandle, StrRef) { - self.const_data_mut() - .const_str(s) + self.const_data_mut().const_str(s) + } + + /// Looks up a variable name. + /// + /// This will search up the locals stack until the given name is found, ultimately ending with + /// a global name lookup. + pub fn lookup_scope(&mut self, sym: Sym) -> Option { + self.locals + .iter() + .rev() + .filter_map(|locals| locals.get(&sym)) + .next() + .or_else(|| self.globals.get(&sym)) + .copied() + } + + /// Creates a new local variable if it does not exist in the current local scope. + pub fn create_local(&mut self, sym: Sym) -> Local { + let locals = self.locals.last_mut().expect("scope"); + if let Some(local) = locals.get(&sym) { + *local + } else { + // wish I could use mem::replace here, oh well + let local = self.next_local; + self.next_local = self.next_local.next(); + locals.insert(sym, local); + local + } + } + + /// Creates a new global variable if it does not exist in the current global scope. 
+ pub fn create_global(&mut self, sym: Sym) -> Local { + if let Some(global) = self.globals.get(&sym) { + *global + } else { + let global = self.next_local; + self.next_local = self.next_local.next(); + self.globals.insert(sym, global); + global + } + } + + /// Pushes an empty scope layer of local variables. + pub fn push_scope_layer(&mut self) { + self.locals.push(Default::default()); + } + + /// Pops a scope layer of local variables, if any are available. + pub fn pop_scope_layer(&mut self) -> Option> { + self.locals.pop() } } diff --git a/src/compile/thunk.rs b/src/compile/thunk.rs index 7e72ae3..958ae78 100644 --- a/src/compile/thunk.rs +++ b/src/compile/thunk.rs @@ -1,18 +1,47 @@ use crate::{ - compile::{error::*, Compile}, - syn::{ast::*, visit::*}, + compile::{basic_block::*, error::*, Compile}, obj::prelude::*, - vm::inst::* + syn::{ast::*, visit::*}, + vm::inst::*, }; use std::mem; +/// A basic block of VM code. +/// +/// Thunks are precomputed chunks of code that may allow for branching and/or looping. +#[derive(Debug, Clone, PartialEq)] pub enum Thunk { + /// A list of instructions. + /// + /// This is the core of all `Thunk` values. Body(Vec), + + /// A list of thunks. List(Vec), + + /// Based on the conditional flag in the VM, code for one of these thunks will be executed. + /// + /// The conditional flag is expected to be set upon entry to this thunk. + /// + /// Only one of these thunks will be executed. At the end of either thunk, the program will + /// continue at the address following this branch. Branch { thunk_true: Box, thunk_false: Box, }, + + /// Based on the conditional flag in the VM, code for this loop will continue to execute. + /// + /// The conditional flag is expected to be set upon entry to this thunk. + /// + /// At the start of the body, the condition flag is initially checked. If it is not true, + /// the program jumps to the end of the body and continues. 
+ /// + /// At the end of the body, the program jumps back to the start where the condition is checked + /// again. + Loop(Box), + + /// A placeholder/default thunk that compiles to nothing. Nop, } @@ -54,6 +83,32 @@ impl Thunk { (lhs, rhs) => Thunk::List(vec![lhs, rhs]), }; } + + /// Gets the number of basic blocks that this thunk will produce. + /// + /// This is necessary for compiling to a basic block, in order to predict the "next block" that + /// a thunk will be jumping to. + fn basic_block_count(&self) -> usize { + match self { + Thunk::Body(_) => 1, + Thunk::List(thunks) => thunks + .iter() + .fold(0, |n, thunk| n + thunk.basic_block_count()), + Thunk::Branch { + thunk_true, + thunk_false, + // length is true + false block count, + 1 for the branch basic block at the start + } => thunk_true.basic_block_count() + thunk_false.basic_block_count() + 1, + // length is thunk, + 1 for branch at the start of the loop + Thunk::Loop(thunk) => thunk.basic_block_count() + 1, + Thunk::Nop => 0, + } + } + + pub fn flatten(self) -> BasicBlockList { + Flatten::default() + .flatten(self) + } } impl From for Thunk { @@ -74,6 +129,80 @@ impl From> for Thunk { } } +// +// struct Flatten +// + +/// Flattens a thunk into linear list of basic blocks. +#[derive(Default)] +struct Flatten { + // using a btreemap instead of a vec because we can insert things out-of-order + blocks: BasicBlockList, +} + +// +// impl Flatten +// + +impl Flatten { + pub fn flatten(mut self, thunk: Thunk) -> BasicBlockList { + // "It's 4pm babe, time for your thunk flattening!" + // "Yes, honey..." 
+ let last_block = thunk.basic_block_count(); + self.flatten_next(last_block, thunk); + assert_eq!(self.blocks.len(), last_block); + self.blocks + } + + fn flatten_next(&mut self, next_block: usize, thunk: Thunk) { + match thunk { + Thunk::Body(thunk) => { + let this_block = self.this_block(); + let prev = self.blocks.insert(this_block, BasicBlock::Block { + exit: next_block, + block: thunk, + }); + assert!(prev.is_none()); + } + Thunk::List(thunks) => { + for thunk in thunks.into_iter() { + let next_block = self.this_block() + thunk.basic_block_count(); + self.flatten_next(next_block, thunk); + assert_eq!(next_block, self.this_block()); + } + assert_eq!(next_block, self.this_block()); + } + Thunk::Branch { thunk_true, thunk_false, } => { + let branch_block = self.this_block(); + let block_true = self.this_block() + 1; + let block_false = block_true + thunk_true.basic_block_count(); + self.blocks.insert(branch_block, BasicBlock::Branch { + block_true, + block_false, + }); + self.flatten_next(next_block, *thunk_true); + self.flatten_next(next_block, *thunk_false); + assert_eq!(self.this_block(), next_block); + } + Thunk::Loop(_) => todo!(), + Thunk::Nop => {} + } + } + + fn this_block(&self) -> usize { + self.blocks.len() + } +} + +// +// struct CompileBody +// + +/// Compiles an AST body down to a `Thunk`. +/// +/// Thunks are the basic building blocks of the IR. Thunks form a chain of decision paths that may +/// be taken, which allows an optimizer to remove dead code, detect endless loops, and so on. This +/// allows for shrinking blocks of code without having to recalculate jump addresses. 
pub struct CompileBody<'c, 't> { compile: &'c mut Compile<'t>, } @@ -88,7 +217,14 @@ impl<'c, 't> CompileBody<'c, 't> { } } +// +// impl Visit for CompileBody +// + impl Visit for CompileBody<'_, '_> { + // XXX + // Trying to "future-proof" by using Result<_> in case there's some reason that an error + // may need to be thrown in the future so I don't have to wrap every return value in Ok(_) type Out = Result; fn visit_body(&mut self, body: &Body) -> Self::Out { @@ -197,9 +333,9 @@ impl Visit for CompileBody<'_, '_> { // - eval expr // - getattr (expr.access) let mut thunk = self.visit_expr(&expr.expr)?; - thunk.push_thunk(Thunk::Body(vec![ - Inst::GetAttr(global_sym(expr.access.to_string())), - ])); + thunk.push_thunk(Thunk::Body(vec![Inst::GetAttr(global_sym( + expr.access.to_string(), + ))])); Ok(thunk) } @@ -233,3 +369,49 @@ impl Visit for CompileBody<'_, '_> { Ok(thunk) } } + +// +// Tests +// + +#[test] +fn test_flatten_thunk() { + let init_body = vec![ + Inst::PushSym(Sym::new(0)), + Inst::PushSym(Sym::new(1)), + Inst::Call(1) + ]; + let true_body = vec![Inst::PushSym(Sym::new(2))]; + let false_body = vec![Inst::PushSym(Sym::new(3))]; + let end_body = vec![ + Inst::PushSym(Sym::new(1)), + Inst::Call(1) + ]; + + let thunk = Thunk::List(vec![ + // do something before + Thunk::Body(init_body.clone()), + + // branch + Thunk::Branch { + thunk_true: Thunk::Body(true_body.clone()).into(), + thunk_false: Thunk::Body(false_body.clone()).into(), + }, + + // do something after + Thunk::Body(end_body.clone()), + ]); + + let block_count = thunk.basic_block_count(); + + let blocks = thunk.flatten(); + assert_eq!(blocks.len(), block_count); + + let mut iter = blocks.into_iter(); + assert_eq!(iter.next().unwrap(), (0, BasicBlock::Block { exit: 1, block: init_body, })); + assert_eq!(iter.next().unwrap(), (1, BasicBlock::Branch { block_true: 2, block_false: 3, })); + assert_eq!(iter.next().unwrap(), (2, BasicBlock::Block { exit: 4, block: true_body, })); + 
assert_eq!(iter.next().unwrap(), (3, BasicBlock::Block { exit: 4, block: false_body, })); + assert_eq!(iter.next().unwrap(), (4, BasicBlock::Block { exit: 5, block: end_body, })); + assert!(iter.next().is_none()); +} diff --git a/src/obj/locals.rs b/src/obj/locals.rs new file mode 100644 index 0000000..5d4a42e --- /dev/null +++ b/src/obj/locals.rs @@ -0,0 +1,6 @@ +use crate::obj::ObjRef; +use std::collections::BTreeMap; + +handle_type!(Local); + +pub type Locals = BTreeMap; diff --git a/src/obj/macros.rs b/src/obj/macros.rs index d6634b3..663108d 100644 --- a/src/obj/macros.rs +++ b/src/obj/macros.rs @@ -85,3 +85,30 @@ macro_rules! vtable { $crate::obj::attrs::Vtable::new(maplit::btreemap! { }) }} } + +#[macro_export] +macro_rules! handle_type { + ($name:ident) => { + #[derive(Default, Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] + pub struct $name(usize); + + impl $name { + /// Creates a new handle. + pub fn new(handle: usize) -> Self { + Self(handle) + } + + /// Gets the index of this handle. + pub fn index(&self) -> usize { + self.0 + } + + /// Gets the next handle after this one. 
+ pub fn next(&self) -> Self { + Self::new(self.index() + 1) + } + } + + impl shredder::EmptyScan for $name {} + }; +} diff --git a/src/obj/mod.rs b/src/obj/mod.rs index c13664a..2af87d0 100644 --- a/src/obj/mod.rs +++ b/src/obj/mod.rs @@ -5,15 +5,16 @@ pub mod attrs; pub mod fun; pub mod int; pub mod intern; +pub mod locals; pub mod names; pub mod str; pub mod sym; -#[cfg(test)] -mod test; pub mod ty; pub mod prelude { - pub use crate::obj::{attrs::*, fun::*, int::*, intern::*, str::*, sym::*, ty::*, Obj, ObjRef}; + pub use crate::obj::{ + attrs::*, fun::*, int::*, intern::*, locals::*, str::*, sym::*, ty::*, Obj, ObjRef, + }; } use shredder::{Gc, Scan}; @@ -36,13 +37,10 @@ pub trait Obj: Scan + std::fmt::Debug { fn attrs_mut(&mut self) -> Option<&mut Attrs>; fn get_attr(&self, sym: Sym) -> Option { - self.attrs() - .get(&sym) - .cloned() - .or_else(|| { - let vtable = self.vtable().get(); - vtable.get(&sym).cloned() - }) + self.attrs().get(&sym).cloned().or_else(|| { + let vtable = self.vtable().get(); + vtable.get(&sym).cloned() + }) } } diff --git a/src/obj/sym.rs b/src/obj/sym.rs index b847104..4a6d4ed 100644 --- a/src/obj/sym.rs +++ b/src/obj/sym.rs @@ -1,6 +1,5 @@ use crate::obj::{intern::Interner, names::*, prelude::*}; use once_cell::sync::Lazy; -use shredder::Scan; use std::{collections::BTreeMap, sync::Mutex}; // @@ -9,29 +8,14 @@ use std::{collections::BTreeMap, sync::Mutex}; pub type SymRef = ObjRef; -/// A literal name or symbol. -#[derive(Scan, Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct Sym(usize); +handle_type!(Sym); impl Sym { - /// Gets the index of this symbol. - pub fn index(&self) -> usize { - self.0 - } - - /// Creates a new symbol. - /// - /// Generally, new symbols should be created through the `global_sym` function - take care - /// while using this function. - pub(super) fn new(sym: usize) -> Self { - Sym(sym) - } - /// Creates a new symbol object. 
/// /// Generally, new symbol objects should be created through the `global_sym_ref` function - /// take care while using this function. - pub(super) fn new_obj(sym: impl Into) -> SymRef { + pub fn new_obj(sym: impl Into) -> SymRef { ObjRef::new(sym.into()) } } @@ -114,4 +98,3 @@ pub fn global_sym(s: String) -> Sym { // pub type SymTable = Interner; -pub type Locals = Interner; diff --git a/src/obj/test.rs b/src/obj/test.rs deleted file mode 100644 index 316dee4..0000000 --- a/src/obj/test.rs +++ /dev/null @@ -1,64 +0,0 @@ -use crate::obj::{names::*, prelude::*}; -use once_cell::sync::Lazy; -use shredder::*; -use std::sync::Mutex; - -static TEST_LOCK: Lazy> = Lazy::new(|| Mutex::new(())); - -#[test] -fn test_sym_plumbing() { - // Most of this test is making sure we are free of runtime infinite recursion issues with - // initializing static data (NIL_OBJ, SYM_TY, SYM_ATTRS) - - let _guard = TEST_LOCK.lock().unwrap(); - let start = number_of_tracked_allocations(); // need 'start' because of static allocations - run_with_gc_cleanup(|| { - assert_eq!(number_of_tracked_allocations(), start + 0); - - let nil = NIL_NAME.sym_ref(); - - // nil sym obj - assert_eq!(number_of_tracked_allocations(), start + 1); - - { - read_obj!(let nil_obj = nil); - let sym: Sym = **nil_obj; - assert_eq!(NIL_NAME.sym, sym); - assert_eq!(number_of_tracked_allocations(), start + 1); - - nil_obj.attrs(); - let ty_sym_obj = SYM_NAME.sym_ref(); - assert_eq!(number_of_tracked_allocations(), start + 2); - } - - let on = TRUE_NAME.sym_ref(); - // true sym obj, sym ty obj shouldn't be duplicated - assert_eq!(number_of_tracked_allocations(), start + 3); - - let off = FALSE_NAME.sym_ref(); - // false sym obj, sym ty obj shouldn't be duplicated - assert_eq!(number_of_tracked_allocations(), start + 4); - }); - // these are *static* values, so there will always remain at least one reference. 
- assert_eq!(number_of_tracked_allocations(), start + 4); -} - -#[test] -fn test_dyn_obj_ref_eq() { - #[derive(Default, Debug, Scan)] - struct FooObj { vtable: Attrs, attrs: Attrs } - - impl_obj!(FooObj, vtable, attrs); - - let _guard = TEST_LOCK.lock().unwrap(); - let start = number_of_tracked_allocations(); // need 'start' because of static allocations - run_with_gc_cleanup(|| { - let rf1: ObjRef = ObjRef::new(FooObj::default()); - let rf2 = rf1.clone(); - - assert!(rf1.ref_eq(&rf2)); - assert!(rf2.ref_eq(&rf1)); - assert_eq!(number_of_tracked_allocations(), start + 1); - }); - assert_eq!(number_of_tracked_allocations(), start + 0); -} diff --git a/src/syn/visit.rs b/src/syn/visit.rs index 86ee183..6dade5d 100644 --- a/src/syn/visit.rs +++ b/src/syn/visit.rs @@ -23,3 +23,20 @@ pub trait Visit { fn visit_access_expr(&mut self, expr: &AccessExpr) -> Self::Out; fn visit_atom(&mut self, atom: &Atom) -> Self::Out; } + +/* +copy/paste of default_accepts + + fn visit_body(&mut self, body: &Body) -> Self::Out { DefaultAccept::default_accept(body, self); } + fn visit_stmt(&mut self, stmt: &Stmt) -> Self::Out { DefaultAccept::default_accept(stmt, self); } + fn visit_assign_stmt(&mut self, assign: &AssignStmt) -> Self::Out { DefaultAccept::default_accept(assign, self); } + fn visit_lhs_expr(&mut self, lhs_expr: &LhsExpr) -> Self::Out { DefaultAccept::default_accept(lhs_expr, self); } + fn visit_expr(&mut self, expr: &Expr) -> Self::Out { DefaultAccept::default_accept(expr, self); } + fn visit_bin_expr(&mut self, expr: &BinExpr) -> Self::Out { DefaultAccept::default_accept(expr, self); } + fn visit_un_expr(&mut self, expr: &UnExpr) -> Self::Out { DefaultAccept::default_accept(expr, self); } + fn visit_call_expr(&mut self, expr: &CallExpr) -> Self::Out { DefaultAccept::default_accept(expr, self); } + fn visit_index_expr(&mut self, expr: &IndexExpr) -> Self::Out { DefaultAccept::default_accept(expr, self); } + fn visit_access_expr(&mut self, expr: &AccessExpr) -> Self::Out { 
DefaultAccept::default_accept(expr, self); } + fn visit_atom(&mut self, atom: &Atom) -> Self::Out { DefaultAccept::default_accept(atom, self); } + +*/ diff --git a/src/vm/consts.rs b/src/vm/consts.rs index bfc7c88..4768cd5 100644 --- a/src/vm/consts.rs +++ b/src/vm/consts.rs @@ -1,17 +1,6 @@ use crate::obj::prelude::*; -#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct ConstHandle(usize); - -impl ConstHandle { - pub fn index(&self) -> usize { - self.0 - } - - pub fn new(handle: usize) -> Self { - ConstHandle(handle) - } -} +handle_type!(ConstHandle); #[derive(Debug, Default)] pub struct ConstPool { diff --git a/src/vm/frame.rs b/src/vm/frame.rs index dbf5d27..99a22a5 100644 --- a/src/vm/frame.rs +++ b/src/vm/frame.rs @@ -1,6 +1,5 @@ use crate::obj::prelude::*; use shredder::{GcSafe, Scan, Scanner}; -use std::collections::BTreeMap; /// A stack call frame. #[derive(Debug, Clone)] @@ -25,8 +24,6 @@ unsafe impl Scan for FrameKind { unsafe impl GcSafe for FrameKind {} -pub type Locals = BTreeMap; #[derive(Scan, Debug, Clone)] pub struct Frame { locals: Locals, diff --git a/src/vm/inst.rs b/src/vm/inst.rs index 3abb14f..52dae3b 100644 --- a/src/vm/inst.rs +++ b/src/vm/inst.rs @@ -25,7 +25,7 @@ pub enum Inst { /// /// In code, it would look like this: /// - /// target.symbol = source + /// `target.symbol = source` /// SetAttr(Sym), diff --git a/src/vm/mod.rs b/src/vm/mod.rs index 1367c2f..961c0a9 100644 --- a/src/vm/mod.rs +++ b/src/vm/mod.rs @@ -81,8 +81,6 @@ impl<'c> Vm<'c> { /// Set the next program counter value. /// /// This may cause the running program to crash. Handle with care. - /// - /// TODO : consider making this `unsafe`? Is that appropriate in this context? pub fn set_pc(&mut self, pc: usize) { self.pc = pc; }