Add basic blocks and implementation of flattening thunks -> basic blocks

* Basic blocks are a more linear way of representing code. Thunks beget basic
  blocks, which beget vectors of instructions.
* Basic blocks are also being flattened into a vector of instructions
  (hopefully, no tests done yet)
* OH yeah locals can be collected too (but currently are not being
  collected in the compiler, that should come soon)

Signed-off-by: Alek Ratzloff <alekratz@gmail.com>
This commit is contained in:
2020-09-16 17:18:31 -07:00
parent ef38680fe5
commit 582b3a4b73
14 changed files with 540 additions and 124 deletions

168
src/compile/basic_block.rs Normal file
View File

@@ -0,0 +1,168 @@
use crate::vm::inst::Inst;
use std::collections::BTreeMap;
//
// enum BasicBlock
//
/// A block of code or branch that determines control flow.
#[derive(Debug, Clone, PartialEq)]
pub enum BasicBlock {
    /// A linear block of executable instructions.
    ///
    /// `exit` is the index of the basic block control continues to after `block` runs
    /// straight through.
    Block { exit: usize, block: Vec<Inst> },
    /// A branch, instructing the basic block where to jump for the true and false blocks.
    ///
    /// `block_true`/`block_false` are basic-block indices; they are resolved to
    /// instruction addresses when the block list is flattened.
    Branch {
        block_true: usize,
        block_false: usize,
    },
}
/// An index-keyed collection of basic blocks.
#[derive(Debug, Default, Clone, PartialEq)]
pub struct BasicBlockList {
    // Keyed map rather than a Vec so blocks can be inserted out of index order;
    // `flatten` later remaps the indices into a dense range.
    blocks: BTreeMap<usize, BasicBlock>,
}
impl BasicBlockList {
    /// Creates a new `BasicBlockList` wrapper around the specified list.
    pub fn new(blocks: BTreeMap<usize, BasicBlock>) -> Self {
        Self { blocks }
    }

    /// Flattens a list of blocks into a list of instructions, consuming the list.
    ///
    /// The first pass predicts the starting instruction address of every block; the
    /// second pass emits instructions, rewriting jump targets from block indices to
    /// those addresses.
    pub fn to_vec(self) -> Vec<Inst> {
        let mut body = Vec::new();
        let blocks = self.flatten();
        // First pass: compute the starting instruction address of each block index.
        let mut addr_rev = BTreeMap::new();
        let mut addr = 0;
        for (index, block) in blocks.iter().enumerate() {
            addr_rev.insert(index, addr);
            let inst_len = match block {
                // If this block is going to exit to the next block in the sequence,
                // there will be no jump at the end of this execution.
                BasicBlock::Block { exit, block } if *exit == (index + 1) => block.len(),
                // If the block is going to exit to someplace that *isn't* next in the
                // sequence, there will be an unconditional jump added.
                BasicBlock::Block { block, .. } => block.len() + 1,
                // 2 because of:
                // - conditional jump if true at start
                // - unconditional jump to false block
                BasicBlock::Branch { .. } => 2,
            };
            addr += inst_len;
        }
        // A block may legitimately jump one past the last block ("fall off the end");
        // map that index to the address just past the final instruction so the lookups
        // below cannot panic on it.
        addr_rev.insert(blocks.len(), addr);
        let final_addr = addr;
        // Second pass: emit the instructions for each block.
        for (index, block) in blocks.into_iter().enumerate() {
            match block {
                BasicBlock::Block { exit, block } => {
                    body.extend(block);
                    // if we exit to someplace out-of-order on this block, insert a jump
                    // instruction
                    if exit != index + 1 {
                        body.push(Inst::Jump(addr_rev[&exit]));
                    }
                }
                BasicBlock::Branch {
                    block_true,
                    block_false,
                } => {
                    // insert conditional jump to the true block, and unconditional jump
                    // to the false block
                    body.push(Inst::JumpTrue(addr_rev[&block_true]));
                    body.push(Inst::Jump(addr_rev[&block_false]));
                }
            }
        }
        // make sure that we actually got the right number of instructions
        assert_eq!(
            body.len(),
            final_addr,
            "predicted instruction length does not match compiled instruction length"
        );
        body
    }

    /// Remaps block indices so that there are no holes between blocks, being converted
    /// into a flat vector of `BasicBlock` values pointing to vector indices.
    fn flatten(self) -> Vec<BasicBlock> {
        let mut blocks = Vec::with_capacity(self.len());
        let mut entry_map = BTreeMap::new();
        // first pass: add blocks to the "blocks" list, and map old indices to dense ones
        for (new_index, (index, block)) in self.blocks.into_iter().enumerate() {
            blocks.push(block);
            entry_map.insert(index, new_index);
        }
        // An exit or branch target may point one past the last block ("fall off the
        // end"); such an index has no map entry and is remapped to the new length.
        let end = blocks.len();
        let remap = move |index: usize| entry_map.get(&index).copied().unwrap_or(end);
        // second pass: rebuild each block with its newly mapped indices
        blocks
            .into_iter()
            .map(|basic_block| match basic_block {
                BasicBlock::Block { exit, block } => BasicBlock::Block {
                    exit: remap(exit),
                    block,
                },
                BasicBlock::Branch {
                    block_true,
                    block_false,
                } => BasicBlock::Branch {
                    block_true: remap(block_true),
                    block_false: remap(block_false),
                },
            })
            .collect()
    }

    /// Inserts a `BasicBlock` with the given index, returning the previous `BasicBlock`
    /// occupying that index (if any).
    pub fn insert(&mut self, index: usize, block: BasicBlock) -> Option<BasicBlock> {
        self.blocks.insert(index, block)
    }

    /// Removes and returns a `BasicBlock` with the given index, if it exists.
    pub fn remove(&mut self, index: &usize) -> Option<BasicBlock> {
        self.blocks.remove(index)
    }

    /// Gets a `BasicBlock` by its index, if it exists.
    pub fn get(&self, index: &usize) -> Option<&BasicBlock> {
        self.blocks.get(index)
    }

    /// Iterates over the basic blocks in this list.
    pub fn iter(&self) -> impl Iterator<Item = (&usize, &BasicBlock)> {
        self.blocks.iter()
    }

    /// Consumes this `BasicBlockList` into an iterator.
    pub fn into_iter(self) -> impl Iterator<Item = (usize, BasicBlock)> {
        self.blocks.into_iter()
    }

    /// Gets the number of basic blocks registered.
    pub fn len(&self) -> usize {
        self.blocks.len()
    }

    /// Returns `true` if no basic blocks are registered.
    pub fn is_empty(&self) -> bool {
        self.blocks.is_empty()
    }

    /// Gets the wrapped value for this list.
    pub fn blocks(&self) -> &BTreeMap<usize, BasicBlock> {
        &self.blocks
    }

    /// Consumes this `BasicBlockList` into its wrapped type.
    pub fn into_blocks(self) -> BTreeMap<usize, BasicBlock> {
        self.blocks
    }
}
impl From<BTreeMap<usize, BasicBlock>> for BasicBlockList {
fn from(other: BTreeMap<usize, BasicBlock>) -> Self {
BasicBlockList::new(other)
}
}

56
src/compile/locals.rs Normal file
View File

@@ -0,0 +1,56 @@
use crate::{compile::Compile, obj::prelude::*, syn::{ast::*, visit::*}};
/// Collects local names from the given tree.
///
/// This will *not* attempt to recursively collect locals, and will only stay on the base statement
/// level.
pub struct CollectLocals<'c, 't> {
    // Compiler state that receives the discovered names via `create_local`.
    compile: &'c mut Compile<'t>,
}
// - Python's LEGB methodology seems to be good. Look up variables in this order:
// - Local
// - Enclosing functions (i.e. inner functions)
// - Global
// - Builtin
// - Resolving names:
// - Scan assignments first. If anything is assigned (even before usage), it's local.
// - Everything else should just do a lookup
impl<'c, 't> CollectLocals<'c, 't> {
pub fn new(compile: &'c mut Compile<'t>) -> Self {
Self { compile }
}
pub fn collect(&mut self, body: &Body) {
self.visit_body(body)
}
}
impl Visit for CollectLocals<'_, '_> {
    type Out = ();

    fn visit_body(&mut self, body: &Body) -> Self::Out {
        DefaultAccept::default_accept(body, self);
    }

    fn visit_stmt(&mut self, stmt: &Stmt) -> Self::Out {
        DefaultAccept::default_accept(stmt, self);
    }

    fn visit_assign_stmt(&mut self, assign: &AssignStmt) -> Self::Out {
        DefaultAccept::default_accept(assign, self);
    }

    fn visit_lhs_expr(&mut self, lhs_expr: &LhsExpr) -> Self::Out {
        // A bare name on the left-hand side of an assignment becomes a local binding;
        // all other lhs forms declare nothing.
        if let LhsExpr::Local(name) = lhs_expr {
            let sym = global_sym(name.to_string());
            self.compile.create_local(sym);
        }
    }

    fn visit_expr(&mut self, expr: &Expr) -> Self::Out {
        DefaultAccept::default_accept(expr, self);
    }

    fn visit_bin_expr(&mut self, expr: &BinExpr) -> Self::Out {
        DefaultAccept::default_accept(expr, self);
    }

    fn visit_un_expr(&mut self, expr: &UnExpr) -> Self::Out {
        DefaultAccept::default_accept(expr, self);
    }

    fn visit_call_expr(&mut self, expr: &CallExpr) -> Self::Out {
        DefaultAccept::default_accept(expr, self);
    }

    fn visit_index_expr(&mut self, expr: &IndexExpr) -> Self::Out {
        DefaultAccept::default_accept(expr, self);
    }

    fn visit_access_expr(&mut self, expr: &AccessExpr) -> Self::Out {
        DefaultAccept::default_accept(expr, self);
    }

    fn visit_atom(&mut self, atom: &Atom) -> Self::Out {
        DefaultAccept::default_accept(atom, self);
    }
}
/// Collects local variable names from `body` into the given compiler state.
pub fn collect_locals(compile: &mut Compile, body: &Body) {
    let mut collector = CollectLocals::new(compile);
    collector.collect(body);
}

View File

@@ -1,17 +1,28 @@
pub mod thunk;
pub mod basic_block;
pub mod error;
mod locals;
pub mod thunk;
use crate::{obj::prelude::*, vm::consts::*};
use std::collections::HashMap;
use std::collections::{BTreeMap, HashMap};
pub struct Compile<'t> {
text: &'t str,
const_data: ConstData,
globals: BTreeMap<Sym, Local>,
locals: Vec<BTreeMap<Sym, Local>>,
next_local: Local,
}
impl<'t> Compile<'t> {
pub fn new(text: &'t str) -> Self {
Compile { text, const_data: Default::default() }
Compile {
text,
const_data: Default::default(),
globals: Default::default(),
locals: Default::default(),
next_local: Default::default(),
}
}
pub fn text(&self) -> &'t str {
@@ -28,14 +39,62 @@ impl<'t> Compile<'t> {
/// Gets or inserts a static int reference.
pub fn const_int(&mut self, int: i64) -> (ConstHandle, IntRef) {
self.const_data_mut()
.const_int(int)
self.const_data_mut().const_int(int)
}
/// Gets or inserts a static string reference.
pub fn const_str(&mut self, s: impl AsRef<str>) -> (ConstHandle, StrRef) {
self.const_data_mut()
.const_str(s)
self.const_data_mut().const_str(s)
}
/// Looks up a variable name.
///
/// This will search up the locals stack (innermost scope first) until the given name is
/// found, ultimately ending with a global name lookup.
pub fn lookup_scope(&mut self, sym: Sym) -> Option<Local> {
    // find_map short-circuits on the first (innermost) scope containing the name.
    self.locals
        .iter()
        .rev()
        .find_map(|locals| locals.get(&sym))
        .or_else(|| self.globals.get(&sym))
        .copied()
}
/// Creates a new local variable if it does not exist in the current local scope.
///
/// Returns the existing handle if the name is already bound in the *innermost* scope
/// layer. Panics if no scope layer has been pushed (see `push_scope_layer`).
pub fn create_local(&mut self, sym: Sym) -> Local {
    let locals = self.locals.last_mut().expect("scope");
    if let Some(local) = locals.get(&sym) {
        *local
    } else {
        // wish I could use mem::replace here, oh well
        // Allocate the next handle and advance the counter.
        let local = self.next_local;
        self.next_local = self.next_local.next();
        locals.insert(sym, local);
        local
    }
}
/// Creates a new global variable if it does not exist in the current global scope.
pub fn create_global(&mut self, sym: Sym) -> Local {
    // Fast path: the name is already bound as a global.
    if let Some(&existing) = self.globals.get(&sym) {
        return existing;
    }
    // Otherwise allocate the next handle and bind it.
    let global = self.next_local;
    self.next_local = global.next();
    self.globals.insert(sym, global);
    global
}
/// Pushes an empty scope layer of local variables.
pub fn push_scope_layer(&mut self) {
self.locals.push(Default::default());
}
/// Pops a scope layer of local variables, if any are available.
///
/// Returns the popped layer's bindings so the caller can inspect the locals that went
/// out of scope. Note that `next_local` is not rewound here.
pub fn pop_scope_layer(&mut self) -> Option<BTreeMap<Sym, Local>> {
    self.locals.pop()
}
}

View File

@@ -1,18 +1,47 @@
use crate::{
compile::{error::*, Compile},
syn::{ast::*, visit::*},
compile::{basic_block::*, error::*, Compile},
obj::prelude::*,
vm::inst::*
syn::{ast::*, visit::*},
vm::inst::*,
};
use std::mem;
/// A basic block of VM code.
///
/// Thunks are precomputed chunks of code that may allow for branching and/or looping.
#[derive(Debug, Clone, PartialEq)]
pub enum Thunk {
    /// A list of instructions.
    ///
    /// This is the core of all `Thunk` values.
    Body(Vec<Inst>),
    /// A list of thunks, laid out one after another.
    List(Vec<Thunk>),
    /// Based on the conditional flag in the VM, code for one of these thunks will be executed.
    ///
    /// The conditional flag is expected to be set upon entry to this thunk.
    ///
    /// Only one of these thunks will be executed. At the end of either thunk, the program will
    /// continue at the address following this branch.
    Branch {
        thunk_true: Box<Thunk>,
        thunk_false: Box<Thunk>,
    },
    /// Based on the conditional flag in the VM, code for this loop will continue to execute.
    ///
    /// The conditional flag is expected to be set upon entry to this thunk.
    ///
    /// At the start of the body, the condition flag is initially checked. If it is not true,
    /// the program jumps to the end of the body and continues.
    ///
    /// At the end of the body, the program jumps back to the start where the condition is checked
    /// again.
    Loop(Box<Thunk>),
    /// A placeholder/default thunk that compiles to nothing.
    Nop,
}
@@ -54,6 +83,32 @@ impl Thunk {
(lhs, rhs) => Thunk::List(vec![lhs, rhs]),
};
}
/// Gets the number of basic blocks that this thunk will produce.
///
/// This is necessary for compiling to a basic block, in order to predict the "next block"
/// that a thunk will be jumping to.
fn basic_block_count(&self) -> usize {
    match self {
        // A plain body is exactly one linear block.
        Thunk::Body(_) => 1,
        // A list contributes the sum of its children's block counts.
        Thunk::List(thunks) => thunks.iter().map(Thunk::basic_block_count).sum(),
        // True + false block counts, plus 1 for the branch basic block at the start.
        Thunk::Branch {
            thunk_true,
            thunk_false,
        } => thunk_true.basic_block_count() + thunk_false.basic_block_count() + 1,
        // The loop body, plus 1 for the branch at the start of the loop.
        Thunk::Loop(thunk) => thunk.basic_block_count() + 1,
        // Nops compile to nothing at all.
        Thunk::Nop => 0,
    }
}
/// Flattens this thunk into a linear list of basic blocks.
pub fn flatten(self) -> BasicBlockList {
    let flattener = Flatten::default();
    flattener.flatten(self)
}
}
impl From<Inst> for Thunk {
@@ -74,6 +129,80 @@ impl From<Vec<Thunk>> for Thunk {
}
}
//
// struct Flatten
//
/// Flattens a thunk into a linear list of basic blocks.
#[derive(Default)]
struct Flatten {
    // The list wraps a BTreeMap instead of a Vec because blocks can be inserted
    // out-of-order during flattening.
    blocks: BasicBlockList,
}
//
// impl Flatten
//
impl Flatten {
    /// Consumes the flattener and the given thunk, producing the completed block list.
    pub fn flatten(mut self, thunk: Thunk) -> BasicBlockList {
        // "It's 4pm babe, time for your thunk flattening!"
        // "Yes, honey..."
        // The total predicted block count doubles as the "one past the end"
        // continuation index for the outermost thunk.
        let last_block = thunk.basic_block_count();
        self.flatten_next(last_block, thunk);
        // The prediction must agree with what was actually produced.
        assert_eq!(self.blocks.len(), last_block);
        self.blocks
    }

    /// Flattens `thunk` into `self.blocks`.
    ///
    /// `next_block` is the index of the block control continues to after `thunk`
    /// completes — which is *not* necessarily the next block in physical layout
    /// (e.g. for the arms of a branch).
    fn flatten_next(&mut self, next_block: usize, thunk: Thunk) {
        match thunk {
            Thunk::Body(thunk) => {
                // A body becomes a single linear block exiting to `next_block`.
                let this_block = self.this_block();
                let prev = self.blocks.insert(
                    this_block,
                    BasicBlock::Block {
                        exit: next_block,
                        block: thunk,
                    },
                );
                // Indices are allocated sequentially, so this slot must be empty.
                assert!(prev.is_none());
            }
            Thunk::List(thunks) => {
                let mut thunks = thunks.into_iter().peekable();
                while let Some(thunk) = thunks.next() {
                    let count = thunk.basic_block_count();
                    let start = self.this_block();
                    // Intermediate elements fall through to whatever physically follows
                    // them; the *final* element continues to the list's own target,
                    // which may lie elsewhere (e.g. when this list is a branch arm).
                    let child_next = if thunks.peek().is_some() {
                        start + count
                    } else {
                        next_block
                    };
                    self.flatten_next(child_next, thunk);
                    // Each child must have produced exactly its predicted block count.
                    assert_eq!(self.this_block(), start + count);
                }
            }
            Thunk::Branch {
                thunk_true,
                thunk_false,
            } => {
                // Physical layout: [branch] [true blocks...] [false blocks...].
                let branch_block = self.this_block();
                let block_true = branch_block + 1;
                let block_false = block_true + thunk_true.basic_block_count();
                let end = block_false + thunk_false.basic_block_count();
                self.blocks.insert(
                    branch_block,
                    BasicBlock::Branch {
                        block_true,
                        block_false,
                    },
                );
                // Both arms continue at the block following the entire branch
                // construct — not at each other.
                self.flatten_next(next_block, *thunk_true);
                self.flatten_next(next_block, *thunk_false);
                assert_eq!(self.this_block(), end);
            }
            // Loop lowering is not implemented yet; see `Thunk::Loop` for the plan.
            Thunk::Loop(_) => todo!(),
            // Nops produce no blocks at all.
            Thunk::Nop => {}
        }
    }

    /// The index the next-created block will occupy (== number of blocks created so far).
    fn this_block(&self) -> usize {
        self.blocks.len()
    }
}
//
// struct CompileBody
//
/// Compiles an AST body down to a `Thunk`.
///
/// Thunks are the basic building blocks of the IR. Thunks form a chain of decision paths that may
/// be taken, which allows an optimizer to remove dead code, detect endless loops, and so on. This
/// allows for shrinking blocks of code without having to recalculate jump addresses.
pub struct CompileBody<'c, 't> {
    // Shared compiler state mutated while visiting the AST.
    compile: &'c mut Compile<'t>,
}
@@ -88,7 +217,14 @@ impl<'c, 't> CompileBody<'c, 't> {
}
}
//
// impl Visit for CompileBody
//
impl Visit for CompileBody<'_, '_> {
// XXX
// Trying to "future-proof" by using Result<_> in case there's some reason that an error
// may need to be thrown in the future so I don't have to wrap every return value in Ok(_)
type Out = Result<Thunk>;
fn visit_body(&mut self, body: &Body) -> Self::Out {
@@ -197,9 +333,9 @@ impl Visit for CompileBody<'_, '_> {
// - eval expr
// - getattr (expr.access)
let mut thunk = self.visit_expr(&expr.expr)?;
thunk.push_thunk(Thunk::Body(vec![
Inst::GetAttr(global_sym(expr.access.to_string())),
]));
thunk.push_thunk(Thunk::Body(vec![Inst::GetAttr(global_sym(
expr.access.to_string(),
))]));
Ok(thunk)
}
@@ -233,3 +369,49 @@ impl Visit for CompileBody<'_, '_> {
Ok(thunk)
}
}
//
// Tests
//
#[test]
fn test_flatten_thunk() {
    // Instruction payloads for each logical section of the program.
    let init_body = vec![
        Inst::PushSym(Sym::new(0)),
        Inst::PushSym(Sym::new(1)),
        Inst::Call(1),
    ];
    let true_body = vec![Inst::PushSym(Sym::new(2))];
    let false_body = vec![Inst::PushSym(Sym::new(3))];
    let end_body = vec![Inst::PushSym(Sym::new(1)), Inst::Call(1)];
    // An init block, followed by a two-way branch, followed by a trailing block.
    let thunk = Thunk::List(vec![
        Thunk::Body(init_body.clone()),
        Thunk::Branch {
            thunk_true: Box::new(Thunk::Body(true_body.clone())),
            thunk_false: Box::new(Thunk::Body(false_body.clone())),
        },
        Thunk::Body(end_body.clone()),
    ]);
    let block_count = thunk.basic_block_count();
    let blocks = thunk.flatten();
    assert_eq!(blocks.len(), block_count);
    // Flattening should yield exactly these five contiguous basic blocks.
    let actual: Vec<_> = blocks.into_iter().collect();
    let expected = vec![
        (0, BasicBlock::Block { exit: 1, block: init_body }),
        (1, BasicBlock::Branch { block_true: 2, block_false: 3 }),
        (2, BasicBlock::Block { exit: 4, block: true_body }),
        (3, BasicBlock::Block { exit: 4, block: false_body }),
        (4, BasicBlock::Block { exit: 5, block: end_body }),
    ];
    assert_eq!(actual, expected);
}

6
src/obj/locals.rs Normal file
View File

@@ -0,0 +1,6 @@
use crate::obj::ObjRef;
use std::collections::BTreeMap;
// Declares the `Local` handle type (a newtype over a `usize` index) via the shared macro.
handle_type!(Local);
/// Maps a local variable handle to the object reference it currently holds.
pub type Locals = BTreeMap<Local, ObjRef>;

View File

@@ -85,3 +85,30 @@ macro_rules! vtable {
$crate::obj::attrs::Vtable::new(maplit::btreemap! { })
}}
}
#[macro_export]
/// Declares a copyable, ordered, hashable handle type: a newtype over a `usize` index
/// with `new`/`index`/`next` accessors.
macro_rules! handle_type {
    ($name:ident) => {
        #[derive(Default, Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
        pub struct $name(usize);
        impl $name {
            /// Creates a new handle.
            pub fn new(handle: usize) -> Self {
                Self(handle)
            }
            /// Gets the index of this handle.
            pub fn index(&self) -> usize {
                self.0
            }
            /// Gets the next handle after this one.
            pub fn next(&self) -> Self {
                Self::new(self.index() + 1)
            }
        }
        // Handles hold only a plain usize (no GC references), so an empty scan applies.
        impl shredder::EmptyScan for $name {}
    };
}

View File

@@ -5,15 +5,16 @@ pub mod attrs;
pub mod fun;
pub mod int;
pub mod intern;
pub mod locals;
pub mod names;
pub mod str;
pub mod sym;
#[cfg(test)]
mod test;
pub mod ty;
pub mod prelude {
pub use crate::obj::{attrs::*, fun::*, int::*, intern::*, str::*, sym::*, ty::*, Obj, ObjRef};
pub use crate::obj::{
attrs::*, fun::*, int::*, intern::*, locals::*, str::*, sym::*, ty::*, Obj, ObjRef,
};
}
use shredder::{Gc, Scan};
@@ -36,13 +37,10 @@ pub trait Obj: Scan + std::fmt::Debug {
fn attrs_mut(&mut self) -> Option<&mut Attrs>;
fn get_attr(&self, sym: Sym) -> Option<ObjRef> {
self.attrs()
.get(&sym)
.cloned()
.or_else(|| {
let vtable = self.vtable().get();
vtable.get(&sym).cloned()
})
self.attrs().get(&sym).cloned().or_else(|| {
let vtable = self.vtable().get();
vtable.get(&sym).cloned()
})
}
}

View File

@@ -1,6 +1,5 @@
use crate::obj::{intern::Interner, names::*, prelude::*};
use once_cell::sync::Lazy;
use shredder::Scan;
use std::{collections::BTreeMap, sync::Mutex};
//
@@ -9,29 +8,14 @@ use std::{collections::BTreeMap, sync::Mutex};
pub type SymRef = ObjRef<Sym>;
/// A literal name or symbol.
#[derive(Scan, Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Sym(usize);
handle_type!(Sym);
impl Sym {
/// Gets the index of this symbol.
pub fn index(&self) -> usize {
self.0
}
/// Creates a new symbol.
///
/// Generally, new symbols should be created through the `global_sym` function - take care
/// while using this function.
pub(super) fn new(sym: usize) -> Self {
Sym(sym)
}
/// Creates a new symbol object.
///
/// Generally, new symbol objects should be created through the `global_sym_ref` function -
/// take care while using this function.
pub(super) fn new_obj(sym: impl Into<Sym>) -> SymRef {
pub fn new_obj(sym: impl Into<Sym>) -> SymRef {
ObjRef::new(sym.into())
}
}
@@ -114,4 +98,3 @@ pub fn global_sym(s: String) -> Sym {
//
pub type SymTable = Interner<String>;
pub type Locals = Interner<Sym>;

View File

@@ -1,64 +0,0 @@
use crate::obj::{names::*, prelude::*};
use once_cell::sync::Lazy;
use shredder::*;
use std::sync::Mutex;
static TEST_LOCK: Lazy<Mutex<()>> = Lazy::new(|| Mutex::new(()));
#[test]
fn test_sym_plumbing() {
// Most of this test is making sure we are free of runtime infinite recursion issues with
// initializing static data (NIL_OBJ, SYM_TY, SYM_ATTRS)
let _guard = TEST_LOCK.lock().unwrap();
let start = number_of_tracked_allocations(); // need 'start' because of static allocations
run_with_gc_cleanup(|| {
assert_eq!(number_of_tracked_allocations(), start + 0);
let nil = NIL_NAME.sym_ref();
// nil sym obj
assert_eq!(number_of_tracked_allocations(), start + 1);
{
read_obj!(let nil_obj = nil);
let sym: Sym = **nil_obj;
assert_eq!(NIL_NAME.sym, sym);
assert_eq!(number_of_tracked_allocations(), start + 1);
nil_obj.attrs();
let ty_sym_obj = SYM_NAME.sym_ref();
assert_eq!(number_of_tracked_allocations(), start + 2);
}
let on = TRUE_NAME.sym_ref();
// true sym obj, sym ty obj shouldn't be duplicated
assert_eq!(number_of_tracked_allocations(), start + 3);
let off = FALSE_NAME.sym_ref();
// false sym obj, sym ty obj shouldn't be duplicated
assert_eq!(number_of_tracked_allocations(), start + 4);
});
// these are *static* values, so there will always remain at least one reference.
assert_eq!(number_of_tracked_allocations(), start + 4);
}
#[test]
fn test_dyn_obj_ref_eq() {
#[derive(Default, Debug, Scan)]
struct FooObj { vtable: Attrs, attrs: Attrs }
impl_obj!(FooObj, vtable, attrs);
let _guard = TEST_LOCK.lock().unwrap();
let start = number_of_tracked_allocations(); // need 'start' because of static allocations
run_with_gc_cleanup(|| {
let rf1: ObjRef = ObjRef::new(FooObj::default());
let rf2 = rf1.clone();
assert!(rf1.ref_eq(&rf2));
assert!(rf2.ref_eq(&rf1));
assert_eq!(number_of_tracked_allocations(), start + 1);
});
assert_eq!(number_of_tracked_allocations(), start + 0);
}

View File

@@ -23,3 +23,20 @@ pub trait Visit {
fn visit_access_expr(&mut self, expr: &AccessExpr) -> Self::Out;
fn visit_atom(&mut self, atom: &Atom) -> Self::Out;
}
/*
copy/paste of default_accepts
fn visit_body(&mut self, body: &Body) -> Self::Out { DefaultAccept::default_accept(body, self); }
fn visit_stmt(&mut self, stmt: &Stmt) -> Self::Out { DefaultAccept::default_accept(stmt, self); }
fn visit_assign_stmt(&mut self, assign: &AssignStmt) -> Self::Out { DefaultAccept::default_accept(assign, self); }
fn visit_lhs_expr(&mut self, lhs_expr: &LhsExpr) -> Self::Out { DefaultAccept::default_accept(lhs_expr, self); }
fn visit_expr(&mut self, expr: &Expr) -> Self::Out { DefaultAccept::default_accept(expr, self); }
fn visit_bin_expr(&mut self, expr: &BinExpr) -> Self::Out { DefaultAccept::default_accept(expr, self); }
fn visit_un_expr(&mut self, expr: &UnExpr) -> Self::Out { DefaultAccept::default_accept(expr, self); }
fn visit_call_expr(&mut self, expr: &CallExpr) -> Self::Out { DefaultAccept::default_accept(expr, self); }
fn visit_index_expr(&mut self, expr: &IndexExpr) -> Self::Out { DefaultAccept::default_accept(expr, self); }
fn visit_access_expr(&mut self, expr: &AccessExpr) -> Self::Out { DefaultAccept::default_accept(expr, self); }
fn visit_atom(&mut self, atom: &Atom) -> Self::Out { DefaultAccept::default_accept(atom, self); }
*/

View File

@@ -1,17 +1,6 @@
use crate::obj::prelude::*;
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ConstHandle(usize);
impl ConstHandle {
pub fn index(&self) -> usize {
self.0
}
pub fn new(handle: usize) -> Self {
ConstHandle(handle)
}
}
handle_type!(ConstHandle);
#[derive(Debug, Default)]
pub struct ConstPool {

View File

@@ -1,6 +1,5 @@
use crate::obj::prelude::*;
use shredder::{GcSafe, Scan, Scanner};
use std::collections::BTreeMap;
/// A stack call frame.
#[derive(Debug, Clone)]
@@ -25,8 +24,6 @@ unsafe impl Scan for FrameKind {
unsafe impl GcSafe for FrameKind {}
pub type Locals = BTreeMap<usize, ObjRef>;
#[derive(Scan, Debug, Clone)]
pub struct Frame {
locals: Locals,

View File

@@ -25,7 +25,7 @@ pub enum Inst {
///
/// In code, it would look like this:
///
/// target.symbol = source
/// `target.symbol = source`
///
SetAttr(Sym),

View File

@@ -81,8 +81,6 @@ impl<'c> Vm<'c> {
/// Set the next program counter value.
///
/// This may cause the running program to crash. Handle with care.
///
/// TODO : consider making this `unsafe`? Is that appropriate in this context?
pub fn set_pc(&mut self, pc: usize) {
    // No validation is performed here; per the doc comment above, an out-of-range
    // pc may crash the running program.
    self.pc = pc;
}