tuple patterns now use jump_stub and patch_jump, with 16-bit jump values

Scott Richmond 2025-06-18 14:47:00 -04:00
parent a0ef6d2777
commit a5f2e2a9bd
2 changed files with 82 additions and 40 deletions

@@ -271,8 +271,8 @@ impl Chunk {
| JumpIfZero => {
let high = self.bytecode[*i + 1];
let low = self.bytecode[*i + 2];
let rand = ((high as u16) << 8) + low as u16;
println!("{i:04}: {:16} {rand:05}", op.to_string());
let len = ((high as u16) << 8) + low as u16;
println!("{i:04}: {:16} {len:05}", op.to_string());
*i += 2;
}
}
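
For reference, the two operand bytes the disassembler reads above reassemble into the jump length like this; decode_jump is an illustrative name, not a function in the codebase:

fn decode_jump(high: u8, low: u8) -> u16 {
    // high byte first, then low byte, matching the order the compiler emits them
    ((high as u16) << 8) + low as u16
}

fn main() {
    assert_eq!(decode_jump(0x01, 0x2c), 300); // a length the old single-byte operand could not hold
}
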
@@ -410,6 +410,21 @@ impl<'a> Compiler<'a> {
self.chunk.bytecode.push(low);
}
fn jump_stub(&mut self, op: Op) -> usize {
let out = self.chunk.bytecode.len();
self.emit_op(op);
self.emit_byte(0xff);
self.emit_byte(0xff);
out
}
fn patch_jump(&mut self, i: usize, len: usize) {
let low = len as u8;
let high = (len >> 8) as u8;
self.chunk.bytecode[i + 1] = high;
self.chunk.bytecode[i + 2] = low;
}
fn emit_constant(&mut self, val: Value) {
let const_idx = if let Some(idx) = self.chunk.constants.iter().position(|v| *v == val) {
idx
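
The two new helpers pair up as a reserve-then-backfill pattern: jump_stub emits the opcode plus two 0xff placeholder bytes and returns where they live, and patch_jump later writes the real 16-bit length into those slots. Below is a self-contained sketch of that round trip over a bare Vec<u8>, with a made-up opcode and an assumed offset convention (the real convention depends on how the VM advances its instruction pointer):

const OP_JUMP: u8 = 0x01; // invented opcode, for the sketch only

fn jump_stub(code: &mut Vec<u8>, op: u8) -> usize {
    let at = code.len();
    code.extend([op, 0xff, 0xff]); // opcode plus two placeholder operand bytes
    at
}

fn patch_jump(code: &mut Vec<u8>, at: usize, len: usize) {
    code[at + 1] = (len >> 8) as u8; // high byte
    code[at + 2] = len as u8;        // low byte
}

fn main() {
    let mut code = Vec::new();
    let stub = jump_stub(&mut code, OP_JUMP);
    code.extend([0u8; 300]); // stand-in for the code the jump skips
    patch_jump(&mut code, stub, code.len() - stub - 3);

    let operand = ((code[stub + 1] as u16) << 8) + code[stub + 2] as u16;
    assert_eq!(operand, 300); // round-trips through the two operand bytes
}
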
@@ -583,18 +598,26 @@ impl<'a> Compiler<'a> {
}
Keyword(s) => self.emit_constant(Value::Keyword(s)),
Block(lines) => {
// increase the scope
self.scope_depth += 1;
// stash the stack depth
let stack_depth = self.stack_depth;
// evaluate all the lines but the last
for expr in lines.iter().take(lines.len() - 1) {
if is_binding(expr) {
self.visit(expr);
} else {
self.visit(expr);
// evaluate the expression
self.visit(expr);
// if it doesn't bind a name, pop the result from the stack
if !is_binding(expr) {
self.pop();
}
}
// now, evaluate the last expression in the block
let last_expr = lines.last().unwrap();
match last_expr {
// if the last expression is a let form,
// return the evaluated rhs instead of whatever is last on the stack
// we do this by pretending it's a binding
(Let(patt, expr), _) => {
self.match_depth = 0;
self.emit_op(Op::ResetMatch);
@@ -606,14 +629,20 @@ impl<'a> Compiler<'a> {
self.emit_byte(expr_pos);
self.stack_depth += 1;
}
// otherwise, just evaluate it and leave the value on the stack
_ => {
self.visit(last_expr);
}
}
self.stack_depth += 1;
self.emit_op(Op::Store);
self.scope_depth -= 1;
// we've made a new value, so increase the stack level in the compiler
self.stack_depth += 1;
// store the value in the return register
self.emit_op(Op::Store);
// reset the scope
self.scope_depth -= 1;
while let Some(binding) = self.bindings.last() {
if binding.depth > self.scope_depth {
self.bindings.pop();
@@ -621,29 +650,24 @@ impl<'a> Compiler<'a> {
break;
}
}
// reset stack
while self.stack_depth > stack_depth + 1 {
self.pop();
}
// reset the stack
self.pop_n(self.stack_depth - stack_depth - 1);
// load the value from the return register
self.emit_op(Op::Load);
}
If(cond, then, r#else) => {
self.visit(cond);
let jif_idx = self.len();
self.emit_op(Op::JumpIfFalse);
self.emit_byte(0xff);
let jif_idx = self.jump_stub(Op::JumpIfFalse);
self.stack_depth -= 1;
self.visit(then);
let jump_idx = self.len();
self.emit_op(Op::Jump);
self.emit_byte(0xff);
let jump_idx = self.jump_stub(Op::Jump);
self.visit(r#else);
self.stack_depth -= 1;
let end_idx = self.len();
let jif_offset = jump_idx - jif_idx;
let jump_offset = end_idx - jump_idx - 2;
self.chunk.bytecode[jif_idx + 1] = jif_offset as u8;
self.chunk.bytecode[jump_idx + 1] = jump_offset as u8;
let jump_offset = end_idx - jump_idx - 1;
self.patch_jump(jif_idx, jif_offset);
self.patch_jump(jump_idx, jump_offset);
}
Let(patt, expr) => {
self.match_depth = 0;
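
The pop_n call in this hunk replaces the old counting loop; passing self.stack_depth - stack_depth - 1 leaves the compiler's depth counter exactly where the old loop condition (depth greater than saved + 1) did. A minimal check of that arithmetic with plain counters, nothing Ludus-specific:

fn main() {
    let saved = 2usize;        // depth stashed at the top of the block
    let mut current = 7usize;  // depth after compiling the block body

    // old loop: while current > saved + 1 { pop one }
    // new call: pop_n(current - saved - 1)
    let n = current - saved - 1;
    current -= n;

    assert_eq!(n, 4);
    assert_eq!(current, saved + 1); // same end state either way
}
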
@@ -694,36 +718,54 @@ impl<'a> Compiler<'a> {
self.match_constant(Value::Interned(s));
}
TuplePattern(members) => {
// first, test the tuple against length
self.emit_op(Op::MatchTuple);
self.emit_byte(members.len());
self.emit_op(Op::JumpIfNoMatch);
let before_load_tup_idx = self.len();
self.emit_byte(0xff);
// skip everything if tuple lengths don't match
let before_load_tup_idx = self.jump_stub(Op::JumpIfNoMatch);
// set up the per-member conditional logic
let mut jump_idxes = vec![];
self.match_depth += members.len();
// stash match_depth, and set it to the tuple len
let match_depth = self.match_depth;
self.match_depth = members.len();
// load the tuple and update the stack len
self.emit_op(Op::LoadTuple);
self.stack_depth += members.len();
// visit each member
for member in members {
// reduce the match depth to start
self.match_depth -= 1;
self.emit_op(Op::MatchDepth);
self.emit_byte(self.match_depth);
// visit the pattern member
self.visit(member);
self.emit_op(Op::JumpIfNoMatch);
jump_idxes.push(self.len());
self.emit_byte(0xff);
// and jump if there's no match
jump_idxes.push(self.jump_stub(Op::JumpIfNoMatch));
}
self.emit_op(Op::Jump);
let jump_idx = self.len();
self.emit_byte(0xff);
// if we get here--not having jumped on no match--we're matched; jump the "no match" code
self.match_depth = match_depth + members.len();
let jump_idx = self.jump_stub(Op::Jump);
// patch up the previous no match jumps to jump to clean-up code
for idx in jump_idxes {
self.chunk.bytecode[idx] = (self.len() - idx) as u8 - 1;
self.patch_jump(idx, self.len() - idx - 2)
}
for _ in 0..members.len() {
self.emit_op(Op::Pop);
}
self.chunk.bytecode[before_load_tup_idx] =
(self.len() - before_load_tup_idx) as u8 - 1;
self.chunk.bytecode[jump_idx] = (self.len() - jump_idx) as u8 - 1;
// pop everything that was pushed
// don't change the compiler stack representation, tho
// we need this as cleanup code with no matches
// the compiler should still have access to the bindings in this pattern
self.emit_op(Op::PopN);
self.emit_byte(members.len());
// patch up the tuple length match jump
self.patch_jump(before_load_tup_idx, self.len() - before_load_tup_idx - 3);
// patch up the yes-matches unconditional jump
self.patch_jump(jump_idx, self.len() - jump_idx - 3);
}
ListPattern(members) => {
self.emit_op(Op::MatchList);


@@ -75,7 +75,7 @@ pub fn run(src: &'static str) {
pub fn main() {
// env::set_var("RUST_BACKTRACE", "1");
let src = "
let ((1), 2) = ((1), 2)
let ((1)) = ((1))
";
run(src);
}