commit some changes
commit 69b6b0cce6

src/base.rs: 24 lines changed

@@ -53,8 +53,32 @@ pub fn store(b: &Value, val: &Value) -> Value {
 // name, patterns, AND docstring
 pub fn doc(f: &Value) -> Value {
     match f {
+<<<<<<< HEAD
         Value::Fn(f) => f.as_ref().doc(),
         _ => Value::Interned("no documentation found"),
+||||||| things & stuff
+        Value::Fn(f) => {
+            let name = &f.borrow().name;
+            let doc = &f.borrow().doc;
+            if let Some(docstr) = doc {
+                Value::AllocatedString(Rc::new(format!("{name}: {docstr}")))
+            } else {
+                Value::AllocatedString(Rc::new(format!("{name}: no documentation found")))
+            }
+        }
+        _ => Value::InternedString("no documentation found"),
+=======
+        Value::Fn(f) => {
+            let name = &f.name;
+            let doc = &f.doc;
+            if let Some(docstr) = doc {
+                Value::AllocatedString(Rc::new(format!("{name}: {docstr}")))
+            } else {
+                Value::AllocatedString(Rc::new(format!("{name}: no documentation found")))
+            }
+        }
+        _ => Value::InternedString("no documentation found"),
+>>>>>>> main
     }
 }
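
The conflict above leaves `doc` with three competing shapes: HEAD delegates to `f.as_ref().doc()` on an `Rc<LFn>`, while the other two branches build a `{name}: {docstr}` string by hand and differ only in whether the function sits behind a `RefCell` (`f.borrow().name`) or a plain `Rc` (`f.name`). A minimal, self-contained sketch of one possible resolution, keeping the plain-`Rc` layout and the name-prefixed docstring; the `Value` and `LFn` definitions here are simplified stand-ins, not the crate's real types:

use std::rc::Rc;

// Simplified stand-ins for the real Value and LFn types.
#[derive(Debug)]
enum Value {
    Interned(&'static str),
    String(Rc<String>),
    Fn(Rc<LFn>),
}

#[derive(Debug)]
struct LFn {
    name: String,
    doc: Option<String>,
}

// One possible resolution: keep the plain Rc (no RefCell) and prefix the
// docstring with the function's name, as both non-HEAD branches do.
fn doc(f: &Value) -> Value {
    match f {
        Value::Fn(f) => {
            let name = &f.name;
            match &f.doc {
                Some(docstr) => Value::String(Rc::new(format!("{name}: {docstr}"))),
                None => Value::String(Rc::new(format!("{name}: no documentation found"))),
            }
        }
        _ => Value::Interned("no documentation found"),
    }
}

fn main() {
    let add = Value::Fn(Rc::new(LFn {
        name: "add".to_string(),
        doc: Some("adds two numbers".to_string()),
    }));
    println!("{:?}", doc(&add));
}

Whichever side wins, the deciding question is the same one that runs through the rest of this commit: whether `Fn` values need interior mutability.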

@@ -4,6 +4,7 @@ use crate::value::Value;
 use ariadne::{sources, Color, Label, Report, ReportKind};
 use std::collections::HashSet;

+<<<<<<< HEAD
 // pub fn report_panic(err: LErr) {
 // let mut srcs = HashSet::new();
 // let mut stack = vec![];
@@ -32,6 +33,65 @@ use std::collections::HashSet;
 // stack.push(label);
 // srcs.insert((*input, *src));
 // }
+||||||| things & stuff
+pub fn report_panic(err: LErr) {
+    let mut srcs = HashSet::new();
+    let mut stack = vec![];
+    let mut order = 1;
+    for entry in err.trace.iter().rev() {
+        let Trace {
+            callee,
+            caller,
+            function,
+            arguments,
+            input,
+            src,
+        } = entry;
+        let (_, first_span) = callee;
+        let (_, second_span) = caller;
+        let Value::Fn(f) = function else {
+            unreachable!()
+        };
+        let fn_name = f.borrow().name.clone();
+        let i = first_span.start;
+        let j = second_span.end;
+        let label = Label::new((entry.input, i..j))
+            .with_color(Color::Yellow)
+            .with_message(format!("({order}) calling `{fn_name}` with `{arguments}`"));
+        order += 1;
+        stack.push(label);
+        srcs.insert((*input, *src));
+    }
+=======
+pub fn report_panic(err: LErr) {
+    let mut srcs = HashSet::new();
+    let mut stack = vec![];
+    let mut order = 1;
+    for entry in err.trace.iter().rev() {
+        let Trace {
+            callee,
+            caller,
+            function,
+            arguments,
+            input,
+            src,
+        } = entry;
+        let (_, first_span) = callee;
+        let (_, second_span) = caller;
+        let Value::Fn(f) = function else {
+            unreachable!()
+        };
+        let fn_name = f.name.clone();
+        let i = first_span.start;
+        let j = second_span.end;
+        let label = Label::new((entry.input, i..j))
+            .with_color(Color::Yellow)
+            .with_message(format!("({order}) calling `{fn_name}` with `{arguments}`"));
+        order += 1;
+        stack.push(label);
+        srcs.insert((*input, *src));
+    }
+>>>>>>> main

 // Report::build(ReportKind::Error, (err.input, err.span.into_range()))
 // .with_message(format!("Ludus panicked! {}", err.msg))
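
Both sides of this `report_panic` conflict assemble the same ordered list of trace labels, walking `err.trace` newest-first and numbering each call site; they differ only in reading the function name through `f.borrow().name` versus `f.name`. A small self-contained sketch of that numbering pass, with `Trace` reduced to a plain struct and the ariadne `Label` replaced by a span/message pair (both are stand-ins, not the crate's real types):

// Stand-in for the Trace entries carried on an LErr; the real struct also
// carries the function value and source ids.
struct Trace {
    fn_name: String,
    arguments: String,
    start: usize, // callee span start
    end: usize,   // caller span end
}

// Number the frames newest-first, the way both report_panic variants do
// before handing labels to ariadne.
fn trace_messages(trace: &[Trace]) -> Vec<(std::ops::Range<usize>, String)> {
    let mut stack = Vec::new();
    let mut order = 1;
    for entry in trace.iter().rev() {
        let msg = format!(
            "({order}) calling `{}` with `{}`",
            entry.fn_name, entry.arguments
        );
        stack.push((entry.start..entry.end, msg));
        order += 1;
    }
    stack
}

fn main() {
    let trace = vec![
        Trace { fn_name: "outer".into(), arguments: "(1)".into(), start: 0, end: 10 },
        Trace { fn_name: "inner".into(), arguments: "(2)".into(), start: 4, end: 8 },
    ];
    for (span, msg) in trace_messages(&trace) {
        println!("{:?}: {msg}", span);
    }
}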

src/main.rs: 424 lines changed

@@ -1,10 +1,434 @@
+<<<<<<< HEAD
 use rudus::ludus;
 use std::env;
 use std::fs;
+||||||| things & stuff
+// an implementation of Ludus
+
+// curently left undone (and not adding for a while yet):
+// * sets
+// * interpolated strings & string patterns
+// * pkgs, namespaces, imports, `use` forms
+// * with forms
+// * test forms
+// * ignored words
+
+// todo:
+// * [x] rewrite fn parser to use chumsky::Recursive::declare/define
+//   - [x] do this to extract/simplify/DRY things like tuple patterns, fn clauses, etc.
+// * [x] Work around chumsky::Stream::from_iter().spanned disappearing in most recent version
+// * [x] investigate using labels (which is behind a compiler flag, somehow)
+// * [ ] write parsing errors
+// * [ ] wire up Ariadne parsing errors
+// * [x] add stack traces and code locations to panics
+// * [x] validation
+// * [x] break this out into multiple files
+// * [x] write a tree-walk VM
+//   - [x] learn how to deal with lifetimes
+//   - [x] with stack mechanics and refcounting
+//   - [ ] with tail-call optimization (nb: this may not be possible w/ a TW-VM)
+//   - [ ] with all the necessary forms for current Ludus
+// * [x] guards in match clauses
+// * [x] `as` patterns
+// * [x] splat patterns in tuples, lists, dicts
+// * [x] splats in list and dict literals
+// * [x] `loop` and `recur`
+// * [x] string patterns
+// * [x] string interpolation
+// * [x] docstrings
+// * [x] write `base` in Rust
+// * [ ] turn this into a library function
+// * [ ] compile this into WASM
+// * [ ] perf testing
+
+use chumsky::{input::Stream, prelude::*};
+use rust_embed::Embed;
+
+mod spans;
+
+mod lexer;
+use crate::lexer::*;
+
+mod value;
+use crate::value::*;
+
+mod parser;
+use crate::parser::*;
+
+mod base;
+use crate::base::*;
+
+mod validator;
+use crate::validator::*;
+
+mod process;
+use crate::process::*;
+
+mod errors;
+use crate::errors::*;
+
+#[derive(Embed)]
+#[folder = "assets/"]
+struct Asset;
+
+pub fn prelude<'src>() -> (
+    Vec<(String, Value<'src>)>,
+    std::collections::HashMap<*const Ast, FnInfo>,
+) {
+    let prelude = Asset::get("prelude.ld").unwrap().data.into_owned();
+    // we know for sure Prelude should live through the whole run of the program
+    let leaked = Box::leak(Box::new(prelude));
+    let prelude = std::str::from_utf8(leaked).unwrap();
+
+    let (ptoks, perrs) = lexer().parse(prelude).into_output_errors();
+    if !perrs.is_empty() {
+        println!("Errors lexing Prelude");
+        println!("{:?}", perrs);
+        panic!();
+    }
+
+    let ptoks = ptoks.unwrap();
+
+    let (p_ast, perrs) = parser()
+        .parse(Stream::from_iter(ptoks).map((0..prelude.len()).into(), |(t, s)| (t, s)))
+        .into_output_errors();
+    if !perrs.is_empty() {
+        println!("Errors parsing Prelude");
+        println!("{:?}", perrs);
+        panic!();
+    }
+
+    let prelude_parsed = Box::leak(Box::new(p_ast.unwrap()));
+    let base_pkg = base();
+
+    let mut v6or = Validator::new(
+        &prelude_parsed.0,
+        prelude_parsed.1,
+        "prelude",
+        prelude,
+        &base_pkg,
+    );
+    v6or.validate();
+
+    if !v6or.errors.is_empty() {
+        report_invalidation(v6or.errors);
+        panic!("interal Ludus error: invalid prelude")
+    }
+
+    let mut base_ctx = Process::<'src> {
+        input: "prelude",
+        src: prelude,
+        locals: base_pkg.clone(),
+        ast: &prelude_parsed.0,
+        span: prelude_parsed.1,
+        prelude: vec![],
+        fn_info: v6or.fn_info,
+    };
+
+    let prelude = base_ctx.eval();
+
+    let mut p_ctx = vec![];
+
+    match prelude {
+        Ok(Value::Dict(p_dict)) => {
+            for (key, value) in p_dict.iter() {
+                p_ctx.push((key.to_string(), value.clone()))
+            }
+        }
+        Ok(_) => {
+            println!("Bad Prelude export");
+            panic!();
+        }
+        Err(LErr { msg, .. }) => {
+            println!("Error running Prelude");
+            println!("{:?}", msg);
+            panic!();
+        }
+    };
+
+    (p_ctx, base_ctx.fn_info)
+}
+
+pub fn run(src: &'static str) {
+    let (tokens, lex_errs) = lexer().parse(src).into_output_errors();
+    if !lex_errs.is_empty() {
+        println!("{:?}", lex_errs);
+        return;
+    }
+    let tokens = tokens.unwrap();
+    let to_parse = tokens.clone();
+
+    let (parse_result, parse_errors) = parser()
+        .parse(Stream::from_iter(to_parse).map((0..src.len()).into(), |(t, s)| (t, s)))
+        .into_output_errors();
+    if !parse_errors.is_empty() {
+        println!("{:?}", parse_errors);
+        return;
+    }
+
+    let parsed = parse_result.unwrap();
+
+    let (prelude_ctx, mut prelude_fn_info) = prelude();
+
+    let mut v6or = Validator::new(&parsed.0, parsed.1, "script", src, &prelude_ctx);
+
+    v6or.validate();
+
+    if !v6or.errors.is_empty() {
+        report_invalidation(v6or.errors);
+        return;
+    }
+
+    prelude_fn_info.extend(&mut v6or.fn_info.into_iter());
+
+    let mut proc = Process {
+        input: "script",
+        src,
+        locals: vec![],
+        prelude: prelude_ctx,
+        ast: &parsed.0,
+        span: parsed.1,
+        fn_info: prelude_fn_info,
+    };
+
+    let result = proc.eval();
+
+    match result {
+        Ok(result) => println!("{}", result),
+        Err(err) => report_panic(err),
+    }
+}
+=======
+// an implementation of Ludus
+
+// curently left undone (and not adding for a while yet):
+// * sets
+// * interpolated strings & string patterns
+// * pkgs, namespaces, imports, `use` forms
+// * with forms
+// * test forms
+// * ignored words
+
+// todo:
+// * [x] rewrite fn parser to use chumsky::Recursive::declare/define
+//   - [x] do this to extract/simplify/DRY things like tuple patterns, fn clauses, etc.
+// * [x] Work around chumsky::Stream::from_iter().spanned disappearing in most recent version
+// * [x] investigate using labels (which is behind a compiler flag, somehow)
+// * [ ] write parsing errors
+// * [ ] wire up Ariadne parsing errors
+// * [x] add stack traces and code locations to panics
+// * [x] validation
+// * [x] break this out into multiple files
+// * [x] write a tree-walk VM
+//   - [x] learn how to deal with lifetimes
+//   - [x] with stack mechanics and refcounting
+//   - [ ] with tail-call optimization (nb: this may not be possible w/ a TW-VM)
+//   - [ ] with all the necessary forms for current Ludus
+// * [x] guards in match clauses
+// * [x] `as` patterns
+// * [x] splat patterns in tuples, lists, dicts
+// * [x] splats in list and dict literals
+// * [x] `loop` and `recur`
+// * [x] string patterns
+// * [x] string interpolation
+// * [x] docstrings
+// * [x] write `base` in Rust
+// * [ ] turn this into a library function
+// * [ ] compile this into WASM
+// * [ ] perf testing
+
+use chumsky::{input::Stream, prelude::*};
+use rust_embed::Embed;
+
+mod spans;
+
+mod lexer;
+use crate::lexer::*;
+
+mod value;
+use crate::value::*;
+
+mod parser;
+use crate::parser::*;
+
+mod base;
+use crate::base::*;
+
+mod validator;
+use crate::validator::*;
+
+mod process;
+use crate::process::*;
+
+mod errors;
+use crate::errors::*;
+
+#[derive(Embed)]
+#[folder = "assets/"]
+struct Asset;
+
+pub fn prelude<'src>() -> (
+    Vec<(String, Value<'src>)>,
+    std::collections::HashMap<*const Ast, FnInfo>,
+) {
+    let prelude = Asset::get("prelude.ld").unwrap().data.into_owned();
+    // we know for sure Prelude should live through the whole run of the program
+    let leaked = Box::leak(Box::new(prelude));
+    let prelude = std::str::from_utf8(leaked).unwrap();
+
+    let (ptoks, perrs) = lexer().parse(prelude).into_output_errors();
+    if !perrs.is_empty() {
+        println!("Errors lexing Prelude");
+        println!("{:?}", perrs);
+        panic!();
+    }
+
+    let ptoks = ptoks.unwrap();
+
+    let (p_ast, perrs) = parser()
+        .parse(Stream::from_iter(ptoks).map((0..prelude.len()).into(), |(t, s)| (t, s)))
+        .into_output_errors();
+    if !perrs.is_empty() {
+        println!("Errors parsing Prelude");
+        println!("{:?}", perrs);
+        panic!();
+    }
+
+    let prelude_parsed = Box::leak(Box::new(p_ast.unwrap()));
+    let base_pkg = Box::leak(Box::new(base()));
+
+    let mut v6or = Validator::new(
+        &prelude_parsed.0,
+        prelude_parsed.1,
+        "prelude",
+        prelude,
+        base_pkg,
+    );
+    v6or.validate();
+
+    if !v6or.errors.is_empty() {
+        report_invalidation(v6or.errors);
+        panic!("interal Ludus error: invalid prelude")
+    }
+
+    let static_vec = Box::leak(Box::new(vec![]));
+
+    let mut base_ctx = Process::<'src> {
+        input: "prelude",
+        src: prelude,
+        locals: base_pkg.clone(),
+        ast: &prelude_parsed.0,
+        span: prelude_parsed.1,
+        prelude: static_vec,
+        fn_info: v6or.fn_info,
+    };
+
+    let prelude = base_ctx.eval();
+
+    let mut p_ctx = vec![];
+
+    match prelude {
+        Ok(Value::Dict(p_dict)) => {
+            for (key, value) in p_dict.iter() {
+                p_ctx.push((key.to_string(), value.clone()))
+            }
+        }
+        Ok(_) => {
+            println!("Bad Prelude export");
+            panic!();
+        }
+        Err(LErr { msg, .. }) => {
+            println!("Error running Prelude");
+            println!("{:?}", msg);
+            panic!();
+        }
+    };
+
+    (p_ctx, base_ctx.fn_info)
+}
+
+pub fn run(src: &'static str) {
+    let (tokens, lex_errs) = lexer().parse(src).into_output_errors();
+    if !lex_errs.is_empty() {
+        println!("{:?}", lex_errs);
+        return;
+    }
+    let tokens = tokens.unwrap();
+    let to_parse = tokens.clone();
+
+    let (parse_result, parse_errors) = parser()
+        .parse(Stream::from_iter(to_parse).map((0..src.len()).into(), |(t, s)| (t, s)))
+        .into_output_errors();
+    if !parse_errors.is_empty() {
+        println!("{:?}", parse_errors);
+        return;
+    }
+
+    let parsed = parse_result.unwrap();
+
+    let (prelude_ctx, mut prelude_fn_info) = prelude();
+    let prelude_ctx = Box::leak(Box::new(prelude_ctx));
+
+    let mut v6or = Validator::new(&parsed.0, parsed.1, "script", src, prelude_ctx);
+
+    v6or.validate();
+
+    if !v6or.errors.is_empty() {
+        report_invalidation(v6or.errors);
+        return;
+    }
+
+    prelude_fn_info.extend(&mut v6or.fn_info.into_iter());
+
+    let mut proc = Process {
+        input: "script",
+        src,
+        locals: vec![],
+        prelude: prelude_ctx,
+        ast: &parsed.0,
+        span: parsed.1,
+        fn_info: prelude_fn_info,
+    };
+
+    let result = proc.eval();
+
+    match result {
+        Ok(result) => println!("{}", result),
+        Err(err) => report_panic(err),
+    }
+}
+>>>>>>> main

 pub fn main() {
+<<<<<<< HEAD
     env::set_var("RUST_BACKTRACE", "1");
     let src = fs::read_to_string("sandbox.ld").unwrap();
     let json = ludus(src);
     println!("{json}");
+||||||| things & stuff
+    let src = "
+    loop (100000, 1) with {
+        (1, acc) -> acc
+        (n, acc) -> recur (dec (n), add (n, acc))
+    }
+    ";
+    run(src);
+    // struct_scalpel::print_dissection_info::<value::Value>()
+    // struct_scalpel::print_dissection_info::<parser::Ast>();
+    // println!("{}", std::mem::size_of::<parser::Ast>())
+=======
+    let src = "
+    fn sum_to {
+        (n) -> sum_to (n, 0)
+        (1, acc) -> acc
+        (n, acc) -> sum_to (dec (n), add (n, acc))
+    }
+
+    sum_to (10000)
+    ";
+    run(src);
+    // struct_scalpel::print_dissection_info::<value::Value>()
+    // struct_scalpel::print_dissection_info::<parser::Ast>();
+    // println!("{}", std::mem::size_of::<parser::Ast>())
+>>>>>>> main
 }
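
Both versions of `prelude()` above pin the embedded prelude source for the whole program run with `Box::leak`, and the main side extends the same trick to `base()` and the prelude context so that later stages can hold plain long-lived references. A minimal sketch of that pattern in isolation; the byte buffer here is a stand-in for the `rust_embed` asset:

// Turn an owned byte buffer into a &'static str by deliberately leaking it.
// This is acceptable for data that must live for the whole program run, as
// the comment in prelude() notes; the buffer below stands in for the
// embedded "prelude.ld" asset.
fn leak_to_static(bytes: Vec<u8>) -> &'static str {
    let leaked: &'static mut Vec<u8> = Box::leak(Box::new(bytes));
    std::str::from_utf8(leaked).unwrap()
}

fn main() {
    let src: &'static str = leak_to_static(b"let x = 1".to_vec());
    println!("{src}");
}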

@@ -54,7 +54,7 @@ pub struct Process<'src> {
     pub input: &'static str,
     pub src: &'static str,
     pub locals: Vec<(String, Value<'src>)>,
-    pub prelude: Vec<(String, Value<'src>)>,
+    pub prelude: &'src Vec<(String, Value<'src>)>,
     pub ast: &'src Ast,
     pub span: SimpleSpan,
     pub fn_info: std::collections::HashMap<*const Ast, FnInfo>,
@@ -296,13 +296,15 @@ impl<'src> Process<'src> {
         // can't just use the `caller` value b/c borrow checker nonsense
         let args = Tuple(args);
         let to = self.locals.len();
-        let mut f = f.borrow_mut();
+        if !f.has_run {
         for i in 0..f.enclosing.len() {
             let (name, value) = f.enclosing[i].clone();
             if !f.has_run && matches!(value, Value::FnDecl(_)) {
                 let defined = self.resolve(&name);
                 match defined {
-                    Ok(Value::Fn(defined)) => f.enclosing[i] = (name.clone(), Fn(defined)),
+                    Ok(Value::Fn(defined)) => {
+                        f.enclosing[i] = (name.clone(), Fn(defined))
+                    }
                     Ok(Value::FnDecl(_)) => {
                         return self.panic(format!(
                             "function `{name}` called before it was defined"
@@ -315,6 +317,7 @@ impl<'src> Process<'src> {
             self.locals.push(f.enclosing[i].clone());
         }
         f.has_run = true;
+        }
         let input = self.input;
         let src = self.src;
         self.input = f.input;
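
The substantive change to `Process` is that `prelude` is now borrowed (`&'src Vec<...>`) rather than owned, which is exactly why `main.rs` leaks the prelude context it builds. A tiny stand-in sketch of that ownership shape (the field types are simplified, not the real `Process`):

// Stand-in types showing the new shape of Process: the prelude is borrowed
// from somewhere longer-lived instead of being copied into every process.
struct Process<'src> {
    locals: Vec<(String, &'src str)>,
    prelude: &'src Vec<(String, &'src str)>,
}

impl<'src> Process<'src> {
    fn resolve(&self, name: &str) -> Option<&'src str> {
        for (n, v) in self.locals.iter().rev() {
            if n == name {
                return Some(*v);
            }
        }
        for (n, v) in self.prelude.iter() {
            if n == name {
                return Some(*v);
            }
        }
        None
    }
}

fn main() {
    // Leaking, as main.rs does, is one way to get a prelude that outlives
    // every Process built from it.
    let prelude: &'static Vec<(String, &'static str)> =
        Box::leak(Box::new(vec![("add".to_string(), "base fn")]));
    let proc = Process { locals: vec![], prelude };
    println!("{:?}", proc.resolve("add"));
}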

@@ -59,9 +59,19 @@ fn match_arities(arities: &HashSet<Arity>, num_args: u8) -> bool {
 }

 #[derive(Debug, PartialEq)]
+<<<<<<< HEAD
 pub struct Validator<'a> {
     pub locals: Vec<(String, &'a Span, FnInfo)>,
     pub prelude: imbl::HashMap<&'static str, Value>,
+||||||| things & stuff
+pub struct Validator<'a, 'src> {
+    pub locals: Vec<(String, Span, FnInfo)>,
+    pub prelude: &'a Vec<(String, Value<'src>)>,
+=======
+pub struct Validator<'a> {
+    pub locals: Vec<(String, Span, FnInfo)>,
+    pub prelude: &'a Vec<(String, Value<'a>)>,
+>>>>>>> main
     pub input: &'static str,
     pub src: &'static str,
     pub ast: &'a Ast,
@@ -77,8 +87,16 @@ impl<'a> Validator<'a> {
         span: &'a Span,
         input: &'static str,
         src: &'static str,
+<<<<<<< HEAD
         prelude: imbl::HashMap<&'static str, Value>,
     ) -> Validator<'a> {
+||||||| things & stuff
+        prelude: &'a Vec<(String, Value<'src>)>,
+    ) -> Validator<'a, 'src> {
+=======
+        prelude: &'a Vec<(String, Value<'a>)>,
+    ) -> Validator<'a> {
+>>>>>>> main
         Validator {
             input,
             src,
@@ -149,6 +167,7 @@ impl<'a> Validator<'a> {
         }
     }

+<<<<<<< HEAD
     fn visit(&mut self, node: &'a Spanned<Ast>) {
         let (expr, span) = node;
         self.ast = expr;
@@ -156,6 +175,16 @@ impl<'a> Validator<'a> {
         self.validate();
     }

+||||||| things & stuff
+=======
+    fn visit(&mut self, node: &'a Spanned<Ast>) {
+        let (expr, span) = node;
+        self.ast = expr;
+        self.span = *span;
+        self.validate();
+    }
+
+>>>>>>> main
     pub fn validate(&mut self) {
         use Ast::*;
         let root = self.ast;
@@ -284,10 +313,21 @@ impl<'a> Validator<'a> {
             // check arity against fn info if first term is word and second term is args
             Synthetic(first, second, rest) => {
                 match (&first.0, &second.0) {
+<<<<<<< HEAD
                     (Ast::And, Ast::Arguments(_)) | (Ast::Or, Ast::Arguments(_)) => {
                         self.visit(second.as_ref())
                     }
                     (Ast::Word(_), Ast::Keyword(_)) => self.visit(first.as_ref()),
+||||||| things & stuff
+                    (Ast::Word(_), Ast::Keyword(_)) => {
+                        let (expr, span) = first.as_ref();
+                        self.ast = expr;
+                        self.span = *span;
+                        self.validate();
+                    }
+=======
+                    (Ast::Word(_), Ast::Keyword(_)) => self.visit(first.as_ref()),
+>>>>>>> main
                     (Ast::Keyword(_), Ast::Arguments(args)) => {
                         if args.len() != 1 {
                             self.err("called keywords may only take one argument".to_string())
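
HEAD and main both add the same `visit` helper, which points the validator at a child node (expression plus span) and re-runs `validate`, while the base revision inlined that dance at each call site. A stand-in sketch of the helper's shape, with `Ast`, `Span`, and `Spanned` reduced to minimal local types:

// Minimal stand-ins for the real Ast, Span, and Spanned types; the actual
// validator tracks much more state than this.
#[derive(Debug)]
enum Ast {
    Word(&'static str),
}
type Span = std::ops::Range<usize>;
type Spanned<T> = (T, Span);

struct Validator<'a> {
    ast: &'a Ast,
    span: Span,
    errors: Vec<String>,
}

impl<'a> Validator<'a> {
    // The helper both branches add: retarget the validator at a child node,
    // then re-run validation on it.
    fn visit(&mut self, node: &'a Spanned<Ast>) {
        let (expr, span) = node;
        self.ast = expr;
        self.span = span.clone();
        self.validate();
    }

    fn validate(&mut self) {
        match self.ast {
            Ast::Word(w) if w.is_empty() => {
                self.errors.push(format!("empty word at {:?}", self.span))
            }
            _ => (),
        }
    }
}

fn main() {
    let node: Spanned<Ast> = (Ast::Word("foo"), 0..3);
    let mut v = Validator { ast: &node.0, span: node.1.clone(), errors: vec![] };
    v.visit(&node);
    println!("{:?}", v.errors);
}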

src/value.rs: 48 lines changed

@@ -121,6 +121,7 @@ pub enum Value {
     True,
     False,
     Keyword(&'static str),
+<<<<<<< HEAD
     Interned(&'static str),
     String(Rc<String>),
     Number(f64),
@@ -131,6 +132,53 @@ pub enum Value {
     Fn(Rc<LFn>),
     BaseFn(BaseFn),
     Partial(Rc<Partial>),
+||||||| things & stuff
+    InternedString(&'static str),
+    AllocatedString(Rc<String>),
+    // on the heap for now
+    Tuple(Rc<Vec<Self>>),
+    Args(Rc<Vec<Self>>),
+    List(Vector<Self>),
+    Dict(HashMap<&'static str, Self>),
+    Box(&'static str, Rc<RefCell<Self>>),
+    Fn(Rc<RefCell<Fn<'src>>>),
+    FnDecl(&'static str),
+    Base(BaseFn<'src>),
+    Recur(Vec<Self>),
+    // Set(HashSet<Self>),
+    // Sets are hard
+    // Sets require Eq
+    // Eq is not implemented on f64, because NaNs
+    // We could use ordered_float::NotNan
+    // Let's defer that
+    // We're not really using sets in Ludus
+
+    // Other things we're not implementing yet:
+    // pkgs, nses, tests
+=======
+    InternedString(&'static str),
+    AllocatedString(Rc<String>),
+    // on the heap for now
+    Tuple(Rc<Vec<Self>>),
+    Args(Rc<Vec<Self>>),
+    List(Vector<Self>),
+    Dict(HashMap<&'static str, Self>),
+    Box(&'static str, Rc<RefCell<Self>>),
+    Fn(Rc<Fn<'src>>),
+    FnDecl(&'static str),
+    Base(BaseFn<'src>),
+    Recur(Vec<Self>),
+    // Set(HashSet<Self>),
+    // Sets are hard
+    // Sets require Eq
+    // Eq is not implemented on f64, because NaNs
+    // We could use ordered_float::NotNan
+    // Let's defer that
+    // We're not really using sets in Ludus
+
+    // Other things we're not implementing yet:
+    // pkgs, nses, tests
+>>>>>>> main
 }

 impl PartialEq for Value {
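
The comments on both sides of the `Value` conflict spell out why sets are deferred: `f64` is only `PartialEq`, since `NaN != NaN`, so a `Value` that wraps numbers cannot get the `Eq` and `Hash` that `HashSet` needs without something like `ordered_float::NotNan`. A short std-only demonstration of the underlying issue:

use std::collections::HashSet;

fn main() {
    // NaN breaks reflexivity, so f64 is PartialEq but not Eq (or Hash),
    // which is what the comments in Value are pointing at.
    let nan = f64::NAN;
    println!("NaN == NaN: {}", nan == nan); // prints false

    // HashSet::insert requires Eq + Hash, so a set of raw f64s (or of a
    // Value enum containing f64) is rejected by the compiler:
    // let mut floats = HashSet::new();
    // floats.insert(1.0_f64); // error: the trait `Hash` is not implemented for `f64`
    let mut set = HashSet::new();
    set.insert("strings are fine");
    println!("{}", set.len());
}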