// an implementation of Ludus

// currently left undone (and not adding for a while yet):
// * sets
// * interpolated strings & string patterns
// * pkgs, namespaces, imports, `use` forms
// * `with` forms
// * `test` forms
// * ignored words

// todo:
// * [x] rewrite fn parser to use chumsky::Recursive::declare/define (see the sketch after main())
//   - [x] do this to extract/simplify/DRY things like tuple patterns, fn clauses, etc.
// * [x] work around chumsky::Stream::from_iter().spanned disappearing in the most recent version
// * [x] investigate using labels (which is behind a compiler flag, somehow)
// * [ ] wire up Ariadne parsing errors
// * [ ] validation
// * [x] break this out into multiple files
// * [x] write a tree-walk VM
//   - [x] learn how to deal with lifetimes
//   - [x] with stack mechanics and refcounting
//   - [ ] with tail-call optimization (nb: this may not be possible w/ a TW-VM)
//   - [ ] with all the necessary forms for current Ludus
// * [ ] guards in match clauses
// * [ ] `as` patterns
// * [ ] splat patterns in tuples, lists, dicts
// * [ ] splats in list and dict literals
// * [ ] `loop` and `recur`
// * [ ] write `base` in Rust
// * [ ] turn this into a library function
// * [ ] compile this into WASM
// * [ ] perf testing

use chumsky::{input::Stream, prelude::*};
use std::rc::Rc;

mod spans;

mod lexer;
use crate::lexer::*;

mod value;
use crate::value::*;

mod parser;
use crate::parser::*;

mod vm;
use crate::vm::*;

mod base;
use crate::base::*;

pub fn main() {
    let src = "
eq
";
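
    // nb: into_output_errors() hands back both the (optional) lexed output and
    // a Vec of errors, so lexing failures can be reported instead of panicking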
    let (tokens, lex_errs) = lexer().parse(src).into_output_errors();
    if !lex_errs.is_empty() {
        println!("{:?}", lex_errs);
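        // todo (per the list above): wire up Ariadne here. A rough, unverified
        // sketch of what that wiring might look like, assuming an `ariadne`
        // dependency and rich errors exposing a span; kept commented out since
        // neither assumption holds yet:
        //
        // for err in &lex_errs {
        //     ariadne::Report::build(ariadne::ReportKind::Error, (), err.span().start)
        //         .with_message(err.to_string())
        //         .with_label(ariadne::Label::new(err.span().into_range()))
        //         .finish()
        //         .print(ariadne::Source::from(src))
        //         .unwrap();
        // }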
        return;
    }
    let tokens = tokens.unwrap();
    let to_parse = tokens.clone();

    // for (token, _) in tokens {
    //     println!("{}", token)
    // }
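
    // nb: chumsky's Stream::from_iter(...).spanned() disappeared in the most
    // recent version (see the todo list above); mapping the (token, span)
    // pairs by hand, with (0..src.len()).into() as the end-of-input span,
    // does the same job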
    let (ast, _) = parser()
        .parse(Stream::from_iter(to_parse).map((0..src.len()).into(), |(t, s)| (t, s)))
        .unwrap();

    // println!("{}", ast);

    let mut ctx = base();
    let result = eval(&ast, &mut ctx).unwrap();
    println!("{}", result);
}
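
// A minimal, illustrative sketch of the chumsky::Recursive::declare/define
// pattern the fn parser was rewritten to use (see the todo list above): first
// declare a parser handle, then define it in terms of clones of itself. The
// toy grammar here (integers, possibly parenthesized) is an assumption for
// demonstration only and is not part of the Ludus pipeline.
#[allow(dead_code)]
fn declare_define_sketch<'src>() -> impl Parser<'src, &'src str, i64> {
    // declare a parser with no definition yet, so it can be referenced early
    let mut expr = Recursive::declare();
    // define it afterwards; the expr.clone() inside is what makes it recursive
    expr.define({
        let int = text::int(10).map(|s: &str| s.parse::<i64>().unwrap()).padded();
        let parens = expr.clone().delimited_by(just('('), just(')')).padded();
        int.or(parens)
    });
    expr
}
// e.g. declare_define_sketch().parse("((42))").into_result() should yield Ok(42)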

// #[cfg(test)]
// mod tests {
//     use crate::lexer;
//     use crate::Token;
//     use chumsky::prelude::*;
//
//     #[test]
//     fn it_lexes_positive_ints() {
//         let (mytoken, _) = lexer().parse("42").unwrap()[0].clone();
//         assert_eq!(mytoken, Token::Number(42.0))
//     }
//
//     #[test]
//     fn it_lexes_negative_ints() {
//         let (mytoken, _) = lexer().parse("-42").unwrap()[0].clone();
//         assert_eq!(mytoken, Token::Number(-42.0))
//     }
//
//     #[test]
//     fn it_lexes_positive_floats() {
//         let (mytoken, _) = lexer().parse("42.032").unwrap()[0].clone();
//         assert_eq!(mytoken, Token::Number(42.032))
//     }
//
//     #[test]
//     fn it_lexes_positive_decimals() {
//         let (mytoken, _) = lexer().parse("0.123").unwrap()[0].clone();
//         assert_eq!(mytoken, Token::Number(0.123))
//     }
//
//     #[test]
//     fn it_lexes_negative_floats() {
//         let mytoken = lexer().parse("-42.123").unwrap()[0].clone().0;
//         assert_eq!(mytoken, Token::Number(-42.123))
//     }
//
//     #[test]
//     fn it_lexes_negative_decimals() {
//         let mytoken = lexer().parse("-0.123").unwrap()[0].clone().0;
//         assert_eq!(mytoken, Token::Number(-0.123))
//     }
//
//     #[test]
//     fn it_lexes_bools() {
//         let tt = lexer().parse("true").unwrap()[0].clone().0;
//         assert_eq!(tt, Token::Boolean(true));
//         let ff = lexer().parse("false").unwrap()[0].clone().0;
//         assert_eq!(ff, Token::Boolean(false))
//     }
//
//     #[test]
//     fn it_lexes_words() {
//         let mytoken = lexer().parse("foo").unwrap()[0].clone().0;
//         assert_eq!(mytoken, Token::Word("foo"))
//     }
//
//     #[test]
//     fn it_lexes_keywords() {
//         let kw = lexer().parse(":foo").unwrap()[0].clone().0;
//         assert_eq!(kw, Token::Keyword("foo"))
//     }
//
//     #[test]
//     fn it_lexes_strings() {
//         let s = lexer().parse("\"foo bar baz\"").unwrap()[0].clone().0;
//         assert_eq!(s, Token::String("foo bar baz"))
//     }
//
//     #[test]
//     fn it_ignores_comments() {
//         let e = lexer().parse("foo &bar\nbaz").unwrap();
//         assert_eq!(e[0].0, Token::Word("foo"));
//         assert_eq!(e[1].0, Token::Punctuation("\n"));
//         assert_eq!(e[2].0, Token::Word("baz"))
//     }
//
//     #[test]
//     fn it_lexes_multiple_tokens() {
//         let toks = lexer().parse("foo;bar\nbaz").unwrap();
//         assert_eq!(toks[0].0, Token::Word("foo"));
//         assert_eq!(toks[2].0, Token::Word("bar"));
//         assert_eq!(toks[4].0, Token::Word("baz"))
//     }
//
//     #[test]
//     fn it_lexes_collections() {
//         let toks = lexer().parse("(1, 2)").unwrap();
//         assert_eq!(toks[0].0, Token::Punctuation("("));
//         assert_eq!(toks[1].0, Token::Number(1.0));
//         assert_eq!(toks[2].0, Token::Punctuation(","));
//         assert_eq!(toks[3].0, Token::Number(2.0));
//         assert_eq!(toks[4].0, Token::Punctuation(")"))
//     }
// }