// an implementation of Ludus

// currently left undone (and not adding for a while yet):
// * sets
// * interpolated strings & string patterns
// * pkgs, namespaces, imports, `use` forms
// * with forms
// * test forms
// * ignored words

// todo:
// * [x] rewrite fn parser to use chumsky::Recursive::declare/define
//   - [x] do this to extract/simplify/DRY things like tuple patterns, fn clauses, etc.
// * [x] work around chumsky::Stream::from_iter().spanned disappearing in the most recent version
// * [x] investigate using labels (which is behind a compiler flag, somehow)
// * [ ] wire up Ariadne parsing errors
// * [ ] validation
// * [x] break this out into multiple files
// * [x] write a tree-walk VM
//   - [x] learn how to deal with lifetimes
//   - [x] with stack mechanics and refcounting
//   - [ ] with tail-call optimization (nb: this may not be possible w/ a TW-VM; see the trampoline sketch after `main`)
//   - [ ] with all the necessary forms for current Ludus
// * [ ] guards in match clauses
// * [ ] `as` patterns
// * [ ] splat patterns in tuples, lists, dicts
// * [ ] splats in list and dict literals
// * [ ] `loop` and `recur`
// * [ ] write `base` in Rust
// * [ ] turn this into a library function (see the `run` sketch at the bottom of this file)
// * [ ] compile this into WASM
// * [ ] perf testing

use chumsky::{input::Stream, prelude::*};
use std::rc::Rc;

mod spans;

mod lexer;
use crate::lexer::*;

mod value;
use crate::value::*;

mod parser;
use crate::parser::*;

mod vm;
use crate::vm::*;

mod base;
use crate::base::*;

pub fn main() {
    let src = " eq ";
    // lex the source, bailing out early if there are any lexing errors
    let (tokens, lex_errs) = lexer().parse(src).into_output_errors();
    if !lex_errs.is_empty() {
        println!("{:?}", lex_errs);
        return;
    }
    let tokens = tokens.unwrap();
    let to_parse = tokens.clone();
    // for (token, _) in tokens {
    //     println!("{}", token)
    // }
    // parse the token stream; the .map(...) re-spans the tokens, which is the
    // workaround for Stream::from_iter().spanned disappearing (see todo above)
    let (ast, _) = parser()
        .parse(Stream::from_iter(to_parse).map((0..src.len()).into(), |(t, s)| (t, s)))
        .unwrap();
    // println!("{}", ast);
    // evaluate the AST against the base context and print the result
    let mut ctx = base();
    let result = eval(&ast, &mut ctx).unwrap();
    println!("{}", result);
}
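// The todo list above notes that tail-call optimization may not be possible in a
// tree-walk VM. One standard workaround is a trampoline: instead of the evaluator
// recursing into a tail call (and growing the Rust stack), it returns a value
// describing what to do next to a flat driver loop. The sketch below is a minimal,
// self-contained illustration of that technique; `Step` and `trampoline` are
// hypothetical names, not part of this VM's actual API.
#[allow(dead_code)]
enum Step<T> {
    // evaluation finished with a value
    Done(T),
    // evaluation reached a call in tail position; the driver re-enters the evaluator
    TailCall(Box<dyn FnOnce() -> Step<T>>),
}

#[allow(dead_code)]
fn trampoline<T>(mut step: Step<T>) -> T {
    // this loop is the only Rust stack frame that persists across tail calls,
    // so arbitrarily deep tail recursion runs in constant stack space
    loop {
        match step {
            Step::Done(value) => return value,
            Step::TailCall(next) => step = next(),
        }
    }
}

// e.g., a tail-recursive countdown that would overflow the stack as plain recursion:
// fn countdown(n: u64) -> Step<u64> {
//     if n == 0 { Step::Done(0) } else { Step::TailCall(Box::new(move || countdown(n - 1))) }
// }
// trampoline(countdown(1_000_000)) returns 0 without growing the stack.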
Token::Keyword("foo")) // } // #[test] // fn it_lexes_strings() { // let s = lexer().parse("\"foo bar baz\"").unwrap()[0].clone().0; // assert_eq!(s, Token::String("foo bar baz")) // } // #[test] // fn it_ignores_comments() { // let e = lexer().parse("foo &bar\nbaz").unwrap(); // assert_eq!(e[0].0, Token::Word("foo")); // assert_eq!(e[1].0, Token::Punctuation("\n")); // assert_eq!(e[2].0, Token::Word("baz")) // } // #[test] // fn it_lexes_multiple_tokens() { // let toks = lexer().parse("foo;bar\nbaz").unwrap(); // assert_eq!(toks[0].0, Token::Word("foo")); // assert_eq!(toks[2].0, Token::Word("bar")); // assert_eq!(toks[4].0, Token::Word("baz")) // } // #[test] // fn it_lexes_collections() { // let toks = lexer().parse("(1, 2)").unwrap(); // assert_eq!(toks[0].0, Token::Punctuation("(")); // assert_eq!(toks[1].0, Token::Number(1.0)); // assert_eq!(toks[2].0, Token::Punctuation(",")); // assert_eq!(toks[3].0, Token::Number(2.0)); // assert_eq!(toks[4].0, Token::Punctuation(")")) // } // }