// rudus/src/main.rs

// an implementation of Ludus
// currently left undone (and not planned for a while yet):
// * sets
// * interpolated strings & string patterns
// * pkgs, namespaces, imports, `use` forms
// * with forms
// * test forms
// * ignored words
// todo:
// * [x] rewrite fn parser to use chumsky::Recursive::declare/define
// - [x] do this to extract/simplify/DRY things like tuple patterns, fn clauses, etc.
// * [x] Work around chumsky::Stream::from_iter().spanned disappearing in most recent version
// * [x] investigate using labels (which is behind a compiler flag, somehow)
// * [ ] wire up Ariadne parsing errors
// * [ ] validation
// * [x] break this out into multiple files
// * [ ] write a tree-walk VM
// - [ ] learn how to deal with lifetimes
// - [ ] with stack mechanics and refcounting
// - [ ] with tail-call optimization
// * [ ] write `base` in Rust
// * [ ] turn this into a library function
// * [ ] compile this into WASM
// * [ ] perf testing
use chumsky::{input::Stream, prelude::*};
mod spans;

mod lexer;
use crate::lexer::*;

mod value;

mod parser;
use crate::parser::*;

pub fn main() {
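    // A sample Ludus snippet: a pattern-matching `let` binding, then nested calls.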
    let src = "let #{a, :b b} = foo\na(b(c),d)";
    // Lex the source; report any lexer errors and bail out early.
    let (tokens, lex_errs) = lexer().parse(src).into_output_errors();
    if !lex_errs.is_empty() {
        println!("{:?}", lex_errs);
        return;
    }
    let tokens = tokens.unwrap();
    let to_parse = tokens.clone();
    // Echo each lexed token for inspection.
    for (token, _) in tokens {
        println!("{}", token);
    }
    // Parse the spanned token stream; the end-of-input span covers the whole source.
    let (ast, _) = parser()
        .parse(Stream::from_iter(to_parse).map((0..src.len()).into(), |(t, s)| (t, s)))
        .unwrap();
    println!("{}", ast);
}
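
// Running `cargo run` prints each lexed token, then the parsed AST
// (or the lexer errors, if any).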
// #[cfg(test)]
// mod tests {
//     use crate::lexer::*; // assumes lexer() and Token are public in the lexer module
//     use chumsky::prelude::*;

//     #[test]
//     fn it_lexes_positive_ints() {
//         let (mytoken, _) = lexer().parse("42").unwrap()[0].clone();
//         assert_eq!(mytoken, Token::Number(42.0))
//     }

//     #[test]
//     fn it_lexes_negative_ints() {
//         let (mytoken, _) = lexer().parse("-42").unwrap()[0].clone();
//         assert_eq!(mytoken, Token::Number(-42.0))
//     }

//     #[test]
//     fn it_lexes_positive_floats() {
//         let (mytoken, _) = lexer().parse("42.032").unwrap()[0].clone();
//         assert_eq!(mytoken, Token::Number(42.032))
//     }

//     #[test]
//     fn it_lexes_positive_decimals() {
//         let (mytoken, _) = lexer().parse("0.123").unwrap()[0].clone();
//         assert_eq!(mytoken, Token::Number(0.123))
//     }

//     #[test]
//     fn it_lexes_negative_floats() {
//         let mytoken = lexer().parse("-42.123").unwrap()[0].clone().0;
//         assert_eq!(mytoken, Token::Number(-42.123))
//     }

//     #[test]
//     fn it_lexes_negative_decimals() {
//         let mytoken = lexer().parse("-0.123").unwrap()[0].clone().0;
//         assert_eq!(mytoken, Token::Number(-0.123))
//     }

//     #[test]
//     fn it_lexes_bools() {
//         let tt = lexer().parse("true").unwrap()[0].clone().0;
//         assert_eq!(tt, Token::Boolean(true));
//         let ff = lexer().parse("false").unwrap()[0].clone().0;
//         assert_eq!(ff, Token::Boolean(false))
//     }

//     #[test]
//     fn it_lexes_words() {
//         let mytoken = lexer().parse("foo").unwrap()[0].clone().0;
//         assert_eq!(mytoken, Token::Word("foo"))
//     }

//     #[test]
//     fn it_lexes_keywords() {
//         let kw = lexer().parse(":foo").unwrap()[0].clone().0;
//         assert_eq!(kw, Token::Keyword("foo"))
//     }

//     #[test]
//     fn it_lexes_strings() {
//         let s = lexer().parse("\"foo bar baz\"").unwrap()[0].clone().0;
//         assert_eq!(s, Token::String("foo bar baz"))
//     }

//     #[test]
//     fn it_ignores_comments() {
//         let e = lexer().parse("foo &bar\nbaz").unwrap();
//         assert_eq!(e[0].0, Token::Word("foo"));
//         assert_eq!(e[1].0, Token::Punctuation("\n"));
//         assert_eq!(e[2].0, Token::Word("baz"))
//     }

//     #[test]
//     fn it_lexes_multiple_tokens() {
//         let toks = lexer().parse("foo;bar\nbaz").unwrap();
//         assert_eq!(toks[0].0, Token::Word("foo"));
//         assert_eq!(toks[2].0, Token::Word("bar"));
//         assert_eq!(toks[4].0, Token::Word("baz"))
//     }

//     #[test]
//     fn it_lexes_collections() {
//         let toks = lexer().parse("(1, 2)").unwrap();
//         assert_eq!(toks[0].0, Token::Punctuation("("));
//         assert_eq!(toks[1].0, Token::Number(1.0));
//         assert_eq!(toks[2].0, Token::Punctuation(","));
//         assert_eq!(toks[3].0, Token::Number(2.0));
//         assert_eq!(toks[4].0, Token::Punctuation(")"))
//     }
// }