// ludus/src/lexer.rs
use crate::spans::*;
use chumsky::prelude::*;
use std::fmt;
/// A single lexical token of the Ludus language.
///
/// String-carrying variants borrow `&'static str` slices of the source,
/// which the lexer requires to be `'static` (see `lexer()`).
///
/// `Copy` is derived because every payload is `Copy` (`f64`, `bool`,
/// `&'static str`), which lets callers pass tokens by value without clones.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum Token {
    /// The literal `nil`.
    Nil,
    /// A numeric literal; all Ludus numbers are `f64`.
    Number(f64),
    /// An identifier, e.g. `foo`.
    Word(&'static str),
    /// `true` or `false`.
    Boolean(bool),
    /// A keyword literal, e.g. `:foo`.
    Keyword(&'static str),
    /// The contents of a string literal: quotes stripped, escape
    /// sequences left as raw source text (the lexer uses `to_slice`).
    String(&'static str),
    // todo: hard code these types
    /// A reserved word, e.g. `let`, `fn`, `match`.
    Reserved(&'static str),
    /// Structural punctuation, e.g. `(`, `->`, or a newline.
    Punctuation(&'static str),
    /// A method literal, e.g. `::foo`.
    Method(&'static str),
}
impl fmt::Display for Token {
2024-10-31 20:59:26 +00:00
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Token::Number(n) => write!(f, "[Number {}]", n),
Token::Word(w) => write!(f, "[Word {}]", w),
Token::Boolean(b) => write!(f, "[Boolean {}]", b),
Token::Keyword(k) => write!(f, "[Keyword :{}]", k),
Token::String(s) => write!(f, "[String {}]", s),
Token::Reserved(r) => write!(f, "[Reserved {}]", r),
Token::Nil => write!(f, "[nil]"),
Token::Punctuation(p) => write!(f, "[Punctuation {}]", p),
2025-07-02 23:29:49 +00:00
Token::Method(m) => write!(f, "[Method {m}]"),
2024-10-31 20:59:26 +00:00
}
}
}
2025-07-04 03:23:14 +00:00
impl Token {
pub fn show(&self) -> String {
match self {
Token::Number(n) => format!("{n}"),
Token::Boolean(b) => format!("{b}"),
Token::Keyword(k) => format!(":{k}"),
Token::Method(m) => format!("::{m}"),
Token::Nil => "nil".to_string(),
Token::String(s) => format!("\"{s}\""),
Token::Reserved(s) | Token::Word(s) => s.to_string(),
Token::Punctuation(s) => {
let out = if *s == "\n" { "newline" } else { s };
out.to_string()
}
}
}
}
pub fn lexer(
) -> impl Parser<'static, &'static str, Vec<(Token, Span)>, extra::Err<Rich<'static, char, Span>>> {
2024-10-31 20:59:26 +00:00
let number = just('-')
.or_not()
.then(text::int(10).then(just('.').then(text::digits(10)).or_not()))
.to_slice()
.from_str()
.unwrapped()
.map(Token::Number);
let word = any()
.filter(char::is_ascii_lowercase)
.then(
any()
.filter(char::is_ascii_alphanumeric)
.or(one_of("*/?!_"))
.repeated(),
)
.to_slice();
let reserved_or_word = word.map(|word: &str| match word {
"true" => Token::Boolean(true),
"false" => Token::Boolean(false),
"nil" => Token::Nil,
// todo: hard code these as type constructors
"as" | "box" | "do" | "else" | "fn" | "if" | "let" | "loop" | "match" | "panic!"
| "recur" | "repeat" | "then" | "when" | "with" | "or" | "and" | "receive" => {
Token::Reserved(word)
}
2024-10-31 20:59:26 +00:00
_ => Token::Word(word),
});
2025-07-02 23:29:49 +00:00
let method = just("::").ignore_then(word).map(Token::Method);
2024-11-22 01:00:49 +00:00
let keyword = just(':').ignore_then(word).map(Token::Keyword);
2024-10-31 20:59:26 +00:00
2025-07-03 00:54:21 +00:00
let escape = just('\\')
.then(choice((
2025-07-03 03:47:02 +00:00
just('\\').to('\\'),
2025-07-03 00:54:21 +00:00
just('n').to('\n'),
just('t').to('\t'),
just('r').to('\r'),
2025-07-04 03:23:14 +00:00
just('"').to('"'), // TODO: figure out why this isn't working
2025-07-03 00:54:21 +00:00
)))
.ignored();
2025-07-04 03:23:14 +00:00
let string = none_of('"')
2025-07-03 00:54:21 +00:00
.ignored()
.or(escape)
.repeated()
.to_slice()
.map(Token::String)
.delimited_by(just('"'), just('"'));
2024-10-31 20:59:26 +00:00
// todo: hard code these as type constructors
let punctuation = one_of(",=[]{}()>;\n_")
.to_slice()
.or(just("->"))
.or(just("..."))
.or(just("#{"))
.or(just("${"))
.map(Token::Punctuation);
let token = number
.or(reserved_or_word)
.or(keyword)
2025-07-02 23:29:49 +00:00
.or(method)
2024-10-31 20:59:26 +00:00
.or(string)
.or(punctuation);
let comment = just('&')
.ignore_then(any().and_is(just('\n').not()).repeated())
.repeated();
let ludus_ws = just(' ').or(just('\t')).repeated();
token
.map_with(|tok, e| (tok, e.span()))
.padded_by(ludus_ws)
.padded_by(comment)
.recover_with(skip_then_retry_until(any().ignored(), end()))
.repeated()
.collect()
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Lexes `src` and returns its first token, panicking on lex failure.
    fn first_token(src: &'static str) -> Token {
        let spanned = lexer().parse(src).into_output_errors().0.unwrap();
        let (token, _) = spanned[0].clone();
        token
    }

    #[test]
    fn it_lexes_nil() {
        assert_eq!(first_token("nil"), Token::Nil);
    }

    #[test]
    fn it_lexes_strings() {
        assert_eq!(first_token("\"foo bar baz\""), Token::String("foo bar baz"));
    }

    #[test]
    fn it_lexes_strings_w_escaped_quotes() {
        // NOTE(review): this expectation has no backslash in the token, i.e.
        // it assumes the escape is *processed* — but the lexer's `to_slice`
        // keeps the raw source text. Confirm which behavior is intended.
        assert_eq!(first_token("\"foo \\\"bar baz\""), Token::String("foo \"bar baz"));
    }
}