Parser just panics on newlines again now

This commit is contained in:
LunarAkai 2025-08-07 21:13:46 +02:00
commit 783955d671
3 changed files with 13 additions and 17 deletions

View file

@ -64,21 +64,19 @@ where
add_sub
});
let decl = recursive(|decl| {
let r#var = just(Token::Var)
//let decl = recursive(|decl| {
let var = just(Token::Var)
.ignore_then(ident)
.then_ignore(just(Token::Assign))
.then(expr.clone())
.then_ignore(just(Token::NewLine).or_not())
.then(decl)
.map(|(name, rhs)| Expr::Assignment {
target: Box::new(Expr::Ident(name.0)),
target: Box::new(Expr::Ident(name)),
value: Box::new(rhs),
});
r#var.or(expr)
});
decl.then_ignore(end())
var.or(expr)
//});
//decl.then_ignore(end())
}

View file

@ -4,8 +4,6 @@ use logos::{Logos};
#[derive(Logos, Debug, Clone, PartialEq)]
pub enum Token {
#[token(r"\n")]
NewLine,
// Identifier
#[regex(r"[a-zA-Z_][a-zA-Z0-9_]*", |lex| lex.slice().to_owned())]
Identifier(String),
@ -55,7 +53,6 @@ pub enum Token {
#[token("String")]
StringType,
// Literals
#[regex(r#""([^"\\]|\\.)*""#, |lex| lex.slice().to_owned())]
StringLiteral(String),
@ -146,10 +143,11 @@ pub enum Token {
#[regex(r"/\*([^*]|\*[^/])*\*/", logos::skip)]
Comment,
#[regex(r"[\t\r\f]+", logos::skip)]
#[regex(r"[ \t\f]+", logos::skip)]
Whitespace,
#[token(r"[ \n]")]
NewLine,
Eof,
@ -159,7 +157,6 @@ pub enum Token {
impl fmt::Display for Token {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Token::NewLine => write!(f, ""),
Token::Identifier(ident) => write!(f, "{ident}"),
Token::Fun => write!(f, "fun"),
Token::Class => write!(f, "class"),
@ -202,6 +199,7 @@ impl fmt::Display for Token {
Token::Comma => write!(f, ","),
Token::Dot => write!(f, "."),
Token::Comment => write!(f, ""),
Token::NewLine => write!(f, "\n"),
Token::Whitespace => write!(f, ""),
Token::Eof => write!(f, ""),
Token::Error => write!(f, "<error>"),

View file

@ -41,8 +41,8 @@ fn main() {
println!("{:?}", sourcecode);
let lexer = Token::lexer(&sourcecode)
.spanned()
.collect::<Vec<_>>();
.spanned();
//.collect::<Vec<_>>();
for token in lexer {
println!("{:?}", token);