guess i'm stupid, var works now :3

LunarAkai 2025-08-07 21:44:41 +02:00
commit 3478da43a3
3 changed files with 11 additions and 4 deletions

View file

@@ -1,5 +1,5 @@
 use chumsky::{
-    combinator::Or, error::Rich, extra, input::ValueInput, prelude::{choice, end, just, nested_delimiters, recursive, via_parser}, primitive::select, recursive, select, select_ref, span::{self, SimpleSpan}, text::{self, ascii::{ident, keyword}, whitespace}, Boxed, ConfigIterParser, IterParser, Parser
+    combinator::Or, error::Rich, extra, input::ValueInput, prelude::{choice, end, just, nested_delimiters, recursive, via_parser}, primitive::select, recursive, select, select_ref, span::{self, SimpleSpan}, text::{self, ascii::{ident, keyword}, newline, whitespace}, Boxed, ConfigIterParser, IterParser, Parser
 };
 use crate::language_frontend::{abstract_syntax_tree::ast::{BinaryOp, Expr}, lexer::tokens::Token};
@@ -69,7 +69,7 @@ where
     .ignore_then(ident)
     .then_ignore(just(Token::Assign))
     .then(expr.clone())
-    .then_ignore(just(Token::NewLine).or_not())
+    .then_ignore(just(Token::NewLine))
     .map(|(name, rhs)| Expr::Assignment {
         target: Box::new(Expr::Ident(name)),
         value: Box::new(rhs),
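
This hunk is the visible half of the fix: the newline terminator after a `var` assignment goes from optional (`.or_not()`) to required, which only makes sense once the lexer actually emits `Token::NewLine` (see the lexer diff below). A minimal, self-contained sketch of the resulting parser shape, assuming chumsky 1.0's `ValueInput` API and a trimmed-down `Token`/`Expr` (the `Var` keyword variant and the number literal are stand-ins, not taken from the diff):

```rust
// Minimal sketch (chumsky 1.0) of the statement shape this hunk implies.
// `Token`, `Expr`, and the `Var` variant are assumptions from diff context.
use chumsky::{input::ValueInput, prelude::*};

#[derive(Clone, Debug, PartialEq)]
enum Token {
    Var,
    Ident(String),
    Assign,
    Num(f64),
    NewLine,
}

#[derive(Debug)]
enum Expr {
    Num(f64),
    Ident(String),
    Assignment { target: Box<Expr>, value: Box<Expr> },
}

fn assignment<'src, I>() -> impl Parser<'src, I, Expr, extra::Err<Rich<'src, Token>>>
where
    I: ValueInput<'src, Token = Token, Span = SimpleSpan>,
{
    let ident = select! { Token::Ident(name) => name };
    let expr = select! { Token::Num(n) => Expr::Num(n) };

    just(Token::Var)
        .ignore_then(ident)
        .then_ignore(just(Token::Assign))
        .then(expr)
        // Before this commit the terminator was `.or_not()`; making it
        // mandatory only works because the lexer now emits NewLine at all.
        .then_ignore(just(Token::NewLine))
        .map(|(name, rhs)| Expr::Assignment {
            target: Box::new(Expr::Ident(name)),
            value: Box::new(rhs),
        })
}

fn main() {
    // Token slices implement ValueInput with SimpleSpan indices, so the
    // sketch can be exercised without the logos front end.
    let tokens = [
        Token::Var,
        Token::Ident("x".into()),
        Token::Assign,
        Token::Num(1.0),
        Token::NewLine,
    ];
    println!("{:?}", assignment().parse(&tokens[..]).into_result());
}
```

Running it prints the parsed `Assignment`; drop the trailing `NewLine` token and you get the parse error the old `.or_not()` silently swallowed.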

View file

@@ -3,6 +3,7 @@ use std::fmt;
 use logos::{Logos};
 #[derive(Logos, Debug, Clone, PartialEq)]
+#[regex(r"[ \t\f]+", logos::skip)]
 pub enum Token {
     // Identifier
     #[regex(r"[a-zA-Z_][a-zA-Z0-9_]*", |lex| lex.slice().to_owned())]
@@ -138,6 +139,8 @@ pub enum Token {
     // Special
+    #[regex(r"\n")]
+    NewLine,
     #[regex(r"//[^\r]*", logos::skip)]
     #[regex(r"/\*([^*]|\*[^/])*\*/", logos::skip)]
@@ -146,8 +149,7 @@ pub enum Token {
     #[regex(r"[ \t\f]+", logos::skip)]
     Whitespace,
-    #[token(r"[ \n]")]
-    NewLine,
     Eof,
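
This is very likely the actual bug behind the commit message: logos' `#[token(...)]` matches its argument as a literal string, not a regex, so the removed `#[token(r"[ \n]")]` only ever matched the five literal characters `[ \n]` and a real line break never became a `NewLine` token. Switching to `#[regex(r"\n")]` makes the lexer emit the token the parser now requires. A reduced, runnable sketch, with the variant set trimmed to what `var x = y` needs and the whitespace skip written at variant level (the form logos documents):

```rust
// Reduced sketch of the fixed lexer; everything else in the real enum is omitted.
use logos::Logos;

#[derive(Logos, Debug, Clone, PartialEq)]
pub enum Token {
    #[token("var")]
    Var,
    #[regex(r"[a-zA-Z_][a-zA-Z0-9_]*", |lex| lex.slice().to_owned())]
    Ident(String),
    #[token("=")]
    Assign,
    // `#[regex]` treats its argument as a regex, so this matches an actual
    // line break. The removed `#[token(r"[ \n]")]` matched the literal
    // characters `[ \n]` instead, so NewLine was never produced.
    #[regex(r"\n")]
    NewLine,
    // Spaces and tabs are skipped instead of surfacing as tokens.
    #[regex(r"[ \t\f]+", logos::skip)]
    Whitespace,
}

fn main() {
    // logos 0.13+ yields Result<Token, ()> per lexeme.
    for tok in Token::lexer("var x = y\n") {
        println!("{:?}", tok);
    }
}
```

Lexing `"var x = y\n"` now ends in `Ok(NewLine)`; with the old `#[token]` form the final `\n` surfaced as `Err(())`, which is presumably the error token the `var` parser kept tripping over.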

View file

@@ -32,6 +32,11 @@ fn main() {
         Err(()) => (Token::Error, span.into()),
     });
+    println!("Token Stream:");
+    for (token, span) in token_iter.clone() {
+        println!("{:?} at {:?}", token, span);
+    }
     // Turn the token iterator into a stream that chumsky can use for things like backtracking
     let token_stream = Stream::from_iter(token_iter)
     // Tell chumsky to split the (Token, SimpleSpan) stream into its parts so that it can handle the spans for us
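
The diff is cut off mid-statement here; in chumsky's stock logos integration, which these comments mirror, the next line maps each `(Token, SimpleSpan)` pair and supplies a zero-width end-of-input span. A sketch of the complete glue under that assumption, reusing the trimmed `Token` enum and `assignment()` parser from the sketches above plus a hypothetical `Error` variant for lex failures:

```rust
use chumsky::{input::Stream, prelude::*};
use logos::Logos;

// `Token` (extended with an `Error` variant) and `assignment()` are assumed
// from the sketches above; both stand in for the project's real definitions.
fn run(src: &str) {
    // Lex with logos, folding lex errors into an explicit Error token so
    // the stream stays total and the parser can report on bad lexemes.
    let token_iter = Token::lexer(src)
        .spanned()
        .map(|(tok, span)| match tok {
            Ok(tok) => (tok, SimpleSpan::from(span)),
            Err(()) => (Token::Error, SimpleSpan::from(span)),
        });

    // Wrap the iterator in a Stream (enables backtracking), then tell
    // chumsky how to split each (Token, SimpleSpan) pair, using a
    // zero-width span at the end of the source as the end-of-input span.
    let token_stream = Stream::from_iter(token_iter)
        .map((0..src.len()).into(), |(t, s): (_, _)| (t, s));

    match assignment().parse(token_stream).into_result() {
        Ok(ast) => println!("{:?}", ast),
        Err(errs) => errs.into_iter().for_each(|e| println!("{:?}", e)),
    }
}
```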