just panics on new lines again now
commit 783955d671
parent 3de0631341
3 changed files with 13 additions and 17 deletions
@@ -64,21 +64,19 @@ where
 
         add_sub
     });
-    let decl = recursive(|decl| {
-        let r#var = just(Token::Var)
+    //let decl = recursive(|decl| {
+        let var = just(Token::Var)
             .ignore_then(ident)
             .then_ignore(just(Token::Assign))
             .then(expr.clone())
             .then_ignore(just(Token::NewLine).or_not())
-            .then(decl)
             .map(|(name, rhs)| Expr::Assignment {
-                target: Box::new(Expr::Ident(name.0)),
+                target: Box::new(Expr::Ident(name)),
                 value: Box::new(rhs),
             });
-        r#var.or(expr)
-    });
-
-    decl.then_ignore(end())
+        var.or(expr)
+    //});
+    //decl.then_ignore(end())
 
 
 }
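For context, the hunk above uses chumsky 0.9-style combinators. The sketch below is a minimal, self-contained approximation, not the project's actual code: Token and Expr are simplified stand-ins, and the ident/expr sub-parsers are assumptions. It shows why dropping .then(decl) lets the .map closure receive a plain (name, rhs) pair, so Expr::Ident(name) no longer needs the old name.0 projection.

// Minimal sketch, assuming chumsky 0.9; Token and Expr are stand-ins for the
// project's own types, not its real definitions.
use chumsky::prelude::*;
use chumsky::stream::Stream;

#[derive(Clone, Debug, PartialEq, Eq, Hash)]
enum Token { Var, Assign, NewLine, Ident(String), Num(i64) }

#[derive(Debug)]
enum Expr {
    Ident(String),
    Num(i64),
    Assignment { target: Box<Expr>, value: Box<Expr> },
}

fn parser() -> impl Parser<Token, Expr, Error = Simple<Token>> {
    // Stand-ins for the `ident` and `expr` parsers defined earlier in the real file.
    let ident = select! { Token::Ident(name) => name };
    let expr = select! {
        Token::Ident(name) => Expr::Ident(name),
        Token::Num(n) => Expr::Num(n),
    };

    // `.ignore_then(ident).then(expr)` yields a plain (String, Expr) pair, so the
    // closure can use `name` directly; the removed `.then(decl)` used to nest it
    // as ((String, Expr), Expr), which is what the old `name.0` was for.
    let var = just(Token::Var)
        .ignore_then(ident)
        .then_ignore(just(Token::Assign))
        .then(expr.clone())
        .then_ignore(just(Token::NewLine).or_not())
        .map(|(name, rhs)| Expr::Assignment {
            target: Box::new(Expr::Ident(name)),
            value: Box::new(rhs),
        });

    var.or(expr).then_ignore(end())
}

fn main() {
    // Hand-written token stream standing in for the lexer's output.
    let tokens = vec![
        (Token::Var, 0..3),
        (Token::Ident("x".into()), 4..5),
        (Token::Assign, 6..7),
        (Token::Num(1), 8..9),
        (Token::NewLine, 9..10),
    ];
    let eoi = 10..11;
    println!("{:?}", parser().parse(Stream::from_iter(eoi, tokens.into_iter())));
}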
@@ -4,8 +4,6 @@ use logos::{Logos};
 
 #[derive(Logos, Debug, Clone, PartialEq)]
 pub enum Token {
-    #[token(r"\n")]
-    NewLine,
     // Identifier
     #[regex(r"[a-zA-Z_][a-zA-Z0-9_]*", |lex| lex.slice().to_owned())]
     Identifier(String),
@@ -55,7 +53,6 @@ pub enum Token {
     #[token("String")]
     StringType,
 
-
     // Literals
     #[regex(r#""([^"\\]|\\.)*""#, |lex| lex.slice().to_owned())]
     StringLiteral(String),
@@ -146,10 +143,11 @@ pub enum Token {
     #[regex(r"/\*([^*]|\*[^/])*\*/", logos::skip)]
     Comment,
 
-    #[regex(r"[\t\r\f]+", logos::skip)]
+    #[regex(r"[ \t\f]+", logos::skip)]
     Whitespace,
 
-
+    #[token(r"[ \n]")]
+    NewLine,
 
     Eof,
 
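One thing worth flagging about the hunk above: #[token(...)] in logos matches the literal characters of its string, and a raw string does not process escapes, so r"[ \n]" (like the r"\n" removed earlier) never matches an actual line break. A minimal sketch of the distinction, assuming logos 0.12 (where the derive still expects an #[error] variant); none of the names below are the project's own:

use logos::Logos;

#[derive(Logos, Debug, Clone, PartialEq)]
enum Token {
    #[regex(r"[a-zA-Z_][a-zA-Z0-9_]*")]
    Identifier,

    // "\n" (a normal string escape) is a real newline; the raw string r"\n"
    // would be the two characters '\' and 'n', and #[token] matches literally.
    #[token("\n")]
    NewLine,

    // With #[regex], the pattern is interpreted as a regular expression.
    #[regex(r"[ \t\f]+", logos::skip)]
    Whitespace,

    #[error]
    Error,
}

fn main() {
    // Prints Identifier, NewLine, Identifier for the input "a\nb".
    for token in Token::lexer("a\nb") {
        println!("{:?}", token);
    }
}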
@@ -159,7 +157,6 @@ pub enum Token {
 impl fmt::Display for Token {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
-            Token::NewLine => write!(f, ""),
             Token::Identifier(ident) => write!(f, "{ident}"),
             Token::Fun => write!(f, "fun"),
             Token::Class => write!(f, "class"),
@@ -202,6 +199,7 @@ impl fmt::Display for Token {
             Token::Comma => write!(f, ","),
             Token::Dot => write!(f, "."),
             Token::Comment => write!(f, ""),
+            Token::NewLine => write!(f, "\n"),
             Token::Whitespace => write!(f, ""),
             Token::Eof => write!(f, ""),
             Token::Error => write!(f, "<error>"),
@@ -41,8 +41,8 @@ fn main() {
     println!("{:?}", sourcecode);
 
     let lexer = Token::lexer(&sourcecode)
-        .spanned()
-        .collect::<Vec<_>>();
+        .spanned();
+        //.collect::<Vec<_>>();
 
     for token in lexer {
         println!("{:?}", token);
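With the .collect() call commented out, lexer is the spanned iterator itself rather than a Vec. Under logos 0.12, iterating a .spanned() lexer yields (Token, Range<usize>) pairs, so the loop prints each token together with its byte span (logos 0.13+ yields Result-wrapped tokens instead). A minimal sketch of that shape, using a throwaway token type that is an assumption, not the project's real lexer:

use logos::Logos;

#[derive(Logos, Debug, PartialEq)]
enum Token {
    #[regex(r"[a-zA-Z_][a-zA-Z0-9_]*")]
    Identifier,

    #[token("=")]
    Assign,

    #[regex(r"[ \t\f]+", logos::skip)]
    Whitespace,

    #[error]
    Error,
}

fn main() {
    let sourcecode = "x = y";
    // .spanned() pairs each token with its byte range in the input.
    let lexer = Token::lexer(sourcecode).spanned();
    for (token, span) in lexer {
        println!("{:?} at {:?}", token, span);
    }
}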