huh, multiline works now :O
parent b1d118a1a2
commit 6756d286f9
3 changed files with 31 additions and 38 deletions
justfile (new file, 2 additions)
@@ -0,0 +1,2 @@
+run:
+    cargo run
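(With this in place, `just run` is shorthand for `cargo run`, assuming the `just` task runner is installed.)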
src/language_frontend/abstract_syntax_tree/parser.rs
@@ -1,20 +1,33 @@
 use chumsky::{
-    combinator::Or, error::Rich, extra, input::ValueInput, prelude::{choice, end, just, nested_delimiters, recursive, via_parser}, primitive::select, recursive, select, select_ref, span::{self, SimpleSpan}, text::{self, ascii::{ident, keyword}, newline, whitespace}, Boxed, ConfigIterParser, IterParser, Parser
+    combinator::Or, error::Rich, extra, input::{Input, Stream, ValueInput}, prelude::{choice, end, just, nested_delimiters, recursive, skip_then_retry_until, via_parser}, primitive::select, recursive, select, select_ref, span::{self, SimpleSpan}, text::{self, ascii::{ident, keyword}, newline, whitespace}, Boxed, ConfigIterParser, IterParser, ParseResult, Parser
 };
 use logos::{source, Logos};
 
-use crate::language_frontend::{abstract_syntax_tree::ast::{BinaryOp, Expr}, lexer::tokens::Token};
+use crate::language_frontend::{abstract_syntax_tree::ast::{BinaryOp, Expr, UnaryOp}, lexer::tokens::{self, Token}};
 
 // goal of parsing is to construct an abstract syntax tree
 
-#[allow(clippy::let_and_return)]
-pub fn parser<'tokens, 'src: 'tokens, I>()
--> impl Parser<'tokens, I, Expr, extra::Err<Rich<'tokens, Token>>>
+pub fn parse(source: &str) -> Result<Vec<Expr>, Vec<Rich<'_, Token>>> {
+    let token_iter = Token::lexer(source).spanned().map(|(token, span)| (token.unwrap_or(Token::Error), span.into()));
+
+    let end_of_input: SimpleSpan = (source.len()..source.len()).into();
+    let token_stream = Stream::from_iter(token_iter)
+        // Tell chumsky to split the (Token, SimpleSpan) stream into its parts so that it can handle the spans for us
+        // This involves giving chumsky an 'end of input' span: we just use a zero-width span at the end of the string
+        .map((0..end_of_input.into_iter().len()).into(), |(t, s): (_, _)| (t, s));
+
+    parser().parse(token_stream).into_result()
+}
+
+#[allow(clippy::let_and_return)]
+fn parser<'src, I>()
+-> impl Parser<'src, I, Vec<Expr>, extra::Err<Rich<'src, Token>>>
 where
-    I: ValueInput<'tokens, Token = Token, Span = SimpleSpan>,
+    I: ValueInput<'src, Token = Token, Span = SimpleSpan>,
 {
-    let ident = select! {
-        Token::Identifier(s) => s,
-    };
+    let ident = select! { Token::Identifier(s) => s, };
     /*
     let block = recursive(|block| {
         let indent = just(Token::NewLine)
@@ -64,7 +77,7 @@ where
 
         add_sub
     });
-    //let decl = recursive(|decl| {
+    let decl = recursive(|decl| {
         let var = just(Token::Var)
             .ignore_then(ident)
             .then_ignore(just(Token::Assign))
@@ -74,9 +87,9 @@ where
                 target: Box::new(Expr::Ident(name)),
                 value: Box::new(rhs),
             });
-        var.or(expr)
-        //});
-        //decl.then_ignore(end())
+
+        var.or(expr)
+    });
+
+    decl.repeated().collect()
 
 }
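(For context: the new parse function follows the logos-to-chumsky bridge pattern from chumsky's own logos example. Below is a minimal, self-contained sketch of that pattern against a made-up two-token grammar; `Tok`, `sum_parser`, and `demo` are illustrative names, not part of this repo:

use chumsky::{error::Rich, extra, input::{Stream, ValueInput}, prelude::*, select, span::SimpleSpan};
use logos::Logos;

// Hypothetical token set, standing in for the crate's real `Token` enum.
#[derive(Logos, Clone, Debug, PartialEq)]
#[logos(skip r"[ \t\n]+")]
enum Tok {
    // Never produced by logos itself; substituted by hand for lexer errors,
    // mirroring the Token::Error pattern used in this commit.
    Error,
    #[token("+")]
    Plus,
    #[regex(r"[0-9]+", |lex| lex.slice().parse().ok())]
    Num(i64),
}

// Parses `num (+ num)*` and folds it into a running sum.
fn sum_parser<'src, I>() -> impl Parser<'src, I, i64, extra::Err<Rich<'src, Tok>>>
where
    I: ValueInput<'src, Token = Tok, Span = SimpleSpan>,
{
    let num = select! { Tok::Num(n) => n };
    num.clone().foldl(just(Tok::Plus).ignore_then(num).repeated(), |a, b| a + b)
}

fn demo(src: &str) -> Result<i64, Vec<Rich<'_, Tok>>> {
    // Lexer errors become an explicit Error token so parsing stays recoverable.
    let token_iter = Tok::lexer(src)
        .spanned()
        .map(|(tok, span)| (tok.unwrap_or(Tok::Error), SimpleSpan::from(span)));
    // Zero-width end-of-input span at the end of the source string.
    let token_stream = Stream::from_iter(token_iter)
        .map((src.len()..src.len()).into(), |(t, s): (_, _)| (t, s));
    sum_parser().parse(token_stream).into_result()
}

One detail worth double-checking: chumsky's example passes the end-of-input span to .map directly, while the committed line derives it via `(0..end_of_input.into_iter().len()).into()`, which, if I'm reading it right, is a zero-width span at offset 0 rather than at `source.len()`.)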
src/main.rs (26 changed lines)
@@ -3,10 +3,7 @@ use chumsky::prelude::end;
 use chumsky::Parser;
 use logos::Logos;
 
-use crate::{
-    language_frontend::lexer::tokens::Token, language_frontend::abstract_syntax_tree::parser::parser};
-
-use crate::language_frontend::abstract_syntax_tree::ast::{Expr};
+use crate::language_frontend::abstract_syntax_tree::parser::parse;
 
 mod language_frontend;
 
@@ -20,30 +17,11 @@ Simple Compiler -> 4 Stages:
 
 fn main() {
     let sourcecode = std::fs::read_to_string("sample.akai").unwrap();
 
-    // Create a logos lexer over the source code
-    let token_iter = Token::lexer(&sourcecode)
-        .spanned()
-        // Convert logos errors into tokens. We want parsing to be recoverable and not fail at the lexing stage, so
-        // we have a dedicated `Token::Error` variant that represents a token error that was previously encountered
-        .map(|(tok, span)| match tok {
-            // Turn the `Range<usize>` spans logos gives us into chumsky's `SimpleSpan` via `Into`, because it's easier
-            // to work with
-            Ok(tok) => (tok, span.into()),
-            Err(()) => (Token::Error, span.into()),
-        });
-
-    //println!("Token Stream:");
-    //for (token, span) in token_iter.clone() {
-    //    println!("{:?} at {:?}", token, span);
-    //}
-
-    // Turn the token iterator into a stream that chumsky can use for things like backtracking
-    let token_stream = Stream::from_iter(token_iter)
-        // Tell chumsky to split the (Token, SimpleSpan) stream into its parts so that it can handle the spans for us
-        // This involves giving chumsky an 'end of input' span: we just use a zero-width span at the end of the string
-        .map((0..sourcecode.len()).into(), |(t, s): (_, _)| (t, s));
-
-    println!("{:?}", sourcecode);
-
     /*
@@ -56,7 +34,7 @@ fn main() {
     }
     */
 
-    match parser().then_ignore(end()).parse(token_stream).into_result() {
+    match parse(&sourcecode) {
         Ok(res) => println!("{:?}", res),
         Err(e) => {
             panic!("{:#?}", e)
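(With the entry point factored out, main now just reads the file and delegates. Stitched together from the diff's own lines, the commented-out block aside, it is roughly:

fn main() {
    let sourcecode = std::fs::read_to_string("sample.akai").unwrap();

    match parse(&sourcecode) {
        Ok(res) => println!("{:?}", res),
        Err(e) => panic!("{:#?}", e),
    }
}

All the lexing and token-stream plumbing previously done here now lives behind parse in the parser module.)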