commit 92db6f7f6e (parent de63467730)
4 changed files with 51 additions and 30 deletions
@@ -22,10 +22,7 @@ pub enum Expr {
     Err, // todo
-    Call {
-        callee: Box<Expr>,
-        arguments: Vec<Expr>,
-    },
+    Call(Call),
     Unary {
         operator: UnaryOp,
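A minimal sketch (not committed code) of the refactor in the hunk above: the inline fields of the `Call { .. }` variant move into a standalone `Call` struct that the enum wraps as a tuple variant, so call data can be passed around and matched on independently of `Expr`. The `Literal` variant below is a stand-in for the other, unchanged variants.

// Sketch of the variant refactor: inline fields become a named struct.
#[derive(Debug)]
enum Expr {
    Literal(i64), // stand-in for the real literal/ident variants
    Call(Call),   // tuple variant wrapping the extracted struct
}

#[derive(Debug)]
struct Call {
    callee: Box<Expr>,
    arguments: Vec<Expr>,
}

fn main() {
    // Constructing a call expression now goes through the named struct.
    let call = Expr::Call(Call {
        callee: Box::new(Expr::Literal(0)),
        arguments: vec![Expr::Literal(1), Expr::Literal(2)],
    });
    println!("{:#?}", call);
}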
@@ -42,5 +39,8 @@ pub enum Expr {
         target: Box<Expr>,
         value: Box<Expr>,
     },
+    Function(Function),
     Error,
 }
@@ -50,8 +50,8 @@ pub enum Literal {
 #[derive(Clone, Debug, PartialEq)]
 pub enum Type {
-    UnsignedInteger,
-    SignedInteger,
+    Integer,
+    Float,
     Bool,
     Char,
     String,
@@ -59,8 +59,8 @@ pub enum Type {
 #[derive(Clone, Debug, PartialEq)]
 pub enum Value {
-    UnsignedInteger(u32),
-    SignedInteger(i32),
+    Integer(i64),
+    Float(f64),
     Bool(bool),
     Char(char),
     String(String),
@@ -71,8 +71,8 @@ impl Value {
         match (ty, self) {
             (Type::Bool, Value::Bool(_)) => true,
             (Type::Char, Value::Char(_)) => true,
-            (Type::SignedInteger, Value::SignedInteger(_)) => true,
-            (Type::UnsignedInteger, Value::UnsignedInteger(_)) => true,
+            (Type::Integer, Value::Integer(_)) => true,
+            (Type::Float, Value::Float(_)) => true,
             (Type::String, Value::String(_)) => true,
             _ => false,
         }
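A minimal, self-contained sketch (not committed code) of where the three hunks above leave the type and value enums: the unsigned/signed pair is renamed to `Integer`/`Float` and widened to `i64`/`f64`. The method name `is_of_type` is a hypothetical stand-in, since the real method signature lies outside the hunk.

// Trimmed versions of the Type/Value enums after this commit's rename.
#[derive(Clone, Debug, PartialEq)]
enum Type { Integer, Float, Bool, Char, String }

#[derive(Clone, Debug, PartialEq)]
enum Value { Integer(i64), Float(f64), Bool(bool), Char(char), String(String) }

impl Value {
    // Hypothetical name for the checking method whose body the hunk edits.
    fn is_of_type(&self, ty: &Type) -> bool {
        match (ty, self) {
            (Type::Bool, Value::Bool(_)) => true,
            (Type::Char, Value::Char(_)) => true,
            (Type::Integer, Value::Integer(_)) => true,
            (Type::Float, Value::Float(_)) => true,
            (Type::String, Value::String(_)) => true,
            _ => false,
        }
    }
}

fn main() {
    assert!(Value::Integer(42).is_of_type(&Type::Integer));
    assert!(!Value::Float(1.5).is_of_type(&Type::Integer));
    println!("type checks pass");
}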
@@ -86,7 +86,6 @@ impl Value {
 #[derive(Clone, Debug, PartialEq)]
 pub struct Ident(pub Rc<str>);
 
-
 #[derive(Clone, Debug, PartialEq)]
 pub struct While {
     pub condition: Expr,
@@ -100,11 +99,33 @@ pub struct Condition {
     pub else_body: Option<BlockStatement>,
 }
 
+/// Represents the Structure of a `Function` in AkaiLang
+///
+/// Examples:
+///```AkaiLang
+///fun helloWorld() {
+///    print("Hello World")
+///}
+///```
+/// <br>
+///
+///```AkaiLang
+///fun returnsIntPlusOne(i: i32): i32 {
+///    -> i + 1
+///}
+///```
 #[derive(Clone, Debug, PartialEq)]
 pub struct Function {
     pub name: Rc<str>,
     pub params: Vec<(Ident, Type)>,
     pub return_type: Option<Type>,
     pub body: Vec<Statement>,
+    pub body_expr: Option<Type>
 }
 
 
+#[derive(Clone, Debug, PartialEq)]
+pub struct Call {
+    callee: Box<Expr>,
+    arguments: Vec<Expr>,
+}
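A minimal sketch (not committed code) of how a value of the new `Function` node, including the added `body_expr: Option<Type>` field, might be built for the `returnsIntPlusOne` example from the doc comment above. `Statement` is reduced to a unit placeholder and the field values are illustrative; the real definitions live elsewhere in the crate.

use std::rc::Rc;

// Stand-ins for crate types this diff does not show.
#[derive(Clone, Debug, PartialEq)]
struct Ident(Rc<str>);
#[derive(Clone, Debug, PartialEq)]
enum Type { Integer }
#[derive(Clone, Debug, PartialEq)]
struct Statement;

// Field layout as introduced by the hunk above, including the new `body_expr`.
#[derive(Clone, Debug, PartialEq)]
struct Function {
    name: Rc<str>,
    params: Vec<(Ident, Type)>,
    return_type: Option<Type>,
    body: Vec<Statement>,
    body_expr: Option<Type>,
}

fn main() {
    // Roughly what `fun returnsIntPlusOne(i: i32): i32 { -> i + 1 }`
    // could become once parsed into this node (values are illustrative).
    let f = Function {
        name: Rc::from("returnsIntPlusOne"),
        params: vec![(Ident(Rc::from("i")), Type::Integer)],
        return_type: Some(Type::Integer),
        body: vec![Statement],
        body_expr: Some(Type::Integer),
    };
    println!("{:#?}", f);
}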
@@ -9,7 +9,6 @@ use crate::language_frontend::{abstract_syntax_tree::{ast::Expr, definitions::*}
 pub fn parse(source: &str) ->Result<Vec<Expr>, Vec<Rich<'_, Token>>> {
     let token_iter = Token::lexer(source).spanned().map(|(token, span)| (token.unwrap_or(Token::Error), span.into()));
-
     let end_of_input: SimpleSpan = (0..source.len()).into();
     let token_stream = Stream::from_iter(token_iter)
         // Tell chumsky to split the (Token, SimpleSpan) stream into its parts so that it can handle the spans for us
@@ -86,6 +85,18 @@ where
                 target: Box::new(Expr::Ident(name)),
                 value: Box::new(rhs),
             });
+        /*
+        let fun = just(Token::Fun)
+            .ignore_then(ident.clone())
+            .then_ignore(just(Token::LParen))
+            .then_ignore(just(Token::RParen))
+            .map(|(((name, args), ret), (body, body_expr))| Expr::Function(Function {
+                name,
+                params: args,
+                return_type: ret,
+                body,
+                body_expr
+            })); */
         var.or(expr)
     });
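The closure in the commented-out parser above destructures `(((name, args), ret), (body, body_expr))` because chained `.then(...)` combinators pair each new result with everything parsed so far, nesting tuples to the left. A dependency-free sketch (not committed code) of that unpacking; the concrete values are illustrative, not real parser output.

// Shows how the left-nested tuple produced by chained `.then(...)` steps
// is taken apart by the closure's pattern.
fn main() {
    let parsed = (
        (("returnsIntPlusOne", vec![("i", "i32")]), Some("i32")),
        (Vec::<&str>::new(), Some("i + 1")),
    );

    let (((name, args), ret), (body, body_expr)) = parsed;
    println!("name={name} args={args:?} ret={ret:?} body={body:?} body_expr={body_expr:?}");
}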
@@ -93,6 +104,11 @@ where
 }
 
+pub fn lex(source: &str) -> Vec<Token> {
+    Token::lexer(&source)
+        .filter_map(|t| t.ok()).collect()
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
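The new `lex` helper drops anything the lexer cannot recognise by calling `filter_map(|t| t.ok())` on the logos lexer, whose iterator yields `Result<Token, _>` items. A self-contained sketch (not committed code) of the same pattern, assuming logos 0.13+ and an illustrative token set; the crate's real `Token` enum is not shown in this diff.

use logos::Logos;

// Illustrative token set standing in for the crate's `Token` enum.
#[derive(Logos, Debug, PartialEq, Clone)]
#[logos(skip r"[ \t\r\n]+")]
enum Token {
    #[token("fun")]
    Fun,
    #[token("(")]
    LParen,
    #[token(")")]
    RParen,
    #[token("{")]
    LBrace,
    #[token("}")]
    RBrace,
    #[regex(r"[A-Za-z_][A-Za-z0-9_]*")]
    Ident,
}

// Same shape as the helper added in the hunk above: lex the whole input
// and silently drop tokens that failed to lex.
fn lex(source: &str) -> Vec<Token> {
    Token::lexer(source)
        .filter_map(|t| t.ok())
        .collect()
}

fn main() {
    println!("{:?}", lex("fun helloWorld() { }"));
}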
src/main.rs — 20 changed lines
@@ -4,6 +4,7 @@ use logos::Logos;
 mod language_frontend;
 
+use crate::language_frontend::abstract_syntax_tree::parser::lex;
 use crate::{
     language_frontend::lexer::tokens::Token, language_frontend::abstract_syntax_tree::parser::parse};
@@ -26,25 +27,8 @@ fn main() {
     println!("{:?}", sourcecode);
 
-    let lexer = Token::lexer(&sourcecode)
-        .spanned()
-        .collect::<Vec<_>>();
-
-    for token in lexer {
-        println!("{:?}", token);
-    }
-
-    let token_iter = Token::lexer(&sourcecode).spanned().map(|(tok, span)| tok.map(|t| (t, span))).filter_map(Result::ok);
-
-    let token_stream = Stream::from_iter(token_iter)
-        // Tell chumsky to split the (Token, SimpleSpan) stream into its parts so that it can handle the spans for us
-        // This involves giving chumsky an 'end of input' span: we just use a zero-width span at the end of the string
-        .map((0..sourcecode.len()).into(), |(t, s): (_, _)| (t, s));
-
     match parse(&sourcecode) {
-        Ok(res) => println!("{:?}", res),
+        Ok(res) => println!("{:#?}", res),
         Err(e) => {
             panic!("{:#?}", e)
         }
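Besides deleting the debug lexer loop, the hunk above switches the success branch from `{:?}` to `{:#?}`, i.e. from single-line to pretty-printed `Debug` output, which is easier to read for nested AST values. A tiny sketch (not committed code) of the difference, using a stand-in struct.

// `{:?}` prints Debug output on one line; `{:#?}` pretty-prints it with
// indentation across multiple lines.
#[derive(Debug)]
struct Call { callee: String, arguments: Vec<i32> }

fn main() {
    let c = Call { callee: "print".to_string(), arguments: vec![1, 2] };
    println!("{:?}", c);   // Call { callee: "print", arguments: [1, 2] }
    println!("{:#?}", c);  // multi-line, indented form of the same value
}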