Compare commits

2 commits: master ... 613e839a6e

| Author | SHA1 | Date |
|---|---|---|
|  | 613e839a6e |  |
|  | 29e0a3e972 |  |

flake.nix (13 lines changed)

@@ -9,28 +9,25 @@
      inputs.nixpkgs.follows = "nixpkgs";
    };

    fenix = {
      url = github:nix-community/fenix;
    rust-overlay = {
      url = github:oxalica/rust-overlay;
      inputs.nixpkgs.follows = "nixpkgs";
    };

    flake-utils.url = github:numtide/flake-utils;
  };

  outputs = inputs@{ self, nixpkgs, crane, fenix, flake-utils }:
  outputs = inputs@{ self, nixpkgs, crane, rust-overlay, flake-utils }:
    flake-utils.lib.eachDefaultSystem (system:
      let
        pkgs = import nixpkgs {
          inherit system;
          overlays = [ fenix.overlays.default ];
          overlays = [ rust-overlay.overlays.default ];
        };

        inherit (pkgs) lib;

        toolchain = pkgs.fenix.fromToolchainFile {
          file = ./rust-toolchain.toml;
          sha256 = "sha256-n8LtGbpj/yCUGo0NFJ7FNv9fSdT9oKEUl+EPLg06JdQ=";
        };
        toolchain = pkgs.rust-bin.fromRustupToolchainFile ./rust-toolchain.toml;

        craneLib = (crane.mkLib pkgs).overrideToolchain toolchain;
        src = craneLib.cleanCargoSource (craneLib.path ./.);

@@ -1,5 +1,4 @@
use std::env;
use std::path::PathBuf;
use std::process::exit;

const VERSION: &str = env!("CARGO_PKG_VERSION");
@@ -9,7 +8,7 @@ const CRATE: &str = env!("CARGO_CRATE_NAME");
#[derive(Default)]
pub struct Args {
    version: bool,
    file: Option<PathBuf>,
    file: Option<String>,
}

impl Args {
@@ -35,7 +34,7 @@ impl Args {
                    if self.file.is_some() {
                        panic!("please specify only a single source file!");
                    }
                    self.file = Some(PathBuf::from(file));
                    self.file = Some(file.to_owned());
                }
            }
        }
@@ -56,7 +55,7 @@ impl Args {
    /// Fetches the file from the arguments.
    /// Panics if there is no file in the arguments
    #[inline]
    pub fn get_file(self) -> PathBuf {
    pub fn get_file(self) -> String {
        self.file.expect("no file supplied!")
    }
}
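The hunks above swap the stored source-file argument between `Option<PathBuf>` and `Option<String>`. A minimal standalone sketch (not part of the diff; the example path is made up) of what that difference means at the call site:

```rust
use std::path::PathBuf;

fn main() {
    // Stored as a PathBuf: not guaranteed to be valid UTF-8,
    // so printing it goes through a lossy conversion.
    let as_path: PathBuf = PathBuf::from("examples/hello.tri");
    println!("{}", as_path.to_string_lossy());

    // Stored as a plain String: usable with Display directly.
    let as_string: String = String::from("examples/hello.tri");
    println!("{}", as_string);
}
```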

src/ast.rs (113 lines changed)

@@ -1,132 +1,89 @@
//! A very naive AST definition using recursive enums
//!
//! See the parser for implementation
/// A very naive AST definition using recursive enums
/// See the parser for implementation

use std::rc::Rc;

pub type Parent = Vec<Entity>;

/// Entities are functions, classes, and modules
#[derive(Debug, PartialEq)]
#[derive(Debug)]
pub enum Entity {
    Fn(Fn),
    Class(Class),
    Module(Module),
    Static(Let),
}

/// A module just provides an additional scope
///
/// TODO: Add exporting and importing modules
#[derive(Debug, PartialEq)]
#[derive(Debug)]
pub struct Module {
    /// Name of module
    pub name: Rc<str>,
    /// Everything inside the module
    pub children: Vec<ModuleChildren>,
}

/// Modules contain functions, classes and statements
#[derive(Debug, PartialEq)]
#[derive(Debug)]
pub enum ModuleChildren {
    Fn(Fn),
    Class(Class),
    Module(Module),
    Static(Let),
    Statement(Statement),
}

/// Classes encapsulate functions and definitions.
#[derive(Debug, PartialEq)]
#[derive(Debug)]
pub struct Class {
    /// Name of class
    pub name: Rc<str>,
    /// Everything inside the class
    pub children: Vec<ClassChildren>,
}

#[derive(Debug, PartialEq)]
/// Classes contain functions and statements.
///
/// TODO: Maybe change statements to something else
#[derive(Debug)]
pub enum ClassChildren {
    Fn(Fn),
    Let(Let),
    Static(Let),
    Statement(Statement),
}

/// A Function
#[derive(Debug, PartialEq)]
#[derive(Debug)]
pub struct Fn {
    /// Name of the function
    pub name: Rc<str>,
    /// Optional return type
    pub return_ty: Option<Ty>,
    /// Parameters
    pub params: Vec<(Rc<str>, Ty)>,
    /// The function block
    pub return_typ: Option<Primitive>,
    pub params: Vec<(Rc<str>, Primitive)>,
    pub children: Vec<Statement>,
}

/// Statements encapsulate expressions and definitions
#[derive(Debug, PartialEq)]
#[derive(Debug)]
pub enum Statement {
    Static(Let),
    Let(Let),
    Expr(Expr),
    Block(Vec<Statement>),
}

/// A variable definition
#[derive(Debug, PartialEq)]
#[derive(Debug)]
pub struct Let {
    /// Name of variabe
    pub name: Rc<str>,
    /// Type of variable
    pub ty: Ty,
    /// Value of variable
    pub typ: Primitive,
    pub expr: Option<Expr>,
}

type Op = crate::lexer::TokenSymbol;

#[derive(Debug)]
pub enum Expr {
    Int(i32),
    Float(f32),
    Char(char),
    Op(Op, Box<Expr>, Option<Box<Expr>>),
    If(Box<Expr>, Box<Expr>, Option<Box<Expr>>),
    Loop,
    Break,
    Continue,
}

/// Primitives
///
/// TODO: add arrays and pointers maybe
#[derive(Debug, PartialEq)]
pub enum Ty {
#[derive(Debug)]
pub enum Primitive {
    Int,
    Float,
    Char,
}

#[derive(Debug, PartialEq)]
pub struct If {
    pub cond: Box<Expr>,
    pub then: Vec<Statement>,
    pub or: Option<Box<ElseType>>,
}

#[derive(Debug, PartialEq)]
pub enum ElseType {
    If(If),
    Else(Vec<Statement>),
}

pub(crate) type Op = crate::lexer::TokenSymbol;

#[derive(Debug, PartialEq)]
pub enum Literal {
    Int(i32),
    Float(f32),
    Char(char),
}

/// Lowest form of expression
///
/// TODO: refine
#[derive(Debug, PartialEq)]
pub enum Expr {
    Literal(Literal),
    Identifier(Rc<str>),
    Op(Op, Box<Expr>, Option<Box<Expr>>),
    If(If),
    Block(Vec<Statement>),
    Loop(Vec<Statement>),
    Break,
    Continue,
    Return(Option<Box<Expr>>),
}
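For reference, a minimal sketch (not part of the diff) of how the recursive enums above compose. It assumes the `PartialEq`-deriving side of the hunk with `tricc::ast` in scope, and builds the expression `if 1 { 1 } else { 0 }` as a single value:

```rust
use tricc::ast::{ElseType, Expr, If, Literal, Statement};

fn main() {
    // if 1 { 1 } else { 0 } expressed with the AST types defined above
    let ast = Expr::If(If {
        cond: Box::new(Expr::Literal(Literal::Int(1))),
        then: vec![Statement::Expr(Expr::Literal(Literal::Int(1)))],
        or: Some(Box::new(ElseType::Else(vec![Statement::Expr(
            Expr::Literal(Literal::Int(0)),
        )]))),
    });
    println!("{:?}", ast);
}
```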

src/lexer.rs (90 lines changed)

@@ -6,7 +6,7 @@ use std::str;
/// All token literals
///
/// TODO: Add string
#[derive(Debug, PartialEq, Clone, Copy)]
#[derive(Debug, PartialEq)]
pub enum TokenLiteral {
    Int,
    Float,
@@ -14,9 +14,11 @@ pub enum TokenLiteral {
}

/// All token symbols
#[derive(Debug, PartialEq, Clone, Copy)]
///
/// TODO: Maybe add *
#[derive(Debug, PartialEq)]
pub enum TokenSymbol {
    // arithmetic
    // operators
    Plus,
    Minus,
    Star,
@@ -38,7 +40,9 @@ pub enum TokenSymbol {

    // relational
    Gt,
    Ge,
    Lt,
    Le,
    GtEq,
    LtEq,
    EqEq,
@@ -65,7 +69,7 @@ pub enum TokenSymbol {
}

/// All token keywod
#[derive(Debug, PartialEq, Clone, Copy)]
#[derive(Debug, PartialEq)]
pub enum TokenKeyword {
    // parents
    Fn,
@@ -73,7 +77,6 @@ pub enum TokenKeyword {
    Module,

    // statements
    Static,
    Let,
    Ret,

@@ -86,7 +89,6 @@ pub enum TokenKeyword {
    Loop,
    Break,
    Continue,
    Return,

    // primitives
    Int,
@@ -97,7 +99,7 @@ pub enum TokenKeyword {
/// All token delimiters
///
/// TODO: Maybe add \[ and \]
#[derive(Debug, PartialEq, Clone, Copy)]
#[derive(Debug, PartialEq)]
pub enum TokenDelimiter {
    BraceOpen,
    BraceClose,
@@ -106,7 +108,7 @@ pub enum TokenDelimiter {
}

/// All tokens
#[derive(Debug, PartialEq, Clone, Copy)]
#[derive(Debug, PartialEq)]
pub enum TokenKind {
    Newline,
    Eof,
@@ -136,10 +138,9 @@ pub struct Lexer<'a> {
    /// A peekable double ended queue for the tokens
    tokens: VecDeque<Token>,
    /// Current line number
    pub(crate) line: usize,
    pub(crate) col: usize,
    pub line: usize,
    /// Start character index for the current token
    start: usize,
    pub start: usize,
    /// End character index for the current token
    end: usize,
}
@@ -171,7 +172,6 @@ impl<'a> Lexer<'a> {
            chars: content.chars().peekable(),
            tokens: VecDeque::new(),
            line: 1,
            col: 1,
            start: 0,
            end: 0,
        }
@@ -198,7 +198,6 @@ impl<'a> Lexer<'a> {
    #[inline]
    fn next(&mut self) -> Option<char> {
        self.end += 1;
        self.col += 1;
        self.chars.next()
    }

@@ -234,16 +233,11 @@ impl<'a> Lexer<'a> {
                '0'..='9' => {}
                '.' => {
                    if is_float {
                        self.error("multiple decimals encountered");
                        self.error("Multiple decimals encountered");
                        return self.new_token(TokenKind::Invalid);
                    }
                    is_float = true;
                }
                'e' | 'E' => {
                    self.next();
                    is_float = true;
                    break;
                }
                _ => break,
            }
            self.next();
@@ -265,11 +259,6 @@ impl<'a> Lexer<'a> {
            return self.new_token(TokenKind::Invalid);
        }

        if self.peek() != Some(&'\'') {
            self.error("Expected '");
            return self.new_token(TokenKind::Invalid);
        }

        // skip '
        self.next();

@@ -279,7 +268,7 @@ impl<'a> Lexer<'a> {
    fn get_alphanumeric(&mut self) -> Token {
        while let Some(c) = self.peek() {
            match c {
                'a'..='z' | 'A'..='Z' | '0'..='9' | '_' => {}
                'a'..='z' | 'A'..='Z' | '0'..='9' => {}
                _ => break,
            }
            self.next();
@@ -292,7 +281,6 @@ impl<'a> Lexer<'a> {
            "fn" => Keyword(Fn),
            "class" => Keyword(Class),
            "module" => Keyword(Module),
            "static" => Keyword(Static),
            "let" => Keyword(Let),
            "ret" => Keyword(Ret),
            "if" => Keyword(If),
@@ -301,7 +289,6 @@ impl<'a> Lexer<'a> {
            "loop" => Keyword(Loop),
            "break" => Keyword(Break),
            "continue" => Keyword(Continue),
            "return" => Keyword(Return),
            "int" => Keyword(Int),
            "float" => Keyword(Float),
            "char" => Keyword(Char),
@@ -405,11 +392,10 @@ impl<'a> Lexer<'a> {
                '\n' => {
                    self.next();
                    self.line += 1;
                    self.col = 0;
                    self.new_token(TokenKind::Newline)
                }
                '0'..='9' => self.get_numeric(),
                'a'..='z' | 'A'..='Z' | '_' => self.get_alphanumeric(),
                'a'..='z' | 'A'..='Z' => self.get_alphanumeric(),
                '\'' => self.get_char(),
                _ => self.get_symbol(),
            }
@@ -455,7 +441,7 @@ fn test_peek_next() {
}

#[test]
fn test_tokens() {
fn test_tokens_1() {
    let mut lexer = Lexer::new("let test02 = 4 << 1");

    use TokenKind::*;
@@ -515,51 +501,9 @@ fn test_tokens_2() {
}

#[test]
fn test_tokens_eof() {
fn test_tokens_3() {
    let mut lexer = Lexer::new("");

    assert_eq!(lexer.peek_token().kind, TokenKind::Eof);
    assert_eq!(lexer.next_token().kind, TokenKind::Eof);
}

#[test]
fn test_tokens_numeric() {
    let mut lexer = Lexer::new("3342");

    let token = lexer.next_token();
    assert_eq!(token.kind, TokenKind::Literal(TokenLiteral::Int));
    assert_eq!(*token.val, *"3342");

    assert_eq!(lexer.next_token().kind, TokenKind::Eof);
}

#[test]
fn test_tokens_numeric_2() {
    let mut lexer = Lexer::new("334.2e");

    let token = lexer.next_token();
    assert_eq!(token.kind, TokenKind::Literal(TokenLiteral::Float));
    assert_eq!(*token.val, *"334.2e");

    assert_eq!(lexer.next_token().kind, TokenKind::Eof);
}

#[test]
fn test_tokens_numeric_3() {
    let mut lexer = Lexer::new("334.2e-5");

    let mut token = lexer.next_token();
    assert_eq!(token.kind, TokenKind::Literal(TokenLiteral::Float));
    assert_eq!(*token.val, *"334.2e");

    assert_eq!(
        lexer.next_token().kind,
        TokenKind::Symbol(TokenSymbol::Minus)
    );

    token = lexer.next_token();
    assert_eq!(token.kind, TokenKind::Literal(TokenLiteral::Int));
    assert_eq!(*token.val, *"5");

    assert_eq!(lexer.next_token().kind, TokenKind::Eof);
}
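A small usage sketch (mirroring the tests above and the `main.rs` hunk further down, not part of the diff): the lexer is driven by repeatedly peeking and pulling tokens until `Eof`.

```rust
use tricc::lexer::{Lexer, TokenKind};

fn main() {
    let mut lexer = Lexer::new("let test02 = 4 << 1");

    // Print every token up to (but not including) end-of-file.
    while lexer.peek_token().kind != TokenKind::Eof {
        println!("{:?}", lexer.next_token());
    }
}
```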

@@ -1,4 +1,3 @@
pub mod args;
pub mod ast;
pub mod lexer;
pub mod parser;

src/main.rs (20 lines changed)

@@ -4,7 +4,10 @@ use std::{
};

use tricc::args::Args;
use tricc::parser::Parser;
use tricc::lexer::{
    Lexer,
    TokenKind,
};

fn main() {
    panic::set_hook(Box::new(|panic_info| {
@@ -28,13 +31,10 @@ fn main() {

    let file = args.get_file();
    let content = fs::read_to_string(&file).expect("Couldn't read the file");
    let mut parser = Parser::new(&content);
    let Some(parent) = parser.parse() else {
        eprintln!(
            "Failed to parse {} - See the errors above",
            file.to_string_lossy()
        );
        std::process::exit(1);
    };
    println!("Parsed AST:\n{:#?}", parent);

    let mut lexer = Lexer::new(content.as_str());

    while lexer.peek_token().kind != TokenKind::Eof {
        println!("{:?}", lexer.next_token());
    }
}

@@ -1,215 +0,0 @@
use super::Parser;
use crate::ast::*;
use crate::lexer::{
    TokenDelimiter,
    TokenKeyword,
    TokenKind,
    TokenSymbol,
};
use std::rc::Rc;

impl<'a> Parser<'a> {
    /// entity ::= module | class | fn | static
    pub(super) fn parse_entity(&mut self) -> Option<Entity> {
        use TokenKeyword::*;
        let token = self.peek_token();

        if let TokenKind::Keyword(keyword) = &token.kind {
            Some(match keyword {
                Module => Entity::Module(self.parse_module()?),
                Class => Entity::Class(self.parse_class()?),
                Fn => Entity::Fn(self.parse_fn()?),
                Static => Entity::Static(self.parse_static()?),
                _ => {
                    self.error_expected_peek("entity");
                    return None;
                }
            })
        } else {
            self.error_expected_peek("entity");
            None
        }
    }

    /// module ::= "module" ident "{" { module | fn | static | class } "}"
    fn parse_module(&mut self) -> Option<Module> {
        self.next_token();

        let name = self.parse_ident()?;
        let mut children = vec![];

        if !self.skip_token(TokenKind::Delimiter(TokenDelimiter::BraceOpen)) {
            self.error_expected_peek("{");
            return None;
        }

        loop {
            use TokenKeyword::*;
            self.trim_newlines();
            if let TokenKind::Keyword(keyword) = &self.peek_token().kind {
                children.push(match keyword {
                    Module => ModuleChildren::Module(self.parse_module()?),
                    Fn => ModuleChildren::Fn(self.parse_fn()?),
                    Static => ModuleChildren::Static(self.parse_static()?),
                    Class => ModuleChildren::Class(self.parse_class()?),
                    _ => {
                        self.error_expected_peek("module child");
                        return None;
                    }
                });
                if !self.check_newline_or_tok(TokenKind::Delimiter(TokenDelimiter::BraceClose)) {
                    self.error_expected_peek("newline or }");
                    return None;
                }
            } else if !self.skip_token(TokenKind::Delimiter(TokenDelimiter::BraceClose)) {
                self.error_expected_peek("}");
                return None;
            } else {
                break;
            }
        }

        Some(Module { name, children })
    }

    /// class ::= "class" ident "{" { fn | static | let } "}"
    fn parse_class(&mut self) -> Option<Class> {
        self.next_token();

        let name = self.parse_ident()?;
        let mut children = vec![];

        if !self.skip_token(TokenKind::Delimiter(TokenDelimiter::BraceOpen)) {
            self.error_expected_peek("{");
            return None;
        }

        loop {
            use TokenKeyword::*;
            self.trim_newlines();
            if let TokenKind::Keyword(keyword) = &self.peek_token().kind {
                children.push(match keyword {
                    Fn => ClassChildren::Fn(self.parse_fn()?),
                    Static => ClassChildren::Static(self.parse_static()?),
                    Let => ClassChildren::Let(self.parse_let()?),
                    _ => {
                        self.error_expected_peek("class child");
                        return None;
                    }
                });
                if !self.check_newline_or_tok(TokenKind::Delimiter(TokenDelimiter::BraceClose)) {
                    self.error_expected_peek("newline or }");
                    return None;
                }
            } else if !self.skip_token(TokenKind::Delimiter(TokenDelimiter::BraceClose)) {
                self.error_expected_peek("}");
                return None;
            } else {
                break;
            }
        }

        Some(Class { name, children })
    }

    /// fn ::= "fn" ident "(" [ identWithTy { "," identWithTy } ] ")" [ ":" ty ]
    ///        "{" { statement } "}"
    fn parse_fn(&mut self) -> Option<Fn> {
        self.next_token();

        let name = self.parse_ident()?;
        let mut params: Vec<(Rc<str>, Ty)> = vec![];
        let mut return_typ: Option<Ty> = None;
        let mut children: Vec<Statement> = vec![];

        if !self.skip_token(TokenKind::Delimiter(TokenDelimiter::ParenOpen)) {
            self.error_expected_peek("(");
            return None;
        }

        loop {
            if self.peek_token().kind == TokenKind::Identifier {
                params.push(self.parse_ident_with_ty()?);
            }

            if !self.skip_token(TokenKind::Symbol(TokenSymbol::Comma)) {
                if !self.skip_token(TokenKind::Delimiter(TokenDelimiter::ParenClose)) {
                    self.error_expected_peek(", or )");
                    return None;
                } else {
                    break;
                }
            }
        }

        if self.skip_token(TokenKind::Symbol(TokenSymbol::Colon)) {
            return_typ = Some(self.parse_ty()?);
        }

        if !self.skip_token(TokenKind::Delimiter(TokenDelimiter::BraceOpen)) {
            self.error_expected_peek("{");
            return None;
        }

        loop {
            self.trim_newlines();
            if self.skip_token(TokenKind::Delimiter(TokenDelimiter::BraceClose)) {
                break;
            }
            children.push(self.parse_statement()?);
            if !self.check_newline_or_tok(TokenKind::Delimiter(TokenDelimiter::BraceClose)) {
                self.error_expected_peek("newline or }");
                return None;
            }
        }

        Some(Fn {
            name,
            return_ty: return_typ,
            params,
            children,
        })
    }
}

#[test]
fn test_parse_entity() {
    let mut parser = Parser::new(
        r#"module module01 {
             class class01 {
               fn fn01(param01: char, param02: float) {
                 static let let01: int = 4
               }
             }

             fn fn02 (): int { }
          }"#,
    );
    assert_eq!(
        parser.parse_entity(),
        Some(Entity::Module(Module {
            name: "module01".into(),
            children: vec![
                ModuleChildren::Class(Class {
                    name: "class01".into(),
                    children: vec![ClassChildren::Fn(Fn {
                        name: "fn01".into(),
                        return_ty: None,
                        params: vec![("param01".into(), Ty::Char), ("param02".into(), Ty::Float)],
                        children: vec![Statement::Static(Let {
                            name: "let01".into(),
                            ty: Ty::Int,
                            expr: Some(Expr::Literal(Literal::Int(4)))
                        })]
                    })]
                }),
                ModuleChildren::Fn(Fn {
                    name: "fn02".into(),
                    return_ty: Some(Ty::Int),
                    params: vec![],
                    children: vec![]
                })
            ]
        }))
    );
}

@@ -1,435 +0,0 @@
use super::Parser;
use crate::ast::{
    self,
    *,
};
use crate::lexer::{
    TokenDelimiter,
    TokenKeyword,
    TokenKind,
    TokenLiteral,
    TokenSymbol,
};
use std::rc::Rc;

impl<'a> Parser<'a> {
    /// exprIf ::= "if" expr block [ else (block | exprIf ) ]
    fn parse_expr_if(&mut self) -> Option<If> {
        // skip "if"
        self.next_token();

        let cond = Box::new(self.parse_expr()?);
        let then = self.parse_expr_block()?;

        if !self.skip_token(TokenKind::Keyword(TokenKeyword::Else)) {
            return Some(If {
                cond,
                then,
                or: None,
            });
        }

        if self.peek_token().kind != TokenKind::Keyword(TokenKeyword::If) {
            return Some(If {
                cond,
                then,
                or: Some(Box::new(ElseType::Else(self.parse_expr_block()?))),
            });
        }

        Some(If {
            cond,
            then,
            or: Some(Box::new(ElseType::If(self.parse_expr_if()?))),
        })
    }

    /// exprBlock ::= "{" { statement } "}"
    fn parse_expr_block(&mut self) -> Option<Vec<Statement>> {
        let mut statements = vec![];

        // skip {
        self.next_token();

        loop {
            self.trim_newlines();
            if self.skip_token(TokenKind::Delimiter(TokenDelimiter::BraceClose)) {
                break;
            }
            statements.push(self.parse_statement()?);
            if !self.check_newline_or_tok(TokenKind::Delimiter(TokenDelimiter::BraceClose)) {
                self.error_expected_peek("newline or }");
                return None;
            }
        }

        Some(statements)
    }

    /// exprLoop ::= "loop" exprBlock
    fn parse_expr_loop(&mut self) -> Option<Vec<Statement>> {
        self.next_token();
        if self.peek_token().kind != TokenKind::Delimiter(TokenDelimiter::BraceOpen) {
            self.error_expected_peek("{");
            return None;
        }

        self.parse_expr_block()
    }

    /// exprAtom ::= ( "(" expr ")" ) | ident | int | float | char | exprBlock | exprLoop | exprIf
    fn parse_expr_atom(&mut self) -> Option<Expr> {
        use ast::Literal::*;
        use TokenKind::*;

        // TODO: check lvalue validity in the analysis phase
        Some(match self.peek_token().kind {
            Delimiter(TokenDelimiter::ParenOpen) => {
                self.next_token(); // skip (

                let expr = self.parse_expr()?;

                if !self.skip_token(TokenKind::Delimiter(TokenDelimiter::ParenClose)) {
                    self.error_expected_peek(")");
                    return None;
                }

                expr
            }
            Identifier => {
                let token = self.next_token();
                Expr::Identifier(Rc::clone(&token.val))
            }
            Literal(TokenLiteral::Int) => Expr::Literal(Int(self.parse_int()?)),
            Literal(TokenLiteral::Float) => Expr::Literal(Float(self.parse_float()?)),
            Literal(TokenLiteral::Char) => Expr::Literal(Char(self.parse_char()?)),
            Delimiter(TokenDelimiter::BraceOpen) => Expr::Block(self.parse_expr_block()?),
            Keyword(TokenKeyword::Loop) => Expr::Loop(self.parse_expr_loop()?),
            Keyword(TokenKeyword::If) => Expr::If(self.parse_expr_if()?),
            _ => {
                self.error_expected_peek("expression");
                return None;
            }
        })
    }

    /// exprUnary ::= [ unaryOp ] exprAtom
    /// unaryOp   ::= "+" | "-" | "~"
    fn parse_expr_unary(&mut self) -> Option<Expr> {
        use TokenSymbol::*;
        Some(match self.peek_token().kind {
            TokenKind::Symbol(symbol @ (Minus | Plus | Tilde)) => {
                self.next_token();
                Expr::Op(symbol, Box::new(self.parse_expr_atom()?), None)
            }
            _ => self.parse_expr_atom()?,
        })
    }

    /// exprArithmeticMul ::= exprUnary [ arithmeticMulOp exprArithmeticMul ]
    /// arithmeticMulOp   ::= "*" | "/" | "%"
    fn parse_expr_arithmetic_mul(&mut self) -> Option<Expr> {
        use TokenSymbol::*;
        let lhs = self.parse_expr_unary()?;

        Some(match self.peek_token().kind {
            TokenKind::Symbol(symbol @ (Star | Slash | Percent)) => {
                self.next_token();
                Expr::Op(
                    symbol,
                    Box::new(lhs),
                    Some(Box::new(self.parse_expr_arithmetic_mul()?)),
                )
            }
            _ => lhs,
        })
    }

    /// exprArithmeticAdd ::= exprArithmeticMul [ arithmeticAddOp exprArithmeticAdd ]
    /// arithmeticAddOp   ::= "+" | "-"
    fn parse_expr_arithmetic_add(&mut self) -> Option<Expr> {
        use TokenSymbol::*;
        let lhs = self.parse_expr_arithmetic_mul()?;

        Some(match self.peek_token().kind {
            TokenKind::Symbol(symbol @ (Plus | Minus)) => {
                self.next_token();
                Expr::Op(
                    symbol,
                    Box::new(lhs),
                    Some(Box::new(self.parse_expr_arithmetic_add()?)),
                )
            }
            _ => lhs,
        })
    }

    /// exprBitwiseShift ::= exprArithmeticAdd [ bitwiseShiftOp exprBitwiseShift ]
    /// bitwiseShiftOp   ::= "<<" | ">>"
    fn parse_expr_bitwise_shift(&mut self) -> Option<Expr> {
        use TokenSymbol::*;
        let lhs = self.parse_expr_arithmetic_add()?;

        Some(match self.peek_token().kind {
            TokenKind::Symbol(symbol @ (Shl | Shr)) => {
                self.next_token();
                Expr::Op(
                    symbol,
                    Box::new(lhs),
                    Some(Box::new(self.parse_expr_bitwise_shift()?)),
                )
            }
            _ => lhs,
        })
    }

    /// exprBitwiseAnd ::= exprBitwiseShift [ "&" exprBitwiseAnd ]
    fn parse_expr_bitwise_and(&mut self) -> Option<Expr> {
        let lhs = self.parse_expr_bitwise_shift()?;
        let symbol = TokenSymbol::And;

        if !self.skip_token(TokenKind::Symbol(symbol)) {
            return Some(lhs);
        }

        Some(Expr::Op(
            symbol,
            Box::new(lhs),
            Some(Box::new(self.parse_expr_bitwise_and()?)),
        ))
    }

    /// exprBitwiseXor ::= exprBitwiseAnd [ "^" exprBitwiseXor ]
    fn parse_expr_bitwise_xor(&mut self) -> Option<Expr> {
        let lhs = self.parse_expr_bitwise_and()?;
        let symbol = TokenSymbol::Caret;

        if !self.skip_token(TokenKind::Symbol(symbol)) {
            return Some(lhs);
        }

        Some(Expr::Op(
            symbol,
            Box::new(lhs),
            Some(Box::new(self.parse_expr_bitwise_xor()?)),
        ))
    }

    /// exprBiwiseOr ::= exprBitwiseXor [ "|" exprBitwiseOr ]
    fn parse_expr_bitwise_or(&mut self) -> Option<Expr> {
        let lhs = self.parse_expr_bitwise_xor()?;
        let symbol = TokenSymbol::Or;

        if !self.skip_token(TokenKind::Symbol(symbol)) {
            return Some(lhs);
        }

        Some(Expr::Op(
            symbol,
            Box::new(lhs),
            Some(Box::new(self.parse_expr_bitwise_or()?)),
        ))
    }

    /// exprAssign   ::= exprBitwiseOr [ relationalOp exprRelational ]
    /// relationalOp ::= ">" | "<" | ">=" | "<=" | "==" | "!="
    fn parse_expr_relational(&mut self) -> Option<Expr> {
        use TokenSymbol::*;
        let lhs = self.parse_expr_bitwise_or()?;

        Some(match self.peek_token().kind {
            TokenKind::Symbol(symbol @ (Gt | Lt | GtEq | LtEq | EqEq | Ne)) => {
                self.next_token();
                Expr::Op(
                    symbol,
                    Box::new(lhs),
                    Some(Box::new(self.parse_expr_relational()?)),
                )
            }
            _ => lhs,
        })
    }

    /// exprLogicalAnd ::= exprLogicalRelational [ "&&" exprLogicalAnd ]
    fn parse_expr_logical_and(&mut self) -> Option<Expr> {
        let lhs = self.parse_expr_relational()?;
        let symbol = TokenSymbol::AndAnd;

        if !self.skip_token(TokenKind::Symbol(symbol)) {
            return Some(lhs);
        }

        Some(Expr::Op(
            symbol,
            Box::new(lhs),
            Some(Box::new(self.parse_expr_logical_and()?)),
        ))
    }

    /// exprLogicalOr ::= exprLogicalAnd [ "||" exprLogicalOr ]
    fn parse_expr_logical_or(&mut self) -> Option<Expr> {
        let lhs = self.parse_expr_logical_and()?;
        let symbol = TokenSymbol::OrOr;

        if !self.skip_token(TokenKind::Symbol(symbol)) {
            return Some(lhs);
        }

        Some(Expr::Op(
            symbol,
            Box::new(lhs),
            Some(Box::new(self.parse_expr_logical_or()?)),
        ))
    }

    /// exprAssign ::= exprLogicalOr [ assignOp exprAssign ]
    /// assignOp   ::= "=" | "+=" | "-=" | "*=" | "/=" | "%=" | "^=" | "<<=" | ">>=" | "&=" | "|="
    fn parse_expr_assign(&mut self) -> Option<Expr> {
        use TokenSymbol::*;
        let lhs = self.parse_expr_logical_or()?;

        Some(match self.peek_token().kind {
            TokenKind::Symbol(
                symbol @ (Eq | PlusEq | MinusEq | StarEq | SlashEq | PercentEq | CaretEq | ShlEq
                | ShrEq | AndEq | OrEq),
            ) => {
                self.next_token();
                Expr::Op(
                    symbol,
                    Box::new(lhs),
                    Some(Box::new(self.parse_expr_assign()?)),
                )
            }
            _ => lhs,
        })
    }

    /// exprControl ::= "continue" | "break" | "return" [ exprControl ] | exprAssign
    fn parse_expr_control(&mut self) -> Option<Expr> {
        use TokenKeyword::*;

        Some(match self.peek_token().kind {
            TokenKind::Keyword(Continue) => {
                self.next_token();
                Expr::Continue
            }
            TokenKind::Keyword(Break) => {
                self.next_token();
                Expr::Break
            }
            TokenKind::Keyword(Return) => {
                self.next_token();
                Expr::Return(self.parse_expr_control().map(Box::new))
            }
            _ => self.parse_expr_assign()?,
        })
    }

    /// entrypoint for expression parsing using recursive descent parsing
    ///
    /// <https://en.wikipedia.org/wiki/Recursive_descent_parser>
    /// expr ::= exprControl
    pub(super) fn parse_expr(&mut self) -> Option<Expr> {
        self.parse_expr_control()
    }
}

#[test]
fn test_parse_expr() {
    use Literal::*;
    use TokenSymbol::*;

    macro_rules! b {
        ($expr:expr) => {
            Box::new($expr)
        };
    }

    let mut parser = Parser::new(
        r#"if if 1 { 1 } else { 0 } + 9 {
             a = 4
           } else if 1 {
             a = 5
           } else {
           }
          amul ^= (4 + 93 * (1 << 3) / 1.44) ^ bhatura
          stove = { 44 } + amul"#,
    );

    assert_eq!(
        parser.parse_expr(),
        Some(Expr::If(If {
            cond: b!(Expr::Op(
                Plus,
                b!(Expr::If(If {
                    cond: b!(Expr::Literal(Int(1))),
                    then: vec![Statement::Expr(Expr::Literal(Int(1)))],
                    or: Some(b!(ElseType::Else(vec![Statement::Expr(Expr::Literal(
                        Int(0)
                    ))])))
                })),
                Some(b!(Expr::Literal(Int(9))))
            )),
            then: vec![Statement::Expr(Expr::Op(
                Eq,
                b!(Expr::Identifier("a".into())),
                Some(b!(Expr::Literal(Int(4))))
            ))],
            or: Some(b!(ElseType::If(If {
                cond: b!(Expr::Literal(Int(1))),
                then: vec![Statement::Expr(Expr::Op(
                    Eq,
                    b!(Expr::Identifier("a".into())),
                    Some(b!(Expr::Literal(Int(5))))
                ))],
                or: Some(b!(ElseType::Else(vec![])))
            })))
        }))
    );

    assert_eq!(parser.skip_token(TokenKind::Newline), true);

    assert_eq!(
        parser.parse_expr(),
        Some(Expr::Op(
            CaretEq,
            b!(Expr::Identifier("amul".into())),
            Some(b!(Expr::Op(
                Caret,
                b!(Expr::Op(
                    Plus,
                    b!(Expr::Literal(Int(4))),
                    Some(b!(Expr::Op(
                        Star,
                        b!(Expr::Literal(Int(93))),
                        Some(b!(Expr::Op(
                            Slash,
                            b!(Expr::Op(
                                Shl,
                                b!(Expr::Literal(Int(1))),
                                Some(b!(Expr::Literal(Int(3))))
                            )),
                            Some(b!(Expr::Literal(Float(1.44))))
                        )))
                    )))
                )),
                Some(b!(Expr::Identifier("bhatura".into())))
            )))
        ))
    );

    assert_eq!(parser.skip_token(TokenKind::Newline), true);

    assert_eq!(
        parser.parse_expr(),
        Some(Expr::Op(
            Eq,
            b!(Expr::Identifier("stove".into())),
            Some(b!(Expr::Op(
                Plus,
                b!(Expr::Block(vec![Statement::Expr(Expr::Literal(Int(44)))])),
                Some(b!(Expr::Identifier("amul".into())))
            )))
        ))
    );
}

@@ -1,121 +0,0 @@
use super::Parser;
use crate::lexer::{
    TokenKind,
    TokenLiteral,
    TokenSymbol,
};

impl<'a> Parser<'a> {
    /// int ::= digit { digit }
    pub(super) fn parse_int(&mut self) -> Option<i32> {
        let val = self.next_token().val;
        let mut integer: i32 = 0;
        let error = || {
            self.error(&format!(
                "integer values must be in range [{}, {}]",
                i32::MIN,
                i32::MAX
            ))
        };

        for c in val.chars() {
            // c is always ['0'..='9']
            let d = c.to_digit(10)?;

            match integer.checked_mul(10) {
                Some(m) => integer = m,
                None => {
                    error();
                    return None;
                }
            }

            match integer.checked_add(d as i32) {
                Some(a) => integer = a,
                None => {
                    error();
                    return None;
                }
            }
        }

        Some(integer)
    }

    // didnt use parse() because i wanted to do this myself for some reason
    /// f32 can be NaN and inf as well
    /// float ::= int [ "." { digit } ] [ "e" { digit } ]
    pub(super) fn parse_float(&mut self) -> Option<f32> {
        let token = self.next_token();
        let mut chars = token.val.chars();
        let mut float: f32 = 0.0;
        let mut fraction: f32 = 0.0;
        let mut prec: i32 = 0;
        let mut exp: i32 = 0;
        let mut decimal: bool = false;

        // lexer takes care of multiple decimals and non digit characters
        for c in chars.by_ref() {
            match c {
                '.' => decimal = true,
                'e' | 'E' => {
                    // lexer takes care that decimal doesnt come after e
                    let s;
                    match self.peek_token().kind {
                        TokenKind::Symbol(TokenSymbol::Minus) => {
                            s = -1;
                            self.next_token();
                        }
                        TokenKind::Symbol(TokenSymbol::Plus) => {
                            s = 1;
                            self.next_token();
                        }
                        _ => s = 1,
                    }

                    if self.peek_token().kind != TokenKind::Literal(TokenLiteral::Int) {
                        break;
                    }

                    exp = self.parse_int()? * s;
                    break;
                }
                _ => {
                    // c is always ['0'..='9']
                    let d = c.to_digit(10)? as f32;
                    if decimal {
                        fraction *= 10.0;
                        fraction += d;
                        prec += 1;
                    } else {
                        float *= 10.0;
                        float += d;
                    }
                }
            }
        }

        fraction /= 10f32.powi(prec);
        float += fraction;
        float *= 10f32.powi(exp);

        Some(float)
    }

    /// char ::= "'" letter "'"
    pub(super) fn parse_char(&mut self) -> Option<char> {
        // the lexer ensures that the 0th and 2nd characters are both '
        self.next_token().val.chars().nth(1)
    }
}

#[test]
fn test_parse_literals() {
    let mut parser = Parser::new("4524 3123.15e4 9e2 9083482.429455 'c' 3331.13.1");
    assert_eq!(parser.parse_int(), Some(4524));
    assert_eq!(parser.parse_float(), Some(3123.15e4));
    assert_eq!(parser.parse_float(), Some(9e2));
    assert_eq!(parser.parse_float(), Some(9083482.429455));
    assert_eq!(parser.parse_char(), Some('c'));
    assert_eq!(parser.next_token().kind, TokenKind::Invalid);
}
 | 
			
		||||
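
The combine step at the end of parse_float above is easier to see in isolation: the result is (integer part + fraction / 10^prec) * 10^exp, where prec counts the digits consumed after the decimal point. Below is a minimal editorial sketch of that same accumulation scheme applied to a plain &str, detached from the lexer and token stream; parse_float_str is a hypothetical name used only for illustration and is not part of this change.

    // Illustrative sketch (not part of the diff): the accumulation scheme used
    // by parse_float above, applied directly to a string slice.
    fn parse_float_str(s: &str) -> Option<f32> {
        let mut int_part: f32 = 0.0;
        let mut fraction: f32 = 0.0;
        let mut prec: i32 = 0;
        let mut exp: i32 = 0;
        let mut decimal = false;

        let mut chars = s.chars();
        while let Some(c) = chars.next() {
            match c {
                '.' => decimal = true,
                'e' | 'E' => {
                    // everything after the 'e' is the (optionally signed) exponent
                    let rest: String = chars.by_ref().collect();
                    exp = rest.parse::<i32>().ok()?;
                    break;
                }
                _ => {
                    let d = c.to_digit(10)? as f32;
                    if decimal {
                        fraction = fraction * 10.0 + d;
                        prec += 1;
                    } else {
                        int_part = int_part * 10.0 + d;
                    }
                }
            }
        }

        // same combine step as parse_float: (int + frac / 10^prec) * 10^exp
        Some((int_part + fraction / 10f32.powi(prec)) * 10f32.powi(exp))
    }

    #[test]
    fn parse_float_str_examples() {
        assert_eq!(parse_float_str("9e2"), Some(900.0));
        assert_eq!(parse_float_str("45.5"), Some(45.5));
    }
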
@@ -1,156 +0,0 @@
//! A naive parser just to get started
//!
//! Can currently parse only module, class and function declarations, along with let statements

mod entity;
mod expr;
mod literal;
mod statement;

use crate::ast::{
    Parent,
    Ty,
};
use crate::lexer::{
    Lexer,
    Token,
    TokenKeyword,
    TokenKind,
    TokenSymbol,
};
use std::rc::Rc;

pub struct Parser<'a> {
    pub lexer: Lexer<'a>,
}

impl<'a> Parser<'a> {
    /// Creates a new [`Parser`] instance.
    pub fn new(contents: &'a str) -> Parser<'a> {
        Parser {
            lexer: Lexer::new(contents),
        }
    }

    #[inline]
    fn error(&self, message: &str) {
        eprintln!(
            "Parser: {}, at \"{}:{}\"",
            message, self.lexer.line, self.lexer.col
        );
    }

    #[inline]
    fn error_expected(&self, expected: &str, found: &str) {
        self.error(&format!("expected {}, found {}", expected, found));
    }

    #[inline]
    fn error_expected_peek(&mut self, expected: &str) {
        let found = &Rc::clone(&self.peek_token().val);
        self.error_expected(expected, found);
    }

    #[inline]
    fn next_token(&mut self) -> Token {
        let t = self.lexer.next_token();
        println!("{:?}", t);
        t
    }

    #[inline]
    fn peek_token(&mut self) -> &Token {
        return self.lexer.peek_token();
    }

    /// newline ::= "}\n"
    fn trim_newlines(&mut self) {
        while self.peek_token().kind == TokenKind::Newline {
            self.next_token();
        }
    }

    fn skip_token(&mut self, kind: TokenKind) -> bool {
        if self.peek_token().kind == kind {
            self.next_token();
            return true;
        }

        false
    }

    fn check_newline_or_tok(&mut self, token: TokenKind) -> bool {
        match self.peek_token().kind {
            TokenKind::Newline => true,
            d if d == token => true,
            _ => false,
        }
    }

    /// ty ::= "int" | "float" | "char"
    fn parse_ty(&mut self) -> Option<Ty> {
        let ty: Ty;

        if let TokenKind::Keyword(keyword) = &self.peek_token().kind {
            ty = match keyword {
                TokenKeyword::Int => Ty::Int,
                TokenKeyword::Char => Ty::Char,
                TokenKeyword::Float => Ty::Float,
                _ => {
                    self.error_expected_peek("ty");
                    return None;
                }
            };
        } else {
            self.error_expected_peek("ty");
            return None;
        }
        self.next_token();
        Some(ty)
    }

    /// ident ::= ( letter | "_" ) { letter | digit | "_" }
    fn parse_ident(&mut self) -> Option<Rc<str>> {
        if self.peek_token().kind != TokenKind::Identifier {
            self.error_expected_peek("identifier");
            return None;
        }

        Some(Rc::clone(&self.next_token().val))
    }

    /// identWithTy ::= ident ":" ty
    fn parse_ident_with_ty(&mut self) -> Option<(Rc<str>, Ty)> {
        let ident = self.parse_ident()?;

        if !self.skip_token(TokenKind::Symbol(TokenSymbol::Colon)) {
            self.error_expected_peek(":");
            return None;
        }

        Some((ident, self.parse_ty()?))
    }

    /// Returns an [`Entity`] vector after parsing
    ///
    /// parent ::= { entity }
    /// [`Entity`]: crate::ast::Entity
    pub fn parse(&mut self) -> Option<Parent> {
        let mut parent = vec![];

        loop {
            match self.peek_token().kind {
                TokenKind::Newline => self.trim_newlines(),
                TokenKind::Eof => break,
                _ => {
                    parent.push(self.parse_entity()?);
                    if !self.check_newline_or_tok(TokenKind::Eof) {
                        self.error_expected_peek("newline or end of file");
                        return None;
                    }
                }
            }
        }
        Some(parent)
    }
}
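
The removed parse() above follows the usual recursive-descent driver shape: skip newline tokens, stop at end of file, otherwise parse one entity and then require a newline or EOF terminator. As an editorial aside, the sketch below shows that loop shape in isolation over a hypothetical token type; Tok and drive are invented names for illustration and do not appear in this repository.

    // Minimal sketch (not part of the diff) of the driver loop used by parse():
    // skip newlines, stop at Eof, otherwise take one item and demand a terminator.
    // Assumes the token slice always ends with Tok::Eof.
    #[derive(Clone, Copy, PartialEq, Debug)]
    enum Tok {
        Newline,
        Item(u32),
        Eof,
    }

    fn drive(tokens: &[Tok]) -> Option<Vec<u32>> {
        let mut items = Vec::new();
        let mut pos = 0;

        loop {
            match tokens[pos] {
                // trim_newlines
                Tok::Newline => pos += 1,
                // end of input
                Tok::Eof => break,
                // "parse" one entity, then require a newline or EOF after it
                Tok::Item(n) => {
                    items.push(n);
                    pos += 1;
                    if tokens[pos] != Tok::Newline && tokens[pos] != Tok::Eof {
                        return None;
                    }
                }
            }
        }

        Some(items)
    }

    #[test]
    fn drive_examples() {
        use Tok::*;
        assert_eq!(drive(&[Item(1), Newline, Item(2), Eof]), Some(vec![1, 2]));
        assert_eq!(drive(&[Item(1), Item(2), Eof]), None);
    }
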
@@ -1,91 +0,0 @@
use super::Parser;
use crate::ast::*;
use crate::lexer::{
    TokenKeyword,
    TokenKind,
    TokenSymbol,
};

impl<'a> Parser<'a> {
    /// statement ::= static | let | expr
    pub(super) fn parse_statement(&mut self) -> Option<Statement> {
        use TokenKeyword::*;
        println!("STMT");

        Some(match self.peek_token().kind {
            TokenKind::Keyword(Static) => Statement::Static(self.parse_static()?),
            TokenKind::Keyword(Let) => Statement::Let(self.parse_let()?),
            _ => Statement::Expr(self.parse_expr()?),
        })
    }

    /// static ::= "static" let
    pub(super) fn parse_static(&mut self) -> Option<Let> {
        self.next_token();

        if self.peek_token().kind != TokenKind::Keyword(TokenKeyword::Let) {
            self.error_expected_peek("let");
            return None;
        }

        self.parse_let()
    }

    /// let ::= "let" identWithTy [ "=" expr ]
    pub(super) fn parse_let(&mut self) -> Option<Let> {
        self.next_token();

        let (name, ty) = self.parse_ident_with_ty()?;

        let expr = if self.skip_token(TokenKind::Symbol(TokenSymbol::Eq)) {
            self.parse_expr()
        } else if self.peek_token().kind == TokenKind::Newline {
            None
        } else {
            self.error_expected_peek("= or newline");
            return None;
        };

        Some(Let { name, ty, expr })
    }
}

#[test]
fn test_parse_let() {
    use Literal::*;

    let mut parser = Parser::new(
        r#"static let test01: int = 4
           let test02: char = '6'
           static let test03: float
           let test04 = 9"#,
    );
    assert_eq!(
        parser.parse_static(),
        Some(Let {
            name: "test01".into(),
            ty: Ty::Int,
            expr: Some(Expr::Literal(Int(4)))
        })
    );
    assert_eq!(parser.skip_token(TokenKind::Newline), true);
    assert_eq!(
        parser.parse_let(),
        Some(Let {
            name: "test02".into(),
            ty: Ty::Char,
            expr: Some(Expr::Literal(Char('6')))
        })
    );
    assert_eq!(parser.skip_token(TokenKind::Newline), true);
    assert_eq!(
        parser.parse_static(),
        Some(Let {
            name: "test03".into(),
            ty: Ty::Float,
            expr: None
        })
    );
    assert_eq!(parser.skip_token(TokenKind::Newline), true);
    assert_eq!(parser.parse_let(), None);
}