diff --git a/Cargo.lock b/Cargo.lock index ad31626..bb7a515 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -29,6 +29,7 @@ dependencies = [ name = "parser" version = "0.1.0" dependencies = [ + "commons", "lexer", "utils", ] diff --git a/commons/src/err/mod.rs b/commons/src/err/mod.rs index 47759a4..0cef6bd 100644 --- a/commons/src/err/mod.rs +++ b/commons/src/err/mod.rs @@ -11,6 +11,7 @@ use crate::Position; pub type PositionedResult = Result; /// An error that has a position +#[derive(Debug)] pub struct PositionedError { pub start: Position, pub end: Position, @@ -19,26 +20,31 @@ pub struct PositionedError { impl PositionedError { pub fn new(start: Position, end: Position, reason: String) -> Self { - return PositionedError { start, end, reason } + + let err = PositionedError { start, end, reason }; + + println!("{}", err); + + return err; } } impl fmt::Display for PositionedError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - writeln!(f, "{} at {}", "ERR".bright_red().bold(), self.start); + writeln!(f, "{} at {}", "ERR".bright_red().bold(), self.start)?; let line = match self.start.get_line_content() { Ok(v) => v, Err(e) => format!("{}","Couldn't read file contents!".red().bold()) }; - let before = &line[self.start.col - 1..]; - let target = &line[self.start.col..self.end.col - 1].cyan().underline(); + let before = &line[0..self.start.col - 1]; + let target = &line[self.start.col - 1..self.end.col].cyan().underline(); let after = &line[self.end.col..]; - writeln!(f, "{}{}{}", before, target, after); - writeln!(f, ""); - writeln!(f, "{}", self.reason.bright_red()); + writeln!(f, "{}{}{}", before, target, after)?; + writeln!(f, "")?; + writeln!(f, "{}", self.reason.bright_red())?; Ok(()) } diff --git a/lexer/src/lexer.rs b/lexer/src/lexer.rs index 9143903..5b49cd6 100644 --- a/lexer/src/lexer.rs +++ b/lexer/src/lexer.rs @@ -39,35 +39,44 @@ pub fn lexer_parse_file(file_path: &String) -> LexerParseResult> let mut i: usize = 0; let mut line: usize 
= 1; - let mut col: usize = 0; + + let mut last_line_break: usize = 0; while i < contents.len() { let c: char = contents.chars().nth(i).unwrap(); - col += 1; - if c == '\n' { + i += c.len_utf8(); + last_line_break = i; line += 1; continue; } if c.is_numeric() { + let col = i - last_line_break + 1; tokens.push(parse_number_token(&contents, &mut i, Position::new(file_path.to_string(), line, col))?); continue; } if c == '"' { + let col = i - last_line_break + 1; + tokens.push(parse_string_token(&contents, &mut i, Position::new(file_path.to_string(), line, col))); continue; } if c.is_alphabetic() { + let col = i - last_line_break + 1; + tokens.push(parse_keyword(&contents, &mut i, Position::new(file_path.to_string(), line, col))); continue; } i += c.len_utf8(); + + let col = i - last_line_break + 1; + let pos = Position::new(file_path.to_string(), line, col); match c { @@ -89,7 +98,7 @@ pub fn lexer_parse_file(file_path: &String) -> LexerParseResult> } - tokens.push(LexerToken::make_single_sized(Position::new(file_path.to_string(), line, col), LexerTokenType::END_OF_FILE)); + tokens.push(LexerToken::make_single_sized(Position::new(file_path.to_string(), line, i - last_line_break + 1), LexerTokenType::END_OF_FILE)); Ok(tokens) } @@ -114,7 +123,7 @@ fn parse_number_token(str: &String, ind: &mut usize, start_pos: Position) -> Lex *ind = end; - let endpos = start_pos.increment_by(start - end); + let endpos = start_pos.increment_by(end - start); return Ok(LexerToken::new(start_pos, endpos, LexerTokenType::INT_LIT(num))); } @@ -135,7 +144,7 @@ fn parse_string_token(str: &String, ind: &mut usize, start_pos: Position) -> Lex *ind = end; - let endpos: Position = start_pos.increment_by(start - end); + let endpos: Position = start_pos.increment_by(end - start); return LexerToken::new(start_pos, endpos, LexerTokenType::STRING_LIT(slice.to_string())); } @@ -176,6 +185,6 @@ fn parse_keyword(str: &String, ind: &mut usize, start_pos: Position) -> LexerTok _ => 
LexerTokenType::KEYWORD(slice.to_string(), hash) }; - let endpos: Position = start_pos.increment_by(start - end); + let endpos: Position = start_pos.increment_by(end - start); return LexerToken::new(start_pos, endpos, token_type); } \ No newline at end of file diff --git a/parser/Cargo.toml b/parser/Cargo.toml index 9315bef..40b07c1 100644 --- a/parser/Cargo.toml +++ b/parser/Cargo.toml @@ -5,4 +5,5 @@ edition = "2024" [dependencies] utils = { path = "../utils" } -lexer = { path = "../lexer" } \ No newline at end of file +lexer = { path = "../lexer" } +commons = { path = "../commons" } \ No newline at end of file diff --git a/parser/src/ast/cond/operators.rs b/parser/src/ast/cond/operators.rs index dcb9479..8c9909d 100644 --- a/parser/src/ast/cond/operators.rs +++ b/parser/src/ast/cond/operators.rs @@ -1,4 +1,5 @@ -use lexer::token::LexerToken; +use commons::err::PositionedResult; +use lexer::token::{LexerToken, LexerTokenType}; use crate::{ParserError, ParserResult}; @@ -14,38 +15,38 @@ pub enum ConditionOperator { LOWEREQ // A <= B } -pub fn parse_condition_operator(tokens: &Vec, ind: &mut usize) -> ParserResult { - match &tokens[*ind] { - LexerToken::EQUAL_SIGN => { +pub fn parse_condition_operator(tokens: &Vec, ind: &mut usize) -> PositionedResult { + match &tokens[*ind].tok_type { + LexerTokenType::EQUAL_SIGN => { *ind += 1; - if tokens[*ind] == LexerToken::EQUAL_SIGN { - return Ok(ConditionOperator::EQUAL); - } + tokens[*ind].expects(LexerTokenType::EQUAL_SIGN)?; + + return Ok(ConditionOperator::EQUAL) }, - LexerToken::EXCLAMATION_MARK => { + LexerTokenType::EXCLAMATION_MARK => { *ind += 1; - if tokens[*ind] == LexerToken::EQUAL_SIGN { + if tokens[*ind].tok_type == LexerTokenType::EQUAL_SIGN { return Ok(ConditionOperator::NOT_EQUAL) } }, - LexerToken::ANGEL_BRACKET_OPEN => { + LexerTokenType::ANGEL_BRACKET_OPEN => { *ind += 1; - if tokens[*ind] == LexerToken::EQUAL_SIGN { + if tokens[*ind].tok_type == LexerTokenType::EQUAL_SIGN { return 
Ok(ConditionOperator::LOWEREQ); } return Ok(ConditionOperator::LOWER); }, - LexerToken::ANGEL_BRACKET_CLOSE => { + LexerTokenType::ANGEL_BRACKET_CLOSE => { *ind += 1; - if tokens[*ind] == LexerToken::EQUAL_SIGN { + if tokens[*ind].tok_type == LexerTokenType::EQUAL_SIGN { return Ok(ConditionOperator::HIGHEREQ); } @@ -55,5 +56,5 @@ pub fn parse_condition_operator(tokens: &Vec, ind: &mut usize) -> Pa _ => {} } - Err(ParserError::new(String::from("Pattern doesn't represent a valid condition operator!"), 0)) + Err(tokens[*ind].make_err("Token doesn't make a valid condition operator")) } \ No newline at end of file diff --git a/parser/src/ast/control/forloop.rs b/parser/src/ast/control/forloop.rs index 31aafde..f65ed39 100644 --- a/parser/src/ast/control/forloop.rs +++ b/parser/src/ast/control/forloop.rs @@ -1,42 +1,31 @@ -use lexer::token::LexerToken; +use commons::err::PositionedResult; +use lexer::token::{LexerToken, LexerTokenType}; use crate::{ParserError, ParserResult, ast::{func::parse_node_body, parse_ast_node, parse_ast_value, tree::ASTTreeNode, var::decl::parse_variable_declaration}}; -pub fn parse_for_loop(tokens: &Vec, ind: &mut usize) -> ParserResult> { +pub fn parse_for_loop(tokens: &Vec, ind: &mut usize) -> PositionedResult> { *ind += 1; - if tokens[*ind] != LexerToken::PAREN_OPEN { - return Err(ParserError::new(String::from("Requires ("), 0)); - } + tokens[*ind].expects(LexerTokenType::PAREN_OPEN)?; let initial = parse_variable_declaration(tokens, ind)?; - if tokens[*ind] != LexerToken::COMMA { - return Err(ParserError::new(String::from("Requires for bodies to seperated by commas!"), 0)); - } + tokens[*ind].expects(LexerTokenType::COMMA)?; *ind += 1; let cond = parse_ast_value(tokens, ind)?; - if tokens[*ind] != LexerToken::COMMA { - return Err(ParserError::new(String::from("Requires for bodies to seperated by commas!"), 0)); - } + tokens[*ind].expects(LexerTokenType::COMMA)?; *ind += 1; - let increment = parse_ast_node(tokens, ind)?; *ind += 1; - if 
tokens[*ind] != LexerToken::PAREN_CLOSE { - return Err(ParserError::new(String::from("Requires )"), 0)); - } - + tokens[*ind].expects(LexerTokenType::PAREN_CLOSE)?; *ind += 1; - if tokens[*ind] != LexerToken::BRACKET_OPEN { - return Err(ParserError::new(String::from("Requires {"), 0)); - } + tokens[*ind].expects(LexerTokenType::BRACKET_OPEN)?; let body = parse_node_body(tokens, ind)?; diff --git a/parser/src/ast/control/ifelse.rs b/parser/src/ast/control/ifelse.rs index 1845267..e29033c 100644 --- a/parser/src/ast/control/ifelse.rs +++ b/parser/src/ast/control/ifelse.rs @@ -2,35 +2,30 @@ //! Parsing for if and else statements //! -use lexer::token::LexerToken; +use commons::err::PositionedResult; +use lexer::token::{LexerToken, LexerTokenType}; use crate::{ParserError, ParserResult, ast::{func::parse_node_body, parse_ast_value, tree::ASTTreeNode}}; -pub fn parse_condition_member(tokens: &Vec, ind: &mut usize) -> ParserResult> { - if tokens[*ind] != LexerToken::PAREN_OPEN { - return Err(ParserError::new(String::from("If statements must be followed by condition!"), 0)); - } +pub fn parse_condition_member(tokens: &Vec, ind: &mut usize) -> PositionedResult> { + tokens[*ind].expects(LexerTokenType::PAREN_OPEN)?; *ind += 1; let cond = parse_ast_value(tokens, ind)?; - if tokens[*ind] != LexerToken::PAREN_CLOSE { - return Err(ParserError::new(String::from("Conditions must be closed by paren!"), 0)); - } + tokens[*ind].expects(LexerTokenType::PAREN_CLOSE)?; *ind += 1; return Ok(cond); } -pub fn parse_if_statement(tokens: &Vec, ind: &mut usize) -> ParserResult> { +pub fn parse_if_statement(tokens: &Vec, ind: &mut usize) -> PositionedResult> { *ind += 1; let cond = parse_condition_member(tokens, ind)?; - if tokens[*ind] != LexerToken::BRACKET_OPEN { - return Err(ParserError::new(String::from("Condition must be followed by body!"), 0)); - } + tokens[*ind].expects(LexerTokenType::BRACKET_OPEN)?; let body = match parse_node_body(tokens, ind) { Ok(v) => v, @@ -39,7 +34,7 @@ pub 
fn parse_if_statement(tokens: &Vec, ind: &mut usize) -> ParserRe let mut elseStatement = None; - if tokens[*ind + 1] == LexerToken::ELSE { + if tokens[*ind + 1].tok_type == LexerTokenType::ELSE { *ind += 1; elseStatement = Some(parse_else_statement(tokens, ind)?); @@ -48,19 +43,17 @@ pub fn parse_if_statement(tokens: &Vec, ind: &mut usize) -> ParserRe return Ok(Box::new(ASTTreeNode::IfStatement { cond, body, elseStatement })); } -pub fn parse_else_statement(tokens: &Vec, ind: &mut usize) -> ParserResult> { +pub fn parse_else_statement(tokens: &Vec, ind: &mut usize) -> PositionedResult> { *ind += 1; let mut cond = None; - if tokens[*ind] == LexerToken::IF { + if tokens[*ind].tok_type == LexerTokenType::IF { *ind += 1; cond = Some(parse_condition_member(tokens, ind)?); } - if tokens[*ind] != LexerToken::BRACKET_OPEN { - return Err(ParserError::new(String::from("Condition must be followed by body!"), 0)); - } + tokens[*ind].expects(LexerTokenType::BRACKET_OPEN)?; let body = match parse_node_body(tokens, ind) { Ok(v) => v, @@ -70,7 +63,7 @@ pub fn parse_else_statement(tokens: &Vec, ind: &mut usize) -> Parser if cond.is_some() { let mut elseStatement = None; - if tokens[*ind + 1] == LexerToken::ELSE { + if tokens[*ind + 1].tok_type == LexerTokenType::ELSE { *ind += 1; elseStatement = Some(parse_else_statement(tokens, ind)?); diff --git a/parser/src/ast/control/whileblock.rs b/parser/src/ast/control/whileblock.rs index 92eec6b..b4103d7 100644 --- a/parser/src/ast/control/whileblock.rs +++ b/parser/src/ast/control/whileblock.rs @@ -1,15 +1,14 @@ +use commons::err::PositionedResult; use lexer::token::LexerToken; use crate::{ParserError, ParserResult, ast::{control::ifelse::parse_condition_member, func::parse_node_body, tree::ASTTreeNode}}; -pub fn parse_while_block(tokens: &Vec, ind: &mut usize) -> ParserResult> { +pub fn parse_while_block(tokens: &Vec, ind: &mut usize) -> PositionedResult> { *ind += 1; let cond = parse_condition_member(tokens, ind)?; - if tokens[*ind] != 
LexerToken::BRACKET_OPEN { - return Err(ParserError::new(String::from("Expected block body!"), 0)); - } + tokens[*ind].expects(lexer::token::LexerTokenType::BRACKET_OPEN)?; let body = match parse_node_body(tokens, ind) { Ok(v) => v, diff --git a/parser/src/ast/func/call.rs b/parser/src/ast/func/call.rs index 575fca1..c23da7e 100644 --- a/parser/src/ast/func/call.rs +++ b/parser/src/ast/func/call.rs @@ -1,28 +1,25 @@ -use lexer::token::LexerToken; +use commons::err::PositionedResult; +use lexer::token::{LexerToken, LexerTokenType}; use utils::hash::WithHash; use crate::{ParserError, ParserResult, ast::{parse_ast_value, tree::ASTTreeNode}}; -pub fn parse_function_call(tokens: &Vec, ind: &mut usize) -> ParserResult> { +pub fn parse_function_call(tokens: &Vec, ind: &mut usize) -> PositionedResult> { let func = WithHash::new(tokens[*ind].as_keyword().unwrap().0); *ind += 1; - if tokens[*ind] != LexerToken::PAREN_OPEN { - return Err(ParserError::new(String::from("Function must start with paren given arguments!"), 0)); - } + tokens[*ind].expects(LexerTokenType::PAREN_OPEN)?; *ind += 1; let mut vals: Vec> = Vec::new(); - while tokens[*ind] != LexerToken::PAREN_CLOSE { + while tokens[*ind].tok_type != LexerTokenType::PAREN_CLOSE { vals.push(parse_ast_value(tokens, ind)?); - if tokens[*ind] != LexerToken::COMMA { - return Err(ParserError::new(String::from("Expected comma seperated args"), 0)); - } + tokens[*ind].expects(LexerTokenType::COMMA)?; *ind += 1; } diff --git a/parser/src/ast/func/decl.rs b/parser/src/ast/func/decl.rs index 3ee60bf..8f89de9 100644 --- a/parser/src/ast/func/decl.rs +++ b/parser/src/ast/func/decl.rs @@ -1,26 +1,21 @@ -use lexer::token::LexerToken; +use commons::err::PositionedResult; +use lexer::token::{LexerToken, LexerTokenType}; use utils::hash::WithHash; use crate::{ParserError, ParserResult, ast::{func::{parse_function_arguments, parse_node_body}, tree::ASTTreeNode}}; -pub fn parse_function_declaraction(tokens: &Vec, ind: &mut usize) -> 
ParserResult> { +pub fn parse_function_declaraction(tokens: &Vec, ind: &mut usize) -> PositionedResult> { *ind += 1; - let functionName = match tokens[*ind].as_keyword() { - Ok(val) => val, - Err(e) => return Err(ParserError::new(String::from("Function name wasn't a keyword!"), 0)) - }; + let functionName = tokens[*ind].expects_keyword()?; *ind += 1; - if tokens[*ind] != LexerToken::PAREN_OPEN { - return Err(ParserError::new(String::from("Function name must be followed by arguments!"), 0)); - } + tokens[*ind].expects(LexerTokenType::PAREN_OPEN)?; let args = parse_function_arguments(tokens, ind)?; *ind += 1; - if tokens[*ind] != LexerToken::BRACKET_OPEN { - return Err(ParserError::new(String::from("Expected function body declaration after arguments!"), 0)); - } + + tokens[*ind].expects(LexerTokenType::BRACKET_OPEN)?; let body = parse_node_body(tokens, ind)?; diff --git a/parser/src/ast/func/mod.rs b/parser/src/ast/func/mod.rs index 5d8328b..9444457 100644 --- a/parser/src/ast/func/mod.rs +++ b/parser/src/ast/func/mod.rs @@ -1,23 +1,21 @@ use std::fmt::Debug; -use lexer::token::LexerToken; +use commons::err::PositionedResult; +use lexer::token::{LexerToken, LexerTokenType}; use crate::{ParserError, ParserResult, ast::{parse_ast_node, tree::{ASTTreeNode, FunctionDeclarationArgument}}}; pub mod decl; pub mod call; -pub fn parse_node_body(tokens: &Vec, ind: &mut usize) -> ParserResult>> { +pub fn parse_node_body(tokens: &Vec, ind: &mut usize) -> PositionedResult>> { *ind += 1; let mut tok: &LexerToken = &tokens[*ind]; let mut body: Vec> = Vec::new(); - while tok != &LexerToken::END_OF_FILE && tok != &LexerToken::BRACKET_CLOSE { - let n = match parse_ast_node(tokens, ind) { - Ok(val) => val, - Err(e) => return Err(e) - }; + while tok.tok_type != LexerTokenType::END_OF_FILE && tok.tok_type != LexerTokenType::BRACKET_CLOSE { + let n = parse_ast_node(tokens, ind)?; body.push(n); @@ -31,41 +29,31 @@ pub fn parse_node_body(tokens: &Vec, ind: &mut usize) -> ParserResul /// 
Parses functions arguments. -pub fn parse_function_arguments(tokens: &Vec, ind: &mut usize) -> ParserResult> { +pub fn parse_function_arguments(tokens: &Vec, ind: &mut usize) -> PositionedResult> { *ind += 1; let mut args: Vec = Vec::new(); while *ind < tokens.len() && tokens[*ind].is_keyword() { - let varType = match tokens[*ind].as_keyword() { - Ok(val) => val, - Err(e) => return Err(ParserError::new(String::from("Malformed function arguments"), 0)) - }; + let varType = tokens[*ind].expects_keyword()?; *ind += 1; - let varName = match tokens[*ind].as_keyword() { - Ok(val) => val, - Err(e ) => return Err(ParserError::new(String::from("Malformed argument name"), 0)) - }; + let varName = tokens[*ind].expects_keyword()?; args.push(FunctionDeclarationArgument::new(varName.0, varType.1)); *ind += 1; - if tokens[*ind] == LexerToken::PAREN_CLOSE { + if tokens[*ind].tok_type == LexerTokenType::PAREN_CLOSE { break; } - if tokens[*ind] != LexerToken::COMMA { - return Err(ParserError::new(format!("Arguments must be seperated with commas! 
Got token {:#?}", tokens[*ind]), 0)); - } + tokens[*ind].expects(LexerTokenType::COMMA)?; *ind += 1; } - if *ind >= tokens.len() || tokens[*ind] != LexerToken::PAREN_CLOSE { - return Err(ParserError::new(String::from("Arguments must end with a paren close!"), 0)); - } + tokens[*ind].expects(LexerTokenType::PAREN_CLOSE)?; Ok(args) } \ No newline at end of file diff --git a/parser/src/ast/literals.rs b/parser/src/ast/literals.rs index 3cfca32..69f7e2d 100644 --- a/parser/src/ast/literals.rs +++ b/parser/src/ast/literals.rs @@ -1,17 +1,18 @@ +use commons::err::{PositionedError, PositionedResult}; use lexer::token::{LexerToken, LexerTokenType}; use crate::{ParserError, ParserResult, ast::tree::ASTTreeNode}; -pub fn parse_integer_literal(tokens: &Vec, ind: &mut usize) -> ParserResult> { - let mut val; - tokens[*ind].expects(lexer::token::LexerTokenType::INT_LIT(val)); - +pub fn parse_integer_literal(tokens: &Vec, ind: &mut usize) -> PositionedResult> { + let val = tokens[*ind].expects_int_lit()?; *ind += 1; return Ok(Box::new(ASTTreeNode::IntegerLit(val))); } -pub fn parse_string_literal(tokens: &Vec, ind: &mut usize) -> ParserResult> { - let val; - tokens[*ind].expects(LexerTokenType::STRING_LIT(val)); +pub fn parse_string_literal(tokens: &Vec, ind: &mut usize) -> PositionedResult> { + let val = tokens[*ind].expects_string_lit()?; + *ind += 1; + + return Ok(Box::new(ASTTreeNode::StringLit(val))); } \ No newline at end of file diff --git a/parser/src/ast/mod.rs b/parser/src/ast/mod.rs index 42c605d..b6041d1 100644 --- a/parser/src/ast/mod.rs +++ b/parser/src/ast/mod.rs @@ -8,6 +8,7 @@ use std::fmt::Debug; +use commons::err::PositionedResult; use lexer::token::{LexerToken, LexerTokenType}; use utils::hash::WithHash; @@ -20,14 +21,14 @@ pub mod literals; pub mod cond; pub mod control; -pub fn parse_ast_value_post_l(tokens: &Vec, ind: &mut usize, original: ParserResult>) -> ParserResult> { +pub fn parse_ast_value_post_l(tokens: &Vec, ind: &mut usize, original: 
PositionedResult>) -> PositionedResult> { match &tokens[*ind].tok_type { LexerTokenType::DOT => { let o = &original?; let k = Box::new(ASTTreeNode::clone(o.as_ref())); if !o.is_function_call() && !o.is_var_access() { - return Err(ParserError::new(String::from("Tried using field/func access on non-value element!"), 0)); + return Err(tokens[*ind].make_err("Invalid dot access token!")); } *ind += 1; @@ -39,7 +40,7 @@ pub fn parse_ast_value_post_l(tokens: &Vec, ind: &mut usize, origina return Ok(Box::new(ASTTreeNode::StructLRVariable { l: k, r })) } - return Err(ParserError::new(String::from("Next member isn't any valid field/func access type!"), 0)); + return Err(tokens[*ind].make_err("Invalid token type to use dot access!")); }, LexerTokenType::ANGEL_BRACKET_CLOSE | LexerTokenType::EQUAL_SIGN | LexerTokenType::ANGEL_BRACKET_OPEN => { @@ -58,7 +59,7 @@ pub fn parse_ast_value_post_l(tokens: &Vec, ind: &mut usize, origina } } -pub fn parse_ast_value(tokens: &Vec, ind: &mut usize) -> ParserResult> { +pub fn parse_ast_value(tokens: &Vec, ind: &mut usize) -> PositionedResult> { match &tokens[*ind].tok_type { LexerTokenType::EXCLAMATION_MARK => { @@ -69,7 +70,7 @@ pub fn parse_ast_value(tokens: &Vec, ind: &mut usize) -> ParserResul return Ok(Box::new(ASTTreeNode::BooleanBasedConditionMember { val: ast, negate: true })) } - return Err(ParserError::new(String::from("Boolean negation requires either function or variable usage!"), 0)); + return Err(tokens[*ind].make_err("Boolean negative requires either func or var access!")); }, LexerTokenType::INT_LIT(_) => { @@ -95,11 +96,11 @@ pub fn parse_ast_value(tokens: &Vec, ind: &mut usize) -> ParserResul return parse_ast_value_post_l(tokens, ind, n); } - _ => return Err(ParserError::new(String::from("Cannot be parsed as val!"), 0)) + _ => return Err(tokens[*ind].make_err("Cannot be parsed as value!")) } } -pub fn parse_ast_node(tokens: &Vec, ind: &mut usize) -> ParserResult> { +pub fn parse_ast_node(tokens: &Vec, ind: &mut usize) -> 
PositionedResult> { println!("Ind: {}, tok at: {:#?}", ind, tokens[*ind].tok_type); match &tokens[*ind].tok_type { @@ -124,7 +125,7 @@ pub fn parse_ast_node(tokens: &Vec, ind: &mut usize) -> ParserResult } _ => { - return Err(ParserError::new(format!("err: {:#?}", tokens[*ind].tok_type), 0)); + return Err(tokens[*ind].make_err("Invalid token type! Shouldn't be there!")); } } diff --git a/parser/src/ast/var/decl.rs b/parser/src/ast/var/decl.rs index edc9964..2821a2b 100644 --- a/parser/src/ast/var/decl.rs +++ b/parser/src/ast/var/decl.rs @@ -1,34 +1,26 @@ -use lexer::token::LexerToken; +use commons::err::PositionedResult; +use lexer::token::{LexerToken, LexerTokenType}; use utils::hash::WithHash; use crate::{ParserError, ParserResult, ast::{parse_ast_node, parse_ast_value, tree::ASTTreeNode}}; -pub fn parse_variable_declaration(tokens: &Vec, ind: &mut usize) -> ParserResult> { +pub fn parse_variable_declaration(tokens: &Vec, ind: &mut usize) -> PositionedResult> { *ind += 1; - let typeName = match tokens[*ind].as_keyword() { - Ok(val) => val, - Err(e) => return Err(ParserError::new(String::from("Type name isn't a keyword"), 0)) - }; + let typeName = tokens[*ind].expects_keyword()?; *ind += 1; - let varName = match tokens[*ind].as_keyword() { - Ok(val) => val, - Err(e) => return Err(ParserError::new(String::from("Var name isn't a keyword!"), 0)) - }; + let varName = tokens[*ind].expects_keyword()?; *ind += 1; let mut val: Option> = None; - if tokens[*ind] == LexerToken::EQUAL_SIGN { + if tokens[*ind].tok_type == LexerTokenType::EQUAL_SIGN { *ind += 1; - val = match parse_ast_value(tokens, ind) { - Ok(v) => Some(v), - Err(e) => None - }; + val = Some(parse_ast_value(tokens, ind)?); } return Ok(Box::new(ASTTreeNode::VarDeclaration { varName: WithHash::new(varName.0), varType: typeName.1, value: val }));