diff --git a/CONTRIBUTING.MD b/CONTRIBUTING.MD index c566105..a0270a6 100644 --- a/CONTRIBUTING.MD +++ b/CONTRIBUTING.MD @@ -14,7 +14,7 @@ git clone https://github.com/Quickfall/quickfall ``` You will need the following installed in order to compile & use Quickfall: -- Rust 1.91.1 -- LLVM 21.1 +- Rust 1.94.1 +- LLVM 22.1 You can then build the project by using the `cargo` Rust tool. \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock index 4f8edcd..5009a10 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -56,10 +56,12 @@ name = "astoir_hir_lowering" version = "0.1.0" dependencies = [ "ast", + "ast_parser", "astoir_hir", "compiler_typing", "compiler_utils", "diagnostics", + "lexer", ] [[package]] diff --git a/README.md b/README.md index ba489f0..fed3dc0 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,8 @@ Quickfall is a programming language focusing on allowing everyone to build fast Quickfall aims to give the performance of extremely fast languages like C while giving the safety of Rust and also being reliable. What Quickfall focuses on: -- **Speed**: Quickfall focuses on runtime speed which means that every single compile time safety check can be simply disabled either globally or for a single element. On recent benchmarks, Quickfall has surpassed C in terms of runtime speed by up to 25%! +- **Speed**: Quickfall focuses on runtime speed which means that every single compile time safety check can be simply disabled either globally or for a single element. 
+- **Feature-filled**: Quickfall gives multiple useful features to build reliable software such as enums or arbitrary size types - **Strict syntax**: Quickfall enforces a strict syntax to avoid mistakes or error prone code - **Safety**: Quickfall enforces strict safety features on compile time and optional ones on runtime that can be easily disabled to allow for the highest safety or speed depending on the build -- **Thread safety**: Quickfall gives tools to enforce thread safety mostly on compile time such as the *thread markers* to identify and restrict variables to only certain threads +- **Multipurpose**: Quickfall can be used to build both low level and high level software with different levels of tools. diff --git a/compiler/ast/src/ctx.rs b/compiler/ast/src/ctx.rs index 02a4d99..0b5ed02 100644 --- a/compiler/ast/src/ctx.rs +++ b/compiler/ast/src/ctx.rs @@ -5,12 +5,13 @@ use crate::tree::ASTTreeNode; #[derive(Debug)] pub struct ParserCtx { pub map: HashMap>, + pub uses: Vec>, pub iter_order: Vec } impl ParserCtx { pub fn new() -> Self { - return ParserCtx { map: HashMap::new(), iter_order: Vec::new() } + return ParserCtx { map: HashMap::new(), iter_order: Vec::new(), uses: vec![] } } pub fn insert(&mut self, name: String, node: Box) -> bool { diff --git a/compiler/ast/src/tree.rs b/compiler/ast/src/tree.rs index 0ceb502..387d1ab 100644 --- a/compiler/ast/src/tree.rs +++ b/compiler/ast/src/tree.rs @@ -29,6 +29,8 @@ pub enum ASTTreeNodeKind { IntegerLit { val: i128, hash: u64 }, StringLit(String), + UseStatement { shards: Vec, use_clauses: Vec }, + ThisStructParam, UnwrapCondition { original: Box, target_type: ASTType, unsafe_unwrap: bool, target_var: Option }, @@ -166,6 +168,7 @@ impl Display for ASTTreeNode { impl Display for ASTTreeNodeKind { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let s = match self { + Self::UseStatement { .. } => "use statement", Self::UnwrapCondition { .. } | Self::UnwrapValue { .. 
} => "unwrap", Self::IntegerLit { .. } => "integer literal", Self::StringLit(_) => "string literal", diff --git a/compiler/ast_parser/src/lib.rs b/compiler/ast_parser/src/lib.rs index 7bddedf..6be0672 100644 --- a/compiler/ast_parser/src/lib.rs +++ b/compiler/ast_parser/src/lib.rs @@ -2,7 +2,7 @@ //! The parser for the Quickfall AST //! -use ast::ctx::ParserCtx; +use ast::{ctx::ParserCtx, tree::ASTTreeNodeKind}; use diagnostics::{DiagnosticResult, builders::make_unexpected_simple_error}; use lexer::token::{LexerToken, LexerTokenType}; @@ -19,6 +19,7 @@ pub mod variables; pub mod types; pub mod arrays; pub mod unwraps; +pub mod use_statements; pub fn parse_ast_ctx(tokens: &Vec) -> DiagnosticResult { let mut ind = 0; @@ -28,6 +29,11 @@ pub fn parse_ast_ctx(tokens: &Vec) -> DiagnosticResult { while tokens[ind].tok_type != LexerTokenType::EndOfFile { let node = parse_ast_node(tokens, &mut ind)?; + if let ASTTreeNodeKind::UseStatement { .. } = node.kind { + ctx.uses.push(node); + continue; + } + if !node.kind.is_tree_permissible() { return Err(make_unexpected_simple_error(&*node, &node).into()) } diff --git a/compiler/ast_parser/src/parser.rs b/compiler/ast_parser/src/parser.rs index 58e0028..08586a0 100644 --- a/compiler/ast_parser/src/parser.rs +++ b/compiler/ast_parser/src/parser.rs @@ -3,7 +3,7 @@ use compiler_utils::hash::HashedString; use diagnostics::{DiagnosticResult, builders::{make_unexpected_simple_error, make_unused_variable}}; use lexer::token::{LexerToken, LexerTokenType}; -use crate::{control::{for_loop::parse_for_loop, if_else::parse_if_statement, while_block::parse_while_block}, functions::{parse_function_call, parse_function_declaraction, returns::parse_function_return_statement, shadow::parse_shadow_function_declaration}, structs::{enums::parse_enum_declaration, parse_type_declaration}, value::parse_ast_value_post_l, variables::{decl::parse_variable_declaration, static_decl::parse_static_variable_declaration}}; +use 
crate::{control::{for_loop::parse_for_loop, if_else::parse_if_statement, while_block::parse_while_block}, functions::{parse_function_call, parse_function_declaraction, returns::parse_function_return_statement, shadow::parse_shadow_function_declaration}, structs::{enums::parse_enum_declaration, parse_type_declaration}, use_statements::parse_use_statement, value::parse_ast_value_post_l, variables::{decl::parse_variable_declaration, static_decl::parse_static_variable_declaration}}; /// Parses an AST node outside of any other node. /// @@ -38,12 +38,16 @@ pub fn parse_ast_node(tokens: &Vec, ind: &mut usize) -> DiagnosticRe return parse_enum_declaration(tokens, ind); }, + LexerTokenType::Use => { + return parse_use_statement(tokens, ind); + } + _ => return Err(make_unexpected_simple_error(&tokens[*ind], &tokens[*ind].tok_type).into()) } } /// Parses an AST node inside of another compatible node (functions, control bodies) -pub fn parse_ast_node_in_body(tokens: &Vec, ind: &mut usize) -> DiagnosticResult> { +pub fn parse_ast_node_in_body(tokens: &Vec, ind: &mut usize) -> DiagnosticResult> { match &tokens[*ind].tok_type { LexerTokenType::Var => { diff --git a/compiler/ast_parser/src/use_statements.rs b/compiler/ast_parser/src/use_statements.rs new file mode 100644 index 0000000..e14f326 --- /dev/null +++ b/compiler/ast_parser/src/use_statements.rs @@ -0,0 +1,44 @@ +use ast::tree::{ASTTreeNode, ASTTreeNodeKind}; +use compiler_utils::hash::HashedString; +use diagnostics::DiagnosticResult; +use lexer::token::{LexerToken, LexerTokenType}; + +pub fn parse_use_statement(tokens: &Vec, ind: &mut usize) -> DiagnosticResult> { + let start = tokens[*ind].pos.clone(); + + *ind += 1; + + let mut steps = vec![]; + let mut uses = vec![]; + + while tokens[*ind].is_keyword() { + let kwd = tokens[*ind].expects_keyword()?; + *ind += 1; + + steps.push(HashedString::new(kwd.0)); + + tokens[*ind].expects(LexerTokenType::Collon)?; + *ind += 1; + } + + 
tokens[*ind].expects(LexerTokenType::ArrayOpen)?; + *ind += 1; + + while tokens[*ind].is_keyword() { + let kwd = tokens[*ind].expects_keyword()?; + *ind += 1; + + uses.push(HashedString::new(kwd.0)); + + if tokens[*ind].tok_type == LexerTokenType::ArrayClose { + break; + } + + tokens[*ind].expects(LexerTokenType::Comma)?; + *ind += 1; + } + + *ind += 1; + + return Ok((Box::new(ASTTreeNode::new(ASTTreeNodeKind::UseStatement { shards: steps, use_clauses: uses }, start, tokens[*ind].get_end_pos())))) +} \ No newline at end of file diff --git a/compiler/astoir_hir_lowering/Cargo.toml b/compiler/astoir_hir_lowering/Cargo.toml index c3a60e4..2d35609 100644 --- a/compiler/astoir_hir_lowering/Cargo.toml +++ b/compiler/astoir_hir_lowering/Cargo.toml @@ -8,4 +8,6 @@ astoir_hir = { path = "../astoir_hir" } compiler_typing = { path = "../compiler_typing" } ast = { path = "../ast" } diagnostics = { path = "../diagnostics" } -compiler_utils = { path = "../compiler_utils" } \ No newline at end of file +compiler_utils = { path = "../compiler_utils" } +lexer = { path = "../lexer" } +ast_parser = { path = "../ast_parser" } \ No newline at end of file diff --git a/compiler/astoir_hir_lowering/src/lib.rs b/compiler/astoir_hir_lowering/src/lib.rs index b0df1df..e0deed9 100644 --- a/compiler/astoir_hir_lowering/src/lib.rs +++ b/compiler/astoir_hir_lowering/src/lib.rs @@ -2,7 +2,7 @@ use ast::{ctx::ParserCtx, tree::{ASTTreeNode, ASTTreeNodeKind}}; use astoir_hir::{ctx::{HIRBranchedContext, HIRContext}, nodes::{HIRNode, HIRNodeKind}}; use diagnostics::{DiagnosticResult, DiagnosticSpanOrigin, move_current_diagnostic_pos}; -use crate::{arrays::lower_ast_array_modify, control::{lower_ast_for_block, lower_ast_if_statement, lower_ast_while_block}, enums::lower_ast_enum, func::{lower_ast_function_call, lower_ast_function_declaration, lower_ast_shadow_function_declaration}, math::lower_ast_math_operation, structs::lower_ast_struct_declaration, values::lower_ast_value, 
var::{lower_ast_variable_assign, lower_ast_variable_declaration}}; +use crate::{arrays::lower_ast_array_modify, control::{lower_ast_for_block, lower_ast_if_statement, lower_ast_while_block}, enums::lower_ast_enum, func::{lower_ast_function_call, lower_ast_function_declaration, lower_ast_shadow_function_declaration}, math::lower_ast_math_operation, structs::lower_ast_struct_declaration, uses::handle_ast_use_statement, values::lower_ast_value, var::{lower_ast_variable_assign, lower_ast_variable_declaration}}; pub mod literals; pub mod var; @@ -16,6 +16,7 @@ pub mod structs; pub mod arrays; pub mod unwraps; pub mod enums; +pub mod uses; pub fn lower_ast_body_node(context: &mut HIRContext, curr_ctx: &mut HIRBranchedContext, node: Box) -> DiagnosticResult> { move_current_diagnostic_pos(node.get_pos()); @@ -106,6 +107,10 @@ pub fn lower_ast_toplevel(context: &mut HIRContext, node: Box) -> D pub fn lower_ast(ctx: ParserCtx) -> DiagnosticResult { let mut hir_ctx = HIRContext::new(); + for u in ctx.uses { + handle_ast_use_statement(&mut hir_ctx, u)?; + } + for s in ctx.iter_order { let k = ctx.map[&s].clone(); diff --git a/compiler/astoir_hir_lowering/src/uses.rs b/compiler/astoir_hir_lowering/src/uses.rs new file mode 100644 index 0000000..6b7a7fe --- /dev/null +++ b/compiler/astoir_hir_lowering/src/uses.rs @@ -0,0 +1,78 @@ +use std::path::{MAIN_SEPARATOR_STR}; + +use ast::tree::{ASTTreeNode, ASTTreeNodeKind}; +use ast_parser::parse_ast_ctx; +use astoir_hir::{ctx::HIRContext}; +use diagnostics::{MaybeDiagnostic, builders::make_use_not_found}; +use lexer::lexer::lexer_parse_file; + +use crate::{lower_ast_toplevel, types::lower_ast_type}; + +pub fn handle_ast_use_statement_function_decl(context: &mut HIRContext, node: Box) -> MaybeDiagnostic { + if let ASTTreeNodeKind::FunctionDeclaration { func_name, args, body: _, return_type, requires_this: _ } = node.kind.clone() { + let ret_type; + + if return_type.is_some() { + let lower = lower_ast_type(context, return_type.unwrap(), 
&*node)?; + + ret_type = Some(lower) + } else { + ret_type = None; + } + + let mut arguments = vec![]; + let mut types = vec![]; + + for arg in args { + types.push(arg.argument_type.clone()); + let t = lower_ast_type(context, arg.argument_type, &*node)?; + + arguments.push((arg.name.hash, t)); + } + + context.functions.append(func_name.hash, (ret_type.clone(), arguments.clone(), func_name.val.clone())); + context.function_contexts.push(None); + + return Ok(()) + } + + panic!("Invalid node") +} + +pub fn handle_ast_use_statement(context: &mut HIRContext, node: Box) -> MaybeDiagnostic { + if let ASTTreeNodeKind::UseStatement { shards, use_clauses } = node.kind.clone() { + let mut path = ".".to_string(); + + for shard in shards { + path += &MAIN_SEPARATOR_STR .to_owned(); + path += &shard.val; + } + + path += &".qf"; + + let lexer = lexer_parse_file(&path)?; + let ast = parse_ast_ctx(&lexer)?; + + for clause in use_clauses { + if !ast.map.contains_key(&clause.val) { + return Err(make_use_not_found(&*node, &clause.val, &path).into()) + } + + let n = ast.map[&clause.val].clone(); + + match n.kind { + ASTTreeNodeKind::FunctionDeclaration { .. 
} => { + handle_ast_use_statement_function_decl(context, n.clone())?; + } + _ => { + println!(" --> {:#?}", n); + lower_ast_toplevel(context, n.clone())?; + } + }; + } + + return Ok(()) + } + + panic!("Invalid node") +} \ No newline at end of file diff --git a/compiler/astoir_mir_lowering/src/vars.rs b/compiler/astoir_mir_lowering/src/vars.rs index fc6ab3f..dd1816e 100644 --- a/compiler/astoir_mir_lowering/src/vars.rs +++ b/compiler/astoir_mir_lowering/src/vars.rs @@ -10,6 +10,15 @@ use crate::{MIRLoweringContext, lower_hir_type, values::lower_hir_value}; pub fn lower_hir_variable_declaration(block_id: MIRBlockReference, node: Box, ctx: &mut MIRLoweringContext) -> DiagnosticResult { if let HIRNodeKind::VarDeclaration { variable, var_type, default_val } = node.clone().kind { let func = ctx.mir_ctx.block_to_func[&block_id]; + + println!("Func {}", func); + + for i in 0..ctx.hir_ctx.function_contexts.len() { + println!("Func {}: {:#?}", i, ctx.hir_ctx.function_contexts[i]) + } + + + let local_ctx = ctx.hir_ctx.function_contexts[func].as_ref().unwrap(); if local_ctx.is_eligible_for_ssa(variable) { diff --git a/compiler/diagnostics/src/builders.rs b/compiler/diagnostics/src/builders.rs index 7333a0c..36edf54 100644 --- a/compiler/diagnostics/src/builders.rs +++ b/compiler/diagnostics/src/builders.rs @@ -1,6 +1,6 @@ use std::{fmt::Display}; -use crate::{DiagnosticSpanOrigin, diagnostic::{Diagnostic, Level, Span, SpanKind, SpanPosition}, errors::{ALREADY_IN_SCOPE, ASSIGN_DIFF_TYPE_IR, BOUND_MISSING, DIFF_SIZE_SPECIFIERS, DIFF_TYPE_SPECIFIERS, ENUM_PARENT_FIELDS, ERA_NOT_EXIST, EXPECTED_FREE, EXPECTED_TOKEN, EXPECTED_TYPE, FIELD_MISSING, FIELD_STRUCT_INIT, FIND_TYPE, FIND_TYPE_FIELD, FIND_TYPE_FUNCTION, FIND_VAR, FUNC_MISSING, INDEX_USAGE, INVALID_POINTING, INVALID_TYPE_REQ, IR_CAST, IR_INSTRUCTION_HELD_VAL, MATH_OPERATION_ASSIGNS, TRAIT_MISSING, TYPE_NOT_PART, UNEXPECTED_TOKEN, VARIABLE_UNINIT}, get_current_diagnostic_pos, warnings::UNUSED_VAR}; +use 
crate::{DiagnosticSpanOrigin, diagnostic::{Diagnostic, Level, Span, SpanKind, SpanPosition}, errors::{ALREADY_IN_SCOPE, ASSIGN_DIFF_TYPE_IR, BOUND_MISSING, DIFF_SIZE_SPECIFIERS, DIFF_TYPE_SPECIFIERS, ENUM_PARENT_FIELDS, ERA_NOT_EXIST, EXPECTED_FREE, EXPECTED_TOKEN, EXPECTED_TYPE, FIELD_MISSING, FIELD_STRUCT_INIT, FIND_TYPE, FIND_TYPE_FIELD, FIND_TYPE_FUNCTION, FIND_VAR, FUNC_MISSING, INDEX_USAGE, INVALID_POINTING, INVALID_TYPE_REQ, IR_CAST, IR_INSTRUCTION_HELD_VAL, MATH_OPERATION_ASSIGNS, NOT_FOUND_USE, TRAIT_MISSING, TYPE_NOT_PART, UNEXPECTED_TOKEN, VARIABLE_UNINIT}, get_current_diagnostic_pos, warnings::UNUSED_VAR}; pub fn make_expected_simple_error(origin: &K, expected: &E, got: &G) -> Diagnostic { origin.make_simple_diagnostic(EXPECTED_TOKEN.0, Level::Error, format!("expected {} but got {}", expected, got), None, vec![], vec![], vec![]) @@ -184,4 +184,8 @@ pub fn make_req_type_kind(origin: &K, t: &T pub fn make_type_not_partof(origin: &K, a: &A, b: &B) -> Diagnostic { origin.make_simple_diagnostic(TYPE_NOT_PART.0, Level::Error, format!("type {} is not part of type {}", a, b), None, vec![], vec![], vec![]) +} + +pub fn make_use_not_found(origin: &K, element: &E, path: &P) -> Diagnostic { + origin.make_simple_diagnostic(NOT_FOUND_USE.0, Level::Error, format!("element {} was not found in {}", element, path), None, vec![], vec![], vec![]) } \ No newline at end of file diff --git a/compiler/diagnostics/src/errors.rs b/compiler/diagnostics/src/errors.rs index ac2c27d..d337b1d 100644 --- a/compiler/diagnostics/src/errors.rs +++ b/compiler/diagnostics/src/errors.rs @@ -40,4 +40,5 @@ declare_error!(IR_INSTRUCTION_HELD_VAL, 29, "cannot extract held value from inst // Misc declare_error!(INVALID_TYPE_REQ, 30, "this operation requires a {} type"); -declare_error!(TYPE_NOT_PART, 31, "type {} is not part of type {}"); \ No newline at end of file +declare_error!(TYPE_NOT_PART, 31, "type {} is not part of type {}"); +declare_error!(NOT_FOUND_USE, 32, "element {} was not found 
in {}"); \ No newline at end of file diff --git a/compiler/lexer/src/lexer.rs b/compiler/lexer/src/lexer.rs index c13537d..8c6d965 100644 --- a/compiler/lexer/src/lexer.rs +++ b/compiler/lexer/src/lexer.rs @@ -30,6 +30,7 @@ const NEW_KEYWORD_HASH: u64 = hash!("new"); const UNWRAP_KEYWORD_HASH: u64 = hash!("unwrap"); const UNWRAP_UNSAFE_KEYWORD_HASH: u64 = hash!("unsafe_unwrap"); const ENUM_KEYWORD_HASH: u64 = hash!("enum"); +const USE_KEYWORD_HASH: u64 = hash!("use"); /// Parses a file into a set of lexer tokens. /// @@ -207,7 +208,7 @@ fn parse_math_operator(contents: &String, ind: &mut usize, start_pos: Position) *ind += 1; if contents.chars().nth(*ind).unwrap() != '=' { - return Ok(LexerToken::make_single_sized(start_pos, LexerTokenType::Asterisk)); + return Ok(LexerToken::make_single_sized(start_pos, LexerTokenType::EqualSign)); } let assigns = match contents.chars().nth(*ind) { @@ -375,6 +376,7 @@ fn parse_keyword(str: &String, ind: &mut usize, start_pos: Position) -> LexerTok UNWRAP_KEYWORD_HASH => LexerTokenType::Unwrap, UNWRAP_UNSAFE_KEYWORD_HASH => LexerTokenType::UnwrapUnsafe, ENUM_KEYWORD_HASH => LexerTokenType::Enum, + USE_KEYWORD_HASH => LexerTokenType::Use, _ => LexerTokenType::Keyword(slice.to_string(), hash) }; diff --git a/compiler/lexer/src/token.rs b/compiler/lexer/src/token.rs index c3a1a35..f700669 100644 --- a/compiler/lexer/src/token.rs +++ b/compiler/lexer/src/token.rs @@ -22,6 +22,8 @@ pub enum LexerTokenType { Unwrap, UnwrapUnsafe, + Use, + Var, Struct, Layout, @@ -226,6 +228,7 @@ impl Display for LexerTokenType { Self::True => "true", Self::While => "while", Self::Unwrap => "unwrap", + Self::Use => "use", Self::UnwrapUnsafe => "unsafe_unwrap" };