Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions CONTRIBUTING.MD
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ git clone https://github.com/Quickfall/quickfall
```

You will need the following installed in order to compile & use Quickfall:
- Rust 1.91.1
- LLVM 21.1
- Rust 1.94.1
- LLVM 22.1

You can then build the project by using the `cargo` Rust tool.
2 changes: 2 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

5 changes: 3 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,8 @@ Quickfall is a programming language focusing on allowing everyone to build fast
Quickfall aims to give the performance of extremely fast languages like C while giving the safety of Rust and also being reliable.

What Quickfall focuses on:
- **Speed**: Quickfall focuses on runtime speed which means that every single compile time safety check can be simply disabled either globally or for a single element. On recent benchmarks, Quickfall has surpassed C in terms of runtime speed by up to 25%!
- **Speed**: Quickfall focuses on runtime speed which means that every single compile time safety check can be simply disabled either globally or for a single element.
- **Feature filled**: Quickfall gives multiple useful features to build reliable software such as enums or arbitrary size types
- **Strict syntax**: Quickfall enforces a strict syntax to avoid mistakes or error-prone code
- **Safety**: Quickfall enforces strict safety features on compile time and optional ones on runtime that can be easily disabled to allow for the highest safety or speed depending on the build
- **Thread safety**: Quickfall gives tools to enforce thread safety mostly on compile time such as the *thread markers* to identify and restrict variables to only certain threads
- **Multipurpose**: Quickfall can be used to build both low-level and high-level software with different levels of tooling.
3 changes: 2 additions & 1 deletion compiler/ast/src/ctx.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,12 +5,13 @@ use crate::tree::ASTTreeNode;
#[derive(Debug)]
pub struct ParserCtx {
pub map: HashMap<String, Box<ASTTreeNode>>,
pub uses: Vec<Box<ASTTreeNode>>,
pub iter_order: Vec<String>
}

impl ParserCtx {
/// Creates an empty parser context with no parsed nodes, no `use`
/// statements, and an empty iteration order.
pub fn new() -> Self {
    ParserCtx { map: HashMap::new(), uses: vec![], iter_order: Vec::new() }
}

pub fn insert(&mut self, name: String, node: Box<ASTTreeNode>) -> bool {
Expand Down
3 changes: 3 additions & 0 deletions compiler/ast/src/tree.rs
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,8 @@ pub enum ASTTreeNodeKind {
IntegerLit { val: i128, hash: u64 },
StringLit(String),

UseStatement { shards: Vec<HashedString>, use_clauses: Vec<HashedString> },

ThisStructParam,

UnwrapCondition { original: Box<ASTTreeNode>, target_type: ASTType, unsafe_unwrap: bool, target_var: Option<HashedString> },
Expand Down Expand Up @@ -166,6 +168,7 @@ impl Display for ASTTreeNode {
impl Display for ASTTreeNodeKind {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let s = match self {
Self::UseStatement { .. } => "use statement",
Self::UnwrapCondition { .. } | Self::UnwrapValue { .. } => "unwrap",
Self::IntegerLit { .. } => "integer literal",
Self::StringLit(_) => "string literal",
Expand Down
8 changes: 7 additions & 1 deletion compiler/ast_parser/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
//! The parser for the Quickfall AST
//!

use ast::ctx::ParserCtx;
use ast::{ctx::ParserCtx, tree::ASTTreeNodeKind};
use diagnostics::{DiagnosticResult, builders::make_unexpected_simple_error};
use lexer::token::{LexerToken, LexerTokenType};

Expand All @@ -19,6 +19,7 @@ pub mod variables;
pub mod types;
pub mod arrays;
pub mod unwraps;
pub mod use_statements;

pub fn parse_ast_ctx(tokens: &Vec<LexerToken>) -> DiagnosticResult<ParserCtx> {
let mut ind = 0;
Expand All @@ -28,6 +29,11 @@ pub fn parse_ast_ctx(tokens: &Vec<LexerToken>) -> DiagnosticResult<ParserCtx> {
while tokens[ind].tok_type != LexerTokenType::EndOfFile {
let node = parse_ast_node(tokens, &mut ind)?;

if let ASTTreeNodeKind::UseStatement { .. } = node.kind {
ctx.uses.push(node);
continue;
}

if !node.kind.is_tree_permissible() {
return Err(make_unexpected_simple_error(&*node, &node).into())
}
Expand Down
8 changes: 6 additions & 2 deletions compiler/ast_parser/src/parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ use compiler_utils::hash::HashedString;
use diagnostics::{DiagnosticResult, builders::{make_unexpected_simple_error, make_unused_variable}};
use lexer::token::{LexerToken, LexerTokenType};

use crate::{control::{for_loop::parse_for_loop, if_else::parse_if_statement, while_block::parse_while_block}, functions::{parse_function_call, parse_function_declaraction, returns::parse_function_return_statement, shadow::parse_shadow_function_declaration}, structs::{enums::parse_enum_declaration, parse_type_declaration}, value::parse_ast_value_post_l, variables::{decl::parse_variable_declaration, static_decl::parse_static_variable_declaration}};
use crate::{control::{for_loop::parse_for_loop, if_else::parse_if_statement, while_block::parse_while_block}, functions::{parse_function_call, parse_function_declaraction, returns::parse_function_return_statement, shadow::parse_shadow_function_declaration}, structs::{enums::parse_enum_declaration, parse_type_declaration}, use_statements::parse_use_statement, value::parse_ast_value_post_l, variables::{decl::parse_variable_declaration, static_decl::parse_static_variable_declaration}};

/// Parses an AST node outside of any other node.
///
Expand Down Expand Up @@ -38,12 +38,16 @@ pub fn parse_ast_node(tokens: &Vec<LexerToken>, ind: &mut usize) -> DiagnosticRe
return parse_enum_declaration(tokens, ind);
},

LexerTokenType::Use => {
return parse_use_statement(tokens, ind);
}

_ => return Err(make_unexpected_simple_error(&tokens[*ind], &tokens[*ind].tok_type).into())
}
}

/// Parses an AST node inside of another compatible node (functions, control bodies)
pub fn parse_ast_node_in_body(tokens: &Vec<LexerToken>, ind: &mut usize) -> DiagnosticResult<Box<ASTTreeNode>> {
pub fn parse_ast_node_in_body(tokens: &Vec<LexerToken>, ind: &mut usize) -> DiagnosticResult<Box<ASTTreeNode>> {
match &tokens[*ind].tok_type {

LexerTokenType::Var => {
Expand Down
44 changes: 44 additions & 0 deletions compiler/ast_parser/src/use_statements.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
use ast::tree::{ASTTreeNode, ASTTreeNodeKind};
use compiler_utils::hash::HashedString;
use diagnostics::DiagnosticResult;
use lexer::token::{LexerToken, LexerTokenType};

pub fn parse_use_statement(tokens: &Vec<LexerToken>, ind: &mut usize) -> DiagnosticResult<Box<ASTTreeNode>> {
let start = tokens[*ind].pos.clone();

*ind += 1;

let mut steps = vec![];
let mut uses = vec![];

while tokens[*ind].is_keyword() {
let kwd = tokens[*ind].expects_keyword()?;
*ind += 1;

steps.push(HashedString::new(kwd.0));

tokens[*ind].expects(LexerTokenType::Collon)?;
*ind += 1;
}

tokens[*ind].expects(LexerTokenType::ArrayOpen)?;
*ind += 1;

while tokens[*ind].is_keyword() {
let kwd = tokens[*ind].expects_keyword()?;
*ind += 1;

uses.push(HashedString::new(kwd.0));

if tokens[*ind].tok_type == LexerTokenType::ArrayClose {
break;
}

tokens[*ind].expects(LexerTokenType::Comma)?;
*ind += 1;
}

*ind += 1;

return Ok((Box::new(ASTTreeNode::new(ASTTreeNodeKind::UseStatement { shards: steps, use_clauses: uses }, start, tokens[*ind].get_end_pos()))))
}
4 changes: 3 additions & 1 deletion compiler/astoir_hir_lowering/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -8,4 +8,6 @@ astoir_hir = { path = "../astoir_hir" }
compiler_typing = { path = "../compiler_typing" }
ast = { path = "../ast" }
diagnostics = { path = "../diagnostics" }
compiler_utils = { path = "../compiler_utils" }
compiler_utils = { path = "../compiler_utils" }
lexer = { path = "../lexer" }
ast_parser = { path = "../ast_parser" }
7 changes: 6 additions & 1 deletion compiler/astoir_hir_lowering/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ use ast::{ctx::ParserCtx, tree::{ASTTreeNode, ASTTreeNodeKind}};
use astoir_hir::{ctx::{HIRBranchedContext, HIRContext}, nodes::{HIRNode, HIRNodeKind}};
use diagnostics::{DiagnosticResult, DiagnosticSpanOrigin, move_current_diagnostic_pos};

use crate::{arrays::lower_ast_array_modify, control::{lower_ast_for_block, lower_ast_if_statement, lower_ast_while_block}, enums::lower_ast_enum, func::{lower_ast_function_call, lower_ast_function_declaration, lower_ast_shadow_function_declaration}, math::lower_ast_math_operation, structs::lower_ast_struct_declaration, values::lower_ast_value, var::{lower_ast_variable_assign, lower_ast_variable_declaration}};
use crate::{arrays::lower_ast_array_modify, control::{lower_ast_for_block, lower_ast_if_statement, lower_ast_while_block}, enums::lower_ast_enum, func::{lower_ast_function_call, lower_ast_function_declaration, lower_ast_shadow_function_declaration}, math::lower_ast_math_operation, structs::lower_ast_struct_declaration, uses::handle_ast_use_statement, values::lower_ast_value, var::{lower_ast_variable_assign, lower_ast_variable_declaration}};

pub mod literals;
pub mod var;
Expand All @@ -16,6 +16,7 @@ pub mod structs;
pub mod arrays;
pub mod unwraps;
pub mod enums;
pub mod uses;

pub fn lower_ast_body_node(context: &mut HIRContext, curr_ctx: &mut HIRBranchedContext, node: Box<ASTTreeNode>) -> DiagnosticResult<Box<HIRNode>> {
move_current_diagnostic_pos(node.get_pos());
Expand Down Expand Up @@ -106,6 +107,10 @@ pub fn lower_ast_toplevel(context: &mut HIRContext, node: Box<ASTTreeNode>) -> D
pub fn lower_ast(ctx: ParserCtx) -> DiagnosticResult<HIRContext> {
let mut hir_ctx = HIRContext::new();

for u in ctx.uses {
handle_ast_use_statement(&mut hir_ctx, u)?;
}

for s in ctx.iter_order {
let k = ctx.map[&s].clone();

Expand Down
78 changes: 78 additions & 0 deletions compiler/astoir_hir_lowering/src/uses.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
use std::path::{MAIN_SEPARATOR_STR};

use ast::tree::{ASTTreeNode, ASTTreeNodeKind};
use ast_parser::parse_ast_ctx;
use astoir_hir::{ctx::HIRContext};
use diagnostics::{MaybeDiagnostic, builders::make_use_not_found};
use lexer::lexer::lexer_parse_file;

use crate::{lower_ast_toplevel, types::lower_ast_type};

/// Registers an imported function declaration in the HIR context without
/// lowering its body: only the signature (return type and argument types)
/// is recorded so call sites in the importing file can resolve against it.
///
/// # Panics
/// Panics if `node` is not a `FunctionDeclaration`.
pub fn handle_ast_use_statement_function_decl(context: &mut HIRContext, node: Box<ASTTreeNode>) -> MaybeDiagnostic {
    if let ASTTreeNodeKind::FunctionDeclaration { func_name, args, body: _, return_type, requires_this: _ } = node.kind.clone() {
        // Lower the declared return type, if any.
        let ret_type = match return_type {
            Some(t) => Some(lower_ast_type(context, t, &*node)?),
            None => None,
        };

        // Lower each argument's type, pairing it with the argument name hash.
        let mut arguments = vec![];
        for arg in args {
            let t = lower_ast_type(context, arg.argument_type, &*node)?;
            arguments.push((arg.name.hash, t));
        }

        context.functions.append(func_name.hash, (ret_type, arguments, func_name.val));
        // Imported functions carry no local body context.
        context.function_contexts.push(None);

        return Ok(());
    }

    panic!("Invalid node")
}

/// Resolves a `use` statement: builds the path to the referenced `.qf` file
/// from the shard chain, lexes and parses that file, then imports each
/// requested element into the HIR context.
///
/// Function declarations are imported signature-only (no body lowering);
/// every other element is lowered as a regular top-level node.
///
/// # Panics
/// Panics if `node` is not a `UseStatement`.
pub fn handle_ast_use_statement(context: &mut HIRContext, node: Box<ASTTreeNode>) -> MaybeDiagnostic {
    if let ASTTreeNodeKind::UseStatement { shards, use_clauses } = node.kind.clone() {
        // Build the relative path `./shard/.../last.qf` from the shard chain.
        let mut path = String::from(".");
        for shard in shards {
            path.push_str(MAIN_SEPARATOR_STR);
            path.push_str(&shard.val);
        }
        path.push_str(".qf");

        let lexer = lexer_parse_file(&path)?;
        let ast = parse_ast_ctx(&lexer)?;

        for clause in use_clauses {
            // Single map lookup: the requested element must exist in the
            // imported file's top-level AST map.
            let Some(n) = ast.map.get(&clause.val) else {
                return Err(make_use_not_found(&*node, &clause.val, &path).into());
            };
            let n = n.clone();

            if matches!(n.kind, ASTTreeNodeKind::FunctionDeclaration { .. }) {
                // Functions are imported signature-only, without their bodies.
                handle_ast_use_statement_function_decl(context, n)?;
            } else {
                // Everything else (structs, enums, ...) is lowered normally.
                lower_ast_toplevel(context, n)?;
            }
        }

        return Ok(());
    }

    panic!("Invalid node")
}
9 changes: 9 additions & 0 deletions compiler/astoir_mir_lowering/src/vars.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,15 @@ use crate::{MIRLoweringContext, lower_hir_type, values::lower_hir_value};
pub fn lower_hir_variable_declaration(block_id: MIRBlockReference, node: Box<HIRNode>, ctx: &mut MIRLoweringContext) -> DiagnosticResult<bool> {
if let HIRNodeKind::VarDeclaration { variable, var_type, default_val } = node.clone().kind {
let func = ctx.mir_ctx.block_to_func[&block_id];

println!("Func {}", func);

for i in 0..ctx.hir_ctx.function_contexts.len() {
println!("Func {}: {:#?}", i, ctx.hir_ctx.function_contexts[i])
}



let local_ctx = ctx.hir_ctx.function_contexts[func].as_ref().unwrap();

if local_ctx.is_eligible_for_ssa(variable) {
Expand Down
6 changes: 5 additions & 1 deletion compiler/diagnostics/src/builders.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
use std::{fmt::Display};

use crate::{DiagnosticSpanOrigin, diagnostic::{Diagnostic, Level, Span, SpanKind, SpanPosition}, errors::{ALREADY_IN_SCOPE, ASSIGN_DIFF_TYPE_IR, BOUND_MISSING, DIFF_SIZE_SPECIFIERS, DIFF_TYPE_SPECIFIERS, ENUM_PARENT_FIELDS, ERA_NOT_EXIST, EXPECTED_FREE, EXPECTED_TOKEN, EXPECTED_TYPE, FIELD_MISSING, FIELD_STRUCT_INIT, FIND_TYPE, FIND_TYPE_FIELD, FIND_TYPE_FUNCTION, FIND_VAR, FUNC_MISSING, INDEX_USAGE, INVALID_POINTING, INVALID_TYPE_REQ, IR_CAST, IR_INSTRUCTION_HELD_VAL, MATH_OPERATION_ASSIGNS, TRAIT_MISSING, TYPE_NOT_PART, UNEXPECTED_TOKEN, VARIABLE_UNINIT}, get_current_diagnostic_pos, warnings::UNUSED_VAR};
use crate::{DiagnosticSpanOrigin, diagnostic::{Diagnostic, Level, Span, SpanKind, SpanPosition}, errors::{ALREADY_IN_SCOPE, ASSIGN_DIFF_TYPE_IR, BOUND_MISSING, DIFF_SIZE_SPECIFIERS, DIFF_TYPE_SPECIFIERS, ENUM_PARENT_FIELDS, ERA_NOT_EXIST, EXPECTED_FREE, EXPECTED_TOKEN, EXPECTED_TYPE, FIELD_MISSING, FIELD_STRUCT_INIT, FIND_TYPE, FIND_TYPE_FIELD, FIND_TYPE_FUNCTION, FIND_VAR, FUNC_MISSING, INDEX_USAGE, INVALID_POINTING, INVALID_TYPE_REQ, IR_CAST, IR_INSTRUCTION_HELD_VAL, MATH_OPERATION_ASSIGNS, NOT_FOUND_USE, TRAIT_MISSING, TYPE_NOT_PART, UNEXPECTED_TOKEN, VARIABLE_UNINIT}, get_current_diagnostic_pos, warnings::UNUSED_VAR};

pub fn make_expected_simple_error<K: DiagnosticSpanOrigin, E: Display, G: Display>(origin: &K, expected: &E, got: &G) -> Diagnostic {
origin.make_simple_diagnostic(EXPECTED_TOKEN.0, Level::Error, format!("expected {} but got {}", expected, got), None, vec![], vec![], vec![])
Expand Down Expand Up @@ -184,4 +184,8 @@ pub fn make_req_type_kind<K: DiagnosticSpanOrigin, T: Display>(origin: &K, t: &T

pub fn make_type_not_partof<K: DiagnosticSpanOrigin, A: Display, B: Display>(origin: &K, a: &A, b: &B) -> Diagnostic {
origin.make_simple_diagnostic(TYPE_NOT_PART.0, Level::Error, format!("type {} is not part of type {}", a, b), None, vec![], vec![], vec![])
}

pub fn make_use_not_found<K: DiagnosticSpanOrigin, E: Display, P: Display>(origin: &K, element: &E, path: &P) -> Diagnostic {
origin.make_simple_diagnostic(NOT_FOUND_USE.0, Level::Error, format!("element {} was not found in {}", element, path), None, vec![], vec![], vec![])
}
3 changes: 2 additions & 1 deletion compiler/diagnostics/src/errors.rs
Original file line number Diff line number Diff line change
Expand Up @@ -40,4 +40,5 @@ declare_error!(IR_INSTRUCTION_HELD_VAL, 29, "cannot extract held value from inst

// Misc
declare_error!(INVALID_TYPE_REQ, 30, "this operation requires a {} type");
declare_error!(TYPE_NOT_PART, 31, "type {} is not part of type {}");
declare_error!(TYPE_NOT_PART, 31, "type {} is not part of type {}");
declare_error!(NOT_FOUND_USE, 32, "element {} was not found in {}");
4 changes: 3 additions & 1 deletion compiler/lexer/src/lexer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@ const NEW_KEYWORD_HASH: u64 = hash!("new");
const UNWRAP_KEYWORD_HASH: u64 = hash!("unwrap");
const UNWRAP_UNSAFE_KEYWORD_HASH: u64 = hash!("unsafe_unwrap");
const ENUM_KEYWORD_HASH: u64 = hash!("enum");
const USE_KEYWORD_HASH: u64 = hash!("use");

/// Parses a file into a set of lexer tokens.
///
Expand Down Expand Up @@ -207,7 +208,7 @@ fn parse_math_operator(contents: &String, ind: &mut usize, start_pos: Position)
*ind += 1;

if contents.chars().nth(*ind).unwrap() != '=' {
return Ok(LexerToken::make_single_sized(start_pos, LexerTokenType::Asterisk));
return Ok(LexerToken::make_single_sized(start_pos, LexerTokenType::EqualSign));
}

let assigns = match contents.chars().nth(*ind) {
Expand Down Expand Up @@ -375,6 +376,7 @@ fn parse_keyword(str: &String, ind: &mut usize, start_pos: Position) -> LexerTok
UNWRAP_KEYWORD_HASH => LexerTokenType::Unwrap,
UNWRAP_UNSAFE_KEYWORD_HASH => LexerTokenType::UnwrapUnsafe,
ENUM_KEYWORD_HASH => LexerTokenType::Enum,
USE_KEYWORD_HASH => LexerTokenType::Use,
_ => LexerTokenType::Keyword(slice.to_string(), hash)
};

Expand Down
3 changes: 3 additions & 0 deletions compiler/lexer/src/token.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,8 @@ pub enum LexerTokenType {
Unwrap,
UnwrapUnsafe,

Use,

Var,
Struct,
Layout,
Expand Down Expand Up @@ -226,6 +228,7 @@ impl Display for LexerTokenType {
Self::True => "true",
Self::While => "while",
Self::Unwrap => "unwrap",
Self::Use => "use",
Self::UnwrapUnsafe => "unsafe_unwrap"
};

Expand Down
Loading