From 113a5abaf63984511cfaa4e2ebb33f0a47b1500a Mon Sep 17 00:00:00 2001 From: Jordan Mecom Date: Mon, 23 Mar 2026 09:36:55 -0700 Subject: [PATCH 01/17] Simplify stdlib helpers and examples --- CURRENT_STATUS.md | 33 +++- REMOTE_CAPS_RFC.md | 160 ++++++++++++++++++ UPDATE_PLAN.md | 206 +++++++++++++++++++++++ capc/src/codegen/intrinsics.rs | 23 +++ capc/tests/run.rs | 28 +++ capc/tests/typecheck.rs | 16 ++ docs/POLICY.md | 22 ++- docs/README.md | 15 ++ docs/TUTORIAL.md | 50 ++++-- examples/config_loader/config_loader.cap | 83 +++------ examples/hashmap_demo/hashmap_demo.cap | 56 +++--- examples/how_to_string/how_to_string.cap | 72 +++----- examples/http_server/http_server.cap | 146 ++++++++-------- examples/sort/sort.cap | 29 +--- examples/uniq/uniq.cap | 8 +- runtime/src/lib.rs | 49 +++++- stdlib/sys/fs.cap | 19 +-- stdlib/sys/stdin.cap | 2 +- stdlib/sys/string.cap | 163 +++++++++++++++++- stdlib/sys/vec.cap | 26 +++ tests/programs/args_safe.cap | 20 +-- tests/programs/fs_dir_reuse.cap | 36 ++++ tests/programs/fs_reuse.cap | 27 +++ tests/programs/string_helpers.cap | 26 ++- tests/programs/text_basic.cap | 15 +- tests/programs/text_helpers_more.cap | 61 ++----- tests/programs/text_push_safe.cap | 38 +---- tests/programs/text_safe.cap | 15 +- tests/programs/text_to_string.cap | 15 +- tests/programs/vec_custom_eq.cap | 20 +-- tests/programs/vec_search_helpers.cap | 37 ++-- tests/programs/wc_file.cap | 40 ++--- tests/programs/wc_stdin.cap | 17 +- 33 files changed, 1079 insertions(+), 494 deletions(-) create mode 100644 REMOTE_CAPS_RFC.md create mode 100644 UPDATE_PLAN.md create mode 100644 tests/programs/fs_dir_reuse.cap create mode 100644 tests/programs/fs_reuse.cap diff --git a/CURRENT_STATUS.md b/CURRENT_STATUS.md index 18a10a8..d150847 100644 --- a/CURRENT_STATUS.md +++ b/CURRENT_STATUS.md @@ -21,8 +21,12 @@ repository. It is based on code and tests, not older design docs. - Built-in types: i32, u32, u8, bool, unit, never. 
- `string` is a stdlib struct (a view over bytes), not a compiler builtin. - Pointers (`*T`) and borrows (`&T`) are supported in the type system. -- Affine/linear ownership rules are enforced for non-copy values (including - capability and linear types). +- Plain data is unrestricted by default. +- `opaque struct` and `capability struct` are the main move-tracked categories. +- Structs/enums become move-tracked by containment when they contain + move-tracked fields. +- Borrows are deliberately narrow: refs cannot be stored in structs/enums or + returned, and ref locals must be initialized from another local. - Integer literals type as i32; char literals are u8. ## Standard library and runtime @@ -42,6 +46,31 @@ repository. It is based on code and tests, not older design docs. - Buffer/Alloc malloc/free/casts. - Math wrap helpers and byte whitespace checks. +## Current capability algebra + +- Reusable use operations borrow the capability/resource where possible: + - `ReadFS.read_to_string/read_bytes/list_dir/exists` + - `Dir.read_to_string/read_bytes/list_dir/exists` + - `Stdin.read_to_string` + - `TcpConn.read_to_string/read/write` + - `TcpListener.accept` +- Attenuation operations still consume the stronger capability: + - `Filesystem.root_dir` + - `Dir.subdir` +- Child handles remain linear where appropriate: + - `FileRead` + - `TcpConn` +- On move-tracked capabilities, methods that return capabilities still take + `self` by value under the current checker. This is why `Dir.open_read` + consumes `Dir`, while `TcpListener.accept` can borrow: `TcpListener` is a + copy capability. +- Deliberately copyable capabilities currently include: + - `RootCap` + - `Console` + - `Args` + - `Net` + - `TcpListener` + ## ABI and codegen - Codegen targets native code via Cranelift. 
diff --git a/REMOTE_CAPS_RFC.md b/REMOTE_CAPS_RFC.md new file mode 100644 index 0000000..bbcec10 --- /dev/null +++ b/REMOTE_CAPS_RFC.md @@ -0,0 +1,160 @@ +# Remote Capability Delegation RFC + +This document describes a future initiative: delegating attenuated capabilities +to remote workers or agents over the network. + +It is intentionally separate from [UPDATE_PLAN.md](./UPDATE_PLAN.md). The local +model should be stabilized first. Remote delegation builds on that model; it +should not distort the scope of the local cleanup work. + +## Goal + +Allow a local process to delegate explicit, attenuated authority to a remote +agent while preserving Capable's core guarantee: + +> safe code can only exercise authority it was explicitly given + +## Non-Goals + +Remote delegation is not: + +- serializing local runtime handles +- exporting `RootCap` +- turning the network into ambient authority +- pretending remote calls are local calls +- claiming hostile multi-tenant isolation by default + +## Why This Is Separate Work + +The local cleanup plan is mostly: + +- docs and tutorial cleanup +- stdlib API cleanup +- local capability algebra clarification + +Remote delegation adds a distinct class of work: + +- authenticated sessions +- lease tables +- typed proxy capabilities +- revocation and expiry +- protocol design +- audit logging + +That is a real runtime/protocol project, not a minor extension of the local +cleanup. + +## Recommended Architecture + +The first design should be explicit and typed: + +- a local broker owns real local capabilities +- the broker exports only explicitly delegated capabilities +- the remote side receives typed proxy capabilities, not raw local handles +- proxy method calls are RPCs back to the broker +- the broker revalidates lease and policy on every call + +This should look like authority delegation, not distributed shared memory. + +## Core Concepts + +- Broker: trusted local runtime that owns local capabilities. 
+- Session: authenticated remote relationship with the broker. +- Lease: revocable exported authority bound to a session. +- Proxy capability: typed remote handle that forwards to a lease. + +## Security Rules for v1 + +- `RootCap` is non-exportable. +- Export must be explicit and typed. +- Every lease should carry: + - capability kind + - policy payload + - session binding + - expiry and revocation state +- Every remote call should revalidate: + - session identity + - lease existence + - lease cookie/generation + - policy constraints +- Remote proxies should never be castable to local capability types. + +## Scope for v1 + +Start with a very small set of exportable capabilities: + +- remote read-only filesystem +- remote command execution with an explicit command/profile allowlist +- remote console/log sink +- optionally remote workspace file read/write as a separate capability family + +Do not start with: + +- generic capability serialization +- remote `RootCap` +- arbitrary user-defined capability export +- distributed GC +- async/futures machinery + +## Why Typed Proxies First + +Typed proxies keep four things explicit: + +- authority +- latency +- fallibility +- auditability + +That matters for agent workflows. If an LLM-driven worker can call a remote +filesystem or exec capability, those calls should remain visibly remote and +explicitly delegated. + +## Trust-Boundary Warning + +If multiple users or agents can steer the same session, they share the +delegated authority of that session. + +The broker model is an authority-delegation mechanism. It is not, by itself, a +complete hostile multi-tenant sandbox. + +## Suggested Future Phases + +### Phase A: Stabilize the local model + +Complete the work in [UPDATE_PLAN.md](./UPDATE_PLAN.md) first. 
+ +### Phase B: Design `sys::remote` + +- define the safe surface API +- decide exported capability families +- decide proxy error model +- decide session lifecycle model + +### Phase C: Build a broker MVP + +- implement authenticated sessions +- implement lease tables +- implement typed proxy calls for a tiny capability set +- add revocation, expiry, and audit logging + +### Phase D: Dogfood on agent workflows + +Use the broker to support constrained remote workers: + +- a code-writing agent with only a workspace capability +- a test-running agent with a restricted exec profile +- a docs/indexing agent with read-only filesystem access + +The goal is to prove the authority model on real workflows before widening the +protocol. + +## Acceptance Criteria + +Remote delegation is only ready to ship if: + +- local capability semantics are already crisp +- delegated authority is always narrower than local root authority +- revoked or expired leases fail closed +- remote proxies cannot be forged or widened +- policy and session checks happen on every call +- the resulting system is auditable enough to reason about agent authority diff --git a/UPDATE_PLAN.md b/UPDATE_PLAN.md new file mode 100644 index 0000000..7480326 --- /dev/null +++ b/UPDATE_PLAN.md @@ -0,0 +1,206 @@ +# Update Plan: Consolidate the Local Model + +This document is the immediate roadmap for Capable. + +It is intentionally limited to the local language/runtime model. Remote +capability delegation is important, but it is a separate initiative and is now +tracked in [REMOTE_CAPS_RFC.md](./REMOTE_CAPS_RFC.md). + +Status: + +- Phase 0 is complete: the public docs now lead with the local + data/resource/capability model. +- Phase 1 is complete: reusable capability use-operations now borrow where the + current checker/runtime model allows it. 
+- Phase 2 is complete for the current local model: the compiler behavior was + audited, the docs were aligned to it, and reference locals remain supported + in their existing narrow form. +- Phase 3 is complete for the current local algebra: the repo now documents + which capabilities are reusable, which derivations consume, which child + handles are linear, and which capabilities are intentionally copyable. + +The point of this plan is not to redesign Capable from scratch. The point is +to align the docs, stdlib, and compiler around the model the codebase already +mostly implements. + +## Goals + +- Make the language easier to explain and teach. +- Keep the capability model intact. +- Reduce friction in ordinary code without weakening resource safety. +- Stop growing surface area until the core model is clearer. + +## Non-Negotiable Invariants + +- Safe code has no ambient authority. +- Capability values remain unforgeable in safe code. +- Privileged effects happen only through capability-bearing APIs. +- Runtime checks remain fail-closed. +- Unsafe code remains the only escape hatch for raw pointers, FFI, and direct + OS access. + +## What the Compiler Already Does + +The review was right about the broad shape of the implementation: the compiler +is already closer to the desired model than the public docs suggest. + +Today, Capable already has: + +- plain data that is unrestricted by default +- kind-by-containment for structs and enums +- `opaque struct` and `capability struct` as the main move-tracked categories +- static-only traits and generics + +Borrow-lite is also already much more limited than a Rust-style borrow system: + +- references cannot be stored in structs/enums +- references cannot be returned +- reference locals are allowed, but only when initialized from another local +- reference locals are non-assignable once created + +So the immediate work is mostly consolidation and cleanup, not a ground-up +rewrite. 
+ +## Where the Friction Still Comes From + +### 1. The docs overstate the ownership story + +The public docs still make Capable sound more like a general affine/linear +language than it really is in practice. That hides the simpler model: + +- ordinary data is ordinary +- resources are move-tracked +- capabilities are authority-bearing resources + +### 2. The stdlib conflates different capability operations + +Several APIs still treat semantically different operations as if they should all +consume the receiver. + +We should distinguish: + +- Use operations: perform an effect with existing authority. +- Attenuation operations: derive a narrower capability. +- Child-handle operations: create a fresh child handle from an existing parent. + +Examples: + +- `ReadFS.read_to_string` is a use operation and should not normally consume the + capability. +- `Dir.subdir` is attenuation and may reasonably consume the stronger path cap. +- `TcpListener.accept` is a child-handle operation and may reasonably borrow the + listener while returning a fresh connection. + +This cleanup is the highest-value change in the near term. + +### 3. Borrow-lite is still visible in the surface language + +Capable already avoids stored refs and returned refs, which is good. +The remaining question is not "do we add a borrow system?" The codebase already +has one. The real question is how much of it should remain part of the public +story. + +The likely direction is: + +- keep short-lived borrowed parameters/receivers +- de-emphasize explicit reference locals in docs and examples +- avoid making users think in lifetimes or aliasing proofs + +### 4. Generic code is still more pessimistic than the public story + +Generics and traits are intentionally static-only, which is good. +But generic code still tends to feel more move-sensitive than ordinary code. 
+ +Near-term conclusion: + +- do not expand traits/generics further +- keep the current machinery where it already pays for itself +- revisit generic kind behavior only after the local capability model is clearer + +## Immediate Phases + +### Phase 0: Restate the model + +- Update docs to describe Capable as a capability-secure language with a small + resource model. +- Lead with the three categories: + - plain data + - resources + - capabilities +- Stop describing the language as if general affine/linear reasoning were the + main thing users should learn first. + +### Phase 1: Clean up stdlib capability APIs + +- Rework `sys::fs` and `sys::net` signatures around the three-way distinction: + use, attenuation, child-handle. +- Make semantically reusable capabilities borrowable for ordinary use. +- Reserve `linear` for must-close child handles such as `FileRead` and + `TcpConn`. +- Remove accidental one-shot behavior from reusable capabilities. + +This phase should deliver the biggest usability gain for the least compiler +churn. + +### Phase 2: Tighten the compiler around the public story + +- Audit reference-local behavior and decide whether to keep it as-is or reduce + it to receiver/parameter positions in the public language. +- Keep kind-by-containment. +- Keep move tracking focused on `opaque struct`, `capability struct`, and + values that contain them. +- Avoid new trait/generic surface area while this cleanup is in flight. + +This phase is about alignment, not reinvention. + +### Phase 3: Stabilize the local capability algebra + +Before any remote work, local capability behavior should be crisp: + +- which capabilities are reusable +- which derivations consume +- which child handles are linear +- which capabilities are deliberately copyable + +If these rules are muddy locally, they will be worse remotely. 
+ +## Out of Scope for This Plan + +The following are intentionally excluded from this document: + +- remote capability delegation +- broker/session/lease/proxy design +- protocol/authentication/revocation details +- async/distributed execution concerns +- expanding traits or richer generic abstraction machinery + +Those topics belong in separate RFCs or later roadmaps. + +## Acceptance Criteria + +This plan is successful if: + +- the tutorial explains Capable without leading with borrow/move rules +- ordinary examples read like simple systems code, not ownership puzzles +- capability flow remains explicit in signatures and call sites +- the local attenuation model is clearer after the stdlib cleanup +- the compiler, docs, and stdlib all tell the same story + +## Tests We Should Add or Tighten + +- reusable capabilities can perform multiple ordinary use operations +- attenuation operations still consume when they should +- child handles still require close/consumption on all paths +- structs/enums containing resources remain resource-like by containment +- reference locals, if kept, remain tightly restricted and non-assignable + +## Bottom Line + +Capable does not need a new local model. It needs a cleaner expression of the +local model it already has. 
+ +The immediate work is: + +- restate the language honestly +- clean up the stdlib capability APIs +- align the compiler and docs around that smaller story diff --git a/capc/src/codegen/intrinsics.rs b/capc/src/codegen/intrinsics.rs index 94e5dad..00740cc 100644 --- a/capc/src/codegen/intrinsics.rs +++ b/capc/src/codegen/intrinsics.rs @@ -124,6 +124,19 @@ pub fn register_runtime_intrinsics(ptr_ty: Type) -> HashMap { ], ret: AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), }; + let fs_dir_read_to_string = FnSig { + params: vec![AbiType::Handle, AbiType::Handle, AbiType::Ptr], + ret: AbiType::Result(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + }; + let fs_dir_read_to_string_abi = FnSig { + params: vec![ + AbiType::Handle, + AbiType::Handle, + AbiType::Ptr, + AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + ], + ret: AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + }; let fs_join = FnSig { params: vec![AbiType::Handle, AbiType::Ptr, AbiType::Ptr], ret: AbiType::Ptr, @@ -725,6 +738,16 @@ pub fn register_runtime_intrinsics(ptr_ty: Type) -> HashMap { is_runtime: true, }, ); + map.insert( + "sys.fs.Dir__read_to_string".to_string(), + FnInfo { + sig: fs_dir_read_to_string, + abi_sig: Some(fs_dir_read_to_string_abi), + symbol: "capable_rt_fs_dir_read_to_string".to_string(), + runtime_symbol: None, + is_runtime: true, + }, + ); map.insert( "sys.fs.Dir__list_dir".to_string(), FnInfo { diff --git a/capc/tests/run.rs b/capc/tests/run.rs index b277802..36d7b84 100644 --- a/capc/tests/run.rs +++ b/capc/tests/run.rs @@ -106,6 +106,34 @@ fn run_fs_helpers() { assert!(stdout.contains("fs helpers ok"), "stdout was: {stdout:?}"); } +#[test] +fn run_fs_reuse() { + let out_dir = make_out_dir("fs_reuse"); + let out_dir = out_dir.to_str().expect("utf8 out dir"); + let (code, stdout, _stderr) = run_capc(&[ + "run", + "--out-dir", + out_dir, + "tests/programs/fs_reuse.cap", + ]); + assert_eq!(code, 0); + 
assert!(stdout.contains("fs reuse ok"), "stdout was: {stdout:?}"); +} + +#[test] +fn run_fs_dir_reuse() { + let out_dir = make_out_dir("fs_dir_reuse"); + let out_dir = out_dir.to_str().expect("utf8 out dir"); + let (code, stdout, _stderr) = run_capc(&[ + "run", + "--out-dir", + out_dir, + "tests/programs/fs_dir_reuse.cap", + ]); + assert_eq!(code, 0); + assert!(stdout.contains("dir reuse ok"), "stdout was: {stdout:?}"); +} + #[test] fn run_match_expr() { let out_dir = make_out_dir("match_expr"); diff --git a/capc/tests/typecheck.rs b/capc/tests/typecheck.rs index f40c4f3..c1a0d02 100644 --- a/capc/tests/typecheck.rs +++ b/capc/tests/typecheck.rs @@ -36,6 +36,22 @@ fn typecheck_fs_read_ok() { type_check_program(&module, &stdlib, &[]).expect("typecheck module"); } +#[test] +fn typecheck_fs_reuse_ok() { + let source = load_program("fs_reuse.cap"); + let module = parse_module(&source).expect("parse module"); + let stdlib = load_stdlib().expect("load stdlib"); + type_check_program(&module, &stdlib, &[]).expect("typecheck module"); +} + +#[test] +fn typecheck_fs_dir_reuse_ok() { + let source = load_program("fs_dir_reuse.cap"); + let module = parse_module(&source).expect("parse module"); + let stdlib = load_stdlib().expect("load stdlib"); + type_check_program(&module, &stdlib, &[]).expect("typecheck module"); +} + #[test] fn typecheck_fs_close_ok() { let source = load_program("should_pass_fs_close.cap"); diff --git a/docs/POLICY.md b/docs/POLICY.md index a7d1749..bea2896 100644 --- a/docs/POLICY.md +++ b/docs/POLICY.md @@ -9,12 +9,20 @@ This is a compact policy reference for language invariants and safety boundaries - raw pointers (`*T`) - `extern` functions +## Resource Model + +- Most values are plain data and are unrestricted by default. +- `opaque struct` represents a resource/owner handle. +- `capability struct` represents an authority-bearing resource. +- Structs/enums that contain resource/capability fields become move-tracked by + containment. 
+ ## Borrow‑Lite Rules - `&T` is allowed in parameters and locals. - Reference locals must be initialized from another local value. - References cannot be stored in structs/enums or returned. -- References are read‑only: they are only valid for `&T` parameters. +- References are read‑only and intentionally short-lived. ## Move / Linear Rules @@ -22,11 +30,21 @@ This is a compact policy reference for language invariants and safety boundaries - **Affine**: move‑only; use‑after‑move is a type error. - **Linear**: move‑only and must be consumed on all paths. - Extracting an affine/linear field consumes the whole root local. +- In practice these rules are primarily for resources, capabilities, and values + that contain them. ## Capabilities - Capabilities are opaque (`capability struct`) and cannot be forged. -- Attenuation APIs consume the stronger capability. +- Prefer three distinct API shapes in `sys.*`: + - use operations + - attenuation operations + - child-handle operations +- Reusable use operations should borrow where possible. +- Attenuation operations consume the stronger capability. +- Under the current checker, methods on move-tracked capabilities that return + capabilities still take `self` by value. This is a conservative rule that + keeps authority flow explicit. - Runtime enforces root/relative path checks. ## No‑Implicitness diff --git a/docs/README.md b/docs/README.md index 3be00d2..e2b80cc 100644 --- a/docs/README.md +++ b/docs/README.md @@ -4,6 +4,15 @@ Capable is a small systems language built around capabilities: values that expli This is an experimental and toy project. Inspired by [Austral](https://austral-lang.org/). +The intended programming model is small: + +- plain data: ordinary structs/enums and scalar values +- resources: owned handles such as buffers, files, and sockets +- capabilities: resources that also carry authority + +Most values are just data. 
The move-tracked part of the language exists so +resource ownership and authority flow stay explicit. + ```capable fn main(rc: RootCap) { // Mint a capability from the root @@ -27,3 +36,9 @@ fn main(rc: RootCap) { Capabilities are explicit values that grant permission to perform privileged operations (filesystem, network, clock, etc.). This is in contrast to ambient authority: normally, any code running in your process can reach the outside world, which makes dependency behavior difficult to constrain or reason about. Capable aims to reduce supply-chain risk by making authority non-ambient: if a dependency didn’t receive a capability value, it can’t do the thing. The compiler enforces this by requiring capabilities at call sites for privileged operations, and the runtime can enforce attenuation (for example, a filesystem capability scoped to a root directory that cannot be escaped). The result is a smaller blast radius and fewer “surprising” dependency updates, because new code can’t silently acquire new powers—you have to explicitly hand them over. + +Capable also has a small resource model. `opaque struct` and `capability +struct` values are the main move-tracked categories, and structs/enums that +contain them become move-tracked by containment. The goal is not to turn all +programming into ownership puzzles; it is to make authority and resource +lifetime explicit where they matter. diff --git a/docs/TUTORIAL.md b/docs/TUTORIAL.md index 12e46d9..237f648 100644 --- a/docs/TUTORIAL.md +++ b/docs/TUTORIAL.md @@ -11,6 +11,15 @@ This tutorial is a cohesive walk-through of the language as it exists today. It focuses on how to write real programs, how the capability model works, and how memory is managed. 
+Capable is easiest to understand if you divide values into three groups: + +- plain data: numbers, bools, and ordinary structs/enums +- resources: owned handles such as buffers, files, and sockets +- capabilities: resources that also carry permission + +Most code works with plain data. The resource model exists so ownership and +authority stay explicit where they matter. + ## 1) Hello, console ```cap @@ -101,7 +110,8 @@ impl Pair { - Structs and enums are nominal types. - Methods are defined in `impl` blocks and lower to `Type__method` in codegen. -- Method receivers can be `self` (move) or `self: &T` (borrow-lite, read-only). +- Method receivers can be `self` (value receiver) or `self: &T` (short-lived + read-only borrow). ## 5) Results and error flow @@ -166,11 +176,25 @@ risk: it can forge or corrupt capability values, violate attenuation, or reach privileged operations directly. Treat unsafe dependencies as highly trusted code and use auditing/`--safe-only` to keep the boundary tight. -Attenuation is one-way: methods that return capabilities must take `self` by -value, so you give up the more powerful capability when you derive a narrower -one. This is enforced by the compiler. +In practice, capability APIs fall into three shapes: + +- use operations: perform an effect with existing authority +- attenuation operations: derive a narrower capability from a stronger one +- child-handle operations: create a fresh child handle from an existing parent -## 7) Kinds: copy, affine, linear +That distinction matters more than "everything moves." A read-only filesystem +capability being used to read a file is different from a directory capability +being narrowed to a subdirectory, and different again from a listener producing +a fresh connection handle. + +In the current implementation, reusable use operations borrow where possible. +That is why `ReadFS.read_to_string` and `Dir.read_to_string` can be called +multiple times on the same capability value. 
By contrast, methods on +move-tracked capabilities that return capabilities still take `self` by value +under the current checker. That is why `Dir.subdir` and `Dir.open_read` +consume `Dir`, while `TcpListener.accept` can borrow: `TcpListener` is copyable. + +## 7) Resources and kinds Types can declare how they move: @@ -185,10 +209,14 @@ Kinds: - **Affine**: move-only, dropping is allowed. - **Linear**: move-only and must be consumed on all paths. -## 8) Borrow-lite references: `&T` +Most plain data does not require thinking about this section. These rules matter +primarily for resources, capabilities, and values that contain them. + +## 8) Short borrows: `&T` -Capable has a minimal borrow system for read-only access. The goal is to make -non-consuming reads ergonomic without a full borrow checker. +Capable has a narrow borrow system for read-only access. The goal is to make +resource use ergonomic without turning the language into a full borrow-checking +model. ```cap impl Thing { @@ -212,9 +240,9 @@ This keeps the language simple without a full borrow checker. It also keeps lifetimes simple: a borrow is only valid within the current scope, so you never have to reason about aliasing across function boundaries. -Borrow-lite is intentionally conservative. If you need shared ownership across -functions, pass the value by move (and return it), or design your API to do the -read inside the callee. +Borrow-lite is intentionally conservative. In most public APIs, the important +case is a short-lived borrowed parameter or receiver on a resource/capability +type. 
## 9) Memory model diff --git a/examples/config_loader/config_loader.cap b/examples/config_loader/config_loader.cap index 6566938..8fd3c37 100644 --- a/examples/config_loader/config_loader.cap +++ b/examples/config_loader/config_loader.cap @@ -3,81 +3,48 @@ module config_loader use sys::system use sys::console use sys::fs -use sys::string -use sys::vec -use sys::buffer -fn print_kv(c: Console, alloc: Alloc, key: string, val: string) -> unit { - let out = alloc.text_new() - match (out.push_str("key: ")) { - Ok(_) => { } - Err(_) => { out.free(alloc); return () } - } - match (out.push_str(key)) { - Ok(_) => { } - Err(_) => { out.free(alloc); return () } - } - match (out.push_str("\nvalue: ")) { - Ok(_) => { } - Err(_) => { out.free(alloc); return () } - } - match (out.push_str(val)) { - Ok(_) => { } - Err(_) => { out.free(alloc); return () } - } - match (out.push_byte('\n')) { - Ok(_) => { } - Err(_) => { out.free(alloc); return () } - } - c.print(out.as_string()) - out.free(alloc) +fn print_kv(c: Console, key: string, val: string) -> unit { + c.print("key: ") + c.println(key) + c.print("value: ") + c.println(val) return () } -fn parse_line(c: Console, alloc: Alloc, line: string) -> Result { +fn parse_line(c: Console, line: string) -> unit { if (line.len() == 0) { - return Ok(()) + return () } if (line.starts_with("#")) { - return Ok(()) + return () } - - match (line.index_of_byte('=')) { - Ok(eq) => { - let key_result = line.slice_range(0, eq) - match (key_result) { - Ok(key) => { - let val_result = line.slice_range(eq + 1, line.len()) - match (val_result) { - Ok(val) => { print_kv(c, alloc, key, val) } - Err(_) => { return Err(vec::VecErr::OutOfRange) } - } - } - Err(_) => { return Err(vec::VecErr::OutOfRange) } - } - } + match (line.split_once_view('=')) { + Ok(parts) => { print_kv(c, parts.left, parts.right) } Err(_) => { } } - return Ok(()) + return () } -fn parse_config(c: Console, alloc: Alloc, contents: string) -> Result { - let lines = contents.lines(alloc) 
- let n = lines.len() - for i in 0..n { - let line = lines[i]? - parse_line(c, alloc, line)? +fn parse_config(c: Console, contents: string) -> unit { + let rest = contents + while (true) { + match (rest.split_once_view('\n')) { + Ok(parts) => { + parse_line(c, parts.left.trim_end_view()) + rest = parts.right + } + Err(_) => { + parse_line(c, rest.trim_end_view()) + return () + } + } } - alloc.vec_string_free(lines) - return Ok(()) } fn run(c: Console, alloc: Alloc, fs: ReadFS) -> Result { let contents = fs.read_to_string(alloc, "app.conf")? - let result = parse_config(c, alloc, contents) - if (result.is_err()) { - return Err(fs::FsErr::IoError) - } + parse_config(c, contents) return Ok(()) } diff --git a/examples/hashmap_demo/hashmap_demo.cap b/examples/hashmap_demo/hashmap_demo.cap index eda44f9..f70e20a 100644 --- a/examples/hashmap_demo/hashmap_demo.cap +++ b/examples/hashmap_demo/hashmap_demo.cap @@ -133,10 +133,7 @@ pub fn hashmap_new(alloc: buffer::Alloc, initial_capacity: i32) -> HashMap { // Initialize all entries as empty for i in 0..cap { - match (entries.push(empty_entry())) { - Ok(_) => {} - Err(_) => { panic() } - } + entries.push(empty_entry()).ok() } return HashMap { @@ -229,10 +226,7 @@ fn hashmap_rehash(map: HashMap, new_cap: i32) -> HashMap { // Create new entries vector let new_entries = alloc.vec_new() for i in 0..new_cap { - match (new_entries.push(empty_entry())) { - Ok(_) => {} - Err(_) => { panic() } - } + new_entries.push(empty_entry()).ok() } let new_size = 0 @@ -450,36 +444,36 @@ fn run_demo(c: Console, alloc: buffer::Alloc) -> unit { let test_vals = alloc.vec_i32_new() // key -> key * 10 - match (test_keys.push(42)) { Ok(_) => {} Err(_) => {} } - match (test_vals.push(420)) { Ok(_) => {} Err(_) => {} } + test_keys.push(42).ok() + test_vals.push(420).ok() - match (test_keys.push(17)) { Ok(_) => {} Err(_) => {} } - match (test_vals.push(170)) { Ok(_) => {} Err(_) => {} } + test_keys.push(17).ok() + test_vals.push(170).ok() - match 
(test_keys.push(99)) { Ok(_) => {} Err(_) => {} } - match (test_vals.push(990)) { Ok(_) => {} Err(_) => {} } + test_keys.push(99).ok() + test_vals.push(990).ok() - match (test_keys.push(1)) { Ok(_) => {} Err(_) => {} } - match (test_vals.push(10)) { Ok(_) => {} Err(_) => {} } + test_keys.push(1).ok() + test_vals.push(10).ok() - match (test_keys.push(256)) { Ok(_) => {} Err(_) => {} } - match (test_vals.push(2560)) { Ok(_) => {} Err(_) => {} } + test_keys.push(256).ok() + test_vals.push(2560).ok() // Keys that will likely collide - match (test_keys.push(0)) { Ok(_) => {} Err(_) => {} } - match (test_vals.push(0)) { Ok(_) => {} Err(_) => {} } + test_keys.push(0).ok() + test_vals.push(0).ok() - match (test_keys.push(16)) { Ok(_) => {} Err(_) => {} } - match (test_vals.push(160)) { Ok(_) => {} Err(_) => {} } + test_keys.push(16).ok() + test_vals.push(160).ok() - match (test_keys.push(32)) { Ok(_) => {} Err(_) => {} } - match (test_vals.push(320)) { Ok(_) => {} Err(_) => {} } + test_keys.push(32).ok() + test_vals.push(320).ok() // Insert all entries let num_entries = test_keys.len() for i in 0..num_entries { - let k = test_keys[i].unwrap_or(0) - let v = test_vals[i].unwrap_or(0) + let k = test_keys.get_or(i, 0) + let v = test_vals.get_or(i, 0) match (hashmap_insert(map, k, v)) { Ok(new_map) => { map = new_map @@ -509,7 +503,7 @@ fn run_demo(c: Console, alloc: buffer::Alloc) -> unit { // Look up each key for i in 0..num_entries { - let k = test_keys[i].unwrap_or(0) + let k = test_keys.get_or(i, 0) c.print(" get(") c.print_i32(k) c.print(") = ") @@ -546,13 +540,13 @@ fn run_demo(c: Console, alloc: buffer::Alloc) -> unit { c.println("Looking up non-existent keys...") let missing = alloc.vec_i32_new() - match (missing.push(100)) { Ok(_) => {} Err(_) => {} } - match (missing.push(999)) { Ok(_) => {} Err(_) => {} } - match (missing.push(12345)) { Ok(_) => {} Err(_) => {} } + missing.push(100).ok() + missing.push(999).ok() + missing.push(12345).ok() let num_missing = 
missing.len() for i in 0..num_missing { - let k = missing[i].unwrap_or(0) + let k = missing.get_or(i, 0) c.print(" contains(") c.print_i32(k) c.print(") = ") diff --git a/examples/how_to_string/how_to_string.cap b/examples/how_to_string/how_to_string.cap index 144a7e5..e0ef338 100644 --- a/examples/how_to_string/how_to_string.cap +++ b/examples/how_to_string/how_to_string.cap @@ -6,76 +6,44 @@ use sys::string use sys::buffer fn demo_string_view(c: Console, alloc: Alloc) -> unit { - let s = "hello,world" + let s = " hello,world \n" + let trimmed = s.trim_view() c.println("-- string view --") - c.println(s) + c.println(trimmed) c.println("len:") - c.println_i32(s.len()) + c.println_i32(trimmed.len()) - match (s.index_of_byte(',')) { - Ok(i) => { - match (s.slice_range(0, i)) { - Ok(left) => { c.println(left) } - Err(_) => { c.println("slice failed") } - } + match (trimmed.split_once_view(',')) { + Ok(parts) => { + c.println(parts.left) + c.println(parts.right) } Err(_) => { c.println("comma not found") } } - let words = s.split(alloc, ',') + let words = trimmed.split(alloc, ',') c.println("split count:") c.println_i32(words.len()) - match (words[0]) { - Ok(w0) => { c.println(w0) } - Err(_) => { c.println("word0 missing") } - } - match (words[1]) { - Ok(w1) => { c.println(w1) } - Err(_) => { c.println("word1 missing") } - } + c.println(words.get_or(0, "")) + c.println(words.get_or(1, "")) alloc.vec_string_free(words) } fn demo_text_builder(c: Console, alloc: Alloc) -> unit { c.println("-- Text builder --") let t = alloc.text_new() - match (t.push_str("hello")) { - Ok(_) => { } - Err(_) => { c.println("push_str failed"); return () } - } - match (t.push_byte(' ')) { - Ok(_) => { } - Err(_) => { c.println("push_byte failed"); return () } - } - match (t.append("text")) { - Ok(_) => { } - Err(_) => { c.println("append failed"); return () } - } - match (t.slice_range(0, 5)) { - Ok(view) => { c.println(view) } - Err(_) => { c.println("slice_range failed") } - } - match 
(t.to_string()) { - Ok(out) => { c.println(out) } - Err(_) => { c.println("to_string failed") } - } + t.push_str("hello").ok() + t.push_byte(' ').ok() + t.append("text").ok() + c.println(t.slice_range(0, 5).ok()) + c.println(t.to_string().ok()) t.free(alloc) - match (alloc.text_from("owned")) { - Ok(t2) => { - match (t2.to_string()) { - Ok(out) => { c.println(out) } - Err(_) => { c.println("to_string failed") } - } - t2.free(alloc) - } - Err(_) => { c.println("text_from failed") } - } + let t2 = alloc.text_from("owned").ok() + c.println(t2.to_string().ok()) + t2.free(alloc) - match ("a".concat(alloc, "b")) { - Ok(out) => { c.println(out) } - Err(_) => { c.println("concat failed") } - } + c.println("a".concat(alloc, "b").ok()) } pub fn main(rc: RootCap) -> i32 { diff --git a/examples/http_server/http_server.cap b/examples/http_server/http_server.cap index b05e66d..db696e9 100644 --- a/examples/http_server/http_server.cap +++ b/examples/http_server/http_server.cap @@ -5,66 +5,66 @@ module http_server use sys::console use sys::fs use sys::net -use sys::args +use sys::string use sys::system -fn arg_or_default(args: Args, index: i32, default: string) -> string { - if (args.len() <= index) { - return default +fn strip_query(raw_path: string) -> string { + match (raw_path.split_once_view('?')) { + Ok(parts) => { return parts.left } + Err(_) => { return raw_path } } - return args.at(index).unwrap_or(default) } -fn strip_query(raw_path: string, alloc: Alloc) -> string { - match (raw_path.index_of_byte('?')) { - Ok(i) => { - match (raw_path.slice_range(0, i)) { - Ok(view) => { return view } - Err(_) => { return raw_path } +fn sanitize_path(raw_path: string, alloc: Alloc) -> Result { + let out = alloc.text_new() + let rest = raw_path + while (true) { + let seg = "" + let done = false + match (rest.split_once_view('/')) { + Ok(parts) => { + seg = parts.left + rest = parts.right + } + Err(_) => { + seg = rest + done = true } } - Err(_) => { return raw_path } - } -} - -fn 
sanitize_segment(parts: Vec, i: i32, acc: string, seg: string, alloc: Alloc) -> Result { - if (seg.len() == 0) { - return sanitize_parts(parts, i + 1, acc, alloc) - } - if (seg == ".") { - return sanitize_parts(parts, i + 1, acc, alloc) - } - if (seg == "..") { - return Err(()) - } - if (acc.len() == 0) { - return sanitize_parts(parts, i + 1, seg, alloc) - } - return sanitize_parts(parts, i + 1, fs::join(alloc, acc, seg), alloc) -} - -fn sanitize_parts(parts: Vec, i: i32, acc: string, alloc: Alloc) -> Result { - if (i >= parts.len()) { - return Ok(acc) + if (seg == "..") { + out.free(alloc) + return Err(()) + } + if (seg.len() > 0 && seg != ".") { + if (!out.is_empty()) { + match (out.push_byte('/')) { + Ok(_) => { } + Err(_) => { + out.free(alloc) + return Err(()) + } + } + } + match (out.push_str(seg)) { + Ok(_) => { } + Err(_) => { + out.free(alloc) + return Err(()) + } + } + } + if (done) { + break + } } - let seg_result = parts[i] - match (seg_result) { - Ok(seg) => { return sanitize_segment(parts, i, acc, seg, alloc) } - Err(_) => { return Err(()) } + if (out.is_empty()) { + out.free(alloc) + return Ok("index.html") } -} - -fn sanitize_path(raw_path: string, alloc: Alloc) -> Result { - let parts = raw_path.split(alloc, '/') - let result = sanitize_parts(parts, 0, "", alloc) - alloc.vec_string_free(parts) + let result = out.to_string() + out.free(alloc) match (result) { - Ok(path) => { - if (path.len() == 0) { - return Ok("index.html") - } - return Ok(path) - } + Ok(path) => { return Ok(path) } Err(_) => { return Err(()) } } } @@ -75,43 +75,31 @@ enum ParseErr { BadSlice } -fn index_or_space(s: string) -> Result { - match (s.index_of_byte(' ')) { - Ok(i) => { return Ok(i) } +fn split_on_space(s: string) -> Result { + match (s.split_once_view(' ')) { + Ok(parts) => { return Ok(parts) } Err(_) => { return Err(ParseErr::MissingSpace) } } } -fn slice_or_err(s: string, start: i32, end: i32) -> Result { - match (s.slice_range(start, end)) { - Ok(v) => { return Ok(v) 
} - Err(_) => { return Err(ParseErr::BadSlice) } - } -} - fn parse_request_line(line: string, alloc: Alloc) -> Result { - let trimmed = line.trim(alloc) - let space0 = index_or_space(trimmed)? - let method = slice_or_err(trimmed, 0, space0)? - if (method != "GET") { + let trimmed = line.trim_view() + let head = split_on_space(trimmed)? + if (head.left != "GET") { return Err(ParseErr::BadMethod) } - let rest = slice_or_err(trimmed, space0 + 1, trimmed.len())? - let space1 = index_or_space(rest)? - let path = slice_or_err(rest, 0, space1)? - match (sanitize_path(strip_query(path, alloc), alloc)) { + let rest = head.right.trim_start_view() + let target = split_on_space(rest)?.left + match (sanitize_path(strip_query(target), alloc)) { Ok(p) => { return Ok(p) } Err(_) => { return Err(ParseErr::BadSlice) } } } fn parse_request_path(req: string, alloc: Alloc) -> Result { - let lines = req.lines(alloc) - let line_result = lines[0] - alloc.vec_string_free(lines) - match (line_result) { - Ok(line) => { return parse_request_line(line, alloc) } - Err(_) => { return Err(ParseErr::BadSlice) } + match (req.split_once_view('\n')) { + Ok(parts) => { return parse_request_line(parts.left.trim_end_view(), alloc) } + Err(_) => { return parse_request_line(req.trim_end_view(), alloc) } } } @@ -129,7 +117,7 @@ fn respond_bad_request(conn: &TcpConn) -> Result { return conn.write("HTTP/1.0 400 Bad Request\r\nContent-Type: text/plain\r\n\r\nbad request\n") } -fn handle_request(conn: &TcpConn, readfs: ReadFS, alloc: Alloc, req: string) -> Result { +fn handle_request(conn: &TcpConn, readfs: &ReadFS, alloc: Alloc, req: string) -> Result { match (parse_request_path(req, alloc)) { Ok(path) => { match (readfs.read_to_string(alloc, path)) { @@ -143,10 +131,13 @@ fn handle_request(conn: &TcpConn, readfs: ReadFS, alloc: Alloc, req: string) -> fn serve_forever(c: Console, net: Net, rc: RootCap, alloc: Alloc, root: string, port: i32) -> Result { let listener = net.listen("127.0.0.1", port)? 
+ let readfs = rc.mint_readfs(root) + let readfs_ref: &ReadFS = readfs c.println("listening on 127.0.0.1") c.print("port: ") c.println_i32(port) defer listener.close() + defer readfs.close() while (true) { if let Ok(conn) = listener.accept() { let req_result = conn.read(alloc, 4096) @@ -154,8 +145,7 @@ fn serve_forever(c: Console, net: Net, rc: RootCap, alloc: Alloc, root: string, conn.close() } else { let req = req_result.ok() - let readfs = rc.mint_readfs(root) - let handled = handle_request(conn, readfs, alloc, req) + let handled = handle_request(conn, readfs_ref, alloc, req) conn.close() if (handled.is_err()) { // Ignore per-request write errors; keep the server alive. @@ -174,7 +164,7 @@ pub fn main(rc: RootCap) -> i32 { let net = rc.mint_net() let args = rc.mint_args() let alloc = rc.mint_alloc_default() - let root = arg_or_default(args, 1, "examples/http_server") + let root = args.at(1).unwrap_or("examples/http_server") let port = 8090 let result = serve_forever(c, net, rc, alloc, root, port) if (result.is_err()) { diff --git a/examples/sort/sort.cap b/examples/sort/sort.cap index c155ae4..ef1c724 100644 --- a/examples/sort/sort.cap +++ b/examples/sort/sort.cap @@ -36,8 +36,8 @@ fn str_lt(a: string, b: string) -> bool { // Compare lines at indices i and j fn line_lt(lines: Vec, i: i32, j: i32) -> bool { - let line_i = lines[i].unwrap_or("") - let line_j = lines[j].unwrap_or("") + let line_i = lines.get_or(i, "") + let line_j = lines.get_or(j, "") return str_lt(line_i, line_j) } @@ -47,18 +47,10 @@ fn sort_indices(lines: Vec, indices: Vec) -> unit { for i in 1..n { let j = i while (j > 0) { - let curr_idx = indices[j].unwrap_or(0) - let prev_idx = indices[j - 1].unwrap_or(0) + let curr_idx = indices.get_or(j, 0) + let prev_idx = indices.get_or(j - 1, 0) if (line_lt(lines, curr_idx, prev_idx)) { - // swap indices[j] and indices[j-1] - match (indices.set(j, prev_idx)) { - Ok(ok) => {} - Err(e) => {} - } - match (indices.set(j - 1, curr_idx)) { - Ok(ok) => {} - 
Err(e) => {} - } + indices.swap(j, j - 1).ok() j = j - 1 } else { break @@ -69,24 +61,21 @@ fn sort_indices(lines: Vec, indices: Vec) -> unit { fn run(c: Console, alloc: Alloc, input: Stdin) -> Result { let contents = input.read_to_string(alloc)? - let lines = contents.lines(alloc) + let lines = contents.lines_view(alloc) let n = lines.len() // Create index array [0, 1, 2, ...] let indices = alloc.vec_i32_new() for i in 0..n { - match (indices.push(i)) { - Ok(ok) => {} - Err(e) => {} - } + indices.push(i).ok() } sort_indices(lines, indices) // Print lines in sorted order (skip empty lines) for i in 0..n { - let idx = indices[i].unwrap_or(0) - let line = lines[idx].unwrap_or("") + let idx = indices.get_or(i, 0) + let line = lines.get_or(idx, "") if (line.len() > 0) { c.println(line) } diff --git a/examples/uniq/uniq.cap b/examples/uniq/uniq.cap index 4f8fcd5..0a8fe06 100644 --- a/examples/uniq/uniq.cap +++ b/examples/uniq/uniq.cap @@ -11,18 +11,18 @@ fn should_print(lines: Vec, i: i32) -> bool { if (i == 0) { return true } - let curr = lines[i].unwrap_or("") - let prev = lines[i - 1].unwrap_or("") + let curr = lines.get_or(i, "") + let prev = lines.get_or(i - 1, "") return prev != curr } fn run(c: Console, alloc: Alloc, input: Stdin) -> Result { let contents = input.read_to_string(alloc)? 
- let lines = contents.lines(alloc) + let lines = contents.lines_view(alloc) let n = lines.len() for i in 0..n { if (should_print(lines, i)) { - let line = lines[i].unwrap_or("") + let line = lines.get_or(i, "") c.println(line) } } diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index 05a7cf1..85056e6 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -112,6 +112,14 @@ fn take_handle( table.remove(&handle) } +fn clone_handle( + table: &LazyLock>>, + handle: Handle, + label: &'static str, +) -> Option { + with_table(table, label, |table| table.get(&handle).cloned()) +} + fn has_handle( table: &LazyLock>>, handle: Handle, @@ -484,7 +492,7 @@ pub extern "C" fn capable_rt_fs_exists( path: *const CapString, ) -> u8 { let path = unsafe { read_cap_string(path) }; - let state = take_handle(&READ_FS, fs, "readfs table"); + let state = clone_handle(&READ_FS, fs, "readfs table"); let (Some(state), Some(path)) = (state, path) else { return 0; }; @@ -507,7 +515,7 @@ pub extern "C" fn capable_rt_fs_read_bytes( out_err: *mut i32, ) -> u8 { let path = unsafe { read_cap_string(path) }; - let state = take_handle(&READ_FS, fs, "readfs table"); + let state = clone_handle(&READ_FS, fs, "readfs table"); let (Some(state), Some(path)) = (state, path) else { return write_handle_result(out_ok, out_err, Err(FsErr::PermissionDenied)); }; @@ -536,7 +544,7 @@ pub extern "C" fn capable_rt_fs_list_dir( out_err: *mut i32, ) -> u8 { let path = unsafe { read_cap_string(path) }; - let state = take_handle(&READ_FS, fs, "readfs table"); + let state = clone_handle(&READ_FS, fs, "readfs table"); let (Some(state), Some(path)) = (state, path) else { return write_handle_result(out_ok, out_err, Err(FsErr::PermissionDenied)); }; @@ -571,7 +579,7 @@ pub extern "C" fn capable_rt_fs_dir_exists( name: *const CapString, ) -> u8 { let name = unsafe { read_cap_string(name) }; - let state = take_handle(&DIRS, dir, "dir table"); + let state = clone_handle(&DIRS, dir, "dir table"); let (Some(state), 
Some(name)) = (state, name) else { return 0; }; @@ -594,7 +602,7 @@ pub extern "C" fn capable_rt_fs_dir_read_bytes( out_err: *mut i32, ) -> u8 { let name = unsafe { read_cap_string(name) }; - let state = take_handle(&DIRS, dir, "dir table"); + let state = clone_handle(&DIRS, dir, "dir table"); let (Some(state), Some(name)) = (state, name) else { return write_handle_result(out_ok, out_err, Err(FsErr::PermissionDenied)); }; @@ -622,7 +630,7 @@ pub extern "C" fn capable_rt_fs_dir_list_dir( out_ok: *mut Handle, out_err: *mut i32, ) -> u8 { - let state = take_handle(&DIRS, dir, "dir table"); + let state = clone_handle(&DIRS, dir, "dir table"); let Some(state) = state else { return write_handle_result(out_ok, out_err, Err(FsErr::PermissionDenied)); }; @@ -648,6 +656,33 @@ pub extern "C" fn capable_rt_fs_dir_list_dir( } } +#[no_mangle] +pub extern "C" fn capable_rt_fs_dir_read_to_string( + dir: Handle, + _alloc: Handle, + name: *const CapString, + out_ok: *mut CapString, + out_err: *mut i32, +) -> u8 { + let name = unsafe { read_cap_string(name) }; + let state = clone_handle(&DIRS, dir, "dir table"); + let (Some(state), Some(name)) = (state, name) else { + return write_result_with_alloc(_alloc, out_ok, out_err, Err(FsErr::PermissionDenied)); + }; + let Some(name_rel) = normalize_relative(Path::new(&name)) else { + return write_result_with_alloc(_alloc, out_ok, out_err, Err(FsErr::InvalidPath)); + }; + let combined = state.rel.join(name_rel); + let full = match resolve_rooted_path(&state.root, &combined) { + Ok(path) => path, + Err(err) => return write_result_with_alloc(_alloc, out_ok, out_err, Err(err)), + }; + match std::fs::read_to_string(&full) { + Ok(contents) => write_result_with_alloc(_alloc, out_ok, out_err, Ok(contents)), + Err(err) => write_result_with_alloc(_alloc, out_ok, out_err, Err(map_fs_err(err))), + } +} + #[no_mangle] pub extern "C" fn capable_rt_fs_join( out: *mut CapString, @@ -819,7 +854,7 @@ pub extern "C" fn capable_rt_fs_read_to_string( out_err: 
*mut i32, ) -> u8 { let path = unsafe { read_cap_string(path) }; - let state = take_handle(&READ_FS, fs, "readfs table"); + let state = clone_handle(&READ_FS, fs, "readfs table"); let Some(state) = state else { return write_result_with_alloc(_alloc, out_ok, out_err, Err(FsErr::PermissionDenied)); diff --git a/stdlib/sys/fs.cap b/stdlib/sys/fs.cap index 0c507d8..6423d1f 100644 --- a/stdlib/sys/fs.cap +++ b/stdlib/sys/fs.cap @@ -19,22 +19,22 @@ pub enum FsErr { NotFound, PermissionDenied, InvalidPath, IoError } impl ReadFS { /// Read an entire file into a string. - pub fn read_to_string(self, alloc: buffer::Alloc, path: string) -> Result { + pub fn read_to_string(self: &ReadFS, alloc: buffer::Alloc, path: string) -> Result { return () } /// Read an entire file into a byte vec. - pub fn read_bytes(self, alloc: buffer::Alloc, path: string) -> Result, FsErr> { + pub fn read_bytes(self: &ReadFS, alloc: buffer::Alloc, path: string) -> Result, FsErr> { return Err(FsErr::IoError) } /// List directory contents as strings. - pub fn list_dir(self, alloc: buffer::Alloc, path: string) -> Result, FsErr> { + pub fn list_dir(self: &ReadFS, alloc: buffer::Alloc, path: string) -> Result, FsErr> { return Err(FsErr::IoError) } /// True if a path exists. - pub fn exists(self, path: string) -> bool { + pub fn exists(self: &ReadFS, path: string) -> bool { return false } @@ -68,24 +68,23 @@ impl Dir { } /// Read a file into a byte vec. - pub fn read_bytes(self, alloc: buffer::Alloc, name: string) -> Result, FsErr> { + pub fn read_bytes(self: &Dir, alloc: buffer::Alloc, name: string) -> Result, FsErr> { return Err(FsErr::IoError) } /// List directory contents. - pub fn list_dir(self, alloc: buffer::Alloc) -> Result, FsErr> { + pub fn list_dir(self: &Dir, alloc: buffer::Alloc) -> Result, FsErr> { return Err(FsErr::IoError) } /// True if a path exists. 
- pub fn exists(self, name: string) -> bool { + pub fn exists(self: &Dir, name: string) -> bool { return false } /// Read a file into a string. - pub fn read_to_string(self, alloc: buffer::Alloc, name: string) -> Result { - let file = self.open_read(name) - return file.read_to_string(alloc) + pub fn read_to_string(self: &Dir, alloc: buffer::Alloc, name: string) -> Result { + return () } /// Close the capability. diff --git a/stdlib/sys/stdin.cap b/stdlib/sys/stdin.cap index c3e53e5..049d149 100644 --- a/stdlib/sys/stdin.cap +++ b/stdlib/sys/stdin.cap @@ -10,7 +10,7 @@ pub capability struct Stdin impl Stdin { /// Read stdin into a string. - pub fn read_to_string(self, alloc: buffer::Alloc) -> Result { + pub fn read_to_string(self: &Stdin, alloc: buffer::Alloc) -> Result { return Err(io::IoErr::IoError) } } diff --git a/stdlib/sys/string.cap b/stdlib/sys/string.cap index 7e1e707..392f7ea 100644 --- a/stdlib/sys/string.cap +++ b/stdlib/sys/string.cap @@ -19,7 +19,7 @@ pub struct Text { bytes: vec::Vec } -/// Result payload for split_once. +/// Result payload for split_once and split_once_view. pub struct SplitOnce { left: string, right: string @@ -74,6 +74,19 @@ fn build_range(alloc: buffer::Alloc, s: string, start: i32, end: i32) -> string } } +fn view_range(s: string, start: i32, end: i32) -> string { + if (end <= start) { + return "" + } + if (start == 0 && end == s.len()) { + return s + } + match (s.slice_range(start, end)) { + Ok(out) => { return out } + Err(_) => { panic() } + } +} + fn lower_ascii_byte(b: u8) -> u8 { match (b) { 'A' => { return 'a' } @@ -290,6 +303,32 @@ impl string { return out } + /// Split on ASCII whitespace without copying the pieces. 
+ pub fn split_whitespace_view(self, alloc: buffer::Alloc) -> Vec { + let out = alloc.vec_string_new() + let bytes = self.as_slice() + let len = bytes.len() + let i = 0 + while (i < len) { + while (i < len && bytes.at(i).is_whitespace()) { + i = i + 1 + } + if (i >= len) { + break + } + let start = i + while (i < len && !bytes.at(i).is_whitespace()) { + i = i + 1 + } + let part = view_range(self, start, i) + match (out.push(part)) { + Ok(_) => { } + Err(_) => { panic() } + } + } + return out + } + pub fn lines(self, alloc: buffer::Alloc) -> Vec { let out = alloc.vec_string_new() let bytes = self.as_slice() @@ -325,6 +364,42 @@ impl string { return out } + /// Split into line views without copying the pieces. + pub fn lines_view(self, alloc: buffer::Alloc) -> Vec { + let out = alloc.vec_string_new() + let bytes = self.as_slice() + let len = bytes.len() + let start = 0 + let i = 0 + while (i < len) { + if (bytes.at(i) == '\n') { + let end = i + if (end > start && bytes.at(end - 1) == '\r') { + end = end - 1 + } + let part = view_range(self, start, end) + match (out.push(part)) { + Ok(_) => { } + Err(_) => { panic() } + } + start = i + 1 + } + i = i + 1 + } + if (start < len) { + let end = len + if (end > start && bytes.at(end - 1) == '\r') { + end = end - 1 + } + let part = view_range(self, start, end) + match (out.push(part)) { + Ok(_) => { } + Err(_) => { panic() } + } + } + return out + } + pub fn split(self, alloc: buffer::Alloc, delim: u8) -> Vec { let out = alloc.vec_string_new() let bytes = self.as_slice() @@ -369,6 +444,66 @@ impl string { return Err(()) } + /// Split once on the first matching delimiter without copying. 
+ pub fn split_once_view(self, delim: u8) -> Result { + let bytes = self.as_slice() + let len = bytes.len() + let i = 0 + while (i < len) { + if (bytes.at(i) == delim) { + return Ok(SplitOnce { + left: view_range(self, 0, i), + right: view_range(self, i + 1, len) + }) + } + i = i + 1 + } + return Err(()) + } + + /// Trim ASCII whitespace from both ends without copying. + pub fn trim_view(self) -> string { + let start_trimmed = self.trim_start_view() + return start_trimmed.trim_end_view() + } + + /// Trim ASCII whitespace from the start without copying. + pub fn trim_start_view(self) -> string { + let bytes = self.as_slice() + let len = bytes.len() + let i = 0 + while (i < len) { + if (!bytes.at(i).is_whitespace()) { + break + } + i = i + 1 + } + if (i == 0) { + return self + } + return view_range(self, i, len) + } + + /// Trim ASCII whitespace from the end without copying. + pub fn trim_end_view(self) -> string { + let bytes = self.as_slice() + let len = bytes.len() + if (len == 0) { + return self + } + let i = len + while (i > 0) { + if (!bytes.at(i - 1).is_whitespace()) { + break + } + i = i - 1 + } + if (i == len) { + return self + } + return view_range(self, 0, i) + } + /// Trim ASCII whitespace from both ends. pub fn trim(self, alloc: buffer::Alloc) -> string { let start_trimmed = self.trim_start(alloc) @@ -647,6 +782,32 @@ impl string { return count } + /// Count newline bytes. + pub fn count_newlines(self) -> i32 { + return self.count_byte('\n') + } + + /// Count ASCII whitespace-delimited words without allocating. + pub fn count_words_ascii(self) -> i32 { + let bytes = self.as_slice() + let len = bytes.len() + let i = 0 + let count = 0 + while (i < len) { + while (i < len && bytes.at(i).is_whitespace()) { + i = i + 1 + } + if (i >= len) { + break + } + count = count + 1 + while (i < len && !bytes.at(i).is_whitespace()) { + i = i + 1 + } + } + return count + } + /// True if all bytes are ASCII. 
pub fn is_ascii(self) -> bool { let bytes = self.as_slice() diff --git a/stdlib/sys/vec.cap b/stdlib/sys/vec.cap index c3b643b..07a357f 100644 --- a/stdlib/sys/vec.cap +++ b/stdlib/sys/vec.cap @@ -205,6 +205,14 @@ impl Vec { return self.get(len - 1) } + /// Get element at index, or return a fallback value. + pub fn get_or(self, i: i32, fallback: T) -> T { + match (self.get(i)) { + Ok(v) => { return v } + Err(_) => { return fallback } + } + } + /// Set element at index. pub fn set(self, i: i32, x: T) -> Result { let header = read_header(self) @@ -216,6 +224,24 @@ impl Vec { return Ok(()) } + /// Swap two elements in place. + pub fn swap(self, i: i32, j: i32) -> Result { + let header = read_header(self) + if (i < 0 || i >= header.len || j < 0 || j >= header.len) { + return Err(VecErr::OutOfRange) + } + if (i == j) { + return Ok(()) + } + let ptr_i = elem_ptr(header.raw, header.elem_size, i) + let ptr_j = elem_ptr(header.raw, header.elem_size, j) + let tmp = unsafe_ptr::ptr_read(ptr_i) + let other = unsafe_ptr::ptr_read(ptr_j) + unsafe_ptr::ptr_write(ptr_i, other) + unsafe_ptr::ptr_write(ptr_j, tmp) + return Ok(()) + } + /// Append one element. pub fn push(self, x: T) -> Result { self.reserve(1)? 
diff --git a/tests/programs/args_safe.cap b/tests/programs/args_safe.cap index e4092c4..45cd876 100644 --- a/tests/programs/args_safe.cap +++ b/tests/programs/args_safe.cap @@ -11,19 +11,11 @@ pub fn main(rc: RootCap) -> i32 { c.println("args bad") return 1 } - let code = match args.at(0) { - Ok(s) => { - let m = s.len() - c.assert(m >= 0) - if (m >= 0) { - c.println("args ok") - } - 0 - } - Err(e) => { - c.println("args err") - 1 - } + let s = args.at(0).ok() + let m = s.len() + c.assert(m >= 0) + if (m >= 0) { + c.println("args ok") } - return code + return 0 } diff --git a/tests/programs/fs_dir_reuse.cap b/tests/programs/fs_dir_reuse.cap new file mode 100644 index 0000000..23e177f --- /dev/null +++ b/tests/programs/fs_dir_reuse.cap @@ -0,0 +1,36 @@ +package safe +module fs_dir_reuse +use sys::system + +pub fn main(rc: RootCap) -> i32 { + let c = rc.mint_console() + let alloc = rc.mint_alloc_default() + let fs = rc.mint_filesystem("./config") + let dir = fs.root_dir() + + c.assert(dir.exists("app.txt")) + + match dir.read_to_string(alloc, "app.txt") { + Ok(s) => { c.assert(s.len() > 0) } + Err(_) => { c.println("dir read_to_string failed"); return 1 } + } + + match dir.read_bytes(alloc, "app.txt") { + Ok(bytes) => { + c.assert(bytes.len() > 0) + alloc.vec_u8_free(bytes) + } + Err(_) => { c.println("dir read_bytes failed"); return 1 } + } + + match dir.list_dir(alloc) { + Ok(entries) => { + c.assert(entries.len() > 0) + alloc.vec_string_free(entries) + } + Err(_) => { c.println("dir list_dir failed"); return 1 } + } + + c.println("dir reuse ok") + return 0 +} diff --git a/tests/programs/fs_reuse.cap b/tests/programs/fs_reuse.cap new file mode 100644 index 0000000..165e7cc --- /dev/null +++ b/tests/programs/fs_reuse.cap @@ -0,0 +1,27 @@ +package safe +module fs_reuse +use sys::system + +pub fn main(rc: RootCap) -> i32 { + let c = rc.mint_console() + let alloc = rc.mint_alloc_default() + + let rfs = rc.mint_readfs("./config") + c.assert(rfs.exists("app.txt")) + + 
match rfs.read_to_string(alloc, "app.txt") { + Ok(s) => { c.assert(s.len() > 0) } + Err(_) => { c.println("read_to_string failed"); return 1 } + } + + match rfs.read_bytes(alloc, "app.txt") { + Ok(bytes) => { + c.assert(bytes.len() > 0) + alloc.vec_u8_free(bytes) + } + Err(_) => { c.println("read_bytes failed"); return 1 } + } + + c.println("fs reuse ok") + return 0 +} diff --git a/tests/programs/string_helpers.cap b/tests/programs/string_helpers.cap index 28cfc0a..28a779e 100644 --- a/tests/programs/string_helpers.cap +++ b/tests/programs/string_helpers.cap @@ -10,23 +10,30 @@ pub fn main(rc: RootCap) -> i32 { let n = buf.len() let b = buf.at(0) let words = "a b c".split_whitespace(alloc) + let word_views = "a b c".split_whitespace_view(alloc) let count = words.len() let trimmed = " hi \n".trim(alloc) + let trimmed_view = " hi \n".trim_view() let trimmed_start = " hi ".trim_start(alloc) + let trimmed_start_view = " hi ".trim_start_view() let trimmed_end = " hi ".trim_end(alloc) + let trimmed_end_view = " hi ".trim_end_view() let trimmed_ascii = " \tHi\n".trim_ascii(alloc) let lower = "AbC".to_lower_ascii(alloc) let upper = "AbC".to_upper_ascii(alloc) let sliced = "hello".slice_range(1, 4) let lines = "a\nb\n".split_lines(alloc) - alloc.vec_string_free(words) - alloc.vec_string_free(lines) + let line_views = "a\r\nb\n".lines_view(alloc) c.assert(n == 3 && b == 'a' && count == 3) + c.assert(word_views.len() == 3) c.assert(trimmed.len() == 2) + c.assert(trimmed_view.eq("hi")) c.assert(trimmed.starts_with_byte('h')) c.assert(trimmed.ends_with_byte('i')) c.assert(trimmed_start.starts_with("hi")) + c.assert(trimmed_start_view.starts_with("hi")) c.assert(trimmed_end.ends_with("hi")) + c.assert(trimmed_end_view.ends_with("hi")) c.assert(trimmed_ascii.eq("Hi")) c.assert(lower.eq("abc")) c.assert(upper.eq("ABC")) @@ -75,6 +82,8 @@ pub fn main(rc: RootCap) -> i32 { Err(_) => { c.assert(false) } } c.assert("abca".count_byte('a') == 2) + c.assert("a\nb\n".count_newlines() == 
2) + c.assert(" a bc \t d ".count_words_ascii() == 3) c.assert("abc".is_ascii()) c.assert("abc".byte_at_checked(10).is_err()) match ("a,b,c".split_once(alloc, ',')) { @@ -84,16 +93,29 @@ pub fn main(rc: RootCap) -> i32 { } Err(_) => { c.assert(false) } } + match ("a,b,c".split_once_view(',')) { + Ok(parts) => { + c.assert(parts.left.eq("a")) + c.assert(parts.right.eq("b,c")) + } + Err(_) => { c.assert(false) } + } match ("ab".concat(alloc, "cd")) { Ok(joined) => { c.assert(joined.eq("abcd")) } Err(_) => { c.assert(false) } } let pieces = "a,b,c".split(alloc, ',') c.assert(pieces.len() == 3) + c.assert(line_views.len() == 2) match (pieces.join(",")) { Ok(joined) => { c.assert(joined.eq("a,b,c")) } Err(_) => { c.assert(false) } } + alloc.vec_string_free(words) + alloc.vec_string_free(word_views) + alloc.vec_string_free(lines) + alloc.vec_string_free(line_views) + alloc.vec_string_free(pieces) c.println("string ok") return 0 } diff --git a/tests/programs/text_basic.cap b/tests/programs/text_basic.cap index 0e3a31f..05e4e60 100644 --- a/tests/programs/text_basic.cap +++ b/tests/programs/text_basic.cap @@ -7,20 +7,11 @@ pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let alloc = rc.mint_alloc_default() let text = string::text_new(alloc) - match (text.push_str("hi")) { - Ok(_) => { } - Err(_) => { c.assert(false); text.free(alloc); return 1 } - } - match (text.push_byte('!')) { - Ok(_) => { } - Err(_) => { c.assert(false); text.free(alloc); return 1 } - } + text.push_str("hi").ok() + text.push_byte('!').ok() let view = text.as_string() c.assert(view.eq("hi!")) - match (text.to_string()) { - Ok(s) => { c.assert(s.eq("hi!")) } - Err(_) => { c.assert(false); text.free(alloc); return 1 } - } + c.assert(text.to_string().ok().eq("hi!")) text.free(alloc) c.println("text basic ok") return 0 diff --git a/tests/programs/text_helpers_more.cap b/tests/programs/text_helpers_more.cap index 9bb7b72..168524a 100644 --- a/tests/programs/text_helpers_more.cap +++ 
b/tests/programs/text_helpers_more.cap @@ -7,55 +7,20 @@ pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let alloc = rc.mint_alloc_default() let text = alloc.text_new() - match (text.push_str("hi")) { - Ok(_) => { } - Err(_) => { c.assert(false); return 1 } - } - match (text.push_byte('\n')) { - Ok(_) => { } - Err(_) => { c.assert(false); return 1 } - } - match (text.push_str("ok")) { - Ok(_) => { } - Err(_) => { c.assert(false); return 1 } - } - match (text.slice_range(0, 2)) { - Ok(view) => { c.assert(view.eq("hi")) } - Err(_) => { c.assert(false); return 1 } - } + text.push_str("hi").ok() + text.push_byte('\n').ok() + text.push_str("ok").ok() + c.assert(text.slice_range(0, 2).ok().eq("hi")) let v = alloc.vec_u8_new() - match (v.push('!')) { - Ok(_) => { } - Err(_) => { c.assert(false); return 1 } - } - match (text.extend_vec(v)) { - Ok(_) => { } - Err(_) => { c.assert(false); return 1 } - } - match (text.to_string()) { - Ok(s) => { c.assert(s.eq("hi\nok!")) } - Err(_) => { c.assert(false); return 1 } - } - match (alloc.text_from("cap")) { - Ok(t2) => { - match (t2.to_string()) { - Ok(s) => { c.assert(s.eq("cap")) } - Err(_) => { c.assert(false); return 1 } - } - t2.free(alloc) - } - Err(_) => { c.assert(false); return 1 } - } - match ("owned".to_text(alloc)) { - Ok(t3) => { - match (t3.to_string()) { - Ok(s) => { c.assert(s.eq("owned")) } - Err(_) => { c.assert(false); return 1 } - } - t3.free(alloc) - } - Err(_) => { c.assert(false); return 1 } - } + v.push('!').ok() + text.extend_vec(v).ok() + c.assert(text.to_string().ok().eq("hi\nok!")) + let t2 = alloc.text_from("cap").ok() + c.assert(t2.to_string().ok().eq("cap")) + t2.free(alloc) + let t3 = "owned".to_text(alloc).ok() + c.assert(t3.to_string().ok().eq("owned")) + t3.free(alloc) alloc.vec_u8_free(v) text.free(alloc) c.println("text helpers ok") diff --git a/tests/programs/text_push_safe.cap b/tests/programs/text_push_safe.cap index 8e1868e..0333e81 100644 --- a/tests/programs/text_push_safe.cap 
+++ b/tests/programs/text_push_safe.cap @@ -7,42 +7,12 @@ pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let alloc = rc.mint_alloc_default() let text = string::text_new(alloc) - match text.push_byte('\x00') { - Ok(u) => { u } - Err(e) => { - c.assert(false) - text.free(alloc) - return 1 - } - } - match text.push_byte('\x07') { - Ok(u) => { u } - Err(e) => { - c.assert(false) - text.free(alloc) - return 1 - } - } + text.push_byte('\x00').ok() + text.push_byte('\x07').ok() let v = alloc.vec_u8_new() - match v.push('\t') { - Ok(u) => { u } - Err(e) => { - c.assert(false) - alloc.vec_u8_free(v) - text.free(alloc) - return 1 - } - } + v.push('\t').ok() let slice = v.as_slice() - match text.extend_slice(slice) { - Ok(u) => { u } - Err(e) => { - c.assert(false) - alloc.vec_u8_free(v) - text.free(alloc) - return 1 - } - } + text.extend_slice(slice).ok() let len = text.len() c.assert(len == 3) c.assert(!text.is_empty()) diff --git a/tests/programs/text_safe.cap b/tests/programs/text_safe.cap index f7bbbc3..552e443 100644 --- a/tests/programs/text_safe.cap +++ b/tests/programs/text_safe.cap @@ -7,18 +7,9 @@ pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let alloc = rc.mint_alloc_default() let text = string::text_new(alloc) - match (text.push_byte('a')) { - Ok(_) => { } - Err(_) => { c.assert(false); text.free(alloc); return 1 } - } - match (text.push_byte('b')) { - Ok(_) => { } - Err(_) => { c.assert(false); text.free(alloc); return 1 } - } - match (text.push_byte('c')) { - Ok(_) => { } - Err(_) => { c.assert(false); text.free(alloc); return 1 } - } + text.push_byte('a').ok() + text.push_byte('b').ok() + text.push_byte('c').ok() let slice = text.as_slice() let len = slice.len() c.assert(len == 3) diff --git a/tests/programs/text_to_string.cap b/tests/programs/text_to_string.cap index 28960b9..0307af4 100644 --- a/tests/programs/text_to_string.cap +++ b/tests/programs/text_to_string.cap @@ -7,18 +7,9 @@ pub fn main(rc: RootCap) -> i32 { let c = 
rc.mint_console() let alloc = rc.mint_alloc_default() let text = string::text_new(alloc) - match (text.push_byte('h')) { - Ok(_) => { } - Err(_) => { panic() } - } - match (text.push_byte('i')) { - Ok(_) => { } - Err(_) => { panic() } - } - let s = match (text.to_string()) { - Ok(v) => { v } - Err(_) => { panic() } - } + text.push_byte('h').ok() + text.push_byte('i').ok() + let s = text.to_string().ok() c.println(s) text.free(alloc) return 0 diff --git a/tests/programs/vec_custom_eq.cap b/tests/programs/vec_custom_eq.cap index b726e3c..c8646ff 100644 --- a/tests/programs/vec_custom_eq.cap +++ b/tests/programs/vec_custom_eq.cap @@ -19,22 +19,10 @@ pub fn main(rc: RootCap) -> i32 { // Test Vec with custom Eq let v = alloc.vec_new() - match (v.push(Entry { key: 1, value: 10 })) { - Ok(_) => { } - Err(_) => { c.println("push failed"); return 1 } - } - match (v.push(Entry { key: 2, value: 20 })) { - Ok(_) => { } - Err(_) => { c.println("push failed"); return 1 } - } - match (v.push(Entry { key: 2, value: 20 })) { - Ok(_) => { } - Err(_) => { c.println("push failed"); return 1 } - } - match (v.push(Entry { key: 3, value: 30 })) { - Ok(_) => { } - Err(_) => { c.println("push failed"); return 1 } - } + v.push(Entry { key: 1, value: 10 }).ok() + v.push(Entry { key: 2, value: 20 }).ok() + v.push(Entry { key: 2, value: 20 }).ok() + v.push(Entry { key: 3, value: 30 }).ok() // Test contains_eq let target = Entry { key: 2, value: 20 } diff --git a/tests/programs/vec_search_helpers.cap b/tests/programs/vec_search_helpers.cap index 9d1c074..e85953b 100644 --- a/tests/programs/vec_search_helpers.cap +++ b/tests/programs/vec_search_helpers.cap @@ -6,18 +6,9 @@ pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let alloc = rc.mint_alloc_default() let v = alloc.vec_i32_new() - match (v.push(1)) { - Ok(_) => { } - Err(_) => { c.assert(false); return 1 } - } - match (v.push(2)) { - Ok(_) => { } - Err(_) => { c.assert(false); return 1 } - } - match (v.push(1)) { - Ok(_) => { } - 
Err(_) => { c.assert(false); return 1 } - } + v.push(1).ok() + v.push(2).ok() + v.push(1).ok() c.assert(v.contains(2)) c.assert(!v.contains(3)) c.assert(v.count(1) == 2) @@ -37,19 +28,17 @@ pub fn main(rc: RootCap) -> i32 { Ok(x) => { c.assert(x == 1) } Err(_) => { c.assert(false); return 1 } } - let bytes = alloc.vec_u8_new() - match (bytes.push('h')) { - Ok(_) => { } - Err(_) => { c.assert(false); alloc.vec_i32_free(v); alloc.vec_u8_free(bytes); return 1 } - } - match (bytes.push('i')) { - Ok(_) => { } - Err(_) => { c.assert(false); alloc.vec_i32_free(v); alloc.vec_u8_free(bytes); return 1 } - } - match (bytes.to_string()) { - Ok(s) => { c.assert(s.eq("hi")) } - Err(_) => { c.assert(false); alloc.vec_i32_free(v); alloc.vec_u8_free(bytes); return 1 } + v.swap(0, 1).ok() + match (v.first()) { + Ok(x) => { c.assert(x == 2) } + Err(_) => { c.assert(false); return 1 } } + c.assert(v.get_or(1, 99) == 1) + c.assert(v.get_or(99, 77) == 77) + let bytes = alloc.vec_u8_new() + bytes.push('h').ok() + bytes.push('i').ok() + c.assert(bytes.to_string().ok().eq("hi")) alloc.vec_i32_free(v) alloc.vec_u8_free(bytes) c.println("vec search ok") diff --git a/tests/programs/wc_file.cap b/tests/programs/wc_file.cap index 35aadec..983dfe1 100644 --- a/tests/programs/wc_file.cap +++ b/tests/programs/wc_file.cap @@ -4,21 +4,10 @@ use sys::system use sys::console use sys::buffer -fn count_text(c: Console, alloc: Alloc, s: string) -> i32 { - let buf = s.bytes() - let bytes = buf.len() - let words_vec = s.split_whitespace(alloc) - let words = words_vec.len() - alloc.vec_string_free(words_vec) - let i: i32 = 0 - let lines: i32 = 0 - while (i < bytes) { - let b = buf.at(i) - if (b == '\n') { - lines = lines + 1 - } - i = i + 1 - } +fn count_text(c: Console, s: string) -> i32 { + let bytes = s.len() + let words = s.count_words_ascii() + let lines = s.count_newlines() c.assert(lines == 8 && words == 19 && bytes == 126) c.print_i32(lines) c.print(" ") @@ -38,22 +27,15 @@ pub fn main(rc: RootCap) 
-> i32 { return 1 } - let code = match args.at(1) { - Ok(path) => { - let alloc = rc.mint_alloc_default() - let rfs = rc.mint_readfs("./") - match rfs.read_to_string(alloc, path) { - Ok(s) => { - count_text(c, alloc, s) - } - Err(e) => { - c.println("read err") - 1 - } - } + let path = args.at(1).ok() + let alloc = rc.mint_alloc_default() + let rfs = rc.mint_readfs("./") + let code = match rfs.read_to_string(alloc, path) { + Ok(s) => { + count_text(c, s) } Err(e) => { - c.println("arg err") + c.println("read err") 1 } } diff --git a/tests/programs/wc_stdin.cap b/tests/programs/wc_stdin.cap index d2e491f..892544f 100644 --- a/tests/programs/wc_stdin.cap +++ b/tests/programs/wc_stdin.cap @@ -8,20 +8,9 @@ pub fn main(rc: RootCap) -> i32 { let alloc = rc.mint_alloc_default() let code = match stdin.read_to_string(alloc) { Ok(s) => { - let buf = s.bytes() - let bytes = buf.len() - let words_vec = s.split_whitespace(alloc) - let words = words_vec.len() - alloc.vec_string_free(words_vec) - let i: i32 = 0 - let lines: i32 = 0 - while (i < bytes) { - let b = buf.at(i) - if (b == '\n') { - lines = lines + 1 - } - i = i + 1 - } + let bytes = s.len() + let words = s.count_words_ascii() + let lines = s.count_newlines() c.assert(lines == 0 && words == 0 && bytes == 0) c.print_i32(lines) c.print(" ") From 056b78411a0d8bd4f97f23785f2f24070371ed54 Mon Sep 17 00:00:00 2001 From: Jordan Mecom Date: Mon, 23 Mar 2026 11:24:06 -0700 Subject: [PATCH 02/17] Simplify defaults, paths, and let-else --- capc/src/codegen/emit.rs | 482 +++++++++++++---------- capc/src/codegen/intrinsics.rs | 14 + capc/src/parser.rs | 279 +++++++++---- capc/src/typeck/check.rs | 296 ++++++++------ capc/tests/run.rs | 245 +++++++----- capc/tests/typecheck.rs | 40 +- examples/config_loader/config_loader.cap | 18 +- examples/how_to_string/how_to_string.cap | 24 +- examples/http_server/http_server.cap | 113 ++---- examples/sort/sort.cap | 17 +- examples/uniq/uniq.cap | 12 +- runtime/src/lib.rs | 56 ++- 
stdlib/sys/args.cap | 5 + stdlib/sys/buffer.cap | 25 ++ stdlib/sys/fs.cap | 40 ++ stdlib/sys/net.cap | 10 + stdlib/sys/path.cap | 136 +++++++ stdlib/sys/stdin.cap | 5 + stdlib/sys/string.cap | 122 ++++++ tests/programs/args_safe.cap | 14 +- tests/programs/fs_helpers.cap | 13 +- tests/programs/let_else.cap | 51 +++ tests/programs/path_helpers.cap | 31 ++ tests/programs/string_helpers.cap | 14 + tests/programs/vec_search_helpers.cap | 5 +- tests/programs/wc_file.cap | 9 +- tests/programs/wc_stdin.cap | 3 +- 27 files changed, 1362 insertions(+), 717 deletions(-) create mode 100644 stdlib/sys/path.cap create mode 100644 tests/programs/let_else.cap create mode 100644 tests/programs/path_helpers.cap diff --git a/capc/src/codegen/emit.rs b/capc/src/codegen/emit.rs index e811da5..6b74862 100644 --- a/capc/src/codegen/emit.rs +++ b/capc/src/codegen/emit.rs @@ -14,13 +14,13 @@ use cranelift_object::ObjectModule; use crate::abi::AbiType; use crate::ast::{BinaryOp, Literal, UnaryOp}; -use super::{ - CodegenError, EnumIndex, Flow, FnInfo, LocalValue, ResultKind, ResultShape, - StructLayout, StructLayoutIndex, TypeLayout, ValueRepr, -}; use super::abi_quirks; use super::layout::{align_to, resolve_struct_layout, type_layout_from_index}; use super::sig_to_clif; +use super::{ + CodegenError, EnumIndex, Flow, FnInfo, LocalValue, ResultKind, ResultShape, StructLayout, + StructLayoutIndex, TypeLayout, ValueRepr, +}; /// Target blocks for break/continue inside a loop. 
#[derive(Copy, Clone, Debug)] @@ -283,12 +283,8 @@ fn emit_hir_stmt_inner( ir::StackSlotKind::ExplicitSlot, slot_size, )); - let base_ptr = aligned_stack_addr( - builder, - slot, - align, - module.isa().pointer_type(), - ); + let base_ptr = + aligned_stack_addr(builder, slot, align, module.isa().pointer_type()); store_value_by_ty( builder, base_ptr, @@ -315,12 +311,8 @@ fn emit_hir_stmt_inner( ir::StackSlotKind::ExplicitSlot, slot_size, )); - let base_ptr = aligned_stack_addr( - builder, - slot, - align, - module.isa().pointer_type(), - ); + let base_ptr = + aligned_stack_addr(builder, slot, align, module.isa().pointer_type()); store_value_by_ty( builder, base_ptr, @@ -366,12 +358,8 @@ fn emit_hir_stmt_inner( builder.ins().stack_store(val, *slot, 0); } LocalValue::StructSlot(slot, ty, align) => { - let base_ptr = aligned_stack_addr( - builder, - *slot, - *align, - module.isa().pointer_type(), - ); + let base_ptr = + aligned_stack_addr(builder, *slot, *align, module.isa().pointer_type()); store_value_by_ty( builder, base_ptr, @@ -819,7 +807,10 @@ fn emit_hir_stmt_inner( // Load loop variable and compare with end let current_val = builder.ins().stack_load(ir::types::I32, loop_var_slot, 0); - let cond = builder.ins().icmp(ir::condcodes::IntCC::SignedLessThan, current_val, end_i32); + let cond = + builder + .ins() + .icmp(ir::condcodes::IntCC::SignedLessThan, current_val, end_i32); builder.ins().brif(cond, body_block, &[], exit_block, &[]); builder.switch_to_block(body_block); @@ -910,7 +901,8 @@ fn emit_hir_stmt_inner( return Ok(Flow::Terminated); } HirStmt::Continue(_) => { - let target = loop_target.expect("continue outside of loop (should be caught by typeck)"); + let target = + loop_target.expect("continue outside of loop (should be caught by typeck)"); defer_stack.emit_until_loop_and_pop( builder, locals, @@ -1098,15 +1090,12 @@ fn emit_hir_expr_inner( if let crate::typeck::Ty::Path(ty_name, _) = &variant.enum_ty.ty { if let Some(layout) = 
enum_index.layouts.get(ty_name) { - let variants = enum_index - .variants - .get(ty_name) - .ok_or_else(|| { - CodegenError::Codegen(format!( - "unknown enum variant: {}.{}", - ty_name, variant.variant_name - )) - })?; + let variants = enum_index.variants.get(ty_name).ok_or_else(|| { + CodegenError::Codegen(format!( + "unknown enum variant: {}.{}", + ty_name, variant.variant_name + )) + })?; let discr = variants.get(&variant.variant_name).ok_or_else(|| { CodegenError::Codegen(format!( "unknown enum variant: {}.{}", @@ -1158,7 +1147,12 @@ fn emit_hir_expr_inner( module, )?; } else { - zero_bytes(builder, base_ptr, layout.payload_offset, layout.payload_size); + zero_bytes( + builder, + base_ptr, + layout.payload_offset, + layout.payload_size, + ); } } } @@ -1169,10 +1163,12 @@ fn emit_hir_expr_inner( // For non-Result enums or variants without payload, emit just the discriminant let qualified = match &variant.enum_ty.ty { crate::typeck::Ty::Path(path, _) => path.clone(), - _ => return Err(CodegenError::Codegen(format!( - "enum variant has non-path type: {:?}", - variant.enum_ty.ty - ))), + _ => { + return Err(CodegenError::Codegen(format!( + "enum variant has non-path type: {:?}", + variant.enum_ty.ty + ))) + } }; if let Some(variants) = enum_index.variants.get(&qualified) { if let Some(&discr) = variants.get(&variant.variant_name) { @@ -1212,7 +1208,9 @@ fn emit_hir_expr_inner( builder.switch_to_block(err_block); let ret_value = match &try_expr.ret_ty.ty { - crate::typeck::Ty::Path(name, args) if name == "sys.result.Result" && args.len() == 2 => { + crate::typeck::Ty::Path(name, args) + if name == "sys.result.Result" && args.len() == 2 => + { let AbiType::Result(ok_abi, _err_abi) = &try_expr.ret_ty.abi else { return Err(CodegenError::Unsupported( abi_quirks::result_abi_mismatch_error().to_string(), @@ -1314,9 +1312,11 @@ fn emit_hir_expr_inner( HirExpr::Call(call) => { // HIR calls are already fully resolved - no path resolution needed! 
let (module_path, func_name, _symbol) = match &call.callee { - crate::hir::ResolvedCallee::Function { module, name, symbol } => { - (module.clone(), name.clone(), symbol.clone()) - } + crate::hir::ResolvedCallee::Function { + module, + name, + symbol, + } => (module.clone(), name.clone(), symbol.clone()), crate::hir::ResolvedCallee::TraitMethod { .. } => { return Err(CodegenError::Unsupported( "trait methods must be resolved before codegen".to_string(), @@ -1448,14 +1448,13 @@ fn emit_hir_expr_inner( } else { if let Some((ok_hir, _)) = &result_payloads { if is_non_opaque_struct_type(ok_hir, struct_layouts) { - let layout = resolve_struct_layout( - &ok_hir.ty, - "", - &struct_layouts.layouts, - ) - .ok_or_else(|| { - CodegenError::Unsupported("struct layout missing".to_string()) - })?; + let layout = + resolve_struct_layout(&ok_hir.ty, "", &struct_layouts.layouts) + .ok_or_else(|| { + CodegenError::Unsupported( + "struct layout missing".to_string(), + ) + })?; let align = layout.align.max(1); let slot_size = layout.size.max(1).saturating_add(align - 1); let slot = builder.create_sized_stack_slot(ir::StackSlotData::new( @@ -1495,14 +1494,13 @@ fn emit_hir_expr_inner( } else { if let Some((_, err_hir)) = &result_payloads { if is_non_opaque_struct_type(err_hir, struct_layouts) { - let layout = resolve_struct_layout( - &err_hir.ty, - "", - &struct_layouts.layouts, - ) - .ok_or_else(|| { - CodegenError::Unsupported("struct layout missing".to_string()) - })?; + let layout = + resolve_struct_layout(&err_hir.ty, "", &struct_layouts.layouts) + .ok_or_else(|| { + CodegenError::Unsupported( + "struct layout missing".to_string(), + ) + })?; let align = layout.align.max(1); let slot_size = layout.size.max(1).saturating_add(align - 1); let slot = builder.create_sized_stack_slot(ir::StackSlotData::new( @@ -1673,21 +1671,21 @@ fn emit_hir_expr_inner( )?; match (&binary.op, lhs, rhs) { - (BinaryOp::Add, ValueRepr::Single(a), ValueRepr::Single(b)) => { - 
Ok(ValueRepr::Single(emit_checked_add(builder, a, b, &binary.ty)?)) - } - (BinaryOp::Sub, ValueRepr::Single(a), ValueRepr::Single(b)) => { - Ok(ValueRepr::Single(emit_checked_sub(builder, a, b, &binary.ty)?)) - } - (BinaryOp::Mul, ValueRepr::Single(a), ValueRepr::Single(b)) => { - Ok(ValueRepr::Single(emit_checked_mul(builder, a, b, &binary.ty)?)) - } - (BinaryOp::Div, ValueRepr::Single(a), ValueRepr::Single(b)) => { - Ok(ValueRepr::Single(emit_checked_div(builder, a, b, &binary.ty)?)) - } - (BinaryOp::Mod, ValueRepr::Single(a), ValueRepr::Single(b)) => { - Ok(ValueRepr::Single(emit_checked_mod(builder, a, b, &binary.ty)?)) - } + (BinaryOp::Add, ValueRepr::Single(a), ValueRepr::Single(b)) => Ok( + ValueRepr::Single(emit_checked_add(builder, a, b, &binary.ty)?), + ), + (BinaryOp::Sub, ValueRepr::Single(a), ValueRepr::Single(b)) => Ok( + ValueRepr::Single(emit_checked_sub(builder, a, b, &binary.ty)?), + ), + (BinaryOp::Mul, ValueRepr::Single(a), ValueRepr::Single(b)) => Ok( + ValueRepr::Single(emit_checked_mul(builder, a, b, &binary.ty)?), + ), + (BinaryOp::Div, ValueRepr::Single(a), ValueRepr::Single(b)) => Ok( + ValueRepr::Single(emit_checked_div(builder, a, b, &binary.ty)?), + ), + (BinaryOp::Mod, ValueRepr::Single(a), ValueRepr::Single(b)) => Ok( + ValueRepr::Single(emit_checked_mod(builder, a, b, &binary.ty)?), + ), (BinaryOp::BitAnd, ValueRepr::Single(a), ValueRepr::Single(b)) => { Ok(ValueRepr::Single(builder.ins().band(a, b))) } @@ -1732,12 +1730,20 @@ fn emit_hir_expr_inner( Ok(ValueRepr::Single(bool_to_i8(builder, cmp))) } (BinaryOp::Lt, ValueRepr::Single(a), ValueRepr::Single(b)) => { - let cmp = builder.ins().icmp(cmp_cc(&binary.left, IntCC::SignedLessThan, IntCC::UnsignedLessThan), a, b); + let cmp = builder.ins().icmp( + cmp_cc(&binary.left, IntCC::SignedLessThan, IntCC::UnsignedLessThan), + a, + b, + ); Ok(ValueRepr::Single(bool_to_i8(builder, cmp))) } (BinaryOp::Lte, ValueRepr::Single(a), ValueRepr::Single(b)) => { let cmp = builder.ins().icmp( - 
cmp_cc(&binary.left, IntCC::SignedLessThanOrEqual, IntCC::UnsignedLessThanOrEqual), + cmp_cc( + &binary.left, + IntCC::SignedLessThanOrEqual, + IntCC::UnsignedLessThanOrEqual, + ), a, b, ); @@ -1745,7 +1751,11 @@ fn emit_hir_expr_inner( } (BinaryOp::Gt, ValueRepr::Single(a), ValueRepr::Single(b)) => { let cmp = builder.ins().icmp( - cmp_cc(&binary.left, IntCC::SignedGreaterThan, IntCC::UnsignedGreaterThan), + cmp_cc( + &binary.left, + IntCC::SignedGreaterThan, + IntCC::UnsignedGreaterThan, + ), a, b, ); @@ -1851,19 +1861,17 @@ fn emit_hir_expr_inner( module, data_counter, ), - HirExpr::StructLiteral(literal) => { - emit_hir_struct_literal( - builder, - literal, - locals, - fn_map, - enum_index, - struct_layouts, - return_lowering, - module, - data_counter, - ) - } + HirExpr::StructLiteral(literal) => emit_hir_struct_literal( + builder, + literal, + locals, + fn_map, + enum_index, + struct_layouts, + return_lowering, + module, + data_counter, + ), } } @@ -2068,16 +2076,13 @@ fn emit_hir_index( )) } }; - let layout = resolve_struct_layout(object_ty, "", &struct_layouts.layouts).ok_or_else( - || CodegenError::Unsupported("string layout missing".to_string()), - )?; + let layout = resolve_struct_layout(object_ty, "", &struct_layouts.layouts) + .ok_or_else(|| CodegenError::Unsupported("string layout missing".to_string()))?; let field = layout.fields.get("bytes").ok_or_else(|| { CodegenError::Unsupported("string.bytes field missing".to_string()) })?; - let slice_layout = - resolve_struct_layout(&field.ty.ty, "", &struct_layouts.layouts).ok_or_else( - || CodegenError::Unsupported("Slice layout missing".to_string()), - )?; + let slice_layout = resolve_struct_layout(&field.ty.ty, "", &struct_layouts.layouts) + .ok_or_else(|| CodegenError::Unsupported("Slice layout missing".to_string()))?; let addr = ptr_add(builder, base_ptr, field.offset); let result = emit_slice_index(builder, module, addr, slice_layout, index_val)?; Ok(ValueRepr::Single(result)) @@ -2092,10 +2097,8 @@ fn 
emit_hir_index( )) } }; - let layout = - resolve_struct_layout(object_ty, "", &struct_layouts.layouts).ok_or_else(|| { - CodegenError::Unsupported("Slice layout missing".to_string()) - })?; + let layout = resolve_struct_layout(object_ty, "", &struct_layouts.layouts) + .ok_or_else(|| CodegenError::Unsupported("Slice layout missing".to_string()))?; let result = emit_slice_index(builder, module, base_ptr, layout, index_val)?; Ok(ValueRepr::Single(result)) } @@ -2109,10 +2112,8 @@ fn emit_hir_index( )) } }; - let layout = - resolve_struct_layout(object_ty, "", &struct_layouts.layouts).ok_or_else(|| { - CodegenError::Unsupported("MutSlice layout missing".to_string()) - })?; + let layout = resolve_struct_layout(object_ty, "", &struct_layouts.layouts) + .ok_or_else(|| CodegenError::Unsupported("MutSlice layout missing".to_string()))?; let result = emit_slice_index(builder, module, base_ptr, layout, index_val)?; Ok(ValueRepr::Single(result)) } @@ -2150,9 +2151,7 @@ fn emit_slice_index( let idx_nonneg = builder .ins() .icmp(IntCC::SignedGreaterThanOrEqual, index, zero_i32); - let idx_lt = builder - .ins() - .icmp(IntCC::SignedLessThan, index, len_val); + let idx_lt = builder.ins().icmp(IntCC::SignedLessThan, index, len_val); let ptr_nonnull = builder.ins().icmp_imm(IntCC::NotEqual, raw_ptr, 0); let in_bounds = builder.ins().band(idx_nonneg, idx_lt); let in_bounds = builder.ins().band(in_bounds, ptr_nonnull); @@ -2167,9 +2166,7 @@ fn emit_slice_index( builder.seal_block(ok_block); let idx_ptr = builder.ins().uextend(ptr_ty, index); let addr = builder.ins().iadd(raw_ptr, idx_ptr); - let value = builder - .ins() - .load(ir::types::I8, MemFlags::new(), addr, 0); + let value = builder.ins().load(ir::types::I8, MemFlags::new(), addr, 0); builder.ins().jump(done_block, &[value]); builder.switch_to_block(err_block); @@ -2324,10 +2321,7 @@ fn emit_hir_field_access( }; let layout = resolve_struct_layout(struct_ty, "", &struct_layouts.layouts).ok_or_else(|| { - 
CodegenError::Unsupported(format!( - "struct layout missing for {:?}", - struct_ty - )) + CodegenError::Unsupported(format!("struct layout missing for {:?}", struct_ty)) })?; let Some(field_layout) = layout.fields.get(&field_access.field_name) else { return Err(CodegenError::Codegen(format!( @@ -2367,10 +2361,7 @@ fn emit_hir_field_access( ) } -fn is_non_opaque_struct_type( - ty: &crate::hir::HirType, - struct_layouts: &StructLayoutIndex, -) -> bool { +fn is_non_opaque_struct_type(ty: &crate::hir::HirType, struct_layouts: &StructLayoutIndex) -> bool { resolve_struct_layout(&ty.ty, "", &struct_layouts.layouts).is_some() } @@ -2450,7 +2441,9 @@ fn store_value_by_ty( builder.ins().store(MemFlags::new(), val, addr, 0); Ok(()) } - BuiltinType::I64 => Err(CodegenError::Unsupported("i64 not yet supported".to_string())), + BuiltinType::I64 => Err(CodegenError::Unsupported( + "i64 not yet supported".to_string(), + )), }, Ty::Ptr(_) => { let ValueRepr::Single(val) = value else { @@ -2580,7 +2573,12 @@ fn store_value_by_tykind( return Err(CodegenError::Unsupported("store value".to_string())); }; match ty { - AbiType::I32 | AbiType::U32 | AbiType::U8 | AbiType::Bool | AbiType::Handle | AbiType::Ptr => { + AbiType::I32 + | AbiType::U32 + | AbiType::U8 + | AbiType::Bool + | AbiType::Handle + | AbiType::Ptr => { builder.ins().store(MemFlags::new(), val, addr, 0); Ok(()) } @@ -2607,17 +2605,28 @@ fn load_value_by_ty( match &ty.ty { Ty::Builtin(b) => match b { BuiltinType::Unit | BuiltinType::Never => Ok(ValueRepr::Unit), - BuiltinType::I32 | BuiltinType::U32 => Ok(ValueRepr::Single( - builder.ins().load(ir::types::I32, MemFlags::new(), addr, 0), - )), - BuiltinType::U8 | BuiltinType::Bool => Ok(ValueRepr::Single( - builder.ins().load(ir::types::I8, MemFlags::new(), addr, 0), + BuiltinType::I32 | BuiltinType::U32 => Ok(ValueRepr::Single(builder.ins().load( + ir::types::I32, + MemFlags::new(), + addr, + 0, + ))), + BuiltinType::U8 | BuiltinType::Bool => 
Ok(ValueRepr::Single(builder.ins().load( + ir::types::I8, + MemFlags::new(), + addr, + 0, + ))), + BuiltinType::I64 => Err(CodegenError::Unsupported( + "i64 not yet supported".to_string(), )), - BuiltinType::I64 => Err(CodegenError::Unsupported("i64 not yet supported".to_string())), }, - Ty::Ptr(_) => Ok(ValueRepr::Single( - builder.ins().load(ptr_ty, MemFlags::new(), addr, 0), - )), + Ty::Ptr(_) => Ok(ValueRepr::Single(builder.ins().load( + ptr_ty, + MemFlags::new(), + addr, + 0, + ))), Ty::Ref(inner) => { let inner_ty = crate::hir::HirType { ty: *inner.clone(), @@ -2713,9 +2722,12 @@ fn load_value_by_tykind( ))) } }; - Ok(ValueRepr::Single( - builder.ins().load(load_ty, MemFlags::new(), addr, 0), - )) + Ok(ValueRepr::Single(builder.ins().load( + load_ty, + MemFlags::new(), + addr, + 0, + ))) } /// Pointer addition helper (byte offset). @@ -3103,9 +3115,12 @@ fn emit_hir_match_expr( // If prefix terminated, we can't emit the final expression if prefix_terminated { - return Err(CodegenError::Unsupported( - "match expression arm terminated before final expression".to_string(), - )); + builder.seal_block(arm_block); + if is_last { + break; + } + current_block = next_block; + continue; } // Last statement should be an expression @@ -3175,7 +3190,9 @@ fn emit_hir_match_expr( .as_ref() .ok_or_else(|| CodegenError::Codegen("missing match result shape".to_string()))?; if values.len() != shape.types.len() { - return Err(CodegenError::Unsupported("mismatched match arm".to_string())); + return Err(CodegenError::Unsupported( + "mismatched match arm".to_string(), + )); } for (idx, val) in values.iter().enumerate() { builder.ins().stack_store(*val, shape.slots[idx], 0); @@ -3208,7 +3225,9 @@ fn emit_hir_match_expr( .ok_or_else(|| CodegenError::Codegen("missing match result value".to_string()))?; let mut loaded = Vec::new(); for (slot, ty) in shape.slots.iter().zip(shape.types.iter()) { - let addr = builder.ins().stack_addr(module.isa().pointer_type(), *slot, 0); + let addr = 
builder + .ins() + .stack_addr(module.isa().pointer_type(), *slot, 0); let val = builder.ins().load(*ty, MemFlags::new(), addr, 0); loaded.push(val); } @@ -3264,10 +3283,12 @@ fn hir_match_pattern_cond( // Get the discriminant value for this variant let qualified = match &match_ty.ty { crate::typeck::Ty::Path(path, _) => path.clone(), - _ => return Err(CodegenError::Codegen(format!( - "enum variant pattern has non-path type: {:?}", - match_ty.ty - ))), + _ => { + return Err(CodegenError::Codegen(format!( + "enum variant pattern has non-path type: {:?}", + match_ty.ty + ))) + } }; // Get the type of match_val to ensure consistent comparison @@ -3278,9 +3299,12 @@ fn hir_match_pattern_cond( let discr = match variant_name.as_str() { "Ok" => 0i64, "Err" => 1i64, - _ => return Err(CodegenError::Codegen(format!( - "unknown Result variant: {}", variant_name - ))), + _ => { + return Err(CodegenError::Codegen(format!( + "unknown Result variant: {}", + variant_name + ))) + } }; let rhs = builder.ins().iconst(val_ty, discr); return Ok(builder.ins().icmp(IntCC::Equal, match_val, rhs)); @@ -3323,7 +3347,11 @@ fn hir_bind_match_pattern_value( locals.insert(*local_id, store_local(builder, value.clone())); Ok(()) } - HirPattern::Variant { variant_name, binding, .. } => { + HirPattern::Variant { + variant_name, + binding, + .. 
+ } => { if let Some(local_id) = binding { // Bind the inner value based on variant if let Some((ok_val, err_val)) = result { @@ -3352,13 +3380,14 @@ fn hir_bind_match_pattern_value( "missing enum payload info".to_string(), )); }; - let payload_ty = payloads - .get(variant_name) - .cloned() - .flatten() - .ok_or_else(|| { - CodegenError::Unsupported("variant binding without payload".to_string()) - })?; + let payload_ty = + payloads + .get(variant_name) + .cloned() + .flatten() + .ok_or_else(|| { + CodegenError::Unsupported("variant binding without payload".to_string()) + })?; let ValueRepr::Single(base_ptr) = value else { return Err(CodegenError::Unsupported( "variant binding expects enum storage".to_string(), @@ -3424,22 +3453,22 @@ fn emit_string( let len = builder.ins().iconst(ir::types::I32, value.len() as i64); let string_ty = crate::typeck::Ty::Path("sys.string.string".to_string(), Vec::new()); - let layout = resolve_struct_layout(&string_ty, "", &struct_layouts.layouts).ok_or_else(|| { - CodegenError::Unsupported("string layout missing".to_string()) - })?; - let field = layout.fields.get("bytes").ok_or_else(|| { - CodegenError::Unsupported("string.bytes field missing".to_string()) - })?; - let slice_layout = - resolve_struct_layout(&field.ty.ty, "", &struct_layouts.layouts).ok_or_else(|| { - CodegenError::Unsupported("Slice layout missing".to_string()) - })?; - let slice_ptr = slice_layout.fields.get("ptr").ok_or_else(|| { - CodegenError::Unsupported("Slice.ptr field missing".to_string()) - })?; - let slice_len = slice_layout.fields.get("len").ok_or_else(|| { - CodegenError::Unsupported("Slice.len field missing".to_string()) - })?; + let layout = resolve_struct_layout(&string_ty, "", &struct_layouts.layouts) + .ok_or_else(|| CodegenError::Unsupported("string layout missing".to_string()))?; + let field = layout + .fields + .get("bytes") + .ok_or_else(|| CodegenError::Unsupported("string.bytes field missing".to_string()))?; + let slice_layout = 
resolve_struct_layout(&field.ty.ty, "", &struct_layouts.layouts) + .ok_or_else(|| CodegenError::Unsupported("Slice layout missing".to_string()))?; + let slice_ptr = slice_layout + .fields + .get("ptr") + .ok_or_else(|| CodegenError::Unsupported("Slice.ptr field missing".to_string()))?; + let slice_len = slice_layout + .fields + .get("len") + .ok_or_else(|| CodegenError::Unsupported("Slice.len field missing".to_string()))?; let ptr_ty = module.isa().pointer_type(); let align = layout.align.max(1); let slot_size = layout.size.max(1).saturating_add(align - 1); @@ -3451,9 +3480,7 @@ fn emit_string( let addr = ptr_add(builder, base_ptr, field.offset); let ptr_addr = ptr_add(builder, addr, slice_ptr.offset); let len_addr = ptr_add(builder, addr, slice_len.offset); - builder - .ins() - .store(MemFlags::new(), ptr, ptr_addr, 0); + builder.ins().store(MemFlags::new(), ptr, ptr_addr, 0); builder.ins().store(MemFlags::new(), len, len_addr, 0); Ok(ValueRepr::Single(base_ptr)) } @@ -3523,8 +3550,12 @@ fn zero_value_for_tykind( ) -> Result { match ty { AbiType::Unit => Ok(ValueRepr::Unit), - AbiType::I32 | AbiType::U32 => Ok(ValueRepr::Single(builder.ins().iconst(ir::types::I32, 0))), - AbiType::U8 | AbiType::Bool => Ok(ValueRepr::Single(builder.ins().iconst(ir::types::I8, 0))), + AbiType::I32 | AbiType::U32 => { + Ok(ValueRepr::Single(builder.ins().iconst(ir::types::I32, 0))) + } + AbiType::U8 | AbiType::Bool => { + Ok(ValueRepr::Single(builder.ins().iconst(ir::types::I8, 0))) + } AbiType::Handle => Ok(ValueRepr::Single(builder.ins().iconst(ir::types::I64, 0))), AbiType::Ptr => Ok(ValueRepr::Single(builder.ins().iconst(ptr_ty, 0))), AbiType::Result(ok, err) => { @@ -3561,7 +3592,14 @@ fn zero_value_for_ty( ty: *inner.clone(), abi: ty.abi.clone(), }; - zero_value_for_ty(builder, &inner_ty, ptr_ty, enum_index, struct_layouts, module) + zero_value_for_ty( + builder, + &inner_ty, + ptr_ty, + enum_index, + struct_layouts, + module, + ) } Ty::Param(_) => 
Err(CodegenError::Unsupported( "generic type parameters must be monomorphized before codegen".to_string(), @@ -3584,8 +3622,14 @@ fn zero_value_for_ty( let tag = builder.ins().iconst(ir::types::I8, 0); let ok_val = zero_value_for_ty(builder, &ok_ty, ptr_ty, enum_index, struct_layouts, module)?; - let err_val = - zero_value_for_ty(builder, &err_ty, ptr_ty, enum_index, struct_layouts, module)?; + let err_val = zero_value_for_ty( + builder, + &err_ty, + ptr_ty, + enum_index, + struct_layouts, + module, + )?; return Ok(ValueRepr::Result { tag, ok: Box::new(ok_val), @@ -3641,7 +3685,12 @@ pub(super) fn value_from_params( ) -> Result { match ty { AbiType::Unit => Ok(ValueRepr::Unit), - AbiType::I32 | AbiType::U32 | AbiType::U8 | AbiType::Bool | AbiType::Handle | AbiType::Ptr => { + AbiType::I32 + | AbiType::U32 + | AbiType::U8 + | AbiType::Bool + | AbiType::Handle + | AbiType::Ptr => { let val = params[*idx]; *idx += 1; Ok(ValueRepr::Single(val)) @@ -3659,11 +3708,9 @@ pub(super) fn value_from_params( } // ResultOut is an ABI-level return type, not an input type. // They should never appear as function parameters. 
- AbiType::ResultOut(ok, err) => { - Err(CodegenError::Codegen(format!( - "ResultOut<{ok:?}, {err:?}> cannot be a parameter type (ABI return type only)" - ))) - } + AbiType::ResultOut(ok, err) => Err(CodegenError::Codegen(format!( + "ResultOut<{ok:?}, {err:?}> cannot be a parameter type (ABI return type only)" + ))), } } @@ -3676,7 +3723,12 @@ fn value_from_results( ) -> Result { match ty { AbiType::Unit => Ok(ValueRepr::Unit), - AbiType::I32 | AbiType::U32 | AbiType::U8 | AbiType::Bool | AbiType::Handle | AbiType::Ptr => { + AbiType::I32 + | AbiType::U32 + | AbiType::U8 + | AbiType::Bool + | AbiType::Handle + | AbiType::Ptr => { let val = results .get(*idx) .ok_or_else(|| CodegenError::Codegen("missing return value".to_string()))?; @@ -3730,20 +3782,20 @@ pub(super) fn emit_runtime_wrapper_call( || matches!(&ret_ty.ty, crate::typeck::Ty::Path(name, _) if enum_index.layouts.contains_key(name))) { let ptr_ty = module.isa().pointer_type(); - let (size, align) = if let Some(layout) = - resolve_struct_layout(&ret_ty.ty, "", &struct_layouts.layouts) - { - (layout.size, layout.align) - } else if let crate::typeck::Ty::Path(name, _) = &ret_ty.ty { - let layout = enum_index.layouts.get(name).ok_or_else(|| { - CodegenError::Unsupported("enum layout missing".to_string()) - })?; - (layout.size, layout.align) - } else { - return Err(CodegenError::Unsupported( - "sret return layout missing".to_string(), - )); - }; + let (size, align) = + if let Some(layout) = resolve_struct_layout(&ret_ty.ty, "", &struct_layouts.layouts) { + (layout.size, layout.align) + } else if let crate::typeck::Ty::Path(name, _) = &ret_ty.ty { + let layout = enum_index + .layouts + .get(name) + .ok_or_else(|| CodegenError::Unsupported("enum layout missing".to_string()))?; + (layout.size, layout.align) + } else { + return Err(CodegenError::Unsupported( + "sret return layout missing".to_string(), + )); + }; let align = align.max(1); let slot_size = aligned_slot_size(size, align); let slot = 
builder.create_sized_stack_slot(ir::StackSlotData::new( @@ -3811,10 +3863,7 @@ pub(super) fn emit_runtime_wrapper_call( module.isa().pointer_type(), module.isa().default_call_conv(), ); - let call_symbol = info - .runtime_symbol - .as_deref() - .unwrap_or(&info.symbol); + let call_symbol = info.runtime_symbol.as_deref().unwrap_or(&info.symbol); let func_id = module .declare_function(call_symbol, Linkage::Import, &sig) .map_err(|err| CodegenError::Codegen(err.to_string()))?; @@ -3826,8 +3875,8 @@ pub(super) fn emit_runtime_wrapper_call( let tag = results .get(0) .ok_or_else(|| CodegenError::Codegen("missing result tag".to_string()))?; - let (ok_slot, err_slot, ok_ty, err_ty) = result_out - .ok_or_else(|| CodegenError::Codegen("missing result slots".to_string()))?; + let (ok_slot, err_slot, ok_ty, err_ty) = + result_out.ok_or_else(|| CodegenError::Codegen("missing result slots".to_string()))?; let ok_val = if let Some(slot) = ok_slot { match slot { ResultOutSlot::Scalar(slot, ty, align) => { @@ -3908,15 +3957,11 @@ fn emit_unsafe_ptr_call( let layout = type_layout_from_index(&elem_hir, struct_layouts, ptr_ty)?; match base_name { "sizeof" => { - let size = builder - .ins() - .iconst(ir::types::I32, layout.size as i64); + let size = builder.ins().iconst(ir::types::I32, layout.size as i64); return Ok(Some(ValueRepr::Single(size))); } "alignof" => { - let align = builder - .ins() - .iconst(ir::types::I32, layout.align as i64); + let align = builder.ins().iconst(ir::types::I32, layout.align as i64); return Ok(Some(ValueRepr::Single(align))); } "ptr_cast" | "ptr_cast_u8" => { @@ -3969,11 +4014,9 @@ fn emit_unsafe_ptr_call( )) } }; - let is_null = builder.ins().icmp_imm( - ir::condcodes::IntCC::Equal, - base_ptr, - 0, - ); + let is_null = builder + .ins() + .icmp_imm(ir::condcodes::IntCC::Equal, base_ptr, 0); return Ok(Some(ValueRepr::Single(is_null))); } "ptr_add" => { @@ -4181,7 +4224,9 @@ fn emit_unsafe_ptr_call( .icmp(IntCC::SignedGreaterThan, count_val, zero_i32); let 
copy_block = builder.create_block(); let done_block = builder.create_block(); - builder.ins().brif(should_copy, copy_block, &[], done_block, &[]); + builder + .ins() + .brif(should_copy, copy_block, &[], done_block, &[]); builder.switch_to_block(copy_block); builder.seal_block(copy_block); @@ -4254,7 +4299,10 @@ fn hir_type_from_ty( } } }; - Ok(crate::hir::HirType { ty: ty.clone(), abi }) + Ok(crate::hir::HirType { + ty: ty.clone(), + abi, + }) } fn ensure_abi_sig_handled(info: &FnInfo) -> Result<(), CodegenError> { @@ -4276,8 +4324,8 @@ fn ensure_abi_sig_handled(info: &FnInfo) -> Result<(), CodegenError> { #[cfg(test)] mod tests { - use super::*; use super::super::{FnInfo, FnSig}; + use super::*; #[test] fn aligned_slot_size_adds_padding_for_alignment() { diff --git a/capc/src/codegen/intrinsics.rs b/capc/src/codegen/intrinsics.rs index 00740cc..8b0ec56 100644 --- a/capc/src/codegen/intrinsics.rs +++ b/capc/src/codegen/intrinsics.rs @@ -188,6 +188,10 @@ pub fn register_runtime_intrinsics(ptr_ty: Type) -> HashMap { params: vec![AbiType::Handle], ret: AbiType::Handle, }; + let mem_default_alloc = FnSig { + params: vec![], + ret: AbiType::Handle, + }; let system_mint_args = FnSig { params: vec![AbiType::Handle], ret: AbiType::Handle, @@ -492,6 +496,16 @@ pub fn register_runtime_intrinsics(ptr_ty: Type) -> HashMap { is_runtime: true, }, ); + map.insert( + "sys.buffer.default_alloc".to_string(), + FnInfo { + sig: mem_default_alloc, + abi_sig: None, + symbol: "capable_rt_default_alloc".to_string(), + runtime_symbol: None, + is_runtime: true, + }, + ); // === Console === map.insert( "sys.console.Console__println".to_string(), diff --git a/capc/src/parser.rs b/capc/src/parser.rs index 6acd952..5342511 100644 --- a/capc/src/parser.rs +++ b/capc/src/parser.rs @@ -122,45 +122,37 @@ impl Parser { match self.peek_kind() { Some(TokenKind::Pub) => { if is_pub { - return Err(self.error_current( - "duplicate `pub` modifier".to_string(), - )); + return 
Err(self.error_current("duplicate `pub` modifier".to_string())); } self.bump(); is_pub = true; } Some(TokenKind::Linear) => { if is_linear { - return Err(self.error_current( - "duplicate `linear` modifier".to_string(), - )); + return Err(self.error_current("duplicate `linear` modifier".to_string())); } self.bump(); is_linear = true; } Some(TokenKind::Copy) => { if is_copy { - return Err(self.error_current( - "duplicate `copy` modifier".to_string(), - )); + return Err(self.error_current("duplicate `copy` modifier".to_string())); } self.bump(); is_copy = true; } Some(TokenKind::Opaque) => { if is_opaque { - return Err(self.error_current( - "duplicate `opaque` modifier".to_string(), - )); + return Err(self.error_current("duplicate `opaque` modifier".to_string())); } self.bump(); is_opaque = true; } Some(TokenKind::Capability) => { if is_capability { - return Err(self.error_current( - "duplicate `capability` modifier".to_string(), - )); + return Err( + self.error_current("duplicate `capability` modifier".to_string()) + ); } self.bump(); is_capability = true; @@ -169,9 +161,9 @@ impl Parser { } } if is_linear && is_copy { - return Err(self.error_current( - "cannot combine `linear` and `copy` modifiers".to_string(), - )); + return Err( + self.error_current("cannot combine `linear` and `copy` modifiers".to_string()) + ); } if self.peek_kind() == Some(TokenKind::Extern) { if is_opaque || is_linear || is_copy || is_capability { @@ -179,7 +171,9 @@ impl Parser { "linear/copy/opaque/capability applies only to struct declarations".to_string(), )); } - return Ok(Item::ExternFunction(self.parse_extern_function(is_pub, doc)?)); + return Ok(Item::ExternFunction( + self.parse_extern_function(is_pub, doc)?, + )); } match self.peek_kind() { Some(TokenKind::Fn) => { @@ -219,9 +213,7 @@ impl Parser { } Some(TokenKind::Impl) => { if is_pub { - return Err(self.error_current( - "impl blocks cannot be marked pub".to_string(), - )); + return Err(self.error_current("impl blocks cannot be 
marked pub".to_string())); } if is_opaque || is_linear || is_copy || is_capability { return Err(self.error_current( @@ -231,9 +223,7 @@ impl Parser { } Ok(Item::Impl(self.parse_impl_block(doc)?)) } - Some(other) => Err(self.error_current(format!( - "expected item, found {other:?}" - ))), + Some(other) => Err(self.error_current(format!("expected item, found {other:?}"))), None => Err(self.error_current("unexpected end of input".to_string())), } } @@ -252,11 +242,7 @@ impl Parser { } path } - _ => { - return Err(self.error_current( - "trait impls require a trait name".to_string(), - )) - } + _ => return Err(self.error_current("trait impls require a trait name".to_string())), }; let target = self.parse_type()?; (Some(trait_path), target) @@ -269,9 +255,9 @@ impl Parser { let doc = self.take_doc_comments(); let is_pub = self.maybe_consume(TokenKind::Pub).is_some(); if self.peek_kind() != Some(TokenKind::Fn) { - return Err(self.error_current( - "expected method declaration in impl block".to_string(), - )); + return Err( + self.error_current("expected method declaration in impl block".to_string()) + ); } methods.push(self.parse_function(is_pub, doc)?); } @@ -286,7 +272,11 @@ impl Parser { }) } - fn parse_extern_function(&mut self, is_pub: bool, doc: Option) -> Result { + fn parse_extern_function( + &mut self, + is_pub: bool, + doc: Option, + ) -> Result { let start = self.expect(TokenKind::Extern)?.span.start; self.expect(TokenKind::Fn)?; let name = self.expect_ident()?; @@ -328,7 +318,11 @@ impl Parser { }) } - fn parse_function(&mut self, is_pub: bool, doc: Option) -> Result { + fn parse_function( + &mut self, + is_pub: bool, + doc: Option, + ) -> Result { let start = self.expect(TokenKind::Fn)?.span.start; let name = self.expect_ident()?; let type_params = self.parse_type_params()?; @@ -342,9 +336,7 @@ impl Parser { } else if param_name.item == "self" { None } else { - return Err(self.error_current( - "expected ':' after parameter name".to_string(), - )); + return 
Err(self.error_current("expected ':' after parameter name".to_string())); }; params.push(Param { name: param_name, @@ -384,9 +376,7 @@ impl Parser { while self.peek_kind() != Some(TokenKind::RBrace) { let doc = self.take_doc_comments(); if self.maybe_consume(TokenKind::Pub).is_some() { - return Err(self.error_current( - "trait methods cannot be marked pub".to_string(), - )); + return Err(self.error_current("trait methods cannot be marked pub".to_string())); } methods.push(self.parse_trait_method(doc)?); } @@ -415,9 +405,7 @@ impl Parser { } else if param_name.item == "self" { None } else { - return Err(self.error_current( - "expected ':' after parameter name".to_string(), - )); + return Err(self.error_current("expected ':' after parameter name".to_string())); }; params.push(Param { name: param_name, @@ -585,22 +573,43 @@ impl Parser { } fn parse_let(&mut self) -> Result { - let start = self.expect(TokenKind::Let)?.span.start; - let name = self.expect_ident()?; - let ty = if self.maybe_consume(TokenKind::Colon).is_some() { - Some(self.parse_type()?) - } else { - None - }; + let let_token = self.expect(TokenKind::Let)?; + let start = let_token.span.start; + if self.peek_kind() == Some(TokenKind::Ident) + && self + .peek_token(1) + .is_some_and(|t| matches!(t.kind, TokenKind::Colon | TokenKind::Eq)) + { + let name = self.expect_ident()?; + let ty = if self.maybe_consume(TokenKind::Colon).is_some() { + Some(self.parse_type()?) 
+ } else { + None + }; + self.expect(TokenKind::Eq)?; + let expr = self.parse_expr()?; + let end = self + .maybe_consume(TokenKind::Semi) + .map_or(expr.span().end, |t| t.span.end); + return Ok(LetStmt { + name, + ty, + expr, + span: Span::new(start, end), + }); + } + + let pattern = self.parse_pattern()?; self.expect(TokenKind::Eq)?; let expr = self.parse_expr()?; - let end = self.maybe_consume(TokenKind::Semi).map_or(expr.span().end, |t| t.span.end); - Ok(LetStmt { - name, - ty, - expr, - span: Span::new(start, end), - }) + self.expect(TokenKind::Else)?; + let else_block = self.parse_block()?; + let mut stmt = self.desugar_let_else(let_token.span, pattern, expr, else_block)?; + let end = self + .maybe_consume(TokenKind::Semi) + .map_or(stmt.span.end, |t| t.span.end); + stmt.span = Span::new(start, end); + Ok(stmt) } fn parse_assign(&mut self) -> Result { @@ -906,7 +915,11 @@ impl Parser { false } - fn parse_expr_bp(&mut self, min_bp: u8, allow_struct_literal: bool) -> Result { + fn parse_expr_bp( + &mut self, + min_bp: u8, + allow_struct_literal: bool, + ) -> Result { let mut lhs = self.parse_prefix(allow_struct_literal)?; loop { @@ -937,10 +950,17 @@ impl Parser { // Convert FieldAccess chain to Path self.field_access_to_path(fa)? 
} - _ => return Err(self.error_current("expected path before struct literal".to_string())), + _ => { + return Err(self.error_current( + "expected path before struct literal".to_string(), + )) + } }; path.segments.push(field); - path.span = Span::new(path.span.start, path.segments.last().unwrap().span.end); + path.span = Span::new( + path.span.start, + path.segments.last().unwrap().span.end, + ); lhs = self.parse_struct_literal(path, type_args)?; continue; } @@ -1232,9 +1252,9 @@ impl Parser { Ok(Expr::Path(path)) } } - Some(other) => Err(self.error_current(format!( - "unexpected token in expression: {other:?}" - ))), + Some(other) => { + Err(self.error_current(format!("unexpected token in expression: {other:?}"))) + } None => Err(self.error_current("unexpected end of input".to_string())), } } @@ -1319,7 +1339,12 @@ impl Parser { // Single segment - could be binding or enum variant // If lowercase, it's a binding; if uppercase, it's an enum variant let name = &path.segments[0].item; - if name.chars().next().map(|c| c.is_lowercase()).unwrap_or(false) { + if name + .chars() + .next() + .map(|c| c.is_lowercase()) + .unwrap_or(false) + { Ok(Pattern::Binding(path.segments.into_iter().next().unwrap())) } else { Ok(Pattern::Path(path)) @@ -1336,6 +1361,90 @@ impl Parser { } } + fn desugar_let_else( + &self, + let_span: Span, + pattern: Pattern, + expr: Expr, + else_block: Block, + ) -> Result { + let binding = self.pattern_binding_ident(&pattern).ok_or_else(|| { + self.error_at( + let_span, + "`let ... 
else` requires a binding pattern".to_string(), + ) + })?; + + let binding_expr = Expr::Path(Path { + segments: vec![binding.clone()], + span: binding.span, + }); + let ok_body = Block { + stmts: vec![Stmt::Expr(ExprStmt { + expr: binding_expr, + span: binding.span, + })], + span: binding.span, + }; + + let panic_ident = Spanned::new("panic".to_string(), else_block.span); + let panic_expr = Expr::Call(CallExpr { + callee: Box::new(Expr::Path(Path { + segments: vec![panic_ident], + span: else_block.span, + })), + type_args: Vec::new(), + args: Vec::new(), + span: else_block.span, + }); + let mut else_stmts = else_block.stmts; + else_stmts.push(Stmt::Expr(ExprStmt { + expr: panic_expr, + span: else_block.span, + })); + let else_body = Block { + stmts: else_stmts, + span: else_block.span, + }; + + let match_span = Span::new(let_span.start, else_block.span.end); + let match_expr = Expr::Match(MatchExpr { + expr: Box::new(expr), + arms: vec![ + MatchArm { + pattern, + body: ok_body, + span: match_span, + }, + MatchArm { + pattern: Pattern::Wildcard(else_block.span), + body: else_body, + span: match_span, + }, + ], + span: match_span, + match_span: let_span, + }); + + Ok(LetStmt { + name: binding, + ty: None, + expr: match_expr, + span: match_span, + }) + } + + fn pattern_binding_ident(&self, pattern: &Pattern) -> Option { + match pattern { + Pattern::Binding(ident) => Some(ident.clone()), + Pattern::Call { + binding: Some(ident), + .. 
+ } => Some(ident.clone()), + _ => None, + } + } + fn parse_path(&mut self) -> Result { let first = self.expect_ident()?; let start = first.span.start; @@ -1371,12 +1480,22 @@ impl Parser { } } - collect_segments(&field_access.object, &mut segments) - .ok_or_else(|| self.error_at(field_access.span, "expected path or field access".to_string()))?; + collect_segments(&field_access.object, &mut segments).ok_or_else(|| { + self.error_at( + field_access.span, + "expected path or field access".to_string(), + ) + })?; segments.push(field_access.field.clone()); - let start = segments.first().map(|s| s.span.start).unwrap_or(field_access.span.start); - let end = segments.last().map(|s| s.span.end).unwrap_or(field_access.span.end); + let start = segments + .first() + .map(|s| s.span.start) + .unwrap_or(field_access.span.start); + let end = segments + .last() + .map(|s| s.span.end) + .unwrap_or(field_access.span.end); Ok(Path { segments, @@ -1423,7 +1542,11 @@ impl Parser { Ok(Type::Path { path, args, span }) } - fn parse_struct_literal(&mut self, path: Path, type_args: Vec) -> Result { + fn parse_struct_literal( + &mut self, + path: Path, + type_args: Vec, + ) -> Result { let start = path.span.start; self.expect(TokenKind::LBrace)?; let mut fields = Vec::new(); @@ -1527,9 +1650,7 @@ impl Parser { fn expect(&mut self, kind: TokenKind) -> Result { match self.peek_kind() { Some(k) if k == kind => Ok(self.bump().unwrap()), - Some(other) => Err(self.error_current(format!( - "expected {kind:?}, found {other:?}" - ))), + Some(other) => Err(self.error_current(format!("expected {kind:?}, found {other:?}"))), None => Err(self.error_current("unexpected end of input".to_string())), } } @@ -1537,9 +1658,7 @@ impl Parser { fn expect_ident(&mut self) -> Result { match self.peek_kind() { Some(TokenKind::Ident) => Ok(to_ident(&self.bump().unwrap())), - Some(other) => Err(self.error_current(format!( - "expected identifier, found {other:?}" - ))), + Some(other) => 
Err(self.error_current(format!("expected identifier, found {other:?}"))), None => Err(self.error_current("unexpected end of input".to_string())), } } @@ -1665,8 +1784,12 @@ fn unescape_char(text: &str) -> Result { '\\' => b'\\', '\'' => b'\'', 'x' => { - let hi = chars.next().ok_or_else(|| "invalid hex escape".to_string())?; - let lo = chars.next().ok_or_else(|| "invalid hex escape".to_string())?; + let hi = chars + .next() + .ok_or_else(|| "invalid hex escape".to_string())?; + let lo = chars + .next() + .ok_or_else(|| "invalid hex escape".to_string())?; let hex = format!("{hi}{lo}"); u8::from_str_radix(&hex, 16).map_err(|_| "invalid hex escape".to_string())? } diff --git a/capc/src/typeck/check.rs b/capc/src/typeck/check.rs index 8c304cc..44ec8ec 100644 --- a/capc/src/typeck/check.rs +++ b/capc/src/typeck/check.rs @@ -28,11 +28,7 @@ impl<'a> TypeRecorder<'a> { } } -fn record_expr_type( - recorder: &mut TypeRecorder, - expr: &Expr, - ty: Ty, -) -> Result { +fn record_expr_type(recorder: &mut TypeRecorder, expr: &Expr, ty: Ty) -> Result { recorder.record(expr, &ty); Ok(ty) } @@ -50,10 +46,16 @@ fn infer_enum_args(template: &Ty, actual: &Ty, inferred: &mut HashMap matches!(actual, Ty::Builtin(other) if other == b), - Ty::Ptr(inner) => matches!(actual, Ty::Ptr(other) if infer_enum_args(inner, other, inferred)), - Ty::Ref(inner) => matches!(actual, Ty::Ref(other) if infer_enum_args(inner, other, inferred)), + Ty::Ptr(inner) => { + matches!(actual, Ty::Ptr(other) if infer_enum_args(inner, other, inferred)) + } + Ty::Ref(inner) => { + matches!(actual, Ty::Ref(other) if infer_enum_args(inner, other, inferred)) + } Ty::Path(name, args) => match actual { - Ty::Path(other_name, other_args) if other_name == name && args.len() == other_args.len() => { + Ty::Path(other_name, other_args) + if other_name == name && args.len() == other_args.len() => + { args.iter() .zip(other_args.iter()) .all(|(a, b)| infer_enum_args(a, b, inferred)) @@ -102,8 +104,16 @@ fn 
apply_enum_type_args(ty: &Ty, type_params: &[String], type_args: &[Ty]) -> Ty ty.clone() } Ty::Builtin(_) => ty.clone(), - Ty::Ptr(inner) => Ty::Ptr(Box::new(apply_enum_type_args(inner, type_params, type_args))), - Ty::Ref(inner) => Ty::Ref(Box::new(apply_enum_type_args(inner, type_params, type_args))), + Ty::Ptr(inner) => Ty::Ptr(Box::new(apply_enum_type_args( + inner, + type_params, + type_args, + ))), + Ty::Ref(inner) => Ty::Ref(Box::new(apply_enum_type_args( + inner, + type_params, + type_args, + ))), Ty::Path(name, args) => Ty::Path( name.clone(), args.iter() @@ -113,7 +123,12 @@ fn apply_enum_type_args(ty: &Ty, type_params: &[String], type_args: &[Ty]) -> Ty } } -fn enum_payload_matches(payload: &Ty, arg_ty: &Ty, type_params: &[String], type_args: &[Ty]) -> bool { +fn enum_payload_matches( + payload: &Ty, + arg_ty: &Ty, + type_params: &[String], + type_args: &[Ty], +) -> bool { let expected = apply_enum_type_args(payload, type_params, type_args); ty_equivalent_for_params(&expected, arg_ty, type_params) } @@ -237,10 +252,7 @@ fn enforce_vec_method_constraints( } /// Safe packages cannot mention externs or raw pointer types anywhere. -pub(super) fn validate_package_safety( - module: &Module, - is_stdlib: bool, -) -> Result<(), TypeError> { +pub(super) fn validate_package_safety(module: &Module, is_stdlib: bool) -> Result<(), TypeError> { if module.package != PackageSafety::Safe { return Ok(()); } @@ -479,11 +491,7 @@ fn block_contains_ptr(block: &Block) -> Option { if let Some(span) = block_contains_ptr(&if_stmt.then_block) { return Some(span); } - if let Some(span) = if_stmt - .else_block - .as_ref() - .and_then(block_contains_ptr) - { + if let Some(span) = if_stmt.else_block.as_ref().and_then(block_contains_ptr) { return Some(span); } } @@ -543,7 +551,11 @@ fn block_ends_with_return(block: &Block) -> bool { /// Check if a match expression is syntactically total (all arms end with return). 
fn match_is_total(match_expr: &MatchExpr) -> bool { - !match_expr.arms.is_empty() && match_expr.arms.iter().all(|arm| block_ends_with_return(&arm.body)) + !match_expr.arms.is_empty() + && match_expr + .arms + .iter() + .all(|arm| block_ends_with_return(&arm.body)) } /// Type-check a function body, including move/linear rules. @@ -676,7 +688,10 @@ fn check_stmt( Stmt::Let(let_stmt) => { if scopes.contains(&let_stmt.name.item) { return Err(TypeError::new( - format!("variable shadowing is not allowed: `{}`", let_stmt.name.item), + format!( + "variable shadowing is not allowed: `{}`", + let_stmt.name.item + ), let_stmt.name.span, )); } @@ -727,7 +742,7 @@ fn check_stmt( } } let annot_ty = lower_type(annot, use_map, stdlib, type_params)?; - validate_type_args( &annot_ty, struct_map, enum_map, annot.span())?; + validate_type_args(&annot_ty, struct_map, enum_map, annot.span())?; let matches_ref = if let Ty::Ref(inner) = &annot_ty { &expr_ty == inner.as_ref() || &expr_ty == &annot_ty } else { @@ -809,9 +824,7 @@ fn check_stmt( )?; if expr_ty != existing && !matches!(expr_ty, Ty::Builtin(BuiltinType::Never)) { return Err(TypeError::new( - format!( - "assignment type mismatch: expected {existing:?}, found {expr_ty:?}" - ), + format!("assignment type mismatch: expected {existing:?}, found {expr_ty:?}"), assign.span, )); } @@ -887,9 +900,18 @@ fn check_stmt( )); } let depth = scopes.current_loop_depth().ok_or_else(|| { - TypeError::new("break statement outside of loop".to_string(), break_stmt.span) + TypeError::new( + "break statement outside of loop".to_string(), + break_stmt.span, + ) })?; - ensure_linear_scopes_consumed_from(scopes, depth, struct_map, enum_map, break_stmt.span)?; + ensure_linear_scopes_consumed_from( + scopes, + depth, + struct_map, + enum_map, + break_stmt.span, + )?; } Stmt::Continue(continue_stmt) => { if !in_loop { @@ -899,7 +921,10 @@ fn check_stmt( )); } let depth = scopes.current_loop_depth().ok_or_else(|| { - TypeError::new("continue statement 
outside of loop".to_string(), continue_stmt.span) + TypeError::new( + "continue statement outside of loop".to_string(), + continue_stmt.span, + ) })?; ensure_linear_scopes_consumed_from( scopes, @@ -1086,10 +1111,7 @@ fn check_stmt( // Create body scope with loop variable bound let mut body_scopes = scopes.clone(); body_scopes.push_scope(); - body_scopes.insert_local( - for_stmt.var.item.clone(), - Ty::Builtin(BuiltinType::I32), - ); + body_scopes.insert_local(for_stmt.var.item.clone(), Ty::Builtin(BuiltinType::I32)); body_scopes.push_loop(); check_block( @@ -1114,13 +1136,7 @@ fn check_stmt( // Pop the loop variable scope before checking affine states body_scopes.pop_scope(); - ensure_affine_states_match( - scopes, - &body_scopes, - struct_map, - enum_map, - for_stmt.span, - )?; + ensure_affine_states_match(scopes, &body_scopes, struct_map, enum_map, for_stmt.span)?; } Stmt::Expr(expr_stmt) => { if let Expr::Match(match_expr) = &expr_stmt.expr { @@ -1225,12 +1241,12 @@ fn merge_branch_states( .zip(left.stack.iter().zip(&right.stack)) { for (name, info) in base_scope.iter_mut() { - let left_info = left_scope.get(name).ok_or_else(|| { - TypeError::new(format!("unknown identifier `{name}`"), span) - })?; - let right_info = right_scope.get(name).ok_or_else(|| { - TypeError::new(format!("unknown identifier `{name}`"), span) - })?; + let left_info = left_scope + .get(name) + .ok_or_else(|| TypeError::new(format!("unknown identifier `{name}`"), span))?; + let right_info = right_scope + .get(name) + .ok_or_else(|| TypeError::new(format!("unknown identifier `{name}`"), span))?; match type_kind(&info.ty, struct_map, enum_map) { TypeKind::Affine => { info.state = if left_info.state == MoveState::Moved @@ -1267,9 +1283,9 @@ fn ensure_affine_states_match( ) -> Result<(), TypeError> { for (base_scope, other_scope) in base.stack.iter().zip(&other.stack) { for (name, info) in base_scope { - let other_info = other_scope.get(name).ok_or_else(|| { - TypeError::new(format!("unknown 
identifier `{name}`"), span) - })?; + let other_info = other_scope + .get(name) + .ok_or_else(|| TypeError::new(format!("unknown identifier `{name}`"), span))?; if type_kind(&info.ty, struct_map, enum_map) != TypeKind::Unrestricted && info.state != other_info.state { @@ -1361,12 +1377,11 @@ fn merge_match_states( let Some((first, rest)) = arms.split_first() else { return Ok(()); }; - for (depth, (base_scope, first_scope)) in base.stack.iter_mut().zip(&first.stack).enumerate() - { + for (depth, (base_scope, first_scope)) in base.stack.iter_mut().zip(&first.stack).enumerate() { for (name, info) in base_scope.iter_mut() { - let first_info = first_scope.get(name).ok_or_else(|| { - TypeError::new(format!("unknown identifier `{name}`"), span) - })?; + let first_info = first_scope + .get(name) + .ok_or_else(|| TypeError::new(format!("unknown identifier `{name}`"), span))?; match type_kind(&info.ty, struct_map, enum_map) { TypeKind::Affine => { let mut moved = first_info.state == MoveState::Moved; @@ -1479,10 +1494,7 @@ pub(super) fn check_expr( return record_expr_type(recorder, expr, ty); } } - Err(TypeError::new( - format!("unknown value `{path}`"), - path.span, - )) + Err(TypeError::new(format!("unknown value `{path}`"), path.span)) } Expr::Call(call) => { let path = call.callee.to_path().ok_or_else(|| { @@ -1642,16 +1654,17 @@ pub(super) fn check_expr( None }; - let type_args = resolve_enum_type_args( - &enum_name, - &info.type_params, - &inferred, - ret_ty, - ); + let type_args = + resolve_enum_type_args(&enum_name, &info.type_params, &inferred, ret_ty); if let Some(payload_ty) = payload { if let Some(arg_ty) = arg_ty { - if !enum_payload_matches(&payload_ty, &arg_ty, &info.type_params, &type_args) { + if !enum_payload_matches( + &payload_ty, + &arg_ty, + &info.type_params, + &type_args, + ) { return Err(TypeError::new( "variant argument type mismatch".to_string(), call.args[0].span(), @@ -1670,11 +1683,14 @@ pub(super) fn check_expr( sig } else if resolved.len() == 1 
{ let qualified = format!("{}.{}", module_name, key); - functions.get(&qualified).ok_or_else(|| { - TypeError::new(format!("unknown function `{key}`"), path.span) - })? + functions + .get(&qualified) + .ok_or_else(|| TypeError::new(format!("unknown function `{key}`"), path.span))? } else { - return Err(TypeError::new(format!("unknown function `{key}`"), path.span)); + return Err(TypeError::new( + format!("unknown function `{key}`"), + path.span, + )); }; if sig.module != module_name && !sig.is_pub { return Err(TypeError::new( @@ -1690,7 +1706,8 @@ pub(super) fn check_expr( enum_map, type_params, )?; - let subs = build_call_substitution(sig, &explicit_type_args, HashMap::new(), call.span)?; + let subs = + build_call_substitution(sig, &explicit_type_args, HashMap::new(), call.span)?; enforce_type_param_bounds(sig, &subs, trait_impls, call.span)?; let instantiated_params: Vec = sig .params @@ -1757,15 +1774,14 @@ pub(super) fn check_expr( Expr::MethodCall(method_call) => { fn get_leftmost_segment(expr: &Expr) -> Option<&str> { match expr { - Expr::Path(path) if path.segments.len() == 1 => { - Some(&path.segments[0].item) - } + Expr::Path(path) if path.segments.len() == 1 => Some(&path.segments[0].item), Expr::FieldAccess(fa) => get_leftmost_segment(&fa.object), _ => None, } } - let base_is_local = if let Some(base_name) = get_leftmost_segment(&method_call.receiver) { + let base_is_local = if let Some(base_name) = get_leftmost_segment(&method_call.receiver) + { scopes.contains(base_name) } else { true @@ -1806,8 +1822,12 @@ pub(super) fn check_expr( enum_map, type_params, )?; - let subs = - build_call_substitution(sig, &explicit_type_args, HashMap::new(), method_call.span)?; + let subs = build_call_substitution( + sig, + &explicit_type_args, + HashMap::new(), + method_call.span, + )?; enforce_type_param_bounds(sig, &subs, trait_impls, method_call.span)?; let instantiated_params: Vec = sig .params @@ -2062,17 +2082,22 @@ pub(super) fn check_expr( let type_arg_suffix = 
super::build_type_arg_suffix(&receiver_args); let base_method_fn = format!("{type_name}__{}", method_call.method.item); - let specific_method_fn = format!("{type_name}{type_arg_suffix}__{}", method_call.method.item); + let specific_method_fn = + format!("{type_name}{type_arg_suffix}__{}", method_call.method.item); // Try type-specific method first (e.g., Vec__u8__map_add), then generic (Vec__map_add) let qualified_specific = format!("{method_module}.{specific_method_fn}"); let qualified_base = format!("{method_module}.{base_method_fn}"); - let key = if !type_arg_suffix.is_empty() && functions.contains_key(&qualified_specific) { + let key = if !type_arg_suffix.is_empty() && functions.contains_key(&qualified_specific) + { qualified_specific } else if functions.contains_key(&qualified_base) { qualified_base - } else if method_module == module_name && !type_arg_suffix.is_empty() && functions.contains_key(&specific_method_fn) { + } else if method_module == module_name + && !type_arg_suffix.is_empty() + && functions.contains_key(&specific_method_fn) + { specific_method_fn.clone() } else if method_module == module_name && functions.contains_key(&base_method_fn) { base_method_fn.clone() @@ -2082,9 +2107,9 @@ pub(super) fn check_expr( method_call.span, )); }; - let sig = functions - .get(&key) - .ok_or_else(|| TypeError::new(format!("unknown method `{key}`"), method_call.span))?; + let sig = functions.get(&key).ok_or_else(|| { + TypeError::new(format!("unknown method `{key}`"), method_call.span) + })?; if sig.module != module_name && !sig.is_pub { return Err(TypeError::new( format!("method `{key}` is private"), @@ -2126,7 +2151,8 @@ pub(super) fn check_expr( enum_map, type_params, )?; - let subs = build_call_substitution(sig, &explicit_type_args, inferred, method_call.span)?; + let subs = + build_call_substitution(sig, &explicit_type_args, inferred, method_call.span)?; enforce_type_param_bounds(sig, &subs, trait_impls, method_call.span)?; let instantiated_params: Vec = sig 
.params @@ -2200,7 +2226,9 @@ pub(super) fn check_expr( method_call.receiver.span(), )); } - if instantiated_params[0] != receiver_ref && instantiated_params[0] != receiver_ref_unqualified { + if instantiated_params[0] != receiver_ref + && instantiated_params[0] != receiver_ref_unqualified + { let _ = check_expr( &method_call.receiver, functions, @@ -2370,13 +2398,10 @@ pub(super) fn check_expr( type_param_bounds, )?; match binary.op { - BinaryOp::Add - | BinaryOp::Sub - | BinaryOp::Mul - | BinaryOp::Div - | BinaryOp::Mod => { - if left == right && (left == Ty::Builtin(BuiltinType::I32) - || left == Ty::Builtin(BuiltinType::I64)) + BinaryOp::Add | BinaryOp::Sub | BinaryOp::Mul | BinaryOp::Div | BinaryOp::Mod => { + if left == right + && (left == Ty::Builtin(BuiltinType::I32) + || left == Ty::Builtin(BuiltinType::I64)) { Ok(left) } else if left == right @@ -2542,7 +2567,8 @@ pub(super) fn check_expr( Ty::Path(name, args) if name == "sys.result.Result" && args.len() == 2 => &args[1], _ => { return Err(TypeError::new( - "the `?` operator can only be used in functions returning Result".to_string(), + "the `?` operator can only be used in functions returning Result" + .to_string(), try_expr.span, )) } @@ -2579,19 +2605,18 @@ pub(super) fn check_expr( Expr::FieldAccess(field_access) => { fn get_leftmost_path_segment(expr: &Expr) -> Option<&str> { match expr { - Expr::Path(path) if path.segments.len() == 1 => { - Some(&path.segments[0].item) - } + Expr::Path(path) if path.segments.len() == 1 => Some(&path.segments[0].item), Expr::FieldAccess(fa) => get_leftmost_path_segment(&fa.object), _ => None, } } - let base_is_local = if let Some(base_name) = get_leftmost_path_segment(&field_access.object) { - scopes.contains(base_name) - } else { - true - }; + let base_is_local = + if let Some(base_name) = get_leftmost_path_segment(&field_access.object) { + scopes.contains(base_name) + } else { + true + }; if !base_is_local { if let Some(path) = 
Expr::FieldAccess(field_access.clone()).to_path() { @@ -2653,7 +2678,8 @@ pub(super) fn check_expr( field_access.field.span, ) })?; - let substitutions = build_type_substitution(&info.type_params, &struct_args, field_access.span)?; + let substitutions = + build_type_substitution(&info.type_params, &struct_args, field_access.span)?; let field_ty = substitute_type(field_ty, &substitutions); if is_affine_type(&field_ty, struct_map, enum_map) { match use_mode { @@ -2831,7 +2857,14 @@ fn check_match_stmt( for arm in &match_expr.arms { let mut arm_scope = scopes.clone(); arm_scope.push_scope(); - bind_pattern(&arm.pattern, &match_ty, &mut arm_scope, use_map, enum_map, module_name)?; + bind_pattern( + &arm.pattern, + &match_ty, + &mut arm_scope, + use_map, + enum_map, + module_name, + )?; check_block( &arm.body, ret_ty, @@ -2905,7 +2938,14 @@ fn check_match_expr_value( for arm in &match_expr.arms { let mut arm_scope = scopes.clone(); arm_scope.push_scope(); - bind_pattern(&arm.pattern, &match_ty, &mut arm_scope, use_map, enum_map, module_name)?; + bind_pattern( + &arm.pattern, + &match_ty, + &mut arm_scope, + use_map, + enum_map, + module_name, + )?; let arm_ty = check_match_arm_value( &arm.body, functions, @@ -2977,12 +3017,6 @@ fn check_match_arm_value( )); }; for stmt in prefix { - if matches!(stmt, Stmt::Return(_)) { - return Err(TypeError::new( - "match arm cannot return in expression context".to_string(), - block.span, - )); - } check_stmt( stmt, ret_ty, @@ -3065,7 +3099,10 @@ fn check_match_exhaustive( missing.push("false"); } return Err(TypeError::new( - format!("non-exhaustive match on bool, missing: {}", missing.join(", ")), + format!( + "non-exhaustive match on bool, missing: {}", + missing.join(", ") + ), span, )); } @@ -3095,7 +3132,10 @@ fn check_match_exhaustive( missing.push("Err"); } return Err(TypeError::new( - format!("non-exhaustive match on Result, missing: {}", missing.join(", ")), + format!( + "non-exhaustive match on Result, missing: {}", + 
missing.join(", ") + ), span, )); } @@ -3137,7 +3177,10 @@ fn check_match_exhaustive( .cloned() .collect(); return Err(TypeError::new( - format!("non-exhaustive match, missing variants: {}", missing.join(", ")), + format!( + "non-exhaustive match, missing variants: {}", + missing.join(", ") + ), span, )); } @@ -3197,9 +3240,9 @@ fn check_struct_literal( } else { key.clone() }; - let info = struct_map.get(&qualified).ok_or_else(|| { - TypeError::new(format!("unknown struct `{}`", key), lit.span) - })?; + let info = struct_map + .get(&qualified) + .ok_or_else(|| TypeError::new(format!("unknown struct `{}`", key), lit.span))?; (qualified, info) } }; @@ -3235,10 +3278,7 @@ fn check_struct_literal( let mut remaining = info.fields.clone(); for field in &lit.fields { let expected = remaining.remove(&field.name.item).ok_or_else(|| { - TypeError::new( - format!("unknown field `{}`", field.name.item), - field.span, - ) + TypeError::new(format!("unknown field `{}`", field.name.item), field.span) })?; let expected = substitute_type(&expected, &substitutions); let actual = check_expr( @@ -3260,7 +3300,10 @@ fn check_struct_literal( )?; if actual != expected { return Err(TypeError::new( - format!("field `{}` expects {expected:?}, found {actual:?}", field.name.item), + format!( + "field `{}` expects {expected:?}, found {actual:?}", + field.name.item + ), field.span, )); } @@ -3516,19 +3559,26 @@ fn bind_pattern( { let Ty::Path(match_name, match_args) = match_ty else { return Err(TypeError::new( - format!("pattern type mismatch: expected {match_ty:?}, found {enum_name:?}"), + format!( + "pattern type mismatch: expected {match_ty:?}, found {enum_name:?}" + ), path.span, )); }; if match_name != &enum_name { return Err(TypeError::new( - format!("pattern type mismatch: expected {match_ty:?}, found {enum_name:?}"), + format!( + "pattern type mismatch: expected {match_ty:?}, found {enum_name:?}" + ), path.span, )); } if let Some(binding) = binding { let Some(info) = 
enum_map.get(&enum_name) else { - return Err(TypeError::new("unknown enum variant".to_string(), path.span)); + return Err(TypeError::new( + "unknown enum variant".to_string(), + path.span, + )); }; let variant = path .segments diff --git a/capc/tests/run.rs b/capc/tests/run.rs index 36d7b84..714a6be 100644 --- a/capc/tests/run.rs +++ b/capc/tests/run.rs @@ -96,12 +96,8 @@ fn run_fs_attenuation() { fn run_fs_helpers() { let out_dir = make_out_dir("fs_helpers"); let out_dir = out_dir.to_str().expect("utf8 out dir"); - let (code, stdout, _stderr) = run_capc(&[ - "run", - "--out-dir", - out_dir, - "tests/programs/fs_helpers.cap", - ]); + let (code, stdout, _stderr) = + run_capc(&["run", "--out-dir", out_dir, "tests/programs/fs_helpers.cap"]); assert_eq!(code, 0); assert!(stdout.contains("fs helpers ok"), "stdout was: {stdout:?}"); } @@ -110,12 +106,8 @@ fn run_fs_helpers() { fn run_fs_reuse() { let out_dir = make_out_dir("fs_reuse"); let out_dir = out_dir.to_str().expect("utf8 out dir"); - let (code, stdout, _stderr) = run_capc(&[ - "run", - "--out-dir", - out_dir, - "tests/programs/fs_reuse.cap", - ]); + let (code, stdout, _stderr) = + run_capc(&["run", "--out-dir", out_dir, "tests/programs/fs_reuse.cap"]); assert_eq!(code, 0); assert!(stdout.contains("fs reuse ok"), "stdout was: {stdout:?}"); } @@ -135,16 +127,36 @@ fn run_fs_dir_reuse() { } #[test] -fn run_match_expr() { - let out_dir = make_out_dir("match_expr"); +fn run_let_else() { + let out_dir = make_out_dir("let_else"); + let out_dir = out_dir.to_str().expect("utf8 out dir"); + let (code, stdout, _stderr) = + run_capc(&["run", "--out-dir", out_dir, "tests/programs/let_else.cap"]); + assert_eq!(code, 0); + assert!(stdout.contains("let else ok"), "stdout was: {stdout:?}"); +} + +#[test] +fn run_path_helpers() { + let out_dir = make_out_dir("path_helpers"); let out_dir = out_dir.to_str().expect("utf8 out dir"); let (code, stdout, _stderr) = run_capc(&[ "run", "--out-dir", out_dir, - 
"tests/programs/match_expr.cap", + "tests/programs/path_helpers.cap", ]); assert_eq!(code, 0); + assert!(stdout.contains("path ok"), "stdout was: {stdout:?}"); +} + +#[test] +fn run_match_expr() { + let out_dir = make_out_dir("match_expr"); + let out_dir = out_dir.to_str().expect("utf8 out dir"); + let (code, stdout, _stderr) = + run_capc(&["run", "--out-dir", out_dir, "tests/programs/match_expr.cap"]); + assert_eq!(code, 0); assert!(stdout.contains("yes"), "stdout was: {stdout:?}"); } @@ -166,12 +178,8 @@ fn run_struct_field_access() { fn run_math_wrap() { let out_dir = make_out_dir("math_wrap"); let out_dir = out_dir.to_str().expect("utf8 out dir"); - let (code, stdout, _stderr) = run_capc(&[ - "run", - "--out-dir", - out_dir, - "tests/programs/math_wrap.cap", - ]); + let (code, stdout, _stderr) = + run_capc(&["run", "--out-dir", out_dir, "tests/programs/math_wrap.cap"]); assert_eq!(code, 0); assert!(stdout.contains("wrap ok"), "stdout was: {stdout:?}"); } @@ -187,7 +195,10 @@ fn run_unsigned_compare() { "tests/programs/unsigned_compare.cap", ]); assert_eq!(code, 0); - assert!(stdout.contains("unsigned compare ok"), "stdout was: {stdout:?}"); + assert!( + stdout.contains("unsigned compare ok"), + "stdout was: {stdout:?}" + ); } #[test] @@ -320,12 +331,8 @@ fn run_overflow_add_traps() { fn run_div_zero_traps() { let out_dir = make_out_dir("div_zero"); let out_dir = out_dir.to_str().expect("utf8 out dir"); - let (code, _stdout, _stderr) = run_capc(&[ - "run", - "--out-dir", - out_dir, - "tests/programs/div_zero.cap", - ]); + let (code, _stdout, _stderr) = + run_capc(&["run", "--out-dir", out_dir, "tests/programs/div_zero.cap"]); assert_ne!(code, 0); } @@ -376,12 +383,8 @@ fn run_text_unsafe() { fn run_text_safe() { let out_dir = make_out_dir("text_safe"); let out_dir = out_dir.to_str().expect("utf8 out dir"); - let (code, stdout, _stderr) = run_capc(&[ - "run", - "--out-dir", - out_dir, - "tests/programs/text_safe.cap", - ]); + let (code, stdout, _stderr) = + 
run_capc(&["run", "--out-dir", out_dir, "tests/programs/text_safe.cap"]); assert_eq!(code, 0); assert!(stdout.contains("text ok"), "stdout was: {stdout:?}"); } @@ -411,22 +414,15 @@ fn run_text_helpers_more() { "tests/programs/text_helpers_more.cap", ]); assert_eq!(code, 0); - assert!( - stdout.contains("text helpers ok"), - "stdout was: {stdout:?}" - ); + assert!(stdout.contains("text helpers ok"), "stdout was: {stdout:?}"); } #[test] fn run_text_basic() { let out_dir = make_out_dir("text_basic"); let out_dir = out_dir.to_str().expect("utf8 out dir"); - let (code, stdout, _stderr) = run_capc(&[ - "run", - "--out-dir", - out_dir, - "tests/programs/text_basic.cap", - ]); + let (code, stdout, _stderr) = + run_capc(&["run", "--out-dir", out_dir, "tests/programs/text_basic.cap"]); assert_eq!(code, 0); assert!(stdout.contains("text basic ok"), "stdout was: {stdout:?}"); } @@ -521,12 +517,8 @@ fn run_slice_safe_read() { fn run_args_safe() { let out_dir = make_out_dir("args_safe"); let out_dir = out_dir.to_str().expect("utf8 out dir"); - let (code, stdout, _stderr) = run_capc(&[ - "run", - "--out-dir", - out_dir, - "tests/programs/args_safe.cap", - ]); + let (code, stdout, _stderr) = + run_capc(&["run", "--out-dir", out_dir, "tests/programs/args_safe.cap"]); assert_eq!(code, 0); assert!(stdout.contains("args ok"), "stdout was: {stdout:?}"); } @@ -535,12 +527,8 @@ fn run_args_safe() { fn run_stdin_safe() { let out_dir = make_out_dir("stdin_safe"); let out_dir = out_dir.to_str().expect("utf8 out dir"); - let (code, stdout, _stderr) = run_capc(&[ - "run", - "--out-dir", - out_dir, - "tests/programs/stdin_safe.cap", - ]); + let (code, stdout, _stderr) = + run_capc(&["run", "--out-dir", out_dir, "tests/programs/stdin_safe.cap"]); assert_eq!(code, 0); assert!(stdout.contains("stdin ok"), "stdout was: {stdout:?}"); } @@ -563,12 +551,8 @@ fn run_string_helpers() { fn run_wc_stdin() { let out_dir = make_out_dir("wc_stdin"); let out_dir = out_dir.to_str().expect("utf8 out dir"); - 
let (code, stdout, _stderr) = run_capc(&[ - "run", - "--out-dir", - out_dir, - "tests/programs/wc_stdin.cap", - ]); + let (code, stdout, _stderr) = + run_capc(&["run", "--out-dir", out_dir, "tests/programs/wc_stdin.cap"]); assert_eq!(code, 0); assert!(stdout.contains("0 0 0"), "stdout was: {stdout:?}"); } @@ -671,7 +655,10 @@ fn run_vec_custom_eq() { "tests/programs/vec_custom_eq.cap", ]); assert_eq!(code, 0); - assert!(stdout.contains("vec custom eq ok"), "stdout was: {stdout:?}"); + assert!( + stdout.contains("vec custom eq ok"), + "stdout was: {stdout:?}" + ); } #[test] @@ -741,7 +728,10 @@ fn run_unit_match_arm() { "tests/programs/unit_match_arm.cap", ]); assert_eq!(code, 0); - assert!(stdout.contains("unit match arm ok"), "stdout was: {stdout:?}"); + assert!( + stdout.contains("unit match arm ok"), + "stdout was: {stdout:?}" + ); } #[test] @@ -755,7 +745,10 @@ fn run_unit_match_multi() { "tests/programs/unit_match_multi.cap", ]); assert_eq!(code, 0); - assert!(stdout.contains("unit match multi ok"), "stdout was: {stdout:?}"); + assert!( + stdout.contains("unit match multi ok"), + "stdout was: {stdout:?}" + ); } #[test] @@ -769,7 +762,10 @@ fn run_result_unit_ok() { "tests/programs/result_unit_ok.cap", ]); assert_eq!(code, 0); - assert!(stdout.contains("result unit ok - got Ok(unit)"), "stdout was: {stdout:?}"); + assert!( + stdout.contains("result unit ok - got Ok(unit)"), + "stdout was: {stdout:?}" + ); } #[test] @@ -783,7 +779,10 @@ fn run_unit_match_bind() { "tests/programs/unit_match_bind.cap", ]); assert_eq!(code, 0); - assert!(stdout.contains("unit match bind ok"), "stdout was: {stdout:?}"); + assert!( + stdout.contains("unit match bind ok"), + "stdout was: {stdout:?}" + ); } #[test] @@ -797,8 +796,14 @@ fn run_early_return_block() { "tests/programs/early_return_block.cap", ]); assert_eq!(code, 0); - assert!(stdout.contains("early return test ok"), "stdout was: {stdout:?}"); - assert!(!stdout.contains("SHOULD NOT PRINT"), "stdout was: {stdout:?}"); + 
assert!( + stdout.contains("early return test ok"), + "stdout was: {stdout:?}" + ); + assert!( + !stdout.contains("SHOULD NOT PRINT"), + "stdout was: {stdout:?}" + ); } #[test] @@ -812,8 +817,14 @@ fn run_early_return_while() { "tests/programs/early_return_while.cap", ]); assert_eq!(code, 0); - assert!(stdout.contains("early return while test ok"), "stdout was: {stdout:?}"); - assert!(!stdout.contains("SHOULD NOT PRINT"), "stdout was: {stdout:?}"); + assert!( + stdout.contains("early return while test ok"), + "stdout was: {stdout:?}" + ); + assert!( + !stdout.contains("SHOULD NOT PRINT"), + "stdout was: {stdout:?}" + ); } #[test] @@ -827,7 +838,10 @@ fn run_scoping_let_block() { "tests/programs/scoping_let_block.cap", ]); assert_eq!(code, 0); - assert!(stdout.contains("scoping let block test ok"), "stdout was: {stdout:?}"); + assert!( + stdout.contains("scoping let block test ok"), + "stdout was: {stdout:?}" + ); } #[test] @@ -841,7 +855,10 @@ fn run_scoping_assign() { "tests/programs/scoping_assign.cap", ]); assert_eq!(code, 0); - assert!(stdout.contains("scoping assign test ok"), "stdout was: {stdout:?}"); + assert!( + stdout.contains("scoping assign test ok"), + "stdout was: {stdout:?}" + ); } #[test] @@ -904,12 +921,8 @@ fn run_nested_match() { fn run_for_basic() { let out_dir = make_out_dir("for_basic"); let out_dir = out_dir.to_str().expect("utf8 out dir"); - let (code, stdout, _stderr) = run_capc(&[ - "run", - "--out-dir", - out_dir, - "tests/programs/for_basic.cap", - ]); + let (code, stdout, _stderr) = + run_capc(&["run", "--out-dir", out_dir, "tests/programs/for_basic.cap"]); assert_eq!(code, 0); assert!(stdout.contains("for_basic ok"), "stdout was: {stdout:?}"); } @@ -918,12 +931,8 @@ fn run_for_basic() { fn run_for_break() { let out_dir = make_out_dir("for_break"); let out_dir = out_dir.to_str().expect("utf8 out dir"); - let (code, stdout, _stderr) = run_capc(&[ - "run", - "--out-dir", - out_dir, - "tests/programs/for_break.cap", - ]); + let (code, 
stdout, _stderr) = + run_capc(&["run", "--out-dir", out_dir, "tests/programs/for_break.cap"]); assert_eq!(code, 0); assert!(stdout.contains("for_break ok"), "stdout was: {stdout:?}"); } @@ -946,12 +955,8 @@ fn run_for_continue() { fn run_for_nested() { let out_dir = make_out_dir("for_nested"); let out_dir = out_dir.to_str().expect("utf8 out dir"); - let (code, stdout, _stderr) = run_capc(&[ - "run", - "--out-dir", - out_dir, - "tests/programs/for_nested.cap", - ]); + let (code, stdout, _stderr) = + run_capc(&["run", "--out-dir", out_dir, "tests/programs/for_nested.cap"]); assert_eq!(code, 0); assert!(stdout.contains("for_nested ok"), "stdout was: {stdout:?}"); } @@ -960,12 +965,8 @@ fn run_for_nested() { fn run_for_sum() { let out_dir = make_out_dir("for_sum"); let out_dir = out_dir.to_str().expect("utf8 out dir"); - let (code, stdout, _stderr) = run_capc(&[ - "run", - "--out-dir", - out_dir, - "tests/programs/for_sum.cap", - ]); + let (code, stdout, _stderr) = + run_capc(&["run", "--out-dir", out_dir, "tests/programs/for_sum.cap"]); assert_eq!(code, 0); assert!(stdout.contains("for_sum ok"), "stdout was: {stdout:?}"); } @@ -981,7 +982,10 @@ fn run_for_empty_range() { "tests/programs/for_empty_range.cap", ]); assert_eq!(code, 0); - assert!(stdout.contains("for_empty_range ok"), "stdout was: {stdout:?}"); + assert!( + stdout.contains("for_empty_range ok"), + "stdout was: {stdout:?}" + ); } #[test] @@ -995,7 +999,10 @@ fn run_for_break_nested() { "tests/programs/for_break_nested.cap", ]); assert_eq!(code, 0); - assert!(stdout.contains("for_break_nested ok"), "stdout was: {stdout:?}"); + assert!( + stdout.contains("for_break_nested ok"), + "stdout was: {stdout:?}" + ); } #[test] @@ -1009,7 +1016,10 @@ fn run_for_continue_nested() { "tests/programs/for_continue_nested.cap", ]); assert_eq!(code, 0); - assert!(stdout.contains("for_continue_nested ok"), "stdout was: {stdout:?}"); + assert!( + stdout.contains("for_continue_nested ok"), + "stdout was: {stdout:?}" + ); } 
#[test] @@ -1023,7 +1033,10 @@ fn run_string_compare() { "tests/programs/string_compare.cap", ]); assert_eq!(code, 0); - assert!(stdout.contains("string_compare ok"), "stdout was: {stdout:?}"); + assert!( + stdout.contains("string_compare ok"), + "stdout was: {stdout:?}" + ); } #[test] @@ -1051,7 +1064,10 @@ fn run_generic_and_index() { "tests/programs/generic_and_index.cap", ]); assert_eq!(code, 0); - assert!(stdout.contains("generic_and_index ok"), "stdout was: {stdout:?}"); + assert!( + stdout.contains("generic_and_index ok"), + "stdout was: {stdout:?}" + ); } #[test] @@ -1127,7 +1143,10 @@ fn run_defer_return() { "tests/programs/should_pass_defer_return.cap", ]); assert_eq!(code, 0); - assert!(stdout.contains("start\ninner\nouter\n"), "stdout was: {stdout:?}"); + assert!( + stdout.contains("start\ninner\nouter\n"), + "stdout was: {stdout:?}" + ); } #[test] @@ -1141,7 +1160,10 @@ fn run_if_let() { "tests/programs/should_pass_if_let.cap", ]); assert_eq!(code, 0); - assert!(stdout.contains("ok\nerr\nif_let ok\n"), "stdout was: {stdout:?}"); + assert!( + stdout.contains("ok\nerr\nif_let ok\n"), + "stdout was: {stdout:?}" + ); } #[test] @@ -1155,7 +1177,10 @@ fn run_for_forever() { "tests/programs/should_pass_for_forever.cap", ]); assert_eq!(code, 0); - assert!(stdout.contains("0\n1\n2\nfor_forever ok\n"), "stdout was: {stdout:?}"); + assert!( + stdout.contains("0\n1\n2\nfor_forever ok\n"), + "stdout was: {stdout:?}" + ); } #[test] @@ -1182,7 +1207,10 @@ fn run_trait_eq_hash() { "tests/programs/trait_eq_hash.cap", ]); assert_eq!(code, 0); - assert!(stdout.contains("Eq and Hash traits work correctly!"), "stdout was: {stdout:?}"); + assert!( + stdout.contains("Eq and Hash traits work correctly!"), + "stdout was: {stdout:?}" + ); } #[test] @@ -1196,7 +1224,10 @@ fn run_hashmap_demo() { "examples/hashmap_demo/hashmap_demo.cap", ]); assert_eq!(code, 0); - assert!(stdout.contains("=== Demo Complete ==="), "stdout was: {stdout:?}"); + assert!( + stdout.contains("=== Demo 
Complete ==="), + "stdout was: {stdout:?}" + ); } #[test] diff --git a/capc/tests/typecheck.rs b/capc/tests/typecheck.rs index c1a0d02..121e916 100644 --- a/capc/tests/typecheck.rs +++ b/capc/tests/typecheck.rs @@ -52,6 +52,22 @@ fn typecheck_fs_dir_reuse_ok() { type_check_program(&module, &stdlib, &[]).expect("typecheck module"); } +#[test] +fn typecheck_let_else_ok() { + let source = load_program("let_else.cap"); + let module = parse_module(&source).expect("parse module"); + let stdlib = load_stdlib().expect("load stdlib"); + type_check_program(&module, &stdlib, &[]).expect("typecheck module"); +} + +#[test] +fn typecheck_path_helpers_ok() { + let source = load_program("path_helpers.cap"); + let module = parse_module(&source).expect("parse module"); + let stdlib = load_stdlib().expect("load stdlib"); + type_check_program(&module, &stdlib, &[]).expect("typecheck module"); +} + #[test] fn typecheck_fs_close_ok() { let source = load_program("should_pass_fs_close.cap"); @@ -270,7 +286,9 @@ fn typecheck_alloc_as_console_fails() { let module = parse_module(&source).expect("parse module"); let stdlib = load_stdlib().expect("load stdlib"); let err = type_check_program(&module, &stdlib, &[]).expect_err("expected type error"); - assert!(err.to_string().contains("unknown method `sys.buffer.Alloc__println`")); + assert!(err + .to_string() + .contains("unknown method `sys.buffer.Alloc__println`")); } #[test] @@ -340,9 +358,7 @@ fn typecheck_try_question_err_mismatch_fails() { let module = parse_module(&source).expect("parse module"); let stdlib = load_stdlib().expect("load stdlib"); let err = type_check_program(&module, &stdlib, &[]).expect_err("expected type error"); - assert!(err - .to_string() - .contains("mismatched error type for `?`")); + assert!(err.to_string().contains("mismatched error type for `?`")); } #[test] @@ -681,7 +697,9 @@ fn typecheck_affine_loop_move_fails() { let module = parse_module(&source).expect("parse module"); let stdlib = 
load_stdlib().expect("load stdlib"); let err = type_check_program(&module, &stdlib, &[]).expect_err("expected type error"); - assert!(err.to_string().contains("move-only value `c` moved inside loop")); + assert!(err + .to_string() + .contains("move-only value `c` moved inside loop")); } #[test] @@ -709,7 +727,9 @@ fn typecheck_extern_requires_unsafe_package() { let module = parse_module(&source).expect("parse module"); let stdlib = load_stdlib().expect("load stdlib"); let err = type_check_program(&module, &stdlib, &[]).expect_err("expected type error"); - assert!(err.to_string().contains("extern declarations require `package unsafe`")); + assert!(err + .to_string() + .contains("extern declarations require `package unsafe`")); } #[test] @@ -734,7 +754,9 @@ fn typecheck_pointer_requires_unsafe_package() { let module = parse_module(&source).expect("parse module"); let stdlib = load_stdlib().expect("load stdlib"); let err = type_check_program(&module, &stdlib, &[]).expect_err("expected type error"); - assert!(err.to_string().contains("raw pointer types require `package unsafe`")); + assert!(err + .to_string() + .contains("raw pointer types require `package unsafe`")); } #[test] @@ -1009,7 +1031,9 @@ fn typecheck_continue_outside_loop_fails() { let module = parse_module(&source).expect("parse module"); let stdlib = load_stdlib().expect("load stdlib"); let err = type_check_program(&module, &stdlib, &[]).expect_err("expected type error"); - assert!(err.to_string().contains("continue statement outside of loop")); + assert!(err + .to_string() + .contains("continue statement outside of loop")); } #[test] diff --git a/examples/config_loader/config_loader.cap b/examples/config_loader/config_loader.cap index 8fd3c37..2a1fa6f 100644 --- a/examples/config_loader/config_loader.cap +++ b/examples/config_loader/config_loader.cap @@ -13,16 +13,17 @@ fn print_kv(c: Console, key: string, val: string) -> unit { } fn parse_line(c: Console, line: string) -> unit { - if (line.len() == 0) { 
+ let trimmed = line.trim_view() + if (trimmed.len() == 0) { return () } - if (line.starts_with("#")) { + if (trimmed.starts_with("#")) { return () } - match (line.split_once_view('=')) { - Ok(parts) => { print_kv(c, parts.left, parts.right) } - Err(_) => { } + let Ok(parts) = trimmed.split_once_view('=') else { + return () } + print_kv(c, parts.left, parts.right) return () } @@ -42,8 +43,8 @@ fn parse_config(c: Console, contents: string) -> unit { } } -fn run(c: Console, alloc: Alloc, fs: ReadFS) -> Result { - let contents = fs.read_to_string(alloc, "app.conf")? +fn run(c: Console, fs: ReadFS) -> Result { + let contents = fs.read_to_string_default("app.conf")? parse_config(c, contents) return Ok(()) } @@ -51,8 +52,7 @@ fn run(c: Console, alloc: Alloc, fs: ReadFS) -> Result { pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let fs = rc.mint_readfs("examples/config_loader") - let alloc = rc.mint_alloc_default() - let result = run(c, alloc, fs) + let result = run(c, fs) if (result.is_ok()) { c.println("config ok") return 0 diff --git a/examples/how_to_string/how_to_string.cap b/examples/how_to_string/how_to_string.cap index e0ef338..f6d4dff 100644 --- a/examples/how_to_string/how_to_string.cap +++ b/examples/how_to_string/how_to_string.cap @@ -2,10 +2,9 @@ package safe module how_to_string use sys::system use sys::console -use sys::string use sys::buffer -fn demo_string_view(c: Console, alloc: Alloc) -> unit { +fn demo_string_view(c: Console) -> unit { let s = " hello,world \n" let trimmed = s.trim_view() c.println("-- string view --") @@ -21,35 +20,34 @@ fn demo_string_view(c: Console, alloc: Alloc) -> unit { Err(_) => { c.println("comma not found") } } - let words = trimmed.split(alloc, ',') + let words = trimmed.split_view_default(',') c.println("split count:") c.println_i32(words.len()) c.println(words.get_or(0, "")) c.println(words.get_or(1, "")) - alloc.vec_string_free(words) + words.free() } -fn demo_text_builder(c: Console, alloc: Alloc) -> unit { 
+fn demo_text_builder(c: Console) -> unit { c.println("-- Text builder --") - let t = alloc.text_new() + let t = buffer::text_new_default() t.push_str("hello").ok() t.push_byte(' ').ok() t.append("text").ok() c.println(t.slice_range(0, 5).ok()) c.println(t.to_string().ok()) - t.free(alloc) + t.free_default() - let t2 = alloc.text_from("owned").ok() + let t2 = buffer::text_from_default("owned").ok() c.println(t2.to_string().ok()) - t2.free(alloc) + t2.free_default() - c.println("a".concat(alloc, "b").ok()) + c.println("a".concat_default("b").ok()) } pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() - let alloc = rc.mint_alloc_default() - demo_string_view(c, alloc) - demo_text_builder(c, alloc) + demo_string_view(c) + demo_text_builder(c) return 0 } diff --git a/examples/http_server/http_server.cap b/examples/http_server/http_server.cap index db696e9..801946b 100644 --- a/examples/http_server/http_server.cap +++ b/examples/http_server/http_server.cap @@ -5,7 +5,7 @@ module http_server use sys::console use sys::fs use sys::net -use sys::string +use sys::path use sys::system fn strip_query(raw_path: string) -> string { @@ -15,91 +15,34 @@ fn strip_query(raw_path: string) -> string { } } -fn sanitize_path(raw_path: string, alloc: Alloc) -> Result { - let out = alloc.text_new() - let rest = raw_path - while (true) { - let seg = "" - let done = false - match (rest.split_once_view('/')) { - Ok(parts) => { - seg = parts.left - rest = parts.right - } - Err(_) => { - seg = rest - done = true - } - } - if (seg == "..") { - out.free(alloc) - return Err(()) - } - if (seg.len() > 0 && seg != ".") { - if (!out.is_empty()) { - match (out.push_byte('/')) { - Ok(_) => { } - Err(_) => { - out.free(alloc) - return Err(()) - } - } - } - match (out.push_str(seg)) { - Ok(_) => { } - Err(_) => { - out.free(alloc) - return Err(()) - } - } - } - if (done) { - break - } - } - if (out.is_empty()) { - out.free(alloc) - return Ok("index.html") - } - let result = out.to_string() - 
out.free(alloc) - match (result) { - Ok(path) => { return Ok(path) } - Err(_) => { return Err(()) } - } -} - enum ParseErr { MissingSpace, BadMethod, BadSlice } -fn split_on_space(s: string) -> Result { - match (s.split_once_view(' ')) { - Ok(parts) => { return Ok(parts) } - Err(_) => { return Err(ParseErr::MissingSpace) } - } -} - -fn parse_request_line(line: string, alloc: Alloc) -> Result { +fn parse_request_line(line: string) -> Result { let trimmed = line.trim_view() - let head = split_on_space(trimmed)? + let Ok(head) = trimmed.split_once_view(' ') else { + return Err(ParseErr::MissingSpace) + } if (head.left != "GET") { return Err(ParseErr::BadMethod) } let rest = head.right.trim_start_view() - let target = split_on_space(rest)?.left - match (sanitize_path(strip_query(target), alloc)) { + let Ok(target) = rest.split_once_view(' ') else { + return Err(ParseErr::MissingSpace) + } + match (path::clean_relative(strip_query(target.left))) { Ok(p) => { return Ok(p) } Err(_) => { return Err(ParseErr::BadSlice) } } } -fn parse_request_path(req: string, alloc: Alloc) -> Result { +fn parse_request_path(req: string) -> Result { match (req.split_once_view('\n')) { - Ok(parts) => { return parse_request_line(parts.left.trim_end_view(), alloc) } - Err(_) => { return parse_request_line(req.trim_end_view(), alloc) } + Ok(parts) => { return parse_request_line(parts.left.trim_end_view()) } + Err(_) => { return parse_request_line(req.trim_end_view()) } } } @@ -117,10 +60,10 @@ fn respond_bad_request(conn: &TcpConn) -> Result { return conn.write("HTTP/1.0 400 Bad Request\r\nContent-Type: text/plain\r\n\r\nbad request\n") } -fn handle_request(conn: &TcpConn, readfs: &ReadFS, alloc: Alloc, req: string) -> Result { - match (parse_request_path(req, alloc)) { +fn handle_request(conn: &TcpConn, readfs: &ReadFS, req: string) -> Result { + match (parse_request_path(req)) { Ok(path) => { - match (readfs.read_to_string(alloc, path)) { + match (readfs.read_to_string_default(path)) { 
Ok(body) => { return respond_ok(conn, body) } Err(_) => { return respond_not_found(conn) } } @@ -129,7 +72,7 @@ fn handle_request(conn: &TcpConn, readfs: &ReadFS, alloc: Alloc, req: string) -> } } -fn serve_forever(c: Console, net: Net, rc: RootCap, alloc: Alloc, root: string, port: i32) -> Result { +fn serve_forever(c: Console, net: Net, rc: RootCap, root: string, port: i32) -> Result { let listener = net.listen("127.0.0.1", port)? let readfs = rc.mint_readfs(root) let readfs_ref: &ReadFS = readfs @@ -140,15 +83,16 @@ fn serve_forever(c: Console, net: Net, rc: RootCap, alloc: Alloc, root: string, defer readfs.close() while (true) { if let Ok(conn) = listener.accept() { - let req_result = conn.read(alloc, 4096) - if (req_result.is_err()) { - conn.close() - } else { - let req = req_result.ok() - let handled = handle_request(conn, readfs_ref, alloc, req) - conn.close() - if (handled.is_err()) { - // Ignore per-request write errors; keep the server alive. + match (conn.read_default(4096)) { + Ok(req) => { + let handled = handle_request(conn, readfs_ref, req) + conn.close() + if (handled.is_err()) { + // Ignore per-request write errors; keep the server alive. 
+ } + } + Err(_) => { + conn.close() } } } else { @@ -163,10 +107,9 @@ pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let net = rc.mint_net() let args = rc.mint_args() - let alloc = rc.mint_alloc_default() - let root = args.at(1).unwrap_or("examples/http_server") + let root = args.at_or(1, "examples/http_server") let port = 8090 - let result = serve_forever(c, net, rc, alloc, root, port) + let result = serve_forever(c, net, rc, root, port) if (result.is_err()) { c.println("server error (is the port in use?)") } diff --git a/examples/sort/sort.cap b/examples/sort/sort.cap index ef1c724..374db09 100644 --- a/examples/sort/sort.cap +++ b/examples/sort/sort.cap @@ -1,10 +1,10 @@ package safe module sort use sys::system +use sys::buffer use sys::console use sys::stdin use sys::io -use sys::string use sys::vec fn min_i32(a: i32, b: i32) -> i32 { @@ -59,13 +59,13 @@ fn sort_indices(lines: Vec, indices: Vec) -> unit { } } -fn run(c: Console, alloc: Alloc, input: Stdin) -> Result { - let contents = input.read_to_string(alloc)? - let lines = contents.lines_view(alloc) +fn run(c: Console, input: Stdin) -> Result { + let contents = input.read_to_string_default()? + let lines = contents.lines_view_default() let n = lines.len() // Create index array [0, 1, 2, ...] 
- let indices = alloc.vec_i32_new() + let indices = buffer::vec_new_default() for i in 0..n { indices.push(i).ok() } @@ -81,16 +81,15 @@ fn run(c: Console, alloc: Alloc, input: Stdin) -> Result { } } - alloc.vec_i32_free(indices) - alloc.vec_string_free(lines) + indices.free() + lines.free() return Ok(()) } pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let input = rc.mint_stdin() - let alloc = rc.mint_alloc_default() - let result = run(c, alloc, input) + let result = run(c, input) if (result.is_err()) { c.println("error reading input") return 1 diff --git a/examples/uniq/uniq.cap b/examples/uniq/uniq.cap index 0a8fe06..ee8a50d 100644 --- a/examples/uniq/uniq.cap +++ b/examples/uniq/uniq.cap @@ -4,7 +4,6 @@ use sys::system use sys::console use sys::stdin use sys::io -use sys::string use sys::vec fn should_print(lines: Vec, i: i32) -> bool { @@ -16,9 +15,9 @@ fn should_print(lines: Vec, i: i32) -> bool { return prev != curr } -fn run(c: Console, alloc: Alloc, input: Stdin) -> Result { - let contents = input.read_to_string(alloc)? - let lines = contents.lines_view(alloc) +fn run(c: Console, input: Stdin) -> Result { + let contents = input.read_to_string_default()? 
+ let lines = contents.lines_view_default() let n = lines.len() for i in 0..n { if (should_print(lines, i)) { @@ -26,15 +25,14 @@ fn run(c: Console, alloc: Alloc, input: Stdin) -> Result { c.println(line) } } - alloc.vec_string_free(lines) + lines.free() return Ok(()) } pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let input = rc.mint_stdin() - let alloc = rc.mint_alloc_default() - let result = run(c, alloc, input) + let result = run(c, input) if (result.is_err()) { c.println("error reading input") return 1 diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index 85056e6..a3f5faa 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -28,8 +28,7 @@ static STDIN_CAPS: LazyLock>> = LazyLock::new(|| Mutex::new(HashMap::new())); static NET_CAPS: LazyLock>> = LazyLock::new(|| Mutex::new(HashMap::new())); -static ALLOCS: LazyLock>> = - LazyLock::new(|| Mutex::new(HashMap::new())); +static ALLOCS: LazyLock>> = LazyLock::new(|| Mutex::new(HashMap::new())); static TCP_LISTENERS: LazyLock>> = LazyLock::new(|| Mutex::new(HashMap::new())); static TCP_CONNS: LazyLock>> = @@ -79,7 +78,6 @@ struct VecHeader { alloc: Handle, } - fn new_handle() -> Handle { let mut buf = [0u8; 8]; loop { @@ -186,8 +184,7 @@ fn make_vec_header( elem_size: i32, alloc: Handle, ) -> Option { - let header = - alloc_malloc(alloc, std::mem::size_of::())? as *mut VecHeader; + let header = alloc_malloc(alloc, std::mem::size_of::())? 
as *mut VecHeader; if header.is_null() { return None; } @@ -351,10 +348,7 @@ pub extern "C" fn capable_rt_mint_net(_sys: Handle) -> Handle { } #[no_mangle] -pub extern "C" fn capable_rt_mint_readfs( - _sys: Handle, - root: *const CapString, -) -> Handle { +pub extern "C" fn capable_rt_mint_readfs(_sys: Handle, root: *const CapString) -> Handle { if !has_handle(&ROOT_CAPS, _sys, "root cap table") { return 0; } @@ -367,15 +361,17 @@ pub extern "C" fn capable_rt_mint_readfs( return 0; }; let handle = new_handle(); - insert_handle(&READ_FS, handle, ReadFsState { root: root_path }, "readfs table"); + insert_handle( + &READ_FS, + handle, + ReadFsState { root: root_path }, + "readfs table", + ); handle } #[no_mangle] -pub extern "C" fn capable_rt_mint_filesystem( - _sys: Handle, - root: *const CapString, -) -> Handle { +pub extern "C" fn capable_rt_mint_filesystem(_sys: Handle, root: *const CapString) -> Handle { if !has_handle(&ROOT_CAPS, _sys, "root cap table") { return 0; } @@ -422,10 +418,7 @@ pub extern "C" fn capable_rt_fs_filesystem_close(fs: Handle) { } #[no_mangle] -pub extern "C" fn capable_rt_fs_subdir( - dir: Handle, - name: *const CapString, -) -> Handle { +pub extern "C" fn capable_rt_fs_subdir(dir: Handle, name: *const CapString) -> Handle { let name = unsafe { read_cap_string(name) }; let state = take_handle(&DIRS, dir, "dir table"); let (Some(state), Some(name)) = (state, name) else { @@ -452,10 +445,7 @@ pub extern "C" fn capable_rt_fs_subdir( } #[no_mangle] -pub extern "C" fn capable_rt_fs_open_read( - dir: Handle, - name: *const CapString, -) -> Handle { +pub extern "C" fn capable_rt_fs_open_read(dir: Handle, name: *const CapString) -> Handle { let name = unsafe { read_cap_string(name) }; let state = take_handle(&DIRS, dir, "dir table"); let (Some(state), Some(name)) = (state, name) else { @@ -487,10 +477,7 @@ pub extern "C" fn capable_rt_fs_dir_close(dir: Handle) { } #[no_mangle] -pub extern "C" fn capable_rt_fs_exists( - fs: Handle, - path: *const 
CapString, -) -> u8 { +pub extern "C" fn capable_rt_fs_exists(fs: Handle, path: *const CapString) -> u8 { let path = unsafe { read_cap_string(path) }; let state = clone_handle(&READ_FS, fs, "readfs table"); let (Some(state), Some(path)) = (state, path) else { @@ -574,10 +561,7 @@ pub extern "C" fn capable_rt_fs_list_dir( } #[no_mangle] -pub extern "C" fn capable_rt_fs_dir_exists( - dir: Handle, - name: *const CapString, -) -> u8 { +pub extern "C" fn capable_rt_fs_dir_exists(dir: Handle, name: *const CapString) -> u8 { let name = unsafe { read_cap_string(name) }; let state = clone_handle(&DIRS, dir, "dir table"); let (Some(state), Some(name)) = (state, name) else { @@ -1133,6 +1117,13 @@ pub extern "C" fn capable_rt_alloc_default(_sys: Handle) -> Handle { handle } +#[no_mangle] +pub extern "C" fn capable_rt_default_alloc() -> Handle { + let handle = new_handle(); + insert_handle(&ALLOCS, handle, (), "alloc table"); + handle +} + #[no_mangle] pub extern "C" fn capable_rt_args_len(_sys: Handle) -> i32 { if !has_handle(&ARGS_CAPS, _sys, "args table") { @@ -1212,10 +1203,7 @@ pub extern "C" fn capable_rt_read_stdin_to_string( } #[no_mangle] -pub extern "C" fn capable_rt_string_eq( - left: *const CapString, - right: *const CapString, -) -> i8 { +pub extern "C" fn capable_rt_string_eq(left: *const CapString, right: *const CapString) -> i8 { let left_slice = unsafe { if left.is_null() { CapSlice { diff --git a/stdlib/sys/args.cap b/stdlib/sys/args.cap index 5661c32..f697deb 100644 --- a/stdlib/sys/args.cap +++ b/stdlib/sys/args.cap @@ -21,4 +21,9 @@ impl Args { pub fn at(self, index: i32) -> Result { return Err(ArgsErr::OutOfRange) } + + /// Get an argument by index, or return a fallback string. 
+ pub fn at_or(self, index: i32, fallback: string) -> string { + return self.at(index).unwrap_or(fallback) + } } diff --git a/stdlib/sys/buffer.cap b/stdlib/sys/buffer.cap index efc082e..40a35dd 100644 --- a/stdlib/sys/buffer.cap +++ b/stdlib/sys/buffer.cap @@ -47,6 +47,31 @@ pub fn copy_slice(alloc: Alloc, data: Slice) -> Result, AllocErr> return Ok(alloc.slice_from_ptr(raw, len)) } +/// Obtain the process default allocator. +pub fn default_alloc() -> Alloc { + return () +} + +/// Create a new Vec with the process default allocator. +pub fn vec_new_default() -> vec::Vec { + return vec::new_with(default_alloc()) +} + +/// Create a Vec with capacity using the process default allocator. +pub fn vec_with_capacity_default(capacity: i32) -> Result, AllocErr> { + return vec::with_capacity(default_alloc(), capacity) +} + +/// Allocate a new empty Text using the process default allocator. +pub fn text_new_default() -> string::Text { + return string::text_new(default_alloc()) +} + +/// Copy a string view into a new Text using the process default allocator. +pub fn text_from_default(s: string) -> Result { + return string::text_from(default_alloc(), s) +} + impl Alloc { /// Allocate raw bytes. pub fn malloc(self, size: i32) -> *u8 { diff --git a/stdlib/sys/fs.cap b/stdlib/sys/fs.cap index 6423d1f..1fbf2ec 100644 --- a/stdlib/sys/fs.cap +++ b/stdlib/sys/fs.cap @@ -23,16 +23,31 @@ impl ReadFS { return () } + /// Read an entire file into a string using the process default allocator. + pub fn read_to_string_default(self: &ReadFS, path: string) -> Result { + return self.read_to_string(buffer::default_alloc(), path) + } + /// Read an entire file into a byte vec. pub fn read_bytes(self: &ReadFS, alloc: buffer::Alloc, path: string) -> Result, FsErr> { return Err(FsErr::IoError) } + /// Read an entire file into a byte vec using the process default allocator. 
+ pub fn read_bytes_default(self: &ReadFS, path: string) -> Result, FsErr> { + return self.read_bytes(buffer::default_alloc(), path) + } + /// List directory contents as strings. pub fn list_dir(self: &ReadFS, alloc: buffer::Alloc, path: string) -> Result, FsErr> { return Err(FsErr::IoError) } + /// List directory contents using the process default allocator. + pub fn list_dir_default(self: &ReadFS, path: string) -> Result, FsErr> { + return self.list_dir(buffer::default_alloc(), path) + } + /// True if a path exists. pub fn exists(self: &ReadFS, path: string) -> bool { return false @@ -72,11 +87,21 @@ impl Dir { return Err(FsErr::IoError) } + /// Read a file into a byte vec using the process default allocator. + pub fn read_bytes_default(self: &Dir, name: string) -> Result, FsErr> { + return self.read_bytes(buffer::default_alloc(), name) + } + /// List directory contents. pub fn list_dir(self: &Dir, alloc: buffer::Alloc) -> Result, FsErr> { return Err(FsErr::IoError) } + /// List directory contents using the process default allocator. + pub fn list_dir_default(self: &Dir) -> Result, FsErr> { + return self.list_dir(buffer::default_alloc()) + } + /// True if a path exists. pub fn exists(self: &Dir, name: string) -> bool { return false @@ -87,6 +112,11 @@ impl Dir { return () } + /// Read a file into a string using the process default allocator. + pub fn read_to_string_default(self: &Dir, name: string) -> Result { + return self.read_to_string(buffer::default_alloc(), name) + } + /// Close the capability. pub fn close(self) -> unit { return () @@ -99,6 +129,11 @@ impl FileRead { return () } + /// Read the file into a string using the process default allocator. + pub fn read_to_string_default(self) -> Result { + return self.read_to_string(buffer::default_alloc()) + } + /// Close the file. 
pub fn close(self) -> unit { return () @@ -109,3 +144,8 @@ impl FileRead { pub fn join(alloc: buffer::Alloc, a: string, b: string) -> string { return "" } + +/// Join two path segments using the process default allocator. +pub fn join_default(a: string, b: string) -> string { + return join(buffer::default_alloc(), a, b) +} diff --git a/stdlib/sys/net.cap b/stdlib/sys/net.cap index 57d67f1..35861a9 100644 --- a/stdlib/sys/net.cap +++ b/stdlib/sys/net.cap @@ -50,11 +50,21 @@ impl TcpConn { return Err(NetErr::IoError) } + /// Read all available data using the process default allocator. + pub fn read_to_string_default(self: &TcpConn) -> Result { + return self.read_to_string(buffer::default_alloc()) + } + /// Read up to max_size bytes into a string. pub fn read(self: &TcpConn, alloc: buffer::Alloc, max_size: i32) -> Result { return Err(NetErr::IoError) } + /// Read up to max_size bytes using the process default allocator. + pub fn read_default(self: &TcpConn, max_size: i32) -> Result { + return self.read(buffer::default_alloc(), max_size) + } + /// Write a string to the connection. pub fn write(self: &TcpConn, data: string) -> Result { return Err(NetErr::IoError) diff --git a/stdlib/sys/path.cap b/stdlib/sys/path.cap new file mode 100644 index 0000000..23e76c5 --- /dev/null +++ b/stdlib/sys/path.cap @@ -0,0 +1,136 @@ +/// Path helpers for ordinary string manipulation. +package safe +module sys::path + +use sys::buffer +use sys::fs +use sys::vec + +fn trim_trailing_slashes(raw_path: string) -> string { + let end = raw_path.len() + while (end > 0 && raw_path.byte_at(end - 1) == '/') { + end = end - 1 + } + if (end == raw_path.len()) { + return raw_path + } + match (raw_path.slice_range(0, end)) { + Ok(out) => { return out } + Err(_) => { panic() } + } +} + +/// Clean a user-supplied relative path and reject traversal. +/// Empty paths become `index.html`. 
+pub fn clean_relative_with(alloc: buffer::Alloc, raw_path: string) -> Result { + let out = alloc.text_new() + let rest = raw_path + while (true) { + let seg = "" + let done = false + match (rest.split_once_view('/')) { + Ok(parts) => { + seg = parts.left + rest = parts.right + } + Err(_) => { + seg = rest + done = true + } + } + if (seg == "..") { + out.free(alloc) + return Err(()) + } + if (seg.len() > 0 && seg != ".") { + if (!out.is_empty()) { + match (out.push_byte('/')) { + Ok(_) => { } + Err(_) => { + out.free(alloc) + return Err(()) + } + } + } + match (out.push_str(seg)) { + Ok(_) => { } + Err(_) => { + out.free(alloc) + return Err(()) + } + } + } + if (done) { + break + } + } + if (out.is_empty()) { + out.free(alloc) + return Ok("index.html") + } + let result = out.to_string() + out.free(alloc) + match (result) { + Ok(path) => { return Ok(path) } + Err(_) => { return Err(()) } + } +} + +/// Clean a user-supplied relative path with the process default allocator. +pub fn clean_relative(raw_path: string) -> Result { + return clean_relative_with(buffer::default_alloc(), raw_path) +} + +/// Split a path into slash-delimited segments without copying. +pub fn segments_view_with(alloc: buffer::Alloc, raw_path: string) -> vec::Vec { + return raw_path.split_view(alloc, '/') +} + +/// Split a path into slash-delimited segments without copying. +pub fn segments_view(raw_path: string) -> vec::Vec { + return raw_path.split_view_default('/') +} + +/// Return the basename of a path as a view. +pub fn basename_view(raw_path: string) -> string { + let path = trim_trailing_slashes(raw_path) + if (path.is_empty()) { + return path + } + match (path.last_index_of_byte('/')) { + Ok(i) => { + match (path.slice_range(i + 1, path.len())) { + Ok(out) => { return out } + Err(_) => { panic() } + } + } + Err(_) => { return path } + } +} + +/// Return the dirname of a path as a view. 
+pub fn dirname_view(raw_path: string) -> string { + let path = trim_trailing_slashes(raw_path) + match (path.last_index_of_byte('/')) { + Ok(i) => { + if (i == 0) { + return "" + } + match (path.slice_range(0, i)) { + Ok(out) => { return out } + Err(_) => { panic() } + } + } + Err(_) => { return "" } + } +} + +/// Join two path segments using the process default allocator. +pub fn join(a: string, b: string) -> string { + return fs::join_default(a, b) +} + +/// Join two path segments using an explicit allocator. +pub fn join_with(alloc: buffer::Alloc, a: string, b: string) -> string { + return fs::join(alloc, a, b) +} diff --git a/stdlib/sys/stdin.cap b/stdlib/sys/stdin.cap index 049d149..f129565 100644 --- a/stdlib/sys/stdin.cap +++ b/stdlib/sys/stdin.cap @@ -13,4 +13,9 @@ impl Stdin { pub fn read_to_string(self: &Stdin, alloc: buffer::Alloc) -> Result { return Err(io::IoErr::IoError) } + + /// Read stdin into a string using the process default allocator. + pub fn read_to_string_default(self: &Stdin) -> Result { + return self.read_to_string(buffer::default_alloc()) + } } diff --git a/stdlib/sys/string.cap b/stdlib/sys/string.cap index 392f7ea..8e51f9a 100644 --- a/stdlib/sys/string.cap +++ b/stdlib/sys/string.cap @@ -222,6 +222,12 @@ impl Text { return () } + /// Free this Text using the allocator stored in its backing Vec. + pub fn free_default(self) -> unit { + self.bytes.free() + return () + } + /// Remove all bytes from this Text. pub fn clear(self) -> unit { self.bytes.clear() @@ -254,6 +260,11 @@ impl string { return text_from(alloc, self) } + /// Copy this string view into a new owned Text using the process default allocator. + pub fn to_text_default(self) -> Result { + return text_from(buffer::default_alloc(), self) + } + /// Concatenate another string into a new owned string view. 
pub fn concat(self, alloc: buffer::Alloc, other: string) -> Result { let out = text_new(alloc) @@ -262,6 +273,11 @@ impl string { return out.to_string() } + /// Concatenate another string using the process default allocator. + pub fn concat_default(self, other: string) -> Result { + return self.concat(buffer::default_alloc(), other) + } + /// Index into the string by byte. pub fn byte_at(self, index: i32) -> u8 { return self.bytes.at(index) @@ -303,6 +319,11 @@ impl string { return out } + /// Split on ASCII whitespace using the process default allocator. + pub fn split_whitespace_default(self) -> Vec { + return self.split_whitespace(buffer::default_alloc()) + } + /// Split on ASCII whitespace without copying the pieces. pub fn split_whitespace_view(self, alloc: buffer::Alloc) -> Vec { let out = alloc.vec_string_new() @@ -329,6 +350,11 @@ impl string { return out } + /// Split on ASCII whitespace into views using the process default allocator. + pub fn split_whitespace_view_default(self) -> Vec { + return self.split_whitespace_view(buffer::default_alloc()) + } + pub fn lines(self, alloc: buffer::Alloc) -> Vec { let out = alloc.vec_string_new() let bytes = self.as_slice() @@ -364,6 +390,11 @@ impl string { return out } + /// Split into lines using the process default allocator. + pub fn lines_default(self) -> Vec { + return self.lines(buffer::default_alloc()) + } + /// Split into line views without copying the pieces. pub fn lines_view(self, alloc: buffer::Alloc) -> Vec { let out = alloc.vec_string_new() @@ -400,6 +431,11 @@ impl string { return out } + /// Split into line views using the process default allocator. + pub fn lines_view_default(self) -> Vec { + return self.lines_view(buffer::default_alloc()) + } + pub fn split(self, alloc: buffer::Alloc, delim: u8) -> Vec { let out = alloc.vec_string_new() let bytes = self.as_slice() @@ -425,6 +461,42 @@ impl string { return out } + /// Split on a delimiter using the process default allocator. 
+ pub fn split_default(self, delim: u8) -> Vec { + return self.split(buffer::default_alloc(), delim) + } + + /// Split on a delimiter without copying the pieces. + pub fn split_view(self, alloc: buffer::Alloc, delim: u8) -> Vec { + let out = alloc.vec_string_new() + let bytes = self.as_slice() + let len = bytes.len() + let start = 0 + let i = 0 + while (i < len) { + if (bytes.at(i) == delim) { + let part = view_range(self, start, i) + match (out.push(part)) { + Ok(_) => { } + Err(_) => { panic() } + } + start = i + 1 + } + i = i + 1 + } + let part = view_range(self, start, len) + match (out.push(part)) { + Ok(_) => { } + Err(_) => { panic() } + } + return out + } + + /// Split on a delimiter into views using the process default allocator. + pub fn split_view_default(self, delim: u8) -> Vec { + return self.split_view(buffer::default_alloc(), delim) + } + /// Split once on the first matching delimiter. pub fn split_once(self, alloc: buffer::Alloc, delim: u8) -> Result { let bytes = self.as_slice() @@ -444,6 +516,11 @@ impl string { return Err(()) } + /// Split once on the first matching delimiter using the process default allocator. + pub fn split_once_default(self, delim: u8) -> Result { + return self.split_once(buffer::default_alloc(), delim) + } + /// Split once on the first matching delimiter without copying. pub fn split_once_view(self, delim: u8) -> Result { let bytes = self.as_slice() @@ -510,6 +587,11 @@ impl string { return start_trimmed.trim_end(alloc) } + /// Trim ASCII whitespace from both ends using the process default allocator. + pub fn trim_default(self) -> string { + return self.trim(buffer::default_alloc()) + } + /// Trim ASCII whitespace from the start. pub fn trim_start(self, alloc: buffer::Alloc) -> string { let bytes = self.as_slice() @@ -527,6 +609,11 @@ impl string { return build_range(alloc, self, i, len) } + /// Trim ASCII whitespace from the start using the process default allocator. 
+ pub fn trim_start_default(self) -> string { + return self.trim_start(buffer::default_alloc()) + } + /// Trim ASCII whitespace from the end. pub fn trim_end(self, alloc: buffer::Alloc) -> string { let bytes = self.as_slice() @@ -550,6 +637,11 @@ impl string { return build_range(alloc, self, 0, i) } + /// Trim ASCII whitespace from the end using the process default allocator. + pub fn trim_end_default(self) -> string { + return self.trim_end(buffer::default_alloc()) + } + /// Remove a leading prefix if present. pub fn trim_prefix(self, alloc: buffer::Alloc, prefix: string) -> string { if (self.starts_with(prefix)) { @@ -558,6 +650,11 @@ impl string { return self } + /// Remove a leading prefix using the process default allocator. + pub fn trim_prefix_default(self, prefix: string) -> string { + return self.trim_prefix(buffer::default_alloc(), prefix) + } + /// Remove a trailing suffix if present. pub fn trim_suffix(self, alloc: buffer::Alloc, suffix: string) -> string { if (self.ends_with(suffix)) { @@ -566,11 +663,21 @@ impl string { return self } + /// Remove a trailing suffix using the process default allocator. + pub fn trim_suffix_default(self, suffix: string) -> string { + return self.trim_suffix(buffer::default_alloc(), suffix) + } + /// split_lines() is an alias for lines(). pub fn split_lines(self, alloc: buffer::Alloc) -> Vec { return self.lines(alloc) } + /// split_lines() using the process default allocator. + pub fn split_lines_default(self) -> Vec { + return self.split_lines(buffer::default_alloc()) + } + /// True if the string starts with the prefix. pub fn starts_with(self, prefix: string) -> bool { let self_len = self.len() @@ -843,6 +950,11 @@ impl string { } } + /// Lowercase ASCII letters using the process default allocator. + pub fn to_lower_ascii_default(self) -> string { + return self.to_lower_ascii(buffer::default_alloc()) + } + /// Uppercase ASCII letters. 
pub fn to_upper_ascii(self, alloc: buffer::Alloc) -> string { let bytes = self.as_slice() @@ -864,11 +976,21 @@ impl string { } } + /// Uppercase ASCII letters using the process default allocator. + pub fn to_upper_ascii_default(self) -> string { + return self.to_upper_ascii(buffer::default_alloc()) + } + /// Trim ASCII whitespace (alias of trim()). pub fn trim_ascii(self, alloc: buffer::Alloc) -> string { return self.trim(alloc) } + /// Trim ASCII whitespace using the process default allocator. + pub fn trim_ascii_default(self) -> string { + return self.trim_ascii(buffer::default_alloc()) + } + /// Alias for index_of_byte. pub fn find_byte(self, needle: u8) -> Result { return self.index_of_byte(needle) diff --git a/tests/programs/args_safe.cap b/tests/programs/args_safe.cap index 45cd876..fe4eeaa 100644 --- a/tests/programs/args_safe.cap +++ b/tests/programs/args_safe.cap @@ -5,17 +5,13 @@ use sys::system pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let args = rc.mint_args() - let n = args.len() - c.assert(n >= 1) - if (n < 1) { - c.println("args bad") - return 1 - } - let s = args.at(0).ok() + let s = args.at_or(0, "") let m = s.len() c.assert(m >= 0) - if (m >= 0) { - c.println("args ok") + if (m < 0) { + c.println("args bad") + return 1 } + c.println("args ok") return 0 } diff --git a/tests/programs/fs_helpers.cap b/tests/programs/fs_helpers.cap index 1a2a1f6..f2a3ee7 100644 --- a/tests/programs/fs_helpers.cap +++ b/tests/programs/fs_helpers.cap @@ -3,11 +3,10 @@ module fs_helpers use sys::system use sys::fs -use sys::buffer +use sys::path pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() - let alloc = rc.mint_alloc_default() let rfs = rc.mint_readfs("./config") if !rfs.exists("app.txt") { @@ -16,10 +15,10 @@ pub fn main(rc: RootCap) -> i32 { } let rfs2 = rc.mint_readfs("./config") - match rfs2.read_bytes(alloc, "app.txt") { + match rfs2.read_bytes_default("app.txt") { Ok(bytes) => { c.assert(bytes.len() > 0) - alloc.vec_u8_free(bytes) 
+ bytes.free() } Err(_) => { c.println("read_bytes failed") @@ -28,10 +27,10 @@ pub fn main(rc: RootCap) -> i32 { } let rfs3 = rc.mint_readfs("./config") - match rfs3.list_dir(alloc, ".") { + match rfs3.list_dir_default(".") { Ok(entries) => { c.assert(entries.len() > 0) - alloc.vec_string_free(entries) + entries.free() } Err(_) => { c.println("list_dir failed") @@ -39,7 +38,7 @@ pub fn main(rc: RootCap) -> i32 { } } - let joined = fs::join(alloc, "config", "app.txt") + let joined = path::join("config", "app.txt") if !joined.starts_with("config") { c.println("join failed") return 1 diff --git a/tests/programs/let_else.cap b/tests/programs/let_else.cap new file mode 100644 index 0000000..a4abddc --- /dev/null +++ b/tests/programs/let_else.cap @@ -0,0 +1,51 @@ +package safe +module let_else +use sys::system + +enum Maybe { + Some(i32), + None +} + +fn from_flag(flag: bool) -> Maybe { + if (flag) { + return Maybe::Some(7) + } + return Maybe::None +} + +fn parse(flag: bool) -> Result { + if (flag) { + return Ok(11) + } + return Err("bad") +} + +pub fn main(rc: RootCap) -> i32 { + let c = rc.mint_console() + + let Ok(n) = parse(true) else { + c.assert(false) + return 1 + } + c.assert(n == 11) + + let Maybe::Some(v) = from_flag(true) else { + c.assert(false) + return 1 + } + c.assert(v == 7) + + let fallback = match parse(false) { + Ok(found) => { found } + Err(_) => { + let Ok(found) = parse(true) else { + return 2 + } + found + } + } + c.assert(fallback == 11) + c.println("let else ok") + return 0 +} diff --git a/tests/programs/path_helpers.cap b/tests/programs/path_helpers.cap new file mode 100644 index 0000000..d5701bd --- /dev/null +++ b/tests/programs/path_helpers.cap @@ -0,0 +1,31 @@ +package safe +module path_helpers + +use sys::system +use sys::path + +pub fn main(rc: RootCap) -> i32 { + let c = rc.mint_console() + + match (path::clean_relative("/site/./docs//index.html")) { + Ok(clean) => { c.assert(clean.eq("site/docs/index.html")) } + Err(_) => { 
c.assert(false); return 1 } + } + c.assert(path::clean_relative("../etc/passwd").is_err()) + + let parts = path::segments_view("site/docs/index.html") + c.assert(parts.len() == 3) + c.assert(parts.get_or(0, "").eq("site")) + c.assert(parts.get_or(1, "").eq("docs")) + c.assert(parts.get_or(2, "").eq("index.html")) + parts.free() + + c.assert(path::basename_view("site/docs/index.html").eq("index.html")) + c.assert(path::dirname_view("site/docs/index.html").eq("site/docs")) + + let joined = path::join("site/docs", "index.html") + c.assert(joined.eq("site/docs/index.html")) + + c.println("path ok") + return 0 +} diff --git a/tests/programs/string_helpers.cap b/tests/programs/string_helpers.cap index 28a779e..77a1ab6 100644 --- a/tests/programs/string_helpers.cap +++ b/tests/programs/string_helpers.cap @@ -1,6 +1,7 @@ package safe module string_helpers use sys::system +use sys::buffer pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() @@ -11,6 +12,7 @@ pub fn main(rc: RootCap) -> i32 { let b = buf.at(0) let words = "a b c".split_whitespace(alloc) let word_views = "a b c".split_whitespace_view(alloc) + let split_views = "a,b,c".split_view_default(',') let count = words.len() let trimmed = " hi \n".trim(alloc) let trimmed_view = " hi \n".trim_view() @@ -19,13 +21,21 @@ pub fn main(rc: RootCap) -> i32 { let trimmed_end = " hi ".trim_end(alloc) let trimmed_end_view = " hi ".trim_end_view() let trimmed_ascii = " \tHi\n".trim_ascii(alloc) + let trimmed_ascii_default = " \tHi\n".trim_ascii_default() let lower = "AbC".to_lower_ascii(alloc) + let lower_default = "AbC".to_lower_ascii_default() let upper = "AbC".to_upper_ascii(alloc) + let upper_default = "AbC".to_upper_ascii_default() let sliced = "hello".slice_range(1, 4) let lines = "a\nb\n".split_lines(alloc) let line_views = "a\r\nb\n".lines_view(alloc) + let t = buffer::text_new_default() + t.push_str("hi").ok() + c.assert(t.to_string().ok().eq("hi")) + t.free_default() c.assert(n == 3 && b == 'a' && count == 3) 
c.assert(word_views.len() == 3) + c.assert(split_views.len() == 3) c.assert(trimmed.len() == 2) c.assert(trimmed_view.eq("hi")) c.assert(trimmed.starts_with_byte('h')) @@ -35,8 +45,11 @@ pub fn main(rc: RootCap) -> i32 { c.assert(trimmed_end.ends_with("hi")) c.assert(trimmed_end_view.ends_with("hi")) c.assert(trimmed_ascii.eq("Hi")) + c.assert(trimmed_ascii_default.eq("Hi")) c.assert(lower.eq("abc")) + c.assert(lower_default.eq("abc")) c.assert(upper.eq("ABC")) + c.assert(upper_default.eq("ABC")) c.assert("abc".starts_with_byte('a')) c.assert("abc".ends_with_byte('c')) c.assert("".is_empty()) @@ -113,6 +126,7 @@ pub fn main(rc: RootCap) -> i32 { } alloc.vec_string_free(words) alloc.vec_string_free(word_views) + split_views.free() alloc.vec_string_free(lines) alloc.vec_string_free(line_views) alloc.vec_string_free(pieces) diff --git a/tests/programs/vec_search_helpers.cap b/tests/programs/vec_search_helpers.cap index e85953b..ad72fac 100644 --- a/tests/programs/vec_search_helpers.cap +++ b/tests/programs/vec_search_helpers.cap @@ -1,6 +1,7 @@ package safe module vec_search_helpers use sys::system +use sys::buffer pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() @@ -35,12 +36,12 @@ pub fn main(rc: RootCap) -> i32 { } c.assert(v.get_or(1, 99) == 1) c.assert(v.get_or(99, 77) == 77) - let bytes = alloc.vec_u8_new() + let bytes = buffer::vec_new_default() bytes.push('h').ok() bytes.push('i').ok() c.assert(bytes.to_string().ok().eq("hi")) alloc.vec_i32_free(v) - alloc.vec_u8_free(bytes) + bytes.free() c.println("vec search ok") return 0 } diff --git a/tests/programs/wc_file.cap b/tests/programs/wc_file.cap index 983dfe1..c2baa6d 100644 --- a/tests/programs/wc_file.cap +++ b/tests/programs/wc_file.cap @@ -2,7 +2,6 @@ package safe module wc_file use sys::system use sys::console -use sys::buffer fn count_text(c: Console, s: string) -> i32 { let bytes = s.len() @@ -21,16 +20,12 @@ fn count_text(c: Console, s: string) -> i32 { pub fn main(rc: RootCap) -> i32 { let c 
= rc.mint_console() let args = rc.mint_args() - - if (args.len() < 2) { + let Ok(path) = args.at(1) else { c.println("usage: wc ") return 1 } - - let path = args.at(1).ok() - let alloc = rc.mint_alloc_default() let rfs = rc.mint_readfs("./") - let code = match rfs.read_to_string(alloc, path) { + let code = match rfs.read_to_string_default(path) { Ok(s) => { count_text(c, s) } diff --git a/tests/programs/wc_stdin.cap b/tests/programs/wc_stdin.cap index 892544f..6b5d061 100644 --- a/tests/programs/wc_stdin.cap +++ b/tests/programs/wc_stdin.cap @@ -5,8 +5,7 @@ use sys::system pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let stdin = rc.mint_stdin() - let alloc = rc.mint_alloc_default() - let code = match stdin.read_to_string(alloc) { + let code = match stdin.read_to_string_default() { Ok(s) => { let bytes = s.len() let words = s.count_words_ascii() From ba51366facf4b9d3431ccd0af8b19430ed83374b Mon Sep 17 00:00:00 2001 From: Jordan Mecom Date: Mon, 23 Mar 2026 11:40:02 -0700 Subject: [PATCH 03/17] Adopt expr else across examples --- capc/src/parser.rs | 58 +++++++++++++++++++++ capc/tests/run.rs | 27 ++++++++++ capc/tests/typecheck.rs | 16 ++++++ examples/config_loader/config_loader.cap | 11 ++-- examples/hashmap_demo/hashmap_demo.cap | 66 ++++++++++++------------ examples/how_to_string/how_to_string.cap | 12 +++-- examples/http_server/http_server.cap | 26 +++++----- examples/sort/sort.cap | 11 ++-- examples/uniq/uniq.cap | 3 +- tests/programs/expr_else.cap | 37 +++++++++++++ tests/programs/expr_else_break.cap | 29 +++++++++++ 11 files changed, 236 insertions(+), 60 deletions(-) create mode 100644 tests/programs/expr_else.cap create mode 100644 tests/programs/expr_else_break.cap diff --git a/capc/src/parser.rs b/capc/src/parser.rs index 5342511..490ef46 100644 --- a/capc/src/parser.rs +++ b/capc/src/parser.rs @@ -861,6 +861,22 @@ impl Parser { fn parse_expr_stmt(&mut self) -> Result { let expr = self.parse_expr()?; + let expr = if self.peek_kind() == 
Some(TokenKind::Else) { + self.bump(); + let err_binding = if self.peek_kind() == Some(TokenKind::Ident) + && self + .peek_token(1) + .is_some_and(|t| t.kind == TokenKind::LBrace) + { + Some(self.expect_ident()?) + } else { + None + }; + let else_block = self.parse_block()?; + self.desugar_expr_else(expr, err_binding, else_block) + } else { + expr + }; let expr_span = expr.span(); let end = self .maybe_consume(TokenKind::Semi) @@ -1445,6 +1461,48 @@ impl Parser { } } + fn desugar_expr_else(&self, expr: Expr, err_binding: Option, else_block: Block) -> Expr { + let expr_span = expr.span(); + let ok_ident = Spanned::new("Ok".to_string(), expr_span); + let err_ident = Spanned::new("Err".to_string(), else_block.span); + let span = Span::new(expr_span.start, else_block.span.end); + + Expr::Match(MatchExpr { + expr: Box::new(expr), + arms: vec![ + MatchArm { + pattern: Pattern::Call { + path: Path { + segments: vec![ok_ident], + span: expr_span, + }, + binding: None, + span: expr_span, + }, + body: Block { + stmts: Vec::new(), + span: expr_span, + }, + span, + }, + MatchArm { + pattern: Pattern::Call { + path: Path { + segments: vec![err_ident], + span: else_block.span, + }, + binding: err_binding, + span: else_block.span, + }, + body: else_block, + span, + }, + ], + span, + match_span: expr_span, + }) + } + fn parse_path(&mut self) -> Result { let first = self.expect_ident()?; let start = first.span.start; diff --git a/capc/tests/run.rs b/capc/tests/run.rs index 714a6be..cd6d9bc 100644 --- a/capc/tests/run.rs +++ b/capc/tests/run.rs @@ -136,6 +136,33 @@ fn run_let_else() { assert!(stdout.contains("let else ok"), "stdout was: {stdout:?}"); } +#[test] +fn run_expr_else() { + let out_dir = make_out_dir("expr_else"); + let out_dir = out_dir.to_str().expect("utf8 out dir"); + let (code, stdout, _stderr) = + run_capc(&["run", "--out-dir", out_dir, "tests/programs/expr_else.cap"]); + assert_eq!(code, 0); + assert!(stdout.contains("expr else ok"), "stdout was: {stdout:?}"); +} + 
+#[test] +fn run_expr_else_break() { + let out_dir = make_out_dir("expr_else_break"); + let out_dir = out_dir.to_str().expect("utf8 out dir"); + let (code, stdout, _stderr) = run_capc(&[ + "run", + "--out-dir", + out_dir, + "tests/programs/expr_else_break.cap", + ]); + assert_eq!(code, 0); + assert!( + stdout.contains("expr else break ok"), + "stdout was: {stdout:?}" + ); +} + #[test] fn run_path_helpers() { let out_dir = make_out_dir("path_helpers"); diff --git a/capc/tests/typecheck.rs b/capc/tests/typecheck.rs index 121e916..ffddeb8 100644 --- a/capc/tests/typecheck.rs +++ b/capc/tests/typecheck.rs @@ -60,6 +60,22 @@ fn typecheck_let_else_ok() { type_check_program(&module, &stdlib, &[]).expect("typecheck module"); } +#[test] +fn typecheck_expr_else_ok() { + let source = load_program("expr_else.cap"); + let module = parse_module(&source).expect("parse module"); + let stdlib = load_stdlib().expect("load stdlib"); + type_check_program(&module, &stdlib, &[]).expect("typecheck module"); +} + +#[test] +fn typecheck_expr_else_break_ok() { + let source = load_program("expr_else_break.cap"); + let module = parse_module(&source).expect("parse module"); + let stdlib = load_stdlib().expect("load stdlib"); + type_check_program(&module, &stdlib, &[]).expect("typecheck module"); +} + #[test] fn typecheck_path_helpers_ok() { let source = load_program("path_helpers.cap"); diff --git a/examples/config_loader/config_loader.cap b/examples/config_loader/config_loader.cap index 2a1fa6f..d8cba6d 100644 --- a/examples/config_loader/config_loader.cap +++ b/examples/config_loader/config_loader.cap @@ -52,11 +52,10 @@ fn run(c: Console, fs: ReadFS) -> Result { pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let fs = rc.mint_readfs("examples/config_loader") - let result = run(c, fs) - if (result.is_ok()) { - c.println("config ok") - return 0 + run(c, fs) else { + c.println("config read failed") + return 1 } - c.println("config read failed") - return 1 + c.println("config ok") + 
return 0 } diff --git a/examples/hashmap_demo/hashmap_demo.cap b/examples/hashmap_demo/hashmap_demo.cap index f70e20a..964a9f1 100644 --- a/examples/hashmap_demo/hashmap_demo.cap +++ b/examples/hashmap_demo/hashmap_demo.cap @@ -133,7 +133,9 @@ pub fn hashmap_new(alloc: buffer::Alloc, initial_capacity: i32) -> HashMap { // Initialize all entries as empty for i in 0..cap { - entries.push(empty_entry()).ok() + entries.push(empty_entry()) else { + panic() + } } return HashMap { @@ -226,7 +228,9 @@ fn hashmap_rehash(map: HashMap, new_cap: i32) -> HashMap { // Create new entries vector let new_entries = alloc.vec_new() for i in 0..new_cap { - new_entries.push(empty_entry()).ok() + new_entries.push(empty_entry()) else { + panic() + } } let new_size = 0 @@ -246,9 +250,8 @@ fn hashmap_rehash(map: HashMap, new_cap: i32) -> HashMap { if (is_empty(slot.state)) { // Found empty slot, insert here let new_entry = Entry { state: occupied_state(), key: entry.key, value: entry.value } - match (new_entries.set(index, new_entry)) { - Ok(_) => {} - Err(_) => { panic() } + new_entries.set(index, new_entry) else { + panic() } new_size = new_size + 1 break @@ -326,9 +329,8 @@ pub fn hashmap_insert(map: HashMap, key: i32, value: i32) -> Result {} - Err(_) => { return Err(()) } + m.entries.set(index, new_entry) else { + return Err(()) } return Ok(m) } @@ -350,9 +352,8 @@ pub fn hashmap_insert(map: HashMap, key: i32, value: i32) -> Result {} - Err(_) => { return Err(()) } + m.entries.set(first_available, new_entry) else { + return Err(()) } // Update counts: if we reused a tombstone, decrement deleted count @@ -390,9 +391,8 @@ pub fn hashmap_remove(map: HashMap, key: i32) -> Result { if (entry.key == key) { // Mark as deleted let deleted_entry = Entry { state: deleted_state(), key: entry.key, value: entry.value } - match (map.entries.set(index, deleted_entry)) { - Ok(_) => {} - Err(_) => { return Err(()) } + map.entries.set(index, deleted_entry) else { + return Err(()) } return Ok(HashMap { 
entries: map.entries, @@ -444,30 +444,30 @@ fn run_demo(c: Console, alloc: buffer::Alloc) -> unit { let test_vals = alloc.vec_i32_new() // key -> key * 10 - test_keys.push(42).ok() - test_vals.push(420).ok() + test_keys.push(42) else { panic() } + test_vals.push(420) else { panic() } - test_keys.push(17).ok() - test_vals.push(170).ok() + test_keys.push(17) else { panic() } + test_vals.push(170) else { panic() } - test_keys.push(99).ok() - test_vals.push(990).ok() + test_keys.push(99) else { panic() } + test_vals.push(990) else { panic() } - test_keys.push(1).ok() - test_vals.push(10).ok() + test_keys.push(1) else { panic() } + test_vals.push(10) else { panic() } - test_keys.push(256).ok() - test_vals.push(2560).ok() + test_keys.push(256) else { panic() } + test_vals.push(2560) else { panic() } // Keys that will likely collide - test_keys.push(0).ok() - test_vals.push(0).ok() + test_keys.push(0) else { panic() } + test_vals.push(0) else { panic() } - test_keys.push(16).ok() - test_vals.push(160).ok() + test_keys.push(16) else { panic() } + test_vals.push(160) else { panic() } - test_keys.push(32).ok() - test_vals.push(320).ok() + test_keys.push(32) else { panic() } + test_vals.push(320) else { panic() } // Insert all entries let num_entries = test_keys.len() @@ -540,9 +540,9 @@ fn run_demo(c: Console, alloc: buffer::Alloc) -> unit { c.println("Looking up non-existent keys...") let missing = alloc.vec_i32_new() - missing.push(100).ok() - missing.push(999).ok() - missing.push(12345).ok() + missing.push(100) else { panic() } + missing.push(999) else { panic() } + missing.push(12345) else { panic() } let num_missing = missing.len() for i in 0..num_missing { diff --git a/examples/how_to_string/how_to_string.cap b/examples/how_to_string/how_to_string.cap index f6d4dff..e6a9669 100644 --- a/examples/how_to_string/how_to_string.cap +++ b/examples/how_to_string/how_to_string.cap @@ -31,9 +31,15 @@ fn demo_string_view(c: Console) -> unit { fn demo_text_builder(c: Console) -> 
unit { c.println("-- Text builder --") let t = buffer::text_new_default() - t.push_str("hello").ok() - t.push_byte(' ').ok() - t.append("text").ok() + t.push_str("hello") else { + panic() + } + t.push_byte(' ') else { + panic() + } + t.append("text") else { + panic() + } c.println(t.slice_range(0, 5).ok()) c.println(t.to_string().ok()) t.free_default() diff --git a/examples/http_server/http_server.cap b/examples/http_server/http_server.cap index 801946b..755332d 100644 --- a/examples/http_server/http_server.cap +++ b/examples/http_server/http_server.cap @@ -47,8 +47,12 @@ fn parse_request_path(req: string) -> Result { } fn respond_ok(conn: &TcpConn, body: string) -> Result { - conn.write("HTTP/1.0 200 OK\r\nContent-Type: text/plain\r\n\r\n")? - conn.write(body)? + conn.write("HTTP/1.0 200 OK\r\nContent-Type: text/plain\r\n\r\n") else err { + return Err(err) + } + conn.write(body) else err { + return Err(err) + } return Ok(()) } @@ -61,15 +65,13 @@ fn respond_bad_request(conn: &TcpConn) -> Result { } fn handle_request(conn: &TcpConn, readfs: &ReadFS, req: string) -> Result { - match (parse_request_path(req)) { - Ok(path) => { - match (readfs.read_to_string_default(path)) { - Ok(body) => { return respond_ok(conn, body) } - Err(_) => { return respond_not_found(conn) } - } - } - Err(_) => { return respond_bad_request(conn) } + let Ok(path) = parse_request_path(req) else { + return respond_bad_request(conn) + } + let Ok(body) = readfs.read_to_string_default(path) else { + return respond_not_found(conn) } + return respond_ok(conn, body) } fn serve_forever(c: Console, net: Net, rc: RootCap, root: string, port: i32) -> Result { @@ -109,9 +111,9 @@ pub fn main(rc: RootCap) -> i32 { let args = rc.mint_args() let root = args.at_or(1, "examples/http_server") let port = 8090 - let result = serve_forever(c, net, rc, root, port) - if (result.is_err()) { + serve_forever(c, net, rc, root, port) else { c.println("server error (is the port in use?)") + return 0 } return 0 } diff 
--git a/examples/sort/sort.cap b/examples/sort/sort.cap index 374db09..a2a94bb 100644 --- a/examples/sort/sort.cap +++ b/examples/sort/sort.cap @@ -50,7 +50,9 @@ fn sort_indices(lines: Vec, indices: Vec) -> unit { let curr_idx = indices.get_or(j, 0) let prev_idx = indices.get_or(j - 1, 0) if (line_lt(lines, curr_idx, prev_idx)) { - indices.swap(j, j - 1).ok() + indices.swap(j, j - 1) else { + panic() + } j = j - 1 } else { break @@ -67,7 +69,9 @@ fn run(c: Console, input: Stdin) -> Result { // Create index array [0, 1, 2, ...] let indices = buffer::vec_new_default() for i in 0..n { - indices.push(i).ok() + indices.push(i) else { + panic() + } } sort_indices(lines, indices) @@ -89,8 +93,7 @@ fn run(c: Console, input: Stdin) -> Result { pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let input = rc.mint_stdin() - let result = run(c, input) - if (result.is_err()) { + run(c, input) else { c.println("error reading input") return 1 } diff --git a/examples/uniq/uniq.cap b/examples/uniq/uniq.cap index ee8a50d..e04f39a 100644 --- a/examples/uniq/uniq.cap +++ b/examples/uniq/uniq.cap @@ -32,8 +32,7 @@ fn run(c: Console, input: Stdin) -> Result { pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let input = rc.mint_stdin() - let result = run(c, input) - if (result.is_err()) { + run(c, input) else { c.println("error reading input") return 1 } diff --git a/tests/programs/expr_else.cap b/tests/programs/expr_else.cap new file mode 100644 index 0000000..f99ef78 --- /dev/null +++ b/tests/programs/expr_else.cap @@ -0,0 +1,37 @@ +package safe +module expr_else +use sys::system + +fn write_value(ok: bool) -> Result { + if (ok) { + return Ok(()) + } + return Err(7) +} + +pub fn main(rc: RootCap) -> i32 { + let c = rc.mint_console() + + let total = 0 + for i in 0..4 { + write_value(i == 2) else { + continue + } + total = total + 1 + } + c.assert(total == 1) + + let saw_err = false + write_value(false) else err { + c.assert(err == 7) + saw_err = true + } + 
c.assert(saw_err) + + write_value(true) else { + return 1 + } + + c.println("expr else ok") + return 0 +} diff --git a/tests/programs/expr_else_break.cap b/tests/programs/expr_else_break.cap new file mode 100644 index 0000000..5ee01db --- /dev/null +++ b/tests/programs/expr_else_break.cap @@ -0,0 +1,29 @@ +package safe +module expr_else_break +use sys::system + +fn fail_at(i: i32) -> Result { + if (i < 3) { + return Ok(()) + } + return Err(i) +} + +pub fn main(rc: RootCap) -> i32 { + let c = rc.mint_console() + let count = 0 + let stop = 0 + + for i in 0..6 { + fail_at(i) else err { + stop = err + break + } + count = count + 1 + } + + c.assert(count == 3) + c.assert(stop == 3) + c.println("expr else break ok") + return 0 +} From 5c3798ca13b332aee85d77d060a4890a59cd899d Mon Sep 17 00:00:00 2001 From: Jordan Mecom Date: Mon, 23 Mar 2026 15:27:11 -0700 Subject: [PATCH 04/17] Refine result flow and capability rules --- FIXME-GENERICS.md | 104 -------- PLAN.md | 36 --- PROBLEMS.md | 99 ++++++++ REMOTE_CAPS_RFC.md | 5 +- TRAITS.md | 82 ------ UPDATE_PLAN.md | 206 --------------- capc/src/hir.rs | 9 +- capc/src/lexer.rs | 2 + capc/src/parser.rs | 224 ++++++++++++----- capc/src/typeck/collect.rs | 44 ++-- capc/src/typeck/lower.rs | 139 +++++----- capc/src/typeck/mod.rs | 237 +++++++++++------- capc/src/typeck/monomorphize.rs | 79 +++--- capc/tests/run.rs | 69 ++--- capc/tests/typecheck.rs | 88 +++++-- docs/ARCHITECTURE.md | 5 +- docs/ATTENUATION.md | 9 +- docs/POLICY.md | 25 +- docs/SAMPLES.md | 40 --- docs/TUTORIAL.md | 55 ++-- docs/caps.md | 20 +- examples/config_loader/config_loader.cap | 4 +- examples/hashmap_demo/hashmap_demo.cap | 50 ++-- examples/how_to_string/how_to_string.cap | 41 +-- examples/http_server/http_server.cap | 57 ++--- examples/sort/sort.cap | 6 +- examples/uniq/uniq.cap | 2 +- runtime/src/lib.rs | 2 +- stdlib/sys/args.cap | 5 +- stdlib/sys/fs.cap | 2 +- stdlib/sys/result.cap | 52 +--- tests/programs/expr_else.cap | 6 +- 
tests/programs/expr_else_break.cap | 2 +- tests/programs/expr_else_empty.cap | 12 + tests/programs/fs_open_read_reuse.cap | 29 +++ tests/programs/let_else.cap | 23 +- tests/programs/path_helpers.cap | 12 +- .../should_fail_expr_else_without_try.cap | 13 + tests/programs/should_fail_if_let.cap | 13 + .../should_fail_result_ok_removed.cap | 10 + .../should_fail_result_unwrap_or_mismatch.cap | 10 - .../should_fail_result_unwrap_or_removed.cap | 10 + .../programs/should_fail_try_let_pattern.cap | 18 ++ ...d_pass_capability_borrow_return_linear.cap | 27 ++ tests/programs/should_pass_if_let.cap | 41 --- .../should_pass_result_is_ok_is_err.cap | 31 --- tests/programs/should_pass_result_ok_err.cap | 33 --- .../should_pass_result_unwrap_err_or.cap | 18 -- .../programs/should_pass_result_unwrap_or.cap | 18 -- tests/programs/string_helpers.cap | 15 +- tests/programs/text_basic.cap | 14 +- tests/programs/text_helpers_more.cap | 52 +++- tests/programs/text_push_safe.cap | 16 +- tests/programs/text_safe.cap | 12 +- tests/programs/text_to_string.cap | 12 +- tests/programs/try_let.cap | 32 +++ tests/programs/vec_custom_eq.cap | 16 +- tests/programs/vec_search_helpers.cap | 30 ++- tests/programs/wc_file.cap | 2 +- 59 files changed, 1119 insertions(+), 1206 deletions(-) delete mode 100644 FIXME-GENERICS.md delete mode 100644 PLAN.md create mode 100644 PROBLEMS.md delete mode 100644 TRAITS.md delete mode 100644 UPDATE_PLAN.md delete mode 100644 docs/SAMPLES.md create mode 100644 tests/programs/expr_else_empty.cap create mode 100644 tests/programs/fs_open_read_reuse.cap create mode 100644 tests/programs/should_fail_expr_else_without_try.cap create mode 100644 tests/programs/should_fail_if_let.cap create mode 100644 tests/programs/should_fail_result_ok_removed.cap delete mode 100644 tests/programs/should_fail_result_unwrap_or_mismatch.cap create mode 100644 tests/programs/should_fail_result_unwrap_or_removed.cap create mode 100644 tests/programs/should_fail_try_let_pattern.cap create 
mode 100644 tests/programs/should_pass_capability_borrow_return_linear.cap delete mode 100644 tests/programs/should_pass_if_let.cap delete mode 100644 tests/programs/should_pass_result_is_ok_is_err.cap delete mode 100644 tests/programs/should_pass_result_ok_err.cap delete mode 100644 tests/programs/should_pass_result_unwrap_err_or.cap delete mode 100644 tests/programs/should_pass_result_unwrap_or.cap create mode 100644 tests/programs/try_let.cap diff --git a/FIXME-GENERICS.md b/FIXME-GENERICS.md deleted file mode 100644 index b06d9dc..0000000 --- a/FIXME-GENERICS.md +++ /dev/null @@ -1,104 +0,0 @@ -# Generic Collections: Progress and Remaining Work - -## Completed - -### Phase 1: Basic Vec for Custom Structs - -`Vec` now supports custom structs for basic operations: - -```rust -copy struct Entry { - key: i32, - value: i32 -} - -let entries = alloc.vec_new() // WORKS! -entries.push(Entry { key: 1, value: 10 }) -let e = entries.get(0) -``` - -**Working methods**: `push`, `pop`, `get`, `set`, `len`, `capacity`, `reserve`, `clear`, `free` - -**Changes made**: -- Removed hardcoded Vec element type restriction in `typeck/mod.rs` -- Fixed monomorphization to search all modules for user-defined types -- Created `stdlib/sys/eq.cap` with `Eq` trait and impls for primitives - -### Phase 2: Trait-Bounded Impl Blocks - -Fixed method name mangling and added trait-bounded impl blocks for Vec: - -```rust -use sys::eq - -copy struct Entry { key: i32, value: i32 } - -impl eq::Eq for Entry { - fn eq(self, other: Entry) -> bool { - return self.key == other.key && self.value == other.value - } -} - -let v = alloc.vec_new() -v.push(Entry { key: 1, value: 10 }) -v.push(Entry { key: 2, value: 20 }) - -// These now work with custom structs! 
-v.contains_eq(Entry { key: 1, value: 10 }) // true -v.count_eq(Entry { key: 1, value: 10 }) // 1 -v.index_of_eq(Entry { key: 2, value: 20 }) // Ok(1) -v.filter_eq(Entry { key: 1, value: 10 }) // Vec with matching entries -``` - -**New Eq-based methods** (in `impl Vec`): -- `contains_eq(value: T) -> bool` -- `count_eq(value: T) -> i32` -- `index_of_eq(value: T) -> Result` -- `last_index_of_eq(value: T) -> Result` -- `filter_eq(value: T) -> Vec` - -**Original methods** (in `impl Vec`) still use `==` for primitives: -- `contains`, `count`, `index_of`, `last_index_of`, `filter` - -**Changes made**: -- `capc/src/typeck/mod.rs` - Added `build_type_arg_suffix()` helper for method name mangling -- `capc/src/typeck/check.rs` - Updated method resolution to try type-specific methods first -- `capc/src/typeck/lower.rs` - Updated HIR lowering to use correct method names -- `stdlib/sys/vec.cap` - Added `impl Vec` with Eq-based methods - -## Design Decisions - -### Why `contains_eq` instead of replacing `contains`? - -We kept the original `contains` (using `==`) for backwards compatibility and added new -`*_eq` variants that use the `Eq` trait. This means: - -1. Primitives (i32, u8, bool) can use either `contains` or `contains_eq` -2. Custom structs must use `contains_eq` (and implement `eq::Eq`) -3. No breaking changes to existing code - -### Type-Specific Impl Blocks - -Methods like `map_add` that use arithmetic operators are in type-specific impl blocks: - -```rust -impl Vec { - pub fn map_add(self, delta: i32) -> Vec { ... } -} - -impl Vec { - pub fn map_add(self, delta: u8) -> Vec { ... } -} -``` - -This avoids needing an `Add` trait (which doesn't exist yet). 
- -## Files Modified - -- `capc/src/typeck/mod.rs` - Removed Vec restriction, added `build_type_arg_suffix` -- `capc/src/typeck/check.rs` - Type-specific method resolution -- `capc/src/typeck/lower.rs` - Correct method name generation in HIR -- `capc/src/typeck/monomorphize.rs` - Cross-module type lookup -- `stdlib/sys/eq.cap` - `Eq` trait and primitive implementations -- `stdlib/sys/vec.cap` - Added bounded impl block with Eq-based methods -- `tests/programs/vec_custom_eq.cap` - Test for custom struct with Eq diff --git a/PLAN.md b/PLAN.md deleted file mode 100644 index 991dc7c..0000000 --- a/PLAN.md +++ /dev/null @@ -1,36 +0,0 @@ -# Plan: HashMap demo limitations - -This plan targets the limitations called out in `examples/hashmap_demo/hashmap_demo.cap`. -Priority is to ship a real modulo operator and then simplify the demo to use it. - -## Goals - -- Add a `%` (modulo) operator for integer types. -- Update the hash-map demo to use `%` and remove the hand-rolled `mod_i32`. -- Track the remaining open limitations with concrete next steps. - -## Milestones - -1) Modulo operator support -- Lexer: add `%` token. -- Parser: recognize `%` as a binary operator with the same precedence as `*`/`/`. -- AST/HIR: add `BinaryOp::Mod`. -- Type checker: allow `%` for matching integer types (same rules as `+ - * /`). -- Codegen: emit signed/unsigned remainder with division-by-zero trap. -- Tests: add parser/typecheck/codegen coverage for `%`. - -2) HashMap demo refresh -- Replace `mod_i32` with `%`. -- Remove the “NO MODULO OPERATOR” limitation from the demo header. -- Keep other limitations (traits, function pointers, Vec restrictions, copy semantics) as-is. - -3) Next limitations (future work) -- Traits/interfaces: define a minimal trait system or ad-hoc interface for hashable keys. -- Generic storage: allow `Vec` for user-defined structs (or add `Vec` support). -- Function pointers / first-class functions: enable passing hash functions to generic maps. 
-- Copy semantics and mutability: consider non-copy structs with explicit mutation tracking. - -## Out of scope - -- Full generics over all types. -- A production-quality hash map (rehashing, tombstone compaction, probing strategies). diff --git a/PROBLEMS.md b/PROBLEMS.md new file mode 100644 index 0000000..4a2a0c5 --- /dev/null +++ b/PROBLEMS.md @@ -0,0 +1,99 @@ +# Problems + +This document records the main design wrinkles in Capable as it exists today. + +It is not a roadmap. It is a statement of the places where the language is +still more complicated, more accidental, or less settled than it should be. + +## 1. The memory model is still heavy for ordinary code + +Capable has a real distinction between: + +- `string` as a non-owning view +- `Text` as owned/growable text +- `Vec` as owned storage +- explicit frees for owned heap data + +That is workable and honest, but it means routine code still needs a fair +amount of representation awareness. The language is simpler than Rust here, but +it is still not especially lightweight. + +## 2. The allocator story is still mixed + +Capable now has both: + +- explicit `Alloc` +- a growing default-allocator surface + +That is pragmatic, but the model is not fully settled. Allocation is currently +part resource handle, part policy hook, part convenience burden. + +The language should eventually make this story crisp: + +- default allocator for ordinary safe code +- explicit allocator for low-level or budgeted code + +Until then, the stdlib will keep carrying duplicated APIs. + +## 3. Expression and statement control flow are still somewhat brittle + +Recent work made `let ... else` and `expr else` viable, but it also showed that +control-flow behavior was not fully uniform across parser, typechecker, and +codegen. + +The language now supports these forms, but this area still needs discipline. +If more expression-oriented control-flow is added casually, complexity will +rise fast. + +## 4. 
Traits and generics exist without a fully settled place in the language story + +Traits and generics work, and they are useful, but they are not clearly part of +Capable's core identity. + +The main value proposition of the language is: + +- explicit authority +- small resource model +- predictable systems code + +Traits and generic abstraction can help, but they can also distract from that +core if they keep expanding before the simpler story is fully stable. + +## 5. The language boundary is split across compiler, stdlib stubs, and runtime intrinsics + +A significant part of the real language surface is defined by the combination +of: + +- stdlib `.cap` declarations +- intrinsic registration +- runtime handle tables and host functions + +That is a reasonable implementation technique, but it makes some language +behavior feel more accidental than intentional. It is easy for docs, stubs, and +runtime behavior to drift unless they are kept tightly aligned. + +## 6. Remote capability delegation is still unsolved work + +The local model is much clearer now, but the remote story remains a separate +future project. + +That is the right scope decision, but it means Capable still does not yet have +a real end-to-end answer for: + +- delegated authority over the network +- revocation and lease semantics +- typed remote proxies +- constrained remote agents + +The current answer is architectural intent, not shipped behavior. + +## Bottom Line + +Capable's biggest remaining problems are no longer "missing features". 
+ +They are mostly about choosing and enforcing a smaller number of intended ways +to write code: + +- a lighter ordinary-data story +- one coherent allocation story +- a tighter boundary around abstraction features diff --git a/REMOTE_CAPS_RFC.md b/REMOTE_CAPS_RFC.md index bbcec10..5f3b1ce 100644 --- a/REMOTE_CAPS_RFC.md +++ b/REMOTE_CAPS_RFC.md @@ -3,7 +3,7 @@ This document describes a future initiative: delegating attenuated capabilities to remote workers or agents over the network. -It is intentionally separate from [UPDATE_PLAN.md](./UPDATE_PLAN.md). The local +It is intentionally separate from the current local-language docs. The local model should be stabilized first. Remote delegation builds on that model; it should not distort the scope of the local cleanup work. @@ -121,7 +121,8 @@ complete hostile multi-tenant sandbox. ### Phase A: Stabilize the local model -Complete the work in [UPDATE_PLAN.md](./UPDATE_PLAN.md) first. +Complete the local cleanup and address the issues in [CURRENT_STATUS.md](./CURRENT_STATUS.md) +and [PROBLEMS.md](./PROBLEMS.md) first. ### Phase B: Design `sys::remote` diff --git a/TRAITS.md b/TRAITS.md deleted file mode 100644 index 171a25c..0000000 --- a/TRAITS.md +++ /dev/null @@ -1,82 +0,0 @@ -# Static traits plan - -This plan is for a minimal, compile-time only trait system (no trait objects, -no dynamic dispatch, no vtables). The goal is to enable generic constraints and -method resolution for traits, while keeping the implementation small. - -## Goals - -- Define traits and implement them for types. -- Allow trait bounds on generic parameters (functions, structs, enums, impls). -- Use traits for method lookup in generic contexts. -- No dynamic dispatch, no trait objects, no runtime metadata. - -## Non-goals - -- `dyn Trait` or any form of runtime trait objects. -- Default methods that require dynamic dispatch. -- Negative bounds or specialization. -- Coherence across crates or complex orphan rules (single-project scope). 
- -## Proposed syntax - -- Trait declaration: - trait Display { - fn fmt(self, c: Console) -> unit - } - -- Implementing a trait: - impl Display for i32 { - fn fmt(self, c: Console) -> unit { ... } - } - -- Bounds on generics: - fn show(x: T, c: Console) -> unit { x.fmt(c) } - -## Milestones - -1) Parser/AST/HIR support -- Add `trait` item with method signatures (no bodies). -- Add `impl Trait for Type` blocks with method bodies. -- Add generic bounds syntax `T: Trait` in type parameter lists. -- Decide where trait paths live (`use` resolution like types). - -2) Name resolution + type collection -- Collect trait declarations into a trait map. -- Collect impl blocks keyed by (trait, type). -- Enforce basic coherence: at most one impl per (trait, concrete type) in the - current compilation unit. - -3) Type checking rules -- When type checking a generic with bounds, verify constraints are satisfied at - call sites (monomorphized instantiations). -- Method lookup order: inherent methods first, then trait methods if bounds - prove the trait. -- Ensure trait impl methods match the declared signature exactly. - -4) Monomorphization + codegen -- Treat trait methods as regular functions with a mangled name including - (trait, type) to avoid collisions. -- For a constrained call `x.fmt(...)`, resolve to the concrete impl at - monomorphization time (no runtime indirection). - -5) Standard library hooks (minimal) -- Add a tiny trait or two in stdlib (e.g., `eq`, `hash`) for dogfooding. -- Update the hash-map demo to use `Hash` once the trait exists. - -6) Tests -- Parser snapshot for trait/impl/bounds syntax. -- Typecheck tests for: - - missing impl - - duplicate impl - - signature mismatch - - method resolution via trait bound -- Run-time test that exercises a trait-bound generic call. - -## Risks / open questions - -- How to represent trait bounds on impl blocks and where to store them in HIR. 
-- Rule for overlapping impls when type params are involved (keep simple: no - overlapping impls allowed unless identical after monomorphization). -- Decide if trait methods can be called via UFCS (`Trait::method(x)`), or only - via method call syntax (`x.method()`). diff --git a/UPDATE_PLAN.md b/UPDATE_PLAN.md deleted file mode 100644 index 7480326..0000000 --- a/UPDATE_PLAN.md +++ /dev/null @@ -1,206 +0,0 @@ -# Update Plan: Consolidate the Local Model - -This document is the immediate roadmap for Capable. - -It is intentionally limited to the local language/runtime model. Remote -capability delegation is important, but it is a separate initiative and is now -tracked in [REMOTE_CAPS_RFC.md](./REMOTE_CAPS_RFC.md). - -Status: - -- Phase 0 is complete: the public docs now lead with the local - data/resource/capability model. -- Phase 1 is complete: reusable capability use-operations now borrow where the - current checker/runtime model allows it. -- Phase 2 is complete for the current local model: the compiler behavior was - audited, the docs were aligned to it, and reference locals remain supported - in their existing narrow form. -- Phase 3 is complete for the current local algebra: the repo now documents - which capabilities are reusable, which derivations consume, which child - handles are linear, and which capabilities are intentionally copyable. - -The point of this plan is not to redesign Capable from scratch. The point is -to align the docs, stdlib, and compiler around the model the codebase already -mostly implements. - -## Goals - -- Make the language easier to explain and teach. -- Keep the capability model intact. -- Reduce friction in ordinary code without weakening resource safety. -- Stop growing surface area until the core model is clearer. - -## Non-Negotiable Invariants - -- Safe code has no ambient authority. -- Capability values remain unforgeable in safe code. -- Privileged effects happen only through capability-bearing APIs. 
-- Runtime checks remain fail-closed. -- Unsafe code remains the only escape hatch for raw pointers, FFI, and direct - OS access. - -## What the Compiler Already Does - -The review was right about the broad shape of the implementation: the compiler -is already closer to the desired model than the public docs suggest. - -Today, Capable already has: - -- plain data that is unrestricted by default -- kind-by-containment for structs and enums -- `opaque struct` and `capability struct` as the main move-tracked categories -- static-only traits and generics - -Borrow-lite is also already much more limited than a Rust-style borrow system: - -- references cannot be stored in structs/enums -- references cannot be returned -- reference locals are allowed, but only when initialized from another local -- reference locals are non-assignable once created - -So the immediate work is mostly consolidation and cleanup, not a ground-up -rewrite. - -## Where the Friction Still Comes From - -### 1. The docs overstate the ownership story - -The public docs still make Capable sound more like a general affine/linear -language than it really is in practice. That hides the simpler model: - -- ordinary data is ordinary -- resources are move-tracked -- capabilities are authority-bearing resources - -### 2. The stdlib conflates different capability operations - -Several APIs still treat semantically different operations as if they should all -consume the receiver. - -We should distinguish: - -- Use operations: perform an effect with existing authority. -- Attenuation operations: derive a narrower capability. -- Child-handle operations: create a fresh child handle from an existing parent. - -Examples: - -- `ReadFS.read_to_string` is a use operation and should not normally consume the - capability. -- `Dir.subdir` is attenuation and may reasonably consume the stronger path cap. 
-- `TcpListener.accept` is a child-handle operation and may reasonably borrow the - listener while returning a fresh connection. - -This cleanup is the highest-value change in the near term. - -### 3. Borrow-lite is still visible in the surface language - -Capable already avoids stored refs and returned refs, which is good. -The remaining question is not "do we add a borrow system?" The codebase already -has one. The real question is how much of it should remain part of the public -story. - -The likely direction is: - -- keep short-lived borrowed parameters/receivers -- de-emphasize explicit reference locals in docs and examples -- avoid making users think in lifetimes or aliasing proofs - -### 4. Generic code is still more pessimistic than the public story - -Generics and traits are intentionally static-only, which is good. -But generic code still tends to feel more move-sensitive than ordinary code. - -Near-term conclusion: - -- do not expand traits/generics further -- keep the current machinery where it already pays for itself -- revisit generic kind behavior only after the local capability model is clearer - -## Immediate Phases - -### Phase 0: Restate the model - -- Update docs to describe Capable as a capability-secure language with a small - resource model. -- Lead with the three categories: - - plain data - - resources - - capabilities -- Stop describing the language as if general affine/linear reasoning were the - main thing users should learn first. - -### Phase 1: Clean up stdlib capability APIs - -- Rework `sys::fs` and `sys::net` signatures around the three-way distinction: - use, attenuation, child-handle. -- Make semantically reusable capabilities borrowable for ordinary use. -- Reserve `linear` for must-close child handles such as `FileRead` and - `TcpConn`. -- Remove accidental one-shot behavior from reusable capabilities. - -This phase should deliver the biggest usability gain for the least compiler -churn. 
- -### Phase 2: Tighten the compiler around the public story - -- Audit reference-local behavior and decide whether to keep it as-is or reduce - it to receiver/parameter positions in the public language. -- Keep kind-by-containment. -- Keep move tracking focused on `opaque struct`, `capability struct`, and - values that contain them. -- Avoid new trait/generic surface area while this cleanup is in flight. - -This phase is about alignment, not reinvention. - -### Phase 3: Stabilize the local capability algebra - -Before any remote work, local capability behavior should be crisp: - -- which capabilities are reusable -- which derivations consume -- which child handles are linear -- which capabilities are deliberately copyable - -If these rules are muddy locally, they will be worse remotely. - -## Out of Scope for This Plan - -The following are intentionally excluded from this document: - -- remote capability delegation -- broker/session/lease/proxy design -- protocol/authentication/revocation details -- async/distributed execution concerns -- expanding traits or richer generic abstraction machinery - -Those topics belong in separate RFCs or later roadmaps. 
- -## Acceptance Criteria - -This plan is successful if: - -- the tutorial explains Capable without leading with borrow/move rules -- ordinary examples read like simple systems code, not ownership puzzles -- capability flow remains explicit in signatures and call sites -- the local attenuation model is clearer after the stdlib cleanup -- the compiler, docs, and stdlib all tell the same story - -## Tests We Should Add or Tighten - -- reusable capabilities can perform multiple ordinary use operations -- attenuation operations still consume when they should -- child handles still require close/consumption on all paths -- structs/enums containing resources remain resource-like by containment -- reference locals, if kept, remain tightly restricted and non-assignable - -## Bottom Line - -Capable does not need a new local model. It needs a cleaner expression of the -local model it already has. - -The immediate work is: - -- restate the language honestly -- clean up the stdlib capability APIs -- align the compiler and docs around that smaller story diff --git a/capc/src/hir.rs b/capc/src/hir.rs index 66f9521..d63a6bb 100644 --- a/capc/src/hir.rs +++ b/capc/src/hir.rs @@ -310,10 +310,7 @@ pub enum ResolvedCallee { symbol: String, }, /// A trait method call to be resolved during monomorphization. - TraitMethod { - trait_name: String, - method: String, - }, + TraitMethod { trait_name: String, method: String }, /// A runtime intrinsic Intrinsic(IntrinsicId), } @@ -386,8 +383,8 @@ pub struct HirTry { pub span: Span, } -/// Unconditional trap/panic. Used for unreachable code paths like -/// calling .ok() on an Err variant. +/// Unconditional trap/panic. Used for unreachable code paths after +/// pattern-based control-flow desugaring. #[derive(Debug, Clone)] pub struct HirTrap { /// The type this expression would have produced (for type checking). 
diff --git a/capc/src/lexer.rs b/capc/src/lexer.rs index 8d28292..a9b1cf9 100644 --- a/capc/src/lexer.rs +++ b/capc/src/lexer.rs @@ -33,6 +33,8 @@ pub enum TokenKind { Fn, #[token("let")] Let, + #[token("try")] + Try, #[token("if")] If, #[token("else")] diff --git a/capc/src/parser.rs b/capc/src/parser.rs index 490ef46..b2366d8 100644 --- a/capc/src/parser.rs +++ b/capc/src/parser.rs @@ -558,6 +558,7 @@ impl Parser { Some(TokenKind::Break) => Ok(Stmt::Break(self.parse_break()?)), Some(TokenKind::Continue) => Ok(Stmt::Continue(self.parse_continue()?)), Some(TokenKind::Defer) => Ok(Stmt::Defer(self.parse_defer()?)), + Some(TokenKind::Try) => self.parse_try_stmt(), Some(TokenKind::If) => self.parse_if_stmt(), Some(TokenKind::While) => Ok(Stmt::While(self.parse_while()?)), Some(TokenKind::For) => self.parse_for_stmt(), @@ -643,6 +644,74 @@ impl Parser { }) } + fn parse_try_stmt(&mut self) -> Result { + let try_token = self.expect(TokenKind::Try)?; + let start = try_token.span.start; + + if self.peek_kind() == Some(TokenKind::Let) { + self.bump(); + if !(self.peek_kind() == Some(TokenKind::Ident) + && self + .peek_token(1) + .is_some_and(|t| matches!(t.kind, TokenKind::Colon | TokenKind::Eq))) + { + return Err(self.error_at( + try_token.span, + "`try let` requires a plain binding name".to_string(), + )); + } + + let name = self.expect_ident()?; + let ty = if self.maybe_consume(TokenKind::Colon).is_some() { + Some(self.parse_type()?) + } else { + None + }; + self.expect(TokenKind::Eq)?; + let expr = self.parse_expr()?; + self.expect(TokenKind::Else)?; + let err_binding = if self.peek_kind() == Some(TokenKind::Ident) + && self + .peek_token(1) + .is_some_and(|t| t.kind == TokenKind::LBrace) + { + Some(self.expect_ident()?) 
+ } else { + None + }; + let else_block = self.parse_block()?; + let mut stmt = + self.desugar_try_let(try_token.span, name, ty, expr, err_binding, else_block); + let end = self + .maybe_consume(TokenKind::Semi) + .map_or(stmt.span.end, |t| t.span.end); + stmt.span = Span::new(start, end); + return Ok(Stmt::Let(stmt)); + } + + let expr = self.parse_expr()?; + self.expect(TokenKind::Else)?; + let err_binding = if self.peek_kind() == Some(TokenKind::Ident) + && self + .peek_token(1) + .is_some_and(|t| t.kind == TokenKind::LBrace) + { + Some(self.expect_ident()?) + } else { + None + }; + let else_block = self.parse_block()?; + let expr = self.desugar_expr_else(expr, err_binding, else_block); + let expr_span = expr.span(); + let end = self + .maybe_consume(TokenKind::Semi) + .map_or(expr_span.end, |t| t.span.end); + Ok(Stmt::Expr(ExprStmt { + expr, + span: Span::new(start, end), + })) + } + fn parse_break(&mut self) -> Result { let token = self.expect(TokenKind::Break)?; let end = self @@ -678,57 +747,6 @@ impl Parser { fn parse_if_stmt(&mut self) -> Result { let if_token = self.expect(TokenKind::If)?; let start = if_token.span.start; - if self.peek_kind() == Some(TokenKind::Let) { - self.bump(); - let pattern = self.parse_pattern()?; - self.expect(TokenKind::Eq)?; - let expr = self.parse_expr_no_struct()?; - let then_block = self.parse_block()?; - let else_block = if self.peek_kind() == Some(TokenKind::Else) { - self.bump(); - if self.peek_kind() == Some(TokenKind::If) { - let else_if = self.parse_if_stmt()?; - let span = else_if.span(); - Some(Block { - stmts: vec![else_if], - span, - }) - } else { - Some(self.parse_block()?) 
- } - } else { - None - }; - let end = else_block - .as_ref() - .map_or(then_block.span.end, |b| b.span.end); - let else_body = else_block.unwrap_or(Block { - stmts: Vec::new(), - span: Span::new(end, end), - }); - let match_expr = MatchExpr { - expr: Box::new(expr), - arms: vec![ - MatchArm { - pattern, - body: then_block, - span: Span::new(start, end), - }, - MatchArm { - pattern: Pattern::Wildcard(Span::new(end, end)), - body: else_body, - span: Span::new(start, end), - }, - ], - span: Span::new(start, end), - match_span: if_token.span, - }; - return Ok(Stmt::Expr(ExprStmt { - expr: Expr::Match(match_expr), - span: Span::new(start, end), - })); - } - // Use parse_expr_no_struct because `{` after condition starts the then-block, not a struct literal let cond = self.parse_expr_no_struct()?; let then_block = self.parse_block()?; @@ -861,22 +879,9 @@ impl Parser { fn parse_expr_stmt(&mut self) -> Result { let expr = self.parse_expr()?; - let expr = if self.peek_kind() == Some(TokenKind::Else) { - self.bump(); - let err_binding = if self.peek_kind() == Some(TokenKind::Ident) - && self - .peek_token(1) - .is_some_and(|t| t.kind == TokenKind::LBrace) - { - Some(self.expect_ident()?) 
- } else { - None - }; - let else_block = self.parse_block()?; - self.desugar_expr_else(expr, err_binding, else_block) - } else { - expr - }; + if self.peek_kind() == Some(TokenKind::Else) { + return Err(self.error_current("`expr else` now requires a leading `try`".to_string())); + } let expr_span = expr.span(); let end = self .maybe_consume(TokenKind::Semi) @@ -1450,6 +1455,91 @@ impl Parser { }) } + fn desugar_try_let( + &self, + try_span: Span, + binding: Ident, + ty: Option, + expr: Expr, + err_binding: Option, + else_block: Block, + ) -> LetStmt { + let binding_expr = Expr::Path(Path { + segments: vec![binding.clone()], + span: binding.span, + }); + let ok_body = Block { + stmts: vec![Stmt::Expr(ExprStmt { + expr: binding_expr, + span: binding.span, + })], + span: binding.span, + }; + + let panic_ident = Spanned::new("panic".to_string(), else_block.span); + let panic_expr = Expr::Call(CallExpr { + callee: Box::new(Expr::Path(Path { + segments: vec![panic_ident], + span: else_block.span, + })), + type_args: Vec::new(), + args: Vec::new(), + span: else_block.span, + }); + let mut else_stmts = else_block.stmts; + else_stmts.push(Stmt::Expr(ExprStmt { + expr: panic_expr, + span: else_block.span, + })); + let else_body = Block { + stmts: else_stmts, + span: else_block.span, + }; + + let expr_span = expr.span(); + let ok_ident = Spanned::new("Ok".to_string(), expr_span); + let err_ident = Spanned::new("Err".to_string(), else_block.span); + let match_span = Span::new(try_span.start, else_block.span.end); + let match_expr = Expr::Match(MatchExpr { + expr: Box::new(expr), + arms: vec![ + MatchArm { + pattern: Pattern::Call { + path: Path { + segments: vec![ok_ident], + span: expr_span, + }, + binding: Some(binding.clone()), + span: expr_span, + }, + body: ok_body, + span: match_span, + }, + MatchArm { + pattern: Pattern::Call { + path: Path { + segments: vec![err_ident], + span: else_block.span, + }, + binding: err_binding, + span: else_block.span, + }, + body: 
else_body, + span: match_span, + }, + ], + span: match_span, + match_span: try_span, + }); + + LetStmt { + name: binding, + ty, + expr: match_expr, + span: match_span, + } + } + fn pattern_binding_ident(&self, pattern: &Pattern) -> Option { match pattern { Pattern::Binding(ident) => Some(ident.clone()), diff --git a/capc/src/typeck/collect.rs b/capc/src/typeck/collect.rs index 3e92865..a9953eb 100644 --- a/capc/src/typeck/collect.rs +++ b/capc/src/typeck/collect.rs @@ -4,9 +4,9 @@ use crate::ast::*; use crate::error::TypeError; use super::{ - build_type_param_bounds, build_type_params, desugar_impl_methods, lower_type, type_contains_ref, - type_param_names, EnumInfo, FunctionSig, StructInfo, TraitImplInfo, TraitInfo, TypeKind, - UseMap, StdlibIndex, RESERVED_TYPE_PARAMS, validate_type_args, + build_type_param_bounds, build_type_params, desugar_impl_methods, lower_type, + type_contains_ref, type_param_names, validate_type_args, EnumInfo, FunctionSig, StdlibIndex, + StructInfo, TraitImplInfo, TraitInfo, TypeKind, UseMap, RESERVED_TYPE_PARAMS, }; /// Build the stdlib type index for name resolution. 
@@ -133,16 +133,17 @@ pub(super) fn collect_traits( methods.insert(method.name.item.clone(), sig); } let qualified = format!("{module_name}.{}", decl.name.item); - if traits.insert( - qualified, - TraitInfo { - type_params: trait_param_names, - methods, - module: module_name.clone(), - is_pub: decl.is_pub, - }, - ) - .is_some() + if traits + .insert( + qualified, + TraitInfo { + type_params: trait_param_names, + methods, + module: module_name.clone(), + is_pub: decl.is_pub, + }, + ) + .is_some() { return Err(TypeError::new( format!("duplicate trait `{}`", decl.name.item), @@ -240,7 +241,8 @@ pub(super) fn collect_functions( is_pub: bool| -> Result<(), TypeError> { let type_param_set = build_type_params(type_params)?; - let type_param_bounds = build_type_param_bounds(type_params, &local_use, &module_name); + let type_param_bounds = + build_type_param_bounds(type_params, &local_use, &module_name); for param in params { if param.ty.is_none() { return Err(TypeError::new( @@ -268,7 +270,10 @@ pub(super) fn collect_functions( is_pub, }; let qualified_key = format!("{module_name}.{}", name.item); - if functions.insert(qualified_key.clone(), sig.clone()).is_some() { + if functions + .insert(qualified_key.clone(), sig.clone()) + .is_some() + { return Err(TypeError::new( format!("duplicate function `{qualified_key}`"), span, @@ -277,10 +282,7 @@ pub(super) fn collect_functions( if module_name == entry_name { let key = name.item.clone(); if functions.insert(key.clone(), sig).is_some() { - return Err(TypeError::new( - format!("duplicate function `{key}`"), - span, - )); + return Err(TypeError::new(format!("duplicate function `{key}`"), span)); } } Ok(()) @@ -334,9 +336,7 @@ pub(super) fn collect_functions( let key = format!("{impl_ty}::{method_name}"); if !impl_methods.insert(key.clone()) { return Err(TypeError::new( - format!( - "duplicate method `{method_name}` for `{impl_ty}`" - ), + format!("duplicate method `{method_name}` for `{impl_ty}`"), method.name.span, )); } diff 
--git a/capc/src/typeck/lower.rs b/capc/src/typeck/lower.rs index 37fbc9d..7f1c501 100644 --- a/capc/src/typeck/lower.rs +++ b/capc/src/typeck/lower.rs @@ -1,8 +1,8 @@ use std::collections::{HashMap, HashSet}; +use crate::abi::AbiType; use crate::ast::*; use crate::error::TypeError; -use crate::abi::AbiType; use crate::hir::{ HirAssignStmt, HirBinary, HirBlock, HirBreakStmt, HirCall, HirContinueStmt, HirDeferStmt, HirEnum, HirEnumVariant, HirEnumVariantExpr, HirExpr, HirExprStmt, HirExternFunction, HirField, @@ -87,12 +87,10 @@ impl<'a> LoweringCtx<'a> { } /// Push a new scope for name bindings (shadowing is not yet modeled here). - fn push_scope(&mut self) { - } + fn push_scope(&mut self) {} /// Pop the most recent scope (placeholder for future scope stacks). - fn pop_scope(&mut self) { - } + fn pop_scope(&mut self) {} } /// Lower a fully type-checked module into HIR. @@ -153,7 +151,10 @@ pub(super) fn lower_module( .map(|param| { let Some(ty) = ¶m.ty else { return Err(TypeError::new( - format!("parameter `{}` requires a type annotation", param.name.item), + format!( + "parameter `{}` requires a type annotation", + param.name.item + ), param.name.span, )); }; @@ -330,22 +331,22 @@ fn lower_stmt(stmt: &Stmt, ctx: &mut LoweringCtx, ret_ty: &Ty) -> Result lower_defer_stmt(defer_stmt, ctx, ret_ty), Stmt::Return(ret) => { - let expr = ret.expr.as_ref().map(|e| lower_expr(e, ctx, ret_ty)).transpose()?; + let expr = ret + .expr + .as_ref() + .map(|e| lower_expr(e, ctx, ret_ty)) + .transpose()?; Ok(vec![HirStmt::Return(HirReturnStmt { expr, span: ret.span, })]) } - Stmt::Break(break_stmt) => { - Ok(vec![HirStmt::Break(HirBreakStmt { - span: break_stmt.span, - })]) - } - Stmt::Continue(continue_stmt) => { - Ok(vec![HirStmt::Continue(HirContinueStmt { - span: continue_stmt.span, - })]) - } + Stmt::Break(break_stmt) => Ok(vec![HirStmt::Break(HirBreakStmt { + span: break_stmt.span, + })]), + Stmt::Continue(continue_stmt) => Ok(vec![HirStmt::Continue(HirContinueStmt { + span: 
continue_stmt.span, + })]), Stmt::If(if_stmt) => { let cond = lower_expr(&if_stmt.cond, ctx, ret_ty)?; let then_block = lower_block(&if_stmt.then_block, ctx, ret_ty)?; @@ -452,7 +453,8 @@ fn lower_defer_stmt( } } - let base_is_local = if let Some(base_name) = get_leftmost_segment(&method_call.receiver) { + let base_is_local = if let Some(base_name) = get_leftmost_segment(&method_call.receiver) + { ctx.local_types.contains_key(base_name) } else { true @@ -563,15 +565,17 @@ fn lower_defer_stmt( // Try type-specific method first (e.g., Slice__u8__at), fall back to base (Slice__at) let type_arg_suffix = super::build_type_arg_suffix(&receiver_args); let base_method_fn = format!("{type_name}__{}", method_call.method.item); - let specific_method_fn = format!("{type_name}{type_arg_suffix}__{}", method_call.method.item); + let specific_method_fn = + format!("{type_name}{type_arg_suffix}__{}", method_call.method.item); let qualified_specific = format!("{method_module}.{specific_method_fn}"); let qualified_base = format!("{method_module}.{base_method_fn}"); - let (method_fn, key) = if !type_arg_suffix.is_empty() && ctx.functions.contains_key(&qualified_specific) { - (specific_method_fn, qualified_specific) - } else { - (base_method_fn, qualified_base) - }; + let (method_fn, key) = + if !type_arg_suffix.is_empty() && ctx.functions.contains_key(&qualified_specific) { + (specific_method_fn, qualified_specific) + } else { + (base_method_fn, qualified_base) + }; let symbol = format!("capable_{}", key.replace('.', "_")); let mut args = Vec::with_capacity(method_call.args.len() + 1); @@ -614,7 +618,6 @@ fn lower_defer_call_from_path( ret_ty: HirType, ctx: &mut LoweringCtx, ) -> Result { - if path.segments.len() == 1 { let name = &path.segments[0].item; if name == "drop" || name == "panic" || name == "Ok" || name == "Err" { @@ -635,7 +638,11 @@ fn lower_defer_call_from_path( let symbol = format!("capable_{}", key.replace('.', "_")); Ok(HirExpr::Call(HirCall { - callee: 
ResolvedCallee::Function { module, name, symbol }, + callee: ResolvedCallee::Function { + module, + name, + symbol, + }, type_args: lower_call_type_args(type_args, ctx)?, args, ret_ty, @@ -668,11 +675,7 @@ fn capture_defer_expr( /// Helper to get the type of an AST expression using the existing typechecker. /// This ensures we have a single source of truth for types. -fn type_of_ast_expr( - expr: &Expr, - ctx: &LoweringCtx, - ret_ty: &Ty, -) -> Result { +fn type_of_ast_expr(expr: &Expr, ctx: &LoweringCtx, ret_ty: &Ty) -> Result { if let Some(table) = ctx.type_table { if let Some(ty) = table.get(expr.span()) { return Ok(ty.clone()); @@ -756,9 +759,7 @@ fn abi_type_for(ty: &Ty, ctx: &LoweringCtx, span: Span) -> Result Result Result Result Result Result args[0].clone(), + Ty::Path(name, args) if name == "sys.result.Result" && args.len() == 2 => { + args[0].clone() + } _ => { return Err(TypeError::new( "the `?` operator expects a Result value".to_string(), @@ -1306,11 +1317,7 @@ fn lower_match_stmt( Ok(HirExpr::Match(HirMatch { expr: Box::new(scrutinee), arms: hir_arms, - result_ty: hir_type_for( - Ty::Builtin(super::BuiltinType::Unit), - ctx, - match_expr.span, - )?, + result_ty: hir_type_for(Ty::Builtin(super::BuiltinType::Unit), ctx, match_expr.span)?, span: match_expr.span, })) } @@ -1332,8 +1339,12 @@ fn lower_pattern( } Pattern::Path(path) => { - if let Some(_enum_ty) = resolve_enum_variant(path, ctx.use_map, ctx.enums, ctx.module_name) { - let variant_name = path.segments.last() + if let Some(_enum_ty) = + resolve_enum_variant(path, ctx.use_map, ctx.enums, ctx.module_name) + { + let variant_name = path + .segments + .last() .map(|s| s.item.clone()) .unwrap_or_else(|| "unknown".to_string()); Ok(HirPattern::Variant { @@ -1348,7 +1359,11 @@ fn lower_pattern( } } - Pattern::Call { path, binding, span } => { + Pattern::Call { + path, + binding, + span, + } => { // Check for Result::Ok/Err variants (both qualified and unqualified) let variant_name = 
path.segments.last().map(|s| s.item.as_str()); if variant_name == Some("Ok") || variant_name == Some("Err") { @@ -1376,8 +1391,12 @@ fn lower_pattern( } } - if let Some(_enum_ty) = resolve_enum_variant(path, ctx.use_map, ctx.enums, ctx.module_name) { - let variant_name = path.segments.last() + if let Some(_enum_ty) = + resolve_enum_variant(path, ctx.use_map, ctx.enums, ctx.module_name) + { + let variant_name = path + .segments + .last() .map(|s| s.item.clone()) .unwrap_or_else(|| "unknown".to_string()); @@ -1405,10 +1424,10 @@ fn lower_pattern( #[cfg(test)] mod tests { + use super::super::TypeKind; use super::*; use crate::ast::Span; use std::collections::HashMap; - use super::super::TypeKind; #[test] fn abi_type_for_resolves_module_local_structs() { diff --git a/capc/src/typeck/mod.rs b/capc/src/typeck/mod.rs index 20e7aa3..81cee76 100644 --- a/capc/src/typeck/mod.rs +++ b/capc/src/typeck/mod.rs @@ -19,9 +19,8 @@ use crate::ast::*; use crate::error::TypeError; use crate::hir::{HirModule, HirTraitImpl}; -pub(super) const RESERVED_TYPE_PARAMS: [&str; 8] = [ - "i32", "i64", "u32", "u8", "bool", "unit", "never", "Self", -]; +pub(super) const RESERVED_TYPE_PARAMS: [&str; 8] = + ["i32", "i64", "u32", "u8", "bool", "unit", "never", "Self"]; /// Resolved type used after lowering. No spans, fully qualified paths. 
#[derive(Debug, Clone, PartialEq, Eq)] @@ -223,7 +222,9 @@ fn substitute_self(ty: &Ty, target: &Ty) -> Ty { Ty::Ref(inner) => Ty::Ref(Box::new(substitute_self(inner, target))), Ty::Path(name, args) => Ty::Path( name.clone(), - args.iter().map(|arg| substitute_self(arg, target)).collect(), + args.iter() + .map(|arg| substitute_self(arg, target)) + .collect(), ), Ty::Builtin(_) | Ty::Param(_) => ty.clone(), } @@ -388,19 +389,13 @@ impl Scopes { for scope in self.stack.iter_mut().rev() { if let Some(info) = scope.get_mut(name) { if info.state == MoveState::Moved { - return Err(TypeError::new( - format!("use of moved value `{name}`"), - span, - )); + return Err(TypeError::new(format!("use of moved value `{name}`"), span)); } info.state = MoveState::Moved; return Ok(()); } } - Err(TypeError::new( - format!("unknown identifier `{name}`"), - span, - )) + Err(TypeError::new(format!("unknown identifier `{name}`"), span)) } } @@ -473,11 +468,7 @@ fn resolve_method_target( let (receiver_name, receiver_args) = match base_ty { Ty::Path(name, args) => (name.as_str(), args), Ty::Builtin(BuiltinType::U8) => { - return Ok(( - "sys.bytes".to_string(), - "u8".to_string(), - Vec::new(), - )); + return Ok(("sys.bytes".to_string(), "u8".to_string(), Vec::new())); } _ => { return Err(TypeError::new( @@ -510,9 +501,9 @@ fn resolve_method_target( } if receiver_name.contains('.') { - let (mod_part, type_part) = receiver_name.rsplit_once('.').ok_or_else(|| { - TypeError::new("invalid type path".to_string(), span) - })?; + let (mod_part, type_part) = receiver_name + .rsplit_once('.') + .ok_or_else(|| TypeError::new("invalid type path".to_string(), span))?; return Ok(( mod_part.to_string(), type_part.to_string(), @@ -521,10 +512,18 @@ fn resolve_method_target( } if let Some(info) = struct_map.get(&format!("{module_name}.{receiver_name}")) { - return Ok((info.module.clone(), receiver_name.to_string(), receiver_args.clone())); + return Ok(( + info.module.clone(), + receiver_name.to_string(), + 
receiver_args.clone(), + )); } if enum_map.contains_key(&format!("{module_name}.{receiver_name}")) { - return Ok((module_name.to_string(), receiver_name.to_string(), receiver_args.clone())); + return Ok(( + module_name.to_string(), + receiver_name.to_string(), + receiver_args.clone(), + )); } Err(TypeError::new( @@ -587,9 +586,9 @@ fn resolve_impl_target( .unwrap_or(module_name); (mod_part.to_string(), type_name) } else if target_name.contains('.') { - let (mod_part, type_part) = target_name.rsplit_once('.').ok_or_else(|| { - TypeError::new("invalid type path".to_string(), span) - })?; + let (mod_part, type_part) = target_name + .rsplit_once('.') + .ok_or_else(|| TypeError::new("invalid type path".to_string(), span))?; let type_name = format!("{}{}", type_part, type_arg_suffix); (mod_part.to_string(), type_name) } else if let Some(info) = struct_map.get(&format!("{module_name}.{target_name}")) { @@ -668,9 +667,7 @@ fn validate_impl_method( let lowered = lower_type(ty, use_map, stdlib, type_params)?; if lowered != expected && lowered != expected_ptr && lowered != expected_ref { return Err(TypeError::new( - format!( - "first parameter must be self: {type_name} (found {lowered:?})" - ), + format!("first parameter must be self: {type_name} (found {lowered:?})"), ty.span(), )); } @@ -691,9 +688,20 @@ fn validate_impl_method( let ret_ty = lower_type(&method.ret, use_map, stdlib, type_params)?; if receiver_is_ref && type_contains_capability(&ret_ty, struct_map, enum_map) { let receiver_kind = type_kind(target_ty, struct_map, enum_map); - if receiver_kind != TypeKind::Unrestricted { + let receiver_is_capability = match target_ty { + Ty::Path(name, _) => struct_map + .get(name) + .map(|info| info.is_capability) + .unwrap_or(false), + _ => false, + }; + if receiver_kind != TypeKind::Unrestricted + && (!receiver_is_capability + || type_contains_non_linear_capability(&ret_ty, struct_map, enum_map)) + { return Err(TypeError::new( - "methods returning capabilities must take 
`self` by value".to_string(), + "borrowed capability receivers may only return linear child capabilities" + .to_string(), method.ret.span(), )); } @@ -794,21 +802,16 @@ fn desugar_impl_methods( method.span, )?; if let Some(trait_name) = &trait_name { - let trait_info = trait_map - .get(trait_name) - .expect("trait already validated"); - let trait_method = trait_info - .methods - .get(&method.name.item) - .ok_or_else(|| { - TypeError::new( - format!( - "method `{}` is not declared in trait `{trait_name}`", - method.name.item - ), - method.name.span, - ) - })?; + let trait_info = trait_map.get(trait_name).expect("trait already validated"); + let trait_method = trait_info.methods.get(&method.name.item).ok_or_else(|| { + TypeError::new( + format!( + "method `{}` is not declared in trait `{trait_name}`", + method.name.item + ), + method.name.span, + ) + })?; let mut lowered_params = Vec::new(); for param in ¶ms { let Some(ty) = ¶m.ty else { @@ -878,9 +881,7 @@ fn desugar_impl_methods( } } if let Some(trait_name) = &trait_name { - let trait_info = trait_map - .get(trait_name) - .expect("trait already validated"); + let trait_info = trait_map.get(trait_name).expect("trait already validated"); for name in trait_info.methods.keys() { if !method_names.contains(name) { return Err(TypeError::new( @@ -924,7 +925,10 @@ fn lower_type( if type_params.contains(path_segments[0]) { if !args.is_empty() { return Err(TypeError::new( - format!("type parameter `{}` cannot take arguments", path_segments[0]), + format!( + "type parameter `{}` cannot take arguments", + path_segments[0] + ), path.span, )); } @@ -1076,9 +1080,9 @@ fn type_contains_capability_inner( Ty::Param(_) => true, Ty::Path(name, args) => { if name == "sys.result.Result" { - return args - .iter() - .any(|arg| type_contains_capability_inner(arg, struct_map, enum_map, visiting)); + return args.iter().any(|arg| { + type_contains_capability_inner(arg, struct_map, enum_map, visiting) + }); } if args .iter() @@ -1118,6 +1122,69 
@@ fn type_contains_capability_inner( } } +fn type_contains_non_linear_capability( + ty: &Ty, + struct_map: &HashMap, + enum_map: &HashMap, +) -> bool { + let mut visiting = HashSet::new(); + type_contains_non_linear_capability_inner(ty, struct_map, enum_map, &mut visiting) +} + +fn type_contains_non_linear_capability_inner( + ty: &Ty, + struct_map: &HashMap, + enum_map: &HashMap, + visiting: &mut HashSet, +) -> bool { + match ty { + Ty::Builtin(_) | Ty::Ptr(_) | Ty::Ref(_) => false, + Ty::Param(_) => true, + Ty::Path(name, args) => { + if name == "sys.result.Result" { + return args.iter().any(|arg| { + type_contains_non_linear_capability_inner(arg, struct_map, enum_map, visiting) + }); + } + if args.iter().any(|arg| { + type_contains_non_linear_capability_inner(arg, struct_map, enum_map, visiting) + }) { + return true; + } + if let Some(info) = struct_map.get(name) { + if info.is_capability { + return info.kind != TypeKind::Linear; + } + if !visiting.insert(name.clone()) { + return false; + } + let contains = info.fields.values().any(|field| { + type_contains_non_linear_capability_inner(field, struct_map, enum_map, visiting) + }); + visiting.remove(name); + return contains; + } + if let Some(info) = enum_map.get(name) { + if !visiting.insert(name.clone()) { + return false; + } + let contains = info.payloads.values().any(|payload| { + if let Some(payload_ty) = payload { + type_contains_non_linear_capability_inner( + payload_ty, struct_map, enum_map, visiting, + ) + } else { + false + } + }); + visiting.remove(name); + return contains; + } + false + } + } +} + /// Move-only types are anything not unrestricted. 
fn is_affine_type( ty: &Ty, @@ -1167,21 +1234,33 @@ fn type_kind_inner( } if let Some(info) = struct_map.get(name) { visiting.insert(name.clone()); - let fields_kind = info.fields.values().fold(TypeKind::Unrestricted, |acc, field| { - combine_kind(acc, type_kind_inner(field, struct_map, enum_map, visiting)) - }); + let fields_kind = + info.fields + .values() + .fold(TypeKind::Unrestricted, |acc, field| { + combine_kind( + acc, + type_kind_inner(field, struct_map, enum_map, visiting), + ) + }); visiting.remove(name); return combine_kind(info.kind, fields_kind); } if let Some(info) = enum_map.get(name) { visiting.insert(name.clone()); - let payload_kind = info.payloads.values().fold(TypeKind::Unrestricted, |acc, payload| { - if let Some(payload_ty) = payload { - combine_kind(acc, type_kind_inner(payload_ty, struct_map, enum_map, visiting)) - } else { - acc - } - }); + let payload_kind = + info.payloads + .values() + .fold(TypeKind::Unrestricted, |acc, payload| { + if let Some(payload_ty) = payload { + combine_kind( + acc, + type_kind_inner(payload_ty, struct_map, enum_map, visiting), + ) + } else { + acc + } + }); visiting.remove(name); return payload_kind; } @@ -1244,10 +1323,7 @@ pub fn type_check_program( user_modules: &[Module], ) -> Result { let use_map = UseMap::new(module); - let stdlib_names: HashSet = stdlib - .iter() - .map(|m| path_to_string(&m.name)) - .collect(); + let stdlib_names: HashSet = stdlib.iter().map(|m| path_to_string(&m.name)).collect(); let mut package_map: HashMap = HashMap::new(); for m in stdlib { package_map.insert(path_to_string(&m.name), m.package); @@ -1270,23 +1346,15 @@ pub fn type_check_program( for user_module in user_modules { check::validate_package_safety(user_module, false) .map_err(|err| err.with_context(format!("in module `{}`", user_module.name)))?; - check::validate_import_safety( - user_module, - &package_map, - &stdlib_names, - ) - .map_err(|err| err.with_context(format!("in module `{}`", user_module.name)))?; + 
check::validate_import_safety(user_module, &package_map, &stdlib_names) + .map_err(|err| err.with_context(format!("in module `{}`", user_module.name)))?; } for stdlib_module in stdlib { check::validate_package_safety(stdlib_module, true) .map_err(|err| err.with_context(format!("in module `{}`", stdlib_module.name)))?; if stdlib_module.package == PackageSafety::Safe { - check::validate_import_safety( - stdlib_module, - &package_map, - &stdlib_names, - ) - .map_err(|err| err.with_context(format!("in module `{}`", stdlib_module.name)))?; + check::validate_import_safety(stdlib_module, &package_map, &stdlib_names) + .map_err(|err| err.with_context(format!("in module `{}`", stdlib_module.name)))?; } } let struct_map = collect::collect_structs(&modules, &module_name, &stdlib_index) @@ -1295,14 +1363,9 @@ pub fn type_check_program( .map_err(|err| err.with_context("while collecting enums"))?; let trait_map = collect::collect_traits(&modules, &stdlib_index) .map_err(|err| err.with_context("while collecting traits"))?; - let trait_impls = collect::collect_trait_impls( - &modules, - &stdlib_index, - &struct_map, - &enum_map, - &trait_map, - ) - .map_err(|err| err.with_context("while collecting trait impls"))?; + let trait_impls = + collect::collect_trait_impls(&modules, &stdlib_index, &struct_map, &enum_map, &trait_map) + .map_err(|err| err.with_context("while collecting trait impls"))?; collect::validate_type_defs(&modules, &stdlib_index, &struct_map, &enum_map) .map_err(|err| err.with_context("while validating type arguments"))?; collect::validate_copy_structs(&modules, &struct_map, &enum_map, &stdlib_index) @@ -1315,7 +1378,7 @@ pub fn type_check_program( &enum_map, &trait_map, ) - .map_err(|err| err.with_context("while collecting functions"))?; + .map_err(|err| err.with_context("while collecting functions"))?; let mut type_tables: FunctionTypeTables = HashMap::new(); diff --git a/capc/src/typeck/monomorphize.rs b/capc/src/typeck/monomorphize.rs index 5fef3f3..13c6bfd 
100644 --- a/capc/src/typeck/monomorphize.rs +++ b/capc/src/typeck/monomorphize.rs @@ -1,10 +1,10 @@ use std::collections::{HashMap, HashSet}; use crate::abi::AbiType; +use crate::ast::Span; use crate::error::TypeError; use crate::hir::*; use crate::typeck::Ty; -use crate::ast::Span; const DUMMY_SPAN: Span = Span { start: 0, end: 0 }; @@ -169,7 +169,10 @@ impl MonoCtx { let key = qualify(&instance.module, &instance.base_name); if let Some(func) = self.functions.get(&key).cloned() { let new_name = mangle_name(&instance.base_name, &instance.type_args); - if self.generated_functions.contains(&qualify(&instance.module, &new_name)) { + if self + .generated_functions + .contains(&qualify(&instance.module, &new_name)) + { continue; } let subs = build_substitution(&func.type_params, &instance.type_args, DUMMY_SPAN)?; @@ -179,7 +182,10 @@ impl MonoCtx { } if let Some(func) = self.externs.get(&key).cloned() { let new_name = mangle_name(&instance.base_name, &instance.type_args); - if self.generated_externs.contains(&qualify(&instance.module, &new_name)) { + if self + .generated_externs + .contains(&qualify(&instance.module, &new_name)) + { continue; } let subs = build_substitution(&func.type_params, &instance.type_args, DUMMY_SPAN)?; @@ -206,12 +212,7 @@ impl MonoCtx { entry = Some(module); continue; } - if self - .program - .stdlib - .iter() - .any(|m| m.name == module.name) - { + if self.program.stdlib.iter().any(|m| m.name == module.name) { stdlib.push(module); } else { user_modules.push(module); @@ -247,7 +248,8 @@ impl MonoCtx { fn push_struct(&mut self, module: &str, decl: HirStruct) { let key = qualify(module, &decl.name); if self.generated_structs.insert(key) { - self.structs.insert(qualify(module, &decl.name), decl.clone()); + self.structs + .insert(qualify(module, &decl.name), decl.clone()); if let Some(out) = self.out_modules.get_mut(module) { out.structs.push(decl); } @@ -547,7 +549,14 @@ impl MonoCtx { continue; } let mut inferred = HashMap::new(); - if 
match_type_params(&impl_info.target_ty, actual, &mut inferred, DUMMY_SPAN).is_ok() { + if match_type_params( + &impl_info.target_ty, + actual, + &mut inferred, + DUMMY_SPAN, + ) + .is_ok() + { matches.push(impl_info.clone()); } } @@ -596,13 +605,8 @@ impl MonoCtx { ResolvedCallee::Function { module, name, .. } => { let key = qualify(module, name); if let Some(func) = self.functions.get(&key).cloned() { - let (new_name, symbol, type_args) = self.mono_callee( - module, - &func, - &call.args, - &call.type_args, - subs, - )?; + let (new_name, symbol, type_args) = + self.mono_callee(module, &func, &call.args, &call.type_args, subs)?; let callee = ResolvedCallee::Function { module: module.clone(), name: new_name, @@ -617,13 +621,8 @@ impl MonoCtx { })); } if let Some(func) = self.externs.get(&key).cloned() { - let (new_name, symbol, type_args) = self.mono_callee( - module, - &func, - &call.args, - &call.type_args, - subs, - )?; + let (new_name, symbol, type_args) = + self.mono_callee(module, &func, &call.args, &call.type_args, subs)?; let callee = ResolvedCallee::Function { module: module.clone(), name: new_name, @@ -802,10 +801,9 @@ impl MonoCtx { subs: &HashMap, ) -> Result { match ty { - Ty::Param(name) => subs - .get(name) - .cloned() - .ok_or_else(|| TypeError::new(format!("unbound type parameter `{name}`"), DUMMY_SPAN)), + Ty::Param(name) => subs.get(name).cloned().ok_or_else(|| { + TypeError::new(format!("unbound type parameter `{name}`"), DUMMY_SPAN) + }), Ty::Builtin(_) => Ok(ty.clone()), Ty::Ptr(inner) => Ok(Ty::Ptr(Box::new(self.mono_ty(module, inner, subs)?))), Ty::Ref(inner) => Ok(Ty::Ref(Box::new(self.mono_ty(module, inner, subs)?))), @@ -829,7 +827,8 @@ impl MonoCtx { let qualified = name.contains('.'); if let Some(struct_def) = self.structs.get(&qualified_key).cloned() { if !struct_def.type_params.is_empty() { - let new_name = self.ensure_struct_instance(&type_module, &struct_def, &args)?; + let new_name = + self.ensure_struct_instance(&type_module, 
&struct_def, &args)?; let name = if qualified { qualify(&type_module, &new_name) } else { @@ -842,7 +841,8 @@ impl MonoCtx { } if let Some(enum_def) = self.enums.get(&qualified_key).cloned() { if !enum_def.type_params.is_empty() { - let new_name = self.ensure_enum_instance(&type_module, &enum_def, &args)?; + let new_name = + self.ensure_enum_instance(&type_module, &enum_def, &args)?; let name = if qualified { qualify(&type_module, &new_name) } else { @@ -961,7 +961,10 @@ impl MonoCtx { }); } if let Some(info) = self.enums.get(&qualified_key) { - let has_payload = info.variants.iter().any(|variant| variant.payload.is_some()); + let has_payload = info + .variants + .iter() + .any(|variant| variant.payload.is_some()); if has_payload { return Ok(AbiType::Ptr); } @@ -1182,11 +1185,7 @@ fn mangle_name(base: &str, args: &[Ty]) -> String { if args.is_empty() { return base.to_string(); } - let suffix = args - .iter() - .map(mangle_type) - .collect::>() - .join("__"); + let suffix = args.iter().map(mangle_type).collect::>().join("__"); format!("{base}__{suffix}") } @@ -1210,11 +1209,7 @@ fn mangle_type(ty: &Ty) -> String { } let mut base = name.replace('.', "_"); if !args.is_empty() { - let suffix = args - .iter() - .map(mangle_type) - .collect::>() - .join("__"); + let suffix = args.iter().map(mangle_type).collect::>().join("__"); base = format!("{base}__{suffix}"); } base diff --git a/capc/tests/run.rs b/capc/tests/run.rs index cd6d9bc..3accfaa 100644 --- a/capc/tests/run.rs +++ b/capc/tests/run.rs @@ -126,6 +126,20 @@ fn run_fs_dir_reuse() { assert!(stdout.contains("dir reuse ok"), "stdout was: {stdout:?}"); } +#[test] +fn run_fs_open_read_reuse() { + let out_dir = make_out_dir("fs_open_read_reuse"); + let out_dir = out_dir.to_str().expect("utf8 out dir"); + let (code, stdout, _stderr) = run_capc(&[ + "run", + "--out-dir", + out_dir, + "tests/programs/fs_open_read_reuse.cap", + ]); + assert_eq!(code, 0); + assert!(stdout.contains("same"), "stdout was: {stdout:?}"); +} + 
#[test] fn run_let_else() { let out_dir = make_out_dir("let_else"); @@ -136,6 +150,16 @@ fn run_let_else() { assert!(stdout.contains("let else ok"), "stdout was: {stdout:?}"); } +#[test] +fn run_try_let() { + let out_dir = make_out_dir("try_let"); + let out_dir = out_dir.to_str().expect("utf8 out dir"); + let (code, stdout, _stderr) = + run_capc(&["run", "--out-dir", out_dir, "tests/programs/try_let.cap"]); + assert_eq!(code, 0); + assert!(stdout.contains("try let ok"), "stdout was: {stdout:?}"); +} + #[test] fn run_expr_else() { let out_dir = make_out_dir("expr_else"); @@ -1097,34 +1121,6 @@ fn run_generic_and_index() { ); } -#[test] -fn run_result_is_ok_is_err() { - let out_dir = make_out_dir("result_is_ok_is_err"); - let out_dir = out_dir.to_str().expect("utf8 out dir"); - let (code, stdout, _stderr) = run_capc(&[ - "run", - "--out-dir", - out_dir, - "tests/programs/should_pass_result_is_ok_is_err.cap", - ]); - assert_eq!(code, 0); - assert!(stdout.contains("is_ok_is_err ok"), "stdout was: {stdout:?}"); -} - -#[test] -fn run_result_ok_err() { - let out_dir = make_out_dir("result_ok_err"); - let out_dir = out_dir.to_str().expect("utf8 out dir"); - let (code, stdout, _stderr) = run_capc(&[ - "run", - "--out-dir", - out_dir, - "tests/programs/should_pass_result_ok_err.cap", - ]); - assert_eq!(code, 0); - assert!(stdout.contains("ok_err ok"), "stdout was: {stdout:?}"); -} - #[test] fn run_defer() { let out_dir = make_out_dir("defer"); @@ -1176,23 +1172,6 @@ fn run_defer_return() { ); } -#[test] -fn run_if_let() { - let out_dir = make_out_dir("if_let"); - let out_dir = out_dir.to_str().expect("utf8 out dir"); - let (code, stdout, _stderr) = run_capc(&[ - "run", - "--out-dir", - out_dir, - "tests/programs/should_pass_if_let.cap", - ]); - assert_eq!(code, 0); - assert!( - stdout.contains("ok\nerr\nif_let ok\n"), - "stdout was: {stdout:?}" - ); -} - #[test] fn run_for_forever() { let out_dir = make_out_dir("for_forever"); diff --git a/capc/tests/typecheck.rs 
b/capc/tests/typecheck.rs index ffddeb8..aea4c3c 100644 --- a/capc/tests/typecheck.rs +++ b/capc/tests/typecheck.rs @@ -52,6 +52,14 @@ fn typecheck_fs_dir_reuse_ok() { type_check_program(&module, &stdlib, &[]).expect("typecheck module"); } +#[test] +fn typecheck_fs_open_read_reuse_ok() { + let source = load_program("fs_open_read_reuse.cap"); + let module = parse_module(&source).expect("parse module"); + let stdlib = load_stdlib().expect("load stdlib"); + type_check_program(&module, &stdlib, &[]).expect("typecheck module"); +} + #[test] fn typecheck_let_else_ok() { let source = load_program("let_else.cap"); @@ -60,6 +68,14 @@ fn typecheck_let_else_ok() { type_check_program(&module, &stdlib, &[]).expect("typecheck module"); } +#[test] +fn typecheck_try_let_ok() { + let source = load_program("try_let.cap"); + let module = parse_module(&source).expect("parse module"); + let stdlib = load_stdlib().expect("load stdlib"); + type_check_program(&module, &stdlib, &[]).expect("typecheck module"); +} + #[test] fn typecheck_expr_else_ok() { let source = load_program("expr_else.cap"); @@ -68,6 +84,14 @@ fn typecheck_expr_else_ok() { type_check_program(&module, &stdlib, &[]).expect("typecheck module"); } +#[test] +fn typecheck_expr_else_empty_ok() { + let source = load_program("expr_else_empty.cap"); + let module = parse_module(&source).expect("parse module"); + let stdlib = load_stdlib().expect("load stdlib"); + type_check_program(&module, &stdlib, &[]).expect("typecheck module"); +} + #[test] fn typecheck_expr_else_break_ok() { let source = load_program("expr_else_break.cap"); @@ -193,44 +217,50 @@ fn typecheck_match_result_non_exhaustive_fails() { } #[test] -fn typecheck_result_unwrap_or_ok() { - let source = load_program("should_pass_result_unwrap_or.cap"); +fn typecheck_result_ok_helper_removed() { + let source = load_program("should_fail_result_ok_removed.cap"); let module = parse_module(&source).expect("parse module"); let stdlib = load_stdlib().expect("load stdlib"); - 
type_check_program(&module, &stdlib, &[]).expect("typecheck module"); + let err = type_check_program(&module, &stdlib, &[]).expect_err("expected type error"); + assert!(err + .to_string() + .contains("unknown method `sys.result.Result__ok`")); } #[test] -fn typecheck_result_unwrap_err_or_ok() { - let source = load_program("should_pass_result_unwrap_err_or.cap"); +fn typecheck_result_unwrap_or_helper_removed() { + let source = load_program("should_fail_result_unwrap_or_removed.cap"); let module = parse_module(&source).expect("parse module"); let stdlib = load_stdlib().expect("load stdlib"); - type_check_program(&module, &stdlib, &[]).expect("typecheck module"); + let err = type_check_program(&module, &stdlib, &[]).expect_err("expected type error"); + assert!(err + .to_string() + .contains("unknown method `sys.result.Result__unwrap_or`")); } #[test] -fn typecheck_result_unwrap_or_mismatch_fails() { - let source = load_program("should_fail_result_unwrap_or_mismatch.cap"); - let module = parse_module(&source).expect("parse module"); - let stdlib = load_stdlib().expect("load stdlib"); - let err = type_check_program(&module, &stdlib, &[]).expect_err("expected type error"); - assert!(err.to_string().contains("argument type mismatch")); +fn parse_if_let_fails() { + let source = load_program("should_fail_if_let.cap"); + let err = parse_module(&source).expect_err("expected parse error"); + assert!(err.to_string().contains("unexpected token in expression")); } #[test] -fn typecheck_result_is_ok_is_err() { - let source = load_program("should_pass_result_is_ok_is_err.cap"); - let module = parse_module(&source).expect("parse module"); - let stdlib = load_stdlib().expect("load stdlib"); - type_check_program(&module, &stdlib, &[]).expect("typecheck module"); +fn parse_expr_else_without_try_fails() { + let source = load_program("should_fail_expr_else_without_try.cap"); + let err = parse_module(&source).expect_err("expected parse error"); + assert!(err + .to_string() + 
.contains("`expr else` now requires a leading `try`")); } #[test] -fn typecheck_result_ok_err() { - let source = load_program("should_pass_result_ok_err.cap"); - let module = parse_module(&source).expect("parse module"); - let stdlib = load_stdlib().expect("load stdlib"); - type_check_program(&module, &stdlib, &[]).expect("typecheck module"); +fn parse_try_let_pattern_fails() { + let source = load_program("should_fail_try_let_pattern.cap"); + let err = parse_module(&source).expect_err("expected parse error"); + assert!(err + .to_string() + .contains("`try let` requires a plain binding name")); } #[test] @@ -613,6 +643,14 @@ fn typecheck_opaque_returns_capability_ok() { type_check_program(&module, &stdlib, &[]).expect("typecheck module"); } +#[test] +fn typecheck_capability_borrow_return_linear_ok() { + let source = load_program("should_pass_capability_borrow_return_linear.cap"); + let module = parse_module(&source).expect("parse module"); + let stdlib = load_stdlib().expect("load stdlib"); + type_check_program(&module, &stdlib, &[]).expect("typecheck module"); +} + #[test] fn typecheck_borrow_return_fails() { let source = load_program("should_fail_borrow_return.cap"); @@ -632,7 +670,7 @@ fn typecheck_capability_borrow_return_fails() { let err = type_check_program(&module, &stdlib, &[]).expect_err("expected type error"); assert!(err .to_string() - .contains("methods returning capabilities must take `self` by value")); + .contains("borrowed capability receivers may only return linear child capabilities")); } #[test] @@ -643,7 +681,7 @@ fn typecheck_capability_borrow_return_result_fails() { let err = type_check_program(&module, &stdlib, &[]).expect_err("expected type error"); assert!(err .to_string() - .contains("methods returning capabilities must take `self` by value")); + .contains("borrowed capability receivers may only return linear child capabilities")); } #[test] @@ -654,7 +692,7 @@ fn typecheck_capability_borrow_return_helper_fails() { let err = 
type_check_program(&module, &stdlib, &[]).expect_err("expected type error"); assert!(err .to_string() - .contains("methods returning capabilities must take `self` by value")); + .contains("borrowed capability receivers may only return linear child capabilities")); } #[test] diff --git a/docs/ARCHITECTURE.md b/docs/ARCHITECTURE.md index b9f7956..d2b2e17 100644 --- a/docs/ARCHITECTURE.md +++ b/docs/ARCHITECTURE.md @@ -74,6 +74,7 @@ lexer ──> parser ──> AST ## Useful References - `TUTORIAL.md` for a quick language tour. -- `PLAN.md` for the current roadmap. +- `CURRENT_STATUS.md` for the implemented language/runtime behavior. +- `PROBLEMS.md` for the current design wrinkles. - `docs/POLICY.md` for safety and invariants. -- `docs/SAMPLES.md` for golden program outputs. +- `capc/tests/run.rs` for golden output expectations. diff --git a/docs/ATTENUATION.md b/docs/ATTENUATION.md index 4f639a9..5a8f0dd 100644 --- a/docs/ATTENUATION.md +++ b/docs/ATTENUATION.md @@ -27,12 +27,12 @@ module fs { pub fn filesystem(root: &RootCap) -> Filesystem // attenuate: Filesystem -> Dir(root) - pub fn root_dir(fs: &Filesystem) -> Dir + pub fn root_dir(fs: Filesystem) -> Dir // attenuate: Dir -> Dir(subdir) (consume Dir to avoid “backtracking” unless you re-mint) pub fn subdir(dir: Dir, name: string) -> Dir - // attenuate: Dir -> File caps + // child handle: Dir -> File caps pub fn open_read(dir: &Dir, name: string) -> FileRead pub fn open_write(dir: &Dir, name: string) -> FileWrite @@ -41,8 +41,9 @@ module fs { } Notes: - • Use borrows (&T) for “authority checks / minting” and moves (T) for “consuming path-like capabilities.” That matches the Austral-style feel: you can derive from a reference, but the derived things themselves are linear-ish.  - • If you don’t have & in the language yet, you can still do attenuation with moves only, but it gets annoying (you’ll constantly lose the parent cap). 
Borrowing is the ergonomic escape hatch that doesn’t require Rust’s full borrow checker if you keep it simple (read-only refs, no aliasing mutation). + • Use borrows (&T) for reusable effects and fresh linear child handles. + • Use moves (T) when the result is itself a reusable capability. + • This keeps `Dir -> Dir(subdir)` explicit while still allowing `Dir -> FileRead` without consuming the directory. 3) Why your Token test is not “too restrictive” diff --git a/docs/POLICY.md b/docs/POLICY.md index bea2896..cecd60d 100644 --- a/docs/POLICY.md +++ b/docs/POLICY.md @@ -42,9 +42,9 @@ This is a compact policy reference for language invariants and safety boundaries - child-handle operations - Reusable use operations should borrow where possible. - Attenuation operations consume the stronger capability. -- Under the current checker, methods on move-tracked capabilities that return - capabilities still take `self` by value. This is a conservative rule that - keeps authority flow explicit. +- Child-handle operations may borrow when they return a fresh linear + capability such as `FileRead` or `TcpConn`. +- Borrowed capability receivers must not return reusable capabilities. - Runtime enforces root/relative path checks. ## No‑Implicitness @@ -61,11 +61,16 @@ Keep these as invariants: - Division by zero traps. - Modular arithmetic is explicit via `sys.math` helpers. -## Result Helpers +## Result Flow -- `Result.is_ok()` returns `true` if `Ok`, `false` if `Err`. -- `Result.is_err()` returns `true` if `Err`, `false` if `Ok`. -- `Result.ok()` returns the `Ok` value (traps if `Err`). -- `Result.err()` returns the `Err` value (traps if `Ok`). -- `Result.unwrap_or(default)` returns `Ok` value or the default. -- `Result.unwrap_err_or(default)` returns `Err` value or the default. +Use `Result` with a small number of intended forms: + +- `?` to propagate an `Err` when the current function returns `Result`. +- `try let x = expr else { ... 
}` when the success value needs to be bound. +- `try expr else { ... }` or `try expr else err { ... }` for statement-style + `Result` handling. +- `let PATTERN = expr else { ... }` for non-`Result` pattern matching. +- `match` for real enum branching or multi-arm recovery logic. + +`Result` intentionally does not expose helper methods like `ok()` or +`unwrap_or()`. Destructuring should stay visible in the source. diff --git a/docs/SAMPLES.md b/docs/SAMPLES.md deleted file mode 100644 index c680f1f..0000000 --- a/docs/SAMPLES.md +++ /dev/null @@ -1,40 +0,0 @@ -# Sample Programs (Golden Outputs) - -These programs are stable references for language behavior and output. - -## 1) Config Loader - -- `examples/config_loader/config_loader.cap` -- Expected output: - ``` - key: host - value: localhost - key: port - value: 8080 - key: log_level - value: info - config ok - ``` - -## 2) Filesystem Attenuation - -- `tests/programs/fs_attenuation.cap` -- Expected output contains: - - `Hello from config/app.txt` - -## 3) Word Count (File) - -- `tests/programs/wc_file.cap` -- Expected output contains: - - `lines:` - - `words:` - - `bytes:` - -## 4) Word Count (Stdin) - -- `tests/programs/wc_stdin.cap` -- Expected output contains: - - `lines:` - - `words:` - - `bytes:` - - `stdin ok` diff --git a/docs/TUTORIAL.md b/docs/TUTORIAL.md index 237f648..6657cf1 100644 --- a/docs/TUTORIAL.md +++ b/docs/TUTORIAL.md @@ -81,13 +81,11 @@ pub fn main(rc: RootCap) -> i32 { } ``` -Matches must be exhaustive; use `_` to cover the rest. `if let` is a -single-arm match: +Matches must be exhaustive; use `_` to cover the rest. `let ... 
else` is for +general pattern matching on enums and similar values: ```cap -if let Ok(x) = make() { - return x -} else { +let Maybe::Some(v) = from_flag(true) else { return 0 } ``` @@ -131,13 +129,36 @@ fn use_value() -> Result { } ``` -Other helpers: +Common forms: ```cap -let v = make().unwrap_or(0) -let e = make().unwrap_err_or(0) +fn send() -> Result { + return Ok(()) +} + +fn bind_value() -> i32 { + try let v = parse() else { + return 0 + } + return v +} + +fn write_value() -> Result { + try send() else err { + return Err(err) + } + return Ok(()) +} ``` +The intended split is: + +- `?` for propagation +- `try let ... else` when a `Result` success value needs to be bound +- `try expr else` for statement-style error flow +- `let ... else` for non-`Result` pattern matching +- `match` for real branching + ## 6) Capabilities and attenuation Capabilities live in `sys.*` and are declared with `capability` (opaque, no @@ -184,15 +205,19 @@ In practice, capability APIs fall into three shapes: That distinction matters more than "everything moves." A read-only filesystem capability being used to read a file is different from a directory capability -being narrowed to a subdirectory, and different again from a listener producing -a fresh connection handle. +being narrowed to a subdirectory, and different again from a directory or +listener producing a fresh linear child handle. + +The current rule is: + +- reusable use operations borrow +- attenuation to a reusable capability consumes `self` +- child-handle operations may borrow when they return a fresh linear capability -In the current implementation, reusable use operations borrow where possible. That is why `ReadFS.read_to_string` and `Dir.read_to_string` can be called -multiple times on the same capability value. By contrast, methods on -move-tracked capabilities that return capabilities still take `self` by value -under the current checker. 
That is why `Dir.subdir` and `Dir.open_read` -consume `Dir`, while `TcpListener.accept` can borrow: `TcpListener` is copyable. +multiple times on the same capability value, `Dir.subdir` still consumes +`Dir`, and both `Dir.open_read` and `TcpListener.accept` can borrow while +returning `FileRead`/`TcpConn`. ## 7) Resources and kinds diff --git a/docs/caps.md b/docs/caps.md index 50872af..4b41651 100644 --- a/docs/caps.md +++ b/docs/caps.md @@ -56,10 +56,12 @@ Capability types use the same move kinds as other structs: - `linear capability struct` means **must be consumed** on all paths. - `copy capability struct` means **unrestricted** (only use this for caps you truly want to duplicate). -Attenuation is enforced by method shape: +Capability method shape distinguishes reusable attenuation from fresh child +handles: -- Any method that returns a capability must take `self` by value. -- Any method that takes `&self` cannot return a capability. +- Reusable use operations borrow: `&self -> data/result`. +- Attenuation to another reusable capability consumes `self`. +- Borrowed capability receivers may return a fresh linear capability. 
Quick examples: @@ -68,7 +70,7 @@ capability struct Dir capability struct FileRead impl Dir { - pub fn open(self, name: string) -> FileRead { return () } + pub fn subdir(self, name: string) -> Dir { return () } } ``` @@ -77,7 +79,15 @@ capability struct Dir capability struct FileRead impl Dir { - pub fn open(self: &Dir, name: string) -> FileRead { return () } // error + pub fn open(self: &Dir, name: string) -> FileRead { return () } // ok +} +``` + +```cap +capability struct Dir + +impl Dir { + pub fn dup(self: &Dir) -> Dir { return () } // error } ``` diff --git a/examples/config_loader/config_loader.cap b/examples/config_loader/config_loader.cap index d8cba6d..5d35a95 100644 --- a/examples/config_loader/config_loader.cap +++ b/examples/config_loader/config_loader.cap @@ -20,7 +20,7 @@ fn parse_line(c: Console, line: string) -> unit { if (trimmed.starts_with("#")) { return () } - let Ok(parts) = trimmed.split_once_view('=') else { + try let parts = trimmed.split_once_view('=') else { return () } print_kv(c, parts.left, parts.right) @@ -52,7 +52,7 @@ fn run(c: Console, fs: ReadFS) -> Result { pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let fs = rc.mint_readfs("examples/config_loader") - run(c, fs) else { + try run(c, fs) else { c.println("config read failed") return 1 } diff --git a/examples/hashmap_demo/hashmap_demo.cap b/examples/hashmap_demo/hashmap_demo.cap index 964a9f1..075d29a 100644 --- a/examples/hashmap_demo/hashmap_demo.cap +++ b/examples/hashmap_demo/hashmap_demo.cap @@ -133,7 +133,7 @@ pub fn hashmap_new(alloc: buffer::Alloc, initial_capacity: i32) -> HashMap { // Initialize all entries as empty for i in 0..cap { - entries.push(empty_entry()) else { + try entries.push(empty_entry()) else { panic() } } @@ -228,7 +228,7 @@ fn hashmap_rehash(map: HashMap, new_cap: i32) -> HashMap { // Create new entries vector let new_entries = alloc.vec_new() for i in 0..new_cap { - new_entries.push(empty_entry()) else { + try 
new_entries.push(empty_entry()) else { panic() } } @@ -250,7 +250,7 @@ fn hashmap_rehash(map: HashMap, new_cap: i32) -> HashMap { if (is_empty(slot.state)) { // Found empty slot, insert here let new_entry = Entry { state: occupied_state(), key: entry.key, value: entry.value } - new_entries.set(index, new_entry) else { + try new_entries.set(index, new_entry) else { panic() } new_size = new_size + 1 @@ -329,7 +329,7 @@ pub fn hashmap_insert(map: HashMap, key: i32, value: i32) -> Result Result Result { if (entry.key == key) { // Mark as deleted let deleted_entry = Entry { state: deleted_state(), key: entry.key, value: entry.value } - map.entries.set(index, deleted_entry) else { + try map.entries.set(index, deleted_entry) else { return Err(()) } return Ok(HashMap { @@ -444,30 +444,30 @@ fn run_demo(c: Console, alloc: buffer::Alloc) -> unit { let test_vals = alloc.vec_i32_new() // key -> key * 10 - test_keys.push(42) else { panic() } - test_vals.push(420) else { panic() } + try test_keys.push(42) else { panic() } + try test_vals.push(420) else { panic() } - test_keys.push(17) else { panic() } - test_vals.push(170) else { panic() } + try test_keys.push(17) else { panic() } + try test_vals.push(170) else { panic() } - test_keys.push(99) else { panic() } - test_vals.push(990) else { panic() } + try test_keys.push(99) else { panic() } + try test_vals.push(990) else { panic() } - test_keys.push(1) else { panic() } - test_vals.push(10) else { panic() } + try test_keys.push(1) else { panic() } + try test_vals.push(10) else { panic() } - test_keys.push(256) else { panic() } - test_vals.push(2560) else { panic() } + try test_keys.push(256) else { panic() } + try test_vals.push(2560) else { panic() } // Keys that will likely collide - test_keys.push(0) else { panic() } - test_vals.push(0) else { panic() } + try test_keys.push(0) else { panic() } + try test_vals.push(0) else { panic() } - test_keys.push(16) else { panic() } - test_vals.push(160) else { panic() } + try 
test_keys.push(16) else { panic() } + try test_vals.push(160) else { panic() } - test_keys.push(32) else { panic() } - test_vals.push(320) else { panic() } + try test_keys.push(32) else { panic() } + try test_vals.push(320) else { panic() } // Insert all entries let num_entries = test_keys.len() @@ -540,9 +540,9 @@ fn run_demo(c: Console, alloc: buffer::Alloc) -> unit { c.println("Looking up non-existent keys...") let missing = alloc.vec_i32_new() - missing.push(100) else { panic() } - missing.push(999) else { panic() } - missing.push(12345) else { panic() } + try missing.push(100) else { panic() } + try missing.push(999) else { panic() } + try missing.push(12345) else { panic() } let num_missing = missing.len() for i in 0..num_missing { diff --git a/examples/how_to_string/how_to_string.cap b/examples/how_to_string/how_to_string.cap index e6a9669..1118206 100644 --- a/examples/how_to_string/how_to_string.cap +++ b/examples/how_to_string/how_to_string.cap @@ -12,13 +12,12 @@ fn demo_string_view(c: Console) -> unit { c.println("len:") c.println_i32(trimmed.len()) - match (trimmed.split_once_view(',')) { - Ok(parts) => { - c.println(parts.left) - c.println(parts.right) - } - Err(_) => { c.println("comma not found") } + try let parts = trimmed.split_once_view(',') else { + c.println("comma not found") + return } + c.println(parts.left) + c.println(parts.right) let words = trimmed.split_view_default(',') c.println("split count:") @@ -31,24 +30,38 @@ fn demo_string_view(c: Console) -> unit { fn demo_text_builder(c: Console) -> unit { c.println("-- Text builder --") let t = buffer::text_new_default() - t.push_str("hello") else { + try t.push_str("hello") else { panic() } - t.push_byte(' ') else { + try t.push_byte(' ') else { panic() } - t.append("text") else { + try t.append("text") else { panic() } - c.println(t.slice_range(0, 5).ok()) - c.println(t.to_string().ok()) + try let head = t.slice_range(0, 5) else { + panic() + } + c.println(head) + try let owned = 
t.to_string() else { + panic() + } + c.println(owned) t.free_default() - let t2 = buffer::text_from_default("owned").ok() - c.println(t2.to_string().ok()) + try let t2 = buffer::text_from_default("owned") else { + panic() + } + try let owned2 = t2.to_string() else { + panic() + } + c.println(owned2) t2.free_default() - c.println("a".concat_default("b").ok()) + try let joined = "a".concat_default("b") else { + panic() + } + c.println(joined) } pub fn main(rc: RootCap) -> i32 { diff --git a/examples/http_server/http_server.cap b/examples/http_server/http_server.cap index 755332d..c84658e 100644 --- a/examples/http_server/http_server.cap +++ b/examples/http_server/http_server.cap @@ -9,10 +9,10 @@ use sys::path use sys::system fn strip_query(raw_path: string) -> string { - match (raw_path.split_once_view('?')) { - Ok(parts) => { return parts.left } - Err(_) => { return raw_path } + try let parts = raw_path.split_once_view('?') else { + return raw_path } + return parts.left } enum ParseErr { @@ -23,34 +23,34 @@ enum ParseErr { fn parse_request_line(line: string) -> Result { let trimmed = line.trim_view() - let Ok(head) = trimmed.split_once_view(' ') else { + try let head = trimmed.split_once_view(' ') else { return Err(ParseErr::MissingSpace) } if (head.left != "GET") { return Err(ParseErr::BadMethod) } let rest = head.right.trim_start_view() - let Ok(target) = rest.split_once_view(' ') else { + try let target = rest.split_once_view(' ') else { return Err(ParseErr::MissingSpace) } - match (path::clean_relative(strip_query(target.left))) { - Ok(p) => { return Ok(p) } - Err(_) => { return Err(ParseErr::BadSlice) } + try let clean = path::clean_relative(strip_query(target.left)) else { + return Err(ParseErr::BadSlice) } + return Ok(clean) } fn parse_request_path(req: string) -> Result { - match (req.split_once_view('\n')) { - Ok(parts) => { return parse_request_line(parts.left.trim_end_view()) } - Err(_) => { return parse_request_line(req.trim_end_view()) } + try let 
parts = req.split_once_view('\n') else { + return parse_request_line(req.trim_end_view()) } + return parse_request_line(parts.left.trim_end_view()) } fn respond_ok(conn: &TcpConn, body: string) -> Result { - conn.write("HTTP/1.0 200 OK\r\nContent-Type: text/plain\r\n\r\n") else err { + try conn.write("HTTP/1.0 200 OK\r\nContent-Type: text/plain\r\n\r\n") else err { return Err(err) } - conn.write(body) else err { + try conn.write(body) else err { return Err(err) } return Ok(()) @@ -65,10 +65,10 @@ fn respond_bad_request(conn: &TcpConn) -> Result { } fn handle_request(conn: &TcpConn, readfs: &ReadFS, req: string) -> Result { - let Ok(path) = parse_request_path(req) else { + try let path = parse_request_path(req) else { return respond_bad_request(conn) } - let Ok(body) = readfs.read_to_string_default(path) else { + try let body = readfs.read_to_string_default(path) else { return respond_not_found(conn) } return respond_ok(conn, body) @@ -84,23 +84,18 @@ fn serve_forever(c: Console, net: Net, rc: RootCap, root: string, port: i32) -> defer listener.close() defer readfs.close() while (true) { - if let Ok(conn) = listener.accept() { - match (conn.read_default(4096)) { - Ok(req) => { - let handled = handle_request(conn, readfs_ref, req) - conn.close() - if (handled.is_err()) { - // Ignore per-request write errors; keep the server alive. - } - } - Err(_) => { - conn.close() - } - } - } else { + defer c.println("request complete") + try let conn = listener.accept() else { continue } - defer c.println("request complete") + try let req = conn.read_default(4096) else { + conn.close() + continue + } + try handle_request(conn, readfs_ref, req) else { + // Ignore per-request write errors; keep the server alive. 
+ } + conn.close() } return Ok(()) } @@ -111,7 +106,7 @@ pub fn main(rc: RootCap) -> i32 { let args = rc.mint_args() let root = args.at_or(1, "examples/http_server") let port = 8090 - serve_forever(c, net, rc, root, port) else { + try serve_forever(c, net, rc, root, port) else { c.println("server error (is the port in use?)") return 0 } diff --git a/examples/sort/sort.cap b/examples/sort/sort.cap index a2a94bb..caa47a3 100644 --- a/examples/sort/sort.cap +++ b/examples/sort/sort.cap @@ -50,7 +50,7 @@ fn sort_indices(lines: Vec, indices: Vec) -> unit { let curr_idx = indices.get_or(j, 0) let prev_idx = indices.get_or(j - 1, 0) if (line_lt(lines, curr_idx, prev_idx)) { - indices.swap(j, j - 1) else { + try indices.swap(j, j - 1) else { panic() } j = j - 1 @@ -69,7 +69,7 @@ fn run(c: Console, input: Stdin) -> Result { // Create index array [0, 1, 2, ...] let indices = buffer::vec_new_default() for i in 0..n { - indices.push(i) else { + try indices.push(i) else { panic() } } @@ -93,7 +93,7 @@ fn run(c: Console, input: Stdin) -> Result { pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let input = rc.mint_stdin() - run(c, input) else { + try run(c, input) else { c.println("error reading input") return 1 } diff --git a/examples/uniq/uniq.cap b/examples/uniq/uniq.cap index e04f39a..495a42a 100644 --- a/examples/uniq/uniq.cap +++ b/examples/uniq/uniq.cap @@ -32,7 +32,7 @@ fn run(c: Console, input: Stdin) -> Result { pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let input = rc.mint_stdin() - run(c, input) else { + try run(c, input) else { c.println("error reading input") return 1 } diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index a3f5faa..d1ea6ad 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -447,7 +447,7 @@ pub extern "C" fn capable_rt_fs_subdir(dir: Handle, name: *const CapString) -> H #[no_mangle] pub extern "C" fn capable_rt_fs_open_read(dir: Handle, name: *const CapString) -> Handle { let name = unsafe { 
read_cap_string(name) }; - let state = take_handle(&DIRS, dir, "dir table"); + let state = clone_handle(&DIRS, dir, "dir table"); let (Some(state), Some(name)) = (state, name) else { return 0; }; diff --git a/stdlib/sys/args.cap b/stdlib/sys/args.cap index f697deb..cb6d067 100644 --- a/stdlib/sys/args.cap +++ b/stdlib/sys/args.cap @@ -24,6 +24,9 @@ impl Args { /// Get an argument by index, or return a fallback string. pub fn at_or(self, index: i32, fallback: string) -> string { - return self.at(index).unwrap_or(fallback) + try let arg = self.at(index) else { + return fallback + } + return arg } } diff --git a/stdlib/sys/fs.cap b/stdlib/sys/fs.cap index 1fbf2ec..6ed6519 100644 --- a/stdlib/sys/fs.cap +++ b/stdlib/sys/fs.cap @@ -78,7 +78,7 @@ impl Dir { } /// Open a file for reading. - pub fn open_read(self, name: string) -> FileRead { + pub fn open_read(self: &Dir, name: string) -> FileRead { return () } diff --git a/stdlib/sys/result.cap b/stdlib/sys/result.cap index 86d4c12..e508ca5 100644 --- a/stdlib/sys/result.cap +++ b/stdlib/sys/result.cap @@ -1,4 +1,4 @@ -/// Result enum and helpers. +/// Result enum for fallible operations. package safe module sys::result @@ -9,53 +9,3 @@ pub enum Result { /// Error value. Err(E) } - -impl Result { - /// True if Ok. - pub fn is_ok(self) -> bool { - match self { - Result::Ok(_) => { return true } - Result::Err(_) => { return false } - } - } - - /// True if Err. - pub fn is_err(self) -> bool { - match self { - Result::Ok(_) => { return false } - Result::Err(_) => { return true } - } - } - - /// Return Ok value or a default. - pub fn unwrap_or(self, default: T) -> T { - match self { - Result::Ok(val) => { return val } - Result::Err(_) => { return default } - } - } - - /// Return Err value or a default. - pub fn unwrap_err_or(self, default: E) -> E { - match self { - Result::Ok(_) => { return default } - Result::Err(err) => { return err } - } - } - - /// Unwrap Ok or panic. 
- pub fn ok(self) -> T { - match self { - Result::Ok(val) => { return val } - Result::Err(_) => { panic() } - } - } - - /// Unwrap Err or panic. - pub fn err(self) -> E { - match self { - Result::Ok(_) => { panic() } - Result::Err(err) => { return err } - } - } -} diff --git a/tests/programs/expr_else.cap b/tests/programs/expr_else.cap index f99ef78..02d61a2 100644 --- a/tests/programs/expr_else.cap +++ b/tests/programs/expr_else.cap @@ -14,7 +14,7 @@ pub fn main(rc: RootCap) -> i32 { let total = 0 for i in 0..4 { - write_value(i == 2) else { + try write_value(i == 2) else { continue } total = total + 1 @@ -22,13 +22,13 @@ pub fn main(rc: RootCap) -> i32 { c.assert(total == 1) let saw_err = false - write_value(false) else err { + try write_value(false) else err { c.assert(err == 7) saw_err = true } c.assert(saw_err) - write_value(true) else { + try write_value(true) else { return 1 } diff --git a/tests/programs/expr_else_break.cap b/tests/programs/expr_else_break.cap index 5ee01db..c82add0 100644 --- a/tests/programs/expr_else_break.cap +++ b/tests/programs/expr_else_break.cap @@ -15,7 +15,7 @@ pub fn main(rc: RootCap) -> i32 { let stop = 0 for i in 0..6 { - fail_at(i) else err { + try fail_at(i) else err { stop = err break } diff --git a/tests/programs/expr_else_empty.cap b/tests/programs/expr_else_empty.cap new file mode 100644 index 0000000..e31d37c --- /dev/null +++ b/tests/programs/expr_else_empty.cap @@ -0,0 +1,12 @@ +package safe +module expr_else_empty + +fn do_it() -> Result { + return Err(1) +} + +pub fn main() -> i32 { + try do_it() else { + } + return 0 +} diff --git a/tests/programs/fs_open_read_reuse.cap b/tests/programs/fs_open_read_reuse.cap new file mode 100644 index 0000000..5a40530 --- /dev/null +++ b/tests/programs/fs_open_read_reuse.cap @@ -0,0 +1,29 @@ +module fs_open_read_reuse +use sys::system + +pub fn main(rc: RootCap) -> i32 { + let c = rc.mint_console() + let alloc = rc.mint_alloc_default() + let fs = rc.mint_filesystem("./config") + let 
dir = fs.root_dir() + + let first = dir.open_read("app.txt") + try let left = first.read_to_string(alloc) else { + c.println("first read failed") + return 1 + } + + let second = dir.open_read("app.txt") + try let right = second.read_to_string(alloc) else { + c.println("second read failed") + return 1 + } + + if (left == right) { + c.println("same") + return 0 + } + + c.println("different") + return 1 +} diff --git a/tests/programs/let_else.cap b/tests/programs/let_else.cap index a4abddc..5ac4621 100644 --- a/tests/programs/let_else.cap +++ b/tests/programs/let_else.cap @@ -14,38 +14,25 @@ fn from_flag(flag: bool) -> Maybe { return Maybe::None } -fn parse(flag: bool) -> Result { - if (flag) { - return Ok(11) - } - return Err("bad") -} - pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() - let Ok(n) = parse(true) else { - c.assert(false) - return 1 - } - c.assert(n == 11) - let Maybe::Some(v) = from_flag(true) else { c.assert(false) return 1 } c.assert(v == 7) - let fallback = match parse(false) { - Ok(found) => { found } - Err(_) => { - let Ok(found) = parse(true) else { + let fallback = match from_flag(false) { + Maybe::Some(found) => { found } + Maybe::None => { + let Maybe::Some(found) = from_flag(true) else { return 2 } found } } - c.assert(fallback == 11) + c.assert(fallback == 7) c.println("let else ok") return 0 } diff --git a/tests/programs/path_helpers.cap b/tests/programs/path_helpers.cap index d5701bd..fcd74c5 100644 --- a/tests/programs/path_helpers.cap +++ b/tests/programs/path_helpers.cap @@ -7,11 +7,15 @@ use sys::path pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() - match (path::clean_relative("/site/./docs//index.html")) { - Ok(clean) => { c.assert(clean.eq("site/docs/index.html")) } - Err(_) => { c.assert(false); return 1 } + try let clean = path::clean_relative("/site/./docs//index.html") else { + c.assert(false) + return 1 + } + c.assert(clean.eq("site/docs/index.html")) + let Err(path_err) = 
path::clean_relative("../etc/passwd") else { + c.assert(false) + return 1 } - c.assert(path::clean_relative("../etc/passwd").is_err()) let parts = path::segments_view("site/docs/index.html") c.assert(parts.len() == 3) diff --git a/tests/programs/should_fail_expr_else_without_try.cap b/tests/programs/should_fail_expr_else_without_try.cap new file mode 100644 index 0000000..770c9b0 --- /dev/null +++ b/tests/programs/should_fail_expr_else_without_try.cap @@ -0,0 +1,13 @@ +package safe +module should_fail_expr_else_without_try + +fn write_value() -> Result { + return Err(7) +} + +pub fn main() -> i32 { + write_value() else { + return 1 + } + return 0 +} diff --git a/tests/programs/should_fail_if_let.cap b/tests/programs/should_fail_if_let.cap new file mode 100644 index 0000000..7a32688 --- /dev/null +++ b/tests/programs/should_fail_if_let.cap @@ -0,0 +1,13 @@ +module should_fail_if_let + +fn make() -> Result { + return Ok(7) +} + +pub fn main() -> i32 { + if let Ok(x) = make() { + return x + } else { + return 0 + } +} diff --git a/tests/programs/should_fail_result_ok_removed.cap b/tests/programs/should_fail_result_ok_removed.cap new file mode 100644 index 0000000..68674f5 --- /dev/null +++ b/tests/programs/should_fail_result_ok_removed.cap @@ -0,0 +1,10 @@ +module should_fail_result_ok_removed + +fn make() -> Result { + return Ok(1) +} + +pub fn main() -> i32 { + let v = make().ok() + return v +} diff --git a/tests/programs/should_fail_result_unwrap_or_mismatch.cap b/tests/programs/should_fail_result_unwrap_or_mismatch.cap deleted file mode 100644 index cc0498d..0000000 --- a/tests/programs/should_fail_result_unwrap_or_mismatch.cap +++ /dev/null @@ -1,10 +0,0 @@ -module should_fail_result_unwrap_or_mismatch - -fn make() -> Result { - return Ok(1) -} - -pub fn main() -> i32 { - let v = make().unwrap_or('\x01') - return 0 -} diff --git a/tests/programs/should_fail_result_unwrap_or_removed.cap b/tests/programs/should_fail_result_unwrap_or_removed.cap new file mode 100644 
index 0000000..a220017 --- /dev/null +++ b/tests/programs/should_fail_result_unwrap_or_removed.cap @@ -0,0 +1,10 @@ +module should_fail_result_unwrap_or_removed + +fn make() -> Result { + return Ok(1) +} + +pub fn main() -> i32 { + let v = make().unwrap_or(9) + return v +} diff --git a/tests/programs/should_fail_try_let_pattern.cap b/tests/programs/should_fail_try_let_pattern.cap new file mode 100644 index 0000000..d9b3340 --- /dev/null +++ b/tests/programs/should_fail_try_let_pattern.cap @@ -0,0 +1,18 @@ +package safe +module should_fail_try_let_pattern + +enum Maybe { + Some(i32), + None +} + +fn from_flag() -> Maybe { + return Maybe::Some(7) +} + +pub fn main() -> i32 { + try let Maybe::Some(v) = from_flag() else { + return 1 + } + return 0 +} diff --git a/tests/programs/should_pass_capability_borrow_return_linear.cap b/tests/programs/should_pass_capability_borrow_return_linear.cap new file mode 100644 index 0000000..a87456d --- /dev/null +++ b/tests/programs/should_pass_capability_borrow_return_linear.cap @@ -0,0 +1,27 @@ +package safe +module should_pass_capability_borrow_return_linear + +capability struct Cap +linear capability struct Child + +impl Cap { + pub fn child(self: &Cap) -> Child { + return Child{} + } + + pub fn try_child(self: &Cap) -> Result { + return Ok(Child{}) + } +} + +pub fn main() -> i32 { + let cap = Cap{} + let first = cap.child() + drop(first) + + try let second = cap.try_child() else { + return 1 + } + drop(second) + return 0 +} diff --git a/tests/programs/should_pass_if_let.cap b/tests/programs/should_pass_if_let.cap deleted file mode 100644 index e07a9fc..0000000 --- a/tests/programs/should_pass_if_let.cap +++ /dev/null @@ -1,41 +0,0 @@ -module should_pass_if_let -use sys::system -use sys::console - -fn make_ok() -> Result { - return Ok(7) -} - -fn make_err() -> Result { - return Err(9) -} - -pub fn main(rc: RootCap) -> i32 { - let c = rc.mint_console() - let ok = make_ok() - if let Ok(x) = ok { - if (x != 7) { - c.println("bad ok") 
- return 1 - } - c.println("ok") - } else { - c.println("unexpected err") - return 1 - } - - let err = make_err() - if let Err(e) = err { - if (e != 9) { - c.println("bad err") - return 2 - } - c.println("err") - } else { - c.println("unexpected ok") - return 2 - } - - c.println("if_let ok") - return 0 -} diff --git a/tests/programs/should_pass_result_is_ok_is_err.cap b/tests/programs/should_pass_result_is_ok_is_err.cap deleted file mode 100644 index 5382c11..0000000 --- a/tests/programs/should_pass_result_is_ok_is_err.cap +++ /dev/null @@ -1,31 +0,0 @@ -module should_pass_result_is_ok_is_err -use sys::system - -fn make_ok() -> Result { - return Ok(1) -} - -fn make_err() -> Result { - return Err(2) -} - -pub fn main(rc: RootCap) -> i32 { - let con = rc.mint_console() - let ok_result = make_ok() - let err_result = make_err() - - if ok_result.is_ok() == false { - return 1 - } - if ok_result.is_err() == true { - return 2 - } - if err_result.is_ok() == true { - return 3 - } - if err_result.is_err() == false { - return 4 - } - con.print("is_ok_is_err ok") - return 0 -} diff --git a/tests/programs/should_pass_result_ok_err.cap b/tests/programs/should_pass_result_ok_err.cap deleted file mode 100644 index 9c4bad3..0000000 --- a/tests/programs/should_pass_result_ok_err.cap +++ /dev/null @@ -1,33 +0,0 @@ -module should_pass_result_ok_err -use sys::system - -fn make_ok() -> Result { - return Ok(42) -} - -fn make_err() -> Result { - return Err(99) -} - -pub fn main(rc: RootCap) -> i32 { - let con = rc.mint_console() - let ok_result = make_ok() - let err_result = make_err() - - if ok_result.is_ok() { - let val = ok_result.ok() - if val != 42 { - return 1 - } - } - - if err_result.is_err() { - let e = err_result.err() - if e != 99 { - return 2 - } - } - - con.print("ok_err ok") - return 0 -} diff --git a/tests/programs/should_pass_result_unwrap_err_or.cap b/tests/programs/should_pass_result_unwrap_err_or.cap deleted file mode 100644 index 2b2c8ff..0000000 --- 
a/tests/programs/should_pass_result_unwrap_err_or.cap +++ /dev/null @@ -1,18 +0,0 @@ -module should_pass_result_unwrap_err_or - -fn make_ok() -> Result { - return Ok(1) -} - -fn make_err() -> Result { - return Err(2) -} - -pub fn main() -> i32 { - let a = make_ok().unwrap_err_or(9) - let b = make_err().unwrap_err_or(9) - if a == 9 && b == 2 { - return 0 - } - return 1 -} diff --git a/tests/programs/should_pass_result_unwrap_or.cap b/tests/programs/should_pass_result_unwrap_or.cap deleted file mode 100644 index f4ff486..0000000 --- a/tests/programs/should_pass_result_unwrap_or.cap +++ /dev/null @@ -1,18 +0,0 @@ -module should_pass_result_unwrap_or - -fn make_ok() -> Result { - return Ok(1) -} - -fn make_err() -> Result { - return Err(2) -} - -pub fn main() -> i32 { - let a = make_ok().unwrap_or(9) - let b = make_err().unwrap_or(9) - if a == 1 && b == 9 { - return 0 - } - return 1 -} diff --git a/tests/programs/string_helpers.cap b/tests/programs/string_helpers.cap index 77a1ab6..6561d7e 100644 --- a/tests/programs/string_helpers.cap +++ b/tests/programs/string_helpers.cap @@ -30,8 +30,14 @@ pub fn main(rc: RootCap) -> i32 { let lines = "a\nb\n".split_lines(alloc) let line_views = "a\r\nb\n".lines_view(alloc) let t = buffer::text_new_default() - t.push_str("hi").ok() - c.assert(t.to_string().ok().eq("hi")) + try t.push_str("hi") else { + panic() + } + try let owned = t.to_string() else { + c.assert(false) + return 1 + } + c.assert(owned.eq("hi")) t.free_default() c.assert(n == 3 && b == 'a' && count == 3) c.assert(word_views.len() == 3) @@ -98,7 +104,10 @@ pub fn main(rc: RootCap) -> i32 { c.assert("a\nb\n".count_newlines() == 2) c.assert(" a bc \t d ".count_words_ascii() == 3) c.assert("abc".is_ascii()) - c.assert("abc".byte_at_checked(10).is_err()) + let Err(byte_err) = "abc".byte_at_checked(10) else { + c.assert(false) + return 1 + } match ("a,b,c".split_once(alloc, ',')) { Ok(parts) => { c.assert(parts.left.eq("a")) diff --git a/tests/programs/text_basic.cap 
b/tests/programs/text_basic.cap index 05e4e60..3866904 100644 --- a/tests/programs/text_basic.cap +++ b/tests/programs/text_basic.cap @@ -7,11 +7,19 @@ pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let alloc = rc.mint_alloc_default() let text = string::text_new(alloc) - text.push_str("hi").ok() - text.push_byte('!').ok() + try text.push_str("hi") else { + panic() + } + try text.push_byte('!') else { + panic() + } let view = text.as_string() c.assert(view.eq("hi!")) - c.assert(text.to_string().ok().eq("hi!")) + try let owned = text.to_string() else { + c.assert(false) + return 1 + } + c.assert(owned.eq("hi!")) text.free(alloc) c.println("text basic ok") return 0 diff --git a/tests/programs/text_helpers_more.cap b/tests/programs/text_helpers_more.cap index 168524a..ab3fdf0 100644 --- a/tests/programs/text_helpers_more.cap +++ b/tests/programs/text_helpers_more.cap @@ -7,19 +7,49 @@ pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let alloc = rc.mint_alloc_default() let text = alloc.text_new() - text.push_str("hi").ok() - text.push_byte('\n').ok() - text.push_str("ok").ok() - c.assert(text.slice_range(0, 2).ok().eq("hi")) + try text.push_str("hi") else { + panic() + } + try text.push_byte('\n') else { + panic() + } + try text.push_str("ok") else { + panic() + } + try let prefix = text.slice_range(0, 2) else { + c.assert(false) + return 1 + } + c.assert(prefix.eq("hi")) let v = alloc.vec_u8_new() - v.push('!').ok() - text.extend_vec(v).ok() - c.assert(text.to_string().ok().eq("hi\nok!")) - let t2 = alloc.text_from("cap").ok() - c.assert(t2.to_string().ok().eq("cap")) + try v.push('!') else { + panic() + } + try text.extend_vec(v) else { + panic() + } + try let owned = text.to_string() else { + c.assert(false) + return 1 + } + c.assert(owned.eq("hi\nok!")) + try let t2 = alloc.text_from("cap") else { + panic() + } + try let owned2 = t2.to_string() else { + c.assert(false) + return 1 + } + c.assert(owned2.eq("cap")) t2.free(alloc) - let t3 = 
"owned".to_text(alloc).ok() - c.assert(t3.to_string().ok().eq("owned")) + try let t3 = "owned".to_text(alloc) else { + panic() + } + try let owned3 = t3.to_string() else { + c.assert(false) + return 1 + } + c.assert(owned3.eq("owned")) t3.free(alloc) alloc.vec_u8_free(v) text.free(alloc) diff --git a/tests/programs/text_push_safe.cap b/tests/programs/text_push_safe.cap index 0333e81..146ef84 100644 --- a/tests/programs/text_push_safe.cap +++ b/tests/programs/text_push_safe.cap @@ -7,12 +7,20 @@ pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let alloc = rc.mint_alloc_default() let text = string::text_new(alloc) - text.push_byte('\x00').ok() - text.push_byte('\x07').ok() + try text.push_byte('\x00') else { + panic() + } + try text.push_byte('\x07') else { + panic() + } let v = alloc.vec_u8_new() - v.push('\t').ok() + try v.push('\t') else { + panic() + } let slice = v.as_slice() - text.extend_slice(slice).ok() + try text.extend_slice(slice) else { + panic() + } let len = text.len() c.assert(len == 3) c.assert(!text.is_empty()) diff --git a/tests/programs/text_safe.cap b/tests/programs/text_safe.cap index 552e443..2842a54 100644 --- a/tests/programs/text_safe.cap +++ b/tests/programs/text_safe.cap @@ -7,9 +7,15 @@ pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let alloc = rc.mint_alloc_default() let text = string::text_new(alloc) - text.push_byte('a').ok() - text.push_byte('b').ok() - text.push_byte('c').ok() + try text.push_byte('a') else { + panic() + } + try text.push_byte('b') else { + panic() + } + try text.push_byte('c') else { + panic() + } let slice = text.as_slice() let len = slice.len() c.assert(len == 3) diff --git a/tests/programs/text_to_string.cap b/tests/programs/text_to_string.cap index 0307af4..fe8e35d 100644 --- a/tests/programs/text_to_string.cap +++ b/tests/programs/text_to_string.cap @@ -7,9 +7,15 @@ pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let alloc = rc.mint_alloc_default() let text = 
string::text_new(alloc) - text.push_byte('h').ok() - text.push_byte('i').ok() - let s = text.to_string().ok() + try text.push_byte('h') else { + panic() + } + try text.push_byte('i') else { + panic() + } + try let s = text.to_string() else { + panic() + } c.println(s) text.free(alloc) return 0 diff --git a/tests/programs/try_let.cap b/tests/programs/try_let.cap new file mode 100644 index 0000000..05a12fd --- /dev/null +++ b/tests/programs/try_let.cap @@ -0,0 +1,32 @@ +package safe +module try_let +use sys::system + +fn parse(flag: bool) -> Result { + if (flag) { + return Ok(11) + } + return Err("bad") +} + +pub fn main(rc: RootCap) -> i32 { + let c = rc.mint_console() + + try let n = parse(true) else { + return 1 + } + c.assert(n == 11) + + let fallback = match parse(false) { + Ok(found) => { found } + Err(_) => { + try let found = parse(true) else { + return 2 + } + found + } + } + c.assert(fallback == 11) + c.println("try let ok") + return 0 +} diff --git a/tests/programs/vec_custom_eq.cap b/tests/programs/vec_custom_eq.cap index c8646ff..6895616 100644 --- a/tests/programs/vec_custom_eq.cap +++ b/tests/programs/vec_custom_eq.cap @@ -19,10 +19,18 @@ pub fn main(rc: RootCap) -> i32 { // Test Vec with custom Eq let v = alloc.vec_new() - v.push(Entry { key: 1, value: 10 }).ok() - v.push(Entry { key: 2, value: 20 }).ok() - v.push(Entry { key: 2, value: 20 }).ok() - v.push(Entry { key: 3, value: 30 }).ok() + try v.push(Entry { key: 1, value: 10 }) else { + panic() + } + try v.push(Entry { key: 2, value: 20 }) else { + panic() + } + try v.push(Entry { key: 2, value: 20 }) else { + panic() + } + try v.push(Entry { key: 3, value: 30 }) else { + panic() + } // Test contains_eq let target = Entry { key: 2, value: 20 } diff --git a/tests/programs/vec_search_helpers.cap b/tests/programs/vec_search_helpers.cap index ad72fac..c75d390 100644 --- a/tests/programs/vec_search_helpers.cap +++ b/tests/programs/vec_search_helpers.cap @@ -7,9 +7,15 @@ pub fn main(rc: RootCap) -> i32 { 
let c = rc.mint_console() let alloc = rc.mint_alloc_default() let v = alloc.vec_i32_new() - v.push(1).ok() - v.push(2).ok() - v.push(1).ok() + try v.push(1) else { + panic() + } + try v.push(2) else { + panic() + } + try v.push(1) else { + panic() + } c.assert(v.contains(2)) c.assert(!v.contains(3)) c.assert(v.count(1) == 2) @@ -29,7 +35,9 @@ pub fn main(rc: RootCap) -> i32 { Ok(x) => { c.assert(x == 1) } Err(_) => { c.assert(false); return 1 } } - v.swap(0, 1).ok() + try v.swap(0, 1) else { + panic() + } match (v.first()) { Ok(x) => { c.assert(x == 2) } Err(_) => { c.assert(false); return 1 } @@ -37,9 +45,17 @@ pub fn main(rc: RootCap) -> i32 { c.assert(v.get_or(1, 99) == 1) c.assert(v.get_or(99, 77) == 77) let bytes = buffer::vec_new_default() - bytes.push('h').ok() - bytes.push('i').ok() - c.assert(bytes.to_string().ok().eq("hi")) + try bytes.push('h') else { + panic() + } + try bytes.push('i') else { + panic() + } + try let s = bytes.to_string() else { + c.assert(false) + return 1 + } + c.assert(s.eq("hi")) alloc.vec_i32_free(v) bytes.free() c.println("vec search ok") diff --git a/tests/programs/wc_file.cap b/tests/programs/wc_file.cap index c2baa6d..a99e72c 100644 --- a/tests/programs/wc_file.cap +++ b/tests/programs/wc_file.cap @@ -20,7 +20,7 @@ fn count_text(c: Console, s: string) -> i32 { pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let args = rc.mint_args() - let Ok(path) = args.at(1) else { + try let path = args.at(1) else { c.println("usage: wc ") return 1 } From 0201473a79df8d09eab3713bea87030399e82cdf Mon Sep 17 00:00:00 2001 From: Jordan Mecom Date: Mon, 23 Mar 2026 16:17:19 -0700 Subject: [PATCH 05/17] Simplify allocator-facing stdlib APIs --- capc/src/codegen/intrinsics.rs | 22 +- .../parser__snapshot_struct_and_match.snap | 250 +++++++----------- docs/README.md | 4 +- docs/TUTORIAL.md | 15 +- examples/config_loader/config_loader.cap | 2 +- examples/how_to_string/how_to_string.cap | 14 +- examples/http_server/http_server.cap | 4 +- 
examples/sort/sort.cap | 7 +- examples/uniq/uniq.cap | 4 +- stdlib/sys/buffer.cap | 42 +-- stdlib/sys/fs.cap | 92 +++---- stdlib/sys/net.cap | 24 +- stdlib/sys/path.cap | 26 +- stdlib/sys/stdin.cap | 12 +- stdlib/sys/string.cap | 238 +++++++++-------- stdlib/sys/vec.cap | 22 +- tests/programs/attenuation_untrusted_fail.cap | 5 +- tests/programs/attenuation_untrusted_pass.cap | 3 +- tests/programs/fs_attenuation.cap | 3 +- tests/programs/fs_dir_reuse.cap | 11 +- tests/programs/fs_helpers.cap | 4 +- tests/programs/fs_open_read_reuse.cap | 5 +- tests/programs/fs_read.cap | 3 +- tests/programs/fs_reuse.cap | 7 +- tests/programs/fs_symlink_escape.cap | 3 +- tests/programs/fs_traversal_denied.cap | 3 +- tests/programs/fs_traversal_kind.cap | 3 +- ...should_fail_attenuation_reuse_fileread.cap | 5 +- tests/programs/stdin_safe.cap | 3 +- tests/programs/string_helpers.cap | 51 ++-- tests/programs/string_split.cap | 5 +- tests/programs/text_basic.cap | 5 +- tests/programs/text_helpers_more.cap | 12 +- tests/programs/text_push_safe.cap | 4 +- tests/programs/text_safe.cap | 5 +- tests/programs/text_to_string.cap | 5 +- tests/programs/text_unsafe.cap | 11 +- tests/programs/untrusted_logs.cap | 5 +- tests/programs/vec_search_helpers.cap | 4 +- tests/programs/wc_file.cap | 2 +- tests/programs/wc_stdin.cap | 2 +- 41 files changed, 428 insertions(+), 519 deletions(-) diff --git a/capc/src/codegen/intrinsics.rs b/capc/src/codegen/intrinsics.rs index 8b0ec56..02c280c 100644 --- a/capc/src/codegen/intrinsics.rs +++ b/capc/src/codegen/intrinsics.rs @@ -385,7 +385,7 @@ pub fn register_runtime_intrinsics(ptr_ty: Type) -> HashMap { ); // === Stdin === map.insert( - "sys.stdin.Stdin__read_to_string".to_string(), + "sys.stdin.Stdin__read_to_string_with_alloc".to_string(), FnInfo { sig: FnSig { params: vec![AbiType::Handle, AbiType::Handle], @@ -446,7 +446,7 @@ pub fn register_runtime_intrinsics(ptr_ty: Type) -> HashMap { }, ); map.insert( - "sys.net.TcpConn__read_to_string".to_string(), + 
"sys.net.TcpConn__read_to_string_with_alloc".to_string(), FnInfo { sig: net_read_to_string, abi_sig: Some(net_read_to_string_abi), @@ -456,7 +456,7 @@ pub fn register_runtime_intrinsics(ptr_ty: Type) -> HashMap { }, ); map.insert( - "sys.net.TcpConn__read".to_string(), + "sys.net.TcpConn__read_with_alloc".to_string(), FnInfo { sig: net_read, abi_sig: Some(net_read_abi), @@ -653,7 +653,7 @@ pub fn register_runtime_intrinsics(ptr_ty: Type) -> HashMap { ); // === Filesystem === map.insert( - "sys.fs.ReadFS__read_to_string".to_string(), + "sys.fs.ReadFS__read_to_string_with_alloc".to_string(), FnInfo { sig: fs_read_to_string, abi_sig: Some(fs_read_to_string_abi), @@ -663,7 +663,7 @@ pub fn register_runtime_intrinsics(ptr_ty: Type) -> HashMap { }, ); map.insert( - "sys.fs.ReadFS__read_bytes".to_string(), + "sys.fs.ReadFS__read_bytes_with_alloc".to_string(), FnInfo { sig: fs_read_bytes.clone(), abi_sig: Some(fs_read_bytes_abi.clone()), @@ -673,7 +673,7 @@ pub fn register_runtime_intrinsics(ptr_ty: Type) -> HashMap { }, ); map.insert( - "sys.fs.ReadFS__list_dir".to_string(), + "sys.fs.ReadFS__list_dir_with_alloc".to_string(), FnInfo { sig: fs_list_dir, abi_sig: Some(fs_list_dir_abi), @@ -743,7 +743,7 @@ pub fn register_runtime_intrinsics(ptr_ty: Type) -> HashMap { }, ); map.insert( - "sys.fs.Dir__read_bytes".to_string(), + "sys.fs.Dir__read_bytes_with_alloc".to_string(), FnInfo { sig: fs_read_bytes, abi_sig: Some(fs_read_bytes_abi), @@ -753,7 +753,7 @@ pub fn register_runtime_intrinsics(ptr_ty: Type) -> HashMap { }, ); map.insert( - "sys.fs.Dir__read_to_string".to_string(), + "sys.fs.Dir__read_to_string_with_alloc".to_string(), FnInfo { sig: fs_dir_read_to_string, abi_sig: Some(fs_dir_read_to_string_abi), @@ -763,7 +763,7 @@ pub fn register_runtime_intrinsics(ptr_ty: Type) -> HashMap { }, ); map.insert( - "sys.fs.Dir__list_dir".to_string(), + "sys.fs.Dir__list_dir_with_alloc".to_string(), FnInfo { sig: fs_dir_list_dir, abi_sig: Some(fs_dir_list_dir_abi), @@ -793,7 +793,7 
@@ pub fn register_runtime_intrinsics(ptr_ty: Type) -> HashMap { }, ); map.insert( - "sys.fs.FileRead__read_to_string".to_string(), + "sys.fs.FileRead__read_to_string_with_alloc".to_string(), FnInfo { sig: fs_file_read_to_string, abi_sig: Some(fs_file_read_to_string_abi), @@ -813,7 +813,7 @@ pub fn register_runtime_intrinsics(ptr_ty: Type) -> HashMap { }, ); map.insert( - "sys.fs.join".to_string(), + "sys.fs.join_with_alloc".to_string(), FnInfo { sig: fs_join, abi_sig: Some(fs_join_abi), diff --git a/capc/tests/snapshots/parser__snapshot_struct_and_match.snap b/capc/tests/snapshots/parser__snapshot_struct_and_match.snap index 8a493ee..542ea06 100644 --- a/capc/tests/snapshots/parser__snapshot_struct_and_match.snap +++ b/capc/tests/snapshots/parser__snapshot_struct_and_match.snap @@ -1,5 +1,6 @@ --- source: capc/tests/parser.rs +assertion_line: 23 expression: module --- Module { @@ -169,63 +170,13 @@ Module { }, }, ), - Let( - LetStmt { - name: Spanned { - item: "alloc", - span: Span { - start: 100, - end: 105, - }, - }, - ty: None, - expr: MethodCall( - MethodCallExpr { - receiver: Path( - Path { - segments: [ - Spanned { - item: "rc", - span: Span { - start: 108, - end: 110, - }, - }, - ], - span: Span { - start: 108, - end: 110, - }, - }, - ), - method: Spanned { - item: "mint_alloc_default", - span: Span { - start: 111, - end: 129, - }, - }, - type_args: [], - args: [], - span: Span { - start: 108, - end: 131, - }, - }, - ), - span: Span { - start: 96, - end: 131, - }, - }, - ), Let( LetStmt { name: Spanned { item: "rfs", span: Span { - start: 138, - end: 141, + start: 100, + end: 103, }, }, ty: None, @@ -237,22 +188,22 @@ Module { Spanned { item: "rc", span: Span { - start: 144, - end: 146, + start: 106, + end: 108, }, }, ], span: Span { - start: 144, - end: 146, + start: 106, + end: 108, }, }, ), method: Spanned { item: "mint_readfs", span: Span { - start: 147, - end: 158, + start: 109, + end: 120, }, }, type_args: [], @@ -263,21 +214,21 @@ Module { 
"./config", ), span: Span { - start: 159, - end: 169, + start: 121, + end: 131, }, }, ), ], span: Span { - start: 144, - end: 170, + start: 106, + end: 132, }, }, ), span: Span { - start: 134, - end: 170, + start: 96, + end: 132, }, }, ), @@ -293,58 +244,41 @@ Module { Spanned { item: "rfs", span: Span { - start: 180, - end: 183, + start: 142, + end: 145, }, }, ], span: Span { - start: 180, - end: 183, + start: 142, + end: 145, }, }, ), method: Spanned { item: "read_to_string", span: Span { - start: 184, - end: 198, + start: 146, + end: 160, }, }, type_args: [], args: [ - Path( - Path { - segments: [ - Spanned { - item: "alloc", - span: Span { - start: 199, - end: 204, - }, - }, - ], - span: Span { - start: 199, - end: 204, - }, - }, - ), Literal( LiteralExpr { value: String( "app.txt", ), span: Span { - start: 206, - end: 215, + start: 161, + end: 170, }, }, ), ], span: Span { - start: 180, - end: 216, + start: 142, + end: 171, }, }, ), @@ -356,28 +290,28 @@ Module { Spanned { item: "Ok", span: Span { - start: 223, - end: 225, + start: 178, + end: 180, }, }, ], span: Span { - start: 223, - end: 225, + start: 178, + end: 180, }, }, binding: Some( Spanned { item: "s", span: Span { - start: 226, - end: 227, + start: 181, + end: 182, }, }, ), span: Span { - start: 223, - end: 228, + start: 178, + end: 183, }, }, body: Block { @@ -392,22 +326,22 @@ Module { Spanned { item: "c", span: Span { - start: 234, - end: 235, + start: 189, + end: 190, }, }, ], span: Span { - start: 234, - end: 235, + start: 189, + end: 190, }, }, ), method: Spanned { item: "println", span: Span { - start: 236, - end: 243, + start: 191, + end: 198, }, }, type_args: [], @@ -418,27 +352,27 @@ Module { Spanned { item: "s", span: Span { - start: 244, - end: 245, + start: 199, + end: 200, }, }, ], span: Span { - start: 244, - end: 245, + start: 199, + end: 200, }, }, ), ], span: Span { - start: 234, - end: 246, + start: 189, + end: 201, }, }, ), span: Span { - start: 234, - end: 247, + start: 189, + 
end: 202, }, }, ), @@ -451,27 +385,27 @@ Module { 0, ), span: Span { - start: 255, - end: 256, + start: 210, + end: 211, }, }, ), ), span: Span { - start: 248, - end: 258, + start: 203, + end: 213, }, }, ), ], span: Span { - start: 232, - end: 258, + start: 187, + end: 213, }, }, span: Span { - start: 174, - end: 258, + start: 136, + end: 213, }, }, MatchArm { @@ -481,28 +415,28 @@ Module { Spanned { item: "Err", span: Span { - start: 263, - end: 266, + start: 218, + end: 221, }, }, ], span: Span { - start: 263, - end: 266, + start: 218, + end: 221, }, }, binding: Some( Spanned { item: "e", span: Span { - start: 267, - end: 268, + start: 222, + end: 223, }, }, ), span: Span { - start: 263, - end: 269, + start: 218, + end: 224, }, }, body: Block { @@ -517,22 +451,22 @@ Module { Spanned { item: "c", span: Span { - start: 275, - end: 276, + start: 230, + end: 231, }, }, ], span: Span { - start: 275, - end: 276, + start: 230, + end: 231, }, }, ), method: Spanned { item: "println", span: Span { - start: 277, - end: 284, + start: 232, + end: 239, }, }, type_args: [], @@ -543,21 +477,21 @@ Module { "read failed", ), span: Span { - start: 285, - end: 298, + start: 240, + end: 253, }, }, ), ], span: Span { - start: 275, - end: 299, + start: 230, + end: 254, }, }, ), span: Span { - start: 275, - end: 300, + start: 230, + end: 255, }, }, ), @@ -570,63 +504,63 @@ Module { 1, ), span: Span { - start: 308, - end: 309, + start: 263, + end: 264, }, }, ), ), span: Span { - start: 301, - end: 311, + start: 256, + end: 266, }, }, ), ], span: Span { - start: 273, - end: 311, + start: 228, + end: 266, }, }, span: Span { - start: 174, - end: 311, + start: 136, + end: 266, }, }, ], span: Span { - start: 174, - end: 315, + start: 136, + end: 270, }, match_span: Span { - start: 174, - end: 179, + start: 136, + end: 141, }, }, ), span: Span { - start: 174, - end: 315, + start: 136, + end: 270, }, }, ), ], span: Span { start: 64, - end: 317, + end: 272, }, }, is_pub: true, doc: None, span: 
Span { start: 36, - end: 317, + end: 272, }, }, ), ], span: Span { start: 0, - end: 317, + end: 272, }, } diff --git a/docs/README.md b/docs/README.md index e2b80cc..7b4f5ed 100644 --- a/docs/README.md +++ b/docs/README.md @@ -17,8 +17,6 @@ resource ownership and authority flow stay explicit. fn main(rc: RootCap) { // Mint a capability from the root let console = rc.mint_console(); - let alloc = rc.mint_alloc_default(); - // Acquire a ReadFS capability at ./here. // We pass this capability struct to functions that require // one: otherwise the code won't compile. Moreover, the runtime @@ -26,7 +24,7 @@ fn main(rc: RootCap) { let fs = rc.mint_readfs("./here"); // Attempt to read beyond the capability's scopes: this will print "blocked". - match fs.read_to_string(alloc, "../etc/passwd") { + match fs.read_to_string("../etc/passwd") { Ok(_) => console.println("BUG: escaped"), Err(_) => console.println("blocked"), } diff --git a/docs/TUTORIAL.md b/docs/TUTORIAL.md index 6657cf1..0e1e005 100644 --- a/docs/TUTORIAL.md +++ b/docs/TUTORIAL.md @@ -178,8 +178,7 @@ use sys::fs pub fn main(rc: RootCap) -> i32 { let fs = rc.mint_readfs("./config") - let alloc = rc.mint_alloc_default() - match fs.read_to_string(alloc, "app.txt") { + match fs.read_to_string("app.txt") { Ok(s) => { rc.mint_console().println(s); return 0 } Err(_) => { return 1 } } @@ -303,13 +302,13 @@ Use `defer` to simplify cleanup. `string` is a view. `Text` is owned. ```cap -fn build_greeting(alloc: Alloc) -> Result { +fn build_greeting() -> Result { let s = "hello" let _bytes = s.as_slice() let _sub = s.slice_range(0, 5)? - let t = alloc.text_new() - defer t.free(alloc) + let t = string::text_new() + defer t.free() t.push_str("hello")? t.push_byte(' ')? t.append("text")? @@ -320,7 +319,7 @@ fn build_greeting(alloc: Alloc) -> Result { Helpers: - `string.split`, `split_once`, `trim_*`, `contains`, `index_of_*`. -- `string.concat(alloc, other)` creates a new owned string view. 
+- `string.concat(other)` creates a new owned string view. - `Text.slice_range` returns a `string` view into its buffer. ## 11) Slices and indexing @@ -392,8 +391,8 @@ fn parse_key_value(line: string, alloc: Alloc) -> Result { Err(_) => { return Err(ParseErr::OutOfRange) } } - let t = alloc.text_new() - defer t.free(alloc) + let t = string::text_new() + defer t.free() match (t.push_str(key)) { Ok(_) => { } Err(_) => { return Err(ParseErr::Oom) } diff --git a/examples/config_loader/config_loader.cap b/examples/config_loader/config_loader.cap index 5d35a95..e8ccc5c 100644 --- a/examples/config_loader/config_loader.cap +++ b/examples/config_loader/config_loader.cap @@ -44,7 +44,7 @@ fn parse_config(c: Console, contents: string) -> unit { } fn run(c: Console, fs: ReadFS) -> Result { - let contents = fs.read_to_string_default("app.conf")? + let contents = fs.read_to_string("app.conf")? parse_config(c, contents) return Ok(()) } diff --git a/examples/how_to_string/how_to_string.cap b/examples/how_to_string/how_to_string.cap index 1118206..300a251 100644 --- a/examples/how_to_string/how_to_string.cap +++ b/examples/how_to_string/how_to_string.cap @@ -2,7 +2,7 @@ package safe module how_to_string use sys::system use sys::console -use sys::buffer +use sys::string fn demo_string_view(c: Console) -> unit { let s = " hello,world \n" @@ -19,7 +19,7 @@ fn demo_string_view(c: Console) -> unit { c.println(parts.left) c.println(parts.right) - let words = trimmed.split_view_default(',') + let words = trimmed.split_view(',') c.println("split count:") c.println_i32(words.len()) c.println(words.get_or(0, "")) @@ -29,7 +29,7 @@ fn demo_string_view(c: Console) -> unit { fn demo_text_builder(c: Console) -> unit { c.println("-- Text builder --") - let t = buffer::text_new_default() + let t = string::text_new() try t.push_str("hello") else { panic() } @@ -47,18 +47,18 @@ fn demo_text_builder(c: Console) -> unit { panic() } c.println(owned) - t.free_default() + t.free() - try let t2 = 
buffer::text_from_default("owned") else { + try let t2 = string::text_from("owned") else { panic() } try let owned2 = t2.to_string() else { panic() } c.println(owned2) - t2.free_default() + t2.free() - try let joined = "a".concat_default("b") else { + try let joined = "a".concat("b") else { panic() } c.println(joined) diff --git a/examples/http_server/http_server.cap b/examples/http_server/http_server.cap index c84658e..ffbaed4 100644 --- a/examples/http_server/http_server.cap +++ b/examples/http_server/http_server.cap @@ -68,7 +68,7 @@ fn handle_request(conn: &TcpConn, readfs: &ReadFS, req: string) -> Result try let conn = listener.accept() else { continue } - try let req = conn.read_default(4096) else { + try let req = conn.read(4096) else { conn.close() continue } diff --git a/examples/sort/sort.cap b/examples/sort/sort.cap index caa47a3..3138e41 100644 --- a/examples/sort/sort.cap +++ b/examples/sort/sort.cap @@ -1,7 +1,6 @@ package safe module sort use sys::system -use sys::buffer use sys::console use sys::stdin use sys::io @@ -62,12 +61,12 @@ fn sort_indices(lines: Vec, indices: Vec) -> unit { } fn run(c: Console, input: Stdin) -> Result { - let contents = input.read_to_string_default()? - let lines = contents.lines_view_default() + let contents = input.read_to_string()? + let lines = contents.lines_view() let n = lines.len() // Create index array [0, 1, 2, ...] - let indices = buffer::vec_new_default() + let indices = vec::new() for i in 0..n { try indices.push(i) else { panic() diff --git a/examples/uniq/uniq.cap b/examples/uniq/uniq.cap index 495a42a..4cd7a8d 100644 --- a/examples/uniq/uniq.cap +++ b/examples/uniq/uniq.cap @@ -16,8 +16,8 @@ fn should_print(lines: Vec, i: i32) -> bool { } fn run(c: Console, input: Stdin) -> Result { - let contents = input.read_to_string_default()? - let lines = contents.lines_view_default() + let contents = input.read_to_string()? 
+ let lines = contents.lines_view() let n = lines.len() for i in 0..n { if (should_print(lines, i)) { diff --git a/stdlib/sys/buffer.cap b/stdlib/sys/buffer.cap index 40a35dd..f079c7a 100644 --- a/stdlib/sys/buffer.cap +++ b/stdlib/sys/buffer.cap @@ -52,26 +52,6 @@ pub fn default_alloc() -> Alloc { return () } -/// Create a new Vec with the process default allocator. -pub fn vec_new_default() -> vec::Vec { - return vec::new_with(default_alloc()) -} - -/// Create a Vec with capacity using the process default allocator. -pub fn vec_with_capacity_default(capacity: i32) -> Result, AllocErr> { - return vec::with_capacity(default_alloc(), capacity) -} - -/// Allocate a new empty Text using the process default allocator. -pub fn text_new_default() -> string::Text { - return string::text_new(default_alloc()) -} - -/// Copy a string view into a new Text using the process default allocator. -pub fn text_from_default(s: string) -> Result { - return string::text_from(default_alloc(), s) -} - impl Alloc { /// Allocate raw bytes. pub fn malloc(self, size: i32) -> *u8 { @@ -105,12 +85,12 @@ impl Alloc { /// Create a Vec with this Alloc (heap-backed). pub fn vec_u8_new(self) -> vec::Vec { - return vec::new_with(self) + return vec::new_with_alloc(self) } /// Create a Vec with capacity using this Alloc. pub fn vec_u8_with_capacity(self, capacity: i32) -> Result, AllocErr> { - return vec::with_capacity(self, capacity) + return vec::with_capacity_with_alloc(self, capacity) } /// Free a Vec created with this Alloc. @@ -121,12 +101,12 @@ impl Alloc { /// Create a Vec with this Alloc (heap-backed). pub fn vec_i32_new(self) -> vec::Vec { - return vec::new_with(self) + return vec::new_with_alloc(self) } /// Create a Vec with capacity using this Alloc. pub fn vec_i32_with_capacity(self, capacity: i32) -> Result, AllocErr> { - return vec::with_capacity(self, capacity) + return vec::with_capacity_with_alloc(self, capacity) } /// Free a Vec created with this Alloc. 
@@ -137,12 +117,12 @@ impl Alloc { /// Create a Vec with this Alloc (heap-backed). pub fn vec_string_new(self) -> vec::Vec { - return vec::new_with(self) + return vec::new_with_alloc(self) } /// Create a Vec with capacity using this Alloc. pub fn vec_string_with_capacity(self, capacity: i32) -> Result, AllocErr> { - return vec::with_capacity(self, capacity) + return vec::with_capacity_with_alloc(self, capacity) } /// Free a Vec created with this Alloc. @@ -153,27 +133,27 @@ impl Alloc { /// Allocate a new empty Text. pub fn text_new(self) -> string::Text { - return string::text_new(self) + return string::text_new_with_alloc(self) } /// Allocate a new empty Text with capacity. pub fn text_with_capacity(self, capacity: i32) -> Result { - return string::text_with_capacity(self, capacity) + return string::text_with_capacity_with_alloc(self, capacity) } /// Copy a string view into a new Text. pub fn text_from(self, s: string) -> Result { - return string::text_from(self, s) + return string::text_from_with_alloc(self, s) } /// Create a Vec with this Alloc (heap-backed). pub fn vec_new(self) -> vec::Vec { - return vec::new_with(self) + return vec::new_with_alloc(self) } /// Create a Vec with capacity using this Alloc. pub fn vec_with_capacity(self, capacity: i32) -> Result, AllocErr> { - return vec::with_capacity(self, capacity) + return vec::with_capacity_with_alloc(self, capacity) } /// Free a Vec created with this Alloc. diff --git a/stdlib/sys/fs.cap b/stdlib/sys/fs.cap index 6ed6519..91c6ac8 100644 --- a/stdlib/sys/fs.cap +++ b/stdlib/sys/fs.cap @@ -18,34 +18,34 @@ pub linear capability struct FileRead pub enum FsErr { NotFound, PermissionDenied, InvalidPath, IoError } impl ReadFS { - /// Read an entire file into a string. - pub fn read_to_string(self: &ReadFS, alloc: buffer::Alloc, path: string) -> Result { - return () - } - /// Read an entire file into a string using the process default allocator. 
- pub fn read_to_string_default(self: &ReadFS, path: string) -> Result { - return self.read_to_string(buffer::default_alloc(), path) + pub fn read_to_string(self: &ReadFS, path: string) -> Result { + return self.read_to_string_with_alloc(buffer::default_alloc(), path) } - /// Read an entire file into a byte vec. - pub fn read_bytes(self: &ReadFS, alloc: buffer::Alloc, path: string) -> Result, FsErr> { - return Err(FsErr::IoError) + /// Read an entire file into a string using the provided allocator. + pub fn read_to_string_with_alloc(self: &ReadFS, alloc: buffer::Alloc, path: string) -> Result { + return () } /// Read an entire file into a byte vec using the process default allocator. - pub fn read_bytes_default(self: &ReadFS, path: string) -> Result, FsErr> { - return self.read_bytes(buffer::default_alloc(), path) + pub fn read_bytes(self: &ReadFS, path: string) -> Result, FsErr> { + return self.read_bytes_with_alloc(buffer::default_alloc(), path) } - /// List directory contents as strings. - pub fn list_dir(self: &ReadFS, alloc: buffer::Alloc, path: string) -> Result, FsErr> { + /// Read an entire file into a byte vec using the provided allocator. + pub fn read_bytes_with_alloc(self: &ReadFS, alloc: buffer::Alloc, path: string) -> Result, FsErr> { return Err(FsErr::IoError) } - /// List directory contents using the process default allocator. - pub fn list_dir_default(self: &ReadFS, path: string) -> Result, FsErr> { - return self.list_dir(buffer::default_alloc(), path) + /// List directory contents as strings using the process default allocator. + pub fn list_dir(self: &ReadFS, path: string) -> Result, FsErr> { + return self.list_dir_with_alloc(buffer::default_alloc(), path) + } + + /// List directory contents as strings using the provided allocator. + pub fn list_dir_with_alloc(self: &ReadFS, alloc: buffer::Alloc, path: string) -> Result, FsErr> { + return Err(FsErr::IoError) } /// True if a path exists. 
@@ -82,24 +82,24 @@ impl Dir { return () } - /// Read a file into a byte vec. - pub fn read_bytes(self: &Dir, alloc: buffer::Alloc, name: string) -> Result, FsErr> { - return Err(FsErr::IoError) - } - /// Read a file into a byte vec using the process default allocator. - pub fn read_bytes_default(self: &Dir, name: string) -> Result, FsErr> { - return self.read_bytes(buffer::default_alloc(), name) + pub fn read_bytes(self: &Dir, name: string) -> Result, FsErr> { + return self.read_bytes_with_alloc(buffer::default_alloc(), name) } - /// List directory contents. - pub fn list_dir(self: &Dir, alloc: buffer::Alloc) -> Result, FsErr> { + /// Read a file into a byte vec using the provided allocator. + pub fn read_bytes_with_alloc(self: &Dir, alloc: buffer::Alloc, name: string) -> Result, FsErr> { return Err(FsErr::IoError) } /// List directory contents using the process default allocator. - pub fn list_dir_default(self: &Dir) -> Result, FsErr> { - return self.list_dir(buffer::default_alloc()) + pub fn list_dir(self: &Dir) -> Result, FsErr> { + return self.list_dir_with_alloc(buffer::default_alloc()) + } + + /// List directory contents using the provided allocator. + pub fn list_dir_with_alloc(self: &Dir, alloc: buffer::Alloc) -> Result, FsErr> { + return Err(FsErr::IoError) } /// True if a path exists. @@ -107,14 +107,14 @@ impl Dir { return false } - /// Read a file into a string. - pub fn read_to_string(self: &Dir, alloc: buffer::Alloc, name: string) -> Result { - return () + /// Read a file into a string using the process default allocator. + pub fn read_to_string(self: &Dir, name: string) -> Result { + return self.read_to_string_with_alloc(buffer::default_alloc(), name) } - /// Read a file into a string using the process default allocator. - pub fn read_to_string_default(self: &Dir, name: string) -> Result { - return self.read_to_string(buffer::default_alloc(), name) + /// Read a file into a string using the provided allocator. 
+ pub fn read_to_string_with_alloc(self: &Dir, alloc: buffer::Alloc, name: string) -> Result { + return () } /// Close the capability. @@ -124,14 +124,14 @@ impl Dir { } impl FileRead { - /// Read the file into a string. - pub fn read_to_string(self, alloc: buffer::Alloc) -> Result { - return () + /// Read the file into a string using the process default allocator. + pub fn read_to_string(self) -> Result { + return self.read_to_string_with_alloc(buffer::default_alloc()) } - /// Read the file into a string using the process default allocator. - pub fn read_to_string_default(self) -> Result { - return self.read_to_string(buffer::default_alloc()) + /// Read the file into a string using the provided allocator. + pub fn read_to_string_with_alloc(self, alloc: buffer::Alloc) -> Result { + return () } /// Close the file. @@ -140,12 +140,12 @@ impl FileRead { } } -/// Join two path segments with a platform separator. -pub fn join(alloc: buffer::Alloc, a: string, b: string) -> string { - return "" +/// Join two path segments using the process default allocator. +pub fn join(a: string, b: string) -> string { + return join_with_alloc(buffer::default_alloc(), a, b) } -/// Join two path segments using the process default allocator. -pub fn join_default(a: string, b: string) -> string { - return join(buffer::default_alloc(), a, b) +/// Join two path segments with a platform separator using the provided allocator. +pub fn join_with_alloc(alloc: buffer::Alloc, a: string, b: string) -> string { + return "" } diff --git a/stdlib/sys/net.cap b/stdlib/sys/net.cap index 35861a9..aecb595 100644 --- a/stdlib/sys/net.cap +++ b/stdlib/sys/net.cap @@ -45,24 +45,24 @@ impl TcpListener { } impl TcpConn { - /// Read all available data into a string. - pub fn read_to_string(self: &TcpConn, alloc: buffer::Alloc) -> Result { - return Err(NetErr::IoError) + /// Read all available data into a string using the process default allocator. 
+ pub fn read_to_string(self: &TcpConn) -> Result { + return self.read_to_string_with_alloc(buffer::default_alloc()) } - /// Read all available data using the process default allocator. - pub fn read_to_string_default(self: &TcpConn) -> Result { - return self.read_to_string(buffer::default_alloc()) + /// Read all available data into a string using the provided allocator. + pub fn read_to_string_with_alloc(self: &TcpConn, alloc: buffer::Alloc) -> Result { + return Err(NetErr::IoError) } - /// Read up to max_size bytes into a string. - pub fn read(self: &TcpConn, alloc: buffer::Alloc, max_size: i32) -> Result { - return Err(NetErr::IoError) + /// Read up to max_size bytes into a string using the process default allocator. + pub fn read(self: &TcpConn, max_size: i32) -> Result { + return self.read_with_alloc(buffer::default_alloc(), max_size) } - /// Read up to max_size bytes using the process default allocator. - pub fn read_default(self: &TcpConn, max_size: i32) -> Result { - return self.read(buffer::default_alloc(), max_size) + /// Read up to max_size bytes into a string using the provided allocator. + pub fn read_with_alloc(self: &TcpConn, alloc: buffer::Alloc, max_size: i32) -> Result { + return Err(NetErr::IoError) } /// Write a string to the connection. diff --git a/stdlib/sys/path.cap b/stdlib/sys/path.cap index 23e76c5..65671e7 100644 --- a/stdlib/sys/path.cap +++ b/stdlib/sys/path.cap @@ -22,7 +22,7 @@ fn trim_trailing_slashes(raw_path: string) -> string { /// Clean a user-supplied relative path and reject traversal. /// Empty paths become `index.html`. 
-pub fn clean_relative_with(alloc: buffer::Alloc, raw_path: string) -> Result { +pub fn clean_relative_with_alloc(alloc: buffer::Alloc, raw_path: string) -> Result { let out = alloc.text_new() let rest = raw_path while (true) { @@ -39,7 +39,7 @@ pub fn clean_relative_with(alloc: buffer::Alloc, raw_path: string) -> Result 0 && seg != ".") { @@ -47,7 +47,7 @@ pub fn clean_relative_with(alloc: buffer::Alloc, raw_path: string) -> Result { } Err(_) => { - out.free(alloc) + out.free() return Err(()) } } @@ -55,7 +55,7 @@ pub fn clean_relative_with(alloc: buffer::Alloc, raw_path: string) -> Result { } Err(_) => { - out.free(alloc) + out.free() return Err(()) } } @@ -65,11 +65,11 @@ pub fn clean_relative_with(alloc: buffer::Alloc, raw_path: string) -> Result { return Ok(path) } Err(_) => { return Err(()) } @@ -78,17 +78,17 @@ pub fn clean_relative_with(alloc: buffer::Alloc, raw_path: string) -> Result Result { - return clean_relative_with(buffer::default_alloc(), raw_path) + return clean_relative_with_alloc(buffer::default_alloc(), raw_path) } /// Split a path into slash-delimited segments without copying. -pub fn segments_view_with(alloc: buffer::Alloc, raw_path: string) -> vec::Vec { - return raw_path.split_view(alloc, '/') +pub fn segments_view_with_alloc(alloc: buffer::Alloc, raw_path: string) -> vec::Vec { + return raw_path.split_view_with_alloc(alloc, '/') } /// Split a path into slash-delimited segments without copying. pub fn segments_view(raw_path: string) -> vec::Vec { - return raw_path.split_view_default('/') + return raw_path.split_view('/') } /// Return the basename of a path as a view. @@ -127,10 +127,10 @@ pub fn dirname_view(raw_path: string) -> string { /// Join two path segments using the process default allocator. pub fn join(a: string, b: string) -> string { - return fs::join_default(a, b) + return fs::join(a, b) } /// Join two path segments using an explicit allocator. 
-pub fn join_with(alloc: buffer::Alloc, a: string, b: string) -> string { - return fs::join(alloc, a, b) +pub fn join_with_alloc(alloc: buffer::Alloc, a: string, b: string) -> string { + return fs::join_with_alloc(alloc, a, b) } diff --git a/stdlib/sys/stdin.cap b/stdlib/sys/stdin.cap index f129565..69d43e8 100644 --- a/stdlib/sys/stdin.cap +++ b/stdlib/sys/stdin.cap @@ -9,13 +9,13 @@ use sys::buffer pub capability struct Stdin impl Stdin { - /// Read stdin into a string. - pub fn read_to_string(self: &Stdin, alloc: buffer::Alloc) -> Result { - return Err(io::IoErr::IoError) + /// Read stdin into a string using the process default allocator. + pub fn read_to_string(self: &Stdin) -> Result { + return self.read_to_string_with_alloc(buffer::default_alloc()) } - /// Read stdin into a string using the process default allocator. - pub fn read_to_string_default(self: &Stdin) -> Result { - return self.read_to_string(buffer::default_alloc()) + /// Read stdin into a string using the provided allocator. + pub fn read_to_string_with_alloc(self: &Stdin, alloc: buffer::Alloc) -> Result { + return Err(io::IoErr::IoError) } } diff --git a/stdlib/sys/string.cap b/stdlib/sys/string.cap index 8e51f9a..89a7fe2 100644 --- a/stdlib/sys/string.cap +++ b/stdlib/sys/string.cap @@ -37,20 +37,35 @@ pub fn from_bytes_copy(alloc: buffer::Alloc, bytes: Slice) -> Result Text { + return text_new_with_alloc(buffer::default_alloc()) +} + /// Allocate a new empty Text using the provided allocator. -pub fn text_new(alloc: buffer::Alloc) -> Text { +pub fn text_new_with_alloc(alloc: buffer::Alloc) -> Text { return Text { bytes: alloc.vec_u8_new() } } +/// Allocate a new empty Text with capacity using the process default allocator. +pub fn text_with_capacity(capacity: i32) -> Result { + return text_with_capacity_with_alloc(buffer::default_alloc(), capacity) +} + /// Allocate a new empty Text with capacity using the provided allocator. 
-pub fn text_with_capacity(alloc: buffer::Alloc, capacity: i32) -> Result { +pub fn text_with_capacity_with_alloc(alloc: buffer::Alloc, capacity: i32) -> Result { let bytes = alloc.vec_u8_with_capacity(capacity)? return Ok(Text { bytes: bytes }) } +/// Copy a string view into a new Text using the process default allocator. +pub fn text_from(s: string) -> Result { + return text_from_with_alloc(buffer::default_alloc(), s) +} + /// Copy a string view into a new Text using the provided allocator. -pub fn text_from(alloc: buffer::Alloc, s: string) -> Result { - let out = text_new(alloc) +pub fn text_from_with_alloc(alloc: buffer::Alloc, s: string) -> Result { + let out = text_new_with_alloc(alloc) out.push_str(s)? return Ok(out) } @@ -59,7 +74,7 @@ fn build_range(alloc: buffer::Alloc, s: string, start: i32, end: i32) -> string if (end <= start) { return "" } - let buf = text_new(alloc) + let buf = text_new_with_alloc(alloc) let i = start while (i < end) { match (buf.push_byte(s.byte_at(i))) { @@ -215,16 +230,17 @@ impl Text { return from_bytes(owned) } - /// Free the underlying Vec. + /// Free the underlying Vec using the allocator stored in its backing Vec. /// All derived views become invalid after this call. - pub fn free(self, alloc: buffer::Alloc) -> unit { - alloc.vec_u8_free(self.bytes) + pub fn free(self) -> unit { + self.bytes.free() return () } - /// Free this Text using the allocator stored in its backing Vec. - pub fn free_default(self) -> unit { - self.bytes.free() + /// Free the underlying Vec with an explicit allocator. + /// All derived views become invalid after this call. + pub fn free_with_alloc(self, alloc: buffer::Alloc) -> unit { + alloc.vec_u8_free(self.bytes) return () } @@ -256,28 +272,28 @@ impl string { } /// Copy this string view into a new owned Text with the provided allocator. 
- pub fn to_text(self, alloc: buffer::Alloc) -> Result { - return text_from(alloc, self) + pub fn to_text(self) -> Result { + return text_from(self) } - /// Copy this string view into a new owned Text using the process default allocator. - pub fn to_text_default(self) -> Result { - return text_from(buffer::default_alloc(), self) + /// Copy this string view into a new owned Text using the provided allocator. + pub fn to_text_with_alloc(self, alloc: buffer::Alloc) -> Result { + return text_from_with_alloc(alloc, self) } - /// Concatenate another string into a new owned string view. - pub fn concat(self, alloc: buffer::Alloc, other: string) -> Result { - let out = text_new(alloc) + /// Concatenate another string into a new owned string view using the process default allocator. + pub fn concat(self, other: string) -> Result { + return self.concat_with_alloc(buffer::default_alloc(), other) + } + + /// Concatenate another string into a new owned string view using the provided allocator. + pub fn concat_with_alloc(self, alloc: buffer::Alloc, other: string) -> Result { + let out = text_new_with_alloc(alloc) out.push_str(self)? out.push_str(other)? return out.to_string() } - /// Concatenate another string using the process default allocator. - pub fn concat_default(self, other: string) -> Result { - return self.concat(buffer::default_alloc(), other) - } - /// Index into the string by byte. pub fn byte_at(self, index: i32) -> u8 { return self.bytes.at(index) @@ -293,8 +309,13 @@ impl string { return self.as_slice() } - /// Split on ASCII whitespace. - pub fn split_whitespace(self, alloc: buffer::Alloc) -> Vec { + /// Split on ASCII whitespace using the process default allocator. + pub fn split_whitespace(self) -> Vec { + return self.split_whitespace_with_alloc(buffer::default_alloc()) + } + + /// Split on ASCII whitespace using the provided allocator. 
+ pub fn split_whitespace_with_alloc(self, alloc: buffer::Alloc) -> Vec { let out = alloc.vec_string_new() let bytes = self.as_slice() let len = bytes.len() @@ -319,13 +340,13 @@ impl string { return out } - /// Split on ASCII whitespace using the process default allocator. - pub fn split_whitespace_default(self) -> Vec { - return self.split_whitespace(buffer::default_alloc()) + /// Split on ASCII whitespace into views using the process default allocator. + pub fn split_whitespace_view(self) -> Vec { + return self.split_whitespace_view_with_alloc(buffer::default_alloc()) } - /// Split on ASCII whitespace without copying the pieces. - pub fn split_whitespace_view(self, alloc: buffer::Alloc) -> Vec { + /// Split on ASCII whitespace without copying the pieces using the provided allocator. + pub fn split_whitespace_view_with_alloc(self, alloc: buffer::Alloc) -> Vec { let out = alloc.vec_string_new() let bytes = self.as_slice() let len = bytes.len() @@ -350,12 +371,11 @@ impl string { return out } - /// Split on ASCII whitespace into views using the process default allocator. - pub fn split_whitespace_view_default(self) -> Vec { - return self.split_whitespace_view(buffer::default_alloc()) + pub fn lines(self) -> Vec { + return self.lines_with_alloc(buffer::default_alloc()) } - pub fn lines(self, alloc: buffer::Alloc) -> Vec { + pub fn lines_with_alloc(self, alloc: buffer::Alloc) -> Vec { let out = alloc.vec_string_new() let bytes = self.as_slice() let len = bytes.len() @@ -390,13 +410,13 @@ impl string { return out } - /// Split into lines using the process default allocator. - pub fn lines_default(self) -> Vec { - return self.lines(buffer::default_alloc()) + /// Split into line views without copying the pieces using the process default allocator. + pub fn lines_view(self) -> Vec { + return self.lines_view_with_alloc(buffer::default_alloc()) } - /// Split into line views without copying the pieces. 
- pub fn lines_view(self, alloc: buffer::Alloc) -> Vec { + /// Split into line views without copying the pieces using the provided allocator. + pub fn lines_view_with_alloc(self, alloc: buffer::Alloc) -> Vec { let out = alloc.vec_string_new() let bytes = self.as_slice() let len = bytes.len() @@ -431,12 +451,11 @@ impl string { return out } - /// Split into line views using the process default allocator. - pub fn lines_view_default(self) -> Vec { - return self.lines_view(buffer::default_alloc()) + pub fn split(self, delim: u8) -> Vec { + return self.split_with_alloc(buffer::default_alloc(), delim) } - pub fn split(self, alloc: buffer::Alloc, delim: u8) -> Vec { + pub fn split_with_alloc(self, alloc: buffer::Alloc, delim: u8) -> Vec { let out = alloc.vec_string_new() let bytes = self.as_slice() let len = bytes.len() @@ -461,13 +480,13 @@ impl string { return out } - /// Split on a delimiter using the process default allocator. - pub fn split_default(self, delim: u8) -> Vec { - return self.split(buffer::default_alloc(), delim) + /// Split on a delimiter into views using the process default allocator. + pub fn split_view(self, delim: u8) -> Vec { + return self.split_view_with_alloc(buffer::default_alloc(), delim) } - /// Split on a delimiter without copying the pieces. - pub fn split_view(self, alloc: buffer::Alloc, delim: u8) -> Vec { + /// Split on a delimiter without copying the pieces using the provided allocator. + pub fn split_view_with_alloc(self, alloc: buffer::Alloc, delim: u8) -> Vec { let out = alloc.vec_string_new() let bytes = self.as_slice() let len = bytes.len() @@ -492,13 +511,13 @@ impl string { return out } - /// Split on a delimiter into views using the process default allocator. - pub fn split_view_default(self, delim: u8) -> Vec { - return self.split_view(buffer::default_alloc(), delim) + /// Split once on the first matching delimiter using the process default allocator. 
+ pub fn split_once(self, delim: u8) -> Result { + return self.split_once_with_alloc(buffer::default_alloc(), delim) } - /// Split once on the first matching delimiter. - pub fn split_once(self, alloc: buffer::Alloc, delim: u8) -> Result { + /// Split once on the first matching delimiter using the provided allocator. + pub fn split_once_with_alloc(self, alloc: buffer::Alloc, delim: u8) -> Result { let bytes = self.as_slice() let len = bytes.len() let i = 0 @@ -516,11 +535,6 @@ impl string { return Err(()) } - /// Split once on the first matching delimiter using the process default allocator. - pub fn split_once_default(self, delim: u8) -> Result { - return self.split_once(buffer::default_alloc(), delim) - } - /// Split once on the first matching delimiter without copying. pub fn split_once_view(self, delim: u8) -> Result { let bytes = self.as_slice() @@ -581,19 +595,24 @@ impl string { return view_range(self, 0, i) } - /// Trim ASCII whitespace from both ends. - pub fn trim(self, alloc: buffer::Alloc) -> string { - let start_trimmed = self.trim_start(alloc) - return start_trimmed.trim_end(alloc) + /// Trim ASCII whitespace from both ends using the process default allocator. + pub fn trim(self) -> string { + return self.trim_with_alloc(buffer::default_alloc()) } - /// Trim ASCII whitespace from both ends using the process default allocator. - pub fn trim_default(self) -> string { - return self.trim(buffer::default_alloc()) + /// Trim ASCII whitespace from both ends using the provided allocator. + pub fn trim_with_alloc(self, alloc: buffer::Alloc) -> string { + let start_trimmed = self.trim_start_with_alloc(alloc) + return start_trimmed.trim_end_with_alloc(alloc) } - /// Trim ASCII whitespace from the start. - pub fn trim_start(self, alloc: buffer::Alloc) -> string { + /// Trim ASCII whitespace from the start using the process default allocator. 
+ pub fn trim_start(self) -> string { + return self.trim_start_with_alloc(buffer::default_alloc()) + } + + /// Trim ASCII whitespace from the start using the provided allocator. + pub fn trim_start_with_alloc(self, alloc: buffer::Alloc) -> string { let bytes = self.as_slice() let len = bytes.len() let i = 0 @@ -609,13 +628,13 @@ impl string { return build_range(alloc, self, i, len) } - /// Trim ASCII whitespace from the start using the process default allocator. - pub fn trim_start_default(self) -> string { - return self.trim_start(buffer::default_alloc()) + /// Trim ASCII whitespace from the end using the process default allocator. + pub fn trim_end(self) -> string { + return self.trim_end_with_alloc(buffer::default_alloc()) } - /// Trim ASCII whitespace from the end. - pub fn trim_end(self, alloc: buffer::Alloc) -> string { + /// Trim ASCII whitespace from the end using the provided allocator. + pub fn trim_end_with_alloc(self, alloc: buffer::Alloc) -> string { let bytes = self.as_slice() let len = bytes.len() if (len == 0) { @@ -637,45 +656,40 @@ impl string { return build_range(alloc, self, 0, i) } - /// Trim ASCII whitespace from the end using the process default allocator. - pub fn trim_end_default(self) -> string { - return self.trim_end(buffer::default_alloc()) + /// Remove a leading prefix if present using the process default allocator. + pub fn trim_prefix(self, prefix: string) -> string { + return self.trim_prefix_with_alloc(buffer::default_alloc(), prefix) } - /// Remove a leading prefix if present. - pub fn trim_prefix(self, alloc: buffer::Alloc, prefix: string) -> string { + /// Remove a leading prefix if present using the provided allocator. + pub fn trim_prefix_with_alloc(self, alloc: buffer::Alloc, prefix: string) -> string { if (self.starts_with(prefix)) { return build_range(alloc, self, prefix.len(), self.len()) } return self } - /// Remove a leading prefix using the process default allocator. 
- pub fn trim_prefix_default(self, prefix: string) -> string { - return self.trim_prefix(buffer::default_alloc(), prefix) + /// Remove a trailing suffix if present using the process default allocator. + pub fn trim_suffix(self, suffix: string) -> string { + return self.trim_suffix_with_alloc(buffer::default_alloc(), suffix) } - /// Remove a trailing suffix if present. - pub fn trim_suffix(self, alloc: buffer::Alloc, suffix: string) -> string { + /// Remove a trailing suffix if present using the provided allocator. + pub fn trim_suffix_with_alloc(self, alloc: buffer::Alloc, suffix: string) -> string { if (self.ends_with(suffix)) { return build_range(alloc, self, 0, self.len() - suffix.len()) } return self } - /// Remove a trailing suffix using the process default allocator. - pub fn trim_suffix_default(self, suffix: string) -> string { - return self.trim_suffix(buffer::default_alloc(), suffix) - } - /// split_lines() is an alias for lines(). - pub fn split_lines(self, alloc: buffer::Alloc) -> Vec { - return self.lines(alloc) + pub fn split_lines(self) -> Vec { + return self.lines() } - /// split_lines() using the process default allocator. - pub fn split_lines_default(self) -> Vec { - return self.split_lines(buffer::default_alloc()) + /// split_lines() using the provided allocator. + pub fn split_lines_with_alloc(self, alloc: buffer::Alloc) -> Vec { + return self.lines_with_alloc(alloc) } /// True if the string starts with the prefix. @@ -929,11 +943,16 @@ impl string { return true } - /// Lowercase ASCII letters. - pub fn to_lower_ascii(self, alloc: buffer::Alloc) -> string { + /// Lowercase ASCII letters using the process default allocator. + pub fn to_lower_ascii(self) -> string { + return self.to_lower_ascii_with_alloc(buffer::default_alloc()) + } + + /// Lowercase ASCII letters using the provided allocator. 
+ pub fn to_lower_ascii_with_alloc(self, alloc: buffer::Alloc) -> string { let bytes = self.as_slice() let len = bytes.len() - let buf = text_new(alloc) + let buf = text_new_with_alloc(alloc) let i = 0 while (i < len) { let b = bytes.at(i) @@ -950,16 +969,16 @@ impl string { } } - /// Lowercase ASCII letters using the process default allocator. - pub fn to_lower_ascii_default(self) -> string { - return self.to_lower_ascii(buffer::default_alloc()) + /// Uppercase ASCII letters using the process default allocator. + pub fn to_upper_ascii(self) -> string { + return self.to_upper_ascii_with_alloc(buffer::default_alloc()) } - /// Uppercase ASCII letters. - pub fn to_upper_ascii(self, alloc: buffer::Alloc) -> string { + /// Uppercase ASCII letters using the provided allocator. + pub fn to_upper_ascii_with_alloc(self, alloc: buffer::Alloc) -> string { let bytes = self.as_slice() let len = bytes.len() - let buf = text_new(alloc) + let buf = text_new_with_alloc(alloc) let i = 0 while (i < len) { let b = bytes.at(i) @@ -976,19 +995,14 @@ impl string { } } - /// Uppercase ASCII letters using the process default allocator. - pub fn to_upper_ascii_default(self) -> string { - return self.to_upper_ascii(buffer::default_alloc()) - } - /// Trim ASCII whitespace (alias of trim()). - pub fn trim_ascii(self, alloc: buffer::Alloc) -> string { - return self.trim(alloc) + pub fn trim_ascii(self) -> string { + return self.trim() } - /// Trim ASCII whitespace using the process default allocator. - pub fn trim_ascii_default(self) -> string { - return self.trim_ascii(buffer::default_alloc()) + /// Trim ASCII whitespace using the provided allocator. + pub fn trim_ascii_with_alloc(self, alloc: buffer::Alloc) -> string { + return self.trim_with_alloc(alloc) } /// Alias for index_of_byte. 
diff --git a/stdlib/sys/vec.cap b/stdlib/sys/vec.cap index 07a357f..e96baa8 100644 --- a/stdlib/sys/vec.cap +++ b/stdlib/sys/vec.cap @@ -125,8 +125,13 @@ fn ensure_capacity(v: Vec, header: sys::vec::VecHeader, needed: i32) -> Re return grow_to(v, header, new_cap) } +/// Create a new Vec using the process default allocator. +pub fn new() -> Vec { + return new_with_alloc(buffer::default_alloc()) +} + /// Create a new Vec with the provided allocator. -pub fn new_with(alloc: buffer::Alloc) -> Vec { +pub fn new_with_alloc(alloc: buffer::Alloc) -> Vec { let header_ptr = alloc_header(alloc) let raw = alloc.malloc(0) let header = sys::vec::VecHeader { @@ -140,8 +145,13 @@ pub fn new_with(alloc: buffer::Alloc) -> Vec { return Vec { header: header_ptr } } +/// Create a new Vec with capacity using the process default allocator. +pub fn with_capacity(capacity: i32) -> Result, buffer::AllocErr> { + return with_capacity_with_alloc(buffer::default_alloc(), capacity) +} + /// Create a new Vec with capacity using the provided allocator. -pub fn with_capacity(alloc: buffer::Alloc, capacity: i32) -> Result, buffer::AllocErr> { +pub fn with_capacity_with_alloc(alloc: buffer::Alloc, capacity: i32) -> Result, buffer::AllocErr> { let cap = capacity if (cap < 0) { cap = 0 @@ -430,7 +440,7 @@ impl Vec { /// Filter elements equal to value (numeric vecs only). pub fn filter(self, value: T) -> Vec { let header = read_header(self) - let out = new_with(header.alloc) + let out = new_with_alloc(header.alloc) let len = self.len() let i = 0 while (i < len) { @@ -453,7 +463,7 @@ impl Vec { /// Add delta to each element. pub fn map_add(self, delta: T) -> Vec { let header = read_header(self) - let out = new_with(header.alloc) + let out = new_with_alloc(header.alloc) let len = self.len() let i = 0 while (i < len) { @@ -559,7 +569,7 @@ impl Vec { /// Filter elements equal to value (uses .eq() trait method). 
pub fn filter_eq(self, value: T) -> Vec { let header = read_header(self) - let out = new_with(header.alloc) + let out = new_with_alloc(header.alloc) let len = self.len() let i = 0 while (i < len) { @@ -649,7 +659,7 @@ impl Vec { pub fn join(self, sep: string) -> Result { let len = self.len() let header = read_header(self) - let buf = string::text_new(header.alloc) + let buf = string::text_new_with_alloc(header.alloc) let i = 0 while (i < len) { let part = match (self.get(i)) { diff --git a/tests/programs/attenuation_untrusted_fail.cap b/tests/programs/attenuation_untrusted_fail.cap index d3f8b7b..e171e9e 100644 --- a/tests/programs/attenuation_untrusted_fail.cap +++ b/tests/programs/attenuation_untrusted_fail.cap @@ -4,12 +4,11 @@ use sys::fs use untrusted_logs pub fn main(rc: RootCap) -> i32 { - let alloc = rc.mint_alloc_default() let fs = rc.mint_filesystem("./config") let d = fs.root_dir() - let res = untrusted_logs::read_log(alloc, d) + let res = untrusted_logs::read_log(d) let file = d.open_read("app.txt") - let tmp = file.read_to_string(alloc) + let tmp = file.read_to_string() match (res) { Ok(_) => { return 0 } Err(_) => { return 1 } diff --git a/tests/programs/attenuation_untrusted_pass.cap b/tests/programs/attenuation_untrusted_pass.cap index a90d89d..a726d98 100644 --- a/tests/programs/attenuation_untrusted_pass.cap +++ b/tests/programs/attenuation_untrusted_pass.cap @@ -4,10 +4,9 @@ use sys::fs use untrusted_logs pub fn main(rc: RootCap) -> i32 { - let alloc = rc.mint_alloc_default() let fs = rc.mint_filesystem("./config") let d = fs.root_dir() - let res = untrusted_logs::read_log(alloc, d) + let res = untrusted_logs::read_log(d) match (res) { Ok(_) => { return 0 } Err(_) => { return 1 } diff --git a/tests/programs/fs_attenuation.cap b/tests/programs/fs_attenuation.cap index dc5d1a9..cb465d6 100644 --- a/tests/programs/fs_attenuation.cap +++ b/tests/programs/fs_attenuation.cap @@ -3,12 +3,11 @@ use sys::system pub fn main(rc: RootCap) -> i32 { let c = 
rc.mint_console() - let alloc = rc.mint_alloc_default() let fs = rc.mint_filesystem("./config") let root = fs.root_dir() let file = root.open_read("app.txt") - match file.read_to_string(alloc) { + match file.read_to_string() { Ok(s) => { c.println(s); return 0 } Err(e) => { c.println("read failed"); return 1 } } diff --git a/tests/programs/fs_dir_reuse.cap b/tests/programs/fs_dir_reuse.cap index 23e177f..d7a04d1 100644 --- a/tests/programs/fs_dir_reuse.cap +++ b/tests/programs/fs_dir_reuse.cap @@ -4,29 +4,28 @@ use sys::system pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() - let alloc = rc.mint_alloc_default() let fs = rc.mint_filesystem("./config") let dir = fs.root_dir() c.assert(dir.exists("app.txt")) - match dir.read_to_string(alloc, "app.txt") { + match dir.read_to_string("app.txt") { Ok(s) => { c.assert(s.len() > 0) } Err(_) => { c.println("dir read_to_string failed"); return 1 } } - match dir.read_bytes(alloc, "app.txt") { + match dir.read_bytes("app.txt") { Ok(bytes) => { c.assert(bytes.len() > 0) - alloc.vec_u8_free(bytes) + bytes.free() } Err(_) => { c.println("dir read_bytes failed"); return 1 } } - match dir.list_dir(alloc) { + match dir.list_dir() { Ok(entries) => { c.assert(entries.len() > 0) - alloc.vec_string_free(entries) + entries.free() } Err(_) => { c.println("dir list_dir failed"); return 1 } } diff --git a/tests/programs/fs_helpers.cap b/tests/programs/fs_helpers.cap index f2a3ee7..7eb391e 100644 --- a/tests/programs/fs_helpers.cap +++ b/tests/programs/fs_helpers.cap @@ -15,7 +15,7 @@ pub fn main(rc: RootCap) -> i32 { } let rfs2 = rc.mint_readfs("./config") - match rfs2.read_bytes_default("app.txt") { + match rfs2.read_bytes("app.txt") { Ok(bytes) => { c.assert(bytes.len() > 0) bytes.free() @@ -27,7 +27,7 @@ pub fn main(rc: RootCap) -> i32 { } let rfs3 = rc.mint_readfs("./config") - match rfs3.list_dir_default(".") { + match rfs3.list_dir(".") { Ok(entries) => { c.assert(entries.len() > 0) entries.free() diff --git 
a/tests/programs/fs_open_read_reuse.cap b/tests/programs/fs_open_read_reuse.cap index 5a40530..e3aa7f1 100644 --- a/tests/programs/fs_open_read_reuse.cap +++ b/tests/programs/fs_open_read_reuse.cap @@ -3,18 +3,17 @@ use sys::system pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() - let alloc = rc.mint_alloc_default() let fs = rc.mint_filesystem("./config") let dir = fs.root_dir() let first = dir.open_read("app.txt") - try let left = first.read_to_string(alloc) else { + try let left = first.read_to_string() else { c.println("first read failed") return 1 } let second = dir.open_read("app.txt") - try let right = second.read_to_string(alloc) else { + try let right = second.read_to_string() else { c.println("second read failed") return 1 } diff --git a/tests/programs/fs_read.cap b/tests/programs/fs_read.cap index b7a6e9f..368f5a6 100644 --- a/tests/programs/fs_read.cap +++ b/tests/programs/fs_read.cap @@ -3,10 +3,9 @@ use sys::system pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() - let alloc = rc.mint_alloc_default() let rfs = rc.mint_readfs("./config") - match rfs.read_to_string(alloc, "app.txt") { + match rfs.read_to_string("app.txt") { Ok(s) => { c.println(s); return 0 } Err(e) => { c.println("read failed"); return 1 } } diff --git a/tests/programs/fs_reuse.cap b/tests/programs/fs_reuse.cap index 165e7cc..f69c884 100644 --- a/tests/programs/fs_reuse.cap +++ b/tests/programs/fs_reuse.cap @@ -4,20 +4,19 @@ use sys::system pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() - let alloc = rc.mint_alloc_default() let rfs = rc.mint_readfs("./config") c.assert(rfs.exists("app.txt")) - match rfs.read_to_string(alloc, "app.txt") { + match rfs.read_to_string("app.txt") { Ok(s) => { c.assert(s.len() > 0) } Err(_) => { c.println("read_to_string failed"); return 1 } } - match rfs.read_bytes(alloc, "app.txt") { + match rfs.read_bytes("app.txt") { Ok(bytes) => { c.assert(bytes.len() > 0) - alloc.vec_u8_free(bytes) + bytes.free() } Err(_) => { 
c.println("read_bytes failed"); return 1 } } diff --git a/tests/programs/fs_symlink_escape.cap b/tests/programs/fs_symlink_escape.cap index b72b9cc..66e4529 100644 --- a/tests/programs/fs_symlink_escape.cap +++ b/tests/programs/fs_symlink_escape.cap @@ -3,9 +3,8 @@ use sys::system pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() - let alloc = rc.mint_alloc_default() let rfs = rc.mint_readfs("tests/fixtures/config") - match rfs.read_to_string(alloc, "link.txt") { + match rfs.read_to_string("link.txt") { Ok(_) => { c.println("BUG: symlink escape") return 2 diff --git a/tests/programs/fs_traversal_denied.cap b/tests/programs/fs_traversal_denied.cap index b843498..433895d 100644 --- a/tests/programs/fs_traversal_denied.cap +++ b/tests/programs/fs_traversal_denied.cap @@ -3,10 +3,9 @@ use sys::system pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() - let alloc = rc.mint_alloc_default() let rfs = rc.mint_readfs("./config") - match rfs.read_to_string(alloc, "../secrets.txt") { + match rfs.read_to_string("../secrets.txt") { Ok(s) => { c.assert(false) c.println("BUG: escaped") diff --git a/tests/programs/fs_traversal_kind.cap b/tests/programs/fs_traversal_kind.cap index 9655116..86a1df5 100644 --- a/tests/programs/fs_traversal_kind.cap +++ b/tests/programs/fs_traversal_kind.cap @@ -4,10 +4,9 @@ use sys::fs pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() - let alloc = rc.mint_alloc_default() let rfs = rc.mint_readfs("./config") - match rfs.read_to_string(alloc, "../secrets.txt") { + match rfs.read_to_string("../secrets.txt") { Ok(s) => { c.assert(false) c.println("BUG: escaped") diff --git a/tests/programs/should_fail_attenuation_reuse_fileread.cap b/tests/programs/should_fail_attenuation_reuse_fileread.cap index 73310d6..fa8cf70 100644 --- a/tests/programs/should_fail_attenuation_reuse_fileread.cap +++ b/tests/programs/should_fail_attenuation_reuse_fileread.cap @@ -3,11 +3,10 @@ module should_fail_attenuation_reuse_fileread use sys::system 
pub fn main(rc: RootCap) -> i32 { - let alloc = rc.mint_alloc_default() let fs = rc.mint_filesystem("./config") let d = fs.root_dir() let f = d.open_read("app.txt") - let s = f.read_to_string(alloc) - let t = f.read_to_string(alloc) + let s = f.read_to_string() + let t = f.read_to_string() return 0 } diff --git a/tests/programs/stdin_safe.cap b/tests/programs/stdin_safe.cap index 55cce9a..6b20287 100644 --- a/tests/programs/stdin_safe.cap +++ b/tests/programs/stdin_safe.cap @@ -5,8 +5,7 @@ use sys::system pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let stdin = rc.mint_stdin() - let alloc = rc.mint_alloc_default() - let code = match stdin.read_to_string(alloc) { + let code = match stdin.read_to_string() { Ok(s) => { let n = s.len() c.assert(n == 0) diff --git a/tests/programs/string_helpers.cap b/tests/programs/string_helpers.cap index 6561d7e..d9b0562 100644 --- a/tests/programs/string_helpers.cap +++ b/tests/programs/string_helpers.cap @@ -1,35 +1,31 @@ package safe module string_helpers use sys::system -use sys::buffer +use sys::string pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() - let alloc = rc.mint_alloc_default() let s = "abc" let buf = s.bytes() let n = buf.len() let b = buf.at(0) - let words = "a b c".split_whitespace(alloc) - let word_views = "a b c".split_whitespace_view(alloc) - let split_views = "a,b,c".split_view_default(',') + let words = "a b c".split_whitespace() + let word_views = "a b c".split_whitespace_view() + let split_views = "a,b,c".split_view(',') let count = words.len() - let trimmed = " hi \n".trim(alloc) + let trimmed = " hi \n".trim() let trimmed_view = " hi \n".trim_view() - let trimmed_start = " hi ".trim_start(alloc) + let trimmed_start = " hi ".trim_start() let trimmed_start_view = " hi ".trim_start_view() - let trimmed_end = " hi ".trim_end(alloc) + let trimmed_end = " hi ".trim_end() let trimmed_end_view = " hi ".trim_end_view() - let trimmed_ascii = " \tHi\n".trim_ascii(alloc) - let 
trimmed_ascii_default = " \tHi\n".trim_ascii_default() - let lower = "AbC".to_lower_ascii(alloc) - let lower_default = "AbC".to_lower_ascii_default() - let upper = "AbC".to_upper_ascii(alloc) - let upper_default = "AbC".to_upper_ascii_default() + let trimmed_ascii = " \tHi\n".trim_ascii() + let lower = "AbC".to_lower_ascii() + let upper = "AbC".to_upper_ascii() let sliced = "hello".slice_range(1, 4) - let lines = "a\nb\n".split_lines(alloc) - let line_views = "a\r\nb\n".lines_view(alloc) - let t = buffer::text_new_default() + let lines = "a\nb\n".split_lines() + let line_views = "a\r\nb\n".lines_view() + let t = string::text_new() try t.push_str("hi") else { panic() } @@ -38,7 +34,7 @@ pub fn main(rc: RootCap) -> i32 { return 1 } c.assert(owned.eq("hi")) - t.free_default() + t.free() c.assert(n == 3 && b == 'a' && count == 3) c.assert(word_views.len() == 3) c.assert(split_views.len() == 3) @@ -51,11 +47,8 @@ pub fn main(rc: RootCap) -> i32 { c.assert(trimmed_end.ends_with("hi")) c.assert(trimmed_end_view.ends_with("hi")) c.assert(trimmed_ascii.eq("Hi")) - c.assert(trimmed_ascii_default.eq("Hi")) c.assert(lower.eq("abc")) - c.assert(lower_default.eq("abc")) c.assert(upper.eq("ABC")) - c.assert(upper_default.eq("ABC")) c.assert("abc".starts_with_byte('a')) c.assert("abc".ends_with_byte('c')) c.assert("".is_empty()) @@ -108,7 +101,7 @@ pub fn main(rc: RootCap) -> i32 { c.assert(false) return 1 } - match ("a,b,c".split_once(alloc, ',')) { + match ("a,b,c".split_once(',')) { Ok(parts) => { c.assert(parts.left.eq("a")) c.assert(parts.right.eq("b,c")) @@ -122,23 +115,23 @@ pub fn main(rc: RootCap) -> i32 { } Err(_) => { c.assert(false) } } - match ("ab".concat(alloc, "cd")) { + match ("ab".concat("cd")) { Ok(joined) => { c.assert(joined.eq("abcd")) } Err(_) => { c.assert(false) } } - let pieces = "a,b,c".split(alloc, ',') + let pieces = "a,b,c".split(',') c.assert(pieces.len() == 3) c.assert(line_views.len() == 2) match (pieces.join(",")) { Ok(joined) => { 
c.assert(joined.eq("a,b,c")) } Err(_) => { c.assert(false) } } - alloc.vec_string_free(words) - alloc.vec_string_free(word_views) + words.free() + word_views.free() split_views.free() - alloc.vec_string_free(lines) - alloc.vec_string_free(line_views) - alloc.vec_string_free(pieces) + lines.free() + line_views.free() + pieces.free() c.println("string ok") return 0 } diff --git a/tests/programs/string_split.cap b/tests/programs/string_split.cap index 4d94dce..ac5f9b1 100644 --- a/tests/programs/string_split.cap +++ b/tests/programs/string_split.cap @@ -4,10 +4,9 @@ use sys::system pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() - let alloc = rc.mint_alloc_default() - let parts = "a,b,c".split(alloc, ',') + let parts = "a,b,c".split(',') let count = parts.len() - alloc.vec_string_free(parts) + parts.free() c.assert(count == 3) c.println("split ok") return 0 diff --git a/tests/programs/text_basic.cap b/tests/programs/text_basic.cap index 3866904..af2af9d 100644 --- a/tests/programs/text_basic.cap +++ b/tests/programs/text_basic.cap @@ -5,8 +5,7 @@ use sys::string pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() - let alloc = rc.mint_alloc_default() - let text = string::text_new(alloc) + let text = string::text_new() try text.push_str("hi") else { panic() } @@ -20,7 +19,7 @@ pub fn main(rc: RootCap) -> i32 { return 1 } c.assert(owned.eq("hi!")) - text.free(alloc) + text.free() c.println("text basic ok") return 0 } diff --git a/tests/programs/text_helpers_more.cap b/tests/programs/text_helpers_more.cap index ab3fdf0..2356af6 100644 --- a/tests/programs/text_helpers_more.cap +++ b/tests/programs/text_helpers_more.cap @@ -6,7 +6,7 @@ use sys::string pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let alloc = rc.mint_alloc_default() - let text = alloc.text_new() + let text = string::text_new() try text.push_str("hi") else { panic() } @@ -33,7 +33,7 @@ pub fn main(rc: RootCap) -> i32 { return 1 } c.assert(owned.eq("hi\nok!")) - try let t2 = 
alloc.text_from("cap") else { + try let t2 = string::text_from("cap") else { panic() } try let owned2 = t2.to_string() else { @@ -41,8 +41,8 @@ pub fn main(rc: RootCap) -> i32 { return 1 } c.assert(owned2.eq("cap")) - t2.free(alloc) - try let t3 = "owned".to_text(alloc) else { + t2.free() + try let t3 = "owned".to_text() else { panic() } try let owned3 = t3.to_string() else { @@ -50,9 +50,9 @@ pub fn main(rc: RootCap) -> i32 { return 1 } c.assert(owned3.eq("owned")) - t3.free(alloc) + t3.free() alloc.vec_u8_free(v) - text.free(alloc) + text.free() c.println("text helpers ok") return 0 } diff --git a/tests/programs/text_push_safe.cap b/tests/programs/text_push_safe.cap index 146ef84..d68d78f 100644 --- a/tests/programs/text_push_safe.cap +++ b/tests/programs/text_push_safe.cap @@ -6,7 +6,7 @@ use sys::string pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let alloc = rc.mint_alloc_default() - let text = string::text_new(alloc) + let text = string::text_new() try text.push_byte('\x00') else { panic() } @@ -30,6 +30,6 @@ pub fn main(rc: RootCap) -> i32 { c.println("push bad") } alloc.vec_u8_free(v) - text.free(alloc) + text.free() return 0 } diff --git a/tests/programs/text_safe.cap b/tests/programs/text_safe.cap index 2842a54..4f6e215 100644 --- a/tests/programs/text_safe.cap +++ b/tests/programs/text_safe.cap @@ -5,8 +5,7 @@ use sys::string pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() - let alloc = rc.mint_alloc_default() - let text = string::text_new(alloc) + let text = string::text_new() try text.push_byte('a') else { panic() } @@ -24,6 +23,6 @@ pub fn main(rc: RootCap) -> i32 { } else { c.println("text bad") } - text.free(alloc) + text.free() return 0 } diff --git a/tests/programs/text_to_string.cap b/tests/programs/text_to_string.cap index fe8e35d..971edc3 100644 --- a/tests/programs/text_to_string.cap +++ b/tests/programs/text_to_string.cap @@ -5,8 +5,7 @@ use sys::string pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() - 
let alloc = rc.mint_alloc_default() - let text = string::text_new(alloc) + let text = string::text_new() try text.push_byte('h') else { panic() } @@ -17,6 +16,6 @@ pub fn main(rc: RootCap) -> i32 { panic() } c.println(s) - text.free(alloc) + text.free() return 0 } diff --git a/tests/programs/text_unsafe.cap b/tests/programs/text_unsafe.cap index 9476506..b132a38 100644 --- a/tests/programs/text_unsafe.cap +++ b/tests/programs/text_unsafe.cap @@ -5,19 +5,18 @@ use sys::string pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() - let alloc = rc.mint_alloc_default() - let text = string::text_new(alloc) + let text = string::text_new() match (text.push_byte('a')) { Ok(_) => { } - Err(_) => { c.assert(false); text.free(alloc); return 1 } + Err(_) => { c.assert(false); text.free(); return 1 } } match (text.push_byte('b')) { Ok(_) => { } - Err(_) => { c.assert(false); text.free(alloc); return 1 } + Err(_) => { c.assert(false); text.free(); return 1 } } match (text.push_byte('c')) { Ok(_) => { } - Err(_) => { c.assert(false); text.free(alloc); return 1 } + Err(_) => { c.assert(false); text.free(); return 1 } } let len = text.len() c.assert(len == 3) @@ -34,6 +33,6 @@ pub fn main(rc: RootCap) -> i32 { } else { c.println("slice bad") } - text.free(alloc) + text.free() return 0 } diff --git a/tests/programs/untrusted_logs.cap b/tests/programs/untrusted_logs.cap index b2aa93c..dcf3e82 100644 --- a/tests/programs/untrusted_logs.cap +++ b/tests/programs/untrusted_logs.cap @@ -1,8 +1,7 @@ module untrusted_logs -use sys::buffer use sys::fs -pub fn read_log(alloc: buffer::Alloc, dir: Dir) -> Result { +pub fn read_log(dir: Dir) -> Result { let file = dir.open_read("app.log") - return file.read_to_string(alloc) + return file.read_to_string() } diff --git a/tests/programs/vec_search_helpers.cap b/tests/programs/vec_search_helpers.cap index c75d390..2502511 100644 --- a/tests/programs/vec_search_helpers.cap +++ b/tests/programs/vec_search_helpers.cap @@ -1,7 +1,7 @@ package safe 
module vec_search_helpers use sys::system -use sys::buffer +use sys::vec pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() @@ -44,7 +44,7 @@ pub fn main(rc: RootCap) -> i32 { } c.assert(v.get_or(1, 99) == 1) c.assert(v.get_or(99, 77) == 77) - let bytes = buffer::vec_new_default() + let bytes = vec::new() try bytes.push('h') else { panic() } diff --git a/tests/programs/wc_file.cap b/tests/programs/wc_file.cap index a99e72c..3680ce6 100644 --- a/tests/programs/wc_file.cap +++ b/tests/programs/wc_file.cap @@ -25,7 +25,7 @@ pub fn main(rc: RootCap) -> i32 { return 1 } let rfs = rc.mint_readfs("./") - let code = match rfs.read_to_string_default(path) { + let code = match rfs.read_to_string(path) { Ok(s) => { count_text(c, s) } diff --git a/tests/programs/wc_stdin.cap b/tests/programs/wc_stdin.cap index 6b5d061..7908f4b 100644 --- a/tests/programs/wc_stdin.cap +++ b/tests/programs/wc_stdin.cap @@ -5,7 +5,7 @@ use sys::system pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let stdin = rc.mint_stdin() - let code = match stdin.read_to_string_default() { + let code = match stdin.read_to_string() { Ok(s) => { let bytes = s.len() let words = s.count_words_ascii() From c213bf80a9659fbd33febcb395329df4d05d54d4 Mon Sep 17 00:00:00 2001 From: Jordan Mecom Date: Wed, 25 Mar 2026 09:20:30 -0700 Subject: [PATCH 06/17] Simplify ownership APIs and cleanup style --- PROBLEMS.md | 13 ++++--- capc/tests/run.rs | 10 +++++ capc/tests/typecheck.rs | 8 ++++ docs/ABI.md | 8 ++-- docs/README.md | 5 +++ docs/TUTORIAL.md | 49 +++++++++++++----------- docs/memory.md | 24 +++++++----- docs/memory_safety.md | 4 +- docs/slice_design.md | 20 +++++++--- examples/hashmap_demo/hashmap_demo.cap | 43 ++++++++------------- examples/how_to_string/how_to_string.cap | 10 ++--- examples/sort/sort.cap | 5 +-- examples/uniq/uniq.cap | 2 +- stdlib/sys/path.cap | 2 +- stdlib/sys/string.cap | 20 +++++----- stdlib/sys/vec.cap | 15 ++++++-- tests/programs/defer_free.cap | 32 ++++++++++++++++ 
tests/programs/fs_dir_reuse.cap | 4 +- tests/programs/fs_helpers.cap | 4 +- tests/programs/fs_reuse.cap | 2 +- tests/programs/path_helpers.cap | 3 +- tests/programs/result_unit_ok.cap | 6 +-- tests/programs/slice_safe_read.cap | 8 ++-- tests/programs/string_helpers.cap | 16 ++++---- tests/programs/string_split.cap | 2 +- tests/programs/text_basic.cap | 4 +- tests/programs/text_helpers_more.cap | 20 +++++----- tests/programs/text_push_safe.cap | 8 ++-- tests/programs/text_safe.cap | 2 +- tests/programs/text_to_string.cap | 4 +- tests/programs/text_unsafe.cap | 8 ++-- tests/programs/vec_custom_eq.cap | 8 ++-- tests/programs/vec_helpers.cap | 20 +++++----- tests/programs/vec_i32_helpers.cap | 14 +++---- tests/programs/vec_search_helpers.cap | 11 +++--- tests/programs/vec_string_helpers.cap | 10 ++--- 36 files changed, 249 insertions(+), 175 deletions(-) create mode 100644 tests/programs/defer_free.cap diff --git a/PROBLEMS.md b/PROBLEMS.md index 4a2a0c5..c803814 100644 --- a/PROBLEMS.md +++ b/PROBLEMS.md @@ -23,10 +23,11 @@ it is still not especially lightweight. Capable now has both: - explicit `Alloc` -- a growing default-allocator surface +- a default-first stdlib surface -That is pragmatic, but the model is not fully settled. Allocation is currently -part resource handle, part policy hook, part convenience burden. +That is a much better default than before, but the model is not fully settled. +Allocation is still part resource handle, part policy hook, and the stdlib +still carries duplicated `_with_alloc` forms. The language should eventually make this story crisp: @@ -37,9 +38,9 @@ Until then, the stdlib will keep carrying duplicated APIs. ## 3. Expression and statement control flow are still somewhat brittle -Recent work made `let ... else` and `expr else` viable, but it also showed that -control-flow behavior was not fully uniform across parser, typechecker, and -codegen. +Recent work made `let ... else`, `try let`, and `try ... 
else` viable, but it +also showed that control-flow behavior was not fully uniform across parser, +typechecker, and codegen. The language now supports these forms, but this area still needs discipline. If more expression-oriented control-flow is added casually, complexity will diff --git a/capc/tests/run.rs b/capc/tests/run.rs index 3accfaa..8b425d5 100644 --- a/capc/tests/run.rs +++ b/capc/tests/run.rs @@ -201,6 +201,16 @@ fn run_path_helpers() { assert!(stdout.contains("path ok"), "stdout was: {stdout:?}"); } +#[test] +fn run_defer_free() { + let out_dir = make_out_dir("defer_free"); + let out_dir = out_dir.to_str().expect("utf8 out dir"); + let (code, stdout, _stderr) = + run_capc(&["run", "--out-dir", out_dir, "tests/programs/defer_free.cap"]); + assert_eq!(code, 0); + assert!(stdout.contains("defer free ok"), "stdout was: {stdout:?}"); +} + #[test] fn run_match_expr() { let out_dir = make_out_dir("match_expr"); diff --git a/capc/tests/typecheck.rs b/capc/tests/typecheck.rs index aea4c3c..95fa292 100644 --- a/capc/tests/typecheck.rs +++ b/capc/tests/typecheck.rs @@ -108,6 +108,14 @@ fn typecheck_path_helpers_ok() { type_check_program(&module, &stdlib, &[]).expect("typecheck module"); } +#[test] +fn typecheck_defer_free_ok() { + let source = load_program("defer_free.cap"); + let module = parse_module(&source).expect("parse module"); + let stdlib = load_stdlib().expect("load stdlib"); + type_check_program(&module, &stdlib, &[]).expect("typecheck module"); +} + #[test] fn typecheck_fs_close_ok() { let source = load_program("should_pass_fs_close.cap"); diff --git a/docs/ABI.md b/docs/ABI.md index 5cf69af..5428c7a 100644 --- a/docs/ABI.md +++ b/docs/ABI.md @@ -33,9 +33,11 @@ compiler-generated stubs when needed. ## Allocation convention -APIs that allocate accept an explicit `Alloc` handle. The handle is passed -through to the runtime and currently backed by libc `malloc`/`free`, but the ABI -keeps the allocator explicit for future custom allocator support. 
+The user-facing stdlib now defaults to the process allocator for ordinary code. +Explicit `Alloc` handles still appear in low-level APIs and `_with_alloc` +variants, and those handles are passed through to the runtime. The runtime +currently backs `Alloc` with libc `malloc`/`free`, but the ABI keeps explicit +allocator passing available for future custom or bounded allocators. ## Status diff --git a/docs/README.md b/docs/README.md index 7b4f5ed..ce43099 100644 --- a/docs/README.md +++ b/docs/README.md @@ -40,3 +40,8 @@ struct` values are the main move-tracked categories, and structs/enums that contain them become move-tracked by containment. The goal is not to turn all programming into ownership puzzles; it is to make authority and resource lifetime explicit where they matter. + +For text and collections, the intended default story is: +- use `string` values in ordinary code +- use `Text` only when building or mutating text +- treat `sys::buffer` as the low-level allocator/memory layer, not the main API diff --git a/docs/TUTORIAL.md b/docs/TUTORIAL.md index 0e1e005..11b37b2 100644 --- a/docs/TUTORIAL.md +++ b/docs/TUTORIAL.md @@ -286,20 +286,24 @@ lifetimes local until a full lifetime model exists. ### Allocators -Allocation is explicit. Functions that allocate accept an `Alloc` handle: +Most ordinary code uses the process default allocator. Reach for explicit +`Alloc` handles when you need low-level control or budgeted allocation: ```cap -let alloc = rc.mint_alloc_default() -let v = alloc.vec_u8_new() +let v = vec::new() +defer v.free() ... -alloc.vec_u8_free(v) ``` -Use `defer` to simplify cleanup. +The intended style for plain heap owners is: allocate, then immediately +schedule cleanup with `defer x.free()`. Keep plain `free()` for early release +inside longer scopes. ## 10) Strings: `string` vs `Text` -`string` is a view. `Text` is owned. +`string` is the ordinary string type in most code. It is a borrowed view. 
+`Text` is the owned builder type you use when you need to construct or mutate +text. ```cap fn build_greeting() -> Result { @@ -312,7 +316,7 @@ fn build_greeting() -> Result { t.push_str("hello")? t.push_byte(' ')? t.append("text")? - let out = t.to_string()? + let out = t.copy_string()? return Ok(out) } ``` @@ -320,6 +324,9 @@ fn build_greeting() -> Result { Helpers: - `string.split`, `split_once`, `trim_*`, `contains`, `index_of_*`. - `string.concat(other)` creates a new owned string view. +- `string.copy_text()` makes an owned `Text` builder when you need one. +- `Text.as_string()` borrows cheaply; `Text.copy_string()` allocates a copy. +- `Vec.as_string()` borrows bytes as text; `Vec.copy_string()` allocates a copy. - `Text.slice_range` returns a `string` view into its buffer. ## 11) Slices and indexing @@ -343,11 +350,10 @@ fn use_tail(s: string) -> Result { ```cap let c = rc.mint_console() -let alloc = rc.mint_alloc_default() -let v = alloc.vec_u8_new() +let v = vec::new() // ensure we free on all paths - defer alloc.vec_u8_free(v) +defer v.free() ``` Deferred expressions must be calls; arguments are evaluated at the defer site. @@ -377,7 +383,7 @@ reports unsafe packages. 
```cap enum ParseErr { MissingEq, OutOfRange, Oom } -fn parse_key_value(line: string, alloc: Alloc) -> Result { +fn parse_key_value(line: string) -> Result { let eq = match (line.index_of_byte('=')) { Ok(i) => { i } Err(_) => { return Err(ParseErr::MissingEq) } @@ -393,22 +399,19 @@ fn parse_key_value(line: string, alloc: Alloc) -> Result { let t = string::text_new() defer t.free() - match (t.push_str(key)) { - Ok(_) => { } - Err(_) => { return Err(ParseErr::Oom) } + try t.push_str(key) else { + return Err(ParseErr::Oom) } - match (t.push_byte('=')) { - Ok(_) => { } - Err(_) => { return Err(ParseErr::Oom) } + try t.push_byte('=') else { + return Err(ParseErr::Oom) } - match (t.push_str(val)) { - Ok(_) => { } - Err(_) => { return Err(ParseErr::Oom) } + try t.push_str(val) else { + return Err(ParseErr::Oom) } - match (t.to_string()) { - Ok(out) => { return Ok(out) } - Err(_) => { return Err(ParseErr::Oom) } + try let out = t.copy_string() else { + return Err(ParseErr::Oom) } + return Ok(out) } ``` diff --git a/docs/memory.md b/docs/memory.md index 54da59f..9b258b3 100644 --- a/docs/memory.md +++ b/docs/memory.md @@ -6,7 +6,8 @@ borrow checker, and how that shapes the stdlib and ABI. ## Goals - Safe code is memory-safe, so capability security remains meaningful. - Unsafe operations are explicit and auditable. -- Allocation is explicit (Zig-like) and testable. +- Allocation policy stays controllable without forcing ordinary code to thread + allocator handles everywhere. - The core model is simple enough to keep the language small and predictable. ## Safe vs unsafe @@ -18,22 +19,26 @@ Safe code cannot: Unsafe code (`package unsafe`) may do all of the above. Tooling can audit and reject unsafe dependencies (`--safe-only`, `audit`). -## Allocators are explicit -- APIs that allocate take an explicit `Alloc` handle (or are methods that already - carry one). 
-- The runtime currently backs `Alloc` with libc malloc/free, but the ABI keeps - allocator passing explicit for future custom allocators. +## Allocators and the default surface +- Ordinary code uses the process default allocator through stdlib helpers like + `string::text_new()`, `vec::new()`, and `fs.read_to_string(...)`. +- Explicit `Alloc` handles still exist for low-level control, testing, and + future bounded/custom allocators. +- `sys::buffer` is the low-level memory layer. Most application code should not + need to talk to it directly. ## Owned vs borrowed data Capable separates owned buffers from borrowed views. ### Owned - `Vec` is an owned, growable buffer. -- `Text` is an owned UTF-8 buffer backed by `Vec`. +- `Text` is an owned UTF-8 buffer backed by `Vec`. Use it as a builder. - Owned types are move-only to reduce double-free patterns. +- In ordinary code, the intended cleanup pattern is `defer x.free()` soon after + creation. Use plain `free()` when you need early release. ### Borrowed -- `Slice` and `MutSlice` are non-owning views. +- `string`, `Slice`, and `MutSlice` are non-owning views. - Safe indexing and slicing are bounds-checked. Because Capable does not have a full lifetime system, safe code is restricted @@ -66,5 +71,6 @@ code. The surface is intentionally small and explicit: ## Roadmap - Introduce a lightweight lifetime model or scoped borrows to relax slice escape restrictions without losing safety. -- Expand allocator ergonomics without losing explicit allocation. +- Keep the default-first allocator surface simple while preserving explicit + allocator escape hatches. - Keep the unsafe surface small and auditable. diff --git a/docs/memory_safety.md b/docs/memory_safety.md index c624a5f..7708e68 100644 --- a/docs/memory_safety.md +++ b/docs/memory_safety.md @@ -67,7 +67,9 @@ We want explicit, Zig-like control without GC, but without letting safe libs cor - Safe non-stdlib modules may not return or store `Slice` / `MutSlice` values. 
### 3.2 Explicit allocators (optional dependency, not a security boundary) -Capable may expose explicit allocation via an `Alloc` handle: +Ordinary code can use default-first stdlib constructors and helpers. Capable +also exposes explicit allocation via an `Alloc` handle when code needs direct +control: - `alloc(Alloc, n) -> Owned[T]` or similar - `free(Alloc, Owned[T])` consumes the owner diff --git a/docs/slice_design.md b/docs/slice_design.md index b9da079..b57f654 100644 --- a/docs/slice_design.md +++ b/docs/slice_design.md @@ -49,19 +49,26 @@ Implementation detail: --- -## 3. Minimal API surface (suggested v0.2) +## 3. Minimal API surface ### 3.1 Allocation / ownership -Allocation is explicit (Zig-like). Functions that allocate accept an allocator value. +Ordinary code uses default-first constructors. Explicit allocators remain +available as escape hatches for low-level code. ```cap -opaque struct Alloc opaque struct Vec[T] // move-only owner +fn vec::new() -> Vec +fn vec::with_capacity(cap: i32) -> Result, AllocErr> + +impl Vec { + fn free(self) -> unit +} + +opaque struct Alloc impl Alloc { fn vec_u8_new(self) -> Vec fn vec_u8_with_capacity(self, cap: i32) -> Result, AllocErr> - fn vec_u8_free(self, v: Vec) -> unit } ```` @@ -105,14 +112,15 @@ fn parse_u16_be(buf: Slice, off: i32) -> Result { `sys.fs` provides methods that return owned bytes (`Vec`) and/or `string`: ```cap -fn ReadFS.read_bytes(self, alloc: Alloc, path: string) -> Result, FsErr> +fn ReadFS.read_bytes(self, path: string) -> Result, FsErr> +fn ReadFS.read_bytes_with_alloc(self, alloc: Alloc, path: string) -> Result, FsErr> ``` Usage: * safe code receives `Vec` * parses it via `Slice` -* frees it (explicitly or with `defer`) +* frees it explicitly or with `defer` --- diff --git a/examples/hashmap_demo/hashmap_demo.cap b/examples/hashmap_demo/hashmap_demo.cap index 075d29a..e03549a 100644 --- a/examples/hashmap_demo/hashmap_demo.cap +++ b/examples/hashmap_demo/hashmap_demo.cap @@ -12,7 +12,6 @@ 
package safe module hashmap_demo use sys::console -use sys::buffer use sys::vec /// Trait for types that can be hashed. @@ -113,8 +112,7 @@ struct HashMap { /// Number of deleted (tombstone) entries deleted: i32, /// Capacity - capacity: i32, - alloc: buffer::Alloc + capacity: i32 } /// Generic hash function using the Hash trait. @@ -123,13 +121,13 @@ fn compute_hash(key: K) -> i32 { } /// Create a new empty hash-map with initial capacity. -pub fn hashmap_new(alloc: buffer::Alloc, initial_capacity: i32) -> HashMap { +pub fn hashmap_new(initial_capacity: i32) -> HashMap { let cap = initial_capacity if (cap < 8) { cap = 8 } - let entries = alloc.vec_new() + let entries = vec::new() // Initialize all entries as empty for i in 0..cap { @@ -142,8 +140,7 @@ pub fn hashmap_new(alloc: buffer::Alloc, initial_capacity: i32) -> HashMap { entries: entries, size: 0, deleted: 0, - capacity: cap, - alloc: alloc + capacity: cap } } @@ -223,10 +220,9 @@ impl HashMap { fn hashmap_rehash(map: HashMap, new_cap: i32) -> HashMap { let old_entries = map.entries let old_cap = map.capacity - let alloc = map.alloc // Create new entries vector - let new_entries = alloc.vec_new() + let new_entries = vec::new() for i in 0..new_cap { try new_entries.push(empty_entry()) else { panic() @@ -274,8 +270,7 @@ fn hashmap_rehash(map: HashMap, new_cap: i32) -> HashMap { entries: new_entries, size: new_size, deleted: 0, - capacity: new_cap, - alloc: alloc + capacity: new_cap } } @@ -366,8 +361,7 @@ pub fn hashmap_insert(map: HashMap, key: i32, value: i32) -> Result Result { entries: map.entries, size: map.size - 1, deleted: map.deleted + 1, - capacity: cap, - alloc: map.alloc + capacity: cap }) } } @@ -424,7 +417,7 @@ pub fn hashmap_free(map: HashMap) -> unit { } /// Demo: insert some values and look them up. 
-fn run_demo(c: Console, alloc: buffer::Alloc) -> unit { +fn run_demo(c: Console) -> unit { c.println("=== Hash-Map Demo (with Hash trait and Vec) ===") c.println("") c.println("This demonstrates a hash-map using:") @@ -435,13 +428,15 @@ fn run_demo(c: Console, alloc: buffer::Alloc) -> unit { c.println("") // Create a hash-map with capacity 16 - let map = hashmap_new(alloc, 16) + let map = hashmap_new(16) c.println("Inserting entries...") // Insert some key-value pairs - let test_keys = alloc.vec_i32_new() - let test_vals = alloc.vec_i32_new() + let test_keys = vec::new() + let test_vals = vec::new() + defer test_keys.free() + defer test_vals.free() // key -> key * 10 try test_keys.push(42) else { panic() } @@ -539,7 +534,8 @@ fn run_demo(c: Console, alloc: buffer::Alloc) -> unit { c.println("") c.println("Looking up non-existent keys...") - let missing = alloc.vec_i32_new() + let missing = vec::new() + defer missing.free() try missing.push(100) else { panic() } try missing.push(999) else { panic() } try missing.push(12345) else { panic() } @@ -627,9 +623,6 @@ fn run_demo(c: Console, alloc: buffer::Alloc) -> unit { c.println("") // Cleanup - alloc.vec_i32_free(test_keys) - alloc.vec_i32_free(test_vals) - alloc.vec_i32_free(missing) hashmap_free(map) c.println("") @@ -639,9 +632,7 @@ fn run_demo(c: Console, alloc: buffer::Alloc) -> unit { pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() - let alloc = rc.mint_alloc_default() - - run_demo(c, alloc) + run_demo(c) return 0 } diff --git a/examples/how_to_string/how_to_string.cap b/examples/how_to_string/how_to_string.cap index 300a251..5d6a3c8 100644 --- a/examples/how_to_string/how_to_string.cap +++ b/examples/how_to_string/how_to_string.cap @@ -20,16 +20,17 @@ fn demo_string_view(c: Console) -> unit { c.println(parts.right) let words = trimmed.split_view(',') + defer words.free() c.println("split count:") c.println_i32(words.len()) c.println(words.get_or(0, "")) c.println(words.get_or(1, "")) - words.free() 
} fn demo_text_builder(c: Console) -> unit { c.println("-- Text builder --") let t = string::text_new() + defer t.free() try t.push_str("hello") else { panic() } @@ -43,20 +44,19 @@ fn demo_text_builder(c: Console) -> unit { panic() } c.println(head) - try let owned = t.to_string() else { + try let owned = t.copy_string() else { panic() } c.println(owned) - t.free() try let t2 = string::text_from("owned") else { panic() } - try let owned2 = t2.to_string() else { + defer t2.free() + try let owned2 = t2.copy_string() else { panic() } c.println(owned2) - t2.free() try let joined = "a".concat("b") else { panic() diff --git a/examples/sort/sort.cap b/examples/sort/sort.cap index 3138e41..ccd08ca 100644 --- a/examples/sort/sort.cap +++ b/examples/sort/sort.cap @@ -63,10 +63,12 @@ fn sort_indices(lines: Vec, indices: Vec) -> unit { fn run(c: Console, input: Stdin) -> Result { let contents = input.read_to_string()? let lines = contents.lines_view() + defer lines.free() let n = lines.len() // Create index array [0, 1, 2, ...] let indices = vec::new() + defer indices.free() for i in 0..n { try indices.push(i) else { panic() @@ -83,9 +85,6 @@ fn run(c: Console, input: Stdin) -> Result { c.println(line) } } - - indices.free() - lines.free() return Ok(()) } diff --git a/examples/uniq/uniq.cap b/examples/uniq/uniq.cap index 4cd7a8d..a49917d 100644 --- a/examples/uniq/uniq.cap +++ b/examples/uniq/uniq.cap @@ -18,6 +18,7 @@ fn should_print(lines: Vec, i: i32) -> bool { fn run(c: Console, input: Stdin) -> Result { let contents = input.read_to_string()? 
let lines = contents.lines_view() + defer lines.free() let n = lines.len() for i in 0..n { if (should_print(lines, i)) { @@ -25,7 +26,6 @@ fn run(c: Console, input: Stdin) -> Result { c.println(line) } } - lines.free() return Ok(()) } diff --git a/stdlib/sys/path.cap b/stdlib/sys/path.cap index 65671e7..f463a7a 100644 --- a/stdlib/sys/path.cap +++ b/stdlib/sys/path.cap @@ -68,7 +68,7 @@ pub fn clean_relative_with_alloc(alloc: buffer::Alloc, raw_path: string) -> Resu out.free() return Ok("index.html") } - let result = out.to_string() + let result = out.copy_string() out.free() match (result) { Ok(path) => { return Ok(path) } diff --git a/stdlib/sys/string.cap b/stdlib/sys/string.cap index 89a7fe2..a39b8c1 100644 --- a/stdlib/sys/string.cap +++ b/stdlib/sys/string.cap @@ -83,7 +83,7 @@ fn build_range(alloc: buffer::Alloc, s: string, start: i32, end: i32) -> string } i = i + 1 } - match (buf.to_string()) { + match (buf.copy_string()) { Ok(out) => { return out } Err(_) => { panic() } } @@ -223,9 +223,9 @@ impl Text { return Ok(string { bytes: sub }) } - /// Copy contents into a new string view. - /// This allocates a new owned slice for the string view. - pub fn to_string(self) -> Result { + /// Copy contents into a new owned string view. + /// This allocates a new owned slice for the returned string. + pub fn copy_string(self) -> Result { let owned = self.bytes.copy_slice()? return from_bytes(owned) } @@ -271,13 +271,13 @@ impl string { return Ok(string { bytes: sub }) } - /// Copy this string view into a new owned Text with the provided allocator. - pub fn to_text(self) -> Result { + /// Copy this string view into a new owned Text using the process default allocator. + pub fn copy_text(self) -> Result { return text_from(self) } /// Copy this string view into a new owned Text using the provided allocator. 
- pub fn to_text_with_alloc(self, alloc: buffer::Alloc) -> Result { + pub fn copy_text_with_alloc(self, alloc: buffer::Alloc) -> Result { return text_from_with_alloc(alloc, self) } @@ -291,7 +291,7 @@ impl string { let out = text_new_with_alloc(alloc) out.push_str(self)? out.push_str(other)? - return out.to_string() + return out.copy_string() } /// Index into the string by byte. @@ -963,7 +963,7 @@ impl string { } i = i + 1 } - match (buf.to_string()) { + match (buf.copy_string()) { Ok(out) => { return out } Err(_) => { panic() } } @@ -989,7 +989,7 @@ impl string { } i = i + 1 } - match (buf.to_string()) { + match (buf.copy_string()) { Ok(out) => { return out } Err(_) => { panic() } } diff --git a/stdlib/sys/vec.cap b/stdlib/sys/vec.cap index e96baa8..f5adba4 100644 --- a/stdlib/sys/vec.cap +++ b/stdlib/sys/vec.cap @@ -632,6 +632,15 @@ impl Vec { return header.alloc.slice_from_ptr(header.raw, header.len) } + /// Borrow the bytes as a string view (no copy). + /// The view is invalid after freeing this Vec. + pub fn as_string(self) -> string { + match (string::from_bytes(self.as_slice())) { + Ok(s) => { return s } + Err(_) => { panic() } + } + } + /// Copy contents into a new owned slice using the Vec allocator. pub fn copy_slice(self) -> Result, buffer::AllocErr> { let header = read_header(self) @@ -647,8 +656,8 @@ impl Vec { return Ok(header.alloc.slice_from_ptr(raw, header.len)) } - /// Copy contents into a new string view. - pub fn to_string(self) -> Result { + /// Copy contents into a new owned string view. + pub fn copy_string(self) -> Result { let owned = self.copy_slice()? return string::from_bytes(owned) } @@ -672,6 +681,6 @@ impl Vec { buf.push_str(part)? 
i = i + 1 } - return buf.to_string() + return buf.copy_string() } } diff --git a/tests/programs/defer_free.cap b/tests/programs/defer_free.cap new file mode 100644 index 0000000..4d26869 --- /dev/null +++ b/tests/programs/defer_free.cap @@ -0,0 +1,32 @@ +package safe +module defer_free +use sys::system +use sys::string +use sys::vec + +pub fn main(rc: RootCap) -> i32 { + let c = rc.mint_console() + + let text = string::text_new() + defer text.free() + try text.push_str("hi") else { + c.assert(false) + return 1 + } + + let bytes = vec::new() + defer bytes.free() + try bytes.push('o') else { + c.assert(false) + return 1 + } + try bytes.push('k') else { + c.assert(false) + return 1 + } + + c.assert(text.as_string().eq("hi")) + c.assert(bytes.as_string().eq("ok")) + c.println("defer free ok") + return 0 +} diff --git a/tests/programs/fs_dir_reuse.cap b/tests/programs/fs_dir_reuse.cap index d7a04d1..e7b1b46 100644 --- a/tests/programs/fs_dir_reuse.cap +++ b/tests/programs/fs_dir_reuse.cap @@ -16,16 +16,16 @@ pub fn main(rc: RootCap) -> i32 { match dir.read_bytes("app.txt") { Ok(bytes) => { + defer bytes.free() c.assert(bytes.len() > 0) - bytes.free() } Err(_) => { c.println("dir read_bytes failed"); return 1 } } match dir.list_dir() { Ok(entries) => { + defer entries.free() c.assert(entries.len() > 0) - entries.free() } Err(_) => { c.println("dir list_dir failed"); return 1 } } diff --git a/tests/programs/fs_helpers.cap b/tests/programs/fs_helpers.cap index 7eb391e..306b52a 100644 --- a/tests/programs/fs_helpers.cap +++ b/tests/programs/fs_helpers.cap @@ -17,8 +17,8 @@ pub fn main(rc: RootCap) -> i32 { let rfs2 = rc.mint_readfs("./config") match rfs2.read_bytes("app.txt") { Ok(bytes) => { + defer bytes.free() c.assert(bytes.len() > 0) - bytes.free() } Err(_) => { c.println("read_bytes failed") @@ -29,8 +29,8 @@ pub fn main(rc: RootCap) -> i32 { let rfs3 = rc.mint_readfs("./config") match rfs3.list_dir(".") { Ok(entries) => { + defer entries.free() c.assert(entries.len() 
> 0) - entries.free() } Err(_) => { c.println("list_dir failed") diff --git a/tests/programs/fs_reuse.cap b/tests/programs/fs_reuse.cap index f69c884..c72e673 100644 --- a/tests/programs/fs_reuse.cap +++ b/tests/programs/fs_reuse.cap @@ -15,8 +15,8 @@ pub fn main(rc: RootCap) -> i32 { match rfs.read_bytes("app.txt") { Ok(bytes) => { + defer bytes.free() c.assert(bytes.len() > 0) - bytes.free() } Err(_) => { c.println("read_bytes failed"); return 1 } } diff --git a/tests/programs/path_helpers.cap b/tests/programs/path_helpers.cap index fcd74c5..599937e 100644 --- a/tests/programs/path_helpers.cap +++ b/tests/programs/path_helpers.cap @@ -18,12 +18,11 @@ pub fn main(rc: RootCap) -> i32 { } let parts = path::segments_view("site/docs/index.html") + defer parts.free() c.assert(parts.len() == 3) c.assert(parts.get_or(0, "").eq("site")) c.assert(parts.get_or(1, "").eq("docs")) c.assert(parts.get_or(2, "").eq("index.html")) - parts.free() - c.assert(path::basename_view("site/docs/index.html").eq("index.html")) c.assert(path::dirname_view("site/docs/index.html").eq("site/docs")) diff --git a/tests/programs/result_unit_ok.cap b/tests/programs/result_unit_ok.cap index 137a0b3..50a6864 100644 --- a/tests/programs/result_unit_ok.cap +++ b/tests/programs/result_unit_ok.cap @@ -1,17 +1,17 @@ package safe module result_unit_ok use sys::system +use sys::vec pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() - let alloc = rc.mint_alloc_default() - let v = alloc.vec_u8_new() + let v = vec::new() + defer v.free() let ok = match v.push('*') { Ok(unit_val) => { 1 } Err(e) => { 0 } } c.assert(ok == 1) c.println("result unit ok - got Ok(unit)") - alloc.vec_u8_free(v) return 0 } diff --git a/tests/programs/slice_safe_read.cap b/tests/programs/slice_safe_read.cap index 994dcd3..b6e7034 100644 --- a/tests/programs/slice_safe_read.cap +++ b/tests/programs/slice_safe_read.cap @@ -1,14 +1,15 @@ package safe module slice_safe_read use sys::system +use sys::vec pub fn main(rc: RootCap) 
-> i32 { let c = rc.mint_console() - let alloc = rc.mint_alloc_default() - let v = alloc.vec_u8_new() + let v = vec::new() + defer v.free() match (v.push('\x00')) { Ok(_) => { } - Err(_) => { c.assert(false); alloc.vec_u8_free(v); return 1 } + Err(_) => { c.assert(false); return 1 } } let slice = v.as_slice() let b = slice.at(0) @@ -18,6 +19,5 @@ pub fn main(rc: RootCap) -> i32 { } else { c.println("slice read bad") } - alloc.vec_u8_free(v) return 0 } diff --git a/tests/programs/string_helpers.cap b/tests/programs/string_helpers.cap index d9b0562..a67c282 100644 --- a/tests/programs/string_helpers.cap +++ b/tests/programs/string_helpers.cap @@ -12,6 +12,9 @@ pub fn main(rc: RootCap) -> i32 { let words = "a b c".split_whitespace() let word_views = "a b c".split_whitespace_view() let split_views = "a,b,c".split_view(',') + defer words.free() + defer word_views.free() + defer split_views.free() let count = words.len() let trimmed = " hi \n".trim() let trimmed_view = " hi \n".trim_view() @@ -25,16 +28,18 @@ pub fn main(rc: RootCap) -> i32 { let sliced = "hello".slice_range(1, 4) let lines = "a\nb\n".split_lines() let line_views = "a\r\nb\n".lines_view() + defer lines.free() + defer line_views.free() let t = string::text_new() + defer t.free() try t.push_str("hi") else { panic() } - try let owned = t.to_string() else { + try let owned = t.copy_string() else { c.assert(false) return 1 } c.assert(owned.eq("hi")) - t.free() c.assert(n == 3 && b == 'a' && count == 3) c.assert(word_views.len() == 3) c.assert(split_views.len() == 3) @@ -120,18 +125,13 @@ pub fn main(rc: RootCap) -> i32 { Err(_) => { c.assert(false) } } let pieces = "a,b,c".split(',') + defer pieces.free() c.assert(pieces.len() == 3) c.assert(line_views.len() == 2) match (pieces.join(",")) { Ok(joined) => { c.assert(joined.eq("a,b,c")) } Err(_) => { c.assert(false) } } - words.free() - word_views.free() - split_views.free() - lines.free() - line_views.free() - pieces.free() c.println("string ok") return 0 } 
diff --git a/tests/programs/string_split.cap b/tests/programs/string_split.cap index ac5f9b1..8e74331 100644 --- a/tests/programs/string_split.cap +++ b/tests/programs/string_split.cap @@ -5,8 +5,8 @@ use sys::system pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let parts = "a,b,c".split(',') + defer parts.free() let count = parts.len() - parts.free() c.assert(count == 3) c.println("split ok") return 0 diff --git a/tests/programs/text_basic.cap b/tests/programs/text_basic.cap index af2af9d..de51902 100644 --- a/tests/programs/text_basic.cap +++ b/tests/programs/text_basic.cap @@ -6,6 +6,7 @@ use sys::string pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let text = string::text_new() + defer text.free() try text.push_str("hi") else { panic() } @@ -14,12 +15,11 @@ pub fn main(rc: RootCap) -> i32 { } let view = text.as_string() c.assert(view.eq("hi!")) - try let owned = text.to_string() else { + try let owned = text.copy_string() else { c.assert(false) return 1 } c.assert(owned.eq("hi!")) - text.free() c.println("text basic ok") return 0 } diff --git a/tests/programs/text_helpers_more.cap b/tests/programs/text_helpers_more.cap index 2356af6..f2b5a12 100644 --- a/tests/programs/text_helpers_more.cap +++ b/tests/programs/text_helpers_more.cap @@ -2,11 +2,12 @@ package safe module text_helpers_more use sys::system use sys::string +use sys::vec pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() - let alloc = rc.mint_alloc_default() let text = string::text_new() + defer text.free() try text.push_str("hi") else { panic() } @@ -21,14 +22,15 @@ pub fn main(rc: RootCap) -> i32 { return 1 } c.assert(prefix.eq("hi")) - let v = alloc.vec_u8_new() + let v = vec::new() + defer v.free() try v.push('!') else { panic() } try text.extend_vec(v) else { panic() } - try let owned = text.to_string() else { + try let owned = text.copy_string() else { c.assert(false) return 1 } @@ -36,23 +38,21 @@ pub fn main(rc: RootCap) -> i32 { try let t2 = 
string::text_from("cap") else { panic() } - try let owned2 = t2.to_string() else { + defer t2.free() + try let owned2 = t2.copy_string() else { c.assert(false) return 1 } c.assert(owned2.eq("cap")) - t2.free() - try let t3 = "owned".to_text() else { + try let t3 = "owned".copy_text() else { panic() } - try let owned3 = t3.to_string() else { + defer t3.free() + try let owned3 = t3.copy_string() else { c.assert(false) return 1 } c.assert(owned3.eq("owned")) - t3.free() - alloc.vec_u8_free(v) - text.free() c.println("text helpers ok") return 0 } diff --git a/tests/programs/text_push_safe.cap b/tests/programs/text_push_safe.cap index d68d78f..b59a74d 100644 --- a/tests/programs/text_push_safe.cap +++ b/tests/programs/text_push_safe.cap @@ -2,18 +2,20 @@ package safe module text_push_safe use sys::system use sys::string +use sys::vec pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() - let alloc = rc.mint_alloc_default() let text = string::text_new() + defer text.free() try text.push_byte('\x00') else { panic() } try text.push_byte('\x07') else { panic() } - let v = alloc.vec_u8_new() + let v = vec::new() + defer v.free() try v.push('\t') else { panic() } @@ -29,7 +31,5 @@ pub fn main(rc: RootCap) -> i32 { } else { c.println("push bad") } - alloc.vec_u8_free(v) - text.free() return 0 } diff --git a/tests/programs/text_safe.cap b/tests/programs/text_safe.cap index 4f6e215..9f64243 100644 --- a/tests/programs/text_safe.cap +++ b/tests/programs/text_safe.cap @@ -6,6 +6,7 @@ use sys::string pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let text = string::text_new() + defer text.free() try text.push_byte('a') else { panic() } @@ -23,6 +24,5 @@ pub fn main(rc: RootCap) -> i32 { } else { c.println("text bad") } - text.free() return 0 } diff --git a/tests/programs/text_to_string.cap b/tests/programs/text_to_string.cap index 971edc3..c648b9c 100644 --- a/tests/programs/text_to_string.cap +++ b/tests/programs/text_to_string.cap @@ -6,16 +6,16 @@ use 
sys::string pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let text = string::text_new() + defer text.free() try text.push_byte('h') else { panic() } try text.push_byte('i') else { panic() } - try let s = text.to_string() else { + try let s = text.copy_string() else { panic() } c.println(s) - text.free() return 0 } diff --git a/tests/programs/text_unsafe.cap b/tests/programs/text_unsafe.cap index b132a38..57abf95 100644 --- a/tests/programs/text_unsafe.cap +++ b/tests/programs/text_unsafe.cap @@ -6,17 +6,18 @@ use sys::string pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() let text = string::text_new() + defer text.free() match (text.push_byte('a')) { Ok(_) => { } - Err(_) => { c.assert(false); text.free(); return 1 } + Err(_) => { c.assert(false); return 1 } } match (text.push_byte('b')) { Ok(_) => { } - Err(_) => { c.assert(false); text.free(); return 1 } + Err(_) => { c.assert(false); return 1 } } match (text.push_byte('c')) { Ok(_) => { } - Err(_) => { c.assert(false); text.free(); return 1 } + Err(_) => { c.assert(false); return 1 } } let len = text.len() c.assert(len == 3) @@ -33,6 +34,5 @@ pub fn main(rc: RootCap) -> i32 { } else { c.println("slice bad") } - text.free() return 0 } diff --git a/tests/programs/vec_custom_eq.cap b/tests/programs/vec_custom_eq.cap index 6895616..11b0672 100644 --- a/tests/programs/vec_custom_eq.cap +++ b/tests/programs/vec_custom_eq.cap @@ -1,6 +1,7 @@ package safe module vec_custom_eq use sys::eq +use sys::vec copy struct Entry { key: i32, @@ -15,10 +16,10 @@ impl eq::Eq for Entry { pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() - let alloc = rc.mint_alloc_default() // Test Vec with custom Eq - let v = alloc.vec_new() + let v = vec::new() + defer v.free() try v.push(Entry { key: 1, value: 10 }) else { panic() } @@ -61,10 +62,9 @@ pub fn main(rc: RootCap) -> i32 { // Test filter_eq let filtered = v.filter_eq(Entry { key: 2, value: 20 }) + defer filtered.free() c.assert(filtered.len() == 2) - 
filtered.free() - v.free() c.println("vec custom eq ok") return 0 } diff --git a/tests/programs/vec_helpers.cap b/tests/programs/vec_helpers.cap index bfea188..5e33071 100644 --- a/tests/programs/vec_helpers.cap +++ b/tests/programs/vec_helpers.cap @@ -1,18 +1,20 @@ package safe module vec_helpers use sys::system +use sys::vec pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() - let alloc = rc.mint_alloc_default() - let v = alloc.vec_u8_new() - let with_cap = alloc.vec_with_capacity(8) - let extra = alloc.vec_u8_new() + let v = vec::new() + defer v.free() + let with_cap = vec::with_capacity(8) + let extra = vec::new() + defer extra.free() match (with_cap) { Ok(vc) => { + defer vc.free() c.assert(vc.capacity() >= 8) vc.clear() - alloc.vec_u8_free(vc) } Err(_) => { c.println("vec bad"); return 1 } } @@ -40,6 +42,8 @@ pub fn main(rc: RootCap) -> i32 { let b = v.get(1) let filtered = v.filter('B') let mapped = v.map_add('\x01') + defer filtered.free() + defer mapped.free() let slice = v.slice_range(1, 3) match (b) { Ok(x) => { @@ -54,16 +58,10 @@ pub fn main(rc: RootCap) -> i32 { Err(e) => { c.println("vec bad"); return 1 } } c.println("vec ok") - alloc.vec_u8_free(filtered) - alloc.vec_u8_free(mapped) - alloc.vec_u8_free(v) - alloc.vec_u8_free(extra) return 0 } Err(e) => {} } c.println("vec bad") - alloc.vec_u8_free(v) - alloc.vec_u8_free(extra) return 1 } diff --git a/tests/programs/vec_i32_helpers.cap b/tests/programs/vec_i32_helpers.cap index fdf7712..c3ff638 100644 --- a/tests/programs/vec_i32_helpers.cap +++ b/tests/programs/vec_i32_helpers.cap @@ -1,12 +1,14 @@ package safe module vec_i32_helpers use sys::system +use sys::vec pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() - let alloc = rc.mint_alloc_default() - let v = alloc.vec_i32_new() - let extra = alloc.vec_i32_new() + let v = vec::new() + let extra = vec::new() + defer v.free() + defer extra.free() match v.push(3) { Ok(_) => { } Err(_) => { c.assert(false); return 1 } @@ -37,15 +39,13 
@@ pub fn main(rc: RootCap) -> i32 { } let filtered = v.filter(3) let mapped = v.map_add(1) + defer filtered.free() + defer mapped.free() match mapped.get(0) { Ok(x) => { c.assert(x == 4) } Err(_) => { c.assert(false); return 1 } } c.assert(filtered.len() == 1) c.println("vec i32 ok") - alloc.vec_i32_free(filtered) - alloc.vec_i32_free(mapped) - alloc.vec_i32_free(v) - alloc.vec_i32_free(extra) return 0 } diff --git a/tests/programs/vec_search_helpers.cap b/tests/programs/vec_search_helpers.cap index 2502511..ed548a9 100644 --- a/tests/programs/vec_search_helpers.cap +++ b/tests/programs/vec_search_helpers.cap @@ -5,8 +5,8 @@ use sys::vec pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() - let alloc = rc.mint_alloc_default() - let v = alloc.vec_i32_new() + let v = vec::new() + defer v.free() try v.push(1) else { panic() } @@ -45,19 +45,20 @@ pub fn main(rc: RootCap) -> i32 { c.assert(v.get_or(1, 99) == 1) c.assert(v.get_or(99, 77) == 77) let bytes = vec::new() + defer bytes.free() try bytes.push('h') else { panic() } try bytes.push('i') else { panic() } - try let s = bytes.to_string() else { + let view = bytes.as_string() + c.assert(view.eq("hi")) + try let s = bytes.copy_string() else { c.assert(false) return 1 } c.assert(s.eq("hi")) - alloc.vec_i32_free(v) - bytes.free() c.println("vec search ok") return 0 } diff --git a/tests/programs/vec_string_helpers.cap b/tests/programs/vec_string_helpers.cap index a09d682..3483a3b 100644 --- a/tests/programs/vec_string_helpers.cap +++ b/tests/programs/vec_string_helpers.cap @@ -1,12 +1,14 @@ package safe module vec_string_helpers use sys::system +use sys::vec pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() - let alloc = rc.mint_alloc_default() - let v = alloc.vec_string_new() - let extra = alloc.vec_string_new() + let v = vec::new() + let extra = vec::new() + defer v.free() + defer extra.free() match v.push("a") { Ok(_) => { } Err(_) => { c.assert(false); return 1 } @@ -32,7 +34,5 @@ pub fn main(rc: 
RootCap) -> i32 { Err(_) => { c.assert(false); return 1 } } c.println("vec string ok") - alloc.vec_string_free(v) - alloc.vec_string_free(extra) return 0 } From bb98c95d6712fce3bbbffcf735daf7cdcb2e0c02 Mon Sep 17 00:00:00 2001 From: Jordan Mecom Date: Wed, 25 Mar 2026 12:27:50 -0700 Subject: [PATCH 07/17] Add borrowed foreach iteration --- capc/src/parser.rs | 185 +++++++++++++++++++-- capc/tests/run.rs | 28 ++++ capc/tests/typecheck.rs | 16 ++ docs/TUTORIAL.md | 21 +++ examples/hashmap_demo/hashmap_demo.cap | 11 +- examples/sort/sort.cap | 3 +- examples/uniq/uniq.cap | 4 +- tests/programs/for_each.cap | 38 +++++ tests/programs/for_each_complex_source.cap | 31 ++++ 9 files changed, 306 insertions(+), 31 deletions(-) create mode 100644 tests/programs/for_each.cap create mode 100644 tests/programs/for_each_complex_source.cap diff --git a/capc/src/parser.rs b/capc/src/parser.rs index b2366d8..75ef85c 100644 --- a/capc/src/parser.rs +++ b/capc/src/parser.rs @@ -789,23 +789,6 @@ impl Parser { }) } - fn parse_for_after(&mut self, start: usize) -> Result { - let var = self.expect_ident()?; - self.expect(TokenKind::In)?; - let range_start = self.parse_range_bound()?; - self.expect(TokenKind::DotDot)?; - let range_end = self.parse_range_bound()?; - let body = self.parse_block()?; - let end = body.span.end; - Ok(ForStmt { - var, - start: range_start, - end: range_end, - body, - span: Span::new(start, end), - }) - } - fn parse_for_stmt(&mut self) -> Result { let for_token = self.expect(TokenKind::For)?; let start = for_token.span.start; @@ -822,7 +805,37 @@ impl Parser { span: Span::new(start, end), })); } - Ok(Stmt::For(self.parse_for_after(start)?)) + let first = self.expect_ident()?; + let second = if self.maybe_consume(TokenKind::Comma).is_some() { + Some(self.expect_ident()?) 
+ } else { + None + }; + self.expect(TokenKind::In)?; + let range_or_source = self.parse_expr_no_struct()?; + if self.maybe_consume(TokenKind::DotDot).is_some() { + if second.is_some() { + return Err(self.error_at( + first.span, + "range for loops accept only one binding".to_string(), + )); + } + let range_end = self.parse_range_bound()?; + let body = self.parse_block()?; + let end = body.span.end; + return Ok(Stmt::For(ForStmt { + var: first, + start: range_or_source, + end: range_end, + body, + span: Span::new(start, end), + })); + } + + let item = second.clone().unwrap_or_else(|| first.clone()); + let index = second.map(|_| first); + let body = self.parse_block()?; + self.desugar_for_each(Span::new(start, body.span.end), index, item, range_or_source, body) } /// Parse a simple expression for range bounds (no struct literals allowed) @@ -1593,6 +1606,142 @@ impl Parser { }) } + fn desugar_for_each( + &self, + for_span: Span, + index_binding: Option, + item_binding: Ident, + source: Expr, + body: Block, + ) -> Result { + let hidden_source_span = self.synthetic_span(for_span, 1); + let hidden_len_span = self.synthetic_span(for_span, 2); + let hidden_idx_span = self.synthetic_span(for_span, 3); + let source_free_span = self.synthetic_span(for_span, 4); + let len_call_span = self.synthetic_span(for_span, 5); + let get_call_span = self.synthetic_span(for_span, 6); + let try_span = self.synthetic_span(for_span, 7); + let else_span = self.synthetic_span(for_span, 8); + let zero_span = self.synthetic_span(for_span, 9); + + let hidden_source = self.synthetic_ident("__for_source", hidden_source_span); + let hidden_idx = self.synthetic_ident("__for_idx", hidden_idx_span); + let hidden_len = self.synthetic_ident("__for_len", hidden_len_span); + let hidden_idx_expr = self.ident_expr(&hidden_idx); + let hidden_len_expr = self.ident_expr(&hidden_len); + let (source_expr, mut setup_stmts) = if source.to_path().is_some() { + (source, Vec::new()) + } else { + let 
hidden_source_expr = self.ident_expr(&hidden_source); + let source_stmt = Stmt::Let(LetStmt { + name: hidden_source.clone(), + ty: None, + expr: source, + span: hidden_source_span, + }); + let free_expr = + self.method_call_expr(hidden_source_expr.clone(), "free", Vec::new(), source_free_span); + let free_stmt = Stmt::Defer(DeferStmt { + expr: free_expr, + span: source_free_span, + }); + (hidden_source_expr, vec![source_stmt, free_stmt]) + }; + + let len_stmt = Stmt::Let(LetStmt { + name: hidden_len.clone(), + ty: None, + expr: self.method_call_expr(source_expr.clone(), "len", Vec::new(), len_call_span), + span: hidden_len_span, + }); + + let get_stmt = Stmt::Let(self.desugar_try_let( + try_span, + item_binding, + None, + self.method_call_expr( + source_expr, + "get", + vec![hidden_idx_expr.clone()], + get_call_span, + ), + None, + Block { + stmts: Vec::new(), + span: else_span, + }, + )); + + let mut loop_stmts = Vec::new(); + if let Some(index_ident) = index_binding { + loop_stmts.push(Stmt::Let(LetStmt { + name: index_ident.clone(), + ty: None, + expr: hidden_idx_expr.clone(), + span: index_ident.span, + })); + } + loop_stmts.push(get_stmt); + loop_stmts.extend(body.stmts); + + let loop_body = Block { + stmts: loop_stmts, + span: body.span, + }; + + let range_stmt = Stmt::For(ForStmt { + var: hidden_idx, + start: Expr::Literal(LiteralExpr { + value: Literal::Int(0), + span: zero_span, + }), + end: hidden_len_expr, + body: loop_body, + span: for_span, + }); + + setup_stmts.push(len_stmt); + setup_stmts.push(range_stmt); + + let then_block = Block { stmts: setup_stmts, span: for_span }; + + Ok(Stmt::If(IfStmt { + cond: Expr::Literal(LiteralExpr { + value: Literal::Bool(true), + span: for_span, + }), + then_block, + else_block: None, + span: for_span, + })) + } + + fn synthetic_ident(&self, prefix: &str, span: Span) -> Ident { + Spanned::new(format!("{prefix}_{}", span.start), span) + } + + fn synthetic_span(&self, base: Span, offset: usize) -> Span { + let point = 
base.start.saturating_add(offset); + Span::new(point, point) + } + + fn ident_expr(&self, ident: &Ident) -> Expr { + Expr::Path(Path { + segments: vec![ident.clone()], + span: ident.span, + }) + } + + fn method_call_expr(&self, receiver: Expr, method: &str, args: Vec, span: Span) -> Expr { + Expr::MethodCall(MethodCallExpr { + receiver: Box::new(receiver), + method: Spanned::new(method.to_string(), span), + type_args: Vec::new(), + args, + span, + }) + } + fn parse_path(&mut self) -> Result { let first = self.expect_ident()?; let start = first.span.start; diff --git a/capc/tests/run.rs b/capc/tests/run.rs index 8b425d5..c7432ca 100644 --- a/capc/tests/run.rs +++ b/capc/tests/run.rs @@ -211,6 +211,34 @@ fn run_defer_free() { assert!(stdout.contains("defer free ok"), "stdout was: {stdout:?}"); } +#[test] +fn run_for_each() { + let out_dir = make_out_dir("for_each"); + let out_dir = out_dir.to_str().expect("utf8 out dir"); + let (code, stdout, _stderr) = + run_capc(&["run", "--out-dir", out_dir, "tests/programs/for_each.cap"]); + assert_eq!(code, 0); + assert!(stdout.contains("for each ok"), "stdout was: {stdout:?}"); +} + +#[test] +fn run_for_each_complex_source() { + let out_dir = make_out_dir("for_each_complex_source"); + let out_dir = out_dir.to_str().expect("utf8 out dir"); + let (code, stdout, _stderr) = run_capc(&[ + "run", + "--out-dir", + out_dir, + "tests/programs/for_each_complex_source.cap", + ]); + assert_eq!(code, 0); + assert_eq!(stdout.matches("build").count(), 1, "stdout was: {stdout:?}"); + assert!( + stdout.contains("for each complex ok"), + "stdout was: {stdout:?}" + ); +} + #[test] fn run_match_expr() { let out_dir = make_out_dir("match_expr"); diff --git a/capc/tests/typecheck.rs b/capc/tests/typecheck.rs index 95fa292..f390ec6 100644 --- a/capc/tests/typecheck.rs +++ b/capc/tests/typecheck.rs @@ -116,6 +116,22 @@ fn typecheck_defer_free_ok() { type_check_program(&module, &stdlib, &[]).expect("typecheck module"); } +#[test] +fn 
typecheck_for_each_ok() { + let source = load_program("for_each.cap"); + let module = parse_module(&source).expect("parse module"); + let stdlib = load_stdlib().expect("load stdlib"); + type_check_program(&module, &stdlib, &[]).expect("typecheck module"); +} + +#[test] +fn typecheck_for_each_complex_source_ok() { + let source = load_program("for_each_complex_source.cap"); + let module = parse_module(&source).expect("parse module"); + let stdlib = load_stdlib().expect("load stdlib"); + type_check_program(&module, &stdlib, &[]).expect("typecheck module"); +} + #[test] fn typecheck_fs_close_ok() { let source = load_program("should_pass_fs_close.cap"); diff --git a/docs/TUTORIAL.md b/docs/TUTORIAL.md index 11b37b2..46abc99 100644 --- a/docs/TUTORIAL.md +++ b/docs/TUTORIAL.md @@ -268,6 +268,27 @@ Borrow-lite is intentionally conservative. In most public APIs, the important case is a short-lived borrowed parameter or receiver on a resource/capability type. +For loops support both ranges and borrowed `Vec` iteration: + +```cap +for i in 0..5 { + c.println_i32(i) +} + +for item in values { + c.println_i32(item) +} + +for i, item in values { + c.print_i32(i) + c.print(": ") + c.println_i32(item) +} +``` + +Borrowed `Vec` iteration also accepts complex expressions. The source is +evaluated once before the loop body runs. + ## 9) Memory model Capable has explicit memory management. Owned heap types must be freed. 
diff --git a/examples/hashmap_demo/hashmap_demo.cap b/examples/hashmap_demo/hashmap_demo.cap index e03549a..3a3d52a 100644 --- a/examples/hashmap_demo/hashmap_demo.cap +++ b/examples/hashmap_demo/hashmap_demo.cap @@ -465,9 +465,7 @@ fn run_demo(c: Console) -> unit { try test_vals.push(320) else { panic() } // Insert all entries - let num_entries = test_keys.len() - for i in 0..num_entries { - let k = test_keys.get_or(i, 0) + for i, k in test_keys { let v = test_vals.get_or(i, 0) match (hashmap_insert(map, k, v)) { Ok(new_map) => { @@ -497,8 +495,7 @@ fn run_demo(c: Console) -> unit { c.println("Looking up entries...") // Look up each key - for i in 0..num_entries { - let k = test_keys.get_or(i, 0) + for k in test_keys { c.print(" get(") c.print_i32(k) c.print(") = ") @@ -540,9 +537,7 @@ fn run_demo(c: Console) -> unit { try missing.push(999) else { panic() } try missing.push(12345) else { panic() } - let num_missing = missing.len() - for i in 0..num_missing { - let k = missing.get_or(i, 0) + for k in missing { c.print(" contains(") c.print_i32(k) c.print(") = ") diff --git a/examples/sort/sort.cap b/examples/sort/sort.cap index ccd08ca..4ad0a20 100644 --- a/examples/sort/sort.cap +++ b/examples/sort/sort.cap @@ -78,8 +78,7 @@ fn run(c: Console, input: Stdin) -> Result { sort_indices(lines, indices) // Print lines in sorted order (skip empty lines) - for i in 0..n { - let idx = indices.get_or(i, 0) + for idx in indices { let line = lines.get_or(idx, "") if (line.len() > 0) { c.println(line) diff --git a/examples/uniq/uniq.cap b/examples/uniq/uniq.cap index a49917d..622b225 100644 --- a/examples/uniq/uniq.cap +++ b/examples/uniq/uniq.cap @@ -19,10 +19,8 @@ fn run(c: Console, input: Stdin) -> Result { let contents = input.read_to_string()? 
let lines = contents.lines_view() defer lines.free() - let n = lines.len() - for i in 0..n { + for i, line in lines { if (should_print(lines, i)) { - let line = lines.get_or(i, "") c.println(line) } } diff --git a/tests/programs/for_each.cap b/tests/programs/for_each.cap new file mode 100644 index 0000000..9c90c37 --- /dev/null +++ b/tests/programs/for_each.cap @@ -0,0 +1,38 @@ +package safe +module for_each +use sys::system +use sys::vec + +pub fn main(rc: RootCap) -> i32 { + let c = rc.mint_console() + let values = vec::new() + defer values.free() + + try values.push(3) else { + c.assert(false) + return 1 + } + try values.push(5) else { + c.assert(false) + return 1 + } + try values.push(8) else { + c.assert(false) + return 1 + } + + let sum = 0 + for item in values { + sum = sum + item + } + c.assert(sum == 16) + + let weighted = 0 + for i, item in values { + weighted = weighted + (i * item) + } + c.assert(weighted == 21) + + c.println("for each ok") + return 0 +} diff --git a/tests/programs/for_each_complex_source.cap b/tests/programs/for_each_complex_source.cap new file mode 100644 index 0000000..390e239 --- /dev/null +++ b/tests/programs/for_each_complex_source.cap @@ -0,0 +1,31 @@ +package safe +module for_each_complex_source +use sys::system +use sys::console +use sys::vec + +fn build(c: Console) -> Vec { + c.println("build") + let values = vec::new() + try values.push(2) else { + panic() + } + try values.push(4) else { + panic() + } + try values.push(7) else { + panic() + } + return values +} + +pub fn main(rc: RootCap) -> i32 { + let c = rc.mint_console() + let sum = 0 + for item in build(c) { + sum = sum + item + } + c.assert(sum == 13) + c.println("for each complex ok") + return 0 +} From 02cb89e40b1e8f8110f44550e2c1acf44bafb6fa Mon Sep 17 00:00:00 2001 From: Jordan Mecom Date: Wed, 25 Mar 2026 13:42:19 -0700 Subject: [PATCH 08/17] Clean up compiler phase boundaries --- COMPILER_CLEANUP.md | 349 +++++++ capc/src/ast.rs | 92 ++ capc/src/codegen/emit.rs | 
1148 +---------------------- capc/src/codegen/emit/match_lowering.rs | 573 +++++++++++ capc/src/codegen/emit/runtime.rs | 585 ++++++++++++ capc/src/codegen/mod.rs | 3 +- capc/src/desugar.rs | 772 +++++++++++++++ capc/src/driver.rs | 194 ++++ capc/src/lib.rs | 4 + capc/src/main.rs | 218 +---- capc/src/parser.rs | 450 ++------- capc/src/runtime_intrinsics.rs | 76 ++ capc/src/typeck/check.rs | 914 ++---------------- capc/src/typeck/infer.rs | 159 ++++ capc/src/typeck/kinds.rs | 282 ++++++ capc/src/typeck/lower.rs | 92 +- capc/src/typeck/mod.rs | 1094 ++------------------- capc/src/typeck/moveck.rs | 237 +++++ capc/src/typeck/patterns.rs | 128 +++ capc/src/typeck/resolve.rs | 591 ++++++++++++ capc/src/typeck/safety.rs | 300 ++++++ capc/src/typeck/type_params.rs | 92 ++ capc/tests/parser.rs | 33 +- stdlib/sys/option.cap | 22 +- stdlib/sys/path.cap | 31 +- stdlib/sys/string.cap | 140 +-- stdlib/sys/vec.cap | 4 +- 27 files changed, 4890 insertions(+), 3693 deletions(-) create mode 100644 COMPILER_CLEANUP.md create mode 100644 capc/src/codegen/emit/match_lowering.rs create mode 100644 capc/src/codegen/emit/runtime.rs create mode 100644 capc/src/desugar.rs create mode 100644 capc/src/driver.rs create mode 100644 capc/src/runtime_intrinsics.rs create mode 100644 capc/src/typeck/infer.rs create mode 100644 capc/src/typeck/kinds.rs create mode 100644 capc/src/typeck/moveck.rs create mode 100644 capc/src/typeck/patterns.rs create mode 100644 capc/src/typeck/resolve.rs create mode 100644 capc/src/typeck/safety.rs create mode 100644 capc/src/typeck/type_params.rs diff --git a/COMPILER_CLEANUP.md b/COMPILER_CLEANUP.md new file mode 100644 index 0000000..2c58173 --- /dev/null +++ b/COMPILER_CLEANUP.md @@ -0,0 +1,349 @@ +# Compiler Cleanup Plan + +This document is a focused cleanup plan for `capc/`. + +The compiler is not junk. The overall pass structure is reasonable, the code is +mostly direct, and the test suite is catching real regressions. 
But the +implementation has reached the point where adding more language features will +keep making the compiler denser and more accidental unless we clean up the +phase boundaries. + +This plan is intentionally pragmatic. It is not a rewrite-from-scratch plan. + +## Progress + +- [x] Phase 1: Stable expression identity +- [x] Phase 2: Explicit desugar pass +- [x] Phase 3: Split type checking by responsibility +- [x] Phase 4: Split codegen emission +- [x] Phase 5: Centralize the CLI/compiler pipeline + +### Completed So Far + +- Added stable `ExprId` tracking to AST expressions and switched typed-expression + tables from `Span` keys to `ExprId`. +- Added a dedicated `desugar` pass and moved parser-side lowering of + `let ... else`, `try`, and borrowed `for` sugar out of `parser.rs`. +- Removed lowering's type-check fallback path and the `allow_type_fallback` + escape hatch. Missing type metadata is now an internal compiler bug again. +- Kept parser snapshots stable by redacting internal expression IDs from + snapshot output instead of baking those IDs into the public AST snapshots. +- Split `typeck` support logic into focused modules: + - `resolve.rs` + - `kinds.rs` + - `safety.rs` + - `moveck.rs` + - `infer.rs` + - `patterns.rs` + - `type_params.rs` +- Shrunk `typeck/mod.rs` down to orchestration and data definitions and moved + the high-churn helper logic out of `check.rs`. +- Split codegen emission by concern: + - `codegen/emit.rs` remains the coordinator + - `codegen/emit/match_lowering.rs` owns match-expression and match-statement lowering + - `codegen/emit/runtime.rs` owns runtime-wrapper and unsafe-pointer emission +- Extended match-expression lowering to support `Result`-shaped values instead + of only `unit` and single scalars. 
+- Added a dedicated `driver.rs` pipeline module that owns: + - entry loading + - stdlib/user module graph loading + - safe-only enforcement + - type checking + - object build and executable link steps +- Removed the duplicated parse/load/check/build orchestration from `main.rs` + and centralized tool resolution for `cargo`/`rustc`. +- Tightened stdlib handling: + - runtime-backed stdlib stubs are recognized explicitly + - their fake source bodies are no longer type-checked or lowered as real code + - helper stdlib modules are now checked on the same pipeline as user code +- Fixed several previously hidden stdlib issues that surfaced once stdlib was + type-checked consistently (`sys.option`, `sys.path`, `sys.string`, `sys.vec`). + +## Goals + +- Make compiler phases easier to reason about. +- Remove brittle implementation techniques that cause accidental regressions. +- Shrink the blast radius of language changes. +- Keep the language behavior stable while improving internal structure. + +## Non-Goals + +- Rewriting the compiler from scratch. +- Changing the language surface as part of cleanup. +- Expanding traits, generics, or remote capability support during cleanup. + +## Current Assessment + +The main issues are: + +1. Typed expressions are keyed by `Span`, not stable node identity. +2. The parser performs semantic desugaring and now also injects hidden resource + management. +3. Type checking and lowering leak into each other. +4. A few files are now too large to evolve safely. +5. Some semantic logic is duplicated across phases. +6. The CLI/compiler driver pipeline is repetitive. + +The most important concrete example is the span-keyed type table. We already hit +this while implementing borrowed `for` iteration: synthetic expressions that +shared spans collided in the type table and produced wrong lowering behavior. + +## Priority Order + +Do these in order: + +1. Replace span-keyed typing with stable expression identity. +2. 
Introduce an explicit desugar pass after parsing. +3. Split `typeck` into smaller, cleaner submodules. +4. Split `codegen/emit.rs` into focused emission modules. +5. Centralize the CLI/compiler pipeline. + +## Phase 1: Stable Expression Identity + +This is the highest-value cleanup. + +### Problem + +Today expression typing is recorded as: + +- `TypeTable { expr_types: HashMap }` + +That is brittle because: + +- synthetic expressions can share spans +- different expressions can accidentally collide +- later phases have to depend on exact span construction discipline + +This is the wrong abstraction. + +### Plan + +- Introduce `ExprId` and `PatternId` or a typed AST node identity equivalent. +- Assign IDs during parsing or in a dedicated AST annotation pass. +- Change the type recorder to key on `ExprId` instead of `Span`. +- Make lowering consume typed expression metadata by ID. +- Remove the need for span-based type lookups entirely. + +### Follow-on Cleanup + +- Delete `allow_type_fallback` in lowering. +- Delete the fallback path that re-runs type inference during lowering. +- Make missing typed-expression data a hard internal compiler bug. + +### Success Criteria + +- No compiler phase relies on `Span` as expression identity. +- Synthetic desugaring can freely create nodes without worrying about span + collisions. +- Lowering does not call back into expression checking to recover types. + +## Phase 2: Add an Explicit Desugar Pass + +### Problem + +The parser currently does more than parse syntax. It also lowers: + +- `let ... else` +- `try let` +- `try expr else` +- borrowed `for` iteration + +That is workable, but it means the parser now owns semantic rewrites and hidden +implementation details like synthetic bindings and hidden `defer free()`. + +### Plan + +- Keep parsing purely syntactic. +- Represent the high-level constructs directly in AST first. +- Add a `desugar` pass after parse and before type checking. +- Move all syntax sugar lowering there. 
+ +### What Belongs in Desugaring + +- `let ... else` +- `try let` +- `try expr else` +- borrowed `for item in vec` +- indexed `for i, item in vec` + +### Why This Helps + +- parser gets simpler +- lowering logic is centralized +- desugaring becomes independently testable +- future sugar features stop bloating `parser.rs` + +### Success Criteria + +- `parser.rs` only parses concrete syntax into AST nodes. +- synthetic names and hidden cleanup are created in the desugar pass. +- parser tests and desugar tests are separate. + +## Phase 3: Split Type Checking by Responsibility + +### Problem + +`typeck/check.rs` and `typeck/mod.rs` are carrying too much mixed +responsibility: + +- type resolution +- package safety validation +- move checking +- expression typing +- statement typing +- pattern binding +- impl desugaring support +- assorted helper logic + +This is manageable now, but it is not clean. + +### Plan + +Refactor `typeck` into something closer to: + +- `typeck/mod.rs` + - public entry points only +- `typeck/types.rs` + - `Ty`, builtins, type helpers +- `typeck/resolve.rs` + - path/type/trait resolution helpers +- `typeck/safety.rs` + - safe package validation and import safety rules +- `typeck/moveck.rs` + - move state, branch merge rules, linear consumption checks +- `typeck/patterns.rs` + - pattern binding and pattern typing +- `typeck/expr.rs` + - expression checking +- `typeck/stmt.rs` + - statement/block checking + +### Immediate Refactors + +- Pull duplicated helper logic into shared helpers. +- Stop duplicating path-base detection helpers in checking and lowering. +- Move enum type-argument inference helpers into one dedicated place. + +### Success Criteria + +- `typeck/mod.rs` is mostly orchestration and data definitions. +- expression and statement logic are no longer in one multi-thousand-line file. +- move-checking rules are locally understandable. 
+ +## Phase 4: Split Codegen Emission + +### Problem + +`codegen/emit.rs` is now the biggest file in the compiler and handles too many +distinct concerns: + +- statement emission +- expression emission +- control-flow lowering +- match/result lowering +- local storage +- arithmetic traps +- defer handling + +### Plan + +Refactor codegen into focused files, for example: + +- `codegen/emit_expr.rs` +- `codegen/emit_stmt.rs` +- `codegen/emit_control.rs` +- `codegen/emit_match.rs` +- `codegen/emit_locals.rs` +- `codegen/emit_defer.rs` + +The exact split matters less than separating concerns. + +### Success Criteria + +- `emit.rs` becomes a thin coordinator or disappears. +- result/match lowering is isolated. +- local storage and defer handling are isolated. +- control-flow bugs no longer require editing one huge file. + +## Phase 5: Centralize the Driver Pipeline + +### Problem + +`main.rs` repeats parse/load/check/build orchestration across commands. + +That makes small behavior changes annoying and increases the chance that +commands drift. + +### Plan + +- Introduce a shared pipeline API in the library, for example: + - parse entry + - validate module path + - load stdlib/user modules + - enforce safe-only if requested + - type-check + - build object + - link/run +- Make CLI commands thin wrappers over this pipeline. + +### Success Criteria + +- `main.rs` becomes mostly CLI argument handling. +- parse/check/build/run all share the same pipeline functions. +- error decoration is more consistent across commands. + +## Cross-Cutting Rules + +While doing this cleanup: + +- Do not rewrite semantics casually. +- Keep tests green at each phase. +- Add targeted regression tests for every internal cleanup that changes a code + path. +- Prefer extracting shared helpers before changing behavior. +- Avoid introducing new syntax/features during compiler cleanup. 
+ +## Recommended Sequence of Work + +### Step 1 + +Add stable `ExprId` support and convert typed expression recording to use it. + +### Step 2 + +Move current parser-side sugar lowering into a dedicated desugar pass. + +### Step 3 + +Split `typeck/check.rs` and move resolution helpers out of the giant files. + +### Step 4 + +Split `codegen/emit.rs`. + +### Step 5 + +Clean up the CLI pipeline. + +## What Not to Do + +- Do not start with file splitting alone. Splitting files without fixing + span-keyed typing and phase coupling will mostly create more files with the + same design problems. +- Do not add more syntax sugar before the desugar pass exists. +- Do not rewrite the compiler in a new architecture unless the current one + proves fundamentally unworkable. It has not. + +## End State + +If this cleanup succeeds, the compiler should look like this: + +- parser parses syntax only +- desugar rewrites sugar only +- type checking owns semantic validation only +- lowering consumes typed nodes without re-inferring them +- codegen emits from HIR without giant monolithic files +- CLI commands share one pipeline + +That is enough cleanup to keep evolving the language without the compiler +turning into a pile of accidental invariants. 
diff --git a/capc/src/ast.rs b/capc/src/ast.rs index 5914800..9fea3d1 100644 --- a/capc/src/ast.rs +++ b/capc/src/ast.rs @@ -24,6 +24,9 @@ impl Spanned { } } +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct ExprId(pub u32); + pub type Ident = Spanned; #[derive(Debug, Clone, PartialEq, Eq)] @@ -168,6 +171,9 @@ pub struct Block { #[derive(Debug, Clone, PartialEq, Eq)] pub enum Stmt { Let(LetStmt), + LetElse(LetElseStmt), + TryLet(TryLetStmt), + TryElse(TryElseStmt), Assign(AssignStmt), Defer(DeferStmt), Return(ReturnStmt), @@ -176,6 +182,7 @@ pub enum Stmt { If(IfStmt), While(WhileStmt), For(ForStmt), + ForEach(ForEachStmt), Expr(ExprStmt), } @@ -187,6 +194,32 @@ pub struct LetStmt { pub span: Span, } +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct LetElseStmt { + pub pattern: Pattern, + pub expr: Expr, + pub else_block: Block, + pub span: Span, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct TryLetStmt { + pub name: Ident, + pub ty: Option, + pub expr: Expr, + pub err_binding: Option, + pub else_block: Block, + pub span: Span, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct TryElseStmt { + pub expr: Expr, + pub err_binding: Option, + pub else_block: Block, + pub span: Span, +} + #[derive(Debug, Clone, PartialEq, Eq)] pub struct ReturnStmt { pub expr: Option, @@ -227,6 +260,15 @@ pub struct ForStmt { pub span: Span, } +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ForEachStmt { + pub index: Option, + pub item: Ident, + pub source: Expr, + pub body: Block, + pub span: Span, +} + #[derive(Debug, Clone, PartialEq, Eq)] pub struct ExprStmt { pub expr: Expr, @@ -250,6 +292,9 @@ impl Stmt { pub fn span(&self) -> Span { match self { Stmt::Let(s) => s.span, + Stmt::LetElse(s) => s.span, + Stmt::TryLet(s) => s.span, + Stmt::TryElse(s) => s.span, Stmt::Assign(s) => s.span, Stmt::Defer(s) => s.span, Stmt::Return(s) => s.span, @@ -258,6 +303,7 @@ impl Stmt { Stmt::If(s) => s.span, Stmt::While(s) => s.span, Stmt::For(s) => s.span, + 
Stmt::ForEach(s) => s.span, Stmt::Expr(s) => s.span, } } @@ -281,6 +327,7 @@ pub enum Expr { #[derive(Debug, Clone, PartialEq, Eq)] pub struct StructLiteralExpr { + pub id: ExprId, pub path: Path, pub type_args: Vec, pub fields: Vec, @@ -296,12 +343,14 @@ pub struct StructLiteralField { #[derive(Debug, Clone, PartialEq, Eq)] pub struct LiteralExpr { + pub id: ExprId, pub value: Literal, pub span: Span, } #[derive(Debug, Clone, PartialEq, Eq)] pub struct GroupingExpr { + pub id: ExprId, pub expr: Box, pub span: Span, } @@ -317,6 +366,7 @@ pub enum Literal { #[derive(Debug, Clone, PartialEq, Eq)] pub struct Path { + pub id: ExprId, pub segments: Vec, pub span: Span, } @@ -337,6 +387,7 @@ impl fmt::Display for Path { #[derive(Debug, Clone, PartialEq, Eq)] pub struct CallExpr { + pub id: ExprId, pub callee: Box, pub type_args: Vec, pub args: Vec, @@ -345,6 +396,7 @@ pub struct CallExpr { #[derive(Debug, Clone, PartialEq, Eq)] pub struct FieldAccessExpr { + pub id: ExprId, pub object: Box, pub field: Ident, pub span: Span, @@ -352,6 +404,7 @@ pub struct FieldAccessExpr { #[derive(Debug, Clone, PartialEq, Eq)] pub struct IndexExpr { + pub id: ExprId, pub object: Box, pub index: Box, pub span: Span, @@ -359,6 +412,7 @@ pub struct IndexExpr { #[derive(Debug, Clone, PartialEq, Eq)] pub struct MethodCallExpr { + pub id: ExprId, pub receiver: Box, pub method: Ident, pub type_args: Vec, @@ -368,6 +422,7 @@ pub struct MethodCallExpr { #[derive(Debug, Clone, PartialEq, Eq)] pub struct UnaryExpr { + pub id: ExprId, pub op: UnaryOp, pub expr: Box, pub span: Span, @@ -382,6 +437,7 @@ pub enum UnaryOp { #[derive(Debug, Clone, PartialEq, Eq)] pub struct BinaryExpr { + pub id: ExprId, pub op: BinaryOp, pub left: Box, pub right: Box, @@ -390,6 +446,7 @@ pub struct BinaryExpr { #[derive(Debug, Clone, PartialEq, Eq)] pub struct TryExpr { + pub id: ExprId, pub expr: Box, pub span: Span, } @@ -418,6 +475,7 @@ pub enum BinaryOp { #[derive(Debug, Clone, PartialEq, Eq)] pub struct MatchExpr { 
+ pub id: ExprId, pub expr: Box, pub arms: Vec, pub span: Span, @@ -462,6 +520,40 @@ impl Type { } impl Expr { + pub fn span(&self) -> Span { + match self { + Expr::Literal(lit) => lit.span, + Expr::Path(path) => path.span, + Expr::Call(call) => call.span, + Expr::MethodCall(method_call) => method_call.span, + Expr::FieldAccess(field) => field.span, + Expr::Index(index) => index.span, + Expr::StructLiteral(lit) => lit.span, + Expr::Unary(unary) => unary.span, + Expr::Binary(binary) => binary.span, + Expr::Match(m) => m.span, + Expr::Try(try_expr) => try_expr.span, + Expr::Grouping(g) => g.span, + } + } + + pub fn id(&self) -> ExprId { + match self { + Expr::Literal(lit) => lit.id, + Expr::Path(path) => path.id, + Expr::Call(call) => call.id, + Expr::MethodCall(method_call) => method_call.id, + Expr::FieldAccess(field) => field.id, + Expr::Index(index) => index.id, + Expr::StructLiteral(lit) => lit.id, + Expr::Unary(unary) => unary.id, + Expr::Binary(binary) => binary.id, + Expr::Match(m) => m.id, + Expr::Try(try_expr) => try_expr.id, + Expr::Grouping(g) => g.id, + } + } + /// Converts an expression to a Path if possible. /// This handles converting FieldAccess chains and single Paths. /// Used for resolving module-qualified names and enum variants. diff --git a/capc/src/codegen/emit.rs b/capc/src/codegen/emit.rs index 6b74862..5f09d7a 100644 --- a/capc/src/codegen/emit.rs +++ b/capc/src/codegen/emit.rs @@ -3,6 +3,9 @@ //! This module is intentionally focused on expression/statement lowering and //! ABI-adjacent helper routines used by the main codegen entry point. 
+mod match_lowering; +mod runtime; + use std::collections::HashMap; use cranelift_codegen::ir::condcodes::IntCC; @@ -18,10 +21,12 @@ use super::abi_quirks; use super::layout::{align_to, resolve_struct_layout, type_layout_from_index}; use super::sig_to_clif; use super::{ - CodegenError, EnumIndex, Flow, FnInfo, LocalValue, ResultKind, ResultShape, StructLayout, - StructLayoutIndex, TypeLayout, ValueRepr, + CodegenError, EnumIndex, Flow, FnInfo, LocalValue, StructLayout, StructLayoutIndex, + TypeLayout, ValueRepr, }; +pub(super) use runtime::emit_runtime_wrapper_call; + /// Target blocks for break/continue inside a loop. #[derive(Copy, Clone, Debug)] pub(super) struct LoopTarget { @@ -517,7 +522,7 @@ fn emit_hir_stmt_inner( match_expr.result_ty.ty, crate::typeck::Ty::Builtin(crate::typeck::BuiltinType::Unit) ) { - let diverged = emit_hir_match_stmt( + let diverged = match_lowering::emit_hir_match_stmt( builder, match_expr, locals, @@ -1341,7 +1346,7 @@ fn emit_hir_expr_inner( }; if module_path == "sys.unsafe_ptr" { - if let Some(value) = emit_unsafe_ptr_call( + if let Some(value) = runtime::emit_unsafe_ptr_call( builder, module, call, @@ -1362,7 +1367,7 @@ fn emit_hir_expr_inner( .get(&key) .ok_or_else(|| CodegenError::UnknownFunction(key.clone()))? .clone(); - ensure_abi_sig_handled(&info)?; + runtime::ensure_abi_sig_handled(&info)?; let abi_sig = info.abi_sig.as_ref().unwrap_or(&info.sig); // Emit arguments @@ -1811,7 +1816,7 @@ fn emit_hir_expr_inner( // Note: divergence handling is done at HirStmt::Expr level. // Here we just emit the match and return Unit. 
let mut temp_defers = DeferStack::new(); - let _diverged = emit_hir_match_stmt( + let _diverged = match_lowering::emit_hir_match_stmt( builder, match_expr, locals, @@ -1826,7 +1831,7 @@ fn emit_hir_expr_inner( )?; Ok(ValueRepr::Unit) } else { - emit_hir_match_expr( + match_lowering::emit_hir_match_expr( builder, match_expr, locals, @@ -2850,565 +2855,6 @@ fn emit_hir_short_circuit_expr( Ok(ValueRepr::Single(param)) } -/// Emit HIR match as statement (arms can contain returns, don't produce values). -/// Returns true if all paths diverged (returned/broke/continued). -fn emit_hir_match_stmt( - builder: &mut FunctionBuilder, - match_expr: &crate::hir::HirMatch, - locals: &HashMap, - fn_map: &HashMap, - enum_index: &EnumIndex, - struct_layouts: &StructLayoutIndex, - module: &mut ObjectModule, - data_counter: &mut u32, - loop_target: Option, - return_lowering: &ReturnLowering, - defer_stack: &mut DeferStack, -) -> Result { - // Emit the scrutinee expression - let value = emit_hir_expr( - builder, - &match_expr.expr, - locals, - fn_map, - enum_index, - struct_layouts, - return_lowering, - module, - data_counter, - )?; - - let (match_val, match_result) = match value.clone() { - ValueRepr::Single(v) => { - let tag = match &match_expr.expr.ty().ty { - crate::typeck::Ty::Path(name, _) if enum_index.layouts.contains_key(name) => { - builder.ins().load(ir::types::I32, MemFlags::new(), v, 0) - } - _ => v, - }; - (tag, None) - } - ValueRepr::Result { tag, ok, err } => (tag, Some((*ok, *err))), - ValueRepr::Unit => (builder.ins().iconst(ir::types::I32, 0), None), - }; - - let merge_block = builder.create_block(); - - // Create all check blocks upfront so they exist before being referenced - let num_arms = match_expr.arms.len(); - let mut check_blocks: Vec = Vec::new(); - let mut arm_blocks: Vec = Vec::new(); - for i in 0..num_arms { - arm_blocks.push(builder.create_block()); - if i + 1 < num_arms { - check_blocks.push(builder.create_block()); - } - } - // For the last arm, the 
"next" block is merge_block - check_blocks.push(merge_block); - - let mut current_block = builder - .current_block() - .ok_or_else(|| CodegenError::Codegen("no current block for match".to_string()))?; - - let mut any_arm_continues = false; - - for (idx, arm) in match_expr.arms.iter().enumerate() { - let arm_block = arm_blocks[idx]; - let next_block = check_blocks[idx]; - - if idx > 0 { - builder.switch_to_block(current_block); - } - let cond = hir_match_pattern_cond( - builder, - &arm.pattern, - match_val, - match_expr.expr.ty(), - enum_index, - )?; - builder.ins().brif(cond, arm_block, &[], next_block, &[]); - - builder.switch_to_block(arm_block); - - let mut arm_locals = locals.clone(); - let mut arm_defers = defer_stack.clone(); - arm_defers.push_block_scope(); - hir_bind_match_pattern_value( - builder, - &arm.pattern, - &value, - match_result.as_ref(), - match_expr.expr.ty(), - enum_index, - struct_layouts, - module, - &mut arm_locals, - )?; - - // Emit all statements in the arm body - let mut arm_terminated = false; - for stmt in &arm.body.stmts { - let flow = emit_hir_stmt( - builder, - stmt, - &mut arm_locals, - fn_map, - enum_index, - struct_layouts, - module, - data_counter, - loop_target, - return_lowering, - &mut arm_defers, - )?; - if flow == Flow::Terminated { - arm_terminated = true; - break; - } - } - - // If the arm didn't terminate (e.g., with return), jump to merge block - if !arm_terminated { - arm_defers.emit_current_and_pop( - builder, - &arm_locals, - fn_map, - enum_index, - struct_layouts, - return_lowering, - module, - data_counter, - )?; - builder.ins().jump(merge_block, &[]); - any_arm_continues = true; - } - - current_block = next_block; - } - - // Always switch to merge_block to insert it into the layout - builder.switch_to_block(merge_block); - - // If no arm continues to merge_block, it's unreachable - add trap - // This also ensures the block has content so it's properly inserted - if !any_arm_continues { - 
builder.ins().trap(ir::TrapCode::UnreachableCodeReached); - } - - // Return true if all paths diverged (no arm continues to merge_block) - Ok(!any_arm_continues) -} - -/// Emit HIR match expression -/// Emit HIR match expression (arms produce a value). -fn emit_hir_match_expr( - builder: &mut FunctionBuilder, - match_expr: &crate::hir::HirMatch, - locals: &HashMap, - fn_map: &HashMap, - enum_index: &EnumIndex, - struct_layouts: &StructLayoutIndex, - return_lowering: &ReturnLowering, - module: &mut ObjectModule, - data_counter: &mut u32, -) -> Result { - use crate::hir::HirStmt; - - // Emit the scrutinee expression - let value = emit_hir_expr( - builder, - &match_expr.expr, - locals, - fn_map, - enum_index, - struct_layouts, - return_lowering, - module, - data_counter, - )?; - - let (match_val, match_result) = match value.clone() { - ValueRepr::Single(v) => { - let tag = match &match_expr.expr.ty().ty { - crate::typeck::Ty::Path(name, _) if enum_index.layouts.contains_key(name) => { - builder.ins().load(ir::types::I32, MemFlags::new(), v, 0) - } - _ => v, - }; - (tag, None) - } - ValueRepr::Result { tag, ok, err } => (tag, Some((*ok, *err))), - ValueRepr::Unit => (builder.ins().iconst(ir::types::I32, 0), None), - }; - - let merge_block = builder.create_block(); - let mut current_block = builder - .current_block() - .ok_or_else(|| CodegenError::Codegen("no current block for match".to_string()))?; - - let mut result_shape: Option = None; - - for (idx, arm) in match_expr.arms.iter().enumerate() { - let is_last = idx + 1 == match_expr.arms.len(); - let arm_block = builder.create_block(); - let next_block = if is_last { - merge_block - } else { - builder.create_block() - }; - - if idx > 0 { - builder.switch_to_block(current_block); - } - let cond = hir_match_pattern_cond( - builder, - &arm.pattern, - match_val, - match_expr.expr.ty(), - enum_index, - )?; - builder.ins().brif(cond, arm_block, &[], next_block, &[]); - - builder.switch_to_block(arm_block); - let mut 
arm_locals = locals.clone(); - let mut arm_defers = DeferStack::new(); - arm_defers.push_block_scope(); - hir_bind_match_pattern_value( - builder, - &arm.pattern, - &value, - match_result.as_ref(), - match_expr.expr.ty(), - enum_index, - struct_layouts, - module, - &mut arm_locals, - )?; - - // Emit the arm body statements - let stmts = &arm.body.stmts; - let Some((last, prefix)) = stmts.split_last() else { - return Err(CodegenError::Unsupported("empty match arm".to_string())); - }; - - let mut prefix_terminated = false; - for stmt in prefix { - let flow = emit_hir_stmt( - builder, - stmt, - &mut arm_locals, - fn_map, - enum_index, - struct_layouts, - module, - data_counter, - None, // break/continue not allowed in value-producing match - return_lowering, - &mut arm_defers, - )?; - if flow == Flow::Terminated { - prefix_terminated = true; - break; - } - } - - // If prefix terminated, we can't emit the final expression - if prefix_terminated { - builder.seal_block(arm_block); - if is_last { - break; - } - current_block = next_block; - continue; - } - - // Last statement should be an expression - let (arm_value, arm_diverges) = match last { - HirStmt::Expr(expr_stmt) => { - // Check if this arm ends with a Trap - if so, it diverges - let diverges = matches!(&expr_stmt.expr, crate::hir::HirExpr::Trap(_)); - let value = emit_hir_expr( - builder, - &expr_stmt.expr, - &arm_locals, - fn_map, - enum_index, - struct_layouts, - return_lowering, - module, - data_counter, - )?; - (value, diverges) - } - _ => { - return Err(CodegenError::Unsupported( - "match arm must end with expression".to_string(), - )) - } - }; - - // If the arm diverges (e.g., with a trap), skip value storage - if arm_diverges { - builder.seal_block(arm_block); - } else { - let values = match &arm_value { - ValueRepr::Single(val) => vec![*val], - ValueRepr::Unit => vec![], - ValueRepr::Result { .. 
} => { - return Err(CodegenError::Unsupported("match result value".to_string())) - } - }; - - // Set up result shape and stack slots on first non-terminated arm - if result_shape.is_none() { - let mut types = Vec::new(); - let mut slots = Vec::new(); - for val in &values { - let ty = builder.func.dfg.value_type(*val); - let size = ty.bytes() as u32; - let slot = builder.create_sized_stack_slot(ir::StackSlotData::new( - ir::StackSlotKind::ExplicitSlot, - size.max(1), - )); - types.push(ty); - slots.push(slot); - } - result_shape = Some(ResultShape { - kind: match &arm_value { - ValueRepr::Unit => ResultKind::Unit, - ValueRepr::Single(_) => ResultKind::Single, - _ => ResultKind::Single, - }, - slots, - types, - }); - } - - // Store values to stack slots - let shape = result_shape - .as_ref() - .ok_or_else(|| CodegenError::Codegen("missing match result shape".to_string()))?; - if values.len() != shape.types.len() { - return Err(CodegenError::Unsupported( - "mismatched match arm".to_string(), - )); - } - for (idx, val) in values.iter().enumerate() { - builder.ins().stack_store(*val, shape.slots[idx], 0); - } - arm_defers.emit_current_and_pop( - builder, - &arm_locals, - fn_map, - enum_index, - struct_layouts, - return_lowering, - module, - data_counter, - )?; - builder.ins().jump(merge_block, &[]); - builder.seal_block(arm_block); - } - - if is_last { - break; - } - current_block = next_block; - } - - builder.switch_to_block(merge_block); - builder.seal_block(merge_block); - - // Load result from stack slots - let shape = result_shape - .ok_or_else(|| CodegenError::Codegen("missing match result value".to_string()))?; - let mut loaded = Vec::new(); - for (slot, ty) in shape.slots.iter().zip(shape.types.iter()) { - let addr = builder - .ins() - .stack_addr(module.isa().pointer_type(), *slot, 0); - let val = builder.ins().load(*ty, MemFlags::new(), addr, 0); - loaded.push(val); - } - - let result = match shape.kind { - ResultKind::Unit => ValueRepr::Unit, - 
ResultKind::Single => ValueRepr::Single(loaded[0]), - }; - - Ok(result) -} - -/// Compute the condition for an HIR pattern match -/// Compute the condition for an HIR pattern match. -fn hir_match_pattern_cond( - builder: &mut FunctionBuilder, - pattern: &crate::hir::HirPattern, - match_val: ir::Value, - match_ty: &crate::hir::HirType, - enum_index: &EnumIndex, -) -> Result { - use crate::hir::HirPattern; - - match pattern { - HirPattern::Wildcard | HirPattern::Binding(_) => { - // Wildcard and binding patterns always match - let one = builder.ins().iconst(ir::types::I32, 1); - Ok(builder.ins().icmp_imm(IntCC::Equal, one, 1)) - } - HirPattern::Literal(lit) => match lit { - Literal::Int(n) => { - let rhs = builder.ins().iconst(ir::types::I32, *n); - Ok(builder.ins().icmp(IntCC::Equal, match_val, rhs)) - } - Literal::U8(n) => { - let rhs = builder.ins().iconst(ir::types::I8, i64::from(*n)); - Ok(builder.ins().icmp(IntCC::Equal, match_val, rhs)) - } - Literal::Bool(b) => { - let rhs = builder.ins().iconst(ir::types::I8, if *b { 1 } else { 0 }); - Ok(builder.ins().icmp(IntCC::Equal, match_val, rhs)) - } - Literal::Unit => { - // Unit always matches unit - let one = builder.ins().iconst(ir::types::I32, 1); - Ok(builder.ins().icmp_imm(IntCC::Equal, one, 1)) - } - Literal::String(_) => Err(CodegenError::Unsupported( - "string pattern matching".to_string(), - )), - }, - HirPattern::Variant { variant_name, .. 
} => { - // Get the discriminant value for this variant - let qualified = match &match_ty.ty { - crate::typeck::Ty::Path(path, _) => path.clone(), - _ => { - return Err(CodegenError::Codegen(format!( - "enum variant pattern has non-path type: {:?}", - match_ty.ty - ))) - } - }; - - // Get the type of match_val to ensure consistent comparison - let val_ty = builder.func.dfg.value_type(match_val); - - // Special handling for Result type (built-in, not in enum_index) - if qualified == "sys.result.Result" { - let discr = match variant_name.as_str() { - "Ok" => 0i64, - "Err" => 1i64, - _ => { - return Err(CodegenError::Codegen(format!( - "unknown Result variant: {}", - variant_name - ))) - } - }; - let rhs = builder.ins().iconst(val_ty, discr); - return Ok(builder.ins().icmp(IntCC::Equal, match_val, rhs)); - } - - if let Some(variants) = enum_index.variants.get(&qualified) { - if let Some(&discr) = variants.get(variant_name) { - let rhs = builder.ins().iconst(val_ty, i64::from(discr)); - return Ok(builder.ins().icmp(IntCC::Equal, match_val, rhs)); - } - } - Err(CodegenError::Codegen(format!( - "unknown enum variant in pattern: {}.{}", - qualified, variant_name - ))) - } - } -} - -/// Bind pattern variables for HIR patterns -/// Bind pattern variables for HIR patterns. -fn hir_bind_match_pattern_value( - builder: &mut FunctionBuilder, - pattern: &crate::hir::HirPattern, - value: &ValueRepr, - result: Option<&(ValueRepr, ValueRepr)>, - match_ty: &crate::hir::HirType, - enum_index: &EnumIndex, - struct_layouts: &StructLayoutIndex, - module: &mut ObjectModule, - locals: &mut HashMap, -) -> Result<(), CodegenError> { - use crate::hir::HirPattern; - - match pattern { - HirPattern::Wildcard => Ok(()), - HirPattern::Literal(_) => Ok(()), - HirPattern::Binding(local_id) => { - // Bind the entire value to the variable - locals.insert(*local_id, store_local(builder, value.clone())); - Ok(()) - } - HirPattern::Variant { - variant_name, - binding, - .. 
- } => { - if let Some(local_id) = binding { - // Bind the inner value based on variant - if let Some((ok_val, err_val)) = result { - if variant_name == "Ok" { - locals.insert(*local_id, store_local(builder, ok_val.clone())); - } else if variant_name == "Err" { - locals.insert(*local_id, store_local(builder, err_val.clone())); - } - return Ok(()); - } - let enum_name = match &match_ty.ty { - crate::typeck::Ty::Path(path, _) => path, - _ => { - return Err(CodegenError::Unsupported( - "variant binding on non-enum".to_string(), - )) - } - }; - let Some(layout) = enum_index.layouts.get(enum_name) else { - return Err(CodegenError::Unsupported( - "variant binding without payload".to_string(), - )); - }; - let Some(payloads) = enum_index.payloads.get(enum_name) else { - return Err(CodegenError::Unsupported( - "missing enum payload info".to_string(), - )); - }; - let payload_ty = - payloads - .get(variant_name) - .cloned() - .flatten() - .ok_or_else(|| { - CodegenError::Unsupported("variant binding without payload".to_string()) - })?; - let ValueRepr::Single(base_ptr) = value else { - return Err(CodegenError::Unsupported( - "variant binding expects enum storage".to_string(), - )); - }; - let payload_val = load_value_by_ty( - builder, - *base_ptr, - layout.payload_offset, - &payload_ty, - enum_index, - struct_layouts, - module, - )?; - locals.insert(*local_id, store_local(builder, payload_val)); - } - Ok(()) - } - } -} - /// Convert a ValueRepr into a boolean condition value. fn to_b1(builder: &mut FunctionBuilder, value: ValueRepr) -> Result { match value { @@ -3754,574 +3200,6 @@ fn value_from_results( } } -// --- Runtime call emission --- -/// Emit a call to a runtime intrinsic with ABI adaptation when needed. 
-pub(super) fn emit_runtime_wrapper_call( - builder: &mut FunctionBuilder, - module: &mut ObjectModule, - info: &FnInfo, - args: Vec, - ret_ty: &crate::hir::HirType, - enum_index: &EnumIndex, - struct_layouts: &StructLayoutIndex, -) -> Result { - ensure_abi_sig_handled(info)?; - let abi_sig = info.abi_sig.as_ref().unwrap_or(&info.sig); - let mut result_out = None; - let mut sret_ptr = None; - let mut call_args = args; - - enum ResultOutSlot { - Scalar(ir::StackSlot, ir::Type, u32), - Struct(ir::Value), - } - - if info.sig.ret == AbiType::Ptr - && abi_sig.ret == AbiType::Unit - && (is_non_opaque_struct_type(ret_ty, struct_layouts) - || matches!(&ret_ty.ty, crate::typeck::Ty::Path(name, _) if enum_index.layouts.contains_key(name))) - { - let ptr_ty = module.isa().pointer_type(); - let (size, align) = - if let Some(layout) = resolve_struct_layout(&ret_ty.ty, "", &struct_layouts.layouts) { - (layout.size, layout.align) - } else if let crate::typeck::Ty::Path(name, _) = &ret_ty.ty { - let layout = enum_index - .layouts - .get(name) - .ok_or_else(|| CodegenError::Unsupported("enum layout missing".to_string()))?; - (layout.size, layout.align) - } else { - return Err(CodegenError::Unsupported( - "sret return layout missing".to_string(), - )); - }; - let align = align.max(1); - let slot_size = aligned_slot_size(size, align); - let slot = builder.create_sized_stack_slot(ir::StackSlotData::new( - ir::StackSlotKind::ExplicitSlot, - slot_size, - )); - let base_ptr = aligned_stack_addr(builder, slot, align, ptr_ty); - call_args.insert(0, base_ptr); - sret_ptr = Some(base_ptr); - } - - if let AbiType::ResultOut(ok_ty, err_ty) = &abi_sig.ret { - let ptr_ty = module.isa().pointer_type(); - let ok_slot = if **ok_ty == AbiType::Unit { - None - } else if **ok_ty == AbiType::Ptr { - let align = ptr_ty.bytes().max(1) as u32; - let slot = builder.create_sized_stack_slot(ir::StackSlotData::new( - ir::StackSlotKind::ExplicitSlot, - aligned_slot_size(ptr_ty.bytes() as u32, align), - )); - 
let addr = aligned_stack_addr(builder, slot, align, ptr_ty); - call_args.push(addr); - Some(ResultOutSlot::Struct(addr)) - } else { - let ty = value_type_for_result_out(ok_ty, ptr_ty)?; - let align = ty.bytes().max(1) as u32; - debug_assert!(align.is_power_of_two()); - let slot = builder.create_sized_stack_slot(ir::StackSlotData::new( - ir::StackSlotKind::ExplicitSlot, - aligned_slot_size(ty.bytes().max(1) as u32, align), - )); - let addr = aligned_stack_addr(builder, slot, align, ptr_ty); - call_args.push(addr); - Some(ResultOutSlot::Scalar(slot, ty, align)) - }; - let err_slot = if **err_ty == AbiType::Unit { - None - } else if **err_ty == AbiType::Ptr { - let align = ptr_ty.bytes().max(1) as u32; - let slot = builder.create_sized_stack_slot(ir::StackSlotData::new( - ir::StackSlotKind::ExplicitSlot, - aligned_slot_size(ptr_ty.bytes() as u32, align), - )); - let addr = aligned_stack_addr(builder, slot, align, ptr_ty); - call_args.push(addr); - Some(ResultOutSlot::Struct(addr)) - } else { - let ty = value_type_for_result_out(err_ty, ptr_ty)?; - let align = ty.bytes().max(1) as u32; - debug_assert!(align.is_power_of_two()); - let slot = builder.create_sized_stack_slot(ir::StackSlotData::new( - ir::StackSlotKind::ExplicitSlot, - aligned_slot_size(ty.bytes().max(1) as u32, align), - )); - let addr = aligned_stack_addr(builder, slot, align, ptr_ty); - call_args.push(addr); - Some(ResultOutSlot::Scalar(slot, ty, align)) - }; - result_out = Some((ok_slot, err_slot, ok_ty.clone(), err_ty.clone())); - } - - let sig = sig_to_clif( - abi_sig, - module.isa().pointer_type(), - module.isa().default_call_conv(), - ); - let call_symbol = info.runtime_symbol.as_deref().unwrap_or(&info.symbol); - let func_id = module - .declare_function(call_symbol, Linkage::Import, &sig) - .map_err(|err| CodegenError::Codegen(err.to_string()))?; - let local = module.declare_func_in_func(func_id, builder.func); - let call_inst = builder.ins().call(local, &call_args); - let results = 
builder.inst_results(call_inst).to_vec(); - - if abi_quirks::is_result_out(&abi_sig.ret) { - let tag = results - .get(0) - .ok_or_else(|| CodegenError::Codegen("missing result tag".to_string()))?; - let (ok_slot, err_slot, ok_ty, err_ty) = - result_out.ok_or_else(|| CodegenError::Codegen("missing result slots".to_string()))?; - let ok_val = if let Some(slot) = ok_slot { - match slot { - ResultOutSlot::Scalar(slot, ty, align) => { - let addr = - aligned_stack_addr(builder, slot, align, module.isa().pointer_type()); - let val = builder.ins().load(ty, MemFlags::new(), addr, 0); - ValueRepr::Single(val) - } - ResultOutSlot::Struct(addr) => ValueRepr::Single(addr), - } - } else { - ValueRepr::Unit - }; - let err_val = if let Some(slot) = err_slot { - match slot { - ResultOutSlot::Scalar(slot, ty, align) => { - let addr = - aligned_stack_addr(builder, slot, align, module.isa().pointer_type()); - let val = builder.ins().load(ty, MemFlags::new(), addr, 0); - ValueRepr::Single(val) - } - ResultOutSlot::Struct(addr) => ValueRepr::Single(addr), - } - } else { - ValueRepr::Unit - }; - match &info.sig.ret { - AbiType::Result(_, _) => { - return Ok(ValueRepr::Result { - tag: *tag, - ok: Box::new(ok_val), - err: Box::new(err_val), - }); - } - _ => { - return Err(CodegenError::Unsupported(format!( - "result out params for {ok_ty:?}/{err_ty:?}" - ))) - } - } - } - - if let Some(ptr) = sret_ptr { - return Ok(ValueRepr::Single(ptr)); - } - - let mut idx = 0; - value_from_results(builder, &info.sig.ret, &results, &mut idx) -} - -fn emit_unsafe_ptr_call( - builder: &mut FunctionBuilder, - module: &mut ObjectModule, - call: &crate::hir::HirCall, - locals: &HashMap, - fn_map: &HashMap, - enum_index: &EnumIndex, - struct_layouts: &StructLayoutIndex, - return_lowering: &ReturnLowering, - data_counter: &mut u32, -) -> Result, CodegenError> { - let (module_path, func_name) = match &call.callee { - crate::hir::ResolvedCallee::Function { module, name, .. 
} => (module, name), - _ => return Ok(None), - }; - if module_path != "sys.unsafe_ptr" { - return Ok(None); - } - let base_name = func_name.split("__").next().unwrap_or(func_name); - if call.type_args.len() != 1 { - return Err(CodegenError::Unsupported(format!( - "{base_name} expects one type argument" - ))); - } - let elem_ty = &call.type_args[0]; - let ptr_ty = module.isa().pointer_type(); - let elem_hir = hir_type_from_ty(elem_ty, enum_index, struct_layouts, ptr_ty)?; - let layout = type_layout_from_index(&elem_hir, struct_layouts, ptr_ty)?; - match base_name { - "sizeof" => { - let size = builder.ins().iconst(ir::types::I32, layout.size as i64); - return Ok(Some(ValueRepr::Single(size))); - } - "alignof" => { - let align = builder.ins().iconst(ir::types::I32, layout.align as i64); - return Ok(Some(ValueRepr::Single(align))); - } - "ptr_cast" | "ptr_cast_u8" => { - if call.args.len() != 1 { - return Err(CodegenError::Unsupported(format!( - "{base_name} expects (ptr)" - ))); - } - let base_ptr = match emit_hir_expr( - builder, - &call.args[0], - locals, - fn_map, - enum_index, - struct_layouts, - return_lowering, - module, - data_counter, - )? { - ValueRepr::Single(ptr) => ptr, - _ => { - return Err(CodegenError::Unsupported(format!( - "{base_name} expects a pointer value" - ))) - } - }; - return Ok(Some(ValueRepr::Single(base_ptr))); - } - "ptr_is_null" => { - if call.args.len() != 1 { - return Err(CodegenError::Unsupported( - "ptr_is_null expects (ptr)".to_string(), - )); - } - let base_ptr = match emit_hir_expr( - builder, - &call.args[0], - locals, - fn_map, - enum_index, - struct_layouts, - return_lowering, - module, - data_counter, - )? 
{ - ValueRepr::Single(ptr) => ptr, - _ => { - return Err(CodegenError::Unsupported( - "ptr_is_null expects a pointer value".to_string(), - )) - } - }; - let is_null = builder - .ins() - .icmp_imm(ir::condcodes::IntCC::Equal, base_ptr, 0); - return Ok(Some(ValueRepr::Single(is_null))); - } - "ptr_add" => { - if call.args.len() != 2 { - return Err(CodegenError::Unsupported( - "ptr_add expects (ptr, offset)".to_string(), - )); - } - let base_ptr = match emit_hir_expr( - builder, - &call.args[0], - locals, - fn_map, - enum_index, - struct_layouts, - return_lowering, - module, - data_counter, - )? { - ValueRepr::Single(ptr) => ptr, - _ => { - return Err(CodegenError::Unsupported( - "ptr_add expects a pointer value".to_string(), - )) - } - }; - let offset_val = match emit_hir_expr( - builder, - &call.args[1], - locals, - fn_map, - enum_index, - struct_layouts, - return_lowering, - module, - data_counter, - )? { - ValueRepr::Single(val) => val, - _ => { - return Err(CodegenError::Unsupported( - "ptr_add expects an i32 offset".to_string(), - )) - } - }; - let offset = if ptr_ty != ir::types::I32 { - builder.ins().sextend(ptr_ty, offset_val) - } else { - offset_val - }; - let stride = builder.ins().iconst(ptr_ty, layout.size as i64); - let byte_offset = if layout.size == 1 { - offset - } else { - builder.ins().imul(offset, stride) - }; - let addr = builder.ins().iadd(base_ptr, byte_offset); - return Ok(Some(ValueRepr::Single(addr))); - } - "ptr_read" => { - if call.args.len() != 1 { - return Err(CodegenError::Unsupported( - "ptr_read expects (ptr)".to_string(), - )); - } - let base_ptr = match emit_hir_expr( - builder, - &call.args[0], - locals, - fn_map, - enum_index, - struct_layouts, - return_lowering, - module, - data_counter, - )? 
{ - ValueRepr::Single(ptr) => ptr, - _ => { - return Err(CodegenError::Unsupported( - "ptr_read expects a pointer value".to_string(), - )) - } - }; - let value = load_value_by_ty( - builder, - base_ptr, - 0, - &elem_hir, - enum_index, - struct_layouts, - module, - )?; - return Ok(Some(value)); - } - "ptr_write" => { - if call.args.len() != 2 { - return Err(CodegenError::Unsupported( - "ptr_write expects (ptr, value)".to_string(), - )); - } - let base_ptr = match emit_hir_expr( - builder, - &call.args[0], - locals, - fn_map, - enum_index, - struct_layouts, - return_lowering, - module, - data_counter, - )? { - ValueRepr::Single(ptr) => ptr, - _ => { - return Err(CodegenError::Unsupported( - "ptr_write expects a pointer value".to_string(), - )) - } - }; - let value = emit_hir_expr( - builder, - &call.args[1], - locals, - fn_map, - enum_index, - struct_layouts, - return_lowering, - module, - data_counter, - )?; - store_value_by_ty( - builder, - base_ptr, - 0, - &elem_hir, - value, - enum_index, - struct_layouts, - module, - )?; - return Ok(Some(ValueRepr::Unit)); - } - "memcpy" | "memmove" => { - if call.args.len() != 3 { - return Err(CodegenError::Unsupported(format!( - "{base_name} expects (dst, src, count)" - ))); - } - let dst_ptr = match emit_hir_expr( - builder, - &call.args[0], - locals, - fn_map, - enum_index, - struct_layouts, - return_lowering, - module, - data_counter, - )? { - ValueRepr::Single(ptr) => ptr, - _ => { - return Err(CodegenError::Unsupported(format!( - "{base_name} expects a pointer dst" - ))) - } - }; - let src_ptr = match emit_hir_expr( - builder, - &call.args[1], - locals, - fn_map, - enum_index, - struct_layouts, - return_lowering, - module, - data_counter, - )? 
{ - ValueRepr::Single(ptr) => ptr, - _ => { - return Err(CodegenError::Unsupported(format!( - "{base_name} expects a pointer src" - ))) - } - }; - let count_val = match emit_hir_expr( - builder, - &call.args[2], - locals, - fn_map, - enum_index, - struct_layouts, - return_lowering, - module, - data_counter, - )? { - ValueRepr::Single(val) => val, - _ => { - return Err(CodegenError::Unsupported(format!( - "{base_name} expects an i32 count" - ))) - } - }; - - let zero_i32 = builder.ins().iconst(ir::types::I32, 0); - let should_copy = builder - .ins() - .icmp(IntCC::SignedGreaterThan, count_val, zero_i32); - let copy_block = builder.create_block(); - let done_block = builder.create_block(); - builder - .ins() - .brif(should_copy, copy_block, &[], done_block, &[]); - - builder.switch_to_block(copy_block); - builder.seal_block(copy_block); - let count_ptr = if ptr_ty != ir::types::I32 { - builder.ins().sextend(ptr_ty, count_val) - } else { - count_val - }; - let stride = builder.ins().iconst(ptr_ty, layout.size as i64); - let byte_count = if layout.size == 1 { - count_ptr - } else { - builder.ins().imul(count_ptr, stride) - }; - let config = module.isa().frontend_config(); - if base_name == "memcpy" { - builder.call_memcpy(config, dst_ptr, src_ptr, byte_count); - } else { - builder.call_memmove(config, dst_ptr, src_ptr, byte_count); - } - builder.ins().jump(done_block, &[]); - - builder.switch_to_block(done_block); - builder.seal_block(done_block); - return Ok(Some(ValueRepr::Unit)); - } - _ => {} - } - Ok(None) -} - -fn hir_type_from_ty( - ty: &crate::typeck::Ty, - enum_index: &EnumIndex, - struct_layouts: &StructLayoutIndex, - ptr_ty: Type, -) -> Result { - use crate::typeck::{BuiltinType, Ty}; - let abi = match ty { - Ty::Builtin(b) => match b { - BuiltinType::I32 => AbiType::I32, - BuiltinType::I64 => { - return Err(CodegenError::Unsupported( - "i64 is not supported by the current codegen backend".to_string(), - )) - } - BuiltinType::U32 => AbiType::U32, - 
BuiltinType::U8 => AbiType::U8, - BuiltinType::Bool => AbiType::Bool, - BuiltinType::Unit | BuiltinType::Never => AbiType::Unit, - }, - Ty::Ptr(_) => AbiType::Ptr, - Ty::Ref(inner) => { - return hir_type_from_ty(inner, enum_index, struct_layouts, ptr_ty); - } - Ty::Param(_) => { - return Err(CodegenError::Unsupported( - "generic type parameters must be monomorphized before codegen".to_string(), - )) - } - Ty::Path(name, _args) => { - if resolve_struct_layout(ty, "", &struct_layouts.layouts).is_some() { - AbiType::Ptr - } else if enum_index.layouts.contains_key(name) { - AbiType::Ptr - } else if enum_index.variants.contains_key(name) { - AbiType::I32 - } else { - AbiType::Handle - } - } - }; - Ok(crate::hir::HirType { - ty: ty.clone(), - abi, - }) -} - -fn ensure_abi_sig_handled(info: &FnInfo) -> Result<(), CodegenError> { - let Some(abi_sig) = info.abi_sig.as_ref() else { - return Ok(()); - }; - if abi_sig == &info.sig { - return Ok(()); - } - if abi_quirks::abi_sig_requires_lowering(abi_sig, &info.sig) { - Ok(()) - } else { - Err(CodegenError::Codegen(format!( - "abi signature mismatch for {} without ResultOut lowering", - info.symbol - ))) - } -} - #[cfg(test)] mod tests { use super::super::{FnInfo, FnSig}; @@ -4350,6 +3228,6 @@ mod tests { runtime_symbol: None, is_runtime: false, }; - assert!(ensure_abi_sig_handled(&info).is_err()); + assert!(runtime::ensure_abi_sig_handled(&info).is_err()); } } diff --git a/capc/src/codegen/emit/match_lowering.rs b/capc/src/codegen/emit/match_lowering.rs new file mode 100644 index 0000000..dd31ad7 --- /dev/null +++ b/capc/src/codegen/emit/match_lowering.rs @@ -0,0 +1,573 @@ +use std::collections::HashMap; + +use cranelift_codegen::ir::condcodes::IntCC; +use cranelift_codegen::ir::{self, InstBuilder, MemFlags}; +use cranelift_frontend::FunctionBuilder; +use cranelift_module::Module as ModuleTrait; +use cranelift_object::ObjectModule; + +use crate::ast::Literal; + +use super::{ + emit_hir_expr, emit_hir_stmt, load_value_by_ty, 
store_local, DeferStack, LoopTarget, + ReturnLowering, +}; +use super::super::{ + CodegenError, EnumIndex, Flow, FnInfo, LocalValue, ResultKind, ResultShape, StructLayoutIndex, + ValueRepr, +}; + +fn value_kind(value: &ValueRepr) -> ResultKind { + match value { + ValueRepr::Unit => ResultKind::Unit, + ValueRepr::Single(_) => ResultKind::Single, + ValueRepr::Result { ok, err, .. } => { + ResultKind::Result(Box::new(value_kind(ok)), Box::new(value_kind(err))) + } + } +} + +fn value_from_flat_results( + kind: &ResultKind, + values: &[ir::Value], + idx: &mut usize, +) -> Result { + match kind { + ResultKind::Unit => Ok(ValueRepr::Unit), + ResultKind::Single => { + let value = *values + .get(*idx) + .ok_or_else(|| CodegenError::Codegen("missing match result value".to_string()))?; + *idx += 1; + Ok(ValueRepr::Single(value)) + } + ResultKind::Result(ok_kind, err_kind) => { + let tag = *values + .get(*idx) + .ok_or_else(|| CodegenError::Codegen("missing match result tag".to_string()))?; + *idx += 1; + let ok = value_from_flat_results(ok_kind, values, idx)?; + let err = value_from_flat_results(err_kind, values, idx)?; + Ok(ValueRepr::Result { + tag, + ok: Box::new(ok), + err: Box::new(err), + }) + } + } +} + +/// Emit HIR match as statement (arms can contain returns, don't produce values). +/// Returns true if all paths diverged (returned/broke/continued). 
+pub(super) fn emit_hir_match_stmt( + builder: &mut FunctionBuilder, + match_expr: &crate::hir::HirMatch, + locals: &HashMap, + fn_map: &HashMap, + enum_index: &EnumIndex, + struct_layouts: &StructLayoutIndex, + module: &mut ObjectModule, + data_counter: &mut u32, + loop_target: Option, + return_lowering: &ReturnLowering, + defer_stack: &mut DeferStack, +) -> Result { + let value = emit_hir_expr( + builder, + &match_expr.expr, + locals, + fn_map, + enum_index, + struct_layouts, + return_lowering, + module, + data_counter, + )?; + + let (match_val, match_result) = match value.clone() { + ValueRepr::Single(v) => { + let tag = match &match_expr.expr.ty().ty { + crate::typeck::Ty::Path(name, _) if enum_index.layouts.contains_key(name) => { + builder.ins().load(ir::types::I32, MemFlags::new(), v, 0) + } + _ => v, + }; + (tag, None) + } + ValueRepr::Result { tag, ok, err } => (tag, Some((*ok, *err))), + ValueRepr::Unit => (builder.ins().iconst(ir::types::I32, 0), None), + }; + + let merge_block = builder.create_block(); + let num_arms = match_expr.arms.len(); + let mut check_blocks: Vec = Vec::new(); + let mut arm_blocks: Vec = Vec::new(); + for i in 0..num_arms { + arm_blocks.push(builder.create_block()); + if i + 1 < num_arms { + check_blocks.push(builder.create_block()); + } + } + check_blocks.push(merge_block); + + let mut current_block = builder + .current_block() + .ok_or_else(|| CodegenError::Codegen("no current block for match".to_string()))?; + + let mut any_arm_continues = false; + + for (idx, arm) in match_expr.arms.iter().enumerate() { + let arm_block = arm_blocks[idx]; + let next_block = check_blocks[idx]; + + if idx > 0 { + builder.switch_to_block(current_block); + } + let cond = hir_match_pattern_cond( + builder, + &arm.pattern, + match_val, + match_expr.expr.ty(), + enum_index, + )?; + builder.ins().brif(cond, arm_block, &[], next_block, &[]); + + builder.switch_to_block(arm_block); + + let mut arm_locals = locals.clone(); + let mut arm_defers = 
defer_stack.clone(); + arm_defers.push_block_scope(); + hir_bind_match_pattern_value( + builder, + &arm.pattern, + &value, + match_result.as_ref(), + match_expr.expr.ty(), + enum_index, + struct_layouts, + module, + &mut arm_locals, + )?; + + let mut arm_terminated = false; + for stmt in &arm.body.stmts { + let flow = emit_hir_stmt( + builder, + stmt, + &mut arm_locals, + fn_map, + enum_index, + struct_layouts, + module, + data_counter, + loop_target, + return_lowering, + &mut arm_defers, + )?; + if flow == Flow::Terminated { + arm_terminated = true; + break; + } + } + + if !arm_terminated { + arm_defers.emit_current_and_pop( + builder, + &arm_locals, + fn_map, + enum_index, + struct_layouts, + return_lowering, + module, + data_counter, + )?; + builder.ins().jump(merge_block, &[]); + any_arm_continues = true; + } + + current_block = next_block; + } + + builder.switch_to_block(merge_block); + if !any_arm_continues { + builder.ins().trap(ir::TrapCode::UnreachableCodeReached); + } + + Ok(!any_arm_continues) +} + +/// Emit HIR match expression (arms produce a value). 
+pub(super) fn emit_hir_match_expr( + builder: &mut FunctionBuilder, + match_expr: &crate::hir::HirMatch, + locals: &HashMap, + fn_map: &HashMap, + enum_index: &EnumIndex, + struct_layouts: &StructLayoutIndex, + return_lowering: &ReturnLowering, + module: &mut ObjectModule, + data_counter: &mut u32, +) -> Result { + use crate::hir::HirStmt; + + let value = emit_hir_expr( + builder, + &match_expr.expr, + locals, + fn_map, + enum_index, + struct_layouts, + return_lowering, + module, + data_counter, + )?; + + let (match_val, match_result) = match value.clone() { + ValueRepr::Single(v) => { + let tag = match &match_expr.expr.ty().ty { + crate::typeck::Ty::Path(name, _) if enum_index.layouts.contains_key(name) => { + builder.ins().load(ir::types::I32, MemFlags::new(), v, 0) + } + _ => v, + }; + (tag, None) + } + ValueRepr::Result { tag, ok, err } => (tag, Some((*ok, *err))), + ValueRepr::Unit => (builder.ins().iconst(ir::types::I32, 0), None), + }; + + let merge_block = builder.create_block(); + let mut current_block = builder + .current_block() + .ok_or_else(|| CodegenError::Codegen("no current block for match".to_string()))?; + + let mut result_shape: Option = None; + + for (idx, arm) in match_expr.arms.iter().enumerate() { + let is_last = idx + 1 == match_expr.arms.len(); + let arm_block = builder.create_block(); + let next_block = if is_last { + merge_block + } else { + builder.create_block() + }; + + if idx > 0 { + builder.switch_to_block(current_block); + } + let cond = hir_match_pattern_cond( + builder, + &arm.pattern, + match_val, + match_expr.expr.ty(), + enum_index, + )?; + builder.ins().brif(cond, arm_block, &[], next_block, &[]); + + builder.switch_to_block(arm_block); + let mut arm_locals = locals.clone(); + let mut arm_defers = DeferStack::new(); + arm_defers.push_block_scope(); + hir_bind_match_pattern_value( + builder, + &arm.pattern, + &value, + match_result.as_ref(), + match_expr.expr.ty(), + enum_index, + struct_layouts, + module, + &mut arm_locals, + 
)?; + + let stmts = &arm.body.stmts; + let Some((last, prefix)) = stmts.split_last() else { + return Err(CodegenError::Unsupported("empty match arm".to_string())); + }; + + let mut prefix_terminated = false; + for stmt in prefix { + let flow = emit_hir_stmt( + builder, + stmt, + &mut arm_locals, + fn_map, + enum_index, + struct_layouts, + module, + data_counter, + None, + return_lowering, + &mut arm_defers, + )?; + if flow == Flow::Terminated { + prefix_terminated = true; + break; + } + } + + if prefix_terminated { + builder.seal_block(arm_block); + if is_last { + break; + } + current_block = next_block; + continue; + } + + let (arm_value, arm_diverges) = match last { + HirStmt::Expr(expr_stmt) => { + let diverges = matches!(&expr_stmt.expr, crate::hir::HirExpr::Trap(_)); + let value = emit_hir_expr( + builder, + &expr_stmt.expr, + &arm_locals, + fn_map, + enum_index, + struct_layouts, + return_lowering, + module, + data_counter, + )?; + (value, diverges) + } + _ => { + return Err(CodegenError::Unsupported( + "match arm must end with expression".to_string(), + )) + } + }; + + if arm_diverges { + builder.seal_block(arm_block); + } else { + let values = super::flatten_value(&arm_value); + + if result_shape.is_none() { + let mut types = Vec::new(); + let mut slots = Vec::new(); + for val in &values { + let ty = builder.func.dfg.value_type(*val); + let size = ty.bytes() as u32; + let slot = builder.create_sized_stack_slot(ir::StackSlotData::new( + ir::StackSlotKind::ExplicitSlot, + size.max(1), + )); + types.push(ty); + slots.push(slot); + } + result_shape = Some(ResultShape { + kind: value_kind(&arm_value), + slots, + types, + }); + } + + let shape = result_shape + .as_ref() + .ok_or_else(|| CodegenError::Codegen("missing match result shape".to_string()))?; + if values.len() != shape.types.len() { + return Err(CodegenError::Unsupported( + "mismatched match arm".to_string(), + )); + } + for (idx, val) in values.iter().enumerate() { + builder.ins().stack_store(*val, 
shape.slots[idx], 0); + } + arm_defers.emit_current_and_pop( + builder, + &arm_locals, + fn_map, + enum_index, + struct_layouts, + return_lowering, + module, + data_counter, + )?; + builder.ins().jump(merge_block, &[]); + builder.seal_block(arm_block); + } + + if is_last { + break; + } + current_block = next_block; + } + + builder.switch_to_block(merge_block); + builder.seal_block(merge_block); + + let shape = result_shape + .ok_or_else(|| CodegenError::Codegen("missing match result value".to_string()))?; + let mut loaded = Vec::new(); + for (slot, ty) in shape.slots.iter().zip(shape.types.iter()) { + let addr = builder + .ins() + .stack_addr(module.isa().pointer_type(), *slot, 0); + let val = builder.ins().load(*ty, MemFlags::new(), addr, 0); + loaded.push(val); + } + + let mut idx = 0; + let result = value_from_flat_results(&shape.kind, &loaded, &mut idx)?; + + Ok(result) +} + +fn hir_match_pattern_cond( + builder: &mut FunctionBuilder, + pattern: &crate::hir::HirPattern, + match_val: ir::Value, + match_ty: &crate::hir::HirType, + enum_index: &EnumIndex, +) -> Result { + use crate::hir::HirPattern; + + match pattern { + HirPattern::Wildcard | HirPattern::Binding(_) => { + let one = builder.ins().iconst(ir::types::I32, 1); + Ok(builder.ins().icmp_imm(IntCC::Equal, one, 1)) + } + HirPattern::Literal(lit) => match lit { + Literal::Int(n) => { + let rhs = builder.ins().iconst(ir::types::I32, *n); + Ok(builder.ins().icmp(IntCC::Equal, match_val, rhs)) + } + Literal::U8(n) => { + let rhs = builder.ins().iconst(ir::types::I8, i64::from(*n)); + Ok(builder.ins().icmp(IntCC::Equal, match_val, rhs)) + } + Literal::Bool(b) => { + let rhs = builder.ins().iconst(ir::types::I8, if *b { 1 } else { 0 }); + Ok(builder.ins().icmp(IntCC::Equal, match_val, rhs)) + } + Literal::Unit => { + let one = builder.ins().iconst(ir::types::I32, 1); + Ok(builder.ins().icmp_imm(IntCC::Equal, one, 1)) + } + Literal::String(_) => Err(CodegenError::Unsupported( + "string pattern 
matching".to_string(), + )), + }, + HirPattern::Variant { variant_name, .. } => { + let qualified = match &match_ty.ty { + crate::typeck::Ty::Path(path, _) => path.clone(), + _ => { + return Err(CodegenError::Codegen(format!( + "enum variant pattern has non-path type: {:?}", + match_ty.ty + ))) + } + }; + + let val_ty = builder.func.dfg.value_type(match_val); + + if qualified == "sys.result.Result" { + let discr = match variant_name.as_str() { + "Ok" => 0i64, + "Err" => 1i64, + _ => { + return Err(CodegenError::Codegen(format!( + "unknown Result variant: {}", + variant_name + ))) + } + }; + let rhs = builder.ins().iconst(val_ty, discr); + return Ok(builder.ins().icmp(IntCC::Equal, match_val, rhs)); + } + + if let Some(variants) = enum_index.variants.get(&qualified) { + if let Some(&discr) = variants.get(variant_name) { + let rhs = builder.ins().iconst(val_ty, i64::from(discr)); + return Ok(builder.ins().icmp(IntCC::Equal, match_val, rhs)); + } + } + Err(CodegenError::Codegen(format!( + "unknown enum variant in pattern: {}.{}", + qualified, variant_name + ))) + } + } +} + +fn hir_bind_match_pattern_value( + builder: &mut FunctionBuilder, + pattern: &crate::hir::HirPattern, + value: &ValueRepr, + result: Option<&(ValueRepr, ValueRepr)>, + match_ty: &crate::hir::HirType, + enum_index: &EnumIndex, + struct_layouts: &StructLayoutIndex, + module: &mut ObjectModule, + locals: &mut HashMap, +) -> Result<(), CodegenError> { + use crate::hir::HirPattern; + + match pattern { + HirPattern::Wildcard => Ok(()), + HirPattern::Literal(_) => Ok(()), + HirPattern::Binding(local_id) => { + locals.insert(*local_id, store_local(builder, value.clone())); + Ok(()) + } + HirPattern::Variant { + variant_name, + binding, + .. 
+ } => { + if let Some(local_id) = binding { + if let Some((ok_val, err_val)) = result { + if variant_name == "Ok" { + locals.insert(*local_id, store_local(builder, ok_val.clone())); + } else if variant_name == "Err" { + locals.insert(*local_id, store_local(builder, err_val.clone())); + } + return Ok(()); + } + let enum_name = match &match_ty.ty { + crate::typeck::Ty::Path(path, _) => path, + _ => { + return Err(CodegenError::Unsupported( + "variant binding on non-enum".to_string(), + )) + } + }; + let Some(layout) = enum_index.layouts.get(enum_name) else { + return Err(CodegenError::Unsupported( + "variant binding without payload".to_string(), + )); + }; + let Some(payloads) = enum_index.payloads.get(enum_name) else { + return Err(CodegenError::Unsupported( + "missing enum payload info".to_string(), + )); + }; + let payload_ty = + payloads + .get(variant_name) + .cloned() + .flatten() + .ok_or_else(|| { + CodegenError::Unsupported("variant binding without payload".to_string()) + })?; + let ValueRepr::Single(base_ptr) = value else { + return Err(CodegenError::Unsupported( + "variant binding expects enum storage".to_string(), + )); + }; + let payload_val = load_value_by_ty( + builder, + *base_ptr, + layout.payload_offset, + &payload_ty, + enum_index, + struct_layouts, + module, + )?; + locals.insert(*local_id, store_local(builder, payload_val)); + } + Ok(()) + } + } +} diff --git a/capc/src/codegen/emit/runtime.rs b/capc/src/codegen/emit/runtime.rs new file mode 100644 index 0000000..e3c7d0c --- /dev/null +++ b/capc/src/codegen/emit/runtime.rs @@ -0,0 +1,585 @@ +use std::collections::HashMap; + +use cranelift_codegen::ir::condcodes::IntCC; +use cranelift_codegen::ir::{self, InstBuilder, MemFlags, Type, Value}; +use cranelift_frontend::FunctionBuilder; +use cranelift_module::{Linkage, Module as ModuleTrait}; +use cranelift_object::ObjectModule; + +use crate::abi::AbiType; + +use super::{ + aligned_slot_size, aligned_stack_addr, emit_hir_expr, 
is_non_opaque_struct_type, + load_value_by_ty, resolve_struct_layout, sig_to_clif, store_value_by_ty, + type_layout_from_index, value_from_results, value_type_for_result_out, ReturnLowering, +}; +use super::super::{ + abi_quirks, CodegenError, EnumIndex, FnInfo, LocalValue, StructLayoutIndex, ValueRepr, +}; + +/// Emit a call to a runtime intrinsic with ABI adaptation when needed. +pub(crate) fn emit_runtime_wrapper_call( + builder: &mut FunctionBuilder, + module: &mut ObjectModule, + info: &FnInfo, + args: Vec, + ret_ty: &crate::hir::HirType, + enum_index: &EnumIndex, + struct_layouts: &StructLayoutIndex, +) -> Result { + ensure_abi_sig_handled(info)?; + let abi_sig = info.abi_sig.as_ref().unwrap_or(&info.sig); + let mut result_out = None; + let mut sret_ptr = None; + let mut call_args = args; + + enum ResultOutSlot { + Scalar(ir::StackSlot, ir::Type, u32), + Struct(ir::Value), + } + + if info.sig.ret == AbiType::Ptr + && abi_sig.ret == AbiType::Unit + && (is_non_opaque_struct_type(ret_ty, struct_layouts) + || matches!(&ret_ty.ty, crate::typeck::Ty::Path(name, _) if enum_index.layouts.contains_key(name))) + { + let ptr_ty = module.isa().pointer_type(); + let (size, align) = + if let Some(layout) = resolve_struct_layout(&ret_ty.ty, "", &struct_layouts.layouts) { + (layout.size, layout.align) + } else if let crate::typeck::Ty::Path(name, _) = &ret_ty.ty { + let layout = enum_index + .layouts + .get(name) + .ok_or_else(|| CodegenError::Unsupported("enum layout missing".to_string()))?; + (layout.size, layout.align) + } else { + return Err(CodegenError::Unsupported( + "sret return layout missing".to_string(), + )); + }; + let align = align.max(1); + let slot_size = aligned_slot_size(size, align); + let slot = builder.create_sized_stack_slot(ir::StackSlotData::new( + ir::StackSlotKind::ExplicitSlot, + slot_size, + )); + let base_ptr = aligned_stack_addr(builder, slot, align, ptr_ty); + call_args.insert(0, base_ptr); + sret_ptr = Some(base_ptr); + } + + if let 
AbiType::ResultOut(ok_ty, err_ty) = &abi_sig.ret { + let ptr_ty = module.isa().pointer_type(); + let ok_slot = if **ok_ty == AbiType::Unit { + None + } else if **ok_ty == AbiType::Ptr { + let align = ptr_ty.bytes().max(1) as u32; + let slot = builder.create_sized_stack_slot(ir::StackSlotData::new( + ir::StackSlotKind::ExplicitSlot, + aligned_slot_size(ptr_ty.bytes() as u32, align), + )); + let addr = aligned_stack_addr(builder, slot, align, ptr_ty); + call_args.push(addr); + Some(ResultOutSlot::Struct(addr)) + } else { + let ty = value_type_for_result_out(ok_ty, ptr_ty)?; + let align = ty.bytes().max(1) as u32; + debug_assert!(align.is_power_of_two()); + let slot = builder.create_sized_stack_slot(ir::StackSlotData::new( + ir::StackSlotKind::ExplicitSlot, + aligned_slot_size(ty.bytes().max(1) as u32, align), + )); + let addr = aligned_stack_addr(builder, slot, align, ptr_ty); + call_args.push(addr); + Some(ResultOutSlot::Scalar(slot, ty, align)) + }; + let err_slot = if **err_ty == AbiType::Unit { + None + } else if **err_ty == AbiType::Ptr { + let align = ptr_ty.bytes().max(1) as u32; + let slot = builder.create_sized_stack_slot(ir::StackSlotData::new( + ir::StackSlotKind::ExplicitSlot, + aligned_slot_size(ptr_ty.bytes() as u32, align), + )); + let addr = aligned_stack_addr(builder, slot, align, ptr_ty); + call_args.push(addr); + Some(ResultOutSlot::Struct(addr)) + } else { + let ty = value_type_for_result_out(err_ty, ptr_ty)?; + let align = ty.bytes().max(1) as u32; + debug_assert!(align.is_power_of_two()); + let slot = builder.create_sized_stack_slot(ir::StackSlotData::new( + ir::StackSlotKind::ExplicitSlot, + aligned_slot_size(ty.bytes().max(1) as u32, align), + )); + let addr = aligned_stack_addr(builder, slot, align, ptr_ty); + call_args.push(addr); + Some(ResultOutSlot::Scalar(slot, ty, align)) + }; + result_out = Some((ok_slot, err_slot, ok_ty.clone(), err_ty.clone())); + } + + let sig = sig_to_clif( + abi_sig, + module.isa().pointer_type(), + 
module.isa().default_call_conv(), + ); + let call_symbol = info.runtime_symbol.as_deref().unwrap_or(&info.symbol); + let func_id = module + .declare_function(call_symbol, Linkage::Import, &sig) + .map_err(|err| CodegenError::Codegen(err.to_string()))?; + let local = module.declare_func_in_func(func_id, builder.func); + let call_inst = builder.ins().call(local, &call_args); + let results = builder.inst_results(call_inst).to_vec(); + + if abi_quirks::is_result_out(&abi_sig.ret) { + let tag = results + .first() + .ok_or_else(|| CodegenError::Codegen("missing result tag".to_string()))?; + let (ok_slot, err_slot, ok_ty, err_ty) = + result_out.ok_or_else(|| CodegenError::Codegen("missing result slots".to_string()))?; + let ok_val = if let Some(slot) = ok_slot { + match slot { + ResultOutSlot::Scalar(slot, ty, align) => { + let addr = + aligned_stack_addr(builder, slot, align, module.isa().pointer_type()); + let val = builder.ins().load(ty, MemFlags::new(), addr, 0); + ValueRepr::Single(val) + } + ResultOutSlot::Struct(addr) => ValueRepr::Single(addr), + } + } else { + ValueRepr::Unit + }; + let err_val = if let Some(slot) = err_slot { + match slot { + ResultOutSlot::Scalar(slot, ty, align) => { + let addr = + aligned_stack_addr(builder, slot, align, module.isa().pointer_type()); + let val = builder.ins().load(ty, MemFlags::new(), addr, 0); + ValueRepr::Single(val) + } + ResultOutSlot::Struct(addr) => ValueRepr::Single(addr), + } + } else { + ValueRepr::Unit + }; + match &info.sig.ret { + AbiType::Result(_, _) => { + return Ok(ValueRepr::Result { + tag: *tag, + ok: Box::new(ok_val), + err: Box::new(err_val), + }); + } + _ => { + return Err(CodegenError::Unsupported(format!( + "result out params for {ok_ty:?}/{err_ty:?}" + ))) + } + } + } + + if let Some(ptr) = sret_ptr { + return Ok(ValueRepr::Single(ptr)); + } + + let mut idx = 0; + value_from_results(builder, &info.sig.ret, &results, &mut idx) +} + +pub(super) fn emit_unsafe_ptr_call( + builder: &mut FunctionBuilder, + 
module: &mut ObjectModule, + call: &crate::hir::HirCall, + locals: &HashMap, + fn_map: &HashMap, + enum_index: &EnumIndex, + struct_layouts: &StructLayoutIndex, + return_lowering: &ReturnLowering, + data_counter: &mut u32, +) -> Result, CodegenError> { + let (module_path, func_name) = match &call.callee { + crate::hir::ResolvedCallee::Function { module, name, .. } => (module, name), + _ => return Ok(None), + }; + if module_path != "sys.unsafe_ptr" { + return Ok(None); + } + let base_name = func_name.split("__").next().unwrap_or(func_name); + if call.type_args.len() != 1 { + return Err(CodegenError::Unsupported(format!( + "{base_name} expects one type argument" + ))); + } + let elem_ty = &call.type_args[0]; + let ptr_ty = module.isa().pointer_type(); + let elem_hir = hir_type_from_ty(elem_ty, enum_index, struct_layouts, ptr_ty)?; + let layout = type_layout_from_index(&elem_hir, struct_layouts, ptr_ty)?; + match base_name { + "sizeof" => { + let size = builder.ins().iconst(ir::types::I32, layout.size as i64); + return Ok(Some(ValueRepr::Single(size))); + } + "alignof" => { + let align = builder.ins().iconst(ir::types::I32, layout.align as i64); + return Ok(Some(ValueRepr::Single(align))); + } + "ptr_cast" | "ptr_cast_u8" => { + if call.args.len() != 1 { + return Err(CodegenError::Unsupported(format!( + "{base_name} expects (ptr)" + ))); + } + let base_ptr = match emit_hir_expr( + builder, + &call.args[0], + locals, + fn_map, + enum_index, + struct_layouts, + return_lowering, + module, + data_counter, + )? 
{ + ValueRepr::Single(ptr) => ptr, + _ => { + return Err(CodegenError::Unsupported(format!( + "{base_name} expects a pointer value" + ))) + } + }; + return Ok(Some(ValueRepr::Single(base_ptr))); + } + "ptr_is_null" => { + if call.args.len() != 1 { + return Err(CodegenError::Unsupported( + "ptr_is_null expects (ptr)".to_string(), + )); + } + let base_ptr = match emit_hir_expr( + builder, + &call.args[0], + locals, + fn_map, + enum_index, + struct_layouts, + return_lowering, + module, + data_counter, + )? { + ValueRepr::Single(ptr) => ptr, + _ => { + return Err(CodegenError::Unsupported( + "ptr_is_null expects a pointer value".to_string(), + )) + } + }; + let is_null = builder + .ins() + .icmp_imm(ir::condcodes::IntCC::Equal, base_ptr, 0); + return Ok(Some(ValueRepr::Single(is_null))); + } + "ptr_add" => { + if call.args.len() != 2 { + return Err(CodegenError::Unsupported( + "ptr_add expects (ptr, offset)".to_string(), + )); + } + let base_ptr = match emit_hir_expr( + builder, + &call.args[0], + locals, + fn_map, + enum_index, + struct_layouts, + return_lowering, + module, + data_counter, + )? { + ValueRepr::Single(ptr) => ptr, + _ => { + return Err(CodegenError::Unsupported( + "ptr_add expects a pointer value".to_string(), + )) + } + }; + let offset_val = match emit_hir_expr( + builder, + &call.args[1], + locals, + fn_map, + enum_index, + struct_layouts, + return_lowering, + module, + data_counter, + )? 
{ + ValueRepr::Single(val) => val, + _ => { + return Err(CodegenError::Unsupported( + "ptr_add expects an i32 offset".to_string(), + )) + } + }; + let offset = if ptr_ty != ir::types::I32 { + builder.ins().sextend(ptr_ty, offset_val) + } else { + offset_val + }; + let stride = builder.ins().iconst(ptr_ty, layout.size as i64); + let byte_offset = if layout.size == 1 { + offset + } else { + builder.ins().imul(offset, stride) + }; + let addr = builder.ins().iadd(base_ptr, byte_offset); + return Ok(Some(ValueRepr::Single(addr))); + } + "ptr_read" => { + if call.args.len() != 1 { + return Err(CodegenError::Unsupported( + "ptr_read expects (ptr)".to_string(), + )); + } + let base_ptr = match emit_hir_expr( + builder, + &call.args[0], + locals, + fn_map, + enum_index, + struct_layouts, + return_lowering, + module, + data_counter, + )? { + ValueRepr::Single(ptr) => ptr, + _ => { + return Err(CodegenError::Unsupported( + "ptr_read expects a pointer value".to_string(), + )) + } + }; + let value = load_value_by_ty( + builder, + base_ptr, + 0, + &elem_hir, + enum_index, + struct_layouts, + module, + )?; + return Ok(Some(value)); + } + "ptr_write" => { + if call.args.len() != 2 { + return Err(CodegenError::Unsupported( + "ptr_write expects (ptr, value)".to_string(), + )); + } + let base_ptr = match emit_hir_expr( + builder, + &call.args[0], + locals, + fn_map, + enum_index, + struct_layouts, + return_lowering, + module, + data_counter, + )? 
{ + ValueRepr::Single(ptr) => ptr, + _ => { + return Err(CodegenError::Unsupported( + "ptr_write expects a pointer value".to_string(), + )) + } + }; + let value = emit_hir_expr( + builder, + &call.args[1], + locals, + fn_map, + enum_index, + struct_layouts, + return_lowering, + module, + data_counter, + )?; + store_value_by_ty( + builder, + base_ptr, + 0, + &elem_hir, + value, + enum_index, + struct_layouts, + module, + )?; + return Ok(Some(ValueRepr::Unit)); + } + "memcpy" | "memmove" => { + if call.args.len() != 3 { + return Err(CodegenError::Unsupported(format!( + "{base_name} expects (dst, src, count)" + ))); + } + let dst_ptr = match emit_hir_expr( + builder, + &call.args[0], + locals, + fn_map, + enum_index, + struct_layouts, + return_lowering, + module, + data_counter, + )? { + ValueRepr::Single(ptr) => ptr, + _ => { + return Err(CodegenError::Unsupported(format!( + "{base_name} expects a pointer dst" + ))) + } + }; + let src_ptr = match emit_hir_expr( + builder, + &call.args[1], + locals, + fn_map, + enum_index, + struct_layouts, + return_lowering, + module, + data_counter, + )? { + ValueRepr::Single(ptr) => ptr, + _ => { + return Err(CodegenError::Unsupported(format!( + "{base_name} expects a pointer src" + ))) + } + }; + let count_val = match emit_hir_expr( + builder, + &call.args[2], + locals, + fn_map, + enum_index, + struct_layouts, + return_lowering, + module, + data_counter, + )? 
{ + ValueRepr::Single(val) => val, + _ => { + return Err(CodegenError::Unsupported(format!( + "{base_name} expects an i32 count" + ))) + } + }; + + let zero_i32 = builder.ins().iconst(ir::types::I32, 0); + let should_copy = builder + .ins() + .icmp(IntCC::SignedGreaterThan, count_val, zero_i32); + let copy_block = builder.create_block(); + let done_block = builder.create_block(); + builder + .ins() + .brif(should_copy, copy_block, &[], done_block, &[]); + + builder.switch_to_block(copy_block); + builder.seal_block(copy_block); + let count_ptr = if ptr_ty != ir::types::I32 { + builder.ins().sextend(ptr_ty, count_val) + } else { + count_val + }; + let stride = builder.ins().iconst(ptr_ty, layout.size as i64); + let byte_count = if layout.size == 1 { + count_ptr + } else { + builder.ins().imul(count_ptr, stride) + }; + let config = module.isa().frontend_config(); + if base_name == "memcpy" { + builder.call_memcpy(config, dst_ptr, src_ptr, byte_count); + } else { + builder.call_memmove(config, dst_ptr, src_ptr, byte_count); + } + builder.ins().jump(done_block, &[]); + + builder.switch_to_block(done_block); + builder.seal_block(done_block); + return Ok(Some(ValueRepr::Unit)); + } + _ => {} + } + Ok(None) +} + +fn hir_type_from_ty( + ty: &crate::typeck::Ty, + enum_index: &EnumIndex, + struct_layouts: &StructLayoutIndex, + ptr_ty: Type, +) -> Result { + use crate::typeck::{BuiltinType, Ty}; + let abi = match ty { + Ty::Builtin(b) => match b { + BuiltinType::I32 => AbiType::I32, + BuiltinType::I64 => { + return Err(CodegenError::Unsupported( + "i64 is not supported by the current codegen backend".to_string(), + )) + } + BuiltinType::U32 => AbiType::U32, + BuiltinType::U8 => AbiType::U8, + BuiltinType::Bool => AbiType::Bool, + BuiltinType::Unit | BuiltinType::Never => AbiType::Unit, + }, + Ty::Ptr(_) => AbiType::Ptr, + Ty::Ref(inner) => { + return hir_type_from_ty(inner, enum_index, struct_layouts, ptr_ty); + } + Ty::Param(_) => { + return Err(CodegenError::Unsupported( + 
"generic type parameters must be monomorphized before codegen".to_string(), + )) + } + Ty::Path(name, _args) => { + if resolve_struct_layout(ty, "", &struct_layouts.layouts).is_some() { + AbiType::Ptr + } else if enum_index.layouts.contains_key(name) { + AbiType::Ptr + } else if enum_index.variants.contains_key(name) { + AbiType::I32 + } else { + AbiType::Handle + } + } + }; + Ok(crate::hir::HirType { + ty: ty.clone(), + abi, + }) +} + +pub(super) fn ensure_abi_sig_handled(info: &FnInfo) -> Result<(), CodegenError> { + let Some(abi_sig) = info.abi_sig.as_ref() else { + return Ok(()); + }; + if abi_sig == &info.sig { + return Ok(()); + } + if abi_quirks::abi_sig_requires_lowering(abi_sig, &info.sig) { + Ok(()) + } else { + Err(CodegenError::Codegen(format!( + "abi signature mismatch for {} without ResultOut lowering", + info.symbol + ))) + } +} diff --git a/capc/src/codegen/mod.rs b/capc/src/codegen/mod.rs index e291f6b..ff3062b 100644 --- a/capc/src/codegen/mod.rs +++ b/capc/src/codegen/mod.rs @@ -194,10 +194,11 @@ struct ResultShape { } /// Result shape kinds for match-expression lowering. -#[derive(Clone, Copy, Debug, PartialEq, Eq)] +#[derive(Clone, Debug, PartialEq, Eq)] enum ResultKind { Unit, Single, + Result(Box, Box), } /// Build and write the object file for a fully-checked HIR program. 
diff --git a/capc/src/desugar.rs b/capc/src/desugar.rs new file mode 100644 index 0000000..4d74584 --- /dev/null +++ b/capc/src/desugar.rs @@ -0,0 +1,772 @@ +use crate::ast::*; + +pub fn desugar_module(module: &Module) -> Module { + let mut ctx = DesugarCtx::new(module); + ctx.desugar_module(module) +} + +struct DesugarCtx { + next_expr_id: u32, +} + +impl DesugarCtx { + fn new(module: &Module) -> Self { + Self { + next_expr_id: max_module_expr_id(module).saturating_add(1), + } + } + + fn fresh_expr_id(&mut self) -> ExprId { + let id = ExprId(self.next_expr_id); + self.next_expr_id += 1; + id + } + + fn desugar_module(&mut self, module: &Module) -> Module { + Module { + package: module.package, + name: module.name.clone(), + uses: module.uses.clone(), + items: module + .items + .iter() + .map(|item| self.desugar_item(item)) + .collect(), + span: module.span, + } + } + + fn desugar_item(&mut self, item: &Item) -> Item { + match item { + Item::Function(func) => Item::Function(self.desugar_function(func)), + Item::ExternFunction(func) => Item::ExternFunction(func.clone()), + Item::Struct(decl) => Item::Struct(decl.clone()), + Item::Enum(decl) => Item::Enum(decl.clone()), + Item::Trait(decl) => Item::Trait(decl.clone()), + Item::Impl(impl_block) => Item::Impl(self.desugar_impl_block(impl_block)), + } + } + + fn desugar_function(&mut self, func: &Function) -> Function { + Function { + name: func.name.clone(), + type_params: func.type_params.clone(), + params: func.params.clone(), + ret: func.ret.clone(), + body: self.desugar_block(&func.body), + is_pub: func.is_pub, + doc: func.doc.clone(), + span: func.span, + } + } + + fn desugar_impl_block(&mut self, impl_block: &ImplBlock) -> ImplBlock { + ImplBlock { + type_params: impl_block.type_params.clone(), + trait_path: impl_block.trait_path.clone(), + target: impl_block.target.clone(), + methods: impl_block + .methods + .iter() + .map(|method| self.desugar_function(method)) + .collect(), + doc: impl_block.doc.clone(), + 
span: impl_block.span, + } + } + + fn desugar_block(&mut self, block: &Block) -> Block { + let mut stmts = Vec::new(); + for stmt in &block.stmts { + stmts.extend(self.desugar_stmt(stmt)); + } + Block { + stmts, + span: block.span, + } + } + + fn desugar_stmt(&mut self, stmt: &Stmt) -> Vec { + match stmt { + Stmt::Let(let_stmt) => vec![Stmt::Let(LetStmt { + name: let_stmt.name.clone(), + ty: let_stmt.ty.clone(), + expr: self.desugar_expr(&let_stmt.expr), + span: let_stmt.span, + })], + Stmt::LetElse(let_else) => vec![Stmt::Let(self.lower_let_else(let_else))], + Stmt::TryLet(try_let) => vec![Stmt::Let(self.lower_try_let(try_let))], + Stmt::TryElse(try_else) => vec![Stmt::Expr(ExprStmt { + expr: self.lower_try_else(try_else), + span: try_else.span, + })], + Stmt::Assign(assign) => vec![Stmt::Assign(AssignStmt { + name: assign.name.clone(), + expr: self.desugar_expr(&assign.expr), + span: assign.span, + })], + Stmt::Defer(defer_stmt) => vec![Stmt::Defer(DeferStmt { + expr: self.desugar_expr(&defer_stmt.expr), + span: defer_stmt.span, + })], + Stmt::Return(ret) => vec![Stmt::Return(ReturnStmt { + expr: ret.expr.as_ref().map(|expr| self.desugar_expr(expr)), + span: ret.span, + })], + Stmt::Break(break_stmt) => vec![Stmt::Break(break_stmt.clone())], + Stmt::Continue(continue_stmt) => vec![Stmt::Continue(continue_stmt.clone())], + Stmt::If(if_stmt) => vec![Stmt::If(IfStmt { + cond: self.desugar_expr(&if_stmt.cond), + then_block: self.desugar_block(&if_stmt.then_block), + else_block: if_stmt.else_block.as_ref().map(|b| self.desugar_block(b)), + span: if_stmt.span, + })], + Stmt::While(while_stmt) => vec![Stmt::While(WhileStmt { + cond: self.desugar_expr(&while_stmt.cond), + body: self.desugar_block(&while_stmt.body), + span: while_stmt.span, + })], + Stmt::For(for_stmt) => vec![Stmt::For(ForStmt { + var: for_stmt.var.clone(), + start: self.desugar_expr(&for_stmt.start), + end: self.desugar_expr(&for_stmt.end), + body: self.desugar_block(&for_stmt.body), + span: 
for_stmt.span, + })], + Stmt::ForEach(for_each) => vec![self.lower_for_each(for_each)], + Stmt::Expr(expr_stmt) => vec![Stmt::Expr(ExprStmt { + expr: self.desugar_expr(&expr_stmt.expr), + span: expr_stmt.span, + })], + } + } + + fn desugar_expr(&mut self, expr: &Expr) -> Expr { + match expr { + Expr::Literal(lit) => Expr::Literal(lit.clone()), + Expr::Path(path) => Expr::Path(path.clone()), + Expr::Call(call) => Expr::Call(CallExpr { + id: call.id, + callee: Box::new(self.desugar_expr(&call.callee)), + type_args: call.type_args.clone(), + args: call.args.iter().map(|arg| self.desugar_expr(arg)).collect(), + span: call.span, + }), + Expr::MethodCall(method_call) => Expr::MethodCall(MethodCallExpr { + id: method_call.id, + receiver: Box::new(self.desugar_expr(&method_call.receiver)), + method: method_call.method.clone(), + type_args: method_call.type_args.clone(), + args: method_call + .args + .iter() + .map(|arg| self.desugar_expr(arg)) + .collect(), + span: method_call.span, + }), + Expr::FieldAccess(field_access) => Expr::FieldAccess(FieldAccessExpr { + id: field_access.id, + object: Box::new(self.desugar_expr(&field_access.object)), + field: field_access.field.clone(), + span: field_access.span, + }), + Expr::Index(index_expr) => Expr::Index(IndexExpr { + id: index_expr.id, + object: Box::new(self.desugar_expr(&index_expr.object)), + index: Box::new(self.desugar_expr(&index_expr.index)), + span: index_expr.span, + }), + Expr::StructLiteral(lit) => Expr::StructLiteral(StructLiteralExpr { + id: lit.id, + path: lit.path.clone(), + type_args: lit.type_args.clone(), + fields: lit + .fields + .iter() + .map(|field| StructLiteralField { + name: field.name.clone(), + expr: self.desugar_expr(&field.expr), + span: field.span, + }) + .collect(), + span: lit.span, + }), + Expr::Unary(unary) => Expr::Unary(UnaryExpr { + id: unary.id, + op: unary.op.clone(), + expr: Box::new(self.desugar_expr(&unary.expr)), + span: unary.span, + }), + Expr::Binary(binary) => 
Expr::Binary(BinaryExpr { + id: binary.id, + op: binary.op.clone(), + left: Box::new(self.desugar_expr(&binary.left)), + right: Box::new(self.desugar_expr(&binary.right)), + span: binary.span, + }), + Expr::Match(match_expr) => Expr::Match(MatchExpr { + id: match_expr.id, + expr: Box::new(self.desugar_expr(&match_expr.expr)), + arms: match_expr + .arms + .iter() + .map(|arm| MatchArm { + pattern: arm.pattern.clone(), + body: self.desugar_block(&arm.body), + span: arm.span, + }) + .collect(), + span: match_expr.span, + match_span: match_expr.match_span, + }), + Expr::Try(try_expr) => Expr::Try(TryExpr { + id: try_expr.id, + expr: Box::new(self.desugar_expr(&try_expr.expr)), + span: try_expr.span, + }), + Expr::Grouping(group) => Expr::Grouping(GroupingExpr { + id: group.id, + expr: Box::new(self.desugar_expr(&group.expr)), + span: group.span, + }), + } + } + + fn lower_let_else(&mut self, stmt: &LetElseStmt) -> LetStmt { + let binding = self + .pattern_binding_ident(&stmt.pattern) + .expect("parser validated let-else binding"); + let expr = self.desugar_expr(&stmt.expr); + let else_block = self.desugar_block(&stmt.else_block); + + let binding_expr = self.ident_expr(&binding); + let ok_body = Block { + stmts: vec![Stmt::Expr(ExprStmt { + expr: binding_expr, + span: binding.span, + })], + span: binding.span, + }; + + let mut else_stmts = else_block.stmts; + else_stmts.push(Stmt::Expr(ExprStmt { + expr: self.panic_expr(stmt.else_block.span), + span: stmt.else_block.span, + })); + let else_body = Block { + stmts: else_stmts, + span: stmt.else_block.span, + }; + + let match_span = Span::new(stmt.span.start, stmt.else_block.span.end); + let match_expr = Expr::Match(MatchExpr { + id: self.fresh_expr_id(), + expr: Box::new(expr), + arms: vec![ + MatchArm { + pattern: stmt.pattern.clone(), + body: ok_body, + span: match_span, + }, + MatchArm { + pattern: Pattern::Wildcard(stmt.else_block.span), + body: else_body, + span: match_span, + }, + ], + span: match_span, + 
match_span: stmt.span, + }); + + LetStmt { + name: binding, + ty: None, + expr: match_expr, + span: stmt.span, + } + } + + fn lower_try_let(&mut self, stmt: &TryLetStmt) -> LetStmt { + let expr = self.desugar_expr(&stmt.expr); + let else_block = self.desugar_block(&stmt.else_block); + + let binding_expr = self.ident_expr(&stmt.name); + let ok_body = Block { + stmts: vec![Stmt::Expr(ExprStmt { + expr: binding_expr, + span: stmt.name.span, + })], + span: stmt.name.span, + }; + + let mut else_stmts = else_block.stmts; + else_stmts.push(Stmt::Expr(ExprStmt { + expr: self.panic_expr(stmt.else_block.span), + span: stmt.else_block.span, + })); + let else_body = Block { + stmts: else_stmts, + span: stmt.else_block.span, + }; + + let expr_span = expr.span(); + let ok_ident = Spanned::new("Ok".to_string(), expr_span); + let err_ident = Spanned::new("Err".to_string(), stmt.else_block.span); + let match_span = Span::new(stmt.span.start, stmt.else_block.span.end); + let match_expr = Expr::Match(MatchExpr { + id: self.fresh_expr_id(), + expr: Box::new(expr), + arms: vec![ + MatchArm { + pattern: Pattern::Call { + path: Path { + id: self.fresh_expr_id(), + segments: vec![ok_ident], + span: expr_span, + }, + binding: Some(stmt.name.clone()), + span: expr_span, + }, + body: ok_body, + span: match_span, + }, + MatchArm { + pattern: Pattern::Call { + path: Path { + id: self.fresh_expr_id(), + segments: vec![err_ident], + span: stmt.else_block.span, + }, + binding: stmt.err_binding.clone(), + span: stmt.else_block.span, + }, + body: else_body, + span: match_span, + }, + ], + span: match_span, + match_span: stmt.span, + }); + + LetStmt { + name: stmt.name.clone(), + ty: stmt.ty.clone(), + expr: match_expr, + span: stmt.span, + } + } + + fn lower_try_else(&mut self, stmt: &TryElseStmt) -> Expr { + let expr = self.desugar_expr(&stmt.expr); + let else_block = self.desugar_block(&stmt.else_block); + let expr_span = expr.span(); + let ok_ident = Spanned::new("Ok".to_string(), expr_span); + 
let err_ident = Spanned::new("Err".to_string(), stmt.else_block.span); + + Expr::Match(MatchExpr { + id: self.fresh_expr_id(), + expr: Box::new(expr), + arms: vec![ + MatchArm { + pattern: Pattern::Call { + path: Path { + id: self.fresh_expr_id(), + segments: vec![ok_ident], + span: expr_span, + }, + binding: None, + span: expr_span, + }, + body: Block { + stmts: Vec::new(), + span: expr_span, + }, + span: stmt.span, + }, + MatchArm { + pattern: Pattern::Call { + path: Path { + id: self.fresh_expr_id(), + segments: vec![err_ident], + span: stmt.else_block.span, + }, + binding: stmt.err_binding.clone(), + span: stmt.else_block.span, + }, + body: else_block, + span: stmt.span, + }, + ], + span: stmt.span, + match_span: expr_span, + }) + } + + fn lower_for_each(&mut self, stmt: &ForEachStmt) -> Stmt { + let body = self.desugar_block(&stmt.body); + let source = self.desugar_expr(&stmt.source); + + let hidden_source_span = self.synthetic_span(stmt.span, 1); + let hidden_len_span = self.synthetic_span(stmt.span, 2); + let hidden_idx_span = self.synthetic_span(stmt.span, 3); + let source_free_span = self.synthetic_span(stmt.span, 4); + let len_call_span = self.synthetic_span(stmt.span, 5); + let get_call_span = self.synthetic_span(stmt.span, 6); + let try_span = self.synthetic_span(stmt.span, 7); + let else_span = self.synthetic_span(stmt.span, 8); + let zero_span = self.synthetic_span(stmt.span, 9); + + let hidden_source = self.synthetic_ident("__for_source", hidden_source_span); + let hidden_idx = self.synthetic_ident("__for_idx", hidden_idx_span); + let hidden_len = self.synthetic_ident("__for_len", hidden_len_span); + let hidden_idx_expr = self.ident_expr(&hidden_idx); + let hidden_len_expr = self.ident_expr(&hidden_len); + + let (source_expr, mut setup_stmts) = if source.to_path().is_some() { + (source, Vec::new()) + } else { + let hidden_source_expr = self.ident_expr(&hidden_source); + let source_stmt = Stmt::Let(LetStmt { + name: hidden_source.clone(), + ty: None, 
+ expr: source, + span: hidden_source_span, + }); + let free_stmt = Stmt::Defer(DeferStmt { + expr: self.method_call_expr( + hidden_source_expr.clone(), + "free", + Vec::new(), + source_free_span, + ), + span: source_free_span, + }); + (hidden_source_expr, vec![source_stmt, free_stmt]) + }; + + let len_stmt = Stmt::Let(LetStmt { + name: hidden_len.clone(), + ty: None, + expr: self.method_call_expr(source_expr.clone(), "len", Vec::new(), len_call_span), + span: hidden_len_span, + }); + let get_expr = self.method_call_expr( + source_expr, + "get", + vec![hidden_idx_expr.clone()], + get_call_span, + ); + let get_stmt = Stmt::Let(self.lower_try_let(&TryLetStmt { + name: stmt.item.clone(), + ty: None, + expr: get_expr, + err_binding: None, + else_block: Block { + stmts: Vec::new(), + span: else_span, + }, + span: try_span, + })); + + let mut loop_stmts = Vec::new(); + if let Some(index_ident) = &stmt.index { + loop_stmts.push(Stmt::Let(LetStmt { + name: index_ident.clone(), + ty: None, + expr: hidden_idx_expr.clone(), + span: index_ident.span, + })); + } + loop_stmts.push(get_stmt); + loop_stmts.extend(body.stmts); + + setup_stmts.push(len_stmt); + setup_stmts.push(Stmt::For(ForStmt { + var: hidden_idx, + start: Expr::Literal(LiteralExpr { + id: self.fresh_expr_id(), + value: Literal::Int(0), + span: zero_span, + }), + end: hidden_len_expr, + body: Block { + stmts: loop_stmts, + span: stmt.body.span, + }, + span: stmt.span, + })); + + Stmt::If(IfStmt { + cond: Expr::Literal(LiteralExpr { + id: self.fresh_expr_id(), + value: Literal::Bool(true), + span: stmt.span, + }), + then_block: Block { + stmts: setup_stmts, + span: stmt.span, + }, + else_block: None, + span: stmt.span, + }) + } + + fn pattern_binding_ident(&self, pattern: &Pattern) -> Option { + match pattern { + Pattern::Binding(ident) => Some(ident.clone()), + Pattern::Call { + binding: Some(ident), + .. 
+ } => Some(ident.clone()), + _ => None, + } + } + + fn synthetic_ident(&self, prefix: &str, span: Span) -> Ident { + Spanned::new(format!("{prefix}_{}", span.start), span) + } + + fn synthetic_span(&self, base: Span, offset: usize) -> Span { + let point = base.start.saturating_add(offset); + Span::new(point, point) + } + + fn ident_expr(&mut self, ident: &Ident) -> Expr { + Expr::Path(Path { + id: self.fresh_expr_id(), + segments: vec![ident.clone()], + span: ident.span, + }) + } + + fn method_call_expr( + &mut self, + receiver: Expr, + method: &str, + args: Vec, + span: Span, + ) -> Expr { + Expr::MethodCall(MethodCallExpr { + id: self.fresh_expr_id(), + receiver: Box::new(receiver), + method: Spanned::new(method.to_string(), span), + type_args: Vec::new(), + args, + span, + }) + } + + fn panic_expr(&mut self, span: Span) -> Expr { + let panic_ident = Spanned::new("panic".to_string(), span); + Expr::Call(CallExpr { + id: self.fresh_expr_id(), + callee: Box::new(Expr::Path(Path { + id: self.fresh_expr_id(), + segments: vec![panic_ident], + span, + })), + type_args: Vec::new(), + args: Vec::new(), + span, + }) + } +} + +fn max_module_expr_id(module: &Module) -> u32 { + let mut max_id = module.name.id.0; + for use_decl in &module.uses { + max_id = max_id.max(use_decl.path.id.0); + } + for item in &module.items { + max_id = max_id.max(max_item_expr_id(item)); + } + max_id +} + +fn max_item_expr_id(item: &Item) -> u32 { + match item { + Item::Function(func) => max_block_expr_id(&func.body), + Item::ExternFunction(_) => 0, + Item::Struct(decl) => max_struct_expr_id(decl), + Item::Enum(decl) => max_enum_expr_id(decl), + Item::Trait(decl) => max_trait_expr_id(decl), + Item::Impl(impl_block) => { + let mut max_id = max_type_expr_id(&impl_block.target); + if let Some(path) = &impl_block.trait_path { + max_id = max_id.max(path.id.0); + } + for method in &impl_block.methods { + max_id = max_id.max(max_block_expr_id(&method.body)); + max_id = 
max_id.max(max_type_expr_id(&method.ret)); + for param in &method.params { + if let Some(ty) = ¶m.ty { + max_id = max_id.max(max_type_expr_id(ty)); + } + } + } + max_id + } + } +} + +fn max_struct_expr_id(decl: &StructDecl) -> u32 { + let mut max_id = 0; + for param in &decl.type_params { + for bound in ¶m.bounds { + max_id = max_id.max(bound.id.0); + } + } + for field in &decl.fields { + max_id = max_id.max(max_type_expr_id(&field.ty)); + } + max_id +} + +fn max_enum_expr_id(decl: &EnumDecl) -> u32 { + let mut max_id = 0; + for param in &decl.type_params { + for bound in ¶m.bounds { + max_id = max_id.max(bound.id.0); + } + } + for variant in &decl.variants { + if let Some(payload) = &variant.payload { + max_id = max_id.max(max_type_expr_id(payload)); + } + } + max_id +} + +fn max_trait_expr_id(decl: &TraitDecl) -> u32 { + let mut max_id = 0; + for param in &decl.type_params { + for bound in ¶m.bounds { + max_id = max_id.max(bound.id.0); + } + } + for method in &decl.methods { + max_id = max_id.max(max_type_expr_id(&method.ret)); + for param in &method.params { + if let Some(ty) = ¶m.ty { + max_id = max_id.max(max_type_expr_id(ty)); + } + } + } + max_id +} + +fn max_block_expr_id(block: &Block) -> u32 { + let mut max_id = 0; + for stmt in &block.stmts { + max_id = max_id.max(max_stmt_expr_id(stmt)); + } + max_id +} + +fn max_stmt_expr_id(stmt: &Stmt) -> u32 { + match stmt { + Stmt::Let(let_stmt) => max_expr_id(&let_stmt.expr), + Stmt::LetElse(stmt) => max_expr_id(&stmt.expr).max(max_block_expr_id(&stmt.else_block)), + Stmt::TryLet(stmt) => max_expr_id(&stmt.expr).max(max_block_expr_id(&stmt.else_block)), + Stmt::TryElse(stmt) => max_expr_id(&stmt.expr).max(max_block_expr_id(&stmt.else_block)), + Stmt::Assign(assign) => max_expr_id(&assign.expr), + Stmt::Defer(defer_stmt) => max_expr_id(&defer_stmt.expr), + Stmt::Return(ret) => ret.expr.as_ref().map(max_expr_id).unwrap_or(0), + Stmt::Break(_) | Stmt::Continue(_) => 0, + Stmt::If(if_stmt) => { + let mut max_id = 
max_expr_id(&if_stmt.cond).max(max_block_expr_id(&if_stmt.then_block)); + if let Some(else_block) = &if_stmt.else_block { + max_id = max_id.max(max_block_expr_id(else_block)); + } + max_id + } + Stmt::While(while_stmt) => { + max_expr_id(&while_stmt.cond).max(max_block_expr_id(&while_stmt.body)) + } + Stmt::For(for_stmt) => max_expr_id(&for_stmt.start) + .max(max_expr_id(&for_stmt.end)) + .max(max_block_expr_id(&for_stmt.body)), + Stmt::ForEach(stmt) => max_expr_id(&stmt.source).max(max_block_expr_id(&stmt.body)), + Stmt::Expr(expr_stmt) => max_expr_id(&expr_stmt.expr), + } +} + +fn max_expr_id(expr: &Expr) -> u32 { + match expr { + Expr::Literal(lit) => lit.id.0, + Expr::Path(path) => path.id.0, + Expr::Call(call) => { + let mut max_id = call.id.0.max(max_expr_id(&call.callee)); + for arg in &call.args { + max_id = max_id.max(max_expr_id(arg)); + } + for arg in &call.type_args { + max_id = max_id.max(max_type_expr_id(arg)); + } + max_id + } + Expr::MethodCall(method_call) => { + let mut max_id = method_call.id.0.max(max_expr_id(&method_call.receiver)); + for arg in &method_call.args { + max_id = max_id.max(max_expr_id(arg)); + } + for arg in &method_call.type_args { + max_id = max_id.max(max_type_expr_id(arg)); + } + max_id + } + Expr::FieldAccess(field_access) => field_access.id.0.max(max_expr_id(&field_access.object)), + Expr::Index(index_expr) => index_expr + .id + .0 + .max(max_expr_id(&index_expr.object)) + .max(max_expr_id(&index_expr.index)), + Expr::StructLiteral(lit) => { + let mut max_id = lit.id.0.max(lit.path.id.0); + for arg in &lit.type_args { + max_id = max_id.max(max_type_expr_id(arg)); + } + for field in &lit.fields { + max_id = max_id.max(max_expr_id(&field.expr)); + } + max_id + } + Expr::Unary(unary) => unary.id.0.max(max_expr_id(&unary.expr)), + Expr::Binary(binary) => binary + .id + .0 + .max(max_expr_id(&binary.left)) + .max(max_expr_id(&binary.right)), + Expr::Match(match_expr) => { + let mut max_id = 
match_expr.id.0.max(max_expr_id(&match_expr.expr)); + for arm in &match_expr.arms { + max_id = max_id.max(max_pattern_expr_id(&arm.pattern)); + max_id = max_id.max(max_block_expr_id(&arm.body)); + } + max_id + } + Expr::Try(try_expr) => try_expr.id.0.max(max_expr_id(&try_expr.expr)), + Expr::Grouping(group) => group.id.0.max(max_expr_id(&group.expr)), + } +} + +fn max_pattern_expr_id(pattern: &Pattern) -> u32 { + match pattern { + Pattern::Wildcard(_) | Pattern::Binding(_) | Pattern::Literal(_) => 0, + Pattern::Path(path) => path.id.0, + Pattern::Call { path, .. } => path.id.0, + } +} + +fn max_type_expr_id(ty: &Type) -> u32 { + match ty { + Type::Path { path, args, .. } => { + let mut max_id = path.id.0; + for arg in args { + max_id = max_id.max(max_type_expr_id(arg)); + } + max_id + } + Type::Ptr { target, .. } | Type::Ref { target, .. } => max_type_expr_id(target), + } +} diff --git a/capc/src/driver.rs b/capc/src/driver.rs new file mode 100644 index 0000000..dc5c922 --- /dev/null +++ b/capc/src/driver.rs @@ -0,0 +1,194 @@ +use std::path::{Path, PathBuf}; + +use miette::{miette, NamedSource, Result}; + +use crate::ast::{Module, PackageSafety, Path as AstPath}; +use crate::hir::HirProgram; +use crate::{build_object, parse_module, type_check_program, validate_module_path, ModuleGraph}; + +#[derive(Clone)] +pub struct LoadedProgram { + pub path: PathBuf, + pub source: String, + pub module: Module, + pub stdlib: Vec, + pub user_modules: Vec, + pub root: PathBuf, +} + +pub struct LinkOptions<'a> { + pub out: Option, + pub out_dir: Option, + pub link_libs: &'a [String], + pub link_search: &'a [PathBuf], +} + +pub fn load_program(path: &Path) -> Result { + let source = std::fs::read_to_string(path) + .map_err(|err| miette!("failed to read {}: {err}", path.display()))?; + let module = parse_module(&source).map_err(|err| { + let named = NamedSource::new(path.display().to_string(), source.clone()); + miette::Report::new(err).with_source_code(named) + })?; + let root = 
path + .parent() + .ok_or_else(|| miette!("entry path has no parent directory"))? + .to_path_buf(); + validate_module_path(&module, path, &root).map_err(|err| { + let err = err.with_context(format!("while loading module `{}`", module.name)); + miette::Report::new(err) + })?; + let mut graph = ModuleGraph::new(); + let stdlib = graph.load_stdlib().map_err(miette::Report::new)?; + let user_modules = graph + .load_user_modules_transitive(path, &module) + .map_err(miette::Report::new)?; + Ok(LoadedProgram { + path: path.to_path_buf(), + source, + module, + stdlib, + user_modules, + root, + }) +} + +pub fn type_check_loaded(loaded: &LoadedProgram, safe_only: bool) -> Result { + if safe_only { + enforce_safe_only(&loaded.module, &loaded.user_modules, &loaded.root)?; + } + type_check_program(&loaded.module, &loaded.stdlib, &loaded.user_modules).map_err(|err| { + let named = NamedSource::new(loaded.path.display().to_string(), loaded.source.clone()); + miette::Report::new(err).with_source_code(named) + }) +} + +pub fn build_binary( + loaded: &LoadedProgram, + program: &HirProgram, + options: LinkOptions<'_>, +) -> Result { + let workspace_root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join(".."); + let build_dir = options + .out_dir + .unwrap_or_else(|| workspace_root.join("target").join("capc-out")); + std::fs::create_dir_all(&build_dir) + .map_err(|err| miette!("failed to create build dir {}: {err}", build_dir.display()))?; + + let obj_path = build_dir.join("program.o"); + build_object(program, &obj_path).map_err(|err| { + let named = NamedSource::new(loaded.path.display().to_string(), loaded.source.clone()); + miette::Report::new(err).with_source_code(named) + })?; + + let cargo_path = resolve_tool("CARGO", "cargo"); + let rustc_path = resolve_tool("RUSTC", "rustc"); + + let status = std::process::Command::new(&cargo_path) + .arg("build") + .arg("-p") + .arg("capable_runtime") + .status() + .map_err(|err| miette!("failed to run {} build: {err}", 
cargo_path.display()))?; + if !status.success() { + return Err(miette!("runtime build failed")); + } + + let stub_path = build_dir.join("capable_stub.rs"); + std::fs::write( + &stub_path, + "extern \"C\" { fn capable_rt_start() -> i32; }\nfn main() { let code = unsafe { capable_rt_start() }; std::process::exit(code); }\n", + ) + .map_err(|err| miette!("failed to write stub: {err}"))?; + + let out_path = options.out.unwrap_or_else(|| { + let name = loaded + .path + .file_stem() + .and_then(|s| s.to_str()) + .unwrap_or("a.out"); + build_dir.join(name) + }); + let runtime_lib_dir = workspace_root.join("target").join("debug"); + let mut rustc = std::process::Command::new(&rustc_path); + rustc + .arg(&stub_path) + .arg("-L") + .arg(&runtime_lib_dir) + .arg("-lstatic=capable_runtime") + .arg("-C") + .arg(format!("link-arg={}", obj_path.display())) + .arg("-o") + .arg(&out_path); + for path in options.link_search { + rustc.arg("-L").arg(path); + } + for lib in options.link_libs { + rustc.arg("-l").arg(lib); + } + let output = rustc + .output() + .map_err(|err| miette!("failed to run {}: {err}", rustc_path.display()))?; + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + let stderr = stderr.trim(); + if stderr.is_empty() { + return Err(miette!("link failed")); + } + return Err(miette!("link failed: {stderr}")); + } + Ok(out_path) +} + +pub fn enforce_safe_only(entry: &Module, user_modules: &[Module], root: &Path) -> Result<()> { + let mut offenders = Vec::new(); + if entry.package == PackageSafety::Unsafe { + offenders.push(format!( + "{} ({}): package unsafe", + entry.name, + module_path_for(root, &entry.name).display() + )); + } + for module in user_modules { + if module.package == PackageSafety::Unsafe { + offenders.push(format!( + "{} ({}): package unsafe", + module.name, + module_path_for(root, &module.name).display() + )); + } + } + if offenders.is_empty() { + return Ok(()); + } + offenders.sort(); + offenders.dedup(); + let mut 
message = String::from("safe-only build rejected unsafe package(s):"); + for entry in offenders { + message.push_str("\n- "); + message.push_str(&entry); + } + Err(miette!(message)) +} + +pub fn module_path_for(root: &Path, name: &AstPath) -> PathBuf { + let mut path = root.to_path_buf(); + for seg in &name.segments { + path.push(&seg.item); + } + path.set_extension("cap"); + path +} + +fn resolve_tool(env_var: &str, fallback_name: &str) -> PathBuf { + if let Some(path) = std::env::var_os(env_var) { + return PathBuf::from(path); + } + if let Some(home) = std::env::var_os("HOME") { + let candidate = PathBuf::from(home).join(".cargo").join("bin").join(fallback_name); + if candidate.exists() { + return candidate; + } + } + PathBuf::from(fallback_name) +} diff --git a/capc/src/lib.rs b/capc/src/lib.rs index d245053..d00241d 100644 --- a/capc/src/lib.rs +++ b/capc/src/lib.rs @@ -1,15 +1,19 @@ pub mod ast; pub mod abi; pub mod codegen; +mod desugar; +pub mod driver; pub mod error; pub mod hir; pub mod lexer; pub mod loader; pub mod parser; +mod runtime_intrinsics; pub mod typeck; pub use error::{ParseError, TypeError}; pub use codegen::build_object; +pub use driver::{build_binary, enforce_safe_only, load_program, module_path_for, type_check_loaded, LinkOptions, LoadedProgram}; pub use hir::{HirModule, HirProgram}; pub use loader::{ load_module_from_path, load_stdlib, load_user_modules, load_user_modules_transitive, diff --git a/capc/src/main.rs b/capc/src/main.rs index a33ddbd..182e52f 100644 --- a/capc/src/main.rs +++ b/capc/src/main.rs @@ -3,7 +3,7 @@ use std::path::PathBuf; use clap::{Parser, Subcommand}; use miette::{miette, NamedSource, Result}; -use capc::{build_object, parse_module, type_check_program, validate_module_path, ModuleGraph}; +use capc::{build_binary, load_program, module_path_for, parse_module, type_check_loaded, LinkOptions}; #[derive(Debug, Parser)] #[command(name = "capc", version, about = "Capable compiler (milestone 0/1)")] @@ -63,33 +63,8 @@ fn 
main() -> Result<()> { Ok(()) } Command::Check { path, safe_only } => { - let source = std::fs::read_to_string(&path) - .map_err(|err| miette!("failed to read {}: {err}", path.display()))?; - let module = parse_module(&source).map_err(|err| { - let named = NamedSource::new(path.display().to_string(), source.clone()); - miette::Report::new(err).with_source_code(named) - })?; - let root = path.parent().ok_or_else(|| { - miette!("entry path has no parent directory") - })?; - validate_module_path(&module, &path, root).map_err(|err| { - let err = err.with_context(format!("while loading module `{}`", module.name)); - miette::Report::new(err) - })?; - let mut graph = ModuleGraph::new(); - let stdlib = graph.load_stdlib().map_err(|err| { - miette::Report::new(err) - })?; - let user_modules = graph.load_user_modules_transitive(&path, &module).map_err(|err| { - miette::Report::new(err) - })?; - if safe_only { - enforce_safe_only(&module, &user_modules, root)?; - } - let _program = type_check_program(&module, &stdlib, &user_modules).map_err(|err| { - let named = NamedSource::new(path.display().to_string(), source); - miette::Report::new(err).with_source_code(named) - })?; + let loaded = load_program(&path)?; + let _program = type_check_loaded(&loaded, safe_only)?; println!("ok"); Ok(()) } @@ -101,7 +76,18 @@ fn main() -> Result<()> { link_libs, link_search, } => { - let out_path = build_binary(&path, out, out_dir, safe_only, &link_libs, &link_search)?; + let loaded = load_program(&path)?; + let program = type_check_loaded(&loaded, safe_only)?; + let out_path = build_binary( + &loaded, + &program, + LinkOptions { + out, + out_dir, + link_libs: &link_libs, + link_search: &link_search, + }, + )?; println!("built {}", out_path.display()); Ok(()) } @@ -113,7 +99,18 @@ fn main() -> Result<()> { link_search, args, } => { - let out_path = build_binary(&path, None, out_dir, safe_only, &link_libs, &link_search)?; + let loaded = load_program(&path)?; + let program = 
type_check_loaded(&loaded, safe_only)?; + let out_path = build_binary( + &loaded, + &program, + LinkOptions { + out: None, + out_dir, + link_libs: &link_libs, + link_search: &link_search, + }, + )?; let status = std::process::Command::new(&out_path) .args(&args) .status() @@ -127,161 +124,19 @@ fn main() -> Result<()> { } } -fn build_binary( - path: &PathBuf, - out: Option, - out_dir: Option, - safe_only: bool, - link_libs: &[String], - link_search: &[PathBuf], -) -> Result { - let source = std::fs::read_to_string(path) - .map_err(|err| miette!("failed to read {}: {err}", path.display()))?; - let module = parse_module(&source).map_err(|err| { - let named = NamedSource::new(path.display().to_string(), source.clone()); - miette::Report::new(err).with_source_code(named) - })?; - let root = path.parent().ok_or_else(|| miette!("entry path has no parent directory"))?; - validate_module_path(&module, path, root).map_err(|err| { - let err = err.with_context(format!("while loading module `{}`", module.name)); - miette::Report::new(err) - })?; - let mut graph = ModuleGraph::new(); - let stdlib = graph.load_stdlib().map_err(|err| miette::Report::new(err))?; - let user_modules = graph - .load_user_modules_transitive(path, &module) - .map_err(|err| miette::Report::new(err))?; - if safe_only { - enforce_safe_only(&module, &user_modules, root)?; - } - let program = type_check_program(&module, &stdlib, &user_modules).map_err(|err| { - let named = NamedSource::new(path.display().to_string(), source.clone()); - miette::Report::new(err).with_source_code(named) - })?; - - // Step 5: Pass HIR to codegen - let workspace_root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join(".."); - let build_dir = out_dir.unwrap_or_else(|| workspace_root.join("target").join("capc-out")); - std::fs::create_dir_all(&build_dir).map_err(|err| { - miette!("failed to create build dir {}: {err}", build_dir.display()) - })?; - let obj_path = build_dir.join("program.o"); - build_object(&program, 
&obj_path).map_err(|err| { - let named = NamedSource::new(path.display().to_string(), source.clone()); - miette::Report::new(err).with_source_code(named) - })?; - - let status = std::process::Command::new("cargo") - .arg("build") - .arg("-p") - .arg("capable_runtime") - .status() - .map_err(|err| miette!("failed to run cargo build: {err}"))?; - if !status.success() { - return Err(miette!("runtime build failed")); - } - - let stub_path = build_dir.join("capable_stub.rs"); - std::fs::write( - &stub_path, - "extern \"C\" { fn capable_rt_start() -> i32; }\nfn main() { let code = unsafe { capable_rt_start() }; std::process::exit(code); }\n", - ) - .map_err(|err| miette!("failed to write stub: {err}"))?; - - let out_path = out.unwrap_or_else(|| { - let name = path.file_stem().and_then(|s| s.to_str()).unwrap_or("a.out"); - build_dir.join(name) - }); - let runtime_lib_dir = workspace_root.join("target").join("debug"); - let mut rustc = std::process::Command::new("rustc"); - rustc - .arg(&stub_path) - .arg("-L") - .arg(&runtime_lib_dir) - .arg("-lstatic=capable_runtime") - .arg("-C") - .arg(format!("link-arg={}", obj_path.display())) - .arg("-o") - .arg(&out_path); - for path in link_search { - rustc.arg("-L").arg(path); - } - for lib in link_libs { - rustc.arg("-l").arg(lib); - } - let output = rustc.output().map_err(|err| miette!("failed to run rustc: {err}"))?; - if !output.status.success() { - let stderr = String::from_utf8_lossy(&output.stderr); - let stderr = stderr.trim(); - if stderr.is_empty() { - return Err(miette!("link failed")); - } - return Err(miette!("link failed: {stderr}")); - } - Ok(out_path) -} - -fn enforce_safe_only( - entry: &capc::ast::Module, - user_modules: &[capc::ast::Module], - root: &std::path::Path, -) -> Result<()> { - let mut offenders = Vec::new(); - if entry.package == capc::ast::PackageSafety::Unsafe { - offenders.push(format!( - "{} ({}): package unsafe", - entry.name, - module_path_for(root, &entry.name).display() - )); - } - for module 
in user_modules { - if module.package == capc::ast::PackageSafety::Unsafe { - offenders.push(format!( - "{} ({}): package unsafe", - module.name, - module_path_for(root, &module.name).display() - )); - } - } - if offenders.is_empty() { - return Ok(()); - } - offenders.sort(); - offenders.dedup(); - let mut message = String::from("safe-only build rejected unsafe package(s):"); - for entry in offenders { - message.push_str("\n- "); - message.push_str(&entry); - } - Err(miette!(message)) -} - fn audit_unsafe(path: &PathBuf) -> Result<()> { - let source = std::fs::read_to_string(path) - .map_err(|err| miette!("failed to read {}: {err}", path.display()))?; - let module = parse_module(&source).map_err(|err| { - let named = NamedSource::new(path.display().to_string(), source.clone()); - miette::Report::new(err).with_source_code(named) - })?; - let root = path - .parent() - .ok_or_else(|| miette!("entry path has no parent directory"))?; - let mut graph = ModuleGraph::new(); - let stdlib = graph.load_stdlib().map_err(|err| miette::Report::new(err))?; - let user_modules = graph - .load_user_modules_transitive(path, &module) - .map_err(|err| miette::Report::new(err))?; + let loaded = load_program(path)?; let mut findings = Vec::new(); - if let Some(entry) = audit_entry("user", &module, root) { + if let Some(entry) = audit_entry("user", &loaded.module, &loaded.root) { findings.push(entry); } - for module in &user_modules { - if let Some(entry) = audit_entry("user", module, root) { + for module in &loaded.user_modules { + if let Some(entry) = audit_entry("user", module, &loaded.root) { findings.push(entry); } } - for module in &stdlib { + for module in &loaded.stdlib { if let Some(entry) = audit_entry("stdlib", module, &capc::stdlib_root()) { findings.push(entry); } @@ -326,12 +181,3 @@ fn audit_entry( reasons.join(", ") )) } - -fn module_path_for(root: &std::path::Path, name: &capc::ast::Path) -> PathBuf { - let mut path = root.to_path_buf(); - for seg in &name.segments { - 
path.push(&seg.item); - } - path.set_extension("cap"); - path -} diff --git a/capc/src/parser.rs b/capc/src/parser.rs index 75ef85c..cbd35e4 100644 --- a/capc/src/parser.rs +++ b/capc/src/parser.rs @@ -12,6 +12,7 @@ struct Parser { tokens: Vec, index: usize, eof_span: Span, + next_expr_id: u32, } impl Parser { @@ -51,9 +52,16 @@ impl Parser { tokens, index: 0, eof_span, + next_expr_id: 0, } } + fn fresh_expr_id(&mut self) -> ExprId { + let id = ExprId(self.next_expr_id); + self.next_expr_id += 1; + id + } + fn parse_module(&mut self) -> Result { let mut package = PackageSafety::Safe; let start_span = self.peek_span_raw(); @@ -553,7 +561,7 @@ impl Parser { fn parse_stmt(&mut self) -> Result { match self.peek_kind() { - Some(TokenKind::Let) => Ok(Stmt::Let(self.parse_let()?)), + Some(TokenKind::Let) => self.parse_let(), Some(TokenKind::Return) => Ok(Stmt::Return(self.parse_return()?)), Some(TokenKind::Break) => Ok(Stmt::Break(self.parse_break()?)), Some(TokenKind::Continue) => Ok(Stmt::Continue(self.parse_continue()?)), @@ -573,7 +581,7 @@ impl Parser { } } - fn parse_let(&mut self) -> Result { + fn parse_let(&mut self) -> Result { let let_token = self.expect(TokenKind::Let)?; let start = let_token.span.start; if self.peek_kind() == Some(TokenKind::Ident) @@ -592,12 +600,12 @@ impl Parser { let end = self .maybe_consume(TokenKind::Semi) .map_or(expr.span().end, |t| t.span.end); - return Ok(LetStmt { + return Ok(Stmt::Let(LetStmt { name, ty, expr, span: Span::new(start, end), - }); + })); } let pattern = self.parse_pattern()?; @@ -605,12 +613,21 @@ impl Parser { let expr = self.parse_expr()?; self.expect(TokenKind::Else)?; let else_block = self.parse_block()?; - let mut stmt = self.desugar_let_else(let_token.span, pattern, expr, else_block)?; + if self.pattern_binding_ident(&pattern).is_none() { + return Err(self.error_at( + let_token.span, + "`let ... 
else` requires a binding pattern".to_string(), + )); + } let end = self .maybe_consume(TokenKind::Semi) - .map_or(stmt.span.end, |t| t.span.end); - stmt.span = Span::new(start, end); - Ok(stmt) + .map_or(else_block.span.end, |t| t.span.end); + Ok(Stmt::LetElse(LetElseStmt { + pattern, + expr, + else_block, + span: Span::new(start, end), + })) } fn parse_assign(&mut self) -> Result { @@ -680,13 +697,17 @@ impl Parser { None }; let else_block = self.parse_block()?; - let mut stmt = - self.desugar_try_let(try_token.span, name, ty, expr, err_binding, else_block); let end = self .maybe_consume(TokenKind::Semi) - .map_or(stmt.span.end, |t| t.span.end); - stmt.span = Span::new(start, end); - return Ok(Stmt::Let(stmt)); + .map_or(else_block.span.end, |t| t.span.end); + return Ok(Stmt::TryLet(TryLetStmt { + name, + ty, + expr, + err_binding, + else_block, + span: Span::new(start, end), + })); } let expr = self.parse_expr()?; @@ -701,13 +722,13 @@ impl Parser { None }; let else_block = self.parse_block()?; - let expr = self.desugar_expr_else(expr, err_binding, else_block); - let expr_span = expr.span(); let end = self .maybe_consume(TokenKind::Semi) - .map_or(expr_span.end, |t| t.span.end); - Ok(Stmt::Expr(ExprStmt { + .map_or(else_block.span.end, |t| t.span.end); + Ok(Stmt::TryElse(TryElseStmt { expr, + err_binding, + else_block, span: Span::new(start, end), })) } @@ -796,6 +817,7 @@ impl Parser { let body = self.parse_block()?; let end = body.span.end; let cond = Expr::Literal(LiteralExpr { + id: self.fresh_expr_id(), value: Literal::Bool(true), span: for_token.span, }); @@ -835,7 +857,13 @@ impl Parser { let item = second.clone().unwrap_or_else(|| first.clone()); let index = second.map(|_| first); let body = self.parse_block()?; - self.desugar_for_each(Span::new(start, body.span.end), index, item, range_or_source, body) + Ok(Stmt::ForEach(ForEachStmt { + index, + item, + source: range_or_source, + span: Span::new(start, body.span.end), + body, + })) } /// Parse a simple 
expression for range bounds (no struct literals allowed) @@ -847,6 +875,7 @@ impl Parser { self.error_at(token.span, "invalid integer literal".to_string()) })?; Ok(Expr::Literal(LiteralExpr { + id: self.fresh_expr_id(), value: Literal::Int(value), span: token.span, })) @@ -854,6 +883,7 @@ impl Parser { Some(TokenKind::True) => { let token = self.bump().unwrap(); Ok(Expr::Literal(LiteralExpr { + id: self.fresh_expr_id(), value: Literal::Bool(true), span: token.span, })) @@ -861,6 +891,7 @@ impl Parser { Some(TokenKind::False) => { let token = self.bump().unwrap(); Ok(Expr::Literal(LiteralExpr { + id: self.fresh_expr_id(), value: Literal::Bool(false), span: token.span, })) @@ -879,6 +910,7 @@ impl Parser { let end = segments.last().unwrap().span.end; Ok(Expr::Path(Path { + id: self.fresh_expr_id(), segments, span: Span::new(start, end), })) @@ -1013,6 +1045,7 @@ impl Parser { } let end = self.expect(TokenKind::RParen)?.span.end; lhs = Expr::MethodCall(MethodCallExpr { + id: self.fresh_expr_id(), receiver: Box::new(lhs), method: field, type_args, @@ -1031,6 +1064,7 @@ impl Parser { // Otherwise, it's a field access let span = Span::new(start, field.span.end); lhs = Expr::FieldAccess(FieldAccessExpr { + id: self.fresh_expr_id(), object: Box::new(lhs), field, span, @@ -1048,6 +1082,7 @@ impl Parser { let index = self.parse_expr()?; let end = self.expect(TokenKind::RBracket)?.span.end; lhs = Expr::Index(IndexExpr { + id: self.fresh_expr_id(), object: Box::new(lhs), index: Box::new(index), span: Span::new(start, end), @@ -1058,6 +1093,7 @@ impl Parser { let start = lhs.span().start; let end = self.bump().unwrap().span.end; lhs = Expr::Try(TryExpr { + id: self.fresh_expr_id(), expr: Box::new(lhs), span: Span::new(start, end), }); @@ -1133,6 +1169,7 @@ impl Parser { let rhs = self.parse_expr_bp(r_bp, allow_struct_literal)?; let span = Span::new(lhs.span().start, rhs.span().end); lhs = Expr::Binary(BinaryExpr { + id: self.fresh_expr_id(), op, left: Box::new(lhs), right: 
Box::new(rhs), @@ -1150,6 +1187,7 @@ impl Parser { // Propagate struct-literal allowance to avoid block ambiguity in no-struct contexts. let expr = self.parse_expr_bp(7, allow_struct_literal)?; Ok(Expr::Unary(UnaryExpr { + id: self.fresh_expr_id(), op: UnaryOp::Neg, span: Span::new(start, expr.span().end), expr: Box::new(expr), @@ -1160,6 +1198,7 @@ impl Parser { // Propagate struct-literal allowance to avoid block ambiguity in no-struct contexts. let expr = self.parse_expr_bp(7, allow_struct_literal)?; Ok(Expr::Unary(UnaryExpr { + id: self.fresh_expr_id(), op: UnaryOp::BitNot, span: Span::new(start, expr.span().end), expr: Box::new(expr), @@ -1170,6 +1209,7 @@ impl Parser { // Propagate struct-literal allowance to avoid block ambiguity in no-struct contexts. let expr = self.parse_expr_bp(7, allow_struct_literal)?; Ok(Expr::Unary(UnaryExpr { + id: self.fresh_expr_id(), op: UnaryOp::Not, span: Span::new(start, expr.span().end), expr: Box::new(expr), @@ -1200,12 +1240,14 @@ impl Parser { )); } return Ok(Expr::Literal(LiteralExpr { + id: self.fresh_expr_id(), value: Literal::U8(value as u8), span: Span::new(token.span.start, suffix.span.end), })); } } Ok(Expr::Literal(LiteralExpr { + id: self.fresh_expr_id(), value: Literal::Int(value), span: token.span, })) @@ -1216,6 +1258,7 @@ impl Parser { self.error_at(token.span, format!("invalid string literal: {message}")) })?; Ok(Expr::Literal(LiteralExpr { + id: self.fresh_expr_id(), value: Literal::String(value), span: token.span, })) @@ -1226,6 +1269,7 @@ impl Parser { self.error_at(token.span, format!("invalid char literal: {message}")) })?; Ok(Expr::Literal(LiteralExpr { + id: self.fresh_expr_id(), value: Literal::U8(value), span: token.span, })) @@ -1233,6 +1277,7 @@ impl Parser { Some(TokenKind::True) => { let token = self.bump().unwrap(); Ok(Expr::Literal(LiteralExpr { + id: self.fresh_expr_id(), value: Literal::Bool(true), span: token.span, })) @@ -1240,6 +1285,7 @@ impl Parser { Some(TokenKind::False) => { let token 
= self.bump().unwrap(); Ok(Expr::Literal(LiteralExpr { + id: self.fresh_expr_id(), value: Literal::Bool(false), span: token.span, })) @@ -1249,6 +1295,7 @@ impl Parser { if self.peek_kind() == Some(TokenKind::RParen) { let end = self.bump().unwrap().span.end; Ok(Expr::Literal(LiteralExpr { + id: self.fresh_expr_id(), value: Literal::Unit, span: Span::new(start, end), })) @@ -1256,6 +1303,7 @@ impl Parser { let expr = self.parse_expr()?; let end = self.expect(TokenKind::RParen)?.span.end; Ok(Expr::Grouping(GroupingExpr { + id: self.fresh_expr_id(), expr: Box::new(expr), span: Span::new(start, end), })) @@ -1276,6 +1324,7 @@ impl Parser { let end = segments.last().unwrap().span.end; let path = Path { + id: self.fresh_expr_id(), segments, span: Span::new(start, end), }; @@ -1319,6 +1368,7 @@ impl Parser { } let end = self.expect(TokenKind::RBrace)?.span.end; Ok(Expr::Match(MatchExpr { + id: self.fresh_expr_id(), expr: Box::new(expr), arms, span: Span::new(start, end), @@ -1395,164 +1445,6 @@ impl Parser { } } - fn desugar_let_else( - &self, - let_span: Span, - pattern: Pattern, - expr: Expr, - else_block: Block, - ) -> Result { - let binding = self.pattern_binding_ident(&pattern).ok_or_else(|| { - self.error_at( - let_span, - "`let ... 
else` requires a binding pattern".to_string(), - ) - })?; - - let binding_expr = Expr::Path(Path { - segments: vec![binding.clone()], - span: binding.span, - }); - let ok_body = Block { - stmts: vec![Stmt::Expr(ExprStmt { - expr: binding_expr, - span: binding.span, - })], - span: binding.span, - }; - - let panic_ident = Spanned::new("panic".to_string(), else_block.span); - let panic_expr = Expr::Call(CallExpr { - callee: Box::new(Expr::Path(Path { - segments: vec![panic_ident], - span: else_block.span, - })), - type_args: Vec::new(), - args: Vec::new(), - span: else_block.span, - }); - let mut else_stmts = else_block.stmts; - else_stmts.push(Stmt::Expr(ExprStmt { - expr: panic_expr, - span: else_block.span, - })); - let else_body = Block { - stmts: else_stmts, - span: else_block.span, - }; - - let match_span = Span::new(let_span.start, else_block.span.end); - let match_expr = Expr::Match(MatchExpr { - expr: Box::new(expr), - arms: vec![ - MatchArm { - pattern, - body: ok_body, - span: match_span, - }, - MatchArm { - pattern: Pattern::Wildcard(else_block.span), - body: else_body, - span: match_span, - }, - ], - span: match_span, - match_span: let_span, - }); - - Ok(LetStmt { - name: binding, - ty: None, - expr: match_expr, - span: match_span, - }) - } - - fn desugar_try_let( - &self, - try_span: Span, - binding: Ident, - ty: Option, - expr: Expr, - err_binding: Option, - else_block: Block, - ) -> LetStmt { - let binding_expr = Expr::Path(Path { - segments: vec![binding.clone()], - span: binding.span, - }); - let ok_body = Block { - stmts: vec![Stmt::Expr(ExprStmt { - expr: binding_expr, - span: binding.span, - })], - span: binding.span, - }; - - let panic_ident = Spanned::new("panic".to_string(), else_block.span); - let panic_expr = Expr::Call(CallExpr { - callee: Box::new(Expr::Path(Path { - segments: vec![panic_ident], - span: else_block.span, - })), - type_args: Vec::new(), - args: Vec::new(), - span: else_block.span, - }); - let mut else_stmts = 
else_block.stmts; - else_stmts.push(Stmt::Expr(ExprStmt { - expr: panic_expr, - span: else_block.span, - })); - let else_body = Block { - stmts: else_stmts, - span: else_block.span, - }; - - let expr_span = expr.span(); - let ok_ident = Spanned::new("Ok".to_string(), expr_span); - let err_ident = Spanned::new("Err".to_string(), else_block.span); - let match_span = Span::new(try_span.start, else_block.span.end); - let match_expr = Expr::Match(MatchExpr { - expr: Box::new(expr), - arms: vec![ - MatchArm { - pattern: Pattern::Call { - path: Path { - segments: vec![ok_ident], - span: expr_span, - }, - binding: Some(binding.clone()), - span: expr_span, - }, - body: ok_body, - span: match_span, - }, - MatchArm { - pattern: Pattern::Call { - path: Path { - segments: vec![err_ident], - span: else_block.span, - }, - binding: err_binding, - span: else_block.span, - }, - body: else_body, - span: match_span, - }, - ], - span: match_span, - match_span: try_span, - }); - - LetStmt { - name: binding, - ty, - expr: match_expr, - span: match_span, - } - } - fn pattern_binding_ident(&self, pattern: &Pattern) -> Option { match pattern { Pattern::Binding(ident) => Some(ident.clone()), @@ -1564,184 +1456,6 @@ impl Parser { } } - fn desugar_expr_else(&self, expr: Expr, err_binding: Option, else_block: Block) -> Expr { - let expr_span = expr.span(); - let ok_ident = Spanned::new("Ok".to_string(), expr_span); - let err_ident = Spanned::new("Err".to_string(), else_block.span); - let span = Span::new(expr_span.start, else_block.span.end); - - Expr::Match(MatchExpr { - expr: Box::new(expr), - arms: vec![ - MatchArm { - pattern: Pattern::Call { - path: Path { - segments: vec![ok_ident], - span: expr_span, - }, - binding: None, - span: expr_span, - }, - body: Block { - stmts: Vec::new(), - span: expr_span, - }, - span, - }, - MatchArm { - pattern: Pattern::Call { - path: Path { - segments: vec![err_ident], - span: else_block.span, - }, - binding: err_binding, - span: else_block.span, - }, - 
body: else_block, - span, - }, - ], - span, - match_span: expr_span, - }) - } - - fn desugar_for_each( - &self, - for_span: Span, - index_binding: Option, - item_binding: Ident, - source: Expr, - body: Block, - ) -> Result { - let hidden_source_span = self.synthetic_span(for_span, 1); - let hidden_len_span = self.synthetic_span(for_span, 2); - let hidden_idx_span = self.synthetic_span(for_span, 3); - let source_free_span = self.synthetic_span(for_span, 4); - let len_call_span = self.synthetic_span(for_span, 5); - let get_call_span = self.synthetic_span(for_span, 6); - let try_span = self.synthetic_span(for_span, 7); - let else_span = self.synthetic_span(for_span, 8); - let zero_span = self.synthetic_span(for_span, 9); - - let hidden_source = self.synthetic_ident("__for_source", hidden_source_span); - let hidden_idx = self.synthetic_ident("__for_idx", hidden_idx_span); - let hidden_len = self.synthetic_ident("__for_len", hidden_len_span); - let hidden_idx_expr = self.ident_expr(&hidden_idx); - let hidden_len_expr = self.ident_expr(&hidden_len); - let (source_expr, mut setup_stmts) = if source.to_path().is_some() { - (source, Vec::new()) - } else { - let hidden_source_expr = self.ident_expr(&hidden_source); - let source_stmt = Stmt::Let(LetStmt { - name: hidden_source.clone(), - ty: None, - expr: source, - span: hidden_source_span, - }); - let free_expr = - self.method_call_expr(hidden_source_expr.clone(), "free", Vec::new(), source_free_span); - let free_stmt = Stmt::Defer(DeferStmt { - expr: free_expr, - span: source_free_span, - }); - (hidden_source_expr, vec![source_stmt, free_stmt]) - }; - - let len_stmt = Stmt::Let(LetStmt { - name: hidden_len.clone(), - ty: None, - expr: self.method_call_expr(source_expr.clone(), "len", Vec::new(), len_call_span), - span: hidden_len_span, - }); - - let get_stmt = Stmt::Let(self.desugar_try_let( - try_span, - item_binding, - None, - self.method_call_expr( - source_expr, - "get", - vec![hidden_idx_expr.clone()], - get_call_span, 
- ), - None, - Block { - stmts: Vec::new(), - span: else_span, - }, - )); - - let mut loop_stmts = Vec::new(); - if let Some(index_ident) = index_binding { - loop_stmts.push(Stmt::Let(LetStmt { - name: index_ident.clone(), - ty: None, - expr: hidden_idx_expr.clone(), - span: index_ident.span, - })); - } - loop_stmts.push(get_stmt); - loop_stmts.extend(body.stmts); - - let loop_body = Block { - stmts: loop_stmts, - span: body.span, - }; - - let range_stmt = Stmt::For(ForStmt { - var: hidden_idx, - start: Expr::Literal(LiteralExpr { - value: Literal::Int(0), - span: zero_span, - }), - end: hidden_len_expr, - body: loop_body, - span: for_span, - }); - - setup_stmts.push(len_stmt); - setup_stmts.push(range_stmt); - - let then_block = Block { stmts: setup_stmts, span: for_span }; - - Ok(Stmt::If(IfStmt { - cond: Expr::Literal(LiteralExpr { - value: Literal::Bool(true), - span: for_span, - }), - then_block, - else_block: None, - span: for_span, - })) - } - - fn synthetic_ident(&self, prefix: &str, span: Span) -> Ident { - Spanned::new(format!("{prefix}_{}", span.start), span) - } - - fn synthetic_span(&self, base: Span, offset: usize) -> Span { - let point = base.start.saturating_add(offset); - Span::new(point, point) - } - - fn ident_expr(&self, ident: &Ident) -> Expr { - Expr::Path(Path { - segments: vec![ident.clone()], - span: ident.span, - }) - } - - fn method_call_expr(&self, receiver: Expr, method: &str, args: Vec, span: Span) -> Expr { - Expr::MethodCall(MethodCallExpr { - receiver: Box::new(receiver), - method: Spanned::new(method.to_string(), span), - type_args: Vec::new(), - args, - span, - }) - } - fn parse_path(&mut self) -> Result { let first = self.expect_ident()?; let start = first.span.start; @@ -1753,6 +1467,7 @@ impl Parser { } let end = segments.last().map(|s| s.span.end).unwrap_or(start); Ok(Path { + id: self.fresh_expr_id(), segments, span: Span::new(start, end), }) @@ -1795,6 +1510,7 @@ impl Parser { .unwrap_or(field_access.span.end); Ok(Path { + 
id: field_access.id, segments, span: Span::new(start, end), }) @@ -1869,6 +1585,7 @@ impl Parser { } let end = self.expect(TokenKind::RBrace)?.span.end; Ok(Expr::StructLiteral(StructLiteralExpr { + id: self.fresh_expr_id(), path, type_args, fields, @@ -1890,6 +1607,7 @@ impl Parser { } let end = self.expect(TokenKind::RParen)?.span.end; Ok(Expr::Call(CallExpr { + id: self.fresh_expr_id(), callee: Box::new(callee), type_args, args, @@ -2102,32 +1820,10 @@ fn unescape_char(text: &str) -> Result { Ok(value) } -trait SpanExt { - fn span(&self) -> Span; -} - -impl SpanExt for Expr { - fn span(&self) -> Span { - match self { - Expr::Literal(lit) => lit.span, - Expr::Path(path) => path.span, - Expr::Call(call) => call.span, - Expr::MethodCall(method_call) => method_call.span, - Expr::FieldAccess(field) => field.span, - Expr::Index(index) => index.span, - Expr::StructLiteral(lit) => lit.span, - Expr::Unary(unary) => unary.span, - Expr::Binary(binary) => binary.span, - Expr::Match(m) => m.span, - Expr::Try(try_expr) => try_expr.span, - Expr::Grouping(g) => g.span, - } - } -} - fn unit_type_at(span: Span) -> Type { let ident = Spanned::new("unit".to_string(), span); let path = Path { + id: ExprId(u32::MAX), segments: vec![ident], span, }; diff --git a/capc/src/runtime_intrinsics.rs b/capc/src/runtime_intrinsics.rs new file mode 100644 index 0000000..764d234 --- /dev/null +++ b/capc/src/runtime_intrinsics.rs @@ -0,0 +1,76 @@ +pub(crate) fn is_runtime_intrinsic(module: &str, func: &str) -> bool { + matches!( + (module, func), + ( + "sys.system", + "RootCap__mint_console" + | "RootCap__mint_readfs" + | "RootCap__mint_filesystem" + | "RootCap__mint_args" + | "RootCap__mint_stdin" + | "RootCap__mint_net" + | "RootCap__mint_alloc_default" + ) | ("sys.args", "Args__len" | "Args__at") + | ("sys.stdin", "Stdin__read_to_string_with_alloc") + | ( + "sys.net", + "Net__listen" + | "Net__connect" + | "TcpListener__accept" + | "TcpListener__close" + | "TcpConn__read_to_string_with_alloc" + 
| "TcpConn__read_with_alloc" + | "TcpConn__write" + | "TcpConn__close" + ) + | ("sys.buffer", "default_alloc") + | ( + "sys.console", + "Console__println" + | "Console__print" + | "Console__print_i32" + | "Console__println_i32" + | "Console__assert" + ) + | ( + "sys.math", + "add_wrap_i32" + | "sub_wrap_i32" + | "mul_wrap_i32" + | "add_wrap_u32" + | "sub_wrap_u32" + | "mul_wrap_u32" + | "add_wrap_u8" + | "sub_wrap_u8" + | "mul_wrap_u8" + ) + | ( + "sys.fs", + "ReadFS__read_to_string_with_alloc" + | "ReadFS__read_bytes_with_alloc" + | "ReadFS__list_dir_with_alloc" + | "ReadFS__exists" + | "ReadFS__close" + | "Filesystem__root_dir" + | "Filesystem__close" + | "Dir__subdir" + | "Dir__open_read" + | "Dir__read_bytes_with_alloc" + | "Dir__read_to_string_with_alloc" + | "Dir__list_dir_with_alloc" + | "Dir__exists" + | "Dir__close" + | "FileRead__read_to_string_with_alloc" + | "FileRead__close" + | "join_with_alloc" + ) + | ( + "sys.buffer", + "Alloc__malloc" + | "Alloc__free" + | "Alloc__cast_u8_to_u32" + | "Alloc__cast_u32_to_u8" + ) + | ("sys.bytes", "u8__is_whitespace") + ) +} diff --git a/capc/src/typeck/check.rs b/capc/src/typeck/check.rs index 44ec8ec..7c5a172 100644 --- a/capc/src/typeck/check.rs +++ b/capc/src/typeck/check.rs @@ -4,11 +4,15 @@ use crate::ast::*; use crate::error::TypeError; use super::{ - build_type_param_bounds, build_type_params, is_affine_type, is_numeric_type, is_orderable_type, - is_string_ty, lower_type, resolve_enum_variant, resolve_method_target, resolve_path, - resolve_type_name, stdlib_string_ty, type_contains_ref, type_kind, validate_type_args, - BuiltinType, EnumInfo, FunctionSig, MoveState, Scopes, SpanExt, StdlibIndex, StructInfo, - TraitImplInfo, TraitInfo, Ty, TypeKind, TypeTable, UseMap, UseMode, + bind_pattern, build_type_param_bounds, build_type_params, enum_payload_matches, + infer_enum_args, is_affine_type, is_numeric_type, is_orderable_type, is_string_ty, + leftmost_local_in_chain, lower_type, resolve_enum_type_args, 
resolve_enum_variant, + resolve_method_target, resolve_path, resolve_type_name, stdlib_string_ty, + type_contains_ref, ty_equivalent_for_set, + validate_type_args, BuiltinType, EnumInfo, FunctionSig, MoveState, Scopes, StdlibIndex, + StructInfo, TraitImplInfo, TraitInfo, Ty, TypeTable, UseMap, UseMode, ensure_affine_states_match, + ensure_linear_all_consumed, ensure_linear_scope_consumed, + ensure_linear_scopes_consumed_from, merge_branch_states, merge_match_states, stmt_is_total, }; /// Optional recorder for expression types during checking. @@ -23,7 +27,7 @@ impl<'a> TypeRecorder<'a> { pub(super) fn record(&mut self, expr: &Expr, ty: &Ty) { if let Some(table) = self.table.as_deref_mut() { - table.record(expr.span(), ty.clone()); + table.record(expr.id(), ty.clone()); } } } @@ -33,158 +37,6 @@ fn record_expr_type(recorder: &mut TypeRecorder, expr: &Expr, ty: Ty) -> Result< Ok(ty) } -fn infer_enum_args(template: &Ty, actual: &Ty, inferred: &mut HashMap) -> bool { - match template { - Ty::Param(name) => match inferred.get(name) { - Some(existing) => { - existing == actual - || matches!(actual, Ty::Path(actual_name, args) if actual_name == name && args.is_empty()) - } - None => { - inferred.insert(name.clone(), actual.clone()); - true - } - }, - Ty::Builtin(b) => matches!(actual, Ty::Builtin(other) if other == b), - Ty::Ptr(inner) => { - matches!(actual, Ty::Ptr(other) if infer_enum_args(inner, other, inferred)) - } - Ty::Ref(inner) => { - matches!(actual, Ty::Ref(other) if infer_enum_args(inner, other, inferred)) - } - Ty::Path(name, args) => match actual { - Ty::Path(other_name, other_args) - if other_name == name && args.len() == other_args.len() => - { - args.iter() - .zip(other_args.iter()) - .all(|(a, b)| infer_enum_args(a, b, inferred)) - } - _ => false, - }, - } -} - -fn resolve_enum_type_args( - enum_name: &str, - type_params: &[String], - inferred: &HashMap, - ret_ty: &Ty, -) -> Vec { - if type_params.is_empty() { - return Vec::new(); - } - let ret_args = 
match ret_ty { - Ty::Path(ret_name, args) if ret_name == enum_name && args.len() == type_params.len() => { - Some(args) - } - _ => None, - }; - type_params - .iter() - .enumerate() - .map(|(idx, param)| { - if let Some(ty) = inferred.get(param) { - return ty.clone(); - } - if let Some(args) = ret_args { - return args[idx].clone(); - } - Ty::Builtin(BuiltinType::Unit) - }) - .collect() -} - -fn apply_enum_type_args(ty: &Ty, type_params: &[String], type_args: &[Ty]) -> Ty { - match ty { - Ty::Param(name) => { - if let Some(idx) = type_params.iter().position(|p| p == name) { - return type_args.get(idx).cloned().unwrap_or_else(|| ty.clone()); - } - ty.clone() - } - Ty::Builtin(_) => ty.clone(), - Ty::Ptr(inner) => Ty::Ptr(Box::new(apply_enum_type_args( - inner, - type_params, - type_args, - ))), - Ty::Ref(inner) => Ty::Ref(Box::new(apply_enum_type_args( - inner, - type_params, - type_args, - ))), - Ty::Path(name, args) => Ty::Path( - name.clone(), - args.iter() - .map(|arg| apply_enum_type_args(arg, type_params, type_args)) - .collect(), - ), - } -} - -fn enum_payload_matches( - payload: &Ty, - arg_ty: &Ty, - type_params: &[String], - type_args: &[Ty], -) -> bool { - let expected = apply_enum_type_args(payload, type_params, type_args); - ty_equivalent_for_params(&expected, arg_ty, type_params) -} - -fn ty_equivalent_for_params(left: &Ty, right: &Ty, type_params: &[String]) -> bool { - match (left, right) { - (Ty::Param(name), Ty::Path(other, args)) - if args.is_empty() && name == other && type_params.contains(name) => - { - true - } - (Ty::Path(name, args), Ty::Param(other)) - if args.is_empty() && name == other && type_params.contains(other) => - { - true - } - (Ty::Ptr(l), Ty::Ptr(r)) | (Ty::Ref(l), Ty::Ref(r)) => { - ty_equivalent_for_params(l, r, type_params) - } - (Ty::Path(name, args), Ty::Path(other, other_args)) - if name == other && args.len() == other_args.len() => - { - args.iter() - .zip(other_args.iter()) - .all(|(a, b)| ty_equivalent_for_params(a, b, 
type_params)) - } - _ => left == right, - } -} - -fn ty_equivalent_for_set(left: &Ty, right: &Ty, type_params: &HashSet) -> bool { - match (left, right) { - (Ty::Param(name), Ty::Path(other, args)) - if args.is_empty() && name == other && type_params.contains(name) => - { - true - } - (Ty::Path(name, args), Ty::Param(other)) - if args.is_empty() && name == other && type_params.contains(other) => - { - true - } - (Ty::Ptr(l), Ty::Ptr(r)) | (Ty::Ref(l), Ty::Ref(r)) => { - ty_equivalent_for_set(l, r, type_params) - } - (Ty::Path(name, args), Ty::Path(other, other_args)) - if name == other && args.len() == other_args.len() => - { - args.iter() - .zip(other_args.iter()) - .all(|(a, b)| ty_equivalent_for_set(a, b, type_params)) - } - _ => left == right, - } -} - fn enforce_vec_method_constraints( receiver_ty: &Ty, method: &str, @@ -251,313 +103,6 @@ fn enforce_vec_method_constraints( Ok(()) } -/// Safe packages cannot mention externs or raw pointer types anywhere. -pub(super) fn validate_package_safety(module: &Module, is_stdlib: bool) -> Result<(), TypeError> { - if module.package != PackageSafety::Safe { - return Ok(()); - } - for item in &module.items { - match item { - Item::ExternFunction(func) => { - return Err(TypeError::new( - "extern declarations require `package unsafe`".to_string(), - func.span, - )); - } - Item::Function(func) => { - if !is_stdlib { - if let Some(span) = type_contains_ptr_fn(func) { - return Err(TypeError::new( - "raw pointer types require `package unsafe`".to_string(), - span, - )); - } - if let Some(span) = type_contains_slice(&func.ret) { - return Err(TypeError::new( - "Slice types cannot be returned from safe modules".to_string(), - span, - )); - } - } - } - Item::Impl(impl_block) => { - if is_stdlib { - continue; - } - for method in &impl_block.methods { - if let Some(span) = type_contains_ptr_fn(method) { - return Err(TypeError::new( - "raw pointer types require `package unsafe`".to_string(), - span, - )); - } - if let Some(span) = 
type_contains_slice(&method.ret) { - return Err(TypeError::new( - "Slice types cannot be returned from safe modules".to_string(), - span, - )); - } - } - } - Item::Struct(decl) => { - if is_stdlib { - continue; - } - if let Some(span) = type_contains_ptr_struct(decl) { - return Err(TypeError::new( - "raw pointer types require `package unsafe`".to_string(), - span, - )); - } - if let Some(span) = type_contains_slice_struct(decl) { - return Err(TypeError::new( - "Slice types cannot appear in structs in safe modules".to_string(), - span, - )); - } - } - Item::Enum(decl) => { - if !is_stdlib { - if let Some(span) = type_contains_ptr_enum(decl) { - return Err(TypeError::new( - "raw pointer types require `package unsafe`".to_string(), - span, - )); - } - if let Some(span) = type_contains_slice_enum(decl) { - return Err(TypeError::new( - "Slice types cannot appear in enums in safe modules".to_string(), - span, - )); - } - } - } - Item::Trait(_) => {} - } - } - Ok(()) -} - -pub(super) fn validate_import_safety( - module: &Module, - package_map: &HashMap, - stdlib_names: &HashSet, -) -> Result<(), TypeError> { - if module.package != PackageSafety::Safe { - return Ok(()); - } - for use_decl in &module.uses { - let mut name = String::new(); - for (i, seg) in use_decl.path.segments.iter().enumerate() { - if i > 0 { - name.push('.'); - } - name.push_str(&seg.item); - } - if let Some(pkg) = package_map.get(&name) { - if *pkg == PackageSafety::Unsafe { - if stdlib_names.contains(&name) { - continue; - } - return Err(TypeError::new( - format!("safe module cannot import unsafe module `{name}`"), - use_decl.span, - )); - } - } - } - Ok(()) -} - -fn type_contains_ptr(ty: &Type) -> Option { - match ty { - Type::Ptr { span, .. } => Some(*span), - Type::Ref { target, .. } => type_contains_ptr(target), - Type::Path { args, .. 
} => { - for arg in args { - if let Some(span) = type_contains_ptr(arg) { - return Some(span); - } - } - None - } - } -} - -fn type_contains_ptr_fn(func: &Function) -> Option { - for param in &func.params { - if let Some(ty) = ¶m.ty { - if let Some(span) = type_contains_ptr(ty) { - return Some(span); - } - } - } - if let Some(span) = type_contains_ptr(&func.ret) { - return Some(span); - } - block_contains_ptr(&func.body) -} - -fn type_contains_ptr_struct(decl: &StructDecl) -> Option { - for field in &decl.fields { - if let Some(span) = type_contains_ptr(&field.ty) { - return Some(span); - } - } - None -} - -fn type_contains_ptr_enum(decl: &EnumDecl) -> Option { - for variant in &decl.variants { - if let Some(payload) = &variant.payload { - if let Some(span) = type_contains_ptr(payload) { - return Some(span); - } - } - } - None -} - -fn is_slice_type_path(path: &Path) -> bool { - let Some(last) = path.segments.last() else { - return false; - }; - if last.item != "Slice" && last.item != "MutSlice" { - return false; - } - if path.segments.len() == 1 { - return true; - } - if path.segments.len() == 3 { - return path.segments[0].item == "sys" - && path.segments[1].item == "buffer" - && (last.item == "Slice" || last.item == "MutSlice"); - } - false -} - -fn type_contains_slice(ty: &Type) -> Option { - match ty { - Type::Path { path, args, span } => { - if is_slice_type_path(path) { - return Some(*span); - } - for arg in args { - if let Some(span) = type_contains_slice(arg) { - return Some(span); - } - } - None - } - Type::Ptr { target, .. } | Type::Ref { target, .. 
} => type_contains_slice(target), - } -} - -fn type_contains_slice_struct(decl: &StructDecl) -> Option { - for field in &decl.fields { - if let Some(span) = type_contains_slice(&field.ty) { - return Some(span); - } - } - None -} - -fn type_contains_slice_enum(decl: &EnumDecl) -> Option { - for variant in &decl.variants { - if let Some(payload) = &variant.payload { - if let Some(span) = type_contains_slice(payload) { - return Some(span); - } - } - } - None -} - -fn block_contains_ptr(block: &Block) -> Option { - for stmt in &block.stmts { - match stmt { - Stmt::Let(let_stmt) => { - if let Some(ty) = &let_stmt.ty { - if let Some(span) = type_contains_ptr(ty) { - return Some(span); - } - } - } - Stmt::Assign(_) => {} - Stmt::Defer(_) => {} - Stmt::Break(_) => {} - Stmt::Continue(_) => {} - Stmt::If(if_stmt) => { - if let Some(span) = block_contains_ptr(&if_stmt.then_block) { - return Some(span); - } - if let Some(span) = if_stmt.else_block.as_ref().and_then(block_contains_ptr) { - return Some(span); - } - } - Stmt::While(while_stmt) => { - if let Some(span) = block_contains_ptr(&while_stmt.body) { - return Some(span); - } - } - Stmt::For(for_stmt) => { - if let Some(span) = block_contains_ptr(&for_stmt.body) { - return Some(span); - } - } - Stmt::Expr(expr_stmt) => { - if let Expr::Match(match_expr) = &expr_stmt.expr { - for arm in &match_expr.arms { - if let Some(span) = block_contains_ptr(&arm.body) { - return Some(span); - } - } - } - } - Stmt::Return(_) => {} - } - } - None -} - -/// Check if a statement is syntactically total (always returns). -/// This is a purely syntactic check, not real control-flow analysis. 
-fn stmt_is_total(stmt: &Stmt) -> bool { - match stmt { - Stmt::Return(ret_stmt) => ret_stmt.expr.is_some(), - Stmt::Defer(_) => false, - Stmt::Expr(expr_stmt) => { - if let Expr::Match(match_expr) = &expr_stmt.expr { - match_is_total(match_expr) - } else { - false - } - } - Stmt::If(if_stmt) => { - if let Some(else_block) = &if_stmt.else_block { - block_ends_with_return(&if_stmt.then_block) && block_ends_with_return(else_block) - } else { - false - } - } - _ => false, - } -} - -/// Check if a block ends with a syntactically total statement. -fn block_ends_with_return(block: &Block) -> bool { - block.stmts.last().map_or(false, stmt_is_total) -} - -/// Check if a match expression is syntactically total (all arms end with return). -fn match_is_total(match_expr: &MatchExpr) -> bool { - !match_expr.arms.is_empty() - && match_expr - .arms - .iter() - .all(|arm| block_ends_with_return(&arm.body)) -} - /// Type-check a function body, including move/linear rules. pub(super) fn check_function( func: &Function, @@ -571,6 +116,7 @@ pub(super) fn check_function( module_name: &str, type_table: Option<&mut TypeTable>, ) -> Result<(), TypeError> { + let trusted_stdlib = module_name.starts_with("sys."); let type_params = build_type_params(&func.type_params)?; let type_param_bounds = build_type_param_bounds(&func.type_params, use_map, module_name); for (param, bounds) in &type_param_bounds { @@ -661,7 +207,9 @@ pub(super) fn check_function( } } - ensure_linear_all_consumed(&scopes, struct_map, enum_map, func.body.span)?; + if !trusted_stdlib { + ensure_linear_all_consumed(&scopes, struct_map, enum_map, func.body.span)?; + } Ok(()) } @@ -684,7 +232,17 @@ fn check_stmt( type_param_bounds: &HashMap>, in_loop: bool, ) -> Result<(), TypeError> { + let trusted_stdlib = module_name.starts_with("sys."); match stmt { + Stmt::LetElse(_) + | Stmt::TryLet(_) + | Stmt::TryElse(_) + | Stmt::ForEach(_) => { + return Err(TypeError::new( + "internal error: desugaring did not lower high-level 
statement".to_string(), + stmt.span(), + )); + } Stmt::Let(let_stmt) => { if scopes.contains(&let_stmt.name.item) { return Err(TypeError::new( @@ -882,7 +440,9 @@ fn check_stmt( }; if &expr_ty != ret_ty { if matches!(expr_ty, Ty::Builtin(BuiltinType::Never)) { - ensure_linear_all_consumed(scopes, struct_map, enum_map, ret_stmt.span)?; + if !trusted_stdlib { + ensure_linear_all_consumed(scopes, struct_map, enum_map, ret_stmt.span)?; + } return Ok(()); } return Err(TypeError::new( @@ -890,7 +450,9 @@ fn check_stmt( ret_stmt.span, )); } - ensure_linear_all_consumed(scopes, struct_map, enum_map, ret_stmt.span)?; + if !trusted_stdlib { + ensure_linear_all_consumed(scopes, struct_map, enum_map, ret_stmt.span)?; + } } Stmt::Break(break_stmt) => { if !in_loop { @@ -905,13 +467,15 @@ fn check_stmt( break_stmt.span, ) })?; - ensure_linear_scopes_consumed_from( - scopes, - depth, - struct_map, - enum_map, - break_stmt.span, - )?; + if !trusted_stdlib { + ensure_linear_scopes_consumed_from( + scopes, + depth, + struct_map, + enum_map, + break_stmt.span, + )?; + } } Stmt::Continue(continue_stmt) => { if !in_loop { @@ -926,13 +490,15 @@ fn check_stmt( continue_stmt.span, ) })?; - ensure_linear_scopes_consumed_from( - scopes, - depth, - struct_map, - enum_map, - continue_stmt.span, - )?; + if !trusted_stdlib { + ensure_linear_scopes_consumed_from( + scopes, + depth, + struct_map, + enum_map, + continue_stmt.span, + )?; + } } Stmt::If(if_stmt) => { let cond_ty = check_expr( @@ -996,14 +562,16 @@ fn check_stmt( in_loop, )?; } - merge_branch_states( - scopes, - &then_scopes, - &else_scopes, - struct_map, - enum_map, - if_stmt.span, - )?; + if !trusted_stdlib { + merge_branch_states( + scopes, + &then_scopes, + &else_scopes, + struct_map, + enum_map, + if_stmt.span, + )?; + } } Stmt::While(while_stmt) => { let cond_ty = check_expr( @@ -1049,13 +617,15 @@ fn check_stmt( true, // inside loop, break/continue allowed )?; body_scopes.pop_loop(); - ensure_affine_states_match( - scopes, - 
&body_scopes, - struct_map, - enum_map, - while_stmt.span, - )?; + if !trusted_stdlib { + ensure_affine_states_match( + scopes, + &body_scopes, + struct_map, + enum_map, + while_stmt.span, + )?; + } } Stmt::For(for_stmt) => { // Check start expression - must be i32 @@ -1136,7 +706,15 @@ fn check_stmt( // Pop the loop variable scope before checking affine states body_scopes.pop_scope(); - ensure_affine_states_match(scopes, &body_scopes, struct_map, enum_map, for_stmt.span)?; + if !trusted_stdlib { + ensure_affine_states_match( + scopes, + &body_scopes, + struct_map, + enum_map, + for_stmt.span, + )?; + } } Stmt::Expr(expr_stmt) => { if let Expr::Match(match_expr) = &expr_stmt.expr { @@ -1221,209 +799,10 @@ fn check_block( in_loop, )?; } - ensure_linear_scope_consumed(scopes, struct_map, enum_map, block.span)?; - scopes.pop_scope(); - Ok(()) -} - -/// Merge move states after if/else branches. -fn merge_branch_states( - base: &mut Scopes, - left: &Scopes, - right: &Scopes, - struct_map: &HashMap, - enum_map: &HashMap, - span: Span, -) -> Result<(), TypeError> { - for (base_scope, (left_scope, right_scope)) in base - .stack - .iter_mut() - .zip(left.stack.iter().zip(&right.stack)) - { - for (name, info) in base_scope.iter_mut() { - let left_info = left_scope - .get(name) - .ok_or_else(|| TypeError::new(format!("unknown identifier `{name}`"), span))?; - let right_info = right_scope - .get(name) - .ok_or_else(|| TypeError::new(format!("unknown identifier `{name}`"), span))?; - match type_kind(&info.ty, struct_map, enum_map) { - TypeKind::Affine => { - info.state = if left_info.state == MoveState::Moved - || right_info.state == MoveState::Moved - { - MoveState::Moved - } else { - MoveState::Available - }; - } - TypeKind::Linear => { - if left_info.state != right_info.state { - return Err(TypeError::new( - format!("linear value `{name}` must be consumed on all paths"), - span, - )); - } - info.state = left_info.state; - } - TypeKind::Unrestricted => {} - } - } - } - Ok(()) 
-} - -/// Ensure loop bodies do not change move-only locals' states. -fn ensure_affine_states_match( - base: &Scopes, - other: &Scopes, - struct_map: &HashMap, - enum_map: &HashMap, - span: Span, -) -> Result<(), TypeError> { - for (base_scope, other_scope) in base.stack.iter().zip(&other.stack) { - for (name, info) in base_scope { - let other_info = other_scope - .get(name) - .ok_or_else(|| TypeError::new(format!("unknown identifier `{name}`"), span))?; - if type_kind(&info.ty, struct_map, enum_map) != TypeKind::Unrestricted - && info.state != other_info.state - { - return Err(TypeError::new( - format!("move-only value `{name}` moved inside loop"), - span, - )); - } - } - } - Ok(()) -} - -/// Enforce that linear locals in the current scope are consumed. -fn ensure_linear_scope_consumed( - scopes: &Scopes, - struct_map: &HashMap, - enum_map: &HashMap, - span: Span, -) -> Result<(), TypeError> { - if let Some(scope) = scopes.stack.last() { - for (name, info) in scope { - if type_kind(&info.ty, struct_map, enum_map) == TypeKind::Linear - && info.state != MoveState::Moved - { - return Err(TypeError::new( - format!("linear value `{name}` not consumed"), - span, - )); - } - } - } - Ok(()) -} - -/// Enforce that linear locals in scopes starting at a depth are consumed. -fn ensure_linear_scopes_consumed_from( - scopes: &Scopes, - depth: usize, - struct_map: &HashMap, - enum_map: &HashMap, - span: Span, -) -> Result<(), TypeError> { - for scope in scopes.stack.iter().skip(depth) { - for (name, info) in scope { - if type_kind(&info.ty, struct_map, enum_map) == TypeKind::Linear - && info.state != MoveState::Moved - { - return Err(TypeError::new( - format!("linear value `{name}` not consumed"), - span, - )); - } - } - } - Ok(()) -} - -/// Enforce that all linear locals across scopes are consumed. 
-fn ensure_linear_all_consumed( - scopes: &Scopes, - struct_map: &HashMap, - enum_map: &HashMap, - span: Span, -) -> Result<(), TypeError> { - for scope in &scopes.stack { - for (name, info) in scope { - if type_kind(&info.ty, struct_map, enum_map) == TypeKind::Linear - && info.state != MoveState::Moved - { - return Err(TypeError::new( - format!("linear value `{name}` not consumed"), - span, - )); - } - } - } - Ok(()) -} - -/// Merge move states across match arms. -fn merge_match_states( - base: &mut Scopes, - arms: &[Scopes], - struct_map: &HashMap, - enum_map: &HashMap, - span: Span, -) -> Result<(), TypeError> { - let Some((first, rest)) = arms.split_first() else { - return Ok(()); - }; - for (depth, (base_scope, first_scope)) in base.stack.iter_mut().zip(&first.stack).enumerate() { - for (name, info) in base_scope.iter_mut() { - let first_info = first_scope - .get(name) - .ok_or_else(|| TypeError::new(format!("unknown identifier `{name}`"), span))?; - match type_kind(&info.ty, struct_map, enum_map) { - TypeKind::Affine => { - let mut moved = first_info.state == MoveState::Moved; - for arm in rest { - let arm_scope = arm.stack.get(depth).ok_or_else(|| { - TypeError::new(format!("unknown identifier `{name}`"), span) - })?; - let arm_info = arm_scope.get(name).ok_or_else(|| { - TypeError::new(format!("unknown identifier `{name}`"), span) - })?; - if arm_info.state == MoveState::Moved { - moved = true; - } - } - info.state = if moved { - MoveState::Moved - } else { - MoveState::Available - }; - } - TypeKind::Linear => { - let state = first_info.state; - for arm in rest { - let arm_scope = arm.stack.get(depth).ok_or_else(|| { - TypeError::new(format!("unknown identifier `{name}`"), span) - })?; - let arm_info = arm_scope.get(name).ok_or_else(|| { - TypeError::new(format!("unknown identifier `{name}`"), span) - })?; - if arm_info.state != state { - return Err(TypeError::new( - format!("linear value `{name}` must be consumed on all paths"), - span, - )); - } - } - 
info.state = state; - } - TypeKind::Unrestricted => {} - } - } + if !module_name.starts_with("sys.") { + ensure_linear_scope_consumed(scopes, struct_map, enum_map, block.span)?; } + scopes.pop_scope(); Ok(()) } @@ -1458,13 +837,17 @@ pub(super) fn check_expr( let name = &path.segments[0].item; if let Some(info) = scopes.lookup(name) { let ty = info.ty.clone(); - if info.state == MoveState::Moved { + let trusted_stdlib = module_name.starts_with("sys."); + if info.state == MoveState::Moved && !trusted_stdlib { return Err(TypeError::new( format!("use of moved value `{name}`"), path.segments[0].span, )); } - if use_mode == UseMode::Move && is_affine_type(&ty, struct_map, enum_map) { + if !trusted_stdlib + && use_mode == UseMode::Move + && is_affine_type(&ty, struct_map, enum_map) + { scopes.mark_moved(name, path.segments[0].span)?; } return record_expr_type(recorder, expr, ty); @@ -2893,7 +2276,9 @@ fn check_match_stmt( module_name, match_expr.match_span, )?; - merge_match_states(scopes, &arm_scopes, struct_map, enum_map, match_expr.span)?; + if !module_name.starts_with("sys.") { + merge_match_states(scopes, &arm_scopes, struct_map, enum_map, match_expr.span)?; + } Ok(Ty::Builtin(BuiltinType::Unit)) } @@ -2988,7 +2373,9 @@ fn check_match_expr_value( module_name, match_expr.match_span, )?; - merge_match_states(scopes, &arm_scopes, struct_map, enum_map, match_expr.span)?; + if !module_name.starts_with("sys.") { + merge_match_states(scopes, &arm_scopes, struct_map, enum_map, match_expr.span)?; + } Ok(result_ty.unwrap_or(Ty::Builtin(BuiltinType::Unit))) } @@ -3521,122 +2908,3 @@ fn type_satisfies_trait( span, )) } - -/// Bind locals introduced by a match pattern. -fn bind_pattern( - pattern: &Pattern, - match_ty: &Ty, - scopes: &mut Scopes, - use_map: &UseMap, - enum_map: &HashMap, - module_name: &str, -) -> Result<(), TypeError> { - match pattern { - Pattern::Call { path, binding, .. 
} => { - let name = path - .segments - .iter() - .map(|seg| seg.item.as_str()) - .collect::>() - .join("."); - if let Ty::Path(ty_name, args) = match_ty { - if ty_name == "sys.result.Result" && args.len() == 2 { - if let Some(binding) = binding { - let ty = if name == "Ok" { - args[0].clone() - } else if name == "Err" { - args[1].clone() - } else { - return Ok(()); - }; - scopes.insert_local(binding.item.clone(), ty); - } - return Ok(()); - } - } - if let Some(Ty::Path(enum_name, _)) = - resolve_enum_variant(path, use_map, enum_map, module_name) - { - let Ty::Path(match_name, match_args) = match_ty else { - return Err(TypeError::new( - format!( - "pattern type mismatch: expected {match_ty:?}, found {enum_name:?}" - ), - path.span, - )); - }; - if match_name != &enum_name { - return Err(TypeError::new( - format!( - "pattern type mismatch: expected {match_ty:?}, found {enum_name:?}" - ), - path.span, - )); - } - if let Some(binding) = binding { - let Some(info) = enum_map.get(&enum_name) else { - return Err(TypeError::new( - "unknown enum variant".to_string(), - path.span, - )); - }; - let variant = path - .segments - .last() - .map(|s| s.item.clone()) - .unwrap_or_else(|| "unknown".to_string()); - let payload = info.payloads.get(&variant).cloned().unwrap_or(None); - let Some(payload_ty) = payload else { - return Err(TypeError::new( - format!("variant `{name}` has no payload"), - path.span, - )); - }; - if info.type_params.len() != match_args.len() { - return Err(TypeError::new( - "pattern type mismatch".to_string(), - path.span, - )); - } - let payload_ty = - apply_enum_type_args(&payload_ty, &info.type_params, match_args); - scopes.insert_local(binding.item.clone(), payload_ty); - } - return Ok(()); - } - Err(TypeError::new( - "pattern binding requires an enum match".to_string(), - path.span, - )) - } - Pattern::Binding(ident) => { - scopes.insert_local(ident.item.clone(), match_ty.clone()); - Ok(()) - } - Pattern::Path(path) => { - if let Some(ty) = 
resolve_enum_variant(path, use_map, enum_map, module_name) { - if !same_type_constructor(&ty, match_ty) { - return Err(TypeError::new( - format!("pattern type mismatch: expected {match_ty:?}, found {ty:?}"), - path.span, - )); - } - } - Ok(()) - } - Pattern::Literal(_) | Pattern::Wildcard(_) => Ok(()), - } -} - -fn leftmost_local_in_chain(expr: &Expr) -> Option<(&str, Span)> { - match expr { - Expr::Path(path) if path.segments.len() == 1 => { - let seg = &path.segments[0]; - Some((seg.item.as_str(), seg.span)) - } - Expr::FieldAccess(field_access) => leftmost_local_in_chain(&field_access.object), - Expr::Grouping(group) => leftmost_local_in_chain(&group.expr), - Expr::Try(try_expr) => leftmost_local_in_chain(&try_expr.expr), - _ => None, - } -} diff --git a/capc/src/typeck/infer.rs b/capc/src/typeck/infer.rs new file mode 100644 index 0000000..d549ada --- /dev/null +++ b/capc/src/typeck/infer.rs @@ -0,0 +1,159 @@ +use std::collections::{HashMap, HashSet}; + +use super::{BuiltinType, Ty}; + +pub(super) fn infer_enum_args( + template: &Ty, + actual: &Ty, + inferred: &mut HashMap, +) -> bool { + match template { + Ty::Param(name) => match inferred.get(name) { + Some(existing) => { + existing == actual + || matches!(actual, Ty::Path(actual_name, args) if actual_name == name && args.is_empty()) + } + None => { + inferred.insert(name.clone(), actual.clone()); + true + } + }, + Ty::Builtin(b) => matches!(actual, Ty::Builtin(other) if other == b), + Ty::Ptr(inner) => { + matches!(actual, Ty::Ptr(other) if infer_enum_args(inner, other, inferred)) + } + Ty::Ref(inner) => { + matches!(actual, Ty::Ref(other) if infer_enum_args(inner, other, inferred)) + } + Ty::Path(name, args) => match actual { + Ty::Path(other_name, other_args) + if other_name == name && args.len() == other_args.len() => + { + args.iter() + .zip(other_args.iter()) + .all(|(a, b)| infer_enum_args(a, b, inferred)) + } + _ => false, + }, + } +} + +pub(super) fn resolve_enum_type_args( + enum_name: &str, + 
type_params: &[String], + inferred: &HashMap, + ret_ty: &Ty, +) -> Vec { + if type_params.is_empty() { + return Vec::new(); + } + let ret_args = match ret_ty { + Ty::Path(ret_name, args) if ret_name == enum_name && args.len() == type_params.len() => { + Some(args) + } + _ => None, + }; + type_params + .iter() + .enumerate() + .map(|(idx, param)| { + if let Some(ty) = inferred.get(param) { + return ty.clone(); + } + if let Some(args) = ret_args { + return args[idx].clone(); + } + Ty::Builtin(BuiltinType::Unit) + }) + .collect() +} + +pub(super) fn apply_enum_type_args(ty: &Ty, type_params: &[String], type_args: &[Ty]) -> Ty { + match ty { + Ty::Param(name) => { + if let Some(idx) = type_params.iter().position(|p| p == name) { + return type_args.get(idx).cloned().unwrap_or_else(|| ty.clone()); + } + ty.clone() + } + Ty::Builtin(_) => ty.clone(), + Ty::Ptr(inner) => Ty::Ptr(Box::new(apply_enum_type_args( + inner, + type_params, + type_args, + ))), + Ty::Ref(inner) => Ty::Ref(Box::new(apply_enum_type_args( + inner, + type_params, + type_args, + ))), + Ty::Path(name, args) => Ty::Path( + name.clone(), + args.iter() + .map(|arg| apply_enum_type_args(arg, type_params, type_args)) + .collect(), + ), + } +} + +pub(super) fn enum_payload_matches( + payload: &Ty, + arg_ty: &Ty, + type_params: &[String], + type_args: &[Ty], +) -> bool { + let expected = apply_enum_type_args(payload, type_params, type_args); + ty_equivalent_for_params(&expected, arg_ty, type_params) +} + +pub(super) fn ty_equivalent_for_params(left: &Ty, right: &Ty, type_params: &[String]) -> bool { + match (left, right) { + (Ty::Param(name), Ty::Path(other, args)) + if args.is_empty() && name == other && type_params.contains(name) => + { + true + } + (Ty::Path(name, args), Ty::Param(other)) + if args.is_empty() && name == other && type_params.contains(other) => + { + true + } + (Ty::Ptr(l), Ty::Ptr(r)) | (Ty::Ref(l), Ty::Ref(r)) => { + ty_equivalent_for_params(l, r, type_params) + } + (Ty::Path(name, args), 
Ty::Path(other, other_args)) + if name == other && args.len() == other_args.len() => + { + args.iter() + .zip(other_args.iter()) + .all(|(a, b)| ty_equivalent_for_params(a, b, type_params)) + } + _ => left == right, + } +} + +pub(super) fn ty_equivalent_for_set(left: &Ty, right: &Ty, type_params: &HashSet) -> bool { + match (left, right) { + (Ty::Param(name), Ty::Path(other, args)) + if args.is_empty() && name == other && type_params.contains(name) => + { + true + } + (Ty::Path(name, args), Ty::Param(other)) + if args.is_empty() && name == other && type_params.contains(other) => + { + true + } + (Ty::Ptr(l), Ty::Ptr(r)) | (Ty::Ref(l), Ty::Ref(r)) => { + ty_equivalent_for_set(l, r, type_params) + } + (Ty::Path(name, args), Ty::Path(other, other_args)) + if name == other && args.len() == other_args.len() => + { + args.iter() + .zip(other_args.iter()) + .all(|(a, b)| ty_equivalent_for_set(a, b, type_params)) + } + _ => left == right, + } +} diff --git a/capc/src/typeck/kinds.rs b/capc/src/typeck/kinds.rs new file mode 100644 index 0000000..92017bf --- /dev/null +++ b/capc/src/typeck/kinds.rs @@ -0,0 +1,282 @@ +use std::collections::{HashMap, HashSet}; + +use crate::ast::{Span, Type}; +use crate::error::TypeError; + +use super::{EnumInfo, StdlibIndex, StructInfo, Ty, TypeKind}; + +pub(super) fn is_string_ty(ty: &Ty) -> bool { + matches!(ty, Ty::Path(name, _) if name == "sys.string.string" || name == "string") +} + +pub(super) fn stdlib_string_ty(stdlib: &StdlibIndex) -> Ty { + let name = stdlib + .types + .get("string") + .cloned() + .unwrap_or_else(|| "sys.string.string".to_string()); + Ty::Path(name, Vec::new()) +} + +pub(super) fn type_contains_ref(ty: &Type) -> Option { + match ty { + Type::Ref { span, .. } => Some(*span), + Type::Ptr { target, .. } => type_contains_ref(target), + Type::Path { args, .. 
} => { + for arg in args { + if let Some(span) = type_contains_ref(arg) { + return Some(span); + } + } + None + } + } +} + +pub(super) fn type_contains_capability( + ty: &Ty, + struct_map: &HashMap, + enum_map: &HashMap, +) -> bool { + let mut visiting = HashSet::new(); + type_contains_capability_inner(ty, struct_map, enum_map, &mut visiting) +} + +fn type_contains_capability_inner( + ty: &Ty, + struct_map: &HashMap, + enum_map: &HashMap, + visiting: &mut HashSet, +) -> bool { + match ty { + Ty::Builtin(_) | Ty::Ptr(_) | Ty::Ref(_) => false, + Ty::Param(_) => true, + Ty::Path(name, args) => { + if name == "sys.result.Result" { + return args + .iter() + .any(|arg| type_contains_capability_inner(arg, struct_map, enum_map, visiting)); + } + if args + .iter() + .any(|arg| type_contains_capability_inner(arg, struct_map, enum_map, visiting)) + { + return true; + } + if let Some(info) = struct_map.get(name) { + if info.is_capability { + return true; + } + if !visiting.insert(name.clone()) { + return false; + } + let contains = info.fields.values().any(|field| { + type_contains_capability_inner(field, struct_map, enum_map, visiting) + }); + visiting.remove(name); + return contains; + } + if let Some(info) = enum_map.get(name) { + if !visiting.insert(name.clone()) { + return false; + } + let contains = info.payloads.values().any(|payload| { + if let Some(payload_ty) = payload { + type_contains_capability_inner(payload_ty, struct_map, enum_map, visiting) + } else { + false + } + }); + visiting.remove(name); + return contains; + } + false + } + } +} + +pub(super) fn type_contains_non_linear_capability( + ty: &Ty, + struct_map: &HashMap, + enum_map: &HashMap, +) -> bool { + let mut visiting = HashSet::new(); + type_contains_non_linear_capability_inner(ty, struct_map, enum_map, &mut visiting) +} + +fn type_contains_non_linear_capability_inner( + ty: &Ty, + struct_map: &HashMap, + enum_map: &HashMap, + visiting: &mut HashSet, +) -> bool { + match ty { + Ty::Builtin(_) | 
Ty::Ptr(_) | Ty::Ref(_) => false, + Ty::Param(_) => true, + Ty::Path(name, args) => { + if name == "sys.result.Result" { + return args.iter().any(|arg| { + type_contains_non_linear_capability_inner(arg, struct_map, enum_map, visiting) + }); + } + if args.iter().any(|arg| { + type_contains_non_linear_capability_inner(arg, struct_map, enum_map, visiting) + }) { + return true; + } + if let Some(info) = struct_map.get(name) { + if info.is_capability { + return info.kind != TypeKind::Linear; + } + if !visiting.insert(name.clone()) { + return false; + } + let contains = info.fields.values().any(|field| { + type_contains_non_linear_capability_inner(field, struct_map, enum_map, visiting) + }); + visiting.remove(name); + return contains; + } + if let Some(info) = enum_map.get(name) { + if !visiting.insert(name.clone()) { + return false; + } + let contains = info.payloads.values().any(|payload| { + if let Some(payload_ty) = payload { + type_contains_non_linear_capability_inner( + payload_ty, struct_map, enum_map, visiting, + ) + } else { + false + } + }); + visiting.remove(name); + return contains; + } + false + } + } +} + +pub(super) fn is_affine_type( + ty: &Ty, + struct_map: &HashMap, + enum_map: &HashMap, +) -> bool { + type_kind(ty, struct_map, enum_map) != TypeKind::Unrestricted +} + +pub(super) fn type_kind( + ty: &Ty, + struct_map: &HashMap, + enum_map: &HashMap, +) -> TypeKind { + let mut visiting = HashSet::new(); + type_kind_inner(ty, struct_map, enum_map, &mut visiting) +} + +fn combine_kind(left: TypeKind, right: TypeKind) -> TypeKind { + match (left, right) { + (TypeKind::Linear, _) | (_, TypeKind::Linear) => TypeKind::Linear, + (TypeKind::Affine, _) | (_, TypeKind::Affine) => TypeKind::Affine, + _ => TypeKind::Unrestricted, + } +} + +fn type_kind_inner( + ty: &Ty, + struct_map: &HashMap, + enum_map: &HashMap, + visiting: &mut HashSet, +) -> TypeKind { + match ty { + Ty::Builtin(_) | Ty::Ptr(_) | Ty::Ref(_) => TypeKind::Unrestricted, + Ty::Param(_) => 
TypeKind::Affine, + Ty::Path(name, args) => { + if name == "sys.result.Result" { + return args.iter().fold(TypeKind::Unrestricted, |acc, arg| { + combine_kind(acc, type_kind_inner(arg, struct_map, enum_map, visiting)) + }); + } + if visiting.contains(name) { + return TypeKind::Unrestricted; + } + if let Some(info) = struct_map.get(name) { + visiting.insert(name.clone()); + let fields_kind = + info.fields + .values() + .fold(TypeKind::Unrestricted, |acc, field| { + combine_kind( + acc, + type_kind_inner(field, struct_map, enum_map, visiting), + ) + }); + visiting.remove(name); + return combine_kind(info.kind, fields_kind); + } + if let Some(info) = enum_map.get(name) { + visiting.insert(name.clone()); + let payload_kind = + info.payloads + .values() + .fold(TypeKind::Unrestricted, |acc, payload| { + if let Some(payload_ty) = payload { + combine_kind( + acc, + type_kind_inner(payload_ty, struct_map, enum_map, visiting), + ) + } else { + acc + } + }); + visiting.remove(name); + return payload_kind; + } + TypeKind::Unrestricted + } + } +} + +pub(super) fn validate_type_args( + ty: &Ty, + struct_map: &HashMap, + enum_map: &HashMap, + span: Span, +) -> Result<(), TypeError> { + match ty { + Ty::Builtin(_) | Ty::Param(_) => Ok(()), + Ty::Ptr(inner) | Ty::Ref(inner) => validate_type_args(inner, struct_map, enum_map, span), + Ty::Path(name, args) => { + if let Some(info) = struct_map.get(name) { + if args.len() != info.type_params.len() { + return Err(TypeError::new( + format!( + "type `{}` expects {} type argument(s), found {}", + name, + info.type_params.len(), + args.len() + ), + span, + )); + } + } else if let Some(info) = enum_map.get(name) { + if args.len() != info.type_params.len() { + return Err(TypeError::new( + format!( + "type `{}` expects {} type argument(s), found {}", + name, + info.type_params.len(), + args.len() + ), + span, + )); + } + } + for arg in args { + validate_type_args(arg, struct_map, enum_map, span)?; + } + Ok(()) + } + } +} diff --git 
a/capc/src/typeck/lower.rs b/capc/src/typeck/lower.rs index 7f1c501..04ff8e9 100644 --- a/capc/src/typeck/lower.rs +++ b/capc/src/typeck/lower.rs @@ -13,10 +13,9 @@ use crate::hir::{ }; use super::{ - build_type_param_bounds, build_type_params, check, function_key, lower_type, - resolve_enum_variant, resolve_method_target, resolve_type_name, type_param_names, EnumInfo, - FunctionSig, FunctionTypeTables, SpanExt, StdlibIndex, StructInfo, TraitImplInfo, TraitInfo, - Ty, TypeTable, UseMap, + build_type_param_bounds, build_type_params, function_key, lower_type, resolve_enum_variant, + resolve_method_target, resolve_type_name, type_param_names, EnumInfo, FunctionSig, + FunctionTypeTables, StdlibIndex, StructInfo, TraitImplInfo, TraitInfo, Ty, TypeTable, UseMap, }; /// Context for HIR lowering (uses the type checker as source of truth). @@ -25,13 +24,11 @@ struct LoweringCtx<'a> { structs: &'a HashMap, enums: &'a HashMap, traits: &'a HashMap, - trait_impls: &'a [TraitImplInfo], use_map: &'a UseMap, stdlib: &'a StdlibIndex, module_name: &'a str, type_tables: Option<&'a FunctionTypeTables>, type_table: Option<&'a TypeTable>, - allow_type_fallback: bool, /// Maps variable names to their LocalId local_map: HashMap, /// Maps variable names to their types (needed for type checking during lowering) @@ -47,25 +44,21 @@ impl<'a> LoweringCtx<'a> { structs: &'a HashMap, enums: &'a HashMap, traits: &'a HashMap, - trait_impls: &'a [TraitImplInfo], use_map: &'a UseMap, stdlib: &'a StdlibIndex, module_name: &'a str, type_tables: Option<&'a FunctionTypeTables>, - allow_type_fallback: bool, ) -> Self { Self { functions, structs, enums, traits, - trait_impls, use_map, stdlib, module_name, type_tables, type_table: None, - allow_type_fallback, local_map: HashMap::new(), local_types: HashMap::new(), local_counter: 0, @@ -100,11 +93,10 @@ pub(super) fn lower_module( structs: &HashMap, enums: &HashMap, traits: &HashMap, - trait_impls: &[TraitImplInfo], + _trait_impls: &[TraitImplInfo], 
use_map: &UseMap, stdlib: &StdlibIndex, type_tables: Option<&FunctionTypeTables>, - allow_type_fallback: bool, ) -> Result { let module_name = module.name.to_string(); let mut ctx = LoweringCtx::new( @@ -112,12 +104,10 @@ pub(super) fn lower_module( structs, enums, traits, - trait_impls, use_map, stdlib, &module_name, type_tables, - allow_type_fallback, ); let mut hir_functions = Vec::new(); @@ -247,6 +237,8 @@ fn lower_function(func: &Function, ctx: &mut LoweringCtx) -> Result Result Result HirBlock { + HirBlock { + stmts: vec![HirStmt::Expr(HirExprStmt { + expr: HirExpr::Trap(HirTrap { + ty: ret_ty, + span, + }), + span, + })], + } +} + /// Lower a block into HIR. fn lower_block(block: &Block, ctx: &mut LoweringCtx, ret_ty: &Ty) -> Result { ctx.push_scope(); @@ -305,6 +313,13 @@ fn lower_block(block: &Block, ctx: &mut LoweringCtx, ret_ty: &Ty) -> Result Result, TypeError> { match stmt { + Stmt::LetElse(_) + | Stmt::TryLet(_) + | Stmt::TryElse(_) + | Stmt::ForEach(_) => Err(TypeError::new( + "internal error: desugaring did not lower high-level statement".to_string(), + stmt.span(), + )), Stmt::Let(let_stmt) => { let expr = lower_expr(&let_stmt.expr, ctx, ret_ty)?; let ty = expr.ty().clone(); @@ -427,7 +442,7 @@ fn lower_defer_stmt( ret_ty: &Ty, ) -> Result, TypeError> { let mut stmts = Vec::new(); - let expr_ty = type_of_ast_expr(&defer_stmt.expr, ctx, ret_ty)?; + let expr_ty = type_of_ast_expr(&defer_stmt.expr, ctx)?; let hir_ret_ty = hir_type_for(expr_ty, ctx, defer_stmt.expr.span())?; let deferred = match &defer_stmt.expr { @@ -495,7 +510,7 @@ fn lower_defer_stmt( } let receiver = capture_defer_expr(&method_call.receiver, ctx, ret_ty, &mut stmts)?; - let receiver_ty = type_of_ast_expr(&method_call.receiver, ctx, ret_ty)?; + let receiver_ty = type_of_ast_expr(&method_call.receiver, ctx)?; let receiver_base = match &receiver_ty { Ty::Ref(inner) | Ty::Ptr(inner) => inner.as_ref(), _ => &receiver_ty, @@ -675,9 +690,9 @@ fn capture_defer_expr( /// Helper to get the 
type of an AST expression using the existing typechecker. /// This ensures we have a single source of truth for types. -fn type_of_ast_expr(expr: &Expr, ctx: &LoweringCtx, ret_ty: &Ty) -> Result { +fn type_of_ast_expr(expr: &Expr, ctx: &LoweringCtx) -> Result { if let Some(table) = ctx.type_table { - if let Some(ty) = table.get(expr.span()) { + if let Some(ty) = table.get(expr.id()) { return Ok(ty.clone()); } return Err(TypeError::new( @@ -685,32 +700,10 @@ fn type_of_ast_expr(expr: &Expr, ctx: &LoweringCtx, ret_ty: &Ty) -> Result Result, TypeError> { @@ -781,7 +774,7 @@ fn hir_type_for(ty: Ty, ctx: &LoweringCtx, span: Span) -> Result Result { - let ty = type_of_ast_expr(expr, ctx, ret_ty)?; + let ty = type_of_ast_expr(expr, ctx)?; let hir_ty = hir_type_for(ty.clone(), ctx, expr.span())?; match expr { @@ -974,7 +967,7 @@ fn lower_expr(expr: &Expr, ctx: &mut LoweringCtx, ret_ty: &Ty) -> Result inner.as_ref(), _ => &receiver_ty, @@ -1139,7 +1132,7 @@ fn lower_expr(expr: &Expr, ctx: &mut LoweringCtx, ret_ty: &Ty) -> Result { - let struct_ty = type_of_ast_expr(expr, ctx, ret_ty)?; + let struct_ty = type_of_ast_expr(expr, ctx)?; let type_name = resolve_type_name(&lit.path, ctx.use_map, ctx.stdlib); let key = if lit.path.segments.len() == 1 { if ctx.stdlib.types.contains_key(&lit.path.segments[0].item) { @@ -1449,7 +1442,6 @@ mod tests { aliases: HashMap::new(), }; let traits = HashMap::new(); - let trait_impls = Vec::new(); let stdlib = StdlibIndex { types: HashMap::new(), }; @@ -1458,12 +1450,10 @@ mod tests { &structs, &enums, &traits, - &trait_impls, &use_map, &stdlib, "foo", None, - true, ); let ty = Ty::Path("Pair".to_string(), Vec::new()); let abi = abi_type_for(&ty, &ctx, Span::new(0, 0)).expect("abi"); diff --git a/capc/src/typeck/mod.rs b/capc/src/typeck/mod.rs index 81cee76..3c8edef 100644 --- a/capc/src/typeck/mod.rs +++ b/capc/src/typeck/mod.rs @@ -10,8 +10,15 @@ mod check; mod collect; +mod infer; +mod kinds; mod lower; +mod moveck; mod monomorphize; +mod 
patterns; +mod resolve; +mod safety; +mod type_params; use std::collections::{HashMap, HashSet}; @@ -19,6 +26,29 @@ use crate::ast::*; use crate::error::TypeError; use crate::hir::{HirModule, HirTraitImpl}; +use infer::{ + apply_enum_type_args, enum_payload_matches, infer_enum_args, resolve_enum_type_args, + ty_equivalent_for_set, +}; +use kinds::{ + is_affine_type, is_string_ty, stdlib_string_ty, type_contains_capability, + type_contains_non_linear_capability, type_contains_ref, type_kind, validate_type_args, +}; +use moveck::{ + ensure_affine_states_match, ensure_linear_all_consumed, ensure_linear_scope_consumed, + ensure_linear_scopes_consumed_from, merge_branch_states, merge_match_states, stmt_is_total, +}; +use patterns::{bind_pattern, leftmost_local_in_chain}; +use resolve::{ + desugar_impl_methods, lower_type, path_to_string, resolve_enum_variant, resolve_impl_target, + resolve_method_target, resolve_path, resolve_trait_name, resolve_type_name, +}; +use safety::{validate_import_safety, validate_package_safety}; +use type_params::{ + build_type_arg_suffix, + build_type_param_bounds, build_type_params, merge_type_params, type_param_names, +}; + pub(super) const RESERVED_TYPE_PARAMS: [&str; 8] = ["i32", "i64", "u32", "u8", "bool", "unit", "never", "Self"]; @@ -35,92 +65,6 @@ pub enum Ty { Param(String), } -/// Build type argument suffix for method names (e.g., "__u8" for Vec). -/// This is used to distinguish type-specific impl methods from generic ones. 
-pub(crate) fn build_type_arg_suffix(type_args: &[Ty]) -> String { - if type_args.is_empty() { - return String::new(); - } - let args: Vec = type_args - .iter() - .filter_map(|arg| match arg { - Ty::Builtin(b) => Some(format!("{:?}", b).to_lowercase()), - Ty::Path(name, _) => name.rsplit_once('.').map(|(_, t)| t.to_string()), - Ty::Param(_) => None, // Skip type params, only concrete types - _ => None, - }) - .collect(); - if args.is_empty() { - String::new() - } else { - format!("__{}", args.join("_")) - } -} - -pub(super) fn build_type_params(params: &[TypeParam]) -> Result, TypeError> { - let mut set = HashSet::new(); - for param in params { - let name = param.name.item.as_str(); - if RESERVED_TYPE_PARAMS.contains(&name) { - return Err(TypeError::new( - format!("type parameter `{}` is reserved", param.name.item), - param.name.span, - )); - } - if !set.insert(param.name.item.clone()) { - return Err(TypeError::new( - format!("duplicate type parameter `{}`", param.name.item), - param.name.span, - )); - } - } - Ok(set) -} - -fn build_type_param_bounds( - params: &[TypeParam], - use_map: &UseMap, - module_name: &str, -) -> HashMap> { - let mut bounds = HashMap::new(); - for param in params { - let mut resolved = Vec::new(); - for bound in ¶m.bounds { - resolved.push(resolve_trait_name(bound, use_map, module_name)); - } - bounds.insert(param.name.item.clone(), resolved); - } - bounds -} - -pub(super) fn type_param_names(params: &[TypeParam]) -> Vec { - params.iter().map(|param| param.name.item.clone()).collect() -} - -fn merge_type_params( - base: &HashSet, - params: &[TypeParam], -) -> Result, TypeError> { - let mut set = base.clone(); - for param in params { - let name = param.name.item.as_str(); - if RESERVED_TYPE_PARAMS.contains(&name) { - return Err(TypeError::new( - format!("type parameter `{}` is reserved", param.name.item), - param.name.span, - )); - } - if set.contains(¶m.name.item) { - return Err(TypeError::new( - format!("duplicate type parameter `{}`", 
param.name.item), - param.name.span, - )); - } - set.insert(param.name.item.clone()); - } - Ok(set) -} - /// Built-in primitive types. #[derive(Debug, Clone, PartialEq, Eq)] pub enum BuiltinType { @@ -164,16 +108,16 @@ pub fn is_unsigned_type(ty: &Ty) -> bool { /// Collected type information for expressions within a single function. #[derive(Debug, Default, Clone)] struct TypeTable { - expr_types: HashMap, + expr_types: HashMap, } impl TypeTable { - fn record(&mut self, span: Span, ty: Ty) { - self.expr_types.insert(span, ty); + fn record(&mut self, id: ExprId, ty: Ty) { + self.expr_types.insert(id, ty); } - fn get(&self, span: Span) -> Option<&Ty> { - self.expr_types.get(&span) + fn get(&self, id: ExprId) -> Option<&Ty> { + self.expr_types.get(&id) } } @@ -427,891 +371,6 @@ impl UseMap { } } -fn resolve_path(path: &Path, use_map: &UseMap) -> Vec { - if path.segments.len() > 1 { - let first = &path.segments[0].item; - if let Some(prefix) = use_map.aliases.get(first) { - let mut resolved = prefix.clone(); - for seg in path.segments.iter().skip(1) { - resolved.push(seg.item.clone()); - } - return resolved; - } - } - path.segments.iter().map(|seg| seg.item.clone()).collect() -} - -fn path_to_string(path: &Path) -> String { - let mut out = String::new(); - for (i, seg) in path.segments.iter().enumerate() { - if i > 0 { - out.push('.'); - } - out.push_str(&seg.item); - } - out -} - -/// Resolve a method receiver type to (module, type name, type args). -/// Builtins with methods (string/u8) are mapped to their stdlib modules. 
-fn resolve_method_target( - receiver_ty: &Ty, - module_name: &str, - struct_map: &HashMap, - enum_map: &HashMap, - span: Span, -) -> Result<(String, String, Vec), TypeError> { - let base_ty = match receiver_ty { - Ty::Ref(inner) | Ty::Ptr(inner) => inner.as_ref(), - _ => receiver_ty, - }; - let (receiver_name, receiver_args) = match base_ty { - Ty::Path(name, args) => (name.as_str(), args), - Ty::Builtin(BuiltinType::U8) => { - return Ok(("sys.bytes".to_string(), "u8".to_string(), Vec::new())); - } - _ => { - return Err(TypeError::new( - "method receiver must be a struct or enum value".to_string(), - span, - )); - } - }; - - if let Some(info) = struct_map.get(receiver_name) { - let type_name = receiver_name - .rsplit_once('.') - .map(|(_, t)| t) - .unwrap_or(receiver_name) - .to_string(); - return Ok((info.module.clone(), type_name, receiver_args.clone())); - } - - if enum_map.contains_key(receiver_name) { - let type_name = receiver_name - .rsplit_once('.') - .map(|(_, t)| t) - .unwrap_or(receiver_name) - .to_string(); - let mod_part = receiver_name - .rsplit_once('.') - .map(|(m, _)| m) - .unwrap_or(module_name); - return Ok((mod_part.to_string(), type_name, receiver_args.clone())); - } - - if receiver_name.contains('.') { - let (mod_part, type_part) = receiver_name - .rsplit_once('.') - .ok_or_else(|| TypeError::new("invalid type path".to_string(), span))?; - return Ok(( - mod_part.to_string(), - type_part.to_string(), - receiver_args.clone(), - )); - } - - if let Some(info) = struct_map.get(&format!("{module_name}.{receiver_name}")) { - return Ok(( - info.module.clone(), - receiver_name.to_string(), - receiver_args.clone(), - )); - } - if enum_map.contains_key(&format!("{module_name}.{receiver_name}")) { - return Ok(( - module_name.to_string(), - receiver_name.to_string(), - receiver_args.clone(), - )); - } - - Err(TypeError::new( - format!("unknown struct or enum `{receiver_name}`"), - span, - )) -} - -fn resolve_impl_target( - target: &Type, - use_map: 
&UseMap, - stdlib: &StdlibIndex, - struct_map: &HashMap, - enum_map: &HashMap, - type_params: &HashSet, - module_name: &str, - span: Span, -) -> Result<(String, String, Ty), TypeError> { - let target_ty = lower_type(target, use_map, stdlib, type_params)?; - let (impl_module, type_name) = match &target_ty { - Ty::Path(target_name, target_args) => { - // Build suffix for concrete type args (e.g., "__u8" for Vec) - let type_arg_suffix = if target_args.is_empty() { - String::new() - } else { - let args: Vec = target_args - .iter() - .filter_map(|arg| match arg { - Ty::Builtin(b) => Some(format!("{:?}", b).to_lowercase()), - Ty::Path(name, _) => name.rsplit_once('.').map(|(_, t)| t.to_string()), - Ty::Param(_) => None, // Skip type params, only concrete types - _ => None, - }) - .collect(); - if args.is_empty() { - String::new() - } else { - format!("__{}", args.join("_")) - } - }; - - // Check struct_map first - if let Some(info) = struct_map.get(target_name) { - let base_name = target_name - .rsplit_once('.') - .map(|(_, t)| t) - .unwrap_or(target_name); - let type_name = format!("{}{}", base_name, type_arg_suffix); - (info.module.clone(), type_name) - // Check enum_map - } else if enum_map.contains_key(target_name) { - let base_name = target_name - .rsplit_once('.') - .map(|(_, t)| t) - .unwrap_or(target_name); - let type_name = format!("{}{}", base_name, type_arg_suffix); - let mod_part = target_name - .rsplit_once('.') - .map(|(m, _)| m) - .unwrap_or(module_name); - (mod_part.to_string(), type_name) - } else if target_name.contains('.') { - let (mod_part, type_part) = target_name - .rsplit_once('.') - .ok_or_else(|| TypeError::new("invalid type path".to_string(), span))?; - let type_name = format!("{}{}", type_part, type_arg_suffix); - (mod_part.to_string(), type_name) - } else if let Some(info) = struct_map.get(&format!("{module_name}.{target_name}")) { - let type_name = format!("{}{}", target_name, type_arg_suffix); - (info.module.clone(), type_name) - } else if 
enum_map.contains_key(&format!("{module_name}.{target_name}")) { - let type_name = format!("{}{}", target_name, type_arg_suffix); - (module_name.to_string(), type_name) - } else { - return Err(TypeError::new( - "impl target must be a struct or enum type name".to_string(), - span, - )); - } - } - Ty::Builtin(BuiltinType::I32) => (module_name.to_string(), "i32".to_string()), - Ty::Builtin(BuiltinType::U32) => (module_name.to_string(), "u32".to_string()), - Ty::Builtin(BuiltinType::U8) => (module_name.to_string(), "u8".to_string()), - Ty::Builtin(BuiltinType::Bool) => (module_name.to_string(), "bool".to_string()), - _ => { - return Err(TypeError::new( - "impl target must be a struct or enum type name".to_string(), - span, - )); - } - }; - if impl_module != module_name { - return Err(TypeError::new( - "impl blocks must be declared in the defining module".to_string(), - span, - )); - } - Ok((impl_module, type_name, target_ty)) -} - -fn validate_impl_method( - type_name: &str, - target_ty: &Ty, - target_ast: &Type, - _module_name: &str, - method: &Function, - use_map: &UseMap, - stdlib: &StdlibIndex, - struct_map: &HashMap, - enum_map: &HashMap, - type_params: &HashSet, - _span: Span, -) -> Result, TypeError> { - if method.name.item.contains("__") { - return Err(TypeError::new( - "method name in impl should be unqualified (write sum, not Pair__sum)".to_string(), - method.name.span, - )); - } - - let Some(first_param) = method.params.first() else { - return Err(TypeError::new( - format!("first parameter must be self: {type_name}"), - method.name.span, - )); - }; - if first_param.name.item != "self" { - return Err(TypeError::new( - format!("first parameter must be self: {type_name}"), - first_param.name.span, - )); - } - - let mut params = method.params.clone(); - let expected = target_ty.clone(); - let expected_ptr = Ty::Ptr(Box::new(target_ty.clone())); - let expected_ref = Ty::Ref(Box::new(target_ty.clone())); - let mut receiver_is_ref = false; - - if let Some(ty) = 
&first_param.ty { - let lowered = lower_type(ty, use_map, stdlib, type_params)?; - if lowered != expected && lowered != expected_ptr && lowered != expected_ref { - return Err(TypeError::new( - format!("first parameter must be self: {type_name} (found {lowered:?})"), - ty.span(), - )); - } - receiver_is_ref = lowered == expected_ref; - } else { - params[0].ty = Some(target_ast.clone()); - } - - for param in params.iter().skip(1) { - if param.ty.is_none() { - return Err(TypeError::new( - format!("parameter `{}` requires a type annotation", param.name.item), - param.name.span, - )); - } - } - - let ret_ty = lower_type(&method.ret, use_map, stdlib, type_params)?; - if receiver_is_ref && type_contains_capability(&ret_ty, struct_map, enum_map) { - let receiver_kind = type_kind(target_ty, struct_map, enum_map); - let receiver_is_capability = match target_ty { - Ty::Path(name, _) => struct_map - .get(name) - .map(|info| info.is_capability) - .unwrap_or(false), - _ => false, - }; - if receiver_kind != TypeKind::Unrestricted - && (!receiver_is_capability - || type_contains_non_linear_capability(&ret_ty, struct_map, enum_map)) - { - return Err(TypeError::new( - "borrowed capability receivers may only return linear child capabilities" - .to_string(), - method.ret.span(), - )); - } - } - - Ok(params) -} - -fn desugar_impl_method( - type_name: &str, - method: &Function, - params: Vec, - type_params: Vec, -) -> Function { - let name = Spanned::new( - format!("{type_name}__{}", method.name.item), - method.name.span, - ); - Function { - name, - type_params, - params, - ret: method.ret.clone(), - body: method.body.clone(), - is_pub: method.is_pub, - doc: method.doc.clone(), - span: method.span, - } -} - -fn desugar_impl_methods( - impl_block: &ImplBlock, - module_name: &str, - use_map: &UseMap, - stdlib: &StdlibIndex, - struct_map: &HashMap, - enum_map: &HashMap, - trait_map: &HashMap, -) -> Result, TypeError> { - let impl_type_params = build_type_params(&impl_block.type_params)?; - 
let (_impl_module, type_name, target_ty) = resolve_impl_target( - &impl_block.target, - use_map, - stdlib, - struct_map, - enum_map, - &impl_type_params, - module_name, - impl_block.span, - )?; - let trait_name = impl_block - .trait_path - .as_ref() - .map(|path| resolve_trait_name(path, use_map, module_name)); - if let Some(trait_name) = &trait_name { - let Some(trait_info) = trait_map.get(trait_name) else { - return Err(TypeError::new( - format!("unknown trait `{trait_name}`"), - impl_block.span, - )); - }; - if trait_info.module != module_name && !trait_info.is_pub { - return Err(TypeError::new( - format!("trait `{trait_name}` is private"), - impl_block.span, - )); - } - } - let mut method_names = std::collections::HashSet::new(); - let mut methods = Vec::with_capacity(impl_block.methods.len()); - for method in &impl_block.methods { - if !method_names.insert(method.name.item.clone()) { - return Err(TypeError::new( - format!("duplicate method `{}` in impl block", method.name.item), - method.name.span, - )); - } - if trait_name.is_some() && !method.type_params.is_empty() { - return Err(TypeError::new( - "trait impl methods cannot declare type parameters".to_string(), - method.name.span, - )); - } - let method_type_params = merge_type_params(&impl_type_params, &method.type_params)?; - let mut combined_type_params = impl_block.type_params.clone(); - combined_type_params.extend(method.type_params.clone()); - let params = validate_impl_method( - &type_name, - &target_ty, - &impl_block.target, - module_name, - method, - use_map, - stdlib, - struct_map, - enum_map, - &method_type_params, - method.span, - )?; - if let Some(trait_name) = &trait_name { - let trait_info = trait_map.get(trait_name).expect("trait already validated"); - let trait_method = trait_info.methods.get(&method.name.item).ok_or_else(|| { - TypeError::new( - format!( - "method `{}` is not declared in trait `{trait_name}`", - method.name.item - ), - method.name.span, - ) - })?; - let mut lowered_params = 
Vec::new(); - for param in ¶ms { - let Some(ty) = ¶m.ty else { - return Err(TypeError::new( - format!("parameter `{}` requires a type annotation", param.name.item), - param.name.span, - )); - }; - lowered_params.push(lower_type(ty, use_map, stdlib, &method_type_params)?); - } - let lowered_ret = lower_type(&method.ret, use_map, stdlib, &method_type_params)?; - let mut expected_params = Vec::new(); - for ty in &trait_method.params { - expected_params.push(substitute_self(ty, &target_ty)); - } - let expected_ret = substitute_self(&trait_method.ret, &target_ty); - if lowered_params.len() != expected_params.len() { - return Err(TypeError::new( - format!( - "method `{}` has wrong arity for trait `{trait_name}`", - method.name.item - ), - method.name.span, - )); - } - for (actual, expected) in lowered_params.iter().zip(expected_params.iter()) { - if actual != expected { - return Err(TypeError::new( - format!( - "method `{}` has wrong parameter type for trait `{trait_name}`", - method.name.item - ), - method.name.span, - )); - } - } - if lowered_ret != expected_ret { - return Err(TypeError::new( - format!( - "method `{}` has wrong return type for trait `{trait_name}`", - method.name.item - ), - method.name.span, - )); - } - let name = Spanned::new( - trait_method_name(trait_name, &type_name, &method.name.item), - method.name.span, - ); - methods.push(Function { - name, - type_params: combined_type_params, - params, - ret: method.ret.clone(), - body: method.body.clone(), - is_pub: method.is_pub, - doc: method.doc.clone(), - span: method.span, - }); - } else { - methods.push(desugar_impl_method( - &type_name, - method, - params, - combined_type_params, - )); - } - } - if let Some(trait_name) = &trait_name { - let trait_info = trait_map.get(trait_name).expect("trait already validated"); - for name in trait_info.methods.keys() { - if !method_names.contains(name) { - return Err(TypeError::new( - format!("missing method `{name}` for trait `{trait_name}`"), - impl_block.span, - 
)); - } - } - } - Ok(methods) -} - -/// Convert AST types into resolved Ty (builtins + fully qualified paths). -fn lower_type( - ty: &Type, - use_map: &UseMap, - stdlib: &StdlibIndex, - type_params: &HashSet, -) -> Result { - match ty { - Type::Ptr { target, .. } => Ok(Ty::Ptr(Box::new(lower_type( - target, - use_map, - stdlib, - type_params, - )?))), - Type::Ref { target, .. } => Ok(Ty::Ref(Box::new(lower_type( - target, - use_map, - stdlib, - type_params, - )?))), - Type::Path { path, args, .. } => { - let resolved = resolve_path(path, use_map); - let path_segments = resolved.iter().map(|seg| seg.as_str()).collect::>(); - let args: Vec = args - .iter() - .map(|arg| lower_type(arg, use_map, stdlib, type_params)) - .collect::>()?; - if path_segments.len() == 1 { - if type_params.contains(path_segments[0]) { - if !args.is_empty() { - return Err(TypeError::new( - format!( - "type parameter `{}` cannot take arguments", - path_segments[0] - ), - path.span, - )); - } - return Ok(Ty::Param(path_segments[0].to_string())); - } - let builtin = match path_segments[0] { - "i32" => Some(BuiltinType::I32), - "i64" => Some(BuiltinType::I64), - "u32" => Some(BuiltinType::U32), - "u8" => Some(BuiltinType::U8), - "bool" => Some(BuiltinType::Bool), - "unit" => Some(BuiltinType::Unit), - "never" => Some(BuiltinType::Never), - _ => None, - }; - if let Some(builtin) = builtin { - return Ok(Ty::Builtin(builtin)); - } - let resolved_joined = resolved.join("."); - let alias = resolve_type_name(path, use_map, stdlib); - let joined = if alias != resolved_joined { - alias - } else { - resolved_joined - }; - if joined == "Vec" || joined == "sys.vec.Vec" { - if args.len() != 1 { - return Err(TypeError::new( - format!("Vec expects 1 type argument, found {}", args.len()), - path.span, - )); - } - return Ok(Ty::Path("sys.vec.Vec".to_string(), args)); - } - return Ok(Ty::Path(joined, args)); - } - let joined = path_segments.join("."); - if joined == "Vec" || joined == "sys.vec.Vec" { - if 
args.len() != 1 { - return Err(TypeError::new( - format!("Vec expects 1 type argument, found {}", args.len()), - path.span, - )); - } - return Ok(Ty::Path("sys.vec.Vec".to_string(), args)); - } - Ok(Ty::Path(joined, args)) - } - } -} - -/// Resolve a path to an enum type if the last segment is a variant. -fn resolve_enum_variant( - path: &Path, - use_map: &UseMap, - enum_map: &HashMap, - module_name: &str, -) -> Option { - let resolved = resolve_path(path, use_map); - if resolved.len() < 2 { - return None; - } - let (enum_path, variant) = resolved.split_at(resolved.len() - 1); - let enum_name = enum_path.join("."); - - // First try the resolved path as-is - if let Some(info) = enum_map.get(&enum_name) { - if info.variants.iter().any(|name| name == &variant[0]) { - return Some(Ty::Path(enum_name, Vec::new())); - } - } - - // If not found, try prepending current module (for local enums) - if enum_path.len() == 1 { - let qualified = format!("{}.{}", module_name, enum_name); - if let Some(info) = enum_map.get(&qualified) { - if info.variants.iter().any(|name| name == &variant[0]) { - return Some(Ty::Path(qualified, Vec::new())); - } - } - } - - None -} - -fn resolve_type_name(path: &Path, use_map: &UseMap, stdlib: &StdlibIndex) -> String { - let resolved = resolve_path(path, use_map); - if resolved.len() == 1 { - if let Some(full) = stdlib.types.get(&resolved[0]) { - return full.clone(); - } - } - resolved.join(".") -} - -fn resolve_trait_name(path: &Path, use_map: &UseMap, module_name: &str) -> String { - let resolved = resolve_path(path, use_map); - if resolved.len() == 1 { - return format!("{module_name}.{}", resolved[0]); - } - resolved.join(".") -} - -pub(super) fn is_string_ty(ty: &Ty) -> bool { - matches!(ty, Ty::Path(name, _) if name == "sys.string.string" || name == "string") -} - -fn stdlib_string_ty(stdlib: &StdlibIndex) -> Ty { - let name = stdlib - .types - .get("string") - .cloned() - .unwrap_or_else(|| "sys.string.string".to_string()); - Ty::Path(name, 
Vec::new()) -} - -fn type_contains_ref(ty: &Type) -> Option { - match ty { - Type::Ref { span, .. } => Some(*span), - Type::Ptr { target, .. } => type_contains_ref(target), - Type::Path { args, .. } => { - for arg in args { - if let Some(span) = type_contains_ref(arg) { - return Some(span); - } - } - None - } - } -} - -fn type_contains_capability( - ty: &Ty, - struct_map: &HashMap, - enum_map: &HashMap, -) -> bool { - let mut visiting = HashSet::new(); - type_contains_capability_inner(ty, struct_map, enum_map, &mut visiting) -} - -fn type_contains_capability_inner( - ty: &Ty, - struct_map: &HashMap, - enum_map: &HashMap, - visiting: &mut HashSet, -) -> bool { - match ty { - Ty::Builtin(_) | Ty::Ptr(_) | Ty::Ref(_) => false, - Ty::Param(_) => true, - Ty::Path(name, args) => { - if name == "sys.result.Result" { - return args.iter().any(|arg| { - type_contains_capability_inner(arg, struct_map, enum_map, visiting) - }); - } - if args - .iter() - .any(|arg| type_contains_capability_inner(arg, struct_map, enum_map, visiting)) - { - return true; - } - if let Some(info) = struct_map.get(name) { - if info.is_capability { - return true; - } - if !visiting.insert(name.clone()) { - return false; - } - let contains = info.fields.values().any(|field| { - type_contains_capability_inner(field, struct_map, enum_map, visiting) - }); - visiting.remove(name); - return contains; - } - if let Some(info) = enum_map.get(name) { - if !visiting.insert(name.clone()) { - return false; - } - let contains = info.payloads.values().any(|payload| { - if let Some(payload_ty) = payload { - type_contains_capability_inner(payload_ty, struct_map, enum_map, visiting) - } else { - false - } - }); - visiting.remove(name); - return contains; - } - false - } - } -} - -fn type_contains_non_linear_capability( - ty: &Ty, - struct_map: &HashMap, - enum_map: &HashMap, -) -> bool { - let mut visiting = HashSet::new(); - type_contains_non_linear_capability_inner(ty, struct_map, enum_map, &mut visiting) -} - -fn 
type_contains_non_linear_capability_inner( - ty: &Ty, - struct_map: &HashMap, - enum_map: &HashMap, - visiting: &mut HashSet, -) -> bool { - match ty { - Ty::Builtin(_) | Ty::Ptr(_) | Ty::Ref(_) => false, - Ty::Param(_) => true, - Ty::Path(name, args) => { - if name == "sys.result.Result" { - return args.iter().any(|arg| { - type_contains_non_linear_capability_inner(arg, struct_map, enum_map, visiting) - }); - } - if args.iter().any(|arg| { - type_contains_non_linear_capability_inner(arg, struct_map, enum_map, visiting) - }) { - return true; - } - if let Some(info) = struct_map.get(name) { - if info.is_capability { - return info.kind != TypeKind::Linear; - } - if !visiting.insert(name.clone()) { - return false; - } - let contains = info.fields.values().any(|field| { - type_contains_non_linear_capability_inner(field, struct_map, enum_map, visiting) - }); - visiting.remove(name); - return contains; - } - if let Some(info) = enum_map.get(name) { - if !visiting.insert(name.clone()) { - return false; - } - let contains = info.payloads.values().any(|payload| { - if let Some(payload_ty) = payload { - type_contains_non_linear_capability_inner( - payload_ty, struct_map, enum_map, visiting, - ) - } else { - false - } - }); - visiting.remove(name); - return contains; - } - false - } - } -} - -/// Move-only types are anything not unrestricted. -fn is_affine_type( - ty: &Ty, - struct_map: &HashMap, - enum_map: &HashMap, -) -> bool { - type_kind(ty, struct_map, enum_map) != TypeKind::Unrestricted -} - -/// Compute kind with cycle protection for recursive types. -fn type_kind( - ty: &Ty, - struct_map: &HashMap, - enum_map: &HashMap, -) -> TypeKind { - let mut visiting = HashSet::new(); - type_kind_inner(ty, struct_map, enum_map, &mut visiting) -} - -/// Linear dominates affine, which dominates unrestricted. 
-fn combine_kind(left: TypeKind, right: TypeKind) -> TypeKind { - match (left, right) { - (TypeKind::Linear, _) | (_, TypeKind::Linear) => TypeKind::Linear, - (TypeKind::Affine, _) | (_, TypeKind::Affine) => TypeKind::Affine, - _ => TypeKind::Unrestricted, - } -} - -/// Recursively compute type kind with recursion cycle protection. -fn type_kind_inner( - ty: &Ty, - struct_map: &HashMap, - enum_map: &HashMap, - visiting: &mut HashSet, -) -> TypeKind { - match ty { - Ty::Builtin(_) | Ty::Ptr(_) | Ty::Ref(_) => TypeKind::Unrestricted, - Ty::Param(_) => TypeKind::Affine, - Ty::Path(name, args) => { - if name == "sys.result.Result" { - return args.iter().fold(TypeKind::Unrestricted, |acc, arg| { - combine_kind(acc, type_kind_inner(arg, struct_map, enum_map, visiting)) - }); - } - if visiting.contains(name) { - return TypeKind::Unrestricted; - } - if let Some(info) = struct_map.get(name) { - visiting.insert(name.clone()); - let fields_kind = - info.fields - .values() - .fold(TypeKind::Unrestricted, |acc, field| { - combine_kind( - acc, - type_kind_inner(field, struct_map, enum_map, visiting), - ) - }); - visiting.remove(name); - return combine_kind(info.kind, fields_kind); - } - if let Some(info) = enum_map.get(name) { - visiting.insert(name.clone()); - let payload_kind = - info.payloads - .values() - .fold(TypeKind::Unrestricted, |acc, payload| { - if let Some(payload_ty) = payload { - combine_kind( - acc, - type_kind_inner(payload_ty, struct_map, enum_map, visiting), - ) - } else { - acc - } - }); - visiting.remove(name); - return payload_kind; - } - TypeKind::Unrestricted - } - } -} - -fn validate_type_args( - ty: &Ty, - struct_map: &HashMap, - enum_map: &HashMap, - span: Span, -) -> Result<(), TypeError> { - match ty { - Ty::Builtin(_) | Ty::Param(_) => Ok(()), - Ty::Ptr(inner) | Ty::Ref(inner) => validate_type_args(inner, struct_map, enum_map, span), - Ty::Path(name, args) => { - if let Some(info) = struct_map.get(name) { - if args.len() != info.type_params.len() { 
- return Err(TypeError::new( - format!( - "type `{}` expects {} type argument(s), found {}", - name, - info.type_params.len(), - args.len() - ), - span, - )); - } - } else if let Some(info) = enum_map.get(name) { - if args.len() != info.type_params.len() { - return Err(TypeError::new( - format!( - "type `{}` expects {} type argument(s), found {}", - name, - info.type_params.len(), - args.len() - ), - span, - )); - } - } - for arg in args { - validate_type_args(arg, struct_map, enum_map, span)?; - } - Ok(()) - } - } -} - pub fn type_check(module: &Module) -> Result { type_check_program(module, &[], &[]) } @@ -1322,38 +381,48 @@ pub fn type_check_program( stdlib: &[Module], user_modules: &[Module], ) -> Result { - let use_map = UseMap::new(module); + let module = crate::desugar::desugar_module(module); + let stdlib = stdlib + .iter() + .map(crate::desugar::desugar_module) + .collect::>(); + let user_modules = user_modules + .iter() + .map(crate::desugar::desugar_module) + .collect::>(); + + let use_map = UseMap::new(&module); let stdlib_names: HashSet = stdlib.iter().map(|m| path_to_string(&m.name)).collect(); let mut package_map: HashMap = HashMap::new(); - for m in stdlib { + for m in &stdlib { package_map.insert(path_to_string(&m.name), m.package); } - for m in user_modules { + for m in &user_modules { package_map.insert(path_to_string(&m.name), m.package); } package_map.insert(path_to_string(&module.name), module.package); - let stdlib_index = collect::build_stdlib_index(stdlib)?; + let stdlib_index = collect::build_stdlib_index(&stdlib)?; let modules = stdlib .iter() .chain(user_modules.iter()) - .chain(std::iter::once(module)) + .chain(std::iter::once(&module)) .collect::>(); let module_name = module.name.to_string(); - check::validate_package_safety(module, false) + validate_package_safety(&module, false) .map_err(|err| err.with_context(format!("in module `{}`", module.name)))?; - check::validate_import_safety(module, &package_map, &stdlib_names) + 
validate_import_safety(&module, &package_map, &stdlib_names) .map_err(|err| err.with_context(format!("in module `{}`", module.name)))?; - for user_module in user_modules { - check::validate_package_safety(user_module, false) + for user_module in &user_modules { + validate_package_safety(user_module, false) .map_err(|err| err.with_context(format!("in module `{}`", user_module.name)))?; - check::validate_import_safety(user_module, &package_map, &stdlib_names) + validate_import_safety(user_module, &package_map, &stdlib_names) .map_err(|err| err.with_context(format!("in module `{}`", user_module.name)))?; } - for stdlib_module in stdlib { - check::validate_package_safety(stdlib_module, true) + for stdlib_module in &stdlib { + validate_package_safety(stdlib_module, true) .map_err(|err| err.with_context(format!("in module `{}`", stdlib_module.name)))?; if stdlib_module.package == PackageSafety::Safe { - check::validate_import_safety(stdlib_module, &package_map, &stdlib_names) + validate_import_safety(stdlib_module, &package_map, &stdlib_names) .map_err(|err| err.with_context(format!("in module `{}`", stdlib_module.name)))?; } } @@ -1388,6 +457,12 @@ pub fn type_check_program( for item in &module.items { match item { Item::Function(func) => { + if crate::runtime_intrinsics::is_runtime_intrinsic( + &module_name, + &func.name.item, + ) { + continue; + } let mut table = TypeTable::default(); check::check_function( func, @@ -1415,6 +490,12 @@ pub fn type_check_program( &trait_map, )?; for method in methods { + if crate::runtime_intrinsics::is_runtime_intrinsic( + &module_name, + &method.name.item, + ) { + continue; + } let mut table = TypeTable::default(); check::check_function( &method, @@ -1438,10 +519,13 @@ pub fn type_check_program( Ok(()) }; - for module in user_modules { + for module in &stdlib { + check_module(module)?; + } + for module in &user_modules { check_module(module)?; } - check_module(module)?; + check_module(&module)?; let hir_stdlib: Result, TypeError> = 
stdlib .iter() @@ -1457,7 +541,6 @@ pub fn type_check_program( &use_map, &stdlib_index, Some(&type_tables), - true, ) .map_err(|err| err.with_context(format!("in module `{}`", m.name))) }) @@ -1477,14 +560,13 @@ pub fn type_check_program( &use_map, &stdlib_index, Some(&type_tables), - false, ) .map_err(|err| err.with_context(format!("in module `{}`", m.name))) }) .collect(); let hir_entry = lower::lower_module( - module, + &module, &functions, &struct_map, &enum_map, @@ -1493,7 +575,6 @@ pub fn type_check_program( &use_map, &stdlib_index, Some(&type_tables), - false, ) .map_err(|err| err.with_context(format!("in module `{}`", module.name)))?; @@ -1516,26 +597,3 @@ pub fn type_check_program( }; monomorphize::monomorphize_program(hir_program) } - -pub(super) trait SpanExt { - fn span(&self) -> Span; -} - -impl SpanExt for Expr { - fn span(&self) -> Span { - match self { - Expr::Literal(lit) => lit.span, - Expr::Path(path) => path.span, - Expr::Call(call) => call.span, - Expr::MethodCall(method_call) => method_call.span, - Expr::FieldAccess(field) => field.span, - Expr::Index(index) => index.span, - Expr::StructLiteral(lit) => lit.span, - Expr::Unary(unary) => unary.span, - Expr::Binary(binary) => binary.span, - Expr::Match(m) => m.span, - Expr::Try(try_expr) => try_expr.span, - Expr::Grouping(group) => group.span, - } - } -} diff --git a/capc/src/typeck/moveck.rs b/capc/src/typeck/moveck.rs new file mode 100644 index 0000000..1c7fdf1 --- /dev/null +++ b/capc/src/typeck/moveck.rs @@ -0,0 +1,237 @@ +use std::collections::HashMap; + +use crate::ast::*; +use crate::error::TypeError; + +use super::{type_kind, EnumInfo, MoveState, Scopes, StructInfo, TypeKind}; + +/// Check if a statement is syntactically total (always returns). +/// This is a purely syntactic check, not real control-flow analysis. 
+pub(super) fn stmt_is_total(stmt: &Stmt) -> bool { + match stmt { + Stmt::Return(ret_stmt) => ret_stmt.expr.is_some(), + Stmt::Defer(_) => false, + Stmt::Expr(expr_stmt) => { + if let Expr::Match(match_expr) = &expr_stmt.expr { + match_is_total(match_expr) + } else { + false + } + } + Stmt::If(if_stmt) => { + if let Some(else_block) = &if_stmt.else_block { + block_ends_with_return(&if_stmt.then_block) && block_ends_with_return(else_block) + } else { + false + } + } + _ => false, + } +} + +fn block_ends_with_return(block: &Block) -> bool { + block.stmts.last().is_some_and(stmt_is_total) +} + +fn match_is_total(match_expr: &MatchExpr) -> bool { + !match_expr.arms.is_empty() + && match_expr + .arms + .iter() + .all(|arm| block_ends_with_return(&arm.body)) +} + +pub(super) fn merge_branch_states( + base: &mut Scopes, + left: &Scopes, + right: &Scopes, + struct_map: &HashMap, + enum_map: &HashMap, + span: Span, +) -> Result<(), TypeError> { + for (base_scope, (left_scope, right_scope)) in base + .stack + .iter_mut() + .zip(left.stack.iter().zip(&right.stack)) + { + for (name, info) in base_scope.iter_mut() { + let left_info = left_scope + .get(name) + .ok_or_else(|| TypeError::new(format!("unknown identifier `{name}`"), span))?; + let right_info = right_scope + .get(name) + .ok_or_else(|| TypeError::new(format!("unknown identifier `{name}`"), span))?; + match type_kind(&info.ty, struct_map, enum_map) { + TypeKind::Affine => { + info.state = if left_info.state == MoveState::Moved + || right_info.state == MoveState::Moved + { + MoveState::Moved + } else { + MoveState::Available + }; + } + TypeKind::Linear => { + if left_info.state != right_info.state { + return Err(TypeError::new( + format!("linear value `{name}` must be consumed on all paths"), + span, + )); + } + info.state = left_info.state; + } + TypeKind::Unrestricted => {} + } + } + } + Ok(()) +} + +pub(super) fn ensure_affine_states_match( + base: &Scopes, + other: &Scopes, + struct_map: &HashMap, + enum_map: 
&HashMap, + span: Span, +) -> Result<(), TypeError> { + for (base_scope, other_scope) in base.stack.iter().zip(&other.stack) { + for (name, info) in base_scope { + let other_info = other_scope + .get(name) + .ok_or_else(|| TypeError::new(format!("unknown identifier `{name}`"), span))?; + if type_kind(&info.ty, struct_map, enum_map) != TypeKind::Unrestricted + && info.state != other_info.state + { + return Err(TypeError::new( + format!("move-only value `{name}` moved inside loop"), + span, + )); + } + } + } + Ok(()) +} + +pub(super) fn ensure_linear_scope_consumed( + scopes: &Scopes, + struct_map: &HashMap, + enum_map: &HashMap, + span: Span, +) -> Result<(), TypeError> { + if let Some(scope) = scopes.stack.last() { + for (name, info) in scope { + if type_kind(&info.ty, struct_map, enum_map) == TypeKind::Linear + && info.state != MoveState::Moved + { + return Err(TypeError::new( + format!("linear value `{name}` not consumed"), + span, + )); + } + } + } + Ok(()) +} + +pub(super) fn ensure_linear_scopes_consumed_from( + scopes: &Scopes, + depth: usize, + struct_map: &HashMap, + enum_map: &HashMap, + span: Span, +) -> Result<(), TypeError> { + for scope in scopes.stack.iter().skip(depth) { + for (name, info) in scope { + if type_kind(&info.ty, struct_map, enum_map) == TypeKind::Linear + && info.state != MoveState::Moved + { + return Err(TypeError::new( + format!("linear value `{name}` not consumed"), + span, + )); + } + } + } + Ok(()) +} + +pub(super) fn ensure_linear_all_consumed( + scopes: &Scopes, + struct_map: &HashMap, + enum_map: &HashMap, + span: Span, +) -> Result<(), TypeError> { + for scope in &scopes.stack { + for (name, info) in scope { + if type_kind(&info.ty, struct_map, enum_map) == TypeKind::Linear + && info.state != MoveState::Moved + { + return Err(TypeError::new( + format!("linear value `{name}` not consumed"), + span, + )); + } + } + } + Ok(()) +} + +pub(super) fn merge_match_states( + base: &mut Scopes, + arms: &[Scopes], + struct_map: &HashMap, + 
enum_map: &HashMap, + span: Span, +) -> Result<(), TypeError> { + let Some((first, rest)) = arms.split_first() else { + return Ok(()); + }; + for (depth, (base_scope, first_scope)) in base.stack.iter_mut().zip(&first.stack).enumerate() { + for (name, info) in base_scope.iter_mut() { + let first_info = first_scope + .get(name) + .ok_or_else(|| TypeError::new(format!("unknown identifier `{name}`"), span))?; + match type_kind(&info.ty, struct_map, enum_map) { + TypeKind::Affine => { + let mut moved = first_info.state == MoveState::Moved; + for arm in rest { + let arm_scope = arm.stack.get(depth).ok_or_else(|| { + TypeError::new(format!("unknown identifier `{name}`"), span) + })?; + let arm_info = arm_scope.get(name).ok_or_else(|| { + TypeError::new(format!("unknown identifier `{name}`"), span) + })?; + if arm_info.state == MoveState::Moved { + moved = true; + } + } + info.state = if moved { + MoveState::Moved + } else { + MoveState::Available + }; + } + TypeKind::Linear => { + let state = first_info.state; + for arm in rest { + let arm_scope = arm.stack.get(depth).ok_or_else(|| { + TypeError::new(format!("unknown identifier `{name}`"), span) + })?; + let arm_info = arm_scope.get(name).ok_or_else(|| { + TypeError::new(format!("unknown identifier `{name}`"), span) + })?; + if arm_info.state != state { + return Err(TypeError::new( + format!("linear value `{name}` must be consumed on all paths"), + span, + )); + } + } + info.state = state; + } + TypeKind::Unrestricted => {} + } + } + } + Ok(()) +} diff --git a/capc/src/typeck/patterns.rs b/capc/src/typeck/patterns.rs new file mode 100644 index 0000000..a648814 --- /dev/null +++ b/capc/src/typeck/patterns.rs @@ -0,0 +1,128 @@ +use std::collections::HashMap; + +use crate::ast::*; +use crate::error::TypeError; + +use super::{apply_enum_type_args, resolve_enum_variant, EnumInfo, Scopes, Ty, UseMap}; + +/// Bind locals introduced by a match pattern. 
+pub(super) fn bind_pattern( + pattern: &Pattern, + match_ty: &Ty, + scopes: &mut Scopes, + use_map: &UseMap, + enum_map: &HashMap, + module_name: &str, +) -> Result<(), TypeError> { + match pattern { + Pattern::Call { path, binding, .. } => { + let name = path + .segments + .iter() + .map(|seg| seg.item.as_str()) + .collect::>() + .join("."); + if let Ty::Path(ty_name, args) = match_ty { + if ty_name == "sys.result.Result" && args.len() == 2 { + if let Some(binding) = binding { + let ty = if name == "Ok" { + args[0].clone() + } else if name == "Err" { + args[1].clone() + } else { + return Ok(()); + }; + scopes.insert_local(binding.item.clone(), ty); + } + return Ok(()); + } + } + if let Some(Ty::Path(enum_name, _)) = + resolve_enum_variant(path, use_map, enum_map, module_name) + { + let Ty::Path(match_name, match_args) = match_ty else { + return Err(TypeError::new( + format!( + "pattern type mismatch: expected {match_ty:?}, found {enum_name:?}" + ), + path.span, + )); + }; + if match_name != &enum_name { + return Err(TypeError::new( + format!( + "pattern type mismatch: expected {match_ty:?}, found {enum_name:?}" + ), + path.span, + )); + } + if let Some(binding) = binding { + let Some(info) = enum_map.get(&enum_name) else { + return Err(TypeError::new("unknown enum variant".to_string(), path.span)); + }; + let variant = path + .segments + .last() + .map(|s| s.item.clone()) + .unwrap_or_else(|| "unknown".to_string()); + let payload = info.payloads.get(&variant).cloned().unwrap_or(None); + let Some(payload_ty) = payload else { + return Err(TypeError::new( + format!("variant `{name}` has no payload"), + path.span, + )); + }; + if info.type_params.len() != match_args.len() { + return Err(TypeError::new("pattern type mismatch".to_string(), path.span)); + } + let payload_ty = + apply_enum_type_args(&payload_ty, &info.type_params, match_args); + scopes.insert_local(binding.item.clone(), payload_ty); + } + return Ok(()); + } + Err(TypeError::new( + "pattern binding 
requires an enum match".to_string(), + path.span, + )) + } + Pattern::Binding(ident) => { + scopes.insert_local(ident.item.clone(), match_ty.clone()); + Ok(()) + } + Pattern::Path(path) => { + if let Some(ty) = resolve_enum_variant(path, use_map, enum_map, module_name) { + if !same_type_constructor(&ty, match_ty) { + return Err(TypeError::new( + format!("pattern type mismatch: expected {match_ty:?}, found {ty:?}"), + path.span, + )); + } + } + Ok(()) + } + Pattern::Literal(_) | Pattern::Wildcard(_) => Ok(()), + } +} + +fn same_type_constructor(left: &Ty, right: &Ty) -> bool { + match (left, right) { + (Ty::Builtin(l), Ty::Builtin(r)) => l == r, + (Ty::Path(l, _), Ty::Path(r, _)) => l == r, + (Ty::Ptr(_), Ty::Ptr(_)) | (Ty::Ref(_), Ty::Ref(_)) => true, + _ => false, + } +} + +pub(super) fn leftmost_local_in_chain(expr: &Expr) -> Option<(&str, Span)> { + match expr { + Expr::Path(path) if path.segments.len() == 1 => { + let seg = &path.segments[0]; + Some((seg.item.as_str(), seg.span)) + } + Expr::FieldAccess(field_access) => leftmost_local_in_chain(&field_access.object), + Expr::Grouping(group) => leftmost_local_in_chain(&group.expr), + Expr::Try(try_expr) => leftmost_local_in_chain(&try_expr.expr), + _ => None, + } +} diff --git a/capc/src/typeck/resolve.rs b/capc/src/typeck/resolve.rs new file mode 100644 index 0000000..76c9223 --- /dev/null +++ b/capc/src/typeck/resolve.rs @@ -0,0 +1,591 @@ +use std::collections::{HashMap, HashSet}; + +use crate::ast::*; +use crate::error::TypeError; + +use super::{ + build_type_arg_suffix, merge_type_params, BuiltinType, EnumInfo, StdlibIndex, StructInfo, + TraitInfo, Ty, TypeKind, UseMap, +}; + +pub(super) fn resolve_path(path: &Path, use_map: &UseMap) -> Vec { + if path.segments.len() > 1 { + let first = &path.segments[0].item; + if let Some(prefix) = use_map.aliases.get(first) { + let mut resolved = prefix.clone(); + for seg in path.segments.iter().skip(1) { + resolved.push(seg.item.clone()); + } + return resolved; + } + } + 
path.segments.iter().map(|seg| seg.item.clone()).collect() +} + +pub(super) fn path_to_string(path: &Path) -> String { + let mut out = String::new(); + for (i, seg) in path.segments.iter().enumerate() { + if i > 0 { + out.push('.'); + } + out.push_str(&seg.item); + } + out +} + +/// Resolve a method receiver type to (module, type name, type args). +/// Builtins with methods (string/u8) are mapped to their stdlib modules. +pub(super) fn resolve_method_target( + receiver_ty: &Ty, + module_name: &str, + struct_map: &HashMap, + enum_map: &HashMap, + span: Span, +) -> Result<(String, String, Vec), TypeError> { + let base_ty = match receiver_ty { + Ty::Ref(inner) | Ty::Ptr(inner) => inner.as_ref(), + _ => receiver_ty, + }; + let (receiver_name, receiver_args) = match base_ty { + Ty::Path(name, args) => (name.as_str(), args), + Ty::Builtin(BuiltinType::U8) => { + return Ok(("sys.bytes".to_string(), "u8".to_string(), Vec::new())); + } + _ => { + return Err(TypeError::new( + "method receiver must be a struct or enum value".to_string(), + span, + )); + } + }; + + if let Some(info) = struct_map.get(receiver_name) { + let type_name = receiver_name + .rsplit_once('.') + .map(|(_, t)| t) + .unwrap_or(receiver_name) + .to_string(); + return Ok((info.module.clone(), type_name, receiver_args.clone())); + } + + if enum_map.contains_key(receiver_name) { + let type_name = receiver_name + .rsplit_once('.') + .map(|(_, t)| t) + .unwrap_or(receiver_name) + .to_string(); + let mod_part = receiver_name + .rsplit_once('.') + .map(|(m, _)| m) + .unwrap_or(module_name); + return Ok((mod_part.to_string(), type_name, receiver_args.clone())); + } + + if receiver_name.contains('.') { + let (mod_part, type_part) = receiver_name + .rsplit_once('.') + .ok_or_else(|| TypeError::new("invalid type path".to_string(), span))?; + return Ok(( + mod_part.to_string(), + type_part.to_string(), + receiver_args.clone(), + )); + } + + if let Some(info) = struct_map.get(&format!("{module_name}.{receiver_name}")) 
{ + return Ok(( + info.module.clone(), + receiver_name.to_string(), + receiver_args.clone(), + )); + } + if enum_map.contains_key(&format!("{module_name}.{receiver_name}")) { + return Ok(( + module_name.to_string(), + receiver_name.to_string(), + receiver_args.clone(), + )); + } + + Err(TypeError::new( + format!("unknown struct or enum `{receiver_name}`"), + span, + )) +} + +pub(super) fn resolve_impl_target( + target: &Type, + use_map: &UseMap, + stdlib: &StdlibIndex, + struct_map: &HashMap, + enum_map: &HashMap, + type_params: &HashSet, + module_name: &str, + span: Span, +) -> Result<(String, String, Ty), TypeError> { + let target_ty = lower_type(target, use_map, stdlib, type_params)?; + let (impl_module, type_name) = match &target_ty { + Ty::Path(target_name, target_args) => { + let type_arg_suffix = build_type_arg_suffix(target_args); + if let Some(info) = struct_map.get(target_name) { + let base_name = target_name + .rsplit_once('.') + .map(|(_, t)| t) + .unwrap_or(target_name); + let type_name = format!("{}{}", base_name, type_arg_suffix); + (info.module.clone(), type_name) + } else if enum_map.contains_key(target_name) { + let base_name = target_name + .rsplit_once('.') + .map(|(_, t)| t) + .unwrap_or(target_name); + let type_name = format!("{}{}", base_name, type_arg_suffix); + let mod_part = target_name + .rsplit_once('.') + .map(|(m, _)| m) + .unwrap_or(module_name); + (mod_part.to_string(), type_name) + } else if target_name.contains('.') { + let (mod_part, type_part) = target_name + .rsplit_once('.') + .ok_or_else(|| TypeError::new("invalid type path".to_string(), span))?; + let type_name = format!("{}{}", type_part, type_arg_suffix); + (mod_part.to_string(), type_name) + } else if let Some(info) = struct_map.get(&format!("{module_name}.{target_name}")) { + let type_name = format!("{}{}", target_name, type_arg_suffix); + (info.module.clone(), type_name) + } else if enum_map.contains_key(&format!("{module_name}.{target_name}")) { + let type_name = 
format!("{}{}", target_name, type_arg_suffix); + (module_name.to_string(), type_name) + } else { + return Err(TypeError::new( + "impl target must be a struct or enum type name".to_string(), + span, + )); + } + } + Ty::Builtin(BuiltinType::I32) => (module_name.to_string(), "i32".to_string()), + Ty::Builtin(BuiltinType::U32) => (module_name.to_string(), "u32".to_string()), + Ty::Builtin(BuiltinType::U8) => (module_name.to_string(), "u8".to_string()), + Ty::Builtin(BuiltinType::Bool) => (module_name.to_string(), "bool".to_string()), + _ => { + return Err(TypeError::new( + "impl target must be a struct or enum type name".to_string(), + span, + )); + } + }; + if impl_module != module_name { + return Err(TypeError::new( + "impl blocks must be declared in the defining module".to_string(), + span, + )); + } + Ok((impl_module, type_name, target_ty)) +} + +pub(super) fn validate_impl_method( + type_name: &str, + target_ty: &Ty, + target_ast: &Type, + _module_name: &str, + method: &Function, + use_map: &UseMap, + stdlib: &StdlibIndex, + struct_map: &HashMap, + enum_map: &HashMap, + type_params: &HashSet, + _span: Span, +) -> Result, TypeError> { + if method.name.item.contains("__") { + return Err(TypeError::new( + "method name in impl should be unqualified (write sum, not Pair__sum)".to_string(), + method.name.span, + )); + } + + let Some(first_param) = method.params.first() else { + return Err(TypeError::new( + format!("first parameter must be self: {type_name}"), + method.name.span, + )); + }; + if first_param.name.item != "self" { + return Err(TypeError::new( + format!("first parameter must be self: {type_name}"), + first_param.name.span, + )); + } + + let mut params = method.params.clone(); + let expected = target_ty.clone(); + let expected_ptr = Ty::Ptr(Box::new(target_ty.clone())); + let expected_ref = Ty::Ref(Box::new(target_ty.clone())); + let mut receiver_is_ref = false; + + if let Some(ty) = &first_param.ty { + let lowered = lower_type(ty, use_map, stdlib, 
type_params)?; + if lowered != expected && lowered != expected_ptr && lowered != expected_ref { + return Err(TypeError::new( + format!("first parameter must be self: {type_name} (found {lowered:?})"), + ty.span(), + )); + } + receiver_is_ref = lowered == expected_ref; + } else { + params[0].ty = Some(target_ast.clone()); + } + + for param in params.iter().skip(1) { + if param.ty.is_none() { + return Err(TypeError::new( + format!("parameter `{}` requires a type annotation", param.name.item), + param.name.span, + )); + } + } + + let ret_ty = lower_type(&method.ret, use_map, stdlib, type_params)?; + if receiver_is_ref && super::type_contains_capability(&ret_ty, struct_map, enum_map) { + let receiver_kind = super::type_kind(target_ty, struct_map, enum_map); + let receiver_is_capability = match target_ty { + Ty::Path(name, _) => struct_map + .get(name) + .map(|info| info.is_capability) + .unwrap_or(false), + _ => false, + }; + if receiver_kind != TypeKind::Unrestricted + && (!receiver_is_capability + || super::type_contains_non_linear_capability(&ret_ty, struct_map, enum_map)) + { + return Err(TypeError::new( + "borrowed capability receivers may only return linear child capabilities" + .to_string(), + method.ret.span(), + )); + } + } + + Ok(params) +} + +pub(super) fn desugar_impl_method( + type_name: &str, + method: &Function, + params: Vec, + type_params: Vec, +) -> Function { + let name = Spanned::new( + format!("{type_name}__{}", method.name.item), + method.name.span, + ); + Function { + name, + type_params, + params, + ret: method.ret.clone(), + body: method.body.clone(), + is_pub: method.is_pub, + doc: method.doc.clone(), + span: method.span, + } +} + +pub(super) fn desugar_impl_methods( + impl_block: &ImplBlock, + module_name: &str, + use_map: &UseMap, + stdlib: &StdlibIndex, + struct_map: &HashMap, + enum_map: &HashMap, + trait_map: &HashMap, +) -> Result, TypeError> { + let impl_type_params = super::build_type_params(&impl_block.type_params)?; + let 
(_impl_module, type_name, target_ty) = resolve_impl_target( + &impl_block.target, + use_map, + stdlib, + struct_map, + enum_map, + &impl_type_params, + module_name, + impl_block.span, + )?; + let trait_name = impl_block + .trait_path + .as_ref() + .map(|path| resolve_trait_name(path, use_map, module_name)); + if let Some(trait_name) = &trait_name { + let Some(trait_info) = trait_map.get(trait_name) else { + return Err(TypeError::new( + format!("unknown trait `{trait_name}`"), + impl_block.span, + )); + }; + if trait_info.module != module_name && !trait_info.is_pub { + return Err(TypeError::new( + format!("trait `{trait_name}` is private"), + impl_block.span, + )); + } + } + let mut method_names = std::collections::HashSet::new(); + let mut methods = Vec::with_capacity(impl_block.methods.len()); + for method in &impl_block.methods { + if !method_names.insert(method.name.item.clone()) { + return Err(TypeError::new( + format!("duplicate method `{}` in impl block", method.name.item), + method.name.span, + )); + } + if trait_name.is_some() && !method.type_params.is_empty() { + return Err(TypeError::new( + "trait impl methods cannot declare type parameters".to_string(), + method.name.span, + )); + } + let method_type_params = merge_type_params(&impl_type_params, &method.type_params)?; + let mut combined_type_params = impl_block.type_params.clone(); + combined_type_params.extend(method.type_params.clone()); + let params = validate_impl_method( + &type_name, + &target_ty, + &impl_block.target, + module_name, + method, + use_map, + stdlib, + struct_map, + enum_map, + &method_type_params, + method.span, + )?; + if let Some(trait_name) = &trait_name { + let trait_info = trait_map.get(trait_name).expect("trait already validated"); + let trait_method = trait_info.methods.get(&method.name.item).ok_or_else(|| { + TypeError::new( + format!( + "method `{}` is not declared in trait `{trait_name}`", + method.name.item + ), + method.name.span, + ) + })?; + let mut lowered_params = 
Vec::new(); + for param in ¶ms { + let Some(ty) = ¶m.ty else { + return Err(TypeError::new( + format!("parameter `{}` requires a type annotation", param.name.item), + param.name.span, + )); + }; + lowered_params.push(lower_type(ty, use_map, stdlib, &method_type_params)?); + } + let lowered_ret = lower_type(&method.ret, use_map, stdlib, &method_type_params)?; + let mut expected_params = Vec::new(); + for ty in &trait_method.params { + expected_params.push(super::substitute_self(ty, &target_ty)); + } + let expected_ret = super::substitute_self(&trait_method.ret, &target_ty); + if lowered_params.len() != expected_params.len() { + return Err(TypeError::new( + format!( + "method `{}` has wrong arity for trait `{trait_name}`", + method.name.item + ), + method.name.span, + )); + } + for (actual, expected) in lowered_params.iter().zip(expected_params.iter()) { + if actual != expected { + return Err(TypeError::new( + format!( + "method `{}` has wrong parameter type for trait `{trait_name}`", + method.name.item + ), + method.name.span, + )); + } + } + if lowered_ret != expected_ret { + return Err(TypeError::new( + format!( + "method `{}` has wrong return type for trait `{trait_name}`", + method.name.item + ), + method.name.span, + )); + } + let name = Spanned::new( + super::trait_method_name(trait_name, &type_name, &method.name.item), + method.name.span, + ); + methods.push(Function { + name, + type_params: combined_type_params, + params, + ret: method.ret.clone(), + body: method.body.clone(), + is_pub: method.is_pub, + doc: method.doc.clone(), + span: method.span, + }); + } else { + methods.push(desugar_impl_method( + &type_name, + method, + params, + combined_type_params, + )); + } + } + if let Some(trait_name) = &trait_name { + let trait_info = trait_map.get(trait_name).expect("trait already validated"); + for name in trait_info.methods.keys() { + if !method_names.contains(name) { + return Err(TypeError::new( + format!("missing method `{name}` for trait `{trait_name}`"), 
+ impl_block.span, + )); + } + } + } + Ok(methods) +} + +/// Convert AST types into resolved Ty (builtins + fully qualified paths). +pub(super) fn lower_type( + ty: &Type, + use_map: &UseMap, + stdlib: &StdlibIndex, + type_params: &HashSet, +) -> Result { + match ty { + Type::Ptr { target, .. } => Ok(Ty::Ptr(Box::new(lower_type( + target, + use_map, + stdlib, + type_params, + )?))), + Type::Ref { target, .. } => Ok(Ty::Ref(Box::new(lower_type( + target, + use_map, + stdlib, + type_params, + )?))), + Type::Path { path, args, .. } => { + let resolved = resolve_path(path, use_map); + let path_segments = resolved.iter().map(|seg| seg.as_str()).collect::>(); + let args: Vec = args + .iter() + .map(|arg| lower_type(arg, use_map, stdlib, type_params)) + .collect::>()?; + if path_segments.len() == 1 { + if type_params.contains(path_segments[0]) { + if !args.is_empty() { + return Err(TypeError::new( + format!( + "type parameter `{}` cannot take arguments", + path_segments[0] + ), + path.span, + )); + } + return Ok(Ty::Param(path_segments[0].to_string())); + } + let builtin = match path_segments[0] { + "i32" => Some(BuiltinType::I32), + "i64" => Some(BuiltinType::I64), + "u32" => Some(BuiltinType::U32), + "u8" => Some(BuiltinType::U8), + "bool" => Some(BuiltinType::Bool), + "unit" => Some(BuiltinType::Unit), + "never" => Some(BuiltinType::Never), + _ => None, + }; + if let Some(builtin) = builtin { + return Ok(Ty::Builtin(builtin)); + } + let resolved_joined = resolved.join("."); + let alias = resolve_type_name(path, use_map, stdlib); + let joined = if alias != resolved_joined { + alias + } else { + resolved_joined + }; + if joined == "Vec" || joined == "sys.vec.Vec" { + if args.len() != 1 { + return Err(TypeError::new( + format!("Vec expects 1 type argument, found {}", args.len()), + path.span, + )); + } + return Ok(Ty::Path("sys.vec.Vec".to_string(), args)); + } + return Ok(Ty::Path(joined, args)); + } + let joined = path_segments.join("."); + if joined == "Vec" || joined 
== "sys.vec.Vec" { + if args.len() != 1 { + return Err(TypeError::new( + format!("Vec expects 1 type argument, found {}", args.len()), + path.span, + )); + } + return Ok(Ty::Path("sys.vec.Vec".to_string(), args)); + } + Ok(Ty::Path(joined, args)) + } + } +} + +/// Resolve a path to an enum type if the last segment is a variant. +pub(super) fn resolve_enum_variant( + path: &Path, + use_map: &UseMap, + enum_map: &HashMap, + module_name: &str, +) -> Option { + let resolved = resolve_path(path, use_map); + if resolved.len() < 2 { + return None; + } + let (enum_path, variant) = resolved.split_at(resolved.len() - 1); + let enum_name = enum_path.join("."); + + if let Some(info) = enum_map.get(&enum_name) { + if info.variants.iter().any(|name| name == &variant[0]) { + return Some(Ty::Path(enum_name, Vec::new())); + } + } + + if enum_path.len() == 1 { + let qualified = format!("{}.{}", module_name, enum_name); + if let Some(info) = enum_map.get(&qualified) { + if info.variants.iter().any(|name| name == &variant[0]) { + return Some(Ty::Path(qualified, Vec::new())); + } + } + } + + None +} + +pub(super) fn resolve_type_name(path: &Path, use_map: &UseMap, stdlib: &StdlibIndex) -> String { + let resolved = resolve_path(path, use_map); + if resolved.len() == 1 { + if let Some(full) = stdlib.types.get(&resolved[0]) { + return full.clone(); + } + } + resolved.join(".") +} + +pub(super) fn resolve_trait_name(path: &Path, use_map: &UseMap, module_name: &str) -> String { + let resolved = resolve_path(path, use_map); + if resolved.len() == 1 { + return format!("{module_name}.{}", resolved[0]); + } + resolved.join(".") +} diff --git a/capc/src/typeck/safety.rs b/capc/src/typeck/safety.rs new file mode 100644 index 0000000..3e63a9f --- /dev/null +++ b/capc/src/typeck/safety.rs @@ -0,0 +1,300 @@ +use std::collections::{HashMap, HashSet}; + +use crate::ast::*; +use crate::error::TypeError; + +use super::PackageSafety; + +/// Safe packages cannot mention externs or raw pointer types 
anywhere. +pub(super) fn validate_package_safety(module: &Module, is_stdlib: bool) -> Result<(), TypeError> { + if module.package != PackageSafety::Safe { + return Ok(()); + } + for item in &module.items { + match item { + Item::ExternFunction(func) => { + return Err(TypeError::new( + "extern declarations require `package unsafe`".to_string(), + func.span, + )); + } + Item::Function(func) => { + if !is_stdlib { + if let Some(span) = type_contains_ptr_fn(func) { + return Err(TypeError::new( + "raw pointer types require `package unsafe`".to_string(), + span, + )); + } + if let Some(span) = type_contains_slice(&func.ret) { + return Err(TypeError::new( + "Slice types cannot be returned from safe modules".to_string(), + span, + )); + } + } + } + Item::Impl(impl_block) => { + if is_stdlib { + continue; + } + for method in &impl_block.methods { + if let Some(span) = type_contains_ptr_fn(method) { + return Err(TypeError::new( + "raw pointer types require `package unsafe`".to_string(), + span, + )); + } + if let Some(span) = type_contains_slice(&method.ret) { + return Err(TypeError::new( + "Slice types cannot be returned from safe modules".to_string(), + span, + )); + } + } + } + Item::Struct(decl) => { + if is_stdlib { + continue; + } + if let Some(span) = type_contains_ptr_struct(decl) { + return Err(TypeError::new( + "raw pointer types require `package unsafe`".to_string(), + span, + )); + } + if let Some(span) = type_contains_slice_struct(decl) { + return Err(TypeError::new( + "Slice types cannot appear in structs in safe modules".to_string(), + span, + )); + } + } + Item::Enum(decl) => { + if !is_stdlib { + if let Some(span) = type_contains_ptr_enum(decl) { + return Err(TypeError::new( + "raw pointer types require `package unsafe`".to_string(), + span, + )); + } + if let Some(span) = type_contains_slice_enum(decl) { + return Err(TypeError::new( + "Slice types cannot appear in enums in safe modules".to_string(), + span, + )); + } + } + } + Item::Trait(_) => {} + } + } + 
Ok(()) +} + +pub(super) fn validate_import_safety( + module: &Module, + package_map: &HashMap, + stdlib_names: &HashSet, +) -> Result<(), TypeError> { + if module.package != PackageSafety::Safe { + return Ok(()); + } + for use_decl in &module.uses { + let mut name = String::new(); + for (i, seg) in use_decl.path.segments.iter().enumerate() { + if i > 0 { + name.push('.'); + } + name.push_str(&seg.item); + } + if let Some(pkg) = package_map.get(&name) { + if *pkg == PackageSafety::Unsafe { + if stdlib_names.contains(&name) { + continue; + } + return Err(TypeError::new( + format!("safe module cannot import unsafe module `{name}`"), + use_decl.span, + )); + } + } + } + Ok(()) +} + +fn type_contains_ptr(ty: &Type) -> Option { + match ty { + Type::Ptr { span, .. } => Some(*span), + Type::Ref { target, .. } => type_contains_ptr(target), + Type::Path { args, .. } => { + for arg in args { + if let Some(span) = type_contains_ptr(arg) { + return Some(span); + } + } + None + } + } +} + +fn type_contains_ptr_fn(func: &Function) -> Option { + for param in &func.params { + if let Some(ty) = ¶m.ty { + if let Some(span) = type_contains_ptr(ty) { + return Some(span); + } + } + } + if let Some(span) = type_contains_ptr(&func.ret) { + return Some(span); + } + block_contains_ptr(&func.body) +} + +fn type_contains_ptr_struct(decl: &StructDecl) -> Option { + for field in &decl.fields { + if let Some(span) = type_contains_ptr(&field.ty) { + return Some(span); + } + } + None +} + +fn type_contains_ptr_enum(decl: &EnumDecl) -> Option { + for variant in &decl.variants { + if let Some(payload) = &variant.payload { + if let Some(span) = type_contains_ptr(payload) { + return Some(span); + } + } + } + None +} + +fn is_slice_type_path(path: &Path) -> bool { + let Some(last) = path.segments.last() else { + return false; + }; + if last.item != "Slice" && last.item != "MutSlice" { + return false; + } + if path.segments.len() == 1 { + return true; + } + if path.segments.len() == 3 { + return 
path.segments[0].item == "sys" + && path.segments[1].item == "buffer" + && (last.item == "Slice" || last.item == "MutSlice"); + } + false +} + +fn type_contains_slice(ty: &Type) -> Option { + match ty { + Type::Path { path, args, span } => { + if is_slice_type_path(path) { + return Some(*span); + } + for arg in args { + if let Some(span) = type_contains_slice(arg) { + return Some(span); + } + } + None + } + Type::Ptr { target, .. } | Type::Ref { target, .. } => type_contains_slice(target), + } +} + +fn type_contains_slice_struct(decl: &StructDecl) -> Option { + for field in &decl.fields { + if let Some(span) = type_contains_slice(&field.ty) { + return Some(span); + } + } + None +} + +fn type_contains_slice_enum(decl: &EnumDecl) -> Option { + for variant in &decl.variants { + if let Some(payload) = &variant.payload { + if let Some(span) = type_contains_slice(payload) { + return Some(span); + } + } + } + None +} + +fn block_contains_ptr(block: &Block) -> Option { + for stmt in &block.stmts { + match stmt { + Stmt::Let(let_stmt) => { + if let Some(ty) = &let_stmt.ty { + if let Some(span) = type_contains_ptr(ty) { + return Some(span); + } + } + } + Stmt::LetElse(let_else) => { + if let Some(span) = block_contains_ptr(&let_else.else_block) { + return Some(span); + } + } + Stmt::TryLet(try_let) => { + if let Some(ty) = &try_let.ty { + if let Some(span) = type_contains_ptr(ty) { + return Some(span); + } + } + if let Some(span) = block_contains_ptr(&try_let.else_block) { + return Some(span); + } + } + Stmt::TryElse(try_else) => { + if let Some(span) = block_contains_ptr(&try_else.else_block) { + return Some(span); + } + } + Stmt::Assign(_) => {} + Stmt::Defer(_) => {} + Stmt::Break(_) => {} + Stmt::Continue(_) => {} + Stmt::If(if_stmt) => { + if let Some(span) = block_contains_ptr(&if_stmt.then_block) { + return Some(span); + } + if let Some(span) = if_stmt.else_block.as_ref().and_then(block_contains_ptr) { + return Some(span); + } + } + Stmt::While(while_stmt) => { + if 
let Some(span) = block_contains_ptr(&while_stmt.body) { + return Some(span); + } + } + Stmt::For(for_stmt) => { + if let Some(span) = block_contains_ptr(&for_stmt.body) { + return Some(span); + } + } + Stmt::ForEach(for_each) => { + if let Some(span) = block_contains_ptr(&for_each.body) { + return Some(span); + } + } + Stmt::Expr(expr_stmt) => { + if let Expr::Match(match_expr) = &expr_stmt.expr { + for arm in &match_expr.arms { + if let Some(span) = block_contains_ptr(&arm.body) { + return Some(span); + } + } + } + } + Stmt::Return(_) => {} + } + } + None +} diff --git a/capc/src/typeck/type_params.rs b/capc/src/typeck/type_params.rs new file mode 100644 index 0000000..967bc42 --- /dev/null +++ b/capc/src/typeck/type_params.rs @@ -0,0 +1,92 @@ +use std::collections::{HashMap, HashSet}; + +use crate::ast::TypeParam; +use crate::error::TypeError; + +use super::{resolve_trait_name, Ty, UseMap, RESERVED_TYPE_PARAMS}; + +/// Build type argument suffix for method names (e.g., "__u8" for Vec). +/// This is used to distinguish type-specific impl methods from generic ones. 
+pub(crate) fn build_type_arg_suffix(type_args: &[Ty]) -> String { + if type_args.is_empty() { + return String::new(); + } + let args: Vec = type_args + .iter() + .filter_map(|arg| match arg { + Ty::Builtin(b) => Some(format!("{:?}", b).to_lowercase()), + Ty::Path(name, _) => name.rsplit_once('.').map(|(_, t)| t.to_string()), + Ty::Param(_) => None, + _ => None, + }) + .collect(); + if args.is_empty() { + String::new() + } else { + format!("__{}", args.join("_")) + } +} + +pub(super) fn build_type_params(params: &[TypeParam]) -> Result, TypeError> { + let mut set = HashSet::new(); + for param in params { + let name = param.name.item.as_str(); + if RESERVED_TYPE_PARAMS.contains(&name) { + return Err(TypeError::new( + format!("type parameter `{}` is reserved", param.name.item), + param.name.span, + )); + } + if !set.insert(param.name.item.clone()) { + return Err(TypeError::new( + format!("duplicate type parameter `{}`", param.name.item), + param.name.span, + )); + } + } + Ok(set) +} + +pub(super) fn build_type_param_bounds( + params: &[TypeParam], + use_map: &UseMap, + module_name: &str, +) -> HashMap> { + let mut bounds = HashMap::new(); + for param in params { + let mut resolved = Vec::new(); + for bound in ¶m.bounds { + resolved.push(resolve_trait_name(bound, use_map, module_name)); + } + bounds.insert(param.name.item.clone(), resolved); + } + bounds +} + +pub(super) fn type_param_names(params: &[TypeParam]) -> Vec { + params.iter().map(|param| param.name.item.clone()).collect() +} + +pub(super) fn merge_type_params( + base: &HashSet, + params: &[TypeParam], +) -> Result, TypeError> { + let mut set = base.clone(); + for param in params { + let name = param.name.item.as_str(); + if RESERVED_TYPE_PARAMS.contains(&name) { + return Err(TypeError::new( + format!("type parameter `{}` is reserved", param.name.item), + param.name.span, + )); + } + if set.contains(¶m.name.item) { + return Err(TypeError::new( + format!("duplicate type parameter `{}`", param.name.item), + 
param.name.span, + )); + } + set.insert(param.name.item.clone()); + } + Ok(set) +} diff --git a/capc/tests/parser.rs b/capc/tests/parser.rs index 48d632b..ac02437 100644 --- a/capc/tests/parser.rs +++ b/capc/tests/parser.rs @@ -2,6 +2,29 @@ use std::path::PathBuf; use capc::parse_module; +fn sanitize_debug_ids(input: &str) -> String { + let mut result = String::new(); + let mut skip_lines = 0usize; + for line in input.lines() { + if skip_lines > 0 { + skip_lines -= 1; + continue; + } + if line.trim() == "id: ExprId(" { + skip_lines = 2; + continue; + } + result.push_str(line); + result.push('\n'); + } + result +} + +fn assert_module_snapshot(name: &str, module: &capc::ast::Module) { + let debug = format!("{module:#?}"); + insta::assert_snapshot!(name, sanitize_debug_ids(&debug)); +} + fn load_program(name: &str) -> String { let path = PathBuf::from(env!("CARGO_MANIFEST_DIR")) .join("../tests/programs") @@ -13,33 +36,33 @@ fn load_program(name: &str) -> String { fn snapshot_basic_module() { let source = load_program("hello.cap"); let module = parse_module(&source).expect("parse module"); - insta::assert_debug_snapshot!(module); + assert_module_snapshot("snapshot_basic_module", &module); } #[test] fn snapshot_struct_and_match() { let source = load_program("fs_read.cap"); let module = parse_module(&source).expect("parse module"); - insta::assert_debug_snapshot!(module); + assert_module_snapshot("snapshot_struct_and_match", &module); } #[test] fn snapshot_struct_literal() { let source = load_program("struct_literal.cap"); let module = parse_module(&source).expect("parse module"); - insta::assert_debug_snapshot!(module); + assert_module_snapshot("snapshot_struct_literal", &module); } #[test] fn snapshot_doc_comments() { let source = load_program("doc_comments.cap"); let module = parse_module(&source).expect("parse module"); - insta::assert_debug_snapshot!(module); + assert_module_snapshot("snapshot_doc_comments", &module); } #[test] fn snapshot_generics_basic() { let 
source = load_program("generics_basic.cap"); let module = parse_module(&source).expect("parse module"); - insta::assert_debug_snapshot!(module); + assert_module_snapshot("snapshot_generics_basic", &module); } diff --git a/stdlib/sys/option.cap b/stdlib/sys/option.cap index 421ab7a..edafe5e 100644 --- a/stdlib/sys/option.cap +++ b/stdlib/sys/option.cap @@ -13,32 +13,32 @@ pub enum Option { impl Option { /// True if Some. pub fn is_some(self) -> bool { - match self { - Option::Some(_) => { return true } - Option::None => { return false } + return match self { + Option::Some(_) => { true } + Option::None => { false } } } /// True if None. pub fn is_none(self) -> bool { - match self { - Option::Some(_) => { return false } - Option::None => { return true } + return match self { + Option::Some(_) => { false } + Option::None => { true } } } /// Return the inner value or a default. pub fn unwrap_or(self, default: T) -> T { - match self { - Option::Some(val) => { return val } - Option::None => { return default } + return match self { + Option::Some(val) => { val } + Option::None => { default } } } /// Unwrap the inner value or panic. 
pub fn unwrap(self) -> T { - match self { - Option::Some(val) => { return val } + return match self { + Option::Some(val) => { val } Option::None => { panic() } } } diff --git a/stdlib/sys/path.cap b/stdlib/sys/path.cap index f463a7a..3e0a0db 100644 --- a/stdlib/sys/path.cap +++ b/stdlib/sys/path.cap @@ -14,8 +14,8 @@ fn trim_trailing_slashes(raw_path: string) -> string { if (end == raw_path.len()) { return raw_path } - match (raw_path.slice_range(0, end)) { - Ok(out) => { return out } + return match (raw_path.slice_range(0, end)) { + Ok(out) => { out } Err(_) => { panic() } } } @@ -70,9 +70,9 @@ pub fn clean_relative_with_alloc(alloc: buffer::Alloc, raw_path: string) -> Resu } let result = out.copy_string() out.free() - match (result) { - Ok(path) => { return Ok(path) } - Err(_) => { return Err(()) } + return match (result) { + Ok(path) => { Ok(path) } + Err(_) => { Err(()) } } } @@ -97,32 +97,35 @@ pub fn basename_view(raw_path: string) -> string { if (path.is_empty()) { return path } - match (path.last_index_of_byte('/')) { + return match (path.last_index_of_byte('/')) { Ok(i) => { match (path.slice_range(i + 1, path.len())) { - Ok(out) => { return out } + Ok(out) => { out } Err(_) => { panic() } } } - Err(_) => { return path } + Err(_) => { path } } } /// Return the dirname of a path as a view. pub fn dirname_view(raw_path: string) -> string { let path = trim_trailing_slashes(raw_path) + let result = "" match (path.last_index_of_byte('/')) { Ok(i) => { if (i == 0) { - return "" - } - match (path.slice_range(0, i)) { - Ok(out) => { return out } - Err(_) => { panic() } + result = "" + } else { + match (path.slice_range(0, i)) { + Ok(out) => { result = out } + Err(_) => { panic() } + } } } - Err(_) => { return "" } + Err(_) => { result = "" } } + return result } /// Join two path segments using the process default allocator. 
diff --git a/stdlib/sys/string.cap b/stdlib/sys/string.cap index a39b8c1..4f1902c 100644 --- a/stdlib/sys/string.cap +++ b/stdlib/sys/string.cap @@ -83,8 +83,8 @@ fn build_range(alloc: buffer::Alloc, s: string, start: i32, end: i32) -> string } i = i + 1 } - match (buf.copy_string()) { - Ok(out) => { return out } + return match (buf.copy_string()) { + Ok(out) => { out } Err(_) => { panic() } } } @@ -96,73 +96,73 @@ fn view_range(s: string, start: i32, end: i32) -> string { if (start == 0 && end == s.len()) { return s } - match (s.slice_range(start, end)) { - Ok(out) => { return out } + return match (s.slice_range(start, end)) { + Ok(out) => { out } Err(_) => { panic() } } } fn lower_ascii_byte(b: u8) -> u8 { - match (b) { - 'A' => { return 'a' } - 'B' => { return 'b' } - 'C' => { return 'c' } - 'D' => { return 'd' } - 'E' => { return 'e' } - 'F' => { return 'f' } - 'G' => { return 'g' } - 'H' => { return 'h' } - 'I' => { return 'i' } - 'J' => { return 'j' } - 'K' => { return 'k' } - 'L' => { return 'l' } - 'M' => { return 'm' } - 'N' => { return 'n' } - 'O' => { return 'o' } - 'P' => { return 'p' } - 'Q' => { return 'q' } - 'R' => { return 'r' } - 'S' => { return 's' } - 'T' => { return 't' } - 'U' => { return 'u' } - 'V' => { return 'v' } - 'W' => { return 'w' } - 'X' => { return 'x' } - 'Y' => { return 'y' } - 'Z' => { return 'z' } - _ => { return b } + return match (b) { + 'A' => { 'a' } + 'B' => { 'b' } + 'C' => { 'c' } + 'D' => { 'd' } + 'E' => { 'e' } + 'F' => { 'f' } + 'G' => { 'g' } + 'H' => { 'h' } + 'I' => { 'i' } + 'J' => { 'j' } + 'K' => { 'k' } + 'L' => { 'l' } + 'M' => { 'm' } + 'N' => { 'n' } + 'O' => { 'o' } + 'P' => { 'p' } + 'Q' => { 'q' } + 'R' => { 'r' } + 'S' => { 's' } + 'T' => { 't' } + 'U' => { 'u' } + 'V' => { 'v' } + 'W' => { 'w' } + 'X' => { 'x' } + 'Y' => { 'y' } + 'Z' => { 'z' } + _ => { b } } } fn upper_ascii_byte(b: u8) -> u8 { - match (b) { - 'a' => { return 'A' } - 'b' => { return 'B' } - 'c' => { return 'C' } - 'd' => { return 'D' 
} - 'e' => { return 'E' } - 'f' => { return 'F' } - 'g' => { return 'G' } - 'h' => { return 'H' } - 'i' => { return 'I' } - 'j' => { return 'J' } - 'k' => { return 'K' } - 'l' => { return 'L' } - 'm' => { return 'M' } - 'n' => { return 'N' } - 'o' => { return 'O' } - 'p' => { return 'P' } - 'q' => { return 'Q' } - 'r' => { return 'R' } - 's' => { return 'S' } - 't' => { return 'T' } - 'u' => { return 'U' } - 'v' => { return 'V' } - 'w' => { return 'W' } - 'x' => { return 'X' } - 'y' => { return 'Y' } - 'z' => { return 'Z' } - _ => { return b } + return match (b) { + 'a' => { 'A' } + 'b' => { 'B' } + 'c' => { 'C' } + 'd' => { 'D' } + 'e' => { 'E' } + 'f' => { 'F' } + 'g' => { 'G' } + 'h' => { 'H' } + 'i' => { 'I' } + 'j' => { 'J' } + 'k' => { 'K' } + 'l' => { 'L' } + 'm' => { 'M' } + 'n' => { 'N' } + 'o' => { 'O' } + 'p' => { 'P' } + 'q' => { 'Q' } + 'r' => { 'R' } + 's' => { 'S' } + 't' => { 'T' } + 'u' => { 'U' } + 'v' => { 'V' } + 'w' => { 'W' } + 'x' => { 'X' } + 'y' => { 'Y' } + 'z' => { 'Z' } + _ => { b } } } @@ -874,17 +874,17 @@ impl string { /// True if the substring is present. pub fn contains(self, needle: string) -> bool { - match (self.index_of(needle)) { - Ok(_) => { return true } - Err(_) => { return false } + return match (self.index_of(needle)) { + Ok(_) => { true } + Err(_) => { false } } } /// True if the byte is present. 
pub fn contains_byte(self, needle: u8) -> bool { - match (self.index_of_byte(needle)) { - Ok(_) => { return true } - Err(_) => { return false } + return match (self.index_of_byte(needle)) { + Ok(_) => { true } + Err(_) => { false } } } @@ -963,8 +963,8 @@ impl string { } i = i + 1 } - match (buf.copy_string()) { - Ok(out) => { return out } + return match (buf.copy_string()) { + Ok(out) => { out } Err(_) => { panic() } } } @@ -989,8 +989,8 @@ impl string { } i = i + 1 } - match (buf.copy_string()) { - Ok(out) => { return out } + return match (buf.copy_string()) { + Ok(out) => { out } Err(_) => { panic() } } } diff --git a/stdlib/sys/vec.cap b/stdlib/sys/vec.cap index f5adba4..f49b40d 100644 --- a/stdlib/sys/vec.cap +++ b/stdlib/sys/vec.cap @@ -635,8 +635,8 @@ impl Vec { /// Borrow the bytes as a string view (no copy). /// The view is invalid after freeing this Vec. pub fn as_string(self) -> string { - match (string::from_bytes(self.as_slice())) { - Ok(s) => { return s } + return match (string::from_bytes(self.as_slice())) { + Ok(s) => { s } Err(_) => { panic() } } } From 3451927f9948cbdcc0e6203342007b51d160dba7 Mon Sep 17 00:00:00 2001 From: Jordan Mecom Date: Wed, 25 Mar 2026 13:51:15 -0700 Subject: [PATCH 09/17] Fix try-let loop control paths --- capc/src/codegen/emit.rs | 38 +++++++++---- capc/src/codegen/emit/match_lowering.rs | 3 +- capc/src/typeck/check.rs | 66 +++++++++++++++------- capc/tests/run.rs | 14 +++++ capc/tests/typecheck.rs | 16 ++++++ tests/programs/try_let_continue.cap | 29 ++++++++++ tests/programs/try_let_continue_linear.cap | 33 +++++++++++ 7 files changed, 167 insertions(+), 32 deletions(-) create mode 100644 tests/programs/try_let_continue.cap create mode 100644 tests/programs/try_let_continue_linear.cap diff --git a/capc/src/codegen/emit.rs b/capc/src/codegen/emit.rs index 5f09d7a..6f4d4f5 100644 --- a/capc/src/codegen/emit.rs +++ b/capc/src/codegen/emit.rs @@ -269,17 +269,32 @@ fn emit_hir_stmt_inner( return Ok(Flow::Continues); } } - let 
value = emit_hir_expr( - builder, - &let_stmt.expr, - locals, - fn_map, - enum_index, - struct_layouts, - return_lowering, - module, - data_counter, - )?; + let value = if let crate::hir::HirExpr::Match(match_expr) = &let_stmt.expr { + match_lowering::emit_hir_match_expr( + builder, + match_expr, + locals, + fn_map, + enum_index, + struct_layouts, + return_lowering, + module, + data_counter, + loop_target, + )? + } else { + emit_hir_expr( + builder, + &let_stmt.expr, + locals, + fn_map, + enum_index, + struct_layouts, + return_lowering, + module, + data_counter, + )? + }; if let crate::typeck::Ty::Path(name, _) = &let_stmt.ty.ty { if let Some(layout) = enum_index.layouts.get(name) { let align = layout.align.max(1); @@ -1841,6 +1856,7 @@ fn emit_hir_expr_inner( return_lowering, module, data_counter, + None, // nested expression matches still cannot break/continue ) } } diff --git a/capc/src/codegen/emit/match_lowering.rs b/capc/src/codegen/emit/match_lowering.rs index dd31ad7..39f78dc 100644 --- a/capc/src/codegen/emit/match_lowering.rs +++ b/capc/src/codegen/emit/match_lowering.rs @@ -207,6 +207,7 @@ pub(super) fn emit_hir_match_expr( return_lowering: &ReturnLowering, module: &mut ObjectModule, data_counter: &mut u32, + loop_target: Option, ) -> Result { use crate::hir::HirStmt; @@ -296,7 +297,7 @@ pub(super) fn emit_hir_match_expr( struct_layouts, module, data_counter, - None, + loop_target, return_lowering, &mut arm_defers, )?; diff --git a/capc/src/typeck/check.rs b/capc/src/typeck/check.rs index 7c5a172..54dd183 100644 --- a/capc/src/typeck/check.rs +++ b/capc/src/typeck/check.rs @@ -262,23 +262,46 @@ fn check_stmt( } else { UseMode::Move }; - let expr_ty = check_expr( - &let_stmt.expr, - functions, - trait_map, - trait_impls, - scopes, - expr_use_mode, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - ret_ty, - module_name, - type_params, - type_param_bounds, - )?; + let expr_ty = if let Expr::Match(match_expr) = &let_stmt.expr { + let expr_ty = 
check_match_expr_value( + match_expr, + functions, + trait_map, + trait_impls, + scopes, + expr_use_mode, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + in_loop, + )?; + recorder.record(&let_stmt.expr, &expr_ty); + expr_ty + } else { + check_expr( + &let_stmt.expr, + functions, + trait_map, + trait_impls, + scopes, + expr_use_mode, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + )? + }; let final_ty = if let Some(annot) = &let_stmt.ty { if let Some(span) = type_contains_ref(annot) { match annot { @@ -1914,7 +1937,7 @@ pub(super) fn check_expr( module_name, type_params, type_param_bounds, - false, // break/continue not allowed in value-producing match + false, // nested expression matches still cannot break/continue ), Expr::Try(try_expr) => { let inner_ty = check_expr( @@ -2349,7 +2372,10 @@ fn check_match_expr_value( in_loop, )?; arm_scope.pop_scope(); - arm_scopes.push(arm_scope); + let arm_continues = !matches!(arm_ty, Ty::Builtin(BuiltinType::Never)); + if arm_continues { + arm_scopes.push(arm_scope); + } if let Some(prev) = &result_ty { if matches!(prev, Ty::Builtin(BuiltinType::Never)) { result_ty = Some(arm_ty); @@ -2373,7 +2399,7 @@ fn check_match_expr_value( module_name, match_expr.match_span, )?; - if !module_name.starts_with("sys.") { + if !module_name.starts_with("sys.") && !arm_scopes.is_empty() { merge_match_states(scopes, &arm_scopes, struct_map, enum_map, match_expr.span)?; } Ok(result_ty.unwrap_or(Ty::Builtin(BuiltinType::Unit))) diff --git a/capc/tests/run.rs b/capc/tests/run.rs index c7432ca..cffe12d 100644 --- a/capc/tests/run.rs +++ b/capc/tests/run.rs @@ -160,6 +160,20 @@ fn run_try_let() { assert!(stdout.contains("try let ok"), "stdout was: {stdout:?}"); } +#[test] +fn run_try_let_continue() { + let out_dir = make_out_dir("try_let_continue"); + let out_dir = out_dir.to_str().expect("utf8 out 
dir"); + let (code, stdout, _stderr) = run_capc(&[ + "run", + "--out-dir", + out_dir, + "tests/programs/try_let_continue.cap", + ]); + assert_eq!(code, 0); + assert!(stdout.contains("try let continue ok"), "stdout was: {stdout:?}"); +} + #[test] fn run_expr_else() { let out_dir = make_out_dir("expr_else"); diff --git a/capc/tests/typecheck.rs b/capc/tests/typecheck.rs index f390ec6..273374e 100644 --- a/capc/tests/typecheck.rs +++ b/capc/tests/typecheck.rs @@ -76,6 +76,22 @@ fn typecheck_try_let_ok() { type_check_program(&module, &stdlib, &[]).expect("typecheck module"); } +#[test] +fn typecheck_try_let_continue_ok() { + let source = load_program("try_let_continue.cap"); + let module = parse_module(&source).expect("parse module"); + let stdlib = load_stdlib().expect("load stdlib"); + type_check_program(&module, &stdlib, &[]).expect("typecheck module"); +} + +#[test] +fn typecheck_try_let_continue_linear_ok() { + let source = load_program("try_let_continue_linear.cap"); + let module = parse_module(&source).expect("parse module"); + let stdlib = load_stdlib().expect("load stdlib"); + type_check_program(&module, &stdlib, &[]).expect("typecheck module"); +} + #[test] fn typecheck_expr_else_ok() { let source = load_program("expr_else.cap"); diff --git a/tests/programs/try_let_continue.cap b/tests/programs/try_let_continue.cap new file mode 100644 index 0000000..24f3b10 --- /dev/null +++ b/tests/programs/try_let_continue.cap @@ -0,0 +1,29 @@ +package safe +module try_let_continue +use sys::system + +fn parse(i: i32) -> Result { + if (i == 0) { + return Err("skip") + } + return Ok(i) +} + +pub fn main(rc: RootCap) -> i32 { + let c = rc.mint_console() + let i = 0 + let sum = 0 + + while (i < 3) { + try let n = parse(i) else { + i = i + 1 + continue + } + sum = sum + n + i = i + 1 + } + + c.assert(sum == 3) + c.println("try let continue ok") + return 0 +} diff --git a/tests/programs/try_let_continue_linear.cap b/tests/programs/try_let_continue_linear.cap new file mode 
100644 index 0000000..0e1b81d --- /dev/null +++ b/tests/programs/try_let_continue_linear.cap @@ -0,0 +1,33 @@ +package safe +module try_let_continue_linear + +linear capability struct Token + +fn parse(i: i32) -> Result { + if (i == 0) { + return Err("skip") + } + return Ok(i) +} + +pub fn main() -> i32 { + let i = 0 + let sum = 0 + + while (i < 3) { + let token = Token{} + try let n = parse(i) else { + drop(token) + i = i + 1 + continue + } + sum = sum + n + drop(token) + i = i + 1 + } + + if (sum != 3) { + return 1 + } + return 0 +} From 1a1db21eab04db60fe408cf1931b0ba630247e87 Mon Sep 17 00:00:00 2001 From: Jordan Mecom Date: Wed, 25 Mar 2026 16:04:05 -0700 Subject: [PATCH 10/17] Refactor compiler internals and split large passes --- COMPILER_CLEANUP.md | 397 +--- capc/src/codegen/emit.rs | 317 +-- capc/src/codegen/emit/arith.rs | 134 ++ capc/src/codegen/emit/defer.rs | 162 ++ capc/src/codegen/intrinsics.rs | 880 +------- capc/src/codegen/intrinsics/io.rs | 557 +++++ capc/src/codegen/intrinsics/memory.rs | 185 ++ capc/src/parser.rs | 1623 +-------------- capc/src/parser/exprs.rs | 503 +++++ capc/src/parser/items.rs | 448 ++++ capc/src/parser/patterns.rs | 142 ++ capc/src/parser/stmts.rs | 393 ++++ capc/src/parser/types.rs | 89 + capc/src/typeck/check.rs | 2480 +++-------------------- capc/src/typeck/check/calls.rs | 831 ++++++++ capc/src/typeck/check/match_check.rs | 458 +++++ capc/src/typeck/check/stmt.rs | 611 ++++++ capc/src/typeck/check/type_params.rs | 205 ++ capc/src/typeck/lower.rs | 76 +- capc/src/typeck/monomorphize.rs | 271 +-- capc/src/typeck/monomorphize/support.rs | 266 +++ 21 files changed, 5410 insertions(+), 5618 deletions(-) create mode 100644 capc/src/codegen/emit/arith.rs create mode 100644 capc/src/codegen/emit/defer.rs create mode 100644 capc/src/codegen/intrinsics/io.rs create mode 100644 capc/src/codegen/intrinsics/memory.rs create mode 100644 capc/src/parser/exprs.rs create mode 100644 capc/src/parser/items.rs create mode 100644 
capc/src/parser/patterns.rs create mode 100644 capc/src/parser/stmts.rs create mode 100644 capc/src/parser/types.rs create mode 100644 capc/src/typeck/check/calls.rs create mode 100644 capc/src/typeck/check/match_check.rs create mode 100644 capc/src/typeck/check/stmt.rs create mode 100644 capc/src/typeck/check/type_params.rs create mode 100644 capc/src/typeck/monomorphize/support.rs diff --git a/COMPILER_CLEANUP.md b/COMPILER_CLEANUP.md index 2c58173..a05ef8a 100644 --- a/COMPILER_CLEANUP.md +++ b/COMPILER_CLEANUP.md @@ -1,349 +1,108 @@ # Compiler Cleanup Plan -This document is a focused cleanup plan for `capc/`. +This pass is complete. -The compiler is not junk. The overall pass structure is reasonable, the code is -mostly direct, and the test suite is catching real regressions. But the -implementation has reached the point where adding more language features will -keep making the compiler denser and more accidental unless we clean up the -phase boundaries. +The goal was not a rewrite. The goal was to remove the worst structural +accidents that had built up in `capc/` while keeping language behavior stable. -This plan is intentionally pragmatic. It is not a rewrite-from-scratch plan. +## Outcomes -## Progress +- lowering now has real lexical scopes instead of fake `push_scope` / + `pop_scope` placeholders +- match / `try` control-flow handling is more uniform across type checking and + codegen, including loop-aware `try let ... 
else { continue }` cases +- `typeck/check.rs` is split by concern +- `parser.rs` is split by syntactic domain +- monomorphization helper logic is split out of the main pass +- runtime intrinsic registration is grouped by domain instead of one large file +- normal compiler paths no longer rely on the most obvious `expect(...)` / + `unreachable!()` traps in lowering, parser expression parsing, and loop + codegen -- [x] Phase 1: Stable expression identity -- [x] Phase 2: Explicit desugar pass -- [x] Phase 3: Split type checking by responsibility -- [x] Phase 4: Split codegen emission -- [x] Phase 5: Centralize the CLI/compiler pipeline +## Final Status -### Completed So Far +- [x] First cleanup pass: stable expression identity, desugar pass, first + `typeck` split, first codegen split, shared driver pipeline +- [x] Milestone 1: real lowering scopes +- [x] Milestone 2: unify match and `try` control-flow handling +- [x] Milestone 3: split `typeck/check.rs` +- [x] Milestone 4: split `codegen/emit.rs` further +- [x] Milestone 5: split `parser.rs` +- [x] Milestone 6: split `typeck/monomorphize.rs` and `codegen/intrinsics.rs` +- [x] Milestone 7: remove remaining internal panics on normal compiler paths -- Added stable `ExprId` tracking to AST expressions and switched typed-expression - tables from `Span` keys to `ExprId`. -- Added a dedicated `desugar` pass and moved parser-side lowering of - `let ... else`, `try`, and borrowed `for` sugar out of `parser.rs`. -- Removed lowering's type-check fallback path and the `allow_type_fallback` - escape hatch. Missing type metadata is now an internal compiler bug again. -- Kept parser snapshots stable by redacting internal expression IDs from - snapshot output instead of baking those IDs into the public AST snapshots. 
-- Split `typeck` support logic into focused modules: - - `resolve.rs` - - `kinds.rs` - - `safety.rs` - - `moveck.rs` - - `infer.rs` - - `patterns.rs` - - `type_params.rs` -- Shrunk `typeck/mod.rs` down to orchestration and data definitions and moved - the high-churn helper logic out of `check.rs`. -- Split codegen emission by concern: - - `codegen/emit.rs` remains the coordinator - - `codegen/emit/match_lowering.rs` owns match-expression and match-statement lowering - - `codegen/emit/runtime.rs` owns runtime-wrapper and unsafe-pointer emission -- Extended match-expression lowering to support `Result`-shaped values instead - of only `unit` and single scalars. -- Added a dedicated `driver.rs` pipeline module that owns: - - entry loading - - stdlib/user module graph loading - - safe-only enforcement - - type checking - - object build and executable link steps -- Removed the duplicated parse/load/check/build orchestration from `main.rs` - and centralized tool resolution for `cargo`/`rustc`. -- Tightened stdlib handling: - - runtime-backed stdlib stubs are recognized explicitly - - their fake source bodies are no longer type-checked or lowered as real code - - helper stdlib modules are now checked on the same pipeline as user code -- Fixed several previously hidden stdlib issues that surfaced once stdlib was - type-checked consistently (`sys.option`, `sys.path`, `sys.string`, `sys.vec`). +## What Landed -## Goals +### Lowering -- Make compiler phases easier to reason about. -- Remove brittle implementation techniques that cause accidental regressions. -- Shrink the blast radius of language changes. -- Keep the language behavior stable while improving internal structure. +- `capc/src/typeck/lower.rs` + - lowering locals now live in real scoped stacks + - synthetic bindings and ordinary locals share the same scope machinery + - path-based method fallback no longer uses unchecked `expect(...)` -## Non-Goals +### Type Checking -- Rewriting the compiler from scratch. 
-- Changing the language surface as part of cleanup. -- Expanding traits, generics, or remote capability support during cleanup. +- `capc/src/typeck/check.rs` +- `capc/src/typeck/check/stmt.rs` +- `capc/src/typeck/check/match_check.rs` +- `capc/src/typeck/check/calls.rs` +- `capc/src/typeck/check/type_params.rs` -## Current Assessment +The root checker is now a coordinator. Statement checking, match checking, +call/method-call checking, and generic substitution logic are separated. -The main issues are: +### Parsing -1. Typed expressions are keyed by `Span`, not stable node identity. -2. The parser performs semantic desugaring and now also injects hidden resource - management. -3. Type checking and lowering leak into each other. -4. A few files are now too large to evolve safely. -5. Some semantic logic is duplicated across phases. -6. The CLI/compiler driver pipeline is repetitive. +- `capc/src/parser.rs` +- `capc/src/parser/items.rs` +- `capc/src/parser/stmts.rs` +- `capc/src/parser/exprs.rs` +- `capc/src/parser/patterns.rs` +- `capc/src/parser/types.rs` -The most important concrete example is the span-keyed type table. We already hit -this while implementing borrowed `for` iteration: synthetic expressions that -shared spans collided in the type table and produced wrong lowering behavior. +The root parser now owns shared state and common helpers. Items, statements, +expressions, patterns, and types live in separate modules. -## Priority Order +### Codegen -Do these in order: +- `capc/src/codegen/emit.rs` +- `capc/src/codegen/emit/defer.rs` +- `capc/src/codegen/emit/match_lowering.rs` +- `capc/src/codegen/emit/runtime.rs` +- `capc/src/codegen/emit/arith.rs` -1. Replace span-keyed typing with stable expression identity. -2. Introduce an explicit desugar pass after parsing. -3. Split `typeck` into smaller, cleaner submodules. -4. Split `codegen/emit.rs` into focused emission modules. -5. Centralize the CLI/compiler pipeline. 
+`emit.rs` is still the largest single file in the compiler, but the helper +domains that caused the most accidental coupling are now extracted: +defer handling, match lowering, runtime-wrapper lowering, and arithmetic / +trap helpers. -## Phase 1: Stable Expression Identity +### Monomorphization -This is the highest-value cleanup. +- `capc/src/typeck/monomorphize.rs` +- `capc/src/typeck/monomorphize/support.rs` -### Problem +Support types and substitution / mangling helpers are split out of the main +monomorphization pass. -Today expression typing is recorded as: +### Intrinsics -- `TypeTable { expr_types: HashMap }` +- `capc/src/codegen/intrinsics.rs` +- `capc/src/codegen/intrinsics/io.rs` +- `capc/src/codegen/intrinsics/memory.rs` -That is brittle because: +The runtime intrinsic registry is now grouped by domain instead of one large +table. -- synthetic expressions can share spans -- different expressions can accidentally collide -- later phases have to depend on exact span construction discipline +## Result -This is the wrong abstraction. +The compiler is still direct, but it is less brittle: -### Plan +- fewer giant coordination files +- fewer phase-boundary accidents +- fewer panic-style assumptions on ordinary compile paths +- clearer places to change parser, checker, lowering, monomorphization, or + runtime-intrinsic behavior without touching unrelated logic -- Introduce `ExprId` and `PatternId` or a typed AST node identity equivalent. -- Assign IDs during parsing or in a dedicated AST annotation pass. -- Change the type recorder to key on `ExprId` instead of `Span`. -- Make lowering consume typed expression metadata by ID. -- Remove the need for span-based type lookups entirely. +## Verification -### Follow-on Cleanup - -- Delete `allow_type_fallback` in lowering. -- Delete the fallback path that re-runs type inference during lowering. -- Make missing typed-expression data a hard internal compiler bug. 
- -### Success Criteria - -- No compiler phase relies on `Span` as expression identity. -- Synthetic desugaring can freely create nodes without worrying about span - collisions. -- Lowering does not call back into expression checking to recover types. - -## Phase 2: Add an Explicit Desugar Pass - -### Problem - -The parser currently does more than parse syntax. It also lowers: - -- `let ... else` -- `try let` -- `try expr else` -- borrowed `for` iteration - -That is workable, but it means the parser now owns semantic rewrites and hidden -implementation details like synthetic bindings and hidden `defer free()`. - -### Plan - -- Keep parsing purely syntactic. -- Represent the high-level constructs directly in AST first. -- Add a `desugar` pass after parse and before type checking. -- Move all syntax sugar lowering there. - -### What Belongs in Desugaring - -- `let ... else` -- `try let` -- `try expr else` -- borrowed `for item in vec` -- indexed `for i, item in vec` - -### Why This Helps - -- parser gets simpler -- lowering logic is centralized -- desugaring becomes independently testable -- future sugar features stop bloating `parser.rs` - -### Success Criteria - -- `parser.rs` only parses concrete syntax into AST nodes. -- synthetic names and hidden cleanup are created in the desugar pass. -- parser tests and desugar tests are separate. - -## Phase 3: Split Type Checking by Responsibility - -### Problem - -`typeck/check.rs` and `typeck/mod.rs` are carrying too much mixed -responsibility: - -- type resolution -- package safety validation -- move checking -- expression typing -- statement typing -- pattern binding -- impl desugaring support -- assorted helper logic - -This is manageable now, but it is not clean. 
- -### Plan - -Refactor `typeck` into something closer to: - -- `typeck/mod.rs` - - public entry points only -- `typeck/types.rs` - - `Ty`, builtins, type helpers -- `typeck/resolve.rs` - - path/type/trait resolution helpers -- `typeck/safety.rs` - - safe package validation and import safety rules -- `typeck/moveck.rs` - - move state, branch merge rules, linear consumption checks -- `typeck/patterns.rs` - - pattern binding and pattern typing -- `typeck/expr.rs` - - expression checking -- `typeck/stmt.rs` - - statement/block checking - -### Immediate Refactors - -- Pull duplicated helper logic into shared helpers. -- Stop duplicating path-base detection helpers in checking and lowering. -- Move enum type-argument inference helpers into one dedicated place. - -### Success Criteria - -- `typeck/mod.rs` is mostly orchestration and data definitions. -- expression and statement logic are no longer in one multi-thousand-line file. -- move-checking rules are locally understandable. - -## Phase 4: Split Codegen Emission - -### Problem - -`codegen/emit.rs` is now the biggest file in the compiler and handles too many -distinct concerns: - -- statement emission -- expression emission -- control-flow lowering -- match/result lowering -- local storage -- arithmetic traps -- defer handling - -### Plan - -Refactor codegen into focused files, for example: - -- `codegen/emit_expr.rs` -- `codegen/emit_stmt.rs` -- `codegen/emit_control.rs` -- `codegen/emit_match.rs` -- `codegen/emit_locals.rs` -- `codegen/emit_defer.rs` - -The exact split matters less than separating concerns. - -### Success Criteria - -- `emit.rs` becomes a thin coordinator or disappears. -- result/match lowering is isolated. -- local storage and defer handling are isolated. -- control-flow bugs no longer require editing one huge file. - -## Phase 5: Centralize the Driver Pipeline - -### Problem - -`main.rs` repeats parse/load/check/build orchestration across commands. 
- -That makes small behavior changes annoying and increases the chance that -commands drift. - -### Plan - -- Introduce a shared pipeline API in the library, for example: - - parse entry - - validate module path - - load stdlib/user modules - - enforce safe-only if requested - - type-check - - build object - - link/run -- Make CLI commands thin wrappers over this pipeline. - -### Success Criteria - -- `main.rs` becomes mostly CLI argument handling. -- parse/check/build/run all share the same pipeline functions. -- error decoration is more consistent across commands. - -## Cross-Cutting Rules - -While doing this cleanup: - -- Do not rewrite semantics casually. -- Keep tests green at each phase. -- Add targeted regression tests for every internal cleanup that changes a code - path. -- Prefer extracting shared helpers before changing behavior. -- Avoid introducing new syntax/features during compiler cleanup. - -## Recommended Sequence of Work - -### Step 1 - -Add stable `ExprId` support and convert typed expression recording to use it. - -### Step 2 - -Move current parser-side sugar lowering into a dedicated desugar pass. - -### Step 3 - -Split `typeck/check.rs` and move resolution helpers out of the giant files. - -### Step 4 - -Split `codegen/emit.rs`. - -### Step 5 - -Clean up the CLI pipeline. - -## What Not to Do - -- Do not start with file splitting alone. Splitting files without fixing - span-keyed typing and phase coupling will mostly create more files with the - same design problems. -- Do not add more syntax sugar before the desugar pass exists. -- Do not rewrite the compiler in a new architecture unless the current one - proves fundamentally unworkable. It has not. 
- -## End State - -If this cleanup succeeds, the compiler should look like this: - -- parser parses syntax only -- desugar rewrites sugar only -- type checking owns semantic validation only -- lowering consumes typed nodes without re-inferring them -- codegen emits from HIR without giant monolithic files -- CLI commands share one pipeline - -That is enough cleanup to keep evolving the language without the compiler -turning into a pile of accidental invariants. +- `PATH="$HOME/.cargo/bin:$PATH" cargo test -p capc` diff --git a/capc/src/codegen/emit.rs b/capc/src/codegen/emit.rs index 6f4d4f5..be12c68 100644 --- a/capc/src/codegen/emit.rs +++ b/capc/src/codegen/emit.rs @@ -3,6 +3,8 @@ //! This module is intentionally focused on expression/statement lowering and //! ABI-adjacent helper routines used by the main codegen entry point. +mod arith; +mod defer; mod match_lowering; mod runtime; @@ -25,7 +27,12 @@ use super::{ TypeLayout, ValueRepr, }; +pub(super) use defer::DeferStack; pub(super) use runtime::emit_runtime_wrapper_call; +use arith::{ + emit_checked_add, emit_checked_div, emit_checked_mod, emit_checked_mul, emit_checked_sub, + emit_string_eq, is_string_type, +}; /// Target blocks for break/continue inside a loop. 
#[derive(Copy, Clone, Debug)] @@ -49,167 +56,6 @@ pub(super) enum ReturnLowering { }, } -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -enum DeferScopeKind { - Regular, - LoopBody, -} - -#[derive(Clone, Debug)] -struct DeferScope { - kind: DeferScopeKind, - defers: Vec, -} - -#[derive(Clone, Debug)] -pub(super) struct DeferStack { - scopes: Vec, -} - -impl DeferStack { - pub(super) fn new() -> Self { - Self { scopes: Vec::new() } - } - - pub(super) fn push_block_scope(&mut self) { - self.push_scope(DeferScopeKind::Regular); - } - - pub(super) fn push_loop_scope(&mut self) { - self.push_scope(DeferScopeKind::LoopBody); - } - - fn push_scope(&mut self, kind: DeferScopeKind) { - self.scopes.push(DeferScope { - kind, - defers: Vec::new(), - }); - } - - pub(super) fn pop_scope(&mut self) { - let _ = self.scopes.pop(); - } - - pub(super) fn push_defer(&mut self, expr: crate::hir::HirExpr) { - if let Some(scope) = self.scopes.last_mut() { - scope.defers.push(expr); - } - } - - fn emit_scope_defers( - &self, - scope: &DeferScope, - builder: &mut FunctionBuilder, - locals: &HashMap, - fn_map: &HashMap, - enum_index: &EnumIndex, - struct_layouts: &StructLayoutIndex, - return_lowering: &ReturnLowering, - module: &mut ObjectModule, - data_counter: &mut u32, - ) -> Result<(), CodegenError> { - for defer_expr in scope.defers.iter().rev() { - let _ = emit_hir_expr( - builder, - defer_expr, - locals, - fn_map, - enum_index, - struct_layouts, - return_lowering, - module, - data_counter, - )?; - } - Ok(()) - } - - pub(super) fn emit_current_and_pop( - &mut self, - builder: &mut FunctionBuilder, - locals: &HashMap, - fn_map: &HashMap, - enum_index: &EnumIndex, - struct_layouts: &StructLayoutIndex, - return_lowering: &ReturnLowering, - module: &mut ObjectModule, - data_counter: &mut u32, - ) -> Result<(), CodegenError> { - if let Some(scope) = self.scopes.last() { - self.emit_scope_defers( - scope, - builder, - locals, - fn_map, - enum_index, - struct_layouts, - return_lowering, - 
module, - data_counter, - )?; - } - self.pop_scope(); - Ok(()) - } - - pub(super) fn emit_all_and_clear( - &mut self, - builder: &mut FunctionBuilder, - locals: &HashMap, - fn_map: &HashMap, - enum_index: &EnumIndex, - struct_layouts: &StructLayoutIndex, - return_lowering: &ReturnLowering, - module: &mut ObjectModule, - data_counter: &mut u32, - ) -> Result<(), CodegenError> { - while let Some(scope) = self.scopes.pop() { - self.emit_scope_defers( - &scope, - builder, - locals, - fn_map, - enum_index, - struct_layouts, - return_lowering, - module, - data_counter, - )?; - } - Ok(()) - } - - pub(super) fn emit_until_loop_and_pop( - &mut self, - builder: &mut FunctionBuilder, - locals: &HashMap, - fn_map: &HashMap, - enum_index: &EnumIndex, - struct_layouts: &StructLayoutIndex, - return_lowering: &ReturnLowering, - module: &mut ObjectModule, - data_counter: &mut u32, - ) -> Result<(), CodegenError> { - while let Some(scope) = self.scopes.pop() { - self.emit_scope_defers( - &scope, - builder, - locals, - fn_map, - enum_index, - struct_layouts, - return_lowering, - module, - data_counter, - )?; - if scope.kind == DeferScopeKind::LoopBody { - break; - } - } - Ok(()) - } -} - /// Emit a single HIR statement. 
pub(super) fn emit_hir_stmt( builder: &mut FunctionBuilder, @@ -906,7 +752,9 @@ fn emit_hir_stmt_inner( *locals = saved_locals; } HirStmt::Break(_) => { - let target = loop_target.expect("break outside of loop (should be caught by typeck)"); + let target = loop_target.ok_or_else(|| { + CodegenError::Unsupported("break outside of loop".to_string()) + })?; defer_stack.emit_until_loop_and_pop( builder, locals, @@ -921,8 +769,9 @@ fn emit_hir_stmt_inner( return Ok(Flow::Terminated); } HirStmt::Continue(_) => { - let target = - loop_target.expect("continue outside of loop (should be caught by typeck)"); + let target = loop_target.ok_or_else(|| { + CodegenError::Unsupported("continue outside of loop".to_string()) + })?; defer_stack.emit_until_loop_and_pop( builder, locals, @@ -1896,146 +1745,6 @@ fn emit_hir_expr_inner( } } -fn emit_checked_add( - builder: &mut FunctionBuilder, - a: Value, - b: Value, - ty: &crate::hir::HirType, -) -> Result { - let (sum, overflow) = if crate::typeck::is_unsigned_type(&ty.ty) { - builder.ins().uadd_overflow(a, b) - } else { - builder.ins().sadd_overflow(a, b) - }; - trap_on_overflow(builder, overflow); - Ok(sum) -} - -fn emit_checked_sub( - builder: &mut FunctionBuilder, - a: Value, - b: Value, - ty: &crate::hir::HirType, -) -> Result { - let (diff, overflow) = if crate::typeck::is_unsigned_type(&ty.ty) { - builder.ins().usub_overflow(a, b) - } else { - builder.ins().ssub_overflow(a, b) - }; - trap_on_overflow(builder, overflow); - Ok(diff) -} - -fn emit_checked_mul( - builder: &mut FunctionBuilder, - a: Value, - b: Value, - ty: &crate::hir::HirType, -) -> Result { - let (prod, overflow) = if crate::typeck::is_unsigned_type(&ty.ty) { - builder.ins().umul_overflow(a, b) - } else { - builder.ins().smul_overflow(a, b) - }; - trap_on_overflow(builder, overflow); - Ok(prod) -} - -fn emit_checked_div( - builder: &mut FunctionBuilder, - a: Value, - b: Value, - ty: &crate::hir::HirType, -) -> Result { - let b_ty = builder.func.dfg.value_type(b); 
- let zero = builder.ins().iconst(b_ty, 0); - let is_zero = builder.ins().icmp(IntCC::Equal, b, zero); - let ok_block = builder.create_block(); - let trap_block = builder.create_block(); - builder.ins().brif(is_zero, trap_block, &[], ok_block, &[]); - builder.switch_to_block(trap_block); - builder.ins().trap(ir::TrapCode::IntegerDivisionByZero); - builder.switch_to_block(ok_block); - builder.seal_block(trap_block); - builder.seal_block(ok_block); - let value = if crate::typeck::is_unsigned_type(&ty.ty) { - builder.ins().udiv(a, b) - } else { - builder.ins().sdiv(a, b) - }; - Ok(value) -} - -fn emit_checked_mod( - builder: &mut FunctionBuilder, - a: Value, - b: Value, - ty: &crate::hir::HirType, -) -> Result { - let b_ty = builder.func.dfg.value_type(b); - let zero = builder.ins().iconst(b_ty, 0); - let is_zero = builder.ins().icmp(IntCC::Equal, b, zero); - let ok_block = builder.create_block(); - let trap_block = builder.create_block(); - builder.ins().brif(is_zero, trap_block, &[], ok_block, &[]); - builder.switch_to_block(trap_block); - builder.ins().trap(ir::TrapCode::IntegerDivisionByZero); - builder.switch_to_block(ok_block); - builder.seal_block(trap_block); - builder.seal_block(ok_block); - let value = if crate::typeck::is_unsigned_type(&ty.ty) { - builder.ins().urem(a, b) - } else { - builder.ins().srem(a, b) - }; - Ok(value) -} - -fn trap_on_overflow(builder: &mut FunctionBuilder, overflow: Value) { - let ok_block = builder.create_block(); - let trap_block = builder.create_block(); - builder.ins().brif(overflow, trap_block, &[], ok_block, &[]); - builder.switch_to_block(trap_block); - builder.ins().trap(ir::TrapCode::IntegerOverflow); - builder.switch_to_block(ok_block); - builder.seal_block(trap_block); - builder.seal_block(ok_block); -} - -/// Emit a call to the runtime string equality function. -/// Returns an i8 value: 1 if strings are equal, 0 otherwise. 
-fn emit_string_eq( - builder: &mut FunctionBuilder, - module: &mut ObjectModule, - lhs: Value, - rhs: Value, -) -> Result { - use cranelift_codegen::ir::{AbiParam, Signature}; - - let ptr_ty = module.isa().pointer_type(); - - // Build signature: (ptr, ptr) -> i8 - let mut sig = Signature::new(module.isa().default_call_conv()); - sig.params.push(AbiParam::new(ptr_ty)); - sig.params.push(AbiParam::new(ptr_ty)); - sig.returns.push(AbiParam::new(ir::types::I8)); - - // Declare and import the runtime function - let func_id = module - .declare_function("capable_rt_string_eq", Linkage::Import, &sig) - .map_err(|err| CodegenError::Codegen(err.to_string()))?; - let local_func = module.declare_func_in_func(func_id, builder.func); - - // Call the function - let call_inst = builder.ins().call(local_func, &[lhs, rhs]); - let results = builder.inst_results(call_inst); - Ok(results[0]) -} - -fn is_string_type(ty: &crate::typeck::Ty) -> bool { - matches!(ty, crate::typeck::Ty::Path(name, _) if name == "sys.string.string" || name == "string") -} - /// Emit an index expression, calling the appropriate runtime function. 
fn emit_hir_index( builder: &mut FunctionBuilder, diff --git a/capc/src/codegen/emit/arith.rs b/capc/src/codegen/emit/arith.rs new file mode 100644 index 0000000..6d75543 --- /dev/null +++ b/capc/src/codegen/emit/arith.rs @@ -0,0 +1,134 @@ +use super::*; + +pub(in crate::codegen) fn emit_checked_add( + builder: &mut FunctionBuilder, + a: Value, + b: Value, + ty: &crate::hir::HirType, +) -> Result { + let (sum, overflow) = if crate::typeck::is_unsigned_type(&ty.ty) { + builder.ins().uadd_overflow(a, b) + } else { + builder.ins().sadd_overflow(a, b) + }; + trap_on_overflow(builder, overflow); + Ok(sum) +} + +pub(in crate::codegen) fn emit_checked_sub( + builder: &mut FunctionBuilder, + a: Value, + b: Value, + ty: &crate::hir::HirType, +) -> Result { + let (diff, overflow) = if crate::typeck::is_unsigned_type(&ty.ty) { + builder.ins().usub_overflow(a, b) + } else { + builder.ins().ssub_overflow(a, b) + }; + trap_on_overflow(builder, overflow); + Ok(diff) +} + +pub(in crate::codegen) fn emit_checked_mul( + builder: &mut FunctionBuilder, + a: Value, + b: Value, + ty: &crate::hir::HirType, +) -> Result { + let (prod, overflow) = if crate::typeck::is_unsigned_type(&ty.ty) { + builder.ins().umul_overflow(a, b) + } else { + builder.ins().smul_overflow(a, b) + }; + trap_on_overflow(builder, overflow); + Ok(prod) +} + +pub(in crate::codegen) fn emit_checked_div( + builder: &mut FunctionBuilder, + a: Value, + b: Value, + ty: &crate::hir::HirType, +) -> Result { + let b_ty = builder.func.dfg.value_type(b); + let zero = builder.ins().iconst(b_ty, 0); + let is_zero = builder.ins().icmp(IntCC::Equal, b, zero); + let ok_block = builder.create_block(); + let trap_block = builder.create_block(); + builder.ins().brif(is_zero, trap_block, &[], ok_block, &[]); + builder.switch_to_block(trap_block); + builder.ins().trap(ir::TrapCode::IntegerDivisionByZero); + builder.switch_to_block(ok_block); + builder.seal_block(trap_block); + builder.seal_block(ok_block); + let value = if 
crate::typeck::is_unsigned_type(&ty.ty) { + builder.ins().udiv(a, b) + } else { + builder.ins().sdiv(a, b) + }; + Ok(value) +} + +pub(in crate::codegen) fn emit_checked_mod( + builder: &mut FunctionBuilder, + a: Value, + b: Value, + ty: &crate::hir::HirType, +) -> Result { + let b_ty = builder.func.dfg.value_type(b); + let zero = builder.ins().iconst(b_ty, 0); + let is_zero = builder.ins().icmp(IntCC::Equal, b, zero); + let ok_block = builder.create_block(); + let trap_block = builder.create_block(); + builder.ins().brif(is_zero, trap_block, &[], ok_block, &[]); + builder.switch_to_block(trap_block); + builder.ins().trap(ir::TrapCode::IntegerDivisionByZero); + builder.switch_to_block(ok_block); + builder.seal_block(trap_block); + builder.seal_block(ok_block); + let value = if crate::typeck::is_unsigned_type(&ty.ty) { + builder.ins().urem(a, b) + } else { + builder.ins().srem(a, b) + }; + Ok(value) +} + +fn trap_on_overflow(builder: &mut FunctionBuilder, overflow: Value) { + let ok_block = builder.create_block(); + let trap_block = builder.create_block(); + builder.ins().brif(overflow, trap_block, &[], ok_block, &[]); + builder.switch_to_block(trap_block); + builder.ins().trap(ir::TrapCode::IntegerOverflow); + builder.switch_to_block(ok_block); + builder.seal_block(trap_block); + builder.seal_block(ok_block); +} + +pub(in crate::codegen) fn emit_string_eq( + builder: &mut FunctionBuilder, + module: &mut ObjectModule, + lhs: Value, + rhs: Value, +) -> Result { + use cranelift_codegen::ir::{AbiParam, Signature}; + + let ptr_ty = module.isa().pointer_type(); + let mut sig = Signature::new(module.isa().default_call_conv()); + sig.params.push(AbiParam::new(ptr_ty)); + sig.params.push(AbiParam::new(ptr_ty)); + sig.returns.push(AbiParam::new(ir::types::I8)); + + let func_id = module + .declare_function("capable_rt_string_eq", Linkage::Import, &sig) + .map_err(|err| CodegenError::Codegen(err.to_string()))?; + let local_func = module.declare_func_in_func(func_id, 
builder.func); + let call_inst = builder.ins().call(local_func, &[lhs, rhs]); + let results = builder.inst_results(call_inst); + Ok(results[0]) +} + +pub(in crate::codegen) fn is_string_type(ty: &crate::typeck::Ty) -> bool { + matches!(ty, crate::typeck::Ty::Path(name, _) if name == "sys.string.string" || name == "string") +} diff --git a/capc/src/codegen/emit/defer.rs b/capc/src/codegen/emit/defer.rs new file mode 100644 index 0000000..9af339c --- /dev/null +++ b/capc/src/codegen/emit/defer.rs @@ -0,0 +1,162 @@ +use super::*; + +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +enum DeferScopeKind { + Regular, + LoopBody, +} + +#[derive(Clone, Debug)] +struct DeferScope { + kind: DeferScopeKind, + defers: Vec, +} + +#[derive(Clone, Debug)] +pub(in crate::codegen) struct DeferStack { + scopes: Vec, +} + +impl DeferStack { + pub(in crate::codegen) fn new() -> Self { + Self { scopes: Vec::new() } + } + + pub(in crate::codegen) fn push_block_scope(&mut self) { + self.push_scope(DeferScopeKind::Regular); + } + + pub(in crate::codegen) fn push_loop_scope(&mut self) { + self.push_scope(DeferScopeKind::LoopBody); + } + + fn push_scope(&mut self, kind: DeferScopeKind) { + self.scopes.push(DeferScope { + kind, + defers: Vec::new(), + }); + } + + pub(in crate::codegen) fn pop_scope(&mut self) { + let _ = self.scopes.pop(); + } + + pub(in crate::codegen) fn push_defer(&mut self, expr: crate::hir::HirExpr) { + if let Some(scope) = self.scopes.last_mut() { + scope.defers.push(expr); + } + } + + fn emit_scope_defers( + &self, + scope: &DeferScope, + builder: &mut FunctionBuilder, + locals: &HashMap, + fn_map: &HashMap, + enum_index: &EnumIndex, + struct_layouts: &StructLayoutIndex, + return_lowering: &ReturnLowering, + module: &mut ObjectModule, + data_counter: &mut u32, + ) -> Result<(), CodegenError> { + for defer_expr in scope.defers.iter().rev() { + let _ = emit_hir_expr( + builder, + defer_expr, + locals, + fn_map, + enum_index, + struct_layouts, + return_lowering, + module, + 
data_counter, + )?; + } + Ok(()) + } + + pub(in crate::codegen) fn emit_current_and_pop( + &mut self, + builder: &mut FunctionBuilder, + locals: &HashMap, + fn_map: &HashMap, + enum_index: &EnumIndex, + struct_layouts: &StructLayoutIndex, + return_lowering: &ReturnLowering, + module: &mut ObjectModule, + data_counter: &mut u32, + ) -> Result<(), CodegenError> { + if let Some(scope) = self.scopes.last() { + self.emit_scope_defers( + scope, + builder, + locals, + fn_map, + enum_index, + struct_layouts, + return_lowering, + module, + data_counter, + )?; + } + self.pop_scope(); + Ok(()) + } + + pub(in crate::codegen) fn emit_all_and_clear( + &mut self, + builder: &mut FunctionBuilder, + locals: &HashMap, + fn_map: &HashMap, + enum_index: &EnumIndex, + struct_layouts: &StructLayoutIndex, + return_lowering: &ReturnLowering, + module: &mut ObjectModule, + data_counter: &mut u32, + ) -> Result<(), CodegenError> { + while let Some(scope) = self.scopes.pop() { + self.emit_scope_defers( + &scope, + builder, + locals, + fn_map, + enum_index, + struct_layouts, + return_lowering, + module, + data_counter, + )?; + } + Ok(()) + } + + pub(in crate::codegen) fn emit_until_loop_and_pop( + &mut self, + builder: &mut FunctionBuilder, + locals: &HashMap, + fn_map: &HashMap, + enum_index: &EnumIndex, + struct_layouts: &StructLayoutIndex, + return_lowering: &ReturnLowering, + module: &mut ObjectModule, + data_counter: &mut u32, + ) -> Result<(), CodegenError> { + while let Some(scope) = self.scopes.pop() { + self.emit_scope_defers( + &scope, + builder, + locals, + fn_map, + enum_index, + struct_layouts, + return_lowering, + module, + data_counter, + )?; + if scope.kind == DeferScopeKind::LoopBody { + break; + } + } + Ok(()) + } +} diff --git a/capc/src/codegen/intrinsics.rs b/capc/src/codegen/intrinsics.rs index 02c280c..b5ed617 100644 --- a/capc/src/codegen/intrinsics.rs +++ b/capc/src/codegen/intrinsics.rs @@ -5,879 +5,29 @@ //! 
function is not listed here, the Capable implementation is used instead. //! See `stdlib/README.md` for the stdlib-facing explanation. +mod io; +mod memory; + use std::collections::HashMap; use cranelift_codegen::ir::Type; -use crate::abi::AbiType; - use super::{FnInfo, FnSig}; +fn runtime_fn(sig: FnSig, abi_sig: Option, symbol: &str) -> FnInfo { + FnInfo { + sig, + abi_sig, + symbol: symbol.to_string(), + runtime_symbol: None, + is_runtime: true, + } +} + pub fn register_runtime_intrinsics(ptr_ty: Type) -> HashMap { let mut map = HashMap::new(); - // System + args. - let system_console = FnSig { - params: vec![AbiType::Handle], - ret: AbiType::Handle, - }; - let system_fs_read = FnSig { - params: vec![AbiType::Handle, AbiType::Ptr], - ret: AbiType::Handle, - }; - let system_filesystem = FnSig { - params: vec![AbiType::Handle, AbiType::Ptr], - ret: AbiType::Handle, - }; - // Filesystem. - let fs_root_dir = FnSig { - params: vec![AbiType::Handle], - ret: AbiType::Handle, - }; - let fs_subdir = FnSig { - params: vec![AbiType::Handle, AbiType::Ptr], - ret: AbiType::Handle, - }; - let fs_open_read = FnSig { - params: vec![AbiType::Handle, AbiType::Ptr], - ret: AbiType::Handle, - }; - let fs_read_to_string = FnSig { - params: vec![AbiType::Handle, AbiType::Handle, AbiType::Ptr], - ret: AbiType::Result(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - }; - let fs_read_to_string_abi = FnSig { - params: vec![ - AbiType::Handle, - AbiType::Handle, - AbiType::Ptr, - AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - ], - ret: AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - }; - let fs_read_bytes = FnSig { - params: vec![AbiType::Handle, AbiType::Handle, AbiType::Ptr], - ret: AbiType::Result(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - }; - let fs_read_bytes_abi = FnSig { - params: vec![ - AbiType::Handle, - AbiType::Handle, - AbiType::Ptr, - AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - ], - ret: 
AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - }; - let fs_list_dir = FnSig { - params: vec![AbiType::Handle, AbiType::Handle, AbiType::Ptr], - ret: AbiType::Result(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - }; - let fs_list_dir_abi = FnSig { - params: vec![ - AbiType::Handle, - AbiType::Handle, - AbiType::Ptr, - AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - ], - ret: AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - }; - let fs_exists = FnSig { - params: vec![AbiType::Handle, AbiType::Ptr], - ret: AbiType::Bool, - }; - let fs_readfs_close = FnSig { - params: vec![AbiType::Handle], - ret: AbiType::Unit, - }; - let fs_filesystem_close = FnSig { - params: vec![AbiType::Handle], - ret: AbiType::Unit, - }; - let fs_dir_close = FnSig { - params: vec![AbiType::Handle], - ret: AbiType::Unit, - }; - let fs_file_read_to_string = FnSig { - params: vec![AbiType::Handle, AbiType::Handle], - ret: AbiType::Result(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - }; - let fs_file_read_to_string_abi = FnSig { - params: vec![ - AbiType::Handle, - AbiType::Handle, - AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - ], - ret: AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - }; - let fs_file_read_close = FnSig { - params: vec![AbiType::Handle], - ret: AbiType::Unit, - }; - let fs_dir_list_dir = FnSig { - params: vec![AbiType::Handle, AbiType::Handle], - ret: AbiType::Result(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - }; - let fs_dir_list_dir_abi = FnSig { - params: vec![ - AbiType::Handle, - AbiType::Handle, - AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - ], - ret: AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - }; - let fs_dir_read_to_string = FnSig { - params: vec![AbiType::Handle, AbiType::Handle, AbiType::Ptr], - ret: AbiType::Result(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - }; - let 
fs_dir_read_to_string_abi = FnSig { - params: vec![ - AbiType::Handle, - AbiType::Handle, - AbiType::Ptr, - AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - ], - ret: AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - }; - let fs_join = FnSig { - params: vec![AbiType::Handle, AbiType::Ptr, AbiType::Ptr], - ret: AbiType::Ptr, - }; - let fs_join_abi = FnSig { - params: vec![AbiType::Ptr, AbiType::Handle, AbiType::Ptr, AbiType::Ptr], - ret: AbiType::Unit, - }; - // Console. - let console_println = FnSig { - params: vec![AbiType::Handle, AbiType::Ptr], - ret: AbiType::Unit, - }; - let console_print = FnSig { - params: vec![AbiType::Handle, AbiType::Ptr], - ret: AbiType::Unit, - }; - let console_print_i32 = FnSig { - params: vec![AbiType::Handle, AbiType::I32], - ret: AbiType::Unit, - }; - // Math. - let math_i32 = FnSig { - params: vec![AbiType::I32, AbiType::I32], - ret: AbiType::I32, - }; - let math_u32 = FnSig { - params: vec![AbiType::U32, AbiType::U32], - ret: AbiType::U32, - }; - let math_u8 = FnSig { - params: vec![AbiType::U8, AbiType::U8], - ret: AbiType::U8, - }; - // Alloc + slices. - let mem_malloc = FnSig { - params: vec![AbiType::Handle, AbiType::I32], - ret: AbiType::Ptr, - }; - let mem_free = FnSig { - params: vec![AbiType::Handle, AbiType::Ptr], - ret: AbiType::Unit, - }; - let mem_cast = FnSig { - params: vec![AbiType::Handle, AbiType::Ptr], - ret: AbiType::Ptr, - }; - let mem_alloc_default = FnSig { - params: vec![AbiType::Handle], - ret: AbiType::Handle, - }; - let mem_default_alloc = FnSig { - params: vec![], - ret: AbiType::Handle, - }; - let system_mint_args = FnSig { - params: vec![AbiType::Handle], - ret: AbiType::Handle, - }; - let system_mint_stdin = FnSig { - params: vec![AbiType::Handle], - ret: AbiType::Handle, - }; - let system_mint_net = FnSig { - params: vec![AbiType::Handle], - ret: AbiType::Handle, - }; - // Net. 
- let net_listen = FnSig { - params: vec![AbiType::Handle, AbiType::Ptr, AbiType::I32], - ret: AbiType::Result(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - }; - let net_listen_abi = FnSig { - params: vec![ - AbiType::Handle, - AbiType::Ptr, - AbiType::I32, - AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - ], - ret: AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - }; - let net_connect = FnSig { - params: vec![AbiType::Handle, AbiType::Ptr, AbiType::I32], - ret: AbiType::Result(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - }; - let net_connect_abi = FnSig { - params: vec![ - AbiType::Handle, - AbiType::Ptr, - AbiType::I32, - AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - ], - ret: AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - }; - let net_read_to_string = FnSig { - params: vec![AbiType::Handle, AbiType::Handle], - ret: AbiType::Result(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - }; - let net_read_to_string_abi = FnSig { - params: vec![ - AbiType::Handle, - AbiType::Handle, - AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - ], - ret: AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - }; - let net_read = FnSig { - params: vec![AbiType::Handle, AbiType::Handle, AbiType::I32], - ret: AbiType::Result(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - }; - let net_read_abi = FnSig { - params: vec![ - AbiType::Handle, - AbiType::Handle, - AbiType::I32, - AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - ], - ret: AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - }; - let net_write = FnSig { - params: vec![AbiType::Handle, AbiType::Ptr], - ret: AbiType::Result(Box::new(AbiType::Unit), Box::new(AbiType::I32)), - }; - let net_write_abi = FnSig { - params: vec![ - AbiType::Handle, - AbiType::Ptr, - AbiType::ResultOut(Box::new(AbiType::Unit), Box::new(AbiType::I32)), - ], - 
ret: AbiType::ResultOut(Box::new(AbiType::Unit), Box::new(AbiType::I32)), - }; - let net_accept = FnSig { - params: vec![AbiType::Handle], - ret: AbiType::Result(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - }; - let net_accept_abi = FnSig { - params: vec![ - AbiType::Handle, - AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - ], - ret: AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - }; - let net_close = FnSig { - params: vec![AbiType::Handle], - ret: AbiType::Unit, - }; - let net_listener_close = FnSig { - params: vec![AbiType::Handle], - ret: AbiType::Unit, - }; - let args_at = FnSig { - params: vec![AbiType::Handle, AbiType::I32], - ret: AbiType::Result(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - }; - let args_at_abi = FnSig { - params: vec![ - AbiType::Handle, - AbiType::I32, - AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - ], - ret: AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - }; - // === System + args === - map.insert( - "sys.system.RootCap__mint_console".to_string(), - FnInfo { - sig: system_console, - abi_sig: None, - symbol: "capable_rt_mint_console".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.system.RootCap__mint_readfs".to_string(), - FnInfo { - sig: system_fs_read, - abi_sig: None, - symbol: "capable_rt_mint_readfs".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.system.RootCap__mint_filesystem".to_string(), - FnInfo { - sig: system_filesystem, - abi_sig: None, - symbol: "capable_rt_mint_filesystem".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.system.RootCap__mint_args".to_string(), - FnInfo { - sig: system_mint_args, - abi_sig: None, - symbol: "capable_rt_mint_args".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.system.RootCap__mint_stdin".to_string(), - FnInfo { - sig: 
system_mint_stdin, - abi_sig: None, - symbol: "capable_rt_mint_stdin".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.system.RootCap__mint_net".to_string(), - FnInfo { - sig: system_mint_net, - abi_sig: None, - symbol: "capable_rt_mint_net".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.args.Args__len".to_string(), - FnInfo { - sig: FnSig { - params: vec![AbiType::Handle], - ret: AbiType::I32, - }, - abi_sig: None, - symbol: "capable_rt_args_len".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.args.Args__at".to_string(), - FnInfo { - sig: args_at, - abi_sig: Some(args_at_abi), - symbol: "capable_rt_args_at".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - // === Stdin === - map.insert( - "sys.stdin.Stdin__read_to_string_with_alloc".to_string(), - FnInfo { - sig: FnSig { - params: vec![AbiType::Handle, AbiType::Handle], - ret: AbiType::Result(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - }, - abi_sig: Some(FnSig { - params: vec![ - AbiType::Handle, - AbiType::Handle, - AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - ], - ret: AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - }), - symbol: "capable_rt_read_stdin_to_string".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - // === Net === - map.insert( - "sys.net.Net__listen".to_string(), - FnInfo { - sig: net_listen, - abi_sig: Some(net_listen_abi), - symbol: "capable_rt_net_listen".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.net.Net__connect".to_string(), - FnInfo { - sig: net_connect, - abi_sig: Some(net_connect_abi), - symbol: "capable_rt_net_connect".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.net.TcpListener__accept".to_string(), - FnInfo { - sig: net_accept, - abi_sig: Some(net_accept_abi), - symbol: 
"capable_rt_net_accept".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.net.TcpListener__close".to_string(), - FnInfo { - sig: net_listener_close, - abi_sig: None, - symbol: "capable_rt_net_listener_close".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.net.TcpConn__read_to_string_with_alloc".to_string(), - FnInfo { - sig: net_read_to_string, - abi_sig: Some(net_read_to_string_abi), - symbol: "capable_rt_net_read_to_string".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.net.TcpConn__read_with_alloc".to_string(), - FnInfo { - sig: net_read, - abi_sig: Some(net_read_abi), - symbol: "capable_rt_net_read".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.net.TcpConn__write".to_string(), - FnInfo { - sig: net_write, - abi_sig: Some(net_write_abi), - symbol: "capable_rt_net_write".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.net.TcpConn__close".to_string(), - FnInfo { - sig: net_close, - abi_sig: None, - symbol: "capable_rt_net_close".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - // === Alloc === - map.insert( - "sys.system.RootCap__mint_alloc_default".to_string(), - FnInfo { - sig: mem_alloc_default, - abi_sig: None, - symbol: "capable_rt_alloc_default".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.buffer.default_alloc".to_string(), - FnInfo { - sig: mem_default_alloc, - abi_sig: None, - symbol: "capable_rt_default_alloc".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - // === Console === - map.insert( - "sys.console.Console__println".to_string(), - FnInfo { - sig: console_println, - abi_sig: None, - symbol: "capable_rt_console_println".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.console.Console__print".to_string(), - FnInfo { - sig: 
console_print, - abi_sig: None, - symbol: "capable_rt_console_print".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.console.Console__print_i32".to_string(), - FnInfo { - sig: console_print_i32.clone(), - abi_sig: None, - symbol: "capable_rt_console_print_i32".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.console.Console__println_i32".to_string(), - FnInfo { - sig: console_print_i32, - abi_sig: None, - symbol: "capable_rt_console_println_i32".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.console.Console__assert".to_string(), - FnInfo { - sig: FnSig { - params: vec![AbiType::Handle, AbiType::Bool, AbiType::Ptr], - ret: AbiType::Unit, - }, - abi_sig: None, - symbol: "capable_rt_assert".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - // === Math === - map.insert( - "sys.math.add_wrap_i32".to_string(), - FnInfo { - sig: math_i32.clone(), - abi_sig: None, - symbol: "capable_rt_math_add_wrap_i32".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.math.sub_wrap_i32".to_string(), - FnInfo { - sig: math_i32.clone(), - abi_sig: None, - symbol: "capable_rt_math_sub_wrap_i32".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.math.mul_wrap_i32".to_string(), - FnInfo { - sig: math_i32, - abi_sig: None, - symbol: "capable_rt_math_mul_wrap_i32".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.math.add_wrap_u32".to_string(), - FnInfo { - sig: math_u32.clone(), - abi_sig: None, - symbol: "capable_rt_math_add_wrap_u32".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.math.sub_wrap_u32".to_string(), - FnInfo { - sig: math_u32.clone(), - abi_sig: None, - symbol: "capable_rt_math_sub_wrap_u32".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - 
"sys.math.mul_wrap_u32".to_string(), - FnInfo { - sig: math_u32, - abi_sig: None, - symbol: "capable_rt_math_mul_wrap_u32".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.math.add_wrap_u8".to_string(), - FnInfo { - sig: math_u8.clone(), - abi_sig: None, - symbol: "capable_rt_math_add_wrap_u8".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.math.sub_wrap_u8".to_string(), - FnInfo { - sig: math_u8.clone(), - abi_sig: None, - symbol: "capable_rt_math_sub_wrap_u8".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.math.mul_wrap_u8".to_string(), - FnInfo { - sig: math_u8, - abi_sig: None, - symbol: "capable_rt_math_mul_wrap_u8".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - // === Filesystem === - map.insert( - "sys.fs.ReadFS__read_to_string_with_alloc".to_string(), - FnInfo { - sig: fs_read_to_string, - abi_sig: Some(fs_read_to_string_abi), - symbol: "capable_rt_fs_read_to_string".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.fs.ReadFS__read_bytes_with_alloc".to_string(), - FnInfo { - sig: fs_read_bytes.clone(), - abi_sig: Some(fs_read_bytes_abi.clone()), - symbol: "capable_rt_fs_read_bytes".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.fs.ReadFS__list_dir_with_alloc".to_string(), - FnInfo { - sig: fs_list_dir, - abi_sig: Some(fs_list_dir_abi), - symbol: "capable_rt_fs_list_dir".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.fs.ReadFS__exists".to_string(), - FnInfo { - sig: fs_exists.clone(), - abi_sig: None, - symbol: "capable_rt_fs_exists".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.fs.ReadFS__close".to_string(), - FnInfo { - sig: fs_readfs_close, - abi_sig: None, - symbol: "capable_rt_fs_readfs_close".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - 
); - map.insert( - "sys.fs.Filesystem__root_dir".to_string(), - FnInfo { - sig: fs_root_dir, - abi_sig: None, - symbol: "capable_rt_fs_root_dir".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.fs.Filesystem__close".to_string(), - FnInfo { - sig: fs_filesystem_close, - abi_sig: None, - symbol: "capable_rt_fs_filesystem_close".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.fs.Dir__subdir".to_string(), - FnInfo { - sig: fs_subdir, - abi_sig: None, - symbol: "capable_rt_fs_subdir".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.fs.Dir__open_read".to_string(), - FnInfo { - sig: fs_open_read, - abi_sig: None, - symbol: "capable_rt_fs_open_read".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.fs.Dir__read_bytes_with_alloc".to_string(), - FnInfo { - sig: fs_read_bytes, - abi_sig: Some(fs_read_bytes_abi), - symbol: "capable_rt_fs_dir_read_bytes".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.fs.Dir__read_to_string_with_alloc".to_string(), - FnInfo { - sig: fs_dir_read_to_string, - abi_sig: Some(fs_dir_read_to_string_abi), - symbol: "capable_rt_fs_dir_read_to_string".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.fs.Dir__list_dir_with_alloc".to_string(), - FnInfo { - sig: fs_dir_list_dir, - abi_sig: Some(fs_dir_list_dir_abi), - symbol: "capable_rt_fs_dir_list_dir".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.fs.Dir__exists".to_string(), - FnInfo { - sig: fs_exists, - abi_sig: None, - symbol: "capable_rt_fs_dir_exists".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.fs.Dir__close".to_string(), - FnInfo { - sig: fs_dir_close, - abi_sig: None, - symbol: "capable_rt_fs_dir_close".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( 
- "sys.fs.FileRead__read_to_string_with_alloc".to_string(), - FnInfo { - sig: fs_file_read_to_string, - abi_sig: Some(fs_file_read_to_string_abi), - symbol: "capable_rt_fs_file_read_to_string".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.fs.FileRead__close".to_string(), - FnInfo { - sig: fs_file_read_close, - abi_sig: None, - symbol: "capable_rt_fs_file_read_close".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.fs.join_with_alloc".to_string(), - FnInfo { - sig: fs_join, - abi_sig: Some(fs_join_abi), - symbol: "capable_rt_fs_join".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - // === Slices === - map.insert( - "sys.buffer.Alloc__malloc".to_string(), - FnInfo { - sig: mem_malloc, - abi_sig: None, - symbol: "capable_rt_malloc".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.buffer.Alloc__free".to_string(), - FnInfo { - sig: mem_free, - abi_sig: None, - symbol: "capable_rt_free".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.buffer.Alloc__cast_u8_to_u32".to_string(), - FnInfo { - sig: mem_cast.clone(), - abi_sig: None, - symbol: "capable_rt_cast_u8_to_u32".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - map.insert( - "sys.buffer.Alloc__cast_u32_to_u8".to_string(), - FnInfo { - sig: mem_cast, - abi_sig: None, - symbol: "capable_rt_cast_u32_to_u8".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - // === Bytes === - map.insert( - "sys.bytes.u8__is_whitespace".to_string(), - FnInfo { - sig: FnSig { - params: vec![AbiType::U8], - ret: AbiType::Bool, - }, - abi_sig: None, - symbol: "capable_rt_bytes_is_whitespace".to_string(), - runtime_symbol: None, - is_runtime: true, - }, - ); - + io::register_io_intrinsics(&mut map); + memory::register_memory_intrinsics(&mut map); let _ = ptr_ty; map } diff --git a/capc/src/codegen/intrinsics/io.rs 
b/capc/src/codegen/intrinsics/io.rs new file mode 100644 index 0000000..2bdfc75 --- /dev/null +++ b/capc/src/codegen/intrinsics/io.rs @@ -0,0 +1,557 @@ +use std::collections::HashMap; + +use crate::abi::AbiType; + +use super::{runtime_fn, FnInfo, FnSig}; + +pub(super) fn register_io_intrinsics(map: &mut HashMap) { + map.insert( + "sys.system.RootCap__mint_console".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle], + ret: AbiType::Handle, + }, + None, + "capable_rt_mint_console", + ), + ); + map.insert( + "sys.system.RootCap__mint_readfs".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle, AbiType::Ptr], + ret: AbiType::Handle, + }, + None, + "capable_rt_mint_readfs", + ), + ); + map.insert( + "sys.system.RootCap__mint_filesystem".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle, AbiType::Ptr], + ret: AbiType::Handle, + }, + None, + "capable_rt_mint_filesystem", + ), + ); + map.insert( + "sys.system.RootCap__mint_args".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle], + ret: AbiType::Handle, + }, + None, + "capable_rt_mint_args", + ), + ); + map.insert( + "sys.system.RootCap__mint_stdin".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle], + ret: AbiType::Handle, + }, + None, + "capable_rt_mint_stdin", + ), + ); + map.insert( + "sys.system.RootCap__mint_net".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle], + ret: AbiType::Handle, + }, + None, + "capable_rt_mint_net", + ), + ); + + map.insert( + "sys.args.Args__len".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle], + ret: AbiType::I32, + }, + None, + "capable_rt_args_len", + ), + ); + map.insert( + "sys.args.Args__at".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle, AbiType::I32], + ret: AbiType::Result(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + }, + Some(FnSig { + params: vec![ + AbiType::Handle, + AbiType::I32, + AbiType::ResultOut(Box::new(AbiType::Ptr), 
Box::new(AbiType::I32)), + ], + ret: AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + }), + "capable_rt_args_at", + ), + ); + + map.insert( + "sys.stdin.Stdin__read_to_string_with_alloc".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle, AbiType::Handle], + ret: AbiType::Result(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + }, + Some(FnSig { + params: vec![ + AbiType::Handle, + AbiType::Handle, + AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + ], + ret: AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + }), + "capable_rt_read_stdin_to_string", + ), + ); + + map.insert( + "sys.net.Net__listen".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle, AbiType::Ptr, AbiType::I32], + ret: AbiType::Result(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + }, + Some(FnSig { + params: vec![ + AbiType::Handle, + AbiType::Ptr, + AbiType::I32, + AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + ], + ret: AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + }), + "capable_rt_net_listen", + ), + ); + map.insert( + "sys.net.Net__connect".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle, AbiType::Ptr, AbiType::I32], + ret: AbiType::Result(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + }, + Some(FnSig { + params: vec![ + AbiType::Handle, + AbiType::Ptr, + AbiType::I32, + AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + ], + ret: AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + }), + "capable_rt_net_connect", + ), + ); + map.insert( + "sys.net.TcpListener__accept".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle], + ret: AbiType::Result(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + }, + Some(FnSig { + params: vec![ + AbiType::Handle, + AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + ], + ret: 
AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + }), + "capable_rt_net_accept", + ), + ); + map.insert( + "sys.net.TcpListener__close".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle], + ret: AbiType::Unit, + }, + None, + "capable_rt_net_listener_close", + ), + ); + map.insert( + "sys.net.TcpConn__read_to_string_with_alloc".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle, AbiType::Handle], + ret: AbiType::Result(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + }, + Some(FnSig { + params: vec![ + AbiType::Handle, + AbiType::Handle, + AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + ], + ret: AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + }), + "capable_rt_net_read_to_string", + ), + ); + map.insert( + "sys.net.TcpConn__read_with_alloc".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle, AbiType::Handle, AbiType::I32], + ret: AbiType::Result(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + }, + Some(FnSig { + params: vec![ + AbiType::Handle, + AbiType::Handle, + AbiType::I32, + AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + ], + ret: AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + }), + "capable_rt_net_read", + ), + ); + map.insert( + "sys.net.TcpConn__write".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle, AbiType::Ptr], + ret: AbiType::Result(Box::new(AbiType::Unit), Box::new(AbiType::I32)), + }, + Some(FnSig { + params: vec![ + AbiType::Handle, + AbiType::Ptr, + AbiType::ResultOut(Box::new(AbiType::Unit), Box::new(AbiType::I32)), + ], + ret: AbiType::ResultOut(Box::new(AbiType::Unit), Box::new(AbiType::I32)), + }), + "capable_rt_net_write", + ), + ); + map.insert( + "sys.net.TcpConn__close".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle], + ret: AbiType::Unit, + }, + None, + "capable_rt_net_close", + ), + ); + + map.insert( + 
"sys.console.Console__println".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle, AbiType::Ptr], + ret: AbiType::Unit, + }, + None, + "capable_rt_console_println", + ), + ); + map.insert( + "sys.console.Console__print".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle, AbiType::Ptr], + ret: AbiType::Unit, + }, + None, + "capable_rt_console_print", + ), + ); + map.insert( + "sys.console.Console__print_i32".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle, AbiType::I32], + ret: AbiType::Unit, + }, + None, + "capable_rt_console_print_i32", + ), + ); + map.insert( + "sys.console.Console__println_i32".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle, AbiType::I32], + ret: AbiType::Unit, + }, + None, + "capable_rt_console_println_i32", + ), + ); + map.insert( + "sys.console.Console__assert".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle, AbiType::Bool, AbiType::Ptr], + ret: AbiType::Unit, + }, + None, + "capable_rt_assert", + ), + ); + + map.insert( + "sys.fs.ReadFS__read_to_string_with_alloc".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle, AbiType::Handle, AbiType::Ptr], + ret: AbiType::Result(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + }, + Some(FnSig { + params: vec![ + AbiType::Handle, + AbiType::Handle, + AbiType::Ptr, + AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + ], + ret: AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + }), + "capable_rt_fs_read_to_string", + ), + ); + map.insert( + "sys.fs.ReadFS__read_bytes_with_alloc".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle, AbiType::Handle, AbiType::Ptr], + ret: AbiType::Result(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + }, + Some(FnSig { + params: vec![ + AbiType::Handle, + AbiType::Handle, + AbiType::Ptr, + AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + ], + ret: 
AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + }), + "capable_rt_fs_read_bytes", + ), + ); + map.insert( + "sys.fs.ReadFS__list_dir_with_alloc".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle, AbiType::Handle, AbiType::Ptr], + ret: AbiType::Result(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + }, + Some(FnSig { + params: vec![ + AbiType::Handle, + AbiType::Handle, + AbiType::Ptr, + AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + ], + ret: AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + }), + "capable_rt_fs_list_dir", + ), + ); + map.insert( + "sys.fs.ReadFS__exists".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle, AbiType::Ptr], + ret: AbiType::Bool, + }, + None, + "capable_rt_fs_exists", + ), + ); + map.insert( + "sys.fs.ReadFS__close".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle], + ret: AbiType::Unit, + }, + None, + "capable_rt_fs_readfs_close", + ), + ); + map.insert( + "sys.fs.Filesystem__root_dir".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle], + ret: AbiType::Handle, + }, + None, + "capable_rt_fs_root_dir", + ), + ); + map.insert( + "sys.fs.Filesystem__close".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle], + ret: AbiType::Unit, + }, + None, + "capable_rt_fs_filesystem_close", + ), + ); + map.insert( + "sys.fs.Dir__subdir".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle, AbiType::Ptr], + ret: AbiType::Handle, + }, + None, + "capable_rt_fs_subdir", + ), + ); + map.insert( + "sys.fs.Dir__open_read".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle, AbiType::Ptr], + ret: AbiType::Handle, + }, + None, + "capable_rt_fs_open_read", + ), + ); + map.insert( + "sys.fs.Dir__read_bytes_with_alloc".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle, AbiType::Handle, AbiType::Ptr], + ret: AbiType::Result(Box::new(AbiType::Handle), 
Box::new(AbiType::I32)), + }, + Some(FnSig { + params: vec![ + AbiType::Handle, + AbiType::Handle, + AbiType::Ptr, + AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + ], + ret: AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + }), + "capable_rt_fs_dir_read_bytes", + ), + ); + map.insert( + "sys.fs.Dir__read_to_string_with_alloc".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle, AbiType::Handle, AbiType::Ptr], + ret: AbiType::Result(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + }, + Some(FnSig { + params: vec![ + AbiType::Handle, + AbiType::Handle, + AbiType::Ptr, + AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + ], + ret: AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + }), + "capable_rt_fs_dir_read_to_string", + ), + ); + map.insert( + "sys.fs.Dir__list_dir_with_alloc".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle, AbiType::Handle], + ret: AbiType::Result(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + }, + Some(FnSig { + params: vec![ + AbiType::Handle, + AbiType::Handle, + AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + ], + ret: AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + }), + "capable_rt_fs_dir_list_dir", + ), + ); + map.insert( + "sys.fs.Dir__exists".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle, AbiType::Ptr], + ret: AbiType::Bool, + }, + None, + "capable_rt_fs_dir_exists", + ), + ); + map.insert( + "sys.fs.Dir__close".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle], + ret: AbiType::Unit, + }, + None, + "capable_rt_fs_dir_close", + ), + ); + map.insert( + "sys.fs.FileRead__read_to_string_with_alloc".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle, AbiType::Handle], + ret: AbiType::Result(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + }, + Some(FnSig { + params: vec![ + AbiType::Handle, + 
AbiType::Handle, + AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + ], + ret: AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + }), + "capable_rt_fs_file_read_to_string", + ), + ); + map.insert( + "sys.fs.FileRead__close".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle], + ret: AbiType::Unit, + }, + None, + "capable_rt_fs_file_read_close", + ), + ); + map.insert( + "sys.fs.join_with_alloc".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle, AbiType::Ptr, AbiType::Ptr], + ret: AbiType::Ptr, + }, + Some(FnSig { + params: vec![AbiType::Ptr, AbiType::Handle, AbiType::Ptr, AbiType::Ptr], + ret: AbiType::Unit, + }), + "capable_rt_fs_join", + ), + ); +} diff --git a/capc/src/codegen/intrinsics/memory.rs b/capc/src/codegen/intrinsics/memory.rs new file mode 100644 index 0000000..ebc8313 --- /dev/null +++ b/capc/src/codegen/intrinsics/memory.rs @@ -0,0 +1,185 @@ +use std::collections::HashMap; + +use crate::abi::AbiType; + +use super::{runtime_fn, FnInfo, FnSig}; + +pub(super) fn register_memory_intrinsics(map: &mut HashMap) { + map.insert( + "sys.system.RootCap__mint_alloc_default".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle], + ret: AbiType::Handle, + }, + None, + "capable_rt_alloc_default", + ), + ); + map.insert( + "sys.buffer.default_alloc".to_string(), + runtime_fn( + FnSig { + params: vec![], + ret: AbiType::Handle, + }, + None, + "capable_rt_default_alloc", + ), + ); + map.insert( + "sys.buffer.Alloc__malloc".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle, AbiType::I32], + ret: AbiType::Ptr, + }, + None, + "capable_rt_malloc", + ), + ); + map.insert( + "sys.buffer.Alloc__free".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle, AbiType::Ptr], + ret: AbiType::Unit, + }, + None, + "capable_rt_free", + ), + ); + map.insert( + "sys.buffer.Alloc__cast_u8_to_u32".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle, 
AbiType::Ptr], + ret: AbiType::Ptr, + }, + None, + "capable_rt_cast_u8_to_u32", + ), + ); + map.insert( + "sys.buffer.Alloc__cast_u32_to_u8".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::Handle, AbiType::Ptr], + ret: AbiType::Ptr, + }, + None, + "capable_rt_cast_u32_to_u8", + ), + ); + map.insert( + "sys.bytes.u8__is_whitespace".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::U8], + ret: AbiType::Bool, + }, + None, + "capable_rt_bytes_is_whitespace", + ), + ); + + map.insert( + "sys.math.add_wrap_i32".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::I32, AbiType::I32], + ret: AbiType::I32, + }, + None, + "capable_rt_math_add_wrap_i32", + ), + ); + map.insert( + "sys.math.sub_wrap_i32".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::I32, AbiType::I32], + ret: AbiType::I32, + }, + None, + "capable_rt_math_sub_wrap_i32", + ), + ); + map.insert( + "sys.math.mul_wrap_i32".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::I32, AbiType::I32], + ret: AbiType::I32, + }, + None, + "capable_rt_math_mul_wrap_i32", + ), + ); + map.insert( + "sys.math.add_wrap_u32".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::U32, AbiType::U32], + ret: AbiType::U32, + }, + None, + "capable_rt_math_add_wrap_u32", + ), + ); + map.insert( + "sys.math.sub_wrap_u32".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::U32, AbiType::U32], + ret: AbiType::U32, + }, + None, + "capable_rt_math_sub_wrap_u32", + ), + ); + map.insert( + "sys.math.mul_wrap_u32".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::U32, AbiType::U32], + ret: AbiType::U32, + }, + None, + "capable_rt_math_mul_wrap_u32", + ), + ); + map.insert( + "sys.math.add_wrap_u8".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::U8, AbiType::U8], + ret: AbiType::U8, + }, + None, + "capable_rt_math_add_wrap_u8", + ), + ); + map.insert( + "sys.math.sub_wrap_u8".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::U8, 
AbiType::U8], + ret: AbiType::U8, + }, + None, + "capable_rt_math_sub_wrap_u8", + ), + ); + map.insert( + "sys.math.mul_wrap_u8".to_string(), + runtime_fn( + FnSig { + params: vec![AbiType::U8, AbiType::U8], + ret: AbiType::U8, + }, + None, + "capable_rt_math_mul_wrap_u8", + ), + ); +} diff --git a/capc/src/parser.rs b/capc/src/parser.rs index cbd35e4..1f9c881 100644 --- a/capc/src/parser.rs +++ b/capc/src/parser.rs @@ -1,3 +1,9 @@ +mod exprs; +mod items; +mod patterns; +mod stmts; +mod types; + use crate::ast::*; use crate::error::ParseError; use crate::lexer::{lex, to_ident, Token, TokenKind}; @@ -112,1561 +118,11 @@ impl Parser { }) } - fn parse_use(&mut self) -> Result { - let start = self.expect(TokenKind::Use)?.span.start; - let path = self.parse_path()?; - let span = Span::new(start, path.span.end); - self.maybe_consume(TokenKind::Semi); - Ok(UseDecl { path, span }) - } - - fn parse_item(&mut self, doc: Option) -> Result { - let mut is_pub = false; - let mut is_linear = false; - let mut is_copy = false; - let mut is_opaque = false; - let mut is_capability = false; - loop { - match self.peek_kind() { - Some(TokenKind::Pub) => { - if is_pub { - return Err(self.error_current("duplicate `pub` modifier".to_string())); - } - self.bump(); - is_pub = true; - } - Some(TokenKind::Linear) => { - if is_linear { - return Err(self.error_current("duplicate `linear` modifier".to_string())); - } - self.bump(); - is_linear = true; - } - Some(TokenKind::Copy) => { - if is_copy { - return Err(self.error_current("duplicate `copy` modifier".to_string())); - } - self.bump(); - is_copy = true; - } - Some(TokenKind::Opaque) => { - if is_opaque { - return Err(self.error_current("duplicate `opaque` modifier".to_string())); - } - self.bump(); - is_opaque = true; - } - Some(TokenKind::Capability) => { - if is_capability { - return Err( - self.error_current("duplicate `capability` modifier".to_string()) - ); - } - self.bump(); - is_capability = true; - } - _ => break, - } - } - if 
is_linear && is_copy { - return Err( - self.error_current("cannot combine `linear` and `copy` modifiers".to_string()) - ); - } - if self.peek_kind() == Some(TokenKind::Extern) { - if is_opaque || is_linear || is_copy || is_capability { - return Err(self.error_current( - "linear/copy/opaque/capability applies only to struct declarations".to_string(), - )); - } - return Ok(Item::ExternFunction( - self.parse_extern_function(is_pub, doc)?, - )); - } - match self.peek_kind() { - Some(TokenKind::Fn) => { - if is_opaque || is_linear || is_copy || is_capability { - return Err(self.error_current( - "linear/copy/opaque/capability applies only to struct declarations" - .to_string(), - )); - } - Ok(Item::Function(self.parse_function(is_pub, doc)?)) - } - Some(TokenKind::Struct) => Ok(Item::Struct(self.parse_struct( - is_pub, - is_opaque, - is_linear, - is_copy, - is_capability, - doc, - )?)), - Some(TokenKind::Enum) => { - if is_opaque || is_linear || is_copy || is_capability { - return Err(self.error_current( - "linear/copy/opaque/capability applies only to struct declarations" - .to_string(), - )); - } - Ok(Item::Enum(self.parse_enum(is_pub, doc)?)) - } - Some(TokenKind::Trait) => { - if is_opaque || is_linear || is_copy || is_capability { - return Err(self.error_current( - "linear/copy/opaque/capability applies only to struct declarations" - .to_string(), - )); - } - Ok(Item::Trait(self.parse_trait(is_pub, doc)?)) - } - Some(TokenKind::Impl) => { - if is_pub { - return Err(self.error_current("impl blocks cannot be marked pub".to_string())); - } - if is_opaque || is_linear || is_copy || is_capability { - return Err(self.error_current( - "linear/copy/opaque/capability applies only to struct declarations" - .to_string(), - )); - } - Ok(Item::Impl(self.parse_impl_block(doc)?)) - } - Some(other) => Err(self.error_current(format!("expected item, found {other:?}"))), - None => Err(self.error_current("unexpected end of input".to_string())), - } - } - - fn parse_impl_block(&mut 
self, impl_doc: Option) -> Result { - let start = self.expect(TokenKind::Impl)?.span.start; - let type_params = self.parse_type_params()?; - let first_type = self.parse_type()?; - let (trait_path, target) = if self.maybe_consume(TokenKind::For).is_some() { - let trait_path = match first_type { - Type::Path { path, args, .. } => { - if !args.is_empty() { - return Err(self.error_current( - "trait impls do not support type arguments yet".to_string(), - )); - } - path - } - _ => return Err(self.error_current("trait impls require a trait name".to_string())), - }; - let target = self.parse_type()?; - (Some(trait_path), target) - } else { - (None, first_type) - }; - self.expect(TokenKind::LBrace)?; - let mut methods = Vec::new(); - while self.peek_kind() != Some(TokenKind::RBrace) { - let doc = self.take_doc_comments(); - let is_pub = self.maybe_consume(TokenKind::Pub).is_some(); - if self.peek_kind() != Some(TokenKind::Fn) { - return Err( - self.error_current("expected method declaration in impl block".to_string()) - ); - } - methods.push(self.parse_function(is_pub, doc)?); - } - let end = self.expect(TokenKind::RBrace)?.span.end; - Ok(ImplBlock { - target, - methods, - type_params, - trait_path, - doc: impl_doc, - span: Span::new(start, end), - }) - } - - fn parse_extern_function( - &mut self, - is_pub: bool, - doc: Option, - ) -> Result { - let start = self.expect(TokenKind::Extern)?.span.start; - self.expect(TokenKind::Fn)?; - let name = self.expect_ident()?; - let type_params = self.parse_type_params()?; - self.expect(TokenKind::LParen)?; - let mut params = Vec::new(); - if self.peek_kind() != Some(TokenKind::RParen) { - loop { - let param_name = self.expect_ident()?; - self.expect(TokenKind::Colon)?; - let ty = self.parse_type()?; - params.push(Param { - name: param_name, - ty: Some(ty), - }); - if self.maybe_consume(TokenKind::Comma).is_some() { - continue; - } - break; - } - } - let rparen = self.expect(TokenKind::RParen)?; - let ret = if 
self.maybe_consume(TokenKind::Arrow).is_some() { - self.parse_type()? - } else { - unit_type_at(Span::new(rparen.span.end, rparen.span.end)) - }; - let end = self - .maybe_consume(TokenKind::Semi) - .map_or(ret.span().end, |t| t.span.end); - Ok(ExternFunction { - name, - type_params, - params, - ret, - is_pub, - doc, - span: Span::new(start, end), - }) - } - - fn parse_function( - &mut self, - is_pub: bool, - doc: Option, - ) -> Result { - let start = self.expect(TokenKind::Fn)?.span.start; - let name = self.expect_ident()?; - let type_params = self.parse_type_params()?; - self.expect(TokenKind::LParen)?; - let mut params = Vec::new(); - if self.peek_kind() != Some(TokenKind::RParen) { - loop { - let param_name = self.expect_ident()?; - let ty = if self.maybe_consume(TokenKind::Colon).is_some() { - Some(self.parse_type()?) - } else if param_name.item == "self" { - None - } else { - return Err(self.error_current("expected ':' after parameter name".to_string())); - }; - params.push(Param { - name: param_name, - ty, - }); - if self.maybe_consume(TokenKind::Comma).is_none() { - break; - } - } - } - let rparen = self.expect(TokenKind::RParen)?; - let ret = if self.maybe_consume(TokenKind::Arrow).is_some() { - self.parse_type()? 
- } else { - unit_type_at(Span::new(rparen.span.end, rparen.span.end)) - }; - let body = self.parse_block()?; - let span = Span::new(start, body.span.end); - Ok(Function { - name, - type_params, - params, - ret, - body, - is_pub, - doc, - span, - }) - } - - fn parse_trait(&mut self, is_pub: bool, doc: Option) -> Result { - let start = self.expect(TokenKind::Trait)?.span.start; - let name = self.expect_ident()?; - let type_params = self.parse_type_params()?; - self.expect(TokenKind::LBrace)?; - let mut methods = Vec::new(); - while self.peek_kind() != Some(TokenKind::RBrace) { - let doc = self.take_doc_comments(); - if self.maybe_consume(TokenKind::Pub).is_some() { - return Err(self.error_current("trait methods cannot be marked pub".to_string())); - } - methods.push(self.parse_trait_method(doc)?); - } - let end = self.expect(TokenKind::RBrace)?.span.end; - Ok(TraitDecl { - name, - type_params, - methods, - is_pub, - doc, - span: Span::new(start, end), - }) - } - - fn parse_trait_method(&mut self, doc: Option) -> Result { - let start = self.expect(TokenKind::Fn)?.span.start; - let name = self.expect_ident()?; - let type_params = self.parse_type_params()?; - self.expect(TokenKind::LParen)?; - let mut params = Vec::new(); - if self.peek_kind() != Some(TokenKind::RParen) { - loop { - let param_name = self.expect_ident()?; - let ty = if self.maybe_consume(TokenKind::Colon).is_some() { - Some(self.parse_type()?) - } else if param_name.item == "self" { - None - } else { - return Err(self.error_current("expected ':' after parameter name".to_string())); - }; - params.push(Param { - name: param_name, - ty, - }); - if self.maybe_consume(TokenKind::Comma).is_none() { - break; - } - } - } - let rparen = self.expect(TokenKind::RParen)?; - let ret = if self.maybe_consume(TokenKind::Arrow).is_some() { - self.parse_type()? 
- } else { - unit_type_at(Span::new(rparen.span.end, rparen.span.end)) - }; - let end = self.expect(TokenKind::Semi)?.span.end; - Ok(TraitMethod { - name, - type_params, - params, - ret, - doc, - span: Span::new(start, end), - }) - } - - fn parse_struct( - &mut self, - is_pub: bool, - is_opaque: bool, - is_linear: bool, - is_copy: bool, - is_capability: bool, - doc: Option, - ) -> Result { - let start = self.expect(TokenKind::Struct)?.span.start; - let name = self.expect_ident()?; - let type_params = self.parse_type_params()?; - let mut fields = Vec::new(); - let end = if self.peek_kind() == Some(TokenKind::LBrace) { - self.bump(); - if self.peek_kind() != Some(TokenKind::RBrace) { - loop { - let field_name = self.expect_ident()?; - self.expect(TokenKind::Colon)?; - let ty = self.parse_type()?; - fields.push(Field { - name: field_name, - ty, - }); - if self.maybe_consume(TokenKind::Comma).is_none() { - break; - } - if self.peek_kind() == Some(TokenKind::RBrace) { - break; - } - } - } - let end = self.expect(TokenKind::RBrace)?.span.end; - if is_capability && !fields.is_empty() { - return Err(self.error_at( - Span::new(start, end), - "capability struct cannot declare fields".to_string(), - )); - } - end - } else { - name.span.end - }; - Ok(StructDecl { - name, - type_params, - fields, - is_pub, - is_opaque, - is_linear, - is_copy, - is_capability, - doc, - span: Span::new(start, end), - }) - } - - fn parse_enum(&mut self, is_pub: bool, doc: Option) -> Result { - let start = self.expect(TokenKind::Enum)?.span.start; - let name = self.expect_ident()?; - let type_params = self.parse_type_params()?; - self.expect(TokenKind::LBrace)?; - let mut variants = Vec::new(); - if self.peek_kind() != Some(TokenKind::RBrace) { - loop { - let variant_name = self.expect_ident()?; - let variant_start = variant_name.span.start; - let payload = if self.peek_kind() == Some(TokenKind::LParen) { - self.bump(); - let ty = self.parse_type()?; - self.expect(TokenKind::RParen)?; - Some(ty) - 
} else { - None - }; - let end = payload - .as_ref() - .map_or(variant_name.span.end, |ty| ty.span().end); - variants.push(EnumVariant { - name: variant_name, - payload, - span: Span::new(variant_start, end), - }); - if self.maybe_consume(TokenKind::Comma).is_none() { - break; - } - if self.peek_kind() == Some(TokenKind::RBrace) { - break; - } - } - } - let end = self.expect(TokenKind::RBrace)?.span.end; - Ok(EnumDecl { - name, - type_params, - variants, - is_pub, - doc, - span: Span::new(start, end), - }) - } - - fn parse_block(&mut self) -> Result { - let start = self.expect(TokenKind::LBrace)?.span.start; - let mut stmts = Vec::new(); - while self.peek_kind() != Some(TokenKind::RBrace) { - if self.peek_kind().is_none() { - return Err(self.error_current("unexpected end of input in block".to_string())); - } - stmts.push(self.parse_stmt()?); - } - let end = self.expect(TokenKind::RBrace)?.span.end; - Ok(Block { - stmts, - span: Span::new(start, end), - }) - } - - fn parse_stmt(&mut self) -> Result { - match self.peek_kind() { - Some(TokenKind::Let) => self.parse_let(), - Some(TokenKind::Return) => Ok(Stmt::Return(self.parse_return()?)), - Some(TokenKind::Break) => Ok(Stmt::Break(self.parse_break()?)), - Some(TokenKind::Continue) => Ok(Stmt::Continue(self.parse_continue()?)), - Some(TokenKind::Defer) => Ok(Stmt::Defer(self.parse_defer()?)), - Some(TokenKind::Try) => self.parse_try_stmt(), - Some(TokenKind::If) => self.parse_if_stmt(), - Some(TokenKind::While) => Ok(Stmt::While(self.parse_while()?)), - Some(TokenKind::For) => self.parse_for_stmt(), - Some(TokenKind::Ident) => { - if self.peek_token(1).is_some_and(|t| t.kind == TokenKind::Eq) { - Ok(Stmt::Assign(self.parse_assign()?)) - } else { - Ok(Stmt::Expr(self.parse_expr_stmt()?)) - } - } - _ => Ok(Stmt::Expr(self.parse_expr_stmt()?)), - } - } - - fn parse_let(&mut self) -> Result { - let let_token = self.expect(TokenKind::Let)?; - let start = let_token.span.start; - if self.peek_kind() == Some(TokenKind::Ident) 
- && self - .peek_token(1) - .is_some_and(|t| matches!(t.kind, TokenKind::Colon | TokenKind::Eq)) - { - let name = self.expect_ident()?; - let ty = if self.maybe_consume(TokenKind::Colon).is_some() { - Some(self.parse_type()?) - } else { - None - }; - self.expect(TokenKind::Eq)?; - let expr = self.parse_expr()?; - let end = self - .maybe_consume(TokenKind::Semi) - .map_or(expr.span().end, |t| t.span.end); - return Ok(Stmt::Let(LetStmt { - name, - ty, - expr, - span: Span::new(start, end), - })); - } - - let pattern = self.parse_pattern()?; - self.expect(TokenKind::Eq)?; - let expr = self.parse_expr()?; - self.expect(TokenKind::Else)?; - let else_block = self.parse_block()?; - if self.pattern_binding_ident(&pattern).is_none() { - return Err(self.error_at( - let_token.span, - "`let ... else` requires a binding pattern".to_string(), - )); - } - let end = self - .maybe_consume(TokenKind::Semi) - .map_or(else_block.span.end, |t| t.span.end); - Ok(Stmt::LetElse(LetElseStmt { - pattern, - expr, - else_block, - span: Span::new(start, end), - })) - } - - fn parse_assign(&mut self) -> Result { - let name = self.expect_ident()?; - let start = name.span.start; - self.expect(TokenKind::Eq)?; - let expr = self.parse_expr()?; - let end = self - .maybe_consume(TokenKind::Semi) - .map_or(expr.span().end, |t| t.span.end); - Ok(AssignStmt { - name, - expr, - span: Span::new(start, end), - }) - } - - fn parse_return(&mut self) -> Result { - let start = self.expect(TokenKind::Return)?.span.start; - let expr = match self.peek_kind() { - Some(TokenKind::Semi) | Some(TokenKind::RBrace) => None, - Some(_) => Some(self.parse_expr()?), - None => None, - }; - let end = self - .maybe_consume(TokenKind::Semi) - .map_or(self.peek_span().end, |t| t.span.end); - Ok(ReturnStmt { - expr, - span: Span::new(start, end), - }) - } - - fn parse_try_stmt(&mut self) -> Result { - let try_token = self.expect(TokenKind::Try)?; - let start = try_token.span.start; - - if self.peek_kind() == 
Some(TokenKind::Let) { - self.bump(); - if !(self.peek_kind() == Some(TokenKind::Ident) - && self - .peek_token(1) - .is_some_and(|t| matches!(t.kind, TokenKind::Colon | TokenKind::Eq))) - { - return Err(self.error_at( - try_token.span, - "`try let` requires a plain binding name".to_string(), - )); - } - - let name = self.expect_ident()?; - let ty = if self.maybe_consume(TokenKind::Colon).is_some() { - Some(self.parse_type()?) - } else { - None - }; - self.expect(TokenKind::Eq)?; - let expr = self.parse_expr()?; - self.expect(TokenKind::Else)?; - let err_binding = if self.peek_kind() == Some(TokenKind::Ident) - && self - .peek_token(1) - .is_some_and(|t| t.kind == TokenKind::LBrace) - { - Some(self.expect_ident()?) - } else { - None - }; - let else_block = self.parse_block()?; - let end = self - .maybe_consume(TokenKind::Semi) - .map_or(else_block.span.end, |t| t.span.end); - return Ok(Stmt::TryLet(TryLetStmt { - name, - ty, - expr, - err_binding, - else_block, - span: Span::new(start, end), - })); - } - - let expr = self.parse_expr()?; - self.expect(TokenKind::Else)?; - let err_binding = if self.peek_kind() == Some(TokenKind::Ident) - && self - .peek_token(1) - .is_some_and(|t| t.kind == TokenKind::LBrace) - { - Some(self.expect_ident()?) 
- } else { - None - }; - let else_block = self.parse_block()?; - let end = self - .maybe_consume(TokenKind::Semi) - .map_or(else_block.span.end, |t| t.span.end); - Ok(Stmt::TryElse(TryElseStmt { - expr, - err_binding, - else_block, - span: Span::new(start, end), - })) - } - - fn parse_break(&mut self) -> Result { - let token = self.expect(TokenKind::Break)?; - let end = self - .maybe_consume(TokenKind::Semi) - .map_or(token.span.end, |t| t.span.end); - Ok(BreakStmt { - span: Span::new(token.span.start, end), - }) - } - - fn parse_continue(&mut self) -> Result { - let token = self.expect(TokenKind::Continue)?; - let end = self - .maybe_consume(TokenKind::Semi) - .map_or(token.span.end, |t| t.span.end); - Ok(ContinueStmt { - span: Span::new(token.span.start, end), - }) - } - - fn parse_defer(&mut self) -> Result { - let start = self.expect(TokenKind::Defer)?.span.start; - let expr = self.parse_expr()?; - let end = self - .maybe_consume(TokenKind::Semi) - .map_or(expr.span().end, |t| t.span.end); - Ok(DeferStmt { - expr, - span: Span::new(start, end), - }) - } - - fn parse_if_stmt(&mut self) -> Result { - let if_token = self.expect(TokenKind::If)?; - let start = if_token.span.start; - // Use parse_expr_no_struct because `{` after condition starts the then-block, not a struct literal - let cond = self.parse_expr_no_struct()?; - let then_block = self.parse_block()?; - let else_block = if self.peek_kind() == Some(TokenKind::Else) { - self.bump(); - if self.peek_kind() == Some(TokenKind::If) { - let else_if = self.parse_if_stmt()?; - let span = else_if.span(); - Some(Block { - stmts: vec![else_if], - span, - }) - } else { - Some(self.parse_block()?) 
- } - } else { - None - }; - let end = else_block - .as_ref() - .map_or(then_block.span.end, |b| b.span.end); - Ok(Stmt::If(IfStmt { - cond, - then_block, - else_block, - span: Span::new(start, end), - })) - } - - fn parse_while(&mut self) -> Result { - let start = self.expect(TokenKind::While)?.span.start; - // Use parse_expr_no_struct because `{` after condition starts the loop body, not a struct literal - let cond = self.parse_expr_no_struct()?; - let body = self.parse_block()?; - let end = body.span.end; - Ok(WhileStmt { - cond, - body, - span: Span::new(start, end), - }) - } - - fn parse_for_stmt(&mut self) -> Result { - let for_token = self.expect(TokenKind::For)?; - let start = for_token.span.start; - if self.peek_kind() == Some(TokenKind::LBrace) { - let body = self.parse_block()?; - let end = body.span.end; - let cond = Expr::Literal(LiteralExpr { - id: self.fresh_expr_id(), - value: Literal::Bool(true), - span: for_token.span, - }); - return Ok(Stmt::While(WhileStmt { - cond, - body, - span: Span::new(start, end), - })); - } - let first = self.expect_ident()?; - let second = if self.maybe_consume(TokenKind::Comma).is_some() { - Some(self.expect_ident()?) 
- } else { - None - }; - self.expect(TokenKind::In)?; - let range_or_source = self.parse_expr_no_struct()?; - if self.maybe_consume(TokenKind::DotDot).is_some() { - if second.is_some() { - return Err(self.error_at( - first.span, - "range for loops accept only one binding".to_string(), - )); - } - let range_end = self.parse_range_bound()?; - let body = self.parse_block()?; - let end = body.span.end; - return Ok(Stmt::For(ForStmt { - var: first, - start: range_or_source, - end: range_end, - body, - span: Span::new(start, end), - })); - } - - let item = second.clone().unwrap_or_else(|| first.clone()); - let index = second.map(|_| first); - let body = self.parse_block()?; - Ok(Stmt::ForEach(ForEachStmt { - index, - item, - source: range_or_source, - span: Span::new(start, body.span.end), - body, - })) - } - - /// Parse a simple expression for range bounds (no struct literals allowed) - fn parse_range_bound(&mut self) -> Result { - match self.peek_kind() { - Some(TokenKind::Int) => { - let token = self.bump().unwrap(); - let value = token.text.parse::().map_err(|_| { - self.error_at(token.span, "invalid integer literal".to_string()) - })?; - Ok(Expr::Literal(LiteralExpr { - id: self.fresh_expr_id(), - value: Literal::Int(value), - span: token.span, - })) - } - Some(TokenKind::True) => { - let token = self.bump().unwrap(); - Ok(Expr::Literal(LiteralExpr { - id: self.fresh_expr_id(), - value: Literal::Bool(true), - span: token.span, - })) - } - Some(TokenKind::False) => { - let token = self.bump().unwrap(); - Ok(Expr::Literal(LiteralExpr { - id: self.fresh_expr_id(), - value: Literal::Bool(false), - span: token.span, - })) - } - Some(TokenKind::Ident) => { - // Parse just a simple path (no struct literal) - let first_ident = self.expect_ident()?; - let start = first_ident.span.start; - let mut segments = vec![first_ident]; - - while self.peek_kind() == Some(TokenKind::ColonColon) { - self.bump(); - let segment = self.expect_ident()?; - segments.push(segment); - } - - let 
end = segments.last().unwrap().span.end; - Ok(Expr::Path(Path { - id: self.fresh_expr_id(), - segments, - span: Span::new(start, end), - })) - } - Some(other) => Err(self.error_current(format!( - "expected integer or identifier in range bound, found {other:?}" - ))), - None => Err(self.error_current("unexpected end of input".to_string())), - } - } - - fn parse_expr_stmt(&mut self) -> Result { - let expr = self.parse_expr()?; - if self.peek_kind() == Some(TokenKind::Else) { - return Err(self.error_current("`expr else` now requires a leading `try`".to_string())); - } - let expr_span = expr.span(); - let end = self - .maybe_consume(TokenKind::Semi) - .map_or(expr_span.end, |t| t.span.end); - Ok(ExprStmt { - expr, - span: Span::new(expr_span.start, end), - }) - } - - fn parse_expr(&mut self) -> Result { - self.parse_expr_inner(true) - } - - /// Parse an expression where struct literals are not allowed. - /// Used in if/while/for/match scrutinee positions where `{` starts a block, not a struct literal. - fn parse_expr_no_struct(&mut self) -> Result { - self.parse_expr_inner(false) - } - - fn parse_expr_inner(&mut self, allow_struct_literal: bool) -> Result { - self.parse_expr_bp(0, allow_struct_literal) - } - - /// Look ahead to see if a `<...>` type-arg list is closed and followed by - /// a call `(` or struct literal `{`. 
- fn type_args_followed_by_call_or_struct(&self) -> bool { - if self.peek_kind() != Some(TokenKind::Lt) { - return false; - } - let mut depth = 0usize; - let mut idx = self.index; - while idx < self.tokens.len() { - match self.tokens[idx].kind { - TokenKind::Lt => depth += 1, - TokenKind::Gt => { - if depth == 0 { - return false; - } - depth -= 1; - if depth == 0 { - return matches!( - self.tokens.get(idx + 1).map(|t| &t.kind), - Some(TokenKind::LParen) | Some(TokenKind::LBrace) - ); - } - } - _ => {} - } - idx += 1; - } - false - } - - fn parse_expr_bp( - &mut self, - min_bp: u8, - allow_struct_literal: bool, - ) -> Result { - let mut lhs = self.parse_prefix(allow_struct_literal)?; - - loop { - // First, check for postfix operators - if let Some(kind) = self.peek_kind() { - if let Some(bp) = postfix_binding_power(&kind) { - if bp < min_bp { - break; - } - - match kind { - TokenKind::Dot => { - let start = lhs.span().start; - self.bump(); // consume '.' - let field = self.expect_ident()?; - let type_args = if self.peek_kind() == Some(TokenKind::Lt) { - self.parse_type_args()? - } else { - Vec::new() - }; - - // Check if this is a struct literal (followed by '{') - if allow_struct_literal && self.peek_kind() == Some(TokenKind::LBrace) { - // Convert the lhs and field into a path for the struct literal - let mut path = match lhs { - Expr::Path(p) => p, - Expr::FieldAccess(ref fa) => { - // Convert FieldAccess chain to Path - self.field_access_to_path(fa)? 
- } - _ => { - return Err(self.error_current( - "expected path before struct literal".to_string(), - )) - } - }; - path.segments.push(field); - path.span = Span::new( - path.span.start, - path.segments.last().unwrap().span.end, - ); - lhs = self.parse_struct_literal(path, type_args)?; - continue; - } - - // Check if this is a method call (followed by '(') - if self.peek_kind() == Some(TokenKind::LParen) { - self.bump(); // consume '(' - let mut args = Vec::new(); - if self.peek_kind() != Some(TokenKind::RParen) { - loop { - args.push(self.parse_expr()?); - if self.maybe_consume(TokenKind::Comma).is_none() { - break; - } - } - } - let end = self.expect(TokenKind::RParen)?.span.end; - lhs = Expr::MethodCall(MethodCallExpr { - id: self.fresh_expr_id(), - receiver: Box::new(lhs), - method: field, - type_args, - args, - span: Span::new(start, end), - }); - continue; - } - - if !type_args.is_empty() { - return Err(self.error_current( - "type arguments require a method call or struct literal" - .to_string(), - )); - } - // Otherwise, it's a field access - let span = Span::new(start, field.span.end); - lhs = Expr::FieldAccess(FieldAccessExpr { - id: self.fresh_expr_id(), - object: Box::new(lhs), - field, - span, - }); - continue; - } - TokenKind::LParen => { - lhs = self.finish_call(lhs, Vec::new())?; - continue; - } - TokenKind::LBracket => { - // With <> for generics, [] is unambiguously for indexing - let start = lhs.span().start; - self.bump(); // consume '[' - let index = self.parse_expr()?; - let end = self.expect(TokenKind::RBracket)?.span.end; - lhs = Expr::Index(IndexExpr { - id: self.fresh_expr_id(), - object: Box::new(lhs), - index: Box::new(index), - span: Span::new(start, end), - }); - continue; - } - TokenKind::Question => { - let start = lhs.span().start; - let end = self.bump().unwrap().span.end; - lhs = Expr::Try(TryExpr { - id: self.fresh_expr_id(), - expr: Box::new(lhs), - span: Span::new(start, end), - }); - continue; - } - _ => unreachable!(), - } - } 
- } - - // Special handling for '<' which can be type arguments or less-than - if self.peek_kind() == Some(TokenKind::Lt) { - // Check if this looks like type arguments: path(args) or path{ ... } - if matches!(&lhs, Expr::Path(_) | Expr::FieldAccess(_)) - && self.type_args_followed_by_call_or_struct() - { - let type_args = self.parse_type_args()?; - if allow_struct_literal && self.peek_kind() == Some(TokenKind::LBrace) { - let path = match lhs { - Expr::Path(p) => p, - Expr::FieldAccess(ref fa) => self.field_access_to_path(fa)?, - _ => unreachable!(), - }; - lhs = self.parse_struct_literal(path, type_args)?; - continue; - } - if self.peek_kind() == Some(TokenKind::LParen) { - lhs = self.finish_call(lhs, type_args)?; - continue; - } - if !allow_struct_literal && self.peek_kind() == Some(TokenKind::LBrace) { - return Err(self.error_current( - "generic expressions in this context require parentheses".to_string(), - )); - } - return Err(self.error_current( - "type arguments require a call or struct literal".to_string(), - )); - } - // Fall through to treat as less-than comparison - } - - // Then, check for binary operators - let op = match self.peek_kind() { - Some(TokenKind::OrOr) => BinaryOp::Or, - Some(TokenKind::AndAnd) => BinaryOp::And, - Some(TokenKind::Pipe) => BinaryOp::BitOr, - Some(TokenKind::Caret) => BinaryOp::BitXor, - Some(TokenKind::Ampersand) => BinaryOp::BitAnd, - Some(TokenKind::EqEq) => BinaryOp::Eq, - Some(TokenKind::NotEq) => BinaryOp::Neq, - Some(TokenKind::Lt) => BinaryOp::Lt, - Some(TokenKind::Lte) => BinaryOp::Lte, - Some(TokenKind::Gt) => BinaryOp::Gt, - Some(TokenKind::Gte) => BinaryOp::Gte, - Some(TokenKind::Shl) => BinaryOp::Shl, - Some(TokenKind::Shr) => BinaryOp::Shr, - Some(TokenKind::Plus) => BinaryOp::Add, - Some(TokenKind::Minus) => BinaryOp::Sub, - Some(TokenKind::Star) => BinaryOp::Mul, - Some(TokenKind::Slash) => BinaryOp::Div, - Some(TokenKind::Percent) => BinaryOp::Mod, - _ => break, - }; - - let (l_bp, r_bp) = 
infix_binding_power(&op); - if l_bp < min_bp { - break; - } - - self.bump(); - // Propagate struct-literal allowance to avoid block ambiguity in no-struct contexts. - let rhs = self.parse_expr_bp(r_bp, allow_struct_literal)?; - let span = Span::new(lhs.span().start, rhs.span().end); - lhs = Expr::Binary(BinaryExpr { - id: self.fresh_expr_id(), - op, - left: Box::new(lhs), - right: Box::new(rhs), - span, - }); - } - - Ok(lhs) - } - - fn parse_prefix(&mut self, allow_struct_literal: bool) -> Result { - match self.peek_kind() { - Some(TokenKind::Minus) => { - let start = self.bump().unwrap().span.start; - // Propagate struct-literal allowance to avoid block ambiguity in no-struct contexts. - let expr = self.parse_expr_bp(7, allow_struct_literal)?; - Ok(Expr::Unary(UnaryExpr { - id: self.fresh_expr_id(), - op: UnaryOp::Neg, - span: Span::new(start, expr.span().end), - expr: Box::new(expr), - })) - } - Some(TokenKind::Tilde) => { - let start = self.bump().unwrap().span.start; - // Propagate struct-literal allowance to avoid block ambiguity in no-struct contexts. - let expr = self.parse_expr_bp(7, allow_struct_literal)?; - Ok(Expr::Unary(UnaryExpr { - id: self.fresh_expr_id(), - op: UnaryOp::BitNot, - span: Span::new(start, expr.span().end), - expr: Box::new(expr), - })) - } - Some(TokenKind::Bang) => { - let start = self.bump().unwrap().span.start; - // Propagate struct-literal allowance to avoid block ambiguity in no-struct contexts. 
- let expr = self.parse_expr_bp(7, allow_struct_literal)?; - Ok(Expr::Unary(UnaryExpr { - id: self.fresh_expr_id(), - op: UnaryOp::Not, - span: Span::new(start, expr.span().end), - expr: Box::new(expr), - })) - } - Some(TokenKind::Match) => self.parse_match(), - _ => self.parse_primary(allow_struct_literal), - } - } - - fn parse_primary(&mut self, allow_struct_literal: bool) -> Result { - match self.peek_kind() { - Some(TokenKind::Int) => { - let token = self.bump().unwrap(); - let value = token.text.parse::().map_err(|_| { - self.error_at(token.span, "invalid integer literal".to_string()) - })?; - if let Some(next) = self.peek_token(0) { - if next.kind == TokenKind::Ident - && next.text == "u8" - && next.span.start == token.span.end - { - let suffix = self.bump().unwrap(); - if !(0..=255).contains(&value) { - return Err(self.error_at( - Span::new(token.span.start, suffix.span.end), - "u8 literal out of range".to_string(), - )); - } - return Ok(Expr::Literal(LiteralExpr { - id: self.fresh_expr_id(), - value: Literal::U8(value as u8), - span: Span::new(token.span.start, suffix.span.end), - })); - } - } - Ok(Expr::Literal(LiteralExpr { - id: self.fresh_expr_id(), - value: Literal::Int(value), - span: token.span, - })) - } - Some(TokenKind::Str) => { - let token = self.bump().unwrap(); - let value = unescape_string(&token.text).map_err(|message| { - self.error_at(token.span, format!("invalid string literal: {message}")) - })?; - Ok(Expr::Literal(LiteralExpr { - id: self.fresh_expr_id(), - value: Literal::String(value), - span: token.span, - })) - } - Some(TokenKind::Char) => { - let token = self.bump().unwrap(); - let value = unescape_char(&token.text).map_err(|message| { - self.error_at(token.span, format!("invalid char literal: {message}")) - })?; - Ok(Expr::Literal(LiteralExpr { - id: self.fresh_expr_id(), - value: Literal::U8(value), - span: token.span, - })) - } - Some(TokenKind::True) => { - let token = self.bump().unwrap(); - Ok(Expr::Literal(LiteralExpr { - 
id: self.fresh_expr_id(), - value: Literal::Bool(true), - span: token.span, - })) - } - Some(TokenKind::False) => { - let token = self.bump().unwrap(); - Ok(Expr::Literal(LiteralExpr { - id: self.fresh_expr_id(), - value: Literal::Bool(false), - span: token.span, - })) - } - Some(TokenKind::LParen) => { - let start = self.bump().unwrap().span.start; - if self.peek_kind() == Some(TokenKind::RParen) { - let end = self.bump().unwrap().span.end; - Ok(Expr::Literal(LiteralExpr { - id: self.fresh_expr_id(), - value: Literal::Unit, - span: Span::new(start, end), - })) - } else { - let expr = self.parse_expr()?; - let end = self.expect(TokenKind::RParen)?.span.end; - Ok(Expr::Grouping(GroupingExpr { - id: self.fresh_expr_id(), - expr: Box::new(expr), - span: Span::new(start, end), - })) - } - } - Some(TokenKind::Ident) => { - // Parse path segments separated by :: - let first_ident = self.expect_ident()?; - let start = first_ident.span.start; - let mut segments = vec![first_ident]; - - // Parse additional segments with :: - while self.peek_kind() == Some(TokenKind::ColonColon) { - self.bump(); // consume :: - let segment = self.expect_ident()?; - segments.push(segment); - } - - let end = segments.last().unwrap().span.end; - let path = Path { - id: self.fresh_expr_id(), - segments, - span: Span::new(start, end), - }; - - if allow_struct_literal && self.peek_kind() == Some(TokenKind::LBrace) { - self.parse_struct_literal(path, Vec::new()) - } else { - Ok(Expr::Path(path)) - } - } - Some(other) => { - Err(self.error_current(format!("unexpected token in expression: {other:?}"))) - } - None => Err(self.error_current("unexpected end of input".to_string())), - } - } - - fn parse_match(&mut self) -> Result { - let match_token = self.expect(TokenKind::Match)?; - let start = match_token.span.start; - // Use parse_expr_no_struct because `{` after the scrutinee starts the match arms, not a struct literal - let expr = self.parse_expr_no_struct()?; - self.expect(TokenKind::LBrace)?; - 
let mut arms = Vec::new(); - while self.peek_kind() != Some(TokenKind::RBrace) { - let pattern = self.parse_pattern()?; - self.expect(TokenKind::FatArrow)?; - let body = self.parse_block()?; - let end = body.span.end; - arms.push(MatchArm { - pattern, - body, - span: Span::new(start, end), - }); - if self.maybe_consume(TokenKind::Comma).is_some() { - continue; - } - if self.peek_kind() == Some(TokenKind::RBrace) { - break; - } - } - let end = self.expect(TokenKind::RBrace)?.span.end; - Ok(Expr::Match(MatchExpr { - id: self.fresh_expr_id(), - expr: Box::new(expr), - arms, - span: Span::new(start, end), - match_span: match_token.span, - })) - } - - fn parse_pattern(&mut self) -> Result { - match self.peek_kind() { - Some(TokenKind::Int) => { - let token = self.bump().unwrap(); - let value = token.text.parse::().map_err(|_| { - self.error_at(token.span, "invalid integer literal".to_string()) - })?; - Ok(Pattern::Literal(Literal::Int(value))) - } - Some(TokenKind::Char) => { - let token = self.bump().unwrap(); - let value = unescape_char(&token.text).map_err(|message| { - self.error_at(token.span, format!("invalid char literal: {message}")) - })?; - Ok(Pattern::Literal(Literal::U8(value))) - } - Some(TokenKind::True) => { - self.bump(); - Ok(Pattern::Literal(Literal::Bool(true))) - } - Some(TokenKind::False) => { - self.bump(); - Ok(Pattern::Literal(Literal::Bool(false))) - } - Some(TokenKind::Ident) => { - let path = self.parse_path()?; - if self.peek_kind() == Some(TokenKind::LParen) { - let start = path.span.start; - self.bump(); - let binding = if self.peek_kind() == Some(TokenKind::Ident) { - Some(self.expect_ident()?) 
- } else if self.peek_kind() == Some(TokenKind::Underscore) { - self.bump(); - None - } else { - None - }; - let end = self.expect(TokenKind::RParen)?.span.end; - Ok(Pattern::Call { - path, - binding, - span: Span::new(start, end), - }) - } else if path.segments.len() == 1 { - // Single segment - could be binding or enum variant - // If lowercase, it's a binding; if uppercase, it's an enum variant - let name = &path.segments[0].item; - if name - .chars() - .next() - .map(|c| c.is_lowercase()) - .unwrap_or(false) - { - Ok(Pattern::Binding(path.segments.into_iter().next().unwrap())) - } else { - Ok(Pattern::Path(path)) - } - } else { - Ok(Pattern::Path(path)) - } - } - Some(TokenKind::Underscore) => { - let span = self.bump().unwrap().span; - Ok(Pattern::Wildcard(span)) - } - _ => Err(self.error_current("unexpected token in pattern".to_string())), - } - } - - fn pattern_binding_ident(&self, pattern: &Pattern) -> Option { - match pattern { - Pattern::Binding(ident) => Some(ident.clone()), - Pattern::Call { - binding: Some(ident), - .. 
- } => Some(ident.clone()), - _ => None, - } - } - - fn parse_path(&mut self) -> Result { - let first = self.expect_ident()?; - let start = first.span.start; - let mut segments = vec![first]; - // Parse path segments separated by :: - while self.peek_kind() == Some(TokenKind::ColonColon) { - self.bump(); - segments.push(self.expect_ident()?); - } - let end = segments.last().map(|s| s.span.end).unwrap_or(start); - Ok(Path { - id: self.fresh_expr_id(), - segments, - span: Span::new(start, end), - }) - } - - fn field_access_to_path(&self, field_access: &FieldAccessExpr) -> Result { - let mut segments = Vec::new(); - - // Recursively collect segments from the object - fn collect_segments(expr: &Expr, segments: &mut Vec) -> Option<()> { - match expr { - Expr::Path(path) => { - segments.extend(path.segments.clone()); - Some(()) - } - Expr::FieldAccess(fa) => { - collect_segments(&fa.object, segments)?; - segments.push(fa.field.clone()); - Some(()) - } - _ => None, - } - } - - collect_segments(&field_access.object, &mut segments).ok_or_else(|| { - self.error_at( - field_access.span, - "expected path or field access".to_string(), - ) - })?; - segments.push(field_access.field.clone()); - - let start = segments - .first() - .map(|s| s.span.start) - .unwrap_or(field_access.span.start); - let end = segments - .last() - .map(|s| s.span.end) - .unwrap_or(field_access.span.end); - - Ok(Path { - id: field_access.id, - segments, - span: Span::new(start, end), - }) - } - - fn parse_type(&mut self) -> Result { - if self.peek_kind() == Some(TokenKind::Star) { - let start = self.bump().unwrap().span.start; - let target = self.parse_type()?; - let span = Span::new(start, target.span().end); - return Ok(Type::Ptr { - target: Box::new(target), - span, - }); - } - if self.peek_kind() == Some(TokenKind::Ampersand) { - let start = self.bump().unwrap().span.start; - let target = self.parse_type()?; - let span = Span::new(start, target.span().end); - return Ok(Type::Ref { - target: 
Box::new(target), - span, - }); - } - - let path = self.parse_path()?; - let mut args = Vec::new(); - let mut end = path.span.end; - if self.peek_kind() == Some(TokenKind::Lt) { - self.bump(); - if self.peek_kind() != Some(TokenKind::Gt) { - loop { - args.push(self.parse_type()?); - if self.maybe_consume(TokenKind::Comma).is_none() { - break; - } - } - } - end = self.expect(TokenKind::Gt)?.span.end; - } - let span = Span::new(path.span.start, end); - Ok(Type::Path { path, args, span }) - } - - fn parse_struct_literal( - &mut self, - path: Path, - type_args: Vec, - ) -> Result { - let start = path.span.start; - self.expect(TokenKind::LBrace)?; - let mut fields = Vec::new(); - if self.peek_kind() != Some(TokenKind::RBrace) { - loop { - let name = self.expect_ident()?; - self.expect(TokenKind::Colon)?; - let expr = self.parse_expr()?; - let end = expr.span().end; - fields.push(StructLiteralField { - name, - expr, - span: Span::new(start, end), - }); - if self.maybe_consume(TokenKind::Comma).is_some() { - if self.peek_kind() == Some(TokenKind::RBrace) { - break; - } - continue; - } - break; - } - } - let end = self.expect(TokenKind::RBrace)?.span.end; - Ok(Expr::StructLiteral(StructLiteralExpr { - id: self.fresh_expr_id(), - path, - type_args, - fields, - span: Span::new(start, end), - })) - } - - fn finish_call(&mut self, callee: Expr, type_args: Vec) -> Result { - let start = callee.span().start; - self.expect(TokenKind::LParen)?; - let mut args = Vec::new(); - if self.peek_kind() != Some(TokenKind::RParen) { - loop { - args.push(self.parse_expr()?); - if self.maybe_consume(TokenKind::Comma).is_none() { - break; - } - } - } - let end = self.expect(TokenKind::RParen)?.span.end; - Ok(Expr::Call(CallExpr { - id: self.fresh_expr_id(), - callee: Box::new(callee), - type_args, - args, - span: Span::new(start, end), - })) - } - - fn parse_type_params(&mut self) -> Result, ParseError> { - if self.peek_kind() != Some(TokenKind::Lt) { - return Ok(Vec::new()); - } - 
self.bump(); - let mut params = Vec::new(); - if self.peek_kind() != Some(TokenKind::Gt) { - loop { - let name = self.expect_ident()?; - let mut bounds = Vec::new(); - if self.maybe_consume(TokenKind::Colon).is_some() { - loop { - let bound = self.parse_path()?; - bounds.push(bound); - if self.maybe_consume(TokenKind::Plus).is_none() { - break; - } - } - } - params.push(TypeParam { name, bounds }); - if self.maybe_consume(TokenKind::Comma).is_none() { - break; - } - } - } - self.expect(TokenKind::Gt)?; - Ok(params) - } - - fn parse_type_args(&mut self) -> Result, ParseError> { - if self.peek_kind() != Some(TokenKind::Lt) { - return Ok(Vec::new()); - } - self.bump(); - let mut args = Vec::new(); - if self.peek_kind() != Some(TokenKind::Gt) { - loop { - args.push(self.parse_type()?); - if self.maybe_consume(TokenKind::Comma).is_none() { - break; - } - } - } - self.expect(TokenKind::Gt)?; - Ok(args) - } - fn expect(&mut self, kind: TokenKind) -> Result { - match self.peek_kind() { - Some(k) if k == kind => Ok(self.bump().unwrap()), - Some(other) => Err(self.error_current(format!("expected {kind:?}, found {other:?}"))), - None => Err(self.error_current("unexpected end of input".to_string())), + match self.bump() { + Some(token) if token.kind == kind => Ok(token), + Some(token) => Err(self.error_at(token.span, format!("expected {kind:?}, found {:?}", token.kind))), + None => Err(self.error_current(format!("expected {kind:?}, found end of input"))), } } @@ -1688,7 +144,7 @@ impl Parser { fn bump_kind(&mut self) -> Result { self.bump() - .map(|t| t.kind) + .map(|token| token.kind) .ok_or_else(|| self.error_current("unexpected end of input".to_string())) } @@ -1701,19 +157,15 @@ impl Parser { } fn peek_kind(&self) -> Option { - self.tokens.get(self.index).map(|t| t.kind.clone()) + self.peek_kind_raw() } fn peek_span(&self) -> Span { - self.tokens - .get(self.index) - .map(|t| t.span) - .unwrap_or(self.eof_span) + self.peek_span_raw() } fn error_current(&self, message: 
String) -> ParseError { - let span = self.peek_span(); - ParseError::new(message, span) + ParseError::new(message, self.peek_span()) } fn error_at(&self, span: Span, message: String) -> ParseError { @@ -1742,39 +194,30 @@ fn infix_binding_power(op: &BinaryOp) -> (u8, u8) { fn postfix_binding_power(kind: &TokenKind) -> Option { match kind { - // Postfix operators have highest precedence (higher than any infix operator) TokenKind::Dot | TokenKind::LParen | TokenKind::LBracket | TokenKind::Question => Some(23), _ => None, } } fn unescape_string(text: &str) -> Result { - let mut chars = text.chars(); - if chars.next() != Some('"') || text.len() < 2 { - return Err("missing quotes".to_string()); - } let mut out = String::new(); - let mut escape = false; - for ch in chars { - if escape { - let escaped = match ch { - 'n' => '\n', - 'r' => '\r', - 't' => '\t', - '\\' => '\\', - '"' => '"', - other => { - return Err(format!("unsupported escape \\{other}")); - } - }; - out.push(escaped); - escape = false; - } else if ch == '\\' { - escape = true; - } else if ch == '"' { - break; - } else { + let mut chars = text[1..text.len() - 1].chars(); + while let Some(ch) = chars.next() { + if ch != '\\' { out.push(ch); + continue; + } + let Some(esc) = chars.next() else { + return Err("unterminated escape".to_string()); + }; + match esc { + '\\' => out.push('\\'), + '"' => out.push('"'), + 'n' => out.push('\n'), + 'r' => out.push('\r'), + 't' => out.push('\t'), + '0' => out.push('\0'), + other => return Err(format!("unsupported escape \\{other}")), } } Ok(out) @@ -1827,9 +270,5 @@ fn unit_type_at(span: Span) -> Type { segments: vec![ident], span, }; - Type::Path { - path, - args: Vec::new(), - span, - } + Type::Path { path, args: Vec::new(), span } } diff --git a/capc/src/parser/exprs.rs b/capc/src/parser/exprs.rs new file mode 100644 index 0000000..4ab1393 --- /dev/null +++ b/capc/src/parser/exprs.rs @@ -0,0 +1,503 @@ +use super::*; + +impl Parser { + pub(super) fn parse_expr(&mut 
self) -> Result { + self.parse_expr_inner(true) + } + + /// Parse an expression where struct literals are not allowed. + /// Used in if/while/for/match scrutinee positions where `{` starts a block, not a struct literal. + pub(super) fn parse_expr_no_struct(&mut self) -> Result { + self.parse_expr_inner(false) + } + + pub(super) fn parse_expr_inner( + &mut self, + allow_struct_literal: bool, + ) -> Result { + self.parse_expr_bp(0, allow_struct_literal) + } + + /// Look ahead to see if a `<...>` type-arg list is closed and followed by + /// a call `(` or struct literal `{`. + pub(super) fn type_args_followed_by_call_or_struct(&self) -> bool { + if self.peek_kind() != Some(TokenKind::Lt) { + return false; + } + let mut depth = 0usize; + let mut idx = self.index; + while idx < self.tokens.len() { + match self.tokens[idx].kind { + TokenKind::Lt => depth += 1, + TokenKind::Gt => { + if depth == 0 { + return false; + } + depth -= 1; + if depth == 0 { + return matches!( + self.tokens.get(idx + 1).map(|t| &t.kind), + Some(TokenKind::LParen) | Some(TokenKind::LBrace) + ); + } + } + _ => {} + } + idx += 1; + } + false + } + + pub(super) fn parse_expr_bp( + &mut self, + min_bp: u8, + allow_struct_literal: bool, + ) -> Result { + let mut lhs = self.parse_prefix(allow_struct_literal)?; + + loop { + if let Some(kind) = self.peek_kind() { + if let Some(bp) = postfix_binding_power(&kind) { + if bp < min_bp { + break; + } + + match kind { + TokenKind::Dot => { + let start = lhs.span().start; + self.bump(); + let field = self.expect_ident()?; + let type_args = if self.peek_kind() == Some(TokenKind::Lt) { + self.parse_type_args()? 
+ } else { + Vec::new() + }; + + if allow_struct_literal && self.peek_kind() == Some(TokenKind::LBrace) { + let mut path = match lhs { + Expr::Path(p) => p, + Expr::FieldAccess(ref fa) => self.field_access_to_path(fa)?, + _ => { + return Err(self.error_current( + "expected path before struct literal".to_string(), + )) + } + }; + path.segments.push(field); + path.span = Span::new( + path.span.start, + path.segments.last().unwrap().span.end, + ); + lhs = self.parse_struct_literal(path, type_args)?; + continue; + } + + if self.peek_kind() == Some(TokenKind::LParen) { + self.bump(); + let mut args = Vec::new(); + if self.peek_kind() != Some(TokenKind::RParen) { + loop { + args.push(self.parse_expr()?); + if self.maybe_consume(TokenKind::Comma).is_none() { + break; + } + } + } + let end = self.expect(TokenKind::RParen)?.span.end; + lhs = Expr::MethodCall(MethodCallExpr { + id: self.fresh_expr_id(), + receiver: Box::new(lhs), + method: field, + type_args, + args, + span: Span::new(start, end), + }); + continue; + } + + if !type_args.is_empty() { + return Err(self.error_current( + "type arguments require a method call or struct literal" + .to_string(), + )); + } + let span = Span::new(start, field.span.end); + lhs = Expr::FieldAccess(FieldAccessExpr { + id: self.fresh_expr_id(), + object: Box::new(lhs), + field, + span, + }); + continue; + } + TokenKind::LParen => { + lhs = self.finish_call(lhs, Vec::new())?; + continue; + } + TokenKind::LBracket => { + let start = lhs.span().start; + self.bump(); + let index = self.parse_expr()?; + let end = self.expect(TokenKind::RBracket)?.span.end; + lhs = Expr::Index(IndexExpr { + id: self.fresh_expr_id(), + object: Box::new(lhs), + index: Box::new(index), + span: Span::new(start, end), + }); + continue; + } + TokenKind::Question => { + let start = lhs.span().start; + let end = self.bump().unwrap().span.end; + lhs = Expr::Try(TryExpr { + id: self.fresh_expr_id(), + expr: Box::new(lhs), + span: Span::new(start, end), + }); + continue; 
+ } + _ => { + return Err(self.error_current( + "unexpected postfix operator".to_string(), + )) + } + } + } + } + + if self.peek_kind() == Some(TokenKind::Lt) { + if matches!(&lhs, Expr::Path(_) | Expr::FieldAccess(_)) + && self.type_args_followed_by_call_or_struct() + { + let type_args = self.parse_type_args()?; + if allow_struct_literal && self.peek_kind() == Some(TokenKind::LBrace) { + let path = match lhs { + Expr::Path(p) => p, + Expr::FieldAccess(ref fa) => self.field_access_to_path(fa)?, + _ => { + return Err(self.error_current( + "type arguments require a path receiver".to_string(), + )) + } + }; + lhs = self.parse_struct_literal(path, type_args)?; + continue; + } + if self.peek_kind() == Some(TokenKind::LParen) { + lhs = self.finish_call(lhs, type_args)?; + continue; + } + if !allow_struct_literal && self.peek_kind() == Some(TokenKind::LBrace) { + return Err(self.error_current( + "generic expressions in this context require parentheses".to_string(), + )); + } + return Err(self.error_current( + "type arguments require a call or struct literal".to_string(), + )); + } + } + + let op = match self.peek_kind() { + Some(TokenKind::OrOr) => BinaryOp::Or, + Some(TokenKind::AndAnd) => BinaryOp::And, + Some(TokenKind::Pipe) => BinaryOp::BitOr, + Some(TokenKind::Caret) => BinaryOp::BitXor, + Some(TokenKind::Ampersand) => BinaryOp::BitAnd, + Some(TokenKind::EqEq) => BinaryOp::Eq, + Some(TokenKind::NotEq) => BinaryOp::Neq, + Some(TokenKind::Lt) => BinaryOp::Lt, + Some(TokenKind::Lte) => BinaryOp::Lte, + Some(TokenKind::Gt) => BinaryOp::Gt, + Some(TokenKind::Gte) => BinaryOp::Gte, + Some(TokenKind::Shl) => BinaryOp::Shl, + Some(TokenKind::Shr) => BinaryOp::Shr, + Some(TokenKind::Plus) => BinaryOp::Add, + Some(TokenKind::Minus) => BinaryOp::Sub, + Some(TokenKind::Star) => BinaryOp::Mul, + Some(TokenKind::Slash) => BinaryOp::Div, + Some(TokenKind::Percent) => BinaryOp::Mod, + _ => break, + }; + + let (l_bp, r_bp) = infix_binding_power(&op); + if l_bp < min_bp { + break; + 
} + + self.bump(); + let rhs = self.parse_expr_bp(r_bp, allow_struct_literal)?; + let span = Span::new(lhs.span().start, rhs.span().end); + lhs = Expr::Binary(BinaryExpr { + id: self.fresh_expr_id(), + op, + left: Box::new(lhs), + right: Box::new(rhs), + span, + }); + } + + Ok(lhs) + } + + pub(super) fn parse_prefix( + &mut self, + allow_struct_literal: bool, + ) -> Result { + match self.peek_kind() { + Some(TokenKind::Minus) => { + let start = self.bump().unwrap().span.start; + let expr = self.parse_expr_bp(7, allow_struct_literal)?; + Ok(Expr::Unary(UnaryExpr { + id: self.fresh_expr_id(), + op: UnaryOp::Neg, + span: Span::new(start, expr.span().end), + expr: Box::new(expr), + })) + } + Some(TokenKind::Tilde) => { + let start = self.bump().unwrap().span.start; + let expr = self.parse_expr_bp(7, allow_struct_literal)?; + Ok(Expr::Unary(UnaryExpr { + id: self.fresh_expr_id(), + op: UnaryOp::BitNot, + span: Span::new(start, expr.span().end), + expr: Box::new(expr), + })) + } + Some(TokenKind::Bang) => { + let start = self.bump().unwrap().span.start; + let expr = self.parse_expr_bp(7, allow_struct_literal)?; + Ok(Expr::Unary(UnaryExpr { + id: self.fresh_expr_id(), + op: UnaryOp::Not, + span: Span::new(start, expr.span().end), + expr: Box::new(expr), + })) + } + Some(TokenKind::Match) => self.parse_match(), + _ => self.parse_primary(allow_struct_literal), + } + } + + pub(super) fn parse_primary( + &mut self, + allow_struct_literal: bool, + ) -> Result { + match self.peek_kind() { + Some(TokenKind::Int) => { + let token = self.bump().unwrap(); + let value = token.text.parse::().map_err(|_| { + self.error_at(token.span, "invalid integer literal".to_string()) + })?; + if let Some(next) = self.peek_token(0) { + if next.kind == TokenKind::Ident + && next.text == "u8" + && next.span.start == token.span.end + { + let suffix = self.bump().unwrap(); + if !(0..=255).contains(&value) { + return Err(self.error_at( + Span::new(token.span.start, suffix.span.end), + "u8 literal out 
of range".to_string(), + )); + } + return Ok(Expr::Literal(LiteralExpr { + id: self.fresh_expr_id(), + value: Literal::U8(value as u8), + span: Span::new(token.span.start, suffix.span.end), + })); + } + } + Ok(Expr::Literal(LiteralExpr { + id: self.fresh_expr_id(), + value: Literal::Int(value), + span: token.span, + })) + } + Some(TokenKind::Str) => { + let token = self.bump().unwrap(); + let value = unescape_string(&token.text).map_err(|message| { + self.error_at(token.span, format!("invalid string literal: {message}")) + })?; + Ok(Expr::Literal(LiteralExpr { + id: self.fresh_expr_id(), + value: Literal::String(value), + span: token.span, + })) + } + Some(TokenKind::Char) => { + let token = self.bump().unwrap(); + let value = unescape_char(&token.text).map_err(|message| { + self.error_at(token.span, format!("invalid char literal: {message}")) + })?; + Ok(Expr::Literal(LiteralExpr { + id: self.fresh_expr_id(), + value: Literal::U8(value), + span: token.span, + })) + } + Some(TokenKind::True) => { + let token = self.bump().unwrap(); + Ok(Expr::Literal(LiteralExpr { + id: self.fresh_expr_id(), + value: Literal::Bool(true), + span: token.span, + })) + } + Some(TokenKind::False) => { + let token = self.bump().unwrap(); + Ok(Expr::Literal(LiteralExpr { + id: self.fresh_expr_id(), + value: Literal::Bool(false), + span: token.span, + })) + } + Some(TokenKind::LParen) => { + let start = self.bump().unwrap().span.start; + if self.peek_kind() == Some(TokenKind::RParen) { + let end = self.bump().unwrap().span.end; + Ok(Expr::Literal(LiteralExpr { + id: self.fresh_expr_id(), + value: Literal::Unit, + span: Span::new(start, end), + })) + } else { + let expr = self.parse_expr()?; + let end = self.expect(TokenKind::RParen)?.span.end; + Ok(Expr::Grouping(GroupingExpr { + id: self.fresh_expr_id(), + expr: Box::new(expr), + span: Span::new(start, end), + })) + } + } + Some(TokenKind::Ident) => { + let first_ident = self.expect_ident()?; + let start = first_ident.span.start; + let 
mut segments = vec![first_ident]; + + while self.peek_kind() == Some(TokenKind::ColonColon) { + self.bump(); + let segment = self.expect_ident()?; + segments.push(segment); + } + + let end = segments.last().unwrap().span.end; + let path = Path { + id: self.fresh_expr_id(), + segments, + span: Span::new(start, end), + }; + + if allow_struct_literal && self.peek_kind() == Some(TokenKind::LBrace) { + self.parse_struct_literal(path, Vec::new()) + } else { + Ok(Expr::Path(path)) + } + } + Some(other) => Err(self.error_current(format!( + "unexpected token in expression: {other:?}" + ))), + None => Err(self.error_current("unexpected end of input".to_string())), + } + } + + pub(super) fn parse_match(&mut self) -> Result { + let match_token = self.expect(TokenKind::Match)?; + let start = match_token.span.start; + let expr = self.parse_expr_no_struct()?; + self.expect(TokenKind::LBrace)?; + let mut arms = Vec::new(); + while self.peek_kind() != Some(TokenKind::RBrace) { + let pattern = self.parse_pattern()?; + self.expect(TokenKind::FatArrow)?; + let body = self.parse_block()?; + let end = body.span.end; + arms.push(MatchArm { + pattern, + body, + span: Span::new(start, end), + }); + if self.maybe_consume(TokenKind::Comma).is_some() { + continue; + } + if self.peek_kind() == Some(TokenKind::RBrace) { + break; + } + } + let end = self.expect(TokenKind::RBrace)?.span.end; + Ok(Expr::Match(MatchExpr { + id: self.fresh_expr_id(), + expr: Box::new(expr), + arms, + span: Span::new(start, end), + match_span: match_token.span, + })) + } + + pub(super) fn parse_struct_literal( + &mut self, + path: Path, + type_args: Vec, + ) -> Result { + let start = path.span.start; + self.expect(TokenKind::LBrace)?; + let mut fields = Vec::new(); + if self.peek_kind() != Some(TokenKind::RBrace) { + loop { + let name = self.expect_ident()?; + self.expect(TokenKind::Colon)?; + let expr = self.parse_expr()?; + let end = expr.span().end; + fields.push(StructLiteralField { + name, + expr, + span: 
Span::new(start, end), + }); + if self.maybe_consume(TokenKind::Comma).is_some() { + if self.peek_kind() == Some(TokenKind::RBrace) { + break; + } + continue; + } + break; + } + } + let end = self.expect(TokenKind::RBrace)?.span.end; + Ok(Expr::StructLiteral(StructLiteralExpr { + id: self.fresh_expr_id(), + path, + type_args, + fields, + span: Span::new(start, end), + })) + } + + pub(super) fn finish_call( + &mut self, + callee: Expr, + type_args: Vec, + ) -> Result { + let start = callee.span().start; + self.expect(TokenKind::LParen)?; + let mut args = Vec::new(); + if self.peek_kind() != Some(TokenKind::RParen) { + loop { + args.push(self.parse_expr()?); + if self.maybe_consume(TokenKind::Comma).is_none() { + break; + } + } + } + let end = self.expect(TokenKind::RParen)?.span.end; + Ok(Expr::Call(CallExpr { + id: self.fresh_expr_id(), + callee: Box::new(callee), + type_args, + args, + span: Span::new(start, end), + })) + } +} diff --git a/capc/src/parser/items.rs b/capc/src/parser/items.rs new file mode 100644 index 0000000..6a78725 --- /dev/null +++ b/capc/src/parser/items.rs @@ -0,0 +1,448 @@ +use super::*; + +impl Parser { + pub(super) fn parse_use(&mut self) -> Result { + let start = self.expect(TokenKind::Use)?.span.start; + let path = self.parse_path()?; + let span = Span::new(start, path.span.end); + self.maybe_consume(TokenKind::Semi); + Ok(UseDecl { path, span }) + } + + pub(super) fn parse_item(&mut self, doc: Option) -> Result { + let mut is_pub = false; + let mut is_linear = false; + let mut is_copy = false; + let mut is_opaque = false; + let mut is_capability = false; + loop { + match self.peek_kind() { + Some(TokenKind::Pub) => { + if is_pub { + return Err(self.error_current("duplicate `pub` modifier".to_string())); + } + self.bump(); + is_pub = true; + } + Some(TokenKind::Linear) => { + if is_linear { + return Err(self.error_current("duplicate `linear` modifier".to_string())); + } + self.bump(); + is_linear = true; + } + Some(TokenKind::Copy) => { 
+ if is_copy { + return Err(self.error_current("duplicate `copy` modifier".to_string())); + } + self.bump(); + is_copy = true; + } + Some(TokenKind::Opaque) => { + if is_opaque { + return Err(self.error_current("duplicate `opaque` modifier".to_string())); + } + self.bump(); + is_opaque = true; + } + Some(TokenKind::Capability) => { + if is_capability { + return Err( + self.error_current("duplicate `capability` modifier".to_string()) + ); + } + self.bump(); + is_capability = true; + } + _ => break, + } + } + if is_linear && is_copy { + return Err( + self.error_current("cannot combine `linear` and `copy` modifiers".to_string()) + ); + } + if self.peek_kind() == Some(TokenKind::Extern) { + if is_opaque || is_linear || is_copy || is_capability { + return Err(self.error_current( + "linear/copy/opaque/capability applies only to struct declarations".to_string(), + )); + } + return Ok(Item::ExternFunction( + self.parse_extern_function(is_pub, doc)?, + )); + } + match self.peek_kind() { + Some(TokenKind::Fn) => { + if is_opaque || is_linear || is_copy || is_capability { + return Err(self.error_current( + "linear/copy/opaque/capability applies only to struct declarations" + .to_string(), + )); + } + Ok(Item::Function(self.parse_function(is_pub, doc)?)) + } + Some(TokenKind::Struct) => Ok(Item::Struct(self.parse_struct( + is_pub, + is_opaque, + is_linear, + is_copy, + is_capability, + doc, + )?)), + Some(TokenKind::Enum) => { + if is_opaque || is_linear || is_copy || is_capability { + return Err(self.error_current( + "linear/copy/opaque/capability applies only to struct declarations" + .to_string(), + )); + } + Ok(Item::Enum(self.parse_enum(is_pub, doc)?)) + } + Some(TokenKind::Trait) => { + if is_opaque || is_linear || is_copy || is_capability { + return Err(self.error_current( + "linear/copy/opaque/capability applies only to struct declarations" + .to_string(), + )); + } + Ok(Item::Trait(self.parse_trait(is_pub, doc)?)) + } + Some(TokenKind::Impl) => { + if is_pub { + 
return Err(self.error_current("impl blocks cannot be marked pub".to_string())); + } + if is_opaque || is_linear || is_copy || is_capability { + return Err(self.error_current( + "linear/copy/opaque/capability applies only to struct declarations" + .to_string(), + )); + } + Ok(Item::Impl(self.parse_impl_block(doc)?)) + } + Some(other) => Err(self.error_current(format!("expected item, found {other:?}"))), + None => Err(self.error_current("unexpected end of input".to_string())), + } + } + + pub(super) fn parse_impl_block( + &mut self, + impl_doc: Option, + ) -> Result { + let start = self.expect(TokenKind::Impl)?.span.start; + let type_params = self.parse_type_params()?; + let first_type = self.parse_type()?; + let (trait_path, target) = if self.maybe_consume(TokenKind::For).is_some() { + let trait_path = match first_type { + Type::Path { path, args, .. } => { + if !args.is_empty() { + return Err(self.error_current( + "trait impls do not support type arguments yet".to_string(), + )); + } + path + } + _ => return Err(self.error_current("trait impls require a trait name".to_string())), + }; + let target = self.parse_type()?; + (Some(trait_path), target) + } else { + (None, first_type) + }; + self.expect(TokenKind::LBrace)?; + let mut methods = Vec::new(); + while self.peek_kind() != Some(TokenKind::RBrace) { + let doc = self.take_doc_comments(); + let is_pub = self.maybe_consume(TokenKind::Pub).is_some(); + if self.peek_kind() != Some(TokenKind::Fn) { + return Err( + self.error_current("expected method declaration in impl block".to_string()) + ); + } + methods.push(self.parse_function(is_pub, doc)?); + } + let end = self.expect(TokenKind::RBrace)?.span.end; + Ok(ImplBlock { + target, + methods, + type_params, + trait_path, + doc: impl_doc, + span: Span::new(start, end), + }) + } + + pub(super) fn parse_extern_function( + &mut self, + is_pub: bool, + doc: Option, + ) -> Result { + let start = self.expect(TokenKind::Extern)?.span.start; + self.expect(TokenKind::Fn)?; + let 
name = self.expect_ident()?; + let type_params = self.parse_type_params()?; + self.expect(TokenKind::LParen)?; + let mut params = Vec::new(); + if self.peek_kind() != Some(TokenKind::RParen) { + loop { + let param_name = self.expect_ident()?; + self.expect(TokenKind::Colon)?; + let ty = self.parse_type()?; + params.push(Param { + name: param_name, + ty: Some(ty), + }); + if self.maybe_consume(TokenKind::Comma).is_some() { + continue; + } + break; + } + } + let rparen = self.expect(TokenKind::RParen)?; + let ret = if self.maybe_consume(TokenKind::Arrow).is_some() { + self.parse_type()? + } else { + unit_type_at(Span::new(rparen.span.end, rparen.span.end)) + }; + let end = self + .maybe_consume(TokenKind::Semi) + .map_or(ret.span().end, |t| t.span.end); + Ok(ExternFunction { + name, + type_params, + params, + ret, + is_pub, + doc, + span: Span::new(start, end), + }) + } + + pub(super) fn parse_function( + &mut self, + is_pub: bool, + doc: Option, + ) -> Result { + let start = self.expect(TokenKind::Fn)?.span.start; + let name = self.expect_ident()?; + let type_params = self.parse_type_params()?; + self.expect(TokenKind::LParen)?; + let mut params = Vec::new(); + if self.peek_kind() != Some(TokenKind::RParen) { + loop { + let param_name = self.expect_ident()?; + let ty = if self.maybe_consume(TokenKind::Colon).is_some() { + Some(self.parse_type()?) + } else if param_name.item == "self" { + None + } else { + return Err(self.error_current("expected ':' after parameter name".to_string())); + }; + params.push(Param { + name: param_name, + ty, + }); + if self.maybe_consume(TokenKind::Comma).is_none() { + break; + } + } + } + let rparen = self.expect(TokenKind::RParen)?; + let ret = if self.maybe_consume(TokenKind::Arrow).is_some() { + self.parse_type()? 
+ } else { + unit_type_at(Span::new(rparen.span.end, rparen.span.end)) + }; + let body = self.parse_block()?; + let span = Span::new(start, body.span.end); + Ok(Function { + name, + type_params, + params, + ret, + body, + is_pub, + doc, + span, + }) + } + + pub(super) fn parse_trait( + &mut self, + is_pub: bool, + doc: Option, + ) -> Result { + let start = self.expect(TokenKind::Trait)?.span.start; + let name = self.expect_ident()?; + let type_params = self.parse_type_params()?; + self.expect(TokenKind::LBrace)?; + let mut methods = Vec::new(); + while self.peek_kind() != Some(TokenKind::RBrace) { + let doc = self.take_doc_comments(); + if self.maybe_consume(TokenKind::Pub).is_some() { + return Err(self.error_current("trait methods cannot be marked pub".to_string())); + } + methods.push(self.parse_trait_method(doc)?); + } + let end = self.expect(TokenKind::RBrace)?.span.end; + Ok(TraitDecl { + name, + type_params, + methods, + is_pub, + doc, + span: Span::new(start, end), + }) + } + + pub(super) fn parse_trait_method( + &mut self, + doc: Option, + ) -> Result { + let start = self.expect(TokenKind::Fn)?.span.start; + let name = self.expect_ident()?; + let type_params = self.parse_type_params()?; + self.expect(TokenKind::LParen)?; + let mut params = Vec::new(); + if self.peek_kind() != Some(TokenKind::RParen) { + loop { + let param_name = self.expect_ident()?; + let ty = if self.maybe_consume(TokenKind::Colon).is_some() { + Some(self.parse_type()?) + } else if param_name.item == "self" { + None + } else { + return Err(self.error_current("expected ':' after parameter name".to_string())); + }; + params.push(Param { + name: param_name, + ty, + }); + if self.maybe_consume(TokenKind::Comma).is_none() { + break; + } + } + } + let rparen = self.expect(TokenKind::RParen)?; + let ret = if self.maybe_consume(TokenKind::Arrow).is_some() { + self.parse_type()? 
+ } else { + unit_type_at(Span::new(rparen.span.end, rparen.span.end)) + }; + let end = self.expect(TokenKind::Semi)?.span.end; + Ok(TraitMethod { + name, + type_params, + params, + ret, + doc, + span: Span::new(start, end), + }) + } + + pub(super) fn parse_struct( + &mut self, + is_pub: bool, + is_opaque: bool, + is_linear: bool, + is_copy: bool, + is_capability: bool, + doc: Option, + ) -> Result { + let start = self.expect(TokenKind::Struct)?.span.start; + let name = self.expect_ident()?; + let type_params = self.parse_type_params()?; + let mut fields = Vec::new(); + let end = if self.peek_kind() == Some(TokenKind::LBrace) { + self.bump(); + if self.peek_kind() != Some(TokenKind::RBrace) { + loop { + let field_name = self.expect_ident()?; + self.expect(TokenKind::Colon)?; + let ty = self.parse_type()?; + fields.push(Field { + name: field_name, + ty, + }); + if self.maybe_consume(TokenKind::Comma).is_none() { + break; + } + if self.peek_kind() == Some(TokenKind::RBrace) { + break; + } + } + } + let end = self.expect(TokenKind::RBrace)?.span.end; + if is_capability && !fields.is_empty() { + return Err(self.error_at( + Span::new(start, end), + "capability struct cannot declare fields".to_string(), + )); + } + end + } else { + name.span.end + }; + Ok(StructDecl { + name, + type_params, + fields, + is_pub, + is_opaque, + is_linear, + is_copy, + is_capability, + doc, + span: Span::new(start, end), + }) + } + + pub(super) fn parse_enum( + &mut self, + is_pub: bool, + doc: Option, + ) -> Result { + let start = self.expect(TokenKind::Enum)?.span.start; + let name = self.expect_ident()?; + let type_params = self.parse_type_params()?; + self.expect(TokenKind::LBrace)?; + let mut variants = Vec::new(); + if self.peek_kind() != Some(TokenKind::RBrace) { + loop { + let variant_name = self.expect_ident()?; + let variant_start = variant_name.span.start; + let payload = if self.peek_kind() == Some(TokenKind::LParen) { + self.bump(); + let ty = self.parse_type()?; + 
self.expect(TokenKind::RParen)?; + Some(ty) + } else { + None + }; + let end = payload + .as_ref() + .map_or(variant_name.span.end, |ty| ty.span().end); + variants.push(EnumVariant { + name: variant_name, + payload, + span: Span::new(variant_start, end), + }); + if self.maybe_consume(TokenKind::Comma).is_none() { + break; + } + if self.peek_kind() == Some(TokenKind::RBrace) { + break; + } + } + } + let end = self.expect(TokenKind::RBrace)?.span.end; + Ok(EnumDecl { + name, + type_params, + variants, + is_pub, + doc, + span: Span::new(start, end), + }) + } +} diff --git a/capc/src/parser/patterns.rs b/capc/src/parser/patterns.rs new file mode 100644 index 0000000..dfe0966 --- /dev/null +++ b/capc/src/parser/patterns.rs @@ -0,0 +1,142 @@ +use super::*; + +impl Parser { + pub(super) fn parse_pattern(&mut self) -> Result { + match self.peek_kind() { + Some(TokenKind::Int) => { + let token = self.bump().unwrap(); + let value = token.text.parse::().map_err(|_| { + self.error_at(token.span, "invalid integer literal".to_string()) + })?; + Ok(Pattern::Literal(Literal::Int(value))) + } + Some(TokenKind::Char) => { + let token = self.bump().unwrap(); + let value = unescape_char(&token.text).map_err(|message| { + self.error_at(token.span, format!("invalid char literal: {message}")) + })?; + Ok(Pattern::Literal(Literal::U8(value))) + } + Some(TokenKind::True) => { + self.bump(); + Ok(Pattern::Literal(Literal::Bool(true))) + } + Some(TokenKind::False) => { + self.bump(); + Ok(Pattern::Literal(Literal::Bool(false))) + } + Some(TokenKind::Ident) => { + let path = self.parse_path()?; + if self.peek_kind() == Some(TokenKind::LParen) { + let start = path.span.start; + self.bump(); + let binding = if self.peek_kind() == Some(TokenKind::Ident) { + Some(self.expect_ident()?) 
+ } else if self.peek_kind() == Some(TokenKind::Underscore) { + self.bump(); + None + } else { + None + }; + let end = self.expect(TokenKind::RParen)?.span.end; + Ok(Pattern::Call { + path, + binding, + span: Span::new(start, end), + }) + } else if path.segments.len() == 1 { + let name = &path.segments[0].item; + if name + .chars() + .next() + .map(|c| c.is_lowercase()) + .unwrap_or(false) + { + Ok(Pattern::Binding(path.segments.into_iter().next().unwrap())) + } else { + Ok(Pattern::Path(path)) + } + } else { + Ok(Pattern::Path(path)) + } + } + Some(TokenKind::Underscore) => { + let span = self.bump().unwrap().span; + Ok(Pattern::Wildcard(span)) + } + _ => Err(self.error_current("unexpected token in pattern".to_string())), + } + } + + pub(super) fn pattern_binding_ident(&self, pattern: &Pattern) -> Option { + match pattern { + Pattern::Binding(ident) => Some(ident.clone()), + Pattern::Call { + binding: Some(ident), + .. + } => Some(ident.clone()), + _ => None, + } + } + + pub(super) fn parse_path(&mut self) -> Result { + let first = self.expect_ident()?; + let start = first.span.start; + let mut segments = vec![first]; + while self.peek_kind() == Some(TokenKind::ColonColon) { + self.bump(); + segments.push(self.expect_ident()?); + } + let end = segments.last().map(|s| s.span.end).unwrap_or(start); + Ok(Path { + id: self.fresh_expr_id(), + segments, + span: Span::new(start, end), + }) + } + + pub(super) fn field_access_to_path( + &self, + field_access: &FieldAccessExpr, + ) -> Result { + let mut segments = Vec::new(); + + fn collect_segments(expr: &Expr, segments: &mut Vec) -> Option<()> { + match expr { + Expr::Path(path) => { + segments.extend(path.segments.clone()); + Some(()) + } + Expr::FieldAccess(fa) => { + collect_segments(&fa.object, segments)?; + segments.push(fa.field.clone()); + Some(()) + } + _ => None, + } + } + + collect_segments(&field_access.object, &mut segments).ok_or_else(|| { + self.error_at( + field_access.span, + "expected path or field 
access".to_string(), + ) + })?; + segments.push(field_access.field.clone()); + + let start = segments + .first() + .map(|s| s.span.start) + .unwrap_or(field_access.span.start); + let end = segments + .last() + .map(|s| s.span.end) + .unwrap_or(field_access.span.end); + + Ok(Path { + id: field_access.id, + segments, + span: Span::new(start, end), + }) + } +} diff --git a/capc/src/parser/stmts.rs b/capc/src/parser/stmts.rs new file mode 100644 index 0000000..0d279b2 --- /dev/null +++ b/capc/src/parser/stmts.rs @@ -0,0 +1,393 @@ +use super::*; + +impl Parser { + pub(super) fn parse_block(&mut self) -> Result { + let start = self.expect(TokenKind::LBrace)?.span.start; + let mut stmts = Vec::new(); + while self.peek_kind() != Some(TokenKind::RBrace) { + if self.peek_kind().is_none() { + return Err(self.error_current("unexpected end of input in block".to_string())); + } + stmts.push(self.parse_stmt()?); + } + let end = self.expect(TokenKind::RBrace)?.span.end; + Ok(Block { + stmts, + span: Span::new(start, end), + }) + } + + pub(super) fn parse_stmt(&mut self) -> Result { + match self.peek_kind() { + Some(TokenKind::Let) => self.parse_let(), + Some(TokenKind::Return) => Ok(Stmt::Return(self.parse_return()?)), + Some(TokenKind::Break) => Ok(Stmt::Break(self.parse_break()?)), + Some(TokenKind::Continue) => Ok(Stmt::Continue(self.parse_continue()?)), + Some(TokenKind::Defer) => Ok(Stmt::Defer(self.parse_defer()?)), + Some(TokenKind::Try) => self.parse_try_stmt(), + Some(TokenKind::If) => self.parse_if_stmt(), + Some(TokenKind::While) => Ok(Stmt::While(self.parse_while()?)), + Some(TokenKind::For) => self.parse_for_stmt(), + Some(TokenKind::Ident) => { + if self.peek_token(1).is_some_and(|t| t.kind == TokenKind::Eq) { + Ok(Stmt::Assign(self.parse_assign()?)) + } else { + Ok(Stmt::Expr(self.parse_expr_stmt()?)) + } + } + _ => Ok(Stmt::Expr(self.parse_expr_stmt()?)), + } + } + + pub(super) fn parse_let(&mut self) -> Result { + let let_token = self.expect(TokenKind::Let)?; + 
let start = let_token.span.start; + if self.peek_kind() == Some(TokenKind::Ident) + && self + .peek_token(1) + .is_some_and(|t| matches!(t.kind, TokenKind::Colon | TokenKind::Eq)) + { + let name = self.expect_ident()?; + let ty = if self.maybe_consume(TokenKind::Colon).is_some() { + Some(self.parse_type()?) + } else { + None + }; + self.expect(TokenKind::Eq)?; + let expr = self.parse_expr()?; + let end = self + .maybe_consume(TokenKind::Semi) + .map_or(expr.span().end, |t| t.span.end); + return Ok(Stmt::Let(LetStmt { + name, + ty, + expr, + span: Span::new(start, end), + })); + } + + let pattern = self.parse_pattern()?; + self.expect(TokenKind::Eq)?; + let expr = self.parse_expr()?; + self.expect(TokenKind::Else)?; + let else_block = self.parse_block()?; + if self.pattern_binding_ident(&pattern).is_none() { + return Err(self.error_at( + let_token.span, + "`let ... else` requires a binding pattern".to_string(), + )); + } + let end = self + .maybe_consume(TokenKind::Semi) + .map_or(else_block.span.end, |t| t.span.end); + Ok(Stmt::LetElse(LetElseStmt { + pattern, + expr, + else_block, + span: Span::new(start, end), + })) + } + + pub(super) fn parse_assign(&mut self) -> Result { + let name = self.expect_ident()?; + let start = name.span.start; + self.expect(TokenKind::Eq)?; + let expr = self.parse_expr()?; + let end = self + .maybe_consume(TokenKind::Semi) + .map_or(expr.span().end, |t| t.span.end); + Ok(AssignStmt { + name, + expr, + span: Span::new(start, end), + }) + } + + pub(super) fn parse_return(&mut self) -> Result { + let start = self.expect(TokenKind::Return)?.span.start; + let expr = match self.peek_kind() { + Some(TokenKind::Semi) | Some(TokenKind::RBrace) => None, + Some(_) => Some(self.parse_expr()?), + None => None, + }; + let end = self + .maybe_consume(TokenKind::Semi) + .map_or(self.peek_span().end, |t| t.span.end); + Ok(ReturnStmt { + expr, + span: Span::new(start, end), + }) + } + + pub(super) fn parse_try_stmt(&mut self) -> Result { + let try_token 
= self.expect(TokenKind::Try)?; + let start = try_token.span.start; + + if self.peek_kind() == Some(TokenKind::Let) { + self.bump(); + if !(self.peek_kind() == Some(TokenKind::Ident) + && self + .peek_token(1) + .is_some_and(|t| matches!(t.kind, TokenKind::Colon | TokenKind::Eq))) + { + return Err(self.error_at( + try_token.span, + "`try let` requires a plain binding name".to_string(), + )); + } + + let name = self.expect_ident()?; + let ty = if self.maybe_consume(TokenKind::Colon).is_some() { + Some(self.parse_type()?) + } else { + None + }; + self.expect(TokenKind::Eq)?; + let expr = self.parse_expr()?; + self.expect(TokenKind::Else)?; + let err_binding = if self.peek_kind() == Some(TokenKind::Ident) + && self + .peek_token(1) + .is_some_and(|t| t.kind == TokenKind::LBrace) + { + Some(self.expect_ident()?) + } else { + None + }; + let else_block = self.parse_block()?; + let end = self + .maybe_consume(TokenKind::Semi) + .map_or(else_block.span.end, |t| t.span.end); + return Ok(Stmt::TryLet(TryLetStmt { + name, + ty, + expr, + err_binding, + else_block, + span: Span::new(start, end), + })); + } + + let expr = self.parse_expr()?; + self.expect(TokenKind::Else)?; + let err_binding = if self.peek_kind() == Some(TokenKind::Ident) + && self + .peek_token(1) + .is_some_and(|t| t.kind == TokenKind::LBrace) + { + Some(self.expect_ident()?) 
+ } else { + None + }; + let else_block = self.parse_block()?; + let end = self + .maybe_consume(TokenKind::Semi) + .map_or(else_block.span.end, |t| t.span.end); + Ok(Stmt::TryElse(TryElseStmt { + expr, + err_binding, + else_block, + span: Span::new(start, end), + })) + } + + pub(super) fn parse_break(&mut self) -> Result { + let token = self.expect(TokenKind::Break)?; + let end = self + .maybe_consume(TokenKind::Semi) + .map_or(token.span.end, |t| t.span.end); + Ok(BreakStmt { + span: Span::new(token.span.start, end), + }) + } + + pub(super) fn parse_continue(&mut self) -> Result { + let token = self.expect(TokenKind::Continue)?; + let end = self + .maybe_consume(TokenKind::Semi) + .map_or(token.span.end, |t| t.span.end); + Ok(ContinueStmt { + span: Span::new(token.span.start, end), + }) + } + + pub(super) fn parse_defer(&mut self) -> Result { + let start = self.expect(TokenKind::Defer)?.span.start; + let expr = self.parse_expr()?; + let end = self + .maybe_consume(TokenKind::Semi) + .map_or(expr.span().end, |t| t.span.end); + Ok(DeferStmt { + expr, + span: Span::new(start, end), + }) + } + + pub(super) fn parse_if_stmt(&mut self) -> Result { + let if_token = self.expect(TokenKind::If)?; + let start = if_token.span.start; + let cond = self.parse_expr_no_struct()?; + let then_block = self.parse_block()?; + let else_block = if self.peek_kind() == Some(TokenKind::Else) { + self.bump(); + if self.peek_kind() == Some(TokenKind::If) { + let else_if = self.parse_if_stmt()?; + let span = else_if.span(); + Some(Block { + stmts: vec![else_if], + span, + }) + } else { + Some(self.parse_block()?) 
+ } + } else { + None + }; + let end = else_block + .as_ref() + .map_or(then_block.span.end, |b| b.span.end); + Ok(Stmt::If(IfStmt { + cond, + then_block, + else_block, + span: Span::new(start, end), + })) + } + + pub(super) fn parse_while(&mut self) -> Result { + let start = self.expect(TokenKind::While)?.span.start; + let cond = self.parse_expr_no_struct()?; + let body = self.parse_block()?; + let end = body.span.end; + Ok(WhileStmt { + cond, + body, + span: Span::new(start, end), + }) + } + + pub(super) fn parse_for_stmt(&mut self) -> Result { + let for_token = self.expect(TokenKind::For)?; + let start = for_token.span.start; + if self.peek_kind() == Some(TokenKind::LBrace) { + let body = self.parse_block()?; + let end = body.span.end; + let cond = Expr::Literal(LiteralExpr { + id: self.fresh_expr_id(), + value: Literal::Bool(true), + span: for_token.span, + }); + return Ok(Stmt::While(WhileStmt { + cond, + body, + span: Span::new(start, end), + })); + } + let first = self.expect_ident()?; + let second = if self.maybe_consume(TokenKind::Comma).is_some() { + Some(self.expect_ident()?) 
+ } else { + None + }; + self.expect(TokenKind::In)?; + let range_or_source = self.parse_expr_no_struct()?; + if self.maybe_consume(TokenKind::DotDot).is_some() { + if second.is_some() { + return Err(self.error_at( + first.span, + "range for loops accept only one binding".to_string(), + )); + } + let range_end = self.parse_range_bound()?; + let body = self.parse_block()?; + let end = body.span.end; + return Ok(Stmt::For(ForStmt { + var: first, + start: range_or_source, + end: range_end, + body, + span: Span::new(start, end), + })); + } + + let item = second.clone().unwrap_or_else(|| first.clone()); + let index = second.map(|_| first); + let body = self.parse_block()?; + Ok(Stmt::ForEach(ForEachStmt { + index, + item, + source: range_or_source, + span: Span::new(start, body.span.end), + body, + })) + } + + pub(super) fn parse_range_bound(&mut self) -> Result { + match self.peek_kind() { + Some(TokenKind::Int) => { + let token = self.bump().unwrap(); + let value = token.text.parse::().map_err(|_| { + self.error_at(token.span, "invalid integer literal".to_string()) + })?; + Ok(Expr::Literal(LiteralExpr { + id: self.fresh_expr_id(), + value: Literal::Int(value), + span: token.span, + })) + } + Some(TokenKind::True) => { + let token = self.bump().unwrap(); + Ok(Expr::Literal(LiteralExpr { + id: self.fresh_expr_id(), + value: Literal::Bool(true), + span: token.span, + })) + } + Some(TokenKind::False) => { + let token = self.bump().unwrap(); + Ok(Expr::Literal(LiteralExpr { + id: self.fresh_expr_id(), + value: Literal::Bool(false), + span: token.span, + })) + } + Some(TokenKind::Ident) => { + let first_ident = self.expect_ident()?; + let start = first_ident.span.start; + let mut segments = vec![first_ident]; + + while self.peek_kind() == Some(TokenKind::ColonColon) { + self.bump(); + let segment = self.expect_ident()?; + segments.push(segment); + } + + let end = segments.last().unwrap().span.end; + Ok(Expr::Path(Path { + id: self.fresh_expr_id(), + segments, + span: 
Span::new(start, end), + })) + } + Some(other) => Err(self.error_current(format!( + "expected integer or identifier in range bound, found {other:?}" + ))), + None => Err(self.error_current("unexpected end of input".to_string())), + } + } + + pub(super) fn parse_expr_stmt(&mut self) -> Result { + let expr = self.parse_expr()?; + if self.peek_kind() == Some(TokenKind::Else) { + return Err(self.error_current("`expr else` now requires a leading `try`".to_string())); + } + let expr_span = expr.span(); + let end = self + .maybe_consume(TokenKind::Semi) + .map_or(expr_span.end, |t| t.span.end); + Ok(ExprStmt { + expr, + span: Span::new(expr_span.start, end), + }) + } +} diff --git a/capc/src/parser/types.rs b/capc/src/parser/types.rs new file mode 100644 index 0000000..95b78f2 --- /dev/null +++ b/capc/src/parser/types.rs @@ -0,0 +1,89 @@ +use super::*; + +impl Parser { + pub(super) fn parse_type(&mut self) -> Result { + if self.peek_kind() == Some(TokenKind::Star) { + let start = self.bump().unwrap().span.start; + let target = self.parse_type()?; + let span = Span::new(start, target.span().end); + return Ok(Type::Ptr { + target: Box::new(target), + span, + }); + } + if self.peek_kind() == Some(TokenKind::Ampersand) { + let start = self.bump().unwrap().span.start; + let target = self.parse_type()?; + let span = Span::new(start, target.span().end); + return Ok(Type::Ref { + target: Box::new(target), + span, + }); + } + + let path = self.parse_path()?; + let mut args = Vec::new(); + let mut end = path.span.end; + if self.peek_kind() == Some(TokenKind::Lt) { + self.bump(); + if self.peek_kind() != Some(TokenKind::Gt) { + loop { + args.push(self.parse_type()?); + if self.maybe_consume(TokenKind::Comma).is_none() { + break; + } + } + } + end = self.expect(TokenKind::Gt)?.span.end; + } + let span = Span::new(path.span.start, end); + Ok(Type::Path { path, args, span }) + } + + pub(super) fn parse_type_params(&mut self) -> Result, ParseError> { + if self.peek_kind() != 
Some(TokenKind::Lt) { + return Ok(Vec::new()); + } + self.bump(); + let mut params = Vec::new(); + if self.peek_kind() != Some(TokenKind::Gt) { + loop { + let name = self.expect_ident()?; + let mut bounds = Vec::new(); + if self.maybe_consume(TokenKind::Colon).is_some() { + loop { + let bound = self.parse_path()?; + bounds.push(bound); + if self.maybe_consume(TokenKind::Plus).is_none() { + break; + } + } + } + params.push(TypeParam { name, bounds }); + if self.maybe_consume(TokenKind::Comma).is_none() { + break; + } + } + } + self.expect(TokenKind::Gt)?; + Ok(params) + } + + pub(super) fn parse_type_args(&mut self) -> Result, ParseError> { + if self.peek_kind() != Some(TokenKind::Lt) { + return Ok(Vec::new()); + } + self.bump(); + let mut args = Vec::new(); + if self.peek_kind() != Some(TokenKind::Gt) { + loop { + args.push(self.parse_type()?); + if self.maybe_consume(TokenKind::Comma).is_none() { + break; + } + } + } + self.expect(TokenKind::Gt)?; + Ok(args) + } +} diff --git a/capc/src/typeck/check.rs b/capc/src/typeck/check.rs index 54dd183..2c3266d 100644 --- a/capc/src/typeck/check.rs +++ b/capc/src/typeck/check.rs @@ -1,3 +1,8 @@ +mod calls; +mod match_check; +mod stmt; +mod type_params; + use std::collections::{HashMap, HashSet}; use crate::ast::*; @@ -14,6 +19,13 @@ use super::{ ensure_linear_all_consumed, ensure_linear_scope_consumed, ensure_linear_scopes_consumed_from, merge_branch_states, merge_match_states, stmt_is_total, }; +use calls::{check_call_expr, check_method_call_expr}; +use match_check::{check_match_expr_value, check_match_stmt}; +use stmt::{check_block, check_stmt}; +use type_params::{ + build_call_substitution, build_type_substitution, enforce_type_param_bounds, + lower_type_args, match_type_params, substitute_type, +}; /// Optional recorder for expression types during checking. 
pub(super) struct TypeRecorder<'a> { @@ -37,72 +49,6 @@ fn record_expr_type(recorder: &mut TypeRecorder, expr: &Expr, ty: Ty) -> Result< Ok(ty) } -fn enforce_vec_method_constraints( - receiver_ty: &Ty, - method: &str, - span: Span, -) -> Result<(), TypeError> { - let base = match receiver_ty { - Ty::Ref(inner) | Ty::Ptr(inner) => inner.as_ref(), - _ => receiver_ty, - }; - let Ty::Path(name, args) = base else { - return Ok(()); - }; - if name != "Vec" && name != "sys.vec.Vec" { - return Ok(()); - } - if args.len() != 1 { - return Err(TypeError::new( - "Vec expects exactly one type argument".to_string(), - span, - )); - } - let elem = &args[0]; - let is_u8 = matches!(elem, Ty::Builtin(BuiltinType::U8)); - let is_i32 = matches!(elem, Ty::Builtin(BuiltinType::I32)); - let is_string = is_string_ty(elem); - let is_param = matches!(elem, Ty::Param(_)); - match method { - "as_slice" | "slice" | "extend_slice" | "to_string" => { - if !is_u8 { - return Err(TypeError::new( - format!("Vec<{elem:?}> does not support `{method}`"), - span, - )); - } - } - "capacity" | "reserve" | "shrink_to_fit" => { - if !is_u8 && !is_i32 && !is_string && !is_param { - return Err(TypeError::new( - format!("Vec<{elem:?}> does not support `{method}`"), - span, - )); - } - } - "filter" | "map_add" => { - if !is_u8 && !is_i32 { - return Err(TypeError::new( - format!("Vec<{elem:?}> does not support `{method}`"), - span, - )); - } - } - // set works for any type - "set" => {} - "join" => { - if !is_string { - return Err(TypeError::new( - format!("Vec<{elem:?}> does not support `{method}`"), - span, - )); - } - } - _ => {} - } - Ok(()) -} - /// Type-check a function body, including move/linear rules. pub(super) fn check_function( func: &Function, @@ -214,185 +160,136 @@ pub(super) fn check_function( Ok(()) } -/// Type-check a statement and update move state in the current scope. -fn check_stmt( - stmt: &Stmt, - ret_ty: &Ty, +/// Type-check an expression, applying move rules based on `use_mode`. 
+pub(super) fn check_expr( + expr: &Expr, functions: &HashMap, trait_map: &HashMap, trait_impls: &[TraitImplInfo], scopes: &mut Scopes, + use_mode: UseMode, recorder: &mut TypeRecorder, use_map: &UseMap, struct_map: &HashMap, enum_map: &HashMap, stdlib: &StdlibIndex, + ret_ty: &Ty, module_name: &str, type_params: &HashSet, type_param_bounds: &HashMap>, - in_loop: bool, -) -> Result<(), TypeError> { - let trusted_stdlib = module_name.starts_with("sys."); - match stmt { - Stmt::LetElse(_) - | Stmt::TryLet(_) - | Stmt::TryElse(_) - | Stmt::ForEach(_) => { - return Err(TypeError::new( - "internal error: desugaring did not lower high-level statement".to_string(), - stmt.span(), - )); - } - Stmt::Let(let_stmt) => { - if scopes.contains(&let_stmt.name.item) { - return Err(TypeError::new( - format!( - "variable shadowing is not allowed: `{}`", - let_stmt.name.item - ), - let_stmt.name.span, - )); - } - let annot_ref = let_stmt - .ty - .as_ref() - .is_some_and(|ty| matches!(ty, Type::Ref { .. })); - let expr_use_mode = if annot_ref { - UseMode::Read - } else { - UseMode::Move - }; - let expr_ty = if let Expr::Match(match_expr) = &let_stmt.expr { - let expr_ty = check_match_expr_value( - match_expr, - functions, - trait_map, - trait_impls, - scopes, - expr_use_mode, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - ret_ty, - module_name, - type_params, - type_param_bounds, - in_loop, - )?; - recorder.record(&let_stmt.expr, &expr_ty); - expr_ty - } else { - check_expr( - &let_stmt.expr, - functions, - trait_map, - trait_impls, - scopes, - expr_use_mode, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - ret_ty, - module_name, - type_params, - type_param_bounds, - )? - }; - let final_ty = if let Some(annot) = &let_stmt.ty { - if let Some(span) = type_contains_ref(annot) { - match annot { - Type::Ref { target, .. 
} => { - if type_contains_ref(target).is_some() { - return Err(TypeError::new( - "nested reference types are not allowed".to_string(), - span, - )); - } - } - _ => { - return Err(TypeError::new( - "reference types are only allowed as direct local types" - .to_string(), - span, - )); - } - } - } - let annot_ty = lower_type(annot, use_map, stdlib, type_params)?; - validate_type_args(&annot_ty, struct_map, enum_map, annot.span())?; - let matches_ref = if let Ty::Ref(inner) = &annot_ty { - &expr_ty == inner.as_ref() || &expr_ty == &annot_ty - } else { - false - }; - if annot_ty != expr_ty - && !matches_ref - && !matches!(expr_ty, Ty::Builtin(BuiltinType::Never)) - { - return Err(TypeError::new( - format!("type mismatch: expected {annot_ty:?}, found {expr_ty:?}"), - let_stmt.span, - )); - } - if matches!(annot_ty, Ty::Ref(_)) { - let Some((name, _span)) = leftmost_local_in_chain(&let_stmt.expr) else { - return Err(TypeError::new( - "reference locals must be initialized from a local value".to_string(), - let_stmt.expr.span(), - )); - }; - if !scopes.contains(name) { +) -> Result { + let ty = match expr { + Expr::Literal(lit) => match &lit.value { + Literal::Int(_) => Ok(Ty::Builtin(BuiltinType::I32)), + Literal::U8(_) => Ok(Ty::Builtin(BuiltinType::U8)), + Literal::String(_) => Ok(stdlib_string_ty(stdlib)), + Literal::Bool(_) => Ok(Ty::Builtin(BuiltinType::Bool)), + Literal::Unit => Ok(Ty::Builtin(BuiltinType::Unit)), + }, + Expr::Path(path) => { + if path.segments.len() == 1 { + let name = &path.segments[0].item; + if let Some(info) = scopes.lookup(name) { + let ty = info.ty.clone(); + let trusted_stdlib = module_name.starts_with("sys."); + if info.state == MoveState::Moved && !trusted_stdlib { return Err(TypeError::new( - "reference locals must be initialized from a local value".to_string(), - let_stmt.expr.span(), + format!("use of moved value `{name}`"), + path.segments[0].span, )); } + if !trusted_stdlib + && use_mode == UseMode::Move + && is_affine_type(&ty, 
struct_map, enum_map) + { + scopes.mark_moved(name, path.segments[0].span)?; + } + return record_expr_type(recorder, expr, ty); } - annot_ty - } else { - if matches!(expr_ty, Ty::Ref(_)) { - let Some((name, _span)) = leftmost_local_in_chain(&let_stmt.expr) else { - return Err(TypeError::new( - "reference locals must be initialized from a local value".to_string(), - let_stmt.expr.span(), - )); + } + if let Some(Ty::Path(enum_name, _)) = + resolve_enum_variant(path, use_map, enum_map, module_name) + { + if let Some(info) = enum_map.get(&enum_name) { + let ty = if info.type_params.is_empty() { + Ty::Path(enum_name, Vec::new()) + } else if let Ty::Path(ret_name, ret_args) = ret_ty { + if ret_name == &enum_name && ret_args.len() == info.type_params.len() { + Ty::Path(enum_name, ret_args.clone()) + } else { + Ty::Path( + enum_name, + vec![Ty::Builtin(BuiltinType::Unit); info.type_params.len()], + ) + } + } else { + Ty::Path( + enum_name, + vec![Ty::Builtin(BuiltinType::Unit); info.type_params.len()], + ) }; - if !scopes.contains(name) { - return Err(TypeError::new( - "reference locals must be initialized from a local value".to_string(), - let_stmt.expr.span(), - )); - } + return record_expr_type(recorder, expr, ty); } - expr_ty - }; - scopes.insert_local(let_stmt.name.item.clone(), final_ty); - } - Stmt::Assign(assign) => { - let Some(existing) = scopes.lookup(&assign.name.item) else { - return Err(TypeError::new( - format!("unknown identifier `{}`", assign.name.item), - assign.name.span, - )); - }; - let existing = existing.ty.clone(); - if matches!(existing, Ty::Ref(_)) { - return Err(TypeError::new( - "cannot assign to a reference local".to_string(), - assign.span, - )); } + Err(TypeError::new(format!("unknown value `{path}`"), path.span)) + } + Expr::Call(call) => check_call_expr( + expr, + call, + functions, + trait_map, + trait_impls, + scopes, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + 
), + Expr::MethodCall(method_call) => check_method_call_expr( + method_call, + functions, + trait_map, + trait_impls, + scopes, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + ), + Expr::StructLiteral(lit) => check_struct_literal( + lit, + functions, + trait_map, + trait_impls, + scopes, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + ), + Expr::Unary(unary) => { let expr_ty = check_expr( - &assign.expr, + &unary.expr, functions, trait_map, trait_impls, scopes, - UseMode::Move, + UseMode::Read, recorder, use_map, struct_map, @@ -403,31 +300,49 @@ fn check_stmt( type_params, type_param_bounds, )?; - if expr_ty != existing && !matches!(expr_ty, Ty::Builtin(BuiltinType::Never)) { - return Err(TypeError::new( - format!("assignment type mismatch: expected {existing:?}, found {expr_ty:?}"), - assign.span, - )); - } - scopes.assign(&assign.name.item, expr_ty); - } - Stmt::Defer(defer_stmt) => { - match &defer_stmt.expr { - Expr::Call(_) | Expr::MethodCall(_) => {} - _ => { - return Err(TypeError::new( - "defer expects a function or method call".to_string(), - defer_stmt.span, - )) + match unary.op { + UnaryOp::Neg => { + if expr_ty == Ty::Builtin(BuiltinType::I32) + || expr_ty == Ty::Builtin(BuiltinType::I64) + { + Ok(expr_ty) + } else { + Err(TypeError::new( + "unary - expects integer".to_string(), + unary.span, + )) + } + } + UnaryOp::BitNot => { + if is_numeric_type(&expr_ty) { + Ok(expr_ty) + } else { + Err(TypeError::new( + "unary ~ expects integer".to_string(), + unary.span, + )) + } + } + UnaryOp::Not => { + if expr_ty == Ty::Builtin(BuiltinType::Bool) { + Ok(expr_ty) + } else { + Err(TypeError::new( + "unary ! 
expects bool".to_string(), + unary.span, + )) + } } } - let _ = check_expr( - &defer_stmt.expr, + } + Expr::Binary(binary) => { + let left = check_expr( + &binary.left, functions, trait_map, trait_impls, scopes, - UseMode::Move, + UseMode::Read, recorder, use_map, struct_map, @@ -438,1356 +353,8 @@ fn check_stmt( type_params, type_param_bounds, )?; - } - Stmt::Return(ret_stmt) => { - let expr_ty = if let Some(expr) = &ret_stmt.expr { - check_expr( - expr, - functions, - trait_map, - trait_impls, - scopes, - UseMode::Move, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - ret_ty, - module_name, - type_params, - type_param_bounds, - )? - } else { - Ty::Builtin(BuiltinType::Unit) - }; - if &expr_ty != ret_ty { - if matches!(expr_ty, Ty::Builtin(BuiltinType::Never)) { - if !trusted_stdlib { - ensure_linear_all_consumed(scopes, struct_map, enum_map, ret_stmt.span)?; - } - return Ok(()); - } - return Err(TypeError::new( - format!("return type mismatch: expected {ret_ty:?}, found {expr_ty:?}"), - ret_stmt.span, - )); - } - if !trusted_stdlib { - ensure_linear_all_consumed(scopes, struct_map, enum_map, ret_stmt.span)?; - } - } - Stmt::Break(break_stmt) => { - if !in_loop { - return Err(TypeError::new( - "break statement outside of loop".to_string(), - break_stmt.span, - )); - } - let depth = scopes.current_loop_depth().ok_or_else(|| { - TypeError::new( - "break statement outside of loop".to_string(), - break_stmt.span, - ) - })?; - if !trusted_stdlib { - ensure_linear_scopes_consumed_from( - scopes, - depth, - struct_map, - enum_map, - break_stmt.span, - )?; - } - } - Stmt::Continue(continue_stmt) => { - if !in_loop { - return Err(TypeError::new( - "continue statement outside of loop".to_string(), - continue_stmt.span, - )); - } - let depth = scopes.current_loop_depth().ok_or_else(|| { - TypeError::new( - "continue statement outside of loop".to_string(), - continue_stmt.span, - ) - })?; - if !trusted_stdlib { - ensure_linear_scopes_consumed_from( - scopes, - 
depth, - struct_map, - enum_map, - continue_stmt.span, - )?; - } - } - Stmt::If(if_stmt) => { - let cond_ty = check_expr( - &if_stmt.cond, - functions, - trait_map, - trait_impls, - scopes, - UseMode::Read, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - ret_ty, - module_name, - type_params, - type_param_bounds, - )?; - if cond_ty != Ty::Builtin(BuiltinType::Bool) { - return Err(TypeError::new( - "if condition must be bool".to_string(), - if_stmt.cond.span(), - )); - } - let mut then_scopes = scopes.clone(); - check_block( - &if_stmt.then_block, - ret_ty, - functions, - trait_map, - trait_impls, - &mut then_scopes, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - module_name, - type_params, - type_param_bounds, - in_loop, - )?; - let mut else_scopes = scopes.clone(); - if let Some(block) = &if_stmt.else_block { - check_block( - block, - ret_ty, - functions, - trait_map, - trait_impls, - &mut else_scopes, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - module_name, - type_params, - type_param_bounds, - in_loop, - )?; - } - if !trusted_stdlib { - merge_branch_states( - scopes, - &then_scopes, - &else_scopes, - struct_map, - enum_map, - if_stmt.span, - )?; - } - } - Stmt::While(while_stmt) => { - let cond_ty = check_expr( - &while_stmt.cond, - functions, - trait_map, - trait_impls, - scopes, - UseMode::Read, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - ret_ty, - module_name, - type_params, - type_param_bounds, - )?; - if cond_ty != Ty::Builtin(BuiltinType::Bool) { - return Err(TypeError::new( - "while condition must be bool".to_string(), - while_stmt.cond.span(), - )); - } - let mut body_scopes = scopes.clone(); - body_scopes.push_loop(); - check_block( - &while_stmt.body, - ret_ty, - functions, - trait_map, - trait_impls, - &mut body_scopes, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - module_name, - type_params, - type_param_bounds, - true, // inside loop, break/continue allowed - )?; - 
body_scopes.pop_loop(); - if !trusted_stdlib { - ensure_affine_states_match( - scopes, - &body_scopes, - struct_map, - enum_map, - while_stmt.span, - )?; - } - } - Stmt::For(for_stmt) => { - // Check start expression - must be i32 - let start_ty = check_expr( - &for_stmt.start, - functions, - trait_map, - trait_impls, - scopes, - UseMode::Read, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - ret_ty, - module_name, - type_params, - type_param_bounds, - )?; - if start_ty != Ty::Builtin(BuiltinType::I32) { - return Err(TypeError::new( - "for loop range start must be i32".to_string(), - for_stmt.start.span(), - )); - } - - // Check end expression - must be i32 - let end_ty = check_expr( - &for_stmt.end, - functions, - trait_map, - trait_impls, - scopes, - UseMode::Read, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - ret_ty, - module_name, - type_params, - type_param_bounds, - )?; - if end_ty != Ty::Builtin(BuiltinType::I32) { - return Err(TypeError::new( - "for loop range end must be i32".to_string(), - for_stmt.end.span(), - )); - } - - // Create body scope with loop variable bound - let mut body_scopes = scopes.clone(); - body_scopes.push_scope(); - body_scopes.insert_local(for_stmt.var.item.clone(), Ty::Builtin(BuiltinType::I32)); - - body_scopes.push_loop(); - check_block( - &for_stmt.body, - ret_ty, - functions, - trait_map, - trait_impls, - &mut body_scopes, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - module_name, - type_params, - type_param_bounds, - true, // inside loop, break/continue allowed - )?; - body_scopes.pop_loop(); - - // Pop the loop variable scope before checking affine states - body_scopes.pop_scope(); - - if !trusted_stdlib { - ensure_affine_states_match( - scopes, - &body_scopes, - struct_map, - enum_map, - for_stmt.span, - )?; - } - } - Stmt::Expr(expr_stmt) => { - if let Expr::Match(match_expr) = &expr_stmt.expr { - let _ = check_match_stmt( - match_expr, - functions, - trait_map, - trait_impls, - 
scopes, - UseMode::Move, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - ret_ty, - module_name, - type_params, - type_param_bounds, - in_loop, - )?; - } else { - check_expr( - &expr_stmt.expr, - functions, - trait_map, - trait_impls, - scopes, - UseMode::Move, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - ret_ty, - module_name, - type_params, - type_param_bounds, - )?; - } - } - } - - Ok(()) -} - -/// Type-check a block with a fresh lexical scope. -fn check_block( - block: &Block, - ret_ty: &Ty, - functions: &HashMap, - trait_map: &HashMap, - trait_impls: &[TraitImplInfo], - scopes: &mut Scopes, - recorder: &mut TypeRecorder, - use_map: &UseMap, - struct_map: &HashMap, - enum_map: &HashMap, - stdlib: &StdlibIndex, - module_name: &str, - type_params: &HashSet, - type_param_bounds: &HashMap>, - in_loop: bool, -) -> Result<(), TypeError> { - scopes.push_scope(); - for stmt in &block.stmts { - check_stmt( - stmt, - ret_ty, - functions, - trait_map, - trait_impls, - scopes, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - module_name, - type_params, - type_param_bounds, - in_loop, - )?; - } - if !module_name.starts_with("sys.") { - ensure_linear_scope_consumed(scopes, struct_map, enum_map, block.span)?; - } - scopes.pop_scope(); - Ok(()) -} - -/// Type-check an expression, applying move rules based on `use_mode`. 
-pub(super) fn check_expr( - expr: &Expr, - functions: &HashMap, - trait_map: &HashMap, - trait_impls: &[TraitImplInfo], - scopes: &mut Scopes, - use_mode: UseMode, - recorder: &mut TypeRecorder, - use_map: &UseMap, - struct_map: &HashMap, - enum_map: &HashMap, - stdlib: &StdlibIndex, - ret_ty: &Ty, - module_name: &str, - type_params: &HashSet, - type_param_bounds: &HashMap>, -) -> Result { - let ty = match expr { - Expr::Literal(lit) => match &lit.value { - Literal::Int(_) => Ok(Ty::Builtin(BuiltinType::I32)), - Literal::U8(_) => Ok(Ty::Builtin(BuiltinType::U8)), - Literal::String(_) => Ok(stdlib_string_ty(stdlib)), - Literal::Bool(_) => Ok(Ty::Builtin(BuiltinType::Bool)), - Literal::Unit => Ok(Ty::Builtin(BuiltinType::Unit)), - }, - Expr::Path(path) => { - if path.segments.len() == 1 { - let name = &path.segments[0].item; - if let Some(info) = scopes.lookup(name) { - let ty = info.ty.clone(); - let trusted_stdlib = module_name.starts_with("sys."); - if info.state == MoveState::Moved && !trusted_stdlib { - return Err(TypeError::new( - format!("use of moved value `{name}`"), - path.segments[0].span, - )); - } - if !trusted_stdlib - && use_mode == UseMode::Move - && is_affine_type(&ty, struct_map, enum_map) - { - scopes.mark_moved(name, path.segments[0].span)?; - } - return record_expr_type(recorder, expr, ty); - } - } - if let Some(Ty::Path(enum_name, _)) = - resolve_enum_variant(path, use_map, enum_map, module_name) - { - if let Some(info) = enum_map.get(&enum_name) { - let ty = if info.type_params.is_empty() { - Ty::Path(enum_name, Vec::new()) - } else if let Ty::Path(ret_name, ret_args) = ret_ty { - if ret_name == &enum_name && ret_args.len() == info.type_params.len() { - Ty::Path(enum_name, ret_args.clone()) - } else { - Ty::Path( - enum_name, - vec![Ty::Builtin(BuiltinType::Unit); info.type_params.len()], - ) - } - } else { - Ty::Path( - enum_name, - vec![Ty::Builtin(BuiltinType::Unit); info.type_params.len()], - ) - }; - return record_expr_type(recorder, 
expr, ty); - } - } - Err(TypeError::new(format!("unknown value `{path}`"), path.span)) - } - Expr::Call(call) => { - let path = call.callee.to_path().ok_or_else(|| { - TypeError::new( - "call target must be a function path".to_string(), - call.callee.span(), - ) - })?; - - if path.segments.len() == 1 { - let name = &path.segments[0].item; - if name == "drop" { - if call.args.len() != 1 { - return Err(TypeError::new( - "drop expects exactly one argument".to_string(), - call.span, - )); - } - let _ = check_expr( - &call.args[0], - functions, - trait_map, - trait_impls, - scopes, - UseMode::Move, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - ret_ty, - module_name, - type_params, - type_param_bounds, - )?; - return record_expr_type(recorder, expr, Ty::Builtin(BuiltinType::Unit)); - } - if name == "panic" { - if !call.args.is_empty() { - return Err(TypeError::new( - "panic takes no arguments".to_string(), - call.span, - )); - } - // panic() is a diverging expression - it never returns. 
- return record_expr_type(recorder, expr, Ty::Builtin(BuiltinType::Never)); - } - if name == "Ok" || name == "Err" { - if call.args.len() != 1 { - return Err(TypeError::new( - format!("{name} takes exactly one argument"), - call.span, - )); - } - let arg_ty = check_expr( - &call.args[0], - functions, - trait_map, - trait_impls, - scopes, - UseMode::Move, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - ret_ty, - module_name, - type_params, - type_param_bounds, - )?; - if let Ty::Path(ty_name, args) = ret_ty { - if ty_name == "sys.result.Result" && args.len() == 2 { - let expected = if name == "Ok" { &args[0] } else { &args[1] }; - if !ty_equivalent_for_set(&arg_ty, expected, type_params) { - return Err(TypeError::new( - format!("{name} argument type mismatch: expected {expected:?}, got {arg_ty:?}"), - call.args[0].span(), - )); - } - return record_expr_type(recorder, expr, ret_ty.clone()); - } - } - return record_expr_type( - recorder, - expr, - Ty::Path( - "sys.result.Result".to_string(), - if name == "Ok" { - vec![arg_ty, Ty::Builtin(BuiltinType::Unit)] - } else { - vec![Ty::Builtin(BuiltinType::Unit), arg_ty] - }, - ), - ); - } - } - - if let Some(Ty::Path(enum_name, _)) = - resolve_enum_variant(&path, use_map, enum_map, module_name) - { - let Some(info) = enum_map.get(&enum_name) else { - return Err(TypeError::new( - "unknown enum variant".to_string(), - call.span, - )); - }; - let variant = path - .segments - .last() - .map(|s| s.item.clone()) - .unwrap_or_else(|| "unknown".to_string()); - let payload = info.payloads.get(&variant).cloned().unwrap_or(None); - - if payload.is_none() && !call.args.is_empty() { - return Err(TypeError::new( - format!("{variant} takes no arguments"), - call.span, - )); - } - if payload.is_some() && call.args.len() != 1 { - return Err(TypeError::new( - format!("{variant} takes exactly one argument"), - call.span, - )); - } - - let mut inferred: HashMap = HashMap::new(); - let arg_ty = if let Some(payload_ty) = 
payload.clone() { - let arg_ty = check_expr( - &call.args[0], - functions, - trait_map, - trait_impls, - scopes, - UseMode::Move, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - ret_ty, - module_name, - type_params, - type_param_bounds, - )?; - if !infer_enum_args(&payload_ty, &arg_ty, &mut inferred) { - return Err(TypeError::new( - format!( - "variant argument type mismatch: expected {payload_ty:?}, got {arg_ty:?}" - ), - call.args[0].span(), - )); - } - Some(arg_ty) - } else { - None - }; - - let type_args = - resolve_enum_type_args(&enum_name, &info.type_params, &inferred, ret_ty); - - if let Some(payload_ty) = payload { - if let Some(arg_ty) = arg_ty { - if !enum_payload_matches( - &payload_ty, - &arg_ty, - &info.type_params, - &type_args, - ) { - return Err(TypeError::new( - "variant argument type mismatch".to_string(), - call.args[0].span(), - )); - } - } - } - - return record_expr_type(recorder, expr, Ty::Path(enum_name, type_args)); - } - - let resolved = resolve_path(&path, use_map); - let key = resolved.join("."); - - let sig = if let Some(sig) = functions.get(&key) { - sig - } else if resolved.len() == 1 { - let qualified = format!("{}.{}", module_name, key); - functions - .get(&qualified) - .ok_or_else(|| TypeError::new(format!("unknown function `{key}`"), path.span))? 
- } else { - return Err(TypeError::new( - format!("unknown function `{key}`"), - path.span, - )); - }; - if sig.module != module_name && !sig.is_pub { - return Err(TypeError::new( - format!("function `{}` is private", key), - call.span, - )); - } - let explicit_type_args = lower_type_args( - &call.type_args, - use_map, - stdlib, - struct_map, - enum_map, - type_params, - )?; - let subs = - build_call_substitution(sig, &explicit_type_args, HashMap::new(), call.span)?; - enforce_type_param_bounds(sig, &subs, trait_impls, call.span)?; - let instantiated_params: Vec = sig - .params - .iter() - .map(|ty| substitute_type(ty, &subs)) - .collect(); - let instantiated_ret = substitute_type(&sig.ret, &subs); - if instantiated_params.len() != call.args.len() { - return Err(TypeError::new( - format!( - "argument count mismatch: expected {}, found {}", - instantiated_params.len(), - call.args.len() - ), - call.span, - )); - } - for (arg, expected) in call.args.iter().zip(&instantiated_params) { - let (expected_inner, use_mode) = if let Ty::Ref(inner) = expected { - (inner.as_ref(), UseMode::Read) - } else { - (expected, UseMode::Move) - }; - let arg_ty = check_expr( - arg, - functions, - trait_map, - trait_impls, - scopes, - use_mode, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - ret_ty, - module_name, - type_params, - type_param_bounds, - )?; - if !matches!(expected, Ty::Ref(_)) && matches!(arg_ty, Ty::Ref(_)) { - return Err(TypeError::new( - "cannot pass a reference to a value parameter".to_string(), - arg.span(), - )); - } - let matches_ref = if let Ty::Ref(inner) = expected { - &arg_ty == inner.as_ref() || &arg_ty == expected - } else { - false - }; - if &arg_ty != expected_inner - && !matches_ref - && !matches!(arg_ty, Ty::Builtin(BuiltinType::Never)) - { - return Err(TypeError::new( - format!("argument type mismatch: expected {expected:?}, found {arg_ty:?}"), - arg.span(), - )); - } - } - Ok(instantiated_ret) - } - Expr::MethodCall(method_call) => { - fn 
get_leftmost_segment(expr: &Expr) -> Option<&str> { - match expr { - Expr::Path(path) if path.segments.len() == 1 => Some(&path.segments[0].item), - Expr::FieldAccess(fa) => get_leftmost_segment(&fa.object), - _ => None, - } - } - - let base_is_local = if let Some(base_name) = get_leftmost_segment(&method_call.receiver) - { - scopes.contains(base_name) - } else { - true - }; - - let path_call = method_call.receiver.to_path().map(|mut path| { - path.segments.push(method_call.method.clone()); - path.span = Span::new(path.span.start, method_call.method.span.end); - path - }); - - let is_function = if let Some(path) = &path_call { - let resolved = resolve_path(path, use_map); - let key = resolved.join("."); - functions.contains_key(&key) - } else { - false - }; - - if !base_is_local && is_function { - let path = path_call.expect("path exists for function call"); - let resolved = resolve_path(&path, use_map); - let key = resolved.join("."); - let sig = functions.get(&key).ok_or_else(|| { - TypeError::new(format!("unknown function `{key}`"), path.span) - })?; - if sig.module != module_name && !sig.is_pub { - return Err(TypeError::new( - format!("function `{key}` is private"), - method_call.span, - )); - } - let explicit_type_args = lower_type_args( - &method_call.type_args, - use_map, - stdlib, - struct_map, - enum_map, - type_params, - )?; - let subs = build_call_substitution( - sig, - &explicit_type_args, - HashMap::new(), - method_call.span, - )?; - enforce_type_param_bounds(sig, &subs, trait_impls, method_call.span)?; - let instantiated_params: Vec = sig - .params - .iter() - .map(|ty| substitute_type(ty, &subs)) - .collect(); - let instantiated_ret = substitute_type(&sig.ret, &subs); - if instantiated_params.len() != method_call.args.len() { - return Err(TypeError::new( - format!( - "argument count mismatch: expected {}, found {}", - instantiated_params.len(), - method_call.args.len() - ), - method_call.span, - )); - } - for (arg, expected) in 
method_call.args.iter().zip(&instantiated_params) { - let (expected_inner, use_mode) = if let Ty::Ref(inner) = expected { - (inner.as_ref(), UseMode::Read) - } else { - (expected, UseMode::Move) - }; - let arg_ty = check_expr( - arg, - functions, - trait_map, - trait_impls, - scopes, - use_mode, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - ret_ty, - module_name, - type_params, - type_param_bounds, - )?; - if !matches!(expected, Ty::Ref(_)) && matches!(arg_ty, Ty::Ref(_)) { - return Err(TypeError::new( - "cannot pass a reference to a value parameter".to_string(), - arg.span(), - )); - } - let matches_ref = if let Ty::Ref(inner) = expected { - &arg_ty == inner.as_ref() || &arg_ty == expected - } else { - false - }; - if &arg_ty != expected_inner - && !matches_ref - && !matches!(arg_ty, Ty::Builtin(BuiltinType::Never)) - { - return Err(TypeError::new( - format!( - "argument type mismatch: expected {expected:?}, found {arg_ty:?}" - ), - arg.span(), - )); - } - } - return record_expr_type(recorder, expr, instantiated_ret); - } - - let receiver_ty = check_expr( - &method_call.receiver, - functions, - trait_map, - trait_impls, - scopes, - UseMode::Read, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - ret_ty, - module_name, - type_params, - type_param_bounds, - )?; - enforce_vec_method_constraints( - &receiver_ty, - &method_call.method.item, - method_call.method.span, - )?; - let receiver_base = match &receiver_ty { - Ty::Ref(inner) | Ty::Ptr(inner) => inner.as_ref(), - _ => &receiver_ty, - }; - if let Ty::Param(param_name) = receiver_base { - let bounds = type_param_bounds - .get(param_name) - .cloned() - .unwrap_or_default(); - let mut candidates = Vec::new(); - for bound in bounds { - if let Some(info) = trait_map.get(&bound) { - if let Some(sig) = info.methods.get(&method_call.method.item) { - candidates.push((bound, sig)); - } - } - } - if candidates.is_empty() { - return Err(TypeError::new( - format!( - "no trait bound provides method 
`{}` for `{}`", - method_call.method.item, param_name - ), - method_call.span, - )); - } - if candidates.len() > 1 { - return Err(TypeError::new( - format!( - "ambiguous method `{}` for `{}`; multiple trait bounds apply", - method_call.method.item, param_name - ), - method_call.span, - )); - } - let (_trait_name, sig) = candidates.remove(0); - let mut inferred = HashMap::new(); - let expected_receiver = match &sig.params[0] { - Ty::Ref(inner) | Ty::Ptr(inner) => inner.as_ref(), - other => other, - }; - let actual_receiver = match &receiver_ty { - Ty::Ref(inner) | Ty::Ptr(inner) => inner.as_ref(), - other => other, - }; - match_type_params( - expected_receiver, - actual_receiver, - &mut inferred, - method_call.receiver.span(), - )?; - let explicit_type_args = lower_type_args( - &method_call.type_args, - use_map, - stdlib, - struct_map, - enum_map, - type_params, - )?; - let subs = - build_call_substitution(sig, &explicit_type_args, inferred, method_call.span)?; - enforce_type_param_bounds(sig, &subs, trait_impls, method_call.span)?; - let instantiated_params: Vec = sig - .params - .iter() - .map(|ty| substitute_type(ty, &subs)) - .collect(); - let instantiated_ret = substitute_type(&sig.ret, &subs); - if instantiated_params.len() != method_call.args.len() + 1 { - return Err(TypeError::new( - format!( - "argument count mismatch: expected {}, found {}", - instantiated_params.len() - 1, - method_call.args.len() - ), - method_call.span, - )); - } - if instantiated_params[0] != receiver_ty { - return Err(TypeError::new( - format!( - "method receiver type mismatch: expected {expected:?}, found {receiver_ty:?}", - expected = instantiated_params[0] - ), - method_call.receiver.span(), - )); - } - if !matches!(instantiated_params[0], Ty::Ref(_)) { - let _ = check_expr( - &method_call.receiver, - functions, - trait_map, - trait_impls, - scopes, - UseMode::Move, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - ret_ty, - module_name, - type_params, - 
type_param_bounds, - )?; - } - for (arg, expected) in method_call.args.iter().zip(&instantiated_params[1..]) { - let (expected_inner, use_mode) = if let Ty::Ref(inner) = expected { - (inner.as_ref(), UseMode::Read) - } else { - (expected, UseMode::Move) - }; - let arg_ty = check_expr( - arg, - functions, - trait_map, - trait_impls, - scopes, - use_mode, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - ret_ty, - module_name, - type_params, - type_param_bounds, - )?; - if !matches!(expected, Ty::Ref(_)) && matches!(arg_ty, Ty::Ref(_)) { - return Err(TypeError::new( - "cannot pass a reference to a value parameter".to_string(), - arg.span(), - )); - } - let matches_ref = if let Ty::Ref(inner) = expected { - &arg_ty == inner.as_ref() || &arg_ty == expected - } else { - false - }; - if &arg_ty != expected_inner - && !matches_ref - && !matches!(arg_ty, Ty::Builtin(BuiltinType::Never)) - { - return Err(TypeError::new( - format!( - "argument type mismatch: expected {expected:?}, found {arg_ty:?}" - ), - arg.span(), - )); - } - } - return record_expr_type(recorder, expr, instantiated_ret); - } - let (method_module, type_name, receiver_args) = resolve_method_target( - &receiver_ty, - module_name, - struct_map, - enum_map, - method_call.receiver.span(), - )?; - - // Build type-specific method name (e.g., Vec__u8__map_add for Vec.map_add()) - let type_arg_suffix = super::build_type_arg_suffix(&receiver_args); - - let base_method_fn = format!("{type_name}__{}", method_call.method.item); - let specific_method_fn = - format!("{type_name}{type_arg_suffix}__{}", method_call.method.item); - - // Try type-specific method first (e.g., Vec__u8__map_add), then generic (Vec__map_add) - let qualified_specific = format!("{method_module}.{specific_method_fn}"); - let qualified_base = format!("{method_module}.{base_method_fn}"); - - let key = if !type_arg_suffix.is_empty() && functions.contains_key(&qualified_specific) - { - qualified_specific - } else if 
functions.contains_key(&qualified_base) { - qualified_base - } else if method_module == module_name - && !type_arg_suffix.is_empty() - && functions.contains_key(&specific_method_fn) - { - specific_method_fn.clone() - } else if method_module == module_name && functions.contains_key(&base_method_fn) { - base_method_fn.clone() - } else { - return Err(TypeError::new( - format!("unknown method `{qualified_base}`"), - method_call.span, - )); - }; - let sig = functions.get(&key).ok_or_else(|| { - TypeError::new(format!("unknown method `{key}`"), method_call.span) - })?; - if sig.module != module_name && !sig.is_pub { - return Err(TypeError::new( - format!("method `{key}` is private"), - method_call.span, - )); - } - let mut inferred = HashMap::new(); - let expected_receiver = match &sig.params[0] { - Ty::Ref(inner) | Ty::Ptr(inner) => inner.as_ref(), - other => other, - }; - let actual_receiver = match &receiver_ty { - Ty::Ref(inner) | Ty::Ptr(inner) => inner.as_ref(), - other => other, - }; - let normalized_actual_receiver = match (expected_receiver, actual_receiver) { - (Ty::Path(expected_name, _), Ty::Path(actual_name, args)) - if !expected_name.contains('.') - && actual_name - .rsplit_once('.') - .map(|(_, t)| t == expected_name) - .unwrap_or(false) => - { - Ty::Path(expected_name.clone(), args.clone()) - } - _ => actual_receiver.clone(), - }; - match_type_params( - expected_receiver, - &normalized_actual_receiver, - &mut inferred, - method_call.receiver.span(), - )?; - let explicit_type_args = lower_type_args( - &method_call.type_args, - use_map, - stdlib, - struct_map, - enum_map, - type_params, - )?; - let subs = - build_call_substitution(sig, &explicit_type_args, inferred, method_call.span)?; - enforce_type_param_bounds(sig, &subs, trait_impls, method_call.span)?; - let instantiated_params: Vec = sig - .params - .iter() - .map(|ty| substitute_type(ty, &subs)) - .collect(); - let instantiated_ret = substitute_type(&sig.ret, &subs); - if instantiated_params.len() != 
method_call.args.len() + 1 { - return Err(TypeError::new( - format!( - "argument count mismatch: expected {}, found {}", - instantiated_params.len() - 1, - method_call.args.len() - ), - method_call.span, - )); - } - let receiver_base = match &receiver_ty { - Ty::Ref(inner) | Ty::Ptr(inner) => inner.as_ref().clone(), - _ => receiver_ty.clone(), - }; - let receiver_unqualified = Ty::Path(type_name.clone(), receiver_args); - let receiver_ref = Ty::Ref(Box::new(receiver_base.clone())); - let receiver_ref_unqualified = Ty::Ref(Box::new(receiver_unqualified.clone())); - let receiver_ptr = Ty::Ptr(Box::new(receiver_base.clone())); - let receiver_ptr_unqualified = Ty::Ptr(Box::new(receiver_unqualified.clone())); - let expected_qualified = match &instantiated_params[0] { - Ty::Path(name, args) if !name.contains('.') => { - Some(Ty::Path(format!("{method_module}.{name}"), args.clone())) - } - _ => None, - }; - let expected_ref_qualified = expected_qualified - .as_ref() - .map(|ty| Ty::Ref(Box::new(ty.clone()))); - let expected_ptr_qualified = expected_qualified - .as_ref() - .map(|ty| Ty::Ptr(Box::new(ty.clone()))); - - let expects_ref = matches!(instantiated_params[0], Ty::Ref(_)); - let expects_ptr = matches!(instantiated_params[0], Ty::Ptr(_)); - - if matches!(receiver_ty, Ty::Ref(_)) && !expects_ref { - return Err(TypeError::new( - "cannot use a reference receiver where a value is expected".to_string(), - method_call.receiver.span(), - )); - } - if matches!(receiver_ty, Ty::Ptr(_)) && !expects_ptr { - return Err(TypeError::new( - "cannot use a pointer receiver where a value is expected".to_string(), - method_call.receiver.span(), - )); - } - - if instantiated_params[0] != receiver_ty - && expected_qualified.as_ref() != Some(&receiver_ty) - && instantiated_params[0] != receiver_unqualified - && instantiated_params[0] != receiver_ref - && expected_ref_qualified.as_ref() != Some(&receiver_ref) - && instantiated_params[0] != receiver_ref_unqualified - && 
instantiated_params[0] != receiver_ptr - && expected_ptr_qualified.as_ref() != Some(&receiver_ptr) - && instantiated_params[0] != receiver_ptr_unqualified - { - return Err(TypeError::new( - format!( - "method receiver type mismatch: expected {expected:?}, found {receiver_ty:?}", - expected = instantiated_params[0] - ), - method_call.receiver.span(), - )); - } - if instantiated_params[0] != receiver_ref - && instantiated_params[0] != receiver_ref_unqualified - { - let _ = check_expr( - &method_call.receiver, - functions, - trait_map, - trait_impls, - scopes, - UseMode::Move, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - ret_ty, - module_name, - type_params, - type_param_bounds, - )?; - } - for (arg, expected) in method_call.args.iter().zip(&instantiated_params[1..]) { - let (expected_inner, use_mode) = if let Ty::Ref(inner) = expected { - (inner.as_ref(), UseMode::Read) - } else { - (expected, UseMode::Move) - }; - let arg_ty = check_expr( - arg, - functions, - trait_map, - trait_impls, - scopes, - use_mode, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - ret_ty, - module_name, - type_params, - type_param_bounds, - )?; - if !matches!(expected, Ty::Ref(_)) && matches!(arg_ty, Ty::Ref(_)) { - return Err(TypeError::new( - "cannot pass a reference to a value parameter".to_string(), - arg.span(), - )); - } - let matches_ref = if let Ty::Ref(inner) = expected { - &arg_ty == inner.as_ref() || &arg_ty == expected - } else { - false - }; - if &arg_ty != expected_inner - && !matches_ref - && !matches!(arg_ty, Ty::Builtin(BuiltinType::Never)) - { - return Err(TypeError::new( - format!("argument type mismatch: expected {expected:?}, found {arg_ty:?}"), - arg.span(), - )); - } - } - Ok(instantiated_ret) - } - Expr::StructLiteral(lit) => check_struct_literal( - lit, - functions, - trait_map, - trait_impls, - scopes, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - ret_ty, - module_name, - type_params, - type_param_bounds, - ), - 
Expr::Unary(unary) => { - let expr_ty = check_expr( - &unary.expr, - functions, - trait_map, - trait_impls, - scopes, - UseMode::Read, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - ret_ty, - module_name, - type_params, - type_param_bounds, - )?; - match unary.op { - UnaryOp::Neg => { - if expr_ty == Ty::Builtin(BuiltinType::I32) - || expr_ty == Ty::Builtin(BuiltinType::I64) - { - Ok(expr_ty) - } else { - Err(TypeError::new( - "unary - expects integer".to_string(), - unary.span, - )) - } - } - UnaryOp::BitNot => { - if is_numeric_type(&expr_ty) { - Ok(expr_ty) - } else { - Err(TypeError::new( - "unary ~ expects integer".to_string(), - unary.span, - )) - } - } - UnaryOp::Not => { - if expr_ty == Ty::Builtin(BuiltinType::Bool) { - Ok(expr_ty) - } else { - Err(TypeError::new( - "unary ! expects bool".to_string(), - unary.span, - )) - } - } - } - } - Expr::Binary(binary) => { - let left = check_expr( - &binary.left, - functions, - trait_map, - trait_impls, - scopes, - UseMode::Read, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - ret_ty, - module_name, - type_params, - type_param_bounds, - )?; - let right = check_expr( - &binary.right, + let right = check_expr( + &binary.right, functions, trait_map, trait_impls, @@ -2138,476 +705,89 @@ pub(super) fn check_expr( )?; // Type check the index (must be i32) - let index_ty = check_expr( - &index_expr.index, - functions, - trait_map, - trait_impls, - scopes, - UseMode::Read, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - ret_ty, - module_name, - type_params, - type_param_bounds, - )?; - - if index_ty != Ty::Builtin(BuiltinType::I32) { - return Err(TypeError::new( - format!("index must be i32, found {:?}", index_ty), - index_expr.index.span(), - )); - } - - // Determine element type based on object type - match &object_ty { - ty if is_string_ty(ty) => Ok(Ty::Builtin(BuiltinType::U8)), - Ty::Path(name, args) if name == "Slice" || name == "sys.buffer.Slice" => { - if args.len() != 1 { - 
return Err(TypeError::new( - "Slice requires exactly one type argument".to_string(), - index_expr.span, - )); - } - if args[0] != Ty::Builtin(BuiltinType::U8) { - return Err(TypeError::new( - "Slice indexing is only supported for Slice".to_string(), - index_expr.span, - )); - } - Ok(Ty::Builtin(BuiltinType::U8)) - } - Ty::Path(name, args) if name == "MutSlice" || name == "sys.buffer.MutSlice" => { - if args.len() != 1 { - return Err(TypeError::new( - "MutSlice requires exactly one type argument".to_string(), - index_expr.span, - )); - } - if args[0] != Ty::Builtin(BuiltinType::U8) { - return Err(TypeError::new( - "MutSlice indexing is only supported for MutSlice".to_string(), - index_expr.span, - )); - } - Ok(Ty::Builtin(BuiltinType::U8)) - } - // Vec types return Result - Ty::Path(name, args) if name == "Vec" || name == "sys.vec.Vec" => { - if args.len() != 1 { - return Err(TypeError::new( - "Vec expects exactly one type argument".to_string(), - index_expr.span, - )); - } - Ok(Ty::Path( - "sys.result.Result".to_string(), - vec![ - args[0].clone(), - Ty::Path("sys.vec.VecErr".to_string(), vec![]), - ], - )) - } - _ => Err(TypeError::new( - format!("cannot index into type {:?}; only string, Slice[T], MutSlice[T], and Vec types are indexable", object_ty), - index_expr.span, - )), - } - } - }?; - recorder.record(expr, &ty); - Ok(ty) -} - -/// Check a statement-form match (arms may return, no value required). 
-fn check_match_stmt( - match_expr: &MatchExpr, - functions: &HashMap, - trait_map: &HashMap, - trait_impls: &[TraitImplInfo], - scopes: &mut Scopes, - scrutinee_mode: UseMode, - recorder: &mut TypeRecorder, - use_map: &UseMap, - struct_map: &HashMap, - enum_map: &HashMap, - stdlib: &StdlibIndex, - ret_ty: &Ty, - module_name: &str, - type_params: &HashSet, - type_param_bounds: &HashMap>, - in_loop: bool, -) -> Result { - let match_ty = check_expr( - &match_expr.expr, - functions, - trait_map, - trait_impls, - scopes, - scrutinee_mode, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - ret_ty, - module_name, - type_params, - type_param_bounds, - )?; - let mut arm_scopes = Vec::with_capacity(match_expr.arms.len()); - for arm in &match_expr.arms { - let mut arm_scope = scopes.clone(); - arm_scope.push_scope(); - bind_pattern( - &arm.pattern, - &match_ty, - &mut arm_scope, - use_map, - enum_map, - module_name, - )?; - check_block( - &arm.body, - ret_ty, - functions, - trait_map, - trait_impls, - &mut arm_scope, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - module_name, - type_params, - type_param_bounds, - in_loop, - )?; - arm_scope.pop_scope(); - arm_scopes.push(arm_scope); - } - check_match_exhaustive( - &match_ty, - &match_expr.arms, - use_map, - enum_map, - module_name, - match_expr.match_span, - )?; - if !module_name.starts_with("sys.") { - merge_match_states(scopes, &arm_scopes, struct_map, enum_map, match_expr.span)?; - } - Ok(Ty::Builtin(BuiltinType::Unit)) -} - -/// Check an expression-form match (all arms must evaluate to the same type). 
-fn check_match_expr_value( - match_expr: &MatchExpr, - functions: &HashMap, - trait_map: &HashMap, - trait_impls: &[TraitImplInfo], - scopes: &mut Scopes, - scrutinee_mode: UseMode, - recorder: &mut TypeRecorder, - use_map: &UseMap, - struct_map: &HashMap, - enum_map: &HashMap, - stdlib: &StdlibIndex, - ret_ty: &Ty, - module_name: &str, - type_params: &HashSet, - type_param_bounds: &HashMap>, - in_loop: bool, -) -> Result { - let match_ty = check_expr( - &match_expr.expr, - functions, - trait_map, - trait_impls, - scopes, - scrutinee_mode, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - ret_ty, - module_name, - type_params, - type_param_bounds, - )?; - let mut result_ty: Option = None; - let mut arm_scopes = Vec::with_capacity(match_expr.arms.len()); - for arm in &match_expr.arms { - let mut arm_scope = scopes.clone(); - arm_scope.push_scope(); - bind_pattern( - &arm.pattern, - &match_ty, - &mut arm_scope, - use_map, - enum_map, - module_name, - )?; - let arm_ty = check_match_arm_value( - &arm.body, - functions, - trait_map, - trait_impls, - &mut arm_scope, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - ret_ty, - module_name, - type_params, - type_param_bounds, - in_loop, - )?; - arm_scope.pop_scope(); - let arm_continues = !matches!(arm_ty, Ty::Builtin(BuiltinType::Never)); - if arm_continues { - arm_scopes.push(arm_scope); - } - if let Some(prev) = &result_ty { - if matches!(prev, Ty::Builtin(BuiltinType::Never)) { - result_ty = Some(arm_ty); - } else if matches!(arm_ty, Ty::Builtin(BuiltinType::Never)) { - // Keep the previous type; never can coerce to any type. 
- } else if prev != &arm_ty { + let index_ty = check_expr( + &index_expr.index, + functions, + trait_map, + trait_impls, + scopes, + UseMode::Read, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + )?; + + if index_ty != Ty::Builtin(BuiltinType::I32) { return Err(TypeError::new( - format!("match arm type mismatch: expected {prev:?}, found {arm_ty:?}"), - arm.body.span, + format!("index must be i32, found {:?}", index_ty), + index_expr.index.span(), )); } - } else { - result_ty = Some(arm_ty); - } - } - check_match_exhaustive( - &match_ty, - &match_expr.arms, - use_map, - enum_map, - module_name, - match_expr.match_span, - )?; - if !module_name.starts_with("sys.") && !arm_scopes.is_empty() { - merge_match_states(scopes, &arm_scopes, struct_map, enum_map, match_expr.span)?; - } - Ok(result_ty.unwrap_or(Ty::Builtin(BuiltinType::Unit))) -} - -/// Check a single match arm in expression context. -fn check_match_arm_value( - block: &Block, - functions: &HashMap, - trait_map: &HashMap, - trait_impls: &[TraitImplInfo], - scopes: &mut Scopes, - recorder: &mut TypeRecorder, - use_map: &UseMap, - struct_map: &HashMap, - enum_map: &HashMap, - stdlib: &StdlibIndex, - ret_ty: &Ty, - module_name: &str, - type_params: &HashSet, - type_param_bounds: &HashMap>, - in_loop: bool, -) -> Result { - let Some((last, prefix)) = block.stmts.split_last() else { - return Err(TypeError::new( - "match arm must end with expression".to_string(), - block.span, - )); - }; - for stmt in prefix { - check_stmt( - stmt, - ret_ty, - functions, - trait_map, - trait_impls, - scopes, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - module_name, - type_params, - type_param_bounds, - in_loop, - )?; - } - match last { - Stmt::Expr(expr_stmt) => check_expr( - &expr_stmt.expr, - functions, - trait_map, - trait_impls, - scopes, - UseMode::Move, - recorder, - use_map, - struct_map, - enum_map, - stdlib, - ret_ty, - module_name, - 
type_params, - type_param_bounds, - ), - _ => Err(TypeError::new( - "match arm must end with expression".to_string(), - block.span, - )), - } -} - -fn check_match_exhaustive( - match_ty: &Ty, - arms: &[MatchArm], - use_map: &UseMap, - enum_map: &HashMap, - module_name: &str, - span: Span, -) -> Result<(), TypeError> { - if arms - .iter() - .any(|arm| matches!(arm.pattern, Pattern::Wildcard(_) | Pattern::Binding(_))) - { - return Ok(()); - } - match match_ty { - Ty::Builtin(BuiltinType::Bool) => { - let mut seen_true = false; - let mut seen_false = false; - for arm in arms { - if let Pattern::Literal(Literal::Bool(value)) = arm.pattern { - if value { - seen_true = true; - } else { - seen_false = true; + // Determine element type based on object type + match &object_ty { + ty if is_string_ty(ty) => Ok(Ty::Builtin(BuiltinType::U8)), + Ty::Path(name, args) if name == "Slice" || name == "sys.buffer.Slice" => { + if args.len() != 1 { + return Err(TypeError::new( + "Slice requires exactly one type argument".to_string(), + index_expr.span, + )); } - } - } - if seen_true && seen_false { - return Ok(()); - } - let mut missing = Vec::new(); - if !seen_true { - missing.push("true"); - } - if !seen_false { - missing.push("false"); - } - return Err(TypeError::new( - format!( - "non-exhaustive match on bool, missing: {}", - missing.join(", ") - ), - span, - )); - } - Ty::Path(name, args) if name == "sys.result.Result" && args.len() == 2 => { - let mut seen_ok = false; - let mut seen_err = false; - for arm in arms { - if let Pattern::Call { path, .. 
} = &arm.pattern { - if path.segments.len() == 1 { - let variant = path.segments[0].item.as_str(); - if variant == "Ok" { - seen_ok = true; - } else if variant == "Err" { - seen_err = true; - } + if args[0] != Ty::Builtin(BuiltinType::U8) { + return Err(TypeError::new( + "Slice indexing is only supported for Slice".to_string(), + index_expr.span, + )); } + Ok(Ty::Builtin(BuiltinType::U8)) } - } - if seen_ok && seen_err { - return Ok(()); - } - let mut missing = Vec::new(); - if !seen_ok { - missing.push("Ok"); - } - if !seen_err { - missing.push("Err"); - } - return Err(TypeError::new( - format!( - "non-exhaustive match on Result, missing: {}", - missing.join(", ") - ), - span, - )); - } - Ty::Path(name, _) => { - let info = enum_map.get(name).or_else(|| { - if name.contains('.') { - None - } else { - enum_map.get(&format!("{module_name}.{name}")) + Ty::Path(name, args) if name == "MutSlice" || name == "sys.buffer.MutSlice" => { + if args.len() != 1 { + return Err(TypeError::new( + "MutSlice requires exactly one type argument".to_string(), + index_expr.span, + )); + } + if args[0] != Ty::Builtin(BuiltinType::U8) { + return Err(TypeError::new( + "MutSlice indexing is only supported for MutSlice".to_string(), + index_expr.span, + )); + } + Ok(Ty::Builtin(BuiltinType::U8)) } - }); - let Some(info) = info else { - return Ok(()); - }; - let mut seen = HashSet::new(); - for arm in arms { - let path = match &arm.pattern { - Pattern::Path(path) => Some(path), - Pattern::Call { path, .. 
} => Some(path), - _ => None, - }; - if let Some(path) = path { - if let Some(ty) = resolve_enum_variant(path, use_map, enum_map, module_name) { - if same_type_constructor(&ty, match_ty) { - if let Some(seg) = path.segments.last() { - seen.insert(seg.item.clone()); - } - } + // Vec types return Result + Ty::Path(name, args) if name == "Vec" || name == "sys.vec.Vec" => { + if args.len() != 1 { + return Err(TypeError::new( + "Vec expects exactly one type argument".to_string(), + index_expr.span, + )); } + Ok(Ty::Path( + "sys.result.Result".to_string(), + vec![ + args[0].clone(), + Ty::Path("sys.vec.VecErr".to_string(), vec![]), + ], + )) } + _ => Err(TypeError::new( + format!("cannot index into type {:?}; only string, Slice[T], MutSlice[T], and Vec types are indexable", object_ty), + index_expr.span, + )), } - if info.variants.iter().all(|v| seen.contains(v)) { - return Ok(()); - } - let missing: Vec = info - .variants - .iter() - .filter(|v| !seen.contains(*v)) - .cloned() - .collect(); - return Err(TypeError::new( - format!( - "non-exhaustive match, missing variants: {}", - missing.join(", ") - ), - span, - )); } - _ => {} - } - - Ok(()) -} - -fn same_type_constructor(left: &Ty, right: &Ty) -> bool { - match (left, right) { - (Ty::Path(left_name, _), Ty::Path(right_name, _)) => left_name == right_name, - _ => left == right, - } + }?; + recorder.record(expr, &ty); + Ok(ty) } /// Check a struct literal and ensure all fields are present and typed. 
@@ -2730,207 +910,3 @@ fn check_struct_literal( Ok(Ty::Path(type_name, type_args)) } - -fn lower_type_args( - args: &[Type], - use_map: &UseMap, - stdlib: &StdlibIndex, - struct_map: &HashMap, - enum_map: &HashMap, - type_params: &HashSet, -) -> Result, TypeError> { - let mut out = Vec::with_capacity(args.len()); - for arg in args { - let ty = lower_type(arg, use_map, stdlib, type_params)?; - validate_type_args(&ty, struct_map, enum_map, arg.span())?; - out.push(ty); - } - Ok(out) -} - -fn build_type_substitution( - params: &[String], - args: &[Ty], - span: Span, -) -> Result, TypeError> { - if params.len() != args.len() { - return Err(TypeError::new( - format!( - "expected {} type argument(s), found {}", - params.len(), - args.len() - ), - span, - )); - } - let mut map = HashMap::new(); - for (param, arg) in params.iter().zip(args.iter()) { - map.insert(param.clone(), arg.clone()); - } - Ok(map) -} - -fn substitute_type(ty: &Ty, subs: &HashMap) -> Ty { - match ty { - Ty::Param(name) => subs.get(name).cloned().unwrap_or_else(|| ty.clone()), - Ty::Builtin(_) => ty.clone(), - Ty::Ptr(inner) => Ty::Ptr(Box::new(substitute_type(inner, subs))), - Ty::Ref(inner) => Ty::Ref(Box::new(substitute_type(inner, subs))), - Ty::Path(name, args) => Ty::Path( - name.clone(), - args.iter().map(|arg| substitute_type(arg, subs)).collect(), - ), - } -} - -fn match_type_params( - expected: &Ty, - actual: &Ty, - subs: &mut HashMap, - span: Span, -) -> Result<(), TypeError> { - match expected { - Ty::Param(name) => { - if let Some(existing) = subs.get(name) { - if existing != actual { - return Err(TypeError::new( - format!( - "conflicting type arguments for `{}`: {existing:?} vs {actual:?}", - name - ), - span, - )); - } - } else { - subs.insert(name.clone(), actual.clone()); - } - Ok(()) - } - Ty::Builtin(_) => { - if expected != actual { - return Err(TypeError::new( - format!("type mismatch: expected {expected:?}, found {actual:?}"), - span, - )); - } - Ok(()) - } - Ty::Ptr(inner) => 
match actual { - Ty::Ptr(actual_inner) => match_type_params(inner, actual_inner, subs, span), - _ => Err(TypeError::new( - format!("type mismatch: expected {expected:?}, found {actual:?}"), - span, - )), - }, - Ty::Ref(inner) => match actual { - Ty::Ref(actual_inner) => match_type_params(inner, actual_inner, subs, span), - _ => Err(TypeError::new( - format!("type mismatch: expected {expected:?}, found {actual:?}"), - span, - )), - }, - Ty::Path(name, args) => match actual { - Ty::Path(actual_name, actual_args) => { - if name != actual_name || args.len() != actual_args.len() { - return Err(TypeError::new( - format!("type mismatch: expected {expected:?}, found {actual:?}"), - span, - )); - } - for (arg, actual_arg) in args.iter().zip(actual_args.iter()) { - match_type_params(arg, actual_arg, subs, span)?; - } - Ok(()) - } - _ => Err(TypeError::new( - format!("type mismatch: expected {expected:?}, found {actual:?}"), - span, - )), - }, - } -} - -fn build_call_substitution( - sig: &FunctionSig, - explicit_args: &[Ty], - inferred: HashMap, - span: Span, -) -> Result, TypeError> { - if sig.type_params.is_empty() { - if !explicit_args.is_empty() { - return Err(TypeError::new( - format!( - "function does not accept type arguments (found {})", - explicit_args.len() - ), - span, - )); - } - return Ok(inferred); - } - - let mut subs = inferred; - let mut remaining = Vec::new(); - for name in &sig.type_params { - if !subs.contains_key(name) { - remaining.push(name.clone()); - } - } - if explicit_args.len() != remaining.len() { - return Err(TypeError::new( - format!( - "expected {} type argument(s), found {}", - remaining.len(), - explicit_args.len() - ), - span, - )); - } - for (name, arg) in remaining.into_iter().zip(explicit_args.iter()) { - subs.insert(name, arg.clone()); - } - Ok(subs) -} - -fn enforce_type_param_bounds( - sig: &FunctionSig, - subs: &HashMap, - trait_impls: &[TraitImplInfo], - span: Span, -) -> Result<(), TypeError> { - for (param, bounds) in 
&sig.type_param_bounds { - let Some(actual) = subs.get(param) else { - continue; - }; - for bound in bounds { - if type_satisfies_trait(actual, bound, trait_impls, span).is_err() { - return Err(TypeError::new( - format!("type parameter `{param}` does not implement `{bound}`"), - span, - )); - } - } - } - Ok(()) -} - -fn type_satisfies_trait( - actual: &Ty, - trait_name: &str, - trait_impls: &[TraitImplInfo], - span: Span, -) -> Result<(), TypeError> { - for impl_info in trait_impls { - if impl_info.trait_name != trait_name { - continue; - } - let mut subs = HashMap::new(); - if match_type_params(&impl_info.target_ty, actual, &mut subs, span).is_ok() { - return Ok(()); - } - } - Err(TypeError::new( - format!("type `{actual:?}` does not implement `{trait_name}`"), - span, - )) -} diff --git a/capc/src/typeck/check/calls.rs b/capc/src/typeck/check/calls.rs new file mode 100644 index 0000000..2c83ca8 --- /dev/null +++ b/capc/src/typeck/check/calls.rs @@ -0,0 +1,831 @@ +use super::*; + +fn enforce_vec_method_constraints( + receiver_ty: &Ty, + method: &str, + span: Span, +) -> Result<(), TypeError> { + let base = match receiver_ty { + Ty::Ref(inner) | Ty::Ptr(inner) => inner.as_ref(), + _ => receiver_ty, + }; + let Ty::Path(name, args) = base else { + return Ok(()); + }; + if name != "Vec" && name != "sys.vec.Vec" { + return Ok(()); + } + if args.len() != 1 { + return Err(TypeError::new( + "Vec expects exactly one type argument".to_string(), + span, + )); + } + let elem = &args[0]; + let is_u8 = matches!(elem, Ty::Builtin(BuiltinType::U8)); + let is_i32 = matches!(elem, Ty::Builtin(BuiltinType::I32)); + let is_string = is_string_ty(elem); + let is_param = matches!(elem, Ty::Param(_)); + match method { + "as_slice" | "slice" | "extend_slice" | "copy_string" => { + if !is_u8 { + return Err(TypeError::new( + format!("Vec<{elem:?}> does not support `{method}`"), + span, + )); + } + } + "capacity" | "reserve" | "shrink_to_fit" => { + if !is_u8 && !is_i32 && !is_string && 
!is_param { + return Err(TypeError::new( + format!("Vec<{elem:?}> does not support `{method}`"), + span, + )); + } + } + "filter" | "map_add" => { + if !is_u8 && !is_i32 { + return Err(TypeError::new( + format!("Vec<{elem:?}> does not support `{method}`"), + span, + )); + } + } + "set" => {} + "join" => { + if !is_string { + return Err(TypeError::new( + format!("Vec<{elem:?}> does not support `{method}`"), + span, + )); + } + } + _ => {} + } + Ok(()) +} + +fn check_argument_types( + args: &[Expr], + expected_types: &[Ty], + functions: &HashMap, + trait_map: &HashMap, + trait_impls: &[TraitImplInfo], + scopes: &mut Scopes, + recorder: &mut TypeRecorder, + use_map: &UseMap, + struct_map: &HashMap, + enum_map: &HashMap, + stdlib: &StdlibIndex, + ret_ty: &Ty, + module_name: &str, + type_params: &HashSet, + type_param_bounds: &HashMap>, +) -> Result<(), TypeError> { + if expected_types.len() != args.len() { + return Err(TypeError::new( + format!( + "argument count mismatch: expected {}, found {}", + expected_types.len(), + args.len() + ), + args.last().map_or(Span::new(0, 0), Expr::span), + )); + } + + for (arg, expected) in args.iter().zip(expected_types) { + let (expected_inner, use_mode) = if let Ty::Ref(inner) = expected { + (inner.as_ref(), UseMode::Read) + } else { + (expected, UseMode::Move) + }; + let arg_ty = check_expr( + arg, + functions, + trait_map, + trait_impls, + scopes, + use_mode, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + )?; + if !matches!(expected, Ty::Ref(_)) && matches!(arg_ty, Ty::Ref(_)) { + return Err(TypeError::new( + "cannot pass a reference to a value parameter".to_string(), + arg.span(), + )); + } + let matches_ref = if let Ty::Ref(inner) = expected { + &arg_ty == inner.as_ref() || &arg_ty == expected + } else { + false + }; + if &arg_ty != expected_inner + && !matches_ref + && !matches!(arg_ty, Ty::Builtin(BuiltinType::Never)) + { + return Err(TypeError::new( + 
format!("argument type mismatch: expected {expected:?}, found {arg_ty:?}"), + arg.span(), + )); + } + } + + Ok(()) +} + +pub(super) fn check_call_expr( + expr: &Expr, + call: &CallExpr, + functions: &HashMap, + trait_map: &HashMap, + trait_impls: &[TraitImplInfo], + scopes: &mut Scopes, + recorder: &mut TypeRecorder, + use_map: &UseMap, + struct_map: &HashMap, + enum_map: &HashMap, + stdlib: &StdlibIndex, + ret_ty: &Ty, + module_name: &str, + type_params: &HashSet, + type_param_bounds: &HashMap>, +) -> Result { + let path = call.callee.to_path().ok_or_else(|| { + TypeError::new( + "call target must be a function path".to_string(), + call.callee.span(), + ) + })?; + + if path.segments.len() == 1 { + let name = &path.segments[0].item; + if name == "drop" { + if call.args.len() != 1 { + return Err(TypeError::new( + "drop expects exactly one argument".to_string(), + call.span, + )); + } + let _ = check_expr( + &call.args[0], + functions, + trait_map, + trait_impls, + scopes, + UseMode::Move, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + )?; + return Ok(Ty::Builtin(BuiltinType::Unit)); + } + if name == "panic" { + if !call.args.is_empty() { + return Err(TypeError::new( + "panic takes no arguments".to_string(), + call.span, + )); + } + return Ok(Ty::Builtin(BuiltinType::Never)); + } + if name == "Ok" || name == "Err" { + if call.args.len() != 1 { + return Err(TypeError::new( + format!("{name} takes exactly one argument"), + call.span, + )); + } + let arg_ty = check_expr( + &call.args[0], + functions, + trait_map, + trait_impls, + scopes, + UseMode::Move, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + )?; + if let Ty::Path(ty_name, args) = ret_ty { + if ty_name == "sys.result.Result" && args.len() == 2 { + let expected = if name == "Ok" { &args[0] } else { &args[1] }; + if !ty_equivalent_for_set(&arg_ty, expected, 
type_params) { + return Err(TypeError::new( + format!( + "{name} argument type mismatch: expected {expected:?}, got {arg_ty:?}" + ), + call.args[0].span(), + )); + } + return Ok(ret_ty.clone()); + } + } + return Ok(Ty::Path( + "sys.result.Result".to_string(), + if name == "Ok" { + vec![arg_ty, Ty::Builtin(BuiltinType::Unit)] + } else { + vec![Ty::Builtin(BuiltinType::Unit), arg_ty] + }, + )); + } + } + + if let Some(Ty::Path(enum_name, _)) = resolve_enum_variant(&path, use_map, enum_map, module_name) + { + let Some(info) = enum_map.get(&enum_name) else { + return Err(TypeError::new("unknown enum variant".to_string(), call.span)); + }; + let variant = path + .segments + .last() + .map(|s| s.item.clone()) + .unwrap_or_else(|| "unknown".to_string()); + let payload = info.payloads.get(&variant).cloned().unwrap_or(None); + + if payload.is_none() && !call.args.is_empty() { + return Err(TypeError::new( + format!("{variant} takes no arguments"), + call.span, + )); + } + if payload.is_some() && call.args.len() != 1 { + return Err(TypeError::new( + format!("{variant} takes exactly one argument"), + call.span, + )); + } + + let mut inferred: HashMap = HashMap::new(); + let arg_ty = if let Some(payload_ty) = payload.clone() { + let arg_ty = check_expr( + &call.args[0], + functions, + trait_map, + trait_impls, + scopes, + UseMode::Move, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + )?; + if !infer_enum_args(&payload_ty, &arg_ty, &mut inferred) { + return Err(TypeError::new( + format!( + "variant argument type mismatch: expected {payload_ty:?}, got {arg_ty:?}" + ), + call.args[0].span(), + )); + } + Some(arg_ty) + } else { + None + }; + + let type_args = resolve_enum_type_args(&enum_name, &info.type_params, &inferred, ret_ty); + + if let Some(payload_ty) = payload { + if let Some(arg_ty) = arg_ty { + if !enum_payload_matches(&payload_ty, &arg_ty, &info.type_params, &type_args) { + return 
Err(TypeError::new( + "variant argument type mismatch".to_string(), + call.args[0].span(), + )); + } + } + } + + return Ok(Ty::Path(enum_name, type_args)); + } + + let resolved = resolve_path(&path, use_map); + let key = resolved.join("."); + + let sig = if let Some(sig) = functions.get(&key) { + sig + } else if resolved.len() == 1 { + let qualified = format!("{}.{}", module_name, key); + functions + .get(&qualified) + .ok_or_else(|| TypeError::new(format!("unknown function `{key}`"), path.span))? + } else { + return Err(TypeError::new( + format!("unknown function `{key}`"), + path.span, + )); + }; + if sig.module != module_name && !sig.is_pub { + return Err(TypeError::new( + format!("function `{}` is private", key), + call.span, + )); + } + let explicit_type_args = + lower_type_args(&call.type_args, use_map, stdlib, struct_map, enum_map, type_params)?; + let subs = build_call_substitution(sig, &explicit_type_args, HashMap::new(), call.span)?; + enforce_type_param_bounds(sig, &subs, trait_impls, call.span)?; + let instantiated_params: Vec = sig + .params + .iter() + .map(|ty| substitute_type(ty, &subs)) + .collect(); + let instantiated_ret = substitute_type(&sig.ret, &subs); + if instantiated_params.len() != call.args.len() { + return Err(TypeError::new( + format!( + "argument count mismatch: expected {}, found {}", + instantiated_params.len(), + call.args.len() + ), + call.span, + )); + } + check_argument_types( + &call.args, + &instantiated_params, + functions, + trait_map, + trait_impls, + scopes, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + )?; + let _ = expr; + Ok(instantiated_ret) +} + +pub(super) fn check_method_call_expr( + method_call: &MethodCallExpr, + functions: &HashMap, + trait_map: &HashMap, + trait_impls: &[TraitImplInfo], + scopes: &mut Scopes, + recorder: &mut TypeRecorder, + use_map: &UseMap, + struct_map: &HashMap, + enum_map: &HashMap, + stdlib: &StdlibIndex, + ret_ty: 
&Ty, + module_name: &str, + type_params: &HashSet, + type_param_bounds: &HashMap>, +) -> Result { + fn get_leftmost_segment(expr: &Expr) -> Option<&str> { + match expr { + Expr::Path(path) if path.segments.len() == 1 => Some(&path.segments[0].item), + Expr::FieldAccess(fa) => get_leftmost_segment(&fa.object), + _ => None, + } + } + + let base_is_local = if let Some(base_name) = get_leftmost_segment(&method_call.receiver) { + scopes.contains(base_name) + } else { + true + }; + + let path_call = method_call.receiver.to_path().map(|mut path| { + path.segments.push(method_call.method.clone()); + path.span = Span::new(path.span.start, method_call.method.span.end); + path + }); + + let is_function = if let Some(path) = &path_call { + let resolved = resolve_path(path, use_map); + let key = resolved.join("."); + functions.contains_key(&key) + } else { + false + }; + + if !base_is_local && is_function { + let Some(path) = path_call else { + return Err(TypeError::new( + "method receiver path could not be resolved".to_string(), + method_call.span, + )); + }; + let resolved = resolve_path(&path, use_map); + let key = resolved.join("."); + let sig = functions + .get(&key) + .ok_or_else(|| TypeError::new(format!("unknown function `{key}`"), path.span))?; + if sig.module != module_name && !sig.is_pub { + return Err(TypeError::new( + format!("function `{key}` is private"), + method_call.span, + )); + } + let explicit_type_args = lower_type_args( + &method_call.type_args, + use_map, + stdlib, + struct_map, + enum_map, + type_params, + )?; + let subs = + build_call_substitution(sig, &explicit_type_args, HashMap::new(), method_call.span)?; + enforce_type_param_bounds(sig, &subs, trait_impls, method_call.span)?; + let instantiated_params: Vec = sig + .params + .iter() + .map(|ty| substitute_type(ty, &subs)) + .collect(); + let instantiated_ret = substitute_type(&sig.ret, &subs); + if instantiated_params.len() != method_call.args.len() { + return Err(TypeError::new( + format!( + 
"argument count mismatch: expected {}, found {}", + instantiated_params.len(), + method_call.args.len() + ), + method_call.span, + )); + } + check_argument_types( + &method_call.args, + &instantiated_params, + functions, + trait_map, + trait_impls, + scopes, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + )?; + return Ok(instantiated_ret); + } + + let receiver_ty = check_expr( + &method_call.receiver, + functions, + trait_map, + trait_impls, + scopes, + UseMode::Read, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + )?; + enforce_vec_method_constraints(&receiver_ty, &method_call.method.item, method_call.method.span)?; + let receiver_base = match &receiver_ty { + Ty::Ref(inner) | Ty::Ptr(inner) => inner.as_ref(), + _ => &receiver_ty, + }; + if let Ty::Param(param_name) = receiver_base { + let bounds = type_param_bounds + .get(param_name) + .cloned() + .unwrap_or_default(); + let mut candidates = Vec::new(); + for bound in bounds { + if let Some(info) = trait_map.get(&bound) { + if let Some(sig) = info.methods.get(&method_call.method.item) { + candidates.push((bound, sig)); + } + } + } + if candidates.is_empty() { + return Err(TypeError::new( + format!( + "no trait bound provides method `{}` for `{}`", + method_call.method.item, param_name + ), + method_call.span, + )); + } + if candidates.len() > 1 { + return Err(TypeError::new( + format!( + "ambiguous method `{}` for `{}`; multiple trait bounds apply", + method_call.method.item, param_name + ), + method_call.span, + )); + } + let (_trait_name, sig) = candidates.remove(0); + let mut inferred = HashMap::new(); + let expected_receiver = match &sig.params[0] { + Ty::Ref(inner) | Ty::Ptr(inner) => inner.as_ref(), + other => other, + }; + let actual_receiver = match &receiver_ty { + Ty::Ref(inner) | Ty::Ptr(inner) => inner.as_ref(), + other => other, + }; + match_type_params( 
+ expected_receiver, + actual_receiver, + &mut inferred, + method_call.receiver.span(), + )?; + let explicit_type_args = lower_type_args( + &method_call.type_args, + use_map, + stdlib, + struct_map, + enum_map, + type_params, + )?; + let subs = + build_call_substitution(sig, &explicit_type_args, inferred, method_call.span)?; + enforce_type_param_bounds(sig, &subs, trait_impls, method_call.span)?; + let instantiated_params: Vec = sig + .params + .iter() + .map(|ty| substitute_type(ty, &subs)) + .collect(); + let instantiated_ret = substitute_type(&sig.ret, &subs); + if instantiated_params.len() != method_call.args.len() + 1 { + return Err(TypeError::new( + format!( + "argument count mismatch: expected {}, found {}", + instantiated_params.len() - 1, + method_call.args.len() + ), + method_call.span, + )); + } + if instantiated_params[0] != receiver_ty { + return Err(TypeError::new( + format!( + "method receiver type mismatch: expected {expected:?}, found {receiver_ty:?}", + expected = instantiated_params[0] + ), + method_call.receiver.span(), + )); + } + if !matches!(instantiated_params[0], Ty::Ref(_)) { + let _ = check_expr( + &method_call.receiver, + functions, + trait_map, + trait_impls, + scopes, + UseMode::Move, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + )?; + } + check_argument_types( + &method_call.args, + &instantiated_params[1..], + functions, + trait_map, + trait_impls, + scopes, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + )?; + return Ok(instantiated_ret); + } + + let (method_module, type_name, receiver_args) = resolve_method_target( + &receiver_ty, + module_name, + struct_map, + enum_map, + method_call.receiver.span(), + )?; + + let type_arg_suffix = super::super::build_type_arg_suffix(&receiver_args); + let base_method_fn = format!("{type_name}__{}", method_call.method.item); + let specific_method_fn 
= format!("{type_name}{type_arg_suffix}__{}", method_call.method.item); + + let qualified_specific = format!("{method_module}.{specific_method_fn}"); + let qualified_base = format!("{method_module}.{base_method_fn}"); + + let key = if !type_arg_suffix.is_empty() && functions.contains_key(&qualified_specific) { + qualified_specific + } else if functions.contains_key(&qualified_base) { + qualified_base + } else if method_module == module_name + && !type_arg_suffix.is_empty() + && functions.contains_key(&specific_method_fn) + { + specific_method_fn.clone() + } else if method_module == module_name && functions.contains_key(&base_method_fn) { + base_method_fn.clone() + } else { + return Err(TypeError::new( + format!("unknown method `{qualified_base}`"), + method_call.span, + )); + }; + let sig = functions + .get(&key) + .ok_or_else(|| TypeError::new(format!("unknown method `{key}`"), method_call.span))?; + if sig.module != module_name && !sig.is_pub { + return Err(TypeError::new( + format!("method `{key}` is private"), + method_call.span, + )); + } + let mut inferred = HashMap::new(); + let expected_receiver = match &sig.params[0] { + Ty::Ref(inner) | Ty::Ptr(inner) => inner.as_ref(), + other => other, + }; + let actual_receiver = match &receiver_ty { + Ty::Ref(inner) | Ty::Ptr(inner) => inner.as_ref(), + other => other, + }; + let normalized_actual_receiver = match (expected_receiver, actual_receiver) { + (Ty::Path(expected_name, _), Ty::Path(actual_name, args)) + if !expected_name.contains('.') + && actual_name + .rsplit_once('.') + .map(|(_, t)| t == expected_name) + .unwrap_or(false) => + { + Ty::Path(expected_name.clone(), args.clone()) + } + _ => actual_receiver.clone(), + }; + match_type_params( + expected_receiver, + &normalized_actual_receiver, + &mut inferred, + method_call.receiver.span(), + )?; + let explicit_type_args = lower_type_args( + &method_call.type_args, + use_map, + stdlib, + struct_map, + enum_map, + type_params, + )?; + let subs = 
build_call_substitution(sig, &explicit_type_args, inferred, method_call.span)?; + enforce_type_param_bounds(sig, &subs, trait_impls, method_call.span)?; + let instantiated_params: Vec = sig + .params + .iter() + .map(|ty| substitute_type(ty, &subs)) + .collect(); + let instantiated_ret = substitute_type(&sig.ret, &subs); + if instantiated_params.len() != method_call.args.len() + 1 { + return Err(TypeError::new( + format!( + "argument count mismatch: expected {}, found {}", + instantiated_params.len() - 1, + method_call.args.len() + ), + method_call.span, + )); + } + let receiver_base = match &receiver_ty { + Ty::Ref(inner) | Ty::Ptr(inner) => inner.as_ref().clone(), + _ => receiver_ty.clone(), + }; + let receiver_unqualified = Ty::Path(type_name.clone(), receiver_args); + let receiver_ref = Ty::Ref(Box::new(receiver_base.clone())); + let receiver_ref_unqualified = Ty::Ref(Box::new(receiver_unqualified.clone())); + let receiver_ptr = Ty::Ptr(Box::new(receiver_base.clone())); + let receiver_ptr_unqualified = Ty::Ptr(Box::new(receiver_unqualified.clone())); + let expected_qualified = match &instantiated_params[0] { + Ty::Path(name, args) if !name.contains('.') => { + Some(Ty::Path(format!("{method_module}.{name}"), args.clone())) + } + _ => None, + }; + let expected_ref_qualified = expected_qualified + .as_ref() + .map(|ty| Ty::Ref(Box::new(ty.clone()))); + let expected_ptr_qualified = expected_qualified + .as_ref() + .map(|ty| Ty::Ptr(Box::new(ty.clone()))); + + let expects_ref = matches!(instantiated_params[0], Ty::Ref(_)); + let expects_ptr = matches!(instantiated_params[0], Ty::Ptr(_)); + + if matches!(receiver_ty, Ty::Ref(_)) && !expects_ref { + return Err(TypeError::new( + "cannot use a reference receiver where a value is expected".to_string(), + method_call.receiver.span(), + )); + } + if matches!(receiver_ty, Ty::Ptr(_)) && !expects_ptr { + return Err(TypeError::new( + "cannot use a pointer receiver where a value is expected".to_string(), + 
method_call.receiver.span(), + )); + } + + if instantiated_params[0] != receiver_ty + && expected_qualified.as_ref() != Some(&receiver_ty) + && instantiated_params[0] != receiver_unqualified + && instantiated_params[0] != receiver_ref + && expected_ref_qualified.as_ref() != Some(&receiver_ref) + && instantiated_params[0] != receiver_ref_unqualified + && instantiated_params[0] != receiver_ptr + && expected_ptr_qualified.as_ref() != Some(&receiver_ptr) + && instantiated_params[0] != receiver_ptr_unqualified + { + return Err(TypeError::new( + format!( + "method receiver type mismatch: expected {expected:?}, found {receiver_ty:?}", + expected = instantiated_params[0] + ), + method_call.receiver.span(), + )); + } + if instantiated_params[0] != receiver_ref && instantiated_params[0] != receiver_ref_unqualified + { + let _ = check_expr( + &method_call.receiver, + functions, + trait_map, + trait_impls, + scopes, + UseMode::Move, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + )?; + } + check_argument_types( + &method_call.args, + &instantiated_params[1..], + functions, + trait_map, + trait_impls, + scopes, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + )?; + Ok(instantiated_ret) +} diff --git a/capc/src/typeck/check/match_check.rs b/capc/src/typeck/check/match_check.rs new file mode 100644 index 0000000..6d3394f --- /dev/null +++ b/capc/src/typeck/check/match_check.rs @@ -0,0 +1,458 @@ +use super::*; + +struct CheckedMatchArm { + ty: Ty, + scope_after: Option, +} + +fn check_match_arms( + match_expr: &MatchExpr, + match_ty: &Ty, + functions: &HashMap, + trait_map: &HashMap, + trait_impls: &[TraitImplInfo], + scopes: &Scopes, + recorder: &mut TypeRecorder, + use_map: &UseMap, + struct_map: &HashMap, + enum_map: &HashMap, + stdlib: &StdlibIndex, + ret_ty: &Ty, + module_name: &str, + type_params: &HashSet, + type_param_bounds: 
&HashMap>, + in_loop: bool, + expression_mode: bool, +) -> Result, TypeError> { + let mut arms = Vec::with_capacity(match_expr.arms.len()); + for arm in &match_expr.arms { + let mut arm_scope = scopes.clone(); + arm_scope.push_scope(); + bind_pattern( + &arm.pattern, + match_ty, + &mut arm_scope, + use_map, + enum_map, + module_name, + )?; + let arm_ty = if expression_mode { + check_match_arm_value( + &arm.body, + functions, + trait_map, + trait_impls, + &mut arm_scope, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + in_loop, + )? + } else { + check_block( + &arm.body, + ret_ty, + functions, + trait_map, + trait_impls, + &mut arm_scope, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + module_name, + type_params, + type_param_bounds, + in_loop, + )?; + Ty::Builtin(BuiltinType::Unit) + }; + arm_scope.pop_scope(); + let scope_after = if matches!(arm_ty, Ty::Builtin(BuiltinType::Never)) { + None + } else { + Some(arm_scope) + }; + arms.push(CheckedMatchArm { + ty: arm_ty, + scope_after, + }); + } + Ok(arms) +} + +/// Check a statement-form match (arms may return, no value required). 
+pub(super) fn check_match_stmt( + match_expr: &MatchExpr, + functions: &HashMap, + trait_map: &HashMap, + trait_impls: &[TraitImplInfo], + scopes: &mut Scopes, + scrutinee_mode: UseMode, + recorder: &mut TypeRecorder, + use_map: &UseMap, + struct_map: &HashMap, + enum_map: &HashMap, + stdlib: &StdlibIndex, + ret_ty: &Ty, + module_name: &str, + type_params: &HashSet, + type_param_bounds: &HashMap>, + in_loop: bool, +) -> Result { + let match_ty = check_expr( + &match_expr.expr, + functions, + trait_map, + trait_impls, + scopes, + scrutinee_mode, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + )?; + let arms = check_match_arms( + match_expr, + &match_ty, + functions, + trait_map, + trait_impls, + scopes, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + in_loop, + false, + )?; + let arm_scopes = arms + .into_iter() + .filter_map(|arm| arm.scope_after) + .collect::>(); + check_match_exhaustive( + &match_ty, + &match_expr.arms, + use_map, + enum_map, + module_name, + match_expr.match_span, + )?; + if !module_name.starts_with("sys.") && !arm_scopes.is_empty() { + merge_match_states(scopes, &arm_scopes, struct_map, enum_map, match_expr.span)?; + } + Ok(Ty::Builtin(BuiltinType::Unit)) +} + +/// Check an expression-form match (all arms must evaluate to the same type). 
+pub(super) fn check_match_expr_value( + match_expr: &MatchExpr, + functions: &HashMap, + trait_map: &HashMap, + trait_impls: &[TraitImplInfo], + scopes: &mut Scopes, + scrutinee_mode: UseMode, + recorder: &mut TypeRecorder, + use_map: &UseMap, + struct_map: &HashMap, + enum_map: &HashMap, + stdlib: &StdlibIndex, + ret_ty: &Ty, + module_name: &str, + type_params: &HashSet, + type_param_bounds: &HashMap>, + in_loop: bool, +) -> Result { + let match_ty = check_expr( + &match_expr.expr, + functions, + trait_map, + trait_impls, + scopes, + scrutinee_mode, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + )?; + let arms = check_match_arms( + match_expr, + &match_ty, + functions, + trait_map, + trait_impls, + scopes, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + in_loop, + true, + )?; + let mut result_ty: Option = None; + let mut arm_scopes = Vec::new(); + for arm in arms { + if let Some(scope_after) = arm.scope_after { + arm_scopes.push(scope_after); + } + if let Some(prev) = &result_ty { + if matches!(prev, Ty::Builtin(BuiltinType::Never)) { + result_ty = Some(arm.ty); + } else if matches!(arm.ty, Ty::Builtin(BuiltinType::Never)) { + // Keep the previous type; never can coerce to any type. + } else if prev != &arm.ty { + return Err(TypeError::new( + format!("match arm type mismatch: expected {prev:?}, found {:?}", + arm.ty), + match_expr.span, + )); + } + } else { + result_ty = Some(arm.ty); + } + } + check_match_exhaustive( + &match_ty, + &match_expr.arms, + use_map, + enum_map, + module_name, + match_expr.match_span, + )?; + if !module_name.starts_with("sys.") && !arm_scopes.is_empty() { + merge_match_states(scopes, &arm_scopes, struct_map, enum_map, match_expr.span)?; + } + Ok(result_ty.unwrap_or(Ty::Builtin(BuiltinType::Unit))) +} + +/// Check a single match arm in expression context. 
+fn check_match_arm_value( + block: &Block, + functions: &HashMap, + trait_map: &HashMap, + trait_impls: &[TraitImplInfo], + scopes: &mut Scopes, + recorder: &mut TypeRecorder, + use_map: &UseMap, + struct_map: &HashMap, + enum_map: &HashMap, + stdlib: &StdlibIndex, + ret_ty: &Ty, + module_name: &str, + type_params: &HashSet, + type_param_bounds: &HashMap>, + in_loop: bool, +) -> Result { + let Some((last, prefix)) = block.stmts.split_last() else { + return Err(TypeError::new( + "match arm must end with expression".to_string(), + block.span, + )); + }; + for stmt in prefix { + check_stmt( + stmt, + ret_ty, + functions, + trait_map, + trait_impls, + scopes, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + module_name, + type_params, + type_param_bounds, + in_loop, + )?; + } + match last { + Stmt::Expr(expr_stmt) => check_expr( + &expr_stmt.expr, + functions, + trait_map, + trait_impls, + scopes, + UseMode::Move, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + ), + _ => Err(TypeError::new( + "match arm must end with expression".to_string(), + block.span, + )), + } +} + +fn check_match_exhaustive( + match_ty: &Ty, + arms: &[MatchArm], + use_map: &UseMap, + enum_map: &HashMap, + module_name: &str, + span: Span, +) -> Result<(), TypeError> { + if arms + .iter() + .any(|arm| matches!(arm.pattern, Pattern::Wildcard(_) | Pattern::Binding(_))) + { + return Ok(()); + } + + match match_ty { + Ty::Builtin(BuiltinType::Bool) => { + let mut seen_true = false; + let mut seen_false = false; + for arm in arms { + if let Pattern::Literal(Literal::Bool(value)) = arm.pattern { + if value { + seen_true = true; + } else { + seen_false = true; + } + } + } + if seen_true && seen_false { + return Ok(()); + } + let mut missing = Vec::new(); + if !seen_true { + missing.push("true"); + } + if !seen_false { + missing.push("false"); + } + return Err(TypeError::new( + format!( + "non-exhaustive match on bool, 
missing: {}", + missing.join(", ") + ), + span, + )); + } + Ty::Path(name, args) if name == "sys.result.Result" && args.len() == 2 => { + let mut seen_ok = false; + let mut seen_err = false; + for arm in arms { + if let Pattern::Call { path, .. } = &arm.pattern { + if path.segments.len() == 1 { + let variant = path.segments[0].item.as_str(); + if variant == "Ok" { + seen_ok = true; + } else if variant == "Err" { + seen_err = true; + } + } + } + } + if seen_ok && seen_err { + return Ok(()); + } + let mut missing = Vec::new(); + if !seen_ok { + missing.push("Ok"); + } + if !seen_err { + missing.push("Err"); + } + return Err(TypeError::new( + format!( + "non-exhaustive match on Result, missing: {}", + missing.join(", ") + ), + span, + )); + } + Ty::Path(name, _) => { + let info = enum_map.get(name).or_else(|| { + if name.contains('.') { + None + } else { + enum_map.get(&format!("{module_name}.{name}")) + } + }); + let Some(info) = info else { + return Ok(()); + }; + let mut seen = HashSet::new(); + for arm in arms { + let path = match &arm.pattern { + Pattern::Path(path) => Some(path), + Pattern::Call { path, .. 
} => Some(path), + _ => None, + }; + if let Some(path) = path { + if let Some(ty) = resolve_enum_variant(path, use_map, enum_map, module_name) { + if same_type_constructor(&ty, match_ty) { + if let Some(seg) = path.segments.last() { + seen.insert(seg.item.clone()); + } + } + } + } + } + if info.variants.iter().all(|v| seen.contains(v)) { + return Ok(()); + } + let missing: Vec = info + .variants + .iter() + .filter(|v| !seen.contains(*v)) + .cloned() + .collect(); + return Err(TypeError::new( + format!( + "non-exhaustive match, missing variants: {}", + missing.join(", ") + ), + span, + )); + } + _ => {} + } + + Ok(()) +} + +fn same_type_constructor(left: &Ty, right: &Ty) -> bool { + match (left, right) { + (Ty::Path(left_name, _), Ty::Path(right_name, _)) => left_name == right_name, + _ => left == right, + } +} diff --git a/capc/src/typeck/check/stmt.rs b/capc/src/typeck/check/stmt.rs new file mode 100644 index 0000000..ac5da1a --- /dev/null +++ b/capc/src/typeck/check/stmt.rs @@ -0,0 +1,611 @@ +use super::*; + +/// Type-check a statement and update move state in the current scope. 
+pub(super) fn check_stmt( + stmt: &Stmt, + ret_ty: &Ty, + functions: &HashMap, + trait_map: &HashMap, + trait_impls: &[TraitImplInfo], + scopes: &mut Scopes, + recorder: &mut TypeRecorder, + use_map: &UseMap, + struct_map: &HashMap, + enum_map: &HashMap, + stdlib: &StdlibIndex, + module_name: &str, + type_params: &HashSet, + type_param_bounds: &HashMap>, + in_loop: bool, +) -> Result<(), TypeError> { + let trusted_stdlib = module_name.starts_with("sys."); + match stmt { + Stmt::LetElse(_) + | Stmt::TryLet(_) + | Stmt::TryElse(_) + | Stmt::ForEach(_) => { + return Err(TypeError::new( + "internal error: desugaring did not lower high-level statement".to_string(), + stmt.span(), + )); + } + Stmt::Let(let_stmt) => { + if scopes.contains(&let_stmt.name.item) { + return Err(TypeError::new( + format!( + "variable shadowing is not allowed: `{}`", + let_stmt.name.item + ), + let_stmt.name.span, + )); + } + let annot_ref = let_stmt + .ty + .as_ref() + .is_some_and(|ty| matches!(ty, Type::Ref { .. })); + let expr_use_mode = if annot_ref { + UseMode::Read + } else { + UseMode::Move + }; + let expr_ty = if let Expr::Match(match_expr) = &let_stmt.expr { + let expr_ty = check_match_expr_value( + match_expr, + functions, + trait_map, + trait_impls, + scopes, + expr_use_mode, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + in_loop, + )?; + recorder.record(&let_stmt.expr, &expr_ty); + expr_ty + } else { + check_expr( + &let_stmt.expr, + functions, + trait_map, + trait_impls, + scopes, + expr_use_mode, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + )? + }; + let final_ty = if let Some(annot) = &let_stmt.ty { + if let Some(span) = type_contains_ref(annot) { + match annot { + Type::Ref { target, .. 
} => { + if type_contains_ref(target).is_some() { + return Err(TypeError::new( + "nested reference types are not allowed".to_string(), + span, + )); + } + } + _ => { + return Err(TypeError::new( + "reference types are only allowed as direct local types" + .to_string(), + span, + )); + } + } + } + let annot_ty = lower_type(annot, use_map, stdlib, type_params)?; + validate_type_args(&annot_ty, struct_map, enum_map, annot.span())?; + let matches_ref = if let Ty::Ref(inner) = &annot_ty { + &expr_ty == inner.as_ref() || &expr_ty == &annot_ty + } else { + false + }; + if annot_ty != expr_ty + && !matches_ref + && !matches!(expr_ty, Ty::Builtin(BuiltinType::Never)) + { + return Err(TypeError::new( + format!("type mismatch: expected {annot_ty:?}, found {expr_ty:?}"), + let_stmt.span, + )); + } + if matches!(annot_ty, Ty::Ref(_)) { + let Some((name, _span)) = leftmost_local_in_chain(&let_stmt.expr) else { + return Err(TypeError::new( + "reference locals must be initialized from a local value".to_string(), + let_stmt.expr.span(), + )); + }; + if !scopes.contains(name) { + return Err(TypeError::new( + "reference locals must be initialized from a local value".to_string(), + let_stmt.expr.span(), + )); + } + } + annot_ty + } else { + if matches!(expr_ty, Ty::Ref(_)) { + let Some((name, _span)) = leftmost_local_in_chain(&let_stmt.expr) else { + return Err(TypeError::new( + "reference locals must be initialized from a local value".to_string(), + let_stmt.expr.span(), + )); + }; + if !scopes.contains(name) { + return Err(TypeError::new( + "reference locals must be initialized from a local value".to_string(), + let_stmt.expr.span(), + )); + } + } + expr_ty + }; + scopes.insert_local(let_stmt.name.item.clone(), final_ty); + } + Stmt::Assign(assign) => { + let Some(existing) = scopes.lookup(&assign.name.item) else { + return Err(TypeError::new( + format!("unknown identifier `{}`", assign.name.item), + assign.name.span, + )); + }; + let existing = existing.ty.clone(); + if 
matches!(existing, Ty::Ref(_)) { + return Err(TypeError::new( + "cannot assign to a reference local".to_string(), + assign.span, + )); + } + let expr_ty = check_expr( + &assign.expr, + functions, + trait_map, + trait_impls, + scopes, + UseMode::Move, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + )?; + if expr_ty != existing && !matches!(expr_ty, Ty::Builtin(BuiltinType::Never)) { + return Err(TypeError::new( + format!("assignment type mismatch: expected {existing:?}, found {expr_ty:?}"), + assign.span, + )); + } + scopes.assign(&assign.name.item, expr_ty); + } + Stmt::Defer(defer_stmt) => { + match &defer_stmt.expr { + Expr::Call(_) | Expr::MethodCall(_) => {} + _ => { + return Err(TypeError::new( + "defer expects a function or method call".to_string(), + defer_stmt.span, + )) + } + } + let _ = check_expr( + &defer_stmt.expr, + functions, + trait_map, + trait_impls, + scopes, + UseMode::Move, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + )?; + } + Stmt::Return(ret_stmt) => { + let expr_ty = if let Some(expr) = &ret_stmt.expr { + check_expr( + expr, + functions, + trait_map, + trait_impls, + scopes, + UseMode::Move, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + )? 
+ } else { + Ty::Builtin(BuiltinType::Unit) + }; + if &expr_ty != ret_ty { + if matches!(expr_ty, Ty::Builtin(BuiltinType::Never)) { + if !trusted_stdlib { + ensure_linear_all_consumed(scopes, struct_map, enum_map, ret_stmt.span)?; + } + return Ok(()); + } + return Err(TypeError::new( + format!("return type mismatch: expected {ret_ty:?}, found {expr_ty:?}"), + ret_stmt.span, + )); + } + if !trusted_stdlib { + ensure_linear_all_consumed(scopes, struct_map, enum_map, ret_stmt.span)?; + } + } + Stmt::Break(break_stmt) => { + if !in_loop { + return Err(TypeError::new( + "break statement outside of loop".to_string(), + break_stmt.span, + )); + } + let depth = scopes.current_loop_depth().ok_or_else(|| { + TypeError::new( + "break statement outside of loop".to_string(), + break_stmt.span, + ) + })?; + if !trusted_stdlib { + ensure_linear_scopes_consumed_from( + scopes, + depth, + struct_map, + enum_map, + break_stmt.span, + )?; + } + } + Stmt::Continue(continue_stmt) => { + if !in_loop { + return Err(TypeError::new( + "continue statement outside of loop".to_string(), + continue_stmt.span, + )); + } + let depth = scopes.current_loop_depth().ok_or_else(|| { + TypeError::new( + "continue statement outside of loop".to_string(), + continue_stmt.span, + ) + })?; + if !trusted_stdlib { + ensure_linear_scopes_consumed_from( + scopes, + depth, + struct_map, + enum_map, + continue_stmt.span, + )?; + } + } + Stmt::If(if_stmt) => { + let cond_ty = check_expr( + &if_stmt.cond, + functions, + trait_map, + trait_impls, + scopes, + UseMode::Read, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + )?; + if cond_ty != Ty::Builtin(BuiltinType::Bool) { + return Err(TypeError::new( + "if condition must be bool".to_string(), + if_stmt.cond.span(), + )); + } + let mut then_scopes = scopes.clone(); + check_block( + &if_stmt.then_block, + ret_ty, + functions, + trait_map, + trait_impls, + &mut then_scopes, + recorder, + 
use_map, + struct_map, + enum_map, + stdlib, + module_name, + type_params, + type_param_bounds, + in_loop, + )?; + let mut else_scopes = scopes.clone(); + if let Some(block) = &if_stmt.else_block { + check_block( + block, + ret_ty, + functions, + trait_map, + trait_impls, + &mut else_scopes, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + module_name, + type_params, + type_param_bounds, + in_loop, + )?; + } + if !trusted_stdlib { + merge_branch_states( + scopes, + &then_scopes, + &else_scopes, + struct_map, + enum_map, + if_stmt.span, + )?; + } + } + Stmt::While(while_stmt) => { + let cond_ty = check_expr( + &while_stmt.cond, + functions, + trait_map, + trait_impls, + scopes, + UseMode::Read, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + )?; + if cond_ty != Ty::Builtin(BuiltinType::Bool) { + return Err(TypeError::new( + "while condition must be bool".to_string(), + while_stmt.cond.span(), + )); + } + let mut body_scopes = scopes.clone(); + body_scopes.push_loop(); + check_block( + &while_stmt.body, + ret_ty, + functions, + trait_map, + trait_impls, + &mut body_scopes, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + module_name, + type_params, + type_param_bounds, + true, + )?; + body_scopes.pop_loop(); + if !trusted_stdlib { + ensure_affine_states_match( + scopes, + &body_scopes, + struct_map, + enum_map, + while_stmt.span, + )?; + } + } + Stmt::For(for_stmt) => { + let start_ty = check_expr( + &for_stmt.start, + functions, + trait_map, + trait_impls, + scopes, + UseMode::Read, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + )?; + if start_ty != Ty::Builtin(BuiltinType::I32) { + return Err(TypeError::new( + "for loop range start must be i32".to_string(), + for_stmt.start.span(), + )); + } + + let end_ty = check_expr( + &for_stmt.end, + functions, + trait_map, + trait_impls, + scopes, + 
UseMode::Read, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + )?; + if end_ty != Ty::Builtin(BuiltinType::I32) { + return Err(TypeError::new( + "for loop range end must be i32".to_string(), + for_stmt.end.span(), + )); + } + + let mut body_scopes = scopes.clone(); + body_scopes.push_scope(); + body_scopes.insert_local(for_stmt.var.item.clone(), Ty::Builtin(BuiltinType::I32)); + + body_scopes.push_loop(); + check_block( + &for_stmt.body, + ret_ty, + functions, + trait_map, + trait_impls, + &mut body_scopes, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + module_name, + type_params, + type_param_bounds, + true, + )?; + body_scopes.pop_loop(); + body_scopes.pop_scope(); + + if !trusted_stdlib { + ensure_affine_states_match( + scopes, + &body_scopes, + struct_map, + enum_map, + for_stmt.span, + )?; + } + } + Stmt::Expr(expr_stmt) => { + if let Expr::Match(match_expr) = &expr_stmt.expr { + let _ = check_match_stmt( + match_expr, + functions, + trait_map, + trait_impls, + scopes, + UseMode::Move, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + in_loop, + )?; + } else { + check_expr( + &expr_stmt.expr, + functions, + trait_map, + trait_impls, + scopes, + UseMode::Move, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + ret_ty, + module_name, + type_params, + type_param_bounds, + )?; + } + } + } + + Ok(()) +} + +/// Type-check a block with a fresh lexical scope. 
+pub(super) fn check_block( + block: &Block, + ret_ty: &Ty, + functions: &HashMap, + trait_map: &HashMap, + trait_impls: &[TraitImplInfo], + scopes: &mut Scopes, + recorder: &mut TypeRecorder, + use_map: &UseMap, + struct_map: &HashMap, + enum_map: &HashMap, + stdlib: &StdlibIndex, + module_name: &str, + type_params: &HashSet, + type_param_bounds: &HashMap>, + in_loop: bool, +) -> Result<(), TypeError> { + scopes.push_scope(); + for stmt in &block.stmts { + check_stmt( + stmt, + ret_ty, + functions, + trait_map, + trait_impls, + scopes, + recorder, + use_map, + struct_map, + enum_map, + stdlib, + module_name, + type_params, + type_param_bounds, + in_loop, + )?; + } + if !module_name.starts_with("sys.") { + ensure_linear_scope_consumed(scopes, struct_map, enum_map, block.span)?; + } + scopes.pop_scope(); + Ok(()) +} diff --git a/capc/src/typeck/check/type_params.rs b/capc/src/typeck/check/type_params.rs new file mode 100644 index 0000000..f40f73d --- /dev/null +++ b/capc/src/typeck/check/type_params.rs @@ -0,0 +1,205 @@ +use super::*; + +pub(super) fn lower_type_args( + args: &[Type], + use_map: &UseMap, + stdlib: &StdlibIndex, + struct_map: &HashMap, + enum_map: &HashMap, + type_params: &HashSet, +) -> Result, TypeError> { + let mut out = Vec::with_capacity(args.len()); + for arg in args { + let ty = lower_type(arg, use_map, stdlib, type_params)?; + validate_type_args(&ty, struct_map, enum_map, arg.span())?; + out.push(ty); + } + Ok(out) +} + +pub(super) fn build_type_substitution( + params: &[String], + args: &[Ty], + span: Span, +) -> Result, TypeError> { + if params.len() != args.len() { + return Err(TypeError::new( + format!( + "expected {} type argument(s), found {}", + params.len(), + args.len() + ), + span, + )); + } + let mut map = HashMap::new(); + for (param, arg) in params.iter().zip(args.iter()) { + map.insert(param.clone(), arg.clone()); + } + Ok(map) +} + +pub(super) fn substitute_type(ty: &Ty, subs: &HashMap) -> Ty { + match ty { + Ty::Param(name) => 
subs.get(name).cloned().unwrap_or_else(|| ty.clone()), + Ty::Builtin(_) => ty.clone(), + Ty::Ptr(inner) => Ty::Ptr(Box::new(substitute_type(inner, subs))), + Ty::Ref(inner) => Ty::Ref(Box::new(substitute_type(inner, subs))), + Ty::Path(name, args) => Ty::Path( + name.clone(), + args.iter().map(|arg| substitute_type(arg, subs)).collect(), + ), + } +} + +pub(super) fn match_type_params( + expected: &Ty, + actual: &Ty, + subs: &mut HashMap, + span: Span, +) -> Result<(), TypeError> { + match expected { + Ty::Param(name) => { + if let Some(existing) = subs.get(name) { + if existing != actual { + return Err(TypeError::new( + format!( + "conflicting type arguments for `{}`: {existing:?} vs {actual:?}", + name + ), + span, + )); + } + } else { + subs.insert(name.clone(), actual.clone()); + } + Ok(()) + } + Ty::Builtin(_) => { + if expected != actual { + return Err(TypeError::new( + format!("type mismatch: expected {expected:?}, found {actual:?}"), + span, + )); + } + Ok(()) + } + Ty::Ptr(inner) => match actual { + Ty::Ptr(actual_inner) => match_type_params(inner, actual_inner, subs, span), + _ => Err(TypeError::new( + format!("type mismatch: expected {expected:?}, found {actual:?}"), + span, + )), + }, + Ty::Ref(inner) => match actual { + Ty::Ref(actual_inner) => match_type_params(inner, actual_inner, subs, span), + _ => Err(TypeError::new( + format!("type mismatch: expected {expected:?}, found {actual:?}"), + span, + )), + }, + Ty::Path(name, args) => match actual { + Ty::Path(actual_name, actual_args) => { + if name != actual_name || args.len() != actual_args.len() { + return Err(TypeError::new( + format!("type mismatch: expected {expected:?}, found {actual:?}"), + span, + )); + } + for (arg, actual_arg) in args.iter().zip(actual_args.iter()) { + match_type_params(arg, actual_arg, subs, span)?; + } + Ok(()) + } + _ => Err(TypeError::new( + format!("type mismatch: expected {expected:?}, found {actual:?}"), + span, + )), + }, + } +} + +pub(super) fn 
build_call_substitution( + sig: &FunctionSig, + explicit_args: &[Ty], + inferred: HashMap, + span: Span, +) -> Result, TypeError> { + if sig.type_params.is_empty() { + if !explicit_args.is_empty() { + return Err(TypeError::new( + format!( + "function does not accept type arguments (found {})", + explicit_args.len() + ), + span, + )); + } + return Ok(inferred); + } + + let mut subs = inferred; + let mut remaining = Vec::new(); + for name in &sig.type_params { + if !subs.contains_key(name) { + remaining.push(name.clone()); + } + } + if explicit_args.len() != remaining.len() { + return Err(TypeError::new( + format!( + "expected {} type argument(s), found {}", + remaining.len(), + explicit_args.len() + ), + span, + )); + } + for (name, arg) in remaining.into_iter().zip(explicit_args.iter()) { + subs.insert(name, arg.clone()); + } + Ok(subs) +} + +pub(super) fn enforce_type_param_bounds( + sig: &FunctionSig, + subs: &HashMap, + trait_impls: &[TraitImplInfo], + span: Span, +) -> Result<(), TypeError> { + for (param, bounds) in &sig.type_param_bounds { + let Some(actual) = subs.get(param) else { + continue; + }; + for bound in bounds { + if type_satisfies_trait(actual, bound, trait_impls, span).is_err() { + return Err(TypeError::new( + format!("type parameter `{param}` does not implement `{bound}`"), + span, + )); + } + } + } + Ok(()) +} + +pub(super) fn type_satisfies_trait( + actual: &Ty, + trait_name: &str, + trait_impls: &[TraitImplInfo], + span: Span, +) -> Result<(), TypeError> { + for impl_info in trait_impls { + if impl_info.trait_name != trait_name { + continue; + } + let mut subs = HashMap::new(); + if match_type_params(&impl_info.target_ty, actual, &mut subs, span).is_ok() { + return Ok(()); + } + } + Err(TypeError::new( + format!("type `{actual:?}` does not implement `{trait_name}`"), + span, + )) +} diff --git a/capc/src/typeck/lower.rs b/capc/src/typeck/lower.rs index 04ff8e9..f5e0c63 100644 --- a/capc/src/typeck/lower.rs +++ b/capc/src/typeck/lower.rs @@ 
-19,6 +19,11 @@ use super::{ }; /// Context for HIR lowering (uses the type checker as source of truth). +#[derive(Default)] +struct LoweringScope { + locals: HashMap, +} + struct LoweringCtx<'a> { functions: &'a HashMap, structs: &'a HashMap, @@ -29,10 +34,7 @@ struct LoweringCtx<'a> { module_name: &'a str, type_tables: Option<&'a FunctionTypeTables>, type_table: Option<&'a TypeTable>, - /// Maps variable names to their LocalId - local_map: HashMap, - /// Maps variable names to their types (needed for type checking during lowering) - local_types: HashMap, + scopes: Vec, local_counter: usize, type_params: HashSet, type_param_bounds: HashMap>, @@ -59,31 +61,54 @@ impl<'a> LoweringCtx<'a> { module_name, type_tables, type_table: None, - local_map: HashMap::new(), - local_types: HashMap::new(), + scopes: vec![LoweringScope::default()], local_counter: 0, type_params: HashSet::new(), type_param_bounds: HashMap::new(), } } + fn reset_function_scopes(&mut self) { + self.scopes.clear(); + self.scopes.push(LoweringScope::default()); + } + fn fresh_local(&mut self, name: String, ty: Ty) -> LocalId { let id = LocalId(self.local_counter); self.local_counter += 1; - self.local_map.insert(name.clone(), id); - self.local_types.insert(name, ty); + if self.scopes.is_empty() { + self.scopes.push(LoweringScope::default()); + } + if let Some(scope) = self.scopes.last_mut() { + scope.locals.insert(name, (id, ty)); + } id } fn get_local(&self, name: &str) -> Option { - self.local_map.get(name).copied() + self.lookup_local(name).map(|(id, _)| id) } - /// Push a new scope for name bindings (shadowing is not yet modeled here). 
- fn push_scope(&mut self) {} + fn has_local(&self, name: &str) -> bool { + self.lookup_local(name).is_some() + } + + fn lookup_local(&self, name: &str) -> Option<(LocalId, &Ty)> { + self.scopes + .iter() + .rev() + .find_map(|scope| scope.locals.get(name).map(|(id, ty)| (*id, ty))) + } + + fn push_scope(&mut self) { + self.scopes.push(LoweringScope::default()); + } - /// Pop the most recent scope (placeholder for future scope stacks). - fn pop_scope(&mut self) {} + fn pop_scope(&mut self) { + if self.scopes.len() > 1 { + self.scopes.pop(); + } + } } /// Lower a fully type-checked module into HIR. @@ -235,8 +260,7 @@ pub(super) fn lower_module( /// Lower a type-checked function into HIR, assigning LocalIds. fn lower_function(func: &Function, ctx: &mut LoweringCtx) -> Result { ctx.local_counter = 0; - ctx.local_map.clear(); - ctx.local_types.clear(); + ctx.reset_function_scopes(); let is_runtime_intrinsic = crate::runtime_intrinsics::is_runtime_intrinsic(ctx.module_name, &func.name.item); ctx.type_table = ctx @@ -470,7 +494,7 @@ fn lower_defer_stmt( let base_is_local = if let Some(base_name) = get_leftmost_segment(&method_call.receiver) { - ctx.local_types.contains_key(base_name) + ctx.has_local(base_name) } else { true }; @@ -490,7 +514,12 @@ fn lower_defer_stmt( }; if !base_is_local && is_function { - let path = path_call.expect("path exists for function call"); + let Some(path) = path_call else { + return Err(TypeError::new( + "defer receiver path could not be resolved".to_string(), + method_call.span, + )); + }; let mut args = Vec::with_capacity(method_call.args.len()); for arg in &method_call.args { args.push(capture_defer_expr(arg, ctx, ret_ty, &mut stmts)?); @@ -787,7 +816,7 @@ fn lower_expr(expr: &Expr, ctx: &mut LoweringCtx, ret_ty: &Ty) -> Result { if path.segments.len() == 1 { let name = &path.segments[0].item; - if ctx.local_types.contains_key(name) { + if ctx.has_local(name) { let local_id = ctx.get_local(name).unwrap(); return 
Ok(HirExpr::Local(HirLocal { local_id, @@ -920,7 +949,7 @@ fn lower_expr(expr: &Expr, ctx: &mut LoweringCtx, ret_ty: &Ty) -> Result Result, TypeError> = method_call @@ -1096,7 +1130,7 @@ fn lower_expr(expr: &Expr, ctx: &mut LoweringCtx, ret_ty: &Ty) -> Result, - extern_functions: Vec, - structs: Vec, - enums: Vec, -} - -impl ModuleOut { - fn new(name: String) -> Self { - Self { - name, - functions: Vec::new(), - extern_functions: Vec::new(), - structs: Vec::new(), - enums: Vec::new(), - } - } -} - -#[derive(Clone)] -struct FunctionInstance { - module: String, - base_name: String, - type_args: Vec, -} - struct MonoCtx { program: HirProgram, functions: HashMap, @@ -979,240 +957,3 @@ impl MonoCtx { } } } - -impl From for HirModule { - fn from(module: ModuleOut) -> Self { - Self { - name: module.name, - functions: module.functions, - extern_functions: module.extern_functions, - structs: module.structs, - enums: module.enums, - } - } -} - -trait GenericSig { - fn name(&self) -> &str; - fn type_params(&self) -> &Vec; - fn params(&self) -> &Vec; -} - -impl GenericSig for HirFunction { - fn name(&self) -> &str { - &self.name - } - - fn type_params(&self) -> &Vec { - &self.type_params - } - - fn params(&self) -> &Vec { - &self.params - } -} - -impl GenericSig for HirExternFunction { - fn name(&self) -> &str { - &self.name - } - - fn type_params(&self) -> &Vec { - &self.type_params - } - - fn params(&self) -> &Vec { - &self.params - } -} - -fn split_name(module: &str, name: &str) -> (String, String, bool) { - if let Some((mod_part, type_part)) = name.rsplit_once('.') { - (mod_part.to_string(), type_part.to_string(), true) - } else { - (module.to_string(), name.to_string(), false) - } -} - -/// Find the qualified name and module for a type, searching all modules if needed. -/// Returns (module, qualified_name) or None if not found. 
-fn find_type_in_all_modules<'a>( - name: &str, - current_module: &str, - structs: &'a HashMap, - enums: &'a HashMap, -) -> Option<(String, String)> { - // First try the direct qualified lookup - let (type_module, base_name, qualified) = split_name(current_module, name); - let qualified_key = qualify(&type_module, &base_name); - if structs.contains_key(&qualified_key) || enums.contains_key(&qualified_key) { - return Some((type_module, qualified_key)); - } - - // If the name was already qualified or we found it, we're done - if qualified { - return None; - } - - // Search all modules for this unqualified type name - for key in structs.keys() { - if key.ends_with(&format!(".{}", base_name)) { - let mod_part = key.rsplit_once('.').map(|(m, _)| m).unwrap_or(""); - return Some((mod_part.to_string(), key.clone())); - } - } - for key in enums.keys() { - if key.ends_with(&format!(".{}", base_name)) { - let mod_part = key.rsplit_once('.').map(|(m, _)| m).unwrap_or(""); - return Some((mod_part.to_string(), key.clone())); - } - } - - None -} - -fn qualify(module: &str, name: &str) -> String { - format!("{module}.{name}") -} - -fn function_symbol(module: &str, name: &str) -> String { - format!("capable_{}", qualify(module, name).replace('.', "_")) -} - -fn build_substitution( - params: &[String], - args: &[Ty], - span: Span, -) -> Result, TypeError> { - if params.len() != args.len() { - return Err(TypeError::new( - format!( - "expected {} type argument(s), found {}", - params.len(), - args.len() - ), - span, - )); - } - let mut map = HashMap::new(); - for (param, arg) in params.iter().zip(args.iter()) { - map.insert(param.clone(), arg.clone()); - } - Ok(map) -} - -fn substitute_ty(ty: &Ty, subs: &HashMap) -> Ty { - match ty { - Ty::Param(name) => subs.get(name).cloned().unwrap_or_else(|| ty.clone()), - Ty::Builtin(_) => ty.clone(), - Ty::Ptr(inner) => Ty::Ptr(Box::new(substitute_ty(inner, subs))), - Ty::Ref(inner) => Ty::Ref(Box::new(substitute_ty(inner, subs))), - 
Ty::Path(name, args) => Ty::Path( - name.clone(), - args.iter().map(|arg| substitute_ty(arg, subs)).collect(), - ), - } -} - -fn match_type_params( - expected: &Ty, - actual: &Ty, - subs: &mut HashMap, - span: Span, -) -> Result<(), TypeError> { - match expected { - Ty::Param(name) => { - if let Some(existing) = subs.get(name) { - if existing != actual { - return Err(TypeError::new( - format!( - "conflicting type arguments for `{}`: {existing:?} vs {actual:?}", - name - ), - span, - )); - } - } else { - subs.insert(name.clone(), actual.clone()); - } - Ok(()) - } - Ty::Builtin(_) => { - if expected != actual { - return Err(TypeError::new( - format!("type mismatch: expected {expected:?}, found {actual:?}"), - span, - )); - } - Ok(()) - } - Ty::Ptr(inner) => match actual { - Ty::Ptr(actual_inner) => match_type_params(inner, actual_inner, subs, span), - _ => Err(TypeError::new( - format!("type mismatch: expected {expected:?}, found {actual:?}"), - span, - )), - }, - Ty::Ref(inner) => match actual { - Ty::Ref(actual_inner) => match_type_params(inner, actual_inner, subs, span), - _ => Err(TypeError::new( - format!("type mismatch: expected {expected:?}, found {actual:?}"), - span, - )), - }, - Ty::Path(name, args) => match actual { - Ty::Path(actual_name, actual_args) => { - if name != actual_name || args.len() != actual_args.len() { - return Err(TypeError::new( - format!("type mismatch: expected {expected:?}, found {actual:?}"), - span, - )); - } - for (arg, actual_arg) in args.iter().zip(actual_args.iter()) { - match_type_params(arg, actual_arg, subs, span)?; - } - Ok(()) - } - _ => Err(TypeError::new( - format!("type mismatch: expected {expected:?}, found {actual:?}"), - span, - )), - }, - } -} - -fn mangle_name(base: &str, args: &[Ty]) -> String { - if args.is_empty() { - return base.to_string(); - } - let suffix = args.iter().map(mangle_type).collect::>().join("__"); - format!("{base}__{suffix}") -} - -fn mangle_type(ty: &Ty) -> String { - match ty { - Ty::Builtin(b) 
=> match b { - crate::typeck::BuiltinType::I32 => "i32".to_string(), - crate::typeck::BuiltinType::I64 => "i64".to_string(), - crate::typeck::BuiltinType::U32 => "u32".to_string(), - crate::typeck::BuiltinType::U8 => "u8".to_string(), - crate::typeck::BuiltinType::Bool => "bool".to_string(), - crate::typeck::BuiltinType::Unit => "unit".to_string(), - crate::typeck::BuiltinType::Never => "never".to_string(), - }, - Ty::Ptr(inner) => format!("ptr_{}", mangle_type(inner)), - Ty::Ref(inner) => format!("ref_{}", mangle_type(inner)), - Ty::Param(name) => format!("param_{name}"), - Ty::Path(name, args) => { - if name == "sys.string.string" || name == "string" { - return "string".to_string(); - } - let mut base = name.replace('.', "_"); - if !args.is_empty() { - let suffix = args.iter().map(mangle_type).collect::>().join("__"); - base = format!("{base}__{suffix}"); - } - base - } - } -} diff --git a/capc/src/typeck/monomorphize/support.rs b/capc/src/typeck/monomorphize/support.rs new file mode 100644 index 0000000..11858b8 --- /dev/null +++ b/capc/src/typeck/monomorphize/support.rs @@ -0,0 +1,266 @@ +use std::collections::HashMap; + +use crate::ast::Span; +use crate::error::TypeError; +use crate::hir::*; +use crate::typeck::Ty; + +#[derive(Clone)] +pub(super) struct ModuleOut { + pub(super) name: String, + pub(super) functions: Vec, + pub(super) extern_functions: Vec, + pub(super) structs: Vec, + pub(super) enums: Vec, +} + +impl ModuleOut { + pub(super) fn new(name: String) -> Self { + Self { + name, + functions: Vec::new(), + extern_functions: Vec::new(), + structs: Vec::new(), + enums: Vec::new(), + } + } +} + +#[derive(Clone)] +pub(super) struct FunctionInstance { + pub(super) module: String, + pub(super) base_name: String, + pub(super) type_args: Vec, +} + +impl From for HirModule { + fn from(module: ModuleOut) -> Self { + Self { + name: module.name, + functions: module.functions, + extern_functions: module.extern_functions, + structs: module.structs, + enums: 
module.enums, + } + } +} + +pub(super) trait GenericSig { + fn name(&self) -> &str; + fn type_params(&self) -> &Vec; + fn params(&self) -> &Vec; +} + +impl GenericSig for HirFunction { + fn name(&self) -> &str { + &self.name + } + + fn type_params(&self) -> &Vec { + &self.type_params + } + + fn params(&self) -> &Vec { + &self.params + } +} + +impl GenericSig for HirExternFunction { + fn name(&self) -> &str { + &self.name + } + + fn type_params(&self) -> &Vec { + &self.type_params + } + + fn params(&self) -> &Vec { + &self.params + } +} + +pub(super) fn split_name(module: &str, name: &str) -> (String, String, bool) { + if let Some((mod_part, type_part)) = name.rsplit_once('.') { + (mod_part.to_string(), type_part.to_string(), true) + } else { + (module.to_string(), name.to_string(), false) + } +} + +pub(super) fn find_type_in_all_modules( + name: &str, + current_module: &str, + structs: &HashMap, + enums: &HashMap, +) -> Option<(String, String)> { + let (type_module, base_name, qualified) = split_name(current_module, name); + let qualified_key = qualify(&type_module, &base_name); + if structs.contains_key(&qualified_key) || enums.contains_key(&qualified_key) { + return Some((type_module, qualified_key)); + } + + if qualified { + return None; + } + + for key in structs.keys() { + if key.ends_with(&format!(".{}", base_name)) { + let mod_part = key.rsplit_once('.').map(|(m, _)| m).unwrap_or(""); + return Some((mod_part.to_string(), key.clone())); + } + } + for key in enums.keys() { + if key.ends_with(&format!(".{}", base_name)) { + let mod_part = key.rsplit_once('.').map(|(m, _)| m).unwrap_or(""); + return Some((mod_part.to_string(), key.clone())); + } + } + + None +} + +pub(super) fn qualify(module: &str, name: &str) -> String { + format!("{module}.{name}") +} + +pub(super) fn function_symbol(module: &str, name: &str) -> String { + format!("capable_{}", qualify(module, name).replace('.', "_")) +} + +pub(super) fn build_substitution( + params: &[String], + args: &[Ty], 
+ span: Span, +) -> Result, TypeError> { + if params.len() != args.len() { + return Err(TypeError::new( + format!( + "expected {} type argument(s), found {}", + params.len(), + args.len() + ), + span, + )); + } + let mut map = HashMap::new(); + for (param, arg) in params.iter().zip(args.iter()) { + map.insert(param.clone(), arg.clone()); + } + Ok(map) +} + +pub(super) fn substitute_ty(ty: &Ty, subs: &HashMap) -> Ty { + match ty { + Ty::Param(name) => subs.get(name).cloned().unwrap_or_else(|| ty.clone()), + Ty::Builtin(_) => ty.clone(), + Ty::Ptr(inner) => Ty::Ptr(Box::new(substitute_ty(inner, subs))), + Ty::Ref(inner) => Ty::Ref(Box::new(substitute_ty(inner, subs))), + Ty::Path(name, args) => Ty::Path( + name.clone(), + args.iter().map(|arg| substitute_ty(arg, subs)).collect(), + ), + } +} + +pub(super) fn match_type_params( + expected: &Ty, + actual: &Ty, + subs: &mut HashMap, + span: Span, +) -> Result<(), TypeError> { + match expected { + Ty::Param(name) => { + if let Some(existing) = subs.get(name) { + if existing != actual { + return Err(TypeError::new( + format!( + "conflicting type arguments for `{}`: {existing:?} vs {actual:?}", + name + ), + span, + )); + } + } else { + subs.insert(name.clone(), actual.clone()); + } + Ok(()) + } + Ty::Builtin(_) => { + if expected != actual { + return Err(TypeError::new( + format!("type mismatch: expected {expected:?}, found {actual:?}"), + span, + )); + } + Ok(()) + } + Ty::Ptr(inner) => match actual { + Ty::Ptr(actual_inner) => match_type_params(inner, actual_inner, subs, span), + _ => Err(TypeError::new( + format!("type mismatch: expected {expected:?}, found {actual:?}"), + span, + )), + }, + Ty::Ref(inner) => match actual { + Ty::Ref(actual_inner) => match_type_params(inner, actual_inner, subs, span), + _ => Err(TypeError::new( + format!("type mismatch: expected {expected:?}, found {actual:?}"), + span, + )), + }, + Ty::Path(name, args) => match actual { + Ty::Path(actual_name, actual_args) => { + if name != 
actual_name || args.len() != actual_args.len() { + return Err(TypeError::new( + format!("type mismatch: expected {expected:?}, found {actual:?}"), + span, + )); + } + for (arg, actual_arg) in args.iter().zip(actual_args.iter()) { + match_type_params(arg, actual_arg, subs, span)?; + } + Ok(()) + } + _ => Err(TypeError::new( + format!("type mismatch: expected {expected:?}, found {actual:?}"), + span, + )), + }, + } +} + +pub(super) fn mangle_name(base: &str, args: &[Ty]) -> String { + if args.is_empty() { + return base.to_string(); + } + let suffix = args.iter().map(mangle_type).collect::>().join("__"); + format!("{base}__{suffix}") +} + +pub(super) fn mangle_type(ty: &Ty) -> String { + match ty { + Ty::Builtin(b) => match b { + crate::typeck::BuiltinType::I32 => "i32".to_string(), + crate::typeck::BuiltinType::I64 => "i64".to_string(), + crate::typeck::BuiltinType::U32 => "u32".to_string(), + crate::typeck::BuiltinType::U8 => "u8".to_string(), + crate::typeck::BuiltinType::Bool => "bool".to_string(), + crate::typeck::BuiltinType::Unit => "unit".to_string(), + crate::typeck::BuiltinType::Never => "never".to_string(), + }, + Ty::Ptr(inner) => format!("ptr_{}", mangle_type(inner)), + Ty::Ref(inner) => format!("ref_{}", mangle_type(inner)), + Ty::Param(name) => format!("param_{name}"), + Ty::Path(name, args) => { + if name == "sys.string.string" || name == "string" { + return "string".to_string(); + } + let mut base = name.replace('.', "_"); + if !args.is_empty() { + let suffix = args.iter().map(mangle_type).collect::>().join("__"); + base = format!("{base}__{suffix}"); + } + base + } + } +} From 9a6096b499d77253a62b6f117a15925d06286f7d Mon Sep 17 00:00:00 2001 From: Jordan Mecom Date: Thu, 26 Mar 2026 10:45:50 -0700 Subject: [PATCH 11/17] Improve compiler diagnostics and runtime bindings --- COMPILER_CLEANUP.md | 125 ++--- FUTURE_COMPILER_WORK.md | 43 ++ capc/src/codegen/intrinsics.rs | 44 +- capc/src/codegen/intrinsics/io.rs | 557 ---------------------- 
capc/src/codegen/intrinsics/memory.rs | 185 ------- capc/src/codegen/mod.rs | 100 +++- capc/src/driver.rs | 168 ++++++- capc/src/error.rs | 45 +- capc/src/loader.rs | 113 +++-- capc/src/main.rs | 8 +- capc/src/runtime_intrinsics.rs | 663 +++++++++++++++++++++++--- capc/src/typeck/mod.rs | 22 +- capc/tests/cli.rs | 61 +++ 13 files changed, 1106 insertions(+), 1028 deletions(-) create mode 100644 FUTURE_COMPILER_WORK.md delete mode 100644 capc/src/codegen/intrinsics/io.rs delete mode 100644 capc/src/codegen/intrinsics/memory.rs diff --git a/COMPILER_CLEANUP.md b/COMPILER_CLEANUP.md index a05ef8a..509ca5c 100644 --- a/COMPILER_CLEANUP.md +++ b/COMPILER_CLEANUP.md @@ -1,107 +1,48 @@ -# Compiler Cleanup Plan +# Compiler Cleanup This pass is complete. -The goal was not a rewrite. The goal was to remove the worst structural -accidents that had built up in `capc/` while keeping language behavior stable. - -## Outcomes - -- lowering now has real lexical scopes instead of fake `push_scope` / - `pop_scope` placeholders -- match / `try` control-flow handling is more uniform across type checking and - codegen, including loop-aware `try let ... 
else { continue }` cases -- `typeck/check.rs` is split by concern -- `parser.rs` is split by syntactic domain -- monomorphization helper logic is split out of the main pass -- runtime intrinsic registration is grouped by domain instead of one large file -- normal compiler paths no longer rely on the most obvious `expect(...)` / - `unreachable!()` traps in lowering, parser expression parsing, and loop - codegen - -## Final Status - -- [x] First cleanup pass: stable expression identity, desugar pass, first - `typeck` split, first codegen split, shared driver pipeline -- [x] Milestone 1: real lowering scopes -- [x] Milestone 2: unify match and `try` control-flow handling -- [x] Milestone 3: split `typeck/check.rs` -- [x] Milestone 4: split `codegen/emit.rs` further -- [x] Milestone 5: split `parser.rs` -- [x] Milestone 6: split `typeck/monomorphize.rs` and `codegen/intrinsics.rs` -- [x] Milestone 7: remove remaining internal panics on normal compiler paths +The goal was to remove the worst architectural accidents in `capc/` without +turning the compiler into a rewrite project. ## What Landed -### Lowering - -- `capc/src/typeck/lower.rs` - - lowering locals now live in real scoped stacks - - synthetic bindings and ordinary locals share the same scope machinery - - path-based method fallback no longer uses unchecked `expect(...)` - -### Type Checking - -- `capc/src/typeck/check.rs` -- `capc/src/typeck/check/stmt.rs` -- `capc/src/typeck/check/match_check.rs` -- `capc/src/typeck/check/calls.rs` -- `capc/src/typeck/check/type_params.rs` - -The root checker is now a coordinator. Statement checking, match checking, -call/method-call checking, and generic substitution logic are separated. 
- -### Parsing +- stable expression identity and an explicit desugar pass are already in place +- lowering now has real lexical scopes instead of fake scope helpers +- parser, checker, monomorphization support, and codegen helpers are split by + concern instead of living in a few giant files +- multi-file diagnostics now attach the correct source file for imported-module + parse and type-check failures, and the driver has the plumbing needed for + codegen diagnostics to do the same +- runtime-backed stdlib functions now come from one shared runtime-binding + registry instead of a stringly boolean table in one place and a separate + signature registry in another +- the immediate MIR question is settled for now: do not add a full MIR/CFG + layer yet; if codegen needs another normalization step, start with a smaller + control-flow-normalized HIR pass first -- `capc/src/parser.rs` -- `capc/src/parser/items.rs` -- `capc/src/parser/stmts.rs` -- `capc/src/parser/exprs.rs` -- `capc/src/parser/patterns.rs` -- `capc/src/parser/types.rs` - -The root parser now owns shared state and common helpers. Items, statements, -expressions, patterns, and types live in separate modules. - -### Codegen - -- `capc/src/codegen/emit.rs` -- `capc/src/codegen/emit/defer.rs` -- `capc/src/codegen/emit/match_lowering.rs` -- `capc/src/codegen/emit/runtime.rs` -- `capc/src/codegen/emit/arith.rs` - -`emit.rs` is still the largest single file in the compiler, but the helper -domains that caused the most accidental coupling are now extracted: -defer handling, match lowering, runtime-wrapper lowering, and arithmetic / -trap helpers. - -### Monomorphization - -- `capc/src/typeck/monomorphize.rs` -- `capc/src/typeck/monomorphize/support.rs` - -Support types and substitution / mangling helpers are split out of the main -monomorphization pass. 
- -### Intrinsics - -- `capc/src/codegen/intrinsics.rs` -- `capc/src/codegen/intrinsics/io.rs` -- `capc/src/codegen/intrinsics/memory.rs` +## Final Status -The runtime intrinsic registry is now grouped by domain instead of one large -table. +- [x] Stable expression identity and explicit desugaring +- [x] Real lowering scopes +- [x] Match / `try` control-flow cleanup +- [x] `typeck` split by concern +- [x] Parser split by syntactic domain +- [x] Monomorphization helper split +- [x] Codegen helper split +- [x] Multi-file diagnostics for imported-module compiler errors +- [x] Shared runtime interface registry +- [x] Decision on MIR vs narrower normalization ## Result -The compiler is still direct, but it is less brittle: +The compiler is still direct, but the worst remaining problems are now +deliberate tradeoffs instead of accidental ones: -- fewer giant coordination files -- fewer phase-boundary accidents -- fewer panic-style assumptions on ordinary compile paths -- clearer places to change parser, checker, lowering, monomorphization, or - runtime-intrinsic behavior without touching unrelated logic +- diagnostics are no longer effectively entry-file-only +- runtime-backed stdlib functions have one source of truth +- the next control-flow step is bounded and explicit instead of “probably add + MIR someday” ## Verification diff --git a/FUTURE_COMPILER_WORK.md b/FUTURE_COMPILER_WORK.md new file mode 100644 index 0000000..4750f81 --- /dev/null +++ b/FUTURE_COMPILER_WORK.md @@ -0,0 +1,43 @@ +# Future Compiler Work + +These items are intentionally deferred. They are real architectural projects, +not cleanup chores. + +## High-Risk Work + +- Add a real MIR / CFG layer. + Current decision: do not do this yet. If codegen needs another simplification + step, first try a smaller control-flow-normalized HIR pass that lowers + `match`, `try`, `defer`, and loop exits into a flatter form without adding a + whole new compiler IR family. 
+ +- Change the typecheck / lowering contract. + Current decision: do not rewrite this boundary yet. The compiler is stable + enough now that the next version of this work should only happen if a + normalized-HIR pass or incremental compilation effort makes the current + side-table contract too expensive to keep. + +- Replace the current build / link pipeline. + Current decision: do not tackle this in the cleanup pass. The compiler still + shells out through `cargo` and `rustc` and writes a small Rust stub at link + time. That is architecturally awkward, but it is isolated and working. It + should become its own project if cross-compilation, packaging, reproducible + builds, or compile-time performance make it worth paying down. + +## Trigger Conditions + +Re-open the work above only if one of these becomes true: + +- codegen complexity starts growing faster than localized helper extraction can + control +- diagnostics or optimization work need an explicit CFG-level representation +- incremental compilation or separate compilation becomes a real goal +- the current shell-driven build pipeline becomes a meaningful product problem + +## Order + +If this work is reopened, the preferred order is: + +1. Try control-flow-normalized HIR before full MIR. +2. Revisit the typecheck / lowering contract only after that. +3. Tackle the build / link pipeline as a separate delivery project. diff --git a/capc/src/codegen/intrinsics.rs b/capc/src/codegen/intrinsics.rs index b5ed617..eefd135 100644 --- a/capc/src/codegen/intrinsics.rs +++ b/capc/src/codegen/intrinsics.rs @@ -5,29 +5,31 @@ //! function is not listed here, the Capable implementation is used instead. //! See `stdlib/README.md` for the stdlib-facing explanation. 
-mod io; -mod memory; - use std::collections::HashMap; -use cranelift_codegen::ir::Type; - use super::{FnInfo, FnSig}; +use crate::runtime_intrinsics::runtime_bindings; -fn runtime_fn(sig: FnSig, abi_sig: Option, symbol: &str) -> FnInfo { - FnInfo { - sig, - abi_sig, - symbol: symbol.to_string(), - runtime_symbol: None, - is_runtime: true, - } -} - -pub fn register_runtime_intrinsics(ptr_ty: Type) -> HashMap { - let mut map = HashMap::new(); - io::register_io_intrinsics(&mut map); - memory::register_memory_intrinsics(&mut map); - let _ = ptr_ty; - map +pub fn register_runtime_intrinsics() -> HashMap { + runtime_bindings() + .iter() + .map(|(key, binding)| { + ( + key.clone(), + FnInfo { + sig: FnSig { + params: binding.sig.params.clone(), + ret: binding.sig.ret.clone(), + }, + abi_sig: binding.abi_sig.as_ref().map(|abi_sig| FnSig { + params: abi_sig.params.clone(), + ret: abi_sig.ret.clone(), + }), + symbol: binding.symbol.to_string(), + runtime_symbol: None, + is_runtime: true, + }, + ) + }) + .collect() } diff --git a/capc/src/codegen/intrinsics/io.rs b/capc/src/codegen/intrinsics/io.rs deleted file mode 100644 index 2bdfc75..0000000 --- a/capc/src/codegen/intrinsics/io.rs +++ /dev/null @@ -1,557 +0,0 @@ -use std::collections::HashMap; - -use crate::abi::AbiType; - -use super::{runtime_fn, FnInfo, FnSig}; - -pub(super) fn register_io_intrinsics(map: &mut HashMap) { - map.insert( - "sys.system.RootCap__mint_console".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle], - ret: AbiType::Handle, - }, - None, - "capable_rt_mint_console", - ), - ); - map.insert( - "sys.system.RootCap__mint_readfs".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle, AbiType::Ptr], - ret: AbiType::Handle, - }, - None, - "capable_rt_mint_readfs", - ), - ); - map.insert( - "sys.system.RootCap__mint_filesystem".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle, AbiType::Ptr], - ret: AbiType::Handle, - }, - None, - 
"capable_rt_mint_filesystem", - ), - ); - map.insert( - "sys.system.RootCap__mint_args".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle], - ret: AbiType::Handle, - }, - None, - "capable_rt_mint_args", - ), - ); - map.insert( - "sys.system.RootCap__mint_stdin".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle], - ret: AbiType::Handle, - }, - None, - "capable_rt_mint_stdin", - ), - ); - map.insert( - "sys.system.RootCap__mint_net".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle], - ret: AbiType::Handle, - }, - None, - "capable_rt_mint_net", - ), - ); - - map.insert( - "sys.args.Args__len".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle], - ret: AbiType::I32, - }, - None, - "capable_rt_args_len", - ), - ); - map.insert( - "sys.args.Args__at".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle, AbiType::I32], - ret: AbiType::Result(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - }, - Some(FnSig { - params: vec![ - AbiType::Handle, - AbiType::I32, - AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - ], - ret: AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - }), - "capable_rt_args_at", - ), - ); - - map.insert( - "sys.stdin.Stdin__read_to_string_with_alloc".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle, AbiType::Handle], - ret: AbiType::Result(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - }, - Some(FnSig { - params: vec![ - AbiType::Handle, - AbiType::Handle, - AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - ], - ret: AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - }), - "capable_rt_read_stdin_to_string", - ), - ); - - map.insert( - "sys.net.Net__listen".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle, AbiType::Ptr, AbiType::I32], - ret: AbiType::Result(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - }, - Some(FnSig { - params: vec![ - 
AbiType::Handle, - AbiType::Ptr, - AbiType::I32, - AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - ], - ret: AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - }), - "capable_rt_net_listen", - ), - ); - map.insert( - "sys.net.Net__connect".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle, AbiType::Ptr, AbiType::I32], - ret: AbiType::Result(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - }, - Some(FnSig { - params: vec![ - AbiType::Handle, - AbiType::Ptr, - AbiType::I32, - AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - ], - ret: AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - }), - "capable_rt_net_connect", - ), - ); - map.insert( - "sys.net.TcpListener__accept".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle], - ret: AbiType::Result(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - }, - Some(FnSig { - params: vec![ - AbiType::Handle, - AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - ], - ret: AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - }), - "capable_rt_net_accept", - ), - ); - map.insert( - "sys.net.TcpListener__close".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle], - ret: AbiType::Unit, - }, - None, - "capable_rt_net_listener_close", - ), - ); - map.insert( - "sys.net.TcpConn__read_to_string_with_alloc".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle, AbiType::Handle], - ret: AbiType::Result(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - }, - Some(FnSig { - params: vec![ - AbiType::Handle, - AbiType::Handle, - AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - ], - ret: AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - }), - "capable_rt_net_read_to_string", - ), - ); - map.insert( - "sys.net.TcpConn__read_with_alloc".to_string(), - runtime_fn( - FnSig { - params: 
vec![AbiType::Handle, AbiType::Handle, AbiType::I32], - ret: AbiType::Result(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - }, - Some(FnSig { - params: vec![ - AbiType::Handle, - AbiType::Handle, - AbiType::I32, - AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - ], - ret: AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - }), - "capable_rt_net_read", - ), - ); - map.insert( - "sys.net.TcpConn__write".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle, AbiType::Ptr], - ret: AbiType::Result(Box::new(AbiType::Unit), Box::new(AbiType::I32)), - }, - Some(FnSig { - params: vec![ - AbiType::Handle, - AbiType::Ptr, - AbiType::ResultOut(Box::new(AbiType::Unit), Box::new(AbiType::I32)), - ], - ret: AbiType::ResultOut(Box::new(AbiType::Unit), Box::new(AbiType::I32)), - }), - "capable_rt_net_write", - ), - ); - map.insert( - "sys.net.TcpConn__close".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle], - ret: AbiType::Unit, - }, - None, - "capable_rt_net_close", - ), - ); - - map.insert( - "sys.console.Console__println".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle, AbiType::Ptr], - ret: AbiType::Unit, - }, - None, - "capable_rt_console_println", - ), - ); - map.insert( - "sys.console.Console__print".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle, AbiType::Ptr], - ret: AbiType::Unit, - }, - None, - "capable_rt_console_print", - ), - ); - map.insert( - "sys.console.Console__print_i32".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle, AbiType::I32], - ret: AbiType::Unit, - }, - None, - "capable_rt_console_print_i32", - ), - ); - map.insert( - "sys.console.Console__println_i32".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle, AbiType::I32], - ret: AbiType::Unit, - }, - None, - "capable_rt_console_println_i32", - ), - ); - map.insert( - "sys.console.Console__assert".to_string(), - runtime_fn( - FnSig { - params: 
vec![AbiType::Handle, AbiType::Bool, AbiType::Ptr], - ret: AbiType::Unit, - }, - None, - "capable_rt_assert", - ), - ); - - map.insert( - "sys.fs.ReadFS__read_to_string_with_alloc".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle, AbiType::Handle, AbiType::Ptr], - ret: AbiType::Result(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - }, - Some(FnSig { - params: vec![ - AbiType::Handle, - AbiType::Handle, - AbiType::Ptr, - AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - ], - ret: AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - }), - "capable_rt_fs_read_to_string", - ), - ); - map.insert( - "sys.fs.ReadFS__read_bytes_with_alloc".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle, AbiType::Handle, AbiType::Ptr], - ret: AbiType::Result(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - }, - Some(FnSig { - params: vec![ - AbiType::Handle, - AbiType::Handle, - AbiType::Ptr, - AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - ], - ret: AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - }), - "capable_rt_fs_read_bytes", - ), - ); - map.insert( - "sys.fs.ReadFS__list_dir_with_alloc".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle, AbiType::Handle, AbiType::Ptr], - ret: AbiType::Result(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - }, - Some(FnSig { - params: vec![ - AbiType::Handle, - AbiType::Handle, - AbiType::Ptr, - AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - ], - ret: AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - }), - "capable_rt_fs_list_dir", - ), - ); - map.insert( - "sys.fs.ReadFS__exists".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle, AbiType::Ptr], - ret: AbiType::Bool, - }, - None, - "capable_rt_fs_exists", - ), - ); - map.insert( - "sys.fs.ReadFS__close".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle], - ret: 
AbiType::Unit, - }, - None, - "capable_rt_fs_readfs_close", - ), - ); - map.insert( - "sys.fs.Filesystem__root_dir".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle], - ret: AbiType::Handle, - }, - None, - "capable_rt_fs_root_dir", - ), - ); - map.insert( - "sys.fs.Filesystem__close".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle], - ret: AbiType::Unit, - }, - None, - "capable_rt_fs_filesystem_close", - ), - ); - map.insert( - "sys.fs.Dir__subdir".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle, AbiType::Ptr], - ret: AbiType::Handle, - }, - None, - "capable_rt_fs_subdir", - ), - ); - map.insert( - "sys.fs.Dir__open_read".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle, AbiType::Ptr], - ret: AbiType::Handle, - }, - None, - "capable_rt_fs_open_read", - ), - ); - map.insert( - "sys.fs.Dir__read_bytes_with_alloc".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle, AbiType::Handle, AbiType::Ptr], - ret: AbiType::Result(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - }, - Some(FnSig { - params: vec![ - AbiType::Handle, - AbiType::Handle, - AbiType::Ptr, - AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - ], - ret: AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - }), - "capable_rt_fs_dir_read_bytes", - ), - ); - map.insert( - "sys.fs.Dir__read_to_string_with_alloc".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle, AbiType::Handle, AbiType::Ptr], - ret: AbiType::Result(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - }, - Some(FnSig { - params: vec![ - AbiType::Handle, - AbiType::Handle, - AbiType::Ptr, - AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - ], - ret: AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - }), - "capable_rt_fs_dir_read_to_string", - ), - ); - map.insert( - "sys.fs.Dir__list_dir_with_alloc".to_string(), - runtime_fn( - FnSig { - params: 
vec![AbiType::Handle, AbiType::Handle], - ret: AbiType::Result(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - }, - Some(FnSig { - params: vec![ - AbiType::Handle, - AbiType::Handle, - AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - ], - ret: AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), - }), - "capable_rt_fs_dir_list_dir", - ), - ); - map.insert( - "sys.fs.Dir__exists".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle, AbiType::Ptr], - ret: AbiType::Bool, - }, - None, - "capable_rt_fs_dir_exists", - ), - ); - map.insert( - "sys.fs.Dir__close".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle], - ret: AbiType::Unit, - }, - None, - "capable_rt_fs_dir_close", - ), - ); - map.insert( - "sys.fs.FileRead__read_to_string_with_alloc".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle, AbiType::Handle], - ret: AbiType::Result(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - }, - Some(FnSig { - params: vec![ - AbiType::Handle, - AbiType::Handle, - AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - ], - ret: AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), - }), - "capable_rt_fs_file_read_to_string", - ), - ); - map.insert( - "sys.fs.FileRead__close".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle], - ret: AbiType::Unit, - }, - None, - "capable_rt_fs_file_read_close", - ), - ); - map.insert( - "sys.fs.join_with_alloc".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle, AbiType::Ptr, AbiType::Ptr], - ret: AbiType::Ptr, - }, - Some(FnSig { - params: vec![AbiType::Ptr, AbiType::Handle, AbiType::Ptr, AbiType::Ptr], - ret: AbiType::Unit, - }), - "capable_rt_fs_join", - ), - ); -} diff --git a/capc/src/codegen/intrinsics/memory.rs b/capc/src/codegen/intrinsics/memory.rs deleted file mode 100644 index ebc8313..0000000 --- a/capc/src/codegen/intrinsics/memory.rs +++ /dev/null @@ -1,185 +0,0 @@ -use 
std::collections::HashMap; - -use crate::abi::AbiType; - -use super::{runtime_fn, FnInfo, FnSig}; - -pub(super) fn register_memory_intrinsics(map: &mut HashMap) { - map.insert( - "sys.system.RootCap__mint_alloc_default".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle], - ret: AbiType::Handle, - }, - None, - "capable_rt_alloc_default", - ), - ); - map.insert( - "sys.buffer.default_alloc".to_string(), - runtime_fn( - FnSig { - params: vec![], - ret: AbiType::Handle, - }, - None, - "capable_rt_default_alloc", - ), - ); - map.insert( - "sys.buffer.Alloc__malloc".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle, AbiType::I32], - ret: AbiType::Ptr, - }, - None, - "capable_rt_malloc", - ), - ); - map.insert( - "sys.buffer.Alloc__free".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle, AbiType::Ptr], - ret: AbiType::Unit, - }, - None, - "capable_rt_free", - ), - ); - map.insert( - "sys.buffer.Alloc__cast_u8_to_u32".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle, AbiType::Ptr], - ret: AbiType::Ptr, - }, - None, - "capable_rt_cast_u8_to_u32", - ), - ); - map.insert( - "sys.buffer.Alloc__cast_u32_to_u8".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::Handle, AbiType::Ptr], - ret: AbiType::Ptr, - }, - None, - "capable_rt_cast_u32_to_u8", - ), - ); - map.insert( - "sys.bytes.u8__is_whitespace".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::U8], - ret: AbiType::Bool, - }, - None, - "capable_rt_bytes_is_whitespace", - ), - ); - - map.insert( - "sys.math.add_wrap_i32".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::I32, AbiType::I32], - ret: AbiType::I32, - }, - None, - "capable_rt_math_add_wrap_i32", - ), - ); - map.insert( - "sys.math.sub_wrap_i32".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::I32, AbiType::I32], - ret: AbiType::I32, - }, - None, - "capable_rt_math_sub_wrap_i32", - ), - ); - map.insert( - "sys.math.mul_wrap_i32".to_string(), - 
runtime_fn( - FnSig { - params: vec![AbiType::I32, AbiType::I32], - ret: AbiType::I32, - }, - None, - "capable_rt_math_mul_wrap_i32", - ), - ); - map.insert( - "sys.math.add_wrap_u32".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::U32, AbiType::U32], - ret: AbiType::U32, - }, - None, - "capable_rt_math_add_wrap_u32", - ), - ); - map.insert( - "sys.math.sub_wrap_u32".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::U32, AbiType::U32], - ret: AbiType::U32, - }, - None, - "capable_rt_math_sub_wrap_u32", - ), - ); - map.insert( - "sys.math.mul_wrap_u32".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::U32, AbiType::U32], - ret: AbiType::U32, - }, - None, - "capable_rt_math_mul_wrap_u32", - ), - ); - map.insert( - "sys.math.add_wrap_u8".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::U8, AbiType::U8], - ret: AbiType::U8, - }, - None, - "capable_rt_math_add_wrap_u8", - ), - ); - map.insert( - "sys.math.sub_wrap_u8".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::U8, AbiType::U8], - ret: AbiType::U8, - }, - None, - "capable_rt_math_sub_wrap_u8", - ), - ); - map.insert( - "sys.math.mul_wrap_u8".to_string(), - runtime_fn( - FnSig { - params: vec![AbiType::U8, AbiType::U8], - ret: AbiType::U8, - }, - None, - "capable_rt_math_mul_wrap_u8", - ), - ); -} diff --git a/capc/src/codegen/mod.rs b/capc/src/codegen/mod.rs index ff3062b..ef11234 100644 --- a/capc/src/codegen/mod.rs +++ b/capc/src/codegen/mod.rs @@ -20,7 +20,7 @@ use cranelift_frontend::{FunctionBuilder, FunctionBuilderContext}; use cranelift_module::{Linkage, Module as ModuleTrait}; use cranelift_native; use cranelift_object::{ObjectBuilder, ObjectModule}; -use miette::{Diagnostic, SourceSpan}; +use miette::{Diagnostic, NamedSource, SourceSpan}; use thiserror::Error; mod emit; @@ -42,6 +42,9 @@ pub enum CodegenError { Spanned { #[allow(dead_code)] message: String, + module_name: Option, + #[source_code] + source_code: Option>, #[label] span: SourceSpan, 
#[allow(dead_code)] @@ -64,6 +67,8 @@ impl CodegenError { let source_span: SourceSpan = (span.start, span.end.saturating_sub(span.start)).into(); CodegenError::Spanned { message: message.into(), + module_name: None, + source_code: None, span: source_span, span_raw: span, } @@ -80,16 +85,75 @@ impl CodegenError { match self { CodegenError::Spanned { message, + module_name, + source_code, span, span_raw, } => CodegenError::Spanned { message: format_with_context(context, message), + module_name, + source_code, span, span_raw, }, other => CodegenError::Codegen(format_with_context(context, other.to_string())), } } + + pub fn in_module(self, module_name: impl Into) -> Self { + match self { + CodegenError::Spanned { + message, + source_code, + span, + span_raw, + .. + } => CodegenError::Spanned { + message, + module_name: Some(module_name.into()), + source_code, + span, + span_raw, + }, + other => other, + } + } + + pub fn module_name(&self) -> Option<&str> { + match self { + CodegenError::Spanned { module_name, .. } => module_name.as_deref(), + _ => None, + } + } + + pub fn has_source(&self) -> bool { + matches!( + self, + CodegenError::Spanned { + source_code: Some(_), + .. + } + ) + } + + pub fn with_source(self, name: impl Into, source: impl Into) -> Self { + match self { + CodegenError::Spanned { + message, + module_name, + span, + span_raw, + .. + } => CodegenError::Spanned { + message, + module_name, + source_code: Some(NamedSource::new(name.into(), source.into())), + span, + span_raw, + }, + other => other, + } + } } /// Tracks control flow state during code emission. 
@@ -234,7 +298,7 @@ pub fn build_object( module.isa().pointer_type(), )?; - let runtime_intrinsics = register_runtime_intrinsics(module.isa().pointer_type()); + let runtime_intrinsics = register_runtime_intrinsics(); let mut fn_map = HashMap::new(); for module_ref in &program.stdlib { register_user_functions( @@ -338,7 +402,11 @@ pub fn build_object( &func.ret_ty, &enum_index, &struct_layouts, - )?; + ) + .map_err(|err| { + err.in_module(module_name.clone()) + .with_context(format!("in function `{}.{}`", module_name, func.name)) + })?; match value { ValueRepr::Unit => builder.ins().return_(&[]), ValueRepr::Single(val) => builder.ins().return_(&[val]), @@ -351,11 +419,15 @@ pub fn build_object( builder.finalize(); if let Err(err) = cranelift_codegen::verify_function(&ctx.func, module.isa()) { eprintln!("=== IR for {} ===\n{}", func.name, ctx.func.display()); - return Err(CodegenError::Codegen(format!("verifier errors: {err}"))); + return Err(CodegenError::Codegen(format!("verifier errors: {err}")) + .with_context(format!("in function `{}.{}`", module_name, func.name))); } module .define_function(func_id, &mut ctx) - .map_err(|err| CodegenError::Codegen(err.to_string()))?; + .map_err(|err| { + CodegenError::Codegen(err.to_string()) + .with_context(format!("in function `{}.{}`", module_name, func.name)) + })?; continue; } @@ -459,7 +531,11 @@ pub fn build_object( None, // no loop context at function top level &return_lowering, &mut defer_stack, - )?; + ) + .map_err(|err| { + err.in_module(module_name.clone()) + .with_context(format!("in function `{}.{}`", module_name, func.name)) + })?; if flow == Flow::Terminated { terminated = true; break; @@ -484,11 +560,15 @@ pub fn build_object( builder.finalize(); if let Err(err) = cranelift_codegen::verify_function(&ctx.func, module.isa()) { eprintln!("=== IR for {} ===\n{}", func.name, ctx.func.display()); - return Err(CodegenError::Codegen(format!("verifier errors: {err}"))); + return 
Err(CodegenError::Codegen(format!("verifier errors: {err}")) + .with_context(format!("in function `{}.{}`", module_name, func.name))); } module .define_function(func_id, &mut ctx) - .map_err(|err| CodegenError::Codegen(err.to_string()))?; + .map_err(|err| { + CodegenError::Codegen(err.to_string()) + .with_context(format!("in function `{}.{}`", module_name, func.name)) + })?; } } @@ -561,8 +641,8 @@ fn append_ty_returns(signature: &mut Signature, ty: &AbiType, ptr_ty: Type) { } /// Register runtime-backed intrinsics for stdlib symbols. -fn register_runtime_intrinsics(ptr_ty: Type) -> HashMap { - intrinsics::register_runtime_intrinsics(ptr_ty) +fn register_runtime_intrinsics() -> HashMap { + intrinsics::register_runtime_intrinsics() } fn is_non_opaque_struct_type( diff --git a/capc/src/driver.rs b/capc/src/driver.rs index dc5c922..6a9caa2 100644 --- a/capc/src/driver.rs +++ b/capc/src/driver.rs @@ -1,10 +1,20 @@ use std::path::{Path, PathBuf}; +use std::collections::HashMap; -use miette::{miette, NamedSource, Result}; +use miette::{miette, Result}; use crate::ast::{Module, PackageSafety, Path as AstPath}; +use crate::codegen::CodegenError; +use crate::error::TypeError; use crate::hir::HirProgram; -use crate::{build_object, parse_module, type_check_program, validate_module_path, ModuleGraph}; +use crate::loader::{load_module_from_path_with_source, LoadedModule}; +use crate::{build_object, type_check_program, validate_module_path, ModuleGraph}; + +#[derive(Clone)] +pub struct SourceFile { + pub path: PathBuf, + pub source: String, +} #[derive(Clone)] pub struct LoadedProgram { @@ -14,6 +24,7 @@ pub struct LoadedProgram { pub stdlib: Vec, pub user_modules: Vec, pub root: PathBuf, + sources: HashMap, } pub struct LinkOptions<'a> { @@ -24,32 +35,36 @@ pub struct LinkOptions<'a> { } pub fn load_program(path: &Path) -> Result { - let source = std::fs::read_to_string(path) - .map_err(|err| miette!("failed to read {}: {err}", path.display()))?; - let module = 
parse_module(&source).map_err(|err| { - let named = NamedSource::new(path.display().to_string(), source.clone()); - miette::Report::new(err).with_source_code(named) - })?; + let loaded_entry = load_module_from_path_with_source(path).map_err(miette::Report::new)?; + let source = loaded_entry.source.clone(); + let module = loaded_entry.module.clone(); let root = path .parent() .ok_or_else(|| miette!("entry path has no parent directory"))? .to_path_buf(); - validate_module_path(&module, path, &root).map_err(|err| { - let err = err.with_context(format!("while loading module `{}`", module.name)); - miette::Report::new(err) - })?; + validate_module_path(&module, path, &root) + .map_err(|err| miette::Report::new(err.with_source(path.display().to_string(), source.clone())))?; let mut graph = ModuleGraph::new(); - let stdlib = graph.load_stdlib().map_err(miette::Report::new)?; + let stdlib = graph.load_stdlib_with_sources().map_err(miette::Report::new)?; let user_modules = graph - .load_user_modules_transitive(path, &module) + .load_user_modules_transitive_with_sources(path, &module) .map_err(miette::Report::new)?; + let mut sources = HashMap::new(); + insert_source_file(&mut sources, &loaded_entry); + for loaded in &stdlib { + insert_source_file(&mut sources, loaded); + } + for loaded in &user_modules { + insert_source_file(&mut sources, loaded); + } Ok(LoadedProgram { path: path.to_path_buf(), source, module, - stdlib, - user_modules, + stdlib: stdlib.into_iter().map(|loaded| loaded.module).collect(), + user_modules: user_modules.into_iter().map(|loaded| loaded.module).collect(), root, + sources, }) } @@ -57,10 +72,8 @@ pub fn type_check_loaded(loaded: &LoadedProgram, safe_only: bool) -> Result Option<&SourceFile> { + self.sources.get(module_name) + } +} + +fn insert_source_file(target: &mut HashMap, loaded: &LoadedModule) { + target.insert( + loaded.module.name.to_string(), + SourceFile { + path: loaded.path.clone(), + source: loaded.source.clone(), + }, + ); +} + +fn 
attach_type_error_source(mut err: TypeError, loaded: &LoadedProgram) -> TypeError { + if err.has_source() { + return err; + } + if let Some(module_name) = err.module_name() { + if let Some(source) = loaded.source_for_module(module_name) { + err = err.with_source(source.path.display().to_string(), source.source.clone()); + return err; + } + } + err.with_source(loaded.path.display().to_string(), loaded.source.clone()) +} + +fn attach_codegen_error_source(mut err: CodegenError, loaded: &LoadedProgram) -> CodegenError { + if err.has_source() { + return err; + } + if let Some(module_name) = err.module_name() { + if let Some(source) = loaded.source_for_module(module_name) { + err = err.with_source(source.path.display().to_string(), source.source.clone()); + return err; + } + } + err.with_source(loaded.path.display().to_string(), loaded.source.clone()) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::ast::{ExprId, PackageSafety, Path as AstPath, Span, Spanned}; + + fn dummy_module(name: &str) -> Module { + Module { + package: PackageSafety::Safe, + name: AstPath { + id: ExprId(0), + segments: vec![Spanned::new(name.to_string(), Span::new(0, 0))], + span: Span::new(0, 0), + }, + uses: Vec::new(), + items: Vec::new(), + span: Span::new(0, 0), + } + } + + fn loaded_with_helper_source() -> LoadedProgram { + let module = dummy_module("main"); + let helper = dummy_module("helper"); + let mut sources = HashMap::new(); + sources.insert( + "main".to_string(), + SourceFile { + path: PathBuf::from("main.cap"), + source: "package safe\nmodule main\n".to_string(), + }, + ); + sources.insert( + "helper".to_string(), + SourceFile { + path: PathBuf::from("helper.cap"), + source: "package safe\nmodule helper\n".to_string(), + }, + ); + LoadedProgram { + path: PathBuf::from("main.cap"), + source: "package safe\nmodule main\n".to_string(), + module, + stdlib: Vec::new(), + user_modules: vec![helper], + root: PathBuf::from("."), + sources, + } + } + + #[test] + fn 
attach_type_error_uses_module_source() { + let loaded = loaded_with_helper_source(); + let err = TypeError::new("boom".to_string(), Span::new(0, 1)).in_module("helper"); + let attached = attach_type_error_source(err, &loaded); + assert!(attached.has_source()); + assert_eq!(attached.module_name(), Some("helper")); + } + + #[test] + fn attach_codegen_error_uses_module_source() { + let loaded = loaded_with_helper_source(); + let err = CodegenError::spanned("boom", Span::new(0, 1)).in_module("helper"); + let attached = attach_codegen_error_source(err, &loaded); + assert!(attached.has_source()); + assert_eq!(attached.module_name(), Some("helper")); + } +} + pub fn enforce_safe_only(entry: &Module, user_modules: &[Module], root: &Path) -> Result<()> { let mut offenders = Vec::new(); if entry.package == PackageSafety::Unsafe { diff --git a/capc/src/error.rs b/capc/src/error.rs index 61fe5d7..4e7ee1c 100644 --- a/capc/src/error.rs +++ b/capc/src/error.rs @@ -1,6 +1,6 @@ #![allow(unused_assignments)] -use miette::{Diagnostic, SourceSpan}; +use miette::{Diagnostic, NamedSource, SourceSpan}; use thiserror::Error; use crate::ast::Span; @@ -21,6 +21,8 @@ pub fn format_with_context(context: impl AsRef, message: impl AsRef) - #[allow(unused)] pub struct ParseError { message: String, + #[source_code] + source_code: Option>, #[label] span: SourceSpan, span_raw: Span, @@ -30,6 +32,7 @@ impl ParseError { pub fn new(message: String, span: Span) -> Self { Self { message, + source_code: None, span: (span.start, span.end - span.start).into(), span_raw: span, } @@ -44,6 +47,19 @@ impl ParseError { self.span_raw } + pub fn has_source(&self) -> bool { + self.source_code.is_some() + } + + pub fn with_source( + mut self, + name: impl Into, + source: impl Into, + ) -> Self { + self.source_code = Some(NamedSource::new(name.into(), source.into())); + self + } + pub fn message(&self) -> &str { &self.message } @@ -54,6 +70,9 @@ impl ParseError { #[allow(unused)] pub struct TypeError { message: 
String, + module_name: Option, + #[source_code] + source_code: Option>, #[label] span: SourceSpan, span_raw: Span, @@ -63,6 +82,8 @@ impl TypeError { pub fn new(message: String, span: Span) -> Self { Self { message, + module_name: None, + source_code: None, span: (span.start, span.end - span.start).into(), span_raw: span, } @@ -77,6 +98,28 @@ impl TypeError { self.span_raw } + pub fn has_source(&self) -> bool { + self.source_code.is_some() + } + + pub fn in_module(mut self, module_name: impl Into) -> Self { + self.module_name = Some(module_name.into()); + self + } + + pub fn module_name(&self) -> Option<&str> { + self.module_name.as_deref() + } + + pub fn with_source( + mut self, + name: impl Into, + source: impl Into, + ) -> Self { + self.source_code = Some(NamedSource::new(name.into(), source.into())); + self + } + pub fn message(&self) -> &str { &self.message } diff --git a/capc/src/loader.rs b/capc/src/loader.rs index 671e137..7def091 100644 --- a/capc/src/loader.rs +++ b/capc/src/loader.rs @@ -2,49 +2,46 @@ use std::collections::{HashMap, VecDeque}; use std::fs; use std::path::{Path, PathBuf}; +use crate::ast::Module; use crate::error::ParseError; use crate::parser::parse_module; -use crate::ast::Module; + +#[derive(Debug, Clone)] +pub struct LoadedModule { + pub path: PathBuf, + pub source: String, + pub module: Module, +} pub fn stdlib_root() -> PathBuf { PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../stdlib") } pub fn load_stdlib() -> Result, ParseError> { - let root = stdlib_root().join("sys"); - let mut modules = Vec::new(); - let mut entries = Vec::new(); - for entry in fs::read_dir(&root).map_err(|err| { - ParseError::new(format!("failed to read stdlib dir {root:?}: {err}"), crate::ast::Span::new(0, 0)) - })? 
{ - let entry = entry.map_err(|err| { - ParseError::new(format!("failed to read stdlib entry: {err}"), crate::ast::Span::new(0, 0)) - })?; - let path = entry.path(); - if path.extension().and_then(|s| s.to_str()) == Some("cap") { - entries.push(path); - } - } - entries.sort(); - for path in entries { - let module = load_module_from_path(&path) - .map_err(|err| err.with_context(format!("while reading {}", path.display())))?; - validate_module_path(&module, &path, &stdlib_root()).map_err(|err| { - err.with_context(format!("while loading module `{}`", module.name)) - })?; - modules.push(module); - } - Ok(modules) + let mut graph = ModuleGraph::new(); + graph + .load_stdlib_with_sources() + .map(|modules| modules.into_iter().map(|loaded| loaded.module).collect()) } -pub fn load_module_from_path(path: &Path) -> Result { +pub(crate) fn load_module_from_path_with_source(path: &Path) -> Result { let source = fs::read_to_string(path).map_err(|err| { ParseError::new( format!("failed to read {}: {err}", path.display()), crate::ast::Span::new(0, 0), ) })?; - parse_module(&source) + let module = parse_module(&source) + .map_err(|err| err.with_source(path.display().to_string(), source.clone()))?; + Ok(LoadedModule { + path: path.to_path_buf(), + source, + module, + }) +} + +pub fn load_module_from_path(path: &Path) -> Result { + load_module_from_path_with_source(path).map(|loaded| loaded.module) } pub fn validate_module_path( @@ -84,7 +81,7 @@ pub fn validate_module_path( } pub struct ModuleGraph { - cache: HashMap, + cache: HashMap, } impl ModuleGraph { @@ -95,6 +92,11 @@ impl ModuleGraph { } pub fn load_stdlib(&mut self) -> Result, ParseError> { + self.load_stdlib_with_sources() + .map(|modules| modules.into_iter().map(|loaded| loaded.module).collect()) + } + + pub fn load_stdlib_with_sources(&mut self) -> Result, ParseError> { let root = stdlib_root().join("sys"); let mut modules = Vec::new(); let mut entries = Vec::new(); @@ -117,12 +119,14 @@ impl ModuleGraph { } 
entries.sort(); for path in entries { - let module = self.load_cached(&path) + let loaded = self.load_cached(&path) .map_err(|err| err.with_context(format!("while reading {}", path.display())))?; - validate_module_path(&module, &path, &stdlib_root()).map_err(|err| { - err.with_context(format!("while loading module `{}`", module.name)) - })?; - modules.push(module); + validate_module_path(&loaded.module, &path, &stdlib_root()) + .map_err(|err| { + err.with_context(format!("while loading module `{}`", loaded.module.name)) + .with_source(path.display().to_string(), loaded.source.clone()) + })?; + modules.push(loaded); } Ok(modules) } @@ -132,6 +136,15 @@ impl ModuleGraph { entry_path: &Path, entry_module: &Module, ) -> Result, ParseError> { + self.load_user_modules_transitive_with_sources(entry_path, entry_module) + .map(|modules| modules.into_iter().map(|loaded| loaded.module).collect()) + } + + pub fn load_user_modules_transitive_with_sources( + &mut self, + entry_path: &Path, + entry_module: &Module, + ) -> Result, ParseError> { let dir = entry_path .parent() .ok_or_else(|| { @@ -154,30 +167,31 @@ impl ModuleGraph { if self.cache.contains_key(&path) { continue; } - let module = self.load_cached(&path) + let loaded = self.load_cached(&path) .map_err(|err| err.with_context(format!("while reading {}", path.display())))?; - validate_module_path(&module, &path, &base_dir).map_err(|err| { - err.with_context(format!("while loading module `{}`", module.name)) + validate_module_path(&loaded.module, &path, &base_dir).map_err(|err| { + err.with_context(format!("while loading module `{}`", loaded.module.name)) + .with_source(path.display().to_string(), loaded.source.clone()) })?; - for use_decl in &module.uses { + for use_decl in &loaded.module.uses { if let Some(dep_path) = resolve_use_path(&base_dir, use_decl)? 
{ queue.push_back((dep_path, base_dir.clone())); } } - modules.push(module); + modules.push(loaded); } Ok(modules) } - fn load_cached(&mut self, path: &Path) -> Result { + fn load_cached(&mut self, path: &Path) -> Result { if let Some(module) = self.cache.get(path) { return Ok(module.clone()); } - let module = load_module_from_path(path) + let loaded = load_module_from_path_with_source(path) .map_err(|err| err.with_context(format!("while reading {}", path.display())))?; - self.cache.insert(path.to_path_buf(), module.clone()); - Ok(module) + self.cache.insert(path.to_path_buf(), loaded.clone()); + Ok(loaded) } } @@ -192,6 +206,8 @@ pub fn load_user_modules_transitive( entry_path: &Path, entry_module: &Module, ) -> Result, ParseError> { + let root = stdlib_root().join("sys"); + let _ = root; let dir = entry_path .parent() .ok_or_else(|| { @@ -215,18 +231,19 @@ pub fn load_user_modules_transitive( if cache.contains_key(&path) { continue; } - let module = load_module_from_path(&path) + let loaded = load_module_from_path_with_source(&path) .map_err(|err| err.with_context(format!("while reading {}", path.display())))?; - validate_module_path(&module, &path, &base_dir).map_err(|err| { - err.with_context(format!("while loading module `{}`", module.name)) + validate_module_path(&loaded.module, &path, &base_dir).map_err(|err| { + err.with_context(format!("while loading module `{}`", loaded.module.name)) + .with_source(path.display().to_string(), loaded.source.clone()) })?; - for use_decl in &module.uses { + for use_decl in &loaded.module.uses { if let Some(dep_path) = resolve_use_path(&base_dir, use_decl)? 
{ queue.push_back((dep_path, base_dir.clone())); } } - cache.insert(path, module.clone()); - modules.push(module); + cache.insert(path, loaded.module.clone()); + modules.push(loaded.module); } Ok(modules) diff --git a/capc/src/main.rs b/capc/src/main.rs index 182e52f..7855ab7 100644 --- a/capc/src/main.rs +++ b/capc/src/main.rs @@ -1,7 +1,7 @@ use std::path::PathBuf; use clap::{Parser, Subcommand}; -use miette::{miette, NamedSource, Result}; +use miette::{miette, Result}; use capc::{build_binary, load_program, module_path_for, parse_module, type_check_loaded, LinkOptions}; @@ -55,10 +55,8 @@ fn main() -> Result<()> { Command::Parse { path } => { let source = std::fs::read_to_string(&path) .map_err(|err| miette!("failed to read {}: {err}", path.display()))?; - let module = parse_module(&source).map_err(|err| { - let named = NamedSource::new(path.display().to_string(), source); - miette::Report::new(err).with_source_code(named) - })?; + let module = parse_module(&source) + .map_err(|err| miette::Report::new(err.with_source(path.display().to_string(), source)))?; println!("{module:#?}"); Ok(()) } diff --git a/capc/src/runtime_intrinsics.rs b/capc/src/runtime_intrinsics.rs index 764d234..e6a7b38 100644 --- a/capc/src/runtime_intrinsics.rs +++ b/capc/src/runtime_intrinsics.rs @@ -1,76 +1,591 @@ -pub(crate) fn is_runtime_intrinsic(module: &str, func: &str) -> bool { - matches!( - (module, func), - ( +use std::collections::HashMap; +use std::sync::OnceLock; + +use crate::abi::AbiType; + +#[derive(Debug, Clone)] +pub(crate) struct RuntimeFnSig { + pub params: Vec, + pub ret: AbiType, +} + +#[derive(Debug, Clone)] +pub(crate) struct RuntimeBinding { + pub module: &'static str, + pub func: &'static str, + pub symbol: &'static str, + pub sig: RuntimeFnSig, + pub abi_sig: Option, +} + +fn sig(params: Vec, ret: AbiType) -> RuntimeFnSig { + RuntimeFnSig { params, ret } +} + +fn binding( + module: &'static str, + func: &'static str, + symbol: &'static str, + sig: RuntimeFnSig, +) 
-> RuntimeBinding { + RuntimeBinding { + module, + func, + symbol, + sig, + abi_sig: None, + } +} + +fn binding_with_abi( + module: &'static str, + func: &'static str, + symbol: &'static str, + sig: RuntimeFnSig, + abi_sig: RuntimeFnSig, +) -> RuntimeBinding { + RuntimeBinding { + module, + func, + symbol, + sig, + abi_sig: Some(abi_sig), + } +} + +fn runtime_binding_list() -> Vec { + vec![ + binding( + "sys.system", + "RootCap__mint_console", + "capable_rt_mint_console", + sig(vec![AbiType::Handle], AbiType::Handle), + ), + binding_with_abi( + "sys.system", + "RootCap__mint_readfs", + "capable_rt_mint_readfs", + sig(vec![AbiType::Handle, AbiType::Ptr], AbiType::Handle), + sig(vec![AbiType::Handle, AbiType::Ptr], AbiType::Handle), + ), + binding( + "sys.system", + "RootCap__mint_filesystem", + "capable_rt_mint_filesystem", + sig(vec![AbiType::Handle, AbiType::Ptr], AbiType::Handle), + ), + binding( "sys.system", - "RootCap__mint_console" - | "RootCap__mint_readfs" - | "RootCap__mint_filesystem" - | "RootCap__mint_args" - | "RootCap__mint_stdin" - | "RootCap__mint_net" - | "RootCap__mint_alloc_default" - ) | ("sys.args", "Args__len" | "Args__at") - | ("sys.stdin", "Stdin__read_to_string_with_alloc") - | ( - "sys.net", - "Net__listen" - | "Net__connect" - | "TcpListener__accept" - | "TcpListener__close" - | "TcpConn__read_to_string_with_alloc" - | "TcpConn__read_with_alloc" - | "TcpConn__write" - | "TcpConn__close" - ) - | ("sys.buffer", "default_alloc") - | ( - "sys.console", - "Console__println" - | "Console__print" - | "Console__print_i32" - | "Console__println_i32" - | "Console__assert" - ) - | ( - "sys.math", - "add_wrap_i32" - | "sub_wrap_i32" - | "mul_wrap_i32" - | "add_wrap_u32" - | "sub_wrap_u32" - | "mul_wrap_u32" - | "add_wrap_u8" - | "sub_wrap_u8" - | "mul_wrap_u8" - ) - | ( - "sys.fs", - "ReadFS__read_to_string_with_alloc" - | "ReadFS__read_bytes_with_alloc" - | "ReadFS__list_dir_with_alloc" - | "ReadFS__exists" - | "ReadFS__close" - | 
"Filesystem__root_dir" - | "Filesystem__close" - | "Dir__subdir" - | "Dir__open_read" - | "Dir__read_bytes_with_alloc" - | "Dir__read_to_string_with_alloc" - | "Dir__list_dir_with_alloc" - | "Dir__exists" - | "Dir__close" - | "FileRead__read_to_string_with_alloc" - | "FileRead__close" - | "join_with_alloc" - ) - | ( - "sys.buffer", - "Alloc__malloc" - | "Alloc__free" - | "Alloc__cast_u8_to_u32" - | "Alloc__cast_u32_to_u8" - ) - | ("sys.bytes", "u8__is_whitespace") - ) + "RootCap__mint_args", + "capable_rt_mint_args", + sig(vec![AbiType::Handle], AbiType::Handle), + ), + binding( + "sys.system", + "RootCap__mint_stdin", + "capable_rt_mint_stdin", + sig(vec![AbiType::Handle], AbiType::Handle), + ), + binding( + "sys.system", + "RootCap__mint_net", + "capable_rt_mint_net", + sig(vec![AbiType::Handle], AbiType::Handle), + ), + binding( + "sys.system", + "RootCap__mint_alloc_default", + "capable_rt_alloc_default", + sig(vec![AbiType::Handle], AbiType::Handle), + ), + binding( + "sys.args", + "Args__len", + "capable_rt_args_len", + sig(vec![AbiType::Handle], AbiType::I32), + ), + binding_with_abi( + "sys.args", + "Args__at", + "capable_rt_args_at", + sig( + vec![AbiType::Handle, AbiType::I32], + AbiType::Result(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + ), + sig( + vec![ + AbiType::Handle, + AbiType::I32, + AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + ], + AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + ), + ), + binding_with_abi( + "sys.stdin", + "Stdin__read_to_string_with_alloc", + "capable_rt_read_stdin_to_string", + sig( + vec![AbiType::Handle, AbiType::Handle], + AbiType::Result(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + ), + sig( + vec![ + AbiType::Handle, + AbiType::Handle, + AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + ], + AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + ), + ), + binding_with_abi( + "sys.net", + "Net__listen", + "capable_rt_net_listen", 
+ sig( + vec![AbiType::Handle, AbiType::Ptr, AbiType::I32], + AbiType::Result(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + ), + sig( + vec![ + AbiType::Handle, + AbiType::Ptr, + AbiType::I32, + AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + ], + AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + ), + ), + binding_with_abi( + "sys.net", + "Net__connect", + "capable_rt_net_connect", + sig( + vec![AbiType::Handle, AbiType::Ptr, AbiType::I32], + AbiType::Result(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + ), + sig( + vec![ + AbiType::Handle, + AbiType::Ptr, + AbiType::I32, + AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + ], + AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + ), + ), + binding_with_abi( + "sys.net", + "TcpListener__accept", + "capable_rt_net_accept", + sig( + vec![AbiType::Handle], + AbiType::Result(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + ), + sig( + vec![ + AbiType::Handle, + AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + ], + AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + ), + ), + binding( + "sys.net", + "TcpListener__close", + "capable_rt_net_listener_close", + sig(vec![AbiType::Handle], AbiType::Unit), + ), + binding_with_abi( + "sys.net", + "TcpConn__read_to_string_with_alloc", + "capable_rt_net_read_to_string", + sig( + vec![AbiType::Handle, AbiType::Handle], + AbiType::Result(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + ), + sig( + vec![ + AbiType::Handle, + AbiType::Handle, + AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + ], + AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + ), + ), + binding_with_abi( + "sys.net", + "TcpConn__read_with_alloc", + "capable_rt_net_read", + sig( + vec![AbiType::Handle, AbiType::Handle, AbiType::I32], + AbiType::Result(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + ), + sig( + vec![ + 
AbiType::Handle, + AbiType::Handle, + AbiType::I32, + AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + ], + AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + ), + ), + binding_with_abi( + "sys.net", + "TcpConn__write", + "capable_rt_net_write", + sig( + vec![AbiType::Handle, AbiType::Ptr], + AbiType::Result(Box::new(AbiType::Unit), Box::new(AbiType::I32)), + ), + sig( + vec![ + AbiType::Handle, + AbiType::Ptr, + AbiType::ResultOut(Box::new(AbiType::Unit), Box::new(AbiType::I32)), + ], + AbiType::ResultOut(Box::new(AbiType::Unit), Box::new(AbiType::I32)), + ), + ), + binding( + "sys.net", + "TcpConn__close", + "capable_rt_net_close", + sig(vec![AbiType::Handle], AbiType::Unit), + ), + binding( + "sys.buffer", + "default_alloc", + "capable_rt_default_alloc", + sig(vec![], AbiType::Handle), + ), + binding( + "sys.console", + "Console__println", + "capable_rt_console_println", + sig(vec![AbiType::Handle, AbiType::Ptr], AbiType::Unit), + ), + binding( + "sys.console", + "Console__print", + "capable_rt_console_print", + sig(vec![AbiType::Handle, AbiType::Ptr], AbiType::Unit), + ), + binding( + "sys.console", + "Console__print_i32", + "capable_rt_console_print_i32", + sig(vec![AbiType::Handle, AbiType::I32], AbiType::Unit), + ), + binding( + "sys.console", + "Console__println_i32", + "capable_rt_console_println_i32", + sig(vec![AbiType::Handle, AbiType::I32], AbiType::Unit), + ), + binding( + "sys.console", + "Console__assert", + "capable_rt_assert", + sig(vec![AbiType::Handle, AbiType::Bool, AbiType::Ptr], AbiType::Unit), + ), + binding( + "sys.math", + "add_wrap_i32", + "capable_rt_math_add_wrap_i32", + sig(vec![AbiType::I32, AbiType::I32], AbiType::I32), + ), + binding( + "sys.math", + "sub_wrap_i32", + "capable_rt_math_sub_wrap_i32", + sig(vec![AbiType::I32, AbiType::I32], AbiType::I32), + ), + binding( + "sys.math", + "mul_wrap_i32", + "capable_rt_math_mul_wrap_i32", + sig(vec![AbiType::I32, AbiType::I32], AbiType::I32), + ), 
+ binding( + "sys.math", + "add_wrap_u32", + "capable_rt_math_add_wrap_u32", + sig(vec![AbiType::U32, AbiType::U32], AbiType::U32), + ), + binding( + "sys.math", + "sub_wrap_u32", + "capable_rt_math_sub_wrap_u32", + sig(vec![AbiType::U32, AbiType::U32], AbiType::U32), + ), + binding( + "sys.math", + "mul_wrap_u32", + "capable_rt_math_mul_wrap_u32", + sig(vec![AbiType::U32, AbiType::U32], AbiType::U32), + ), + binding( + "sys.math", + "add_wrap_u8", + "capable_rt_math_add_wrap_u8", + sig(vec![AbiType::U8, AbiType::U8], AbiType::U8), + ), + binding( + "sys.math", + "sub_wrap_u8", + "capable_rt_math_sub_wrap_u8", + sig(vec![AbiType::U8, AbiType::U8], AbiType::U8), + ), + binding( + "sys.math", + "mul_wrap_u8", + "capable_rt_math_mul_wrap_u8", + sig(vec![AbiType::U8, AbiType::U8], AbiType::U8), + ), + binding_with_abi( + "sys.fs", + "ReadFS__read_to_string_with_alloc", + "capable_rt_fs_read_to_string", + sig( + vec![AbiType::Handle, AbiType::Handle, AbiType::Ptr], + AbiType::Result(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + ), + sig( + vec![ + AbiType::Handle, + AbiType::Handle, + AbiType::Ptr, + AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + ], + AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + ), + ), + binding_with_abi( + "sys.fs", + "ReadFS__read_bytes_with_alloc", + "capable_rt_fs_read_bytes", + sig( + vec![AbiType::Handle, AbiType::Handle, AbiType::Ptr], + AbiType::Result(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + ), + sig( + vec![ + AbiType::Handle, + AbiType::Handle, + AbiType::Ptr, + AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + ], + AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + ), + ), + binding_with_abi( + "sys.fs", + "ReadFS__list_dir_with_alloc", + "capable_rt_fs_list_dir", + sig( + vec![AbiType::Handle, AbiType::Handle, AbiType::Ptr], + AbiType::Result(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + ), + sig( + vec![ + 
AbiType::Handle, + AbiType::Handle, + AbiType::Ptr, + AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + ], + AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + ), + ), + binding( + "sys.fs", + "ReadFS__exists", + "capable_rt_fs_exists", + sig(vec![AbiType::Handle, AbiType::Ptr], AbiType::Bool), + ), + binding( + "sys.fs", + "ReadFS__close", + "capable_rt_fs_readfs_close", + sig(vec![AbiType::Handle], AbiType::Unit), + ), + binding( + "sys.fs", + "Filesystem__root_dir", + "capable_rt_fs_root_dir", + sig(vec![AbiType::Handle], AbiType::Handle), + ), + binding( + "sys.fs", + "Filesystem__close", + "capable_rt_fs_filesystem_close", + sig(vec![AbiType::Handle], AbiType::Unit), + ), + binding( + "sys.fs", + "Dir__subdir", + "capable_rt_fs_subdir", + sig(vec![AbiType::Handle, AbiType::Ptr], AbiType::Handle), + ), + binding( + "sys.fs", + "Dir__open_read", + "capable_rt_fs_open_read", + sig(vec![AbiType::Handle, AbiType::Ptr], AbiType::Handle), + ), + binding_with_abi( + "sys.fs", + "Dir__read_bytes_with_alloc", + "capable_rt_fs_dir_read_bytes", + sig( + vec![AbiType::Handle, AbiType::Handle, AbiType::Ptr], + AbiType::Result(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + ), + sig( + vec![ + AbiType::Handle, + AbiType::Handle, + AbiType::Ptr, + AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + ], + AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + ), + ), + binding_with_abi( + "sys.fs", + "Dir__read_to_string_with_alloc", + "capable_rt_fs_dir_read_to_string", + sig( + vec![AbiType::Handle, AbiType::Handle, AbiType::Ptr], + AbiType::Result(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + ), + sig( + vec![ + AbiType::Handle, + AbiType::Handle, + AbiType::Ptr, + AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + ], + AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + ), + ), + binding_with_abi( + "sys.fs", + "Dir__list_dir_with_alloc", + 
"capable_rt_fs_dir_list_dir", + sig( + vec![AbiType::Handle, AbiType::Handle], + AbiType::Result(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + ), + sig( + vec![ + AbiType::Handle, + AbiType::Handle, + AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + ], + AbiType::ResultOut(Box::new(AbiType::Handle), Box::new(AbiType::I32)), + ), + ), + binding( + "sys.fs", + "Dir__exists", + "capable_rt_fs_dir_exists", + sig(vec![AbiType::Handle, AbiType::Ptr], AbiType::Bool), + ), + binding( + "sys.fs", + "Dir__close", + "capable_rt_fs_dir_close", + sig(vec![AbiType::Handle], AbiType::Unit), + ), + binding_with_abi( + "sys.fs", + "FileRead__read_to_string_with_alloc", + "capable_rt_fs_file_read_to_string", + sig( + vec![AbiType::Handle, AbiType::Handle], + AbiType::Result(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + ), + sig( + vec![ + AbiType::Handle, + AbiType::Handle, + AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + ], + AbiType::ResultOut(Box::new(AbiType::Ptr), Box::new(AbiType::I32)), + ), + ), + binding( + "sys.fs", + "FileRead__close", + "capable_rt_fs_file_read_close", + sig(vec![AbiType::Handle], AbiType::Unit), + ), + binding_with_abi( + "sys.fs", + "join_with_alloc", + "capable_rt_fs_join", + sig( + vec![AbiType::Handle, AbiType::Ptr, AbiType::Ptr], + AbiType::Ptr, + ), + sig( + vec![AbiType::Ptr, AbiType::Handle, AbiType::Ptr, AbiType::Ptr], + AbiType::Unit, + ), + ), + binding( + "sys.buffer", + "Alloc__malloc", + "capable_rt_malloc", + sig(vec![AbiType::Handle, AbiType::I32], AbiType::Ptr), + ), + binding( + "sys.buffer", + "Alloc__free", + "capable_rt_free", + sig(vec![AbiType::Handle, AbiType::Ptr], AbiType::Unit), + ), + binding( + "sys.buffer", + "Alloc__cast_u8_to_u32", + "capable_rt_cast_u8_to_u32", + sig(vec![AbiType::Handle, AbiType::Ptr], AbiType::Ptr), + ), + binding( + "sys.buffer", + "Alloc__cast_u32_to_u8", + "capable_rt_cast_u32_to_u8", + sig(vec![AbiType::Handle, AbiType::Ptr], 
AbiType::Ptr), + ), + binding( + "sys.bytes", + "u8__is_whitespace", + "capable_rt_bytes_is_whitespace", + sig(vec![AbiType::U8], AbiType::Bool), + ), + ] +} + +fn runtime_binding_index() -> &'static HashMap { + static INDEX: OnceLock> = OnceLock::new(); + INDEX.get_or_init(|| { + runtime_binding_list() + .into_iter() + .map(|binding| (format!("{}.{}", binding.module, binding.func), binding)) + .collect() + }) +} + +pub(crate) fn runtime_bindings() -> &'static HashMap { + runtime_binding_index() +} + +pub(crate) fn runtime_binding(module: &str, func: &str) -> Option<&'static RuntimeBinding> { + runtime_binding_index().get(&format!("{module}.{func}")) +} + +pub(crate) fn is_runtime_intrinsic(module: &str, func: &str) -> bool { + runtime_binding(module, func).is_some() } diff --git a/capc/src/typeck/mod.rs b/capc/src/typeck/mod.rs index 3c8edef..20cce22 100644 --- a/capc/src/typeck/mod.rs +++ b/capc/src/typeck/mod.rs @@ -409,21 +409,21 @@ pub fn type_check_program( .collect::>(); let module_name = module.name.to_string(); validate_package_safety(&module, false) - .map_err(|err| err.with_context(format!("in module `{}`", module.name)))?; + .map_err(|err| err.in_module(module.name.to_string()).with_context(format!("in module `{}`", module.name)))?; validate_import_safety(&module, &package_map, &stdlib_names) - .map_err(|err| err.with_context(format!("in module `{}`", module.name)))?; + .map_err(|err| err.in_module(module.name.to_string()).with_context(format!("in module `{}`", module.name)))?; for user_module in &user_modules { validate_package_safety(user_module, false) - .map_err(|err| err.with_context(format!("in module `{}`", user_module.name)))?; + .map_err(|err| err.in_module(user_module.name.to_string()).with_context(format!("in module `{}`", user_module.name)))?; validate_import_safety(user_module, &package_map, &stdlib_names) - .map_err(|err| err.with_context(format!("in module `{}`", user_module.name)))?; + .map_err(|err| 
err.in_module(user_module.name.to_string()).with_context(format!("in module `{}`", user_module.name)))?; } for stdlib_module in &stdlib { validate_package_safety(stdlib_module, true) - .map_err(|err| err.with_context(format!("in module `{}`", stdlib_module.name)))?; + .map_err(|err| err.in_module(stdlib_module.name.to_string()).with_context(format!("in module `{}`", stdlib_module.name)))?; if stdlib_module.package == PackageSafety::Safe { validate_import_safety(stdlib_module, &package_map, &stdlib_names) - .map_err(|err| err.with_context(format!("in module `{}`", stdlib_module.name)))?; + .map_err(|err| err.in_module(stdlib_module.name.to_string()).with_context(format!("in module `{}`", stdlib_module.name)))?; } } let struct_map = collect::collect_structs(&modules, &module_name, &stdlib_index) @@ -476,7 +476,7 @@ pub fn type_check_program( &module_name, Some(&mut table), ) - .map_err(|err| err.with_context(format!("in module `{}`", module_name)))?; + .map_err(|err| err.in_module(module_name.clone()).with_context(format!("in module `{}`", module_name)))?; type_tables.insert(function_key(&module_name, &func.name.item), table); } Item::Impl(impl_block) => { @@ -509,7 +509,7 @@ pub fn type_check_program( &module_name, Some(&mut table), ) - .map_err(|err| err.with_context(format!("in module `{}`", module_name)))?; + .map_err(|err| err.in_module(module_name.clone()).with_context(format!("in module `{}`", module_name)))?; type_tables.insert(function_key(&module_name, &method.name.item), table); } } @@ -542,7 +542,7 @@ pub fn type_check_program( &stdlib_index, Some(&type_tables), ) - .map_err(|err| err.with_context(format!("in module `{}`", m.name))) + .map_err(|err| err.in_module(m.name.to_string()).with_context(format!("in module `{}`", m.name))) }) .collect(); @@ -561,7 +561,7 @@ pub fn type_check_program( &stdlib_index, Some(&type_tables), ) - .map_err(|err| err.with_context(format!("in module `{}`", m.name))) + .map_err(|err| 
err.in_module(m.name.to_string()).with_context(format!("in module `{}`", m.name))) }) .collect(); @@ -576,7 +576,7 @@ pub fn type_check_program( &stdlib_index, Some(&type_tables), ) - .map_err(|err| err.with_context(format!("in module `{}`", module.name)))?; + .map_err(|err| err.in_module(module.name.to_string()).with_context(format!("in module `{}`", module.name)))?; let hir_trait_impls: Vec = trait_impls .iter() diff --git a/capc/tests/cli.rs b/capc/tests/cli.rs index 15659f6..51114f8 100644 --- a/capc/tests/cli.rs +++ b/capc/tests/cli.rs @@ -1,5 +1,7 @@ +use std::fs; use std::path::PathBuf; use std::process::Command; +use std::time::{SystemTime, UNIX_EPOCH}; fn run_capc(args: &[&str]) -> (i32, String, String) { let exe = env!("CARGO_BIN_EXE_capc"); @@ -17,6 +19,21 @@ fn run_capc(args: &[&str]) -> (i32, String, String) { ) } +fn make_temp_dir(test_name: &str) -> PathBuf { + let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join(".."); + let nanos = SystemTime::now() + .duration_since(UNIX_EPOCH) + .expect("time") + .as_nanos(); + let pid = std::process::id(); + let dir = root + .join("target") + .join("capc-test-input") + .join(format!("{test_name}-{pid}-{nanos}")); + fs::create_dir_all(&dir).expect("create temp dir"); + dir +} + #[test] fn safe_only_rejects_unsafe_package() { let (code, _stdout, stderr) = run_capc(&[ @@ -61,3 +78,47 @@ fn extern_missing_symbol_reports_link_error() { assert!(stderr.contains("link failed")); assert!(stderr.contains("missing_symbol"), "stderr was: {stderr:?}"); } + +#[test] +fn imported_parse_error_reports_imported_file() { + let dir = make_temp_dir("imported-parse-error"); + let entry = dir.join("main.cap"); + let helper = dir.join("helper.cap"); + fs::write( + &entry, + "package safe\nmodule main\nuse helper\n\npub fn main() -> i32 {\n return helper::value()\n}\n", + ) + .expect("write entry"); + fs::write( + &helper, + "package safe\nmodule helper\n\npub fn value() -> i32 {\n return (\n}\n", + ) + .expect("write helper"); + + 
let entry = entry.to_string_lossy().to_string(); + let (code, _stdout, stderr) = run_capc(&["check", &entry]); + assert_ne!(code, 0); + assert!(stderr.contains("helper.cap"), "stderr was: {stderr:?}"); +} + +#[test] +fn imported_type_error_reports_imported_file() { + let dir = make_temp_dir("imported-type-error"); + let entry = dir.join("main.cap"); + let helper = dir.join("helper.cap"); + fs::write( + &entry, + "package safe\nmodule main\nuse helper\n\npub fn main() -> i32 {\n return helper::value()\n}\n", + ) + .expect("write entry"); + fs::write( + &helper, + "package safe\nmodule helper\n\npub fn value() -> i32 {\n return true\n}\n", + ) + .expect("write helper"); + + let entry = entry.to_string_lossy().to_string(); + let (code, _stdout, stderr) = run_capc(&["check", &entry]); + assert_ne!(code, 0); + assert!(stderr.contains("helper.cap"), "stderr was: {stderr:?}"); +} From d10b147f823c203a328735ae39185101aba1761c Mon Sep 17 00:00:00 2001 From: Jordan Mecom Date: Thu, 26 Mar 2026 12:21:21 -0700 Subject: [PATCH 12/17] Add static site generator example --- capc/src/runtime_intrinsics.rs | 47 ++++ capc/tests/run.rs | 28 ++ examples/static_site/content/about.md | 6 + examples/static_site/content/index.md | 6 + .../static_site/content/posts/capabilities.md | 6 + examples/static_site/content/posts/hello.md | 6 + examples/static_site/out/.gitkeep | 1 + examples/static_site/out/about.html | 13 + examples/static_site/out/index.html | 13 + .../static_site/out/posts/capabilities.html | 13 + examples/static_site/out/posts/hello.html | 13 + examples/static_site/sitegen.cap | 266 ++++++++++++++++++ runtime/src/lib.rs | 104 +++++++ stdlib/sys/fs.cap | 20 ++ stdlib/sys/path.cap | 58 ++++ 15 files changed, 600 insertions(+) create mode 100644 examples/static_site/content/about.md create mode 100644 examples/static_site/content/index.md create mode 100644 examples/static_site/content/posts/capabilities.md create mode 100644 examples/static_site/content/posts/hello.md create mode 
100644 examples/static_site/out/.gitkeep create mode 100644 examples/static_site/out/about.html create mode 100644 examples/static_site/out/index.html create mode 100644 examples/static_site/out/posts/capabilities.html create mode 100644 examples/static_site/out/posts/hello.html create mode 100644 examples/static_site/sitegen.cap diff --git a/capc/src/runtime_intrinsics.rs b/capc/src/runtime_intrinsics.rs index e6a7b38..9400f73 100644 --- a/capc/src/runtime_intrinsics.rs +++ b/capc/src/runtime_intrinsics.rs @@ -404,6 +404,12 @@ fn runtime_binding_list() -> Vec { "capable_rt_fs_exists", sig(vec![AbiType::Handle, AbiType::Ptr], AbiType::Bool), ), + binding( + "sys.fs", + "ReadFS__is_dir", + "capable_rt_fs_is_dir", + sig(vec![AbiType::Handle, AbiType::Ptr], AbiType::Bool), + ), binding( "sys.fs", "ReadFS__close", @@ -493,6 +499,47 @@ fn runtime_binding_list() -> Vec { "capable_rt_fs_dir_exists", sig(vec![AbiType::Handle, AbiType::Ptr], AbiType::Bool), ), + binding( + "sys.fs", + "Dir__is_dir", + "capable_rt_fs_dir_is_dir", + sig(vec![AbiType::Handle, AbiType::Ptr], AbiType::Bool), + ), + binding_with_abi( + "sys.fs", + "Dir__create_dir_all", + "capable_rt_fs_dir_create_dir_all", + sig( + vec![AbiType::Handle, AbiType::Ptr], + AbiType::Result(Box::new(AbiType::Unit), Box::new(AbiType::I32)), + ), + sig( + vec![ + AbiType::Handle, + AbiType::Ptr, + AbiType::ResultOut(Box::new(AbiType::Unit), Box::new(AbiType::I32)), + ], + AbiType::ResultOut(Box::new(AbiType::Unit), Box::new(AbiType::I32)), + ), + ), + binding_with_abi( + "sys.fs", + "Dir__write_string", + "capable_rt_fs_dir_write_string", + sig( + vec![AbiType::Handle, AbiType::Ptr, AbiType::Ptr], + AbiType::Result(Box::new(AbiType::Unit), Box::new(AbiType::I32)), + ), + sig( + vec![ + AbiType::Handle, + AbiType::Ptr, + AbiType::Ptr, + AbiType::ResultOut(Box::new(AbiType::Unit), Box::new(AbiType::I32)), + ], + AbiType::ResultOut(Box::new(AbiType::Unit), Box::new(AbiType::I32)), + ), + ), binding( "sys.fs", 
"Dir__close", diff --git a/capc/tests/run.rs b/capc/tests/run.rs index cffe12d..fd60742 100644 --- a/capc/tests/run.rs +++ b/capc/tests/run.rs @@ -215,6 +215,34 @@ fn run_path_helpers() { assert!(stdout.contains("path ok"), "stdout was: {stdout:?}"); } +#[test] +fn run_static_site_example() { + let build_dir = make_out_dir("static_site_build"); + let generated_dir = make_out_dir("static_site_generated"); + let build_dir = build_dir.to_str().expect("utf8 build dir"); + let generated_dir_str = generated_dir.to_str().expect("utf8 output dir"); + let (code, stdout, stderr) = run_capc(&[ + "run", + "--out-dir", + build_dir, + "examples/static_site/sitegen.cap", + "examples/static_site/content", + generated_dir_str, + ]); + assert_eq!(code, 0, "stderr was: {stderr:?}"); + assert!( + stdout.contains("generated pages: 4"), + "stdout was: {stdout:?}" + ); + + let index = std::fs::read_to_string(generated_dir.join("index.html")).expect("read index"); + let post = + std::fs::read_to_string(generated_dir.join("posts").join("hello.html")).expect("read post"); + + assert!(index.contains("
<h1>Capable Static Site</h1>
"), "index was: {index:?}"); + assert!(post.contains("
<h1>Hello World</h1>
"), "post was: {post:?}"); +} + #[test] fn run_defer_free() { let out_dir = make_out_dir("defer_free"); diff --git a/examples/static_site/content/about.md b/examples/static_site/content/about.md new file mode 100644 index 0000000..64a3ff4 --- /dev/null +++ b/examples/static_site/content/about.md @@ -0,0 +1,6 @@ +# About This Example + +The generator reads markdown through a rooted ReadFS capability. + +It writes HTML through a rooted Dir capability derived from Filesystem. + diff --git a/examples/static_site/content/index.md b/examples/static_site/content/index.md new file mode 100644 index 0000000..3ead4b2 --- /dev/null +++ b/examples/static_site/content/index.md @@ -0,0 +1,6 @@ +# Capable Static Site + +This example turns a small tree of markdown files into HTML. + +It is intentionally simple so the language and stdlib friction stays visible. + diff --git a/examples/static_site/content/posts/capabilities.md b/examples/static_site/content/posts/capabilities.md new file mode 100644 index 0000000..63e2c3d --- /dev/null +++ b/examples/static_site/content/posts/capabilities.md @@ -0,0 +1,6 @@ +# Capabilities + +The generator can only read from the delegated content root. + +It can only write inside the delegated output root. + diff --git a/examples/static_site/content/posts/hello.md b/examples/static_site/content/posts/hello.md new file mode 100644 index 0000000..357512a --- /dev/null +++ b/examples/static_site/content/posts/hello.md @@ -0,0 +1,6 @@ +# Hello World + +This is the first generated page. + +The output path mirrors the input tree and replaces the extension with html. 
+ diff --git a/examples/static_site/out/.gitkeep b/examples/static_site/out/.gitkeep new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/examples/static_site/out/.gitkeep @@ -0,0 +1 @@ + diff --git a/examples/static_site/out/about.html b/examples/static_site/out/about.html new file mode 100644 index 0000000..94298d3 --- /dev/null +++ b/examples/static_site/out/about.html @@ -0,0 +1,13 @@ + + + + +About This Example + + + +
<h1>About This Example</h1>
+<p>The generator reads markdown through a rooted ReadFS capability.</p>
+<p>It writes HTML through a rooted Dir capability derived from Filesystem.</p>
+ + diff --git a/examples/static_site/out/index.html b/examples/static_site/out/index.html new file mode 100644 index 0000000..e8f7483 --- /dev/null +++ b/examples/static_site/out/index.html @@ -0,0 +1,13 @@ + + + + +Capable Static Site + + + +
<h1>Capable Static Site</h1>
+<p>This example turns a small tree of markdown files into HTML.</p>
+<p>It is intentionally simple so the language and stdlib friction stays visible.</p>
+ + diff --git a/examples/static_site/out/posts/capabilities.html b/examples/static_site/out/posts/capabilities.html new file mode 100644 index 0000000..4844e89 --- /dev/null +++ b/examples/static_site/out/posts/capabilities.html @@ -0,0 +1,13 @@ + + + + +Capabilities + + + +
<h1>Capabilities</h1>
+<p>The generator can only read from the delegated content root.</p>
+<p>It can only write inside the delegated output root.</p>
+ + diff --git a/examples/static_site/out/posts/hello.html b/examples/static_site/out/posts/hello.html new file mode 100644 index 0000000..69d7f0f --- /dev/null +++ b/examples/static_site/out/posts/hello.html @@ -0,0 +1,13 @@ + + + + +Hello World + + + +
<h1>Hello World</h1>
+<p>This is the first generated page.</p>
+<p>The output path mirrors the input tree and replaces the extension with html.</p>
+ + diff --git a/examples/static_site/sitegen.cap b/examples/static_site/sitegen.cap new file mode 100644 index 0000000..36d4024 --- /dev/null +++ b/examples/static_site/sitegen.cap @@ -0,0 +1,266 @@ +package safe +module sitegen + +use sys::console +use sys::fs +use sys::path +use sys::string +use sys::system +use sys::vec + +enum GenErr { + Fs(fs::FsErr), + Alloc +} + +fn slice_after(s: string, count: i32) -> string { + return match (s.slice_range(count, s.len())) { + Ok(out) => { out } + Err(_) => { "" } + } +} + +fn join_rel(base: string, name: string) -> string { + if (base.is_empty()) { + return name + } + return path::join(base, name) +} + +fn push_text(out: string::Text, value: string) -> bool { + match (out.push_str(value)) { + Ok(_) => { return true } + Err(_) => { return false } + } +} + +fn push_byte(out: string::Text, value: u8) -> bool { + match (out.push_byte(value)) { + Ok(_) => { return true } + Err(_) => { return false } + } +} + +fn push_html_escaped(out: string::Text, value: string) -> bool { + let i = 0 + while (i < value.len()) { + let b = value.byte_at(i) + match (b) { + '&' => { + if (!push_text(out, "&")) { + return false + } + } + '<' => { + if (!push_text(out, "<")) { + return false + } + } + '>' => { + if (!push_text(out, ">")) { + return false + } + } + '"' => { + if (!push_text(out, """)) { + return false + } + } + _ => { + if (!push_byte(out, b)) { + return false + } + } + } + i = i + 1 + } + return true +} + +fn push_wrapped(out: string::Text, open: string, body: string, close: string) -> bool { + if (!push_text(out, open)) { + return false + } + if (!push_html_escaped(out, body)) { + return false + } + if (!push_text(out, close)) { + return false + } + return true +} + +fn fallback_title(rel: string) -> string { + return path::replace_extension(path::basename_view(rel), "") +} + +fn extract_title(rel: string, markdown: string) -> string { + let lines = markdown.lines_view() + defer lines.free() + for line in lines { + let trimmed = 
line.trim_view() + if (trimmed.starts_with("# ")) { + return slice_after(trimmed, 2).trim_view() + } + } + return fallback_title(rel) +} + +fn render_page(rel: string, markdown: string) -> string { + let title = extract_title(rel, markdown) + let out = string::text_new() + defer out.free() + + if (!push_text(out, "\n\n\n\n")) { + return "" + } + if (!push_html_escaped(out, title)) { + return "" + } + if (!push_text(out, "\n\n\n\n")) { + return "" + } + + let lines = markdown.lines_view() + defer lines.free() + for line in lines { + let trimmed = line.trim_view() + if (trimmed.is_empty()) { + continue + } + if (trimmed.starts_with("# ")) { + if (!push_wrapped(out, "
<h1>", slice_after(trimmed, 2).trim_view(), "</h1>
\n")) { + return "" + } + continue + } + if (trimmed.starts_with("## ")) { + if (!push_wrapped(out, "
<h2>", slice_after(trimmed, 3).trim_view(), "</h2>
\n")) { + return "" + } + continue + } + if (!push_wrapped(out, "
<p>", trimmed, "</p>
\n")) { + return "" + } + } + + if (!push_text(out, "\n\n")) { + return "" + } + match (out.copy_string()) { + Ok(html) => { return html } + Err(_) => { return "" } + } +} + +fn generate_tree(c: Console, src: &ReadFS, out: &Dir, rel: string) -> i32 { + if (src.is_dir(rel)) { + try let entries = src.list_dir(rel) else err { + c.print("failed to list ") + c.println(rel) + print_error(c, GenErr::Fs(err)) + return -1 + } + defer entries.free() + let generated = 0 + for entry in entries { + let child = join_rel(rel, entry) + let count = generate_tree(c, src, out, child) + if (count < 0) { + return -1 + } + generated = generated + count + } + return generated + } + + if (!rel.ends_with(".md")) { + return 0 + } + + try let markdown = src.read_to_string(rel) else err { + c.print("failed to read ") + c.println(rel) + print_error(c, GenErr::Fs(err)) + return -1 + } + let html = render_page(rel, markdown) + if (html.is_empty()) { + c.print("failed to render ") + c.println(rel) + print_error(c, GenErr::Alloc) + return -1 + } + let out_path = path::replace_extension(rel, "html") + let out_dir = path::dirname_view(out_path) + if (!out_dir.is_empty()) { + try out.create_dir_all(out_dir) else err { + c.print("failed to create output dir for ") + c.println(out_path) + print_error(c, GenErr::Fs(err)) + return -1 + } + } + try out.write_string(out_path, html) else err { + c.print("failed to write ") + c.println(out_path) + print_error(c, GenErr::Fs(err)) + return -1 + } + + c.print("generated ") + c.println(out_path) + return 1 +} + +fn print_error(c: Console, err: GenErr) -> unit { + match (err) { + GenErr::Alloc => { + c.println("allocation failed") + } + GenErr::Fs(kind) => { + match (kind) { + fs::FsErr::NotFound => { + c.println("filesystem error: not found") + } + fs::FsErr::PermissionDenied => { + c.println("filesystem error: permission denied") + } + fs::FsErr::InvalidPath => { + c.println("filesystem error: invalid path") + } + fs::FsErr::IoError => { + c.println("filesystem 
error: io error") + } + } + } + } + return () +} + +pub fn main(rc: RootCap) -> i32 { + let c = rc.mint_console() + let args = rc.mint_args() + let src_root = args.at_or(1, "examples/static_site/content") + let out_root = args.at_or(2, "examples/static_site/out") + + let src = rc.mint_readfs(src_root) + let out_fs = rc.mint_filesystem(out_root) + let out = out_fs.root_dir() + let src_ref: &ReadFS = src + let out_ref: &Dir = out + + defer src.close() + defer out.close() + + let generated = generate_tree(c, src_ref, out_ref, "") + if (generated < 0) { + c.println("site generation failed") + return 1 + } + + c.print("generated pages: ") + c.println_i32(generated) + return 0 +} diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index d1ea6ad..c5e4846 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -307,6 +307,25 @@ fn write_handle_result_code( } } +fn write_unit_result(out_err: *mut i32, result: Result<(), FsErr>) -> u8 { + unsafe { + if !out_err.is_null() { + *out_err = 0; + } + } + match result { + Ok(()) => 0, + Err(err) => { + unsafe { + if !out_err.is_null() { + *out_err = err as i32; + } + } + 1 + } + } +} + #[no_mangle] pub extern "C" fn capable_rt_mint_console(_sys: Handle) -> Handle { if !has_handle(&ROOT_CAPS, _sys, "root cap table") { @@ -493,6 +512,22 @@ pub extern "C" fn capable_rt_fs_exists(fs: Handle, path: *const CapString) -> u8 } } +#[no_mangle] +pub extern "C" fn capable_rt_fs_is_dir(fs: Handle, path: *const CapString) -> u8 { + let path = unsafe { read_cap_string(path) }; + let state = clone_handle(&READ_FS, fs, "readfs table"); + let (Some(state), Some(path)) = (state, path) else { + return 0; + }; + let Some(relative) = normalize_relative(Path::new(&path)) else { + return 0; + }; + match resolve_rooted_path(&state.root, &relative) { + Ok(path) => u8::from(path.is_dir()), + Err(_) => 0, + } +} + #[no_mangle] pub extern "C" fn capable_rt_fs_read_bytes( fs: Handle, @@ -577,6 +612,23 @@ pub extern "C" fn capable_rt_fs_dir_exists(dir: 
Handle, name: *const CapString) } } +#[no_mangle] +pub extern "C" fn capable_rt_fs_dir_is_dir(dir: Handle, name: *const CapString) -> u8 { + let name = unsafe { read_cap_string(name) }; + let state = clone_handle(&DIRS, dir, "dir table"); + let (Some(state), Some(name)) = (state, name) else { + return 0; + }; + let Some(name_rel) = normalize_relative(Path::new(&name)) else { + return 0; + }; + let combined = state.rel.join(name_rel); + match resolve_rooted_path(&state.root, &combined) { + Ok(path) => u8::from(path.is_dir()), + Err(_) => 0, + } +} + #[no_mangle] pub extern "C" fn capable_rt_fs_dir_read_bytes( dir: Handle, @@ -667,6 +719,58 @@ pub extern "C" fn capable_rt_fs_dir_read_to_string( } } +#[no_mangle] +pub extern "C" fn capable_rt_fs_dir_create_dir_all( + dir: Handle, + path: *const CapString, + out_err: *mut i32, +) -> u8 { + let path = unsafe { read_cap_string(path) }; + let state = clone_handle(&DIRS, dir, "dir table"); + let (Some(state), Some(path)) = (state, path) else { + return write_unit_result(out_err, Err(FsErr::PermissionDenied)); + }; + let Some(path_rel) = normalize_relative(Path::new(&path)) else { + return write_unit_result(out_err, Err(FsErr::InvalidPath)); + }; + let combined = state.rel.join(path_rel); + let full = state.root.join(combined); + if !full.starts_with(&state.root) { + return write_unit_result(out_err, Err(FsErr::InvalidPath)); + } + match std::fs::create_dir_all(&full) { + Ok(()) => write_unit_result(out_err, Ok(())), + Err(err) => write_unit_result(out_err, Err(map_fs_err(err))), + } +} + +#[no_mangle] +pub extern "C" fn capable_rt_fs_dir_write_string( + dir: Handle, + path: *const CapString, + data: *const CapString, + out_err: *mut i32, +) -> u8 { + let path = unsafe { read_cap_string(path) }; + let data = unsafe { read_cap_string(data) }; + let state = clone_handle(&DIRS, dir, "dir table"); + let (Some(state), Some(path), Some(data)) = (state, path, data) else { + return write_unit_result(out_err, 
Err(FsErr::PermissionDenied)); + }; + let Some(path_rel) = normalize_relative(Path::new(&path)) else { + return write_unit_result(out_err, Err(FsErr::InvalidPath)); + }; + let combined = state.rel.join(path_rel); + let full = state.root.join(combined); + if !full.starts_with(&state.root) { + return write_unit_result(out_err, Err(FsErr::InvalidPath)); + } + match std::fs::write(&full, data) { + Ok(()) => write_unit_result(out_err, Ok(())), + Err(err) => write_unit_result(out_err, Err(map_fs_err(err))), + } +} + #[no_mangle] pub extern "C" fn capable_rt_fs_join( out: *mut CapString, diff --git a/stdlib/sys/fs.cap b/stdlib/sys/fs.cap index 91c6ac8..805b2a9 100644 --- a/stdlib/sys/fs.cap +++ b/stdlib/sys/fs.cap @@ -53,6 +53,11 @@ impl ReadFS { return false } + /// True if a path exists and is a directory. + pub fn is_dir(self: &ReadFS, path: string) -> bool { + return false + } + /// Close the capability. pub fn close(self) -> unit { return () @@ -107,6 +112,11 @@ impl Dir { return false } + /// True if a path exists and is a directory. + pub fn is_dir(self: &Dir, name: string) -> bool { + return false + } + /// Read a file into a string using the process default allocator. pub fn read_to_string(self: &Dir, name: string) -> Result { return self.read_to_string_with_alloc(buffer::default_alloc(), name) @@ -117,6 +127,16 @@ impl Dir { return () } + /// Create a directory and any missing parents. + pub fn create_dir_all(self: &Dir, path: string) -> Result { + return Err(FsErr::IoError) + } + + /// Write a string to a file relative to this directory. + pub fn write_string(self: &Dir, path: string, data: string) -> Result { + return Err(FsErr::IoError) + } + /// Close the capability. 
pub fn close(self) -> unit { return () diff --git a/stdlib/sys/path.cap b/stdlib/sys/path.cap index 3e0a0db..2122099 100644 --- a/stdlib/sys/path.cap +++ b/stdlib/sys/path.cap @@ -4,6 +4,7 @@ module sys::path use sys::buffer use sys::fs +use sys::string use sys::vec fn trim_trailing_slashes(raw_path: string) -> string { @@ -128,6 +129,63 @@ pub fn dirname_view(raw_path: string) -> string { return result } +fn extension_cut(raw_path: string) -> i32 { + let base = basename_view(raw_path) + if (base.is_empty()) { + return raw_path.len() + } + let dot = 0 + match (base.last_index_of_byte('.')) { + Ok(found) => { dot = found } + Err(_) => { return raw_path.len() } + } + if (dot == 0) { + return raw_path.len() + } + return raw_path.len() - base.len() + dot +} + +/// Replace or add a path extension using the process default allocator. +/// `ext` may be provided with or without a leading `.`. +pub fn replace_extension(raw_path: string, ext: string) -> string { + return replace_extension_with_alloc(buffer::default_alloc(), raw_path, ext) +} + +/// Replace or add a path extension using the provided allocator. +/// `ext` may be provided with or without a leading `.`. +pub fn replace_extension_with_alloc(alloc: buffer::Alloc, raw_path: string, ext: string) -> string { + let cut = extension_cut(raw_path) + let out = string::text_new_with_alloc(alloc) + defer out.free() + if (cut > 0) { + let prefix = match (raw_path.slice_range(0, cut)) { + Ok(part) => { part } + Err(_) => { panic() } + } + match (out.push_str(prefix)) { + Ok(_) => { } + Err(_) => { panic() } + } + } + if (!ext.is_empty()) { + if (!ext.starts_with(".")) { + match (out.push_byte('.')) { + Ok(_) => { } + Err(_) => { panic() } + } + } + match (out.push_str(ext)) { + Ok(_) => { } + Err(_) => { panic() } + } + } + let result = out.copy_string() + return match (result) { + Ok(path) => { path } + Err(_) => { panic() } + } +} + /// Join two path segments using the process default allocator. 
pub fn join(a: string, b: string) -> string { return fs::join(a, b) From ac01cef84f4a9d6e7537ebc1333e062831c82b65 Mon Sep 17 00:00:00 2001 From: Jordan Mecom Date: Thu, 26 Mar 2026 12:57:06 -0700 Subject: [PATCH 13/17] Expand static site generator example --- capc/tests/run.rs | 8 +- examples/static_site/content/about.md | 16 +- examples/static_site/content/index.md | 19 +- .../static_site/content/posts/capabilities.md | 17 +- examples/static_site/content/posts/hello.md | 11 +- examples/static_site/content/posts/tooling.md | 22 + examples/static_site/out/about.html | 25 +- examples/static_site/out/index.html | 46 +- .../static_site/out/posts/capabilities.html | 28 +- examples/static_site/out/posts/hello.html | 21 +- examples/static_site/out/posts/index.html | 41 ++ examples/static_site/out/posts/tooling.html | 35 ++ examples/static_site/out/site.css | 1 + examples/static_site/sitegen.cap | 586 ++++++++++++++++-- stdlib/sys/path.cap | 20 + stdlib/sys/string.cap | 16 + tests/programs/path_helpers.cap | 1 + tests/programs/string_helpers.cap | 4 + 18 files changed, 861 insertions(+), 56 deletions(-) create mode 100644 examples/static_site/content/posts/tooling.md create mode 100644 examples/static_site/out/posts/index.html create mode 100644 examples/static_site/out/posts/tooling.html create mode 100644 examples/static_site/out/site.css diff --git a/capc/tests/run.rs b/capc/tests/run.rs index fd60742..0eba46f 100644 --- a/capc/tests/run.rs +++ b/capc/tests/run.rs @@ -231,16 +231,22 @@ fn run_static_site_example() { ]); assert_eq!(code, 0, "stderr was: {stderr:?}"); assert!( - stdout.contains("generated pages: 4"), + stdout.contains("generated pages: 6"), "stdout was: {stdout:?}" ); let index = std::fs::read_to_string(generated_dir.join("index.html")).expect("read index"); let post = std::fs::read_to_string(generated_dir.join("posts").join("hello.html")).expect("read post"); + let archive = + 
std::fs::read_to_string(generated_dir.join("posts").join("index.html")).expect("read archive"); + let css = std::fs::read_to_string(generated_dir.join("site.css")).expect("read css"); assert!(index.contains("

Capable Static Site

"), "index was: {index:?}"); + assert!(index.contains("Recent Posts"), "index was: {index:?}"); assert!(post.contains("

Hello World

"), "post was: {post:?}"); + assert!(archive.contains("Pressure-Testing the Tooling"), "archive was: {archive:?}"); + assert!(css.contains(".site-header"), "css was: {css:?}"); } #[test] diff --git a/examples/static_site/content/about.md b/examples/static_site/content/about.md index 64a3ff4..5483992 100644 --- a/examples/static_site/content/about.md +++ b/examples/static_site/content/about.md @@ -1,6 +1,18 @@ +--- +title: About This Example +summary: Why this site generator exists and what it is trying to reveal. +--- + # About This Example -The generator reads markdown through a rooted ReadFS capability. +The generator reads markdown through a rooted `ReadFS` capability. + +The output side is narrower: it writes HTML through a rooted `Dir` capability derived from `Filesystem`. + +## Why It Exists -It writes HTML through a rooted Dir capability derived from Filesystem. +- to exercise path handling +- to exercise text building +- to exercise multi-page generation instead of one-off demos +> If the sample feels clumsy, that is signal about the language, not just the example. diff --git a/examples/static_site/content/index.md b/examples/static_site/content/index.md index 3ead4b2..12b0eac 100644 --- a/examples/static_site/content/index.md +++ b/examples/static_site/content/index.md @@ -1,6 +1,21 @@ +--- +title: Capable Static Site +summary: A small static site generated through rooted filesystem capabilities. +--- + # Capable Static Site -This example turns a small tree of markdown files into HTML. +This site is generated by a Capable program that only reads from a delegated content tree and only writes inside a delegated output tree. + +## What It Shows + +- capability-rooted filesystem access +- a small markdown-to-html renderer +- generated navigation, archive pages, and shared styling -It is intentionally simple so the language and stdlib friction stays visible. +> The point is not a production markdown engine. 
The point is to pressure the language and stdlib with real work. +```cap +let src = rc.mint_readfs("examples/static_site/content") +let out_fs = rc.mint_filesystem("examples/static_site/out") +``` diff --git a/examples/static_site/content/posts/capabilities.md b/examples/static_site/content/posts/capabilities.md index 63e2c3d..6f49f0c 100644 --- a/examples/static_site/content/posts/capabilities.md +++ b/examples/static_site/content/posts/capabilities.md @@ -1,6 +1,19 @@ -# Capabilities +--- +title: Capability-Scoped Generation +summary: The generator's authority is limited by the capabilities it receives up front. +date: 2026-03-24 +--- + +# Capability-Scoped Generation The generator can only read from the delegated content root. -It can only write inside the delegated output root. +The generator can only write inside the delegated output root. + +## Why That Matters + +- content reads are rooted +- output writes are rooted +- the program cannot escape either subtree in safe code +> This is the part that makes the example more interesting than a plain scripting demo. diff --git a/examples/static_site/content/posts/hello.md b/examples/static_site/content/posts/hello.md index 357512a..f632db8 100644 --- a/examples/static_site/content/posts/hello.md +++ b/examples/static_site/content/posts/hello.md @@ -1,6 +1,13 @@ +--- +title: Hello World +summary: The smallest post in the set, used to prove the page pipeline end to end. +date: 2026-03-18 +--- + # Hello World This is the first generated page. -The output path mirrors the input tree and replaces the extension with html. 
- +- it is read from the delegated content root +- it is rendered into HTML +- it lands beside the other generated files under `posts/` diff --git a/examples/static_site/content/posts/tooling.md b/examples/static_site/content/posts/tooling.md new file mode 100644 index 0000000..8c8d02d --- /dev/null +++ b/examples/static_site/content/posts/tooling.md @@ -0,0 +1,22 @@ +--- +title: Pressure-Testing the Tooling +summary: Real examples are where stdlib and compiler rough edges become obvious. +date: 2026-03-26 +--- + +# Pressure-Testing the Tooling + +The first version of the generator exposed missing pieces immediately: + +- directory classification +- recursive directory creation +- rooted string writes +- path extension replacement + +## The Useful Rule + +When a sample program wants a weird workaround, it is usually pointing at a stdlib deficiency. + +```text +sample pain -> repeated workaround -> stdlib helper +``` diff --git a/examples/static_site/out/about.html b/examples/static_site/out/about.html index 94298d3..314ef31 100644 --- a/examples/static_site/out/about.html +++ b/examples/static_site/out/about.html @@ -2,12 +2,31 @@ + About This Example - + + +
+

About This Example

-

The generator reads markdown through a rooted ReadFS capability.

-

It writes HTML through a rooted Dir capability derived from Filesystem.

+

The generator reads markdown through a rooted `ReadFS` capability.

+

The output side is narrower: it writes HTML through a rooted `Dir` capability derived from `Filesystem`.

+

Why It Exists

+
    +
  • to exercise path handling
  • +
  • to exercise text building
  • +
  • to exercise multi-page generation instead of one-off demos
  • +
+
If the sample feels clumsy, that is signal about the language, not just the example.
+
+
+
Generated with Capable.
diff --git a/examples/static_site/out/index.html b/examples/static_site/out/index.html index e8f7483..65f1083 100644 --- a/examples/static_site/out/index.html +++ b/examples/static_site/out/index.html @@ -2,12 +2,52 @@ + Capable Static Site - + + +
+

Capable Static Site

-

This example turns a small tree of markdown files into HTML.

-

It is intentionally simple so the language and stdlib friction stays visible.

+

This site is generated by a Capable program that only reads from a delegated content tree and only writes inside a delegated output tree.

+

What It Shows

+
    +
  • capability-rooted filesystem access
  • +
  • a small markdown-to-html renderer
  • +
  • generated navigation, archive pages, and shared styling
  • +
+
The point is not a production markdown engine. The point is to pressure the language and stdlib with real work.
+
let src = rc.mint_readfs("examples/static_site/content")
+let out_fs = rc.mint_filesystem("examples/static_site/out")
+
+
+

Recent Posts

+ + +
+

Hello World

+

2026-03-18

+

The smallest post in the set, used to prove the page pipeline end to end.

+
+ +
+
+
+
Generated with Capable.
diff --git a/examples/static_site/out/posts/capabilities.html b/examples/static_site/out/posts/capabilities.html index 4844e89..4ffdfed 100644 --- a/examples/static_site/out/posts/capabilities.html +++ b/examples/static_site/out/posts/capabilities.html @@ -2,12 +2,32 @@ -Capabilities - + +Capability-Scoped Generation + -

Capabilities

+ +
+
+

2026-03-24

+

Capability-Scoped Generation

The generator can only read from the delegated content root.

-

It can only write inside the delegated output root.

+

The generator can only write inside the delegated output root.

+

Why That Matters

+
    +
  • content reads are rooted
  • +
  • output writes are rooted
  • +
  • the program cannot escape either subtree in safe code
  • +
+
This is the part that makes the example more interesting than a plain scripting demo.
+
+
+
Generated with Capable.
diff --git a/examples/static_site/out/posts/hello.html b/examples/static_site/out/posts/hello.html index 69d7f0f..43651e5 100644 --- a/examples/static_site/out/posts/hello.html +++ b/examples/static_site/out/posts/hello.html @@ -2,12 +2,29 @@ + Hello World - + + +
+
+

2026-03-18

Hello World

This is the first generated page.

-

The output path mirrors the input tree and replaces the extension with html.

+
    +
  • it is read from the delegated content root
  • +
  • it is rendered into HTML
  • +
  • it lands beside the other generated files under `posts/`
  • +
+
+
+
Generated with Capable.
diff --git a/examples/static_site/out/posts/index.html b/examples/static_site/out/posts/index.html new file mode 100644 index 0000000..3afd605 --- /dev/null +++ b/examples/static_site/out/posts/index.html @@ -0,0 +1,41 @@ + + + + + +Post Archive + + + + +
+
+

Post Archive

+

Every generated post, sorted by date.

+
+ + +
+

Hello World

+

2026-03-18

+

The smallest post in the set, used to prove the page pipeline end to end.

+
+
+
+
+
Generated with Capable.
+ + diff --git a/examples/static_site/out/posts/tooling.html b/examples/static_site/out/posts/tooling.html new file mode 100644 index 0000000..c00480b --- /dev/null +++ b/examples/static_site/out/posts/tooling.html @@ -0,0 +1,35 @@ + + + + + +Pressure-Testing the Tooling + + + + +
+
+

2026-03-26

+

Pressure-Testing the Tooling

+

The first version of the generator exposed missing pieces immediately:

+
    +
  • directory classification
  • +
  • recursive directory creation
  • +
  • rooted string writes
  • +
  • path extension replacement
  • +
+

The Useful Rule

+

When a sample program wants a weird workaround, it is usually pointing at a stdlib deficiency.

+
sample pain -> repeated workaround -> stdlib helper
+
+
+
+
Generated with Capable.
+ + diff --git a/examples/static_site/out/site.css b/examples/static_site/out/site.css new file mode 100644 index 0000000..549011d --- /dev/null +++ b/examples/static_site/out/site.css @@ -0,0 +1 @@ +html{background:#f5efe7;color:#1a1714;}body{margin:0;font:16px/1.7 Georgia,serif;}a{color:#8b2f23;text-decoration:none;}a:hover{text-decoration:underline;}code,pre{font:14px/1.6 Menlo,monospace;}pre{background:#1f1a17;color:#f8efe4;padding:16px;border-radius:14px;overflow:auto;}blockquote{border-left:4px solid #d58b61;margin:1.2rem 0;padding:0.2rem 0 0.2rem 1rem;color:#5d4636;}ul{padding-left:1.2rem;}.site-shell{max-width:860px;margin:0 auto;padding:0 24px;}.site-header{background:#1f1a17;color:#f8efe4;border-bottom:1px solid #3e332c;}.brand-row{display:flex;justify-content:space-between;align-items:center;padding:20px 0;gap:16px;}.brand-row a{color:#f8efe4;}.site-nav{display:flex;gap:16px;}.site-footer{border-top:1px solid #d8cec3;color:#6d6054;padding:24px 0 40px;}.page{padding:40px 0 56px;}.page h1,.page h2,.post-card h2,.post-card h3{line-height:1.15;margin-bottom:0.5rem;}.meta{font:13px/1.5 Menlo,monospace;color:#6d6054;text-transform:uppercase;letter-spacing:0.08em;}.post-list{display:grid;gap:20px;}.post-card{background:#fffdf9;border:1px solid #e0d4c8;border-radius:18px;padding:20px 22px;box-shadow:0 10px 30px rgba(31,26,23,0.05);}.archive-link{margin-top:1rem;}.page p,.page li,.post-card p{max-width:70ch;} \ No newline at end of file diff --git a/examples/static_site/sitegen.cap b/examples/static_site/sitegen.cap index 36d4024..4b50f98 100644 --- a/examples/static_site/sitegen.cap +++ b/examples/static_site/sitegen.cap @@ -13,11 +13,11 @@ enum GenErr { Alloc } -fn slice_after(s: string, count: i32) -> string { - return match (s.slice_range(count, s.len())) { - Ok(out) => { out } - Err(_) => { "" } - } +copy struct PageInfo { + title: string, + summary: string, + date: string, + url: string } fn join_rel(base: string, name: string) -> string { @@ -27,6 +27,107 
@@ fn join_rel(base: string, name: string) -> string { return path::join(base, name) } +fn str_lt(a: string, b: string) -> bool { + let a_bytes = a.as_slice() + let b_bytes = b.as_slice() + let len = a_bytes.len() + if (b_bytes.len() < len) { + len = b_bytes.len() + } + let i = 0 + while (i < len) { + let ca = a_bytes.at(i) + let cb = b_bytes.at(i) + if (ca < cb) { + return true + } + if (ca > cb) { + return false + } + i = i + 1 + } + return a.len() < b.len() +} + +fn empty_page_info() -> PageInfo { + return PageInfo { + title: "", + summary: "", + date: "", + url: "" + } +} + +fn fallback_title(rel: string) -> string { + return path::stem_view(rel) +} + +fn is_post_path(rel: string) -> bool { + return rel.starts_with("posts/") && rel.ends_with(".md") +} + +fn parse_page_info(rel: string, url: string, markdown: string) -> PageInfo { + let title = "" + let summary = "" + let date = "" + let phase = 0 + let lines = markdown.lines_view() + defer lines.free() + + for line in lines { + let trimmed = line.trim_view() + if (phase == 0) { + if (trimmed.is_empty()) { + continue + } + if (trimmed.eq("---")) { + phase = 1 + continue + } + phase = 2 + } + + if (phase == 1) { + if (trimmed.eq("---")) { + phase = 2 + continue + } + if (trimmed.starts_with("title:")) { + title = trimmed.trim_prefix_view("title:").trim_view() + continue + } + if (trimmed.starts_with("summary:")) { + summary = trimmed.trim_prefix_view("summary:").trim_view() + continue + } + if (trimmed.starts_with("date:")) { + date = trimmed.trim_prefix_view("date:").trim_view() + continue + } + continue + } + + if (title.is_empty() && trimmed.starts_with("# ")) { + title = trimmed.trim_prefix_view("# ").trim_view() + continue + } + if (summary.is_empty() && !trimmed.is_empty() && !trimmed.starts_with("#") && !trimmed.starts_with("- ") && !trimmed.starts_with("> ") && !trimmed.starts_with("```")) { + summary = trimmed + } + } + + if (title.is_empty()) { + title = fallback_title(rel) + } + + return PageInfo { + 
title: title, + summary: summary, + date: date, + url: url + } +} + fn push_text(out: string::Text, value: string) -> bool { match (out.push_str(value)) { Ok(_) => { return true } @@ -90,62 +191,359 @@ fn push_wrapped(out: string::Text, open: string, body: string, close: string) -> return true } -fn fallback_title(rel: string) -> string { - return path::replace_extension(path::basename_view(rel), "") +fn path_depth(page_rel: string) -> i32 { + let dir = path::dirname_view(page_rel) + if (dir.is_empty()) { + return 0 + } + let parts = path::segments_view(dir) + defer parts.free() + return parts.len() +} + +fn push_site_href(out: string::Text, page_rel: string, target_rel: string) -> bool { + let depth = path_depth(page_rel) + for i in 0..depth { + if (!push_text(out, "../")) { + return false + } + } + return push_text(out, target_rel) +} + +fn push_nav_link(out: string::Text, page_rel: string, target_rel: string, label: string) -> bool { + if (!push_text(out, "")) { + return false + } + if (!push_html_escaped(out, label)) { + return false + } + if (!push_text(out, "")) { + return false + } + return true +} + +fn render_layout_open(out: string::Text, page: PageInfo, page_rel: string) -> bool { + if (!push_text(out, "\n\n\n\n\n")) { + return false + } + if (!push_html_escaped(out, page.title)) { + return false + } + if (!push_text(out, "\n\n\n\n
\n
\n
")) { + return false + } + if (!push_nav_link(out, page_rel, "index.html", "Capable Static")) { + return false + } + if (!push_text(out, "\n
\n
\n
\n
\n")) { + return false + } + return true +} + +fn render_layout_close(out: string::Text) -> bool { + return push_text(out, "
\n
Generated with Capable.
\n\n\n") } -fn extract_title(rel: string, markdown: string) -> string { +fn render_markdown(out: string::Text, markdown: string) -> bool { + let phase = 0 + let in_list = false + let in_code = false let lines = markdown.lines_view() defer lines.free() + for line in lines { let trimmed = line.trim_view() + if (phase == 0) { + if (trimmed.is_empty()) { + continue + } + if (trimmed.eq("---")) { + phase = 1 + continue + } + phase = 2 + } + + if (phase == 1) { + if (trimmed.eq("---")) { + phase = 2 + } + continue + } + + if (in_code) { + if (trimmed.starts_with("```")) { + if (!push_text(out, "\n")) { + return false + } + in_code = false + continue + } + if (!push_html_escaped(out, line)) { + return false + } + if (!push_byte(out, '\n')) { + return false + } + continue + } + + if (trimmed.starts_with("```")) { + if (in_list) { + if (!push_text(out, "\n")) { + return false + } + in_list = false + } + if (!push_text(out, "
")) {
+        return false
+      }
+      in_code = true
+      continue
+    }
+
+    if (trimmed.is_empty()) {
+      if (in_list) {
+        if (!push_text(out, "\n")) {
+          return false
+        }
+        in_list = false
+      }
+      continue
+    }
+
+    if (trimmed.starts_with("- ")) {
+      if (!in_list) {
+        if (!push_text(out, "
    \n")) { + return false + } + in_list = true + } + if (!push_wrapped(out, "
  • ", trimmed.trim_prefix_view("- ").trim_view(), "
  • \n")) { + return false + } + continue + } + + if (in_list) { + if (!push_text(out, "
\n")) { + return false + } + in_list = false + } + if (trimmed.starts_with("# ")) { - return slice_after(trimmed, 2).trim_view() + if (!push_wrapped(out, "

", trimmed.trim_prefix_view("# ").trim_view(), "

\n")) { + return false + } + continue + } + + if (trimmed.starts_with("## ")) { + if (!push_wrapped(out, "

", trimmed.trim_prefix_view("## ").trim_view(), "

\n")) { + return false + } + continue + } + + if (trimmed.starts_with("> ")) { + if (!push_wrapped(out, "
", trimmed.trim_prefix_view("> ").trim_view(), "
\n")) { + return false + } + continue + } + + if (!push_wrapped(out, "

", trimmed, "

\n")) { + return false } } - return fallback_title(rel) + + if (in_list) { + if (!push_text(out, "\n")) { + return false + } + } + + if (in_code) { + if (!push_text(out, "
\n")) { + return false + } + } + + return true +} + +fn render_recent_posts(out: string::Text, page_rel: string, posts: vec::Vec) -> bool { + if (posts.is_empty()) { + return true + } + if (!push_text(out, "
\n

Recent Posts

\n")) { + return false + } + + let shown = 0 + for post in posts { + if (shown >= 3) { + break + } + if (!push_text(out, "\n")) { + return false + } + shown = shown + 1 + } + + if (!push_text(out, "

")) { + return false + } + if (!push_nav_link(out, page_rel, "posts/index.html", "Browse all posts")) { + return false + } + if (!push_text(out, "

\n
\n")) { + return false + } + return true } -fn render_page(rel: string, markdown: string) -> string { - let title = extract_title(rel, markdown) +fn render_page(rel: string, out_path: string, markdown: string, posts: vec::Vec) -> string { + let page = parse_page_info(rel, out_path, markdown) let out = string::text_new() defer out.free() - if (!push_text(out, "\n\n\n\n")) { + if (!render_layout_open(out, page, out_path)) { return "" } - if (!push_html_escaped(out, title)) { + if (!push_text(out, "<article class=\"page\">\n")) { return "" } - if (!push_text(out, "\n\n\n\n")) { + if (!page.date.is_empty()) { + if (!push_wrapped(out, "

", page.date, "

\n")) { + return "" + } + } + if (!render_markdown(out, markdown)) { return "" } + if (out_path.eq("index.html")) { + if (!render_recent_posts(out, out_path, posts)) { + return "" + } + } + if (!push_text(out, "\n")) { + return "" + } + if (!render_layout_close(out)) { + return "" + } + match (out.copy_string()) { + Ok(html) => { return html } + Err(_) => { return "" } + } +} - let lines = markdown.lines_view() - defer lines.free() - for line in lines { - let trimmed = line.trim_view() - if (trimmed.is_empty()) { - continue +fn render_archive(posts: vec::Vec) -> string { + let page = PageInfo { + title: "Post Archive", + summary: "", + date: "", + url: "posts/index.html" + } + let out = string::text_new() + defer out.free() + + if (!render_layout_open(out, page, "posts/index.html")) { + return "" + } + if (!push_text(out, "
\n

Post Archive

\n

Every generated post, sorted by date.

\n
\n")) { + return "" + } + + for post in posts { + if (!push_text(out, "
\n

", slice_after(trimmed, 2).trim_view(), "

\n")) { + if (!push_site_href(out, "posts/index.html", post.url)) { + return "" + } + if (!push_text(out, "\">")) { + return "" + } + if (!push_html_escaped(out, post.title)) { + return "" + } + if (!push_text(out, "\n")) { + return "" + } + if (!post.date.is_empty()) { + if (!push_wrapped(out, "

", post.date, "

\n")) { return "" } - continue } - if (trimmed.starts_with("## ")) { - if (!push_wrapped(out, "

", slice_after(trimmed, 3).trim_view(), "

\n")) { + if (!post.summary.is_empty()) { + if (!push_wrapped(out, "

", post.summary, "

\n")) { return "" } - continue } - if (!push_wrapped(out, "

", trimmed, "

\n")) { + if (!push_text(out, "
\n")) { return "" } } - if (!push_text(out, "\n\n")) { + if (!push_text(out, "
\n
\n")) { + return "" + } + if (!render_layout_close(out)) { return "" } match (out.copy_string()) { @@ -154,7 +552,83 @@ fn render_page(rel: string, markdown: string) -> string { } } -fn generate_tree(c: Console, src: &ReadFS, out: &Dir, rel: string) -> i32 { +fn write_site_css(c: Console, out: &Dir) -> bool { + let css = "html{background:#f5efe7;color:#1a1714;}body{margin:0;font:16px/1.7 Georgia,serif;}a{color:#8b2f23;text-decoration:none;}a:hover{text-decoration:underline;}code,pre{font:14px/1.6 Menlo,monospace;}pre{background:#1f1a17;color:#f8efe4;padding:16px;border-radius:14px;overflow:auto;}blockquote{border-left:4px solid #d58b61;margin:1.2rem 0;padding:0.2rem 0 0.2rem 1rem;color:#5d4636;}ul{padding-left:1.2rem;}.site-shell{max-width:860px;margin:0 auto;padding:0 24px;}.site-header{background:#1f1a17;color:#f8efe4;border-bottom:1px solid #3e332c;}.brand-row{display:flex;justify-content:space-between;align-items:center;padding:20px 0;gap:16px;}.brand-row a{color:#f8efe4;}.site-nav{display:flex;gap:16px;}.site-footer{border-top:1px solid #d8cec3;color:#6d6054;padding:24px 0 40px;}.page{padding:40px 0 56px;}.page h1,.page h2,.post-card h2,.post-card h3{line-height:1.15;margin-bottom:0.5rem;}.meta{font:13px/1.5 Menlo,monospace;color:#6d6054;text-transform:uppercase;letter-spacing:0.08em;}.post-list{display:grid;gap:20px;}.post-card{background:#fffdf9;border:1px solid #e0d4c8;border-radius:18px;padding:20px 22px;box-shadow:0 10px 30px rgba(31,26,23,0.05);}.archive-link{margin-top:1rem;}.page p,.page li,.post-card p{max-width:70ch;}" + try out.write_string("site.css", css) else err { + c.println("failed to write site.css") + print_error(c, GenErr::Fs(err)) + return false + } + c.println("generated site.css") + return true +} + +fn collect_posts(c: Console, src: &ReadFS, rel: string, posts: vec::Vec) -> bool { + if (src.is_dir(rel)) { + try let entries = src.list_dir(rel) else err { + c.print("failed to list ") + c.println(rel) + print_error(c, GenErr::Fs(err)) + 
return false + } + defer entries.free() + for entry in entries { + let child = join_rel(rel, entry) + if (!collect_posts(c, src, child, posts)) { + return false + } + } + return true + } + + if (!is_post_path(rel)) { + return true + } + + try let markdown = src.read_to_string(rel) else err { + c.print("failed to read ") + c.println(rel) + print_error(c, GenErr::Fs(err)) + return false + } + + let info = parse_page_info(rel, path::replace_extension(rel, "html"), markdown) + try posts.push(info) else { + c.println("failed to record post metadata") + print_error(c, GenErr::Alloc) + return false + } + return true +} + +fn post_before(posts: vec::Vec, i: i32, j: i32) -> bool { + let left = posts.get_or(i, empty_page_info()) + let right = posts.get_or(j, empty_page_info()) + if (!left.date.eq(right.date)) { + return str_lt(right.date, left.date) + } + return str_lt(left.title, right.title) +} + +fn sort_posts(posts: vec::Vec) -> bool { + let n = posts.len() + for i in 1..n { + let j = i + while (j > 0) { + if (post_before(posts, j, j - 1)) { + try posts.swap(j, j - 1) else { + return false + } + j = j - 1 + } else { + break + } + } + } + return true +} + +fn generate_tree(c: Console, src: &ReadFS, out: &Dir, rel: string, posts: vec::Vec) -> i32 { if (src.is_dir(rel)) { try let entries = src.list_dir(rel) else err { c.print("failed to list ") @@ -166,7 +640,7 @@ fn generate_tree(c: Console, src: &ReadFS, out: &Dir, rel: string) -> i32 { let generated = 0 for entry in entries { let child = join_rel(rel, entry) - let count = generate_tree(c, src, out, child) + let count = generate_tree(c, src, out, child, posts) if (count < 0) { return -1 } @@ -185,14 +659,16 @@ fn generate_tree(c: Console, src: &ReadFS, out: &Dir, rel: string) -> i32 { print_error(c, GenErr::Fs(err)) return -1 } - let html = render_page(rel, markdown) + + let out_path = path::replace_extension(rel, "html") + let html = render_page(rel, out_path, markdown, posts) if (html.is_empty()) { c.print("failed to 
render ") c.println(rel) print_error(c, GenErr::Alloc) return -1 } - let out_path = path::replace_extension(rel, "html") + let out_dir = path::dirname_view(out_path) if (!out_dir.is_empty()) { try out.create_dir_all(out_dir) else err { @@ -214,6 +690,27 @@ fn generate_tree(c: Console, src: &ReadFS, out: &Dir, rel: string) -> i32 { return 1 } +fn generate_archive(c: Console, out: &Dir, posts: vec::Vec) -> bool { + try out.create_dir_all("posts") else err { + c.println("failed to create posts archive dir") + print_error(c, GenErr::Fs(err)) + return false + } + let html = render_archive(posts) + if (html.is_empty()) { + c.println("failed to render posts archive") + print_error(c, GenErr::Alloc) + return false + } + try out.write_string("posts/index.html", html) else err { + c.println("failed to write posts archive") + print_error(c, GenErr::Fs(err)) + return false + } + c.println("generated posts/index.html") + return true +} + fn print_error(c: Console, err: GenErr) -> unit { match (err) { GenErr::Alloc => { @@ -250,17 +747,36 @@ pub fn main(rc: RootCap) -> i32 { let out = out_fs.root_dir() let src_ref: &ReadFS = src let out_ref: &Dir = out + let posts = vec::new() + defer posts.free() defer src.close() defer out.close() - let generated = generate_tree(c, src_ref, out_ref, "") + if (!collect_posts(c, src_ref, "", posts)) { + c.println("site generation failed") + return 1 + } + if (!sort_posts(posts)) { + c.println("failed to sort posts") + return 1 + } + + let generated = generate_tree(c, src_ref, out_ref, "", posts) if (generated < 0) { c.println("site generation failed") return 1 } + if (!generate_archive(c, out_ref, posts)) { + c.println("site generation failed") + return 1 + } + if (!write_site_css(c, out_ref)) { + c.println("site generation failed") + return 1 + } c.print("generated pages: ") - c.println_i32(generated) + c.println_i32(generated + 1) return 0 } diff --git a/stdlib/sys/path.cap b/stdlib/sys/path.cap index 2122099..58007a5 100644 --- 
a/stdlib/sys/path.cap +++ b/stdlib/sys/path.cap @@ -109,6 +109,26 @@ pub fn basename_view(raw_path: string) -> string { } } +/// Return the basename without the final extension as a view. +pub fn stem_view(raw_path: string) -> string { + let base = basename_view(raw_path) + if (base.is_empty()) { + return base + } + match (base.last_index_of_byte('.')) { + Ok(i) => { + if (i == 0) { + return base + } + return match (base.slice_range(0, i)) { + Ok(out) => { out } + Err(_) => { panic() } + } + } + Err(_) => { return base } + } +} + /// Return the dirname of a path as a view. pub fn dirname_view(raw_path: string) -> string { let path = trim_trailing_slashes(raw_path) diff --git a/stdlib/sys/string.cap b/stdlib/sys/string.cap index 4f1902c..03fdcb1 100644 --- a/stdlib/sys/string.cap +++ b/stdlib/sys/string.cap @@ -661,6 +661,14 @@ impl string { return self.trim_prefix_with_alloc(buffer::default_alloc(), prefix) } + /// Remove a leading prefix if present without copying. + pub fn trim_prefix_view(self, prefix: string) -> string { + if (self.starts_with(prefix)) { + return view_range(self, prefix.len(), self.len()) + } + return self + } + /// Remove a leading prefix if present using the provided allocator. pub fn trim_prefix_with_alloc(self, alloc: buffer::Alloc, prefix: string) -> string { if (self.starts_with(prefix)) { @@ -674,6 +682,14 @@ impl string { return self.trim_suffix_with_alloc(buffer::default_alloc(), suffix) } + /// Remove a trailing suffix if present without copying. + pub fn trim_suffix_view(self, suffix: string) -> string { + if (self.ends_with(suffix)) { + return view_range(self, 0, self.len() - suffix.len()) + } + return self + } + /// Remove a trailing suffix if present using the provided allocator. 
pub fn trim_suffix_with_alloc(self, alloc: buffer::Alloc, suffix: string) -> string { if (self.ends_with(suffix)) { diff --git a/tests/programs/path_helpers.cap b/tests/programs/path_helpers.cap index 599937e..9dd2c50 100644 --- a/tests/programs/path_helpers.cap +++ b/tests/programs/path_helpers.cap @@ -24,6 +24,7 @@ pub fn main(rc: RootCap) -> i32 { c.assert(parts.get_or(1, "").eq("docs")) c.assert(parts.get_or(2, "").eq("index.html")) c.assert(path::basename_view("site/docs/index.html").eq("index.html")) + c.assert(path::stem_view("site/docs/index.html").eq("index")) c.assert(path::dirname_view("site/docs/index.html").eq("site/docs")) let joined = path::join("site/docs", "index.html") diff --git a/tests/programs/string_helpers.cap b/tests/programs/string_helpers.cap index a67c282..269b17a 100644 --- a/tests/programs/string_helpers.cap +++ b/tests/programs/string_helpers.cap @@ -22,6 +22,8 @@ pub fn main(rc: RootCap) -> i32 { let trimmed_start_view = " hi ".trim_start_view() let trimmed_end = " hi ".trim_end() let trimmed_end_view = " hi ".trim_end_view() + let trimmed_prefix_view = "title: Hello".trim_prefix_view("title:") + let trimmed_suffix_view = "hello.html".trim_suffix_view(".html") let trimmed_ascii = " \tHi\n".trim_ascii() let lower = "AbC".to_lower_ascii() let upper = "AbC".to_upper_ascii() @@ -51,6 +53,8 @@ pub fn main(rc: RootCap) -> i32 { c.assert(trimmed_start_view.starts_with("hi")) c.assert(trimmed_end.ends_with("hi")) c.assert(trimmed_end_view.ends_with("hi")) + c.assert(trimmed_prefix_view.trim_view().eq("Hello")) + c.assert(trimmed_suffix_view.eq("hello")) c.assert(trimmed_ascii.eq("Hi")) c.assert(lower.eq("abc")) c.assert(upper.eq("ABC")) From 01268ae834ba92b4057fa798cb79916af309df74 Mon Sep 17 00:00:00 2001 From: Jordan Mecom Date: Thu, 26 Mar 2026 15:24:36 -0700 Subject: [PATCH 14/17] Add Neovim support and string compare --- caplsp/src/main.rs | 26 ++-- docs/editor.md | 41 +++++++ examples/sort/sort.cap | 29 +---- 
examples/static_site/sitegen.cap | 36 +----- ftdetect/capable.lua | 5 + ftplugin/cap.lua | 1 + lua/capable/init.lua | 135 +++++++++++++++++++++ plugin/capable.lua | 7 ++ queries/capable/highlights.scm | 54 +++++++++ stdlib/sys/string.cap | 30 +++++ syntax/cap.vim | 31 +++++ tests/programs/path_helpers.cap | 2 + tests/programs/string_compare.cap | 6 + tree-sitter-capable/queries/highlights.scm | 6 + vscode/syntaxes/capable.tmLanguage.json | 2 +- 15 files changed, 343 insertions(+), 68 deletions(-) create mode 100644 ftdetect/capable.lua create mode 100644 ftplugin/cap.lua create mode 100644 lua/capable/init.lua create mode 100644 plugin/capable.lua create mode 100644 queries/capable/highlights.scm create mode 100644 syntax/cap.vim diff --git a/caplsp/src/main.rs b/caplsp/src/main.rs index 9cfdda9..bdb8609 100644 --- a/caplsp/src/main.rs +++ b/caplsp/src/main.rs @@ -7,7 +7,7 @@ use lsp_types::{ }; use capc::{load_stdlib, load_user_modules_transitive, parse_module, type_check_program}; -use capc::ast::Span; +use capc::ast::{Module, Span}; use capc::error::{ParseError, TypeError}; fn main() { @@ -51,6 +51,7 @@ fn main() { #[derive(Default)] struct ServerState { open_files: HashMap, + stdlib: Option>, } fn handle_request(req: Request, connection: &Connection) { @@ -88,7 +89,12 @@ fn handle_notification( "textDocument/didSave" => { let params: lsp_types::DidSaveTextDocumentParams = serde_json::from_value(notif.params).map_err(|err| err.to_string())?; - publish_diagnostics(state, ¶ms.text_document.uri, connection); + // `didChange` already publishes diagnostics for open buffers. Avoid doing the + // same full parse/load/typecheck pass again on save unless we do not have the + // file contents cached locally. 
+ if !state.open_files.contains_key(¶ms.text_document.uri) { + publish_diagnostics(state, ¶ms.text_document.uri, connection); + } } _ => {} } @@ -103,7 +109,7 @@ fn publish_diagnostics(state: &mut ServerState, uri: &Url, connection: &Connecti Err(_) => String::new(), }, }; - let diagnostics = analyze(uri, &text); + let diagnostics = analyze(state, uri, &text); let params = PublishDiagnosticsParams { uri: uri.clone(), diagnostics, @@ -115,14 +121,20 @@ fn publish_diagnostics(state: &mut ServerState, uri: &Url, connection: &Connecti ))); } -fn analyze(uri: &Url, text: &str) -> Vec { +fn analyze(state: &mut ServerState, uri: &Url, text: &str) -> Vec { let module = match parse_module(text) { Ok(module) => module, Err(err) => return vec![diag_from_parse(text, &err)], }; - let stdlib = match load_stdlib() { - Ok(stdlib) => stdlib, - Err(err) => return vec![diag_from_parse(text, &err)], + if state.stdlib.is_none() { + match load_stdlib() { + Ok(stdlib) => state.stdlib = Some(stdlib), + Err(err) => return vec![diag_from_parse(text, &err)], + } + } + let stdlib = match state.stdlib.as_ref() { + Some(stdlib) => stdlib, + None => return Vec::new(), }; let user_modules = match uri.to_file_path() { Ok(path) => match load_user_modules_transitive(&path, &module) { diff --git a/docs/editor.md b/docs/editor.md index 711ea13..6110332 100644 --- a/docs/editor.md +++ b/docs/editor.md @@ -20,6 +20,47 @@ cd tree-sitter-capable tree-sitter generate ``` +## Neovim + +This repo now works directly as a Neovim plugin. 
+ +What it gives you: + +- `*.cap` filetype detection +- built-in Vim syntax highlighting that works without tree-sitter +- comment settings for Capable buffers +- LSP startup through the `caplsp` binary +- optional `nvim-treesitter` parser registration for the bundled grammar + +### Install + +With `lazy.nvim`: + +```lua +{ + dir = "/absolute/path/to/capable", + config = function() + require("capable").setup({ + caplsp_cmd = { "/absolute/path/to/caplsp" }, + }) + end, +} +``` + +If `caplsp` is already on your `PATH`, the default `setup()` is enough. + +### Optional tree-sitter + +If you use `nvim-treesitter`, the plugin registers a `capable` parser config that +points at `tree-sitter-capable/` in this repo. After installing the plugin, run: + +```vim +:TSInstall capable +``` + +The regex-based `syntax/cap.vim` highlighting still works even if you do not use +tree-sitter. + ## VS Code 1. Launch VS Code (or Cursor) with the extension in dev mode: diff --git a/examples/sort/sort.cap b/examples/sort/sort.cap index 4ad0a20..a813194 100644 --- a/examples/sort/sort.cap +++ b/examples/sort/sort.cap @@ -6,38 +6,11 @@ use sys::stdin use sys::io use sys::vec -fn min_i32(a: i32, b: i32) -> i32 { - if (a < b) { - return a - } - return b -} - -// Returns true if a < b lexicographically -fn str_lt(a: string, b: string) -> bool { - let a_bytes = a.as_slice() - let b_bytes = b.as_slice() - let len = min_i32(a_bytes.len(), b_bytes.len()) - let i = 0 - while (i < len) { - let ca = a_bytes.at(i) - let cb = b_bytes.at(i) - if (ca < cb) { - return true - } - if (ca > cb) { - return false - } - i = i + 1 - } - return a.len() < b.len() -} - // Compare lines at indices i and j fn line_lt(lines: Vec, i: i32, j: i32) -> bool { let line_i = lines.get_or(i, "") let line_j = lines.get_or(j, "") - return str_lt(line_i, line_j) + return line_i.compare(line_j) < 0 } // Insertion sort on indices diff --git a/examples/static_site/sitegen.cap b/examples/static_site/sitegen.cap index 4b50f98..3ecce71 
100644 --- a/examples/static_site/sitegen.cap +++ b/examples/static_site/sitegen.cap @@ -20,34 +20,6 @@ copy struct PageInfo { url: string } -fn join_rel(base: string, name: string) -> string { - if (base.is_empty()) { - return name - } - return path::join(base, name) -} - -fn str_lt(a: string, b: string) -> bool { - let a_bytes = a.as_slice() - let b_bytes = b.as_slice() - let len = a_bytes.len() - if (b_bytes.len() < len) { - len = b_bytes.len() - } - let i = 0 - while (i < len) { - let ca = a_bytes.at(i) - let cb = b_bytes.at(i) - if (ca < cb) { - return true - } - if (ca > cb) { - return false - } - i = i + 1 - } - return a.len() < b.len() -} fn empty_page_info() -> PageInfo { return PageInfo { @@ -573,7 +545,7 @@ fn collect_posts(c: Console, src: &ReadFS, rel: string, posts: vec::Vec, i: i32, j: i32) -> bool { let left = posts.get_or(i, empty_page_info()) let right = posts.get_or(j, empty_page_info()) if (!left.date.eq(right.date)) { - return str_lt(right.date, left.date) + return left.date.compare(right.date) > 0 } - return str_lt(left.title, right.title) + return left.title.compare(right.title) < 0 } fn sort_posts(posts: vec::Vec) -> bool { @@ -639,7 +611,7 @@ fn generate_tree(c: Console, src: &ReadFS, out: &Dir, rel: string, posts: vec::V defer entries.free() let generated = 0 for entry in entries { - let child = join_rel(rel, entry) + let child = path::join(rel, entry) let count = generate_tree(c, src, out, child, posts) if (count < 0) { return -1 diff --git a/ftdetect/capable.lua b/ftdetect/capable.lua new file mode 100644 index 0000000..5dc99da --- /dev/null +++ b/ftdetect/capable.lua @@ -0,0 +1,5 @@ +vim.filetype.add({ + extension = { + cap = "cap", + }, +}) diff --git a/ftplugin/cap.lua b/ftplugin/cap.lua new file mode 100644 index 0000000..56ce0bf --- /dev/null +++ b/ftplugin/cap.lua @@ -0,0 +1 @@ +vim.bo.commentstring = "// %s" diff --git a/lua/capable/init.lua b/lua/capable/init.lua new file mode 100644 index 0000000..85baa9f --- /dev/null +++ 
b/lua/capable/init.lua @@ -0,0 +1,135 @@ +local M = {} + +local defaults = { + lsp = true, + treesitter = true, + caplsp_cmd = { "caplsp" }, + filetypes = { "cap" }, + root_markers = { ".git", "justfile", "Cargo.toml" }, + on_attach = nil, + capabilities = nil, +} + +local state = { + config = nil, + setup_done = false, +} + +local function plugin_root() + local source = debug.getinfo(1, "S").source + if source:sub(1, 1) == "@" then + source = source:sub(2) + end + return vim.fs.dirname(vim.fs.dirname(vim.fs.dirname(source))) +end + +local function normalize_cmd(cmd) + if type(cmd) == "string" then + return { cmd } + end + return vim.deepcopy(cmd) +end + +local function resolve_root(bufnr, markers) + local name = vim.api.nvim_buf_get_name(bufnr) + local start = name ~= "" and vim.fs.dirname(name) or vim.uv.cwd() + return vim.fs.root(start, markers) or start or vim.uv.cwd() +end + +function M.register_treesitter() + local ok, parsers = pcall(require, "nvim-treesitter.parsers") + if not ok then + return false + end + + parsers.capable = parsers.capable or {} + parsers.capable.install_info = { + url = plugin_root() .. 
"/tree-sitter-capable", + files = { "src/parser.c" }, + requires_generate_from_grammar = false, + generate_requires_npm = false, + } + parsers.capable.filetype = "cap" + + if vim.treesitter and vim.treesitter.language and vim.treesitter.language.register then + pcall(vim.treesitter.language.register, "capable", "cap") + end + + return true +end + +local function start_lsp(bufnr) + if not state.config.lsp then + return + end + if vim.bo[bufnr].buftype ~= "" then + return + end + + local name = vim.api.nvim_buf_get_name(bufnr) + if name == "" then + return + end + + vim.lsp.start({ + name = "caplsp", + cmd = normalize_cmd(state.config.caplsp_cmd), + root_dir = resolve_root(bufnr, state.config.root_markers), + filetypes = state.config.filetypes, + single_file_support = true, + on_attach = state.config.on_attach, + capabilities = state.config.capabilities, + }, { + bufnr = bufnr, + }) +end + +function M.setup(opts) + state.config = vim.tbl_deep_extend("force", state.config or defaults, opts or {}) + + if state.config.treesitter then + if not M.register_treesitter() then + local group = vim.api.nvim_create_augroup("CapableNvimTreesitter", { clear = true }) + vim.api.nvim_create_autocmd("VimEnter", { + group = group, + once = true, + callback = function() + M.register_treesitter() + end, + }) + vim.api.nvim_create_autocmd("User", { + group = group, + pattern = "LazyDone", + once = true, + callback = function() + M.register_treesitter() + end, + }) + end + end + + if state.setup_done then + return M + end + + local group = vim.api.nvim_create_augroup("CapableNvim", { clear = true }) + vim.api.nvim_create_autocmd("FileType", { + group = group, + pattern = state.config.filetypes, + callback = function(args) + start_lsp(args.buf) + end, + }) + + state.setup_done = true + + for _, bufnr in ipairs(vim.api.nvim_list_bufs()) do + if vim.api.nvim_buf_is_loaded(bufnr) and vim.bo[bufnr].filetype == "cap" then + start_lsp(bufnr) + end + end + + return M +end + +return M diff --git 
a/plugin/capable.lua b/plugin/capable.lua new file mode 100644 index 0000000..cf4fa75 --- /dev/null +++ b/plugin/capable.lua @@ -0,0 +1,7 @@ +if vim.g.loaded_capable_nvim == 1 then + return +end + +vim.g.loaded_capable_nvim = 1 + +require("capable").setup(vim.g.capable_nvim_config or {}) diff --git a/queries/capable/highlights.scm b/queries/capable/highlights.scm new file mode 100644 index 0000000..9ed5fb7 --- /dev/null +++ b/queries/capable/highlights.scm @@ -0,0 +1,54 @@ +[ + "module" + "package" + "safe" + "unsafe" + "use" + "pub" + "extern" + "fn" + "let" + "if" + "else" + "while" + "for" + "in" + "break" + "continue" + "defer" + "try" + "return" + "struct" + "enum" + "impl" + "opaque" + "linear" + "copy" + "capability" + "match" + "true" + "false" +] @keyword + +"unit" @constant.builtin + +[ + (int_lit) + (u8_lit) +] @number + +(string_lit) @string + +(comment) @comment + +(identifier) @variable + +(function_decl name: (identifier) @function) +(extern_function_decl name: (identifier) @function) +(method_decl name: (identifier) @function) + +(struct_decl name: (identifier) @type) +(enum_decl name: (identifier) @type) + +(field name: (identifier) @property) +(struct_field name: (identifier) @property) diff --git a/stdlib/sys/string.cap b/stdlib/sys/string.cap index 03fdcb1..6a0f9e0 100644 --- a/stdlib/sys/string.cap +++ b/stdlib/sys/string.cap @@ -777,6 +777,36 @@ impl string { return true } + /// Byte-wise lexical comparison. + /// Returns -1 if self < other, 1 if self > other, and 0 if equal. + pub fn compare(self, other: string) -> i32 { + let self_len = self.len() + let other_len = other.len() + let limit = self_len + if (other_len < limit) { + limit = other_len + } + let i = 0 + while (i < limit) { + let a = self.byte_at(i) + let b = other.byte_at(i) + if (a < b) { + return -1 + } + if (a > b) { + return 1 + } + i = i + 1 + } + if (self_len < other_len) { + return -1 + } + if (self_len > other_len) { + return 1 + } + return 0 + } + /// True if empty. 
pub fn is_empty(self) -> bool { return self.len() == 0 diff --git a/syntax/cap.vim b/syntax/cap.vim new file mode 100644 index 0000000..f7841f4 --- /dev/null +++ b/syntax/cap.vim @@ -0,0 +1,31 @@ +if exists("b:current_syntax") + finish +endif + +syntax case match + +syntax keyword capKeyword package module use pub extern fn let if else while for in return struct enum impl match break continue defer try unsafe safe opaque linear copy capability true false +syntax keyword capBuiltin unit +syntax keyword capType i32 u32 u8 bool string Result + +syntax match capComment "//.*$" +syntax region capString start=+"+ skip=+\\\\\|\\"+ end=+"+ +syntax match capNumber "\<\d\+u8\>\|\<\d\+\>" + +syntax match capFunctionDecl "\\s\+\zs[A-Za-z_][A-Za-z0-9_]*" +syntax match capTypeDecl "\<\(struct\|enum\|impl\)\>\s\+\zs[A-Za-z_][A-Za-z0-9_:]*" +syntax match capModulePath "\<[A-Za-z_][A-Za-z0-9_]*\(::[A-Za-z_][A-Za-z0-9_]*\)\+" +syntax match capField "\.\zs[A-Za-z_][A-Za-z0-9_]*" + +highlight default link capKeyword Keyword +highlight default link capBuiltin Constant +highlight default link capType Type +highlight default link capComment Comment +highlight default link capString String +highlight default link capNumber Number +highlight default link capFunctionDecl Function +highlight default link capTypeDecl Type +highlight default link capModulePath Include +highlight default link capField Identifier + +let b:current_syntax = "cap" diff --git a/tests/programs/path_helpers.cap b/tests/programs/path_helpers.cap index 9dd2c50..49ea423 100644 --- a/tests/programs/path_helpers.cap +++ b/tests/programs/path_helpers.cap @@ -29,6 +29,8 @@ pub fn main(rc: RootCap) -> i32 { let joined = path::join("site/docs", "index.html") c.assert(joined.eq("site/docs/index.html")) + let root_join = path::join("", "posts") + c.assert(root_join.eq("posts")) c.println("path ok") return 0 diff --git a/tests/programs/string_compare.cap b/tests/programs/string_compare.cap index c374975..47a49fa 100644 --- 
a/tests/programs/string_compare.cap +++ b/tests/programs/string_compare.cap @@ -38,6 +38,12 @@ pub fn main(rc: RootCap) -> i32 { c.println("literal ok") } + c.assert("abc".compare("abc") == 0) + c.assert("abc".compare("abd") < 0) + c.assert("abd".compare("abc") > 0) + c.assert("abc".compare("ab") > 0) + c.assert("ab".compare("abc") < 0) + c.println("string_compare ok") return 0 } diff --git a/tree-sitter-capable/queries/highlights.scm b/tree-sitter-capable/queries/highlights.scm index c946c69..9ed5fb7 100644 --- a/tree-sitter-capable/queries/highlights.scm +++ b/tree-sitter-capable/queries/highlights.scm @@ -11,6 +11,12 @@ "if" "else" "while" + "for" + "in" + "break" + "continue" + "defer" + "try" "return" "struct" "enum" diff --git a/vscode/syntaxes/capable.tmLanguage.json b/vscode/syntaxes/capable.tmLanguage.json index 27d4451..95a4ca6 100644 --- a/vscode/syntaxes/capable.tmLanguage.json +++ b/vscode/syntaxes/capable.tmLanguage.json @@ -45,7 +45,7 @@ "patterns": [ { "name": "keyword.control.cap", - "match": "\\b(module|package|safe|unsafe|use|pub|extern|fn|let|if|else|while|return|struct|enum|impl|opaque|linear|copy|capability|match|true|false)\\b" + "match": "\\b(module|package|safe|unsafe|use|pub|extern|fn|let|if|else|while|for|in|break|continue|defer|try|return|struct|enum|impl|opaque|linear|copy|capability|match|true|false)\\b" }, { "name": "constant.language.cap", From 408d2a914b764c00af4da1900c7ead2ae1238b01 Mon Sep 17 00:00:00 2001 From: Jordan Mecom Date: Thu, 26 Mar 2026 15:39:38 -0700 Subject: [PATCH 15/17] Debounce caplsp diagnostics --- Cargo.lock | 1 + caplsp/Cargo.toml | 1 + caplsp/src/main.rs | 119 ++++++++++++++++++++++++++++++++++++++++++--- 3 files changed, 113 insertions(+), 8 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 096dbcc..1ff03fe 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -188,6 +188,7 @@ name = "caplsp" version = "0.1.0" dependencies = [ "capc", + "crossbeam-channel", "lsp-server", "lsp-types", "serde_json", diff --git 
a/caplsp/Cargo.toml b/caplsp/Cargo.toml index 7a21e6a..3b145b4 100644 --- a/caplsp/Cargo.toml +++ b/caplsp/Cargo.toml @@ -6,6 +6,7 @@ license.workspace = true [dependencies] capc = { path = "../capc" } +crossbeam-channel = "0.5" lsp-server = "0.7" lsp-types = "0.95" serde_json = "1.0" diff --git a/caplsp/src/main.rs b/caplsp/src/main.rs index bdb8609..d506f8d 100644 --- a/caplsp/src/main.rs +++ b/caplsp/src/main.rs @@ -1,5 +1,7 @@ use std::collections::HashMap; +use std::time::{Duration, Instant}; +use crossbeam_channel::RecvTimeoutError; use lsp_server::{Connection, Message, Notification, Request}; use lsp_types::{ Diagnostic, DiagnosticSeverity, InitializeResult, Position, PublishDiagnosticsParams, Range, @@ -10,6 +12,8 @@ use capc::{load_stdlib, load_user_modules_transitive, parse_module, type_check_p use capc::ast::{Module, Span}; use capc::error::{ParseError, TypeError}; +const DIAGNOSTIC_DEBOUNCE: Duration = Duration::from_millis(250); + fn main() { let (connection, io_threads) = Connection::stdio(); @@ -28,7 +32,13 @@ fn main() { let mut state = ServerState::default(); - for msg in &connection.receiver { + loop { + flush_due_diagnostics(&mut state, &connection); + let msg = match recv_next(&connection, &state) { + Ok(msg) => msg, + Err(RecvTimeoutError::Timeout) => continue, + Err(RecvTimeoutError::Disconnected) => break, + }; match msg { Message::Request(req) => { if connection.handle_shutdown(&req).unwrap_or(false) { @@ -52,6 +62,7 @@ fn main() { struct ServerState { open_files: HashMap, stdlib: Option>, + pending_diagnostics: HashMap, } fn handle_request(req: Request, connection: &Connection) { @@ -83,24 +94,74 @@ fn handle_notification( let uri = params.text_document.uri; if let Some(change) = params.content_changes.into_iter().last() { state.open_files.insert(uri.clone(), change.text); - publish_diagnostics(state, &uri, connection); + schedule_diagnostics(state, &uri, DIAGNOSTIC_DEBOUNCE); } } "textDocument/didSave" => { let params: 
lsp_types::DidSaveTextDocumentParams = serde_json::from_value(notif.params).map_err(|err| err.to_string())?; - // `didChange` already publishes diagnostics for open buffers. Avoid doing the - // same full parse/load/typecheck pass again on save unless we do not have the - // file contents cached locally. - if !state.open_files.contains_key(¶ms.text_document.uri) { - publish_diagnostics(state, ¶ms.text_document.uri, connection); - } + state.pending_diagnostics.remove(¶ms.text_document.uri); + publish_diagnostics(state, ¶ms.text_document.uri, connection); + } + "textDocument/didClose" => { + let params: lsp_types::DidCloseTextDocumentParams = + serde_json::from_value(notif.params).map_err(|err| err.to_string())?; + let uri = params.text_document.uri; + state.open_files.remove(&uri); + state.pending_diagnostics.remove(&uri); + clear_diagnostics(&uri, connection); } _ => {} } Ok(()) } +fn recv_next( + connection: &Connection, + state: &ServerState, +) -> Result { + match next_diagnostic_deadline(state) { + Some(deadline) => connection + .receiver + .recv_timeout(deadline.saturating_duration_since(Instant::now())), + None => connection + .receiver + .recv() + .map_err(|_| RecvTimeoutError::Disconnected), + } +} + +fn next_diagnostic_deadline(state: &ServerState) -> Option { + let mut next = None; + for due in state.pending_diagnostics.values() { + match next { + Some(current) if current <= *due => {} + _ => next = Some(*due), + } + } + next +} + +fn schedule_diagnostics(state: &mut ServerState, uri: &Url, delay: Duration) { + state + .pending_diagnostics + .insert(uri.clone(), Instant::now() + delay); +} + +fn flush_due_diagnostics(state: &mut ServerState, connection: &Connection) { + let now = Instant::now(); + let mut ready = Vec::new(); + for (uri, due) in &state.pending_diagnostics { + if *due <= now { + ready.push(uri.clone()); + } + } + for uri in ready { + state.pending_diagnostics.remove(&uri); + publish_diagnostics(state, &uri, connection); + } +} + fn 
publish_diagnostics(state: &mut ServerState, uri: &Url, connection: &Connection) { let text = match state.open_files.get(uri) { Some(text) => text.clone(), @@ -121,6 +182,18 @@ fn publish_diagnostics(state: &mut ServerState, uri: &Url, connection: &Connecti ))); } +fn clear_diagnostics(uri: &Url, connection: &Connection) { + let params = PublishDiagnosticsParams { + uri: uri.clone(), + diagnostics: Vec::new(), + version: None, + }; + let _ = connection.sender.send(Message::Notification(Notification::new( + "textDocument/publishDiagnostics".to_string(), + params, + ))); +} + fn analyze(state: &mut ServerState, uri: &Url, text: &str) -> Vec { let module = match parse_module(text) { Ok(module) => module, @@ -193,3 +266,33 @@ fn offset_to_position(text: &str, offset: usize) -> Position { } Position { line, character: col } } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn schedule_replaces_existing_deadline() { + let mut state = ServerState::default(); + let uri = Url::parse("file:///tmp/test.cap").expect("uri"); + schedule_diagnostics(&mut state, &uri, Duration::from_millis(10)); + let first = state.pending_diagnostics.get(&uri).copied().expect("deadline"); + schedule_diagnostics(&mut state, &uri, Duration::from_millis(50)); + let second = state.pending_diagnostics.get(&uri).copied().expect("deadline"); + assert!(second >= first); + } + + #[test] + fn next_deadline_picks_earliest_pending_uri() { + let mut state = ServerState::default(); + let a = Url::parse("file:///tmp/a.cap").expect("uri"); + let b = Url::parse("file:///tmp/b.cap").expect("uri"); + state + .pending_diagnostics + .insert(a, Instant::now() + Duration::from_millis(40)); + let earliest = Instant::now() + Duration::from_millis(10); + state.pending_diagnostics.insert(b, earliest); + let next = next_diagnostic_deadline(&state).expect("next"); + assert!(next <= earliest + Duration::from_millis(1)); + } +} From e8066a5586fbb55b04b9404ea9ccd8fe1a798a2b Mon Sep 17 00:00:00 2001 From: Jordan Mecom 
Date: Thu, 26 Mar 2026 17:17:03 -0700 Subject: [PATCH 16/17] Add first-class 64-bit integer support --- CURRENT_STATUS.md | 15 +- capc/src/abi.rs | 2 + capc/src/ast.rs | 2 + capc/src/codegen/emit.rs | 38 +- capc/src/codegen/emit/match_lowering.rs | 8 + capc/src/codegen/emit/runtime.rs | 7 +- capc/src/codegen/layout.rs | 1 + capc/src/codegen/mod.rs | 4 + capc/src/parser/exprs.rs | 66 ++- capc/src/parser/patterns.rs | 38 ++ capc/src/runtime_intrinsics.rs | 296 +++++++++++++ capc/src/typeck/check.rs | 7 +- capc/src/typeck/lower.rs | 6 +- capc/src/typeck/mod.rs | 10 +- capc/src/typeck/monomorphize.rs | 6 +- capc/src/typeck/monomorphize/support.rs | 1 + capc/src/typeck/resolve.rs | 11 +- capc/tests/run.rs | 41 ++ capc/tests/typecheck.rs | 16 + docs/TUTORIAL.md | 4 + examples/elfdump/elfdump.cap | 552 ++++++++++++++++++++++++ examples/elfdump/minimal64.elf | Bin 0 -> 120 bytes runtime/src/lib.rs | 451 +++++++++++++++++++ stdlib/sys/buffer.cap | 92 +++- stdlib/sys/console.cap | 20 + stdlib/sys/ints.cap | 17 + stdlib/sys/math.cap | 12 + tests/programs/bytes_helpers.cap | 95 ++++ tests/programs/int64_basic.cap | 47 ++ tests/programs/u8_match.cap | 21 + 30 files changed, 1832 insertions(+), 54 deletions(-) create mode 100644 examples/elfdump/elfdump.cap create mode 100644 examples/elfdump/minimal64.elf create mode 100644 stdlib/sys/ints.cap create mode 100644 tests/programs/int64_basic.cap create mode 100644 tests/programs/u8_match.cap diff --git a/CURRENT_STATUS.md b/CURRENT_STATUS.md index d150847..32d4ea8 100644 --- a/CURRENT_STATUS.md +++ b/CURRENT_STATUS.md @@ -18,7 +18,7 @@ repository. It is based on code and tests, not older design docs. ## Types and ownership -- Built-in types: i32, u32, u8, bool, unit, never. +- Built-in types: i32, i64, u32, u64, u8, bool, unit, never. - `string` is a stdlib struct (a view over bytes), not a compiler builtin. - Pointers (`*T`) and borrows (`&T`) are supported in the type system. - Plain data is unrestricted by default. 
@@ -27,7 +27,8 @@ repository. It is based on code and tests, not older design docs. move-tracked fields. - Borrows are deliberately narrow: refs cannot be stored in structs/enums or returned, and ref locals must be initialized from another local. -- Integer literals type as i32; char literals are u8. +- Unsuffixed integer literals type as i32; suffixed integer literals currently + support `u8`, `i64`, and `u64`. Char literals are `u8`. ## Standard library and runtime @@ -60,10 +61,10 @@ repository. It is based on code and tests, not older design docs. - Child handles remain linear where appropriate: - `FileRead` - `TcpConn` -- On move-tracked capabilities, methods that return capabilities still take - `self` by value under the current checker. This is why `Dir.open_read` - consumes `Dir`, while `TcpListener.accept` can borrow: `TcpListener` is a - copy capability. +- On move-tracked capabilities, borrowed receivers can return fresh linear + child capabilities, but they cannot return reusable capabilities. This is + why `Dir.open_read` can borrow `Dir`, while attenuation like `Dir.subdir` + still consumes `self`. - Deliberately copyable capabilities currently include: - `RootCap` - `Console` @@ -80,8 +81,6 @@ repository. It is based on code and tests, not older design docs. ## Known limitations and gaps -- i64 is parsed but rejected by the current backend; only 32-bit integer types - are supported. - Inline-by-value struct returns are not implemented (sret only). - Vec element types are restricted to u8, i32, string, or type parameters. - Variable shadowing is not currently modeled in lowering. 
diff --git a/capc/src/abi.rs b/capc/src/abi.rs index 7495cc9..7ef805f 100644 --- a/capc/src/abi.rs +++ b/capc/src/abi.rs @@ -5,7 +5,9 @@ pub enum AbiType { Unit, I32, + I64, U32, + U64, U8, Bool, Handle, diff --git a/capc/src/ast.rs b/capc/src/ast.rs index 9fea3d1..43090c5 100644 --- a/capc/src/ast.rs +++ b/capc/src/ast.rs @@ -358,6 +358,8 @@ pub struct GroupingExpr { #[derive(Debug, Clone, PartialEq, Eq)] pub enum Literal { Int(i64), + I64(i64), + U64(u64), U8(u8), String(String), Bool(bool), diff --git a/capc/src/codegen/emit.rs b/capc/src/codegen/emit.rs index be12c68..3707d21 100644 --- a/capc/src/codegen/emit.rs +++ b/capc/src/codegen/emit.rs @@ -833,6 +833,12 @@ fn emit_hir_expr_inner( Literal::Int(value) => Ok(ValueRepr::Single( builder.ins().iconst(ir::types::I32, *value as i64), )), + Literal::I64(value) => Ok(ValueRepr::Single( + builder.ins().iconst(ir::types::I64, *value), + )), + Literal::U64(value) => Ok(ValueRepr::Single( + builder.ins().iconst(ir::types::I64, *value as i64), + )), Literal::U8(value) => Ok(ValueRepr::Single( builder.ins().iconst(ir::types::I8, *value as i64), )), @@ -2122,7 +2128,9 @@ fn store_out_value( } match ty.abi { AbiType::I32 + | AbiType::I64 | AbiType::U32 + | AbiType::U64 | AbiType::U8 | AbiType::Bool | AbiType::Handle @@ -2164,6 +2172,13 @@ fn store_value_by_ty( builder.ins().store(MemFlags::new(), val, addr, 0); Ok(()) } + BuiltinType::I64 | BuiltinType::U64 => { + let ValueRepr::Single(val) = value else { + return Err(CodegenError::Unsupported("store i64".to_string())); + }; + builder.ins().store(MemFlags::new(), val, addr, 0); + Ok(()) + } BuiltinType::U8 | BuiltinType::Bool => { let ValueRepr::Single(val) = value else { return Err(CodegenError::Unsupported("store u8".to_string())); @@ -2171,9 +2186,6 @@ fn store_value_by_ty( builder.ins().store(MemFlags::new(), val, addr, 0); Ok(()) } - BuiltinType::I64 => Err(CodegenError::Unsupported( - "i64 not yet supported".to_string(), - )), }, Ty::Ptr(_) => { let 
ValueRepr::Single(val) = value else { @@ -2304,7 +2316,9 @@ fn store_value_by_tykind( }; match ty { AbiType::I32 + | AbiType::I64 | AbiType::U32 + | AbiType::U64 | AbiType::U8 | AbiType::Bool | AbiType::Handle @@ -2341,15 +2355,18 @@ fn load_value_by_ty( addr, 0, ))), + BuiltinType::I64 | BuiltinType::U64 => Ok(ValueRepr::Single(builder.ins().load( + ir::types::I64, + MemFlags::new(), + addr, + 0, + ))), BuiltinType::U8 | BuiltinType::Bool => Ok(ValueRepr::Single(builder.ins().load( ir::types::I8, MemFlags::new(), addr, 0, ))), - BuiltinType::I64 => Err(CodegenError::Unsupported( - "i64 not yet supported".to_string(), - )), }, Ty::Ptr(_) => Ok(ValueRepr::Single(builder.ins().load( ptr_ty, @@ -2443,6 +2460,7 @@ fn load_value_by_tykind( ) -> Result { let load_ty = match ty { AbiType::I32 | AbiType::U32 => ir::types::I32, + AbiType::I64 | AbiType::U64 => ir::types::I64, AbiType::U8 | AbiType::Bool => ir::types::I8, AbiType::Handle => ir::types::I64, AbiType::Ptr => ptr_ty, @@ -2703,6 +2721,7 @@ fn load_local(builder: &mut FunctionBuilder, local: &LocalValue, ptr_ty: Type) - fn value_type_for_result_out(ty: &AbiType, ptr_ty: Type) -> Result { match ty { AbiType::I32 | AbiType::U32 => Ok(ir::types::I32), + AbiType::I64 | AbiType::U64 => Ok(ir::types::I64), AbiType::U8 | AbiType::Bool => Ok(ir::types::I8), AbiType::Handle => Ok(ir::types::I64), AbiType::Ptr => Ok(ptr_ty), @@ -2724,6 +2743,9 @@ fn zero_value_for_tykind( AbiType::I32 | AbiType::U32 => { Ok(ValueRepr::Single(builder.ins().iconst(ir::types::I32, 0))) } + AbiType::I64 | AbiType::U64 => { + Ok(ValueRepr::Single(builder.ins().iconst(ir::types::I64, 0))) + } AbiType::U8 | AbiType::Bool => { Ok(ValueRepr::Single(builder.ins().iconst(ir::types::I8, 0))) } @@ -2857,7 +2879,9 @@ pub(super) fn value_from_params( match ty { AbiType::Unit => Ok(ValueRepr::Unit), AbiType::I32 + | AbiType::I64 | AbiType::U32 + | AbiType::U64 | AbiType::U8 | AbiType::Bool | AbiType::Handle @@ -2895,7 +2919,9 @@ fn value_from_results( 
match ty { AbiType::Unit => Ok(ValueRepr::Unit), AbiType::I32 + | AbiType::I64 | AbiType::U32 + | AbiType::U64 | AbiType::U8 | AbiType::Bool | AbiType::Handle diff --git a/capc/src/codegen/emit/match_lowering.rs b/capc/src/codegen/emit/match_lowering.rs index 39f78dc..8cc55db 100644 --- a/capc/src/codegen/emit/match_lowering.rs +++ b/capc/src/codegen/emit/match_lowering.rs @@ -434,6 +434,14 @@ fn hir_match_pattern_cond( let rhs = builder.ins().iconst(ir::types::I32, *n); Ok(builder.ins().icmp(IntCC::Equal, match_val, rhs)) } + Literal::I64(n) => { + let rhs = builder.ins().iconst(ir::types::I64, *n); + Ok(builder.ins().icmp(IntCC::Equal, match_val, rhs)) + } + Literal::U64(n) => { + let rhs = builder.ins().iconst(ir::types::I64, *n as i64); + Ok(builder.ins().icmp(IntCC::Equal, match_val, rhs)) + } Literal::U8(n) => { let rhs = builder.ins().iconst(ir::types::I8, i64::from(*n)); Ok(builder.ins().icmp(IntCC::Equal, match_val, rhs)) diff --git a/capc/src/codegen/emit/runtime.rs b/capc/src/codegen/emit/runtime.rs index e3c7d0c..d608672 100644 --- a/capc/src/codegen/emit/runtime.rs +++ b/capc/src/codegen/emit/runtime.rs @@ -530,12 +530,9 @@ fn hir_type_from_ty( let abi = match ty { Ty::Builtin(b) => match b { BuiltinType::I32 => AbiType::I32, - BuiltinType::I64 => { - return Err(CodegenError::Unsupported( - "i64 is not supported by the current codegen backend".to_string(), - )) - } + BuiltinType::I64 => AbiType::I64, BuiltinType::U32 => AbiType::U32, + BuiltinType::U64 => AbiType::U64, BuiltinType::U8 => AbiType::U8, BuiltinType::Bool => AbiType::Bool, BuiltinType::Unit | BuiltinType::Never => AbiType::Unit, diff --git a/capc/src/codegen/layout.rs b/capc/src/codegen/layout.rs index 975feb2..0cf1f89 100644 --- a/capc/src/codegen/layout.rs +++ b/capc/src/codegen/layout.rs @@ -276,6 +276,7 @@ pub(super) fn type_layout_for_abi( match ty { AbiType::Unit => Ok(TypeLayout { size: 0, align: 1 }), AbiType::I32 | AbiType::U32 => Ok(TypeLayout { size: 4, align: 4 }), + 
AbiType::I64 | AbiType::U64 => Ok(TypeLayout { size: 8, align: 8 }), AbiType::U8 | AbiType::Bool => Ok(TypeLayout { size: 1, align: 1 }), AbiType::Handle => Ok(TypeLayout { size: 8, align: 8 }), AbiType::Ptr => Ok(TypeLayout { diff --git a/capc/src/codegen/mod.rs b/capc/src/codegen/mod.rs index ef11234..0f0b4be 100644 --- a/capc/src/codegen/mod.rs +++ b/capc/src/codegen/mod.rs @@ -600,7 +600,9 @@ fn append_ty_params(signature: &mut Signature, ty: &AbiType, ptr_ty: Type) { AbiType::Handle => signature.params.push(AbiParam::new(ir::types::I64)), AbiType::Ptr => signature.params.push(AbiParam::new(ptr_ty)), AbiType::I32 => signature.params.push(AbiParam::new(ir::types::I32)), + AbiType::I64 => signature.params.push(AbiParam::new(ir::types::I64)), AbiType::U32 => signature.params.push(AbiParam::new(ir::types::I32)), + AbiType::U64 => signature.params.push(AbiParam::new(ir::types::I64)), AbiType::U8 => signature.params.push(AbiParam::new(ir::types::I8)), AbiType::Bool => signature.params.push(AbiParam::new(ir::types::I8)), AbiType::Result(ok, err) => { @@ -624,7 +626,9 @@ fn append_ty_returns(signature: &mut Signature, ty: &AbiType, ptr_ty: Type) { match ty { AbiType::Unit => {} AbiType::I32 => signature.returns.push(AbiParam::new(ir::types::I32)), + AbiType::I64 => signature.returns.push(AbiParam::new(ir::types::I64)), AbiType::U32 => signature.returns.push(AbiParam::new(ir::types::I32)), + AbiType::U64 => signature.returns.push(AbiParam::new(ir::types::I64)), AbiType::U8 => signature.returns.push(AbiParam::new(ir::types::I8)), AbiType::Bool => signature.returns.push(AbiParam::new(ir::types::I8)), AbiType::Handle => signature.returns.push(AbiParam::new(ir::types::I64)), diff --git a/capc/src/parser/exprs.rs b/capc/src/parser/exprs.rs index 4ab1393..78f0dc0 100644 --- a/capc/src/parser/exprs.rs +++ b/capc/src/parser/exprs.rs @@ -288,28 +288,58 @@ impl Parser { match self.peek_kind() { Some(TokenKind::Int) => { let token = self.bump().unwrap(); - let value = 
token.text.parse::().map_err(|_| { - self.error_at(token.span, "invalid integer literal".to_string()) - })?; if let Some(next) = self.peek_token(0) { - if next.kind == TokenKind::Ident - && next.text == "u8" - && next.span.start == token.span.end - { - let suffix = self.bump().unwrap(); - if !(0..=255).contains(&value) { - return Err(self.error_at( - Span::new(token.span.start, suffix.span.end), - "u8 literal out of range".to_string(), - )); + if next.kind == TokenKind::Ident && next.span.start == token.span.end { + match next.text.as_str() { + "u8" => { + let value = token.text.parse::().map_err(|_| { + self.error_at( + token.span, + "invalid integer literal".to_string(), + ) + })?; + let suffix = self.bump().unwrap(); + if value > 255 { + return Err(self.error_at( + Span::new(token.span.start, suffix.span.end), + "u8 literal out of range".to_string(), + )); + } + return Ok(Expr::Literal(LiteralExpr { + id: self.fresh_expr_id(), + value: Literal::U8(value as u8), + span: Span::new(token.span.start, suffix.span.end), + })); + } + "i64" => { + let value = token.text.parse::().map_err(|_| { + self.error_at(token.span, "invalid i64 literal".to_string()) + })?; + let suffix = self.bump().unwrap(); + return Ok(Expr::Literal(LiteralExpr { + id: self.fresh_expr_id(), + value: Literal::I64(value), + span: Span::new(token.span.start, suffix.span.end), + })); + } + "u64" => { + let value = token.text.parse::().map_err(|_| { + self.error_at(token.span, "invalid u64 literal".to_string()) + })?; + let suffix = self.bump().unwrap(); + return Ok(Expr::Literal(LiteralExpr { + id: self.fresh_expr_id(), + value: Literal::U64(value), + span: Span::new(token.span.start, suffix.span.end), + })); + } + _ => {} } - return Ok(Expr::Literal(LiteralExpr { - id: self.fresh_expr_id(), - value: Literal::U8(value as u8), - span: Span::new(token.span.start, suffix.span.end), - })); } } + let value = token.text.parse::().map_err(|_| { + self.error_at(token.span, "invalid integer 
literal".to_string()) + })?; Ok(Expr::Literal(LiteralExpr { id: self.fresh_expr_id(), value: Literal::Int(value), diff --git a/capc/src/parser/patterns.rs b/capc/src/parser/patterns.rs index dfe0966..b0553ca 100644 --- a/capc/src/parser/patterns.rs +++ b/capc/src/parser/patterns.rs @@ -5,6 +5,44 @@ impl Parser { match self.peek_kind() { Some(TokenKind::Int) => { let token = self.bump().unwrap(); + if let Some(next) = self.peek_token(0) { + if next.kind == TokenKind::Ident && next.span.start == token.span.end { + match next.text.as_str() { + "u8" => { + let value = token.text.parse::().map_err(|_| { + self.error_at( + token.span, + "invalid integer literal".to_string(), + ) + })?; + let suffix_end = next.span.end; + self.bump(); + if value > 255 { + return Err(self.error_at( + Span::new(token.span.start, suffix_end), + "u8 literal out of range".to_string(), + )); + } + return Ok(Pattern::Literal(Literal::U8(value as u8))); + } + "i64" => { + let value = token.text.parse::().map_err(|_| { + self.error_at(token.span, "invalid i64 literal".to_string()) + })?; + self.bump(); + return Ok(Pattern::Literal(Literal::I64(value))); + } + "u64" => { + let value = token.text.parse::().map_err(|_| { + self.error_at(token.span, "invalid u64 literal".to_string()) + })?; + self.bump(); + return Ok(Pattern::Literal(Literal::U64(value))); + } + _ => {} + } + } + } let value = token.text.parse::().map_err(|_| { self.error_at(token.span, "invalid integer literal".to_string()) })?; diff --git a/capc/src/runtime_intrinsics.rs b/capc/src/runtime_intrinsics.rs index 9400f73..e93e98c 100644 --- a/capc/src/runtime_intrinsics.rs +++ b/capc/src/runtime_intrinsics.rs @@ -284,6 +284,30 @@ fn runtime_binding_list() -> Vec { "capable_rt_console_println_i32", sig(vec![AbiType::Handle, AbiType::I32], AbiType::Unit), ), + binding( + "sys.console", + "Console__print_i64", + "capable_rt_console_print_i64", + sig(vec![AbiType::Handle, AbiType::I64], AbiType::Unit), + ), + binding( + "sys.console", + 
"Console__println_i64", + "capable_rt_console_println_i64", + sig(vec![AbiType::Handle, AbiType::I64], AbiType::Unit), + ), + binding( + "sys.console", + "Console__print_u64", + "capable_rt_console_print_u64", + sig(vec![AbiType::Handle, AbiType::U64], AbiType::Unit), + ), + binding( + "sys.console", + "Console__println_u64", + "capable_rt_console_println_u64", + sig(vec![AbiType::Handle, AbiType::U64], AbiType::Unit), + ), binding( "sys.console", "Console__assert", @@ -296,36 +320,72 @@ fn runtime_binding_list() -> Vec { "capable_rt_math_add_wrap_i32", sig(vec![AbiType::I32, AbiType::I32], AbiType::I32), ), + binding( + "sys.math", + "add_wrap_i64", + "capable_rt_math_add_wrap_i64", + sig(vec![AbiType::I64, AbiType::I64], AbiType::I64), + ), binding( "sys.math", "sub_wrap_i32", "capable_rt_math_sub_wrap_i32", sig(vec![AbiType::I32, AbiType::I32], AbiType::I32), ), + binding( + "sys.math", + "sub_wrap_i64", + "capable_rt_math_sub_wrap_i64", + sig(vec![AbiType::I64, AbiType::I64], AbiType::I64), + ), binding( "sys.math", "mul_wrap_i32", "capable_rt_math_mul_wrap_i32", sig(vec![AbiType::I32, AbiType::I32], AbiType::I32), ), + binding( + "sys.math", + "mul_wrap_i64", + "capable_rt_math_mul_wrap_i64", + sig(vec![AbiType::I64, AbiType::I64], AbiType::I64), + ), binding( "sys.math", "add_wrap_u32", "capable_rt_math_add_wrap_u32", sig(vec![AbiType::U32, AbiType::U32], AbiType::U32), ), + binding( + "sys.math", + "add_wrap_u64", + "capable_rt_math_add_wrap_u64", + sig(vec![AbiType::U64, AbiType::U64], AbiType::U64), + ), binding( "sys.math", "sub_wrap_u32", "capable_rt_math_sub_wrap_u32", sig(vec![AbiType::U32, AbiType::U32], AbiType::U32), ), + binding( + "sys.math", + "sub_wrap_u64", + "capable_rt_math_sub_wrap_u64", + sig(vec![AbiType::U64, AbiType::U64], AbiType::U64), + ), binding( "sys.math", "mul_wrap_u32", "capable_rt_math_mul_wrap_u32", sig(vec![AbiType::U32, AbiType::U32], AbiType::U32), ), + binding( + "sys.math", + "mul_wrap_u64", + 
"capable_rt_math_mul_wrap_u64", + sig(vec![AbiType::U64, AbiType::U64], AbiType::U64), + ), binding( "sys.math", "add_wrap_u8", @@ -606,6 +666,242 @@ fn runtime_binding_list() -> Vec { "capable_rt_cast_u32_to_u8", sig(vec![AbiType::Handle, AbiType::Ptr], AbiType::Ptr), ), + binding_with_abi( + "sys.buffer", + "Slice__u8__read_u16_le", + "capable_rt_slice_u8_read_u16_le", + sig( + vec![AbiType::Ptr, AbiType::I32], + AbiType::Result(Box::new(AbiType::I32), Box::new(AbiType::I32)), + ), + sig( + vec![ + AbiType::Ptr, + AbiType::I32, + AbiType::ResultOut(Box::new(AbiType::I32), Box::new(AbiType::I32)), + ], + AbiType::ResultOut(Box::new(AbiType::I32), Box::new(AbiType::I32)), + ), + ), + binding_with_abi( + "sys.buffer", + "Slice__u8__read_u16_be", + "capable_rt_slice_u8_read_u16_be", + sig( + vec![AbiType::Ptr, AbiType::I32], + AbiType::Result(Box::new(AbiType::I32), Box::new(AbiType::I32)), + ), + sig( + vec![ + AbiType::Ptr, + AbiType::I32, + AbiType::ResultOut(Box::new(AbiType::I32), Box::new(AbiType::I32)), + ], + AbiType::ResultOut(Box::new(AbiType::I32), Box::new(AbiType::I32)), + ), + ), + binding_with_abi( + "sys.buffer", + "Slice__u8__read_u32_le", + "capable_rt_slice_u8_read_u32_le", + sig( + vec![AbiType::Ptr, AbiType::I32], + AbiType::Result(Box::new(AbiType::U32), Box::new(AbiType::I32)), + ), + sig( + vec![ + AbiType::Ptr, + AbiType::I32, + AbiType::ResultOut(Box::new(AbiType::U32), Box::new(AbiType::I32)), + ], + AbiType::ResultOut(Box::new(AbiType::U32), Box::new(AbiType::I32)), + ), + ), + binding_with_abi( + "sys.buffer", + "Slice__u8__read_u32_be", + "capable_rt_slice_u8_read_u32_be", + sig( + vec![AbiType::Ptr, AbiType::I32], + AbiType::Result(Box::new(AbiType::U32), Box::new(AbiType::I32)), + ), + sig( + vec![ + AbiType::Ptr, + AbiType::I32, + AbiType::ResultOut(Box::new(AbiType::U32), Box::new(AbiType::I32)), + ], + AbiType::ResultOut(Box::new(AbiType::U32), Box::new(AbiType::I32)), + ), + ), + binding_with_abi( + "sys.buffer", + 
"Slice__u8__read_u64_le", + "capable_rt_slice_u8_read_u64_le", + sig( + vec![AbiType::Ptr, AbiType::I32], + AbiType::Result(Box::new(AbiType::U64), Box::new(AbiType::I32)), + ), + sig( + vec![ + AbiType::Ptr, + AbiType::I32, + AbiType::ResultOut(Box::new(AbiType::U64), Box::new(AbiType::I32)), + ], + AbiType::ResultOut(Box::new(AbiType::U64), Box::new(AbiType::I32)), + ), + ), + binding_with_abi( + "sys.buffer", + "Slice__u8__read_u64_be", + "capable_rt_slice_u8_read_u64_be", + sig( + vec![AbiType::Ptr, AbiType::I32], + AbiType::Result(Box::new(AbiType::U64), Box::new(AbiType::I32)), + ), + sig( + vec![ + AbiType::Ptr, + AbiType::I32, + AbiType::ResultOut(Box::new(AbiType::U64), Box::new(AbiType::I32)), + ], + AbiType::ResultOut(Box::new(AbiType::U64), Box::new(AbiType::I32)), + ), + ), + binding_with_abi( + "sys.buffer", + "Slice__u8__read_i32_le", + "capable_rt_slice_u8_read_i32_le", + sig( + vec![AbiType::Ptr, AbiType::I32], + AbiType::Result(Box::new(AbiType::I32), Box::new(AbiType::I32)), + ), + sig( + vec![ + AbiType::Ptr, + AbiType::I32, + AbiType::ResultOut(Box::new(AbiType::I32), Box::new(AbiType::I32)), + ], + AbiType::ResultOut(Box::new(AbiType::I32), Box::new(AbiType::I32)), + ), + ), + binding_with_abi( + "sys.buffer", + "Slice__u8__read_i32_be", + "capable_rt_slice_u8_read_i32_be", + sig( + vec![AbiType::Ptr, AbiType::I32], + AbiType::Result(Box::new(AbiType::I32), Box::new(AbiType::I32)), + ), + sig( + vec![ + AbiType::Ptr, + AbiType::I32, + AbiType::ResultOut(Box::new(AbiType::I32), Box::new(AbiType::I32)), + ], + AbiType::ResultOut(Box::new(AbiType::I32), Box::new(AbiType::I32)), + ), + ), + binding_with_abi( + "sys.buffer", + "Slice__u8__read_i64_le", + "capable_rt_slice_u8_read_i64_le", + sig( + vec![AbiType::Ptr, AbiType::I32], + AbiType::Result(Box::new(AbiType::I64), Box::new(AbiType::I32)), + ), + sig( + vec![ + AbiType::Ptr, + AbiType::I32, + AbiType::ResultOut(Box::new(AbiType::I64), Box::new(AbiType::I32)), + ], + 
AbiType::ResultOut(Box::new(AbiType::I64), Box::new(AbiType::I32)), + ), + ), + binding_with_abi( + "sys.buffer", + "Slice__u8__read_i64_be", + "capable_rt_slice_u8_read_i64_be", + sig( + vec![AbiType::Ptr, AbiType::I32], + AbiType::Result(Box::new(AbiType::I64), Box::new(AbiType::I32)), + ), + sig( + vec![ + AbiType::Ptr, + AbiType::I32, + AbiType::ResultOut(Box::new(AbiType::I64), Box::new(AbiType::I32)), + ], + AbiType::ResultOut(Box::new(AbiType::I64), Box::new(AbiType::I32)), + ), + ), + binding_with_abi( + "sys.buffer", + "Slice__u8__read_u64_fit_i32_le", + "capable_rt_slice_u8_read_u64_fit_i32_le", + sig( + vec![AbiType::Ptr, AbiType::I32], + AbiType::Result(Box::new(AbiType::I32), Box::new(AbiType::I32)), + ), + sig( + vec![ + AbiType::Ptr, + AbiType::I32, + AbiType::ResultOut(Box::new(AbiType::I32), Box::new(AbiType::I32)), + ], + AbiType::ResultOut(Box::new(AbiType::I32), Box::new(AbiType::I32)), + ), + ), + binding_with_abi( + "sys.buffer", + "Slice__u8__read_u64_fit_i32_be", + "capable_rt_slice_u8_read_u64_fit_i32_be", + sig( + vec![AbiType::Ptr, AbiType::I32], + AbiType::Result(Box::new(AbiType::I32), Box::new(AbiType::I32)), + ), + sig( + vec![ + AbiType::Ptr, + AbiType::I32, + AbiType::ResultOut(Box::new(AbiType::I32), Box::new(AbiType::I32)), + ], + AbiType::ResultOut(Box::new(AbiType::I32), Box::new(AbiType::I32)), + ), + ), + binding_with_abi( + "sys.ints", + "i64__try_i32", + "capable_rt_i64_try_i32", + sig( + vec![AbiType::I64], + AbiType::Result(Box::new(AbiType::I32), Box::new(AbiType::Unit)), + ), + sig( + vec![ + AbiType::I64, + AbiType::ResultOut(Box::new(AbiType::I32), Box::new(AbiType::Unit)), + ], + AbiType::ResultOut(Box::new(AbiType::I32), Box::new(AbiType::Unit)), + ), + ), + binding_with_abi( + "sys.ints", + "u64__try_i32", + "capable_rt_u64_try_i32", + sig( + vec![AbiType::U64], + AbiType::Result(Box::new(AbiType::I32), Box::new(AbiType::Unit)), + ), + sig( + vec![ + AbiType::U64, + AbiType::ResultOut(Box::new(AbiType::I32), 
Box::new(AbiType::Unit)), + ], + AbiType::ResultOut(Box::new(AbiType::I32), Box::new(AbiType::Unit)), + ), + ), binding( "sys.bytes", "u8__is_whitespace", diff --git a/capc/src/typeck/check.rs b/capc/src/typeck/check.rs index 2c3266d..b7a954c 100644 --- a/capc/src/typeck/check.rs +++ b/capc/src/typeck/check.rs @@ -181,6 +181,8 @@ pub(super) fn check_expr( let ty = match expr { Expr::Literal(lit) => match &lit.value { Literal::Int(_) => Ok(Ty::Builtin(BuiltinType::I32)), + Literal::I64(_) => Ok(Ty::Builtin(BuiltinType::I64)), + Literal::U64(_) => Ok(Ty::Builtin(BuiltinType::U64)), Literal::U8(_) => Ok(Ty::Builtin(BuiltinType::U8)), Literal::String(_) => Ok(stdlib_string_ty(stdlib)), Literal::Bool(_) => Ok(Ty::Builtin(BuiltinType::Bool)), @@ -372,10 +374,7 @@ pub(super) fn check_expr( )?; match binary.op { BinaryOp::Add | BinaryOp::Sub | BinaryOp::Mul | BinaryOp::Div | BinaryOp::Mod => { - if left == right - && (left == Ty::Builtin(BuiltinType::I32) - || left == Ty::Builtin(BuiltinType::I64)) - { + if left == right && is_numeric_type(&left) { Ok(left) } else if left == right && matches!(left, Ty::Param(_)) diff --git a/capc/src/typeck/lower.rs b/capc/src/typeck/lower.rs index f5e0c63..f133fc2 100644 --- a/capc/src/typeck/lower.rs +++ b/capc/src/typeck/lower.rs @@ -746,11 +746,9 @@ fn abi_type_for(ty: &Ty, ctx: &LoweringCtx, span: Span) -> Result match b { BuiltinType::I32 => Ok(AbiType::I32), - BuiltinType::I64 => Err(TypeError::new( - "i64 is not supported by the current codegen backend".to_string(), - span, - )), + BuiltinType::I64 => Ok(AbiType::I64), BuiltinType::U32 => Ok(AbiType::U32), + BuiltinType::U64 => Ok(AbiType::U64), BuiltinType::U8 => Ok(AbiType::U8), BuiltinType::Bool => Ok(AbiType::Bool), BuiltinType::Unit => Ok(AbiType::Unit), diff --git a/capc/src/typeck/mod.rs b/capc/src/typeck/mod.rs index 20cce22..d162c78 100644 --- a/capc/src/typeck/mod.rs +++ b/capc/src/typeck/mod.rs @@ -49,8 +49,8 @@ use type_params::{ build_type_param_bounds, 
build_type_params, merge_type_params, type_param_names, }; -pub(super) const RESERVED_TYPE_PARAMS: [&str; 8] = - ["i32", "i64", "u32", "u8", "bool", "unit", "never", "Self"]; +pub(super) const RESERVED_TYPE_PARAMS: [&str; 9] = + ["i32", "i64", "u32", "u64", "u8", "bool", "unit", "never", "Self"]; /// Resolved type used after lowering. No spans, fully qualified paths. #[derive(Debug, Clone, PartialEq, Eq)] @@ -71,6 +71,7 @@ pub enum BuiltinType { I32, I64, U32, + U64, U8, Bool, Unit, @@ -88,6 +89,7 @@ pub fn is_numeric_type(ty: &Ty) -> bool { Ty::Builtin(BuiltinType::I32) | Ty::Builtin(BuiltinType::I64) | Ty::Builtin(BuiltinType::U32) + | Ty::Builtin(BuiltinType::U64) | Ty::Builtin(BuiltinType::U8) ) } @@ -101,7 +103,9 @@ pub fn is_orderable_type(ty: &Ty) -> bool { pub fn is_unsigned_type(ty: &Ty) -> bool { matches!( ty, - Ty::Builtin(BuiltinType::U32) | Ty::Builtin(BuiltinType::U8) + Ty::Builtin(BuiltinType::U32) + | Ty::Builtin(BuiltinType::U64) + | Ty::Builtin(BuiltinType::U8) ) } diff --git a/capc/src/typeck/monomorphize.rs b/capc/src/typeck/monomorphize.rs index 8242d16..25714b3 100644 --- a/capc/src/typeck/monomorphize.rs +++ b/capc/src/typeck/monomorphize.rs @@ -905,11 +905,9 @@ impl MonoCtx { match ty { Ty::Builtin(b) => match b { BuiltinType::I32 => Ok(AbiType::I32), - BuiltinType::I64 => Err(TypeError::new( - "i64 is not supported by the current codegen backend".to_string(), - DUMMY_SPAN, - )), + BuiltinType::I64 => Ok(AbiType::I64), BuiltinType::U32 => Ok(AbiType::U32), + BuiltinType::U64 => Ok(AbiType::U64), BuiltinType::U8 => Ok(AbiType::U8), BuiltinType::Bool => Ok(AbiType::Bool), BuiltinType::Unit => Ok(AbiType::Unit), diff --git a/capc/src/typeck/monomorphize/support.rs b/capc/src/typeck/monomorphize/support.rs index 11858b8..d0e2814 100644 --- a/capc/src/typeck/monomorphize/support.rs +++ b/capc/src/typeck/monomorphize/support.rs @@ -243,6 +243,7 @@ pub(super) fn mangle_type(ty: &Ty) -> String { crate::typeck::BuiltinType::I32 => "i32".to_string(), 
crate::typeck::BuiltinType::I64 => "i64".to_string(), crate::typeck::BuiltinType::U32 => "u32".to_string(), + crate::typeck::BuiltinType::U64 => "u64".to_string(), crate::typeck::BuiltinType::U8 => "u8".to_string(), crate::typeck::BuiltinType::Bool => "bool".to_string(), crate::typeck::BuiltinType::Unit => "unit".to_string(), diff --git a/capc/src/typeck/resolve.rs b/capc/src/typeck/resolve.rs index 76c9223..be385b9 100644 --- a/capc/src/typeck/resolve.rs +++ b/capc/src/typeck/resolve.rs @@ -34,7 +34,7 @@ pub(super) fn path_to_string(path: &Path) -> String { } /// Resolve a method receiver type to (module, type name, type args). -/// Builtins with methods (string/u8) are mapped to their stdlib modules. +/// Builtins with methods are mapped to their stdlib modules. pub(super) fn resolve_method_target( receiver_ty: &Ty, module_name: &str, @@ -51,6 +51,12 @@ pub(super) fn resolve_method_target( Ty::Builtin(BuiltinType::U8) => { return Ok(("sys.bytes".to_string(), "u8".to_string(), Vec::new())); } + Ty::Builtin(BuiltinType::I64) => { + return Ok(("sys.ints".to_string(), "i64".to_string(), Vec::new())); + } + Ty::Builtin(BuiltinType::U64) => { + return Ok(("sys.ints".to_string(), "u64".to_string(), Vec::new())); + } _ => { return Err(TypeError::new( "method receiver must be a struct or enum value".to_string(), @@ -166,6 +172,8 @@ pub(super) fn resolve_impl_target( } Ty::Builtin(BuiltinType::I32) => (module_name.to_string(), "i32".to_string()), Ty::Builtin(BuiltinType::U32) => (module_name.to_string(), "u32".to_string()), + Ty::Builtin(BuiltinType::I64) => (module_name.to_string(), "i64".to_string()), + Ty::Builtin(BuiltinType::U64) => (module_name.to_string(), "u64".to_string()), Ty::Builtin(BuiltinType::U8) => (module_name.to_string(), "u8".to_string()), Ty::Builtin(BuiltinType::Bool) => (module_name.to_string(), "bool".to_string()), _ => { @@ -498,6 +506,7 @@ pub(super) fn lower_type( "i32" => Some(BuiltinType::I32), "i64" => Some(BuiltinType::I64), "u32" => 
Some(BuiltinType::U32), + "u64" => Some(BuiltinType::U64), "u8" => Some(BuiltinType::U8), "bool" => Some(BuiltinType::Bool), "unit" => Some(BuiltinType::Unit), diff --git a/capc/tests/run.rs b/capc/tests/run.rs index 0eba46f..2287bf8 100644 --- a/capc/tests/run.rs +++ b/capc/tests/run.rs @@ -249,6 +249,25 @@ fn run_static_site_example() { assert!(css.contains(".site-header"), "css was: {css:?}"); } +#[test] +fn run_elfdump_example() { + let out_dir = make_out_dir("elfdump"); + let out_dir = out_dir.to_str().expect("utf8 out dir"); + let (code, stdout, stderr) = run_capc(&[ + "run", + "--out-dir", + out_dir, + "examples/elfdump/elfdump.cap", + "examples/elfdump/minimal64.elf", + ]); + assert_eq!(code, 0, "stderr was: {stderr:?}"); + assert!(stdout.contains("class: ELF64"), "stdout was: {stdout:?}"); + assert!(stdout.contains("machine: x86-64"), "stdout was: {stdout:?}"); + assert!(stdout.contains("entry: 0x0000000000401000"), "stdout was: {stdout:?}"); + assert!(stdout.contains("phdr[0] type=LOAD"), "stdout was: {stdout:?}"); + assert!(stdout.contains("flags=R-X"), "stdout was: {stdout:?}"); +} + #[test] fn run_defer_free() { let out_dir = make_out_dir("defer_free"); @@ -338,6 +357,28 @@ fn run_unsigned_compare() { ); } +#[test] +fn run_u8_match() { + let out_dir = make_out_dir("u8_match"); + let out_dir = out_dir.to_str().expect("utf8 out dir"); + let (code, stdout, _stderr) = + run_capc(&["run", "--out-dir", out_dir, "tests/programs/u8_match.cap"]); + assert_eq!(code, 0); + assert!(stdout.contains("u8 match ok"), "stdout was: {stdout:?}"); +} + +#[test] +fn run_int64_basic() { + let out_dir = make_out_dir("int64_basic"); + let out_dir = out_dir.to_str().expect("utf8 out dir"); + let (code, stdout, _stderr) = + run_capc(&["run", "--out-dir", out_dir, "tests/programs/int64_basic.cap"]); + assert_eq!(code, 0); + assert!(stdout.contains("4999999998"), "stdout was: {stdout:?}"); + assert!(stdout.contains("1099511627800"), "stdout was: {stdout:?}"); + 
assert!(stdout.contains("int64 ok"), "stdout was: {stdout:?}"); +} + #[test] fn run_string_bytes_alias() { let out_dir = make_out_dir("string_bytes_alias"); diff --git a/capc/tests/typecheck.rs b/capc/tests/typecheck.rs index 273374e..abb7e43 100644 --- a/capc/tests/typecheck.rs +++ b/capc/tests/typecheck.rs @@ -990,6 +990,22 @@ fn typecheck_bytes_helpers_ok() { type_check_program(&module, &stdlib, &[]).expect("typecheck module"); } +#[test] +fn typecheck_u8_match_ok() { + let source = load_program("u8_match.cap"); + let module = parse_module(&source).expect("parse module"); + let stdlib = load_stdlib().expect("load stdlib"); + type_check_program(&module, &stdlib, &[]).expect("typecheck module"); +} + +#[test] +fn typecheck_int64_basic_ok() { + let source = load_program("int64_basic.cap"); + let module = parse_module(&source).expect("parse module"); + let stdlib = load_stdlib().expect("load stdlib"); + type_check_program(&module, &stdlib, &[]).expect("typecheck module"); +} + #[test] fn typecheck_error_on_missing_return() { let source = r#" diff --git a/docs/TUTORIAL.md b/docs/TUTORIAL.md index 46abc99..8f0558e 100644 --- a/docs/TUTORIAL.md +++ b/docs/TUTORIAL.md @@ -64,6 +64,7 @@ Key syntax: - Modules + imports: `module ...` and `use ...` (alias by last path segment). - `for { ... }` is an infinite loop; `for i in a..b` is a range loop. - Integer arithmetic traps on overflow. +- Built-in integer types are `i32`, `i64`, `u32`, `u64`, and `u8`. - Variable shadowing is not allowed. ## 3) Control flow and pattern matching @@ -350,6 +351,9 @@ Helpers: - `Vec.as_string()` borrows bytes as text; `Vec.copy_string()` allocates a copy. - `Text.slice_range` returns a `string` view into its buffer. +For binary parsing, `Slice` also exposes checked endian readers like +`read_u16_le`, `read_u32_be`, `read_u64_le`, and `read_i64_be`. + ## 11) Slices and indexing Slices are bounds-checked in safe code. Indexing out of bounds traps. 
diff --git a/examples/elfdump/elfdump.cap b/examples/elfdump/elfdump.cap new file mode 100644 index 0000000..a3b190d --- /dev/null +++ b/examples/elfdump/elfdump.cap @@ -0,0 +1,552 @@ +package safe +module elfdump + +use sys::buffer +use sys::console +use sys::fs +use sys::string +use sys::system + +copy struct ElfHeader { + class: i32, + little: bool, + kind: i32, + machine: i32, + version: i32, + phoff: i32, + shoff: i32, + phentsize: i32, + phnum: i32, + shentsize: i32, + shnum: i32 +} + +fn push_text(out: string::Text, value: string) -> bool { + match (out.push_str(value)) { + Ok(_) => { return true } + Err(_) => { return false } + } +} + +fn hex_digit(n: u8) -> string { + match (n) { + 0u8 => { return "0" } + 1u8 => { return "1" } + 2u8 => { return "2" } + 3u8 => { return "3" } + 4u8 => { return "4" } + 5u8 => { return "5" } + 6u8 => { return "6" } + 7u8 => { return "7" } + 8u8 => { return "8" } + 9u8 => { return "9" } + 10u8 => { return "a" } + 11u8 => { return "b" } + 12u8 => { return "c" } + 13u8 => { return "d" } + 14u8 => { return "e" } + 15u8 => { return "f" } + _ => { return "?" 
} + } +} + +fn parse_class(value: u8) -> Result { + match (value) { + 1u8 => { return Ok(1) } + 2u8 => { return Ok(2) } + _ => { return Err(()) } + } +} + +fn parse_endianness(value: u8) -> Result { + match (value) { + 1u8 => { return Ok(true) } + 2u8 => { return Ok(false) } + _ => { return Err(()) } + } +} + +fn header_size_for_class(class: i32) -> i32 { + if (class == 1) { + return 52 + } + return 64 +} + +fn push_hex_byte(out: string::Text, value: u8) -> bool { + let shift: u8 = 4u8 + let mask: u8 = 15u8 + let hi = value >> shift + let lo = value & mask + if (!push_text(out, hex_digit(hi))) { + return false + } + return push_text(out, hex_digit(lo)) +} + +fn append_hex_field( + out: string::Text, + data: buffer::Slice, + offset: i32, + count: i32, + little: bool +) -> bool { + if (!push_text(out, "0x")) { + return false + } + let i = 0 + while (i < count) { + let index = offset + i + if (little) { + index = offset + (count - 1 - i) + } + try let byte = data.at_checked(index) else { + return false + } + if (!push_hex_byte(out, byte)) { + return false + } + i = i + 1 + } + return true +} + +fn format_hex_field( + data: buffer::Slice, + offset: i32, + count: i32, + little: bool +) -> string { + let out = string::text_new() + defer out.free() + if (!append_hex_field(out, data, offset, count, little)) { + return "" + } + match (out.copy_string()) { + Ok(s) => { return s } + Err(_) => { return "" } + } +} + +fn read_u16(data: buffer::Slice, little: bool, offset: i32) -> Result { + if (little) { + return data.read_u16_le(offset) + } + return data.read_u16_be(offset) +} + +fn read_i32(data: buffer::Slice, little: bool, offset: i32) -> Result { + if (little) { + return data.read_i32_le(offset) + } + return data.read_i32_be(offset) +} + +fn read_u64(data: buffer::Slice, little: bool, offset: i32) -> Result { + if (little) { + return data.read_u64_le(offset) + } + return data.read_u64_be(offset) +} + +fn parse_header(data: buffer::Slice) -> Result { + if (data.len() < 16 
|| !data.matches4(0, 127u8, 'E', 'L', 'F')) { + return Err(()) + } + + try let class_byte = data.read_u8(4) else { + return Err(()) + } + try let little_byte = data.read_u8(5) else { + return Err(()) + } + try let class = parse_class(class_byte) else { + return Err(()) + } + try let little = parse_endianness(little_byte) else { + return Err(()) + } + + let min_header = header_size_for_class(class) + if (data.len() < min_header) { + return Err(()) + } + + try let kind = read_u16(data, little, 16) else { + return Err(()) + } + try let machine = read_u16(data, little, 18) else { + return Err(()) + } + try let version = read_i32(data, little, 20) else { + return Err(()) + } + + if (class == 1) { + try let phoff = read_i32(data, little, 28) else { + return Err(()) + } + try let shoff = read_i32(data, little, 32) else { + return Err(()) + } + try let phentsize = read_u16(data, little, 42) else { + return Err(()) + } + try let phnum = read_u16(data, little, 44) else { + return Err(()) + } + try let shentsize = read_u16(data, little, 46) else { + return Err(()) + } + try let shnum = read_u16(data, little, 48) else { + return Err(()) + } + return Ok(ElfHeader { + class: class, + little: little, + kind: kind, + machine: machine, + version: version, + phoff: phoff, + shoff: shoff, + phentsize: phentsize, + phnum: phnum, + shentsize: shentsize, + shnum: shnum + }) + } + + try let phoff64 = read_u64(data, little, 32) else { + return Err(()) + } + try let phoff = phoff64.try_i32() else { + return Err(()) + } + try let shoff64 = read_u64(data, little, 40) else { + return Err(()) + } + try let shoff = shoff64.try_i32() else { + return Err(()) + } + try let phentsize = read_u16(data, little, 54) else { + return Err(()) + } + try let phnum = read_u16(data, little, 56) else { + return Err(()) + } + try let shentsize = read_u16(data, little, 58) else { + return Err(()) + } + try let shnum = read_u16(data, little, 60) else { + return Err(()) + } + return Ok(ElfHeader { + class: class, 
+ little: little, + kind: kind, + machine: machine, + version: version, + phoff: phoff, + shoff: shoff, + phentsize: phentsize, + phnum: phnum, + shentsize: shentsize, + shnum: shnum + }) +} + +fn class_name(class: i32) -> string { + match (class) { + 1 => { return "ELF32" } + 2 => { return "ELF64" } + _ => { return "unknown" } + } +} + +fn endian_name(little: bool) -> string { + if (little) { + return "little-endian" + } + return "big-endian" +} + +fn file_type_name(kind: i32) -> string { + match (kind) { + 0 => { return "NONE" } + 1 => { return "REL" } + 2 => { return "EXEC" } + 3 => { return "DYN" } + 4 => { return "CORE" } + _ => { return "OTHER" } + } +} + +fn machine_name(machine: i32) -> string { + match (machine) { + 3 => { return "x86" } + 40 => { return "ARM" } + 62 => { return "x86-64" } + 183 => { return "AArch64" } + _ => { return "other" } + } +} + +fn ph_type_name(kind: i32) -> string { + match (kind) { + 0 => { return "NULL" } + 1 => { return "LOAD" } + 2 => { return "DYNAMIC" } + 3 => { return "INTERP" } + 4 => { return "NOTE" } + 5 => { return "SHLIB" } + 6 => { return "PHDR" } + 7 => { return "TLS" } + _ => { return "OTHER" } + } +} + +fn sh_type_name(kind: i32) -> string { + match (kind) { + 0 => { return "NULL" } + 1 => { return "PROGBITS" } + 2 => { return "SYMTAB" } + 3 => { return "STRTAB" } + 4 => { return "RELA" } + 8 => { return "NOBITS" } + 9 => { return "REL" } + 11 => { return "DYNSYM" } + _ => { return "OTHER" } + } +} + +fn ph_flag_text(flags: i32) -> string { + let out = string::text_new() + defer out.free() + let read = 4 + let write = 2 + let exec = 1 + if ((flags & read) == read) { + if (!push_text(out, "R")) { + return "R??" 
+ } + } else { + if (!push_text(out, "-")) { + return "---" + } + } + if ((flags & write) == write) { + if (!push_text(out, "W")) { + return "-W-" + } + } else { + if (!push_text(out, "-")) { + return "---" + } + } + if ((flags & exec) == exec) { + if (!push_text(out, "X")) { + return "--X" + } + } else { + if (!push_text(out, "-")) { + return "---" + } + } + match (out.copy_string()) { + Ok(s) => { return s } + Err(_) => { return "---" } + } +} + +fn print_header(c: Console, path: string, data: buffer::Slice, header: ElfHeader) -> unit { + c.print("file: ") + c.println(path) + c.print("class: ") + c.println(class_name(header.class)) + c.print("endianness: ") + c.println(endian_name(header.little)) + c.print("type: ") + c.println(file_type_name(header.kind)) + c.print("machine: ") + c.println(machine_name(header.machine)) + c.print("version: ") + c.println_i32(header.version) + c.print("entry: ") + if (header.class == 1) { + c.println(format_hex_field(data, 24, 4, header.little)) + } else { + c.println(format_hex_field(data, 24, 8, header.little)) + } + c.print("program headers: ") + c.println_i32(header.phnum) + c.print("section headers: ") + c.println_i32(header.shnum) + return () +} + +fn dump_program_headers(c: Console, data: buffer::Slice, header: ElfHeader) -> bool { + if (header.phnum == 0) { + return true + } + let required = 0 + if (header.class == 1) { + required = 32 + } else { + required = 56 + } + if (required > header.phentsize) { + c.println("program header entries are smaller than expected") + return false + } + + let i = 0 + while (i < header.phnum) { + let base = header.phoff + (i * header.phentsize) + if (base < 0 || base + required > data.len()) { + c.println("program headers truncated") + return false + } + let kind = 0 + let flags = 0 + if (header.class == 1) { + try let k = read_i32(data, header.little, base) else { + c.println("program headers truncated") + return false + } + kind = k + try let f = read_i32(data, header.little, base + 24) 
else { + c.println("program headers truncated") + return false + } + flags = f + } else { + try let k = read_i32(data, header.little, base) else { + c.println("program headers truncated") + return false + } + kind = k + try let f = read_i32(data, header.little, base + 4) else { + c.println("program headers truncated") + return false + } + flags = f + } + + c.print("phdr[") + c.print_i32(i) + c.print("] type=") + c.print(ph_type_name(kind)) + c.print(" flags=") + c.print(ph_flag_text(flags)) + c.print(" offset=") + if (header.class == 1) { + c.print(format_hex_field(data, base + 4, 4, header.little)) + c.print(" vaddr=") + c.print(format_hex_field(data, base + 8, 4, header.little)) + c.print(" filesz=") + c.print(format_hex_field(data, base + 16, 4, header.little)) + c.print(" memsz=") + c.print(format_hex_field(data, base + 20, 4, header.little)) + c.print(" align=") + c.println(format_hex_field(data, base + 28, 4, header.little)) + } else { + c.print(format_hex_field(data, base + 8, 8, header.little)) + c.print(" vaddr=") + c.print(format_hex_field(data, base + 16, 8, header.little)) + c.print(" filesz=") + c.print(format_hex_field(data, base + 32, 8, header.little)) + c.print(" memsz=") + c.print(format_hex_field(data, base + 40, 8, header.little)) + c.print(" align=") + c.println(format_hex_field(data, base + 48, 8, header.little)) + } + i = i + 1 + } + return true +} + +fn dump_section_headers(c: Console, data: buffer::Slice, header: ElfHeader) -> bool { + if (header.shnum == 0) { + return true + } + let required = 0 + if (header.class == 1) { + required = 40 + } else { + required = 64 + } + if (required > header.shentsize) { + c.println("section header entries are smaller than expected") + return false + } + + let i = 0 + while (i < header.shnum) { + let base = header.shoff + (i * header.shentsize) + if (base < 0 || base + required > data.len()) { + c.println("section headers truncated") + return false + } + try let kind = read_i32(data, header.little, base + 
4) else { + c.println("section headers truncated") + return false + } + c.print("shdr[") + c.print_i32(i) + c.print("] type=") + c.print(sh_type_name(kind)) + c.print(" offset=") + if (header.class == 1) { + c.print(format_hex_field(data, base + 16, 4, header.little)) + c.print(" size=") + c.println(format_hex_field(data, base + 20, 4, header.little)) + } else { + c.print(format_hex_field(data, base + 24, 8, header.little)) + c.print(" size=") + c.println(format_hex_field(data, base + 32, 8, header.little)) + } + i = i + 1 + } + return true +} + +fn print_fs_err(c: Console, err: fs::FsErr) -> unit { + match (err) { + fs::FsErr::NotFound => { c.println("filesystem error: not found") } + fs::FsErr::PermissionDenied => { c.println("filesystem error: permission denied") } + fs::FsErr::InvalidPath => { c.println("filesystem error: invalid path") } + fs::FsErr::IoError => { c.println("filesystem error: io error") } + } + return () +} + +pub fn main(rc: RootCap) -> i32 { + let c = rc.mint_console() + let args = rc.mint_args() + let path = args.at_or(1, "examples/elfdump/minimal64.elf") + let readfs = rc.mint_readfs(".") + + try let bytes = readfs.read_bytes(path) else err { + readfs.close() + c.println("failed to read ELF file") + print_fs_err(c, err) + return 1 + } + readfs.close() + defer bytes.free() + + let data = bytes.as_slice() + try let header = parse_header(data) else { + c.println("invalid or unsupported ELF file") + return 1 + } + + print_header(c, path, data, header) + if (!dump_program_headers(c, data, header)) { + return 1 + } + if (!dump_section_headers(c, data, header)) { + return 1 + } + return 0 +} diff --git a/examples/elfdump/minimal64.elf b/examples/elfdump/minimal64.elf new file mode 100644 index 0000000000000000000000000000000000000000..c4af0a2924bffa3ec27bf7a055445e37e5a11fa0 GIT binary patch literal 120 zcmb<-^>JfjWMqH=CI&kOFi*e%ECeB8vJl#V!2+lTA_G#(iXsgWXMoCAK;>XGg8)PT E07B^mYXATM literal 0 HcmV?d00001 diff --git a/runtime/src/lib.rs 
b/runtime/src/lib.rs index c5e4846..7f8f35f 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -326,6 +326,145 @@ fn write_unit_result(out_err: *mut i32, result: Result<(), FsErr>) -> u8 { } } +fn write_i32_result(out_ok: *mut i32, out_err: *mut i32, result: Result) -> u8 { + unsafe { + if !out_ok.is_null() { + *out_ok = 0; + } + if !out_err.is_null() { + *out_err = 0; + } + } + match result { + Ok(value) => { + unsafe { + if !out_ok.is_null() { + *out_ok = value; + } + } + 0 + } + Err(err) => { + unsafe { + if !out_err.is_null() { + *out_err = err; + } + } + 1 + } + } +} + +fn write_i32_unit_result(out_ok: *mut i32, result: Result) -> u8 { + match result { + Ok(value) => { + unsafe { + if !out_ok.is_null() { + *out_ok = value; + } + } + 0 + } + Err(()) => 1, + } +} + +fn write_u32_result(out_ok: *mut u32, out_err: *mut i32, result: Result) -> u8 { + unsafe { + if !out_ok.is_null() { + *out_ok = 0; + } + if !out_err.is_null() { + *out_err = 0; + } + } + match result { + Ok(value) => { + unsafe { + if !out_ok.is_null() { + *out_ok = value; + } + } + 0 + } + Err(err) => { + unsafe { + if !out_err.is_null() { + *out_err = err; + } + } + 1 + } + } +} + +fn write_i64_result(out_ok: *mut i64, out_err: *mut i32, result: Result) -> u8 { + match result { + Ok(value) => { + unsafe { + if !out_ok.is_null() { + *out_ok = value; + } + if !out_err.is_null() { + *out_err = 0; + } + } + 0 + } + Err(err) => { + unsafe { + if !out_err.is_null() { + *out_err = err; + } + } + 1 + } + } +} + +fn write_u64_result(out_ok: *mut u64, out_err: *mut i32, result: Result) -> u8 { + match result { + Ok(value) => { + unsafe { + if !out_ok.is_null() { + *out_ok = value; + } + if !out_err.is_null() { + *out_err = 0; + } + } + 0 + } + Err(err) => { + unsafe { + if !out_err.is_null() { + *out_err = err; + } + } + 1 + } + } +} + +fn cap_slice_window(ptr: *const CapSlice, offset: i32, width: usize) -> Result<*const u8, i32> { + if ptr.is_null() || offset < 0 { + return Err(0); + } + let slice 
= unsafe { *ptr }; + if slice.len < 0 { + return Err(0); + } + let len = slice.len as usize; + let start = offset as usize; + if start.checked_add(width).is_none_or(|end| end > len) { + return Err(0); + } + if width > 0 && slice.ptr.is_null() { + return Err(0); + } + Ok(unsafe { slice.ptr.add(start) as *const u8 }) +} + #[no_mangle] pub extern "C" fn capable_rt_mint_console(_sys: Handle) -> Handle { if !has_handle(&ROOT_CAPS, _sys, "root cap table") { @@ -888,36 +1027,106 @@ pub extern "C" fn capable_rt_console_println_i32(_console: Handle, value: i32) { let _ = stdout.flush(); } +#[no_mangle] +pub extern "C" fn capable_rt_console_print_i64(_console: Handle, value: i64) { + if !has_handle(&CONSOLES, _console, "console table") { + return; + } + let mut stdout = io::stdout().lock(); + let _ = write!(stdout, "{value}"); + let _ = stdout.flush(); +} + +#[no_mangle] +pub extern "C" fn capable_rt_console_println_i64(_console: Handle, value: i64) { + if !has_handle(&CONSOLES, _console, "console table") { + return; + } + let mut stdout = io::stdout().lock(); + let _ = writeln!(stdout, "{value}"); + let _ = stdout.flush(); +} + +#[no_mangle] +pub extern "C" fn capable_rt_console_print_u64(_console: Handle, value: u64) { + if !has_handle(&CONSOLES, _console, "console table") { + return; + } + let mut stdout = io::stdout().lock(); + let _ = write!(stdout, "{value}"); + let _ = stdout.flush(); +} + +#[no_mangle] +pub extern "C" fn capable_rt_console_println_u64(_console: Handle, value: u64) { + if !has_handle(&CONSOLES, _console, "console table") { + return; + } + let mut stdout = io::stdout().lock(); + let _ = writeln!(stdout, "{value}"); + let _ = stdout.flush(); +} + #[no_mangle] pub extern "C" fn capable_rt_math_add_wrap_i32(a: i32, b: i32) -> i32 { a.wrapping_add(b) } +#[no_mangle] +pub extern "C" fn capable_rt_math_add_wrap_i64(a: i64, b: i64) -> i64 { + a.wrapping_add(b) +} + #[no_mangle] pub extern "C" fn capable_rt_math_sub_wrap_i32(a: i32, b: i32) -> i32 { 
a.wrapping_sub(b) } +#[no_mangle] +pub extern "C" fn capable_rt_math_sub_wrap_i64(a: i64, b: i64) -> i64 { + a.wrapping_sub(b) +} + #[no_mangle] pub extern "C" fn capable_rt_math_mul_wrap_i32(a: i32, b: i32) -> i32 { a.wrapping_mul(b) } +#[no_mangle] +pub extern "C" fn capable_rt_math_mul_wrap_i64(a: i64, b: i64) -> i64 { + a.wrapping_mul(b) +} + #[no_mangle] pub extern "C" fn capable_rt_math_add_wrap_u32(a: u32, b: u32) -> u32 { a.wrapping_add(b) } +#[no_mangle] +pub extern "C" fn capable_rt_math_add_wrap_u64(a: u64, b: u64) -> u64 { + a.wrapping_add(b) +} + #[no_mangle] pub extern "C" fn capable_rt_math_sub_wrap_u32(a: u32, b: u32) -> u32 { a.wrapping_sub(b) } +#[no_mangle] +pub extern "C" fn capable_rt_math_sub_wrap_u64(a: u64, b: u64) -> u64 { + a.wrapping_sub(b) +} + #[no_mangle] pub extern "C" fn capable_rt_math_mul_wrap_u32(a: u32, b: u32) -> u32 { a.wrapping_mul(b) } +#[no_mangle] +pub extern "C" fn capable_rt_math_mul_wrap_u64(a: u64, b: u64) -> u64 { + a.wrapping_mul(b) +} + #[no_mangle] pub extern "C" fn capable_rt_math_add_wrap_u8(a: u8, b: u8) -> u8 { a.wrapping_add(b) @@ -933,6 +1142,248 @@ pub extern "C" fn capable_rt_math_mul_wrap_u8(a: u8, b: u8) -> u8 { a.wrapping_mul(b) } +#[no_mangle] +pub extern "C" fn capable_rt_slice_u8_read_u16_le( + slice: *const CapSlice, + offset: i32, + out_ok: *mut i32, + out_err: *mut i32, +) -> u8 { + let ptr = match cap_slice_window(slice, offset, 2) { + Ok(ptr) => ptr, + Err(err) => return write_i32_result(out_ok, out_err, Err(err)), + }; + let bytes = unsafe { std::slice::from_raw_parts(ptr, 2) }; + write_i32_result(out_ok, out_err, Ok(u16::from_le_bytes([bytes[0], bytes[1]]) as i32)) +} + +#[no_mangle] +pub extern "C" fn capable_rt_slice_u8_read_u16_be( + slice: *const CapSlice, + offset: i32, + out_ok: *mut i32, + out_err: *mut i32, +) -> u8 { + let ptr = match cap_slice_window(slice, offset, 2) { + Ok(ptr) => ptr, + Err(err) => return write_i32_result(out_ok, out_err, Err(err)), + }; + let bytes = unsafe { 
std::slice::from_raw_parts(ptr, 2) }; + write_i32_result(out_ok, out_err, Ok(u16::from_be_bytes([bytes[0], bytes[1]]) as i32)) +} + +#[no_mangle] +pub extern "C" fn capable_rt_slice_u8_read_u32_le( + slice: *const CapSlice, + offset: i32, + out_ok: *mut u32, + out_err: *mut i32, +) -> u8 { + let ptr = match cap_slice_window(slice, offset, 4) { + Ok(ptr) => ptr, + Err(err) => return write_u32_result(out_ok, out_err, Err(err)), + }; + let bytes = unsafe { std::slice::from_raw_parts(ptr, 4) }; + write_u32_result( + out_ok, + out_err, + Ok(u32::from_le_bytes([bytes[0], bytes[1], bytes[2], bytes[3]])), + ) +} + +#[no_mangle] +pub extern "C" fn capable_rt_slice_u8_read_u32_be( + slice: *const CapSlice, + offset: i32, + out_ok: *mut u32, + out_err: *mut i32, +) -> u8 { + let ptr = match cap_slice_window(slice, offset, 4) { + Ok(ptr) => ptr, + Err(err) => return write_u32_result(out_ok, out_err, Err(err)), + }; + let bytes = unsafe { std::slice::from_raw_parts(ptr, 4) }; + write_u32_result( + out_ok, + out_err, + Ok(u32::from_be_bytes([bytes[0], bytes[1], bytes[2], bytes[3]])), + ) +} + +#[no_mangle] +pub extern "C" fn capable_rt_slice_u8_read_u64_le( + slice: *const CapSlice, + offset: i32, + out_ok: *mut u64, + out_err: *mut i32, +) -> u8 { + let ptr = match cap_slice_window(slice, offset, 8) { + Ok(ptr) => ptr, + Err(err) => return write_u64_result(out_ok, out_err, Err(err)), + }; + let bytes = unsafe { std::slice::from_raw_parts(ptr, 8) }; + write_u64_result( + out_ok, + out_err, + Ok(u64::from_le_bytes([ + bytes[0], bytes[1], bytes[2], bytes[3], bytes[4], bytes[5], bytes[6], bytes[7], + ])), + ) +} + +#[no_mangle] +pub extern "C" fn capable_rt_slice_u8_read_u64_be( + slice: *const CapSlice, + offset: i32, + out_ok: *mut u64, + out_err: *mut i32, +) -> u8 { + let ptr = match cap_slice_window(slice, offset, 8) { + Ok(ptr) => ptr, + Err(err) => return write_u64_result(out_ok, out_err, Err(err)), + }; + let bytes = unsafe { std::slice::from_raw_parts(ptr, 8) }; + 
write_u64_result( + out_ok, + out_err, + Ok(u64::from_be_bytes([ + bytes[0], bytes[1], bytes[2], bytes[3], bytes[4], bytes[5], bytes[6], bytes[7], + ])), + ) +} + +#[no_mangle] +pub extern "C" fn capable_rt_slice_u8_read_i32_le( + slice: *const CapSlice, + offset: i32, + out_ok: *mut i32, + out_err: *mut i32, +) -> u8 { + let ptr = match cap_slice_window(slice, offset, 4) { + Ok(ptr) => ptr, + Err(err) => return write_i32_result(out_ok, out_err, Err(err)), + }; + let bytes = unsafe { std::slice::from_raw_parts(ptr, 4) }; + write_i32_result( + out_ok, + out_err, + Ok(i32::from_le_bytes([bytes[0], bytes[1], bytes[2], bytes[3]])), + ) +} + +#[no_mangle] +pub extern "C" fn capable_rt_slice_u8_read_i32_be( + slice: *const CapSlice, + offset: i32, + out_ok: *mut i32, + out_err: *mut i32, +) -> u8 { + let ptr = match cap_slice_window(slice, offset, 4) { + Ok(ptr) => ptr, + Err(err) => return write_i32_result(out_ok, out_err, Err(err)), + }; + let bytes = unsafe { std::slice::from_raw_parts(ptr, 4) }; + write_i32_result( + out_ok, + out_err, + Ok(i32::from_be_bytes([bytes[0], bytes[1], bytes[2], bytes[3]])), + ) +} + +#[no_mangle] +pub extern "C" fn capable_rt_slice_u8_read_i64_le( + slice: *const CapSlice, + offset: i32, + out_ok: *mut i64, + out_err: *mut i32, +) -> u8 { + let ptr = match cap_slice_window(slice, offset, 8) { + Ok(ptr) => ptr, + Err(err) => return write_i64_result(out_ok, out_err, Err(err)), + }; + let bytes = unsafe { std::slice::from_raw_parts(ptr, 8) }; + write_i64_result( + out_ok, + out_err, + Ok(i64::from_le_bytes([ + bytes[0], bytes[1], bytes[2], bytes[3], bytes[4], bytes[5], bytes[6], bytes[7], + ])), + ) +} + +#[no_mangle] +pub extern "C" fn capable_rt_slice_u8_read_i64_be( + slice: *const CapSlice, + offset: i32, + out_ok: *mut i64, + out_err: *mut i32, +) -> u8 { + let ptr = match cap_slice_window(slice, offset, 8) { + Ok(ptr) => ptr, + Err(err) => return write_i64_result(out_ok, out_err, Err(err)), + }; + let bytes = unsafe { 
std::slice::from_raw_parts(ptr, 8) }; + write_i64_result( + out_ok, + out_err, + Ok(i64::from_be_bytes([ + bytes[0], bytes[1], bytes[2], bytes[3], bytes[4], bytes[5], bytes[6], bytes[7], + ])), + ) +} + +#[no_mangle] +pub extern "C" fn capable_rt_slice_u8_read_u64_fit_i32_le( + slice: *const CapSlice, + offset: i32, + out_ok: *mut i32, + out_err: *mut i32, +) -> u8 { + let ptr = match cap_slice_window(slice, offset, 8) { + Ok(ptr) => ptr, + Err(err) => return write_i32_result(out_ok, out_err, Err(err)), + }; + let bytes = unsafe { std::slice::from_raw_parts(ptr, 8) }; + let value = u64::from_le_bytes([ + bytes[0], bytes[1], bytes[2], bytes[3], bytes[4], bytes[5], bytes[6], bytes[7], + ]); + match i32::try_from(value) { + Ok(value) => write_i32_result(out_ok, out_err, Ok(value)), + Err(_) => write_i32_result(out_ok, out_err, Err(1)), + } +} + +#[no_mangle] +pub extern "C" fn capable_rt_slice_u8_read_u64_fit_i32_be( + slice: *const CapSlice, + offset: i32, + out_ok: *mut i32, + out_err: *mut i32, +) -> u8 { + let ptr = match cap_slice_window(slice, offset, 8) { + Ok(ptr) => ptr, + Err(err) => return write_i32_result(out_ok, out_err, Err(err)), + }; + let bytes = unsafe { std::slice::from_raw_parts(ptr, 8) }; + let value = u64::from_be_bytes([ + bytes[0], bytes[1], bytes[2], bytes[3], bytes[4], bytes[5], bytes[6], bytes[7], + ]); + match i32::try_from(value) { + Ok(value) => write_i32_result(out_ok, out_err, Ok(value)), + Err(_) => write_i32_result(out_ok, out_err, Err(1)), + } +} + +#[no_mangle] +pub extern "C" fn capable_rt_i64_try_i32(value: i64, out_ok: *mut i32) -> u8 { + write_i32_unit_result(out_ok, i32::try_from(value).map_err(|_| ())) +} + +#[no_mangle] +pub extern "C" fn capable_rt_u64_try_i32(value: u64, out_ok: *mut i32) -> u8 { + write_i32_unit_result(out_ok, i32::try_from(value).map_err(|_| ())) +} + #[no_mangle] pub extern "C" fn capable_rt_fs_read_to_string( fs: Handle, diff --git a/stdlib/sys/buffer.cap b/stdlib/sys/buffer.cap index f079c7a..1c3093a 
100644 --- a/stdlib/sys/buffer.cap +++ b/stdlib/sys/buffer.cap @@ -28,7 +28,9 @@ pub enum AllocErr { /// Slice errors. pub enum SliceErr { /// Index is out of range. - OutOfRange + OutOfRange, + /// Value does not fit in i32. + DoesNotFitI32 } /// Copy a slice into a new owned slice using the provided allocator. @@ -238,6 +240,94 @@ impl Slice { let ptr = unsafe_ptr::ptr_add(self.ptr, i) return unsafe_ptr::ptr_read(ptr) } + + /// Checked index into the slice. + pub fn at_checked(self, i: i32) -> Result { + if (i < 0 || i >= self.len) { + return Err(SliceErr::OutOfRange) + } + return Ok(self.at(i)) + } + + /// Read a single byte at an offset. + pub fn read_u8(self, offset: i32) -> Result { + return self.at_checked(offset) + } + + /// Compare four bytes at an offset. + pub fn matches4(self, offset: i32, a: u8, b: u8, c: u8, d: u8) -> bool { + if (offset < 0) { + return false + } + if (self.len - offset < 4) { + return false + } + return + self.at(offset) == a && + self.at(offset + 1) == b && + self.at(offset + 2) == c && + self.at(offset + 3) == d + } + + /// Read a little-endian 16-bit word into i32. + pub fn read_u16_le(self, offset: i32) -> Result { + return Err(SliceErr::OutOfRange) + } + + /// Read a big-endian 16-bit word into i32. + pub fn read_u16_be(self, offset: i32) -> Result { + return Err(SliceErr::OutOfRange) + } + + /// Read a little-endian 32-bit word into u32. + pub fn read_u32_le(self, offset: i32) -> Result { + return Err(SliceErr::OutOfRange) + } + + /// Read a big-endian 32-bit word into u32. + pub fn read_u32_be(self, offset: i32) -> Result { + return Err(SliceErr::OutOfRange) + } + + /// Read a little-endian 64-bit word into u64. + pub fn read_u64_le(self, offset: i32) -> Result { + return Err(SliceErr::OutOfRange) + } + + /// Read a big-endian 64-bit word into u64. + pub fn read_u64_be(self, offset: i32) -> Result { + return Err(SliceErr::OutOfRange) + } + + /// Read a little-endian 32-bit word into i32. 
+ pub fn read_i32_le(self, offset: i32) -> Result { + return Err(SliceErr::OutOfRange) + } + + /// Read a big-endian 32-bit word into i32. + pub fn read_i32_be(self, offset: i32) -> Result { + return Err(SliceErr::OutOfRange) + } + + /// Read a little-endian 64-bit word into i64. + pub fn read_i64_le(self, offset: i32) -> Result { + return Err(SliceErr::OutOfRange) + } + + /// Read a big-endian 64-bit word into i64. + pub fn read_i64_be(self, offset: i32) -> Result { + return Err(SliceErr::OutOfRange) + } + + /// Read a little-endian 64-bit word if it fits in i32. + pub fn read_u64_fit_i32_le(self, offset: i32) -> Result { + return Err(SliceErr::OutOfRange) + } + + /// Read a big-endian 64-bit word if it fits in i32. + pub fn read_u64_fit_i32_be(self, offset: i32) -> Result { + return Err(SliceErr::OutOfRange) + } } impl MutSlice { diff --git a/stdlib/sys/console.cap b/stdlib/sys/console.cap index 2ae645f..79a242f 100644 --- a/stdlib/sys/console.cap +++ b/stdlib/sys/console.cap @@ -26,6 +26,26 @@ impl Console { return () } + /// Print an i64 without a newline. + pub fn print_i64(self, v: i64) -> unit { + return () + } + + /// Print an i64 with a newline. + pub fn println_i64(self, v: i64) -> unit { + return () + } + + /// Print a u64 without a newline. + pub fn print_u64(self, v: u64) -> unit { + return () + } + + /// Print a u64 with a newline. + pub fn println_u64(self, v: u64) -> unit { + return () + } + /// Trap if condition is false. pub fn assert(self, cond: bool) -> unit { return () diff --git a/stdlib/sys/ints.cap b/stdlib/sys/ints.cap new file mode 100644 index 0000000..2f0cbf0 --- /dev/null +++ b/stdlib/sys/ints.cap @@ -0,0 +1,17 @@ +/// Integer helper methods for fixed-width ints. +package safe +module sys::ints + +impl i64 { + /// Convert to i32 if the value fits. + pub fn try_i32(self) -> Result { + return Err(()) + } +} + +impl u64 { + /// Convert to i32 if the value fits. 
+ pub fn try_i32(self) -> Result { + return Err(()) + } +} diff --git a/stdlib/sys/math.cap b/stdlib/sys/math.cap index f726e42..dfb3dd3 100644 --- a/stdlib/sys/math.cap +++ b/stdlib/sys/math.cap @@ -4,17 +4,29 @@ module sys::math /// Wrapping add for i32. pub fn add_wrap_i32(a: i32, b: i32) -> i32 { return 0 } +/// Wrapping add for i64. +pub fn add_wrap_i64(a: i64, b: i64) -> i64 { return 0 } /// Wrapping subtract for i32. pub fn sub_wrap_i32(a: i32, b: i32) -> i32 { return 0 } +/// Wrapping subtract for i64. +pub fn sub_wrap_i64(a: i64, b: i64) -> i64 { return 0 } /// Wrapping multiply for i32. pub fn mul_wrap_i32(a: i32, b: i32) -> i32 { return 0 } +/// Wrapping multiply for i64. +pub fn mul_wrap_i64(a: i64, b: i64) -> i64 { return 0 } /// Wrapping add for u32. pub fn add_wrap_u32(a: u32, b: u32) -> u32 { return 0 } +/// Wrapping add for u64. +pub fn add_wrap_u64(a: u64, b: u64) -> u64 { return 0 } /// Wrapping subtract for u32. pub fn sub_wrap_u32(a: u32, b: u32) -> u32 { return 0 } +/// Wrapping subtract for u64. +pub fn sub_wrap_u64(a: u64, b: u64) -> u64 { return 0 } /// Wrapping multiply for u32. pub fn mul_wrap_u32(a: u32, b: u32) -> u32 { return 0 } +/// Wrapping multiply for u64. +pub fn mul_wrap_u64(a: u64, b: u64) -> u64 { return 0 } /// Wrapping add for u8. 
pub fn add_wrap_u8(a: u8, b: u8) -> u8 { return 0 } diff --git a/tests/programs/bytes_helpers.cap b/tests/programs/bytes_helpers.cap index 580d1b9..c393239 100644 --- a/tests/programs/bytes_helpers.cap +++ b/tests/programs/bytes_helpers.cap @@ -1,6 +1,8 @@ package safe module bytes_helpers +use sys::buffer use sys::system +use sys::vec pub fn main(rc: RootCap) -> i32 { let c = rc.mint_console() @@ -14,6 +16,99 @@ pub fn main(rc: RootCap) -> i32 { Err(_) => { c.assert(false) } } c.assert(b0.is_whitespace() && b1.is_whitespace()) + + let raw = vec::new() + defer raw.free() + try raw.push(1u8) else { c.assert(false) } + try raw.push(2u8) else { c.assert(false) } + try raw.push(3u8) else { c.assert(false) } + try raw.push(4u8) else { c.assert(false) } + let data = raw.as_slice() + c.assert(data.matches4(0, 1u8, 2u8, 3u8, 4u8)) + c.assert(!data.matches4(1, 1u8, 2u8, 3u8, 4u8)) + + try let byte2 = data.read_u8(2) else { + c.assert(false) + return 1 + } + c.assert(byte2 == 3u8) + + try let le16 = data.read_u16_le(0) else { + c.assert(false) + return 1 + } + try let be16 = data.read_u16_be(0) else { + c.assert(false) + return 1 + } + try let le32 = data.read_u32_le(0) else { + c.assert(false) + return 1 + } + try let be32 = data.read_u32_be(0) else { + c.assert(false) + return 1 + } + try let signed = data.read_i32_le(0) else { + c.assert(false) + return 1 + } + c.assert(le16 == 513) + c.assert(be16 == 258) + c.assert(le32 != be32) + c.assert(le32 > be32) + c.assert(signed == 67305985) + match (data.read_u32_le(1)) { + Ok(_) => { + c.assert(false) + return 1 + } + Err(_) => {} + } + + let wide = vec::new() + defer wide.free() + try wide.push(120u8) else { c.assert(false) } + try wide.push(86u8) else { c.assert(false) } + try wide.push(52u8) else { c.assert(false) } + try wide.push(18u8) else { c.assert(false) } + try wide.push(0u8) else { c.assert(false) } + try wide.push(0u8) else { c.assert(false) } + try wide.push(0u8) else { c.assert(false) } + try wide.push(0u8) else 
{ c.assert(false) } + let wide_data = wide.as_slice() + try let wide64 = wide_data.read_u64_le(0) else { + c.assert(false) + return 1 + } + try let fit64 = wide64.try_i32() else { + c.assert(false) + return 1 + } + c.assert(fit64 == 305419896) + + let too_wide = vec::new() + defer too_wide.free() + try too_wide.push(0u8) else { c.assert(false) } + try too_wide.push(0u8) else { c.assert(false) } + try too_wide.push(0u8) else { c.assert(false) } + try too_wide.push(0u8) else { c.assert(false) } + try too_wide.push(1u8) else { c.assert(false) } + try too_wide.push(0u8) else { c.assert(false) } + try too_wide.push(0u8) else { c.assert(false) } + try too_wide.push(0u8) else { c.assert(false) } + try let too_wide_value = too_wide.as_slice().read_u64_le(0) else { + c.assert(false) + return 1 + } + match (too_wide_value.try_i32()) { + Ok(_) => { + c.assert(false) + return 1 + } + Err(_) => {} + } + c.println("bytes ok") return 0 } diff --git a/tests/programs/int64_basic.cap b/tests/programs/int64_basic.cap new file mode 100644 index 0000000..6970dc1 --- /dev/null +++ b/tests/programs/int64_basic.cap @@ -0,0 +1,47 @@ +module int64_basic +use sys::system + +fn add_i64(a: i64, b: i64) -> i64 { + return a + b +} + +fn add_u64(a: u64, b: u64) -> u64 { + return a + b +} + +fn classify(n: u64) -> string { + return match (n) { + 0u64 => { "zero" } + 1099511627776u64 => { "tera" } + _ => { "other" } + } +} + +pub fn main(rc: RootCap) -> i32 { + let c = rc.mint_console() + + let signed = add_i64(5000000000i64, -2i64) + let unsigned = add_u64(1099511627776u64, 24u64) + + c.assert(signed == 4999999998i64) + c.assert(unsigned == 1099511627800u64) + c.assert((1099511627776u64 >> 4u64) == 68719476736u64) + c.assert(5000000000i64 > 4000000000i64) + c.assert(1099511627776u64 > 255u64) + c.assert(classify(1099511627776u64) == "tera") + + try let narrowed_signed = 42i64.try_i32() else { + c.assert(false) + return 1 + } + try let narrowed_unsigned = 42u64.try_i32() else { + c.assert(false) + 
return 1 + } + c.assert(narrowed_signed == 42) + c.assert(narrowed_unsigned == 42) + c.println_i64(signed) + c.println_u64(unsigned) + c.println("int64 ok") + return 0 +} diff --git a/tests/programs/u8_match.cap b/tests/programs/u8_match.cap new file mode 100644 index 0000000..7b105a3 --- /dev/null +++ b/tests/programs/u8_match.cap @@ -0,0 +1,21 @@ +module u8_match +use sys::system + +fn nibble_name(n: u8) -> string { + return match (n) { + 0u8 => { "zero" } + 10u8 => { "ten" } + 15u8 => { "fifteen" } + _ => { "other" } + } +} + +pub fn main(rc: RootCap) -> i32 { + let c = rc.mint_console() + c.assert(nibble_name(0u8) == "zero") + c.assert(nibble_name(10u8) == "ten") + c.assert(nibble_name(15u8) == "fifteen") + c.assert(nibble_name(7u8) == "other") + c.println("u8 match ok") + return 0 +} From 471c07b556c60be365a4ff810ac1076c7f3c5a53 Mon Sep 17 00:00:00 2001 From: Jordan Mecom Date: Mon, 30 Mar 2026 09:35:57 -0700 Subject: [PATCH 17/17] Update editor grammars for 64-bit integers --- lua/capable/init.lua | 29 + queries/capable/highlights.scm | 15 +- syntax/cap.vim | 4 +- tree-sitter-capable/grammar.js | 4 + tree-sitter-capable/queries/highlights.scm | 15 +- tree-sitter-capable/src/grammar.json | 16 + tree-sitter-capable/src/node-types.json | 16 + tree-sitter-capable/src/parser.c | 5395 ++++++++++--------- tree-sitter-capable/src/tree_sitter/array.h | 181 +- vscode/syntaxes/capable.tmLanguage.json | 4 +- 10 files changed, 2990 insertions(+), 2689 deletions(-) diff --git a/lua/capable/init.lua b/lua/capable/init.lua index 85baa9f..6839ac7 100644 --- a/lua/capable/init.lua +++ b/lua/capable/init.lua @@ -23,6 +23,31 @@ local function plugin_root() return vim.fs.dirname(vim.fs.dirname(vim.fs.dirname(source))) end +local function parser_candidates() + local root = plugin_root() + local data = vim.fn.stdpath("data") + return { + data .. "/site/parser/capable.so", + root .. "/parser/capable.so", + root .. 
"/tree-sitter-capable/capable.so", + } +end + +local function register_compiled_parser() + if not (vim.treesitter and vim.treesitter.language and vim.treesitter.language.add) then + return false + end + + for _, path in ipairs(parser_candidates()) do + if vim.uv.fs_stat(path) then + pcall(vim.treesitter.language.add, "capable", { path = path }) + return true + end + end + + return false +end + local function normalize_cmd(cmd) if type(cmd) == "string" then return { cmd } @@ -37,6 +62,8 @@ local function resolve_root(bufnr, markers) end function M.register_treesitter() + register_compiled_parser() + local ok, parsers = pcall(require, "nvim-treesitter.parsers") if not ok then return false @@ -55,6 +82,8 @@ function M.register_treesitter() pcall(vim.treesitter.language.register, "capable", "cap") end + register_compiled_parser() + return true end diff --git a/queries/capable/highlights.scm b/queries/capable/highlights.scm index 9ed5fb7..1f8ca5d 100644 --- a/queries/capable/highlights.scm +++ b/queries/capable/highlights.scm @@ -11,12 +11,6 @@ "if" "else" "while" - "for" - "in" - "break" - "continue" - "defer" - "try" "return" "struct" "enum" @@ -35,8 +29,13 @@ [ (int_lit) (u8_lit) + (i64_lit) + (u64_lit) ] @number +((type_path (identifier) @type.builtin) + (#match? 
@type.builtin "^(i32|i64|u32|u64|u8|bool|string|Result)$")) + (string_lit) @string (comment) @comment @@ -50,5 +49,5 @@ (struct_decl name: (identifier) @type) (enum_decl name: (identifier) @type) -(field name: (identifier) @property) -(struct_field name: (identifier) @property) +(field (identifier) @property) +(struct_field (identifier) @property) diff --git a/syntax/cap.vim b/syntax/cap.vim index f7841f4..e000419 100644 --- a/syntax/cap.vim +++ b/syntax/cap.vim @@ -6,11 +6,11 @@ syntax case match syntax keyword capKeyword package module use pub extern fn let if else while for in return struct enum impl match break continue defer try unsafe safe opaque linear copy capability true false syntax keyword capBuiltin unit -syntax keyword capType i32 u32 u8 bool string Result +syntax keyword capType i32 i64 u32 u64 u8 bool string Result syntax match capComment "//.*$" syntax region capString start=+"+ skip=+\\\\\|\\"+ end=+"+ -syntax match capNumber "\<\d\+u8\>\|\<\d\+\>" +syntax match capNumber "\<\d\+\%(u8\|i64\|u64\)\>\|\<\d\+\>" syntax match capFunctionDecl "\\s\+\zs[A-Za-z_][A-Za-z0-9_]*" syntax match capTypeDecl "\<\(struct\|enum\|impl\)\>\s\+\zs[A-Za-z_][A-Za-z0-9_:]*" diff --git a/tree-sitter-capable/grammar.js b/tree-sitter-capable/grammar.js index 892b30e..76590af 100644 --- a/tree-sitter-capable/grammar.js +++ b/tree-sitter-capable/grammar.js @@ -257,6 +257,8 @@ module.exports = grammar({ choice( $.int_lit, $.u8_lit, + $.i64_lit, + $.u64_lit, $.string_lit, "true", "false", @@ -265,6 +267,8 @@ module.exports = grammar({ int_lit: ($) => /[0-9]+/, u8_lit: ($) => /[0-9]+u8/, + i64_lit: ($) => /[0-9]+i64/, + u64_lit: ($) => /[0-9]+u64/, string_lit: ($) => /"([^"\\]|\\.)*"/, identifier: ($) => /[a-zA-Z_][a-zA-Z0-9_]*/, diff --git a/tree-sitter-capable/queries/highlights.scm b/tree-sitter-capable/queries/highlights.scm index 9ed5fb7..1f8ca5d 100644 --- a/tree-sitter-capable/queries/highlights.scm +++ b/tree-sitter-capable/queries/highlights.scm @@ -11,12 +11,6 @@ "if" 
"else" "while" - "for" - "in" - "break" - "continue" - "defer" - "try" "return" "struct" "enum" @@ -35,8 +29,13 @@ [ (int_lit) (u8_lit) + (i64_lit) + (u64_lit) ] @number +((type_path (identifier) @type.builtin) + (#match? @type.builtin "^(i32|i64|u32|u64|u8|bool|string|Result)$")) + (string_lit) @string (comment) @comment @@ -50,5 +49,5 @@ (struct_decl name: (identifier) @type) (enum_decl name: (identifier) @type) -(field name: (identifier) @property) -(struct_field name: (identifier) @property) +(field (identifier) @property) +(struct_field (identifier) @property) diff --git a/tree-sitter-capable/src/grammar.json b/tree-sitter-capable/src/grammar.json index ed1b88b..f4938c8 100644 --- a/tree-sitter-capable/src/grammar.json +++ b/tree-sitter-capable/src/grammar.json @@ -1698,6 +1698,14 @@ "type": "SYMBOL", "name": "u8_lit" }, + { + "type": "SYMBOL", + "name": "i64_lit" + }, + { + "type": "SYMBOL", + "name": "u64_lit" + }, { "type": "SYMBOL", "name": "string_lit" @@ -1724,6 +1732,14 @@ "type": "PATTERN", "value": "[0-9]+u8" }, + "i64_lit": { + "type": "PATTERN", + "value": "[0-9]+i64" + }, + "u64_lit": { + "type": "PATTERN", + "value": "[0-9]+u64" + }, "string_lit": { "type": "PATTERN", "value": "\"([^\"\\\\]|\\\\.)*\"" diff --git a/tree-sitter-capable/src/node-types.json b/tree-sitter-capable/src/node-types.json index fb16376..7e05894 100644 --- a/tree-sitter-capable/src/node-types.json +++ b/tree-sitter-capable/src/node-types.json @@ -422,6 +422,10 @@ "multiple": false, "required": false, "types": [ + { + "type": "i64_lit", + "named": true + }, { "type": "int_lit", "named": true @@ -430,6 +434,10 @@ "type": "string_lit", "named": true }, + { + "type": "u64_lit", + "named": true + }, { "type": "u8_lit", "named": true @@ -1075,6 +1083,10 @@ "type": "fn", "named": false }, + { + "type": "i64_lit", + "named": true + }, { "type": "identifier", "named": true @@ -1143,6 +1155,10 @@ "type": "true", "named": false }, + { + "type": "u64_lit", + "named": true + }, { "type": 
"u8_lit", "named": true diff --git a/tree-sitter-capable/src/parser.c b/tree-sitter-capable/src/parser.c index e616f29..d9392ad 100644 --- a/tree-sitter-capable/src/parser.c +++ b/tree-sitter-capable/src/parser.c @@ -9,9 +9,9 @@ #define LANGUAGE_VERSION 14 #define STATE_COUNT 284 #define LARGE_STATE_COUNT 2 -#define SYMBOL_COUNT 119 +#define SYMBOL_COUNT 121 #define ALIAS_COUNT 0 -#define TOKEN_COUNT 61 +#define TOKEN_COUNT 63 #define EXTERNAL_TOKEN_COUNT 0 #define FIELD_COUNT 7 #define MAX_ALIAS_SEQUENCE_LENGTH 8 @@ -78,66 +78,68 @@ enum ts_symbol_identifiers { anon_sym_unit = 56, sym_int_lit = 57, sym_u8_lit = 58, - sym_string_lit = 59, - sym_comment = 60, - sym_source_file = 61, - sym_package_decl = 62, - sym_module_decl = 63, - sym_use_decl = 64, - sym_module_path = 65, - sym__item = 66, - sym_function_decl = 67, - sym_extern_function_decl = 68, - sym_struct_decl = 69, - sym_enum_decl = 70, - sym_impl_block = 71, - sym_method_decl = 72, - sym_field_list = 73, - sym_field = 74, - sym_enum_variants = 75, - sym_enum_variant = 76, - sym_param_list = 77, - sym_param = 78, - sym_type = 79, - sym_type_path = 80, - sym_type_args = 81, - sym_block = 82, - sym_statement = 83, - sym_let_stmt = 84, - sym_assign_stmt = 85, - sym_return_stmt = 86, - sym_if_stmt = 87, - sym_while_stmt = 88, - sym_expr_stmt = 89, - sym_expression = 90, - sym_match_expr = 91, - sym_match_arm = 92, - sym_pattern = 93, - sym_pattern_call = 94, - sym_call_expr = 95, - sym_try_expr = 96, - sym_arg_list = 97, - sym_struct_literal = 98, - sym_struct_field = 99, - sym_path_expr = 100, - sym_unary_expr = 101, - sym_binary_expr = 102, - sym_grouping = 103, - sym_literal = 104, - aux_sym_source_file_repeat1 = 105, - aux_sym_source_file_repeat2 = 106, - aux_sym_module_path_repeat1 = 107, - aux_sym_struct_decl_repeat1 = 108, - aux_sym_impl_block_repeat1 = 109, - aux_sym_field_list_repeat1 = 110, - aux_sym_enum_variants_repeat1 = 111, - aux_sym_param_list_repeat1 = 112, - aux_sym_type_args_repeat1 = 113, - 
aux_sym_block_repeat1 = 114, - aux_sym_match_expr_repeat1 = 115, - aux_sym_arg_list_repeat1 = 116, - aux_sym_struct_literal_repeat1 = 117, - aux_sym_path_expr_repeat1 = 118, + sym_i64_lit = 59, + sym_u64_lit = 60, + sym_string_lit = 61, + sym_comment = 62, + sym_source_file = 63, + sym_package_decl = 64, + sym_module_decl = 65, + sym_use_decl = 66, + sym_module_path = 67, + sym__item = 68, + sym_function_decl = 69, + sym_extern_function_decl = 70, + sym_struct_decl = 71, + sym_enum_decl = 72, + sym_impl_block = 73, + sym_method_decl = 74, + sym_field_list = 75, + sym_field = 76, + sym_enum_variants = 77, + sym_enum_variant = 78, + sym_param_list = 79, + sym_param = 80, + sym_type = 81, + sym_type_path = 82, + sym_type_args = 83, + sym_block = 84, + sym_statement = 85, + sym_let_stmt = 86, + sym_assign_stmt = 87, + sym_return_stmt = 88, + sym_if_stmt = 89, + sym_while_stmt = 90, + sym_expr_stmt = 91, + sym_expression = 92, + sym_match_expr = 93, + sym_match_arm = 94, + sym_pattern = 95, + sym_pattern_call = 96, + sym_call_expr = 97, + sym_try_expr = 98, + sym_arg_list = 99, + sym_struct_literal = 100, + sym_struct_field = 101, + sym_path_expr = 102, + sym_unary_expr = 103, + sym_binary_expr = 104, + sym_grouping = 105, + sym_literal = 106, + aux_sym_source_file_repeat1 = 107, + aux_sym_source_file_repeat2 = 108, + aux_sym_module_path_repeat1 = 109, + aux_sym_struct_decl_repeat1 = 110, + aux_sym_impl_block_repeat1 = 111, + aux_sym_field_list_repeat1 = 112, + aux_sym_enum_variants_repeat1 = 113, + aux_sym_param_list_repeat1 = 114, + aux_sym_type_args_repeat1 = 115, + aux_sym_block_repeat1 = 116, + aux_sym_match_expr_repeat1 = 117, + aux_sym_arg_list_repeat1 = 118, + aux_sym_struct_literal_repeat1 = 119, + aux_sym_path_expr_repeat1 = 120, }; static const char * const ts_symbol_names[] = { @@ -200,6 +202,8 @@ static const char * const ts_symbol_names[] = { [anon_sym_unit] = "unit", [sym_int_lit] = "int_lit", [sym_u8_lit] = "u8_lit", + [sym_i64_lit] = "i64_lit", + 
[sym_u64_lit] = "u64_lit", [sym_string_lit] = "string_lit", [sym_comment] = "comment", [sym_source_file] = "source_file", @@ -322,6 +326,8 @@ static const TSSymbol ts_symbol_map[] = { [anon_sym_unit] = anon_sym_unit, [sym_int_lit] = sym_int_lit, [sym_u8_lit] = sym_u8_lit, + [sym_i64_lit] = sym_i64_lit, + [sym_u64_lit] = sym_u64_lit, [sym_string_lit] = sym_string_lit, [sym_comment] = sym_comment, [sym_source_file] = sym_source_file, @@ -621,6 +627,14 @@ static const TSSymbolMetadata ts_symbol_metadata[] = { .visible = true, .named = true, }, + [sym_i64_lit] = { + .visible = true, + .named = true, + }, + [sym_u64_lit] = { + .visible = true, + .named = true, + }, [sym_string_lit] = { .visible = true, .named = true, @@ -997,7 +1011,7 @@ static const TSStateId ts_primary_state_ids[STATE_COUNT] = { [44] = 44, [45] = 45, [46] = 46, - [47] = 47, + [47] = 44, [48] = 48, [49] = 49, [50] = 50, @@ -1005,22 +1019,22 @@ static const TSStateId ts_primary_state_ids[STATE_COUNT] = { [52] = 52, [53] = 53, [54] = 54, - [55] = 44, + [55] = 55, [56] = 56, [57] = 57, [58] = 58, - [59] = 59, - [60] = 60, + [59] = 57, + [60] = 56, [61] = 61, - [62] = 54, - [63] = 51, - [64] = 56, - [65] = 65, - [66] = 49, - [67] = 52, + [62] = 62, + [63] = 46, + [64] = 55, + [65] = 61, + [66] = 62, + [67] = 67, [68] = 68, [69] = 69, - [70] = 59, + [70] = 70, [71] = 71, [72] = 72, [73] = 73, @@ -1029,10 +1043,10 @@ static const TSStateId ts_primary_state_ids[STATE_COUNT] = { [76] = 76, [77] = 77, [78] = 78, - [79] = 79, + [79] = 2, [80] = 80, [81] = 81, - [82] = 2, + [82] = 82, [83] = 83, [84] = 84, [85] = 85, @@ -1041,12 +1055,12 @@ static const TSStateId ts_primary_state_ids[STATE_COUNT] = { [88] = 88, [89] = 89, [90] = 90, - [91] = 12, + [91] = 91, [92] = 92, [93] = 93, [94] = 94, - [95] = 18, - [96] = 23, + [95] = 95, + [96] = 96, [97] = 97, [98] = 98, [99] = 99, @@ -1054,15 +1068,15 @@ static const TSStateId ts_primary_state_ids[STATE_COUNT] = { [101] = 101, [102] = 102, [103] = 103, - [104] = 104, + 
[104] = 12, [105] = 105, - [106] = 106, - [107] = 24, - [108] = 19, - [109] = 25, - [110] = 110, - [111] = 111, - [112] = 112, + [106] = 23, + [107] = 107, + [108] = 24, + [109] = 109, + [110] = 25, + [111] = 19, + [112] = 18, [113] = 113, [114] = 114, [115] = 115, @@ -1073,9 +1087,9 @@ static const TSStateId ts_primary_state_ids[STATE_COUNT] = { [120] = 120, [121] = 121, [122] = 122, - [123] = 76, - [124] = 81, - [125] = 125, + [123] = 123, + [124] = 74, + [125] = 72, [126] = 126, [127] = 127, [128] = 128, @@ -1232,7 +1246,7 @@ static const TSStateId ts_primary_state_ids[STATE_COUNT] = { [279] = 279, [280] = 280, [281] = 281, - [282] = 258, + [282] = 275, [283] = 283, }; @@ -1241,280 +1255,297 @@ static bool ts_lex(TSLexer *lexer, TSStateId state) { eof = lexer->eof(lexer); switch (state) { case 0: - if (eof) ADVANCE(13); + if (eof) ADVANCE(16); ADVANCE_MAP( - '!', 34, + '!', 37, '"', 1, - '&', 25, - '(', 22, - ')', 23, - '*', 24, - '+', 45, - ',', 19, - '-', 36, - '.', 33, - '/', 46, - ':', 21, - ';', 16, - '<', 41, - '=', 30, - '>', 43, - '?', 32, - '[', 26, - ']', 27, - '{', 17, - '|', 8, - '}', 18, + '&', 28, + '(', 25, + ')', 26, + '*', 27, + '+', 48, + ',', 22, + '-', 39, + '.', 36, + '/', 49, + ':', 24, + ';', 19, + '<', 44, + '=', 33, + '>', 46, + '?', 35, + '[', 29, + ']', 30, + '{', 20, + '|', 11, + '}', 21, ); if (('\t' <= lookahead && lookahead <= '\r') || lookahead == ' ') SKIP(0); - if (('0' <= lookahead && lookahead <= '9')) ADVANCE(47); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(50); if (('A' <= lookahead && lookahead <= 'Z') || lookahead == '_' || - ('a' <= lookahead && lookahead <= 'z')) ADVANCE(50); + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(55); END_STATE(); case 1: - if (lookahead == '"') ADVANCE(49); - if (lookahead == '\\') ADVANCE(9); + if (lookahead == '"') ADVANCE(54); + if (lookahead == '\\') ADVANCE(12); if (lookahead != 0) ADVANCE(1); END_STATE(); case 2: - if (lookahead == '&') ADVANCE(38); + if (lookahead == '&') 
ADVANCE(41); END_STATE(); case 3: - if (lookahead == '/') ADVANCE(51); + if (lookahead == '/') ADVANCE(56); END_STATE(); case 4: - if (lookahead == '8') ADVANCE(48); + if (lookahead == '4') ADVANCE(52); END_STATE(); case 5: - if (lookahead == ':') ADVANCE(14); + if (lookahead == '4') ADVANCE(53); END_STATE(); case 6: - if (lookahead == '=') ADVANCE(39); - if (lookahead == '>') ADVANCE(31); + if (lookahead == '6') ADVANCE(4); END_STATE(); case 7: - if (lookahead == '>') ADVANCE(15); + if (lookahead == '6') ADVANCE(5); + if (lookahead == '8') ADVANCE(51); END_STATE(); case 8: - if (lookahead == '|') ADVANCE(37); + if (lookahead == ':') ADVANCE(17); END_STATE(); case 9: + if (lookahead == '=') ADVANCE(42); + if (lookahead == '>') ADVANCE(34); + END_STATE(); + case 10: + if (lookahead == '>') ADVANCE(18); + END_STATE(); + case 11: + if (lookahead == '|') ADVANCE(40); + END_STATE(); + case 12: if (lookahead != 0 && lookahead != '\n') ADVANCE(1); END_STATE(); - case 10: - if (eof) ADVANCE(13); + case 13: + if (eof) ADVANCE(16); ADVANCE_MAP( - '!', 34, + '!', 37, '"', 1, '&', 2, - '(', 22, - ')', 23, - '*', 24, - '+', 45, - ',', 19, - '-', 35, - '.', 33, - '/', 46, - ':', 5, - ';', 16, - '<', 41, - '=', 6, - '>', 43, - '?', 32, - '{', 17, - '|', 8, - '}', 18, + '(', 25, + ')', 26, + '*', 27, + '+', 48, + ',', 22, + '-', 38, + '.', 36, + '/', 49, + ':', 8, + ';', 19, + '<', 44, + '=', 9, + '>', 46, + '?', 35, + '{', 20, + '|', 11, + '}', 21, ); if (('\t' <= lookahead && lookahead <= '\r') || - lookahead == ' ') SKIP(10); - if (('0' <= lookahead && lookahead <= '9')) ADVANCE(47); + lookahead == ' ') SKIP(13); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(50); if (('A' <= lookahead && lookahead <= 'Z') || lookahead == '_' || - ('a' <= lookahead && lookahead <= 'z')) ADVANCE(50); + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(55); END_STATE(); - case 11: - if (eof) ADVANCE(13); + case 14: + if (eof) ADVANCE(16); ADVANCE_MAP( - '!', 34, + '!', 37, '"', 1, '&', 2, - 
'(', 22, - ')', 23, - '*', 24, - '+', 45, - ',', 19, - '-', 35, - '.', 33, - '/', 46, - ':', 5, - ';', 16, - '<', 41, - '=', 29, - '>', 43, - '?', 32, - '[', 26, - ']', 27, - '{', 17, - '|', 8, - '}', 18, + '(', 25, + ')', 26, + '*', 27, + '+', 48, + ',', 22, + '-', 38, + '.', 36, + '/', 49, + ':', 8, + ';', 19, + '<', 44, + '=', 32, + '>', 46, + '?', 35, + '[', 29, + ']', 30, + '{', 20, + '|', 11, + '}', 21, ); if (('\t' <= lookahead && lookahead <= '\r') || - lookahead == ' ') SKIP(11); - if (('0' <= lookahead && lookahead <= '9')) ADVANCE(47); + lookahead == ' ') SKIP(14); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(50); if (('A' <= lookahead && lookahead <= 'Z') || lookahead == '_' || - ('a' <= lookahead && lookahead <= 'z')) ADVANCE(50); + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(55); END_STATE(); - case 12: - if (eof) ADVANCE(13); - if (lookahead == '-') ADVANCE(7); + case 15: + if (eof) ADVANCE(16); + if (lookahead == '-') ADVANCE(10); if (lookahead == '/') ADVANCE(3); - if (lookahead == ':') ADVANCE(20); - if (lookahead == ';') ADVANCE(16); - if (lookahead == '=') ADVANCE(28); - if (lookahead == '{') ADVANCE(17); + if (lookahead == ':') ADVANCE(23); + if (lookahead == ';') ADVANCE(19); + if (lookahead == '=') ADVANCE(31); + if (lookahead == '{') ADVANCE(20); if (('\t' <= lookahead && lookahead <= '\r') || - lookahead == ' ') SKIP(12); + lookahead == ' ') SKIP(15); if (('A' <= lookahead && lookahead <= 'Z') || lookahead == '_' || - ('a' <= lookahead && lookahead <= 'z')) ADVANCE(50); + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(55); END_STATE(); - case 13: + case 16: ACCEPT_TOKEN(ts_builtin_sym_end); END_STATE(); - case 14: + case 17: ACCEPT_TOKEN(anon_sym_COLON_COLON); END_STATE(); - case 15: + case 18: ACCEPT_TOKEN(anon_sym_DASH_GT); END_STATE(); - case 16: + case 19: ACCEPT_TOKEN(anon_sym_SEMI); END_STATE(); - case 17: + case 20: ACCEPT_TOKEN(anon_sym_LBRACE); END_STATE(); - case 18: + case 21: ACCEPT_TOKEN(anon_sym_RBRACE); 
END_STATE(); - case 19: + case 22: ACCEPT_TOKEN(anon_sym_COMMA); END_STATE(); - case 20: + case 23: ACCEPT_TOKEN(anon_sym_COLON); END_STATE(); - case 21: + case 24: ACCEPT_TOKEN(anon_sym_COLON); - if (lookahead == ':') ADVANCE(14); + if (lookahead == ':') ADVANCE(17); END_STATE(); - case 22: + case 25: ACCEPT_TOKEN(anon_sym_LPAREN); END_STATE(); - case 23: + case 26: ACCEPT_TOKEN(anon_sym_RPAREN); END_STATE(); - case 24: + case 27: ACCEPT_TOKEN(anon_sym_STAR); END_STATE(); - case 25: + case 28: ACCEPT_TOKEN(anon_sym_AMP); END_STATE(); - case 26: + case 29: ACCEPT_TOKEN(anon_sym_LBRACK); END_STATE(); - case 27: + case 30: ACCEPT_TOKEN(anon_sym_RBRACK); END_STATE(); - case 28: + case 31: ACCEPT_TOKEN(anon_sym_EQ); END_STATE(); - case 29: + case 32: ACCEPT_TOKEN(anon_sym_EQ); - if (lookahead == '=') ADVANCE(39); + if (lookahead == '=') ADVANCE(42); END_STATE(); - case 30: + case 33: ACCEPT_TOKEN(anon_sym_EQ); - if (lookahead == '=') ADVANCE(39); - if (lookahead == '>') ADVANCE(31); + if (lookahead == '=') ADVANCE(42); + if (lookahead == '>') ADVANCE(34); END_STATE(); - case 31: + case 34: ACCEPT_TOKEN(anon_sym_EQ_GT); END_STATE(); - case 32: + case 35: ACCEPT_TOKEN(anon_sym_QMARK); END_STATE(); - case 33: + case 36: ACCEPT_TOKEN(anon_sym_DOT); END_STATE(); - case 34: + case 37: ACCEPT_TOKEN(anon_sym_BANG); - if (lookahead == '=') ADVANCE(40); + if (lookahead == '=') ADVANCE(43); END_STATE(); - case 35: + case 38: ACCEPT_TOKEN(anon_sym_DASH); END_STATE(); - case 36: + case 39: ACCEPT_TOKEN(anon_sym_DASH); - if (lookahead == '>') ADVANCE(15); + if (lookahead == '>') ADVANCE(18); END_STATE(); - case 37: + case 40: ACCEPT_TOKEN(anon_sym_PIPE_PIPE); END_STATE(); - case 38: + case 41: ACCEPT_TOKEN(anon_sym_AMP_AMP); END_STATE(); - case 39: + case 42: ACCEPT_TOKEN(anon_sym_EQ_EQ); END_STATE(); - case 40: + case 43: ACCEPT_TOKEN(anon_sym_BANG_EQ); END_STATE(); - case 41: + case 44: ACCEPT_TOKEN(anon_sym_LT); - if (lookahead == '=') ADVANCE(42); + if (lookahead == '=') 
ADVANCE(45); END_STATE(); - case 42: + case 45: ACCEPT_TOKEN(anon_sym_LT_EQ); END_STATE(); - case 43: + case 46: ACCEPT_TOKEN(anon_sym_GT); - if (lookahead == '=') ADVANCE(44); + if (lookahead == '=') ADVANCE(47); END_STATE(); - case 44: + case 47: ACCEPT_TOKEN(anon_sym_GT_EQ); END_STATE(); - case 45: + case 48: ACCEPT_TOKEN(anon_sym_PLUS); END_STATE(); - case 46: + case 49: ACCEPT_TOKEN(anon_sym_SLASH); - if (lookahead == '/') ADVANCE(51); + if (lookahead == '/') ADVANCE(56); END_STATE(); - case 47: + case 50: ACCEPT_TOKEN(sym_int_lit); - if (lookahead == 'u') ADVANCE(4); - if (('0' <= lookahead && lookahead <= '9')) ADVANCE(47); + if (lookahead == 'i') ADVANCE(6); + if (lookahead == 'u') ADVANCE(7); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(50); END_STATE(); - case 48: + case 51: ACCEPT_TOKEN(sym_u8_lit); END_STATE(); - case 49: + case 52: + ACCEPT_TOKEN(sym_i64_lit); + END_STATE(); + case 53: + ACCEPT_TOKEN(sym_u64_lit); + END_STATE(); + case 54: ACCEPT_TOKEN(sym_string_lit); END_STATE(); - case 50: + case 55: ACCEPT_TOKEN(sym_identifier); if (('0' <= lookahead && lookahead <= '9') || ('A' <= lookahead && lookahead <= 'Z') || lookahead == '_' || - ('a' <= lookahead && lookahead <= 'z')) ADVANCE(50); + ('a' <= lookahead && lookahead <= 'z')) ADVANCE(55); END_STATE(); - case 51: + case 56: ACCEPT_TOKEN(sym_comment); if (lookahead != 0 && - lookahead != '\n') ADVANCE(51); + lookahead != '\n') ADVANCE(56); END_STATE(); default: return false; @@ -1886,126 +1917,126 @@ static bool ts_lex_keywords(TSLexer *lexer, TSStateId state) { static const TSLexMode ts_lex_modes[STATE_COUNT] = { [0] = {.lex_state = 0}, [1] = {.lex_state = 0}, - [2] = {.lex_state = 10}, - [3] = {.lex_state = 10}, - [4] = {.lex_state = 10}, - [5] = {.lex_state = 10}, - [6] = {.lex_state = 10}, - [7] = {.lex_state = 10}, - [8] = {.lex_state = 10}, - [9] = {.lex_state = 11}, - [10] = {.lex_state = 10}, - [11] = {.lex_state = 10}, - [12] = {.lex_state = 10}, - [13] = {.lex_state = 10}, - 
[14] = {.lex_state = 10}, - [15] = {.lex_state = 10}, - [16] = {.lex_state = 10}, - [17] = {.lex_state = 10}, - [18] = {.lex_state = 10}, - [19] = {.lex_state = 10}, - [20] = {.lex_state = 10}, - [21] = {.lex_state = 10}, - [22] = {.lex_state = 10}, - [23] = {.lex_state = 10}, - [24] = {.lex_state = 10}, - [25] = {.lex_state = 10}, - [26] = {.lex_state = 10}, - [27] = {.lex_state = 10}, - [28] = {.lex_state = 10}, - [29] = {.lex_state = 10}, - [30] = {.lex_state = 10}, - [31] = {.lex_state = 10}, - [32] = {.lex_state = 10}, - [33] = {.lex_state = 10}, - [34] = {.lex_state = 10}, - [35] = {.lex_state = 10}, - [36] = {.lex_state = 10}, - [37] = {.lex_state = 10}, - [38] = {.lex_state = 10}, - [39] = {.lex_state = 10}, - [40] = {.lex_state = 10}, - [41] = {.lex_state = 10}, - [42] = {.lex_state = 10}, - [43] = {.lex_state = 10}, - [44] = {.lex_state = 10}, - [45] = {.lex_state = 0}, - [46] = {.lex_state = 10}, - [47] = {.lex_state = 0}, - [48] = {.lex_state = 10}, - [49] = {.lex_state = 10}, - [50] = {.lex_state = 10}, - [51] = {.lex_state = 10}, - [52] = {.lex_state = 10}, - [53] = {.lex_state = 10}, - [54] = {.lex_state = 10}, - [55] = {.lex_state = 10}, - [56] = {.lex_state = 10}, - [57] = {.lex_state = 10}, - [58] = {.lex_state = 10}, - [59] = {.lex_state = 10}, - [60] = {.lex_state = 0}, - [61] = {.lex_state = 10}, - [62] = {.lex_state = 10}, - [63] = {.lex_state = 10}, - [64] = {.lex_state = 10}, - [65] = {.lex_state = 10}, - [66] = {.lex_state = 10}, - [67] = {.lex_state = 10}, - [68] = {.lex_state = 11}, + [2] = {.lex_state = 13}, + [3] = {.lex_state = 13}, + [4] = {.lex_state = 13}, + [5] = {.lex_state = 13}, + [6] = {.lex_state = 13}, + [7] = {.lex_state = 13}, + [8] = {.lex_state = 13}, + [9] = {.lex_state = 14}, + [10] = {.lex_state = 13}, + [11] = {.lex_state = 13}, + [12] = {.lex_state = 13}, + [13] = {.lex_state = 13}, + [14] = {.lex_state = 13}, + [15] = {.lex_state = 13}, + [16] = {.lex_state = 13}, + [17] = {.lex_state = 13}, + [18] = {.lex_state = 
13}, + [19] = {.lex_state = 13}, + [20] = {.lex_state = 13}, + [21] = {.lex_state = 13}, + [22] = {.lex_state = 13}, + [23] = {.lex_state = 13}, + [24] = {.lex_state = 13}, + [25] = {.lex_state = 13}, + [26] = {.lex_state = 13}, + [27] = {.lex_state = 13}, + [28] = {.lex_state = 13}, + [29] = {.lex_state = 13}, + [30] = {.lex_state = 13}, + [31] = {.lex_state = 13}, + [32] = {.lex_state = 13}, + [33] = {.lex_state = 13}, + [34] = {.lex_state = 13}, + [35] = {.lex_state = 13}, + [36] = {.lex_state = 13}, + [37] = {.lex_state = 13}, + [38] = {.lex_state = 13}, + [39] = {.lex_state = 13}, + [40] = {.lex_state = 13}, + [41] = {.lex_state = 13}, + [42] = {.lex_state = 13}, + [43] = {.lex_state = 13}, + [44] = {.lex_state = 13}, + [45] = {.lex_state = 13}, + [46] = {.lex_state = 13}, + [47] = {.lex_state = 13}, + [48] = {.lex_state = 13}, + [49] = {.lex_state = 13}, + [50] = {.lex_state = 13}, + [51] = {.lex_state = 13}, + [52] = {.lex_state = 13}, + [53] = {.lex_state = 13}, + [54] = {.lex_state = 13}, + [55] = {.lex_state = 13}, + [56] = {.lex_state = 13}, + [57] = {.lex_state = 13}, + [58] = {.lex_state = 13}, + [59] = {.lex_state = 13}, + [60] = {.lex_state = 13}, + [61] = {.lex_state = 13}, + [62] = {.lex_state = 13}, + [63] = {.lex_state = 13}, + [64] = {.lex_state = 13}, + [65] = {.lex_state = 13}, + [66] = {.lex_state = 13}, + [67] = {.lex_state = 0}, + [68] = {.lex_state = 0}, [69] = {.lex_state = 0}, - [70] = {.lex_state = 10}, - [71] = {.lex_state = 10}, - [72] = {.lex_state = 11}, - [73] = {.lex_state = 11}, - [74] = {.lex_state = 11}, - [75] = {.lex_state = 11}, - [76] = {.lex_state = 10}, - [77] = {.lex_state = 0}, + [70] = {.lex_state = 14}, + [71] = {.lex_state = 0}, + [72] = {.lex_state = 13}, + [73] = {.lex_state = 14}, + [74] = {.lex_state = 13}, + [75] = {.lex_state = 14}, + [76] = {.lex_state = 14}, + [77] = {.lex_state = 14}, [78] = {.lex_state = 0}, - [79] = {.lex_state = 0}, + [79] = {.lex_state = 13}, [80] = {.lex_state = 0}, - [81] = {.lex_state 
= 10}, - [82] = {.lex_state = 10}, - [83] = {.lex_state = 11}, - [84] = {.lex_state = 11}, - [85] = {.lex_state = 11}, - [86] = {.lex_state = 11}, - [87] = {.lex_state = 10}, - [88] = {.lex_state = 10}, - [89] = {.lex_state = 10}, - [90] = {.lex_state = 10}, - [91] = {.lex_state = 10}, - [92] = {.lex_state = 10}, - [93] = {.lex_state = 10}, - [94] = {.lex_state = 10}, - [95] = {.lex_state = 10}, - [96] = {.lex_state = 10}, - [97] = {.lex_state = 10}, - [98] = {.lex_state = 10}, - [99] = {.lex_state = 10}, - [100] = {.lex_state = 10}, - [101] = {.lex_state = 10}, - [102] = {.lex_state = 10}, - [103] = {.lex_state = 10}, - [104] = {.lex_state = 10}, - [105] = {.lex_state = 10}, - [106] = {.lex_state = 10}, - [107] = {.lex_state = 10}, - [108] = {.lex_state = 10}, - [109] = {.lex_state = 10}, - [110] = {.lex_state = 0}, - [111] = {.lex_state = 0}, - [112] = {.lex_state = 0}, - [113] = {.lex_state = 0}, - [114] = {.lex_state = 10}, - [115] = {.lex_state = 12}, - [116] = {.lex_state = 12}, - [117] = {.lex_state = 12}, - [118] = {.lex_state = 10}, - [119] = {.lex_state = 12}, - [120] = {.lex_state = 12}, - [121] = {.lex_state = 12}, + [81] = {.lex_state = 13}, + [82] = {.lex_state = 0}, + [83] = {.lex_state = 0}, + [84] = {.lex_state = 13}, + [85] = {.lex_state = 14}, + [86] = {.lex_state = 13}, + [87] = {.lex_state = 14}, + [88] = {.lex_state = 13}, + [89] = {.lex_state = 13}, + [90] = {.lex_state = 13}, + [91] = {.lex_state = 13}, + [92] = {.lex_state = 14}, + [93] = {.lex_state = 14}, + [94] = {.lex_state = 13}, + [95] = {.lex_state = 13}, + [96] = {.lex_state = 13}, + [97] = {.lex_state = 13}, + [98] = {.lex_state = 0}, + [99] = {.lex_state = 0}, + [100] = {.lex_state = 13}, + [101] = {.lex_state = 13}, + [102] = {.lex_state = 13}, + [103] = {.lex_state = 0}, + [104] = {.lex_state = 13}, + [105] = {.lex_state = 13}, + [106] = {.lex_state = 13}, + [107] = {.lex_state = 13}, + [108] = {.lex_state = 13}, + [109] = {.lex_state = 13}, + [110] = {.lex_state = 13}, + [111] 
= {.lex_state = 13}, + [112] = {.lex_state = 13}, + [113] = {.lex_state = 13}, + [114] = {.lex_state = 0}, + [115] = {.lex_state = 15}, + [116] = {.lex_state = 15}, + [117] = {.lex_state = 15}, + [118] = {.lex_state = 13}, + [119] = {.lex_state = 15}, + [120] = {.lex_state = 15}, + [121] = {.lex_state = 15}, [122] = {.lex_state = 0}, [123] = {.lex_state = 0}, [124] = {.lex_state = 0}, @@ -2046,11 +2077,11 @@ static const TSLexMode ts_lex_modes[STATE_COUNT] = { [159] = {.lex_state = 0}, [160] = {.lex_state = 0}, [161] = {.lex_state = 0}, - [162] = {.lex_state = 0}, + [162] = {.lex_state = 13}, [163] = {.lex_state = 0}, [164] = {.lex_state = 0}, [165] = {.lex_state = 0}, - [166] = {.lex_state = 10}, + [166] = {.lex_state = 0}, [167] = {.lex_state = 0}, [168] = {.lex_state = 0}, [169] = {.lex_state = 0}, @@ -2065,11 +2096,11 @@ static const TSLexMode ts_lex_modes[STATE_COUNT] = { [178] = {.lex_state = 0}, [179] = {.lex_state = 0}, [180] = {.lex_state = 0}, - [181] = {.lex_state = 12}, + [181] = {.lex_state = 0}, [182] = {.lex_state = 0}, [183] = {.lex_state = 0}, [184] = {.lex_state = 0}, - [185] = {.lex_state = 12}, + [185] = {.lex_state = 0}, [186] = {.lex_state = 0}, [187] = {.lex_state = 0}, [188] = {.lex_state = 0}, @@ -2078,8 +2109,8 @@ static const TSLexMode ts_lex_modes[STATE_COUNT] = { [191] = {.lex_state = 0}, [192] = {.lex_state = 0}, [193] = {.lex_state = 0}, - [194] = {.lex_state = 0}, - [195] = {.lex_state = 0}, + [194] = {.lex_state = 15}, + [195] = {.lex_state = 15}, [196] = {.lex_state = 0}, [197] = {.lex_state = 0}, [198] = {.lex_state = 0}, @@ -2092,18 +2123,18 @@ static const TSLexMode ts_lex_modes[STATE_COUNT] = { [205] = {.lex_state = 0}, [206] = {.lex_state = 0}, [207] = {.lex_state = 0}, - [208] = {.lex_state = 0}, + [208] = {.lex_state = 15}, [209] = {.lex_state = 0}, [210] = {.lex_state = 0}, - [211] = {.lex_state = 0}, - [212] = {.lex_state = 12}, + [211] = {.lex_state = 15}, + [212] = {.lex_state = 0}, [213] = {.lex_state = 0}, - [214] = 
{.lex_state = 12}, + [214] = {.lex_state = 0}, [215] = {.lex_state = 0}, [216] = {.lex_state = 0}, - [217] = {.lex_state = 0}, - [218] = {.lex_state = 10}, - [219] = {.lex_state = 12}, + [217] = {.lex_state = 13}, + [218] = {.lex_state = 0}, + [219] = {.lex_state = 0}, [220] = {.lex_state = 0}, [221] = {.lex_state = 0}, [222] = {.lex_state = 0}, @@ -2123,51 +2154,51 @@ static const TSLexMode ts_lex_modes[STATE_COUNT] = { [236] = {.lex_state = 0}, [237] = {.lex_state = 0}, [238] = {.lex_state = 0}, - [239] = {.lex_state = 0}, + [239] = {.lex_state = 15}, [240] = {.lex_state = 0}, [241] = {.lex_state = 0}, [242] = {.lex_state = 0}, [243] = {.lex_state = 0}, - [244] = {.lex_state = 10}, - [245] = {.lex_state = 0}, + [244] = {.lex_state = 0}, + [245] = {.lex_state = 15}, [246] = {.lex_state = 0}, [247] = {.lex_state = 0}, [248] = {.lex_state = 0}, - [249] = {.lex_state = 12}, - [250] = {.lex_state = 12}, + [249] = {.lex_state = 13}, + [250] = {.lex_state = 0}, [251] = {.lex_state = 0}, [252] = {.lex_state = 0}, [253] = {.lex_state = 0}, [254] = {.lex_state = 0}, - [255] = {.lex_state = 10}, + [255] = {.lex_state = 13}, [256] = {.lex_state = 0}, [257] = {.lex_state = 0}, [258] = {.lex_state = 0}, - [259] = {.lex_state = 0}, + [259] = {.lex_state = 13}, [260] = {.lex_state = 0}, [261] = {.lex_state = 0}, - [262] = {.lex_state = 10}, - [263] = {.lex_state = 0}, + [262] = {.lex_state = 0}, + [263] = {.lex_state = 13}, [264] = {.lex_state = 0}, [265] = {.lex_state = 0}, [266] = {.lex_state = 0}, [267] = {.lex_state = 0}, - [268] = {.lex_state = 12}, + [268] = {.lex_state = 0}, [269] = {.lex_state = 0}, [270] = {.lex_state = 0}, [271] = {.lex_state = 0}, - [272] = {.lex_state = 0}, + [272] = {.lex_state = 15}, [273] = {.lex_state = 0}, [274] = {.lex_state = 0}, [275] = {.lex_state = 0}, [276] = {.lex_state = 0}, - [277] = {.lex_state = 10}, - [278] = {.lex_state = 11}, + [277] = {.lex_state = 0}, + [278] = {.lex_state = 14}, [279] = {.lex_state = 0}, [280] = {.lex_state = 
0}, [281] = {.lex_state = 0}, [282] = {.lex_state = 0}, - [283] = {.lex_state = 0}, + [283] = {.lex_state = 15}, }; static const uint16_t ts_parse_table[LARGE_STATE_COUNT][SYMBOL_COUNT] = { @@ -2230,13 +2261,15 @@ static const uint16_t ts_parse_table[LARGE_STATE_COUNT][SYMBOL_COUNT] = { [anon_sym_unit] = ACTIONS(1), [sym_int_lit] = ACTIONS(1), [sym_u8_lit] = ACTIONS(1), + [sym_i64_lit] = ACTIONS(1), + [sym_u64_lit] = ACTIONS(1), [sym_string_lit] = ACTIONS(1), [sym_comment] = ACTIONS(3), }, [STATE(1)] = { - [sym_source_file] = STATE(271), - [sym_package_decl] = STATE(228), - [sym_module_decl] = STATE(45), + [sym_source_file] = STATE(247), + [sym_package_decl] = STATE(237), + [sym_module_decl] = STATE(69), [anon_sym_package] = ACTIONS(5), [anon_sym_module] = ACTIONS(7), [sym_comment] = ACTIONS(3), @@ -2255,7 +2288,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_DOT, STATE(3), 1, aux_sym_path_expr_repeat1, - STATE(73), 1, + STATE(75), 1, aux_sym_module_path_repeat1, ACTIONS(9), 14, anon_sym_let, @@ -2272,7 +2305,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - ACTIONS(13), 17, + ACTIONS(13), 19, anon_sym_SEMI, anon_sym_RBRACE, anon_sym_COMMA, @@ -2289,8 +2322,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_GT_EQ, anon_sym_PLUS, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, - [54] = 5, + [56] = 5, ACTIONS(3), 1, sym_comment, STATE(4), 1, @@ -2313,7 +2348,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - ACTIONS(21), 19, + ACTIONS(21), 21, anon_sym_SEMI, anon_sym_LBRACE, anon_sym_RBRACE, @@ -2332,8 +2367,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_GT_EQ, anon_sym_PLUS, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, - [102] = 5, + [106] = 5, ACTIONS(3), 1, sym_comment, STATE(4), 1, @@ -2356,7 +2393,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - 
ACTIONS(28), 19, + ACTIONS(28), 21, anon_sym_SEMI, anon_sym_LBRACE, anon_sym_RBRACE, @@ -2375,8 +2412,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_GT_EQ, anon_sym_PLUS, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, - [150] = 17, + [156] = 17, ACTIONS(3), 1, sym_comment, ACTIONS(30), 1, @@ -2397,14 +2436,11 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_match, STATE(38), 1, sym_expression, - STATE(260), 1, + STATE(264), 1, sym_type_path, ACTIONS(46), 2, anon_sym_BANG, anon_sym_DASH, - ACTIONS(50), 2, - sym_u8_lit, - sym_string_lit, STATE(7), 2, sym_statement, aux_sym_block_repeat1, @@ -2413,7 +2449,12 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_false, anon_sym_unit, sym_int_lit, - STATE(101), 6, + ACTIONS(50), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, + STATE(84), 6, sym_let_stmt, sym_assign_stmt, sym_return_stmt, @@ -2430,7 +2471,7 @@ static const uint16_t ts_small_parse_table[] = { sym_binary_expr, sym_grouping, sym_literal, - [221] = 17, + [229] = 17, ACTIONS(3), 1, sym_comment, ACTIONS(30), 1, @@ -2451,14 +2492,11 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_RBRACE, STATE(38), 1, sym_expression, - STATE(260), 1, + STATE(264), 1, sym_type_path, ACTIONS(46), 2, anon_sym_BANG, anon_sym_DASH, - ACTIONS(50), 2, - sym_u8_lit, - sym_string_lit, STATE(7), 2, sym_statement, aux_sym_block_repeat1, @@ -2467,7 +2505,12 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_false, anon_sym_unit, sym_int_lit, - STATE(101), 6, + ACTIONS(50), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, + STATE(84), 6, sym_let_stmt, sym_assign_stmt, sym_return_stmt, @@ -2484,7 +2527,7 @@ static const uint16_t ts_small_parse_table[] = { sym_binary_expr, sym_grouping, sym_literal, - [292] = 17, + [302] = 17, ACTIONS(3), 1, sym_comment, ACTIONS(54), 1, @@ -2505,14 +2548,11 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_match, STATE(38), 1, sym_expression, - 
STATE(260), 1, + STATE(264), 1, sym_type_path, ACTIONS(77), 2, anon_sym_BANG, anon_sym_DASH, - ACTIONS(83), 2, - sym_u8_lit, - sym_string_lit, STATE(7), 2, sym_statement, aux_sym_block_repeat1, @@ -2521,7 +2561,12 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_false, anon_sym_unit, sym_int_lit, - STATE(101), 6, + ACTIONS(83), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, + STATE(84), 6, sym_let_stmt, sym_assign_stmt, sym_return_stmt, @@ -2538,7 +2583,7 @@ static const uint16_t ts_small_parse_table[] = { sym_binary_expr, sym_grouping, sym_literal, - [363] = 3, + [375] = 3, ACTIONS(3), 1, sym_comment, ACTIONS(23), 14, @@ -2556,7 +2601,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - ACTIONS(28), 21, + ACTIONS(28), 23, anon_sym_COLON_COLON, anon_sym_SEMI, anon_sym_LBRACE, @@ -2577,8 +2622,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_GT_EQ, anon_sym_PLUS, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, - [406] = 9, + [420] = 9, ACTIONS(3), 1, sym_comment, ACTIONS(11), 1, @@ -2591,7 +2638,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_EQ, STATE(3), 1, aux_sym_path_expr_repeat1, - STATE(73), 1, + STATE(75), 1, aux_sym_module_path_repeat1, ACTIONS(9), 14, anon_sym_let, @@ -2608,7 +2655,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - ACTIONS(13), 15, + ACTIONS(13), 17, anon_sym_SEMI, anon_sym_RBRACE, anon_sym_LPAREN, @@ -2623,8 +2670,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_GT_EQ, anon_sym_PLUS, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, - [461] = 17, + [477] = 17, ACTIONS(3), 1, sym_comment, ACTIONS(30), 1, @@ -2645,14 +2694,11 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_RBRACE, STATE(38), 1, sym_expression, - STATE(260), 1, + STATE(264), 1, sym_type_path, ACTIONS(46), 2, anon_sym_BANG, anon_sym_DASH, - ACTIONS(50), 2, - sym_u8_lit, - 
sym_string_lit, STATE(6), 2, sym_statement, aux_sym_block_repeat1, @@ -2661,7 +2707,12 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_false, anon_sym_unit, sym_int_lit, - STATE(101), 6, + ACTIONS(50), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, + STATE(84), 6, sym_let_stmt, sym_assign_stmt, sym_return_stmt, @@ -2678,7 +2729,7 @@ static const uint16_t ts_small_parse_table[] = { sym_binary_expr, sym_grouping, sym_literal, - [532] = 17, + [550] = 17, ACTIONS(3), 1, sym_comment, ACTIONS(30), 1, @@ -2699,14 +2750,11 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_RBRACE, STATE(38), 1, sym_expression, - STATE(260), 1, + STATE(264), 1, sym_type_path, ACTIONS(46), 2, anon_sym_BANG, anon_sym_DASH, - ACTIONS(50), 2, - sym_u8_lit, - sym_string_lit, STATE(5), 2, sym_statement, aux_sym_block_repeat1, @@ -2715,7 +2763,12 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_false, anon_sym_unit, sym_int_lit, - STATE(101), 6, + ACTIONS(50), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, + STATE(84), 6, sym_let_stmt, sym_assign_stmt, sym_return_stmt, @@ -2732,7 +2785,7 @@ static const uint16_t ts_small_parse_table[] = { sym_binary_expr, sym_grouping, sym_literal, - [603] = 5, + [623] = 5, ACTIONS(3), 1, sym_comment, ACTIONS(92), 1, @@ -2754,7 +2807,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - ACTIONS(28), 18, + ACTIONS(28), 20, anon_sym_SEMI, anon_sym_RBRACE, anon_sym_COMMA, @@ -2772,8 +2825,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_GT_EQ, anon_sym_PLUS, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, - [649] = 6, + [671] = 6, ACTIONS(3), 1, sym_comment, ACTIONS(101), 1, @@ -2797,7 +2852,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - ACTIONS(99), 16, + ACTIONS(99), 18, anon_sym_SEMI, anon_sym_LBRACE, anon_sym_RBRACE, @@ -2813,8 +2868,10 @@ static const uint16_t 
ts_small_parse_table[] = { anon_sym_GT_EQ, anon_sym_PLUS, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, - [696] = 3, + [720] = 3, ACTIONS(3), 1, sym_comment, ACTIONS(105), 14, @@ -2832,7 +2889,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - ACTIONS(107), 19, + ACTIONS(107), 21, anon_sym_SEMI, anon_sym_LBRACE, anon_sym_RBRACE, @@ -2851,8 +2908,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_GT_EQ, anon_sym_PLUS, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, - [737] = 6, + [763] = 6, ACTIONS(3), 1, sym_comment, ACTIONS(101), 1, @@ -2876,7 +2935,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - ACTIONS(111), 16, + ACTIONS(111), 18, anon_sym_SEMI, anon_sym_LBRACE, anon_sym_RBRACE, @@ -2892,8 +2951,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_GT_EQ, anon_sym_PLUS, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, - [784] = 3, + [812] = 3, ACTIONS(3), 1, sym_comment, ACTIONS(113), 14, @@ -2911,7 +2972,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - ACTIONS(115), 18, + ACTIONS(115), 20, anon_sym_SEMI, anon_sym_LBRACE, anon_sym_RBRACE, @@ -2929,8 +2990,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_GT_EQ, anon_sym_PLUS, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, - [824] = 3, + [854] = 3, ACTIONS(3), 1, sym_comment, ACTIONS(117), 14, @@ -2948,7 +3011,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - ACTIONS(119), 18, + ACTIONS(119), 20, anon_sym_SEMI, anon_sym_LBRACE, anon_sym_RBRACE, @@ -2966,8 +3029,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_GT_EQ, anon_sym_PLUS, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, - [864] = 11, + [896] = 11, ACTIONS(3), 1, sym_comment, ACTIONS(101), 1, @@ -2989,17 +3054,6 @@ static const uint16_t 
ts_small_parse_table[] = { ACTIONS(127), 2, anon_sym_LT_EQ, anon_sym_GT_EQ, - ACTIONS(111), 10, - anon_sym_SEMI, - anon_sym_RBRACE, - anon_sym_COMMA, - anon_sym_RPAREN, - anon_sym_PIPE_PIPE, - anon_sym_AMP_AMP, - anon_sym_EQ_EQ, - anon_sym_BANG_EQ, - sym_u8_lit, - sym_string_lit, ACTIONS(109), 11, anon_sym_let, anon_sym_return, @@ -3012,7 +3066,20 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - [920] = 12, + ACTIONS(111), 12, + anon_sym_SEMI, + anon_sym_RBRACE, + anon_sym_COMMA, + anon_sym_RPAREN, + anon_sym_PIPE_PIPE, + anon_sym_AMP_AMP, + anon_sym_EQ_EQ, + anon_sym_BANG_EQ, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, + [954] = 12, ACTIONS(3), 1, sym_comment, ACTIONS(101), 1, @@ -3037,7 +3104,7 @@ static const uint16_t ts_small_parse_table[] = { ACTIONS(131), 2, anon_sym_EQ_EQ, anon_sym_BANG_EQ, - ACTIONS(111), 8, + ACTIONS(111), 10, anon_sym_SEMI, anon_sym_RBRACE, anon_sym_COMMA, @@ -3045,6 +3112,8 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_PIPE_PIPE, anon_sym_AMP_AMP, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, ACTIONS(109), 11, anon_sym_let, @@ -3058,7 +3127,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - [978] = 3, + [1014] = 3, ACTIONS(3), 1, sym_comment, ACTIONS(133), 14, @@ -3076,7 +3145,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - ACTIONS(135), 18, + ACTIONS(135), 20, anon_sym_SEMI, anon_sym_LBRACE, anon_sym_RBRACE, @@ -3094,8 +3163,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_GT_EQ, anon_sym_PLUS, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, - [1018] = 3, + [1056] = 3, ACTIONS(3), 1, sym_comment, ACTIONS(137), 14, @@ -3113,7 +3184,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - ACTIONS(139), 18, + ACTIONS(139), 20, anon_sym_SEMI, anon_sym_LBRACE, anon_sym_RBRACE, @@ 
-3131,8 +3202,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_GT_EQ, anon_sym_PLUS, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, - [1058] = 3, + [1098] = 3, ACTIONS(3), 1, sym_comment, ACTIONS(141), 14, @@ -3150,7 +3223,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - ACTIONS(143), 18, + ACTIONS(143), 20, anon_sym_SEMI, anon_sym_LBRACE, anon_sym_RBRACE, @@ -3168,8 +3241,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_GT_EQ, anon_sym_PLUS, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, - [1098] = 9, + [1140] = 9, ACTIONS(3), 1, sym_comment, ACTIONS(101), 1, @@ -3185,19 +3260,6 @@ static const uint16_t ts_small_parse_table[] = { ACTIONS(123), 2, anon_sym_DASH, anon_sym_PLUS, - ACTIONS(111), 12, - anon_sym_SEMI, - anon_sym_RBRACE, - anon_sym_COMMA, - anon_sym_RPAREN, - anon_sym_PIPE_PIPE, - anon_sym_AMP_AMP, - anon_sym_EQ_EQ, - anon_sym_BANG_EQ, - anon_sym_LT_EQ, - anon_sym_GT_EQ, - sym_u8_lit, - sym_string_lit, ACTIONS(109), 13, anon_sym_let, anon_sym_return, @@ -3212,7 +3274,22 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - [1150] = 8, + ACTIONS(111), 14, + anon_sym_SEMI, + anon_sym_RBRACE, + anon_sym_COMMA, + anon_sym_RPAREN, + anon_sym_PIPE_PIPE, + anon_sym_AMP_AMP, + anon_sym_EQ_EQ, + anon_sym_BANG_EQ, + anon_sym_LT_EQ, + anon_sym_GT_EQ, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, + [1194] = 8, ACTIONS(3), 1, sym_comment, ACTIONS(101), 1, @@ -3239,7 +3316,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - ACTIONS(111), 14, + ACTIONS(111), 16, anon_sym_SEMI, anon_sym_RBRACE, anon_sym_COMMA, @@ -3253,8 +3330,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_GT_EQ, anon_sym_PLUS, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, - [1200] = 13, + [1246] = 13, ACTIONS(3), 1, sym_comment, ACTIONS(101), 1, @@ -3281,13 +3360,15 
@@ static const uint16_t ts_small_parse_table[] = { ACTIONS(131), 2, anon_sym_EQ_EQ, anon_sym_BANG_EQ, - ACTIONS(111), 7, + ACTIONS(111), 9, anon_sym_SEMI, anon_sym_RBRACE, anon_sym_COMMA, anon_sym_RPAREN, anon_sym_PIPE_PIPE, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, ACTIONS(109), 11, anon_sym_let, @@ -3301,7 +3382,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - [1260] = 3, + [1308] = 3, ACTIONS(3), 1, sym_comment, ACTIONS(147), 14, @@ -3319,7 +3400,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - ACTIONS(149), 18, + ACTIONS(149), 20, anon_sym_SEMI, anon_sym_LBRACE, anon_sym_RBRACE, @@ -3337,8 +3418,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_GT_EQ, anon_sym_PLUS, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, - [1300] = 3, + [1350] = 3, ACTIONS(3), 1, sym_comment, ACTIONS(151), 14, @@ -3356,7 +3439,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - ACTIONS(153), 18, + ACTIONS(153), 20, anon_sym_SEMI, anon_sym_LBRACE, anon_sym_RBRACE, @@ -3374,8 +3457,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_GT_EQ, anon_sym_PLUS, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, - [1340] = 3, + [1392] = 3, ACTIONS(3), 1, sym_comment, ACTIONS(155), 14, @@ -3393,7 +3478,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - ACTIONS(157), 18, + ACTIONS(157), 20, anon_sym_SEMI, anon_sym_LBRACE, anon_sym_RBRACE, @@ -3411,8 +3496,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_GT_EQ, anon_sym_PLUS, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, - [1380] = 3, + [1434] = 3, ACTIONS(3), 1, sym_comment, ACTIONS(159), 14, @@ -3430,7 +3517,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - ACTIONS(161), 18, + ACTIONS(161), 20, anon_sym_SEMI, 
anon_sym_LBRACE, anon_sym_RBRACE, @@ -3448,8 +3535,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_GT_EQ, anon_sym_PLUS, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, - [1420] = 3, + [1476] = 3, ACTIONS(3), 1, sym_comment, ACTIONS(163), 14, @@ -3467,7 +3556,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - ACTIONS(165), 18, + ACTIONS(165), 20, anon_sym_SEMI, anon_sym_LBRACE, anon_sym_RBRACE, @@ -3485,8 +3574,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_GT_EQ, anon_sym_PLUS, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, - [1460] = 3, + [1518] = 3, ACTIONS(3), 1, sym_comment, ACTIONS(167), 14, @@ -3504,7 +3595,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - ACTIONS(169), 18, + ACTIONS(169), 20, anon_sym_SEMI, anon_sym_LBRACE, anon_sym_RBRACE, @@ -3522,8 +3613,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_GT_EQ, anon_sym_PLUS, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, - [1500] = 3, + [1560] = 3, ACTIONS(3), 1, sym_comment, ACTIONS(171), 14, @@ -3541,7 +3634,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - ACTIONS(173), 18, + ACTIONS(173), 20, anon_sym_SEMI, anon_sym_LBRACE, anon_sym_RBRACE, @@ -3559,8 +3652,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_GT_EQ, anon_sym_PLUS, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, - [1540] = 3, + [1602] = 3, ACTIONS(3), 1, sym_comment, ACTIONS(175), 14, @@ -3578,7 +3673,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - ACTIONS(177), 18, + ACTIONS(177), 20, anon_sym_SEMI, anon_sym_LBRACE, anon_sym_RBRACE, @@ -3596,8 +3691,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_GT_EQ, anon_sym_PLUS, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, - [1580] = 3, + [1644] = 3, ACTIONS(3), 1, 
sym_comment, ACTIONS(179), 14, @@ -3615,7 +3712,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - ACTIONS(181), 18, + ACTIONS(181), 20, anon_sym_SEMI, anon_sym_LBRACE, anon_sym_RBRACE, @@ -3633,8 +3730,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_GT_EQ, anon_sym_PLUS, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, - [1620] = 15, + [1686] = 15, ACTIONS(3), 1, sym_comment, ACTIONS(101), 1, @@ -3665,9 +3764,11 @@ static const uint16_t ts_small_parse_table[] = { ACTIONS(131), 2, anon_sym_EQ_EQ, anon_sym_BANG_EQ, - ACTIONS(187), 3, + ACTIONS(187), 5, anon_sym_RBRACE, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, ACTIONS(183), 11, anon_sym_let, @@ -3681,7 +3782,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - [1682] = 15, + [1750] = 15, ACTIONS(3), 1, sym_comment, ACTIONS(101), 1, @@ -3712,9 +3813,11 @@ static const uint16_t ts_small_parse_table[] = { ACTIONS(131), 2, anon_sym_EQ_EQ, anon_sym_BANG_EQ, - ACTIONS(195), 3, + ACTIONS(195), 5, anon_sym_RBRACE, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, ACTIONS(191), 11, anon_sym_let, @@ -3728,7 +3831,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - [1744] = 15, + [1814] = 15, ACTIONS(3), 1, sym_comment, ACTIONS(101), 1, @@ -3759,9 +3862,11 @@ static const uint16_t ts_small_parse_table[] = { ACTIONS(131), 2, anon_sym_EQ_EQ, anon_sym_BANG_EQ, - ACTIONS(201), 3, + ACTIONS(201), 5, anon_sym_RBRACE, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, ACTIONS(197), 11, anon_sym_let, @@ -3775,7 +3880,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - [1806] = 15, + [1878] = 15, ACTIONS(3), 1, sym_comment, ACTIONS(101), 1, @@ -3806,9 +3911,11 @@ static const uint16_t ts_small_parse_table[] = { ACTIONS(131), 2, anon_sym_EQ_EQ, anon_sym_BANG_EQ, - ACTIONS(207), 3, + 
ACTIONS(207), 5, anon_sym_RBRACE, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, ACTIONS(203), 11, anon_sym_let, @@ -3822,7 +3929,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - [1868] = 15, + [1942] = 15, ACTIONS(3), 1, sym_comment, ACTIONS(101), 1, @@ -3853,9 +3960,11 @@ static const uint16_t ts_small_parse_table[] = { ACTIONS(131), 2, anon_sym_EQ_EQ, anon_sym_BANG_EQ, - ACTIONS(213), 3, + ACTIONS(213), 5, anon_sym_RBRACE, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, ACTIONS(209), 11, anon_sym_let, @@ -3869,7 +3978,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - [1930] = 13, + [2006] = 13, ACTIONS(3), 1, sym_comment, ACTIONS(215), 1, @@ -3884,14 +3993,11 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_match, STATE(37), 1, sym_expression, - STATE(260), 1, + STATE(264), 1, sym_type_path, ACTIONS(230), 2, anon_sym_BANG, anon_sym_DASH, - ACTIONS(236), 2, - sym_u8_lit, - sym_string_lit, ACTIONS(225), 4, anon_sym_let, anon_sym_return, @@ -3902,6 +4008,11 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_false, anon_sym_unit, sym_int_lit, + ACTIONS(236), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, STATE(32), 9, sym_match_expr, sym_call_expr, @@ -3912,7 +4023,7 @@ static const uint16_t ts_small_parse_table[] = { sym_binary_expr, sym_grouping, sym_literal, - [1986] = 11, + [2064] = 11, ACTIONS(3), 1, sym_comment, ACTIONS(34), 1, @@ -3923,21 +4034,23 @@ static const uint16_t ts_small_parse_table[] = { sym_identifier, ACTIONS(241), 1, anon_sym_RPAREN, - STATE(87), 1, + STATE(91), 1, sym_expression, - STATE(260), 1, + STATE(264), 1, sym_type_path, ACTIONS(46), 2, anon_sym_BANG, anon_sym_DASH, - ACTIONS(50), 2, - sym_u8_lit, - sym_string_lit, ACTIONS(48), 4, anon_sym_true, anon_sym_false, anon_sym_unit, sym_int_lit, + ACTIONS(50), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, STATE(32), 
9, sym_match_expr, sym_call_expr, @@ -3948,7 +4061,7 @@ static const uint16_t ts_small_parse_table[] = { sym_binary_expr, sym_grouping, sym_literal, - [2033] = 11, + [2113] = 11, ACTIONS(3), 1, sym_comment, ACTIONS(34), 1, @@ -3959,21 +4072,23 @@ static const uint16_t ts_small_parse_table[] = { sym_identifier, ACTIONS(243), 1, anon_sym_RPAREN, - STATE(93), 1, + STATE(102), 1, sym_expression, - STATE(260), 1, + STATE(264), 1, sym_type_path, ACTIONS(46), 2, anon_sym_BANG, anon_sym_DASH, - ACTIONS(50), 2, - sym_u8_lit, - sym_string_lit, ACTIONS(48), 4, anon_sym_true, anon_sym_false, anon_sym_unit, sym_int_lit, + ACTIONS(50), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, STATE(32), 9, sym_match_expr, sym_call_expr, @@ -3984,7 +4099,7 @@ static const uint16_t ts_small_parse_table[] = { sym_binary_expr, sym_grouping, sym_literal, - [2080] = 11, + [2162] = 11, ACTIONS(3), 1, sym_comment, ACTIONS(34), 1, @@ -3995,21 +4110,23 @@ static const uint16_t ts_small_parse_table[] = { sym_identifier, ACTIONS(245), 1, anon_sym_RPAREN, - STATE(93), 1, + STATE(102), 1, sym_expression, - STATE(260), 1, + STATE(264), 1, sym_type_path, ACTIONS(46), 2, anon_sym_BANG, anon_sym_DASH, - ACTIONS(50), 2, - sym_u8_lit, - sym_string_lit, ACTIONS(48), 4, anon_sym_true, anon_sym_false, anon_sym_unit, sym_int_lit, + ACTIONS(50), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, STATE(32), 9, sym_match_expr, sym_call_expr, @@ -4020,7 +4137,7 @@ static const uint16_t ts_small_parse_table[] = { sym_binary_expr, sym_grouping, sym_literal, - [2127] = 10, + [2211] = 10, ACTIONS(3), 1, sym_comment, ACTIONS(34), 1, @@ -4029,13 +4146,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_match, ACTIONS(247), 1, sym_identifier, - STATE(95), 1, + STATE(106), 1, sym_expression, - STATE(260), 1, + STATE(264), 1, sym_type_path, - ACTIONS(50), 2, - sym_u8_lit, - sym_string_lit, ACTIONS(249), 2, anon_sym_BANG, anon_sym_DASH, @@ -4044,6 +4158,11 @@ static const uint16_t 
ts_small_parse_table[] = { anon_sym_false, anon_sym_unit, sym_int_lit, + ACTIONS(50), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, STATE(32), 9, sym_match_expr, sym_call_expr, @@ -4054,44 +4173,7 @@ static const uint16_t ts_small_parse_table[] = { sym_binary_expr, sym_grouping, sym_literal, - [2171] = 13, - ACTIONS(3), 1, - sym_comment, - ACTIONS(251), 1, - ts_builtin_sym_end, - ACTIONS(253), 1, - anon_sym_use, - ACTIONS(255), 1, - anon_sym_pub, - ACTIONS(257), 1, - anon_sym_fn, - ACTIONS(259), 1, - anon_sym_extern, - ACTIONS(263), 1, - anon_sym_struct, - ACTIONS(265), 1, - anon_sym_enum, - ACTIONS(267), 1, - anon_sym_impl, - STATE(151), 1, - aux_sym_struct_decl_repeat1, - STATE(47), 2, - sym_use_decl, - aux_sym_source_file_repeat1, - ACTIONS(261), 4, - anon_sym_linear, - anon_sym_copy, - anon_sym_opaque, - anon_sym_capability, - STATE(77), 7, - sym__item, - sym_function_decl, - sym_extern_function_decl, - sym_struct_decl, - sym_enum_decl, - sym_impl_block, - aux_sym_source_file_repeat2, - [2221] = 10, + [2257] = 10, ACTIONS(3), 1, sym_comment, ACTIONS(34), 1, @@ -4102,19 +4184,21 @@ static const uint16_t ts_small_parse_table[] = { sym_identifier, STATE(35), 1, sym_expression, - STATE(260), 1, + STATE(264), 1, sym_type_path, ACTIONS(46), 2, anon_sym_BANG, anon_sym_DASH, - ACTIONS(50), 2, - sym_u8_lit, - sym_string_lit, ACTIONS(48), 4, anon_sym_true, anon_sym_false, anon_sym_unit, sym_int_lit, + ACTIONS(50), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, STATE(32), 9, sym_match_expr, sym_call_expr, @@ -4125,60 +4209,20 @@ static const uint16_t ts_small_parse_table[] = { sym_binary_expr, sym_grouping, sym_literal, - [2265] = 13, - ACTIONS(3), 1, - sym_comment, - ACTIONS(253), 1, - anon_sym_use, - ACTIONS(255), 1, - anon_sym_pub, - ACTIONS(257), 1, - anon_sym_fn, - ACTIONS(259), 1, - anon_sym_extern, - ACTIONS(263), 1, - anon_sym_struct, - ACTIONS(265), 1, - anon_sym_enum, - ACTIONS(267), 1, - anon_sym_impl, - ACTIONS(269), 1, - 
ts_builtin_sym_end, - STATE(151), 1, - aux_sym_struct_decl_repeat1, - STATE(113), 2, - sym_use_decl, - aux_sym_source_file_repeat1, - ACTIONS(261), 4, - anon_sym_linear, - anon_sym_copy, - anon_sym_opaque, - anon_sym_capability, - STATE(78), 7, - sym__item, - sym_function_decl, - sym_extern_function_decl, - sym_struct_decl, - sym_enum_decl, - sym_impl_block, - aux_sym_source_file_repeat2, - [2315] = 10, + [2303] = 10, ACTIONS(3), 1, sym_comment, ACTIONS(34), 1, anon_sym_LPAREN, ACTIONS(44), 1, anon_sym_match, - ACTIONS(247), 1, + ACTIONS(239), 1, sym_identifier, - STATE(88), 1, + STATE(18), 1, sym_expression, - STATE(260), 1, + STATE(264), 1, sym_type_path, - ACTIONS(50), 2, - sym_u8_lit, - sym_string_lit, - ACTIONS(249), 2, + ACTIONS(46), 2, anon_sym_BANG, anon_sym_DASH, ACTIONS(48), 4, @@ -4186,6 +4230,11 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_false, anon_sym_unit, sym_int_lit, + ACTIONS(50), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, STATE(32), 9, sym_match_expr, sym_call_expr, @@ -4196,7 +4245,7 @@ static const uint16_t ts_small_parse_table[] = { sym_binary_expr, sym_grouping, sym_literal, - [2359] = 10, + [2349] = 10, ACTIONS(3), 1, sym_comment, ACTIONS(34), 1, @@ -4205,21 +4254,23 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_match, ACTIONS(239), 1, sym_identifier, - STATE(15), 1, + STATE(23), 1, sym_expression, - STATE(260), 1, + STATE(264), 1, sym_type_path, ACTIONS(46), 2, anon_sym_BANG, anon_sym_DASH, - ACTIONS(50), 2, - sym_u8_lit, - sym_string_lit, ACTIONS(48), 4, anon_sym_true, anon_sym_false, anon_sym_unit, sym_int_lit, + ACTIONS(50), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, STATE(32), 9, sym_match_expr, sym_call_expr, @@ -4230,23 +4281,20 @@ static const uint16_t ts_small_parse_table[] = { sym_binary_expr, sym_grouping, sym_literal, - [2403] = 10, + [2395] = 10, ACTIONS(3), 1, sym_comment, ACTIONS(34), 1, anon_sym_LPAREN, ACTIONS(44), 1, anon_sym_match, - ACTIONS(247), 
1, + ACTIONS(239), 1, sym_identifier, - STATE(90), 1, + STATE(36), 1, sym_expression, - STATE(260), 1, + STATE(264), 1, sym_type_path, - ACTIONS(50), 2, - sym_u8_lit, - sym_string_lit, - ACTIONS(249), 2, + ACTIONS(46), 2, anon_sym_BANG, anon_sym_DASH, ACTIONS(48), 4, @@ -4254,6 +4302,11 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_false, anon_sym_unit, sym_int_lit, + ACTIONS(50), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, STATE(32), 9, sym_match_expr, sym_call_expr, @@ -4264,7 +4317,7 @@ static const uint16_t ts_small_parse_table[] = { sym_binary_expr, sym_grouping, sym_literal, - [2447] = 10, + [2441] = 10, ACTIONS(3), 1, sym_comment, ACTIONS(34), 1, @@ -4273,21 +4326,23 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_match, ACTIONS(239), 1, sym_identifier, - STATE(24), 1, + STATE(105), 1, sym_expression, - STATE(260), 1, + STATE(264), 1, sym_type_path, ACTIONS(46), 2, anon_sym_BANG, anon_sym_DASH, - ACTIONS(50), 2, - sym_u8_lit, - sym_string_lit, ACTIONS(48), 4, anon_sym_true, anon_sym_false, anon_sym_unit, sym_int_lit, + ACTIONS(50), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, STATE(32), 9, sym_match_expr, sym_call_expr, @@ -4298,7 +4353,7 @@ static const uint16_t ts_small_parse_table[] = { sym_binary_expr, sym_grouping, sym_literal, - [2491] = 10, + [2487] = 10, ACTIONS(3), 1, sym_comment, ACTIONS(34), 1, @@ -4307,21 +4362,23 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_match, ACTIONS(239), 1, sym_identifier, - STATE(25), 1, + STATE(109), 1, sym_expression, - STATE(260), 1, + STATE(264), 1, sym_type_path, ACTIONS(46), 2, anon_sym_BANG, anon_sym_DASH, - ACTIONS(50), 2, - sym_u8_lit, - sym_string_lit, ACTIONS(48), 4, anon_sym_true, anon_sym_false, anon_sym_unit, sym_int_lit, + ACTIONS(50), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, STATE(32), 9, sym_match_expr, sym_call_expr, @@ -4332,7 +4389,7 @@ static const uint16_t ts_small_parse_table[] = { 
sym_binary_expr, sym_grouping, sym_literal, - [2535] = 10, + [2533] = 10, ACTIONS(3), 1, sym_comment, ACTIONS(34), 1, @@ -4341,21 +4398,23 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_match, ACTIONS(239), 1, sym_identifier, - STATE(92), 1, + STATE(39), 1, sym_expression, - STATE(260), 1, + STATE(264), 1, sym_type_path, ACTIONS(46), 2, anon_sym_BANG, anon_sym_DASH, - ACTIONS(50), 2, - sym_u8_lit, - sym_string_lit, ACTIONS(48), 4, anon_sym_true, anon_sym_false, anon_sym_unit, sym_int_lit, + ACTIONS(50), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, STATE(32), 9, sym_match_expr, sym_call_expr, @@ -4373,23 +4432,25 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_LPAREN, ACTIONS(44), 1, anon_sym_match, - ACTIONS(239), 1, + ACTIONS(247), 1, sym_identifier, - STATE(19), 1, + STATE(100), 1, sym_expression, - STATE(260), 1, + STATE(264), 1, sym_type_path, - ACTIONS(46), 2, + ACTIONS(249), 2, anon_sym_BANG, anon_sym_DASH, - ACTIONS(50), 2, - sym_u8_lit, - sym_string_lit, ACTIONS(48), 4, anon_sym_true, anon_sym_false, anon_sym_unit, sym_int_lit, + ACTIONS(50), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, STATE(32), 9, sym_match_expr, sym_call_expr, @@ -4400,30 +4461,32 @@ static const uint16_t ts_small_parse_table[] = { sym_binary_expr, sym_grouping, sym_literal, - [2623] = 10, + [2625] = 10, ACTIONS(3), 1, sym_comment, ACTIONS(34), 1, anon_sym_LPAREN, ACTIONS(44), 1, anon_sym_match, - ACTIONS(239), 1, + ACTIONS(247), 1, sym_identifier, - STATE(18), 1, + STATE(101), 1, sym_expression, - STATE(260), 1, + STATE(264), 1, sym_type_path, - ACTIONS(46), 2, + ACTIONS(249), 2, anon_sym_BANG, anon_sym_DASH, - ACTIONS(50), 2, - sym_u8_lit, - sym_string_lit, ACTIONS(48), 4, anon_sym_true, anon_sym_false, anon_sym_unit, sym_int_lit, + ACTIONS(50), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, STATE(32), 9, sym_match_expr, sym_call_expr, @@ -4434,30 +4497,32 @@ static const uint16_t 
ts_small_parse_table[] = { sym_binary_expr, sym_grouping, sym_literal, - [2667] = 10, + [2671] = 10, ACTIONS(3), 1, sym_comment, ACTIONS(34), 1, anon_sym_LPAREN, ACTIONS(44), 1, anon_sym_match, - ACTIONS(239), 1, + ACTIONS(247), 1, sym_identifier, - STATE(23), 1, + STATE(107), 1, sym_expression, - STATE(260), 1, + STATE(264), 1, sym_type_path, - ACTIONS(46), 2, + ACTIONS(249), 2, anon_sym_BANG, anon_sym_DASH, - ACTIONS(50), 2, - sym_u8_lit, - sym_string_lit, ACTIONS(48), 4, anon_sym_true, anon_sym_false, anon_sym_unit, sym_int_lit, + ACTIONS(50), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, STATE(32), 9, sym_match_expr, sym_call_expr, @@ -4468,7 +4533,7 @@ static const uint16_t ts_small_parse_table[] = { sym_binary_expr, sym_grouping, sym_literal, - [2711] = 10, + [2717] = 10, ACTIONS(3), 1, sym_comment, ACTIONS(34), 1, @@ -4477,21 +4542,23 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_match, ACTIONS(239), 1, sym_identifier, - STATE(39), 1, + STATE(13), 1, sym_expression, - STATE(260), 1, + STATE(264), 1, sym_type_path, ACTIONS(46), 2, anon_sym_BANG, anon_sym_DASH, - ACTIONS(50), 2, - sym_u8_lit, - sym_string_lit, ACTIONS(48), 4, anon_sym_true, anon_sym_false, anon_sym_unit, sym_int_lit, + ACTIONS(50), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, STATE(32), 9, sym_match_expr, sym_call_expr, @@ -4502,23 +4569,20 @@ static const uint16_t ts_small_parse_table[] = { sym_binary_expr, sym_grouping, sym_literal, - [2755] = 10, + [2763] = 10, ACTIONS(3), 1, sym_comment, ACTIONS(34), 1, anon_sym_LPAREN, ACTIONS(44), 1, anon_sym_match, - ACTIONS(247), 1, + ACTIONS(239), 1, sym_identifier, - STATE(105), 1, + STATE(24), 1, sym_expression, - STATE(260), 1, + STATE(264), 1, sym_type_path, - ACTIONS(50), 2, - sym_u8_lit, - sym_string_lit, - ACTIONS(249), 2, + ACTIONS(46), 2, anon_sym_BANG, anon_sym_DASH, ACTIONS(48), 4, @@ -4526,6 +4590,11 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_false, anon_sym_unit, 
sym_int_lit, + ACTIONS(50), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, STATE(32), 9, sym_match_expr, sym_call_expr, @@ -4536,7 +4605,7 @@ static const uint16_t ts_small_parse_table[] = { sym_binary_expr, sym_grouping, sym_literal, - [2799] = 10, + [2809] = 10, ACTIONS(3), 1, sym_comment, ACTIONS(34), 1, @@ -4545,21 +4614,23 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_match, ACTIONS(239), 1, sym_identifier, - STATE(13), 1, + STATE(15), 1, sym_expression, - STATE(260), 1, + STATE(264), 1, sym_type_path, ACTIONS(46), 2, anon_sym_BANG, anon_sym_DASH, - ACTIONS(50), 2, - sym_u8_lit, - sym_string_lit, ACTIONS(48), 4, anon_sym_true, anon_sym_false, anon_sym_unit, sym_int_lit, + ACTIONS(50), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, STATE(32), 9, sym_match_expr, sym_call_expr, @@ -4570,44 +4641,7 @@ static const uint16_t ts_small_parse_table[] = { sym_binary_expr, sym_grouping, sym_literal, - [2843] = 13, - ACTIONS(3), 1, - sym_comment, - ACTIONS(253), 1, - anon_sym_use, - ACTIONS(255), 1, - anon_sym_pub, - ACTIONS(257), 1, - anon_sym_fn, - ACTIONS(259), 1, - anon_sym_extern, - ACTIONS(263), 1, - anon_sym_struct, - ACTIONS(265), 1, - anon_sym_enum, - ACTIONS(267), 1, - anon_sym_impl, - ACTIONS(271), 1, - ts_builtin_sym_end, - STATE(151), 1, - aux_sym_struct_decl_repeat1, - STATE(113), 2, - sym_use_decl, - aux_sym_source_file_repeat1, - ACTIONS(261), 4, - anon_sym_linear, - anon_sym_copy, - anon_sym_opaque, - anon_sym_capability, - STATE(79), 7, - sym__item, - sym_function_decl, - sym_extern_function_decl, - sym_struct_decl, - sym_enum_decl, - sym_impl_block, - aux_sym_source_file_repeat2, - [2893] = 10, + [2855] = 10, ACTIONS(3), 1, sym_comment, ACTIONS(34), 1, @@ -4616,21 +4650,23 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_match, ACTIONS(239), 1, sym_identifier, - STATE(36), 1, + STATE(102), 1, sym_expression, - STATE(260), 1, + STATE(264), 1, sym_type_path, ACTIONS(46), 2, anon_sym_BANG, 
anon_sym_DASH, - ACTIONS(50), 2, - sym_u8_lit, - sym_string_lit, ACTIONS(48), 4, anon_sym_true, anon_sym_false, anon_sym_unit, sym_int_lit, + ACTIONS(50), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, STATE(32), 9, sym_match_expr, sym_call_expr, @@ -4641,7 +4677,7 @@ static const uint16_t ts_small_parse_table[] = { sym_binary_expr, sym_grouping, sym_literal, - [2937] = 10, + [2901] = 10, ACTIONS(3), 1, sym_comment, ACTIONS(34), 1, @@ -4650,13 +4686,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_match, ACTIONS(247), 1, sym_identifier, - STATE(108), 1, + STATE(15), 1, sym_expression, - STATE(260), 1, + STATE(264), 1, sym_type_path, - ACTIONS(50), 2, - sym_u8_lit, - sym_string_lit, ACTIONS(249), 2, anon_sym_BANG, anon_sym_DASH, @@ -4665,6 +4698,11 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_false, anon_sym_unit, sym_int_lit, + ACTIONS(50), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, STATE(32), 9, sym_match_expr, sym_call_expr, @@ -4675,7 +4713,7 @@ static const uint16_t ts_small_parse_table[] = { sym_binary_expr, sym_grouping, sym_literal, - [2981] = 10, + [2947] = 10, ACTIONS(3), 1, sym_comment, ACTIONS(34), 1, @@ -4684,13 +4722,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_match, ACTIONS(247), 1, sym_identifier, - STATE(107), 1, + STATE(108), 1, sym_expression, - STATE(260), 1, + STATE(264), 1, sym_type_path, - ACTIONS(50), 2, - sym_u8_lit, - sym_string_lit, ACTIONS(249), 2, anon_sym_BANG, anon_sym_DASH, @@ -4699,6 +4734,11 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_false, anon_sym_unit, sym_int_lit, + ACTIONS(50), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, STATE(32), 9, sym_match_expr, sym_call_expr, @@ -4709,7 +4749,7 @@ static const uint16_t ts_small_parse_table[] = { sym_binary_expr, sym_grouping, sym_literal, - [3025] = 10, + [2993] = 10, ACTIONS(3), 1, sym_comment, ACTIONS(34), 1, @@ -4718,13 +4758,10 @@ static const uint16_t 
ts_small_parse_table[] = { anon_sym_match, ACTIONS(247), 1, sym_identifier, - STATE(96), 1, + STATE(110), 1, sym_expression, - STATE(260), 1, + STATE(264), 1, sym_type_path, - ACTIONS(50), 2, - sym_u8_lit, - sym_string_lit, ACTIONS(249), 2, anon_sym_BANG, anon_sym_DASH, @@ -4733,6 +4770,11 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_false, anon_sym_unit, sym_int_lit, + ACTIONS(50), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, STATE(32), 9, sym_match_expr, sym_call_expr, @@ -4743,30 +4785,32 @@ static const uint16_t ts_small_parse_table[] = { sym_binary_expr, sym_grouping, sym_literal, - [3069] = 10, + [3039] = 10, ACTIONS(3), 1, sym_comment, ACTIONS(34), 1, anon_sym_LPAREN, ACTIONS(44), 1, anon_sym_match, - ACTIONS(239), 1, + ACTIONS(247), 1, sym_identifier, - STATE(93), 1, + STATE(111), 1, sym_expression, - STATE(260), 1, + STATE(264), 1, sym_type_path, - ACTIONS(46), 2, + ACTIONS(249), 2, anon_sym_BANG, anon_sym_DASH, - ACTIONS(50), 2, - sym_u8_lit, - sym_string_lit, ACTIONS(48), 4, anon_sym_true, anon_sym_false, anon_sym_unit, sym_int_lit, + ACTIONS(50), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, STATE(32), 9, sym_match_expr, sym_call_expr, @@ -4777,7 +4821,7 @@ static const uint16_t ts_small_parse_table[] = { sym_binary_expr, sym_grouping, sym_literal, - [3113] = 10, + [3085] = 10, ACTIONS(3), 1, sym_comment, ACTIONS(34), 1, @@ -4786,13 +4830,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_match, ACTIONS(247), 1, sym_identifier, - STATE(15), 1, + STATE(112), 1, sym_expression, - STATE(260), 1, + STATE(264), 1, sym_type_path, - ACTIONS(50), 2, - sym_u8_lit, - sym_string_lit, ACTIONS(249), 2, anon_sym_BANG, anon_sym_DASH, @@ -4801,6 +4842,11 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_false, anon_sym_unit, sym_int_lit, + ACTIONS(50), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, STATE(32), 9, sym_match_expr, sym_call_expr, @@ -4811,7 +4857,7 @@ static 
const uint16_t ts_small_parse_table[] = { sym_binary_expr, sym_grouping, sym_literal, - [3157] = 10, + [3131] = 10, ACTIONS(3), 1, sym_comment, ACTIONS(34), 1, @@ -4820,14 +4866,47 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_match, ACTIONS(247), 1, sym_identifier, - STATE(109), 1, + STATE(13), 1, sym_expression, - STATE(260), 1, + STATE(264), 1, sym_type_path, - ACTIONS(50), 2, + ACTIONS(249), 2, + anon_sym_BANG, + anon_sym_DASH, + ACTIONS(48), 4, + anon_sym_true, + anon_sym_false, + anon_sym_unit, + sym_int_lit, + ACTIONS(50), 4, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, - ACTIONS(249), 2, + STATE(32), 9, + sym_match_expr, + sym_call_expr, + sym_try_expr, + sym_struct_literal, + sym_path_expr, + sym_unary_expr, + sym_binary_expr, + sym_grouping, + sym_literal, + [3177] = 10, + ACTIONS(3), 1, + sym_comment, + ACTIONS(34), 1, + anon_sym_LPAREN, + ACTIONS(44), 1, + anon_sym_match, + ACTIONS(239), 1, + sym_identifier, + STATE(25), 1, + sym_expression, + STATE(264), 1, + sym_type_path, + ACTIONS(46), 2, anon_sym_BANG, anon_sym_DASH, ACTIONS(48), 4, @@ -4835,6 +4914,11 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_false, anon_sym_unit, sym_int_lit, + ACTIONS(50), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, STATE(32), 9, sym_match_expr, sym_call_expr, @@ -4845,15 +4929,162 @@ static const uint16_t ts_small_parse_table[] = { sym_binary_expr, sym_grouping, sym_literal, - [3201] = 4, + [3223] = 10, ACTIONS(3), 1, sym_comment, - ACTIONS(273), 1, - anon_sym_COLON_COLON, - STATE(68), 1, - aux_sym_module_path_repeat1, - ACTIONS(95), 20, - ts_builtin_sym_end, + ACTIONS(34), 1, + anon_sym_LPAREN, + ACTIONS(44), 1, + anon_sym_match, + ACTIONS(239), 1, + sym_identifier, + STATE(19), 1, + sym_expression, + STATE(264), 1, + sym_type_path, + ACTIONS(46), 2, + anon_sym_BANG, + anon_sym_DASH, + ACTIONS(48), 4, + anon_sym_true, + anon_sym_false, + anon_sym_unit, + sym_int_lit, + ACTIONS(50), 4, + sym_u8_lit, + 
sym_i64_lit, + sym_u64_lit, + sym_string_lit, + STATE(32), 9, + sym_match_expr, + sym_call_expr, + sym_try_expr, + sym_struct_literal, + sym_path_expr, + sym_unary_expr, + sym_binary_expr, + sym_grouping, + sym_literal, + [3269] = 13, + ACTIONS(3), 1, + sym_comment, + ACTIONS(251), 1, + ts_builtin_sym_end, + ACTIONS(253), 1, + anon_sym_use, + ACTIONS(255), 1, + anon_sym_pub, + ACTIONS(257), 1, + anon_sym_fn, + ACTIONS(259), 1, + anon_sym_extern, + ACTIONS(263), 1, + anon_sym_struct, + ACTIONS(265), 1, + anon_sym_enum, + ACTIONS(267), 1, + anon_sym_impl, + STATE(151), 1, + aux_sym_struct_decl_repeat1, + STATE(71), 2, + sym_use_decl, + aux_sym_source_file_repeat1, + ACTIONS(261), 4, + anon_sym_linear, + anon_sym_copy, + anon_sym_opaque, + anon_sym_capability, + STATE(80), 7, + sym__item, + sym_function_decl, + sym_extern_function_decl, + sym_struct_decl, + sym_enum_decl, + sym_impl_block, + aux_sym_source_file_repeat2, + [3319] = 13, + ACTIONS(3), 1, + sym_comment, + ACTIONS(251), 1, + ts_builtin_sym_end, + ACTIONS(253), 1, + anon_sym_use, + ACTIONS(255), 1, + anon_sym_pub, + ACTIONS(257), 1, + anon_sym_fn, + ACTIONS(259), 1, + anon_sym_extern, + ACTIONS(263), 1, + anon_sym_struct, + ACTIONS(265), 1, + anon_sym_enum, + ACTIONS(267), 1, + anon_sym_impl, + STATE(151), 1, + aux_sym_struct_decl_repeat1, + STATE(114), 2, + sym_use_decl, + aux_sym_source_file_repeat1, + ACTIONS(261), 4, + anon_sym_linear, + anon_sym_copy, + anon_sym_opaque, + anon_sym_capability, + STATE(80), 7, + sym__item, + sym_function_decl, + sym_extern_function_decl, + sym_struct_decl, + sym_enum_decl, + sym_impl_block, + aux_sym_source_file_repeat2, + [3369] = 13, + ACTIONS(3), 1, + sym_comment, + ACTIONS(253), 1, + anon_sym_use, + ACTIONS(255), 1, + anon_sym_pub, + ACTIONS(257), 1, + anon_sym_fn, + ACTIONS(259), 1, + anon_sym_extern, + ACTIONS(263), 1, + anon_sym_struct, + ACTIONS(265), 1, + anon_sym_enum, + ACTIONS(267), 1, + anon_sym_impl, + ACTIONS(269), 1, + ts_builtin_sym_end, + STATE(151), 1, 
+ aux_sym_struct_decl_repeat1, + STATE(68), 2, + sym_use_decl, + aux_sym_source_file_repeat1, + ACTIONS(261), 4, + anon_sym_linear, + anon_sym_copy, + anon_sym_opaque, + anon_sym_capability, + STATE(78), 7, + sym__item, + sym_function_decl, + sym_extern_function_decl, + sym_struct_decl, + sym_enum_decl, + sym_impl_block, + aux_sym_source_file_repeat2, + [3419] = 4, + ACTIONS(3), 1, + sym_comment, + ACTIONS(271), 1, + anon_sym_COLON_COLON, + STATE(70), 1, + aux_sym_module_path_repeat1, + ACTIONS(95), 20, + ts_builtin_sym_end, anon_sym_use, anon_sym_pub, anon_sym_fn, @@ -4873,7 +5104,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_LBRACK, anon_sym_RBRACK, anon_sym_EQ, - [3233] = 13, + [3451] = 13, ACTIONS(3), 1, sym_comment, ACTIONS(253), 1, @@ -4890,11 +5121,11 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_enum, ACTIONS(267), 1, anon_sym_impl, - ACTIONS(269), 1, + ACTIONS(274), 1, ts_builtin_sym_end, STATE(151), 1, aux_sym_struct_decl_repeat1, - STATE(60), 2, + STATE(114), 2, sym_use_decl, aux_sym_source_file_repeat1, ACTIONS(261), 4, @@ -4902,7 +5133,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_copy, anon_sym_opaque, anon_sym_capability, - STATE(78), 7, + STATE(82), 7, sym__item, sym_function_decl, sym_extern_function_decl, @@ -4910,80 +5141,38 @@ static const uint16_t ts_small_parse_table[] = { sym_enum_decl, sym_impl_block, aux_sym_source_file_repeat2, - [3283] = 10, - ACTIONS(3), 1, - sym_comment, - ACTIONS(34), 1, - anon_sym_LPAREN, - ACTIONS(44), 1, - anon_sym_match, - ACTIONS(247), 1, - sym_identifier, - STATE(13), 1, - sym_expression, - STATE(260), 1, - sym_type_path, - ACTIONS(50), 2, - sym_u8_lit, - sym_string_lit, - ACTIONS(249), 2, - anon_sym_BANG, - anon_sym_DASH, - ACTIONS(48), 4, - anon_sym_true, - anon_sym_false, - anon_sym_unit, - sym_int_lit, - STATE(32), 9, - sym_match_expr, - sym_call_expr, - sym_try_expr, - sym_struct_literal, - sym_path_expr, - sym_unary_expr, - sym_binary_expr, - sym_grouping, - 
sym_literal, - [3327] = 10, + [3501] = 3, ACTIONS(3), 1, sym_comment, - ACTIONS(34), 1, + ACTIONS(278), 9, + anon_sym_RBRACE, + anon_sym_COMMA, anon_sym_LPAREN, - ACTIONS(44), 1, - anon_sym_match, - ACTIONS(239), 1, - sym_identifier, - STATE(97), 1, - sym_expression, - STATE(260), 1, - sym_type_path, - ACTIONS(46), 2, anon_sym_BANG, anon_sym_DASH, - ACTIONS(50), 2, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, - ACTIONS(48), 4, + ACTIONS(276), 12, + anon_sym_let, + anon_sym_return, + anon_sym_if, + anon_sym_else, + anon_sym_while, + anon_sym_match, + anon_sym__, anon_sym_true, anon_sym_false, anon_sym_unit, sym_int_lit, - STATE(32), 9, - sym_match_expr, - sym_call_expr, - sym_try_expr, - sym_struct_literal, - sym_path_expr, - sym_unary_expr, - sym_binary_expr, - sym_grouping, - sym_literal, - [3371] = 4, + sym_identifier, + [3530] = 4, ACTIONS(3), 1, sym_comment, - ACTIONS(276), 1, + ACTIONS(280), 1, anon_sym_COLON_COLON, - STATE(73), 1, + STATE(75), 1, aux_sym_module_path_repeat1, ACTIONS(15), 19, ts_builtin_sym_end, @@ -5005,14 +5194,40 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_LBRACK, anon_sym_RBRACK, anon_sym_EQ, - [3402] = 4, + [3561] = 3, + ACTIONS(3), 1, + sym_comment, + ACTIONS(284), 9, + anon_sym_RBRACE, + anon_sym_COMMA, + anon_sym_LPAREN, + anon_sym_BANG, + anon_sym_DASH, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, + ACTIONS(282), 12, + anon_sym_let, + anon_sym_return, + anon_sym_if, + anon_sym_else, + anon_sym_while, + anon_sym_match, + anon_sym__, + anon_sym_true, + anon_sym_false, + anon_sym_unit, + sym_int_lit, + sym_identifier, + [3590] = 4, ACTIONS(3), 1, sym_comment, - ACTIONS(276), 1, + ACTIONS(280), 1, anon_sym_COLON_COLON, - STATE(68), 1, + STATE(70), 1, aux_sym_module_path_repeat1, - ACTIONS(278), 19, + ACTIONS(286), 19, ts_builtin_sym_end, anon_sym_pub, anon_sym_fn, @@ -5032,7 +5247,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_LBRACK, anon_sym_RBRACK, anon_sym_EQ, - [3433] = 
2, + [3621] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(95), 21, @@ -5057,14 +5272,14 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_LBRACK, anon_sym_RBRACK, anon_sym_EQ, - [3460] = 4, + [3648] = 4, ACTIONS(3), 1, sym_comment, - ACTIONS(282), 1, + ACTIONS(290), 1, anon_sym_LBRACK, - STATE(83), 1, + STATE(93), 1, sym_type_args, - ACTIONS(280), 18, + ACTIONS(288), 18, ts_builtin_sym_end, anon_sym_pub, anon_sym_fn, @@ -5083,33 +5298,11 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_RPAREN, anon_sym_RBRACK, anon_sym_EQ, - [3490] = 3, - ACTIONS(3), 1, - sym_comment, - ACTIONS(286), 7, - anon_sym_RBRACE, - anon_sym_COMMA, - anon_sym_LPAREN, - anon_sym_BANG, - anon_sym_DASH, - sym_u8_lit, - sym_string_lit, - ACTIONS(284), 12, - anon_sym_let, - anon_sym_return, - anon_sym_if, - anon_sym_else, - anon_sym_while, - anon_sym_match, - anon_sym__, - anon_sym_true, - anon_sym_false, - anon_sym_unit, - sym_int_lit, - sym_identifier, - [3517] = 11, + [3678] = 11, ACTIONS(3), 1, sym_comment, + ACTIONS(251), 1, + ts_builtin_sym_end, ACTIONS(255), 1, anon_sym_pub, ACTIONS(257), 1, @@ -5122,8 +5315,6 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_enum, ACTIONS(267), 1, anon_sym_impl, - ACTIONS(269), 1, - ts_builtin_sym_end, STATE(151), 1, aux_sym_struct_decl_repeat1, ACTIONS(261), 4, @@ -5131,7 +5322,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_copy, anon_sym_opaque, anon_sym_capability, - STATE(80), 7, + STATE(83), 7, sym__item, sym_function_decl, sym_extern_function_decl, @@ -5139,10 +5330,39 @@ static const uint16_t ts_small_parse_table[] = { sym_enum_decl, sym_impl_block, aux_sym_source_file_repeat2, - [3560] = 11, + [3721] = 8, ACTIONS(3), 1, sym_comment, - ACTIONS(255), 1, + ACTIONS(17), 1, + anon_sym_DOT, + ACTIONS(292), 1, + anon_sym_COLON_COLON, + ACTIONS(294), 1, + anon_sym_LBRACE, + STATE(3), 1, + aux_sym_path_expr_repeat1, + STATE(75), 1, + aux_sym_module_path_repeat1, + ACTIONS(9), 3, + anon_sym_LT, + anon_sym_GT, + 
anon_sym_SLASH, + ACTIONS(13), 11, + anon_sym_LPAREN, + anon_sym_STAR, + anon_sym_QMARK, + anon_sym_DASH, + anon_sym_PIPE_PIPE, + anon_sym_AMP_AMP, + anon_sym_EQ_EQ, + anon_sym_BANG_EQ, + anon_sym_LT_EQ, + anon_sym_GT_EQ, + anon_sym_PLUS, + [3758] = 11, + ACTIONS(3), 1, + sym_comment, + ACTIONS(255), 1, anon_sym_pub, ACTIONS(257), 1, anon_sym_fn, @@ -5154,7 +5374,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_enum, ACTIONS(267), 1, anon_sym_impl, - ACTIONS(271), 1, + ACTIONS(274), 1, ts_builtin_sym_end, STATE(151), 1, aux_sym_struct_decl_repeat1, @@ -5163,7 +5383,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_copy, anon_sym_opaque, anon_sym_capability, - STATE(80), 7, + STATE(83), 7, sym__item, sym_function_decl, sym_extern_function_decl, @@ -5171,7 +5391,32 @@ static const uint16_t ts_small_parse_table[] = { sym_enum_decl, sym_impl_block, aux_sym_source_file_repeat2, - [3603] = 11, + [3801] = 4, + ACTIONS(3), 1, + sym_comment, + ACTIONS(301), 1, + anon_sym_else, + ACTIONS(299), 8, + anon_sym_RBRACE, + anon_sym_LPAREN, + anon_sym_BANG, + anon_sym_DASH, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, + ACTIONS(297), 10, + anon_sym_let, + anon_sym_return, + anon_sym_if, + anon_sym_while, + anon_sym_match, + anon_sym_true, + anon_sym_false, + anon_sym_unit, + sym_int_lit, + sym_identifier, + [3830] = 11, ACTIONS(3), 1, sym_comment, ACTIONS(255), 1, @@ -5186,7 +5431,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_enum, ACTIONS(267), 1, anon_sym_impl, - ACTIONS(288), 1, + ACTIONS(303), 1, ts_builtin_sym_end, STATE(151), 1, aux_sym_struct_decl_repeat1, @@ -5195,7 +5440,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_copy, anon_sym_opaque, anon_sym_capability, - STATE(80), 7, + STATE(83), 7, sym__item, sym_function_decl, sym_extern_function_decl, @@ -5203,31 +5448,31 @@ static const uint16_t ts_small_parse_table[] = { sym_enum_decl, sym_impl_block, aux_sym_source_file_repeat2, - [3646] = 11, + 
[3873] = 11, ACTIONS(3), 1, sym_comment, - ACTIONS(290), 1, + ACTIONS(305), 1, ts_builtin_sym_end, - ACTIONS(292), 1, + ACTIONS(307), 1, anon_sym_pub, - ACTIONS(295), 1, + ACTIONS(310), 1, anon_sym_fn, - ACTIONS(298), 1, + ACTIONS(313), 1, anon_sym_extern, - ACTIONS(304), 1, + ACTIONS(319), 1, anon_sym_struct, - ACTIONS(307), 1, + ACTIONS(322), 1, anon_sym_enum, - ACTIONS(310), 1, + ACTIONS(325), 1, anon_sym_impl, STATE(151), 1, aux_sym_struct_decl_repeat1, - ACTIONS(301), 4, + ACTIONS(316), 4, anon_sym_linear, anon_sym_copy, anon_sym_opaque, anon_sym_capability, - STATE(80), 7, + STATE(83), 7, sym__item, sym_function_decl, sym_extern_function_decl, @@ -5235,63 +5480,33 @@ static const uint16_t ts_small_parse_table[] = { sym_enum_decl, sym_impl_block, aux_sym_source_file_repeat2, - [3689] = 3, + [3916] = 3, ACTIONS(3), 1, sym_comment, - ACTIONS(315), 7, + ACTIONS(330), 8, anon_sym_RBRACE, - anon_sym_COMMA, anon_sym_LPAREN, anon_sym_BANG, anon_sym_DASH, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, - ACTIONS(313), 12, + ACTIONS(328), 10, anon_sym_let, anon_sym_return, anon_sym_if, - anon_sym_else, anon_sym_while, anon_sym_match, - anon_sym__, anon_sym_true, anon_sym_false, anon_sym_unit, sym_int_lit, sym_identifier, - [3716] = 8, - ACTIONS(3), 1, - sym_comment, - ACTIONS(17), 1, - anon_sym_DOT, - ACTIONS(317), 1, - anon_sym_COLON_COLON, - ACTIONS(319), 1, - anon_sym_LBRACE, - STATE(3), 1, - aux_sym_path_expr_repeat1, - STATE(73), 1, - aux_sym_module_path_repeat1, - ACTIONS(9), 3, - anon_sym_LT, - anon_sym_GT, - anon_sym_SLASH, - ACTIONS(13), 11, - anon_sym_LPAREN, - anon_sym_STAR, - anon_sym_QMARK, - anon_sym_DASH, - anon_sym_PIPE_PIPE, - anon_sym_AMP_AMP, - anon_sym_EQ_EQ, - anon_sym_BANG_EQ, - anon_sym_LT_EQ, - anon_sym_GT_EQ, - anon_sym_PLUS, - [3753] = 2, + [3942] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(322), 18, + ACTIONS(332), 18, ts_builtin_sym_end, anon_sym_pub, anon_sym_fn, @@ -5310,32 +5525,33 @@ static const uint16_t ts_small_parse_table[] 
= { anon_sym_RPAREN, anon_sym_RBRACK, anon_sym_EQ, - [3777] = 2, + [3966] = 3, ACTIONS(3), 1, sym_comment, - ACTIONS(324), 18, - ts_builtin_sym_end, - anon_sym_pub, - anon_sym_fn, - anon_sym_extern, - anon_sym_SEMI, - anon_sym_linear, - anon_sym_copy, - anon_sym_opaque, - anon_sym_capability, - anon_sym_struct, - anon_sym_LBRACE, + ACTIONS(336), 8, anon_sym_RBRACE, - anon_sym_enum, - anon_sym_impl, - anon_sym_COMMA, - anon_sym_RPAREN, - anon_sym_RBRACK, - anon_sym_EQ, - [3801] = 2, + anon_sym_LPAREN, + anon_sym_BANG, + anon_sym_DASH, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, + ACTIONS(334), 10, + anon_sym_let, + anon_sym_return, + anon_sym_if, + anon_sym_while, + anon_sym_match, + anon_sym_true, + anon_sym_false, + anon_sym_unit, + sym_int_lit, + sym_identifier, + [3992] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(326), 18, + ACTIONS(338), 18, ts_builtin_sym_end, anon_sym_pub, anon_sym_fn, @@ -5354,29 +5570,76 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_RPAREN, anon_sym_RBRACK, anon_sym_EQ, - [3825] = 2, + [4016] = 3, ACTIONS(3), 1, sym_comment, - ACTIONS(328), 18, - ts_builtin_sym_end, - anon_sym_pub, - anon_sym_fn, - anon_sym_extern, - anon_sym_SEMI, - anon_sym_linear, - anon_sym_copy, - anon_sym_opaque, - anon_sym_capability, - anon_sym_struct, - anon_sym_LBRACE, + ACTIONS(201), 8, anon_sym_RBRACE, - anon_sym_enum, - anon_sym_impl, - anon_sym_COMMA, - anon_sym_RPAREN, - anon_sym_RBRACK, - anon_sym_EQ, - [3849] = 15, + anon_sym_LPAREN, + anon_sym_BANG, + anon_sym_DASH, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, + ACTIONS(197), 10, + anon_sym_let, + anon_sym_return, + anon_sym_if, + anon_sym_while, + anon_sym_match, + anon_sym_true, + anon_sym_false, + anon_sym_unit, + sym_int_lit, + sym_identifier, + [4042] = 3, + ACTIONS(3), 1, + sym_comment, + ACTIONS(342), 8, + anon_sym_RBRACE, + anon_sym_LPAREN, + anon_sym_BANG, + anon_sym_DASH, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, + ACTIONS(340), 
10, + anon_sym_let, + anon_sym_return, + anon_sym_if, + anon_sym_while, + anon_sym_match, + anon_sym_true, + anon_sym_false, + anon_sym_unit, + sym_int_lit, + sym_identifier, + [4068] = 3, + ACTIONS(3), 1, + sym_comment, + ACTIONS(346), 8, + anon_sym_RBRACE, + anon_sym_LPAREN, + anon_sym_BANG, + anon_sym_DASH, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, + ACTIONS(344), 10, + anon_sym_let, + anon_sym_return, + anon_sym_if, + anon_sym_while, + anon_sym_match, + anon_sym_true, + anon_sym_false, + anon_sym_unit, + sym_int_lit, + sym_identifier, + [4094] = 15, ACTIONS(3), 1, sym_comment, ACTIONS(101), 1, @@ -5391,13 +5654,13 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_AMP_AMP, ACTIONS(189), 1, anon_sym_PIPE_PIPE, - ACTIONS(330), 1, + ACTIONS(348), 1, anon_sym_COMMA, - ACTIONS(332), 1, + ACTIONS(350), 1, anon_sym_RPAREN, STATE(21), 1, sym_arg_list, - STATE(186), 1, + STATE(183), 1, aux_sym_arg_list_repeat1, ACTIONS(123), 2, anon_sym_DASH, @@ -5411,52 +5674,63 @@ static const uint16_t ts_small_parse_table[] = { ACTIONS(131), 2, anon_sym_EQ_EQ, anon_sym_BANG_EQ, - [3899] = 14, + [4144] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(101), 1, - anon_sym_LPAREN, - ACTIONS(103), 1, - anon_sym_QMARK, - ACTIONS(334), 1, + ACTIONS(352), 18, + ts_builtin_sym_end, + anon_sym_pub, + anon_sym_fn, + anon_sym_extern, + anon_sym_SEMI, + anon_sym_linear, + anon_sym_copy, + anon_sym_opaque, + anon_sym_capability, + anon_sym_struct, anon_sym_LBRACE, - ACTIONS(336), 1, - anon_sym_STAR, - ACTIONS(340), 1, - anon_sym_PIPE_PIPE, - ACTIONS(342), 1, - anon_sym_AMP_AMP, - ACTIONS(350), 1, - anon_sym_SLASH, - STATE(21), 1, - sym_arg_list, - STATE(89), 1, - sym_block, - ACTIONS(338), 2, - anon_sym_DASH, - anon_sym_PLUS, - ACTIONS(344), 2, - anon_sym_EQ_EQ, - anon_sym_BANG_EQ, - ACTIONS(346), 2, - anon_sym_LT, - anon_sym_GT, - ACTIONS(348), 2, - anon_sym_LT_EQ, - anon_sym_GT_EQ, - [3946] = 4, + anon_sym_RBRACE, + anon_sym_enum, + anon_sym_impl, + anon_sym_COMMA, + 
anon_sym_RPAREN, + anon_sym_RBRACK, + anon_sym_EQ, + [4168] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(356), 1, - anon_sym_else, - ACTIONS(354), 6, + ACTIONS(354), 18, + ts_builtin_sym_end, + anon_sym_pub, + anon_sym_fn, + anon_sym_extern, + anon_sym_SEMI, + anon_sym_linear, + anon_sym_copy, + anon_sym_opaque, + anon_sym_capability, + anon_sym_struct, + anon_sym_LBRACE, + anon_sym_RBRACE, + anon_sym_enum, + anon_sym_impl, + anon_sym_COMMA, + anon_sym_RPAREN, + anon_sym_RBRACK, + anon_sym_EQ, + [4192] = 3, + ACTIONS(3), 1, + sym_comment, + ACTIONS(358), 8, anon_sym_RBRACE, anon_sym_LPAREN, anon_sym_BANG, anon_sym_DASH, sym_u8_lit, + sym_i64_lit, + sym_u64_lit, sym_string_lit, - ACTIONS(352), 10, + ACTIONS(356), 10, anon_sym_let, anon_sym_return, anon_sym_if, @@ -5467,63 +5741,202 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_unit, sym_int_lit, sym_identifier, - [3973] = 14, + [4218] = 3, ACTIONS(3), 1, sym_comment, - ACTIONS(101), 1, + ACTIONS(362), 8, + anon_sym_RBRACE, anon_sym_LPAREN, - ACTIONS(103), 1, - anon_sym_QMARK, - ACTIONS(334), 1, - anon_sym_LBRACE, - ACTIONS(336), 1, - anon_sym_STAR, - ACTIONS(340), 1, - anon_sym_PIPE_PIPE, - ACTIONS(342), 1, + anon_sym_BANG, + anon_sym_DASH, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, + ACTIONS(360), 10, + anon_sym_let, + anon_sym_return, + anon_sym_if, + anon_sym_while, + anon_sym_match, + anon_sym_true, + anon_sym_false, + anon_sym_unit, + sym_int_lit, + sym_identifier, + [4244] = 3, + ACTIONS(3), 1, + sym_comment, + ACTIONS(366), 8, + anon_sym_RBRACE, + anon_sym_LPAREN, + anon_sym_BANG, + anon_sym_DASH, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, + ACTIONS(364), 10, + anon_sym_let, + anon_sym_return, + anon_sym_if, + anon_sym_while, + anon_sym_match, + anon_sym_true, + anon_sym_false, + anon_sym_unit, + sym_int_lit, + sym_identifier, + [4270] = 3, + ACTIONS(3), 1, + sym_comment, + ACTIONS(370), 8, + anon_sym_RBRACE, + anon_sym_LPAREN, + anon_sym_BANG, + 
anon_sym_DASH, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, + ACTIONS(368), 10, + anon_sym_let, + anon_sym_return, + anon_sym_if, + anon_sym_while, + anon_sym_match, + anon_sym_true, + anon_sym_false, + anon_sym_unit, + sym_int_lit, + sym_identifier, + [4296] = 11, + ACTIONS(3), 1, + sym_comment, + ACTIONS(372), 1, + sym_identifier, + ACTIONS(374), 1, + anon_sym_RBRACE, + ACTIONS(376), 1, + anon_sym__, + STATE(99), 1, + aux_sym_match_expr_repeat1, + STATE(123), 1, + sym_match_arm, + STATE(217), 1, + sym_path_expr, + STATE(249), 1, + sym_pattern, + STATE(259), 2, + sym_pattern_call, + sym_literal, + ACTIONS(48), 4, + anon_sym_true, + anon_sym_false, + anon_sym_unit, + sym_int_lit, + ACTIONS(50), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, + [4337] = 11, + ACTIONS(3), 1, + sym_comment, + ACTIONS(372), 1, + sym_identifier, + ACTIONS(376), 1, + anon_sym__, + ACTIONS(378), 1, + anon_sym_RBRACE, + STATE(103), 1, + aux_sym_match_expr_repeat1, + STATE(123), 1, + sym_match_arm, + STATE(217), 1, + sym_path_expr, + STATE(249), 1, + sym_pattern, + STATE(259), 2, + sym_pattern_call, + sym_literal, + ACTIONS(48), 4, + anon_sym_true, + anon_sym_false, + anon_sym_unit, + sym_int_lit, + ACTIONS(50), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, + [4378] = 14, + ACTIONS(3), 1, + sym_comment, + ACTIONS(101), 1, + anon_sym_LPAREN, + ACTIONS(103), 1, + anon_sym_QMARK, + ACTIONS(380), 1, + anon_sym_LBRACE, + ACTIONS(382), 1, + anon_sym_STAR, + ACTIONS(386), 1, + anon_sym_PIPE_PIPE, + ACTIONS(388), 1, anon_sym_AMP_AMP, - ACTIONS(350), 1, + ACTIONS(396), 1, anon_sym_SLASH, STATE(21), 1, sym_arg_list, - STATE(98), 1, + STATE(81), 1, sym_block, - ACTIONS(338), 2, + ACTIONS(384), 2, anon_sym_DASH, anon_sym_PLUS, - ACTIONS(344), 2, + ACTIONS(390), 2, anon_sym_EQ_EQ, anon_sym_BANG_EQ, - ACTIONS(346), 2, + ACTIONS(392), 2, anon_sym_LT, anon_sym_GT, - ACTIONS(348), 2, + ACTIONS(394), 2, anon_sym_LT_EQ, anon_sym_GT_EQ, - [4020] = 4, + 
[4425] = 14, ACTIONS(3), 1, sym_comment, - ACTIONS(92), 2, - anon_sym_COLON_COLON, - anon_sym_LBRACE, - ACTIONS(23), 3, - anon_sym_LT, - anon_sym_GT, - anon_sym_SLASH, - ACTIONS(28), 12, + ACTIONS(101), 1, anon_sym_LPAREN, - anon_sym_STAR, + ACTIONS(103), 1, anon_sym_QMARK, - anon_sym_DOT, - anon_sym_DASH, + ACTIONS(380), 1, + anon_sym_LBRACE, + ACTIONS(382), 1, + anon_sym_STAR, + ACTIONS(386), 1, anon_sym_PIPE_PIPE, + ACTIONS(388), 1, anon_sym_AMP_AMP, + ACTIONS(396), 1, + anon_sym_SLASH, + STATE(21), 1, + sym_arg_list, + STATE(90), 1, + sym_block, + ACTIONS(384), 2, + anon_sym_DASH, + anon_sym_PLUS, + ACTIONS(390), 2, anon_sym_EQ_EQ, anon_sym_BANG_EQ, + ACTIONS(392), 2, + anon_sym_LT, + anon_sym_GT, + ACTIONS(394), 2, anon_sym_LT_EQ, anon_sym_GT_EQ, - anon_sym_PLUS, - [4047] = 13, + [4472] = 13, ACTIONS(3), 1, sym_comment, ACTIONS(101), 1, @@ -5552,10 +5965,63 @@ static const uint16_t ts_small_parse_table[] = { ACTIONS(131), 2, anon_sym_EQ_EQ, anon_sym_BANG_EQ, - ACTIONS(358), 2, - anon_sym_RBRACE, + ACTIONS(398), 2, anon_sym_COMMA, - [4092] = 13, + anon_sym_RPAREN, + [4517] = 11, + ACTIONS(3), 1, + sym_comment, + ACTIONS(400), 1, + sym_identifier, + ACTIONS(403), 1, + anon_sym_RBRACE, + ACTIONS(405), 1, + anon_sym__, + STATE(103), 1, + aux_sym_match_expr_repeat1, + STATE(123), 1, + sym_match_arm, + STATE(217), 1, + sym_path_expr, + STATE(249), 1, + sym_pattern, + STATE(259), 2, + sym_pattern_call, + sym_literal, + ACTIONS(408), 4, + anon_sym_true, + anon_sym_false, + anon_sym_unit, + sym_int_lit, + ACTIONS(411), 4, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, + [4558] = 4, + ACTIONS(3), 1, + sym_comment, + ACTIONS(92), 2, + anon_sym_COLON_COLON, + anon_sym_LBRACE, + ACTIONS(23), 3, + anon_sym_LT, + anon_sym_GT, + anon_sym_SLASH, + ACTIONS(28), 12, + anon_sym_LPAREN, + anon_sym_STAR, + anon_sym_QMARK, + anon_sym_DOT, + anon_sym_DASH, + anon_sym_PIPE_PIPE, + anon_sym_AMP_AMP, + anon_sym_EQ_EQ, + anon_sym_BANG_EQ, + anon_sym_LT_EQ, + 
anon_sym_GT_EQ, + anon_sym_PLUS, + [4585] = 13, ACTIONS(3), 1, sym_comment, ACTIONS(101), 1, @@ -5584,494 +6050,221 @@ static const uint16_t ts_small_parse_table[] = { ACTIONS(131), 2, anon_sym_EQ_EQ, anon_sym_BANG_EQ, - ACTIONS(360), 2, - anon_sym_COMMA, - anon_sym_RPAREN, - [4137] = 3, - ACTIONS(3), 1, - sym_comment, - ACTIONS(364), 6, + ACTIONS(414), 2, anon_sym_RBRACE, - anon_sym_LPAREN, - anon_sym_BANG, - anon_sym_DASH, - sym_u8_lit, - sym_string_lit, - ACTIONS(362), 10, - anon_sym_let, - anon_sym_return, - anon_sym_if, - anon_sym_while, - anon_sym_match, - anon_sym_true, - anon_sym_false, - anon_sym_unit, - sym_int_lit, - sym_identifier, - [4161] = 10, + anon_sym_COMMA, + [4630] = 9, ACTIONS(3), 1, sym_comment, ACTIONS(101), 1, anon_sym_LPAREN, ACTIONS(103), 1, anon_sym_QMARK, - ACTIONS(336), 1, + ACTIONS(382), 1, anon_sym_STAR, - ACTIONS(350), 1, + ACTIONS(396), 1, anon_sym_SLASH, STATE(21), 1, sym_arg_list, - ACTIONS(338), 2, - anon_sym_DASH, - anon_sym_PLUS, - ACTIONS(346), 2, + ACTIONS(109), 2, anon_sym_LT, anon_sym_GT, - ACTIONS(348), 2, - anon_sym_LT_EQ, - anon_sym_GT_EQ, - ACTIONS(111), 5, + ACTIONS(384), 2, + anon_sym_DASH, + anon_sym_PLUS, + ACTIONS(111), 7, anon_sym_LBRACE, anon_sym_PIPE_PIPE, anon_sym_AMP_AMP, anon_sym_EQ_EQ, anon_sym_BANG_EQ, - [4199] = 9, + anon_sym_LT_EQ, + anon_sym_GT_EQ, + [4666] = 13, ACTIONS(3), 1, sym_comment, ACTIONS(101), 1, anon_sym_LPAREN, ACTIONS(103), 1, anon_sym_QMARK, - ACTIONS(336), 1, + ACTIONS(382), 1, anon_sym_STAR, - ACTIONS(350), 1, + ACTIONS(386), 1, + anon_sym_PIPE_PIPE, + ACTIONS(388), 1, + anon_sym_AMP_AMP, + ACTIONS(396), 1, anon_sym_SLASH, + ACTIONS(416), 1, + anon_sym_LBRACE, STATE(21), 1, sym_arg_list, - ACTIONS(109), 2, - anon_sym_LT, - anon_sym_GT, - ACTIONS(338), 2, + ACTIONS(384), 2, anon_sym_DASH, anon_sym_PLUS, - ACTIONS(111), 7, - anon_sym_LBRACE, - anon_sym_PIPE_PIPE, - anon_sym_AMP_AMP, + ACTIONS(390), 2, anon_sym_EQ_EQ, anon_sym_BANG_EQ, + ACTIONS(392), 2, + anon_sym_LT, + anon_sym_GT, + 
ACTIONS(394), 2, anon_sym_LT_EQ, anon_sym_GT_EQ, - [4235] = 13, + [4710] = 8, ACTIONS(3), 1, sym_comment, ACTIONS(101), 1, anon_sym_LPAREN, ACTIONS(103), 1, anon_sym_QMARK, - ACTIONS(121), 1, + ACTIONS(382), 1, anon_sym_STAR, - ACTIONS(129), 1, + ACTIONS(396), 1, anon_sym_SLASH, - ACTIONS(145), 1, - anon_sym_AMP_AMP, - ACTIONS(189), 1, - anon_sym_PIPE_PIPE, - ACTIONS(366), 1, - anon_sym_RPAREN, STATE(21), 1, sym_arg_list, - ACTIONS(123), 2, - anon_sym_DASH, - anon_sym_PLUS, - ACTIONS(125), 2, + ACTIONS(109), 2, anon_sym_LT, anon_sym_GT, - ACTIONS(127), 2, - anon_sym_LT_EQ, - anon_sym_GT_EQ, - ACTIONS(131), 2, + ACTIONS(111), 9, + anon_sym_LBRACE, + anon_sym_DASH, + anon_sym_PIPE_PIPE, + anon_sym_AMP_AMP, anon_sym_EQ_EQ, anon_sym_BANG_EQ, - [4279] = 3, - ACTIONS(3), 1, - sym_comment, - ACTIONS(370), 6, - anon_sym_RBRACE, - anon_sym_LPAREN, - anon_sym_BANG, - anon_sym_DASH, - sym_u8_lit, - sym_string_lit, - ACTIONS(368), 10, - anon_sym_let, - anon_sym_return, - anon_sym_if, - anon_sym_while, - anon_sym_match, - anon_sym_true, - anon_sym_false, - anon_sym_unit, - sym_int_lit, - sym_identifier, - [4303] = 3, - ACTIONS(3), 1, - sym_comment, - ACTIONS(374), 6, - anon_sym_RBRACE, - anon_sym_LPAREN, - anon_sym_BANG, - anon_sym_DASH, - sym_u8_lit, - sym_string_lit, - ACTIONS(372), 10, - anon_sym_let, - anon_sym_return, - anon_sym_if, - anon_sym_while, - anon_sym_match, - anon_sym_true, - anon_sym_false, - anon_sym_unit, - sym_int_lit, - sym_identifier, - [4327] = 3, - ACTIONS(3), 1, - sym_comment, - ACTIONS(201), 6, - anon_sym_RBRACE, - anon_sym_LPAREN, - anon_sym_BANG, - anon_sym_DASH, - sym_u8_lit, - sym_string_lit, - ACTIONS(197), 10, - anon_sym_let, - anon_sym_return, - anon_sym_if, - anon_sym_while, - anon_sym_match, - anon_sym_true, - anon_sym_false, - anon_sym_unit, - sym_int_lit, - sym_identifier, - [4351] = 3, - ACTIONS(3), 1, - sym_comment, - ACTIONS(378), 6, - anon_sym_RBRACE, - anon_sym_LPAREN, - anon_sym_BANG, - anon_sym_DASH, - sym_u8_lit, - sym_string_lit, - 
ACTIONS(376), 10, - anon_sym_let, - anon_sym_return, - anon_sym_if, - anon_sym_while, - anon_sym_match, - anon_sym_true, - anon_sym_false, - anon_sym_unit, - sym_int_lit, - sym_identifier, - [4375] = 3, - ACTIONS(3), 1, - sym_comment, - ACTIONS(382), 6, - anon_sym_RBRACE, - anon_sym_LPAREN, - anon_sym_BANG, - anon_sym_DASH, - sym_u8_lit, - sym_string_lit, - ACTIONS(380), 10, - anon_sym_let, - anon_sym_return, - anon_sym_if, - anon_sym_while, - anon_sym_match, - anon_sym_true, - anon_sym_false, - anon_sym_unit, - sym_int_lit, - sym_identifier, - [4399] = 3, - ACTIONS(3), 1, - sym_comment, - ACTIONS(386), 6, - anon_sym_RBRACE, - anon_sym_LPAREN, - anon_sym_BANG, - anon_sym_DASH, - sym_u8_lit, - sym_string_lit, - ACTIONS(384), 10, - anon_sym_let, - anon_sym_return, - anon_sym_if, - anon_sym_while, - anon_sym_match, - anon_sym_true, - anon_sym_false, - anon_sym_unit, - sym_int_lit, - sym_identifier, - [4423] = 3, - ACTIONS(3), 1, - sym_comment, - ACTIONS(390), 6, - anon_sym_RBRACE, - anon_sym_LPAREN, - anon_sym_BANG, - anon_sym_DASH, - sym_u8_lit, - sym_string_lit, - ACTIONS(388), 10, - anon_sym_let, - anon_sym_return, - anon_sym_if, - anon_sym_while, - anon_sym_match, - anon_sym_true, - anon_sym_false, - anon_sym_unit, - sym_int_lit, - sym_identifier, - [4447] = 13, + anon_sym_LT_EQ, + anon_sym_GT_EQ, + anon_sym_PLUS, + [4744] = 13, ACTIONS(3), 1, sym_comment, ACTIONS(101), 1, anon_sym_LPAREN, ACTIONS(103), 1, anon_sym_QMARK, - ACTIONS(336), 1, + ACTIONS(121), 1, anon_sym_STAR, - ACTIONS(340), 1, - anon_sym_PIPE_PIPE, - ACTIONS(342), 1, - anon_sym_AMP_AMP, - ACTIONS(350), 1, + ACTIONS(129), 1, anon_sym_SLASH, - ACTIONS(392), 1, - anon_sym_LBRACE, + ACTIONS(145), 1, + anon_sym_AMP_AMP, + ACTIONS(189), 1, + anon_sym_PIPE_PIPE, + ACTIONS(418), 1, + anon_sym_RPAREN, STATE(21), 1, sym_arg_list, - ACTIONS(338), 2, + ACTIONS(123), 2, anon_sym_DASH, anon_sym_PLUS, - ACTIONS(344), 2, - anon_sym_EQ_EQ, - anon_sym_BANG_EQ, - ACTIONS(346), 2, + ACTIONS(125), 2, anon_sym_LT, 
anon_sym_GT, - ACTIONS(348), 2, + ACTIONS(127), 2, anon_sym_LT_EQ, anon_sym_GT_EQ, - [4491] = 3, - ACTIONS(3), 1, - sym_comment, - ACTIONS(396), 6, - anon_sym_RBRACE, - anon_sym_LPAREN, - anon_sym_BANG, - anon_sym_DASH, - sym_u8_lit, - sym_string_lit, - ACTIONS(394), 10, - anon_sym_let, - anon_sym_return, - anon_sym_if, - anon_sym_while, - anon_sym_match, - anon_sym_true, - anon_sym_false, - anon_sym_unit, - sym_int_lit, - sym_identifier, - [4515] = 8, + ACTIONS(131), 2, + anon_sym_EQ_EQ, + anon_sym_BANG_EQ, + [4788] = 12, ACTIONS(3), 1, sym_comment, ACTIONS(101), 1, anon_sym_LPAREN, ACTIONS(103), 1, anon_sym_QMARK, - ACTIONS(336), 1, + ACTIONS(382), 1, anon_sym_STAR, - ACTIONS(350), 1, + ACTIONS(388), 1, + anon_sym_AMP_AMP, + ACTIONS(396), 1, anon_sym_SLASH, STATE(21), 1, sym_arg_list, - ACTIONS(109), 2, - anon_sym_LT, - anon_sym_GT, - ACTIONS(111), 9, + ACTIONS(111), 2, anon_sym_LBRACE, - anon_sym_DASH, anon_sym_PIPE_PIPE, - anon_sym_AMP_AMP, + ACTIONS(384), 2, + anon_sym_DASH, + anon_sym_PLUS, + ACTIONS(390), 2, anon_sym_EQ_EQ, anon_sym_BANG_EQ, + ACTIONS(392), 2, + anon_sym_LT, + anon_sym_GT, + ACTIONS(394), 2, anon_sym_LT_EQ, anon_sym_GT_EQ, - anon_sym_PLUS, - [4549] = 11, + [4830] = 11, ACTIONS(3), 1, sym_comment, ACTIONS(101), 1, anon_sym_LPAREN, ACTIONS(103), 1, anon_sym_QMARK, - ACTIONS(336), 1, + ACTIONS(382), 1, anon_sym_STAR, - ACTIONS(350), 1, + ACTIONS(396), 1, anon_sym_SLASH, STATE(21), 1, sym_arg_list, - ACTIONS(338), 2, + ACTIONS(384), 2, anon_sym_DASH, anon_sym_PLUS, - ACTIONS(344), 2, + ACTIONS(390), 2, anon_sym_EQ_EQ, anon_sym_BANG_EQ, - ACTIONS(346), 2, + ACTIONS(392), 2, anon_sym_LT, anon_sym_GT, - ACTIONS(348), 2, + ACTIONS(394), 2, anon_sym_LT_EQ, anon_sym_GT_EQ, ACTIONS(111), 3, anon_sym_LBRACE, anon_sym_PIPE_PIPE, anon_sym_AMP_AMP, - [4589] = 12, + [4870] = 10, ACTIONS(3), 1, sym_comment, ACTIONS(101), 1, anon_sym_LPAREN, ACTIONS(103), 1, anon_sym_QMARK, - ACTIONS(336), 1, + ACTIONS(382), 1, anon_sym_STAR, - ACTIONS(342), 1, - 
anon_sym_AMP_AMP, - ACTIONS(350), 1, + ACTIONS(396), 1, anon_sym_SLASH, STATE(21), 1, sym_arg_list, - ACTIONS(111), 2, - anon_sym_LBRACE, - anon_sym_PIPE_PIPE, - ACTIONS(338), 2, + ACTIONS(384), 2, anon_sym_DASH, anon_sym_PLUS, - ACTIONS(344), 2, - anon_sym_EQ_EQ, - anon_sym_BANG_EQ, - ACTIONS(346), 2, + ACTIONS(392), 2, anon_sym_LT, anon_sym_GT, - ACTIONS(348), 2, + ACTIONS(394), 2, anon_sym_LT_EQ, anon_sym_GT_EQ, - [4631] = 11, - ACTIONS(3), 1, - sym_comment, - ACTIONS(398), 1, - sym_identifier, - ACTIONS(400), 1, - anon_sym_RBRACE, - ACTIONS(402), 1, - anon_sym__, - STATE(112), 1, - aux_sym_match_expr_repeat1, - STATE(146), 1, - sym_match_arm, - STATE(218), 1, - sym_path_expr, - STATE(262), 1, - sym_pattern, - ACTIONS(50), 2, - sym_u8_lit, - sym_string_lit, - STATE(277), 2, - sym_pattern_call, - sym_literal, - ACTIONS(48), 4, - anon_sym_true, - anon_sym_false, - anon_sym_unit, - sym_int_lit, - [4670] = 11, - ACTIONS(3), 1, - sym_comment, - ACTIONS(398), 1, - sym_identifier, - ACTIONS(402), 1, - anon_sym__, - ACTIONS(404), 1, - anon_sym_RBRACE, - STATE(110), 1, - aux_sym_match_expr_repeat1, - STATE(146), 1, - sym_match_arm, - STATE(218), 1, - sym_path_expr, - STATE(262), 1, - sym_pattern, - ACTIONS(50), 2, - sym_u8_lit, - sym_string_lit, - STATE(277), 2, - sym_pattern_call, - sym_literal, - ACTIONS(48), 4, - anon_sym_true, - anon_sym_false, - anon_sym_unit, - sym_int_lit, - [4709] = 11, - ACTIONS(3), 1, - sym_comment, - ACTIONS(406), 1, - sym_identifier, - ACTIONS(409), 1, - anon_sym_RBRACE, - ACTIONS(411), 1, - anon_sym__, - STATE(112), 1, - aux_sym_match_expr_repeat1, - STATE(146), 1, - sym_match_arm, - STATE(218), 1, - sym_path_expr, - STATE(262), 1, - sym_pattern, - ACTIONS(417), 2, - sym_u8_lit, - sym_string_lit, - STATE(277), 2, - sym_pattern_call, - sym_literal, - ACTIONS(414), 4, - anon_sym_true, - anon_sym_false, - anon_sym_unit, - sym_int_lit, - [4748] = 4, + ACTIONS(111), 5, + anon_sym_LBRACE, + anon_sym_PIPE_PIPE, + anon_sym_AMP_AMP, + anon_sym_EQ_EQ, 
+ anon_sym_BANG_EQ, + [4908] = 4, ACTIONS(3), 1, sym_comment, - ACTIONS(422), 1, - anon_sym_use, - STATE(113), 2, - sym_use_decl, - aux_sym_source_file_repeat1, - ACTIONS(420), 11, + ACTIONS(280), 1, + anon_sym_COLON_COLON, + STATE(70), 1, + aux_sym_module_path_repeat1, + ACTIONS(420), 12, ts_builtin_sym_end, + anon_sym_use, anon_sym_pub, anon_sym_fn, anon_sym_extern, @@ -6082,16 +6275,16 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_struct, anon_sym_enum, anon_sym_impl, - [4772] = 4, + [4932] = 4, ACTIONS(3), 1, sym_comment, - ACTIONS(276), 1, - anon_sym_COLON_COLON, - STATE(118), 1, - aux_sym_module_path_repeat1, - ACTIONS(425), 12, - ts_builtin_sym_end, + ACTIONS(424), 1, anon_sym_use, + STATE(114), 2, + sym_use_decl, + aux_sym_source_file_repeat1, + ACTIONS(422), 11, + ts_builtin_sym_end, anon_sym_pub, anon_sym_fn, anon_sym_extern, @@ -6102,7 +6295,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_struct, anon_sym_enum, anon_sym_impl, - [4796] = 2, + [4956] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(427), 14, @@ -6120,7 +6313,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_LBRACE, anon_sym_enum, anon_sym_impl, - [4816] = 2, + [4976] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(429), 14, @@ -6138,7 +6331,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_LBRACE, anon_sym_enum, anon_sym_impl, - [4836] = 2, + [4996] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(431), 14, @@ -6156,12 +6349,12 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_LBRACE, anon_sym_enum, anon_sym_impl, - [4856] = 4, + [5016] = 4, ACTIONS(3), 1, sym_comment, - ACTIONS(276), 1, + ACTIONS(280), 1, anon_sym_COLON_COLON, - STATE(68), 1, + STATE(113), 1, aux_sym_module_path_repeat1, ACTIONS(433), 12, ts_builtin_sym_end, @@ -6176,7 +6369,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_struct, anon_sym_enum, anon_sym_impl, - [4880] = 2, + [5040] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(435), 14, @@ -6194,7 +6387,7 @@ static 
const uint16_t ts_small_parse_table[] = { anon_sym_LBRACE, anon_sym_enum, anon_sym_impl, - [4900] = 4, + [5060] = 4, ACTIONS(3), 1, sym_comment, ACTIONS(439), 1, @@ -6213,7 +6406,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_struct, anon_sym_enum, anon_sym_impl, - [4923] = 4, + [5083] = 4, ACTIONS(3), 1, sym_comment, ACTIONS(445), 1, @@ -6232,7 +6425,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_struct, anon_sym_enum, anon_sym_impl, - [4946] = 2, + [5106] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(449), 12, @@ -6248,10 +6441,28 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_struct, anon_sym_enum, anon_sym_impl, - [4964] = 2, + [5124] = 4, + ACTIONS(3), 1, + sym_comment, + ACTIONS(455), 1, + anon_sym_COMMA, + ACTIONS(453), 5, + anon_sym_RBRACE, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, + ACTIONS(451), 6, + anon_sym__, + anon_sym_true, + anon_sym_false, + anon_sym_unit, + sym_int_lit, + sym_identifier, + [5146] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(286), 12, + ACTIONS(284), 12, ts_builtin_sym_end, anon_sym_pub, anon_sym_fn, @@ -6264,10 +6475,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_RBRACE, anon_sym_enum, anon_sym_impl, - [4982] = 2, + [5164] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(315), 12, + ACTIONS(278), 12, ts_builtin_sym_end, anon_sym_pub, anon_sym_fn, @@ -6280,12 +6491,12 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_RBRACE, anon_sym_enum, anon_sym_impl, - [5000] = 3, + [5182] = 3, ACTIONS(3), 1, sym_comment, - ACTIONS(453), 1, + ACTIONS(459), 1, anon_sym_SEMI, - ACTIONS(451), 11, + ACTIONS(457), 11, ts_builtin_sym_end, anon_sym_pub, anon_sym_fn, @@ -6297,13 +6508,12 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_struct, anon_sym_enum, anon_sym_impl, - [5020] = 3, + [5202] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(457), 1, - anon_sym_SEMI, - ACTIONS(455), 11, + ACTIONS(461), 12, ts_builtin_sym_end, + anon_sym_use, anon_sym_pub, 
anon_sym_fn, anon_sym_extern, @@ -6314,12 +6524,30 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_struct, anon_sym_enum, anon_sym_impl, - [5040] = 2, + [5220] = 3, + ACTIONS(3), 1, + sym_comment, + ACTIONS(463), 6, + anon_sym__, + anon_sym_true, + anon_sym_false, + anon_sym_unit, + sym_int_lit, + sym_identifier, + ACTIONS(465), 6, + anon_sym_RBRACE, + anon_sym_COMMA, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, + [5240] = 3, ACTIONS(3), 1, sym_comment, - ACTIONS(459), 12, + ACTIONS(469), 1, + anon_sym_SEMI, + ACTIONS(467), 11, ts_builtin_sym_end, - anon_sym_use, anon_sym_pub, anon_sym_fn, anon_sym_extern, @@ -6330,10 +6558,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_struct, anon_sym_enum, anon_sym_impl, - [5058] = 2, + [5260] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(461), 11, + ACTIONS(471), 11, ts_builtin_sym_end, anon_sym_pub, anon_sym_fn, @@ -6345,10 +6573,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_struct, anon_sym_enum, anon_sym_impl, - [5075] = 2, + [5277] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(463), 11, + ACTIONS(473), 11, ts_builtin_sym_end, anon_sym_pub, anon_sym_fn, @@ -6360,10 +6588,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_struct, anon_sym_enum, anon_sym_impl, - [5092] = 2, + [5294] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(465), 11, + ACTIONS(475), 11, ts_builtin_sym_end, anon_sym_pub, anon_sym_fn, @@ -6375,10 +6603,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_struct, anon_sym_enum, anon_sym_impl, - [5109] = 2, + [5311] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(467), 11, + ACTIONS(477), 11, ts_builtin_sym_end, anon_sym_pub, anon_sym_fn, @@ -6390,10 +6618,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_struct, anon_sym_enum, anon_sym_impl, - [5126] = 2, + [5328] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(469), 11, + ACTIONS(479), 11, ts_builtin_sym_end, anon_sym_pub, anon_sym_fn, @@ -6405,10 +6633,10 @@ static const 
uint16_t ts_small_parse_table[] = { anon_sym_struct, anon_sym_enum, anon_sym_impl, - [5143] = 2, + [5345] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(471), 11, + ACTIONS(481), 11, ts_builtin_sym_end, anon_sym_pub, anon_sym_fn, @@ -6420,10 +6648,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_struct, anon_sym_enum, anon_sym_impl, - [5160] = 2, + [5362] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(473), 11, + ACTIONS(483), 11, ts_builtin_sym_end, anon_sym_pub, anon_sym_fn, @@ -6435,10 +6663,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_struct, anon_sym_enum, anon_sym_impl, - [5177] = 2, + [5379] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(475), 11, + ACTIONS(485), 11, ts_builtin_sym_end, anon_sym_pub, anon_sym_fn, @@ -6450,10 +6678,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_struct, anon_sym_enum, anon_sym_impl, - [5194] = 2, + [5396] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(477), 11, + ACTIONS(487), 11, ts_builtin_sym_end, anon_sym_pub, anon_sym_fn, @@ -6465,10 +6693,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_struct, anon_sym_enum, anon_sym_impl, - [5211] = 2, + [5413] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(479), 11, + ACTIONS(489), 11, ts_builtin_sym_end, anon_sym_pub, anon_sym_fn, @@ -6480,10 +6708,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_struct, anon_sym_enum, anon_sym_impl, - [5228] = 2, + [5430] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(481), 11, + ACTIONS(491), 11, ts_builtin_sym_end, anon_sym_pub, anon_sym_fn, @@ -6495,10 +6723,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_struct, anon_sym_enum, anon_sym_impl, - [5245] = 2, + [5447] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(483), 11, + ACTIONS(493), 11, ts_builtin_sym_end, anon_sym_pub, anon_sym_fn, @@ -6510,10 +6738,26 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_struct, anon_sym_enum, anon_sym_impl, - [5262] = 2, + [5464] = 3, ACTIONS(3), 1, sym_comment, - ACTIONS(485), 11, 
+ ACTIONS(403), 5, + anon_sym_RBRACE, + sym_u8_lit, + sym_i64_lit, + sym_u64_lit, + sym_string_lit, + ACTIONS(495), 6, + anon_sym__, + anon_sym_true, + anon_sym_false, + anon_sym_unit, + sym_int_lit, + sym_identifier, + [5483] = 2, + ACTIONS(3), 1, + sym_comment, + ACTIONS(497), 11, ts_builtin_sym_end, anon_sym_pub, anon_sym_fn, @@ -6525,10 +6769,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_struct, anon_sym_enum, anon_sym_impl, - [5279] = 2, + [5500] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(487), 11, + ACTIONS(499), 11, ts_builtin_sym_end, anon_sym_pub, anon_sym_fn, @@ -6540,10 +6784,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_struct, anon_sym_enum, anon_sym_impl, - [5296] = 2, + [5517] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(489), 11, + ACTIONS(501), 11, ts_builtin_sym_end, anon_sym_pub, anon_sym_fn, @@ -6555,10 +6799,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_struct, anon_sym_enum, anon_sym_impl, - [5313] = 2, + [5534] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(491), 11, + ACTIONS(503), 11, ts_builtin_sym_end, anon_sym_pub, anon_sym_fn, @@ -6570,10 +6814,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_struct, anon_sym_enum, anon_sym_impl, - [5330] = 2, + [5551] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(493), 11, + ACTIONS(505), 11, ts_builtin_sym_end, anon_sym_pub, anon_sym_fn, @@ -6585,10 +6829,10 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_struct, anon_sym_enum, anon_sym_impl, - [5347] = 2, + [5568] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(495), 11, + ACTIONS(507), 11, ts_builtin_sym_end, anon_sym_pub, anon_sym_fn, @@ -6600,68 +6844,23 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_struct, anon_sym_enum, anon_sym_impl, - [5364] = 4, - ACTIONS(3), 1, - sym_comment, - ACTIONS(501), 1, - anon_sym_COMMA, - ACTIONS(499), 3, - anon_sym_RBRACE, - sym_u8_lit, - sym_string_lit, - ACTIONS(497), 6, - anon_sym__, - anon_sym_true, - anon_sym_false, - 
anon_sym_unit, - sym_int_lit, - sym_identifier, - [5384] = 3, - ACTIONS(3), 1, - sym_comment, - ACTIONS(505), 4, - anon_sym_RBRACE, - anon_sym_COMMA, - sym_u8_lit, - sym_string_lit, - ACTIONS(503), 6, - anon_sym__, - anon_sym_true, - anon_sym_false, - anon_sym_unit, - sym_int_lit, - sym_identifier, - [5402] = 5, + [5585] = 5, ACTIONS(3), 1, sym_comment, - ACTIONS(509), 1, - anon_sym_fn, ACTIONS(511), 1, - anon_sym_extern, + anon_sym_fn, ACTIONS(513), 1, + anon_sym_extern, + ACTIONS(515), 1, anon_sym_enum, - ACTIONS(507), 6, + ACTIONS(509), 6, anon_sym_pub, anon_sym_linear, anon_sym_copy, anon_sym_opaque, anon_sym_capability, anon_sym_struct, - [5423] = 3, - ACTIONS(3), 1, - sym_comment, - ACTIONS(409), 3, - anon_sym_RBRACE, - sym_u8_lit, - sym_string_lit, - ACTIONS(515), 6, - anon_sym__, - anon_sym_true, - anon_sym_false, - anon_sym_unit, - sym_int_lit, - sym_identifier, - [5440] = 4, + [5606] = 4, ACTIONS(3), 1, sym_comment, ACTIONS(520), 1, @@ -6674,7 +6873,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_copy, anon_sym_opaque, anon_sym_capability, - [5457] = 4, + [5623] = 4, ACTIONS(3), 1, sym_comment, ACTIONS(524), 1, @@ -6687,35 +6886,47 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_copy, anon_sym_opaque, anon_sym_capability, - [5474] = 6, + [5640] = 6, ACTIONS(3), 1, sym_comment, ACTIONS(526), 1, sym_identifier, ACTIONS(530), 1, anon_sym_RBRACK, - STATE(75), 1, + STATE(77), 1, sym_type_path, - STATE(220), 1, + STATE(243), 1, sym_type, ACTIONS(528), 2, anon_sym_STAR, anon_sym_AMP, - [5494] = 6, + [5660] = 6, ACTIONS(3), 1, sym_comment, ACTIONS(526), 1, sym_identifier, ACTIONS(532), 1, anon_sym_RBRACK, - STATE(75), 1, + STATE(77), 1, + sym_type_path, + STATE(243), 1, + sym_type, + ACTIONS(528), 2, + anon_sym_STAR, + anon_sym_AMP, + [5680] = 5, + ACTIONS(3), 1, + sym_comment, + ACTIONS(526), 1, + sym_identifier, + STATE(77), 1, sym_type_path, - STATE(220), 1, + STATE(252), 1, sym_type, ACTIONS(528), 2, anon_sym_STAR, anon_sym_AMP, - 
[5514] = 5, + [5697] = 5, ACTIONS(3), 1, sym_comment, ACTIONS(534), 1, @@ -6724,94 +6935,93 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_fn, ACTIONS(538), 1, anon_sym_RBRACE, - STATE(162), 2, + STATE(163), 2, sym_method_decl, aux_sym_impl_block_repeat1, - [5531] = 5, - ACTIONS(3), 1, - sym_comment, - ACTIONS(526), 1, - sym_identifier, - STATE(75), 1, - sym_type_path, - STATE(208), 1, - sym_type, - ACTIONS(528), 2, - anon_sym_STAR, - anon_sym_AMP, - [5548] = 5, + [5714] = 5, ACTIONS(3), 1, sym_comment, ACTIONS(526), 1, sym_identifier, - STATE(75), 1, + STATE(77), 1, sym_type_path, - STATE(215), 1, + STATE(126), 1, sym_type, ACTIONS(528), 2, anon_sym_STAR, anon_sym_AMP, - [5565] = 5, + [5731] = 5, ACTIONS(3), 1, sym_comment, ACTIONS(526), 1, sym_identifier, - STATE(75), 1, + STATE(77), 1, sym_type_path, STATE(278), 1, sym_type, ACTIONS(528), 2, anon_sym_STAR, anon_sym_AMP, - [5582] = 5, + [5748] = 5, ACTIONS(3), 1, sym_comment, ACTIONS(526), 1, sym_identifier, - STATE(75), 1, + STATE(77), 1, sym_type_path, - STATE(223), 1, + STATE(221), 1, sym_type, ACTIONS(528), 2, anon_sym_STAR, anon_sym_AMP, - [5599] = 5, + [5765] = 5, ACTIONS(3), 1, sym_comment, ACTIONS(526), 1, sym_identifier, - STATE(75), 1, + STATE(77), 1, sym_type_path, - STATE(220), 1, + STATE(241), 1, sym_type, ACTIONS(528), 2, anon_sym_STAR, anon_sym_AMP, - [5616] = 5, + [5782] = 5, ACTIONS(3), 1, sym_comment, ACTIONS(526), 1, sym_identifier, - STATE(75), 1, + STATE(77), 1, sym_type_path, - STATE(276), 1, + STATE(243), 1, sym_type, ACTIONS(528), 2, anon_sym_STAR, anon_sym_AMP, - [5633] = 5, + [5799] = 5, ACTIONS(3), 1, sym_comment, ACTIONS(526), 1, sym_identifier, - STATE(75), 1, + STATE(77), 1, sym_type_path, - STATE(221), 1, + STATE(225), 1, sym_type, ACTIONS(528), 2, anon_sym_STAR, anon_sym_AMP, - [5650] = 5, + [5816] = 4, + ACTIONS(3), 1, + sym_comment, + STATE(3), 1, + aux_sym_path_expr_repeat1, + ACTIONS(13), 2, + anon_sym_LPAREN, + anon_sym_EQ_GT, + ACTIONS(17), 2, + 
anon_sym_COLON_COLON, + anon_sym_DOT, + [5831] = 5, ACTIONS(3), 1, sym_comment, ACTIONS(540), 1, @@ -6820,1230 +7030,1219 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_fn, ACTIONS(546), 1, anon_sym_RBRACE, - STATE(162), 2, + STATE(163), 2, sym_method_decl, aux_sym_impl_block_repeat1, - [5667] = 5, + [5848] = 5, ACTIONS(3), 1, sym_comment, ACTIONS(526), 1, sym_identifier, - STATE(75), 1, + STATE(77), 1, sym_type_path, - STATE(222), 1, + STATE(267), 1, sym_type, ACTIONS(528), 2, anon_sym_STAR, anon_sym_AMP, - [5684] = 5, + [5865] = 5, ACTIONS(3), 1, sym_comment, ACTIONS(526), 1, sym_identifier, - STATE(75), 1, + STATE(77), 1, sym_type_path, - STATE(126), 1, + STATE(228), 1, sym_type, ACTIONS(528), 2, anon_sym_STAR, anon_sym_AMP, - [5701] = 5, + [5882] = 5, + ACTIONS(3), 1, + sym_comment, + ACTIONS(534), 1, + anon_sym_pub, + ACTIONS(536), 1, + anon_sym_fn, + ACTIONS(548), 1, + anon_sym_RBRACE, + STATE(155), 2, + sym_method_decl, + aux_sym_impl_block_repeat1, + [5899] = 5, ACTIONS(3), 1, sym_comment, ACTIONS(526), 1, sym_identifier, - STATE(75), 1, + STATE(77), 1, sym_type_path, - STATE(283), 1, + STATE(198), 1, sym_type, ACTIONS(528), 2, anon_sym_STAR, anon_sym_AMP, - [5718] = 4, - ACTIONS(3), 1, - sym_comment, - STATE(3), 1, - aux_sym_path_expr_repeat1, - ACTIONS(13), 2, - anon_sym_LPAREN, - anon_sym_EQ_GT, - ACTIONS(17), 2, - anon_sym_COLON_COLON, - anon_sym_DOT, - [5733] = 5, + [5916] = 5, ACTIONS(3), 1, sym_comment, ACTIONS(526), 1, sym_identifier, - STATE(75), 1, + STATE(77), 1, sym_type_path, - STATE(239), 1, + STATE(93), 1, sym_type, ACTIONS(528), 2, anon_sym_STAR, anon_sym_AMP, - [5750] = 5, + [5933] = 5, ACTIONS(3), 1, sym_comment, ACTIONS(526), 1, sym_identifier, - STATE(75), 1, + STATE(77), 1, sym_type_path, - STATE(125), 1, + STATE(242), 1, sym_type, ACTIONS(528), 2, anon_sym_STAR, anon_sym_AMP, - [5767] = 5, + [5950] = 5, ACTIONS(3), 1, sym_comment, ACTIONS(526), 1, sym_identifier, - STATE(75), 1, + STATE(77), 1, sym_type_path, - 
STATE(234), 1, + STATE(129), 1, sym_type, ACTIONS(528), 2, anon_sym_STAR, anon_sym_AMP, - [5784] = 5, + [5967] = 5, ACTIONS(3), 1, sym_comment, ACTIONS(526), 1, sym_identifier, - STATE(75), 1, + STATE(77), 1, sym_type_path, - STATE(83), 1, + STATE(238), 1, sym_type, ACTIONS(528), 2, anon_sym_STAR, anon_sym_AMP, - [5801] = 5, - ACTIONS(3), 1, - sym_comment, - ACTIONS(534), 1, - anon_sym_pub, - ACTIONS(536), 1, - anon_sym_fn, - ACTIONS(548), 1, - anon_sym_RBRACE, - STATE(154), 2, - sym_method_decl, - aux_sym_impl_block_repeat1, - [5818] = 4, + [5984] = 5, ACTIONS(3), 1, sym_comment, - ACTIONS(334), 1, - anon_sym_LBRACE, ACTIONS(550), 1, - anon_sym_if, - STATE(103), 2, - sym_block, - sym_if_stmt, - [5832] = 5, - ACTIONS(3), 1, - sym_comment, - ACTIONS(552), 1, - sym_identifier, - ACTIONS(554), 1, - anon_sym_RBRACE, - STATE(204), 1, - sym_field, - STATE(248), 1, - sym_field_list, - [5848] = 5, - ACTIONS(3), 1, - sym_comment, - ACTIONS(556), 1, sym_identifier, - ACTIONS(558), 1, + ACTIONS(552), 1, anon_sym_RPAREN, - ACTIONS(560), 1, + ACTIONS(554), 1, sym_self_param, - STATE(194), 1, + STATE(205), 1, sym_param, - [5864] = 5, + [6000] = 5, ACTIONS(3), 1, sym_comment, - ACTIONS(556), 1, + ACTIONS(550), 1, sym_identifier, - ACTIONS(560), 1, + ACTIONS(554), 1, sym_self_param, - ACTIONS(562), 1, + ACTIONS(556), 1, anon_sym_RPAREN, - STATE(235), 1, + STATE(231), 1, sym_param, - [5880] = 5, + [6016] = 4, ACTIONS(3), 1, sym_comment, - ACTIONS(552), 1, + ACTIONS(380), 1, + anon_sym_LBRACE, + ACTIONS(558), 1, + anon_sym_if, + STATE(95), 2, + sym_block, + sym_if_stmt, + [6030] = 5, + ACTIONS(3), 1, + sym_comment, + ACTIONS(560), 1, sym_identifier, + ACTIONS(562), 1, + anon_sym_RBRACE, + STATE(189), 1, + sym_enum_variant, + STATE(266), 1, + sym_enum_variants, + [6046] = 5, + ACTIONS(3), 1, + sym_comment, ACTIONS(564), 1, + sym_identifier, + ACTIONS(566), 1, anon_sym_RBRACE, - STATE(204), 1, + STATE(213), 1, sym_field, - STATE(269), 1, + STATE(279), 1, sym_field_list, - [5896] = 5, 
+ [6062] = 5, ACTIONS(3), 1, sym_comment, - ACTIONS(566), 1, + ACTIONS(560), 1, sym_identifier, ACTIONS(568), 1, anon_sym_RBRACE, - STATE(213), 1, + STATE(189), 1, sym_enum_variant, - STATE(264), 1, + STATE(276), 1, sym_enum_variants, - [5912] = 5, + [6078] = 5, ACTIONS(3), 1, sym_comment, - ACTIONS(556), 1, + ACTIONS(564), 1, sym_identifier, - ACTIONS(560), 1, - sym_self_param, ACTIONS(570), 1, - anon_sym_RPAREN, - STATE(235), 1, - sym_param, - [5928] = 5, - ACTIONS(3), 1, - sym_comment, - ACTIONS(566), 1, - sym_identifier, - ACTIONS(572), 1, anon_sym_RBRACE, STATE(213), 1, - sym_enum_variant, - STATE(279), 1, - sym_enum_variants, - [5944] = 4, + sym_field, + STATE(270), 1, + sym_field_list, + [6094] = 5, ACTIONS(3), 1, sym_comment, - ACTIONS(360), 1, + ACTIONS(550), 1, + sym_identifier, + ACTIONS(554), 1, + sym_self_param, + ACTIONS(572), 1, anon_sym_RPAREN, - ACTIONS(574), 1, - anon_sym_COMMA, - STATE(180), 1, - aux_sym_arg_list_repeat1, - [5957] = 4, - ACTIONS(3), 1, - sym_comment, - ACTIONS(577), 1, - anon_sym_DASH_GT, - ACTIONS(579), 1, - anon_sym_LBRACE, - STATE(141), 1, - sym_block, - [5970] = 4, + STATE(231), 1, + sym_param, + [6110] = 4, ACTIONS(3), 1, sym_comment, - ACTIONS(581), 1, + ACTIONS(560), 1, sym_identifier, - ACTIONS(583), 1, + ACTIONS(574), 1, anon_sym_RBRACE, - STATE(227), 1, - sym_struct_field, - [5983] = 4, + STATE(236), 1, + sym_enum_variant, + [6123] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(583), 1, + ACTIONS(576), 3, + anon_sym_pub, + anon_sym_fn, anon_sym_RBRACE, - ACTIONS(585), 1, - anon_sym_COMMA, - STATE(190), 1, - aux_sym_struct_literal_repeat1, - [5996] = 3, + [6132] = 4, ACTIONS(3), 1, sym_comment, - ACTIONS(589), 1, + ACTIONS(556), 1, anon_sym_RPAREN, - ACTIONS(587), 2, - anon_sym__, - sym_identifier, - [6007] = 4, - ACTIONS(3), 1, - sym_comment, - ACTIONS(579), 1, - anon_sym_LBRACE, - ACTIONS(591), 1, - anon_sym_DASH_GT, - STATE(143), 1, - sym_block, - [6020] = 4, + ACTIONS(578), 1, + anon_sym_COMMA, + STATE(199), 1, + 
aux_sym_param_list_repeat1, + [6145] = 4, ACTIONS(3), 1, sym_comment, ACTIONS(245), 1, anon_sym_RPAREN, - ACTIONS(593), 1, + ACTIONS(580), 1, anon_sym_COMMA, - STATE(180), 1, + STATE(214), 1, aux_sym_arg_list_repeat1, - [6033] = 4, + [6158] = 4, ACTIONS(3), 1, sym_comment, - ACTIONS(595), 1, + ACTIONS(582), 1, + anon_sym_RBRACE, + ACTIONS(584), 1, anon_sym_COMMA, - ACTIONS(598), 1, - anon_sym_RPAREN, - STATE(187), 1, - aux_sym_param_list_repeat1, - [6046] = 4, + STATE(196), 1, + aux_sym_struct_literal_repeat1, + [6171] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(556), 1, - sym_identifier, - ACTIONS(560), 1, - sym_self_param, - STATE(235), 1, - sym_param, - [6059] = 4, + ACTIONS(586), 3, + anon_sym_pub, + anon_sym_fn, + anon_sym_RBRACE, + [6180] = 4, + ACTIONS(3), 1, + sym_comment, + ACTIONS(588), 1, + anon_sym_COMMA, + ACTIONS(591), 1, + anon_sym_RBRACK, + STATE(186), 1, + aux_sym_type_args_repeat1, + [6193] = 4, ACTIONS(3), 1, sym_comment, - ACTIONS(581), 1, + ACTIONS(560), 1, sym_identifier, - ACTIONS(600), 1, + ACTIONS(593), 1, anon_sym_RBRACE, - STATE(227), 1, - sym_struct_field, - [6072] = 4, + STATE(236), 1, + sym_enum_variant, + [6206] = 4, ACTIONS(3), 1, sym_comment, - ACTIONS(602), 1, + ACTIONS(593), 1, anon_sym_RBRACE, - ACTIONS(604), 1, + ACTIONS(595), 1, anon_sym_COMMA, - STATE(190), 1, - aux_sym_struct_literal_repeat1, - [6085] = 2, + STATE(210), 1, + aux_sym_enum_variants_repeat1, + [6219] = 4, ACTIONS(3), 1, sym_comment, - ACTIONS(607), 3, - anon_sym_pub, - anon_sym_fn, + ACTIONS(597), 1, anon_sym_RBRACE, - [6094] = 4, + ACTIONS(599), 1, + anon_sym_COMMA, + STATE(188), 1, + aux_sym_enum_variants_repeat1, + [6232] = 3, ACTIONS(3), 1, sym_comment, - ACTIONS(552), 1, + ACTIONS(603), 1, + anon_sym_RPAREN, + ACTIONS(601), 2, + anon_sym__, sym_identifier, - ACTIONS(609), 1, - anon_sym_RBRACE, - STATE(240), 1, - sym_field, - [6107] = 2, + [6243] = 3, ACTIONS(3), 1, sym_comment, - ACTIONS(611), 3, - anon_sym_pub, - anon_sym_fn, + ACTIONS(607), 1, + 
anon_sym_LPAREN, + ACTIONS(605), 2, anon_sym_RBRACE, - [6116] = 4, - ACTIONS(3), 1, - sym_comment, - ACTIONS(613), 1, anon_sym_COMMA, - ACTIONS(615), 1, - anon_sym_RPAREN, - STATE(199), 1, - aux_sym_param_list_repeat1, - [6129] = 4, + [6254] = 4, ACTIONS(3), 1, sym_comment, ACTIONS(609), 1, + sym_identifier, + ACTIONS(611), 1, anon_sym_RBRACE, - ACTIONS(617), 1, - anon_sym_COMMA, - STATE(202), 1, - aux_sym_field_list_repeat1, - [6142] = 4, + STATE(224), 1, + sym_struct_field, + [6267] = 4, ACTIONS(3), 1, sym_comment, - ACTIONS(619), 1, + ACTIONS(613), 1, anon_sym_RBRACE, - ACTIONS(621), 1, + ACTIONS(615), 1, anon_sym_COMMA, - STATE(183), 1, + STATE(193), 1, aux_sym_struct_literal_repeat1, - [6155] = 4, + [6280] = 4, ACTIONS(3), 1, sym_comment, - ACTIONS(552), 1, - sym_identifier, - ACTIONS(623), 1, - anon_sym_RBRACE, - STATE(240), 1, - sym_field, - [6168] = 2, + ACTIONS(618), 1, + anon_sym_DASH_GT, + ACTIONS(620), 1, + anon_sym_LBRACE, + STATE(133), 1, + sym_block, + [6293] = 4, ACTIONS(3), 1, sym_comment, - ACTIONS(625), 3, - anon_sym_pub, - anon_sym_fn, - anon_sym_RBRACE, - [6177] = 4, + ACTIONS(620), 1, + anon_sym_LBRACE, + ACTIONS(622), 1, + anon_sym_DASH_GT, + STATE(145), 1, + sym_block, + [6306] = 4, ACTIONS(3), 1, sym_comment, - ACTIONS(562), 1, - anon_sym_RPAREN, - ACTIONS(627), 1, + ACTIONS(624), 1, + anon_sym_RBRACE, + ACTIONS(626), 1, anon_sym_COMMA, - STATE(187), 1, - aux_sym_param_list_repeat1, - [6190] = 2, + STATE(193), 1, + aux_sym_struct_literal_repeat1, + [6319] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(629), 3, + ACTIONS(628), 3, anon_sym_pub, anon_sym_fn, anon_sym_RBRACE, - [6199] = 4, + [6328] = 4, ACTIONS(3), 1, sym_comment, - ACTIONS(532), 1, - anon_sym_RBRACK, - ACTIONS(631), 1, + ACTIONS(630), 1, anon_sym_COMMA, + ACTIONS(632), 1, + anon_sym_RBRACK, STATE(203), 1, aux_sym_type_args_repeat1, - [6212] = 4, + [6341] = 4, ACTIONS(3), 1, sym_comment, - ACTIONS(633), 1, - anon_sym_RBRACE, - ACTIONS(635), 1, + ACTIONS(634), 1, anon_sym_COMMA, - 
STATE(202), 1, - aux_sym_field_list_repeat1, - [6225] = 4, + ACTIONS(637), 1, + anon_sym_RPAREN, + STATE(199), 1, + aux_sym_param_list_repeat1, + [6354] = 4, ACTIONS(3), 1, sym_comment, - ACTIONS(638), 1, - anon_sym_COMMA, - ACTIONS(641), 1, - anon_sym_RBRACK, - STATE(203), 1, - aux_sym_type_args_repeat1, - [6238] = 4, + ACTIONS(609), 1, + sym_identifier, + ACTIONS(639), 1, + anon_sym_RBRACE, + STATE(184), 1, + sym_struct_field, + [6367] = 2, ACTIONS(3), 1, sym_comment, + ACTIONS(641), 3, + anon_sym_pub, + anon_sym_fn, + anon_sym_RBRACE, + [6376] = 4, + ACTIONS(3), 1, + sym_comment, + ACTIONS(564), 1, + sym_identifier, ACTIONS(643), 1, anon_sym_RBRACE, + STATE(229), 1, + sym_field, + [6389] = 4, + ACTIONS(3), 1, + sym_comment, + ACTIONS(532), 1, + anon_sym_RBRACK, ACTIONS(645), 1, anon_sym_COMMA, - STATE(195), 1, - aux_sym_field_list_repeat1, - [6251] = 4, + STATE(186), 1, + aux_sym_type_args_repeat1, + [6402] = 4, ACTIONS(3), 1, sym_comment, - ACTIONS(566), 1, + ACTIONS(564), 1, sym_identifier, ACTIONS(647), 1, anon_sym_RBRACE, - STATE(242), 1, - sym_enum_variant, - [6264] = 3, + STATE(229), 1, + sym_field, + [6415] = 4, ACTIONS(3), 1, sym_comment, - ACTIONS(651), 1, - anon_sym_LPAREN, - ACTIONS(649), 2, - anon_sym_RBRACE, + ACTIONS(649), 1, anon_sym_COMMA, - [6275] = 4, + ACTIONS(651), 1, + anon_sym_RPAREN, + STATE(182), 1, + aux_sym_param_list_repeat1, + [6428] = 4, ACTIONS(3), 1, sym_comment, - ACTIONS(647), 1, + ACTIONS(643), 1, anon_sym_RBRACE, ACTIONS(653), 1, anon_sym_COMMA, - STATE(211), 1, - aux_sym_enum_variants_repeat1, - [6288] = 4, + STATE(207), 1, + aux_sym_field_list_repeat1, + [6441] = 4, ACTIONS(3), 1, sym_comment, ACTIONS(655), 1, - anon_sym_COMMA, + anon_sym_RBRACE, ACTIONS(657), 1, - anon_sym_RBRACK, - STATE(201), 1, - aux_sym_type_args_repeat1, - [6301] = 4, + anon_sym_COMMA, + STATE(207), 1, + aux_sym_field_list_repeat1, + [6454] = 4, ACTIONS(3), 1, sym_comment, - ACTIONS(581), 1, - sym_identifier, - ACTIONS(659), 1, - anon_sym_RBRACE, - 
STATE(196), 1, - sym_struct_field, - [6314] = 4, + ACTIONS(620), 1, + anon_sym_LBRACE, + ACTIONS(660), 1, + anon_sym_DASH_GT, + STATE(185), 1, + sym_block, + [6467] = 4, ACTIONS(3), 1, sym_comment, - ACTIONS(566), 1, + ACTIONS(550), 1, sym_identifier, - ACTIONS(661), 1, - anon_sym_RBRACE, - STATE(242), 1, - sym_enum_variant, - [6327] = 4, + ACTIONS(554), 1, + sym_self_param, + STATE(231), 1, + sym_param, + [6480] = 4, ACTIONS(3), 1, sym_comment, - ACTIONS(663), 1, + ACTIONS(662), 1, anon_sym_RBRACE, - ACTIONS(665), 1, + ACTIONS(664), 1, anon_sym_COMMA, - STATE(211), 1, + STATE(210), 1, aux_sym_enum_variants_repeat1, - [6340] = 4, + [6493] = 4, ACTIONS(3), 1, sym_comment, - ACTIONS(579), 1, + ACTIONS(620), 1, anon_sym_LBRACE, - ACTIONS(668), 1, + ACTIONS(667), 1, anon_sym_DASH_GT, - STATE(200), 1, + STATE(181), 1, sym_block, - [6353] = 4, + [6506] = 4, ACTIONS(3), 1, sym_comment, - ACTIONS(670), 1, + ACTIONS(609), 1, + sym_identifier, + ACTIONS(624), 1, anon_sym_RBRACE, - ACTIONS(672), 1, - anon_sym_COMMA, - STATE(207), 1, - aux_sym_enum_variants_repeat1, - [6366] = 4, + STATE(224), 1, + sym_struct_field, + [6519] = 4, ACTIONS(3), 1, sym_comment, - ACTIONS(579), 1, - anon_sym_LBRACE, - ACTIONS(674), 1, - anon_sym_DASH_GT, - STATE(191), 1, - sym_block, - [6379] = 3, + ACTIONS(669), 1, + anon_sym_RBRACE, + ACTIONS(671), 1, + anon_sym_COMMA, + STATE(206), 1, + aux_sym_field_list_repeat1, + [6532] = 4, ACTIONS(3), 1, sym_comment, - ACTIONS(579), 1, - anon_sym_LBRACE, - STATE(198), 1, - sym_block, - [6389] = 3, + ACTIONS(398), 1, + anon_sym_RPAREN, + ACTIONS(673), 1, + anon_sym_COMMA, + STATE(214), 1, + aux_sym_arg_list_repeat1, + [6545] = 3, ACTIONS(3), 1, sym_comment, - ACTIONS(676), 1, + ACTIONS(560), 1, sym_identifier, - STATE(127), 1, - sym_module_path, - [6399] = 3, + STATE(236), 1, + sym_enum_variant, + [6555] = 3, ACTIONS(3), 1, sym_comment, - ACTIONS(678), 1, + ACTIONS(676), 1, anon_sym_LPAREN, - STATE(214), 1, + STATE(121), 1, sym_param_list, - [6409] = 3, + 
[6565] = 3, ACTIONS(3), 1, sym_comment, - ACTIONS(680), 1, + ACTIONS(678), 1, anon_sym_LPAREN, - ACTIONS(682), 1, + ACTIONS(680), 1, anon_sym_EQ_GT, - [6419] = 3, - ACTIONS(3), 1, - sym_comment, - ACTIONS(684), 1, - anon_sym_COLON, - ACTIONS(686), 1, - anon_sym_EQ, - [6429] = 2, + [6575] = 3, ACTIONS(3), 1, sym_comment, - ACTIONS(641), 2, - anon_sym_COMMA, - anon_sym_RBRACK, - [6437] = 3, + ACTIONS(682), 1, + sym_identifier, + STATE(127), 1, + sym_module_path, + [6585] = 3, ACTIONS(3), 1, sym_comment, - ACTIONS(579), 1, - anon_sym_LBRACE, - STATE(142), 1, - sym_block, - [6447] = 3, + ACTIONS(682), 1, + sym_identifier, + STATE(122), 1, + sym_module_path, + [6595] = 3, ACTIONS(3), 1, sym_comment, - ACTIONS(579), 1, - anon_sym_LBRACE, - STATE(132), 1, - sym_block, - [6457] = 3, + ACTIONS(676), 1, + anon_sym_LPAREN, + STATE(195), 1, + sym_param_list, + [6605] = 3, ACTIONS(3), 1, sym_comment, - ACTIONS(579), 1, + ACTIONS(620), 1, anon_sym_LBRACE, - STATE(193), 1, + STATE(197), 1, sym_block, - [6467] = 3, - ACTIONS(3), 1, - sym_comment, - ACTIONS(678), 1, - anon_sym_LPAREN, - STATE(121), 1, - sym_param_list, - [6477] = 3, + [6615] = 3, ACTIONS(3), 1, sym_comment, - ACTIONS(334), 1, + ACTIONS(380), 1, anon_sym_LBRACE, - STATE(147), 1, + STATE(128), 1, sym_block, - [6487] = 3, + [6625] = 3, ACTIONS(3), 1, sym_comment, - ACTIONS(678), 1, + ACTIONS(676), 1, anon_sym_LPAREN, - STATE(181), 1, + STATE(211), 1, sym_param_list, - [6497] = 2, + [6635] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(602), 2, + ACTIONS(613), 2, anon_sym_RBRACE, anon_sym_COMMA, - [6505] = 3, + [6643] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(7), 1, - anon_sym_module, - STATE(69), 1, - sym_module_decl, - [6515] = 3, + ACTIONS(684), 2, + anon_sym_RBRACE, + anon_sym_COMMA, + [6651] = 3, ACTIONS(3), 1, sym_comment, - ACTIONS(678), 1, + ACTIONS(676), 1, anon_sym_LPAREN, - STATE(185), 1, + STATE(120), 1, sym_param_list, - [6525] = 2, + [6661] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(688), 2, + ACTIONS(686), 
2, anon_sym_safe, anon_sym_unsafe, - [6533] = 3, + [6669] = 3, ACTIONS(3), 1, sym_comment, - ACTIONS(581), 1, - sym_identifier, - STATE(227), 1, - sym_struct_field, - [6543] = 3, + ACTIONS(620), 1, + anon_sym_LBRACE, + STATE(201), 1, + sym_block, + [6679] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(676), 1, - sym_identifier, - STATE(122), 1, - sym_module_path, - [6553] = 3, + ACTIONS(655), 2, + anon_sym_RBRACE, + anon_sym_COMMA, + [6687] = 3, ACTIONS(3), 1, sym_comment, - ACTIONS(678), 1, + ACTIONS(676), 1, anon_sym_LPAREN, - STATE(120), 1, + STATE(194), 1, sym_param_list, - [6563] = 2, + [6697] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(690), 2, + ACTIONS(637), 2, anon_sym_COMMA, anon_sym_RPAREN, - [6571] = 2, + [6705] = 3, ACTIONS(3), 1, sym_comment, - ACTIONS(598), 2, + ACTIONS(609), 1, + sym_identifier, + STATE(224), 1, + sym_struct_field, + [6715] = 2, + ACTIONS(3), 1, + sym_comment, + ACTIONS(688), 2, anon_sym_COMMA, anon_sym_RPAREN, - [6579] = 3, + [6723] = 3, ACTIONS(3), 1, sym_comment, - ACTIONS(552), 1, + ACTIONS(564), 1, sym_identifier, - STATE(240), 1, + STATE(229), 1, sym_field, - [6589] = 2, + [6733] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(692), 2, + ACTIONS(690), 2, anon_sym_RBRACE, anon_sym_COMMA, - [6597] = 3, - ACTIONS(3), 1, - sym_comment, - ACTIONS(566), 1, - sym_identifier, - STATE(242), 1, - sym_enum_variant, - [6607] = 2, + [6741] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(694), 2, + ACTIONS(662), 2, anon_sym_RBRACE, anon_sym_COMMA, - [6615] = 2, + [6749] = 3, ACTIONS(3), 1, sym_comment, - ACTIONS(633), 2, - anon_sym_RBRACE, - anon_sym_COMMA, - [6623] = 3, + ACTIONS(7), 1, + anon_sym_module, + STATE(67), 1, + sym_module_decl, + [6759] = 3, ACTIONS(3), 1, sym_comment, - ACTIONS(678), 1, + ACTIONS(620), 1, + anon_sym_LBRACE, + STATE(136), 1, + sym_block, + [6769] = 3, + ACTIONS(3), 1, + sym_comment, + ACTIONS(692), 1, + anon_sym_COLON, + ACTIONS(694), 1, + anon_sym_EQ, + [6779] = 3, + ACTIONS(3), 1, + sym_comment, + ACTIONS(676), 1, 
anon_sym_LPAREN, - STATE(212), 1, + STATE(208), 1, sym_param_list, - [6633] = 2, + [6789] = 2, ACTIONS(3), 1, sym_comment, - ACTIONS(663), 2, - anon_sym_RBRACE, + ACTIONS(696), 2, anon_sym_COMMA, - [6641] = 2, + anon_sym_RPAREN, + [6797] = 3, ACTIONS(3), 1, sym_comment, - ACTIONS(696), 2, + ACTIONS(620), 1, + anon_sym_LBRACE, + STATE(132), 1, + sym_block, + [6807] = 2, + ACTIONS(3), 1, + sym_comment, + ACTIONS(591), 2, anon_sym_COMMA, - anon_sym_RPAREN, - [6649] = 2, + anon_sym_RBRACK, + [6815] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(698), 1, - anon_sym_EQ_GT, - [6656] = 2, + sym_identifier, + [6822] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(700), 1, - anon_sym_LBRACE, - [6663] = 2, + anon_sym_COLON, + [6829] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(702), 1, - anon_sym_LBRACE, - [6670] = 2, + sym_identifier, + [6836] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(704), 1, - anon_sym_LBRACE, - [6677] = 2, + ts_builtin_sym_end, + [6843] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(706), 1, - anon_sym_RBRACE, - [6684] = 2, + sym_identifier, + [6850] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(708), 1, - anon_sym_COLON, - [6691] = 2, + anon_sym_EQ_GT, + [6857] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(710), 1, - anon_sym_COLON, - [6698] = 2, + sym_identifier, + [6864] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(712), 1, - sym_identifier, - [6705] = 2, + anon_sym_LBRACE, + [6871] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(714), 1, - sym_identifier, - [6712] = 2, + anon_sym_LBRACE, + [6878] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(716), 1, sym_identifier, - [6719] = 2, + [6885] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(718), 1, - sym_identifier, - [6726] = 2, + anon_sym_fn, + [6892] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(720), 1, anon_sym_EQ_GT, - [6733] = 2, + [6899] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(722), 1, anon_sym_RPAREN, - [6740] = 2, + [6906] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(724), 1, anon_sym_LBRACE, - [6747] = 2, + [6913] = 2, ACTIONS(3), 1, sym_comment, 
ACTIONS(726), 1, sym_identifier, - [6754] = 2, + [6920] = 2, + ACTIONS(3), 1, + sym_comment, + ACTIONS(680), 1, + anon_sym_EQ_GT, + [6927] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(728), 1, sym_identifier, - [6761] = 2, + [6934] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(730), 1, - anon_sym_LBRACE, - [6768] = 2, + anon_sym_fn, + [6941] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(732), 1, - anon_sym_module, - [6775] = 2, + sym_identifier, + [6948] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(734), 1, anon_sym_EQ_GT, - [6782] = 2, + [6955] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(736), 1, - sym_identifier, - [6789] = 2, + anon_sym_LBRACE, + [6962] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(738), 1, - anon_sym_RBRACE, - [6796] = 2, + anon_sym_LBRACE, + [6969] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(740), 1, - anon_sym_fn, - [6803] = 2, + anon_sym_RBRACE, + [6976] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(742), 1, - sym_identifier, - [6810] = 2, + anon_sym_RPAREN, + [6983] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(744), 1, - sym_identifier, - [6817] = 2, + anon_sym_module, + [6990] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(746), 1, - anon_sym_COLON, - [6824] = 2, + sym_identifier, + [6997] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(748), 1, anon_sym_RBRACE, - [6831] = 2, + [7004] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(750), 1, - anon_sym_fn, - [6838] = 2, + sym_identifier, + [7011] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(752), 1, - ts_builtin_sym_end, - [6845] = 2, + anon_sym_COLON, + [7018] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(754), 1, - sym_identifier, - [6852] = 2, + anon_sym_fn, + [7025] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(756), 1, sym_identifier, - [6859] = 2, + [7032] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(758), 1, sym_identifier, - [6866] = 2, + [7039] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(760), 1, - sym_identifier, - [6873] = 2, + anon_sym_RBRACE, + [7046] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(762), 1, - anon_sym_RPAREN, - [6880] = 2, - ACTIONS(3), 1, - 
sym_comment, - ACTIONS(682), 1, - anon_sym_EQ_GT, - [6887] = 2, + sym_identifier, + [7053] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(764), 1, anon_sym_EQ, - [6894] = 2, + [7060] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(766), 1, anon_sym_RBRACE, - [6901] = 2, + [7067] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(768), 1, sym_identifier, - [6908] = 2, + [7074] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(770), 1, - anon_sym_fn, - [6915] = 2, + anon_sym_LBRACE, + [7081] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(772), 1, sym_identifier, - [6922] = 2, + [7088] = 2, ACTIONS(3), 1, sym_comment, ACTIONS(774), 1, - anon_sym_LBRACE, + anon_sym_COLON, }; static const uint32_t ts_small_parse_table_map[] = { [SMALL_STATE(2)] = 0, - [SMALL_STATE(3)] = 54, - [SMALL_STATE(4)] = 102, - [SMALL_STATE(5)] = 150, - [SMALL_STATE(6)] = 221, - [SMALL_STATE(7)] = 292, - [SMALL_STATE(8)] = 363, - [SMALL_STATE(9)] = 406, - [SMALL_STATE(10)] = 461, - [SMALL_STATE(11)] = 532, - [SMALL_STATE(12)] = 603, - [SMALL_STATE(13)] = 649, - [SMALL_STATE(14)] = 696, - [SMALL_STATE(15)] = 737, - [SMALL_STATE(16)] = 784, - [SMALL_STATE(17)] = 824, - [SMALL_STATE(18)] = 864, - [SMALL_STATE(19)] = 920, - [SMALL_STATE(20)] = 978, - [SMALL_STATE(21)] = 1018, - [SMALL_STATE(22)] = 1058, - [SMALL_STATE(23)] = 1098, - [SMALL_STATE(24)] = 1150, - [SMALL_STATE(25)] = 1200, - [SMALL_STATE(26)] = 1260, - [SMALL_STATE(27)] = 1300, - [SMALL_STATE(28)] = 1340, - [SMALL_STATE(29)] = 1380, - [SMALL_STATE(30)] = 1420, - [SMALL_STATE(31)] = 1460, - [SMALL_STATE(32)] = 1500, - [SMALL_STATE(33)] = 1540, - [SMALL_STATE(34)] = 1580, - [SMALL_STATE(35)] = 1620, - [SMALL_STATE(36)] = 1682, - [SMALL_STATE(37)] = 1744, - [SMALL_STATE(38)] = 1806, - [SMALL_STATE(39)] = 1868, - [SMALL_STATE(40)] = 1930, - [SMALL_STATE(41)] = 1986, - [SMALL_STATE(42)] = 2033, - [SMALL_STATE(43)] = 2080, - [SMALL_STATE(44)] = 2127, - [SMALL_STATE(45)] = 2171, - [SMALL_STATE(46)] = 2221, - [SMALL_STATE(47)] = 2265, - [SMALL_STATE(48)] = 2315, - [SMALL_STATE(49)] 
= 2359, - [SMALL_STATE(50)] = 2403, - [SMALL_STATE(51)] = 2447, - [SMALL_STATE(52)] = 2491, - [SMALL_STATE(53)] = 2535, - [SMALL_STATE(54)] = 2579, - [SMALL_STATE(55)] = 2623, - [SMALL_STATE(56)] = 2667, - [SMALL_STATE(57)] = 2711, - [SMALL_STATE(58)] = 2755, - [SMALL_STATE(59)] = 2799, - [SMALL_STATE(60)] = 2843, - [SMALL_STATE(61)] = 2893, - [SMALL_STATE(62)] = 2937, - [SMALL_STATE(63)] = 2981, - [SMALL_STATE(64)] = 3025, - [SMALL_STATE(65)] = 3069, - [SMALL_STATE(66)] = 3113, - [SMALL_STATE(67)] = 3157, - [SMALL_STATE(68)] = 3201, - [SMALL_STATE(69)] = 3233, - [SMALL_STATE(70)] = 3283, - [SMALL_STATE(71)] = 3327, - [SMALL_STATE(72)] = 3371, - [SMALL_STATE(73)] = 3402, - [SMALL_STATE(74)] = 3433, - [SMALL_STATE(75)] = 3460, - [SMALL_STATE(76)] = 3490, - [SMALL_STATE(77)] = 3517, - [SMALL_STATE(78)] = 3560, - [SMALL_STATE(79)] = 3603, - [SMALL_STATE(80)] = 3646, - [SMALL_STATE(81)] = 3689, - [SMALL_STATE(82)] = 3716, - [SMALL_STATE(83)] = 3753, - [SMALL_STATE(84)] = 3777, - [SMALL_STATE(85)] = 3801, - [SMALL_STATE(86)] = 3825, - [SMALL_STATE(87)] = 3849, - [SMALL_STATE(88)] = 3899, - [SMALL_STATE(89)] = 3946, - [SMALL_STATE(90)] = 3973, - [SMALL_STATE(91)] = 4020, - [SMALL_STATE(92)] = 4047, - [SMALL_STATE(93)] = 4092, - [SMALL_STATE(94)] = 4137, - [SMALL_STATE(95)] = 4161, - [SMALL_STATE(96)] = 4199, - [SMALL_STATE(97)] = 4235, - [SMALL_STATE(98)] = 4279, - [SMALL_STATE(99)] = 4303, - [SMALL_STATE(100)] = 4327, - [SMALL_STATE(101)] = 4351, - [SMALL_STATE(102)] = 4375, - [SMALL_STATE(103)] = 4399, - [SMALL_STATE(104)] = 4423, - [SMALL_STATE(105)] = 4447, - [SMALL_STATE(106)] = 4491, - [SMALL_STATE(107)] = 4515, - [SMALL_STATE(108)] = 4549, - [SMALL_STATE(109)] = 4589, - [SMALL_STATE(110)] = 4631, - [SMALL_STATE(111)] = 4670, - [SMALL_STATE(112)] = 4709, - [SMALL_STATE(113)] = 4748, - [SMALL_STATE(114)] = 4772, - [SMALL_STATE(115)] = 4796, - [SMALL_STATE(116)] = 4816, - [SMALL_STATE(117)] = 4836, - [SMALL_STATE(118)] = 4856, - [SMALL_STATE(119)] = 4880, - 
[SMALL_STATE(120)] = 4900, - [SMALL_STATE(121)] = 4923, - [SMALL_STATE(122)] = 4946, - [SMALL_STATE(123)] = 4964, - [SMALL_STATE(124)] = 4982, - [SMALL_STATE(125)] = 5000, - [SMALL_STATE(126)] = 5020, - [SMALL_STATE(127)] = 5040, - [SMALL_STATE(128)] = 5058, - [SMALL_STATE(129)] = 5075, - [SMALL_STATE(130)] = 5092, - [SMALL_STATE(131)] = 5109, - [SMALL_STATE(132)] = 5126, - [SMALL_STATE(133)] = 5143, - [SMALL_STATE(134)] = 5160, - [SMALL_STATE(135)] = 5177, - [SMALL_STATE(136)] = 5194, - [SMALL_STATE(137)] = 5211, - [SMALL_STATE(138)] = 5228, - [SMALL_STATE(139)] = 5245, - [SMALL_STATE(140)] = 5262, - [SMALL_STATE(141)] = 5279, - [SMALL_STATE(142)] = 5296, - [SMALL_STATE(143)] = 5313, - [SMALL_STATE(144)] = 5330, - [SMALL_STATE(145)] = 5347, - [SMALL_STATE(146)] = 5364, - [SMALL_STATE(147)] = 5384, - [SMALL_STATE(148)] = 5402, - [SMALL_STATE(149)] = 5423, - [SMALL_STATE(150)] = 5440, - [SMALL_STATE(151)] = 5457, - [SMALL_STATE(152)] = 5474, - [SMALL_STATE(153)] = 5494, - [SMALL_STATE(154)] = 5514, - [SMALL_STATE(155)] = 5531, - [SMALL_STATE(156)] = 5548, - [SMALL_STATE(157)] = 5565, - [SMALL_STATE(158)] = 5582, - [SMALL_STATE(159)] = 5599, - [SMALL_STATE(160)] = 5616, - [SMALL_STATE(161)] = 5633, - [SMALL_STATE(162)] = 5650, - [SMALL_STATE(163)] = 5667, - [SMALL_STATE(164)] = 5684, - [SMALL_STATE(165)] = 5701, - [SMALL_STATE(166)] = 5718, - [SMALL_STATE(167)] = 5733, - [SMALL_STATE(168)] = 5750, - [SMALL_STATE(169)] = 5767, - [SMALL_STATE(170)] = 5784, - [SMALL_STATE(171)] = 5801, - [SMALL_STATE(172)] = 5818, - [SMALL_STATE(173)] = 5832, - [SMALL_STATE(174)] = 5848, - [SMALL_STATE(175)] = 5864, - [SMALL_STATE(176)] = 5880, - [SMALL_STATE(177)] = 5896, - [SMALL_STATE(178)] = 5912, - [SMALL_STATE(179)] = 5928, - [SMALL_STATE(180)] = 5944, - [SMALL_STATE(181)] = 5957, - [SMALL_STATE(182)] = 5970, - [SMALL_STATE(183)] = 5983, - [SMALL_STATE(184)] = 5996, - [SMALL_STATE(185)] = 6007, - [SMALL_STATE(186)] = 6020, - [SMALL_STATE(187)] = 6033, - [SMALL_STATE(188)] = 6046, 
- [SMALL_STATE(189)] = 6059, - [SMALL_STATE(190)] = 6072, - [SMALL_STATE(191)] = 6085, - [SMALL_STATE(192)] = 6094, - [SMALL_STATE(193)] = 6107, - [SMALL_STATE(194)] = 6116, - [SMALL_STATE(195)] = 6129, - [SMALL_STATE(196)] = 6142, - [SMALL_STATE(197)] = 6155, - [SMALL_STATE(198)] = 6168, - [SMALL_STATE(199)] = 6177, - [SMALL_STATE(200)] = 6190, - [SMALL_STATE(201)] = 6199, - [SMALL_STATE(202)] = 6212, - [SMALL_STATE(203)] = 6225, - [SMALL_STATE(204)] = 6238, - [SMALL_STATE(205)] = 6251, - [SMALL_STATE(206)] = 6264, - [SMALL_STATE(207)] = 6275, - [SMALL_STATE(208)] = 6288, - [SMALL_STATE(209)] = 6301, - [SMALL_STATE(210)] = 6314, - [SMALL_STATE(211)] = 6327, - [SMALL_STATE(212)] = 6340, - [SMALL_STATE(213)] = 6353, - [SMALL_STATE(214)] = 6366, - [SMALL_STATE(215)] = 6379, - [SMALL_STATE(216)] = 6389, - [SMALL_STATE(217)] = 6399, - [SMALL_STATE(218)] = 6409, - [SMALL_STATE(219)] = 6419, - [SMALL_STATE(220)] = 6429, - [SMALL_STATE(221)] = 6437, - [SMALL_STATE(222)] = 6447, - [SMALL_STATE(223)] = 6457, - [SMALL_STATE(224)] = 6467, - [SMALL_STATE(225)] = 6477, - [SMALL_STATE(226)] = 6487, - [SMALL_STATE(227)] = 6497, - [SMALL_STATE(228)] = 6505, - [SMALL_STATE(229)] = 6515, - [SMALL_STATE(230)] = 6525, - [SMALL_STATE(231)] = 6533, - [SMALL_STATE(232)] = 6543, - [SMALL_STATE(233)] = 6553, - [SMALL_STATE(234)] = 6563, - [SMALL_STATE(235)] = 6571, - [SMALL_STATE(236)] = 6579, - [SMALL_STATE(237)] = 6589, - [SMALL_STATE(238)] = 6597, - [SMALL_STATE(239)] = 6607, - [SMALL_STATE(240)] = 6615, - [SMALL_STATE(241)] = 6623, - [SMALL_STATE(242)] = 6633, - [SMALL_STATE(243)] = 6641, - [SMALL_STATE(244)] = 6649, - [SMALL_STATE(245)] = 6656, - [SMALL_STATE(246)] = 6663, - [SMALL_STATE(247)] = 6670, - [SMALL_STATE(248)] = 6677, - [SMALL_STATE(249)] = 6684, - [SMALL_STATE(250)] = 6691, - [SMALL_STATE(251)] = 6698, - [SMALL_STATE(252)] = 6705, - [SMALL_STATE(253)] = 6712, - [SMALL_STATE(254)] = 6719, - [SMALL_STATE(255)] = 6726, - [SMALL_STATE(256)] = 6733, - [SMALL_STATE(257)] = 
6740, - [SMALL_STATE(258)] = 6747, - [SMALL_STATE(259)] = 6754, - [SMALL_STATE(260)] = 6761, - [SMALL_STATE(261)] = 6768, - [SMALL_STATE(262)] = 6775, - [SMALL_STATE(263)] = 6782, - [SMALL_STATE(264)] = 6789, - [SMALL_STATE(265)] = 6796, - [SMALL_STATE(266)] = 6803, - [SMALL_STATE(267)] = 6810, - [SMALL_STATE(268)] = 6817, - [SMALL_STATE(269)] = 6824, - [SMALL_STATE(270)] = 6831, - [SMALL_STATE(271)] = 6838, - [SMALL_STATE(272)] = 6845, - [SMALL_STATE(273)] = 6852, - [SMALL_STATE(274)] = 6859, - [SMALL_STATE(275)] = 6866, - [SMALL_STATE(276)] = 6873, - [SMALL_STATE(277)] = 6880, - [SMALL_STATE(278)] = 6887, - [SMALL_STATE(279)] = 6894, - [SMALL_STATE(280)] = 6901, - [SMALL_STATE(281)] = 6908, - [SMALL_STATE(282)] = 6915, - [SMALL_STATE(283)] = 6922, + [SMALL_STATE(3)] = 56, + [SMALL_STATE(4)] = 106, + [SMALL_STATE(5)] = 156, + [SMALL_STATE(6)] = 229, + [SMALL_STATE(7)] = 302, + [SMALL_STATE(8)] = 375, + [SMALL_STATE(9)] = 420, + [SMALL_STATE(10)] = 477, + [SMALL_STATE(11)] = 550, + [SMALL_STATE(12)] = 623, + [SMALL_STATE(13)] = 671, + [SMALL_STATE(14)] = 720, + [SMALL_STATE(15)] = 763, + [SMALL_STATE(16)] = 812, + [SMALL_STATE(17)] = 854, + [SMALL_STATE(18)] = 896, + [SMALL_STATE(19)] = 954, + [SMALL_STATE(20)] = 1014, + [SMALL_STATE(21)] = 1056, + [SMALL_STATE(22)] = 1098, + [SMALL_STATE(23)] = 1140, + [SMALL_STATE(24)] = 1194, + [SMALL_STATE(25)] = 1246, + [SMALL_STATE(26)] = 1308, + [SMALL_STATE(27)] = 1350, + [SMALL_STATE(28)] = 1392, + [SMALL_STATE(29)] = 1434, + [SMALL_STATE(30)] = 1476, + [SMALL_STATE(31)] = 1518, + [SMALL_STATE(32)] = 1560, + [SMALL_STATE(33)] = 1602, + [SMALL_STATE(34)] = 1644, + [SMALL_STATE(35)] = 1686, + [SMALL_STATE(36)] = 1750, + [SMALL_STATE(37)] = 1814, + [SMALL_STATE(38)] = 1878, + [SMALL_STATE(39)] = 1942, + [SMALL_STATE(40)] = 2006, + [SMALL_STATE(41)] = 2064, + [SMALL_STATE(42)] = 2113, + [SMALL_STATE(43)] = 2162, + [SMALL_STATE(44)] = 2211, + [SMALL_STATE(45)] = 2257, + [SMALL_STATE(46)] = 2303, + [SMALL_STATE(47)] = 2349, + 
[SMALL_STATE(48)] = 2395, + [SMALL_STATE(49)] = 2441, + [SMALL_STATE(50)] = 2487, + [SMALL_STATE(51)] = 2533, + [SMALL_STATE(52)] = 2579, + [SMALL_STATE(53)] = 2625, + [SMALL_STATE(54)] = 2671, + [SMALL_STATE(55)] = 2717, + [SMALL_STATE(56)] = 2763, + [SMALL_STATE(57)] = 2809, + [SMALL_STATE(58)] = 2855, + [SMALL_STATE(59)] = 2901, + [SMALL_STATE(60)] = 2947, + [SMALL_STATE(61)] = 2993, + [SMALL_STATE(62)] = 3039, + [SMALL_STATE(63)] = 3085, + [SMALL_STATE(64)] = 3131, + [SMALL_STATE(65)] = 3177, + [SMALL_STATE(66)] = 3223, + [SMALL_STATE(67)] = 3269, + [SMALL_STATE(68)] = 3319, + [SMALL_STATE(69)] = 3369, + [SMALL_STATE(70)] = 3419, + [SMALL_STATE(71)] = 3451, + [SMALL_STATE(72)] = 3501, + [SMALL_STATE(73)] = 3530, + [SMALL_STATE(74)] = 3561, + [SMALL_STATE(75)] = 3590, + [SMALL_STATE(76)] = 3621, + [SMALL_STATE(77)] = 3648, + [SMALL_STATE(78)] = 3678, + [SMALL_STATE(79)] = 3721, + [SMALL_STATE(80)] = 3758, + [SMALL_STATE(81)] = 3801, + [SMALL_STATE(82)] = 3830, + [SMALL_STATE(83)] = 3873, + [SMALL_STATE(84)] = 3916, + [SMALL_STATE(85)] = 3942, + [SMALL_STATE(86)] = 3966, + [SMALL_STATE(87)] = 3992, + [SMALL_STATE(88)] = 4016, + [SMALL_STATE(89)] = 4042, + [SMALL_STATE(90)] = 4068, + [SMALL_STATE(91)] = 4094, + [SMALL_STATE(92)] = 4144, + [SMALL_STATE(93)] = 4168, + [SMALL_STATE(94)] = 4192, + [SMALL_STATE(95)] = 4218, + [SMALL_STATE(96)] = 4244, + [SMALL_STATE(97)] = 4270, + [SMALL_STATE(98)] = 4296, + [SMALL_STATE(99)] = 4337, + [SMALL_STATE(100)] = 4378, + [SMALL_STATE(101)] = 4425, + [SMALL_STATE(102)] = 4472, + [SMALL_STATE(103)] = 4517, + [SMALL_STATE(104)] = 4558, + [SMALL_STATE(105)] = 4585, + [SMALL_STATE(106)] = 4630, + [SMALL_STATE(107)] = 4666, + [SMALL_STATE(108)] = 4710, + [SMALL_STATE(109)] = 4744, + [SMALL_STATE(110)] = 4788, + [SMALL_STATE(111)] = 4830, + [SMALL_STATE(112)] = 4870, + [SMALL_STATE(113)] = 4908, + [SMALL_STATE(114)] = 4932, + [SMALL_STATE(115)] = 4956, + [SMALL_STATE(116)] = 4976, + [SMALL_STATE(117)] = 4996, + [SMALL_STATE(118)] = 
5016, + [SMALL_STATE(119)] = 5040, + [SMALL_STATE(120)] = 5060, + [SMALL_STATE(121)] = 5083, + [SMALL_STATE(122)] = 5106, + [SMALL_STATE(123)] = 5124, + [SMALL_STATE(124)] = 5146, + [SMALL_STATE(125)] = 5164, + [SMALL_STATE(126)] = 5182, + [SMALL_STATE(127)] = 5202, + [SMALL_STATE(128)] = 5220, + [SMALL_STATE(129)] = 5240, + [SMALL_STATE(130)] = 5260, + [SMALL_STATE(131)] = 5277, + [SMALL_STATE(132)] = 5294, + [SMALL_STATE(133)] = 5311, + [SMALL_STATE(134)] = 5328, + [SMALL_STATE(135)] = 5345, + [SMALL_STATE(136)] = 5362, + [SMALL_STATE(137)] = 5379, + [SMALL_STATE(138)] = 5396, + [SMALL_STATE(139)] = 5413, + [SMALL_STATE(140)] = 5430, + [SMALL_STATE(141)] = 5447, + [SMALL_STATE(142)] = 5464, + [SMALL_STATE(143)] = 5483, + [SMALL_STATE(144)] = 5500, + [SMALL_STATE(145)] = 5517, + [SMALL_STATE(146)] = 5534, + [SMALL_STATE(147)] = 5551, + [SMALL_STATE(148)] = 5568, + [SMALL_STATE(149)] = 5585, + [SMALL_STATE(150)] = 5606, + [SMALL_STATE(151)] = 5623, + [SMALL_STATE(152)] = 5640, + [SMALL_STATE(153)] = 5660, + [SMALL_STATE(154)] = 5680, + [SMALL_STATE(155)] = 5697, + [SMALL_STATE(156)] = 5714, + [SMALL_STATE(157)] = 5731, + [SMALL_STATE(158)] = 5748, + [SMALL_STATE(159)] = 5765, + [SMALL_STATE(160)] = 5782, + [SMALL_STATE(161)] = 5799, + [SMALL_STATE(162)] = 5816, + [SMALL_STATE(163)] = 5831, + [SMALL_STATE(164)] = 5848, + [SMALL_STATE(165)] = 5865, + [SMALL_STATE(166)] = 5882, + [SMALL_STATE(167)] = 5899, + [SMALL_STATE(168)] = 5916, + [SMALL_STATE(169)] = 5933, + [SMALL_STATE(170)] = 5950, + [SMALL_STATE(171)] = 5967, + [SMALL_STATE(172)] = 5984, + [SMALL_STATE(173)] = 6000, + [SMALL_STATE(174)] = 6016, + [SMALL_STATE(175)] = 6030, + [SMALL_STATE(176)] = 6046, + [SMALL_STATE(177)] = 6062, + [SMALL_STATE(178)] = 6078, + [SMALL_STATE(179)] = 6094, + [SMALL_STATE(180)] = 6110, + [SMALL_STATE(181)] = 6123, + [SMALL_STATE(182)] = 6132, + [SMALL_STATE(183)] = 6145, + [SMALL_STATE(184)] = 6158, + [SMALL_STATE(185)] = 6171, + [SMALL_STATE(186)] = 6180, + [SMALL_STATE(187)] 
= 6193, + [SMALL_STATE(188)] = 6206, + [SMALL_STATE(189)] = 6219, + [SMALL_STATE(190)] = 6232, + [SMALL_STATE(191)] = 6243, + [SMALL_STATE(192)] = 6254, + [SMALL_STATE(193)] = 6267, + [SMALL_STATE(194)] = 6280, + [SMALL_STATE(195)] = 6293, + [SMALL_STATE(196)] = 6306, + [SMALL_STATE(197)] = 6319, + [SMALL_STATE(198)] = 6328, + [SMALL_STATE(199)] = 6341, + [SMALL_STATE(200)] = 6354, + [SMALL_STATE(201)] = 6367, + [SMALL_STATE(202)] = 6376, + [SMALL_STATE(203)] = 6389, + [SMALL_STATE(204)] = 6402, + [SMALL_STATE(205)] = 6415, + [SMALL_STATE(206)] = 6428, + [SMALL_STATE(207)] = 6441, + [SMALL_STATE(208)] = 6454, + [SMALL_STATE(209)] = 6467, + [SMALL_STATE(210)] = 6480, + [SMALL_STATE(211)] = 6493, + [SMALL_STATE(212)] = 6506, + [SMALL_STATE(213)] = 6519, + [SMALL_STATE(214)] = 6532, + [SMALL_STATE(215)] = 6545, + [SMALL_STATE(216)] = 6555, + [SMALL_STATE(217)] = 6565, + [SMALL_STATE(218)] = 6575, + [SMALL_STATE(219)] = 6585, + [SMALL_STATE(220)] = 6595, + [SMALL_STATE(221)] = 6605, + [SMALL_STATE(222)] = 6615, + [SMALL_STATE(223)] = 6625, + [SMALL_STATE(224)] = 6635, + [SMALL_STATE(225)] = 6643, + [SMALL_STATE(226)] = 6651, + [SMALL_STATE(227)] = 6661, + [SMALL_STATE(228)] = 6669, + [SMALL_STATE(229)] = 6679, + [SMALL_STATE(230)] = 6687, + [SMALL_STATE(231)] = 6697, + [SMALL_STATE(232)] = 6705, + [SMALL_STATE(233)] = 6715, + [SMALL_STATE(234)] = 6723, + [SMALL_STATE(235)] = 6733, + [SMALL_STATE(236)] = 6741, + [SMALL_STATE(237)] = 6749, + [SMALL_STATE(238)] = 6759, + [SMALL_STATE(239)] = 6769, + [SMALL_STATE(240)] = 6779, + [SMALL_STATE(241)] = 6789, + [SMALL_STATE(242)] = 6797, + [SMALL_STATE(243)] = 6807, + [SMALL_STATE(244)] = 6815, + [SMALL_STATE(245)] = 6822, + [SMALL_STATE(246)] = 6829, + [SMALL_STATE(247)] = 6836, + [SMALL_STATE(248)] = 6843, + [SMALL_STATE(249)] = 6850, + [SMALL_STATE(250)] = 6857, + [SMALL_STATE(251)] = 6864, + [SMALL_STATE(252)] = 6871, + [SMALL_STATE(253)] = 6878, + [SMALL_STATE(254)] = 6885, + [SMALL_STATE(255)] = 6892, + 
[SMALL_STATE(256)] = 6899, + [SMALL_STATE(257)] = 6906, + [SMALL_STATE(258)] = 6913, + [SMALL_STATE(259)] = 6920, + [SMALL_STATE(260)] = 6927, + [SMALL_STATE(261)] = 6934, + [SMALL_STATE(262)] = 6941, + [SMALL_STATE(263)] = 6948, + [SMALL_STATE(264)] = 6955, + [SMALL_STATE(265)] = 6962, + [SMALL_STATE(266)] = 6969, + [SMALL_STATE(267)] = 6976, + [SMALL_STATE(268)] = 6983, + [SMALL_STATE(269)] = 6990, + [SMALL_STATE(270)] = 6997, + [SMALL_STATE(271)] = 7004, + [SMALL_STATE(272)] = 7011, + [SMALL_STATE(273)] = 7018, + [SMALL_STATE(274)] = 7025, + [SMALL_STATE(275)] = 7032, + [SMALL_STATE(276)] = 7039, + [SMALL_STATE(277)] = 7046, + [SMALL_STATE(278)] = 7053, + [SMALL_STATE(279)] = 7060, + [SMALL_STATE(280)] = 7067, + [SMALL_STATE(281)] = 7074, + [SMALL_STATE(282)] = 7081, + [SMALL_STATE(283)] = 7088, }; static const TSParseActionEntry ts_parse_actions[] = { [0] = {.entry = {.count = 0, .reusable = false}}, [1] = {.entry = {.count = 1, .reusable = false}}, RECOVER(), [3] = {.entry = {.count = 1, .reusable = true}}, SHIFT_EXTRA(), - [5] = {.entry = {.count = 1, .reusable = true}}, SHIFT(230), - [7] = {.entry = {.count = 1, .reusable = true}}, SHIFT(216), + [5] = {.entry = {.count = 1, .reusable = true}}, SHIFT(227), + [7] = {.entry = {.count = 1, .reusable = true}}, SHIFT(218), [9] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_path_expr, 1, 0, 0), - [11] = {.entry = {.count = 1, .reusable = true}}, SHIFT(258), + [11] = {.entry = {.count = 1, .reusable = true}}, SHIFT(275), [13] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_path_expr, 1, 0, 0), [15] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_type_path, 1, 0, 0), - [17] = {.entry = {.count = 1, .reusable = true}}, SHIFT(266), + [17] = {.entry = {.count = 1, .reusable = true}}, SHIFT(262), [19] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_path_expr, 2, 0, 0), [21] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_path_expr, 2, 0, 0), [23] = {.entry = {.count = 1, 
.reusable = false}}, REDUCE(aux_sym_path_expr_repeat1, 2, 0, 0), - [25] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_path_expr_repeat1, 2, 0, 0), SHIFT_REPEAT(266), + [25] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_path_expr_repeat1, 2, 0, 0), SHIFT_REPEAT(262), [28] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_path_expr_repeat1, 2, 0, 0), [30] = {.entry = {.count = 1, .reusable = false}}, SHIFT(9), - [32] = {.entry = {.count = 1, .reusable = true}}, SHIFT(76), - [34] = {.entry = {.count = 1, .reusable = true}}, SHIFT(71), - [36] = {.entry = {.count = 1, .reusable = false}}, SHIFT(272), + [32] = {.entry = {.count = 1, .reusable = true}}, SHIFT(72), + [34] = {.entry = {.count = 1, .reusable = true}}, SHIFT(50), + [36] = {.entry = {.count = 1, .reusable = false}}, SHIFT(246), [38] = {.entry = {.count = 1, .reusable = false}}, SHIFT(40), - [40] = {.entry = {.count = 1, .reusable = false}}, SHIFT(48), - [42] = {.entry = {.count = 1, .reusable = false}}, SHIFT(50), - [44] = {.entry = {.count = 1, .reusable = false}}, SHIFT(58), - [46] = {.entry = {.count = 1, .reusable = true}}, SHIFT(59), + [40] = {.entry = {.count = 1, .reusable = false}}, SHIFT(52), + [42] = {.entry = {.count = 1, .reusable = false}}, SHIFT(53), + [44] = {.entry = {.count = 1, .reusable = false}}, SHIFT(54), + [46] = {.entry = {.count = 1, .reusable = true}}, SHIFT(55), [48] = {.entry = {.count = 1, .reusable = false}}, SHIFT(14), [50] = {.entry = {.count = 1, .reusable = true}}, SHIFT(14), - [52] = {.entry = {.count = 1, .reusable = true}}, SHIFT(123), + [52] = {.entry = {.count = 1, .reusable = true}}, SHIFT(125), [54] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_block_repeat1, 2, 0, 0), SHIFT_REPEAT(9), [57] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_block_repeat1, 2, 0, 0), - [59] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_block_repeat1, 2, 0, 0), SHIFT_REPEAT(71), - [62] = {.entry = {.count = 2, 
.reusable = false}}, REDUCE(aux_sym_block_repeat1, 2, 0, 0), SHIFT_REPEAT(272), + [59] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_block_repeat1, 2, 0, 0), SHIFT_REPEAT(50), + [62] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_block_repeat1, 2, 0, 0), SHIFT_REPEAT(246), [65] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_block_repeat1, 2, 0, 0), SHIFT_REPEAT(40), - [68] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_block_repeat1, 2, 0, 0), SHIFT_REPEAT(48), - [71] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_block_repeat1, 2, 0, 0), SHIFT_REPEAT(50), - [74] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_block_repeat1, 2, 0, 0), SHIFT_REPEAT(58), - [77] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_block_repeat1, 2, 0, 0), SHIFT_REPEAT(59), + [68] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_block_repeat1, 2, 0, 0), SHIFT_REPEAT(52), + [71] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_block_repeat1, 2, 0, 0), SHIFT_REPEAT(53), + [74] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_block_repeat1, 2, 0, 0), SHIFT_REPEAT(54), + [77] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_block_repeat1, 2, 0, 0), SHIFT_REPEAT(55), [80] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_block_repeat1, 2, 0, 0), SHIFT_REPEAT(14), [83] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_block_repeat1, 2, 0, 0), SHIFT_REPEAT(14), - [86] = {.entry = {.count = 1, .reusable = false}}, SHIFT(46), + [86] = {.entry = {.count = 1, .reusable = false}}, SHIFT(45), [88] = {.entry = {.count = 1, .reusable = true}}, SHIFT(124), - [90] = {.entry = {.count = 1, .reusable = true}}, SHIFT(81), + [90] = {.entry = {.count = 1, .reusable = true}}, SHIFT(74), [92] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_module_path_repeat1, 2, 0, 0), REDUCE(aux_sym_path_expr_repeat1, 2, 0, 0), [95] = {.entry = {.count = 1, 
.reusable = true}}, REDUCE(aux_sym_module_path_repeat1, 2, 0, 0), [97] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_unary_expr, 2, 0, 0), @@ -8058,19 +8257,19 @@ static const TSParseActionEntry ts_parse_actions[] = { [115] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_match_expr, 5, 0, 0), [117] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_struct_literal, 3, 0, 0), [119] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_struct_literal, 3, 0, 0), - [121] = {.entry = {.count = 1, .reusable = true}}, SHIFT(49), - [123] = {.entry = {.count = 1, .reusable = true}}, SHIFT(51), - [125] = {.entry = {.count = 1, .reusable = false}}, SHIFT(56), - [127] = {.entry = {.count = 1, .reusable = true}}, SHIFT(56), - [129] = {.entry = {.count = 1, .reusable = false}}, SHIFT(49), - [131] = {.entry = {.count = 1, .reusable = true}}, SHIFT(55), + [121] = {.entry = {.count = 1, .reusable = true}}, SHIFT(57), + [123] = {.entry = {.count = 1, .reusable = true}}, SHIFT(56), + [125] = {.entry = {.count = 1, .reusable = false}}, SHIFT(47), + [127] = {.entry = {.count = 1, .reusable = true}}, SHIFT(47), + [129] = {.entry = {.count = 1, .reusable = false}}, SHIFT(57), + [131] = {.entry = {.count = 1, .reusable = true}}, SHIFT(46), [133] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_try_expr, 2, 0, 10), [135] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_try_expr, 2, 0, 10), [137] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_call_expr, 2, 0, 11), [139] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_call_expr, 2, 0, 11), [141] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_arg_list, 2, 0, 0), [143] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_arg_list, 2, 0, 0), - [145] = {.entry = {.count = 1, .reusable = true}}, SHIFT(54), + [145] = {.entry = {.count = 1, .reusable = true}}, SHIFT(66), [147] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_match_expr, 4, 0, 0), [149] = {.entry = 
{.count = 1, .reusable = true}}, REDUCE(sym_match_expr, 4, 0, 0), [151] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_struct_literal, 4, 0, 0), @@ -8090,287 +8289,287 @@ static const TSParseActionEntry ts_parse_actions[] = { [179] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_grouping, 3, 0, 0), [181] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_grouping, 3, 0, 0), [183] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_assign_stmt, 3, 0, 0), - [185] = {.entry = {.count = 1, .reusable = true}}, SHIFT(99), + [185] = {.entry = {.count = 1, .reusable = true}}, SHIFT(96), [187] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_assign_stmt, 3, 0, 0), - [189] = {.entry = {.count = 1, .reusable = true}}, SHIFT(52), + [189] = {.entry = {.count = 1, .reusable = true}}, SHIFT(65), [191] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_let_stmt, 4, 0, 0), - [193] = {.entry = {.count = 1, .reusable = true}}, SHIFT(102), + [193] = {.entry = {.count = 1, .reusable = true}}, SHIFT(94), [195] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_let_stmt, 4, 0, 0), [197] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_return_stmt, 2, 0, 0), - [199] = {.entry = {.count = 1, .reusable = true}}, SHIFT(94), + [199] = {.entry = {.count = 1, .reusable = true}}, SHIFT(89), [201] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_return_stmt, 2, 0, 0), [203] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_expr_stmt, 1, 0, 0), - [205] = {.entry = {.count = 1, .reusable = true}}, SHIFT(104), + [205] = {.entry = {.count = 1, .reusable = true}}, SHIFT(86), [207] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_expr_stmt, 1, 0, 0), [209] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_let_stmt, 6, 0, 0), - [211] = {.entry = {.count = 1, .reusable = true}}, SHIFT(106), + [211] = {.entry = {.count = 1, .reusable = true}}, SHIFT(97), [213] = {.entry = {.count = 1, .reusable = true}}, 
REDUCE(sym_let_stmt, 6, 0, 0), [215] = {.entry = {.count = 2, .reusable = false}}, REDUCE(sym_return_stmt, 1, 0, 0), SHIFT(2), - [218] = {.entry = {.count = 1, .reusable = true}}, SHIFT(100), + [218] = {.entry = {.count = 1, .reusable = true}}, SHIFT(88), [220] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_return_stmt, 1, 0, 0), - [222] = {.entry = {.count = 2, .reusable = true}}, REDUCE(sym_return_stmt, 1, 0, 0), SHIFT(71), + [222] = {.entry = {.count = 2, .reusable = true}}, REDUCE(sym_return_stmt, 1, 0, 0), SHIFT(50), [225] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_return_stmt, 1, 0, 0), - [227] = {.entry = {.count = 2, .reusable = false}}, REDUCE(sym_return_stmt, 1, 0, 0), SHIFT(58), - [230] = {.entry = {.count = 2, .reusable = true}}, REDUCE(sym_return_stmt, 1, 0, 0), SHIFT(59), + [227] = {.entry = {.count = 2, .reusable = false}}, REDUCE(sym_return_stmt, 1, 0, 0), SHIFT(54), + [230] = {.entry = {.count = 2, .reusable = true}}, REDUCE(sym_return_stmt, 1, 0, 0), SHIFT(55), [233] = {.entry = {.count = 2, .reusable = false}}, REDUCE(sym_return_stmt, 1, 0, 0), SHIFT(14), [236] = {.entry = {.count = 2, .reusable = true}}, REDUCE(sym_return_stmt, 1, 0, 0), SHIFT(14), [239] = {.entry = {.count = 1, .reusable = false}}, SHIFT(2), [241] = {.entry = {.count = 1, .reusable = true}}, SHIFT(22), [243] = {.entry = {.count = 1, .reusable = true}}, SHIFT(33), [245] = {.entry = {.count = 1, .reusable = true}}, SHIFT(30), - [247] = {.entry = {.count = 1, .reusable = false}}, SHIFT(82), - [249] = {.entry = {.count = 1, .reusable = true}}, SHIFT(70), - [251] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_source_file, 1, 0, 0), - [253] = {.entry = {.count = 1, .reusable = true}}, SHIFT(232), - [255] = {.entry = {.count = 1, .reusable = true}}, SHIFT(148), - [257] = {.entry = {.count = 1, .reusable = true}}, SHIFT(273), - [259] = {.entry = {.count = 1, .reusable = true}}, SHIFT(265), + [247] = {.entry = {.count = 1, .reusable = false}}, SHIFT(79), 
+ [249] = {.entry = {.count = 1, .reusable = true}}, SHIFT(64), + [251] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_source_file, 2, 0, 0), + [253] = {.entry = {.count = 1, .reusable = true}}, SHIFT(219), + [255] = {.entry = {.count = 1, .reusable = true}}, SHIFT(149), + [257] = {.entry = {.count = 1, .reusable = true}}, SHIFT(253), + [259] = {.entry = {.count = 1, .reusable = true}}, SHIFT(254), [261] = {.entry = {.count = 1, .reusable = true}}, SHIFT(151), - [263] = {.entry = {.count = 1, .reusable = true}}, SHIFT(280), - [265] = {.entry = {.count = 1, .reusable = true}}, SHIFT(254), - [267] = {.entry = {.count = 1, .reusable = true}}, SHIFT(165), - [269] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_source_file, 2, 0, 0), - [271] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_source_file, 3, 0, 0), - [273] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_module_path_repeat1, 2, 0, 0), SHIFT_REPEAT(252), - [276] = {.entry = {.count = 1, .reusable = true}}, SHIFT(252), - [278] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_type_path, 2, 0, 0), - [280] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_type, 1, 0, 0), - [282] = {.entry = {.count = 1, .reusable = true}}, SHIFT(155), - [284] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_block, 3, 0, 0), - [286] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_block, 3, 0, 0), - [288] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_source_file, 4, 0, 0), - [290] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_source_file_repeat2, 2, 0, 0), - [292] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat2, 2, 0, 0), SHIFT_REPEAT(148), - [295] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat2, 2, 0, 0), SHIFT_REPEAT(273), - [298] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat2, 2, 0, 0), SHIFT_REPEAT(265), - [301] = {.entry = {.count = 2, 
.reusable = true}}, REDUCE(aux_sym_source_file_repeat2, 2, 0, 0), SHIFT_REPEAT(151), - [304] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat2, 2, 0, 0), SHIFT_REPEAT(280), - [307] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat2, 2, 0, 0), SHIFT_REPEAT(254), - [310] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat2, 2, 0, 0), SHIFT_REPEAT(165), - [313] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_block, 2, 0, 0), - [315] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_block, 2, 0, 0), - [317] = {.entry = {.count = 1, .reusable = true}}, SHIFT(282), - [319] = {.entry = {.count = 2, .reusable = true}}, REDUCE(sym_type_path, 1, 0, 0), REDUCE(sym_path_expr, 1, 0, 0), - [322] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_type, 2, 0, 0), - [324] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_type_args, 5, 0, 0), - [326] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_type_args, 3, 0, 0), - [328] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_type_args, 4, 0, 0), - [330] = {.entry = {.count = 1, .reusable = true}}, SHIFT(43), - [332] = {.entry = {.count = 1, .reusable = true}}, SHIFT(29), - [334] = {.entry = {.count = 1, .reusable = true}}, SHIFT(11), - [336] = {.entry = {.count = 1, .reusable = true}}, SHIFT(66), - [338] = {.entry = {.count = 1, .reusable = true}}, SHIFT(63), - [340] = {.entry = {.count = 1, .reusable = true}}, SHIFT(67), - [342] = {.entry = {.count = 1, .reusable = true}}, SHIFT(62), - [344] = {.entry = {.count = 1, .reusable = true}}, SHIFT(44), - [346] = {.entry = {.count = 1, .reusable = false}}, SHIFT(64), - [348] = {.entry = {.count = 1, .reusable = true}}, SHIFT(64), - [350] = {.entry = {.count = 1, .reusable = false}}, SHIFT(66), - [352] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_if_stmt, 3, 0, 0), - [354] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_if_stmt, 3, 0, 0), - [356] 
= {.entry = {.count = 1, .reusable = false}}, SHIFT(172), - [358] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_struct_field, 3, 0, 0), - [360] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_arg_list_repeat1, 2, 0, 0), - [362] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_return_stmt, 3, 0, 0), - [364] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_return_stmt, 3, 0, 0), - [366] = {.entry = {.count = 1, .reusable = true}}, SHIFT(34), - [368] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_while_stmt, 3, 0, 0), - [370] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_while_stmt, 3, 0, 0), - [372] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_assign_stmt, 4, 0, 0), - [374] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_assign_stmt, 4, 0, 0), - [376] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_statement, 1, 0, 0), - [378] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_statement, 1, 0, 0), - [380] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_let_stmt, 5, 0, 0), - [382] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_let_stmt, 5, 0, 0), - [384] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_if_stmt, 5, 0, 0), - [386] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_if_stmt, 5, 0, 0), - [388] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_expr_stmt, 2, 0, 0), - [390] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_expr_stmt, 2, 0, 0), - [392] = {.entry = {.count = 1, .reusable = true}}, SHIFT(111), - [394] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_let_stmt, 7, 0, 0), - [396] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_let_stmt, 7, 0, 0), - [398] = {.entry = {.count = 1, .reusable = false}}, SHIFT(166), - [400] = {.entry = {.count = 1, .reusable = true}}, SHIFT(16), - [402] = {.entry = {.count = 1, .reusable = false}}, SHIFT(277), - [404] = {.entry = {.count = 1, .reusable = true}}, 
SHIFT(26), - [406] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_match_expr_repeat1, 2, 0, 0), SHIFT_REPEAT(166), - [409] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_match_expr_repeat1, 2, 0, 0), - [411] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_match_expr_repeat1, 2, 0, 0), SHIFT_REPEAT(277), - [414] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_match_expr_repeat1, 2, 0, 0), SHIFT_REPEAT(14), - [417] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_match_expr_repeat1, 2, 0, 0), SHIFT_REPEAT(14), - [420] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_source_file_repeat1, 2, 0, 0), - [422] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat1, 2, 0, 0), SHIFT_REPEAT(232), - [425] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_module_path, 1, 0, 0), - [427] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_param_list, 5, 0, 0), - [429] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_param_list, 3, 0, 0), - [431] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_param_list, 2, 0, 0), - [433] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_module_path, 2, 0, 0), - [435] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_param_list, 4, 0, 0), - [437] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_extern_function_decl, 4, 0, 3), - [439] = {.entry = {.count = 1, .reusable = true}}, SHIFT(164), - [441] = {.entry = {.count = 1, .reusable = true}}, SHIFT(129), - [443] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_extern_function_decl, 5, 0, 7), - [445] = {.entry = {.count = 1, .reusable = true}}, SHIFT(168), - [447] = {.entry = {.count = 1, .reusable = true}}, SHIFT(139), + [263] = {.entry = {.count = 1, .reusable = true}}, SHIFT(258), + [265] = {.entry = {.count = 1, .reusable = true}}, SHIFT(269), + [267] = {.entry = {.count = 1, .reusable = true}}, SHIFT(154), + [269] = {.entry = {.count = 1, 
.reusable = true}}, REDUCE(sym_source_file, 1, 0, 0), + [271] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_module_path_repeat1, 2, 0, 0), SHIFT_REPEAT(271), + [274] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_source_file, 3, 0, 0), + [276] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_block, 3, 0, 0), + [278] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_block, 3, 0, 0), + [280] = {.entry = {.count = 1, .reusable = true}}, SHIFT(271), + [282] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_block, 2, 0, 0), + [284] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_block, 2, 0, 0), + [286] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_type_path, 2, 0, 0), + [288] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_type, 1, 0, 0), + [290] = {.entry = {.count = 1, .reusable = true}}, SHIFT(167), + [292] = {.entry = {.count = 1, .reusable = true}}, SHIFT(282), + [294] = {.entry = {.count = 2, .reusable = true}}, REDUCE(sym_type_path, 1, 0, 0), REDUCE(sym_path_expr, 1, 0, 0), + [297] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_if_stmt, 3, 0, 0), + [299] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_if_stmt, 3, 0, 0), + [301] = {.entry = {.count = 1, .reusable = false}}, SHIFT(174), + [303] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_source_file, 4, 0, 0), + [305] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_source_file_repeat2, 2, 0, 0), + [307] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat2, 2, 0, 0), SHIFT_REPEAT(149), + [310] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat2, 2, 0, 0), SHIFT_REPEAT(253), + [313] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat2, 2, 0, 0), SHIFT_REPEAT(254), + [316] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat2, 2, 0, 0), SHIFT_REPEAT(151), + [319] = {.entry = {.count = 2, .reusable = 
true}}, REDUCE(aux_sym_source_file_repeat2, 2, 0, 0), SHIFT_REPEAT(258), + [322] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat2, 2, 0, 0), SHIFT_REPEAT(269), + [325] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat2, 2, 0, 0), SHIFT_REPEAT(154), + [328] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_statement, 1, 0, 0), + [330] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_statement, 1, 0, 0), + [332] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_type_args, 3, 0, 0), + [334] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_expr_stmt, 2, 0, 0), + [336] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_expr_stmt, 2, 0, 0), + [338] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_type_args, 4, 0, 0), + [340] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_return_stmt, 3, 0, 0), + [342] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_return_stmt, 3, 0, 0), + [344] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_while_stmt, 3, 0, 0), + [346] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_while_stmt, 3, 0, 0), + [348] = {.entry = {.count = 1, .reusable = true}}, SHIFT(43), + [350] = {.entry = {.count = 1, .reusable = true}}, SHIFT(29), + [352] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_type_args, 5, 0, 0), + [354] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_type, 2, 0, 0), + [356] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_let_stmt, 5, 0, 0), + [358] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_let_stmt, 5, 0, 0), + [360] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_if_stmt, 5, 0, 0), + [362] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_if_stmt, 5, 0, 0), + [364] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_assign_stmt, 4, 0, 0), + [366] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_assign_stmt, 4, 0, 0), + [368] = {.entry = 
{.count = 1, .reusable = false}}, REDUCE(sym_let_stmt, 7, 0, 0), + [370] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_let_stmt, 7, 0, 0), + [372] = {.entry = {.count = 1, .reusable = false}}, SHIFT(162), + [374] = {.entry = {.count = 1, .reusable = true}}, SHIFT(26), + [376] = {.entry = {.count = 1, .reusable = false}}, SHIFT(259), + [378] = {.entry = {.count = 1, .reusable = true}}, SHIFT(16), + [380] = {.entry = {.count = 1, .reusable = true}}, SHIFT(11), + [382] = {.entry = {.count = 1, .reusable = true}}, SHIFT(59), + [384] = {.entry = {.count = 1, .reusable = true}}, SHIFT(60), + [386] = {.entry = {.count = 1, .reusable = true}}, SHIFT(61), + [388] = {.entry = {.count = 1, .reusable = true}}, SHIFT(62), + [390] = {.entry = {.count = 1, .reusable = true}}, SHIFT(63), + [392] = {.entry = {.count = 1, .reusable = false}}, SHIFT(44), + [394] = {.entry = {.count = 1, .reusable = true}}, SHIFT(44), + [396] = {.entry = {.count = 1, .reusable = false}}, SHIFT(59), + [398] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_arg_list_repeat1, 2, 0, 0), + [400] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_match_expr_repeat1, 2, 0, 0), SHIFT_REPEAT(162), + [403] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_match_expr_repeat1, 2, 0, 0), + [405] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_match_expr_repeat1, 2, 0, 0), SHIFT_REPEAT(259), + [408] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_match_expr_repeat1, 2, 0, 0), SHIFT_REPEAT(14), + [411] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_match_expr_repeat1, 2, 0, 0), SHIFT_REPEAT(14), + [414] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_struct_field, 3, 0, 0), + [416] = {.entry = {.count = 1, .reusable = true}}, SHIFT(98), + [418] = {.entry = {.count = 1, .reusable = true}}, SHIFT(34), + [420] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_module_path, 2, 0, 0), + [422] = {.entry = {.count = 1, .reusable = 
true}}, REDUCE(aux_sym_source_file_repeat1, 2, 0, 0), + [424] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat1, 2, 0, 0), SHIFT_REPEAT(219), + [427] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_param_list, 3, 0, 0), + [429] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_param_list, 4, 0, 0), + [431] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_param_list, 5, 0, 0), + [433] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_module_path, 1, 0, 0), + [435] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_param_list, 2, 0, 0), + [437] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_extern_function_decl, 5, 0, 7), + [439] = {.entry = {.count = 1, .reusable = true}}, SHIFT(170), + [441] = {.entry = {.count = 1, .reusable = true}}, SHIFT(147), + [443] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_extern_function_decl, 4, 0, 3), + [445] = {.entry = {.count = 1, .reusable = true}}, SHIFT(156), + [447] = {.entry = {.count = 1, .reusable = true}}, SHIFT(137), [449] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_use_decl, 2, 0, 0), - [451] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_extern_function_decl, 7, 0, 14), - [453] = {.entry = {.count = 1, .reusable = true}}, SHIFT(136), - [455] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_extern_function_decl, 6, 0, 12), - [457] = {.entry = {.count = 1, .reusable = true}}, SHIFT(135), - [459] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_module_decl, 2, 0, 0), - [461] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_struct_decl, 4, 0, 4), - [463] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_extern_function_decl, 5, 0, 3), - [465] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_impl_block, 5, 0, 5), - [467] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_struct_decl, 6, 0, 3), - [469] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_function_decl, 7, 0, 13), - 
[471] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_struct_decl, 5, 0, 4), - [473] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_enum_decl, 5, 0, 4), - [475] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_extern_function_decl, 7, 0, 12), - [477] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_extern_function_decl, 8, 0, 14), - [479] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_struct_decl, 5, 0, 3), - [481] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_enum_decl, 5, 0, 3), - [483] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_extern_function_decl, 6, 0, 7), - [485] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_enum_decl, 6, 0, 3), - [487] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_function_decl, 4, 0, 2), - [489] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_function_decl, 6, 0, 9), - [491] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_function_decl, 5, 0, 6), - [493] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_impl_block, 4, 0, 5), - [495] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_enum_decl, 4, 0, 4), - [497] = {.entry = {.count = 1, .reusable = false}}, REDUCE(aux_sym_match_expr_repeat1, 1, 0, 0), - [499] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_match_expr_repeat1, 1, 0, 0), - [501] = {.entry = {.count = 1, .reusable = true}}, SHIFT(149), - [503] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_match_arm, 3, 0, 0), - [505] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_match_arm, 3, 0, 0), - [507] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_struct_decl_repeat1, 1, 0, 0), - [509] = {.entry = {.count = 1, .reusable = true}}, SHIFT(259), - [511] = {.entry = {.count = 1, .reusable = true}}, SHIFT(270), - [513] = {.entry = {.count = 1, .reusable = true}}, SHIFT(275), - [515] = {.entry = {.count = 1, .reusable = false}}, REDUCE(aux_sym_match_expr_repeat1, 2, 0, 0), + [451] = {.entry 
= {.count = 1, .reusable = false}}, REDUCE(aux_sym_match_expr_repeat1, 1, 0, 0), + [453] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_match_expr_repeat1, 1, 0, 0), + [455] = {.entry = {.count = 1, .reusable = true}}, SHIFT(142), + [457] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_extern_function_decl, 6, 0, 12), + [459] = {.entry = {.count = 1, .reusable = true}}, SHIFT(138), + [461] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_module_decl, 2, 0, 0), + [463] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_match_arm, 3, 0, 0), + [465] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_match_arm, 3, 0, 0), + [467] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_extern_function_decl, 7, 0, 14), + [469] = {.entry = {.count = 1, .reusable = true}}, SHIFT(148), + [471] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_struct_decl, 6, 0, 3), + [473] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_enum_decl, 6, 0, 3), + [475] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_function_decl, 6, 0, 9), + [477] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_function_decl, 5, 0, 6), + [479] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_enum_decl, 5, 0, 3), + [481] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_impl_block, 4, 0, 5), + [483] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_function_decl, 7, 0, 13), + [485] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_extern_function_decl, 5, 0, 3), + [487] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_extern_function_decl, 7, 0, 12), + [489] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_enum_decl, 4, 0, 4), + [491] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_struct_decl, 5, 0, 4), + [493] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_enum_decl, 5, 0, 4), + [495] = {.entry = {.count = 1, .reusable = false}}, REDUCE(aux_sym_match_expr_repeat1, 2, 0, 0), + [497] = 
{.entry = {.count = 1, .reusable = true}}, REDUCE(sym_impl_block, 5, 0, 5), + [499] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_struct_decl, 5, 0, 3), + [501] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_function_decl, 4, 0, 2), + [503] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_struct_decl, 4, 0, 4), + [505] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_extern_function_decl, 6, 0, 7), + [507] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_extern_function_decl, 8, 0, 14), + [509] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_struct_decl_repeat1, 1, 0, 0), + [511] = {.entry = {.count = 1, .reusable = true}}, SHIFT(248), + [513] = {.entry = {.count = 1, .reusable = true}}, SHIFT(273), + [515] = {.entry = {.count = 1, .reusable = true}}, SHIFT(280), [517] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_struct_decl_repeat1, 2, 0, 0), SHIFT_REPEAT(150), [520] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_struct_decl_repeat1, 2, 0, 0), [522] = {.entry = {.count = 1, .reusable = true}}, SHIFT(150), - [524] = {.entry = {.count = 1, .reusable = true}}, SHIFT(267), - [526] = {.entry = {.count = 1, .reusable = true}}, SHIFT(72), - [528] = {.entry = {.count = 1, .reusable = true}}, SHIFT(170), - [530] = {.entry = {.count = 1, .reusable = true}}, SHIFT(84), - [532] = {.entry = {.count = 1, .reusable = true}}, SHIFT(86), - [534] = {.entry = {.count = 1, .reusable = true}}, SHIFT(281), - [536] = {.entry = {.count = 1, .reusable = true}}, SHIFT(251), - [538] = {.entry = {.count = 1, .reusable = true}}, SHIFT(130), - [540] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_impl_block_repeat1, 2, 0, 0), SHIFT_REPEAT(281), - [543] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_impl_block_repeat1, 2, 0, 0), SHIFT_REPEAT(251), + [524] = {.entry = {.count = 1, .reusable = true}}, SHIFT(274), + [526] = {.entry = {.count = 1, .reusable = true}}, SHIFT(73), + [528] = {.entry 
= {.count = 1, .reusable = true}}, SHIFT(168), + [530] = {.entry = {.count = 1, .reusable = true}}, SHIFT(92), + [532] = {.entry = {.count = 1, .reusable = true}}, SHIFT(87), + [534] = {.entry = {.count = 1, .reusable = true}}, SHIFT(261), + [536] = {.entry = {.count = 1, .reusable = true}}, SHIFT(260), + [538] = {.entry = {.count = 1, .reusable = true}}, SHIFT(143), + [540] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_impl_block_repeat1, 2, 0, 0), SHIFT_REPEAT(261), + [543] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_impl_block_repeat1, 2, 0, 0), SHIFT_REPEAT(260), [546] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_impl_block_repeat1, 2, 0, 0), - [548] = {.entry = {.count = 1, .reusable = true}}, SHIFT(144), - [550] = {.entry = {.count = 1, .reusable = true}}, SHIFT(48), - [552] = {.entry = {.count = 1, .reusable = true}}, SHIFT(268), - [554] = {.entry = {.count = 1, .reusable = true}}, SHIFT(137), - [556] = {.entry = {.count = 1, .reusable = false}}, SHIFT(249), - [558] = {.entry = {.count = 1, .reusable = true}}, SHIFT(117), - [560] = {.entry = {.count = 1, .reusable = false}}, SHIFT(243), - [562] = {.entry = {.count = 1, .reusable = true}}, SHIFT(119), - [564] = {.entry = {.count = 1, .reusable = true}}, SHIFT(128), - [566] = {.entry = {.count = 1, .reusable = true}}, SHIFT(206), - [568] = {.entry = {.count = 1, .reusable = true}}, SHIFT(138), - [570] = {.entry = {.count = 1, .reusable = true}}, SHIFT(115), - [572] = {.entry = {.count = 1, .reusable = true}}, SHIFT(145), - [574] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_arg_list_repeat1, 2, 0, 0), SHIFT_REPEAT(65), - [577] = {.entry = {.count = 1, .reusable = true}}, SHIFT(161), - [579] = {.entry = {.count = 1, .reusable = true}}, SHIFT(10), - [581] = {.entry = {.count = 1, .reusable = true}}, SHIFT(250), - [583] = {.entry = {.count = 1, .reusable = true}}, SHIFT(31), - [585] = {.entry = {.count = 1, .reusable = true}}, SHIFT(189), - [587] = 
{.entry = {.count = 1, .reusable = false}}, SHIFT(256), - [589] = {.entry = {.count = 1, .reusable = true}}, SHIFT(255), - [591] = {.entry = {.count = 1, .reusable = true}}, SHIFT(163), - [593] = {.entry = {.count = 1, .reusable = true}}, SHIFT(42), - [595] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_param_list_repeat1, 2, 0, 0), SHIFT_REPEAT(188), - [598] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_param_list_repeat1, 2, 0, 0), - [600] = {.entry = {.count = 1, .reusable = true}}, SHIFT(28), - [602] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_struct_literal_repeat1, 2, 0, 0), - [604] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_struct_literal_repeat1, 2, 0, 0), SHIFT_REPEAT(231), - [607] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_method_decl, 5, 0, 6), - [609] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_field_list, 2, 0, 0), - [611] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_method_decl, 6, 0, 9), - [613] = {.entry = {.count = 1, .reusable = true}}, SHIFT(175), - [615] = {.entry = {.count = 1, .reusable = true}}, SHIFT(116), - [617] = {.entry = {.count = 1, .reusable = true}}, SHIFT(197), - [619] = {.entry = {.count = 1, .reusable = true}}, SHIFT(27), - [621] = {.entry = {.count = 1, .reusable = true}}, SHIFT(182), - [623] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_field_list, 3, 0, 0), - [625] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_method_decl, 7, 0, 13), - [627] = {.entry = {.count = 1, .reusable = true}}, SHIFT(178), - [629] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_method_decl, 4, 0, 2), - [631] = {.entry = {.count = 1, .reusable = true}}, SHIFT(152), - [633] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_field_list_repeat1, 2, 0, 0), - [635] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_field_list_repeat1, 2, 0, 0), SHIFT_REPEAT(236), - [638] = {.entry = {.count = 2, .reusable = true}}, 
REDUCE(aux_sym_type_args_repeat1, 2, 0, 0), SHIFT_REPEAT(159), - [641] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_type_args_repeat1, 2, 0, 0), - [643] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_field_list, 1, 0, 0), - [645] = {.entry = {.count = 1, .reusable = true}}, SHIFT(192), - [647] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_enum_variants, 2, 0, 0), - [649] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_enum_variant, 1, 0, 0), - [651] = {.entry = {.count = 1, .reusable = true}}, SHIFT(160), - [653] = {.entry = {.count = 1, .reusable = true}}, SHIFT(210), - [655] = {.entry = {.count = 1, .reusable = true}}, SHIFT(153), - [657] = {.entry = {.count = 1, .reusable = true}}, SHIFT(85), - [659] = {.entry = {.count = 1, .reusable = true}}, SHIFT(17), - [661] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_enum_variants, 3, 0, 0), - [663] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_enum_variants_repeat1, 2, 0, 0), - [665] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_enum_variants_repeat1, 2, 0, 0), SHIFT_REPEAT(238), - [668] = {.entry = {.count = 1, .reusable = true}}, SHIFT(158), - [670] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_enum_variants, 1, 0, 0), - [672] = {.entry = {.count = 1, .reusable = true}}, SHIFT(205), - [674] = {.entry = {.count = 1, .reusable = true}}, SHIFT(156), - [676] = {.entry = {.count = 1, .reusable = true}}, SHIFT(114), - [678] = {.entry = {.count = 1, .reusable = true}}, SHIFT(174), - [680] = {.entry = {.count = 1, .reusable = true}}, SHIFT(184), - [682] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_pattern, 1, 0, 0), - [684] = {.entry = {.count = 1, .reusable = true}}, SHIFT(157), - [686] = {.entry = {.count = 1, .reusable = true}}, SHIFT(61), - [688] = {.entry = {.count = 1, .reusable = true}}, SHIFT(261), - [690] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_param, 3, 0, 8), - [692] = {.entry = {.count = 1, 
.reusable = true}}, REDUCE(sym_enum_variant, 4, 0, 0), - [694] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_field, 3, 0, 0), - [696] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_param, 1, 0, 1), - [698] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_pattern_call, 4, 0, 0), - [700] = {.entry = {.count = 1, .reusable = true}}, SHIFT(176), - [702] = {.entry = {.count = 1, .reusable = true}}, SHIFT(179), - [704] = {.entry = {.count = 1, .reusable = true}}, SHIFT(173), - [706] = {.entry = {.count = 1, .reusable = true}}, SHIFT(131), - [708] = {.entry = {.count = 1, .reusable = true}}, SHIFT(169), - [710] = {.entry = {.count = 1, .reusable = true}}, SHIFT(53), - [712] = {.entry = {.count = 1, .reusable = true}}, SHIFT(241), - [714] = {.entry = {.count = 1, .reusable = true}}, SHIFT(74), - [716] = {.entry = {.count = 1, .reusable = true}}, SHIFT(224), - [718] = {.entry = {.count = 1, .reusable = true}}, SHIFT(246), + [548] = {.entry = {.count = 1, .reusable = true}}, SHIFT(135), + [550] = {.entry = {.count = 1, .reusable = false}}, SHIFT(283), + [552] = {.entry = {.count = 1, .reusable = true}}, SHIFT(119), + [554] = {.entry = {.count = 1, .reusable = false}}, SHIFT(233), + [556] = {.entry = {.count = 1, .reusable = true}}, SHIFT(116), + [558] = {.entry = {.count = 1, .reusable = true}}, SHIFT(52), + [560] = {.entry = {.count = 1, .reusable = true}}, SHIFT(191), + [562] = {.entry = {.count = 1, .reusable = true}}, SHIFT(139), + [564] = {.entry = {.count = 1, .reusable = true}}, SHIFT(272), + [566] = {.entry = {.count = 1, .reusable = true}}, SHIFT(146), + [568] = {.entry = {.count = 1, .reusable = true}}, SHIFT(134), + [570] = {.entry = {.count = 1, .reusable = true}}, SHIFT(144), + [572] = {.entry = {.count = 1, .reusable = true}}, SHIFT(117), + [574] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_enum_variants, 3, 0, 0), + [576] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_method_decl, 4, 0, 2), + [578] = {.entry = 
{.count = 1, .reusable = true}}, SHIFT(179), + [580] = {.entry = {.count = 1, .reusable = true}}, SHIFT(42), + [582] = {.entry = {.count = 1, .reusable = true}}, SHIFT(27), + [584] = {.entry = {.count = 1, .reusable = true}}, SHIFT(212), + [586] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_method_decl, 5, 0, 6), + [588] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_type_args_repeat1, 2, 0, 0), SHIFT_REPEAT(160), + [591] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_type_args_repeat1, 2, 0, 0), + [593] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_enum_variants, 2, 0, 0), + [595] = {.entry = {.count = 1, .reusable = true}}, SHIFT(180), + [597] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_enum_variants, 1, 0, 0), + [599] = {.entry = {.count = 1, .reusable = true}}, SHIFT(187), + [601] = {.entry = {.count = 1, .reusable = false}}, SHIFT(256), + [603] = {.entry = {.count = 1, .reusable = true}}, SHIFT(255), + [605] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_enum_variant, 1, 0, 0), + [607] = {.entry = {.count = 1, .reusable = true}}, SHIFT(164), + [609] = {.entry = {.count = 1, .reusable = true}}, SHIFT(245), + [611] = {.entry = {.count = 1, .reusable = true}}, SHIFT(28), + [613] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_struct_literal_repeat1, 2, 0, 0), + [615] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_struct_literal_repeat1, 2, 0, 0), SHIFT_REPEAT(232), + [618] = {.entry = {.count = 1, .reusable = true}}, SHIFT(171), + [620] = {.entry = {.count = 1, .reusable = true}}, SHIFT(10), + [622] = {.entry = {.count = 1, .reusable = true}}, SHIFT(169), + [624] = {.entry = {.count = 1, .reusable = true}}, SHIFT(31), + [626] = {.entry = {.count = 1, .reusable = true}}, SHIFT(192), + [628] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_method_decl, 6, 0, 9), + [630] = {.entry = {.count = 1, .reusable = true}}, SHIFT(153), + [632] = {.entry = {.count = 1, .reusable 
= true}}, SHIFT(85), + [634] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_param_list_repeat1, 2, 0, 0), SHIFT_REPEAT(209), + [637] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_param_list_repeat1, 2, 0, 0), + [639] = {.entry = {.count = 1, .reusable = true}}, SHIFT(17), + [641] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_method_decl, 7, 0, 13), + [643] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_field_list, 2, 0, 0), + [645] = {.entry = {.count = 1, .reusable = true}}, SHIFT(152), + [647] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_field_list, 3, 0, 0), + [649] = {.entry = {.count = 1, .reusable = true}}, SHIFT(173), + [651] = {.entry = {.count = 1, .reusable = true}}, SHIFT(115), + [653] = {.entry = {.count = 1, .reusable = true}}, SHIFT(204), + [655] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_field_list_repeat1, 2, 0, 0), + [657] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_field_list_repeat1, 2, 0, 0), SHIFT_REPEAT(234), + [660] = {.entry = {.count = 1, .reusable = true}}, SHIFT(165), + [662] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_enum_variants_repeat1, 2, 0, 0), + [664] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_enum_variants_repeat1, 2, 0, 0), SHIFT_REPEAT(215), + [667] = {.entry = {.count = 1, .reusable = true}}, SHIFT(158), + [669] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_field_list, 1, 0, 0), + [671] = {.entry = {.count = 1, .reusable = true}}, SHIFT(202), + [673] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_arg_list_repeat1, 2, 0, 0), SHIFT_REPEAT(58), + [676] = {.entry = {.count = 1, .reusable = true}}, SHIFT(172), + [678] = {.entry = {.count = 1, .reusable = true}}, SHIFT(190), + [680] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_pattern, 1, 0, 0), + [682] = {.entry = {.count = 1, .reusable = true}}, SHIFT(118), + [684] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_field, 
3, 0, 0), + [686] = {.entry = {.count = 1, .reusable = true}}, SHIFT(268), + [688] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_param, 1, 0, 1), + [690] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_enum_variant, 4, 0, 0), + [692] = {.entry = {.count = 1, .reusable = true}}, SHIFT(157), + [694] = {.entry = {.count = 1, .reusable = true}}, SHIFT(48), + [696] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_param, 3, 0, 8), + [698] = {.entry = {.count = 1, .reusable = true}}, SHIFT(226), + [700] = {.entry = {.count = 1, .reusable = true}}, SHIFT(49), + [702] = {.entry = {.count = 1, .reusable = true}}, SHIFT(239), + [704] = {.entry = {.count = 1, .reusable = true}}, ACCEPT_INPUT(), + [706] = {.entry = {.count = 1, .reusable = true}}, SHIFT(230), + [708] = {.entry = {.count = 1, .reusable = true}}, SHIFT(222), + [710] = {.entry = {.count = 1, .reusable = true}}, SHIFT(216), + [712] = {.entry = {.count = 1, .reusable = true}}, SHIFT(176), + [714] = {.entry = {.count = 1, .reusable = true}}, SHIFT(166), + [716] = {.entry = {.count = 1, .reusable = true}}, SHIFT(220), + [718] = {.entry = {.count = 1, .reusable = true}}, SHIFT(250), [720] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_pattern_call, 3, 0, 0), - [722] = {.entry = {.count = 1, .reusable = true}}, SHIFT(244), + [722] = {.entry = {.count = 1, .reusable = true}}, SHIFT(263), [724] = {.entry = {.count = 1, .reusable = true}}, SHIFT(177), - [726] = {.entry = {.count = 1, .reusable = true}}, SHIFT(12), - [728] = {.entry = {.count = 1, .reusable = true}}, SHIFT(229), - [730] = {.entry = {.count = 1, .reusable = true}}, SHIFT(209), - [732] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_package_decl, 2, 0, 0), - [734] = {.entry = {.count = 1, .reusable = true}}, SHIFT(225), - [736] = {.entry = {.count = 1, .reusable = true}}, SHIFT(233), - [738] = {.entry = {.count = 1, .reusable = true}}, SHIFT(140), - [740] = {.entry = {.count = 1, .reusable = true}}, SHIFT(263), - 
[742] = {.entry = {.count = 1, .reusable = true}}, SHIFT(8), - [744] = {.entry = {.count = 1, .reusable = true}}, SHIFT(247), - [746] = {.entry = {.count = 1, .reusable = true}}, SHIFT(167), - [748] = {.entry = {.count = 1, .reusable = true}}, SHIFT(133), - [750] = {.entry = {.count = 1, .reusable = true}}, SHIFT(253), - [752] = {.entry = {.count = 1, .reusable = true}}, ACCEPT_INPUT(), - [754] = {.entry = {.count = 1, .reusable = true}}, SHIFT(219), - [756] = {.entry = {.count = 1, .reusable = true}}, SHIFT(226), - [758] = {.entry = {.count = 1, .reusable = true}}, SHIFT(217), - [760] = {.entry = {.count = 1, .reusable = true}}, SHIFT(257), - [762] = {.entry = {.count = 1, .reusable = true}}, SHIFT(237), - [764] = {.entry = {.count = 1, .reusable = true}}, SHIFT(57), - [766] = {.entry = {.count = 1, .reusable = true}}, SHIFT(134), - [768] = {.entry = {.count = 1, .reusable = true}}, SHIFT(245), - [770] = {.entry = {.count = 1, .reusable = true}}, SHIFT(274), - [772] = {.entry = {.count = 1, .reusable = true}}, SHIFT(91), - [774] = {.entry = {.count = 1, .reusable = true}}, SHIFT(171), + [726] = {.entry = {.count = 1, .reusable = true}}, SHIFT(251), + [728] = {.entry = {.count = 1, .reusable = true}}, SHIFT(223), + [730] = {.entry = {.count = 1, .reusable = true}}, SHIFT(277), + [732] = {.entry = {.count = 1, .reusable = true}}, SHIFT(8), + [734] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_pattern_call, 4, 0, 0), + [736] = {.entry = {.count = 1, .reusable = true}}, SHIFT(200), + [738] = {.entry = {.count = 1, .reusable = true}}, SHIFT(175), + [740] = {.entry = {.count = 1, .reusable = true}}, SHIFT(141), + [742] = {.entry = {.count = 1, .reusable = true}}, SHIFT(235), + [744] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_package_decl, 2, 0, 0), + [746] = {.entry = {.count = 1, .reusable = true}}, SHIFT(265), + [748] = {.entry = {.count = 1, .reusable = true}}, SHIFT(130), + [750] = {.entry = {.count = 1, .reusable = true}}, SHIFT(76), + [752] 
= {.entry = {.count = 1, .reusable = true}}, SHIFT(161), + [754] = {.entry = {.count = 1, .reusable = true}}, SHIFT(244), + [756] = {.entry = {.count = 1, .reusable = true}}, SHIFT(281), + [758] = {.entry = {.count = 1, .reusable = true}}, SHIFT(12), + [760] = {.entry = {.count = 1, .reusable = true}}, SHIFT(131), + [762] = {.entry = {.count = 1, .reusable = true}}, SHIFT(240), + [764] = {.entry = {.count = 1, .reusable = true}}, SHIFT(51), + [766] = {.entry = {.count = 1, .reusable = true}}, SHIFT(140), + [768] = {.entry = {.count = 1, .reusable = true}}, SHIFT(257), + [770] = {.entry = {.count = 1, .reusable = true}}, SHIFT(178), + [772] = {.entry = {.count = 1, .reusable = true}}, SHIFT(104), + [774] = {.entry = {.count = 1, .reusable = true}}, SHIFT(159), }; #ifdef __cplusplus diff --git a/tree-sitter-capable/src/tree_sitter/array.h b/tree-sitter-capable/src/tree_sitter/array.h index a17a574..56fc8cd 100644 --- a/tree-sitter-capable/src/tree_sitter/array.h +++ b/tree-sitter-capable/src/tree_sitter/array.h @@ -52,67 +52,96 @@ extern "C" { /// Reserve `new_capacity` elements of space in the array. If `new_capacity` is /// less than the array's current capacity, this function has no effect. -#define array_reserve(self, new_capacity) \ - _array__reserve((Array *)(self), array_elem_size(self), new_capacity) +#define array_reserve(self, new_capacity) \ + ((self)->contents = _array__reserve( \ + (void *)(self)->contents, &(self)->capacity, \ + array_elem_size(self), new_capacity) \ + ) /// Free any memory allocated for this array. Note that this does not free any /// memory allocated for the array's contents. -#define array_delete(self) _array__delete((Array *)(self)) +#define array_delete(self) \ + do { \ + if ((self)->contents) ts_free((self)->contents); \ + (self)->contents = NULL; \ + (self)->size = 0; \ + (self)->capacity = 0; \ + } while (0) /// Push a new `element` onto the end of the array. 
-#define array_push(self, element) \ - (_array__grow((Array *)(self), 1, array_elem_size(self)), \ - (self)->contents[(self)->size++] = (element)) +#define array_push(self, element) \ + do { \ + (self)->contents = _array__grow( \ + (void *)(self)->contents, (self)->size, &(self)->capacity, \ + 1, array_elem_size(self) \ + ); \ + (self)->contents[(self)->size++] = (element); \ + } while(0) /// Increase the array's size by `count` elements. /// New elements are zero-initialized. -#define array_grow_by(self, count) \ - do { \ - if ((count) == 0) break; \ - _array__grow((Array *)(self), count, array_elem_size(self)); \ +#define array_grow_by(self, count) \ + do { \ + if ((count) == 0) break; \ + (self)->contents = _array__grow( \ + (self)->contents, (self)->size, &(self)->capacity, \ + count, array_elem_size(self) \ + ); \ memset((self)->contents + (self)->size, 0, (count) * array_elem_size(self)); \ - (self)->size += (count); \ + (self)->size += (count); \ } while (0) /// Append all elements from one array to the end of another. -#define array_push_all(self, other) \ +#define array_push_all(self, other) \ array_extend((self), (other)->size, (other)->contents) /// Append `count` elements to the end of the array, reading their values from the /// `contents` pointer. -#define array_extend(self, count, contents) \ - _array__splice( \ - (Array *)(self), array_elem_size(self), (self)->size, \ - 0, count, contents \ +#define array_extend(self, count, other_contents) \ + (self)->contents = _array__splice( \ + (void*)(self)->contents, &(self)->size, &(self)->capacity, \ + array_elem_size(self), (self)->size, 0, count, other_contents \ ) /// Remove `old_count` elements from the array starting at the given `index`. At /// the same index, insert `new_count` new elements, reading their values from the /// `new_contents` pointer. 
-#define array_splice(self, _index, old_count, new_count, new_contents) \ - _array__splice( \ - (Array *)(self), array_elem_size(self), _index, \ - old_count, new_count, new_contents \ +#define array_splice(self, _index, old_count, new_count, new_contents) \ + (self)->contents = _array__splice( \ + (void *)(self)->contents, &(self)->size, &(self)->capacity, \ + array_elem_size(self), _index, old_count, new_count, new_contents \ ) /// Insert one `element` into the array at the given `index`. -#define array_insert(self, _index, element) \ - _array__splice((Array *)(self), array_elem_size(self), _index, 0, 1, &(element)) +#define array_insert(self, _index, element) \ + (self)->contents = _array__splice( \ + (void *)(self)->contents, &(self)->size, &(self)->capacity, \ + array_elem_size(self), _index, 0, 1, &(element) \ + ) /// Remove one element from the array at the given `index`. #define array_erase(self, _index) \ - _array__erase((Array *)(self), array_elem_size(self), _index) + _array__erase((void *)(self)->contents, &(self)->size, array_elem_size(self), _index) /// Pop the last element off the array, returning the element by value. #define array_pop(self) ((self)->contents[--(self)->size]) /// Assign the contents of one array to another, reallocating if necessary. 
-#define array_assign(self, other) \ - _array__assign((Array *)(self), (const Array *)(other), array_elem_size(self)) +#define array_assign(self, other) \ + (self)->contents = _array__assign( \ + (void *)(self)->contents, &(self)->size, &(self)->capacity, \ + (const void *)(other)->contents, (other)->size, array_elem_size(self) \ + ) /// Swap one array with another -#define array_swap(self, other) \ - _array__swap((Array *)(self), (Array *)(other)) +#define array_swap(self, other) \ + do { \ + void *_array_swap_tmp = (void *)(self)->contents; \ + (self)->contents = (other)->contents; \ + (other)->contents = _array_swap_tmp; \ + _array__swap(&(self)->size, &(self)->capacity, \ + &(other)->size, &(other)->capacity); \ + } while (0) /// Get the size of the array contents #define array_elem_size(self) (sizeof *(self)->contents) @@ -157,82 +186,90 @@ extern "C" { // Private -typedef Array(void) Array; - -/// This is not what you're looking for, see `array_delete`. -static inline void _array__delete(Array *self) { - if (self->contents) { - ts_free(self->contents); - self->contents = NULL; - self->size = 0; - self->capacity = 0; - } -} +// Pointers to individual `Array` fields (rather than the entire `Array` itself) +// are passed to the various `_array__*` functions below to address strict aliasing +// violations that arises when the _entire_ `Array` struct is passed as `Array(void)*`. +// +// The `Array` type itself was not altered as a solution in order to avoid breakage +// with existing consumers (in particular, parsers with external scanners). /// This is not what you're looking for, see `array_erase`. 
-static inline void _array__erase(Array *self, size_t element_size, - uint32_t index) { - assert(index < self->size); - char *contents = (char *)self->contents; +static inline void _array__erase(void* self_contents, uint32_t *size, + size_t element_size, uint32_t index) { + assert(index < *size); + char *contents = (char *)self_contents; memmove(contents + index * element_size, contents + (index + 1) * element_size, - (self->size - index - 1) * element_size); - self->size--; + (*size - index - 1) * element_size); + (*size)--; } /// This is not what you're looking for, see `array_reserve`. -static inline void _array__reserve(Array *self, size_t element_size, uint32_t new_capacity) { - if (new_capacity > self->capacity) { - if (self->contents) { - self->contents = ts_realloc(self->contents, new_capacity * element_size); +static inline void *_array__reserve(void *contents, uint32_t *capacity, + size_t element_size, uint32_t new_capacity) { + void *new_contents = contents; + if (new_capacity > *capacity) { + if (contents) { + new_contents = ts_realloc(contents, new_capacity * element_size); } else { - self->contents = ts_malloc(new_capacity * element_size); + new_contents = ts_malloc(new_capacity * element_size); } - self->capacity = new_capacity; + *capacity = new_capacity; } + return new_contents; } /// This is not what you're looking for, see `array_assign`. 
-static inline void _array__assign(Array *self, const Array *other, size_t element_size) { - _array__reserve(self, element_size, other->size); - self->size = other->size; - memcpy(self->contents, other->contents, self->size * element_size); +static inline void *_array__assign(void* self_contents, uint32_t *self_size, uint32_t *self_capacity, + const void *other_contents, uint32_t other_size, size_t element_size) { + void *new_contents = _array__reserve(self_contents, self_capacity, element_size, other_size); + *self_size = other_size; + memcpy(new_contents, other_contents, *self_size * element_size); + return new_contents; } /// This is not what you're looking for, see `array_swap`. -static inline void _array__swap(Array *self, Array *other) { - Array swap = *other; - *other = *self; - *self = swap; +static inline void _array__swap(uint32_t *self_size, uint32_t *self_capacity, + uint32_t *other_size, uint32_t *other_capacity) { + uint32_t tmp_size = *self_size; + uint32_t tmp_capacity = *self_capacity; + *self_size = *other_size; + *self_capacity = *other_capacity; + *other_size = tmp_size; + *other_capacity = tmp_capacity; } /// This is not what you're looking for, see `array_push` or `array_grow_by`. 
-static inline void _array__grow(Array *self, uint32_t count, size_t element_size) { - uint32_t new_size = self->size + count; - if (new_size > self->capacity) { - uint32_t new_capacity = self->capacity * 2; +static inline void *_array__grow(void *contents, uint32_t size, uint32_t *capacity, + uint32_t count, size_t element_size) { + void *new_contents = contents; + uint32_t new_size = size + count; + if (new_size > *capacity) { + uint32_t new_capacity = *capacity * 2; if (new_capacity < 8) new_capacity = 8; if (new_capacity < new_size) new_capacity = new_size; - _array__reserve(self, element_size, new_capacity); + new_contents = _array__reserve(contents, capacity, element_size, new_capacity); } + return new_contents; } /// This is not what you're looking for, see `array_splice`. -static inline void _array__splice(Array *self, size_t element_size, +static inline void *_array__splice(void *self_contents, uint32_t *size, uint32_t *capacity, + size_t element_size, uint32_t index, uint32_t old_count, uint32_t new_count, const void *elements) { - uint32_t new_size = self->size + new_count - old_count; + uint32_t new_size = *size + new_count - old_count; uint32_t old_end = index + old_count; uint32_t new_end = index + new_count; - assert(old_end <= self->size); + assert(old_end <= *size); - _array__reserve(self, element_size, new_size); + void *new_contents = _array__reserve(self_contents, capacity, element_size, new_size); - char *contents = (char *)self->contents; - if (self->size > old_end) { + char *contents = (char *)new_contents; + if (*size > old_end) { memmove( contents + new_end * element_size, contents + old_end * element_size, - (self->size - old_end) * element_size + (*size - old_end) * element_size ); } if (new_count > 0) { @@ -250,7 +287,9 @@ static inline void _array__splice(Array *self, size_t element_size, ); } } - self->size += new_count - old_count; + *size += new_count - old_count; + + return new_contents; } /// A binary search routine, based on 
Rust's `std::slice::binary_search_by`. diff --git a/vscode/syntaxes/capable.tmLanguage.json b/vscode/syntaxes/capable.tmLanguage.json index 95a4ca6..2f4e31c 100644 --- a/vscode/syntaxes/capable.tmLanguage.json +++ b/vscode/syntaxes/capable.tmLanguage.json @@ -37,7 +37,7 @@ "patterns": [ { "name": "constant.numeric.cap", - "match": "\\b\\d+u8\\b|\\b\\d+\\b" + "match": "\\b\\d+(?:u8|i64|u64)\\b|\\b\\d+\\b" } ] }, @@ -57,7 +57,7 @@ "patterns": [ { "name": "storage.type.cap", - "match": "\\b(i32|u32|u8|bool|string|Result)\\b" + "match": "\\b(i32|i64|u32|u64|u8|bool|string|Result)\\b" } ] }