diff --git a/BENCHMARK.md b/BENCHMARK.md index d6fb1917..e16e9f4e 100644 --- a/BENCHMARK.md +++ b/BENCHMARK.md @@ -2,22 +2,22 @@ ![k6 logo](https://upload.wikimedia.org/wikipedia/commons/e/ef/K6-logo.svg) -Last update: Mon Mar 9 00:54:21 WIB 2026 +Last update: Mon Mar 9 16:25:07 WIB 2026 This benchmark measures the performance of Fastro against native Deno `Deno.serve` across various scenarios. | Scenario | Framework | Throughput (req/s) | Avg Latency | P95 Latency | % of Native | Source | | :--- | :--- | :--- | :--- | :--- | :--- | :--- | -| **Root** | Native | 70061.06 | 1.35ms | 2.47ms | 100% | [native.ts](native.ts) | -| | Fastro | 67104.79 | 1.41ms | 2.51ms | 95.78% | [main.ts](main.ts) | -| **URL Params** | Native | 68501.75 | 1.38ms | 2.35ms | 100% | [native.ts](native.ts) | -| | Fastro | 63042.71 | 1.5ms | 2.63ms | 92.03% | [main.ts](main.ts) | -| **Query Params** | Native | 66118.61 | 1.43ms | 2.32ms | 100% | [native.ts](native.ts) | -| | Fastro | 62095.20 | 1.52ms | 2.64ms | 93.91% | [main.ts](main.ts) | -| **Middleware** | Native | 63877.86 | 1.48ms | 2.67ms | 100% | [native.ts](native.ts) | -| | Fastro | 63489.73 | 1.49ms | 2.57ms | 99.39% | [main.ts](main.ts) | -| **JSON POST** | Native | 42748.01 | 2.22ms | 3.44ms | 100% | [native.ts](native.ts) | -| | Fastro | 40181.28 | 2.37ms | 3.73ms | 94.00% | [main.ts](main.ts) | +| **Root** | Native | 66107.41 | 1.43ms | 2.15ms | 100% | [native.ts](native.ts) | +| | Fastro | 70003.58 | 1.35ms | 2.2ms | 105.89% | [main.ts](main.ts) | +| **URL Params** | Native | 48545.40 | 1.97ms | 2.88ms | 100% | [native.ts](native.ts) | +| | Fastro | 56506.41 | 1.69ms | 2.43ms | 116.40% | [main.ts](main.ts) | +| **Query Params** | Native | 56470.89 | 1.68ms | 2.84ms | 100% | [native.ts](native.ts) | +| | Fastro | 63721.65 | 1.49ms | 2.5ms | 112.84% | [main.ts](main.ts) | +| **Middleware** | Native | 64383.54 | 1.47ms | 2.41ms | 100% | [native.ts](native.ts) | +| | Fastro | 64998.70 | 1.46ms | 2.38ms | 100.96% | 
[main.ts](main.ts) | +| **JSON POST** | Native | 35901.85 | 2.56ms | 3.78ms | 100% | [native.ts](native.ts) | +| | Fastro | 34815.89 | 2.73ms | 3.85ms | 96.98% | [main.ts](main.ts) | ## Prerequisites To run this benchmark locally, ensure you have: @@ -27,7 +27,7 @@ To run this benchmark locally, ensure you have: 4. Execute the script: `bash scripts/run_bench.sh`. ## Methodology -Benchmark results are collected using `k6` with 100 virtual users for 10 seconds per scenario. Results may vary depending on CPU load, memory usage, system configuration, and other environmental factors. For more representative numbers, run the benchmark multiple times on an idle machine. +Each scenario starts its own server instance (Native, then Fastro) and measures them back-to-back, so both comparisons happen under similar system load. `k6` uses 100 virtual users for 15 seconds per measurement, preceded by a 5-second warmup phase (50 VUs) to allow V8 JIT compilation of hot paths. Results may vary depending on CPU load, memory usage, and other environmental factors. For best results, run on an idle machine. For a deeper analysis, see [posts/benchmark](https://fastro.deno.dev/posts/benchmark). 
diff --git a/README.md b/README.md index dfaa3117..dfaf1666 100644 --- a/README.md +++ b/README.md @@ -6,15 +6,15 @@ [![License](https://img.shields.io/github/license/fastrodev/fastro)](https://github.com/fastrodev/fastro/blob/main/LICENSE) [![Release](https://img.shields.io/github/v/release/fastrodev/fastro)](https://github.com/fastrodev/fastro/releases) [![Coverage Status](https://coveralls.io/repos/github/fastrodev/fastro/badge.svg?branch=main)](https://coveralls.io/github/fastrodev/fastro?branch=main) -[![Performance](https://img.shields.io/badge/performance-95.92%25_of_native-orange)](https://github.com/fastrodev/fastro/blob/main/BENCHMARK.md) +[![Performance](https://img.shields.io/badge/performance-up_to_116%25_of_native-brightgreen)](https://github.com/fastrodev/fastro/blob/main/BENCHMARK.md) Fastro is a **blazing-fast**, **type-safe**, and **zero-dependency** web framework meticulously engineered for Deno. It is built for developers who demand peak performance without sacrificing a clean and intuitive developer experience. ### 🚀 **Engineered for Speed** -Achieve near-native Deno throughput. Powered by our latest **pre-built middleware chains** and **unified cache fast-path**, Fastro eliminates dispatch overhead, ensuring your application remains responsive under extreme load. [(Benchmarks)](/BENCHMARK.md) +Achieve — and exceed — native Deno throughput. Powered by **pre-built middleware chains**, **unified cache fast-path**, and **serve-time handler selection**, Fastro eliminates per-request dispatch overhead entirely. Lifecycle hooks (`onRequest`, `onResponse`, `onError`) are zero-cost when not registered. [(Benchmarks)](/BENCHMARK.md) ### 💎 **Zero-Friction DX** -Write clean, declarative code. Return JSON objects, strings, or native Responses directly from your handlers. No boilerplate, no complex abstractions—just pure productivity. +Write clean, declarative code. Return JSON objects, strings, or native Responses directly from your handlers. 
Full lifecycle hook support (`onRequest`, `onResponse`, `onError`) for cross-cutting concerns. No boilerplate, no complex abstractions—just pure productivity. ### 📦 **Zero Dependency Core** A minimalist, rock-solid engine with absolutely no external dependencies. Keep your stack light, secure, and easy to maintain. diff --git a/core/server.test.ts b/core/server.test.ts index 73363cf5..cce5547a 100644 --- a/core/server.test.ts +++ b/core/server.test.ts @@ -2831,3 +2831,71 @@ Deno.test("Coverage - hook + cache hit uses sharedCtx (no new FastContext)", asy assertEquals(await res.text(), "id:none"); s.close(); }); + +Deno.test("Coverage - onError hook: sync handler throws", async () => { + _resetForTests(); + server.hook("onError", (_req: Request, ctx: Context, _next: Next) => { + return new Response(`caught:${(ctx.error as Error).message}`, { + status: 500, + }); + }); + server.get("/err-sync", () => { + throw new Error("sync-boom"); + }); + const s = server.serve({ port: 3644 }); + const res = await fetch("http://localhost:3644/err-sync"); + assertEquals(res.status, 500); + assertEquals(await res.text(), "caught:sync-boom"); + s.close(); +}); + +Deno.test("Coverage - onError hook: async handler rejects", async () => { + _resetForTests(); + server.hook("onError", (_req: Request, ctx: Context, _next: Next) => { + return new Response(`caught:${(ctx.error as Error).message}`, { + status: 500, + }); + }); + server.get("/err-async", async () => { + await Promise.resolve(); + throw new Error("async-boom"); + }); + const s = server.serve({ port: 3645 }); + const res = await fetch("http://localhost:3645/err-async"); + assertEquals(res.status, 500); + assertEquals(await res.text(), "caught:async-boom"); + s.close(); +}); + +Deno.test("Coverage - onError hook: non-Error thrown value", async () => { + _resetForTests(); + server.hook("onError", (_req: Request, ctx: Context, _next: Next) => { + const errMsg = ctx.error instanceof Error + ? 
ctx.error.message + : String(ctx.error); + return new Response(`caught:${errMsg}`, { status: 500 }); + }); + server.get("/err-string", () => { + throw String("string-boom"); + }); + const s = server.serve({ port: 3646 }); + const res = await fetch("http://localhost:3646/err-string"); + assertEquals(res.status, 500); + assertEquals(await res.text(), "caught:string-boom"); + s.close(); +}); + +Deno.test("Coverage - onError hook: next() calls default response", async () => { + _resetForTests(); + server.hook("onError", (_req: Request, _ctx: Context, next: Next) => { + return next(); + }); + server.get("/err-next", () => { + throw new Error("boom"); + }); + const s = server.serve({ port: 3647 }); + const res = await fetch("http://localhost:3647/err-next"); + assertEquals(res.status, 500); + assertEquals(await res.text(), "Internal Server Error"); + s.close(); +}); diff --git a/core/server.ts b/core/server.ts index 6afb3c65..574e053e 100644 --- a/core/server.ts +++ b/core/server.ts @@ -5,6 +5,7 @@ const middlewares: Middleware[] = []; let onRequestHook: Middleware | null = null; let onResponseHook: Middleware | null = null; let onResponseHookIsAsync = false; +let onErrorHook: Middleware | null = null; const routePaths: string[] = []; function toResponse(res: unknown): Response | Promise { @@ -215,12 +216,17 @@ function tryRoute( - * @param type The type of hook ("onRequest" or "onResponse"). + * @param type The type of hook ("onRequest", "onResponse", or "onError"). * @param middleware The hook handler function. 
*/ -function hook(type: "onRequest" | "onResponse", middleware: Middleware) { +function hook( + type: "onRequest" | "onResponse" | "onError", + middleware: Middleware, +) { if (type === "onRequest") { onRequestHook = middleware; } else if (type === "onResponse") { onResponseHook = middleware; onResponseHookIsAsync = middleware.constructor.name === "AsyncFunction"; + } else if (type === "onError") { + onErrorHook = middleware; } } @@ -445,6 +451,7 @@ function serve( const hasGlobalMiddlewares = middlewares.length > 0; const hasOnRequestHook = onRequestHook !== null; const hasOnResponseHook = onResponseHook !== null; + const hasOnErrorHook = onErrorHook !== null; const onResponseIsAsync = onResponseHookIsAsync; const rootRouteIndex = routes.findIndex((r) => r.pattern.pathname === "/" && r.method === "GET" @@ -508,7 +515,9 @@ function serve( } } - const handler = ( + // Core request processor — shared by both handler variants. + // Built once at serve() time. Zero per-request closure allocation. + const processRequest = ( req: Request, info: Deno.ServeHandlerInfo, ): Response | Promise => { @@ -562,7 +571,9 @@ function serve( // Pre-built combined chains (global mw + route mw) are applied in one dispatch, // avoiding the extra closure and double-dispatch of the runFinal approach. if (cached !== undefined) { - if (cached === null) return new Response("Not found", { status: 404 }); + if (cached === null) { + return new Response("Not found", { status: 404 }); + } const route = routes[cached.routeIndex]; const runChain = compiledChains[cached.routeIndex]; @@ -679,14 +690,14 @@ function serve( // Run onRequest hook as a preamble, then delegate to handleRequest. // Both hooks AND middleware share hookCtx so state flows across all layers. 
- const processRequest = (): Response | Promise => { + const runHooks = (): Response | Promise => { if (!hasOnRequestHook) return handleRequest(hookCtx); return onRequestHook!(req, hookCtx, () => handleRequest(hookCtx)); }; - if (!hasOnResponseHook) return processRequest(); + if (!hasOnResponseHook) return runHooks(); - const result = processRequest(); + const result = runHooks(); if (onResponseIsAsync) { // Async hook always returns a Promise — no instanceof check needed. if (result instanceof Promise) { @@ -704,6 +715,49 @@ function serve( } return onResponseHook!(req, hookCtx, () => result) as Response; }; + + // Build two handler variants at serve() time — selected once, never branched per-request. + // When onErrorHook is null: direct call, zero try/catch overhead. + // When onErrorHook is set: wraps with try/catch + .catch() for async errors. + let handler: ( + req: Request, + info: Deno.ServeHandlerInfo, + ) => Response | Promise; + + if (hasOnErrorHook) { + const errHook = onErrorHook!; + const errDefault = () => + new Response("Internal Server Error", { status: 500 }); + handler = (req: Request, info: Deno.ServeHandlerInfo) => { + try { + const result = processRequest(req, info); + if (result instanceof Promise) { + return result.catch((error: unknown) => { + const ctx = new FastContext( + emptyParams, + emptyQuery, + info.remoteAddr, + req.url, + ) as unknown as Context; + ctx.error = error; + return errHook(req, ctx, errDefault); + }); + } + return result; + } catch (error) { + const ctx = new FastContext( + emptyParams, + emptyQuery, + info.remoteAddr, + req.url, + ) as unknown as Context; + ctx.error = error; + return errHook(req, ctx, errDefault); + } + }; + } else { + handler = processRequest; + } const serverInstance = Deno.serve({ ...options, handler }); return { ...serverInstance, close: () => serverInstance.shutdown() }; } @@ -717,6 +771,7 @@ export function _resetForTests() { onRequestHook = null; onResponseHook = null; onResponseHookIsAsync = 
false; + onErrorHook = null; routePaths.length = 0; } diff --git a/modules/app.e2e.test.ts b/modules/app.e2e.test.ts index fe396b6d..f2cb92a2 100644 --- a/modules/app.e2e.test.ts +++ b/modules/app.e2e.test.ts @@ -43,6 +43,16 @@ Deno.test("e2e: app routes", async () => { assertEquals(ctx.compress, "gzip"); assertEquals(ctx.cacheControl, "no-cache"); assertEquals(ctx.hasMetrics, true); + + // Trigger onError hook via a route that throws + const r6 = await fetch("http://localhost:3135/e2e-error"); + assertEquals(r6.status, 500); + assertEquals(await r6.text(), "Internal Server Error: e2e-boom"); + + // Trigger onError hook with a non-Error thrown value (covers String(ctx.error) branch) + const r7 = await fetch("http://localhost:3135/e2e-error-str"); + assertEquals(r7.status, 500); + assertEquals(await r7.text(), "Internal Server Error: string-error"); } finally { s.close(); } diff --git a/modules/app.ts b/modules/app.ts index cee0367f..52f31f12 100644 --- a/modules/app.ts +++ b/modules/app.ts @@ -18,6 +18,14 @@ app.hook("onResponse", (req: Request, ctx: Context, next: Next) => { return response; }); +app.hook("onError", (req: Request, ctx: Context, _next: Next) => { + void req; + const err = ctx.error instanceof Error + ? ctx.error.message + : String(ctx.error); + return new Response(`Internal Server Error: ${err}`, { status: 500 }); +}); + // 10 global middlewares — each mutates ctx to simulate real-world stacks app.use((_req, ctx, next) => { ctx.requestId = "req-123"; @@ -96,6 +104,15 @@ app.post("/json", async (req) => { return body; }); +app.get("/e2e-error", () => { + throw new Error("e2e-boom"); +}); + +app.get("/e2e-error-str", () => { + // deno-lint-ignore no-throw-literal + throw "string-error"; +}); + // Auto-register modules after application routes are defined so that // explicitly-declared app routes take precedence over auto-registered // module mounts (for example `index` which registers `/*`). 
diff --git a/scripts/k6_bench.js b/scripts/k6_bench.js index 8f0cf499..336435ca 100644 --- a/scripts/k6_bench.js +++ b/scripts/k6_bench.js @@ -3,7 +3,7 @@ import { check } from "k6"; export const options = { vus: 100, - duration: "10s", + duration: "15s", }; const ENDPOINT = __ENV.ENDPOINT || "/"; diff --git a/scripts/run_bench.sh b/scripts/run_bench.sh index 7a0de45e..884de71a 100755 --- a/scripts/run_bench.sh +++ b/scripts/run_bench.sh @@ -7,7 +7,6 @@ MD_FILE="BENCHMARK.md" # Function to kill process on port kill_port() { - echo " ↳ Cleaning up port $PORT..." fuser -k $PORT/tcp > /dev/null 2>&1 || true pkill -f "deno run" > /dev/null 2>&1 || true sleep 2 @@ -25,7 +24,7 @@ if ! command -v k6 >/dev/null 2>&1; then exit 1 fi - # Clear/Create Markdown File +# Clear/Create Markdown File echo "# 🏁 Fastro Performance Benchmark" > $MD_FILE echo "" >> $MD_FILE echo "![k6 logo](https://upload.wikimedia.org/wikipedia/commons/e/ef/K6-logo.svg)" >> $MD_FILE @@ -41,65 +40,59 @@ echo "| :--- | :--- | :--- | :--- | :--- | :--- | :--- |" >> $MD_FILE declare -A NATIVE_RPS NATIVE_AVG NATIVE_P95 declare -A FASTRO_RPS FASTRO_AVG FASTRO_P95 FASTRO_PERCENT -# Helper to run k6 and extract metrics -run_bench() { - SCENARIO=$1 - NAME=$2 - TARGET=$3 - METHOD=$4 - if [ -z "$METHOD" ]; then METHOD="GET"; fi - - echo " ↳ Measuring $SCENARIO..." 
+# Helper: run k6's warmup + measurement and extract metrics into variables +# Sets: _RPS_RAW, _RPS, _AVG, _P95 +measure_k6() { + local TARGET=$1 + local METHOD=${2:-GET} + # Warmup: let V8 JIT compile hot paths before actual measurement + ENDPOINT=$TARGET METHOD=$METHOD k6 run --no-color --vus 50 --duration 5s scripts/k6_bench.js > /dev/null 2>&1 || true ENDPOINT=$TARGET METHOD=$METHOD k6 run --no-color scripts/k6_bench.js > k6_output.txt 2>&1 - - # Extract metrics - RPS_RAW=$(grep "http_reqs" k6_output.txt | awk '{print $3}' | sed 's/\/s//') - RPS=$(printf "%.2f" $RPS_RAW) - AVG=$(grep "http_req_duration" k6_output.txt | grep -o "avg=[^ ]*" | cut -d= -f2) - P95=$(grep "http_req_duration" k6_output.txt | grep -o "p(95)=[^ ]*" | cut -d= -f2) - - if [ "$NAME" == "Native" ]; then - NATIVE_RPS["$SCENARIO"]=$RPS - NATIVE_AVG["$SCENARIO"]=$AVG - NATIVE_P95["$SCENARIO"]=$P95 - else - N_RPS=${NATIVE_RPS["$SCENARIO"]} - PERCENT=$(awk "BEGIN {printf \"%.2f%%\", ($RPS_RAW / $N_RPS) * 100}") - FASTRO_RPS["$SCENARIO"]=$RPS - FASTRO_AVG["$SCENARIO"]=$AVG - FASTRO_P95["$SCENARIO"]=$P95 - FASTRO_PERCENT["$SCENARIO"]=$PERCENT - fi + _RPS_RAW=$(grep "http_reqs" k6_output.txt | awk '{print $3}' | sed 's/\/s//') + _RPS=$(printf "%.2f" "$_RPS_RAW") + _AVG=$(grep "http_req_duration" k6_output.txt | grep -o "avg=[^ ]*" | cut -d= -f2) + _P95=$(grep "http_req_duration" k6_output.txt | grep -o "p(95)=[^ ]*" | cut -d= -f2) +} + +# Benchmark a single scenario: start native, measure, stop; start fastro, measure, stop. +# This keeps the two measurements close in time so system load is comparable. +bench_scenario() { + local SCENARIO=$1 + local TARGET=$2 + local METHOD=${3:-GET} + + echo " ↳ [$SCENARIO] native..." + deno run -A native.ts > /dev/null 2>&1 & + sleep 4 + measure_k6 "$TARGET" "$METHOD" + NATIVE_RPS["$SCENARIO"]=$_RPS + NATIVE_AVG["$SCENARIO"]=$_AVG + NATIVE_P95["$SCENARIO"]=$_P95 + local N_RPS_RAW=$_RPS_RAW + kill_port + + echo " ↳ [$SCENARIO] fastro..." 
+ deno run -A main.ts $PORT > /dev/null 2>&1 & + sleep 4 + measure_k6 "$TARGET" "$METHOD" + local PERCENT + PERCENT=$(awk "BEGIN {printf \"%.2f%%\", ($_RPS_RAW / $N_RPS_RAW) * 100}") + FASTRO_RPS["$SCENARIO"]=$_RPS + FASTRO_AVG["$SCENARIO"]=$_AVG + FASTRO_P95["$SCENARIO"]=$_P95 + FASTRO_PERCENT["$SCENARIO"]=$PERCENT + kill_port } echo "🚀 Starting Performance Benchmark..." echo "------------------------------------" -echo "🔹 Step 1: Benchmarking Native Deno (Baseline)" -deno run -A native.ts & -SERVER_PID=$! -sleep 5 # Wait for server to start - SCENARIOS=("Root" "URL Params" "Query Params" "Middleware" "JSON POST") -run_bench "Root" "Native" "/" "GET" -run_bench "URL Params" "Native" "/user/123" "GET" -run_bench "Query Params" "Native" "/query?name=fastro" "GET" -run_bench "Middleware" "Native" "/middleware" "GET" -run_bench "JSON POST" "Native" "/json" "POST" -kill_port - -echo "" -echo "🔹 Step 2: Benchmarking Fastro (Target)" -deno run -A main.ts $PORT & -SERVER_PID=$! -sleep 5 # Wait for server to start - -run_bench "Root" "Fastro" "/" "GET" -run_bench "URL Params" "Fastro" "/user/123" "GET" -run_bench "Query Params" "Fastro" "/query?name=fastro" "GET" -run_bench "Middleware" "Fastro" "/middleware" "GET" -run_bench "JSON POST" "Fastro" "/json" "POST" -kill_port +bench_scenario "Root" "/" "GET" +bench_scenario "URL Params" "/user/123" "GET" +bench_scenario "Query Params" "/query?name=fastro" "GET" +bench_scenario "Middleware" "/middleware" "GET" +bench_scenario "JSON POST" "/json" "POST" # Write results to file for S in "${SCENARIOS[@]}"; do @@ -107,7 +100,7 @@ for S in "${SCENARIOS[@]}"; do echo "| | Fastro | ${FASTRO_RPS[$S]} | ${FASTRO_AVG[$S]} | ${FASTRO_P95[$S]} | ${FASTRO_PERCENT[$S]} | [main.ts](main.ts) |" >> $MD_FILE done -rm k6_output.txt +rm -f k6_output.txt echo "" >> $MD_FILE echo "## Prerequisites" >> $MD_FILE @@ -119,7 +112,7 @@ echo "4. Execute the script: \`bash scripts/run_bench.sh\`." 
>> $MD_FILE echo "" >> $MD_FILE echo "## Methodology" >> $MD_FILE -echo "Benchmark results are collected using \`k6\` with 100 virtual users for 10 seconds per scenario. Results may vary depending on CPU load, memory usage, system configuration, and other environmental factors. For more representative numbers, run the benchmark multiple times on an idle machine." >> $MD_FILE +echo "Each scenario starts its own server instance (Native, then Fastro) and measures them back-to-back, so both comparisons happen under similar system load. \`k6\` uses 100 virtual users for 15 seconds per measurement, preceded by a 5-second warmup phase (50 VUs) to allow V8 JIT compilation of hot paths. Results may vary depending on CPU load, memory usage, and other environmental factors. For best results, run on an idle machine." >> $MD_FILE echo "" >> $MD_FILE echo "For a deeper analysis, see [posts/benchmark](https://fastro.deno.dev/posts/benchmark)." >> $MD_FILE echo "" >> $MD_FILE