-
Notifications
You must be signed in to change notification settings - Fork 0
feat: add KQL search for completions #70
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Merged
Merged
Changes from all commits
Commits
Show all changes
5 commits
Select commit
Hold shift + click to select a range
5df3d59
feat: add KQL search for completions (#54)
pescn 5f2034b
refactor: move KQL validation to frontend, remove CLAUDE.md/TODOs.md
pescn d49dbbe
fix: address code review issues in KQL search
pescn 1f2518f
fix: address automated review comments in KQL search
pescn 946a182
fix: return 400 for CompilerError in search endpoints
pescn File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,285 @@ | ||
| import { Elysia, t } from "elysia"; | ||
| import { | ||
| parseKql, | ||
| compileSearch, | ||
| getSearchableFields, | ||
| } from "@/search"; | ||
| import { | ||
| searchCompletions, | ||
| aggregateCompletions, | ||
| searchCompletionsTimeSeries, | ||
| getDistinctFieldValues, | ||
| } from "@/db"; | ||
| import { createLogger } from "@/utils/logger"; | ||
|
|
||
// Module-scoped logger tagged "search" for all admin search routes below.
const logger = createLogger("search");
|
|
||
| function parseTimeRange( | ||
| from?: string, | ||
| to?: string, | ||
| ): { from: Date; to: Date } | undefined { | ||
| if (!from || !to) { | ||
| return undefined; | ||
| } | ||
| const fromDate = new Date(from); | ||
| const toDate = new Date(to); | ||
| if (Number.isNaN(fromDate.getTime()) || Number.isNaN(toDate.getTime())) { | ||
| throw new Error("Invalid timeRange date"); | ||
| } | ||
| if (fromDate > toDate) { | ||
| throw new Error("timeRange.from must be <= timeRange.to"); | ||
| } | ||
| return { from: fromDate, to: toDate }; | ||
| } | ||
|
|
||
| function escapeCsvField(value: unknown): string { | ||
| if (value == null) { | ||
| return ""; | ||
| } | ||
| const str = typeof value === "object" ? JSON.stringify(value) : String(value as string | number | boolean); | ||
| if (str.includes(",") || str.includes("\n") || str.includes("\r") || str.includes('"')) { | ||
| return `"${str.replace(/"/g, '""')}"`; | ||
| } | ||
| return str; | ||
| } | ||
|
|
||
| export const adminSearch = new Elysia() | ||
| // Search completions | ||
| .post( | ||
| "/search", | ||
| async ({ body, status }) => { | ||
| const result = parseKql(body.query); | ||
| if (!result.success) { | ||
| return status(400, { | ||
| error: "Invalid query", | ||
| details: result.error, | ||
| }); | ||
| } | ||
|
|
||
| let timeRange: { from: Date; to: Date } | undefined; | ||
| try { | ||
| timeRange = parseTimeRange(body.timeRange?.from, body.timeRange?.to); | ||
| } catch (err) { | ||
| return status(400, { error: err instanceof Error ? err.message : "Invalid timeRange" }); | ||
| } | ||
| let compiled; | ||
| try { | ||
| compiled = compileSearch(result.query, { timeRange }); | ||
| } catch (err) { | ||
| return status(400, { | ||
| error: "Invalid query", | ||
| details: err instanceof Error ? err.message : "Compilation failed", | ||
| }); | ||
| } | ||
|
|
||
| // If the query has aggregation, return aggregation results | ||
| if (compiled.aggregation) { | ||
| try { | ||
| const results = await aggregateCompletions(compiled); | ||
| return { type: "aggregation" as const, results }; | ||
| } catch (err) { | ||
| logger.error("Aggregation failed", { error: err }); | ||
| return status(500, { | ||
| error: "Aggregation failed", | ||
| }); | ||
| } | ||
|
pescn marked this conversation as resolved.
|
||
| } | ||
|
|
||
| // Otherwise, return paginated document results | ||
| try { | ||
| const data = await searchCompletions( | ||
| compiled, | ||
| body.offset ?? 0, | ||
| body.limit ?? 100, | ||
| ); | ||
| // Truncate model names that contain '@' | ||
| data.data.forEach((row) => { | ||
| if (row.model && row.model.includes("@")) { | ||
| row.model = row.model.split("@", 2)[0]!; | ||
| } | ||
| }); | ||
| return { type: "documents" as const, ...data }; | ||
| } catch (err) { | ||
| logger.error("Search failed", { error: err }); | ||
| return status(500, { | ||
| error: "Search failed", | ||
| }); | ||
| } | ||
| }, | ||
| { | ||
| body: t.Object({ | ||
| query: t.String({ maxLength: 2000 }), | ||
| timeRange: t.Optional( | ||
| t.Object({ | ||
| from: t.Optional(t.String()), | ||
| to: t.Optional(t.String()), | ||
| }), | ||
| ), | ||
| offset: t.Optional(t.Integer({ minimum: 0 })), | ||
| limit: t.Optional(t.Integer({ minimum: 1, maximum: 500 })), | ||
| }), | ||
| }, | ||
| ) | ||
| // Search histogram (time series) | ||
| .post( | ||
| "/search/histogram", | ||
| async ({ body, status }) => { | ||
| const result = parseKql(body.query); | ||
| if (!result.success) { | ||
| return status(400, { | ||
| error: "Invalid query", | ||
| details: result.error, | ||
| }); | ||
| } | ||
|
|
||
| let timeRange: { from: Date; to: Date } | undefined; | ||
| try { | ||
| timeRange = parseTimeRange(body.timeRange?.from, body.timeRange?.to); | ||
| } catch (err) { | ||
| return status(400, { error: err instanceof Error ? err.message : "Invalid timeRange" }); | ||
| } | ||
| let compiled; | ||
| try { | ||
| compiled = compileSearch(result.query, { timeRange }); | ||
| } catch (err) { | ||
| return status(400, { | ||
| error: "Invalid query", | ||
| details: err instanceof Error ? err.message : "Compilation failed", | ||
| }); | ||
| } | ||
|
|
||
| try { | ||
| const buckets = await searchCompletionsTimeSeries( | ||
| compiled, | ||
| body.bucketSeconds ?? 60, | ||
| ); | ||
| return { buckets }; | ||
| } catch (err) { | ||
| logger.error("Histogram query failed", { error: err }); | ||
| return status(500, { | ||
| error: "Histogram query failed", | ||
| }); | ||
| } | ||
| }, | ||
| { | ||
| body: t.Object({ | ||
| query: t.String({ maxLength: 2000 }), | ||
| timeRange: t.Optional( | ||
| t.Object({ | ||
| from: t.Optional(t.String()), | ||
| to: t.Optional(t.String()), | ||
| }), | ||
| ), | ||
| bucketSeconds: t.Optional(t.Integer({ minimum: 1 })), | ||
| }), | ||
| }, | ||
| ) | ||
| // Get searchable fields (for autocomplete) | ||
| .get("/search/fields", async () => { | ||
| const fields = getSearchableFields(); | ||
|
|
||
| // Enrich with distinct values for key fields | ||
| const modelValues = await getDistinctFieldValues("model"); | ||
|
|
||
| return { | ||
| fields: fields.map((f) => { | ||
| if (f.name === "model") { | ||
| return Object.assign({}, f, { values: modelValues }); | ||
| } | ||
| return f; | ||
| }), | ||
| }; | ||
| }) | ||
| // Export search results | ||
| .post( | ||
| "/search/export", | ||
| async ({ body, status, set }) => { | ||
| const result = parseKql(body.query); | ||
| if (!result.success) { | ||
| return status(400, { | ||
| error: "Invalid query", | ||
| details: result.error, | ||
| }); | ||
| } | ||
|
|
||
| let timeRange: { from: Date; to: Date } | undefined; | ||
| try { | ||
| timeRange = parseTimeRange(body.timeRange?.from, body.timeRange?.to); | ||
| } catch (err) { | ||
| return status(400, { error: err instanceof Error ? err.message : "Invalid timeRange" }); | ||
| } | ||
| let compiled; | ||
| try { | ||
| compiled = compileSearch(result.query, { timeRange }); | ||
| } catch (err) { | ||
| return status(400, { | ||
| error: "Invalid query", | ||
| details: err instanceof Error ? err.message : "Compilation failed", | ||
| }); | ||
| } | ||
|
|
||
| try { | ||
| // Fetch all results (up to 10000 for export) | ||
| const data = await searchCompletions(compiled, 0, 10000); | ||
|
|
||
| if (body.format === "csv") { | ||
| set.headers["content-type"] = "text/csv"; | ||
| set.headers["content-disposition"] = | ||
| 'attachment; filename="search-results.csv"'; | ||
|
|
||
| const headers = [ | ||
| "id", | ||
| "model", | ||
| "status", | ||
| "duration", | ||
| "ttft", | ||
| "prompt_tokens", | ||
| "completion_tokens", | ||
| "created_at", | ||
| "provider_name", | ||
| "api_format", | ||
| "rating", | ||
| ]; | ||
| const rows = data.data.map((row) => | ||
| [ | ||
| escapeCsvField(row.id), | ||
| escapeCsvField(row.model), | ||
| escapeCsvField(row.status), | ||
| escapeCsvField(row.duration), | ||
| escapeCsvField(row.ttft), | ||
| escapeCsvField(row.prompt_tokens), | ||
| escapeCsvField(row.completion_tokens), | ||
| escapeCsvField(row.created_at), | ||
| escapeCsvField(row.provider_name), | ||
| escapeCsvField(row.api_format), | ||
| escapeCsvField(row.rating), | ||
| ].join(","), | ||
| ); | ||
| return [headers.join(","), ...rows].join("\n"); | ||
|
pescn marked this conversation as resolved.
coderabbitai[bot] marked this conversation as resolved.
|
||
| } | ||
|
|
||
| // JSON format | ||
| set.headers["content-type"] = "application/json"; | ||
| set.headers["content-disposition"] = | ||
| 'attachment; filename="search-results.json"'; | ||
| return JSON.stringify(data.data, null, 2); | ||
| } catch (err) { | ||
| logger.error("Export failed", { error: err }); | ||
| return status(500, { | ||
| error: "Export failed", | ||
| }); | ||
| } | ||
| }, | ||
| { | ||
| body: t.Object({ | ||
| query: t.String({ maxLength: 2000 }), | ||
| timeRange: t.Optional( | ||
| t.Object({ | ||
| from: t.Optional(t.String()), | ||
| to: t.Optional(t.String()), | ||
| }), | ||
| ), | ||
| format: t.Union([t.Literal("csv"), t.Literal("json")]), | ||
| }), | ||
| }, | ||
| ); | ||
Oops, something went wrong.
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
Uh oh!
There was an error while loading. Please reload this page.