diff --git a/apps/example/package.json b/apps/example/package.json index 11406e0..460e1aa 100644 --- a/apps/example/package.json +++ b/apps/example/package.json @@ -4,8 +4,8 @@ "private": true, "type": "module", "scripts": { - "dev": "bun run --filter leadtype build && bun run pipeline:build && PORTLESS_PORT=1355 PORTLESS_HTTPS=0 portless run vite dev", - "build": "bun run --filter leadtype build && bun run pipeline:build && vite build", + "dev": "bun run --filter leadtype build && bun run --silent pipeline:build && PORTLESS_PORT=1355 PORTLESS_HTTPS=0 portless run vite dev", + "build": "bun run --filter leadtype build && bun run --silent pipeline:build && vite build", "preview": "PORTLESS_PORT=1355 PORTLESS_HTTPS=0 portless run vite preview", "check-types": "tsgo --noEmit", "test:e2e": "bun run --filter leadtype build && bun run pipeline:build && playwright test", diff --git a/apps/example/scripts/mdx-convert.ts b/apps/example/scripts/mdx-convert.ts index b83ad47..dc98776 100644 --- a/apps/example/scripts/mdx-convert.ts +++ b/apps/example/scripts/mdx-convert.ts @@ -36,11 +36,9 @@ if (!existsSync(srcDir)) { process.exit(1); } -process.stdout.write(`Converting MDX from ${srcDir} → ${outDir}\n`); await convertAllMdx({ srcDir, outDir, remarkPlugins, enrichFrontmatterFromGit: true, }); -process.stdout.write("MDX conversion complete\n"); diff --git a/packages/leadtype/package.json b/packages/leadtype/package.json index 7e5135a..5b20468 100644 --- a/packages/leadtype/package.json +++ b/packages/leadtype/package.json @@ -73,7 +73,7 @@ "README.md" ], "scripts": { - "build": "rollup -c --configPlugin rollup-plugin-esbuild && bun run docs:generate", + "build": "rollup -c --configPlugin rollup-plugin-esbuild --silent && bun run --silent docs:generate", "dev": "rollup -c --configPlugin rollup-plugin-esbuild -w", "check-types": "tsgo --noEmit", "docs:generate": "bun run ./scripts/generate-docs.ts", diff --git a/packages/leadtype/scripts/generate-docs.ts 
b/packages/leadtype/scripts/generate-docs.ts index aa5af9c..107ed76 100644 --- a/packages/leadtype/scripts/generate-docs.ts +++ b/packages/leadtype/scripts/generate-docs.ts @@ -3,6 +3,7 @@ import { dirname, join, resolve } from "node:path"; import { fileURLToPath } from "node:url"; import docsConfig from "../../../docs/docs.config"; import { convertAllMdx } from "../src/convert/index"; +import { logger } from "../src/internal/logger"; import { generateAgentsMd, resolveDocsNavigation } from "../src/llm/index"; import { defaultRemarkPlugins } from "../src/remark/index"; @@ -34,9 +35,13 @@ const navigation = await resolveDocsNavigation({ }); if (navigation.unknown.length > 0) { for (const { urlPath, slug } of navigation.unknown) { - process.stderr.write( - `error: ${urlPath} declares unknown group "${slug}".\n` - ); + logger.error({ + human: { message: `${urlPath} declares unknown group "${slug}"` }, + json: { + event: "docs.unknown_group", + fields: { urlPath, slug }, + }, + }); } process.exit(1); } @@ -52,4 +57,12 @@ const { outputPath } = await generateAgentsMd({ groups: docsConfig.groups, }); -process.stdout.write(`Generated ${outputPath} and ${OUT_DOCS_DIR}/*.md\n`); +logger.info({ + human: { + message: `Generated ${outputPath} and ${OUT_DOCS_DIR}/*.md`, + }, + json: { + event: "docs.generate.done", + fields: { outputPath, docsDir: OUT_DOCS_DIR }, + }, +}); diff --git a/packages/leadtype/src/cli.test.ts b/packages/leadtype/src/cli.test.ts index 35b2a88..cdca393 100644 --- a/packages/leadtype/src/cli.test.ts +++ b/packages/leadtype/src/cli.test.ts @@ -112,7 +112,8 @@ describe("leadtype CLI", () => { ); expect(code).toBe(0); - expect(capture.stdout).toContain("Generated docs pipeline output"); + expect(capture.stdout).toBe(""); + expect(capture.stderr).toContain("Generated docs pipeline output"); expect(existsSync(path.join(outDir, "docs", "methodology.md"))).toBe(true); expect( existsSync(path.join(outDir, "docs", "build", "connect-docs-site.md")) diff --git 
a/packages/leadtype/src/cli.ts b/packages/leadtype/src/cli.ts index 23f6475..8ac1338 100644 --- a/packages/leadtype/src/cli.ts +++ b/packages/leadtype/src/cli.ts @@ -2,6 +2,7 @@ import { resolve } from "node:path"; import { pathToFileURL } from "node:url"; import { getGenerateUsage, runGenerateCommand } from "./cli/generate"; +import { logger, setLogStreams } from "./internal/logger"; import { getLintUsage, runLintCommand } from "./lint/cli"; type CliIo = { @@ -36,6 +37,7 @@ export async function runCli( argv: string[], io: CliIo = { stderr: process.stderr, stdout: process.stdout } ): Promise<number> { + setLogStreams(io); const [command, ...rest] = argv; if (!command || command === "-h" || command === "--help") { @@ -72,7 +74,10 @@ if (isDirectRun()) { }) .catch((error) => { const message = error instanceof Error ? error.message : String(error); - process.stderr.write(`leadtype: ${message}\n`); + logger.error({ + human: { message, hint: "set DEBUG=1 to print the stack" }, + json: { event: "cli.fatal", fields: { message } }, + }); if (process.env.DEBUG && error instanceof Error && error.stack) { process.stderr.write(`${error.stack}\n`); } diff --git a/packages/leadtype/src/cli/generate.ts b/packages/leadtype/src/cli/generate.ts index 8267360..995cf00 100644 --- a/packages/leadtype/src/cli/generate.ts +++ b/packages/leadtype/src/cli/generate.ts @@ -5,6 +5,12 @@ import path from "node:path"; import fg from "fast-glob"; import matter from "gray-matter"; import { convertAllMdx } from "../convert"; +import { + logger, + setLogFormat, + setLogStreams, + setVerbose, +} from "../internal/logger"; import type { DocsGroup, ProductInfo } from "../llm"; import { generateAgentReadabilityArtifacts, @@ -38,6 +44,7 @@ export type GenerateArgs = { outDir: string; srcDir: string; summary?: string; + verbose: boolean; }; export type GenerateIo = { @@ -106,6 +113,7 @@ Options: --enrich-git Add lastModified and lastAuthor from git history --format text | json (default: text) --json Alias for 
--format json + -v, --verbose Print per-file progress events to stderr -h, --help Show this help `; @@ -132,6 +140,7 @@ export function parseGenerateArgs(argv: string[]): GenerateArgs { include: [], outDir: DEFAULT_OUT_DIR, srcDir: ".", + verbose: false, }; for (let i = 0; i < argv.length; i++) { @@ -166,6 +175,8 @@ export function parseGenerateArgs(argv: string[]): GenerateArgs { args.format = value; } else if (arg === "--json") { args.format = "json"; + } else if (arg === "--verbose" || arg === "-v") { + args.verbose = true; } else if (arg) { throw new Error(`unknown option: ${arg}`); } @@ -358,22 +369,23 @@ export async function runGenerateCommand( return 0; } + setLogFormat(args.format === "json" ? "json" : "human"); + setVerbose(args.verbose); + setLogStreams({ stderr: io.stderr }); + const srcDir = path.resolve(args.srcDir); const docsDir = path.resolve(srcDir, args.docsDir); const outDir = path.resolve(args.outDir); if (!existsSync(docsDir)) { if (args.format === "json") { - io.stderr.write( - `${JSON.stringify( - { - error: "docs directory not found", - path: docsDir, - }, - null, - 2 - )}\n` - ); + logger.error({ + human: { message: `docs directory not found at ${docsDir}` }, + json: { + event: "generate.docs_not_found", + fields: { error: "docs directory not found", path: docsDir }, + }, + }); } else { io.stderr.write( `leadtype generate: docs directory not found at ${docsDir}\n` @@ -465,25 +477,30 @@ export async function runGenerateCommand( if (args.format === "json") { io.stdout.write(`${renderGenerateResult(result)}\n`); - } else { - io.stdout.write(`Generated docs pipeline output in ${outDir}\n`); } + logger.info({ + human: { message: `Generated docs pipeline output in ${outDir}` }, + json: { + event: "generate.done", + fields: { outDir, mode: result.mode }, + }, + }); } catch (error) { const message = error instanceof Error ? 
error.message : String(error); if (args.format === "json") { - io.stderr.write( - `${JSON.stringify( - { + logger.error({ + human: { message }, + json: { + event: "generate.fail", + fields: { error: message, filters: { exclude: args.exclude, include: args.include, }, }, - null, - 2 - )}\n` - ); + }, + }); } else { io.stderr.write(`leadtype generate: ${message}\n`); } diff --git a/packages/leadtype/src/convert/convert.ts b/packages/leadtype/src/convert/convert.ts index cdfb543..c4832d4 100644 --- a/packages/leadtype/src/convert/convert.ts +++ b/packages/leadtype/src/convert/convert.ts @@ -14,7 +14,7 @@ import { deriveDocContext, resolvePlaceholderStrings, } from "../internal/docs-context"; -import { log } from "../internal/logger"; +import { logger } from "../internal/logger"; const execFileAsync = promisify(execFile); @@ -426,11 +426,15 @@ async function processMdxFile( const resolvedPath = resolve(mdxFilePath); if (!resolvedPath.endsWith(".mdx")) { - log.error(`Not an MDX file: ${resolvedPath}`); + logger.error({ + human: { message: `not an MDX file: ${resolvedPath}` }, + json: { event: "convert.skip_non_mdx", fields: { path: resolvedPath } }, + }); return false; } try { + const startedAt = Date.now(); const { markdown } = await convertMdxToMarkdown( resolvedPath, remarkPlugins, @@ -446,11 +450,31 @@ async function processMdxFile( await writeFile(outputPath, markdown); if (!writeToStdout) { - log.summary(`Converted: ${resolvedPath} → ${outputPath}`); + const ms = Date.now() - startedAt; + logger.debug({ + human: { message: `convert ${resolvedPath} → ${outputPath} (${ms}ms)` }, + json: { + event: "convert.file", + fields: { src: resolvedPath, out: outputPath, ms }, + }, + }); } return true; } catch (error) { - log.error(`Failed to process ${mdxFilePath}: ${String(error)}`); + const reason = error instanceof Error ? error.message : String(error); + const stack = error instanceof Error ? 
error.stack : undefined; + logger.error({ + human: { + message: `failed to process ${mdxFilePath}: ${reason}`, + hint: stack ?? "run with LEADTYPE_VERBOSE=1 for more verbose logs", + }, + json: { + event: "convert.fail", + fields: stack + ? { file: mdxFilePath, reason, stack } + : { file: mdxFilePath, reason }, + }, + }); return false; } } @@ -495,7 +519,10 @@ export async function convertAllMdx( : resolve(process.cwd(), "public"); if (!existsSync(srcDir)) { - log.verbose(`Source directory does not exist: ${srcDir}`); + logger.debug({ + human: { message: `source directory does not exist: ${srcDir}` }, + json: { event: "convert.batch.no_src", fields: { srcDir } }, + }); return; } @@ -523,8 +550,10 @@ export async function convertAllMdx( Array.from(outputDirs, (dir) => mkdir(dir, { recursive: true })) ); + const startedAt = Date.now(); const results = await mapLimit(mdxFiles, concurrency, async (mdxFilePath) => { try { + const fileStartedAt = Date.now(); const { markdown } = await convertMdxToMarkdown( mdxFilePath, remarkPlugins, @@ -532,16 +561,51 @@ export async function convertAllMdx( ); const outputPath = deriveOutputPath(mdxFilePath, srcDir, outDir); await writeFile(outputPath, markdown); + logger.debug({ + human: { + message: `convert ${mdxFilePath} → ${outputPath} (${Date.now() - fileStartedAt}ms)`, + }, + json: { + event: "convert.file", + fields: { + src: mdxFilePath, + out: outputPath, + ms: Date.now() - fileStartedAt, + }, + }, + }); return true; } catch (fileError) { - log.error(`Failed to process ${mdxFilePath}: ${String(fileError)}`); + const reason = + fileError instanceof Error ? 
fileError.message : String(fileError); + logger.error({ + human: { message: `failed to process ${mdxFilePath}: ${reason}` }, + json: { + event: "convert.fail", + fields: { file: mdxFilePath, reason }, + }, + }); return false; } }); - const converted = results.filter(Boolean).length; - const failed = results.length - converted; - log.verbose( - `Converted ${converted} MDX files${failed > 0 ? `, ${failed} failed` : ""}` - ); + const ok = results.filter(Boolean).length; + const failed = results.length - ok; + const ms = Date.now() - startedAt; + logger.info({ + human: { + message: `Converted ${ok} docs in ${ms} ms${failed > 0 ? ` (${failed} failed)` : ""}`, + }, + json: { + event: "convert.batch", + fields: { + srcDir, + outDir, + files: results.length, + ok, + failed, + ms, + }, + }, + }); } diff --git a/packages/leadtype/src/internal/logger.ts b/packages/leadtype/src/internal/logger.ts index 77147ce..7b02f9c 100644 --- a/packages/leadtype/src/internal/logger.ts +++ b/packages/leadtype/src/internal/logger.ts @@ -1,15 +1,83 @@ -const VERBOSE = process.env.LEADTYPE_VERBOSE === "1"; - -export const log = { - error(message: string): void { - process.stderr.write(`[leadtype] error: ${message}\n`); - }, - summary(message: string): void { - process.stdout.write(`[leadtype] ${message}\n`); - }, - verbose(message: string): void { - if (VERBOSE) { - process.stderr.write(`[leadtype] ${message}\n`); +type Level = "error" | "warn" | "info" | "debug"; +type Value = + | boolean + | null + | number + | string + | undefined + | Value[] + | { [key: string]: Value }; +type Fields = Record<string, Value>; +const RESERVED_JSON_FIELDS = new Set(["ts", "level", "event"]); + +export type LogCall = { + human: { message: string; hint?: string }; + json: { event: string; fields?: Fields }; +}; + +type Stream = Pick<NodeJS.WritableStream, "write">; + +let format: "human" | "json" = + process.env.LEADTYPE_LOG_FORMAT === "json" ? 
"json" : "human"; +let verbose = process.env.LEADTYPE_VERBOSE === "1"; +let stderr: Stream = process.stderr; + +export function setLogFormat(f: "human" | "json"): void { + format = f; +} + +export function setVerbose(v: boolean): void { + verbose = v; +} + +export function setLogStreams(s: { stderr: Stream }): void { + stderr = s.stderr; +} + +function sanitizeJsonFields(fields: Fields | undefined): Fields { + const sanitized: Fields = {}; + if (!fields) { + return sanitized; + } + for (const [key, value] of Object.entries(fields)) { + if (value === undefined || RESERVED_JSON_FIELDS.has(key)) { + continue; } - }, + sanitized[key] = value; + } + return sanitized; +} + +function emit(level: Level, call: LogCall): void { + if (level === "debug" && !verbose) { + return; + } + if (format === "json") { + stderr.write( + `${JSON.stringify({ + ts: new Date().toISOString(), + level, + event: call.json.event, + ...sanitizeJsonFields(call.json.fields), + })}\n` + ); + return; + } + let prefix = ""; + if (level === "error") { + prefix = "Error: "; + } else if (level === "warn") { + prefix = "Warning: "; + } + stderr.write(`${prefix}${call.human.message}\n`); + if (call.human.hint) { + stderr.write(` → ${call.human.hint}\n`); + } +} + +export const logger = { + error: (call: LogCall): void => emit("error", call), + warn: (call: LogCall): void => emit("warn", call), + info: (call: LogCall): void => emit("info", call), + debug: (call: LogCall): void => emit("debug", call), }; diff --git a/packages/leadtype/src/lint/cli.ts b/packages/leadtype/src/lint/cli.ts index 4c2f13f..8a880ef 100644 --- a/packages/leadtype/src/lint/cli.ts +++ b/packages/leadtype/src/lint/cli.ts @@ -1,5 +1,6 @@ #!/usr/bin/env node import { resolve } from "node:path"; +import { setLogFormat, setVerbose } from "../internal/logger"; import { type ReporterFormat, renderReport } from "./reporters"; import { DEFAULT_IGNORE_GLOBS, type LintSeverity, lintDocs } from "./runner"; @@ -14,6 +15,7 @@ type CliArgs = { 
unknownFieldSeverity: LintSeverity; maxWarnings: number; help: boolean; + verbose: boolean; }; export type LintCliIo = { @@ -34,6 +36,7 @@ Options: --warn-unknown Unknown fields warn (default) --error-unknown Unknown fields error --max-warnings Exit non-zero if warnings exceed n (default: Infinity) + -v, --verbose Print extra progress events to stderr -h, --help Show this help Exit codes: @@ -54,6 +57,7 @@ export function parseLintArgs(argv: string[]): CliArgs { unknownFieldSeverity: "warn", maxWarnings: Number.POSITIVE_INFINITY, help: false, + verbose: false, }; let positional = 0; const readValue = (argv_: string[], index: number, flag: string): string => { @@ -93,6 +97,8 @@ export function parseLintArgs(argv: string[]): CliArgs { throw new Error("--max-warnings must be a non-negative integer"); } args.maxWarnings = parsed; + } else if (arg === "--verbose" || arg === "-v") { + args.verbose = true; } else if (arg && !arg.startsWith("-")) { if (positional === 0) { args.srcDir = arg; @@ -128,6 +134,15 @@ export async function runLintCommand( return 0; } + setLogFormat("human"); + setVerbose(false); + if (args.format === "json") { + setLogFormat("json"); + } + if (args.verbose) { + setVerbose(true); + } + const resolvedSrcDir = resolve(args.srcDir); const result = await lintDocs({ srcDir: resolvedSrcDir, diff --git a/packages/leadtype/src/remark/plugins/include.remark.ts b/packages/leadtype/src/remark/plugins/include.remark.ts index f026aa8..a7a057b 100644 --- a/packages/leadtype/src/remark/plugins/include.remark.ts +++ b/packages/leadtype/src/remark/plugins/include.remark.ts @@ -12,6 +12,7 @@ import remarkGfm from "remark-gfm"; import remarkMdx from "remark-mdx"; import type { Transformer } from "unified"; import { visit } from "unist-util-visit"; +import { logger } from "../../internal/logger"; // Regex patterns defined at top level for performance const FRONTMATTER_REGEX = /^---\r?\n([\s\S]*?)\r?\n---\r?\n([\s\S]*)$/; @@ -364,9 +365,17 @@ async function 
processIncludeNode( if (!specifier) { // Misconfigured <Include>/<Include src=…> — surface instead of silently // dropping so authors can find the offending tag in build logs. - process.stderr.write( - `[leadtype] <Include> missing specifier (no text content and no src= attribute); attributes: ${JSON.stringify(params)}\n` - ); + logger.warn({ + human: { + message: + "<Include> missing specifier (no text content and no src= attribute)", + hint: `attributes: ${JSON.stringify(params)}`, + }, + json: { + event: "include.missing_specifier", + fields: { attributes: JSON.stringify(params) }, + }, + }); return; } @@ -426,9 +435,15 @@ async function processIncludeNode( }); } catch (error) { const errorMessage = error instanceof Error ? error.message : String(error); - process.stderr.write( - `Warning: Failed to include file ${targetPath}: ${errorMessage}\n` - ); + logger.warn({ + human: { + message: `failed to include ${targetPath}: ${errorMessage}`, + }, + json: { + event: "include.read_failed", + fields: { target: targetPath, reason: errorMessage }, + }, + }); // Replace with error message Object.assign(node, { diff --git a/packages/leadtype/src/search/node.ts b/packages/leadtype/src/search/node.ts index b03b332..1f14abb 100644 --- a/packages/leadtype/src/search/node.ts +++ b/packages/leadtype/src/search/node.ts @@ -2,6 +2,7 @@ import { existsSync } from "node:fs"; import { mkdir, readdir, readFile, writeFile } from "node:fs/promises"; import path from "node:path"; import matter from "gray-matter"; +import { logger } from "../internal/logger"; import { type CreateDocsSearchIndexOptions, createDocsSearchIndex, @@ -173,22 +174,54 @@ async function readMarkdownDocs( return docs; } +function formatBytes(bytes: number): string { + if (bytes >= 1024 * 1024) { + return `${(bytes / (1024 * 1024)).toFixed(1)} MB`; + } + if (bytes >= 1024) { + return `${(bytes / 1024).toFixed(1)} KB`; + } + return `${bytes} B`; +} + function warnIfLarge(result: GenerateDocsSearchFilesResult): void { - if (result.indexBytes > 
WARN_INDEX_BYTES) { - process.stderr.write( - `Search index is ${result.indexBytes} bytes, which is above the ${WARN_INDEX_BYTES} byte guidance threshold.\n` - ); + const overIndex = result.indexBytes > WARN_INDEX_BYTES; + const overTotal = result.bytes > WARN_TOTAL_BYTES; + const overChunks = result.chunks > WARN_CHUNK_COUNT; + if (!(overIndex || overTotal || overChunks)) { + return; } - if (result.bytes > WARN_TOTAL_BYTES) { - process.stderr.write( - `Search index and content are ${result.bytes} bytes, which is above the ${WARN_TOTAL_BYTES} byte guidance threshold.\n` + const breaches: string[] = []; + if (overIndex) { + breaches.push( + `index ${formatBytes(result.indexBytes)} exceeds ${formatBytes(WARN_INDEX_BYTES)}` ); } - if (result.chunks > WARN_CHUNK_COUNT) { - process.stderr.write( - `Search index has ${result.chunks} chunks, which is above the ${WARN_CHUNK_COUNT} chunk guidance threshold.\n` + if (overTotal) { + breaches.push( + `total ${formatBytes(result.bytes)} exceeds ${formatBytes(WARN_TOTAL_BYTES)}` ); } + if (overChunks) { + breaches.push(`chunks ${result.chunks} exceeds ${WARN_CHUNK_COUNT}`); + } + logger.warn({ + human: { + message: `search index size: ${breaches.join("; ")}`, + hint: "consider --include / --exclude to scope the index", + }, + json: { + event: "search.index.size", + fields: { + indexBytes: result.indexBytes, + totalBytes: result.bytes, + chunks: result.chunks, + indexThreshold: WARN_INDEX_BYTES, + totalThreshold: WARN_TOTAL_BYTES, + chunksThreshold: WARN_CHUNK_COUNT, + }, + }, + }); } function resolveDocsOutputPath( diff --git a/packages/leadtype/src/search/vercel.ts b/packages/leadtype/src/search/vercel.ts index d491874..b36a2e6 100644 --- a/packages/leadtype/src/search/vercel.ts +++ b/packages/leadtype/src/search/vercel.ts @@ -4,7 +4,7 @@ import { type TimeoutConfiguration, type ToolSet, } from "ai"; -import { log } from "../internal/logger"; +import { logger } from "../internal/logger"; import { appendToolInstructions, 
createDocsTextStreamResponse, @@ -107,9 +107,14 @@ export function streamDocsAnswer( providerOptions: options.providerOptions, tools: options.tools, onError: ({ error }) => { - log.error( - `streamDocsAnswer provider error: ${getStreamErrorMessage(error)}` - ); + const reason = getStreamErrorMessage(error); + logger.error({ + human: { message: `streamDocsAnswer provider error: ${reason}` }, + json: { + event: "search.stream_provider_error", + fields: { reason }, + }, + }); }, }); const responseInit = getPlainTextResponseInit();