From 84f228d9923b1c370e2e363af52b6d93206b1254 Mon Sep 17 00:00:00 2001
From: "N.V.J.K Kartik"
Date: Tue, 20 May 2025 13:51:58 +0530
Subject: [PATCH] package and readme

---
 .../packages/traceai_anthropic/README.md     | 110 +++++++++++++++++
 .../packages/traceai_langchain/README.md     | 111 +++++++++++++++++
 .../packages/traceai_langchain/package.json  |   2 +-
 typescript/packages/traceai_openai/README.md | 113 ++++++++++++++++++
 4 files changed, 335 insertions(+), 1 deletion(-)
 create mode 100644 typescript/packages/traceai_anthropic/README.md
 create mode 100644 typescript/packages/traceai_langchain/README.md
 create mode 100644 typescript/packages/traceai_openai/README.md

diff --git a/typescript/packages/traceai_anthropic/README.md b/typescript/packages/traceai_anthropic/README.md
new file mode 100644
index 0000000..4632f8a
--- /dev/null
+++ b/typescript/packages/traceai_anthropic/README.md
@@ -0,0 +1,110 @@
+# @traceai/anthropic
+
+OpenTelemetry instrumentation for Anthropic's Claude API. This package provides automatic tracing and monitoring for your Anthropic applications.
+
+## Installation
+
+```bash
+npm install @traceai/anthropic
+# or
+yarn add @traceai/anthropic
+# or
+pnpm add @traceai/anthropic
+```
+
+## Quick Start
+
+```typescript
+// export Future AGI API KEYS
+// export FI_API_KEY=your_api_key
+// export FI_SECRET_KEY=your_secret_key
+
+import { register, ProjectType } from "@traceai/fi-core";
+import { AnthropicInstrumentation } from "@traceai/anthropic";
+import { registerInstrumentations } from "@opentelemetry/instrumentation";
+import { diag, DiagConsoleLogger, DiagLogLevel } from "@opentelemetry/api";
+
+// Enable OpenTelemetry internal diagnostics (optional, for debugging)
+diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.INFO);
+
+// 1. Register FI Core TracerProvider
+const tracerProvider = register({
+  projectName: "your-project-name",
+  projectType: ProjectType.OBSERVE,
+  sessionName: "your-session-name"
+});
+
+// 2. Initialize and enable Anthropic Instrumentation
+const anthropicInstrumentation = new AnthropicInstrumentation({});
+registerInstrumentations({
+  instrumentations: [anthropicInstrumentation],
+  tracerProvider: tracerProvider,
+});
+
+// 3. Dynamically import Anthropic SDK AFTER instrumentation is registered
+const Anthropic = (await import("@anthropic-ai/sdk")).default;
+const client = new Anthropic();
+
+// 4. Use Anthropic as normal
+const response = await client.messages.create({
+  model: "claude-3-haiku-20240307",
+  max_tokens: 50,
+  messages: [{ role: "user", content: "Hello, Claude!" }],
+});
+
+// 5. Don't forget to shut down the tracer provider when done
+try {
+  await tracerProvider.shutdown();
+  console.log("Tracer provider shut down successfully.");
+} catch (error) {
+  console.error("Error shutting down tracer provider:", error);
+}
+```
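+
+Streaming requests are traced as well. The snippet below is a minimal sketch that reuses the `client` from the Quick Start above; adjust the event handling to your `@anthropic-ai/sdk` version:
+
+```typescript
+// Streaming call: the instrumentation records the request and the streamed response.
+const stream = await client.messages.create({
+  model: "claude-3-haiku-20240307",
+  max_tokens: 50,
+  messages: [{ role: "user", content: "Tell me a short joke." }],
+  stream: true,
+});
+
+// Print text deltas as they arrive.
+for await (const event of stream) {
+  if (event.type === "content_block_delta" && event.delta.type === "text_delta") {
+    process.stdout.write(event.delta.text);
+  }
+}
+```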
+
+## Environment Variables
+
+The following environment variables are required:
+
+```bash
+# For Anthropic API
+ANTHROPIC_API_KEY=your_anthropic_api_key
+
+# For TraceAI telemetry
+FI_API_KEY=your_api_key
+FI_SECRET_KEY=your_secret_key
+```
+
+## Features
+
+- Automatic tracing of Anthropic API calls
+- Support for both streaming and non-streaming requests
+- Captures token usage and response metadata
+- Tracks tool usage in conversations
+- No manual instrumentation required
+- Integration with TraceAI's observability platform
+
+## Peer Dependencies
+
+This package requires the following peer dependency:
+- `@anthropic-ai/sdk`: ^0.27.3
+
+## Development
+
+```bash
+# Install dependencies
+pnpm install
+
+# Build the package
+pnpm build
+
+# Run tests
+pnpm test
+
+# Type checking
+pnpm type:check
+```
+
+## Support
+
+For support, please open an issue in our [GitHub repository](https://github.com/future-agi/traceAI/issues).
+
diff --git a/typescript/packages/traceai_langchain/README.md b/typescript/packages/traceai_langchain/README.md
new file mode 100644
index 0000000..8704bf9
--- /dev/null
+++ b/typescript/packages/traceai_langchain/README.md
@@ -0,0 +1,111 @@
+# @traceai/langchain
+
+OpenTelemetry instrumentation for LangChain.js. This package provides automatic tracing and monitoring for your LangChain applications.
+
+## Installation
+
+```bash
+npm install @traceai/langchain
+# or
+yarn add @traceai/langchain
+# or
+pnpm add @traceai/langchain
+```
+
+## Quick Start
+
+```typescript
+// export Future AGI API KEYS
+// export FI_API_KEY=your_api_key
+// export FI_SECRET_KEY=your_secret_key
+
+import { register, ProjectType } from "@traceai/fi-core";
+import { registerInstrumentations } from "@opentelemetry/instrumentation";
+import { LangChainInstrumentation } from "@traceai/langchain";
+import { ChatOpenAI } from "@langchain/openai";
+import { HumanMessage } from "@langchain/core/messages";
+import { diag, DiagConsoleLogger, DiagLogLevel } from "@opentelemetry/api";
+import * as CallbackManagerModule from "@langchain/core/callbacks/manager";
+
+// Enable OpenTelemetry internal diagnostics (optional, for debugging)
+diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.DEBUG);
+
+// 1. Register FI Core TracerProvider
+const tracerProvider = register({
+  projectName: "your-project-name",
+  projectType: ProjectType.OBSERVE,
+  sessionName: "your-session-name"
+});
+
+// 2. Register LangChain Instrumentation
+const lcInstrumentation = new LangChainInstrumentation();
+registerInstrumentations({
+  tracerProvider: tracerProvider,
+  instrumentations: [lcInstrumentation],
+});
+
+// 3. Manually instrument LangChain (required as it doesn't have a traditional module structure)
+lcInstrumentation.manuallyInstrument(CallbackManagerModule);
+
+// 4. Use LangChain as normal
+const chatModel = new ChatOpenAI({
+  openAIApiKey: process.env.OPENAI_API_KEY,
+  metadata: {
+    session_id: "your-session-id",
+  },
+});
+
+// Your LangChain code here (see the sketch below the Quick Start)...
+
+// 5. Don't forget to shut down the tracer provider when done
+try {
+  await tracerProvider.shutdown();
+  console.log("Tracer provider shut down successfully.");
+} catch (error) {
+  console.error("Error shutting down tracer provider:", error);
+}
+```
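+
+As a minimal sketch of the "Your LangChain code here" step, a traced call using the `chatModel` and `HumanMessage` imports from above might look like this (model and prompt are placeholders):
+
+```typescript
+// The invocation runs through the instrumented callback manager,
+// so spans are produced automatically.
+const response = await chatModel.invoke([
+  new HumanMessage("What is OpenTelemetry?"),
+]);
+console.log(response.content);
+```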
+
+## Environment Variables
+
+The following environment variables are required for telemetry:
+
+```bash
+FI_API_KEY=your_api_key
+FI_SECRET_KEY=your_secret_key
+```
+
+## Features
+
+- Automatic tracing of LangChain operations
+- Support for both ESM and CommonJS modules
+- Compatible with LangChain.js v0.2.0 and v0.3.0
+- Integration with TraceAI's observability platform
+
+## Peer Dependencies
+
+This package requires the following peer dependencies:
+- `@langchain/core`: ^0.2.0 || ^0.3.0
+
+## Development
+
+```bash
+# Install dependencies
+pnpm install
+
+# Build the package
+pnpm build
+
+# Run tests
+pnpm test
+
+# Type checking
+pnpm type:check
+```
+
+## Support
+
+For support, please open an issue in our [GitHub repository](https://github.com/future-agi/traceAI/issues).
+
diff --git a/typescript/packages/traceai_langchain/package.json b/typescript/packages/traceai_langchain/package.json
index b86f782..387e930 100644
--- a/typescript/packages/traceai_langchain/package.json
+++ b/typescript/packages/traceai_langchain/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@traceai/langchain",
-  "version": "3.2.0",
+  "version": "0.1.0",
   "description": "TraceAI Instrumentation for LangChain.js",
   "main": "dist/src/index.js",
   "module": "dist/esm/index.js",
diff --git a/typescript/packages/traceai_openai/README.md b/typescript/packages/traceai_openai/README.md
new file mode 100644
index 0000000..77230d6
--- /dev/null
+++ b/typescript/packages/traceai_openai/README.md
@@ -0,0 +1,113 @@
+# @traceai/openai
+
+OpenTelemetry instrumentation for OpenAI's API. This package provides automatic tracing and monitoring for your OpenAI applications.
+
+## Installation
+
+```bash
+npm install @traceai/openai
+# or
+yarn add @traceai/openai
+# or
+pnpm add @traceai/openai
+```
+
+## Quick Start
+
+```typescript
+// export Future AGI API KEYS
+// export FI_API_KEY=your_api_key
+// export FI_SECRET_KEY=your_secret_key
+
+import { register, ProjectType } from "@traceai/fi-core";
+import { OpenAIInstrumentation } from "@traceai/openai";
+import { registerInstrumentations } from "@opentelemetry/instrumentation";
+import { diag, DiagConsoleLogger, DiagLogLevel } from "@opentelemetry/api";
+
+// Enable OpenTelemetry internal diagnostics (optional, for debugging)
+diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.DEBUG);
+
+// 1. Register FI Core TracerProvider
+const tracerProvider = register({
+  projectName: "your-project-name",
+  projectType: ProjectType.OBSERVE,
+  sessionName: "your-session-name"
+});
+
+// 2. Register OpenAI Instrumentation BEFORE importing/using the OpenAI client
+registerInstrumentations({
+  tracerProvider: tracerProvider,
+  instrumentations: [new OpenAIInstrumentation()],
+});
+
+// 3. Import and initialize the OpenAI client
+const OpenAI = (await import("openai")).default;
+const openai = new OpenAI({
+  apiKey: process.env.OPENAI_API_KEY,
+});
+
+// 4. Use OpenAI as normal
+const chatCompletion = await openai.chat.completions.create({
+  messages: [{ role: "user", content: "Hello!" }],
+  model: "gpt-4o",
+});
+
+// 5. Don't forget to shut down the tracer provider when done
+try {
+  await tracerProvider.shutdown();
+  console.log("Tracer provider shut down successfully.");
+} catch (error) {
+  console.error("Error shutting down tracer provider:", error);
+}
+```
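+
+Streaming chat completions are traced as well. The snippet below is a minimal sketch that reuses the `openai` client from the Quick Start above:
+
+```typescript
+// Streaming call: the instrumentation captures the request and the streamed chunks.
+const stream = await openai.chat.completions.create({
+  model: "gpt-4o",
+  messages: [{ role: "user", content: "Write a haiku about tracing." }],
+  stream: true,
+});
+
+// Print content deltas as they arrive.
+for await (const chunk of stream) {
+  process.stdout.write(chunk.choices[0]?.delta?.content ?? "");
+}
+```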
+
+## Environment Variables
+
+The following environment variables are required:
+
+```bash
+# For OpenAI API
+OPENAI_API_KEY=your_openai_api_key
+
+# For TraceAI telemetry
+FI_API_KEY=your_api_key
+FI_SECRET_KEY=your_secret_key
+```
+
+## Features
+
+- Automatic tracing of OpenAI API calls
+- Support for all OpenAI API endpoints:
+  - Chat Completions
+  - Completions
+  - Embeddings
+  - Responses
+- Streaming support for chat completions
+- No manual instrumentation required
+- Integration with TraceAI's observability platform
+
+## Peer Dependencies
+
+This package requires the following peer dependency:
+- `openai`: ^4.0.0
+
+## Development
+
+```bash
+# Install dependencies
+pnpm install
+
+# Build the package
+pnpm build
+
+# Run tests
+pnpm test
+
+# Type checking
+pnpm type:check
+```
+
+## Support
+
+For support, please open an issue in our [GitHub repository](https://github.com/future-agi/traceAI/issues).
+