From e7160df8a76beb2dd12220bea5a4a58a1e6b6c57 Mon Sep 17 00:00:00 2001
From: RulaKhaled
Date: Wed, 4 Mar 2026 10:02:48 +0100
Subject: [PATCH] fix(core): Standardize Vercel AI span descriptions to align
 with GenAI semantic conventions

---
 .../tracing/vercelai/test-generate-object.ts  |  4 +-
 .../suites/tracing/vercelai/test.ts           | 34 +++++------
 .../suites/tracing/vercelai/v5/test.ts        | 28 ++++-----
 .../suites/tracing/vercelai/v6/test.ts        | 28 ++++-----
 .../core/src/tracing/vercel-ai/constants.ts   |  6 ++
 packages/core/src/tracing/vercel-ai/index.ts  | 59 ++++++++-----------
 6 files changed, 78 insertions(+), 81 deletions(-)

diff --git a/dev-packages/node-integration-tests/suites/tracing/vercelai/test-generate-object.ts b/dev-packages/node-integration-tests/suites/tracing/vercelai/test-generate-object.ts
index ac6614af7502..3156a19bb806 100644
--- a/dev-packages/node-integration-tests/suites/tracing/vercelai/test-generate-object.ts
+++ b/dev-packages/node-integration-tests/suites/tracing/vercelai/test-generate-object.ts
@@ -28,7 +28,7 @@ describe('Vercel AI integration - generateObject', () => {
             'sentry.op': 'gen_ai.invoke_agent',
             'sentry.origin': 'auto.vercelai.otel',
           }),
-          description: 'generateObject',
+          description: 'invoke_agent',
           op: 'gen_ai.invoke_agent',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -51,7 +51,7 @@ describe('Vercel AI integration - generateObject', () => {
             'gen_ai.usage.output_tokens': 25,
             'gen_ai.usage.total_tokens': 40,
           }),
-          description: 'generate_object mock-model-id',
+          description: 'generate_content mock-model-id',
           op: 'gen_ai.generate_object',
           origin: 'auto.vercelai.otel',
           status: 'ok',
diff --git a/dev-packages/node-integration-tests/suites/tracing/vercelai/test.ts b/dev-packages/node-integration-tests/suites/tracing/vercelai/test.ts
index 0f1efb26d1f0..2919815b8f0d 100644
--- a/dev-packages/node-integration-tests/suites/tracing/vercelai/test.ts
+++ b/dev-packages/node-integration-tests/suites/tracing/vercelai/test.ts
@@ -52,7 +52,7 @@ describe('Vercel AI integration', () => {
             'vercel.ai.settings.maxSteps': 1,
             'vercel.ai.streaming': false,
           },
-          description: 'generateText',
+          description: 'invoke_agent',
           op: 'gen_ai.invoke_agent',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -81,7 +81,7 @@ describe('Vercel AI integration', () => {
             'vercel.ai.settings.maxRetries': 2,
             'vercel.ai.streaming': false,
           },
-          description: 'generate_text mock-model-id',
+          description: 'generate_content mock-model-id',
           op: 'gen_ai.generate_text',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -109,7 +109,7 @@ describe('Vercel AI integration', () => {
             'vercel.ai.settings.maxSteps': 1,
             'vercel.ai.streaming': false,
           },
-          description: 'generateText',
+          description: 'invoke_agent',
           op: 'gen_ai.invoke_agent',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -142,7 +142,7 @@ describe('Vercel AI integration', () => {
             'vercel.ai.settings.maxRetries': 2,
             'vercel.ai.streaming': false,
           },
-          description: 'generate_text mock-model-id',
+          description: 'generate_content mock-model-id',
           op: 'gen_ai.generate_text',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -166,7 +166,7 @@ describe('Vercel AI integration', () => {
             'vercel.ai.settings.maxSteps': 1,
             'vercel.ai.streaming': false,
           },
-          description: 'generateText',
+          description: 'invoke_agent',
           op: 'gen_ai.invoke_agent',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -195,7 +195,7 @@ describe('Vercel AI integration', () => {
             'vercel.ai.settings.maxRetries': 2,
             'vercel.ai.streaming': false,
           },
-          description: 'generate_text mock-model-id',
+          description: 'generate_content mock-model-id',
           op: 'gen_ai.generate_text',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -248,7 +248,7 @@ describe('Vercel AI integration', () => {
             'vercel.ai.settings.maxSteps': 1,
             'vercel.ai.streaming': false,
           },
-          description: 'generateText',
+          description: 'invoke_agent',
           op: 'gen_ai.invoke_agent',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -287,7 +287,7 @@ describe('Vercel AI integration', () => {
             'vercel.ai.settings.maxRetries': 2,
             'vercel.ai.streaming': false,
           },
-          description: 'generate_text mock-model-id',
+          description: 'generate_content mock-model-id',
           op: 'gen_ai.generate_text',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -320,7 +320,7 @@ describe('Vercel AI integration', () => {
             'vercel.ai.settings.maxSteps': 1,
             'vercel.ai.streaming': false,
           },
-          description: 'generateText',
+          description: 'invoke_agent',
           op: 'gen_ai.invoke_agent',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -358,7 +358,7 @@ describe('Vercel AI integration', () => {
             'vercel.ai.settings.maxRetries': 2,
             'vercel.ai.streaming': false,
           },
-          description: 'generate_text mock-model-id',
+          description: 'generate_content mock-model-id',
           op: 'gen_ai.generate_text',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -392,7 +392,7 @@ describe('Vercel AI integration', () => {
             'vercel.ai.settings.maxSteps': 1,
             'vercel.ai.streaming': false,
           },
-          description: 'generateText',
+          description: 'invoke_agent',
           op: 'gen_ai.invoke_agent',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -433,7 +433,7 @@ describe('Vercel AI integration', () => {
             'vercel.ai.settings.maxRetries': 2,
             'vercel.ai.streaming': false,
           },
-          description: 'generate_text mock-model-id',
+          description: 'generate_content mock-model-id',
           op: 'gen_ai.generate_text',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -503,7 +503,7 @@ describe('Vercel AI integration', () => {
             'vercel.ai.settings.maxSteps': 1,
             'vercel.ai.streaming': false,
           },
-          description: 'generateText',
+          description: 'invoke_agent',
           op: 'gen_ai.invoke_agent',
           origin: 'auto.vercelai.otel',
           status: 'internal_error',
@@ -531,7 +531,7 @@ describe('Vercel AI integration', () => {
             'vercel.ai.settings.maxRetries': 2,
             'vercel.ai.streaming': false,
           },
-          description: 'generate_text mock-model-id',
+          description: 'generate_content mock-model-id',
           op: 'gen_ai.generate_text',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -623,7 +623,7 @@ describe('Vercel AI integration', () => {
             'vercel.ai.settings.maxSteps': 1,
             'vercel.ai.streaming': false,
           },
-          description: 'generateText',
+          description: 'invoke_agent',
           op: 'gen_ai.invoke_agent',
           origin: 'auto.vercelai.otel',
           status: 'internal_error',
@@ -651,7 +651,7 @@ describe('Vercel AI integration', () => {
             'vercel.ai.settings.maxRetries': 2,
             'vercel.ai.streaming': false,
           },
-          description: 'generate_text mock-model-id',
+          description: 'generate_content mock-model-id',
           op: 'gen_ai.generate_text',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -735,7 +735,7 @@ describe('Vercel AI integration', () => {
       spans: expect.arrayContaining([
         // The generateText span should have the correct op even though model ID was not available at span start
         expect.objectContaining({
-          description: 'generateText',
+          description: 'invoke_agent',
           op: 'gen_ai.invoke_agent',
           origin: 'auto.vercelai.otel',
           status: 'ok',
diff --git a/dev-packages/node-integration-tests/suites/tracing/vercelai/v5/test.ts b/dev-packages/node-integration-tests/suites/tracing/vercelai/v5/test.ts
index eb42156920e9..7d981a878363 100644
--- a/dev-packages/node-integration-tests/suites/tracing/vercelai/v5/test.ts
+++ b/dev-packages/node-integration-tests/suites/tracing/vercelai/v5/test.ts
@@ -50,7 +50,7 @@ describe('Vercel AI integration (V5)', () => {
             [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.invoke_agent',
             [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
           },
-          description: 'generateText',
+          description: 'invoke_agent',
           op: 'gen_ai.invoke_agent',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -79,7 +79,7 @@ describe('Vercel AI integration (V5)', () => {
             [GEN_AI_RESPONSE_MODEL_ATTRIBUTE]: 'mock-model-id',
             [GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE]: 30,
           },
-          description: 'generate_text mock-model-id',
+          description: 'generate_content mock-model-id',
           op: 'gen_ai.generate_text',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -106,7 +106,7 @@ describe('Vercel AI integration (V5)', () => {
             [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.invoke_agent',
             [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
           },
-          description: 'generateText',
+          description: 'invoke_agent',
           op: 'gen_ai.invoke_agent',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -138,7 +138,7 @@ describe('Vercel AI integration (V5)', () => {
             [GEN_AI_RESPONSE_MODEL_ATTRIBUTE]: 'mock-model-id',
             [GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE]: 30,
           },
-          description: 'generate_text mock-model-id',
+          description: 'generate_content mock-model-id',
           op: 'gen_ai.generate_text',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -161,7 +161,7 @@ describe('Vercel AI integration (V5)', () => {
             [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.invoke_agent',
             [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
           },
-          description: 'generateText',
+          description: 'invoke_agent',
           op: 'gen_ai.invoke_agent',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -190,7 +190,7 @@ describe('Vercel AI integration (V5)', () => {
             [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.generate_text',
             [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
           },
-          description: 'generate_text mock-model-id',
+          description: 'generate_content mock-model-id',
           op: 'gen_ai.generate_text',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -242,7 +242,7 @@ describe('Vercel AI integration (V5)', () => {
             [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.invoke_agent',
             [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
           },
-          description: 'generateText',
+          description: 'invoke_agent',
           op: 'gen_ai.invoke_agent',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -275,7 +275,7 @@ describe('Vercel AI integration (V5)', () => {
             [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.generate_text',
             [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
           },
-          description: 'generate_text mock-model-id',
+          description: 'generate_content mock-model-id',
           op: 'gen_ai.generate_text',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -302,7 +302,7 @@ describe('Vercel AI integration (V5)', () => {
             [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.invoke_agent',
             [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
           },
-          description: 'generateText',
+          description: 'invoke_agent',
           op: 'gen_ai.invoke_agent',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -334,7 +334,7 @@ describe('Vercel AI integration (V5)', () => {
             [GEN_AI_RESPONSE_MODEL_ATTRIBUTE]: 'mock-model-id',
             [GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE]: 30,
           },
-          description: 'generate_text mock-model-id',
+          description: 'generate_content mock-model-id',
           op: 'gen_ai.generate_text',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -361,7 +361,7 @@ describe('Vercel AI integration (V5)', () => {
             [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.invoke_agent',
             [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
           },
-          description: 'generateText',
+          description: 'invoke_agent',
           op: 'gen_ai.invoke_agent',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -396,7 +396,7 @@ describe('Vercel AI integration (V5)', () => {
             [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.generate_text',
             [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
           }),
-          description: 'generate_text mock-model-id',
+          description: 'generate_content mock-model-id',
           op: 'gen_ai.generate_text',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -480,7 +480,7 @@ describe('Vercel AI integration (V5)', () => {
             [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
             'vercel.ai.response.finishReason': 'tool-calls',
           },
-          description: 'generateText',
+          description: 'invoke_agent',
           op: 'gen_ai.invoke_agent',
           origin: 'auto.vercelai.otel',
         }),
@@ -507,7 +507,7 @@ describe('Vercel AI integration (V5)', () => {
             [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.generate_text',
             [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
           },
-          description: 'generate_text mock-model-id',
+          description: 'generate_content mock-model-id',
           op: 'gen_ai.generate_text',
           origin: 'auto.vercelai.otel',
           status: 'ok',
diff --git a/dev-packages/node-integration-tests/suites/tracing/vercelai/v6/test.ts b/dev-packages/node-integration-tests/suites/tracing/vercelai/v6/test.ts
index 2a75cfdfbfca..2a213f39410d 100644
--- a/dev-packages/node-integration-tests/suites/tracing/vercelai/v6/test.ts
+++ b/dev-packages/node-integration-tests/suites/tracing/vercelai/v6/test.ts
@@ -50,7 +50,7 @@ describe('Vercel AI integration (V6)', () => {
             [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.invoke_agent',
             [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
           }),
-          description: 'generateText',
+          description: 'invoke_agent',
           op: 'gen_ai.invoke_agent',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -80,7 +80,7 @@ describe('Vercel AI integration (V6)', () => {
             [GEN_AI_RESPONSE_MODEL_ATTRIBUTE]: 'mock-model-id',
             [GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE]: 30,
           }),
-          description: 'generate_text mock-model-id',
+          description: 'generate_content mock-model-id',
           op: 'gen_ai.generate_text',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -107,7 +107,7 @@ describe('Vercel AI integration (V6)', () => {
             [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.invoke_agent',
             [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
           }),
-          description: 'generateText',
+          description: 'invoke_agent',
           op: 'gen_ai.invoke_agent',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -139,7 +139,7 @@ describe('Vercel AI integration (V6)', () => {
             [GEN_AI_RESPONSE_MODEL_ATTRIBUTE]: 'mock-model-id',
             [GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE]: 30,
           }),
-          description: 'generate_text mock-model-id',
+          description: 'generate_content mock-model-id',
           op: 'gen_ai.generate_text',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -163,7 +163,7 @@ describe('Vercel AI integration (V6)', () => {
             [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.invoke_agent',
             [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
           }),
-          description: 'generateText',
+          description: 'invoke_agent',
           op: 'gen_ai.invoke_agent',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -193,7 +193,7 @@ describe('Vercel AI integration (V6)', () => {
             [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.generate_text',
             [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
           }),
-          description: 'generate_text mock-model-id',
+          description: 'generate_content mock-model-id',
           op: 'gen_ai.generate_text',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -245,7 +245,7 @@ describe('Vercel AI integration (V6)', () => {
             [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.invoke_agent',
             [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
           }),
-          description: 'generateText',
+          description: 'invoke_agent',
           op: 'gen_ai.invoke_agent',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -278,7 +278,7 @@ describe('Vercel AI integration (V6)', () => {
             [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.generate_text',
             [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
           }),
-          description: 'generate_text mock-model-id',
+          description: 'generate_content mock-model-id',
           op: 'gen_ai.generate_text',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -305,7 +305,7 @@ describe('Vercel AI integration (V6)', () => {
             [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.invoke_agent',
             [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
           }),
-          description: 'generateText',
+          description: 'invoke_agent',
           op: 'gen_ai.invoke_agent',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -337,7 +337,7 @@ describe('Vercel AI integration (V6)', () => {
             [GEN_AI_RESPONSE_MODEL_ATTRIBUTE]: 'mock-model-id',
             [GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE]: 30,
           }),
-          description: 'generate_text mock-model-id',
+          description: 'generate_content mock-model-id',
           op: 'gen_ai.generate_text',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -364,7 +364,7 @@ describe('Vercel AI integration (V6)', () => {
             [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.invoke_agent',
             [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
           }),
-          description: 'generateText',
+          description: 'invoke_agent',
           op: 'gen_ai.invoke_agent',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -399,7 +399,7 @@ describe('Vercel AI integration (V6)', () => {
             [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.generate_text',
             [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
           }),
-          description: 'generate_text mock-model-id',
+          description: 'generate_content mock-model-id',
           op: 'gen_ai.generate_text',
           origin: 'auto.vercelai.otel',
           status: 'ok',
@@ -484,7 +484,7 @@ describe('Vercel AI integration (V6)', () => {
             [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
             'vercel.ai.response.finishReason': 'tool-calls',
           }),
-          description: 'generateText',
+          description: 'invoke_agent',
           op: 'gen_ai.invoke_agent',
           origin: 'auto.vercelai.otel',
         }),
@@ -512,7 +512,7 @@ describe('Vercel AI integration (V6)', () => {
             [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.generate_text',
             [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
           }),
-          description: 'generate_text mock-model-id',
+          description: 'generate_content mock-model-id',
           op: 'gen_ai.generate_text',
           origin: 'auto.vercelai.otel',
           status: 'ok',
diff --git a/packages/core/src/tracing/vercel-ai/constants.ts b/packages/core/src/tracing/vercel-ai/constants.ts
index 82baf0312d7c..d6ad34355e8e 100644
--- a/packages/core/src/tracing/vercel-ai/constants.ts
+++ b/packages/core/src/tracing/vercel-ai/constants.ts
@@ -25,3 +25,9 @@ export const GENERATE_CONTENT_OPS = new Set([
 export const EMBEDDINGS_OPS = new Set(['ai.embed.doEmbed', 'ai.embedMany.doEmbed']);
 
 export const RERANK_OPS = new Set(['ai.rerank.doRerank']);
+
+export const DO_SPAN_NAME_PREFIX: Record<string, string> = {
+  'ai.embed.doEmbed': 'embed',
+  'ai.embedMany.doEmbed': 'embed_many',
+  'ai.rerank.doRerank': 'rerank',
+};
diff --git a/packages/core/src/tracing/vercel-ai/index.ts b/packages/core/src/tracing/vercel-ai/index.ts
index d3c4b036e228..cd61feed1827 100644
--- a/packages/core/src/tracing/vercel-ai/index.ts
+++ b/packages/core/src/tracing/vercel-ai/index.ts
@@ -1,7 +1,7 @@
 import type { Client } from '../../client';
 import { SEMANTIC_ATTRIBUTE_SENTRY_OP, SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN } from '../../semanticAttributes';
 import type { Event } from '../../types-hoist/event';
-import type { Span, SpanAttributes, SpanAttributeValue, SpanJSON, SpanOrigin } from '../../types-hoist/span';
+import type { Span, SpanAttributes, SpanAttributeValue, SpanJSON } from '../../types-hoist/span';
 import { spanToJSON } from '../../utils/spanUtils';
 import {
   GEN_AI_INPUT_MESSAGES_ATTRIBUTE,
@@ -19,7 +19,14 @@ import {
   GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE,
   GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE,
 } from '../ai/gen-ai-attributes';
-import { EMBEDDINGS_OPS, GENERATE_CONTENT_OPS, INVOKE_AGENT_OPS, RERANK_OPS, toolCallSpanMap } from './constants';
+import {
+  DO_SPAN_NAME_PREFIX,
+  EMBEDDINGS_OPS,
+  GENERATE_CONTENT_OPS,
+  INVOKE_AGENT_OPS,
+  RERANK_OPS,
+  toolCallSpanMap,
+} from './constants';
 import type { TokenSummary } from './types';
 import {
   accumulateTokensForParent,
@@ -50,10 +57,6 @@ import {
   OPERATION_NAME_ATTRIBUTE,
 } from './vercel-ai-attributes';
 
-function addOriginToSpan(span: Span, origin: SpanOrigin): void {
-  span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN, origin);
-}
-
 /**
  * Maps Vercel AI SDK operation names to OpenTelemetry semantic convention values
  * @see https://opentelemetry.io/docs/specs/semconv/gen-ai/gen-ai-spans/#llm-request-spans
@@ -226,7 +229,7 @@ function renameAttributeKey(attributes: Record<string, unknown>, oldKey: string,
 }
 
 function processToolCallSpan(span: Span, attributes: SpanAttributes): void {
-  addOriginToSpan(span, 'auto.vercelai.otel');
+  span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN, 'auto.vercelai.otel');
   span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.execute_tool');
   span.setAttribute(GEN_AI_OPERATION_NAME_ATTRIBUTE, 'execute_tool');
   renameAttributeKey(attributes, AI_TOOL_CALL_NAME_ATTRIBUTE, GEN_AI_TOOL_NAME_ATTRIBUTE);
@@ -251,17 +254,14 @@ function processToolCallSpan(span: Span, attributes: SpanAttributes): void {
 }
 
 function processGenerateSpan(span: Span, name: string, attributes: SpanAttributes): void {
-  addOriginToSpan(span, 'auto.vercelai.otel');
+  span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN, 'auto.vercelai.otel');
 
   const nameWthoutAi = name.replace('ai.', '');
   span.setAttribute('ai.pipeline.name', nameWthoutAi);
   span.updateName(nameWthoutAi);
 
-  // If a telemetry name is set and the span represents a pipeline, use it as the operation name.
-  // This name can be set at the request level by adding `experimental_telemetry.functionId`.
   const functionId = attributes[AI_TELEMETRY_FUNCTION_ID_ATTRIBUTE];
   if (functionId && typeof functionId === 'string') {
-    span.updateName(`${nameWthoutAi} ${functionId}`);
     span.setAttribute('gen_ai.function_id', functionId);
   }
 
@@ -278,31 +278,22 @@ function processGenerateSpan(span: Span, name: string, attributes: SpanAttribute
     span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, op);
   }
 
-  // Update span names for .do* spans to include the model ID (only if model ID exists)
+  // For invoke_agent pipeline spans, use 'invoke_agent' as the description
+  // to be consistent with other AI integrations (e.g. LangGraph)
+  if (INVOKE_AGENT_OPS.has(name)) {
+    if (functionId && typeof functionId === 'string') {
+      span.updateName(`invoke_agent ${functionId}`);
+    } else {
+      span.updateName('invoke_agent');
+    }
+    return;
+  }
+
   const modelId = attributes[AI_MODEL_ID_ATTRIBUTE];
   if (modelId) {
-    switch (name) {
-      case 'ai.generateText.doGenerate':
-        span.updateName(`generate_text ${modelId}`);
-        break;
-      case 'ai.streamText.doStream':
-        span.updateName(`stream_text ${modelId}`);
-        break;
-      case 'ai.generateObject.doGenerate':
-        span.updateName(`generate_object ${modelId}`);
-        break;
-      case 'ai.streamObject.doStream':
-        span.updateName(`stream_object ${modelId}`);
-        break;
-      case 'ai.embed.doEmbed':
-        span.updateName(`embed ${modelId}`);
-        break;
-      case 'ai.embedMany.doEmbed':
-        span.updateName(`embed_many ${modelId}`);
-        break;
-      case 'ai.rerank.doRerank':
-        span.updateName(`rerank ${modelId}`);
-        break;
+    const doSpanPrefix = GENERATE_CONTENT_OPS.has(name) ? 'generate_content' : DO_SPAN_NAME_PREFIX[name];
+    if (doSpanPrefix) {
+      span.updateName(`${doSpanPrefix} ${modelId}`);
     }
   }
 }
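
Reviewer note (illustrative, not part of the patch): the sketch below condenses
the naming rules that processGenerateSpan implements after this change. The
helper describeSpan is hypothetical and does not exist in the codebase, and the
membership of INVOKE_AGENT_OPS and GENERATE_CONTENT_OPS is inferred from the
replaced switch cases and the test expectations rather than copied from
constants.ts.

// Assumed set contents; see packages/core/src/tracing/vercel-ai/constants.ts for the real ones.
const INVOKE_AGENT_OPS = new Set(['ai.generateText', 'ai.streamText', 'ai.generateObject', 'ai.streamObject']);
const GENERATE_CONTENT_OPS = new Set([
  'ai.generateText.doGenerate',
  'ai.streamText.doStream',
  'ai.generateObject.doGenerate',
  'ai.streamObject.doStream',
]);
const DO_SPAN_NAME_PREFIX: Record<string, string> = {
  'ai.embed.doEmbed': 'embed',
  'ai.embedMany.doEmbed': 'embed_many',
  'ai.rerank.doRerank': 'rerank',
};

// Hypothetical helper mirroring the post-patch span naming logic.
function describeSpan(name: string, modelId?: string, functionId?: string): string {
  if (INVOKE_AGENT_OPS.has(name)) {
    // functionId (from experimental_telemetry) now only augments agent spans.
    return functionId ? `invoke_agent ${functionId}` : 'invoke_agent';
  }
  const prefix = GENERATE_CONTENT_OPS.has(name) ? 'generate_content' : DO_SPAN_NAME_PREFIX[name];
  // Without a model ID or a known prefix, the span keeps its 'ai.'-stripped name.
  return prefix && modelId ? `${prefix} ${modelId}` : name.replace('ai.', '');
}

describeSpan('ai.generateText');                             // 'invoke_agent'
describeSpan('ai.generateText', undefined, 'my-agent');      // 'invoke_agent my-agent'
describeSpan('ai.generateText.doGenerate', 'mock-model-id'); // 'generate_content mock-model-id'
describeSpan('ai.embed.doEmbed', 'mock-model-id');           // 'embed mock-model-id'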
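
Reviewer note (illustrative, not part of the patch): for context on the
functionId branch above, the value is supplied per request through the Vercel
AI SDK's experimental_telemetry option; after this change it is still recorded
as the gen_ai.function_id attribute but is appended only to invoke_agent
descriptions. A minimal sketch, with the provider import and model name as
placeholder assumptions:

import { openai } from '@ai-sdk/openai'; // placeholder provider; any model works
import { generateText } from 'ai';

async function main(): Promise<void> {
  await generateText({
    model: openai('gpt-4o-mini'),
    prompt: 'Hello',
    // Yields an invoke_agent span described as 'invoke_agent checkout-agent'.
    experimental_telemetry: { isEnabled: true, functionId: 'checkout-agent' },
  });
}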