diff --git a/.size-limit.js b/.size-limit.js
index e8124a622962..39063a460793 100644
--- a/.size-limit.js
+++ b/.size-limit.js
@@ -96,21 +96,21 @@ module.exports = [
     path: 'packages/browser/build/npm/esm/prod/index.js',
     import: createImport('init', 'feedbackIntegration'),
     gzip: true,
-    limit: '42 KB',
+    limit: '43 KB',
   },
   {
     name: '@sentry/browser (incl. sendFeedback)',
     path: 'packages/browser/build/npm/esm/prod/index.js',
     import: createImport('init', 'sendFeedback'),
     gzip: true,
-    limit: '30 KB',
+    limit: '31 KB',
   },
   {
     name: '@sentry/browser (incl. FeedbackAsync)',
     path: 'packages/browser/build/npm/esm/prod/index.js',
     import: createImport('init', 'feedbackAsyncIntegration'),
     gzip: true,
-    limit: '35 KB',
+    limit: '36 KB',
   },
   {
     name: '@sentry/browser (incl. Metrics)',
@@ -140,7 +140,7 @@ module.exports = [
     import: createImport('init', 'ErrorBoundary'),
     ignore: ['react/jsx-runtime'],
     gzip: true,
-    limit: '27 KB',
+    limit: '28 KB',
   },
   {
     name: '@sentry/react (incl. Tracing)',
@@ -208,7 +208,7 @@ module.exports = [
     name: 'CDN Bundle (incl. Tracing, Replay, Feedback, Logs, Metrics)',
     path: createCDNPath('bundle.tracing.replay.feedback.logs.metrics.min.js'),
     gzip: true,
-    limit: '86 KB',
+    limit: '87 KB',
   },
   // browser CDN bundles (non-gzipped)
   {
@@ -223,7 +223,7 @@ module.exports = [
     path: createCDNPath('bundle.tracing.min.js'),
     gzip: false,
     brotli: false,
-    limit: '127 KB',
+    limit: '128 KB',
   },
   {
     name: 'CDN Bundle (incl. Tracing, Logs, Metrics) - uncompressed',
@@ -278,7 +278,7 @@ module.exports = [
     import: createImport('init'),
     ignore: [...builtinModules, ...nodePrefixedBuiltinModules],
     gzip: true,
-    limit: '52 KB',
+    limit: '53 KB',
   },
   // Node SDK (ESM)
   {
diff --git a/CHANGELOG.md b/CHANGELOG.md
index e481c0732535..9be05c3727eb 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,23 @@
 Work in this release was contributed by @sebws and @harshit078. Thank you for your contributions!

+- **feat(core): Introduces a new `Sentry.setConversationId()` API to track multi-turn AI conversations across API calls. ([#18909](https://github.com/getsentry/sentry-javascript/pull/18909))**
+
+  You can now set a conversation ID that will be automatically applied to spans within that scope. This allows you to link traces from the same conversation together.
+
+  ```javascript
+  import * as Sentry from '@sentry/node';
+
+  // Set conversation ID for all subsequent spans
+  Sentry.setConversationId('conv_abc123');
+
+  // All AI spans will now include the gen_ai.conversation.id attribute
+  await openai.chat.completions.create({...});
+  ```
+
+  This is particularly useful for tracking multiple AI API calls that are part of the same conversation, allowing you to analyze entire conversation flows in Sentry.
+  The conversation ID is stored on the isolation scope and automatically applied to spans via the new `conversationIdIntegration`.
+
 - **feat(tanstackstart-react): Auto-instrument global middleware in `sentryTanstackStart` Vite plugin ([#18884](https://github.com/getsentry/sentry-javascript/pull/18844))**

   The `sentryTanstackStart` Vite plugin now automatically instruments `requestMiddleware` and `functionMiddleware` arrays in `createStart()`. This captures performance data without requiring manual wrapping.
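The changelog example for the `feat(core)` entry above sets the ID on the isolation scope via `Sentry.setConversationId()`. The integration tests added below also exercise the per-scope variant, `scope.setConversationId()`, which is the better fit when a single process handles several conversations concurrently. A minimal sketch condensed from the `scenario-separate-scope-*` files in this PR — the OpenAI client setup and the conversation IDs are illustrative, and the SDK is assumed to be initialized elsewhere with the default integrations (which now include `conversationIdIntegration`):

```javascript
import * as Sentry from '@sentry/node';
import OpenAI from 'openai';

const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

// Each forked scope carries its own conversation ID, so the
// gen_ai.conversation.id attribute from one conversation's spans
// never leaks into the other's.
await Sentry.withScope(async scope => {
  scope.setConversationId('conv_user1_session_abc');
  await client.chat.completions.create({
    model: 'gpt-4',
    messages: [{ role: 'user', content: 'Hello from conversation 1' }],
  });
});

await Sentry.withScope(async scope => {
  scope.setConversationId('conv_user2_session_xyz');
  await client.chat.completions.create({
    model: 'gpt-4',
    messages: [{ role: 'user', content: 'Hello from conversation 2' }],
  });
});
```

Because the ID lives on the scope rather than on individual spans, every span started inside the forked scope — including the auto-instrumented OpenAI spans — picks it up without any per-call plumbing.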
diff --git a/dev-packages/browser-integration-tests/suites/public-api/debug/test.ts b/dev-packages/browser-integration-tests/suites/public-api/debug/test.ts index b15c64280544..675f9a776cbf 100644 --- a/dev-packages/browser-integration-tests/suites/public-api/debug/test.ts +++ b/dev-packages/browser-integration-tests/suites/public-api/debug/test.ts @@ -24,6 +24,7 @@ sentryTest('logs debug messages correctly', async ({ getLocalTestUrl, page }) => ? [ 'Sentry Logger [log]: Integration installed: InboundFilters', 'Sentry Logger [log]: Integration installed: FunctionToString', + 'Sentry Logger [log]: Integration installed: ConversationId', 'Sentry Logger [log]: Integration installed: BrowserApiErrors', 'Sentry Logger [log]: Integration installed: Breadcrumbs', 'Sentry Logger [log]: Global Handler attached: onerror', diff --git a/dev-packages/node-integration-tests/suites/tracing/openai/scenario-manual-conversation-id.mjs b/dev-packages/node-integration-tests/suites/tracing/openai/scenario-manual-conversation-id.mjs new file mode 100644 index 000000000000..a44b4767bbae --- /dev/null +++ b/dev-packages/node-integration-tests/suites/tracing/openai/scenario-manual-conversation-id.mjs @@ -0,0 +1,79 @@ +import * as Sentry from '@sentry/node'; +import express from 'express'; +import OpenAI from 'openai'; + +function startMockServer() { + const app = express(); + app.use(express.json()); + + // Chat completions endpoint + app.post('/openai/chat/completions', (req, res) => { + const { model } = req.body; + + res.send({ + id: 'chatcmpl-mock123', + object: 'chat.completion', + created: 1677652288, + model: model, + choices: [ + { + index: 0, + message: { + role: 'assistant', + content: 'Mock response from OpenAI', + }, + finish_reason: 'stop', + }, + ], + usage: { + prompt_tokens: 10, + completion_tokens: 15, + total_tokens: 25, + }, + }); + }); + + return new Promise(resolve => { + const server = app.listen(0, () => { + resolve(server); + }); + }); +} + +async function run() { + const server = await startMockServer(); + + // Test: Multiple chat completions in the same conversation with manual conversation ID + await Sentry.startSpan({ op: 'function', name: 'chat-with-manual-conversation-id' }, async () => { + const client = new OpenAI({ + baseURL: `http://localhost:${server.address().port}/openai`, + apiKey: 'mock-api-key', + }); + + // Set conversation ID manually using Sentry API + Sentry.setConversationId('user_chat_session_abc123'); + + // First message in the conversation + await client.chat.completions.create({ + model: 'gpt-4', + messages: [{ role: 'user', content: 'What is the capital of France?' }], + }); + + // Second message in the same conversation + await client.chat.completions.create({ + model: 'gpt-4', + messages: [{ role: 'user', content: 'Tell me more about it' }], + }); + + // Third message in the same conversation + await client.chat.completions.create({ + model: 'gpt-4', + messages: [{ role: 'user', content: 'What is its population?' 
}], + }); + }); + + server.close(); + await Sentry.flush(2000); +} + +run(); diff --git a/dev-packages/node-integration-tests/suites/tracing/openai/scenario-separate-scope-1.mjs b/dev-packages/node-integration-tests/suites/tracing/openai/scenario-separate-scope-1.mjs new file mode 100644 index 000000000000..dab303a401d9 --- /dev/null +++ b/dev-packages/node-integration-tests/suites/tracing/openai/scenario-separate-scope-1.mjs @@ -0,0 +1,74 @@ +import * as Sentry from '@sentry/node'; +import express from 'express'; +import OpenAI from 'openai'; + +function startMockServer() { + const app = express(); + app.use(express.json()); + + // Chat completions endpoint + app.post('/openai/chat/completions', (req, res) => { + const { model } = req.body; + + res.send({ + id: 'chatcmpl-mock123', + object: 'chat.completion', + created: 1677652288, + model: model, + choices: [ + { + index: 0, + message: { + role: 'assistant', + content: 'Mock response from OpenAI', + }, + finish_reason: 'stop', + }, + ], + usage: { + prompt_tokens: 10, + completion_tokens: 15, + total_tokens: 25, + }, + }); + }); + + return new Promise(resolve => { + const server = app.listen(0, () => { + resolve(server); + }); + }); +} + +async function run() { + const server = await startMockServer(); + const client = new OpenAI({ + baseURL: `http://localhost:${server.address().port}/openai`, + apiKey: 'mock-api-key', + }); + + // First request/conversation scope + await Sentry.withScope(async scope => { + // Set conversation ID for this request scope BEFORE starting the span + scope.setConversationId('conv_user1_session_abc'); + + await Sentry.startSpan({ op: 'http.server', name: 'GET /chat/conversation-1' }, async () => { + // First message in conversation 1 + await client.chat.completions.create({ + model: 'gpt-4', + messages: [{ role: 'user', content: 'Hello from conversation 1' }], + }); + + // Second message in conversation 1 + await client.chat.completions.create({ + model: 'gpt-4', + messages: [{ role: 'user', content: 'Follow-up in conversation 1' }], + }); + }); + }); + + server.close(); + await Sentry.flush(2000); +} + +run(); diff --git a/dev-packages/node-integration-tests/suites/tracing/openai/scenario-separate-scope-2.mjs b/dev-packages/node-integration-tests/suites/tracing/openai/scenario-separate-scope-2.mjs new file mode 100644 index 000000000000..09f73afed761 --- /dev/null +++ b/dev-packages/node-integration-tests/suites/tracing/openai/scenario-separate-scope-2.mjs @@ -0,0 +1,74 @@ +import * as Sentry from '@sentry/node'; +import express from 'express'; +import OpenAI from 'openai'; + +function startMockServer() { + const app = express(); + app.use(express.json()); + + // Chat completions endpoint + app.post('/openai/chat/completions', (req, res) => { + const { model } = req.body; + + res.send({ + id: 'chatcmpl-mock123', + object: 'chat.completion', + created: 1677652288, + model: model, + choices: [ + { + index: 0, + message: { + role: 'assistant', + content: 'Mock response from OpenAI', + }, + finish_reason: 'stop', + }, + ], + usage: { + prompt_tokens: 10, + completion_tokens: 15, + total_tokens: 25, + }, + }); + }); + + return new Promise(resolve => { + const server = app.listen(0, () => { + resolve(server); + }); + }); +} + +async function run() { + const server = await startMockServer(); + const client = new OpenAI({ + baseURL: `http://localhost:${server.address().port}/openai`, + apiKey: 'mock-api-key', + }); + + // Second request/conversation scope (completely separate) + await Sentry.withScope(async scope => 
{ + // Set different conversation ID for this request scope BEFORE starting the span + scope.setConversationId('conv_user2_session_xyz'); + + await Sentry.startSpan({ op: 'http.server', name: 'GET /chat/conversation-2' }, async () => { + // First message in conversation 2 + await client.chat.completions.create({ + model: 'gpt-4', + messages: [{ role: 'user', content: 'Hello from conversation 2' }], + }); + + // Second message in conversation 2 + await client.chat.completions.create({ + model: 'gpt-4', + messages: [{ role: 'user', content: 'Follow-up in conversation 2' }], + }); + }); + }); + + server.close(); + await Sentry.flush(2000); +} + +run(); diff --git a/dev-packages/node-integration-tests/suites/tracing/openai/test.ts b/dev-packages/node-integration-tests/suites/tracing/openai/test.ts index 52ddc0837097..dc4c9bc41fd3 100644 --- a/dev-packages/node-integration-tests/suites/tracing/openai/test.ts +++ b/dev-packages/node-integration-tests/suites/tracing/openai/test.ts @@ -761,4 +761,144 @@ describe('OpenAI integration', () => { .completed(); }); }); + + // Test for manual conversation ID setting using setConversationId() + const EXPECTED_TRANSACTION_MANUAL_CONVERSATION_ID = { + transaction: 'chat-with-manual-conversation-id', + spans: expect.arrayContaining([ + // All three chat completion spans should have the same manually-set conversation ID + expect.objectContaining({ + data: expect.objectContaining({ + 'gen_ai.conversation.id': 'user_chat_session_abc123', + 'gen_ai.system': 'openai', + 'gen_ai.request.model': 'gpt-4', + 'gen_ai.operation.name': 'chat', + 'sentry.op': 'gen_ai.chat', + }), + description: 'chat gpt-4', + op: 'gen_ai.chat', + origin: 'auto.ai.openai', + status: 'ok', + }), + expect.objectContaining({ + data: expect.objectContaining({ + 'gen_ai.conversation.id': 'user_chat_session_abc123', + 'gen_ai.system': 'openai', + 'gen_ai.request.model': 'gpt-4', + 'gen_ai.operation.name': 'chat', + 'sentry.op': 'gen_ai.chat', + }), + description: 'chat gpt-4', + op: 'gen_ai.chat', + origin: 'auto.ai.openai', + status: 'ok', + }), + expect.objectContaining({ + data: expect.objectContaining({ + 'gen_ai.conversation.id': 'user_chat_session_abc123', + 'gen_ai.system': 'openai', + 'gen_ai.request.model': 'gpt-4', + 'gen_ai.operation.name': 'chat', + 'sentry.op': 'gen_ai.chat', + }), + description: 'chat gpt-4', + op: 'gen_ai.chat', + origin: 'auto.ai.openai', + status: 'ok', + }), + ]), + }; + + createEsmAndCjsTests(__dirname, 'scenario-manual-conversation-id.mjs', 'instrument.mjs', (createRunner, test) => { + test('attaches manual conversation ID set via setConversationId() to all chat spans', async () => { + await createRunner() + .ignore('event') + .expect({ transaction: EXPECTED_TRANSACTION_MANUAL_CONVERSATION_ID }) + .start() + .completed(); + }); + }); + + // Test for scope isolation - different scopes have different conversation IDs + const EXPECTED_TRANSACTION_CONVERSATION_1 = { + transaction: 'GET /chat/conversation-1', + spans: expect.arrayContaining([ + // Both chat completion spans in conversation 1 should have conv_user1_session_abc + expect.objectContaining({ + data: expect.objectContaining({ + 'gen_ai.conversation.id': 'conv_user1_session_abc', + 'gen_ai.system': 'openai', + 'gen_ai.request.model': 'gpt-4', + 'sentry.op': 'gen_ai.chat', + }), + description: 'chat gpt-4', + op: 'gen_ai.chat', + origin: 'auto.ai.openai', + status: 'ok', + }), + expect.objectContaining({ + data: expect.objectContaining({ + 'gen_ai.conversation.id': 'conv_user1_session_abc', + 
'gen_ai.system': 'openai', + 'gen_ai.request.model': 'gpt-4', + 'sentry.op': 'gen_ai.chat', + }), + description: 'chat gpt-4', + op: 'gen_ai.chat', + origin: 'auto.ai.openai', + status: 'ok', + }), + ]), + }; + + const EXPECTED_TRANSACTION_CONVERSATION_2 = { + transaction: 'GET /chat/conversation-2', + spans: expect.arrayContaining([ + // Both chat completion spans in conversation 2 should have conv_user2_session_xyz + expect.objectContaining({ + data: expect.objectContaining({ + 'gen_ai.conversation.id': 'conv_user2_session_xyz', + 'gen_ai.system': 'openai', + 'gen_ai.request.model': 'gpt-4', + 'sentry.op': 'gen_ai.chat', + }), + description: 'chat gpt-4', + op: 'gen_ai.chat', + origin: 'auto.ai.openai', + status: 'ok', + }), + expect.objectContaining({ + data: expect.objectContaining({ + 'gen_ai.conversation.id': 'conv_user2_session_xyz', + 'gen_ai.system': 'openai', + 'gen_ai.request.model': 'gpt-4', + 'sentry.op': 'gen_ai.chat', + }), + description: 'chat gpt-4', + op: 'gen_ai.chat', + origin: 'auto.ai.openai', + status: 'ok', + }), + ]), + }; + + createEsmAndCjsTests(__dirname, 'scenario-separate-scope-1.mjs', 'instrument.mjs', (createRunner, test) => { + test('isolates conversation IDs across separate scopes - conversation 1', async () => { + await createRunner() + .ignore('event') + .expect({ transaction: EXPECTED_TRANSACTION_CONVERSATION_1 }) + .start() + .completed(); + }); + }); + + createEsmAndCjsTests(__dirname, 'scenario-separate-scope-2.mjs', 'instrument.mjs', (createRunner, test) => { + test('isolates conversation IDs across separate scopes - conversation 2', async () => { + await createRunner() + .ignore('event') + .expect({ transaction: EXPECTED_TRANSACTION_CONVERSATION_2 }) + .start() + .completed(); + }); + }); }); diff --git a/packages/angular/src/sdk.ts b/packages/angular/src/sdk.ts index c6cf3b17fcd0..45b2b1fc9759 100755 --- a/packages/angular/src/sdk.ts +++ b/packages/angular/src/sdk.ts @@ -12,6 +12,7 @@ import { import type { Client, Integration } from '@sentry/core'; import { applySdkMetadata, + conversationIdIntegration, debug, dedupeIntegration, functionToStringIntegration, @@ -36,6 +37,7 @@ export function getDefaultIntegrations(_options: BrowserOptions = {}): Integrati // eslint-disable-next-line deprecation/deprecation inboundFiltersIntegration(), functionToStringIntegration(), + conversationIdIntegration(), breadcrumbsIntegration(), globalHandlersIntegration(), linkedErrorsIntegration(), diff --git a/packages/astro/src/index.server.ts b/packages/astro/src/index.server.ts index 28623724db19..7005fcf26b86 100644 --- a/packages/astro/src/index.server.ts +++ b/packages/astro/src/index.server.ts @@ -114,6 +114,7 @@ export { SEMANTIC_ATTRIBUTE_SENTRY_SAMPLE_RATE, SEMANTIC_ATTRIBUTE_SENTRY_SOURCE, setContext, + setConversationId, setCurrentClient, setExtra, setExtras, diff --git a/packages/aws-serverless/src/index.ts b/packages/aws-serverless/src/index.ts index fd0a0cb83095..34889236032c 100644 --- a/packages/aws-serverless/src/index.ts +++ b/packages/aws-serverless/src/index.ts @@ -25,6 +25,7 @@ export { Scope, SDK_VERSION, setContext, + setConversationId, setExtra, setExtras, setTag, diff --git a/packages/browser/src/sdk.ts b/packages/browser/src/sdk.ts index 800c1b701352..eeff23fe8f17 100644 --- a/packages/browser/src/sdk.ts +++ b/packages/browser/src/sdk.ts @@ -1,5 +1,6 @@ import type { Client, Integration, Options } from '@sentry/core'; import { + conversationIdIntegration, dedupeIntegration, functionToStringIntegration, getIntegrationsToSetup, @@ -31,6 +32,7 
@@ export function getDefaultIntegrations(_options: Options): Integration[] { // eslint-disable-next-line deprecation/deprecation inboundFiltersIntegration(), functionToStringIntegration(), + conversationIdIntegration(), browserApiErrorsIntegration(), breadcrumbsIntegration(), globalHandlersIntegration(), diff --git a/packages/bun/src/index.ts b/packages/bun/src/index.ts index 9de1e55dacb6..5f2d628ce983 100644 --- a/packages/bun/src/index.ts +++ b/packages/bun/src/index.ts @@ -48,6 +48,7 @@ export { Scope, SDK_VERSION, setContext, + setConversationId, setExtra, setExtras, setTag, diff --git a/packages/cloudflare/src/sdk.ts b/packages/cloudflare/src/sdk.ts index 238cc13253a5..0211fa7f96a9 100644 --- a/packages/cloudflare/src/sdk.ts +++ b/packages/cloudflare/src/sdk.ts @@ -1,6 +1,7 @@ import type { Integration } from '@sentry/core'; import { consoleIntegration, + conversationIdIntegration, dedupeIntegration, functionToStringIntegration, getIntegrationsToSetup, @@ -30,6 +31,7 @@ export function getDefaultIntegrations(options: CloudflareOptions): Integration[ // eslint-disable-next-line deprecation/deprecation inboundFiltersIntegration(), functionToStringIntegration(), + conversationIdIntegration(), linkedErrorsIntegration(), fetchIntegration(), honoIntegration(), diff --git a/packages/core/src/exports.ts b/packages/core/src/exports.ts index a59e521febc7..d7931565b7ab 100644 --- a/packages/core/src/exports.ts +++ b/packages/core/src/exports.ts @@ -111,6 +111,15 @@ export function setUser(user: User | null): void { getIsolationScope().setUser(user); } +/** + * Sets the conversation ID for the current isolation scope. + * + * @param conversationId The conversation ID to set. Pass `null` or `undefined` to unset the conversation ID. + */ +export function setConversationId(conversationId: string | null | undefined): void { + getIsolationScope().setConversationId(conversationId); +} + /** * The last error event id of the isolation scope. 
* diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts index 19a83d230155..30ace1803b1a 100644 --- a/packages/core/src/index.ts +++ b/packages/core/src/index.ts @@ -25,6 +25,7 @@ export { setTag, setTags, setUser, + setConversationId, isInitialized, isEnabled, startSession, @@ -120,6 +121,7 @@ export { thirdPartyErrorFilterIntegration } from './integrations/third-party-err export { consoleIntegration } from './integrations/console'; export { featureFlagsIntegration, type FeatureFlagsIntegration } from './integrations/featureFlags'; export { growthbookIntegration } from './integrations/featureFlags'; +export { conversationIdIntegration } from './integrations/conversationId'; export { profiler } from './profiling'; // eslint thinks the entire function is deprecated (while only one overload is actually deprecated) diff --git a/packages/core/src/integrations/conversationId.ts b/packages/core/src/integrations/conversationId.ts new file mode 100644 index 000000000000..c11b587d3a71 --- /dev/null +++ b/packages/core/src/integrations/conversationId.ts @@ -0,0 +1,35 @@ +import type { Client } from '../client'; +import { getCurrentScope, getIsolationScope } from '../currentScopes'; +import { defineIntegration } from '../integration'; +import { GEN_AI_CONVERSATION_ID_ATTRIBUTE } from '../semanticAttributes'; +import type { IntegrationFn } from '../types-hoist/integration'; +import type { Span } from '../types-hoist/span'; + +const INTEGRATION_NAME = 'ConversationId'; + +const _conversationIdIntegration = (() => { + return { + name: INTEGRATION_NAME, + setup(client: Client) { + client.on('spanStart', (span: Span) => { + const scopeData = getCurrentScope().getScopeData(); + const isolationScopeData = getIsolationScope().getScopeData(); + + const conversationId = scopeData.conversationId || isolationScopeData.conversationId; + + if (conversationId) { + span.setAttribute(GEN_AI_CONVERSATION_ID_ATTRIBUTE, conversationId); + } + }); + }, + }; +}) satisfies IntegrationFn; + +/** + * Automatically applies conversation ID from scope to spans. + * + * This integration reads the conversation ID from the current or isolation scope + * and applies it to spans when they start. This ensures the conversation ID is + * available for all AI-related operations. + */ +export const conversationIdIntegration = defineIntegration(_conversationIdIntegration); diff --git a/packages/core/src/scope.ts b/packages/core/src/scope.ts index b5a64bb8818a..8f05cf78c16f 100644 --- a/packages/core/src/scope.ts +++ b/packages/core/src/scope.ts @@ -51,6 +51,7 @@ export interface ScopeContext { attributes?: RawAttributes>; fingerprint: string[]; propagationContext: PropagationContext; + conversationId?: string; } export interface SdkProcessingMetadata { @@ -85,6 +86,7 @@ export interface ScopeData { level?: SeverityLevel; transactionName?: string; span?: Span; + conversationId?: string; } /** @@ -153,6 +155,9 @@ export class Scope { /** Contains the last event id of a captured event. */ protected _lastEventId?: string; + /** Conversation ID */ + protected _conversationId?: string; + // NOTE: Any field which gets added here should get added not only to the constructor but also to the `clone` method. 
public constructor() { @@ -202,6 +207,7 @@ export class Scope { newScope._propagationContext = { ...this._propagationContext }; newScope._client = this._client; newScope._lastEventId = this._lastEventId; + newScope._conversationId = this._conversationId; _setSpanForScope(newScope, _getSpanForScope(this)); @@ -284,6 +290,16 @@ export class Scope { return this._user; } + /** + * Set the conversation ID for this scope. + * Set to `null` to unset the conversation ID. + */ + public setConversationId(conversationId: string | null | undefined): this { + this._conversationId = conversationId || undefined; + this._notifyScopeListeners(); + return this; + } + /** * Set an object that will be merged into existing tags on the scope, * and will be sent as tags data with the event. @@ -507,6 +523,7 @@ export class Scope { level, fingerprint = [], propagationContext, + conversationId, } = scopeInstance || {}; this._tags = { ...this._tags, ...tags }; @@ -530,6 +547,10 @@ export class Scope { this._propagationContext = propagationContext; } + if (conversationId) { + this._conversationId = conversationId; + } + return this; } @@ -549,6 +570,7 @@ export class Scope { this._transactionName = undefined; this._fingerprint = undefined; this._session = undefined; + this._conversationId = undefined; _setSpanForScope(this, undefined); this._attachments = []; this.setPropagationContext({ @@ -641,6 +663,7 @@ export class Scope { sdkProcessingMetadata: this._sdkProcessingMetadata, transactionName: this._transactionName, span: _getSpanForScope(this), + conversationId: this._conversationId, }; } diff --git a/packages/core/src/semanticAttributes.ts b/packages/core/src/semanticAttributes.ts index 9b90809c0091..88b0f470dfa3 100644 --- a/packages/core/src/semanticAttributes.ts +++ b/packages/core/src/semanticAttributes.ts @@ -77,3 +77,18 @@ export const SEMANTIC_ATTRIBUTE_URL_FULL = 'url.full'; * @see https://develop.sentry.dev/sdk/telemetry/traces/span-links/#link-types */ export const SEMANTIC_LINK_ATTRIBUTE_LINK_TYPE = 'sentry.link.type'; + +/** + * ============================================================================= + * GEN AI ATTRIBUTES + * Based on OpenTelemetry Semantic Conventions for Generative AI + * @see https://opentelemetry.io/docs/specs/semconv/gen-ai/ + * ============================================================================= + */ + +/** + * The conversation ID for linking messages across API calls. 
+ * For OpenAI Assistants API: thread_id + * For LangGraph: configurable.thread_id + */ +export const GEN_AI_CONVERSATION_ID_ATTRIBUTE = 'gen_ai.conversation.id'; diff --git a/packages/core/test/lib/integrations/conversationId.test.ts b/packages/core/test/lib/integrations/conversationId.test.ts new file mode 100644 index 000000000000..e9ea9cc50d45 --- /dev/null +++ b/packages/core/test/lib/integrations/conversationId.test.ts @@ -0,0 +1,98 @@ +import { afterEach, beforeEach, describe, expect, it } from 'vitest'; +import { getCurrentScope, getIsolationScope, setCurrentClient, startSpan } from '../../../src'; +import { conversationIdIntegration } from '../../../src/integrations/conversationId'; +import { GEN_AI_CONVERSATION_ID_ATTRIBUTE } from '../../../src/semanticAttributes'; +import { spanToJSON } from '../../../src/utils/spanUtils'; +import { getDefaultTestClientOptions, TestClient } from '../../mocks/client'; + +describe('ConversationId', () => { + beforeEach(() => { + const testClient = new TestClient( + getDefaultTestClientOptions({ + tracesSampleRate: 1, + }), + ); + setCurrentClient(testClient); + testClient.init(); + testClient.addIntegration(conversationIdIntegration()); + }); + + afterEach(() => { + getCurrentScope().setClient(undefined); + getCurrentScope().setConversationId(null); + getIsolationScope().setConversationId(null); + }); + + it('applies conversation ID from current scope to span', () => { + getCurrentScope().setConversationId('conv_test_123'); + + startSpan({ name: 'test-span' }, span => { + const spanJSON = spanToJSON(span); + expect(spanJSON.data[GEN_AI_CONVERSATION_ID_ATTRIBUTE]).toBe('conv_test_123'); + }); + }); + + it('applies conversation ID from isolation scope when current scope does not have one', () => { + getIsolationScope().setConversationId('conv_isolation_456'); + + startSpan({ name: 'test-span' }, span => { + const spanJSON = spanToJSON(span); + expect(spanJSON.data[GEN_AI_CONVERSATION_ID_ATTRIBUTE]).toBe('conv_isolation_456'); + }); + }); + + it('prefers current scope over isolation scope', () => { + getCurrentScope().setConversationId('conv_current_789'); + getIsolationScope().setConversationId('conv_isolation_999'); + + startSpan({ name: 'test-span' }, span => { + const spanJSON = spanToJSON(span); + expect(spanJSON.data[GEN_AI_CONVERSATION_ID_ATTRIBUTE]).toBe('conv_current_789'); + }); + }); + + it('does not apply conversation ID when not set in scope', () => { + startSpan({ name: 'test-span' }, span => { + const spanJSON = spanToJSON(span); + expect(spanJSON.data[GEN_AI_CONVERSATION_ID_ATTRIBUTE]).toBeUndefined(); + }); + }); + + it('works when conversation ID is unset with null', () => { + getCurrentScope().setConversationId('conv_test_123'); + getCurrentScope().setConversationId(null); + + startSpan({ name: 'test-span' }, span => { + const spanJSON = spanToJSON(span); + expect(spanJSON.data[GEN_AI_CONVERSATION_ID_ATTRIBUTE]).toBeUndefined(); + }); + }); + + it('applies conversation ID to nested spans', () => { + getCurrentScope().setConversationId('conv_nested_abc'); + + startSpan({ name: 'parent-span' }, () => { + startSpan({ name: 'child-span' }, childSpan => { + const childJSON = spanToJSON(childSpan); + expect(childJSON.data[GEN_AI_CONVERSATION_ID_ATTRIBUTE]).toBe('conv_nested_abc'); + }); + }); + }); + + it('scope conversation ID overrides explicitly set attribute', () => { + getCurrentScope().setConversationId('conv_from_scope'); + + startSpan( + { + name: 'test-span', + attributes: { + [GEN_AI_CONVERSATION_ID_ATTRIBUTE]: 'conv_explicit', 
+ }, + }, + span => { + const spanJSON = spanToJSON(span); + expect(spanJSON.data[GEN_AI_CONVERSATION_ID_ATTRIBUTE]).toBe('conv_from_scope'); + }, + ); + }); +}); diff --git a/packages/core/test/lib/scope.test.ts b/packages/core/test/lib/scope.test.ts index f1e5c58550be..11fc4cb62fff 100644 --- a/packages/core/test/lib/scope.test.ts +++ b/packages/core/test/lib/scope.test.ts @@ -1011,6 +1011,63 @@ describe('Scope', () => { }); }); + describe('setConversationId() / getScopeData()', () => { + test('sets and gets conversation ID via getScopeData', () => { + const scope = new Scope(); + scope.setConversationId('conv_abc123'); + expect(scope.getScopeData().conversationId).toEqual('conv_abc123'); + }); + + test('unsets conversation ID with null or undefined', () => { + const scope = new Scope(); + scope.setConversationId('conv_abc123'); + scope.setConversationId(null); + expect(scope.getScopeData().conversationId).toBeUndefined(); + + scope.setConversationId('conv_abc123'); + scope.setConversationId(undefined); + expect(scope.getScopeData().conversationId).toBeUndefined(); + }); + + test('clones conversation ID to new scope', () => { + const scope = new Scope(); + scope.setConversationId('conv_clone123'); + const clonedScope = scope.clone(); + expect(clonedScope.getScopeData().conversationId).toEqual('conv_clone123'); + }); + + test('notifies scope listeners when conversation ID is set', () => { + const scope = new Scope(); + const listener = vi.fn(); + scope.addScopeListener(listener); + scope.setConversationId('conv_listener'); + expect(listener).toHaveBeenCalledWith(scope); + }); + + test('clears conversation ID when scope is cleared', () => { + const scope = new Scope(); + scope.setConversationId('conv_to_clear'); + expect(scope.getScopeData().conversationId).toEqual('conv_to_clear'); + scope.clear(); + expect(scope.getScopeData().conversationId).toBeUndefined(); + }); + + test('updates conversation ID when scope is updated with ScopeContext', () => { + const scope = new Scope(); + scope.setConversationId('conv_old'); + scope.update({ conversationId: 'conv_updated' }); + expect(scope.getScopeData().conversationId).toEqual('conv_updated'); + }); + + test('updates conversation ID when scope is updated with another Scope', () => { + const scope1 = new Scope(); + const scope2 = new Scope(); + scope2.setConversationId('conv_from_scope2'); + scope1.update(scope2); + expect(scope1.getScopeData().conversationId).toEqual('conv_from_scope2'); + }); + }); + describe('addBreadcrumb()', () => { test('adds a breadcrumb', () => { const scope = new Scope(); diff --git a/packages/google-cloud-serverless/src/index.ts b/packages/google-cloud-serverless/src/index.ts index 4fa5c727be59..636852d722d3 100644 --- a/packages/google-cloud-serverless/src/index.ts +++ b/packages/google-cloud-serverless/src/index.ts @@ -25,6 +25,7 @@ export { Scope, SDK_VERSION, setContext, + setConversationId, setExtra, setExtras, setTag, diff --git a/packages/node-core/src/sdk/index.ts b/packages/node-core/src/sdk/index.ts index 1f0fd8835340..3d6b4c61619e 100644 --- a/packages/node-core/src/sdk/index.ts +++ b/packages/node-core/src/sdk/index.ts @@ -3,6 +3,7 @@ import { applySdkMetadata, consoleIntegration, consoleSandbox, + conversationIdIntegration, debug, functionToStringIntegration, getCurrentScope, @@ -55,6 +56,7 @@ export function getDefaultIntegrations(): Integration[] { linkedErrorsIntegration(), requestDataIntegration(), systemErrorIntegration(), + conversationIdIntegration(), // Native Wrappers consoleIntegration(), 
httpIntegration(), diff --git a/packages/node/src/index.ts b/packages/node/src/index.ts index 84fdf97539bc..e96a28483174 100644 --- a/packages/node/src/index.ts +++ b/packages/node/src/index.ts @@ -83,6 +83,7 @@ export { setTag, setTags, setUser, + setConversationId, SEMANTIC_ATTRIBUTE_SENTRY_OP, SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN, SEMANTIC_ATTRIBUTE_SENTRY_SOURCE, diff --git a/packages/vercel-edge/src/sdk.ts b/packages/vercel-edge/src/sdk.ts index 5c8387c9bc7a..269d9ada280a 100644 --- a/packages/vercel-edge/src/sdk.ts +++ b/packages/vercel-edge/src/sdk.ts @@ -9,6 +9,7 @@ import { import type { Client, Integration, Options } from '@sentry/core'; import { consoleIntegration, + conversationIdIntegration, createStackParser, debug, dedupeIntegration, @@ -56,6 +57,7 @@ export function getDefaultIntegrations(options: Options): Integration[] { // eslint-disable-next-line deprecation/deprecation inboundFiltersIntegration(), functionToStringIntegration(), + conversationIdIntegration(), linkedErrorsIntegration(), winterCGFetchIntegration(), consoleIntegration(),
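Taken together, the scope changes and the `conversationIdIntegration` give the attribute a simple precedence order: the current scope wins over the isolation scope, and passing `null` (or `undefined`) clears the ID so later spans carry no `gen_ai.conversation.id` at all. A short sketch mirroring the new unit tests — the DSN, span names, and IDs here are placeholders:

```javascript
import * as Sentry from '@sentry/node';

Sentry.init({ dsn: '__PUBLIC_DSN__', tracesSampleRate: 1 });

// Isolation-scope ID: the default for every span in this execution context.
Sentry.setConversationId('conv_default');

await Sentry.startSpan({ name: 'chat default' }, async () => {
  // span gets gen_ai.conversation.id = 'conv_default'
});

await Sentry.withScope(async scope => {
  // The current scope takes precedence over the isolation scope.
  scope.setConversationId('conv_override');
  await Sentry.startSpan({ name: 'chat override' }, async () => {
    // span gets gen_ai.conversation.id = 'conv_override'
  });
});

// Unset: subsequent spans carry no conversation attribute.
Sentry.setConversationId(null);
await Sentry.startSpan({ name: 'chat cleared' }, async () => {
  // no gen_ai.conversation.id on this span
});
```

Note that a scope-level conversation ID also overwrites an attribute set explicitly on the span, as pinned down by the "scope conversation ID overrides explicitly set attribute" test above.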