@@ -20,11 +20,11 @@ sentryTest('manual Anthropic instrumentation sends gen_ai transactions', async (
const eventData = envelopeRequestParser(req);

// Verify it's a gen_ai transaction
-expect(eventData.transaction).toBe('messages claude-3-haiku-20240307');
-expect(eventData.contexts?.trace?.op).toBe('gen_ai.messages');
+expect(eventData.transaction).toBe('chat claude-3-haiku-20240307');
+expect(eventData.contexts?.trace?.op).toBe('gen_ai.chat');
expect(eventData.contexts?.trace?.origin).toBe('auto.ai.anthropic');
expect(eventData.contexts?.trace?.data).toMatchObject({
-'gen_ai.operation.name': 'messages',
+'gen_ai.operation.name': 'chat',
'gen_ai.system': 'anthropic',
'gen_ai.request.model': 'claude-3-haiku-20240307',
'gen_ai.request.temperature': 0.7,
@@ -28,8 +28,8 @@ it('traces a basic message creation request', async ({ signal }) => {
expect.arrayContaining([
expect.objectContaining({
data: expect.objectContaining({
-[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'messages',
-[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.messages',
+[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'chat',
+[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.chat',
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.anthropic',
[GEN_AI_SYSTEM_ATTRIBUTE]: 'anthropic',
[GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'claude-3-haiku-20240307',
@@ -39,8 +39,8 @@ it('traces a basic message creation request', async ({ signal }) => {
[GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE]: 10,
[GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE]: 15,
}),
-description: 'messages claude-3-haiku-20240307',
-op: 'gen_ai.messages',
+description: 'chat claude-3-haiku-20240307',
+op: 'gen_ai.chat',
origin: 'auto.ai.anthropic',
}),
]),
@@ -62,8 +62,8 @@ it('traces Google GenAI chat creation and message sending', async () => {
// Third span - models.generateContent
expect.objectContaining({
data: expect.objectContaining({
-[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'models',
-[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.models',
+[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'generate_content',
+[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.generate_content',
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
[GEN_AI_SYSTEM_ATTRIBUTE]: 'google_genai',
[GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'gemini-1.5-flash',
@@ -74,8 +74,8 @@ it('traces Google GenAI chat creation and message sending', async () => {
[GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE]: 12,
[GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE]: 20,
}),
-description: 'models gemini-1.5-flash',
-op: 'gen_ai.models',
+description: 'generate_content gemini-1.5-flash',
+op: 'gen_ai.generate_content',
origin: 'auto.ai.google_genai',
}),
]),
dev-packages/node-integration-tests/suites/tracing/anthropic/test.ts: 148 changes (74 additions, 74 deletions)

Large diffs are not rendered by default.
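Since that test file is collapsed, the following is only an illustrative sketch, assuming it applies the same rename visible in the rendered hunks (operation name 'messages' to 'chat', op 'gen_ai.messages' to 'gen_ai.chat'). The interface, helper, and test names below are hypothetical stand-ins, not the file's actual contents.

import { describe, expect, it } from 'vitest';

// Minimal stand-in for the serialized span shape asserted throughout these tests.
interface SpanJSON {
  description: string;
  op: string;
  origin: string;
  data: Record<string, unknown>;
}

// Hypothetical helper standing in for however the real test captures the span.
function captureAnthropicSpan(): SpanJSON {
  return {
    description: 'chat claude-3-haiku-20240307',
    op: 'gen_ai.chat',
    origin: 'auto.ai.anthropic',
    data: {
      'gen_ai.operation.name': 'chat',
      'gen_ai.system': 'anthropic',
      'gen_ai.request.model': 'claude-3-haiku-20240307',
    },
  };
}

describe('anthropic span op rename (sketch)', () => {
  it('reports the chat operation instead of messages', () => {
    const span = captureAnthropicSpan();
    expect(span.op).toBe('gen_ai.chat');
    expect(span.description).toBe('chat claude-3-haiku-20240307');
    expect(span.data['gen_ai.operation.name']).toBe('chat');
  });
});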

@@ -66,8 +66,8 @@ describe('Google GenAI integration', () => {
// Third span - models.generateContent
expect.objectContaining({
data: {
-[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'models',
-[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.models',
+[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'generate_content',
+[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.generate_content',
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
[GEN_AI_SYSTEM_ATTRIBUTE]: 'google_genai',
[GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'gemini-1.5-flash',
@@ -78,22 +78,22 @@ describe('Google GenAI integration', () => {
[GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE]: 12,
[GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE]: 20,
},
-description: 'models gemini-1.5-flash',
-op: 'gen_ai.models',
+description: 'generate_content gemini-1.5-flash',
+op: 'gen_ai.generate_content',
origin: 'auto.ai.google_genai',
status: 'ok',
}),
// Fourth span - error handling
expect.objectContaining({
data: {
-[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'models',
-[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.models',
+[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'generate_content',
+[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.generate_content',
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
[GEN_AI_SYSTEM_ATTRIBUTE]: 'google_genai',
[GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'error-model',
},
-description: 'models error-model',
-op: 'gen_ai.models',
+description: 'generate_content error-model',
+op: 'gen_ai.generate_content',
origin: 'auto.ai.google_genai',
status: 'internal_error',
}),
@@ -143,8 +143,8 @@ describe('Google GenAI integration', () => {
// Third span - models.generateContent with PII
expect.objectContaining({
data: expect.objectContaining({
-[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'models',
-[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.models',
+[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'generate_content',
+[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.generate_content',
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
[GEN_AI_SYSTEM_ATTRIBUTE]: 'google_genai',
[GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'gemini-1.5-flash',
@@ -157,23 +157,23 @@ describe('Google GenAI integration', () => {
[GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE]: 12,
[GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE]: 20,
}),
-description: 'models gemini-1.5-flash',
-op: 'gen_ai.models',
+description: 'generate_content gemini-1.5-flash',
+op: 'gen_ai.generate_content',
origin: 'auto.ai.google_genai',
status: 'ok',
}),
// Fourth span - error handling with PII
expect.objectContaining({
data: expect.objectContaining({
-[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'models',
-[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.models',
+[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'generate_content',
+[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.generate_content',
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
[GEN_AI_SYSTEM_ATTRIBUTE]: 'google_genai',
[GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'error-model',
[GEN_AI_REQUEST_MESSAGES_ATTRIBUTE]: expect.any(String), // Should include contents when recordInputs: true
}),
-description: 'models error-model',
-op: 'gen_ai.models',
+description: 'generate_content error-model',
+op: 'gen_ai.generate_content',
origin: 'auto.ai.google_genai',
status: 'internal_error',
}),
@@ -233,8 +233,8 @@ describe('Google GenAI integration', () => {
// Non-streaming with tools
expect.objectContaining({
data: expect.objectContaining({
-[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'models',
-[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.models',
+[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'generate_content',
+[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.generate_content',
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
[GEN_AI_SYSTEM_ATTRIBUTE]: 'google_genai',
[GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'gemini-2.0-flash-001',
@@ -246,16 +246,16 @@ describe('Google GenAI integration', () => {
[GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE]: 8,
[GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE]: 23,
}),
-description: 'models gemini-2.0-flash-001',
-op: 'gen_ai.models',
+description: 'generate_content gemini-2.0-flash-001',
+op: 'gen_ai.generate_content',
origin: 'auto.ai.google_genai',
status: 'ok',
}),
// Streaming with tools
expect.objectContaining({
data: expect.objectContaining({
-[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'models',
-[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.models',
+[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'generate_content',
+[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.generate_content',
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
[GEN_AI_SYSTEM_ATTRIBUTE]: 'google_genai',
[GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'gemini-2.0-flash-001',
@@ -270,16 +270,16 @@ describe('Google GenAI integration', () => {
[GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE]: 10,
[GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE]: 22,
}),
-description: 'models gemini-2.0-flash-001 stream-response',
-op: 'gen_ai.models',
+description: 'generate_content gemini-2.0-flash-001 stream-response',
+op: 'gen_ai.generate_content',
origin: 'auto.ai.google_genai',
status: 'ok',
}),
// Without tools for comparison
expect.objectContaining({
data: expect.objectContaining({
-[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'models',
-[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.models',
+[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'generate_content',
+[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.generate_content',
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
[GEN_AI_SYSTEM_ATTRIBUTE]: 'google_genai',
[GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'gemini-2.0-flash-001',
@@ -289,8 +289,8 @@ describe('Google GenAI integration', () => {
[GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE]: 12,
[GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE]: 20,
}),
-description: 'models gemini-2.0-flash-001',
-op: 'gen_ai.models',
+description: 'generate_content gemini-2.0-flash-001',
+op: 'gen_ai.generate_content',
origin: 'auto.ai.google_genai',
status: 'ok',
}),
@@ -309,8 +309,8 @@ describe('Google GenAI integration', () => {
// First span - models.generateContentStream (streaming)
expect.objectContaining({
data: expect.objectContaining({
-[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'models',
-[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.models',
+[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'generate_content',
+[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.generate_content',
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
[GEN_AI_SYSTEM_ATTRIBUTE]: 'google_genai',
[GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'gemini-1.5-flash',
@@ -325,8 +325,8 @@ describe('Google GenAI integration', () => {
[GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE]: 12,
[GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE]: 22,
}),
-description: 'models gemini-1.5-flash stream-response',
-op: 'gen_ai.models',
+description: 'generate_content gemini-1.5-flash stream-response',
+op: 'gen_ai.generate_content',
origin: 'auto.ai.google_genai',
status: 'ok',
}),
@@ -367,24 +367,24 @@ describe('Google GenAI integration', () => {
// Fourth span - blocked content streaming
expect.objectContaining({
data: expect.objectContaining({
-[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'models',
-[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.models',
+[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'generate_content',
+[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.generate_content',
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
}),
-description: 'models blocked-model stream-response',
-op: 'gen_ai.models',
+description: 'generate_content blocked-model stream-response',
+op: 'gen_ai.generate_content',
origin: 'auto.ai.google_genai',
status: 'internal_error',
}),
// Fifth span - error handling for streaming
expect.objectContaining({
data: expect.objectContaining({
-[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'models',
-[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.models',
+[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'generate_content',
+[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.generate_content',
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
}),
-description: 'models error-model stream-response',
-op: 'gen_ai.models',
+description: 'generate_content error-model stream-response',
+op: 'gen_ai.generate_content',
origin: 'auto.ai.google_genai',
status: 'internal_error',
}),
@@ -397,8 +397,8 @@ describe('Google GenAI integration', () => {
// First span - models.generateContentStream (streaming) with PII
expect.objectContaining({
data: expect.objectContaining({
-[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'models',
-[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.models',
+[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'generate_content',
+[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.generate_content',
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
[GEN_AI_SYSTEM_ATTRIBUTE]: 'google_genai',
[GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'gemini-1.5-flash',
@@ -414,8 +414,8 @@ describe('Google GenAI integration', () => {
[GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE]: 12,
[GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE]: 22,
}),
-description: 'models gemini-1.5-flash stream-response',
-op: 'gen_ai.models',
+description: 'generate_content gemini-1.5-flash stream-response',
+op: 'gen_ai.generate_content',
origin: 'auto.ai.google_genai',
status: 'ok',
}),
@@ -461,33 +461,33 @@ describe('Google GenAI integration', () => {
// Fourth span - blocked content stream with PII
expect.objectContaining({
data: expect.objectContaining({
-[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'models',
-[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.models',
+[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'generate_content',
+[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.generate_content',
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
[GEN_AI_SYSTEM_ATTRIBUTE]: 'google_genai',
[GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'blocked-model',
[GEN_AI_REQUEST_TEMPERATURE_ATTRIBUTE]: 0.7,
[GEN_AI_REQUEST_MESSAGES_ATTRIBUTE]: expect.any(String), // Should include contents when recordInputs: true
[GEN_AI_RESPONSE_STREAMING_ATTRIBUTE]: true,
}),
-description: 'models blocked-model stream-response',
-op: 'gen_ai.models',
+description: 'generate_content blocked-model stream-response',
+op: 'gen_ai.generate_content',
origin: 'auto.ai.google_genai',
status: 'internal_error',
}),
// Fifth span - error handling for streaming with PII
expect.objectContaining({
data: expect.objectContaining({
-[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'models',
-[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.models',
+[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'generate_content',
+[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.generate_content',
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
[GEN_AI_SYSTEM_ATTRIBUTE]: 'google_genai',
[GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'error-model',
[GEN_AI_REQUEST_TEMPERATURE_ATTRIBUTE]: 0.7,
[GEN_AI_REQUEST_MESSAGES_ATTRIBUTE]: expect.any(String), // Should include contents when recordInputs: true
}),
-description: 'models error-model stream-response',
-op: 'gen_ai.models',
+description: 'generate_content error-model stream-response',
+op: 'gen_ai.generate_content',
origin: 'auto.ai.google_genai',
status: 'internal_error',
}),
@@ -525,8 +525,8 @@ describe('Google GenAI integration', () => {
// First call: Last message is large and gets truncated (only C's remain, D's are cropped)
expect.objectContaining({
data: expect.objectContaining({
-[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'models',
-[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.models',
+[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'generate_content',
+[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.generate_content',
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
[GEN_AI_SYSTEM_ATTRIBUTE]: 'google_genai',
[GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'gemini-1.5-flash',
@@ -535,16 +535,16 @@ describe('Google GenAI integration', () => {
/^\[\{"role":"user","parts":\[\{"text":"C+"\}\]\}\]$/,
),
}),
-description: 'models gemini-1.5-flash',
-op: 'gen_ai.models',
+description: 'generate_content gemini-1.5-flash',
+op: 'gen_ai.generate_content',
origin: 'auto.ai.google_genai',
status: 'ok',
}),
// Second call: Last message is small and kept without truncation
expect.objectContaining({
data: expect.objectContaining({
-[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'models',
-[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.models',
+[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'generate_content',
+[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.generate_content',
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
[GEN_AI_SYSTEM_ATTRIBUTE]: 'google_genai',
[GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'gemini-1.5-flash',
@@ -556,8 +556,8 @@ describe('Google GenAI integration', () => {
},
]),
}),
-description: 'models gemini-1.5-flash',
-op: 'gen_ai.models',
+description: 'generate_content gemini-1.5-flash',
+op: 'gen_ai.generate_content',
origin: 'auto.ai.google_genai',
status: 'ok',
}),
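The two truncation assertions above imply that only the last request message survives serialization and that an oversized last message is cropped to the size budget. The function below is a rough sketch of that implied behavior under those assumptions; the SDK's actual truncation logic is not part of this diff and may differ in its details.

// Rough sketch of the truncation behavior implied by the assertions above; not
// the SDK's actual implementation. Keeps only the last message and crops its
// text when it exceeds the character budget.
interface GenAiMessage {
  role: string;
  parts: { text: string }[];
}

function truncateGenAiMessages(messages: GenAiMessage[], maxChars: number): GenAiMessage[] {
  const last = messages[messages.length - 1];
  if (!last) {
    return [];
  }
  const text = last.parts.map(part => part.text).join('');
  // Crop the text when it is too large; small messages pass through unchanged.
  const cropped = text.length > maxChars ? text.slice(0, maxChars) : text;
  return [{ role: last.role, parts: [{ text: cropped }] }];
}

// Example: a huge last message of repeated 'C's is cropped, so the serialized value
// still matches a pattern like /^\[\{"role":"user","parts":\[\{"text":"C+"\}\]\}\]$/.
console.log(JSON.stringify(truncateGenAiMessages([{ role: 'user', parts: [{ text: 'C'.repeat(50_000) }] }], 20_000)).length);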
@@ -303,8 +303,7 @@ describe('LangChain integration', () => {
// First call: Direct Anthropic call made BEFORE LangChain import
// This should have Anthropic instrumentation (origin: 'auto.ai.anthropic')
const firstAnthropicSpan = spans.find(
-span =>
-span.description === 'messages claude-3-5-sonnet-20241022' && span.origin === 'auto.ai.anthropic',
+span => span.description === 'chat claude-3-5-sonnet-20241022' && span.origin === 'auto.ai.anthropic',
);

// Second call: LangChain call
@@ -317,8 +316,7 @@ describe('LangChain integration', () => {
// This should NOT have Anthropic instrumentation (skip works correctly)
// Count how many Anthropic spans we have - should be exactly 1
const anthropicSpans = spans.filter(
-span =>
-span.description === 'messages claude-3-5-sonnet-20241022' && span.origin === 'auto.ai.anthropic',
+span => span.description === 'chat claude-3-5-sonnet-20241022' && span.origin === 'auto.ai.anthropic',
);

// Verify the edge case limitation:
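The verification lines below this truncated hunk are not shown. As an illustration of the counting check the comments describe ("should be exactly 1"), a hypothetical shape of that assertion, not the file's actual code, could look like this:

// Minimal stand-in for the captured span shape used in the filters above.
interface SpanJSON {
  description: string;
  origin: string;
}

// Counts spans produced by the direct Anthropic instrumentation, mirroring the
// filter in the rendered hunk; the LangChain-driven call should not add a second one.
function countDirectAnthropicSpans(spans: SpanJSON[]): number {
  return spans.filter(
    span => span.description === 'chat claude-3-5-sonnet-20241022' && span.origin === 'auto.ai.anthropic',
  ).length;
}

// Hypothetical assertion shape:
// expect(countDirectAnthropicSpans(spans)).toBe(1);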