Skip to content

Commit 2861132

Browse files
committed
add tests
1 parent 0d02060 commit 2861132

File tree

6 files changed

+333
-0
lines changed

6 files changed

+333
-0
lines changed
Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
1+
import * as Sentry from '@sentry/node';
import { nodeContextIntegration } from '@sentry/node-core';
import { loggingTransport } from '@sentry-internal/node-integration-tests';

// Sentry setup for the "custom options" scenario: PII collection is turned
// off globally, while the Anthropic integration explicitly opts back in to
// recording inputs and outputs — the per-integration flags should win.
Sentry.init({
  dsn: 'https://[email protected]/1337',
  release: '1.0',
  transport: loggingTransport,
  tracesSampleRate: 1.0,
  // No PII by default…
  sendDefaultPii: false,
  integrations: [
    // …but the Anthropic integration is told to capture prompts and responses.
    Sentry.anthropicAIIntegration({ recordInputs: true, recordOutputs: true }),
    nodeContextIntegration(),
  ],
});
Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,15 @@
1+
import * as Sentry from '@sentry/node';
import { nodeContextIntegration } from '@sentry/node-core';
import { loggingTransport } from '@sentry-internal/node-integration-tests';

// Sentry setup for the sendDefaultPii: true scenario — the Anthropic
// integration uses its defaults, so recording of prompt inputs/outputs is
// driven entirely by the global PII flag.
Sentry.init({
  dsn: 'https://[email protected]/1337',
  release: '1.0',
  transport: loggingTransport,
  tracesSampleRate: 1.0,
  sendDefaultPii: true,
  integrations: [Sentry.anthropicAIIntegration(), nodeContextIntegration()],
});
Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
1+
import * as Sentry from '@sentry/node';
import { nodeContextIntegration } from '@sentry/node-core';
import { loggingTransport } from '@sentry-internal/node-integration-tests';

// Sentry setup for the default scenario: PII disabled and no integration
// options, so neither prompts nor responses should be recorded on spans.
Sentry.init({
  dsn: 'https://[email protected]/1337',
  release: '1.0',
  transport: loggingTransport,
  tracesSampleRate: 1.0,
  sendDefaultPii: false,
  integrations: [
    // Force-include the integration so it is active even without the SDK
    // auto-detecting the anthropic package.
    Sentry.anthropicAIIntegration(),
    nodeContextIntegration(),
  ],
});
Lines changed: 106 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,106 @@
1+
import { instrumentAnthropicAiClient } from '@sentry/core';
2+
import * as Sentry from '@sentry/node';
3+
4+
class MockAnthropic {
5+
constructor(config) {
6+
this.apiKey = config.apiKey;
7+
8+
// Create messages object with create and countTokens methods
9+
this.messages = {
10+
create: this._messagesCreate.bind(this),
11+
countTokens: this._messagesCountTokens.bind(this)
12+
};
13+
}
14+
15+
/**
16+
* Create a mock message
17+
*/
18+
async _messagesCreate(params) {
19+
// Simulate processing time
20+
await new Promise(resolve => setTimeout(resolve, 10));
21+
22+
if (params.model === 'error-model') {
23+
const error = new Error('Model not found');
24+
error.status = 404;
25+
error.headers = { 'x-request-id': 'mock-request-123' };
26+
throw error;
27+
}
28+
29+
return {
30+
id: 'msg_mock123',
31+
type: 'message',
32+
model: params.model,
33+
role: 'assistant',
34+
content: [
35+
{
36+
type: 'text',
37+
text: 'Hello from Anthropic mock!',
38+
},
39+
],
40+
stop_reason: 'end_turn',
41+
stop_sequence: null,
42+
usage: {
43+
input_tokens: 10,
44+
output_tokens: 15,
45+
},
46+
};
47+
}
48+
49+
async _messagesCountTokens(params) {
50+
// Simulate processing time
51+
await new Promise(resolve => setTimeout(resolve, 10));
52+
53+
if (params.model === 'error-model') {
54+
const error = new Error('Model not found');
55+
error.status = 404;
56+
error.headers = { 'x-request-id': 'mock-request-123' };
57+
throw error;
58+
}
59+
60+
// For countTokens, just return input_tokens to match implementation
61+
return {
62+
input_tokens: 15
63+
}
64+
}
65+
}
66+
67+
/**
 * Exercise the instrumented mock client inside a root span so the
 * integration-test runner can inspect the resulting transaction:
 *   1. a successful messages.create call,
 *   2. a messages.create call that rejects ('error-model'),
 *   3. a messages.countTokens call.
 */
async function run() {
  await Sentry.startSpan({ op: 'function', name: 'main' }, async () => {
    const client = instrumentAnthropicAiClient(new MockAnthropic({ apiKey: 'mock-api-key' }));

    // 1) Basic message completion.
    await client.messages.create({
      model: 'claude-3-haiku-20240307',
      system: 'You are a helpful assistant.',
      messages: [{ role: 'user', content: 'What is the capital of France?' }],
      temperature: 0.7,
      max_tokens: 100,
    });

    // 2) Error handling — the rejection is expected and swallowed here; the
    //    span should still record the failure.
    try {
      await client.messages.create({
        model: 'error-model',
        messages: [{ role: 'user', content: 'This will fail' }],
      });
    } catch {
      // Error is expected and handled
    }

    // 3) Token counting.
    await client.messages.countTokens({
      model: 'claude-3-haiku-20240307',
      messages: [{ role: 'user', content: 'What is the capital of France?' }],
    });
  });
}

run();
Lines changed: 176 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,176 @@
1+
import { afterAll, describe, expect } from 'vitest';
2+
import { cleanupChildProcesses, createEsmAndCjsTests } from '../../../utils/runner';
3+
4+
describe('Anthropic integration', () => {
  afterAll(() => {
    cleanupChildProcesses();
  });

  // Expected transaction when sendDefaultPii: false and the integration uses
  // its defaults: neither gen_ai.request.messages nor gen_ai.response.text
  // may appear on any span.
  const EXPECTED_TRANSACTION_DEFAULT_PII_FALSE = {
    transaction: 'main',
    spans: expect.arrayContaining([
      // First span - basic message completion without PII
      expect.objectContaining({
        data: {
          'gen_ai.operation.name': 'messages.create',
          'sentry.op': 'gen_ai.messages.create',
          'sentry.origin': 'auto.ai.anthropic',
          'gen_ai.system': 'anthropic',
          'gen_ai.request.model': 'claude-3-haiku-20240307',
          'gen_ai.request.temperature': 0.7,
          'gen_ai.request.max_tokens': 100,
          'gen_ai.response.model': 'claude-3-haiku-20240307',
          'gen_ai.response.id': 'msg_mock123',
          'gen_ai.usage.input_tokens': 10,
          'gen_ai.usage.output_tokens': 15,
          'gen_ai.usage.total_tokens': 25,
        },
        description: 'messages.create claude-3-haiku-20240307',
        op: 'gen_ai.messages.create',
        origin: 'auto.ai.anthropic',
        status: 'ok',
      }),
      // Second span - error handling (the 404 from the mock surfaces as span status)
      expect.objectContaining({
        data: {
          'gen_ai.operation.name': 'messages.create',
          'sentry.op': 'gen_ai.messages.create',
          'sentry.origin': 'auto.ai.anthropic',
          'gen_ai.system': 'anthropic',
          'gen_ai.request.model': 'error-model',
        },
        description: 'messages.create error-model',
        op: 'gen_ai.messages.create',
        origin: 'auto.ai.anthropic',
        status: 'unknown_error',
      }),
      // Third span - token counting (no response.text because recordOutputs=false by default)
      expect.objectContaining({
        data: {
          'gen_ai.operation.name': 'messages.countTokens',
          'sentry.op': 'gen_ai.messages.countTokens',
          'sentry.origin': 'auto.ai.anthropic',
          'gen_ai.system': 'anthropic',
          'gen_ai.request.model': 'claude-3-haiku-20240307',
        },
        description: 'messages.countTokens claude-3-haiku-20240307',
        op: 'gen_ai.messages.countTokens',
        origin: 'auto.ai.anthropic',
        status: 'ok',
      }),
    ]),
  };

  // Expected transaction when sendDefaultPii: true: request messages and
  // response text are recorded because the global PII flag turns on
  // recordInputs/recordOutputs by default.
  const EXPECTED_TRANSACTION_DEFAULT_PII_TRUE = {
    transaction: 'main',
    spans: expect.arrayContaining([
      // First span - basic message completion with PII
      expect.objectContaining({
        data: {
          'gen_ai.operation.name': 'messages.create',
          'sentry.op': 'gen_ai.messages.create',
          'sentry.origin': 'auto.ai.anthropic',
          'gen_ai.system': 'anthropic',
          'gen_ai.request.model': 'claude-3-haiku-20240307',
          'gen_ai.request.temperature': 0.7,
          'gen_ai.request.max_tokens': 100,
          'gen_ai.request.messages': '[{"role":"user","content":"What is the capital of France?"}]',
          'gen_ai.response.model': 'claude-3-haiku-20240307',
          'gen_ai.response.id': 'msg_mock123',
          'gen_ai.response.text': 'Hello from Anthropic mock!',
          'gen_ai.usage.input_tokens': 10,
          'gen_ai.usage.output_tokens': 15,
          'gen_ai.usage.total_tokens': 25,
        },
        description: 'messages.create claude-3-haiku-20240307',
        op: 'gen_ai.messages.create',
        origin: 'auto.ai.anthropic',
        status: 'ok',
      }),
      // Second span - error handling with PII (request messages still captured)
      expect.objectContaining({
        data: {
          'gen_ai.operation.name': 'messages.create',
          'sentry.op': 'gen_ai.messages.create',
          'sentry.origin': 'auto.ai.anthropic',
          'gen_ai.system': 'anthropic',
          'gen_ai.request.model': 'error-model',
          'gen_ai.request.messages': '[{"role":"user","content":"This will fail"}]',
        },
        description: 'messages.create error-model',
        op: 'gen_ai.messages.create',
        origin: 'auto.ai.anthropic',
        status: 'unknown_error',
      }),
      // Third span - token counting with PII (response.text is present because sendDefaultPii=true enables recordOutputs)
      expect.objectContaining({
        data: {
          'gen_ai.operation.name': 'messages.countTokens',
          'sentry.op': 'gen_ai.messages.countTokens',
          'sentry.origin': 'auto.ai.anthropic',
          'gen_ai.system': 'anthropic',
          'gen_ai.request.model': 'claude-3-haiku-20240307',
          'gen_ai.request.messages': '[{"role":"user","content":"What is the capital of France?"}]',
          'gen_ai.response.text': '15', // Only present because recordOutputs=true when sendDefaultPii=true
        },
        description: 'messages.countTokens claude-3-haiku-20240307',
        op: 'gen_ai.messages.countTokens',
        origin: 'auto.ai.anthropic',
        status: 'ok',
      }),
    ]),
  };

  // Expected transaction when recordInputs/recordOutputs are forced on via
  // integration options even though sendDefaultPii is false.
  const EXPECTED_TRANSACTION_WITH_OPTIONS = {
    transaction: 'main',
    spans: expect.arrayContaining([
      // Check that custom options are respected
      expect.objectContaining({
        data: expect.objectContaining({
          'gen_ai.request.messages': expect.any(String), // Should include messages when recordInputs: true
          'gen_ai.response.text': expect.any(String), // Should include response text when recordOutputs: true
        }),
      }),
      // Check token counting with options
      expect.objectContaining({
        data: expect.objectContaining({
          'gen_ai.operation.name': 'messages.countTokens',
          'gen_ai.request.messages': expect.any(String), // Should include messages when recordInputs: true
          'gen_ai.response.text': '15', // Present because recordOutputs=true is set in options
        }),
        op: 'gen_ai.messages.countTokens',
      }),
    ]),
  };

  // Each scenario is run once as ESM and once as CJS by the shared helper.
  createEsmAndCjsTests(__dirname, 'scenario.mjs', 'instrument.mjs', (createRunner, test) => {
    test('creates anthropic related spans with sendDefaultPii: false', async () => {
      await createRunner()
        .ignore('event')
        .expect({ transaction: EXPECTED_TRANSACTION_DEFAULT_PII_FALSE })
        .start()
        .completed();
    });
  });

  createEsmAndCjsTests(__dirname, 'scenario.mjs', 'instrument-with-pii.mjs', (createRunner, test) => {
    test('creates anthropic related spans with sendDefaultPii: true', async () => {
      await createRunner()
        .ignore('event')
        .expect({ transaction: EXPECTED_TRANSACTION_DEFAULT_PII_TRUE })
        .start()
        .completed();
    });
  });

  createEsmAndCjsTests(__dirname, 'scenario.mjs', 'instrument-with-options.mjs', (createRunner, test) => {
    test('creates anthropic related spans with custom options', async () => {
      await createRunner()
        .ignore('event')
        .expect({ transaction: EXPECTED_TRANSACTION_WITH_OPTIONS })
        .start()
        .completed();
    });
  });
});

packages/core/src/utils/anthropic-ai/index.ts

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@ import {
88
GEN_AI_OPERATION_NAME_ATTRIBUTE,
99
GEN_AI_PROMPT_ATTRIBUTE,
1010
GEN_AI_REQUEST_FREQUENCY_PENALTY_ATTRIBUTE,
11+
GEN_AI_REQUEST_MAX_TOKENS_ATTRIBUTE,
1112
GEN_AI_REQUEST_MESSAGES_ATTRIBUTE,
1213
GEN_AI_REQUEST_MODEL_ATTRIBUTE,
1314
GEN_AI_REQUEST_STREAM_ATTRIBUTE,
@@ -48,6 +49,7 @@ function extractRequestAttributes(args: unknown[], methodPath: string): Record<s
4849
if ('stream' in params) attributes[GEN_AI_REQUEST_STREAM_ATTRIBUTE] = params.stream;
4950
if ('top_k' in params) attributes[GEN_AI_REQUEST_TOP_K_ATTRIBUTE] = params.top_k;
5051
attributes[GEN_AI_REQUEST_FREQUENCY_PENALTY_ATTRIBUTE] = params.frequency_penalty;
52+
attributes[GEN_AI_REQUEST_MAX_TOKENS_ATTRIBUTE] = params.max_tokens;
5153
} else {
5254
attributes[GEN_AI_REQUEST_MODEL_ATTRIBUTE] = 'unknown';
5355
}

0 commit comments

Comments
 (0)