
Commit 444bd82

feat(node): Add Anthropic AI integration (#17348)
This PR adds official support for instrumenting Anthropic AI SDK calls in Node with Sentry tracing, following the OpenTelemetry semantic conventions for Generative AI.

We instrument the following Anthropic AI SDK methods:

- `messages.create`
- `messages.countTokens`
- `models.get`
- `models.retrieve`
- `completions.create`

Supported in:

- Node.js (ESM and CJS)
- Cloudflare Workers and Vercel Edge Functions exports will be added in a follow-up.

`anthropicAIIntegration()` accepts the following options:

```ts
// The integration respects your sendDefaultPii client option
interface AnthropicAiOptions {
  recordInputs?: boolean; // Whether to record prompt messages
  recordOutputs?: boolean; // Whether to record response text
}
```
1 parent 382af9b commit 444bd82
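Based on the options described above, a minimal application setup would look roughly like the following sketch (the DSN is a placeholder; when `recordInputs`/`recordOutputs` are omitted, the integration falls back to the `sendDefaultPii` client option):

```js
import * as Sentry from '@sentry/node';

Sentry.init({
  dsn: '__YOUR_DSN__', // placeholder
  tracesSampleRate: 1.0,
  integrations: [
    // Explicitly record prompts and responses, regardless of sendDefaultPii
    Sentry.anthropicAIIntegration({
      recordInputs: true,
      recordOutputs: true,
    }),
  ],
});
```

The integration test apps added below exercise this configuration with `sendDefaultPii` both enabled and disabled.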

25 files changed (+1023, −5 lines)

.size-limit.js

Lines changed: 1 addition & 1 deletion
```diff
@@ -233,7 +233,7 @@ module.exports = [
     import: createImport('init'),
     ignore: [...builtinModules, ...nodePrefixedBuiltinModules],
     gzip: true,
-    limit: '148 KB',
+    limit: '149 KB',
   },
   {
     name: '@sentry/node - without tracing',
```
Lines changed: 18 additions & 0 deletions
```js
import * as Sentry from '@sentry/node';
import { nodeContextIntegration } from '@sentry/node-core';
import { loggingTransport } from '@sentry-internal/node-integration-tests';

Sentry.init({
  dsn: 'https://[email protected]/1337',
  release: '1.0',
  tracesSampleRate: 1.0,
  sendDefaultPii: false,
  transport: loggingTransport,
  integrations: [
    Sentry.anthropicAIIntegration({
      recordInputs: true,
      recordOutputs: true,
    }),
    nodeContextIntegration(),
  ],
});
```
Lines changed: 15 additions & 0 deletions
```js
import * as Sentry from '@sentry/node';
import { nodeContextIntegration } from '@sentry/node-core';
import { loggingTransport } from '@sentry-internal/node-integration-tests';

Sentry.init({
  dsn: 'https://[email protected]/1337',
  release: '1.0',
  tracesSampleRate: 1.0,
  sendDefaultPii: true,
  transport: loggingTransport,
  integrations: [
    Sentry.anthropicAIIntegration(),
    nodeContextIntegration(),
  ],
});
```
Lines changed: 16 additions & 0 deletions
```js
import * as Sentry from '@sentry/node';
import { nodeContextIntegration } from '@sentry/node-core';
import { loggingTransport } from '@sentry-internal/node-integration-tests';

Sentry.init({
  dsn: 'https://[email protected]/1337',
  release: '1.0',
  tracesSampleRate: 1.0,
  sendDefaultPii: false,
  transport: loggingTransport,
  // Force include the integration
  integrations: [
    Sentry.anthropicAIIntegration(),
    nodeContextIntegration(),
  ],
});
```
Lines changed: 119 additions & 0 deletions
```js
import { instrumentAnthropicAiClient } from '@sentry/core';
import * as Sentry from '@sentry/node';

class MockAnthropic {
  constructor(config) {
    this.apiKey = config.apiKey;

    // Create messages object with create and countTokens methods
    this.messages = {
      create: this._messagesCreate.bind(this),
      countTokens: this._messagesCountTokens.bind(this),
    };

    this.models = {
      retrieve: this._modelsRetrieve.bind(this),
    };
  }

  /**
   * Create a mock message
   */
  async _messagesCreate(params) {
    // Simulate processing time
    await new Promise(resolve => setTimeout(resolve, 10));

    if (params.model === 'error-model') {
      const error = new Error('Model not found');
      error.status = 404;
      error.headers = { 'x-request-id': 'mock-request-123' };
      throw error;
    }

    return {
      id: 'msg_mock123',
      type: 'message',
      model: params.model,
      role: 'assistant',
      content: [
        {
          type: 'text',
          text: 'Hello from Anthropic mock!',
        },
      ],
      stop_reason: 'end_turn',
      stop_sequence: null,
      usage: {
        input_tokens: 10,
        output_tokens: 15,
      },
    };
  }

  async _messagesCountTokens() {
    // Simulate processing time
    await new Promise(resolve => setTimeout(resolve, 10));

    // For countTokens, just return input_tokens
    return {
      input_tokens: 15,
    };
  }

  async _modelsRetrieve(modelId) {
    // Simulate processing time
    await new Promise(resolve => setTimeout(resolve, 10));

    // Match what the actual implementation would return
    return {
      id: modelId,
      name: modelId,
      created_at: 1715145600,
      model: modelId, // Add model field to match the check in addResponseAttributes
    };
  }
}

async function run() {
  await Sentry.startSpan({ op: 'function', name: 'main' }, async () => {
    const mockClient = new MockAnthropic({
      apiKey: 'mock-api-key',
    });

    const client = instrumentAnthropicAiClient(mockClient);

    // First test: basic message completion
    await client.messages.create({
      model: 'claude-3-haiku-20240307',
      system: 'You are a helpful assistant.',
      messages: [{ role: 'user', content: 'What is the capital of France?' }],
      temperature: 0.7,
      max_tokens: 100,
    });

    // Second test: error handling
    try {
      await client.messages.create({
        model: 'error-model',
        messages: [{ role: 'user', content: 'This will fail' }],
      });
    } catch {
      // Error is expected and handled
    }

    // Third test: count tokens with cached tokens
    await client.messages.countTokens({
      model: 'claude-3-haiku-20240307',
      messages: [{ role: 'user', content: 'What is the capital of France?' }],
    });

    // Fourth test: models.retrieve
    await client.models.retrieve('claude-3-haiku-20240307');
  });
}

run();
```
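The scenario above wraps a mock client; wrapping a real client from the `@anthropic-ai/sdk` package should follow the same pattern, since `instrumentAnthropicAiClient` only relies on the client exposing the instrumented methods. A hypothetical sketch (the real SDK import and API-key handling are assumptions, not part of this commit):

```js
import Anthropic from '@anthropic-ai/sdk';
import { instrumentAnthropicAiClient } from '@sentry/core';

// Wrap a real Anthropic client instead of the mock used above (assumption:
// the real client exposes the same messages/models methods as the mock).
const anthropic = instrumentAnthropicAiClient(
  new Anthropic({ apiKey: process.env.ANTHROPIC_API_KEY }),
);

// Calls through the wrapped client are traced as Sentry spans.
await anthropic.messages.create({
  model: 'claude-3-haiku-20240307',
  max_tokens: 100,
  messages: [{ role: 'user', content: 'Say hello.' }],
});
```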
