diff --git a/dev-packages/cloudflare-integration-tests/suites/tracing/anthropic-ai/index.ts b/dev-packages/cloudflare-integration-tests/suites/tracing/anthropic-ai/index.ts new file mode 100644 index 000000000000..08df5e24b05b --- /dev/null +++ b/dev-packages/cloudflare-integration-tests/suites/tracing/anthropic-ai/index.ts @@ -0,0 +1,32 @@ +import * as Sentry from '@sentry/cloudflare'; +import type { AnthropicAiClient } from '@sentry/core'; +import { MockAnthropic } from './mocks'; + +interface Env { + SENTRY_DSN: string; +} + +const mockClient = new MockAnthropic({ + apiKey: 'mock-api-key', +}); + +const client: AnthropicAiClient = Sentry.instrumentAnthropicAiClient(mockClient); + +export default Sentry.withSentry( + (env: Env) => ({ + dsn: env.SENTRY_DSN, + tracesSampleRate: 1.0, + }), + { + async fetch(_request, _env, _ctx) { + const response = await client.messages?.create({ + model: 'claude-3-haiku-20240307', + messages: [{ role: 'user', content: 'What is the capital of France?' }], + temperature: 0.7, + max_tokens: 100, + }); + + return new Response(JSON.stringify(response)); + }, + }, +); diff --git a/dev-packages/cloudflare-integration-tests/suites/tracing/anthropic-ai/mocks.ts b/dev-packages/cloudflare-integration-tests/suites/tracing/anthropic-ai/mocks.ts new file mode 100644 index 000000000000..cff87ca84cbc --- /dev/null +++ b/dev-packages/cloudflare-integration-tests/suites/tracing/anthropic-ai/mocks.ts @@ -0,0 +1,68 @@ +import type { AnthropicAiClient, AnthropicAiResponse } from '@sentry/core'; + +export class MockAnthropic implements AnthropicAiClient { + public messages: { + create: (...args: unknown[]) => Promise<AnthropicAiResponse>; + countTokens: (...args: unknown[]) => Promise<AnthropicAiResponse>; + }; + public models: { + list: (...args: unknown[]) => Promise<AnthropicAiResponse>; + get: (...args: unknown[]) => Promise<AnthropicAiResponse>; + }; + public completions: { + create: (...args: unknown[]) => Promise<AnthropicAiResponse>; + }; + public apiKey: string; + + public constructor(config: { apiKey: string }) { + this.apiKey = config.apiKey; + 
+ // Main focus: messages.create functionality + this.messages = { + create: async (...args: unknown[]) => { + const params = args[0] as { model: string; stream?: boolean }; + // Simulate processing time + await new Promise(resolve => setTimeout(resolve, 10)); + + if (params.model === 'error-model') { + const error = new Error('Model not found'); + (error as unknown as { status: number }).status = 404; + (error as unknown as { headers: Record<string, string> }).headers = { 'x-request-id': 'mock-request-123' }; + throw error; + } + + return { + id: 'msg_mock123', + type: 'message', + role: 'assistant', + model: params.model, + content: [ + { + type: 'text', + text: 'Hello from Anthropic mock!', + }, + ], + stop_reason: 'end_turn', + stop_sequence: null, + usage: { + input_tokens: 10, + output_tokens: 15, + cache_creation_input_tokens: 0, + cache_read_input_tokens: 0, + }, + }; + }, + countTokens: async (..._args: unknown[]) => ({ id: 'mock', type: 'model', model: 'mock', input_tokens: 0 }), + }; + + // Minimal implementations for required interface compliance + this.models = { + list: async (..._args: unknown[]) => ({ id: 'mock', type: 'model', model: 'mock' }), + get: async (..._args: unknown[]) => ({ id: 'mock', type: 'model', model: 'mock' }), + }; + + this.completions = { + create: async (..._args: unknown[]) => ({ id: 'mock', type: 'completion', model: 'mock' }), + }; + } +} diff --git a/dev-packages/cloudflare-integration-tests/suites/tracing/anthropic-ai/test.ts b/dev-packages/cloudflare-integration-tests/suites/tracing/anthropic-ai/test.ts new file mode 100644 index 000000000000..13966caaf460 --- /dev/null +++ b/dev-packages/cloudflare-integration-tests/suites/tracing/anthropic-ai/test.ts @@ -0,0 +1,41 @@ +import { expect, it } from 'vitest'; +import { createRunner } from '../../../runner'; + +// These tests are not exhaustive because the instrumentation is +// already tested in the node integration tests and we merely +// want to test that the instrumentation does not 
break in our +// cloudflare SDK. + +it('traces a basic message creation request', async () => { + const runner = createRunner(__dirname) + .ignore('event') + .expect(envelope => { + const transactionEvent = envelope[1]?.[0]?.[1] as any; + + expect(transactionEvent.transaction).toBe('GET /'); + expect(transactionEvent.spans).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + data: expect.objectContaining({ + 'gen_ai.operation.name': 'messages', + 'sentry.op': 'gen_ai.messages', + 'sentry.origin': 'auto.ai.anthropic', + 'gen_ai.system': 'anthropic', + 'gen_ai.request.model': 'claude-3-haiku-20240307', + 'gen_ai.request.temperature': 0.7, + 'gen_ai.response.model': 'claude-3-haiku-20240307', + 'gen_ai.response.id': 'msg_mock123', + 'gen_ai.usage.input_tokens': 10, + 'gen_ai.usage.output_tokens': 15, + }), + description: 'messages claude-3-haiku-20240307', + op: 'gen_ai.messages', + origin: 'auto.ai.anthropic', + }), + ]), + ); + }) + .start(); + await runner.makeRequest('get', '/'); + await runner.completed(); +}); diff --git a/dev-packages/cloudflare-integration-tests/suites/tracing/anthropic-ai/wrangler.jsonc b/dev-packages/cloudflare-integration-tests/suites/tracing/anthropic-ai/wrangler.jsonc new file mode 100644 index 000000000000..d6be01281f0c --- /dev/null +++ b/dev-packages/cloudflare-integration-tests/suites/tracing/anthropic-ai/wrangler.jsonc @@ -0,0 +1,6 @@ +{ + "name": "worker-name", + "compatibility_date": "2025-06-17", + "main": "index.ts", + "compatibility_flags": ["nodejs_compat"], +} diff --git a/packages/cloudflare/src/index.ts b/packages/cloudflare/src/index.ts index 23e902c4dc2e..5272d475572d 100644 --- a/packages/cloudflare/src/index.ts +++ b/packages/cloudflare/src/index.ts @@ -70,6 +70,7 @@ export { // eslint-disable-next-line deprecation/deprecation inboundFiltersIntegration, instrumentOpenAiClient, + instrumentAnthropicAiClient, eventFiltersIntegration, linkedErrorsIntegration, requestDataIntegration, diff --git 
a/packages/core/src/index.ts b/packages/core/src/index.ts index ef61364ab3f0..d54ac9d435f2 100644 --- a/packages/core/src/index.ts +++ b/packages/core/src/index.ts @@ -131,7 +131,12 @@ export { OPENAI_INTEGRATION_NAME } from './utils/openai/constants'; export { instrumentAnthropicAiClient } from './utils/anthropic-ai'; export { ANTHROPIC_AI_INTEGRATION_NAME } from './utils/anthropic-ai/constants'; export type { OpenAiClient, OpenAiOptions, InstrumentedMethod } from './utils/openai/types'; -export type { AnthropicAiClient, AnthropicAiOptions, AnthropicAiInstrumentedMethod } from './utils/anthropic-ai/types'; +export type { + AnthropicAiClient, + AnthropicAiOptions, + AnthropicAiInstrumentedMethod, + AnthropicAiResponse, +} from './utils/anthropic-ai/types'; export type { FeatureFlag } from './utils/featureFlags'; export { diff --git a/packages/core/src/utils/anthropic-ai/index.ts b/packages/core/src/utils/anthropic-ai/index.ts index 563724d98c5c..a771dff4c75d 100644 --- a/packages/core/src/utils/anthropic-ai/index.ts +++ b/packages/core/src/utils/anthropic-ai/index.ts @@ -27,7 +27,6 @@ import { buildMethodPath, getFinalOperationName, getSpanOperation, setTokenUsage import { ANTHROPIC_AI_INTEGRATION_NAME } from './constants'; import { instrumentStream } from './streaming'; import type { - AnthropicAiClient, AnthropicAiInstrumentedMethod, AnthropicAiIntegration, AnthropicAiOptions, @@ -304,7 +303,7 @@ function instrumentMethod( /** * Create a deep proxy for Anthropic AI client instrumentation */ -function createDeepProxy<T extends AnthropicAiClient>(target: T, currentPath = '', options?: AnthropicAiOptions): T { +function createDeepProxy<T extends object>(target: T, currentPath = '', options?: AnthropicAiOptions): T { return new Proxy(target, { get(obj: object, prop: string): unknown { const value = (obj as Record<string, unknown>)[prop]; @@ -332,11 +331,11 @@ function createDeepProxy<T extends AnthropicAiClient>(target: T, currentPath = ' * Instrument an Anthropic AI client with Sentry tracing * Can be used across Node.js, Cloudflare Workers, and Vercel Edge 
* - * @template T - The type of the client that extends AnthropicAiClient + * @template T - The type of the client that extends object * @param client - The Anthropic AI client to instrument * @param options - Optional configuration for recording inputs and outputs * @returns The instrumented client with the same type as the input */ -export function instrumentAnthropicAiClient<T extends AnthropicAiClient>(client: T, options?: AnthropicAiOptions): T { +export function instrumentAnthropicAiClient<T extends object>(client: T, options?: AnthropicAiOptions): T { return createDeepProxy(client, '', options); } diff --git a/packages/vercel-edge/src/index.ts b/packages/vercel-edge/src/index.ts index e2fd90eda5b7..374e5d8ef606 100644 --- a/packages/vercel-edge/src/index.ts +++ b/packages/vercel-edge/src/index.ts @@ -70,6 +70,7 @@ export { // eslint-disable-next-line deprecation/deprecation inboundFiltersIntegration, instrumentOpenAiClient, + instrumentAnthropicAiClient, eventFiltersIntegration, linkedErrorsIntegration, requestDataIntegration,