diff --git a/CHANGELOG.md b/CHANGELOG.md index 10d2467b0c9d..950e60c1191c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -25,6 +25,22 @@ Sentry.init({ Spans matching the filter criteria will not be recorded. Potential child spans of filtered spans will be re-parented, if possible. +- **feat(cloudflare,vercel-edge): Add support for OpenAI instrumentation ([#17338](https://github.com/getsentry/sentry-javascript/pull/17338))** + +Adds support for OpenAI manual instrumentation in `@sentry/cloudflare` and `@sentry/vercel-edge`. + +To instrument the OpenAI client, wrap it with `Sentry.instrumentOpenAiClient` and set recording settings. + +```js +import * as Sentry from '@sentry/cloudflare'; +import OpenAI from 'openai'; + +const openai = new OpenAI(); +const client = Sentry.instrumentOpenAiClient(openai, { recordInputs: true, recordOutputs: true }); + +// use the wrapped client +``` + ## 10.1.0 - feat(nuxt): Align build-time options to follow bundler plugins structure ([#17255](https://github.com/getsentry/sentry-javascript/pull/17255)) diff --git a/dev-packages/cloudflare-integration-tests/suites/tracing/openai/index.ts b/dev-packages/cloudflare-integration-tests/suites/tracing/openai/index.ts new file mode 100644 index 000000000000..8b21b479ce80 --- /dev/null +++ b/dev-packages/cloudflare-integration-tests/suites/tracing/openai/index.ts @@ -0,0 +1,34 @@ +import * as Sentry from '@sentry/cloudflare'; +import { MockOpenAi } from './mocks'; + +interface Env { + SENTRY_DSN: string; +} + +const mockClient = new MockOpenAi({ + apiKey: 'mock-api-key', +}); + +const client = Sentry.instrumentOpenAiClient(mockClient); + +export default Sentry.withSentry( + (env: Env) => ({ + dsn: env.SENTRY_DSN, + tracesSampleRate: 1.0, + }), + { + async fetch(_request, _env, _ctx) { + const response = await client.chat?.completions?.create({ + model: 'gpt-3.5-turbo', + messages: [ + { role: 'system', content: 'You are a helpful assistant.' 
}, + { role: 'user', content: 'What is the capital of France?' }, + ], + temperature: 0.7, + max_tokens: 100, + }); + + return new Response(JSON.stringify(response)); + }, + }, +); diff --git a/dev-packages/cloudflare-integration-tests/suites/tracing/openai/mocks.ts b/dev-packages/cloudflare-integration-tests/suites/tracing/openai/mocks.ts new file mode 100644 index 000000000000..cca72d5bd37d --- /dev/null +++ b/dev-packages/cloudflare-integration-tests/suites/tracing/openai/mocks.ts @@ -0,0 +1,50 @@ +import type { OpenAiClient } from '@sentry/core'; + +export class MockOpenAi implements OpenAiClient { + public chat?: Record<string, unknown>; + public apiKey: string; + + public constructor(config: { apiKey: string }) { + this.apiKey = config.apiKey; + + this.chat = { + completions: { + create: async (...args: unknown[]) => { + const params = args[0] as { model: string; stream?: boolean }; + // Simulate processing time + await new Promise(resolve => setTimeout(resolve, 10)); + + if (params.model === 'error-model') { + const error = new Error('Model not found'); + (error as unknown as { status: number }).status = 404; + (error as unknown as { headers: Record<string, string> }).headers = { 'x-request-id': 'mock-request-123' }; + throw error; + } + + return { + id: 'chatcmpl-mock123', + object: 'chat.completion', + created: 1677652288, + model: params.model, + system_fingerprint: 'fp_44709d6fcb', + choices: [ + { + index: 0, + message: { + role: 'assistant', + content: 'Hello from OpenAI mock!', + }, + finish_reason: 'stop', + }, + ], + usage: { + prompt_tokens: 10, + completion_tokens: 15, + total_tokens: 25, + }, + }; + }, + }, + }; + } +} diff --git a/dev-packages/cloudflare-integration-tests/suites/tracing/openai/test.ts b/dev-packages/cloudflare-integration-tests/suites/tracing/openai/test.ts new file mode 100644 index 000000000000..1dc4ca077665 --- /dev/null +++ b/dev-packages/cloudflare-integration-tests/suites/tracing/openai/test.ts @@ -0,0 +1,42 @@ +import { expect, it } from 'vitest';
+import { createRunner } from '../../../runner'; + +// These tests are not exhaustive because the instrumentation is +// already tested in the node integration tests and we merely +// want to test that the instrumentation does not break in our +// cloudflare SDK. + +it('traces a basic chat completion request', async () => { + const runner = createRunner(__dirname) + .ignore('event') + .expect(envelope => { + const transactionEvent = envelope[1]?.[0]?.[1]; + + expect(transactionEvent.transaction).toBe('GET /'); + expect(transactionEvent.spans).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + data: expect.objectContaining({ + 'gen_ai.operation.name': 'chat', + 'sentry.op': 'gen_ai.chat', + 'gen_ai.system': 'openai', + 'gen_ai.request.model': 'gpt-3.5-turbo', + 'gen_ai.request.temperature': 0.7, + 'gen_ai.response.model': 'gpt-3.5-turbo', + 'gen_ai.response.id': 'chatcmpl-mock123', + 'gen_ai.usage.input_tokens': 10, + 'gen_ai.usage.output_tokens': 15, + 'gen_ai.usage.total_tokens': 25, + 'gen_ai.response.finish_reasons': '["stop"]', + }), + description: 'chat gpt-3.5-turbo', + op: 'gen_ai.chat', + origin: 'manual', + }), + ]), + ); + }) + .start(); + await runner.makeRequest('get', '/'); + await runner.completed(); +}); diff --git a/dev-packages/cloudflare-integration-tests/suites/tracing/openai/wrangler.jsonc b/dev-packages/cloudflare-integration-tests/suites/tracing/openai/wrangler.jsonc new file mode 100644 index 000000000000..24fb2861023d --- /dev/null +++ b/dev-packages/cloudflare-integration-tests/suites/tracing/openai/wrangler.jsonc @@ -0,0 +1,6 @@ +{ + "name": "worker-name", + "compatibility_date": "2025-06-17", + "main": "index.ts", + "compatibility_flags": ["nodejs_compat"] +} diff --git a/dev-packages/cloudflare-integration-tests/vite.config.mts b/dev-packages/cloudflare-integration-tests/vite.config.mts index cfa15b12c3f1..a80bbbf63f32 100644 --- a/dev-packages/cloudflare-integration-tests/vite.config.mts +++ 
b/dev-packages/cloudflare-integration-tests/vite.config.mts @@ -22,6 +22,12 @@ export default defineConfig({ // already run in their own processes. We use threads instead because the // overhead is significantly less. pool: 'threads', + // Run tests sequentially to avoid port conflicts with wrangler dev processes + poolOptions: { + threads: { + singleThread: true, + }, + }, reporters: process.env.DEBUG ? ['default', { summary: false }] : process.env.GITHUB_ACTIONS diff --git a/packages/cloudflare/src/index.ts b/packages/cloudflare/src/index.ts index a5bb99d40818..23e902c4dc2e 100644 --- a/packages/cloudflare/src/index.ts +++ b/packages/cloudflare/src/index.ts @@ -69,6 +69,7 @@ export { functionToStringIntegration, // eslint-disable-next-line deprecation/deprecation inboundFiltersIntegration, + instrumentOpenAiClient, eventFiltersIntegration, linkedErrorsIntegration, requestDataIntegration, diff --git a/packages/core/src/utils/openai/index.ts b/packages/core/src/utils/openai/index.ts index 8bd1c3625782..3fb4f0d16fce 100644 --- a/packages/core/src/utils/openai/index.ts +++ b/packages/core/src/utils/openai/index.ts @@ -24,7 +24,6 @@ import type { ChatCompletionChunk, InstrumentedMethod, OpenAiChatCompletionObject, - OpenAiClient, OpenAiIntegration, OpenAiOptions, OpenAiResponse, @@ -294,7 +293,7 @@ function instrumentMethod( /** * Create a deep proxy for OpenAI client instrumentation */ -function createDeepProxy(target: object, currentPath = '', options?: OpenAiOptions): OpenAiClient { +function createDeepProxy<T extends object>(target: T, currentPath = '', options?: OpenAiOptions): T { return new Proxy(target, { get(obj: object, prop: string): unknown { const value = (obj as Record<string, unknown>)[prop]; @@ -316,13 +315,13 @@ function createDeepProxy(target: object, currentPath = '', options?: OpenAiOptio return value; }, - }); + }) as T; } /** * Instrument an OpenAI client with Sentry tracing * Can be used across Node.js, Cloudflare Workers, and Vercel Edge */ -export function
instrumentOpenAiClient(client: OpenAiClient, options?: OpenAiOptions): OpenAiClient { +export function instrumentOpenAiClient<T extends object>(client: T, options?: OpenAiOptions): T { return createDeepProxy(client, '', options); } diff --git a/packages/core/src/utils/openai/types.ts b/packages/core/src/utils/openai/types.ts index 7ac8fb8d7b91..daa478db4ba6 100644 --- a/packages/core/src/utils/openai/types.ts +++ b/packages/core/src/utils/openai/types.ts @@ -15,11 +15,11 @@ export type AttributeValue = export interface OpenAiOptions { /** - * Enable or disable input recording. Enabled if `sendDefaultPii` is `true` + * Enable or disable input recording. */ recordInputs?: boolean; /** - * Enable or disable output recording. Enabled if `sendDefaultPii` is `true` + * Enable or disable output recording. */ recordOutputs?: boolean; } diff --git a/packages/vercel-edge/src/index.ts b/packages/vercel-edge/src/index.ts index 5325d1e62391..e2fd90eda5b7 100644 --- a/packages/vercel-edge/src/index.ts +++ b/packages/vercel-edge/src/index.ts @@ -69,6 +69,7 @@ export { functionToStringIntegration, // eslint-disable-next-line deprecation/deprecation inboundFiltersIntegration, + instrumentOpenAiClient, eventFiltersIntegration, linkedErrorsIntegration, requestDataIntegration,