-
-
Notifications
You must be signed in to change notification settings - Fork 1.7k
feat(core,node): Add instrumentation for GoogleGenAI
#17625
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 6 commits
7aaece3
f3d5940
251fccd
0c5648e
275a9a7
6fb278a
afa472c
b1db2bb
7f9b83a
d3053a3
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,23 @@ | ||
import * as Sentry from '@sentry/node';
import { loggingTransport } from '@sentry-internal/node-integration-tests';

// Sentry setup for the "custom options" scenario: PII capture is globally
// disabled, but the Google GenAI integration explicitly opts back in to
// recording prompts and responses via recordInputs/recordOutputs.
Sentry.init({
  dsn: 'https://[email protected]/1337',
  release: '1.0',
  tracesSampleRate: 1.0,
  sendDefaultPii: false,
  transport: loggingTransport,
  integrations: [
    Sentry.googleGenAIIntegration({
      recordInputs: true,
      recordOutputs: true,
    }),
  ],
  // Drop transactions generated by the mock express server so that only the
  // instrumented GenAI transaction reaches the assertions.
  beforeSendTransaction: txnEvent => (txnEvent.transaction.includes('/v1beta/') ? null : txnEvent),
});
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,18 @@ | ||
import * as Sentry from '@sentry/node';
import { loggingTransport } from '@sentry-internal/node-integration-tests';

// Sentry setup for the "sendDefaultPii: true" scenario: the Google GenAI
// integration runs with its defaults and inherits PII recording from the
// global flag.
Sentry.init({
  dsn: 'https://[email protected]/1337',
  release: '1.0',
  tracesSampleRate: 1.0,
  sendDefaultPii: true,
  transport: loggingTransport,
  integrations: [Sentry.googleGenAIIntegration()],
  // Drop transactions generated by the mock express server so that only the
  // instrumented GenAI transaction reaches the assertions.
  beforeSendTransaction: txnEvent => (txnEvent.transaction.includes('/v1beta/') ? null : txnEvent),
});
Original file line number | Diff line number | Diff line change | ||
---|---|---|---|---|
@@ -0,0 +1,18 @@ | ||||
import * as Sentry from '@sentry/node';
import { loggingTransport } from '@sentry-internal/node-integration-tests';

// Sentry setup for the default scenario: sendDefaultPii is false and the
// Google GenAI integration runs with its defaults, so no prompts or
// responses should be attached to spans.
//
// NOTE(review): the visible span was corrupted — it contained a duplicated
// `integrations: [...]` line and the Sentry.init call was never closed
// (the diff header declares 18 added lines, matching the sibling
// instrument-with-pii.mjs). Reconstructed to the complete, valid config.
Sentry.init({
  dsn: 'https://[email protected]/1337',
  release: '1.0',
  tracesSampleRate: 1.0,
  sendDefaultPii: false,
  transport: loggingTransport,
  integrations: [Sentry.googleGenAIIntegration()],
  beforeSendTransaction: event => {
    // Filter out mock express server transactions
    if (event.transaction.includes('/v1beta/')) {
      return null;
    }
    return event;
  },
});
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,109 @@ | ||
import { GoogleGenAI } from '@google/genai';
import * as Sentry from '@sentry/node';
import express from 'express';

const PORT = 3333;

/**
 * Starts a local express app that mimics the Google GenAI REST endpoint.
 *
 * Requests for the magic model name "error-model" receive a 404 so the
 * instrumentation's error path can be exercised; every other model gets a
 * canned successful completion with fixed token usage numbers.
 *
 * @returns {import('http').Server} the listening HTTP server.
 */
function startMockGoogleGenAIServer() {
  const app = express();
  app.use(express.json());

  app.post('/v1beta/models/:model\\:generateContent', (req, res) => {
    const { model } = req.params;

    // Simulated failure for the error-handling test case.
    if (model === 'error-model') {
      res.status(404).set('x-request-id', 'mock-request-123').end('Model not found');
      return;
    }

    // Canned success payload; the token counts below are what the test
    // assertions expect (8 / 12 / 20).
    res.send({
      candidates: [
        {
          content: {
            parts: [{ text: 'Mock response from Google GenAI!' }],
            role: 'model',
          },
          finishReason: 'stop',
          index: 0,
        },
      ],
      usageMetadata: {
        promptTokenCount: 8,
        candidatesTokenCount: 12,
        totalTokenCount: 20,
      },
    });
  });

  return app.listen(PORT);
}

/**
 * Drives the instrumented GoogleGenAI client against the mock server inside
 * a root span, covering the chat flow, a direct generateContent call, and
 * an expected error.
 */
async function run() {
  const server = startMockGoogleGenAIServer();

  await Sentry.startSpan({ op: 'function', name: 'main' }, async () => {
    const client = new GoogleGenAI({
      apiKey: 'mock-api-key',
      httpOptions: { baseUrl: `http://localhost:${PORT}` },
    });

    // Test 1: chats.create and sendMessage flow
    const chat = client.chats.create({
      model: 'gemini-1.5-pro',
      config: {
        temperature: 0.8,
        topP: 0.9,
        maxOutputTokens: 150,
      },
      history: [
        {
          role: 'user',
          parts: [{ text: 'Hello, how are you?' }],
        },
      ],
    });

    await chat.sendMessage({ message: 'Tell me a joke' });

    // Test 2: models.generateContent
    await client.models.generateContent({
      model: 'gemini-1.5-flash',
      config: {
        temperature: 0.7,
        topP: 0.9,
        maxOutputTokens: 100,
      },
      contents: [
        {
          role: 'user',
          parts: [{ text: 'What is the capital of France?' }],
        },
      ],
    });

    // Test 3: Error handling — the mock responds 404 for this model.
    try {
      await client.models.generateContent({
        model: 'error-model',
        contents: [
          {
            role: 'user',
            parts: [{ text: 'This will fail' }],
          },
        ],
      });
    } catch {
      // Expected error
    }
  });

  server.close();
}

run();
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,205 @@ | ||
import { afterAll, describe, expect } from 'vitest'; | ||
import { cleanupChildProcesses, createEsmAndCjsTests } from '../../../utils/runner'; | ||
|
||
describe('Google GenAI integration', () => { | ||
afterAll(() => { | ||
cleanupChildProcesses(); | ||
}); | ||
|
||
const EXPECTED_TRANSACTION_DEFAULT_PII_FALSE = { | ||
transaction: 'main', | ||
spans: expect.arrayContaining([ | ||
// First span - chats.create | ||
expect.objectContaining({ | ||
data: { | ||
'gen_ai.operation.name': 'chat', | ||
'sentry.op': 'gen_ai.chat', | ||
'sentry.origin': 'auto.ai.google_genai', | ||
'gen_ai.system': 'google_genai', | ||
'gen_ai.request.model': 'gemini-1.5-pro', | ||
'gen_ai.request.temperature': 0.8, | ||
'gen_ai.request.top_p': 0.9, | ||
'gen_ai.request.max_tokens': 150, | ||
}, | ||
description: 'chat gemini-1.5-pro create', | ||
op: 'gen_ai.chat', | ||
origin: 'auto.ai.google_genai', | ||
status: 'ok', | ||
}), | ||
// Second span - chat.sendMessage (should get model from context) | ||
expect.objectContaining({ | ||
data: { | ||
'gen_ai.operation.name': 'chat', | ||
'sentry.op': 'gen_ai.chat', | ||
'sentry.origin': 'auto.ai.google_genai', | ||
'gen_ai.system': 'google_genai', | ||
'gen_ai.request.model': 'gemini-1.5-pro', // Should get from chat context | ||
'gen_ai.usage.input_tokens': 8, | ||
'gen_ai.usage.output_tokens': 12, | ||
'gen_ai.usage.total_tokens': 20, | ||
}, | ||
description: 'chat gemini-1.5-pro', | ||
op: 'gen_ai.chat', | ||
origin: 'auto.ai.google_genai', | ||
status: 'ok', | ||
}), | ||
// Third span - models.generateContent | ||
expect.objectContaining({ | ||
data: { | ||
'gen_ai.operation.name': 'models', | ||
'sentry.op': 'gen_ai.models', | ||
'sentry.origin': 'auto.ai.google_genai', | ||
'gen_ai.system': 'google_genai', | ||
'gen_ai.request.model': 'gemini-1.5-flash', | ||
'gen_ai.request.temperature': 0.7, | ||
'gen_ai.request.top_p': 0.9, | ||
'gen_ai.request.max_tokens': 100, | ||
'gen_ai.usage.input_tokens': 8, | ||
'gen_ai.usage.output_tokens': 12, | ||
'gen_ai.usage.total_tokens': 20, | ||
}, | ||
description: 'models gemini-1.5-flash', | ||
op: 'gen_ai.models', | ||
origin: 'auto.ai.google_genai', | ||
status: 'ok', | ||
}), | ||
// Fourth span - error handling | ||
expect.objectContaining({ | ||
data: { | ||
'gen_ai.operation.name': 'models', | ||
'sentry.op': 'gen_ai.models', | ||
'sentry.origin': 'auto.ai.google_genai', | ||
'gen_ai.system': 'google_genai', | ||
'gen_ai.request.model': 'error-model', | ||
}, | ||
description: 'models error-model', | ||
op: 'gen_ai.models', | ||
origin: 'auto.ai.google_genai', | ||
status: 'unknown_error', | ||
}), | ||
]), | ||
}; | ||
|
||
const EXPECTED_TRANSACTION_DEFAULT_PII_TRUE = { | ||
transaction: 'main', | ||
spans: expect.arrayContaining([ | ||
// First span - chats.create with PII | ||
expect.objectContaining({ | ||
data: expect.objectContaining({ | ||
'gen_ai.operation.name': 'chat', | ||
'sentry.op': 'gen_ai.chat', | ||
'sentry.origin': 'auto.ai.google_genai', | ||
'gen_ai.system': 'google_genai', | ||
'gen_ai.request.model': 'gemini-1.5-pro', | ||
'gen_ai.request.temperature': 0.8, | ||
'gen_ai.request.top_p': 0.9, | ||
'gen_ai.request.max_tokens': 150, | ||
'gen_ai.request.messages': expect.any(String), // Should include history when recordInputs: true | ||
}), | ||
description: 'chat gemini-1.5-pro create', | ||
op: 'gen_ai.chat', | ||
origin: 'auto.ai.google_genai', | ||
status: 'ok', | ||
}), | ||
// Second span - chat.sendMessage with PII | ||
expect.objectContaining({ | ||
data: expect.objectContaining({ | ||
'gen_ai.operation.name': 'chat', | ||
'sentry.op': 'gen_ai.chat', | ||
'sentry.origin': 'auto.ai.google_genai', | ||
'gen_ai.system': 'google_genai', | ||
'gen_ai.request.model': 'gemini-1.5-pro', | ||
'gen_ai.request.messages': expect.any(String), // Should include message when recordInputs: true | ||
'gen_ai.response.text': expect.any(String), // Should include response when recordOutputs: true | ||
'gen_ai.usage.input_tokens': 8, | ||
'gen_ai.usage.output_tokens': 12, | ||
'gen_ai.usage.total_tokens': 20, | ||
}), | ||
description: 'chat gemini-1.5-pro', | ||
op: 'gen_ai.chat', | ||
origin: 'auto.ai.google_genai', | ||
status: 'ok', | ||
}), | ||
// Third span - models.generateContent with PII | ||
expect.objectContaining({ | ||
data: expect.objectContaining({ | ||
'gen_ai.operation.name': 'models', | ||
'sentry.op': 'gen_ai.models', | ||
'sentry.origin': 'auto.ai.google_genai', | ||
'gen_ai.system': 'google_genai', | ||
'gen_ai.request.model': 'gemini-1.5-flash', | ||
'gen_ai.request.temperature': 0.7, | ||
'gen_ai.request.top_p': 0.9, | ||
'gen_ai.request.max_tokens': 100, | ||
'gen_ai.request.messages': expect.any(String), // Should include contents when recordInputs: true | ||
'gen_ai.response.text': expect.any(String), // Should include response when recordOutputs: true | ||
'gen_ai.usage.input_tokens': 8, | ||
'gen_ai.usage.output_tokens': 12, | ||
'gen_ai.usage.total_tokens': 20, | ||
}), | ||
description: 'models gemini-1.5-flash', | ||
op: 'gen_ai.models', | ||
origin: 'auto.ai.google_genai', | ||
status: 'ok', | ||
}), | ||
// Fourth span - error handling with PII | ||
expect.objectContaining({ | ||
data: expect.objectContaining({ | ||
'gen_ai.operation.name': 'models', | ||
'sentry.op': 'gen_ai.models', | ||
'sentry.origin': 'auto.ai.google_genai', | ||
'gen_ai.system': 'google_genai', | ||
'gen_ai.request.model': 'error-model', | ||
'gen_ai.request.messages': expect.any(String), // Should include contents when recordInputs: true | ||
}), | ||
description: 'models error-model', | ||
op: 'gen_ai.models', | ||
origin: 'auto.ai.google_genai', | ||
status: 'unknown_error', | ||
}), | ||
]), | ||
}; | ||
|
||
const EXPECTED_TRANSACTION_WITH_OPTIONS = { | ||
transaction: 'main', | ||
spans: expect.arrayContaining([ | ||
// Check that custom options are respected | ||
expect.objectContaining({ | ||
data: expect.objectContaining({ | ||
'gen_ai.request.messages': expect.any(String), // Should include messages when recordInputs: true | ||
'gen_ai.response.text': expect.any(String), // Should include response text when recordOutputs: true | ||
}), | ||
}), | ||
]), | ||
}; | ||
|
||
createEsmAndCjsTests(__dirname, 'scenario.mjs', 'instrument.mjs', (createRunner, test) => { | ||
test('creates google genai related spans with sendDefaultPii: false', async () => { | ||
Check failure on line 177 in dev-packages/node-integration-tests/suites/tracing/google-genai/test.ts
|
||
await createRunner() | ||
.ignore('event') | ||
.expect({ transaction: EXPECTED_TRANSACTION_DEFAULT_PII_FALSE }) | ||
.start() | ||
.completed(); | ||
}); | ||
}); | ||
|
||
createEsmAndCjsTests(__dirname, 'scenario.mjs', 'instrument-with-pii.mjs', (createRunner, test) => { | ||
test('creates google genai related spans with sendDefaultPii: true', async () => { | ||
Check failure on line 187 in dev-packages/node-integration-tests/suites/tracing/google-genai/test.ts
|
||
await createRunner() | ||
.ignore('event') | ||
.expect({ transaction: EXPECTED_TRANSACTION_DEFAULT_PII_TRUE }) | ||
.start() | ||
.completed(); | ||
}); | ||
}); | ||
|
||
createEsmAndCjsTests(__dirname, 'scenario.mjs', 'instrument-with-options.mjs', (createRunner, test) => { | ||
test('creates google genai related spans with custom options', async () => { | ||
Check failure on line 197 in dev-packages/node-integration-tests/suites/tracing/google-genai/test.ts
|
||
await createRunner() | ||
.ignore('event') | ||
.expect({ transaction: EXPECTED_TRANSACTION_WITH_OPTIONS }) | ||
.start() | ||
.completed(); | ||
}); | ||
}); | ||
}); |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
this is added by default, right? So no need to add this here :)