@@ -0,0 +1,9 @@
import * as Sentry from '@sentry/browser';

window.Sentry = Sentry;

Sentry.init({
dsn: 'https://public@dsn.ingest.sentry.io/1337',
tracesSampleRate: 1,
debug: true,
});
@@ -0,0 +1,61 @@
// Mock Anthropic client for browser testing
export class MockAnthropic {
constructor(config) {
// eslint-disable-next-line no-console
console.log('[Mock Anthropic] Constructor called with config:', config);
this.apiKey = config.apiKey;

// Main focus: messages.create functionality
this.messages = {
create: async (...args) => {
// eslint-disable-next-line no-console
console.log('[Mock Anthropic] messages.create called with args:', args);
const params = args[0];
// Simulate processing time
await new Promise(resolve => setTimeout(resolve, 10));

if (params.model === 'error-model') {
const error = new Error('Model not found');
error.status = 404;
error.headers = { 'x-request-id': 'mock-request-123' };
throw error;
}

const response = {
id: 'msg_mock123',
type: 'message',
role: 'assistant',
model: params.model,
content: [
{
type: 'text',
text: 'Hello from Anthropic mock!',
},
],
stop_reason: 'end_turn',
stop_sequence: null,
usage: {
input_tokens: 10,
output_tokens: 15,
cache_creation_input_tokens: 0,
cache_read_input_tokens: 0,
},
};
// eslint-disable-next-line no-console
console.log('[Mock Anthropic] Returning response:', response);
return response;
},
countTokens: async (..._args) => ({ id: 'mock', type: 'model', model: 'mock', input_tokens: 0 }),
};

// Minimal implementations for required interface compliance
this.models = {
list: async (..._args) => ({ id: 'mock', type: 'model', model: 'mock' }),
get: async (..._args) => ({ id: 'mock', type: 'model', model: 'mock' }),
};

this.completions = {
create: async (..._args) => ({ id: 'mock', type: 'completion', model: 'mock' }),
};
}
}
@@ -0,0 +1,43 @@
import * as Sentry from '@sentry/browser';
import { MockAnthropic } from './mocks.js';

// eslint-disable-next-line no-console
console.log('[Anthropic Test] Starting test...');

// eslint-disable-next-line no-console
console.log('[Anthropic Test] Creating mock client...');
const mockClient = new MockAnthropic({
apiKey: 'mock-api-key',
});

// eslint-disable-next-line no-console
console.log('[Anthropic Test] Mock client created:', mockClient);

// eslint-disable-next-line no-console
console.log('[Anthropic Test] Instrumenting client with Sentry...');
const client = Sentry.instrumentAnthropicAiClient(mockClient);

// eslint-disable-next-line no-console
console.log('[Anthropic Test] Client instrumented:', client);

// Test that manual instrumentation doesn't crash the browser
// The instrumentation automatically creates spans
// eslint-disable-next-line no-console
console.log('[Anthropic Test] Calling messages.create...');
const response = await client.messages.create({
model: 'claude-3-haiku-20240307',
messages: [{ role: 'user', content: 'What is the capital of France?' }],
temperature: 0.7,
max_tokens: 100,
});

// eslint-disable-next-line no-console
console.log('[Anthropic Test] Response received:', JSON.stringify(response));

// eslint-disable-next-line no-console
console.log('[Anthropic Test] Flushing Sentry...');
// Ensure transaction is flushed in CI
await Sentry.flush(2000);

// eslint-disable-next-line no-console
console.log('[Anthropic Test] Test completed!');
@@ -0,0 +1,58 @@
import { expect } from '@playwright/test';
import { sentryTest } from '../../../../utils/fixtures';
import { envelopeRequestParser, waitForTransactionRequest } from '../../../../utils/helpers';

// These tests are not exhaustive: the instrumentation itself is already
// covered by the Node integration tests. Here we only verify that it
// does not crash in the browser and that gen_ai transactions are sent.

sentryTest('manual Anthropic instrumentation sends gen_ai transactions', async ({ getLocalTestUrl, page }) => {
// Listen for console logs
page.on('console', msg => {
// eslint-disable-next-line no-console
console.log(`[Browser Console ${msg.type()}]`, msg.text());
});

// Listen for page errors
page.on('pageerror', error => {
// eslint-disable-next-line no-console
console.error('[Browser Error]', error);
});

const transactionPromise = waitForTransactionRequest(page, event => {
// eslint-disable-next-line no-console
console.log('[Test] Received transaction event:', JSON.stringify(event, null, 2));
return !!event.transaction?.includes('claude-3-haiku-20240307');
});

const url = await getLocalTestUrl({ testDir: __dirname });
// eslint-disable-next-line no-console
console.log('[Test] Navigating to URL:', url);
await page.goto(url);

// eslint-disable-next-line no-console
console.log('[Test] Waiting for transaction...');
const req = await transactionPromise;
// eslint-disable-next-line no-console
console.log('[Test] Transaction received!');

const eventData = envelopeRequestParser(req);
// eslint-disable-next-line no-console
console.log('[Test] Parsed event data:', JSON.stringify(eventData, null, 2));

// Verify it's a gen_ai transaction
expect(eventData.transaction).toBe('messages claude-3-haiku-20240307');
expect(eventData.contexts?.trace?.op).toBe('gen_ai.messages');
expect(eventData.contexts?.trace?.origin).toBe('auto.ai.anthropic');
expect(eventData.contexts?.trace?.data).toMatchObject({
'gen_ai.operation.name': 'messages',
'gen_ai.system': 'anthropic',
'gen_ai.request.model': 'claude-3-haiku-20240307',
'gen_ai.request.temperature': 0.7,
'gen_ai.response.model': 'claude-3-haiku-20240307',
'gen_ai.response.id': 'msg_mock123',
'gen_ai.usage.input_tokens': 10,
'gen_ai.usage.output_tokens': 15,
});
});
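The mocks above also define an 'error-model' branch (404, 'Model not found') that neither subject exercises. Below is a minimal sketch of an error-path spec, under the assumptions that the instrumented client rethrows the mock's error, that a companion subject catches it and reports it via Sentry.captureException, and that a waitForErrorRequest helper exists alongside the helpers imported above; none of this is part of the diff.

// Sketch only. Assumes a subject that calls
//   await client.messages.create({ model: 'error-model', messages: [...] })
// inside a try/catch and reports the failure with Sentry.captureException(err).
// Reuses expect, sentryTest and envelopeRequestParser as imported in the spec above.
import { waitForErrorRequest } from '../../../../utils/helpers'; // assumed helper

sentryTest('manual Anthropic instrumentation captures mock errors', async ({ getLocalTestUrl, page }) => {
  const errorPromise = waitForErrorRequest(page, event => {
    return event.exception?.values?.[0]?.value === 'Model not found';
  });

  await page.goto(await getLocalTestUrl({ testDir: __dirname }));

  const errorEvent = envelopeRequestParser(await errorPromise);
  expect(errorEvent.exception?.values?.[0]?.value).toBe('Model not found');
});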
@@ -0,0 +1,9 @@
import * as Sentry from '@sentry/browser';

window.Sentry = Sentry;

Sentry.init({
dsn: 'https://public@dsn.ingest.sentry.io/1337',
tracesSampleRate: 1,
debug: true,
});
@@ -0,0 +1,128 @@
// Mock Google GenAI client for browser testing
export class MockGoogleGenAI {
constructor(config) {
// eslint-disable-next-line no-console
console.log('[Mock Google GenAI] Constructor called with config:', config);
this.apiKey = config.apiKey;

// models.generateContent functionality
this.models = {
generateContent: async (...args) => {
// eslint-disable-next-line no-console
console.log('[Mock Google GenAI] models.generateContent called with args:', args);
const params = args[0];
// Simulate processing time
await new Promise(resolve => setTimeout(resolve, 10));

if (params.model === 'error-model') {
const error = new Error('Model not found');
error.status = 404;
error.headers = { 'x-request-id': 'mock-request-123' };
throw error;
}

return {
candidates: [
{
content: {
parts: [
{
text: 'Hello from Google GenAI mock!',
},
],
role: 'model',
},
finishReason: 'stop',
index: 0,
},
],
usageMetadata: {
promptTokenCount: 8,
candidatesTokenCount: 12,
totalTokenCount: 20,
},
};
},
generateContentStream: async () => {
// Return a promise that resolves to an async generator
return (async function* () {
yield {
candidates: [
{
content: {
parts: [{ text: 'Streaming response' }],
role: 'model',
},
finishReason: 'stop',
index: 0,
},
],
};
})();
},
};

// chats.create implementation
this.chats = {
create: (...args) => {
// eslint-disable-next-line no-console
console.log('[Mock Google GenAI] chats.create called with args:', args);
const params = args[0];
const model = params.model;

return {
modelVersion: model,
sendMessage: async (..._messageArgs) => {
// eslint-disable-next-line no-console
console.log('[Mock Google GenAI] chat.sendMessage called with args:', _messageArgs);
// Simulate processing time
await new Promise(resolve => setTimeout(resolve, 10));

const response = {
candidates: [
{
content: {
parts: [
{
text: 'This is a joke from the chat!',
},
],
role: 'model',
},
finishReason: 'stop',
index: 0,
},
],
usageMetadata: {
promptTokenCount: 8,
candidatesTokenCount: 12,
totalTokenCount: 20,
},
modelVersion: model, // Include model version in response
};
// eslint-disable-next-line no-console
console.log('[Mock Google GenAI] Returning response:', response);
return response;
},
sendMessageStream: async () => {
// Return a promise that resolves to an async generator
return (async function* () {
yield {
candidates: [
{
content: {
parts: [{ text: 'Streaming chat response' }],
role: 'model',
},
finishReason: 'stop',
index: 0,
},
],
};
})();
},
};
},
};
}
}
@@ -0,0 +1,58 @@
import * as Sentry from '@sentry/browser';
import { MockGoogleGenAI } from './mocks.js';

// eslint-disable-next-line no-console
console.log('[Google GenAI Test] Starting test...');

// eslint-disable-next-line no-console
console.log('[Google GenAI Test] Creating mock client...');
const mockClient = new MockGoogleGenAI({
apiKey: 'mock-api-key',
});

// eslint-disable-next-line no-console
console.log('[Google GenAI Test] Mock client created:', mockClient);

// eslint-disable-next-line no-console
console.log('[Google GenAI Test] Instrumenting client with Sentry...');
const client = Sentry.instrumentGoogleGenAIClient(mockClient);

// eslint-disable-next-line no-console
console.log('[Google GenAI Test] Client instrumented:', client);

// Test that manual instrumentation doesn't crash the browser
// The instrumentation automatically creates spans
// Test both chats and models APIs
// eslint-disable-next-line no-console
console.log('[Google GenAI Test] Creating chat...');
const chat = client.chats.create({
model: 'gemini-1.5-pro',
config: {
temperature: 0.8,
topP: 0.9,
maxOutputTokens: 150,
},
history: [
{
role: 'user',
parts: [{ text: 'Hello, how are you?' }],
},
],
});

// eslint-disable-next-line no-console
console.log('[Google GenAI Test] Sending message...');
const response = await chat.sendMessage({
message: 'Tell me a joke',
});

// eslint-disable-next-line no-console
console.log('[Google GenAI Test] Response received:', JSON.stringify(response));

// eslint-disable-next-line no-console
console.log('[Google GenAI Test] Flushing Sentry...');
// Ensure transaction is flushed in CI
await Sentry.flush(2000);

// eslint-disable-next-line no-console
console.log('[Google GenAI Test] Test completed!');
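The Playwright spec for the Google GenAI suite did not load in this view. Below is a minimal sketch of what it could look like, mirroring the Anthropic spec and the values hard-coded in the mock (gemini-1.5-pro, promptTokenCount 8, candidatesTokenCount 12); the origin string and attribute mapping are assumptions about the Google GenAI instrumentation, not taken from this diff.

import { expect } from '@playwright/test';
import { sentryTest } from '../../../../utils/fixtures';
import { envelopeRequestParser, waitForTransactionRequest } from '../../../../utils/helpers';

// Sketch only — assumed assertions are marked below.
sentryTest('manual Google GenAI instrumentation sends gen_ai transactions', async ({ getLocalTestUrl, page }) => {
  const transactionPromise = waitForTransactionRequest(page, event => {
    return !!event.transaction?.includes('gemini-1.5-pro');
  });

  await page.goto(await getLocalTestUrl({ testDir: __dirname }));

  const eventData = envelopeRequestParser(await transactionPromise);

  // Assumed span metadata — confirm against the actual instrumentation.
  expect(eventData.contexts?.trace?.origin).toBe('auto.ai.google_genai');
  expect(eventData.contexts?.trace?.data).toMatchObject({
    'gen_ai.request.model': 'gemini-1.5-pro', // from the subject above
    'gen_ai.usage.input_tokens': 8, // promptTokenCount in the mock
    'gen_ai.usage.output_tokens': 12, // candidatesTokenCount in the mock
  });
});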