Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
import { END, MessagesAnnotation, START, StateGraph } from '@langchain/langgraph';
import * as Sentry from '@sentry/node';

/**
 * Scenario: verify that a LangGraph `thread_id` passed via invoke config is
 * picked up by the Sentry LangGraph instrumentation. Three invocations are
 * made: two with distinct thread ids and one with no config at all.
 */
async function run() {
  await Sentry.startSpan({ op: 'function', name: 'langgraph-thread-id-test' }, async () => {
    // Stub graph node standing in for an LLM call; always returns the same
    // assistant message with fixed token usage metadata.
    function fakeModelNode() {
      return {
        messages: [
          {
            role: 'assistant',
            content: 'Mock LLM response',
            response_metadata: {
              model_name: 'mock-model',
              finish_reason: 'stop',
              tokenUsage: { promptTokens: 20, completionTokens: 10, totalTokens: 30 },
            },
          },
        ],
      };
    }

    // Minimal single-node graph: START -> agent -> END.
    const compiledGraph = new StateGraph(MessagesAnnotation)
      .addNode('agent', fakeModelNode)
      .addEdge(START, 'agent')
      .addEdge('agent', END)
      .compile({ name: 'thread_test_agent' });

    // 1) thread_id present — instrumentation should record it as
    //    gen_ai.conversation.id on the invoke_agent span.
    await compiledGraph.invoke(
      { messages: [{ role: 'user', content: 'Hello with thread ID' }] },
      { configurable: { thread_id: 'thread_abc123_session_1' } },
    );

    // 2) a different thread_id models a second, separate conversation.
    await compiledGraph.invoke(
      { messages: [{ role: 'user', content: 'Different conversation' }] },
      { configurable: { thread_id: 'thread_xyz789_session_2' } },
    );

    // 3) no config at all — the span must NOT carry gen_ai.conversation.id.
    await compiledGraph.invoke({ messages: [{ role: 'user', content: 'No thread ID here' }] });
  });

  // Give the transport up to 2s to deliver buffered envelopes before exit.
  await Sentry.flush(2000);
}

run();
Original file line number Diff line number Diff line change
Expand Up @@ -205,4 +205,72 @@ describe('LangGraph integration', () => {
await createRunner().ignore('event').expect({ transaction: EXPECTED_TRANSACTION_WITH_TOOLS }).start().completed();
});
});

// Test for thread_id (conversation ID) support
// Matcher for one invoke_agent span that carries the given thread_id as
// gen_ai.conversation.id; the two thread-id invocations differ only in that value.
const invokeAgentSpanWithThread = (conversationId: string) =>
  expect.objectContaining({
    data: expect.objectContaining({
      'gen_ai.operation.name': 'invoke_agent',
      'sentry.op': 'gen_ai.invoke_agent',
      'sentry.origin': 'auto.ai.langgraph',
      'gen_ai.agent.name': 'thread_test_agent',
      'gen_ai.pipeline.name': 'thread_test_agent',
      'gen_ai.conversation.id': conversationId,
    }),
    description: 'invoke_agent thread_test_agent',
    op: 'gen_ai.invoke_agent',
    origin: 'auto.ai.langgraph',
    status: 'ok',
  });

// Expected transaction for the thread_id (conversation ID) scenario.
const EXPECTED_TRANSACTION_THREAD_ID = {
  transaction: 'langgraph-thread-id-test',
  spans: expect.arrayContaining([
    // create_agent span emitted when the graph is compiled
    expect.objectContaining({
      data: {
        'gen_ai.operation.name': 'create_agent',
        'sentry.op': 'gen_ai.create_agent',
        'sentry.origin': 'auto.ai.langgraph',
        'gen_ai.agent.name': 'thread_test_agent',
      },
      description: 'create_agent thread_test_agent',
      op: 'gen_ai.create_agent',
      origin: 'auto.ai.langgraph',
      status: 'ok',
    }),
    // first and second invocations: each thread_id surfaces as the conversation id
    invokeAgentSpanWithThread('thread_abc123_session_1'),
    invokeAgentSpanWithThread('thread_xyz789_session_2'),
    // third invocation had no thread_id, so no gen_ai.conversation.id may appear
    expect.objectContaining({
      data: expect.not.objectContaining({
        'gen_ai.conversation.id': expect.anything(),
      }),
      description: 'invoke_agent thread_test_agent',
      op: 'gen_ai.invoke_agent',
      origin: 'auto.ai.langgraph',
      status: 'ok',
    }),
  ]),
};

createEsmAndCjsTests(__dirname, 'scenario-thread-id.mjs', 'instrument.mjs', (createRunner, test) => {
  test('should capture thread_id as gen_ai.conversation.id', async () => {
    // Drop error events; only the transaction envelope is asserted on.
    const runner = createRunner().ignore('event').expect({ transaction: EXPECTED_TRANSACTION_THREAD_ID });
    await runner.start().completed();
  });
});
});
Original file line number Diff line number Diff line change
@@ -0,0 +1,95 @@
import * as Sentry from '@sentry/node';
import express from 'express';
import OpenAI from 'openai';

/**
 * Boot an in-process Express server that fakes the two OpenAI endpoints this
 * scenario touches: the Conversations API and the Responses API.
 *
 * The conversation endpoint always returns a fixed conversation id so the
 * test can assert it is propagated into span attributes; the responses
 * endpoint echoes `conversation` / `previous_response_id` back in the output
 * text so request wiring is observable from the response.
 *
 * @returns {Promise<import('http').Server>} resolves once the server is
 *   listening on an OS-assigned ephemeral port; rejects if binding fails
 *   (the original version would hang forever on a listen error).
 */
function startMockServer() {
  const app = express();
  app.use(express.json());

  // Conversations API endpoint - create conversation (fixed id, no request data used)
  app.post('/openai/conversations', (_req, res) => {
    res.send({
      id: 'conv_689667905b048191b4740501625afd940c7533ace33a2dab',
      object: 'conversation',
      created_at: 1704067200,
      metadata: {},
    });
  });

  // Responses API endpoint - with conversation support
  app.post('/openai/responses', (req, res) => {
    const { model, conversation, previous_response_id } = req.body;

    res.send({
      id: 'resp_mock_conv_123',
      object: 'response',
      created_at: 1704067210,
      model: model,
      output: [
        {
          type: 'message',
          id: 'msg_mock_output_1',
          status: 'completed',
          role: 'assistant',
          content: [
            {
              type: 'output_text',
              text: `Response with conversation: ${conversation || 'none'}, previous_response_id: ${previous_response_id || 'none'}`,
              annotations: [],
            },
          ],
        },
      ],
      output_text: `Response with conversation: ${conversation || 'none'}`,
      status: 'completed',
      usage: {
        input_tokens: 10,
        output_tokens: 15,
        total_tokens: 25,
      },
    });
  });

  return new Promise((resolve, reject) => {
    // Port 0 lets the OS pick a free port; reject instead of hanging if listen fails.
    const server = app.listen(0, () => resolve(server));
    server.once('error', reject);
  });
}

/**
 * Drive the OpenAI client against the local mock server to produce three
 * kinds of spans: a conversations.create span, a responses.create span tied
 * to that conversation, and a response chained via previous_response_id.
 *
 * Fixes vs. the original: spans are flushed before exit (matching the other
 * scenarios in this suite — without it buffered envelopes can be lost), and
 * the server is closed in a finally block so the ephemeral port is freed
 * even when a request throws.
 */
async function run() {
  const server = await startMockServer();

  try {
    await Sentry.startSpan({ op: 'function', name: 'conversation-test' }, async () => {
      const client = new OpenAI({
        baseURL: `http://localhost:${server.address().port}/openai`,
        apiKey: 'mock-api-key',
      });

      // Test 1: Create a conversation (mock returns a fixed conversation id)
      const conversation = await client.conversations.create();

      // Test 2: Use conversation ID in responses.create
      await client.responses.create({
        model: 'gpt-4',
        input: 'Hello, this is a conversation test',
        conversation: conversation.id,
      });

      // Test 3: Use previous_response_id for chaining (without formal conversation)
      const firstResponse = await client.responses.create({
        model: 'gpt-4',
        input: 'Tell me a joke',
      });

      await client.responses.create({
        model: 'gpt-4',
        input: 'Explain why that is funny',
        previous_response_id: firstResponse.id,
      });
    });

    // Match the sibling scenarios: give the transport up to 2s to deliver spans.
    await Sentry.flush(2000);
  } finally {
    server.close();
  }
}

run();
71 changes: 71 additions & 0 deletions dev-packages/node-integration-tests/suites/tracing/openai/test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -645,4 +645,75 @@ describe('OpenAI integration', () => {
});
},
);

// Test for conversation ID support (Conversations API and previous_response_id)
// Attributes shared by every responses.create span in this scenario.
const RESPONSES_SPAN_BASE_ATTRIBUTES = {
  'gen_ai.operation.name': 'responses',
  'sentry.op': 'gen_ai.responses',
  'sentry.origin': 'auto.ai.openai',
  'gen_ai.system': 'openai',
  'gen_ai.request.model': 'gpt-4',
};

// Expected transaction for conversation-ID support: the id minted by
// conversations.create must flow into dependent spans, and a response chained
// via previous_response_id must carry the prior response's id.
const EXPECTED_TRANSACTION_CONVERSATION = {
  transaction: 'conversation-test',
  spans: expect.arrayContaining([
    // conversations.create — the conversation id comes from the response body
    expect.objectContaining({
      data: expect.objectContaining({
        'gen_ai.operation.name': 'conversations',
        'sentry.op': 'gen_ai.conversations',
        'sentry.origin': 'auto.ai.openai',
        'gen_ai.system': 'openai',
        'gen_ai.conversation.id': 'conv_689667905b048191b4740501625afd940c7533ace33a2dab',
      }),
      description: 'conversations unknown',
      op: 'gen_ai.conversations',
      origin: 'auto.ai.openai',
      status: 'ok',
    }),
    // responses.create with an explicit `conversation` request parameter
    expect.objectContaining({
      data: expect.objectContaining({
        ...RESPONSES_SPAN_BASE_ATTRIBUTES,
        'gen_ai.conversation.id': 'conv_689667905b048191b4740501625afd940c7533ace33a2dab',
      }),
      op: 'gen_ai.responses',
      origin: 'auto.ai.openai',
      status: 'ok',
    }),
    // first response of the chain: no conversation, so no conversation id allowed
    expect.objectContaining({
      data: expect.not.objectContaining({
        'gen_ai.conversation.id': expect.anything(),
      }),
      op: 'gen_ai.responses',
      origin: 'auto.ai.openai',
      status: 'ok',
    }),
    // chained responses.create: previous_response_id becomes the conversation id
    expect.objectContaining({
      data: expect.objectContaining({
        ...RESPONSES_SPAN_BASE_ATTRIBUTES,
        'gen_ai.conversation.id': 'resp_mock_conv_123',
      }),
      op: 'gen_ai.responses',
      origin: 'auto.ai.openai',
      status: 'ok',
    }),
  ]),
};

createEsmAndCjsTests(__dirname, 'scenario-conversation.mjs', 'instrument.mjs', (createRunner, test) => {
  test('captures conversation ID from Conversations API and previous_response_id', async () => {
    // Drop error events; only the transaction envelope is asserted on.
    const expectation = { transaction: EXPECTED_TRANSACTION_CONVERSATION };
    await createRunner().ignore('event').expect(expectation).start().completed();
  });
});
});
8 changes: 8 additions & 0 deletions packages/core/src/tracing/ai/gen-ai-attributes.ts
Original file line number Diff line number Diff line change
Expand Up @@ -154,6 +154,13 @@ export const GEN_AI_AGENT_NAME_ATTRIBUTE = 'gen_ai.agent.name';
*/
export const GEN_AI_PIPELINE_NAME_ATTRIBUTE = 'gen_ai.pipeline.name';

/**
 * The conversation ID linking related gen_ai spans across API calls.
 *
 * Sources observed by the instrumentation in this SDK:
 * - LangGraph: `config.configurable.thread_id` passed to `invoke`
 * - OpenAI: the id returned by `conversations.create`, the `conversation`
 *   request parameter on `responses.create`, or `previous_response_id` when
 *   responses are chained without a formal conversation
 */
export const GEN_AI_CONVERSATION_ID_ATTRIBUTE = 'gen_ai.conversation.id';

/**
* The number of cache creation input tokens used
*/
Expand Down Expand Up @@ -254,6 +261,7 @@ export const OPENAI_OPERATIONS = {
CHAT: 'chat',
RESPONSES: 'responses',
EMBEDDINGS: 'embeddings',
CONVERSATIONS: 'conversations',
} as const;

// =============================================================================
Expand Down
10 changes: 10 additions & 0 deletions packages/core/src/tracing/langgraph/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ import { SEMANTIC_ATTRIBUTE_SENTRY_OP, SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN } from '
import { SPAN_STATUS_ERROR } from '../../tracing';
import {
GEN_AI_AGENT_NAME_ATTRIBUTE,
GEN_AI_CONVERSATION_ID_ATTRIBUTE,
GEN_AI_INVOKE_AGENT_OPERATION_ATTRIBUTE,
GEN_AI_OPERATION_NAME_ATTRIBUTE,
GEN_AI_PIPELINE_NAME_ATTRIBUTE,
Expand Down Expand Up @@ -113,6 +114,15 @@ function instrumentCompiledGraphInvoke(
span.updateName(`invoke_agent ${graphName}`);
}

// Extract thread_id from the config (second argument)
// LangGraph uses config.configurable.thread_id for conversation/session linking
const config = args.length > 1 ? (args[1] as Record<string, unknown> | undefined) : undefined;
const configurable = config?.configurable as Record<string, unknown> | undefined;
const threadId = configurable?.thread_id;
if (threadId && typeof threadId === 'string') {
span.setAttribute(GEN_AI_CONVERSATION_ID_ATTRIBUTE, threadId);
}

// Extract available tools from the graph instance
const tools = extractToolsFromCompiledGraph(graphInstance);
if (tools) {
Expand Down
10 changes: 9 additions & 1 deletion packages/core/src/tracing/openai/constants.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,15 @@ export const OPENAI_INTEGRATION_NAME = 'OpenAI';

// Dotted paths of OpenAI client methods patched by this integration; each
// call produces a gen_ai span named after the method's first segment.
// https://platform.openai.com/docs/quickstart?api-mode=responses
// https://platform.openai.com/docs/quickstart?api-mode=chat
// https://platform.openai.com/docs/api-reference/conversations
export const INSTRUMENTED_METHODS = [
  'responses.create',
  'chat.completions.create',
  'embeddings.create',
  // Conversations API - for conversation state management; the returned
  // conversation id is recorded as gen_ai.conversation.id
  // https://platform.openai.com/docs/guides/conversation-state
  'conversations.create',
] as const;
export const RESPONSES_TOOL_CALL_EVENT_TYPES = [
'response.output_item.added',
'response.function_call_arguments.delta',
Expand Down
Loading
Loading