Skip to content

Commit 314babc

Browse files
authored
feat(core): Add gen_ai.conversation.id attribute to OpenAI and LangGraph integrations (#18703)
This PR adds support for capturing conversation/session identifiers in AI integrations to enable linking messages across API calls.

## Changes

### OpenAI

- Added instrumentation for the Conversations API (`conversations.create`)
- Captures `gen_ai.conversation.id` from:
  - the `conversation` parameter in `responses.create`
  - the `previous_response_id` parameter for response chaining
  - the response object returned by `conversations.create`

### LangGraph

- Captures `gen_ai.conversation.id` from `config.configurable.thread_id` when invoking agents

Closes #18702
1 parent 497a0b5 commit 314babc

File tree

11 files changed

+496
-35
lines changed

11 files changed

+496
-35
lines changed
Lines changed: 67 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,67 @@
1+
import { END, MessagesAnnotation, START, StateGraph } from '@langchain/langgraph';
2+
import * as Sentry from '@sentry/node';
3+
4+
async function run() {
5+
await Sentry.startSpan({ op: 'function', name: 'langgraph-thread-id-test' }, async () => {
6+
// Define a simple mock LLM function
7+
const mockLlm = () => {
8+
return {
9+
messages: [
10+
{
11+
role: 'assistant',
12+
content: 'Mock LLM response',
13+
response_metadata: {
14+
model_name: 'mock-model',
15+
finish_reason: 'stop',
16+
tokenUsage: {
17+
promptTokens: 20,
18+
completionTokens: 10,
19+
totalTokens: 30,
20+
},
21+
},
22+
},
23+
],
24+
};
25+
};
26+
27+
// Create and compile the graph
28+
const graph = new StateGraph(MessagesAnnotation)
29+
.addNode('agent', mockLlm)
30+
.addEdge(START, 'agent')
31+
.addEdge('agent', END)
32+
.compile({ name: 'thread_test_agent' });
33+
34+
// Test 1: Invoke with thread_id in config
35+
await graph.invoke(
36+
{
37+
messages: [{ role: 'user', content: 'Hello with thread ID' }],
38+
},
39+
{
40+
configurable: {
41+
thread_id: 'thread_abc123_session_1',
42+
},
43+
},
44+
);
45+
46+
// Test 2: Invoke with different thread_id (simulating different conversation)
47+
await graph.invoke(
48+
{
49+
messages: [{ role: 'user', content: 'Different conversation' }],
50+
},
51+
{
52+
configurable: {
53+
thread_id: 'thread_xyz789_session_2',
54+
},
55+
},
56+
);
57+
58+
// Test 3: Invoke without thread_id (should not have gen_ai.conversation.id)
59+
await graph.invoke({
60+
messages: [{ role: 'user', content: 'No thread ID here' }],
61+
});
62+
});
63+
64+
await Sentry.flush(2000);
65+
}
66+
67+
run();

dev-packages/node-integration-tests/suites/tracing/langgraph/test.ts

Lines changed: 68 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -205,4 +205,72 @@ describe('LangGraph integration', () => {
205205
await createRunner().ignore('event').expect({ transaction: EXPECTED_TRANSACTION_WITH_TOOLS }).start().completed();
206206
});
207207
});
208+
209+
// Test for thread_id (conversation ID) support
210+
const EXPECTED_TRANSACTION_THREAD_ID = {
211+
transaction: 'langgraph-thread-id-test',
212+
spans: expect.arrayContaining([
213+
// create_agent span
214+
expect.objectContaining({
215+
data: {
216+
'gen_ai.operation.name': 'create_agent',
217+
'sentry.op': 'gen_ai.create_agent',
218+
'sentry.origin': 'auto.ai.langgraph',
219+
'gen_ai.agent.name': 'thread_test_agent',
220+
},
221+
description: 'create_agent thread_test_agent',
222+
op: 'gen_ai.create_agent',
223+
origin: 'auto.ai.langgraph',
224+
status: 'ok',
225+
}),
226+
// First invoke_agent span with thread_id
227+
expect.objectContaining({
228+
data: expect.objectContaining({
229+
'gen_ai.operation.name': 'invoke_agent',
230+
'sentry.op': 'gen_ai.invoke_agent',
231+
'sentry.origin': 'auto.ai.langgraph',
232+
'gen_ai.agent.name': 'thread_test_agent',
233+
'gen_ai.pipeline.name': 'thread_test_agent',
234+
// The thread_id should be captured as conversation.id
235+
'gen_ai.conversation.id': 'thread_abc123_session_1',
236+
}),
237+
description: 'invoke_agent thread_test_agent',
238+
op: 'gen_ai.invoke_agent',
239+
origin: 'auto.ai.langgraph',
240+
status: 'ok',
241+
}),
242+
// Second invoke_agent span with different thread_id
243+
expect.objectContaining({
244+
data: expect.objectContaining({
245+
'gen_ai.operation.name': 'invoke_agent',
246+
'sentry.op': 'gen_ai.invoke_agent',
247+
'sentry.origin': 'auto.ai.langgraph',
248+
'gen_ai.agent.name': 'thread_test_agent',
249+
'gen_ai.pipeline.name': 'thread_test_agent',
250+
// Different thread_id for different conversation
251+
'gen_ai.conversation.id': 'thread_xyz789_session_2',
252+
}),
253+
description: 'invoke_agent thread_test_agent',
254+
op: 'gen_ai.invoke_agent',
255+
origin: 'auto.ai.langgraph',
256+
status: 'ok',
257+
}),
258+
// Third invoke_agent span without thread_id (should NOT have gen_ai.conversation.id)
259+
expect.objectContaining({
260+
data: expect.not.objectContaining({
261+
'gen_ai.conversation.id': expect.anything(),
262+
}),
263+
description: 'invoke_agent thread_test_agent',
264+
op: 'gen_ai.invoke_agent',
265+
origin: 'auto.ai.langgraph',
266+
status: 'ok',
267+
}),
268+
]),
269+
};
270+
271+
createEsmAndCjsTests(__dirname, 'scenario-thread-id.mjs', 'instrument.mjs', (createRunner, test) => {
272+
test('should capture thread_id as gen_ai.conversation.id', async () => {
273+
await createRunner().ignore('event').expect({ transaction: EXPECTED_TRANSACTION_THREAD_ID }).start().completed();
274+
});
275+
});
208276
});
Lines changed: 95 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,95 @@
1+
import * as Sentry from '@sentry/node';
2+
import express from 'express';
3+
import OpenAI from 'openai';
4+
5+
function startMockServer() {
6+
const app = express();
7+
app.use(express.json());
8+
9+
// Conversations API endpoint - create conversation
10+
app.post('/openai/conversations', (req, res) => {
11+
res.send({
12+
id: 'conv_689667905b048191b4740501625afd940c7533ace33a2dab',
13+
object: 'conversation',
14+
created_at: 1704067200,
15+
metadata: {},
16+
});
17+
});
18+
19+
// Responses API endpoint - with conversation support
20+
app.post('/openai/responses', (req, res) => {
21+
const { model, conversation, previous_response_id } = req.body;
22+
23+
res.send({
24+
id: 'resp_mock_conv_123',
25+
object: 'response',
26+
created_at: 1704067210,
27+
model: model,
28+
output: [
29+
{
30+
type: 'message',
31+
id: 'msg_mock_output_1',
32+
status: 'completed',
33+
role: 'assistant',
34+
content: [
35+
{
36+
type: 'output_text',
37+
text: `Response with conversation: ${conversation || 'none'}, previous_response_id: ${previous_response_id || 'none'}`,
38+
annotations: [],
39+
},
40+
],
41+
},
42+
],
43+
output_text: `Response with conversation: ${conversation || 'none'}`,
44+
status: 'completed',
45+
usage: {
46+
input_tokens: 10,
47+
output_tokens: 15,
48+
total_tokens: 25,
49+
},
50+
});
51+
});
52+
53+
return new Promise(resolve => {
54+
const server = app.listen(0, () => {
55+
resolve(server);
56+
});
57+
});
58+
}
59+
60+
async function run() {
61+
const server = await startMockServer();
62+
63+
await Sentry.startSpan({ op: 'function', name: 'conversation-test' }, async () => {
64+
const client = new OpenAI({
65+
baseURL: `http://localhost:${server.address().port}/openai`,
66+
apiKey: 'mock-api-key',
67+
});
68+
69+
// Test 1: Create a conversation
70+
const conversation = await client.conversations.create();
71+
72+
// Test 2: Use conversation ID in responses.create
73+
await client.responses.create({
74+
model: 'gpt-4',
75+
input: 'Hello, this is a conversation test',
76+
conversation: conversation.id,
77+
});
78+
79+
// Test 3: Use previous_response_id for chaining (without formal conversation)
80+
const firstResponse = await client.responses.create({
81+
model: 'gpt-4',
82+
input: 'Tell me a joke',
83+
});
84+
85+
await client.responses.create({
86+
model: 'gpt-4',
87+
input: 'Explain why that is funny',
88+
previous_response_id: firstResponse.id,
89+
});
90+
});
91+
92+
server.close();
93+
}
94+
95+
run();

dev-packages/node-integration-tests/suites/tracing/openai/test.ts

Lines changed: 71 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -645,4 +645,75 @@ describe('OpenAI integration', () => {
645645
});
646646
},
647647
);
648+
649+
// Test for conversation ID support (Conversations API and previous_response_id)
650+
const EXPECTED_TRANSACTION_CONVERSATION = {
651+
transaction: 'conversation-test',
652+
spans: expect.arrayContaining([
653+
// First span - conversations.create returns conversation object with id
654+
expect.objectContaining({
655+
data: expect.objectContaining({
656+
'gen_ai.operation.name': 'conversations',
657+
'sentry.op': 'gen_ai.conversations',
658+
'sentry.origin': 'auto.ai.openai',
659+
'gen_ai.system': 'openai',
660+
// The conversation ID should be captured from the response
661+
'gen_ai.conversation.id': 'conv_689667905b048191b4740501625afd940c7533ace33a2dab',
662+
}),
663+
description: 'conversations unknown',
664+
op: 'gen_ai.conversations',
665+
origin: 'auto.ai.openai',
666+
status: 'ok',
667+
}),
668+
// Second span - responses.create with conversation parameter
669+
expect.objectContaining({
670+
data: expect.objectContaining({
671+
'gen_ai.operation.name': 'responses',
672+
'sentry.op': 'gen_ai.responses',
673+
'sentry.origin': 'auto.ai.openai',
674+
'gen_ai.system': 'openai',
675+
'gen_ai.request.model': 'gpt-4',
676+
// The conversation ID should be captured from the request
677+
'gen_ai.conversation.id': 'conv_689667905b048191b4740501625afd940c7533ace33a2dab',
678+
}),
679+
op: 'gen_ai.responses',
680+
origin: 'auto.ai.openai',
681+
status: 'ok',
682+
}),
683+
// Third span - responses.create without conversation (first in chain, should NOT have gen_ai.conversation.id)
684+
expect.objectContaining({
685+
data: expect.not.objectContaining({
686+
'gen_ai.conversation.id': expect.anything(),
687+
}),
688+
op: 'gen_ai.responses',
689+
origin: 'auto.ai.openai',
690+
status: 'ok',
691+
}),
692+
// Fourth span - responses.create with previous_response_id (chaining)
693+
expect.objectContaining({
694+
data: expect.objectContaining({
695+
'gen_ai.operation.name': 'responses',
696+
'sentry.op': 'gen_ai.responses',
697+
'sentry.origin': 'auto.ai.openai',
698+
'gen_ai.system': 'openai',
699+
'gen_ai.request.model': 'gpt-4',
700+
// The previous_response_id should be captured as conversation.id
701+
'gen_ai.conversation.id': 'resp_mock_conv_123',
702+
}),
703+
op: 'gen_ai.responses',
704+
origin: 'auto.ai.openai',
705+
status: 'ok',
706+
}),
707+
]),
708+
};
709+
710+
createEsmAndCjsTests(__dirname, 'scenario-conversation.mjs', 'instrument.mjs', (createRunner, test) => {
711+
test('captures conversation ID from Conversations API and previous_response_id', async () => {
712+
await createRunner()
713+
.ignore('event')
714+
.expect({ transaction: EXPECTED_TRANSACTION_CONVERSATION })
715+
.start()
716+
.completed();
717+
});
718+
});
648719
});

packages/core/src/tracing/ai/gen-ai-attributes.ts

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -154,6 +154,13 @@ export const GEN_AI_AGENT_NAME_ATTRIBUTE = 'gen_ai.agent.name';
154154
*/
155155
export const GEN_AI_PIPELINE_NAME_ATTRIBUTE = 'gen_ai.pipeline.name';
156156

157+
/**
158+
* The conversation ID for linking messages across API calls
159+
* For OpenAI Assistants API: thread_id
160+
* For LangGraph: configurable.thread_id
161+
*/
162+
export const GEN_AI_CONVERSATION_ID_ATTRIBUTE = 'gen_ai.conversation.id';
163+
157164
/**
158165
* The number of cache creation input tokens used
159166
*/
@@ -254,6 +261,7 @@ export const OPENAI_OPERATIONS = {
254261
CHAT: 'chat',
255262
RESPONSES: 'responses',
256263
EMBEDDINGS: 'embeddings',
264+
CONVERSATIONS: 'conversations',
257265
} as const;
258266

259267
// =============================================================================

packages/core/src/tracing/langgraph/index.ts

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@ import { SEMANTIC_ATTRIBUTE_SENTRY_OP, SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN } from '
33
import { SPAN_STATUS_ERROR } from '../../tracing';
44
import {
55
GEN_AI_AGENT_NAME_ATTRIBUTE,
6+
GEN_AI_CONVERSATION_ID_ATTRIBUTE,
67
GEN_AI_INVOKE_AGENT_OPERATION_ATTRIBUTE,
78
GEN_AI_OPERATION_NAME_ATTRIBUTE,
89
GEN_AI_PIPELINE_NAME_ATTRIBUTE,
@@ -113,6 +114,15 @@ function instrumentCompiledGraphInvoke(
113114
span.updateName(`invoke_agent ${graphName}`);
114115
}
115116

117+
// Extract thread_id from the config (second argument)
118+
// LangGraph uses config.configurable.thread_id for conversation/session linking
119+
const config = args.length > 1 ? (args[1] as Record<string, unknown> | undefined) : undefined;
120+
const configurable = config?.configurable as Record<string, unknown> | undefined;
121+
const threadId = configurable?.thread_id;
122+
if (threadId && typeof threadId === 'string') {
123+
span.setAttribute(GEN_AI_CONVERSATION_ID_ATTRIBUTE, threadId);
124+
}
125+
116126
// Extract available tools from the graph instance
117127
const tools = extractToolsFromCompiledGraph(graphInstance);
118128
if (tools) {

packages/core/src/tracing/openai/constants.ts

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,15 @@ export const OPENAI_INTEGRATION_NAME = 'OpenAI';
22

33
// https://platform.openai.com/docs/quickstart?api-mode=responses
44
// https://platform.openai.com/docs/quickstart?api-mode=chat
5-
export const INSTRUMENTED_METHODS = ['responses.create', 'chat.completions.create', 'embeddings.create'] as const;
5+
// https://platform.openai.com/docs/api-reference/conversations
6+
export const INSTRUMENTED_METHODS = [
7+
'responses.create',
8+
'chat.completions.create',
9+
'embeddings.create',
10+
// Conversations API - for conversation state management
11+
// https://platform.openai.com/docs/guides/conversation-state
12+
'conversations.create',
13+
] as const;
614
export const RESPONSES_TOOL_CALL_EVENT_TYPES = [
715
'response.output_item.added',
816
'response.function_call_arguments.delta',

0 commit comments

Comments (0)