@@ -65,7 +65,7 @@ describe('OpenAI Tool Calls integration', () => {
           data: {
             'gen_ai.operation.name': 'chat',
             'sentry.op': 'gen_ai.chat',
-            'sentry.origin': 'manual',
+            'sentry.origin': 'auto.ai.openai',
             'gen_ai.system': 'openai',
             'gen_ai.request.model': 'gpt-4',
             'gen_ai.request.available_tools': WEATHER_TOOL_DEFINITION,
@@ -83,15 +83,15 @@ describe('OpenAI Tool Calls integration', () => {
           },
           description: 'chat gpt-4',
           op: 'gen_ai.chat',
-          origin: 'manual',
+          origin: 'auto.ai.openai',
           status: 'ok',
         }),
         // Second span - chat completion with tools and streaming
         expect.objectContaining({
           data: {
             'gen_ai.operation.name': 'chat',
             'sentry.op': 'gen_ai.chat',
-            'sentry.origin': 'manual',
+            'sentry.origin': 'auto.ai.openai',
             'gen_ai.system': 'openai',
             'gen_ai.request.model': 'gpt-4',
             'gen_ai.request.stream': true,
@@ -111,15 +111,15 @@ describe('OpenAI Tool Calls integration', () => {
           },
           description: 'chat gpt-4 stream-response',
           op: 'gen_ai.chat',
-          origin: 'manual',
+          origin: 'auto.ai.openai',
           status: 'ok',
         }),
         // Third span - responses API with tools (non-streaming)
         expect.objectContaining({
           data: {
             'gen_ai.operation.name': 'responses',
             'sentry.op': 'gen_ai.responses',
-            'sentry.origin': 'manual',
+            'sentry.origin': 'auto.ai.openai',
             'gen_ai.system': 'openai',
             'gen_ai.request.model': 'gpt-4',
             'gen_ai.request.available_tools': WEATHER_TOOL_DEFINITION,
@@ -137,15 +137,15 @@ describe('OpenAI Tool Calls integration', () => {
           },
           description: 'responses gpt-4',
           op: 'gen_ai.responses',
-          origin: 'manual',
+          origin: 'auto.ai.openai',
           status: 'ok',
         }),
         // Fourth span - responses API with tools and streaming
         expect.objectContaining({
           data: {
             'gen_ai.operation.name': 'responses',
             'sentry.op': 'gen_ai.responses',
-            'sentry.origin': 'manual',
+            'sentry.origin': 'auto.ai.openai',
             'gen_ai.system': 'openai',
             'gen_ai.request.model': 'gpt-4',
             'gen_ai.request.stream': true,
@@ -165,7 +165,7 @@ describe('OpenAI Tool Calls integration', () => {
           },
           description: 'responses gpt-4 stream-response',
           op: 'gen_ai.responses',
-          origin: 'manual',
+          origin: 'auto.ai.openai',
           status: 'ok',
         }),
       ]),
@@ -179,7 +179,7 @@ describe('OpenAI Tool Calls integration', () => {
           data: {
             'gen_ai.operation.name': 'chat',
             'sentry.op': 'gen_ai.chat',
-            'sentry.origin': 'manual',
+            'sentry.origin': 'auto.ai.openai',
             'gen_ai.system': 'openai',
             'gen_ai.request.model': 'gpt-4',
             'gen_ai.request.messages': '[{"role":"user","content":"What is the weather like in Paris today?"}]',
@@ -200,15 +200,15 @@ describe('OpenAI Tool Calls integration', () => {
           },
           description: 'chat gpt-4',
           op: 'gen_ai.chat',
-          origin: 'manual',
+          origin: 'auto.ai.openai',
           status: 'ok',
         }),
         // Second span - chat completion with tools and streaming with PII
         expect.objectContaining({
           data: {
             'gen_ai.operation.name': 'chat',
             'sentry.op': 'gen_ai.chat',
-            'sentry.origin': 'manual',
+            'sentry.origin': 'auto.ai.openai',
             'gen_ai.system': 'openai',
             'gen_ai.request.model': 'gpt-4',
             'gen_ai.request.stream': true,
@@ -230,15 +230,15 @@ describe('OpenAI Tool Calls integration', () => {
           },
           description: 'chat gpt-4 stream-response',
           op: 'gen_ai.chat',
-          origin: 'manual',
+          origin: 'auto.ai.openai',
           status: 'ok',
         }),
         // Third span - responses API with tools (non-streaming) with PII
         expect.objectContaining({
           data: {
             'gen_ai.operation.name': 'responses',
             'sentry.op': 'gen_ai.responses',
-            'sentry.origin': 'manual',
+            'sentry.origin': 'auto.ai.openai',
             'gen_ai.system': 'openai',
             'gen_ai.request.model': 'gpt-4',
             'gen_ai.request.messages': '[{"role":"user","content":"What is the weather like in Paris today?"}]',
@@ -258,15 +258,15 @@ describe('OpenAI Tool Calls integration', () => {
           },
           description: 'responses gpt-4',
           op: 'gen_ai.responses',
-          origin: 'manual',
+          origin: 'auto.ai.openai',
           status: 'ok',
         }),
         // Fourth span - responses API with tools and streaming with PII
         expect.objectContaining({
           data: {
             'gen_ai.operation.name': 'responses',
             'sentry.op': 'gen_ai.responses',
-            'sentry.origin': 'manual',
+            'sentry.origin': 'auto.ai.openai',
             'gen_ai.system': 'openai',
             'gen_ai.request.model': 'gpt-4',
             'gen_ai.request.stream': true,
@@ -288,7 +288,7 @@ describe('OpenAI Tool Calls integration', () => {
           },
           description: 'responses gpt-4 stream-response',
           op: 'gen_ai.responses',
-          origin: 'manual',
+          origin: 'auto.ai.openai',
           status: 'ok',
         }),
       ]),