10
10
import com .openai .models .embeddings .CreateEmbeddingResponse ;
11
11
import com .openai .models .embeddings .Embedding ;
12
12
import com .openai .models .embeddings .EmbeddingCreateParams ;
13
+ import datadog .trace .api .Config ;
14
+ import datadog .trace .api .llmobs .LLMObs ;
15
+ import datadog .trace .api .llmobs .LLMObsSpan ;
13
16
import datadog .trace .bootstrap .instrumentation .api .AgentScope ;
14
17
import datadog .trace .bootstrap .instrumentation .api .AgentSpan ;
15
18
import datadog .trace .bootstrap .instrumentation .api .InternalSpanTypes ;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
21
24
22
25
public class OpenAIClientDecorator extends ClientDecorator {
26
+ private static final String mlApp = Config .get ().getLlmObsMlApp ();
27
+ private static final String mlProvider = "openai" ;
23
28
private static final String COMPONENT_NAME = "openai" ;
24
29
private static final UTF8BytesString OPENAI_REQUEST = UTF8BytesString .create ("openai.request" );
25
30
@@ -42,14 +47,14 @@ protected CharSequence spanType() {
42
47
43
48
  @Override
  protected String service() {
    // Returning null means the tracer falls back to its default service name
    // rather than tagging OpenAI spans with a dedicated service.
    return null;
  }
47
52
48
53
public AgentScope startChatCompletionSpan (ChatCompletionCreateParams params ) {
49
54
AgentSpan span = startSpan (OPENAI_REQUEST );
50
55
span .setTag ("openai.request.endpoint" , "/chat/completions" );
51
56
span .setResourceName ("chat.completions.create" );
52
- span .setTag ("openai.provider" , "openai" );
57
+ span .setTag ("openai.request. provider" , "openai" );
53
58
extractChatCompletionRequestData (span , params );
54
59
afterStart (span );
55
60
return activateSpan (span );
@@ -59,7 +64,7 @@ public AgentScope startLLMChatCompletionSpan(ChatCompletionCreateParams params)
59
64
AgentSpan span = startSpan (OPENAI_REQUEST );
60
65
span .setTag ("openai.request.endpoint" , "/chat/completions" );
61
66
span .setResourceName ("chat.completions.create" );
62
- span .setTag ("openai.provider" , "openai" );
67
+ span .setTag ("openai.request. provider" , "openai" );
63
68
extractChatCompletionRequestData (span , params );
64
69
afterStart (span );
65
70
return activateSpan (span );
@@ -88,6 +93,115 @@ public AgentScope startEmbeddingSpan(EmbeddingCreateParams params) {
88
93
return activateSpan (span );
89
94
}
90
95
96
+ public LLMObsSpan startLLMObsChatCompletionSpan (ChatCompletionCreateParams params ) {
97
+ String modelName = params .model ().toString ();
98
+
99
+ LLMObsSpan llmObsSpan =
100
+ LLMObs .startLLMSpan ("chat_completion" , modelName , mlProvider , mlApp , null );
101
+
102
+ // Extract and set input data from chat completion params
103
+ // May need to be reformatted
104
+ StringBuilder inputData = new StringBuilder ();
105
+ List <ChatCompletionMessageParam > messages = params .messages ();
106
+ for (int i = 0 ; i < messages .size (); i ++) {
107
+ ChatCompletionMessageParam messageParam = messages .get (i );
108
+ if (i > 0 ) {
109
+ inputData .append (" | " );
110
+ }
111
+
112
+ if (messageParam .isUser ()) {
113
+ inputData .append ("User: " ).append (messageParam .asUser ().content ());
114
+ } else if (messageParam .isAssistant ()) {
115
+ inputData .append ("Assistant: " ).append (messageParam .asAssistant ().content ());
116
+ } else if (messageParam .isDeveloper ()) {
117
+ inputData .append ("Developer: " ).append (messageParam .asDeveloper ().content ());
118
+ } else if (messageParam .isSystem ()) {
119
+ inputData .append ("System: " ).append (messageParam .asSystem ().content ());
120
+ } else if (messageParam .isTool ()) {
121
+ inputData .append ("Tool: " ).append (messageParam .asTool ().content ());
122
+ }
123
+ }
124
+
125
+ if (inputData .length () > 0 ) {
126
+ llmObsSpan .annotateIO (inputData .toString (), null ); // No output yet, will be set in response
127
+ }
128
+
129
+ java .util .Map <String , Object > metadata = new java .util .HashMap <>();
130
+ metadata .put ("endpoint" , "/chat/completions" );
131
+ metadata .put ("provider" , "openai" );
132
+ metadata .put ("model" , modelName );
133
+
134
+ params .maxTokens ().ifPresent (tokens -> metadata .put ("max_tokens" , tokens ));
135
+ params .temperature ().ifPresent (temp -> metadata .put ("temperature" , temp ));
136
+
137
+ llmObsSpan .setMetadata (metadata );
138
+
139
+ return llmObsSpan ;
140
+ }
141
+
142
+ public void finishLLMObsChatCompletionSpan (
143
+ LLMObsSpan llmObsSpan , ChatCompletion response , Throwable throwable ) {
144
+ try {
145
+ if (throwable != null ) {
146
+ // Set error information
147
+ java .util .Map <String , Object > errorMetadata = new java .util .HashMap <>();
148
+ errorMetadata .put ("error.type" , throwable .getClass ().getSimpleName ());
149
+ errorMetadata .put ("error.message" , throwable .getMessage ());
150
+ llmObsSpan .setMetadata (errorMetadata );
151
+ } else if (response != null ) {
152
+ StringBuilder outputData = new StringBuilder ();
153
+ List <ChatCompletion .Choice > choices = response .choices ();
154
+
155
+ for (int i = 0 ; i < choices .size (); i ++) {
156
+ ChatCompletion .Choice choice = choices .get (i );
157
+ ChatCompletionMessage message = choice .message ();
158
+
159
+ if (i > 0 ) {
160
+ outputData .append (" | " );
161
+ }
162
+
163
+ // Extract content
164
+ Optional <String > content = message .content ();
165
+ content .ifPresent (s -> outputData .append ("Assistant: " ).append (s ));
166
+
167
+ // Extract tool calls if present
168
+ Optional <List <ChatCompletionMessageToolCall >> toolCalls = message .toolCalls ();
169
+ if (toolCalls .isPresent () && !toolCalls .get ().isEmpty ()) {
170
+ content .ifPresent (s -> outputData .append (" | " ));
171
+ outputData .append ("Tool calls: " );
172
+ for (int j = 0 ; j < toolCalls .get ().size (); j ++) {
173
+ ChatCompletionMessageToolCall call = toolCalls .get ().get (j );
174
+ if (j > 0 ) {
175
+ outputData .append (", " );
176
+ }
177
+ outputData
178
+ .append (call .function ().name ())
179
+ .append ("(" )
180
+ .append (call .function ().arguments ())
181
+ .append (")" );
182
+ }
183
+ }
184
+ }
185
+
186
+ if (outputData .length () > 0 ) {
187
+ llmObsSpan .annotateIO (null , outputData .toString ());
188
+ }
189
+ java .util .Map <String , Object > responseMetadata = new java .util .HashMap <>();
190
+ responseMetadata .put ("response.choices_count" , choices .size ());
191
+
192
+ llmObsSpan .setMetadata (responseMetadata );
193
+ }
194
+ } catch (Exception e ) {
195
+ java .util .Map <String , Object > errorMetadata = new java .util .HashMap <>();
196
+ errorMetadata .put ("error.type" , "ResponseProcessingError" );
197
+ errorMetadata .put ("error.message" , "Failed to process response: " + e .getMessage ());
198
+ llmObsSpan .setMetadata (errorMetadata );
199
+ } finally {
200
+ // Always finish the span
201
+ llmObsSpan .finish ();
202
+ }
203
+ }
204
+
91
205
public void finishSpan (AgentScope scope , Object result , Throwable throwable ) {
92
206
93
207
AgentSpan span = scope .span ();
@@ -107,8 +221,6 @@ public void finishSpan(AgentScope scope, Object result, Throwable throwable) {
107
221
108
222
private void extractChatCompletionRequestData (AgentSpan span , ChatCompletionCreateParams params ) {
109
223
110
- // Extract model
111
-
112
224
span .setTag ("openai.model.name" , params .model ().toString ());
113
225
114
226
// Extract messages
@@ -124,11 +236,7 @@ private void extractChatCompletionRequestData(AgentSpan span, ChatCompletionCrea
124
236
125
237
private void extractCompletionRequestData (
126
238
AgentSpan span , CompletionCreateParams completionParams ) {
127
- // Extract model
128
- CompletionCreateParams .Model model = completionParams .model ();
129
- if (model != null ) {
130
- span .setTag ("openai.model.name" , model .toString ());
131
- }
239
+ span .setTag ("openai.model.name" , completionParams .model ().toString ());
132
240
133
241
// Extract prompt
134
242
Optional <CompletionCreateParams .Prompt > prompt = completionParams .prompt ();
@@ -156,18 +264,13 @@ private void extractCompletionRequestData(
156
264
157
265
private void extractEmbeddingRequestData (AgentSpan span , EmbeddingCreateParams embeddingParams ) {
158
266
159
- // Extract model
160
- Object model = embeddingParams .model ();
161
- if (model != null ) {
162
- span .setTag ("openai.model.name" , model .toString ());
163
- }
164
-
267
+ span .setTag ("openai.model.name" , embeddingParams .model ().toString ());
165
268
// Extract input
166
269
EmbeddingCreateParams .Input input = embeddingParams .input ();
167
270
int inputIndex = 0 ;
168
271
List <String > inputStrings = input .asArrayOfStrings ();
169
272
for (String inputItem : inputStrings ) {
170
- span .setTag ("openai.request.input." + inputIndex , input . asString () );
273
+ span .setTag ("openai.request.input." + inputIndex , inputItem );
171
274
inputIndex ++;
172
275
}
173
276
}
@@ -205,25 +308,19 @@ private void extractMessageData(
205
308
206
309
private void extractChatCompletionParameters (AgentSpan span , ChatCompletionCreateParams params ) {
207
310
// Extract max_tokens
208
- Optional <Long > maxTokens = params .maxTokens ();
209
- maxTokens .ifPresent (tokens -> span .setTag ("openai.request.max_tokens" , tokens ));
311
+ params
312
+ .maxCompletionTokens ()
313
+ .ifPresent (tokens -> span .setTag ("openai.request.max_tokens" , tokens ));
210
314
211
315
// Extract temperature
212
- Optional <Double > temperature = params .temperature ();
213
- temperature .ifPresent (temp -> span .setTag ("openai.request.temperature" , temp ));
316
+ params .temperature ().ifPresent (temp -> span .setTag ("openai.request.temperature" , temp ));
214
317
}
215
318
216
319
private void extractCompletionParameters (AgentSpan span , CompletionCreateParams params ) {
217
320
// Extract max_tokens
218
- if (params .maxTokens ().isPresent ()) {
219
- span .setTag ("openai.request.max_tokens" , params .maxTokens ().get ());
220
- }
221
-
321
+ params .maxTokens ().ifPresent (tokens -> span .setTag ("openai.request.max_tokens" , tokens ));
222
322
// Extract temperature
223
- Optional <Double > temperature = params .temperature ();
224
- if (temperature .isPresent ()) {
225
- span .setTag ("openai.request.temperature" , temperature .get ());
226
- }
323
+ params .temperature ().ifPresent (temp -> span .setTag ("openai.request.temperature" , temp ));
227
324
}
228
325
229
326
private void extractResponseData (AgentSpan span , Object result ) {
0 commit comments