import com.openai.models.embeddings.CreateEmbeddingResponse;
import com.openai.models.embeddings.Embedding;
import com.openai.models.embeddings.EmbeddingCreateParams;
import datadog.trace.api.Config;
import datadog.trace.api.llmobs.LLMObs;
import datadog.trace.api.llmobs.LLMObsSpan;
import datadog.trace.bootstrap.instrumentation.api.AgentScope;
import datadog.trace.bootstrap.instrumentation.api.AgentSpan;
import datadog.trace.bootstrap.instrumentation.api.InternalSpanTypes;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
2124
2225public class OpenAIClientDecorator extends ClientDecorator {
26+ private static final String mlApp = Config .get ().getLlmObsMlApp ();
27+ private static final String mlProvider = "openai" ;
2328 private static final String COMPONENT_NAME = "openai" ;
2429 private static final UTF8BytesString OPENAI_REQUEST = UTF8BytesString .create ("openai.request" );
2530
@@ -42,14 +47,14 @@ protected CharSequence spanType() {
4247
4348 @ Override
4449 protected String service () {
45- return null ; // Use default service name
50+ return null ;
4651 }
4752
4853 public AgentScope startChatCompletionSpan (ChatCompletionCreateParams params ) {
4954 AgentSpan span = startSpan (OPENAI_REQUEST );
5055 span .setTag ("openai.request.endpoint" , "/chat/completions" );
5156 span .setResourceName ("chat.completions.create" );
52- span .setTag ("openai.provider" , "openai" );
57+ span .setTag ("openai.request. provider" , "openai" );
5358 extractChatCompletionRequestData (span , params );
5459 afterStart (span );
5560 return activateSpan (span );
@@ -59,7 +64,7 @@ public AgentScope startLLMChatCompletionSpan(ChatCompletionCreateParams params)
5964 AgentSpan span = startSpan (OPENAI_REQUEST );
6065 span .setTag ("openai.request.endpoint" , "/chat/completions" );
6166 span .setResourceName ("chat.completions.create" );
62- span .setTag ("openai.provider" , "openai" );
67+ span .setTag ("openai.request. provider" , "openai" );
6368 extractChatCompletionRequestData (span , params );
6469 afterStart (span );
6570 return activateSpan (span );
@@ -88,6 +93,115 @@ public AgentScope startEmbeddingSpan(EmbeddingCreateParams params) {
8893 return activateSpan (span );
8994 }
9095
96+ public LLMObsSpan startLLMObsChatCompletionSpan (ChatCompletionCreateParams params ) {
97+ String modelName = params .model ().toString ();
98+
99+ LLMObsSpan llmObsSpan =
100+ LLMObs .startLLMSpan ("chat_completion" , modelName , mlProvider , mlApp , null );
101+
102+ // Extract and set input data from chat completion params
103+ // May need to be reformatted
104+ StringBuilder inputData = new StringBuilder ();
105+ List <ChatCompletionMessageParam > messages = params .messages ();
106+ for (int i = 0 ; i < messages .size (); i ++) {
107+ ChatCompletionMessageParam messageParam = messages .get (i );
108+ if (i > 0 ) {
109+ inputData .append (" | " );
110+ }
111+
112+ if (messageParam .isUser ()) {
113+ inputData .append ("User: " ).append (messageParam .asUser ().content ());
114+ } else if (messageParam .isAssistant ()) {
115+ inputData .append ("Assistant: " ).append (messageParam .asAssistant ().content ());
116+ } else if (messageParam .isDeveloper ()) {
117+ inputData .append ("Developer: " ).append (messageParam .asDeveloper ().content ());
118+ } else if (messageParam .isSystem ()) {
119+ inputData .append ("System: " ).append (messageParam .asSystem ().content ());
120+ } else if (messageParam .isTool ()) {
121+ inputData .append ("Tool: " ).append (messageParam .asTool ().content ());
122+ }
123+ }
124+
125+ if (inputData .length () > 0 ) {
126+ llmObsSpan .annotateIO (inputData .toString (), null ); // No output yet, will be set in response
127+ }
128+
129+ java .util .Map <String , Object > metadata = new java .util .HashMap <>();
130+ metadata .put ("endpoint" , "/chat/completions" );
131+ metadata .put ("provider" , "openai" );
132+ metadata .put ("model" , modelName );
133+
134+ params .maxTokens ().ifPresent (tokens -> metadata .put ("max_tokens" , tokens ));
135+ params .temperature ().ifPresent (temp -> metadata .put ("temperature" , temp ));
136+
137+ llmObsSpan .setMetadata (metadata );
138+
139+ return llmObsSpan ;
140+ }
141+
142+ public void finishLLMObsChatCompletionSpan (
143+ LLMObsSpan llmObsSpan , ChatCompletion response , Throwable throwable ) {
144+ try {
145+ if (throwable != null ) {
146+ // Set error information
147+ java .util .Map <String , Object > errorMetadata = new java .util .HashMap <>();
148+ errorMetadata .put ("error.type" , throwable .getClass ().getSimpleName ());
149+ errorMetadata .put ("error.message" , throwable .getMessage ());
150+ llmObsSpan .setMetadata (errorMetadata );
151+ } else if (response != null ) {
152+ StringBuilder outputData = new StringBuilder ();
153+ List <ChatCompletion .Choice > choices = response .choices ();
154+
155+ for (int i = 0 ; i < choices .size (); i ++) {
156+ ChatCompletion .Choice choice = choices .get (i );
157+ ChatCompletionMessage message = choice .message ();
158+
159+ if (i > 0 ) {
160+ outputData .append (" | " );
161+ }
162+
163+ // Extract content
164+ Optional <String > content = message .content ();
165+ content .ifPresent (s -> outputData .append ("Assistant: " ).append (s ));
166+
167+ // Extract tool calls if present
168+ Optional <List <ChatCompletionMessageToolCall >> toolCalls = message .toolCalls ();
169+ if (toolCalls .isPresent () && !toolCalls .get ().isEmpty ()) {
170+ content .ifPresent (s -> outputData .append (" | " ));
171+ outputData .append ("Tool calls: " );
172+ for (int j = 0 ; j < toolCalls .get ().size (); j ++) {
173+ ChatCompletionMessageToolCall call = toolCalls .get ().get (j );
174+ if (j > 0 ) {
175+ outputData .append (", " );
176+ }
177+ outputData
178+ .append (call .function ().name ())
179+ .append ("(" )
180+ .append (call .function ().arguments ())
181+ .append (")" );
182+ }
183+ }
184+ }
185+
186+ if (outputData .length () > 0 ) {
187+ llmObsSpan .annotateIO (null , outputData .toString ());
188+ }
189+ java .util .Map <String , Object > responseMetadata = new java .util .HashMap <>();
190+ responseMetadata .put ("response.choices_count" , choices .size ());
191+
192+ llmObsSpan .setMetadata (responseMetadata );
193+ }
194+ } catch (Exception e ) {
195+ java .util .Map <String , Object > errorMetadata = new java .util .HashMap <>();
196+ errorMetadata .put ("error.type" , "ResponseProcessingError" );
197+ errorMetadata .put ("error.message" , "Failed to process response: " + e .getMessage ());
198+ llmObsSpan .setMetadata (errorMetadata );
199+ } finally {
200+ // Always finish the span
201+ llmObsSpan .finish ();
202+ }
203+ }
204+
91205 public void finishSpan (AgentScope scope , Object result , Throwable throwable ) {
92206
93207 AgentSpan span = scope .span ();
@@ -107,8 +221,6 @@ public void finishSpan(AgentScope scope, Object result, Throwable throwable) {
107221
108222 private void extractChatCompletionRequestData (AgentSpan span , ChatCompletionCreateParams params ) {
109223
110- // Extract model
111-
112224 span .setTag ("openai.model.name" , params .model ().toString ());
113225
114226 // Extract messages
@@ -124,11 +236,7 @@ private void extractChatCompletionRequestData(AgentSpan span, ChatCompletionCrea
124236
125237 private void extractCompletionRequestData (
126238 AgentSpan span , CompletionCreateParams completionParams ) {
127- // Extract model
128- CompletionCreateParams .Model model = completionParams .model ();
129- if (model != null ) {
130- span .setTag ("openai.model.name" , model .toString ());
131- }
239+ span .setTag ("openai.model.name" , completionParams .model ().toString ());
132240
133241 // Extract prompt
134242 Optional <CompletionCreateParams .Prompt > prompt = completionParams .prompt ();
@@ -156,18 +264,13 @@ private void extractCompletionRequestData(
156264
157265 private void extractEmbeddingRequestData (AgentSpan span , EmbeddingCreateParams embeddingParams ) {
158266
159- // Extract model
160- Object model = embeddingParams .model ();
161- if (model != null ) {
162- span .setTag ("openai.model.name" , model .toString ());
163- }
164-
267+ span .setTag ("openai.model.name" , embeddingParams .model ().toString ());
165268 // Extract input
166269 EmbeddingCreateParams .Input input = embeddingParams .input ();
167270 int inputIndex = 0 ;
168271 List <String > inputStrings = input .asArrayOfStrings ();
169272 for (String inputItem : inputStrings ) {
170- span .setTag ("openai.request.input." + inputIndex , input . asString () );
273+ span .setTag ("openai.request.input." + inputIndex , inputItem );
171274 inputIndex ++;
172275 }
173276 }
@@ -205,25 +308,19 @@ private void extractMessageData(
205308
206309 private void extractChatCompletionParameters (AgentSpan span , ChatCompletionCreateParams params ) {
207310 // Extract max_tokens
208- Optional <Long > maxTokens = params .maxTokens ();
209- maxTokens .ifPresent (tokens -> span .setTag ("openai.request.max_tokens" , tokens ));
311+ params
312+ .maxCompletionTokens ()
313+ .ifPresent (tokens -> span .setTag ("openai.request.max_tokens" , tokens ));
210314
211315 // Extract temperature
212- Optional <Double > temperature = params .temperature ();
213- temperature .ifPresent (temp -> span .setTag ("openai.request.temperature" , temp ));
316+ params .temperature ().ifPresent (temp -> span .setTag ("openai.request.temperature" , temp ));
214317 }
215318
216319 private void extractCompletionParameters (AgentSpan span , CompletionCreateParams params ) {
217320 // Extract max_tokens
218- if (params .maxTokens ().isPresent ()) {
219- span .setTag ("openai.request.max_tokens" , params .maxTokens ().get ());
220- }
221-
321+ params .maxTokens ().ifPresent (tokens -> span .setTag ("openai.request.max_tokens" , tokens ));
222322 // Extract temperature
223- Optional <Double > temperature = params .temperature ();
224- if (temperature .isPresent ()) {
225- span .setTag ("openai.request.temperature" , temperature .get ());
226- }
323+ params .temperature ().ifPresent (temp -> span .setTag ("openai.request.temperature" , temp ));
227324 }
228325
229326 private void extractResponseData (AgentSpan span , Object result ) {
0 commit comments