 import com.openai.core.http.Headers;
 import com.openai.core.http.HttpResponse;
 import com.openai.helpers.ChatCompletionAccumulator;
+import com.openai.models.Reasoning;
 import com.openai.models.ResponsesModel;
 import com.openai.models.chat.completions.ChatCompletion;
 import com.openai.models.chat.completions.ChatCompletionChunk;
 import com.openai.models.responses.ResponseOutputItem;
 import com.openai.models.responses.ResponseOutputMessage;
 import com.openai.models.responses.ResponseOutputText;
+import com.openai.models.responses.ResponseReasoningItem;
 import com.openai.models.responses.ResponseStreamEvent;
+import datadog.json.JsonWriter;
 import datadog.trace.api.DDSpanId;
 import datadog.trace.api.llmobs.LLMObs;
 import datadog.trace.api.llmobs.LLMObsContext;
@@ -366,21 +369,15 @@ public void withResponseCreateParams(AgentSpan span, ResponseCreateParams params
     String modelName = extractResponseModel(params._model());
     span.setTag(REQUEST_MODEL, modelName);
 
-    // Set model_name and model_provider as fallback (will be overridden by withResponse if called)
-    // span.setTag("_ml_obs_tag.model_name", modelName);
-    // span.setTag("_ml_obs_tag.model_provider", "openai");
-
     List<LLMObs.LLMMessage> inputMessages = new ArrayList<>();
 
-    // Add instructions as system message first (if present)
     params
         .instructions()
         .ifPresent(
             instructions -> {
               inputMessages.add(LLMObs.LLMMessage.from("system", instructions));
             });
 
-    // Add user input message
     Optional<String> textOpt = params._input().asString();
     if (textOpt.isPresent()) {
       inputMessages.add(LLMObs.LLMMessage.from("user", textOpt.get()));
@@ -389,6 +386,43 @@ public void withResponseCreateParams(AgentSpan span, ResponseCreateParams params
     if (!inputMessages.isEmpty()) {
       span.setTag("_ml_obs_tag.input", inputMessages);
     }
+
+    extractReasoningFromParams(params)
+        .ifPresent(reasoningMap -> span.setTag("_ml_obs_request.reasoning", reasoningMap));
+  }
+
+  private Optional<Map<String, String>> extractReasoningFromParams(ResponseCreateParams params) {
+    com.openai.core.JsonField<Reasoning> reasoningField = params._reasoning();
+    if (reasoningField.isMissing()) {
+      return Optional.empty();
+    }
+
+    Map<String, String> reasoningMap = new HashMap<>();
+
+    Optional<Reasoning> knownReasoning = reasoningField.asKnown();
+    if (knownReasoning.isPresent()) {
+      Reasoning reasoning = knownReasoning.get();
+      reasoning.effort().ifPresent(effort -> reasoningMap.put("effort", effort.asString()));
+      reasoning.summary().ifPresent(summary -> reasoningMap.put("summary", summary.asString()));
+    } else {
+      Optional<Map<String, com.openai.core.JsonValue>> rawObject = reasoningField.asObject();
+      if (rawObject.isPresent()) {
+        Map<String, com.openai.core.JsonValue> obj = rawObject.get();
+        com.openai.core.JsonValue effortVal = obj.get("effort");
+        if (effortVal != null) {
+          effortVal.asString().ifPresent(v -> reasoningMap.put("effort", String.valueOf(v)));
+        }
+        com.openai.core.JsonValue summaryVal = obj.get("summary");
+        if (summaryVal == null) {
+          summaryVal = obj.get("generate_summary");
+        }
+        if (summaryVal != null) {
+          summaryVal.asString().ifPresent(v -> reasoningMap.put("summary", String.valueOf(v)));
+        }
+      }
+    }
+
+    return reasoningMap.isEmpty() ? Optional.empty() : Optional.of(reasoningMap);
   }
 
   public void withResponse(AgentSpan span, Response response) {
@@ -423,11 +457,15 @@ private void withResponse(AgentSpan span, Response response, boolean stream) {
 
     Map<String, Object> metadata = new HashMap<>();
 
+    Object reasoningTag = span.getTag("_ml_obs_request.reasoning");
+    if (reasoningTag != null) {
+      metadata.put("reasoning", reasoningTag);
+    }
+
     response.maxOutputTokens().ifPresent(v -> metadata.put("max_output_tokens", v));
     response.temperature().ifPresent(v -> metadata.put("temperature", v));
     response.topP().ifPresent(v -> metadata.put("top_p", v));
 
-    // Extract tool_choice as string
     Response.ToolChoice toolChoice = response.toolChoice();
     if (toolChoice.isOptions()) {
       metadata.put("tool_choice", toolChoice.asOptions()._value().asString().orElse(null));
@@ -437,14 +475,12 @@ private void withResponse(AgentSpan span, Response response, boolean stream) {
       metadata.put("tool_choice", "function");
     }
 
-    // Extract truncation as string
     response
         .truncation()
         .ifPresent(
             (Response.Truncation t) ->
                 metadata.put("truncation", t._value().asString().orElse(null)));
 
-    // Extract text format
     response
         .text()
         .ifPresent(
@@ -491,24 +527,35 @@ private void withResponse(AgentSpan span, Response response, boolean stream) {
 
   private List<LLMObs.LLMMessage> extractResponseOutputMessages(List<ResponseOutputItem> output) {
     List<LLMObs.LLMMessage> messages = new ArrayList<>();
-    List<LLMObs.ToolCall> toolCalls = new ArrayList<>();
-    String textContent = null;
 
     for (ResponseOutputItem item : output) {
       if (item.isFunctionCall()) {
         ResponseFunctionToolCall functionCall = item.asFunctionCall();
         LLMObs.ToolCall toolCall = ToolCallExtractor.getToolCall(functionCall);
         if (toolCall != null) {
-          toolCalls.add(toolCall);
+          List<LLMObs.ToolCall> toolCalls = Collections.singletonList(toolCall);
+          messages.add(LLMObs.LLMMessage.from("assistant", null, toolCalls));
         }
       } else if (item.isMessage()) {
         ResponseOutputMessage message = item.asMessage();
-        textContent = extractMessageContent(message);
+        String textContent = extractMessageContent(message);
+        Optional<String> roleOpt = message._role().asString();
+        String role = roleOpt.orElse("assistant");
+        messages.add(LLMObs.LLMMessage.from(role, textContent));
+      } else if (item.isReasoning()) {
+        ResponseReasoningItem reasoning = item.asReasoning();
+        try (JsonWriter writer = new JsonWriter()) {
+          writer.beginObject();
+          if (!reasoning.summary().isEmpty()) {
+            writer.name("summary").value(reasoning.summary().get(0).text());
+          }
+          reasoning.encryptedContent().ifPresent(v -> writer.name("encrypted_content").value(v));
+          writer.name("id").value(reasoning.id());
+          writer.endObject();
+          messages.add(LLMObs.LLMMessage.from("reasoning", writer.toString()));
+        }
       }
     }
-
-    messages.add(LLMObs.LLMMessage.from("assistant", textContent, toolCalls));
-
     return messages;
   }
 
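Note: below is a minimal sketch, not part of the commit, of the JSON content produced for a reasoning output item by the JsonWriter calls in the last hunk. The summary text, encrypted content, and item id are hypothetical placeholders; only JsonWriter methods already used in the diff are assumed.

import datadog.json.JsonWriter;

class ReasoningMessageSketch {
  // Mirrors the serialization in extractResponseOutputMessages: one JSON object with
  // "summary", "encrypted_content" (when present), and "id", used as the content of an
  // LLMObs message with role "reasoning".
  static String reasoningContent() {
    try (JsonWriter writer = new JsonWriter()) {
      writer.beginObject();
      writer.name("summary").value("Compared both tools before answering."); // hypothetical summary text
      writer.name("encrypted_content").value("gAAAAB...");                   // hypothetical opaque blob
      writer.name("id").value("rs_123");                                     // hypothetical reasoning item id
      writer.endObject();
      // -> {"summary":"Compared both tools before answering.","encrypted_content":"gAAAAB...","id":"rs_123"}
      return writer.toString();
    }
  }
}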