
Commit ec29979

Fix format
1 parent 81295b9 commit ec29979

File tree: 1 file changed (+15, -13 lines)

  • dd-java-agent/instrumentation/openai-java/openai-java-1.0/src/main/java/datadog/trace/instrumentation/openai_java/OpenAiDecorator.java

dd-java-agent/instrumentation/openai-java/openai-java-1.0/src/main/java/datadog/trace/instrumentation/openai_java/OpenAiDecorator.java

Lines changed: 15 additions & 13 deletions
@@ -193,7 +193,8 @@ public void decorateWithClientOptions(AgentSpan span, ClientOptions clientOption
     // clientOptions.queryParams().values("api-version")
   }

-  public void decorateChatCompletion(AgentSpan span, ChatCompletionCreateParams params, boolean stream) {
+  public void decorateChatCompletion(
+      AgentSpan span, ChatCompletionCreateParams params, boolean stream) {
     span.setResourceName(CHAT_COMPLETIONS_CREATE);
     span.setTag("openai.request.endpoint", "v1/chat/completions");
     span.setTag("openai.request.method", "POST");
@@ -213,11 +214,14 @@ public void decorateChatCompletion(AgentSpan span, ChatCompletionCreateParams pa
     if (stream) {
       metadata.put("stream", true);
     }
-    params.streamOptions().ifPresent(v -> {
-      if (v.includeUsage().orElse(false)) {
-        metadata.put("stream_options", Collections.singletonMap("include_usage", true));
-      }
-    });
+    params
+        .streamOptions()
+        .ifPresent(
+            v -> {
+              if (v.includeUsage().orElse(false)) {
+                metadata.put("stream_options", Collections.singletonMap("include_usage", true));
+              }
+            });
     span.setTag("_ml_obs_tag.metadata", metadata);
   }
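For context, a minimal standalone sketch of what the reformatted stream-options block computes: the "stream_options" entry is added to the span metadata only when include_usage resolves to true. The StreamOptions stand-in and the buildMetadata helper below are illustrative assumptions, not the openai-java SDK types used by the instrumentation.

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

public class StreamMetadataSketch {

  // Illustrative stand-in for the SDK's stream options type (name and shape are assumptions).
  static class StreamOptions {
    private final Boolean includeUsage;

    StreamOptions(Boolean includeUsage) {
      this.includeUsage = includeUsage;
    }

    Optional<Boolean> includeUsage() {
      return Optional.ofNullable(includeUsage);
    }
  }

  // Mirrors the reformatted block: "stream_options" is recorded only when include_usage is present and true.
  static Map<String, Object> buildMetadata(boolean stream, Optional<StreamOptions> streamOptions) {
    Map<String, Object> metadata = new HashMap<>();
    if (stream) {
      metadata.put("stream", true);
    }
    streamOptions.ifPresent(
        v -> {
          if (v.includeUsage().orElse(false)) {
            metadata.put("stream_options", Collections.singletonMap("include_usage", true));
          }
        });
    return metadata;
  }

  public static void main(String[] args) {
    // Prints {stream=true, stream_options={include_usage=true}} (map ordering may vary).
    System.out.println(buildMetadata(true, Optional.of(new StreamOptions(true))));
  }
}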

@@ -253,9 +257,7 @@ public void decorateWithChatCompletion(AgentSpan span, ChatCompletion completion
     // gpt-3.5-turbo-instruct:20230824-v2c

     List<LLMObs.LLMMessage> output =
-        completion.choices().stream()
-            .map(OpenAiDecorator::llmMessage)
-            .collect(Collectors.toList());
+        completion.choices().stream().map(OpenAiDecorator::llmMessage).collect(Collectors.toList());
     span.setTag("_ml_obs_tag.output", output);

     completion.usage().ifPresent(usage -> OpenAiDecorator.annotateWithCompletionUsage(span, usage));
@@ -295,7 +297,7 @@ public void decorateWithChatCompletionChunks(AgentSpan span, List<ChatCompletion
     final int choiceNum = firstChunk.choices().size();
     // collect roles by choices by the first chunk
     String[] roles = new String[choiceNum];
-    for (int i=0; i < choiceNum; i++) {
+    for (int i = 0; i < choiceNum; i++) {
       ChatCompletionChunk.Choice choice = firstChunk.choices().get(i);
       Optional<String> role = choice.delta().role().flatMap(r -> r._value().asString());
       if (role.isPresent()) {
@@ -304,13 +306,13 @@ public void decorateWithChatCompletionChunks(AgentSpan span, List<ChatCompletion
     }
     // collect content by choices for all chunks
     StringBuilder[] contents = new StringBuilder[choiceNum];
-    for (int i=0; i < choiceNum; i++) {
+    for (int i = 0; i < choiceNum; i++) {
       contents[i] = new StringBuilder(128);
     }
     for (ChatCompletionChunk chunk : chunks) {
       // choices can be empty for the last chunk
       List<ChatCompletionChunk.Choice> choices = chunk.choices();
-      for (int i=0; i < choiceNum && i < choices.size(); i++) {
+      for (int i = 0; i < choiceNum && i < choices.size(); i++) {
         ChatCompletionChunk.Choice choice = choices.get(i);
         ChatCompletionChunk.Choice.Delta delta = choice.delta();
         delta.content().ifPresent(contents[i]::append);
@@ -319,7 +321,7 @@ public void decorateWithChatCompletionChunks(AgentSpan span, List<ChatCompletion
     }
     // build LLMMessages
     List<LLMObs.LLMMessage> llmMessages = new ArrayList<>(choiceNum);
-    for (int i=0; i < choiceNum; i++) {
+    for (int i = 0; i < choiceNum; i++) {
       llmMessages.add(LLMObs.LLMMessage.from(roles[i], contents[i].toString()));
     }
     span.setTag("_ml_obs_tag.output", llmMessages);
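The three loop-spacing hunks above all touch the same chunk-aggregation pattern: roles are read from the first streamed chunk, and content deltas are concatenated per choice index across all chunks. A minimal runnable sketch of that pattern follows; the Delta stand-in and the aggregate helper are illustrative assumptions, not the SDK's ChatCompletionChunk types.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class ChunkAggregationSketch {

  // Illustrative stand-in for one streamed choice delta (name and fields are assumptions).
  static class Delta {
    final String role;    // typically only set on the first chunk
    final String content; // may be null, e.g. on the final chunk

    Delta(String role, String content) {
      this.role = role;
      this.content = content;
    }
  }

  // Mirrors the pattern in the diff: roles from the first chunk, content appended per choice index.
  static List<String> aggregate(List<List<Delta>> chunks) {
    int choiceNum = chunks.get(0).size();
    String[] roles = new String[choiceNum];
    StringBuilder[] contents = new StringBuilder[choiceNum];
    for (int i = 0; i < choiceNum; i++) {
      roles[i] = chunks.get(0).get(i).role;
      contents[i] = new StringBuilder(128);
    }
    for (List<Delta> chunk : chunks) {
      // choices can be empty (or shorter) for the last chunk
      for (int i = 0; i < choiceNum && i < chunk.size(); i++) {
        String content = chunk.get(i).content;
        if (content != null) {
          contents[i].append(content);
        }
      }
    }
    List<String> messages = new ArrayList<>(choiceNum);
    for (int i = 0; i < choiceNum; i++) {
      messages.add(roles[i] + ": " + contents[i]);
    }
    return messages;
  }

  public static void main(String[] args) {
    List<List<Delta>> chunks =
        Arrays.asList(
            Arrays.asList(new Delta("assistant", "Hel")),
            Arrays.asList(new Delta(null, "lo")),
            Arrays.asList(new Delta(null, "!")));
    System.out.println(aggregate(chunks)); // [assistant: Hello!]
  }
}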
