
Commit df19b40

Rename context
1 parent 4b78428 commit df19b40

4 files changed: 65 additions, 55 deletions


instrumentation/openai/openai-java-1.1/library/src/main/java/io/opentelemetry/instrumentation/openai/v1_1/ChatCompletionEventsHelper.java

Lines changed: 13 additions & 6 deletions
@@ -35,7 +35,7 @@ final class ChatCompletionEventsHelper {
   private static final AttributeKey<String> EVENT_NAME = stringKey("event.name");
 
   public static void emitPromptLogEvents(
-      Context ctx,
+      Context context,
       Logger eventLogger,
       ChatCompletionCreateParams request,
       boolean captureMessageContent) {
@@ -86,7 +86,7 @@ public static void emitPromptLogEvents(
       } else {
         continue;
       }
-      newEvent(eventLogger, eventType).setContext(ctx).setBody(Value.of(body)).emit();
+      newEvent(eventLogger, eventType).setContext(context).setBody(Value.of(body)).emit();
     }
   }
 
@@ -162,7 +162,10 @@ private static String joinContentParts(List<ChatCompletionContentPartText> conte
   }
 
   public static void emitCompletionLogEvents(
-      Context ctx, Logger eventLogger, ChatCompletion completion, boolean captureMessageContent) {
+      Context context,
+      Logger eventLogger,
+      ChatCompletion completion,
+      boolean captureMessageContent) {
     for (ChatCompletion.Choice choice : completion.choices()) {
       ChatCompletionMessage choiceMsg = choice.message();
       Map<String, Value<?>> message = new HashMap<>();
@@ -181,12 +184,16 @@ public static void emitCompletionLogEvents(
                           .collect(Collectors.toList())));
               });
       emitCompletionLogEvent(
-          ctx, eventLogger, choice.index(), choice.finishReason().toString(), Value.of(message));
+          context,
+          eventLogger,
+          choice.index(),
+          choice.finishReason().toString(),
+          Value.of(message));
     }
   }
 
   public static void emitCompletionLogEvent(
-      Context ctx,
+      Context context,
       Logger eventLogger,
       long index,
       String finishReason,
@@ -195,7 +202,7 @@ public static void emitCompletionLogEvent(
     body.put("finish_reason", Value.of(finishReason));
     body.put("index", Value.of(index));
     body.put("message", eventMessageObject);
-    newEvent(eventLogger, "gen_ai.choice").setContext(ctx).setBody(Value.of(body)).emit();
+    newEvent(eventLogger, "gen_ai.choice").setContext(context).setBody(Value.of(body)).emit();
   }
 
   private static LogRecordBuilder newEvent(Logger eventLogger, String name) {
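
Note: the helper ties each gen_ai event to an explicit Context instead of relying on Context.current(). A minimal sketch of that emission path, assuming only the OpenTelemetry Logs API (Logger, LogRecordBuilder, Value) and an event.name attribute key like the EVENT_NAME field in the first hunk; EventEmitSketch and emitChoiceEvent are illustrative names, not part of this module:

import static io.opentelemetry.api.common.AttributeKey.stringKey;

import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Value;
import io.opentelemetry.api.logs.Logger;
import io.opentelemetry.context.Context;
import java.util.HashMap;
import java.util.Map;

final class EventEmitSketch {
  private static final AttributeKey<String> EVENT_NAME = stringKey("event.name");

  // Emits a "gen_ai.choice"-style event whose log record is bound to the given context,
  // so it is correlated with the span active in that context rather than Context.current().
  static void emitChoiceEvent(
      Logger eventLogger, Context context, long index, String finishReason) {
    Map<String, Value<?>> body = new HashMap<>();
    body.put("index", Value.of(index));
    body.put("finish_reason", Value.of(finishReason));
    eventLogger
        .logRecordBuilder()
        .setAttribute(EVENT_NAME, "gen_ai.choice")
        .setContext(context)
        .setBody(Value.of(body))
        .emit();
  }

  private EventEmitSketch() {}
}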

instrumentation/openai/openai-java-1.1/library/src/main/java/io/opentelemetry/instrumentation/openai/v1_1/InstrumentedChatCompletionService.java

Lines changed: 22 additions & 21 deletions
@@ -73,65 +73,66 @@ public Object invoke(Object proxy, Method method, Object[] args) throws Throwable
 
   private ChatCompletion create(
       ChatCompletionCreateParams chatCompletionCreateParams, RequestOptions requestOptions) {
-    Context parentCtx = Context.current();
-    if (!instrumenter.shouldStart(parentCtx, chatCompletionCreateParams)) {
-      return createWithLogs(parentCtx, chatCompletionCreateParams, requestOptions);
+    Context parentContext = Context.current();
+    if (!instrumenter.shouldStart(parentContext, chatCompletionCreateParams)) {
+      return createWithLogs(parentContext, chatCompletionCreateParams, requestOptions);
     }
 
-    Context ctx = instrumenter.start(parentCtx, chatCompletionCreateParams);
+    Context context = instrumenter.start(parentContext, chatCompletionCreateParams);
     ChatCompletion completion;
-    try (Scope ignored = ctx.makeCurrent()) {
-      completion = createWithLogs(ctx, chatCompletionCreateParams, requestOptions);
+    try (Scope ignored = context.makeCurrent()) {
+      completion = createWithLogs(context, chatCompletionCreateParams, requestOptions);
     } catch (Throwable t) {
-      instrumenter.end(ctx, chatCompletionCreateParams, null, t);
+      instrumenter.end(context, chatCompletionCreateParams, null, t);
       throw t;
     }
 
-    instrumenter.end(ctx, chatCompletionCreateParams, completion, null);
+    instrumenter.end(context, chatCompletionCreateParams, completion, null);
     return completion;
   }
 
   private ChatCompletion createWithLogs(
-      Context ctx,
+      Context context,
       ChatCompletionCreateParams chatCompletionCreateParams,
       RequestOptions requestOptions) {
     ChatCompletionEventsHelper.emitPromptLogEvents(
-        ctx, eventLogger, chatCompletionCreateParams, captureMessageContent);
+        context, eventLogger, chatCompletionCreateParams, captureMessageContent);
     ChatCompletion result = delegate.create(chatCompletionCreateParams, requestOptions);
     ChatCompletionEventsHelper.emitCompletionLogEvents(
-        ctx, eventLogger, result, captureMessageContent);
+        context, eventLogger, result, captureMessageContent);
     return result;
   }
 
   private StreamResponse<ChatCompletionChunk> createStreaming(
       ChatCompletionCreateParams chatCompletionCreateParams, RequestOptions requestOptions) {
-    Context parentCtx = Context.current();
-    if (!instrumenter.shouldStart(parentCtx, chatCompletionCreateParams)) {
-      return createStreamingWithLogs(parentCtx, chatCompletionCreateParams, requestOptions, false);
+    Context parentContext = Context.current();
+    if (!instrumenter.shouldStart(parentContext, chatCompletionCreateParams)) {
+      return createStreamingWithLogs(
+          parentContext, chatCompletionCreateParams, requestOptions, false);
     }
 
-    Context ctx = instrumenter.start(parentCtx, chatCompletionCreateParams);
-    try (Scope ignored = ctx.makeCurrent()) {
-      return createStreamingWithLogs(ctx, chatCompletionCreateParams, requestOptions, true);
+    Context context = instrumenter.start(parentContext, chatCompletionCreateParams);
+    try (Scope ignored = context.makeCurrent()) {
+      return createStreamingWithLogs(context, chatCompletionCreateParams, requestOptions, true);
     } catch (Throwable t) {
-      instrumenter.end(ctx, chatCompletionCreateParams, null, t);
+      instrumenter.end(context, chatCompletionCreateParams, null, t);
       throw t;
     }
   }
 
   private StreamResponse<ChatCompletionChunk> createStreamingWithLogs(
-      Context ctx,
+      Context context,
       ChatCompletionCreateParams chatCompletionCreateParams,
       RequestOptions requestOptions,
       boolean newSpan) {
     ChatCompletionEventsHelper.emitPromptLogEvents(
-        ctx, eventLogger, chatCompletionCreateParams, captureMessageContent);
+        context, eventLogger, chatCompletionCreateParams, captureMessageContent);
     StreamResponse<ChatCompletionChunk> result =
         delegate.createStreaming(chatCompletionCreateParams, requestOptions);
     return new TracingStreamedResponse(
         result,
         new StreamListener(
-            ctx,
+            context,
             chatCompletionCreateParams,
             instrumenter,
             eventLogger,
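
Note: the renamed variables follow the usual Instrumenter flow: parentContext is whatever context is current before the call, context belongs to the span started for it, the span is made current around the delegate call, and it is ended exactly once on both the success and error paths. A hypothetical, generic sketch of that flow (InstrumentedCallSketch is an illustrative name, not part of this instrumentation):

import io.opentelemetry.context.Context;
import io.opentelemetry.context.Scope;
import io.opentelemetry.instrumentation.api.instrumenter.Instrumenter;
import java.util.function.Function;

final class InstrumentedCallSketch<REQUEST, RESPONSE> {
  private final Instrumenter<REQUEST, RESPONSE> instrumenter;

  InstrumentedCallSketch(Instrumenter<REQUEST, RESPONSE> instrumenter) {
    this.instrumenter = instrumenter;
  }

  RESPONSE call(REQUEST request, Function<REQUEST, RESPONSE> delegate) {
    Context parentContext = Context.current();
    if (!instrumenter.shouldStart(parentContext, request)) {
      // Suppressed (for example, a nested client call): run the delegate without a new span.
      return delegate.apply(request);
    }

    Context context = instrumenter.start(parentContext, request);
    RESPONSE response;
    try (Scope ignored = context.makeCurrent()) {
      response = delegate.apply(request);
    } catch (Throwable t) {
      instrumenter.end(context, request, null, t);
      throw t;
    }
    instrumenter.end(context, request, response, null);
    return response;
  }
}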

instrumentation/openai/openai-java-1.1/library/src/main/java/io/opentelemetry/instrumentation/openai/v1_1/InstrumentedChatCompletionServiceAsync.java

Lines changed: 24 additions & 22 deletions
@@ -75,69 +75,71 @@ public Object invoke(Object proxy, Method method, Object[] args) throws Throwable
 
   private CompletableFuture<ChatCompletion> create(
       ChatCompletionCreateParams chatCompletionCreateParams, RequestOptions requestOptions) {
-    Context parentCtx = Context.current();
-    if (!instrumenter.shouldStart(parentCtx, chatCompletionCreateParams)) {
-      return createWithLogs(parentCtx, chatCompletionCreateParams, requestOptions);
+    Context parentContext = Context.current();
+    if (!instrumenter.shouldStart(parentContext, chatCompletionCreateParams)) {
+      return createWithLogs(parentContext, chatCompletionCreateParams, requestOptions);
     }
 
-    Context ctx = instrumenter.start(parentCtx, chatCompletionCreateParams);
+    Context context = instrumenter.start(parentContext, chatCompletionCreateParams);
     CompletableFuture<ChatCompletion> future;
-    try (Scope ignored = ctx.makeCurrent()) {
-      future = createWithLogs(ctx, chatCompletionCreateParams, requestOptions);
+    try (Scope ignored = context.makeCurrent()) {
+      future = createWithLogs(context, chatCompletionCreateParams, requestOptions);
     } catch (Throwable t) {
-      instrumenter.end(ctx, chatCompletionCreateParams, null, t);
+      instrumenter.end(context, chatCompletionCreateParams, null, t);
       throw t;
     }
 
     future =
-        future.whenComplete((res, t) -> instrumenter.end(ctx, chatCompletionCreateParams, res, t));
-    return CompletableFutureWrapper.wrap(future, ctx);
+        future.whenComplete(
+            (res, t) -> instrumenter.end(context, chatCompletionCreateParams, res, t));
+    return CompletableFutureWrapper.wrap(future, context);
   }
 
   private CompletableFuture<ChatCompletion> createWithLogs(
-      Context ctx,
+      Context context,
       ChatCompletionCreateParams chatCompletionCreateParams,
       RequestOptions requestOptions) {
     ChatCompletionEventsHelper.emitPromptLogEvents(
-        ctx, eventLogger, chatCompletionCreateParams, captureMessageContent);
+        context, eventLogger, chatCompletionCreateParams, captureMessageContent);
     CompletableFuture<ChatCompletion> future =
         delegate.create(chatCompletionCreateParams, requestOptions);
     future.thenAccept(
         r ->
             ChatCompletionEventsHelper.emitCompletionLogEvents(
-                ctx, eventLogger, r, captureMessageContent));
+                context, eventLogger, r, captureMessageContent));
     return future;
   }
 
   private AsyncStreamResponse<ChatCompletionChunk> createStreaming(
       ChatCompletionCreateParams chatCompletionCreateParams, RequestOptions requestOptions) {
-    Context parentCtx = Context.current();
-    if (!instrumenter.shouldStart(parentCtx, chatCompletionCreateParams)) {
-      return createStreamingWithLogs(parentCtx, chatCompletionCreateParams, requestOptions, false);
+    Context parentContext = Context.current();
+    if (!instrumenter.shouldStart(parentContext, chatCompletionCreateParams)) {
+      return createStreamingWithLogs(
+          parentContext, chatCompletionCreateParams, requestOptions, false);
     }
 
-    Context ctx = instrumenter.start(parentCtx, chatCompletionCreateParams);
-    try (Scope ignored = ctx.makeCurrent()) {
-      return createStreamingWithLogs(ctx, chatCompletionCreateParams, requestOptions, true);
+    Context context = instrumenter.start(parentContext, chatCompletionCreateParams);
+    try (Scope ignored = context.makeCurrent()) {
+      return createStreamingWithLogs(context, chatCompletionCreateParams, requestOptions, true);
    } catch (Throwable t) {
-      instrumenter.end(ctx, chatCompletionCreateParams, null, t);
+      instrumenter.end(context, chatCompletionCreateParams, null, t);
      throw t;
    }
   }
 
   private AsyncStreamResponse<ChatCompletionChunk> createStreamingWithLogs(
-      Context ctx,
+      Context context,
       ChatCompletionCreateParams chatCompletionCreateParams,
       RequestOptions requestOptions,
       boolean newSpan) {
     ChatCompletionEventsHelper.emitPromptLogEvents(
-        ctx, eventLogger, chatCompletionCreateParams, captureMessageContent);
+        context, eventLogger, chatCompletionCreateParams, captureMessageContent);
     AsyncStreamResponse<ChatCompletionChunk> result =
         delegate.createStreaming(chatCompletionCreateParams, requestOptions);
     return new TracingAsyncStreamedResponse(
         result,
         new StreamListener(
-            ctx,
+            context,
             chatCompletionCreateParams,
             instrumenter,
             eventLogger,
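
Note: on the async path the completion callback may run on another thread, so the future is completed via whenComplete with the span's context and then handed to CompletableFutureWrapper.wrap together with that context. The wrapper's implementation is not part of this diff; the following is only a sketch of what such a wrapper typically does, assuming nothing beyond the standard Context/Scope and CompletableFuture APIs:

import io.opentelemetry.context.Context;
import io.opentelemetry.context.Scope;
import java.util.concurrent.CompletableFuture;

final class ContextPropagationSketch {
  // Returns a future whose dependent stages are completed while the given context is current,
  // so callbacks the caller attaches afterwards still observe the span started for the request.
  static <T> CompletableFuture<T> wrap(CompletableFuture<T> future, Context context) {
    CompletableFuture<T> result = new CompletableFuture<>();
    future.whenComplete(
        (value, error) -> {
          try (Scope ignored = context.makeCurrent()) {
            if (error != null) {
              result.completeExceptionally(error);
            } else {
              result.complete(value);
            }
          }
        });
    return result;
  }

  private ContextPropagationSketch() {}
}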

instrumentation/openai/openai-java-1.1/library/src/main/java/io/opentelemetry/instrumentation/openai/v1_1/StreamListener.java

Lines changed: 6 additions & 6 deletions
@@ -20,7 +20,7 @@
 
 final class StreamListener {
 
-  private final Context parentCtx;
+  private final Context context;
   private final ChatCompletionCreateParams request;
   private final List<StreamedMessageBuffer> choiceBuffers;
 
@@ -35,13 +35,13 @@ final class StreamListener {
   @Nullable private String responseId;
 
   StreamListener(
-      Context parentCtx,
+      Context context,
       ChatCompletionCreateParams request,
       Instrumenter<ChatCompletionCreateParams, ChatCompletion> instrumenter,
       Logger eventLogger,
       boolean captureMessageContent,
       boolean newSpan) {
-    this.parentCtx = parentCtx;
+    this.context = context;
     this.request = request;
     this.instrumenter = instrumenter;
     this.eventLogger = eventLogger;
@@ -71,7 +71,7 @@ void onChunk(ChatCompletionChunk chunk) {
 
         // message has ended, let's emit
         ChatCompletionEventsHelper.emitCompletionLogEvent(
-            parentCtx, eventLogger, choice.index(), buffer.finishReason, buffer.toEventBody());
+            context, eventLogger, choice.index(), buffer.finishReason, buffer.toEventBody());
       }
     }
   }
@@ -86,7 +86,7 @@ void endSpan(@Nullable Throwable error) {
     if (model == null || responseId == null) {
       // Only happens if we got no chunks, so we have no response.
       if (newSpan) {
-        instrumenter.end(parentCtx, request, null, error);
+        instrumenter.end(context, request, null, error);
       }
       return;
     }
@@ -106,7 +106,7 @@ void endSpan(@Nullable Throwable error) {
     }
 
     if (newSpan) {
-      instrumenter.end(parentCtx, request, result.build(), error);
+      instrumenter.end(context, request, result.build(), error);
     }
   }
 }

0 commit comments
