Commit e72a320

Update documentation, rename system to provider
1 parent 4f8e860 commit e72a320

4 files changed: +28 -32 lines changed

util/opentelemetry-util-genai/README.rst

Lines changed: 2 additions & 2 deletions
@@ -13,14 +13,14 @@ Set the environment variable `OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT
 This package provides these span attributes.
 -> gen_ai.provider.name: Str(openai)
 -> gen_ai.operation.name: Str(chat)
--> gen_ai.system: Str(openai) # deprecated
 -> gen_ai.request.model: Str(gpt-3.5-turbo)
 -> gen_ai.response.finish_reasons: Slice(["stop"])
 -> gen_ai.response.model: Str(gpt-3.5-turbo-0125)
 -> gen_ai.response.id: Str(chatcmpl-Bz8yrvPnydD9pObv625n2CGBPHS13)
 -> gen_ai.usage.input_tokens: Int(24)
 -> gen_ai.usage.output_tokens: Int(7)
--> gen_ai.input.messages: Str("[{\"role\": \"user\", \"content\": \"hello world\"}]")
+-> gen_ai.input.messages: Str('[{"role": "Human", "parts": [{"content": "hello world", "type": "text"}]}]')
+-> gen_ai.output.messages: Str('[{"role": "AI", "parts": [{"content": "hello back", "type": "text"}], "finish_reason": "stop"}]')
 
 
 Installation
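
For orientation, a minimal sketch of how an instrumentation could produce the attributes listed above through this package's telemetry handler. This is not taken from the repository; the prompt and generation lists are left empty, and passing provider/request_model as keyword attributes is an assumption based on the handler docstring and the test changed in this commit:

from uuid import uuid4

from opentelemetry.util.genai.handler import get_telemetry_handler

handler = get_telemetry_handler()
run_id = uuid4()

# "provider" replaces the old "system" keyword; per the updated test it is
# emitted as the gen_ai.provider.name span attribute.
handler.start_llm(
    prompts=[],  # InputMessage objects elided in this sketch
    run_id=run_id,
    provider="openai",
    request_model="gpt-3.5-turbo",
)

# ... invoke the model and collect generations ...

handler.stop_llm(run_id, chat_generations=[])  # OutputMessage objects elided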

util/opentelemetry-util-genai/src/opentelemetry/util/genai/generators.py

Lines changed: 23 additions & 27 deletions
@@ -72,11 +72,11 @@ def _get_genai_attributes(
     request_model: Optional[str],
     response_model: Optional[str],
     operation_name: Optional[str],
-    system: Optional[str],
+    provider: Optional[str],
 ) -> Dict[str, AttributeValue]:
     attributes: Dict[str, AttributeValue] = {}
-    if system:
-        attributes[GenAI.GEN_AI_PROVIDER_NAME] = system
+    if provider:
+        attributes[GenAI.GEN_AI_PROVIDER_NAME] = provider
     if operation_name:
         attributes[GenAI.GEN_AI_OPERATION_NAME] = operation_name
     if request_model:
@@ -90,16 +90,16 @@ def _get_genai_attributes(
 def _set_initial_span_attributes(
     span: Span,
     request_model: Optional[str],
-    system: Optional[str],
+    provider: Optional[str],
 ) -> None:
     span.set_attribute(
         GenAI.GEN_AI_OPERATION_NAME, GenAI.GenAiOperationNameValues.CHAT.value
     )
     if request_model:
         span.set_attribute(GenAI.GEN_AI_REQUEST_MODEL, request_model)
-    if system is not None:
-        # TODO: clean system name to match GenAiProviderNameValues?
-        span.set_attribute(GenAI.GEN_AI_PROVIDER_NAME, system)
+    if provider is not None:
+        # TODO: clean provider name to match GenAiProviderNameValues?
+        span.set_attribute(GenAI.GEN_AI_PROVIDER_NAME, provider)
 
 
 def _set_response_and_usage_attributes(
@@ -126,31 +126,24 @@ def _collect_finish_reasons(generations: List[OutputMessage]) -> List[str]:
     return finish_reasons
 
 
-def _maybe_set_span_input_messages(
-    span: Span, messages: List[InputMessage]
+def _maybe_set_span_messages(
+    span: Span,
+    input_messages: List[InputMessage],
+    output_messages: List[OutputMessage],
 ) -> None:
     if not is_experimental_mode() or get_content_capturing_mode() not in (
         ContentCapturingMode.SPAN_ONLY,
         ContentCapturingMode.SPAN_AND_EVENT,
     ):
         return
     message_parts: List[Dict[str, Any]] = [
-        asdict(message) for message in messages
+        asdict(message) for message in input_messages
     ]
     if message_parts:
         span.set_attribute("gen_ai.input.messages", json.dumps(message_parts))
 
-
-def _maybe_set_span_output_messages(
-    span: Span, generations: List[OutputMessage]
-) -> None:
-    if not is_experimental_mode() or get_content_capturing_mode() not in (
-        ContentCapturingMode.SPAN_ONLY,
-        ContentCapturingMode.SPAN_AND_EVENT,
-    ):
-        return
     generation_parts: List[Dict[str, Any]] = [
-        asdict(generation) for generation in generations
+        asdict(generation) for generation in output_messages
     ]
     if generation_parts:
         span.set_attribute(
@@ -184,7 +177,7 @@ def __init__(
     ):
         self._tracer: Tracer = tracer or trace.get_tracer(__name__)
 
-        # Map from run_id -> _SpanState, to keep track of spans and parent/child relationships
+        # TODO: Map from run_id -> _SpanState, to keep track of spans and parent/child relationships
         self.spans: Dict[UUID, _SpanState] = {}
 
     def _start_span(
@@ -200,6 +193,7 @@ def _start_span(
         else:
             # top-level or missing parent
             span = self._tracer.start_span(name=name, kind=kind)
+            set_span_in_context(span)
 
         return span
 
@@ -210,6 +204,7 @@ def _end_span(self, run_id: UUID):
         if child_state:
             child_state.span.end()
         state.span.end()
+        del self.spans[run_id]
 
     def start(self, invocation: LLMInvocation):
         if (
@@ -245,12 +240,12 @@ def _apply_common_span_attributes(
     ) -> Tuple[Dict[str, AttributeValue]]:
         """Apply attributes shared by finish() and error() and compute metrics.
 
-        Returns (genai_attributes).
+        Returns (genai_attributes) for use with metrics.
         """
         request_model = invocation.attributes.get("request_model")
-        system = invocation.attributes.get("system")
+        provider = invocation.attributes.get("provider")
 
-        _set_initial_span_attributes(span, request_model, system)
+        _set_initial_span_attributes(span, request_model, provider)
 
         finish_reasons = _collect_finish_reasons(invocation.chat_generations)
         if finish_reasons:
@@ -273,7 +268,7 @@ def _apply_common_span_attributes(
             request_model,
             response_model,
             GenAI.GenAiOperationNameValues.CHAT.value,
-            system,
+            provider,
         )
         return (genai_attributes,)
 
@@ -286,8 +281,9 @@ def finish(self, invocation: LLMInvocation):
         _ = self._apply_common_span_attributes(
             span, invocation
         )  # return value to be used with metrics
-        _maybe_set_span_input_messages(span, invocation.messages)
-        _maybe_set_span_output_messages(span, invocation.chat_generations)
+        _maybe_set_span_messages(
+            span, invocation.messages, invocation.chat_generations
+        )
         self._finalize_invocation(invocation)
 
     def error(self, error: Error, invocation: LLMInvocation):
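
The merged _maybe_set_span_messages serializes both directions with dataclasses.asdict and json.dumps before setting them on the span. A standalone sketch of that serialization step, using stand-in dataclasses rather than the real InputMessage/OutputMessage types (their fields are assumed from the README example above):

import json
from dataclasses import asdict, dataclass, field
from typing import List


@dataclass
class Part:  # stand-in for the real message part type (assumed fields)
    content: str
    type: str = "text"


@dataclass
class Message:  # stand-in for InputMessage (assumed fields)
    role: str
    parts: List[Part] = field(default_factory=list)


input_messages = [Message(role="Human", parts=[Part(content="hello world")])]

# Mirrors what _maybe_set_span_messages does before calling
# span.set_attribute("gen_ai.input.messages", ...).
print(json.dumps([asdict(m) for m in input_messages]))
# [{"role": "Human", "parts": [{"content": "hello world", "type": "text"}]}]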

util/opentelemetry-util-genai/src/opentelemetry/util/genai/handler.py

Lines changed: 1 addition & 1 deletion
@@ -27,7 +27,7 @@
 
 Usage:
     handler = get_telemetry_handler()
-    handler.start_llm(prompts, run_id, system="provider-name", **attrs)
+    handler.start_llm(prompts, run_id, **attrs)
     handler.stop_llm(run_id, chat_generations, **attrs)
     handler.fail_llm(run_id, error, **attrs)
 """

util/opentelemetry-util-genai/tests/test_utils.py

Lines changed: 2 additions & 2 deletions
@@ -136,7 +136,7 @@ def test_llm_start_and_stop_creates_span(self): # pylint: disable=no-self-use
             prompts=[message],
             run_id=run_id,
             custom_attr="value",
-            system="test-system",
+            provider="test-provider",
         )
         invocation = self.telemetry_handler.stop_llm(
             run_id, chat_generations=[chat_generation], extra="info"
@@ -153,7 +153,7 @@ def test_llm_start_and_stop_creates_span(self): # pylint: disable=no-self-use
         assert span.attributes is not None
         span_attrs = span.attributes
         assert span_attrs.get("gen_ai.operation.name") == "chat"
-        assert span_attrs.get("gen_ai.provider.name") == "test-system"
+        assert span_attrs.get("gen_ai.provider.name") == "test-provider"
         assert span.start_time is not None
         assert span.end_time is not None
         assert span.end_time > span.start_time
