Commit 260cf24

add input and output value in llm span
1 parent cf26b14 commit 260cf24

File tree

1 file changed: +14 −0 lines changed


veadk/tracing/telemetry/attributes/extractors/llm_attributes_extractors.py

Lines changed: 14 additions & 0 deletions
@@ -359,6 +359,18 @@ def llm_gen_ai_choice(params: LLMAttributesParams) -> ExtractorResponse:
     return ExtractorResponse(type="event", content=message)


+def llm_input_value(params: LLMAttributesParams) -> ExtractorResponse:
+    return ExtractorResponse(
+        content=str(params.llm_request.model_dump(exclude_none=True))
+    )
+
+
+def llm_output_value(params: LLMAttributesParams) -> ExtractorResponse:
+    return ExtractorResponse(
+        content=str(params.llm_response.model_dump(exclude_none=True))
+    )
+
+
 LLM_ATTRIBUTES = {
     # ===== request attributes =====
     "gen_ai.request.model": llm_gen_ai_request_model,
@@ -383,6 +395,8 @@ def llm_gen_ai_choice(params: LLMAttributesParams) -> ExtractorResponse:
     # attributes
     "gen_ai.prompt": llm_gen_ai_prompt,
     "gen_ai.completion": llm_gen_ai_completion,
+    "input.value": llm_input_value,  # TLS required
+    "output.value": llm_output_value,  # TLS required
     # ===== usage =====
     "gen_ai.usage.input_tokens": llm_gen_ai_usage_input_tokens,
     "gen_ai.usage.output_tokens": llm_gen_ai_usage_output_tokens,
