
Commit 24c7f82

fix bugs
1 parent fc9af7f commit 24c7f82

File tree

1 file changed: +3 −3 lines changed


veadk/tracing/telemetry/attributes/extractors/llm_attributes_extractors.py

Lines changed: 3 additions & 3 deletions
```diff
@@ -119,8 +119,8 @@ def llm_gen_ai_prompt(params: LLMAttributesParams) -> ExtractorResponse:
                 if part.function_response:
                     message[f"gen_ai.prompt.{idx}.role"] = content.role
                     message[f"gen_ai.prompt.{idx}.content"] = (
-                        str(content.parts[0].function_response.response)
-                        if content.parts[0].function_response
+                        str(part.function_response.response)
+                        if part.function_response
                         else "<unknown_function_response>"
                     )
                 # function call
@@ -236,7 +236,7 @@ def llm_gen_ai_user_message(params: LLMAttributesParams) -> ExtractorResponse:
             if part.function_response:
                 message_part[f"parts.{idx}.type"] = "function"
                 message_part[f"parts.{idx}.content"] = str(
-                    content.parts[0].function_response
+                    part.function_response
                 )

                 message_parts.append(message_part)
```
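Both hunks fix the same pattern: the extractor loops over `content.parts` as `part` but indexed `content.parts[0]` in the loop body, so every function response after the first was recorded with the first part's value. Below is a minimal, self-contained sketch of the corrected pattern; the dataclasses here are simplified stand-ins for the real veadk/genai types, not the project's actual classes.

```python
from dataclasses import dataclass, field


@dataclass
class FunctionResponse:
    response: dict


@dataclass
class Part:
    function_response: FunctionResponse | None = None


@dataclass
class Content:
    role: str = "tool"
    parts: list[Part] = field(default_factory=list)


def extract_prompt_attributes(content: Content) -> dict[str, str]:
    """Simplified stand-in for llm_gen_ai_prompt: one attribute set per part."""
    message: dict[str, str] = {}
    for idx, part in enumerate(content.parts):
        if part.function_response:
            message[f"gen_ai.prompt.{idx}.role"] = content.role
            # Use the loop variable `part`, not content.parts[0]:
            # indexing [0] would repeat the first part's response for every idx.
            message[f"gen_ai.prompt.{idx}.content"] = str(
                part.function_response.response
            )
    return message


content = Content(
    parts=[
        Part(FunctionResponse({"result": "first"})),
        Part(FunctionResponse({"result": "second"})),
    ]
)
attrs = extract_prompt_attributes(content)
# With the fix, the second part keeps its own response instead of the first one's.
assert attrs["gen_ai.prompt.1.content"] == "{'result': 'second'}"
```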
