Commit 3c30699

fix function response
1 parent e4b20b7 commit 3c30699

File tree

1 file changed: +5 -4 lines changed

veadk/tracing/telemetry/attributes/extractors/llm_attributes_extractors.py

Lines changed: 5 additions & 4 deletions
@@ -1,6 +1,5 @@
 import json
 
-
 from veadk.tracing.telemetry.attributes.extractors.types import (
     ExtractorResponse,
     LLMAttributesParams,
@@ -105,7 +104,7 @@ def llm_gen_ai_usage_cache_read_input_tokens(
 
 
 def llm_gen_ai_prompt(params: LLMAttributesParams) -> ExtractorResponse:
-    # a content is a message
+    # a part is a message
     messages: list[dict] = []
 
     for content in params.llm_request.contents:
@@ -119,8 +118,10 @@ def llm_gen_ai_prompt(params: LLMAttributesParams) -> ExtractorResponse:
             # function response
             if part.function_response:
                 message[f"gen_ai.prompt.{idx}.role"] = content.role
-                message[f"gen_ai.prompt.{idx}.content"] = str(
-                    content.parts[0].function_response
+                message[f"gen_ai.prompt.{idx}.content"] = (
+                    str(content.parts[0].function_response.response)
+                    if content.parts[0].function_response
+                    else "<unknown_function_response>"
                 )
             # function call
             if part.function_call:
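
The change guards the function-response branch of the prompt extractor: instead of stringifying `content.parts[0].function_response` directly, it stringifies its `.response` payload and falls back to a placeholder when no function response is present. The sketch below is a minimal, self-contained illustration of that guarded expression; `FunctionResponse`, `Part`, and `extract_function_response_content` are hypothetical stand-ins, not veadk's real types, and in the actual extractor the value comes from `content.parts[0].function_response` inside `llm_gen_ai_prompt`.

# Minimal standalone sketch of the guarded extraction (hypothetical types).
from dataclasses import dataclass
from typing import Any, Optional


@dataclass
class FunctionResponse:
    response: dict[str, Any]


@dataclass
class Part:
    function_response: Optional[FunctionResponse] = None


def extract_function_response_content(part: Part, idx: int) -> dict[str, str]:
    # Mirrors the patched branch: read .response only when a function
    # response is present, otherwise record a placeholder instead of
    # raising AttributeError on None.
    message: dict[str, str] = {}
    message[f"gen_ai.prompt.{idx}.content"] = (
        str(part.function_response.response)
        if part.function_response
        else "<unknown_function_response>"
    )
    return message


print(extract_function_response_content(Part(FunctionResponse({"ok": True})), 0))
# -> {'gen_ai.prompt.0.content': "{'ok': True}"}
print(extract_function_response_content(Part(), 0))
# -> {'gen_ai.prompt.0.content': '<unknown_function_response>'}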
