We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent f274d6c commit 7106cef — Copy full SHA for 7106cef
veadk/tracing/telemetry/attributes/extractors/llm_attributes_extractors.py
@@ -103,10 +103,11 @@ def llm_gen_ai_usage_cache_read_input_tokens(
103
def llm_gen_ai_prompt(params: LLMAttributesParams) -> ExtractorResponse:
104
# a part is a message
105
messages: list[dict] = []
106
+ idx = 0
107
108
for content in params.llm_request.contents:
109
if content.parts:
- for idx, part in enumerate(content.parts):
110
+ for part in content.parts:
111
message = {}
112
# text part
113
if part.text:
@@ -141,6 +142,7 @@ def llm_gen_ai_prompt(params: LLMAttributesParams) -> ExtractorResponse:
141
142
143
if message:
144
messages.append(message)
145
+ idx += 1
146
147
return ExtractorResponse(content=messages)
148
0 commit comments