Skip to content

Commit c6b10db

Browse files
committed
handle attribute setting
1 parent 0415a5e commit c6b10db

File tree

3 files changed

+30
-57
lines changed

3 files changed

+30
-57
lines changed

src/langtrace_python_sdk/instrumentation/mistral/instrumentation.py

Lines changed: 22 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -20,9 +20,7 @@
2020

2121
from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
2222
from opentelemetry.trace import get_tracer
23-
from wrapt import wrap_function_wrapper
24-
25-
from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
23+
from wrapt import wrap_function_wrapper as _W
2624

2725
from langtrace_python_sdk.instrumentation.mistral.patch import (
2826
chat_complete,
@@ -34,41 +32,41 @@
3432
class MistralInstrumentation(BaseInstrumentor):
3533

3634
def instrumentation_dependencies(self) -> Collection[str]:
37-
return ["mistralai >= 1.0.1", "trace-attributes >= 4.0.5"]
35+
return ["mistralai >= 1.0.1"]
3836

3937
def _instrument(self, **kwargs):
4038
tracer_provider = kwargs.get("tracer_provider")
4139
tracer = get_tracer(__name__, "", tracer_provider)
4240
version = importlib.metadata.version("mistralai")
4341

44-
wrap_function_wrapper(
45-
"mistralai.chat",
46-
"Chat.complete",
47-
chat_complete("mistral.chat.complete", version, tracer),
42+
_W(
43+
module="mistralai.chat",
44+
name="Chat.complete",
45+
wrapper=chat_complete("mistral.chat.complete", version, tracer),
4846
)
4947

50-
wrap_function_wrapper(
51-
"mistralai.chat",
52-
"Chat.stream",
53-
chat_complete("mistral.chat.stream", version, tracer, is_streaming=True),
48+
_W(
49+
module="mistralai.chat",
50+
name="Chat.stream",
51+
wrapper=chat_complete("mistral.chat.stream", version, tracer, is_streaming=True),
5452
)
5553

56-
wrap_function_wrapper(
57-
"mistralai.chat",
58-
"Chat.complete_async",
59-
chat_complete("mistral.chat.complete_async", version, tracer, is_async=True),
54+
_W(
55+
module="mistralai.chat",
56+
name="Chat.complete_async",
57+
wrapper=chat_complete("mistral.chat.complete_async", version, tracer, is_async=True),
6058
)
6159

62-
wrap_function_wrapper(
63-
"mistralai.embeddings",
64-
"Embeddings.create",
65-
embeddings_create("mistral.embeddings.create", version, tracer),
60+
_W(
61+
module="mistralai.embeddings",
62+
name="Embeddings.create",
63+
wrapper=embeddings_create("mistral.embeddings.create", version, tracer),
6664
)
6765

68-
wrap_function_wrapper(
69-
"mistralai.embeddings",
70-
"Embeddings.create_async",
71-
embeddings_create("mistral.embeddings.create_async", version, tracer, is_async=True),
66+
_W(
67+
module="mistralai.embeddings",
68+
name="Embeddings.create_async",
69+
wrapper=embeddings_create("mistral.embeddings.create_async", version, tracer, is_async=True),
7270
)
7371

7472
def _uninstrument(self, **kwargs):

src/langtrace_python_sdk/instrumentation/mistral/patch.py

Lines changed: 2 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -32,17 +32,15 @@
3232
from langtrace_python_sdk.constants.instrumentation.mistral import APIS
3333
from langtrace_python_sdk.utils.llm import (
3434
calculate_prompt_tokens,
35-
get_base_url,
3635
get_extra_attributes,
3736
get_langtrace_attributes,
3837
get_llm_request_attributes,
3938
get_llm_url,
4039
get_span_name,
41-
get_tool_calls,
42-
is_streaming,
4340
set_event_completion,
4441
StreamWrapper,
4542
set_span_attributes,
43+
set_usage_attributes,
4644
)
4745

4846
from langtrace_python_sdk.instrumentation.openai.patch import extract_content
@@ -80,19 +78,6 @@ def traced_method(wrapped, instance, args, kwargs):
8078
result = wrapped(*args, **kwargs)
8179
if is_streaming:
8280
prompt_tokens = 0
83-
for message in kwargs.get("messages", {}):
84-
prompt_tokens += calculate_prompt_tokens(
85-
json.dumps((str(message))), kwargs.get("model")
86-
)
87-
88-
if (
89-
kwargs.get("functions") is not None
90-
):
91-
for function in kwargs.get("functions"):
92-
prompt_tokens += calculate_prompt_tokens(
93-
json.dumps(function), kwargs.get("model")
94-
)
95-
9681
return StreamWrapper(
9782
result,
9883
span,
@@ -207,20 +192,4 @@ def _set_response_attributes(span, kwargs, result):
207192

208193
# Get the usage
209194
if hasattr(result, "usage") and result.usage is not None:
210-
usage = result.usage
211-
if usage is not None:
212-
set_span_attribute(
213-
span,
214-
SpanAttributes.LLM_USAGE_PROMPT_TOKENS,
215-
result.usage.prompt_tokens,
216-
)
217-
set_span_attribute(
218-
span,
219-
SpanAttributes.LLM_USAGE_COMPLETION_TOKENS,
220-
result.usage.completion_tokens,
221-
)
222-
set_span_attribute(
223-
span,
224-
SpanAttributes.LLM_USAGE_TOTAL_TOKENS,
225-
result.usage.total_tokens,
226-
)
195+
set_usage_attributes(span, result.usage)

src/langtrace_python_sdk/utils/llm.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -411,6 +411,12 @@ def set_usage_attributes(self, chunk):
411411
self.completion_tokens = chunk["eval_count"]
412412

413413
def process_chunk(self, chunk):
414+
# Mistral nests the chunk data under a `data` attribute
415+
if (
416+
hasattr(chunk, "data") and chunk.data is not None
417+
and hasattr(chunk.data, "choices") and chunk.data.choices is not None
418+
):
419+
chunk = chunk.data
414420
self.set_response_model(chunk=chunk)
415421
self.build_streaming_response(chunk=chunk)
416422
self.set_usage_attributes(chunk=chunk)

0 commit comments

Comments (0)