
Commit 980bb85

quick fix for ollama prompt

1 parent cebd63c

2 files changed: 4 additions & 2 deletions

src/langtrace_python_sdk/instrumentation/ollama/patch.py

Lines changed: 4 additions & 1 deletion
@@ -22,7 +22,10 @@ def traced_method(wrapped, instance, args, kwargs):
         service_provider = SERVICE_PROVIDERS["OLLAMA"]
         span_attributes = {
             **get_langtrace_attributes(version, service_provider),
-            **get_llm_request_attributes(kwargs, prompts=kwargs.get("messages", [])),
+            **get_llm_request_attributes(
+                kwargs,
+                prompts=kwargs.get("messages", None),
+            ),
             **get_llm_url(instance),
             SpanAttributes.LLM_PATH: api["ENDPOINT"],
             SpanAttributes.LLM_RESPONSE_FORMAT: kwargs.get("format"),
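
The behavioral difference here is subtle: `kwargs.get("messages", [])` returns an empty list when the key is absent, and an empty list is not `None`, so any `prompts is None` fallback inside `get_llm_request_attributes` never fires. Passing `None` instead lets the helper recover the prompt from the single-string `prompt` kwarg. A minimal sketch of that interaction, with an assumed (not the actual) helper body and a hypothetical `llm.prompts` key:

# Minimal sketch, not the real langtrace helper: it only models the
# `prompts is None` fallback implied by the llm.py context below.
def get_llm_request_attributes(kwargs, prompts=None, model=None):
    if prompts is None:
        # Fall back to the single-string "prompt" kwarg when no chat
        # messages were passed (mirrors the `if "prompt" in kwargs
        # else None` lines visible in the second diff).
        prompts = (
            [{"role": "user", "content": kwargs["prompt"]}]
            if "prompt" in kwargs
            else None
        )
    return {"llm.prompts": prompts}

call_kwargs = {"prompt": "Why is the sky blue?"}

# Old call site: the [] default short-circuits the fallback.
print(get_llm_request_attributes(call_kwargs, prompts=call_kwargs.get("messages", [])))
# -> {'llm.prompts': []}

# New call site: None reaches the helper and the fallback runs.
print(get_llm_request_attributes(call_kwargs, prompts=call_kwargs.get("messages", None)))
# -> {'llm.prompts': [{'role': 'user', 'content': 'Why is the sky blue?'}]}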

src/langtrace_python_sdk/utils/llm.py

Lines changed: 0 additions & 1 deletion
@@ -100,7 +100,6 @@ def get_llm_request_attributes(kwargs, prompts=None, model=None):
         if "prompt" in kwargs
         else None
     )
-    print("PRMPT", prompts)
     top_k = (
         kwargs.get("n", None)
         or kwargs.get("k", None)
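
The removed line was a leftover debug print. If prompt values still need to be inspected during development, the standard library's logging module is the usual replacement (a suggestion, not part of this commit):

import logging

logger = logging.getLogger("langtrace_python_sdk.utils.llm")

def log_prompts(prompts):
    # Debug-level record instead of an unconditional print to stdout;
    # stays silent unless the application opts into DEBUG logging.
    logger.debug("prompts=%r", prompts)

logging.basicConfig(level=logging.DEBUG)
log_prompts([{"role": "user", "content": "hi"}])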
