Commit 7a9eb6c: Bump version

2 parents 559ae41 + 4ce1297

8 files changed, +107 -22 lines changed


pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -18,7 +18,7 @@ classifiers=[
     "Operating System :: OS Independent",
 ]
 dependencies = [
-    'trace-attributes>=6.0.0,<7.0.0',
+    'trace-attributes>=6.0.3,<7.0.0',
     'opentelemetry-api>=1.25.0',
     'opentelemetry-sdk>=1.25.0',
     'opentelemetry-instrumentation>=0.46b0',

src/examples/dspy_example/math_problems_cot_parallel.py

Lines changed: 1 addition & 1 deletion
@@ -5,7 +5,7 @@
 from concurrent.futures import ThreadPoolExecutor
 
 # flake8: noqa
-from langtrace_python_sdk import langtrace, with_langtrace_root_span
+from langtrace_python_sdk import langtrace, with_langtrace_root_span, inject_additional_attributes
 
 langtrace.init()
 
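The newly imported inject_additional_attributes helper is what this example uses to attach extra attributes to the spans created inside a callable. A minimal usage sketch follows; the exact signature (a zero-argument callable plus an attribute dict) and the attribute keys are assumptions for illustration, not taken from this commit:

# Hypothetical sketch only; the signature of inject_additional_attributes and
# the attribute keys below are assumptions, not part of this diff.
from langtrace_python_sdk import langtrace, inject_additional_attributes

langtrace.init()


def solve(problem: str) -> str:
    # Placeholder for the DSPy chain-of-thought call in the real example.
    return "42"


# Run solve() with extra attributes attached to the spans it produces.
result = inject_additional_attributes(
    lambda: solve("What is 6 * 7?"),
    {"user.id": "user-123", "experiment": "cot-parallel"},
)
print(result)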

Lines changed: 16 additions & 8 deletions
@@ -1,24 +1,32 @@
-# langtrace.init(write_spans_to_console=True)
 import fsspec
 from inspect_ai import Task, task
-from inspect_ai.dataset import csv_dataset
+from inspect_ai.dataset import csv_dataset, Sample
 from inspect_ai.scorer import model_graded_qa
-from inspect_ai.solver import chain_of_thought, generate, self_critique
-
+from inspect_ai.solver import chain_of_thought, self_critique
 from langtrace_python_sdk.extensions.langtrace_filesystem import LangTraceFileSystem
 
-# from langtrace_python_sdk import langtrace
-
 
 # Manually register the filesystem with fsspec
 # Note: This is only necessary because the filesystem is not registered.
 fsspec.register_implementation(LangTraceFileSystem.protocol, LangTraceFileSystem)
 
+question = "What is the price?"
+
+
+def hydrate_with_question(record):
+    # add context to input
+    record["input"] = f"Context: {record['input']}\n question: {question}"
+
+    return Sample(
+        input=record["input"],
+        target=record["target"],
+    )
+
 
 @task
-def security_guide():
+def pricing_question():
     return Task(
-        dataset=csv_dataset("langtracefs://clxc2mxu6000lpc7ntsvcjvp9"),
+        dataset=csv_dataset("langtracefs://clyythmcs0001145cuvi426zi", hydrate_with_question),
         plan=[chain_of_thought(), self_critique()],
         scorer=model_graded_qa(),
     )
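In the updated task, hydrate_with_question is handed to csv_dataset as the record-to-Sample mapping, so every CSV row gets the shared question prepended to its input before scoring. Roughly what the mapper does for a hypothetical row (values invented for illustration):

# Illustration only; the CSV row below is invented.
record = {"input": "The widget costs $5.", "target": "$5"}
sample = hydrate_with_question(record)
# sample.input  -> "Context: The widget costs $5.\n question: What is the price?"
# sample.target -> "$5"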
Lines changed: 34 additions & 0 deletions
@@ -0,0 +1,34 @@
+from langtrace_python_sdk import langtrace
+from openai import OpenAI
+from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
+
+service_name = "langtrace-python-ollama"
+otlp_endpoint = "http://localhost:4318/v1/traces"
+otlp_exporter = OTLPSpanExporter(
+    endpoint=otlp_endpoint,
+    headers=(("Content-Type", "application/json"),))
+langtrace.init(custom_remote_exporter=otlp_exporter, batch=False)
+
+
+def chat_with_ollama():
+    # Use the OpenAI endpoint, not the Ollama API.
+    base_url = "http://localhost:11434/v1"
+    client = OpenAI(base_url=base_url, api_key="unused")
+    messages = [
+        {
+            "role": "user",
+            "content": "Hello, I'm a human.",
+        },
+    ]
+    chat_completion = client.chat.completions.create(
+        model="llama3", messages=messages
+    )
+    print(chat_completion.choices[0].message.content)
+
+
+def main():
+    chat_with_ollama()
+
+
+if __name__ == "__main__":
+    main()
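Running this example presumably requires a local Ollama server with the llama3 model already pulled (for instance via "ollama pull llama3" followed by "ollama serve"); Ollama exposes its OpenAI-compatible API under /v1, which is why the script points the OpenAI client at http://localhost:11434/v1 with a dummy API key.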
Lines changed: 43 additions & 0 deletions
@@ -0,0 +1,43 @@
+# Instructions
+# 1. Run the OpenTelemetry Collector with the OTLP receiver enabled
+#    Create otel-config.yaml with the following content:
+#    receivers:
+#      otlp:
+#        protocols:
+#          grpc:
+#            endpoint: "0.0.0.0:4317"
+#          http:
+#            endpoint: "0.0.0.0:4318"
+
+#    exporters:
+#      logging:
+#        loglevel: debug
+
+#    service:
+#      pipelines:
+#        traces:
+#          receivers: [otlp]
+#          exporters: [logging]
+#    docker pull otel/opentelemetry-collector:latest
+#    docker run --rm -p 4317:4317 -p 4318:4318 -v $(pwd)/otel-config.yaml:/otel-config.yaml otel/opentelemetry-collector --config otel-config.yaml
+# 2. Run the following code
+
+from opentelemetry import trace
+from opentelemetry.sdk.trace import TracerProvider
+from opentelemetry.sdk.trace.export import BatchSpanProcessor
+from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
+
+# Set up the tracer provider
+trace.set_tracer_provider(TracerProvider())
+tracer = trace.get_tracer(__name__)
+
+# Set up the OTLP exporter
+otlp_exporter = OTLPSpanExporter(endpoint="http://localhost:4317")
+
+# Set up a span processor and add it to the tracer provider
+span_processor = BatchSpanProcessor(otlp_exporter)
+trace.get_tracer_provider().add_span_processor(span_processor)
+
+# Create a span
+with tracer.start_as_current_span("example-span"):
+    print("Hello, World!")
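Note that this script exports spans over gRPC to port 4317, while the Ollama example above sends them over HTTP to port 4318 at /v1/traces; the collector config in the comments opens both receiver endpoints, so either exporter can be pointed at the same collector.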

src/langtrace_python_sdk/instrumentation/groq/patch.py

Lines changed: 4 additions & 4 deletions
@@ -104,10 +104,10 @@ def traced_method(wrapped, instance, args, kwargs):
 
         # TODO(Karthik): Gotta figure out how to handle streaming with context
         # with tracer.start_as_current_span(APIS["CHAT_COMPLETION"]["METHOD"],
-        #                                   kind=SpanKind.CLIENT.value) as span:
+        #                                   kind=SpanKind.CLIENT) as span:
         span = tracer.start_span(
             APIS["CHAT_COMPLETION"]["METHOD"],
-            kind=SpanKind.CLIENT.value,
+            kind=SpanKind.CLIENT,
             context=set_span_in_context(trace.get_current_span()),
         )
         for field, value in attributes.model_dump(by_alias=True).items():
@@ -333,9 +333,9 @@ async def traced_method(wrapped, instance, args, kwargs):
 
         # TODO(Karthik): Gotta figure out how to handle streaming with context
         # with tracer.start_as_current_span(APIS["CHAT_COMPLETION"]["METHOD"],
-        #                                   kind=SpanKind.CLIENT.value) as span:
+        #                                   kind=SpanKind.CLIENT) as span:
         span = tracer.start_span(
-            APIS["CHAT_COMPLETION"]["METHOD"], kind=SpanKind.CLIENT.value
+            APIS["CHAT_COMPLETION"]["METHOD"], kind=SpanKind.CLIENT
         )
         for field, value in attributes.model_dump(by_alias=True).items():
             set_span_attribute(span, field, value)
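Both hunks make the same fix: the kind argument of tracer.start_span expects a SpanKind enum member, and SpanKind.CLIENT.value is only the enum's underlying value, so the .value suffix is dropped. A minimal standalone sketch of the corrected usage (not taken from the langtrace codebase):

# Minimal sketch of passing SpanKind as the enum member rather than its value.
from opentelemetry import trace
from opentelemetry.trace import SpanKind

tracer = trace.get_tracer(__name__)

# Correct: pass the enum member itself; exporters translate it to the wire
# format's span-kind field.
with tracer.start_as_current_span("outbound-request", kind=SpanKind.CLIENT) as span:
    span.set_attribute("peer.service", "example-upstream")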

src/langtrace_python_sdk/instrumentation/openai/patch.py

Lines changed: 7 additions & 7 deletions
@@ -65,7 +65,7 @@ def traced_method(wrapped, instance, args, kwargs):
 
         with tracer.start_as_current_span(
             APIS["IMAGES_GENERATION"]["METHOD"],
-            kind=SpanKind.CLIENT.value,
+            kind=SpanKind.CLIENT,
             context=set_span_in_context(trace.get_current_span()),
         ) as span:
             set_span_attributes(span, attributes)
@@ -128,7 +128,7 @@ async def traced_method(wrapped, instance, args, kwargs):
 
         with tracer.start_as_current_span(
             APIS["IMAGES_GENERATION"]["METHOD"],
-            kind=SpanKind.CLIENT.value,
+            kind=SpanKind.CLIENT,
             context=set_span_in_context(trace.get_current_span()),
         ) as span:
             set_span_attributes(span, attributes)
@@ -193,7 +193,7 @@ def traced_method(wrapped, instance, args, kwargs):
 
         with tracer.start_as_current_span(
             APIS["IMAGES_EDIT"]["METHOD"],
-            kind=SpanKind.CLIENT.value,
+            kind=SpanKind.CLIENT,
             context=set_span_in_context(trace.get_current_span()),
         ) as span:
             set_span_attributes(span, attributes)
@@ -283,7 +283,7 @@ def traced_method(wrapped, instance, args, kwargs):
 
         span = tracer.start_span(
             APIS["CHAT_COMPLETION"]["METHOD"],
-            kind=SpanKind.CLIENT.value,
+            kind=SpanKind.CLIENT,
             context=set_span_in_context(trace.get_current_span()),
         )
         _set_input_attributes(span, kwargs, attributes)
@@ -377,7 +377,7 @@ async def traced_method(wrapped, instance, args, kwargs):
 
         span = tracer.start_span(
             APIS["CHAT_COMPLETION"]["METHOD"],
-            kind=SpanKind.CLIENT.value,
+            kind=SpanKind.CLIENT,
             context=set_span_in_context(trace.get_current_span()),
         )
         _set_input_attributes(span, kwargs, attributes)
@@ -456,7 +456,7 @@ def traced_method(wrapped, instance, args, kwargs):
 
         with tracer.start_as_current_span(
             APIS["EMBEDDINGS_CREATE"]["METHOD"],
-            kind=SpanKind.CLIENT.value,
+            kind=SpanKind.CLIENT,
             context=set_span_in_context(trace.get_current_span()),
         ) as span:
 
@@ -513,7 +513,7 @@ async def traced_method(wrapped, instance, args, kwargs):
 
         with tracer.start_as_current_span(
             APIS["EMBEDDINGS_CREATE"]["METHOD"],
-            kind=SpanKind.CLIENT.value,
+            kind=SpanKind.CLIENT,
             context=set_span_in_context(trace.get_current_span()),
         ) as span:
 
Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-__version__ = "2.2.4"
+__version__ = "2.2.5"
