Commit 0707958

Merge pull request #235 from Scale3-Labs/ali/s3en-2136-instrument-litellm
Support LiteLLM
2 parents 1fa3f85 + 658b62c commit 0707958

3 files changed: +107 -7 lines

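This commit wires LiteLLM into Langtrace: the new example file below registers Langtrace as a LiteLLM success callback and initializes the SDK's tracer, so completed LiteLLM calls are reported to Langtrace. A minimal sketch of that usage, assuming a Langtrace API key is configured via the environment; the function name and span label are illustrative, not part of the commit:

import litellm
from litellm import completion
from langtrace_python_sdk import langtrace, with_langtrace_root_span

# Report successful LiteLLM calls to Langtrace and start the tracer.
litellm.success_callback = ["langtrace"]
langtrace.init()

@with_langtrace_root_span("litellm-demo")  # illustrative span name
def ask(prompt):
    # Any LiteLLM-supported model string works; gpt-3.5-turbo mirrors the example file.
    return completion(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": prompt}],
    )

if __name__ == "__main__":
    print(ask("Hello, how are you?"))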

src/examples/crewai_example/trip_planner/agents.py

Lines changed: 5 additions & 5 deletions
@@ -12,9 +12,9 @@ class TravelAgents:
     def __init__(self):
         self.OpenAIGPT35 = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0.7)
         self.OpenAIGPT4 = ChatOpenAI(model_name="gpt-4", temperature=0.7)
-        self.Ollama = ChatOllama(model="openhermes")
+        self.Ollama = ChatOllama(model="llama3")
         self.Cohere = ChatCohere(model="command-r")
-        self.Anthropic = ChatAnthropic(model="claude-3-5-sonnet")
+        self.Anthropic = ChatAnthropic(model="claude-3-5-sonnet-20240620")

     def expert_travel_agent(self):
         return Agent(
@@ -28,7 +28,7 @@ def expert_travel_agent(self):
             # tools=[tool_1, tool_2],
             allow_delegation=False,
             verbose=True,
-            llm=self.OpenAIGPT4,
+            llm=self.Cohere,
         )

     def city_selection_expert(self):
@@ -39,7 +39,7 @@ def city_selection_expert(self):
             # tools=[tool_1, tool_2],
             allow_delegation=False,
             verbose=True,
-            llm=self.OpenAIGPT4,
+            llm=self.Cohere,
         )

     def local_tour_guide(self):
@@ -50,5 +50,5 @@ def local_tour_guide(self):
             # tools=[tool_1, tool_2],
             allow_delegation=False,
             verbose=True,
-            llm=self.OpenAIGPT4,
+            llm=self.Cohere,
         )
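On the model-name changes above: Anthropic's API expects the dated identifier for Claude 3.5 Sonnet, and the Ollama entry now targets llama3, which needs to be pulled locally before use. A small standalone sketch of the updated constructors, assuming the usual LangChain provider packages (import paths vary by LangChain version and are not shown in this diff):

from langchain_openai import ChatOpenAI
from langchain_community.chat_models import ChatOllama
from langchain_cohere import ChatCohere
from langchain_anthropic import ChatAnthropic

# Mirrors TravelAgents.__init__ after this commit.
openai_gpt35 = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0.7)
openai_gpt4 = ChatOpenAI(model_name="gpt-4", temperature=0.7)
ollama = ChatOllama(model="llama3")            # requires `ollama pull llama3`
cohere = ChatCohere(model="command-r")         # now used as the agents' llm
anthropic = ChatAnthropic(model="claude-3-5-sonnet-20240620")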
New file: LiteLLM example

Lines changed: 98 additions & 0 deletions
@@ -0,0 +1,98 @@
+from langtrace_python_sdk import with_langtrace_root_span, langtrace
+from dotenv import load_dotenv
+from litellm import completion, acompletion
+import litellm
+import asyncio
+
+load_dotenv()
+
+
+litellm.success_callback = ["langtrace"]
+langtrace.init()
+litellm.set_verbose = False
+
+
+@with_langtrace_root_span("Litellm Example OpenAI")
+def openAI(streaming=False):
+    response = completion(
+        model="gpt-3.5-turbo",
+        messages=[
+            {"content": "respond only in Yoda speak.", "role": "system"},
+            {"content": "Hello, how are you?", "role": "user"},
+        ],
+        stream=streaming,
+        stream_options={"include_usage": True},
+    )
+    if streaming:
+        for _ in response:
+            pass
+    else:
+        return response
+
+
+# @with_langtrace_root_span("Litellm Example Anthropic Completion")
+def anthropic(streaming=False):
+    try:
+
+        response = completion(
+            model="claude-2.1",
+            messages=[
+                {"content": "respond only in Yoda speak.", "role": "system"},
+                {"content": "what is 2 + 2?", "role": "user"},
+            ],
+            temperature=0.5,
+            top_p=0.5,
+            n=1,
+            stream=streaming,
+            stream_options={"include_usage": True},
+        )
+        # print(response)
+        if streaming:
+            for _ in response:
+                pass
+        else:
+            return response
+    except Exception as e:
+        print("ERROR:", e)
+
+
+# @with_langtrace_root_span("Litellm Example OpenAI Async Streaming")
+async def async_anthropic(streaming=False):
+    response = await acompletion(
+        model="claude-2.1",
+        messages=[{"content": "Hello, how are you?", "role": "user"}],
+        stream=streaming,
+        stream_options={"include_usage": True},
+        temperature=0.5,
+        top_p=0.5,
+        n=1,
+    )
+    if streaming:
+        async for _ in response:
+            pass
+    else:
+        return response
+
+
+def cohere(streaming=False):
+    response = completion(
+        model="command-r",
+        messages=[
+            {"content": "respond only in Yoda speak.", "role": "system"},
+            {"content": "Hello, how are you?", "role": "user"},
+        ],
+        stream=streaming,
+        stream_options={"include_usage": True},
+    )
+    if streaming:
+        for _ in response:
+            pass
+    else:
+        return response
+
+
+if __name__ == "__main__":
+    # openAI()
+    anthropic(streaming=False)
+    cohere(streaming=True)
+    # asyncio.run(async_anthropic(streaming=True))
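A note on the streaming paths in this example: with stream=True, LiteLLM returns an iterator (an async iterator for acompletion) of chunks, and the example simply drains it so the success callback fires after the last chunk; stream_options={"include_usage": True} asks the provider to attach token usage to that final chunk. A sketch of actually consuming an async stream instead of discarding it, assuming LiteLLM's OpenAI-style chunk shape (worth verifying for your provider):

import asyncio
import litellm
from litellm import acompletion
from langtrace_python_sdk import langtrace

litellm.success_callback = ["langtrace"]
langtrace.init()

async def stream_demo():
    response = await acompletion(
        model="claude-2.1",
        messages=[{"role": "user", "content": "Hello, how are you?"}],
        stream=True,
        stream_options={"include_usage": True},
    )
    async for chunk in response:
        # The final usage-only chunk may carry no choices, so guard before indexing.
        if chunk.choices and chunk.choices[0].delta.content:
            print(chunk.choices[0].delta.content, end="", flush=True)
    print()

if __name__ == "__main__":
    asyncio.run(stream_demo())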

src/langtrace_python_sdk/langtrace.py

Lines changed: 4 additions & 2 deletions
@@ -72,7 +72,7 @@ def init(
     disable_instrumentations: Optional[DisableInstrumentations] = None,
     disable_tracing_for_functions: Optional[InstrumentationMethods] = None,
     service_name: Optional[str] = None,
-    disable_logging = False
+    disable_logging=False,
 ):
     if disable_logging:
         sys.stdout = open(os.devnull, "w")
@@ -93,7 +93,9 @@ def init(
     provider = TracerProvider(resource=resource, sampler=sampler)

     remote_write_exporter = (
-        LangTraceExporter(api_key=api_key, api_host=host, disable_logging=disable_logging)
+        LangTraceExporter(
+            api_key=api_key, api_host=host, disable_logging=disable_logging
+        )
         if custom_remote_exporter is None
         else custom_remote_exporter
     )
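For context on the second hunk: disable_logging already redirected stdout to os.devnull inside init(); this change additionally forwards the flag into LangTraceExporter (plus a formatting fix on the keyword default). Caller-side, nothing changes beyond the existing public argument, roughly:

from langtrace_python_sdk import langtrace

# Silence the SDK's stdout output; the flag now also reaches the exporter.
langtrace.init(disable_logging=True)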
