Skip to content

Commit 9680a5e

Browse files
committed
chore: local test
1 parent e4fc70a commit 9680a5e

File tree

1 file changed

+42
-1
lines changed

1 file changed

+42
-1
lines changed

llm_observability_examples.py

Lines changed: 42 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,11 +33,13 @@ def main_sync():
3333
groups = {"company": "test_company"}
3434

3535
try:
36-
basic_openai_call(distinct_id, trace_id, properties, groups)
36+
# basic_openai_call(distinct_id, trace_id, properties, groups)
3737
# streaming_openai_call(distinct_id, trace_id, properties, groups)
3838
# embedding_openai_call(distinct_id, trace_id, properties, groups)
3939
# image_openai_call()
4040
# beta_openai_call(distinct_id, trace_id, properties, groups)
41+
# tool_call_openai_call(distinct_id, trace_id, properties, groups)
42+
streaming_tool_call_openai_call(distinct_id, trace_id, properties, groups)
4143
except Exception as e:
4244
print("Error during OpenAI call:", str(e))
4345

@@ -213,6 +215,45 @@ def beta_openai_call(distinct_id, trace_id, properties, groups):
213215
return response
214216

215217

218+
def tool_call_openai_call(distinct_id, trace_id, properties, groups):
    """Make a non-streaming chat completion with a weather function tool attached.

    Sends one user question plus a single `get_weather` tool definition, and
    tags the request with PostHog observability metadata (distinct id, trace
    id, properties, groups). Prints the raw response and returns it.

    Args:
        distinct_id: PostHog distinct id to attribute the call to.
        trace_id: PostHog trace id grouping related LLM calls.
        properties: Extra PostHog event properties.
        groups: PostHog group mapping for the event.

    Returns:
        The chat completion response object as returned by the client.
    """
    # Tool schema extracted into a named local for readability; content is
    # identical to the inline definition the API expects.
    weather_tool = {
        "type": "function",
        "function": {
            "name": "get_weather",
            "description": "Get weather",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {
                        "type": "string",
                        "description": "The location to get the weather for",
                    },
                    "unit": {
                        "type": "string",
                        "description": "The unit of temperature to return the weather in",
                        "enum": ["celsius", "fahrenheit"],
                    },
                },
                "required": ["location", "unit"],
            },
        },
    }

    response = openai_client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[{"role": "user", "content": "What's the weather in San Francisco?"}],
        tools=[weather_tool],
        posthog_distinct_id=distinct_id,
        posthog_trace_id=trace_id,
        posthog_properties=properties,
        posthog_groups=groups,
    )
    print(response)
    return response
239+
def streaming_tool_call_openai_call(distinct_id, trace_id, properties, groups):
    """Make a streaming chat completion with a minimal weather tool attached.

    Streams the model's answer chunk by chunk, echoing any text deltas to
    stdout without newlines, while tagging the request with PostHog
    observability metadata.

    NOTE(review): the loop below consumes the stream before it is returned,
    so callers receive an already-exhausted iterator — confirm that is
    intentional.

    Args:
        distinct_id: PostHog distinct id to attribute the call to.
        trace_id: PostHog trace id grouping related LLM calls.
        properties: Extra PostHog event properties.
        groups: PostHog group mapping for the event.

    Returns:
        The (consumed) streaming response object.
    """
    stream = openai_client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[{"role": "user", "content": "What's the weather in San Francisco?"}],
        tools=[
            {
                "type": "function",
                "function": {"name": "get_weather", "description": "Get weather", "parameters": {}},
            }
        ],
        stream=True,
        posthog_distinct_id=distinct_id,
        posthog_trace_id=trace_id,
        posthog_properties=properties,
        posthog_groups=groups,
    )

    for chunk in stream:
        # Guard against chunks without choices; `or ""` keeps tool-call
        # deltas (which carry no text content) from printing "None".
        if hasattr(chunk, "choices") and chunk.choices and len(chunk.choices) > 0:
            print(chunk.choices[0].delta.content or "", end="")

    return stream
216257
# HOW TO RUN:
217258
# comment out one of these to run the other
218259

0 commit comments

Comments
 (0)