Skip to content

Commit 04be8fb

Browse files
committed
feat: add support for async and streaming
1 parent cde65a7 commit 04be8fb

File tree

6 files changed

+354
-126
lines changed

llm_observability_example.py

Lines changed: 0 additions & 46 deletions
This file was deleted.

llm_observability_examples.py

Lines changed: 99 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,99 @@
1+
import os
2+
import uuid
3+
import asyncio
4+
5+
import posthog
6+
from posthog.ai import OpenAI, AsyncOpenAI
7+
8+
# Example credentials - replace these with your own or use environment variables
posthog.project_api_key = os.getenv("POSTHOG_PROJECT_API_KEY", "your-project-api-key")
# Personal API key is required for LLM observability feature flags/ingestion auth.
posthog.personal_api_key = os.getenv("POSTHOG_PERSONAL_API_KEY", "your-personal-api-key")
posthog.host = os.getenv("POSTHOG_HOST", "http://localhost:8000")  # Or https://app.posthog.com
# Verbose SDK logging so captured LLM events are visible while experimenting.
posthog.debug = True

# PostHog-instrumented OpenAI clients: same API surface as the official SDK,
# but each call is also captured as an observability event via `posthog_client`.
openai_client = OpenAI(
    api_key=os.getenv("OPENAI_API_KEY", "your-openai-api-key"),
    posthog_client=posthog,
)

# Async variant used by the `*_async_*` examples below.
async_openai_client = AsyncOpenAI(
    api_key=os.getenv("OPENAI_API_KEY", "your-openai-api-key"),
    posthog_client=posthog,
)
23+
24+
def main_sync():
    """Run the synchronous examples, printing any failure instead of raising."""
    trace_id = str(uuid.uuid4())
    print("Trace ID:", trace_id)

    try:
        # Run the examples in order; a failure in the first skips the rest.
        for example in (basic_openai_call, streaming_openai_call):
            example()
    except Exception as exc:
        print("Error during OpenAI call:", str(exc))
33+
34+
async def main_async():
    """Run the asynchronous streaming example, printing any failure instead of raising."""
    try:
        # Uncomment to also exercise the non-streaming async example:
        # await basic_async_openai_call()
        await streaming_async_openai_call()
    except Exception as exc:
        print("Error during OpenAI call:", str(exc))
40+
41+
42+
def basic_openai_call():
    """Issue one non-streaming chat completion and print the first reply.

    Returns the raw completion response object so callers can inspect it.
    """
    conversation = [
        {"role": "system", "content": "You are a complex problem solver."},
        {"role": "user", "content": "Explain quantum computing in simple terms."},
    ]
    response = openai_client.chat.completions.create(
        model="gpt-4o-mini",
        messages=conversation,
        max_tokens=100,
        temperature=0.7,
    )
    # Only index into `choices` once we know the list is non-empty.
    if not (response and response.choices):
        print("No response or unexpected format returned.")
    else:
        print("OpenAI response:", response.choices[0].message.content)
    return response
54+
55+
async def basic_async_openai_call():
    """Issue one non-streaming chat completion with the async client.

    Prints the first choice's content (or a fallback message) and returns
    the raw completion response object.
    """
    response = await async_openai_client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[{"role": "system", "content": "You are a complex problem solver."}, {"role": "user", "content": "Explain quantum computing in simple terms."}],
        max_tokens=100,
        temperature=0.7,
    )
    # Bug fix: the previous `hasattr(response, "choices")` check passed even
    # when `choices` was an empty list, so `choices[0]` could raise
    # IndexError. Checking truthiness matches basic_openai_call() and is safe.
    if response and response.choices:
        print("OpenAI response:", response.choices[0].message.content)
    else:
        print("No response or unexpected format returned.")
    return response
67+
68+
def streaming_openai_call():
    """Stream a chat completion, echoing content deltas to stdout as they arrive.

    Returns the (already-consumed) stream object.
    """
    response = openai_client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[{"role": "system", "content": "You are a complex problem solver."}, {"role": "user", "content": "Explain quantum computing in simple terms."}],
        max_tokens=100,
        temperature=0.7,
        stream=True,
    )

    for chunk in response:
        # Robustness: some stream chunks carry an empty `choices` list
        # (e.g. a trailing usage chunk) — guard before indexing to avoid
        # IndexError mid-stream.
        if chunk.choices:
            print(chunk.choices[0].delta.content or "", end="")

    return response
81+
82+
async def streaming_async_openai_call():
    """Stream a chat completion with the async client, echoing content deltas.

    Returns the (already-consumed) async stream object.
    """
    response = await async_openai_client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[{"role": "system", "content": "You are a complex problem solver."}, {"role": "user", "content": "Explain quantum computing in simple terms."}],
        max_tokens=100,
        temperature=0.7,
        stream=True,
    )

    async for chunk in response:
        # Robustness: some stream chunks carry an empty `choices` list
        # (e.g. a trailing usage chunk) — guard before indexing to avoid
        # IndexError mid-stream.
        if chunk.choices:
            print(chunk.choices[0].delta.content or "", end="")

    return response
95+
96+
if __name__ == "__main__":
    # Runs the synchronous examples by default; swap in the line below to
    # exercise the async variants instead.
    main_sync()

    # asyncio.run(main_async())

llm_observability_stream_example.py

Lines changed: 0 additions & 43 deletions
This file was deleted.

posthog/ai/__init__.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
1-
from .openai import OpenAI
1+
from .openai import OpenAI, AsyncOpenAI
22

3-
__all__ = ["OpenAI"]
3+
__all__ = ["OpenAI", "AsyncOpenAI"]

0 commit comments

Comments
 (0)