Skip to content

Commit cde65a7

Browse files
committed
feat: add llm observability to python sdk
1 parent 05932b3 commit cde65a7

File tree

5 files changed

+240
-1
lines changed

5 files changed

+240
-1
lines changed

llm_observability_example.py

Lines changed: 46 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,46 @@
1+
import os
import uuid

import posthog
from posthog.ai import OpenAI

# Example credentials - replace these with your own or use environment variables
posthog.project_api_key = os.getenv("POSTHOG_PROJECT_API_KEY", "your-project-api-key")
posthog.personal_api_key = os.getenv("POSTHOG_PERSONAL_API_KEY", "your-personal-api-key")
posthog.host = os.getenv("POSTHOG_HOST", "http://localhost:8000")  # Or https://app.posthog.com
posthog.debug = True

openai_client = OpenAI(
    api_key=os.getenv("OPENAI_API_KEY", "your-openai-api-key"),
    posthog_client=posthog,
)


def main():
    """Make one (non-streaming) chat completion call and print the answer."""
    # A fresh trace id lets related calls be grouped together in PostHog.
    trace_id = str(uuid.uuid4())
    print("Trace ID:", trace_id)

    try:
        print("Calling OpenAI")
        response = openai_client.chat.completions.create(
            model="gpt-4o-mini",
            messages=[
                {"role": "system", "content": "You are a complex problem solver."},
                {"role": "user", "content": "Explain quantum computing in simple terms."},
            ],
            max_tokens=100,
            temperature=0.7,
            # PostHog-specific kwargs consumed by the wrapper, not OpenAI.
            posthog_distinct_id="user_12345",
            posthog_trace_id=trace_id,
            posthog_properties={"example_key": "example_value"},
        )
        print("RESPONSE:", response)
        if not (response and response.choices):
            print("No response or unexpected format returned.")
        else:
            print("OpenAI response:", response.choices[0].message.content)
    except Exception as err:
        print("Error during OpenAI call:", str(err))


if __name__ == "__main__":
    main()
Lines changed: 43 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,43 @@
1+
import os
import uuid

import posthog
from posthog.ai import OpenAI

# Example credentials - replace these with your own or use environment variables
posthog.project_api_key = os.getenv("POSTHOG_PROJECT_API_KEY", "your-project-api-key")
posthog.personal_api_key = os.getenv("POSTHOG_PERSONAL_API_KEY", "your-personal-api-key")
posthog.host = os.getenv("POSTHOG_HOST", "http://localhost:8000")  # Or https://app.posthog.com
posthog.debug = True

openai_client = OpenAI(
    api_key=os.getenv("OPENAI_API_KEY", "your-openai-api-key"),
    posthog_client=posthog,
)


def main():
    """Run one streaming chat completion, printing chunks as they arrive."""
    # A fresh trace id lets related calls be grouped together in PostHog.
    trace_id = str(uuid.uuid4())
    print("Trace ID:", trace_id)

    try:
        print("Calling OpenAI")
        stream = openai_client.chat.completions.create(
            model="gpt-4o-mini",
            stream=True,
            messages=[
                {"role": "system", "content": "You are a complex problem solver."},
                {"role": "user", "content": "Explain quantum computing in simple terms."},
            ],
            max_tokens=1000,
            temperature=0.7,
            # PostHog-specific kwargs consumed by the wrapper, not OpenAI.
            posthog_distinct_id="user_12345",
            posthog_trace_id=trace_id,
            posthog_properties={"example_key": "example_value"},
        )
        for chunk in stream:
            # Delta content is None for role/terminator chunks; print nothing then.
            print(chunk.choices[0].delta.content or "", end="")
    except Exception as err:
        print("Error during OpenAI call:", str(err))


if __name__ == "__main__":
    main()

posthog/ai/__init__.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
"""Public entry point for PostHog's AI/LLM observability integrations.

Currently exposes only the instrumented OpenAI client wrapper.
"""

from .openai import OpenAI

__all__ = ["OpenAI"]

posthog/ai/openai.py

Lines changed: 147 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,147 @@
1+
import time
2+
from typing import Any, Dict, Optional, Union
3+
4+
import openai
5+
from posthog.client import Client as PostHogClient
6+
7+
8+
# Model parameters worth echoing onto the PostHog event.
_TRACKED_PARAMS = (
    "temperature",
    "max_tokens",
    "top_p",
    "frequency_penalty",
    "presence_penalty",
    "n",
    "stop",
    "stream",
)


def get_model_params(kwargs: Dict[str, Any]) -> Dict[str, Any]:
    """Return the subset of *kwargs* that are recognized model parameters.

    Keys absent from *kwargs* are omitted; present keys are copied verbatim
    (including explicit ``None`` values).
    """
    return {name: kwargs[name] for name in _TRACKED_PARAMS if name in kwargs}
18+
19+
def get_output(response: "openai.types.chat.ChatCompletion") -> Dict[str, Any]:
    """Build the ``$ai_output`` payload from a non-streaming chat completion.

    Args:
        response: A completed (non-streaming) chat completion object.

    Returns:
        A dict with a ``"choices"`` list of ``{"content", "role"}`` entries,
        one per choice that carries textual content.
    """
    output: Dict[str, Any] = {"choices": []}
    for choice in response.choices:
        # `content` is None for non-text responses (e.g. tool calls); use an
        # explicit None check so legitimate empty-string completions are kept
        # rather than silently dropped by truthiness.
        if choice.message.content is not None:
            output["choices"].append(
                {
                    "content": choice.message.content,
                    "role": choice.message.role,
                }
            )
    return output
30+
31+
32+
class OpenAI:
    """
    A wrapper around the OpenAI SDK that automatically sends LLM usage events to PostHog.
    """

    def __init__(
        self,
        posthog_client: PostHogClient,
        **openai_config: Any,
    ):
        """
        Args:
            posthog_client: Client (or posthog module) used to capture events;
                events go through this instance instead of the global module.
            **openai_config: Keyword args forwarded verbatim to ``openai.OpenAI``
                (e.g. api_key="...", organization="xxx").
        """
        # Instantiate a dedicated OpenAI client rather than mutating global config.
        self._posthog_client = posthog_client
        self._openai_client = openai.OpenAI(**openai_config)

    @property
    def chat(self) -> "ChatNamespace":
        # Fresh namespace per access; it only holds references to the two clients.
        return ChatNamespace(self._posthog_client, self._openai_client)
56+
57+
58+
class ChatNamespace:
    """Mirrors ``openai_client.chat`` while carrying the PostHog client along."""

    def __init__(self, posthog_client: Union[PostHogClient, Any], openai_client: Any):
        self._openai_client = openai_client
        self._ph_client = posthog_client

    @property
    def completions(self):
        # Delegates to the instrumented completions wrapper.
        return ChatCompletions(self._ph_client, self._openai_client)
66+
67+
68+
class ChatCompletions:
    """Instrumented stand-in for ``openai_client.chat.completions``."""

    def __init__(self, posthog_client: Union["PostHogClient", Any], openai_client: Any):
        self._ph_client = posthog_client
        self._openai_client = openai_client

    def create(
        self,
        posthog_distinct_id: Optional[str] = None,
        posthog_trace_id: Optional[str] = None,
        posthog_properties: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ):
        """
        Wraps open ai chat completions and captures a $ai_generation event in PostHog.

        PostHog-specific parameters:
            posthog_distinct_id: Ties the resulting event to a user in PostHog.
            posthog_trace_id: For grouping multiple calls into a single trace.
            posthog_properties: Additional custom properties to include on the PostHog event.

        Returns:
            Whatever the underlying OpenAI call returns (completion or stream).

        Raises:
            Exception: the original OpenAI error is re-raised after the event
                has been captured.
        """
        start_time = time.time()
        response = None
        error: Optional[Exception] = None
        http_status = 200

        try:
            response = self._openai_client.chat.completions.create(**kwargs)
        except Exception as exc:
            error = exc
            # OpenAI SDK errors expose `status_code`; anything else maps to 500.
            http_status = getattr(exc, "status_code", 500)

        # NOTE: post-processing deliberately happens *after* the try/except, not
        # in a `finally:` that returns — a return reached from `finally` would
        # swallow in-flight BaseExceptions (e.g. KeyboardInterrupt).
        latency = time.time() - start_time

        # Token usage only exists on full (non-streaming) responses; guard the
        # attribute value, not just its presence, before calling model_dump().
        usage: Dict[str, Any] = {}
        if response is not None and getattr(response, "usage", None) is not None:
            usage = response.usage.model_dump()

        # Build PostHog event properties
        event_properties = {
            "$ai_provider": "openai",
            "$ai_model": kwargs.get("model"),
            "$ai_model_parameters": get_model_params(kwargs),
            "$ai_input": kwargs.get("messages"),
            "$ai_output": None,
            "$ai_http_status": http_status,
            "$ai_input_tokens": usage.get("prompt_tokens", 0),
            "$ai_output_tokens": usage.get("completion_tokens", 0),
            "$ai_latency": latency,
            "$ai_trace_id": posthog_trace_id,
        }

        # If not streaming and no error, try storing some output detail
        # TODO: we need to support streaming responses
        if response is not None and error is None and not kwargs.get("stream", False):
            event_properties["$ai_output"] = get_output(response)

        # Merge in any custom PostHog properties
        if posthog_properties:
            event_properties.update(posthog_properties)

        # Capture event in PostHog (best-effort: skip if the client can't capture).
        if hasattr(self._ph_client, "capture") and callable(self._ph_client.capture):
            self._ph_client.capture(
                distinct_id=posthog_distinct_id or "anonymous_ai_user",
                event="$ai_generation",
                properties=event_properties,
            )

        if error is not None:
            raise error

        return response

setup.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414
PostHog is developer-friendly, self-hosted product analytics. posthog-python is the python package.
1515
"""
1616

17-
install_requires = ["requests>=2.7,<3.0", "six>=1.5", "monotonic>=1.5", "backoff>=1.10.0", "python-dateutil>2.1"]
17+
install_requires = ["requests>=2.7,<3.0", "six>=1.5", "monotonic>=1.5", "backoff>=1.10.0", "python-dateutil>2.1", "openai>=1.59.5"]
1818

1919
extras_require = {
2020
"dev": [

0 commit comments

Comments
 (0)