Skip to content

Commit 5a4167d

Browse files
authored
feat: add support for responses api (#205)
* feat: add support for responses api * fix: test * fix: black * fix: test - hopefully * fix: test - hopefully #2 * fix: test - hopefully #3 * fix: test - hopefully #4 * fix: greptile catch * fix: mypy is not my friend * fix: isort usort weallsort * fix: noredef * fix: mypy baseline * fix: mypy * fix: mypy
1 parent 332a6ff commit 5a4167d

File tree

9 files changed

+577
-57
lines changed

9 files changed

+577
-57
lines changed

CHANGELOG.md

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,9 @@
1+
2+
3+
## 3.20.0 – 2025-03-13
4+
5+
1. Add support for OpenAI Responses API.
6+
17
## 3.19.2 – 2025-03-11
28

39
1. Fix install requirements for analytics package

mypy-baseline.txt

Lines changed: 1 addition & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -35,21 +35,7 @@ posthog/sentry/posthog_integration.py:0: error: Statement is unreachable [unrea
3535
posthog/ai/utils.py:0: error: Need type annotation for "output" (hint: "output: list[<type>] = ...") [var-annotated]
3636
posthog/ai/utils.py:0: error: Function "builtins.any" is not valid as a type [valid-type]
3737
posthog/ai/utils.py:0: note: Perhaps you meant "typing.Any" instead of "any"?
38-
posthog/ai/utils.py:0: error: Incompatible types in assignment (expression has type "UUID", variable has type "str | None") [assignment]
3938
posthog/ai/utils.py:0: error: Function "builtins.any" is not valid as a type [valid-type]
4039
posthog/ai/utils.py:0: note: Perhaps you meant "typing.Any" instead of "any"?
41-
posthog/ai/utils.py:0: error: Incompatible types in assignment (expression has type "UUID", variable has type "str | None") [assignment]
4240
sentry_django_example/sentry_django_example/settings.py:0: error: Need type annotation for "ALLOWED_HOSTS" (hint: "ALLOWED_HOSTS: list[<type>] = ...") [var-annotated]
43-
sentry_django_example/sentry_django_example/settings.py:0: error: Incompatible types in assignment (expression has type "str", variable has type "None") [assignment]
44-
posthog/ai/openai/openai_async.py:0: error: Incompatible types in assignment (expression has type "UUID", variable has type "str | None") [assignment]
45-
posthog/ai/openai/openai_async.py:0: error: Incompatible types in assignment (expression has type "UUID", variable has type "str | None") [assignment]
46-
posthog/ai/openai/openai_async.py:0: error: Incompatible types in assignment (expression has type "UUID", variable has type "str | None") [assignment]
47-
posthog/ai/openai/openai.py:0: error: Incompatible types in assignment (expression has type "UUID", variable has type "str | None") [assignment]
48-
posthog/ai/openai/openai.py:0: error: Incompatible types in assignment (expression has type "UUID", variable has type "str | None") [assignment]
49-
posthog/ai/openai/openai.py:0: error: Incompatible types in assignment (expression has type "UUID", variable has type "str | None") [assignment]
50-
posthog/ai/anthropic/anthropic_async.py:0: error: Incompatible types in assignment (expression has type "UUID", variable has type "str | None") [assignment]
51-
posthog/ai/anthropic/anthropic_async.py:0: error: Incompatible types in assignment (expression has type "UUID", variable has type "str | None") [assignment]
52-
posthog/ai/anthropic/anthropic_async.py:0: error: Incompatible types in assignment (expression has type "UUID", variable has type "str | None") [assignment]
53-
posthog/ai/anthropic/anthropic.py:0: error: Incompatible types in assignment (expression has type "UUID", variable has type "str | None") [assignment]
54-
posthog/ai/anthropic/anthropic.py:0: error: Incompatible types in assignment (expression has type "UUID", variable has type "str | None") [assignment]
55-
posthog/ai/anthropic/anthropic.py:0: error: Incompatible types in assignment (expression has type "UUID", variable has type "str | None") [assignment]
41+
sentry_django_example/sentry_django_example/settings.py:0: error: Incompatible types in assignment (expression has type "str", variable has type "None") [assignment]

posthog/ai/anthropic/anthropic.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,7 @@ def create(
5454
**kwargs: Arguments passed to Anthropic's messages.create
5555
"""
5656
if posthog_trace_id is None:
57-
posthog_trace_id = uuid.uuid4()
57+
posthog_trace_id = str(uuid.uuid4())
5858

5959
if kwargs.get("stream", False):
6060
return self._create_streaming(
@@ -89,7 +89,7 @@ def stream(
8989
**kwargs: Any,
9090
):
9191
if posthog_trace_id is None:
92-
posthog_trace_id = uuid.uuid4()
92+
posthog_trace_id = str(uuid.uuid4())
9393

9494
return self._create_streaming(
9595
posthog_distinct_id,
@@ -167,7 +167,7 @@ def _capture_streaming_event(
167167
output: str,
168168
):
169169
if posthog_trace_id is None:
170-
posthog_trace_id = uuid.uuid4()
170+
posthog_trace_id = str(uuid.uuid4())
171171

172172
event_properties = {
173173
"$ai_provider": "anthropic",

posthog/ai/anthropic/anthropic_async.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,7 @@ async def create(
5454
**kwargs: Arguments passed to Anthropic's messages.create
5555
"""
5656
if posthog_trace_id is None:
57-
posthog_trace_id = uuid.uuid4()
57+
posthog_trace_id = str(uuid.uuid4())
5858

5959
if kwargs.get("stream", False):
6060
return await self._create_streaming(
@@ -89,7 +89,7 @@ async def stream(
8989
**kwargs: Any,
9090
):
9191
if posthog_trace_id is None:
92-
posthog_trace_id = uuid.uuid4()
92+
posthog_trace_id = str(uuid.uuid4())
9393

9494
return await self._create_streaming(
9595
posthog_distinct_id,
@@ -167,7 +167,7 @@ async def _capture_streaming_event(
167167
output: str,
168168
):
169169
if posthog_trace_id is None:
170-
posthog_trace_id = uuid.uuid4()
170+
posthog_trace_id = str(uuid.uuid4())
171171

172172
event_properties = {
173173
"$ai_provider": "anthropic",

posthog/ai/openai/openai.py

Lines changed: 171 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -32,6 +32,167 @@ def __init__(self, posthog_client: PostHogClient, **kwargs):
3232
self.chat = WrappedChat(self)
3333
self.embeddings = WrappedEmbeddings(self)
3434
self.beta = WrappedBeta(self)
35+
self.responses = WrappedResponses(self)
36+
37+
38+
class WrappedResponses(openai.resources.responses.Responses):
39+
_client: OpenAI
40+
41+
def create(
42+
self,
43+
posthog_distinct_id: Optional[str] = None,
44+
posthog_trace_id: Optional[str] = None,
45+
posthog_properties: Optional[Dict[str, Any]] = None,
46+
posthog_privacy_mode: bool = False,
47+
posthog_groups: Optional[Dict[str, Any]] = None,
48+
**kwargs: Any,
49+
):
50+
if posthog_trace_id is None:
51+
posthog_trace_id = str(uuid.uuid4())
52+
53+
if kwargs.get("stream", False):
54+
return self._create_streaming(
55+
posthog_distinct_id,
56+
posthog_trace_id,
57+
posthog_properties,
58+
posthog_privacy_mode,
59+
posthog_groups,
60+
**kwargs,
61+
)
62+
63+
return call_llm_and_track_usage(
64+
posthog_distinct_id,
65+
self._client._ph_client,
66+
"openai",
67+
posthog_trace_id,
68+
posthog_properties,
69+
posthog_privacy_mode,
70+
posthog_groups,
71+
self._client.base_url,
72+
super().create,
73+
**kwargs,
74+
)
75+
76+
def _create_streaming(
77+
self,
78+
posthog_distinct_id: Optional[str],
79+
posthog_trace_id: Optional[str],
80+
posthog_properties: Optional[Dict[str, Any]],
81+
posthog_privacy_mode: bool,
82+
posthog_groups: Optional[Dict[str, Any]],
83+
**kwargs: Any,
84+
):
85+
start_time = time.time()
86+
usage_stats: Dict[str, int] = {}
87+
final_content = []
88+
response = super().create(**kwargs)
89+
90+
def generator():
91+
nonlocal usage_stats
92+
nonlocal final_content
93+
94+
try:
95+
for chunk in response:
96+
if hasattr(chunk, "type") and chunk.type == "response.completed":
97+
res = chunk.response
98+
if res.output and len(res.output) > 0:
99+
final_content.append(res.output[0])
100+
101+
if hasattr(chunk, "usage") and chunk.usage:
102+
usage_stats = {
103+
k: getattr(chunk.usage, k, 0)
104+
for k in [
105+
"input_tokens",
106+
"output_tokens",
107+
"total_tokens",
108+
]
109+
}
110+
111+
# Add support for cached tokens
112+
if hasattr(chunk.usage, "output_tokens_details") and hasattr(
113+
chunk.usage.output_tokens_details, "reasoning_tokens"
114+
):
115+
usage_stats["reasoning_tokens"] = chunk.usage.output_tokens_details.reasoning_tokens
116+
117+
if hasattr(chunk.usage, "input_tokens_details") and hasattr(
118+
chunk.usage.input_tokens_details, "cached_tokens"
119+
):
120+
usage_stats["cache_read_input_tokens"] = chunk.usage.input_tokens_details.cached_tokens
121+
122+
yield chunk
123+
124+
finally:
125+
end_time = time.time()
126+
latency = end_time - start_time
127+
output = final_content
128+
self._capture_streaming_event(
129+
posthog_distinct_id,
130+
posthog_trace_id,
131+
posthog_properties,
132+
posthog_privacy_mode,
133+
posthog_groups,
134+
kwargs,
135+
usage_stats,
136+
latency,
137+
output,
138+
)
139+
140+
return generator()
141+
142+
def _capture_streaming_event(
143+
self,
144+
posthog_distinct_id: Optional[str],
145+
posthog_trace_id: Optional[str],
146+
posthog_properties: Optional[Dict[str, Any]],
147+
posthog_privacy_mode: bool,
148+
posthog_groups: Optional[Dict[str, Any]],
149+
kwargs: Dict[str, Any],
150+
usage_stats: Dict[str, int],
151+
latency: float,
152+
output: Any,
153+
tool_calls: Optional[List[Dict[str, Any]]] = None,
154+
):
155+
if posthog_trace_id is None:
156+
posthog_trace_id = str(uuid.uuid4())
157+
158+
event_properties = {
159+
"$ai_provider": "openai",
160+
"$ai_model": kwargs.get("model"),
161+
"$ai_model_parameters": get_model_params(kwargs),
162+
"$ai_input": with_privacy_mode(self._client._ph_client, posthog_privacy_mode, kwargs.get("input")),
163+
"$ai_output_choices": with_privacy_mode(
164+
self._client._ph_client,
165+
posthog_privacy_mode,
166+
output,
167+
),
168+
"$ai_http_status": 200,
169+
"$ai_input_tokens": usage_stats.get("input_tokens", 0),
170+
"$ai_output_tokens": usage_stats.get("output_tokens", 0),
171+
"$ai_cache_read_input_tokens": usage_stats.get("cache_read_input_tokens", 0),
172+
"$ai_reasoning_tokens": usage_stats.get("reasoning_tokens", 0),
173+
"$ai_latency": latency,
174+
"$ai_trace_id": posthog_trace_id,
175+
"$ai_base_url": str(self._client.base_url),
176+
**(posthog_properties or {}),
177+
}
178+
179+
if tool_calls:
180+
event_properties["$ai_tools"] = with_privacy_mode(
181+
self._client._ph_client,
182+
posthog_privacy_mode,
183+
tool_calls,
184+
)
185+
186+
if posthog_distinct_id is None:
187+
event_properties["$process_person_profile"] = False
188+
189+
if hasattr(self._client._ph_client, "capture"):
190+
self._client._ph_client.capture(
191+
distinct_id=posthog_distinct_id or posthog_trace_id,
192+
event="$ai_generation",
193+
properties=event_properties,
194+
groups=posthog_groups,
195+
)
35196

36197

37198
class WrappedChat(openai.resources.chat.Chat):
@@ -55,7 +216,7 @@ def create(
55216
**kwargs: Any,
56217
):
57218
if posthog_trace_id is None:
58-
posthog_trace_id = uuid.uuid4()
219+
posthog_trace_id = str(uuid.uuid4())
59220

60221
if kwargs.get("stream", False):
61222
return self._create_streaming(
@@ -121,6 +282,11 @@ def generator():
121282
):
122283
usage_stats["cache_read_input_tokens"] = chunk.usage.prompt_tokens_details.cached_tokens
123284

285+
if hasattr(chunk.usage, "output_tokens_details") and hasattr(
286+
chunk.usage.output_tokens_details, "reasoning_tokens"
287+
):
288+
usage_stats["reasoning_tokens"] = chunk.usage.output_tokens_details.reasoning_tokens
289+
124290
if hasattr(chunk, "choices") and chunk.choices and len(chunk.choices) > 0:
125291
if chunk.choices[0].delta and chunk.choices[0].delta.content:
126292
content = chunk.choices[0].delta.content
@@ -171,11 +337,11 @@ def _capture_streaming_event(
171337
kwargs: Dict[str, Any],
172338
usage_stats: Dict[str, int],
173339
latency: float,
174-
output: str,
340+
output: Any,
175341
tool_calls: Optional[List[Dict[str, Any]]] = None,
176342
):
177343
if posthog_trace_id is None:
178-
posthog_trace_id = uuid.uuid4()
344+
posthog_trace_id = str(uuid.uuid4())
179345

180346
event_properties = {
181347
"$ai_provider": "openai",
@@ -191,6 +357,7 @@ def _capture_streaming_event(
191357
"$ai_input_tokens": usage_stats.get("prompt_tokens", 0),
192358
"$ai_output_tokens": usage_stats.get("completion_tokens", 0),
193359
"$ai_cache_read_input_tokens": usage_stats.get("cache_read_input_tokens", 0),
360+
"$ai_reasoning_tokens": usage_stats.get("reasoning_tokens", 0),
194361
"$ai_latency": latency,
195362
"$ai_trace_id": posthog_trace_id,
196363
"$ai_base_url": str(self._client.base_url),
@@ -241,7 +408,7 @@ def create(
241408
The response from OpenAI's embeddings.create call.
242409
"""
243410
if posthog_trace_id is None:
244-
posthog_trace_id = uuid.uuid4()
411+
posthog_trace_id = str(uuid.uuid4())
245412

246413
start_time = time.time()
247414
response = super().create(**kwargs)

0 commit comments

Comments
 (0)