
Commit b3e55bf

Merge branch 'master' into feat/modal_param_extraction

2 parents: e429889 + 80f0b3e

File tree (4 files changed: +91, -15 lines):

posthog/ai/anthropic/anthropic.py
posthog/ai/anthropic/anthropic_async.py
posthog/ai/utils.py
posthog/test/ai/anthropic/test_anthropic.py

posthog/ai/anthropic/anthropic.py

Lines changed: 12 additions & 2 deletions

@@ -8,7 +8,13 @@
 import uuid
 from typing import Any, Dict, Optional
 
-from posthog.ai.utils import call_llm_and_track_usage, extract_core_model_params, get_model_params, with_privacy_mode
+from posthog.ai.utils import (
+    call_llm_and_track_usage,
+    extract_core_model_params,
+    get_model_params,
+    merge_system_prompt,
+    with_privacy_mode,
+)
 from posthog.client import Client as PostHogClient
 
 
@@ -171,7 +177,11 @@ def _capture_streaming_event(
             "$ai_provider": "anthropic",
             "$ai_model": kwargs.get("model"),
             "$ai_model_parameters": get_model_params(kwargs),
-            "$ai_input": with_privacy_mode(self._client._ph_client, posthog_privacy_mode, kwargs.get("messages")),
+            "$ai_input": with_privacy_mode(
+                self._client._ph_client,
+                posthog_privacy_mode,
+                merge_system_prompt(kwargs, "anthropic"),
+            ),
             "$ai_output_choices": with_privacy_mode(
                 self._client._ph_client,
                 posthog_privacy_mode,
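
What this buys on the sync streaming path: a top-level `system` kwarg now lands in the captured `$ai_input` ahead of the user messages. A minimal sketch, assuming the wrapper is constructed the way this repo's tests do (the project key, host, API key, and prompt strings below are placeholders):

from posthog import Posthog
from posthog.ai.anthropic import Anthropic

posthog_client = Posthog("phc-placeholder", host="https://us.i.posthog.com")  # placeholder key/host
client = Anthropic(api_key="sk-ant-placeholder", posthog_client=posthog_client)

stream = client.messages.create(
    model="claude-3-opus-20240229",
    system="Answer with one word.",  # merged into $ai_input as a system message
    messages=[{"role": "user", "content": "Ping?"}],
    max_tokens=4,
    stream=True,
)
for _chunk in stream:  # exhausting the stream triggers _capture_streaming_event
    pass

# Captured $ai_input (privacy mode aside):
#   [{"role": "system", "content": "Answer with one word."},
#    {"role": "user", "content": "Ping?"}]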

posthog/ai/anthropic/anthropic_async.py

Lines changed: 6 additions & 2 deletions

@@ -8,7 +8,7 @@
 import uuid
 from typing import Any, Dict, Optional
 
-from posthog.ai.utils import call_llm_and_track_usage_async, get_model_params, with_privacy_mode
+from posthog.ai.utils import call_llm_and_track_usage_async, get_model_params, merge_system_prompt, with_privacy_mode
 from posthog.client import Client as PostHogClient
 
 
@@ -171,7 +171,11 @@ async def _capture_streaming_event(
             "$ai_provider": "anthropic",
             "$ai_model": kwargs.get("model"),
             "$ai_model_parameters": get_model_params(kwargs),
-            "$ai_input": with_privacy_mode(self._client._ph_client, posthog_privacy_mode, kwargs.get("messages")),
+            "$ai_input": with_privacy_mode(
+                self._client._ph_client,
+                posthog_privacy_mode,
+                merge_system_prompt(kwargs, "anthropic"),
+            ),
             "$ai_output_choices": with_privacy_mode(
                 self._client._ph_client,
                 posthog_privacy_mode,
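
The async wrapper gets the same merge; consuming the stream is what fires the capture. A sketch under the same placeholder assumptions as above:

import asyncio

from posthog import Posthog
from posthog.ai.anthropic import AsyncAnthropic

async def main() -> None:
    posthog_client = Posthog("phc-placeholder", host="https://us.i.posthog.com")  # placeholder
    client = AsyncAnthropic(api_key="sk-ant-placeholder", posthog_client=posthog_client)
    stream = await client.messages.create(
        model="claude-3-opus-20240229",
        system="Answer with one word.",
        messages=[{"role": "user", "content": "Ping?"}],
        max_tokens=4,
        stream=True,
    )
    async for _chunk in stream:  # the async capture runs once the stream is exhausted
        pass

asyncio.run(main())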

posthog/ai/utils.py

Lines changed: 15 additions & 2 deletions

@@ -115,6 +115,15 @@ def format_response_openai(response):
     return output
 
 
+def merge_system_prompt(kwargs: Dict[str, Any], provider: str):
+    if provider != "anthropic":
+        return kwargs.get("messages")
+    messages = kwargs.get("messages") or []
+    if kwargs.get("system") is None:
+        return messages
+    return [{"role": "system", "content": kwargs.get("system")}] + messages
+
+
 def call_llm_and_track_usage(
     posthog_distinct_id: Optional[str],
     ph_client: PostHogClient,
@@ -152,11 +161,13 @@ def call_llm_and_track_usage(
     if response and hasattr(response, "usage"):
         usage = get_usage(response, provider)
 
+    messages = merge_system_prompt(kwargs, provider)
+
     event_properties = {
         "$ai_provider": provider,
         "$ai_model": kwargs.get("model"),
         "$ai_model_parameters": get_model_params(kwargs),
-        "$ai_input": with_privacy_mode(ph_client, posthog_privacy_mode, kwargs.get("messages")),
+        "$ai_input": with_privacy_mode(ph_client, posthog_privacy_mode, messages),
         "$ai_output_choices": with_privacy_mode(
             ph_client, posthog_privacy_mode, format_response(response, provider)
         ),
@@ -221,11 +232,13 @@ async def call_llm_and_track_usage_async(
     if response and hasattr(response, "usage"):
         usage = get_usage(response, provider)
 
+    messages = merge_system_prompt(kwargs, provider)
+
     event_properties = {
         "$ai_provider": provider,
         "$ai_model": kwargs.get("model"),
         "$ai_model_parameters": get_model_params(kwargs),
-        "$ai_input": with_privacy_mode(ph_client, posthog_privacy_mode, kwargs.get("messages")),
+        "$ai_input": with_privacy_mode(ph_client, posthog_privacy_mode, messages),
         "$ai_output_choices": with_privacy_mode(
             ph_client, posthog_privacy_mode, format_response(response, provider)
         ),
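
The helper's behavior follows directly from its definition above; the values here are illustrative:

from posthog.ai.utils import merge_system_prompt

messages = [{"role": "user", "content": "Hi"}]

# Non-Anthropic providers: messages pass through untouched.
assert merge_system_prompt({"messages": messages}, "openai") == messages

# Anthropic with a top-level `system` kwarg: prepended as a system message.
assert merge_system_prompt({"system": "Be terse.", "messages": messages}, "anthropic") == [
    {"role": "system", "content": "Be terse."},
    {"role": "user", "content": "Hi"},
]

# Anthropic without `system`: messages returned as-is ([] if messages is missing).
assert merge_system_prompt({"messages": messages}, "anthropic") == messages
assert merge_system_prompt({}, "anthropic") == []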

posthog/test/ai/anthropic/test_anthropic.py

Lines changed: 58 additions & 9 deletions

@@ -218,25 +218,29 @@ def test_basic_integration(mock_client):
     client = Anthropic(posthog_client=mock_client)
     client.messages.create(
         model="claude-3-opus-20240229",
-        messages=[{"role": "user", "content": "You must always answer with 'Bar'."}],
+        messages=[{"role": "user", "content": "Foo"}],
         max_tokens=1,
         temperature=0,
         posthog_distinct_id="test-id",
         posthog_properties={"foo": "bar"},
+        system="You must always answer with 'Bar'.",
     )
 
     assert mock_client.capture.call_count == 1
 
     call_args = mock_client.capture.call_args[1]
     props = call_args["properties"]
-
     assert call_args["distinct_id"] == "test-id"
     assert call_args["event"] == "$ai_generation"
     assert props["$ai_provider"] == "anthropic"
     assert props["$ai_model"] == "claude-3-opus-20240229"
-    assert props["$ai_input"] == [{"role": "user", "content": "You must always answer with 'Bar'."}]
+    assert props["$ai_input"] == [
+        {"role": "system", "content": "You must always answer with 'Bar'."},
+        {"role": "user", "content": "Foo"},
+    ]
     assert props["$ai_output_choices"][0]["role"] == "assistant"
-    assert props["$ai_input_tokens"] == 16
+    assert props["$ai_output_choices"][0]["content"] == "Bar"
+    assert props["$ai_input_tokens"] == 18
     assert props["$ai_output_tokens"] == 1
     assert props["$ai_http_status"] == 200
     assert props["foo"] == "bar"
@@ -273,6 +277,54 @@ async def test_basic_async_integration(mock_client):
     assert isinstance(props["$ai_latency"], float)
 
 
+def test_streaming_system_prompt(mock_client, mock_anthropic_stream):
+    with patch("anthropic.resources.Messages.create", return_value=mock_anthropic_stream):
+        client = Anthropic(api_key="test-key", posthog_client=mock_client)
+        response = client.messages.create(
+            model="claude-3-opus-20240229",
+            system="Foo",
+            messages=[{"role": "user", "content": "Bar"}],
+            stream=True,
+        )
+
+        # Consume the stream
+        list(response)
+
+        # Wait a bit to ensure the capture is called
+        time.sleep(0.1)
+        assert mock_client.capture.call_count == 1
+
+        call_args = mock_client.capture.call_args[1]
+        props = call_args["properties"]
+        assert props["$ai_input"] == [{"role": "system", "content": "Foo"}, {"role": "user", "content": "Bar"}]
+
+
+@pytest.mark.skipif(not ANTHROPIC_API_KEY, reason="ANTHROPIC_API_KEY is not set")
+async def test_async_streaming_system_prompt(mock_client, mock_anthropic_stream):
+    client = AsyncAnthropic(posthog_client=mock_client)
+    response = await client.messages.create(
+        model="claude-3-opus-20240229",
+        system="You must always answer with 'Bar'.",
+        messages=[{"role": "user", "content": "Foo"}],
+        stream=True,
+        max_tokens=1,
+    )
+
+    # Consume the stream
+    [c async for c in response]
+
+    # Wait a bit to ensure the capture is called
+    time.sleep(0.1)
+    assert mock_client.capture.call_count == 1
+
+    call_args = mock_client.capture.call_args[1]
+    props = call_args["properties"]
+
+    assert props["$ai_input"] == [
+        {"role": "system", "content": "You must always answer with 'Bar'."},
+        {"role": "user", "content": "Foo"},
+    ]
+
+
 def test_core_model_params(mock_client, mock_anthropic_response):
     with patch("anthropic.resources.Messages.create", return_value=mock_anthropic_response):
         client = Anthropic(api_key="test-key", posthog_client=mock_client)
@@ -287,12 +339,9 @@ def test_core_model_params(mock_client, mock_anthropic_response):
         )
 
         assert response == mock_anthropic_response
-        assert mock_client.capture.call_count == 1
-
-        call_args = mock_client.capture.call_args[1]
-        props = call_args["properties"]
+        props = mock_client.capture.call_args[1]["properties"]
         assert props["$ai_model_parameters"] == {"temperature": 0.5, "max_tokens": 100, "stream": False}
         assert props["$ai_temperature"] == 0.5
         assert props["$ai_max_tokens"] == 100
         assert props["$ai_stream"] == False
-        assert props["foo"] == "bar"
+        assert props["foo"] == "bar"
