Skip to content

Commit f0da5bb

Browse files
committed
fix: add system prompt
1 parent bd497b3 commit f0da5bb

File tree

4 files changed

+86
-11
lines changed

4 files changed

+86
-11
lines changed

posthog/ai/anthropic/anthropic.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88
import uuid
99
from typing import Any, Dict, Optional
1010

11-
from posthog.ai.utils import call_llm_and_track_usage, get_model_params, with_privacy_mode
11+
from posthog.ai.utils import call_llm_and_track_usage, get_model_params, merge_system_prompt, with_privacy_mode
1212
from posthog.client import Client as PostHogClient
1313

1414

@@ -171,7 +171,11 @@ def _capture_streaming_event(
171171
"$ai_provider": "anthropic",
172172
"$ai_model": kwargs.get("model"),
173173
"$ai_model_parameters": get_model_params(kwargs),
174-
"$ai_input": with_privacy_mode(self._client._ph_client, posthog_privacy_mode, kwargs.get("messages")),
174+
"$ai_input": with_privacy_mode(
175+
self._client._ph_client,
176+
posthog_privacy_mode,
177+
merge_system_prompt(kwargs, "anthropic"),
178+
),
175179
"$ai_output_choices": with_privacy_mode(
176180
self._client._ph_client,
177181
posthog_privacy_mode,

posthog/ai/anthropic/anthropic_async.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88
import uuid
99
from typing import Any, Dict, Optional
1010

11-
from posthog.ai.utils import call_llm_and_track_usage_async, get_model_params, with_privacy_mode
11+
from posthog.ai.utils import call_llm_and_track_usage_async, get_model_params, merge_system_prompt, with_privacy_mode
1212
from posthog.client import Client as PostHogClient
1313

1414

@@ -171,7 +171,11 @@ async def _capture_streaming_event(
171171
"$ai_provider": "anthropic",
172172
"$ai_model": kwargs.get("model"),
173173
"$ai_model_parameters": get_model_params(kwargs),
174-
"$ai_input": with_privacy_mode(self._client._ph_client, posthog_privacy_mode, kwargs.get("messages")),
174+
"$ai_input": with_privacy_mode(
175+
self._client._ph_client,
176+
posthog_privacy_mode,
177+
merge_system_prompt(kwargs, "anthropic"),
178+
),
175179
"$ai_output_choices": with_privacy_mode(
176180
self._client._ph_client,
177181
posthog_privacy_mode,

posthog/ai/utils.py

Lines changed: 16 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import time
22
import uuid
3-
from typing import Any, Callable, Dict, Optional
3+
from typing import Any, Callable, Dict, List, Optional
44

55
from httpx import URL
66

@@ -86,6 +86,15 @@ def format_response_openai(response):
8686
return output
8787

8888

89+
def merge_system_prompt(kwargs: Dict[str, Any], provider: str):
    """Fold a provider-level system prompt into the message list for tracking.

    Anthropic's SDK takes the system prompt as a separate ``system`` kwarg
    instead of a message entry, so for the ``"anthropic"`` provider this
    prepends it as a ``{"role": "system", ...}`` message. For every other
    provider the ``messages`` value is returned exactly as supplied (which
    may be ``None`` if the caller omitted it).
    """
    if provider != "anthropic":
        return kwargs.get("messages")

    merged = kwargs.get("messages") or []
    system = kwargs.get("system")
    if system is None:
        return merged

    return [{"role": "system", "content": system}, *merged]
96+
97+
8998
def call_llm_and_track_usage(
9099
posthog_distinct_id: Optional[str],
91100
ph_client: PostHogClient,
@@ -123,11 +132,13 @@ def call_llm_and_track_usage(
123132
if response and hasattr(response, "usage"):
124133
usage = get_usage(response, provider)
125134

135+
messages = merge_system_prompt(kwargs, provider)
136+
126137
event_properties = {
127138
"$ai_provider": provider,
128139
"$ai_model": kwargs.get("model"),
129140
"$ai_model_parameters": get_model_params(kwargs),
130-
"$ai_input": with_privacy_mode(ph_client, posthog_privacy_mode, kwargs.get("messages")),
141+
"$ai_input": with_privacy_mode(ph_client, posthog_privacy_mode, messages),
131142
"$ai_output_choices": with_privacy_mode(
132143
ph_client, posthog_privacy_mode, format_response(response, provider)
133144
),
@@ -191,11 +202,13 @@ async def call_llm_and_track_usage_async(
191202
if response and hasattr(response, "usage"):
192203
usage = get_usage(response, provider)
193204

205+
messages = merge_system_prompt(kwargs, provider)
206+
194207
event_properties = {
195208
"$ai_provider": provider,
196209
"$ai_model": kwargs.get("model"),
197210
"$ai_model_parameters": get_model_params(kwargs),
198-
"$ai_input": with_privacy_mode(ph_client, posthog_privacy_mode, kwargs.get("messages")),
211+
"$ai_input": with_privacy_mode(ph_client, posthog_privacy_mode, messages),
199212
"$ai_output_choices": with_privacy_mode(
200213
ph_client, posthog_privacy_mode, format_response(response, provider)
201214
),

posthog/test/ai/anthropic/test_anthropic.py

Lines changed: 58 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -218,25 +218,29 @@ def test_basic_integration(mock_client):
218218
client = Anthropic(posthog_client=mock_client)
219219
client.messages.create(
220220
model="claude-3-opus-20240229",
221-
messages=[{"role": "user", "content": "You must always answer with 'Bar'."}],
221+
messages=[{"role": "user", "content": "Foo"}],
222222
max_tokens=1,
223223
temperature=0,
224224
posthog_distinct_id="test-id",
225225
posthog_properties={"foo": "bar"},
226+
system="You must always answer with 'Bar'.",
226227
)
227228

228229
assert mock_client.capture.call_count == 1
229230

230231
call_args = mock_client.capture.call_args[1]
231232
props = call_args["properties"]
232-
233233
assert call_args["distinct_id"] == "test-id"
234234
assert call_args["event"] == "$ai_generation"
235235
assert props["$ai_provider"] == "anthropic"
236236
assert props["$ai_model"] == "claude-3-opus-20240229"
237-
assert props["$ai_input"] == [{"role": "user", "content": "You must always answer with 'Bar'."}]
237+
assert props["$ai_input"] == [
238+
{"role": "system", "content": "You must always answer with 'Bar'."},
239+
{"role": "user", "content": "Foo"},
240+
]
238241
assert props["$ai_output_choices"][0]["role"] == "assistant"
239-
assert props["$ai_input_tokens"] == 16
242+
assert props["$ai_output_choices"][0]["content"] == "Bar"
243+
assert props["$ai_input_tokens"] == 18
240244
assert props["$ai_output_tokens"] == 1
241245
assert props["$ai_http_status"] == 200
242246
assert props["foo"] == "bar"
@@ -271,3 +275,53 @@ async def test_basic_async_integration(mock_client):
271275
assert props["$ai_http_status"] == 200
272276
assert props["foo"] == "bar"
273277
assert isinstance(props["$ai_latency"], float)
278+
279+
280+
def test_streaming_system_prompt(mock_client, mock_anthropic_stream):
281+
with patch("anthropic.resources.Messages.create", return_value=mock_anthropic_stream):
282+
client = Anthropic(api_key="test-key", posthog_client=mock_client)
283+
response = client.messages.create(
284+
model="claude-3-opus-20240229",
285+
system="Foo",
286+
messages=[{"role": "user", "content": "Bar"}],
287+
stream=True,
288+
)
289+
290+
# Consume the stream
291+
list(response)
292+
293+
# Wait a bit to ensure the capture is called
294+
time.sleep(0.1)
295+
assert mock_client.capture.call_count == 1
296+
297+
call_args = mock_client.capture.call_args[1]
298+
props = call_args["properties"]
299+
300+
assert props["$ai_input"] == [{"role": "system", "content": "Foo"}, {"role": "user", "content": "Bar"}]
301+
302+
303+
@pytest.mark.skipif(not ANTHROPIC_API_KEY, reason="ANTHROPIC_API_KEY is not set")
304+
async def test_async_streaming_system_prompt(mock_client, mock_anthropic_stream):
305+
client = AsyncAnthropic(posthog_client=mock_client)
306+
response = await client.messages.create(
307+
model="claude-3-opus-20240229",
308+
system="You must always answer with 'Bar'.",
309+
messages=[{"role": "user", "content": "Foo"}],
310+
stream=True,
311+
max_tokens=1,
312+
)
313+
314+
# Consume the stream
315+
[c async for c in response]
316+
317+
# Wait a bit to ensure the capture is called
318+
time.sleep(0.1)
319+
assert mock_client.capture.call_count == 1
320+
321+
call_args = mock_client.capture.call_args[1]
322+
props = call_args["properties"]
323+
324+
assert props["$ai_input"] == [
325+
{"role": "system", "content": "You must always answer with 'Bar'."},
326+
{"role": "user", "content": "Foo"},
327+
]

0 commit comments

Comments (0)