
Commit 996cce3
feat: privacy_mode
1 parent 190c628

5 files changed: +24 -15 lines

llm_observability_examples.py

Lines changed: 3 additions & 1 deletion

@@ -9,10 +9,12 @@
 posthog.personal_api_key = os.getenv("POSTHOG_PERSONAL_API_KEY", "your-personal-api-key")
 posthog.host = os.getenv("POSTHOG_HOST", "http://localhost:8000") # Or https://app.posthog.com
 posthog.debug = True
+# change this to False to see usage events
+posthog.privacy_mode = True
 
 openai_client = OpenAI(
     api_key=os.getenv("OPENAI_API_KEY", "your-openai-api-key"),
-    posthog_client=posthog,
+    posthog_client=posthog
 )
 
 async_openai_client = AsyncOpenAI(
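For context, a small usage sketch that follows on from the example file above (not part of the commit), assuming the wrapped client exposes the standard chat.completions.create interface; the model name and prompt are placeholders.

# Sketch: with posthog.privacy_mode = True (as set above), the wrapped client
# still reports $ai_input_tokens / $ai_output_tokens / $ai_latency, while the
# $ai_input and $ai_output properties are captured as None.
response = openai_client.chat.completions.create(
    model="gpt-4o-mini",  # placeholder model
    messages=[{"role": "user", "content": "ping"}],  # placeholder prompt
)
print(response.choices[0].message.content)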

posthog/__init__.py

Lines changed: 2 additions & 0 deletions

@@ -26,6 +26,8 @@
 exception_autocapture_integrations = [] # type: List[Integrations]
 # Used to determine in app paths for exception autocapture. Defaults to the current working directory
 project_root = None # type: Optional[str]
+# Used for our AI observability feature to not capture any prompt or output just usage + metadata
+privacy_mode = False # type: bool
 
 default_client = None # type: Optional[Client]
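To make the new default concrete, a short sketch (not from the commit) of toggling the module-level flag:

import posthog

# The flag ships disabled: prompt and output content is captured by default.
assert posthog.privacy_mode is False

# Opting in: AI observability events keep usage and metadata only;
# $ai_input and $ai_output are sent as None.
posthog.privacy_mode = True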

posthog/ai/openai/openai.py

Lines changed: 5 additions & 5 deletions

@@ -8,7 +8,7 @@
 except ImportError:
     raise ModuleNotFoundError("Please install the OpenAI SDK to use this feature: 'pip install openai'")
 
-from posthog.ai.utils import call_llm_and_track_usage, get_model_params
+from posthog.ai.utils import call_llm_and_track_usage, get_model_params, with_privacy_mode
 from posthog.client import Client as PostHogClient
 
 
@@ -142,15 +142,15 @@ def _capture_streaming_event(
             "$ai_provider": "openai",
             "$ai_model": kwargs.get("model"),
             "$ai_model_parameters": get_model_params(kwargs),
-            "$ai_input": kwargs.get("messages"),
-            "$ai_output": {
+            "$ai_input": with_privacy_mode(self._client._ph_client, kwargs.get("messages")),
+            "$ai_output": with_privacy_mode(self._client._ph_client, {
                 "choices": [
                     {
                         "content": output,
                         "role": "assistant",
                     }
                 ]
-            },
+            }),
             "$ai_http_status": 200,
             "$ai_input_tokens": usage_stats.get("prompt_tokens", 0),
             "$ai_output_tokens": usage_stats.get("completion_tokens", 0),
@@ -214,7 +214,7 @@ def create(
         event_properties = {
             "$ai_provider": "openai",
             "$ai_model": kwargs.get("model"),
-            "$ai_input": kwargs.get("input"),
+            "$ai_input": with_privacy_mode(self._client._ph_client, kwargs.get("input")),
             "$ai_http_status": 200,
             "$ai_input_tokens": usage_stats.get("prompt_tokens", 0),
             "$ai_latency": latency,

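For illustration only, a rough sketch of what the captured properties end up as once with_privacy_mode has been applied with privacy mode enabled; the concrete values below are hypothetical, only the property names come from the diff.

# Hypothetical event shape with posthog.privacy_mode = True:
event_properties = {
    "$ai_provider": "openai",
    "$ai_model": "gpt-4o-mini",   # placeholder model name
    "$ai_input": None,            # messages redacted by with_privacy_mode
    "$ai_output": None,           # choices redacted by with_privacy_mode
    "$ai_http_status": 200,
    "$ai_input_tokens": 42,       # placeholder token counts
    "$ai_output_tokens": 7,
    "$ai_latency": 0.31,          # placeholder latency in seconds
}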
posthog/ai/openai/openai_async.py

Lines changed: 5 additions & 5 deletions

@@ -8,7 +8,7 @@
 except ImportError:
     raise ModuleNotFoundError("Please install the OpenAI SDK to use this feature: 'pip install openai'")
 
-from posthog.ai.utils import call_llm_and_track_usage_async, get_model_params
+from posthog.ai.utils import call_llm_and_track_usage_async, get_model_params, with_privacy_mode
 from posthog.client import Client as PostHogClient
 
 
@@ -141,15 +141,15 @@ def _capture_streaming_event(
             "$ai_provider": "openai",
             "$ai_model": kwargs.get("model"),
             "$ai_model_parameters": get_model_params(kwargs),
-            "$ai_input": kwargs.get("messages"),
-            "$ai_output": {
+            "$ai_input": with_privacy_mode(self._client._ph_client, kwargs.get("messages")),
+            "$ai_output": with_privacy_mode(self._client._ph_client, {
                 "choices": [
                     {
                         "content": output,
                         "role": "assistant",
                     }
                 ]
-            },
+            }),
             "$ai_http_status": 200,
             "$ai_input_tokens": usage_stats.get("prompt_tokens", 0),
             "$ai_output_tokens": usage_stats.get("completion_tokens", 0),
@@ -213,7 +213,7 @@ async def create(
         event_properties = {
             "$ai_provider": "openai",
             "$ai_model": kwargs.get("model"),
-            "$ai_input": kwargs.get("input"),
+            "$ai_input": with_privacy_mode(self._client._ph_client, kwargs.get("input")),
            "$ai_http_status": 200,
             "$ai_input_tokens": usage_stats.get("prompt_tokens", 0),
             "$ai_latency": latency,

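The async wrapper mirrors the sync change above. A brief hedged sketch of using it with privacy mode on, assuming AsyncOpenAI is importable from posthog.ai.openai as in the example file; the model name and prompt are placeholders.

import asyncio
import os

import posthog
from posthog.ai.openai import AsyncOpenAI  # assumed import path, matching the example file

posthog.privacy_mode = True  # usage + metadata only, no prompt/output capture

async_openai_client = AsyncOpenAI(
    api_key=os.getenv("OPENAI_API_KEY", "your-openai-api-key"),
    posthog_client=posthog,
)

async def main():
    # Tokens and latency are still tracked; $ai_input / $ai_output are None.
    response = await async_openai_client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[{"role": "user", "content": "ping"}],
    )
    print(response.choices[0].message.content)

asyncio.run(main())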
posthog/ai/utils.py

Lines changed: 9 additions & 4 deletions

@@ -86,8 +86,8 @@ def call_llm_and_track_usage(
         "$ai_provider": "openai",
         "$ai_model": kwargs.get("model"),
         "$ai_model_parameters": get_model_params(kwargs),
-        "$ai_input": kwargs.get("messages"),
-        "$ai_output": format_response(response),
+        "$ai_input": with_privacy_mode(ph_client, kwargs.get("messages")),
+        "$ai_output": with_privacy_mode(ph_client, format_response(response)),
         "$ai_http_status": http_status,
         "$ai_input_tokens": input_tokens,
         "$ai_output_tokens": output_tokens,
@@ -150,8 +150,8 @@ async def call_llm_and_track_usage_async(
         "$ai_provider": "openai",
         "$ai_model": kwargs.get("model"),
         "$ai_model_parameters": get_model_params(kwargs),
-        "$ai_input": kwargs.get("messages"),
-        "$ai_output": format_response(response),
+        "$ai_input": with_privacy_mode(ph_client, kwargs.get("messages")),
+        "$ai_output": with_privacy_mode(ph_client, format_response(response)),
         "$ai_http_status": http_status,
         "$ai_input_tokens": input_tokens,
         "$ai_output_tokens": output_tokens,
@@ -176,3 +176,8 @@ async def call_llm_and_track_usage_async(
         raise error
 
     return response
+
+
+def with_privacy_mode(ph_client: PostHogClient, value: Any):
+    if ph_client.privacy_mode:
+        return None
+    return value
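Finally, a tiny behavioural sketch of the new helper (not test code from the commit); passing the posthog module itself as the client works here because the example file wires posthog_client=posthog and the module now carries privacy_mode.

import posthog
from posthog.ai.utils import with_privacy_mode

posthog.privacy_mode = False
# Pass-through when privacy mode is off.
assert with_privacy_mode(posthog, [{"role": "user", "content": "hi"}]) == [{"role": "user", "content": "hi"}]

posthog.privacy_mode = True
# With privacy mode on, the value is redacted to None before it reaches the event.
assert with_privacy_mode(posthog, [{"role": "user", "content": "hi"}]) is None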
