
Commit 7870ccd

feat: privacy_mode (#164)
1 parent 190c628 commit 7870ccd

9 files changed: +229 -42 lines

llm_observability_examples.py

Lines changed: 22 additions & 12 deletions
@@ -9,6 +9,8 @@
 posthog.personal_api_key = os.getenv("POSTHOG_PERSONAL_API_KEY", "your-personal-api-key")
 posthog.host = os.getenv("POSTHOG_HOST", "http://localhost:8000")  # Or https://app.posthog.com
 posthog.debug = True
+# uncomment to enable privacy mode (captures usage + metadata only, no prompts or outputs)
+# posthog.privacy_mode = True

 openai_client = OpenAI(
     api_key=os.getenv("OPENAI_API_KEY", "your-openai-api-key"),
@@ -26,11 +28,12 @@ def main_sync():
     print("Trace ID:", trace_id)
     distinct_id = "test2_distinct_id"
     properties = {"test_property": "test_value"}
+    groups = {"company": "test_company"}

     try:
-        basic_openai_call(distinct_id, trace_id, properties)
-        streaming_openai_call(distinct_id, trace_id, properties)
-        embedding_openai_call(distinct_id, trace_id, properties)
+        basic_openai_call(distinct_id, trace_id, properties, groups)
+        streaming_openai_call(distinct_id, trace_id, properties, groups)
+        embedding_openai_call(distinct_id, trace_id, properties, groups)
         image_openai_call()
     except Exception as e:
         print("Error during OpenAI call:", str(e))
@@ -41,17 +44,18 @@ async def main_async():
     print("Trace ID:", trace_id)
     distinct_id = "test_distinct_id"
     properties = {"test_property": "test_value"}
+    groups = {"company": "test_company"}

     try:
-        await basic_async_openai_call(distinct_id, trace_id, properties)
-        await streaming_async_openai_call(distinct_id, trace_id, properties)
-        await embedding_async_openai_call(distinct_id, trace_id, properties)
+        await basic_async_openai_call(distinct_id, trace_id, properties, groups)
+        await streaming_async_openai_call(distinct_id, trace_id, properties, groups)
+        await embedding_async_openai_call(distinct_id, trace_id, properties, groups)
         await image_async_openai_call()
     except Exception as e:
         print("Error during OpenAI call:", str(e))


-def basic_openai_call(distinct_id, trace_id, properties):
+def basic_openai_call(distinct_id, trace_id, properties, groups):
     response = openai_client.chat.completions.create(
         model="gpt-4o-mini",
         messages=[
@@ -63,6 +67,7 @@ def basic_openai_call(distinct_id, trace_id, properties):
         posthog_distinct_id=distinct_id,
         posthog_trace_id=trace_id,
         posthog_properties=properties,
+        posthog_groups=groups,
     )
     print(response)
     if response and response.choices:
@@ -72,7 +77,7 @@ def basic_openai_call(distinct_id, trace_id, properties):
     return response


-async def basic_async_openai_call(distinct_id, trace_id, properties):
+async def basic_async_openai_call(distinct_id, trace_id, properties, groups):
     response = await async_openai_client.chat.completions.create(
         model="gpt-4o-mini",
         messages=[
@@ -84,6 +89,7 @@ async def basic_async_openai_call(distinct_id, trace_id, properties):
         posthog_distinct_id=distinct_id,
         posthog_trace_id=trace_id,
         posthog_properties=properties,
+        posthog_groups=groups,
     )
     if response and hasattr(response, "choices"):
         print("OpenAI response:", response.choices[0].message.content)
@@ -92,7 +98,7 @@ async def basic_async_openai_call(distinct_id, trace_id, properties):
     return response


-def streaming_openai_call(distinct_id, trace_id, properties):
+def streaming_openai_call(distinct_id, trace_id, properties, groups):

     response = openai_client.chat.completions.create(
         model="gpt-4o-mini",
@@ -106,6 +112,7 @@ def streaming_openai_call(distinct_id, trace_id, properties):
         posthog_distinct_id=distinct_id,
         posthog_trace_id=trace_id,
         posthog_properties=properties,
+        posthog_groups=groups,
     )

     for chunk in response:
@@ -115,7 +122,7 @@ def streaming_openai_call(distinct_id, trace_id, properties):
     return response


-async def streaming_async_openai_call(distinct_id, trace_id, properties):
+async def streaming_async_openai_call(distinct_id, trace_id, properties, groups):
     response = await async_openai_client.chat.completions.create(
         model="gpt-4o-mini",
         messages=[
@@ -128,6 +135,7 @@ async def streaming_async_openai_call(distinct_id, trace_id, properties):
         posthog_distinct_id=distinct_id,
         posthog_trace_id=trace_id,
         posthog_properties=properties,
+        posthog_groups=groups,
     )

     async for chunk in response:
@@ -153,25 +161,27 @@ async def image_async_openai_call():
     return response


-def embedding_openai_call(posthog_distinct_id, posthog_trace_id, posthog_properties):
+def embedding_openai_call(posthog_distinct_id, posthog_trace_id, posthog_properties, posthog_groups):
     response = openai_client.embeddings.create(
         input="The hedgehog is cute",
         model="text-embedding-3-small",
         posthog_distinct_id=posthog_distinct_id,
         posthog_trace_id=posthog_trace_id,
         posthog_properties=posthog_properties,
+        posthog_groups=posthog_groups,
     )
     print(response)
     return response


-async def embedding_async_openai_call(posthog_distinct_id, posthog_trace_id, posthog_properties):
+async def embedding_async_openai_call(posthog_distinct_id, posthog_trace_id, posthog_properties, posthog_groups):
     response = await async_openai_client.embeddings.create(
         input="The hedgehog is cute",
         model="text-embedding-3-small",
         posthog_distinct_id=posthog_distinct_id,
         posthog_trace_id=posthog_trace_id,
         posthog_properties=posthog_properties,
+        posthog_groups=posthog_groups,
     )
     print(response)
     return response
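Beyond the per-call posthog_groups argument threaded through above, privacy mode can also be enabled globally via the commented-out line at the top of this file. A minimal sketch of the global usage, assuming the wrapped client from posthog.ai.openai takes a posthog_client argument (as elsewhere in this file) and that redaction nulls the content properties while keeping usage metadata:

import os

import posthog
from posthog.ai.openai import OpenAI

posthog.project_api_key = os.getenv("POSTHOG_PROJECT_API_KEY", "your-project-api-key")
posthog.privacy_mode = True  # global flag introduced by this commit

openai_client = OpenAI(
    api_key=os.getenv("OPENAI_API_KEY", "your-openai-api-key"),
    posthog_client=posthog,  # assumption: the wrapper needs a PostHog client, as in this examples file
)

response = openai_client.chat.completions.create(
    model="gpt-4o-mini",
    messages=[{"role": "user", "content": "prompt with sensitive content"}],
    posthog_distinct_id="user_123",
)
# The $ai_generation event still carries model, latency, and token counts,
# but $ai_input / $ai_output are redacted by with_privacy_mode.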

posthog/__init__.py

Lines changed: 2 additions & 0 deletions
@@ -26,6 +26,8 @@
 exception_autocapture_integrations = []  # type: List[Integrations]
 # Used to determine in app paths for exception autocapture. Defaults to the current working directory
 project_root = None  # type: Optional[str]
+# Used for our AI observability feature to not capture any prompt or output, just usage + metadata
+privacy_mode = False  # type: bool

 default_client = None  # type: Optional[Client]
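The helper that enforces this flag lives in posthog/ai/utils.py, which is among the nine changed files but not shown in this excerpt. Judging from its call sites below, with_privacy_mode(ph_client, privacy_mode, value), a plausible sketch that honors both the per-call flag and a client-level setting derived from this default:

from typing import Any

def with_privacy_mode(ph_client, privacy_mode: bool, value: Any) -> Any:
    # Redact when either the per-call flag or the client's privacy_mode is set.
    # The exact redaction value (None here) is an assumption; the real
    # implementation is not part of this excerpt.
    if privacy_mode or getattr(ph_client, "privacy_mode", False):
        return None
    return value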

posthog/ai/langchain/callbacks.py

Lines changed: 12 additions & 4 deletions
@@ -23,7 +23,7 @@
 from langchain_core.outputs import ChatGeneration, LLMResult
 from pydantic import BaseModel

-from posthog.ai.utils import get_model_params
+from posthog.ai.utils import get_model_params, with_privacy_mode
 from posthog.client import Client

 log = logging.getLogger("posthog")
@@ -69,18 +69,24 @@ def __init__(
         distinct_id: Optional[Union[str, int, float, UUID]] = None,
         trace_id: Optional[Union[str, int, float, UUID]] = None,
         properties: Optional[Dict[str, Any]] = None,
+        privacy_mode: bool = False,
+        groups: Optional[Dict[str, Any]] = None,
     ):
         """
         Args:
             client: PostHog client instance.
             distinct_id: Optional distinct ID of the user to associate the trace with.
             trace_id: Optional trace ID to use for the event.
             properties: Optional additional metadata to use for the trace.
+            privacy_mode: Whether to redact the input and output of the trace.
+            groups: Optional additional PostHog groups to use for the trace.
         """
         self._client = client
         self._distinct_id = distinct_id
         self._trace_id = trace_id
         self._properties = properties or {}
+        self._privacy_mode = privacy_mode
+        self._groups = groups or {}
         self._runs = {}
         self._parent_tree = {}

@@ -164,8 +170,8 @@ def on_llm_end(
             "$ai_provider": run.get("provider"),
             "$ai_model": run.get("model"),
             "$ai_model_parameters": run.get("model_params"),
-            "$ai_input": run.get("messages"),
-            "$ai_output": {"choices": output},
+            "$ai_input": with_privacy_mode(self._client, self._privacy_mode, run.get("messages")),
+            "$ai_output": with_privacy_mode(self._client, self._privacy_mode, {"choices": output}),
             "$ai_http_status": 200,
             "$ai_input_tokens": input_tokens,
             "$ai_output_tokens": output_tokens,
@@ -180,6 +186,7 @@ def on_llm_end(
             distinct_id=self._distinct_id or trace_id,
             event="$ai_generation",
             properties=event_properties,
+            groups=self._groups,
         )

     def on_chain_error(
@@ -212,7 +219,7 @@ def on_llm_error(
             "$ai_provider": run.get("provider"),
             "$ai_model": run.get("model"),
             "$ai_model_parameters": run.get("model_params"),
-            "$ai_input": run.get("messages"),
+            "$ai_input": with_privacy_mode(self._client, self._privacy_mode, run.get("messages")),
             "$ai_http_status": _get_http_status(error),
             "$ai_latency": latency,
             "$ai_trace_id": trace_id,
@@ -225,6 +232,7 @@ def on_llm_error(
             distinct_id=self._distinct_id or trace_id,
             event="$ai_generation",
             properties=event_properties,
+            groups=self._groups,
         )

     def _set_parent_of_run(self, run_id: UUID, parent_run_id: Optional[UUID] = None):
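For reference, a sketch of the new handler arguments in use. The handler's class name is not visible in these hunks, so CallbackHandler is an assumption based on the module path posthog.ai.langchain:

from posthog import Posthog
from posthog.ai.langchain import CallbackHandler  # name assumed, not shown in the diff

ph_client = Posthog("your-project-api-key", host="https://app.posthog.com")

callback = CallbackHandler(
    client=ph_client,
    distinct_id="user_123",
    trace_id="trace_456",
    properties={"feature": "chat"},
    privacy_mode=True,  # $ai_input / $ai_output pass through with_privacy_mode
    groups={"company": "test_company"},  # forwarded as client.capture(..., groups=...)
)

# Attach the handler to any LangChain invocation, e.g.:
# chain.invoke({"question": "..."}, config={"callbacks": [callback]})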

posthog/ai/openai/openai.py

Lines changed: 31 additions & 11 deletions
@@ -8,7 +8,7 @@
 except ImportError:
     raise ModuleNotFoundError("Please install the OpenAI SDK to use this feature: 'pip install openai'")

-from posthog.ai.utils import call_llm_and_track_usage, get_model_params
+from posthog.ai.utils import call_llm_and_track_usage, get_model_params, with_privacy_mode
 from posthog.client import Client as PostHogClient


@@ -49,6 +49,8 @@ def create(
         posthog_distinct_id: Optional[str] = None,
         posthog_trace_id: Optional[str] = None,
         posthog_properties: Optional[Dict[str, Any]] = None,
+        posthog_privacy_mode: bool = False,
+        posthog_groups: Optional[Dict[str, Any]] = None,
         **kwargs: Any,
     ):
         if posthog_trace_id is None:
@@ -59,6 +61,8 @@ def create(
                 posthog_distinct_id,
                 posthog_trace_id,
                 posthog_properties,
+                posthog_privacy_mode,
+                posthog_groups,
                 **kwargs,
             )

@@ -67,6 +71,8 @@ def create(
             self._client._ph_client,
             posthog_trace_id,
             posthog_properties,
+            posthog_privacy_mode,
+            posthog_groups,
             self._client.base_url,
             super().create,
             **kwargs,
@@ -77,6 +83,8 @@ def _create_streaming(
         posthog_distinct_id: Optional[str],
         posthog_trace_id: Optional[str],
         posthog_properties: Optional[Dict[str, Any]],
+        posthog_privacy_mode: bool,
+        posthog_groups: Optional[Dict[str, Any]],
         **kwargs: Any,
     ):
         start_time = time.time()
@@ -117,6 +125,8 @@ def generator():
                     posthog_distinct_id,
                     posthog_trace_id,
                     posthog_properties,
+                    posthog_privacy_mode,
+                    posthog_groups,
                     kwargs,
                     usage_stats,
                     latency,
@@ -130,6 +140,8 @@ def _capture_streaming_event(
         posthog_distinct_id: Optional[str],
         posthog_trace_id: Optional[str],
         posthog_properties: Optional[Dict[str, Any]],
+        posthog_privacy_mode: bool,
+        posthog_groups: Optional[Dict[str, Any]],
         kwargs: Dict[str, Any],
         usage_stats: Dict[str, int],
         latency: float,
@@ -142,15 +154,19 @@ def _capture_streaming_event(
             "$ai_provider": "openai",
             "$ai_model": kwargs.get("model"),
             "$ai_model_parameters": get_model_params(kwargs),
-            "$ai_input": kwargs.get("messages"),
-            "$ai_output": {
-                "choices": [
-                    {
-                        "content": output,
-                        "role": "assistant",
-                    }
-                ]
-            },
+            "$ai_input": with_privacy_mode(self._client._ph_client, posthog_privacy_mode, kwargs.get("messages")),
+            "$ai_output": with_privacy_mode(
+                self._client._ph_client,
+                posthog_privacy_mode,
+                {
+                    "choices": [
+                        {
+                            "content": output,
+                            "role": "assistant",
+                        }
+                    ]
+                },
+            ),
             "$ai_http_status": 200,
             "$ai_input_tokens": usage_stats.get("prompt_tokens", 0),
             "$ai_output_tokens": usage_stats.get("completion_tokens", 0),
@@ -168,6 +184,7 @@ def _capture_streaming_event(
             distinct_id=posthog_distinct_id or posthog_trace_id,
             event="$ai_generation",
             properties=event_properties,
+            groups=posthog_groups,
         )


@@ -179,6 +196,8 @@ def create(
         posthog_distinct_id: Optional[str] = None,
         posthog_trace_id: Optional[str] = None,
        posthog_properties: Optional[Dict[str, Any]] = None,
+        posthog_privacy_mode: bool = False,
+        posthog_groups: Optional[Dict[str, Any]] = None,
         **kwargs: Any,
     ):
         """
@@ -214,7 +233,7 @@ def create(
         event_properties = {
             "$ai_provider": "openai",
             "$ai_model": kwargs.get("model"),
-            "$ai_input": kwargs.get("input"),
+            "$ai_input": with_privacy_mode(self._client._ph_client, posthog_privacy_mode, kwargs.get("input")),
             "$ai_http_status": 200,
             "$ai_input_tokens": usage_stats.get("prompt_tokens", 0),
             "$ai_latency": latency,
@@ -232,6 +251,7 @@ def create(
             distinct_id=posthog_distinct_id or posthog_trace_id,
             event="$ai_embedding",
             properties=event_properties,
+            groups=posthog_groups,
         )

         return response
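Net effect at the call site: both chat completions and embeddings now accept posthog_privacy_mode and posthog_groups. A minimal per-call sketch, reusing openai_client from llm_observability_examples.py above (argument names taken directly from the signatures in this diff):

response = openai_client.chat.completions.create(
    model="gpt-4o-mini",
    messages=[{"role": "user", "content": "contains PII"}],
    posthog_distinct_id="user_123",
    posthog_privacy_mode=True,  # redact $ai_input / $ai_output for this call only
    posthog_groups={"company": "test_company"},  # attach the event to a PostHog group
)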
