Skip to content

Commit 476a823

Browse files
feat(ai): replace otel client-side span mapping with PostHogTraceExporter (#3232)
* Replace otel client-side span mapping with PostHogTraceExporter

  Replace the PostHogSpanProcessor (which did client-side span mapping via posthog-node) with PostHogTraceExporter, a thin wrapper around OTLPTraceExporter that points to PostHog's /i/v0/ai/otel endpoint. PostHog now handles gen_ai.* span conversion to $ai_generation events server-side, so the client no longer needs the mapper pipeline.

* fix(ai): address PR review feedback

  Use URL.origin for trailing slash handling instead of regex, add inline comment on auth header, consolidate tests into it.each, add changeset.

* fix(ai): add apiKey validation, clean up unused deps, improve tests

  Remove @opentelemetry/sdk-trace-base from peer and dev dependencies since it's no longer imported after the exporter rewrite. Add apiKey validation to PostHogTraceExporter constructor. Assert on mock constructor args instead of internal _config in tests.

* switch to gpt-5-mini

* fix otel for convex
1 parent 697e423 commit 476a823

File tree

20 files changed

+730
-2234
lines changed

20 files changed

+730
-2234
lines changed

.changeset/otel-trace-exporter.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
'@posthog/ai': minor
3+
---
4+
5+
Replace otel client-side span mapping with PostHogTraceExporter. PostHog now converts gen_ai.* spans into $ai_generation events server-side, so the client-side mapper pipeline (PostHogSpanProcessor, captureSpan, aiSdkSpanMapper) has been replaced with a simple OTLPTraceExporter wrapper.

examples/example-convex/convex/aiSdk/manualCapture.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ export const generate = action({
1818
const startTime = Date.now()
1919

2020
const result = await generateText({
21-
model: openai('gpt-4o-mini'),
21+
model: openai('gpt-5-mini'),
2222
prompt: args.prompt,
2323
})
2424

@@ -33,7 +33,7 @@ export const generate = action({
3333

3434
// Core identification
3535
$ai_provider: 'openai',
36-
$ai_model: 'gpt-4o-mini',
36+
$ai_model: 'gpt-5-mini',
3737

3838
// Token usage
3939
$ai_input_tokens: result.usage.inputTokens,

examples/example-convex/convex/aiSdk/openTelemetry.ts

Lines changed: 7 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -1,46 +1,37 @@
11
"use node"
22

33
import { NodeSDK } from '@opentelemetry/sdk-node'
4-
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http'
54
import { resourceFromAttributes } from '@opentelemetry/resources'
65
import { generateText } from 'ai'
76
import { openai } from '@ai-sdk/openai'
7+
import { PostHogTraceExporter } from '@posthog/ai/otel'
88
import { action } from '../_generated/server'
99
import { v } from 'convex/values'
1010

1111
// Demonstrates using the Vercel AI SDK's experimental_telemetry with
12-
// PostHog's native OTel endpoint to automatically capture $ai_generation events.
12+
// PostHog's PostHogTraceExporter to automatically capture $ai_generation events.
1313
export const generate = action({
1414
args: {
1515
prompt: v.string(),
1616
distinctId: v.optional(v.string()),
1717
},
1818
handler: async (_ctx, args) => {
19-
// Set up an OTel SDK that exports spans directly to PostHog's
20-
// /i/v0/ai/otel endpoint. PostHog converts gen_ai.* spans into
21-
// $ai_generation events server-side. NodeSDK (vs NodeTracerProvider)
22-
// handles context propagation via AsyncLocalStorage automatically,
23-
// which the AI SDK needs to connect parent and child spans.
24-
const exporter = new OTLPTraceExporter({
25-
url: `${process.env.POSTHOG_HOST || 'https://us.i.posthog.com'}/i/v0/ai/otel`,
26-
headers: {
27-
Authorization: `Bearer ${process.env.POSTHOG_API_KEY}`,
28-
},
29-
})
30-
3119
const distinctId = args.distinctId ?? 'anonymous'
3220

3321
const sdk = new NodeSDK({
3422
resource: resourceFromAttributes({
3523
'service.name': 'example-convex',
3624
'user.id': distinctId,
3725
}),
38-
traceExporter: exporter,
26+
traceExporter: new PostHogTraceExporter({
27+
apiKey: process.env.POSTHOG_API_KEY!,
28+
host: process.env.POSTHOG_HOST,
29+
}),
3930
})
4031
sdk.start()
4132

4233
const result = await generateText({
43-
model: openai('gpt-4o-mini'),
34+
model: openai('gpt-5-mini'),
4435
prompt: args.prompt,
4536
experimental_telemetry: {
4637
isEnabled: true,

examples/example-convex/convex/aiSdk/withTracing.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@ export const generate = action({
2828
handler: async (_ctx, args) => {
2929
// Wrap the model with PostHog tracing — this automatically captures
3030
// $ai_generation events with token usage, latency, and content.
31-
const model = withTracing(openai('gpt-4o-mini'), phClient as unknown as WithTracingPostHog, {
31+
const model = withTracing(openai('gpt-5-mini'), phClient as unknown as WithTracingPostHog, {
3232
posthogDistinctId: args.distinctId,
3333
})
3434

examples/example-convex/convex/convexAgent/manualCapture.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@ import { posthog } from '../posthog.js'
77

88
const supportAgent = new Agent(components.agent, {
99
name: 'support-agent',
10-
languageModel: openai('gpt-4o-mini'),
10+
languageModel: openai('gpt-5-mini'),
1111
instructions: 'You are a helpful support agent. Answer questions concisely.',
1212
})
1313

examples/example-convex/convex/convexAgent/openTelemetry.ts

Lines changed: 7 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -1,49 +1,40 @@
11
"use node"
22

33
import { NodeSDK } from '@opentelemetry/sdk-node'
4-
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http'
54
import { resourceFromAttributes } from '@opentelemetry/resources'
65
import { Agent } from '@convex-dev/agent'
76
import { openai } from '@ai-sdk/openai'
7+
import { PostHogTraceExporter } from '@posthog/ai/otel'
88
import { components } from '../_generated/api'
99
import { action } from '../_generated/server'
1010
import { v } from 'convex/values'
1111

1212
// Demonstrates using @convex-dev/agent with the Vercel AI SDK's
13-
// experimental_telemetry and PostHog's native OTel endpoint to
13+
// experimental_telemetry and PostHog's PostHogTraceExporter to
1414
// automatically capture $ai_generation events.
1515
export const generate = action({
1616
args: {
1717
prompt: v.string(),
1818
distinctId: v.optional(v.string()),
1919
},
2020
handler: async (ctx, args) => {
21-
// Set up an OTel SDK that exports spans directly to PostHog's
22-
// /i/v0/ai/otel endpoint. PostHog converts gen_ai.* spans into
23-
// $ai_generation events server-side. NodeSDK (vs NodeTracerProvider)
24-
// handles context propagation via AsyncLocalStorage automatically,
25-
// which the AI SDK needs to connect parent and child spans.
26-
const exporter = new OTLPTraceExporter({
27-
url: `${process.env.POSTHOG_HOST || 'https://us.i.posthog.com'}/i/v0/ai/otel`,
28-
headers: {
29-
Authorization: `Bearer ${process.env.POSTHOG_API_KEY}`,
30-
},
31-
})
32-
3321
const distinctId = args.distinctId ?? 'anonymous'
3422

3523
const sdk = new NodeSDK({
3624
resource: resourceFromAttributes({
3725
'service.name': 'example-convex',
3826
'user.id': distinctId,
3927
}),
40-
traceExporter: exporter,
28+
traceExporter: new PostHogTraceExporter({
29+
apiKey: process.env.POSTHOG_API_KEY!,
30+
host: process.env.POSTHOG_HOST,
31+
}),
4132
})
4233
sdk.start()
4334

4435
const supportAgent = new Agent(components.agent, {
4536
name: 'support-agent',
46-
languageModel: openai('gpt-4o-mini'),
37+
languageModel: openai('gpt-5-mini'),
4738
instructions: 'You are a helpful support agent. Answer questions concisely.',
4839
})
4940

examples/example-convex/convex/convexAgent/withTracing.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ export const generate = action({
2929
handler: async (ctx, args) => {
3030
// Wrap the model with PostHog tracing before passing it to the agent.
3131
// Every LLM call the agent makes will automatically capture $ai_generation events.
32-
const model = withTracing(openai('gpt-4o-mini'), phClient as unknown as WithTracingPostHog, {
32+
const model = withTracing(openai('gpt-5-mini'), phClient as unknown as WithTracingPostHog, {
3333
posthogDistinctId: args.distinctId,
3434
})
3535

packages/ai/README.md

Lines changed: 16 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ const client = new OpenAI({
2424
})
2525

2626
const completion = await client.chat.completions.create({
27-
model: 'gpt-3.5-turbo',
27+
model: 'gpt-5-mini',
2828
messages: [{ role: 'user', content: 'Tell me a fun fact about hedgehogs' }],
2929
posthogDistinctId: 'user_123', // optional
3030
posthogTraceId: 'trace_123', // optional
@@ -39,61 +39,44 @@ console.log(completion.choices[0].message.content)
3939
await phClient.shutdown()
4040
```
4141

42-
## OTEL + AI SDK (`experimental_telemetry`)
42+
## OpenTelemetry
4343

44-
Use this when working with Vercel AI SDK telemetry. `@posthog/ai` exposes an OTEL `SpanProcessor` that maps spans to PostHog AI events and sends them through `posthog-node`.
44+
`@posthog/ai` provides a `PostHogTraceExporter` that sends OpenTelemetry traces to PostHog's OTLP ingestion endpoint. PostHog converts `gen_ai.*` spans into `$ai_generation` events server-side. This works with any LLM provider SDK that supports OpenTelemetry.
45+
46+
```bash
47+
npm install @posthog/ai @opentelemetry/sdk-node @opentelemetry/exporter-trace-otlp-http
48+
```
4549

4650
```typescript
4751
import { NodeSDK } from '@opentelemetry/sdk-node'
48-
import { PostHog } from 'posthog-node'
4952
import { generateText } from 'ai'
5053
import { openai } from '@ai-sdk/openai'
51-
import { PostHogSpanProcessor } from '@posthog/ai/otel'
52-
53-
const phClient = new PostHog('<YOUR_PROJECT_API_KEY>', { host: 'https://us.i.posthog.com' })
54+
import { PostHogTraceExporter } from '@posthog/ai/otel'
5455

5556
const sdk = new NodeSDK({
56-
spanProcessors: [
57-
new PostHogSpanProcessor(phClient),
58-
],
57+
traceExporter: new PostHogTraceExporter({
58+
apiKey: '<YOUR_PROJECT_API_KEY>',
59+
host: 'https://us.i.posthog.com', // optional, defaults to https://us.i.posthog.com
60+
}),
5961
})
60-
6162
sdk.start()
6263

63-
await generateText({
64-
model: openai('gpt-5.1'),
64+
const result = await generateText({
65+
model: openai('gpt-5-mini'),
6566
prompt: 'Write a short haiku about debugging',
6667
experimental_telemetry: {
6768
isEnabled: true,
6869
functionId: 'my-awesome-function',
6970
metadata: {
71+
posthog_distinct_id: 'user_123',
7072
conversation_id: 'abc123',
71-
plan: 'pro',
7273
},
7374
},
7475
})
7576

76-
await phClient.shutdown()
77+
await sdk.shutdown()
7778
```
7879

79-
### Custom Mappers
80-
81-
The OTEL processor supports adapter mappers for different span formats:
82-
83-
- `aiSdkSpanMapper` is the default mapper.
84-
- You can pass custom `mappers` in `PostHogSpanProcessor` options to support additional span schemas.
85-
86-
### Per-call Metadata (Recommended)
87-
88-
For dynamic properties, pass values in `experimental_telemetry.metadata` on each AI SDK call.
89-
These are captured from `ai.telemetry.metadata.*` and forwarded as PostHog event properties.
90-
Use processor options (`posthogProperties`) only for global defaults.
91-
92-
## Notes
93-
94-
- The OTEL route currently maps supported spans into PostHog AI events (manual capture path).
95-
- Existing wrapper-based tracing (for example `withTracing`) still works and is unchanged.
96-
9780
LLM Observability [docs](https://posthog.com/docs/ai-engineering/observability)
9881

9982
Please see the main [PostHog docs](https://www.posthog.com/docs).

packages/ai/package.json

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@
2424
"devDependencies": {
2525
"@ai-sdk/provider": "^3.0.8",
2626
"@opentelemetry/api": "^1.9.0",
27-
"@opentelemetry/sdk-trace-base": "^2.2.0",
27+
"@opentelemetry/exporter-trace-otlp-http": "^0.200.0",
2828
"@babel/preset-env": "catalog:",
2929
"@babel/preset-typescript": "catalog:",
3030
"@posthog-tooling/rollup-utils": "workspace:*",
@@ -58,15 +58,15 @@
5858
},
5959
"peerDependencies": {
6060
"@opentelemetry/api": "^1.9.0",
61-
"@opentelemetry/sdk-trace-base": "^2.2.0",
61+
"@opentelemetry/exporter-trace-otlp-http": "^0.200.0",
6262
"@ai-sdk/provider": "^2.0.0 || ^3.0.0",
6363
"posthog-node": "^5.0.0"
6464
},
6565
"peerDependenciesMeta": {
6666
"@opentelemetry/api": {
6767
"optional": true
6868
},
69-
"@opentelemetry/sdk-trace-base": {
69+
"@opentelemetry/exporter-trace-otlp-http": {
7070
"optional": true
7171
},
7272
"@ai-sdk/provider": {

packages/ai/src/index.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
import PostHogOpenAI from './openai'
22
import PostHogAzureOpenAI from './openai/azure'
33
import { wrapVercelLanguageModel } from './vercel/middleware'
4-
import { PostHogSpanProcessor, createPostHogSpanProcessor, captureSpan } from './otel'
4+
import { PostHogTraceExporter } from './otel'
55
import PostHogAnthropic from './anthropic'
66
import PostHogGoogleGenAI from './gemini'
77
import { LangChainCallbackHandler } from './langchain/callbacks'
@@ -12,6 +12,6 @@ export { PostHogAzureOpenAI as AzureOpenAI }
1212
export { PostHogAnthropic as Anthropic }
1313
export { PostHogGoogleGenAI as GoogleGenAI }
1414
export { wrapVercelLanguageModel as withTracing }
15-
export { PostHogSpanProcessor, createPostHogSpanProcessor, captureSpan }
15+
export { PostHogTraceExporter }
1616
export { LangChainCallbackHandler }
1717
export { Prompts }

0 commit comments

Comments (0)