Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions .sampo/changesets/roguish-windweaver-tuonetar.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
hex/posthog: minor
---

Add support for Anthropic messages in the LLM analytics module
34 changes: 33 additions & 1 deletion lib/posthog/integrations/llm_analytics/req.ex
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,8 @@ defmodule PostHog.Integrations.LLMAnalytics.Req do
responses. Currently, it works best with the following APIs:
* OpenAI (Responses)
* OpenAI (Chat Completions)
* Anthropic (Create Message)
* Gemini (generateContent)

## Usage

Expand Down Expand Up @@ -204,6 +206,16 @@ defmodule PostHog.Integrations.LLMAnalytics.Req do
}
end

# Anthropic Create Message requests (https://api.anthropic.com/v1/messages).
# Reports a fixed base URL plus the concrete request URL and tags the
# provider as "anthropic" for PostHog's LLM analytics.
defp request_url_properties(
       %Req.Request{url: %URI{host: "api.anthropic.com", path: "/v1/messages" <> _} = url}
     ) do
  %{
    "$ai_provider": "anthropic",
    "$ai_base_url": "https://api.anthropic.com/v1/messages",
    "$ai_request_url": URI.to_string(url)
  }
end

defp request_url_properties(%Req.Request{} = request) do
%{
"$ai_base_url": URI.to_string(%{request.url | path: nil}),
Expand Down Expand Up @@ -233,6 +245,7 @@ defmodule PostHog.Integrations.LLMAnalytics.Req do
# OpenAI Responses
# OpenAI Chat Completions
# Gemini generateContent
# Anthropic
get_in(body, [atom_or_string_key(:input)]) ||
get_in(body, [atom_or_string_key(:messages)]) ||
get_in(body, [atom_or_string_key(:contents)])
Expand All @@ -241,6 +254,7 @@ defmodule PostHog.Integrations.LLMAnalytics.Req do
defp request_optional_property(:"$ai_temperature", body) do
  # OpenAI Responses
  # OpenAI Chat Completions
  # Anthropic
  # All of the above send a top-level `temperature`; Gemini nests it under
  # `generationConfig`, so we fall back to the nested lookup.
  top_level = get_in(body, [atom_or_string_key(:temperature)])

  if top_level do
    top_level
  else
    get_in(body, [atom_or_string_key(:generationConfig), atom_or_string_key(:temperature)])
  end
end
Expand All @@ -254,15 +268,18 @@ defmodule PostHog.Integrations.LLMAnalytics.Req do
defp request_optional_property(:"$ai_max_tokens", body) do
  # Each provider spells the token limit differently:
  # OpenAI Responses        -> max_output_tokens
  # OpenAI Chat Completions -> max_completion_tokens
  # Gemini                  -> generationConfig.maxOutputTokens
  # Anthropic               -> max_tokens
  # Return the first truthy match, preserving the original lookup order.
  [
    [atom_or_string_key(:max_output_tokens)],
    [atom_or_string_key(:max_completion_tokens)],
    [atom_or_string_key(:generationConfig), atom_or_string_key(:maxOutputTokens)],
    [atom_or_string_key(:max_tokens)]
  ]
  |> Enum.find_value(&get_in(body, &1))
end

defp request_optional_property(:"$ai_tools", body) do
  # OpenAI Responses
  # OpenAI Chat Completions
  # Gemini
  # Anthropic
  # All four providers accept a top-level `tools` list, so one lookup suffices.
  get_in(body, [atom_or_string_key(:tools)])
end

Expand Down Expand Up @@ -324,6 +341,21 @@ defmodule PostHog.Integrations.LLMAnalytics.Req do
}
end

# Anthropic Create Message
# Matches a successful response body: top-level "model"/"content" plus a
# "usage" map carrying both token counters. Error bodies (with an "error"
# key) fall through to the error clause below.
defp response_properties(%{
       "model" => model,
       "content" => content,
       "usage" => %{"input_tokens" => input_tokens, "output_tokens" => output_tokens}
     }) do
  %{
    "$ai_is_error": false,
    "$ai_model": model,
    "$ai_output_choices": content,
    "$ai_input_tokens": input_tokens,
    "$ai_output_tokens": output_tokens
  }
end

defp response_properties(%{"error" => error}) do
%{
"$ai_is_error": true,
Expand Down
2 changes: 1 addition & 1 deletion lib/posthog/llm_analytics.ex
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,7 @@ defmodule PostHog.LLMAnalytics do
def generate_response(user_message) do
LLMAnalytics.start_span(%{"$ai_span_name": "LLM call", "$ai_input_state": user_message})

Req.post!("https://api.openai.com/v1/responses, json: %{input: user_message})
Req.post!("https://api.openai.com/v1/responses", json: %{input: user_message})
|> handle_response()
end

Expand Down
63 changes: 63 additions & 0 deletions test/integration_test.exs
Original file line number Diff line number Diff line change
Expand Up @@ -273,5 +273,68 @@ defmodule PostHog.IntegrationTest do

wait.()
end

# Live integration test: sends a real Create Message request to the Anthropic
# API through the instrumented Req pipeline, then blocks on the wait fixture
# until the resulting PostHog event has been captured.
# Requires :anthropic_key in the :posthog application env.
test "Anthropic", %{wait_fun: wait} do
  Req.new()
  |> LLMReq.attach()
  |> Req.post!(
    url: "https://api.anthropic.com/v1/messages",
    headers: [
      # Anthropic authenticates via x-api-key (not a Bearer token) and
      # requires an explicit anthropic-version header.
      {"x-api-key", Application.get_env(:posthog, :anthropic_key)},
      {"anthropic-version", "2023-06-01"}
    ],
    json: %{
      messages: [
        %{
          role: :user,
          content: "Cite me the greatest opening line in the history of cyberpunk."
        }
      ],
      # max_tokens is a required field of the Anthropic Messages API.
      max_tokens: 1024,
      model: "claude-haiku-4-5"
    }
  )

  # Wait for the async capture pipeline to flush the event.
  wait.()
end

# Live integration test: same as the plain Anthropic test, but includes a
# tool definition so the captured event carries $ai_tools and the model is
# likely to respond with a tool_use content block.
test "Anthropic with tool", %{wait_fun: wait} do
  Req.new()
  |> LLMReq.attach()
  |> Req.post!(
    url: "https://api.anthropic.com/v1/messages",
    headers: [
      # Anthropic authenticates via x-api-key plus a version header.
      {"x-api-key", Application.get_env(:posthog, :anthropic_key)},
      {"anthropic-version", "2023-06-01"}
    ],
    json: %{
      messages: [%{role: :user, content: "Tell me weather in Vancouver, BC. Celsius."}],
      max_tokens: 1024,
      model: "claude-haiku-4-5",
      # Anthropic tool schema: `input_schema` (JSON Schema) instead of
      # OpenAI's `parameters`.
      tools: [
        %{
          name: "get_current_weather",
          description: "Get the current weather in a given location",
          input_schema: %{
            type: "object",
            properties: %{
              location: %{
                type: "string",
                description: "The city and state, e.g. San Francisco, CA"
              },
              unit: %{
                type: "string",
                enum: ["celsius", "fahrenheit"]
              }
            },
            required: ["location", "unit"]
          }
        }
      ]
    }
  )

  # Wait for the async capture pipeline to flush the event.
  wait.()
end
end
end
191 changes: 191 additions & 0 deletions test/posthog/integrations/llm_analytics/req_test.exs
Original file line number Diff line number Diff line change
Expand Up @@ -845,4 +845,195 @@ defmodule PostHog.Integrations.LLMAnalytics.ReqTest do
}
] = all_captured(@supervisor_name)
end

# Unit test with a mocked transport: stubs a realistic Anthropic Create
# Message response and verifies that exactly one $ai_generation event is
# captured with the expected provider/model/token/output properties.
test "captures Anthropic create message properties", %{req: req} do
  # Canned response mirroring the real Anthropic Messages API shape,
  # including usage sub-fields the integration is expected to ignore
  # (cache_creation, service_tier, etc.).
  Req.Test.expect(@mock_module, fn conn ->
    Req.Test.json(conn, %{
      "content" => [
        %{
          "text" =>
            "I'd argue for this one from **William Gibson's \"Neuromancer\" (1984)**:\n\n\"The sky above the port was the color of television, tuned to a dead channel.\"\n\nIt's often cited as one of the greatest opening lines in science fiction period. It's evocative, immediately establishes a gritty aesthetic, and perfectly captures that cyberpunk blend of high-tech futurity meeting urban decay. The image is both poetic and oddly mundane—comparing something vast to the banal experience of dead air on a TV screen.\n\nThat said, honorable mentions go to:\n- **Bruce Sterling's \"Schismatrix\"** for its ambitious far-future worldbuilding\n- **Pat Cadigan's work** for her psychological approach to cyberspace\n\nBut Gibson's opening has become almost archetypal—it defined what cyberpunk *feels* like for generations of readers and writers who followed.",
          "type" => "text"
        }
      ],
      "id" => "msg_01USjdBRtDVTcZCcVQMdtAnW",
      "model" => "claude-haiku-4-5-20251001",
      "role" => "assistant",
      "stop_reason" => "end_turn",
      "stop_sequence" => nil,
      "type" => "message",
      "usage" => %{
        "cache_creation" => %{
          "ephemeral_1h_input_tokens" => 0,
          "ephemeral_5m_input_tokens" => 0
        },
        "cache_creation_input_tokens" => 0,
        "cache_read_input_tokens" => 0,
        "inference_geo" => "not_available",
        "input_tokens" => 22,
        "output_tokens" => 205,
        "service_tier" => "standard"
      }
    })
  end)

  assert %{status: 200, body: %{}} =
           Req.post!(req,
             url: "https://api.anthropic.com/v1/messages",
             headers: [
               {"x-api-key", Application.get_env(:posthog, :anthropic_key)},
               {"anthropic-version", "2023-06-01"}
             ],
             json: %{
               messages: [
                 %{
                   role: :user,
                   content: "Cite me the greatest opening line in the history of cyberpunk."
                 }
               ],
               max_tokens: 1024,
               model: "claude-haiku-4-5"
             }
           )

  # Exactly one event; pattern-match pins provider, URLs, token counts,
  # the echoed input and the raw Anthropic content list as output choices.
  assert [
           %{
             event: "$ai_generation",
             properties: %{
               "$ai_base_url": "https://api.anthropic.com/v1/messages",
               "$ai_http_status": 200,
               "$ai_input": [
                 %{
                   role: :user,
                   content: "Cite me the greatest opening line in the history of cyberpunk."
                 }
               ],
               "$ai_input_tokens": 22,
               "$ai_model": "claude-haiku-4-5-20251001",
               "$ai_output_choices": [
                 %{
                   "text" =>
                     "I'd argue for this one from **William Gibson's \"Neuromancer\" (1984)**:\n\n\"The sky above the port was the color of television, tuned to a dead channel.\"\n\nIt's often cited as one of the greatest opening lines in science fiction period. It's evocative, immediately establishes a gritty aesthetic, and perfectly captures that cyberpunk blend of high-tech futurity meeting urban decay. The image is both poetic and oddly mundane—comparing something vast to the banal experience of dead air on a TV screen.\n\nThat said, honorable mentions go to:\n- **Bruce Sterling's \"Schismatrix\"** for its ambitious far-future worldbuilding\n- **Pat Cadigan's work** for her psychological approach to cyberspace\n\nBut Gibson's opening has become almost archetypal—it defined what cyberpunk *feels* like for generations of readers and writers who followed.",
                   "type" => "text"
                 }
               ],
               "$ai_output_tokens": 205,
               "$ai_provider": "anthropic",
               "$ai_request_url": "https://api.anthropic.com/v1/messages",
               "$ai_is_error": false
             }
           }
         ] = all_captured(@supervisor_name)
end

# Unit test with a mocked transport: stubs a tool_use response and verifies
# the captured $ai_generation event includes both the tool_use output block
# and the request's tool definitions under $ai_tools.
test "captures Anthropic with tools", %{req: req} do
  # Canned response where the model stops to call a tool
  # (stop_reason "tool_use", content holds a tool_use block).
  Req.Test.expect(@mock_module, fn conn ->
    Req.Test.json(conn, %{
      "content" => [
        %{
          "id" => "toolu_01TB42J8UvzAYvRwaQC4xT2R",
          "input" => %{"location" => "Vancouver, BC", "unit" => "celsius"},
          "name" => "get_current_weather",
          "type" => "tool_use"
        }
      ],
      "id" => "msg_012zLWPheFNRzjaGeMSr35u9",
      "model" => "claude-haiku-4-5-20251001",
      "role" => "assistant",
      "stop_reason" => "tool_use",
      "stop_sequence" => nil,
      "type" => "message",
      "usage" => %{
        "cache_creation" => %{
          "ephemeral_1h_input_tokens" => 0,
          "ephemeral_5m_input_tokens" => 0
        },
        "cache_creation_input_tokens" => 0,
        "cache_read_input_tokens" => 0,
        "inference_geo" => "not_available",
        "input_tokens" => 613,
        "output_tokens" => 75,
        "service_tier" => "standard"
      }
    })
  end)

  assert %{status: 200, body: %{}} =
           Req.post!(req,
             url: "https://api.anthropic.com/v1/messages",
             headers: [
               {"x-api-key", Application.get_env(:posthog, :anthropic_key)},
               {"anthropic-version", "2023-06-01"}
             ],
             json: %{
               messages: [%{role: :user, content: "Tell me weather in Vancouver, BC. Celsius."}],
               max_tokens: 1024,
               model: "claude-haiku-4-5",
               # Anthropic tool schema uses `input_schema` (JSON Schema).
               tools: [
                 %{
                   name: "get_current_weather",
                   description: "Get the current weather in a given location",
                   input_schema: %{
                     type: "object",
                     properties: %{
                       location: %{
                         type: "string",
                         description: "The city and state, e.g. San Francisco, CA"
                       },
                       unit: %{
                         type: "string",
                         enum: ["celsius", "fahrenheit"]
                       }
                     },
                     required: ["location", "unit"]
                   }
                 }
               ]
             }
           )

  # Exactly one event; the tool_use content block becomes the output choice
  # and the request's tool list is mirrored into $ai_tools.
  assert [
           %{
             event: "$ai_generation",
             properties: %{
               "$ai_base_url": "https://api.anthropic.com/v1/messages",
               "$ai_http_status": 200,
               "$ai_input": [
                 %{role: :user, content: "Tell me weather in Vancouver, BC. Celsius."}
               ],
               "$ai_input_tokens": 613,
               "$ai_model": "claude-haiku-4-5-20251001",
               "$ai_output_choices": [
                 %{
                   "type" => "tool_use",
                   "id" => "toolu_01TB42J8UvzAYvRwaQC4xT2R",
                   "input" => %{"location" => "Vancouver, BC", "unit" => "celsius"},
                   "name" => "get_current_weather"
                 }
               ],
               "$ai_output_tokens": 75,
               "$ai_provider": "anthropic",
               "$ai_request_url": "https://api.anthropic.com/v1/messages",
               "$ai_is_error": false,
               "$ai_tools": [
                 %{
                   name: "get_current_weather",
                   description: "Get the current weather in a given location",
                   input_schema: %{
                     type: "object",
                     required: ["location", "unit"],
                     properties: %{
                       unit: %{type: "string", enum: ["celsius", "fahrenheit"]},
                       location: %{
                         type: "string",
                         description: "The city and state, e.g. San Francisco, CA"
                       }
                     }
                   }
                 }
               ]
             }
           }
         ] = all_captured(@supervisor_name)
end
end
Loading