Skip to content

Commit 90c0417

Browse files
committed
feat(llm-observability): $ai_tools capture in Langchain
1 parent ea4e7fa commit 90c0417

File tree

2 files changed

+61
-1
lines changed

2 files changed

+61
-1
lines changed

posthog/ai/langchain/callbacks.py

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -60,7 +60,8 @@ class GenerationMetadata(SpanMetadata):
6060
"""Model parameters of the run: temperature, max_tokens, etc."""
6161
base_url: Optional[str] = None
6262
"""Base URL of the provider's API used in the run."""
63-
63+
tools: Optional[List[Dict[str, Any]]] = None
64+
"""Tools provided to the model."""
6465

6566
RunMetadata = Union[SpanMetadata, GenerationMetadata]
6667
RunMetadataStorage = Dict[UUID, RunMetadata]
@@ -377,6 +378,8 @@ def _set_llm_metadata(
377378
generation = GenerationMetadata(name=run_name, input=messages, start_time=time.time(), end_time=None)
378379
if isinstance(invocation_params, dict):
379380
generation.model_params = get_model_params(invocation_params)
381+
if tools := invocation_params.get("tools"):
382+
generation.tools = tools
380383
if isinstance(metadata, dict):
381384
if model := metadata.get("ls_model_name"):
382385
generation.model = model
@@ -500,6 +503,8 @@ def _capture_generation(
500503
"$ai_latency": run.latency,
501504
"$ai_base_url": run.base_url,
502505
}
506+
if run.tools:
507+
event_properties["$ai_tools"] = run.tools
503508

504509
if isinstance(output, BaseException):
505510
event_properties["$ai_http_status"] = _get_http_status(output)

posthog/test/ai/langchain/test_callbacks.py

Lines changed: 55 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1168,6 +1168,61 @@ async def test_async_anthropic_streaming(mock_client):
11681168
assert isinstance(trace_props["$ai_output_state"], AIMessage)
11691169

11701170

1171+
def test_metadata_tools(mock_client):
    """Tools passed in ``invocation_params`` are captured on the run's GenerationMetadata.

    Exercises ``_set_llm_metadata`` with an OpenAI-style ``tools`` payload and
    verifies the metadata round-trips unchanged through ``_pop_run_metadata``.
    """
    callbacks = CallbackHandler(mock_client)
    run_id = uuid.uuid4()
    # OpenAI-style tool schema: a flat list of tool dicts. (Fixed from the
    # original fixture, which was doubly nested and listed "required" keys
    # that did not exist in "properties".)
    tools = [
        {
            "type": "function",
            "function": {
                "name": "foo",
                "description": "The foo.",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "bar": {
                            "description": "The bar of foo.",
                            "type": "string",
                        },
                    },
                    "required": ["bar"],
                    "additionalProperties": False,
                },
                "strict": True,
            },
        }
    ]

    # Freeze time so start_time is deterministic.
    with patch("time.time", return_value=1234567890):
        callbacks._set_llm_metadata(
            {"kwargs": {"openai_api_base": "https://us.posthog.com"}},
            run_id,
            messages=[{"role": "user", "content": "What's the weather like in SF?"}],
            invocation_params={"temperature": 0.5, "tools": tools},
            metadata={"ls_model_name": "hog-mini", "ls_provider": "posthog"},
            name="test",
        )
    expected = GenerationMetadata(
        model="hog-mini",
        input=[{"role": "user", "content": "What's the weather like in SF?"}],
        start_time=1234567890,
        model_params={"temperature": 0.5},
        provider="posthog",
        base_url="https://us.posthog.com",
        name="test",
        tools=tools,
        end_time=None,
    )
    assert callbacks._runs[run_id] == expected

    # Popping the run stamps end_time and clears internal storage.
    with patch("time.time", return_value=1234567891):
        run = callbacks._pop_run_metadata(run_id)
    expected.end_time = 1234567891
    assert run == expected
    assert callbacks._runs == {}
1224+
1225+
11711226
def test_tool_calls(mock_client):
11721227
prompt = ChatPromptTemplate.from_messages([("user", "Foo")])
11731228
model = FakeMessagesListChatModel(

0 commit comments

Comments
 (0)