
Commit 1645d9d

feat(langfuse): add langfuse handler to callback for langchain integration
Parent: 053c17f

2 files changed: 12 additions, 5 deletions

backend/api/core/agent/orchestration.py

Lines changed: 3 additions & 1 deletion
@@ -10,6 +10,7 @@
 from langgraph.prebuilt import ToolNode, tools_condition
 
 from api.core.agent.prompts import SYSTEM_PROMPT
+from api.core.dependencies import LangfuseHandlerDep
 
 
 class State(MessagesState):
@@ -70,7 +71,8 @@ def get_graph(
     return graph_factory(worker_node, tools, checkpointer, name)
 
 
-def get_config():
+def get_config(langfuse_handler: LangfuseHandlerDep):
     return dict(
         configurable=dict(thread_id="1"),
+        callbacks=[langfuse_handler],
     )
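
Note: the LangfuseHandlerDep annotation imported above lives in api/core/dependencies.py, which this commit does not touch. As a rough illustration only, a dependency like it could be declared with FastAPI's Annotated/Depends pattern and the Langfuse LangChain CallbackHandler; the function name, import path (langfuse.callback in SDK v2), and reliance on LANGFUSE_* environment variables below are assumptions, not the repository's actual code.

# Hypothetical sketch of api/core/dependencies.py (not part of this commit).
from typing import Annotated

from fastapi import Depends
from langfuse.callback import CallbackHandler  # Langfuse SDK v2 LangChain handler


def get_langfuse_handler() -> CallbackHandler:
    # Picks up LANGFUSE_PUBLIC_KEY / LANGFUSE_SECRET_KEY / LANGFUSE_HOST from the environment.
    return CallbackHandler()


# FastAPI resolves this annotation per request and injects a handler instance,
# which get_config(langfuse_handler) then places into the callbacks list.
LangfuseHandlerDep = Annotated[CallbackHandler, Depends(get_langfuse_handler)]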

backend/api/routers/llms.py

Lines changed: 9 additions & 4 deletions
@@ -8,7 +8,7 @@
 from starlette.responses import Response
 
 from api.core.agent.orchestration import get_config, get_graph
-from api.core.dependencies import LLMDep, setup_graph
+from api.core.dependencies import LangfuseHandlerDep, LLMDep, setup_graph
 from api.core.logs import print, uvicorn
 
 router = APIRouter(tags=["chat"])
@@ -26,13 +26,17 @@ async def completions(query: str, llm: LLMDep) -> Response:
 
 
 @router.get("/chat/agent")
-async def agent(query: str, llm: LLMDep) -> Response:
+async def agent(
+    query: str,
+    llm: LLMDep,
+    langfuse_handler: LangfuseHandlerDep,
+) -> Response:
     """Stream LangGraph completions as Server-Sent Events (SSE).
 
     This endpoint streams LangGraph-generated events in real-time, allowing the client
     to receive responses as they are processed, useful for agent-based workflows.
     """
-    return EventSourceResponse(stream_graph(query, llm))
+    return EventSourceResponse(stream_graph(query, llm, langfuse_handler))
 
 
 async def stream_completions(
@@ -57,14 +61,15 @@ async def checkpointer_setup(pool):
 async def stream_graph(
     query: str,
     llm: LLMDep,
+    langfuse_handler: LangfuseHandlerDep,
 ) -> AsyncGenerator[dict[str, str], None]:
     async with setup_graph() as resource:
         graph = get_graph(
             llm,
             tools=resource.tools,
             checkpointer=resource.checkpointer,
         )
-        config = get_config()
+        config = get_config(langfuse_handler)
         events = dict(messages=[HumanMessage(content=query)])
 
         async for event in graph.astream_events(events, config, version="v2"):
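
For context on why attaching the handler in get_config is enough: the dict returned by get_config is passed to graph.astream_events as a LangChain RunnableConfig, and handlers in its callbacks list are inherited by every child run in the graph (model calls, tool calls), so one request shows up as a single Langfuse trace. A minimal standalone sketch of that config shape, assuming the handler is built directly from LANGFUSE_* environment variables rather than injected by FastAPI:

# Sketch only: same config shape as get_config, outside the FastAPI app.
from langchain_core.runnables import RunnableConfig
from langfuse.callback import CallbackHandler  # Langfuse SDK v2 LangChain handler

handler = CallbackHandler()  # credentials read from LANGFUSE_* environment variables
config: RunnableConfig = dict(
    configurable=dict(thread_id="1"),
    callbacks=[handler],
)
# Passing this config to graph.astream_events(events, config, version="v2")
# propagates the handler to every node and tool call in the LangGraph run.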
