feat: LangGraph integration helpers and example #33
Changes from 1 commit
`examples/langgraph_tool_node.py`

@@ -1,39 +1,60 @@

The previous stub (hard-coded `account_id`/`employee_id` constants and an assert-only `langgraph_tool_node()` marked `TODO`) is replaced with a runnable example:

````python
"""
Minimal LangGraph example identical to the README snippet.

Run:

```bash
uv run examples/langgraph_tool_node.py
```

Prerequisites:
- `pip install langgraph langchain-openai`
- `STACKONE_API_KEY` and `OPENAI_API_KEY`
- Optionally set `STACKONE_ACCOUNT_ID` (required by some tools)
"""

import os
from typing import Annotated

from dotenv import load_dotenv
from langchain_openai import ChatOpenAI
from langgraph.graph import START, StateGraph
from langgraph.graph.message import add_messages
from langgraph.prebuilt import tools_condition
from typing_extensions import TypedDict

from stackone_ai import StackOneToolSet
from stackone_ai.integrations.langgraph import bind_model_with_tools, to_tool_node


def main() -> None:
    load_dotenv()

    # Prepare tools
    account_id = os.getenv("STACKONE_ACCOUNT_ID")  # Set if your tools require it
    toolset = StackOneToolSet()
    tools = toolset.get_tools("hris_*", account_id=account_id)
    langchain_tools = tools.to_langchain()

    class State(TypedDict):
        messages: Annotated[list, add_messages]

    # Build a small agent loop: LLM -> maybe tools -> back to LLM
    graph = StateGraph(State)
    graph.add_node("tools", to_tool_node(langchain_tools))

    def call_llm(state: dict):
        llm = ChatOpenAI(model="gpt-4o-mini")
        llm = bind_model_with_tools(llm, langchain_tools)
        resp = llm.invoke(state["messages"])  # returns AIMessage with optional tool_calls
        return {"messages": state["messages"] + [resp]}

    graph.add_node("llm", call_llm)
    graph.add_edge(START, "llm")
    graph.add_conditional_edges("llm", tools_condition)
    graph.add_edge("tools", "llm")
    app = graph.compile()

    # Kick off with a simple instruction; replace IDs as needed
    _ = app.invoke({"messages": [("user", "Get employee with id emp123")]})


if __name__ == "__main__":
    main()
````
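As a small follow-up to the example (not part of the PR), a sketch of inspecting the run's output, assuming the compiled graph returns its final state with the accumulated messages:

```python
# Sketch only: capture the final state instead of discarding it with `_`.
result = app.invoke({"messages": [("user", "Get employee with id emp123")]})
final_message = result["messages"][-1]  # last entry is the model's final reply
print(final_message.content)
```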
`stackone_ai/integrations/__init__.py` (new file)

@@ -0,0 +1,20 @@

```python
"""Integration helpers for external frameworks.

Currently includes:

- LangGraph helpers to turn StackOne tools into a `ToolNode` or `ToolExecutor`.
"""

from .langgraph import (
    bind_model_with_tools,
    create_react_agent,
    to_tool_executor,
    to_tool_node,
)

__all__ = [
    "to_tool_node",
    "to_tool_executor",
    "bind_model_with_tools",
    "create_react_agent",
]
```
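For context, a brief usage sketch (not from the diff) of the package-level re-exports, mirroring how the example wires the helpers together:

```python
from langchain_openai import ChatOpenAI

from stackone_ai import StackOneToolSet
from stackone_ai.integrations import bind_model_with_tools, to_tool_node

toolset = StackOneToolSet()
tools = toolset.get_tools("hris_*", account_id="...")  # account_id if your tools need it
langchain_tools = tools.to_langchain()

tool_node = to_tool_node(langchain_tools)  # a langgraph.prebuilt.ToolNode
llm = bind_model_with_tools(ChatOpenAI(model="gpt-4o-mini"), langchain_tools)
```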
`stackone_ai/integrations/langgraph.py` (new file, 89 lines; the diff is truncated below)

@@ -0,0 +1,89 @@

```python
"""LangGraph integration helpers.

These utilities convert StackOne tools into LangGraph prebuilt components.

Usage:

    from stackone_ai import StackOneToolSet
    from stackone_ai.integrations.langgraph import to_tool_node

    toolset = StackOneToolSet()
    tools = toolset.get_tools("hris_*", account_id="...")
    node = to_tool_node(tools)  # langgraph.prebuilt.ToolNode
"""

from __future__ import annotations

from collections.abc import Sequence
from typing import TYPE_CHECKING, Any

from langchain_core.tools import BaseTool

from stackone_ai.models import Tools

if TYPE_CHECKING:  # pragma: no cover - only for typing
    try:
        from langgraph.prebuilt import ToolExecutor, ToolNode
    except Exception:  # pragma: no cover
```
Copilot AI · Sep 2, 2025

Using a bare `Exception` catch is too broad. Consider catching `ImportError` or `ModuleNotFoundError` specifically, since this is checking for missing dependencies.

Suggested changes:

```diff
-    except Exception:  # pragma: no cover
+    except ImportError:  # pragma: no cover
```

```diff
-    except Exception as e:  # pragma: no cover
+    except ImportError as e:  # pragma: no cover
```
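A minimal sketch of the guarded-import pattern this comment asks for (the error message and re-raise are illustrative, not code from this PR):

```python
# Catch only ImportError: anything else is a real bug, not a missing dependency.
try:
    from langgraph.prebuilt import ToolNode  # optional dependency
except ImportError as e:
    raise ImportError(
        "langgraph is required for the LangGraph integration helpers; "
        "install it with `pip install langgraph`"
    ) from e
```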
A second review comment, on `call_llm` in `examples/langgraph_tool_node.py`:

With `add_messages`, returning `state["messages"] + [resp]` duplicates messages; return only the new message so the reducer can append it.
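A sketch of the fix this comment describes, reusing `langchain_tools` from the example above: with the `add_messages` reducer, the node returns only the new message and LangGraph appends it to the state.

```python
from langchain_openai import ChatOpenAI

from stackone_ai.integrations.langgraph import bind_model_with_tools


def call_llm(state: dict):
    llm = bind_model_with_tools(ChatOpenAI(model="gpt-4o-mini"), langchain_tools)
    resp = llm.invoke(state["messages"])  # AIMessage, possibly with tool_calls
    return {"messages": [resp]}  # the add_messages reducer appends it to state
```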