Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
52 changes: 52 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -110,6 +110,58 @@ for tool_call in response.tool_calls:

</details>

<details>
<summary>LangGraph Integration</summary>

StackOne tools convert to LangChain tools, which LangGraph consumes via its prebuilt nodes:

Prerequisites:

```bash
pip install langgraph langchain-openai
```

```python
from langchain_openai import ChatOpenAI
from typing import Annotated
from typing_extensions import TypedDict

from langgraph.graph import StateGraph, START, END
from langgraph.graph.message import add_messages
from langgraph.prebuilt import tools_condition

from stackone_ai import StackOneToolSet
from stackone_ai.integrations.langgraph import to_tool_node, bind_model_with_tools

# Prepare tools
toolset = StackOneToolSet()
tools = toolset.get_tools("hris_*", account_id="your-account-id")
langchain_tools = tools.to_langchain()

class State(TypedDict):
messages: Annotated[list, add_messages]

# Build a small agent loop: LLM -> maybe tools -> back to LLM
graph = StateGraph(State)
graph.add_node("tools", to_tool_node(langchain_tools))

def call_llm(state: dict):
llm = ChatOpenAI(model="gpt-4o-mini")
llm = bind_model_with_tools(llm, langchain_tools)
resp = llm.invoke(state["messages"]) # returns AIMessage with optional tool_calls
    # add_messages appends for us — return only the new message to avoid duplicates
    return {"messages": [resp]}
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

With add_messages, returning state["messages"] + [resp] duplicates messages; return only the new message so the reducer can append it.

Prompt for AI agents
Address the following comment on README.md at line 146:

<comment>With add_messages, returning state[&quot;messages&quot;] + [resp] duplicates messages; return only the new message so the reducer can append it.</comment>

<file context>
@@ -110,6 +110,52 @@ for tool_call in response.tool_calls:
+    llm = ChatOpenAI(model=&quot;gpt-4o-mini&quot;)
+    llm = bind_model_with_tools(llm, langchain_tools)
+    resp = llm.invoke(state[&quot;messages&quot;])  # returns AIMessage with optional tool_calls
+    return {&quot;messages&quot;: state[&quot;messages&quot;] + [resp]}
+
+graph.add_node(&quot;llm&quot;, call_llm)
</file context>
Suggested change
return {"messages": state["messages"] + [resp]}
return {"messages": [resp]}


graph.add_node("llm", call_llm)
graph.add_edge(START, "llm")
graph.add_conditional_edges("llm", tools_condition)
graph.add_edge("tools", "llm")
app = graph.compile()

_ = app.invoke({"messages": [("user", "Get employee with id emp123") ]})
```

</details>

<details>
<summary>CrewAI Integration (Python 3.10+)</summary>

Expand Down
63 changes: 42 additions & 21 deletions examples/langgraph_tool_node.py
Original file line number Diff line number Diff line change
@@ -1,39 +1,60 @@
"""
TODO!!
Minimal LangGraph example identical to the README snippet.

This example demonstrates how to use StackOne tools with LangGraph.
Run:
uv run examples/langgraph_tool_node.py

```bash
uv run examples/langgraph_tool_node.py
```
Prerequisites:
- `pip install langgraph langchain-openai`
- `STACKONE_API_KEY` and `OPENAI_API_KEY`
- Optionally set `STACKONE_ACCOUNT_ID` (required by some tools)
"""

import os
from typing import Annotated

from dotenv import load_dotenv
from langchain_openai import ChatOpenAI
from langgraph.graph import START, StateGraph
from langgraph.graph.message import add_messages
from langgraph.prebuilt import tools_condition
from typing_extensions import TypedDict

from stackone_ai import StackOneToolSet
from stackone_ai.integrations.langgraph import bind_model_with_tools, to_tool_node

load_dotenv()

account_id = "45072196112816593343"
employee_id = "c28xIQaWQ6MzM5MzczMDA2NzMzMzkwNzIwNA"

def main() -> None:
    """Demonstrate basic LangGraph integration with StackOne tools.

    Builds a minimal agent loop (LLM -> maybe tools -> back to LLM) and runs a
    single query against the HRIS tools. Requires ``STACKONE_API_KEY`` and
    ``OPENAI_API_KEY``; ``STACKONE_ACCOUNT_ID`` is optional but needed by some
    tools.
    """
    load_dotenv()

    # Prepare tools
    account_id = os.getenv("STACKONE_ACCOUNT_ID")  # Set if your tools require it
    toolset = StackOneToolSet()
    tools = toolset.get_tools("hris_*", account_id=account_id)
    langchain_tools = tools.to_langchain()

    class State(TypedDict):
        messages: Annotated[list, add_messages]

    # Build a small agent loop: LLM -> maybe tools -> back to LLM
    graph = StateGraph(State)
    graph.add_node("tools", to_tool_node(langchain_tools))

    def call_llm(state: dict):
        llm = ChatOpenAI(model="gpt-4o-mini")
        llm = bind_model_with_tools(llm, langchain_tools)
        resp = llm.invoke(state["messages"])  # returns AIMessage with optional tool_calls
        # The add_messages reducer appends returned messages to the existing
        # history, so return ONLY the new message — including the prior
        # history here would duplicate every message on each turn.
        return {"messages": [resp]}

    graph.add_node("llm", call_llm)
    graph.add_edge(START, "llm")
    graph.add_conditional_edges("llm", tools_condition)
    graph.add_edge("tools", "llm")
    app = graph.compile()

    # Kick off with a simple instruction; replace IDs as needed
    _ = app.invoke({"messages": [("user", "Get employee with id emp123")]})


if __name__ == "__main__":
    main()
1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@ mcp = [
examples = [
"crewai>=0.102.0; python_version>='3.10'",
"langchain-openai>=0.3.6",
"langgraph>=0.2.0",
"openai>=1.63.2",
"python-dotenv>=1.0.1",
]
Expand Down
20 changes: 20 additions & 0 deletions stackone_ai/integrations/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
"""Integration helpers for external frameworks.

Currently includes:

- LangGraph helpers to turn StackOne tools into a `ToolNode` or `ToolExecutor`.
"""

from .langgraph import (
bind_model_with_tools,
create_react_agent,
to_tool_executor,
to_tool_node,
)

__all__ = [
"to_tool_node",
"to_tool_executor",
"bind_model_with_tools",
"create_react_agent",
]
89 changes: 89 additions & 0 deletions stackone_ai/integrations/langgraph.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,89 @@
"""LangGraph integration helpers.
These utilities convert StackOne tools into LangGraph prebuilt components.
Usage:
from stackone_ai import StackOneToolSet
from stackone_ai.integrations.langgraph import to_tool_node
toolset = StackOneToolSet()
tools = toolset.get_tools("hris_*", account_id="...")
node = to_tool_node(tools) # langgraph.prebuilt.ToolNode
"""

from __future__ import annotations

from collections.abc import Sequence
from typing import TYPE_CHECKING, Any

from langchain_core.tools import BaseTool

from stackone_ai.models import Tools

if TYPE_CHECKING:  # pragma: no cover - only for typing
    try:
        from langgraph.prebuilt import ToolExecutor, ToolNode
    except ImportError:  # pragma: no cover
        # LangGraph is an optional dependency; fall back to Any so the
        # annotations below still resolve when it is absent.
        ToolExecutor = Any
        ToolNode = Any


def _ensure_langgraph() -> None:
try:
from langgraph import prebuilt as _ # noqa: F401
except Exception as e: # pragma: no cover
Copy link

Copilot AI Sep 2, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Using a bare Exception catch is too broad. Consider catching ImportError or ModuleNotFoundError specifically since this is checking for missing dependencies.

Suggested change
except Exception as e: # pragma: no cover
except ImportError as e: # pragma: no cover

Copilot uses AI. Check for mistakes.
raise ImportError(
"LangGraph is not installed. Install with `pip install langgraph` or "
"`pip install 'stackone-ai[examples]'`"
) from e


def _to_langchain_tools(tools: Tools | Sequence[BaseTool]) -> Sequence[BaseTool]:
    """Normalize input to a sequence of LangChain ``BaseTool`` instances.

    A StackOne ``Tools`` collection is converted via ``to_langchain()``;
    anything else is assumed to already be LangChain tools and passed through.
    """
    return tools.to_langchain() if isinstance(tools, Tools) else tools


def to_tool_node(tools: Tools | Sequence[BaseTool], **kwargs: Any) -> Any:
    """Build a LangGraph ``ToolNode`` from StackOne or LangChain tools.

    Accepts either a StackOne ``Tools`` collection or a ready-made sequence of
    LangChain ``BaseTool`` instances; extra keyword arguments are forwarded to
    the ``ToolNode`` constructor.
    """
    _ensure_langgraph()
    # Imported lazily so _ensure_langgraph can surface a friendly error first.
    from langgraph.prebuilt import ToolNode

    return ToolNode(_to_langchain_tools(tools), **kwargs)


def to_tool_executor(tools: Tools | Sequence[BaseTool], **kwargs: Any) -> Any:
    """Build a LangGraph ``ToolExecutor`` from StackOne or LangChain tools."""
    _ensure_langgraph()
    # Imported lazily so _ensure_langgraph can surface a friendly error first.
    from langgraph.prebuilt import ToolExecutor

    return ToolExecutor(_to_langchain_tools(tools), **kwargs)


def bind_model_with_tools(model: Any, tools: Tools | Sequence[BaseTool]) -> Any:
    """Attach tools to a model supporting LangChain's ``.bind_tools()`` API.

    Converts a StackOne ``Tools`` collection (or passes through LangChain
    tools) and returns ``model.bind_tools(...)``.
    """
    return model.bind_tools(_to_langchain_tools(tools))


def create_react_agent(llm: Any, tools: Tools | Sequence[BaseTool], **kwargs: Any) -> Any:
    """Create a LangGraph ReAct agent backed by StackOne tools.

    Thin wrapper over ``langgraph.prebuilt.create_react_agent`` that also
    accepts a StackOne ``Tools`` collection; extra keyword arguments are
    forwarded unchanged.
    """
    _ensure_langgraph()
    # Imported lazily so _ensure_langgraph can surface a friendly error first.
    from langgraph.prebuilt import create_react_agent as _create

    return _create(llm, _to_langchain_tools(tools), **kwargs)
Loading
Loading