
Commit 36c9088

fix langgraph tool to match example
1 parent aff3264 commit 36c9088

File tree

2 files changed: +4 -4 lines changed
  • typescript-sdk
    • apps/dojo/src
    • integrations/langgraph/examples/python/agents/backend_tool_rendering


typescript-sdk/apps/dojo/src/files.json

Lines changed: 2 additions & 2 deletions
@@ -572,7 +572,7 @@
 },
 {
 "name": "agent.py",
-"content": "\"\"\"\nA simple agentic chat flow using LangGraph instead of CrewAI.\n\"\"\"\n\nfrom typing import List, Any, Optional\nimport os\n\n# Updated imports for LangGraph\nfrom langchain_core.runnables import RunnableConfig\nfrom langchain_core.messages import SystemMessage\nfrom langchain_core.tools import tool\nfrom langchain_openai import ChatOpenAI\nfrom langgraph.graph import StateGraph, END, START\nfrom langgraph.graph import MessagesState\nfrom langgraph.types import Command\nfrom requests.api import get\nfrom langgraph.prebuilt import create_react_agent\n\n\n@tool\ndef get_weather(city: str):\n \"\"\"\n Get the weather for a given city.\n \"\"\"\n return {\n \"temperature\": 20,\n \"conditions\": \"sunny\",\n \"humidity\": 50,\n \"wind_speed\": 10,\n \"feelsLike\": 25,\n }\n\n\n# Conditionally use a checkpointer based on the environment\n# Check for multiple indicators that we're running in LangGraph dev/API mode\nis_fast_api = os.environ.get(\"LANGGRAPH_FAST_API\", \"false\").lower() == \"true\"\n\n# Compile the graph\nif is_fast_api:\n # For CopilotKit and other contexts, use MemorySaver\n from langgraph.checkpoint.memory import MemorySaver\n\n graph = create_react_agent(\n model=\"openai:gpt-4.1-mini\",\n tools=[get_weather],\n prompt=\"You are a helpful assistant\",\n checkpointer=MemorySaver(),\n )\nelse:\n # When running in LangGraph API/dev, don't use a custom checkpointer\n graph = create_react_agent(\n model=\"openai:gpt-4.1-mini\",\n tools=[get_weather],\n prompt=\"You are a helpful assistant\",\n )\n",
+"content": "\"\"\"\nA simple agentic chat flow using LangGraph instead of CrewAI.\n\"\"\"\n\nfrom typing import List, Any, Optional\nimport os\n\n# Updated imports for LangGraph\nfrom langchain_core.runnables import RunnableConfig\nfrom langchain_core.messages import SystemMessage\nfrom langchain_core.tools import tool\nfrom langchain_openai import ChatOpenAI\nfrom langgraph.graph import StateGraph, END, START\nfrom langgraph.graph import MessagesState\nfrom langgraph.types import Command\nfrom requests.api import get\nfrom langgraph.prebuilt import create_react_agent\n\n\n@tool\ndef get_weather(location: str):\n \"\"\"\n Get the weather for a given location.\n \"\"\"\n return {\n \"temperature\": 20,\n \"conditions\": \"sunny\",\n \"humidity\": 50,\n \"wind_speed\": 10,\n \"feelsLike\": 25,\n }\n\n\n# Conditionally use a checkpointer based on the environment\n# Check for multiple indicators that we're running in LangGraph dev/API mode\nis_fast_api = os.environ.get(\"LANGGRAPH_FAST_API\", \"false\").lower() == \"true\"\n\n# Compile the graph\nif is_fast_api:\n # For CopilotKit and other contexts, use MemorySaver\n from langgraph.checkpoint.memory import MemorySaver\n\n graph = create_react_agent(\n model=\"openai:gpt-4.1-mini\",\n tools=[get_weather],\n prompt=\"You are a helpful assistant\",\n checkpointer=MemorySaver(),\n )\nelse:\n # When running in LangGraph API/dev, don't use a custom checkpointer\n graph = create_react_agent(\n model=\"openai:gpt-4.1-mini\",\n tools=[get_weather],\n prompt=\"You are a helpful assistant\",\n )\n",
 "language": "python",
 "type": "file"
 },
@@ -849,7 +849,7 @@
 },
 {
 "name": "agent.py",
-"content": "\"\"\"\nA simple agentic chat flow using LangGraph instead of CrewAI.\n\"\"\"\n\nfrom typing import List, Any, Optional\nimport os\n\n# Updated imports for LangGraph\nfrom langchain_core.runnables import RunnableConfig\nfrom langchain_core.messages import SystemMessage\nfrom langchain_core.tools import tool\nfrom langchain_openai import ChatOpenAI\nfrom langgraph.graph import StateGraph, END, START\nfrom langgraph.graph import MessagesState\nfrom langgraph.types import Command\nfrom requests.api import get\nfrom langgraph.prebuilt import create_react_agent\n\n\n@tool\ndef get_weather(city: str):\n \"\"\"\n Get the weather for a given city.\n \"\"\"\n return {\n \"temperature\": 20,\n \"conditions\": \"sunny\",\n \"humidity\": 50,\n \"wind_speed\": 10,\n \"feelsLike\": 25,\n }\n\n\n# Conditionally use a checkpointer based on the environment\n# Check for multiple indicators that we're running in LangGraph dev/API mode\nis_fast_api = os.environ.get(\"LANGGRAPH_FAST_API\", \"false\").lower() == \"true\"\n\n# Compile the graph\nif is_fast_api:\n # For CopilotKit and other contexts, use MemorySaver\n from langgraph.checkpoint.memory import MemorySaver\n\n graph = create_react_agent(\n model=\"openai:gpt-4.1-mini\",\n tools=[get_weather],\n prompt=\"You are a helpful assistant\",\n checkpointer=MemorySaver(),\n )\nelse:\n # When running in LangGraph API/dev, don't use a custom checkpointer\n graph = create_react_agent(\n model=\"openai:gpt-4.1-mini\",\n tools=[get_weather],\n prompt=\"You are a helpful assistant\",\n )\n",
+"content": "\"\"\"\nA simple agentic chat flow using LangGraph instead of CrewAI.\n\"\"\"\n\nfrom typing import List, Any, Optional\nimport os\n\n# Updated imports for LangGraph\nfrom langchain_core.runnables import RunnableConfig\nfrom langchain_core.messages import SystemMessage\nfrom langchain_core.tools import tool\nfrom langchain_openai import ChatOpenAI\nfrom langgraph.graph import StateGraph, END, START\nfrom langgraph.graph import MessagesState\nfrom langgraph.types import Command\nfrom requests.api import get\nfrom langgraph.prebuilt import create_react_agent\n\n\n@tool\ndef get_weather(location: str):\n \"\"\"\n Get the weather for a given location.\n \"\"\"\n return {\n \"temperature\": 20,\n \"conditions\": \"sunny\",\n \"humidity\": 50,\n \"wind_speed\": 10,\n \"feelsLike\": 25,\n }\n\n\n# Conditionally use a checkpointer based on the environment\n# Check for multiple indicators that we're running in LangGraph dev/API mode\nis_fast_api = os.environ.get(\"LANGGRAPH_FAST_API\", \"false\").lower() == \"true\"\n\n# Compile the graph\nif is_fast_api:\n # For CopilotKit and other contexts, use MemorySaver\n from langgraph.checkpoint.memory import MemorySaver\n\n graph = create_react_agent(\n model=\"openai:gpt-4.1-mini\",\n tools=[get_weather],\n prompt=\"You are a helpful assistant\",\n checkpointer=MemorySaver(),\n )\nelse:\n # When running in LangGraph API/dev, don't use a custom checkpointer\n graph = create_react_agent(\n model=\"openai:gpt-4.1-mini\",\n tools=[get_weather],\n prompt=\"You are a helpful assistant\",\n )\n",
 "language": "python",
 "type": "file"
 }

typescript-sdk/integrations/langgraph/examples/python/agents/backend_tool_rendering/agent.py

Lines changed: 2 additions & 2 deletions
@@ -18,9 +18,9 @@


 @tool
-def get_weather(city: str):
+def get_weather(location: str):
     """
-    Get the weather for a given city.
+    Get the weather for a given location.
     """
     return {
         "temperature": 20,
