
Commit 19294ec

chore: use checkpointer in fastapi version and omit on lgp
1 parent 8888989 commit 19294ec

File tree

7 files changed: +41 -55 lines changed

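The commit converges all six example agents on a single toggle: compile with an in-memory checkpointer when the examples are served from their own FastAPI app, and compile bare when LangGraph dev/API hosts the graph (the platform supplies persistence itself). A minimal, self-contained sketch of the pattern, assuming the LANGGRAPH_FAST_API flag used in the diffs below; the tiny graph is illustrative, not the agents' real code:

import os
from typing import TypedDict

from langgraph.graph import StateGraph, END

class State(TypedDict):
    count: int

def step(state: State) -> State:
    return {"count": state["count"] + 1}

workflow = StateGraph(State)
workflow.add_node("step", step)
workflow.set_entry_point("step")
workflow.add_edge("step", END)

is_fast_api = os.environ.get("LANGGRAPH_FAST_API", "false").lower() == "true"

if is_fast_api:
    # Self-hosted FastAPI serving: the app supplies its own persistence.
    from langgraph.checkpoint.memory import MemorySaver
    graph = workflow.compile(checkpointer=MemorySaver())
else:
    # LangGraph dev/API provides persistence, so no custom checkpointer.
    graph = workflow.compile()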

typescript-sdk/integrations/langgraph/examples/python/agents/agentic_chat/agent.py

Lines changed: 6 additions & 9 deletions
@@ -79,17 +79,14 @@ async def chat_node(state: AgentState, config: RunnableConfig):
 
 # Conditionally use a checkpointer based on the environment
 # Check for multiple indicators that we're running in LangGraph dev/API mode
-is_langgraph_api = (
-    os.environ.get("LANGGRAPH_API", "false").lower() == "true" or
-    os.environ.get("LANGGRAPH_API_DIR") is not None
-)
+is_fast_api = os.environ.get("LANGGRAPH_FAST_API", "false").lower() == "true"
 
 # Compile the graph
-if is_langgraph_api:
-    # When running in LangGraph API/dev, don't use a custom checkpointer
-    graph = workflow.compile()
-else:
+if is_fast_api:
     # For CopilotKit and other contexts, use MemorySaver
     from langgraph.checkpoint.memory import MemorySaver
     memory = MemorySaver()
-    graph = workflow.compile(checkpointer=memory)
+    graph = workflow.compile(checkpointer=memory)
+else:
+    # When running in LangGraph API/dev, don't use a custom checkpointer
+    graph = workflow.compile()

typescript-sdk/integrations/langgraph/examples/python/agents/agentic_generative_ui/agent.py

Lines changed: 6 additions & 9 deletions
@@ -192,17 +192,14 @@ async def chat_node(state: AgentState, config: RunnableConfig):
 
 # Conditionally use a checkpointer based on the environment
 # Check for multiple indicators that we're running in LangGraph dev/API mode
-is_langgraph_api = (
-    os.environ.get("LANGGRAPH_API", "false").lower() == "true" or
-    os.environ.get("LANGGRAPH_API_DIR") is not None
-)
+is_fast_api = os.environ.get("LANGGRAPH_FAST_API", "false").lower() == "true"
 
 # Compile the graph
-if is_langgraph_api:
-    # When running in LangGraph API/dev, don't use a custom checkpointer
-    graph = workflow.compile()
-else:
+if is_fast_api:
     # For CopilotKit and other contexts, use MemorySaver
     from langgraph.checkpoint.memory import MemorySaver
     memory = MemorySaver()
-    graph = workflow.compile(checkpointer=memory)
+    graph = workflow.compile(checkpointer=memory)
+else:
+    # When running in LangGraph API/dev, don't use a custom checkpointer
+    graph = workflow.compile()

typescript-sdk/integrations/langgraph/examples/python/agents/dojo.py

Lines changed: 2 additions & 0 deletions
@@ -5,6 +5,8 @@
 from dotenv import load_dotenv
 load_dotenv()
 
+os.environ["IS_FASTAPI"] = "true"
+
 from ag_ui_langgraph import LangGraphAgent, add_langgraph_fastapi_endpoint
 from .human_in_the_loop.agent import graph as human_in_the_loop_graph
 from .predictive_state_updates.agent import graph as predictive_state_updates_graph
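Ordering matters here: each agent module reads its environment flag and compiles its graph at import time, so whatever flag the host app sets must be exported before the agent imports run. A minimal sketch of that ordering, using the LANGGRAPH_FAST_API name the agent files check (the module path is illustrative):

import os

# Must be set before importing any agent module: each agent.py evaluates
# os.environ.get(...) and compiles its graph as a side effect of import.
os.environ["LANGGRAPH_FAST_API"] = "true"

# Imported afterwards, this graph is compiled with a MemorySaver checkpointer.
from agents.agentic_chat.agent import graph  # illustrative module path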

typescript-sdk/integrations/langgraph/examples/python/agents/human_in_the_loop/agent.py

Lines changed: 9 additions & 12 deletions
@@ -180,7 +180,7 @@ async def chat_node(state: Dict[str, Any], config: RunnableConfig):
             "steps": state["steps"],
         }
     )
-
+
     # If no tool calls or not plan_execution_steps, return to END with the updated messages
     return Command(
         goto=END,
@@ -206,7 +206,7 @@ async def process_steps_node(state: Dict[str, Any], config: RunnableConfig):
     user_response = interrupt({"steps": state["steps"]})
     # Store the user response in state for when the node restarts
     state["user_response"] = user_response
-
+
     # Generate the creative completion response
     final_prompt = """
     Provide a textual description of how you are performing the task.
@@ -223,11 +223,11 @@ async def process_steps_node(state: Dict[str, Any], config: RunnableConfig):
 
     # Add the final response to messages
     messages = state["messages"] + [final_response]
-
+
     # Clear the user_response from state to prepare for future interactions
     if "user_response" in state:
         state.pop("user_response")
-
+
     # Return to END with the updated messages
     return Command(
         goto=END,
@@ -272,17 +272,14 @@ def should_continue(command: Command):
 
 # Conditionally use a checkpointer based on the environment
 # Check for multiple indicators that we're running in LangGraph dev/API mode
-is_langgraph_api = (
-    os.environ.get("LANGGRAPH_API", "false").lower() == "true" or
-    os.environ.get("LANGGRAPH_API_DIR") is not None
-)
+is_fast_api = os.environ.get("LANGGRAPH_FAST_API", "false").lower() == "true"
 
 # Compile the graph
-if is_langgraph_api:
-    # When running in LangGraph API/dev, don't use a custom checkpointer
-    graph = workflow.compile()
-else:
+if is_fast_api:
     # For CopilotKit and other contexts, use MemorySaver
     from langgraph.checkpoint.memory import MemorySaver
     memory = MemorySaver()
     graph = workflow.compile(checkpointer=memory)
+else:
+    # When running in LangGraph API/dev, don't use a custom checkpointer
+    graph = workflow.compile()
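This is also the file where the checkpointer choice matters most: interrupt() can only pause and later resume a graph whose state is persisted, so the self-hosted FastAPI path has to compile with one. A minimal, self-contained sketch of that mechanic, assuming a recent langgraph release; the graph shape and thread id are illustrative:

import os
from typing import TypedDict

from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import StateGraph, END
from langgraph.types import Command, interrupt

class State(TypedDict):
    steps: list

def review_node(state: State):
    # interrupt() suspends the run; on resume the node re-executes and
    # interrupt() returns the value supplied via Command(resume=...).
    answer = interrupt({"steps": state["steps"]})
    return Command(goto=END, update={"steps": state["steps"] + [answer]})

workflow = StateGraph(State)
workflow.add_node("review", review_node)
workflow.set_entry_point("review")

# Without a checkpointer there is nowhere to park the paused state.
graph = workflow.compile(checkpointer=MemorySaver())

config = {"configurable": {"thread_id": "demo"}}
graph.invoke({"steps": []}, config)               # runs until interrupt()
graph.invoke(Command(resume="approved"), config)  # resumes and finishes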

typescript-sdk/integrations/langgraph/examples/python/agents/predictive_state_updates/agent.py

Lines changed: 6 additions & 8 deletions
@@ -176,17 +176,15 @@ async def chat_node(state: AgentState, config: RunnableConfig):
 
 # Conditionally use a checkpointer based on the environment
 # Check for multiple indicators that we're running in LangGraph dev/API mode
-is_langgraph_api = (
-    os.environ.get("LANGGRAPH_API", "false").lower() == "true" or
-    os.environ.get("LANGGRAPH_API_DIR") is not None
-)
+is_fast_api = os.environ.get("LANGGRAPH_FAST_API", "false").lower() == "true"
 
 # Compile the graph
-if is_langgraph_api:
-    # When running in LangGraph API/dev, don't use a custom checkpointer
-    graph = workflow.compile()
-else:
+if is_fast_api:
     # For CopilotKit and other contexts, use MemorySaver
     from langgraph.checkpoint.memory import MemorySaver
     memory = MemorySaver()
     graph = workflow.compile(checkpointer=memory)
+else:
+    # When running in LangGraph API/dev, don't use a custom checkpointer
+    graph = workflow.compile()
+

typescript-sdk/integrations/langgraph/examples/python/agents/shared_state/agent.py

Lines changed: 6 additions & 9 deletions
@@ -300,17 +300,14 @@ async def chat_node(state: Dict[str, Any], config: RunnableConfig):
 
 # Conditionally use a checkpointer based on the environment
 # Check for multiple indicators that we're running in LangGraph dev/API mode
-is_langgraph_api = (
-    os.environ.get("LANGGRAPH_API", "false").lower() == "true" or
-    os.environ.get("LANGGRAPH_API_DIR") is not None
-)
+is_fast_api = os.environ.get("LANGGRAPH_FAST_API", "false").lower() == "true"
 
 # Compile the graph
-if is_langgraph_api:
-    # When running in LangGraph API/dev, don't use a custom checkpointer
-    graph = workflow.compile()
-else:
+if is_fast_api:
     # For CopilotKit and other contexts, use MemorySaver
     from langgraph.checkpoint.memory import MemorySaver
     memory = MemorySaver()
-    graph = workflow.compile(checkpointer=memory)
+    graph = workflow.compile(checkpointer=memory)
+else:
+    # When running in LangGraph API/dev, don't use a custom checkpointer
+    graph = workflow.compile()

typescript-sdk/integrations/langgraph/examples/python/agents/tool_based_generative_ui/agent.py

Lines changed: 6 additions & 8 deletions
@@ -127,18 +127,16 @@ async def chat_node(state: AgentState, config: RunnableConfig):
 
 # Conditionally use a checkpointer based on the environment
 # Check for multiple indicators that we're running in LangGraph dev/API mode
-is_langgraph_api = (
-    os.environ.get("LANGGRAPH_API", "false").lower() == "true" or
-    os.environ.get("LANGGRAPH_API_DIR") is not None
-)
+is_fast_api = os.environ.get("LANGGRAPH_FAST_API", "false").lower() == "true"
 
 # Compile the graph
-if is_langgraph_api:
-    # When running in LangGraph API/dev, don't use a custom checkpointer
-    graph = workflow.compile()
-else:
+if is_fast_api:
     # For CopilotKit and other contexts, use MemorySaver
     from langgraph.checkpoint.memory import MemorySaver
     memory = MemorySaver()
     graph = workflow.compile(checkpointer=memory)
+else:
+    # When running in LangGraph API/dev, don't use a custom checkpointer
+    graph = workflow.compile()
+
 
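With every FastAPI-compiled graph now carrying a MemorySaver, each call must be scoped to a conversation; a short usage sketch (state shape and thread id are illustrative):

# MemorySaver keys persisted state by thread_id, so callers pass one
# through the standard configurable dict on every invocation.
config = {"configurable": {"thread_id": "user-42"}}
result = graph.invoke({"messages": []}, config)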
