2 changes: 2 additions & 0 deletions .gitignore
@@ -168,3 +168,5 @@ generated/
*.db
*.wav
mlartifacts
*.sqlite
*.bin
4 changes: 4 additions & 0 deletions docs/references.md
@@ -17,6 +17,10 @@
- [Custom UI for Deep Agents](https://github.com/langchain-ai/deep-agents-ui)
- [How to deploy self-hosted standalone server](https://docs.langchain.com/langgraph-platform/deploy-standalone-server)
- [Repository for the book 「現場で活用するための AI エージェント実践入門」 (Practical Introduction to AI Agents for Real-World Use)](https://github.com/masamasa59/genai-agent-advanced-book)
- [Add and manage memory](https://docs.langchain.com/oss/python/langgraph/add-memory)
- [Persistence](https://langchain-ai.github.io/langgraph/concepts/persistence/)
- [Chatbot with message summarization & external DB memory](https://github.com/langchain-ai/langchain-academy/blob/main/module-2/chatbot-external-memory.ipynb)
- [Persisting LangGraph Conversation History in SQLite (LangGraph の会話履歴を SQLite に保持しよう)](https://www.creationline.com/tech-blog/chatgpt-ai/75797)

### LangChain

1 change: 1 addition & 0 deletions pyproject.toml
@@ -27,6 +27,7 @@ dependencies = [
"langchain-text-splitters>=0.3.9",
"langfuse>=3.6.2",
"langgraph>=0.6.2",
"langgraph-checkpoint-sqlite>=2.0.11",
"langgraph-supervisor>=0.0.29",
"mlflow>=3.4.0",
"openai-whisper>=20250625",
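
For context, this new dependency is what provides the SQLite-backed checkpointer and store imported later in this PR. A minimal sketch of constructing both; the database file names are illustrative, mirroring the ones used in the app below:

import sqlite3

from langgraph.checkpoint.sqlite import SqliteSaver
from langgraph.store.sqlite import SqliteStore

# check_same_thread=False lets one connection be shared across the app's threads.
checkpointer = SqliteSaver(conn=sqlite3.connect("checkpoints.sqlite", check_same_thread=False))
store = SqliteStore(conn=sqlite3.connect("store.sqlite", check_same_thread=False))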
11 changes: 10 additions & 1 deletion template_langgraph/agents/chat_with_tools_agent/agent.py
@@ -50,9 +50,16 @@ def __call__(self, inputs: dict):


class ChatWithToolsAgent:
def __init__(self, tools=get_default_tools()):
def __init__(
self,
tools=get_default_tools(),
checkpointer=None,
store=None,
):
self.llm = AzureOpenAiWrapper().chat_model
self.tools = tools
self.checkpointer = checkpointer
self.store = store

def create_graph(self):
"""Create the main graph for the agent."""
@@ -83,6 +90,8 @@ def create_graph(self):
# Compile the graph
return workflow.compile(
name=ChatWithToolsAgent.__name__,
checkpointer=self.checkpointer,
store=self.store,
)

def chat_with_tools(self, state: AgentState) -> AgentState:
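
With the two new optional parameters, persistence is opt-in: leaving checkpointer and store as None compiles the graph exactly as before. A hedged usage sketch, assuming a local SQLite file and an illustrative thread id (neither is part of this PR):

import sqlite3

from langgraph.checkpoint.sqlite import SqliteSaver

from template_langgraph.agents.chat_with_tools_agent.agent import ChatWithToolsAgent

# Compile the agent graph with a SQLite-backed checkpointer so message history
# survives restarts; omitting checkpointer/store keeps the previous stateless behavior.
saver = SqliteSaver(conn=sqlite3.connect("checkpoints.sqlite", check_same_thread=False))
graph = ChatWithToolsAgent(checkpointer=saver).create_graph()

# Each distinct thread_id maps to its own persisted conversation.
config = {"configurable": {"thread_id": "demo-thread"}}
result = graph.invoke({"messages": [("user", "Hello!")]}, config)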
@@ -1,5 +1,7 @@
import os
import sqlite3
import tempfile
import uuid
from base64 import b64encode
from dataclasses import dataclass

@@ -9,6 +11,8 @@
StreamlitCallbackHandler,
)
from langfuse.langchain import CallbackHandler
from langgraph.checkpoint.sqlite import SqliteSaver
from langgraph.store.sqlite import SqliteStore

from template_langgraph.agents.chat_with_tools_agent.agent import (
AgentState,
@@ -18,6 +22,10 @@
from template_langgraph.speeches.tts import TtsWrapper
from template_langgraph.tools.common import get_default_tools

checkpoints_conn = sqlite3.connect("checkpoints.sqlite", check_same_thread=False)
store_conn = sqlite3.connect("store.sqlite", check_same_thread=False)
thread_id = str(uuid.uuid4())


def image_to_base64(image_bytes: bytes) -> str:
return b64encode(image_bytes).decode("utf-8")
@@ -68,7 +76,15 @@ def ensure_agent_graph(selected_tools: list) -> None:
signature = tuple(tool.name for tool in selected_tools)
graph_signature = st.session_state.get("graph_tools_signature")
if "graph" not in st.session_state or graph_signature != signature:
st.session_state["graph"] = ChatWithToolsAgent(tools=selected_tools).create_graph()
st.session_state["graph"] = ChatWithToolsAgent(
tools=selected_tools,
checkpointer=SqliteSaver(
conn=checkpoints_conn,
),
store=SqliteStore(
conn=store_conn,
),
).create_graph()
st.session_state["graph_tools_signature"] = signature


@@ -296,12 +312,18 @@ def build_graph_messages() -> list:

def invoke_agent(graph_messages: list) -> AgentState:
return st.session_state["graph"].invoke(
{"messages": graph_messages},
{
"messages": graph_messages,
},
{
"callbacks": [
StreamlitCallbackHandler(st.container()),
CallbackHandler(),
]
],
"configurable": {
"thread_id": thread_id,
"user_id": "user_1",
},
},
)

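
One hedged caveat: Streamlit re-executes the page script on every interaction, so a module-level uuid.uuid4() would mint a new thread_id per rerun and the SqliteSaver would never be asked for the same thread twice. If one conversation per browser session is the intent, the id could live in st.session_state instead. A sketch under that assumption, reusing the app's existing imports (the session-state key names are illustrative); it also shows reading back the persisted checkpoint:

# One conversation thread per browser session, not per script rerun.
if "thread_id" not in st.session_state:
    st.session_state["thread_id"] = str(uuid.uuid4())

config = {
    "configurable": {
        "thread_id": st.session_state["thread_id"],
        "user_id": "user_1",
    },
}

# Inspect the latest checkpoint SqliteSaver persisted for this thread.
snapshot = st.session_state["graph"].get_state(config)
st.write(snapshot.values.get("messages", []))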
40 changes: 40 additions & 0 deletions uv.lock

Some generated files are not rendered by default.