12 changes: 8 additions & 4 deletions docs/index.md
@@ -1,13 +1,17 @@
# template-langgraph

## Operations

see [test_all.sh](../scripts/test_all.sh) for all operations
## Overview

```shell
# Start Docker containers
# Set up environment
docker compose up -d
```

## Testing

See [test_all.sh](../scripts/test_all.sh) for all operations.

```shell
# Test all scripts
bash scripts/test_all.sh
```
120 changes: 120 additions & 0 deletions notebooks/chat_with_tools.ipynb

Large diffs are not rendered by default.

45 changes: 0 additions & 45 deletions notebooks/templates.ipynb

This file was deleted.

File renamed without changes.
2 changes: 1 addition & 1 deletion scripts/dify_operator.py
@@ -54,7 +54,7 @@ def run_workflow(
ensure_ascii=False,
)
)
logger.info(f"Input: {response['data']['outputs']['requirements']}, Output: {response['data']['outputs']['text']}")
logger.info(f"Output: {response['data']['outputs']['text']}")


if __name__ == "__main__":
2 changes: 1 addition & 1 deletion scripts/qdrant_operator.py
@@ -6,7 +6,7 @@

from template_langgraph.llms.azure_openais import AzureOpenAiWrapper
from template_langgraph.loggers import get_logger
from template_langgraph.tools.qdrants import QdrantClientWrapper
from template_langgraph.tools.qdrant_tool import QdrantClientWrapper
from template_langgraph.utilities.csv_loaders import CsvLoaderWrapper

# Initialize the Typer application
13 changes: 11 additions & 2 deletions scripts/test_all.sh
@@ -1,5 +1,11 @@
#!/bin/bash

# Stop on errors, undefined variables, and pipe failures
set -euo pipefail

# Run Docker Compose to set up the environment
docker compose up -d --wait

# Qdrant
uv run python scripts/qdrant_operator.py --help
uv run python scripts/qdrant_operator.py delete-collection --collection-name qa_kabuto --verbose
@@ -24,7 +30,7 @@ AGENT_NAMES=(
"task_decomposer_agent"
)
for AGENT_NAME in "${AGENT_NAMES[@]}"; do
uv run python scripts/agent_runner.py png --name "$AGENT_NAME" --verbose --output "generated/${AGENT_NAME}.png"
uv run python scripts/agent_operator.py png --name "$AGENT_NAME" --verbose --output "generated/${AGENT_NAME}.png"
done

## Run agents
@@ -38,5 +44,8 @@ NAME_QUESTION_ARRAY=(
for NAME_QUESTION in "${NAME_QUESTION_ARRAY[@]}"; do
IFS=':' read -r AGENT_NAME QUESTION <<< "$NAME_QUESTION"
echo "Running agent: $AGENT_NAME with question: $QUESTION"
uv run python scripts/agent_runner.py run --name "$AGENT_NAME" --verbose --question "$QUESTION"
uv run python scripts/agent_operator.py run --name "$AGENT_NAME" --verbose --question "$QUESTION"
done

# Clean up Docker Compose environment
docker compose down --remove-orphans
18 changes: 9 additions & 9 deletions template_langgraph/agents/chat_with_tools_agent/agent.py
@@ -6,8 +6,9 @@
from template_langgraph.agents.chat_with_tools_agent.models import AgentState
from template_langgraph.llms.azure_openais import AzureOpenAiWrapper
from template_langgraph.loggers import get_logger
from template_langgraph.tools.dify_tool import run_dify_workflow
from template_langgraph.tools.elasticsearch_tool import search_elasticsearch
from template_langgraph.tools.qdrants import search_qdrant
from template_langgraph.tools.qdrant_tool import search_qdrant

logger = get_logger(__name__)

@@ -39,6 +40,11 @@ def __call__(self, inputs: dict):
class ChatWithToolsAgent:
def __init__(self):
self.llm = AzureOpenAiWrapper().chat_model
self.tools = [
run_dify_workflow,
search_qdrant,
search_elasticsearch,
]

def create_graph(self):
"""Create the main graph for the agent."""
@@ -50,10 +56,7 @@ def create_graph(self):
workflow.add_node(
"tools",
BasicToolNode(
tools=[
search_qdrant,
search_elasticsearch,
]
tools=self.tools,
),
)

@@ -78,10 +81,7 @@ def chat_with_tools(self, state: AgentState) -> AgentState:
"""Chat with tools using the state."""
logger.info(f"Chatting with tools using state: {state}")
llm_with_tools = self.llm.bind_tools(
tools=[
search_qdrant,
search_elasticsearch,
],
tools=self.tools,
)
return {
"messages": [
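For context, here is a minimal sketch of how the refactored agent might be exercised. It assumes `create_graph()` returns a compiled LangGraph graph and that `AgentState` carries a `messages` list; both are inferred from the hunks above rather than confirmed by the full file.

```python
from langchain_core.messages import HumanMessage

from template_langgraph.agents.chat_with_tools_agent.agent import ChatWithToolsAgent

# The shared self.tools list is now used both by bind_tools() and by the
# "tools" node, so the LLM and the tool executor stay in sync.
graph = ChatWithToolsAgent().create_graph()  # assumed to return a compiled graph

result = graph.invoke(
    {"messages": [HumanMessage(content="Search the KABUTO knowledge base for login errors.")]}
)
print(result["messages"][-1].content)
```

Centralizing the tool list in `self.tools` also removes the earlier duplication, where `bind_tools` and `BasicToolNode` each carried their own copy.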
4 changes: 3 additions & 1 deletion template_langgraph/agents/kabuto_helpdesk_agent/agent.py
@@ -2,8 +2,9 @@

from template_langgraph.llms.azure_openais import AzureOpenAiWrapper
from template_langgraph.loggers import get_logger
from template_langgraph.tools.dify_tool import run_dify_workflow
from template_langgraph.tools.elasticsearch_tool import search_elasticsearch
from template_langgraph.tools.qdrants import search_qdrant
from template_langgraph.tools.qdrant_tool import search_qdrant

logger = get_logger(__name__)

@@ -13,6 +14,7 @@ def __init__(self, tools=None):
if tools is None:
# Default tools for the agent
tools = [
run_dify_workflow,
search_qdrant,
search_elasticsearch,
# Add other tools as needed
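A hedged sketch of constructing the helpdesk agent after this change. The class name `KabutoHelpdeskAgent` is an assumption for illustration; only the `__init__(self, tools=None)` signature and the default tool list are visible in this hunk.

```python
from template_langgraph.agents.kabuto_helpdesk_agent.agent import KabutoHelpdeskAgent  # class name assumed
from template_langgraph.tools.qdrant_tool import search_qdrant

# Default construction now wires up the Dify, Qdrant, and Elasticsearch tools.
agent = KabutoHelpdeskAgent()

# Callers can still narrow the tool set explicitly, e.g. to Qdrant only.
qdrant_only_agent = KabutoHelpdeskAgent(tools=[search_qdrant])
```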
35 changes: 35 additions & 0 deletions template_langgraph/tools/dify_tool.py
@@ -1,6 +1,8 @@
from functools import lru_cache

import httpx
from langchain.tools import tool
from pydantic import BaseModel, Field
from pydantic_settings import BaseSettings, SettingsConfigDict


@@ -48,3 +50,36 @@ def run_workflow(
)
response.raise_for_status()
return response.json()


class DifyWorkflowInput(BaseModel):
requirements: str = Field(
default="生成 AI のサービス概要を教えてください。日本語でお願いします",
description="Requirements for running the Dify workflow",
)


class DifyWorkflowOutput(BaseModel):
response: dict = Field(description="Output data from the Dify workflow")


@tool(args_schema=DifyWorkflowInput)
def run_dify_workflow(
requirements: str = "生成 AI のサービス概要を教えてください。日本語でお願いします",
) -> DifyWorkflowOutput:
"""
Run the Dify workflow.
Execute the workflow with the given input parameters and return the result.
"""
wrapper = DifyClientWrapper()
response = wrapper.run_workflow(
inputs={
"inputs": {
"requirements": requirements,
},
"response_mode": "blocking",
"user": "abc-123",
}
)

return DifyWorkflowOutput(response=response)
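To try the new tool on its own, a LangChain `@tool` can be called with `.invoke()` and a dict matching its `args_schema`. The sketch below assumes the Dify endpoint and API key are already configured for `DifyClientWrapper`, and that the response shape matches what `scripts/dify_operator.py` logs (`data.outputs.text`).

```python
from template_langgraph.tools.dify_tool import run_dify_workflow

# Arguments are validated against DifyWorkflowInput before the workflow runs.
result = run_dify_workflow.invoke(
    {"requirements": "Please summarize the generative AI service overview."}
)

# DifyWorkflowOutput.response holds the raw JSON returned by the workflow API.
print(result.response["data"]["outputs"]["text"])
```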