12 changes: 11 additions & 1 deletion .env.template
@@ -51,7 +51,7 @@ AI_SEARCH_KEY="xxx"
AI_SEARCH_INDEX_NAME="kabuto"

# ---------
# Utilities
# Internals
# ---------

## CSV Loader Settings
@@ -63,3 +63,13 @@ PDF_LOADER_DATA_DIR_PATH="./data"
## OpenTelemetry Settings
OTEL_SERVICE_NAME="template-langgraph"
OTEL_COLLECTOR_ENDPOINT="http://localhost:4317"

## Scraper Settings
SCRAPER_TYPE="mock" # Options: "mock", "httpx", "youtube_transcript"

## Summarizer Settings
SUMMARIZER_TYPE="mock" # Options: "mock", "llm"

## Notifier Settings
NOTIFIER_TYPE="mock" # Options: "mock", "slack"
NOTIFIER_SLACK_WEBHOOK_URL="https://hooks.slack.com/services/xxx"
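
These knobs are consumed through pydantic-settings factories; the notifier version is shown in template_langgraph/internals/notifiers.py further down this diff. A rough sketch of how SCRAPER_TYPE is presumably read (the ScraperType values and field name below are inferred from the options listed above; the scrapers module itself is not part of this diff):

from enum import Enum
from functools import lru_cache

from pydantic_settings import BaseSettings, SettingsConfigDict


class ScraperType(str, Enum):
    MOCK = "mock"
    HTTPX = "httpx"
    YOUTUBE_TRANSCRIPT = "youtube_transcript"


class ScraperSettings(BaseSettings):
    # Maps to the SCRAPER_TYPE env var (env names are case-insensitive in pydantic-settings).
    scraper_type: ScraperType = ScraperType.MOCK

    model_config = SettingsConfigDict(
        env_file=".env",
        env_ignore_empty=True,
        extra="ignore",
    )


@lru_cache
def get_scraper_settings() -> ScraperSettings:
    return ScraperSettings()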
2 changes: 1 addition & 1 deletion docs/index.ja.md
@@ -130,7 +130,7 @@ Pydantic モデルを使用して AI 応答から構造化データを取得す

- **`template_langgraph/llms/`** - LLM API ラッパー(Azure OpenAI など)
- **`template_langgraph/tools/`** - 検索、データ取得用ツール実装
- **`template_langgraph/utilities/`** - ドキュメント読み込みと処理用ヘルパー関数
- **`template_langgraph/internals/`** - 内部ユーティリティとヘルパー関数(CSV/PDF ローダー、Otel ラッパーなど)

## サンプルコードの実行

2 changes: 1 addition & 1 deletion docs/index.md
@@ -130,7 +130,7 @@ Implements the supervisor pattern where one agent coordinates multiple specializ

- **`template_langgraph/llms/`** - LLM API wrappers (Azure OpenAI, etc.)
- **`template_langgraph/tools/`** - Tool implementations for search, data retrieval
- **`template_langgraph/utilities/`** - Helper functions for document loading and processing
- **`template_langgraph/internals/`** - Internal utilities and helper functions (CSV/PDF loaders, Otel wrappers, etc.)

## Running the Examples

37 changes: 2 additions & 35 deletions scripts/agent_operator.py
@@ -9,23 +9,12 @@
from template_langgraph.agents.image_classifier_agent.models import Results
from template_langgraph.agents.issue_formatter_agent.agent import graph as issue_formatter_agent_graph
from template_langgraph.agents.kabuto_helpdesk_agent.agent import graph as kabuto_helpdesk_agent_graph
from template_langgraph.agents.news_summarizer_agent.agent import MockNotifier, NewsSummarizerAgent
from template_langgraph.agents.news_summarizer_agent.agent import (
graph as news_summarizer_agent_graph,
)
from template_langgraph.agents.news_summarizer_agent.agent import graph as news_summarizer_agent_graph
from template_langgraph.agents.news_summarizer_agent.models import (
AgentInputState,
AgentState,
Article,
)
from template_langgraph.agents.news_summarizer_agent.scrapers import (
BaseScraper,
HttpxScraper,
YouTubeTranscriptScraper,
)
from template_langgraph.agents.news_summarizer_agent.summarizers import (
LlmSummarizer,
)
from template_langgraph.agents.task_decomposer_agent.agent import graph as task_decomposer_agent_graph
from template_langgraph.loggers import get_logger

@@ -56,18 +45,6 @@ def get_agent_graph(name: str):
raise ValueError(f"Unknown agent name: {name}")


def get_scraper(scraper_type: str) -> BaseScraper:
scraper = None
if scraper_type == "Httpx":
scraper = HttpxScraper()
elif scraper_type == "YouTubeTranscript":
scraper = YouTubeTranscriptScraper()

if not scraper:
raise ValueError(f"Unknown scraper type: {scraper_type}")
return scraper


@app.command()
def png(
name: str = typer.Option(
@@ -159,12 +136,6 @@ def news_summarizer_agent(
"-u",
help="Comma-separated list of URLs to summarize",
),
scraper: str = typer.Option(
"Httpx", # YouTubeTranscript
"--scraper",
"-s",
help="Scraper to use for fetching content",
),
verbose: bool = typer.Option(
False,
"--verbose",
@@ -176,11 +147,7 @@
if verbose:
logger.setLevel(logging.DEBUG)

graph = NewsSummarizerAgent(
notifier=MockNotifier(),
scraper=get_scraper(scraper),
summarizer=LlmSummarizer(),
).create_graph()
graph = news_summarizer_agent_graph
for event in graph.stream(
input=AgentState(
input=AgentInputState(
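
Note that the removed --scraper CLI flag is not replaced by another flag; component selection now comes from the environment. A minimal sketch of the new flow, assuming get_scraper() in template_langgraph/internals/scrapers.py follows the same factory pattern as the get_notifier() added later in this diff:

# Sketch only: get_scraper() reads SCRAPER_TYPE from .env, so the operator
# script no longer needs to construct scrapers itself.
from template_langgraph.internals.scrapers import get_scraper

scraper = get_scraper()  # e.g. an httpx-based scraper when SCRAPER_TYPE="httpx"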
4 changes: 2 additions & 2 deletions scripts/ai_search_operator.py
@@ -3,10 +3,10 @@
import typer
from dotenv import load_dotenv

from template_langgraph.internals.csv_loaders import CsvLoaderWrapper
from template_langgraph.internals.pdf_loaders import PdfLoaderWrapper
from template_langgraph.loggers import get_logger
from template_langgraph.tools.ai_search_tool import AiSearchClientWrapper
from template_langgraph.utilities.csv_loaders import CsvLoaderWrapper
from template_langgraph.utilities.pdf_loaders import PdfLoaderWrapper

# Initialize the Typer application
app = typer.Typer(
4 changes: 2 additions & 2 deletions scripts/cosmosdb_operator.py
@@ -3,10 +3,10 @@
import typer
from dotenv import load_dotenv

from template_langgraph.internals.csv_loaders import CsvLoaderWrapper
from template_langgraph.internals.pdf_loaders import PdfLoaderWrapper
from template_langgraph.loggers import get_logger
from template_langgraph.tools.cosmosdb_tool import CosmosdbClientWrapper
from template_langgraph.utilities.csv_loaders import CsvLoaderWrapper
from template_langgraph.utilities.pdf_loaders import PdfLoaderWrapper

# Initialize the Typer application
app = typer.Typer(
2 changes: 1 addition & 1 deletion scripts/elasticsearch_operator.py
@@ -3,9 +3,9 @@
import typer
from dotenv import load_dotenv

from template_langgraph.internals.pdf_loaders import PdfLoaderWrapper
from template_langgraph.loggers import get_logger
from template_langgraph.tools.elasticsearch_tool import ElasticsearchClientWrapper
from template_langgraph.utilities.pdf_loaders import PdfLoaderWrapper

# Initialize the Typer application
app = typer.Typer(
2 changes: 1 addition & 1 deletion scripts/otel_operator.py
@@ -4,8 +4,8 @@
import typer
from dotenv import load_dotenv

from template_langgraph.internals.otel_helpers import OtelWrapper
from template_langgraph.loggers import get_logger
from template_langgraph.utilities.otel_helpers import OtelWrapper

# Initialize the Typer application
app = typer.Typer(
2 changes: 1 addition & 1 deletion scripts/qdrant_operator.py
@@ -4,10 +4,10 @@
from dotenv import load_dotenv
from qdrant_client.models import PointStruct

from template_langgraph.internals.csv_loaders import CsvLoaderWrapper
from template_langgraph.llms.azure_openais import AzureOpenAiWrapper
from template_langgraph.loggers import get_logger
from template_langgraph.tools.qdrant_tool import QdrantClientWrapper
from template_langgraph.utilities.csv_loaders import CsvLoaderWrapper

# Initialize the Typer application
app = typer.Typer(
48 changes: 16 additions & 32 deletions template_langgraph/agents/news_summarizer_agent/agent.py
@@ -8,40 +8,27 @@
StructuredArticle,
SummarizeWebContentState,
)
from template_langgraph.agents.news_summarizer_agent.scrapers import (
BaseScraper,
HttpxScraper,
MockScraper,
)
from template_langgraph.agents.news_summarizer_agent.summarizers import (
BaseSummarizer,
LlmSummarizer,
MockSummarizer,
)
from template_langgraph.internals.notifiers import get_notifier
from template_langgraph.internals.scrapers import get_scraper
from template_langgraph.internals.summarizers import get_summarizer
from template_langgraph.llms.azure_openais import AzureOpenAiWrapper
from template_langgraph.loggers import get_logger

logger = get_logger(__name__)


class MockNotifier:
def notify(self, id: str, body: dict) -> None:
"""Simulate sending a notification to the user."""
logger.info(f"Notification sent for request {id}: {body}")


class NewsSummarizerAgent:
def __init__(
self,
llm=AzureOpenAiWrapper().chat_model,
notifier=MockNotifier(),
scraper: BaseScraper = MockScraper(),
summarizer: BaseSummarizer = MockSummarizer(),
notifier=get_notifier(),
scraper=get_scraper(),
summarizer=get_summarizer(),
):
self.llm = llm
self.notifier = notifier
self.scraper: BaseScraper = scraper
self.summarizer: BaseSummarizer = summarizer
self.scraper = scraper
self.summarizer = summarizer

def create_graph(self):
"""Create the main graph for the agent."""
@@ -127,23 +114,20 @@ def summarize_web_content(self, state: SummarizeWebContentState):
def notify(self, state: AgentState) -> AgentState:
"""Send notifications to the user."""
logger.info(f"Sending notifications with state: {state}")
# Simulate sending notifications
# convert list of articles to a dictionary for notification
summary = {}
for i, article in enumerate(state.articles):
summary[i] = article.model_dump()
summary[i] = {
"url": article.url,
"structured_article": article.structured_article.model_dump(),
}
self.notifier.notify(
id=state.input.id,
body=summary,
text=summary.__str__(),
)
return state


# For testing
# graph = NewsSummarizerAgent().create_graph()

graph = NewsSummarizerAgent(
notifier=MockNotifier(),
scraper=HttpxScraper(),
summarizer=LlmSummarizer(),
notifier=get_notifier(),
scraper=get_scraper(),
summarizer=get_summarizer(),
).create_graph()
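
Callers that want explicit wiring (as scripts/agent_operator.py previously did) can still pass components to the constructor. A minimal sketch, assuming MockScraper and MockSummarizer are exported from the new internals modules (only MockNotifier is confirmed by this diff):

from template_langgraph.agents.news_summarizer_agent.agent import NewsSummarizerAgent
from template_langgraph.internals.notifiers import MockNotifier
from template_langgraph.internals.scrapers import MockScraper  # assumed export
from template_langgraph.internals.summarizers import MockSummarizer  # assumed export

# Build a fully offline graph for tests, bypassing the env-driven defaults.
graph = NewsSummarizerAgent(
    notifier=MockNotifier(),
    scraper=MockScraper(),
    summarizer=MockSummarizer(),
).create_graph()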
91 changes: 91 additions & 0 deletions template_langgraph/internals/notifiers.py
@@ -0,0 +1,91 @@
"""Scraper interfaces and implementations for NewsSummarizerAgent.

This module defines an abstract base scraper so different scraping strategies
(mock, httpx-based, future headless browser, etc.) can be plugged into the agent
without changing orchestration logic.
"""

from __future__ import annotations

from abc import ABC, abstractmethod
from enum import Enum
from functools import lru_cache

import httpx
from pydantic_settings import BaseSettings, SettingsConfigDict

from template_langgraph.loggers import get_logger

logger = get_logger(__name__)


class NotifierType(str, Enum):
MOCK = "mock"
SLACK = "slack"


class Settings(BaseSettings):
notifier_type: NotifierType = NotifierType.MOCK
notifier_slack_webhook_url: str = "https://hooks.slack.com/services/Txxx/Bxxx/xxx"

model_config = SettingsConfigDict(
env_file=".env",
env_ignore_empty=True,
extra="ignore",
)


@lru_cache
def get_notifier_settings() -> Settings:
"""Get notifier settings."""
return Settings()


class BaseNotifier(ABC):
"""Abstract base notifier."""

@abstractmethod
def notify(self, text: str):
"""Send a notification with the given text.

Args:
text: The text to include in the notification.

"""
raise NotImplementedError


class MockNotifier(BaseNotifier):
"""Deterministic notifier for tests / offline development."""

def notify(self, text: str):
logger.info(f"Mock notify with text: {text}")


class SlackNotifier(BaseNotifier):
"""Slack notifier for sending notifications to a Slack channel."""

def __init__(self, settings=get_notifier_settings()):
self.webhook_url = settings.notifier_slack_webhook_url

def notify(self, text: str):
logger.info(f"Slack notify with text: {text}")
with httpx.Client() as client:
client.post(
self.webhook_url,
json={
"text": text,
},
)


def get_notifier(settings: Settings | None = None) -> BaseNotifier:
if settings is None:
settings = get_notifier_settings()

if settings.notifier_type == NotifierType.MOCK:
return MockNotifier()
elif settings.notifier_type == NotifierType.SLACK:
return SlackNotifier(settings)
else:
raise ValueError(f"Unknown notifier type: {settings.notifier_type}")
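
A quick usage sketch for the factory above; with NOTIFIER_TYPE left at its "mock" default this just logs, and with NOTIFIER_TYPE="slack" it posts the text to the configured webhook:

from template_langgraph.internals.notifiers import get_notifier

notifier = get_notifier()  # MockNotifier by default, SlackNotifier when NOTIFIER_TYPE="slack"
notifier.notify(text="News summaries are ready.")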