9 | 9 | from template_langgraph.agents.image_classifier_agent.models import Results |
10 | 10 | from template_langgraph.agents.issue_formatter_agent.agent import graph as issue_formatter_agent_graph |
11 | 11 | from template_langgraph.agents.kabuto_helpdesk_agent.agent import graph as kabuto_helpdesk_agent_graph |
| 12 | +from template_langgraph.agents.news_summarizer_agent.agent import MockNotifier, NewsSummarizerAgent |
12 | 13 | from template_langgraph.agents.news_summarizer_agent.agent import ( |
13 | 14 | graph as news_summarizer_agent_graph, |
14 | 15 | ) |
15 | | -from template_langgraph.agents.news_summarizer_agent.models import Article |
| 16 | +from template_langgraph.agents.news_summarizer_agent.models import ( |
| 17 | + AgentInputState, |
| 18 | + AgentState, |
| 19 | + Article, |
| 20 | +) |
| 21 | +from template_langgraph.agents.news_summarizer_agent.scrapers import ( |
| 22 | + BaseScraper, |
| 23 | + HttpxScraper, |
| 24 | + YouTubeTranscriptScraper, |
| 25 | +) |
| 26 | +from template_langgraph.agents.news_summarizer_agent.summarizers import ( |
| 27 | + LlmSummarizer, |
| 28 | +) |
16 | 29 | from template_langgraph.agents.task_decomposer_agent.agent import graph as task_decomposer_agent_graph |
17 | 30 | from template_langgraph.loggers import get_logger |
18 | 31 |
@@ -43,6 +56,18 @@ def get_agent_graph(name: str): |
43 | 56 | raise ValueError(f"Unknown agent name: {name}") |
44 | 57 |
45 | 58 |
| 59 | +def get_scraper(scraper_type: str) -> BaseScraper: |
| 60 | + scraper = None |
| 61 | + if scraper_type == "Httpx": |
| 62 | + scraper = HttpxScraper() |
| 63 | + elif scraper_type == "YouTubeTranscript": |
| 64 | + scraper = YouTubeTranscriptScraper() |
| 65 | + |
| 66 | + if not scraper: |
| 67 | + raise ValueError(f"Unknown scraper type: {scraper_type}") |
| 68 | + return scraper |
| 69 | + |
| 70 | + |
46 | 71 | @app.command() |
47 | 72 | def png( |
48 | 73 | name: str = typer.Option( |
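
The `get_scraper` factory added above keeps the CLI decoupled from concrete scraper classes: it maps the string passed on the command line to an instance and raises on anything it does not recognize. Below is a minimal sketch of that behaviour, assuming this CLI module is importable so that `get_scraper` is in scope; the `"Rss"` value is a made-up unsupported type used only to show the error path.

```python
from template_langgraph.agents.news_summarizer_agent.scrapers import (
    HttpxScraper,
    YouTubeTranscriptScraper,
)

# Known names resolve to the matching scraper implementation.
assert isinstance(get_scraper("Httpx"), HttpxScraper)
assert isinstance(get_scraper("YouTubeTranscript"), YouTubeTranscriptScraper)

# Anything else fails fast instead of silently falling back to a default.
try:
    get_scraper("Rss")  # hypothetical, unsupported value
except ValueError as exc:
    print(exc)  # -> Unknown scraper type: Rss
```

Raising `ValueError` here means a typo in `--scraper` surfaces immediately rather than partway through a run.
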
@@ -134,23 +159,28 @@ def news_summarizer_agent( |
134 | 159 | "-u", |
135 | 160 | help="Comma-separated list of URLs to summarize", |
136 | 161 | ), |
| 162 | + scraper: str = typer.Option( |
| 163 | + "Httpx", # YouTubeTranscript |
| 164 | + "--scraper", |
| 165 | + "-s", |
| 166 | + help="Scraper to use for fetching content", |
| 167 | + ), |
137 | 168 | verbose: bool = typer.Option( |
138 | 169 | False, |
139 | 170 | "--verbose", |
140 | 171 | "-v", |
141 | 172 | help="Enable verbose output", |
142 | 173 | ), |
143 | 174 | ): |
144 | | - from template_langgraph.agents.news_summarizer_agent.models import ( |
145 | | - AgentInputState, |
146 | | - AgentState, |
147 | | - ) |
148 | | - |
149 | 175 | # Set up logging |
150 | 176 | if verbose: |
151 | 177 | logger.setLevel(logging.DEBUG) |
152 | 178 |
153 | | - graph = news_summarizer_agent_graph |
| 179 | + graph = NewsSummarizerAgent( |
| 180 | + notifier=MockNotifier(), |
| 181 | + scraper=get_scraper(scraper), |
| 182 | + summarizer=LlmSummarizer(), |
| 183 | + ).create_graph() |
154 | 184 | for event in graph.stream( |
155 | 185 | input=AgentState( |
156 | 186 | input=AgentInputState( |
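
With these changes the command no longer streams the module-level `news_summarizer_agent_graph`; it builds a graph from injected parts, so the scraper chosen via `--scraper` (and, in tests, the notifier and summarizer) can be swapped without touching the agent itself. A condensed sketch of that wiring follows, using only constructors that appear in this diff; the fields of `AgentInputState` are omitted because the hunk above truncates them.

```python
from template_langgraph.agents.news_summarizer_agent.agent import (
    MockNotifier,
    NewsSummarizerAgent,
)
from template_langgraph.agents.news_summarizer_agent.scrapers import YouTubeTranscriptScraper
from template_langgraph.agents.news_summarizer_agent.summarizers import LlmSummarizer

# The same dependency injection the CLI command performs when
# --scraper YouTubeTranscript is passed.
graph = NewsSummarizerAgent(
    notifier=MockNotifier(),
    scraper=YouTubeTranscriptScraper(),
    summarizer=LlmSummarizer(),
).create_graph()
```

The resulting graph is then consumed with `graph.stream(...)` exactly as in the hunk above.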