Commit 0cc163f

add jupyter notebook
1 parent 93db6f8 commit 0cc163f

File tree

2 files changed: 109 additions, 1 deletion


agents/web_search_agent/README.md

Lines changed: 1 addition & 1 deletion
@@ -59,7 +59,7 @@ curl -X 'POST' \
 
 4. AgentOps will automatically capture the session:
 - View the [agentops.log](agentops.log) file
-- See the[AgentOps Dashboard](https://app.agentops.ai/drilldown)
+- See the [AgentOps Dashboard](https://app.agentops.ai/drilldown)
 
 ## Dependencies
 
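The context above notes that AgentOps captures the session automatically once agentops.init(...) has run, which is exactly what the new notebook below does. As a minimal sketch of that wiring, assuming the agentops SDK's end_session call for explicitly closing a run (end_session is not part of this commit, and its exact signature depends on the installed agentops version):

import os

import agentops
from dotenv import load_dotenv

load_dotenv()
agentops.init(os.getenv("AGENTOPS_API_KEY"))  # starts a session that AgentOps records

# ... run the agent and make tool/LLM calls here ...

agentops.end_session("Success")  # assumption: explicit close with an end state; verify against your agentops version

The agentops.log file and the dashboard drilldown linked above are where that recorded session then shows up.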

Lines changed: 108 additions & 0 deletions
@@ -0,0 +1,108 @@
{
  "cells": [
    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "# Web Search Agent\n",
        "This notebook demonstrates a web search agent using SwarmZero framework and Tavily API."
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {},
      "source": [
        "import os\n",
        "\n",
        "import agentops\n",
        "from dotenv import load_dotenv\n",
        "from swarmzero import Agent\n",
        "from tavily import TavilyClient\n",
        "\n",
        "load_dotenv()\n",
        "agentops.init(os.getenv(\"AGENTOPS_API_KEY\"))\n",
        "tavily_client = TavilyClient(api_key=os.getenv(\"TAVILY_API_KEY\"))"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "## Define Search Functions"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {},
      "source": [
        "async def web_search(query: str) -> dict:\n",
        "    response = tavily_client.search(query)\n",
        "    results = []\n",
        "    for result in response[\"results\"][:3]:\n",
        "        results.append({\"title\": result[\"title\"], \"url\": result[\"url\"], \"content\": result[\"content\"]})\n",
        "    return results\n",
        "\n",
        "\n",
        "async def extract_from_urls(urls: list[str]) -> dict:\n",
        "    response = tavily_client.extract(urls=urls)\n",
        "\n",
        "    if response[\"failed_results\"]:\n",
        "        print(f\"Failed to extract from {response['failed_results']}\")\n",
        "\n",
        "    results = []\n",
        "    for result in response[\"results\"]:\n",
        "        results.append({\"url\": result[\"url\"], \"raw_content\": result[\"raw_content\"]})\n",
        "\n",
        "    return results"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "## Initialize and Run Agent"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {},
      "source": [
        "my_agent = Agent(\n",
        "    name=\"workflow-assistant\",\n",
        "    functions=[\n",
        "        web_search,\n",
        "        extract_from_urls,\n",
        "    ],\n",
        "    config_path=\"./swarmzero_config.toml\",\n",
        "    instruction=\"You are a helpful assistant that can search the web and extract information from a given URL.\",\n",
        ")\n",
        "\n",
        "my_agent.run() # see agent API at localhost:8000/docs"
      ]
    }
  ],
  "metadata": {
    "kernelspec": {
      "display_name": "Python 3",
      "language": "python",
      "name": "python3"
    },
    "language_info": {
      "codemirror_mode": {
        "name": "ipython",
        "version": 3
      },
      "file_extension": ".py",
      "mimetype": "text/x-python",
      "name": "python",
      "nbconvert_exporter": "python",
      "pygments_lexer": "ipython3",
      "version": "3.11"
    }
  },
  "nbformat": 4,
  "nbformat_minor": 4
}
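
The final cell starts the agent's HTTP API (browsable at localhost:8000/docs once it is running). The two Tavily helpers can also be exercised directly in a notebook cell before wiring them into the agent; the sketch below is illustrative only, assumes the setup and function-definition cells above have been executed, and uses a made-up query string. Note that both helpers are annotated -> dict but actually return lists of dicts, which is how the sketch treats them.

# Illustrative cell, not part of this commit: call the helpers directly.
# Jupyter/IPython allows top-level await, so no asyncio.run() is needed here.
results = await web_search("SwarmZero web search agent")  # hypothetical query
for r in results:
    print(r["title"], "-", r["url"])

pages = await extract_from_urls([r["url"] for r in results])
print(f"Extracted raw content from {len(pages)} pages")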
