-
Notifications
You must be signed in to change notification settings - Fork 55
Expand file tree
/
Copy pathexample_deep_research.py
More file actions
97 lines (83 loc) · 3.44 KB
/
example_deep_research.py
File metadata and controls
97 lines (83 loc) · 3.44 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
import asyncio
import os
import litellm  # only referenced by the commented-out debug toggle in main()
from dotenv import load_dotenv
from minion_agent.config import AgentConfig, AgentFramework, MCPStdio
from minion_agent import MinionAgent
from smolagents import AzureOpenAIServerModel, CodeAgent
import minion_agent  # NOTE(review): appears unused in this file — confirm before removing
# Load environment variables from a local .env file (Azure credentials etc.)
load_dotenv()
async def main():
    """Run the deep-research demo.

    Builds two agents: a smolagents ``CodeAgent`` (with filesystem access via
    an MCP stdio server, so it can save generated files) and a DEEP_RESEARCH
    agent that performs the actual research.  The research agent is first run
    standalone as a sanity check, then attached to the main agent as a
    managed agent and driven with the research query.

    All model settings come from the environment (loaded from .env):
    AZURE_DEPLOYMENT_NAME, AZURE_OPENAI_ENDPOINT, AZURE_OPENAI_API_KEY,
    OPENAI_API_VERSION.
    """
    # The Azure deployment name is used both as the smolagents model id and
    # as the litellm "azure/<deployment>" model name; bind it once.
    deployment = os.environ.get("AZURE_DEPLOYMENT_NAME")

    # Main agent: coordinates the research and can write results to disk
    # through the MCP filesystem server.
    main_agent_config = AgentConfig(
        model_id=deployment,
        name="main_agent",
        description="Main agent that coordinates research and saves results",
        model_args={
            "azure_endpoint": os.environ.get("AZURE_OPENAI_ENDPOINT"),
            "api_key": os.environ.get("AZURE_OPENAI_API_KEY"),
            "api_version": os.environ.get("OPENAI_API_VERSION"),
        },
        model_type=AzureOpenAIServerModel,
        agent_type=CodeAgent,
        # NOTE(review): smolagents documents additional_authorized_imports as
        # a list (e.g. ["*"]); a bare "*" string seems to rely on string
        # iterability — confirm against the smolagents version in use.
        agent_args={"additional_authorized_imports": "*"},
        tools=[
            MCPStdio(
                command="npx",
                args=[
                    "-y",
                    "@modelcontextprotocol/server-filesystem",
                    "/Users/femtozheng/workspace",
                    "/Users/femtozheng/python-project/minion-agent",
                ],
            )
        ],
    )

    # litellm._turn_on_debug()  # uncomment for verbose litellm request tracing

    # Research agent: every model role points at the same Azure deployment,
    # addressed through litellm's "azure/<deployment>" naming scheme.
    research_agent_config = AgentConfig(
        framework=AgentFramework.DEEP_RESEARCH,
        model_id=deployment,
        name="research_assistant",
        description="A helpful research assistant that conducts deep research on topics",
        agent_args={
            "planning_model": "azure/" + deployment,
            "summarization_model": "azure/" + deployment,
            "json_model": "azure/" + deployment,
            "answer_model": "azure/" + deployment,
        },
    )

    research_agent = await MinionAgent.create_async(
        AgentFramework.DEEP_RESEARCH,
        research_agent_config,
    )
    # Standalone sanity run of the research agent before wiring it into the
    # main agent; the return value is intentionally discarded (the original
    # bound it and then had a dead no-op `result` expression).
    await research_agent.run_async(
        "Research The evolution of Indo-European languages, and generate a pdf out of it."
    )

    # Main agent with the research agent attached as a managed agent.
    main_agent = await MinionAgent.create_async(
        AgentFramework.SMOLAGENTS,
        main_agent_config,
        managed_agents=[research_agent_config],
    )

    research_query = """
    Research The evolution of Indo-European languages, and generate a pdf out of it.
    """
    try:
        # Run the research through the main agent.
        result = await main_agent.run_async(research_query)
        print("\n=== Research Results ===\n")
        print(result)
    except Exception as e:
        # Top-level boundary for a demo script: report the failure and let
        # the process end normally rather than crash with a traceback.
        print(f"Error during research: {str(e)}")
if __name__ == "__main__":
    # Fail fast with a clear message when the required credential is absent.
    if not os.getenv("AZURE_OPENAI_API_KEY"):
        print("Please set AZURE_OPENAI_API_KEY environment variable")
        # `raise SystemExit` instead of the builtin `exit()`: exit() is a
        # `site` module convenience and is not guaranteed to exist (e.g. when
        # run with -S or in frozen builds); SystemExit is always available.
        raise SystemExit(1)
    asyncio.run(main())