import asyncio
from praisonaiagents import Agent, Task, PraisonAIAgents

# Example tools (replace with real implementations as needed)
def fetch_favorite_article():
    # Simulate fetching your favorite morning article
    return "Your favorite morning article: 'How to Start Your Day Right'"

def search_trending_kenya():
    # Simulate searching for trending news in Kenya
    return "Trending in Kenya: 'Kenya launches new tech hub in Nairobi'"

def fetch_twitter_feed():
    # Simulate fetching Twitter feed
    return "Latest tweet: 'AI is transforming the world!'"

# Agents for each unique task
article_agent = Agent(
    name="ArticleAgent",
    role="Morning Article Fetcher",
    goal="Fetch the user's favorite morning article",
    tools=[fetch_favorite_article],
    llm="gemini/gemini-2.5-flash-lite-preview-06-17",
    verbose=True
)

news_agent = Agent(
    name="KenyaNewsAgent",
    role="Kenya News Searcher",
    goal="Search for trending news in Kenya",
    tools=[search_trending_kenya],
    llm="gemini/gemini-2.5-flash-lite-preview-06-17",
    verbose=True
)

twitter_agent = Agent(
    name="TwitterAgent",
    role="Twitter Feed Fetcher",
    goal="Fetch the latest Twitter feed",
    tools=[fetch_twitter_feed],
    llm="gemini/gemini-2.5-flash-lite-preview-06-17",
    verbose=True
)

aggregator = Agent(
    name="Aggregator",
    role="Result Aggregator",
    goal="Aggregate and summarize all results",
    llm="gemini/gemini-2.5-flash-lite-preview-06-17",
    verbose=True
)

# Tasks for each agent
article_task = Task(
    name="fetch_article",
    description="Fetch the user's favorite morning article.",
    expected_output="The favorite morning article.",
    agent=article_agent,
    is_start=True,
    async_execution=True
)

news_task = Task(
    name="search_kenya_news",
    description="Search for trending news in Kenya.",
    expected_output="Trending news in Kenya.",
    agent=news_agent,
    is_start=True,
    async_execution=True
)

twitter_task = Task(
    name="fetch_twitter",
    description="Fetch the latest Twitter feed.",
    expected_output="Latest Twitter feed.",
    agent=twitter_agent,
    is_start=True,
    async_execution=True
)
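# The three tasks above are start tasks (is_start=True) with async_execution=True,
# so the workflow launches them in parallel rather than one after another.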

# Aggregator task that depends on the above tasks
aggregate_task = Task(
    name="aggregate_results",
    description="Summarize the article, news, and Twitter feed results.",
    expected_output="A summary of all fetched information.",
    agent=aggregator,
    context=[article_task, news_task, twitter_task]
)

async def main():
    workflow = PraisonAIAgents(
        agents=[article_agent, news_agent, twitter_agent, aggregator],
        tasks=[article_task, news_task, twitter_task, aggregate_task],
        process="workflow",
        verbose=True
    )
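    # Run the workflow asynchronously; per-task outputs are collected under "task_results"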
    results = await workflow.astart()

    print("\nParallel Processing Results:")
    for task_id, result in results["task_results"].items():
        if result:
            print(f"Task {task_id}: {result.raw}")

if __name__ == "__main__":
    asyncio.run(main())