from dotenv import load_dotenv
from praisonaiagents import Agent, MCP

# Load environment variables (e.g. OPENAI_API_KEY) from a local .env file
# so they are available before the agent is constructed below.
load_dotenv()

# Directories the MCP filesystem server is allowed to touch. The server
# rejects any path outside this whitelist, so keep it as narrow as possible.
allowed_dirs = [
    "/Users/praison/praisonai-package/src/praisonai-agents",
]

# Filesystem agent wired to the official MCP filesystem server.
# The server is launched on demand via `npx`; the allowed directories are
# appended to its argv, which is how @modelcontextprotocol/server-filesystem
# expects its access whitelist to be passed.
filesystem_agent = Agent(
    instructions=(
        "You are a helpful assistant that can interact with the filesystem.\n"
        "Use the available tools when relevant to manage files and directories."
    ),
    llm="gpt-4o-mini",
    tools=MCP(
        command="npx",
        args=["-y", "@modelcontextprotocol/server-filesystem"] + allowed_dirs,
    ),
)

if __name__ == "__main__":
    # Kick off a single request; the agent picks the MCP tool itself.
    filesystem_agent.start(
        "List files in /Users/praison/praisonai-package/src/praisonai-agents "
        "directory using MCP list_files"
    )
0 commit comments