Skip to content
117 changes: 86 additions & 31 deletions AgentCrew/modules/chat/message/command_processor.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
from dataclasses import dataclass
from typing import Optional, Tuple
from typing import Optional, Tuple, List, Dict, Any, Union
import os

from AgentCrew.modules.agents.local_agent import LocalAgent
Expand Down Expand Up @@ -438,39 +438,94 @@ def _handle_agent_command(self, command: str) -> Tuple[bool, str]:
)

def _handle_file_command(self, user_input: str) -> CommandResult:
    """Handle the /file command: attach one or more files to the conversation.

    Expects ``user_input`` of the form ``/file <path1> [path2] ...``. Each path
    is user-expanded (``~``), processed through the file handler (falling back
    to the agent's own file-message formatting), and all successfully read
    files are appended to the history as a single user message whose text wraps
    each file in ``<file path='...'>`` boundaries.

    NOTE(review): paths are split on whitespace, so file names containing
    spaces are not supported by this command syntax.

    Args:
        user_input: The raw command line, including the "/file " prefix.

    Returns:
        CommandResult with handled=True and clear_flag=True in all cases.
    """
    # Strip the command prefix "/file " and surrounding whitespace.
    file_paths_str: str = user_input[6:].strip()

    if not file_paths_str:
        self.message_handler._notify(
            "error", "Usage: /file <file_path1> [file_path2] ..."
        )
        return CommandResult(handled=True, clear_flag=True)

    # str.split() with no argument collapses runs of whitespace and never
    # yields empty tokens, so file_paths is guaranteed non-empty here and no
    # further filtering is needed.
    file_paths: List[str] = [
        os.path.expanduser(path) for path in file_paths_str.split()
    ]

    # Lazily create the file handler on first use.
    if self.message_handler.file_handler is None:
        self.message_handler.file_handler = FileHandler()

    # Imported once here (not per loop iteration); kept function-local to
    # match the original's deferred-import style.
    from AgentCrew.modules.agents.base import MessageType

    processed_files: List[str] = []
    failed_files: List[str] = []
    file_contents: List[str] = []

    for file_path in file_paths:
        # Primary path: the file handling service.
        file_content: Optional[Union[Dict[str, Any], str]] = (
            self.message_handler.file_handler.process_file(file_path)
        )

        # Fallback: let the LLM agent format the file message itself.
        if not file_content:
            file_content = self.message_handler.agent.format_message(
                MessageType.FileContent, {"file_uri": file_path}
            )

        if file_content:
            # Normalize the handler's result to plain text.
            content_text: str
            if isinstance(file_content, dict) and "text" in file_content:
                content_text = file_content["text"]
            elif isinstance(file_content, str):
                content_text = file_content
            else:
                # Unexpected format — stringify rather than drop the file.
                content_text = str(file_content)

            file_contents.append(content_text)
            processed_files.append(file_path)
            # Payload shape matches what ConsoleUI.listen() expects.
            self.message_handler._notify("file_processed", {"file_path": file_path})
        else:
            failed_files.append(file_path)
            self.message_handler._notify(
                "error",
                f"Failed to process file {file_path} or model is not supported",
            )

    # Append all successfully processed files as ONE user message. A single
    # join handles both the one-file and many-file cases identically (a
    # one-element join emits no separator), replacing the former duplicated
    # single/multiple branches.
    if file_contents:
        combined_content: str = "\n\n".join(
            f"<file path='{path}'>\n{content}\n</file>"
            for path, content in zip(processed_files, file_contents)
        )
        # {"type": "text", ...} content-part format keeps this consistent
        # with other messages; the bare [file_content] form broke some LLM
        # providers (see PR discussion).
        message_content: Dict[str, Any] = {
            "role": "user",
            "content": [{"type": "text", "text": combined_content}],
        }
        self.message_handler._messages_append(message_content)

    # Summarize the outcome for the user.
    summary: str
    if processed_files and failed_files:
        summary = (
            f"Processed {len(processed_files)} files successfully, "
            f"{len(failed_files)} failed"
        )
    elif processed_files:
        summary = f"Successfully processed {len(processed_files)} file(s)"
    else:
        summary = f"Failed to process all {len(failed_files)} file(s)"

    self.message_handler._notify("system_message", summary)

    return CommandResult(handled=True, clear_flag=True)