Feature: Allow adding multiple files with /file command #25
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Merged: daltonnyx merged 7 commits into saigontechnology:main from lewistransts:feat/add-multiple-files on Jul 6, 2025.
Changes shown are from 4 of the 7 commits.

Commits (all authored by lewistransts):
748acf5  feat(command_processor.py): enhance file command to support multiple …
254176c  refactor(command_processor.py): add type hints for better code clarit…
1fa4d93  refactor(command_processor.py): update comments for clarity and remov…
5347ea7  refactor(command_processor.py): remove redundant notification for fil…
d1a4187  refactor(command_processor.py): simplify file command handling by rem…
557cd27  refactor(command_processor.py): update type hint for all_file_content…
d5c1374  refactor(command_processor.py): simplify file content processing by r…
command_processor.py:

```diff
@@ -1,5 +1,5 @@
 from dataclasses import dataclass
-from typing import Optional, Tuple
+from typing import Optional, Tuple, List, Dict, Any, Union
 import os
 
 from AgentCrew.modules.agents.local_agent import LocalAgent
@@ -438,39 +438,94 @@ def _handle_agent_command(self, command: str) -> Tuple[bool, str]:
         )
 
     def _handle_file_command(self, user_input: str) -> CommandResult:
         """Handle file command."""
-        file_path = user_input[6:].strip()
-        file_path = os.path.expanduser(file_path)
-
-        self.message_handler._notify("file_processing", {"file_path": file_path})
+        # Remove the command prefix "/file " and strip whitespace
+        file_paths_str: str = user_input[6:].strip()
+
+        # Handle empty input
+        if not file_paths_str:
+            self.message_handler._notify("error", "Usage: /file <file_path1> [file_path2] ...")
+            return CommandResult(handled=True, clear_flag=True)
+
+        # Split file paths and expand user paths
+        file_paths: List[str] = [os.path.expanduser(path.strip()) for path in file_paths_str.split()]
+
+        # Remove empty paths
+        file_paths = [path for path in file_paths if path]
+
+        if not file_paths:
+            self.message_handler._notify("error", "No valid file paths provided")
+            return CommandResult(handled=True, clear_flag=True)
+
-        # Process file with the file handling service
+        # Initialize file handler if needed
         if self.message_handler.file_handler is None:
             self.message_handler.file_handler = FileHandler()
-        file_content = self.message_handler.file_handler.process_file(file_path)
-        # Fallback to llm handle
-        if not file_content:
-            from AgentCrew.modules.agents.base import MessageType
-
-            file_content = self.message_handler.agent.format_message(
-                MessageType.FileContent, {"file_uri": file_path}
-            )
+
+        processed_files: List[str] = []
+        failed_files: List[str] = []
+        file_contents: List[str] = []
+
+        # Process each file
+        for file_path in file_paths:
+            # Process file with the file handling service
+            file_content: Optional[Union[Dict[str, Any], str]] = self.message_handler.file_handler.process_file(file_path)
+
+            # Fallback to llm handle
+            if not file_content:
+                from AgentCrew.modules.agents.base import MessageType
+                file_content = self.message_handler.agent.format_message(
+                    MessageType.FileContent, {"file_uri": file_path}
+                )
+
-        if file_content:
-            self.message_handler._messages_append(
-                {"role": "user", "content": [file_content]}
-            )
-            self.message_handler._notify(
-                "file_processed",
-                {
-                    "file_path": file_path,
-                    "message": self.message_handler.agent.history[-1],
-                },
-            )
-            return CommandResult(handled=True, clear_flag=True)
+            if file_content:
+                # Extract text content from the result
+                content_text: str
+                if isinstance(file_content, dict) and "text" in file_content:
+                    content_text = file_content["text"]
+                elif isinstance(file_content, str):
+                    content_text = file_content
+                else:
+                    # Handle unexpected format
+                    content_text = str(file_content)
+
+                file_contents.append(content_text)
+                processed_files.append(file_path)
+                # Notify using the correct format that matches ConsoleUI.listen()
+                self.message_handler._notify("file_processed", {"file_path": file_path})
+            else:
+                failed_files.append(file_path)
+                self.message_handler._notify(
+                    "error",
+                    f"Failed to process file {file_path} or model is not supported",
+                )
+
+        # Add all successfully processed files to the conversation
+        if file_contents:
+            # Create a single message with all file contents using XML-style boundaries
+            combined_content: str
+            if len(file_contents) == 1:
+                # Single file - wrap in XML tags for consistency
+                file_path = processed_files[0]
+                combined_content = f"<file path='{file_path}'>\n{file_contents[0]}\n</file>"
+            else:
+                # Multiple files - use XML-style boundaries for each file
+                combined_parts: List[str] = []
+                for i, content in enumerate(file_contents):
+                    file_path = processed_files[i]
+                    combined_parts.append(f"<file path='{file_path}'>\n{content}\n</file>")
+                combined_content = "\n\n".join(combined_parts)
+
+            message_content: Dict[str, Any] = {"role": "user", "content": [{"type": "text", "text": combined_content}]}
+            self.message_handler._messages_append(message_content)
+
+        # Send summary message
+        summary: str
+        if processed_files and failed_files:
+            summary = f"Processed {len(processed_files)} files successfully, {len(failed_files)} failed"
+        elif processed_files:
+            summary = f"Successfully processed {len(processed_files)} file(s)"
         else:
-            self.message_handler._notify(
-                "error",
-                f"Failed to process file {file_path} Or Model is not supported",
-            )
-            return CommandResult(handled=True, clear_flag=True)
+            summary = f"Failed to process all {len(failed_files)} file(s)"
+
+        self.message_handler._notify("system_message", summary)
+
+        return CommandResult(handled=True, clear_flag=True)
```
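For reference, a minimal standalone sketch of the combining step in the version above. The paths and contents are hypothetical placeholders (not taken from this repository); an invocation like `/file notes.md src/main.py` would produce a single user message shaped like this:

```python
# Minimal sketch of the XML-style combining logic from the diff above.
# The paths and file contents here are hypothetical examples.
processed_files = ["notes.md", "src/main.py"]
file_contents = ["# Notes\n- item one", "print('hello')"]

combined_parts = []
for path, content in zip(processed_files, file_contents):
    combined_parts.append(f"<file path='{path}'>\n{content}\n</file>")
combined_content = "\n\n".join(combined_parts)

# Shape of the message appended to the conversation history.
message_content = {
    "role": "user",
    "content": [{"type": "text", "text": combined_content}],
}
print(message_content["content"][0]["text"])
```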
Review comment:

Do not process the file here; the LLM already handles the file message in the agent.format_message function. Only append the file content to the message as is.
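For context, a rough sketch of what this suggestion seems to describe, reusing the handler and agent names from the diff above. This is an illustration only, not the code that was merged, and it omits the empty-input and summary handling:

```python
# Illustration of the review suggestion (assumes the surrounding
# CommandProcessor class, imports, and message_handler from the diff above):
# let agent.format_message handle unsupported files and append each
# returned content block to the conversation exactly as produced.
def _handle_file_command(self, user_input: str) -> CommandResult:
    file_paths = [os.path.expanduser(p) for p in user_input[6:].strip().split()]
    for file_path in file_paths:
        file_content = self.message_handler.file_handler.process_file(file_path)
        if not file_content:
            from AgentCrew.modules.agents.base import MessageType

            # Fall back to the LLM-side handling
            file_content = self.message_handler.agent.format_message(
                MessageType.FileContent, {"file_uri": file_path}
            )
        if file_content:
            # Append the content as-is, one user message per file
            self.message_handler._messages_append(
                {"role": "user", "content": [file_content]}
            )
    return CommandResult(handled=True, clear_flag=True)
```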
Reply:

Reverted to the previous logic; just modified the for loop to support multiple files: {"role": "user", "content": [{"type": "text", "text": "<file file_path='..'> .... </file>"}]}
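Per that description, each processed file would presumably end up as its own user message shaped like the fragment quoted above. A small hypothetical example (placeholder path and contents, not taken from the repository):

```python
# Hypothetical per-file message matching the shape quoted in the reply;
# "docs/notes.md" and the snippet text are placeholders.
file_path = "docs/notes.md"
file_text = "...file contents..."

message = {
    "role": "user",
    "content": [
        {"type": "text", "text": f"<file file_path='{file_path}'>\n{file_text}\n</file>"}
    ],
}
print(message)
```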