Commit 11cdf46

Merge pull request #1897 from Ruyuan37/windows_terminal_adaptation
[terminal.py] Add Windows Terminal support to terminal.py
2 parents fc6e843 + de17c62 commit 11cdf46

File tree: 3 files changed (+35, -22 lines)

.pre-commit-config.yaml

Lines changed: 3 additions & 3 deletions
@@ -5,7 +5,7 @@ default_stages: [ commit ]
 # 2. pre-commit install
 # 3. pre-commit run --all-files # make sure all files are clean
 repos:
-  - repo: https://github.com/pycqa/isort
+  - repo: git@github.com:pycqa/isort.git
     rev: 5.11.5
     hooks:
       - id: isort
@@ -15,14 +15,14 @@ repos:
                 .*__init__\.py$
             )
-  - repo: https://github.com/astral-sh/ruff-pre-commit
+  - repo: git@github.com:astral-sh/ruff-pre-commit.git
     # Ruff version.
     rev: v0.0.284
     hooks:
       - id: ruff
         args: [ --fix ]

-  - repo: https://github.com/psf/black
+  - repo: git@github.com:psf/black.git
     rev: 23.3.0
     hooks:
       - id: black

metagpt/tools/libs/terminal.py

Lines changed: 30 additions & 17 deletions
@@ -1,6 +1,7 @@
 import asyncio
 import os
 import re
+import sys
 from asyncio import Queue
 from asyncio.subprocess import PIPE, STDOUT
 from typing import Optional
@@ -22,8 +23,17 @@ class Terminal:
     """

     def __init__(self):
-        self.shell_command = ["bash"]  # FIXME: should consider windows support later
-        self.command_terminator = "\n"
+        if sys.platform.startswith("win"):
+            self.shell_command = ["cmd.exe"]  # Windows
+            self.executable = None
+            self.command_terminator = "\r\n"
+            self.pwd_command = "cd"
+        else:
+            self.shell_command = ["bash"]  # Linux / macOS
+            self.executable = "bash"
+            self.command_terminator = "\n"
+            self.pwd_command = "pwd"
+
         self.stdout_queue = Queue(maxsize=1000)
         self.observer = TerminalReporter()
         self.process: Optional[asyncio.subprocess.Process] = None
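For reference, a minimal standalone sketch of the same platform branch; the helper name pick_shell_settings is illustrative and not part of terminal.py:

    import sys

    # Illustrative helper mirroring the __init__ branch above (not part of the module).
    def pick_shell_settings():
        if sys.platform.startswith("win"):
            # cmd.exe is launched directly, so no separate `executable` is passed to asyncio.
            return {"shell_command": ["cmd.exe"], "executable": None,
                    "command_terminator": "\r\n", "pwd_command": "cd"}
        # Linux / macOS: a persistent bash session.
        return {"shell_command": ["bash"], "executable": "bash",
                "command_terminator": "\n", "pwd_command": "pwd"}

    print(pick_shell_settings())  # e.g. {'shell_command': ['bash'], ...} on Linux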
@@ -41,17 +51,17 @@ async def _start_process(self):
             stdin=PIPE,
             stdout=PIPE,
             stderr=STDOUT,
-            executable="bash",
+            executable=self.executable,
             env=os.environ.copy(),
-            cwd=DEFAULT_WORKSPACE_ROOT.absolute(),
+            cwd=str(DEFAULT_WORKSPACE_ROOT) if sys.platform.startswith("win") else DEFAULT_WORKSPACE_ROOT,  # Windows
         )
         await self._check_state()

     async def _check_state(self):
         """
-        Check the state of the terminal, e.g. the current directory of the terminal process. Useful for agent to understand.
+        Check the state of the terminal, e.g. the current directory.
         """
-        output = await self.run_command("pwd")
+        output = await self.run_command(self.pwd_command)
         logger.info("The terminal is at:", output)

     async def run_command(self, cmd: str, daemon=False) -> str:
@@ -74,20 +84,21 @@ async def run_command(self, cmd: str, daemon=False) -> str:
         output = ""
         # Remove forbidden commands
         commands = re.split(r"\s*&&\s*", cmd)
+        skip_cmd = "echo Skipped" if sys.platform.startswith("win") else "true"
         for cmd_name, reason in self.forbidden_commands.items():
             # "true" is a pass command in linux terminal.
             for index, command in enumerate(commands):
                 if cmd_name in command:
-                    output += f"Failed to execut {command}. {reason}\n"
-                    commands[index] = "true"
+                    output += f"Failed to execute {command}. {reason}\n"
+                    commands[index] = skip_cmd
         cmd = " && ".join(commands)
-
         # Send the command
         self.process.stdin.write((cmd + self.command_terminator).encode())
-        self.process.stdin.write(
-            f'echo "{END_MARKER_VALUE}"{self.command_terminator}'.encode()  # write EOF
-        )  # Unique marker to signal command end
+
+        marker_cmd = f"echo {END_MARKER_VALUE}"
+        self.process.stdin.write((marker_cmd + self.command_terminator).encode())  # Unique marker to signal command end
         await self.process.stdin.drain()
+
         if daemon:
             asyncio.create_task(self._read_and_process_output(cmd))
         else:
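The marker trick used above (send the command, then echo a unique marker, then read stdout until the marker shows up) can be sketched in isolation roughly as follows. This shows only the POSIX side, and MARKER is an illustrative stand-in for the module's END_MARKER_VALUE:

    import asyncio
    from asyncio.subprocess import PIPE, STDOUT

    MARKER = "__CMD_DONE__"  # illustrative stand-in for END_MARKER_VALUE

    async def run_once(cmd: str) -> str:
        # One bash process per call; terminal.py keeps a single persistent process instead.
        proc = await asyncio.create_subprocess_exec("bash", stdin=PIPE, stdout=PIPE, stderr=STDOUT)
        proc.stdin.write((cmd + "\n").encode())
        proc.stdin.write((f"echo {MARKER}\n").encode())  # marker signals end of output
        await proc.stdin.drain()
        out = []
        while True:
            line = (await proc.stdout.readline()).decode(errors="ignore")
            if MARKER in line:
                break
            out.append(line)
        proc.stdin.close()
        await proc.wait()
        return "".join(out)

    print(asyncio.run(run_once("echo hello")))  # -> hello

On Windows the same idea applies with cmd.exe and "\r\n", which is why command_terminator is chosen per platform in __init__.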
@@ -116,7 +127,8 @@ async def execute_in_conda_env(self, cmd: str, env, daemon=False) -> str:
         This function wraps `run_command`, prepending the necessary Conda activation commands
         to ensure the specified environment is active for the command's execution.
         """
-        cmd = f"conda run -n {env} {cmd}"
+        # windows & linux conda run
+        cmd = f"conda activate {env} && {cmd}" if sys.platform.startswith("win") else f"conda run -n {env} {cmd}"
         return await self.run_command(cmd, daemon=daemon)

     async def get_stdout_output(self) -> str:
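A rough sketch of the per-platform command construction above, with env and cmd as placeholders:

    import sys

    def wrap_in_conda_env(cmd: str, env: str) -> str:
        # Assumption: `conda activate` works inside the interactive cmd.exe session on Windows,
        # while `conda run -n` is kept for Linux / macOS as before.
        if sys.platform.startswith("win"):
            return f"conda activate {env} && {cmd}"
        return f"conda run -n {env} {cmd}"

    print(wrap_in_conda_env("python --version", "myenv"))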
@@ -147,10 +159,10 @@ async def _read_and_process_output(self, cmd, daemon=False) -> str:
                 continue
             *lines, tmp = output.splitlines(True)
             for line in lines:
-                line = line.decode()
+                line = line.decode(errors="ignore")
                 ix = line.rfind(END_MARKER_VALUE)
                 if ix >= 0:
-                    line = line[0:ix]
+                    line = line[:ix]
                 if line:
                     await observer.async_report(line, "output")
                     # report stdout in real-time
@@ -164,8 +176,9 @@

     async def close(self):
         """Close the persistent shell process."""
-        self.process.stdin.close()
-        await self.process.wait()
+        if self.process:
+            self.process.stdin.close()
+            await self.process.wait()


 @register_tool(include_functions=["run"])

metagpt/utils/token_counter.py

Lines changed: 2 additions & 2 deletions
@@ -113,10 +113,10 @@
     "doubao-pro-128k-240515": {"prompt": 0.0007, "completion": 0.0013},
     "llama3-70b-llama3-70b-instruct": {"prompt": 0.0, "completion": 0.0},
     "llama3-8b-llama3-8b-instruct": {"prompt": 0.0, "completion": 0.0},
-    "llama-4-Scout-17B-16E-Instruct-FP8" : {"prompt": 0.0, "completion": 0.0}, # start, for Llama API
+    "llama-4-Scout-17B-16E-Instruct-FP8": {"prompt": 0.0, "completion": 0.0},  # start, for Llama API
     "llama-4-Maverick-17B-128E-Instruct-FP8": {"prompt": 0.0, "completion": 0.0},
     "llama-3.3-8B-Instruct": {"prompt": 0.0, "completion": 0.0},
-    "llama-3.3-70B-Instruct": {"prompt": 0.0, "completion": 0.0}, # end, for Llama API
+    "llama-3.3-70B-Instruct": {"prompt": 0.0, "completion": 0.0},  # end, for Llama API
 }