Skip to content

Commit 6c3cf9b

Browse files
committed
code update
1 parent 1dcd352 commit 6c3cf9b

File tree

9 files changed

+77
-50
lines changed

9 files changed

+77
-50
lines changed

.github/demo.gif

4.49 MB
Loading

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -235,3 +235,4 @@ pyrightconfig.json
235235

236236
.idea/
237237
.vscode/
238+
brew_formula/

README.md

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,7 @@
11
# Open Codex
22

3-
<h1 align="center">Open Codex Codex CLI</h1>
3+
<h1 align="center">Open Codex CLI</h1>
44
<p align="center">Lightweight coding agent that runs in your terminal</p>
5-
65
<p align="center"><code>
76
brew tap codingmoh/open-codex
87
brew install open-codex

pyproject.toml

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
[project]
22
name = "open_codex"
3-
version = "0.1.9"
4-
description = "CLI with OSS LLM integration"
3+
version = "0.1.12"
4+
description = "AI CLI with OSS LLM integration"
55
readme = "README.md"
66
requires-python = ">=3.11"
77
dependencies = [
@@ -23,3 +23,6 @@ package-dir = {"" = "src"}
2323

2424
[tool.setuptools.packages.find]
2525
where = ["src"]
26+
27+
[tool.setuptools.package-data]
28+
"open_codex" = ["resources/*.txt"]

src/open_codex/agent_builder.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
from importlib.resources import files
12

23
from open_codex.agents.phi_4_mini import AgentPhi4Mini
34
from open_codex.interfaces.llm_agent import LLMAgent
@@ -6,8 +7,8 @@ class AgentBuilder:
67

78
@staticmethod
89
def get_agent() -> LLMAgent:
9-
prompt = AgentBuilder.read_file("src/open_codex/resources/prompt.txt")
10-
return AgentPhi4Mini(system_prompt=prompt)
10+
system_prompt = files("open_codex.resources").joinpath("prompt.txt").read_text(encoding="utf-8")
11+
return AgentPhi4Mini(system_prompt=system_prompt)
1112

1213
@staticmethod
1314
def read_file(file_path: str) -> str:

src/open_codex/agents/phi_4_mini.py

Lines changed: 16 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,6 @@
33
from open_codex.interfaces.llm_agent import LLMAgent
44
import contextlib
55
import os
6-
import sys
76

87

98
class AgentPhi4Mini(LLMAgent):
@@ -13,7 +12,7 @@ def __init__(self, system_prompt: str):
1312
# which prints a lot of warnings and errors to stderr
1413
# when loading the model
1514
# this is a temporary solution until the library is fixed
16-
with AgentPhi4Mini.suppress_stderr():
15+
with AgentPhi4Mini.suppress_native_stderr():
1716
self.llm: Llama = Llama.from_pretrained( # type: ignore
1817
repo_id="lmstudio-community/Phi-4-mini-instruct-GGUF",
1918
filename="Phi-4-mini-instruct-Q3_K_L.gguf",
@@ -25,7 +24,8 @@ def one_shot_mode(self, user_input: str) -> str:
2524
chat_history = [{"role": "system", "content": self.system_prompt}]
2625
chat_history.append({"role": "user", "content": user_input})
2726
full_prompt = self.format_chat(chat_history)
28-
output_raw = self.llm(prompt=full_prompt, max_tokens=100, temperature=0.2, stream=False)
27+
with AgentPhi4Mini.suppress_native_stderr():
28+
output_raw = self.llm(prompt=full_prompt, max_tokens=100, temperature=0.2, stream=False)
2929

3030
# unfortunately llama_cpp has a union type for the output
3131
output = cast(CreateCompletionResponse, output_raw)
@@ -44,11 +44,16 @@ def format_chat(self, messages: List[dict[str, str]]) -> str:
4444

4545
@contextlib.contextmanager
4646
@staticmethod
47-
def suppress_stderr():
48-
with open(os.devnull, 'w') as devnull:
49-
old_stderr = sys.stderr
50-
sys.stderr = devnull
51-
try:
52-
yield
53-
finally:
54-
sys.stderr = old_stderr
47+
def suppress_native_stderr():
48+
"""
49+
Redirect C-level stderr (fd 2) into /dev/null, so llama.cpp logs vanish.
50+
"""
51+
devnull_fd = os.open(os.devnull, os.O_WRONLY)
52+
saved_stderr_fd = os.dup(2)
53+
try:
54+
os.dup2(devnull_fd, 2)
55+
yield
56+
finally:
57+
os.dup2(saved_stderr_fd, 2)
58+
os.close(devnull_fd)
59+
os.close(saved_stderr_fd)

src/open_codex/main.py

Lines changed: 41 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -36,48 +36,61 @@ def get_keypress():
3636
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
3737
return key
3838

39+
def print_response(command: str):
40+
print(f"{BLUE}Command found:")
41+
print(f"{BLUE}=====================")
42+
print(f"{GREEN}{command}{RESET}") # Print the command in green
43+
print(f"{BLUE}====================={RESET}")
44+
45+
print(f"{BLUE}What do you want to do with this command?{RESET}")
46+
print(f"{BLUE}[c] Copy [e] Execute [a] Abort{RESET}")
47+
print(f"{BLUE}Press key: ", end="", flush=True)
48+
choice = get_keypress().lower()
49+
print(f"{RESET}") # Clear the line after the prompt
50+
51+
if choice == "e":
52+
print(f"{BLUE}Executing command: {command}{RESET}")
53+
result = subprocess.run(command, shell=True, capture_output=True, text=True)
54+
print(f"{GREEN}Command output: {result.stdout}{RESET}")
55+
if result.stderr:
56+
print(f"{RED}Error: {result.stderr}{RESET}")
57+
58+
elif choice == "c":
59+
pyperclip.copy(command)
60+
print(f"{GREEN}Command copied to clipboard! Paste it manually in your terminal.{RESET}")
61+
62+
elif choice == "a":
63+
print(f"{BLUE}Aborted.{RESET}")
64+
else:
65+
print(f"{RED}Unknown choice. Nothing happened.{RESET}")
66+
3967
def one_shot_mode(agent: LLMAgent, prompt: str):
4068
try:
4169
response = agent.one_shot_mode(prompt)
42-
print(f"{GREEN}: {response}{RESET}")
43-
command = response.strip()
44-
45-
print(f"{BLUE}What do you want to do with this command?{RESET}")
46-
print(f"{BLUE}[c] Copy [e] Execute [a] Abort{RESET}")
47-
print(f"{BLUE}Press key: ", end="", flush=True)
48-
choice = get_keypress().lower()
49-
print(f"{RESET}") # Clear the line after the prompt
50-
51-
if choice == "e":
52-
print(f"{BLUE}Executing command: {command}{RESET}")
53-
result = subprocess.run(command, shell=True, capture_output=True, text=True)
54-
print(f"{GREEN}Command output: {result.stdout}{RESET}")
55-
if result.stderr:
56-
print(f"{RED}Error: {result.stderr}{RESET}")
57-
58-
elif choice == "c":
59-
pyperclip.copy(command)
60-
print(f"{GREEN}Command copied to clipboard! Paste it manually in your terminal.{RESET}")
61-
62-
elif choice == "a":
63-
print(f"{BLUE}Aborted.{RESET}")
64-
else:
65-
print(f"{RED}Unknown choice. Nothing happened.{RESET}")
66-
70+
print_response(response)
6771
except Exception as e:
6872
print(f"{RED}Error: {e}{RESET}")
6973

74+
def print_help_message():
75+
"""Print help message with usage examples."""
76+
print(f"{BLUE}Open Codex - Natural Language to CLI commands{RESET}")
77+
print(f"{BLUE}Usage examples:{RESET}")
78+
print(f"{GREEN}open-codex \"list all files in current directory\"")
79+
print(f"{GREEN}open-codex \"find all python files modified in the last week\"")
80+
print(f"{GREEN}open-codex \"create a tarball of the src directory\"")
81+
print()
82+
7083
def main():
7184
parser = argparse.ArgumentParser(description="Open Codex - Natural Language to CLI commands")
7285
parser.add_argument("prompt", nargs="*", help="Optional prompt for one-shot mode")
7386
args = parser.parse_args()
74-
7587
prompt = " ".join(args.prompt).strip()
76-
if not prompt:
77-
print("Please provide a prompt")
88+
if not prompt or prompt == "--help":
89+
print_help_message()
7890
sys.exit(1)
7991

8092
agent = AgentBuilder.get_agent()
93+
print(f"{BLUE}Using model: phi-4-mini-instruct{RESET}")
8194
one_shot_mode(agent, prompt)
8295

8396
if __name__ == "__main__":

src/open_codex/resources/__init__.py

Whitespace-only changes.
Lines changed: 10 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,10 @@
1-
You are a helpful AI that converts natural language instructions into valid shell commands. Always provide a concise shell command.
2-
No shebang, no explanations, no extra text. Just the command. Think hard about the command you are going to provide, and make sure it is the best one.
3-
Stick to the easiest command possible, and if there is a simpler one, provide it.
4-
If the command is not straightforward, and there is an easier one, that can be run using a 3rd party tool,
5-
INFORM the user about it, and provide the command to install it.
1+
You are a helpful AI that helps the user identify the shell command they are looking for.
2+
Always provide a concise shell command.
3+
4+
Important:
5+
- Think hard about the command you are going to provide, and make sure it is the best one.
6+
- No shebang
7+
- No explanations
8+
- No extra text. Just the command.
9+
10+
The user uses macOS Catalina.

0 commit comments

Comments
 (0)