
Commit e9c4898

added local model support with phi4-mini
1 parent 4122250 commit e9c4898

File tree

11 files changed: +446 −338 lines

.gitignore

Lines changed: 4 additions & 1 deletion
@@ -33,7 +33,8 @@ wheels/
 .LSOverride
 
 # Icon must end with two \r
-Icon
+Icon
+
 
 # Thumbnails
 ._*
@@ -232,3 +233,5 @@ pyrightconfig.json
 
 # End of https://www.toptal.com/developers/gitignore/api/python,macos,linux
 
+.idea/
+.vscode/

main.py

Lines changed: 0 additions & 147 deletions
This file was deleted.

pyproject.toml

Lines changed: 7 additions & 5 deletions
@@ -1,17 +1,19 @@
 [project]
-name = "ghost-in-the-shell"
+name = "open_codex"
 version = "0.1.7"
-description = "CLI with OpenAI integration"
+description = "CLI with OSS LLM integration"
 readme = "README.md"
 requires-python = ">=3.11"
 dependencies = [
-    "openai>=1.69.0",
+    "huggingface-hub>=0.30.2",
+    "llama-cpp-python>=0.3.8",
     "prompt_toolkit",
+    "pyperclip>=1.9.0",
 ]
 
 [project.scripts]
-ai = "main:main"
+ai = "open_codex.main:main"
 
 [build-system]
 requires = ["setuptools", "wheel"]
-build-backend = "setuptools.build_meta"
+build-backend = "setuptools.build_meta"
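
For orientation, the updated [project.scripts] entry wires the ai command to open_codex.main:main, so after installation the command simply dispatches into the package. A rough Python equivalent of the wrapper setuptools generates (a sketch, not the literal generated file):

# roughly what the generated `ai` console script does after installation
import sys
from open_codex.main import main

if __name__ == "__main__":
    sys.exit(main())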

src/open_codex/agent_builder.py

Lines changed: 16 additions & 0 deletions
@@ -0,0 +1,16 @@
+
+from open_codex.agents.phi_4_mini import AgentPhi4Mini
+from open_codex.interfaces.llm_agent import LLMAgent
+
+class AgentBuilder:
+
+    @staticmethod
+    def get_agent() -> LLMAgent:
+        prompt = AgentBuilder.read_file("src/open_codex/resources/prompt.txt")
+        return AgentPhi4Mini(system_prompt=prompt)
+
+    @staticmethod
+    def read_file(file_path: str) -> str:
+        with open(file_path, 'r') as file:
+            content = file.read()
+        return content
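
One caveat worth flagging: read_file resolves src/open_codex/resources/prompt.txt against the current working directory, so the lookup only succeeds when the tool is run from the repository root. A packaging-safe alternative, as a sketch (this assumes prompt.txt is shipped as package data under open_codex.resources, which this commit does not configure):

# a minimal sketch, assuming prompt.txt is installed as package data
from importlib import resources

def read_prompt() -> str:
    return (
        resources.files("open_codex.resources")
        .joinpath("prompt.txt")
        .read_text(encoding="utf-8")
    )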

src/open_codex/agents/__init__.py

Whitespace-only changes.
src/open_codex/agents/phi_4_mini.py

Lines changed: 54 additions & 0 deletions

@@ -0,0 +1,54 @@
+from typing import List, cast
+from llama_cpp import CreateCompletionResponse, Llama
+from open_codex.interfaces.llm_agent import LLMAgent
+import contextlib
+import os
+import sys
+
+
+class AgentPhi4Mini(LLMAgent):
+    def __init__(self, system_prompt: str):
+        # suppress the stderr output from llama_cpp, which prints
+        # a lot of warnings and errors to stderr while loading the
+        # model; this is a temporary workaround until the library
+        # is fixed
+        with AgentPhi4Mini.suppress_stderr():
+            self.llm: Llama = Llama.from_pretrained(  # type: ignore
+                repo_id="lmstudio-community/Phi-4-mini-instruct-GGUF",
+                filename="Phi-4-mini-instruct-Q3_K_L.gguf",
+                additional_files=[],
+            )
+        self.system_prompt = system_prompt
+
+    def one_shot_mode(self, user_input: str) -> str:
+        chat_history = [{"role": "system", "content": self.system_prompt}]
+        chat_history.append({"role": "user", "content": user_input})
+        full_prompt = self.format_chat(chat_history)
+        output_raw = self.llm(prompt=full_prompt, max_tokens=100, temperature=0.2, stream=False)
+
+        # unfortunately llama_cpp declares a union type for the output
+        output = cast(CreateCompletionResponse, output_raw)
+
+        assistant_reply: str = output["choices"][0]["text"].strip()
+        return assistant_reply
+
+    def format_chat(self, messages: List[dict[str, str]]) -> str:
+        chat_prompt = ""
+        for msg in messages:
+            # tag each message with its own role (system, user, assistant)
+            role_tag = msg["role"]
+            chat_prompt += f"<|{role_tag}|>\n{msg['content']}\n"
+        chat_prompt += "<|assistant|>\n"
+        return chat_prompt
+
+    @staticmethod
+    @contextlib.contextmanager
+    def suppress_stderr():
+        with open(os.devnull, 'w') as devnull:
+            old_stderr = sys.stderr
+            sys.stderr = devnull
+            try:
+                yield
+            finally:
+                sys.stderr = old_stderr
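
For intuition, format_chat flattens the chat history into the flat tagged string the model is prompted with. A quick illustration (no model load is needed, since format_chat reads no instance state; the message contents are made up):

from open_codex.agents.phi_4_mini import AgentPhi4Mini

# format_chat never touches self, so it can be invoked unbound for a quick check
prompt = AgentPhi4Mini.format_chat(
    None,  # type: ignore[arg-type]  # self is unused inside format_chat
    [
        {"role": "system", "content": "Convert instructions to shell commands."},
        {"role": "user", "content": "show disk usage"},
    ],
)
print(prompt)
# <|system|>
# Convert instructions to shell commands.
# <|user|>
# show disk usage
# <|assistant|>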

src/open_codex/interfaces/__init__.py

Whitespace-only changes.
src/open_codex/interfaces/llm_agent.py

Lines changed: 7 additions & 0 deletions

@@ -0,0 +1,7 @@
+from abc import ABC, abstractmethod
+
+class LLMAgent(ABC):
+    @abstractmethod
+    def one_shot_mode(self, user_input: str) -> str:
+        pass
+
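This abstract base is what keeps the CLI decoupled from any particular backend: AgentBuilder can hand back anything that satisfies it. A minimal conforming implementation, as a sketch (the class name and canned reply are hypothetical, purely for illustration):

from open_codex.interfaces.llm_agent import LLMAgent

class CannedAgent(LLMAgent):
    """Hypothetical backend that skips the model entirely;
    handy for trying the CLI flow without downloading weights."""

    def one_shot_mode(self, user_input: str) -> str:
        # always suggest the same harmless command
        return "echo 'stub response'"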

src/open_codex/main.py

Lines changed: 84 additions & 0 deletions
@@ -0,0 +1,84 @@
+import sys
+import argparse
+import subprocess
+import pyperclip
+
+from open_codex.agent_builder import AgentBuilder
+from open_codex.interfaces.llm_agent import LLMAgent
+
+
+GREEN = "\033[92m"
+RED = "\033[91m"
+BLUE = "\033[94m"
+RESET = "\033[0m"
+
+# Capture a single keypress from the terminal and return it as a string.
+# Works on both Windows and Unix systems.
+# Windows
+if sys.platform == "win32":
+    import msvcrt
+
+    def get_keypress():
+        return msvcrt.getch().decode("utf-8")
+
+# Unix (Linux/macOS)
+else:
+    import termios
+    import tty
+
+    def get_keypress():
+        fd = sys.stdin.fileno()
+        old_settings = termios.tcgetattr(fd)
+        try:
+            tty.setraw(fd)
+            key = sys.stdin.read(1)
+        finally:
+            termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
+        return key
+
+def one_shot_mode(agent: LLMAgent, prompt: str):
+    try:
+        response = agent.one_shot_mode(prompt)
+        print(f"{GREEN}: {response}{RESET}")
+        command = response.strip()
+
+        print(f"{BLUE}What do you want to do with this command?{RESET}")
+        print(f"{BLUE}[c] Copy [e] Execute [a] Abort{RESET}")
+        print(f"{BLUE}Press key: ", end="", flush=True)
+        choice = get_keypress().lower()
+        print(f"{RESET}")  # clear the line after the prompt
+
+        if choice == "e":
+            print(f"{BLUE}Executing command: {command}{RESET}")
+            result = subprocess.run(command, shell=True, capture_output=True, text=True)
+            print(f"{GREEN}Command output: {result.stdout}{RESET}")
+            if result.stderr:
+                print(f"{RED}Error: {result.stderr}{RESET}")
+
+        elif choice == "c":
+            pyperclip.copy(command)
+            print(f"{GREEN}Command copied to clipboard! Paste it manually in your terminal.{RESET}")
+
+        elif choice == "a":
+            print(f"{BLUE}Aborted.{RESET}")
+        else:
+            print(f"{RED}Unknown choice. Nothing happened.{RESET}")
+
+    except Exception as e:
+        print(f"{RED}Error: {e}{RESET}")
+
+def main():
+    parser = argparse.ArgumentParser(description="Open Codex - Natural Language to CLI commands")
+    parser.add_argument("prompt", nargs="*", help="Optional prompt for one-shot mode")
+    args = parser.parse_args()
+
+    prompt = " ".join(args.prompt).strip()
+    if not prompt:
+        print("Please provide a prompt")
+        sys.exit(1)
+
+    agent = AgentBuilder.get_agent()
+    one_shot_mode(agent, prompt)
+
+if __name__ == "__main__":
+    main()
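
Because get_keypress and one_shot_mode are plain module-level names, the interactive flow can be exercised end to end without a model or a keyboard. A sketch (the stub agent and the patched keypress are assumptions for illustration, not part of this commit):

from open_codex import main as cli
from open_codex.interfaces.llm_agent import LLMAgent

class StubAgent(LLMAgent):
    # hypothetical agent that returns a fixed command
    def one_shot_mode(self, user_input: str) -> str:
        return "ls -lah"

cli.get_keypress = lambda: "a"  # auto-answer [a]bort so nothing executes
cli.one_shot_mode(StubAgent(), "list all files with sizes")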
src/open_codex/resources/prompt.txt

Lines changed: 5 additions & 0 deletions

@@ -0,0 +1,5 @@
+You are a helpful AI that converts natural language instructions into valid shell commands. Always provide a concise shell command.
+No shebang, no explanations, no extra text. Just the command. Think hard about the command you are going to provide, and make sure it is the best one.
+Stick to the easiest command possible, and if there is a simpler one, provide it.
+If the command is not straightforward and there is an easier one that can be run using a third-party tool,
+INFORM the user about it, and provide the command to install it.
