Skip to content

Commit 94d669b

Browse files
committed
Update hacxgpt/ui/interface.py
1 parent 4151add commit 94d669b

File tree

1 file changed

+86
-13
lines changed

hacxgpt/ui/interface.py

Lines changed: 86 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -61,11 +61,22 @@ def show_msg(self, title: str, content: str, color: str = "white"):
6161
def get_input(self, label: str = "COMMAND", multiline: bool = False) -> str:
6262
"""
6363
Get input using prompt_toolkit.
64-
If multiline is True, user presses Esc+Enter or Alt+Enter to submit.
64+
If multiline is True, behavior is swapped:
65+
- Enter: Submit
66+
- Shift+Enter: New line
6567
"""
66-
prompt_text = [
67-
('class:prompt', f"┌──({label})-[~]\n└─> ")
68-
]
68+
from prompt_toolkit.key_binding import KeyBindings
69+
70+
kb = KeyBindings()
71+
72+
if multiline:
73+
@kb.add('enter')
74+
def _(event):
75+
event.current_buffer.validate_and_handle()
76+
77+
@kb.add('escape', 'enter')
78+
def _(event):
79+
event.current_buffer.insert_text('\n')
6980

7081
try:
7182
# We construct the prompt text manually for visual style
@@ -75,6 +86,7 @@ def get_input(self, label: str = "COMMAND", multiline: bool = False) -> str:
7586
"└─> ",
7687
style=self.pt_style,
7788
multiline=multiline,
89+
key_bindings=kb if multiline else None,
7890
prompt_continuation=lambda width, line_number, is_soft_wrap: '.' * (width - 1) + ' '
7991
)
8092
return user_input
@@ -85,34 +97,95 @@ def get_input(self, label: str = "COMMAND", multiline: bool = False) -> str:
8597

8698
def stream_markdown(self, title: str, content_generator):
8799
"""
88-
Renders Markdown content in real-time as it streams.
89-
No panels are used to avoid copy-paste issues with borders.
100+
Renders Markdown content in real-time as it streams,
101+
with specific support for <think> reasoning tags.
90102
"""
103+
from rich.console import Group
104+
91105
full_response = ""
106+
thinking_text = ""
107+
display_text = ""
108+
is_thinking = False
92109

93110
self.console.print(Rule(f"[bold cyan]{title}[/bold cyan]", style="cyan"))
94111

95112
with Live(
96113
Spinner("dots", text="Establishing neural link...", style="cyan"),
97114
console=self.console,
98-
refresh_per_second=10,
115+
refresh_per_second=12,
99116
transient=True
100117
) as live:
101118

102119
for chunk in content_generator:
103120
if not chunk: continue
104121
full_response += chunk
105-
# Show the actual markdown content as it streams
106-
live.update(Markdown(full_response, code_theme=Config.CODE_THEME))
122+
123+
# Handle thinking tags
124+
raw_text = full_response
125+
if "<think>" in raw_text:
126+
if "</think>" in raw_text:
127+
# Thinking completed
128+
parts = raw_text.split("</think>")
129+
thinking_text = parts[0].replace("<think>", "").strip()
130+
display_text = parts[1].strip()
131+
is_thinking = False
132+
else:
133+
# Still thinking
134+
thinking_text = raw_text.replace("<think>", "").strip()
135+
display_text = ""
136+
is_thinking = True
137+
else:
138+
# Normal response
139+
thinking_text = ""
140+
display_text = raw_text.strip()
141+
is_thinking = False
142+
143+
# Prepare the UI group
144+
ui_elements = []
145+
146+
if thinking_text:
147+
ui_elements.append(Panel(
148+
thinking_text,
149+
title="[dim]🧠 Model Reasoning[/]",
150+
border_style="dim cyan",
151+
subtitle="[dim]Thinking...[/]" if is_thinking else "[dim]Thought Process Captured[/]",
152+
width=self.console.width - 4
153+
))
154+
155+
if display_text:
156+
# Remove banners from streaming view if needed
157+
clean_display = display_text.replace("[HacxGPT]:", "").replace("[CODE]:", "").strip()
158+
ui_elements.append(Markdown(clean_display, code_theme=Config.CODE_THEME))
159+
160+
if not ui_elements:
161+
live.update(Spinner("dots", text="HacxGPT is processing...", style="cyan"))
162+
else:
163+
live.update(Group(*ui_elements))
107164

108165
if not full_response:
109166
self.console.print("[bold red]✗ Fatal Error: The neural link remained silent (No response).[/]")
110167

111-
# Clean format for display
112-
display_text = full_response.replace("[HacxGPT]:", "").replace("[CODE]:", "").strip()
168+
# Final clean render (static)
169+
# 1. Show the final thinking process if it existed
170+
final_thinking = ""
171+
final_response = full_response
113172

114-
# Render Markdown directly to console without panel
115-
md = Markdown(display_text, code_theme=Config.CODE_THEME)
173+
if "<think>" in full_response and "</think>" in full_response:
174+
parts = full_response.split("</think>")
175+
final_thinking = parts[0].replace("<think>", "").strip()
176+
final_response = parts[1].strip()
177+
178+
self.console.print(Panel(
179+
final_thinking,
180+
title="[bold cyan]🧠 THOUGHT PROCESS[/]",
181+
border_style="cyan",
182+
style="dim",
183+
padding=(1, 2)
184+
))
185+
186+
# 2. Show the final response
187+
clean_response = final_response.replace("[HacxGPT]:", "").replace("[CODE]:", "").strip()
188+
md = Markdown(clean_response, code_theme=Config.CODE_THEME)
116189
self.console.print(md)
117190
self.console.print(Rule(style="dim cyan"))
118191

0 commit comments

Comments (0)