Skip to content

Commit 6a830c0

Browse files
committed
add: prompt rendering with rich
1 parent 921fae9 commit 6a830c0

File tree

4 files changed

+87
-48
lines changed

4 files changed

+87
-48
lines changed

docs/help.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -152,6 +152,7 @@ Options:
152152
-u, --usage Show token usage
153153
-x, --extract Extract first fenced code block
154154
--xl, --extract-last Extract last fenced code block
155+
--render Render for terminal
155156
-h, --help Show this message and exit.
156157
```
157158

llm/cli.py

Lines changed: 29 additions & 48 deletions
Original file line numberDiff line numberDiff line change
@@ -41,6 +41,7 @@
4141
remove_alias,
4242
)
4343
from llm.models import _BaseConversation, ChainResponse
44+
from .cli_utils import prompt_output
4445

4546
from .migrations import migrate
4647
from .plugins import pm, load_plugins
@@ -471,6 +472,7 @@ def cli():
471472
is_flag=True,
472473
help="Extract last fenced code block",
473474
)
475+
@click.option("--render", is_flag=True, help="Render for terminal")
474476
def prompt(
475477
prompt,
476478
system,
@@ -502,6 +504,7 @@ def prompt(
502504
usage,
503505
extract,
504506
extract_last,
507+
render,
505508
):
506509
"""
507510
Execute a prompt
@@ -834,59 +837,37 @@ def read_prompt():
834837
if async_:
835838

836839
async def inner():
837-
if should_stream:
838-
response = prompt_method(
839-
prompt,
840-
attachments=resolved_attachments,
841-
system=system,
842-
schema=schema,
843-
fragments=resolved_fragments,
844-
system_fragments=resolved_system_fragments,
845-
**kwargs,
846-
)
847-
async for chunk in response:
848-
print(chunk, end="")
849-
sys.stdout.flush()
850-
print("")
851-
else:
852-
response = prompt_method(
853-
prompt,
854-
fragments=resolved_fragments,
855-
attachments=resolved_attachments,
856-
schema=schema,
857-
system=system,
858-
system_fragments=resolved_system_fragments,
859-
**kwargs,
860-
)
861-
text = await response.text()
862-
if extract or extract_last:
863-
text = (
864-
extract_fenced_code_block(text, last=extract_last) or text
865-
)
866-
print(text)
867-
return response
840+
return prompt_output(
841+
prompt_method,
842+
prompt,
843+
should_stream,
844+
render,
845+
extract,
846+
extract_last,
847+
resolved_fragments,
848+
resolved_attachments,
849+
system,
850+
schema,
851+
resolved_system_fragments,
852+
kwargs,
853+
)
868854

869855
response = asyncio.run(inner())
870856
else:
871-
response = prompt_method(
857+
response = prompt_output(
858+
prompt_method,
872859
prompt,
873-
fragments=resolved_fragments,
874-
attachments=resolved_attachments,
875-
system=system,
876-
schema=schema,
877-
system_fragments=resolved_system_fragments,
878-
**kwargs,
860+
should_stream,
861+
render,
862+
extract,
863+
extract_last,
864+
resolved_fragments,
865+
resolved_attachments,
866+
system,
867+
schema,
868+
resolved_system_fragments,
869+
kwargs,
879870
)
880-
if should_stream:
881-
for chunk in response:
882-
print(chunk, end="")
883-
sys.stdout.flush()
884-
print("")
885-
else:
886-
text = response.text()
887-
if extract or extract_last:
888-
text = extract_fenced_code_block(text, last=extract_last) or text
889-
print(text)
890871
# List of exceptions that should never be raised in pytest:
891872
except (ValueError, NotImplementedError) as ex:
892873
raise click.ClickException(str(ex))

llm/cli_utils.py

Lines changed: 56 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,56 @@
1+
from rich.console import Console
2+
from rich.live import Live
3+
from rich.markdown import Markdown
4+
5+
from llm import AsyncResponse, Attachment
6+
7+
from .utils import Fragment, extract_fenced_code_block
8+
9+
10+
def prompt_output(
    prompt_method,
    prompt: str,
    should_stream: bool,
    render: bool,
    extract: bool,
    extract_last: bool,
    resolved_fragments: list[Fragment],
    resolved_attachments: list[Attachment],
    system: str,
    schema: str,
    resolved_system_fragments: list[Fragment],
    kwargs: dict,
):
    """
    Execute *prompt_method* and print its output to the terminal.

    Parameters:
        prompt_method: the bound model method to call (e.g. ``model.prompt``).
            NOTE(review): this was previously annotated ``AsyncResponse``,
            which is wrong — it is the callable that *produces* the response.
        prompt: the user prompt text.
        should_stream: stream chunks incrementally instead of printing once.
        render: render output as Markdown via rich instead of plain text.
        extract / extract_last: extract the first/last fenced code block
            from a non-streamed response (silently ignored while streaming,
            matching the previous inline behavior in cli.py).
        resolved_fragments, resolved_attachments, system, schema,
        resolved_system_fragments, kwargs: forwarded to ``prompt_method``.

    Returns the response object so the caller can log it.

    NOTE(review): the async code path in cli.py returns this function's
    result from ``async def inner()``; here the response is iterated with a
    synchronous ``for`` and read via ``response.text()`` without ``await`` —
    confirm AsyncResponse supports that, otherwise the async path is broken.
    """
    response = prompt_method(
        prompt,
        fragments=resolved_fragments,
        attachments=resolved_attachments,
        system=system,
        schema=schema,
        system_fragments=resolved_system_fragments,
        **kwargs,
    )

    console = Console()

    if should_stream:
        accumulated_text = ""
        with Live(accumulated_text, console=console, refresh_per_second=10) as live:
            for chunk in response:
                accumulated_text += chunk
                # Re-render the whole accumulated text each chunk; Markdown
                # cannot be rendered incrementally chunk-by-chunk.
                display_content = (
                    Markdown(accumulated_text) if render else accumulated_text
                )
                live.update(display_content)
    else:
        text = response.text()
        if extract or extract_last:
            # Fall back to the full text when no fenced block is found.
            text = extract_fenced_code_block(text, last=extract_last) or text
        console.print(Markdown(text) if render else text)

    return response

pyproject.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -39,6 +39,7 @@ dependencies = [
3939
"pip",
4040
"pyreadline3; sys_platform == 'win32'",
4141
"puremagic",
42+
"rich"
4243
]
4344

4445
[project.urls]

0 commit comments

Comments
 (0)