Skip to content

Commit 293b729

Browse files
committed
Backwards compatible refactor + make sure tests pass
1 parent 4e75448 commit 293b729

File tree

6 files changed

+2754
-2693
lines changed

6 files changed

+2754
-2693
lines changed

clai/README.md

Lines changed: 15 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -53,27 +53,19 @@ Either way, running `clai` will start an interactive session where you can chat
5353
## Help
5454

5555
```
56-
usage: clai [-h] [-m [MODEL]] [-a AGENT] [-l] [-t [CODE_THEME]] [--no-stream] [--version] [prompt]
57-
58-
Pydantic AI CLI v...
59-
60-
Special prompts:
61-
* `/exit` - exit the interactive mode (ctrl-c and ctrl-d also work)
62-
* `/markdown` - show the last markdown output of the last question
63-
* `/multiline` - toggle multiline mode
64-
65-
positional arguments:
66-
prompt AI Prompt, if omitted fall into interactive mode
67-
68-
options:
69-
-h, --help show this help message and exit
70-
-m [MODEL], --model [MODEL]
71-
Model to use, in format "<provider>:<model>" e.g. "openai:gpt-4.1" or "anthropic:claude-sonnet-4-0". Defaults to "openai:gpt-4.1".
72-
-a AGENT, --agent AGENT
73-
Custom Agent to use, in format "module:variable", e.g. "mymodule.submodule:my_agent"
74-
-l, --list-models List all available models and exit
75-
-t [CODE_THEME], --code-theme [CODE_THEME]
76-
Which colors to use for code, can be "dark", "light" or any theme from pygments.org/styles/. Defaults to "dark" which works well on dark terminals.
77-
--no-stream Disable streaming from the model
78-
--version Show version and exit
56+
Usage: clai [OPTIONS] [PROMPT]
57+
58+
Options:
59+
-m, --model TEXT Model to use, in format "<provider>:<model>" e.g.
60+
"openai:gpt-4.1" or "anthropic:claude-sonnet-4-0".
61+
Defaults to "openai:gpt-4.1".
62+
-a, --agent TEXT Custom Agent to use, in format "module:variable",
63+
e.g. "mymodule.submodule:my_agent"
64+
-l, --list-models List all available models and exit
65+
-t, --code-theme TEXT Which colors to use for code, can be "dark", "light"
66+
or any theme from pygments.org/styles/. Defaults to
67+
"dark" which works well on dark terminals.
68+
--no-stream Disable streaming from the model
69+
--version Show version and exit
70+
-h, --help Show this message and exit.
7971
```

clai/pyproject.toml

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -49,6 +49,9 @@ requires-python = ">=3.9"
4949
[tool.hatch.metadata.hooks.uv-dynamic-versioning]
5050
dependencies = [
5151
"pydantic-ai=={{ version }}",
52+
"rich>=13",
53+
"prompt-toolkit>=3",
54+
"click>=8.0.0",
5255
]
5356

5457
[tool.hatch.metadata]

package-lock.json

Lines changed: 6 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

pydantic_ai_slim/pydantic_ai/_cli.py

Lines changed: 126 additions & 65 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,5 @@
11
from __future__ import annotations as _annotations
22

3-
import argparse
43
import asyncio
54
import importlib
65
import os
@@ -10,7 +9,7 @@
109
from contextlib import ExitStack
1110
from datetime import datetime, timezone
1211
from pathlib import Path
13-
from typing import Any, cast
12+
from typing import Any
1413

1514
from typing_inspection.introspection import get_literal_values
1615

@@ -23,7 +22,7 @@
2322
from .output import OutputDataT
2423

2524
try:
26-
import argcomplete
25+
import click
2726
from prompt_toolkit import PromptSession
2827
from prompt_toolkit.auto_suggest import AutoSuggestFromHistory, Suggestion
2928
from prompt_toolkit.buffer import Buffer
@@ -38,7 +37,7 @@
3837
from rich.text import Text
3938
except ImportError as _import_error:
4039
raise ImportError(
41-
'Please install `rich`, `prompt-toolkit` and `argcomplete` to use the Pydantic AI CLI, '
40+
'Please install `rich`, `prompt-toolkit` and `click` to use the Pydantic AI CLI, '
4241
'you can use the `cli` optional group — `pip install "pydantic-ai-slim[cli]"`'
4342
) from _import_error
4443

@@ -64,7 +63,13 @@ class SimpleCodeBlock(CodeBlock):
6463
def __rich_console__(self, console: Console, options: ConsoleOptions) -> RenderResult:
6564
code = str(self.text).rstrip()
6665
yield Text(self.lexer_name, style='dim')
67-
yield Syntax(code, self.lexer_name, theme=self.theme, background_color='default', word_wrap=True)
66+
yield Syntax(
67+
code,
68+
self.lexer_name,
69+
theme=self.theme,
70+
background_color='default',
71+
word_wrap=True,
72+
)
6873
yield Text(f'/{self.lexer_name}', style='dim')
6974

7075

@@ -101,119 +106,171 @@ def cli_exit(prog_name: str = 'pai'): # pragma: no cover
101106
sys.exit(cli(prog_name=prog_name))
102107

103108

104-
def cli( # noqa: C901
105-
args_list: Sequence[str] | None = None, *, prog_name: str = 'pai', default_model: str = 'openai:gpt-4.1'
109+
def cli(
110+
args_list: Sequence[str] | None = None,
111+
*,
112+
prog_name: str = 'pai',
113+
default_model: str = 'openai:gpt-4.1',
106114
) -> int:
107115
"""Run the CLI and return the exit code for the process."""
108-
parser = argparse.ArgumentParser(
109-
prog=prog_name,
110-
description=f"""\
111-
Pydantic AI CLI v{__version__}\n\n
112-
113-
Special prompts:
114-
* `/exit` - exit the interactive mode (ctrl-c and ctrl-d also work)
115-
* `/markdown` - show the last markdown output of the last question
116-
* `/multiline` - toggle multiline mode
117-
""",
118-
formatter_class=argparse.RawTextHelpFormatter,
119-
)
120-
parser.add_argument('prompt', nargs='?', help='AI Prompt, if omitted fall into interactive mode')
121-
arg = parser.add_argument(
116+
117+
# Create click command for parsing
118+
@click.command(context_settings={'help_option_names': ['-h', '--help']})
119+
@click.argument('prompt', required=False)
120+
@click.option(
122121
'-m',
123122
'--model',
124-
nargs='?',
125123
help=f'Model to use, in format "<provider>:<model>" e.g. "openai:gpt-4.1" or "anthropic:claude-sonnet-4-0". Defaults to "{default_model}".',
126124
)
127-
# we don't want to autocomplete or list models that don't include the provider,
128-
# e.g. we want to show `openai:gpt-4o` but not `gpt-4o`
129-
qualified_model_names = [n for n in get_literal_values(KnownModelName.__value__) if ':' in n]
130-
arg.completer = argcomplete.ChoicesCompleter(qualified_model_names) # type: ignore[reportPrivateUsage]
131-
parser.add_argument(
125+
@click.option(
132126
'-a',
133127
'--agent',
134128
help='Custom Agent to use, in format "module:variable", e.g. "mymodule.submodule:my_agent"',
135129
)
136-
parser.add_argument(
130+
@click.option(
137131
'-l',
138132
'--list-models',
139-
action='store_true',
133+
is_flag=True,
140134
help='List all available models and exit',
141135
)
142-
parser.add_argument(
136+
@click.option(
143137
'-t',
144138
'--code-theme',
145-
nargs='?',
146-
help='Which colors to use for code, can be "dark", "light" or any theme from pygments.org/styles/. Defaults to "dark" which works well on dark terminals.',
147139
default='dark',
140+
help='Which colors to use for code, can be "dark", "light" or any theme from pygments.org/styles/. Defaults to "dark" which works well on dark terminals.',
148141
)
149-
parser.add_argument('--no-stream', action='store_true', help='Disable streaming from the model')
150-
parser.add_argument('--version', action='store_true', help='Show version and exit')
142+
@click.option('--no-stream', is_flag=True, help='Disable streaming from the model')
143+
@click.option('--version', is_flag=True, help='Show version and exit')
144+
def click_cli(
145+
prompt: str | None,
146+
model: str | None,
147+
agent: str | None,
148+
list_models: bool,
149+
code_theme: str,
150+
no_stream: bool,
151+
version: bool,
152+
) -> int:
153+
f"""Pydantic AI CLI v{__version__}
154+
155+
Special prompts:
156+
* /exit - exit the interactive mode (ctrl-c and ctrl-d also work)
157+
* /markdown - show the last markdown output of the last question
158+
* /multiline - toggle multiline mode
159+
"""
160+
return _cli_impl(
161+
prompt=prompt,
162+
model=model,
163+
agent=agent,
164+
list_models=list_models,
165+
code_theme=code_theme,
166+
no_stream=no_stream,
167+
version=version,
168+
prog_name=prog_name,
169+
default_model=default_model,
170+
)
151171

152-
argcomplete.autocomplete(parser)
153-
args = parser.parse_args(args_list)
172+
# Check if this is a help or version request that should raise SystemExit
173+
should_exit = args_list and any(arg in ['--help', '-h', '--version'] for arg in args_list)
174+
175+
# Invoke click command with appropriate mode
176+
try:
177+
if should_exit:
178+
# Use standalone_mode=True for --help/--version to get SystemExit behavior
179+
click_cli.main(args_list, standalone_mode=True, prog_name=prog_name)
180+
else:
181+
# Use standalone_mode=False for normal operations
182+
result = click_cli.main(args_list, standalone_mode=False, prog_name=prog_name)
183+
return result if result is not None else 0
184+
except click.ClickException as e:
185+
e.show()
186+
return 1
187+
188+
189+
def _cli_impl( # noqa: C901
190+
prompt: str | None,
191+
model: str | None,
192+
agent: str | None,
193+
list_models: bool,
194+
code_theme: str,
195+
no_stream: bool,
196+
version: bool,
197+
prog_name: str,
198+
default_model: str,
199+
) -> int:
200+
"""Implementation of CLI logic, separated from click decorators."""
201+
# we don't want to autocomplete or list models that don't include the provider,
202+
# e.g. we want to show `openai:gpt-4o` but not `gpt-4o`
203+
qualified_model_names = [n for n in get_literal_values(KnownModelName.__value__) if ':' in n]
154204

155205
console = Console()
156206
name_version = f'[green]{prog_name} - Pydantic AI CLI v{__version__}[/green]'
157-
if args.version:
207+
if version:
158208
console.print(name_version, highlight=False)
159209
return 0
160-
if args.list_models:
210+
if list_models:
161211
console.print(f'{name_version}\n\n[green]Available models:[/green]')
162-
for model in qualified_model_names:
163-
console.print(f' {model}', highlight=False)
212+
for model_name in qualified_model_names:
213+
console.print(f' {model_name}', highlight=False)
164214
return 0
165215

166-
agent: Agent[None, str] = cli_agent
167-
if args.agent:
216+
agent_instance: Agent[None, str] = cli_agent
217+
if agent:
168218
sys.path.append(os.getcwd())
169219
try:
170-
module_path, variable_name = args.agent.split(':')
220+
module_path, variable_name = agent.split(':')
171221
except ValueError:
172222
console.print('[red]Error: Agent must be specified in "module:variable" format[/red]')
173223
return 1
174224

175225
module = importlib.import_module(module_path)
176-
agent = getattr(module, variable_name)
177-
if not isinstance(agent, Agent):
178-
console.print(f'[red]Error: {args.agent} is not an Agent instance[/red]')
226+
agent_instance = getattr(module, variable_name)
227+
if not isinstance(agent_instance, Agent):
228+
console.print(f'[red]Error: {agent} is not an Agent instance[/red]')
179229
return 1
180230

181-
model_arg_set = args.model is not None
182-
if agent.model is None or model_arg_set:
231+
model_arg_set = model is not None
232+
if agent_instance.model is None or model_arg_set:
183233
try:
184-
agent.model = infer_model(args.model or default_model)
234+
agent_instance.model = infer_model(model or default_model)
185235
except UserError as e:
186-
console.print(f'Error initializing [magenta]{args.model}[/magenta]:\n[red]{e}[/red]')
236+
console.print(f'Error initializing [magenta]{model}[/magenta]:\n[red]{e}[/red]')
187237
return 1
188238

189-
model_name = agent.model if isinstance(agent.model, str) else f'{agent.model.system}:{agent.model.model_name}'
190-
if args.agent and model_arg_set:
239+
model_name = (
240+
agent_instance.model
241+
if isinstance(agent_instance.model, str)
242+
else f'{agent_instance.model.system}:{agent_instance.model.model_name}'
243+
)
244+
if agent and model_arg_set:
245+
console.print(
246+
f'{name_version} using custom agent [magenta]{agent}[/magenta] with [magenta]{model_name}[/magenta]',
247+
highlight=False,
248+
)
249+
elif agent:
191250
console.print(
192-
f'{name_version} using custom agent [magenta]{args.agent}[/magenta] with [magenta]{model_name}[/magenta]',
251+
f'{name_version} using custom agent [magenta]{agent}[/magenta]',
193252
highlight=False,
194253
)
195-
elif args.agent:
196-
console.print(f'{name_version} using custom agent [magenta]{args.agent}[/magenta]', highlight=False)
197254
else:
198255
console.print(f'{name_version} with [magenta]{model_name}[/magenta]', highlight=False)
199256

200-
stream = not args.no_stream
201-
if args.code_theme == 'light':
202-
code_theme = 'default'
203-
elif args.code_theme == 'dark':
204-
code_theme = 'monokai'
257+
stream = not no_stream
258+
if code_theme == 'light':
259+
theme = 'default'
260+
elif code_theme == 'dark':
261+
theme = 'monokai'
205262
else:
206-
code_theme = args.code_theme # pragma: no cover
263+
theme = code_theme # pragma: no cover
207264

208-
if prompt := cast(str, args.prompt):
265+
if prompt:
209266
try:
210-
asyncio.run(ask_agent(agent, prompt, stream, console, code_theme))
267+
asyncio.run(ask_agent(agent_instance, prompt, stream, console, theme))
211268
except KeyboardInterrupt:
212269
pass
213270
return 0
214271

215272
try:
216-
return asyncio.run(run_chat(stream, agent, console, code_theme, prog_name))
273+
return asyncio.run(run_chat(stream, agent_instance, console, theme, prog_name))
217274
except KeyboardInterrupt: # pragma: no cover
218275
return 0
219276

@@ -314,7 +371,11 @@ def get_suggestion(self, buffer: Buffer, document: Document) -> Suggestion | Non
314371

315372

316373
def handle_slash_command(
317-
ident_prompt: str, messages: list[ModelMessage], multiline: bool, console: Console, code_theme: str
374+
ident_prompt: str,
375+
messages: list[ModelMessage],
376+
multiline: bool,
377+
console: Console,
378+
code_theme: str,
318379
) -> tuple[int | None, bool]:
319380
if ident_prompt == '/markdown':
320381
try:

pydantic_ai_slim/pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -75,7 +75,7 @@ huggingface = ["huggingface-hub[inference]>=0.33.5"]
7575
duckduckgo = ["ddgs>=9.0.0"]
7676
tavily = ["tavily-python>=0.5.0"]
7777
# CLI
78-
cli = ["rich>=13", "prompt-toolkit>=3", "argcomplete>=3.5.0"]
78+
cli = ["rich>=13", "prompt-toolkit>=3", "click>=8.0.0"]
7979
# MCP
8080
mcp = ["mcp>=1.10.0; python_version >= '3.10'"]
8181
# Evals

0 commit comments

Comments
 (0)