diff --git a/llm/cli.py b/llm/cli.py
index 2e11e2c8..3e881926 100644
--- a/llm/cli.py
+++ b/llm/cli.py
@@ -338,7 +338,7 @@ def cli():
 
 
 @cli.command(name="prompt")
-@click.argument("prompt", required=False)
+@click.argument("prompt", nargs=-1, required=False)
 @click.option("-s", "--system", help="System prompt to use")
 @click.option("model_id", "-m", "--model", help="Model to use", envvar="LLM_MODEL")
 @click.option(
@@ -564,19 +564,17 @@ def prompt(
     def read_prompt():
         nonlocal prompt, schema
 
+        bits = []
+
         # Is there extra prompt available on stdin?
-        stdin_prompt = None
         if not sys.stdin.isatty():
-            stdin_prompt = sys.stdin.read()
+            bits.append(sys.stdin.read())
 
-        if stdin_prompt:
-            bits = [stdin_prompt]
-            if prompt:
-                bits.append(prompt)
-            prompt = " ".join(bits)
+        # Populate prompt from command line arguments
+        bits.extend(prompt)
 
         if (
-            prompt is None
+            not bits
             and not save
             and sys.stdin.isatty()
             and not attachments
@@ -585,8 +583,9 @@ def read_prompt():
             and not fragments
         ):
             # Hang waiting for input to stdin (unless --save)
-            prompt = sys.stdin.read()
-            return prompt
+            bits.append(sys.stdin.read())
+
+        return " ".join(bits)
 
     if save:
         # We are saving their prompt/system/etc to a new template