Skip to content

Commit 1c53f96

Browse files
authored
feat: support requesting more information (#80)
1 parent 0e000f5 commit 1c53f96

24 files changed

+467
-256
lines changed

README.md

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -22,5 +22,3 @@ pipx install git-draft[openai]
2222
## Next steps
2323

2424
* MCP bot.
25-
* Mechanism for reporting feedback from a bot, and possibly allowing user to
26-
interactively respond.

docs/git-draft.adoc

Lines changed: 27 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -34,53 +34,65 @@ git draft [options] --templates [--json | [--edit] TEMPLATE]
3434
-a::
3535
--accept::
3636
--no-accept::
37-
Merge generated changes automatically.
38-
Can be repeated.
37+
Merge generated changes automatically, updating the working directory.
38+
This option can be repeated up to 3 times, with the following behavior at each level:
39+
+
40+
* `-a`: Merge changes conservatively, flagging any conflicts for manual user resolution.
41+
* `-aa`: Merge changes aggressively, resolving all conflicts in favor of the generated change.
42+
* `-aaa`: Merge changes aggressively (same as `-aa`) then run `--quit` automatically.
43+
+
44+
By default, changes are not merged, keeping the working directory untouched.
45+
A different default can be set in the configuration file.
46+
When doing so, the `--no-accept` flag can be used to disable merging at CLI invocation time.
3947

4048
-b BOT::
4149
--bot=BOT::
42-
Bot name.
50+
Bot name.
51+
Defaults to the first bot defined in the configuration.
4352

4453
-e::
4554
--edit::
46-
Enable interactive editing of draft prompts and templates.
47-
See `--generate` and `--show-templates` for details.
55+
Enable interactive editing of draft prompts and templates.
56+
See `--new` and `--templates` for details.
4857

4958
-h::
5059
--help::
51-
Show help message and exit.
60+
Show help message and exit.
5261

5362
-j::
5463
--json::
5564
Use JSON output.
5665

57-
--log::
66+
--log-path::
5867
Show log path and exit.
5968

6069
-N::
6170
--new::
62-
Create an AI-generated draft.
63-
If the `--edit` option is set, an interactive editor will be open with the rendered prompt to allow modification before it is forwarded to the bot.
71+
Create an AI-generated draft.
72+
If the `--edit` option is set, an interactive editor will be opened with the rendered prompt to allow modification before it is forwarded to the bot.
6473

6574
-Q::
6675
--quit::
67-
Go back to the draft's origin branch with the current working directory.
76+
Go back to the draft's origin branch, keeping the working directory's current state.
77+
This will delete the draft branch and its upstream.
78+
Generated commits remain available via `refs/drafts`.
6879

6980
--root::
70-
Repository search root.
81+
Repository search root.
82+
Defaults to the current working directory.
7183

7284
-T::
7385
--templates::
74-
With no argument, lists available templates.
75-
With an template name argument, displays the corresponding template's contents or, if the `--edit` option is set, opens an interactive editor.
86+
With no argument, lists available templates.
87+
With a template name argument, displays the corresponding template's contents or, if the `--edit` option is set, opens an interactive editor.
7688

7789
--version::
78-
Show version and exit.
90+
Show version and exit.
7991

8092

8193
== Examples
8294

83-
The workhorse command is `git draft --generate` which leverages AI to edit our code.
95+
The workhorse command is `git draft --new` which leverages AI to edit code.
8496
A prompt can be specified as standard input, for example `echo "Add a test for compute_offset in chart.py" | git draft --new`.
8597
If no prompt is specified and stdin is a TTY, `$EDITOR` will be opened to enter the prompt.
8698

@@ -93,14 +105,6 @@ This will check out the branch used when creating the draft, adding the final st
93105
Note that you can come back to an existing draft anytime (by checking its branch out), but you will not be able to apply it if its origin branch has moved since the draft was created.
94106

95107

96-
* Generate: create a new draft to the current folio, or create a new folio if none exists.
97-
* Finalize
98-
** Apply: include changes into origin branch.
99-
** Discard: abandon folio.
100-
** Save: return to original branch. Q: how to load after?
101-
* List templates
102-
103-
104108
o Foo (main)
105109
o Sync (drafts/123/pub)
106110

poetry.lock

Lines changed: 13 additions & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

pyproject.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@ readme = "README.md"
77
dynamic = ["version"]
88
requires-python = ">=3.12"
99
dependencies = [
10+
"docopt-ng (>=0.9,<0.10)",
1011
"jinja2 (>=3.1.5,<4)",
1112
"prettytable (>=3.15.1,<4)",
1213
"xdg-base-dirs (>=6.0.2,<7)",

src/git_draft/__main__.py

Lines changed: 25 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,12 @@
2020
from .drafter import Drafter, DraftMergeStrategy
2121
from .editor import open_editor
2222
from .git import Repo
23-
from .prompt import Template, TemplatedPrompt, find_template, templates_table
23+
from .prompt import (
24+
PromptMetadata,
25+
TemplatedPrompt,
26+
find_prompt_metadata,
27+
templates_table,
28+
)
2429
from .store import Store
2530

2631

@@ -178,43 +183,45 @@ def main() -> None: # noqa: PLR0912 PLR0915
178183
bot = load_bot(bot_config)
179184

180185
prompt: str | TemplatedPrompt
181-
editable = opts.edit
182186
if args:
183-
prompt = TemplatedPrompt.parse(args[0], *args[1:])
184-
elif opts.edit:
185-
editable = False
187+
if args[0] == "-":
188+
prompt = sys.stdin.read()
189+
else:
190+
prompt = TemplatedPrompt.public(args[0], args[1:])
191+
editable = opts.edit
192+
else:
186193
prompt = edit(
187194
text=drafter.latest_draft_prompt() or _PROMPT_PLACEHOLDER
188195
).strip()
189-
if not prompt or prompt == _PROMPT_PLACEHOLDER:
190-
raise ValueError("Aborting: empty or placeholder prompt")
191-
else:
192-
if sys.stdin.isatty():
193-
print("Reading prompt from stdin... (press C-D when done)")
194-
prompt = sys.stdin.read()
196+
if prompt.strip() == _PROMPT_PLACEHOLDER:
197+
prompt = "" # Enable consistent error message
198+
editable = False # We already edited the prompt
195199

196200
accept = Accept(opts.accept or 0)
197-
_ = drafter.generate_draft(
201+
drafter.generate_draft(
198202
prompt,
199203
bot,
200204
prompt_transform=open_editor if editable else None,
201205
merge_strategy=accept.merge_strategy(),
202206
)
207+
if accept == Accept.MERGE_THEN_QUIT:
208+
# TODO: Refuse to quit on pending question?
209+
drafter.quit_folio()
203210
case "quit":
204211
drafter.quit_folio()
205212
case "templates":
206213
if args:
207214
name = args[0]
208-
tpl = find_template(name)
215+
meta = find_prompt_metadata(name)
209216
if opts.edit:
210-
if tpl:
211-
edit(path=tpl.local_path(), text=tpl.source)
217+
if meta:
218+
edit(path=meta.local_path(), text=meta.source())
212219
else:
213-
edit(path=Template.local_path_for(name))
220+
edit(path=PromptMetadata.local_path_for(name))
214221
else:
215-
if not tpl:
222+
if not meta:
216223
raise ValueError(f"No template named {name!r}")
217-
print(tpl.source)
224+
print(meta.source())
218225
else:
219226
table = templates_table()
220227
print(table.to_json() if opts.json else table)

src/git_draft/bots/common.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -28,6 +28,7 @@ class Action:
2828
title: str | None = None
2929
request_count: int | None = None
3030
token_count: int | None = None
31+
question: str | None = None
3132

3233
def increment_request_count(self, n: int = 1, init: bool = False) -> None:
3334
self._increment("request_count", n, init)

src/git_draft/bots/openai.py

Lines changed: 55 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@
2020

2121
import openai
2222

23-
from ..common import JSONObject, config_string, reindent
23+
from ..common import JSONObject, UnreachableError, config_string, reindent
2424
from .common import Action, Bot, Goal, Toolbox
2525

2626

@@ -45,7 +45,7 @@ def threads_bot(
4545
model: str = _DEFAULT_MODEL,
4646
) -> Bot:
4747
"""Beta bot, uses assistant threads with function calling"""
48-
return _ThreadsBot.create(_new_client(api_key, base_url), model)
48+
return _ThreadsBot(_new_client(api_key, base_url), model)
4949

5050

5151
def _new_client(api_key: str | None, base_url: str | None) -> openai.OpenAI:
@@ -85,6 +85,21 @@ def _param(
8585

8686
def params(self) -> Sequence[openai.types.chat.ChatCompletionToolParam]:
8787
return [
88+
self._param(
89+
name="ask_user",
90+
description="""
91+
Request more information from the user
92+
93+
Call this function if and only if you are unable to achieve
94+
your task with the information you already have.
95+
""",
96+
inputs={
97+
"question": {
98+
"type": "string",
99+
"description": "Question to be answered by the user",
100+
},
101+
},
102+
),
88103
self._param(
89104
name="list_files",
90105
description="List all available files",
@@ -152,17 +167,18 @@ def params(self) -> Sequence[openai.types.chat.ChatCompletionToolParam]:
152167
read the content of the relevant ones, and save the changes you suggest.
153168
154169
You should stop when and ONLY WHEN all the files you need to change have
155-
been updated. If you stop for any reason before completing your task,
156-
explain why by updating a REASON file before stopping. For example if you
157-
are missing some information or noticed something inconsistent with the
158-
instructions, say so there. DO NOT STOP without updating at least this
159-
file.
170+
been updated. If you do not have enough information to complete your task,
171+
use the provided tool to request it from the user, then stop.
160172
"""
161173

162174

163175
class _ToolHandler[V]:
164176
def __init__(self, toolbox: Toolbox) -> None:
165177
self._toolbox = toolbox
178+
self.question: str | None = None
179+
180+
def _on_ask_user(self) -> V:
181+
raise NotImplementedError()
166182

167183
def _on_read_file(self, path: PurePosixPath, contents: str | None) -> V:
168184
raise NotImplementedError()
@@ -185,6 +201,10 @@ def handle_function(self, function: Any) -> V:
185201
inputs = json.loads(function.arguments)
186202
_logger.info("Requested function: %s", function)
187203
match function.name:
204+
case "ask_user":
205+
assert not self.question
206+
self.question = inputs["question"]
207+
return self._on_ask_user()
188208
case "read_file":
189209
path = PurePosixPath(inputs["path"])
190210
return self._on_read_file(path, self._toolbox.read_file(path))
@@ -202,10 +222,11 @@ def handle_function(self, function: Any) -> V:
202222
dst_path = PurePosixPath(inputs["dst_path"])
203223
self._toolbox.rename_file(src_path, dst_path)
204224
return self._on_rename_file(src_path, dst_path)
205-
case _ as name:
206-
assert name == "list_files" and not inputs
225+
case "list_files":
207226
paths = self._toolbox.list_files()
208227
return self._on_list_files(paths)
228+
case _ as name:
229+
raise UnreachableError(f"Unexpected function: {name}")
209230

210231

211232
class _CompletionsBot(Bot):
@@ -243,10 +264,16 @@ def act(self, goal: Goal, toolbox: Toolbox) -> Action:
243264
if done:
244265
break
245266

246-
return Action(request_count=request_count)
267+
return Action(
268+
request_count=request_count,
269+
question=tool_handler.question,
270+
)
247271

248272

249273
class _CompletionsToolHandler(_ToolHandler[str | None]):
274+
def _on_ask_user(self) -> None:
275+
return None
276+
250277
def _on_read_file(self, path: PurePosixPath, contents: str | None) -> str:
251278
if contents is None:
252279
return f"`{path}` does not exist."
@@ -269,32 +296,31 @@ def _on_list_files(self, paths: Sequence[PurePosixPath]) -> str:
269296

270297

271298
class _ThreadsBot(Bot):
272-
def __init__(self, client: openai.OpenAI, assistant_id: str) -> None:
299+
def __init__(self, client: openai.OpenAI, model: str) -> None:
273300
self._client = client
274-
self._assistant_id = assistant_id
301+
self._model = model
275302

276-
@classmethod
277-
def create(cls, client: openai.OpenAI, model: str) -> Self:
278-
assistant_kwargs: JSONObject = dict(
279-
model=model,
303+
def _load_assistant_id(self) -> str:
304+
kwargs: JSONObject = dict(
305+
model=self._model,
280306
instructions=reindent(_INSTRUCTIONS),
281307
tools=_ToolsFactory(strict=True).params(),
282308
)
283-
284-
path = cls.state_folder_path(ensure_exists=True) / "ASSISTANT_ID"
309+
path = self.state_folder_path(ensure_exists=True) / "ASSISTANT_ID"
285310
try:
286311
with open(path) as f:
287312
assistant_id = f.read()
288-
client.beta.assistants.update(assistant_id, **assistant_kwargs)
313+
self._client.beta.assistants.update(assistant_id, **kwargs)
289314
except (FileNotFoundError, openai.NotFoundError):
290-
assistant = client.beta.assistants.create(**assistant_kwargs)
315+
assistant = self._client.beta.assistants.create(**kwargs)
291316
assistant_id = assistant.id
292317
with open(path, "w") as f:
293318
f.write(assistant_id)
294-
295-
return cls(client, assistant_id)
319+
return assistant_id
296320

297321
def act(self, goal: Goal, toolbox: Toolbox) -> Action:
322+
assistant_id = self._load_assistant_id()
323+
298324
thread = self._client.beta.threads.create()
299325
self._client.beta.threads.messages.create(
300326
thread_id=thread.id,
@@ -307,7 +333,7 @@ def act(self, goal: Goal, toolbox: Toolbox) -> Action:
307333
action = Action(request_count=0, token_count=0)
308334
with self._client.beta.threads.runs.stream(
309335
thread_id=thread.id,
310-
assistant_id=self._assistant_id,
336+
assistant_id=assistant_id,
311337
event_handler=_EventHandler(self._client, toolbox, action),
312338
) as stream:
313339
stream.until_done()
@@ -353,6 +379,9 @@ def _handle_action(self, _run_id: str, data: Any) -> None:
353379
for tool in data.required_action.submit_tool_outputs.tool_calls:
354380
handler = _ThreadToolHandler(self._toolbox, tool.id)
355381
tool_outputs.append(handler.handle_function(tool.function))
382+
if handler.question:
383+
assert not self._action.question
384+
self._action.question = handler.question
356385

357386
run = self.current_run
358387
assert run, "No ongoing run"
@@ -378,6 +407,9 @@ def __init__(self, toolbox: Toolbox, call_id: str) -> None:
378407
def _wrap(self, output: str) -> _ToolOutput:
379408
return _ToolOutput(tool_call_id=self._call_id, output=output)
380409

410+
def _on_ask_user(self) -> _ToolOutput:
411+
return self._wrap("OK")
412+
381413
def _on_read_file(
382414
self, _path: PurePosixPath, contents: str | None
383415
) -> _ToolOutput:

0 commit comments

Comments
 (0)