2 changes: 0 additions & 2 deletions README.md
@@ -22,5 +22,3 @@ pipx install git-draft[openai]
## Next steps

* MCP bot.
* Mechanism for reporting feedback from a bot, and possibly allowing user to
interactively respond.
50 changes: 27 additions & 23 deletions docs/git-draft.adoc
@@ -34,53 +34,65 @@ git draft [options] --templates [--json | [--edit] TEMPLATE]
-a::
--accept::
--no-accept::
Merge generated changes automatically.
Can be repeated.
Merge generated changes automatically, updating the working directory.
This option can be repeated up to 3 times, with the following behavior at each level:
+
* `-a`: Merge changes conservatively, flagging any conflicts for manual user resolution.
* `-aa`: Merge changes aggressively, resolving all conflicts in favor of the generated change.
* `-aaa`: Merge changes aggressively (same as `-aa`), then run `--quit` automatically.
+
By default, changes are not merged, keeping the working directory untouched.
A different default can be set in the configuration file.
In that case, the `--no-accept` flag can be used to disable merging for a single invocation.
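+
For example, assuming a prompt piped on stdin (the prompt and file name are illustrative, and `-` selects a stdin prompt per the new argument parsing):
+
[source,shell]
----
echo "Add type hints to chart.py" | git draft --new -a -    # conservative merge
echo "Add type hints to chart.py" | git draft --new -aaa -  # aggressive merge, then quit
----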

-b BOT::
--bot=BOT::
Bot name.
Defaults to the first bot defined in the configuration.

-e::
--edit::
Enable interactive editing of draft prompts and templates.
See `--generate` and `--show-templates` for details.
Enable interactive editing of draft prompts and templates.
See `--new` and `--templates` for details.

-h::
--help::
Show help message and exit.

-j::
--json::
Use JSON output.

--log::
--log-path::
Show log path and exit.

-N::
--new::
Create an AI-generated draft.
If the `--edit` option is set, an interactive editor will be opened with the rendered prompt to allow modification before it is forwarded to the bot.

-Q::
--quit::
Go back to the draft's origin branch with the current working directory.
Go back to the draft's origin branch, keeping the working directory's current state.
This will delete the draft branch and its upstream.
Generated commits remain available via `refs/drafts`.

--root::
Repository search root.
Defaults to the current working directory.

-T::
--templates::
With no argument, lists available templates.
With a template name argument, displays the corresponding template's contents or, if the `--edit` option is set, opens an interactive editor.
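+
For example (the template name is hypothetical):
+
[source,shell]
----
git draft --templates                      # list available templates
git draft --templates --json               # same, as JSON
git draft --templates my-template          # print the template's contents
git draft --templates --edit my-template   # open it in an interactive editor
----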

--version::
Show version and exit.


== Examples

The workhorse command is `git draft --generate` which leverages AI to edit our code.
The workhorse command is `git draft --new` which leverages AI to edit code.
A prompt can be specified as standard input, for example `echo "Add a test for compute_offset in chart.py" | git draft --new`.
If no prompt is specified and stdin is a TTY, `$EDITOR` will be opened to enter the prompt.
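
Templated prompts can presumably also be invoked by name; a sketch, assuming a hypothetical `add-tests` template, that remaining arguments are forwarded to the template, and that `-` reads the prompt from stdin per the new argument parsing:

[source,shell]
----
git draft --new add-tests chart.py             # render the template with an argument
echo "Refactor chart.py" | git draft --new -   # free-form prompt from stdin
----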

@@ -93,14 +105,6 @@ This will check out the branch used when creating the draft, adding the final st
Note that you can come back to an existing draft anytime (by checking its branch out), but you will not be able to apply it if its origin branch has moved since the draft was created.
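
For instance, with a hypothetical draft branch name:

[source,shell]
----
git checkout drafts/123/pub   # return to an existing draft
----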


* Generate: create a new draft to the current folio, or create a new folio if none exists.
* Finalize
** Apply: include changes into origin branch.
** Discard: abandon folio.
** Save: return to original branch. Q: how to load after?
* List templates


o Foo (main)
o Sync (drafts/123/pub)

14 changes: 13 additions & 1 deletion poetry.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions pyproject.toml
@@ -7,6 +7,7 @@ readme = "README.md"
dynamic = ["version"]
requires-python = ">=3.12"
dependencies = [
"docopt-ng (>=0.9,<0.10)",
"jinja2 (>=3.1.5,<4)",
"prettytable (>=3.15.1,<4)",
"xdg-base-dirs (>=6.0.2,<7)",
43 changes: 25 additions & 18 deletions src/git_draft/__main__.py
@@ -20,7 +20,12 @@
from .drafter import Drafter, DraftMergeStrategy
from .editor import open_editor
from .git import Repo
from .prompt import Template, TemplatedPrompt, find_template, templates_table
from .prompt import (
PromptMetadata,
TemplatedPrompt,
find_prompt_metadata,
templates_table,
)
from .store import Store


@@ -178,43 +183,45 @@ def main() -> None:  # noqa: PLR0912 PLR0915
bot = load_bot(bot_config)

prompt: str | TemplatedPrompt
editable = opts.edit
if args:
prompt = TemplatedPrompt.parse(args[0], *args[1:])
elif opts.edit:
editable = False
if args[0] == "-":
prompt = sys.stdin.read()
else:
prompt = TemplatedPrompt.public(args[0], args[1:])
editable = opts.edit
else:
prompt = edit(
text=drafter.latest_draft_prompt() or _PROMPT_PLACEHOLDER
).strip()
if not prompt or prompt == _PROMPT_PLACEHOLDER:
raise ValueError("Aborting: empty or placeholder prompt")
else:
if sys.stdin.isatty():
print("Reading prompt from stdin... (press C-D when done)")
prompt = sys.stdin.read()
if prompt.strip() == _PROMPT_PLACEHOLDER:
prompt = "" # Enable consistent error message
editable = False # We already edited the prompt

accept = Accept(opts.accept or 0)
_ = drafter.generate_draft(
drafter.generate_draft(
prompt,
bot,
prompt_transform=open_editor if editable else None,
merge_strategy=accept.merge_strategy(),
)
if accept == Accept.MERGE_THEN_QUIT:
# TODO: Refuse to quit on pending question?
drafter.quit_folio()
case "quit":
drafter.quit_folio()
case "templates":
if args:
name = args[0]
tpl = find_template(name)
meta = find_prompt_metadata(name)
if opts.edit:
if tpl:
edit(path=tpl.local_path(), text=tpl.source)
if meta:
edit(path=meta.local_path(), text=meta.source())
else:
edit(path=Template.local_path_for(name))
edit(path=PromptMetadata.local_path_for(name))
else:
if not tpl:
if not meta:
raise ValueError(f"No template named {name!r}")
print(tpl.source)
print(meta.source())
else:
table = templates_table()
print(table.to_json() if opts.json else table)
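
For context, a minimal sketch of the `Accept` enum used above; `MERGE_THEN_QUIT` appears in this diff, the other member names are guesses and the real definition may differ:

```python
import enum


class Accept(enum.IntEnum):
    """Maps how many times -a was repeated (``opts.accept or 0``)."""

    NONE = 0  # hypothetical: leave the working directory untouched
    MERGE = 1  # hypothetical: conservative merge, conflicts flagged for the user
    FORCE_MERGE = 2  # hypothetical: resolve conflicts in favor of the draft
    MERGE_THEN_QUIT = 3  # from the diff: aggressive merge, then quit the folio

    def merge_strategy(self) -> "DraftMergeStrategy | None":
        # Hypothetical: map each level onto the DraftMergeStrategy that
        # Drafter.generate_draft accepts (imported from .drafter above).
        ...
```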
1 change: 1 addition & 0 deletions src/git_draft/bots/common.py
@@ -28,6 +28,7 @@ class Action:
title: str | None = None
request_count: int | None = None
token_count: int | None = None
question: str | None = None

def increment_request_count(self, n: int = 1, init: bool = False) -> None:
self._increment("request_count", n, init)
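
A sketch of how a caller might consume the new `question` field; the field comes from this diff, while the helper and its reporting logic are illustrative:

```python
def report_pending_question(action: Action) -> None:
    # Illustrative helper: if the bot stopped early to ask the user
    # something, surface its question instead of finishing silently.
    if action.question is not None:
        print(f"The bot needs more information: {action.question}")
```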
78 changes: 55 additions & 23 deletions src/git_draft/bots/openai.py
@@ -20,7 +20,7 @@

import openai

from ..common import JSONObject, config_string, reindent
from ..common import JSONObject, UnreachableError, config_string, reindent
from .common import Action, Bot, Goal, Toolbox


@@ -45,7 +45,7 @@ def threads_bot(
model: str = _DEFAULT_MODEL,
) -> Bot:
"""Beta bot, uses assistant threads with function calling"""
return _ThreadsBot.create(_new_client(api_key, base_url), model)
return _ThreadsBot(_new_client(api_key, base_url), model)


def _new_client(api_key: str | None, base_url: str | None) -> openai.OpenAI:
@@ -85,6 +85,21 @@ def _param(

def params(self) -> Sequence[openai.types.chat.ChatCompletionToolParam]:
return [
self._param(
name="ask_user",
description="""
Request more information from the user

Call this function if and only if you are unable to achieve
your task with the information you already have.
""",
inputs={
"question": {
"type": "string",
"description": "Question to be answered by the user",
},
},
),
self._param(
name="list_files",
description="List all available files",
@@ -152,17 +167,18 @@ def params(self) -> Sequence[openai.types.chat.ChatCompletionToolParam]:
read the content of the relevant ones, and save the changes you suggest.

You should stop when and ONLY WHEN all the files you need to change have
been updated. If you stop for any reason before completing your task,
explain why by updating a REASON file before stopping. For example if you
are missing some information or noticed something inconsistent with the
instructions, say so there. DO NOT STOP without updating at least this
file.
been updated. If you do not have enough information to complete your task,
use the provided tool to request it from the user, then stop.
"""


class _ToolHandler[V]:
def __init__(self, toolbox: Toolbox) -> None:
self._toolbox = toolbox
self.question: str | None = None

def _on_ask_user(self) -> V:
raise NotImplementedError()

def _on_read_file(self, path: PurePosixPath, contents: str | None) -> V:
raise NotImplementedError()
@@ -185,6 +201,10 @@ def handle_function(self, function: Any) -> V:
inputs = json.loads(function.arguments)
_logger.info("Requested function: %s", function)
match function.name:
case "ask_user":
assert not self.question
self.question = inputs["question"]
return self._on_ask_user()
case "read_file":
path = PurePosixPath(inputs["path"])
return self._on_read_file(path, self._toolbox.read_file(path))
@@ -202,10 +222,11 @@ def handle_function(self, function: Any) -> V:
dst_path = PurePosixPath(inputs["dst_path"])
self._toolbox.rename_file(src_path, dst_path)
return self._on_rename_file(src_path, dst_path)
case _ as name:
assert name == "list_files" and not inputs
case "list_files":
paths = self._toolbox.list_files()
return self._on_list_files(paths)
case _ as name:
raise UnreachableError(f"Unexpected function: {name}")
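
For orientation, `Toolbox` is imported from `.common`; inferred from the calls above, its interface presumably looks roughly like this sketch (`write_file` is a guess, the other methods appear in this diff):

```python
from collections.abc import Sequence
from pathlib import PurePosixPath
from typing import Protocol


class Toolbox(Protocol):
    def list_files(self) -> Sequence[PurePosixPath]: ...
    def read_file(self, path: PurePosixPath) -> str | None: ...
    def write_file(self, path: PurePosixPath, contents: str) -> None: ...
    def rename_file(
        self, src_path: PurePosixPath, dst_path: PurePosixPath
    ) -> None: ...
```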


class _CompletionsBot(Bot):
@@ -243,10 +264,16 @@ def act(self, goal: Goal, toolbox: Toolbox) -> Action:
if done:
break

return Action(request_count=request_count)
return Action(
request_count=request_count,
question=tool_handler.question,
)


class _CompletionsToolHandler(_ToolHandler[str | None]):
def _on_ask_user(self) -> None:
return None

def _on_read_file(self, path: PurePosixPath, contents: str | None) -> str:
if contents is None:
return f"`{path}` does not exist."
@@ -269,32 +296,31 @@ def _on_list_files(self, paths: Sequence[PurePosixPath]) -> str:


class _ThreadsBot(Bot):
def __init__(self, client: openai.OpenAI, assistant_id: str) -> None:
def __init__(self, client: openai.OpenAI, model: str) -> None:
self._client = client
self._assistant_id = assistant_id
self._model = model

@classmethod
def create(cls, client: openai.OpenAI, model: str) -> Self:
assistant_kwargs: JSONObject = dict(
model=model,
def _load_assistant_id(self) -> str:
kwargs: JSONObject = dict(
model=self._model,
instructions=reindent(_INSTRUCTIONS),
tools=_ToolsFactory(strict=True).params(),
)

path = cls.state_folder_path(ensure_exists=True) / "ASSISTANT_ID"
path = self.state_folder_path(ensure_exists=True) / "ASSISTANT_ID"
try:
with open(path) as f:
assistant_id = f.read()
client.beta.assistants.update(assistant_id, **assistant_kwargs)
self._client.beta.assistants.update(assistant_id, **kwargs)
except (FileNotFoundError, openai.NotFoundError):
assistant = client.beta.assistants.create(**assistant_kwargs)
assistant = self._client.beta.assistants.create(**kwargs)
assistant_id = assistant.id
with open(path, "w") as f:
f.write(assistant_id)

return cls(client, assistant_id)
return assistant_id

def act(self, goal: Goal, toolbox: Toolbox) -> Action:
assistant_id = self._load_assistant_id()

thread = self._client.beta.threads.create()
self._client.beta.threads.messages.create(
thread_id=thread.id,
@@ -307,7 +333,7 @@ def act(self, goal: Goal, toolbox: Toolbox) -> Action:
action = Action(request_count=0, token_count=0)
with self._client.beta.threads.runs.stream(
thread_id=thread.id,
assistant_id=self._assistant_id,
assistant_id=assistant_id,
event_handler=_EventHandler(self._client, toolbox, action),
) as stream:
stream.until_done()
@@ -353,6 +379,9 @@ def _handle_action(self, _run_id: str, data: Any) -> None:
for tool in data.required_action.submit_tool_outputs.tool_calls:
handler = _ThreadToolHandler(self._toolbox, tool.id)
tool_outputs.append(handler.handle_function(tool.function))
if handler.question:
assert not self._action.question
self._action.question = handler.question

run = self.current_run
assert run, "No ongoing run"
@@ -378,6 +407,9 @@ def __init__(self, toolbox: Toolbox, call_id: str) -> None:
def _wrap(self, output: str) -> _ToolOutput:
return _ToolOutput(tool_call_id=self._call_id, output=output)

def _on_ask_user(self) -> _ToolOutput:
return self._wrap("OK")

def _on_read_file(
self, _path: PurePosixPath, contents: str | None
) -> _ToolOutput: