
Commit 8d8cf36

fix apply cancellation and make slash commands cancelable
1 parent fd5646c commit 8d8cf36

13 files changed (+41, -22 lines)

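Every hunk below follows the same pattern: instead of each stream constructing a throwaway `new AbortController()` whose signal nothing ever aborts, a single `AbortController` is threaded through to each `llm.streamChat` call so the caller can actually cancel an in-flight slash command or apply. A minimal sketch of the intended usage, where `onUserCancel` and `process` are hypothetical stand-ins for the real wiring in this repo:

```ts
// Sketch only: how a caller is expected to drive cancellation after this commit.
// llm.streamChat(messages, signal) matches the signature used throughout the diff;
// onUserCancel and process are placeholders, not APIs from this repo.
const abortController = new AbortController();

// e.g. a "stop" button or cancel message from the IDE aborts the run
onUserCancel(() => abortController.abort());

for await (const chunk of llm.streamChat(messages, abortController.signal)) {
  process(chunk); // stops yielding once abort() fires
}
```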

core/commands/index.ts

Lines changed: 2 additions & 1 deletion
@@ -25,6 +25,7 @@ export function slashFromCustomCommand(
     config,
     selectedCode,
     fetch,
+    abortController,
   }) {
     // Render prompt template
     let renderedPrompt: string;
@@ -63,7 +64,7 @@ export function slashFromCustomCommand(
 
     for await (const chunk of llm.streamChat(
       messages,
-      new AbortController().signal,
+      abortController.signal,
       completionOptions,
     )) {
      yield renderChatMessage(chunk);

core/commands/slash/commit.ts

Lines changed: 2 additions & 2 deletions
@@ -4,7 +4,7 @@ import { renderChatMessage } from "../../util/messageContent.js";
 const CommitMessageCommand: SlashCommand = {
   name: "commit",
   description: "Generate a commit message for current changes",
-  run: async function* ({ ide, llm, params }) {
+  run: async function* ({ ide, llm, params, abortController }) {
     const includeUnstaged = params?.includeUnstaged ?? false;
     const diff = await ide.getDiff(includeUnstaged);
 
@@ -16,7 +16,7 @@ const CommitMessageCommand: SlashCommand = {
     const prompt = `${diff.join("\n")}\n\nGenerate a commit message for the above set of changes. First, give a single sentence, no more than 80 characters. Then, after 2 line breaks, give a list of no more than 5 short bullet points, each no more than 40 characters. Output nothing except for the commit message, and don't surround it in quotes.`;
     for await (const chunk of llm.streamChat(
       [{ role: "user", content: prompt }],
-      new AbortController().signal,
+      abortController.signal,
     )) {
       yield renderChatMessage(chunk);
     }

core/commands/slash/draftIssue.ts

Lines changed: 2 additions & 2 deletions
@@ -24,7 +24,7 @@ Body:\n\n`;
 const DraftIssueCommand: SlashCommand = {
   name: "issue",
   description: "Draft a GitHub issue",
-  run: async function* ({ input, llm, history, params }) {
+  run: async function* ({ input, llm, history, params, abortController }) {
     if (params?.repositoryUrl === undefined) {
       yield "This command requires a repository URL to be set in the config file.";
       return;
@@ -46,7 +46,7 @@ const DraftIssueCommand: SlashCommand = {
 
     for await (const chunk of llm.streamChat(
       messages,
-      new AbortController().signal,
+      abortController.signal,
     )) {
       body += chunk.content;
       yield renderChatMessage(chunk);

core/commands/slash/mcp.ts

Lines changed: 1 addition & 1 deletion
@@ -60,7 +60,7 @@ export function constructMcpSlashCommand(
 
     for await (const chunk of context.llm.streamChat(
       messages,
-      new AbortController().signal,
+      context.abortController.signal,
       context.completionOptions,
     )) {
       yield renderChatMessage(chunk);

core/commands/slash/onboard.ts

Lines changed: 2 additions & 2 deletions
@@ -41,15 +41,15 @@ const MAX_EXPLORE_DEPTH = 2;
 const OnboardSlashCommand: SlashCommand = {
   name: "onboard",
   description: "Familiarize yourself with the codebase",
-  run: async function* ({ llm, ide }) {
+  run: async function* ({ llm, ide, abortController }) {
     const [workspaceDir] = await ide.getWorkspaceDirs();
 
     const context = await gatherProjectContext(workspaceDir, ide);
     const prompt = createOnboardingPrompt(context);
 
     for await (const chunk of llm.streamChat(
       [{ role: "user", content: prompt }],
-      new AbortController().signal,
+      abortController.signal,
     )) {
       yield renderChatMessage(chunk);
     }

core/commands/slash/review.ts

Lines changed: 2 additions & 2 deletions
@@ -38,14 +38,14 @@ function getLastUserHistory(history: ChatMessage[]): string {
 const ReviewMessageCommand: SlashCommand = {
   name: "review",
   description: "Review code and give feedback",
-  run: async function* ({ llm, history }) {
+  run: async function* ({ llm, history, abortController }) {
     const reviewText = getLastUserHistory(history).replace("\\review", "");
 
     const content = `${prompt} \r\n ${reviewText}`;
 
     for await (const chunk of llm.streamChat(
       [{ role: "user", content: content }],
-      new AbortController().signal,
+      abortController.signal,
     )) {
       yield renderChatMessage(chunk);
     }

core/edit/lazy/applyCodeBlock.ts

Lines changed: 8 additions & 1 deletion
@@ -16,6 +16,7 @@ export async function applyCodeBlock(
   newLazyFile: string,
   filename: string,
   llm: ILLM,
+  abortController: AbortController,
 ): Promise<{
   isInstantApply: boolean;
   diffLinesGenerator: AsyncGenerator<DiffLine>;
@@ -51,6 +52,12 @@ export async function applyCodeBlock(
 
   return {
     isInstantApply: false,
-    diffLinesGenerator: streamLazyApply(oldFile, filename, newLazyFile, llm),
+    diffLinesGenerator: streamLazyApply(
+      oldFile,
+      filename,
+      newLazyFile,
+      llm,
+      abortController,
+    ),
   };
 }
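With the extra parameter, the caller of `applyCodeBlock` owns the controller and can abort a lazy apply midway. A hedged sketch of such a call site (the real one in this repo may differ; `oldFile`, `userCancelled`, and `renderDiffLine` are assumed names for illustration):

```ts
// Sketch, not the repo's actual call site.
const abortController = new AbortController();
const { isInstantApply, diffLinesGenerator } = await applyCodeBlock(
  oldFile, // assumed preceding parameter, matching the streamLazyApply call above
  newLazyFile,
  filename,
  llm,
  abortController,
);

for await (const diffLine of diffLinesGenerator) {
  if (userCancelled()) {       // hypothetical cancellation check
    abortController.abort();   // cancels the underlying llm.streamChat stream
    break;
  }
  renderDiffLine(diffLine);    // hypothetical editor update
}
```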

core/edit/lazy/replace.ts

Lines changed: 8 additions & 4 deletions
@@ -59,6 +59,7 @@ export async function* getReplacementWithLlm(
   linesBefore: string[],
   linesAfter: string[],
   llm: ILLM,
+  abortController: AbortController,
 ): AsyncGenerator<string> {
   const userPrompt = dedent`
     ORIGINAL CODE:
@@ -83,10 +84,13 @@ export async function* getReplacementWithLlm(
     \`\`\`
   \`;
 
-  const completion = await llm.streamChat([
-    { role: "user", content: userPrompt },
-    { role: "assistant", content: assistantPrompt },
-  ], new AbortController().signal);
+  const completion = await llm.streamChat(
+    [
+      { role: "user", content: userPrompt },
+      { role: "assistant", content: assistantPrompt },
+    ],
+    abortController.signal,
+  );
 
   let lines = streamLines(completion);
   lines = filterLeadingNewline(lines);

core/edit/lazy/streamLazyApply.ts

Lines changed: 3 additions & 4 deletions
@@ -16,17 +16,15 @@ export async function* streamLazyApply(
   filename: string,
   newCode: string,
   llm: ILLM,
+  abortController: AbortController,
 ): AsyncGenerator<DiffLine> {
   const promptFactory = lazyApplyPromptForModel(llm.model, llm.providerName);
   if (!promptFactory) {
     throw new Error(`Lazy apply not supported for model ${llm.model}`);
   }
 
   const promptMessages = promptFactory(oldCode, filename, newCode);
-  const lazyCompletion = llm.streamChat(
-    promptMessages,
-    new AbortController().signal,
-  );
+  const lazyCompletion = llm.streamChat(promptMessages, abortController.signal);
 
   // Do find and replace over the lazy edit response
   async function* replacementFunction(
@@ -39,6 +37,7 @@ export async function* streamLazyApply(
       linesBefore,
       linesAfter,
       llm,
+      abortController,
     )) {
       yield line;
     }

core/index.d.ts

Lines changed: 2 additions & 1 deletion
@@ -836,6 +836,7 @@ export interface ContinueSDK {
   config: ContinueConfig;
   fetch: FetchFunction;
   completionOptions?: LLMFullCompletionOptions;
+  abortController: AbortController;
 }
 
 export interface SlashCommand {
@@ -1141,7 +1142,7 @@ export interface StreamableHTTPOptions {
   requestOptions?: RequestOptions;
 }
 
-export type TransportOptions = 
+export type TransportOptions =
   | StdioOptions
  | WebSocketOptions
  | SSEOptions
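Because `abortController` is now part of the `ContinueSDK` arguments, custom slash commands can respect cancellation the same way the built-ins above do. A minimal sketch of a hypothetical command (`SummarizeCommand` is not in this repo; the `renderChatMessage` import path is the one the built-in commands use):

```ts
import { renderChatMessage } from "../../util/messageContent.js";

// Hypothetical example; mirrors the pattern used by /commit, /review, etc.
const SummarizeCommand: SlashCommand = {
  name: "summarize",
  description: "Summarize the provided input",
  run: async function* ({ input, llm, abortController }) {
    for await (const chunk of llm.streamChat(
      [{ role: "user", content: `Summarize the following:\n\n${input}` }],
      abortController.signal,
    )) {
      yield renderChatMessage(chunk); // streaming stops when the caller aborts
    }
  },
};
```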
