Skip to content

Commit 7ad77ab

Browse files
feat(gemini): Improve tool call processing and error handling
- Enhance the tool call processing in GeminiLLM to better handle function results and planning. - Update the prompt construction to include plan information when available. - Implement a retry mechanism with a user prompt for failed function calls, falling back to Groq. - Add a timeout mechanism for generateContentWithTools, with fallback to Groq on timeout. - Change the default CodeBuddy mode to Ask.
1 parent 9392165 commit 7ad77ab

File tree

4 files changed

+80
-82
lines changed

4 files changed

+80
-82
lines changed

src/llms/gemini/gemini.ts

Lines changed: 77 additions & 80 deletions
Original file line numberDiff line numberDiff line change
@@ -60,8 +60,8 @@ export class GeminiLLM
6060
private intializeDisposable(): void {
6161
this.disposables.push(
6262
vscode.workspace.onDidChangeConfiguration(() =>
63-
this.handleConfigurationChange(),
64-
),
63+
this.handleConfigurationChange()
64+
)
6565
);
6666
}
6767

@@ -91,7 +91,7 @@ export class GeminiLLM
9191

9292
public async generateText(
9393
prompt: string,
94-
instruction?: string,
94+
instruction?: string
9595
): Promise<string> {
9696
try {
9797
const model = this.getModel();
@@ -141,15 +141,15 @@ export class GeminiLLM
141141
}
142142

143143
async generateContentWithTools(
144-
userInput: string,
144+
userInput: string
145145
): Promise<GenerateContentResult> {
146146
try {
147147
await this.buildChatHistory(
148148
userInput,
149149
undefined,
150150
undefined,
151151
undefined,
152-
true,
152+
true
153153
);
154154
const prompt = createPrompt(userInput);
155155
const contents = Memory.get(COMMON.GEMINI_CHAT_HISTORY) as Content[];
@@ -184,83 +184,80 @@ export class GeminiLLM
184184
*/
185185
private async processToolCalls(
186186
toolCalls: FunctionCall[],
187-
userInput: string,
187+
userInput: string
188188
): Promise<any> {
189189
let finalResult: string | undefined = undefined;
190-
let userQuery = userInput;
191-
let callCount = 0;
192-
193-
for (const functionCall of toolCalls) {
194-
try {
195-
const functionResult =
196-
await this.handleSingleFunctionCall(functionCall);
197-
198-
if (functionResult === undefined) {
199-
finalResult = await this.groqLLM.generateText(userInput);
200-
if (finalResult) {
201-
finalResult = await this.fallBackToGroq(userInput);
202-
return finalResult;
203-
}
204-
}
205-
206-
userQuery = `Tool result: ${JSON.stringify(functionResult)}. What is your next step?`;
207-
208-
if (functionCall.name === "think") {
209-
const thought = functionResult?.response.content;
210-
this.initialThought = thought;
211-
if (thought) {
212-
this.orchestrator.publish("onStrategizing", thought);
213-
this.planSteps = this.parseThought(thought);
214-
if (this.planSteps?.length > 0) {
215-
userQuery = `Based on the plan, please provide an answer`;
216-
} else {
217-
userQuery = userInput;
218-
this.planSteps = [];
190+
try {
191+
let userQuery = userInput;
192+
let callCount = 0;
193+
194+
for (const functionCall of toolCalls) {
195+
try {
196+
const functionResult =
197+
await this.handleSingleFunctionCall(functionCall);
198+
199+
if (functionCall.name === "think") {
200+
const thought = functionResult?.response.content;
201+
this.initialThought = this.initialThought ?? thought;
202+
if (thought) {
203+
this.orchestrator.publish("onStrategizing", thought);
204+
this.planSteps = this.parseThought(thought);
205+
if (this.planSteps?.length > 0) {
206+
userQuery = `Tool result: ${JSON.stringify(functionResult)} \n. Based on these plans, Plans: ${this.planSteps} from the tool result \n What is your next step?`;
207+
} else {
208+
userQuery = `Tool result: ${JSON.stringify(functionResult)}. What is your next step?`;
209+
this.planSteps = [];
210+
}
219211
}
220212
} else {
221213
userQuery = `Tool result: ${JSON.stringify(functionResult)}. What is your next step?`;
222214
}
223-
}
224-
225-
await this.buildChatHistory(
226-
userQuery,
227-
functionCall.name,
228-
functionResult,
229-
undefined,
230-
false,
231-
);
232215

233-
const snapShot = this.createSnapShot({
234-
lastQuery: userQuery,
235-
lastCall: functionCall.name,
236-
lastResult: functionResult,
237-
currentStepIndex: this.currentStepIndex,
238-
planSteps: this.planSteps,
239-
});
240-
Memory.set(COMMON.GEMINI_SNAPSHOT, snapShot);
241-
callCount++;
242-
} catch (error: any) {
243-
console.error("Error processing function call", error);
244-
245-
const retry = await vscode.window.showErrorMessage(
246-
`Function call failed: ${error.message}. Retry or abort?`,
247-
"Retry",
248-
"Abort",
249-
);
250-
251-
if (retry === "Retry") {
252-
continue; // Retry the current function call
253-
} else {
254-
finalResult = `Function call error: ${error.message}. Falling back to last response.`;
255-
break; // Exit the loop and return the error result
216+
await this.buildChatHistory(
217+
userQuery,
218+
functionCall.name,
219+
functionResult,
220+
undefined,
221+
false
222+
);
223+
224+
const snapShot = this.createSnapShot({
225+
lastQuery: userQuery,
226+
lastCall: functionCall.name,
227+
lastResult: functionResult,
228+
currentStepIndex: this.currentStepIndex,
229+
planSteps: this.planSteps,
230+
});
231+
Memory.set(COMMON.GEMINI_SNAPSHOT, snapShot);
232+
callCount++;
233+
} catch (error: any) {
234+
console.error("Error processing function call", error);
235+
// Send this to the webview instead and let the user decide
236+
const retry = await vscode.window.showErrorMessage(
237+
`Function call failed: ${error.message}. Retry or abort?`,
238+
"Retry",
239+
"Abort"
240+
);
241+
242+
if (retry === "Retry") {
243+
continue; // Retry the current function call
244+
} else {
245+
finalResult = `Function call error: ${error.message}. Falling back to last response.`;
246+
break; // Exit the loop and return the error result
247+
}
256248
}
257249
}
250+
return finalResult;
251+
} catch (error) {
252+
console.error("Error processing tool calls", error);
253+
finalResult = await this.fallBackToGroq(
254+
`User Input: ${userInput} \n Plans: ${this.initialThought ?? "Write production ready code to demonstrate your solution"}`
255+
);
258256
}
259-
return finalResult;
260257
}
261258

262259
async processUserQuery(
263-
userInput: string,
260+
userInput: string
264261
): Promise<string | GenerateContentResult | undefined> {
265262
let finalResult: string | GenerateContentResult | undefined;
266263
let userQuery = userInput;
@@ -285,8 +282,8 @@ export class GeminiLLM
285282
const timeoutPromise = new Promise((_, reject) =>
286283
setTimeout(
287284
() => reject(new Error("TImeout Exceeded")),
288-
this.timeOutMs,
289-
),
285+
this.timeOutMs
286+
)
290287
);
291288
const responsePromise = await this.generateContentWithTools(userQuery);
292289
const result = (await Promise.race([
@@ -320,7 +317,7 @@ export class GeminiLLM
320317
this.lastFunctionCalls.add(currentCallSignatures);
321318
if (this.lastFunctionCalls.size > 10) {
322319
this.lastFunctionCalls = new Set(
323-
[...this.lastFunctionCalls].slice(-10),
320+
[...this.lastFunctionCalls].slice(-10)
324321
);
325322
}
326323
if (toolCalls && toolCalls.length > 0) {
@@ -357,7 +354,7 @@ export class GeminiLLM
357354
if (snapshot?.length > 0) {
358355
Memory.removeItems(
359356
COMMON.GEMINI_SNAPSHOT,
360-
Memory.get(COMMON.GEMINI_SNAPSHOT).length,
357+
Memory.get(COMMON.GEMINI_SNAPSHOT).length
361358
);
362359
}
363360

@@ -369,15 +366,15 @@ export class GeminiLLM
369366
// );
370367
console.log("Error processing user query", error);
371368
finalResult = await this.fallBackToGroq(
372-
`${userInput} \n ${this.initialThought ?? "Write production ready code to demonstrate your solution"}`,
369+
`${userInput} \n ${this.initialThought ?? "Write production ready code to demonstrate your solution"}`
373370
);
374371
console.log("Model not responding at this time, please try again", error);
375372
}
376373
}
377374

378375
private async handleSingleFunctionCall(
379376
functionCall: FunctionCall,
380-
attempt: number = 0,
377+
attempt: number = 0
381378
): Promise<any> {
382379
const MAX_RETRIES = 3;
383380
const args = functionCall.args as Record<string, any>;
@@ -402,7 +399,7 @@ export class GeminiLLM
402399
if (attempt < MAX_RETRIES) {
403400
console.warn(
404401
`Retry attempt ${attempt + 1} for function ${name}`,
405-
JSON.stringify({ error, args }),
402+
JSON.stringify({ error, args })
406403
);
407404
return this.handleSingleFunctionCall(functionCall, attempt + 1);
408405
}
@@ -424,7 +421,7 @@ export class GeminiLLM
424421
functionCall?: any,
425422
functionResponse?: any,
426423
chat?: ChatSession,
427-
isInitialQuery: boolean = false,
424+
isInitialQuery: boolean = false
428425
): Promise<Content[]> {
429426
// Check if it makes sense to kind of seperate agent and Edit Mode memory, when switching.
430427
let chatHistory: any = Memory.get(COMMON.GEMINI_CHAT_HISTORY) || [];
@@ -450,17 +447,17 @@ export class GeminiLLM
450447
Message.of({
451448
role: "model",
452449
parts: [{ functionCall }],
453-
}),
450+
})
454451
);
455452

456453
const observationResult = await chat.sendMessage(
457-
`Tool result: ${JSON.stringify(functionResponse)}`,
454+
`Tool result: ${JSON.stringify(functionResponse)}`
458455
);
459456
chatHistory.push(
460457
Message.of({
461458
role: "user",
462459
parts: [{ text: observationResult.response.text() }],
463-
}),
460+
})
464461
);
465462
}
466463
if (chatHistory.length > 50) chatHistory = chatHistory.slice(-50);

src/llms/groq/groq.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,7 @@ export class GroqLLM extends BaseLLM<any> implements vscode.Disposable {
2626
return [1, 2];
2727
}
2828

29+
// TODO Implement function call, especially think for this model.
2930
async generateText(message: string): Promise<string> {
3031
try {
3132
const { temperature, top_p, stop } = GROQ_CONFIG;

webviewUi/src/components/webview.tsx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@ export const WebviewUI = () => {
4949
const css = getChatCss("tokyo night");
5050
updateStyles(css);
5151
const [selectedModel, setSelectedModel] = useState("Gemini");
52-
const [selectedCodeBuddyMode, setSelectedCodeBuddyMode] = useState("Edit");
52+
const [selectedCodeBuddyMode, setSelectedCodeBuddyMode] = useState("Ask");
5353
const [userInput, setUserInput] = useState("");
5454
const [messages, setMessages] = useState<Message[]>([]);
5555
const [isBotLoading, setIsBotLoading] = useState(false);

webviewUi/src/constants/constant.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,5 +7,5 @@ export const modelOptions = [
77

88
export const codeBuddyMode = [
99
{ value: "Agent", label: "Agent" },
10-
{ value: "Edit", label: "Edit" },
10+
{ value: "Ask", label: "Ask" },
1111
];

0 commit comments

Comments (0)