Commit 9392165

format
1 parent f667865 commit 9392165

10 files changed: +75 additions, −142 deletions

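Every hunk shown below makes the same mechanical change: a trailing comma is added after the last argument of each multi-line call, and the two docs/infra.* files are deleted. This pattern matches Prettier's trailingComma: "all" behaviour (the default since Prettier 3). The commit itself does not include a formatter config, so the snippet below is only an illustrative sketch, not the repository's actual setup:

// prettier.config.mjs — illustrative sketch only; not part of this commit.
/** @type {import("prettier").Config} */
export default {
  // "all" places a trailing comma after the last element of multi-line
  // argument lists, arrays, and object literals, producing hunks like those below.
  trailingComma: "all",
};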

docs/infra.dot

Lines changed: 0 additions & 13 deletions
This file was deleted.

docs/infra.svg

Lines changed: 0 additions & 54 deletions
This file was deleted.

src/agents/orchestrator.ts

Lines changed: 1 addition & 1 deletion
@@ -10,7 +10,7 @@ export class Orchestrator extends EventEmitter implements vscode.Disposable {
     super();
     this.disposables.push(
       this.onStatusChange(this.handleStatus.bind(this)),
-      this.onPromptGenerated(this.handlePromptGeneratedEvent.bind(this))
+      this.onPromptGenerated(this.handlePromptGeneratedEvent.bind(this)),
       // this.onError(this.handleError.bind(this)),
     );
   }

src/emitter/publisher.ts

Lines changed: 2 additions & 2 deletions
@@ -14,10 +14,10 @@ export class EventEmitter extends BaseEmitter<Record<string, IEventPayload>> {
   onBootstrap: vscode.Event<IEventPayload> = this.createEvent("onBootstrap");
   onFileUpload: vscode.Event<IEventPayload> = this.createEvent("onFileUpload");
   onFileProcessSuccess: vscode.Event<IEventPayload> = this.createEvent(
-    "onFileProcessSuccess"
+    "onFileProcessSuccess",
   );
   onActiveworkspaceUpdate: vscode.Event<IEventPayload> = this.createEvent(
-    "onActiveworkspaceUpdate"
+    "onActiveworkspaceUpdate",
   );
   onFilesRetrieved: vscode.Event<IEventPayload> =
     this.createEvent("onFilesRetrieved");

src/extension.ts

Lines changed: 15 additions & 15 deletions
@@ -116,44 +116,44 @@ export async function activate(context: vscode.ExtensionContext) {
   } = OLA_ACTIONS;
   const getComment = new Comments(
     `${USER_MESSAGE} generates the code comments...`,
-    context
+    context,
   );
   const getInLineChat = new InLineChat(
     `${USER_MESSAGE} generates a response...`,
-    context
+    context,
   );
   const generateOptimizeCode = new OptimizeCode(
     `${USER_MESSAGE} optimizes the code...`,
-    context
+    context,
   );
   const generateRefactoredCode = new RefactorCode(
     `${USER_MESSAGE} refactors the code...`,
-    context
+    context,
   );
   const explainCode = new ExplainCode(
     `${USER_MESSAGE} explains the code...`,
-    context
+    context,
   );
   const generateReview = new ReviewCode(
     `${USER_MESSAGE} reviews the code...`,
-    context
+    context,
   );
   const codeChartGenerator = new CodeChartGenerator(
     `${USER_MESSAGE} creates the code chart...`,
-    context
+    context,
   );
   const generateCommitMessage = new GenerateCommitMessage(
     `${USER_MESSAGE} generates a commit message...`,
-    context
+    context,
   );
   const generateInterviewQuestions = new InterviewMe(
     `${USER_MESSAGE} generates interview questions...`,
-    context
+    context,
   );

   const generateUnitTests = new GenerateUnitTest(
     `${USER_MESSAGE} generates unit tests...`,
-    context
+    context,
   );

   const actionMap = {
@@ -167,7 +167,7 @@ export async function activate(context: vscode.ExtensionContext) {
       new FixError(
         `${USER_MESSAGE} finds a solution to the error...`,
         context,
-        errorMessage
+        errorMessage,
       ).execute(errorMessage),
     [explain]: async () => await explainCode.execute(),
     [commitMessage]: async () =>
@@ -177,15 +177,15 @@ export async function activate(context: vscode.ExtensionContext) {
   };

   const subscriptions: vscode.Disposable[] = Object.entries(actionMap).map(
-    ([action, handler]) => vscode.commands.registerCommand(action, handler)
+    ([action, handler]) => vscode.commands.registerCommand(action, handler),
   );

   const selectedGenerativeAiModel = getConfigValue("generativeAi.option");

   const quickFix = new CodeActionsProvider();
   quickFixCodeAction = vscode.languages.registerCodeActionsProvider(
     { scheme: "file", language: "*" },
-    quickFix
+    quickFix,
   );

   agentEventEmmitter = new EventEmitter();
@@ -233,13 +233,13 @@ export async function activate(context: vscode.ExtensionContext) {
         webviewProviderClass,
         subscriptions,
         quickFixCodeAction,
-        agentEventEmmitter
+        agentEventEmmitter,
       );
     }
   } catch (error) {
     Memory.clear();
     vscode.window.showErrorMessage(
-      "An Error occured while setting up generative AI model"
+      "An Error occured while setting up generative AI model",
     );
     console.log(error);
   }

src/llms/deepseek/deepseek.ts

Lines changed: 22 additions & 22 deletions
@@ -49,8 +49,8 @@ export class DeepseekLLM
   private initializeDisposable(): void {
     this.disposables.push(
       vscode.workspace.onDidChangeConfiguration(() =>
-        this.handleConfigurationChange()
-      )
+        this.handleConfigurationChange(),
+      ),
     );
   }

@@ -87,7 +87,7 @@ export class DeepseekLLM

   public async generateText(
     prompt: string,
-    instruction?: string
+    instruction?: string,
   ): Promise<string> {
     try {
       const messages = [
@@ -145,7 +145,7 @@ export class DeepseekLLM
         undefined,
         undefined,
         undefined,
-        true
+        true,
       );
       // Note this prompt should be for system instruction only.
       const prompt = createPrompt(userInput);
@@ -203,7 +203,7 @@ export class DeepseekLLM
         {
           name: response.choices[0].message.function_call.name,
           args: JSON.parse(
-            response.choices[0].message.function_call.arguments
+            response.choices[0].message.function_call.arguments,
           ),
         },
       ];
@@ -246,8 +246,8 @@ export class DeepseekLLM
     const timeoutPromise = new Promise((_, reject) =>
       setTimeout(
         () => reject(new Error("Timeout Exceeded")),
-        this.timeOutMs
-      )
+        this.timeOutMs,
+      ),
     );

     const responsePromise = this.generateContentWithTools(userQuery);
@@ -276,17 +276,17 @@ export class DeepseekLLM
       ) {
         this.logger.warn(
           "Detecting no progress: same function calls repeated",
-          ""
+          "",
         );

         const regeneratedQuery = await this.generateText(
           userQuery,
-          "Rewrite the user query to more clearly and effectively express the user's underlying intent. The goal is to enable the system to retrieve and utilize the available tools more accurately. Identify the core information need and rephrase the query to highlight it. Consider what information the tools need to function optimally and ensure the query provides it."
+          "Rewrite the user query to more clearly and effectively express the user's underlying intent. The goal is to enable the system to retrieve and utilize the available tools more accurately. Identify the core information need and rephrase the query to highlight it. Consider what information the tools need to function optimally and ensure the query provides it.",
         );

         this.orchestrator.publish(
           "onQuery",
-          JSON.stringify(regeneratedQuery)
+          JSON.stringify(regeneratedQuery),
         );

         let answer = await this.processUserQuery(regeneratedQuery);
@@ -300,13 +300,13 @@ export class DeepseekLLM
       this.lastFunctionCalls.add(currentCallSignatures);
       if (this.lastFunctionCalls.size > 10) {
         this.lastFunctionCalls = new Set(
-          [...this.lastFunctionCalls].slice(-10)
+          [...this.lastFunctionCalls].slice(-10),
         );
       }

       if (toolCalls && toolCalls.length > 0) {
         this.logger.info(
-          `Function calls detected: ${JSON.stringify(toolCalls)}`
+          `Function calls detected: ${JSON.stringify(toolCalls)}`,
         );

         for (const functionCall of toolCalls) {
@@ -320,7 +320,7 @@ export class DeepseekLLM
             functionCall,
             functionResult,
             undefined,
-            false
+            false,
           );

           const snapShot = this.createSnapShot({
@@ -336,7 +336,7 @@ export class DeepseekLLM
           const retry = await vscode.window.showErrorMessage(
             `Function call failed: ${error.message}. Retry or abort?`,
             "Retry",
-            "Abort"
+            "Abort",
           );

           if (retry === "Retry") {
@@ -369,28 +369,28 @@ export class DeepseekLLM
       if (snapshot?.length > 0) {
         Memory.removeItems(
           "DEEPSEEK_SNAPSHOT",
-          Memory.get("DEEPSEEK_SNAPSHOT").length
+          Memory.get("DEEPSEEK_SNAPSHOT").length,
         );
       }

       return finalResult;
     } catch (error: any) {
       this.orchestrator.publish(
         "onError",
-        "Model not responding at this time, please try again"
+        "Model not responding at this time, please try again",
       );
       vscode.window.showErrorMessage("Error processing user query");
       this.logger.error(
         "Error generating queries, thoughts from user query",
-        error
+        error,
       );
       throw error;
     }
   }

   private async handleSingleFunctionCall(
     functionCall: any,
-    attempt: number = 0
+    attempt: number = 0,
   ): Promise<any> {
     const MAX_RETRIES = 3;
     const args = functionCall.args as Record<string, any>;
@@ -418,7 +418,7 @@ export class DeepseekLLM
       if (attempt < MAX_RETRIES) {
         this.logger.warn(
           `Retry attempt ${attempt + 1} for function ${name}`,
-          JSON.stringify({ error: error.message, args })
+          JSON.stringify({ error: error.message, args }),
         );
         return this.handleSingleFunctionCall(functionCall, attempt + 1);
       }
@@ -441,7 +441,7 @@ export class DeepseekLLM
     functionCall?: any,
     functionResponse?: any,
     chat?: any,
-    isInitialQuery: boolean = false
+    isInitialQuery: boolean = false,
   ): Promise<any[]> {
     let chatHistory: any = Memory.get(COMMON.DEEPSEEK_CHAT_HISTORY) || [];
     Memory.removeItems(COMMON.DEEPSEEK_CHAT_HISTORY);
@@ -471,15 +471,15 @@ export class DeepseekLLM
           },
         },
       ],
-      })
+      }),
     );

     // Add function result as user message
     chatHistory.push(
       Message.of({
         role: "user",
         content: `Tool result: ${JSON.stringify(functionResponse)}`,
-      })
+      }),
     );
   }
