Commit 91aa2a3

Merge pull request #547 from devchat-ai/add_debug_config_for_code_completions
Refactor LLM Code for Completion Endpoint and Performance Optimization
2 parents (a05caa9 + a05e6ca) · commit 91aa2a3

2 files changed: 29 additions, 1 deletion

src/contributes/codecomplete/llm.ts

Lines changed: 25 additions & 1 deletion
@@ -186,13 +186,19 @@ export async function * ollamaDeepseekComplete(prompt: string) : AsyncGenerator<
 
 export async function * devchatComplete(prompt: string) : AsyncGenerator<CodeCompletionChunk> {
     const devchatEndpoint = DevChatConfig.getInstance().get("providers.devchat.api_base");
-    const completionApiBase = devchatEndpoint + "/completions";
+    const llmApiBase = DevChatConfig.getInstance().get("complete_api_base");
+    let completionApiBase = devchatEndpoint + "/completions";
+    if (llmApiBase) {
+        completionApiBase = llmApiBase + "/completions";
+    }
 
     let model = DevChatConfig.getInstance().get("complete_model");
     if (!model) {
         model = "ollama/starcoder2:15b";
     }
 
+    const startTimeLLM = process.hrtime();
+
     const headers = {
         'Content-Type': 'application/json'
     };
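
The override added above lets a dedicated completion backend (complete_api_base) take precedence over the general DevChat endpoint. A minimal sketch of the same fallback pattern, using a hypothetical getConfig helper in place of the real DevChatConfig API (an assumption for illustration only):

    // Hypothetical stand-in for DevChatConfig.getInstance().get(...)
    function getConfig(key: string): string | undefined {
        const values: Record<string, string | undefined> = {
            "providers.devchat.api_base": "https://devchat.example/api",   // assumed URL
            "complete_api_base": undefined,   // set this to route completions elsewhere
        };
        return values[key];
    }

    // The dedicated completion base, when configured, wins over the default endpoint.
    function resolveCompletionApiBase(): string {
        const devchatEndpoint = getConfig("providers.devchat.api_base");
        const llmApiBase = getConfig("complete_api_base");
        return (llmApiBase ?? devchatEndpoint ?? "") + "/completions";
    }
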
@@ -217,7 +223,19 @@ export async function * devchatComplete(prompt: string) : AsyncGenerator<CodeCom
     const stream = response.body as any;
     const decoder = new TextDecoder("utf-8");
 
+    const endTimeLLM = process.hrtime(startTimeLLM);
+    const durationLLM = endTimeLLM[0] + endTimeLLM[1] / 1e9;
+    logger.channel()?.debug(`LLM api post took ${durationLLM} seconds`);
+
+    let hasFirstLine = false;
+    let hasFirstChunk = false;
     for await (const chunk of stream) {
+        if (!hasFirstChunk) {
+            hasFirstChunk = true;
+            const endTimeFirstChunk = process.hrtime(startTimeLLM);
+            const durationFirstChunk = endTimeFirstChunk[0] + endTimeFirstChunk[1] / 1e9;
+            logger.channel()?.debug(`LLM first chunk took ${durationFirstChunk} seconds`);
+        }
         const chunkDataText = decoder.decode(chunk).trim();
         // split chunkText by "data: ", for example:
         // data: 123 data: 456 will split to ["", "data: 123 ", "data: 456"]
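
The measurements added above rely on Node's process.hrtime(): calling it with no argument returns a [seconds, nanoseconds] snapshot, and calling it again with that snapshot returns the elapsed delta, which the code converts to fractional seconds. A self-contained sketch of the same time-to-first-chunk pattern (the fake stream and console logging are assumptions, not the real logger):

    // Log overall duration and time-to-first-chunk for an async stream.
    async function* fakeStream(): AsyncGenerator<string> {
        yield "first chunk";
        yield "second chunk";
    }

    async function timedConsume(): Promise<void> {
        const start = process.hrtime();              // [seconds, nanoseconds] snapshot
        let seenFirstChunk = false;

        for await (const chunk of fakeStream()) {
            if (!seenFirstChunk) {
                seenFirstChunk = true;
                const [s, ns] = process.hrtime(start);   // delta since `start`
                console.debug(`first chunk took ${s + ns / 1e9} seconds`);
            }
            // ... handle `chunk` ...
        }

        const [s, ns] = process.hrtime(start);
        console.debug(`stream finished after ${s + ns / 1e9} seconds`);
    }
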
@@ -245,6 +263,12 @@ export async function * devchatComplete(prompt: string) : AsyncGenerator<CodeCom
 
             try {
                 const data = JSON.parse(chunkText.substring(5).trim());
+                if (!hasFirstLine && data.choices[0].text.indexOf("\n") !== -1) {
+                    hasFirstLine = true;
+                    const endTimeLine = process.hrtime(startTimeLLM);
+                    const durationLine = endTimeLine[0] + endTimeLine[1] / 1e9;
+                    logger.channel()?.debug(`LLM first line took ${durationLine} seconds`);
+                }
                 yield {
                     text: data.choices[0].text,
                     id: data.id
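
For context, the surrounding loop consumes server-sent-event style payloads: each message arrives as a "data: {json}" line, the code strips the five-character "data:" prefix, and JSON.parse yields an object with an OpenAI-style { id, choices: [{ text }] } shape. A rough sketch of that per-payload parsing step (the CompletionChunk shape and the "[DONE]" handling are assumptions based on the visible fields, not the full implementation):

    interface CompletionChunk {
        id: string;
        choices: { text: string }[];
    }

    // Parse one "data: {...}" payload; returns undefined for sentinels or bad JSON.
    function parseSsePayload(line: string): CompletionChunk | undefined {
        const trimmed = line.trim();
        if (!trimmed.startsWith("data:")) {
            return undefined;
        }
        const body = trimmed.substring(5).trim();    // drop the "data:" prefix
        if (!body || body === "[DONE]") {
            return undefined;
        }
        try {
            return JSON.parse(body) as CompletionChunk;
        } catch {
            return undefined;    // incomplete fragment; a real parser would buffer it
        }
    }
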

src/contributes/codecomplete/promptCreator.ts

Lines changed: 4 additions & 0 deletions
@@ -677,6 +677,10 @@ export async function createPrompt(filePath: string, fileContent: string, line:
     }
     if (completeModel.indexOf("deepseek") > -1) {
         prompt = "<|fim▁begin|>" + taskDescriptionContextWithCommentPrefix + neighborFileContext + recentEditContext + symbolContext + callDefContext + similarBlockContext + gitDiffContext + `${commentPrefix}<filename>${filePath}\n\n` + prefix + "<|fim▁hole|>" + suffix + "<|fim▁end|>";
+    } else if (completeModel.indexOf("starcoder") > -1) {
+        prompt = "<fim_prefix>" + taskDescriptionContextWithCommentPrefix + neighborFileContext + recentEditContext + symbolContext + callDefContext + similarBlockContext + gitDiffContext + `${commentPrefix}<filename>${filePath}\n\n` + prefix + "<fim_suffix>" + suffix + "<fim_middle>";
+    } else if (completeModel.indexOf("codestral") > -1) {
+        prompt = "<s>[SUFFIX]" + suffix + "[PREFIX]" + taskDescriptionContextWithCommentPrefix + neighborFileContext + recentEditContext + symbolContext + callDefContext + similarBlockContext + gitDiffContext + `${commentPrefix}<filename>${filePath}\n\n` + prefix;
     } else {
         prompt = "<fim_prefix>" + taskDescriptionContextWithCommentPrefix + neighborFileContext + recentEditContext + symbolContext + callDefContext + similarBlockContext + gitDiffContext + `${commentPrefix}<filename>${filePath}\n\n` + prefix + "<fim_suffix>" + suffix + "<fim_middle>";
     }
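
The branch added above picks a fill-in-the-middle (FIM) template by model family: DeepSeek uses <|fim▁begin|> / <|fim▁hole|> / <|fim▁end|>, StarCoder uses <fim_prefix> / <fim_suffix> / <fim_middle>, and Codestral places the suffix first with <s>[SUFFIX]...[PREFIX].... A condensed sketch of that dispatch, with all of the context strings collapsed into one `context` parameter (a simplification, not the actual createPrompt signature):

    // Select a FIM prompt template based on the configured completion model name.
    // `context` stands in for the concatenated task/neighbor/edit/symbol contexts.
    function buildFimPrompt(model: string, context: string, prefix: string, suffix: string): string {
        if (model.indexOf("deepseek") > -1) {
            return "<|fim▁begin|>" + context + prefix + "<|fim▁hole|>" + suffix + "<|fim▁end|>";
        } else if (model.indexOf("codestral") > -1) {
            // Codestral expects the suffix before the prefix and no explicit middle marker.
            return "<s>[SUFFIX]" + suffix + "[PREFIX]" + context + prefix;
        } else {
            // StarCoder-style tokens, which are also the default fallback.
            return "<fim_prefix>" + context + prefix + "<fim_suffix>" + suffix + "<fim_middle>";
        }
    }
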
