Commit ee6adae

ref: enable deepseek
1 parent 22b28e8 commit ee6adae

5 files changed: +44 -16 lines changed


package.json

Lines changed: 10 additions & 9 deletions
@@ -2,7 +2,7 @@
   "name": "llama-coder",
   "displayName": "Llama Coder",
   "description": "Better and self-hosted Github Copilot replacement",
-  "version": "0.0.6",
+  "version": "0.0.7",
   "icon": "icon.png",
   "publisher": "ex3ndr",
   "repository": {
@@ -70,14 +70,15 @@
           "codellama:34b-code-q4_K_S",
           "codellama:34b-code-q4_K_M",
           "codellama:34b-code-q6_K",
-          "deepseek-coder:6.7b-instruct-q4_K_S",
-          "deepseek-coder:6.7b-instruct-q4_K_M",
-          "deepseek-coder:6.7b-instruct-q8_0",
-          "deepseek-coder:6.7b-instruct-fp16",
-          "deepseek-coder:33b-instruct-q4_K_S",
-          "deepseek-coder:33b-instruct-q4_K_M",
-          "deepseek-coder:33b-instruct-q8_0",
-          "deepseek-coder:33b-instruct-fp16"
+          "deepseek-coder:1.3b-base-q4_0",
+          "deepseek-coder:1.3b-base-q4_1",
+          "deepseek-coder:1.3b-base-q8_0",
+          "deepseek-coder:6.7b-base-q4_K_S",
+          "deepseek-coder:6.7b-base-q8_0",
+          "deepseek-coder:6.7b-base-fp16",
+          "deepseek-coder:33b-base-q4_K_S",
+          "deepseek-coder:33b-base-q4_K_M",
+          "deepseek-coder:33b-base-fp16"
         ],
         "default": "codellama:7b-code-q4_K_M",
         "description": "Inference model to use"

src/prompts/adaptors/adaptPrompt.ts

Lines changed: 17 additions & 3 deletions
@@ -1,10 +1,24 @@
-export function adaptPrompt(args: { model: string, prefix: string, suffix: string }): string {
+export function adaptPrompt(args: { model: string, prefix: string, suffix: string }): { prompt: string, stop: string[] } {
 
     // Starcoder format
     if (args.model.startsWith('deepseek-coder')) {
-        return `<|fim▁begin|>${args.prefix}<|fim▁hole|>${args.suffix}<|fim▁end|>`;
+
+        if (args.suffix.length < 1000) {
+            return {
+                prompt: args.prefix,
+                stop: [`<END>`]
+            };
+        }
+
+        return {
+            prompt: `<|fim▁begin|>${args.prefix}<|fim▁hole|>${args.suffix}<|fim▁end|>`,
+            stop: [`<|fim▁begin|>`, `<|fim▁hole|>`, `<|fim▁end|>`, `<END>`]
+        };
     }
 
     // Codellama format
-    return `<PRE> ${args.prefix} <SUF>${args.suffix} <MID>`;
+    return {
+        prompt: `<PRE> ${args.prefix} <SUF>${args.suffix} <MID>`,
+        stop: [`<PRE>`, `<SUF>`, `<MID>`, `<END>`]
+    };
 }
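For reference, a minimal sketch of how the new return shape behaves, assuming only what the hunk above shows. The import path and the sample prefixes/suffixes are illustrative; the model tags are taken from the package.json enum in this commit.

// Sketch only: exercises adaptPrompt as changed in this commit.
// The relative import path is an assumption about where such a snippet would live.
import { adaptPrompt } from './adaptPrompt';

// DeepSeek model with a long suffix (>= 1000 chars) -> fill-in-the-middle prompt
// plus the DeepSeek FIM tokens as stop sequences.
const deepseek = adaptPrompt({
    model: 'deepseek-coder:6.7b-base-q4_K_S',
    prefix: 'function add(a: number, b: number) {\n    return ',
    suffix: ';\n}\n' + '// trailing context\n'.repeat(100), // long suffix, so the FIM branch is taken
});
// deepseek.prompt -> '<|fim▁begin|>...<|fim▁hole|>...<|fim▁end|>'
// deepseek.stop   -> ['<|fim▁begin|>', '<|fim▁hole|>', '<|fim▁end|>', '<END>']
// With a suffix shorter than 1000 chars, the prompt is just the raw prefix and stop is ['<END>'].

// Codellama models keep the <PRE>/<SUF>/<MID> infill format, now with explicit stop tokens.
const codellama = adaptPrompt({
    model: 'codellama:7b-code-q4_K_M',
    prefix: 'def add(a, b):\n    return ',
    suffix: '\n',
});
// codellama.prompt -> '<PRE> ... <SUF>... <MID>'
// codellama.stop   -> ['<PRE>', '<SUF>', '<MID>', '<END>']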

src/prompts/autocomplete.ts

Lines changed: 4 additions & 1 deletion
@@ -14,12 +14,15 @@ export async function autocomplete(args: {
     canceled?: () => boolean,
 }): Promise<string> {
 
+    let prompt = adaptPrompt({ prefix: args.prefix, suffix: args.suffix, model: args.model });
+
     // Calculate arguments
     let data = {
         model: args.model,
-        prompt: adaptPrompt({ prefix: args.prefix, suffix: args.suffix, model: args.model }),
+        prompt: prompt.prompt,
         raw: true,
         options: {
+            stop: prompt.stop,
             num_predict: args.maxTokens,
             temperature: args.temperature
         }
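The hunk only shows the request body being assembled; how it reaches Ollama is outside this diff. As a rough sketch of the shape of that call, using Ollama's /api/generate endpoint (the endpoint URL, non-streaming mode, and helper name below are assumptions, not taken from this commit):

// Sketch only: shows how a `data` object like the one above could be posted to Ollama.
// generateOnce is a hypothetical helper; the extension's real transport code is not in this diff.
async function generateOnce(endpoint: string, data: {
    model: string,
    prompt: string,
    raw: boolean,
    options: { stop: string[], num_predict: number, temperature: number }
}): Promise<string> {
    const res = await fetch(`${endpoint}/api/generate`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ ...data, stream: false }),
    });
    if (!res.ok) {
        throw new Error(`Ollama request failed: ${res.status}`);
    }
    // With stream: false, Ollama returns a single JSON object whose `response`
    // field holds the generated completion text.
    const body = await res.json() as { response: string };
    return body.response;
}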

src/prompts/filter.ts

Lines changed: 3 additions & 3 deletions
@@ -13,9 +13,9 @@ export function isNotNeeded(doc: vscode.TextDocument, position: vscode.Position,
     // }
 
     // Avoid autocomplete when system menu is shown (ghost text is hidden anyway)
-    if (context.selectedCompletionInfo) {
-        return true;
-    }
+    // if (context.selectedCompletionInfo) {
+    //     return true;
+    // }
 
     return false;
 }
Lines changed: 10 additions & 0 deletions
@@ -0,0 +1,10 @@
+// import path from 'path';
+
+// let languages: { [key: string]: {} } = {
+
+// };
+
+// export function fileHeaderProcessor(uri: string, languageId: string): string | null {
+//     let basename = path.basename(uri);
+//     let extname =
+// }
