Commit 15fa99c

Merge pull request #3415 from Kilo-Org/catrielmuller/autocomplete-non-fim
Autocomplete Continue Experimental FIM Endpoint
2 parents c17b88d + 8a1f36d commit 15fa99c

File tree

13 files changed: +751 additions, -51 deletions


src/esbuild.mjs

Lines changed: 3 additions & 0 deletions
@@ -64,6 +64,9 @@ async function main() {
 	// Copy walkthrough files to dist directory
 	copyPaths([["walkthrough", "walkthrough"]], srcDir, distDir)
 
+	// Copy tree-sitter files to dist directory
+	copyPaths([["services/continuedev/tree-sitter", "tree-sitter"]], srcDir, distDir)
+
 	// Copy JSDOM xhr-sync-worker.js to fix runtime resolution
 	const jsdomWorkerDest = path.join(distDir, "xhr-sync-worker.js")
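For context, a minimal sketch of what the copyPaths helper invoked above could look like; its real implementation in esbuild.mjs is not part of this diff, so the body (including the use of fs.cpSync) is an assumption for illustration only.

import * as fs from "node:fs"
import * as path from "node:path"

// Hypothetical stand-in for the copyPaths helper used by esbuild.mjs:
// copies each [from, to] pair from srcDir into distDir, recursively.
function copyPaths(pairs: [string, string][], srcDir: string, distDir: string): void {
	for (const [from, to] of pairs) {
		fs.cpSync(path.join(srcDir, from), path.join(distDir, to), { recursive: true })
	}
}

// The hunk above adds one more pair so the tree-sitter assets ship next to the bundle:
// copyPaths([["services/continuedev/tree-sitter", "tree-sitter"]], srcDir, distDir)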

src/services/autocomplete/AutocompleteModel.ts

Lines changed: 32 additions & 6 deletions
@@ -1,5 +1,4 @@
 import {
-	AUTOCOMPLETE_PROVIDER_MODELS,
 	AutocompleteProviderKey,
 	defaultProviderUsabilityChecker,
 	getKiloBaseUriFromToken,

@@ -14,7 +13,15 @@ import { ApiStreamChunk } from "../../api/transform/stream"
 import { ILLM, LLMOptions } from "../continuedev/core/index.js"
 import { DEFAULT_AUTOCOMPLETE_OPTS } from "../continuedev/core/util/parameters.js"
 import Mistral from "../continuedev/core/llm/llms/Mistral"
-import { OpenAI } from "../continuedev/core/llm/llms/OpenAI"
+import OpenRouter from "../continuedev/core/llm/llms/OpenRouter"
+import KiloCode from "../continuedev/core/llm/llms/KiloCode"
+
+export const AUTOCOMPLETE_PROVIDER_MODELS = {
+	mistral: "codestral-2501",
+	kilocode: "codestral-2501",
+	openrouter: "mistralai/codestral-2501",
+	bedrock: "mistral.codestral-2501-v1:0",
+} as const
 
 export class AutocompleteModel {
 	private apiHandler: ApiHandler | null = null

@@ -118,6 +125,16 @@ export class AutocompleteModel {
 			useCache: false, // Disable caching for autocomplete
 		},
 		uniqueId: `autocomplete-${provider}-${Date.now()}`,
+		// Add env for KiloCode metadata (organizationId and tester suppression)
+		env: config.organizationId
+			? {
+					kilocodeOrganizationId: config.organizationId,
+					// Add tester suppression if configured
+					...(this.profile?.kilocodeTesterWarningsDisabledUntil && {
+						kilocodeTesterWarningsDisabledUntil: this.profile.kilocodeTesterWarningsDisabledUntil,
+					}),
+				}
+			: undefined,
 	}
 
 	// Create appropriate LLM instance based on provider

@@ -131,7 +148,12 @@
 	/**
 	 * Extracts provider-specific configuration (API key, base URL, model) from this.profile
 	 */
-	private extractProviderConfig(): { apiKey: string; apiBase: string; model: string } | null {
+	private extractProviderConfig(): {
+		apiKey: string
+		apiBase: string
+		model: string
+		organizationId?: string
+	} | null {
 		if (!this.profile?.apiProvider) {
 			return null
 		}

@@ -158,8 +180,9 @@
 			}
 			return {
 				apiKey: this.profile.kilocodeToken,
-				apiBase: `${getKiloBaseUriFromToken(this.profile.kilocodeToken)}/openrouter/api/v1`,
+				apiBase: `${getKiloBaseUriFromToken(this.profile.kilocodeToken)}/api/openrouter/v1`,
 				model,
+				organizationId: this.profile.kilocodeOrganizationId,
 			}
 
 		case "openrouter":

@@ -194,9 +217,12 @@
 			return new Mistral(options)
 
 		case "kilocode":
+			// Use dedicated KiloCode class with custom headers and routing
+			return new KiloCode(options)
+
 		case "openrouter":
-			// Both use OpenAI-compatible API
-			return new OpenAI(options)
+			// Use standard OpenRouter
+			return new OpenRouter(options)
 
 		case "bedrock":
 			// Bedrock would need a custom implementation
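Taken together, these hunks mean the provider key now selects both a default Codestral model and a dedicated LLM class. A rough sketch of that selection, using only names that appear in this diff; the option building around it is simplified and the helper itself is hypothetical, not the actual AutocompleteModel code.

import Mistral from "../continuedev/core/llm/llms/Mistral"
import KiloCode from "../continuedev/core/llm/llms/KiloCode"
import OpenRouter from "../continuedev/core/llm/llms/OpenRouter"
import type { ILLM, LLMOptions } from "../continuedev/core/index.js"
import { AUTOCOMPLETE_PROVIDER_MODELS } from "./AutocompleteModel"

type Provider = keyof typeof AUTOCOMPLETE_PROVIDER_MODELS

// Sketch only: AutocompleteModel builds the full LLMOptions (apiKey, apiBase,
// env, uniqueId, ...) before reaching a switch like this one.
function createAutocompleteLLM(provider: Provider, base: Omit<LLMOptions, "model">): ILLM | null {
	// The map supplies the per-provider Codestral model id.
	const options: LLMOptions = { ...base, model: AUTOCOMPLETE_PROVIDER_MODELS[provider] }

	switch (provider) {
		case "mistral":
			return new Mistral(options)
		case "kilocode":
			// Dedicated class adds KiloCode headers and routing.
			return new KiloCode(options)
		case "openrouter":
			return new OpenRouter(options)
		default:
			// "bedrock" would still need a custom implementation.
			return null
	}
}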

src/services/autocomplete/AutocompleteProvider.ts

Lines changed: 1 addition & 0 deletions
@@ -13,6 +13,7 @@ import { ClineProvider } from "../../core/webview/ClineProvider"
 import { MinimalConfigProvider } from "../continuedev/core/autocomplete/MinimalConfig"
 import { VsCodeIde } from "../continuedev/core/vscode-test-harness/src/VSCodeIde"
 import { ContinueCompletionProvider } from "../continuedev/core/vscode-test-harness/src/autocomplete/completionProvider"
+import OpenRouter from "../continuedev/core/llm/llms/OpenRouter"
 
 export class AutocompleteProvider {
 	private static instance: AutocompleteProvider | null = null

src/services/autocomplete/__tests__/AutocompleteModel.spec.ts

Lines changed: 3 additions & 3 deletions
@@ -31,7 +31,7 @@ describe("AutocompleteModel", () => {
 		expect(result).toBeDefined()
 		expect(Mistral).toHaveBeenCalledWith(
 			expect.objectContaining({
-				model: "codestral-latest",
+				model: "codestral-2501",
 				apiKey: "test-mistral-key",
 				apiBase: "https://custom.mistral.ai/v1/",
 				contextLength: 32000,

@@ -78,7 +78,7 @@ describe("AutocompleteModel", () => {
 		expect(result).toBeDefined()
 		expect(OpenAI).toHaveBeenCalledWith(
 			expect.objectContaining({
-				model: "mistralai/codestral-2508",
+				model: "codestral-2501",
 				apiKey: "test-kilocode-token",
 			}),
 		)

@@ -109,7 +109,7 @@ describe("AutocompleteModel", () => {
 		expect(result).toBeDefined()
 		expect(OpenAI).toHaveBeenCalledWith(
 			expect.objectContaining({
-				model: "mistralai/codestral-2508",
+				model: "mistralai/codestral-2501",
 				apiKey: "test-openrouter-key",
 				apiBase: "https://custom.openrouter.ai/api/v1",
 			}),
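The spec asserts against mocked LLM constructors, so the changes above only update the expected default model ids per provider. A hedged illustration of that assertion pattern; the scaffolding below is illustrative, not copied from the spec file, and assumes Vitest-style mocks.

import { describe, expect, it, vi } from "vitest"

// Illustrative mock of an LLM constructor, standing in for Mistral/OpenAI above.
const Mistral = vi.fn()

describe("autocomplete model defaults (illustrative)", () => {
	it("passes the per-provider default model to the LLM constructor", () => {
		// In the real spec, AutocompleteModel builds and passes these options.
		new Mistral({ model: "codestral-2501", apiKey: "test-mistral-key" })

		expect(Mistral).toHaveBeenCalledWith(
			expect.objectContaining({ model: "codestral-2501", apiKey: "test-mistral-key" }),
		)
	})
})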

src/services/continuedev/core/autocomplete/CompletionProvider.ts

Lines changed: 1 addition & 7 deletions
@@ -55,7 +55,6 @@ export class CompletionProvider {
 			return undefined
 		}
 
-
 		// Temporary fix for JetBrains autocomplete bug as described in https://github.com/continuedev/continue/pull/3022
 		if (llm.model === undefined && llm.completionOptions?.model !== undefined) {
 			llm.model = llm.completionOptions.model

@@ -76,7 +75,6 @@ export class CompletionProvider {
 			llm.useLegacyCompletionsEndpoint = true
 		}
 
-		console.log('using LLM', llm)
 		return llm
 	}
 

@@ -123,7 +121,7 @@ export class CompletionProvider {
 
 		// Enable static contextualization if defined.
 		if (config?.experimental?.enableStaticContextualization) {
-			options.experimental_enableStaticContextualization = true
+			options.experimental_enableStaticContextualization = false
 		}
 
 		return options

@@ -138,7 +136,6 @@ export class CompletionProvider {
 		// Create abort signal if not given
 		if (!token) {
 			const controller = this.loggingService.createAbortController(input.completionId)
-			console.log('creating abort token because none given')
 			token = controller.signal
 		}
 		const startTime = Date.now()

@@ -214,12 +211,9 @@ export class CompletionProvider {
 
 		// Don't postprocess if aborted
 		if (token.aborted) {
-			console.log('aborted')
 			return undefined
 		}
 
-		console.log('raw completion', completion)
-
 		const processedCompletion = helper.options.transform
 			? postprocessCompletion({
 				completion,
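Apart from deleting console.log noise and disabling the experimental static-contextualization flag, this file leans on the standard AbortController pattern that the remaining lines show: create a signal when the caller did not pass one, then bail out before postprocessing if it fired. A small self-contained sketch of that pattern, independent of the CompletionProvider internals.

// Standard AbortController pattern, as used around the hunks above.
async function provideCompletionSketch(
	generate: () => Promise<string>,
	token?: AbortSignal,
): Promise<string | undefined> {
	// Create an abort signal if none was given.
	if (!token) {
		const controller = new AbortController()
		token = controller.signal
		// In CompletionProvider, a logging service owns the controller so the
		// request can be cancelled by completion id.
	}

	const completion = await generate()

	// Don't postprocess if aborted.
	if (token.aborted) {
		return undefined
	}

	return completion.trimEnd()
}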

src/services/continuedev/core/autocomplete/templating/AutocompleteTemplate.ts

Lines changed: 12 additions & 11 deletions
@@ -373,15 +373,16 @@ export function getTemplateForModel(model: string): AutocompleteTemplate {
 		return codegeexFimTemplate
 	}
 
-	if (
-		lowerCaseModel.includes("gpt") ||
-		lowerCaseModel.includes("davinci-002") ||
-		lowerCaseModel.includes("claude") ||
-		lowerCaseModel.includes("granite3") ||
-		lowerCaseModel.includes("granite-3")
-	) {
-		return holeFillerTemplate
-	}
-
-	return stableCodeFimTemplate
+	// if (
+	// 	lowerCaseModel.includes("gpt") ||
+	// 	lowerCaseModel.includes("davinci-002") ||
+	// 	lowerCaseModel.includes("claude") ||
+	// 	lowerCaseModel.includes("granite3") ||
+	// 	lowerCaseModel.includes("granite-3")
+	// ) {
+	// Default Fallback Mode
+	return holeFillerTemplate
+	//}
+
+	//return stableCodeFimTemplate
 }
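The effect of this hunk is that any model not caught by an earlier special case now falls back to holeFillerTemplate instead of stableCodeFimTemplate. A simplified sketch of the resulting control flow, with the earlier branches elided and the template objects replaced by string tags for brevity; this is not the actual function body.

// Simplified view of getTemplateForModel after this commit.
function getTemplateForModelSketch(model: string): string {
	const lowerCaseModel = model.toLowerCase()

	// Earlier, model-specific branches (e.g. codegeex) still return their own
	// FIM templates and are elided here.
	if (lowerCaseModel.includes("codegeex")) {
		return "codegeexFimTemplate"
	}

	// Default fallback: previously stableCodeFimTemplate, now holeFillerTemplate,
	// even for models that look like gpt/claude/granite.
	return "holeFillerTemplate"
}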

src/services/continuedev/core/index.d.ts

Lines changed: 16 additions & 13 deletions
@@ -333,7 +333,6 @@ export interface ILLMLogger {
 
 export interface LLMOptions {
 	model: string
-
 	title?: string
 	uniqueId?: string
 	autocompleteOptions?: Partial<TabAutocompleteOptions>

@@ -346,6 +345,7 @@ export interface LLMOptions {
 	useLegacyCompletionsEndpoint?: boolean
 	capabilities?: ModelCapability
 	env?: Record<string, string | number | boolean>
+	promptTemplates?: Partial<Record<keyof PromptTemplates, PromptTemplate>>
 }
 
 type RequireAtLeastOne<T, Keys extends keyof T = keyof T> = Pick<T, Exclude<keyof T, Keys>> &

@@ -511,6 +511,9 @@ export interface BaseCompletionOptions {
 	maxTokens?: number
 	raw?: boolean
 	stream?: boolean
+	reasoning?: boolean
+	reasoningBudgetTokens?: number
+	promptCaching?: boolean
 }
 
 export interface ModelCapability {

@@ -617,18 +620,18 @@ export class SignatureInformation {
 	 * The label of this signature. Will be shown in
 	 * the UI.
 	 */
-	label: strin
-	/**
-	 * The parameters of this signature.
-	 */
-	parameters: ParameterInformation[];
-
-	/**
-	 * The index of the active parameter.
-	 *
-	 * If provided, this is used in place of {@linkcode SignatureHelp.activeParameter}.
-	 */
-	activeParameter?: number;g
+	label: string
+	/**
+	 * The parameters of this signature.
+	 */
+	parameters: ParameterInformation[]
+
+	/**
+	 * The index of the active parameter.
+	 *
+	 * If provided, this is used in place of {@linkcode SignatureHelp.activeParameter}.
+	 */
+	activeParameter?: number
 }
 
 export type ConfigMergeType = "merge" | "overwrite"
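For illustration, a hedged example of how the three fields added to BaseCompletionOptions might be set when targeting a reasoning-capable model; the values below are placeholders, not defaults from this codebase, and the relative import path is assumed.

import type { BaseCompletionOptions } from "./index.js"

// Placeholder values, only to show the new optional fields in context.
const completionOptions: BaseCompletionOptions = {
	maxTokens: 256,
	stream: true,
	reasoning: true,             // request reasoning output where the provider supports it
	reasoningBudgetTokens: 1024, // cap the tokens spent on reasoning
	promptCaching: true,         // opt in to provider-side prompt caching
}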

src/services/continuedev/core/llm/index.ts

Lines changed: 2 additions & 1 deletion
@@ -15,7 +15,7 @@ import {
 	TabAutocompleteOptions,
 	Usage,
 } from "../index.js"
-import type { ILLMInteractionLog, ILLMLogger } from "../index.js"
+import type { CacheBehavior, ILLMInteractionLog, ILLMLogger } from "../index.js"
 import { mergeJson } from "../util/merge.js"
 import { renderChatMessage } from "../util/messageContent.js"
 import { TokensBatchingService } from "../util/TokensBatchingService.js"

@@ -86,6 +86,7 @@ export abstract class BaseLLM implements ILLM {
 	llmRequestHook?: (model: string, prompt: string) => any
 	apiKey?: string
 	apiBase?: string
+	cacheBehavior?: CacheBehavior
 	capabilities?: ModelCapability
 
 	lastRequestId: string | undefined
