Skip to content

Commit 7bc39db

Browse files
authored
fix: use system prompts for default endpoint type instances in next edit (#8660)
fix: use system prompts for default endpoint type models
1 parent 824e45c commit 7bc39db

File tree

1 file changed

+7
-3
lines changed

1 file changed

+7
-3
lines changed

core/nextEdit/NextEditProvider.ts

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -461,9 +461,13 @@ export class NextEditProvider {
461461
// prompts[1] extracts the user prompt from the system-user prompt pair.
462462
// NOTE: Stream is currently set to false, but this should ideally be a per-model flag.
463463
// Mercury Coder currently does not support streaming.
464-
const msg: ChatMessage = await llm.chat([prompts[1]], token, {
465-
stream: false,
466-
});
464+
const msg: ChatMessage = await llm.chat(
465+
this.endpointType === "fineTuned" ? [prompts[1]] : prompts,
466+
token,
467+
{
468+
stream: false,
469+
},
470+
);
467471

468472
if (typeof msg.content !== "string") {
469473
return undefined;

0 commit comments

Comments (0)