Skip to content

Commit 656bf3c

Browse files
authored
feat: add FalconChatPromptWrapper (#53)
1 parent c94a7fa commit 656bf3c

File tree

5 files changed

+51
-4
lines changed

5 files changed

+51
-4
lines changed

.github/PULL_REQUEST_TEMPLATE.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@
2424
-->
2525

2626
- [ ] Code is up-to-date with the `master` branch
27-
- [ ] `npm run format` to apply prettier formatting
27+
- [ ] `npm run format` to apply eslint formatting
2828
- [ ] `npm run test` passes with this change
2929
- [ ] This pull request links relevant issues as `Fixes #0000`
3030
- [ ] There are new or updated unit tests validating the change

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -322,7 +322,7 @@ Optional:
322322
share false information."]
323323
-w, --wrapper Chat wrapper to use. Use `auto` to automatically select a wrapper based on
324324
the model's BOS token
325-
[string] [choices: "auto", "general", "llamaChat", "chatML"] [default: "general"]
325+
[string] [choices: "auto", "general", "llamaChat", "chatML", "falconChat"] [default: "general"]
326326
-c, --contextSize Context size to use for the model [number] [default: 4096]
327327
-g, --grammar Restrict the model response to a specific grammar, like JSON for example
328328
[string] [choices: "text", "json", "list", "arithmetic", "japanese", "chess"] [default: "text"]
src/chatWrappers/FalconChatPromptWrapper.ts

Lines changed: 42 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,42 @@
1+
import {ChatPromptWrapper} from "../ChatPromptWrapper.js";
2+
import {getTextCompletion} from "../utils/getTextCompletion.js";
3+
4+
export class FalconChatPromptWrapper extends ChatPromptWrapper {
5+
public readonly wrapperName: string = "Falcon";
6+
private readonly _instructionName: string;
7+
private readonly _responseName: string;
8+
9+
public constructor({instructionName = "User", responseName = "Assistant"}: {instructionName?: string, responseName?: string} = {}) {
10+
super();
11+
12+
this._instructionName = instructionName;
13+
this._responseName = responseName;
14+
}
15+
16+
public override wrapPrompt(prompt: string, {systemPrompt, promptIndex, lastStopString, lastStopStringSuffix}: {
17+
systemPrompt: string, promptIndex: number, lastStopString: string | null, lastStopStringSuffix: string | null
18+
}) {
19+
if (promptIndex === 0)
20+
return systemPrompt + `\n${this._instructionName}: ` + prompt + `\n${this._responseName}: `;
21+
22+
return this._getPromptPrefix(lastStopString, lastStopStringSuffix) + prompt + `\n${this._responseName}: `;
23+
}
24+
25+
public override getStopStrings(): string[] {
26+
return [
27+
`\n${this._instructionName}: `,
28+
`\n${this._responseName}:`
29+
];
30+
}
31+
32+
public override getDefaultStopString(): string {
33+
return `\n${this._instructionName}: `;
34+
}
35+
36+
private _getPromptPrefix(lastStopString: string | null, lastStopStringSuffix: string | null) {
37+
return getTextCompletion((lastStopString ?? "") + (lastStopStringSuffix ?? ""), [
38+
`\n${this._instructionName}: `,
39+
`${this._instructionName}: `
40+
]) ?? `\n${this._instructionName}: `;
41+
}
42+
}

src/cli/commands/ChatCommand.ts

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,13 +9,14 @@ import {GeneralChatPromptWrapper} from "../../chatWrappers/GeneralChatPromptWrap
99
import {ChatMLPromptWrapper} from "../../chatWrappers/ChatMLPromptWrapper.js";
1010
import {getChatWrapperByBos} from "../../chatWrappers/createChatWrapperByBos.js";
1111
import {ChatPromptWrapper} from "../../ChatPromptWrapper.js";
12+
import {FalconChatPromptWrapper} from "../../chatWrappers/FalconChatPromptWrapper.js";
1213
import type {LlamaGrammar} from "../../llamaEvaluator/LlamaGrammar.js";
1314

1415
type ChatCommand = {
1516
model: string,
1617
systemInfo: boolean,
1718
systemPrompt: string,
18-
wrapper: "auto" | "general" | "llamaChat" | "chatML",
19+
wrapper: "auto" | "general" | "llamaChat" | "chatML" | "falconChat",
1920
contextSize: number,
2021
grammar: "text" | Parameters<typeof LlamaGrammar.getFor>[0],
2122
threads: number,
@@ -58,7 +59,7 @@ export const ChatCommand: CommandModule<object, ChatCommand> = {
5859
alias: "w",
5960
type: "string",
6061
default: "general" as ChatCommand["wrapper"],
61-
choices: ["auto", "general", "llamaChat", "chatML"] satisfies ChatCommand["wrapper"][],
62+
choices: ["auto", "general", "llamaChat", "chatML", "falconChat"] satisfies ChatCommand["wrapper"][],
6263
description: "Chat wrapper to use. Use `auto` to automatically select a wrapper based on the model's BOS token",
6364
group: "Optional:"
6465
})
@@ -213,6 +214,8 @@ function getChatWrapper(wrapper: ChatCommand["wrapper"], bos: string | null): Ch
213214
return new LlamaChatPromptWrapper();
214215
case "chatML":
215216
return new ChatMLPromptWrapper();
217+
case "falconChat":
218+
return new FalconChatPromptWrapper();
216219
default:
217220
}
218221

src/index.ts

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@ import {EmptyChatPromptWrapper} from "./chatWrappers/EmptyChatPromptWrapper.js";
88
import {LlamaChatPromptWrapper} from "./chatWrappers/LlamaChatPromptWrapper.js";
99
import {GeneralChatPromptWrapper} from "./chatWrappers/GeneralChatPromptWrapper.js";
1010
import {ChatMLPromptWrapper} from "./chatWrappers/ChatMLPromptWrapper.js";
11+
import {FalconChatPromptWrapper} from "./chatWrappers/FalconChatPromptWrapper.js";
1112
import {getChatWrapperByBos} from "./chatWrappers/createChatWrapperByBos.js";
1213

1314
import {type ConversationInteraction, type Token} from "./types.js";
@@ -29,6 +30,7 @@ export {
2930
LlamaChatPromptWrapper,
3031
GeneralChatPromptWrapper,
3132
ChatMLPromptWrapper,
33+
FalconChatPromptWrapper,
3234
getChatWrapperByBos,
3335
type Token
3436
};

0 commit comments

Comments
 (0)