Skip to content

Commit b769d94

Browse files
committed
increase the default input and response limits, as current LLMs generally support longer contexts
1 parent c2d6a4b commit b769d94

File tree

2 files changed

+6
-6
lines changed

2 files changed

+6
-6
lines changed

src/config/index.mjs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -361,7 +361,7 @@ export const defaultConfig = {
361361

362362
// advanced
363363

364-
maxResponseTokenLength: 1000,
364+
maxResponseTokenLength: 2000,
365365
maxConversationContextLength: 9,
366366
temperature: 1,
367367
customChatGptWebApiUrl: 'https://chatgpt.com',

src/utils/crop-text.mjs

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -30,9 +30,9 @@ const clamp = (v, min, max) => {
3030

3131
export async function cropText(
3232
text,
33-
maxLength = 4000,
34-
startLength = 400,
35-
endLength = 300,
33+
maxLength = 8000,
34+
startLength = 800,
35+
endLength = 600,
3636
tiktoken = true,
3737
) {
3838
const userConfig = await getUserConfig()
@@ -45,9 +45,9 @@ export async function cropText(
4545
).match(/[- (]*([0-9]+)k/)?.[1]
4646
if (k) {
4747
maxLength = Number(k) * 1000
48-
maxLength -= 100 + clamp(userConfig.maxResponseTokenLength, 1, maxLength - 1000)
48+
maxLength -= 100 + clamp(userConfig.maxResponseTokenLength, 1, maxLength - 2000)
4949
} else {
50-
maxLength -= 100 + clamp(userConfig.maxResponseTokenLength, 1, maxLength - 1000)
50+
maxLength -= 100 + clamp(userConfig.maxResponseTokenLength, 1, maxLength - 2000)
5151
}
5252

5353
const splits = text.split(/[,?!;]/).map((s) => s.trim())

0 commit comments

Comments (0)