Skip to content

Commit 0347a40

Browse files
committed
feat: Max Conversation Length setting (#241)
1 parent bc273ea commit 0347a40

File tree

10 files changed

+56
-9
lines changed

10 files changed

+56
-9
lines changed

src/_locales/en/main.json

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -101,5 +101,6 @@
101101
"Store to Independent Conversation Page": "Store to Independent Conversation Page",
102102
"Keep Conversation Window in Background": "Keep conversation window in background, so that you can use shortcut keys to call it up in any program",
103103
"Max Response Token Length": "Max Response Token Length",
104+
"Max Conversation Length": "Max Conversation Length",
104105
"Always pin the floating window": "Always pin the floating window"
105106
}

src/_locales/zh-hans/main.json

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -101,5 +101,6 @@
101101
"Store to Independent Conversation Page": "收纳到独立对话页",
102102
"Keep Conversation Window in Background": "保持对话窗口在后台, 以便在任何程序中使用快捷键呼出",
103103
"Max Response Token Length": "响应的最大token长度",
104+
"Max Conversation Length": "对话处理的最大长度",
104105
"Always pin the floating window": "总是固定浮动窗口"
105106
}

src/_locales/zh-hant/main.json

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -101,5 +101,6 @@
101101
"Store to Independent Conversation Page": "收納到獨立對話頁",
102102
"Keep Conversation Window in Background": "保持對話窗口在後臺, 以便在任何程序中使用快捷鍵呼出",
103103
"Max Response Token Length": "響應的最大token長度",
104+
"Max Conversation Length": "對話處理的最大長度",
104105
"Always pin the floating window": "總是固定浮動視窗"
105106
}

src/background/apis/azure-openai-api.mjs

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,10 @@ export async function generateAnswersWithAzureOpenaiApi(port, question, session)
1313
const { controller, messageListener } = setAbortController(port)
1414
const config = await getUserConfig()
1515

16-
const prompt = getConversationPairs(session.conversationRecords, false)
16+
const prompt = getConversationPairs(
17+
session.conversationRecords.slice(-config.maxConversationContextLength),
18+
false,
19+
)
1720
prompt.unshift({ role: 'system', content: await getChatSystemPromptBase() })
1821
prompt.push({ role: 'user', content: question })
1922

src/background/apis/custom-api.mjs

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -21,10 +21,13 @@ import { getCustomApiPromptBase, pushRecord, setAbortController } from './shared
2121
export async function generateAnswersWithCustomApi(port, question, session, apiKey, modelName) {
2222
const { controller, messageListener } = setAbortController(port)
2323

24-
const prompt = getConversationPairs(session.conversationRecords, false)
24+
const config = await getUserConfig()
25+
const prompt = getConversationPairs(
26+
session.conversationRecords.slice(-config.maxConversationContextLength),
27+
false,
28+
)
2529
prompt.unshift({ role: 'system', content: await getCustomApiPromptBase() })
2630
prompt.push({ role: 'user', content: question })
27-
const config = await getUserConfig()
2831
const apiUrl = config.customModelApiUrl
2932

3033
let answer = ''

src/background/apis/openai-api.mjs

Lines changed: 10 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -27,11 +27,14 @@ export async function generateAnswersWithGptCompletionApi(
2727
) {
2828
const { controller, messageListener } = setAbortController(port)
2929

30+
const config = await getUserConfig()
3031
const prompt =
3132
(await getCompletionPromptBase()) +
32-
getConversationPairs(session.conversationRecords, true) +
33+
getConversationPairs(
34+
session.conversationRecords.slice(-config.maxConversationContextLength),
35+
true,
36+
) +
3337
`Human: ${question}\nAI: `
34-
const config = await getUserConfig()
3538
const apiUrl = config.customOpenAiApiUrl
3639

3740
let answer = ''
@@ -89,10 +92,13 @@ export async function generateAnswersWithGptCompletionApi(
8992
export async function generateAnswersWithChatgptApi(port, question, session, apiKey, modelName) {
9093
const { controller, messageListener } = setAbortController(port)
9194

92-
const prompt = getConversationPairs(session.conversationRecords, false)
95+
const config = await getUserConfig()
96+
const prompt = getConversationPairs(
97+
session.conversationRecords.slice(-config.maxConversationContextLength),
98+
false,
99+
)
93100
prompt.unshift({ role: 'system', content: await getChatSystemPromptBase() })
94101
prompt.push({ role: 'user', content: question })
95-
const config = await getUserConfig()
96102
const apiUrl = config.customOpenAiApiUrl
97103

98104
let answer = ''

src/config/index.mjs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -104,6 +104,7 @@ export const defaultConfig = {
104104
// advanced
105105

106106
maxResponseTokenLength: 1000,
107+
maxConversationContextLength: 9,
107108
customChatGptWebApiUrl: 'https://chat.openai.com',
108109
customChatGptWebApiPath: '/backend-api/conversation',
109110
customOpenAiApiUrl: 'https://api.openai.com',

src/popup/Popup.jsx

Lines changed: 23 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,14 @@ import wechatpay from './donation/wechatpay.jpg'
2727
import bugmeacoffee from './donation/bugmeacoffee.png'
2828
import { useWindowTheme } from '../hooks/use-window-theme.mjs'
2929
import { languageList } from '../config/language.mjs'
30-
import { isEdge, isFirefox, isMobile, isSafari, openUrl } from '../utils/index.mjs'
30+
import {
31+
isEdge,
32+
isFirefox,
33+
isMobile,
34+
isSafari,
35+
openUrl,
36+
parseIntWithClamp,
37+
} from '../utils/index.mjs'
3138
import { useTranslation } from 'react-i18next'
3239

3340
function GeneralPart({ config, updateConfig }) {
@@ -413,11 +420,25 @@ function AdvancedPart({ config, updateConfig }) {
413420
step="100"
414421
value={config.maxResponseTokenLength}
415422
onChange={(e) => {
416-
const value = parseInt(e.target.value)
423+
const value = parseIntWithClamp(e.target.value, 1000, 100, 40000)
417424
updateConfig({ maxResponseTokenLength: value })
418425
}}
419426
/>
420427
</label>
428+
<label>
429+
{t('Max Conversation Length')}
430+
<input
431+
type="number"
432+
min="0"
433+
max="100"
434+
step="1"
435+
value={config.maxConversationContextLength}
436+
onChange={(e) => {
437+
const value = parseIntWithClamp(e.target.value, 9, 0, 100)
438+
updateConfig({ maxConversationContextLength: value })
439+
}}
440+
/>
441+
</label>
421442
<label>
422443
{t('Custom ChatGPT Web API Url')}
423444
<input

src/utils/index.mjs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@ export * from './is-mobile'
1414
export * from './is-safari'
1515
export * from './limited-fetch'
1616
export * from './open-url'
17+
export * from './parse-int-with-clamp'
1718
export * from './set-element-position-in-viewport'
1819
export * from './stream-async-iterable'
1920
export * from './update-ref-height'

src/utils/parse-int-with-clamp.js

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
/**
 * Parse a raw value as a base-10 integer and clamp it into [min, max].
 *
 * Used by the settings UI to sanitize numeric <input> values before
 * storing them in the user config.
 *
 * @param {string|number} value - Raw input (e.g. an input element's value).
 * @param {number} defaultValue - Returned as-is (not clamped) when `value`
 *   cannot be parsed as an integer.
 * @param {number} min - Inclusive lower bound.
 * @param {number} max - Inclusive upper bound.
 * @returns {number} The parsed integer clamped to [min, max], or
 *   `defaultValue` when parsing fails.
 */
export function parseIntWithClamp(value, defaultValue, min, max) {
  // Always pass an explicit radix: without it, a string like '0x10'
  // would be parsed as hexadecimal.
  const parsed = parseInt(value, 10)

  // Number.isNaN avoids the coercion pitfalls of the global isNaN.
  if (Number.isNaN(parsed)) return defaultValue

  // Clamp without mutating the parameter.
  return Math.min(Math.max(parsed, min), max)
}

0 commit comments

Comments
 (0)