Skip to content

Commit 56349a7

Browse files
committed
feat: make maxResponseTokenLength configurable (#170, #171)
1 parent a19e029 commit 56349a7

File tree

9 files changed

+35
-17
lines changed

9 files changed

+35
-17
lines changed

src/_locales/en/main.json

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -98,5 +98,6 @@
9898
"Open Conversation Page": "Open Conversation Page",
9999
"Open Conversation Window": "Open Conversation Window",
100100
"Store to Independent Conversation Page": "Store to Independent Conversation Page",
101-
"Keep Conversation Window in Background": "Keep conversation window in background, so that you can use shortcut keys to call it up in any program"
101+
"Keep Conversation Window in Background": "Keep conversation window in background, so that you can use shortcut keys to call it up in any program",
102+
"Max Response Token Length": "Max Response Token Length"
102103
}

src/_locales/zh-hans/main.json

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -98,5 +98,6 @@
9898
"Open Conversation Page": "打开独立对话页",
9999
"Open Conversation Window": "打开独立对话窗口",
100100
"Store to Independent Conversation Page": "收纳到独立对话页",
101-
"Keep Conversation Window in Background": "保持对话窗口在后台, 以便在任何程序中使用快捷键呼出"
101+
"Keep Conversation Window in Background": "保持对话窗口在后台, 以便在任何程序中使用快捷键呼出",
102+
"Max Response Token Length": "响应的最大token长度"
102103
}

src/_locales/zh-hant/main.json

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -98,5 +98,6 @@
9898
"Open Conversation Page": "開啟獨立對話頁",
9999
"Open Conversation Window": "開啟獨立對話視窗",
100100
"Store to Independent Conversation Page": "收納到獨立對話頁",
101-
"Keep Conversation Window in Background": "保持對話窗口在後臺, 以便在任何程序中使用快捷鍵呼出"
101+
"Keep Conversation Window in Background": "保持對話窗口在後臺, 以便在任何程序中使用快捷鍵呼出",
102+
"Max Response Token Length": "響應的最大token長度"
102103
}

src/background/apis/azure-openai-api.mjs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
import { getUserConfig, maxResponseTokenLength } from '../../config/index.mjs'
1+
import { getUserConfig } from '../../config/index.mjs'
22
import { getChatSystemPromptBase, pushRecord, setAbortController } from './shared.mjs'
33
import { getConversationPairs } from '../../utils/get-conversation-pairs'
44
import { fetchSSE } from '../../utils/fetch-sse'
@@ -32,7 +32,7 @@ export async function generateAnswersWithAzureOpenaiApi(port, question, session)
3232
body: JSON.stringify({
3333
messages: prompt,
3434
stream: true,
35-
max_tokens: maxResponseTokenLength,
35+
max_tokens: config.maxResponseTokenLength,
3636
}),
3737
onMessage(message) {
3838
console.debug('sse message', message)

src/background/apis/custom-api.mjs

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
// and it has not yet had a negative impact on maintenance.
66
// If necessary, I will refactor.
77

8-
import { getUserConfig, maxResponseTokenLength } from '../../config/index.mjs'
8+
import { getUserConfig } from '../../config/index.mjs'
99
import { fetchSSE } from '../../utils/fetch-sse'
1010
import { getConversationPairs } from '../../utils/get-conversation-pairs'
1111
import { isEmpty } from 'lodash-es'
@@ -24,7 +24,8 @@ export async function generateAnswersWithCustomApi(port, question, session, apiK
2424
const prompt = getConversationPairs(session.conversationRecords, false)
2525
prompt.unshift({ role: 'system', content: await getCustomApiPromptBase() })
2626
prompt.push({ role: 'user', content: question })
27-
const apiUrl = (await getUserConfig()).customModelApiUrl
27+
const config = await getUserConfig()
28+
const apiUrl = config.customModelApiUrl
2829

2930
let answer = ''
3031
await fetchSSE(apiUrl, {
@@ -38,7 +39,7 @@ export async function generateAnswersWithCustomApi(port, question, session, apiK
3839
messages: prompt,
3940
model: modelName,
4041
stream: true,
41-
max_tokens: maxResponseTokenLength,
42+
max_tokens: config.maxResponseTokenLength,
4243
}),
4344
onMessage(message) {
4445
console.debug('sse message', message)

src/background/apis/openai-api.mjs

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
// api version
22

3-
import { maxResponseTokenLength, Models, getUserConfig } from '../../config/index.mjs'
3+
import { Models, getUserConfig } from '../../config/index.mjs'
44
import { fetchSSE } from '../../utils/fetch-sse'
55
import { getConversationPairs } from '../../utils/get-conversation-pairs'
66
import { isEmpty } from 'lodash-es'
@@ -31,7 +31,8 @@ export async function generateAnswersWithGptCompletionApi(
3131
(await getCompletionPromptBase()) +
3232
getConversationPairs(session.conversationRecords, true) +
3333
`Human: ${question}\nAI: `
34-
const apiUrl = (await getUserConfig()).customOpenAiApiUrl
34+
const config = await getUserConfig()
35+
const apiUrl = config.customOpenAiApiUrl
3536

3637
let answer = ''
3738
await fetchSSE(`${apiUrl}/v1/completions`, {
@@ -45,7 +46,7 @@ export async function generateAnswersWithGptCompletionApi(
4546
prompt: prompt,
4647
model: Models[modelName].value,
4748
stream: true,
48-
max_tokens: maxResponseTokenLength,
49+
max_tokens: config.maxResponseTokenLength,
4950
}),
5051
onMessage(message) {
5152
console.debug('sse message', message)
@@ -94,7 +95,8 @@ export async function generateAnswersWithChatgptApi(port, question, session, api
9495
const prompt = getConversationPairs(session.conversationRecords, false)
9596
prompt.unshift({ role: 'system', content: await getChatSystemPromptBase() })
9697
prompt.push({ role: 'user', content: question })
97-
const apiUrl = (await getUserConfig()).customOpenAiApiUrl
98+
const config = await getUserConfig()
99+
const apiUrl = config.customOpenAiApiUrl
98100

99101
let answer = ''
100102
await fetchSSE(`${apiUrl}/v1/chat/completions`, {
@@ -108,7 +110,7 @@ export async function generateAnswersWithChatgptApi(port, question, session, api
108110
messages: prompt,
109111
model: Models[modelName].value,
110112
stream: true,
111-
max_tokens: maxResponseTokenLength,
113+
max_tokens: config.maxResponseTokenLength,
112114
}),
113115
onMessage(message) {
114116
console.debug('sse message', message)

src/config/index.mjs

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -48,8 +48,6 @@ export const ModelMode = {
4848
fast: 'Fast',
4949
}
5050

51-
export const maxResponseTokenLength = 1000
52-
5351
/**
5452
* @typedef {typeof defaultConfig} UserConfig
5553
*/
@@ -82,6 +80,7 @@ export const defaultConfig = {
8280

8381
// advanced
8482

83+
maxResponseTokenLength: 1000,
8584
customChatGptWebApiUrl: 'https://chat.openai.com',
8685
customChatGptWebApiPath: '/backend-api/conversation',
8786
customOpenAiApiUrl: 'https://api.openai.com',

src/popup/Popup.jsx

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -367,6 +367,20 @@ function AdvancedPart({ config, updateConfig }) {
367367

368368
return (
369369
<>
370+
<label>
371+
{t('Max Response Token Length')}
372+
<input
373+
type="number"
374+
min="100"
375+
max="40000"
376+
step="100"
377+
value={config.maxResponseTokenLength}
378+
onChange={(e) => {
379+
const value = parseInt(e.target.value)
380+
updateConfig({ maxResponseTokenLength: value })
381+
}}
382+
/>
383+
</label>
370384
<label>
371385
{t('Custom ChatGPT Web API Url')}
372386
<input

src/utils/crop-text.mjs

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -20,13 +20,12 @@
2020
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
2121
// SOFTWARE.
2222

23-
import { maxResponseTokenLength } from '../config/index.mjs'
2423
import { encode } from '@nem035/gpt-3-encoder'
2524

2625
// TODO add model support
2726
export function cropText(
2827
text,
29-
maxLength = 3900 - maxResponseTokenLength,
28+
maxLength = 3900 - 1000,
3029
startLength = 400,
3130
endLength = 300,
3231
tiktoken = true,

0 commit comments

Comments (0)