Skip to content

Commit 7f61722

Browse files
committed
feat: temperature config (#256)
1 parent 3d5d7e3 commit 7f61722

File tree

8 files changed

+34
-4
lines changed

8 files changed

+34
-4
lines changed

src/config/index.mjs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -105,6 +105,7 @@ export const defaultConfig = {
105105

106106
maxResponseTokenLength: 1000,
107107
maxConversationContextLength: 9,
108+
temperature: 1,
108109
customChatGptWebApiUrl: 'https://chat.openai.com',
109110
customChatGptWebApiPath: '/backend-api/conversation',
110111
customOpenAiApiUrl: 'https://api.openai.com',

src/popup/Popup.jsx

Lines changed: 19 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,7 @@ import {
3333
isMobile,
3434
isSafari,
3535
openUrl,
36+
parseFloatWithClamp,
3637
parseIntWithClamp,
3738
} from '../utils/index.mjs'
3839
import { useTranslation } from 'react-i18next'
@@ -432,9 +433,9 @@ function AdvancedPart({ config, updateConfig }) {
432433
return (
433434
<>
434435
<label>
435-
{t('Max Response Token Length')}
436+
{t('Max Response Token Length') + `: ${config.maxResponseTokenLength}`}
436437
<input
437-
type="number"
438+
type="range"
438439
min="100"
439440
max="40000"
440441
step="100"
@@ -446,9 +447,9 @@ function AdvancedPart({ config, updateConfig }) {
446447
/>
447448
</label>
448449
<label>
449-
{t('Max Conversation Length')}
450+
{t('Max Conversation Length') + `: ${config.maxConversationContextLength}`}
450451
<input
451-
type="number"
452+
type="range"
452453
min="0"
453454
max="100"
454455
step="1"
@@ -459,6 +460,20 @@ function AdvancedPart({ config, updateConfig }) {
459460
}}
460461
/>
461462
</label>
463+
<label>
464+
{t('Temperature') + `: ${config.temperature}`}
465+
<input
466+
type="range"
467+
min="0"
468+
max="2"
469+
step="0.1"
470+
value={config.temperature}
471+
onChange={(e) => {
472+
const value = parseFloatWithClamp(e.target.value, 1, 0, 2)
473+
updateConfig({ temperature: value })
474+
}}
475+
/>
476+
</label>
462477
<label>
463478
{t('Custom ChatGPT Web API Url')}
464479
<input

src/services/apis/azure-openai-api.mjs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -36,6 +36,7 @@ export async function generateAnswersWithAzureOpenaiApi(port, question, session)
3636
messages: prompt,
3737
stream: true,
3838
max_tokens: config.maxResponseTokenLength,
39+
temperature: config.temperature,
3940
}),
4041
onMessage(message) {
4142
console.debug('sse message', message)

src/services/apis/custom-api.mjs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -43,6 +43,7 @@ export async function generateAnswersWithCustomApi(port, question, session, apiK
4343
model: modelName,
4444
stream: true,
4545
max_tokens: config.maxResponseTokenLength,
46+
temperature: config.temperature,
4647
}),
4748
onMessage(message) {
4849
console.debug('sse message', message)

src/services/apis/openai-api.mjs

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -50,6 +50,7 @@ export async function generateAnswersWithGptCompletionApi(
5050
model: Models[modelName].value,
5151
stream: true,
5252
max_tokens: config.maxResponseTokenLength,
53+
temperature: config.temperature,
5354
}),
5455
onMessage(message) {
5556
console.debug('sse message', message)
@@ -114,6 +115,7 @@ export async function generateAnswersWithChatgptApi(port, question, session, api
114115
model: Models[modelName].value,
115116
stream: true,
116117
max_tokens: config.maxResponseTokenLength,
118+
temperature: config.temperature,
117119
}),
118120
onMessage(message) {
119121
console.debug('sse message', message)

src/utils/index.mjs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@ export * from './is-mobile'
1313
export * from './is-safari'
1414
export * from './limited-fetch'
1515
export * from './open-url'
16+
export * from './parse-float-with-clamp'
1617
export * from './parse-int-with-clamp'
1718
export * from './set-element-position-in-viewport'
1819
export * from './stream-async-iterable'

src/utils/parse-float-with-clamp.mjs

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
/**
 * Parse a value as a floating-point number, falling back to a default when
 * parsing fails and clamping the result into a closed range.
 *
 * @param {string|number} value - Raw input to parse (e.g. an <input> element's value).
 * @param {number} defaultValue - Returned as-is when `value` does not parse to a
 *   number (deliberately NOT clamped, matching the original contract).
 * @param {number} min - Inclusive lower bound of the clamp range.
 * @param {number} max - Inclusive upper bound of the clamp range.
 * @returns {number} The parsed number clamped to [min, max], or `defaultValue`
 *   when `value` is unparseable.
 */
export function parseFloatWithClamp(value, defaultValue, min, max) {
  // Number.parseFloat / Number.isNaN avoid the coercing global variants
  // and keep the parameter itself unmutated.
  const parsed = Number.parseFloat(value)
  if (Number.isNaN(parsed)) return defaultValue
  return Math.min(Math.max(parsed, min), max)
}

0 commit comments

Comments
 (0)