Skip to content

Commit b461d3b

Browse files
committed
feat: add maxTokens and temperature config options to CLI
- Add maxTokens config option with default value of 4096
- Add temperature config option with default value of 0.7
- Update the CLI to pass these options to the generateText function
- Update tests to include the new configuration options

Closes #118
1 parent 3911370 commit b461d3b

File tree

4 files changed

+20
-0
lines changed

4 files changed

+20
-0
lines changed

packages/cli/src/commands/$default.ts

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -94,6 +94,8 @@ export const command: CommandModule<SharedOptions, DefaultArgs> = {
9494
const userConfig = getConfig();
9595
const userModelProvider = argv.modelProvider || userConfig.modelProvider;
9696
const userModelName = argv.modelName || userConfig.modelName;
97+
const userMaxTokens = argv.maxTokens || userConfig.maxTokens;
98+
const userTemperature = argv.temperature || userConfig.temperature;
9799

98100
// Early API key check based on model provider
99101
const providerSettings =
@@ -166,6 +168,8 @@ export const command: CommandModule<SharedOptions, DefaultArgs> = {
166168
userModelName,
167169
{ ollamaBaseUrl: config.ollamaBaseUrl },
168170
),
171+
maxTokens: userMaxTokens,
172+
temperature: userTemperature,
169173
};
170174

171175
const result = await toolAgent(prompt, tools, agentConfig, {

packages/cli/src/options.ts

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,8 @@ export type SharedOptions = {
99
readonly sentryDsn?: string;
1010
readonly modelProvider?: string;
1111
readonly modelName?: string;
12+
readonly maxTokens?: number;
13+
readonly temperature?: number;
1214
readonly profile?: boolean;
1315
};
1416

@@ -34,6 +36,14 @@ export const sharedOptions = {
3436
type: 'string',
3537
description: 'AI model name to use',
3638
} as const,
39+
maxTokens: {
40+
type: 'number',
41+
description: 'Maximum number of tokens to generate',
42+
} as const,
43+
temperature: {
44+
type: 'number',
45+
description: 'Temperature for text generation (0.0-1.0)',
46+
} as const,
3747
interactive: {
3848
type: 'boolean',
3949
alias: 'i',

packages/cli/src/settings/config.ts

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,8 @@ const defaultConfig = {
1414
pageFilter: 'none' as 'simple' | 'none' | 'readability',
1515
modelProvider: 'anthropic',
1616
modelName: 'claude-3-7-sonnet-20250219',
17+
maxTokens: 4096,
18+
temperature: 0.7,
1719
ollamaBaseUrl: 'http://localhost:11434/api',
1820
customPrompt: '',
1921
profile: false,

packages/cli/tests/settings/config.test.ts

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -43,6 +43,8 @@ describe('Config', () => {
4343
pageFilter: 'none',
4444
modelProvider: 'anthropic',
4545
modelName: 'claude-3-7-sonnet-20250219',
46+
maxTokens: 4096,
47+
temperature: 0.7,
4648
ollamaBaseUrl: 'http://localhost:11434/api',
4749
profile: false,
4850
customPrompt: '',
@@ -77,6 +79,8 @@ describe('Config', () => {
7779
pageFilter: 'none',
7880
modelProvider: 'anthropic',
7981
modelName: 'claude-3-7-sonnet-20250219',
82+
maxTokens: 4096,
83+
temperature: 0.7,
8084
ollamaBaseUrl: 'http://localhost:11434/api',
8185
profile: false,
8286
customPrompt: '',

0 commit comments

Comments (0)