Skip to content

Commit 832f7f4

Browse files
committed
feat: enhance OpenRouterSummarizeConfig with temperature and maxTokens options
1 parent a7ca713 commit 832f7f4

File tree

1 file changed

+14
-7
lines changed

1 file changed

+14
-7
lines changed

packages/typescript/ai-openrouter/src/adapters/summarize.ts

Lines changed: 14 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,12 @@ import type { OpenRouterConfig } from './text'
1010
/**
 * Configuration for the OpenRouter summarize adapter.
 *
 * Extends {@link OpenRouterConfig} with summarization defaults that the
 * adapter applies when a request does not supply its own values.
 */
export interface OpenRouterSummarizeConfig extends OpenRouterConfig {
  /** Default temperature for summarization (0-2). Defaults to 0.3. */
  temperature?: number
  /** Default maximum tokens in the response */
  maxTokens?: number
}
1419

1520
/**
1621
* OpenRouter-specific provider options for summarization
@@ -35,16 +40,19 @@ export class OpenRouterSummarizeAdapter<
3540
readonly name = 'openrouter' as const
3641

3742
  // Underlying text adapter that performs the actual chat/stream calls.
  private textAdapter: OpenRouterTextAdapter<TModel>
  // Defaults resolved from config at construction time and applied to
  // every summarization request (see summarize/summarizeStream).
  private temperature: number
  private maxTokens: number | undefined

  /**
   * Create a summarize adapter bound to a specific model.
   *
   * @param config - Adapter configuration. `config.temperature` defaults
   *   to 0.3 when omitted; `config.maxTokens` is left undefined so a
   *   per-request limit can take effect instead.
   * @param model - The OpenRouter model used for summarization.
   */
  constructor(config: OpenRouterSummarizeConfig, model: TModel) {
    // NOTE(review): base class receives an empty config object here —
    // presumably the text adapter owns all transport config; confirm.
    super({}, model)
    this.textAdapter = new OpenRouterTextAdapter(config, model)
    this.temperature = config.temperature ?? 0.3
    this.maxTokens = config.maxTokens
  }
4352

4453
async summarize(options: SummarizationOptions): Promise<SummarizationResult> {
4554
const systemPrompt = this.buildSummarizationPrompt(options)
4655

47-
// Use the text adapter's streaming and collect the result
4856
let summary = ''
4957
let id = ''
5058
let model = options.model
@@ -54,8 +62,8 @@ export class OpenRouterSummarizeAdapter<
5462
model: options.model,
5563
messages: [{ role: 'user', content: options.text }],
5664
systemPrompts: [systemPrompt],
57-
maxTokens: options.maxLength,
58-
temperature: 0.3,
65+
maxTokens: this.maxTokens ?? options.maxLength,
66+
temperature: this.temperature,
5967
})) {
6068
if (chunk.type === 'content') {
6169
summary = chunk.content
@@ -75,13 +83,12 @@ export class OpenRouterSummarizeAdapter<
7583
): AsyncIterable<StreamChunk> {
7684
const systemPrompt = this.buildSummarizationPrompt(options)
7785

78-
// Delegate directly to the text adapter's streaming
7986
yield* this.textAdapter.chatStream({
8087
model: options.model,
8188
messages: [{ role: 'user', content: options.text }],
8289
systemPrompts: [systemPrompt],
83-
maxTokens: options.maxLength,
84-
temperature: 0.3,
90+
maxTokens: this.maxTokens ?? options.maxLength,
91+
temperature: this.temperature,
8592
})
8693
}
8794

0 commit comments

Comments
 (0)