Commit ee20911

fix: max_tokens for haiku is lower
1 parent a8b9ce8 commit ee20911

File tree

1 file changed: +9 −1 lines changed


packages/openai-adapters/src/apis/Anthropic.ts

Lines changed: 9 additions & 1 deletion
@@ -55,6 +55,14 @@ export class AnthropicApi implements BaseLlmApi {
     return cachingStrategy(cleanBody);
   }

+  private maxTokensForModel(model: string): number {
+    if (model.includes("haiku")) {
+      return 8192;
+    }
+
+    return 32_000;
+  }
+
   private _convertToCleanAnthropicBody(oaiBody: ChatCompletionCreateParams) {
     let stop = undefined;
     if (oaiBody.stop && Array.isArray(oaiBody.stop)) {
@@ -81,7 +89,7 @@
         : systemMessage,
       top_p: oaiBody.top_p,
       temperature: oaiBody.temperature,
-      max_tokens: oaiBody.max_tokens ?? 32_000, // max_tokens is required
+      max_tokens: oaiBody.max_tokens ?? this.maxTokensForModel(oaiBody.model), // max_tokens is required
       model: oaiBody.model,
       stop_sequences: stop,
       stream: oaiBody.stream,
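For readers skimming the diff, here is a standalone sketch of the fallback behaviour introduced above. It is not code from the repository: pickMaxTokens is a hypothetical helper that mirrors the `oaiBody.max_tokens ?? this.maxTokensForModel(oaiBody.model)` expression, under the assumption that Haiku models are capped at 8,192 output tokens while every other model defaults to 32,000.

function pickMaxTokens(model: string, requested?: number | null): number {
  // An explicitly requested max_tokens always wins (?? only falls through on null/undefined).
  // Otherwise the default depends on the model family: Haiku gets the lower 8192 cap.
  return requested ?? (model.includes("haiku") ? 8192 : 32_000);
}

// Example usage (illustrative model names):
pickMaxTokens("claude-3-5-haiku-latest");        // 8192
pickMaxTokens("claude-3-7-sonnet-latest", null); // 32000
pickMaxTokens("claude-3-5-haiku-latest", 1024);  // 1024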
