
Commit 3cfc4fb

fix(anthropic): default model options (#9221)
1 parent 0c558fb commit 3cfc4fb

7 files changed (+61, −61 lines)

libs/langchain-standard-tests/src/integration_tests/chat_models.ts

Lines changed: 0 additions & 2 deletions
@@ -1227,8 +1227,6 @@ export abstract class ChatModelIntegrationTests<
       callOptionsWithHandler
     );
 
-    console.log("result", handler.extraParams, handler);
-
     // Verify that the 'a' field is present and is a number
     this.expect(result.a).toBeDefined();
     this.expect(typeof result.a).toBe("number");

libs/langchain/src/agents/middleware/modelFallback.ts

Lines changed: 1 addition & 1 deletion
@@ -17,7 +17,7 @@ import { createMiddleware } from "../middleware.js";
  * // Create middleware with fallback models (not including primary)
  * const fallback = modelFallbackMiddleware({
  *   "openai:gpt-4o-mini", // First fallback
- *   "anthropic:claude-3-5-sonnet-20241022", // Second fallback
+ *   "anthropic:claude-sonnet-4-5-20250929", // Second fallback
  * });
  *
  * const agent = createAgent({

libs/providers/langchain-anthropic/src/chat_models.ts

Lines changed: 54 additions & 52 deletions
@@ -49,6 +49,29 @@ import {
 } from "./types.js";
 import { wrapAnthropicClientError } from "./utils/errors.js";
 
+const MODEL_DEFAULT_MAX_OUTPUT_TOKENS: Partial<
+  Record<Anthropic.Model, number>
+> = {
+  "claude-opus-4-1": 8192,
+  "claude-opus-4": 8192,
+  "claude-sonnet-4": 8192,
+  "claude-sonnet-3-7-sonnet": 8192,
+  "claude-3-5-sonnet": 4096,
+  "claude-3-5-haiku": 4096,
+  "claude-3-haiku": 2048,
+};
+const FALLBACK_MAX_OUTPUT_TOKENS = 2048;
+
+function defaultMaxOutputTokensForModel(model?: Anthropic.Model): number {
+  if (!model) {
+    return FALLBACK_MAX_OUTPUT_TOKENS;
+  }
+  const maxTokens = Object.entries(MODEL_DEFAULT_MAX_OUTPUT_TOKENS).find(
+    ([key]) => model.startsWith(key)
+  )?.[1];
+  return maxTokens ?? FALLBACK_MAX_OUTPUT_TOKENS;
+}
+
 export interface ChatAnthropicCallOptions
   extends BaseChatModelCallOptions,
     Pick<AnthropicInput, "streamUsage"> {
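
The new default is resolved by prefix matching on the model ID. `Object.entries` walks the map in insertion order, so the more specific `"claude-opus-4-1"` key is checked before `"claude-opus-4"`. A minimal standalone sketch of that lookup (same names as the diff above; the map is trimmed and the model IDs passed in are illustrative):

// Sketch of the prefix lookup introduced above.
const MODEL_DEFAULT_MAX_OUTPUT_TOKENS: Record<string, number> = {
  "claude-opus-4-1": 8192, // more specific prefix listed first
  "claude-opus-4": 8192,
  "claude-3-5-sonnet": 4096,
};
const FALLBACK_MAX_OUTPUT_TOKENS = 2048;

function defaultMaxOutputTokensForModel(model?: string): number {
  if (!model) return FALLBACK_MAX_OUTPUT_TOKENS;
  // First key that is a prefix of the model ID wins.
  const maxTokens = Object.entries(MODEL_DEFAULT_MAX_OUTPUT_TOKENS).find(
    ([key]) => model.startsWith(key)
  )?.[1];
  return maxTokens ?? FALLBACK_MAX_OUTPUT_TOKENS;
}

defaultMaxOutputTokensForModel("claude-opus-4-1-20250805"); // => 8192
defaultMaxOutputTokensForModel("claude-3-5-sonnet-20241022"); // => 4096
defaultMaxOutputTokensForModel("some-unknown-model"); // => 2048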
@@ -144,40 +167,36 @@ export type AnthropicMessagesModelId =
  * Input to AnthropicChat class.
  */
 export interface AnthropicInput {
-  /** Amount of randomness injected into the response. Ranges
-   * from 0 to 1. Use temp closer to 0 for analytical /
-   * multiple choice, and temp closer to 1 for creative
+  /**
+   * Amount of randomness injected into the response. Ranges
+   * from 0 to 1. Use temperature closer to 0 for analytical /
+   * multiple choice, and temperature closer to 1 for creative
    * and generative tasks.
-   * To not set this field, pass `null`. If `undefined` is passed,
-   * the default (1) will be used.
    */
-  temperature?: number | null;
+  temperature?: number;
 
-  /** Only sample from the top K options for each subsequent
+  /**
+   * Only sample from the top K options for each subsequent
    * token. Used to remove "long tail" low probability
-   * responses. Defaults to -1, which disables it.
+   * responses.
    */
   topK?: number;
 
-  /** Does nucleus sampling, in which we compute the
+  /**
+   * Does nucleus sampling, in which we compute the
    * cumulative distribution over all the options for each
    * subsequent token in decreasing probability order and
    * cut it off once it reaches a particular probability
-   * specified by top_p. Defaults to -1, which disables it.
-   * Note that you should either alter temperature or top_p,
-   * but not both.
-   *
-   * To not set this field, pass `null`. If `undefined` is passed,
-   * the default (-1) will be used.
-   *
-   * For Opus 4.1 and Sonnet 4.5, this defaults to `null`.
+   * specified by top_p. Note that you should either alter
+   * temperature or top_p, but not both.
    */
   topP?: number | null;
 
   /** A maximum number of tokens to generate before stopping. */
   maxTokens?: number;
 
-  /** A list of strings upon which to stop generating.
+  /**
+   * A list of strings upon which to stop generating.
    * You probably want `["\n\nHuman:"]`, as that's the cue for
    * the next turn in the dialog agent.
    */
@@ -304,7 +323,7 @@ function extractToken(chunk: AIMessageChunk): string | undefined {
  * import { ChatAnthropic } from '@langchain/anthropic';
  *
  * const llm = new ChatAnthropic({
- *   model: "claude-3-5-sonnet-20240620",
+ *   model: "claude-sonnet-4-5-20250929",
  *   temperature: 0,
  *   maxTokens: undefined,
  *   maxRetries: 2,
@@ -334,7 +353,7 @@ function extractToken(chunk: AIMessageChunk): string | undefined {
  *   "content": "Here's the translation to French:\n\nJ'adore la programmation.",
  *   "response_metadata": {
  *     "id": "msg_01QDpd78JUHpRP6bRRNyzbW3",
- *     "model": "claude-3-5-sonnet-20240620",
+ *     "model": "claude-sonnet-4-5-20250929",
  *     "stop_reason": "end_turn",
  *     "stop_sequence": null,
  *     "usage": {
@@ -372,7 +391,7 @@ function extractToken(chunk: AIMessageChunk): string | undefined {
  *   "id": "msg_01N8MwoYxiKo9w4chE4gXUs4",
  *   "type": "message",
  *   "role": "assistant",
- *   "model": "claude-3-5-sonnet-20240620"
+ *   "model": "claude-sonnet-4-5-20250929"
  * },
  * "usage_metadata": {
  *   "input_tokens": 25,
@@ -441,7 +460,7 @@ function extractToken(chunk: AIMessageChunk): string | undefined {
  *   "id": "msg_01SBTb5zSGXfjUc7yQ8EKEEA",
  *   "type": "message",
  *   "role": "assistant",
- *   "model": "claude-3-5-sonnet-20240620",
+ *   "model": "claude-sonnet-4-5-20250929",
  *   "stop_reason": "end_turn",
  *   "stop_sequence": null
  * },
@@ -626,7 +645,7 @@ function extractToken(chunk: AIMessageChunk): string | undefined {
  * ```txt
  * {
  *   id: 'msg_01STxeQxJmp4sCSpioD6vK3L',
- *   model: 'claude-3-5-sonnet-20240620',
+ *   model: 'claude-sonnet-4-5-20250929',
  *   stop_reason: 'end_turn',
  *   stop_sequence: null,
  *   usage: { input_tokens: 25, output_tokens: 19 },
@@ -669,17 +688,17 @@ export class ChatAnthropicMessages<
 
   apiUrl?: string;
 
-  temperature: number | undefined = 1;
+  temperature?: number;
 
-  topK = -1;
+  topK?: number;
 
-  topP: number | undefined = -1;
+  topP?: number;
 
-  maxTokens = 2048;
+  maxTokens: number;
 
-  modelName = "claude-2.1";
+  modelName = "claude-3-5-sonnet-latest";
 
-  model = "claude-2.1";
+  model = "claude-3-5-sonnet-latest";
 
   invocationKwargs?: Kwargs;
 
@@ -732,21 +751,12 @@ export class ChatAnthropicMessages<
 
     this.invocationKwargs = fields?.invocationKwargs ?? {};
 
-    if (this.model.includes("opus-4-1") || this.model.includes("sonnet-4-5")) {
-      // Default to `undefined` for `topP` for Opus 4.1 models
-      this.topP = fields?.topP === null ? undefined : fields?.topP;
-    } else {
-      this.topP = fields?.topP ?? this.topP;
-    }
+    this.topP = fields?.topP ?? this.topP;
 
-    // If the user passes `null`, set it to `undefined`. Otherwise, use their value or the default. We have to check for null, because
-    // there's no way for us to know if they explicitly set it to `undefined`, or never passed a value
-    this.temperature =
-      fields?.temperature === null
-        ? undefined
-        : fields?.temperature ?? this.temperature;
+    this.temperature = fields?.temperature ?? this.temperature;
     this.topK = fields?.topK ?? this.topK;
-    this.maxTokens = fields?.maxTokens ?? this.maxTokens;
+    this.maxTokens =
+      fields?.maxTokens ?? defaultMaxOutputTokensForModel(this.model);
     this.stopSequences = fields?.stopSequences ?? this.stopSequences;
 
     this.streaming = fields?.streaming ?? false;
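
With the constructor change, an omitted `maxTokens` now resolves through `defaultMaxOutputTokensForModel(this.model)` instead of the old hard-coded 2048. A hedged usage sketch (assumes `ANTHROPIC_API_KEY` is set in the environment; token values come from the map added in this diff):

import { ChatAnthropic } from "@langchain/anthropic";

// No maxTokens given: "claude-sonnet-4-5-20250929" matches the
// "claude-sonnet-4" prefix, so the default resolves to 8192.
const llm = new ChatAnthropic({ model: "claude-sonnet-4-5-20250929" });

// An explicit maxTokens still takes precedence over the per-model default.
const capped = new ChatAnthropic({
  model: "claude-3-haiku-20240307",
  maxTokens: 1024,
});

An unrecognized model ID falls back to `FALLBACK_MAX_OUTPUT_TOKENS` (2048), so unknown models keep the previous default.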
@@ -847,17 +857,10 @@ export class ChatAnthropicMessages<
       | undefined = handleToolChoice(options?.tool_choice);
 
     if (this.thinking.type === "enabled") {
-      if (this.topK !== -1) {
+      if (this.topP !== undefined && this.topK !== -1) {
         throw new Error("topK is not supported when thinking is enabled");
       }
-      if (
-        this.model.includes("opus-4-1") || this.model.includes("sonnet-4-5")
-          ? this.topP !== undefined
-          : this.topP !== -1
-      ) {
-        throw new Error("topP is not supported when thinking is enabled");
-      }
-      if (this.temperature !== 1) {
+      if (this.temperature !== undefined && this.temperature !== 1) {
         throw new Error(
           "temperature is not supported when thinking is enabled"
         );
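
Because the sampling fields now default to `undefined`, these guards treat "unset" as compatible with extended thinking rather than comparing against the old numeric defaults (`1`, `-1`). A hedged sketch of a call that passes the guards (the `thinking` field mirrors `AnthropicInput`; the `budget_tokens` value is illustrative):

const thinkingLlm = new ChatAnthropic({
  model: "claude-sonnet-4-5-20250929",
  thinking: { type: "enabled", budget_tokens: 2048 },
  // temperature, topP, and topK are left undefined, so none of the
  // guards above throw.
});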
@@ -999,7 +1002,6 @@ export class ChatAnthropicMessages<
       content,
       additionalKwargs
     );
-    // eslint-disable-next-line @typescript-eslint/no-unused-vars
     const { role: _role, type: _type, ...rest } = additionalKwargs;
     return { generations, llmOutput: rest };
   }

libs/providers/langchain-anthropic/src/tests/chat_models-web_search.int.test.ts

Lines changed: 1 addition & 1 deletion
@@ -3,7 +3,7 @@ import { HumanMessage, AIMessage } from "@langchain/core/messages";
 import { ChatAnthropic } from "../chat_models.js";
 
 const model = new ChatAnthropic({
-  model: "claude-3-5-sonnet-20241022",
+  model: "claude-sonnet-4-5-20250929",
   temperature: 0,
 }).bindTools([
   {

libs/providers/langchain-anthropic/src/tests/chat_models.int.test.ts

Lines changed: 3 additions & 3 deletions
@@ -1,6 +1,6 @@
 /* eslint-disable @typescript-eslint/no-explicit-any */
 
-import { expect, test, describe } from "vitest";
+import { expect, test, it, describe } from "vitest";
 import fs from "fs/promises";
 import {
   AIMessage,
@@ -58,7 +58,7 @@ async function invoke(
 const extendedThinkingModelName = "claude-3-7-sonnet-20250219";
 
 // use this for tests involving citations
-const citationsModelName = "claude-3-5-sonnet-20241022";
+const citationsModelName = "claude-sonnet-4-5-20250929";
 
 // use this for tests involving PDF documents
 const pdfModelName = "claude-3-5-haiku-20241022";
@@ -987,7 +987,7 @@ test("Can accept PDF documents", async () => {
 });
 
 const pdfPath =
-  "../langchain-community/src/document_loaders/tests/example_data/Jacob_Lee_Resume_2023.pdf";
+  "../../langchain-community/src/document_loaders/tests/example_data/Jacob_Lee_Resume_2023.pdf";
 const pdfBase64 = await fs.readFile(pdfPath, "base64");
 
 const response = await model.invoke([

libs/providers/langchain-anthropic/src/tests/chat_models.standard.int.test.ts

Lines changed: 1 addition & 1 deletion
@@ -34,7 +34,7 @@ class ChatAnthropicStandardIntegrationTests extends ChatModelIntegrationTests<
     const constructorArgsCopy = { ...this.constructorArgs };
     this.constructorArgs = {
       ...this.constructorArgs,
-      model: "claude-3-5-sonnet-20240620",
+      model: "claude-sonnet-4-5-20250929",
     };
     await super.testParallelToolCalling();
     this.constructorArgs = constructorArgsCopy;

libs/providers/langchain-anthropic/src/utils/prompts.ts

Lines changed: 1 addition & 1 deletion
@@ -29,7 +29,7 @@ import { _convertMessagesToAnthropicPayload } from "./message_inputs.js";
  * });
  *
  * const anthropicResponse = await anthropicClient.messages.create({
- *   model: "claude-3-5-sonnet-20240620",
+ *   model: "claude-sonnet-4-5-20250929",
  *   max_tokens: 1024,
  *   stream: false,
  *   system,
