Skip to content

Commit b45de6c

Browse files
authored
feat: Add support for Anthropic 'thinking' parameter (#955)
This commit introduces support for the `thinking` parameter in the Anthropic LLM integration. Users can now enable Claude 3.7 Sonnet's extended thinking mode by passing the `thinking` parameter either via `default_options` during initialization or directly to the `chat` method.

Changes include:
- Added `thinking` to the `chat_parameters` schema in `Langchain::LLM::Anthropic`.
- Included tests for the `thinking` parameter in `spec/langchain/llm/anthropic_spec.rb`.
- Updated inline documentation for the `initialize` and `chat` methods to reflect the new parameter.
1 parent c61594c commit b45de6c

File tree

2 files changed

+28
-2
lines changed

2 files changed

+28
-2
lines changed

lib/langchain/llm/anthropic.rb

Lines changed: 4 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -22,7 +22,7 @@ class Anthropic < Base
2222
#
2323
# @param api_key [String] The API key to use
2424
# @param llm_options [Hash] Options to pass to the Anthropic client
25-
# @param default_options [Hash] Default options to use on every call to LLM, e.g.: { temperature:, completion_model:, chat_model:, max_tokens: }
25+
# @param default_options [Hash] Default options to use on every call to LLM, e.g.: { temperature:, completion_model:, chat_model:, max_tokens:, thinking: }
2626
# @return [Langchain::LLM::Anthropic] Langchain::LLM::Anthropic instance
2727
def initialize(api_key:, llm_options: {}, default_options: {})
2828
begin
@@ -39,7 +39,8 @@ def initialize(api_key:, llm_options: {}, default_options: {})
3939
temperature: {default: @defaults[:temperature]},
4040
max_tokens: {default: @defaults[:max_tokens]},
4141
metadata: {},
42-
system: {}
42+
system: {},
43+
thinking: {default: @defaults[:thinking]}
4344
)
4445
chat_parameters.ignore(:n, :user)
4546
chat_parameters.remap(stop: :stop_sequences)
@@ -102,6 +103,7 @@ def complete(
102103
# @option params [String] :system System prompt
103104
# @option params [Float] :temperature Amount of randomness injected into the response
104105
# @option params [Array<String>] :tools Definitions of tools that the model may use
106+
# @option params [Hash] :thinking Enable extended thinking mode, e.g. { type: "enabled", budget_tokens: 4000 }
105107
# @option params [Integer] :top_k Only sample from the top K options for each subsequent token
106108
# @option params [Float] :top_p Use nucleus sampling.
107109
# @return [Langchain::LLM::AnthropicResponse] The chat completion

spec/langchain/llm/anthropic_spec.rb

Lines changed: 24 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -106,6 +106,30 @@
106106
end
107107
end
108108

109+
context "with thinking parameter" do
110+
let(:thinking_params) { {type: "enabled", budget_tokens: 4000} }
111+
112+
context "passed in default_options" do
113+
subject { described_class.new(api_key: "123", default_options: {thinking: thinking_params}) }
114+
115+
it "includes thinking parameter in the request" do
116+
expect(subject.client).to receive(:messages)
117+
.with(parameters: hash_including(thinking: thinking_params))
118+
.and_return(response)
119+
subject.chat(messages: messages)
120+
end
121+
end
122+
123+
context "passed directly to chat method" do
124+
it "includes thinking parameter in the request" do
125+
expect(subject.client).to receive(:messages)
126+
.with(parameters: hash_including(thinking: thinking_params))
127+
.and_return(response)
128+
subject.chat(messages: messages, thinking: thinking_params)
129+
end
130+
end
131+
end
132+
109133
context "with streaming" do
110134
let(:fixture) { File.read("spec/fixtures/llm/anthropic/chat_stream.json") }
111135
let(:response) { JSON.parse(fixture) }

0 commit comments

Comments (0)