Commit 0ce17a1

FIX: Correctly pass tool_choice when using Claude models. (#1364)
The `ClaudePrompt` object couldn't access the original prompt's tool_choice attribute, so a forced tool choice was not forwarded in requests to either the Anthropic or AWS Bedrock endpoints.
Parent: cf220c5
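
As context for the new specs below: with tool_choice now carried on the translated prompt, the endpoint can emit a tool_choice object in the request body whose name is the forced tool. The following is a minimal illustrative sketch, not code from this commit; the { type: "tool" } wrapper is an assumption based on Anthropic's public tool-use API, whereas the specs in this commit only assert that tool_choice.name reaches the request body.

require "json"

# Illustrative sketch only (not from this commit). Builds the request-body
# fragment the new specs assert on; the :type => "tool" wrapper is an
# assumption based on Anthropic's documented tool-use API.
def with_forced_tool(payload, tool_choice)
  return payload unless tool_choice
  payload.merge(tool_choice: { type: "tool", name: tool_choice })
end

body =
  with_forced_tool(
    { messages: [{ role: "user", content: "echo hello" }], tools: [{ name: "echo" }] },
    "echo",
  )

# Mirrors the spec assertion: the forced tool's name is present in the body.
JSON.parse(body.to_json, symbolize_names: true).dig(:tool_choice, :name) # => "echo"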

3 files changed: 114 additions, 5 deletions

lib/completions/dialects/claude.rb (4 additions, 5 deletions)

@@ -13,14 +13,13 @@ def can_translate?(llm_model)
         end

         class ClaudePrompt
-          attr_reader :system_prompt
-          attr_reader :messages
-          attr_reader :tools
+          attr_reader :system_prompt, :messages, :tools, :tool_choice

-          def initialize(system_prompt, messages, tools)
+          def initialize(system_prompt, messages, tools, tool_choice)
             @system_prompt = system_prompt
             @messages = messages
             @tools = tools
+            @tool_choice = tool_choice
           end

           def has_tools?
@@ -55,7 +54,7 @@ def translate
           tools = nil
           tools = tools_dialect.translated_tools if native_tool_support?

-          ClaudePrompt.new(system_prompt.presence, interleving_messages, tools)
+          ClaudePrompt.new(system_prompt.presence, interleving_messages, tools, tool_choice)
         end

         def max_prompt_tokens

spec/lib/completions/endpoints/anthropic_spec.rb (49 additions)

@@ -770,6 +770,55 @@
     end
   end

+  describe "forced tool use" do
+    it "can properly force tool use" do
+      prompt =
+        DiscourseAi::Completions::Prompt.new(
+          "You are a bot",
+          messages: [type: :user, id: "user1", content: "echo hello"],
+          tools: [echo_tool],
+          tool_choice: "echo",
+        )
+
+      response_body = {
+        id: "msg_01RdJkxCbsEj9VFyFYAkfy2S",
+        type: "message",
+        role: "assistant",
+        model: "claude-3-haiku-20240307",
+        content: [
+          {
+            type: "tool_use",
+            id: "toolu_bdrk_014CMjxtGmKUtGoEFPgc7PF7",
+            name: "echo",
+            input: {
+              text: "hello",
+            },
+          },
+        ],
+        stop_reason: "end_turn",
+        stop_sequence: nil,
+        usage: {
+          input_tokens: 345,
+          output_tokens: 65,
+        },
+      }.to_json
+
+      parsed_body = nil
+      stub_request(:post, url).with(
+        body:
+          proc do |req_body|
+            parsed_body = JSON.parse(req_body, symbolize_names: true)
+            true
+          end,
+      ).to_return(status: 200, body: response_body)
+
+      llm.generate(prompt, user: Discourse.system_user)
+
+      # Verify that tool_choice: "echo" is present
+      expect(parsed_body.dig(:tool_choice, :name)).to eq("echo")
+    end
+  end
+
   describe "structured output via prefilling" do
     it "forces the response to be a JSON and using the given JSON schema" do
       schema = {

spec/lib/completions/endpoints/aws_bedrock_spec.rb (61 additions)

@@ -547,6 +547,67 @@ def encode_message(message)
     end
   end

+  describe "forced tool use" do
+    it "can properly force tool use" do
+      proxy = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}")
+      request = nil
+
+      tools = [
+        {
+          name: "echo",
+          description: "echo something",
+          parameters: [
+            { name: "text", type: "string", description: "text to echo", required: true },
+          ],
+        },
+      ]
+
+      prompt =
+        DiscourseAi::Completions::Prompt.new(
+          "You are a bot",
+          messages: [type: :user, id: "user1", content: "echo hello"],
+          tools: tools,
+          tool_choice: "echo",
+        )
+
+      # Mock response from Bedrock
+      content = {
+        content: [
+          {
+            type: "tool_use",
+            id: "toolu_bdrk_014CMjxtGmKUtGoEFPgc7PF7",
+            name: "echo",
+            input: {
+              text: "hello",
+            },
+          },
+        ],
+        usage: {
+          input_tokens: 25,
+          output_tokens: 15,
+        },
+      }.to_json
+
+      stub_request(
+        :post,
+        "https://bedrock-runtime.us-east-1.amazonaws.com/model/anthropic.claude-3-sonnet-20240229-v1:0/invoke",
+      )
+        .with do |inner_request|
+          request = inner_request
+          true
+        end
+        .to_return(status: 200, body: content)
+
+      proxy.generate(prompt, user: user)
+
+      # Parse the request body
+      request_body = JSON.parse(request.body)
+
+      # Verify that tool_choice: "echo" is present
+      expect(request_body.dig("tool_choice", "name")).to eq("echo")
+    end
+  end
+
   describe "structured output via prefilling" do
     it "forces the response to be a JSON and using the given JSON schema" do
       schema = {
