
Commit 4923837

FIX: Llm selector / forced tools / search tool (#862)
* FIX: Llm selector / forced tools / search tool

  This fixes a few issues:

  1. When search found no semantic results, the search tool would break.
  2. Gemini and Anthropic models did not implement forced tools, despite it being an API option.
  3. The mechanics around displaying the LLM selector were not right: if the LLM selector was disabled server side, persona PMs did not work correctly.
  4. Disabling native tools for Anthropic models moved out of a site setting. This deliberately does not migrate the old setting because the feature is rarely needed now; people who had it set probably did not need it.
  5. Updates Anthropic model names to the latest release.

* linting
* fix a couple of tests I missed
* clean up conditional
1 parent 3022d34 commit 4923837
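
For context on the forced-tools fix, the dialect's tool_choice is now translated into each provider's native format. The sketch below shows only the per-provider payload fragments; the field names come from the diffs further down, while forced_tool_fragment and the sample tool name are hypothetical and the rest of the request assembly is elided.

# Sketch: how a forced tool (dialect.tool_choice) is expressed per provider,
# mirroring the prepare_payload changes in this commit.
def forced_tool_fragment(provider, tool_name)
  case provider
  when :anthropic, :aws_bedrock
    # Anthropic Messages API: force a specific tool by name.
    { tool_choice: { type: "tool", name: tool_name } }
  when :gemini
    # Gemini function calling: ANY mode restricted to the forced function.
    { tool_config: { function_calling_config: { mode: "ANY", allowed_function_names: [tool_name] } } }
  else
    {} # other providers are unchanged by this commit
  end
end

forced_tool_fragment(:anthropic, "search")
# => { tool_choice: { type: "tool", name: "search" } }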

File tree

16 files changed: +121 -40 lines changed

app/models/llm_model.rb

Lines changed: 4 additions & 0 deletions

@@ -19,6 +19,10 @@ def self.provider_params
       aws_bedrock: {
         access_key_id: :text,
         region: :text,
+        disable_native_tools: :checkbox,
+      },
+      anthropic: {
+        disable_native_tools: :checkbox,
       },
       open_ai: {
         organization: :text,
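
The new disable_native_tools checkbox is a per-model provider param rather than a site setting. A minimal sketch of how such a param gets read: lookup_custom_param is the accessor used in the claude.rb dialect change further down, while LlmModelStub and the sample values here are invented for illustration.

# Illustrative stand-in for the real LlmModel record, not the actual ActiveRecord class.
LlmModelStub = Struct.new(:provider_params) do
  def lookup_custom_param(name)
    provider_params && provider_params[name]
  end
end

model = LlmModelStub.new("disable_native_tools" => true)

# Mirrors the dialect change below: native tools unless the checkbox is ticked.
native_tool_support = !model.lookup_custom_param("disable_native_tools")
native_tool_support # => false, so the dialect falls back to XML-based tools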

assets/javascripts/discourse/connectors/composer-fields/persona-llm-selector.gjs

Lines changed: 37 additions & 12 deletions

@@ -15,7 +15,7 @@ function isBotMessage(composer, currentUser) {
     const reciepients = composer.targetRecipients.split(",");

     return currentUser.ai_enabled_chat_bots
-      .filter((bot) => !bot.is_persona)
+      .filter((bot) => bot.username)
       .any((bot) => reciepients.any((username) => username === bot.username));
   }
   return false;
@@ -43,7 +43,7 @@ export default class BotSelector extends Component {
   constructor() {
     super(...arguments);

-    if (this.botOptions && this.composer) {
+    if (this.botOptions && this.botOptions.length && this.composer) {
       let personaId = this.preferredPersonaStore.getObject("id");

       this._value = this.botOptions[0].id;
@@ -57,29 +57,49 @@
       this.composer.metaData = { ai_persona_id: this._value };
       this.setAllowLLMSelector();

-      let llm = this.preferredLlmStore.getObject("id");
+      if (this.hasLlmSelector) {
+        let llm = this.preferredLlmStore.getObject("id");

-      const llmOption =
-        this.llmOptions.find((innerLlmOption) => innerLlmOption.id === llm) ||
-        this.llmOptions[0];
+        const llmOption =
+          this.llmOptions.find((innerLlmOption) => innerLlmOption.id === llm) ||
+          this.llmOptions[0];

-      llm = llmOption.id;
+        if (llmOption) {
+          llm = llmOption.id;
+        } else {
+          llm = "";
+        }

-      if (llm) {
-        next(() => {
-          this.currentLlm = llm;
-        });
+        if (llm) {
+          next(() => {
+            this.currentLlm = llm;
+          });
+        }
       }
+
+      next(() => {
+        this.resetTargetRecipients();
+      });
     }
   }

   get composer() {
     return this.args?.outletArgs?.model;
   }

+  get hasLlmSelector() {
+    return this.currentUser.ai_enabled_chat_bots.any((bot) => !bot.is_persona);
+  }
+
   get botOptions() {
     if (this.currentUser.ai_enabled_personas) {
-      return this.currentUser.ai_enabled_personas.map((persona) => {
+      let enabledPersonas = this.currentUser.ai_enabled_personas;
+
+      if (!this.hasLlmSelector) {
+        enabledPersonas = enabledPersonas.filter((persona) => persona.username);
+      }
+
+      return enabledPersonas.map((persona) => {
         return {
           id: persona.id,
           name: persona.name,
@@ -106,6 +126,11 @@ export default class BotSelector extends Component {
   }

   setAllowLLMSelector() {
+    if (!this.hasLlmSelector) {
+      this.allowLLMSelector = false;
+      return;
+    }
+
     const persona = this.currentUser.ai_enabled_personas.find(
       (innerPersona) => innerPersona.id === this._value
     );

config/locales/client.en.yml

Lines changed: 1 addition & 0 deletions

@@ -329,6 +329,7 @@ en:
           organization: "Optional OpenAI Organization ID"
           disable_system_prompt: "Disable system message in prompts"
           enable_native_tool: "Enable native tool support"
+          disable_native_tools: "Disable native tool support (use XML based tools)"

       related_topics:
         title: "Related Topics"

config/locales/server.en.yml

Lines changed: 0 additions & 1 deletion

@@ -49,7 +49,6 @@ en:

     ai_openai_embeddings_url: "Custom URL used for the OpenAI embeddings API. (in the case of Azure it can be: https://COMPANY.openai.azure.com/openai/deployments/DEPLOYMENT/embeddings?api-version=2023-05-15)"
     ai_openai_api_key: "API key for OpenAI API. ONLY used for embeddings and Dall-E. For GPT use the LLM config tab"
-    ai_anthropic_native_tool_call_models: "List of models that will use native tool calls vs legacy XML based tools."
     ai_hugging_face_tei_endpoint: URL where the API is running for the Hugging Face text embeddings inference
     ai_hugging_face_tei_api_key: API key for Hugging Face text embeddings inference

config/settings.yml

Lines changed: 0 additions & 10 deletions

@@ -125,16 +125,6 @@ discourse_ai:
   ai_anthropic_api_key:
     default: ""
     hidden: true
-  ai_anthropic_native_tool_call_models:
-    type: list
-    list_type: compact
-    default: "claude-3-sonnet|claude-3-haiku"
-    allow_any: false
-    choices:
-      - claude-3-opus
-      - claude-3-sonnet
-      - claude-3-haiku
-      - claude-3-5-sonnet
   ai_cohere_api_key:
     default: ""
     hidden: true

lib/ai_bot/entry_point.rb

Lines changed: 2 additions & 0 deletions

@@ -145,6 +145,8 @@ def inject_into(plugin)

         persona_users = AiPersona.persona_users(user: scope.user)
         if persona_users.present?
+          persona_users.filter! { |persona_user| persona_user[:username].present? }
+
           bots_map.concat(
             persona_users.map do |persona_user|
               {
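
With the filter above, only personas that are backed by an actual bot user make it into the serialized bots list. A tiny sketch of the effect, using invented sample data and a plain-Ruby stand-in for ActiveSupport's present?:

# Sample persona_users rows, invented for illustration.
persona_users = [
  { id: 1, username: "researcher_bot" },
  { id: 2, username: nil },  # persona with no dedicated user
  { id: 3, username: "" },   # blank usernames are dropped too
]

# Same predicate as the diff above; filter! is select! under another name.
persona_users.select! { |pu| !pu[:username].to_s.strip.empty? }

persona_users.map { |pu| pu[:username] } # => ["researcher_bot"]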

lib/completions/dialects/claude.rb

Lines changed: 1 addition & 1 deletion

@@ -61,7 +61,7 @@ def max_prompt_tokens
       end

       def native_tool_support?
-        SiteSetting.ai_anthropic_native_tool_call_models_map.include?(llm_model.name)
+        !llm_model.lookup_custom_param("disable_native_tools")
       end

       private

lib/completions/endpoints/anthropic.rb

Lines changed: 7 additions & 2 deletions

@@ -27,7 +27,7 @@ def default_options(dialect)
           when "claude-3-opus"
             "claude-3-opus-20240229"
           when "claude-3-5-sonnet"
-            "claude-3-5-sonnet-20240620"
+            "claude-3-5-sonnet-latest"
           else
             llm_model.name
           end
@@ -70,7 +70,12 @@ def prepare_payload(prompt, model_params, dialect)

         payload[:system] = prompt.system_prompt if prompt.system_prompt.present?
         payload[:stream] = true if @streaming_mode
-        payload[:tools] = prompt.tools if prompt.has_tools?
+        if prompt.has_tools?
+          payload[:tools] = prompt.tools
+          if dialect.tool_choice.present?
+            payload[:tool_choice] = { type: "tool", name: dialect.tool_choice }
+          end
+        end

         payload
       end
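
Put together, forcing a tool on the Anthropic endpoint yields a request body shaped roughly like the sketch below. The tool definition and values are illustrative; only the fields touched by prepare_payload are shown.

require "json"

payload = {
  model: "claude-3-5-sonnet-latest",
  max_tokens: 3000, # illustrative; the real value comes from default_options
  messages: [{ role: "user", content: "find recent topics about onboarding" }],
  tools: [
    {
      name: "search",
      description: "Search the forum",
      input_schema: { type: "object", properties: {} },
    },
  ],
  # Added by this commit whenever dialect.tool_choice is present.
  tool_choice: { type: "tool", name: "search" },
}

puts JSON.pretty_generate(payload)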

lib/completions/endpoints/aws_bedrock.rb

Lines changed: 8 additions & 2 deletions

@@ -61,7 +61,7 @@ def model_uri
           when "claude-3-opus"
             "anthropic.claude-3-opus-20240229-v1:0"
           when "claude-3-5-sonnet"
-            "anthropic.claude-3-5-sonnet-20240620-v1:0"
+            "anthropic.claude-3-5-sonnet-20241022-v2:0"
           else
             llm_model.name
           end
@@ -83,7 +83,13 @@ def prepare_payload(prompt, model_params, dialect)

         payload = default_options(dialect).merge(model_params).merge(messages: prompt.messages)
         payload[:system] = prompt.system_prompt if prompt.system_prompt.present?
-        payload[:tools] = prompt.tools if prompt.has_tools?
+
+        if prompt.has_tools?
+          payload[:tools] = prompt.tools
+          if dialect.tool_choice.present?
+            payload[:tool_choice] = { type: "tool", name: dialect.tool_choice }
+          end
+        end

         payload
       end

lib/completions/endpoints/gemini.rb

Lines changed: 14 additions & 3 deletions

@@ -67,7 +67,16 @@ def prepare_payload(prompt, model_params, dialect)
           } if prompt[:system_instruction].present?
         if tools.present?
           payload[:tools] = tools
-          payload[:tool_config] = { function_calling_config: { mode: "AUTO" } }
+
+          function_calling_config = { mode: "AUTO" }
+          if dialect.tool_choice.present?
+            function_calling_config = {
+              mode: "ANY",
+              allowed_function_names: [dialect.tool_choice],
+            }
+          end
+
+          payload[:tool_config] = { function_calling_config: function_calling_config }
         end
         payload[:generationConfig].merge!(model_params) if model_params.present?
         payload
@@ -88,8 +97,10 @@ def extract_completion_from(response_raw)
         end
         response_h = parsed.dig(:candidates, 0, :content, :parts, 0)

-        @has_function_call ||= response_h.dig(:functionCall).present?
-        @has_function_call ? response_h[:functionCall] : response_h.dig(:text)
+        if response_h
+          @has_function_call ||= response_h.dig(:functionCall).present?
+          @has_function_call ? response_h.dig(:functionCall) : response_h.dig(:text)
+        end
       end

       def partials_from(decoded_chunk)
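
For comparison, the Gemini endpoint switches function calling from AUTO to ANY with a single allowed function when a tool is forced. A small sketch of the two tool_config shapes built above, with an illustrative tool name:

# Default behaviour: the model decides whether to call a function.
auto_config = { function_calling_config: { mode: "AUTO" } }

# Forced tool: ANY mode restricted to the one allowed function,
# the same structure prepare_payload builds above.
forced_config = {
  function_calling_config: {
    mode: "ANY",
    allowed_function_names: ["search"],
  },
}

payload = { tools: [], tool_config: forced_config } # tools list elided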
