This repository was archived by the owner on Jul 22, 2025. It is now read-only.

Commit 5a29074

DEV: Use default LLM model

1 parent 29d4157

8 files changed: +74, −22 lines

config/locales/client.en.yml

Lines changed: 1 addition & 1 deletion

@@ -183,7 +183,7 @@ en:
      default_llm:
        title: "Default LLM model"
-        description: "The default LLM model to use for all AI features. This will be used if no LLM is specified in the feature configuration or persona."
+        description: "The default LLM model to use for all AI features. This will be used if no LLM is specified in the feature configuration or persona. If no default LLM is specified, the last created LLM will be used."
      features:
        short_title: "Features"
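The updated description documents a three-step fallback: the LLM configured on the feature or persona, then the default LLM, then the most recently created LLM. A minimal sketch of that order in plain Ruby, with hypothetical ids and "last created" approximated as the highest id (matching the seed migration below):

# Sketch only: ids are hypothetical, not taken from any real site.
def pick_llm(feature_or_persona_llm_id, default_llm_id, all_llm_ids)
  feature_or_persona_llm_id || default_llm_id || all_llm_ids.max
end

puts pick_llm(2, 5, [3, 5, 7])     # => 2  explicit feature/persona LLM wins
puts pick_llm(nil, 5, [3, 5, 7])   # => 5  falls back to the default LLM
puts pick_llm(nil, nil, [3, 5, 7]) # => 7  no default set: last created LLM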

config/settings.yml

Lines changed: 4 additions & 4 deletions

@@ -109,7 +109,7 @@ discourse_ai:
    default: false
    client: true
    area: "ai-features/ai_helper"
-  ai_helper_model:
+  ai_helper_model: # Deprecated. TODO(keegan): Remove 2025-09-01
    default: ""
    allow_any: false
    type: enum
@@ -155,7 +155,7 @@ discourse_ai:
      - "context_menu"
      - "image_caption"
    area: "ai-features/ai_helper"
-  ai_helper_image_caption_model:
+  ai_helper_image_caption_model: # Deprecated. TODO(keegan): Remove 2025-09-01
    default: ""
    type: enum
    enum: "DiscourseAi::Configuration::LlmVisionEnumerator"
@@ -266,7 +266,7 @@ discourse_ai:
    client: true
    validator: "DiscourseAi::Configuration::LlmDependencyValidator"
    area: "ai-features/embeddings"
-  ai_embeddings_semantic_search_hyde_model:
+  ai_embeddings_semantic_search_hyde_model: # Deprecated. TODO(keegan): Remove 2025-09-01
    default: ""
    type: enum
    allow_any: false
@@ -322,7 +322,7 @@ discourse_ai:
    client: true
    validator: "DiscourseAi::Configuration::LlmDependencyValidator"
    area: "ai-features/summarization"
-  ai_summarization_model:
+  ai_summarization_model: # Deprecated. TODO(keegan): Remove 2025-09-01
    default: ""
    allow_any: false
    type: enum
Lines changed: 16 additions & 0 deletions

@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+class SeedAiDefaultLlmModel < ActiveRecord::Migration[7.2]
+  def up
+    return if DB.query_single("SELECT 1 FROM llm_models LIMIT 1").empty?
+
+    last_model_id = DB.query_single("SELECT id FROM llm_models ORDER BY id DESC LIMIT 1").first
+
+    if last_model_id.present?
+      execute "UPDATE site_settings SET value = 'custom:#{last_model_id}' WHERE name = 'ai_default_llm_model' AND (value IS NULL OR value = '');"
+    end
+  end
+
+  def down
+    raise ActiveRecord::IrreversibleMigration
+  end
+end
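The effect of this seed, sketched in plain Ruby with stubbed query results (the row ids and the existing setting value are hypothetical): the newest llm_models id is written as "custom:<id>" into ai_default_llm_model, and only when that setting is still blank.

llm_model_ids = [3, 5, 7]   # stand-in for SELECT id FROM llm_models
existing_value = ""         # stand-in for the current ai_default_llm_model value
last_model_id = llm_model_ids.max

seeded_value =
  if !llm_model_ids.empty? && existing_value.to_s.empty?
    "custom:#{last_model_id}" # what the UPDATE writes
  else
    existing_value            # an already-set default is left untouched
  end

puts seeded_value # => "custom:7"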
Lines changed: 23 additions & 0 deletions

@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+class CopyAiSummarizationModelToPersonaDefault < ActiveRecord::Migration[7.2]
+  def up
+    ai_summarization_model =
+      DB.query_single("SELECT value FROM site_settings WHERE name = 'ai_summarization_model'").first
+
+    if ai_summarization_model.present? && ai_summarization_model.start_with?("custom:")
+      # Extract the model ID from the setting value (e.g., "custom:-5" -> "-5")
+      model_id = ai_summarization_model.split(":").last
+
+      # Update the summarization personas (IDs -11 and -12) with the extracted model ID
+      execute(<<~SQL)
+        UPDATE ai_personas
+        SET default_llm_id = #{model_id}
+        WHERE id IN (-11, -12) AND default_llm_id IS NULL
+      SQL
+    end
+  end
+
+  def down
+    raise ActiveRecord::IrreversibleMigration
+  end
+end
Lines changed: 23 additions & 0 deletions

@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+class CopyAiHelperModelToPersonaDefault < ActiveRecord::Migration[7.2]
+  def up
+    ai_helper_model =
+      DB.query_single("SELECT value FROM site_settings WHERE name = 'ai_helper_model'").first
+
+    if ai_helper_model.present? && ai_helper_model.start_with?("custom:")
+      # Extract the model ID from the setting value (e.g., "custom:1" -> "1")
+      model_id = ai_helper_model.split(":").last
+
+      # Update the helper personas with the extracted model ID
+      execute(<<~SQL)
+        UPDATE ai_personas
+        SET default_llm_id = #{model_id}
+        WHERE id IN (-18, -19, -20, -21, -22, -23, -24, -25, -26) AND default_llm_id IS NULL
+      SQL
+    end
+  end
+
+  def down
+    raise ActiveRecord::IrreversibleMigration
+  end
+end
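Both copy migrations above follow the same pattern; here is a small plain-Ruby sketch of it with hypothetical values: only settings in the legacy "custom:<id>" format are parsed, and the id is copied only onto personas whose default_llm_id is still NULL.

setting_value = "custom:12"         # hypothetical legacy setting value
personas = { -18 => nil, -19 => 4 } # persona id => default_llm_id (nil stands in for NULL)

if setting_value.to_s.start_with?("custom:")
  model_id = setting_value.split(":").last.to_i # "custom:12" -> 12
  personas.each_key { |id| personas[id] ||= model_id }
end

p personas # => {-18=>12, -19=>4}  (persona -19 keeps its explicit LLM)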

lib/ai_helper/assistant.rb

Lines changed: 3 additions & 11 deletions

@@ -312,18 +312,10 @@ def find_ai_helper_model(helper_mode, persona_klass)

    # Priorities are:
    # 1. Persona's default LLM
-    # 2. Hidden `ai_helper_model` setting, or `ai_helper_image_caption_model` for image_caption.
-    # 3. Newest LLM config
+    # 2. SiteSetting.ai_default_llm_id (or newest LLM if not set)
    def self.find_ai_helper_model(helper_mode, persona_klass)
-      model_id = persona_klass.default_llm_id
-
-      if !model_id
-        if helper_mode == IMAGE_CAPTION
-          model_id = SiteSetting.ai_helper_image_caption_model&.split(":")&.last
-        else
-          model_id = SiteSetting.ai_helper_model&.split(":")&.last
-        end
-      end
+      model_id =
+        persona_klass.default_llm_id || SiteSetting.ai_default_llm_model&.split(":")&.last # Remove legacy custom provider.

      if model_id.present?
        LlmModel.find_by(id: model_id)
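The new lookup collapses the per-mode branching into a single expression: the persona's default_llm_id wins, otherwise ai_default_llm_model is used with its legacy "custom:" prefix stripped. A stubbed sketch of just that resolution step (the Struct and the values are hypothetical; in the plugin the resulting id then goes to LlmModel.find_by):

PersonaStub = Struct.new(:default_llm_id)

def resolve_model_id(persona, ai_default_llm_model)
  # Safe navigation keeps this nil when no default LLM setting is present.
  persona.default_llm_id || ai_default_llm_model&.split(":")&.last
end

p resolve_model_id(PersonaStub.new(9), "custom:7")   # => 9    persona wins
p resolve_model_id(PersonaStub.new(nil), "custom:7") # => "7"  prefix stripped
p resolve_model_id(PersonaStub.new(nil), nil)        # => nil  caller's blank check handles this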

lib/embeddings/semantic_search.rb

Lines changed: 2 additions & 3 deletions

@@ -210,11 +210,10 @@ def hypothetical_post_from(search_term)

    # Priorities are:
    # 1. Persona's default LLM
-    # 2. `ai_embeddings_semantic_search_hyde_model` setting.
+    # 2. SiteSetting.ai_default_llm_id (or newest LLM if not set)
    def find_ai_hyde_model(persona_klass)
      model_id =
-        persona_klass.default_llm_id ||
-          SiteSetting.ai_embeddings_semantic_search_hyde_model&.split(":")&.last
+        persona_klass.default_llm_id || SiteSetting.ai_default_llm_model&.split(":")&.last # Remove legacy custom provider.

      return if model_id.blank?

lib/summarization.rb

Lines changed: 2 additions & 3 deletions

@@ -54,11 +54,10 @@ def chat_channel_summary(channel, time_window_in_hours)

    # Priorities are:
    # 1. Persona's default LLM
-    # 2. Hidden `ai_summarization_model` setting
-    # 3. Newest LLM config
+    # 2. SiteSetting.ai_default_llm_id (or newest LLM if not set)
    def find_summarization_model(persona_klass)
      model_id =
-        persona_klass.default_llm_id || SiteSetting.ai_summarization_model&.split(":")&.last # Remove legacy custom provider.
+        persona_klass.default_llm_id || SiteSetting.ai_default_llm_model&.split(":")&.last # Remove legacy custom provider.

      if model_id.present?
        LlmModel.find_by(id: model_id)

0 commit comments