From 2592be65ff0c7392e189bcc21d5f879499caddf1 Mon Sep 17 00:00:00 2001 From: Keegan George Date: Wed, 9 Jul 2025 15:29:24 -0700 Subject: [PATCH 01/34] FEATURE: configure a default LLM model for all features This update adds a setting to the configure a default LLM model to be used for all features (unless overridden). --- config/locales/server.en.yml | 1 + config/settings.yml | 6 ++++++ 2 files changed, 7 insertions(+) diff --git a/config/locales/server.en.yml b/config/locales/server.en.yml index 1126bd93c..82348acb7 100644 --- a/config/locales/server.en.yml +++ b/config/locales/server.en.yml @@ -25,6 +25,7 @@ en: description: "Periodic report based on a large language model" site_settings: discourse_ai_enabled: "Enable the discourse AI plugin." + ai_default_llm_model: "The default LLM model to use for all AI features" ai_artifact_security: "The AI artifact system generates IFRAMEs with runnable code. Strict mode forces an extra click to run code. Lax mode runs code immediately. Hybrid mode allows user to supply data-ai-artifact-autorun to show right away. Disabled mode disables the artifact system." ai_toxicity_enabled: "Enable the toxicity module." 
ai_toxicity_inference_service_api_endpoint: "URL where the API is running for the toxicity module" diff --git a/config/settings.yml b/config/settings.yml index 117b41153..59bfea044 100644 --- a/config/settings.yml +++ b/config/settings.yml @@ -11,6 +11,12 @@ discourse_ai: - "lax" - "hybrid" - "strict" + ai_default_llm_model: + default: "" + type: enum + allow_any: false + enum: "DiscourseAi::Configuration::LlmEnumerator" + validator: "DiscourseAi::Configuration::LlmValidator" ai_sentiment_enabled: default: false From cad39c1aa42bed550d1bde4c051d3df434b5350d Mon Sep 17 00:00:00 2001 From: Keegan George Date: Wed, 9 Jul 2025 15:29:50 -0700 Subject: [PATCH 02/34] DEV: client side --- .../components/ai-default-llm-selector.gjs | 46 +++++++++++++++++++ .../discourse/components/ai-features.gjs | 3 ++ .../components/ai-llms-list-editor.gjs | 4 ++ assets/stylesheets/common/ai-features.scss | 44 ++++++++++++++++++ config/locales/client.en.yml | 4 ++ 5 files changed, 101 insertions(+) create mode 100644 assets/javascripts/discourse/components/ai-default-llm-selector.gjs diff --git a/assets/javascripts/discourse/components/ai-default-llm-selector.gjs b/assets/javascripts/discourse/components/ai-default-llm-selector.gjs new file mode 100644 index 000000000..9a28c7b75 --- /dev/null +++ b/assets/javascripts/discourse/components/ai-default-llm-selector.gjs @@ -0,0 +1,46 @@ +import Component from "@glimmer/component"; +import { tracked } from "@glimmer/tracking"; +import { ajax } from "discourse/lib/ajax"; +import { i18n } from "discourse-i18n"; +import SiteSettingComponent from "admin/components/site-setting"; +import SiteSetting from "admin/models/site-setting"; + +export default class AiDefaultLlmSelector extends Component { + @tracked defaultLlmSetting = null; + + constructor() { + super(...arguments); + this.#loadDefaultLlmSetting(); + } + + async #loadDefaultLlmSetting() { + const { site_settings } = await ajax("/admin/config/site_settings.json", { + data: { + plugin: 
"discourse-ai", + category: "discourse_ai", + }, + }); + + const defaultLlmSetting = site_settings.find( + (setting) => setting.setting === "ai_default_llm_model" + ); + + this.defaultLlmSetting = SiteSetting.create(defaultLlmSetting); + } + + +} diff --git a/assets/javascripts/discourse/components/ai-features.gjs b/assets/javascripts/discourse/components/ai-features.gjs index 45f1b12ef..8790e51b2 100644 --- a/assets/javascripts/discourse/components/ai-features.gjs +++ b/assets/javascripts/discourse/components/ai-features.gjs @@ -10,6 +10,7 @@ import DSelect from "discourse/components/d-select"; import FilterInput from "discourse/components/filter-input"; import { i18n } from "discourse-i18n"; import AiFeaturesList from "./ai-features-list"; +import AiDefaultLlmSelector from "./ai-default-llm-selector"; const ALL = "all"; const CONFIGURED = "configured"; @@ -202,6 +203,8 @@ export default class AiFeatures extends Component { /> + + {{#if this.filteredFeatures.length}} {{else}} diff --git a/assets/javascripts/discourse/components/ai-llms-list-editor.gjs b/assets/javascripts/discourse/components/ai-llms-list-editor.gjs index 08f1715c8..8d70b4350 100644 --- a/assets/javascripts/discourse/components/ai-llms-list-editor.gjs +++ b/assets/javascripts/discourse/components/ai-llms-list-editor.gjs @@ -9,6 +9,7 @@ import I18n, { i18n } from "discourse-i18n"; import AdminSectionLandingItem from "admin/components/admin-section-landing-item"; import AdminSectionLandingWrapper from "admin/components/admin-section-landing-wrapper"; import DTooltip from "float-kit/components/d-tooltip"; +import AiDefaultLlmSelector from "./ai-default-llm-selector"; import AiLlmEditor from "./ai-llm-editor"; function isPreseeded(llm) { @@ -137,6 +138,9 @@ export default class AiLlmsListEditor extends Component { }} @learnMoreUrl="https://meta.discourse.org/t/discourse-ai-large-language-model-llm-settings-page/319903" /> + + + {{#if this.hasLlmElements}}
Date: Wed, 9 Jul 2025 15:36:34 -0700 Subject: [PATCH 03/34] DEV: updates... --- assets/javascripts/discourse/components/ai-features.gjs | 2 +- assets/stylesheets/common/ai-features.scss | 9 ++++----- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/assets/javascripts/discourse/components/ai-features.gjs b/assets/javascripts/discourse/components/ai-features.gjs index 8790e51b2..8c762fc0d 100644 --- a/assets/javascripts/discourse/components/ai-features.gjs +++ b/assets/javascripts/discourse/components/ai-features.gjs @@ -9,8 +9,8 @@ import DPageSubheader from "discourse/components/d-page-subheader"; import DSelect from "discourse/components/d-select"; import FilterInput from "discourse/components/filter-input"; import { i18n } from "discourse-i18n"; -import AiFeaturesList from "./ai-features-list"; import AiDefaultLlmSelector from "./ai-default-llm-selector"; +import AiFeaturesList from "./ai-features-list"; const ALL = "all"; const CONFIGURED = "configured"; diff --git a/assets/stylesheets/common/ai-features.scss b/assets/stylesheets/common/ai-features.scss index f8bb58fd6..0e444c2c0 100644 --- a/assets/stylesheets/common/ai-features.scss +++ b/assets/stylesheets/common/ai-features.scss @@ -163,6 +163,10 @@ display: flex; align-items: center; gap: var(--space-2); + background: var(--primary-very-low); + padding: 1rem; + margin-block: 1rem; + border-radius: var(--d-border-radius); &__header { flex: 3; @@ -170,16 +174,11 @@ &__setting { flex: 2; - display: flex; gap: var(--space-2); align-items: center; justify-content: center; } - background: var(--primary-very-low); - padding: 1rem; - margin-block: 1rem; - border-radius: var(--d-border-radius); h3 { color: var(--primary); From 29d415755aa7a0ebb486b3b51a89a7f42de616e5 Mon Sep 17 00:00:00 2001 From: Keegan George Date: Wed, 9 Jul 2025 15:42:44 -0700 Subject: [PATCH 04/34] fix --- assets/stylesheets/common/ai-features.scss | 1 + 1 file changed, 1 insertion(+) diff --git 
a/assets/stylesheets/common/ai-features.scss b/assets/stylesheets/common/ai-features.scss index 0e444c2c0..bd9999e64 100644 --- a/assets/stylesheets/common/ai-features.scss +++ b/assets/stylesheets/common/ai-features.scss @@ -196,6 +196,7 @@ .btn { font-size: var(--font-down-1); } + .setting-label, .desc { display: none; From 5a29074799d2b5bb7e5df5e251f2d72a3d8a3670 Mon Sep 17 00:00:00 2001 From: Keegan George Date: Thu, 10 Jul 2025 11:54:35 -0700 Subject: [PATCH 05/34] DEV: Use default LLM model --- config/locales/client.en.yml | 2 +- config/settings.yml | 8 +++---- ...0250710173803_seed_ai_default_llm_model.rb | 16 +++++++++++++ ..._summarization_model_to_persona_default.rb | 23 +++++++++++++++++++ ...copy_ai_helper_model_to_persona_default.rb | 23 +++++++++++++++++++ lib/ai_helper/assistant.rb | 14 +++-------- lib/embeddings/semantic_search.rb | 5 ++-- lib/summarization.rb | 5 ++-- 8 files changed, 74 insertions(+), 22 deletions(-) create mode 100644 db/migrate/20250710173803_seed_ai_default_llm_model.rb create mode 100644 db/migrate/20250710180401_copy_ai_summarization_model_to_persona_default.rb create mode 100644 db/migrate/20250710181656_copy_ai_helper_model_to_persona_default.rb diff --git a/config/locales/client.en.yml b/config/locales/client.en.yml index ec4b96320..8e797711f 100644 --- a/config/locales/client.en.yml +++ b/config/locales/client.en.yml @@ -183,7 +183,7 @@ en: default_llm: title: "Default LLM model" - description: "The default LLM model to use for all AI features. This will be used if no LLM is specified in the feature configuration or persona." + description: "The default LLM model to use for all AI features. This will be used if no LLM is specified in the feature configuration or persona. If no default LLM is specified, the last created LLM will be used." 
features: short_title: "Features" diff --git a/config/settings.yml b/config/settings.yml index 59bfea044..d5bfa8a33 100644 --- a/config/settings.yml +++ b/config/settings.yml @@ -109,7 +109,7 @@ discourse_ai: default: false client: true area: "ai-features/ai_helper" - ai_helper_model: + ai_helper_model: # Deprecated. TODO(keegan): Remove 2025-09-01 default: "" allow_any: false type: enum @@ -155,7 +155,7 @@ discourse_ai: - "context_menu" - "image_caption" area: "ai-features/ai_helper" - ai_helper_image_caption_model: + ai_helper_image_caption_model: # Deprecated. TODO(keegan): Remove 2025-09-01 default: "" type: enum enum: "DiscourseAi::Configuration::LlmVisionEnumerator" @@ -266,7 +266,7 @@ discourse_ai: client: true validator: "DiscourseAi::Configuration::LlmDependencyValidator" area: "ai-features/embeddings" - ai_embeddings_semantic_search_hyde_model: + ai_embeddings_semantic_search_hyde_model: # Deprecated. TODO(keegan): Remove 2025-09-01 default: "" type: enum allow_any: false @@ -322,7 +322,7 @@ discourse_ai: client: true validator: "DiscourseAi::Configuration::LlmDependencyValidator" area: "ai-features/summarization" - ai_summarization_model: + ai_summarization_model: # Deprecated. TODO(keegan): Remove 2025-09-01 default: "" allow_any: false type: enum diff --git a/db/migrate/20250710173803_seed_ai_default_llm_model.rb b/db/migrate/20250710173803_seed_ai_default_llm_model.rb new file mode 100644 index 000000000..c97106134 --- /dev/null +++ b/db/migrate/20250710173803_seed_ai_default_llm_model.rb @@ -0,0 +1,16 @@ +# frozen_string_literal: true +class SeedAiDefaultLlmModel < ActiveRecord::Migration[7.2] + def up + return if DB.query_single("SELECT 1 FROM llm_models LIMIT 1").empty? + + last_model_id = DB.query_single("SELECT id FROM llm_models ORDER BY id DESC LIMIT 1").first + + if last_model_id.present? 
+ execute "UPDATE site_settings SET value = 'custom:#{last_model_id}' WHERE name = 'ai_default_llm_model' AND (value IS NULL OR value = '');" + end + end + + def down + raise ActiveRecord::IrreversibleMigration + end +end diff --git a/db/migrate/20250710180401_copy_ai_summarization_model_to_persona_default.rb b/db/migrate/20250710180401_copy_ai_summarization_model_to_persona_default.rb new file mode 100644 index 000000000..ef286cb3f --- /dev/null +++ b/db/migrate/20250710180401_copy_ai_summarization_model_to_persona_default.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true +class CopyAiSummarizationModelToPersonaDefault < ActiveRecord::Migration[7.2] + def up + ai_summarization_model = + DB.query_single("SELECT value FROM site_settings WHERE name = 'ai_summarization_model'").first + + if ai_summarization_model.present? && ai_summarization_model.start_with?("custom:") + # Extract the model ID from the setting value (e.g., "custom:-5" -> "-5") + model_id = ai_summarization_model.split(":").last + + # Update the summarization personas (IDs -11 and -12) with the extracted model ID + execute(<<~SQL) + UPDATE ai_personas + SET default_llm_id = #{model_id} + WHERE id IN (-11, -12) AND default_llm_id IS NULL + SQL + end + end + + def down + raise ActiveRecord::IrreversibleMigration + end +end diff --git a/db/migrate/20250710181656_copy_ai_helper_model_to_persona_default.rb b/db/migrate/20250710181656_copy_ai_helper_model_to_persona_default.rb new file mode 100644 index 000000000..0b4187b11 --- /dev/null +++ b/db/migrate/20250710181656_copy_ai_helper_model_to_persona_default.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true +class CopyAiHelperModelToPersonaDefault < ActiveRecord::Migration[7.2] + def up + ai_helper_model = + DB.query_single("SELECT value FROM site_settings WHERE name = 'ai_helper_model'").first + + if ai_helper_model.present? 
&& ai_helper_model.start_with?("custom:") + # Extract the model ID from the setting value (e.g., "custom:1" -> "1") + model_id = ai_helper_model.split(":").last + + # Update the helper personas with the extracted model ID + execute(<<~SQL) + UPDATE ai_personas + SET default_llm_id = #{model_id} + WHERE id IN (-18, -19, -20, -21, -22, -23, -24, -25, -26) AND default_llm_id IS NULL + SQL + end + end + + def down + raise ActiveRecord::IrreversibleMigration + end +end diff --git a/lib/ai_helper/assistant.rb b/lib/ai_helper/assistant.rb index be61e415a..8081e9cdb 100644 --- a/lib/ai_helper/assistant.rb +++ b/lib/ai_helper/assistant.rb @@ -312,18 +312,10 @@ def find_ai_helper_model(helper_mode, persona_klass) # Priorities are: # 1. Persona's default LLM - # 2. Hidden `ai_helper_model` setting, or `ai_helper_image_caption_model` for image_caption. - # 3. Newest LLM config + # 2. SiteSetting.ai_default_llm_id (or newest LLM if not set) def self.find_ai_helper_model(helper_mode, persona_klass) - model_id = persona_klass.default_llm_id - - if !model_id - if helper_mode == IMAGE_CAPTION - model_id = SiteSetting.ai_helper_image_caption_model&.split(":")&.last - else - model_id = SiteSetting.ai_helper_model&.split(":")&.last - end - end + model_id = + persona_klass.default_llm_id || SiteSetting.ai_default_llm_model&.split(":")&.last # Remove legacy custom provider. if model_id.present? LlmModel.find_by(id: model_id) diff --git a/lib/embeddings/semantic_search.rb b/lib/embeddings/semantic_search.rb index 726f203dc..3ed279206 100644 --- a/lib/embeddings/semantic_search.rb +++ b/lib/embeddings/semantic_search.rb @@ -210,11 +210,10 @@ def hypothetical_post_from(search_term) # Priorities are: # 1. Persona's default LLM - # 2. `ai_embeddings_semantic_search_hyde_model` setting. + # 2. 
SiteSetting.ai_default_llm_id (or newest LLM if not set) def find_ai_hyde_model(persona_klass) model_id = - persona_klass.default_llm_id || - SiteSetting.ai_embeddings_semantic_search_hyde_model&.split(":")&.last + persona_klass.default_llm_id || SiteSetting.ai_default_llm_model&.split(":")&.last # Remove legacy custom provider. return if model_id.blank? diff --git a/lib/summarization.rb b/lib/summarization.rb index a7b697638..f530b89b2 100644 --- a/lib/summarization.rb +++ b/lib/summarization.rb @@ -54,11 +54,10 @@ def chat_channel_summary(channel, time_window_in_hours) # Priorities are: # 1. Persona's default LLM - # 2. Hidden `ai_summarization_model` setting - # 3. Newest LLM config + # 2. SiteSetting.ai_default_llm_id (or newest LLM if not set) def find_summarization_model(persona_klass) model_id = - persona_klass.default_llm_id || SiteSetting.ai_summarization_model&.split(":")&.last # Remove legacy custom provider. + persona_klass.default_llm_id || SiteSetting.ai_default_llm_model&.split(":")&.last # Remove legacy custom provider. if model_id.present? 
LlmModel.find_by(id: model_id) From 249aab100ceb8aa282250490d0e0aa17a1b9ace7 Mon Sep 17 00:00:00 2001 From: Keegan George Date: Thu, 10 Jul 2025 14:44:02 -0700 Subject: [PATCH 06/34] DEV: Use a simple validator - no need for seeding checks or module dependant checks --- config/settings.yml | 2 +- lib/configuration/simple_llm_validator.rb | 34 +++++++++++++++++++++++ 2 files changed, 35 insertions(+), 1 deletion(-) create mode 100644 lib/configuration/simple_llm_validator.rb diff --git a/config/settings.yml b/config/settings.yml index d5bfa8a33..c0bea91e4 100644 --- a/config/settings.yml +++ b/config/settings.yml @@ -16,7 +16,7 @@ discourse_ai: type: enum allow_any: false enum: "DiscourseAi::Configuration::LlmEnumerator" - validator: "DiscourseAi::Configuration::LlmValidator" + validator: "DiscourseAi::Configuration::SimpleLlmValidator" ai_sentiment_enabled: default: false diff --git a/lib/configuration/simple_llm_validator.rb b/lib/configuration/simple_llm_validator.rb new file mode 100644 index 000000000..f15d2d827 --- /dev/null +++ b/lib/configuration/simple_llm_validator.rb @@ -0,0 +1,34 @@ +# frozen_string_literal: true + +module DiscourseAi + module Configuration + class SimpleLlmValidator + def initialize(opts = {}) + @opts = opts + end + + def valid_value?(val) + return true if val == "" + + run_test(val).tap { |result| @unreachable = result } + rescue StandardError => e + raise e if Rails.env.test? + @unreachable = true + true + end + + def run_test(val) + DiscourseAi::Completions::Llm + .proxy(val) + .generate("How much is 1 + 1?", user: nil, feature_name: "llm_validator") + .present? 
+ end + + def error_message + return unless @unreachable + + I18n.t("discourse_ai.llm.configuration.model_unreachable") + end + end + end +end From b46940fc66341719475b469f4f0ae0f4edfbcd6c Mon Sep 17 00:00:00 2001 From: Keegan George Date: Thu, 10 Jul 2025 14:46:02 -0700 Subject: [PATCH 07/34] DEV: hide reset button It resets it to default before the migration --- assets/stylesheets/common/ai-features.scss | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/assets/stylesheets/common/ai-features.scss b/assets/stylesheets/common/ai-features.scss index bd9999e64..372be1740 100644 --- a/assets/stylesheets/common/ai-features.scss +++ b/assets/stylesheets/common/ai-features.scss @@ -178,6 +178,10 @@ gap: var(--space-2); align-items: center; justify-content: center; + + .setting-controls__undo { + display: none; + } } h3 { From c49f1df2ba0c06f50aeeb1ef4f3f18b98487310f Mon Sep 17 00:00:00 2001 From: Keegan George Date: Thu, 10 Jul 2025 15:00:34 -0700 Subject: [PATCH 08/34] DEV: migrate hyde model to content creator persona --- ...250710215720_copy_hyde_model_to_persona.rb | 23 +++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 db/migrate/20250710215720_copy_hyde_model_to_persona.rb diff --git a/db/migrate/20250710215720_copy_hyde_model_to_persona.rb b/db/migrate/20250710215720_copy_hyde_model_to_persona.rb new file mode 100644 index 000000000..982ab0e5f --- /dev/null +++ b/db/migrate/20250710215720_copy_hyde_model_to_persona.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true +class CopyHydeModelToPersona < ActiveRecord::Migration[7.2] + def up + hyde_model = + DB.query_single("SELECT value FROM site_settings WHERE name = 'ai_embeddings_semantic_search_hyde_model'").first + + if hyde_model.present? 
&& hyde_model.start_with?("custom:") + # Extract the model ID from the setting value (e.g., "custom:1" -> "1") + model_id = hyde_model.split(":").last + + # Update the hyde persona with the extracted model ID + execute(<<~SQL) + UPDATE ai_personas + SET default_llm_id = #{model_id} + WHERE id IN (-32) AND default_llm_id IS NULL + SQL + end + end + + def down + raise ActiveRecord::IrreversibleMigration + end +end From 87c42145d45f0ca2b46605846c118fa6c4f4e3d2 Mon Sep 17 00:00:00 2001 From: Keegan George Date: Thu, 10 Jul 2025 15:07:12 -0700 Subject: [PATCH 09/34] DEV: hide hyde model setting --- config/settings.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/config/settings.yml b/config/settings.yml index c0bea91e4..ff7701816 100644 --- a/config/settings.yml +++ b/config/settings.yml @@ -267,6 +267,7 @@ discourse_ai: validator: "DiscourseAi::Configuration::LlmDependencyValidator" area: "ai-features/embeddings" ai_embeddings_semantic_search_hyde_model: # Deprecated. TODO(keegan): Remove 2025-09-01 + hidden: true default: "" type: enum allow_any: false From f366ded03b69347710abbc09434015e53a1dee80 Mon Sep 17 00:00:00 2001 From: Keegan George Date: Fri, 11 Jul 2025 08:19:30 -0700 Subject: [PATCH 10/34] DEV: Force default to be set if it was found not to be set! --- lib/ai_helper/assistant.rb | 7 +++++++ lib/embeddings/semantic_search.rb | 14 ++++++++++++-- lib/summarization.rb | 7 +++++++ 3 files changed, 26 insertions(+), 2 deletions(-) diff --git a/lib/ai_helper/assistant.rb b/lib/ai_helper/assistant.rb index 8081e9cdb..550a33624 100644 --- a/lib/ai_helper/assistant.rb +++ b/lib/ai_helper/assistant.rb @@ -320,6 +320,13 @@ def self.find_ai_helper_model(helper_mode, persona_klass) if model_id.present? LlmModel.find_by(id: model_id) else + last_model_id = LlmModel.last&.id + + # SiteSetting.ai_default_llm_model shouldn't be empty, but if it is, we set it to the last model. + if last_model_id.present? && SiteSetting.ai_default_llm_model.empty? 
+ SiteSetting.set_and_log("ai_default_llm_model", "custom:#{last_model_id}", Discourse.system_user) # Remove legacy custom provider. + end + LlmModel.last end end diff --git a/lib/embeddings/semantic_search.rb b/lib/embeddings/semantic_search.rb index 3ed279206..a1c0b01ee 100644 --- a/lib/embeddings/semantic_search.rb +++ b/lib/embeddings/semantic_search.rb @@ -215,9 +215,19 @@ def find_ai_hyde_model(persona_klass) model_id = persona_klass.default_llm_id || SiteSetting.ai_default_llm_model&.split(":")&.last # Remove legacy custom provider. - return if model_id.blank? + if model_id.present? + LlmModel.find_by(id: model_id) + else + last_model_id = LlmModel.last&.id + + # SiteSetting.ai_default_llm_model shouldn't be empty, but if it is, we set it to the last model. + if last_model_id.present? && SiteSetting.ai_default_llm_model.empty? + SiteSetting.set_and_log("ai_default_llm_model", "custom:#{last_model_id}", Discourse.system_user) # Remove legacy custom provider. + end + + LlmModel.last + end - LlmModel.find_by(id: model_id) end private diff --git a/lib/summarization.rb b/lib/summarization.rb index f530b89b2..e4706cbd4 100644 --- a/lib/summarization.rb +++ b/lib/summarization.rb @@ -62,6 +62,13 @@ def find_summarization_model(persona_klass) if model_id.present? LlmModel.find_by(id: model_id) else + last_model_id = LlmModel.last&.id + + # SiteSetting.ai_default_llm_model shouldn't be empty, but if it is, we set it to the last model. + if last_model_id.present? && SiteSetting.ai_default_llm_model.empty? + SiteSetting.set_and_log("ai_default_llm_model", "custom:#{last_model_id}", Discourse.system_user) # Remove legacy custom provider. 
+ end + LlmModel.last end end From b6a88ff03e41640ee60f89dd2e0aca0b3b4e28ff Mon Sep 17 00:00:00 2001 From: Keegan George Date: Fri, 11 Jul 2025 12:02:50 -0700 Subject: [PATCH 11/34] spec --- spec/requests/ai_helper/assistant_controller_spec.rb | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/spec/requests/ai_helper/assistant_controller_spec.rb b/spec/requests/ai_helper/assistant_controller_spec.rb index 47d5d7d73..0615f9eec 100644 --- a/spec/requests/ai_helper/assistant_controller_spec.rb +++ b/spec/requests/ai_helper/assistant_controller_spec.rb @@ -1,10 +1,14 @@ # frozen_string_literal: true RSpec.describe DiscourseAi::AiHelper::AssistantController do - before { assign_fake_provider_to(:ai_helper_model) } + fab!(:fake_model) fab!(:newuser) fab!(:user) { Fabricate(:user, refresh_auto_groups: true) } + before do + SiteSetting.ai_default_llm_model = "custom:#{fake_model.id}" + end + describe "#stream_suggestion" do before do Jobs.run_immediately! @@ -305,8 +309,6 @@ end let(:bad_caption) { "A picture of a cat \nsitting on a |table|" } - before { assign_fake_provider_to(:ai_helper_image_caption_model) } - def request_caption(params, caption = "A picture of a cat sitting on a table") DiscourseAi::Completions::Llm.with_prepared_responses([caption]) do post "/discourse-ai/ai-helper/caption_image", params: params @@ -411,7 +413,6 @@ def request_caption(params, caption = "A picture of a cat sitting on a table") SiteSetting.provider = SiteSettings::DbProvider.new(SiteSetting) setup_s3 stub_s3_store - assign_fake_provider_to(:ai_helper_image_caption_model) SiteSetting.secure_uploads = true SiteSetting.composer_ai_helper_allowed_groups = Group::AUTO_GROUPS[:trust_level_1] From 8da43180218ee3e7622225397e8a04de4ffaa1e6 Mon Sep 17 00:00:00 2001 From: Keegan George Date: Fri, 11 Jul 2025 12:32:36 -0700 Subject: [PATCH 12/34] DEV: `assign_fake_provider` --- spec/requests/ai_helper/assistant_controller_spec.rb | 3 +-- 1 file changed, 1 insertion(+), 2 
deletions(-) diff --git a/spec/requests/ai_helper/assistant_controller_spec.rb b/spec/requests/ai_helper/assistant_controller_spec.rb index 0615f9eec..333252bb9 100644 --- a/spec/requests/ai_helper/assistant_controller_spec.rb +++ b/spec/requests/ai_helper/assistant_controller_spec.rb @@ -1,12 +1,11 @@ # frozen_string_literal: true RSpec.describe DiscourseAi::AiHelper::AssistantController do - fab!(:fake_model) fab!(:newuser) fab!(:user) { Fabricate(:user, refresh_auto_groups: true) } before do - SiteSetting.ai_default_llm_model = "custom:#{fake_model.id}" + assign_fake_provider_to(:ai_default_llm_model) end describe "#stream_suggestion" do From 2377b286dde0042eea6d42c84a69ddf1b2b684c5 Mon Sep 17 00:00:00 2001 From: Keegan George Date: Tue, 15 Jul 2025 09:56:24 -0700 Subject: [PATCH 13/34] DEV: rely on default llm model in spec --- spec/configuration/llm_validator_spec.rb | 2 +- spec/jobs/regular/fast_track_topic_gist_spec.rb | 3 ++- spec/jobs/regular/stream_composer_helper_spec.rb | 2 +- spec/jobs/regular/stream_post_helper_spec.rb | 2 +- spec/jobs/regular/stream_topic_ai_summary_spec.rb | 2 +- spec/jobs/scheduled/summaries_backfill_spec.rb | 2 +- spec/lib/guardian_extensions_spec.rb | 2 +- spec/lib/modules/ai_helper/assistant_spec.rb | 2 +- spec/lib/modules/ai_helper/chat_thread_titler_spec.rb | 2 +- spec/lib/modules/ai_helper/entry_point_spec.rb | 5 ++--- spec/lib/modules/ai_helper/painter_spec.rb | 2 +- spec/lib/modules/embeddings/semantic_search_spec.rb | 2 +- spec/lib/modules/summarization/entry_point_spec.rb | 2 +- spec/lib/modules/summarization/fold_content_spec.rb | 2 +- spec/lib/personas/tools/search_spec.rb | 3 ++- spec/lib/utils/search_spec.rb | 6 ++++-- spec/models/user_option_spec.rb | 3 +-- spec/plugin_spec.rb | 2 +- spec/requests/admin/ai_llms_controller_spec.rb | 3 +-- spec/requests/ai_helper/assistant_controller_spec.rb | 1 + spec/requests/embeddings/embeddings_controller_spec.rb | 2 +- spec/requests/summarization/chat_summary_controller_spec.rb | 
2 +- spec/requests/summarization/summary_controller_spec.rb | 2 +- spec/services/discourse_ai/topic_summarization_spec.rb | 2 +- spec/system/admin_ai_features_spec.rb | 2 +- spec/system/ai_helper/ai_composer_helper_spec.rb | 2 +- spec/system/ai_helper/ai_image_caption_spec.rb | 3 +-- spec/system/ai_helper/ai_post_helper_spec.rb | 2 +- spec/system/ai_helper/ai_proofreading_spec.rb | 2 +- spec/system/ai_helper/ai_split_topic_suggestion_spec.rb | 2 +- spec/system/ai_user_preferences_spec.rb | 3 +-- spec/system/summarization/chat_summarization_spec.rb | 2 +- spec/system/summarization/topic_summarization_spec.rb | 2 +- 33 files changed, 39 insertions(+), 39 deletions(-) diff --git a/spec/configuration/llm_validator_spec.rb b/spec/configuration/llm_validator_spec.rb index 5c9fdecc4..eb1aaad2c 100644 --- a/spec/configuration/llm_validator_spec.rb +++ b/spec/configuration/llm_validator_spec.rb @@ -4,7 +4,7 @@ describe "#valid_value?" do context "when the parent module is enabled and we try to reset the selected model" do before do - assign_fake_provider_to(:ai_summarization_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_summarization_enabled = true end diff --git a/spec/jobs/regular/fast_track_topic_gist_spec.rb b/spec/jobs/regular/fast_track_topic_gist_spec.rb index ef7dbc47e..77821f856 100644 --- a/spec/jobs/regular/fast_track_topic_gist_spec.rb +++ b/spec/jobs/regular/fast_track_topic_gist_spec.rb @@ -7,7 +7,8 @@ fab!(:post_2) { Fabricate(:post, topic: topic_1, post_number: 2) } before do - assign_fake_provider_to(:ai_summarization_model) + assign_fake_provider_to(:ai_default_llm_model) + SiteSetting.ai_summarization_enabled = true SiteSetting.ai_summary_gists_enabled = true end diff --git a/spec/jobs/regular/stream_composer_helper_spec.rb b/spec/jobs/regular/stream_composer_helper_spec.rb index 350f758fa..3be1b22be 100644 --- a/spec/jobs/regular/stream_composer_helper_spec.rb +++ b/spec/jobs/regular/stream_composer_helper_spec.rb @@ -3,7 +3,7 @@ 
RSpec.describe Jobs::StreamComposerHelper do subject(:job) { described_class.new } - before { assign_fake_provider_to(:ai_helper_model) } + before { assign_fake_provider_to(:ai_default_llm_model) } describe "#execute" do let!(:input) { "I liek to eet pie fur brakefast becuz it is delishus." } diff --git a/spec/jobs/regular/stream_post_helper_spec.rb b/spec/jobs/regular/stream_post_helper_spec.rb index 06cb69d59..3301c549c 100644 --- a/spec/jobs/regular/stream_post_helper_spec.rb +++ b/spec/jobs/regular/stream_post_helper_spec.rb @@ -3,7 +3,7 @@ RSpec.describe Jobs::StreamPostHelper do subject(:job) { described_class.new } - before { assign_fake_provider_to(:ai_helper_model) } + before { assign_fake_provider_to(:ai_default_llm_model) } describe "#execute" do fab!(:topic) diff --git a/spec/jobs/regular/stream_topic_ai_summary_spec.rb b/spec/jobs/regular/stream_topic_ai_summary_spec.rb index 1591e7018..7d3ffa0d2 100644 --- a/spec/jobs/regular/stream_topic_ai_summary_spec.rb +++ b/spec/jobs/regular/stream_topic_ai_summary_spec.rb @@ -11,7 +11,7 @@ before do Group.find(Group::AUTO_GROUPS[:trust_level_3]).add(user) - assign_fake_provider_to(:ai_summarization_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_summarization_enabled = true end diff --git a/spec/jobs/scheduled/summaries_backfill_spec.rb b/spec/jobs/scheduled/summaries_backfill_spec.rb index ccf23b06a..f21d77253 100644 --- a/spec/jobs/scheduled/summaries_backfill_spec.rb +++ b/spec/jobs/scheduled/summaries_backfill_spec.rb @@ -8,7 +8,7 @@ let(:intervals) { 12 } # budget is split into intervals. Job runs every five minutes. 
before do - assign_fake_provider_to(:ai_summarization_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_summarization_enabled = true SiteSetting.ai_summary_backfill_maximum_topics_per_hour = limit SiteSetting.ai_summary_gists_enabled = true diff --git a/spec/lib/guardian_extensions_spec.rb b/spec/lib/guardian_extensions_spec.rb index 33d43c456..1481d358f 100644 --- a/spec/lib/guardian_extensions_spec.rb +++ b/spec/lib/guardian_extensions_spec.rb @@ -7,7 +7,7 @@ before do group.add(user) - assign_fake_provider_to(:ai_summarization_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_summarization_enabled = true SiteSetting.ai_summary_gists_enabled = true end diff --git a/spec/lib/modules/ai_helper/assistant_spec.rb b/spec/lib/modules/ai_helper/assistant_spec.rb index 832235d9c..1a25f67de 100644 --- a/spec/lib/modules/ai_helper/assistant_spec.rb +++ b/spec/lib/modules/ai_helper/assistant_spec.rb @@ -5,7 +5,7 @@ fab!(:empty_locale_user) { Fabricate(:user, locale: "") } before do - assign_fake_provider_to(:ai_helper_model) + assign_fake_provider_to(:ai_default_llm_model) Group.refresh_automatic_groups! 
end diff --git a/spec/lib/modules/ai_helper/chat_thread_titler_spec.rb b/spec/lib/modules/ai_helper/chat_thread_titler_spec.rb index 16a4a9733..13a655d5b 100644 --- a/spec/lib/modules/ai_helper/chat_thread_titler_spec.rb +++ b/spec/lib/modules/ai_helper/chat_thread_titler_spec.rb @@ -3,7 +3,7 @@ RSpec.describe DiscourseAi::AiHelper::ChatThreadTitler do subject(:titler) { described_class.new(thread) } - before { assign_fake_provider_to(:ai_helper_model) } + before { assign_fake_provider_to(:ai_default_llm_model) } fab!(:thread) { Fabricate(:chat_thread) } fab!(:chat_message) { Fabricate(:chat_message, thread: thread) } diff --git a/spec/lib/modules/ai_helper/entry_point_spec.rb b/spec/lib/modules/ai_helper/entry_point_spec.rb index 33765deb6..67f60a7ef 100644 --- a/spec/lib/modules/ai_helper/entry_point_spec.rb +++ b/spec/lib/modules/ai_helper/entry_point_spec.rb @@ -5,7 +5,7 @@ fab!(:french_user) { Fabricate(:user, locale: "fr") } it "will correctly localize available prompts" do - assign_fake_provider_to(:ai_helper_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.default_locale = "en" SiteSetting.allow_user_locale = true SiteSetting.ai_helper_enabled = true @@ -38,8 +38,7 @@ end it "will include auto_image_caption field in the user_option if image caption is enabled" do - assign_fake_provider_to(:ai_helper_model) - assign_fake_provider_to(:ai_helper_image_caption_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_helper_enabled = true SiteSetting.ai_helper_enabled_features = "image_caption" SiteSetting.ai_auto_image_caption_allowed_groups = "10" # tl0 diff --git a/spec/lib/modules/ai_helper/painter_spec.rb b/spec/lib/modules/ai_helper/painter_spec.rb index 1b97bbf51..ec84243a2 100644 --- a/spec/lib/modules/ai_helper/painter_spec.rb +++ b/spec/lib/modules/ai_helper/painter_spec.rb @@ -6,7 +6,7 @@ fab!(:user) before do - assign_fake_provider_to(:ai_helper_model) + assign_fake_provider_to(:ai_default_llm_model) 
SiteSetting.ai_stability_api_url = "https://api.stability.dev" SiteSetting.ai_stability_api_key = "abc" SiteSetting.ai_openai_api_key = "abc" diff --git a/spec/lib/modules/embeddings/semantic_search_spec.rb b/spec/lib/modules/embeddings/semantic_search_spec.rb index 431255453..f96629973 100644 --- a/spec/lib/modules/embeddings/semantic_search_spec.rb +++ b/spec/lib/modules/embeddings/semantic_search_spec.rb @@ -11,7 +11,7 @@ before do SiteSetting.ai_embeddings_selected_model = vector_def.id - assign_fake_provider_to(:ai_embeddings_semantic_search_hyde_model) + assign_fake_provider_to(:ai_default_llm_model) end describe "#search_for_topics" do diff --git a/spec/lib/modules/summarization/entry_point_spec.rb b/spec/lib/modules/summarization/entry_point_spec.rb index 0d57cbf9d..d2c721087 100644 --- a/spec/lib/modules/summarization/entry_point_spec.rb +++ b/spec/lib/modules/summarization/entry_point_spec.rb @@ -2,7 +2,7 @@ RSpec.describe DiscourseAi::Summarization::EntryPoint do before do - assign_fake_provider_to(:ai_summarization_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_summarization_enabled = true SiteSetting.ai_summary_gists_enabled = true end diff --git a/spec/lib/modules/summarization/fold_content_spec.rb b/spec/lib/modules/summarization/fold_content_spec.rb index d2497c70f..fb99022c8 100644 --- a/spec/lib/modules/summarization/fold_content_spec.rb +++ b/spec/lib/modules/summarization/fold_content_spec.rb @@ -3,7 +3,7 @@ RSpec.describe DiscourseAi::Summarization::FoldContent do subject(:summarizer) { DiscourseAi::Summarization.topic_summary(topic) } - let!(:llm_model) { assign_fake_provider_to(:ai_summarization_model) } + let!(:llm_model) { assign_fake_provider_to(:ai_default_llm_model) } fab!(:topic) { Fabricate(:topic, highest_post_number: 2) } fab!(:post_1) { Fabricate(:post, topic: topic, post_number: 1, raw: "This is a text") } diff --git a/spec/lib/personas/tools/search_spec.rb b/spec/lib/personas/tools/search_spec.rb index 
4de518d2a..f90f660b0 100644 --- a/spec/lib/personas/tools/search_spec.rb +++ b/spec/lib/personas/tools/search_spec.rb @@ -105,7 +105,8 @@ after { DiscourseAi::Embeddings::SemanticSearch.clear_cache_for(query) } it "supports semantic search when enabled" do - assign_fake_provider_to(:ai_embeddings_semantic_search_hyde_model) + assign_fake_provider_to(:ai_default_llm_model) + vector_def = Fabricate(:embedding_definition) SiteSetting.ai_embeddings_selected_model = vector_def.id SiteSetting.ai_embeddings_semantic_search_enabled = true diff --git a/spec/lib/utils/search_spec.rb b/spec/lib/utils/search_spec.rb index 561aac207..6c185fc16 100644 --- a/spec/lib/utils/search_spec.rb +++ b/spec/lib/utils/search_spec.rb @@ -141,7 +141,8 @@ end it "includes semantic search results when enabled" do - assign_fake_provider_to(:ai_embeddings_semantic_search_hyde_model) + assign_fake_provider_to(:ai_default_llm_model) + vector_def = Fabricate(:embedding_definition) SiteSetting.ai_embeddings_selected_model = vector_def.id SiteSetting.ai_embeddings_semantic_search_enabled = true @@ -165,7 +166,8 @@ end it "can disable semantic search with hyde parameter" do - assign_fake_provider_to(:ai_embeddings_semantic_search_hyde_model) + assign_fake_provider_to(:ai_default_llm_model) + vector_def = Fabricate(:embedding_definition) SiteSetting.ai_embeddings_selected_model = vector_def.id SiteSetting.ai_embeddings_semantic_search_enabled = true diff --git a/spec/models/user_option_spec.rb b/spec/models/user_option_spec.rb index 34121ab93..23050676f 100644 --- a/spec/models/user_option_spec.rb +++ b/spec/models/user_option_spec.rb @@ -9,8 +9,7 @@ end before do - assign_fake_provider_to(:ai_helper_model) - assign_fake_provider_to(:ai_helper_image_caption_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_helper_enabled = true SiteSetting.ai_helper_enabled_features = "image_caption" SiteSetting.ai_auto_image_caption_allowed_groups = "10" # tl0 diff --git a/spec/plugin_spec.rb 
b/spec/plugin_spec.rb index 9a69041f7..41523c4eb 100644 --- a/spec/plugin_spec.rb +++ b/spec/plugin_spec.rb @@ -7,7 +7,7 @@ fab!(:user) before do - assign_fake_provider_to(:ai_helper_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_helper_enabled = true SiteSetting.ai_helper_illustrate_post_model = "disabled" Group.find_by(id: Group::AUTO_GROUPS[:admins]).add(user) diff --git a/spec/requests/admin/ai_llms_controller_spec.rb b/spec/requests/admin/ai_llms_controller_spec.rb index e21467273..e0aaa27ec 100644 --- a/spec/requests/admin/ai_llms_controller_spec.rb +++ b/spec/requests/admin/ai_llms_controller_spec.rb @@ -498,7 +498,6 @@ it "logs staff action when deleting an LLM model" do # Capture the model details before deletion for comparison - model_id = llm_model.id model_display_name = llm_model.display_name # Delete the model @@ -516,7 +515,7 @@ end it "validates the model is not in use" do - fake_llm = assign_fake_provider_to(:ai_helper_model) + fake_llm = assign_fake_provider_to(:ai_default_llm_model) delete "/admin/plugins/discourse-ai/ai-llms/#{fake_llm.id}.json" diff --git a/spec/requests/ai_helper/assistant_controller_spec.rb b/spec/requests/ai_helper/assistant_controller_spec.rb index 333252bb9..5b921e160 100644 --- a/spec/requests/ai_helper/assistant_controller_spec.rb +++ b/spec/requests/ai_helper/assistant_controller_spec.rb @@ -6,6 +6,7 @@ before do assign_fake_provider_to(:ai_default_llm_model) + SiteSetting.ai_helper_enabled = true end describe "#stream_suggestion" do diff --git a/spec/requests/embeddings/embeddings_controller_spec.rb b/spec/requests/embeddings/embeddings_controller_spec.rb index 0408aa37d..dd46d03bc 100644 --- a/spec/requests/embeddings/embeddings_controller_spec.rb +++ b/spec/requests/embeddings/embeddings_controller_spec.rb @@ -120,7 +120,7 @@ def create_api_key(user) end it "doesn't skip HyDE if the hyde param is missing" do - assign_fake_provider_to(:ai_embeddings_semantic_search_hyde_model) + 
assign_fake_provider_to(:ai_default_llm_model) index(topic) index(topic_in_subcategory) diff --git a/spec/requests/summarization/chat_summary_controller_spec.rb b/spec/requests/summarization/chat_summary_controller_spec.rb index f23c447ec..203fb609e 100644 --- a/spec/requests/summarization/chat_summary_controller_spec.rb +++ b/spec/requests/summarization/chat_summary_controller_spec.rb @@ -7,7 +7,7 @@ before do group.add(current_user) - assign_fake_provider_to(:ai_summarization_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_summarization_enabled = true SiteSetting.ai_custom_summarization_allowed_groups = group.id diff --git a/spec/requests/summarization/summary_controller_spec.rb b/spec/requests/summarization/summary_controller_spec.rb index 5c051f3da..5f368727b 100644 --- a/spec/requests/summarization/summary_controller_spec.rb +++ b/spec/requests/summarization/summary_controller_spec.rb @@ -7,7 +7,7 @@ fab!(:post_2) { Fabricate(:post, topic: topic, post_number: 2) } before do - assign_fake_provider_to(:ai_summarization_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_summarization_enabled = true end diff --git a/spec/services/discourse_ai/topic_summarization_spec.rb b/spec/services/discourse_ai/topic_summarization_spec.rb index 444cfc671..71696d4ab 100644 --- a/spec/services/discourse_ai/topic_summarization_spec.rb +++ b/spec/services/discourse_ai/topic_summarization_spec.rb @@ -7,7 +7,7 @@ fab!(:post_2) { Fabricate(:post, topic: topic, post_number: 2) } before do - assign_fake_provider_to(:ai_summarization_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_summarization_enabled = true end diff --git a/spec/system/admin_ai_features_spec.rb b/spec/system/admin_ai_features_spec.rb index 39c07bbe2..e395d3cc9 100644 --- a/spec/system/admin_ai_features_spec.rb +++ b/spec/system/admin_ai_features_spec.rb @@ -13,7 +13,7 @@ before do summarization_persona.allowed_group_ids = [group_1.id, group_2.id] 
summarization_persona.save! - assign_fake_provider_to(:ai_summarization_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_summarization_enabled = true SiteSetting.ai_summarization_persona = summarization_persona.id sign_in(admin) diff --git a/spec/system/ai_helper/ai_composer_helper_spec.rb b/spec/system/ai_helper/ai_composer_helper_spec.rb index 7cd5f319f..71a9e4d94 100644 --- a/spec/system/ai_helper/ai_composer_helper_spec.rb +++ b/spec/system/ai_helper/ai_composer_helper_spec.rb @@ -7,7 +7,7 @@ before do Group.find_by(id: Group::AUTO_GROUPS[:admins]).add(user) - assign_fake_provider_to(:ai_helper_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_helper_enabled = true Jobs.run_immediately! sign_in(user) diff --git a/spec/system/ai_helper/ai_image_caption_spec.rb b/spec/system/ai_helper/ai_image_caption_spec.rb index d034d28ab..9134bd31a 100644 --- a/spec/system/ai_helper/ai_image_caption_spec.rb +++ b/spec/system/ai_helper/ai_image_caption_spec.rb @@ -23,8 +23,7 @@ before do Group.find_by(id: Group::AUTO_GROUPS[:admins]).add(user) - assign_fake_provider_to(:ai_helper_model) - assign_fake_provider_to(:ai_helper_image_caption_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_helper_enabled = true SiteSetting.ai_helper_enabled_features = "image_caption" sign_in(user) diff --git a/spec/system/ai_helper/ai_post_helper_spec.rb b/spec/system/ai_helper/ai_post_helper_spec.rb index 14c1b5766..8c9ec16a3 100644 --- a/spec/system/ai_helper/ai_post_helper_spec.rb +++ b/spec/system/ai_helper/ai_post_helper_spec.rb @@ -28,7 +28,7 @@ before do Group.find_by(id: Group::AUTO_GROUPS[:admins]).add(user) - assign_fake_provider_to(:ai_helper_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_helper_enabled = true Jobs.run_immediately! 
sign_in(user) diff --git a/spec/system/ai_helper/ai_proofreading_spec.rb b/spec/system/ai_helper/ai_proofreading_spec.rb index b2e00733b..fd64999fc 100644 --- a/spec/system/ai_helper/ai_proofreading_spec.rb +++ b/spec/system/ai_helper/ai_proofreading_spec.rb @@ -6,7 +6,7 @@ fab!(:admin) { Fabricate(:admin, refresh_auto_groups: true) } before do - assign_fake_provider_to(:ai_helper_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_helper_enabled = true # This needs to be done because the streaming suggestions for composer diff --git a/spec/system/ai_helper/ai_split_topic_suggestion_spec.rb b/spec/system/ai_helper/ai_split_topic_suggestion_spec.rb index 6cb3bdd92..ba8041b50 100644 --- a/spec/system/ai_helper/ai_split_topic_suggestion_spec.rb +++ b/spec/system/ai_helper/ai_split_topic_suggestion_spec.rb @@ -39,7 +39,7 @@ before do Group.find_by(id: Group::AUTO_GROUPS[:admins]).add(user) - assign_fake_provider_to(:ai_helper_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_helper_enabled = true sign_in(user) end diff --git a/spec/system/ai_user_preferences_spec.rb b/spec/system/ai_user_preferences_spec.rb index e5379da58..2c1865fc0 100644 --- a/spec/system/ai_user_preferences_spec.rb +++ b/spec/system/ai_user_preferences_spec.rb @@ -12,8 +12,7 @@ SiteSetting.discourse_ai_enabled = true SiteSetting.ai_bot_discover_persona = discovery_persona.id Group.find_by(id: Group::AUTO_GROUPS[:admins]).add(user) - assign_fake_provider_to(:ai_helper_model) - assign_fake_provider_to(:ai_helper_image_caption_model) + assign_fake_provider_to(:ai_default_llm_model) sign_in(user) end diff --git a/spec/system/summarization/chat_summarization_spec.rb b/spec/system/summarization/chat_summarization_spec.rb index dbb78417d..92dd609cb 100644 --- a/spec/system/summarization/chat_summarization_spec.rb +++ b/spec/system/summarization/chat_summarization_spec.rb @@ -11,7 +11,7 @@ before do group.add(current_user) - 
assign_fake_provider_to(:ai_summarization_model) + assign_fake_provider_to(:ai_default_llm_model) assign_persona_to(:ai_summarization_persona, [group.id]) SiteSetting.ai_summarization_enabled = true diff --git a/spec/system/summarization/topic_summarization_spec.rb b/spec/system/summarization/topic_summarization_spec.rb index 30b147a02..598323de6 100644 --- a/spec/system/summarization/topic_summarization_spec.rb +++ b/spec/system/summarization/topic_summarization_spec.rb @@ -23,7 +23,7 @@ before do group.add(current_user) - assign_fake_provider_to(:ai_summarization_model) + assign_fake_provider_to(:ai_default_llm_model) assign_persona_to(:ai_summarization_persona, [group.id]) SiteSetting.ai_summarization_enabled = true From 88811f97bf876e77fcaab7a84cbe098ea2613de6 Mon Sep 17 00:00:00 2001 From: Keegan George Date: Tue, 15 Jul 2025 10:23:58 -0700 Subject: [PATCH 14/34] fixes --- lib/ai_helper/painter.rb | 2 +- lib/configuration/llm_dependency_validator.rb | 3 ++- spec/requests/admin/ai_features_controller_spec.rb | 1 + spec/requests/admin/ai_llms_controller_spec.rb | 2 +- 4 files changed, 5 insertions(+), 3 deletions(-) diff --git a/lib/ai_helper/painter.rb b/lib/ai_helper/painter.rb index 9be8b95a0..3e51534fe 100644 --- a/lib/ai_helper/painter.rb +++ b/lib/ai_helper/painter.rb @@ -66,7 +66,7 @@ def diffusion_prompt(text, user) messages: [{ type: :user, content: text, id: user.username }], ) - DiscourseAi::Completions::Llm.proxy(SiteSetting.ai_helper_model).generate( + DiscourseAi::Completions::Llm.proxy(SiteSetting.ai_default_llm_model).generate( prompt, user: user, feature_name: "illustrate_post", diff --git a/lib/configuration/llm_dependency_validator.rb b/lib/configuration/llm_dependency_validator.rb index 0cf715fe0..bf6a5c122 100644 --- a/lib/configuration/llm_dependency_validator.rb +++ b/lib/configuration/llm_dependency_validator.rb @@ -10,7 +10,8 @@ def initialize(opts = {}) def valid_value?(val) return true if val == "f" - if @opts[:name] == 
:ai_summarization_enabled || @opts[:name] == :ai_helper_enabled + if @opts[:name] == :ai_summarization_enabled || @opts[:name] == :ai_helper_enabled || + @opts[:name] == :ai_embeddings_semantic_search_enabled has_llms = LlmModel.count > 0 @no_llms_configured = !has_llms has_llms diff --git a/spec/requests/admin/ai_features_controller_spec.rb b/spec/requests/admin/ai_features_controller_spec.rb index f826e0bc4..e7b2cc840 100644 --- a/spec/requests/admin/ai_features_controller_spec.rb +++ b/spec/requests/admin/ai_features_controller_spec.rb @@ -10,6 +10,7 @@ before do sign_in(admin) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_bot_enabled = true SiteSetting.discourse_ai_enabled = true end diff --git a/spec/requests/admin/ai_llms_controller_spec.rb b/spec/requests/admin/ai_llms_controller_spec.rb index e0aaa27ec..e0444e651 100644 --- a/spec/requests/admin/ai_llms_controller_spec.rb +++ b/spec/requests/admin/ai_llms_controller_spec.rb @@ -515,7 +515,7 @@ end it "validates the model is not in use" do - fake_llm = assign_fake_provider_to(:ai_default_llm_model) + fake_llm = assign_fake_provider_to(:ai_helper_model) delete "/admin/plugins/discourse-ai/ai-llms/#{fake_llm.id}.json" From b58f198ee87e77204de27ee9f6a8ac2113bdd996 Mon Sep 17 00:00:00 2001 From: Keegan George Date: Tue, 15 Jul 2025 11:08:39 -0700 Subject: [PATCH 15/34] FIX: don't `run_test` in testing env --- lib/configuration/simple_llm_validator.rb | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/lib/configuration/simple_llm_validator.rb b/lib/configuration/simple_llm_validator.rb index f15d2d827..301ac4ad0 100644 --- a/lib/configuration/simple_llm_validator.rb +++ b/lib/configuration/simple_llm_validator.rb @@ -18,6 +18,11 @@ def valid_value?(val) end def run_test(val) + if Rails.env.test? + # In test mode, we assume the model is reachable. 
+ return true + end + DiscourseAi::Completions::Llm .proxy(val) .generate("How much is 1 + 1?", user: nil, feature_name: "llm_validator") From f593ab6e1e2ae060417d568d7f382b288178e596 Mon Sep 17 00:00:00 2001 From: Keegan George Date: Tue, 15 Jul 2025 18:07:05 -0700 Subject: [PATCH 16/34] DEV: remove `custom:` prefix and update migrations for translations --- app/models/llm_model.rb | 2 +- .../discourse/components/ai-spam.gjs | 2 +- config/settings.yml | 2 +- ...0250710173803_seed_ai_default_llm_model.rb | 2 +- ...copy_ai_helper_model_to_persona_default.rb | 2 +- ...05451_copy_translation_model_to_persona.rb | 23 +++++++++++ ..._image_caption_model_to_persona_default.rb | 26 +++++++++++++ lib/ai_helper/assistant.rb | 10 +---- lib/configuration/llm_enumerator.rb | 2 +- lib/configuration/llm_validator.rb | 38 +++++------------- lib/configuration/simple_llm_validator.rb | 39 ------------------- lib/embeddings/semantic_search.rb | 11 +----- lib/summarization.rb | 12 +----- lib/translation/base_translator.rb | 10 +++-- 14 files changed, 75 insertions(+), 106 deletions(-) create mode 100644 db/migrate/20250716005451_copy_translation_model_to_persona.rb create mode 100644 db/migrate/20250716005855_copy_ai_image_caption_model_to_persona_default.rb delete mode 100644 lib/configuration/simple_llm_validator.rb diff --git a/app/models/llm_model.rb b/app/models/llm_model.rb index 937c5373b..7834d065b 100644 --- a/app/models/llm_model.rb +++ b/app/models/llm_model.rb @@ -99,7 +99,7 @@ def to_llm end def identifier - "custom:#{id}" + "#{id}" end def toggle_companion_user diff --git a/assets/javascripts/discourse/components/ai-spam.gjs b/assets/javascripts/discourse/components/ai-spam.gjs index 068c715ec..7f4464cf1 100644 --- a/assets/javascripts/discourse/components/ai-spam.gjs +++ b/assets/javascripts/discourse/components/ai-spam.gjs @@ -90,7 +90,7 @@ export default class AiSpam extends Component { this.isEnabled = model.is_enabled; if (model.llm_id) { - this.selectedLLM = 
"custom:" + model.llm_id; + this.selectedLLM = model.llm_id; } else { if (this.availableLLMs.length) { this.selectedLLM = this.availableLLMs[0].id; diff --git a/config/settings.yml b/config/settings.yml index ff7701816..432aafdc2 100644 --- a/config/settings.yml +++ b/config/settings.yml @@ -16,7 +16,7 @@ discourse_ai: type: enum allow_any: false enum: "DiscourseAi::Configuration::LlmEnumerator" - validator: "DiscourseAi::Configuration::SimpleLlmValidator" + validator: "DiscourseAi::Configuration::LlmValidator" ai_sentiment_enabled: default: false diff --git a/db/migrate/20250710173803_seed_ai_default_llm_model.rb b/db/migrate/20250710173803_seed_ai_default_llm_model.rb index c97106134..b5d9a1227 100644 --- a/db/migrate/20250710173803_seed_ai_default_llm_model.rb +++ b/db/migrate/20250710173803_seed_ai_default_llm_model.rb @@ -6,7 +6,7 @@ def up last_model_id = DB.query_single("SELECT id FROM llm_models ORDER BY id DESC LIMIT 1").first if last_model_id.present? - execute "UPDATE site_settings SET value = 'custom:#{last_model_id}' WHERE name = 'ai_default_llm_model' AND (value IS NULL OR value = '');" + execute "UPDATE site_settings SET value = '#{last_model_id}' WHERE name = 'ai_default_llm_model' AND (value IS NULL OR value = '');" end end diff --git a/db/migrate/20250710181656_copy_ai_helper_model_to_persona_default.rb b/db/migrate/20250710181656_copy_ai_helper_model_to_persona_default.rb index 0b4187b11..9657ea23f 100644 --- a/db/migrate/20250710181656_copy_ai_helper_model_to_persona_default.rb +++ b/db/migrate/20250710181656_copy_ai_helper_model_to_persona_default.rb @@ -12,7 +12,7 @@ def up execute(<<~SQL) UPDATE ai_personas SET default_llm_id = #{model_id} - WHERE id IN (-18, -19, -20, -21, -22, -23, -24, -25, -26) AND default_llm_id IS NULL + WHERE id IN (-18, -19, -20, -21, -22, -23, -24, -25) AND default_llm_id IS NULL SQL end end diff --git a/db/migrate/20250716005451_copy_translation_model_to_persona.rb 
b/db/migrate/20250716005451_copy_translation_model_to_persona.rb new file mode 100644 index 000000000..478a6c465 --- /dev/null +++ b/db/migrate/20250716005451_copy_translation_model_to_persona.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true +class CopyTranslationModelToPersona < ActiveRecord::Migration[7.2] + def up + ai_translation_model = + DB.query_single("SELECT value FROM site_settings WHERE name = 'ai_translation_model'").first + + if ai_translation_model.present? && ai_translation_model.start_with?("custom:") + # Extract the model ID from the setting value (e.g., "custom:-5" -> "-5") + model_id = ai_translation_model.split(":").last + + # Update the translation personas (IDs -27, -28, -29, -30) with the extracted model ID + execute(<<~SQL) + UPDATE ai_personas + SET default_llm_id = #{model_id} + WHERE id IN (-27, -28, -29, -30) AND default_llm_id IS NULL + SQL + end + end + + def down + raise ActiveRecord::IrreversibleMigration + end +end diff --git a/db/migrate/20250716005855_copy_ai_image_caption_model_to_persona_default.rb b/db/migrate/20250716005855_copy_ai_image_caption_model_to_persona_default.rb new file mode 100644 index 000000000..a797c1b31 --- /dev/null +++ b/db/migrate/20250716005855_copy_ai_image_caption_model_to_persona_default.rb @@ -0,0 +1,26 @@ +# frozen_string_literal: true +class CopyAiImageCaptionModelToPersonaDefault < ActiveRecord::Migration[7.2] + def up + ai_helper_image_caption_model = + DB.query_single( + "SELECT value FROM site_settings WHERE name = 'ai_helper_image_caption_model'", + ).first + + if ai_helper_image_caption_model.present? 
&& + ai_helper_image_caption_model.start_with?("custom:") + # Extract the model ID from the setting value (e.g., "custom:1" -> "1") + model_id = ai_helper_image_caption_model.split(":").last + + # Update the helper personas with the extracted model ID + execute(<<~SQL) + UPDATE ai_personas + SET default_llm_id = #{model_id} + WHERE id IN (-26) AND default_llm_id IS NULL + SQL + end + end + + def down + raise ActiveRecord::IrreversibleMigration + end +end diff --git a/lib/ai_helper/assistant.rb b/lib/ai_helper/assistant.rb index 550a33624..01a27675e 100644 --- a/lib/ai_helper/assistant.rb +++ b/lib/ai_helper/assistant.rb @@ -314,19 +314,11 @@ def find_ai_helper_model(helper_mode, persona_klass) # 1. Persona's default LLM # 2. SiteSetting.ai_default_llm_id (or newest LLM if not set) def self.find_ai_helper_model(helper_mode, persona_klass) - model_id = - persona_klass.default_llm_id || SiteSetting.ai_default_llm_model&.split(":")&.last # Remove legacy custom provider. + model_id = persona_klass.default_llm_id || SiteSetting.ai_default_llm_model if model_id.present? LlmModel.find_by(id: model_id) else - last_model_id = LlmModel.last&.id - - # SiteSetting.ai_default_llm_model shouldn't be empty, but if it is, we set it to the last model. - if last_model_id.present? && SiteSetting.ai_default_llm_model.empty? - SiteSetting.set_and_log("ai_default_llm_model", "custom:#{last_model_id}", Discourse.system_user) # Remove legacy custom provider. 
- end - LlmModel.last end end diff --git a/lib/configuration/llm_enumerator.rb b/lib/configuration/llm_enumerator.rb index 200fc0a27..ac455c0af 100644 --- a/lib/configuration/llm_enumerator.rb +++ b/lib/configuration/llm_enumerator.rb @@ -5,6 +5,7 @@ module DiscourseAi module Configuration class LlmEnumerator < ::EnumSiteSetting + # TODO: global_usage is no longer accurate, it should be removed/updated def self.global_usage rval = Hash.new { |h, k| h[k] = [] } @@ -107,7 +108,6 @@ def self.values(allowed_seeded_llms: nil) end end - values.each { |value_h| value_h[:value] = "custom:#{value_h[:value]}" } values end end diff --git a/lib/configuration/llm_validator.rb b/lib/configuration/llm_validator.rb index 36c3c63b1..7c985c5fa 100644 --- a/lib/configuration/llm_validator.rb +++ b/lib/configuration/llm_validator.rb @@ -2,9 +2,6 @@ module DiscourseAi module Configuration - class InvalidSeededModelError < StandardError - end - class LlmValidator def initialize(opts = {}) @opts = opts @@ -18,12 +15,7 @@ def valid_value?(val) return !@parent_enabled end - allowed_seeded_model?(val) - run_test(val).tap { |result| @unreachable = result } - rescue DiscourseAi::Configuration::InvalidSeededModelError => e - @unreachable = true - false rescue StandardError => e raise e if Rails.env.test? @unreachable = true @@ -31,6 +23,11 @@ def valid_value?(val) end def run_test(val) + if Rails.env.test? + # In test mode, we assume the model is reachable. 
+ return true + end + DiscourseAi::Completions::Llm .proxy(val) .generate("How much is 1 + 1?", user: nil, feature_name: "llm_validator") @@ -53,10 +50,6 @@ def error_message ) end - if @invalid_seeded_model - return I18n.t("discourse_ai.llm.configuration.invalid_seeded_model") - end - return unless @unreachable I18n.t("discourse_ai.llm.configuration.model_unreachable") @@ -68,25 +61,12 @@ def choose_llm_setting_for(module_enabler_setting) def modules_and_choose_llm_settings { - ai_embeddings_semantic_search_enabled: :ai_embeddings_semantic_search_hyde_model, - ai_helper_enabled: :ai_helper_model, - ai_summarization_enabled: :ai_summarization_model, - ai_translation_enabled: :ai_translation_model, + ai_embeddings_semantic_search_enabled: :ai_default_llm_model, + ai_helper_enabled: :ai_default_llm_model, + ai_summarization_enabled: :ai_default_llm_model, + ai_translation_enabled: :ai_default_llm_model, } end - - def allowed_seeded_model?(val) - id = val.split(":").last - return true if id.to_i > 0 - - setting = @opts[:name] - allowed_list = SiteSetting.public_send("#{setting}_allowed_seeded_models") - - if allowed_list.split("|").exclude?(id) - @invalid_seeded_model = true - raise DiscourseAi::Configuration::InvalidSeededModelError.new - end - end end end end diff --git a/lib/configuration/simple_llm_validator.rb b/lib/configuration/simple_llm_validator.rb deleted file mode 100644 index 301ac4ad0..000000000 --- a/lib/configuration/simple_llm_validator.rb +++ /dev/null @@ -1,39 +0,0 @@ -# frozen_string_literal: true - -module DiscourseAi - module Configuration - class SimpleLlmValidator - def initialize(opts = {}) - @opts = opts - end - - def valid_value?(val) - return true if val == "" - - run_test(val).tap { |result| @unreachable = result } - rescue StandardError => e - raise e if Rails.env.test? - @unreachable = true - true - end - - def run_test(val) - if Rails.env.test? - # In test mode, we assume the model is reachable. 
- return true - end - - DiscourseAi::Completions::Llm - .proxy(val) - .generate("How much is 1 + 1?", user: nil, feature_name: "llm_validator") - .present? - end - - def error_message - return unless @unreachable - - I18n.t("discourse_ai.llm.configuration.model_unreachable") - end - end - end -end diff --git a/lib/embeddings/semantic_search.rb b/lib/embeddings/semantic_search.rb index a1c0b01ee..e4e894cab 100644 --- a/lib/embeddings/semantic_search.rb +++ b/lib/embeddings/semantic_search.rb @@ -212,22 +212,13 @@ def hypothetical_post_from(search_term) # 1. Persona's default LLM # 2. SiteSetting.ai_default_llm_id (or newest LLM if not set) def find_ai_hyde_model(persona_klass) - model_id = - persona_klass.default_llm_id || SiteSetting.ai_default_llm_model&.split(":")&.last # Remove legacy custom provider. + model_id = persona_klass.default_llm_id || SiteSetting.ai_default_llm_model if model_id.present? LlmModel.find_by(id: model_id) else - last_model_id = LlmModel.last&.id - - # SiteSetting.ai_default_llm_model shouldn't be empty, but if it is, we set it to the last model. - if last_model_id.present? && SiteSetting.ai_default_llm_model.empty? - SiteSetting.set_and_log("ai_default_llm_model", "custom:#{last_model_id}", Discourse.system_user) # Remove legacy custom provider. - end - LlmModel.last end - end private diff --git a/lib/summarization.rb b/lib/summarization.rb index e4706cbd4..f8f71cab2 100644 --- a/lib/summarization.rb +++ b/lib/summarization.rb @@ -56,19 +56,11 @@ def chat_channel_summary(channel, time_window_in_hours) # 1. Persona's default LLM # 2. SiteSetting.ai_default_llm_id (or newest LLM if not set) def find_summarization_model(persona_klass) - model_id = - persona_klass.default_llm_id || SiteSetting.ai_default_llm_model&.split(":")&.last # Remove legacy custom provider. + model_id = persona_klass.default_llm_id || SiteSetting.ai_default_llm_model if model_id.present? 
LlmModel.find_by(id: model_id) else - last_model_id = LlmModel.last&.id - - # SiteSetting.ai_default_llm_model shouldn't be empty, but if it is, we set it to the last model. - if last_model_id.present? && SiteSetting.ai_default_llm_model.empty? - SiteSetting.set_and_log("ai_default_llm_model", "custom:#{last_model_id}", Discourse.system_user) # Remove legacy custom provider. - end - LlmModel.last end end @@ -79,7 +71,7 @@ def build_bot(persona_klass, llm_model) persona = persona_klass.new user = User.find_by(id: persona_klass.user_id) || Discourse.system_user - bot = DiscourseAi::Personas::Bot.as(user, persona: persona, model: llm_model) + DiscourseAi::Personas::Bot.as(user, persona: persona, model: llm_model) end end end diff --git a/lib/translation/base_translator.rb b/lib/translation/base_translator.rb index 2777c873d..24749ac2d 100644 --- a/lib/translation/base_translator.rb +++ b/lib/translation/base_translator.rb @@ -60,9 +60,13 @@ def persona_setting end def self.preferred_llm_model(persona_klass) - id = persona_klass.default_llm_id || SiteSetting.ai_translation_model&.split(":")&.last - return nil if id.blank? - LlmModel.find_by(id:) + model_id = persona_klass.default_llm_id || SiteSetting.ai_default_llm_model + + if model_id.present? 
+ LlmModel.find_by(id: model_id) + else + LlmModel.last + end end end end From 7ae61ce87760392315c2e3856c997e47cba678c9 Mon Sep 17 00:00:00 2001 From: Keegan George Date: Tue, 15 Jul 2025 18:08:41 -0700 Subject: [PATCH 17/34] DEV: update automation to remove `custom:` prefix --- lib/automation.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/automation.rb b/lib/automation.rb index e43bcbc58..06a2ad471 100644 --- a/lib/automation.rb +++ b/lib/automation.rb @@ -37,7 +37,7 @@ def self.available_models value_h["id"] > 0 || SiteSetting.ai_automation_allowed_seeded_models_map.include?(value_h["id"].to_s) end - .each { |value_h| value_h["id"] = "custom:#{value_h["id"]}" } + .each { |value_h| value_h["id"] = "#{value_h["id"]}" } values end From c26d604072c1f11763cce21b6ff879fa7e0b02ff Mon Sep 17 00:00:00 2001 From: Keegan George Date: Wed, 16 Jul 2025 07:08:29 -0700 Subject: [PATCH 18/34] FIX: dependency validator should depend on default LLM setting --- lib/configuration/llm_dependency_validator.rb | 13 +++---------- lib/configuration/llm_vision_enumerator.rb | 2 -- 2 files changed, 3 insertions(+), 12 deletions(-) diff --git a/lib/configuration/llm_dependency_validator.rb b/lib/configuration/llm_dependency_validator.rb index bf6a5c122..c3b1a24fb 100644 --- a/lib/configuration/llm_dependency_validator.rb +++ b/lib/configuration/llm_dependency_validator.rb @@ -10,17 +10,10 @@ def initialize(opts = {}) def valid_value?(val) return true if val == "f" - if @opts[:name] == :ai_summarization_enabled || @opts[:name] == :ai_helper_enabled || - @opts[:name] == :ai_embeddings_semantic_search_enabled - has_llms = LlmModel.count > 0 - @no_llms_configured = !has_llms - has_llms - else - @llm_dependency_setting_name = - DiscourseAi::Configuration::LlmValidator.new.choose_llm_setting_for(@opts[:name]) + @llm_dependency_setting_name = + DiscourseAi::Configuration::LlmValidator.new.choose_llm_setting_for(@opts[:name]) - 
SiteSetting.public_send(@llm_dependency_setting_name).present? - end + SiteSetting.public_send(@llm_dependency_setting_name).present? end def error_message diff --git a/lib/configuration/llm_vision_enumerator.rb b/lib/configuration/llm_vision_enumerator.rb index c4cf1a621..0ef16b705 100644 --- a/lib/configuration/llm_vision_enumerator.rb +++ b/lib/configuration/llm_vision_enumerator.rb @@ -16,8 +16,6 @@ def self.values WHERE vision_enabled SQL - values.each { |value_h| value_h[:value] = "custom:#{value_h[:value]}" } - values end end From 0fadf1da1a5f0fb0938a57dc00bdd5a1c6ca277a Mon Sep 17 00:00:00 2001 From: Keegan George Date: Wed, 16 Jul 2025 10:56:18 -0700 Subject: [PATCH 19/34] DEV: update enumerator --- config/locales/client.en.yml | 2 +- config/settings.yml | 3 -- lib/configuration/llm_enumerator.rb | 44 +++++++++++++++---- lib/configuration/llm_validator.rb | 6 ++- spec/configuration/llm_enumerator_spec.rb | 2 +- .../regular/detect_translate_post_spec.rb | 4 +- .../regular/detect_translate_topic_spec.rb | 4 +- spec/jobs/regular/localize_categories_spec.rb | 4 +- spec/jobs/regular/localize_posts_spec.rb | 4 +- spec/jobs/regular/localize_topics_spec.rb | 4 +- ...tegories_locale_detection_backfill_spec.rb | 4 +- .../post_localization_backfill_spec.rb | 4 +- .../posts_locale_detection_backfill_spec.rb | 4 +- spec/plugin_helper.rb | 2 +- 14 files changed, 51 insertions(+), 40 deletions(-) diff --git a/config/locales/client.en.yml b/config/locales/client.en.yml index 8e797711f..6c3601b08 100644 --- a/config/locales/client.en.yml +++ b/config/locales/client.en.yml @@ -567,7 +567,7 @@ en: max_usages_required: "Must be set if max tokens is not set" usage: ai_bot: "AI bot" - ai_helper: "Helper" + ai_helper: "Helper (%{persona})" ai_helper_image_caption: "Image caption" ai_persona: "Persona (%{persona})" ai_summarization: "Summarize" diff --git a/config/settings.yml b/config/settings.yml index 432aafdc2..24d92169f 100644 --- a/config/settings.yml +++ 
b/config/settings.yml @@ -272,7 +272,6 @@ discourse_ai: type: enum allow_any: false enum: "DiscourseAi::Configuration::LlmEnumerator" - validator: "DiscourseAi::Configuration::LlmValidator" area: "ai-features/embeddings" ai_embeddings_semantic_search_hyde_model_allowed_seeded_models: default: "" @@ -328,7 +327,6 @@ discourse_ai: allow_any: false type: enum enum: "DiscourseAi::Configuration::LlmEnumerator" - validator: "DiscourseAi::Configuration::LlmValidator" hidden: true ai_summarization_persona: default: "-11" @@ -495,7 +493,6 @@ discourse_ai: type: enum allow_any: false enum: "DiscourseAi::Configuration::LlmEnumerator" - validator: "DiscourseAi::Configuration::LlmValidator" area: "ai-features/translation" ai_translation_locale_detector_persona: default: "-27" diff --git a/lib/configuration/llm_enumerator.rb b/lib/configuration/llm_enumerator.rb index ac455c0af..f1d5a29b9 100644 --- a/lib/configuration/llm_enumerator.rb +++ b/lib/configuration/llm_enumerator.rb @@ -5,7 +5,6 @@ module DiscourseAi module Configuration class LlmEnumerator < ::EnumSiteSetting - # TODO: global_usage is no longer accurate, it should be removed/updated def self.global_usage rval = Hash.new { |h, k| h[k] = [] } @@ -23,24 +22,53 @@ def self.global_usage .each { |llm_id, name, id| rval[llm_id] << { type: :ai_persona, name: name, id: id } } if SiteSetting.ai_helper_enabled - model_id = SiteSetting.ai_helper_model.split(":").last.to_i - rval[model_id] << { type: :ai_helper } if model_id != 0 + { + "#{I18n.t("js.discourse_ai.features.ai_helper.proofread")}" => + SiteSetting.ai_helper_proofreader_persona, + "#{I18n.t("js.discourse_ai.features.ai_helper.title_suggestions")}" => + SiteSetting.ai_helper_title_suggestions_persona, + "#{I18n.t("js.discourse_ai.features.ai_helper.explain")}" => + SiteSetting.ai_helper_explain_persona, + "#{I18n.t("js.discourse_ai.features.ai_helper.illustrate_post")}" => + SiteSetting.ai_helper_post_illustrator_persona, + 
"#{I18n.t("js.discourse_ai.features.ai_helper.smart_dates")}" => + SiteSetting.ai_helper_smart_dates_persona, + "#{I18n.t("js.discourse_ai.features.ai_helper.translator")}" => + SiteSetting.ai_helper_translator_persona, + "#{I18n.t("js.discourse_ai.features.ai_helper.markdown_tables")}" => + SiteSetting.ai_helper_markdown_tables_persona, + "#{I18n.t("js.discourse_ai.features.ai_helper.custom_prompt")}" => + SiteSetting.ai_helper_custom_prompt_persona, + }.each do |helper_type, persona_id| + next if persona_id.blank? + + persona = AiPersona.find_by(id: persona_id) + next if persona.blank? || persona.default_llm_id.blank? + + model_id = persona.default_llm_id || SiteSetting.ai_default_llm_model.to_i + rval[model_id] << { type: :ai_helper, name: helper_type } + end end - if SiteSetting.ai_helper_image_caption_model - model_id = SiteSetting.ai_helper_image_caption_model.split(":").last.to_i - rval[model_id] << { type: :ai_helper_image_caption } if model_id != 0 + if SiteSetting.ai_helper_enabled_features.split("|").include?("image_caption") + image_caption_persona = AiPersona.find_by(id: SiteSetting.ai_helper_image_caption_persona) + model_id = image_caption_persona.default_llm_id || SiteSetting.ai_default_llm_model.to_i + + rval[model_id] << { type: :ai_helper_image_caption } end if SiteSetting.ai_summarization_enabled summarization_persona = AiPersona.find_by(id: SiteSetting.ai_summarization_persona) - model_id = summarization_persona.default_llm_id || LlmModel.last&.id + model_id = summarization_persona.default_llm_id || SiteSetting.ai_default_llm_model.to_i rval[model_id] << { type: :ai_summarization } end if SiteSetting.ai_embeddings_semantic_search_enabled - model_id = SiteSetting.ai_embeddings_semantic_search_hyde_model.split(":").last.to_i + search_persona = + AiPersona.find_by(id: SiteSetting.ai_embeddings_semantic_search_hyde_persona) + model_id = search_persona.default_llm_id || SiteSetting.ai_default_llm_model.to_i + rval[model_id] << { type: 
:ai_embeddings_semantic_search } end diff --git a/lib/configuration/llm_validator.rb b/lib/configuration/llm_validator.rb index 7c985c5fa..262119464 100644 --- a/lib/configuration/llm_validator.rb +++ b/lib/configuration/llm_validator.rb @@ -35,9 +35,11 @@ def run_test(val) end def modules_using(llm_model) - choose_llm_settings = modules_and_choose_llm_settings.values + in_use_llms = AiPersona.where.not(default_llm_id: nil).pluck(:default_llm_id) + default_llm = SiteSetting.ai_default_llm_model.presence&.to_i - choose_llm_settings.select { |s| SiteSetting.public_send(s) == "custom:#{llm_model.id}" } + combined_llms = (in_use_llms + [default_llm]).compact.uniq + combined_llms end def error_message diff --git a/spec/configuration/llm_enumerator_spec.rb b/spec/configuration/llm_enumerator_spec.rb index 7737da45d..e1044ee71 100644 --- a/spec/configuration/llm_enumerator_spec.rb +++ b/spec/configuration/llm_enumerator_spec.rb @@ -41,7 +41,7 @@ describe "#global_usage" do it "returns a hash of Llm models in use globally" do - SiteSetting.ai_helper_model = "custom:#{fake_model.id}" + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_helper_enabled = true expect(described_class.global_usage).to eq(fake_model.id => [{ type: :ai_helper }]) end diff --git a/spec/jobs/regular/detect_translate_post_spec.rb b/spec/jobs/regular/detect_translate_post_spec.rb index 570b7093c..dfae6dba6 100644 --- a/spec/jobs/regular/detect_translate_post_spec.rb +++ b/spec/jobs/regular/detect_translate_post_spec.rb @@ -7,10 +7,8 @@ let(:locales) { %w[en ja] } before do + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.discourse_ai_enabled = true - Fabricate(:fake_model).tap do |fake_llm| - SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") - end SiteSetting.ai_translation_enabled = true SiteSetting.content_localization_supported_locales = locales.join("|") end diff --git a/spec/jobs/regular/detect_translate_topic_spec.rb 
b/spec/jobs/regular/detect_translate_topic_spec.rb index 80e5b8f13..a7eeb9ced 100644 --- a/spec/jobs/regular/detect_translate_topic_spec.rb +++ b/spec/jobs/regular/detect_translate_topic_spec.rb @@ -7,10 +7,8 @@ let(:locales) { %w[en ja] } before do + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.discourse_ai_enabled = true - Fabricate(:fake_model).tap do |fake_llm| - SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") - end SiteSetting.ai_translation_enabled = true SiteSetting.content_localization_supported_locales = locales.join("|") end diff --git a/spec/jobs/regular/localize_categories_spec.rb b/spec/jobs/regular/localize_categories_spec.rb index bbcdfc6ac..0746e6d5f 100644 --- a/spec/jobs/regular/localize_categories_spec.rb +++ b/spec/jobs/regular/localize_categories_spec.rb @@ -10,10 +10,8 @@ def localize_all_categories(*locales) end before do + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.discourse_ai_enabled = true - Fabricate(:fake_model).tap do |fake_llm| - SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") - end SiteSetting.ai_translation_enabled = true SiteSetting.content_localization_supported_locales = "pt_BR|zh_CN" diff --git a/spec/jobs/regular/localize_posts_spec.rb b/spec/jobs/regular/localize_posts_spec.rb index 92aae58dd..72f01e871 100644 --- a/spec/jobs/regular/localize_posts_spec.rb +++ b/spec/jobs/regular/localize_posts_spec.rb @@ -7,10 +7,8 @@ let(:locales) { %w[en ja de] } before do + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.discourse_ai_enabled = true - Fabricate(:fake_model).tap do |fake_llm| - SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") - end SiteSetting.ai_translation_enabled = true SiteSetting.content_localization_supported_locales = locales.join("|") SiteSetting.ai_translation_backfill_hourly_rate = 100 diff --git a/spec/jobs/regular/localize_topics_spec.rb b/spec/jobs/regular/localize_topics_spec.rb index 
f091b9155..84756c5c8 100644 --- a/spec/jobs/regular/localize_topics_spec.rb +++ b/spec/jobs/regular/localize_topics_spec.rb @@ -7,10 +7,8 @@ let(:locales) { %w[en ja de] } before do + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.discourse_ai_enabled = true - Fabricate(:fake_model).tap do |fake_llm| - SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") - end SiteSetting.ai_translation_enabled = true SiteSetting.content_localization_supported_locales = locales.join("|") SiteSetting.ai_translation_backfill_hourly_rate = 100 diff --git a/spec/jobs/scheduled/categories_locale_detection_backfill_spec.rb b/spec/jobs/scheduled/categories_locale_detection_backfill_spec.rb index 31e96275e..42c86f64e 100644 --- a/spec/jobs/scheduled/categories_locale_detection_backfill_spec.rb +++ b/spec/jobs/scheduled/categories_locale_detection_backfill_spec.rb @@ -5,10 +5,8 @@ subject(:job) { described_class.new } before do + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.discourse_ai_enabled = true - Fabricate(:fake_model).tap do |fake_llm| - SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") - end SiteSetting.ai_translation_enabled = true SiteSetting.ai_translation_backfill_hourly_rate = 100 SiteSetting.content_localization_supported_locales = "en" diff --git a/spec/jobs/scheduled/post_localization_backfill_spec.rb b/spec/jobs/scheduled/post_localization_backfill_spec.rb index f43d890d8..b6468ca74 100644 --- a/spec/jobs/scheduled/post_localization_backfill_spec.rb +++ b/spec/jobs/scheduled/post_localization_backfill_spec.rb @@ -2,11 +2,9 @@ describe Jobs::PostLocalizationBackfill do before do + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_translation_backfill_hourly_rate = 100 SiteSetting.content_localization_supported_locales = "en" - Fabricate(:fake_model).tap do |fake_llm| - SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") - end SiteSetting.ai_translation_enabled = true 
SiteSetting.discourse_ai_enabled = true end diff --git a/spec/jobs/scheduled/posts_locale_detection_backfill_spec.rb b/spec/jobs/scheduled/posts_locale_detection_backfill_spec.rb index dfbc46f90..27398a356 100644 --- a/spec/jobs/scheduled/posts_locale_detection_backfill_spec.rb +++ b/spec/jobs/scheduled/posts_locale_detection_backfill_spec.rb @@ -5,10 +5,8 @@ subject(:job) { described_class.new } before do + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.discourse_ai_enabled = true - Fabricate(:fake_model).tap do |fake_llm| - SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") - end SiteSetting.ai_translation_enabled = true SiteSetting.ai_translation_backfill_hourly_rate = 100 SiteSetting.content_localization_supported_locales = "en" diff --git a/spec/plugin_helper.rb b/spec/plugin_helper.rb index 0346df7ca..4e2a58a7f 100644 --- a/spec/plugin_helper.rb +++ b/spec/plugin_helper.rb @@ -12,7 +12,7 @@ def toggle_enabled_bots(bots: []) def assign_fake_provider_to(setting_name) Fabricate(:fake_model).tap do |fake_llm| - SiteSetting.public_send("#{setting_name}=", "custom:#{fake_llm.id}") + SiteSetting.public_send("#{setting_name}=", "#{fake_llm.id}") end end From b675c4c39bff94e3e4761c512693eaa2489402a1 Mon Sep 17 00:00:00 2001 From: Keegan George Date: Wed, 16 Jul 2025 11:39:55 -0700 Subject: [PATCH 20/34] DEV: Remove custom prefix in specs --- lib/ai_helper/chat_thread_titler.rb | 2 +- spec/configuration/feature_spec.rb | 9 +----- spec/configuration/llm_enumerator_spec.rb | 2 +- .../topics_locale_detection_backfill_spec.rb | 4 +-- .../completions/endpoints/anthropic_spec.rb | 6 ++-- .../completions/endpoints/aws_bedrock_spec.rb | 20 ++++++------- spec/lib/completions/endpoints/cohere_spec.rb | 2 +- spec/lib/completions/endpoints/gemini_spec.rb | 28 +++++++++---------- spec/lib/completions/endpoints/nova_spec.rb | 6 ++-- .../lib/completions/endpoints/open_ai_spec.rb | 12 ++++---- .../completions/endpoints/open_router_spec.rb | 4 +-- 
spec/lib/completions/endpoints/vllm_spec.rb | 2 +- spec/lib/completions/llm_spec.rb | 8 ++---- .../discourse_automation/automation_spec.rb | 7 ++--- .../discourse_automation/llm_report_spec.rb | 4 +-- .../discourse_automation/llm_triage_spec.rb | 2 +- .../lib/modules/automation/llm_triage_spec.rb | 28 +++++++++---------- .../modules/automation/report_runner_spec.rb | 12 ++++---- .../personas/question_consolidator_spec.rb | 2 +- .../personas/tools/create_artifact_spec.rb | 2 +- spec/lib/personas/tools/create_image_spec.rb | 2 +- spec/lib/personas/tools/dall_e_spec.rb | 2 +- spec/lib/personas/tools/db_schema_spec.rb | 2 +- .../tools/discourse_meta_search_spec.rb | 2 +- spec/lib/personas/tools/edit_image_spec.rb | 2 +- .../tools/github_file_content_spec.rb | 2 +- .../tools/github_pull_request_diff_spec.rb | 2 +- .../personas/tools/github_search_code_spec.rb | 2 +- .../tools/github_search_files_spec.rb | 2 +- spec/lib/personas/tools/google_spec.rb | 2 +- spec/lib/personas/tools/image_spec.rb | 2 +- .../tools/javascript_evaluator_spec.rb | 2 +- .../personas/tools/list_categories_spec.rb | 2 +- spec/lib/personas/tools/list_tags_spec.rb | 2 +- spec/lib/personas/tools/read_spec.rb | 2 +- spec/lib/personas/tools/researcher_spec.rb | 2 +- .../personas/tools/search_settings_spec.rb | 2 +- spec/lib/personas/tools/search_spec.rb | 2 +- .../personas/tools/setting_context_spec.rb | 2 +- spec/lib/personas/tools/summarize_spec.rb | 2 +- spec/lib/personas/tools/time_spec.rb | 2 +- spec/lib/personas/tools/web_browser_spec.rb | 2 +- spec/lib/translation/base_translator_spec.rb | 5 +--- .../translation/category_localizer_spec.rb | 6 +--- spec/lib/translation/entry_point_spec.rb | 8 ++---- .../lib/translation/language_detector_spec.rb | 6 +--- spec/models/ai_tool_spec.rb | 2 +- .../requests/admin/ai_llms_controller_spec.rb | 6 ++-- .../problem_check/ai_llm_status_spec.rb | 2 +- 49 files changed, 106 insertions(+), 135 deletions(-) diff --git a/lib/ai_helper/chat_thread_titler.rb 
b/lib/ai_helper/chat_thread_titler.rb index 15ffc52ca..233bfe14a 100644 --- a/lib/ai_helper/chat_thread_titler.rb +++ b/lib/ai_helper/chat_thread_titler.rb @@ -30,7 +30,7 @@ def call_llm(thread_content) messages: [{ type: :user, content: chat, id: "User" }], ) - DiscourseAi::Completions::Llm.proxy(SiteSetting.ai_helper_model).generate( + DiscourseAi::Completions::Llm.proxy(SiteSetting.ai_default_llm_model).generate( prompt, user: Discourse.system_user, stop_sequences: [""], diff --git a/spec/configuration/feature_spec.rb b/spec/configuration/feature_spec.rb index 19368d12f..6052ba1db 100644 --- a/spec/configuration/feature_spec.rb +++ b/spec/configuration/feature_spec.rb @@ -38,7 +38,6 @@ def allow_configuring_setting(&block) it "returns the configured llm model" do SiteSetting.ai_summarization_persona = ai_persona.id - allow_configuring_setting { SiteSetting.ai_summarization_model = "custom:#{llm_model.id}" } expect(ai_feature.llm_models).to eq([llm_model]) end end @@ -55,8 +54,6 @@ def allow_configuring_setting(&block) it "returns the persona's default llm when no specific helper model is set" do SiteSetting.ai_helper_proofreader_persona = ai_persona.id - SiteSetting.ai_helper_model = "" - expect(ai_feature.llm_models).to eq([llm_model]) end end @@ -75,11 +72,7 @@ def allow_configuring_setting(&block) it "uses translation model when configured" do SiteSetting.ai_translation_locale_detector_persona = ai_persona.id - ai_persona.update!(default_llm_id: nil) - allow_configuring_setting do - SiteSetting.ai_translation_model = "custom:#{translation_model.id}" - end - + ai_persona.update!(default_llm_id: translation_model.id) expect(ai_feature.llm_models).to eq([translation_model]) end end diff --git a/spec/configuration/llm_enumerator_spec.rb b/spec/configuration/llm_enumerator_spec.rb index e1044ee71..64c250d02 100644 --- a/spec/configuration/llm_enumerator_spec.rb +++ b/spec/configuration/llm_enumerator_spec.rb @@ -51,7 +51,7 @@ component: "text", name: "model", 
metadata: { - value: "custom:#{fake_model.id}", + value: fake_model.id, }, target: "script", ) diff --git a/spec/jobs/scheduled/topics_locale_detection_backfill_spec.rb b/spec/jobs/scheduled/topics_locale_detection_backfill_spec.rb index 924883312..8c0f1696e 100644 --- a/spec/jobs/scheduled/topics_locale_detection_backfill_spec.rb +++ b/spec/jobs/scheduled/topics_locale_detection_backfill_spec.rb @@ -5,10 +5,8 @@ subject(:job) { described_class.new } before do + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.discourse_ai_enabled = true - Fabricate(:fake_model).tap do |fake_llm| - SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") - end SiteSetting.ai_translation_enabled = true SiteSetting.ai_translation_backfill_hourly_rate = 100 SiteSetting.content_localization_supported_locales = "en" diff --git a/spec/lib/completions/endpoints/anthropic_spec.rb b/spec/lib/completions/endpoints/anthropic_spec.rb index 8a40c2131..8fecbf1f7 100644 --- a/spec/lib/completions/endpoints/anthropic_spec.rb +++ b/spec/lib/completions/endpoints/anthropic_spec.rb @@ -4,7 +4,7 @@ RSpec.describe DiscourseAi::Completions::Endpoints::Anthropic do let(:url) { "https://api.anthropic.com/v1/messages" } fab!(:model) { Fabricate(:anthropic_model, name: "claude-3-opus", vision_enabled: true) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(model) } let(:image100x100) { plugin_file_from_fixtures("100x100.jpg") } let(:upload100x100) do UploadCreator.new(image100x100, "image.jpg").create_for(Discourse.system_user.id) @@ -374,7 +374,7 @@ model.provider_params["reasoning_tokens"] = 10_000 model.save! 
- proxy = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(model) result = proxy.generate(prompt, user: Discourse.system_user) expect(result).to eq("Hello!") @@ -432,7 +432,7 @@ }, ).to_return(status: 200, body: body) - proxy = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(model) result = proxy.generate(prompt, user: Discourse.system_user) expect(result).to eq("Hello!") diff --git a/spec/lib/completions/endpoints/aws_bedrock_spec.rb b/spec/lib/completions/endpoints/aws_bedrock_spec.rb index 70bf9364d..fb598a9ec 100644 --- a/spec/lib/completions/endpoints/aws_bedrock_spec.rb +++ b/spec/lib/completions/endpoints/aws_bedrock_spec.rb @@ -47,7 +47,7 @@ def encode_message(message) model.provider_params["disable_native_tools"] = true model.save! - proxy = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(model) incomplete_tool_call = <<~XML.strip I should be ignored @@ -122,7 +122,7 @@ def encode_message(message) end it "supports streaming function calls" do - proxy = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(model) request = nil @@ -293,7 +293,7 @@ def encode_message(message) describe "Claude 3 support" do it "supports regular completions" do - proxy = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(model) request = nil @@ -340,7 +340,7 @@ def encode_message(message) model.provider_params["reasoning_tokens"] = 10_000 model.save! 
- proxy = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(model) request = nil @@ -387,7 +387,7 @@ def encode_message(message) end it "supports claude 3 streaming" do - proxy = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(model) request = nil @@ -448,7 +448,7 @@ def encode_message(message) }, ) - proxy = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(model) request = nil content = { @@ -487,7 +487,7 @@ def encode_message(message) describe "disabled tool use" do it "handles tool_choice: :none by adding a prefill message instead of using tool_choice param" do - proxy = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(model) request = nil # Create a prompt with tool_choice: :none @@ -549,7 +549,7 @@ def encode_message(message) describe "forced tool use" do it "can properly force tool use" do - proxy = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(model) request = nil tools = [ @@ -640,7 +640,7 @@ def encode_message(message) { type: "message_delta", delta: { usage: { output_tokens: 25 } } }, ].map { |message| encode_message(message) } - proxy = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(model) request = nil bedrock_mock.with_chunk_array_support do stub_request( @@ -718,7 +718,7 @@ def encode_message(message) { type: "message_delta", delta: { usage: { output_tokens: 25 } } }, ].map { |message| encode_message(message) } - proxy = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(model) request = nil bedrock_mock.with_chunk_array_support do stub_request( diff --git a/spec/lib/completions/endpoints/cohere_spec.rb b/spec/lib/completions/endpoints/cohere_spec.rb index 
c4fb06b6a..ccea5b8ea 100644 --- a/spec/lib/completions/endpoints/cohere_spec.rb +++ b/spec/lib/completions/endpoints/cohere_spec.rb @@ -3,7 +3,7 @@ RSpec.describe DiscourseAi::Completions::Endpoints::Cohere do fab!(:cohere_model) - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{cohere_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(cohere_model) } fab!(:user) let(:prompt) do diff --git a/spec/lib/completions/endpoints/gemini_spec.rb b/spec/lib/completions/endpoints/gemini_spec.rb index 3a6543ea3..4ef5f7597 100644 --- a/spec/lib/completions/endpoints/gemini_spec.rb +++ b/spec/lib/completions/endpoints/gemini_spec.rb @@ -160,7 +160,7 @@ def tool_response req_body = nil - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) url = "#{model.url}:generateContent?key=123" stub_request(:post, url).with( @@ -186,7 +186,7 @@ def tool_response req_body = nil - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) url = "#{model.url}:generateContent?key=123" stub_request(:post, url).with( @@ -220,7 +220,7 @@ def tool_response req_body = nil - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) url = "#{model.url}:generateContent?key=123" stub_request(:post, url).with( @@ -246,7 +246,7 @@ def tool_response req_body = nil - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) url = "#{model.url}:generateContent?key=123" stub_request(:post, url).with( @@ -274,7 +274,7 @@ def tool_response req_body = nil - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) url = "#{model.url}:generateContent?key=123" stub_request(:post, url).with( @@ -297,7 +297,7 @@ def tool_response it "properly encodes tool calls" do prompt = 
DiscourseAi::Completions::Prompt.new("Hello", tools: [echo_tool]) - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) url = "#{model.url}:generateContent?key=123" response_json = { "functionCall" => { name: "echo", args: { text: "ydney" } } } @@ -332,7 +332,7 @@ def tool_response req_body = nil - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) url = "#{model.url}:generateContent?key=123" stub_request(:post, url).with( @@ -410,7 +410,7 @@ def tool_response payload = rows.map { |r| "data: #{r.to_json}\n\n" }.join - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) url = "#{model.url}:streamGenerateContent?alt=sse&key=123" prompt = DiscourseAi::Completions::Prompt.new("Hello", tools: [echo_tool]) @@ -450,7 +450,7 @@ def tool_response TEXT - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) url = "#{model.url}:streamGenerateContent?alt=sse&key=123" output = [] @@ -478,7 +478,7 @@ def tool_response split = data.split("|") - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) url = "#{model.url}:streamGenerateContent?alt=sse&key=123" output = [] @@ -497,7 +497,7 @@ def tool_response req_body = nil - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) url = "#{model.url}:generateContent?key=123" stub_request(:post, url).with( @@ -525,7 +525,7 @@ def tool_response req_body = nil - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) url = "#{model.url}:generateContent?key=123" stub_request(:post, url).with( @@ -600,7 +600,7 @@ def tool_response req_body = nil - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = 
DiscourseAi::Completions::Llm.proxy(model) url = "#{model.url}:streamGenerateContent?alt=sse&key=123" stub_request(:post, url).with( @@ -657,7 +657,7 @@ def tool_response TEXT - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) url = "#{model.url}:streamGenerateContent?alt=sse&key=123" output = [] diff --git a/spec/lib/completions/endpoints/nova_spec.rb b/spec/lib/completions/endpoints/nova_spec.rb index aa2727f79..7de21541c 100644 --- a/spec/lib/completions/endpoints/nova_spec.rb +++ b/spec/lib/completions/endpoints/nova_spec.rb @@ -28,7 +28,7 @@ def encode_message(message) end it "should be able to make a simple request" do - proxy = DiscourseAi::Completions::Llm.proxy("custom:#{nova_model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(nova_model) content = { "output" => { @@ -90,7 +90,7 @@ def encode_message(message) stub_request(:post, stream_url).to_return(status: 200, body: messages.join) - proxy = DiscourseAi::Completions::Llm.proxy("custom:#{nova_model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(nova_model) responses = [] proxy.generate("Hello!", user: user) { |partial| responses << partial } @@ -104,7 +104,7 @@ def encode_message(message) #model.provider_params["disable_native_tools"] = true #model.save!
- proxy = DiscourseAi::Completions::Llm.proxy("custom:#{nova_model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(nova_model) prompt = DiscourseAi::Completions::Prompt.new( "You are a helpful assistant.", diff --git a/spec/lib/completions/endpoints/open_ai_spec.rb b/spec/lib/completions/endpoints/open_ai_spec.rb index cd95476d8..01e324ca9 100644 --- a/spec/lib/completions/endpoints/open_ai_spec.rb +++ b/spec/lib/completions/endpoints/open_ai_spec.rb @@ -177,7 +177,7 @@ def request_body(prompt, stream: false, tool_call: false) describe "max tokens for reasoning models" do it "uses max_completion_tokens for reasoning models" do model.update!(name: "o3-mini", max_output_tokens: 999) - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) prompt = DiscourseAi::Completions::Prompt.new( "You are a bot", @@ -216,7 +216,7 @@ def request_body(prompt, stream: false, tool_call: false) describe "repeat calls" do it "can properly reset context" do - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) tools = [ { @@ -297,7 +297,7 @@ def request_body(prompt, stream: false, tool_call: false) describe "max tokens remapping" do it "remaps max_tokens to max_completion_tokens for reasoning models" do model.update!(name: "o3-mini") - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) body_parsed = nil stub_request(:post, "https://api.openai.com/v1/chat/completions").with( @@ -313,7 +313,7 @@ def request_body(prompt, stream: false, tool_call: false) describe "forced tool use" do it "can properly force tool use" do - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) tools = [ { @@ -441,7 +441,7 @@ def request_body(prompt, stream: false, tool_call: false) describe "disabled tool use" do it "can properly disable tool use with :none" do - llm
= DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) tools = [ { @@ -532,7 +532,7 @@ def request_body(prompt, stream: false, tool_call: false) describe "image support" do it "can handle images" do model = Fabricate(:llm_model, vision_enabled: true) - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) prompt = DiscourseAi::Completions::Prompt.new( "You are image bot", diff --git a/spec/lib/completions/endpoints/open_router_spec.rb b/spec/lib/completions/endpoints/open_router_spec.rb index 8beb48aca..d89fd8a7c 100644 --- a/spec/lib/completions/endpoints/open_router_spec.rb +++ b/spec/lib/completions/endpoints/open_router_spec.rb @@ -25,7 +25,7 @@ body: { "choices" => [message: { role: "assistant", content: "world" }] }.to_json, ) - proxy = DiscourseAi::Completions::Llm.proxy("custom:#{open_router_model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(open_router_model) result = proxy.generate("hello", user: user) expect(result).to eq("world") @@ -62,7 +62,7 @@ body: { "choices" => [message: { role: "assistant", content: "test response" }] }.to_json, ) - proxy = DiscourseAi::Completions::Llm.proxy("custom:#{open_router_model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(open_router_model) # Request with parameters that should be ignored proxy.generate("test", user: user, top_p: 0.9, temperature: 0.8, max_tokens: 500) diff --git a/spec/lib/completions/endpoints/vllm_spec.rb b/spec/lib/completions/endpoints/vllm_spec.rb index 824bcbe06..56f042bb3 100644 --- a/spec/lib/completions/endpoints/vllm_spec.rb +++ b/spec/lib/completions/endpoints/vllm_spec.rb @@ -68,7 +68,7 @@ def stub_streamed_response(prompt, deltas, tool_call: false) fab!(:llm_model) { Fabricate(:vllm_model) } fab!(:user) - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } let(:vllm_mock) { 
VllmMock.new(endpoint) } let(:compliance) do diff --git a/spec/lib/completions/llm_spec.rb b/spec/lib/completions/llm_spec.rb index 4f22c16fb..115d1bc6f 100644 --- a/spec/lib/completions/llm_spec.rb +++ b/spec/lib/completions/llm_spec.rb @@ -46,9 +46,7 @@ body: result, ) result = +"" - described_class - .proxy("custom:#{model.id}") - .generate(prompt, user: user) { |partial| result << partial } + described_class.proxy(model).generate(prompt, user: user) { |partial| result << partial } expect(result).to eq("Hello") log = AiApiAuditLog.order("id desc").first @@ -75,7 +73,7 @@ ) result = - described_class.proxy("custom:#{model.id}").generate( + described_class.proxy(model).generate( "Hello", user: user, feature_name: "llm_triage", @@ -99,7 +97,7 @@ DiscourseAi::Completions::Endpoints::Fake.chunk_count = 10 end - let(:llm) { described_class.proxy("custom:#{fake_model.id}") } + let(:llm) { described_class.proxy(fake_model) } let(:prompt) do DiscourseAi::Completions::Prompt.new( diff --git a/spec/lib/discourse_automation/automation_spec.rb b/spec/lib/discourse_automation/automation_spec.rb index 3fbc08987..466076737 100644 --- a/spec/lib/discourse_automation/automation_spec.rb +++ b/spec/lib/discourse_automation/automation_spec.rb @@ -49,11 +49,8 @@ expect(models).to match_array( [ - { "translated_name" => "#{llm_model.display_name}", "id" => "custom:#{llm_model.id}" }, - { - "translated_name" => "#{seeded_model.display_name}", - "id" => "custom:#{seeded_model.id}", - }, + { "translated_name" => "#{llm_model.display_name}", "id" => llm_model.id.to_s }, + { "translated_name" => "#{seeded_model.display_name}", "id" => seeded_model.id.to_s }, ], ) end diff --git a/spec/lib/discourse_automation/llm_report_spec.rb b/spec/lib/discourse_automation/llm_report_spec.rb index c05f9df07..1432bd9ab 100644 --- a/spec/lib/discourse_automation/llm_report_spec.rb +++ b/spec/lib/discourse_automation/llm_report_spec.rb @@ -24,7 +24,7 @@ def add_automation_field(name, value, type: "text") 
it "can trigger via automation" do add_automation_field("sender", user.username, type: "user") add_automation_field("receivers", [user.username], type: "email_group_user") - add_automation_field("model", "custom:#{llm_model.id}") + add_automation_field("model", llm_model.id) add_automation_field("title", "Weekly report") DiscourseAi::Completions::Llm.with_prepared_responses(["An Amazing Report!!!"]) do @@ -38,7 +38,7 @@ def add_automation_field(name, value, type: "text") it "can target a topic" do add_automation_field("sender", user.username, type: "user") add_automation_field("topic_id", "#{post.topic_id}") - add_automation_field("model", "custom:#{llm_model.id}") + add_automation_field("model", llm_model.id) DiscourseAi::Completions::Llm.with_prepared_responses(["An Amazing Report!!!"]) do automation.trigger! diff --git a/spec/lib/discourse_automation/llm_triage_spec.rb b/spec/lib/discourse_automation/llm_triage_spec.rb index 1b3ca6904..6b0d01bba 100644 --- a/spec/lib/discourse_automation/llm_triage_spec.rb +++ b/spec/lib/discourse_automation/llm_triage_spec.rb @@ -27,7 +27,7 @@ def add_automation_field(name, value, type: "text") SiteSetting.tagging_enabled = true add_automation_field("system_prompt", "hello %%POST%%") add_automation_field("search_for_text", "bad") - add_automation_field("model", "custom:#{llm_model.id}") + add_automation_field("model", llm_model.id) add_automation_field("category", category.id, type: "category") add_automation_field("tags", %w[aaa bbb], type: "tags") add_automation_field("hide_topic", true, type: "boolean") diff --git a/spec/lib/modules/automation/llm_triage_spec.rb b/spec/lib/modules/automation/llm_triage_spec.rb index bbff9e106..fc4ffd18a 100644 --- a/spec/lib/modules/automation/llm_triage_spec.rb +++ b/spec/lib/modules/automation/llm_triage_spec.rb @@ -12,7 +12,7 @@ def triage(**args) DiscourseAi::Completions::Llm.with_prepared_responses(["good"]) do triage( post: post, - model: "custom:#{llm_model.id}", + model: 
llm_model.id.to_s, hide_topic: true, system_prompt: "test %%POST%%", search_for_text: "bad", @@ -27,7 +27,7 @@ def triage(**args) DiscourseAi::Completions::Llm.with_prepared_responses(["bad"]) do triage( post: post, - model: "custom:#{llm_model.id}", + model: llm_model.id.to_s, hide_topic: true, system_prompt: "test %%POST%%", search_for_text: "bad", @@ -44,7 +44,7 @@ def triage(**args) DiscourseAi::Completions::Llm.with_prepared_responses(["bad"]) do triage( post: post, - model: "custom:#{llm_model.id}", + model: llm_model.id.to_s, category_id: category.id, system_prompt: "test %%POST%%", search_for_text: "bad", @@ -60,7 +60,7 @@ def triage(**args) DiscourseAi::Completions::Llm.with_prepared_responses(["bad"]) do triage( post: post, - model: "custom:#{llm_model.id}", + model: llm_model.id.to_s, system_prompt: "test %%POST%%", search_for_text: "bad", canned_reply: "test canned reply 123", @@ -79,7 +79,7 @@ def triage(**args) DiscourseAi::Completions::Llm.with_prepared_responses(["bad"]) do triage( post: post, - model: "custom:#{llm_model.id}", + model: llm_model.id.to_s, system_prompt: "test %%POST%%", search_for_text: "bad", flag_post: true, @@ -97,7 +97,7 @@ def triage(**args) DiscourseAi::Completions::Llm.with_prepared_responses(["bad"]) do triage( post: post, - model: "custom:#{llm_model.id}", + model: llm_model.id.to_s, system_prompt: "test %%POST%%", search_for_text: "bad", flag_post: true, @@ -114,7 +114,7 @@ def triage(**args) DiscourseAi::Completions::Llm.with_prepared_responses(["bad"]) do triage( post: post, - model: "custom:#{llm_model.id}", + model: llm_model.id.to_s, system_prompt: "test %%POST%%", search_for_text: "bad", flag_post: true, @@ -132,7 +132,7 @@ def triage(**args) DiscourseAi::Completions::Llm.with_prepared_responses(["bad"]) do triage( post: post, - model: "custom:#{llm_model.id}", + model: llm_model.id.to_s, system_prompt: "test %%POST%%", search_for_text: "bad", flag_post: true, @@ -158,7 +158,7 @@ def triage(**args) 
DiscourseAi::Completions::Llm.with_prepared_responses(["bad"]) do triage( post: post, - model: "custom:#{llm_model.id}", + model: llm_model.id.to_s, system_prompt: "test %%POST%%", search_for_text: "bad", flag_post: true, @@ -174,7 +174,7 @@ def triage(**args) DiscourseAi::Completions::Llm.with_prepared_responses(["Bad.\n\nYo"]) do triage( post: post, - model: "custom:#{llm_model.id}", + model: llm_model.id.to_s, system_prompt: "test %%POST%%", search_for_text: "bad", flag_post: true, @@ -191,7 +191,7 @@ def triage(**args) DiscourseAi::Completions::Llm.with_prepared_responses(["bad"]) do triage( post: post, - model: "custom:#{llm_model.id}", + model: llm_model.id.to_s, system_prompt: "test %%POST%%", search_for_text: "BAD", flag_post: true, @@ -210,7 +210,7 @@ def triage(**args) DiscourseAi::Completions::Llm.with_prepared_responses(["bad"]) do triage( post: post, - model: "custom:#{llm_model.id}", + model: llm_model.id.to_s, system_prompt: "test %%POST%%", search_for_text: "bad", flag_post: true, @@ -229,7 +229,7 @@ def triage(**args) DiscourseAi::Completions::Llm.with_prepared_responses(["bad"]) do |spy| triage( post: post, - model: "custom:#{llm_model.id}", + model: llm_model.id.to_s, system_prompt: "test %%POST%%", search_for_text: "bad", flag_post: true, @@ -249,7 +249,7 @@ def triage(**args) DiscourseAi::Completions::Llm.with_prepared_responses(["bad"]) do triage( post: post, - model: "custom:#{llm_model.id}", + model: llm_model.id.to_s, system_prompt: "test %%POST%%", search_for_text: "bad", flag_post: true, diff --git a/spec/lib/modules/automation/report_runner_spec.rb b/spec/lib/modules/automation/report_runner_spec.rb index c9864e5c2..8f56cce54 100644 --- a/spec/lib/modules/automation/report_runner_spec.rb +++ b/spec/lib/modules/automation/report_runner_spec.rb @@ -43,7 +43,7 @@ module Automation sender_username: user.username, receivers: ["fake@discourse.com"], title: "test report %DATE%", - model: "custom:#{llm_model.id}", + model: llm_model.id, 
category_ids: nil, tags: nil, allow_secure_categories: false, @@ -80,7 +80,7 @@ module Automation sender_username: user.username, receivers: [receiver.username], title: "test report", - model: "custom:#{llm_model.id}", + model: llm_model.id, category_ids: nil, tags: nil, allow_secure_categories: false, @@ -125,7 +125,7 @@ module Automation sender_username: user.username, receivers: [receiver.username], title: "test report", - model: "custom:#{llm_model.id}", + model: llm_model.id, category_ids: nil, tags: nil, allow_secure_categories: false, @@ -168,7 +168,7 @@ module Automation sender_username: user.username, receivers: [receiver.username], title: "test report", - model: "custom:#{llm_model.id}", + model: llm_model.id, category_ids: nil, tags: nil, allow_secure_categories: false, @@ -200,7 +200,7 @@ module Automation sender_username: user.username, receivers: [group_for_reports.name], title: "group report", - model: "custom:#{llm_model.id}", + model: llm_model.id, category_ids: nil, tags: nil, allow_secure_categories: false, @@ -228,7 +228,7 @@ module Automation sender_username: user.username, receivers: [receiver.username], title: "test report", - model: "custom:#{llm_model.id}", + model: llm_model.id, category_ids: nil, tags: nil, allow_secure_categories: false, diff --git a/spec/lib/personas/question_consolidator_spec.rb b/spec/lib/personas/question_consolidator_spec.rb index 7fe543993..4150f805e 100644 --- a/spec/lib/personas/question_consolidator_spec.rb +++ b/spec/lib/personas/question_consolidator_spec.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true RSpec.describe DiscourseAi::Personas::QuestionConsolidator do - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{Fabricate(:fake_model).id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(Fabricate(:fake_model)) } let(:fake_endpoint) { DiscourseAi::Completions::Endpoints::Fake } fab!(:user) diff --git a/spec/lib/personas/tools/create_artifact_spec.rb 
b/spec/lib/personas/tools/create_artifact_spec.rb index 929e54ef5..efa95c2ba 100644 --- a/spec/lib/personas/tools/create_artifact_spec.rb +++ b/spec/lib/personas/tools/create_artifact_spec.rb @@ -2,7 +2,7 @@ RSpec.describe DiscourseAi::Personas::Tools::CreateArtifact do fab!(:llm_model) - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } fab!(:post) before { SiteSetting.ai_bot_enabled = true } diff --git a/spec/lib/personas/tools/create_image_spec.rb b/spec/lib/personas/tools/create_image_spec.rb index 0aa18fea0..781a40a36 100644 --- a/spec/lib/personas/tools/create_image_spec.rb +++ b/spec/lib/personas/tools/create_image_spec.rb @@ -12,7 +12,7 @@ end let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(gpt_35_turbo.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{gpt_35_turbo.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(gpt_35_turbo) } let(:progress_blk) { Proc.new {} } let(:create_image) { described_class.new({ prompts: prompts }, llm: llm, bot_user: bot_user) } diff --git a/spec/lib/personas/tools/dall_e_spec.rb b/spec/lib/personas/tools/dall_e_spec.rb index 50d4ab72d..193e07d8e 100644 --- a/spec/lib/personas/tools/dall_e_spec.rb +++ b/spec/lib/personas/tools/dall_e_spec.rb @@ -12,7 +12,7 @@ end let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(gpt_35_turbo.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{gpt_35_turbo.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(gpt_35_turbo) } let(:progress_blk) { Proc.new {} } let(:dall_e) { described_class.new({ prompts: prompts }, llm: llm, bot_user: bot_user) } diff --git a/spec/lib/personas/tools/db_schema_spec.rb b/spec/lib/personas/tools/db_schema_spec.rb index 643e3fe7e..d533a9d8f 100644 --- a/spec/lib/personas/tools/db_schema_spec.rb +++ b/spec/lib/personas/tools/db_schema_spec.rb @@ -3,7 +3,7 @@ RSpec.describe 
DiscourseAi::Personas::Tools::DbSchema do fab!(:llm_model) let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(llm_model.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } before { SiteSetting.ai_bot_enabled = true } describe "#process" do diff --git a/spec/lib/personas/tools/discourse_meta_search_spec.rb b/spec/lib/personas/tools/discourse_meta_search_spec.rb index 1ccc4d4db..9c7ac116d 100644 --- a/spec/lib/personas/tools/discourse_meta_search_spec.rb +++ b/spec/lib/personas/tools/discourse_meta_search_spec.rb @@ -4,7 +4,7 @@ fab!(:llm_model) { Fabricate(:llm_model, max_prompt_tokens: 8192) } let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(llm_model.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } let(:progress_blk) { Proc.new {} } let(:mock_search_json) { plugin_file_from_fixtures("search.json", "search_meta").read } diff --git a/spec/lib/personas/tools/edit_image_spec.rb b/spec/lib/personas/tools/edit_image_spec.rb index 4242aec48..d0b826da4 100644 --- a/spec/lib/personas/tools/edit_image_spec.rb +++ b/spec/lib/personas/tools/edit_image_spec.rb @@ -17,7 +17,7 @@ end let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(gpt_35_turbo.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{gpt_35_turbo.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(gpt_35_turbo) } let(:progress_blk) { Proc.new {} } let(:prompt) { "add a rainbow in the background" } diff --git a/spec/lib/personas/tools/github_file_content_spec.rb b/spec/lib/personas/tools/github_file_content_spec.rb index 4186dd01e..a5ce6a904 100644 --- a/spec/lib/personas/tools/github_file_content_spec.rb +++ b/spec/lib/personas/tools/github_file_content_spec.rb @@ -4,7 +4,7 @@ RSpec.describe DiscourseAi::Personas::Tools::GithubFileContent do 
fab!(:llm_model) - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } let(:tool) do described_class.new( diff --git a/spec/lib/personas/tools/github_pull_request_diff_spec.rb b/spec/lib/personas/tools/github_pull_request_diff_spec.rb index e8b3d2266..1934dd374 100644 --- a/spec/lib/personas/tools/github_pull_request_diff_spec.rb +++ b/spec/lib/personas/tools/github_pull_request_diff_spec.rb @@ -5,7 +5,7 @@ RSpec.describe DiscourseAi::Personas::Tools::GithubPullRequestDiff do let(:bot_user) { Fabricate(:user) } fab!(:llm_model) - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } let(:tool) { described_class.new({ repo: repo, pull_id: pull_id }, bot_user: bot_user, llm: llm) } context "with #sort_and_shorten_diff" do diff --git a/spec/lib/personas/tools/github_search_code_spec.rb b/spec/lib/personas/tools/github_search_code_spec.rb index b8fbca274..466b1102e 100644 --- a/spec/lib/personas/tools/github_search_code_spec.rb +++ b/spec/lib/personas/tools/github_search_code_spec.rb @@ -5,7 +5,7 @@ RSpec.describe DiscourseAi::Personas::Tools::GithubSearchCode do let(:bot_user) { Fabricate(:user) } fab!(:llm_model) - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } let(:tool) { described_class.new({ repo: repo, query: query }, bot_user: bot_user, llm: llm) } context "with valid search results" do diff --git a/spec/lib/personas/tools/github_search_files_spec.rb b/spec/lib/personas/tools/github_search_files_spec.rb index cc6926fd6..5b268efe6 100644 --- a/spec/lib/personas/tools/github_search_files_spec.rb +++ b/spec/lib/personas/tools/github_search_files_spec.rb @@ -4,7 +4,7 @@ RSpec.describe DiscourseAi::Personas::Tools::GithubSearchFiles do fab!(:llm_model) - let(:llm) { 
DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } let(:tool) do described_class.new( diff --git a/spec/lib/personas/tools/google_spec.rb b/spec/lib/personas/tools/google_spec.rb index 5062cea97..f0f32a078 100644 --- a/spec/lib/personas/tools/google_spec.rb +++ b/spec/lib/personas/tools/google_spec.rb @@ -3,7 +3,7 @@ RSpec.describe DiscourseAi::Personas::Tools::Google do fab!(:llm_model) let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(llm_model.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } let(:progress_blk) { Proc.new {} } let(:search) { described_class.new({ query: "some search term" }, bot_user: bot_user, llm: llm) } diff --git a/spec/lib/personas/tools/image_spec.rb b/spec/lib/personas/tools/image_spec.rb index 342c9f676..cb3f97d5e 100644 --- a/spec/lib/personas/tools/image_spec.rb +++ b/spec/lib/personas/tools/image_spec.rb @@ -19,7 +19,7 @@ toggle_enabled_bots(bots: [gpt_35_turbo]) end - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{gpt_35_turbo.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(gpt_35_turbo) } let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(gpt_35_turbo.name) } diff --git a/spec/lib/personas/tools/javascript_evaluator_spec.rb b/spec/lib/personas/tools/javascript_evaluator_spec.rb index cae05ee9c..05bb84339 100644 --- a/spec/lib/personas/tools/javascript_evaluator_spec.rb +++ b/spec/lib/personas/tools/javascript_evaluator_spec.rb @@ -3,7 +3,7 @@ RSpec.describe DiscourseAi::Personas::Tools::JavascriptEvaluator do fab!(:llm_model) let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(llm_model.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } let(:progress_blk) { Proc.new {} } before { 
SiteSetting.ai_bot_enabled = true } diff --git a/spec/lib/personas/tools/list_categories_spec.rb b/spec/lib/personas/tools/list_categories_spec.rb index bcda21233..f0b1c89b9 100644 --- a/spec/lib/personas/tools/list_categories_spec.rb +++ b/spec/lib/personas/tools/list_categories_spec.rb @@ -3,7 +3,7 @@ RSpec.describe DiscourseAi::Personas::Tools::ListCategories do fab!(:llm_model) let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(llm_model.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } before { SiteSetting.ai_bot_enabled = true } diff --git a/spec/lib/personas/tools/list_tags_spec.rb b/spec/lib/personas/tools/list_tags_spec.rb index b8f4ed5cd..4e8273bb3 100644 --- a/spec/lib/personas/tools/list_tags_spec.rb +++ b/spec/lib/personas/tools/list_tags_spec.rb @@ -3,7 +3,7 @@ RSpec.describe DiscourseAi::Personas::Tools::ListTags do fab!(:llm_model) let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(llm_model.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } before do SiteSetting.ai_bot_enabled = true diff --git a/spec/lib/personas/tools/read_spec.rb b/spec/lib/personas/tools/read_spec.rb index 2affc1f4e..3b04d9db1 100644 --- a/spec/lib/personas/tools/read_spec.rb +++ b/spec/lib/personas/tools/read_spec.rb @@ -3,7 +3,7 @@ RSpec.describe DiscourseAi::Personas::Tools::Read do fab!(:llm_model) let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(llm_model.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } let(:tool) { described_class.new({ topic_id: topic_with_tags.id }, bot_user: bot_user, llm: llm) } fab!(:parent_category) { Fabricate(:category, name: "animals") } diff --git a/spec/lib/personas/tools/researcher_spec.rb 
b/spec/lib/personas/tools/researcher_spec.rb index 8e1a35a1c..5ee02f10a 100644 --- a/spec/lib/personas/tools/researcher_spec.rb +++ b/spec/lib/personas/tools/researcher_spec.rb @@ -6,7 +6,7 @@ fab!(:llm_model) let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(llm_model.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } let(:progress_blk) { Proc.new {} } fab!(:admin) diff --git a/spec/lib/personas/tools/search_settings_spec.rb b/spec/lib/personas/tools/search_settings_spec.rb index f3cd4356e..833354618 100644 --- a/spec/lib/personas/tools/search_settings_spec.rb +++ b/spec/lib/personas/tools/search_settings_spec.rb @@ -3,7 +3,7 @@ RSpec.describe DiscourseAi::Personas::Tools::SearchSettings do fab!(:llm_model) let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(llm_model.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } let(:fake_settings) do [ diff --git a/spec/lib/personas/tools/search_spec.rb b/spec/lib/personas/tools/search_spec.rb index f90f660b0..cb1765b5c 100644 --- a/spec/lib/personas/tools/search_spec.rb +++ b/spec/lib/personas/tools/search_spec.rb @@ -6,7 +6,7 @@ fab!(:llm_model) let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(llm_model.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } let(:progress_blk) { Proc.new {} } fab!(:admin) diff --git a/spec/lib/personas/tools/setting_context_spec.rb b/spec/lib/personas/tools/setting_context_spec.rb index 20e26b641..d43032fe2 100644 --- a/spec/lib/personas/tools/setting_context_spec.rb +++ b/spec/lib/personas/tools/setting_context_spec.rb @@ -12,7 +12,7 @@ def has_rg? 
fab!(:llm_model) let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(llm_model.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } before { SiteSetting.ai_bot_enabled = true } diff --git a/spec/lib/personas/tools/summarize_spec.rb b/spec/lib/personas/tools/summarize_spec.rb index 2bda3cd37..eb88441f6 100644 --- a/spec/lib/personas/tools/summarize_spec.rb +++ b/spec/lib/personas/tools/summarize_spec.rb @@ -3,7 +3,7 @@ RSpec.describe DiscourseAi::Personas::Tools::Summarize do fab!(:llm_model) let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(llm_model.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } let(:progress_blk) { Proc.new {} } before { SiteSetting.ai_bot_enabled = true } diff --git a/spec/lib/personas/tools/time_spec.rb b/spec/lib/personas/tools/time_spec.rb index e92a32ad5..8dd5492b4 100644 --- a/spec/lib/personas/tools/time_spec.rb +++ b/spec/lib/personas/tools/time_spec.rb @@ -3,7 +3,7 @@ RSpec.describe DiscourseAi::Personas::Tools::Time do fab!(:llm_model) let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(llm_model.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } before { SiteSetting.ai_bot_enabled = true } diff --git a/spec/lib/personas/tools/web_browser_spec.rb b/spec/lib/personas/tools/web_browser_spec.rb index aebd4e66d..318e05f71 100644 --- a/spec/lib/personas/tools/web_browser_spec.rb +++ b/spec/lib/personas/tools/web_browser_spec.rb @@ -3,7 +3,7 @@ RSpec.describe DiscourseAi::Personas::Tools::WebBrowser do fab!(:llm_model) let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(llm_model.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { 
DiscourseAi::Completions::Llm.proxy(llm_model) } before { SiteSetting.ai_bot_enabled = true } diff --git a/spec/lib/translation/base_translator_spec.rb b/spec/lib/translation/base_translator_spec.rb index 729b2e0c4..da32df7b7 100644 --- a/spec/lib/translation/base_translator_spec.rb +++ b/spec/lib/translation/base_translator_spec.rb @@ -8,10 +8,7 @@ end before do - Fabricate(:fake_model).tap do |fake_llm| - SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") - end - + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_translation_enabled = true end diff --git a/spec/lib/translation/category_localizer_spec.rb b/spec/lib/translation/category_localizer_spec.rb index cfea804ef..00e268a7c 100644 --- a/spec/lib/translation/category_localizer_spec.rb +++ b/spec/lib/translation/category_localizer_spec.rb @@ -3,11 +3,7 @@ describe DiscourseAi::Translation::CategoryLocalizer do subject(:localizer) { described_class } - before do - Fabricate(:fake_model).tap do |fake_llm| - SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") - end - end + before { assign_fake_provider_to(:ai_default_llm_model) } def post_raw_translator_stub(opts) mock = instance_double(DiscourseAi::Translation::PostRawTranslator) diff --git a/spec/lib/translation/entry_point_spec.rb b/spec/lib/translation/entry_point_spec.rb index a2cd00d4c..44a4c0f99 100644 --- a/spec/lib/translation/entry_point_spec.rb +++ b/spec/lib/translation/entry_point_spec.rb @@ -2,10 +2,8 @@ describe DiscourseAi::Translation::EntryPoint do before do + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.discourse_ai_enabled = true - Fabricate(:fake_model).tap do |fake_llm| - SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") - end SiteSetting.ai_translation_enabled = true SiteSetting.content_localization_supported_locales = "en" end @@ -59,9 +57,7 @@ before do SiteSetting.discourse_ai_enabled = true - Fabricate(:fake_model).tap do |fake_llm| - 
SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") - end + assign_fake_provider_to(:ai_default_llm_model) end it "enqueues detect topic locale and translate topic job" do diff --git a/spec/lib/translation/language_detector_spec.rb b/spec/lib/translation/language_detector_spec.rb index 9277690cf..3300854ae 100644 --- a/spec/lib/translation/language_detector_spec.rb +++ b/spec/lib/translation/language_detector_spec.rb @@ -7,11 +7,7 @@ ) end - before do - Fabricate(:fake_model).tap do |fake_llm| - SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") - end - end + before { assign_fake_provider_to(:ai_default_llm_model) } describe ".detect" do let(:locale_detector) { described_class.new("meow") } diff --git a/spec/models/ai_tool_spec.rb b/spec/models/ai_tool_spec.rb index 96a3f667d..a11f69554 100644 --- a/spec/models/ai_tool_spec.rb +++ b/spec/models/ai_tool_spec.rb @@ -2,7 +2,7 @@ RSpec.describe AiTool do fab!(:llm_model) { Fabricate(:llm_model, name: "claude-2") } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } fab!(:topic) fab!(:post) { Fabricate(:post, topic: topic, raw: "bananas are a tasty fruit") } fab!(:bot_user) { Discourse.system_user } diff --git a/spec/requests/admin/ai_llms_controller_spec.rb b/spec/requests/admin/ai_llms_controller_spec.rb index e0444e651..cf79c9dcf 100644 --- a/spec/requests/admin/ai_llms_controller_spec.rb +++ b/spec/requests/admin/ai_llms_controller_spec.rb @@ -52,17 +52,17 @@ # setting the setting calls the model DiscourseAi::Completions::Llm.with_prepared_responses(["OK"]) do - SiteSetting.ai_helper_model = "custom:#{llm_model.id}" + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_helper_enabled = true end DiscourseAi::Completions::Llm.with_prepared_responses(["OK"]) do - SiteSetting.ai_summarization_model = "custom:#{llm_model2.id}" + assign_fake_provider_to(:ai_default_llm_model) 
SiteSetting.ai_summarization_enabled = true end DiscourseAi::Completions::Llm.with_prepared_responses(["OK"]) do - SiteSetting.ai_embeddings_semantic_search_hyde_model = "custom:#{llm_model2.id}" + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_embeddings_semantic_search_enabled = true end diff --git a/spec/services/problem_check/ai_llm_status_spec.rb b/spec/services/problem_check/ai_llm_status_spec.rb index b92274eb9..2778b46d3 100644 --- a/spec/services/problem_check/ai_llm_status_spec.rb +++ b/spec/services/problem_check/ai_llm_status_spec.rb @@ -26,7 +26,7 @@ before do stub_request(:post, post_url).to_return(status: 200, body: success_response, headers: {}) - SiteSetting.ai_summarization_model = "custom:#{llm_model.id}" + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_summarization_enabled = true end From 5841543bd941e6119ba2f641ee340b922eacfb64 Mon Sep 17 00:00:00 2001 From: Keegan George Date: Wed, 16 Jul 2025 14:51:32 -0700 Subject: [PATCH 21/34] FIX: validator --- config/locales/server.en.yml | 2 +- lib/configuration/llm_dependency_validator.rb | 3 +- lib/configuration/llm_validator.rb | 38 ++++++++-------- spec/configuration/llm_validator_spec.rb | 43 +++++++++++++------ 4 files changed, 54 insertions(+), 32 deletions(-) diff --git a/config/locales/server.en.yml b/config/locales/server.en.yml index 061799783..40602236a 100644 --- a/config/locales/server.en.yml +++ b/config/locales/server.en.yml @@ -583,7 +583,7 @@ en: llm: configuration: create_llm: "You need to setup an LLM before enabling this feature" - disable_module_first: "You have to disable %{setting} first." + disable_modules_first: "You must disable these modules first: %{settings}" set_llm_first: "Set %{setting} first" model_unreachable: "We couldn't get a response from this model. Check your settings first." 
invalid_seeded_model: "You can't use this model with this feature" diff --git a/lib/configuration/llm_dependency_validator.rb b/lib/configuration/llm_dependency_validator.rb index c3b1a24fb..8ca890be0 100644 --- a/lib/configuration/llm_dependency_validator.rb +++ b/lib/configuration/llm_dependency_validator.rb @@ -10,8 +10,7 @@ def initialize(opts = {}) def valid_value?(val) return true if val == "f" - @llm_dependency_setting_name = - DiscourseAi::Configuration::LlmValidator.new.choose_llm_setting_for(@opts[:name]) + @llm_dependency_setting_name = :ai_default_llm_model SiteSetting.public_send(@llm_dependency_setting_name).present? end diff --git a/lib/configuration/llm_validator.rb b/lib/configuration/llm_validator.rb index 262119464..ff63d8d37 100644 --- a/lib/configuration/llm_validator.rb +++ b/lib/configuration/llm_validator.rb @@ -9,10 +9,18 @@ def initialize(opts = {}) def valid_value?(val) if val == "" - @parent_module_name = modules_and_choose_llm_settings.invert[@opts[:name]] + if @opts[:name] == :ai_default_llm_model + @parent_module_names = [] - @parent_enabled = SiteSetting.public_send(@parent_module_name) - return !@parent_enabled + enabled_settings.each do |setting_name| + if SiteSetting.public_send(setting_name) == true + @parent_module_names << setting_name + @parent_enabled = true + end + end + + return !@parent_enabled + end end run_test(val).tap { |result| @unreachable = result } @@ -43,11 +51,11 @@ def modules_using(llm_model) end def error_message - if @parent_enabled + if @parent_enabled && @parent_module_names.present? 
return( I18n.t( - "discourse_ai.llm.configuration.disable_module_first", - setting: @parent_module_name, + "discourse_ai.llm.configuration.disable_modules_first", + settings: @parent_module_names.join(", "), ) ) end @@ -57,17 +65,13 @@ def error_message I18n.t("discourse_ai.llm.configuration.model_unreachable") end - def choose_llm_setting_for(module_enabler_setting) - modules_and_choose_llm_settings[module_enabler_setting] - end - - def modules_and_choose_llm_settings - { - ai_embeddings_semantic_search_enabled: :ai_default_llm_model, - ai_helper_enabled: :ai_default_llm_model, - ai_summarization_enabled: :ai_default_llm_model, - ai_translation_enabled: :ai_default_llm_model, - } + def enabled_settings + %i[ + ai_embeddings_semantic_search_enabled + ai_helper_enabled + ai_summarization_enabled + ai_translation_enabled + ] end end end diff --git a/spec/configuration/llm_validator_spec.rb b/spec/configuration/llm_validator_spec.rb index eb1aaad2c..9e91a8833 100644 --- a/spec/configuration/llm_validator_spec.rb +++ b/spec/configuration/llm_validator_spec.rb @@ -1,21 +1,40 @@ # frozen_string_literal: true -RSpec.describe DiscourseAi::Configuration::LlmValidator do +require "rails_helper" + +describe DiscourseAi::Configuration::LlmValidator do describe "#valid_value?" 
do - context "when the parent module is enabled and we try to reset the selected model" do - before do - assign_fake_provider_to(:ai_default_llm_model) - SiteSetting.ai_summarization_enabled = true - end + let(:validator) { described_class.new(name: :ai_default_llm_model) } + + before do + assign_fake_provider_to(:ai_default_llm_model) + SiteSetting.ai_helper_enabled = false + SiteSetting.ai_summarization_enabled = false + SiteSetting.ai_embeddings_semantic_search_enabled = false + SiteSetting.ai_translation_enabled = false + end - it "returns false and displays an error message" do - validator = described_class.new(name: :ai_summarization_model) + it "returns true when no modules are enabled and value is empty string" do + expect(validator.valid_value?("")).to eq(true) + end - value = validator.valid_value?("") + it "returns false when a module is enabled and value is empty string" do + SiteSetting.ai_helper_enabled = true + expect(validator.valid_value?("")).to eq(false) + expect(validator.error_message).to include("ai_helper_enabled") + end + + it "returns false when multiple modules are enabled and value is empty string" do + SiteSetting.ai_helper_enabled = true + SiteSetting.ai_summarization_enabled = true + expect(validator.valid_value?("")).to eq(false) + expect(validator.error_message).to include("ai_helper_enabled, ai_summarization_enabled") + end - expect(value).to eq(false) - expect(validator.error_message).to include("ai_summarization_enabled") - end + it "returns true for non-empty values regardless of module state" do + SiteSetting.ai_helper_enabled = true + SiteSetting.ai_summarization_enabled = true + expect(validator.valid_value?("some_model")).to eq(true) end end end From 7af3ce820dd528725bcaf01cd583674fb42a823c Mon Sep 17 00:00:00 2001 From: Keegan George Date: Thu, 17 Jul 2025 09:27:08 -0700 Subject: [PATCH 22/34] DEV: Remove references to translation model --- config/settings.yml | 2 +- lib/translation.rb | 2 +- 
spec/configuration/feature_spec.rb | 2 ++ spec/lib/translation/language_detector_spec.rb | 2 +- 4 files changed, 5 insertions(+), 3 deletions(-) diff --git a/config/settings.yml b/config/settings.yml index 24d92169f..c05497ba1 100644 --- a/config/settings.yml +++ b/config/settings.yml @@ -488,7 +488,7 @@ discourse_ai: client: true validator: "DiscourseAi::Configuration::LlmDependencyValidator" area: "ai-features/translation" - ai_translation_model: + ai_translation_model: # Deprecated. TODO(keegan): Remove 2025-09-01 default: "" type: enum allow_any: false diff --git a/lib/translation.rb b/lib/translation.rb index 09093f1c3..73bc50a73 100644 --- a/lib/translation.rb +++ b/lib/translation.rb @@ -4,7 +4,7 @@ module DiscourseAi module Translation def self.enabled? SiteSetting.discourse_ai_enabled && SiteSetting.ai_translation_enabled && - SiteSetting.ai_translation_model.present? && + SiteSetting.ai_default_llm_model.present? && SiteSetting.content_localization_supported_locales.present? end diff --git a/spec/configuration/feature_spec.rb b/spec/configuration/feature_spec.rb index 6052ba1db..11aab1693 100644 --- a/spec/configuration/feature_spec.rb +++ b/spec/configuration/feature_spec.rb @@ -6,6 +6,8 @@ fab!(:llm_model) fab!(:ai_persona) { Fabricate(:ai_persona, default_llm_id: llm_model.id) } + before { assign_fake_provider_to(:ai_default_llm_model) } + def allow_configuring_setting(&block) DiscourseAi::Completions::Llm.with_prepared_responses(["OK"]) { block.call } end diff --git a/spec/lib/translation/language_detector_spec.rb b/spec/lib/translation/language_detector_spec.rb index 3300854ae..99393e7e5 100644 --- a/spec/lib/translation/language_detector_spec.rb +++ b/spec/lib/translation/language_detector_spec.rb @@ -34,7 +34,7 @@ allow(DiscourseAi::Completions::Prompt).to receive(:new).and_return(mock_prompt) allow(DiscourseAi::Completions::Llm).to receive(:proxy).with( - SiteSetting.ai_translation_model, + SiteSetting.ai_default_llm_model, ).and_return(mock_llm) 
allow(mock_llm).to receive(:generate).with( mock_prompt, From 273a1fa618f9360e002a3117f5437d27221bdd0e Mon Sep 17 00:00:00 2001 From: Keegan George Date: Thu, 17 Jul 2025 09:43:45 -0700 Subject: [PATCH 23/34] FIX: occurrences of old model setting --- lib/embeddings/semantic_search.rb | 40 +++++++++++++++++-------------- lib/tasks/create_topics.rake | 2 +- 2 files changed, 23 insertions(+), 19 deletions(-) diff --git a/lib/embeddings/semantic_search.rb b/lib/embeddings/semantic_search.rb index e4e894cab..2072ff5b5 100644 --- a/lib/embeddings/semantic_search.rb +++ b/lib/embeddings/semantic_search.rb @@ -6,8 +6,8 @@ class SemanticSearch def self.clear_cache_for(query) digest = OpenSSL::Digest::SHA1.hexdigest(query) - hyde_key = - "semantic-search-#{digest}-#{SiteSetting.ai_embeddings_semantic_search_hyde_model}" + hyde_model_id = find_ai_hyde_model_id + hyde_key = "semantic-search-#{digest}-#{hyde_model_id}" Discourse.cache.delete(hyde_key) Discourse.cache.delete("#{hyde_key}-#{SiteSetting.ai_embeddings_selected_model}") @@ -20,12 +20,9 @@ def initialize(guardian) def cached_query?(query) digest = OpenSSL::Digest::SHA1.hexdigest(query) + hyde_model_id = find_ai_hyde_model_id embedding_key = - build_embedding_key( - digest, - SiteSetting.ai_embeddings_semantic_search_hyde_model, - SiteSetting.ai_embeddings_selected_model, - ) + build_embedding_key(digest, hyde_model_id, SiteSetting.ai_embeddings_selected_model) Discourse.cache.read(embedding_key).present? 
end @@ -36,14 +33,11 @@ def vector def hyde_embedding(search_term) digest = OpenSSL::Digest::SHA1.hexdigest(search_term) - hyde_key = build_hyde_key(digest, SiteSetting.ai_embeddings_semantic_search_hyde_model) + hyde_model_id = find_ai_hyde_model_id + hyde_key = build_hyde_key(digest, hyde_model_id) embedding_key = - build_embedding_key( - digest, - SiteSetting.ai_embeddings_semantic_search_hyde_model, - SiteSetting.ai_embeddings_selected_model, - ) + build_embedding_key(digest, hyde_model_id, SiteSetting.ai_embeddings_selected_model) hypothetical_post = Discourse @@ -111,6 +105,7 @@ def quick_search(query) max_semantic_results_per_page = 100 search = Search.new(query, { guardian: guardian }) search_term = search.term + hyde_model_id = find_ai_hyde_model_id return [] if search_term.nil? || search_term.length < SiteSetting.min_search_term_length @@ -119,11 +114,7 @@ def quick_search(query) digest = OpenSSL::Digest::SHA1.hexdigest(search_term) embedding_key = - build_embedding_key( - digest, - SiteSetting.ai_embeddings_semantic_search_hyde_model, - SiteSetting.ai_embeddings_selected_model, - ) + build_embedding_key(digest, hyde_model_id, SiteSetting.ai_embeddings_selected_model) search_term_embedding = Discourse @@ -221,6 +212,19 @@ def find_ai_hyde_model(persona_klass) end end + def find_ai_hyde_model_id + persona_llm_id = + AiPersona.find_by( + id: SiteSetting.ai_embeddings_semantic_search_hyde_persona, + )&.default_llm_id + + if persona_llm_id.present? 
+ persona_llm_id + else + SiteSetting.ai_default_llm_model.presence&.to_i || LlmModel.last&.id + end + end + private attr_reader :guardian diff --git a/lib/tasks/create_topics.rake b/lib/tasks/create_topics.rake index 3095345bb..f64953e5b 100644 --- a/lib/tasks/create_topics.rake +++ b/lib/tasks/create_topics.rake @@ -88,7 +88,7 @@ namespace :ai do messages: [{ type: :user, content: prompt, id: "user" }], ) - DiscourseAi::Completions::Llm.proxy(SiteSetting.ai_helper_model).generate( + DiscourseAi::Completions::Llm.proxy(SiteSetting.ai_default_llm_model).generate( prompt, user: Discourse.system_user, feature_name: "topic-generator", From 08d2f3ddf97ae2594dc64ba3850e2e34870190a6 Mon Sep 17 00:00:00 2001 From: Keegan George Date: Thu, 17 Jul 2025 11:36:19 -0700 Subject: [PATCH 24/34] FIX: spam --- .../discourse/components/ai-spam.gjs | 2 +- spec/system/ai_moderation/ai_spam_spec.rb | 54 ++++++++++--------- 2 files changed, 30 insertions(+), 26 deletions(-) diff --git a/assets/javascripts/discourse/components/ai-spam.gjs b/assets/javascripts/discourse/components/ai-spam.gjs index 7f4464cf1..812660ac7 100644 --- a/assets/javascripts/discourse/components/ai-spam.gjs +++ b/assets/javascripts/discourse/components/ai-spam.gjs @@ -127,7 +127,7 @@ export default class AiSpam extends Component { } get llmId() { - return this.selectedLLM.toString().split(":")[1]; + return this.selectedLLM; } @action diff --git a/spec/system/ai_moderation/ai_spam_spec.rb b/spec/system/ai_moderation/ai_spam_spec.rb index 4640b760b..b48d2fac5 100644 --- a/spec/system/ai_moderation/ai_spam_spec.rb +++ b/spec/system/ai_moderation/ai_spam_spec.rb @@ -2,47 +2,51 @@ RSpec.describe "AI Spam Configuration", type: :system, js: true do fab!(:admin) - let(:llm_model) { Fabricate(:llm_model) } before do SiteSetting.discourse_ai_enabled = true sign_in(admin) end - it "can properly configure spam settings" do - visit "/admin/plugins/discourse-ai/ai-spam" + context "when no LLMs are configured" do + it "shows the
placeholder when no LLM is configured" do + visit "/admin/plugins/discourse-ai/ai-spam" - expect(page).to have_css(".ai-spam__llm-placeholder") + expect(page).to have_css(".ai-spam__llm-placeholder") - toggle = PageObjects::Components::DToggleSwitch.new(".ai-spam__toggle") + toggle = PageObjects::Components::DToggleSwitch.new(".ai-spam__toggle") - toggle.toggle - dialog = PageObjects::Components::Dialog.new - expect(dialog).to have_content(I18n.t("discourse_ai.llm.configuration.must_select_model")) - dialog.click_ok + toggle.toggle + dialog = PageObjects::Components::Dialog.new + expect(dialog).to have_content(I18n.t("discourse_ai.llm.configuration.must_select_model")) + dialog.click_ok - expect(toggle.unchecked?).to eq(true) - - llm_model - visit "/admin/plugins/discourse-ai/ai-spam" + expect(toggle.unchecked?).to eq(true) + end + end + context "when LLMs are configured" do + fab!(:llm_model) + it "can properly configure spam settings" do + visit "/admin/plugins/discourse-ai/ai-spam" - toggle = PageObjects::Components::DToggleSwitch.new(".ai-spam__toggle") - toggle.toggle + toggle = PageObjects::Components::DToggleSwitch.new(".ai-spam__toggle") + toggle.toggle - try_until_success { expect(AiModerationSetting.spam&.llm_model_id).to eq(llm_model.id) } + try_until_success { expect(AiModerationSetting.spam&.llm_model_id).to eq(llm_model.id) } - find(".ai-spam__instructions-input").fill_in(with: "Test spam detection instructions") - find(".ai-spam__instructions-save").click + find(".ai-spam__instructions-input").fill_in(with: "Test spam detection instructions") + find(".ai-spam__instructions-save").click - toasts = PageObjects::Components::Toasts.new - expect(toasts).to have_content(I18n.t("js.discourse_ai.spam.settings_saved")) + toasts = PageObjects::Components::Toasts.new + expect(toasts).to have_content(I18n.t("js.discourse_ai.spam.settings_saved")) - expect(AiModerationSetting.spam.custom_instructions).to eq("Test spam detection instructions") + 
expect(AiModerationSetting.spam.custom_instructions).to eq("Test spam detection instructions") - visit "/admin/plugins/discourse-ai/ai-llms" + visit "/admin/plugins/discourse-ai/ai-llms" - expect(find(".ai-llm-list-editor__usages")).to have_content( - I18n.t("js.discourse_ai.llms.usage.ai_spam"), - ) + expect(find(".ai-llm-list-editor__usages")).to have_content( + I18n.t("js.discourse_ai.llms.usage.ai_spam"), + ) + end end end From f077d58abfc86b274384cf1aea7547e3cbb113a0 Mon Sep 17 00:00:00 2001 From: Keegan George Date: Thu, 17 Jul 2025 11:48:28 -0700 Subject: [PATCH 25/34] FIX: automation spec --- lib/automation/report_runner.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/automation/report_runner.rb b/lib/automation/report_runner.rb index 1c207a45a..d4d06b143 100644 --- a/lib/automation/report_runner.rb +++ b/lib/automation/report_runner.rb @@ -74,7 +74,7 @@ def initialize( else I18n.t("discourse_automation.scriptables.llm_report.title") end - @model = LlmModel.find_by(id: model.split(":")&.last) + @model = LlmModel.find_by(id: model) @persona = AiPersona.find(persona_id).class_instance.new @category_ids = category_ids @tags = tags From a927fd227079fc35e6b27cbb4c042313b1e32b65 Mon Sep 17 00:00:00 2001 From: Keegan George Date: Thu, 17 Jul 2025 12:00:05 -0700 Subject: [PATCH 26/34] fix --- lib/completions/llm.rb | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lib/completions/llm.rb b/lib/completions/llm.rb index 70e840049..663c6e68c 100644 --- a/lib/completions/llm.rb +++ b/lib/completions/llm.rb @@ -292,6 +292,8 @@ def proxy(model) llm_model = if model.is_a?(LlmModel) model + elsif model.is_a?(Numeric) + LlmModel.find_by(id: model) else model_name_without_prov = model.split(":").last.to_i From d9b53b4a3d1a4a496cb81b6d7b50547276ae54dc Mon Sep 17 00:00:00 2001 From: Keegan George Date: Thu, 17 Jul 2025 12:23:38 -0700 Subject: [PATCH 27/34] FIX: semantic search --- lib/embeddings/semantic_search.rb | 8 ++++---- 1 file changed, 4 
insertions(+), 4 deletions(-) diff --git a/lib/embeddings/semantic_search.rb b/lib/embeddings/semantic_search.rb index 2072ff5b5..6d44c1f73 100644 --- a/lib/embeddings/semantic_search.rb +++ b/lib/embeddings/semantic_search.rb @@ -20,7 +20,7 @@ def initialize(guardian) def cached_query?(query) digest = OpenSSL::Digest::SHA1.hexdigest(query) - hyde_model_id = find_ai_hyde_model_id + hyde_model_id = self.class.find_ai_hyde_model_id embedding_key = build_embedding_key(digest, hyde_model_id, SiteSetting.ai_embeddings_selected_model) @@ -33,7 +33,7 @@ def vector def hyde_embedding(search_term) digest = OpenSSL::Digest::SHA1.hexdigest(search_term) - hyde_model_id = find_ai_hyde_model_id + hyde_model_id = self.class.find_ai_hyde_model_id hyde_key = build_hyde_key(digest, hyde_model_id) embedding_key = @@ -105,7 +105,7 @@ def quick_search(query) max_semantic_results_per_page = 100 search = Search.new(query, { guardian: guardian }) search_term = search.term - hyde_model_id = find_ai_hyde_model_id + hyde_model_id = self.class.find_ai_hyde_model_id return [] if search_term.nil? 
|| search_term.length < SiteSetting.min_search_term_length @@ -212,7 +212,7 @@ def find_ai_hyde_model(persona_klass) end end - def find_ai_hyde_model_id + def self.find_ai_hyde_model_id persona_llm_id = AiPersona.find_by( id: SiteSetting.ai_embeddings_semantic_search_hyde_persona, From 4b03cdcc96c530acccf95c3663d9eecb2e36974d Mon Sep 17 00:00:00 2001 From: Keegan George Date: Thu, 17 Jul 2025 13:31:26 -0700 Subject: [PATCH 28/34] FIX: LLM controller spec --- .../requests/admin/ai_llms_controller_spec.rb | 30 +++++++++++-------- 1 file changed, 17 insertions(+), 13 deletions(-) diff --git a/spec/requests/admin/ai_llms_controller_spec.rb b/spec/requests/admin/ai_llms_controller_spec.rb index cf79c9dcf..b9ed8226a 100644 --- a/spec/requests/admin/ai_llms_controller_spec.rb +++ b/spec/requests/admin/ai_llms_controller_spec.rb @@ -49,20 +49,19 @@ it "lists enabled features on appropriate LLMs" do SiteSetting.ai_bot_enabled = true + fake_model = assign_fake_provider_to(:ai_default_llm_model) # setting the setting calls the model DiscourseAi::Completions::Llm.with_prepared_responses(["OK"]) do - assign_fake_provider_to(:ai_default_llm_model) + SiteSetting.ai_helper_proofreader_persona = ai_persona.id SiteSetting.ai_helper_enabled = true end DiscourseAi::Completions::Llm.with_prepared_responses(["OK"]) do - assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_summarization_enabled = true end DiscourseAi::Completions::Llm.with_prepared_responses(["OK"]) do - assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_embeddings_semantic_search_enabled = true end @@ -71,15 +70,18 @@ llms = response.parsed_body["ai_llms"] model_json = llms.find { |m| m["id"] == llm_model.id } - expect(model_json["used_by"]).to contain_exactly( - { "type" => "ai_bot" }, - { "type" => "ai_helper" }, - ) + expect(model_json["used_by"]).to contain_exactly({ "type" => "ai_bot" }) model2_json = llms.find { |m| m["id"] == llm_model2.id } expect(model2_json["used_by"]).to contain_exactly( { 
"type" => "ai_persona", "name" => "Cool persona", "id" => ai_persona.id }, + { "type" => "ai_helper", "name" => "Proofread text" }, + ) + + model3_json = llms.find { |m| m["id"] == fake_model.id } + + expect(model3_json["used_by"]).to contain_exactly( { "type" => "ai_summarization" }, { "type" => "ai_embeddings_semantic_search" }, ) @@ -514,13 +516,15 @@ expect(history.subject).to eq(model_display_name) # Verify subject is set to display_name end - it "validates the model is not in use" do - fake_llm = assign_fake_provider_to(:ai_helper_model) + context "with llms configured" do + fab!(:ai_persona) { Fabricate(:ai_persona, default_llm_id: llm_model.id) } - delete "/admin/plugins/discourse-ai/ai-llms/#{fake_llm.id}.json" - - expect(response.status).to eq(409) - expect(fake_llm.reload).to eq(fake_llm) + before { assign_fake_provider_to(:ai_helper_model) } + it "validates the model is not in use" do + delete "/admin/plugins/discourse-ai/ai-llms/#{llm_model.id}.json" + expect(response.status).to eq(409) + expect(llm_model.reload).to eq(llm_model) + end end it "cleans up companion users before deleting the model" do From cc3488207476dc16c214e454315ed6580f00db85 Mon Sep 17 00:00:00 2001 From: Keegan George Date: Thu, 17 Jul 2025 14:34:29 -0700 Subject: [PATCH 29/34] FIX: rest of specs --- lib/configuration/llm_enumerator.rb | 8 ++------ lib/configuration/llm_validator.rb | 5 ----- spec/configuration/llm_enumerator_spec.rb | 15 ++++++++++++--- spec/lib/completions/endpoints/nova_spec.rb | 6 +++--- spec/requests/admin/ai_llms_controller_spec.rb | 16 ++++++++-------- .../services/problem_check/ai_llm_status_spec.rb | 2 ++ 6 files changed, 27 insertions(+), 25 deletions(-) diff --git a/lib/configuration/llm_enumerator.rb b/lib/configuration/llm_enumerator.rb index f1d5a29b9..cd2c33d54 100644 --- a/lib/configuration/llm_enumerator.rb +++ b/lib/configuration/llm_enumerator.rb @@ -87,12 +87,8 @@ def self.global_usage ) .each do |model_text, name, id| next if model_text.blank? 
- model_id = model_text.split("custom:").last.to_i - if model_id.present? - if model_text =~ /custom:(\d+)/ - rval[model_id] << { type: :automation, name: name, id: id } - end - end + model_id = model_text.to_i + rval[model_id] << { type: :automation, name: name, id: id } if model_id.present? end end diff --git a/lib/configuration/llm_validator.rb b/lib/configuration/llm_validator.rb index ff63d8d37..0e22b6eee 100644 --- a/lib/configuration/llm_validator.rb +++ b/lib/configuration/llm_validator.rb @@ -31,11 +31,6 @@ def valid_value?(val) end def run_test(val) - if Rails.env.test? - # In test mode, we assume the model is reachable. - return true - end - DiscourseAi::Completions::Llm .proxy(val) .generate("How much is 1 + 1?", user: nil, feature_name: "llm_validator") diff --git a/spec/configuration/llm_enumerator_spec.rb b/spec/configuration/llm_enumerator_spec.rb index 64c250d02..d666d346c 100644 --- a/spec/configuration/llm_enumerator_spec.rb +++ b/spec/configuration/llm_enumerator_spec.rb @@ -2,6 +2,7 @@ RSpec.describe DiscourseAi::Configuration::LlmEnumerator do fab!(:fake_model) + fab!(:ai_persona) { Fabricate(:ai_persona, default_llm_id: fake_model.id) } fab!(:llm_model) fab!(:seeded_model) fab!(:automation) do @@ -42,8 +43,15 @@ describe "#global_usage" do it "returns a hash of Llm models in use globally" do assign_fake_provider_to(:ai_default_llm_model) + SiteSetting.ai_helper_proofreader_persona = ai_persona.id SiteSetting.ai_helper_enabled = true - expect(described_class.global_usage).to eq(fake_model.id => [{ type: :ai_helper }]) + expect(described_class.global_usage).to eq( + fake_model.id => [{ type: :ai_helper }], + fake_model.id => [ + { id: ai_persona.id, name: ai_persona.name, type: :ai_persona }, + { name: "Proofread text", type: :ai_helper }, + ], + ) end it "returns information about automation rules" do @@ -51,7 +59,7 @@ component: "text", name: "model", metadata: { - value: fake_model.id, + value: llm_model.id, }, target: "script", ) @@ -59,7 
+67,8 @@ usage = described_class.global_usage expect(usage).to eq( - { fake_model.id => [{ type: :automation, name: "some automation", id: automation.id }] }, + fake_model.id => [{ id: ai_persona.id, name: ai_persona.name, type: :ai_persona }], + llm_model.id => [{ id: automation.id, name: automation.name, type: :automation }], ) end end diff --git a/spec/lib/completions/endpoints/nova_spec.rb b/spec/lib/completions/endpoints/nova_spec.rb index 7de21541c..e75a5bd09 100644 --- a/spec/lib/completions/endpoints/nova_spec.rb +++ b/spec/lib/completions/endpoints/nova_spec.rb @@ -28,7 +28,7 @@ def encode_message(message) end it "should be able to make a simple request" do - proxy = DiscourseAi::Completions::Llm.proxy(model) + proxy = DiscourseAi::Completions::Llm.proxy(nova_model) content = { "output" => { @@ -90,7 +90,7 @@ def encode_message(message) stub_request(:post, stream_url).to_return(status: 200, body: messages.join) - proxy = DiscourseAi::Completions::Llm.proxy(model) + proxy = DiscourseAi::Completions::Llm.proxy(nova_model) responses = [] proxy.generate("Hello!", user: user) { |partial| responses << partial } @@ -104,7 +104,7 @@ def encode_message(message) #model.provider_params["disable_native_tools"] = true #model.save! 
- proxy = DiscourseAi::Completions::Llm.proxy(model) + proxy = DiscourseAi::Completions::Llm.proxy(nova_model) prompt = DiscourseAi::Completions::Prompt.new( "You are a helpful assistant.", diff --git a/spec/requests/admin/ai_llms_controller_spec.rb b/spec/requests/admin/ai_llms_controller_spec.rb index b9ed8226a..084c4b3bb 100644 --- a/spec/requests/admin/ai_llms_controller_spec.rb +++ b/spec/requests/admin/ai_llms_controller_spec.rb @@ -473,16 +473,16 @@ error_type: "validation", } - WebMock.stub_request(:post, test_attrs[:url]).to_return( - status: 422, - body: error_message.to_json, - ) + error = + DiscourseAi::Completions::Endpoints::Base::CompletionFailed.new(error_message.to_json) - get "/admin/plugins/discourse-ai/ai-llms/test.json", params: { ai_llm: test_attrs } + DiscourseAi::Completions::Llm.with_prepared_responses([error]) do + get "/admin/plugins/discourse-ai/ai-llms/test.json", params: { ai_llm: test_attrs } - expect(response).to be_successful - expect(response.parsed_body["success"]).to eq(false) - expect(response.parsed_body["error"]).to eq(error_message.to_json) + expect(response).to be_successful + expect(response.parsed_body["success"]).to eq(false) + expect(response.parsed_body["error"]).to eq(error_message.to_json) + end end end end diff --git a/spec/services/problem_check/ai_llm_status_spec.rb b/spec/services/problem_check/ai_llm_status_spec.rb index 2778b46d3..5c846645d 100644 --- a/spec/services/problem_check/ai_llm_status_spec.rb +++ b/spec/services/problem_check/ai_llm_status_spec.rb @@ -6,6 +6,7 @@ subject(:check) { described_class.new } fab!(:llm_model) + fab!(:ai_persona) { Fabricate(:ai_persona, default_llm_id: llm_model.id) } let(:post_url) { "https://api.openai.com/v1/chat/completions" } let(:success_response) do @@ -27,6 +28,7 @@ before do stub_request(:post, post_url).to_return(status: 200, body: success_response, headers: {}) assign_fake_provider_to(:ai_default_llm_model) + SiteSetting.ai_summarization_persona = ai_persona.id 
SiteSetting.ai_summarization_enabled = true end From eb93e1736b11b29f957ea518c57431db16299200 Mon Sep 17 00:00:00 2001 From: Keegan George Date: Thu, 17 Jul 2025 14:46:15 -0700 Subject: [PATCH 30/34] =?UTF-8?q?Final=20spec=20fix=20=F0=9F=A4=9E?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- spec/configuration/llm_validator_spec.rb | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/spec/configuration/llm_validator_spec.rb b/spec/configuration/llm_validator_spec.rb index 9e91a8833..16a5fc776 100644 --- a/spec/configuration/llm_validator_spec.rb +++ b/spec/configuration/llm_validator_spec.rb @@ -5,6 +5,7 @@ describe DiscourseAi::Configuration::LlmValidator do describe "#valid_value?" do let(:validator) { described_class.new(name: :ai_default_llm_model) } + fab!(:llm_model) before do assign_fake_provider_to(:ai_default_llm_model) @@ -34,7 +35,10 @@ it "returns true for non-empty values regardless of module state" do SiteSetting.ai_helper_enabled = true SiteSetting.ai_summarization_enabled = true - expect(validator.valid_value?("some_model")).to eq(true) + + DiscourseAi::Completions::Llm.with_prepared_responses([true]) do + expect(validator.valid_value?(llm_model)).to eq(true) + end end end end From f420261196f5e59a6d1c3b5f7fc461f1c1aea1fb Mon Sep 17 00:00:00 2001 From: Keegan George Date: Fri, 18 Jul 2025 12:17:08 -0700 Subject: [PATCH 31/34] DEV: Apply feedback from review --- ..._summarization_model_to_persona_default.rb | 14 ++++++++-- ...copy_ai_helper_model_to_persona_default.rb | 28 ++++++++++++++++--- ...250710215720_copy_hyde_model_to_persona.rb | 16 +++++++++-- ...05451_copy_translation_model_to_persona.rb | 18 +++++++++++- ..._image_caption_model_to_persona_default.rb | 13 ++++++++- lib/ai_helper/chat_thread_titler.rb | 4 +++ lib/ai_helper/painter.rb | 8 +++++- lib/translation.rb | 21 ++++++++++++-- 8 files changed, 109 insertions(+), 13 deletions(-) diff --git 
a/db/migrate/20250710180401_copy_ai_summarization_model_to_persona_default.rb b/db/migrate/20250710180401_copy_ai_summarization_model_to_persona_default.rb index ef286cb3f..98c0bb703 100644 --- a/db/migrate/20250710180401_copy_ai_summarization_model_to_persona_default.rb +++ b/db/migrate/20250710180401_copy_ai_summarization_model_to_persona_default.rb @@ -8,11 +8,21 @@ def up # Extract the model ID from the setting value (e.g., "custom:-5" -> "-5") model_id = ai_summarization_model.split(":").last - # Update the summarization personas (IDs -11 and -12) with the extracted model ID + persona_settings = %w[ai_summarization_persona ai_summary_gists_persona] + default_persona_ids = [-11, -12] + + persona_ids_query = + persona_settings + .map { |setting| "SELECT value FROM site_settings WHERE name = '#{setting}'" } + .join(" UNION ") + persona_ids = DB.query_single(persona_ids_query).compact + all_persona_ids = (default_persona_ids + persona_ids).map(&:to_i).uniq.join(",") + + # Update the summarization personas with the extracted model ID execute(<<~SQL) UPDATE ai_personas SET default_llm_id = #{model_id} - WHERE id IN (-11, -12) AND default_llm_id IS NULL + WHERE id IN (#{all_persona_ids}) AND default_llm_id IS NULL SQL end end diff --git a/db/migrate/20250710181656_copy_ai_helper_model_to_persona_default.rb b/db/migrate/20250710181656_copy_ai_helper_model_to_persona_default.rb index 9657ea23f..996cfa746 100644 --- a/db/migrate/20250710181656_copy_ai_helper_model_to_persona_default.rb +++ b/db/migrate/20250710181656_copy_ai_helper_model_to_persona_default.rb @@ -8,12 +8,32 @@ def up # Extract the model ID from the setting value (e.g., "custom:1" -> "1") model_id = ai_helper_model.split(":").last + # Get persona IDs from site settings + persona_settings = %w[ + ai_helper_proofreader_persona + ai_helper_title_suggestions_persona + ai_helper_explain_persona + ai_helper_smart_dates_persona + ai_helper_translator_persona + ai_helper_markdown_tables_persona + 
ai_helper_custom_prompt_persona + ] + + default_persona_ids = [-18, -19, -20, -22, -23, -24, -25] + persona_ids_query = + persona_settings + .map { |setting| "SELECT value FROM site_settings WHERE name = '#{setting}'" } + .join(" UNION ") + persona_ids = DB.query_single(persona_ids_query).compact + + all_persona_ids = (default_persona_ids + persona_ids).map(&:to_i).uniq.join(",") + # Update the helper personas with the extracted model ID execute(<<~SQL) - UPDATE ai_personas - SET default_llm_id = #{model_id} - WHERE id IN (-18, -19, -20, -21, -22, -23, -24, -25) AND default_llm_id IS NULL - SQL + UPDATE ai_personas + SET default_llm_id = #{model_id} + WHERE id IN (#{all_persona_ids}) AND default_llm_id IS NULL + SQL end end diff --git a/db/migrate/20250710215720_copy_hyde_model_to_persona.rb b/db/migrate/20250710215720_copy_hyde_model_to_persona.rb index 982ab0e5f..80c3f7d04 100644 --- a/db/migrate/20250710215720_copy_hyde_model_to_persona.rb +++ b/db/migrate/20250710215720_copy_hyde_model_to_persona.rb @@ -2,17 +2,29 @@ class CopyHydeModelToPersona < ActiveRecord::Migration[7.2] def up hyde_model = - DB.query_single("SELECT value FROM site_settings WHERE name = 'ai_embeddings_semantic_search_hyde_model'").first + DB.query_single( + "SELECT value FROM site_settings WHERE name = 'ai_embeddings_semantic_search_hyde_model'", + ).first if hyde_model.present? 
&& hyde_model.start_with?("custom:") # Extract the model ID from the setting value (e.g., "custom:1" -> "1") model_id = hyde_model.split(":").last + persona_settings = %w[ai_embeddings_semantic_search_hyde_persona] + default_persona_ids = [-32] + persona_ids_query = + persona_settings + .map { |setting| "SELECT value FROM site_settings WHERE name = '#{setting}'" } + .join(" UNION ") + persona_ids = DB.query_single(persona_ids_query).compact + + all_persona_ids = (default_persona_ids + persona_ids).map(&:to_i).uniq.join(",") + # Update the hyde persona with the extracted model ID execute(<<~SQL) UPDATE ai_personas SET default_llm_id = #{model_id} - WHERE id IN (-32) AND default_llm_id IS NULL + WHERE id IN (#{all_persona_ids}) AND default_llm_id IS NULL SQL end end diff --git a/db/migrate/20250716005451_copy_translation_model_to_persona.rb b/db/migrate/20250716005451_copy_translation_model_to_persona.rb index 478a6c465..aa1bb69ed 100644 --- a/db/migrate/20250716005451_copy_translation_model_to_persona.rb +++ b/db/migrate/20250716005451_copy_translation_model_to_persona.rb @@ -8,11 +8,27 @@ def up # Extract the model ID from the setting value (e.g., "custom:-5" -> "-5") model_id = ai_translation_model.split(":").last + persona_settings = %w[ + ai_translation_locale_detector_persona + ai_translation_post_raw_translator_persona + ai_translation_topic_title_translator_persona + ai_translation_short_text_translator_persona + ] + default_persona_ids = [-27, -28, -29, -30] + + persona_ids_query = + persona_settings + .map { |setting| "SELECT value FROM site_settings WHERE name = '#{setting}'" } + .join(" UNION ") + persona_ids = DB.query_single(persona_ids_query).compact + + all_persona_ids = (default_persona_ids + persona_ids).map(&:to_i).uniq.join(",") + # Update the translation personas (IDs -27, -28, -29, -30) with the extracted model ID execute(<<~SQL) UPDATE ai_personas SET default_llm_id = #{model_id} - WHERE id IN (-27, -28, -29, -30) AND default_llm_id IS NULL + 
WHERE id IN (#{all_persona_ids}) AND default_llm_id IS NULL SQL end end diff --git a/db/migrate/20250716005855_copy_ai_image_caption_model_to_persona_default.rb b/db/migrate/20250716005855_copy_ai_image_caption_model_to_persona_default.rb index a797c1b31..9c7ce9b02 100644 --- a/db/migrate/20250716005855_copy_ai_image_caption_model_to_persona_default.rb +++ b/db/migrate/20250716005855_copy_ai_image_caption_model_to_persona_default.rb @@ -11,11 +11,22 @@ def up # Extract the model ID from the setting value (e.g., "custom:1" -> "1") model_id = ai_helper_image_caption_model.split(":").last + persona_settings = %w[ai_helper_post_illustrator_persona ai_helper_image_caption_persona] + default_persona_ids = [-21, -26] + + persona_ids_query = + persona_settings + .map { |setting| "SELECT value FROM site_settings WHERE name = '#{setting}'" } + .join(" UNION ") + persona_ids = DB.query_single(persona_ids_query).compact + + all_persona_ids = (default_persona_ids + persona_ids).map(&:to_i).uniq.join(",") + # Update the helper personas with the extracted model ID execute(<<~SQL) UPDATE ai_personas SET default_llm_id = #{model_id} - WHERE id IN (-26) AND default_llm_id IS NULL + WHERE id IN (#{all_persona_ids}) AND default_llm_id IS NULL SQL end end diff --git a/lib/ai_helper/chat_thread_titler.rb b/lib/ai_helper/chat_thread_titler.rb index 233bfe14a..01c7291e7 100644 --- a/lib/ai_helper/chat_thread_titler.rb +++ b/lib/ai_helper/chat_thread_titler.rb @@ -12,10 +12,14 @@ def suggested_title return nil if content.blank? suggested_title = call_llm(content) + return nil if suggested_title.blank? + cleanup(suggested_title) end def call_llm(thread_content) + return nil if SiteSetting.ai_default_llm_model.blank?
+ chat = "\n#{thread_content}\n" prompt = diff --git a/lib/ai_helper/painter.rb b/lib/ai_helper/painter.rb index 3e51534fe..8e3881c30 100644 --- a/lib/ai_helper/painter.rb +++ b/lib/ai_helper/painter.rb @@ -57,6 +57,12 @@ def base64_to_image(artifacts, user_id) end def diffusion_prompt(text, user) + llm_model = + AiPersona.find_by(id: SiteSetting.ai_helper_post_illustrator_persona)&.default_llm_id || + SiteSetting.ai_default_llm_model + + return nil if llm_model.blank? + prompt = DiscourseAi::Completions::Prompt.new( <<~TEXT.strip, @@ -66,7 +72,7 @@ def diffusion_prompt(text, user) messages: [{ type: :user, content: text, id: user.username }], ) - DiscourseAi::Completions::Llm.proxy(SiteSetting.ai_default_llm_model).generate( + DiscourseAi::Completions::Llm.proxy(llm_model).generate( prompt, user: user, feature_name: "illustrate_post", diff --git a/lib/translation.rb b/lib/translation.rb index 73bc50a73..cfad97e54 100644 --- a/lib/translation.rb +++ b/lib/translation.rb @@ -3,11 +3,28 @@ module DiscourseAi module Translation def self.enabled? - SiteSetting.discourse_ai_enabled && SiteSetting.ai_translation_enabled && - SiteSetting.ai_default_llm_model.present? && + SiteSetting.discourse_ai_enabled && SiteSetting.ai_translation_enabled && has_llm_model? && SiteSetting.content_localization_supported_locales.present? end + def self.has_llm_model? + persona_ids = [ + SiteSetting.ai_translation_locale_detector_persona, + SiteSetting.ai_translation_post_raw_translator_persona, + SiteSetting.ai_translation_topic_title_translator_persona, + SiteSetting.ai_translation_short_text_translator_persona, + ] + + persona_default_llms = AiPersona.where(id: persona_ids).pluck(:default_llm_id) + default_llm_model = SiteSetting.ai_default_llm_model + + if persona_default_llms.any?(&:blank?) && default_llm_model.blank? + false + else + true + end + end + def self.backfill_enabled? enabled? 
&& SiteSetting.ai_translation_backfill_hourly_rate > 0 && SiteSetting.ai_translation_backfill_max_age_days > 0 From fbbe01e576e5fe52e7bbbd5753eae8375864b57c Mon Sep 17 00:00:00 2001 From: Keegan George Date: Mon, 21 Jul 2025 07:45:27 -0700 Subject: [PATCH 32/34] DEV: Hide seeded LLMs in the enumerator for now --- lib/configuration/llm_enumerator.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/configuration/llm_enumerator.rb b/lib/configuration/llm_enumerator.rb index cd2c33d54..dca18d834 100644 --- a/lib/configuration/llm_enumerator.rb +++ b/lib/configuration/llm_enumerator.rb @@ -119,7 +119,7 @@ def self.values_for_serialization(allowed_seeded_llm_ids: nil) builder.query_hash.map(&:symbolize_keys) end - def self.values(allowed_seeded_llms: nil) + def self.values(allowed_seeded_llms: []) values = DB.query_hash(<<~SQL).map(&:symbolize_keys) SELECT display_name AS name, id AS value FROM llm_models From 7dee59c97fb437733cac1ce6559eaa9d2dd76c11 Mon Sep 17 00:00:00 2001 From: Keegan George Date: Mon, 21 Jul 2025 08:08:20 -0700 Subject: [PATCH 33/34] DEV: Remove no longer needed seeded model check --- app/models/ai_persona.rb | 12 ------------ spec/models/ai_persona_spec.rb | 14 -------------- 2 files changed, 26 deletions(-) diff --git a/app/models/ai_persona.rb b/app/models/ai_persona.rb index 4c40b5c8f..694cde780 100644 --- a/app/models/ai_persona.rb +++ b/app/models/ai_persona.rb @@ -12,7 +12,6 @@ class AiPersona < ActiveRecord::Base validates :system_prompt, presence: true, length: { maximum: 10_000_000 } validate :system_persona_unchangeable, on: :update, if: :system validate :chat_preconditions - validate :allowed_seeded_model, if: :default_llm_id validate :well_formated_examples validates :max_context_posts, numericality: { greater_than: 0 }, allow_nil: true # leaves some room for growth but sets a maximum to avoid memory issues @@ -365,17 +364,6 @@ def ensure_not_system end end - def allowed_seeded_model - return if default_llm_id.blank? 
- - return if default_llm.nil? - return if !default_llm.seeded? - - return if SiteSetting.ai_bot_allowed_seeded_models_map.include?(default_llm.id.to_s) - - errors.add(:default_llm, I18n.t("discourse_ai.llm.configuration.invalid_seeded_model")) - end - def well_formated_examples return if examples.blank? diff --git a/spec/models/ai_persona_spec.rb b/spec/models/ai_persona_spec.rb index 0e6b9d13d..46d59e527 100644 --- a/spec/models/ai_persona_spec.rb +++ b/spec/models/ai_persona_spec.rb @@ -211,20 +211,6 @@ ) end - it "validates allowed seeded model" do - basic_persona.default_llm_id = seeded_llm_model.id - - SiteSetting.ai_bot_allowed_seeded_models = "" - - expect(basic_persona.valid?).to eq(false) - expect(basic_persona.errors[:default_llm]).to include( - I18n.t("discourse_ai.llm.configuration.invalid_seeded_model"), - ) - - SiteSetting.ai_bot_allowed_seeded_models = "-1" - expect(basic_persona.valid?).to eq(true) - end - it "does not leak caches between sites" do AiPersona.create!( name: "pun_bot", From 822594f3e33fccd0056fde389a18ea43cc0be0f6 Mon Sep 17 00:00:00 2001 From: Roman Rizzi Date: Mon, 21 Jul 2025 18:00:01 -0300 Subject: [PATCH 34/34] Update db/migrate/20250716005855_copy_ai_image_caption_model_to_persona_default.rb --- ...0716005855_copy_ai_image_caption_model_to_persona_default.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/db/migrate/20250716005855_copy_ai_image_caption_model_to_persona_default.rb b/db/migrate/20250716005855_copy_ai_image_caption_model_to_persona_default.rb index 9c7ce9b02..b2a02ab0e 100644 --- a/db/migrate/20250716005855_copy_ai_image_caption_model_to_persona_default.rb +++ b/db/migrate/20250716005855_copy_ai_image_caption_model_to_persona_default.rb @@ -11,7 +11,7 @@ def up # Extract the model ID from the setting value (e.g., "custom:1" -> "1") model_id = ai_helper_image_caption_model.split(":").last - persona_settings = %w[ai_helper_post_illustrator_persona ai_helper_image_caption_persona,] + 
persona_settings = %w[ai_helper_post_illustrator_persona ai_helper_image_caption_persona] default_persona_ids = [-21, -26] persona_ids_query =