diff --git a/app/models/ai_persona.rb b/app/models/ai_persona.rb index 4c40b5c8f..694cde780 100644 --- a/app/models/ai_persona.rb +++ b/app/models/ai_persona.rb @@ -12,7 +12,6 @@ class AiPersona < ActiveRecord::Base validates :system_prompt, presence: true, length: { maximum: 10_000_000 } validate :system_persona_unchangeable, on: :update, if: :system validate :chat_preconditions - validate :allowed_seeded_model, if: :default_llm_id validate :well_formated_examples validates :max_context_posts, numericality: { greater_than: 0 }, allow_nil: true # leaves some room for growth but sets a maximum to avoid memory issues @@ -365,17 +364,6 @@ def ensure_not_system end end - def allowed_seeded_model - return if default_llm_id.blank? - - return if default_llm.nil? - return if !default_llm.seeded? - - return if SiteSetting.ai_bot_allowed_seeded_models_map.include?(default_llm.id.to_s) - - errors.add(:default_llm, I18n.t("discourse_ai.llm.configuration.invalid_seeded_model")) - end - def well_formated_examples return if examples.blank? 
diff --git a/app/models/llm_model.rb b/app/models/llm_model.rb index 937c5373b..7834d065b 100644 --- a/app/models/llm_model.rb +++ b/app/models/llm_model.rb @@ -99,7 +99,7 @@ def to_llm end def identifier - "custom:#{id}" + "#{id}" end def toggle_companion_user diff --git a/assets/javascripts/discourse/components/ai-default-llm-selector.gjs b/assets/javascripts/discourse/components/ai-default-llm-selector.gjs new file mode 100644 index 000000000..9a28c7b75 --- /dev/null +++ b/assets/javascripts/discourse/components/ai-default-llm-selector.gjs @@ -0,0 +1,46 @@ +import Component from "@glimmer/component"; +import { tracked } from "@glimmer/tracking"; +import { ajax } from "discourse/lib/ajax"; +import { i18n } from "discourse-i18n"; +import SiteSettingComponent from "admin/components/site-setting"; +import SiteSetting from "admin/models/site-setting"; + +export default class AiDefaultLlmSelector extends Component { + @tracked defaultLlmSetting = null; + + constructor() { + super(...arguments); + this.#loadDefaultLlmSetting(); + } + + async #loadDefaultLlmSetting() { + const { site_settings } = await ajax("/admin/config/site_settings.json", { + data: { + plugin: "discourse-ai", + category: "discourse_ai", + }, + }); + + const defaultLlmSetting = site_settings.find( + (setting) => setting.setting === "ai_default_llm_model" + ); + + this.defaultLlmSetting = SiteSetting.create(defaultLlmSetting); + } + + +} diff --git a/assets/javascripts/discourse/components/ai-features.gjs b/assets/javascripts/discourse/components/ai-features.gjs index 45f1b12ef..8c762fc0d 100644 --- a/assets/javascripts/discourse/components/ai-features.gjs +++ b/assets/javascripts/discourse/components/ai-features.gjs @@ -9,6 +9,7 @@ import DPageSubheader from "discourse/components/d-page-subheader"; import DSelect from "discourse/components/d-select"; import FilterInput from "discourse/components/filter-input"; import { i18n } from "discourse-i18n"; +import AiDefaultLlmSelector from 
"./ai-default-llm-selector"; import AiFeaturesList from "./ai-features-list"; const ALL = "all"; @@ -202,6 +203,8 @@ export default class AiFeatures extends Component { /> + + {{#if this.filteredFeatures.length}} {{else}} diff --git a/assets/javascripts/discourse/components/ai-llms-list-editor.gjs b/assets/javascripts/discourse/components/ai-llms-list-editor.gjs index 08f1715c8..8d70b4350 100644 --- a/assets/javascripts/discourse/components/ai-llms-list-editor.gjs +++ b/assets/javascripts/discourse/components/ai-llms-list-editor.gjs @@ -9,6 +9,7 @@ import I18n, { i18n } from "discourse-i18n"; import AdminSectionLandingItem from "admin/components/admin-section-landing-item"; import AdminSectionLandingWrapper from "admin/components/admin-section-landing-wrapper"; import DTooltip from "float-kit/components/d-tooltip"; +import AiDefaultLlmSelector from "./ai-default-llm-selector"; import AiLlmEditor from "./ai-llm-editor"; function isPreseeded(llm) { @@ -137,6 +138,9 @@ export default class AiLlmsListEditor extends Component { }} @learnMoreUrl="https://meta.discourse.org/t/discourse-ai-large-language-model-llm-settings-page/319903" /> + + + {{#if this.hasLlmElements}}
"-5") + model_id = ai_summarization_model.split(":").last + + persona_settings = %w[ai_summarization_persona ai_summary_gists_persona] + default_persona_ids = [-11, -12] + + persona_ids_query = + persona_settings + .map { |setting| "SELECT value FROM site_settings WHERE name = '#{setting}'" } + .join(" UNION ") + persona_ids = DB.query_single(persona_ids_query).compact + all_persona_ids = (default_persona_ids + persona_ids).map(&:to_i).uniq.join(",") + + # Update the summarization personas with the extracted model ID + execute(<<~SQL) + UPDATE ai_personas + SET default_llm_id = #{model_id} + WHERE id IN (#{all_persona_ids}) AND default_llm_id IS NULL + SQL + end + end + + def down + raise ActiveRecord::IrreversibleMigration + end +end diff --git a/db/migrate/20250710181656_copy_ai_helper_model_to_persona_default.rb b/db/migrate/20250710181656_copy_ai_helper_model_to_persona_default.rb new file mode 100644 index 000000000..996cfa746 --- /dev/null +++ b/db/migrate/20250710181656_copy_ai_helper_model_to_persona_default.rb @@ -0,0 +1,43 @@ +# frozen_string_literal: true +class CopyAiHelperModelToPersonaDefault < ActiveRecord::Migration[7.2] + def up + ai_helper_model = + DB.query_single("SELECT value FROM site_settings WHERE name = 'ai_helper_model'").first + + if ai_helper_model.present? 
&& ai_helper_model.start_with?("custom:") + # Extract the model ID from the setting value (e.g., "custom:1" -> "1") + model_id = ai_helper_model.split(":").last + + # Get persona IDs from site settings + persona_settings = %w[ + ai_helper_proofreader_persona + ai_helper_title_suggestions_persona + ai_helper_explain_persona + ai_helper_smart_dates_persona + ai_helper_translator_persona + ai_helper_markdown_tables_persona + ai_helper_custom_prompt_persona + ] + + default_persona_ids = [-18, -19, -20, -22, -23, -24, -25] + persona_ids_query = + persona_settings + .map { |setting| "SELECT value FROM site_settings WHERE name = '#{setting}'" } + .join(" UNION ") + persona_ids = DB.query_single(persona_ids_query).compact + + all_persona_ids = (default_persona_ids + persona_ids).map(&:to_i).uniq.join(",") + + # Update the helper personas with the extracted model ID + execute(<<~SQL) + UPDATE ai_personas + SET default_llm_id = #{model_id} + WHERE id IN (#{all_persona_ids}) AND default_llm_id IS NULL + SQL + end + end + + def down + raise ActiveRecord::IrreversibleMigration + end +end diff --git a/db/migrate/20250710215720_copy_hyde_model_to_persona.rb b/db/migrate/20250710215720_copy_hyde_model_to_persona.rb new file mode 100644 index 000000000..80c3f7d04 --- /dev/null +++ b/db/migrate/20250710215720_copy_hyde_model_to_persona.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true +class CopyHydeModelToPersona < ActiveRecord::Migration[7.2] + def up + hyde_model = + DB.query_single( + "SELECT value FROM site_settings WHERE name = 'ai_embeddings_semantic_search_hyde_model'", + ).first + + if hyde_model.present? 
&& hyde_model.start_with?("custom:") + # Extract the model ID from the setting value (e.g., "custom:1" -> "1") + model_id = hyde_model.split(":").last + + persona_settings = %w[ai_embeddings_semantic_search_hyde_persona] + default_persona_ids = [-32] + persona_ids_query = + persona_settings + .map { |setting| "SELECT value FROM site_settings WHERE name = '#{setting}'" } + .join(" UNION ") + persona_ids = DB.query_single(persona_ids_query).compact + + all_persona_ids = (default_persona_ids + persona_ids).map(&:to_i).uniq.join(",") + + # Update the hyde persona with the extracted model ID + execute(<<~SQL) + UPDATE ai_personas + SET default_llm_id = #{model_id} + WHERE id IN (#{all_persona_ids}) AND default_llm_id IS NULL + SQL + end + end + + def down + raise ActiveRecord::IrreversibleMigration + end +end diff --git a/db/migrate/20250716005451_copy_translation_model_to_persona.rb b/db/migrate/20250716005451_copy_translation_model_to_persona.rb new file mode 100644 index 000000000..aa1bb69ed --- /dev/null +++ b/db/migrate/20250716005451_copy_translation_model_to_persona.rb @@ -0,0 +1,39 @@ +# frozen_string_literal: true +class CopyTranslationModelToPersona < ActiveRecord::Migration[7.2] + def up + ai_translation_model = + DB.query_single("SELECT value FROM site_settings WHERE name = 'ai_translation_model'").first + + if ai_translation_model.present? 
&& ai_translation_model.start_with?("custom:") + # Extract the model ID from the setting value (e.g., "custom:-5" -> "-5") + model_id = ai_translation_model.split(":").last + + persona_settings = %w[ + ai_translation_locale_detector_persona + ai_translation_post_raw_translator_persona + ai_translation_topic_title_translator_persona + ai_translation_short_text_translator_persona + ] + default_persona_ids = [-27, -28, -29, -30] + + persona_ids_query = + persona_settings + .map { |setting| "SELECT value FROM site_settings WHERE name = '#{setting}'" } + .join(" UNION ") + persona_ids = DB.query_single(persona_ids_query).compact + + all_persona_ids = (default_persona_ids + persona_ids).map(&:to_i).uniq.join(",") + + # Update the translation personas (IDs -27, -28, -29, -30) with the extracted model ID + execute(<<~SQL) + UPDATE ai_personas + SET default_llm_id = #{model_id} + WHERE id IN (#{all_persona_ids}) AND default_llm_id IS NULL + SQL + end + end + + def down + raise ActiveRecord::IrreversibleMigration + end +end diff --git a/db/migrate/20250716005855_copy_ai_image_caption_model_to_persona_default.rb b/db/migrate/20250716005855_copy_ai_image_caption_model_to_persona_default.rb new file mode 100644 index 000000000..b2a02ab0e --- /dev/null +++ b/db/migrate/20250716005855_copy_ai_image_caption_model_to_persona_default.rb @@ -0,0 +1,37 @@ +# frozen_string_literal: true +class CopyAiImageCaptionModelToPersonaDefault < ActiveRecord::Migration[7.2] + def up + ai_helper_image_caption_model = + DB.query_single( + "SELECT value FROM site_settings WHERE name = 'ai_helper_image_caption_model'", + ).first + + if ai_helper_image_caption_model.present? 
&& + ai_helper_image_caption_model.start_with?("custom:") + # Extract the model ID from the setting value (e.g., "custom:1" -> "1") + model_id = ai_helper_image_caption_model.split(":").last + + persona_settings = %w[ai_helper_post_illustrator_persona ai_helper_image_caption_persona] + default_persona_ids = [-21, -26] + + persona_ids_query = + persona_settings + .map { |setting| "SELECT value FROM site_settings WHERE name = '#{setting}'" } + .join(" UNION ") + persona_ids = DB.query_single(persona_ids_query).compact + + all_persona_ids = (default_persona_ids + persona_ids).map(&:to_i).uniq.join(",") + + # Update the helper personas with the extracted model ID + execute(<<~SQL) + UPDATE ai_personas + SET default_llm_id = #{model_id} + WHERE id IN (#{all_persona_ids}) AND default_llm_id IS NULL + SQL + end + end + + def down + raise ActiveRecord::IrreversibleMigration + end +end diff --git a/lib/ai_helper/assistant.rb b/lib/ai_helper/assistant.rb index be61e415a..01a27675e 100644 --- a/lib/ai_helper/assistant.rb +++ b/lib/ai_helper/assistant.rb @@ -312,18 +312,9 @@ def find_ai_helper_model(helper_mode, persona_klass) # Priorities are: # 1. Persona's default LLM - # 2. Hidden `ai_helper_model` setting, or `ai_helper_image_caption_model` for image_caption. - # 3. Newest LLM config + # 2. SiteSetting.ai_default_llm_id (or newest LLM if not set) def self.find_ai_helper_model(helper_mode, persona_klass) - model_id = persona_klass.default_llm_id - - if !model_id - if helper_mode == IMAGE_CAPTION - model_id = SiteSetting.ai_helper_image_caption_model&.split(":")&.last - else - model_id = SiteSetting.ai_helper_model&.split(":")&.last - end - end + model_id = persona_klass.default_llm_id || SiteSetting.ai_default_llm_model if model_id.present? 
LlmModel.find_by(id: model_id) diff --git a/lib/ai_helper/chat_thread_titler.rb b/lib/ai_helper/chat_thread_titler.rb index 15ffc52ca..01c7291e7 100644 --- a/lib/ai_helper/chat_thread_titler.rb +++ b/lib/ai_helper/chat_thread_titler.rb @@ -12,10 +12,14 @@ def suggested_title return nil if content.blank? suggested_title = call_llm(content) + return nil if suggested_title.blank? + cleanup(suggested_title) end def call_llm(thread_content) + return nil if SiteSetting.ai_default_llm_model.blank? + chat = "\n#{thread_content}\n" prompt = @@ -30,7 +34,7 @@ def call_llm(thread_content) messages: [{ type: :user, content: chat, id: "User" }], ) - DiscourseAi::Completions::Llm.proxy(SiteSetting.ai_helper_model).generate( + DiscourseAi::Completions::Llm.proxy(SiteSetting.ai_default_llm_model).generate( prompt, user: Discourse.system_user, stop_sequences: [""], diff --git a/lib/ai_helper/painter.rb b/lib/ai_helper/painter.rb index 9be8b95a0..8e3881c30 100644 --- a/lib/ai_helper/painter.rb +++ b/lib/ai_helper/painter.rb @@ -57,6 +57,12 @@ def base64_to_image(artifacts, user_id) end def diffusion_prompt(text, user) + llm_model = + AiPersona.find_by(id: SiteSetting.ai_helper_post_illustrator_persona)&.default_llm_id || + SiteSetting.ai_default_llm_model + + return nil if llm_model.blank? 
+ prompt = DiscourseAi::Completions::Prompt.new( <<~TEXT.strip, @@ -66,7 +72,7 @@ def diffusion_prompt(text, user) messages: [{ type: :user, content: text, id: user.username }], ) - DiscourseAi::Completions::Llm.proxy(SiteSetting.ai_helper_model).generate( + DiscourseAi::Completions::Llm.proxy(llm_model).generate( prompt, user: user, feature_name: "illustrate_post", diff --git a/lib/automation.rb b/lib/automation.rb index 0410aed13..fed49ba7c 100644 --- a/lib/automation.rb +++ b/lib/automation.rb @@ -37,7 +37,7 @@ def self.available_models value_h["id"] > 0 || SiteSetting.ai_automation_allowed_seeded_models_map.include?(value_h["id"].to_s) end - .each { |value_h| value_h["id"] = "custom:#{value_h["id"]}" } + .each { |value_h| value_h["id"] = "#{value_h["id"]}" } values end diff --git a/lib/automation/report_runner.rb b/lib/automation/report_runner.rb index 1c207a45a..d4d06b143 100644 --- a/lib/automation/report_runner.rb +++ b/lib/automation/report_runner.rb @@ -74,7 +74,7 @@ def initialize( else I18n.t("discourse_automation.scriptables.llm_report.title") end - @model = LlmModel.find_by(id: model.split(":")&.last) + @model = LlmModel.find_by(id: model) @persona = AiPersona.find(persona_id).class_instance.new @category_ids = category_ids @tags = tags diff --git a/lib/completions/llm.rb b/lib/completions/llm.rb index 70e840049..663c6e68c 100644 --- a/lib/completions/llm.rb +++ b/lib/completions/llm.rb @@ -292,6 +292,8 @@ def proxy(model) llm_model = if model.is_a?(LlmModel) model + elsif model.is_a?(Numeric) + LlmModel.find_by(id: model) else model_name_without_prov = model.split(":").last.to_i diff --git a/lib/configuration/llm_dependency_validator.rb b/lib/configuration/llm_dependency_validator.rb index 0cf715fe0..8ca890be0 100644 --- a/lib/configuration/llm_dependency_validator.rb +++ b/lib/configuration/llm_dependency_validator.rb @@ -10,16 +10,9 @@ def initialize(opts = {}) def valid_value?(val) return true if val == "f" - if @opts[:name] == 
:ai_summarization_enabled || @opts[:name] == :ai_helper_enabled - has_llms = LlmModel.count > 0 - @no_llms_configured = !has_llms - has_llms - else - @llm_dependency_setting_name = - DiscourseAi::Configuration::LlmValidator.new.choose_llm_setting_for(@opts[:name]) + @llm_dependency_setting_name = :ai_default_llm_model - SiteSetting.public_send(@llm_dependency_setting_name).present? - end + SiteSetting.public_send(@llm_dependency_setting_name).present? end def error_message diff --git a/lib/configuration/llm_enumerator.rb b/lib/configuration/llm_enumerator.rb index 200fc0a27..dca18d834 100644 --- a/lib/configuration/llm_enumerator.rb +++ b/lib/configuration/llm_enumerator.rb @@ -22,24 +22,53 @@ def self.global_usage .each { |llm_id, name, id| rval[llm_id] << { type: :ai_persona, name: name, id: id } } if SiteSetting.ai_helper_enabled - model_id = SiteSetting.ai_helper_model.split(":").last.to_i - rval[model_id] << { type: :ai_helper } if model_id != 0 + { + "#{I18n.t("js.discourse_ai.features.ai_helper.proofread")}" => + SiteSetting.ai_helper_proofreader_persona, + "#{I18n.t("js.discourse_ai.features.ai_helper.title_suggestions")}" => + SiteSetting.ai_helper_title_suggestions_persona, + "#{I18n.t("js.discourse_ai.features.ai_helper.explain")}" => + SiteSetting.ai_helper_explain_persona, + "#{I18n.t("js.discourse_ai.features.ai_helper.illustrate_post")}" => + SiteSetting.ai_helper_post_illustrator_persona, + "#{I18n.t("js.discourse_ai.features.ai_helper.smart_dates")}" => + SiteSetting.ai_helper_smart_dates_persona, + "#{I18n.t("js.discourse_ai.features.ai_helper.translator")}" => + SiteSetting.ai_helper_translator_persona, + "#{I18n.t("js.discourse_ai.features.ai_helper.markdown_tables")}" => + SiteSetting.ai_helper_markdown_tables_persona, + "#{I18n.t("js.discourse_ai.features.ai_helper.custom_prompt")}" => + SiteSetting.ai_helper_custom_prompt_persona, + }.each do |helper_type, persona_id| + next if persona_id.blank? 
+ + persona = AiPersona.find_by(id: persona_id) + next if persona.blank? || persona.default_llm_id.blank? + + model_id = persona.default_llm_id || SiteSetting.ai_default_llm_model.to_i + rval[model_id] << { type: :ai_helper, name: helper_type } + end end - if SiteSetting.ai_helper_image_caption_model - model_id = SiteSetting.ai_helper_image_caption_model.split(":").last.to_i - rval[model_id] << { type: :ai_helper_image_caption } if model_id != 0 + if SiteSetting.ai_helper_enabled_features.split("|").include?("image_caption") + image_caption_persona = AiPersona.find_by(id: SiteSetting.ai_helper_image_caption_persona) + model_id = image_caption_persona.default_llm_id || SiteSetting.ai_default_llm_model.to_i + + rval[model_id] << { type: :ai_helper_image_caption } end if SiteSetting.ai_summarization_enabled summarization_persona = AiPersona.find_by(id: SiteSetting.ai_summarization_persona) - model_id = summarization_persona.default_llm_id || LlmModel.last&.id + model_id = summarization_persona.default_llm_id || SiteSetting.ai_default_llm_model.to_i rval[model_id] << { type: :ai_summarization } end if SiteSetting.ai_embeddings_semantic_search_enabled - model_id = SiteSetting.ai_embeddings_semantic_search_hyde_model.split(":").last.to_i + search_persona = + AiPersona.find_by(id: SiteSetting.ai_embeddings_semantic_search_hyde_persona) + model_id = search_persona.default_llm_id || SiteSetting.ai_default_llm_model.to_i + rval[model_id] << { type: :ai_embeddings_semantic_search } end @@ -58,12 +87,8 @@ def self.global_usage ) .each do |model_text, name, id| next if model_text.blank? - model_id = model_text.split("custom:").last.to_i - if model_id.present? - if model_text =~ /custom:(\d+)/ - rval[model_id] << { type: :automation, name: name, id: id } - end - end + model_id = model_text.to_i + rval[model_id] << { type: :automation, name: name, id: id } if model_id.present? 
end end @@ -94,7 +119,7 @@ def self.values_for_serialization(allowed_seeded_llm_ids: nil) builder.query_hash.map(&:symbolize_keys) end - def self.values(allowed_seeded_llms: nil) + def self.values(allowed_seeded_llms: []) values = DB.query_hash(<<~SQL).map(&:symbolize_keys) SELECT display_name AS name, id AS value FROM llm_models @@ -107,7 +132,6 @@ def self.values(allowed_seeded_llms: nil) end end - values.each { |value_h| value_h[:value] = "custom:#{value_h[:value]}" } values end end diff --git a/lib/configuration/llm_validator.rb b/lib/configuration/llm_validator.rb index 36c3c63b1..0e22b6eee 100644 --- a/lib/configuration/llm_validator.rb +++ b/lib/configuration/llm_validator.rb @@ -2,9 +2,6 @@ module DiscourseAi module Configuration - class InvalidSeededModelError < StandardError - end - class LlmValidator def initialize(opts = {}) @opts = opts @@ -12,18 +9,21 @@ def initialize(opts = {}) def valid_value?(val) if val == "" - @parent_module_name = modules_and_choose_llm_settings.invert[@opts[:name]] - - @parent_enabled = SiteSetting.public_send(@parent_module_name) - return !@parent_enabled + if @opts[:name] == :ai_default_llm_model + @parent_module_names = [] + + enabled_settings.each do |setting_name| + if SiteSetting.public_send(setting_name) == true + @parent_module_names << setting_name + @parent_enabled = true + end + end + + return !@parent_enabled + end end - allowed_seeded_model?(val) - run_test(val).tap { |result| @unreachable = result } - rescue DiscourseAi::Configuration::InvalidSeededModelError => e - @unreachable = true - false rescue StandardError => e raise e if Rails.env.test? 
@unreachable = true @@ -38,54 +38,35 @@ def run_test(val) end def modules_using(llm_model) - choose_llm_settings = modules_and_choose_llm_settings.values + in_use_llms = AiPersona.where.not(default_llm_id: nil).pluck(:default_llm_id) + default_llm = SiteSetting.ai_default_llm_model.presence&.to_i - choose_llm_settings.select { |s| SiteSetting.public_send(s) == "custom:#{llm_model.id}" } + combined_llms = (in_use_llms + [default_llm]).compact.uniq + combined_llms end def error_message - if @parent_enabled + if @parent_enabled && @parent_module_names.present? return( I18n.t( - "discourse_ai.llm.configuration.disable_module_first", - setting: @parent_module_name, + "discourse_ai.llm.configuration.disable_modules_first", + settings: @parent_module_names.join(", "), ) ) end - if @invalid_seeded_model - return I18n.t("discourse_ai.llm.configuration.invalid_seeded_model") - end - return unless @unreachable I18n.t("discourse_ai.llm.configuration.model_unreachable") end - def choose_llm_setting_for(module_enabler_setting) - modules_and_choose_llm_settings[module_enabler_setting] - end - - def modules_and_choose_llm_settings - { - ai_embeddings_semantic_search_enabled: :ai_embeddings_semantic_search_hyde_model, - ai_helper_enabled: :ai_helper_model, - ai_summarization_enabled: :ai_summarization_model, - ai_translation_enabled: :ai_translation_model, - } - end - - def allowed_seeded_model?(val) - id = val.split(":").last - return true if id.to_i > 0 - - setting = @opts[:name] - allowed_list = SiteSetting.public_send("#{setting}_allowed_seeded_models") - - if allowed_list.split("|").exclude?(id) - @invalid_seeded_model = true - raise DiscourseAi::Configuration::InvalidSeededModelError.new - end + def enabled_settings + %i[ + ai_embeddings_semantic_search_enabled + ai_helper_enabled + ai_summarization_enabled + ai_translation_enabled + ] end end end diff --git a/lib/configuration/llm_vision_enumerator.rb b/lib/configuration/llm_vision_enumerator.rb index c4cf1a621..0ef16b705 
100644 --- a/lib/configuration/llm_vision_enumerator.rb +++ b/lib/configuration/llm_vision_enumerator.rb @@ -16,8 +16,6 @@ def self.values WHERE vision_enabled SQL - values.each { |value_h| value_h[:value] = "custom:#{value_h[:value]}" } - values end end diff --git a/lib/embeddings/semantic_search.rb b/lib/embeddings/semantic_search.rb index 726f203dc..6d44c1f73 100644 --- a/lib/embeddings/semantic_search.rb +++ b/lib/embeddings/semantic_search.rb @@ -6,8 +6,8 @@ class SemanticSearch def self.clear_cache_for(query) digest = OpenSSL::Digest::SHA1.hexdigest(query) - hyde_key = - "semantic-search-#{digest}-#{SiteSetting.ai_embeddings_semantic_search_hyde_model}" + hyde_model_id = find_ai_hyde_model_id + hyde_key = "semantic-search-#{digest}-#{hyde_model_id}" Discourse.cache.delete(hyde_key) Discourse.cache.delete("#{hyde_key}-#{SiteSetting.ai_embeddings_selected_model}") @@ -20,12 +20,9 @@ def initialize(guardian) def cached_query?(query) digest = OpenSSL::Digest::SHA1.hexdigest(query) + hyde_model_id = self.class.find_ai_hyde_model_id embedding_key = - build_embedding_key( - digest, - SiteSetting.ai_embeddings_semantic_search_hyde_model, - SiteSetting.ai_embeddings_selected_model, - ) + build_embedding_key(digest, hyde_model_id, SiteSetting.ai_embeddings_selected_model) Discourse.cache.read(embedding_key).present? 
end @@ -36,14 +33,11 @@ def vector def hyde_embedding(search_term) digest = OpenSSL::Digest::SHA1.hexdigest(search_term) - hyde_key = build_hyde_key(digest, SiteSetting.ai_embeddings_semantic_search_hyde_model) + hyde_model_id = self.class.find_ai_hyde_model_id + hyde_key = build_hyde_key(digest, hyde_model_id) embedding_key = - build_embedding_key( - digest, - SiteSetting.ai_embeddings_semantic_search_hyde_model, - SiteSetting.ai_embeddings_selected_model, - ) + build_embedding_key(digest, hyde_model_id, SiteSetting.ai_embeddings_selected_model) hypothetical_post = Discourse @@ -111,6 +105,7 @@ def quick_search(query) max_semantic_results_per_page = 100 search = Search.new(query, { guardian: guardian }) search_term = search.term + hyde_model_id = self.class.find_ai_hyde_model_id return [] if search_term.nil? || search_term.length < SiteSetting.min_search_term_length @@ -119,11 +114,7 @@ def quick_search(query) digest = OpenSSL::Digest::SHA1.hexdigest(search_term) embedding_key = - build_embedding_key( - digest, - SiteSetting.ai_embeddings_semantic_search_hyde_model, - SiteSetting.ai_embeddings_selected_model, - ) + build_embedding_key(digest, hyde_model_id, SiteSetting.ai_embeddings_selected_model) search_term_embedding = Discourse @@ -210,15 +201,28 @@ def hypothetical_post_from(search_term) # Priorities are: # 1. Persona's default LLM - # 2. `ai_embeddings_semantic_search_hyde_model` setting. + # 2. SiteSetting.ai_default_llm_id (or newest LLM if not set) def find_ai_hyde_model(persona_klass) - model_id = - persona_klass.default_llm_id || - SiteSetting.ai_embeddings_semantic_search_hyde_model&.split(":")&.last + model_id = persona_klass.default_llm_id || SiteSetting.ai_default_llm_model - return if model_id.blank? + if model_id.present? 
+ LlmModel.find_by(id: model_id) + else + LlmModel.last + end + end + + def self.find_ai_hyde_model_id + persona_llm_id = + AiPersona.find_by( + id: SiteSetting.ai_embeddings_semantic_search_hyde_persona, + )&.default_llm_id - if persona_llm_id.present? + persona_llm_id + else + SiteSetting.ai_default_llm_model.presence&.to_i || LlmModel.last&.id + end end private diff --git a/lib/summarization.rb b/lib/summarization.rb index a7b697638..f8f71cab2 100644 --- a/lib/summarization.rb +++ b/lib/summarization.rb @@ -54,11 +54,9 @@ def chat_channel_summary(channel, time_window_in_hours) # Priorities are: # 1. Persona's default LLM - # 2. Hidden `ai_summarization_model` setting - # 3. Newest LLM config + # 2. SiteSetting.ai_default_llm_id (or newest LLM if not set) def find_summarization_model(persona_klass) - model_id = - persona_klass.default_llm_id || SiteSetting.ai_summarization_model&.split(":")&.last # Remove legacy custom provider. + model_id = persona_klass.default_llm_id || SiteSetting.ai_default_llm_model if model_id.present? 
LlmModel.find_by(id: model_id) @@ -73,7 +71,7 @@ def build_bot(persona_klass, llm_model) persona = persona_klass.new user = User.find_by(id: persona_klass.user_id) || Discourse.system_user - bot = DiscourseAi::Personas::Bot.as(user, persona: persona, model: llm_model) + DiscourseAi::Personas::Bot.as(user, persona: persona, model: llm_model) end end end diff --git a/lib/tasks/create_topics.rake b/lib/tasks/create_topics.rake index 3095345bb..f64953e5b 100644 --- a/lib/tasks/create_topics.rake +++ b/lib/tasks/create_topics.rake @@ -88,7 +88,7 @@ namespace :ai do messages: [{ type: :user, content: prompt, id: "user" }], ) - DiscourseAi::Completions::Llm.proxy(SiteSetting.ai_helper_model).generate( + DiscourseAi::Completions::Llm.proxy(SiteSetting.ai_default_llm_model).generate( prompt, user: Discourse.system_user, feature_name: "topic-generator", diff --git a/lib/translation.rb b/lib/translation.rb index 09093f1c3..cfad97e54 100644 --- a/lib/translation.rb +++ b/lib/translation.rb @@ -3,11 +3,28 @@ module DiscourseAi module Translation def self.enabled? - SiteSetting.discourse_ai_enabled && SiteSetting.ai_translation_enabled && - SiteSetting.ai_translation_model.present? && + SiteSetting.discourse_ai_enabled && SiteSetting.ai_translation_enabled && has_llm_model? && SiteSetting.content_localization_supported_locales.present? end + def self.has_llm_model? + persona_ids = [ + SiteSetting.ai_translation_locale_detector_persona, + SiteSetting.ai_translation_post_raw_translator_persona, + SiteSetting.ai_translation_topic_title_translator_persona, + SiteSetting.ai_translation_short_text_translator_persona, + ] + + persona_default_llms = AiPersona.where(id: persona_ids).pluck(:default_llm_id) + default_llm_model = SiteSetting.ai_default_llm_model + + if persona_default_llms.any?(&:blank?) && default_llm_model.blank? + false + else + true + end + end + def self.backfill_enabled? enabled? 
&& SiteSetting.ai_translation_backfill_hourly_rate > 0 && SiteSetting.ai_translation_backfill_max_age_days > 0 diff --git a/lib/translation/base_translator.rb b/lib/translation/base_translator.rb index ba39bf839..dcdc2e164 100644 --- a/lib/translation/base_translator.rb +++ b/lib/translation/base_translator.rb @@ -57,9 +57,13 @@ def persona_setting end def self.preferred_llm_model(persona_klass) - id = persona_klass.default_llm_id || SiteSetting.ai_translation_model&.split(":")&.last - return nil if id.blank? - LlmModel.find_by(id:) + model_id = persona_klass.default_llm_id || SiteSetting.ai_default_llm_model + + if model_id.present? + LlmModel.find_by(id: model_id) + else + LlmModel.last + end end end end diff --git a/spec/configuration/feature_spec.rb b/spec/configuration/feature_spec.rb index 19368d12f..11aab1693 100644 --- a/spec/configuration/feature_spec.rb +++ b/spec/configuration/feature_spec.rb @@ -6,6 +6,8 @@ fab!(:llm_model) fab!(:ai_persona) { Fabricate(:ai_persona, default_llm_id: llm_model.id) } + before { assign_fake_provider_to(:ai_default_llm_model) } + def allow_configuring_setting(&block) DiscourseAi::Completions::Llm.with_prepared_responses(["OK"]) { block.call } end @@ -38,7 +40,6 @@ def allow_configuring_setting(&block) it "returns the configured llm model" do SiteSetting.ai_summarization_persona = ai_persona.id - allow_configuring_setting { SiteSetting.ai_summarization_model = "custom:#{llm_model.id}" } expect(ai_feature.llm_models).to eq([llm_model]) end end @@ -55,8 +56,6 @@ def allow_configuring_setting(&block) it "returns the persona's default llm when no specific helper model is set" do SiteSetting.ai_helper_proofreader_persona = ai_persona.id - SiteSetting.ai_helper_model = "" - expect(ai_feature.llm_models).to eq([llm_model]) end end @@ -75,11 +74,7 @@ def allow_configuring_setting(&block) it "uses translation model when configured" do SiteSetting.ai_translation_locale_detector_persona = ai_persona.id - 
ai_persona.update!(default_llm_id: nil) - allow_configuring_setting do - SiteSetting.ai_translation_model = "custom:#{translation_model.id}" - end - + ai_persona.update!(default_llm_id: translation_model.id) expect(ai_feature.llm_models).to eq([translation_model]) end end diff --git a/spec/configuration/llm_enumerator_spec.rb b/spec/configuration/llm_enumerator_spec.rb index 7737da45d..d666d346c 100644 --- a/spec/configuration/llm_enumerator_spec.rb +++ b/spec/configuration/llm_enumerator_spec.rb @@ -2,6 +2,7 @@ RSpec.describe DiscourseAi::Configuration::LlmEnumerator do fab!(:fake_model) + fab!(:ai_persona) { Fabricate(:ai_persona, default_llm_id: fake_model.id) } fab!(:llm_model) fab!(:seeded_model) fab!(:automation) do @@ -41,9 +42,15 @@ describe "#global_usage" do it "returns a hash of Llm models in use globally" do - SiteSetting.ai_helper_model = "custom:#{fake_model.id}" + assign_fake_provider_to(:ai_default_llm_model) + SiteSetting.ai_helper_proofreader_persona = ai_persona.id SiteSetting.ai_helper_enabled = true - expect(described_class.global_usage).to eq(fake_model.id => [{ type: :ai_helper }]) + expect(described_class.global_usage).to eq( + fake_model.id => [ + { id: ai_persona.id, name: ai_persona.name, type: :ai_persona }, + { name: "Proofread text", type: :ai_helper }, + ], + ) end it "returns information about automation rules" do @@ -51,7 +59,7 @@ component: "text", name: "model", metadata: { - value: "custom:#{fake_model.id}", + value: llm_model.id, }, target: "script", ) @@ -59,7 +67,8 @@ usage = described_class.global_usage expect(usage).to eq( - { fake_model.id => [{ type: :automation, name: "some automation", id: automation.id }] }, + fake_model.id => [{ id: ai_persona.id, name: ai_persona.name, type: :ai_persona }], + llm_model.id => [{ id: automation.id, name: automation.name, type: :automation }], ) end end diff --git a/spec/configuration/llm_validator_spec.rb 
b/spec/configuration/llm_validator_spec.rb index 5c9fdecc4..16a5fc776 100644 --- a/spec/configuration/llm_validator_spec.rb +++ b/spec/configuration/llm_validator_spec.rb @@ -1,20 +1,43 @@ # frozen_string_literal: true -RSpec.describe DiscourseAi::Configuration::LlmValidator do +require "rails_helper" + +describe DiscourseAi::Configuration::LlmValidator do describe "#valid_value?" do - context "when the parent module is enabled and we try to reset the selected model" do - before do - assign_fake_provider_to(:ai_summarization_model) - SiteSetting.ai_summarization_enabled = true - end + let(:validator) { described_class.new(name: :ai_default_llm_model) } + fab!(:llm_model) - it "returns false and displays an error message" do - validator = described_class.new(name: :ai_summarization_model) + before do + assign_fake_provider_to(:ai_default_llm_model) + SiteSetting.ai_helper_enabled = false + SiteSetting.ai_summarization_enabled = false + SiteSetting.ai_embeddings_semantic_search_enabled = false + SiteSetting.ai_translation_enabled = false + end + + it "returns true when no modules are enabled and value is empty string" do + expect(validator.valid_value?("")).to eq(true) + end + + it "returns false when a module is enabled and value is empty string" do + SiteSetting.ai_helper_enabled = true + expect(validator.valid_value?("")).to eq(false) + expect(validator.error_message).to include("ai_helper_enabled") + end + + it "returns false when multiple modules are enabled and value is empty string" do + SiteSetting.ai_helper_enabled = true + SiteSetting.ai_summarization_enabled = true + expect(validator.valid_value?("")).to eq(false) + expect(validator.error_message).to include("ai_helper_enabled, ai_summarization_enabled") + end - value = validator.valid_value?("") + it "returns true for non-empty values regardless of module state" do + SiteSetting.ai_helper_enabled = true + SiteSetting.ai_summarization_enabled = true - expect(value).to eq(false) - 
expect(validator.error_message).to include("ai_summarization_enabled") + DiscourseAi::Completions::Llm.with_prepared_responses([true]) do + expect(validator.valid_value?(llm_model)).to eq(true) end end end diff --git a/spec/jobs/regular/detect_translate_post_spec.rb b/spec/jobs/regular/detect_translate_post_spec.rb index 570b7093c..dfae6dba6 100644 --- a/spec/jobs/regular/detect_translate_post_spec.rb +++ b/spec/jobs/regular/detect_translate_post_spec.rb @@ -7,10 +7,8 @@ let(:locales) { %w[en ja] } before do + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.discourse_ai_enabled = true - Fabricate(:fake_model).tap do |fake_llm| - SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") - end SiteSetting.ai_translation_enabled = true SiteSetting.content_localization_supported_locales = locales.join("|") end diff --git a/spec/jobs/regular/detect_translate_topic_spec.rb b/spec/jobs/regular/detect_translate_topic_spec.rb index 80e5b8f13..a7eeb9ced 100644 --- a/spec/jobs/regular/detect_translate_topic_spec.rb +++ b/spec/jobs/regular/detect_translate_topic_spec.rb @@ -7,10 +7,8 @@ let(:locales) { %w[en ja] } before do + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.discourse_ai_enabled = true - Fabricate(:fake_model).tap do |fake_llm| - SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") - end SiteSetting.ai_translation_enabled = true SiteSetting.content_localization_supported_locales = locales.join("|") end diff --git a/spec/jobs/regular/fast_track_topic_gist_spec.rb b/spec/jobs/regular/fast_track_topic_gist_spec.rb index ef7dbc47e..77821f856 100644 --- a/spec/jobs/regular/fast_track_topic_gist_spec.rb +++ b/spec/jobs/regular/fast_track_topic_gist_spec.rb @@ -7,7 +7,8 @@ fab!(:post_2) { Fabricate(:post, topic: topic_1, post_number: 2) } before do - assign_fake_provider_to(:ai_summarization_model) + assign_fake_provider_to(:ai_default_llm_model) + SiteSetting.ai_summarization_enabled = true 
SiteSetting.ai_summary_gists_enabled = true end diff --git a/spec/jobs/regular/localize_categories_spec.rb b/spec/jobs/regular/localize_categories_spec.rb index bbcdfc6ac..0746e6d5f 100644 --- a/spec/jobs/regular/localize_categories_spec.rb +++ b/spec/jobs/regular/localize_categories_spec.rb @@ -10,10 +10,8 @@ def localize_all_categories(*locales) end before do + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.discourse_ai_enabled = true - Fabricate(:fake_model).tap do |fake_llm| - SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") - end SiteSetting.ai_translation_enabled = true SiteSetting.content_localization_supported_locales = "pt_BR|zh_CN" diff --git a/spec/jobs/regular/localize_posts_spec.rb b/spec/jobs/regular/localize_posts_spec.rb index 92aae58dd..72f01e871 100644 --- a/spec/jobs/regular/localize_posts_spec.rb +++ b/spec/jobs/regular/localize_posts_spec.rb @@ -7,10 +7,8 @@ let(:locales) { %w[en ja de] } before do + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.discourse_ai_enabled = true - Fabricate(:fake_model).tap do |fake_llm| - SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") - end SiteSetting.ai_translation_enabled = true SiteSetting.content_localization_supported_locales = locales.join("|") SiteSetting.ai_translation_backfill_hourly_rate = 100 diff --git a/spec/jobs/regular/localize_topics_spec.rb b/spec/jobs/regular/localize_topics_spec.rb index f091b9155..84756c5c8 100644 --- a/spec/jobs/regular/localize_topics_spec.rb +++ b/spec/jobs/regular/localize_topics_spec.rb @@ -7,10 +7,8 @@ let(:locales) { %w[en ja de] } before do + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.discourse_ai_enabled = true - Fabricate(:fake_model).tap do |fake_llm| - SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") - end SiteSetting.ai_translation_enabled = true SiteSetting.content_localization_supported_locales = locales.join("|") 
SiteSetting.ai_translation_backfill_hourly_rate = 100 diff --git a/spec/jobs/regular/stream_composer_helper_spec.rb b/spec/jobs/regular/stream_composer_helper_spec.rb index 350f758fa..3be1b22be 100644 --- a/spec/jobs/regular/stream_composer_helper_spec.rb +++ b/spec/jobs/regular/stream_composer_helper_spec.rb @@ -3,7 +3,7 @@ RSpec.describe Jobs::StreamComposerHelper do subject(:job) { described_class.new } - before { assign_fake_provider_to(:ai_helper_model) } + before { assign_fake_provider_to(:ai_default_llm_model) } describe "#execute" do let!(:input) { "I liek to eet pie fur brakefast becuz it is delishus." } diff --git a/spec/jobs/regular/stream_post_helper_spec.rb b/spec/jobs/regular/stream_post_helper_spec.rb index 06cb69d59..3301c549c 100644 --- a/spec/jobs/regular/stream_post_helper_spec.rb +++ b/spec/jobs/regular/stream_post_helper_spec.rb @@ -3,7 +3,7 @@ RSpec.describe Jobs::StreamPostHelper do subject(:job) { described_class.new } - before { assign_fake_provider_to(:ai_helper_model) } + before { assign_fake_provider_to(:ai_default_llm_model) } describe "#execute" do fab!(:topic) diff --git a/spec/jobs/regular/stream_topic_ai_summary_spec.rb b/spec/jobs/regular/stream_topic_ai_summary_spec.rb index 1591e7018..7d3ffa0d2 100644 --- a/spec/jobs/regular/stream_topic_ai_summary_spec.rb +++ b/spec/jobs/regular/stream_topic_ai_summary_spec.rb @@ -11,7 +11,7 @@ before do Group.find(Group::AUTO_GROUPS[:trust_level_3]).add(user) - assign_fake_provider_to(:ai_summarization_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_summarization_enabled = true end diff --git a/spec/jobs/scheduled/categories_locale_detection_backfill_spec.rb b/spec/jobs/scheduled/categories_locale_detection_backfill_spec.rb index 31e96275e..42c86f64e 100644 --- a/spec/jobs/scheduled/categories_locale_detection_backfill_spec.rb +++ b/spec/jobs/scheduled/categories_locale_detection_backfill_spec.rb @@ -5,10 +5,8 @@ subject(:job) { described_class.new } before do + 
assign_fake_provider_to(:ai_default_llm_model) SiteSetting.discourse_ai_enabled = true - Fabricate(:fake_model).tap do |fake_llm| - SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") - end SiteSetting.ai_translation_enabled = true SiteSetting.ai_translation_backfill_hourly_rate = 100 SiteSetting.content_localization_supported_locales = "en" diff --git a/spec/jobs/scheduled/post_localization_backfill_spec.rb b/spec/jobs/scheduled/post_localization_backfill_spec.rb index f43d890d8..b6468ca74 100644 --- a/spec/jobs/scheduled/post_localization_backfill_spec.rb +++ b/spec/jobs/scheduled/post_localization_backfill_spec.rb @@ -2,11 +2,9 @@ describe Jobs::PostLocalizationBackfill do before do + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_translation_backfill_hourly_rate = 100 SiteSetting.content_localization_supported_locales = "en" - Fabricate(:fake_model).tap do |fake_llm| - SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") - end SiteSetting.ai_translation_enabled = true SiteSetting.discourse_ai_enabled = true end diff --git a/spec/jobs/scheduled/posts_locale_detection_backfill_spec.rb b/spec/jobs/scheduled/posts_locale_detection_backfill_spec.rb index dfbc46f90..27398a356 100644 --- a/spec/jobs/scheduled/posts_locale_detection_backfill_spec.rb +++ b/spec/jobs/scheduled/posts_locale_detection_backfill_spec.rb @@ -5,10 +5,8 @@ subject(:job) { described_class.new } before do + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.discourse_ai_enabled = true - Fabricate(:fake_model).tap do |fake_llm| - SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") - end SiteSetting.ai_translation_enabled = true SiteSetting.ai_translation_backfill_hourly_rate = 100 SiteSetting.content_localization_supported_locales = "en" diff --git a/spec/jobs/scheduled/summaries_backfill_spec.rb b/spec/jobs/scheduled/summaries_backfill_spec.rb index ccf23b06a..f21d77253 100644 --- 
a/spec/jobs/scheduled/summaries_backfill_spec.rb +++ b/spec/jobs/scheduled/summaries_backfill_spec.rb @@ -8,7 +8,7 @@ let(:intervals) { 12 } # budget is split into intervals. Job runs every five minutes. before do - assign_fake_provider_to(:ai_summarization_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_summarization_enabled = true SiteSetting.ai_summary_backfill_maximum_topics_per_hour = limit SiteSetting.ai_summary_gists_enabled = true diff --git a/spec/jobs/scheduled/topics_locale_detection_backfill_spec.rb b/spec/jobs/scheduled/topics_locale_detection_backfill_spec.rb index 924883312..8c0f1696e 100644 --- a/spec/jobs/scheduled/topics_locale_detection_backfill_spec.rb +++ b/spec/jobs/scheduled/topics_locale_detection_backfill_spec.rb @@ -5,10 +5,8 @@ subject(:job) { described_class.new } before do + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.discourse_ai_enabled = true - Fabricate(:fake_model).tap do |fake_llm| - SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") - end SiteSetting.ai_translation_enabled = true SiteSetting.ai_translation_backfill_hourly_rate = 100 SiteSetting.content_localization_supported_locales = "en" diff --git a/spec/lib/completions/endpoints/anthropic_spec.rb b/spec/lib/completions/endpoints/anthropic_spec.rb index 8a40c2131..8fecbf1f7 100644 --- a/spec/lib/completions/endpoints/anthropic_spec.rb +++ b/spec/lib/completions/endpoints/anthropic_spec.rb @@ -4,7 +4,7 @@ RSpec.describe DiscourseAi::Completions::Endpoints::Anthropic do let(:url) { "https://api.anthropic.com/v1/messages" } fab!(:model) { Fabricate(:anthropic_model, name: "claude-3-opus", vision_enabled: true) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(model) } let(:image100x100) { plugin_file_from_fixtures("100x100.jpg") } let(:upload100x100) do UploadCreator.new(image100x100, "image.jpg").create_for(Discourse.system_user.id) @@ -374,7 
+374,7 @@ model.provider_params["reasoning_tokens"] = 10_000 model.save! - proxy = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(model) result = proxy.generate(prompt, user: Discourse.system_user) expect(result).to eq("Hello!") @@ -432,7 +432,7 @@ }, ).to_return(status: 200, body: body) - proxy = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(model) result = proxy.generate(prompt, user: Discourse.system_user) expect(result).to eq("Hello!") diff --git a/spec/lib/completions/endpoints/aws_bedrock_spec.rb b/spec/lib/completions/endpoints/aws_bedrock_spec.rb index 70bf9364d..fb598a9ec 100644 --- a/spec/lib/completions/endpoints/aws_bedrock_spec.rb +++ b/spec/lib/completions/endpoints/aws_bedrock_spec.rb @@ -47,7 +47,7 @@ def encode_message(message) model.provider_params["disable_native_tools"] = true model.save! - proxy = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(model) incomplete_tool_call = <<~XML.strip I should be ignored @@ -122,7 +122,7 @@ def encode_message(message) end it "supports streaming function calls" do - proxy = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(model) request = nil @@ -293,7 +293,7 @@ def encode_message(message) describe "Claude 3 support" do it "supports regular completions" do - proxy = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(model) request = nil @@ -340,7 +340,7 @@ def encode_message(message) model.provider_params["reasoning_tokens"] = 10_000 model.save! 
- proxy = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(model) request = nil @@ -387,7 +387,7 @@ def encode_message(message) end it "supports claude 3 streaming" do - proxy = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(model) request = nil @@ -448,7 +448,7 @@ def encode_message(message) }, ) - proxy = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(model) request = nil content = { @@ -487,7 +487,7 @@ def encode_message(message) describe "disabled tool use" do it "handles tool_choice: :none by adding a prefill message instead of using tool_choice param" do - proxy = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(model) request = nil # Create a prompt with tool_choice: :none @@ -549,7 +549,7 @@ def encode_message(message) describe "forced tool use" do it "can properly force tool use" do - proxy = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(model) request = nil tools = [ @@ -640,7 +640,7 @@ def encode_message(message) { type: "message_delta", delta: { usage: { output_tokens: 25 } } }, ].map { |message| encode_message(message) } - proxy = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(model) request = nil bedrock_mock.with_chunk_array_support do stub_request( @@ -718,7 +718,7 @@ def encode_message(message) { type: "message_delta", delta: { usage: { output_tokens: 25 } } }, ].map { |message| encode_message(message) } - proxy = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(model) request = nil bedrock_mock.with_chunk_array_support do stub_request( diff --git a/spec/lib/completions/endpoints/cohere_spec.rb b/spec/lib/completions/endpoints/cohere_spec.rb index 
c4fb06b6a..ccea5b8ea 100644 --- a/spec/lib/completions/endpoints/cohere_spec.rb +++ b/spec/lib/completions/endpoints/cohere_spec.rb @@ -3,7 +3,7 @@ RSpec.describe DiscourseAi::Completions::Endpoints::Cohere do fab!(:cohere_model) - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{cohere_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(cohere_model) } fab!(:user) let(:prompt) do diff --git a/spec/lib/completions/endpoints/gemini_spec.rb b/spec/lib/completions/endpoints/gemini_spec.rb index 3a6543ea3..4ef5f7597 100644 --- a/spec/lib/completions/endpoints/gemini_spec.rb +++ b/spec/lib/completions/endpoints/gemini_spec.rb @@ -160,7 +160,7 @@ def tool_response req_body = nil - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) url = "#{model.url}:generateContent?key=123" stub_request(:post, url).with( @@ -186,7 +186,7 @@ def tool_response req_body = nil - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) url = "#{model.url}:generateContent?key=123" stub_request(:post, url).with( @@ -220,7 +220,7 @@ def tool_response req_body = nil - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) url = "#{model.url}:generateContent?key=123" stub_request(:post, url).with( @@ -246,7 +246,7 @@ def tool_response req_body = nil - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) url = "#{model.url}:generateContent?key=123" stub_request(:post, url).with( @@ -274,7 +274,7 @@ def tool_response req_body = nil - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) url = "#{model.url}:generateContent?key=123" stub_request(:post, url).with( @@ -297,7 +297,7 @@ def tool_response it "properly encodes tool calls" do prompt = 
DiscourseAi::Completions::Prompt.new("Hello", tools: [echo_tool]) - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) url = "#{model.url}:generateContent?key=123" response_json = { "functionCall" => { name: "echo", args: { text: "ydney" } } } @@ -332,7 +332,7 @@ def tool_response req_body = nil - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) url = "#{model.url}:generateContent?key=123" stub_request(:post, url).with( @@ -410,7 +410,7 @@ def tool_response payload = rows.map { |r| "data: #{r.to_json}\n\n" }.join - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) url = "#{model.url}:streamGenerateContent?alt=sse&key=123" prompt = DiscourseAi::Completions::Prompt.new("Hello", tools: [echo_tool]) @@ -450,7 +450,7 @@ def tool_response TEXT - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) url = "#{model.url}:streamGenerateContent?alt=sse&key=123" output = [] @@ -478,7 +478,7 @@ def tool_response split = data.split("|") - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) url = "#{model.url}:streamGenerateContent?alt=sse&key=123" output = [] @@ -497,7 +497,7 @@ def tool_response req_body = nil - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) url = "#{model.url}:generateContent?key=123" stub_request(:post, url).with( @@ -525,7 +525,7 @@ def tool_response req_body = nil - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) url = "#{model.url}:generateContent?key=123" stub_request(:post, url).with( @@ -600,7 +600,7 @@ def tool_response req_body = nil - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = 
DiscourseAi::Completions::Llm.proxy(model) url = "#{model.url}:streamGenerateContent?alt=sse&key=123" stub_request(:post, url).with( @@ -657,7 +657,7 @@ def tool_response TEXT - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) url = "#{model.url}:streamGenerateContent?alt=sse&key=123" output = [] diff --git a/spec/lib/completions/endpoints/nova_spec.rb b/spec/lib/completions/endpoints/nova_spec.rb index aa2727f79..e75a5bd09 100644 --- a/spec/lib/completions/endpoints/nova_spec.rb +++ b/spec/lib/completions/endpoints/nova_spec.rb @@ -28,7 +28,7 @@ def encode_message(message) end it "should be able to make a simple request" do - proxy = DiscourseAi::Completions::Llm.proxy("custom:#{nova_model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(nova_model) content = { "output" => { @@ -90,7 +90,7 @@ def encode_message(message) stub_request(:post, stream_url).to_return(status: 200, body: messages.join) - proxy = DiscourseAi::Completions::Llm.proxy("custom:#{nova_model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(nova_model) responses = [] proxy.generate("Hello!", user: user) { |partial| responses << partial } @@ -104,7 +104,7 @@ def encode_message(message) #model.provider_params["disable_native_tools"] = true #model.save! 
- proxy = DiscourseAi::Completions::Llm.proxy("custom:#{nova_model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(nova_model) prompt = DiscourseAi::Completions::Prompt.new( "You are a helpful assistant.", diff --git a/spec/lib/completions/endpoints/open_ai_spec.rb b/spec/lib/completions/endpoints/open_ai_spec.rb index cd95476d8..01e324ca9 100644 --- a/spec/lib/completions/endpoints/open_ai_spec.rb +++ b/spec/lib/completions/endpoints/open_ai_spec.rb @@ -177,7 +177,7 @@ def request_body(prompt, stream: false, tool_call: false) describe "max tokens for reasoning models" do it "uses max_completion_tokens for reasoning models" do model.update!(name: "o3-mini", max_output_tokens: 999) - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) prompt = DiscourseAi::Completions::Prompt.new( "You are a bot", @@ -216,7 +216,7 @@ def request_body(prompt, stream: false, tool_call: false) describe "repeat calls" do it "can properly reset context" do - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) tools = [ { @@ -297,7 +297,7 @@ def request_body(prompt, stream: false, tool_call: false) describe "max tokens remapping" do it "remaps max_tokens to max_completion_tokens for reasoning models" do model.update!(name: "o3-mini") - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) body_parsed = nil stub_request(:post, "https://api.openai.com/v1/chat/completions").with( @@ -313,7 +313,7 @@ def request_body(prompt, stream: false, tool_call: false) describe "forced tool use" do it "can properly force tool use" do - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) tools = [ { @@ -441,7 +441,7 @@ def request_body(prompt, stream: false, tool_call: false) describe "disabled tool use" do it "can properly disable tool use with :none" do 
- llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) tools = [ { @@ -532,7 +532,7 @@ def request_body(prompt, stream: false, tool_call: false) describe "image support" do it "can handle images" do model = Fabricate(:llm_model, vision_enabled: true) - llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}") + llm = DiscourseAi::Completions::Llm.proxy(model) prompt = DiscourseAi::Completions::Prompt.new( "You are image bot", diff --git a/spec/lib/completions/endpoints/open_router_spec.rb b/spec/lib/completions/endpoints/open_router_spec.rb index 8beb48aca..d89fd8a7c 100644 --- a/spec/lib/completions/endpoints/open_router_spec.rb +++ b/spec/lib/completions/endpoints/open_router_spec.rb @@ -25,7 +25,7 @@ body: { "choices" => [message: { role: "assistant", content: "world" }] }.to_json, ) - proxy = DiscourseAi::Completions::Llm.proxy("custom:#{open_router_model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(open_router_model) result = proxy.generate("hello", user: user) expect(result).to eq("world") @@ -62,7 +62,7 @@ body: { "choices" => [message: { role: "assistant", content: "test response" }] }.to_json, ) - proxy = DiscourseAi::Completions::Llm.proxy("custom:#{open_router_model.id}") + proxy = DiscourseAi::Completions::Llm.proxy(open_router_model) # Request with parameters that should be ignored proxy.generate("test", user: user, top_p: 0.9, temperature: 0.8, max_tokens: 500) diff --git a/spec/lib/completions/endpoints/vllm_spec.rb b/spec/lib/completions/endpoints/vllm_spec.rb index 824bcbe06..56f042bb3 100644 --- a/spec/lib/completions/endpoints/vllm_spec.rb +++ b/spec/lib/completions/endpoints/vllm_spec.rb @@ -68,7 +68,7 @@ def stub_streamed_response(prompt, deltas, tool_call: false) fab!(:llm_model) { Fabricate(:vllm_model) } fab!(:user) - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } 
let(:vllm_mock) { VllmMock.new(endpoint) } let(:compliance) do diff --git a/spec/lib/completions/llm_spec.rb b/spec/lib/completions/llm_spec.rb index 4f22c16fb..115d1bc6f 100644 --- a/spec/lib/completions/llm_spec.rb +++ b/spec/lib/completions/llm_spec.rb @@ -46,9 +46,7 @@ body: result, ) result = +"" - described_class - .proxy("custom:#{model.id}") - .generate(prompt, user: user) { |partial| result << partial } + described_class.proxy(model).generate(prompt, user: user) { |partial| result << partial } expect(result).to eq("Hello") log = AiApiAuditLog.order("id desc").first @@ -75,7 +73,7 @@ ) result = - described_class.proxy("custom:#{model.id}").generate( + described_class.proxy(model).generate( "Hello", user: user, feature_name: "llm_triage", @@ -99,7 +97,7 @@ DiscourseAi::Completions::Endpoints::Fake.chunk_count = 10 end - let(:llm) { described_class.proxy("custom:#{fake_model.id}") } + let(:llm) { described_class.proxy(fake_model) } let(:prompt) do DiscourseAi::Completions::Prompt.new( diff --git a/spec/lib/discourse_automation/automation_spec.rb b/spec/lib/discourse_automation/automation_spec.rb index 3fbc08987..466076737 100644 --- a/spec/lib/discourse_automation/automation_spec.rb +++ b/spec/lib/discourse_automation/automation_spec.rb @@ -49,11 +49,8 @@ expect(models).to match_array( [ - { "translated_name" => "#{llm_model.display_name}", "id" => "custom:#{llm_model.id}" }, - { - "translated_name" => "#{seeded_model.display_name}", - "id" => "custom:#{seeded_model.id}", - }, + { "translated_name" => "#{llm_model.display_name}", "id" => llm_model.id.to_s }, + { "translated_name" => "#{seeded_model.display_name}", "id" => seeded_model.id.to_s }, ], ) end diff --git a/spec/lib/discourse_automation/llm_report_spec.rb b/spec/lib/discourse_automation/llm_report_spec.rb index e1664016e..d324b1106 100644 --- a/spec/lib/discourse_automation/llm_report_spec.rb +++ b/spec/lib/discourse_automation/llm_report_spec.rb @@ -24,12 +24,11 @@ def add_automation_field(name, 
value, type: "text") it "can trigger via automation" do add_automation_field("sender", user.username, type: "user") add_automation_field("receivers", [user.username], type: "email_group_user") - add_automation_field("model", "custom:#{llm_model.id}") + add_automation_field("model", llm_model.id) add_automation_field( "persona_id", DiscourseAi::Personas::Persona.system_personas[DiscourseAi::Personas::ReportRunner], ) - add_automation_field("title", "Weekly report") DiscourseAi::Completions::Llm.with_prepared_responses(["An Amazing Report!!!"]) do @@ -43,7 +42,7 @@ def add_automation_field(name, value, type: "text") it "can target a topic" do add_automation_field("sender", user.username, type: "user") add_automation_field("topic_id", "#{post.topic_id}") - add_automation_field("model", "custom:#{llm_model.id}") + add_automation_field("model", llm_model.id) add_automation_field( "persona_id", DiscourseAi::Personas::Persona.system_personas[DiscourseAi::Personas::ReportRunner], diff --git a/spec/lib/discourse_automation/llm_triage_spec.rb b/spec/lib/discourse_automation/llm_triage_spec.rb index 1b3ca6904..6b0d01bba 100644 --- a/spec/lib/discourse_automation/llm_triage_spec.rb +++ b/spec/lib/discourse_automation/llm_triage_spec.rb @@ -27,7 +27,7 @@ def add_automation_field(name, value, type: "text") SiteSetting.tagging_enabled = true add_automation_field("system_prompt", "hello %%POST%%") add_automation_field("search_for_text", "bad") - add_automation_field("model", "custom:#{llm_model.id}") + add_automation_field("model", llm_model.id) add_automation_field("category", category.id, type: "category") add_automation_field("tags", %w[aaa bbb], type: "tags") add_automation_field("hide_topic", true, type: "boolean") diff --git a/spec/lib/guardian_extensions_spec.rb b/spec/lib/guardian_extensions_spec.rb index 33d43c456..1481d358f 100644 --- a/spec/lib/guardian_extensions_spec.rb +++ b/spec/lib/guardian_extensions_spec.rb @@ -7,7 +7,7 @@ before do group.add(user) - 
assign_fake_provider_to(:ai_summarization_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_summarization_enabled = true SiteSetting.ai_summary_gists_enabled = true end diff --git a/spec/lib/modules/ai_helper/assistant_spec.rb b/spec/lib/modules/ai_helper/assistant_spec.rb index 832235d9c..1a25f67de 100644 --- a/spec/lib/modules/ai_helper/assistant_spec.rb +++ b/spec/lib/modules/ai_helper/assistant_spec.rb @@ -5,7 +5,7 @@ fab!(:empty_locale_user) { Fabricate(:user, locale: "") } before do - assign_fake_provider_to(:ai_helper_model) + assign_fake_provider_to(:ai_default_llm_model) Group.refresh_automatic_groups! end diff --git a/spec/lib/modules/ai_helper/chat_thread_titler_spec.rb b/spec/lib/modules/ai_helper/chat_thread_titler_spec.rb index 16a4a9733..13a655d5b 100644 --- a/spec/lib/modules/ai_helper/chat_thread_titler_spec.rb +++ b/spec/lib/modules/ai_helper/chat_thread_titler_spec.rb @@ -3,7 +3,7 @@ RSpec.describe DiscourseAi::AiHelper::ChatThreadTitler do subject(:titler) { described_class.new(thread) } - before { assign_fake_provider_to(:ai_helper_model) } + before { assign_fake_provider_to(:ai_default_llm_model) } fab!(:thread) { Fabricate(:chat_thread) } fab!(:chat_message) { Fabricate(:chat_message, thread: thread) } diff --git a/spec/lib/modules/ai_helper/entry_point_spec.rb b/spec/lib/modules/ai_helper/entry_point_spec.rb index 33765deb6..67f60a7ef 100644 --- a/spec/lib/modules/ai_helper/entry_point_spec.rb +++ b/spec/lib/modules/ai_helper/entry_point_spec.rb @@ -5,7 +5,7 @@ fab!(:french_user) { Fabricate(:user, locale: "fr") } it "will correctly localize available prompts" do - assign_fake_provider_to(:ai_helper_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.default_locale = "en" SiteSetting.allow_user_locale = true SiteSetting.ai_helper_enabled = true @@ -38,8 +38,7 @@ end it "will include auto_image_caption field in the user_option if image caption is enabled" do - assign_fake_provider_to(:ai_helper_model) - 
assign_fake_provider_to(:ai_helper_image_caption_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_helper_enabled = true SiteSetting.ai_helper_enabled_features = "image_caption" SiteSetting.ai_auto_image_caption_allowed_groups = "10" # tl0 diff --git a/spec/lib/modules/ai_helper/painter_spec.rb b/spec/lib/modules/ai_helper/painter_spec.rb index 1b97bbf51..ec84243a2 100644 --- a/spec/lib/modules/ai_helper/painter_spec.rb +++ b/spec/lib/modules/ai_helper/painter_spec.rb @@ -6,7 +6,7 @@ fab!(:user) before do - assign_fake_provider_to(:ai_helper_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_stability_api_url = "https://api.stability.dev" SiteSetting.ai_stability_api_key = "abc" SiteSetting.ai_openai_api_key = "abc" diff --git a/spec/lib/modules/automation/llm_triage_spec.rb b/spec/lib/modules/automation/llm_triage_spec.rb index bbff9e106..fc4ffd18a 100644 --- a/spec/lib/modules/automation/llm_triage_spec.rb +++ b/spec/lib/modules/automation/llm_triage_spec.rb @@ -12,7 +12,7 @@ def triage(**args) DiscourseAi::Completions::Llm.with_prepared_responses(["good"]) do triage( post: post, - model: "custom:#{llm_model.id}", + model: llm_model.id.to_s, hide_topic: true, system_prompt: "test %%POST%%", search_for_text: "bad", @@ -27,7 +27,7 @@ def triage(**args) DiscourseAi::Completions::Llm.with_prepared_responses(["bad"]) do triage( post: post, - model: "custom:#{llm_model.id}", + model: llm_model.id.to_s, hide_topic: true, system_prompt: "test %%POST%%", search_for_text: "bad", @@ -44,7 +44,7 @@ def triage(**args) DiscourseAi::Completions::Llm.with_prepared_responses(["bad"]) do triage( post: post, - model: "custom:#{llm_model.id}", + model: llm_model.id.to_s, category_id: category.id, system_prompt: "test %%POST%%", search_for_text: "bad", @@ -60,7 +60,7 @@ def triage(**args) DiscourseAi::Completions::Llm.with_prepared_responses(["bad"]) do triage( post: post, - model: "custom:#{llm_model.id}", + model: llm_model.id.to_s, 
system_prompt: "test %%POST%%", search_for_text: "bad", canned_reply: "test canned reply 123", @@ -79,7 +79,7 @@ def triage(**args) DiscourseAi::Completions::Llm.with_prepared_responses(["bad"]) do triage( post: post, - model: "custom:#{llm_model.id}", + model: llm_model.id.to_s, system_prompt: "test %%POST%%", search_for_text: "bad", flag_post: true, @@ -97,7 +97,7 @@ def triage(**args) DiscourseAi::Completions::Llm.with_prepared_responses(["bad"]) do triage( post: post, - model: "custom:#{llm_model.id}", + model: llm_model.id.to_s, system_prompt: "test %%POST%%", search_for_text: "bad", flag_post: true, @@ -114,7 +114,7 @@ def triage(**args) DiscourseAi::Completions::Llm.with_prepared_responses(["bad"]) do triage( post: post, - model: "custom:#{llm_model.id}", + model: llm_model.id.to_s, system_prompt: "test %%POST%%", search_for_text: "bad", flag_post: true, @@ -132,7 +132,7 @@ def triage(**args) DiscourseAi::Completions::Llm.with_prepared_responses(["bad"]) do triage( post: post, - model: "custom:#{llm_model.id}", + model: llm_model.id.to_s, system_prompt: "test %%POST%%", search_for_text: "bad", flag_post: true, @@ -158,7 +158,7 @@ def triage(**args) DiscourseAi::Completions::Llm.with_prepared_responses(["bad"]) do triage( post: post, - model: "custom:#{llm_model.id}", + model: llm_model.id.to_s, system_prompt: "test %%POST%%", search_for_text: "bad", flag_post: true, @@ -174,7 +174,7 @@ def triage(**args) DiscourseAi::Completions::Llm.with_prepared_responses(["Bad.\n\nYo"]) do triage( post: post, - model: "custom:#{llm_model.id}", + model: llm_model.id.to_s, system_prompt: "test %%POST%%", search_for_text: "bad", flag_post: true, @@ -191,7 +191,7 @@ def triage(**args) DiscourseAi::Completions::Llm.with_prepared_responses(["bad"]) do triage( post: post, - model: "custom:#{llm_model.id}", + model: llm_model.id.to_s, system_prompt: "test %%POST%%", search_for_text: "BAD", flag_post: true, @@ -210,7 +210,7 @@ def triage(**args) 
DiscourseAi::Completions::Llm.with_prepared_responses(["bad"]) do triage( post: post, - model: "custom:#{llm_model.id}", + model: llm_model.id.to_s, system_prompt: "test %%POST%%", search_for_text: "bad", flag_post: true, @@ -229,7 +229,7 @@ def triage(**args) DiscourseAi::Completions::Llm.with_prepared_responses(["bad"]) do |spy| triage( post: post, - model: "custom:#{llm_model.id}", + model: llm_model.id.to_s, system_prompt: "test %%POST%%", search_for_text: "bad", flag_post: true, @@ -249,7 +249,7 @@ def triage(**args) DiscourseAi::Completions::Llm.with_prepared_responses(["bad"]) do triage( post: post, - model: "custom:#{llm_model.id}", + model: llm_model.id.to_s, system_prompt: "test %%POST%%", search_for_text: "bad", flag_post: true, diff --git a/spec/lib/modules/automation/report_runner_spec.rb b/spec/lib/modules/automation/report_runner_spec.rb index 10dfb0e31..973b9c563 100644 --- a/spec/lib/modules/automation/report_runner_spec.rb +++ b/spec/lib/modules/automation/report_runner_spec.rb @@ -43,7 +43,7 @@ module Automation sender_username: user.username, receivers: ["fake@discourse.com"], title: "test report %DATE%", - model: "custom:#{llm_model.id}", + model: llm_model.id, persona_id: DiscourseAi::Personas::Persona.system_personas[DiscourseAi::Personas::ReportRunner], category_ids: nil, @@ -82,7 +82,7 @@ module Automation sender_username: user.username, receivers: [receiver.username], title: "test report", - model: "custom:#{llm_model.id}", + model: llm_model.id, persona_id: DiscourseAi::Personas::Persona.system_personas[DiscourseAi::Personas::ReportRunner], category_ids: nil, @@ -129,7 +129,7 @@ module Automation sender_username: user.username, receivers: [receiver.username], title: "test report", - model: "custom:#{llm_model.id}", + model: llm_model.id, persona_id: DiscourseAi::Personas::Persona.system_personas[DiscourseAi::Personas::ReportRunner], category_ids: nil, @@ -174,7 +174,7 @@ module Automation sender_username: user.username, receivers: 
[receiver.username], title: "test report", - model: "custom:#{llm_model.id}", + model: llm_model.id, persona_id: DiscourseAi::Personas::Persona.system_personas[DiscourseAi::Personas::ReportRunner], category_ids: nil, @@ -208,7 +208,7 @@ module Automation sender_username: user.username, receivers: [group_for_reports.name], title: "group report", - model: "custom:#{llm_model.id}", + model: llm_model.id, persona_id: DiscourseAi::Personas::Persona.system_personas[DiscourseAi::Personas::ReportRunner], category_ids: nil, @@ -238,7 +238,7 @@ module Automation sender_username: user.username, receivers: [receiver.username], title: "test report", - model: "custom:#{llm_model.id}", + model: llm_model.id, persona_id: DiscourseAi::Personas::Persona.system_personas[DiscourseAi::Personas::ReportRunner], category_ids: nil, diff --git a/spec/lib/modules/embeddings/semantic_search_spec.rb b/spec/lib/modules/embeddings/semantic_search_spec.rb index 431255453..f96629973 100644 --- a/spec/lib/modules/embeddings/semantic_search_spec.rb +++ b/spec/lib/modules/embeddings/semantic_search_spec.rb @@ -11,7 +11,7 @@ before do SiteSetting.ai_embeddings_selected_model = vector_def.id - assign_fake_provider_to(:ai_embeddings_semantic_search_hyde_model) + assign_fake_provider_to(:ai_default_llm_model) end describe "#search_for_topics" do diff --git a/spec/lib/modules/summarization/entry_point_spec.rb b/spec/lib/modules/summarization/entry_point_spec.rb index 0d57cbf9d..d2c721087 100644 --- a/spec/lib/modules/summarization/entry_point_spec.rb +++ b/spec/lib/modules/summarization/entry_point_spec.rb @@ -2,7 +2,7 @@ RSpec.describe DiscourseAi::Summarization::EntryPoint do before do - assign_fake_provider_to(:ai_summarization_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_summarization_enabled = true SiteSetting.ai_summary_gists_enabled = true end diff --git a/spec/lib/modules/summarization/fold_content_spec.rb b/spec/lib/modules/summarization/fold_content_spec.rb index 
d2497c70f..fb99022c8 100644 --- a/spec/lib/modules/summarization/fold_content_spec.rb +++ b/spec/lib/modules/summarization/fold_content_spec.rb @@ -3,7 +3,7 @@ RSpec.describe DiscourseAi::Summarization::FoldContent do subject(:summarizer) { DiscourseAi::Summarization.topic_summary(topic) } - let!(:llm_model) { assign_fake_provider_to(:ai_summarization_model) } + let!(:llm_model) { assign_fake_provider_to(:ai_default_llm_model) } fab!(:topic) { Fabricate(:topic, highest_post_number: 2) } fab!(:post_1) { Fabricate(:post, topic: topic, post_number: 1, raw: "This is a text") } diff --git a/spec/lib/personas/question_consolidator_spec.rb b/spec/lib/personas/question_consolidator_spec.rb index 7fe543993..4150f805e 100644 --- a/spec/lib/personas/question_consolidator_spec.rb +++ b/spec/lib/personas/question_consolidator_spec.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true RSpec.describe DiscourseAi::Personas::QuestionConsolidator do - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{Fabricate(:fake_model).id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(Fabricate(:fake_model)) } let(:fake_endpoint) { DiscourseAi::Completions::Endpoints::Fake } fab!(:user) diff --git a/spec/lib/personas/tools/create_artifact_spec.rb b/spec/lib/personas/tools/create_artifact_spec.rb index 929e54ef5..efa95c2ba 100644 --- a/spec/lib/personas/tools/create_artifact_spec.rb +++ b/spec/lib/personas/tools/create_artifact_spec.rb @@ -2,7 +2,7 @@ RSpec.describe DiscourseAi::Personas::Tools::CreateArtifact do fab!(:llm_model) - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } fab!(:post) before { SiteSetting.ai_bot_enabled = true } diff --git a/spec/lib/personas/tools/create_image_spec.rb b/spec/lib/personas/tools/create_image_spec.rb index 0aa18fea0..781a40a36 100644 --- a/spec/lib/personas/tools/create_image_spec.rb +++ b/spec/lib/personas/tools/create_image_spec.rb @@ -12,7 +12,7 @@ end 
let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(gpt_35_turbo.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{gpt_35_turbo.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(gpt_35_turbo) } let(:progress_blk) { Proc.new {} } let(:create_image) { described_class.new({ prompts: prompts }, llm: llm, bot_user: bot_user) } diff --git a/spec/lib/personas/tools/dall_e_spec.rb b/spec/lib/personas/tools/dall_e_spec.rb index 50d4ab72d..193e07d8e 100644 --- a/spec/lib/personas/tools/dall_e_spec.rb +++ b/spec/lib/personas/tools/dall_e_spec.rb @@ -12,7 +12,7 @@ end let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(gpt_35_turbo.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{gpt_35_turbo.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(gpt_35_turbo) } let(:progress_blk) { Proc.new {} } let(:dall_e) { described_class.new({ prompts: prompts }, llm: llm, bot_user: bot_user) } diff --git a/spec/lib/personas/tools/db_schema_spec.rb b/spec/lib/personas/tools/db_schema_spec.rb index 643e3fe7e..d533a9d8f 100644 --- a/spec/lib/personas/tools/db_schema_spec.rb +++ b/spec/lib/personas/tools/db_schema_spec.rb @@ -3,7 +3,7 @@ RSpec.describe DiscourseAi::Personas::Tools::DbSchema do fab!(:llm_model) let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(llm_model.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } before { SiteSetting.ai_bot_enabled = true } describe "#process" do diff --git a/spec/lib/personas/tools/discourse_meta_search_spec.rb b/spec/lib/personas/tools/discourse_meta_search_spec.rb index 1ccc4d4db..9c7ac116d 100644 --- a/spec/lib/personas/tools/discourse_meta_search_spec.rb +++ b/spec/lib/personas/tools/discourse_meta_search_spec.rb @@ -4,7 +4,7 @@ fab!(:llm_model) { Fabricate(:llm_model, max_prompt_tokens: 8192) } let(:bot_user) { 
DiscourseAi::AiBot::EntryPoint.find_user_from_model(llm_model.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } let(:progress_blk) { Proc.new {} } let(:mock_search_json) { plugin_file_from_fixtures("search.json", "search_meta").read } diff --git a/spec/lib/personas/tools/edit_image_spec.rb b/spec/lib/personas/tools/edit_image_spec.rb index 4242aec48..d0b826da4 100644 --- a/spec/lib/personas/tools/edit_image_spec.rb +++ b/spec/lib/personas/tools/edit_image_spec.rb @@ -17,7 +17,7 @@ end let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(gpt_35_turbo.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{gpt_35_turbo.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(gpt_35_turbo) } let(:progress_blk) { Proc.new {} } let(:prompt) { "add a rainbow in the background" } diff --git a/spec/lib/personas/tools/github_file_content_spec.rb b/spec/lib/personas/tools/github_file_content_spec.rb index 4186dd01e..a5ce6a904 100644 --- a/spec/lib/personas/tools/github_file_content_spec.rb +++ b/spec/lib/personas/tools/github_file_content_spec.rb @@ -4,7 +4,7 @@ RSpec.describe DiscourseAi::Personas::Tools::GithubFileContent do fab!(:llm_model) - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } let(:tool) do described_class.new( diff --git a/spec/lib/personas/tools/github_pull_request_diff_spec.rb b/spec/lib/personas/tools/github_pull_request_diff_spec.rb index e8b3d2266..1934dd374 100644 --- a/spec/lib/personas/tools/github_pull_request_diff_spec.rb +++ b/spec/lib/personas/tools/github_pull_request_diff_spec.rb @@ -5,7 +5,7 @@ RSpec.describe DiscourseAi::Personas::Tools::GithubPullRequestDiff do let(:bot_user) { Fabricate(:user) } fab!(:llm_model) - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { 
DiscourseAi::Completions::Llm.proxy(llm_model) } let(:tool) { described_class.new({ repo: repo, pull_id: pull_id }, bot_user: bot_user, llm: llm) } context "with #sort_and_shorten_diff" do diff --git a/spec/lib/personas/tools/github_search_code_spec.rb b/spec/lib/personas/tools/github_search_code_spec.rb index b8fbca274..466b1102e 100644 --- a/spec/lib/personas/tools/github_search_code_spec.rb +++ b/spec/lib/personas/tools/github_search_code_spec.rb @@ -5,7 +5,7 @@ RSpec.describe DiscourseAi::Personas::Tools::GithubSearchCode do let(:bot_user) { Fabricate(:user) } fab!(:llm_model) - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } let(:tool) { described_class.new({ repo: repo, query: query }, bot_user: bot_user, llm: llm) } context "with valid search results" do diff --git a/spec/lib/personas/tools/github_search_files_spec.rb b/spec/lib/personas/tools/github_search_files_spec.rb index cc6926fd6..5b268efe6 100644 --- a/spec/lib/personas/tools/github_search_files_spec.rb +++ b/spec/lib/personas/tools/github_search_files_spec.rb @@ -4,7 +4,7 @@ RSpec.describe DiscourseAi::Personas::Tools::GithubSearchFiles do fab!(:llm_model) - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } let(:tool) do described_class.new( diff --git a/spec/lib/personas/tools/google_spec.rb b/spec/lib/personas/tools/google_spec.rb index 5062cea97..f0f32a078 100644 --- a/spec/lib/personas/tools/google_spec.rb +++ b/spec/lib/personas/tools/google_spec.rb @@ -3,7 +3,7 @@ RSpec.describe DiscourseAi::Personas::Tools::Google do fab!(:llm_model) let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(llm_model.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } let(:progress_blk) { Proc.new {} } let(:search) { 
described_class.new({ query: "some search term" }, bot_user: bot_user, llm: llm) } diff --git a/spec/lib/personas/tools/image_spec.rb b/spec/lib/personas/tools/image_spec.rb index 342c9f676..cb3f97d5e 100644 --- a/spec/lib/personas/tools/image_spec.rb +++ b/spec/lib/personas/tools/image_spec.rb @@ -19,7 +19,7 @@ toggle_enabled_bots(bots: [gpt_35_turbo]) end - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{gpt_35_turbo.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(gpt_35_turbo) } let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(gpt_35_turbo.name) } diff --git a/spec/lib/personas/tools/javascript_evaluator_spec.rb b/spec/lib/personas/tools/javascript_evaluator_spec.rb index cae05ee9c..05bb84339 100644 --- a/spec/lib/personas/tools/javascript_evaluator_spec.rb +++ b/spec/lib/personas/tools/javascript_evaluator_spec.rb @@ -3,7 +3,7 @@ RSpec.describe DiscourseAi::Personas::Tools::JavascriptEvaluator do fab!(:llm_model) let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(llm_model.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } let(:progress_blk) { Proc.new {} } before { SiteSetting.ai_bot_enabled = true } diff --git a/spec/lib/personas/tools/list_categories_spec.rb b/spec/lib/personas/tools/list_categories_spec.rb index bcda21233..f0b1c89b9 100644 --- a/spec/lib/personas/tools/list_categories_spec.rb +++ b/spec/lib/personas/tools/list_categories_spec.rb @@ -3,7 +3,7 @@ RSpec.describe DiscourseAi::Personas::Tools::ListCategories do fab!(:llm_model) let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(llm_model.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } before { SiteSetting.ai_bot_enabled = true } diff --git a/spec/lib/personas/tools/list_tags_spec.rb b/spec/lib/personas/tools/list_tags_spec.rb 
index b8f4ed5cd..4e8273bb3 100644 --- a/spec/lib/personas/tools/list_tags_spec.rb +++ b/spec/lib/personas/tools/list_tags_spec.rb @@ -3,7 +3,7 @@ RSpec.describe DiscourseAi::Personas::Tools::ListTags do fab!(:llm_model) let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(llm_model.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } before do SiteSetting.ai_bot_enabled = true diff --git a/spec/lib/personas/tools/read_spec.rb b/spec/lib/personas/tools/read_spec.rb index 2affc1f4e..3b04d9db1 100644 --- a/spec/lib/personas/tools/read_spec.rb +++ b/spec/lib/personas/tools/read_spec.rb @@ -3,7 +3,7 @@ RSpec.describe DiscourseAi::Personas::Tools::Read do fab!(:llm_model) let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(llm_model.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } let(:tool) { described_class.new({ topic_id: topic_with_tags.id }, bot_user: bot_user, llm: llm) } fab!(:parent_category) { Fabricate(:category, name: "animals") } diff --git a/spec/lib/personas/tools/researcher_spec.rb b/spec/lib/personas/tools/researcher_spec.rb index 8e1a35a1c..5ee02f10a 100644 --- a/spec/lib/personas/tools/researcher_spec.rb +++ b/spec/lib/personas/tools/researcher_spec.rb @@ -6,7 +6,7 @@ fab!(:llm_model) let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(llm_model.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } let(:progress_blk) { Proc.new {} } fab!(:admin) diff --git a/spec/lib/personas/tools/search_settings_spec.rb b/spec/lib/personas/tools/search_settings_spec.rb index f3cd4356e..833354618 100644 --- a/spec/lib/personas/tools/search_settings_spec.rb +++ b/spec/lib/personas/tools/search_settings_spec.rb @@ -3,7 +3,7 @@ 
RSpec.describe DiscourseAi::Personas::Tools::SearchSettings do fab!(:llm_model) let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(llm_model.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } let(:fake_settings) do [ diff --git a/spec/lib/personas/tools/search_spec.rb b/spec/lib/personas/tools/search_spec.rb index 4de518d2a..cb1765b5c 100644 --- a/spec/lib/personas/tools/search_spec.rb +++ b/spec/lib/personas/tools/search_spec.rb @@ -6,7 +6,7 @@ fab!(:llm_model) let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(llm_model.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } let(:progress_blk) { Proc.new {} } fab!(:admin) @@ -105,7 +105,8 @@ after { DiscourseAi::Embeddings::SemanticSearch.clear_cache_for(query) } it "supports semantic search when enabled" do - assign_fake_provider_to(:ai_embeddings_semantic_search_hyde_model) + assign_fake_provider_to(:ai_default_llm_model) + vector_def = Fabricate(:embedding_definition) SiteSetting.ai_embeddings_selected_model = vector_def.id SiteSetting.ai_embeddings_semantic_search_enabled = true diff --git a/spec/lib/personas/tools/setting_context_spec.rb b/spec/lib/personas/tools/setting_context_spec.rb index 20e26b641..d43032fe2 100644 --- a/spec/lib/personas/tools/setting_context_spec.rb +++ b/spec/lib/personas/tools/setting_context_spec.rb @@ -12,7 +12,7 @@ def has_rg? 
fab!(:llm_model) let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(llm_model.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } before { SiteSetting.ai_bot_enabled = true } diff --git a/spec/lib/personas/tools/summarize_spec.rb b/spec/lib/personas/tools/summarize_spec.rb index 2bda3cd37..eb88441f6 100644 --- a/spec/lib/personas/tools/summarize_spec.rb +++ b/spec/lib/personas/tools/summarize_spec.rb @@ -3,7 +3,7 @@ RSpec.describe DiscourseAi::Personas::Tools::Summarize do fab!(:llm_model) let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(llm_model.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } let(:progress_blk) { Proc.new {} } before { SiteSetting.ai_bot_enabled = true } diff --git a/spec/lib/personas/tools/time_spec.rb b/spec/lib/personas/tools/time_spec.rb index e92a32ad5..8dd5492b4 100644 --- a/spec/lib/personas/tools/time_spec.rb +++ b/spec/lib/personas/tools/time_spec.rb @@ -3,7 +3,7 @@ RSpec.describe DiscourseAi::Personas::Tools::Time do fab!(:llm_model) let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(llm_model.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } before { SiteSetting.ai_bot_enabled = true } diff --git a/spec/lib/personas/tools/web_browser_spec.rb b/spec/lib/personas/tools/web_browser_spec.rb index aebd4e66d..318e05f71 100644 --- a/spec/lib/personas/tools/web_browser_spec.rb +++ b/spec/lib/personas/tools/web_browser_spec.rb @@ -3,7 +3,7 @@ RSpec.describe DiscourseAi::Personas::Tools::WebBrowser do fab!(:llm_model) let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(llm_model.name) } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { 
DiscourseAi::Completions::Llm.proxy(llm_model) } before { SiteSetting.ai_bot_enabled = true } diff --git a/spec/lib/translation/base_translator_spec.rb b/spec/lib/translation/base_translator_spec.rb index 729b2e0c4..da32df7b7 100644 --- a/spec/lib/translation/base_translator_spec.rb +++ b/spec/lib/translation/base_translator_spec.rb @@ -8,10 +8,7 @@ end before do - Fabricate(:fake_model).tap do |fake_llm| - SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") - end - + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_translation_enabled = true end diff --git a/spec/lib/translation/category_localizer_spec.rb b/spec/lib/translation/category_localizer_spec.rb index cfea804ef..00e268a7c 100644 --- a/spec/lib/translation/category_localizer_spec.rb +++ b/spec/lib/translation/category_localizer_spec.rb @@ -3,11 +3,7 @@ describe DiscourseAi::Translation::CategoryLocalizer do subject(:localizer) { described_class } - before do - Fabricate(:fake_model).tap do |fake_llm| - SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") - end - end + before { assign_fake_provider_to(:ai_default_llm_model) } def post_raw_translator_stub(opts) mock = instance_double(DiscourseAi::Translation::PostRawTranslator) diff --git a/spec/lib/translation/entry_point_spec.rb b/spec/lib/translation/entry_point_spec.rb index a2cd00d4c..44a4c0f99 100644 --- a/spec/lib/translation/entry_point_spec.rb +++ b/spec/lib/translation/entry_point_spec.rb @@ -2,10 +2,8 @@ describe DiscourseAi::Translation::EntryPoint do before do + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.discourse_ai_enabled = true - Fabricate(:fake_model).tap do |fake_llm| - SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") - end SiteSetting.ai_translation_enabled = true SiteSetting.content_localization_supported_locales = "en" end @@ -59,9 +57,7 @@ before do SiteSetting.discourse_ai_enabled = true - Fabricate(:fake_model).tap do |fake_llm| - 
SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") - end + assign_fake_provider_to(:ai_default_llm_model) end it "enqueues detect topic locale and translate topic job" do diff --git a/spec/lib/translation/language_detector_spec.rb b/spec/lib/translation/language_detector_spec.rb index 9277690cf..99393e7e5 100644 --- a/spec/lib/translation/language_detector_spec.rb +++ b/spec/lib/translation/language_detector_spec.rb @@ -7,11 +7,7 @@ ) end - before do - Fabricate(:fake_model).tap do |fake_llm| - SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") - end - end + before { assign_fake_provider_to(:ai_default_llm_model) } describe ".detect" do let(:locale_detector) { described_class.new("meow") } @@ -38,7 +34,7 @@ allow(DiscourseAi::Completions::Prompt).to receive(:new).and_return(mock_prompt) allow(DiscourseAi::Completions::Llm).to receive(:proxy).with( - SiteSetting.ai_translation_model, + SiteSetting.ai_default_llm_model, ).and_return(mock_llm) allow(mock_llm).to receive(:generate).with( mock_prompt, diff --git a/spec/lib/utils/search_spec.rb b/spec/lib/utils/search_spec.rb index 561aac207..6c185fc16 100644 --- a/spec/lib/utils/search_spec.rb +++ b/spec/lib/utils/search_spec.rb @@ -141,7 +141,8 @@ end it "includes semantic search results when enabled" do - assign_fake_provider_to(:ai_embeddings_semantic_search_hyde_model) + assign_fake_provider_to(:ai_default_llm_model) + vector_def = Fabricate(:embedding_definition) SiteSetting.ai_embeddings_selected_model = vector_def.id SiteSetting.ai_embeddings_semantic_search_enabled = true @@ -165,7 +166,8 @@ end it "can disable semantic search with hyde parameter" do - assign_fake_provider_to(:ai_embeddings_semantic_search_hyde_model) + assign_fake_provider_to(:ai_default_llm_model) + vector_def = Fabricate(:embedding_definition) SiteSetting.ai_embeddings_selected_model = vector_def.id SiteSetting.ai_embeddings_semantic_search_enabled = true diff --git 
a/spec/models/ai_persona_spec.rb b/spec/models/ai_persona_spec.rb index 0e6b9d13d..46d59e527 100644 --- a/spec/models/ai_persona_spec.rb +++ b/spec/models/ai_persona_spec.rb @@ -211,20 +211,6 @@ ) end - it "validates allowed seeded model" do - basic_persona.default_llm_id = seeded_llm_model.id - - SiteSetting.ai_bot_allowed_seeded_models = "" - - expect(basic_persona.valid?).to eq(false) - expect(basic_persona.errors[:default_llm]).to include( - I18n.t("discourse_ai.llm.configuration.invalid_seeded_model"), - ) - - SiteSetting.ai_bot_allowed_seeded_models = "-1" - expect(basic_persona.valid?).to eq(true) - end - it "does not leak caches between sites" do AiPersona.create!( name: "pun_bot", diff --git a/spec/models/ai_tool_spec.rb b/spec/models/ai_tool_spec.rb index 96a3f667d..a11f69554 100644 --- a/spec/models/ai_tool_spec.rb +++ b/spec/models/ai_tool_spec.rb @@ -2,7 +2,7 @@ RSpec.describe AiTool do fab!(:llm_model) { Fabricate(:llm_model, name: "claude-2") } - let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") } + let(:llm) { DiscourseAi::Completions::Llm.proxy(llm_model) } fab!(:topic) fab!(:post) { Fabricate(:post, topic: topic, raw: "bananas are a tasty fruit") } fab!(:bot_user) { Discourse.system_user } diff --git a/spec/models/user_option_spec.rb b/spec/models/user_option_spec.rb index 34121ab93..23050676f 100644 --- a/spec/models/user_option_spec.rb +++ b/spec/models/user_option_spec.rb @@ -9,8 +9,7 @@ end before do - assign_fake_provider_to(:ai_helper_model) - assign_fake_provider_to(:ai_helper_image_caption_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_helper_enabled = true SiteSetting.ai_helper_enabled_features = "image_caption" SiteSetting.ai_auto_image_caption_allowed_groups = "10" # tl0 diff --git a/spec/plugin_helper.rb b/spec/plugin_helper.rb index 0346df7ca..4e2a58a7f 100644 --- a/spec/plugin_helper.rb +++ b/spec/plugin_helper.rb @@ -12,7 +12,7 @@ def toggle_enabled_bots(bots: []) def 
assign_fake_provider_to(setting_name) Fabricate(:fake_model).tap do |fake_llm| - SiteSetting.public_send("#{setting_name}=", "custom:#{fake_llm.id}") + SiteSetting.public_send("#{setting_name}=", fake_llm.id.to_s) end end diff --git a/spec/plugin_spec.rb b/spec/plugin_spec.rb index 9a69041f7..41523c4eb 100644 --- a/spec/plugin_spec.rb +++ b/spec/plugin_spec.rb @@ -7,7 +7,7 @@ fab!(:user) before do - assign_fake_provider_to(:ai_helper_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_helper_enabled = true SiteSetting.ai_helper_illustrate_post_model = "disabled" Group.find_by(id: Group::AUTO_GROUPS[:admins]).add(user) diff --git a/spec/requests/admin/ai_features_controller_spec.rb b/spec/requests/admin/ai_features_controller_spec.rb index f826e0bc4..e7b2cc840 100644 --- a/spec/requests/admin/ai_features_controller_spec.rb +++ b/spec/requests/admin/ai_features_controller_spec.rb @@ -10,6 +10,7 @@ before do sign_in(admin) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_bot_enabled = true SiteSetting.discourse_ai_enabled = true end diff --git a/spec/requests/admin/ai_llms_controller_spec.rb b/spec/requests/admin/ai_llms_controller_spec.rb index e21467273..084c4b3bb 100644 --- a/spec/requests/admin/ai_llms_controller_spec.rb +++ b/spec/requests/admin/ai_llms_controller_spec.rb @@ -49,20 +49,19 @@ it "lists enabled features on appropriate LLMs" do SiteSetting.ai_bot_enabled = true + fake_model = assign_fake_provider_to(:ai_default_llm_model) # setting the setting calls the model DiscourseAi::Completions::Llm.with_prepared_responses(["OK"]) do - SiteSetting.ai_helper_model = "custom:#{llm_model.id}" + SiteSetting.ai_helper_proofreader_persona = ai_persona.id SiteSetting.ai_helper_enabled = true end DiscourseAi::Completions::Llm.with_prepared_responses(["OK"]) do - SiteSetting.ai_summarization_model = "custom:#{llm_model2.id}" SiteSetting.ai_summarization_enabled = true end DiscourseAi::Completions::Llm.with_prepared_responses(["OK"]) do 
- SiteSetting.ai_embeddings_semantic_search_hyde_model = "custom:#{llm_model2.id}" SiteSetting.ai_embeddings_semantic_search_enabled = true end @@ -71,15 +70,18 @@ llms = response.parsed_body["ai_llms"] model_json = llms.find { |m| m["id"] == llm_model.id } - expect(model_json["used_by"]).to contain_exactly( - { "type" => "ai_bot" }, - { "type" => "ai_helper" }, - ) + expect(model_json["used_by"]).to contain_exactly({ "type" => "ai_bot" }) model2_json = llms.find { |m| m["id"] == llm_model2.id } expect(model2_json["used_by"]).to contain_exactly( { "type" => "ai_persona", "name" => "Cool persona", "id" => ai_persona.id }, + { "type" => "ai_helper", "name" => "Proofread text" }, + ) + + model3_json = llms.find { |m| m["id"] == fake_model.id } + + expect(model3_json["used_by"]).to contain_exactly( { "type" => "ai_summarization" }, { "type" => "ai_embeddings_semantic_search" }, ) @@ -471,16 +473,16 @@ error_type: "validation", } - WebMock.stub_request(:post, test_attrs[:url]).to_return( - status: 422, - body: error_message.to_json, - ) + error = + DiscourseAi::Completions::Endpoints::Base::CompletionFailed.new(error_message.to_json) - get "/admin/plugins/discourse-ai/ai-llms/test.json", params: { ai_llm: test_attrs } + DiscourseAi::Completions::Llm.with_prepared_responses([error]) do + get "/admin/plugins/discourse-ai/ai-llms/test.json", params: { ai_llm: test_attrs } - expect(response).to be_successful - expect(response.parsed_body["success"]).to eq(false) - expect(response.parsed_body["error"]).to eq(error_message.to_json) + expect(response).to be_successful + expect(response.parsed_body["success"]).to eq(false) + expect(response.parsed_body["error"]).to eq(error_message.to_json) + end end end end @@ -498,7 +500,6 @@ it "logs staff action when deleting an LLM model" do # Capture the model details before deletion for comparison - model_id = llm_model.id model_display_name = llm_model.display_name # Delete the model @@ -515,13 +516,15 @@ expect(history.subject).to 
eq(model_display_name) # Verify subject is set to display_name end - it "validates the model is not in use" do - fake_llm = assign_fake_provider_to(:ai_helper_model) - - delete "/admin/plugins/discourse-ai/ai-llms/#{fake_llm.id}.json" + context "with llms configured" do + fab!(:ai_persona) { Fabricate(:ai_persona, default_llm_id: llm_model.id) } - expect(response.status).to eq(409) - expect(fake_llm.reload).to eq(fake_llm) + before { assign_fake_provider_to(:ai_helper_model) } + it "validates the model is not in use" do + delete "/admin/plugins/discourse-ai/ai-llms/#{llm_model.id}.json" + expect(response.status).to eq(409) + expect(llm_model.reload).to eq(llm_model) + end end it "cleans up companion users before deleting the model" do diff --git a/spec/requests/ai_helper/assistant_controller_spec.rb b/spec/requests/ai_helper/assistant_controller_spec.rb index 47d5d7d73..5b921e160 100644 --- a/spec/requests/ai_helper/assistant_controller_spec.rb +++ b/spec/requests/ai_helper/assistant_controller_spec.rb @@ -1,10 +1,14 @@ # frozen_string_literal: true RSpec.describe DiscourseAi::AiHelper::AssistantController do - before { assign_fake_provider_to(:ai_helper_model) } fab!(:newuser) fab!(:user) { Fabricate(:user, refresh_auto_groups: true) } + before do + assign_fake_provider_to(:ai_default_llm_model) + SiteSetting.ai_helper_enabled = true + end + describe "#stream_suggestion" do before do Jobs.run_immediately! 
@@ -305,8 +309,6 @@ end let(:bad_caption) { "A picture of a cat \nsitting on a |table|" } - before { assign_fake_provider_to(:ai_helper_image_caption_model) } - def request_caption(params, caption = "A picture of a cat sitting on a table") DiscourseAi::Completions::Llm.with_prepared_responses([caption]) do post "/discourse-ai/ai-helper/caption_image", params: params @@ -411,7 +413,6 @@ def request_caption(params, caption = "A picture of a cat sitting on a table") SiteSetting.provider = SiteSettings::DbProvider.new(SiteSetting) setup_s3 stub_s3_store - assign_fake_provider_to(:ai_helper_image_caption_model) SiteSetting.secure_uploads = true SiteSetting.composer_ai_helper_allowed_groups = Group::AUTO_GROUPS[:trust_level_1] diff --git a/spec/requests/embeddings/embeddings_controller_spec.rb b/spec/requests/embeddings/embeddings_controller_spec.rb index 0408aa37d..dd46d03bc 100644 --- a/spec/requests/embeddings/embeddings_controller_spec.rb +++ b/spec/requests/embeddings/embeddings_controller_spec.rb @@ -120,7 +120,7 @@ def create_api_key(user) end it "doesn't skip HyDE if the hyde param is missing" do - assign_fake_provider_to(:ai_embeddings_semantic_search_hyde_model) + assign_fake_provider_to(:ai_default_llm_model) index(topic) index(topic_in_subcategory) diff --git a/spec/requests/summarization/chat_summary_controller_spec.rb b/spec/requests/summarization/chat_summary_controller_spec.rb index f23c447ec..203fb609e 100644 --- a/spec/requests/summarization/chat_summary_controller_spec.rb +++ b/spec/requests/summarization/chat_summary_controller_spec.rb @@ -7,7 +7,7 @@ before do group.add(current_user) - assign_fake_provider_to(:ai_summarization_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_summarization_enabled = true SiteSetting.ai_custom_summarization_allowed_groups = group.id diff --git a/spec/requests/summarization/summary_controller_spec.rb b/spec/requests/summarization/summary_controller_spec.rb index 5c051f3da..5f368727b 100644 --- 
a/spec/requests/summarization/summary_controller_spec.rb +++ b/spec/requests/summarization/summary_controller_spec.rb @@ -7,7 +7,7 @@ fab!(:post_2) { Fabricate(:post, topic: topic, post_number: 2) } before do - assign_fake_provider_to(:ai_summarization_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_summarization_enabled = true end diff --git a/spec/services/discourse_ai/topic_summarization_spec.rb b/spec/services/discourse_ai/topic_summarization_spec.rb index 444cfc671..71696d4ab 100644 --- a/spec/services/discourse_ai/topic_summarization_spec.rb +++ b/spec/services/discourse_ai/topic_summarization_spec.rb @@ -7,7 +7,7 @@ fab!(:post_2) { Fabricate(:post, topic: topic, post_number: 2) } before do - assign_fake_provider_to(:ai_summarization_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_summarization_enabled = true end diff --git a/spec/services/problem_check/ai_llm_status_spec.rb b/spec/services/problem_check/ai_llm_status_spec.rb index b92274eb9..5c846645d 100644 --- a/spec/services/problem_check/ai_llm_status_spec.rb +++ b/spec/services/problem_check/ai_llm_status_spec.rb @@ -6,6 +6,7 @@ subject(:check) { described_class.new } fab!(:llm_model) + fab!(:ai_persona) { Fabricate(:ai_persona, default_llm_id: llm_model.id) } let(:post_url) { "https://api.openai.com/v1/chat/completions" } let(:success_response) do @@ -26,7 +27,8 @@ before do stub_request(:post, post_url).to_return(status: 200, body: success_response, headers: {}) - SiteSetting.ai_summarization_model = "custom:#{llm_model.id}" + assign_fake_provider_to(:ai_default_llm_model) + SiteSetting.ai_summarization_persona = ai_persona.id SiteSetting.ai_summarization_enabled = true end diff --git a/spec/system/admin_ai_features_spec.rb b/spec/system/admin_ai_features_spec.rb index 39c07bbe2..e395d3cc9 100644 --- a/spec/system/admin_ai_features_spec.rb +++ b/spec/system/admin_ai_features_spec.rb @@ -13,7 +13,7 @@ before do summarization_persona.allowed_group_ids = 
[group_1.id, group_2.id] summarization_persona.save! - assign_fake_provider_to(:ai_summarization_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_summarization_enabled = true SiteSetting.ai_summarization_persona = summarization_persona.id sign_in(admin) diff --git a/spec/system/ai_helper/ai_composer_helper_spec.rb b/spec/system/ai_helper/ai_composer_helper_spec.rb index 7cd5f319f..71a9e4d94 100644 --- a/spec/system/ai_helper/ai_composer_helper_spec.rb +++ b/spec/system/ai_helper/ai_composer_helper_spec.rb @@ -7,7 +7,7 @@ before do Group.find_by(id: Group::AUTO_GROUPS[:admins]).add(user) - assign_fake_provider_to(:ai_helper_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_helper_enabled = true Jobs.run_immediately! sign_in(user) diff --git a/spec/system/ai_helper/ai_image_caption_spec.rb b/spec/system/ai_helper/ai_image_caption_spec.rb index d034d28ab..9134bd31a 100644 --- a/spec/system/ai_helper/ai_image_caption_spec.rb +++ b/spec/system/ai_helper/ai_image_caption_spec.rb @@ -23,8 +23,7 @@ before do Group.find_by(id: Group::AUTO_GROUPS[:admins]).add(user) - assign_fake_provider_to(:ai_helper_model) - assign_fake_provider_to(:ai_helper_image_caption_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_helper_enabled = true SiteSetting.ai_helper_enabled_features = "image_caption" sign_in(user) diff --git a/spec/system/ai_helper/ai_post_helper_spec.rb b/spec/system/ai_helper/ai_post_helper_spec.rb index 14c1b5766..8c9ec16a3 100644 --- a/spec/system/ai_helper/ai_post_helper_spec.rb +++ b/spec/system/ai_helper/ai_post_helper_spec.rb @@ -28,7 +28,7 @@ before do Group.find_by(id: Group::AUTO_GROUPS[:admins]).add(user) - assign_fake_provider_to(:ai_helper_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_helper_enabled = true Jobs.run_immediately! 
sign_in(user) diff --git a/spec/system/ai_helper/ai_proofreading_spec.rb b/spec/system/ai_helper/ai_proofreading_spec.rb index b2e00733b..fd64999fc 100644 --- a/spec/system/ai_helper/ai_proofreading_spec.rb +++ b/spec/system/ai_helper/ai_proofreading_spec.rb @@ -6,7 +6,7 @@ fab!(:admin) { Fabricate(:admin, refresh_auto_groups: true) } before do - assign_fake_provider_to(:ai_helper_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_helper_enabled = true # This needs to be done because the streaming suggestions for composer diff --git a/spec/system/ai_helper/ai_split_topic_suggestion_spec.rb b/spec/system/ai_helper/ai_split_topic_suggestion_spec.rb index 6cb3bdd92..ba8041b50 100644 --- a/spec/system/ai_helper/ai_split_topic_suggestion_spec.rb +++ b/spec/system/ai_helper/ai_split_topic_suggestion_spec.rb @@ -39,7 +39,7 @@ before do Group.find_by(id: Group::AUTO_GROUPS[:admins]).add(user) - assign_fake_provider_to(:ai_helper_model) + assign_fake_provider_to(:ai_default_llm_model) SiteSetting.ai_helper_enabled = true sign_in(user) end diff --git a/spec/system/ai_moderation/ai_spam_spec.rb b/spec/system/ai_moderation/ai_spam_spec.rb index 4640b760b..b48d2fac5 100644 --- a/spec/system/ai_moderation/ai_spam_spec.rb +++ b/spec/system/ai_moderation/ai_spam_spec.rb @@ -2,47 +2,51 @@ RSpec.describe "AI Spam Configuration", type: :system, js: true do fab!(:admin) - let(:llm_model) { Fabricate(:llm_model) } before do SiteSetting.discourse_ai_enabled = true sign_in(admin) end - it "can properly configure spam settings" do - visit "/admin/plugins/discourse-ai/ai-spam" + context "when no LLMs are configured" do + it "shows the placeholder when no LLM is configured" do + visit "/admin/plugins/discourse-ai/ai-spam" - expect(page).to have_css(".ai-spam__llm-placeholder") + expect(page).to have_css(".ai-spam__llm-placeholder") - toggle = PageObjects::Components::DToggleSwitch.new(".ai-spam__toggle") + toggle = 
PageObjects::Components::DToggleSwitch.new(".ai-spam__toggle") - toggle.toggle - dialog = PageObjects::Components::Dialog.new - expect(dialog).to have_content(I18n.t("discourse_ai.llm.configuration.must_select_model")) - dialog.click_ok + toggle.toggle + dialog = PageObjects::Components::Dialog.new + expect(dialog).to have_content(I18n.t("discourse_ai.llm.configuration.must_select_model")) + dialog.click_ok - expect(toggle.unchecked?).to eq(true) - - llm_model - visit "/admin/plugins/discourse-ai/ai-spam" + expect(toggle.unchecked?).to eq(true) + end + end + context "when LLMs are configured" do + fab!(:llm_model) + it "can properly configure spam settings" do + visit "/admin/plugins/discourse-ai/ai-spam" - toggle = PageObjects::Components::DToggleSwitch.new(".ai-spam__toggle") - toggle.toggle + toggle = PageObjects::Components::DToggleSwitch.new(".ai-spam__toggle") + toggle.toggle - try_until_success { expect(AiModerationSetting.spam&.llm_model_id).to eq(llm_model.id) } + try_until_success { expect(AiModerationSetting.spam&.llm_model_id).to eq(llm_model.id) } - find(".ai-spam__instructions-input").fill_in(with: "Test spam detection instructions") - find(".ai-spam__instructions-save").click + find(".ai-spam__instructions-input").fill_in(with: "Test spam detection instructions") + find(".ai-spam__instructions-save").click - toasts = PageObjects::Components::Toasts.new - expect(toasts).to have_content(I18n.t("js.discourse_ai.spam.settings_saved")) + toasts = PageObjects::Components::Toasts.new + expect(toasts).to have_content(I18n.t("js.discourse_ai.spam.settings_saved")) - expect(AiModerationSetting.spam.custom_instructions).to eq("Test spam detection instructions") + expect(AiModerationSetting.spam.custom_instructions).to eq("Test spam detection instructions") - visit "/admin/plugins/discourse-ai/ai-llms" + visit "/admin/plugins/discourse-ai/ai-llms" - expect(find(".ai-llm-list-editor__usages")).to have_content( - I18n.t("js.discourse_ai.llms.usage.ai_spam"), - ) + 
expect(find(".ai-llm-list-editor__usages")).to have_content( + I18n.t("js.discourse_ai.llms.usage.ai_spam"), + ) + end end end diff --git a/spec/system/ai_user_preferences_spec.rb b/spec/system/ai_user_preferences_spec.rb index e5379da58..2c1865fc0 100644 --- a/spec/system/ai_user_preferences_spec.rb +++ b/spec/system/ai_user_preferences_spec.rb @@ -12,8 +12,7 @@ SiteSetting.discourse_ai_enabled = true SiteSetting.ai_bot_discover_persona = discovery_persona.id Group.find_by(id: Group::AUTO_GROUPS[:admins]).add(user) - assign_fake_provider_to(:ai_helper_model) - assign_fake_provider_to(:ai_helper_image_caption_model) + assign_fake_provider_to(:ai_default_llm_model) sign_in(user) end diff --git a/spec/system/summarization/chat_summarization_spec.rb b/spec/system/summarization/chat_summarization_spec.rb index dbb78417d..92dd609cb 100644 --- a/spec/system/summarization/chat_summarization_spec.rb +++ b/spec/system/summarization/chat_summarization_spec.rb @@ -11,7 +11,7 @@ before do group.add(current_user) - assign_fake_provider_to(:ai_summarization_model) + assign_fake_provider_to(:ai_default_llm_model) assign_persona_to(:ai_summarization_persona, [group.id]) SiteSetting.ai_summarization_enabled = true diff --git a/spec/system/summarization/topic_summarization_spec.rb b/spec/system/summarization/topic_summarization_spec.rb index 30b147a02..598323de6 100644 --- a/spec/system/summarization/topic_summarization_spec.rb +++ b/spec/system/summarization/topic_summarization_spec.rb @@ -23,7 +23,7 @@ before do group.add(current_user) - assign_fake_provider_to(:ai_summarization_model) + assign_fake_provider_to(:ai_default_llm_model) assign_persona_to(:ai_summarization_persona, [group.id]) SiteSetting.ai_summarization_enabled = true