This repository was archived by the owner on Jul 22, 2025. It is now read-only.
Closed
Changes from 15 commits
Commits
35 commits
2592be6
FEATURE: configure a default LLM model for all features
keegangeorge Jul 9, 2025
cad39c1
DEV: client side
keegangeorge Jul 9, 2025
5a590fd
DEV: updates...
keegangeorge Jul 9, 2025
29d4157
fix
keegangeorge Jul 9, 2025
5a29074
DEV: Use default LLM model
keegangeorge Jul 10, 2025
249aab1
DEV: Use a simple validator
keegangeorge Jul 10, 2025
b46940f
DEV: hide reset button
keegangeorge Jul 10, 2025
c49f1df
DEV: migrate hyde model to content creator persona
keegangeorge Jul 10, 2025
87c4214
DEV: hide hyde model setting
keegangeorge Jul 10, 2025
f366ded
DEV: Force default to be set if it was found not to be set!
keegangeorge Jul 11, 2025
b6a88ff
spec
keegangeorge Jul 11, 2025
8da4318
DEV: `assign_fake_provider`
keegangeorge Jul 11, 2025
2377b28
DEV: rely on default llm model in spec
keegangeorge Jul 15, 2025
88811f9
fixes
keegangeorge Jul 15, 2025
b58f198
FIX: don't `run_test` in testing env
keegangeorge Jul 15, 2025
f593ab6
DEV: remove `custom:` prefix
keegangeorge Jul 16, 2025
7ae61ce
DEV: update automation to remove `custom:` prefix
keegangeorge Jul 16, 2025
c26d604
FIX: dependency validator should depend on default LLM setting
keegangeorge Jul 16, 2025
0fadf1d
DEV: update enumerator
keegangeorge Jul 16, 2025
b675c4c
DEV: Remove custom prefix in specs
keegangeorge Jul 16, 2025
5ae8064
Merge branch 'main' into default-llm-model
keegangeorge Jul 16, 2025
5841543
FIX: validator
keegangeorge Jul 16, 2025
7af3ce8
DEV: Remove references to translation model
keegangeorge Jul 17, 2025
273a1fa
FIX: occurrences of old model setting
keegangeorge Jul 17, 2025
08d2f3d
FIX: spam
keegangeorge Jul 17, 2025
f077d58
FIX: automation spec
keegangeorge Jul 17, 2025
a927fd2
fix
keegangeorge Jul 17, 2025
d9b53b4
FIX: semantic search
keegangeorge Jul 17, 2025
4b03cdc
FIX: LLM controller spec
keegangeorge Jul 17, 2025
cc34882
FIX: rest of specs
keegangeorge Jul 17, 2025
eb93e17
Final spec fix 🤞
keegangeorge Jul 17, 2025
f420261
DEV: Apply feedback from review
keegangeorge Jul 18, 2025
fbbe01e
DEV: Hide seeded LLMs in the enumerator for now
keegangeorge Jul 21, 2025
7dee59c
DEV: Remove no longer needed seeded model check
keegangeorge Jul 21, 2025
822594f
Update db/migrate/20250716005855_copy_ai_image_caption_model_to_perso…
romanrizzi Jul 21, 2025
46 changes: 46 additions & 0 deletions assets/javascripts/discourse/components/ai-default-llm-selector.gjs
@@ -0,0 +1,46 @@
import Component from "@glimmer/component";
import { tracked } from "@glimmer/tracking";
import { ajax } from "discourse/lib/ajax";
import { i18n } from "discourse-i18n";
import SiteSettingComponent from "admin/components/site-setting";
import SiteSetting from "admin/models/site-setting";

export default class AiDefaultLlmSelector extends Component {
  @tracked defaultLlmSetting = null;

  constructor() {
    super(...arguments);
    this.#loadDefaultLlmSetting();
  }

  async #loadDefaultLlmSetting() {
    const { site_settings } = await ajax("/admin/config/site_settings.json", {
      data: {
        plugin: "discourse-ai",
        category: "discourse_ai",
      },
    });

    const defaultLlmSetting = site_settings.find(
      (setting) => setting.setting === "ai_default_llm_model"
    );

    this.defaultLlmSetting = SiteSetting.create(defaultLlmSetting);
  }

  <template>
    <div class="ai-configure-default-llm">
      <div class="ai-configure-default-llm__header">
        <h3>{{i18n "discourse_ai.default_llm.title"}}</h3>
        <p>{{i18n "discourse_ai.default_llm.description"}}</p>
      </div>

      {{#if this.defaultLlmSetting}}
        <SiteSettingComponent
          @setting={{this.defaultLlmSetting}}
          class="ai-configure-default-llm__setting"
        />
      {{/if}}
    </div>
  </template>
}
3 changes: 3 additions & 0 deletions assets/javascripts/discourse/components/ai-features.gjs
@@ -9,6 +9,7 @@ import DPageSubheader from "discourse/components/d-page-subheader";
import DSelect from "discourse/components/d-select";
import FilterInput from "discourse/components/filter-input";
import { i18n } from "discourse-i18n";
import AiDefaultLlmSelector from "./ai-default-llm-selector";
import AiFeaturesList from "./ai-features-list";

const ALL = "all";
@@ -202,6 +203,8 @@ export default class AiFeatures extends Component {
/>
</div>

<AiDefaultLlmSelector />

{{#if this.filteredFeatures.length}}
<AiFeaturesList @modules={{this.filteredFeatures}} />
{{else}}
assets/javascripts/discourse/components/ai-llms-list-editor.gjs
@@ -9,6 +9,7 @@ import I18n, { i18n } from "discourse-i18n";
import AdminSectionLandingItem from "admin/components/admin-section-landing-item";
import AdminSectionLandingWrapper from "admin/components/admin-section-landing-wrapper";
import DTooltip from "float-kit/components/d-tooltip";
import AiDefaultLlmSelector from "./ai-default-llm-selector";
import AiLlmEditor from "./ai-llm-editor";

function isPreseeded(llm) {
@@ -137,6 +138,9 @@
}}
@learnMoreUrl="https://meta.discourse.org/t/discourse-ai-large-language-model-llm-settings-page/319903"
/>

<AiDefaultLlmSelector />

{{#if this.hasLlmElements}}
<section class="ai-llms-list-editor__configured">
<DPageSubheader
48 changes: 48 additions & 0 deletions assets/stylesheets/common/ai-features.scss
@@ -158,3 +158,51 @@
.ai-expanded-list__toggle-button {
  padding: 0;
}

.ai-configure-default-llm {
  display: flex;
  align-items: center;
  gap: var(--space-2);
  background: var(--primary-very-low);
  padding: 1rem;
  margin-block: 1rem;
  border-radius: var(--d-border-radius);

  &__header {
    flex: 3;
  }

  &__setting {
    flex: 2;
    display: flex;
    gap: var(--space-2);
    align-items: center;
    justify-content: center;

    .setting-controls__undo {
      display: none;
    }
  }

  h3 {
    color: var(--primary);
    font-size: var(--font-0);
    margin-bottom: 0;
  }

  p {
    margin-top: 0;
    font-size: var(--font-down-1);
    color: var(--primary-high-or-secondary-low);
  }

  .select-kit,
  .btn {
    font-size: var(--font-down-1);
  }

  .setting-label,
  .desc {
    display: none;
  }
}
4 changes: 4 additions & 0 deletions config/locales/client.en.yml
@@ -181,6 +181,10 @@ en:
    discourse_ai:
      title: "AI"

      default_llm:
        title: "Default LLM model"
        description: "The default LLM model to use for all AI features. This will be used if no LLM is specified in the feature configuration or persona. If no default LLM is specified, the last created LLM will be used."

      features:
        short_title: "Features"
        description: "These are the AI features available to visitors on your site. These can be configured to use specific personas and LLMs, and can be access controlled by groups."
1 change: 1 addition & 0 deletions config/locales/server.en.yml
@@ -25,6 +25,7 @@ en:
description: "Periodic report based on a large language model"
site_settings:
discourse_ai_enabled: "Enable the discourse AI plugin."
ai_default_llm_model: "The default LLM model to use for all AI features"
ai_artifact_security: "The AI artifact system generates IFRAMEs with runnable code. Strict mode forces an extra click to run code. Lax mode runs code immediately. Hybrid mode allows user to supply data-ai-artifact-autorun to show right away. Disabled mode disables the artifact system."
ai_toxicity_enabled: "Enable the toxicity module."
ai_toxicity_inference_service_api_endpoint: "URL where the API is running for the toxicity module"
15 changes: 11 additions & 4 deletions config/settings.yml
@@ -11,6 +11,12 @@ discourse_ai:
      - "lax"
      - "hybrid"
      - "strict"
  ai_default_llm_model:
    default: ""
    type: enum
    allow_any: false
    enum: "DiscourseAi::Configuration::LlmEnumerator"
    validator: "DiscourseAi::Configuration::SimpleLlmValidator"

  ai_sentiment_enabled:
    default: false
@@ -103,7 +109,7 @@
    default: false
    client: true
    area: "ai-features/ai_helper"
-  ai_helper_model:
+  ai_helper_model: # Deprecated. TODO(keegan): Remove 2025-09-01
    default: ""
    allow_any: false
    type: enum
@@ -149,7 +155,7 @@
      - "context_menu"
      - "image_caption"
    area: "ai-features/ai_helper"
-  ai_helper_image_caption_model:
+  ai_helper_image_caption_model: # Deprecated. TODO(keegan): Remove 2025-09-01
    default: ""
    type: enum
    enum: "DiscourseAi::Configuration::LlmVisionEnumerator"
@@ -260,7 +266,8 @@
    client: true
    validator: "DiscourseAi::Configuration::LlmDependencyValidator"
    area: "ai-features/embeddings"
-  ai_embeddings_semantic_search_hyde_model:
+  ai_embeddings_semantic_search_hyde_model: # Deprecated. TODO(keegan): Remove 2025-09-01
+    hidden: true
    default: ""
    type: enum
    allow_any: false
@@ -316,7 +323,7 @@
    client: true
    validator: "DiscourseAi::Configuration::LlmDependencyValidator"
    area: "ai-features/summarization"
-  ai_summarization_model:
+  ai_summarization_model: # Deprecated. TODO(keegan): Remove 2025-09-01
    default: ""
    allow_any: false
    type: enum
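
A quick way to exercise the new setting from a Rails console (a sketch only — the id `3` is hypothetical, and at this stage of the branch the stored value still carries the legacy `custom:` prefix that later commits remove):

# Assumes an LlmModel with id 3 already exists.
SiteSetting.ai_default_llm_model = "custom:3"

# Features that don't specify their own LLM fall back to this value, e.g. via Llm.proxy:
DiscourseAi::Completions::Llm.proxy(SiteSetting.ai_default_llm_model)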
16 changes: 16 additions & 0 deletions db/migrate/20250710173803_seed_ai_default_llm_model.rb
@@ -0,0 +1,16 @@
# frozen_string_literal: true
class SeedAiDefaultLlmModel < ActiveRecord::Migration[7.2]
  def up
    return if DB.query_single("SELECT 1 FROM llm_models LIMIT 1").empty?

    last_model_id = DB.query_single("SELECT id FROM llm_models ORDER BY id DESC LIMIT 1").first

    if last_model_id.present?
      execute "UPDATE site_settings SET value = 'custom:#{last_model_id}' WHERE name = 'ai_default_llm_model' AND (value IS NULL OR value = '');"
    end
  end

  def down
    raise ActiveRecord::IrreversibleMigration
  end
end
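
Roughly, the seed migration above amounts to the following console-level sketch (illustration only — the migration deliberately uses raw SQL, and its UPDATE only touches an `ai_default_llm_model` row that already exists in `site_settings`):

# Sketch: point the new setting at the newest configured LLM, keeping the
# legacy "custom:<id>" format that the rest of this branch still parses.
if LlmModel.exists? && SiteSetting.ai_default_llm_model.blank?
  SiteSetting.ai_default_llm_model = "custom:#{LlmModel.order(:id).last.id}"
end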
@@ -0,0 +1,23 @@
# frozen_string_literal: true
class CopyAiSummarizationModelToPersonaDefault < ActiveRecord::Migration[7.2]
  def up
    ai_summarization_model =
      DB.query_single("SELECT value FROM site_settings WHERE name = 'ai_summarization_model'").first

    if ai_summarization_model.present? && ai_summarization_model.start_with?("custom:")
      # Extract the model ID from the setting value (e.g., "custom:-5" -> "-5")
      model_id = ai_summarization_model.split(":").last

      # Update the summarization personas (IDs -11 and -12) with the extracted model ID
      execute(<<~SQL)
        UPDATE ai_personas
        SET default_llm_id = #{model_id}
        WHERE id IN (-11, -12) AND default_llm_id IS NULL
      SQL
    end
  end

  def down
    raise ActiveRecord::IrreversibleMigration
  end
end
@@ -0,0 +1,23 @@
# frozen_string_literal: true
class CopyAiHelperModelToPersonaDefault < ActiveRecord::Migration[7.2]
  def up
    ai_helper_model =
      DB.query_single("SELECT value FROM site_settings WHERE name = 'ai_helper_model'").first

    if ai_helper_model.present? && ai_helper_model.start_with?("custom:")
      # Extract the model ID from the setting value (e.g., "custom:1" -> "1")
      model_id = ai_helper_model.split(":").last

      # Update the helper personas with the extracted model ID
      execute(<<~SQL)
        UPDATE ai_personas
        SET default_llm_id = #{model_id}
        WHERE id IN (-18, -19, -20, -21, -22, -23, -24, -25, -26) AND default_llm_id IS NULL
      SQL
    end
  end

  def down
    raise ActiveRecord::IrreversibleMigration
  end
end
23 changes: 23 additions & 0 deletions db/migrate/20250710215720_copy_hyde_model_to_persona.rb
@@ -0,0 +1,23 @@
# frozen_string_literal: true
class CopyHydeModelToPersona < ActiveRecord::Migration[7.2]
  def up
    hyde_model =
      DB.query_single("SELECT value FROM site_settings WHERE name = 'ai_embeddings_semantic_search_hyde_model'").first

    if hyde_model.present? && hyde_model.start_with?("custom:")
      # Extract the model ID from the setting value (e.g., "custom:1" -> "1")
      model_id = hyde_model.split(":").last

      # Update the hyde persona with the extracted model ID
      execute(<<~SQL)
        UPDATE ai_personas
        SET default_llm_id = #{model_id}
        WHERE id IN (-32) AND default_llm_id IS NULL
      SQL
    end
  end

  def down
    raise ActiveRecord::IrreversibleMigration
  end
end
21 changes: 10 additions & 11 deletions lib/ai_helper/assistant.rb
@@ -312,22 +312,21 @@ def find_ai_helper_model(helper_mode, persona_klass)

  # Priorities are:
  # 1. Persona's default LLM
- # 2. Hidden `ai_helper_model` setting, or `ai_helper_image_caption_model` for image_caption.
- # 3. Newest LLM config
+ # 2. SiteSetting.ai_default_llm_id (or newest LLM if not set)
  def self.find_ai_helper_model(helper_mode, persona_klass)
-   model_id = persona_klass.default_llm_id
-
-   if !model_id
-     if helper_mode == IMAGE_CAPTION
-       model_id = SiteSetting.ai_helper_image_caption_model&.split(":")&.last
-     else
-       model_id = SiteSetting.ai_helper_model&.split(":")&.last
-     end
-   end
+   model_id =
+     persona_klass.default_llm_id || SiteSetting.ai_default_llm_model&.split(":")&.last # Remove legacy custom provider.

    if model_id.present?
      LlmModel.find_by(id: model_id)
    else
+     last_model_id = LlmModel.last&.id
+
+     # SiteSetting.ai_default_llm_model shouldn't be empty, but if it is, we set it to the last model.
+     if last_model_id.present? && SiteSetting.ai_default_llm_model.empty?
+       SiteSetting.set_and_log("ai_default_llm_model", "custom:#{last_model_id}", Discourse.system_user) # Remove legacy custom provider.
+     end
+
      LlmModel.last
    end
  end
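
Condensed, the new lookup in `find_ai_helper_model` reduces to this sketch (the legacy `custom:` prefix is still stripped with `split(":")` at this point in the branch):

# Priority: persona default LLM -> ai_default_llm_model -> newest LlmModel.
model_id = persona_klass.default_llm_id ||
  SiteSetting.ai_default_llm_model&.split(":")&.last

llm = model_id.present? ? LlmModel.find_by(id: model_id) : LlmModel.last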
2 changes: 1 addition & 1 deletion lib/ai_helper/painter.rb
@@ -66,7 +66,7 @@ def diffusion_prompt(text, user)
          messages: [{ type: :user, content: text, id: user.username }],
        )

-     DiscourseAi::Completions::Llm.proxy(SiteSetting.ai_helper_model).generate(
+     DiscourseAi::Completions::Llm.proxy(SiteSetting.ai_default_llm_model).generate(
        prompt,
        user: user,
        feature_name: "illustrate_post",
3 changes: 2 additions & 1 deletion lib/configuration/llm_dependency_validator.rb
@@ -10,7 +10,8 @@ def initialize(opts = {})
    def valid_value?(val)
      return true if val == "f"

-     if @opts[:name] == :ai_summarization_enabled || @opts[:name] == :ai_helper_enabled
+     if @opts[:name] == :ai_summarization_enabled || @opts[:name] == :ai_helper_enabled ||
+          @opts[:name] == :ai_embeddings_semantic_search_enabled
        has_llms = LlmModel.count > 0
        @no_llms_configured = !has_llms
        has_llms
39 changes: 39 additions & 0 deletions lib/configuration/simple_llm_validator.rb
@@ -0,0 +1,39 @@
# frozen_string_literal: true

module DiscourseAi
  module Configuration
    class SimpleLlmValidator
      def initialize(opts = {})
        @opts = opts
      end

      def valid_value?(val)
        return true if val == ""

        run_test(val).tap { |result| @unreachable = result }
      rescue StandardError => e
        raise e if Rails.env.test?
        @unreachable = true
        true
      end

      def run_test(val)
        if Rails.env.test?
          # In test mode, we assume the model is reachable.
          return true
        end

        DiscourseAi::Completions::Llm
          .proxy(val)
          .generate("How much is 1 + 1?", user: nil, feature_name: "llm_validator")
          .present?
      end

      def error_message
        return unless @unreachable

        I18n.t("discourse_ai.llm.configuration.model_unreachable")
      end
    end
  end
end
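
For reference, how the validator behaves when exercised directly (a sketch — Discourse's site-setting machinery normally invokes it when an admin saves `ai_default_llm_model`; the id is hypothetical):

validator = DiscourseAi::Configuration::SimpleLlmValidator.new

validator.valid_value?("")         # => true, an empty value is always accepted
validator.valid_value?("custom:3") # outside the test env, runs a one-off completion against LLM 3;
                                   # errors are swallowed and the value is still accepted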