Skip to content
This repository was archived by the owner on Jul 22, 2025. It is now read-only.

Commit 93db677

Browse files
committed
move llm to id column - work in progress
1 parent 08eee3a commit 93db677

File tree

9 files changed

+116
-54
lines changed

9 files changed

+116
-54
lines changed

app/controllers/discourse_ai/admin/ai_personas_controller.rb

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -27,9 +27,12 @@ def index
2727
}
2828
end
2929
llms =
30-
DiscourseAi::Configuration::LlmEnumerator
31-
.values(allowed_seeded_llms: SiteSetting.ai_bot_allowed_seeded_models)
32-
.map { |hash| { id: hash[:value], name: hash[:name] } }
30+
LlmModel
31+
.pluck(:display_name, :id, :vision_enabled)
32+
.map do |name, id, vision|
33+
next if id < 0 && SiteSetting.ai_bot_allowed_seeded_models_map.exclude?(id.to_s)
34+
{ id: id, name: name, vision: vision }
35+
end
3336
render json: { ai_personas: ai_personas, meta: { tools: tools, llms: llms } }
3437
end
3538

app/models/ai_persona.rb

Lines changed: 46 additions & 42 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
11
# frozen_string_literal: true
22

33
class AiPersona < ActiveRecord::Base
4-
# TODO remove this line 01-1-2025
5-
self.ignored_columns = %i[commands allow_chat mentionable]
4+
# TODO remove this line 01-10-2025
5+
self.ignored_columns = %i[default_llm question_consolidator_llm]
66

77
# places a hard limit, so per site we cache a maximum of 500 classes
88
MAX_PERSONAS_PER_SITE = 500
@@ -12,7 +12,7 @@ class AiPersona < ActiveRecord::Base
1212
validates :system_prompt, presence: true, length: { maximum: 10_000_000 }
1313
validate :system_persona_unchangeable, on: :update, if: :system
1414
validate :chat_preconditions
15-
validate :allowed_seeded_model, if: :default_llm
15+
validate :allowed_seeded_model, if: :default_llm_id
1616
validates :max_context_posts, numericality: { greater_than: 0 }, allow_nil: true
1717
# leaves some room for growth but sets a maximum to avoid memory issues
1818
# we may want to revisit this in the future
@@ -27,6 +27,9 @@ class AiPersona < ActiveRecord::Base
2727
belongs_to :created_by, class_name: "User"
2828
belongs_to :user
2929

30+
belongs_to :default_llm, class_name: "LlmModel"
31+
belongs_to :question_consolidator_llm, class_name: "LlmModel"
32+
3033
has_many :upload_references, as: :target, dependent: :destroy
3134
has_many :uploads, through: :upload_references
3235

@@ -59,7 +62,7 @@ def self.persona_users(user: nil)
5962
user_id: persona.user_id,
6063
username: persona.user.username_lower,
6164
allowed_group_ids: persona.allowed_group_ids,
62-
default_llm: persona.default_llm,
65+
default_llm_id: persona.default_llm_id,
6366
force_default_llm: persona.force_default_llm,
6467
allow_chat_channel_mentions: persona.allow_chat_channel_mentions,
6568
allow_chat_direct_messages: persona.allow_chat_direct_messages,
@@ -113,7 +116,7 @@ def class_instance
113116
user_id
114117
system
115118
mentionable
116-
default_llm
119+
default_llm_id
117120
max_context_posts
118121
vision_enabled
119122
vision_max_pixels
@@ -258,7 +261,7 @@ def chat_preconditions
258261
if (
259262
allow_chat_channel_mentions || allow_chat_direct_messages || allow_topic_mentions ||
260263
force_default_llm
261-
) && !default_llm
264+
) && !default_llm_id
262265
errors.add(:default_llm, I18n.t("discourse_ai.ai_bot.personas.default_llm_required"))
263266
end
264267
end
@@ -288,13 +291,12 @@ def ensure_not_system
288291
end
289292

290293
def allowed_seeded_model
291-
return if default_llm.blank?
294+
return if default_llm_id.blank?
292295

293-
llm = LlmModel.find_by(id: default_llm.split(":").last.to_i)
294-
return if llm.nil?
295-
return if !llm.seeded?
296+
return if default_llm.nil?
297+
return if !default_llm.seeded?
296298

297-
return if SiteSetting.ai_bot_allowed_seeded_models.include?(llm.id.to_s)
299+
return if SiteSetting.ai_bot_allowed_seeded_models_map.include?(default_llm.id.to_s)
298300

299301
errors.add(:default_llm, I18n.t("discourse_ai.llm.configuration.invalid_seeded_model"))
300302
end
@@ -304,37 +306,39 @@ def allowed_seeded_model
304306
#
305307
# Table name: ai_personas
306308
#
307-
# id :bigint not null, primary key
308-
# name :string(100) not null
309-
# description :string(2000) not null
310-
# system_prompt :string(10000000) not null
311-
# allowed_group_ids :integer default([]), not null, is an Array
312-
# created_by_id :integer
313-
# enabled :boolean default(TRUE), not null
314-
# created_at :datetime not null
315-
# updated_at :datetime not null
316-
# system :boolean default(FALSE), not null
317-
# priority :boolean default(FALSE), not null
318-
# temperature :float
319-
# top_p :float
320-
# user_id :integer
321-
# default_llm :text
322-
# max_context_posts :integer
323-
# vision_enabled :boolean default(FALSE), not null
324-
# vision_max_pixels :integer default(1048576), not null
325-
# rag_chunk_tokens :integer default(374), not null
326-
# rag_chunk_overlap_tokens :integer default(10), not null
327-
# rag_conversation_chunks :integer default(10), not null
328-
# question_consolidator_llm :text
329-
# tool_details :boolean default(TRUE), not null
330-
# tools :json not null
331-
# forced_tool_count :integer default(-1), not null
332-
# allow_chat_channel_mentions :boolean default(FALSE), not null
333-
# allow_chat_direct_messages :boolean default(FALSE), not null
334-
# allow_topic_mentions :boolean default(FALSE), not null
335-
# allow_personal_messages :boolean default(TRUE), not null
336-
# force_default_llm :boolean default(FALSE), not null
337-
# rag_llm_model_id :bigint
309+
# id :bigint not null, primary key
310+
# name :string(100) not null
311+
# description :string(2000) not null
312+
# system_prompt :string(10000000) not null
313+
# allowed_group_ids :integer default([]), not null, is an Array
314+
# created_by_id :integer
315+
# enabled :boolean default(TRUE), not null
316+
# created_at :datetime not null
317+
# updated_at :datetime not null
318+
# system :boolean default(FALSE), not null
319+
# priority :boolean default(FALSE), not null
320+
# temperature :float
321+
# top_p :float
322+
# user_id :integer
323+
# max_context_posts :integer
324+
# max_post_context_tokens :integer
325+
# max_context_tokens :integer
326+
# vision_enabled :boolean default(FALSE), not null
327+
# vision_max_pixels :integer default(1048576), not null
328+
# rag_chunk_tokens :integer default(374), not null
329+
# rag_chunk_overlap_tokens :integer default(10), not null
330+
# rag_conversation_chunks :integer default(10), not null
331+
# tool_details :boolean default(TRUE), not null
332+
# tools :json not null
333+
# forced_tool_count :integer default(-1), not null
334+
# allow_chat_channel_mentions :boolean default(FALSE), not null
335+
# allow_chat_direct_messages :boolean default(FALSE), not null
336+
# allow_topic_mentions :boolean default(FALSE), not null
337+
# allow_personal_messages :boolean default(TRUE), not null
338+
# force_default_llm :boolean default(FALSE), not null
339+
# rag_llm_model_id :bigint
340+
# default_llm_id :bigint
341+
# question_consolidator_llm_id :bigint
338342
#
339343
# Indexes
340344
#

assets/javascripts/discourse/components/ai-persona-editor.gjs

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -598,7 +598,10 @@ export default class PersonaEditor extends Component {
598598
@onRemove={{this.removeUpload}}
599599
/>
600600
</div>
601-
<RagOptions @model={{this.editingModel}}>
601+
<RagOptions
602+
@model={{this.editingModel}}
603+
@llms={{@personas.resultSetMeta.llms}}
604+
>
602605
<div class="control-group">
603606
<label>{{i18n
604607
"discourse_ai.ai_persona.rag_conversation_chunks"

assets/javascripts/discourse/components/rag-options.gjs

Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@ import { on } from "@ember/modifier";
55
import { action } from "@ember/object";
66
import DTooltip from "discourse/components/d-tooltip";
77
import { i18n } from "discourse-i18n";
8+
import AiLlmSelector from "./ai-llm-selector";
89

910
export default class RagOptions extends Component {
1011
@tracked showIndexingOptions = false;
@@ -22,6 +23,10 @@ export default class RagOptions extends Component {
2223
: i18n("discourse_ai.rag.options.show_indexing_options");
2324
}
2425

26+
get visionLlms() {
27+
return this.args.llms.filter((llm) => llm.vision);
28+
}
29+
2530
<template>
2631
{{#if @model.rag_uploads}}
2732
<a
@@ -64,6 +69,18 @@ export default class RagOptions extends Component {
6469
}}
6570
/>
6671
</div>
72+
<div class="control-group">
73+
<label>{{i18n "discourse_ai.rag.options.rag_llm_model"}}</label>
74+
<AiLlmSelector
75+
class="ai-persona-editor__llms"
76+
@value={{this.visionLlmId}}
77+
@llms={{this.visionLlms}}
78+
/>
79+
<DTooltip
80+
@icon="circle-question"
81+
@content={{i18n "discourse_ai.rag.options.rag_llm_model_help"}}
82+
/>
83+
</div>
6784
{{yield}}
6885
{{/if}}
6986
</template>

config/locales/client.en.yml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -274,6 +274,8 @@ en:
274274
rag_chunk_tokens_help: "The number of tokens to use for each chunk in the RAG model. Increase to increase the amount of context the AI can use. (changing will re-index all uploads)"
275275
rag_chunk_overlap_tokens: "Upload chunk overlap tokens"
276276
rag_chunk_overlap_tokens_help: "The number of tokens to overlap between chunks in the RAG model. (changing will re-index all uploads)"
277+
rag_llm_model: "Indexing Language Model"
278+
rag_llm_model_help: "The language model used for OCR during indexing of PDFs and images."
277279
show_indexing_options: "Show upload options"
278280
hide_indexing_options: "Hide upload options"
279281
uploads:
Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
# frozen_string_literal: true

# Moves persona LLM references from the legacy free-text columns
# (values shaped like "custom:<id>") onto proper bigint id columns
# pointing at llm_models.id.
class MigratePersonaToLlmModelId < ActiveRecord::Migration[7.2]
  def up
    add_column :ai_personas, :default_llm_id, :bigint
    add_column :ai_personas, :question_consolidator_llm_id, :bigint
    # personas are seeded, we do not mark stuff as readonly

    # Resolve legacy "custom:<id>" strings to llm_models primary keys.
    # NOTE(review): any value that does not match the 'custom:' || id
    # pattern (e.g. seeded model references) resolves to NULL — confirm
    # that is the intended behavior for this work-in-progress migration.
    execute <<~SQL
      UPDATE ai_personas
      SET
        default_llm_id = (SELECT id FROM llm_models WHERE ('custom:' || id) = default_llm),
        question_consolidator_llm_id = (SELECT id FROM llm_models WHERE ('custom:' || id) = question_consolidator_llm)
    SQL
  end

  def down
    raise ActiveRecord::IrreversibleMigration
  end
end
Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,11 @@
# frozen_string_literal: true

# Drops the legacy free-text LLM columns once their data has been
# migrated into default_llm_id / question_consolidator_llm_id.
class PostMigratePersonaToLlmModelId < ActiveRecord::Migration[7.2]
  def up
    %i[default_llm question_consolidator_llm].each do |legacy_column|
      remove_column :ai_personas, legacy_column
    end
  end

  def down
    raise ActiveRecord::IrreversibleMigration
  end
end

spec/models/ai_persona_spec.rb

Lines changed: 8 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,9 @@
11
# frozen_string_literal: true
22

33
RSpec.describe AiPersona do
4+
fab!(:llm_model)
5+
fab!(:seeded_llm_model) { Fabricate(:llm_model, id: -1) }
6+
47
it "validates context settings" do
58
persona =
69
AiPersona.new(
@@ -71,7 +74,7 @@
7174
forum_helper = AiPersona.find_by(name: "Forum Helper")
7275
forum_helper.update!(
7376
user_id: 1,
74-
default_llm: "anthropic:claude-2",
77+
default_llm_id: llm_model.id,
7578
max_context_posts: 3,
7679
allow_topic_mentions: true,
7780
allow_personal_messages: true,
@@ -86,7 +89,7 @@
8689
# tl 0 by default
8790
expect(klass.allowed_group_ids).to eq([10])
8891
expect(klass.user_id).to eq(1)
89-
expect(klass.default_llm).to eq("anthropic:claude-2")
92+
expect(klass.default_llm_id).to eq(llm_model.id)
9093
expect(klass.max_context_posts).to eq(3)
9194
expect(klass.allow_topic_mentions).to eq(true)
9295
expect(klass.allow_personal_messages).to eq(true)
@@ -102,7 +105,7 @@
102105
system_prompt: "test",
103106
tools: [],
104107
allowed_group_ids: [],
105-
default_llm: "anthropic:claude-2",
108+
default_llm_id: llm_model.id,
106109
max_context_posts: 3,
107110
allow_topic_mentions: true,
108111
allow_personal_messages: true,
@@ -117,7 +120,7 @@
117120
expect(klass.system).to eq(false)
118121
expect(klass.allowed_group_ids).to eq([])
119122
expect(klass.user_id).to eq(1)
120-
expect(klass.default_llm).to eq("anthropic:claude-2")
123+
expect(klass.default_llm_id).to eq(llm_model.id)
121124
expect(klass.max_context_posts).to eq(3)
122125
expect(klass.allow_topic_mentions).to eq(true)
123126
expect(klass.allow_personal_messages).to eq(true)
@@ -180,10 +183,9 @@
180183
system_prompt: "test",
181184
tools: [],
182185
allowed_group_ids: [],
183-
default_llm: "seeded_model:-1",
186+
default_llm_id: seeded_llm_model.id,
184187
)
185188

186-
llm_model = Fabricate(:llm_model, id: -1)
187189
SiteSetting.ai_bot_allowed_seeded_models = ""
188190

189191
expect(persona.valid?).to eq(false)

spec/requests/admin/ai_personas_controller_spec.rb

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33
RSpec.describe DiscourseAi::Admin::AiPersonasController do
44
fab!(:admin)
55
fab!(:ai_persona)
6+
fab!(:llm_model)
67

78
before do
89
sign_in(admin)
@@ -43,7 +44,7 @@
4344
allow_personal_messages: true,
4445
allow_chat_channel_mentions: true,
4546
allow_chat_direct_messages: true,
46-
default_llm: "anthropic:claude-2",
47+
default_llm_id: llm_model.id,
4748
forced_tool_count: 2,
4849
)
4950
persona2.create_user!
@@ -177,7 +178,7 @@
177178
allow_personal_messages: true,
178179
allow_chat_channel_mentions: true,
179180
allow_chat_direct_messages: true,
180-
default_llm: "anthropic:claude-2",
181+
default_llm_id: llm_model.id,
181182
forced_tool_count: 2,
182183
}
183184
end

0 commit comments

Comments
 (0)