Skip to content
This repository was archived by the owner on Jul 22, 2025. It is now read-only.

Commit fdd4a9b

Browse files
committed
move llm to id column - work in progress
1 parent 4d1798c commit fdd4a9b

File tree

9 files changed

+116
-54
lines changed

9 files changed

+116
-54
lines changed

app/controllers/discourse_ai/admin/ai_personas_controller.rb

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -32,9 +32,12 @@ def index
3232
}
3333
end
3434
llms =
35-
DiscourseAi::Configuration::LlmEnumerator
36-
.values(allowed_seeded_llms: SiteSetting.ai_bot_allowed_seeded_models)
37-
.map { |hash| { id: hash[:value], name: hash[:name] } }
35+
LlmModel
36+
.pluck(:display_name, :id, :vision_enabled)
37+
.map do |name, id, vision|
38+
next if id < 0 && SiteSetting.ai_bot_allowed_seeded_models_map.exclude?(id.to_s)
39+
{ id: id, name: name, vision: vision }
40+
end
3841
render json: { ai_personas: ai_personas, meta: { tools: tools, llms: llms } }
3942
end
4043

app/models/ai_persona.rb

Lines changed: 46 additions & 42 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
11
# frozen_string_literal: true
22

33
class AiPersona < ActiveRecord::Base
4-
# TODO remove this line 01-1-2025
5-
self.ignored_columns = %i[commands allow_chat mentionable]
4+
# TODO remove this line 01-10-2025
5+
self.ignored_columns = %i[default_llm question_consolidator_llm]
66

77
# places a hard limit, so per site we cache a maximum of 500 classes
88
MAX_PERSONAS_PER_SITE = 500
@@ -12,7 +12,7 @@ class AiPersona < ActiveRecord::Base
1212
validates :system_prompt, presence: true, length: { maximum: 10_000_000 }
1313
validate :system_persona_unchangeable, on: :update, if: :system
1414
validate :chat_preconditions
15-
validate :allowed_seeded_model, if: :default_llm
15+
validate :allowed_seeded_model, if: :default_llm_id
1616
validates :max_context_posts, numericality: { greater_than: 0 }, allow_nil: true
1717
# leaves some room for growth but sets a maximum to avoid memory issues
1818
# we may want to revisit this in the future
@@ -30,6 +30,9 @@ class AiPersona < ActiveRecord::Base
3030
belongs_to :created_by, class_name: "User"
3131
belongs_to :user
3232

33+
belongs_to :default_llm, class_name: "LlmModel"
34+
belongs_to :question_consolidator_llm, class_name: "LlmModel"
35+
3336
has_many :upload_references, as: :target, dependent: :destroy
3437
has_many :uploads, through: :upload_references
3538

@@ -62,7 +65,7 @@ def self.persona_users(user: nil)
6265
user_id: persona.user_id,
6366
username: persona.user.username_lower,
6467
allowed_group_ids: persona.allowed_group_ids,
65-
default_llm: persona.default_llm,
68+
default_llm_id: persona.default_llm_id,
6669
force_default_llm: persona.force_default_llm,
6770
allow_chat_channel_mentions: persona.allow_chat_channel_mentions,
6871
allow_chat_direct_messages: persona.allow_chat_direct_messages,
@@ -157,7 +160,7 @@ def class_instance
157160
user_id
158161
system
159162
mentionable
160-
default_llm
163+
default_llm_id
161164
max_context_posts
162165
vision_enabled
163166
vision_max_pixels
@@ -302,7 +305,7 @@ def chat_preconditions
302305
if (
303306
allow_chat_channel_mentions || allow_chat_direct_messages || allow_topic_mentions ||
304307
force_default_llm
305-
) && !default_llm
308+
) && !default_llm_id
306309
errors.add(:default_llm, I18n.t("discourse_ai.ai_bot.personas.default_llm_required"))
307310
end
308311
end
@@ -332,13 +335,12 @@ def ensure_not_system
332335
end
333336

334337
def allowed_seeded_model
335-
return if default_llm.blank?
338+
return if default_llm_id.blank?
336339

337-
llm = LlmModel.find_by(id: default_llm.split(":").last.to_i)
338-
return if llm.nil?
339-
return if !llm.seeded?
340+
return if default_llm.nil?
341+
return if !default_llm.seeded?
340342

341-
return if SiteSetting.ai_bot_allowed_seeded_models.include?(llm.id.to_s)
343+
return if SiteSetting.ai_bot_allowed_seeded_models_map.include?(default_llm.id.to_s)
342344

343345
errors.add(:default_llm, I18n.t("discourse_ai.llm.configuration.invalid_seeded_model"))
344346
end
@@ -348,37 +350,39 @@ def allowed_seeded_model
348350
#
349351
# Table name: ai_personas
350352
#
351-
# id :bigint not null, primary key
352-
# name :string(100) not null
353-
# description :string(2000) not null
354-
# system_prompt :string(10000000) not null
355-
# allowed_group_ids :integer default([]), not null, is an Array
356-
# created_by_id :integer
357-
# enabled :boolean default(TRUE), not null
358-
# created_at :datetime not null
359-
# updated_at :datetime not null
360-
# system :boolean default(FALSE), not null
361-
# priority :boolean default(FALSE), not null
362-
# temperature :float
363-
# top_p :float
364-
# user_id :integer
365-
# default_llm :text
366-
# max_context_posts :integer
367-
# vision_enabled :boolean default(FALSE), not null
368-
# vision_max_pixels :integer default(1048576), not null
369-
# rag_chunk_tokens :integer default(374), not null
370-
# rag_chunk_overlap_tokens :integer default(10), not null
371-
# rag_conversation_chunks :integer default(10), not null
372-
# question_consolidator_llm :text
373-
# tool_details :boolean default(TRUE), not null
374-
# tools :json not null
375-
# forced_tool_count :integer default(-1), not null
376-
# allow_chat_channel_mentions :boolean default(FALSE), not null
377-
# allow_chat_direct_messages :boolean default(FALSE), not null
378-
# allow_topic_mentions :boolean default(FALSE), not null
379-
# allow_personal_messages :boolean default(TRUE), not null
380-
# force_default_llm :boolean default(FALSE), not null
381-
# rag_llm_model_id :bigint
353+
# id :bigint not null, primary key
354+
# name :string(100) not null
355+
# description :string(2000) not null
356+
# system_prompt :string(10000000) not null
357+
# allowed_group_ids :integer default([]), not null, is an Array
358+
# created_by_id :integer
359+
# enabled :boolean default(TRUE), not null
360+
# created_at :datetime not null
361+
# updated_at :datetime not null
362+
# system :boolean default(FALSE), not null
363+
# priority :boolean default(FALSE), not null
364+
# temperature :float
365+
# top_p :float
366+
# user_id :integer
367+
# max_context_posts :integer
368+
# max_post_context_tokens :integer
369+
# max_context_tokens :integer
370+
# vision_enabled :boolean default(FALSE), not null
371+
# vision_max_pixels :integer default(1048576), not null
372+
# rag_chunk_tokens :integer default(374), not null
373+
# rag_chunk_overlap_tokens :integer default(10), not null
374+
# rag_conversation_chunks :integer default(10), not null
375+
# tool_details :boolean default(TRUE), not null
376+
# tools :json not null
377+
# forced_tool_count :integer default(-1), not null
378+
# allow_chat_channel_mentions :boolean default(FALSE), not null
379+
# allow_chat_direct_messages :boolean default(FALSE), not null
380+
# allow_topic_mentions :boolean default(FALSE), not null
381+
# allow_personal_messages :boolean default(TRUE), not null
382+
# force_default_llm :boolean default(FALSE), not null
383+
# rag_llm_model_id :bigint
384+
# default_llm_id :bigint
385+
# question_consolidator_llm_id :bigint
382386
#
383387
# Indexes
384388
#

assets/javascripts/discourse/components/ai-persona-editor.gjs

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -598,7 +598,10 @@ export default class PersonaEditor extends Component {
598598
@onRemove={{this.removeUpload}}
599599
/>
600600
</div>
601-
<RagOptions @model={{this.editingModel}}>
601+
<RagOptions
602+
@model={{this.editingModel}}
603+
@llms={{@personas.resultSetMeta.llms}}
604+
>
602605
<div class="control-group">
603606
<label>{{i18n
604607
"discourse_ai.ai_persona.rag_conversation_chunks"

assets/javascripts/discourse/components/rag-options.gjs

Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@ import { on } from "@ember/modifier";
55
import { action } from "@ember/object";
66
import DTooltip from "discourse/components/d-tooltip";
77
import { i18n } from "discourse-i18n";
8+
import AiLlmSelector from "./ai-llm-selector";
89

910
export default class RagOptions extends Component {
1011
@tracked showIndexingOptions = false;
@@ -22,6 +23,10 @@ export default class RagOptions extends Component {
2223
: i18n("discourse_ai.rag.options.show_indexing_options");
2324
}
2425

26+
get visionLlms() {
27+
return this.args.llms.filter((llm) => llm.vision);
28+
}
29+
2530
<template>
2631
{{#if @model.rag_uploads}}
2732
<a
@@ -64,6 +69,18 @@ export default class RagOptions extends Component {
6469
}}
6570
/>
6671
</div>
72+
<div class="control-group">
73+
<label>{{i18n "discourse_ai.rag.options.rag_llm_model"}}</label>
74+
<AiLlmSelector
75+
class="ai-persona-editor__llms"
76+
@value={{this.visionLlmId}}
77+
@llms={{this.visionLlms}}
78+
/>
79+
<DTooltip
80+
@icon="circle-question"
81+
@content={{i18n "discourse_ai.rag.options.rag_llm_model_help"}}
82+
/>
83+
</div>
6784
{{yield}}
6885
{{/if}}
6986
</template>

config/locales/client.en.yml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -274,6 +274,8 @@ en:
274274
rag_chunk_tokens_help: "The number of tokens to use for each chunk in the RAG model. Increase to increase the amount of context the AI can use. (changing will re-index all uploads)"
275275
rag_chunk_overlap_tokens: "Upload chunk overlap tokens"
276276
rag_chunk_overlap_tokens_help: "The number of tokens to overlap between chunks in the RAG model. (changing will re-index all uploads)"
277+
rag_llm_model: "Indexing Language Model"
278+
rag_llm_model_help: "The language model used for OCR during indexing of PDFs and images."
277279
show_indexing_options: "Show upload options"
278280
hide_indexing_options: "Hide upload options"
279281
uploads:
Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
# frozen_string_literal: true

# Adds the new *_id bigint columns on ai_personas and backfills them from the
# legacy text columns (default_llm / question_consolidator_llm), which stored
# references as "custom:<llm_model_id>" strings.
class MigratePersonaToLlmModelId < ActiveRecord::Migration[7.2]
  def up
    add_column :ai_personas, :default_llm_id, :bigint
    add_column :ai_personas, :question_consolidator_llm_id, :bigint
    # Personas are seeded, so we do not mark the legacy columns read-only;
    # a follow-up post-deploy migration removes them instead.

    # NOTE(review): only values of the form "custom:<id>" are migrated. Legacy
    # values in other formats (e.g. "anthropic:claude-2") leave the new column
    # NULL — confirm this is the intended behavior for old rows.
    execute <<~SQL
      UPDATE ai_personas
      set
        default_llm_id = (select id from llm_models where ('custom:' || id) = default_llm),
        question_consolidator_llm_id = (select id from llm_models where ('custom:' || id) = question_consolidator_llm)
    SQL
  end

  def down
    raise ActiveRecord::IrreversibleMigration
  end
end
Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,11 @@
# frozen_string_literal: true

# Drops the legacy text columns on ai_personas once their data has been
# copied into the corresponding *_id columns by the preceding migration.
class PostMigratePersonaToLlmModelId < ActiveRecord::Migration[7.2]
  def up
    %i[default_llm question_consolidator_llm].each do |legacy_column|
      remove_column :ai_personas, legacy_column
    end
  end

  # The dropped text columns cannot be reconstructed, so this migration
  # is one-way only.
  def down
    raise ActiveRecord::IrreversibleMigration
  end
end

spec/models/ai_persona_spec.rb

Lines changed: 8 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,9 @@
11
# frozen_string_literal: true
22

33
RSpec.describe AiPersona do
4+
fab!(:llm_model)
5+
fab!(:seeded_llm_model) { Fabricate(:llm_model, id: -1) }
6+
47
it "validates context settings" do
58
persona =
69
AiPersona.new(
@@ -118,7 +121,7 @@
118121
forum_helper = AiPersona.find_by(name: "Forum Helper")
119122
forum_helper.update!(
120123
user_id: 1,
121-
default_llm: "anthropic:claude-2",
124+
default_llm_id: llm_model.id,
122125
max_context_posts: 3,
123126
allow_topic_mentions: true,
124127
allow_personal_messages: true,
@@ -133,7 +136,7 @@
133136
# tl 0 by default
134137
expect(klass.allowed_group_ids).to eq([10])
135138
expect(klass.user_id).to eq(1)
136-
expect(klass.default_llm).to eq("anthropic:claude-2")
139+
expect(klass.default_llm_id).to eq(llm_model.id)
137140
expect(klass.max_context_posts).to eq(3)
138141
expect(klass.allow_topic_mentions).to eq(true)
139142
expect(klass.allow_personal_messages).to eq(true)
@@ -149,7 +152,7 @@
149152
system_prompt: "test",
150153
tools: [],
151154
allowed_group_ids: [],
152-
default_llm: "anthropic:claude-2",
155+
default_llm_id: llm_model.id,
153156
max_context_posts: 3,
154157
allow_topic_mentions: true,
155158
allow_personal_messages: true,
@@ -164,7 +167,7 @@
164167
expect(klass.system).to eq(false)
165168
expect(klass.allowed_group_ids).to eq([])
166169
expect(klass.user_id).to eq(1)
167-
expect(klass.default_llm).to eq("anthropic:claude-2")
170+
expect(klass.default_llm_id).to eq(llm_model.id)
168171
expect(klass.max_context_posts).to eq(3)
169172
expect(klass.allow_topic_mentions).to eq(true)
170173
expect(klass.allow_personal_messages).to eq(true)
@@ -227,10 +230,9 @@
227230
system_prompt: "test",
228231
tools: [],
229232
allowed_group_ids: [],
230-
default_llm: "seeded_model:-1",
233+
default_llm_id: seeded_llm_model.id,
231234
)
232235

233-
llm_model = Fabricate(:llm_model, id: -1)
234236
SiteSetting.ai_bot_allowed_seeded_models = ""
235237

236238
expect(persona.valid?).to eq(false)

spec/requests/admin/ai_personas_controller_spec.rb

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44
fab!(:admin)
55
fab!(:ai_persona)
66
fab!(:embedding_definition)
7+
fab!(:llm_model)
78

89
before do
910
sign_in(admin)
@@ -44,7 +45,7 @@
4445
allow_personal_messages: true,
4546
allow_chat_channel_mentions: true,
4647
allow_chat_direct_messages: true,
47-
default_llm: "anthropic:claude-2",
48+
default_llm_id: llm_model.id,
4849
forced_tool_count: 2,
4950
)
5051
persona2.create_user!
@@ -178,7 +179,7 @@
178179
allow_personal_messages: true,
179180
allow_chat_channel_mentions: true,
180181
allow_chat_direct_messages: true,
181-
default_llm: "anthropic:claude-2",
182+
default_llm_id: llm_model.id,
182183
forced_tool_count: 2,
183184
}
184185
end

0 commit comments

Comments
 (0)