@@ -116,6 +116,10 @@ export default class AiLlmsListEditor extends Component {
return i18n("discourse_ai.llms.usage.ai_persona", {
persona: usage.name,
});
} else if (usage.type === "automation") {
return i18n("discourse_ai.llms.usage.automation", {
name: usage.name,
});
} else {
return i18n("discourse_ai.llms.usage." + usage.type);
}
2 changes: 2 additions & 0 deletions assets/stylesheets/modules/llms/common/ai-llms-editor.scss
@@ -117,6 +117,7 @@
list-style: none;
margin: 0.5em 0 0 0;
display: flex;
flex-wrap: wrap;

li {
font-size: var(--font-down-2);
Expand All @@ -125,6 +126,7 @@
border: 1px solid var(--primary-low);
padding: 1px 3px;
margin-right: 0.5em;
margin-bottom: 0.5em;
}
}

2 changes: 2 additions & 0 deletions config/locales/client.en.yml
@@ -439,10 +439,12 @@ en:
usage:
ai_bot: "AI bot"
ai_helper: "Helper"
ai_helper_image_caption: "Image caption"
ai_persona: "Persona (%{persona})"
ai_summarization: "Summarize"
ai_embeddings_semantic_search: "AI search"
ai_spam: "Spam"
automation: "Automation (%{name})"
in_use_warning:
one: "This model is currently used by %{settings}. If misconfigured, the feature won't work as expected."
other: "This model is currently used by the following: %{settings}. If misconfigured, features won't work as expected. "
5 changes: 0 additions & 5 deletions config/settings.yml
@@ -312,11 +312,6 @@ discourse_ai:
default: "1|2" # 1: admins, 2: moderators
allow_any: false
refresh: true
ai_bot_enabled_chat_bots: # TODO(roman): Deprecated. Remove by Sept 2024
type: list
default: "gpt-3.5-turbo"
hidden: true
choices: "DiscourseAi::Configuration::LlmEnumerator.available_ai_bots"
ai_bot_add_to_header:
default: true
client: true
13 changes: 13 additions & 0 deletions db/migrate/20250424035234_remove_old_settings.rb
@@ -0,0 +1,13 @@
# frozen_string_literal: true
class RemoveOldSettings < ActiveRecord::Migration[7.2]
def up
execute <<~SQL
DELETE FROM site_settings
WHERE name IN ('ai_bot_enabled_chat_bots')
SQL
end

def down
raise ActiveRecord::IrreversibleMigration
end
end
7 changes: 5 additions & 2 deletions lib/ai_bot/playground.rb
@@ -5,6 +5,9 @@ module AiBot
class Playground
BYPASS_AI_REPLY_CUSTOM_FIELD = "discourse_ai_bypass_ai_reply"
BOT_USER_PREF_ID_CUSTOM_FIELD = "discourse_ai_bot_user_pref_id"
# 10 minutes is enough for vast majority of cases
# there is a small chance that some reasoning models may take longer
MAX_STREAM_DELAY_SECONDS = 600

attr_reader :bot

@@ -464,7 +467,7 @@ def reply_to(
publish_update(reply_post, { raw: reply_post.cooked })

redis_stream_key = "gpt_cancel:#{reply_post.id}"
Discourse.redis.setex(redis_stream_key, 60, 1)
Discourse.redis.setex(redis_stream_key, MAX_STREAM_DELAY_SECONDS, 1)
end

context.skip_tool_details ||= !bot.persona.class.tool_details
@@ -504,7 +507,7 @@ def reply_to(

if post_streamer
post_streamer.run_later do
Discourse.redis.expire(redis_stream_key, 60)
Discourse.redis.expire(redis_stream_key, MAX_STREAM_DELAY_SECONDS)
publish_update(reply_post, { raw: raw })
end
end
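Context for the TTL change above: the cancel key previously expired after a fixed 60 seconds, which could cut off cancellation for slow reasoning models; MAX_STREAM_DELAY_SECONDS raises that window to 10 minutes. Below is a minimal sketch of the pattern the key supports. It is not the plugin's actual streaming loop, and next_partial_from_llm is a hypothetical helper used only for illustration.

# Illustrative sketch only, not the plugin's streaming loop:
# the producer refreshes the cancel key while tokens arrive, and a cancel
# request deletes the key to stop the stream early.
redis_stream_key = "gpt_cancel:#{reply_post.id}"
Discourse.redis.setex(redis_stream_key, MAX_STREAM_DELAY_SECONDS, 1)

while (partial = next_partial_from_llm) # hypothetical helper
  break if Discourse.redis.get(redis_stream_key).blank? # key gone => cancelled
  Discourse.redis.expire(redis_stream_key, MAX_STREAM_DELAY_SECONDS)
  publish_update(reply_post, { raw: partial })
end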
74 changes: 30 additions & 44 deletions lib/configuration/llm_enumerator.rb
@@ -13,18 +13,24 @@ def self.global_usage
.where("enabled_chat_bot = ?", true)
.pluck(:id)
.each { |llm_id| rval[llm_id] << { type: :ai_bot } }

AiPersona
.where("force_default_llm = ?", true)
.pluck(:default_llm_id, :name, :id)
.each { |llm_id, name, id| rval[llm_id] << { type: :ai_persona, name: name, id: id } }
end

# this is unconditional, so it is clear that we always signal configuration
AiPersona
.where("default_llm_id IS NOT NULL")
.pluck(:default_llm_id, :name, :id)
.each { |llm_id, name, id| rval[llm_id] << { type: :ai_persona, name: name, id: id } }

if SiteSetting.ai_helper_enabled
model_id = SiteSetting.ai_helper_model.split(":").last.to_i
rval[model_id] << { type: :ai_helper }
end

if SiteSetting.ai_helper_image_caption_model
model_id = SiteSetting.ai_helper_image_caption_model.split(":").last.to_i
rval[model_id] << { type: :ai_helper_image_caption }
end

if SiteSetting.ai_summarization_enabled
summarization_persona = AiPersona.find_by(id: SiteSetting.ai_summarization_persona)
model_id = summarization_persona.default_llm_id || LlmModel.last&.id
@@ -42,6 +48,25 @@ def self.global_usage
rval[model_id] << { type: :ai_spam }
end

if defined?(DiscourseAutomation::Automation)
DiscourseAutomation::Automation
.joins(:fields)
.where(script: %w[llm_report llm_triage])
.where("discourse_automation_fields.name = ?", "model")
.pluck(
"metadata ->> 'value', discourse_automation_automations.name, discourse_automation_automations.id",
)
.each do |model_text, name, id|
next if model_text.blank?
model_id = model_text.split("custom:").last.to_i
if model_id.present?
if model_text =~ /custom:(\d+)/
rval[model_id] << { type: :automation, name: name, id: id }
end
end
end
end

rval
end
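For reference, a sketch of the hash global_usage now returns when an llm_report or llm_triage automation points at a custom model; the ids and names are invented for illustration, and the shape matches the entries pushed above and asserted in the spec further down.

# Hypothetical return value of LlmEnumerator.global_usage (illustrative data only):
{
  42 => [
    { type: :automation, name: "weekly llm report", id: 7 },
    { type: :ai_persona, name: "forum_helper", id: 3 },
  ],
}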

@@ -85,45 +110,6 @@ def self.values(allowed_seeded_llms: nil)
values.each { |value_h| value_h[:value] = "custom:#{value_h[:value]}" }
values
end

# TODO(roman): Deprecated. Remove by Sept 2024
def self.old_summarization_options
%w[
gpt-4
gpt-4-32k
gpt-4-turbo
gpt-4o
gpt-3.5-turbo
gpt-3.5-turbo-16k
gemini-pro
gemini-1.5-pro
gemini-1.5-flash
claude-2
claude-instant-1
claude-3-haiku
claude-3-sonnet
claude-3-opus
mistralai/Mixtral-8x7B-Instruct-v0.1
mistralai/Mixtral-8x7B-Instruct-v0.1
]
end

# TODO(roman): Deprecated. Remove by Sept 2024
def self.available_ai_bots
%w[
gpt-3.5-turbo
gpt-4
gpt-4-turbo
gpt-4o
claude-2
gemini-1.5-pro
mixtral-8x7B-Instruct-V0.1
claude-3-opus
claude-3-sonnet
claude-3-haiku
cohere-command-r-plus
]
end
end
end
end
23 changes: 20 additions & 3 deletions spec/configuration/llm_enumerator_spec.rb
@@ -4,6 +4,9 @@
fab!(:fake_model)
fab!(:llm_model)
fab!(:seeded_model)
fab!(:automation) do
Fabricate(:automation, script: "llm_report", name: "some automation", enabled: true)
end

describe "#values_for_serialization" do
it "returns an array for that can be used for serialization" do
@@ -37,13 +40,27 @@
end

describe "#global_usage" do
before do
it "returns a hash of Llm models in use globally" do
SiteSetting.ai_helper_model = "custom:#{fake_model.id}"
SiteSetting.ai_helper_enabled = true
expect(described_class.global_usage).to eq(fake_model.id => [{ type: :ai_helper }])
end

it "returns a hash of Llm models in use globally" do
expect(described_class.global_usage).to eq(fake_model.id => [{ type: :ai_helper }])
it "returns information about automation rules" do
automation.fields.create!(
component: "text",
name: "model",
metadata: {
value: "custom:#{fake_model.id}",
},
target: "script",
)

usage = described_class.global_usage

expect(usage).to eq(
{ fake_model.id => [{ type: :automation, name: "some automation", id: automation.id }] },
)
end

it "doesn't error on spam when spam detection is enabled but moderation setting is missing" do
1 change: 0 additions & 1 deletion spec/system/admin_ai_persona_spec.rb
@@ -7,7 +7,6 @@

before do
SiteSetting.ai_bot_enabled = true
SiteSetting.ai_bot_enabled_chat_bots = "gpt-4"
sign_in(admin)
end
