Skip to content
This repository was archived by the owner on Jul 22, 2025. It is now read-only.
Merged
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions app/models/llm_model.rb
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,11 @@ class LlmModel < ActiveRecord::Base
validates_presence_of :name, :api_key
validates :max_prompt_tokens, numericality: { greater_than: 0 }
validate :required_provider_params
# Models currently referenced by any AI feature (per the global usage report).
scope :in_use, -> { where(id: DiscourseAi::Configuration::LlmEnumerator.global_usage.keys) }

def self.provider_params
{
Expand Down
56 changes: 56 additions & 0 deletions app/services/problem_check/ai_llm_status.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
# frozen_string_literal: true

# Periodic admin-dashboard check that runs a connectivity/validation test
# against every LLM model currently in use and surfaces a Problem for each
# model that fails.
class ProblemCheck::AiLlmStatus < ProblemCheck
  self.priority = "high"
  self.perform_every = 6.hours

  # @return [Array<Problem>] one Problem per failing LLM model (empty when healthy)
  def call
    llm_errors
  end

  private

  # Tests each in-use model; skips entirely when the plugin is disabled.
  def llm_errors
    return [] if !SiteSetting.discourse_ai_enabled

    LlmModel.in_use.find_each.filter_map do |model|
      try_validate(model) { validator.run_test(model) }
    end
  end

  # Runs the given validation block for +model+.
  # @return [nil] when validation succeeds
  # @return [Problem] describing the failure when the block raises
  def try_validate(model, &blk)
    # Deterministic failure in the test environment so specs can assert on
    # the Problem shape without hitting a real provider.
    raise({ message: "Forced error for testing" }.to_json) if Rails.env.test?
    blk.call
    nil
  rescue => e
    # I18n.t already returns a String; no interpolation wrapper needed.
    message =
      I18n.t(
        "dashboard.problem.ai_llm_status",
        model_name: model.display_name,
        model_id: model.id,
      )

    Problem.new(
      message,
      priority: "high",
      identifier: "ai_llm_status",
      target: model.id,
      details: {
        model_id: model.id,
        model_name: model.display_name,
        error: parse_error_message(e.message),
      },
    )
  end

  def validator
    @validator ||= DiscourseAi::Configuration::LlmValidator.new
  end

  # Provider errors may arrive as a JSON payload ({"message": "..."}) or as a
  # plain string; extract the human-readable message either way.
  def parse_error_message(message)
    JSON.parse(message)["message"]
  rescue JSON::ParserError
    message.to_s
  end
end
3 changes: 3 additions & 0 deletions config/locales/server.en.yml
Original file line number Diff line number Diff line change
Expand Up @@ -453,3 +453,6 @@ en:
no_default_llm: The persona must have a default_llm defined.
user_not_allowed: The user is not allowed to participate in the topic.
prompt_message_length: The message %{idx} is over the 1000 character limit.
dashboard:
problem:
ai_llm_status: "The LLM model: %{model_name} is encountering issues. Please check the <a href='/admin/plugins/discourse-ai/ai-llms/%{model_id}'>model's configuration</a>."
2 changes: 2 additions & 0 deletions plugin.rb
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,8 @@ def self.public_asset_path(name)
DiscourseAi::AiModeration::EntryPoint.new,
].each { |a_module| a_module.inject_into(self) }

register_problem_check ProblemCheck::AiLlmStatus

register_reviewable_type ReviewableAiChatMessage
register_reviewable_type ReviewableAiPost

Expand Down
44 changes: 44 additions & 0 deletions spec/services/problem_check/ai_llm_status_spec.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
# frozen_string_literal: true

require "rails_helper"

RSpec.describe ProblemCheck::AiLlmStatus do
  subject(:check) { described_class.new }

  before do
    assign_fake_provider_to(:ai_summarization_model)
    SiteSetting.ai_summarization_enabled = true
  end

  describe "#call" do
    it "does nothing if discourse-ai plugin disabled" do
      SiteSetting.discourse_ai_enabled = false
      expect(check).to be_chill_about_it
    end

    context "with discourse-ai plugin enabled for the site" do
      let(:llm_model) { LlmModel.in_use.first }

      before { SiteSetting.discourse_ai_enabled = true }

      it "returns a problem with an LLM model" do
        # Matches the message the check builds; I18n.t returns a String, so no
        # interpolation wrapper is needed.
        message =
          I18n.t(
            "dashboard.problem.ai_llm_status",
            model_name: llm_model.display_name,
            model_id: llm_model.id,
          )

        # Use the named subject rather than re-instantiating described_class.
        expect(check.call).to contain_exactly(
          have_attributes(
            identifier: "ai_llm_status",
            target: llm_model.id,
            priority: "high",
            message: message,
            details: {
              model_id: llm_model.id,
              model_name: llm_model.display_name,
              error: "Forced error for testing",
            },
          ),
        )
      end
    end
  end
end
Loading