
Commit 8f10c3d (1 parent: 61d84b0)

DEV: better

3 files changed: +38 −18 lines

app/models/llm_model.rb

Lines changed: 5 additions & 0 deletions
```diff
@@ -13,6 +13,11 @@ class LlmModel < ActiveRecord::Base
   validates_presence_of :name, :api_key
   validates :max_prompt_tokens, numericality: { greater_than: 0 }
   validate :required_provider_params
+  scope :in_use,
+        -> do
+          model_ids = DiscourseAi::Configuration::LlmEnumerator.global_usage.keys
+          where(id: model_ids)
+        end
 
   def self.provider_params
     {
```
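
The new `in_use` scope wraps the lookup the problem check previously built inline: it asks `DiscourseAi::Configuration::LlmEnumerator.global_usage` for the ids of models referenced anywhere on the site and narrows the relation to those rows. A minimal Rails-console sketch (the returned ids are invented for illustration):

```ruby
# Hypothetical console session; the ids shown are made up.
LlmModel.in_use.pluck(:id)
# => [1, 3]  # only models reported by LlmEnumerator.global_usage

# Roughly equivalent to the query the problem check used to assemble by hand:
LlmModel.where(id: DiscourseAi::Configuration::LlmEnumerator.global_usage.keys)
```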

app/services/problem_check/ai_llm_status.rb

Lines changed: 32 additions & 17 deletions
```diff
@@ -5,29 +5,44 @@ class ProblemCheck::AiLlmStatus < ProblemCheck
   # self.perform_every = 1.hour
 
   def call
-    return no_problem if !SiteSetting.discourse_ai_enabled?
-    return no_problem if llm_operational?
-
-    problem
+    [*llm_errors]
   end
 
   private
 
-  def llm_operational?
-    model_ids = DiscourseAi::Configuration::LlmEnumerator.global_usage.keys
-    models_to_check = LlmModel.where(id: model_ids)
-
-    models_to_check.each do |model|
-      begin
-        result = validator.run_test(model)
-        return false unless result
-      rescue StandardError => e
-        Rails.logger.warn("LlmValidator encountered an error for model #{model.id}: #{e.message}")
-        return false
-      end
+  def targets
+    [*LlmModel.in_use.pluck(:id)]
+  end
+
+  def llm_errors
+    return [] if !SiteSetting.discourse_ai_enabled
+
+    LlmModel.in_use.find_each.filter_map do |model|
+      try_validate(model) { validator.run_test(model) }
     end
+  end
 
-    return true
+  def try_validate(model, &blk)
+    begin
+      blk.call
+      nil
+    rescue => e
+      error_message = JSON.parse(e.message)["message"]
+      message =
+        "#{I18n.t("dashboard.problem.ai_llm_status", { model_name: model.display_name, model_id: model.id })}"
+
+      Problem.new(
+        message,
+        priority: "high",
+        identifier: "ai_llm_checker",
+        target: model.id,
+        details: {
+          model_id: model.id,
+          model_name: model.display_name,
+          error: error_message,
+        },
+      )
+    end
   end
 
   def validator
```
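
With this change `call` returns an array with one `Problem` per in-use model whose test raises, instead of a single problem/no_problem verdict, and `try_validate` assumes the raised exception carries a JSON error body in its message. A small self-contained sketch of that parsing step (the payload below is invented):

```ruby
require "json"

# Invented error payload; mirrors how try_validate pulls a human-readable
# message out of an exception whose message is a JSON body.
begin
  raise StandardError, { message: "401 Unauthorized: invalid API key" }.to_json
rescue => e
  puts JSON.parse(e.message)["message"]
  # prints: 401 Unauthorized: invalid API key
end
```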

config/locales/server.en.yml

Lines changed: 1 addition & 1 deletion
```diff
@@ -455,4 +455,4 @@ en:
       prompt_message_length: The message %{idx} is over the 1000 character limit.
   dashboard:
     problem:
-      ai_llm_status: "An LLM Model needs attention!"
+      ai_llm_status: "The LLM model: %{model_name} is encountering issues. Please check the <a href='/admin/plugins/discourse-ai/ai-llms/%{model_id}'>model's configuration</a>."
```
