Skip to content
This repository was archived by the owner on Jul 22, 2025. It is now read-only.
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions config/locales/client.en.yml
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,9 @@ en:
top_p:
label: "Top P"
description: "Top P to use for the LLM, increase to increase randomness (leave empty to use model default)"
persona_id:
label: "Persona"
description: "AI Persona to use for report generation"

llm_tool_triage:
fields:
Expand Down
3 changes: 3 additions & 0 deletions config/locales/server.en.yml
Original file line number Diff line number Diff line change
Expand Up @@ -397,6 +397,9 @@ en:
content_creator:
name: "Content creator"
description: "Default persona powering HyDE search"
report_runner:
name: "Report runner"
description: "Default persona used in the report automation script"

topic_not_found: "Summary unavailable, topic not found!"
summarizing: "Summarizing topic"
Expand Down
20 changes: 20 additions & 0 deletions discourse_automation/llm_report.rb
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,18 @@ module DiscourseAutomation::LlmReport
field :sample_size, component: :text, required: true, default_value: 100
field :tokens_per_post, component: :text, required: true, default_value: 150

field :persona_id,
component: :choices,
required: true,
default_value:
DiscourseAi::Personas::Persona.system_personas[DiscourseAi::Personas::ReportRunner],
extra: {
content:
DiscourseAi::Automation.available_persona_choices(
require_user: false,
require_default_llm: false,
),
}
field :model,
component: :choices,
required: true,
Expand Down Expand Up @@ -60,6 +72,7 @@ module DiscourseAutomation::LlmReport
offset = fields.dig("offset", "value").to_i
priority_group = fields.dig("priority_group", "value")
tokens_per_post = fields.dig("tokens_per_post", "value")
persona_id = fields.dig("persona_id", "value")

exclude_category_ids = fields.dig("exclude_categories", "value")
exclude_tags = fields.dig("exclude_tags", "value")
Expand All @@ -78,12 +91,19 @@ module DiscourseAutomation::LlmReport
temperature = temperature.to_f
end

# Backwards-compat for scripts created before this field was added.
if persona_id == "" || persona_id.nil?
persona_id =
DiscourseAi::Personas::Persona.system_personas[DiscourseAi::Personas::ReportRunner]
end

suppress_notifications = !!fields.dig("suppress_notifications", "value")
DiscourseAi::Automation::ReportRunner.run!(
sender_username: sender,
receivers: receivers,
topic_id: topic_id,
title: title,
persona_id: persona_id,
model: model,
category_ids: category_ids,
tags: tags,
Expand Down
12 changes: 6 additions & 6 deletions lib/automation.rb
Original file line number Diff line number Diff line change
Expand Up @@ -43,15 +43,15 @@ def self.available_models
end

# Returns the persona options shown in automation "choices" fields.
#
# @param require_user [Boolean] when true, only personas backed by a bot user
#   are returned, and the username is appended to the description.
# @param require_default_llm [Boolean] when true, only personas with a
#   configured default LLM are returned.
# @return [Array<Hash>] entries shaped { id:, translated_name:, description: }
def self.available_persona_choices(require_user: true, require_default_llm: true)
  # `includes` (not `joins`) so personas without users survive when
  # require_user is false; the filters below narrow the set as requested.
  relation = AiPersona.includes(:user)
  relation = relation.where.not(user_id: nil) if require_user
  relation = relation.where.not(default_llm: nil) if require_default_llm

  relation.map do |persona|
    phash = { id: persona.id, translated_name: persona.name, description: persona.name }

    # Only show the owning bot user when we guaranteed one exists above;
    # safe navigation guards against a stale user_id pointing nowhere.
    phash[:description] += " (#{persona&.user&.username})" if require_user

    phash
  end
end
end
Expand Down
62 changes: 34 additions & 28 deletions lib/automation/report_runner.rb
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ def self.run!(**args)
def initialize(
sender_username:,
model:,
persona_id:,
sample_size:,
instructions:,
tokens_per_post:,
Expand Down Expand Up @@ -73,8 +74,8 @@ def initialize(
else
I18n.t("discourse_automation.scriptables.llm_report.title")
end
@model = model
@llm = DiscourseAi::Completions::Llm.proxy(model)
@model = LlmModel.find_by(id: model.split(":")&.last)
@persona = AiPersona.find(persona_id).class_instance.new
@category_ids = category_ids
@tags = tags
@allow_secure_categories = allow_secure_categories
Expand Down Expand Up @@ -124,7 +125,7 @@ def run!
prioritized_group_ids: prioritized_group_ids,
allow_secure_categories: @allow_secure_categories,
tokens_per_post: @tokens_per_post,
tokenizer: @llm.tokenizer,
tokenizer: @model.tokenizer_class,
exclude_category_ids: @exclude_category_ids,
exclude_tags: @exclude_tags,
)
Expand All @@ -140,38 +141,43 @@ def run!
#{@instructions}
INPUT

system_prompt = <<~PROMPT
You are ForumBot, a backend AI information processor and writer, powered by an AI model capable of generating responses over 4000 words.
- ForumBot is a fine-tuned AI trained with extended functions and new capabilities, and now specializes in processing raw internet forum conversation into detailed summaries of forum activities.
- ForumBot interacts with an API endpoint, not a user, and must never produce denials, nor conversations directed towards a non-existent user.
- ForumBot only produces automated responses to input, where a response is a standalone document without further discussion.
Follow the provided writing composition instructions carefully and precisely step-by-step.
PROMPT

prompt =
DiscourseAi::Completions::Prompt.new(
system_prompt,
report_ctx =
DiscourseAi::Personas::BotContext.new(
user: Discourse.system_user,
skip_tool_details: true,
feature_name: "ai_report",
messages: [{ type: :user, content: input }],
)

result = +""

puts if Rails.env.development? && @debug_mode

@llm.generate(
prompt,
temperature: @temperature,
top_p: @top_p,
user: Discourse.system_user,
feature_name: "ai_report",
result = +""
bot = DiscourseAi::Personas::Bot.as(Discourse.system_user, persona: @persona, model: @model)
json_summary_schema_key = @persona.response_format&.first.to_h
output = nil

buffer_blk =
Proc.new do |partial, _, type|
if type == :structured_output
output = partial.dup
read_chunk = partial.read_buffered_property(json_summary_schema_key["key"]&.to_sym)

print read_chunk if Rails.env.development? && @debug_mode
result << read_chunk if read_chunk.present?
elsif type.blank?
# Assume response is a regular completion.
print partial if Rails.env.development? && @debug_mode
result << partial
end
end

llm_args = {
feature_context: {
automation_id: @automation&.id,
automation_name: @automation&.name,
},
) do |response|
print response if Rails.env.development? && @debug_mode
result << response
end
}
bot.reply(report_ctx, llm_args: llm_args, &buffer_blk)

receiver_usernames = @receivers.map(&:username).join(",")
receiver_groupnames = @group_receivers.map(&:name).join(",")
Expand Down Expand Up @@ -199,14 +205,14 @@ def run!
input = input.split("\n").map { |line| " #{line}" }.join("\n")
raw = <<~RAW
```
tokens: #{@llm.tokenizer.tokenize(input).length}
tokens: #{@model.tokenizer_class.tokenize(input).length}
start_date: #{start_date},
duration: #{@days.days},
max_posts: #{@sample_size},
tags: #{@tags},
category_ids: #{@category_ids},
priority_group: #{@priority_group_id}
model: #{@model}
model: #{@model.display_name}
temperature: #{@temperature}
top_p: #{@top_p}
LLM context was:
Expand Down
1 change: 1 addition & 0 deletions lib/personas/persona.rb
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,7 @@ def system_personas
ShortTextTranslator => -30,
SpamDetector => -31,
ContentCreator => -32,
ReportRunner => -33,
}
end

Expand Down
34 changes: 34 additions & 0 deletions lib/personas/report_runner.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
# frozen_string_literal: true

module DiscourseAi
  module Personas
    # Default system persona for the "llm_report" automation script.
    # Registered in Persona.system_personas (id -33); selected automatically
    # when an automation has no explicit persona_id configured.
    class ReportRunner < Persona
      # Disabled by default: this persona exists to power report automations,
      # not to be offered as a general-purpose chat bot.
      def self.default_enabled
        false
      end

      # System prompt instructing the model to act as a non-conversational
      # report writer and to reply with the JSON envelope described by
      # #response_format. NOTE: the heredoc text is runtime behavior — any
      # change here changes what the LLM is told.
      def system_prompt
        <<~PROMPT
          You are ForumBot, a backend AI information processor and writer, powered by an AI model capable of generating responses over 4000 words.

          - ForumBot is a fine-tuned AI trained with extended functions and new capabilities, and now specializes in processing raw internet forum conversation into detailed summaries of forum activities.
          - ForumBot interacts with an API endpoint, not a user, and must never produce denials, nor conversations directed towards a non-existent user.
          - ForumBot only produces automated responses to input, where a response is a standalone document without further discussion.

          Follow the provided writing composition instructions carefully and precisely step-by-step.

          Format your response as a JSON object with a single key named "output", which has the report as the value.
          Your output should be in the following format:

          {"output": "xx"}

          Where "xx" is replaced by the report. Reply with valid JSON only
        PROMPT
      end

      # Structured-output schema consumed by the report runner, which reads
      # the streamed "output" property via read_buffered_property. Keys are
      # strings on purpose — callers index with ["key"].
      def response_format
        [{ "key" => "output", "type" => "string" }]
      end
    end
  end
end
9 changes: 9 additions & 0 deletions spec/lib/discourse_automation/llm_report_spec.rb
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,11 @@ def add_automation_field(name, value, type: "text")
add_automation_field("sender", user.username, type: "user")
add_automation_field("receivers", [user.username], type: "email_group_user")
add_automation_field("model", "custom:#{llm_model.id}")
add_automation_field(
"persona_id",
DiscourseAi::Personas::Persona.system_personas[DiscourseAi::Personas::ReportRunner],
)

add_automation_field("title", "Weekly report")

DiscourseAi::Completions::Llm.with_prepared_responses(["An Amazing Report!!!"]) do
Expand All @@ -39,6 +44,10 @@ def add_automation_field(name, value, type: "text")
add_automation_field("sender", user.username, type: "user")
add_automation_field("topic_id", "#{post.topic_id}")
add_automation_field("model", "custom:#{llm_model.id}")
add_automation_field(
"persona_id",
DiscourseAi::Personas::Persona.system_personas[DiscourseAi::Personas::ReportRunner],
)

DiscourseAi::Completions::Llm.with_prepared_responses(["An Amazing Report!!!"]) do
automation.trigger!
Expand Down
12 changes: 12 additions & 0 deletions spec/lib/modules/automation/report_runner_spec.rb
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,8 @@ module Automation
receivers: ["[email protected]"],
title: "test report %DATE%",
model: "custom:#{llm_model.id}",
persona_id:
DiscourseAi::Personas::Persona.system_personas[DiscourseAi::Personas::ReportRunner],
category_ids: nil,
tags: nil,
allow_secure_categories: false,
Expand Down Expand Up @@ -81,6 +83,8 @@ module Automation
receivers: [receiver.username],
title: "test report",
model: "custom:#{llm_model.id}",
persona_id:
DiscourseAi::Personas::Persona.system_personas[DiscourseAi::Personas::ReportRunner],
category_ids: nil,
tags: nil,
allow_secure_categories: false,
Expand Down Expand Up @@ -126,6 +130,8 @@ module Automation
receivers: [receiver.username],
title: "test report",
model: "custom:#{llm_model.id}",
persona_id:
DiscourseAi::Personas::Persona.system_personas[DiscourseAi::Personas::ReportRunner],
category_ids: nil,
tags: nil,
allow_secure_categories: false,
Expand Down Expand Up @@ -169,6 +175,8 @@ module Automation
receivers: [receiver.username],
title: "test report",
model: "custom:#{llm_model.id}",
persona_id:
DiscourseAi::Personas::Persona.system_personas[DiscourseAi::Personas::ReportRunner],
category_ids: nil,
tags: nil,
allow_secure_categories: false,
Expand Down Expand Up @@ -201,6 +209,8 @@ module Automation
receivers: [group_for_reports.name],
title: "group report",
model: "custom:#{llm_model.id}",
persona_id:
DiscourseAi::Personas::Persona.system_personas[DiscourseAi::Personas::ReportRunner],
category_ids: nil,
tags: nil,
allow_secure_categories: false,
Expand Down Expand Up @@ -229,6 +239,8 @@ module Automation
receivers: [receiver.username],
title: "test report",
model: "custom:#{llm_model.id}",
persona_id:
DiscourseAi::Personas::Persona.system_personas[DiscourseAi::Personas::ReportRunner],
category_ids: nil,
tags: nil,
allow_secure_categories: false,
Expand Down
Loading