Skip to content
This repository was archived by the owner on Jul 22, 2025. It is now read-only.
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 15 additions & 0 deletions config/eval-llms.yml
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,21 @@ llms:
max_prompt_tokens: 200000
vision_enabled: true

claude-3.7-sonnet-thinking:
display_name: Claude 3.7 Sonnet Thinking
name: claude-3-7-sonnet-latest
tokenizer: DiscourseAi::Tokenizer::AnthropicTokenizer
api_key_env: ANTHROPIC_API_KEY
provider: anthropic
url: https://api.anthropic.com/v1/messages
max_prompt_tokens: 200000
vision_enabled: true
provider_params:
disable_top_p: true
disable_temperature: true
enable_reasoning: true
reasoning_tokens: 1024

gemini-2.0-flash:
display_name: Gemini 2.0 Flash
name: gemini-2-0-flash
Expand Down
3 changes: 3 additions & 0 deletions config/locales/client.en.yml
Original file line number Diff line number Diff line change
Expand Up @@ -107,6 +107,9 @@ en:
whisper:
label: "Reply as Whisper"
description: "Whether the persona's response should be a whisper"
silent_mode:
label: "Silent Mode"
description: "In silent mode the persona will receive the content but will not post anything on the forum - useful when performing triage using tools"
llm_triage:
fields:
system_prompt:
Expand Down
7 changes: 5 additions & 2 deletions discourse_automation/llm_persona_triage.rb
Original file line number Diff line number Diff line change
Expand Up @@ -14,13 +14,15 @@
content: DiscourseAi::Automation.available_persona_choices,
}
field :whisper, component: :boolean
field :silent_mode, component: :boolean

script do |context, fields|
post = context["post"]
next if post&.user&.bot?

persona_id = fields["persona"]["value"]
whisper = fields["whisper"]["value"]
persona_id = fields.dig("persona", "value")
whisper = !!fields.dig("whisper", "value")
silent_mode = !!fields.dig("silent_mode", "value")

begin
RateLimiter.new(
Expand All @@ -42,6 +44,7 @@
persona_id: persona_id,
whisper: whisper,
automation: self.automation,
silent_mode: silent_mode,
)
rescue => e
Discourse.warn_exception(
Expand Down
19 changes: 17 additions & 2 deletions lib/ai_bot/playground.rb
Original file line number Diff line number Diff line change
Expand Up @@ -188,7 +188,8 @@ def self.reply_to_post(
whisper: nil,
add_user_to_pm: false,
stream_reply: false,
auto_set_title: false
auto_set_title: false,
silent_mode: false
)
ai_persona = AiPersona.find_by(id: persona_id)
raise Discourse::InvalidParameters.new(:persona_id) if !ai_persona
Expand All @@ -207,7 +208,15 @@ def self.reply_to_post(
add_user_to_pm: add_user_to_pm,
stream_reply: stream_reply,
auto_set_title: auto_set_title,
silent_mode: silent_mode,
)
rescue => e
if Rails.env.test?
p e
puts e.backtrace[0..10]
else
raise e
end
end

def initialize(bot)
Expand Down Expand Up @@ -475,13 +484,19 @@ def reply_to(
add_user_to_pm: true,
stream_reply: nil,
auto_set_title: true,
silent_mode: false,
&blk
)
# this is a multithreading issue
# post custom prompt is needed and it may not
# be properly loaded, ensure it is loaded
PostCustomPrompt.none

if silent_mode
auto_set_title = false
stream_reply = false
end

reply = +""
post_streamer = nil

Expand Down Expand Up @@ -590,7 +605,7 @@ def reply_to(
end
end

return if reply.blank?
return if reply.blank? || silent_mode

if stream_reply
post_streamer.finish
Expand Down
56 changes: 56 additions & 0 deletions lib/ai_bot/tool_runner.rb
Original file line number Diff line number Diff line change
Expand Up @@ -89,6 +89,13 @@ def framework_script
},
};
},
createChatMessage: function(params) {
const result = _discourse_create_chat_message(params);
if (result.error) {
throw new Error(result.error);
}
return result;
},
};

const context = #{JSON.generate(@context)};
Expand Down Expand Up @@ -345,6 +352,55 @@ def attach_discourse(mini_racer_context)
end,
)

mini_racer_context.attach(
"_discourse_create_chat_message",
->(params) do
in_attached_function do
params = params.symbolize_keys
channel_name = params[:channel_name]
username = params[:username]
message = params[:message]

# Validate parameters
return { error: "Missing required parameter: channel_name" } if channel_name.blank?
return { error: "Missing required parameter: username" } if username.blank?
return { error: "Missing required parameter: message" } if message.blank?

# Find the user
user = User.find_by(username: username)
return { error: "User not found: #{username}" } if user.nil?

# Find the channel
channel = Chat::Channel.find_by(name: channel_name)
if channel.nil?
# Try finding by slug if not found by name
channel = Chat::Channel.find_by(slug: channel_name.parameterize)
end
return { error: "Channel not found: #{channel_name}" } if channel.nil?

begin
guardian = Guardian.new(user)
message =
ChatSDK::Message.create(
raw: message,
channel_id: channel.id,
guardian: guardian,
enforce_membership: !channel.direct_message_channel?,
)

{
success: true,
message_id: message.id,
message: message.message,
created_at: message.created_at.iso8601,
}
rescue => e
{ error: "Failed to create chat message: #{e.message}" }
end
end
end,
)

mini_racer_context.attach(
"_discourse_search",
->(params) do
Expand Down
3 changes: 2 additions & 1 deletion lib/automation/llm_persona_triage.rb
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,12 @@
module DiscourseAi
module Automation
module LlmPersonaTriage
def self.handle(post:, persona_id:, whisper: false, automation: nil)
def self.handle(post:, persona_id:, whisper: false, silent_mode: false, automation: nil)
DiscourseAi::AiBot::Playground.reply_to_post(
post: post,
persona_id: persona_id,
whisper: whisper,
silent_mode: silent_mode,
)
rescue => e
Discourse.warn_exception(
Expand Down
43 changes: 43 additions & 0 deletions lib/completions/prompt.rb
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,49 @@ def initialize(
@tool_choice = tool_choice
end

# this new api tries to create symmetry between responses and prompts
# this means anything we get back from the model via endpoint can be easily appended
def push_model_response(response)
response = [response] if !response.is_a? Array

thinking, thinking_signature, redacted_thinking_signature = nil

response.each do |message|
if message.is_a?(Thinking)
# we can safely skip partials here
next if message.partial?
if message.redacted
redacted_thinking_signature = message.signature
else
thinking = message.message
thinking_signature = message.signature
end
elsif message.is_a?(ToolCall)
next if message.partial?
# this is a bit surprising about the API
# needing to add arguments is not ideal
push(
type: :tool_call,
content: { arguments: message.parameters }.to_json,
id: message.id,
name: message.name,
)
elsif message.is_a?(String)
push(type: :model, content: message)
else
raise ArgumentError, "response must be an array of strings, ToolCalls, or Thinkings"
end
end

# anthropic rules are that we attach thinking to last for the response
# it is odd, I wonder if long term we just keep thinking as a separate object
if thinking || redacted_thinking_signature
messages.last[:thinking] = thinking
messages.last[:thinking_signature] = thinking_signature
messages.last[:redacted_thinking_signature] = redacted_thinking_signature
end
end

def push(
type:,
content:,
Expand Down
88 changes: 88 additions & 0 deletions spec/lib/discourse_automation/llm_persona_triage_spec.rb
Original file line number Diff line number Diff line change
Expand Up @@ -239,4 +239,92 @@ def add_automation_field(name, value, type: "text")
# should not inject persona into allowed users
expect(topic.topic_allowed_users.pluck(:user_id).sort).to eq(original_user_ids.sort)
end

describe "LLM Persona Triage with Chat Message Creation" do
fab!(:user)
fab!(:bot_user) { Fabricate(:user) }
fab!(:chat_channel) { Fabricate(:category_channel) }

fab!(:custom_tool) do
AiTool.create!(
name: "Chat Notifier",
tool_name: "chat_notifier",
description: "Creates a chat notification in a channel",
parameters: [
{ name: "channel_id", type: "integer", description: "Chat channel ID" },
{ name: "message", type: "string", description: "Message to post" },
],
script: <<~JS,
function invoke(params) {
// Create a chat message using the Chat API
const result = discourse.createChatMessage({
channel_name: '#{chat_channel.name}',
username: '#{user.username}',
message: params.message
});

chain.setCustomRaw("We are done, stopping chaing");

return {
success: true,
message_id: result.message_id,
url: result.url,
message: params.message
};
}
JS
summary: "Notify in chat channel",
created_by: Discourse.system_user,
)
end

before do
SiteSetting.chat_enabled = true

ai_persona.update!(tools: ["custom-#{custom_tool.id}"])

# Set up automation fields
automation.fields.create!(
component: "choices",
name: "persona",
metadata: {
value: ai_persona.id,
},
target: "script",
)

automation.fields.create!(
component: "boolean",
name: "silent_mode",
metadata: {
value: true,
},
target: "script",
)
end

it "can silently analyze a post and create a chat notification" do
post = Fabricate(:post, raw: "Please help with my billing issue")

# Tool response from LLM
tool_call =
DiscourseAi::Completions::ToolCall.new(
name: "chat_notifier",
parameters: {
"message" => "Hello world!",
},
id: "tool_call_1",
)

DiscourseAi::Completions::Llm.with_prepared_responses([tool_call]) do
automation.running_in_background!
automation.trigger!({ "post" => post })
end

expect(post.topic.reload.posts.count).to eq(1)

expect(chat_channel.chat_messages.count).to eq(1)
expect(chat_channel.chat_messages.last.message).to eq("Hello world!")
end
end
end