This repository was archived by the owner on Jul 22, 2025. It is now read-only.

Commit 9bd892a

botify

1 parent 462e2b6 commit 9bd892a

File tree

3 files changed: +27 −55 lines changed

lib/personas/locale_detection.rb

Lines changed: 1 addition & 17 deletions
@@ -46,23 +46,7 @@ def system_prompt
       end
 
       def response_format
-        {
-          type: "json_schema",
-          json_schema: {
-            name: "reply",
-            schema: {
-              type: "object",
-              properties: {
-                locale: {
-                  type: "string",
-                },
-              },
-              required: ["locale"],
-              additionalProperties: false,
-            },
-            strict: true,
-          },
-        }
+        [{ "key" => "locale", "type" => "string" }]
       end
 
       def temperature
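
Aside (not part of the commit): the array form above reads as shorthand for the JSON schema the removed code built by hand — one entry per property, with "locale" required as a string. A minimal sketch of that expansion, using a hypothetical helper name purely for illustration:

  # Hypothetical helper (not the plugin's API): expands the shorthand entries
  # into the same JSON schema shape the old response_format constructed inline.
  def expand_response_format(entries)
    {
      type: "json_schema",
      json_schema: {
        name: "reply",
        schema: {
          type: "object",
          properties: entries.to_h { |e| [e["key"], { type: e["type"] }] },
          required: entries.map { |e| e["key"] },
          additionalProperties: false,
        },
        strict: true,
      },
    }
  end

  expand_response_format([{ "key" => "locale", "type" => "string" }])
  # => same shape as the hash deleted above, with properties: { "locale" => { type: "string" } }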

lib/translation/language_detector.rb

Lines changed: 19 additions & 37 deletions
@@ -18,55 +18,37 @@ def detect
         end
 
         persona_klass = ai_persona.class_instance
+        persona = persona_klass.new
 
-        llm_model = preferred_llm_model(ai_persona, persona_klass)
+        llm_model = LlmModel.find_by(id: preferred_llm_model(persona_klass))
         return nil if llm_model.blank?
 
-        prompt =
-          DiscourseAi::Completions::Prompt.new(
-            ai_persona.system_prompt,
-            messages: [{ type: :user, content: @text, id: "user" }],
+        bot =
+          DiscourseAi::Personas::Bot.as(
+            ai_persona.user || Discourse.system_user,
+            persona: persona,
+            model: llm_model,
           )
-        response_format = persona_klass.new.response_format
-        structured_output =
-          DiscourseAi::Completions::Llm.proxy(llm_model).generate(
-            prompt,
+
+        context =
+          DiscourseAi::Personas::BotContext.new(
             user: ai_persona.user || Discourse.system_user,
+            skip_tool_details: true,
             feature_name: "translation",
-            response_format:,
+            messages: [{ type: :user, content: @text }],
           )
 
-        structured_output&.read_buffered_property(:locale)
-      end
-
-      def response_format
-        {
-          type: "json_schema",
-          json_schema: {
-            name: "reply",
-            schema: {
-              type: "object",
-              properties: {
-                locale: {
-                  type: "string",
-                },
-              },
-              required: ["locale"],
-              additionalProperties: false,
-            },
-            strict: true,
-          },
-        }
+        structured_output = nil
+        bot.reply(context) do |partial, _, type|
+          structured_output = partial if type == :structured_output
+        end
+        structured_output&.read_buffered_property(:locale) || []
       end
 
       private
 
-      def preferred_llm_model(ai_persona, persona_klass)
-        if ai_persona.force_default_llm
-          persona_klass.default_llm_id
-        else
-          SiteSetting.ai_translation_model.presence || persona_klass.default_llm_id
-        end
+      def preferred_llm_model(persona_klass)
+        persona_klass.default_llm_id || SiteSetting.ai_translation_model&.split(":")&.last
      end
    end
  end
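
Aside (not part of the commit): roughly how the reworked detector would be exercised; the sample text and the returned value are assumptions for illustration only.

  # Illustrative usage sketch, assuming LanguageDetector.new takes the text to classify.
  # bot.reply streams partials; the detector keeps only the :structured_output partial
  # and reads its "locale" property, so the caller gets back a plain locale string.
  detector = DiscourseAi::Translation::LanguageDetector.new("Bonjour tout le monde")
  locale = detector.detect # e.g. "fr" if the persona's LLM identifies French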

spec/lib/translation/language_detector_spec.rb

Lines changed: 7 additions & 1 deletion
@@ -1,6 +1,12 @@
 # frozen_string_literal: true
 
 describe DiscourseAi::Translation::LanguageDetector do
+  let!(:persona) do
+    AiPersona.find(
+      DiscourseAi::Personas::Persona.system_personas[DiscourseAi::Personas::LocaleDetection],
+    )
+  end
+
   before do
     Fabricate(:fake_model).tap do |fake_llm|
       SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}")
@@ -38,7 +44,7 @@
         mock_prompt,
         user: Discourse.system_user,
         feature_name: "translation",
-        response_format: locale_detector.response_format,
+        response_format: persona.response_format,
       ).and_return(structured_output)
 
       locale_detector.detect
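
Aside (not part of the commit): given the change to lib/personas/locale_detection.rb above, the persona fixture added here resolves the seeded LocaleDetection system persona, so the response_format the spec now passes through is the simplified array form:

  # Illustrative only — mirrors the let!(:persona) block and the new response_format.
  persona =
    AiPersona.find(
      DiscourseAi::Personas::Persona.system_personas[DiscourseAi::Personas::LocaleDetection],
    )
  persona.response_format # => [{ "key" => "locale", "type" => "string" }]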

0 commit comments
