app/services/discourse_ai/base_translator.rb (30 changes: 14 additions, 16 deletions)
@@ -14,15 +14,15 @@ def translate
           messages: [{ type: :user, content: formatted_content, id: "user" }],
         )

-      response =
+      structured_output =
         DiscourseAi::Completions::Llm.proxy(SiteSetting.ai_helper_model).generate(
           prompt,
           user: Discourse.system_user,
           feature_name: "translator-translate",
-          extra_model_params: response_format,
+          response_format: response_format,
         )

-      JSON.parse(response)&.dig("translation")
+      structured_output&.read_latest_buffered_chunk&.dig(:translation)
     end

     def formatted_content
@@ -31,22 +31,20 @@ def formatted_content

     def response_format
       {
-        response_format: {
-          type: "json_schema",
-          json_schema: {
-            name: "reply",
-            schema: {
-              type: "object",
-              properties: {
-                translation: {
-                  type: "string",
-                },
+        type: "json_schema",
+        json_schema: {
+          name: "reply",
+          schema: {
+            type: "object",
+            properties: {
+              translation: {
+                type: "string",
               },
-              required: ["translation"],
-              additionalProperties: false,
             },
-            strict: true,
+            required: ["translation"],
+            additionalProperties: false,
           },
+          strict: true,
         },
       }
     end
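For context, the LLM proxy call now returns a DiscourseAi::Completions::StructuredOutput rather than a raw JSON string, so the service reads the parsed value directly instead of calling JSON.parse. A sketch of the resulting translate method, assembled from the hunks above; the enclosing class, the first argument to Prompt.new, and the formatted_content / response_format helpers are assumed from the existing plugin code:

    # Sketch assembled from the diff above; assumes the surrounding translator class.
    def translate
      prompt =
        DiscourseAi::Completions::Prompt.new(
          build_prompt(@target_language), # assumed helper, not shown in this diff
          messages: [{ type: :user, content: formatted_content, id: "user" }],
        )

      structured_output =
        DiscourseAi::Completions::Llm.proxy(SiteSetting.ai_helper_model).generate(
          prompt,
          user: Discourse.system_user,
          feature_name: "translator-translate",
          response_format: response_format,
        )

      # The structured output exposes the model's JSON reply as a symbol-keyed hash,
      # e.g. { translation: "..." }, so no JSON.parse is needed here.
      structured_output&.read_latest_buffered_chunk&.dig(:translation)
    end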
app/services/discourse_ai/language_detector.rb (30 changes: 14 additions, 16 deletions)
@@ -49,35 +49,33 @@ def detect
           messages: [{ type: :user, content: @text, id: "user" }],
         )

-      response =
+      structured_output =
         DiscourseAi::Completions::Llm.proxy(SiteSetting.ai_helper_model).generate(
           prompt,
           user: Discourse.system_user,
           feature_name: "translator-language-detect",
-          extra_model_params: response_format,
+          response_format: response_format,
         )

-      locale = JSON.parse(response)&.dig("locale")
+      structured_output&.read_latest_buffered_chunk&.dig(:locale)
     end

     def response_format
       {
-        response_format: {
-          type: "json_schema",
-          json_schema: {
-            name: "reply",
-            schema: {
-              type: "object",
-              properties: {
-                locale: {
-                  type: "string",
-                },
+        type: "json_schema",
+        json_schema: {
+          name: "reply",
+          schema: {
+            type: "object",
+            properties: {
+              locale: {
+                type: "string",
              },
-              required: ["locale"],
-              additionalProperties: false,
            },
-            strict: true,
+            required: ["locale"],
+            additionalProperties: false,
          },
+          strict: true,
        },
       }
     end
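Neither service changes its public entry points; only the plumbing between the prompt and the returned string is different. A usage sketch based on the constructor signatures exercised in the specs below; class names follow the spec files, and the returned locale and translation values are illustrative only:

    # Signatures come from the specs in this PR; the return values are made-up examples.
    DiscourseAi::LanguageDetector.new("cats are great").detect
    # => "en"

    DiscourseAi::PostTranslator.new("cats are great", "de").translate
    # => "Katzen sind großartig"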
spec/services/discourse_ai/base_translator_spec.rb (10 changes: 7 additions, 3 deletions)
@@ -13,7 +13,7 @@
   describe ".translate" do
     let(:text_to_translate) { "cats are great" }
     let(:target_language) { "de" }
-    let(:llm_response) { "{\"translation\":\"hur dur hur dur!\"}" }
+    let(:llm_response) { "hur dur hur dur!" }

     it "creates the correct prompt" do
       post_translator = DiscourseAi::PostTranslator.new(text_to_translate, target_language)
@@ -32,6 +32,10 @@
       mock_llm = instance_double(DiscourseAi::Completions::Llm)
       post_translator = DiscourseAi::PostTranslator.new(text_to_translate, target_language)

+      structured_output =
+        DiscourseAi::Completions::StructuredOutput.new({ translation: { type: "string" } })
+      structured_output << { translation: llm_response }.to_json
+
       allow(DiscourseAi::Completions::Prompt).to receive(:new).and_return(mock_prompt)
       allow(DiscourseAi::Completions::Llm).to receive(:proxy).with(
         SiteSetting.ai_helper_model,
@@ -40,8 +44,8 @@
         mock_prompt,
         user: Discourse.system_user,
         feature_name: "translator-translate",
-        extra_model_params: post_translator.response_format,
-      ).and_return(llm_response)
+        response_format: post_translator.response_format,
+      ).and_return(structured_output)

       post_translator.translate
     end
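These specs now feed the canned reply through a real DiscourseAi::Completions::StructuredOutput instead of returning a JSON string from the mock. A minimal sketch of that pattern on its own, assuming the API surface used above (new takes the property schema, << appends raw JSON text, read_latest_buffered_chunk returns the parsed, symbol-keyed hash):

    # Assumes the StructuredOutput API as exercised in this spec.
    output = DiscourseAi::Completions::StructuredOutput.new({ translation: { type: "string" } })
    output << { translation: "hur dur hur dur!" }.to_json
    output.read_latest_buffered_chunk # expected: { translation: "hur dur hur dur!" }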
spec/services/discourse_ai/language_detector_spec.rb (10 changes: 7 additions, 3 deletions)
@@ -12,7 +12,7 @@

   describe ".detect" do
     let(:locale_detector) { described_class.new("meow") }
-    let(:llm_response) { "{\"translation\":\"hur dur hur dur!\"}" }
+    let(:llm_response) { "hur dur hur dur!" }

     it "creates the correct prompt" do
       allow(DiscourseAi::Completions::Prompt).to receive(:new).with(
@@ -29,6 +29,10 @@
       mock_prompt = instance_double(DiscourseAi::Completions::Prompt)
       mock_llm = instance_double(DiscourseAi::Completions::Llm)

+      structured_output =
+        DiscourseAi::Completions::StructuredOutput.new({ locale: { type: "string" } })
+      structured_output << { locale: llm_response }.to_json
+
       allow(DiscourseAi::Completions::Prompt).to receive(:new).and_return(mock_prompt)
       allow(DiscourseAi::Completions::Llm).to receive(:proxy).with(
         SiteSetting.ai_helper_model,
@@ -37,8 +41,8 @@
         mock_prompt,
         user: Discourse.system_user,
         feature_name: "translator-language-detect",
-        extra_model_params: locale_detector.response_format,
-      ).and_return(llm_response)
+        response_format: locale_detector.response_format,
+      ).and_return(structured_output)

       locale_detector.detect
     end
spec/services/discourse_ai_spec.rb (26 changes: 5 additions, 21 deletions)
@@ -28,7 +28,7 @@
   describe ".detect!" do
     it "returns the detected language" do
       locale = "de"
-      DiscourseAi::Completions::Llm.with_prepared_responses([locale_json(locale)]) do
+      DiscourseAi::Completions::Llm.with_prepared_responses([locale]) do
         expect(DiscourseTranslator::Provider::DiscourseAi.detect!(post)).to eq locale
       end
     end
@@ -41,29 +41,23 @@
     end

     it "translates the post and returns [locale, translated_text]" do
-      DiscourseAi::Completions::Llm.with_prepared_responses(
-        [translation_json("some translated text")],
-      ) do
+      DiscourseAi::Completions::Llm.with_prepared_responses(["some translated text"]) do
         translated_text = DiscourseTranslator::Provider::DiscourseAi.translate_translatable!(post)
         expect(translated_text).to eq "<p>some translated text</p>"
       end
     end

     it "translates the topic" do
       allow(::DiscourseAi::TopicTranslator).to receive(:new).and_call_original
-      DiscourseAi::Completions::Llm.with_prepared_responses(
-        [translation_json("some translated text")],
-      ) do
+      DiscourseAi::Completions::Llm.with_prepared_responses(["some translated text"]) do
         translated_text = DiscourseTranslator::Provider::DiscourseAi.translate_translatable!(topic)
         expect(translated_text).to eq "some translated text"
       end
     end

     it "sends the content for splitting and the split content for translation" do
       post.update(raw: "#{"a" * 3000} #{"b" * 3000}")
-      DiscourseAi::Completions::Llm.with_prepared_responses(
-        %w[lol wut].map { |content| translation_json(content) },
-      ) do
+      DiscourseAi::Completions::Llm.with_prepared_responses(%w[lol wut]) do
         expect(
           DiscourseTranslator::Provider::DiscourseAi.translate_translatable!(post),
         ).to eq "<p>lolwut</p>"
@@ -73,20 +67,10 @@

   describe ".translate_text!" do
     it "returns the translated text" do
-      DiscourseAi::Completions::Llm.with_prepared_responses(
-        [translation_json("some translated text")],
-      ) do
+      DiscourseAi::Completions::Llm.with_prepared_responses(["some translated text"]) do
         translated_text = DiscourseTranslator::Provider::DiscourseAi.translate_text!("derp")
         expect(translated_text).to eq "some translated text"
       end
     end
   end
-
-  def locale_json(content)
-    { locale: content }.to_json
-  end
-
-  def translation_json(content)
-    { translation: content }.to_json
-  end
 end