Skip to content
This repository was archived by the owner on Jul 22, 2025. It is now read-only.

Commit 0fa3b42

Browse files
committed
FIX: Add a max token limit based on the text to be translated
1 parent ad6a8cb commit 0fa3b42

File tree

2 files changed

+68
-1
lines changed

2 files changed

+68
-1
lines changed

lib/translation/base_translator.rb

Lines changed: 13 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -46,12 +46,24 @@ def get_translation(text:, bot:, translation_user:)
4646
topic: @topic,
4747
post: @post,
4848
)
49+
max_tokens = get_max_tokens(text)
50+
llm_args = { max_tokens: }
4951

5052
result = +""
51-
bot.reply(context) { |partial| result << partial }
53+
bot.reply(context, llm_args:) { |partial| result << partial }
5254
result
5355
end
5456

# Chooses a completion token budget sized to the input being translated,
# so short strings don't get an oversized budget and long strings aren't
# truncated mid-translation.
#
# @param text [String] the source text that will be translated
# @return [Integer] max_tokens to pass to the LLM:
#   500 for inputs under 100 chars, 1000 for inputs under 500 chars,
#   otherwise twice the input length.
def get_max_tokens(text)
  length = text.length

  case
  when length < 100 then 500
  when length < 500 then 1000
  else length * 2
  end
end
5567
def persona_setting
5668
raise NotImplementedError
5769
end

spec/lib/translation/base_translator_spec.rb

Lines changed: 55 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -36,6 +36,61 @@
3636
end
3737
end
3838

it "creates BotContext with the correct parameters and calls bot.reply with correct args" do
  translator =
    DiscourseAi::Translation::PostRawTranslator.new(
      text:,
      target_locale:,
      post:,
      topic: post.topic,
    )

  # The persona receives the text and target locale as a JSON payload.
  expected_content = { content: text, target_locale: target_locale }.to_json

  expect(DiscourseAi::Personas::BotContext).to receive(:new).with(
    user: an_instance_of(User),
    skip_tool_details: true,
    feature_name: "translation",
    messages: [{ type: :user, content: expected_content }],
    topic: post.topic,
    post: post,
  ).and_call_original

  # Stub the bot so we can assert the llm_args (max_tokens: 500 because
  # the fixture text is under 100 chars) without hitting a real LLM.
  bot_double = instance_double(DiscourseAi::Personas::Bot)
  expect(DiscourseAi::Personas::Bot).to receive(:as).and_return(bot_double)
  expect(bot_double).to receive(:reply).with(
    an_instance_of(DiscourseAi::Personas::BotContext),
    llm_args: {
      max_tokens: 500,
    },
  ).and_yield(llm_response)

  translator.translate
end
it "sets max_tokens correctly based on text length" do
  # Each entry: [input text, max_tokens expected for that length bucket].
  cases = [
    ["Short text", 500], # under 100 chars
    ["a" * 200, 1000], # 100..499 chars
    ["a" * 600, 1200], # 500+ chars: length * 2
  ]

  cases.each do |input, expected_max_tokens|
    translator = DiscourseAi::Translation::PostRawTranslator.new(text: input, target_locale:)

    # Stub the bot and verify the computed budget is forwarded via llm_args.
    bot_double = instance_double(DiscourseAi::Personas::Bot)
    expect(DiscourseAi::Personas::Bot).to receive(:as).and_return(bot_double)
    expect(bot_double).to receive(:reply).with(
      an_instance_of(DiscourseAi::Personas::BotContext),
      llm_args: {
        max_tokens: expected_max_tokens,
      },
    ).and_yield("translated #{input[0..10]}")

    translator.translate
  end
end
3994
it "returns the translation from the llm's response" do
4095
DiscourseAi::Completions::Llm.with_prepared_responses([llm_response]) do
4196
expect(

0 commit comments

Comments
 (0)