Commit 66ce061

Add max_new_tokens and set it to max_length
1 parent 2bb618c commit 66ce061

File tree

1 file changed (+1, -0 lines)


haystack/nodes/prompt/providers.py

Lines changed: 1 addition & 0 deletions
@@ -265,6 +265,7 @@ def invoke(self, *args, **kwargs):
             # Thus only generated text is returned (excluding prompt)
             if "text-generation" == self.task_name and "return_full_text" not in model_input_kwargs:
                 model_input_kwargs["return_full_text"] = False
+                model_input_kwargs["max_new_tokens"] = self.max_length
             if stop_words:
                 sw = StopWordsCriteria(tokenizer=self.pipe.tokenizer, stop_words=stop_words)
                 model_input_kwargs["stopping_criteria"] = StoppingCriteriaList([sw])
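Background for the change (illustrative, not part of the commit): in Hugging Face transformers, max_length caps the prompt plus the generated tokens, whereas max_new_tokens caps only the newly generated tokens, so setting it from self.max_length keeps the output length independent of the prompt length. The sketch below calls a text-generation pipeline with the same kwargs the provider forwards; the "gpt2" model name and the prompt string are placeholders chosen for illustration.

# Minimal sketch assuming a local Hugging Face text-generation pipeline;
# "gpt2" and the prompt are illustrative placeholders, not from the commit.
from transformers import pipeline

generator = pipeline("text-generation", model="gpt2")

result = generator(
    "Haystack is an open source framework for",
    max_new_tokens=30,        # bounds only the generated continuation, not the prompt
    return_full_text=False,   # mirrors the provider: return generated text without the prompt
)
print(result[0]["generated_text"])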
