We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 7787ec1 · commit f20e4af (Copy full SHA for f20e4af)
src/diffusers/pipelines/flux/pipeline_flux.py
@@ -248,7 +248,7 @@ def _get_t5_prompt_embeds(
248
padding="max_length",
249
max_length=max_sequence_length,
250
truncation=True,
251
- return_length=(self.variant == "chroma"),
+ return_length=True,
252
return_overflowing_tokens=False,
253
return_tensors="pt",
254
)
@@ -262,6 +262,7 @@ def _get_t5_prompt_embeds(
262
f" {max_sequence_length} tokens: {removed_text}"
263
264
265
+ text_inputs.attention_mask[:, : text_inputs.length + 1] = 1.0
266
prompt_embeds = self.text_encoder_2(
267
text_input_ids.to(device), output_hidden_states=False, attention_mask=text_inputs.attention_mask.to(device)
268
)[0]
0 commit comments