1 parent 09e1e58 commit ce5558f
src/diffusers/pipelines/flux/pipeline_flux.py
@@ -361,7 +361,10 @@ def encode_prompt(
             scale_lora_layers(self.text_encoder_2, lora_scale)

         prompt = [prompt] if isinstance(prompt, str) else prompt
-        batch_size = len(prompt)
+        if prompt is not None:
+            batch_size = len(prompt)
+        else:
+            batch_size = prompt_embeds.shape[0]

         if do_true_cfg and negative_prompt is not None:
             negative_prompt = [negative_prompt] if isinstance(negative_prompt, str) else negative_prompt
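A minimal standalone sketch of the guard this commit adds: when `prompt` is `None` (for example, when precomputed `prompt_embeds` are passed to `encode_prompt`), the batch size is taken from `prompt_embeds.shape[0]` instead of calling `len(None)`. The helper name `resolve_batch_size` and the tensor shapes below are illustrative only, not part of the diffusers API.

    # Hypothetical helper mirroring the batch-size logic added in this commit;
    # the real change lives inside FluxPipeline.encode_prompt in
    # src/diffusers/pipelines/flux/pipeline_flux.py.
    from typing import List, Optional, Union

    import torch


    def resolve_batch_size(
        prompt: Optional[Union[str, List[str]]],
        prompt_embeds: Optional[torch.Tensor],
    ) -> int:
        # As in the pipeline, a bare string is promoted to a one-element list.
        prompt = [prompt] if isinstance(prompt, str) else prompt
        if prompt is not None:
            # Text prompts were supplied, so they define the batch size.
            return len(prompt)
        # No prompts: fall back to the leading dimension of the precomputed
        # embeddings instead of failing with
        # `TypeError: object of type 'NoneType' has no len()`.
        return prompt_embeds.shape[0]


    # Illustrative shapes: embeddings for a batch of 2 prompts.
    embeds = torch.zeros(2, 512, 4096)
    assert resolve_batch_size(None, embeds) == 2
    assert resolve_batch_size("a photo of a cat", None) == 1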