We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 7db9b44 · commit 786babb — Copy full SHA for 786babb
tests/pipelines/flux/test_pipeline_flux.py
@@ -138,13 +138,12 @@ def test_flux_prompt_embeds(self):
138
inputs = self.get_dummy_inputs(torch_device)
139
prompt = inputs.pop("prompt")
140
141
- prompt_outputs = pipe.encode_prompt(
+ (prompt_embeds, pooled_prompt_embeds, text_ids) = pipe.encode_prompt(
142
prompt,
143
prompt_2=None,
144
device=torch_device,
145
max_sequence_length=inputs["max_sequence_length"],
146
)
147
- prompt_embeds, pooled_prompt_embeds = prompt_outputs[0], prompt_outputs[1]
148
output_with_embeds = pipe(
149
prompt_embeds=prompt_embeds,
150
pooled_prompt_embeds=pooled_prompt_embeds,
0 commit comments