
Commit 6f2ded5

Merge branch 'main' into remove-explicit-typing
2 parents: 6d2a80c + 6d1a648

File tree

7 files changed (+7, -7 lines)


src/diffusers/pipelines/controlnet_sd3/pipeline_stable_diffusion_3_controlnet.py

Lines changed: 1 addition & 1 deletion
@@ -267,7 +267,7 @@ def _get_t5_prompt_embeds(
             return torch.zeros(
                 (
                     batch_size * num_images_per_prompt,
-                    self.tokenizer_max_length,
+                    max_sequence_length,
                     self.transformer.config.joint_attention_dim,
                 ),
                 device=device,
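
All seven files apply this same one-line change inside _get_t5_prompt_embeds: when the pipeline has no T5 text encoder loaded, the zero placeholder prompt embedding is now sized by the caller-supplied max_sequence_length argument instead of the fixed self.tokenizer_max_length, so its sequence dimension matches what the T5 path would return. A minimal standalone sketch of the resulting shape logic; the function name, defaults, and the 4096 example dimension are illustrative assumptions, only the torch.zeros call mirrors the diff:

import torch

def placeholder_t5_embeds(batch_size, num_images_per_prompt, max_sequence_length,
                          joint_attention_dim, device=None, dtype=None):
    # Before this change the middle dimension was self.tokenizer_max_length;
    # now it follows max_sequence_length, matching the embeddings produced
    # when a T5 text encoder is actually present.
    return torch.zeros(
        (batch_size * num_images_per_prompt, max_sequence_length, joint_attention_dim),
        device=device,
        dtype=dtype,
    )

# Example: placeholder_t5_embeds(1, 2, 256, 4096).shape == (2, 256, 4096)

The six diffs below are identical apart from the hunk's line numbers.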

src/diffusers/pipelines/controlnet_sd3/pipeline_stable_diffusion_3_controlnet_inpainting.py

Lines changed: 1 addition & 1 deletion
@@ -285,7 +285,7 @@ def _get_t5_prompt_embeds(
             return torch.zeros(
                 (
                     batch_size * num_images_per_prompt,
-                    self.tokenizer_max_length,
+                    max_sequence_length,
                     self.transformer.config.joint_attention_dim,
                 ),
                 device=device,

src/diffusers/pipelines/pag/pipeline_pag_sd_3.py

Lines changed: 1 addition & 1 deletion
@@ -237,7 +237,7 @@ def _get_t5_prompt_embeds(
             return torch.zeros(
                 (
                     batch_size * num_images_per_prompt,
-                    self.tokenizer_max_length,
+                    max_sequence_length,
                     self.transformer.config.joint_attention_dim,
                 ),
                 device=device,

src/diffusers/pipelines/pag/pipeline_pag_sd_3_img2img.py

Lines changed: 1 addition & 1 deletion
@@ -253,7 +253,7 @@ def _get_t5_prompt_embeds(
             return torch.zeros(
                 (
                     batch_size * num_images_per_prompt,
-                    self.tokenizer_max_length,
+                    max_sequence_length,
                     self.transformer.config.joint_attention_dim,
                 ),
                 device=device,

src/diffusers/pipelines/stable_diffusion_3/pipeline_stable_diffusion_3.py

Lines changed: 1 addition & 1 deletion
@@ -248,7 +248,7 @@ def _get_t5_prompt_embeds(
             return torch.zeros(
                 (
                     batch_size * num_images_per_prompt,
-                    self.tokenizer_max_length,
+                    max_sequence_length,
                     self.transformer.config.joint_attention_dim,
                 ),
                 device=device,

src/diffusers/pipelines/stable_diffusion_3/pipeline_stable_diffusion_3_img2img.py

Lines changed: 1 addition & 1 deletion
@@ -272,7 +272,7 @@ def _get_t5_prompt_embeds(
             return torch.zeros(
                 (
                     batch_size * num_images_per_prompt,
-                    self.tokenizer_max_length,
+                    max_sequence_length,
                     self.transformer.config.joint_attention_dim,
                 ),
                 device=device,

src/diffusers/pipelines/stable_diffusion_3/pipeline_stable_diffusion_3_inpaint.py

Lines changed: 1 addition & 1 deletion
@@ -278,7 +278,7 @@ def _get_t5_prompt_embeds(
             return torch.zeros(
                 (
                     batch_size * num_images_per_prompt,
-                    self.tokenizer_max_length,
+                    max_sequence_length,
                     self.transformer.config.joint_attention_dim,
                 ),
                 device=device,

0 commit comments
