
Commit 1a171c3

Merge branch 'main' into param-device-bnb
2 parents 126d84e + 243d9a4

4 files changed: +18 additions, −12 deletions

src/diffusers/loaders/ip_adapter.py

Lines changed: 1 addition & 1 deletion

@@ -187,7 +187,7 @@ def load_ip_adapter(
                 state_dict = pretrained_model_name_or_path_or_dict

             keys = list(state_dict.keys())
-            if keys != ["image_proj", "ip_adapter"]:
+            if "image_proj" not in keys and "ip_adapter" not in keys:
                 raise ValueError("Required keys are (`image_proj` and `ip_adapter`) missing from the state dict.")

             state_dicts.append(state_dict)
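As context for the relaxed check: the old comparison required the key list to equal exactly ["image_proj", "ip_adapter"], so any extra key or different ordering raised the error, while the new condition only raises when both required keys are absent. A standalone sketch of the difference (the extra key is hypothetical, this is not diffusers code):

# Hypothetical state-dict keys, used only to illustrate the two checks.
keys = ["image_proj", "ip_adapter", "some_extra_key"]

old_would_raise = keys != ["image_proj", "ip_adapter"]
new_would_raise = "image_proj" not in keys and "ip_adapter" not in keys

print(old_would_raise)  # True: the exact-match check rejects the extra key
print(new_would_raise)  # False: both required keys are present, so no error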

src/diffusers/models/attention_processor.py

Lines changed: 3 additions & 1 deletion

@@ -1908,7 +1908,9 @@ def __call__(
             query = apply_rotary_emb(query, image_rotary_emb)
             key = apply_rotary_emb(key, image_rotary_emb)

-        hidden_states = F.scaled_dot_product_attention(query, key, value, dropout_p=0.0, is_causal=False)
+        hidden_states = F.scaled_dot_product_attention(
+            query, key, value, attn_mask=attention_mask, dropout_p=0.0, is_causal=False
+        )
         hidden_states = hidden_states.transpose(1, 2).reshape(batch_size, -1, attn.heads * head_dim)
         hidden_states = hidden_states.to(query.dtype)
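For context on the added argument: PyTorch's F.scaled_dot_product_attention accepts an optional attn_mask that is broadcast against the attention scores, so the processor's attention_mask is no longer silently dropped. A minimal sketch with toy shapes (values and shapes are illustrative, not diffusers code):

import torch
import torch.nn.functional as F

# Toy shapes: (batch, heads, seq_len, head_dim).
batch, heads, seq_len, head_dim = 2, 4, 8, 16
query = torch.randn(batch, heads, seq_len, head_dim)
key = torch.randn(batch, heads, seq_len, head_dim)
value = torch.randn(batch, heads, seq_len, head_dim)

# A boolean mask broadcastable to (batch, heads, seq_len, seq_len);
# True keeps a position, False excludes it from the softmax.
attention_mask = torch.ones(batch, 1, seq_len, seq_len, dtype=torch.bool)
attention_mask[..., -2:] = False  # e.g. ignore the last two key positions

out = F.scaled_dot_product_attention(
    query, key, value, attn_mask=attention_mask, dropout_p=0.0, is_causal=False
)
print(out.shape)  # torch.Size([2, 4, 8, 16])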

src/diffusers/models/controlnets/controlnet_sd3.py

Lines changed: 13 additions & 7 deletions

@@ -393,13 +393,19 @@ def custom_forward(*inputs):
                     return custom_forward

                 ckpt_kwargs: Dict[str, Any] = {"use_reentrant": False} if is_torch_version(">=", "1.11.0") else {}
-                encoder_hidden_states, hidden_states = torch.utils.checkpoint.checkpoint(
-                    create_custom_forward(block),
-                    hidden_states,
-                    encoder_hidden_states,
-                    temb,
-                    **ckpt_kwargs,
-                )
+                if self.context_embedder is not None:
+                    encoder_hidden_states, hidden_states = torch.utils.checkpoint.checkpoint(
+                        create_custom_forward(block),
+                        hidden_states,
+                        encoder_hidden_states,
+                        temb,
+                        **ckpt_kwargs,
+                    )
+                else:
+                    # SD3.5 8b controlnet use single transformer block, which does not use `encoder_hidden_states`
+                    hidden_states = torch.utils.checkpoint.checkpoint(
+                        create_custom_forward(block), hidden_states, temb, **ckpt_kwargs
+                    )

             else:
                 if self.context_embedder is not None:
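As a refresher on the call being branched here: torch.utils.checkpoint.checkpoint re-runs the wrapped callable during the backward pass instead of storing intermediate activations, and the non-reentrant variant is selected with use_reentrant=False on PyTorch >= 1.11. A minimal sketch with a toy block (the module and shapes are made up for illustration, not the SD3 controlnet):

import torch
import torch.nn as nn
import torch.utils.checkpoint

block = nn.Sequential(nn.Linear(32, 32), nn.GELU(), nn.Linear(32, 32))

def create_custom_forward(module):
    # Wrap the module so checkpoint() receives a plain callable over tensors.
    def custom_forward(*inputs):
        return module(*inputs)
    return custom_forward

x = torch.randn(4, 32, requires_grad=True)
out = torch.utils.checkpoint.checkpoint(create_custom_forward(block), x, use_reentrant=False)
out.sum().backward()
print(x.grad.shape)  # torch.Size([4, 32])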

src/diffusers/models/transformers/transformer_sd3.py

Lines changed: 1 addition & 3 deletions

@@ -15,7 +15,6 @@

 from typing import Any, Dict, List, Optional, Tuple, Union

-import numpy as np
 import torch
 import torch.nn as nn
 import torch.nn.functional as F
@@ -424,8 +423,7 @@ def custom_forward(*inputs):
             # controlnet residual
             if block_controlnet_hidden_states is not None and block.context_pre_only is False:
                 interval_control = len(self.transformer_blocks) / len(block_controlnet_hidden_states)
-                interval_control = int(np.ceil(interval_control))
-                hidden_states = hidden_states + block_controlnet_hidden_states[index_block // interval_control]
+                hidden_states = hidden_states + block_controlnet_hidden_states[int(index_block / interval_control)]

         hidden_states = self.norm_out(hidden_states, temb)
         hidden_states = self.proj_out(hidden_states)
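A quick arithmetic sketch of why the indexing changes when the interval is not an integer (the block counts below are made up for illustration): the old code rounded the interval up before floor-dividing, while the new code divides first and truncates, which spreads the controlnet residuals more evenly across the transformer blocks. Both variants stay within the valid index range.

import math

num_transformer_blocks = 10   # hypothetical
num_controlnet_blocks = 4     # hypothetical

interval_control = num_transformer_blocks / num_controlnet_blocks  # 2.5

for index_block in range(num_transformer_blocks):
    old_index = index_block // int(math.ceil(interval_control))  # ceil(2.5) = 3, then floor-divide
    new_index = int(index_block / interval_control)              # divide by 2.5, then truncate
    print(index_block, old_index, new_index)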
