
Commit 8c988f4

updates

1 parent: c8802d7

src/diffusers/loaders/peft.py

Lines changed: 4 additions & 2 deletions
@@ -54,6 +54,7 @@
     "SanaTransformer2DModel": lambda model_cls, weights: weights,
 }
 _NO_CONFIG_UPDATE_KEYS = ["to_k", "to_q", "to_v"]
+_FULL_NAME_PREFIX_FOR_PEFT = "FULL-NAME"
 
 
 def _maybe_adjust_config(config):
@@ -188,6 +189,7 @@ def load_lora_adapter(self, pretrained_model_name_or_path_or_dict, prefix="trans
         """
         from peft import LoraConfig, inject_adapter_in_model, set_peft_model_state_dict
         from peft.tuners.tuners_utils import BaseTunerLayer
+        from peft.utils.constants import FULLY_QUALIFIED_PATTERN_KEY_PREFIX
 
         cache_dir = kwargs.pop("cache_dir", None)
         force_download = kwargs.pop("force_download", False)
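A note on the new import: FULLY_QUALIFIED_PATTERN_KEY_PREFIX only exists in PEFT builds that support fully-qualified pattern keys. A minimal compatibility sketch, assuming (not confirmed by this diff) that the constant's value mirrors the _FULL_NAME_PREFIX_FOR_PEFT string added above:

    # Hypothetical guard for older PEFT releases that lack the constant.
    try:
        from peft.utils.constants import FULLY_QUALIFIED_PATTERN_KEY_PREFIX
    except ImportError:
        # Assumed fallback; the real PEFT value may differ (e.g., carry a separator).
        FULLY_QUALIFIED_PATTERN_KEY_PREFIX = "FULL-NAME"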
@@ -253,14 +255,14 @@ def load_lora_adapter(self, pretrained_model_name_or_path_or_dict, prefix="trans
                 # Cannot figure out rank from lora layers that don't have atleast 2 dimensions.
                 # Bias layers in LoRA only have a single dimension
                 if "lora_B" in key and val.ndim > 1:
-                    rank[key] = val.shape[1]
+                    rank[f"{FULLY_QUALIFIED_PATTERN_KEY_PREFIX}{key}"] = val.shape[1]
 
             if network_alphas is not None and len(network_alphas) >= 1:
                 alpha_keys = [k for k in network_alphas.keys() if k.startswith(f"{prefix}.")]
                 network_alphas = {k.replace(f"{prefix}.", ""): v for k, v in network_alphas.items() if k in alpha_keys}
 
             lora_config_kwargs = get_peft_kwargs(rank, network_alpha_dict=network_alphas, peft_state_dict=state_dict)
-            lora_config_kwargs = _maybe_adjust_config(lora_config_kwargs)
+            # lora_config_kwargs = _maybe_adjust_config(lora_config_kwargs)
 
             if "use_dora" in lora_config_kwargs:
                 if lora_config_kwargs["use_dora"]:
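Below is a minimal sketch of what the rank-collection change does, with made-up module names and tensor shapes (the enclosing loop is reconstructed for illustration). It assumes PEFT treats keys carrying the fully-qualified prefix as exact module paths rather than suffix patterns when deriving rank_pattern:

    import torch
    from peft.utils.constants import FULLY_QUALIFIED_PATTERN_KEY_PREFIX

    # Illustrative LoRA state dict: a lora_B weight has shape (out_features, rank),
    # so the rank is read from shape[1].
    state_dict = {
        "blocks.0.attn.to_q.lora_B.weight": torch.zeros(3072, 4),
        "blocks.1.attn.to_q.lora_B.weight": torch.zeros(3072, 8),
    }

    rank = {}
    for key, val in state_dict.items():
        if "lora_B" in key and val.ndim > 1:
            # Prefixing the full module path marks the key as fully qualified,
            # so two layers ending in "to_q" with different ranks (4 vs. 8 here)
            # cannot collapse into a single ambiguous "to_q" pattern.
            rank[f"{FULLY_QUALIFIED_PATTERN_KEY_PREFIX}{key}"] = val.shape[1]

With exact keys in place, the per-key disambiguation that _maybe_adjust_config performed would be redundant, which is presumably why its call is commented out rather than kept.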
