
Commit 7ca7493

fix
1 parent e6043a0

1 file changed: +0 -12 lines changed

src/diffusers/loaders/peft.py

Lines changed: 0 additions & 12 deletions
@@ -257,15 +257,8 @@ def load_lora_adapter(self, pretrained_model_name_or_path_or_dict, prefix="trans
         model_keys = [k for k in keys if k.startswith(f"{prefix}.")]
         if len(model_keys) > 0:
             state_dict = {k.replace(f"{prefix}.", ""): v for k, v in state_dict.items() if k in model_keys}
-        else:
-            state_dict = {}
 
         if len(state_dict) > 0:
-            if prefix is None:
-                component_name = "unet" if "UNet" in self.__class__.__name__ else "transformer"
-            else:
-                component_name = prefix
-            logger.info(f"Loading {component_name}.")
             if adapter_name in getattr(self, "peft_config", {}):
                 raise ValueError(
                     f"Adapter name {adapter_name} already in use in the model - please select a new adapter name."
@@ -376,11 +369,6 @@ def load_lora_adapter(self, pretrained_model_name_or_path_or_dict, prefix="trans
                     _pipeline.enable_sequential_cpu_offload()
                 # Unsafe code />
 
-        else:
-            logger.info(
-                f"No LoRA keys found in the provided state dict for {self.__class__.__name__}. Please open an issue if you think this is unexpected - https://github.com/huggingface/diffusers/issues/new."
-            )
-
     def save_lora_adapter(
         self,
         save_directory,
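For reference, below is a minimal standalone sketch of the prefix-filtering logic that survives this commit; the toy key names and values are purely illustrative, not real LoRA weights or diffusers API calls. With the deleted "else: state_dict = {}" fallback gone, a state dict containing no keys under the prefix now passes through unchanged instead of being emptied, which is presumably why the "No LoRA keys found" else branch in the second hunk is deleted as well: it could only fire when the dict had been emptied by that fallback.

    # Sketch of the retained filtering behavior (names here are hypothetical).
    prefix = "transformer"
    state_dict = {
        "transformer.lora_A.weight": 1,   # matches the prefix -> kept, prefix stripped
        "text_encoder.lora_A.weight": 2,  # no match -> dropped by the comprehension
    }

    keys = list(state_dict.keys())
    # Keep only keys that live under the prefix, then strip the prefix itself.
    model_keys = [k for k in keys if k.startswith(f"{prefix}.")]
    if len(model_keys) > 0:
        state_dict = {k.replace(f"{prefix}.", ""): v for k, v in state_dict.items() if k in model_keys}
    # After this commit there is no else branch: a dict with zero matching
    # keys is left as-is rather than replaced with {}.

    print(state_dict)  # {'lora_A.weight': 1}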
