diff --git a/src/diffusers/loaders/lora_base.py b/src/diffusers/loaders/lora_base.py
index 4b963270427b..89bb498a3acd 100644
--- a/src/diffusers/loaders/lora_base.py
+++ b/src/diffusers/loaders/lora_base.py
@@ -699,9 +699,10 @@ def set_lora_device(self, adapter_names: List[str], device: Union[torch.device,
                             module.lora_B[adapter_name].to(device)
                             # this is a param, not a module, so device placement is not in-place -> re-assign
                             if hasattr(module, "lora_magnitude_vector") and module.lora_magnitude_vector is not None:
-                                module.lora_magnitude_vector[adapter_name] = module.lora_magnitude_vector[
-                                    adapter_name
-                                ].to(device)
+                                if adapter_name in module.lora_magnitude_vector:
+                                    module.lora_magnitude_vector[adapter_name] = module.lora_magnitude_vector[
+                                        adapter_name
+                                    ].to(device)
 
     @staticmethod
     def pack_weights(layers, prefix):