2 parents 6134491 + 0967593 commit db827b5
src/diffusers/loaders/lora_pipeline.py
@@ -2301,7 +2301,7 @@ def _maybe_expand_transformer_param_shape_or_error_(
     for name, module in transformer.named_modules():
         if isinstance(module, torch.nn.Linear):
             module_weight = module.weight.data
-            module_bias = module.bias.data if hasattr(module, "bias") else None
+            module_bias = module.bias.data if module.bias is not None else None
             bias = module_bias is not None
 
             lora_A_weight_name = f"{name}.lora_A.weight"
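The fix swaps an `hasattr` test for an `is not None` test: `torch.nn.Linear` always defines a `bias` attribute (it is registered as `None` when the layer is built with `bias=False`), so `hasattr(module, "bias")` is always true and the old expression raised an `AttributeError` on bias-free layers. A minimal standalone sketch (not part of the commit) demonstrating the difference:

```python
import torch

# nn.Linear registers `bias` even when constructed with bias=False --
# the attribute exists but holds None, so hasattr() is always True.
linear_no_bias = torch.nn.Linear(4, 4, bias=False)
print(hasattr(linear_no_bias, "bias"))  # True: attribute exists...
print(linear_no_bias.bias)              # ...but its value is None

# Old check: hasattr() passes, then None.data raises AttributeError.
try:
    module_bias = linear_no_bias.bias.data if hasattr(linear_no_bias, "bias") else None
except AttributeError as exc:
    print(f"old check fails: {exc}")

# Fixed check: tests the value rather than the attribute's existence.
module_bias = linear_no_bias.bias.data if linear_no_bias.bias is not None else None
print(module_bias)                      # None, as intended
```

The same `value is not None` pattern applies to any optional parameter in PyTorch modules, since `register_parameter(name, None)` still creates the attribute.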