2 parents d45e7f3 + 0967593 commit e828793
src/diffusers/loaders/lora_pipeline.py
@@ -2313,7 +2313,7 @@ def _maybe_expand_transformer_param_shape_or_error_(
         for name, module in transformer.named_modules():
             if isinstance(module, torch.nn.Linear):
                 module_weight = module.weight.data
-                module_bias = module.bias.data if hasattr(module, "bias") else None
+                module_bias = module.bias.data if module.bias is not None else None
                 bias = module_bias is not None

                 lora_A_weight_name = f"{name}.lora_A.weight"
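
The fix matters because torch.nn.Linear always defines a `bias` attribute, even when constructed with `bias=False` (it is registered as None). The old `hasattr` check is therefore always true, and dereferencing `.data` on a None bias raises an AttributeError. Below is a minimal standalone sketch, not part of the commit, illustrating the failure mode and the corrected check; the variable names are illustrative.

import torch

# A Linear layer created without a bias still *has* a `bias` attribute;
# it is simply registered as None.
linear = torch.nn.Linear(4, 4, bias=False)

print(hasattr(linear, "bias"))  # True -- the attribute exists even with bias=False
print(linear.bias is None)      # True -- the condition the fix actually needs

# Old check: hasattr(...) is True here, so the old code would attempt
#   linear.bias.data
# and raise: AttributeError: 'NoneType' object has no attribute 'data'

# Fixed check: only dereference .data when a bias tensor is present.
module_bias = linear.bias.data if linear.bias is not None else None
print(module_bias)              # None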