Skip to content

Commit 2f05455

Browse files
committed
updates
1 parent 456e975 commit 2f05455

File tree

1 file changed

+2
-1
lines changed

1 file changed

+2
-1
lines changed

src/diffusers/loaders/lora_pipeline.py

Lines changed: 2 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -2307,7 +2307,6 @@ def unload_lora_weights(self):
23072307
in_features,
23082308
out_features,
23092309
bias=bias,
2310-
device=module_weight.device,
23112310
dtype=module_weight.dtype,
23122311
)
23132312

@@ -2423,6 +2422,8 @@ def _maybe_expand_transformer_param_shape_or_error_(
24232422
logger.info(f"Set the {attribute_name} attribute of the model to {new_value} from {old_value}.")
24242423

24252424
# For `unload_lora_weights()`.
2425+
# TODO: this could lead to more memory overhead if the number of overwritten params
2426+
# are large. Should be revisited later and tackled through a `discard_original_layers` arg.
24262427
overwritten_params[f"{current_module_name}.weight"] = module_weight
24272428
if module_bias is not None:
24282429
overwritten_params[f"{current_module_name}.bias"] = module_bias

0 commit comments

Comments (0)