Commit 5311e87

FIX: DeLoRA adapter deletion issue (#2853)
Currently, adapter deletion raises an error with DeLoRA. The reason is that the dropout module is called module_dropout, i.e. its name lacks the "delora" prefix that is required for adapter-internal modules to be handled properly. This PR renames the attribute to delora_dropout. The issue was not caught earlier because the corresponding test did not include DeLoRA, so the test is updated as well. The error message raised when merging produces non-finite weights is also consolidated with the wording used elsewhere in PEFT.
1 parent a1056cf commit 5311e87
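
For context on why the prefix matters: PEFT utilities tell adapter-internal submodules apart from the wrapped base modules by checking whether the tuner's prefix appears in the module name. The snippet below is a minimal sketch of that convention, not the actual PEFT helper; the prefix value and function name are illustrative.

import torch.nn as nn

DELORA_PREFIX = "delora_"  # illustrative; the real prefix is defined in the PEFT source

def adapter_internal_keys(model: nn.Module) -> list[str]:
    # PEFT-style helpers walk named_modules() and treat a submodule as
    # adapter-internal only if the tuner prefix occurs in its name.
    return [name for name, _ in model.named_modules() if DELORA_PREFIX in name]

# A dropout container registered as "module_dropout" never matches the prefix,
# so a submodule like "...module_dropout.default" was mishandled during adapter
# deletion; renaming it to "delora_dropout" makes it follow the convention again.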

File tree: 2 files changed (+6, -5 lines)

src/peft/tuners/delora/layer.py

Lines changed: 5 additions & 5 deletions
@@ -34,14 +34,14 @@ class DeloraLayer(BaseTunerLayer):
     # All names of other parameters that may contain adapter-related parameters
     other_param_names = (
         "r",
-        "module_dropout",
+        "delora_dropout",
         "delora_w_norm",
     )
 
     def __init__(self, base_layer: nn.Module, **kwargs) -> None:
         self.base_layer = base_layer
         self.r = {}
-        self.module_dropout = nn.ModuleDict({})
+        self.delora_dropout = nn.ModuleDict({})
         self.delora_A = nn.ParameterDict({})
         self.delora_B = nn.ParameterDict({})
         self.delora_lambda = nn.ParameterDict({})
@@ -113,7 +113,7 @@ def update_layer(
             module_dropout_layer = nn.Dropout(p=module_dropout)
         else:
             module_dropout_layer = nn.Identity()
-        self.module_dropout.update(nn.ModuleDict({adapter_name: module_dropout_layer}))
+        self.delora_dropout.update(nn.ModuleDict({adapter_name: module_dropout_layer}))
 
         # Initialize weights
         self.reset_delora_parameters(adapter_name, init_weights, delora_lambda)
@@ -200,7 +200,7 @@ def merge(self, safe_merge: bool = False, adapter_names: Optional[list[str]] = N
 
                     if not torch.isfinite(orig_weights).all():
                         raise ValueError(
-                            f"NaNs detected in merged weights for adapter {active_adapter}; aborting merge"
+                            f"NaNs detected in the merged weights. The adapter {active_adapter} seems to be broken"
                         )
 
                     base_layer.weight.data = orig_weights
@@ -241,7 +241,7 @@ def forward(self, x: torch.Tensor, *args: Any, **kwargs: Any) -> torch.Tensor:
             if adapter not in self.delora_A:
                 continue
 
-            x_d = self.module_dropout[adapter](x)
+            x_d = self.delora_dropout[adapter](x)
 
             # Decomposed delta calculation
             # 1. (x * w_norm) @ A.T
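
With the rename, DeLoRA layers follow the same naming convention as the other tuners and adapter deletion goes through the standard path. Below is a minimal usage sketch; it assumes a PEFT build containing this commit, and the config class name (DeloraConfig), model checkpoint, and target modules are illustrative assumptions.

from transformers import AutoModelForCausalLM
from peft import DeloraConfig, get_peft_model

base_model = AutoModelForCausalLM.from_pretrained("facebook/opt-125m")
config = DeloraConfig(target_modules=["q_proj", "v_proj"])
peft_model = get_peft_model(base_model, config)

# Add a second adapter and delete it again; before this fix, deleting a DeLoRA
# adapter raised an error because the dropout ModuleDict lacked the "delora" prefix.
peft_model.add_adapter("delete_me", config)
peft_model.delete_adapter("delete_me")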

tests/testing_common.py

Lines changed: 1 addition & 0 deletions
@@ -1434,6 +1434,7 @@ def _test_delete_adapter(self, model_id, config_cls, config_kwargs):
             PeftType.VBLORA,
             PeftType.BONE,
             PeftType.MISS,
+            PeftType.DELORA,
         ]
         # IA3 does not support deleting adapters yet, but it just needs to be added
         # AdaLora does not support multiple adapters
