Commit 46ae69a

FIX Small fixes to target_parameters (#2677)
1. Better error message when the same layer is targeted twice.
2. Remove the unused attribute num_experts from _LoraParameterProxy.
1 parent 1c853ea commit 46ae69a

2 files changed: +5 −4 lines changed

src/peft/tuners/lora/layer.py

Lines changed: 2 additions & 3 deletions
@@ -1767,10 +1767,9 @@ class _LoraParameterProxy(nn.Module):
     Intended to be used in conjunction with `nn.utils.parametrize`, see `ParamWrapper`.
     """
 
-    def __init__(self, delta_weight, num_experts):
+    def __init__(self, delta_weight):
         super().__init__()
         self.delta_weight = delta_weight
-        self.num_experts = num_experts
 
     def forward(self, W):
         with nn.utils.parametrize.cached():
@@ -1998,7 +1997,7 @@ def _activate_lora(self, active_adapters: list[str]):
         base_layer = self.get_base_layer()
         requires_grad_before = self.get_param().requires_grad
         nn.utils.parametrize.register_parametrization(
-            base_layer, self.parameter_name, _LoraParameterProxy(delta_weight, num_experts=self.num_experts)
+            base_layer, self.parameter_name, _LoraParameterProxy(delta_weight)
         )
         # set requires_grad, as it defaults to False
         base_layer.parametrizations[self.parameter_name].original.requires_grad_(requires_grad_before)
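For context on the hunk above: the proxy is registered as a parametrization on the base layer's parameter, so every read of that parameter goes through the proxy's forward. Below is a minimal sketch of that mechanism using a toy proxy and a plain nn.Linear; DeltaProxy and the 0.1 delta are illustrative, not PEFT's actual classes or values.

import torch
from torch import nn
from torch.nn.utils import parametrize


class DeltaProxy(nn.Module):
    # Toy stand-in for _LoraParameterProxy: adds a fixed delta when the parameter is read.
    def __init__(self, delta_weight):
        super().__init__()
        self.delta_weight = delta_weight

    def forward(self, W):
        return W + self.delta_weight


linear = nn.Linear(4, 4)
delta = torch.full_like(linear.weight, 0.1)

# After registration, every access to linear.weight returns DeltaProxy()(original weight);
# the untouched tensor lives under linear.parametrizations["weight"].original.
parametrize.register_parametrization(linear, "weight", DeltaProxy(delta))

print(torch.allclose(linear.weight, linear.parametrizations["weight"].original + 0.1))  # True

This is the same pattern _activate_lora uses: it builds the delta weight from the LoRA factors, registers the proxy, and restores the original requires_grad flag, since registration defaults it to False.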

src/peft/tuners/tuners_utils.py

Lines changed: 3 additions & 1 deletion
@@ -656,7 +656,9 @@ def _inject_parameters(
            if isinstance(target, BaseTunerLayer) and target.__class__.__name__ != "ParamWrapper":
                raise ValueError(
                    f"Trying to wrap an `nn.Parameter` of layer '{target_name}' of type "
-                    f"{type(target).__name__}, which is not a valid target."
+                    f"{type(target).__name__}, which is not a valid target. Make sure that this layer is not "
+                    "also targeted with `target_modules`. For some models, PEFT will do this automatically, "
+                    "try setting `target_modules=[]` to prevent it."
                )
 
            self._check_target_module_compatiblity(peft_config, model, target_name)
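To illustrate the situation the new error message points at: if the layer owning a targeted nn.Parameter is also matched by `target_modules` (which PEFT may fill in automatically for some models), the layer is already a tuner layer and the parameter cannot be wrapped a second time. A hedged usage sketch following the hint in the message itself; the parameter name is hypothetical, not taken from this commit.

from peft import LoraConfig

# Hypothetical parameter name; substitute whatever nn.Parameter your model exposes.
config = LoraConfig(
    target_parameters=["mlp.experts.gate_up_proj"],
    # An empty list keeps PEFT from also injecting into the owning module via the
    # (possibly automatic) target_modules, which is what raises the error above.
    target_modules=[],
)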
