
Commit 8a9e2f5

Handle bias in full/diff lora layer
1 parent 31949ed commit 8a9e2f5

File tree

1 file changed: +3 -5 lines changed

invokeai/backend/lora.py

Lines changed: 3 additions & 5 deletions
@@ -260,7 +260,9 @@ def to(self, device: Optional[torch.device] = None, dtype: Optional[torch.dtype]
 
 
 class FullLayer(LoRALayerBase):
+    # bias handled in LoRALayerBase(calc_size, to)
     # weight: torch.Tensor
+    # bias: Optional[torch.Tensor]
 
     def __init__(
         self,
@@ -270,11 +272,7 @@ def __init__(
         super().__init__(layer_key, values)
 
         self.weight = values["diff"]
-
-        if len(values.keys()) > 1:
-            _keys = list(values.keys())
-            _keys.remove("diff")
-            raise NotImplementedError(f"Unexpected keys in lora diff layer: {_keys}")
+        self.bias = values.get("diff_b", None)
 
         self.rank = None  # unscaled
 
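For context, the new in-code comment points at LoRALayerBase, whose calc_size() and to() are expected to account for the optional bias tensor. The snippet below is a minimal sketch of that idea, not the actual InvokeAI base class; only the names LoRALayerBase, calc_size, and to come from the diff, and everything else is assumed for illustration.

from typing import Optional

import torch


class LoRALayerBase:
    # Sketch only: the real base class carries additional state (rank, scale, etc.).
    bias: Optional[torch.Tensor]

    def __init__(self, layer_key: str, values: dict):
        self.layer_key = layer_key
        self.bias = None  # subclasses such as FullLayer may set this from "diff_b"

    def calc_size(self) -> int:
        # Count the optional bias tensor's bytes so memory accounting stays correct.
        if self.bias is None:
            return 0
        return self.bias.nelement() * self.bias.element_size()

    def to(self, device: Optional[torch.device] = None, dtype: Optional[torch.dtype] = None):
        # Move/cast the bias together with the layer's other tensors.
        if self.bias is not None:
            self.bias = self.bias.to(device=device, dtype=dtype)

With the bias handled once in the base class, the FullLayer change above reduces to storing values.get("diff_b", None) instead of rejecting every key other than "diff".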
