
Commit 5d65817

debug

1 parent 4da2227 commit 5d65817

2 files changed: +3 -3 lines changed


bitsandbytes/autograd/_functions.py

Lines changed: 0 additions & 2 deletions
```diff
@@ -370,8 +370,6 @@ def backward(ctx, grad_output):
         if state.threshold > 0.0 and subA is not None:
             grad_B[:, idx] += torch.matmul(grad_output.t(), subA)
 
-        raise NotImplementedError("!!")
-
         if req_gradA:
             if state.CBt is not None:
                 C32grad, Sgrad = F.transform(Cgrad, "col32")
```
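The two deleted lines were a leftover debug guard: raising `NotImplementedError("!!")` midway through `backward` made every backward pass through this function abort. A minimal sketch of that failure mode, using a hypothetical `Square` function rather than the repo's actual autograd function:

```python
import torch

class Square(torch.autograd.Function):
    """Hypothetical custom Function, only to show how a stray raise in backward surfaces."""

    @staticmethod
    def forward(ctx, x):
        ctx.save_for_backward(x)
        return x * x

    @staticmethod
    def backward(ctx, grad_output):
        (x,) = ctx.saved_tensors
        raise NotImplementedError("!!")  # the kind of debug line this commit removes
        return 2 * x * grad_output  # never reached while the raise is in place

x = torch.randn(4, requires_grad=True)
y = Square.apply(x).sum()
try:
    y.backward()
except NotImplementedError as e:
    print("backward failed:", e)  # backward failed: !!
```

Note that the forward pass is unaffected, which is how a debug raise like this can slip into a commit: it only fires once something calls `.backward()`.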

bitsandbytes/nn/modules.py

Lines changed: 3 additions & 1 deletion
```diff
@@ -237,7 +237,9 @@ def __init__(
         if threshold > 0.0 and not has_fp16_weights:
             self.state.use_pool = True
 
-        self.weight = Int8Params(self.weight.data, has_fp16_weights=has_fp16_weights)
+        self.weight = Int8Params(
+            self.weight.data, has_fp16_weights=has_fp16_weights, requires_grad=has_fp16_weights
+        )
 
     def init_8bit_state(self):
         self.state.CB = self.weight.CB
```
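The `modules.py` change threads `requires_grad=has_fp16_weights` into the `Int8Params` constructor, so a frozen int8 layer (`has_fp16_weights=False`) no longer reports its weight as trainable. A minimal sketch of the effect, with a plain `torch.nn.Parameter` standing in for `Int8Params` (an assumption for illustration; the real class also carries quantization state):

```python
import torch

has_fp16_weights = False  # inference-style 8-bit layer: weights stay frozen

# Before this commit: the parameter kept the default requires_grad=True,
# so autograd still tracked the frozen weight.
w_before = torch.nn.Parameter(torch.randn(4, 4))
print(w_before.requires_grad)  # True

# After this commit: gradient tracking follows has_fp16_weights.
w_after = torch.nn.Parameter(torch.randn(4, 4), requires_grad=has_fp16_weights)
print(w_after.requires_grad)  # False, so backward and optimizers skip it
```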
