
Commit 579b8c7

reduce diff
1 parent: 76ece2c

File tree: 1 file changed (+1, −1)

bitsandbytes/autograd/_functions.py

Lines changed: 1 addition & 1 deletion
@@ -368,7 +368,7 @@ def backward(ctx, grad_output):
             gradB32, SgradB32 = F.igemmlt(C32grad, CxAt, Sgrad, SAt)
             grad_B = F.mm_dequant(gradB32, SgradB32, SCgradt, SCAt).to(ctx.dtype_B)
             if state.threshold > 0.0 and subA is not None:
-                grad_B[:, idx] += torch.mm(grad_output.t(), subA)
+                grad_B[:, idx] += torch.matmul(grad_output.t(), subA)
 
         if req_gradA:
             if state.CBt is not None:
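
For context, torch.mm and torch.matmul compute the same matrix-matrix product when both arguments are 2-D, as grad_output.t() and subA are here, so the substitution does not change the gradient. A minimal sketch of the equivalence; the shapes below are arbitrary illustrations, not taken from bitsandbytes:

import torch

# Illustrative shapes only (assumed for this sketch): grad_output as
# (batch, out_features) and subA as (batch, n_outlier_cols), both 2-D.
grad_output = torch.randn(4, 8)
subA = torch.randn(4, 3)

# For 2-D inputs, torch.matmul returns the same matrix-matrix product as torch.mm.
out_mm = torch.mm(grad_output.t(), subA)          # shape (8, 3)
out_matmul = torch.matmul(grad_output.t(), subA)  # shape (8, 3)

assert torch.allclose(out_mm, out_matmul)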
