Skip to content

Commit 35dbb2e

Browse files
more cleanup
1 parent b954474 commit 35dbb2e

File tree

3 files changed

+3
-13
lines changed

3 files changed

+3
-13
lines changed

bitsandbytes/functional.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -2388,9 +2388,9 @@ def int8_linear_matmul(A: torch.Tensor, B: torch.Tensor, out: Optional[torch.Ten
23882388
if has_error:
23892389
raise RuntimeError(
23902390
f"cublasLt ran into an error!\n"
2391-
f"\tA: {shapeA}, B: {shapeB}, C: {shapeC}\n"
2392-
f"\t(lda, ldb, ldc): {(lda, ldb, ldc)}\n"
2393-
f"\t(m, n, k): {(m, n, k)}"
2391+
f"\t{shapeA=}, {shapeB=}, {shapeC=}\n"
2392+
f"\t{(lda, ldb, ldc)=}\n"
2393+
f"\t{(m, n, k)=}"
23942394
)
23952395

23962396
return out

bitsandbytes/research/autograd/_functions.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -256,8 +256,6 @@ def forward(ctx, A, B, out=None, bias=None, state: Optional[MatmulLtState] = Non
256256
if outlier_cols is not None and not state.has_fp16_weights:
257257
# extract outliers
258258
state.idx = outlier_cols
259-
260-
# outliers = F.extract_outliers(state.CxB, state.SB, state.idx.int())
261259
outliers = state.CB[:, state.idx.long()].clone()
262260
state.subB = (outliers * state.SCB.view(-1, 1) / 127.0).t().contiguous().to(A.dtype)
263261
CA[:, state.idx.long()] = 0

tests/test_autograd.py

Lines changed: 0 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -255,14 +255,6 @@ def test_matmullt(dim1, dim2, dim3, dim4, funcs, dtype, req_grad, transpose, dec
255255
B2 = B2.t().contiguous()
256256

257257
state.CB, state.SCB, _ = bnb.functional.int8_vectorwise_quant(B2.to(torch.float16))
258-
259-
# (
260-
# state.CB,
261-
# CBt,
262-
# state.SCB,
263-
# SCBt,
264-
# coo_tensorB,
265-
# ) = bnb.functional.double_quant(B2.to(torch.float16))
266258
B2 = state.CB
267259

268260
if not transpose[0] and transpose[1]:

0 commit comments

Comments (0)