Skip to content

Commit 33a1966

Browse files
carmocca authored and rasbt committed
Downgrade ligthning to before bitsandbytes upgrade (#1104)
1 parent 5021cee commit 33a1966

File tree

3 files changed

+7
-16
lines changed

3 files changed

+7
-16
lines changed

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ license = { file = "LICENSE" }
1010

1111
dependencies = [
1212
"torch>=2.2.0",
13-
"lightning @ git+https://github.com/Lightning-AI/lightning@f23b3b1e7fdab1d325f79f69a28706d33144f27e",
13+
"lightning @ git+https://github.com/Lightning-AI/lightning@b19c3a961c79028d7c39a4f1ff1c2df991406d1d",
1414
# TODO: install from PyPI when https://github.com/omni-us/jsonargparse/pull/466 is released
1515
"jsonargparse[signatures] @ git+https://github.com/omni-us/jsonargparse",
1616
]

tests/test_ci.py

Lines changed: 4 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,19 +1,10 @@
11
# Copyright Lightning AI. Licensed under the Apache License 2.0, see LICENSE file.
22

3-
# this file is just to validate on the CI logs that these tests were run
3+
from lightning.fabric.plugins.precision.bitsandbytes import _BITSANDBYTES_AVAILABLE
4+
45
from conftest import RunIf
56

67

78
@RunIf(min_cuda_gpus=1)
8-
def test_runif_min_cuda_gpus():
9-
assert True
10-
11-
12-
@RunIf(min_cuda_gpus=1, standalone=True)
13-
def test_runif_min_cuda_gpus_standalone():
14-
assert True
15-
16-
17-
@RunIf(standalone=True)
18-
def test_runif_standalone():
19-
assert True
9+
def test_gpu_ci_installs_bitsandbytes():
10+
assert _BITSANDBYTES_AVAILABLE, str(_BITSANDBYTES_AVAILABLE)

tests/test_lora.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -689,8 +689,8 @@ def test_lora_bitsandbytes(monkeypatch, tmp_path, fake_checkpoint_dir, alpaca_pa
689689
},
690690
}
691691

692-
assert {p.name for p in tmp_path.rglob("*.pth")} == {"lit_model.pth"}
693-
state_dict = torch.load(tmp_path / "final" / "lit_model.pth")
692+
assert {p.name for p in tmp_path.rglob("*.lora")} == {"lit_model.pth.lora"}
693+
state_dict = torch.load(tmp_path / "final" / "lit_model.pth.lora")
694694
assert len(state_dict) == 1
695695
dtype_to_name = {"torch.float16": set()}
696696
for name, layer in state_dict["model"].items():

0 commit comments

Comments (0)