Skip to content

Commit f36b9f6

Browse files
authored
Recovered skipped w8a8 compression related tests (#1785)
SUMMARY: Recovered the skipped w8a8 compression/decompression tests now that the transformer side of the code is merged. Added memory cleanup between test instances. TEST PLAN: Tested locally on transformers 4.56.0.dev0; all tests passed. --------- Signed-off-by: shanjiaz <[email protected]>
1 parent cf149b8 commit f36b9f6

File tree

3 files changed

+16
-0
lines changed

3 files changed

+16
-0
lines changed
File renamed without changes.
File renamed without changes.

tests/llmcompressor/transformers/compression/test_run_compressed.py

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -84,9 +84,17 @@ def test_compressed_matches_decompressed(self):
8484
def tearDownClass(cls):
8585
if os.path.isdir(cls.test_dir):
8686
shutil.rmtree(cls.test_dir)
87+
88+
if hasattr(cls, "decompressed_model") and cls.decompressed_model is not None:
89+
cls.decompressed_model.cpu()
90+
if hasattr(cls, "uncompressed_model") and cls.uncompressed_model is not None:
91+
cls.uncompressed_model.cpu()
8792
del cls.decompressed_model
8893
del cls.uncompressed_model
94+
del cls.tokenizer
95+
8996
torch.cuda.empty_cache()
97+
torch.cuda.synchronize()
9098

9199

92100
@requires_gpu
@@ -168,6 +176,14 @@ def test_compressed_matches_decompressed__hf_quantizer(self):
168176
def tearDownClass(cls):
169177
if os.path.isdir(cls.test_dir):
170178
shutil.rmtree(cls.test_dir)
179+
180+
if hasattr(cls, "decompressed_model") and cls.decompressed_model is not None:
181+
cls.decompressed_model.cpu()
182+
if hasattr(cls, "compressed_model") and cls.compressed_model is not None:
183+
cls.compressed_model.cpu()
171184
del cls.decompressed_model
172185
del cls.compressed_model
186+
del cls.tokenizer
187+
173188
torch.cuda.empty_cache()
189+
torch.cuda.synchronize()

0 commit comments

Comments (0)