
Commit fea43a0

Prepare tests for memory allocation while estimating
1 parent b4a3542

File tree

1 file changed (+6, -4 lines)


tests/test_prep/test_stripe.py

Lines changed: 6 additions & 4 deletions
@@ -97,11 +97,12 @@ def test_remove_stripe_fw_calc_mem(slices, level, dim_x, wname, ensure_clean_mem
     remove_stripe_fw(cp.copy(data), wname=wname, level=level)
     actual_mem_peak = hook.max_mem

-    hook = MaxMemoryHook()
-    with hook:
+    try:
         estimated_mem_peak = remove_stripe_fw(
             data.shape, level=level, wname=wname, calc_peak_gpu_mem=True
         )
+    except cp.cuda.memory.OutOfMemoryError:
+        pytest.skip("Not enough GPU memory to estimate memory peak")

     assert actual_mem_peak * 0.99 <= estimated_mem_peak
     assert estimated_mem_peak <= actual_mem_peak * 1.3
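
For reference: in this first test, actual_mem_peak comes from a MaxMemoryHook that was active around the earlier remove_stripe_fw call, and the estimate returned with calc_peak_gpu_mem=True must land between 99% and 130% of that measured peak. The hook's implementation is not part of this diff; the sketch below shows one way such a tracker could be built on CuPy's MemoryHook callbacks (an illustrative assumption, not the repository's actual helper).

import cupy as cp


class MaxMemoryHook(cp.cuda.MemoryHook):
    """Illustrative peak-memory tracker; the repository's helper may differ."""

    name = "MaxMemoryHook"

    def __init__(self):
        super().__init__()
        self.max_mem = 0      # peak bytes held while the hook was active
        self._current = 0     # bytes currently held

    def malloc_postprocess(self, **kwargs):
        # Called after each memory-pool allocation; kwargs include 'mem_size'.
        self._current += kwargs["mem_size"]
        self.max_mem = max(self.max_mem, self._current)

    def free_postprocess(self, **kwargs):
        # Called after each memory-pool free.
        self._current -= kwargs["mem_size"]

Used as a context manager (with hook: ...), such a tracker records peak pool usage while the wrapped call runs, which is how actual_mem_peak is obtained before the estimation step.
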
@@ -116,11 +117,12 @@ def test_remove_stripe_fw_calc_mem_big(wname, slices, level, ensure_clean_memory
     dim_y = 901
     dim_x = 1200
     data_shape = (slices, dim_x, dim_y)
-    hook = MaxMemoryHook()
-    with hook:
+    try:
         estimated_mem_peak = remove_stripe_fw(
             data_shape, wname=wname, level=level, calc_peak_gpu_mem=True
         )
+    except cp.cuda.memory.OutOfMemoryError:
+        pytest.skip("Not enough GPU memory to estimate memory peak")
     av_mem = cp.cuda.Device().mem_info[0]
     if av_mem < estimated_mem_peak:
         pytest.skip("Not enough GPU memory to run this test")
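
Both tests follow the same pattern: estimating the peak may itself allocate GPU memory, so the estimation call is wrapped in try/except and the test is skipped, not failed, when the device runs out of memory during estimation. A minimal, self-contained sketch of that pattern is below; estimate_peak_gpu_mem is a hypothetical stand-in for remove_stripe_fw(..., calc_peak_gpu_mem=True), and the shape values are illustrative.

import cupy as cp
import pytest


def estimate_peak_gpu_mem(data_shape):
    # Hypothetical estimator: it allocates scratch buffers on the GPU,
    # so it can itself raise OutOfMemoryError on a small device.
    scratch = cp.zeros(data_shape, dtype=cp.float32)
    return scratch.nbytes * 3  # placeholder estimate, not the real formula


def test_big_workload_skips_on_small_gpus():
    data_shape = (64, 1200, 901)  # (slices, dim_x, dim_y); slices is illustrative
    try:
        estimated_mem_peak = estimate_peak_gpu_mem(data_shape)
    except cp.cuda.memory.OutOfMemoryError:
        # Skip rather than fail: the estimator needed more memory than available.
        pytest.skip("Not enough GPU memory to estimate memory peak")

    # Only run the full-size workload if the free device memory can hold the peak.
    av_mem = cp.cuda.Device().mem_info[0]
    if av_mem < estimated_mem_peak:
        pytest.skip("Not enough GPU memory to run this test")

The second test in the diff applies exactly this second check as well, comparing the estimate against the free device memory reported by cp.cuda.Device().mem_info[0] before running the full-size case.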
