1 parent 475d7d0 commit 83bca4c
pytorch_lightning/tuner/auto_gpu_select.py
@@ -45,9 +45,19 @@ def pick_single_gpu(exclude_gpus: List[int]) -> int:
         RuntimeError:
             If you try to allocate a GPU, when no GPUs are available.
     """
+    previously_used_gpus = []
+    unused_gpus = []
     for i in range(torch.cuda.device_count()):
         if i in exclude_gpus:
             continue
+
+        if torch.cuda.memory_reserved(f"cuda:{i}") > 0:
+            previously_used_gpus.append(i)
+        else:
+            unused_gpus.append(i)
+
+    # Prioritize previously used GPUs
+    for i in previously_used_gpus + unused_gpus:
         # Try to allocate on device:
         device = torch.device(f"cuda:{i}")
         try:
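
For context, here is a minimal, self-contained sketch of how the patched selection logic behaves. The hunk is truncated after `try:`, so the allocation probe inside the `try` block and the closing `RuntimeError` are assumptions inferred from the function's docstring, not part of the diff; `pick_single_gpu_sketch` is a hypothetical stand-in name.

```python
from typing import List

import torch


def pick_single_gpu_sketch(exclude_gpus: List[int]) -> int:
    """Pick one usable GPU index, preferring GPUs this process has already touched."""
    previously_used_gpus = []
    unused_gpus = []
    for i in range(torch.cuda.device_count()):
        if i in exclude_gpus:
            continue

        # A non-zero reserved-memory counter means the CUDA caching allocator
        # already holds memory on this device, i.e. it was used before.
        if torch.cuda.memory_reserved(f"cuda:{i}") > 0:
            previously_used_gpus.append(i)
        else:
            unused_gpus.append(i)

    # Prioritize previously used GPUs, then fall back to untouched ones.
    for i in previously_used_gpus + unused_gpus:
        device = torch.device(f"cuda:{i}")
        try:
            # Probe the device with a tiny allocation (assumed probe, not in the diff).
            torch.ones(1).to(device)
        except RuntimeError:
            continue
        return i

    raise RuntimeError("No GPUs available.")
```

Using `torch.cuda.memory_reserved` as the signal keeps the tuner on devices whose caching allocator already holds memory for this process, rather than spreading work onto otherwise idle GPUs.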