We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 2ed18b5 commit c0f96fe (Copy full SHA for c0f96fe)
tests/hooks/test_group_offloading.py
@@ -217,7 +217,7 @@ def test_warning_logged_if_group_offloaded_pipe_moved_to_accelerator(self):
217
self.assertIn(f"The module '{self.model.__class__.__name__}' is group offloaded", cm.output[0])
218
219
def test_error_raised_if_streams_used_and_no_accelerator_device(self):
220
- torch_accelerator_module = getattr(torch, torch_device)
+ torch_accelerator_module = getattr(torch, torch_device, torch.cuda)
221
original_is_available = torch_accelerator_module.is_available
222
torch_accelerator_module.is_available = lambda: False
223
with self.assertRaises(ValueError):
0 commit comments