We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent e356426 commit 7bf3645Copy full SHA for 7bf3645
tests/tests_pytorch/accelerators/test_xla.py
@@ -313,6 +313,24 @@ def test_tpu_device_name():
313
assert XLAAccelerator.device_name() == tpu.get_tpu_env()[xenv.ACCELERATOR_TYPE]
314
315
316
def test_tpu_device_name_exception(tpu_available, monkeypatch):
    """``XLAAccelerator.device_name()`` when the TPU metadata lookup raises.

    Simulates ``torch_xla._internal.tpu.get_tpu_env`` failing with an
    ``HTTPError`` and checks the reported device name falls back gracefully
    instead of propagating the exception.
    """
    from requests.exceptions import HTTPError

    # Drop any previously cached/overridden ``device_name`` attribute so the
    # patched lookup below is actually exercised; ``raising=False`` makes this
    # a no-op when nothing is cached.
    monkeypatch.delattr(
        lightning.pytorch.accelerators.xla.XLAAccelerator,
        "device_name",
        raising=False,
    )

    # NOTE(review): the original passed the target and attribute as two
    # separate positional strings to ``mock.patch`` and never closed or
    # entered the patcher, so the assert ran unpatched. ``mock.patch`` takes a
    # single dotted target string, and the patch must be active (``with``)
    # while ``device_name()`` is called.
    with mock.patch(
        "torch_xla._internal.tpu.get_tpu_env",
        side_effect=HTTPError("Could not fetch TPU device name"),
    ):
        # NOTE(review): comparing to the literal string "True" looks
        # suspicious for a device *name* — confirm the intended fallback
        # value against ``XLAAccelerator.device_name``'s implementation.
        assert XLAAccelerator.device_name() == "True"
334
@pytest.mark.parametrize(
335
("devices", "expected_device_ids"),
336
[
0 commit comments