1 parent aae74ef commit eeee1c3
vllm/platforms/__init__.py
@@ -8,8 +8,10 @@
 
 is_tpu = False
 try:
-    import torch_xla.core.xla_model as xm
-    xm.xla_device(devkind="TPU")
+    # While it's technically possible to install libtpu on a non-TPU machine,
+    # this is a very uncommon scenario. Therefore, we assume that libtpu is
+    # installed if and only if the machine has TPUs.
+    import libtpu  # noqa: F401
     is_tpu = True
 except Exception:
     pass
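For context, the patch swaps a runtime device probe (`xm.xla_device`) for a plain import check: `libtpu` is assumed to be present only on machines that actually have TPUs. A minimal standalone sketch of the same detection pattern follows; the device-selection lines at the end are hypothetical and added purely for illustration, they are not part of the commit.

```python
# TPU detection via import, mirroring the patched vllm/platforms/__init__.py:
# libtpu is assumed to be installed if and only if the host has TPUs.
is_tpu = False
try:
    import libtpu  # noqa: F401
    is_tpu = True
except Exception:
    # libtpu is missing or failed to import; treat the host as non-TPU.
    pass

# Hypothetical consumer code (not in the commit): choose a device label.
device = "tpu" if is_tpu else "cpu"
print(f"Detected device: {device}")
```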