Skip to content

Commit e9a1c5f

Browse files
authored
Remove OpenXLA CUDA fallback and _XLAC_cuda_functions.so extension. (#9581)
This PR removes the OpenXLA CUDA fallback implementation, and also the `_XLAC_cuda_functions.so` extension, completely. Starting on this PR, the fallback shall be run only on CPU.

**Key Changes:**

- Remove _aten_cuda_functions.cpp_ and _aten_cuda_functions.h_
- Remove the OpenXLA CUDA fallback functions from _aten_fallback.cpp_
- Remove the `_XLAC_cuda_functions.so` library from _BUILD_
- Remove the Python `_XLAC_cuda_functions.so` extension from _setup.py_
- Remove the conditional loading of the `_XLAC_cuda_functions.so` Python extension from _torch_xla/__init__.py_
1 parent aada9fc commit e9a1c5f

File tree

11 files changed

+11
-537
lines changed

11 files changed

+11
-537
lines changed

BUILD

Lines changed: 0 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -46,22 +46,6 @@ cc_binary(
4646
]),
4747
)
4848

49-
cc_binary(
50-
name = "_XLAC_cuda_functions.so",
51-
copts = [
52-
"-fopenmp",
53-
"-fPIC",
54-
],
55-
linkopts = [
56-
"-Wl,-soname,_XLAC_cuda_functions.so",
57-
],
58-
linkshared = 1,
59-
visibility = ["//visibility:public"],
60-
deps = [
61-
"//torch_xla/csrc:aten_cuda_functions",
62-
],
63-
)
64-
6549
test_suite(
6650
name = "cpp_tests",
6751
# testonly = True,

configuration.yaml

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -397,8 +397,3 @@ variables:
397397
your code.
398398
type: bool
399399
default_value: false
400-
XLA_FALLBACK_CPU:
401-
description:
402-
- Forces CPU OpenXLA fallback. By default, PyTorch/XLA will run any operation
403-
that doesn't have a lowering using PyTorch CUDA as fallback. Setting this
404-
flag will force PyTorch/XLA to use PyTorch CPU as fallback.

setup.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -455,7 +455,6 @@ def _get_jax_install_requirements():
455455
package_dir=package_dir_mapping,
456456
ext_modules=[
457457
BazelExtension('//:_XLAC.so'),
458-
BazelExtension('//:_XLAC_cuda_functions.so'),
459458
],
460459
install_requires=[
461460
'absl-py>=1.0.0',

test/cpp/BUILD

Lines changed: 1 addition & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,6 @@ ptxla_cc_test(
5454
":cpp_test_util",
5555
":torch_xla_test",
5656
"//torch_xla/csrc:tensor",
57-
"//torch_xla/csrc:aten_cuda_functions",
5857
"@com_google_googletest//:gtest_main",
5958
],
6059
)
@@ -65,7 +64,6 @@ ptxla_cc_test(
6564
deps = [
6665
":torch_xla_test",
6766
"//torch_xla/csrc:tensor",
68-
"//torch_xla/csrc:aten_cuda_functions",
6967
"@com_google_googletest//:gtest_main",
7068
"@xla//xla:shape_util",
7169
],
@@ -81,7 +79,6 @@ ptxla_cc_test(
8179
"//torch_xla/csrc/runtime:debug_macros",
8280
"//torch_xla/csrc:status",
8381
"//torch_xla/csrc:tensor",
84-
"//torch_xla/csrc:aten_cuda_functions",
8582
"//torch_xla/csrc:thread_pool",
8683
"@com_google_absl//absl/synchronization",
8784
"@com_google_googletest//:gtest_main",
@@ -100,7 +97,6 @@ ptxla_cc_test(
10097
":cpp_test_util",
10198
":torch_xla_test",
10299
"//torch_xla/csrc:tensor",
103-
"//torch_xla/csrc:aten_cuda_functions",
104100
"@com_google_googletest//:gtest_main",
105101
],
106102
)
@@ -127,7 +123,6 @@ ptxla_cc_test(
127123
"//torch_xla/csrc/runtime:sys_util",
128124
"//torch_xla/csrc:status",
129125
"//torch_xla/csrc:tensor",
130-
"//torch_xla/csrc:aten_cuda_functions",
131126
"@com_google_googletest//:gtest_main",
132127
"@xla//xla:xla_data_proto_cc",
133128
"@xla//xla/tsl/profiler/utils:session_manager",
@@ -146,7 +141,6 @@ ptxla_cc_test(
146141
":torch_xla_test",
147142
"//torch_xla/csrc/runtime:metrics",
148143
"//torch_xla/csrc:tensor",
149-
"//torch_xla/csrc:aten_cuda_functions",
150144
"@com_google_googletest//:gtest_main",
151145
"@xla//xla:permutation_util",
152146
],
@@ -212,7 +206,6 @@ ptxla_cc_test(
212206
":cpp_test_util",
213207
":torch_xla_test",
214208
"//torch_xla/csrc:tensor",
215-
"//torch_xla/csrc:aten_cuda_functions",
216209
"@com_google_googletest//:gtest_main",
217210
],
218-
)
211+
)

torch_xla/__init__.py

Lines changed: 0 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -7,16 +7,6 @@
77

88
import torch
99

10-
if not torch.cuda.is_available():
11-
# Load _XLAC_cuda_functions to RTLD_GLOBAL, so that it can be used by _XLAC.
12-
flags = sys.getdlopenflags()
13-
sys.setdlopenflags(flags | os.RTLD_NOW | os.RTLD_GLOBAL)
14-
15-
import _XLAC_cuda_functions
16-
17-
# Then, restore the original flags.
18-
sys.setdlopenflags(flags)
19-
2010
import _XLAC
2111
from ._internal import tpu
2212
from .version import __version__

torch_xla/csrc/BUILD

Lines changed: 0 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -77,7 +77,6 @@ ptxla_cc_library(
7777
hdrs = [
7878
"aten_autograd_ops.h",
7979
"aten_fallback.h",
80-
"aten_cuda_functions.h",
8180
"aten_xla_bridge.h",
8281
"batch_norm.h",
8382
"convert_ops.h",
@@ -364,16 +363,6 @@ ptxla_cc_library(
364363
],
365364
)
366365

367-
ptxla_cc_library(
368-
name = "aten_cuda_functions",
369-
srcs = ["aten_cuda_functions.cpp"],
370-
hdrs = ["aten_cuda_functions.h"],
371-
deps = [
372-
"@local_config_python//:python_headers",
373-
"@pybind11//:pybind11_embed",
374-
],
375-
)
376-
377366
cc_library(
378367
name = "status",
379368
srcs = ["status.cpp"],

torch_xla/csrc/aten_cuda_functions.cpp

Lines changed: 0 additions & 51 deletions
This file was deleted.

torch_xla/csrc/aten_cuda_functions.h

Lines changed: 0 additions & 34 deletions
This file was deleted.

0 commit comments

Comments
 (0)