forked from jd-opensource/xllm
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathenv.py
More file actions
216 lines (180 loc) · 7.74 KB
/
env.py
File metadata and controls
216 lines (180 loc) · 7.74 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
import os
import platform
from typing import Optional
def get_cxx_abi() -> bool:
    """Report whether the installed torch was compiled with the C++11 ABI.

    Returns:
        True when torch reports the C++11 ABI; False when torch reports the
        old ABI or cannot be imported at all.
    """
    try:
        import torch
    except ImportError:
        return False
    return torch.compiled_with_cxx11_abi()
def get_python_include_path() -> Optional[str]:
    """Return the C header include directory of the running interpreter.

    Returns:
        The sysconfig "include" path, or None if sysconfig is unavailable.
    """
    try:
        import sysconfig
    except ImportError:
        return None
    return sysconfig.get_paths()["include"]
def get_torch_root_path() -> Optional[str]:
    """Return the installation directory of the torch package.

    Returns:
        Absolute directory containing ``torch/__init__.py``, or None when
        torch is not importable.
    """
    try:
        import torch
    except ImportError:
        return None
    # `os` is already imported at module level; the original re-imported it
    # locally, which was redundant.
    return os.path.dirname(os.path.abspath(torch.__file__))
def get_torch_mlu_root_path() -> Optional[str]:
    """Return the installation directory of the torch_mlu package.

    Returns:
        Absolute directory of the torch_mlu package, or None when the
        Cambricon MLU extension is not installed.
    """
    try:
        import torch_mlu
    except ImportError:
        return None
    import os
    return os.path.dirname(os.path.abspath(torch_mlu.__file__))
def get_ixformer_root_path() -> Optional[str]:
    """Return the installation directory of the ixformer package.

    Returns:
        Absolute directory of the ixformer package, or None when the
        Iluvatar extension is not installed.
    """
    try:
        import ixformer
    except ImportError:
        return None
    import os
    return os.path.dirname(os.path.abspath(ixformer.__file__))
def get_cuda_root_path() -> Optional[str]:
    """Return the CUDA toolkit root as detected by torch.

    Returns:
        ``CUDA_HOME`` from ``torch.utils.cpp_extension``, or None when
        torch itself is not importable.

    Raises:
        RuntimeError: when torch is present but reports no CUDA toolkit
            (``CUDA_HOME is None``). Note this deliberately escapes the
            ImportError handler so a mis-configured CUDA setup fails loudly
            instead of silently returning None.
    """
    try:
        # Importing cpp_extension raises ImportError when torch is absent,
        # so the original's separate `import torch` was redundant.
        from torch.utils.cpp_extension import CUDA_HOME
    except ImportError:
        return None
    if CUDA_HOME is None:
        raise RuntimeError(
            "PyTorch was not built with CUDA, or nvcc is not in PATH. "
            "Please set CUDA_TOOLKIT_ROOT_DIR manually."
        )
    return CUDA_HOME
def get_torch_musa_root_path() -> Optional[str]:
    """Return the installation directory of the torch_musa package.

    Returns:
        Absolute directory of the torch_musa package, or None when the
        Moore Threads MUSA extension is not installed.
    """
    try:
        import torch_musa
    except ImportError:
        return None
    import os
    return os.path.dirname(os.path.abspath(torch_musa.__file__))
def prepend_path_env(var_name: str, path: str, sep: str = os.pathsep) -> None:
    """Put *path* at the front of the path-list env var *var_name*.

    Empty entries and any existing occurrence of *path* are dropped, so the
    variable never accumulates duplicates. A falsy *path* is a no-op.
    """
    if not path:
        return
    current_value = os.getenv(var_name, "")
    # Unconditionally filtering out `path` covers both the duplicate and
    # the not-present case in one pass.
    remainder = [entry for entry in current_value.split(sep) if entry and entry != path]
    os.environ[var_name] = sep.join([path] + remainder)
def set_npu_torch_ld_library_path() -> None:
    """Only for NPU flow: ensure torch runtime libraries are discoverable."""
    root = os.getenv("PYTORCH_INSTALL_PATH") or get_torch_root_path() or ""
    if not root:
        return
    # Same prepend order as before; because each existing dir is prepended,
    # the last one processed ends up first on LD_LIBRARY_PATH.
    candidates = (root + ".libs", root, os.path.join(root, "lib"))
    for candidate in candidates:
        if os.path.isdir(candidate):
            prepend_path_env("LD_LIBRARY_PATH", candidate)
def set_common_envs() -> None:
    """Export the Python/torch location variables shared by every backend."""
    os.environ["PYTHON_INCLUDE_PATH"] = get_python_include_path() or ""
    torch_root = get_torch_root_path() or ""
    # All three variables intentionally point at the same torch install dir.
    for var in ("PYTHON_LIB_PATH", "LIBTORCH_ROOT", "PYTORCH_INSTALL_PATH"):
        os.environ[var] = torch_root
def set_npu_envs() -> None:
    """Export every environment variable the Ascend NPU build/runtime flow needs.

    Fills in default install locations when unset, extends LD_LIBRARY_PATH /
    PYTHONPATH / PATH with toolkit directories, and pins a set of ATB /
    ASDOPS / LCCL tuning knobs. Statement order matters: common/torch paths
    are exported first, then the toolkit paths, then ATB paths (which read
    the LD_LIBRARY_PATH/PATH values written earlier in this function).
    """
    # Default libtorch_npu location when the caller has not provided one.
    PYTORCH_NPU_INSTALL_PATH = os.getenv("PYTORCH_NPU_INSTALL_PATH")
    if not PYTORCH_NPU_INSTALL_PATH:
        os.environ["PYTORCH_NPU_INSTALL_PATH"] = "/usr/local/libtorch_npu"
    set_common_envs()
    set_npu_torch_ld_library_path()
    # Ascend toolkit root; fall back to the conventional install location.
    NPU_TOOLKIT_HOME = os.getenv("NPU_TOOLKIT_HOME")
    if not NPU_TOOLKIT_HOME:
        os.environ["NPU_TOOLKIT_HOME"] = "/usr/local/Ascend/ascend-toolkit/latest"
        NPU_TOOLKIT_HOME = "/usr/local/Ascend/ascend-toolkit/latest"
    # Toolkit library dirs are prepended; the previous LD_LIBRARY_PATH is
    # kept at the tail so the toolkit copies win symbol lookup.
    LD_LIBRARY_PATH = os.getenv("LD_LIBRARY_PATH", "")
    # op_tiling libs are arch-specific (e.g. x86_64 / aarch64 subdirectory).
    arch = platform.machine()
    LD_LIBRARY_PATH = NPU_TOOLKIT_HOME+"/lib64" + ":" + \
        NPU_TOOLKIT_HOME+"/lib64/plugin/opskernel" + ":" + \
        NPU_TOOLKIT_HOME+"/lib64/plugin/nnengine" + ":" + \
        NPU_TOOLKIT_HOME+"/opp/built-in/op_impl/ai_core/tbe/op_tiling/lib/linux/"+arch + ":" + \
        NPU_TOOLKIT_HOME+"/opp/vendors/xllm/op_api/lib" + ":" + \
        NPU_TOOLKIT_HOME+"/tools/aml/lib64" + ":" + \
        NPU_TOOLKIT_HOME+"/tools/aml/lib64/plugin" + ":" + \
        LD_LIBRARY_PATH
    os.environ["LD_LIBRARY_PATH"] = LD_LIBRARY_PATH
    # Expose the toolkit's bundled Python packages and TBE op implementations.
    PYTHONPATH = os.getenv("PYTHONPATH", "")
    PYTHONPATH = NPU_TOOLKIT_HOME+"/python/site-packages" + ":" + \
        NPU_TOOLKIT_HOME+"/opp/built-in/op_impl/ai_core/tbe" + ":" + \
        PYTHONPATH
    os.environ["PYTHONPATH"] = PYTHONPATH
    # Both ccec_compiler locations are listed; which exists depends on the
    # toolkit version layout.
    PATH = os.getenv("PATH", "")
    PATH = NPU_TOOLKIT_HOME+"/bin" + ":" + \
        NPU_TOOLKIT_HOME+"/compiler/ccec_compiler/bin" + ":" + \
        NPU_TOOLKIT_HOME+"/tools/ccec_compiler/bin" + ":" + \
        PATH
    os.environ["PATH"] = PATH
    os.environ["ASCEND_AICPU_PATH"] = NPU_TOOLKIT_HOME
    os.environ["ASCEND_OPP_PATH"] = NPU_TOOLKIT_HOME+"/opp"
    os.environ["TOOLCHAIN_HOME"] = NPU_TOOLKIT_HOME+"/toolkit"
    os.environ["NPU_HOME_PATH"] = NPU_TOOLKIT_HOME
    # ATB (Ascend Transformer Boost) root; default to the nnal install.
    ATB_PATH = os.getenv("ATB_PATH")
    if not ATB_PATH:
        os.environ["ATB_PATH"] = "/usr/local/Ascend/nnal/atb"
        ATB_PATH = "/usr/local/Ascend/nnal/atb"
    # ATB ships one build per C++ ABI; pick the one matching torch's ABI.
    cxx_abi = "1" if get_cxx_abi() else "0"
    ATB_HOME_PATH = os.path.join(ATB_PATH, "latest", "atb", "cxx_abi_" + cxx_abi)
    os.environ["ATB_HOME_PATH"] = ATB_HOME_PATH
    # Re-read LD_LIBRARY_PATH to include the toolkit entries added above.
    LD_LIBRARY_PATH = os.getenv("LD_LIBRARY_PATH", "")
    LD_LIBRARY_PATH = ATB_HOME_PATH+"/lib" + ":" + \
        ATB_HOME_PATH+"/examples" + ":" + \
        ATB_HOME_PATH+"/tests/atbopstest" + ":" + \
        LD_LIBRARY_PATH
    os.environ["LD_LIBRARY_PATH"] = LD_LIBRARY_PATH
    PATH = os.getenv("PATH", "")
    PATH = ATB_HOME_PATH+"/bin" + ":" + PATH
    os.environ["PATH"] = PATH
    # ATB runtime tuning knobs. Values are plain strings because they are
    # consumed through the environment by the native ATB runtime.
    os.environ["ATB_STREAM_SYNC_EVERY_KERNEL_ENABLE"] = "0"
    os.environ["ATB_STREAM_SYNC_EVERY_RUNNER_ENABLE"] = "0"
    os.environ["ATB_STREAM_SYNC_EVERY_OPERATION_ENABLE"] = "0"
    os.environ["ATB_OPSRUNNER_SETUP_CACHE_ENABLE"] = "1"
    os.environ["ATB_OPSRUNNER_KERNEL_CACHE_TYPE"] = "3"
    os.environ["ATB_OPSRUNNER_KERNEL_CACHE_LOCAL_COUNT"] = "1"
    # NOTE(review): "GLOABL" is presumably the spelling the ATB runtime
    # actually reads -- confirm against the consumer before "fixing" it.
    os.environ["ATB_OPSRUNNER_KERNEL_CACHE_GLOABL_COUNT"] = "5"
    os.environ["ATB_OPSRUNNER_KERNEL_CACHE_TILING_SIZE"] = "10240"
    os.environ["ATB_WORKSPACE_MEM_ALLOC_ALG_TYPE"] = "1"
    os.environ["ATB_WORKSPACE_MEM_ALLOC_GLOBAL"] = "0"
    os.environ["ATB_COMPARE_TILING_EVERY_KERNEL"] = "0"
    os.environ["ATB_HOST_TILING_BUFFER_BLOCK_NUM"] = "128"
    os.environ["ATB_DEVICE_TILING_BUFFER_BLOCK_NUM"] = "32"
    os.environ["ATB_SHARE_MEMORY_NAME_SUFFIX"] = ""
    os.environ["ATB_LAUNCH_KERNEL_WITH_TILING"] = "1"
    os.environ["ATB_MATMUL_SHUFFLE_K_ENABLE"] = "1"
    os.environ["ATB_RUNNER_POOL_SIZE"] = "64"
    # ASDOPS logging/ops configuration; shares the ATB install directory.
    os.environ["ASDOPS_HOME_PATH"] = ATB_HOME_PATH
    os.environ["ASDOPS_MATMUL_PP_FLAG"] = "1"
    os.environ["ASDOPS_LOG_LEVEL"] = "ERROR"
    os.environ["ASDOPS_LOG_TO_STDOUT"] = "0"
    os.environ["ASDOPS_LOG_TO_FILE"] = "1"
    os.environ["ASDOPS_LOG_TO_FILE_FLUSH"] = "0"
    os.environ["ASDOPS_LOG_TO_BOOST_TYPE"] = "atb"
    # NOTE(review): "~" appears to be passed through literally; whether the
    # consumer expands it to $HOME is not visible from this file.
    os.environ["ASDOPS_LOG_PATH"] = "~"
    os.environ["ASDOPS_TILING_PARSE_CACHE_DISABLE"] = "0"
    os.environ["LCCL_DETERMINISTIC"] = "0"
    os.environ["LCCL_PARALLEL"] = "0"
def set_mlu_envs() -> None:
    """Export build variables for the Cambricon MLU backend."""
    set_common_envs()
    mlu_root = get_torch_mlu_root_path()
    os.environ["PYTORCH_MLU_INSTALL_PATH"] = mlu_root if mlu_root else ""
def set_cuda_envs() -> None:
    """Export build variables for the CUDA backend."""
    set_common_envs()
    cuda_root = get_cuda_root_path()
    os.environ["CUDA_TOOLKIT_ROOT_DIR"] = cuda_root if cuda_root else ""
def set_ilu_envs() -> None:
    """Export build variables for the Iluvatar (ixformer) backend."""
    set_common_envs()
    ix_root = get_ixformer_root_path()
    os.environ["IXFORMER_INSTALL_PATH"] = ix_root if ix_root else ""
def set_musa_envs() -> None:
    """Export build variables for the Moore Threads MUSA backend.

    Raises ImportError when torch_musa is not installed, exactly as before.
    """
    set_common_envs()
    os.environ["PYTORCH_MUSA_INSTALL_PATH"] = get_torch_musa_root_path() or ""
    import torch_musa
    from torch_musa.utils.musa_extension import MUSA_HOME
    cmake_prefix = torch_musa.core.cmake_prefix_path
    env = os.environ
    env["TORCH_MUSA_PYTHONPATH"] = cmake_prefix
    env["MUSA_TOOLKIT_ROOT_DIR"] = MUSA_HOME
    # MKL locations follow the standard oneAPI install layout.
    env["MKL_DIR"] = "/opt/intel/oneapi/mkl/lib/cmake/mkl"
    env["MKLROOT"] = "/opt/intel/oneapi/mkl"
    env["TorchMusa_DIR"] = cmake_prefix + "/TorchMusa"
    env["MUSAMAPPING_PATH"] = MUSA_HOME + "/tools/musamapping"