|
1 | 1 | import ctypes as ct |
2 | | -from pathlib import Path |
3 | | -from warnings import warn |
4 | | - |
5 | 2 | import torch |
6 | 3 |
|
| 4 | +from pathlib import Path |
| 5 | +from warnings import warn |
7 | 6 |
|
class CUDASetup:
    """Singleton that locates, logs, and loads the compiled bitsandbytes CUDA binary.

    Use ``CUDASetup.get_instance()``; direct construction raises. The instance
    records every setup step in ``cuda_setup_log`` so that failures can be
    replayed to the user via ``print_log_stack()``.
    """

    _instance = None

    def __init__(self):
        # Enforce the singleton pattern: callers must go through get_instance().
        raise RuntimeError("Call get_instance() instead")

    def generate_instructions(self):
        """Append human-readable troubleshooting steps to the setup log.

        Emits different guidance depending on which stage of CUDA detection
        failed: the driver library (``self.cuda``), the runtime library
        (``self.cudart_path``), or the precompiled bitsandbytes binary.
        """
        if self.cuda is None:
            self.add_log_entry('CUDA SETUP: Problem: The main issue seems to be that the main CUDA library was not detected.')
            self.add_log_entry('CUDA SETUP: Solution 1): Your paths are probably not up-to-date. You can update them via: sudo ldconfig.')
            self.add_log_entry('CUDA SETUP: Solution 2): If you do not have sudo rights, you can do the following:')
            self.add_log_entry('CUDA SETUP: Solution 2a): Find the cuda library via: find / -name libcuda.so 2>/dev/null')
            self.add_log_entry('CUDA SETUP: Solution 2b): Once the library is found add it to the LD_LIBRARY_PATH: export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:FOUND_PATH_FROM_2a')
            self.add_log_entry('CUDA SETUP: Solution 2c): For a permanent solution add the export from 2b into your .bashrc file, located at ~/.bashrc')
            return

        if self.cudart_path is None:
            self.add_log_entry('CUDA SETUP: Problem: The main issue seems to be that the main CUDA runtime library was not detected.')
            self.add_log_entry('CUDA SETUP: Solution 1: To solve the issue the libcudart.so location needs to be added to the LD_LIBRARY_PATH variable')
            self.add_log_entry('CUDA SETUP: Solution 1a): Find the cuda runtime library via: find / -name libcudart.so 2>/dev/null')
            self.add_log_entry('CUDA SETUP: Solution 1b): Once the library is found add it to the LD_LIBRARY_PATH: export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:FOUND_PATH_FROM_1a')
            self.add_log_entry('CUDA SETUP: Solution 1c): For a permanent solution add the export from 1b into your .bashrc file, located at ~/.bashrc')
            self.add_log_entry('CUDA SETUP: Solution 2: If no library was found in step 1a) you need to install CUDA.')
            self.add_log_entry('CUDA SETUP: Solution 2a): Download CUDA install script: wget https://github.com/TimDettmers/bitsandbytes/blob/main/cuda_install.sh')
            self.add_log_entry('CUDA SETUP: Solution 2b): Install desired CUDA version to desired location. The syntax is bash cuda_install.sh CUDA_VERSION PATH_TO_INSTALL_INTO.')
            self.add_log_entry('CUDA SETUP: Solution 2b): For example, "bash cuda_install.sh 113 ~/local/" will download CUDA 11.3 and install into the folder ~/local')
            return

        # CUDA itself was found, so the precompiled binary must be the problem:
        # build the `make` target matching the detected CUDA version.
        make_cmd = f'CUDA_VERSION={self.cuda_version_string}'
        if len(self.cuda_version_string) < 3:
            make_cmd += ' make cuda92'
        elif self.cuda_version_string == '110':
            make_cmd += ' make cuda110'
        elif self.cuda_version_string[:2] == '11' and int(self.cuda_version_string[2]) > 0:
            make_cmd += ' make cuda11x'

        # Kernels using cublasLt require one of these compute capabilities;
        # otherwise fall back to the no-matmul build target.
        has_cublaslt = self.cc in ["7.5", "8.0", "8.6"]
        if not has_cublaslt:
            make_cmd += '_nomatmul'

        self.add_log_entry('CUDA SETUP: Something unexpected happened. Please compile from source:')
        # NOTE: restored clone URL — it was mangled into "[email protected]" by
        # the page's email-obfuscation filter.
        self.add_log_entry('git clone git@github.com:TimDettmers/bitsandbytes.git')
        self.add_log_entry('cd bitsandbytes')
        self.add_log_entry(make_cmd)
        self.add_log_entry('python setup.py install')

    def initialize(self):
        """Reset state and run CUDA detection once, at instance creation."""
        self.has_printed = False
        self.lib = None
        self.run_cuda_setup()

    def run_cuda_setup(self):
        """Detect CUDA, pick the matching shared library, and load it via ctypes.

        On failure the error is logged instead of raised so that importing the
        package does not crash on machines without CUDA; ``self.lib`` stays None.
        """
        self.initialized = True
        self.cuda_setup_log = []

        from .cuda_setup.main import evaluate_cuda_setup
        binary_name, cudart_path, cuda, cc, cuda_version_string = evaluate_cuda_setup()
        self.cudart_path = cudart_path
        self.cuda = cuda
        self.cc = cc
        self.cuda_version_string = cuda_version_string

        package_dir = Path(__file__).parent
        binary_path = package_dir / binary_name

        try:
            if not binary_path.exists():
                self.add_log_entry(f"CUDA SETUP: Required library version not found: {binary_name}. Maybe you need to compile it from source?")
                legacy_binary_name = "libbitsandbytes.so"
                self.add_log_entry(f"CUDA SETUP: Defaulting to {legacy_binary_name}...")
                binary_path = package_dir / legacy_binary_name
                if not binary_path.exists():
                    self.add_log_entry('')
                    self.add_log_entry('='*48 + 'ERROR' + '='*37)
                    self.add_log_entry('CUDA SETUP: CUDA detection failed! Possible reasons:')
                    self.add_log_entry('1. CUDA driver not installed')
                    self.add_log_entry('2. CUDA not installed')
                    self.add_log_entry('3. You have multiple conflicting CUDA libraries')
                    self.add_log_entry('4. Required library not pre-compiled for this bitsandbytes release!')
                    self.add_log_entry('CUDA SETUP: If you compiled from source, try again with `make CUDA_VERSION=DETECTED_CUDA_VERSION` for example, `make CUDA_VERSION=113`.')
                    self.add_log_entry('='*80)
                    self.add_log_entry('')
                    self.generate_instructions()
                    self.print_log_stack()
                    raise Exception('CUDA SETUP: Setup Failed!')
                # str() needed: ctypes.cdll.LoadLibrary does not accept
                # pathlib.Path objects on older Python versions.
                self.lib = ct.cdll.LoadLibrary(str(binary_path))
            else:
                self.add_log_entry(f"CUDA SETUP: Loading binary {binary_path}...")
                self.lib = ct.cdll.LoadLibrary(str(binary_path))
        except Exception as ex:
            # Best-effort: record and surface the failure, leave self.lib as None.
            self.add_log_entry(str(ex))
            self.print_log_stack()

    def add_log_entry(self, msg, is_warning=False):
        """Record *msg* in the setup log; warnings are flagged for later replay."""
        self.cuda_setup_log.append((msg, is_warning))

    def print_log_stack(self):
        """Replay the accumulated log: warnings via warnings.warn, the rest printed."""
        for msg, is_warning in self.cuda_setup_log:
            if is_warning:
                warn(msg)
            else:
                print(msg)

    @classmethod
    def get_instance(cls):
        """Return the lazily-created singleton, running CUDA setup on first use."""
        if cls._instance is None:
            cls._instance = cls.__new__(cls)
            cls._instance.initialize()
        return cls._instance
117 | 9 |
|
# Acquire the CUDA setup singleton and make sure detection has actually run
# (get_instance() may return an instance created without setup on import).
setup = CUDASetup.get_instance()
if not setup.initialized:  # idiomatic truth test instead of `!= True` (PEP 8 / E712)
    setup.run_cuda_setup()

# Handle to the loaded native library; None when CUDA setup failed.
lib = setup.lib
120 | 15 | try: |
121 | 16 | if lib is None and torch.cuda.is_available(): |
122 | 17 | CUDASetup.get_instance().generate_instructions() |
|
0 commit comments