@@ -62,7 +62,7 @@ def __init__(self, lib: ct.CDLL):
         lib.cget_managed_ptr.restype = ct.c_void_p
 
 
-def get_available_cuda_binaries() -> list[str]:
+def get_available_cuda_binary_versions() -> list[str]:
     """Get formatted CUDA versions from existing library files using cuda_specs logic"""
     lib_pattern = f"libbitsandbytes_cuda*{DYNAMIC_LIBRARY_SUFFIX}"
     versions = []
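For orientation, the renamed helper builds its list by globbing the packaged `libbitsandbytes_cuda*` files. A minimal sketch of that idea follows; the function name, directory handling, and version formatting here are assumptions for illustration, not the library's exact code:

```python
from pathlib import Path

DYNAMIC_LIBRARY_SUFFIX = ".so"  # assumed Linux suffix; the real constant is platform-dependent


def list_cuda_binary_versions(pkg_dir: Path) -> list[str]:
    """Sketch: map file names like 'libbitsandbytes_cuda124.so' to '12.4' (formatting assumed)."""
    versions = []
    for lib in pkg_dir.glob(f"libbitsandbytes_cuda*{DYNAMIC_LIBRARY_SUFFIX}"):
        digits = lib.name.removeprefix("libbitsandbytes_cuda").removesuffix(DYNAMIC_LIBRARY_SUFFIX)
        if digits.isdigit():
            versions.append(f"{digits[:-1]}.{digits[-1]}")
    return sorted(set(versions))
```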
@@ -86,26 +86,23 @@ def parse_cuda_version(version_str: str) -> str:
 def _format_cuda_error_message(
     available_versions: list[str],
     user_cuda_version: str,
-    override_info: str,
     original_error: str = "",
-    include_diagnostics: bool = False,
-    include_override_notes: bool = False,
-    required_version: Optional[str] = None,
-    version_missing: bool = False,
+    requested_version: Optional[str] = None,
 ) -> str:
-    version_list = ", ".join(available_versions) if available_versions else "none"
     base_msg = "Attempted to use bitsandbytes native library functionality but it's not available.\n\n"
 
-    # Explicit version availability check
     version_alert = ""
-    if version_missing and required_version:
-        version_list_str = "\n  - " + "\n  - ".join(available_versions) if available_versions else "NONE"
+    if requested_version not in available_versions:
+        version_list_str = "\n  - " + "\n  - ".join(available_versions) if available_versions else "NONE"
         version_alert = (
             f"🚨 CUDA VERSION MISMATCH 🚨\n"
-            f"Requested CUDA version: {required_version}\n"
+            f"Requested CUDA version: {requested_version}\n"
+            f"Detected PyTorch CUDA version: {user_cuda_version}\n"
             f"Available pre-compiled versions: {version_list_str}\n\n"
             "This means:\n"
             "1. The version you're trying to use is NOT distributed with this package\n"
+            if available_versions
+            else "1. You're not using the package but checked-out the source code\n"
             "2. You MUST compile from source for this specific CUDA version\n"
             "3. The installation will NOT work until you compile or choose a CUDA supported version\n\n"
         )
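One subtlety in the conditional added above: implicit string concatenation groups adjacent literals before the `if`/`else` applies, so the emoji header plus item 1 form the `if` operand, while items 2 and 3 attach only to the `else` operand. A standalone illustration of that Python behavior (the strings here are made up):

```python
# Adjacent string literals are concatenated at parse time, so the expression below
# parses as ("A" "B") if cond else ("C" "D"), not as one long string with a branch inside.
cond = True
msg = (
    "A"
    "B"
    if cond
    else "C"
    "D"
)
print(msg)  # -> "AB"; with cond = False it would be "CD"
```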
@@ -122,43 +119,25 @@ def _format_cuda_error_message(
122119 "If your CUDA version doesn't have a pre-compiled binary, you MUST compile from source.\n \n "
123120 )
124121
125- cuda_info = (
126- f"Detected PyTorch CUDA version: { user_cuda_version } \n "
127- f"Available pre-compiled bitsandbytes binaries for these CUDA versions: { version_list } \n "
128- f"{ override_info } \n \n "
129- )
130-
131122 compile_instructions = (
132- (
133- "You have three options:\n "
134- "1. COMPILE FROM SOURCE (required if no binary exists):\n "
135- " https://huggingface.co/docs/bitsandbytes/main/en/installation#cuda-compile\n "
136- "2. Use BNB_CUDA_VERSION to specify a DIFFERENT CUDA version from the detected one\n "
137- "3. Check LD_LIBRARY_PATH contains the correct CUDA libraries\n \n "
138- )
139- if include_override_notes
140- else ""
123+ "You have three options:\n "
124+ "1. COMPILE FROM SOURCE (required if no binary exists):\n "
125+ " https://huggingface.co/docs/bitsandbytes/main/en/installation#cuda-compile\n "
126+ "2. Use BNB_CUDA_VERSION to specify a DIFFERENT CUDA version from the detected one, which is installed on your machine and matching an available pre-compiled version listed above\n "
127+ "3. Check LD_LIBRARY_PATH contains the correct CUDA libraries\n \n "
141128 )
142129
143130 diagnostics = (
144- (
145- "🔍 Run this command for detailed diagnostics:\n "
146- "python -m bitsandbytes\n \n "
147- "If you've tried everything and still have issues:\n "
148- "1. Include ALL version info (operating system, bitsandbytes, pytorch, cuda, python)\n "
149- "2. Describe what you've tried in detail\n "
150- "3. Open an issue with this information:\n "
151- " https://github.com/bitsandbytes-foundation/bitsandbytes/issues\n \n "
152- )
153- if include_diagnostics
154- else ""
131+ "🔍 Run this command for detailed diagnostics:\n "
132+ "python -m bitsandbytes\n \n "
133+ "If you've tried everything and still have issues:\n "
134+ "1. Include ALL version info (operating system, bitsandbytes, pytorch, cuda, python)\n "
135+ "2. Describe what you've tried in detail\n "
136+ "3. Open an issue with this information:\n "
137+ " https://github.com/bitsandbytes-foundation/bitsandbytes/issues\n \n "
155138 )
156139
157- return (
158- f"{ version_alert } { base_msg } { troubleshooting } { cuda_info } "
159- f"{ note } { compile_instructions } "
160- f"{ original_error } \n { diagnostics } "
161- )
140+ return f"{ version_alert } { base_msg } { troubleshooting } { note } { compile_instructions } { original_error } \n { diagnostics } "
162141
163142
164143class MockBNBNativeLibrary (BNBNativeLibrary ):
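The slimmed-down helper now derives everything from the requested and detected versions instead of taking pre-built message fragments. A hedged usage sketch against the signature shown above; the import path and all values are assumptions for the example, not output from the library:

```python
# Illustrative only: module path and version strings are invented for this example.
from bitsandbytes.cextension import _format_cuda_error_message

msg = _format_cuda_error_message(
    available_versions=["11.8", "12.1", "12.4"],  # e.g. what get_available_cuda_binary_versions() returned
    user_cuda_version="12.6",                     # detected PyTorch CUDA version
    original_error="Original error: libcudart.so.12: cannot open shared object file\n",
    requested_version="12.6",                     # not in the available list, so the mismatch alert is included
)
print(msg)
```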
@@ -174,26 +153,20 @@ def __init__(self, error_msg: str):
         self.user_cuda_version = get_cuda_version_tuple()
 
     def __getattr__(self, name):
-        available_versions = get_available_cuda_binaries()
+        available_versions = get_available_cuda_binary_versions()
         override_value = os.environ.get("BNB_CUDA_VERSION")
-        override_info = f"\nCUDA override: BNB_CUDA_VERSION={override_value}" if override_value else ""
 
-        formatted_version = (
+        requested_version = (
             parse_cuda_version(override_value)
             if override_value
             else f"{self.user_cuda_version[0]}.{self.user_cuda_version[1]}"
         )
-        required_version = formatted_version
-        version_missing = required_version not in available_versions
 
         msg = _format_cuda_error_message(
             available_versions=available_versions,
             user_cuda_version=f"{self.user_cuda_version[0]}.{self.user_cuda_version[1]}",
-            override_info=override_info,
             original_error=f"Original error: {self.error_msg}\n" if self.error_msg else "",
-            include_diagnostics=True,
-            required_version=formatted_version,
-            version_missing=version_missing,
+            requested_version=requested_version,
         )
         raise RuntimeError(msg)
 
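With this change the mock defers all messaging to `__getattr__`, so the detailed guidance only surfaces when native functionality is actually touched. A hedged sketch of the resulting behavior; the import path and error string are assumptions, and any attribute name would do:

```python
# Hypothetical demonstration: any attribute access on the mock raises RuntimeError
# carrying the formatted guidance, including the original load error passed to __init__.
from bitsandbytes.cextension import MockBNBNativeLibrary  # assumed module path

lib = MockBNBNativeLibrary("libcudart.so.12: cannot open shared object file")
try:
    lib.cget_managed_ptr  # touching any native symbol triggers __getattr__
except RuntimeError as exc:
    print(exc)  # mismatch alert, compile options, diagnostics, and the original error
```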
@@ -212,21 +185,15 @@ def get_native_library() -> BNBNativeLibrary:
         if cuda_binary_path.exists():
             binary_path = cuda_binary_path
         else:
-            available_versions = get_available_cuda_binaries()
+            available_versions = get_available_cuda_binary_versions()
             env_version = os.environ.get("BNB_CUDA_VERSION")
-            override_info = "\nCUDA override active" if env_version else ""
 
-            formatted_version = parse_cuda_version(env_version) if env_version else cuda_specs.cuda_version_string
-            required_version = formatted_version
-            version_missing = required_version not in available_versions
+            requested_version = parse_cuda_version(env_version) if env_version else cuda_specs.cuda_version_string
 
             msg = _format_cuda_error_message(
                 available_versions=available_versions,
                 user_cuda_version=cuda_specs.cuda_version_string,
-                override_info=override_info,
-                include_override_notes=True,
-                required_version=formatted_version,
-                version_missing=version_missing,
+                requested_version=requested_version,
             )
             logger.warning(msg)
 
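The same `requested_version` resolution appears in both call sites: an explicit `BNB_CUDA_VERSION` override wins over the detected version, and membership in the available list decides whether the mismatch alert is shown. A self-contained sketch of that decision; the formatting performed by `parse_cuda_version` (e.g. "124" becoming "12.4") is an assumption here:

```python
import os


def resolve_requested_version(detected: str, available: list[str]) -> tuple[str, bool]:
    """Sketch of the branch above: returns (requested_version, has_prebuilt_binary)."""
    override = os.environ.get("BNB_CUDA_VERSION")
    if override:
        # Assumed formatting: "124" -> "12.4"; the real parse_cuda_version may differ.
        requested = f"{override[:-1]}.{override[-1]}" if override.isdigit() else override
    else:
        requested = detected
    return requested, requested in available


# Example: BNB_CUDA_VERSION unset, CUDA 12.6 detected, but only 11.8/12.1 shipped:
# resolve_requested_version("12.6", ["11.8", "12.1"]) -> ("12.6", False), which triggers the warning.
```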
@@ -249,19 +216,5 @@ def get_native_library() -> BNBNativeLibrary:
     error_msg = f"Could not load bitsandbytes native library: {e}"
     logger.error(error_msg, exc_info=False)
 
-    diagnostic_help = ""
-    if torch.cuda.is_available():
-        diagnostic_help = (
-            "CUDA Setup failed despite CUDA being available.\n\n"
-            "Please run the following command to get more information:\n\n"
-            "python -m bitsandbytes\n\n"
-            "Inspect the output of the command and see if you can locate CUDA libraries. "
-            "You might need to add them to your LD_LIBRARY_PATH. "
-            "If you suspect a bug, please take the information from the command and open an issue at:\n\n"
-            "https://github.com/bitsandbytes-foundation/bitsandbytes/issues\n\n"
-            "If you are using a custom CUDA version, you might need to set the BNB_CUDA_VERSION "
-            "environment variable to the correct version."
-        )
-
     # create a mock with error messaging as fallback
-    lib = MockBNBNativeLibrary(diagnostic_help)
+    lib = MockBNBNativeLibrary(error_msg)