Skip to content

Commit 1c1b257

Browse files
further tweaks to reporting
1 parent c2480e3 commit 1c1b257

File tree

1 file changed

+122
-35
lines changed

1 file changed

+122
-35
lines changed

bitsandbytes/cextension.py

Lines changed: 122 additions & 35 deletions
Original file line number · Diff line number · Diff line change
@@ -3,6 +3,7 @@
33
import os
44
from pathlib import Path
55
import re
6+
from typing import Optional
67

78
import torch
89

@@ -75,6 +76,91 @@ def get_available_cuda_binaries() -> list[str]:
7576
return sorted(versions)
7677

7778

79+
def parse_cuda_version(version_str: str) -> str:
    """Convert a raw CUDA version string (e.g. '118' from the BNB_CUDA_VERSION
    env var) to a dotted version string (e.g. '11.8').

    Accepts any all-digit string of two or more characters, treating the last
    digit as the minor version: '90' -> '9.0', '118' -> '11.8', '121' -> '12.1'.
    (The previous implementation only handled exactly three digits, so a
    two-digit value such as '90' fell through unformatted.)

    Anything else (already dotted, empty, non-numeric) is returned unchanged.
    """
    if version_str.isdigit() and len(version_str) >= 2:
        return f"{version_str[:-1]}.{version_str[-1]}"
    return version_str  # fallback as safety net
84+
85+
86+
def _format_cuda_error_message(
87+
available_versions: list[str],
88+
user_cuda_version: str,
89+
override_info: str,
90+
original_error: str = "",
91+
include_diagnostics: bool = False,
92+
include_override_notes: bool = False,
93+
required_version: Optional[str] = None,
94+
version_missing: bool = False,
95+
) -> str:
96+
version_list = ", ".join(available_versions) if available_versions else "none"
97+
base_msg = "Attempted to use bitsandbytes native library functionality but it's not available.\n\n"
98+
99+
# Explicit version availability check
100+
version_alert = ""
101+
if version_missing and required_version:
102+
version_list_str = "\n- " + "\n- ".join(available_versions) if available_versions else "NONE"
103+
version_alert = (
104+
f"🚨 CUDA VERSION MISMATCH 🚨\n"
105+
f"Requested CUDA version: {required_version}\n"
106+
f"Available pre-compiled versions: {version_list_str}\n\n"
107+
"This means:\n"
108+
"1. The version you're trying to use is NOT distributed with this package\n"
109+
"2. You MUST compile from source for this specific CUDA version\n"
110+
"3. The installation will NOT work until you compile or choose a CUDA supported version\n\n"
111+
)
112+
113+
troubleshooting = (
114+
"This typically happens when:\n"
115+
"1. bitsandbytes doesn't ship with a pre-compiled binary for your CUDA version\n"
116+
"2. The library wasn't compiled properly during installation from source\n"
117+
"3. Missing CUDA dependencies\n\n"
118+
)
119+
120+
note = (
121+
"To make bitsandbytes work, the compiled library version MUST exactly match the linked CUDA version.\n"
122+
"If your CUDA version doesn't have a pre-compiled binary, you MUST compile from source.\n\n"
123+
)
124+
125+
cuda_info = (
126+
f"Detected PyTorch CUDA version: {user_cuda_version}\n"
127+
f"Available pre-compiled bitsandbytes binaries for these CUDA versions: {version_list}\n"
128+
f"{override_info}\n\n"
129+
)
130+
131+
compile_instructions = (
132+
(
133+
"You have three options:\n"
134+
"1. COMPILE FROM SOURCE (required if no binary exists):\n"
135+
" https://huggingface.co/docs/bitsandbytes/main/en/installation#cuda-compile\n"
136+
"2. Use BNB_CUDA_VERSION to specify a DIFFERENT CUDA version from the detected one\n"
137+
"3. Check LD_LIBRARY_PATH contains the correct CUDA libraries\n\n"
138+
)
139+
if include_override_notes
140+
else ""
141+
)
142+
143+
diagnostics = (
144+
(
145+
"🔍 Run this command for detailed diagnostics:\n"
146+
"python -m bitsandbytes\n\n"
147+
"If you've tried everything and still have issues:\n"
148+
"1. Include ALL version info (operating system, bitsandbytes, pytorch, cuda, python)\n"
149+
"2. Describe what you've tried in detail\n"
150+
"3. Open an issue with this information:\n"
151+
" https://github.com/bitsandbytes-foundation/bitsandbytes/issues\n\n"
152+
)
153+
if include_diagnostics
154+
else ""
155+
)
156+
157+
return (
158+
f"{version_alert}{base_msg}{troubleshooting}{cuda_info}"
159+
f"{note}{compile_instructions}"
160+
f"{original_error}\n{diagnostics}"
161+
)
162+
163+
78164
class MockBNBNativeLibrary(BNBNativeLibrary):
79165
"""
80166
Mock BNBNativeLibrary that raises an error when trying to use native library
@@ -89,43 +175,27 @@ def __init__(self, error_msg: str):
89175

90176
def __getattr__(self, name):
91177
available_versions = get_available_cuda_binaries()
92-
version_list = ", ".join(available_versions) if available_versions else "none"
93-
94-
user_ver = "Not detected"
95-
if self.user_cuda_version:
96-
user_ver = f"{self.user_cuda_version[0]}.{self.user_cuda_version[1]}"
178+
override_value = os.environ.get("BNB_CUDA_VERSION")
179+
override_info = f"\nCUDA override: BNB_CUDA_VERSION={override_value}" if override_value else ""
97180

98-
override_value = os.environ.get("BNB_CUDA_VERSION", None)
99-
override_info = (
100-
f"\nCUDA version overridden with BNB_CUDA_VERSION={override_value} environment variable"
181+
formatted_version = (
182+
parse_cuda_version(override_value)
101183
if override_value
102-
else ""
184+
else f"{self.user_cuda_version[0]}.{self.user_cuda_version[1]}"
103185
)
104-
105-
note = "To make bitsandbytes work, the compiled version of the library must match the corresponding linked CUDA version. If you are using a CUDA version that doesn't come with a pre-compiled binary, the only solution is to compile the library from source."
106-
107-
cuda_info = (
108-
f"Detected PyTorch CUDA version: {user_ver}\n"
109-
f"Available pre-compiled bitsandbytes binaries for CUDA versions: {version_list}"
110-
+ override_info
111-
+ "\n\n"
112-
+ note
113-
+ "\n\n"
114-
)
115-
116-
base_msg = "Attempted to use bitsandbytes native library functionality but it's not available.\n\n"
117-
original_error = f"Original error: {self.error_msg}\n\n" if self.error_msg else ""
118-
troubleshooting = (
119-
"This typically happens when:\n"
120-
"1. BNB doesn't ship with a pre-compiled binary for your CUDA version\n"
121-
"2. The library wasn't compiled properly during installation from source\n"
122-
"3. Missing CUDA dependencies\n\n"
186+
required_version = formatted_version
187+
version_missing = required_version not in available_versions
188+
189+
msg = _format_cuda_error_message(
190+
available_versions=available_versions,
191+
user_cuda_version=f"{self.user_cuda_version[0]}.{self.user_cuda_version[1]}",
192+
override_info=override_info,
193+
original_error=f"Original error: {self.error_msg}\n" if self.error_msg else "",
194+
include_diagnostics=True,
195+
required_version=formatted_version,
196+
version_missing=version_missing,
123197
)
124-
err_msg = (
125-
base_msg + troubleshooting + cuda_info + original_error + ("Run 'python -m bitsandbytes' for diagnostics.")
126-
)
127-
128-
raise RuntimeError(err_msg)
198+
raise RuntimeError(msg)
129199

130200
    def __getitem__(self, name):
        # Subscript access (lib["fn"]) is routed through the same failure path
        # as attribute access, so both raise the explanatory RuntimeError.
        return self.__getattr__(name)
@@ -142,7 +212,24 @@ def get_native_library() -> BNBNativeLibrary:
142212
if cuda_binary_path.exists():
143213
binary_path = cuda_binary_path
144214
else:
145-
logger.warning("Could not find the bitsandbytes CUDA binary at %r", cuda_binary_path)
215+
available_versions = get_available_cuda_binaries()
216+
env_version = os.environ.get("BNB_CUDA_VERSION")
217+
override_info = "\nCUDA override active" if env_version else ""
218+
219+
formatted_version = parse_cuda_version(env_version) if env_version else cuda_specs.cuda_version_string
220+
required_version = formatted_version
221+
version_missing = required_version not in available_versions
222+
223+
msg = _format_cuda_error_message(
224+
available_versions=available_versions,
225+
user_cuda_version=cuda_specs.cuda_version_string,
226+
override_info=override_info,
227+
include_override_notes=True,
228+
required_version=formatted_version,
229+
version_missing=version_missing,
230+
)
231+
logger.warning(msg)
232+
146233
logger.debug(f"Loading bitsandbytes native library from: {binary_path}")
147234
dll = ct.cdll.LoadLibrary(str(binary_path))
148235

@@ -165,7 +252,7 @@ def get_native_library() -> BNBNativeLibrary:
165252
diagnostic_help = ""
166253
if torch.cuda.is_available():
167254
diagnostic_help = (
168-
"CUDA Setup failed despite CUDA being available. "
255+
"CUDA Setup failed despite CUDA being available.\n\n"
169256
"Please run the following command to get more information:\n\n"
170257
"python -m bitsandbytes\n\n"
171258
"Inspect the output of the command and see if you can locate CUDA libraries. "

0 commit comments

Comments (0)