Commit 1f2a970

C lib loading: add fallback with sensible error msg
1 parent c244e98 commit 1f2a970

1 file changed: +47 −12 lines

bitsandbytes/cextension.py

Lines changed: 47 additions & 12 deletions
@@ -61,7 +61,37 @@ def __init__(self, lib: ct.CDLL):
         lib.cget_managed_ptr.restype = ct.c_void_p
 
 
+class MockBNBNativeLibrary(BNBNativeLibrary):
+    """
+    Mock BNBNativeLibrary that raises an error when trying to use native library functionality without successfully loading the library.
+
+    Any method or attribute access will raise a RuntimeError with a message that points to the original error and provides troubleshooting steps.
+    """
+
+    def __init__(self, error_msg: str):
+        self.error_msg = error_msg
+
+    def __getattr__(self, name):
+        base_msg = "Attempted to use bitsandbytes native library functionality but it's not available.\n\n"
+        original_error = f"Original error: {self.error_msg}\n\n" if self.error_msg else ""
+        troubleshooting = (
+            "This typically happens when:\n"
+            "1. BNB doesn't ship with a pre-compiled binary for your CUDA version\n"
+            "2. The library wasn't compiled properly during installation\n"
+            "3. Missing CUDA dependencies\n"
+            "4. PyTorch/bitsandbytes version mismatch\n\n"
+            "Run 'python -m bitsandbytes' for diagnostics."
+        )
+        raise RuntimeError(base_msg + original_error + troubleshooting)
+
+    def __getitem__(self, name):
+        return self.__getattr__(name)
+
+
 def get_native_library() -> BNBNativeLibrary:
+    """
+    Load CUDA library XOR CPU, as the latter contains a subset of symbols of the former.
+    """
     binary_path = PACKAGE_DIR / f"libbitsandbytes_cpu{DYNAMIC_LIBRARY_SUFFIX}"
     cuda_specs = get_cuda_specs()
     if cuda_specs:
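The class above defers the failure from import time to first use: any attribute or item lookup composes the diagnostic text and raises. Below is a minimal standalone sketch of that behavior, not the real module; the class body is trimmed from the hunk above and the error string passed in is a made-up example.

    # Trimmed stand-in for the MockBNBNativeLibrary added in this commit.
    class MockBNBNativeLibrary:
        def __init__(self, error_msg: str):
            self.error_msg = error_msg

        def __getattr__(self, name):
            # Only reached for names not set in __init__, i.e. any native symbol.
            raise RuntimeError(
                "Attempted to use bitsandbytes native library functionality but it's not available.\n\n"
                + (f"Original error: {self.error_msg}\n\n" if self.error_msg else "")
                + "Run 'python -m bitsandbytes' for diagnostics."
            )

        def __getitem__(self, name):
            return self.__getattr__(name)


    lib = MockBNBNativeLibrary("example: libcudart could not be found")  # hypothetical error text
    try:
        lib.cget_managed_ptr  # any native symbol access raises, import itself did not
    except RuntimeError as err:
        print(err)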
@@ -86,17 +116,22 @@ def get_native_library() -> BNBNativeLibrary:
 try:
     lib = get_native_library()
 except Exception as e:
-    lib = None
-    logger.error(f"Could not load bitsandbytes native library: {e}", exc_info=True)
-    if torch.cuda.is_available():
-        logger.warning(
-            """
-CUDA Setup failed despite CUDA being available. Please run the following command to get more information:
+    error_msg = f"Could not load bitsandbytes native library: {e}"
+    logger.error(error_msg, exc_info=True)
 
-python -m bitsandbytes
-
-Inspect the output of the command and see if you can locate CUDA libraries. You might need to add them
-to your LD_LIBRARY_PATH. If you suspect a bug, please take the information from python -m bitsandbytes
-and open an issue at: https://github.com/bitsandbytes-foundation/bitsandbytes/issues
-""",
+    diagnostic_help = ""
+    if torch.cuda.is_available():
+        diagnostic_help = (
+            "CUDA Setup failed despite CUDA being available. "
+            "Please run the following command to get more information:\n\n"
+            "python -m bitsandbytes\n\n"
+            "Inspect the output of the command and see if you can locate CUDA libraries. "
+            "You might need to add them to your LD_LIBRARY_PATH. "
+            "If you suspect a bug, please take the information from the command and open an issue at:\n\n"
+            "https://github.com/bitsandbytes-foundation/bitsandbytes/issues\n\n"
+            "If you are using a custom CUDA version, you might need to set the BNB_CUDA_VERSION "
+            "environment variable to the correct version."
         )
+
+    # create a mock with error messaging as fallback
+    lib = MockBNBNativeLibrary(diagnostic_help)
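With this hunk, the module-level loader no longer leaves lib as None (which previously surfaced later as an opaque AttributeError); importing the package still succeeds and the explanatory RuntimeError only fires when a native function is first called. A rough sketch of that control flow follows; the loader, logger setup, and error strings here are stand-ins for illustration, not the real bitsandbytes internals.

    import logging

    logger = logging.getLogger(__name__)


    class MockBNBNativeLibrary:  # trimmed stand-in, see the sketch after the first hunk
        def __init__(self, error_msg: str):
            self.error_msg = error_msg

        def __getattr__(self, name):
            raise RuntimeError(f"bitsandbytes native library unavailable. Original error: {self.error_msg}")


    def get_native_library():
        # Stand-in loader that always fails, to exercise the fallback path.
        raise OSError("shared object could not be opened (example error)")


    try:
        lib = get_native_library()
    except Exception as e:
        error_msg = f"Could not load bitsandbytes native library: {e}"
        logger.error(error_msg, exc_info=True)
        # Fall back to the mock: import keeps working, diagnostics are deferred.
        lib = MockBNBNativeLibrary(error_msg)

    try:
        lib.cget_managed_ptr  # first native call raises with the captured error message
    except RuntimeError as err:
        print(err)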
