Commit 9274fb18 authored by Titus von Koeller

cleanup existing code

parent 1c1b2576
@@ -62,7 +62,7 @@ class CudaBNBNativeLibrary(BNBNativeLibrary):
         lib.cget_managed_ptr.restype = ct.c_void_p


-def get_available_cuda_binaries() -> list[str]:
+def get_available_cuda_binary_versions() -> list[str]:
     """Get formatted CUDA versions from existing library files using cuda_specs logic"""
     lib_pattern = f"libbitsandbytes_cuda*{DYNAMIC_LIBRARY_SUFFIX}"
     versions = []
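
The hunk above only shows the rename; the body of the scan is collapsed. As a rough, illustrative sketch only (the real implementation delegates version formatting to the cuda_specs logic mentioned in the docstring; the suffix constant, package path, and "124" -> "12.4" mapping below are assumptions), the renamed helper amounts to globbing the packaged binaries and turning the filename tag into an "X.Y" version string:

import re
from pathlib import Path

DYNAMIC_LIBRARY_SUFFIX = ".so"  # assumption: Linux; the real constant is platform dependent
PACKAGE_DIR = Path(__file__).parent  # assumption: binaries ship inside the package directory

def get_available_cuda_binary_versions_sketch() -> list[str]:
    """Illustrative stand-in for get_available_cuda_binary_versions()."""
    versions = []
    for lib in PACKAGE_DIR.glob(f"libbitsandbytes_cuda*{DYNAMIC_LIBRARY_SUFFIX}"):
        m = re.search(r"cuda(\d+)", lib.stem)  # e.g. libbitsandbytes_cuda124.so -> "124"
        if m:
            digits = m.group(1)
            versions.append(f"{digits[:-1]}.{digits[-1]}")  # "124" -> "12.4"
    return sorted(versions)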
@@ -86,26 +86,23 @@ def parse_cuda_version(version_str: str) -> str:
 def _format_cuda_error_message(
     available_versions: list[str],
     user_cuda_version: str,
-    override_info: str,
     original_error: str = "",
-    include_diagnostics: bool = False,
-    include_override_notes: bool = False,
-    required_version: Optional[str] = None,
-    version_missing: bool = False,
+    requested_version: Optional[str] = None,
 ) -> str:
-    version_list = ", ".join(available_versions) if available_versions else "none"
     base_msg = "Attempted to use bitsandbytes native library functionality but it's not available.\n\n"

-    # Explicit version availability check
     version_alert = ""
-    if version_missing and required_version:
-        version_list_str = "\n- " + "\n- ".join(available_versions) if available_versions else "NONE"
+    if requested_version not in available_versions:
+        version_list_str = "\n - " + "\n - ".join(available_versions) if available_versions else "NONE"
         version_alert = (
             f"🚨 CUDA VERSION MISMATCH 🚨\n"
-            f"Requested CUDA version: {required_version}\n"
+            f"Requested CUDA version: {requested_version}\n"
+            f"Detected PyTorch CUDA version: {user_cuda_version}\n"
             f"Available pre-compiled versions: {version_list_str}\n\n"
             "This means:\n"
             "1. The version you're trying to use is NOT distributed with this package\n"
+            if available_versions
+            else "1. You're not using the package but checked-out the source code\n"
             "2. You MUST compile from source for this specific CUDA version\n"
             "3. The installation will NOT work until you compile or choose a CUDA supported version\n\n"
         )
@@ -122,43 +119,25 @@ def _format_cuda_error_message(
         "If your CUDA version doesn't have a pre-compiled binary, you MUST compile from source.\n\n"
     )

-    cuda_info = (
-        f"Detected PyTorch CUDA version: {user_cuda_version}\n"
-        f"Available pre-compiled bitsandbytes binaries for these CUDA versions: {version_list}\n"
-        f"{override_info}\n\n"
-    )
-
     compile_instructions = (
-        (
-            "You have three options:\n"
-            "1. COMPILE FROM SOURCE (required if no binary exists):\n"
-            " https://huggingface.co/docs/bitsandbytes/main/en/installation#cuda-compile\n"
-            "2. Use BNB_CUDA_VERSION to specify a DIFFERENT CUDA version from the detected one\n"
-            "3. Check LD_LIBRARY_PATH contains the correct CUDA libraries\n\n"
-        )
-        if include_override_notes
-        else ""
+        "You have three options:\n"
+        "1. COMPILE FROM SOURCE (required if no binary exists):\n"
+        " https://huggingface.co/docs/bitsandbytes/main/en/installation#cuda-compile\n"
+        "2. Use BNB_CUDA_VERSION to specify a DIFFERENT CUDA version from the detected one, which is installed on your machine and matching an available pre-compiled version listed above\n"
+        "3. Check LD_LIBRARY_PATH contains the correct CUDA libraries\n\n"
     )

     diagnostics = (
-        (
-            "🔍 Run this command for detailed diagnostics:\n"
-            "python -m bitsandbytes\n\n"
-            "If you've tried everything and still have issues:\n"
-            "1. Include ALL version info (operating system, bitsandbytes, pytorch, cuda, python)\n"
-            "2. Describe what you've tried in detail\n"
-            "3. Open an issue with this information:\n"
-            " https://github.com/bitsandbytes-foundation/bitsandbytes/issues\n\n"
-        )
-        if include_diagnostics
-        else ""
+        "🔍 Run this command for detailed diagnostics:\n"
+        "python -m bitsandbytes\n\n"
+        "If you've tried everything and still have issues:\n"
+        "1. Include ALL version info (operating system, bitsandbytes, pytorch, cuda, python)\n"
+        "2. Describe what you've tried in detail\n"
+        "3. Open an issue with this information:\n"
+        " https://github.com/bitsandbytes-foundation/bitsandbytes/issues\n\n"
     )

-    return (
-        f"{version_alert}{base_msg}{troubleshooting}{cuda_info}"
-        f"{note}{compile_instructions}"
-        f"{original_error}\n{diagnostics}"
-    )
+    return f"{version_alert}{base_msg}{troubleshooting}{note}{compile_instructions}{original_error}\n{diagnostics}"


 class MockBNBNativeLibrary(BNBNativeLibrary):
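
For illustration only (not part of the commit), the slimmed-down signature means a call now needs just the four remaining keyword arguments; the version numbers and error string below are made up:

msg = _format_cuda_error_message(
    available_versions=["11.8", "12.4"],  # hypothetical binaries found on disk
    user_cuda_version="12.8",             # what PyTorch reports
    original_error="Original error: libcudart.so.12: cannot open shared object file\n",
    requested_version="12.8",             # no BNB_CUDA_VERSION override in this example
)
# Since "12.8" is not in available_versions, msg leads with the
# "🚨 CUDA VERSION MISMATCH 🚨" block, then the compile/override instructions
# and the pointer to `python -m bitsandbytes` diagnostics.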
@@ -174,26 +153,20 @@ class MockBNBNativeLibrary(BNBNativeLibrary):
         self.user_cuda_version = get_cuda_version_tuple()

     def __getattr__(self, name):
-        available_versions = get_available_cuda_binaries()
+        available_versions = get_available_cuda_binary_versions()
         override_value = os.environ.get("BNB_CUDA_VERSION")
-        override_info = f"\nCUDA override: BNB_CUDA_VERSION={override_value}" if override_value else ""

-        formatted_version = (
+        requested_version = (
             parse_cuda_version(override_value)
             if override_value
             else f"{self.user_cuda_version[0]}.{self.user_cuda_version[1]}"
         )
-        required_version = formatted_version
-        version_missing = required_version not in available_versions

         msg = _format_cuda_error_message(
             available_versions=available_versions,
             user_cuda_version=f"{self.user_cuda_version[0]}.{self.user_cuda_version[1]}",
-            override_info=override_info,
             original_error=f"Original error: {self.error_msg}\n" if self.error_msg else "",
-            include_diagnostics=True,
-            required_version=formatted_version,
-            version_missing=version_missing,
+            requested_version=requested_version,
         )
         raise RuntimeError(msg)
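
A small usage illustration (again not in the diff): any attribute access on the mock routes through __getattr__, so a failed native load only surfaces when the library is first used. This assumes a CUDA-enabled PyTorch so that get_cuda_version_tuple() returns a version pair:

lib = MockBNBNativeLibrary("libcudart.so.12: cannot open shared object file")  # made-up load error
try:
    lib.cget_managed_ptr  # any attribute access triggers __getattr__
except RuntimeError as err:
    print(err)  # the formatted mismatch/troubleshooting message built above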
@@ -212,21 +185,15 @@ def get_native_library() -> BNBNativeLibrary:
     if cuda_binary_path.exists():
         binary_path = cuda_binary_path
     else:
-        available_versions = get_available_cuda_binaries()
+        available_versions = get_available_cuda_binary_versions()
         env_version = os.environ.get("BNB_CUDA_VERSION")
-        override_info = "\nCUDA override active" if env_version else ""
-        formatted_version = parse_cuda_version(env_version) if env_version else cuda_specs.cuda_version_string
-        required_version = formatted_version
-        version_missing = required_version not in available_versions
+        requested_version = parse_cuda_version(env_version) if env_version else cuda_specs.cuda_version_string

         msg = _format_cuda_error_message(
             available_versions=available_versions,
             user_cuda_version=cuda_specs.cuda_version_string,
-            override_info=override_info,
-            include_override_notes=True,
-            required_version=formatted_version,
-            version_missing=version_missing,
+            requested_version=requested_version,
         )
         logger.warning(msg)
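
As a hedged aside on the override path: BNB_CUDA_VERSION appears to be read as a compact version tag and normalized by parse_cuda_version before being checked against the available binaries, so pinning a different toolkit would look roughly like this (the exact "124" -> "12.4" mapping is an assumption, not shown in this diff):

import os

os.environ["BNB_CUDA_VERSION"] = "124"  # assumed compact form for CUDA 12.4
# get_native_library() then computes requested_version = parse_cuda_version("124")
# and warns with the message above if no matching libbitsandbytes_cuda124 binary exists.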
@@ -249,19 +216,5 @@ except Exception as e:
     error_msg = f"Could not load bitsandbytes native library: {e}"
     logger.error(error_msg, exc_info=False)

-    diagnostic_help = ""
-    if torch.cuda.is_available():
-        diagnostic_help = (
-            "CUDA Setup failed despite CUDA being available.\n\n"
-            "Please run the following command to get more information:\n\n"
-            "python -m bitsandbytes\n\n"
-            "Inspect the output of the command and see if you can locate CUDA libraries. "
-            "You might need to add them to your LD_LIBRARY_PATH. "
-            "If you suspect a bug, please take the information from the command and open an issue at:\n\n"
-            "https://github.com/bitsandbytes-foundation/bitsandbytes/issues\n\n"
-            "If you are using a custom CUDA version, you might need to set the BNB_CUDA_VERSION "
-            "environment variable to the correct version."
-        )
-
     # create a mock with error messaging as fallback
-    lib = MockBNBNativeLibrary(diagnostic_help)
+    lib = MockBNBNativeLibrary(error_msg)