-
Notifications
You must be signed in to change notification settings - Fork 214
Make bitsandbytes optional on ROCm #474
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -36,7 +36,12 @@ def patch_compiling_bitsandbytes(): | |
| # All Unsloth Zoo code licensed under LGPLv3 | ||
| os.environ["UNSLOTH_PATCHED"] = "1" | ||
|
|
||
| import bitsandbytes | ||
| try: | ||
| import bitsandbytes | ||
| except Exception: | ||
| # bitsandbytes is optional (eg, AMD ROCm environments). If it's not | ||
| # installed, just skip this patch. | ||
| return | ||
| if Version(bitsandbytes.__version__) >= Version("0.46.0"): | ||
| if os.environ.get("UNSLOTH_ENABLE_LOGGING", "0") == "1": | ||
| print("Unsloth: Bitsandbytes >= 0.46.0 supports torch.compile - enabling.") | ||
|
|
@@ -307,12 +312,13 @@ def patch_model_and_tokenizer( | |
| # BnB default dtype seems to be float16! | ||
| try: | ||
| from bitsandbytes.nn import Linear4bit as Bnb_Linear4bit | ||
| except: | ||
| raise ImportError("Unsloth: Please install bitsandbytes via `pip install bitsandbytes`") | ||
| except Exception: | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. |
||
| Bnb_Linear4bit = None | ||
| try: | ||
| from peft.tuners.lora import Linear4bit as Peft_Linear4bit | ||
| except: | ||
| raise ImportError("Unsloth: Please install peft via `pip install peft`") | ||
| except Exception: | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. |
||
| Peft_Linear4bit = None | ||
| _bnb_types = tuple(t for t in (Bnb_Linear4bit, Peft_Linear4bit) if t is not None) | ||
| pass | ||
|
|
||
| # Get most likely the correct data-type of the model | ||
|
|
@@ -398,7 +404,7 @@ def __fix_dtype(config): | |
|
|
||
| # Check all params and patch! | ||
| for name, module in model.named_modules(): | ||
| if isinstance(module, (Bnb_Linear4bit, Peft_Linear4bit)): | ||
| if _bnb_types and isinstance(module, _bnb_types): | ||
| weight = module.weight | ||
| # Check if quant_state exists for vision models like unsloth/Llama-3.2-11B-Vision-Instruct-bnb-4bit, unsloth/granite-vision-3.2-2b | ||
| if not hasattr(weight, 'quant_state'): | ||
|
|
||
| Original file line number | Diff line number | Diff line change | ||||||||||||||||||||||||||||||||||||||||||||||||||||||
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
|
|
@@ -146,11 +146,17 @@ def get_lora_layer_modules(): | |||||||||||||||||||||||||||||||||||||||||||||||||||||||
| for file in files: | ||||||||||||||||||||||||||||||||||||||||||||||||||||||||
| if file == "__init__.py" or not file.endswith(".py"): continue | ||||||||||||||||||||||||||||||||||||||||||||||||||||||||
| item = f"peft.tuners.lora.{file[:-len('.py')]}" | ||||||||||||||||||||||||||||||||||||||||||||||||||||||||
| exec(f"import {item}", locals(), globals()) | ||||||||||||||||||||||||||||||||||||||||||||||||||||||||
| try: | ||||||||||||||||||||||||||||||||||||||||||||||||||||||||
| exec(f"import {item}", locals(), globals()) | ||||||||||||||||||||||||||||||||||||||||||||||||||||||||
| except Exception: | ||||||||||||||||||||||||||||||||||||||||||||||||||||||||
| continue | ||||||||||||||||||||||||||||||||||||||||||||||||||||||||
|
Comment on lines
+149
to
+152
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more.
Catching all exceptions here means any failure inside a LoRA module (e.g., a real bug or a missing dependency unrelated to bitsandbytes) is silently ignored, so its modules are skipped without any warning. Useful? React with 👍 / 👎. |
||||||||||||||||||||||||||||||||||||||||||||||||||||||||
| modules = dir(eval(item)) | ||||||||||||||||||||||||||||||||||||||||||||||||||||||||
| modules = [x for x in modules if x.startswith("Linear") or x.endswith("Linear")] | ||||||||||||||||||||||||||||||||||||||||||||||||||||||||
| if len(modules) == 0: continue | ||||||||||||||||||||||||||||||||||||||||||||||||||||||||
| exec(f"from {item} import ({', '.join(modules)})", locals(), globals()) | ||||||||||||||||||||||||||||||||||||||||||||||||||||||||
| try: | ||||||||||||||||||||||||||||||||||||||||||||||||||||||||
| exec(f"from {item} import ({', '.join(modules)})", locals(), globals()) | ||||||||||||||||||||||||||||||||||||||||||||||||||||||||
| except Exception: | ||||||||||||||||||||||||||||||||||||||||||||||||||||||||
| continue | ||||||||||||||||||||||||||||||||||||||||||||||||||||||||
| Linear_LoRA_Layers += [(eval(x), item, x,) for x in modules] | ||||||||||||||||||||||||||||||||||||||||||||||||||||||||
|
Comment on lines
+149
to
160
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Using a broad `except Exception:` here can silently skip modules that fail to import for reasons unrelated to bitsandbytes. Please also add
Suggested change
|
||||||||||||||||||||||||||||||||||||||||||||||||||||||||
| pass | ||||||||||||||||||||||||||||||||||||||||||||||||||||||||
| return tuple(Linear_LoRA_Layers) | ||||||||||||||||||||||||||||||||||||||||||||||||||||||||
|
|
||||||||||||||||||||||||||||||||||||||||||||||||||||||||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -61,7 +61,10 @@ | |
| """ | ||
|
|
||
| import torch | ||
| import bitsandbytes as bnb | ||
| try: | ||
| import bitsandbytes as bnb | ||
| except Exception: | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. |
||
| bnb = None | ||
| try: | ||
| from huggingface_hub import get_token | ||
| except: | ||
|
|
@@ -84,7 +87,7 @@ def find_skipped_quantized_modules(model): | |
| skipped_modules = [] | ||
| quantized_modules = [] | ||
| for name, module in model.named_modules(): | ||
| if isinstance(module, bnb.nn.Linear4bit): | ||
| if (bnb is not None) and isinstance(module, bnb.nn.Linear4bit): | ||
| if hasattr(module.weight, 'quant_state') and module.weight.quant_state is not None: | ||
| quantized_modules.append(name) | ||
| else: | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment.
The reason will be displayed to describe this comment to others. Learn more.
It's a best practice to catch specific exceptions rather than the general
`Exception`. In this case, an `ImportError` is expected if `bitsandbytes` is not installed. Using `except ImportError:` is more precise and prevents masking other potential errors during the import process.