
Commit 975c356

Remove deprecated supports_igemmlt
1 parent: 587120a

1 file changed (+0, -12)


bitsandbytes/autograd/_functions.py

@@ -106,18 +106,6 @@ def undo_layout(permuted_tensor: torch.Tensor, tile_indices: torch.LongTensor) -
     return outputs.reshape(rows, cols).contiguous()
 
 
-@deprecated("This function is deprecated and will be removed in a future release.", category=FutureWarning)
-def supports_igemmlt(device: torch.device) -> bool:
-    """check if this device supports the optimized int8 kernel"""
-    if torch.cuda.get_device_capability(device=device) < (7, 5):
-        return False
-    device_name = torch.cuda.get_device_name(device=device)
-    nvidia16_models = ("GTX 1630", "GTX 1650", "GTX 1660")  # https://en.wikipedia.org/wiki/GeForce_16_series
-    if any(model_name in device_name for model_name in nvidia16_models):
-        return False  # these devices are technically cuda 7.5-capable, but they lack tensor cores
-    return True
-
-
 @dataclass
 class MatmulLtState:
     _tile_indices: Optional[torch.Tensor] = None  # TODO: remove
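
For callers that still depended on the removed helper, the same device check can be reproduced locally. Below is a minimal standalone sketch of the deleted logic; the name device_supports_int8_kernel is hypothetical and not part of the bitsandbytes API.

import torch

# Hypothetical standalone reimplementation of the check deleted in this commit;
# not part of the bitsandbytes API.
# GeForce 16-series cards report compute capability 7.5 but lack tensor cores.
NVIDIA16_MODELS = ("GTX 1630", "GTX 1650", "GTX 1660")

def device_supports_int8_kernel(device: torch.device) -> bool:
    """Return True if this CUDA device can run the optimized int8 kernel."""
    if torch.cuda.get_device_capability(device=device) < (7, 5):
        return False
    device_name = torch.cuda.get_device_name(device=device)
    if any(model_name in device_name for model_name in NVIDIA16_MODELS):
        return False
    return True

if __name__ == "__main__":
    if torch.cuda.is_available():
        print(device_supports_int8_kernel(torch.device("cuda")))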
