Skip to content

Commit 1ad6edf

Browse files
committed
fix directml
1 parent 7536a14 commit 1ad6edf

File tree

1 file changed

+6
-2
lines changed

1 file changed

+6
-2
lines changed

modules/sd_hijack_optimizations.py

Lines changed: 6 additions & 2 deletions
Diff columns: original file line number | diff line number | diff line change
@@ -10,7 +10,7 @@
 from ldm.util import default
 from einops import rearrange

-from modules import shared, errors, devices, sub_quadratic_attention, rocm_triton_windows
+from modules import shared, errors, devices, sub_quadratic_attention
 from modules.hypernetworks import hypernetwork

 import ldm.modules.attention
@@ -154,7 +154,11 @@ def __init__(self):
         self.sdpa_pre_flash_atten = None

     def is_available(self):
-        return hasattr(torch.nn.functional, "scaled_dot_product_attention") and callable(torch.nn.functional.scaled_dot_product_attention) and devices.has_zluda() and rocm_triton_windows.is_available
+        try:
+            from modules import rocm_triton_windows
+            return hasattr(torch.nn.functional, "scaled_dot_product_attention") and callable(torch.nn.functional.scaled_dot_product_attention) and devices.has_zluda() and rocm_triton_windows.is_available
+        except Exception:
+            return False

     def apply(self):
         if self.sdpa_pre_flash_atten is None:

Comments (0)