2 files changed: +3 −3

@@ -98,7 +98,7 @@ MAX_JOBS=4 pip install flash-attn --no-build-isolation
 
 ### NVIDIA CUDA Support
 **Requirements:**
-- CUDA 11.6 and above.
+- CUDA 11.7 and above.
 
 We recommend the
 [Pytorch](https://catalog.ngc.nvidia.com/orgs/nvidia/containers/pytorch)
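For anyone verifying the new requirement locally, a quick illustrative check (this snippet is not part of the commit; note that `torch.version.cuda` reports the CUDA version the installed PyTorch wheel was built against, which can differ from the system toolkit that `nvcc -V` reports):

```python
# Illustrative check, not part of this diff: print the CUDA version
# the installed PyTorch build targets.
import torch

print(torch.version.cuda)  # e.g. "11.7"; FlashAttention now requires >= 11.7
```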
@@ -156,9 +156,9 @@ def validate_and_update_archs(archs):
     cc_flag = []
     if CUDA_HOME is not None:
         _, bare_metal_version = get_cuda_bare_metal_version(CUDA_HOME)
-        if bare_metal_version < Version("11.6"):
+        if bare_metal_version < Version("11.7"):
             raise RuntimeError(
-                "FlashAttention is only supported on CUDA 11.6 and above. "
+                "FlashAttention is only supported on CUDA 11.7 and above. "
                 "Note: make sure nvcc has a supported version by running nvcc -V."
             )
     # cc_flag.append("-gencode")
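The guard above calls a `get_cuda_bare_metal_version` helper defined elsewhere in `setup.py`. Below is a minimal sketch of how such a helper can work, assuming it shells out to `nvcc -V` under `CUDA_HOME` and parses the `release X.Y` token; the repository's actual implementation may differ in details.

```python
# Sketch (assumption, not the repository's exact helper) of probing the
# installed CUDA toolkit version. Assumes nvcc lives under CUDA_HOME/bin
# and its -V output contains a line like:
#   "Cuda compilation tools, release 11.7, V11.7.64"
import subprocess
from packaging.version import Version


def get_cuda_bare_metal_version(cuda_dir):
    raw_output = subprocess.check_output(
        [cuda_dir + "/bin/nvcc", "-V"], universal_newlines=True
    )
    output = raw_output.split()
    release_idx = output.index("release") + 1
    # Strip the trailing comma: "11.7," -> Version("11.7")
    bare_metal_version = Version(output[release_idx].split(",")[0])
    return raw_output, bare_metal_version


# Usage mirroring the guard in setup.py:
# _, v = get_cuda_bare_metal_version("/usr/local/cuda")
# if v < Version("11.7"):
#     raise RuntimeError("FlashAttention is only supported on CUDA 11.7 and above.")
```

Comparing `packaging` `Version` objects rather than raw strings avoids lexicographic pitfalls (e.g. `"11.10" < "11.7"` as strings), which is presumably why the check is written this way.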