-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathsetup.py
More file actions
33 lines (32 loc) · 955 Bytes
/
setup.py
File metadata and controls
33 lines (32 loc) · 955 Bytes
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
"""Build script for the FlashAttention CUDA extensions.

Compiles two extension modules from .cu sources:
  - flash_attn_cuda: baseline FP32 kernel
  - flash_attn_wmma: WMMA Tensor Core optimized kernel

Both target compute capability 8.9 (Ada Lovelace, e.g. RTX 4090).
"""
from setuptools import setup
from torch.utils.cpp_extension import BuildExtension, CUDAExtension

# Shared nvcc flags, kept in one place so the target architecture
# (sm_89) only has to be updated once for both kernels.
_NVCC_FLAGS = [
    "-O3",
    "--use_fast_math",
    "-gencode=arch=compute_89,code=sm_89",
]


def _cuda_extension(name: str, source: str) -> CUDAExtension:
    """Return a CUDAExtension for a single .cu source with the shared flags."""
    return CUDAExtension(
        name=name,
        sources=[source],
        extra_compile_args={"nvcc": list(_NVCC_FLAGS)},
    )


setup(
    name="flash-attn-cuda",
    ext_modules=[
        # Baseline FP32 kernel
        _cuda_extension("flash_attn_cuda", "cuda/flash_attn_kernel.cu"),
        # WMMA Tensor Core optimized kernel
        _cuda_extension("flash_attn_wmma", "cuda/flash_attn_wmma.cu"),
    ],
    # BuildExtension handles nvcc invocation and mixed C++/CUDA compilation.
    cmdclass={"build_ext": BuildExtension},
)