File tree Expand file tree Collapse file tree 2 files changed +8 −6 lines changed
Expand file tree Collapse file tree 2 files changed +8 −6 lines changed Original file line number Diff line number Diff line change @@ -295,11 +295,12 @@ RUN --mount=type=cache,target=/root/.cache/pip \
295295RUN --mount=type=cache,target=/root/.cache/pip \
296296 # Update UV
297297 pip install -U uv \
298- # SageAttention needs PyTorch to build with
299- && pip install sageattention --no-build-isolation \
300298 # Nunchaku version needs to sync with PyTorch version
301299 && pip install \
302- https://github.com/nunchaku-tech/nunchaku/releases/download/v1.0.1/nunchaku-1.0.1+torch2.8-cp312-cp312-linux_x86_64.whl
300+ https://github.com/nunchaku-tech/nunchaku/releases/download/v1.0.1/nunchaku-1.0.1+torch2.8-cp312-cp312-linux_x86_64.whl \
301+ # SageAttention needs PyTorch to build with
302+ && export MAX_JOBS=4 TORCH_CUDA_ARCH_LIST="5.0;6.0;7.0;7.5;8.0;8.6;9.0" \
303+ && pip install sageattention --no-build-isolation
303304
304305# Notes on FlashAttention:
305306# <xformers 0.0.32.post2> requires <flash-attn [2.7.1, 2.8.2]>,
Original file line number Diff line number Diff line change @@ -295,11 +295,12 @@ RUN --mount=type=cache,target=/root/.cache/pip \
295295RUN --mount=type=cache,target=/root/.cache/pip \
296296 # Update UV
297297 pip install -U uv \
298- # SageAttention needs PyTorch to build with
299- && pip install sageattention --no-build-isolation \
300298 # Nunchaku version needs to sync with PyTorch version
301299 && pip install \
302- https://github.com/nunchaku-tech/nunchaku/releases/download/v1.0.1/nunchaku-1.0.1+torch2.8-cp312-cp312-linux_x86_64.whl
300+ https://github.com/nunchaku-tech/nunchaku/releases/download/v1.0.1/nunchaku-1.0.1+torch2.8-cp312-cp312-linux_x86_64.whl \
301+ # SageAttention needs PyTorch to build with
302+ && export MAX_JOBS=4 TORCH_CUDA_ARCH_LIST="7.0;7.5;8.0;8.6;9.0;10.0;12.0" \
303+ && pip install sageattention --no-build-isolation
303304
304305# Notes on FlashAttention:
305306# <xformers 0.0.32.post2> requires <flash-attn [2.7.1, 2.8.2]>,
You can’t perform that action at this time.
0 commit comments