File tree Expand file tree Collapse file tree 3 files changed +3
-3
lines changed
Expand file tree Collapse file tree 3 files changed +3
-3
lines changed Original file line number Diff line number Diff line change @@ -310,7 +310,7 @@ RUN --mount=type=cache,target=/root/.cache/pip \
310310 pip install -U uv \
311311 # Nunchaku (binary pair with PyTorch)
312312 && pip install \
313- https://github.com/nunchaku-tech/nunchaku/releases/download/v1.1.0/nunchaku-1.1.0+torch2.9-cp312-cp312-linux_x86_64.whl \
313+ https://github.com/nunchaku-tech/nunchaku/releases/download/v1.0.2/nunchaku-1.0.2+torch2.9-cp312-cp312-linux_x86_64.whl \
314314 # FlashAttention (version pair with xFormers, binary pair with PyTorch & CUDA)
315315 && pip install \
316316 https://github.com/mjun0812/flash-attention-prebuild-wheels/releases/download/v0.5.4/flash_attn-2.8.3+cu126torch2.9-cp312-cp312-linux_x86_64.whl
Original file line number Diff line number Diff line change @@ -238,7 +238,7 @@ RUN --mount=type=cache,target=/root/.cache/pip \
238238 pip install -U uv \
239239 # Nunchaku (binary pair with PyTorch)
240240 && pip install \
241- https://github.com/nunchaku-tech/nunchaku/releases/download/v1.1.0/nunchaku-1.1.0+torch2.9-cp312-cp312-linux_x86_64.whl \
241+ https://github.com/nunchaku-tech/nunchaku/releases/download/v1.0.2/nunchaku-1.0.2+torch2.9-cp312-cp312-linux_x86_64.whl \
242242 # FlashAttention (version pair with xFormers, binary pair with PyTorch & CUDA)
243243 && pip install \
244244 https://github.com/mjun0812/flash-attention-prebuild-wheels/releases/download/v0.7.0/flash_attn-2.8.3+cu128torch2.9-cp312-cp312-linux_x86_64.whl \
Original file line number Diff line number Diff line change @@ -310,7 +310,7 @@ RUN --mount=type=cache,target=/root/.cache/pip \
310310 pip install -U uv \
311311 # Nunchaku (binary pair with PyTorch)
312312 && pip install \
313- https://github.com/nunchaku-tech/nunchaku/releases/download/v1.1.0/nunchaku-1.1.0+torch2.9-cp312-cp312-linux_x86_64.whl \
313+ https://github.com/nunchaku-tech/nunchaku/releases/download/v1.0.2/nunchaku-1.0.2+torch2.9-cp312-cp312-linux_x86_64.whl \
314314 # FlashAttention (version pair with xFormers, binary pair with PyTorch & CUDA)
315315 && pip install \
316316 https://github.com/mjun0812/flash-attention-prebuild-wheels/releases/download/v0.7.0/flash_attn-2.8.3+cu128torch2.9-cp312-cp312-linux_x86_64.whl \
You can’t perform that action at this time.
0 commit comments