forked from huggingface/kernels
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy patharch.nix
More file actions
307 lines (266 loc) · 8.77 KB
/
arch.nix
File metadata and controls
307 lines (266 loc) · 8.77 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
# Per-kernel Torch-extension builder.
#
# Two-stage function:
#   1. The first attrset is the package-level interface (for callPackage):
#      toolchains, framework package sets, and backend support flags.
#   2. The second attrset carries the per-kernel build parameters.
# The result is a stdenv derivation that compiles the kernel's Torch
# extension for the configured backend (CUDA, ROCm, XPU, or Metal).
{
  # Backend support flags default to the supplied torch's own build flags,
  # so the extension matches the torch build unless overridden.
  cudaSupport ? torch.cudaSupport,
  rocmSupport ? torch.rocmSupport,
  xpuSupport ? torch.xpuSupport,
  lib,
  pkgs,
  stdenv,
  writeText,
  # Native build inputs
  build2cmake,
  cmake,
  cmakeNvccThreadsHook,
  cuda_nvcc,
  get-kernel-check,
  kernel-abi-check,
  kernel-layout-check,
  ninja,
  python3,
  remove-bytecode-hook,
  rewrite-nix-paths-macho,
  writeScriptBin,
  # Framework packages
  cudaPackages,
  rocmPackages,
  xpuPackages,
  # Build inputs
  apple-sdk_26,
  clr,
  oneapi-torch-dev,
  onednn-xpu,
  torch,
}:
{
  # Per-kernel build configuration; its attributes (cudaVersion /
  # rocmVersion / xpuVersion / metal / backend) select the backend below.
  buildConfig,
  # Whether to do ABI checks.
  doAbiCheck ? true,
  # Whether to run get-kernel-check.
  doGetKernelCheck ? true,
  kernelName,
  # Extra dependencies (such as CUTLASS).
  extraDeps ? [ ],
  # Number of parallel nvcc threads; inherited into the derivation for
  # cmakeNvccThreadsHook to pick up.
  nvccThreads,
  # A stringly-typed list of Python dependencies. Ideally we'd take a
  # list of derivations, but we also need to write the dependencies to
  # the output.
  pythonDeps,
  backendPythonDeps,
  # Whether to strip rpath for non-nix use.
  stripRPath ? false,
  # Revision to bake into the ops name.
  rev,
  src,
}:
# Extra validation - the environment should correspond to the build config.
assert (buildConfig ? cudaVersion) -> cudaSupport;
assert (buildConfig ? rocmVersion) -> rocmSupport;
assert (buildConfig ? xpuVersion) -> xpuSupport;
# Metal kernels can only be built on Darwin hosts.
assert (buildConfig.metal or false) -> stdenv.hostPlatform.isDarwin;
let
  inherit (import ../deps.nix { inherit lib pkgs torch; }) resolvePythonDeps resolveBackendPythonDeps;
  # Resolved Python dependency derivations; torch is always appended.
  dependencies =
    resolvePythonDeps pythonDeps
    ++ resolveBackendPythonDeps buildConfig.backend backendPythonDeps
    ++ [ torch ];
  # Python module names cannot contain dashes, so map them to underscores.
  moduleName = builtins.replaceStrings [ "-" ] [ "_" ] kernelName;
  # On Darwin, we need the host's xcrun for `xcrun metal` to compile Metal shaders.
  # It's not supported by the nixpkgs shim.
  xcrunHost = writeScriptBin "xcrunHost" ''
    # Use system SDK for Metal files. Clear Nix-set variables that
    # interfere with xcrun/xcodebuild's SDK and toolchain resolution.
    unset DEVELOPER_DIR
    unset SDKROOT
    /usr/bin/xcrun "$@"
  '';
  metalSupport = buildConfig.metal or false;
in
stdenv.mkDerivation (prevAttrs: {
  name = "${kernelName}-torch-ext";
  inherit
    doAbiCheck
    moduleName
    nvccThreads
    ;
  # We run build2cmake here rather than patchPhase or preConfigure,
  # so that external users of `src` get the source tree with the files
  # generated by build2cmake.
  # NOTE(review): this uses pkgs.build2cmake rather than the build2cmake
  # function argument above — confirm the two are intentionally allowed
  # to differ (an override of the argument would not affect this step).
  src = pkgs.runCommand "source" { } ''
    mkdir -p $out
    cp -r --no-preserve=mode ${src}/* $out/
    ${pkgs.build2cmake}/bin/build2cmake generate-torch \
      --ops-id ${rev} $out/build.toml
  '';
  preConfigure =
    # This is a workaround for https://openradar.appspot.com/FB20389216 - even
    # if the user downloaded the Metal toolchain, the mapping is not set up
    # for the Nix build users. To make things worse, we cannot set up a mapping
    # because the Nix build users do not have a writable home directory and
    # showComponent/downloadComponent do not respect the HOME variable. So
    # instead, we'll use showComponent (which will emit a lot of warnings due
    # to the above) to grab the path of the Metal toolchain.
    lib.optionalString metalSupport ''
      # Try the separate Metal toolchain first (macOS 26+ with xcodebuild -downloadComponent).
      # Use || true to prevent set -o pipefail from aborting on older macOS where
      # -showComponent is unsupported.
      METAL_PATH=$(${xcrunHost}/bin/xcrunHost xcodebuild -showComponent MetalToolchain 2> /dev/null | sed -rn "s/Toolchain Search Path: (.*)/\1/p" || true)
      if [ -d "$METAL_PATH/Metal.xctoolchain" ]; then
        cmakeFlagsArray+=("-DMETAL_TOOLCHAIN=$METAL_PATH/Metal.xctoolchain")
      else
        # On macOS 14/15, xcrun and xcode-select may not work inside the Nix
        # build environment (sandbox restrictions). Try them, then fall back
        # to scanning /Applications for Xcode installations.
        XCODE_DEV=$(${xcrunHost}/bin/xcrunHost xcode-select -p 2>/dev/null || true)
        XCODE_TOOLCHAIN="$XCODE_DEV/Toolchains/XcodeDefault.xctoolchain"
        XCRUN_METAL=$(${xcrunHost}/bin/xcrunHost xcrun -find metal 2>/dev/null || true)
        if [ -d "$XCODE_TOOLCHAIN/usr/bin" ] && [ -f "$XCODE_TOOLCHAIN/usr/bin/metal" ]; then
          cmakeFlagsArray+=("-DMETAL_TOOLCHAIN=$XCODE_TOOLCHAIN")
        elif [ -n "$XCRUN_METAL" ] && [ -f "$XCRUN_METAL" ]; then
          # Derive toolchain path from xcrun result
          METAL_BIN_DIR=$(dirname "$XCRUN_METAL")
          METAL_TC_DIR=$(dirname $(dirname "$METAL_BIN_DIR"))
          cmakeFlagsArray+=("-DMETAL_TOOLCHAIN=$METAL_TC_DIR")
        else
          # Last resort: scan /Applications/Xcode*.app for metal compiler
          FOUND_TC=""
          for xcode_app in /Applications/Xcode*.app; do
            TC="$xcode_app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain"
            if [ -f "$TC/usr/bin/metal" ]; then
              FOUND_TC="$TC"
              break
            fi
          done
          if [ -n "$FOUND_TC" ]; then
            cmakeFlagsArray+=("-DMETAL_TOOLCHAIN=$FOUND_TC")
          else
            >&2 echo "Cannot find Metal toolchain. On macOS 26+, use: xcodebuild -downloadComponent metalToolchain"
            exit 1
          fi
        fi
      fi
    '';
  # hipify copies files, but its target is run in the CMake build and install
  # phases. Since some of the files come from the Nix store, this fails the
  # second time around.
  preInstall = ''
    chmod -R u+w .
  '';
  nativeBuildInputs = [
    cmake
    ninja
    build2cmake
    kernel-abi-check
    kernel-layout-check
    remove-bytecode-hook
  ]
  # get-kernel-check needs a Python that can import the kernel's
  # resolved dependencies.
  ++ lib.optionals doGetKernelCheck [
    (get-kernel-check.override { python3 = python3.withPackages (ps: dependencies); })
  ]
  ++ lib.optionals cudaSupport [
    cmakeNvccThreadsHook
    cuda_nvcc
  ]
  ++ lib.optionals rocmSupport [
    clr
  ]
  ++ lib.optionals xpuSupport ([
    xpuPackages.ocloc
    oneapi-torch-dev
  ])
  ++ lib.optionals stdenv.hostPlatform.isDarwin [
    rewrite-nix-paths-macho
  ];
  buildInputs = [
    torch
    torch.cxxdev
  ]
  ++ lib.optionals cudaSupport (
    with cudaPackages;
    [
      cuda_cudart
      # Make dependent on build configuration dependencies once
      # the Torch dependency is gone.
      cuda_cccl
      libcublas
      libcusolver
      libcusparse
    ]
  )
  ++ lib.optionals rocmSupport (
    with rocmPackages;
    [
      hipcub-devel
      hipsparselt
      rocprim-devel
      rocthrust-devel
      rocwmma-devel
    ]
  )
  ++ lib.optionals xpuSupport ([
    oneapi-torch-dev
    onednn-xpu
  ])
  ++ lib.optionals stdenv.hostPlatform.isDarwin [
    apple-sdk_26
  ]
  ++ extraDeps;
  # Backend-specific environment for the CMake/toolchain invocations.
  env =
    lib.optionalAttrs cudaSupport {
      CUDAToolkit_ROOT = "${lib.getDev cudaPackages.cuda_nvcc}";
      # Build for the same GPU capabilities as the torch package itself.
      TORCH_CUDA_ARCH_LIST = lib.concatStringsSep ";" torch.cudaCapabilities;
    }
    // lib.optionalAttrs rocmSupport {
      PYTORCH_ROCM_ARCH = lib.concatStringsSep ";" torch.rocmArchs;
    }
    // lib.optionalAttrs xpuSupport {
      MKLROOT = oneapi-torch-dev;
      SYCL_ROOT = oneapi-torch-dev;
    };
  # If we use the default setup, CMAKE_CUDA_HOST_COMPILER gets set to nixpkgs g++.
  dontSetupCUDAToolkitCompilers = true;
  cmakeFlags = [
    (lib.cmakeBool "BUILD_ALL_SUPPORTED_ARCHS" true)
    (lib.cmakeFeature "Python_EXECUTABLE" "${python3.withPackages (ps: [ torch ])}/bin/python")
    # Fix: file RPATH_CHANGE could not write new RPATH, we are rewriting
    # rpaths anyway.
    (lib.cmakeBool "CMAKE_SKIP_RPATH" true)
  ]
  ++ lib.optionals cudaSupport [
    (lib.cmakeFeature "CMAKE_CUDA_HOST_COMPILER" "${stdenv.cc}/bin/g++")
  ]
  ++ lib.optionals rocmSupport [
    # Ensure that we use HIP from our CLR override and not HIP from
    # the symlink-joined ROCm toolkit.
    (lib.cmakeFeature "CMAKE_HIP_COMPILER_ROCM_ROOT" "${clr}")
    (lib.cmakeFeature "HIP_ROOT_DIR" "${clr}")
  ]
  ++ lib.optionals metalSupport [
    # Use host compiler for Metal. Not included in the redistributable SDK.
    # Re-enable when the issue mentioned in preConfigure is solved.
    #(lib.cmakeFeature "METAL_COMPILER" "${xcrunHost}/bin/xcrunHost")
  ];
  postInstall =
    let
      # NOTE(review): buildVariant is never referenced in the strings below —
      # confirm whether it is dead code or was meant to be interpolated.
      buildVariant = torch.variant;
    in
    ''
      rm -rf $out/_${moduleName}_*_${rev}
    ''
    + (lib.optionalString (stripRPath && stdenv.hostPlatform.isLinux)) ''
      find $out/ -name '*.so' \
        -exec patchelf --set-rpath "" {} \;
    ''
    + (lib.optionalString (stripRPath && stdenv.hostPlatform.isDarwin)) ''
      find $out/ -name '*.so' \
        -exec rewrite-nix-paths-macho {} \;
      # Stub some rpath.
      find $out/ -name '*.so' \
        -exec install_name_tool -add_rpath "@loader_path/lib" {} \;
    '';
  doInstallCheck = true;
  # We need access to the host system on Darwin for the Metal compiler.
  __noChroot = metalSupport;
  # Expose resolved dependencies and the torch used for the build so
  # downstream expressions can match against them.
  passthru = {
    inherit dependencies torch;
    inherit (torch) variant;
  };
})