forked from E3SM-Project/mache
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathconfig.yaml.j2.j2
More file actions
276 lines (247 loc) · 9.68 KB
/
config.yaml.j2.j2
File metadata and controls
276 lines (247 loc) · 9.68 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
---
# Deployment configuration template for `mache deploy run`.
#
# NOTE(review): leading indentation was lost in the pasted copy of this file;
# the 2-space nesting below was reconstructed from the section comments.
# Confirm section membership (in particular `toolchain` and
# `install_dev_software`) against the mache deploy schema.
#
# `[[ software ]]` is a Jinja2 expression using custom delimiters; it is
# rendered by mache before this file is parsed as YAML.
project:
  software: "[[ software ]]"
  # A specific version string or "dynamic" if provided by the "pre_pixi" hook.
  version: "dynamic"
  # Optional: a command that prints the *runtime* version string for the
  # deployed software.
  #
  # This command is run via `pixi run -m <pixi.toml> -- bash -c <cmd>` *before*
  # the load script activates the environment, so it can safely `return` on
  # mismatch without changing your shell environment.
  #
  # The command should print a single version string on stdout that matches
  # `project.version` exactly.
  #
  # Examples:
  #   runtime_version_cmd: "python -c 'import importlib.metadata as m; print(m.version(\"mache\"))'"
  #   runtime_version_cmd: "e3sm_diags --version"
  runtime_version_cmd: null
  # Optional: value exported as <SOFTWARE>_BRANCH by the generated load script.
  #
  # Default behavior when this key is omitted entirely is to preserve the
  # legacy export of the target repository root for downstream compatibility.
  #
  # Set to:
  #   - null to suppress the export entirely
  #   - an explicit shared path to export a different location
  branch_path: null
  # Machine name selection.
  # Priority order in `mache deploy run`:
  #   1. CLI --machine
  #   2. this value (project.machine)
  #   3. automatic detection (if this is "dynamic")
  machine: "dynamic"

machines:
  # Optional path containing machine config files in ini format.
  #
  # This MUST be a filesystem path (not a Python package) because we need to
  # read machine configs before the target software (and its dependencies)
  # have been installed into the pixi environment.
  #
  # Should be a relative path, relative to the target software repo root.
  #
  # Files should be named like "<machine>.cfg" (e.g. "chrysalis.cfg").
  #
  # Machine config is loaded in this order:
  #   1. mache.machines/default.cfg
  #   2. mache.machines/<machine>.cfg (if a known machine is selected)
  #   3. <machines.path>/default.cfg (if machines.path is set)
  #   4. <machines.path>/<machine>.cfg (if present)
  path: null

pixi:
  # Whether to deploy the pixi environment
  deploy: true
  # Where to install the pixi project (and its .pixi directory).
  # Absolute path is recommended for shared deployments.
  # Environment variables will be expanded at runtime (e.g. $SCRATCH).
  prefix: pixi-env
  # Optional: how generated load scripts should find the pixi executable.
  #
  # Supported values:
  #   - omitted: preserve the legacy behavior and embed the pixi path passed
  #     to `mache deploy run` (mainly for backward compatibility)
  #   - shared: copy pixi into the deployed prefix/prefixes and use that
  #   - null (or "path"): use $PIXI or `pixi` on PATH at load-script runtime
  #   - /explicit/path/to/pixi: use that path in the generated load script
  #
  # New templates default to `null` so per-user deployments pick up each
  # user's own pixi install unless a downstream opts into another mode.
  load_script_exe: null
  # Channels used by pixi for this environment.
  channels:
    - conda-forge
  # MPI provider for conda packages.
  # Supported values in `mache deploy run`:
  #   - null if MPI is not used in the target software
  #   - nompi
  #   - mpich
  #   - openmpi
  #   - hpc (E3SM-Unified only)
  #   - dynamic (determine by the "pre_pixi" hook)
  mpi: nompi
  # Optional alternate MPI provider for login nodes.
  #
  # If set, mache may deploy a second pixi environment for login-node use and
  # the generated load script will auto-select between login and compute
  # environments based on common batch-job environment variables.
  login_mpi: null
  # Optional install location for the login-node pixi environment.
  #
  # Default behavior:
  #   - same as `prefix` if login_mpi resolves to the same MPI as `mpi`
  #   - otherwise `<prefix>_login`
  login_prefix: null
  # Whether to install the target software in editable/development mode.
  install_dev_software: false

# System toolchain selection (primarily for Spack-based dependencies).
#
# These values are resolved in `mache deploy run` with the following priority:
#   1. CLI flags: --compiler / --mpi
#   2. Values here
#   3. Machine config defaults from [deploy] in merged machine config:
#        compiler = <name>
#        mpi_<compiler> = <mpilib>
#
# Use "dynamic" to request defaults from machine config.
toolchain:
  # One or more compiler names. Examples:
  #   compiler: [gnu]
  #   compiler: [gnu, intel]
  #
  # If empty (or "dynamic"), defaults come from merged machine config:
  #   [deploy] compiler
  compiler: []
  # One or more MPI libraries. Examples:
  #   mpi: [openmpi]
  #   mpi: [mpich, openmpi]
  #
  # Pairing rules in `mache deploy run`:
  #   - If both lists have the same length, they are zipped.
  #   - If one list has length 1, it is broadcast across the other.
  #   - If mpi is empty (or "dynamic"), defaults come from machine config:
  #       [deploy] mpi_<compiler> (preferred)
  #       [deploy] mpi (fallback)
  mpi: []

env_vars:
  # Placeholder: env vars to export in the "load" script
  set: {}

permissions:
  # Optional shared-filesystem permission policy applied after a successful
  # deployment.
  #
  # Priority order in `mache deploy run`:
  #   1. Hook/runtime overrides:
  #        ctx.runtime["permissions"]["group"]
  #        ctx.runtime["permissions"]["world_readable"]
  #   2. Values here
  #   3. Machine config defaults from [deploy]:
  #        group = <unix-group>
  #        world_readable = true|false
  #
  # If no group is resolved, mache skips the permission update step.
  group: null
  # Whether deployed files should be readable by users outside the shared
  # group. Directories keep execute permission when needed for traversal.
  world_readable: true

spack:
  # Whether to deploy Spack environments at all.
  #
  # Behavior:
  #   - If true, deploy ALL supported Spack environments.
  #   - If false, deploy none.
  #
  # This can be forced on at runtime with the `mache deploy run` CLI flag:
  #   --deploy-spack
  #
  # Passing --no-spack disables all Spack use for a single run, including
  # reuse of pre-existing environments for load scripts.
  deploy: false
  # Whether this target repository supports a Spack *library* environment.
  #
  # If true, mache will deploy one library env per toolchain pair.
  supported: false
  # Optional: deploy an additional "software" spack environment.
  #
  # This environment is built once (not per toolchain) with a single compiler
  # and MPI from the merged machine config:
  #   [deploy] software_compiler
  #   [deploy] mpi_<software_compiler>
  #
  # Load scripts do NOT activate this environment; they add its view's `bin`
  # directory to PATH.
  software:
    # Whether this target repository supports a Spack *software* environment.
    supported: false
    # Optional override for the environment name.
    # Default: "<software>_software"
    env_name: null
  # Base path for the spack checkout used for deployment.
  # If it does not exist, mache will clone the E3SM spack repo.
  #
  # In practice, most target repositories should set this dynamically in the
  # `pre_spack()` hook (e.g., based on machine config) by writing:
  #   ctx.runtime['spack']['spack_path'] = <path>
  # Hooks may also disable Spack for a single run by returning:
  #   ctx.runtime['spack']['supported'] = False
  #   ctx.runtime['spack']['software']['supported'] = False
  # This config value is a fallback.
  #
  # Required (either via hook/runtime or here) when spack.deploy (or
  # --deploy-spack) is used and at least one supported spack environment is
  # enabled, or when an existing Spack environment will be reused for load
  # scripts.
  # Can also be overridden temporarily via: --spack-path <path>
  # To bypass Spack entirely for one run, pass: --no-spack
  spack_path: null
  # Prefix for spack environment names.
  # Final env name is computed as: "<env_name_prefix>_<compiler>_<mpi>".
  env_name_prefix: "spack_env"
  # Jinja2-templated YAML file in the target repo containing a YAML list of
  # spack specs. This is inserted into the appropriate mache spack env
  # template for the selected machine/compiler/mpi.
  #
  # Expected format: a YAML mapping with keys "library" and/or "software",
  # each containing a list of spec strings.
  specs_template: "deploy/spack.yaml.j2"
  # Optional list of machine-provided Spack packages to suppress so Spack can
  # build them instead.
  #
  # Typical examples:
  #   exclude_packages:
  #     - cmake
  #
  # To opt out of the machine-provided HDF5/NetCDF bundle, prefer:
  #   exclude_packages:
  #     - hdf5_netcdf
  #
  # In some target repositories, this list is set dynamically in `pre_spack()`
  # based on merged machine config, for example when [deploy]
  # use_e3sm_hdf5_netcdf = false should map to excluding the bundle.
  exclude_packages: []
  # Optional spack build settings
  tmpdir: null
  mirror: null
  custom_spack: ""

jigsaw:
  # If true, build/install JIGSAW + JIGSAW-Python into the pixi env
  enabled: false
  # Relative path in the target repo where JIGSAW-Python lives.
  # If this is a submodule, `mache deploy run` will initialize it.
  # If it is not, `mache deploy run` will clone it from the main JIGSAW-Python
  # repo if missing.
  jigsaw_python_path: jigsaw-python

# Optional deployment hooks.
#
# Hooks run ONLY during `mache deploy run` and execute arbitrary Python code
# from the target repository. Use only with trusted repositories.
# Custom CLI flags from deploy/custom_cli_spec.json are available on ctx.args
# inside hooks when routed to "run".
#
# hooks:
#   file: "deploy/hooks.py"  # default
#   entrypoints:
#     pre_pixi: "pre_pixi"        # optional
#     post_pixi: "post_pixi"      # optional
#     pre_spack: "pre_spack"      # optional
#     post_spack: "post_spack"    # optional
#     post_deploy: "post_deploy"  # optional