Skip to content

Commit ffc3ab8

Browse files
committed
Add shared load-script artifact support
1 parent 73fb38a commit ffc3ab8

File tree

7 files changed

+520
-3
lines changed

7 files changed

+520
-3
lines changed

docs/developers_guide/deploy.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -43,6 +43,10 @@ That separation is the main thing to preserve when changing the design.
4343
`mache/deploy/hooks.py`
4444
: Hook discovery, hook execution, and the `DeployContext` data model.
4545

46+
`mache/deploy/shared.py`
47+
: Shared-deployment artifact helpers for copied load scripts, symlinks, and
48+
extra permission-managed paths outside the deployed prefix.
49+
4650
`mache/deploy/machine.py`
4751
: Machine selection and merged machine-config loading from both
4852
`mache.machines` and target-owned config files.
@@ -220,6 +224,7 @@ including:
220224
- toolchain pairing,
221225
- pixi installation,
222226
- load-script generation,
227+
- shared load-script aliases and shared permission-managed artifacts,
223228
- JIGSAW wiring.
224229

225230
Before adding a new package-specific branch in `run.py`, prefer checking

mache/deploy/run.py

Lines changed: 18 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,7 @@
2929
from .conda import get_conda_platform_and_system
3030
from .hooks import DeployContext, configparser_to_nested_dict, load_hooks
3131
from .machine import get_machine, get_machine_config
32+
from .shared import SharedDeployArtifacts, create_shared_deploy_artifacts
3233
from .spack import (
3334
SpackDeployResult,
3435
SpackSoftwareEnvResult,
@@ -378,6 +379,16 @@ def run_deploy(args: argparse.Namespace) -> None:
378379
)
379380
ctx.runtime['load_scripts'] = load_script_paths
380381

382+
hook_registry.run_hook('post_deploy', ctx)
383+
384+
shared_artifacts = create_shared_deploy_artifacts(
385+
config=config,
386+
runtime=ctx.runtime,
387+
repo_root=repo_root,
388+
load_script_paths=load_script_paths,
389+
logger=logger,
390+
)
391+
381392
permissions_group, world_readable = _resolve_deploy_permissions(
382393
config=config,
383394
runtime=ctx.runtime,
@@ -399,13 +410,12 @@ def run_deploy(args: argparse.Namespace) -> None:
399410
if deploy_spack
400411
else []
401412
),
413+
shared_artifacts=shared_artifacts,
402414
group=permissions_group,
403415
world_readable=world_readable,
404416
logger=logger,
405417
)
406418

407-
hook_registry.run_hook('post_deploy', ctx)
408-
409419

410420
def _get_deploy_logger(*, log_filename: str, quiet: bool) -> logging.Logger:
411421
"""Get a logger for deploy-run messages.
@@ -976,6 +986,7 @@ def _apply_deploy_permissions(
976986
extra_prefixes: list[str] | None,
977987
load_script_paths: list[str],
978988
spack_paths: list[str],
989+
shared_artifacts: SharedDeployArtifacts,
979990
group: str | None,
980991
world_readable: bool,
981992
logger: logging.Logger,
@@ -1004,7 +1015,10 @@ def _apply_deploy_permissions(
10041015
if extra_prefix_abs not in prefixes:
10051016
prefixes.append(extra_prefix_abs)
10061017

1007-
for managed_prefix in prefixes:
1018+
permission_roots = prefixes + shared_artifacts.managed_dirs
1019+
permission_roots = list(dict.fromkeys(permission_roots))
1020+
1021+
for managed_prefix in permission_roots:
10081022
update_permissions(
10091023
managed_prefix,
10101024
group,
@@ -1021,6 +1035,7 @@ def _apply_deploy_permissions(
10211035
elif prefix_path.exists():
10221036
managed_paths.append(managed_prefix)
10231037
managed_paths.extend(spack_paths)
1038+
managed_paths.extend(shared_artifacts.managed_files)
10241039

10251040
managed_paths = list(dict.fromkeys(managed_paths))
10261041

mache/deploy/shared.py

Lines changed: 272 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,272 @@
1+
from __future__ import annotations
2+
3+
import logging
4+
import os
5+
from dataclasses import dataclass
6+
from pathlib import Path
7+
from typing import Any
8+
9+
10+
@dataclass(frozen=True)
class SharedDeployArtifacts:
    """Shared-deployment paths to include in the permission-update step.

    Instances are built by ``create_shared_deploy_artifacts`` and consumed
    by the deploy runner when applying deploy permissions.
    """

    # Directories to include as permission-update roots alongside the
    # deployed prefixes.
    managed_dirs: list[str]
    # Individual files to include in the permission update.
    managed_files: list[str]
14+
15+
16+
def create_shared_deploy_artifacts(
    *,
    config: dict[str, Any],
    runtime: dict[str, Any],
    repo_root: str,
    load_script_paths: list[str],
    logger: logging.Logger,
) -> SharedDeployArtifacts:
    """Materialize shared-deployment artifacts and report managed paths.

    Copies the generated load script to any configured shared locations,
    creates any configured symlinks, and returns the directories and
    files that the post-deploy permission step should manage.

    Parameters
    ----------
    config : dict
        Parsed deploy configuration (may carry a ``shared`` section).
    runtime : dict
        Hook runtime values; ``runtime['shared']`` overrides ``config``.
    repo_root : str
        Root used to resolve relative configured paths.
    load_script_paths : list of str
        Generated load scripts; exactly one is required when copies or
        symlinks are configured.
    logger : logging.Logger
        Destination for progress messages.

    Raises
    ------
    FileNotFoundError
        If a configured symlink target does not exist.
    ValueError
        If the shared configuration is malformed, or copies/symlinks are
        requested without exactly one generated load script.
    """
    settings = _merge_shared_config(config=config, runtime=runtime)

    dir_entries = _normalize_path_entries(
        settings.get('managed_directories'),
        repo_root=repo_root,
        field_name='shared.managed_directories',
    )
    file_entries = _normalize_path_entries(
        settings.get('managed_files'),
        repo_root=repo_root,
        field_name='shared.managed_files',
    )

    copy_targets = _normalize_load_script_copy_entries(
        settings.get('load_script_copies'),
        repo_root=repo_root,
    )
    symlink_specs = _normalize_load_script_symlink_entries(
        settings.get('load_script_symlinks'),
        repo_root=repo_root,
    )

    source_script: Path | None = None
    if copy_targets or symlink_specs:
        # Copies and symlinks are only well-defined when exactly one
        # load script was generated.
        source_script = _require_single_load_script(
            load_script_paths=load_script_paths,
            reason='shared load-script copies/symlinks',
        )

    for copy_dest in copy_targets:
        logger.info('Writing shared load-script copy: %s', copy_dest)
        _copy_load_script(
            source_script=source_script,
            dest_script=copy_dest,
        )
        dir_entries.append(str(copy_dest.parent))
        file_entries.append(str(copy_dest))

    for link_path, link_target in symlink_specs:
        target_present = os.path.exists(link_target) or os.path.islink(
            link_target
        )
        if not target_present:
            raise FileNotFoundError(
                'Shared load-script symlink target does not exist: '
                f'{link_target}'
            )
        logger.info(
            'Writing shared load-script symlink: %s -> %s',
            link_path,
            link_target,
        )
        link_path.parent.mkdir(parents=True, exist_ok=True)
        # Replace any pre-existing file or (possibly dangling) symlink.
        if link_path.is_symlink() or link_path.exists():
            link_path.unlink()
        link_path.symlink_to(link_target)
        dir_entries.append(str(link_path.parent))

    return SharedDeployArtifacts(
        managed_dirs=_dedupe_existing_paths(dir_entries),
        managed_files=_dedupe_existing_paths(file_entries),
    )
82+
83+
84+
def _merge_shared_config(
85+
*,
86+
config: dict[str, Any],
87+
runtime: dict[str, Any],
88+
) -> dict[str, Any]:
89+
merged: dict[str, Any] = {}
90+
91+
config_shared = config.get('shared')
92+
if config_shared is not None:
93+
if not isinstance(config_shared, dict):
94+
raise ValueError('shared section must be a mapping if provided')
95+
merged.update(config_shared)
96+
97+
runtime_shared = runtime.get('shared')
98+
if runtime_shared is not None:
99+
if not isinstance(runtime_shared, dict):
100+
raise ValueError('runtime.shared must be a mapping if provided')
101+
merged.update(runtime_shared)
102+
103+
return merged
104+
105+
106+
def _normalize_path_entries(
107+
value: Any,
108+
*,
109+
repo_root: str,
110+
field_name: str,
111+
) -> list[str]:
112+
if value is None:
113+
return []
114+
if not isinstance(value, list):
115+
raise ValueError(f'{field_name} must be a list if provided')
116+
117+
entries: list[str] = []
118+
for index, item in enumerate(value):
119+
entries.append(
120+
_resolve_path(
121+
value=item,
122+
repo_root=repo_root,
123+
field_name=f'{field_name}[{index}]',
124+
)
125+
)
126+
return entries
127+
128+
129+
def _normalize_load_script_copy_entries(
130+
value: Any,
131+
*,
132+
repo_root: str,
133+
) -> list[Path]:
134+
if value is None:
135+
return []
136+
if not isinstance(value, list):
137+
raise ValueError(
138+
'shared.load_script_copies must be a list if provided'
139+
)
140+
141+
copies: list[Path] = []
142+
for index, item in enumerate(value):
143+
field_name = f'shared.load_script_copies[{index}]'
144+
path_value: Any
145+
if isinstance(item, str):
146+
path_value = item
147+
elif isinstance(item, dict):
148+
path_value = item.get('path')
149+
else:
150+
raise ValueError(
151+
f'{field_name} must be a string or mapping with a path'
152+
)
153+
154+
copies.append(
155+
Path(
156+
_resolve_path(
157+
value=path_value,
158+
repo_root=repo_root,
159+
field_name=f'{field_name}.path',
160+
)
161+
)
162+
)
163+
164+
return _dedupe_paths(copies)
165+
166+
167+
def _normalize_load_script_symlink_entries(
168+
value: Any,
169+
*,
170+
repo_root: str,
171+
) -> list[tuple[Path, str]]:
172+
if value is None:
173+
return []
174+
if not isinstance(value, list):
175+
raise ValueError(
176+
'shared.load_script_symlinks must be a list if provided'
177+
)
178+
179+
symlinks: list[tuple[Path, str]] = []
180+
for index, item in enumerate(value):
181+
field_name = f'shared.load_script_symlinks[{index}]'
182+
if not isinstance(item, dict):
183+
raise ValueError(
184+
f'{field_name} must be a mapping with path and target'
185+
)
186+
path_value = item.get('path')
187+
target_value = item.get('target')
188+
symlinks.append(
189+
(
190+
Path(
191+
_resolve_path(
192+
value=path_value,
193+
repo_root=repo_root,
194+
field_name=f'{field_name}.path',
195+
)
196+
),
197+
_resolve_path(
198+
value=target_value,
199+
repo_root=repo_root,
200+
field_name=f'{field_name}.target',
201+
),
202+
)
203+
)
204+
205+
deduped: dict[str, tuple[Path, str]] = {}
206+
for path, target in symlinks:
207+
deduped[str(path)] = (path, target)
208+
return list(deduped.values())
209+
210+
211+
def _resolve_path(
212+
*,
213+
value: Any,
214+
repo_root: str,
215+
field_name: str,
216+
) -> str:
217+
if value is None:
218+
raise ValueError(f'{field_name} must not be null')
219+
220+
candidate = str(value).strip()
221+
if candidate.lower() in ('', 'none', 'null'):
222+
raise ValueError(f'{field_name} must be a non-empty path')
223+
224+
expanded = os.path.expanduser(os.path.expandvars(candidate))
225+
if os.path.isabs(expanded):
226+
return os.path.abspath(expanded)
227+
return os.path.abspath(os.path.join(repo_root, expanded))
228+
229+
230+
def _require_single_load_script(
231+
*,
232+
load_script_paths: list[str],
233+
reason: str,
234+
) -> Path:
235+
if not load_script_paths:
236+
raise ValueError(f'Expected one generated load script for {reason}.')
237+
if len(load_script_paths) != 1:
238+
raise ValueError(
239+
f'{reason} currently require exactly one generated load script.'
240+
)
241+
return Path(str(load_script_paths[0])).resolve()
242+
243+
244+
def _copy_load_script(*, source_script: Path, dest_script: Path) -> None:
245+
dest_script.parent.mkdir(parents=True, exist_ok=True)
246+
source_text = source_script.read_text(encoding='utf-8')
247+
updated = source_text.replace(
248+
str(source_script),
249+
str(dest_script.resolve()),
250+
)
251+
dest_script.write_text(updated, encoding='utf-8')
252+
dest_script.chmod(0o644)
253+
254+
255+
def _dedupe_paths(paths: list[Path]) -> list[Path]:
256+
deduped: dict[str, Path] = {}
257+
for path in paths:
258+
deduped[str(path)] = path
259+
return list(deduped.values())
260+
261+
262+
def _dedupe_existing_paths(paths: list[str]) -> list[str]:
263+
deduped: list[str] = []
264+
seen: set[str] = set()
265+
for path in paths:
266+
if path in seen:
267+
continue
268+
if not (os.path.exists(path) or os.path.islink(path)):
269+
continue
270+
seen.add(path)
271+
deduped.append(path)
272+
return deduped

mache/deploy/templates/config.yaml.j2.j2

Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -160,6 +160,36 @@ permissions:
160160
# group. Directories keep execute permission when needed for traversal.
161161
world_readable: true
162162

163+
shared:
164+
# Optional extra shared-deployment artifacts managed by `mache deploy run`.
165+
#
166+
# These are most useful for shared release/test installs that need copies of
167+
# the generated load script in well-known shared locations.
168+
#
169+
# Priority order:
170+
# 1. runtime["shared"] values returned from hooks
171+
# 2. values here
172+
#
173+
# `load_script_copies` entries are copied from the single generated
174+
# `load_<software>*.sh` script after deployment completes successfully.
175+
# Each entry may be either:
176+
# - "/absolute/or/relative/path/to/copied_load_script.sh"
177+
# - {path: "/path/to/copied_load_script.sh"}
178+
load_script_copies: []
179+
180+
# Optional symlinks created after any copied load scripts.
181+
# Each entry must provide:
182+
# - path: path to the symlink to create
183+
# - target: path the symlink should point to
184+
load_script_symlinks: []
185+
186+
# Optional extra shared directories/files to include in mache's
187+
# post-deploy permission update step. This is helpful when a downstream
188+
# post_deploy hook creates additional shared artifacts that mache itself
189+
# does not create.
190+
managed_directories: []
191+
managed_files: []
192+
163193
spack:
164194
# Whether to deploy Spack environments at all.
165195
#

0 commit comments

Comments
 (0)