Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions docs/developers_guide/deploy.md
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,10 @@ That separation is the main thing to preserve when changing the design.
`mache/deploy/hooks.py`
: Hook discovery, hook execution, and the `DeployContext` data model.

`mache/deploy/shared.py`
: Shared-deployment artifact helpers for copied load scripts, symlinks, and
extra permission-managed paths outside the deployed prefix.

`mache/deploy/machine.py`
: Machine selection and merged machine-config loading from both
`mache.machines` and target-owned config files.
Expand Down Expand Up @@ -220,6 +224,7 @@ including:
- toolchain pairing,
- pixi installation,
- load-script generation,
- shared load-script aliases and shared permission-managed artifacts,
- JIGSAW wiring.

Before adding a new package-specific branch in `run.py`, prefer checking
Expand Down
21 changes: 18 additions & 3 deletions mache/deploy/run.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@
from .conda import get_conda_platform_and_system
from .hooks import DeployContext, configparser_to_nested_dict, load_hooks
from .machine import get_machine, get_machine_config
from .shared import SharedDeployArtifacts, create_shared_deploy_artifacts
from .spack import (
SpackDeployResult,
SpackSoftwareEnvResult,
Expand Down Expand Up @@ -378,6 +379,16 @@ def run_deploy(args: argparse.Namespace) -> None:
)
ctx.runtime['load_scripts'] = load_script_paths

hook_registry.run_hook('post_deploy', ctx)

shared_artifacts = create_shared_deploy_artifacts(
config=config,
runtime=ctx.runtime,
repo_root=repo_root,
load_script_paths=load_script_paths,
logger=logger,
)

permissions_group, world_readable = _resolve_deploy_permissions(
config=config,
runtime=ctx.runtime,
Expand All @@ -399,13 +410,12 @@ def run_deploy(args: argparse.Namespace) -> None:
if deploy_spack
else []
),
shared_artifacts=shared_artifacts,
group=permissions_group,
world_readable=world_readable,
logger=logger,
)

hook_registry.run_hook('post_deploy', ctx)


def _get_deploy_logger(*, log_filename: str, quiet: bool) -> logging.Logger:
"""Get a logger for deploy-run messages.
Expand Down Expand Up @@ -976,6 +986,7 @@ def _apply_deploy_permissions(
extra_prefixes: list[str] | None,
load_script_paths: list[str],
spack_paths: list[str],
shared_artifacts: SharedDeployArtifacts,
group: str | None,
world_readable: bool,
logger: logging.Logger,
Expand Down Expand Up @@ -1004,7 +1015,10 @@ def _apply_deploy_permissions(
if extra_prefix_abs not in prefixes:
prefixes.append(extra_prefix_abs)

for managed_prefix in prefixes:
permission_roots = prefixes + shared_artifacts.managed_dirs
permission_roots = list(dict.fromkeys(permission_roots))

for managed_prefix in permission_roots:
update_permissions(
managed_prefix,
group,
Expand All @@ -1021,6 +1035,7 @@ def _apply_deploy_permissions(
elif prefix_path.exists():
managed_paths.append(managed_prefix)
managed_paths.extend(spack_paths)
managed_paths.extend(shared_artifacts.managed_files)

managed_paths = list(dict.fromkeys(managed_paths))

Expand Down
272 changes: 272 additions & 0 deletions mache/deploy/shared.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,272 @@
from __future__ import annotations

import logging
import os
from dataclasses import dataclass
from pathlib import Path
from typing import Any


@dataclass(frozen=True)
class SharedDeployArtifacts:
    """Filesystem artifacts created for a shared deployment.

    Returned by :func:`create_shared_deploy_artifacts` so the caller can
    feed these paths into the post-deploy permission-update step.

    NOTE: ``frozen=True`` prevents rebinding the attributes, but the list
    objects themselves remain mutable, and instances are not safely
    hashable (the generated ``__hash__`` would fail on list fields).
    """

    # Directories that should be included in the permission update
    # (updated recursively by the caller).
    managed_dirs: list[str]
    # Individual files that should be included in the permission update.
    managed_files: list[str]


def create_shared_deploy_artifacts(
    *,
    config: dict[str, Any],
    runtime: dict[str, Any],
    repo_root: str,
    load_script_paths: list[str],
    logger: logging.Logger,
) -> SharedDeployArtifacts:
    """Create configured shared-deployment artifacts.

    Reads the merged ``shared`` section (``runtime['shared']`` values from
    hooks override ``config['shared']`` key by key), writes any requested
    copies of the generated load script and any requested symlinks, and
    returns the directories and files the caller should include in the
    post-deploy permission-update step.

    Parameters
    ----------
    config : dict
        Static deploy configuration; only its optional ``shared`` section
        is consulted here.
    runtime : dict
        Hook-populated runtime values; ``runtime['shared']`` takes
        precedence over ``config['shared']``.
    repo_root : str
        Base directory used to resolve relative configured paths.
    load_script_paths : list of str
        Paths of the generated load scripts; copies/symlinks require
        exactly one.
    logger : logging.Logger
        Logger for progress messages.

    Returns
    -------
    SharedDeployArtifacts
        Directories and files to permission-manage; entries that do not
        exist on disk are filtered out.

    Raises
    ------
    ValueError
        On malformed ``shared`` configuration, or when copies/symlinks
        are requested but exactly one load script was not generated.
    FileNotFoundError
        When a configured symlink target does not exist.
    """
    shared_cfg = _merge_shared_config(config=config, runtime=runtime)

    # Extra paths the config wants permission-managed, independent of the
    # copy/symlink artifacts created below.
    managed_dirs = _normalize_path_entries(
        shared_cfg.get('managed_directories'),
        repo_root=repo_root,
        field_name='shared.managed_directories',
    )
    managed_files = _normalize_path_entries(
        shared_cfg.get('managed_files'),
        repo_root=repo_root,
        field_name='shared.managed_files',
    )

    load_script_copies = _normalize_load_script_copy_entries(
        shared_cfg.get('load_script_copies'),
        repo_root=repo_root,
    )
    load_script_symlinks = _normalize_load_script_symlink_entries(
        shared_cfg.get('load_script_symlinks'),
        repo_root=repo_root,
    )

    # Copies (and the error message covering symlinks) derive from the
    # single generated load script; fail early if there is not exactly one.
    if load_script_copies or load_script_symlinks:
        source_script = _require_single_load_script(
            load_script_paths=load_script_paths,
            reason='shared load-script copies/symlinks',
        )

    for dest_script in load_script_copies:
        logger.info('Writing shared load-script copy: %s', dest_script)
        _copy_load_script(
            source_script=source_script,
            dest_script=dest_script,
        )
        managed_dirs.append(str(dest_script.parent))
        managed_files.append(str(dest_script))

    # Symlinks are created after copies, so a symlink target may be one of
    # the copies written just above.
    for dest_link, target_path in load_script_symlinks:
        if not (os.path.exists(target_path) or os.path.islink(target_path)):
            raise FileNotFoundError(
                'Shared load-script symlink target does not exist: '
                f'{target_path}'
            )
        logger.info(
            'Writing shared load-script symlink: %s -> %s',
            dest_link,
            target_path,
        )
        dest_link.parent.mkdir(parents=True, exist_ok=True)
        # Remove any pre-existing file or (possibly broken) symlink first;
        # symlink_to() would otherwise fail on an existing path.
        if dest_link.exists() or dest_link.is_symlink():
            dest_link.unlink()
        dest_link.symlink_to(target_path)
        # NOTE(review): only the parent directory is recorded; the symlink
        # itself is not added to managed_files — presumably because symlink
        # permissions are not meaningful on most systems. Confirm.
        managed_dirs.append(str(dest_link.parent))

    return SharedDeployArtifacts(
        managed_dirs=_dedupe_existing_paths(managed_dirs),
        managed_files=_dedupe_existing_paths(managed_files),
    )


def _merge_shared_config(
*,
config: dict[str, Any],
runtime: dict[str, Any],
) -> dict[str, Any]:
merged: dict[str, Any] = {}

config_shared = config.get('shared')
if config_shared is not None:
if not isinstance(config_shared, dict):
raise ValueError('shared section must be a mapping if provided')
merged.update(config_shared)

runtime_shared = runtime.get('shared')
if runtime_shared is not None:
if not isinstance(runtime_shared, dict):
raise ValueError('runtime.shared must be a mapping if provided')
merged.update(runtime_shared)

return merged


def _normalize_path_entries(
    value: Any,
    *,
    repo_root: str,
    field_name: str,
) -> list[str]:
    """Validate a configured list of paths and resolve each to absolute.

    Returns an empty list when *value* is absent.

    Raises
    ------
    ValueError
        If *value* is present but not a list, or any entry is invalid.
    """
    if value is None:
        return []
    if not isinstance(value, list):
        raise ValueError(f'{field_name} must be a list if provided')

    return [
        _resolve_path(
            value=item,
            repo_root=repo_root,
            # Index the field name so errors point at the offending entry.
            field_name=f'{field_name}[{index}]',
        )
        for index, item in enumerate(value)
    ]


def _normalize_load_script_copy_entries(
    value: Any,
    *,
    repo_root: str,
) -> list[Path]:
    """Validate and resolve configured load-script copy destinations.

    Each entry may be a plain path string or a mapping with a ``path`` key.
    Returns de-duplicated absolute destination paths; empty when *value*
    is absent.

    Raises
    ------
    ValueError
        If *value* is not a list, or an entry is neither a string nor a
        mapping, or its path is missing/empty.
    """
    if value is None:
        return []
    if not isinstance(value, list):
        raise ValueError(
            'shared.load_script_copies must be a list if provided'
        )

    destinations: list[Path] = []
    for index, entry in enumerate(value):
        field_name = f'shared.load_script_copies[{index}]'
        if isinstance(entry, str):
            raw_path: Any = entry
        elif isinstance(entry, dict):
            raw_path = entry.get('path')
        else:
            raise ValueError(
                f'{field_name} must be a string or mapping with a path'
            )

        resolved = _resolve_path(
            value=raw_path,
            repo_root=repo_root,
            field_name=f'{field_name}.path',
        )
        destinations.append(Path(resolved))

    return _dedupe_paths(destinations)


def _normalize_load_script_symlink_entries(
    value: Any,
    *,
    repo_root: str,
) -> list[tuple[Path, str]]:
    """Validate and resolve configured load-script symlink entries.

    Each entry must be a mapping with ``path`` (the symlink to create) and
    ``target`` (what it points at). Returns ``(path, target)`` tuples,
    de-duplicated by symlink path; empty when *value* is absent.

    Raises
    ------
    ValueError
        If *value* is not a list, an entry is not a mapping, or a path or
        target is missing/empty.
    """
    if value is None:
        return []
    if not isinstance(value, list):
        raise ValueError(
            'shared.load_script_symlinks must be a list if provided'
        )

    # Keyed by the symlink path: duplicate paths keep the last target but
    # the position of the first occurrence, matching dict insertion order.
    by_link_path: dict[str, tuple[Path, str]] = {}
    for index, entry in enumerate(value):
        field_name = f'shared.load_script_symlinks[{index}]'
        if not isinstance(entry, dict):
            raise ValueError(
                f'{field_name} must be a mapping with path and target'
            )
        link_path = Path(
            _resolve_path(
                value=entry.get('path'),
                repo_root=repo_root,
                field_name=f'{field_name}.path',
            )
        )
        target = _resolve_path(
            value=entry.get('target'),
            repo_root=repo_root,
            field_name=f'{field_name}.target',
        )
        by_link_path[str(link_path)] = (link_path, target)

    return list(by_link_path.values())


def _resolve_path(
*,
value: Any,
repo_root: str,
field_name: str,
) -> str:
if value is None:
raise ValueError(f'{field_name} must not be null')

candidate = str(value).strip()
if candidate.lower() in ('', 'none', 'null'):
raise ValueError(f'{field_name} must be a non-empty path')

expanded = os.path.expanduser(os.path.expandvars(candidate))
if os.path.isabs(expanded):
return os.path.abspath(expanded)
return os.path.abspath(os.path.join(repo_root, expanded))


def _require_single_load_script(
*,
load_script_paths: list[str],
reason: str,
) -> Path:
if not load_script_paths:
raise ValueError(f'Expected one generated load script for {reason}.')
if len(load_script_paths) != 1:
raise ValueError(
f'{reason} currently require exactly one generated load script.'
)
return Path(str(load_script_paths[0])).resolve()


def _copy_load_script(*, source_script: Path, dest_script: Path) -> None:
dest_script.parent.mkdir(parents=True, exist_ok=True)
source_text = source_script.read_text(encoding='utf-8')
updated = source_text.replace(
str(source_script),
str(dest_script.resolve()),
)
dest_script.write_text(updated, encoding='utf-8')
dest_script.chmod(0o644)


def _dedupe_paths(paths: list[Path]) -> list[Path]:
deduped: dict[str, Path] = {}
for path in paths:
deduped[str(path)] = path
return list(deduped.values())


def _dedupe_existing_paths(paths: list[str]) -> list[str]:
deduped: list[str] = []
seen: set[str] = set()
for path in paths:
if path in seen:
continue
if not (os.path.exists(path) or os.path.islink(path)):
continue
seen.add(path)
deduped.append(path)
return deduped
30 changes: 30 additions & 0 deletions mache/deploy/templates/config.yaml.j2.j2
Original file line number Diff line number Diff line change
Expand Up @@ -160,6 +160,36 @@ permissions:
# group. Directories keep execute permission when needed for traversal.
world_readable: true

shared:
# Optional extra shared-deployment artifacts managed by `mache deploy run`.
#
# These are most useful for shared release/test installs that need copies of
# the generated load script in well-known shared locations.
#
# Priority order:
# 1. runtime["shared"] values returned from hooks
# 2. values here
#
# `load_script_copies` entries are copied from the single generated
# `load_<software>*.sh` script after deployment completes successfully.
# Each entry may be either:
# - "/absolute/or/relative/path/to/copied_load_script.sh"
# - {path: "/path/to/copied_load_script.sh"}
load_script_copies: []

# Optional symlinks created after any copied load scripts.
# Each entry must provide:
# - path: path to the symlink to create
# - target: path the symlink should point to
load_script_symlinks: []

# Optional extra shared directories/files to include in mache's
# post-deploy permission update step. This is helpful when a downstream
# post_deploy hook creates additional shared artifacts that mache itself
# does not create.
managed_directories: []
managed_files: []

spack:
# Whether to deploy Spack environments at all.
#
Expand Down
Loading
Loading