Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions docs/users_guide/deploy.md
Original file line number Diff line number Diff line change
Expand Up @@ -213,6 +213,10 @@ Important settings:
- `permissions.world_readable`: optional boolean controlling whether deployed
files are readable outside the shared group. Hooks may override both values
through `ctx.runtime["permissions"]`.
- `shared.base_path`: optional shared base directory. When set, mache updates
  its permissions recursively before any path-specific permission updates, and
  paths inside it are skipped by those later updates. Hooks may override this
  through `ctx.runtime["shared"]["base_path"]`; set it to `"dynamic"` when the
  path depends on machine-specific logic in `deploy/hooks.py`.
- `spack.spack_path`: required when Spack support is enabled and no hook or
CLI override provides it, unless the user disables Spack for that run with
`--no-spack`.
Expand Down
71 changes: 63 additions & 8 deletions mache/deploy/run.py
Original file line number Diff line number Diff line change
Expand Up @@ -1006,16 +1006,34 @@ def _apply_deploy_permissions(
world_readable,
)

# A declared shared base absorbs permission management for any descendant
# artifacts, so we update it first and then only handle out-of-tree paths.
shared_base_path = _normalize_permission_path(shared_artifacts.base_path)
if shared_base_path is not None:
update_permissions(
shared_base_path,
group,
show_progress=True,
group_writable=True,
other_readable=world_readable,
recursive=True,
)

prefixes = [prefix_abs]
if extra_prefixes:
for extra_prefix in extra_prefixes:
extra_prefix_abs = os.path.abspath(
os.path.expanduser(os.path.expandvars(str(extra_prefix)))
)
if extra_prefix_abs not in prefixes:
extra_prefix_abs = _normalize_permission_path(extra_prefix)
if (
extra_prefix_abs is not None
and extra_prefix_abs not in prefixes
):
prefixes.append(extra_prefix_abs)

permission_roots = prefixes + shared_artifacts.managed_dirs
permission_roots = [
path
for path in prefixes + shared_artifacts.managed_dirs
if not _permission_path_is_within(path, shared_base_path)
]
permission_roots = list(dict.fromkeys(permission_roots))

for managed_prefix in permission_roots:
Expand All @@ -1027,15 +1045,29 @@ def _apply_deploy_permissions(
recursive=False,
)

managed_paths = [str(path) for path in load_script_paths]
managed_paths = [
_normalize_permission_path(path) for path in load_script_paths
]
for managed_prefix in prefixes:
prefix_path = Path(managed_prefix)
if prefix_path.is_dir():
managed_paths.extend(str(path) for path in prefix_path.iterdir())
elif prefix_path.exists():
managed_paths.append(managed_prefix)
managed_paths.extend(spack_paths)
managed_paths.extend(shared_artifacts.managed_files)
managed_paths.extend(
_normalize_permission_path(path) for path in spack_paths
)
managed_paths.extend(
_normalize_permission_path(path)
for path in shared_artifacts.managed_files
)

managed_paths = [
path
for path in managed_paths
if path is not None
and not _permission_path_is_within(path, shared_base_path)
]

managed_paths = list(dict.fromkeys(managed_paths))

Expand All @@ -1052,6 +1084,29 @@ def _apply_deploy_permissions(
)


def _normalize_permission_path(path: str | None) -> str | None:
if path is None:
return None
return os.path.abspath(os.path.expanduser(os.path.expandvars(str(path))))


def _permission_path_is_within(
    path: str,
    base_path: str | None,
) -> bool:
    """Return ``True`` when ``path`` lies within ``base_path``.

    ``base_path`` is expected to already be normalized (as produced by
    ``_normalize_permission_path``); ``path`` is normalized here before the
    comparison.  A ``None`` base contains nothing, and a pair of paths with
    no common root (e.g. different drives on Windows) is treated as outside.
    """
    if base_path is None:
        return False

    candidate = _normalize_permission_path(path)
    if candidate is None:
        return False

    try:
        common = os.path.commonpath([candidate, base_path])
    except ValueError:
        # commonpath raises for mixed absolute/relative or cross-drive paths
        return False
    return common == base_path


def _get_deployed_spack_paths(
*,
spack_results: list[SpackDeployResult],
Expand Down
33 changes: 30 additions & 3 deletions mache/deploy/shared.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,15 +2,16 @@

import logging
import os
from dataclasses import dataclass
from dataclasses import dataclass, field
from pathlib import Path
from typing import Any


@dataclass(frozen=True)
class SharedDeployArtifacts:
    """Shared filesystem artifacts collected during a deployment.

    Consumed by the permission-update step to decide which shared paths
    need their group/mode adjusted after deployment completes.
    """

    # Optional shared base directory; when set, its permissions are updated
    # recursively before any path-specific permission updates.
    base_path: str | None = None
    # Shared directories to include in the permission update step.
    managed_dirs: list[str] = field(default_factory=list)
    # Shared files to include in the permission update step.
    managed_files: list[str] = field(default_factory=list)


def create_shared_deploy_artifacts(
Expand All @@ -23,6 +24,11 @@ def create_shared_deploy_artifacts(
) -> SharedDeployArtifacts:
shared_cfg = _merge_shared_config(config=config, runtime=runtime)

base_path = _resolve_optional_path(
value=shared_cfg.get('base_path'),
repo_root=repo_root,
field_name='shared.base_path',
)
managed_dirs = _normalize_path_entries(
shared_cfg.get('managed_directories'),
repo_root=repo_root,
Expand Down Expand Up @@ -76,6 +82,7 @@ def create_shared_deploy_artifacts(
managed_dirs.append(str(dest_link.parent))

return SharedDeployArtifacts(
base_path=base_path,
managed_dirs=_dedupe_existing_paths(managed_dirs),
managed_files=_dedupe_existing_paths(managed_files),
)
Expand Down Expand Up @@ -227,6 +234,26 @@ def _resolve_path(
return os.path.abspath(os.path.join(repo_root, expanded))


def _resolve_optional_path(
    *,
    value: Any,
    repo_root: str,
    field_name: str,
) -> str | None:
    """Resolve an optional configured path, treating placeholders as unset.

    ``None``, empty/whitespace-only strings, and the case-insensitive
    sentinels ``"none"``, ``"null"``, and ``"dynamic"`` all yield ``None``;
    any other value is resolved relative to ``repo_root``.
    """
    if value is None:
        return None

    text = str(value).strip()
    if not text or text.lower() in {'none', 'null', 'dynamic'}:
        return None

    return _resolve_path(
        value=text,
        repo_root=repo_root,
        field_name=field_name,
    )


def _require_single_load_script(
*,
load_script_paths: list[str],
Expand Down
6 changes: 6 additions & 0 deletions mache/deploy/templates/config.yaml.j2.j2
Original file line number Diff line number Diff line change
Expand Up @@ -170,6 +170,12 @@ shared:
# 1. runtime["shared"] values returned from hooks
# 2. values here
#
# Optional shared base directory to update permissions on recursively before
# mache processes specific deployed prefixes, load scripts, Spack paths, and
# other managed shared artifacts. Set this to "dynamic" when hooks will
# decide the path at runtime.
base_path: "dynamic"
#
# `load_script_copies` entries are copied from the single generated
# `load_<software>*.sh` script after deployment completes successfully.
# Each entry may be either:
Expand Down
3 changes: 3 additions & 0 deletions mache/deploy/templates/hooks.py.j2
Original file line number Diff line number Diff line change
Expand Up @@ -86,6 +86,8 @@ def pre_pixi(ctx: DeployContext) -> dict[str, Any] | None:
# the generated load script in shared locations
# - runtime["shared"]["load_script_symlinks"]: additional managed
# symlinks for shared load-script aliases
# - runtime["shared"]["base_path"]: shared base directory to update
# permissions on recursively before path-specific updates
# - runtime["shared"]["managed_directories"]: extra shared directories to
# include in the permission update step
# - runtime["shared"]["managed_files"]: extra shared files to include in
Expand All @@ -103,6 +105,7 @@ def pre_pixi(ctx: DeployContext) -> dict[str, Any] | None:
# updates.setdefault("pixi", {})["omit_dependencies"] = ["git"]
# updates.setdefault("permissions", {})["group"] = "e3sm"
# updates.setdefault("permissions", {})["world_readable"] = False
# updates.setdefault("shared", {})["base_path"] = "/shared/my-software"
# updates.setdefault("shared", {})["load_script_copies"] = [
# "/shared/load_my_software.sh",
# ]
Expand Down
96 changes: 96 additions & 0 deletions tests/test_deploy_run.py
Original file line number Diff line number Diff line change
Expand Up @@ -919,6 +919,102 @@ def _fake_update_permissions(*args, **kwargs):
assert third_kwargs['recursive'] is True


def test_apply_deploy_permissions_updates_shared_base_first(
    tmp_path: Path, monkeypatch: pytest.MonkeyPatch
):
    """A shared base dir gets one recursive update first, and every path
    inside it is excluded from the later dir- and file-level updates."""
    # Layout: a shared base containing the prefix plus one artifact of each
    # kind, and a sibling "outside" artifact of each kind for contrast.
    shared_base = tmp_path / 'shared'
    shared_base.mkdir()

    prefix = shared_base / 'prefix'
    prefix.mkdir()
    (prefix / 'pixi.toml').write_text('[workspace]\n', encoding='utf-8')
    (prefix / 'bin').mkdir()

    load_script_in_base = shared_base / 'load_demo.sh'
    load_script_in_base.write_text('#!/bin/sh\n', encoding='utf-8')
    load_script_outside = tmp_path / 'load_external.sh'
    load_script_outside.write_text('#!/bin/sh\n', encoding='utf-8')

    spack_in_base = shared_base / 'spack'
    spack_in_base.mkdir()
    spack_outside = tmp_path / 'spack-outside'
    spack_outside.mkdir()

    managed_dir_in_base = shared_base / 'managed-dir'
    managed_dir_in_base.mkdir()
    managed_dir_outside = tmp_path / 'managed-dir-outside'
    managed_dir_outside.mkdir()

    managed_file_in_base = shared_base / 'managed.txt'
    managed_file_in_base.write_text('shared\n', encoding='utf-8')
    managed_file_outside = tmp_path / 'managed-outside.txt'
    managed_file_outside.write_text('outside\n', encoding='utf-8')

    # Record each update_permissions invocation instead of changing any
    # real ownership or mode bits.
    calls = []

    def _fake_update_permissions(*args, **kwargs):
        calls.append((args, kwargs))

    monkeypatch.setattr(
        deploy_run, 'update_permissions', _fake_update_permissions
    )

    # Silence logging so the test produces no output.
    logger = deploy_run.logging.getLogger(
        'test-apply-deploy-permissions-shared-base'
    )
    logger.handlers = [deploy_run.logging.NullHandler()]
    logger.propagate = False

    deploy_run._apply_deploy_permissions(
        prefix=str(prefix),
        extra_prefixes=None,
        load_script_paths=[
            str(load_script_in_base),
            str(load_script_outside),
        ],
        spack_paths=[str(spack_in_base), str(spack_outside)],
        shared_artifacts=SharedDeployArtifacts(
            base_path=str(shared_base),
            managed_dirs=[
                str(managed_dir_in_base),
                str(managed_dir_outside),
            ],
            managed_files=[
                str(managed_file_in_base),
                str(managed_file_outside),
            ],
        ),
        group='e3sm',
        world_readable=True,
        logger=logger,
    )

    # Exactly three updates: shared base, out-of-base dirs, out-of-base files.
    assert len(calls) == 3

    # Call 1: the shared base itself — recursive and group-writable.
    first_args, first_kwargs = calls[0]
    assert first_args == (str(shared_base), 'e3sm')
    assert first_kwargs['group_writable'] is True
    assert first_kwargs['other_readable'] is True
    assert first_kwargs['recursive'] is True

    # Call 2: only the managed dir outside the base, non-recursive; the
    # prefix and in-base managed dir are absorbed by the base update.
    second_args, second_kwargs = calls[1]
    assert second_args == (str(managed_dir_outside), 'e3sm')
    assert second_kwargs['group_writable'] is True
    assert second_kwargs['recursive'] is False

    # Call 3: only the out-of-base script/spack/file paths, order-agnostic.
    third_args, third_kwargs = calls[2]
    assert third_args[1] == 'e3sm'
    assert sorted(third_args[0]) == sorted(
        [
            str(load_script_outside),
            str(spack_outside),
            str(managed_file_outside),
        ]
    )
    assert third_kwargs['group_writable'] is False
    assert third_kwargs['recursive'] is True


def test_pixi_install_writes_project_local_pixi_config(
tmp_path: Path, monkeypatch: pytest.MonkeyPatch
):
Expand Down
22 changes: 22 additions & 0 deletions tests/test_deploy_shared.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,7 @@ def test_create_shared_deploy_artifacts_copies_load_scripts_and_links(
assert latest_link.is_symlink()
assert os.readlink(latest_link) == str(copied_script)
assert artifacts == SharedDeployArtifacts(
base_path=None,
managed_dirs=[
str(extra_dir),
str(copied_script.parent),
Expand All @@ -68,6 +69,27 @@ def test_create_shared_deploy_artifacts_copies_load_scripts_and_links(
)


def test_create_shared_deploy_artifacts_resolves_runtime_base_path(
    tmp_path: Path,
):
    """The runtime base_path wins over config and resolves against repo_root."""
    repo_dir = tmp_path / 'repo'
    repo_dir.mkdir()

    config = {'shared': {'base_path': 'shared/from-config'}}
    runtime = {'shared': {'base_path': 'shared/from-runtime'}}

    artifacts = create_shared_deploy_artifacts(
        config=config,
        runtime=runtime,
        repo_root=str(repo_dir),
        load_script_paths=[],
        logger=_logger(),
    )

    expected_base = str(repo_dir / 'shared' / 'from-runtime')
    assert artifacts == SharedDeployArtifacts(
        base_path=expected_base,
        managed_dirs=[],
        managed_files=[],
    )


def test_create_shared_deploy_artifacts_requires_single_load_script(
tmp_path: Path,
):
Expand Down
Loading