Skip to content
49 changes: 7 additions & 42 deletions lib/iris/hatch_build.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,13 @@

"""Hatchling custom build hook for Iris.

Regenerates protobuf files from .proto sources and rebuilds the Vue dashboard
when source files are newer than their generated outputs. This runs automatically
during ``uv sync`` / ``pip install -e .`` / wheel builds, eliminating the need
to check generated files into git or manually run build steps.
Regenerates protobuf files from .proto sources when source files are newer
than their generated outputs. This runs automatically during ``uv sync`` /
``pip install -e .`` / wheel builds, eliminating the need to check generated
files into git or manually run build steps.

Dashboard assets are built separately via ``iris build dashboard`` or
``_ensure_dashboard_dist()`` in the Docker image build pipeline.
"""

import logging
Expand All @@ -23,9 +26,6 @@
_PROTO_SOURCE_GLOBS = ["src/iris/rpc/*.proto"]
_PROTO_OUTPUT_GLOBS = ["src/iris/rpc/*_pb2.py", "src/iris/rpc/*_pb2.pyi", "src/iris/rpc/*_connect.py"]

_DASHBOARD_SOURCE_GLOBS = ["dashboard/src/**/*", "dashboard/package.json", "dashboard/rsbuild.config.ts"]
_DASHBOARD_OUTPUT_DIR = "dashboard/dist"


def _newest_mtime(root: Path, globs: list[str]) -> float:
"""Return the newest mtime across all files matching the given globs."""
Expand Down Expand Up @@ -70,7 +70,6 @@ class CustomBuildHook(BuildHookInterface):
def initialize(self, version: str, build_data: dict) -> None:
root = Path(self.root)
self._maybe_generate_protos(root)
self._maybe_build_dashboard(root)

def _maybe_generate_protos(self, root: Path) -> None:
outputs_present = _outputs_exist(root, _PROTO_OUTPUT_GLOBS)
Expand Down Expand Up @@ -109,37 +108,3 @@ def _maybe_generate_protos(self, root: Path) -> None:
if result.returncode != 0:
raise RuntimeError(f"Protobuf generation failed:\n{result.stdout}\n{result.stderr}")
logger.info("Protobuf generation complete")

def _maybe_build_dashboard(self, root: Path) -> None:
dashboard_dir = root / "dashboard"
if not (dashboard_dir / "package.json").exists():
logger.info("Dashboard source not found, skipping build")
return

dist_dir = root / _DASHBOARD_OUTPUT_DIR
dist_present = dist_dir.exists() and any(dist_dir.iterdir())

if shutil.which("npm") is None:
if not dist_present:
logger.warning(
"npm not found and dashboard/dist is missing. "
"Dashboard will not be available. Install Node.js to build it."
)
return

source_newest = _newest_mtime(root, _DASHBOARD_SOURCE_GLOBS)
if dist_present and source_newest > 0:
output_oldest = _oldest_mtime(root, [f"{_DASHBOARD_OUTPUT_DIR}/**/*"])
if output_oldest > 0 and source_newest <= output_oldest:
logger.info("Dashboard assets are up-to-date, skipping build")
return

logger.info("Building dashboard assets...")
result = subprocess.run(["npm", "ci"], cwd=dashboard_dir, capture_output=True, text=True)
if result.returncode != 0:
raise RuntimeError(f"npm ci failed:\n{result.stdout}\n{result.stderr}")

result = subprocess.run(["npm", "run", "build"], cwd=dashboard_dir, capture_output=True, text=True)
if result.returncode != 0:
raise RuntimeError(f"Dashboard build failed:\n{result.stdout}\n{result.stderr}")
logger.info("Dashboard build complete")
28 changes: 28 additions & 0 deletions lib/iris/src/iris/cluster/client/bundle.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,32 @@ def _get_git_non_ignored_files(workspace: Path) -> set[Path] | None:
return None


# Glob patterns for generated files that are gitignored but required at runtime.
# These are produced by build hooks (e.g. hatch_build.py protobuf generation)
# and must be included in task bundles so that `uv sync` inside containers can
# skip regeneration.
# NOTE(review): the two prefix groups presumably cover bundles rooted at the
# iris package directory vs. the monorepo root — confirm against BundleCreator
# callers before removing either group.
_GENERATED_ARTIFACT_GLOBS = [
    "src/iris/rpc/*_pb2.py",
    "src/iris/rpc/*_pb2.pyi",
    "src/iris/rpc/*_connect.py",
    "lib/iris/src/iris/rpc/*_pb2.py",
    "lib/iris/src/iris/rpc/*_pb2.pyi",
    "lib/iris/src/iris/rpc/*_connect.py",
]


def _include_generated_build_artifacts(workspace: Path, files: set[Path]) -> None:
    """Add generated build artifacts that exist on disk but are gitignored.

    Mutates ``files`` in place, adding any file matching
    ``_GENERATED_ARTIFACT_GLOBS`` that is not already present and is not
    excluded by ``_should_exclude``.
    """
    newly_added: list[Path] = []
    for pattern in _GENERATED_ARTIFACT_GLOBS:
        for candidate in workspace.glob(pattern):
            if not candidate.is_file():
                continue
            if candidate in files:
                continue
            if _should_exclude(candidate.relative_to(workspace)):
                continue
            files.add(candidate)
            newly_added.append(candidate)
    if newly_added:
        logger.debug("Included %d generated build artifact(s) in bundle", len(newly_added))


class BundleCreator:
"""Helper for creating workspace bundles.

Expand All @@ -93,6 +119,8 @@ def create_bundle(self) -> bytes:
ValueError: If bundle size exceeds MAX_BUNDLE_SIZE_BYTES
"""
git_files = _get_git_non_ignored_files(self._workspace)
if git_files is not None:
_include_generated_build_artifacts(self._workspace, git_files)

with tempfile.TemporaryDirectory(prefix="bundle_") as td:
bundle_path = Path(td) / "bundle.zip"
Expand Down
24 changes: 24 additions & 0 deletions lib/iris/tests/cluster/client/test_bundle.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,30 @@ def test_bundle_creator_uses_git_files_when_available(workspace):
assert not any("__pycache__" in n for n in names)


def test_bundle_includes_generated_proto_files(workspace):
    """Generated protobuf files (gitignored) are included in the bundle."""
    rpc_dir = workspace / "src" / "iris" / "rpc"
    rpc_dir.mkdir(parents=True)
    generated = ["cluster_pb2.py", "cluster_pb2.pyi", "cluster_connect.py"]
    for filename in generated:
        (rpc_dir / filename).write_text("# generated")

    # Pretend git only reports the tracked files; the generated outputs must
    # then be picked up by _include_generated_build_artifacts.
    tracked = {workspace / "pyproject.toml", workspace / "src" / "main.py"}
    with patch(
        "iris.cluster.client.bundle._get_git_non_ignored_files",
        return_value=tracked,
    ):
        bundle_bytes = BundleCreator(workspace).create_bundle()

    with zipfile.ZipFile(io.BytesIO(bundle_bytes)) as zf:
        archived = set(zf.namelist())
    for filename in generated:
        assert f"src/iris/rpc/{filename}" in archived


def test_bundle_creator_rejects_oversized_bundles(workspace):
"""Test that bundles exceeding MAX_BUNDLE_SIZE_BYTES are rejected."""
# Create a large file with random data that won't compress well
Expand Down
16 changes: 16 additions & 0 deletions lib/iris/tests/e2e/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,22 @@
DEFAULT_CONFIG = IRIS_ROOT / "examples" / "test.yaml"


@pytest.fixture(scope="session", autouse=True)
def _ensure_dashboard_built():
    """Build dashboard assets once per session so dashboard tests have content to render.

    Always rebuilds rather than trusting an existing ``dashboard/dist``: a
    stale dist directory could silently mask failures against the current
    sources (per review feedback, "always run the ci to build").

    Skips silently when there is no dashboard source tree, and with a warning
    when npm is unavailable. Raises ``subprocess.CalledProcessError`` if
    either npm step fails.
    """
    dashboard_dir = IRIS_ROOT / "dashboard"
    if not (dashboard_dir / "package.json").exists():
        return
    if shutil.which("npm") is None:
        logging.getLogger(__name__).warning("npm not found, skipping dashboard build for tests")
        return
    # `npm ci` performs a clean, lockfile-exact install before the build.
    subprocess.run(["npm", "ci"], cwd=dashboard_dir, check=True, capture_output=True)
    subprocess.run(["npm", "run", "build"], cwd=dashboard_dir, check=True, capture_output=True)


def pytest_addoption(parser):
"""Cloud mode CLI options for running smoke tests against remote clusters."""
parser.addoption("--iris-config", default=None, help="Path to cluster config YAML for cloud mode")
Expand Down
Loading