
Commit 5d5c73e

feat: introduce Node.js runtime management and Pydantic schemas for Next.js dashboard data serialization.
1 parent 9b8cb0c commit 5d5c73e

File tree

4 files changed (+340, −243 lines)

Lines changed: 188 additions & 0 deletions
@@ -0,0 +1,188 @@
"""Node.js runtime management for floatCSEP Next.js dashboard."""

import logging
import os
import platform
import re
import shutil
import stat
import subprocess
import tarfile
import zipfile
from dataclasses import dataclass
from pathlib import Path
from typing import List, Optional
from urllib import request

logger = logging.getLogger(__name__)

MIN_NODE_VERSION = (18, 17, 0)
BUNDLED_NODE_VERSION = "20.11.1"


@dataclass
class NodeRuntime:
    """Represents a runnable Node.js installation."""

    node_path: Path
    npm_path: Path
    bin_dir: Path
    source: str

    def apply_to_env(self, env: dict) -> dict:
        """Return a copy of the environment with this runtime prepended to PATH."""
        current_path = env.get("PATH", "")
        updated = env.copy()
        updated["PATH"] = (
            f"{self.bin_dir}{os.pathsep}{current_path}"
            if current_path
            else str(self.bin_dir)
        )
        return updated


def parse_node_version(raw: str) -> Optional[tuple[int, int, int]]:
    match = re.match(r"v?(\d+)\.(\d+)\.(\d+)", raw.strip())
    if not match:
        return None
    return tuple(int(part) for part in match.groups())


def get_system_node_runtime() -> Optional[NodeRuntime]:
    node_cmd = shutil.which("node")
    npm_cmd = shutil.which("npm")
    if not node_cmd or not npm_cmd:
        return None
    try:
        result = subprocess.run(
            [node_cmd, "--version"], capture_output=True, text=True, check=True
        )
    except (subprocess.CalledProcessError, FileNotFoundError):
        return None
    version = parse_node_version(result.stdout)
    if not version or version < MIN_NODE_VERSION:
        return None
    return NodeRuntime(
        node_path=Path(node_cmd),
        npm_path=Path(npm_cmd),
        bin_dir=Path(node_cmd).parent,
        source="system",
    )


def _node_dist_name() -> tuple[str, str]:
    system = platform.system().lower()
    machine = platform.machine().lower()
    if system == "linux":
        if machine in ("x86_64", "amd64"):
            return "linux-x64", ".tar.xz"
        if machine in ("aarch64", "arm64"):
            return "linux-arm64", ".tar.xz"
    elif system == "darwin":
        if machine == "arm64":
            return "darwin-arm64", ".tar.xz"
        if machine in ("x86_64", "amd64"):
            return "darwin-x64", ".tar.xz"
    elif system == "windows":
        if machine in ("x86_64", "amd64"):
            return "win-x64", ".zip"
    raise RuntimeError(
        f"Unsupported platform '{platform.system()} {platform.machine()}'. "
        "Please install Node.js 20+ manually."
    )


def _download_node_archive(target: Path, url: str) -> None:
    logger.info("Downloading Node.js runtime from %s", url)
    target.parent.mkdir(parents=True, exist_ok=True)
    with request.urlopen(url) as response, open(target, "wb") as handle:
        shutil.copyfileobj(response, handle)


def _extract_node_archive(archive: Path, destination: Path) -> Path:
    logger.info("Extracting Node.js runtime to %s", destination)
    destination.mkdir(parents=True, exist_ok=True)
    if archive.suffix == ".zip":
        with zipfile.ZipFile(archive) as zf:
            zf.extractall(destination)
    else:
        # Handles .tar.xz
        with tarfile.open(archive, mode="r:*") as tf:
            tf.extractall(destination)
    # Find the extracted directory (node-vXX-<platform>)
    for child in destination.iterdir():
        if child.is_dir() and child.name.startswith(f"node-v{BUNDLED_NODE_VERSION}"):
            return child
    raise RuntimeError("Failed to locate extracted Node.js runtime")


def ensure_bundled_node(nextjs_dir: Path) -> NodeRuntime:
    platform_tag, archive_ext = _node_dist_name()
    cache_dir = nextjs_dir / ".cache" / "node-runtime"
    extract_root = cache_dir / f"node-v{BUNDLED_NODE_VERSION}-{platform_tag}"
    if extract_root.exists():
        logger.info("Using cached Node.js runtime at %s", extract_root)
    else:
        archive_name = f"node-v{BUNDLED_NODE_VERSION}-{platform_tag}{archive_ext}"
        download_url = (
            f"https://nodejs.org/dist/v{BUNDLED_NODE_VERSION}/{archive_name}"
        )
        archive_path = cache_dir / archive_name
        _download_node_archive(archive_path, download_url)
        extracted = _extract_node_archive(archive_path, cache_dir)
        extracted.rename(extract_root)
        archive_path.unlink(missing_ok=True)

    if platform.system().lower() == "windows":
        node_path = extract_root / "node.exe"
        npm_path = extract_root / "npm.cmd"
        bin_dir = extract_root
    else:
        bin_dir = extract_root / "bin"
        node_path = bin_dir / "node"
        npm_path = bin_dir / "npm"
        for path in (node_path, npm_path):
            if not path.exists():
                raise RuntimeError(f"Bundled Node.js binary missing: {path}")
            mode = path.stat().st_mode
            path.chmod(mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
    return NodeRuntime(
        node_path=node_path, npm_path=npm_path, bin_dir=bin_dir, source="bundled"
    )


def ensure_node_runtime(nextjs_dir: Path) -> NodeRuntime:
    runtime = get_system_node_runtime()
    if runtime:
        logger.info("Detected Node.js %s from system PATH", runtime.node_path)
        return runtime
    logger.warning(
        "Node.js %s or newer not found. Downloading a scoped runtime (v%s).",
        ".".join(str(part) for part in MIN_NODE_VERSION),
        BUNDLED_NODE_VERSION,
    )
    return ensure_bundled_node(nextjs_dir)


def ensure_nextjs_dependencies(
    nextjs_dir: Path, npm_cmd: List[str], env: dict
) -> None:
    """Install Node dependencies if needed."""
    node_modules = nextjs_dir / "node_modules"
    if node_modules.exists():
        return
    logger.info("Installing Next.js dependencies (this may take a few minutes)...")
    try:
        subprocess.run(
            npm_cmd + ["install"],
            cwd=nextjs_dir,
            check=True,
            env=env,
        )
    except subprocess.CalledProcessError as exc:
        logger.error("Failed to install dependencies: %s", exc)
        raise RuntimeError(
            "Could not install Next.js dependencies automatically. "
            "Please ensure network access is available or install them manually."
        )
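
Taken together, these helpers are meant to be chained by whatever process launches the dashboard: resolve a runtime, put its bin directory on PATH, then install dependencies with the resolved npm. A minimal wiring sketch, assuming the module above is importable as node_runtime and the Next.js app lives in a dashboard/ directory; both names are placeholders, since the actual file paths are not shown in this diff:

import os
from pathlib import Path

# Hypothetical import path; the new module's filename is not shown in this diff.
from node_runtime import ensure_node_runtime, ensure_nextjs_dependencies

nextjs_dir = Path("dashboard")                # hypothetical location of the Next.js app
runtime = ensure_node_runtime(nextjs_dir)     # system Node >= 18.17.0 if available, else bundled v20.11.1
env = runtime.apply_to_env(dict(os.environ))  # copy of the environment with the runtime's bin dir on PATH
ensure_nextjs_dependencies(nextjs_dir, [str(runtime.npm_path)], env)  # npm install, skipped if node_modules exists
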
Lines changed: 131 additions & 0 deletions
@@ -0,0 +1,131 @@
"""Pydantic schemas for floatCSEP Next.js dashboard."""

from typing import Any, Dict, List, Optional, Tuple
from pathlib import Path
from pydantic import BaseModel, ConfigDict, field_validator, Field


def serialize_value_recursive(value: Any) -> Any:
    """Recursively convert values to JSON-serializable types."""
    if isinstance(value, Path):
        return str(value)
    elif isinstance(value, dict):
        return {k: serialize_value_recursive(v) for k, v in value.items()}
    elif isinstance(value, (list, tuple)):
        return [serialize_value_recursive(v) for v in value]
    elif hasattr(value, "__dict__"):
        return serialize_value_recursive(vars(value))
    else:
        return value


class ManifestModel(BaseModel):
    """
    Pydantic model for the Experiment Manifest.

    Validates and serializes the Manifest dataclass from floatcsep.
    """

    model_config = ConfigDict(from_attributes=True, populate_by_name=True)

    # --- Existing fields ---
    name: str
    start_date: str
    end_date: str
    authors: Optional[str] = None
    doi: Optional[str] = None
    journal: Optional[str] = None
    manuscript_doi: Optional[str] = None
    exp_time: Optional[str] = None
    floatcsep_version: Optional[str] = None
    pycsep_version: Optional[str] = None
    last_run: Optional[str] = None
    catalog_doi: Optional[str] = None
    license: Optional[str] = None
    date_range: str
    magnitudes: List[float]

    # Region is typically an object in the dataclass
    region: Optional[Dict[str, Any]] = None

    models: List[Dict[str, Any]]
    tests: List[Dict[str, Any]]
    time_windows: List[str]

    catalog: Dict[str, Any]
    results_main: Dict[str, str]  # tuple keys are flattened to pipe-delimited strings
    results_model: Dict[str, str]

    app_root: Optional[str] = None

    # --- Metadata fields ---
    exp_class: str
    n_intervals: int
    horizon: Optional[str] = None
    offset: Optional[str] = None
    growth: Optional[str] = None

    mag_min: Optional[float] = None
    mag_max: Optional[float] = None
    mag_bin: Optional[float] = None
    depth_min: Optional[float] = None
    depth_max: Optional[float] = None

    run_mode: Optional[str] = None
    run_dir: Optional[str] = None
    config_file: Optional[str] = None
    # Renamed to avoid clashing with Pydantic's reserved model_config attribute
    model_config_path: Optional[str] = Field(
        None, validation_alias="model_config", serialization_alias="model_config"
    )
    test_config: Optional[str] = None

    @field_validator("region", mode="before")
    def serialize_region(cls, v: Any) -> Optional[Dict[str, Any]]:
        if v is None:
            return None
        if isinstance(v, dict):
            return v
        # Attempt to extract attributes from a Region object
        return {
            "name": getattr(v, "name", None),
            "bbox": list(v.get_bbox()) if hasattr(v, "get_bbox") else None,
            "dh": float(v.dh) if hasattr(v, "dh") else None,
            "origins": v.origins().tolist() if hasattr(v, "origins") else None,
        }

    @field_validator("models", "tests", "catalog", mode="before")
    def serialize_generic_structures(cls, v: Any) -> Any:
        return serialize_value_recursive(v)

    @field_validator("results_main", mode="before")
    def serialize_results_main(cls, v: Any) -> Dict[str, str]:
        # Transform Dict[Tuple[str, str], str] -> Dict[str, str]
        if isinstance(v, dict):
            new_dict = {}
            for key, val in v.items():
                if isinstance(key, tuple):
                    new_key = f"{key[0]}|{key[1]}"
                else:
                    new_key = str(key)
                new_dict[new_key] = serialize_value_recursive(val)
            return new_dict
        return v

    @field_validator("results_model", mode="before")
    def serialize_results_model(cls, v: Any) -> Dict[str, str]:
        # Transform Dict[Tuple[str, str, str], str] -> Dict[str, str]
        if isinstance(v, dict):
            new_dict = {}
            for key, val in v.items():
                if isinstance(key, tuple):
                    new_key = f"{key[0]}|{key[1]}|{key[2]}"
                else:
                    new_key = str(key)
                new_dict[new_key] = serialize_value_recursive(val)
            return new_dict
        return v

    @field_validator(
        "app_root", "run_dir", "config_file", "model_config_path", "test_config",
        mode="before",
    )
    def serialize_paths(cls, v: Any) -> Optional[str]:
        if isinstance(v, Path):
            return str(v)
        return v