diff --git a/.github/workflows/full-test-jsonl.yml b/.github/workflows/full-test-jsonl.yml new file mode 100644 index 00000000..9b79cd19 --- /dev/null +++ b/.github/workflows/full-test-jsonl.yml @@ -0,0 +1,103 @@ +name: Container Test + +on: + pull_request: + branches: + - main + - dev + paths: + - "Dockerfile" + - "src/**" + - "docker-compose*.yml" + - ".last_release" + - "pyproject.toml" + - "uv.lock" + +jobs: + test-container-jsonl: + runs-on: ubuntu-latest + steps: + - name: Checkout Repository + uses: actions/checkout@v6 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Read Photon version from .last_release + id: photon_version + run: | + PHOTON_VERSION=$(cat .last_release | tr -d '[:space:]') + if [[ -z "$PHOTON_VERSION" || ! "$PHOTON_VERSION" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then + echo "Error: .last_release is missing, empty, or contains an invalid version: '$PHOTON_VERSION'" + exit 1 + fi + echo "PHOTON_VERSION=$PHOTON_VERSION" >> "$GITHUB_ENV" + echo "Photon Version: $PHOTON_VERSION" + + - name: Build test image + uses: docker/build-push-action@v6 + with: + context: . + file: ./Dockerfile + build-args: | + PHOTON_VERSION=${{ env.PHOTON_VERSION }} + push: false + load: true + tags: photon-test:pr-${{ github.event.pull_request.number }} + platforms: linux/amd64 + cache-from: type=gha + cache-to: type=gha,mode=max + + - name: Start container + run: | + docker run -d \ + --name photon-test-pr-${{ github.event.pull_request.number }} \ + -e REGION=andorra \ + -e IMPORT_MODE=jsonl \ + -e UPDATE_STRATEGY=DISABLED \ + photon-test:pr-${{ github.event.pull_request.number }} + + - name: Wait for container to be healthy + run: | + echo "Waiting for container to become healthy (timeout: 6 minutes)..." + CONTAINER_NAME=photon-test-pr-${{ github.event.pull_request.number }} + + docker logs -f $CONTAINER_NAME & + LOGS_PID=$! 
+ + ELAPSED=0 + TIMEOUT=360 + + while [ $ELAPSED -lt $TIMEOUT ]; do + HEALTH_STATUS=$(docker inspect --format='{{.State.Health.Status}}' $CONTAINER_NAME 2>/dev/null || echo "unknown") + + if [ "$HEALTH_STATUS" = "healthy" ]; then + echo "Container is healthy after $ELAPSED seconds" + kill $LOGS_PID 2>/dev/null || true + exit 0 + fi + + echo "Health status: $HEALTH_STATUS (elapsed: ${ELAPSED}s)" + sleep 10 + ELAPSED=$((ELAPSED + 10)) + done + + kill $LOGS_PID 2>/dev/null || true + echo "Container failed to become healthy within $TIMEOUT seconds" + docker logs $CONTAINER_NAME + exit 1 + + - name: Cleanup + if: always() + run: | + docker stop photon-test-pr-${{ github.event.pull_request.number }} || true + docker rm photon-test-pr-${{ github.event.pull_request.number }} || true + docker rmi photon-test:pr-${{ github.event.pull_request.number }} || true + + - name: Output summary + if: always() + run: | + echo "## Container Test Summary" >> $GITHUB_STEP_SUMMARY + echo "- **PR Number:** ${{ github.event.pull_request.number }}" >> $GITHUB_STEP_SUMMARY + echo "- **Photon Version:** ${{ env.PHOTON_VERSION }}" >> $GITHUB_STEP_SUMMARY + echo "- **Status:** ${{ job.status }}" >> $GITHUB_STEP_SUMMARY diff --git a/pyproject.toml b/pyproject.toml index dcfd348c..b05a6bc3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,6 +10,7 @@ dependencies = [ "requests==2.32.5", "schedule>=1.2.2", "tqdm==4.67.3", + "zstandard>=0.23.0", ] [dependency-groups] diff --git a/src/entrypoint.py b/src/entrypoint.py index ac6e930d..308d939d 100644 --- a/src/entrypoint.py +++ b/src/entrypoint.py @@ -3,6 +3,7 @@ from src.check_remote import check_index_age from src.downloader import InsufficientSpaceError, parallel_update, sequential_update +from src.importer import run_jsonl_import from src.utils import config from src.utils.logger import get_logger, setup_logging from src.utils.notify import send_notification @@ -12,14 +13,15 @@ logger = get_logger() -def main(): - 
send_notification("Photon-Docker Initializing") - - logger.debug("Entrypoint setup called") +def log_config() -> None: logger.info("=== CONFIG VARIABLES ===") + logger.info(f"IMPORT_MODE: {config.IMPORT_MODE}") logger.info(f"UPDATE_STRATEGY: {config.UPDATE_STRATEGY}") logger.info(f"UPDATE_INTERVAL: {config.UPDATE_INTERVAL}") logger.info(f"REGION: {config.REGION}") + logger.info(f"LANGUAGES: {config.LANGUAGES}") + logger.info(f"EXTRA_TAGS: {config.EXTRA_TAGS}") + logger.info(f"IMPORT_GEOMETRIES: {config.IMPORT_GEOMETRIES}") logger.info(f"FORCE_UPDATE: {config.FORCE_UPDATE}") logger.info(f"DOWNLOAD_MAX_RETRIES: {config.DOWNLOAD_MAX_RETRIES}") logger.info(f"FILE_URL (sanitized): {sanitize_url(config.FILE_URL)}") @@ -39,6 +41,32 @@ def main(): logger.info("=== END CONFIG VARIABLES ===") + +def run_update_or_import(force_update: bool = False) -> None: + if config.IMPORT_MODE == "jsonl": + action = "forced JSONL import" if force_update else "initial JSONL import" + logger.info(f"Starting {action}") + run_jsonl_import() + return + + if not force_update: + logger.info("Starting initial download using sequential strategy") + logger.info("Note: Initial download will use sequential strategy regardless of config setting") + sequential_update() + return + + if config.UPDATE_STRATEGY == "PARALLEL": + parallel_update() + else: + sequential_update() + + +def main(): + send_notification("Photon-Docker Initializing") + + logger.debug("Entrypoint setup called") + log_config() + try: validate_config() except ValueError as e: @@ -51,10 +79,7 @@ def main(): if config.FORCE_UPDATE: logger.info("Starting forced update") try: - if config.UPDATE_STRATEGY == "PARALLEL": - parallel_update() - else: - sequential_update() + run_update_or_import(force_update=True) except InsufficientSpaceError as e: logger.error(f"Cannot proceed with force update: {e}") send_notification(f"Photon-Docker force update failed: {e}") @@ -66,17 +91,22 @@ def main(): if not config.INITIAL_DOWNLOAD: 
logger.warning("Initial download is disabled but no existing Photon index was found. ") return - logger.info("Starting initial download using sequential strategy") - logger.info("Note: Initial download will use sequential strategy regardless of config setting") try: - sequential_update() + run_update_or_import(force_update=False) except InsufficientSpaceError as e: logger.error(f"Cannot proceed: {e}") send_notification(f"Photon-Docker cannot start: {e}") sys.exit(75) + except Exception: + logger.error("Initial setup failed") + raise else: logger.info("Existing index found, skipping download") + if config.IMPORT_MODE == "jsonl": + logger.info("JSONL mode with existing index found, skipping automatic rebuild during setup") + return + if config.MIN_INDEX_DATE and check_index_age(): logger.info("Index is older than minimum required date, starting sequential update") try: diff --git a/src/importer.py b/src/importer.py new file mode 100644 index 00000000..da082b2f --- /dev/null +++ b/src/importer.py @@ -0,0 +1,64 @@ +import os +import shlex +import subprocess + +from src.filesystem import clear_temp_dir +from src.jsonl.decompressor import stream_decompress +from src.jsonl.downloader import download_jsonl +from src.utils import config +from src.utils.logger import get_logger +from src.utils.regions import get_regions_for_jsonl + +logger = get_logger(__name__) + + +def run_jsonl_import() -> None: + regions = get_regions_for_jsonl(config.get_jsonl_regions()) + if len(regions) != 1: + raise ValueError("JSONL mode currently supports exactly one region.") + + region = regions[0] + + try: + jsonl_path = download_jsonl(region) + import_proc = _start_photon_import("-") + try: + if import_proc.stdin is None: + raise RuntimeError("Photon import process stdin is unavailable") + for chunk in stream_decompress(jsonl_path): + import_proc.stdin.write(chunk) + + import_proc.stdin.close() + return_code = import_proc.wait() + if return_code != 0: + raise RuntimeError(f"Photon JSONL import 
failed with exit code {return_code}") + except Exception: + import_proc.kill() + import_proc.wait() + raise + finally: + clear_temp_dir() + + +def _start_photon_import(input_source: str) -> subprocess.Popen: + os.makedirs(config.DATA_DIR, exist_ok=True) + + cmd = ["java"] + if config.JAVA_PARAMS: + cmd.extend(shlex.split(config.JAVA_PARAMS)) + + cmd.extend(["-jar", "/photon/photon.jar", "import", "-import-file", input_source, "-data-dir", config.DATA_DIR]) + + languages = config.get_languages() + if languages: + cmd.extend(["-languages", ",".join(languages)]) + + extra_tags = config.get_extra_tags() + if extra_tags: + cmd.extend(["-extra-tags", ",".join(extra_tags)]) + + if config.IMPORT_GEOMETRIES: + cmd.append("-full-geometries") + + logger.info(f"Starting Photon JSONL import for region(s): {', '.join(config.get_jsonl_regions())}") + return subprocess.Popen(cmd, cwd=config.PHOTON_DIR, stdin=subprocess.PIPE) # noqa: S603 diff --git a/src/jsonl/__init__.py b/src/jsonl/__init__.py new file mode 100644 index 00000000..54590dd7 --- /dev/null +++ b/src/jsonl/__init__.py @@ -0,0 +1,3 @@ +from src.jsonl.downloader import download_jsonl + +__all__ = ["download_jsonl"] diff --git a/src/jsonl/decompressor.py b/src/jsonl/decompressor.py new file mode 100644 index 00000000..7384534e --- /dev/null +++ b/src/jsonl/decompressor.py @@ -0,0 +1,7 @@ +import zstandard as zstd + + +def stream_decompress(input_path: str, read_size: int = 65536): + dctx = zstd.ZstdDecompressor() + with open(input_path, "rb") as file_handle: + yield from dctx.read_to_iter(file_handle, read_size=read_size) diff --git a/src/jsonl/downloader.py b/src/jsonl/downloader.py new file mode 100644 index 00000000..b8f37930 --- /dev/null +++ b/src/jsonl/downloader.py @@ -0,0 +1,46 @@ +import os + +from src.downloader import download_file +from src.utils import config +from src.utils.logger import get_logger +from src.utils.regions import get_jsonl_filename, get_region_info, normalize_region + +logger = 
get_logger(__name__) + + +def get_jsonl_url(region: str) -> str: + normalized_region = normalize_region(region) + if normalized_region is None: + raise ValueError(f"Unknown region: {region}") + + region_info = get_region_info(normalized_region) + if not region_info: + raise ValueError(f"Unknown region: {region}") + + filename = get_jsonl_filename(normalized_region, config.JSONL_FILE_EXTENSION, config.JSONL_RELEASE_CHANNEL) + + if region_info["type"] == "planet": + return f"{config.BASE_URL}/{filename}" + if region_info["type"] == "continent": + return f"{config.BASE_URL}/{normalized_region}/{filename}" + + continent = region_info["continent"] + return f"{config.BASE_URL}/{continent}/{normalized_region}/{filename}" + + +def download_jsonl(region: str) -> str: + os.makedirs(config.TEMP_DIR, exist_ok=True) + + normalized_region = normalize_region(region) + if normalized_region is None: + raise ValueError(f"Unknown region: {region}") + + download_url = get_jsonl_url(normalized_region) + output_path = os.path.join(config.TEMP_DIR, f"{normalized_region}.{config.JSONL_FILE_EXTENSION}") + + logger.info(f"Downloading JSONL dump for {normalized_region} from {download_url}") + + if not download_file(download_url, output_path): + raise RuntimeError(f"Failed to download JSONL dump from {download_url}") + + return output_path diff --git a/src/process_manager.py b/src/process_manager.py index 392beb06..9c2a4013 100644 --- a/src/process_manager.py +++ b/src/process_manager.py @@ -205,6 +205,10 @@ def run_update(self): logger.info("Updates disabled, skipping") return + + if config.IMPORT_MODE == "jsonl": + logger.info("Scheduled JSONL rebuilds are not implemented yet, skipping") + return + self.state = AppState.UPDATING logger.info(f"Running {config.UPDATE_STRATEGY.lower()} update...") update_start = time.time() @@ -257,6 +261,10 @@ def schedule_updates(self): logger.info("Updates disabled, not scheduling") return + + if config.IMPORT_MODE == "jsonl": + logger.info("Skipping scheduled 
updates in JSONL mode until rebuild support is implemented") + return + interval = config.UPDATE_INTERVAL.lower() if interval.endswith("d"): diff --git a/src/updater.py b/src/updater.py index f11bf548..0a20f735 100644 --- a/src/updater.py +++ b/src/updater.py @@ -12,6 +12,10 @@ def main(): logger.info("Starting update process...") try: + if config.IMPORT_MODE == "jsonl": + logger.info("Scheduled JSONL rebuilds are not implemented yet, skipping updater run") + return + if config.UPDATE_STRATEGY == "PARALLEL": logger.info("Running parallel update...") parallel_update() diff --git a/src/utils/config.py b/src/utils/config.py index 2d79a205..73e580aa 100644 --- a/src/utils/config.py +++ b/src/utils/config.py @@ -1,9 +1,13 @@ import os # USER CONFIG +IMPORT_MODE = os.getenv("IMPORT_MODE", "db") UPDATE_STRATEGY = os.getenv("UPDATE_STRATEGY", "SEQUENTIAL") UPDATE_INTERVAL = os.getenv("UPDATE_INTERVAL", "30d") REGION = os.getenv("REGION") +LANGUAGES = os.getenv("LANGUAGES") +EXTRA_TAGS = os.getenv("EXTRA_TAGS") +IMPORT_GEOMETRIES = os.getenv("IMPORT_GEOMETRIES", "False").lower() in ("true", "1", "t") FORCE_UPDATE = os.getenv("FORCE_UPDATE", "False").lower() in ("true", "1", "t") DOWNLOAD_MAX_RETRIES = os.getenv("DOWNLOAD_MAX_RETRIES", "3") FILE_URL = os.getenv("FILE_URL") @@ -23,6 +27,8 @@ # APP CONFIG INDEX_DB_VERSION = "1.0" INDEX_FILE_EXTENSION = "tar.bz2" +JSONL_FILE_EXTENSION = "jsonl.zst" +JSONL_RELEASE_CHANNEL = "master" PHOTON_DIR = "/photon" DATA_DIR = "/photon/data" @@ -30,6 +36,27 @@ TEMP_DIR = os.path.join(DATA_DIR, "temp") OS_NODE_DIR = os.path.join(PHOTON_DATA_DIR, "node_1") + +def get_languages() -> list[str] | None: + return _get_csv_values(LANGUAGES) + + +def get_extra_tags() -> list[str] | None: + return _get_csv_values(EXTRA_TAGS) + + +def get_jsonl_regions() -> list[str]: + return _get_csv_values(REGION) or [] + + +def _get_csv_values(value: str | None) -> list[str] | None: + if not value: + return None + + values = [item.strip() for item in 
value.split(",") if item.strip()] + return values or None + + if FILE_URL: UPDATE_STRATEGY = "DISABLED" if not MD5_URL: diff --git a/src/utils/regions.py b/src/utils/regions.py index 74e17a9b..ba38b65c 100644 --- a/src/utils/regions.py +++ b/src/utils/regions.py @@ -1,27 +1,32 @@ REGION_MAPPING = { - "planet": {"type": "planet", "continent": None, "available": True}, - "africa": {"type": "continent", "continent": "africa", "available": True}, - "asia": {"type": "continent", "continent": "asia", "available": True}, - "australia-oceania": {"type": "continent", "continent": "australia-oceania", "available": True}, - "europe": {"type": "continent", "continent": "europe", "available": True}, - "north-america": {"type": "continent", "continent": "north-america", "available": True}, - "south-america": {"type": "continent", "continent": "south-america", "available": True}, - "india": {"type": "sub-region", "continent": "asia", "available": True}, - "japan": {"type": "sub-region", "continent": "asia", "available": True}, - "andorra": {"type": "sub-region", "continent": "europe", "available": True}, - "austria": {"type": "sub-region", "continent": "europe", "available": True}, - "denmark": {"type": "sub-region", "continent": "europe", "available": True}, - "france-monacco": {"type": "sub-region", "continent": "europe", "available": True}, - "germany": {"type": "sub-region", "continent": "europe", "available": True}, - "luxemburg": {"type": "sub-region", "continent": "europe", "available": True}, - "netherlands": {"type": "sub-region", "continent": "europe", "available": True}, - "russia": {"type": "sub-region", "continent": "europe", "available": True}, - "slovakia": {"type": "sub-region", "continent": "europe", "available": True}, - "spain": {"type": "sub-region", "continent": "europe", "available": True}, - "canada": {"type": "sub-region", "continent": "north-america", "available": True}, - "mexico": {"type": "sub-region", "continent": "north-america", "available": True}, 
- "usa": {"type": "sub-region", "continent": "north-america", "available": True}, - "argentina": {"type": "sub-region", "continent": "south-america", "available": True}, + "planet": {"type": "planet", "continent": None, "db_available": True, "jsonl_available": True}, + "africa": {"type": "continent", "continent": "africa", "db_available": True, "jsonl_available": True}, + "asia": {"type": "continent", "continent": "asia", "db_available": True, "jsonl_available": True}, + "australia-oceania": { + "type": "continent", + "continent": "australia-oceania", + "db_available": True, + "jsonl_available": True, + }, + "europe": {"type": "continent", "continent": "europe", "db_available": True, "jsonl_available": True}, + "north-america": {"type": "continent", "continent": "north-america", "db_available": True, "jsonl_available": True}, + "south-america": {"type": "continent", "continent": "south-america", "db_available": True, "jsonl_available": True}, + "india": {"type": "sub-region", "continent": "asia", "db_available": True, "jsonl_available": True}, + "japan": {"type": "sub-region", "continent": "asia", "db_available": True, "jsonl_available": True}, + "andorra": {"type": "sub-region", "continent": "europe", "db_available": True, "jsonl_available": True}, + "austria": {"type": "sub-region", "continent": "europe", "db_available": True, "jsonl_available": True}, + "denmark": {"type": "sub-region", "continent": "europe", "db_available": True, "jsonl_available": True}, + "france-monacco": {"type": "sub-region", "continent": "europe", "db_available": True, "jsonl_available": True}, + "germany": {"type": "sub-region", "continent": "europe", "db_available": True, "jsonl_available": True}, + "luxemburg": {"type": "sub-region", "continent": "europe", "db_available": True, "jsonl_available": True}, + "netherlands": {"type": "sub-region", "continent": "europe", "db_available": True, "jsonl_available": True}, + "russia": {"type": "sub-region", "continent": "europe", "db_available": 
True, "jsonl_available": True}, + "slovakia": {"type": "sub-region", "continent": "europe", "db_available": True, "jsonl_available": True}, + "spain": {"type": "sub-region", "continent": "europe", "db_available": True, "jsonl_available": True}, + "canada": {"type": "sub-region", "continent": "north-america", "db_available": True, "jsonl_available": True}, + "mexico": {"type": "sub-region", "continent": "north-america", "db_available": True, "jsonl_available": True}, + "usa": {"type": "sub-region", "continent": "north-america", "db_available": True, "jsonl_available": True}, + "argentina": {"type": "sub-region", "continent": "south-america", "db_available": True, "jsonl_available": True}, } REGION_ALIASES = { @@ -82,6 +87,10 @@ def get_index_filename(region_name: str, db_version: str, extension: str) -> str return f"photon-db-{region_name}-{db_version}-latest.{extension}" +def get_jsonl_filename(region_name: str, extension: str, channel: str = "master") -> str: + return f"photon-dump-{region_name}-{channel}-latest.{extension}" + + def get_index_url_path(region: str | None, db_version: str, extension: str) -> str: if region: normalized = normalize_region(region) @@ -106,3 +115,43 @@ def get_index_url_path(region: str | None, db_version: str, extension: str) -> s raise ValueError(f"Invalid region type: {region_type}") return f"/{get_index_filename('planet', db_version, extension)}" + + +def get_jsonl_url_path(region: str, extension: str) -> str: + normalized = normalize_region(region) + if normalized is None: + raise ValueError(f"Unknown region: {region}") + + region_info = get_region_info(region) + if not region_info: + raise ValueError(f"Unknown region: {region}") + + if not region_info.get("jsonl_available", False): + raise ValueError(f"JSONL not available for region: {region}") + + filename = get_jsonl_filename(normalized, extension) + region_type = region_info["type"] + + if region_type == "planet": + return f"/{filename}" + if region_type == "continent": + 
return f"/{normalized}/{filename}" + if region_type == "sub-region": + continent = region_info["continent"] + return f"/{continent}/{normalized}/{filename}" + + raise ValueError(f"Invalid region type: {region_type}") + + +def get_regions_for_jsonl(regions: list[str]) -> list[str]: + validated_regions = [] + + for region in regions: + region_info = get_region_info(region) + if not region_info: + raise ValueError(f"Unknown region: {region}") + if not region_info.get("jsonl_available", False): + raise ValueError(f"JSONL not available for region: {region}") + validated_regions.append(normalize_region(region)) + + return [region for region in validated_regions if region] diff --git a/src/utils/validate_config.py b/src/utils/validate_config.py index d5337e10..9fc846be 100644 --- a/src/utils/validate_config.py +++ b/src/utils/validate_config.py @@ -2,7 +2,7 @@ from src.utils import config from src.utils.logger import get_logger -from src.utils.regions import is_valid_region +from src.utils.regions import get_regions_for_jsonl, is_valid_region logging = get_logger() @@ -11,6 +11,10 @@ def validate_config(): logging.info("Validating environment variables...") error_messages = [] + valid_import_modes = ["db", "jsonl"] + if config.IMPORT_MODE not in valid_import_modes: + error_messages.append(f"Invalid IMPORT_MODE: '{config.IMPORT_MODE}'. Must be one of {valid_import_modes}.") + valid_strategies = ["SEQUENTIAL", "PARALLEL", "DISABLED"] if config.UPDATE_STRATEGY not in valid_strategies: error_messages.append( @@ -22,8 +26,28 @@ f"Invalid UPDATE_INTERVAL format: '{config.UPDATE_INTERVAL}'. Expected format like '30d', '12h', or '30m'." ) - if config.REGION and not is_valid_region(config.REGION): - error_messages.append(f"Invalid REGION: '{config.REGION}'. 
Must be a valid continent, sub-region, or 'planet'.") + if config.IMPORT_MODE == "db": + if config.REGION and not is_valid_region(config.REGION): + error_messages.append( + f"Invalid REGION: '{config.REGION}'. Must be a valid continent, sub-region, or 'planet'." + ) + if config.REGION and len(config.get_jsonl_regions()) > 1: + error_messages.append("DB mode supports exactly one region in REGION.") + + if config.IMPORT_MODE == "jsonl": + if config.FILE_URL: + error_messages.append("FILE_URL is not supported when IMPORT_MODE=jsonl.") + if config.MD5_URL: + error_messages.append("MD5_URL is not supported when IMPORT_MODE=jsonl.") + if not config.get_jsonl_regions(): + error_messages.append("REGION is required when IMPORT_MODE=jsonl.") + else: + try: + validated_regions = get_regions_for_jsonl(config.get_jsonl_regions()) + if len(validated_regions) != 1: + error_messages.append("JSONL mode currently supports exactly one region.") + except ValueError as exc: + error_messages.append(str(exc)) if error_messages: full_error_message = "Configuration validation failed:\n" + "\n".join(error_messages) diff --git a/tests/jsonl/test_downloader.py b/tests/jsonl/test_downloader.py new file mode 100644 index 00000000..17ac8a41 --- /dev/null +++ b/tests/jsonl/test_downloader.py @@ -0,0 +1,6 @@ +from src.jsonl.downloader import get_jsonl_url +from src.utils import config + + +def test_get_jsonl_url_uses_base_url(): + assert get_jsonl_url("germany") == f"{config.BASE_URL}/europe/germany/photon-dump-germany-master-latest.jsonl.zst" diff --git a/tests/test_entrypoint.py b/tests/test_entrypoint.py new file mode 100644 index 00000000..de2ff1e2 --- /dev/null +++ b/tests/test_entrypoint.py @@ -0,0 +1,84 @@ +import pytest + +from src import entrypoint +from src.utils import config + + +def _set_base_config(monkeypatch: pytest.MonkeyPatch): + monkeypatch.setattr(config, "IMPORT_MODE", "db") + monkeypatch.setattr(config, "FORCE_UPDATE", False) + monkeypatch.setattr(config, "INITIAL_DOWNLOAD", 
True) + monkeypatch.setattr(config, "MIN_INDEX_DATE", None) + monkeypatch.setattr(config, "UPDATE_STRATEGY", "SEQUENTIAL") + + +def test_main_runs_jsonl_import_when_no_index_exists(monkeypatch: pytest.MonkeyPatch): + calls = [] + + _set_base_config(monkeypatch) + monkeypatch.setattr(config, "IMPORT_MODE", "jsonl") + monkeypatch.setattr(entrypoint, "send_notification", lambda message: None) + monkeypatch.setattr(entrypoint, "log_config", lambda: None) + monkeypatch.setattr(entrypoint, "validate_config", lambda: None) + monkeypatch.setattr(entrypoint.os.path, "isdir", lambda path: False) + monkeypatch.setattr(entrypoint, "run_update_or_import", lambda force_update=False: calls.append(force_update)) + + entrypoint.main() + + assert calls == [False] + + +def test_main_skips_jsonl_rebuild_when_index_exists(monkeypatch: pytest.MonkeyPatch): + calls = [] + + _set_base_config(monkeypatch) + monkeypatch.setattr(config, "IMPORT_MODE", "jsonl") + monkeypatch.setattr(entrypoint, "send_notification", lambda message: None) + monkeypatch.setattr(entrypoint, "log_config", lambda: None) + monkeypatch.setattr(entrypoint, "validate_config", lambda: None) + monkeypatch.setattr(entrypoint.os.path, "isdir", lambda path: True) + monkeypatch.setattr(entrypoint, "run_update_or_import", lambda force_update=False: calls.append(force_update)) + + entrypoint.main() + + assert calls == [] + + +def test_main_uses_force_update_path_for_jsonl(monkeypatch: pytest.MonkeyPatch): + calls = [] + + _set_base_config(monkeypatch) + monkeypatch.setattr(config, "IMPORT_MODE", "jsonl") + monkeypatch.setattr(config, "FORCE_UPDATE", True) + monkeypatch.setattr(entrypoint, "send_notification", lambda message: None) + monkeypatch.setattr(entrypoint, "log_config", lambda: None) + monkeypatch.setattr(entrypoint, "validate_config", lambda: None) + monkeypatch.setattr(entrypoint, "run_update_or_import", lambda force_update=False: calls.append(force_update)) + + entrypoint.main() + + assert calls == [True] + + +def 
test_run_update_or_import_uses_db_parallel_update(monkeypatch: pytest.MonkeyPatch): + calls = [] + + _set_base_config(monkeypatch) + monkeypatch.setattr(config, "UPDATE_STRATEGY", "PARALLEL") + monkeypatch.setattr(entrypoint, "parallel_update", lambda: calls.append("parallel")) + monkeypatch.setattr(entrypoint, "sequential_update", lambda: calls.append("sequential")) + + entrypoint.run_update_or_import(force_update=True) + + assert calls == ["parallel"] + + +def test_run_update_or_import_uses_db_sequential_update_for_initial_setup(monkeypatch: pytest.MonkeyPatch): + calls = [] + + _set_base_config(monkeypatch) + monkeypatch.setattr(entrypoint, "sequential_update", lambda: calls.append("sequential")) + + entrypoint.run_update_or_import(force_update=False) + + assert calls == ["sequential"] diff --git a/tests/test_importer.py b/tests/test_importer.py new file mode 100644 index 00000000..80f2c186 --- /dev/null +++ b/tests/test_importer.py @@ -0,0 +1,135 @@ +import io + +import pytest + +from src import importer +from src.utils import config + + +def _noop_makedirs(path: str, exist_ok: bool = False) -> None: + _ = (path, exist_ok) + + +def test_start_photon_import_builds_expected_command(monkeypatch): + commands = [] + + class DummyProcess: + def __init__(self): + self.stdin = io.BytesIO() + + def fake_popen(cmd, cwd, stdin): + commands.append({"cmd": cmd, "cwd": cwd, "stdin": stdin}) + return DummyProcess() + + monkeypatch.setattr(config, "JAVA_PARAMS", "-Xmx2g") + monkeypatch.setattr(config, "LANGUAGES", "en,de") + monkeypatch.setattr(config, "EXTRA_TAGS", "website,phone") + monkeypatch.setattr(config, "IMPORT_GEOMETRIES", True) + monkeypatch.setattr(importer.os, "makedirs", _noop_makedirs) + monkeypatch.setattr(importer.subprocess, "Popen", fake_popen) + + importer._start_photon_import("-") + + assert commands == [ + { + "cmd": [ + "java", + "-Xmx2g", + "-jar", + "/photon/photon.jar", + "import", + "-import-file", + "-", + "-data-dir", + config.DATA_DIR, + 
"-languages", + "en,de", + "-extra-tags", + "website,phone", + "-full-geometries", + ], + "cwd": config.PHOTON_DIR, + "stdin": importer.subprocess.PIPE, + } + ] + + +class RecordingProcess: + def __init__(self, wait_return_code: int = 0): + self.stdin = RecordingStdin() + self.wait_calls = 0 + self.kill_calls = 0 + self.wait_return_code = wait_return_code + + def wait(self): + self.wait_calls += 1 + return self.wait_return_code + + def kill(self): + self.kill_calls += 1 + + +class RecordingStdin(io.BytesIO): + def close(self): + self.was_closed = True + + +def test_run_jsonl_import_streams_data_and_cleans_up(monkeypatch): + process = RecordingProcess() + cleanup_calls = [] + fake_path = "/photon/data/temp/andorra.jsonl.zst" + + monkeypatch.setattr(config, "REGION", "andorra") + monkeypatch.setattr(importer, "download_jsonl", lambda region: fake_path) + monkeypatch.setattr(importer, "stream_decompress", lambda path: [b'{"type":"Place"}\n', b'{"type":"Place2"}\n']) + monkeypatch.setattr(importer, "_start_photon_import", lambda input_source: process) + monkeypatch.setattr(importer, "clear_temp_dir", lambda: cleanup_calls.append(True)) + + importer.run_jsonl_import() + + assert process.stdin.getvalue() == b'{"type":"Place"}\n{"type":"Place2"}\n' + assert process.wait_calls == 1 + assert process.kill_calls == 0 + assert cleanup_calls == [True] + + +def test_run_jsonl_import_kills_process_and_cleans_up_on_stream_failure(monkeypatch): + process = RecordingProcess() + cleanup_calls = [] + fake_path = "/photon/data/temp/andorra.jsonl.zst" + + def broken_stream(path): + yield b'{"type":"Place"}\n' + raise RuntimeError("boom") + + monkeypatch.setattr(config, "REGION", "andorra") + monkeypatch.setattr(importer, "download_jsonl", lambda region: fake_path) + monkeypatch.setattr(importer, "stream_decompress", broken_stream) + monkeypatch.setattr(importer, "_start_photon_import", lambda input_source: process) + monkeypatch.setattr(importer, "clear_temp_dir", lambda: 
cleanup_calls.append(True)) + + with pytest.raises(RuntimeError, match="boom"): + importer.run_jsonl_import() + + assert process.kill_calls == 1 + assert process.wait_calls == 1 + assert cleanup_calls == [True] + + +def test_run_jsonl_import_raises_when_import_process_fails(monkeypatch): + process = RecordingProcess(wait_return_code=2) + cleanup_calls = [] + fake_path = "/photon/data/temp/andorra.jsonl.zst" + + monkeypatch.setattr(config, "REGION", "andorra") + monkeypatch.setattr(importer, "download_jsonl", lambda region: fake_path) + monkeypatch.setattr(importer, "stream_decompress", lambda path: [b'{"type":"Place"}\n']) + monkeypatch.setattr(importer, "_start_photon_import", lambda input_source: process) + monkeypatch.setattr(importer, "clear_temp_dir", lambda: cleanup_calls.append(True)) + + with pytest.raises(RuntimeError, match="exit code 2"): + importer.run_jsonl_import() + + assert process.kill_calls == 1 + assert process.wait_calls == 2 + assert cleanup_calls == [True] diff --git a/tests/utils/test_regions.py b/tests/utils/test_regions.py index ae428c0b..fb88a9aa 100644 --- a/tests/utils/test_regions.py +++ b/tests/utils/test_regions.py @@ -1,6 +1,13 @@ import pytest -from src.utils.regions import get_index_url_path, get_region_info, is_valid_region, normalize_region +from src.utils.regions import ( + get_index_url_path, + get_jsonl_url_path, + get_region_info, + get_regions_for_jsonl, + is_valid_region, + normalize_region, +) @pytest.mark.parametrize( @@ -28,7 +35,12 @@ def test_is_valid_region(region: str, expected: bool): def test_get_region_info_for_alias_returns_canonical_region_metadata(): - assert get_region_info("us") == {"type": "sub-region", "continent": "north-america", "available": True} + assert get_region_info("us") == { + "type": "sub-region", + "continent": "north-america", + "db_available": True, + "jsonl_available": True, + } @pytest.mark.parametrize( @@ -48,3 +60,19 @@ def test_get_index_url_path(region: str | None, expected: str): def 
test_get_index_url_path_raises_for_unknown_region(): with pytest.raises(ValueError, match="Unknown region: atlantis"): get_index_url_path("atlantis", "1.0", "tar.bz2") + + +@pytest.mark.parametrize( + ("region", "expected"), + [ + ("planet", "/photon-dump-planet-master-latest.jsonl.zst"), + ("europe", "/europe/photon-dump-europe-master-latest.jsonl.zst"), + ("us", "/north-america/usa/photon-dump-usa-master-latest.jsonl.zst"), + ], +) +def test_get_jsonl_url_path(region: str, expected: str): + assert get_jsonl_url_path(region, "jsonl.zst") == expected + + +def test_get_regions_for_jsonl_normalizes_aliases(): + assert get_regions_for_jsonl(["DE"]) == ["germany"] diff --git a/tests/utils/test_validate_config.py b/tests/utils/test_validate_config.py index 678b9fe1..63e423b1 100644 --- a/tests/utils/test_validate_config.py +++ b/tests/utils/test_validate_config.py @@ -5,9 +5,12 @@ def _set_base_config(monkeypatch: pytest.MonkeyPatch): + monkeypatch.setattr(config, "IMPORT_MODE", "db") monkeypatch.setattr(config, "UPDATE_STRATEGY", "SEQUENTIAL") monkeypatch.setattr(config, "UPDATE_INTERVAL", "30d") monkeypatch.setattr(config, "REGION", None) + monkeypatch.setattr(config, "FILE_URL", None) + monkeypatch.setattr(config, "MD5_URL", None) def test_validate_config_accepts_valid_configuration(monkeypatch: pytest.MonkeyPatch): @@ -42,6 +45,47 @@ def test_validate_config_rejects_invalid_region(monkeypatch: pytest.MonkeyPatch) validate_config() +def test_validate_config_rejects_invalid_import_mode(monkeypatch: pytest.MonkeyPatch): + _set_base_config(monkeypatch) + monkeypatch.setattr(config, "IMPORT_MODE", "archive") + + with pytest.raises(ValueError, match="Invalid IMPORT_MODE: 'archive'"): + validate_config() + + +def test_validate_config_accepts_jsonl_single_region(monkeypatch: pytest.MonkeyPatch): + _set_base_config(monkeypatch) + monkeypatch.setattr(config, "IMPORT_MODE", "jsonl") + monkeypatch.setattr(config, "REGION", "de") + + validate_config() + + +def 
test_validate_config_requires_regions_for_jsonl(monkeypatch: pytest.MonkeyPatch): + _set_base_config(monkeypatch) + monkeypatch.setattr(config, "IMPORT_MODE", "jsonl") + + with pytest.raises(ValueError, match="REGION is required when IMPORT_MODE=jsonl"): + validate_config() + + +def test_validate_config_rejects_multiple_jsonl_regions_for_now(monkeypatch: pytest.MonkeyPatch): + _set_base_config(monkeypatch) + monkeypatch.setattr(config, "IMPORT_MODE", "jsonl") + monkeypatch.setattr(config, "REGION", "de,fr") + + with pytest.raises(ValueError, match="currently supports exactly one region"): + validate_config() + + +def test_validate_config_rejects_multiple_db_regions(monkeypatch: pytest.MonkeyPatch): + _set_base_config(monkeypatch) + monkeypatch.setattr(config, "REGION", "germany,andorra") + + with pytest.raises(ValueError, match="DB mode supports exactly one region"): + validate_config() + + def test_validate_config_reports_multiple_errors(monkeypatch: pytest.MonkeyPatch): _set_base_config(monkeypatch) monkeypatch.setattr(config, "UPDATE_STRATEGY", "WRONG") diff --git a/uv.lock b/uv.lock index ff6300f3..b6614a3e 100644 --- a/uv.lock +++ b/uv.lock @@ -225,6 +225,7 @@ dependencies = [ { name = "requests" }, { name = "schedule" }, { name = "tqdm" }, + { name = "zstandard" }, ] [package.dev-dependencies] @@ -245,6 +246,7 @@ requires-dist = [ { name = "requests", specifier = "==2.32.5" }, { name = "schedule", specifier = ">=1.2.2" }, { name = "tqdm", specifier = "==4.67.3" }, + { name = "zstandard", specifier = ">=0.23.0" }, ] [package.metadata.requires-dev] @@ -529,3 +531,60 @@ sdist = { url = "https://files.pythonhosted.org/packages/8e/25/925f35db758a0f919 wheels = [ { url = "https://files.pythonhosted.org/packages/a0/56/0cc15b8ff2613c1d5c3dc1f3f576ede1c43868c1bc2e5ccaa2d4bcd7974d/vulture-2.14-py2.py3-none-any.whl", hash = "sha256:d9a90dba89607489548a49d557f8bac8112bd25d3cbc8aeef23e860811bd5ed9", size = 28915, upload-time = "2024-12-08T17:39:40.573Z" }, ] + 
+[[package]] +name = "zstandard" +version = "0.25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fd/aa/3e0508d5a5dd96529cdc5a97011299056e14c6505b678fd58938792794b1/zstandard-0.25.0.tar.gz", hash = "sha256:7713e1179d162cf5c7906da876ec2ccb9c3a9dcbdffef0cc7f70c3667a205f0b", size = 711513, upload-time = "2025-09-14T22:15:54.002Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/82/fc/f26eb6ef91ae723a03e16eddb198abcfce2bc5a42e224d44cc8b6765e57e/zstandard-0.25.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7b3c3a3ab9daa3eed242d6ecceead93aebbb8f5f84318d82cee643e019c4b73b", size = 795738, upload-time = "2025-09-14T22:16:56.237Z" }, + { url = "https://files.pythonhosted.org/packages/aa/1c/d920d64b22f8dd028a8b90e2d756e431a5d86194caa78e3819c7bf53b4b3/zstandard-0.25.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:913cbd31a400febff93b564a23e17c3ed2d56c064006f54efec210d586171c00", size = 640436, upload-time = "2025-09-14T22:16:57.774Z" }, + { url = "https://files.pythonhosted.org/packages/53/6c/288c3f0bd9fcfe9ca41e2c2fbfd17b2097f6af57b62a81161941f09afa76/zstandard-0.25.0-cp312-cp312-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:011d388c76b11a0c165374ce660ce2c8efa8e5d87f34996aa80f9c0816698b64", size = 5343019, upload-time = "2025-09-14T22:16:59.302Z" }, + { url = "https://files.pythonhosted.org/packages/1e/15/efef5a2f204a64bdb5571e6161d49f7ef0fffdbca953a615efbec045f60f/zstandard-0.25.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6dffecc361d079bb48d7caef5d673c88c8988d3d33fb74ab95b7ee6da42652ea", size = 5063012, upload-time = "2025-09-14T22:17:01.156Z" }, + { url = "https://files.pythonhosted.org/packages/b7/37/a6ce629ffdb43959e92e87ebdaeebb5ac81c944b6a75c9c47e300f85abdf/zstandard-0.25.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = 
"sha256:7149623bba7fdf7e7f24312953bcf73cae103db8cae49f8154dd1eadc8a29ecb", size = 5394148, upload-time = "2025-09-14T22:17:03.091Z" }, + { url = "https://files.pythonhosted.org/packages/e3/79/2bf870b3abeb5c070fe2d670a5a8d1057a8270f125ef7676d29ea900f496/zstandard-0.25.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:6a573a35693e03cf1d67799fd01b50ff578515a8aeadd4595d2a7fa9f3ec002a", size = 5451652, upload-time = "2025-09-14T22:17:04.979Z" }, + { url = "https://files.pythonhosted.org/packages/53/60/7be26e610767316c028a2cbedb9a3beabdbe33e2182c373f71a1c0b88f36/zstandard-0.25.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5a56ba0db2d244117ed744dfa8f6f5b366e14148e00de44723413b2f3938a902", size = 5546993, upload-time = "2025-09-14T22:17:06.781Z" }, + { url = "https://files.pythonhosted.org/packages/85/c7/3483ad9ff0662623f3648479b0380d2de5510abf00990468c286c6b04017/zstandard-0.25.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:10ef2a79ab8e2974e2075fb984e5b9806c64134810fac21576f0668e7ea19f8f", size = 5046806, upload-time = "2025-09-14T22:17:08.415Z" }, + { url = "https://files.pythonhosted.org/packages/08/b3/206883dd25b8d1591a1caa44b54c2aad84badccf2f1de9e2d60a446f9a25/zstandard-0.25.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aaf21ba8fb76d102b696781bddaa0954b782536446083ae3fdaa6f16b25a1c4b", size = 5576659, upload-time = "2025-09-14T22:17:10.164Z" }, + { url = "https://files.pythonhosted.org/packages/9d/31/76c0779101453e6c117b0ff22565865c54f48f8bd807df2b00c2c404b8e0/zstandard-0.25.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1869da9571d5e94a85a5e8d57e4e8807b175c9e4a6294e3b66fa4efb074d90f6", size = 4953933, upload-time = "2025-09-14T22:17:11.857Z" }, + { url = "https://files.pythonhosted.org/packages/18/e1/97680c664a1bf9a247a280a053d98e251424af51f1b196c6d52f117c9720/zstandard-0.25.0-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:809c5bcb2c67cd0ed81e9229d227d4ca28f82d0f778fc5fea624a9def3963f91", size = 5268008, upload-time = "2025-09-14T22:17:13.627Z" }, + { url = "https://files.pythonhosted.org/packages/1e/73/316e4010de585ac798e154e88fd81bb16afc5c5cb1a72eeb16dd37e8024a/zstandard-0.25.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f27662e4f7dbf9f9c12391cb37b4c4c3cb90ffbd3b1fb9284dadbbb8935fa708", size = 5433517, upload-time = "2025-09-14T22:17:16.103Z" }, + { url = "https://files.pythonhosted.org/packages/5b/60/dd0f8cfa8129c5a0ce3ea6b7f70be5b33d2618013a161e1ff26c2b39787c/zstandard-0.25.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99c0c846e6e61718715a3c9437ccc625de26593fea60189567f0118dc9db7512", size = 5814292, upload-time = "2025-09-14T22:17:17.827Z" }, + { url = "https://files.pythonhosted.org/packages/fc/5f/75aafd4b9d11b5407b641b8e41a57864097663699f23e9ad4dbb91dc6bfe/zstandard-0.25.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:474d2596a2dbc241a556e965fb76002c1ce655445e4e3bf38e5477d413165ffa", size = 5360237, upload-time = "2025-09-14T22:17:19.954Z" }, + { url = "https://files.pythonhosted.org/packages/ff/8d/0309daffea4fcac7981021dbf21cdb2e3427a9e76bafbcdbdf5392ff99a4/zstandard-0.25.0-cp312-cp312-win32.whl", hash = "sha256:23ebc8f17a03133b4426bcc04aabd68f8236eb78c3760f12783385171b0fd8bd", size = 436922, upload-time = "2025-09-14T22:17:24.398Z" }, + { url = "https://files.pythonhosted.org/packages/79/3b/fa54d9015f945330510cb5d0b0501e8253c127cca7ebe8ba46a965df18c5/zstandard-0.25.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffef5a74088f1e09947aecf91011136665152e0b4b359c42be3373897fb39b01", size = 506276, upload-time = "2025-09-14T22:17:21.429Z" }, + { url = "https://files.pythonhosted.org/packages/ea/6b/8b51697e5319b1f9ac71087b0af9a40d8a6288ff8025c36486e0c12abcc4/zstandard-0.25.0-cp312-cp312-win_arm64.whl", hash = "sha256:181eb40e0b6a29b3cd2849f825e0fa34397f649170673d385f3598ae17cca2e9", size = 462679, upload-time = "2025-09-14T22:17:23.147Z" }, + { url 
= "https://files.pythonhosted.org/packages/35/0b/8df9c4ad06af91d39e94fa96cc010a24ac4ef1378d3efab9223cc8593d40/zstandard-0.25.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec996f12524f88e151c339688c3897194821d7f03081ab35d31d1e12ec975e94", size = 795735, upload-time = "2025-09-14T22:17:26.042Z" }, + { url = "https://files.pythonhosted.org/packages/3f/06/9ae96a3e5dcfd119377ba33d4c42a7d89da1efabd5cb3e366b156c45ff4d/zstandard-0.25.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a1a4ae2dec3993a32247995bdfe367fc3266da832d82f8438c8570f989753de1", size = 640440, upload-time = "2025-09-14T22:17:27.366Z" }, + { url = "https://files.pythonhosted.org/packages/d9/14/933d27204c2bd404229c69f445862454dcc101cd69ef8c6068f15aaec12c/zstandard-0.25.0-cp313-cp313-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:e96594a5537722fdfb79951672a2a63aec5ebfb823e7560586f7484819f2a08f", size = 5343070, upload-time = "2025-09-14T22:17:28.896Z" }, + { url = "https://files.pythonhosted.org/packages/6d/db/ddb11011826ed7db9d0e485d13df79b58586bfdec56e5c84a928a9a78c1c/zstandard-0.25.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bfc4e20784722098822e3eee42b8e576b379ed72cca4a7cb856ae733e62192ea", size = 5063001, upload-time = "2025-09-14T22:17:31.044Z" }, + { url = "https://files.pythonhosted.org/packages/db/00/87466ea3f99599d02a5238498b87bf84a6348290c19571051839ca943777/zstandard-0.25.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:457ed498fc58cdc12fc48f7950e02740d4f7ae9493dd4ab2168a47c93c31298e", size = 5394120, upload-time = "2025-09-14T22:17:32.711Z" }, + { url = "https://files.pythonhosted.org/packages/2b/95/fc5531d9c618a679a20ff6c29e2b3ef1d1f4ad66c5e161ae6ff847d102a9/zstandard-0.25.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:fd7a5004eb1980d3cefe26b2685bcb0b17989901a70a1040d1ac86f1d898c551", size = 5451230, upload-time = 
"2025-09-14T22:17:34.41Z" }, + { url = "https://files.pythonhosted.org/packages/63/4b/e3678b4e776db00f9f7b2fe58e547e8928ef32727d7a1ff01dea010f3f13/zstandard-0.25.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8e735494da3db08694d26480f1493ad2cf86e99bdd53e8e9771b2752a5c0246a", size = 5547173, upload-time = "2025-09-14T22:17:36.084Z" }, + { url = "https://files.pythonhosted.org/packages/4e/d5/ba05ed95c6b8ec30bd468dfeab20589f2cf709b5c940483e31d991f2ca58/zstandard-0.25.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3a39c94ad7866160a4a46d772e43311a743c316942037671beb264e395bdd611", size = 5046736, upload-time = "2025-09-14T22:17:37.891Z" }, + { url = "https://files.pythonhosted.org/packages/50/d5/870aa06b3a76c73eced65c044b92286a3c4e00554005ff51962deef28e28/zstandard-0.25.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:172de1f06947577d3a3005416977cce6168f2261284c02080e7ad0185faeced3", size = 5576368, upload-time = "2025-09-14T22:17:40.206Z" }, + { url = "https://files.pythonhosted.org/packages/5d/35/398dc2ffc89d304d59bc12f0fdd931b4ce455bddf7038a0a67733a25f550/zstandard-0.25.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3c83b0188c852a47cd13ef3bf9209fb0a77fa5374958b8c53aaa699398c6bd7b", size = 4954022, upload-time = "2025-09-14T22:17:41.879Z" }, + { url = "https://files.pythonhosted.org/packages/9a/5c/36ba1e5507d56d2213202ec2b05e8541734af5f2ce378c5d1ceaf4d88dc4/zstandard-0.25.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1673b7199bbe763365b81a4f3252b8e80f44c9e323fc42940dc8843bfeaf9851", size = 5267889, upload-time = "2025-09-14T22:17:43.577Z" }, + { url = "https://files.pythonhosted.org/packages/70/e8/2ec6b6fb7358b2ec0113ae202647ca7c0e9d15b61c005ae5225ad0995df5/zstandard-0.25.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0be7622c37c183406f3dbf0cba104118eb16a4ea7359eeb5752f0794882fc250", size = 5433952, upload-time = "2025-09-14T22:17:45.271Z" }, + { url = 
"https://files.pythonhosted.org/packages/7b/01/b5f4d4dbc59ef193e870495c6f1275f5b2928e01ff5a81fecb22a06e22fb/zstandard-0.25.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:5f5e4c2a23ca271c218ac025bd7d635597048b366d6f31f420aaeb715239fc98", size = 5814054, upload-time = "2025-09-14T22:17:47.08Z" }, + { url = "https://files.pythonhosted.org/packages/b2/e5/fbd822d5c6f427cf158316d012c5a12f233473c2f9c5fe5ab1ae5d21f3d8/zstandard-0.25.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f187a0bb61b35119d1926aee039524d1f93aaf38a9916b8c4b78ac8514a0aaf", size = 5360113, upload-time = "2025-09-14T22:17:48.893Z" }, + { url = "https://files.pythonhosted.org/packages/8e/e0/69a553d2047f9a2c7347caa225bb3a63b6d7704ad74610cb7823baa08ed7/zstandard-0.25.0-cp313-cp313-win32.whl", hash = "sha256:7030defa83eef3e51ff26f0b7bfb229f0204b66fe18e04359ce3474ac33cbc09", size = 436936, upload-time = "2025-09-14T22:17:52.658Z" }, + { url = "https://files.pythonhosted.org/packages/d9/82/b9c06c870f3bd8767c201f1edbdf9e8dc34be5b0fbc5682c4f80fe948475/zstandard-0.25.0-cp313-cp313-win_amd64.whl", hash = "sha256:1f830a0dac88719af0ae43b8b2d6aef487d437036468ef3c2ea59c51f9d55fd5", size = 506232, upload-time = "2025-09-14T22:17:50.402Z" }, + { url = "https://files.pythonhosted.org/packages/d4/57/60c3c01243bb81d381c9916e2a6d9e149ab8627c0c7d7abb2d73384b3c0c/zstandard-0.25.0-cp313-cp313-win_arm64.whl", hash = "sha256:85304a43f4d513f5464ceb938aa02c1e78c2943b29f44a750b48b25ac999a049", size = 462671, upload-time = "2025-09-14T22:17:51.533Z" }, + { url = "https://files.pythonhosted.org/packages/3d/5c/f8923b595b55fe49e30612987ad8bf053aef555c14f05bb659dd5dbe3e8a/zstandard-0.25.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e29f0cf06974c899b2c188ef7f783607dbef36da4c242eb6c82dcd8b512855e3", size = 795887, upload-time = "2025-09-14T22:17:54.198Z" }, + { url = 
"https://files.pythonhosted.org/packages/8d/09/d0a2a14fc3439c5f874042dca72a79c70a532090b7ba0003be73fee37ae2/zstandard-0.25.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:05df5136bc5a011f33cd25bc9f506e7426c0c9b3f9954f056831ce68f3b6689f", size = 640658, upload-time = "2025-09-14T22:17:55.423Z" }, + { url = "https://files.pythonhosted.org/packages/5d/7c/8b6b71b1ddd517f68ffb55e10834388d4f793c49c6b83effaaa05785b0b4/zstandard-0.25.0-cp314-cp314-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:f604efd28f239cc21b3adb53eb061e2a205dc164be408e553b41ba2ffe0ca15c", size = 5379849, upload-time = "2025-09-14T22:17:57.372Z" }, + { url = "https://files.pythonhosted.org/packages/a4/86/a48e56320d0a17189ab7a42645387334fba2200e904ee47fc5a26c1fd8ca/zstandard-0.25.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:223415140608d0f0da010499eaa8ccdb9af210a543fac54bce15babbcfc78439", size = 5058095, upload-time = "2025-09-14T22:17:59.498Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ad/eb659984ee2c0a779f9d06dbfe45e2dc39d99ff40a319895df2d3d9a48e5/zstandard-0.25.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2e54296a283f3ab5a26fc9b8b5d4978ea0532f37b231644f367aa588930aa043", size = 5551751, upload-time = "2025-09-14T22:18:01.618Z" }, + { url = "https://files.pythonhosted.org/packages/61/b3/b637faea43677eb7bd42ab204dfb7053bd5c4582bfe6b1baefa80ac0c47b/zstandard-0.25.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ca54090275939dc8ec5dea2d2afb400e0f83444b2fc24e07df7fdef677110859", size = 6364818, upload-time = "2025-09-14T22:18:03.769Z" }, + { url = "https://files.pythonhosted.org/packages/31/dc/cc50210e11e465c975462439a492516a73300ab8caa8f5e0902544fd748b/zstandard-0.25.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:e09bb6252b6476d8d56100e8147b803befa9a12cea144bbe629dd508800d1ad0", size = 5560402, upload-time = "2025-09-14T22:18:05.954Z" }, + { url = "https://files.pythonhosted.org/packages/c9/ae/56523ae9c142f0c08efd5e868a6da613ae76614eca1305259c3bf6a0ed43/zstandard-0.25.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a9ec8c642d1ec73287ae3e726792dd86c96f5681eb8df274a757bf62b750eae7", size = 4955108, upload-time = "2025-09-14T22:18:07.68Z" }, + { url = "https://files.pythonhosted.org/packages/98/cf/c899f2d6df0840d5e384cf4c4121458c72802e8bda19691f3b16619f51e9/zstandard-0.25.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:a4089a10e598eae6393756b036e0f419e8c1d60f44a831520f9af41c14216cf2", size = 5269248, upload-time = "2025-09-14T22:18:09.753Z" }, + { url = "https://files.pythonhosted.org/packages/1b/c0/59e912a531d91e1c192d3085fc0f6fb2852753c301a812d856d857ea03c6/zstandard-0.25.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:f67e8f1a324a900e75b5e28ffb152bcac9fbed1cc7b43f99cd90f395c4375344", size = 5430330, upload-time = "2025-09-14T22:18:11.966Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1d/7e31db1240de2df22a58e2ea9a93fc6e38cc29353e660c0272b6735d6669/zstandard-0.25.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:9654dbc012d8b06fc3d19cc825af3f7bf8ae242226df5f83936cb39f5fdc846c", size = 5811123, upload-time = "2025-09-14T22:18:13.907Z" }, + { url = "https://files.pythonhosted.org/packages/f6/49/fac46df5ad353d50535e118d6983069df68ca5908d4d65b8c466150a4ff1/zstandard-0.25.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4203ce3b31aec23012d3a4cf4a2ed64d12fea5269c49aed5e4c3611b938e4088", size = 5359591, upload-time = "2025-09-14T22:18:16.465Z" }, + { url = "https://files.pythonhosted.org/packages/c2/38/f249a2050ad1eea0bb364046153942e34abba95dd5520af199aed86fbb49/zstandard-0.25.0-cp314-cp314-win32.whl", hash = "sha256:da469dc041701583e34de852d8634703550348d5822e66a0c827d39b05365b12", size = 444513, upload-time = 
"2025-09-14T22:18:20.61Z" }, + { url = "https://files.pythonhosted.org/packages/3a/43/241f9615bcf8ba8903b3f0432da069e857fc4fd1783bd26183db53c4804b/zstandard-0.25.0-cp314-cp314-win_amd64.whl", hash = "sha256:c19bcdd826e95671065f8692b5a4aa95c52dc7a02a4c5a0cac46deb879a017a2", size = 516118, upload-time = "2025-09-14T22:18:17.849Z" }, + { url = "https://files.pythonhosted.org/packages/f0/ef/da163ce2450ed4febf6467d77ccb4cd52c4c30ab45624bad26ca0a27260c/zstandard-0.25.0-cp314-cp314-win_arm64.whl", hash = "sha256:d7541afd73985c630bafcd6338d2518ae96060075f9463d7dc14cfb33514383d", size = 476940, upload-time = "2025-09-14T22:18:19.088Z" }, +]