Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion docs/features.md
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ discussion and subject to change.
:::

If you maintain a conda channel, you can now serve Python wheels directly
alongside regular conda packages. Add your wheels to a `packages.whl` section
alongside regular conda packages. Add your wheels to a `v3.whl` section
in `repodata.json` and point each entry at the wheel URL — `conda install`
will pick them up, resolve their dependencies, and extract them correctly,
with no pre-conversion step required.
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ python = ">=3.10"
conda = ">=26.1"
conda-index = ">=0.7.0"
conda-package-streaming = ">=0.11"
conda-rattler-solver = ">=0.0.5"
conda-rattler-solver = ">=0.0.6"
packaging = "*"
pip = "*"
unearth = "*"
Expand Down
190 changes: 175 additions & 15 deletions tests/conda_local_channel/generate_noarch_wheel_repodata.py
Original file line number Diff line number Diff line change
@@ -1,34 +1,185 @@
# This is a utility for generating test specific data in conda-pypi
# only. It is not appropriate to use this to generate production level
# repodata.
"""
Utility for generating test-specific local channel repodata.

This is test data generation logic for conda-pypi only; it is not intended for
production repodata generation.

Marker conversion policy for this test channel:
- Convert Python markers to `python...` matchspec fragments, including
`python_version not in "x, y"` -> `(python!=x and python!=y)`.
- Convert platform/os markers to virtual packages when feasible
(`__win`, `__linux`, `__osx`, `__unix`).
- Keep extras in `extra_depends`, with remaining non-extra marker logic
encoded via `[when="..."]`.
- Drop unsupported marker dimensions (for example interpreter/machine-specific
variants) for these noarch channel tests.
"""

import json
import re
import requests
from concurrent.futures import ThreadPoolExecutor, as_completed
from enum import StrEnum
from packaging.markers import Marker
from packaging.requirements import Requirement
from typing import Any

EXTRA_MARKER_RE = re.compile(r'extra\s*==\s*["\']([^"\']+)["\']')

class MarkerVar(StrEnum):
    """PEP 508 environment marker variables this generator knows how to handle."""

    # Python version markers — converted to `python...` matchspec fragments.
    PYTHON_VERSION = "python_version"
    PYTHON_FULL_VERSION = "python_full_version"
    # Extras marker — routed into `extra_depends` rather than `depends`.
    EXTRA = "extra"
    # Platform/OS markers — mapped to conda virtual packages when feasible.
    SYS_PLATFORM = "sys_platform"
    PLATFORM_SYSTEM = "platform_system"
    OS_NAME = "os_name"
    # Interpreter/machine markers — dropped for these noarch channel tests.
    IMPLEMENTATION_NAME = "implementation_name"
    PLATFORM_PYTHON_IMPLEMENTATION = "platform_python_implementation"
    PLATFORM_MACHINE = "platform_machine"


class MarkerOp(StrEnum):
    """Subset of PEP 508 marker comparison operators given special handling.

    Operators not listed here (e.g. ``>=``, ``<``) are passed through verbatim
    by the conversion code rather than matched against this enum.
    """

    EQ = "=="
    NE = "!="
    NOT_IN = "not in"


# `sys_platform` / `platform_system` marker values (compared lowercased) mapped
# to conda virtual packages.  NOTE(review): "cygwin" is folded into __unix
# here — a simplification acceptable for test-channel data only.
SYSTEM_TO_VIRTUAL_PACKAGE = {
    "windows": "__win",
    "win32": "__win",
    "linux": "__linux",
    "darwin": "__osx",
    "cygwin": "__unix",
}

# `os_name` marker values (compared lowercased) mapped to conda virtual
# packages.  NOTE(review): "windows" is not a value CPython's os.name emits
# ("nt" is); it appears to be accepted defensively — confirm intent.
OS_NAME_TO_VIRTUAL_PACKAGE = {
    "nt": "__win",
    "windows": "__win",
    "posix": "__unix",
}


def normalize_name(name: str) -> str:
    """Normalize a package name to conda conventions (lowercase, _ -> -)."""
    # Splitting on "_" and re-joining with "-" is equivalent to a straight
    # replace: empty segments from consecutive underscores are preserved.
    return "-".join(name.lower().split("_"))


def _marker_value(token: Any) -> str:
"""Extract the textual value from packaging marker tokens."""
return getattr(token, "value", str(token))


def _normalize_marker_atom(lhs: str, op: str, rhs: str) -> str | None:
    """Map a single PEP 508 marker atom to a MatchSpec-like fragment.

    Args:
        lhs: Marker variable name, e.g. ``python_version``.
        op: Comparison operator, e.g. ``==``, ``>=``, ``in``, ``not in``.
        rhs: Literal right-hand value taken from the marker.

    Returns:
        A fragment such as ``python>=3.9`` or ``__win``, or ``None`` when the
        atom is dropped (extras, unsupported dimensions) per the module-level
        marker conversion policy.
    """
    lhs_l = lhs.lower()
    rhs_l = rhs.lower()

    if lhs_l in {MarkerVar.PYTHON_VERSION, MarkerVar.PYTHON_FULL_VERSION}:
        # PEP 508 allows `in` / `not in` against a comma-separated version
        # list.  `not in` expands to an and-joined exclusion; `in` to an
        # or-joined inclusion.  (Previously `in` fell through to the verbatim
        # branch below and produced an invalid fragment like "pythonin3.8".
        # MarkerOp has no IN member, so the literal string is compared here.)
        if op == MarkerOp.NOT_IN or op == "in":
            versions = [version.strip() for version in rhs.split(",") if version.strip()]
            if not versions:
                return None
            cmp_op, joiner = ("!=", " and ") if op == MarkerOp.NOT_IN else ("==", " or ")
            clauses = [f"python{cmp_op}{version}" for version in versions]
            if len(clauses) == 1:
                return clauses[0]
            return f"({joiner.join(clauses)})"
        # Remaining comparison operators translate verbatim, e.g. `python>=3.9`.
        return f"python{op}{rhs}"

    # `extra == "..."` atoms are routed to extra_depends by the caller.
    if lhs_l == MarkerVar.EXTRA and op == MarkerOp.EQ:
        return None

    if lhs_l in {MarkerVar.SYS_PLATFORM, MarkerVar.PLATFORM_SYSTEM}:
        mapped = SYSTEM_TO_VIRTUAL_PACKAGE.get(rhs_l)
        if op == MarkerOp.EQ and mapped:
            return mapped
        # "not Windows/Cygwin" is best expressed as the __unix virtual
        # package; other inequalities (including `!= emscripten`) have no
        # clean virtual-package encoding and are dropped.
        if op == MarkerOp.NE and rhs_l in {"win32", "windows", "cygwin"}:
            return "__unix"
        return None

    if lhs_l == MarkerVar.OS_NAME:
        mapped = OS_NAME_TO_VIRTUAL_PACKAGE.get(rhs_l)
        if not mapped:
            return None
        if op == MarkerOp.EQ:
            return mapped
        if op == MarkerOp.NE:
            # os_name only distinguishes Windows vs POSIX here, so negation
            # flips between the two virtual packages.
            return "__unix" if mapped == "__win" else "__win"
        return None

    # Interpreter-specific markers are intentionally dropped for these noarch
    # channel tests (the original's two arms both returned None; collapsed).
    if lhs_l in {MarkerVar.IMPLEMENTATION_NAME, MarkerVar.PLATFORM_PYTHON_IMPLEMENTATION}:
        return None

    # Machine/architecture markers are likewise unsupported for noarch data.
    if lhs_l == MarkerVar.PLATFORM_MACHINE:
        return None

    return None


def _combine_expr(left: str | None, op: str, right: str | None) -> str | None:
"""Combine optional left/right expressions with a boolean operator."""
if left is None:
return right
if right is None:
return left
if left == right:
return left
return f"({left} {op} {right})"


def extract_marker_condition_and_extras(marker: Marker) -> tuple[str | None, list[str]]:
    """Split a Marker into optional non-extra condition and extra group names.

    Walks packaging's parsed marker tree, collecting ``extra == "name"`` atoms
    into a deduplicated, first-seen-order list, and folding every other atom
    through ``_normalize_marker_atom`` into a single MatchSpec-like condition
    string (``None`` when nothing survives conversion).

    Args:
        marker: A ``packaging.markers.Marker`` instance.

    Returns:
        ``(condition, extras)`` — the combined non-extra condition fragment
        (or ``None``) and the list of extra group names.
    """
    extras: list[str] = []
    seen_extras: set[str] = set()

    def visit(node: Any) -> str | None:
        # Leaf atom: packaging represents these as a (Variable, Op, Value)
        # 3-tuple of token objects.
        if isinstance(node, tuple) and len(node) == 3:
            lhs = _marker_value(node[0])
            op = _marker_value(node[1])
            rhs = _marker_value(node[2])

            # `extra == "..."` is recorded as an extras group, not converted
            # into a condition fragment.
            if lhs.lower() == MarkerVar.EXTRA and op == MarkerOp.EQ:
                extra_name = rhs.lower()
                if extra_name not in seen_extras:
                    seen_extras.add(extra_name)
                    extras.append(extra_name)
                return None

            return _normalize_marker_atom(lhs, op, rhs)

        # Compound expression: an alternating [operand, "and"/"or", operand,
        # ...] list, possibly with nested sub-lists as operands.
        if isinstance(node, list):
            if not node:
                return None

            expr = visit(node[0])
            i = 1
            while i + 1 < len(node):
                op = str(node[i]).lower()
                rhs_expr = visit(node[i + 1])
                # NOTE(review): operands combine left-to-right here; this
                # assumes packaging nests "or" groups as sub-lists so that
                # and/or precedence is already encoded — confirm against the
                # packaging version pinned for these tests.
                expr = _combine_expr(expr, op, rhs_expr)
                i += 2
            return expr

        # Unrecognized node shapes are dropped, matching the module policy of
        # discarding unsupported marker dimensions.
        return None

    # Marker._markers is a private packaging attribute; keep access isolated here.
    condition = visit(getattr(marker, "_markers", []))
    return condition, extras


def pypi_to_repodata_noarch_whl_entry(
pypi_data: dict[str, Any],
) -> dict[str, Any] | None:
"""
Convert PyPI JSON endpoint data to a repodata.json packages.whl entry for a
Convert PyPI JSON endpoint data to a repodata.json v3.whl entry for a
pure Python (noarch) wheel.

Args:
pypi_data: Dictionary containing the complete info from PyPI JSON endpoint

Returns:
Dictionary representing the entry for packages.whl, or None if no pure
Dictionary representing the entry for v3.whl, or None if no pure
Python wheel (platform tag "none-any") is found
"""
# Find a pure Python wheel (platform tag "none-any")
Expand All @@ -48,17 +199,26 @@ def pypi_to_repodata_noarch_whl_entry(
pypi_info = pypi_data.get("info")

depends_list: list[str] = []
extras_dict: dict[str, list[str]] = {}
extra_depends_dict: dict[str, list[str]] = {}
for dep in pypi_info.get("requires_dist") or []:
req = Requirement(dep)
conda_dep = normalize_name(req.name) + str(req.specifier)

if req.marker:
extra_match = EXTRA_MARKER_RE.search(str(req.marker))
if extra_match:
extras_dict.setdefault(extra_match.group(1), []).append(conda_dep)
non_extra_condition, extra_names = extract_marker_condition_and_extras(req.marker)
if extra_names:
for extra_name in extra_names:
extra_dep = conda_dep
if non_extra_condition:
marker_condition = json.dumps(non_extra_condition)
extra_dep = f"{extra_dep}[when={marker_condition}]"
extra_depends_dict.setdefault(extra_name, []).append(extra_dep)
else:
depends_list.append(conda_dep)
if non_extra_condition:
marker_condition = json.dumps(non_extra_condition)
depends_list.append(f"{conda_dep}[when={marker_condition}]")
else:
depends_list.append(conda_dep)
else:
depends_list.append(conda_dep)

Expand All @@ -78,7 +238,7 @@ def pypi_to_repodata_noarch_whl_entry(
"build": "py3_none_any_0",
"build_number": 0,
"depends": depends_list,
"extras": extras_dict,
"extra_depends": extra_depends_dict,
"fn": f"{pypi_info.get('name')}-{pypi_info.get('version')}-py3-none-any.whl",
"sha256": wheel_url.get("digests", {}).get("sha256", ""),
"size": wheel_url.get("size", 0),
Expand Down Expand Up @@ -137,9 +297,9 @@ def get_repodata_entry(name: str, version: str) -> dict[str, Any] | None:
"packages": {},
"packages.conda": {},
"removed": [],
"repodata_version": 1,
"repodata_version": 3,
"signatures": {},
"packages.whl": {key: value for key, value in sorted(pkg_whls.items())},
"v3": {"whl": {key: value for key, value in sorted(pkg_whls.items())}},
}

with open(wheel_repodata, "w") as f:
Expand Down
Loading
Loading