diff --git a/.idea/macrostrat.iml b/.idea/macrostrat.iml
index 02cbd8132..052dd3973 100644
--- a/.idea/macrostrat.iml
+++ b/.idea/macrostrat.iml
@@ -28,6 +28,7 @@
+
diff --git a/.idea/modules.xml b/.idea/modules.xml
index aa76307a7..934e0b158 100644
--- a/.idea/modules.xml
+++ b/.idea/modules.xml
@@ -2,6 +2,7 @@
+
diff --git a/.idea/vcs.xml b/.idea/vcs.xml
index c83f6e520..03d814c6f 100644
--- a/.idea/vcs.xml
+++ b/.idea/vcs.xml
@@ -72,6 +72,7 @@
+
diff --git a/services/api-v3/Makefile b/services/api-v3/Makefile
index a2575fbf3..609f475d2 100644
--- a/services/api-v3/Makefile
+++ b/services/api-v3/Makefile
@@ -18,3 +18,10 @@ release:
git diff-index --quiet HEAD --
git tag -a $(TAG) -m "Version $(TAG)"
git push origin $(TAG)
+
+test-fieldsite:
+ uv run pytest -q \
+ --confcutdir=api/tests \
+ api/tests/test_fieldsite.py
+
+
diff --git a/services/api-v3/api/routes/dev_routes/convert.py b/services/api-v3/api/routes/dev_routes/convert.py
index 3b6154eed..65d028747 100644
--- a/services/api-v3/api/routes/dev_routes/convert.py
+++ b/services/api-v3/api/routes/dev_routes/convert.py
@@ -45,7 +45,72 @@
responses={404: {"description": "Not found"}},
)
-# helpers
+
+# _____________________HELPERS_________________________________
+def _dt_to_ms(dt: Optional[datetime]) -> Optional[int]:
+ if not dt:
+ return None
+ if dt.tzinfo is None:
+ dt = dt.replace(tzinfo=timezone.utc)
+ return int(dt.timestamp() * 1000)
+
+
+def _format_checkin_date(dt: Optional[datetime]) -> Optional[str]:
+ """Match example Rockd checkin date strings like 'October 19, 2023'."""
+ if not dt:
+ return None
+ if dt.tzinfo is None:
+ dt = dt.replace(tzinfo=timezone.utc)
+ return dt.strftime("%B %d, %Y")
+
+
+def _iter_spot_features(payload: Any) -> Iterable[dict]:
+ """
+ Yield GeoJSON Feature dicts from:
+ - FeatureCollection
+ - Feature
+ - list[FeatureCollection|Feature]
+ - dicts that carry a "features" list but omit "type"
+ """
+ if payload is None:
+ return
+ items = payload if isinstance(payload, list) else [payload]
+
+ for item in items:
+ if not isinstance(item, dict):
+ continue
+ t = item.get("type")
+ # A single Feature
+ if t == "Feature":
+ yield item
+ continue
+ # A FeatureCollection
+ if t == "FeatureCollection":
+ feats = item.get("features") or []
+ if isinstance(feats, list):
+ for f in feats:
+ if isinstance(f, dict) and f.get("type") == "Feature":
+ yield f
+ continue
+ # Fallback: tolerate dicts that carry a "features" list but omit "type"
+ feats = item.get("features")
+ if isinstance(feats, list):
+ for f in feats:
+ if isinstance(f, dict) and f.get("type") == "Feature":
+ yield f
+
+
+def _iter_checkins(payload: Any) -> Iterable[dict]:
+ """
+ Yield checkin dicts from:
+ - dict (single checkin)
+ - list[dict] (multiple checkins)
+ """
+ if payload is None:
+ return
+ items = payload if isinstance(payload, list) else [payload]
+ for item in items:
+ if isinstance(item, dict):
+ yield item
def _parse_date_time(x: Optional[str]) -> Optional[datetime]:
@@ -124,6 +189,7 @@ def _first_planar_from_fieldsite(fs: FieldSite) -> Optional[PlanarOrientation]:
return None
+# _____________________________SPOT - FS - CHECKIN_________________________________
def spot_to_fieldsite(feat) -> FieldSite:
props = feat.get("properties", {}) or {}
geom = feat.get("geometry", {}) or {}
@@ -138,7 +204,6 @@ def spot_to_fieldsite(feat) -> FieldSite:
if len(coords) >= 2:
lng = coords[0] if lng is None else lng
lat = coords[1] if lat is None else lat
-
if not _valid_coords(lat, lng):
raise ValueError("Invalid or missing lat/lng for Spot feature")
@@ -164,7 +229,6 @@ def spot_to_fieldsite(feat) -> FieldSite:
created = _parse_date_time(props.get("time") or props.get("date")) or datetime.now(
timezone.utc
)
-
mt = props.get("modified_timestamp")
if mt is not None:
try:
@@ -173,7 +237,6 @@ def spot_to_fieldsite(feat) -> FieldSite:
updated = created
else:
updated = created
-
return FieldSite(
id=sid,
location=Location(latitude=float(lat), longitude=float(lng)),
@@ -186,108 +249,266 @@ def spot_to_fieldsite(feat) -> FieldSite:
def multiple_spot_to_fieldsite(
- feat: Union[dict, List[dict]] = Body(...)
+ payload: Union[dict, List[dict]] = Body(...)
) -> List[FieldSite]:
"""
- Accept a single FeatureCollection or a list of FeatureCollections and
- return a FieldSite for each qualifying Point feature.
+ Accept:
+ - FeatureCollection with many spots (the typical multi-spot payload)
+ - a single Feature
+ - a list of FeatureCollections/Features
+ Return a FieldSite for each qualifying Point feature.
"""
out: list[FieldSite] = []
+ for feat in _iter_spot_features(payload):
+ props = feat.get("properties") or {}
+ geom = feat.get("geometry") or {}
+ # Only Point features become FieldSites
+ if geom.get("type") != "Point":
+ continue
+ # Skip "on image" annotation points
+ if props.get("image_basemap") is not None:
+ continue
+ # Require spot id
+ if props.get("id") is None:
+ continue
+ # spot_to_fieldsite will validate coords (from props or geometry)
+ try:
+ out.append(spot_to_fieldsite(feat))
+ except Exception:
+ continue
+ return out
+
+
+def spot_to_checkin(spot: Union[dict, List[dict]] = Body(...)) -> list[dict]:
+ """Pipeline: Spot JSON (FeatureCollections) or FieldSites -> Checkin list."""
+ # already a single FieldSite-shaped dict
+ if isinstance(spot, dict) and "location" in spot:
+ fieldsites: List[Union[dict, FieldSite]] = [spot]
+ # already a list of FieldSite-shaped dicts
+ elif (
+ isinstance(spot, list)
+ and spot
+ and isinstance(spot[0], dict)
+ and "location" in spot[0]
+ ):
+ fieldsites = spot
+ else:
+ fieldsites = multiple_spot_to_fieldsite(spot)
+ return multiple_fieldsite_to_rockd_checkin(fieldsites)
+
+
+# ___________________________________CHECKIN - FS - SPOT____________________________________
+def checkin_to_fieldsite(checkin: dict) -> FieldSite:
+ """
+ Convert Rockd checkin JSON dict -> FieldSite.
+
+ Required:
+ - checkin_id
+ - lat/lng
+ """
+ if not isinstance(checkin, dict):
+ raise ValueError("Checkin must be a dict")
+ cid = checkin.get("checkin_id")
+ if cid is None:
+ raise ValueError("Missing checkin_id")
+ lat, lng = checkin.get("lat"), checkin.get("lng")
+ if not _valid_coords(lat, lng):
+ raise ValueError("Invalid or missing lat/lng for Checkin")
+ photos: list[Photo] = []
+ pid = checkin.get("photo")
+ if isinstance(pid, int):
+ photos.append(
+ Photo(
+ id=int(pid),
+ url=f"rockd://photo/{pid}",
+ width=0,
+ height=0,
+ checksum="",
+ )
+ )
+
+ observations: list[Observation] = []
+ planar = _first_planar_from_checkin(checkin)
+ if planar:
+ observations.append(Observation(data=planar))
+ created = _parse_date_time(checkin.get("created")) or datetime.now(timezone.utc)
+ updated = _parse_date_time(checkin.get("added")) or created
+ return FieldSite(
+ id=int(cid),
+ location=Location(latitude=float(lat), longitude=float(lng)),
+ created=created,
+ updated=updated,
+ notes=checkin.get("notes"),
+ photos=photos,
+ observations=observations,
+ )
- # normalize: allow a single FeatureCollection or a list of them
- collections = feat if isinstance(feat, list) else [feat]
- for coll in collections:
- if not isinstance(coll, dict) or coll.get("type") != "FeatureCollection":
+def multiple_checkin_to_fieldsite(
+ payload: Union[dict, List[dict]] = Body(...)
+) -> List[FieldSite]:
+ """Convert single checkin dict OR list of checkins -> list[FieldSite]."""
+ out: list[FieldSite] = []
+ for c in _iter_checkins(payload):
+ try:
+ out.append(checkin_to_fieldsite(c))
+ except Exception:
continue
- for f in coll.get("features", []) or []:
- props = f.get("properties", {}) or {}
- geom = f.get("geometry", {}) or {}
- if geom.get("type") != "Point":
- continue
- if props.get("image_basemap") is not None:
- continue
- if props.get("id") is None:
- continue
- coords = geom.get("coordinates", []) or []
- if len(coords) < 2 or not _valid_coords(coords[1], coords[0]):
- continue
- try:
- out.append(spot_to_fieldsite(f))
- except Exception:
- continue
return out
-def fieldsite_to_checkin(fs: FieldSite) -> dict:
- d = {
- "spot_id": fs.id,
+def fieldsite_to_rockd_checkin(fs: FieldSite) -> dict:
+ """
+ Convert FieldSite -> Rockd checkin JSON (modeled on the example checkin structure),
+ and also include spot_id for compatibility with spot-based pipelines.
+ """
+ if not isinstance(fs, FieldSite):
+ fs = FieldSite(**fs)
+ created = fs.created or datetime.now(timezone.utc)
+ updated = fs.updated or created
+ d: dict = {
+ "checkin_id": fs.id,
+ "spot_id": fs.id, # <-- added
"notes": fs.notes,
"lat": fs.location.latitude,
"lng": fs.location.longitude,
- "created": fs.created.isoformat(),
+ "created": _format_checkin_date(created),
+ "added": _format_checkin_date(updated),
+ "observations": [],
}
if fs.photos:
d["photo"] = fs.photos[0].id
planar = _first_planar_from_fieldsite(fs)
if planar:
d["observations"] = [
- {"orientation": {"strike": float(planar.strike), "dip": float(planar.dip)}}
+ {
+ "orientation": {
+ "strike": float(planar.strike),
+ "dip": float(planar.dip),
+ }
+ }
]
return d
-def multiple_fieldsite_to_checkin(
- fieldsites: list[FieldSite] = Body(...),
+def multiple_fieldsite_to_rockd_checkin(
+ fieldsites: Union[list[FieldSite], list[dict]] = Body(...)
) -> list[dict]:
+ """Convert list[FieldSite] (or list[dict]) -> list[Rockd checkin dict]."""
out: list[dict] = []
- for fs in fieldsites:
+ for fs in fieldsites or []:
try:
if not isinstance(fs, FieldSite):
fs = FieldSite(**fs)
- out.append(fieldsite_to_checkin(fs))
+ out.append(fieldsite_to_rockd_checkin(fs))
except Exception:
continue
return out
-def spot_to_checkin(spot: Union[dict, List[dict]] = Body(...)) -> list[dict]:
- """Pipeline: Spot JSON (FeatureCollection[s]) or FieldSite list -> Checkin list."""
- # If it's already a list of FieldSite-like dicts (has 'location'), skip the first hop
- if (
- isinstance(spot, list)
- and spot
- and isinstance(spot[0], dict)
- and "location" in spot[0]
- ):
- fieldsites: List[FieldSite] = spot # already FieldSite-shaped
- else:
- # Convert FeatureCollection (or list of them) -> FieldSite list
- fieldsites = multiple_spot_to_fieldsite(spot)
- # Convert FieldSite list -> Checkin list
- return multiple_fieldsite_to_checkin(fieldsites)
+def fieldsite_to_spot(fs: FieldSite) -> dict:
+ """
+ Convert a single FieldSite -> VALID StraboSpot spot payload.
+
+ IMPORTANT: For posting, StraboSpot expects a FeatureCollection (not a bare Feature),
+ so this returns:
+ {"type": "FeatureCollection", "features": [<the single Feature>]}
+ """
+ if not isinstance(fs, FieldSite):
+ fs = FieldSite(**fs)
+ created = fs.created or datetime.now(timezone.utc)
+ updated = fs.updated or created
+ feat: dict = {
+ "type": "Feature",
+ "geometry": {
+ "type": "Point",
+ "coordinates": [fs.location.longitude, fs.location.latitude],
+ },
+ "properties": {
+ "id": fs.id,
+ "notes": fs.notes,
+ "time": created.isoformat().replace("+00:00", "Z"),
+ "date": created.isoformat().replace("+00:00", "Z"),
+ "modified_timestamp": _dt_to_ms(updated),
+ "lat": fs.location.latitude,
+ "lng": fs.location.longitude,
+ },
+ }
+ if fs.photos:
+ p = fs.photos[0]
+ feat["properties"]["images"] = [
+ {
+ "id": int(p.id),
+ "width": int(getattr(p, "width", 0) or 0),
+ "height": int(getattr(p, "height", 0) or 0),
+ "title": "",
+ "image_type": "photo",
+ }
+ ]
+ planar = _first_planar_from_fieldsite(fs)
+ if planar:
+ feat["properties"]["orientation_data"] = [
+ {
+ "type": "planar_orientation",
+ "strike": float(planar.strike),
+ "dip": float(planar.dip),
+ }
+ ]
+ return {"type": "FeatureCollection", "features": [feat]}
+def multiple_fieldsite_to_spot(fieldsites: list[FieldSite]) -> dict:
+ """Convert many FieldSites -> one StraboSpot FeatureCollection with many Features."""
+ features: list[dict] = []
+ for fs in fieldsites or []:
+ try:
+ fc = fieldsite_to_spot(fs) # FeatureCollection with 1 feature
+ f = (fc.get("features") or [None])[0]
+ if isinstance(f, dict):
+ features.append(f)
+ except Exception:
+ continue
+ return {"type": "FeatureCollection", "features": features}
+
+
+def checkin_to_spot(payload: Union[dict, List[dict]] = Body(...)) -> dict:
+ """
+ Convert:
+ - single checkin dict
+ - list of checkin dicts
+ -> VALID StraboSpot FeatureCollection (single or multi-spot).
+ """
+ fieldsites = multiple_checkin_to_fieldsite(payload)
+ if len(fieldsites) == 1:
+ return fieldsite_to_spot(fieldsites[0])
+ return multiple_fieldsite_to_spot(fieldsites)
+
+
+# _________________________________API ROUTE___________________________________
@convert_router.post("/field-site")
async def convert_field_site(
payload: Union[dict, List[dict]] = Body(...),
in_: str = Query(..., alias="in"),
out: str = Query(..., alias="out"),
) -> Any:
- """
- Unified converter:
- - ?in=spot&out=fieldsite -> spot FeatureCollection(s) -> FieldSite
- - ?in=fieldsite&out=checkin -> FieldSite -> Checkin
- - ?in=spot&out=checkin -> spot FeatureCollection(s) -> FieldSite -> Checkin
- """
key = (in_.lower(), out.lower())
if key == ("spot", "fieldsite"):
return multiple_spot_to_fieldsite(payload)
+ if key == ("checkin", "fieldsite"):
+ return multiple_checkin_to_fieldsite(payload)
if key == ("fieldsite", "checkin"):
- return multiple_fieldsite_to_checkin(payload)
+ if isinstance(payload, list):
+ return multiple_fieldsite_to_rockd_checkin(payload)
+ return [fieldsite_to_rockd_checkin(payload)]
+ if key == ("fieldsite", "spot"):
+ if isinstance(payload, list):
+ return multiple_fieldsite_to_spot(payload)
+ return fieldsite_to_spot(payload)
+ if key == ("checkin", "spot"):
+ return checkin_to_spot(payload)
if key == ("spot", "checkin"):
return spot_to_checkin(payload)
raise HTTPException(
status_code=400,
- detail="Unsupported conversion. Use in=[spot|fieldsite], out=[fieldsite|checkin].",
+ detail="Unsupported conversion. Use in=[spot|fieldsite|checkin], out=[fieldsite|checkin|spot].",
)
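
For orientation, a minimal client-side sketch of calling the unified converter above. The /dev/convert/field-site path and the in/out query parameters mirror the new tests below; the base URL and the use of the requests library are assumptions, not part of this change:

# Hypothetical usage sketch; base URL is assumed, route and params mirror the tests.
import requests

spot_payload = {
    "type": "FeatureCollection",
    "features": [
        {
            "type": "Feature",
            "geometry": {"type": "Point", "coordinates": [-89.0, 43.0]},
            "properties": {"id": 1, "lat": 43.0, "lng": -89.0, "notes": "hello"},
        }
    ],
}

resp = requests.post(
    "http://localhost:8000/dev/convert/field-site",  # assumed local dev server
    params={"in": "spot", "out": "checkin"},
    json=spot_payload,
)
resp.raise_for_status()
checkins = resp.json()  # list of Rockd-style checkin dicts (checkin_id, spot_id, lat, lng, ...)
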
diff --git a/services/api-v3/api/routes/object.py b/services/api-v3/api/routes/object.py
index 9944d17b5..3138e8392 100644
--- a/services/api-v3/api/routes/object.py
+++ b/services/api-v3/api/routes/object.py
@@ -51,7 +51,6 @@ def sha256_of_uploadfile(
def get_s3_client():
- # TODO need to add or configure these envs within api v3 kubernetes config
return Minio(
endpoint=settings.s3_endpoint,
access_key=settings.s3_access_key,
@@ -60,6 +59,7 @@ def get_s3_client():
)
+# TODO: add a bucket-name parameter so callers can choose the bucket
def get_storage_host_bucket() -> tuple[str, str]:
"""
Keep host/bucket consistent everywhere.
diff --git a/services/api-v3/api/tests/test_fieldsite.py b/services/api-v3/api/tests/test_fieldsite.py
new file mode 100644
index 000000000..7071b681d
--- /dev/null
+++ b/services/api-v3/api/tests/test_fieldsite.py
@@ -0,0 +1,350 @@
+# api/tests/test_fieldsite.py
+
+from __future__ import annotations
+
+from typing import Any, Dict, List
+
+import pytest
+
+from .test_database import api_client
+
+
+def test__debug_routes(api_client):
+ paths = sorted({r.path for r in api_client.app.router.routes if hasattr(r, "path")})
+ print("\n".join(paths))
+ assert True
+
+
+def _spot_feature(
+ *,
+ spot_id: int = 123,
+ lat: float = 43.0,
+ lng: float = -89.0,
+ include_orientation: bool = True,
+ include_images: bool = True,
+ image_basemap: Any = None,
+ geom_type: str = "Point",
+) -> Dict[str, Any]:
+ props: Dict[str, Any] = {
+ "id": spot_id,
+ "lat": lat,
+ "lng": lng,
+ "notes": "hello",
+ "time": "2023-10-19T12:00:00Z",
+ "modified_timestamp": 1697716800000,
+ }
+ if image_basemap is not None:
+ props["image_basemap"] = image_basemap
+
+ if include_images:
+ props["images"] = [{"id": 777, "width": 100, "height": 200}]
+
+ if include_orientation:
+ props["orientation_data"] = [
+ {"type": "planar_orientation", "strike": 123.0, "dip": 45.0}
+ ]
+
+ geom: Dict[str, Any] = (
+ {"type": "Point", "coordinates": [lng, lat]}
+ if geom_type == "Point"
+ else {"type": geom_type, "coordinates": []}
+ )
+
+ return {"type": "Feature", "properties": props, "geometry": geom}
+
+
+def _spot_featurecollection(features: List[Dict[str, Any]]) -> Dict[str, Any]:
+ return {"type": "FeatureCollection", "features": features}
+
+
+def _checkin(
+ *,
+ checkin_id: int = 55,
+ lat: float = 43.0,
+ lng: float = -89.0,
+ include_photo: bool = True,
+ include_orientation: bool = True,
+) -> Dict[str, Any]:
+ d: Dict[str, Any] = {
+ "checkin_id": checkin_id,
+ "lat": lat,
+ "lng": lng,
+ "notes": "note",
+ "created": "October 19, 2023",
+ "added": "October 20, 2023",
+ }
+ if include_photo:
+ d["photo"] = 999
+ if include_orientation:
+ d["observations"] = [{"orientation": {"strike": 10.0, "dip": 20.0}}]
+ else:
+ d["observations"] = []
+ return d
+
+
+def _fieldsite_dict(
+ *,
+ fs_id: int = 88,
+ lat: float = 43.0,
+ lng: float = -89.0,
+ include_photo: bool = True,
+ include_orientation: bool = True,
+) -> Dict[str, Any]:
+ d: Dict[str, Any] = {
+ "id": fs_id,
+ "notes": "fs note",
+ "created": "2023-10-19T12:00:00+00:00",
+ "updated": "2023-10-20T12:00:00+00:00",
+ "location": {"latitude": lat, "longitude": lng},
+ "photos": [],
+ "observations": [],
+ }
+
+ if include_photo:
+ d["photos"] = [
+ {
+ "id": 321,
+ "url": "rockd://photo/321",
+ "width": 0,
+ "height": 0,
+ "checksum": "",
+ }
+ ]
+
+ if include_orientation:
+ d["observations"] = [
+ {"data": {"strike": 111.0, "dip": 33.0, "facing": "upright"}}
+ ]
+
+ return d
+
+
+class TestConvertFieldSite:
+ def test_spot_to_fieldsite_featurecollection(self, api_client):
+ payload = _spot_featurecollection(
+ [
+ _spot_feature(spot_id=1),
+ _spot_feature(spot_id=2, include_orientation=False),
+ ]
+ )
+
+ resp = api_client.post(
+ "/dev/convert/field-site?in=spot&out=fieldsite", json=payload
+ )
+ assert resp.status_code == 200
+
+ data = resp.json()
+ assert isinstance(data, list)
+ assert len(data) == 2
+
+ # basic FieldSite shape
+ assert data[0]["id"] == 1
+ assert "location" in data[0]
+ assert "latitude" in data[0]["location"]
+ assert "longitude" in data[0]["location"]
+ assert data[0]["location"]["latitude"] == pytest.approx(43.0)
+ assert data[0]["location"]["longitude"] == pytest.approx(-89.0)
+
+ # both expose an observations list (the first carries planar data, the second does not)
+ assert isinstance(data[0].get("observations"), list)
+ assert isinstance(data[1].get("observations"), list)
+
+ def test_spot_to_fieldsite_filters_non_point_and_image_basemap(self, api_client):
+ payload = _spot_featurecollection(
+ [
+ _spot_feature(spot_id=1, geom_type="LineString"),
+ _spot_feature(
+ spot_id=2, image_basemap="something"
+ ), # should be skipped
+ _spot_feature(spot_id=3), # should survive
+ ]
+ )
+
+ resp = api_client.post(
+ "/dev/convert/field-site?in=spot&out=fieldsite", json=payload
+ )
+ assert resp.status_code == 200
+ data = resp.json()
+
+ assert isinstance(data, list)
+ assert len(data) == 1
+ assert data[0]["id"] == 3
+
+ def test_spot_to_fieldsite_invalid_coords_skips_feature(self, api_client):
+ payload = _spot_featurecollection(
+ [_spot_feature(spot_id=1, lat=999.0, lng=-89.0)]
+ )
+ resp = api_client.post(
+ "/dev/convert/field-site?in=spot&out=fieldsite", json=payload
+ )
+ assert resp.status_code == 200
+ data = resp.json()
+ assert data == [] # invalid spot should be skipped
+
+ def test_spot_to_checkin_from_featurecollection(self, api_client):
+ payload = _spot_featurecollection([_spot_feature(spot_id=10)])
+ resp = api_client.post(
+ "/dev/convert/field-site?in=spot&out=checkin", json=payload
+ )
+ assert resp.status_code == 200
+
+ out = resp.json()
+ assert isinstance(out, list)
+ assert len(out) == 1
+
+ c = out[0]
+ # Rockd checkin-ish keys
+ assert c["checkin_id"] == 10
+ assert c["spot_id"] == 10
+ assert c["lat"] == pytest.approx(43.0)
+ assert c["lng"] == pytest.approx(-89.0)
+ assert "created" in c and isinstance(c["created"], str)
+ assert "added" in c and isinstance(c["added"], str)
+ assert "observations" in c and isinstance(c["observations"], list)
+
+ # orientation passed through
+ assert len(c["observations"]) == 1
+ assert "orientation" in c["observations"][0]
+ assert c["observations"][0]["orientation"]["strike"] == pytest.approx(123.0)
+ assert c["observations"][0]["orientation"]["dip"] == pytest.approx(45.0)
+
+ def test_spot_to_checkin_accepts_single_fieldsite_dict(self, api_client):
+ # spot_to_checkin treats a dict with "location" as already FieldSite-shaped
+ payload = _fieldsite_dict(fs_id=501)
+ resp = api_client.post(
+ "/dev/convert/field-site?in=spot&out=checkin", json=payload
+ )
+ assert resp.status_code == 200
+
+ out = resp.json()
+ assert isinstance(out, list)
+ assert len(out) == 1
+ assert out[0]["checkin_id"] == 501
+ assert out[0]["spot_id"] == 501
+
+ def test_checkin_to_fieldsite_single(self, api_client):
+ payload = _checkin(checkin_id=77)
+ resp = api_client.post(
+ "/dev/convert/field-site?in=checkin&out=fieldsite", json=payload
+ )
+ assert resp.status_code == 200
+
+ out = resp.json()
+ assert isinstance(out, list)
+ assert len(out) == 1
+
+ fs = out[0]
+ assert fs["id"] == 77
+ assert fs["location"]["latitude"] == pytest.approx(43.0)
+ assert fs["location"]["longitude"] == pytest.approx(-89.0)
+ assert "observations" in fs and isinstance(fs["observations"], list)
+
+ def test_checkin_to_fieldsite_list(self, api_client):
+ payload = [
+ _checkin(checkin_id=1),
+ _checkin(checkin_id=2, include_orientation=False),
+ ]
+ resp = api_client.post(
+ "/dev/convert/field-site?in=checkin&out=fieldsite", json=payload
+ )
+ assert resp.status_code == 200
+ out = resp.json()
+ assert isinstance(out, list)
+ assert len(out) == 2
+ assert {o["id"] for o in out} == {1, 2}
+
+ def test_fieldsite_to_checkin_list(self, api_client):
+ payload = [
+ _fieldsite_dict(fs_id=900),
+ _fieldsite_dict(fs_id=901, include_orientation=False),
+ ]
+ resp = api_client.post(
+ "/dev/convert/field-site?in=fieldsite&out=checkin", json=payload
+ )
+ assert resp.status_code == 200
+
+ out = resp.json()
+ assert isinstance(out, list)
+ assert len(out) == 2
+
+ c0 = out[0]
+ assert c0["checkin_id"] == 900
+ assert c0["spot_id"] == 900
+ assert "created" in c0 and isinstance(c0["created"], str)
+ assert "added" in c0 and isinstance(c0["added"], str)
+
+ def test_fieldsite_to_checkin_single_returns_list_of_one(self, api_client):
+ payload = _fieldsite_dict(fs_id=999)
+ resp = api_client.post(
+ "/dev/convert/field-site?in=fieldsite&out=checkin", json=payload
+ )
+ assert resp.status_code == 200
+
+ out = resp.json()
+ assert isinstance(out, list)
+ assert len(out) == 1
+ assert out[0]["checkin_id"] == 999
+ assert out[0]["spot_id"] == 999
+
+ def test_fieldsite_to_spot_single(self, api_client):
+ payload = _fieldsite_dict(fs_id=1234)
+ resp = api_client.post(
+ "/dev/convert/field-site?in=fieldsite&out=spot", json=payload
+ )
+ assert resp.status_code == 200
+
+ fc = resp.json()
+ assert isinstance(fc, dict)
+ assert fc["type"] == "FeatureCollection"
+ assert isinstance(fc.get("features"), list)
+ assert len(fc["features"]) == 1
+ f0 = fc["features"][0]
+ assert f0["type"] == "Feature"
+ assert f0["geometry"]["type"] == "Point"
+ assert f0["properties"]["id"] == 1234
+
+ def test_fieldsite_to_spot_list(self, api_client):
+ payload = [_fieldsite_dict(fs_id=1), _fieldsite_dict(fs_id=2)]
+ resp = api_client.post(
+ "/dev/convert/field-site?in=fieldsite&out=spot", json=payload
+ )
+ assert resp.status_code == 200
+
+ fc = resp.json()
+ assert fc["type"] == "FeatureCollection"
+ assert len(fc["features"]) == 2
+ ids = {f["properties"]["id"] for f in fc["features"]}
+ assert ids == {1, 2}
+
+ def test_checkin_to_spot_single(self, api_client):
+ payload = _checkin(checkin_id=321)
+ resp = api_client.post(
+ "/dev/convert/field-site?in=checkin&out=spot", json=payload
+ )
+ assert resp.status_code == 200
+
+ fc = resp.json()
+ assert fc["type"] == "FeatureCollection"
+ assert len(fc["features"]) == 1
+ assert fc["features"][0]["properties"]["id"] == 321
+
+ def test_checkin_to_spot_list(self, api_client):
+ payload = [_checkin(checkin_id=1), _checkin(checkin_id=2)]
+ resp = api_client.post(
+ "/dev/convert/field-site?in=checkin&out=spot", json=payload
+ )
+ assert resp.status_code == 200
+
+ fc = resp.json()
+ assert fc["type"] == "FeatureCollection"
+ assert len(fc["features"]) == 2
+ ids = {f["properties"]["id"] for f in fc["features"]}
+ assert ids == {1, 2}
+
+ def test_unsupported_conversion_400(self, api_client):
+ resp = api_client.post(
+ "/dev/convert/field-site?in=banana&out=fieldsite", json={}
+ )
+ assert resp.status_code == 400
+ detail = resp.json().get("detail")
+ assert isinstance(detail, str)
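
The tests import an api_client fixture from test_database.py, which is not part of this diff. A minimal sketch of what such a fixture might look like, assuming a FastAPI TestClient wrapper and an api.main:app entry point (both assumptions; the real fixture may also provision a test database):

# Hypothetical sketch only; the real fixture lives in api/tests/test_database.py.
import pytest
from fastapi.testclient import TestClient

from api.main import app  # assumed application entry point


@pytest.fixture
def api_client() -> TestClient:
    # TestClient keeps a reference to the app, so tests can inspect
    # api_client.app.router.routes as test__debug_routes above does.
    with TestClient(app) as client:
        yield client
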
diff --git a/services/api-v3/pyproject.toml b/services/api-v3/pyproject.toml
index 28fdb06b6..688e79ad7 100644
--- a/services/api-v3/pyproject.toml
+++ b/services/api-v3/pyproject.toml
@@ -37,6 +37,7 @@ dev = [
"pylint>=3.0,<3.1",
"safety>=2.3,<2.4",
"pytest>=8.3.5,<9",
+ "docker>=7.1.0",
]
[build-system]
diff --git a/services/api-v3/uv.lock b/services/api-v3/uv.lock
index 547742562..f128c910e 100644
--- a/services/api-v3/uv.lock
+++ b/services/api-v3/uv.lock
@@ -151,6 +151,7 @@ dependencies = [
[package.dev-dependencies]
dev = [
{ name = "bandit" },
+ { name = "docker" },
{ name = "mypy" },
{ name = "pylint" },
{ name = "pytest" },
@@ -185,6 +186,7 @@ requires-dist = [
[package.metadata.requires-dev]
dev = [
{ name = "bandit", specifier = ">=1.7,<1.8" },
+ { name = "docker", specifier = ">=7.1.0" },
{ name = "mypy", specifier = ">=1.6,<1.7" },
{ name = "pylint", specifier = ">=3.0,<3.1" },
{ name = "pytest", specifier = ">=8.3.5,<9" },
@@ -488,6 +490,20 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/50/3d/9373ad9c56321fdab5b41197068e1d8c25883b3fea29dd361f9b55116869/dill-0.4.0-py3-none-any.whl", hash = "sha256:44f54bf6412c2c8464c14e8243eb163690a9800dbe2c367330883b19c7561049", size = 119668, upload-time = "2025-04-16T00:41:47.671Z" },
]
+[[package]]
+name = "docker"
+version = "7.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pywin32", marker = "sys_platform == 'win32'" },
+ { name = "requests" },
+ { name = "urllib3" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/91/9b/4a2ea29aeba62471211598dac5d96825bb49348fa07e906ea930394a83ce/docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c", size = 117834, upload-time = "2024-05-23T11:13:57.216Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774, upload-time = "2024-05-23T11:13:55.01Z" },
+]
+
[[package]]
name = "dparse"
version = "0.6.4"
@@ -1394,6 +1410,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" },
]
+[[package]]
+name = "pywin32"
+version = "311"
+source = { registry = "https://pypi.org/simple" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7c/af/449a6a91e5d6db51420875c54f6aff7c97a86a3b13a0b4f1a5c13b988de3/pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151", size = 8697031, upload-time = "2025-07-14T20:13:13.266Z" },
+ { url = "https://files.pythonhosted.org/packages/51/8f/9bb81dd5bb77d22243d33c8397f09377056d5c687aa6d4042bea7fbf8364/pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503", size = 9508308, upload-time = "2025-07-14T20:13:15.147Z" },
+ { url = "https://files.pythonhosted.org/packages/44/7b/9c2ab54f74a138c491aba1b1cd0795ba61f144c711daea84a88b63dc0f6c/pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2", size = 8703930, upload-time = "2025-07-14T20:13:16.945Z" },
+ { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" },
+ { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" },
+ { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" },
+]
+
[[package]]
name = "pyyaml"
version = "6.0.3"
diff --git a/uv.lock b/uv.lock
index d94148f23..5b7f32241 100644
--- a/uv.lock
+++ b/uv.lock
@@ -155,6 +155,7 @@ requires-dist = [
[package.metadata.requires-dev]
dev = [
{ name = "bandit", specifier = ">=1.7,<1.8" },
+ { name = "docker", specifier = ">=7.1.0" },
{ name = "mypy", specifier = ">=1.6,<1.7" },
{ name = "pylint", specifier = ">=3.0,<3.1" },
{ name = "pytest", specifier = ">=8.3.5,<9" },