diff --git a/compose.test.yaml b/compose.test.yaml index 1364f3741..bb782eb83 100644 --- a/compose.test.yaml +++ b/compose.test.yaml @@ -1,15 +1,14 @@ -# compose.test.yaml name: dtm-test +networks: + dtm-network: + name: dtm-network + volumes: db-data: minio-data: frontend-html: -networks: - dtm-network: - driver: bridge - services: backend: extends: @@ -49,17 +48,11 @@ services: service: createbuckets migrations: - image: ghcr.io/hotosm/drone-tm/backend:debug - volumes: - - ./src/backend:/project/src/backend - env_file: - - .env + extends: + file: compose.yaml + service: migrations environment: MONITORING: "" - networks: - - dtm-network - command: ["alembic", "upgrade", "head"] - restart: "no" arq-worker: extends: diff --git a/compose.yaml b/compose.yaml index 0a48513b3..f4a2f24f4 100644 --- a/compose.yaml +++ b/compose.yaml @@ -177,7 +177,7 @@ services: # This container does the actual imagery processing (not persistent, scalable) nodeodm: image: docker.io/opendronemap/nodeodm:3.5.5 - command: ["--port", "9900"] + command: ["--port", "9900", "--log_level", "debug"] env_file: .env ports: - 9900:9900 diff --git a/src/backend/app/arq/tasks.py b/src/backend/app/arq/tasks.py index b989367c8..ccaf1f4ab 100644 --- a/src/backend/app/arq/tasks.py +++ b/src/backend/app/arq/tasks.py @@ -20,7 +20,12 @@ from app.images.image_schemas import ProjectImageCreate, ProjectImageOut from app.images.flight_tail_removal import mark_and_remove_flight_tail_imagery from app.models.enums import HTTPStatus, ImageStatus -from app.projects.project_logic import process_all_drone_images, process_drone_images +from app.projects import project_schemas +from app.projects.project_logic import ( + process_all_drone_images, + process_drone_images, + process_task_metrics, +) from app.s3 import async_get_obj_from_bucket, s3_client from app.images.image_classification import ImageClassifier from app.jaxa.upload_dem import download_and_upload_dem @@ -620,78 +625,71 @@ async def delete_batch_images( try: 
async with db_pool.connection() as conn: - # Get all S3 keys for images and thumbnails in this batch - query = """ - SELECT s3_key, thumbnail_url - FROM project_images - WHERE batch_id = %(batch_id)s - AND project_id = %(project_id)s - """ + result = await ImageClassifier.delete_batch( + conn, UUID(batch_id), UUID(project_id) + ) - async with conn.cursor() as cur: - await cur.execute( - query, - {"batch_id": batch_id, "project_id": project_id}, - ) - rows = await cur.fetchall() - - # Collect all S3 keys to delete - s3_keys_to_delete = [] - for row in rows: - s3_key, thumbnail_url = row - if s3_key: - s3_keys_to_delete.append(s3_key) - if thumbnail_url: - s3_keys_to_delete.append(thumbnail_url) - - image_count = len(rows) log.info( - f"Found {image_count} images and {len(s3_keys_to_delete)} S3 objects to delete" + f"Batch deletion complete: {result['deleted_count']} images, " + f"{result['deleted_s3_count']} S3 objects deleted" ) - # Delete from S3 - deleted_s3_count = 0 - if s3_keys_to_delete: - client = s3_client() - for key in s3_keys_to_delete: - try: - key = key.lstrip("/") - client.remove_object(settings.S3_BUCKET_NAME, key) - deleted_s3_count += 1 - except Exception as e: - log.warning(f"Failed to delete S3 object {key}: {e}") - - log.info(f"Deleted {deleted_s3_count} objects from S3") - - # Delete from database - delete_query = """ - DELETE FROM project_images - WHERE batch_id = %(batch_id)s - AND project_id = %(project_id)s - """ + return { + "message": result["message"], + "batch_id": batch_id, + "deleted_images": result["deleted_count"], + "deleted_s3_objects": result["deleted_s3_count"], + } - async with conn.cursor() as cur: + except Exception as e: + log.error(f"Failed to delete batch (Job: {job_id}): {str(e)}") + raise + + +async def process_project_task_metrics( + ctx: Dict[Any, Any], project_id: str +) -> Dict[str, Any]: + """Process project task metrics in the ARQ worker.""" + job_id = ctx.get("job_id", "unknown") + log.info( + f"Starting 
process_project_task_metrics (Job ID: {job_id}): project={project_id}" + ) + + db_pool = ctx.get("db_pool") + if not db_pool: + raise RuntimeError("Database pool not initialized in ARQ context") + + try: + async with db_pool.connection() as db: + project = await project_schemas.DbProject.one(db, UUID(project_id)) + + async with db.cursor() as cur: await cur.execute( - delete_query, - {"batch_id": batch_id, "project_id": project_id}, + """ + SELECT id, project_id, ST_AsBinary(outline), project_task_index + FROM tasks + WHERE project_id = %s + ORDER BY project_task_index + """, + (project.id,), ) + tasks_data = await cur.fetchall() - await conn.commit() + await process_task_metrics(db, tasks_data, project) log.info( - f"Batch deletion complete: {image_count} images, " - f"{deleted_s3_count} S3 objects deleted" + f"Completed process_project_task_metrics (Job ID: {job_id}): " + f"project={project_id}, tasks={len(tasks_data)}" ) return { - "message": "Batch deleted successfully", - "batch_id": batch_id, - "deleted_images": image_count, - "deleted_s3_objects": deleted_s3_count, + "message": "Task metrics processed", + "project_id": project_id, + "task_count": len(tasks_data), } except Exception as e: - log.error(f"Failed to delete batch (Job: {job_id}): {str(e)}") + log.error(f"Failed process_project_task_metrics (Job ID: {job_id}): {str(e)}") raise @@ -708,6 +706,7 @@ class WorkerSettings: classify_image_batch, process_batch_images, delete_batch_images, + process_project_task_metrics, download_and_upload_dem, ] diff --git a/src/backend/app/images/image_classification.py b/src/backend/app/images/image_classification.py index 703cb4e41..31744ffca 100644 --- a/src/backend/app/images/image_classification.py +++ b/src/backend/app/images/image_classification.py @@ -21,11 +21,14 @@ copy_file_within_bucket, get_obj_from_bucket, maybe_presign_s3_key, + s3_client, ) -# Number of concurrent workers for parallel classification -CLASSIFICATION_CONCURRENCY = 6 +# Number of concurrent 
workers for parallel classification. +# Must not exceed the connection pool size (default 4) to avoid pool +# exhaustion, which silently leaves images stuck in STAGED. +CLASSIFICATION_CONCURRENCY = 4 # Hard timeout per image to avoid a single stuck S3 read / decode hanging the whole batch. # If exceeded, the image is marked as REJECTED with a generic "Classification failed". @@ -34,6 +37,15 @@ # Buffer radius in meters for coverage calculation COVERAGE_BUFFER_METERS = 20.0 +# Task states that indicate verification has occurred (IMAGE_UPLOADED = just verified, +# later processing states preserve that verification status) +VERIFIED_TASK_STATES = { + "IMAGE_UPLOADED", + "IMAGE_PROCESSING_STARTED", + "IMAGE_PROCESSING_FINISHED", + "IMAGE_PROCESSING_FAILED", +} + @dataclass(frozen=True) class QualityThresholds: @@ -778,7 +790,36 @@ async def classify_with_commit(image_record: dict[str, Any]) -> dict[str, Any]: # Process all images in parallel with controlled concurrency tasks = [classify_with_commit(image) for image in images] - await asyncio.gather(*tasks, return_exceptions=True) + gather_results = await asyncio.gather(*tasks, return_exceptions=True) + + # Detect silently swallowed exceptions (e.g. pool timeout before the + # inner try/except). These leave images stuck in STAGED with no log. 
+ for image_record, result_or_exc in zip(images, gather_results): + if isinstance(result_or_exc, BaseException): + image_id = image_record["id"] + log.error( + f"Classification worker failed for image {image_id} " + f"(batch {batch_id}): {result_or_exc!r}" + ) + # Best-effort: mark the image as rejected so it isn't stuck + try: + async with db_pool.connection() as err_conn: + await ImageClassifier._update_image_status( + err_conn, + image_id + if isinstance(image_id, uuid.UUID) + else uuid.UUID(image_id), + ImageStatus.REJECTED, + _format_classification_failure_reason(result_or_exc), + ) + await err_conn.commit() + async with results_lock: + results["rejected"] += 1 + except Exception as cleanup_err: + log.error( + f"Failed to mark image {image_id} as rejected " + f"after worker error: {cleanup_err}" + ) log.info( f"Parallel classification complete for batch {batch_id}: " @@ -850,7 +891,7 @@ async def get_batch_review_data( batch_id: uuid.UUID, project_id: uuid.UUID, ) -> dict: - # Query includes is_verified by checking if task has IMAGE_UPLOADED state + # Query includes is_verified by checking if task has a post-verification state query = """ WITH latest_task_state AS ( SELECT DISTINCT ON (task_id) @@ -864,7 +905,7 @@ async def get_batch_review_data( pi.task_id, t.project_task_index, COUNT(*) as image_count, - COALESCE(lts.state = 'IMAGE_UPLOADED', false) as is_verified, + COALESCE(lts.state IN ('IMAGE_UPLOADED', 'IMAGE_PROCESSING_STARTED', 'IMAGE_PROCESSING_FINISHED', 'IMAGE_PROCESSING_FAILED'), false) as is_verified, json_agg( json_build_object( 'id', pi.id, @@ -980,9 +1021,8 @@ async def delete_batch( batch_id: uuid.UUID, project_id: uuid.UUID, ) -> dict: - # Get count of images to be deleted - count_query = """ - SELECT COUNT(*) as count + keys_query = """ + SELECT s3_key, thumbnail_url FROM project_images WHERE batch_id = %(batch_id)s AND project_id = %(project_id)s @@ -990,13 +1030,28 @@ async def delete_batch( async with db.cursor(row_factory=dict_row) as 
cur: await cur.execute( - count_query, + keys_query, {"batch_id": str(batch_id), "project_id": str(project_id)}, ) - result = await cur.fetchone() - image_count = result["count"] if result else 0 + rows = await cur.fetchall() + + s3_keys_to_delete: list[str] = [] + for row in rows: + if row["s3_key"]: + s3_keys_to_delete.append(str(row["s3_key"]).lstrip("/")) + if row["thumbnail_url"]: + s3_keys_to_delete.append(str(row["thumbnail_url"]).lstrip("/")) + + deleted_s3_count = 0 + if s3_keys_to_delete: + client = s3_client() + for key in s3_keys_to_delete: + try: + client.remove_object(settings.S3_BUCKET_NAME, key) + deleted_s3_count += 1 + except Exception as e: + log.warning(f"Failed to delete S3 object {key}: {e}") - # Delete all images in the batch delete_query = """ DELETE FROM project_images WHERE batch_id = %(batch_id)s @@ -1009,14 +1064,19 @@ async def delete_batch( {"batch_id": str(batch_id), "project_id": str(project_id)}, ) + await db.commit() + + image_count = len(rows) log.info( - f"Deleted {image_count} images from batch {batch_id} in project {project_id}" + f"Deleted {image_count} images and {deleted_s3_count} S3 objects " + f"from batch {batch_id} in project {project_id}" ) return { "message": "Batch deleted successfully", "batch_id": str(batch_id), "deleted_count": image_count, + "deleted_s3_count": deleted_s3_count, } @staticmethod @@ -1027,7 +1087,7 @@ async def get_batch_map_data( ) -> dict: """Get map data for batch review visualization. - Returns task geometries and image point locations as GeoJSON. + Returns task geometries and all images as GeoJSON (with/without GPS coordinates). 
""" # Get all task IDs that have images in this batch task_ids_query = """ @@ -1075,47 +1135,78 @@ async def get_batch_map_data( } ) - # Get image locations as GeoJSON points - images_query = """ + # Get all images with or without GPS data + all_images_query = """ SELECT id, filename, status, + rejection_reason, task_id, + s3_key, + thumbnail_url, ST_X(location::geometry) as longitude, ST_Y(location::geometry) as latitude FROM project_images WHERE batch_id = %(batch_id)s AND project_id = %(project_id)s - AND location IS NOT NULL + ORDER BY uploaded_at DESC """ async with db.cursor(row_factory=dict_row) as cur: await cur.execute( - images_query, + all_images_query, {"batch_id": str(batch_id), "project_id": str(project_id)}, ) - images = await cur.fetchall() + all_images = await cur.fetchall() + + # Build GeoJSON features for each image + images_features = [] + located_count = 0 + unlocated_count = 0 + + for img in all_images: + properties = { + "id": str(img["id"]), + "filename": img["filename"], + "status": img["status"], + "task_id": str(img["task_id"]) if img["task_id"] else None, + "rejection_reason": img["rejection_reason"], + "thumbnail_url": maybe_presign_s3_key( + img["thumbnail_url"], expires_hours=1 + ) + if img.get("thumbnail_url") + else None, + "url": maybe_presign_s3_key(img["s3_key"], expires_hours=1) + if img.get("s3_key") + else None, + } - images_geojson = { - "type": "FeatureCollection", - "features": [ - { + # Add Point geometry if GPS data exists + if img["longitude"] is not None and img["latitude"] is not None: + feature = { "type": "Feature", "geometry": { "type": "Point", "coordinates": [img["longitude"], img["latitude"]], }, - "properties": { - "id": str(img["id"]), - "filename": img["filename"], - "status": img["status"], - "task_id": str(img["task_id"]) if img["task_id"] else None, - }, + "properties": properties, } - for img in images - if img["longitude"] is not None and img["latitude"] is not None - ], + located_count += 1 + else: + # 
Add feature with null geometry for images without GPS + feature = { + "type": "Feature", + "geometry": None, + "properties": properties, + } + unlocated_count += 1 + + images_features.append(feature) + + images_geojson = { + "type": "FeatureCollection", + "features": images_features, } return { @@ -1124,6 +1215,8 @@ async def get_batch_map_data( "images": images_geojson, "total_tasks": len(tasks_geojson["features"]), "total_images": len(images_geojson["features"]), + "total_images_with_gps": located_count, + "total_images_without_gps": unlocated_count, } @staticmethod @@ -1261,6 +1354,93 @@ async def move_batch_images_to_tasks( "tasks": tasks_summary, } + @staticmethod + async def move_task_images_to_folder( + db: Connection, + project_id: uuid.UUID, + task_id: uuid.UUID, + ) -> dict: + """Move all assigned images for a specific task from staging to the task folder. + + Copies images from user-uploads staging area to: + projects/{project_id}/{task_id}/images/{filename} + + This is called after marking a task as verified/fully flown so that + the images are in the expected location for ODM processing. + """ + query = """ + SELECT + pi.id, + pi.filename, + pi.s3_key + FROM project_images pi + WHERE pi.project_id = %(project_id)s + AND pi.task_id = %(task_id)s + AND pi.status = %(status)s + AND pi.s3_key LIKE '%%user-uploads%%' + ORDER BY pi.uploaded_at + """ + + async with db.cursor(row_factory=dict_row) as cur: + await cur.execute( + query, + { + "project_id": str(project_id), + "task_id": str(task_id), + "status": ImageStatus.ASSIGNED.value, + }, + ) + images = await cur.fetchall() + + if not images: + return {"moved_count": 0, "failed_count": 0} + + moved_count = 0 + failed_count = 0 + + for image in images: + filename = image["filename"] + source_key = image["s3_key"] + # Use the image's unique DB id as prefix to guarantee no collisions + # (across batches, within a batch, or from duplicate filenames).
image_id_prefix = str(image["id"])[:8] + dest_key = ( + f"projects/{project_id}/{task_id}/images/{image_id_prefix}_{filename}" + ) + + success = await run_in_threadpool( + copy_file_within_bucket, + settings.S3_BUCKET_NAME, + source_key, + dest_key, + ) + + if success: + async with db.cursor() as update_cur: + await update_cur.execute( + """ + UPDATE project_images + SET s3_key = %(new_s3_key)s + WHERE id = %(image_id)s + """, + { + "new_s3_key": dest_key, + "image_id": str(image["id"]), + }, + ) + moved_count += 1 + log.info(f"Moved image {filename} to task {task_id}") + else: + failed_count += 1 + log.error(f"Failed to move image {filename} to task {task_id}") + + # NOTE: caller is responsible for commit/rollback so that the task + # state event and the image moves are in the same transaction. + + log.info(f"Task {task_id}: Moved {moved_count} images, {failed_count} failed") + + return {"moved_count": moved_count, "failed_count": failed_count} + @staticmethod async def get_batch_processing_summary( db: Connection, @@ -1510,3 +1690,439 @@ async def get_task_verification_data( "coverage_percentage": coverage_percentage, "is_verified": False, # TODO: Add verified_at field to tasks table } + + # ─── Project-level (task-centric) methods ───────────────────────────── + + @staticmethod + async def get_project_task_imagery_summary( + db: Connection, + project_id: uuid.UUID, + ) -> list[dict]: + """Get per-task imagery summary aggregated across ALL batches. + + Returns one row per task with counts, status breakdown, and task state. + This is the single source of truth for "what imagery exists for each task".
+ """ + query = """ + WITH latest_task_state AS ( + SELECT DISTINCT ON (task_id) + task_id, + state, + comment + FROM task_events + WHERE project_id = %(project_id)s + ORDER BY task_id, created_at DESC + ) + SELECT + t.id as task_id, + t.project_task_index, + COALESCE(lts.state, 'LOCKED_FOR_MAPPING') as task_state, + lts.comment as state_comment, + COALESCE(img.total, 0) as total_images, + COALESCE(img.assigned, 0) as assigned_images, + COALESCE(img.rejected, 0) as rejected_images, + COALESCE(img.invalid_exif, 0) as invalid_exif_images, + COALESCE(img.duplicate, 0) as duplicate_images, + COALESCE(img.unmatched, 0) as unmatched_images, + img.latest_upload + FROM tasks t + LEFT JOIN latest_task_state lts ON t.id = lts.task_id + LEFT JOIN LATERAL ( + SELECT + COUNT(*) as total, + COUNT(*) FILTER (WHERE status = 'assigned') as assigned, + COUNT(*) FILTER (WHERE status = 'rejected') as rejected, + COUNT(*) FILTER (WHERE status = 'invalid_exif') as invalid_exif, + COUNT(*) FILTER (WHERE status = 'duplicate') as duplicate, + COUNT(*) FILTER (WHERE status = 'unmatched') as unmatched, + MAX(uploaded_at) as latest_upload + FROM project_images + WHERE task_id = t.id AND project_id = %(project_id)s + ) img ON true + WHERE t.project_id = %(project_id)s + ORDER BY t.project_task_index + """ + + async with db.cursor(row_factory=dict_row) as cur: + await cur.execute(query, {"project_id": str(project_id)}) + rows = await cur.fetchall() + + result = [] + for row in rows: + has_ready_imagery = ( + row["task_state"] in VERIFIED_TASK_STATES and row["assigned_images"] > 0 + ) + result.append( + { + "task_id": str(row["task_id"]), + "project_task_index": row["project_task_index"], + "task_state": row["task_state"], + "total_images": row["total_images"], + "assigned_images": row["assigned_images"], + "rejected_images": row["rejected_images"], + "invalid_exif_images": row["invalid_exif_images"], + "duplicate_images": row["duplicate_images"], + "unmatched_images": row["unmatched_images"], + 
"latest_upload": ( + row["latest_upload"].isoformat() + if row["latest_upload"] + else None + ), + "failure_reason": ( + row["state_comment"] + if row["task_state"] == "IMAGE_PROCESSING_FAILED" + else None + ), + "has_ready_imagery": has_ready_imagery, + } + ) + + return result + + @staticmethod + async def get_project_review_data( + db: Connection, + project_id: uuid.UUID, + ) -> dict: + """Project-level review: images grouped by task across ALL batches. + + Replaces get_batch_review_data for the verify/review UI. + """ + query = """ + WITH latest_task_state AS ( + SELECT DISTINCT ON (task_id) + task_id, + state + FROM task_events + WHERE project_id = %(project_id)s + ORDER BY task_id, created_at DESC + ) + SELECT + pi.task_id, + t.project_task_index, + COUNT(*) as image_count, + COALESCE(lts.state IN ('IMAGE_UPLOADED', 'IMAGE_PROCESSING_STARTED', 'IMAGE_PROCESSING_FINISHED', 'IMAGE_PROCESSING_FAILED'), false) as is_verified, + json_agg( + json_build_object( + 'id', pi.id, + 'filename', pi.filename, + 's3_key', pi.s3_key, + 'thumbnail_url', pi.thumbnail_url, + 'status', pi.status, + 'rejection_reason', pi.rejection_reason, + 'uploaded_at', pi.uploaded_at + ) ORDER BY pi.uploaded_at + ) as images + FROM project_images pi + LEFT JOIN tasks t ON pi.task_id = t.id + LEFT JOIN latest_task_state lts ON pi.task_id = lts.task_id + WHERE pi.project_id = %(project_id)s + AND pi.status IN ('assigned', 'rejected', 'invalid_exif', 'duplicate') + GROUP BY pi.task_id, t.project_task_index, lts.state + ORDER BY t.project_task_index NULLS LAST + """ + + async with db.cursor(row_factory=dict_row) as cur: + await cur.execute(query, {"project_id": str(project_id)}) + task_groups = await cur.fetchall() + + for group in task_groups: + for image in group["images"]: + if image.get("thumbnail_url"): + image["thumbnail_url"] = maybe_presign_s3_key( + image["thumbnail_url"], expires_hours=1 + ) + if image.get("s3_key"): + image["url"] = maybe_presign_s3_key( + image["s3_key"], 
expires_hours=1 + ) + + return { + "project_id": str(project_id), + "task_groups": task_groups, + "total_tasks": len(task_groups), + "total_images": sum(group["image_count"] for group in task_groups), + } + + @staticmethod + async def get_project_map_data( + db: Connection, + project_id: uuid.UUID, + ) -> dict: + """Project-level map data: task geometries + ALL image points across batches. + + Replaces get_batch_map_data for the verify/review UI. + """ + # Get all tasks for the project that have any assigned imagery + tasks_query = """ + SELECT + t.id, + t.project_task_index, + ST_AsGeoJSON(t.outline)::json as geometry + FROM tasks t + WHERE t.project_id = %(project_id)s + AND EXISTS ( + SELECT 1 FROM project_images pi + WHERE pi.task_id = t.id + AND pi.project_id = %(project_id)s + AND pi.status = 'assigned' + ) + """ + + async with db.cursor(row_factory=dict_row) as cur: + await cur.execute(tasks_query, {"project_id": str(project_id)}) + tasks = await cur.fetchall() + + tasks_geojson = { + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "geometry": task["geometry"], + "properties": { + "id": str(task["id"]), + "task_index": task["project_task_index"], + }, + } + for task in tasks + ], + } + + # Get all classified images across all batches + images_query = """ + SELECT + id, + filename, + status, + rejection_reason, + task_id, + s3_key, + thumbnail_url, + ST_X(location::geometry) as longitude, + ST_Y(location::geometry) as latitude + FROM project_images + WHERE project_id = %(project_id)s + AND status IN ('assigned', 'rejected', 'invalid_exif', 'duplicate', 'unmatched') + ORDER BY uploaded_at DESC + """ + + async with db.cursor(row_factory=dict_row) as cur: + await cur.execute(images_query, {"project_id": str(project_id)}) + all_images = await cur.fetchall() + + images_features = [] + located_count = 0 + unlocated_count = 0 + + for img in all_images: + properties = { + "id": str(img["id"]), + "filename": img["filename"], + "status": 
img["status"], + "task_id": str(img["task_id"]) if img["task_id"] else None, + "rejection_reason": img["rejection_reason"], + "thumbnail_url": maybe_presign_s3_key( + img["thumbnail_url"], expires_hours=1 + ) + if img.get("thumbnail_url") + else None, + "url": maybe_presign_s3_key(img["s3_key"], expires_hours=1) + if img.get("s3_key") + else None, + } + + if img["longitude"] is not None and img["latitude"] is not None: + feature = { + "type": "Feature", + "geometry": { + "type": "Point", + "coordinates": [img["longitude"], img["latitude"]], + }, + "properties": properties, + } + located_count += 1 + else: + feature = { + "type": "Feature", + "geometry": None, + "properties": properties, + } + unlocated_count += 1 + + images_features.append(feature) + + return { + "project_id": str(project_id), + "tasks": tasks_geojson, + "images": { + "type": "FeatureCollection", + "features": images_features, + }, + "total_tasks": len(tasks_geojson["features"]), + "total_images": len(images_features), + "total_images_with_gps": located_count, + "total_images_without_gps": unlocated_count, + } + + @staticmethod + async def get_task_verification_data_project( + db: Connection, + task_id: uuid.UUID, + project_id: uuid.UUID, + ) -> dict: + """Project-level task verification: ALL assigned images for this task + across all batches. + + Replaces get_task_verification_data (which was batch-scoped). 
+ """ + # Get task geometry and index + task_query = """ + SELECT + id, + project_task_index, + ST_AsGeoJSON(outline)::json as geometry + FROM tasks + WHERE id = %(task_id)s + AND project_id = %(project_id)s + """ + + async with db.cursor(row_factory=dict_row) as cur: + await cur.execute( + task_query, + {"task_id": str(task_id), "project_id": str(project_id)}, + ) + task = await cur.fetchone() + + if not task: + raise ValueError(f"Task {task_id} not found in project {project_id}") + + # Get ALL assigned images for this task across all batches + images_query = """ + SELECT + id, + filename, + s3_key, + thumbnail_url, + status, + rejection_reason, + ST_AsGeoJSON(location)::json as location + FROM project_images + WHERE task_id = %(task_id)s + AND project_id = %(project_id)s + AND status = %(status)s + ORDER BY uploaded_at + """ + + async with db.cursor(row_factory=dict_row) as cur: + await cur.execute( + images_query, + { + "task_id": str(task_id), + "project_id": str(project_id), + "status": ImageStatus.ASSIGNED.value, + }, + ) + images = await cur.fetchall() + + for image in images: + if image.get("thumbnail_url"): + image["thumbnail_url"] = maybe_presign_s3_key( + image["thumbnail_url"], expires_hours=1 + ) + if image.get("s3_key"): + image["url"] = maybe_presign_s3_key(image["s3_key"], expires_hours=1) + + # Calculate coverage using PostGIS (same logic, no batch filter) + coverage_query = """ + WITH image_points AS ( + SELECT location + FROM project_images + WHERE task_id = %(task_id)s + AND project_id = %(project_id)s + AND status = %(status)s + AND location IS NOT NULL + ), + task_polygon AS ( + SELECT outline + FROM tasks + WHERE id = %(task_id)s + ), + buffered_points AS ( + SELECT ST_Union(ST_Buffer(location::geography, 20)::geometry) as coverage + FROM image_points + ) + SELECT + CASE + WHEN (SELECT COUNT(*) FROM image_points) = 0 THEN 0 + ELSE LEAST(100, ( + ST_Area( + ST_Intersection( + (SELECT coverage FROM buffered_points), + (SELECT outline FROM 
task_polygon) + )::geography + ) / + NULLIF(ST_Area((SELECT outline FROM task_polygon)::geography), 0) + ) * 100) + END as coverage_percentage + """ + + coverage_percentage = 0 + try: + async with db.cursor(row_factory=dict_row) as cur: + await cur.execute( + coverage_query, + { + "task_id": str(task_id), + "project_id": str(project_id), + "status": ImageStatus.ASSIGNED.value, + }, + ) + coverage_result = await cur.fetchone() + if coverage_result and coverage_result.get("coverage_percentage"): + coverage_percentage = float(coverage_result["coverage_percentage"]) + except Exception as e: + log.warning(f"Could not calculate coverage: {e}") + + # Check if task is verified (any post-verification state counts) + is_verified = False + async with db.cursor() as cur: + await cur.execute( + """ + SELECT state FROM task_events + WHERE task_id = %(task_id)s AND project_id = %(project_id)s + ORDER BY created_at DESC LIMIT 1 + """, + {"task_id": str(task_id), "project_id": str(project_id)}, + ) + row = await cur.fetchone() + if row and row[0] in VERIFIED_TASK_STATES: + is_verified = True + + return { + "task_id": str(task_id), + "project_task_index": task["project_task_index"], + "image_count": len(images), + "images": [ + { + "id": str(img["id"]), + "filename": img["filename"], + "s3_key": img["s3_key"], + "thumbnail_url": img.get("thumbnail_url"), + "url": img.get("url"), + "status": img["status"], + "rejection_reason": img.get("rejection_reason"), + "location": img.get("location"), + } + for img in images + ], + "task_geometry": { + "type": "Feature", + "geometry": task["geometry"], + "properties": { + "id": str(task["id"]), + "task_index": task["project_task_index"], + }, + }, + "coverage_percentage": coverage_percentage, + "is_verified": is_verified, + } diff --git a/src/backend/app/migrations/versions/add_image_classification.py b/src/backend/app/migrations/versions/add_image_classification.py new file mode 100644 index 000000000..626115a12 --- /dev/null +++ 
b/src/backend/app/migrations/versions/add_image_classification.py @@ -0,0 +1,60 @@ +"""add image classification fields + +Revision ID: add_image_classification +Revises: 001_project_images, 7389d0d528c3 +Create Date: 2025-01-06 + +""" + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +revision = "add_image_classification" +down_revision = ("001_project_images", "7389d0d528c3") +branch_labels = None +depends_on = None + + +def upgrade(): + # Add batch_id column + try: + op.add_column( + "project_images", + sa.Column("batch_id", postgresql.UUID(as_uuid=True), nullable=True), + ) + except Exception: + # Column might already exist + pass + + # Add rejection_reason column + try: + op.add_column( + "project_images", sa.Column("rejection_reason", sa.Text(), nullable=True) + ) + except Exception: + pass + + # Add sharpness_score column + try: + op.add_column( + "project_images", sa.Column("sharpness_score", sa.Float(), nullable=True) + ) + except Exception: + pass + + # Create indexes + op.execute( + "CREATE INDEX IF NOT EXISTS idx_project_images_batch_id ON project_images (batch_id)" + ) + op.execute( + "CREATE INDEX IF NOT EXISTS idx_project_images_batch_status ON project_images (batch_id, status)" + ) + + +def downgrade(): + op.drop_index("idx_project_images_batch_status", table_name="project_images") + op.drop_index("idx_project_images_batch_id", table_name="project_images") + op.drop_column("project_images", "sharpness_score") + op.drop_column("project_images", "rejection_reason") + op.drop_column("project_images", "batch_id") diff --git a/src/backend/app/migrations/versions/add_image_classification_fields.py b/src/backend/app/migrations/versions/add_image_classification_fields.py deleted file mode 100644 index 460178197..000000000 --- a/src/backend/app/migrations/versions/add_image_classification_fields.py +++ /dev/null @@ -1,196 +0,0 @@ -"""add image classification fields - -Revision ID: add_image_classification 
-Revises: 001_project_images -Create Date: 2025-01-06 - -""" - -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -revision = "add_image_classification" -down_revision = ("001_project_images", "7389d0d528c3") -branch_labels = None -depends_on = None - - -def upgrade(): - connection = op.get_bind() - - # Check if batch_id column exists - batch_id_exists = connection.execute( - sa.text(""" - SELECT EXISTS ( - SELECT 1 FROM information_schema.columns - WHERE table_name = 'project_images' - AND column_name = 'batch_id' - ) - """) - ).scalar() - - if not batch_id_exists: - op.add_column( - "project_images", - sa.Column("batch_id", postgresql.UUID(as_uuid=True), nullable=True), - ) - - # Check if rejection_reason column exists - rejection_reason_exists = connection.execute( - sa.text(""" - SELECT EXISTS ( - SELECT 1 FROM information_schema.columns - WHERE table_name = 'project_images' - AND column_name = 'rejection_reason' - ) - """) - ).scalar() - - if not rejection_reason_exists: - op.add_column( - "project_images", sa.Column("rejection_reason", sa.Text(), nullable=True) - ) - - # Check if sharpness_score column exists - sharpness_score_exists = connection.execute( - sa.text(""" - SELECT EXISTS ( - SELECT 1 FROM information_schema.columns - WHERE table_name = 'project_images' - AND column_name = 'sharpness_score' - ) - """) - ).scalar() - - if not sharpness_score_exists: - op.add_column( - "project_images", sa.Column("sharpness_score", sa.Float(), nullable=True) - ) - - # Create indexes if they don't exist - op.execute( - "CREATE INDEX IF NOT EXISTS idx_project_images_batch_id ON project_images (batch_id)" - ) - op.execute( - "CREATE INDEX IF NOT EXISTS idx_project_images_batch_status ON project_images (batch_id, status)" - ) - - # Check if we need to update the enum - # Get current enum type name - enum_type_name = connection.execute( - sa.text(""" - SELECT t.typname - FROM pg_type t - JOIN pg_class c ON c.reltype = t.oid - 
WHERE c.relname = 'project_images' - AND EXISTS ( - SELECT 1 FROM pg_attribute a - WHERE a.attrelid = c.oid - AND a.attname = 'status' - AND a.atttypid = t.oid - ) - UNION - SELECT t.typname - FROM pg_type t - JOIN pg_attribute a ON a.atttypid = t.oid - JOIN pg_class c ON a.attrelid = c.oid - WHERE c.relname = 'project_images' - AND a.attname = 'status' - LIMIT 1 - """) - ).scalar() - - # Check if 'uploaded' value exists in the enum - uploaded_exists = connection.execute( - sa.text(""" - SELECT EXISTS ( - SELECT 1 FROM pg_enum e - JOIN pg_type t ON e.enumtypid = t.oid - WHERE t.typname IN ('imagestatus', 'image_status') - AND e.enumlabel = 'uploaded' - ) - """) - ).scalar() - - if not uploaded_exists: - # Need to recreate the enum with new values - # First, drop the default - op.execute("ALTER TABLE project_images ALTER COLUMN status DROP DEFAULT") - - # Rename old enum and create new one - op.execute(f"ALTER TYPE {enum_type_name} RENAME TO {enum_type_name}_old") - op.execute(""" - CREATE TYPE imagestatus AS ENUM ( - 'staged', - 'uploaded', - 'classifying', - 'assigned', - 'rejected', - 'unmatched', - 'invalid_exif', - 'duplicate' - ) - """) - - # Convert column to new type - op.execute(""" - ALTER TABLE project_images - ALTER COLUMN status TYPE imagestatus - USING CASE status::text - WHEN 'classified' THEN 'assigned'::imagestatus - ELSE status::text::imagestatus - END - """) - - # Drop old enum and restore default - op.execute(f"DROP TYPE {enum_type_name}_old") - op.execute( - "ALTER TABLE project_images ALTER COLUMN status SET DEFAULT 'staged'::imagestatus" - ) - - -def downgrade(): - op.drop_index("idx_project_images_batch_status", table_name="project_images") - op.drop_index("idx_project_images_batch_id", table_name="project_images") - op.drop_column("project_images", "sharpness_score") - op.drop_column("project_images", "rejection_reason") - op.drop_column("project_images", "batch_id") - - op.execute("ALTER TYPE imagestatus RENAME TO imagestatus_new") - 
op.execute(""" - CREATE TYPE imagestatus AS ENUM ( - 'staged', - 'classified', - 'invalid_exif', - 'unmatched', - 'duplicate' - ) - """) - op.execute(""" - ALTER TABLE project_images - ALTER COLUMN status TYPE imagestatus - USING CASE status::text - WHEN 'uploaded' THEN 'staged'::imagestatus - WHEN 'assigned' THEN 'classified'::imagestatus - WHEN 'classifying' THEN 'staged'::imagestatus - WHEN 'rejected' THEN 'staged'::imagestatus - ELSE status::text::imagestatus - END - """) - op.execute("DROP TYPE imagestatus_new") - - op.alter_column( - "project_images", - "status", - existing_type=postgresql.ENUM( - "staged", - "classified", - "invalid_exif", - "unmatched", - "duplicate", - name="imagestatus", - ), - nullable=False, - server_default="staged", - ) diff --git a/src/backend/app/projects/classification_routes.py b/src/backend/app/projects/classification_routes.py index 0927fdc8b..6472ef423 100644 --- a/src/backend/app/projects/classification_routes.py +++ b/src/backend/app/projects/classification_routes.py @@ -37,6 +37,47 @@ class ClassificationStatusResponse(BaseModel): images: list[dict] +@router.get("/{project_id}/latest-batch/", tags=["Image Classification"]) +async def get_latest_batch( + project_id: UUID, + db: Annotated[Connection, Depends(database.get_db)], + user: Annotated[AuthUser, Depends(login_required)], +): + """Get the most recent batch ID for a project.""" + try: + async with db.cursor() as cur: + await cur.execute( + """ + SELECT batch_id + FROM project_images + WHERE project_id = %(project_id)s + AND batch_id IS NOT NULL + GROUP BY batch_id + ORDER BY MAX(uploaded_at) DESC + LIMIT 1 + """, + {"project_id": str(project_id)}, + ) + result = await cur.fetchone() + + if not result: + raise HTTPException( + status_code=HTTPStatus.NOT_FOUND, + detail="No batches found for this project", + ) + + return {"batch_id": str(result[0]), "project_id": str(project_id)} + + except HTTPException: + raise + except Exception as e: + log.error(f"Failed to get latest 
batch: {e}") + raise HTTPException( + status_code=HTTPStatus.BAD_REQUEST, + detail=f"Failed to get latest batch: {e}", + ) + + @router.post("/{project_id}/classify-batch/", tags=["Image Classification"]) async def start_batch_classification( project_id: UUID, @@ -242,10 +283,18 @@ async def get_batch_map_data( async def delete_batch( project_id: UUID, batch_id: UUID, + db: Annotated[Connection, Depends(database.get_db)], redis: Annotated[ArqRedis, Depends(get_redis_pool)], user: Annotated[AuthUser, Depends(login_required)], + wait_for_cleanup: bool = Query( + False, + description="Delete immediately instead of enqueueing a background job", + ), ): try: + if wait_for_cleanup: + return await ImageClassifier.delete_batch(db, batch_id, project_id) + # Enqueue the deletion job to run in background job = await redis.enqueue_job( "delete_batch_images", @@ -294,6 +343,44 @@ async def get_batch_processing_summary( ) +@router.post("/{project_id}/batch/{batch_id}/finalize/", tags=["Image Classification"]) +async def finalize_batch( + project_id: UUID, + batch_id: UUID, + db: Annotated[Connection, Depends(database.get_db)], + user: Annotated[AuthUser, Depends(login_required)], +): + """Finalize a batch: move images to task folders without triggering ODM processing. + + This is called when a user clicks 'Finish' without processing any tasks. + It ensures images are stored under the correct {task_id}/images/ path. 
+ """ + try: + move_result = await ImageClassifier.move_batch_images_to_tasks( + db, batch_id, project_id + ) + await db.commit() + + log.info( + f"Finalized batch {batch_id}: moved {move_result['total_moved']} images " + f"to {move_result['task_count']} tasks" + ) + + return { + "message": "Batch finalized successfully", + "batch_id": str(batch_id), + "total_moved": move_result["total_moved"], + "task_count": move_result["task_count"], + } + + except Exception as e: + log.error(f"Failed to finalize batch: {e}") + raise HTTPException( + status_code=HTTPStatus.BAD_REQUEST, + detail=f"Failed to finalize batch: {e}", + ) + + @router.post("/{project_id}/batch/{batch_id}/process/", tags=["Image Classification"]) async def process_batch( project_id: UUID, @@ -407,6 +494,88 @@ async def delete_image( ) +# ─── Project-level (task-centric) endpoints ────────────────────────────────── + + +@router.get("/{project_id}/imagery/tasks/", tags=["Image Classification"]) +async def get_project_task_imagery_summary( + project_id: UUID, + db: Annotated[Connection, Depends(database.get_db)], + user: Annotated[AuthUser, Depends(login_required)], +): + """Per-task imagery summary aggregated across all batches. + + Single source of truth for task readiness, image counts, and processability. 
+ """ + try: + return await ImageClassifier.get_project_task_imagery_summary(db, project_id) + except Exception as e: + log.error(f"Failed to get project task imagery summary: {e}") + raise HTTPException( + status_code=HTTPStatus.BAD_REQUEST, + detail=f"Failed to retrieve task imagery summary: {e}", + ) + + +@router.get("/{project_id}/imagery/review/", tags=["Image Classification"]) +async def get_project_review( + project_id: UUID, + db: Annotated[Connection, Depends(database.get_db)], + user: Annotated[AuthUser, Depends(login_required)], +): + """Project-level review data: images grouped by task across all batches.""" + try: + return await ImageClassifier.get_project_review_data(db, project_id) + except Exception as e: + log.error(f"Failed to get project review data: {e}") + raise HTTPException( + status_code=HTTPStatus.BAD_REQUEST, + detail=f"Failed to retrieve project review data: {e}", + ) + + +@router.get("/{project_id}/imagery/map-data/", tags=["Image Classification"]) +async def get_project_map_data( + project_id: UUID, + db: Annotated[Connection, Depends(database.get_db)], + user: Annotated[AuthUser, Depends(login_required)], +): + """Project-level map data: task geometries + all image points across batches.""" + try: + return await ImageClassifier.get_project_map_data(db, project_id) + except Exception as e: + log.error(f"Failed to get project map data: {e}") + raise HTTPException( + status_code=HTTPStatus.BAD_REQUEST, + detail=f"Failed to retrieve project map data: {e}", + ) + + +@router.get( + "/{project_id}/imagery/task/{task_id}/verification/", + tags=["Image Classification"], +) +async def get_project_task_verification( + project_id: UUID, + task_id: UUID, + db: Annotated[Connection, Depends(database.get_db)], + user: Annotated[AuthUser, Depends(login_required)], +): + """Task verification data aggregated across all batches.""" + try: + return await ImageClassifier.get_task_verification_data_project( + db, task_id, project_id + ) + except ValueError 
as e: + raise HTTPException(status_code=HTTPStatus.NOT_FOUND, detail=str(e)) + except Exception as e: + log.error(f"Failed to get task verification data: {e}") + raise HTTPException( + status_code=HTTPStatus.BAD_REQUEST, + detail=f"Failed to retrieve task verification data: {e}", + ) + + @router.post( "/{project_id}/tasks/{task_id}/mark-verified/", tags=["Image Classification"] ) @@ -420,7 +589,9 @@ async def mark_task_verified( This inserts a new task event with IMAGE_UPLOADED state, indicating that the user has verified that all required images are present and the task - is ready for processing. + is ready for processing. After marking, it also moves the task's images + from the upload staging area to the task's images folder so they are + ready for ODM processing. """ try: async with db.cursor() as cur: @@ -466,20 +637,42 @@ async def mark_task_verified( }, ) + # Move images BEFORE committing the state change so we don't mark + # verified when files never made it to the task folder. + move_result = await ImageClassifier.move_task_images_to_folder( + db, project_id, task_id + ) + + if move_result.get("failed_count", 0) > 0: + await db.rollback() + raise HTTPException( + status_code=HTTPStatus.INTERNAL_SERVER_ERROR, + detail=( + f"Failed to move {move_result['failed_count']} image(s) " + f"to the task folder. Task was NOT marked as verified. " + f"Please try again." 
+ ), + ) + + await db.commit() + log.info( - f"Task {task_id} marked as verified (IMAGE_UPLOADED) by user {user.id}" + f"Task {task_id} marked as verified (IMAGE_UPLOADED) by user {user.id}, " + f"{move_result.get('moved_count', 0)} images moved" ) return { "message": "Task marked as fully flown", "task_id": str(task_id), "state": State.IMAGE_UPLOADED.name, + "images_moved": move_result.get("moved_count", 0), } except HTTPException: raise except Exception as e: log.error(f"Failed to mark task as verified: {e}") + await db.rollback() raise HTTPException( status_code=HTTPStatus.BAD_REQUEST, detail=f"Failed to mark task as verified: {e}", diff --git a/src/backend/app/projects/project_logic.py b/src/backend/app/projects/project_logic.py index dc9e0781d..62ce863b9 100644 --- a/src/backend/app/projects/project_logic.py +++ b/src/backend/app/projects/project_logic.py @@ -8,13 +8,14 @@ import geojson import pyproj import shapely.wkb as wkblib -from fastapi import BackgroundTasks, HTTPException, UploadFile +from fastapi import HTTPException, UploadFile from fastapi.concurrency import run_in_threadpool from geojson import Feature, FeatureCollection from loguru import logger as log from minio.error import S3Error from psycopg import Connection from psycopg.rows import dict_row +from pyodm.exceptions import NodeResponseError from shapely.geometry import shape from shapely.ops import transform @@ -28,6 +29,7 @@ from drone_flightplan.enums import FlightMode from app.config import settings +from app.images.image_classification import ImageClassifier from app.models.enums import ImageProcessingStatus, OAMUploadStatus, State from app.projects import project_schemas from app.images.image_processing import DroneImageProcessor @@ -166,6 +168,7 @@ async def update_task_metrics(db, task_updates): task_updates, ) log.debug(f"Updated {len(task_updates)} tasks with flight metrics") + await db.commit() async def process_task_metrics(db, tasks_data, project): @@ -252,9 +255,9 @@ async def 
create_tasks_from_geojson( project_id: uuid.UUID, boundaries: Any, project, - background_tasks: BackgroundTasks, + redis=None, ): - """Create tasks and update metrics asynchronously.""" + """Create tasks and enqueue task metric processing asynchronously.""" try: if isinstance(boundaries, str): boundaries = json.loads(boundaries) @@ -284,7 +287,22 @@ async def create_tasks_from_geojson( tasks_data, ) log.debug(f"Inserted {len(tasks_data)} tasks in bulk") - background_tasks.add_task(process_task_metrics, db, tasks_data, project) + await db.commit() + + if redis: + job = await redis.enqueue_job( + "process_project_task_metrics", + str(project_id), + _queue_name="default_queue", + ) + log.info( + f"Queued task metrics job {job.job_id} for project {project_id}" + ) + else: + log.warning( + "Project {} task metrics enqueue skipped (Redis unavailable)", + project_id, + ) return { "message": "Task creation started, metrics will be updated in the background" @@ -294,163 +312,6 @@ async def create_tasks_from_geojson( raise HTTPException(e) from e -# async def create_tasks_from_geojson( -# db: Connection, -# project_id: uuid.UUID, -# boundaries: Any, -# project: project_schemas.DbProject, -# ): -# """Create tasks for a project, from provided task boundaries.""" -# try: -# if isinstance(boundaries, str): -# boundaries = json.loads(boundaries) - -# if boundaries["type"] == "Feature": -# polygons = [boundaries] -# else: -# polygons = boundaries["features"] - -# log.debug(f"Processing {len(polygons)} task geometries") - -# # Set up the projection transform for EPSG:3857 (Web Mercator) -# proj_wgs84 = pyproj.CRS("EPSG:4326") -# proj_mercator = pyproj.CRS("EPSG:3857") -# project_transformer = pyproj.Transformer.from_crs( -# proj_wgs84, proj_mercator, always_xy=True -# ) - -# for index, polygon in enumerate(polygons): -# forward_overlap = project.front_overlap if project.front_overlap else 70 -# side_overlap = project.side_overlap if project.side_overlap else 70 -# generate_3d = 
False # TODO: For 3d imageries drone_flightplan package needs to be updated. - -# gsd = project.gsd_cm_px -# altitude = project.altitude_from_ground - -# parameters = calculate_parameters( -# forward_overlap, -# side_overlap, -# altitude, -# gsd, -# 2, # Image Interval is set to 2 -# ) - -# # Wrap polygon into GeoJSON Feature -# if not polygon["geometry"]: -# continue -# # If the polygon is a MultiPolygon, convert it to a Polygon -# if polygon["geometry"]["type"] == "MultiPolygon": -# log.debug("Converting MultiPolygon to Polygon") -# polygon["geometry"]["type"] = "Polygon" -# polygon["geometry"]["coordinates"] = polygon["geometry"]["coordinates"][ -# 0 -# ] - -# geom = shape(polygon["geometry"]) - -# coordinates = polygon["geometry"]["coordinates"] -# if polygon["geometry"]["type"] == "Polygon": -# coordinates = polygon["geometry"]["coordinates"] -# feature = Feature(geometry=Polygon(coordinates), properties={}) -# feature_collection = FeatureCollection([feature]) - -# # Common parameters for create_waypoint -# waypoint_params = { -# "project_area": feature_collection, -# "agl": altitude, -# "gsd": gsd, -# "forward_overlap": forward_overlap, -# "side_overlap": side_overlap, -# "rotation_angle": 0, -# "generate_3d": generate_3d, -# } -# waypoint_params["mode"] = FlightMode.WAYPOINTS -# if project.is_terrain_follow: -# dem_path = f"/tmp/{uuid.uuid4()}/dem.tif" - -# # Terrain follow uses waypoints mode, waylines are generated later -# points = create_waypoint(**waypoint_params) - -# try: -# get_file_from_bucket( -# settings.S3_BUCKET_NAME, -# f"projects/{project.id}/dem.tif", -# dem_path, -# ) -# # TODO: Do this with inmemory data -# outfile_with_elevation = "/tmp/output_file_with_elevation.geojson" -# add_elevation_from_dem(dem_path, points, outfile_with_elevation) - -# inpointsfile = open(outfile_with_elevation, "r") -# points_with_elevation = inpointsfile.read() - -# except Exception: -# points_with_elevation = points - -# placemarks = create_placemarks( -# 
geojson.loads(points_with_elevation), parameters -# ) - -# else: -# points = create_waypoint(**waypoint_params) -# placemarks = create_placemarks(geojson.loads(points), parameters) - -# flight_time_minutes = calculate_flight_time_from_placemarks(placemarks).get( -# "total_flight_time" -# ) -# flight_distance_km = calculate_flight_time_from_placemarks(placemarks).get( -# "flight_distance_km" -# ) -# try: -# # Transform the geometry to EPSG:3857 and calculate the area in square meters -# transformed_geom = transform(project_transformer.transform, geom) -# area_sq_m = transformed_geom.area # Area in square meters - -# # Convert area to square kilometers -# total_area_sqkm = area_sq_m / 1_000_000 - -# task_id = str(uuid.uuid4()) -# async with db.cursor() as cur: -# await cur.execute( -# """ -# INSERT INTO tasks (id, project_id, outline, project_task_index, total_area_sqkm, flight_time_minutes, flight_distance_km) -# VALUES (%(id)s, %(project_id)s, %(outline)s, %(project_task_index)s, %(total_area_sqkm)s, %(flight_time_minutes)s, %(flight_distance_km)s) -# RETURNING id; -# """, -# { -# "id": task_id, -# "project_id": project_id, -# "outline": wkblib.dumps( -# shape(polygon["geometry"]), hex=True -# ), -# "project_task_index": index + 1, -# "total_area_sqkm": total_area_sqkm, -# "flight_time_minutes": flight_time_minutes, -# "flight_distance_km": flight_distance_km, -# }, -# ) -# result = await cur.fetchone() - -# if result: -# log.debug( -# "Created database task | " -# f"Project ID {project_id} | " -# f"Task index {index}" -# ) -# log.debug( -# "COMPLETE: creating project boundary, based on task boundaries" -# ) -# except Exception as e: -# log.exception(e) -# raise HTTPException(e) from e - -# return True - -# except Exception as e: -# log.exception(e) -# raise HTTPException(e) from e - - async def preview_split_by_square(boundary: str, meters: int): """Preview split by square for a project boundary. 
@@ -480,11 +341,15 @@ async def process_drone_images( try: pool = ctx["db_pool"] async with pool.connection() as conn: - # Update task state to IMAGE_PROCESSING_STARTED + # Update task state to IMAGE_PROCESSING_STARTED first, so that any + # failure below can be correctly transitioned to IMAGE_PROCESSING_FAILED. + # Support fresh processing (IMAGE_UPLOADED), retries from failure + # (IMAGE_PROCESSING_FAILED), and reruns after completion + # (IMAGE_PROCESSING_FINISHED) when new imagery has been verified. from app.tasks import task_logic from app.utils import timestamp - await task_logic.update_task_state_system( + result = await task_logic.update_task_state_system( conn, project_id, task_id, @@ -493,9 +358,53 @@ async def process_drone_images( State.IMAGE_PROCESSING_STARTED, timestamp(), ) + if result is None: + result = await task_logic.update_task_state_system( + conn, + project_id, + task_id, + "ODM processing retry", + State.IMAGE_PROCESSING_FAILED, + State.IMAGE_PROCESSING_STARTED, + timestamp(), + ) + if result is None: + result = await task_logic.update_task_state_system( + conn, + project_id, + task_id, + "ODM processing rerun", + State.IMAGE_PROCESSING_FINISHED, + State.IMAGE_PROCESSING_STARTED, + timestamp(), + ) + if result is None: + raise RuntimeError( + "Cannot start processing: task is not in a valid state " + "(expected IMAGE_UPLOADED, IMAGE_PROCESSING_FAILED, " + "or IMAGE_PROCESSING_FINISHED)" + ) await conn.commit() log.info(f"Task {task_id} state updated to IMAGE_PROCESSING_STARTED") + # Ensure images classified for this task are available in the task folder. + # Single-task processing can be triggered from the project dialog before the + # batch-processing flow has copied files out of staging. 
+ move_result = await ImageClassifier.move_task_images_to_folder( + conn, project_id, task_id + ) + if move_result.get("failed_count", 0) > 0: + await conn.rollback() + raise RuntimeError( + f"Failed to move {move_result['failed_count']} image(s) into the task folder" + ) + if move_result.get("moved_count", 0) > 0: + await conn.commit() + log.info( + f"Task {task_id}: moved {move_result['moved_count']} staged image(s) " + "into the task folder before ODM submission" + ) + # Initialize the processor with the database connection processor = DroneImageProcessor( node_odm_url=settings.ODM_ENDPOINT, @@ -531,6 +440,35 @@ async def process_drone_images( } except Exception as e: + failure_message = str(e).strip() or "Image processing failed." + if isinstance(e, NodeResponseError) and "Not enough images" in failure_message: + failure_message = "Not enough images for ODM processing. At least 3 task images are required." + + try: + pool = ctx["db_pool"] + async with pool.connection() as conn: + from app.tasks import task_logic + from app.utils import timestamp + + await task_logic.update_task_state_system( + conn, + project_id, + task_id, + failure_message, + State.IMAGE_PROCESSING_STARTED, + State.IMAGE_PROCESSING_FAILED, + timestamp(), + ) + await conn.commit() + log.info( + f"Task {task_id} state updated to IMAGE_PROCESSING_FAILED: " + f"{failure_message}" + ) + except Exception as state_error: + log.error( + f"Failed to persist processing failure state for task {task_id}: {state_error}" + ) + log.error(f"Error in process_drone_images (Job ID: {job_id}): {str(e)}") raise diff --git a/src/backend/app/projects/project_routes.py b/src/backend/app/projects/project_routes.py index a37028592..98a0d507b 100644 --- a/src/backend/app/projects/project_routes.py +++ b/src/backend/app/projects/project_routes.py @@ -243,7 +243,6 @@ async def create_project( @router.post("/{project_id}/upload-task-boundaries", tags=["Projects"]) async def upload_project_task_boundaries( - 
background_tasks: BackgroundTasks, project: Annotated[ project_schemas.DbProject, Depends(project_deps.get_project_by_id) ], @@ -259,8 +258,18 @@ async def upload_project_task_boundaries( dict: JSON containing success message, project ID, and number of tasks. """ log.debug("Creating tasks for each polygon in project") + redis_pool = None + try: + redis_pool = await get_redis_pool() + except HTTPException as e: + log.warning( + "Project {} tasks created without metrics queueing: {}", + project.id, + e.detail, + ) + await project_logic.create_tasks_from_geojson( - db, project.id, task_featcol, project, background_tasks + db, project.id, task_featcol, project, redis_pool ) return { diff --git a/src/backend/app/projects/project_schemas.py b/src/backend/app/projects/project_schemas.py index 541569937..87778ef4a 100644 --- a/src/backend/app/projects/project_schemas.py +++ b/src/backend/app/projects/project_schemas.py @@ -31,7 +31,9 @@ RegulatorApprovalStatus, UserRole, ) +from app.config import settings from app.s3 import ( + check_file_exists, get_assets_url_for_project, get_orthophoto_url_for_project, maybe_presign_s3_key, @@ -663,6 +665,7 @@ class ProjectInfo(BaseModel): oam_upload_status: Optional[str] = None assets_url: Optional[str] = None orthophoto_url: Optional[str] = None + has_gcp: bool = False regulator_comment: Optional[str] = None commenting_regulator_id: Optional[str] = None author_name: Optional[str] = None @@ -720,6 +723,24 @@ def set_orthophoto_url(cls, values): return values + @model_validator(mode="after") + def set_has_gcp(cls, values): + project_id = values.id + if not project_id: + values.has_gcp = False + return values + + try: + values.has_gcp = check_file_exists( + settings.S3_BUCKET_NAME, + f"projects/{project_id}/gcp/gcp_list.txt", + ) + except Exception as e: + log.warning(f"Failed to determine has_gcp for project {project_id}: {e}") + values.has_gcp = False + + return values + @model_validator(mode="after") def calculate_status(cls, values): 
"""Set the project status based on task counts.""" diff --git a/src/backend/docker-entrypoint.sh b/src/backend/docker-entrypoint.sh index 769d2d348..21eb94b30 100755 --- a/src/backend/docker-entrypoint.sh +++ b/src/backend/docker-entrypoint.sh @@ -5,24 +5,28 @@ set -eo pipefail wait_for_db() { max_retries=30 retry_interval=5 + db_host="${POSTGRES_HOST:-db}" + db_port="${POSTGRES_PORT:-5432}" + db_user="${POSTGRES_USER:-dtm}" + db_name="${POSTGRES_DB:-dtm_db}" for ((i = 0; i < max_retries; i++)); do - if /dev/null 2>&1; then echo "Database is available." - return 0 # Database is available, exit successfully + return 0 fi - echo "Database is not yet available. Retrying in ${retry_interval} seconds..." + echo "Database is not yet available at ${db_host}:${db_port}. Retrying in ${retry_interval} seconds..." sleep ${retry_interval} done - echo "Timed out waiting for the database to become available." - exit 1 # Exit with an error code + echo "Timed out waiting for the database to become available at ${db_host}:${db_port}." 
+ exit 1 } -# Start wait in background with tmp log files -wait_for_db & -wait +wait_for_db exec "$@" - -exit 0 diff --git a/src/backend/tests/test_batch_delete.py b/src/backend/tests/test_batch_delete.py new file mode 100644 index 000000000..b6e6f8554 --- /dev/null +++ b/src/backend/tests/test_batch_delete.py @@ -0,0 +1,227 @@ +import asyncio +from io import BytesIO +import uuid + +import pytest + +from app.config import settings +from app.images.image_classification import ImageClassifier +from app.s3 import add_obj_to_bucket, check_file_exists + + +def _upload_test_object(object_name: str, content: bytes) -> None: + add_obj_to_bucket( + settings.S3_BUCKET_NAME, + BytesIO(content), + object_name, + content_type="image/jpeg", + ) + + +async def _insert_batch_image( + db, + *, + project_id: uuid.UUID, + batch_id: uuid.UUID, + uploaded_by: str, + filename: str, + s3_key: str, + thumbnail_url: str, +) -> None: + async with db.cursor() as cur: + await cur.execute( + """ + INSERT INTO project_images + (id, project_id, filename, s3_key, thumbnail_url, hash_md5, batch_id, status, uploaded_by) + VALUES (%s, %s, %s, %s, %s, %s, %s, 'staged', %s) + """, + ( + str(uuid.uuid4()), + str(project_id), + filename, + s3_key, + thumbnail_url, + uuid.uuid4().hex, + str(batch_id), + uploaded_by, + ), + ) + + +async def _count_batch_images(db, *, project_id: uuid.UUID, batch_id: uuid.UUID) -> int: + async with db.cursor() as cur: + await cur.execute( + """ + SELECT COUNT(*) + FROM project_images + WHERE batch_id = %s AND project_id = %s + """, + (str(batch_id), str(project_id)), + ) + row = await cur.fetchone() + return int(row[0]) + + +async def _create_batch_with_objects( + db, + *, + project_id: uuid.UUID, + batch_id: uuid.UUID, + uploaded_by: str, + image_prefix: str, + image_count: int, +) -> list[str]: + object_names: list[str] = [] + + for index in range(image_count): + s3_key = ( + f"projects/{project_id}/user-uploads/{batch_id}/{image_prefix}_{index}.jpg" + ) + thumbnail_key 
= f"projects/{project_id}/user-uploads/{batch_id}/thumbs/{image_prefix}_{index}.jpg" + _upload_test_object(s3_key, f"{image_prefix}-image-{index}".encode()) + _upload_test_object(thumbnail_key, f"{image_prefix}-thumb-{index}".encode()) + object_names.extend([s3_key, thumbnail_key]) + await _insert_batch_image( + db, + project_id=project_id, + batch_id=batch_id, + uploaded_by=uploaded_by, + filename=f"{image_prefix}_{index}.jpg", + s3_key=s3_key, + thumbnail_url=thumbnail_key, + ) + + await db.commit() + return object_names + + +async def _wait_for_batch_cleanup( + db, + *, + project_id: uuid.UUID, + batch_id: uuid.UUID, + object_names: list[str], + attempts: int = 40, + delay_seconds: float = 0.25, +) -> None: + for _ in range(attempts): + remaining_images = await _count_batch_images( + db, project_id=project_id, batch_id=batch_id + ) + remaining_objects = [ + name + for name in object_names + if check_file_exists(settings.S3_BUCKET_NAME, name) + ] + if remaining_images == 0 and not remaining_objects: + return + await asyncio.sleep(delay_seconds) + + remaining_images = await _count_batch_images( + db, project_id=project_id, batch_id=batch_id + ) + remaining_objects = [ + name + for name in object_names + if check_file_exists(settings.S3_BUCKET_NAME, name) + ] + pytest.fail( + "Batch cleanup did not complete in time: " + f"remaining_images={remaining_images}, remaining_objects={remaining_objects}" + ) + + +@pytest.mark.asyncio +async def test_delete_batch_removes_db_rows_and_s3_objects( + db, create_test_project, auth_user +): + project_id = uuid.UUID(create_test_project) + batch_id = uuid.uuid4() + object_names = await _create_batch_with_objects( + db, + project_id=project_id, + batch_id=batch_id, + uploaded_by=auth_user.id, + image_prefix="image", + image_count=2, + ) + + result = await ImageClassifier.delete_batch(db, batch_id, project_id) + + assert result["message"] == "Batch deleted successfully" + assert result["batch_id"] == str(batch_id) + assert 
result["deleted_count"] == 2 + assert result["deleted_s3_count"] == 4 + assert await _count_batch_images(db, project_id=project_id, batch_id=batch_id) == 0 + assert all( + not check_file_exists(settings.S3_BUCKET_NAME, name) for name in object_names + ) + + +@pytest.mark.asyncio +async def test_delete_batch_route_waits_for_cleanup( + client, db, create_test_project, auth_user +): + project_id = uuid.UUID(create_test_project) + batch_id = uuid.uuid4() + object_names = await _create_batch_with_objects( + db, + project_id=project_id, + batch_id=batch_id, + uploaded_by=auth_user.id, + image_prefix="sync", + image_count=2, + ) + + response = await client.delete( + f"/api/projects/{project_id}/batch/{batch_id}/", + params={"wait_for_cleanup": "true"}, + ) + + assert response.status_code == 200 + assert response.json() == { + "message": "Batch deleted successfully", + "batch_id": str(batch_id), + "deleted_count": 2, + "deleted_s3_count": 4, + } + assert await _count_batch_images(db, project_id=project_id, batch_id=batch_id) == 0 + assert all( + not check_file_exists(settings.S3_BUCKET_NAME, name) for name in object_names + ) + + +@pytest.mark.asyncio +@pytest.mark.integration +async def test_delete_batch_route_enqueues_cleanup_job( + client, db, create_test_project, auth_user +): + """Integration test: requires a running ARQ worker to process the background job. + + Skipped by default in unit test runs. 
Run with: pytest -m integration + """ + project_id = uuid.UUID(create_test_project) + batch_id = uuid.uuid4() + object_names = await _create_batch_with_objects( + db, + project_id=project_id, + batch_id=batch_id, + uploaded_by=auth_user.id, + image_prefix="queued", + image_count=1, + ) + + response = await client.delete(f"/api/projects/{project_id}/batch/{batch_id}/") + + assert response.status_code == 200 + body = response.json() + assert body["message"] == "Batch deletion started" + assert body["batch_id"] == str(batch_id) + assert body["job_id"] + + await _wait_for_batch_cleanup( + db, + project_id=project_id, + batch_id=batch_id, + object_names=object_names, + ) diff --git a/src/backend/tests/test_batch_map_data.py b/src/backend/tests/test_batch_map_data.py new file mode 100644 index 000000000..0f74d0f16 --- /dev/null +++ b/src/backend/tests/test_batch_map_data.py @@ -0,0 +1,184 @@ +"""Tests for batch map data retrieval with GPS and non-GPS images.""" + +import uuid +import pytest +from shapely.geometry import box + +from app.models.enums import ImageStatus +from app.images.image_classification import ImageClassifier + + +@pytest.mark.asyncio +async def test_get_batch_map_data_with_mixed_gps(db, create_test_project, auth_user): + """Test that get_batch_map_data returns both located and unlocated images.""" + project_id = uuid.UUID(create_test_project) + batch_id = uuid.uuid4() + task_id = uuid.uuid4() + + # Create a test task + outline = box(-8.34, 115.50, -8.33, 115.51) + async with db.cursor() as cur: + await cur.execute( + """ + INSERT INTO tasks (id, project_id, project_task_index, outline) + VALUES (%s, %s, %s, ST_SetSRID(ST_GeomFromText(%s), 4326)) + """, + (str(task_id), str(project_id), 1, outline.wkt), + ) + + # Insert image WITH GPS location + image_with_gps_id = uuid.uuid4() + async with db.cursor() as cur: + await cur.execute( + """ + INSERT INTO project_images + (id, project_id, task_id, filename, s3_key, hash_md5, batch_id, status, location, 
uploaded_by) + VALUES (%s, %s, %s, %s, %s, %s, %s, %s, ST_SetSRID(ST_MakePoint(%s, %s), 4326), %s) + """, + ( + str(image_with_gps_id), + str(project_id), + str(task_id), + "image_with_gps.jpg", + "images/image_with_gps.jpg", + "abc123abc123abc123abc123", + str(batch_id), + ImageStatus.ASSIGNED.value, + 115.505, # longitude + -8.335, # latitude + auth_user.id, + ), + ) + + # Insert image WITHOUT GPS location (null geometry) + image_without_gps_id = uuid.uuid4() + async with db.cursor() as cur: + await cur.execute( + """ + INSERT INTO project_images + (id, project_id, task_id, filename, s3_key, hash_md5, batch_id, status, rejection_reason, uploaded_by) + VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s) + """, + ( + str(image_without_gps_id), + str(project_id), + str(task_id), + "image_no_gps.jpg", + "images/image_no_gps.jpg", + "def456def456def456def456", + str(batch_id), + ImageStatus.INVALID_EXIF.value, + "No GPS coordinates found in EXIF", + auth_user.id, + ), + ) + + await db.commit() + + # Get batch map data + map_data = await ImageClassifier.get_batch_map_data(db, batch_id, project_id) + + # Verify response structure + assert map_data["batch_id"] == str(batch_id) + assert "tasks" in map_data + assert "images" in map_data + assert "total_tasks" in map_data + assert "total_images" in map_data + assert "total_images_with_gps" in map_data + assert "total_images_without_gps" in map_data + + # Verify counts + assert map_data["total_tasks"] == 1 + assert map_data["total_images"] == 2 # Both images included + assert map_data["total_images_with_gps"] == 1 + assert map_data["total_images_without_gps"] == 1 + + # Verify image features + image_features = map_data["images"]["features"] + assert len(image_features) == 2 + + # Find images by filename + located_feature = next( + f for f in image_features if f["properties"]["filename"] == "image_with_gps.jpg" + ) + unlocated_feature = next( + f for f in image_features if f["properties"]["filename"] == "image_no_gps.jpg" + ) + + 
# Verify located image has Point geometry + assert located_feature["geometry"] is not None + assert located_feature["geometry"]["type"] == "Point" + assert located_feature["properties"]["status"] == ImageStatus.ASSIGNED.value + + # Verify unlocated image has null geometry but properties preserved + assert unlocated_feature["geometry"] is None + assert unlocated_feature["properties"]["filename"] == "image_no_gps.jpg" + assert unlocated_feature["properties"]["status"] == ImageStatus.INVALID_EXIF.value + assert ( + unlocated_feature["properties"]["rejection_reason"] + == "No GPS coordinates found in EXIF" + ) + + +@pytest.mark.asyncio +async def test_get_batch_map_data_empty_batch(db, create_test_project, auth_user): + """Test that get_batch_map_data handles empty batches gracefully.""" + project_id = uuid.UUID(create_test_project) + batch_id = uuid.uuid4() + + # Get batch map data for non-existent batch + map_data = await ImageClassifier.get_batch_map_data(db, batch_id, project_id) + + # Verify response is valid but empty + assert map_data["batch_id"] == str(batch_id) + assert map_data["total_tasks"] == 0 + assert map_data["total_images"] == 0 + assert map_data["total_images_with_gps"] == 0 + assert map_data["total_images_without_gps"] == 0 + assert len(map_data["tasks"]["features"]) == 0 + assert len(map_data["images"]["features"]) == 0 + + +@pytest.mark.asyncio +async def test_get_batch_map_data_all_without_gps(db, create_test_project, auth_user): + """Test batch with only images missing GPS coordinates.""" + project_id = uuid.UUID(create_test_project) + batch_id = uuid.uuid4() + + # Insert 3 images without GPS + for i in range(3): + image_id = uuid.uuid4() + async with db.cursor() as cur: + await cur.execute( + """ + INSERT INTO project_images + (id, project_id, filename, s3_key, hash_md5, batch_id, status, rejection_reason, uploaded_by) + VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s) + """, + ( + str(image_id), + str(project_id), + f"image_{i}.jpg", + 
f"images/image_{i}.jpg", + f"hash{i:0>24}", + str(batch_id), + ImageStatus.INVALID_EXIF.value, + "Missing GPS", + auth_user.id, + ), + ) + + await db.commit() + + # Get batch map data + map_data = await ImageClassifier.get_batch_map_data(db, batch_id, project_id) + + # Verify all images are returned as unlocated + assert map_data["total_images"] == 3 + assert map_data["total_images_with_gps"] == 0 + assert map_data["total_images_without_gps"] == 3 + + # Verify all features have null geometry + for feature in map_data["images"]["features"]: + assert feature["geometry"] is None + assert feature["properties"]["rejection_reason"] == "Missing GPS" diff --git a/src/backend/tests/test_project_processing.py b/src/backend/tests/test_project_processing.py new file mode 100644 index 000000000..6a2e66dcc --- /dev/null +++ b/src/backend/tests/test_project_processing.py @@ -0,0 +1,362 @@ +import uuid + +import pytest +from pyodm.exceptions import NodeResponseError + +from app.models.enums import State +from app.projects import project_logic + + +class _FakeConn: + def __init__(self): + self.commit_calls = 0 + self.rollback_calls = 0 + + async def commit(self): + self.commit_calls += 1 + + async def rollback(self): + self.rollback_calls += 1 + + +class _FakePoolConnection: + def __init__(self, conn): + self.conn = conn + + async def __aenter__(self): + return self.conn + + async def __aexit__(self, exc_type, exc, tb): + return False + + +class _FakePool: + def __init__(self, conn): + self.conn = conn + + def connection(self): + return _FakePoolConnection(self.conn) + + +@pytest.mark.asyncio +async def test_process_drone_images_moves_staged_task_images_before_odm(monkeypatch): + project_id = uuid.uuid4() + task_id = uuid.uuid4() + conn = _FakeConn() + ctx = {"job_id": "job-1", "db_pool": _FakePool(conn)} + calls = {"move": [], "state": []} + + async def fake_move_task_images_to_folder(db, project_id_arg, task_id_arg): + calls["move"].append((db, project_id_arg, task_id_arg)) + 
return {"moved_count": 13, "failed_count": 0} + + async def fake_update_task_state_system( + db, + project_id_arg, + task_id_arg, + comment, + initial_state, + final_state, + updated_at, + ): + calls["state"].append( + { + "project_id": project_id_arg, + "task_id": task_id_arg, + "comment": comment, + "initial_state": initial_state, + "final_state": final_state, + } + ) + return {"project_id": project_id_arg, "task_id": task_id_arg} + + class FakeProcessor: + def __init__(self, **kwargs): + self.kwargs = kwargs + + async def process_images_from_s3(self, bucket_name, name, options, webhook): + return { + "bucket_name": bucket_name, + "name": name, + "webhook": webhook, + } + + monkeypatch.setattr( + project_logic.ImageClassifier, + "move_task_images_to_folder", + fake_move_task_images_to_folder, + ) + monkeypatch.setattr(project_logic, "DroneImageProcessor", FakeProcessor) + + from app.tasks import task_logic + + monkeypatch.setattr( + task_logic, + "update_task_state_system", + fake_update_task_state_system, + ) + + result = await project_logic.process_drone_images( + ctx, + project_id, + task_id, + "user-123", + ) + + assert result["status"] == "processing_started" + # State is set to STARTED before the move + assert calls["state"][0]["final_state"] == State.IMAGE_PROCESSING_STARTED + assert calls["move"] == [(conn, project_id, task_id)] + assert conn.commit_calls >= 2 + + +@pytest.mark.asyncio +async def test_process_drone_images_marks_task_failed_when_odm_rejects(monkeypatch): + project_id = uuid.uuid4() + task_id = uuid.uuid4() + conn = _FakeConn() + ctx = {"job_id": "job-2", "db_pool": _FakePool(conn)} + state_calls = [] + + async def fake_move_task_images_to_folder(db, project_id_arg, task_id_arg): + return {"moved_count": 0, "failed_count": 0} + + async def fake_update_task_state_system( + db, + project_id_arg, + task_id_arg, + comment, + initial_state, + final_state, + updated_at, + ): + state_calls.append( + { + "comment": comment, + "initial_state": 
initial_state, + "final_state": final_state, + } + ) + return {"project_id": project_id_arg, "task_id": task_id_arg} + + class FailingProcessor: + def __init__(self, **kwargs): + self.kwargs = kwargs + + async def process_images_from_s3(self, bucket_name, name, options, webhook): + raise NodeResponseError("Not enough images") + + monkeypatch.setattr( + project_logic.ImageClassifier, + "move_task_images_to_folder", + fake_move_task_images_to_folder, + ) + monkeypatch.setattr(project_logic, "DroneImageProcessor", FailingProcessor) + + from app.tasks import task_logic + + monkeypatch.setattr( + task_logic, + "update_task_state_system", + fake_update_task_state_system, + ) + + with pytest.raises(NodeResponseError): + await project_logic.process_drone_images( + ctx, + project_id, + task_id, + "user-123", + ) + + assert state_calls[0]["final_state"] == State.IMAGE_PROCESSING_STARTED + assert state_calls[1]["final_state"] == State.IMAGE_PROCESSING_FAILED + assert ( + state_calls[1]["comment"] + == "Not enough images for ODM processing. At least 3 task images are required." + ) + + +@pytest.mark.asyncio +async def test_process_drone_images_retries_from_failed_state(monkeypatch): + """Retry a task whose previous processing attempt failed. + + The first transition (IMAGE_UPLOADED -> STARTED) should return None + because the task is in IMAGE_PROCESSING_FAILED, then the fallback + (IMAGE_PROCESSING_FAILED -> STARTED) should succeed. 
+ """ + project_id = uuid.uuid4() + task_id = uuid.uuid4() + conn = _FakeConn() + ctx = {"job_id": "job-3", "db_pool": _FakePool(conn)} + state_calls = [] + + async def fake_move_task_images_to_folder(db, project_id_arg, task_id_arg): + return {"moved_count": 0, "failed_count": 0} + + async def fake_update_task_state_system( + db, + project_id_arg, + task_id_arg, + comment, + initial_state, + final_state, + updated_at, + ): + state_calls.append( + { + "comment": comment, + "initial_state": initial_state, + "final_state": final_state, + } + ) + # Simulate: task is currently IMAGE_PROCESSING_FAILED. + # IMAGE_UPLOADED -> STARTED fails; IMAGE_PROCESSING_FAILED -> STARTED succeeds. + if initial_state == State.IMAGE_UPLOADED: + return None + return {"project_id": project_id_arg, "task_id": task_id_arg} + + class FakeProcessor: + def __init__(self, **kwargs): + self.kwargs = kwargs + + async def process_images_from_s3(self, bucket_name, name, options, webhook): + return {"bucket_name": bucket_name, "name": name, "webhook": webhook} + + monkeypatch.setattr( + project_logic.ImageClassifier, + "move_task_images_to_folder", + fake_move_task_images_to_folder, + ) + monkeypatch.setattr(project_logic, "DroneImageProcessor", FakeProcessor) + + from app.tasks import task_logic + + monkeypatch.setattr( + task_logic, + "update_task_state_system", + fake_update_task_state_system, + ) + + result = await project_logic.process_drone_images( + ctx, + project_id, + task_id, + "user-123", + ) + + assert result["status"] == "processing_started" + # First attempt (IMAGE_UPLOADED) returned None, second (IMAGE_PROCESSING_FAILED) succeeded + assert state_calls[0]["initial_state"] == State.IMAGE_UPLOADED + assert state_calls[1]["initial_state"] == State.IMAGE_PROCESSING_FAILED + assert state_calls[1]["final_state"] == State.IMAGE_PROCESSING_STARTED + + +@pytest.mark.asyncio +async def test_process_drone_images_reruns_from_finished_state(monkeypatch): + project_id = uuid.uuid4() + task_id = 
uuid.uuid4() + conn = _FakeConn() + ctx = {"job_id": "job-4", "db_pool": _FakePool(conn)} + state_calls = [] + + async def fake_move_task_images_to_folder(db, project_id_arg, task_id_arg): + return {"moved_count": 0, "failed_count": 0} + + async def fake_update_task_state_system( + db, + project_id_arg, + task_id_arg, + comment, + initial_state, + final_state, + updated_at, + ): + state_calls.append( + { + "comment": comment, + "initial_state": initial_state, + "final_state": final_state, + } + ) + if initial_state in ( + State.IMAGE_UPLOADED, + State.IMAGE_PROCESSING_FAILED, + ): + return None + return {"project_id": project_id_arg, "task_id": task_id_arg} + + class FakeProcessor: + def __init__(self, **kwargs): + self.kwargs = kwargs + + async def process_images_from_s3(self, bucket_name, name, options, webhook): + return {"bucket_name": bucket_name, "name": name, "webhook": webhook} + + monkeypatch.setattr( + project_logic.ImageClassifier, + "move_task_images_to_folder", + fake_move_task_images_to_folder, + ) + monkeypatch.setattr(project_logic, "DroneImageProcessor", FakeProcessor) + + from app.tasks import task_logic + + monkeypatch.setattr( + task_logic, + "update_task_state_system", + fake_update_task_state_system, + ) + + result = await project_logic.process_drone_images( + ctx, + project_id, + task_id, + "user-123", + ) + + assert result["status"] == "processing_started" + assert state_calls[0]["initial_state"] == State.IMAGE_UPLOADED + assert state_calls[1]["initial_state"] == State.IMAGE_PROCESSING_FAILED + assert state_calls[2]["initial_state"] == State.IMAGE_PROCESSING_FINISHED + assert state_calls[2]["final_state"] == State.IMAGE_PROCESSING_STARTED + + +@pytest.mark.asyncio +async def test_process_drone_images_raises_when_state_invalid(monkeypatch): + """If task is in none of the allowed processing states, raise immediately.""" + project_id = uuid.uuid4() + task_id = uuid.uuid4() + conn = _FakeConn() + ctx = {"job_id": "job-5", "db_pool": 
_FakePool(conn)} + + async def fake_move_task_images_to_folder(db, project_id_arg, task_id_arg): + return {"moved_count": 0, "failed_count": 0} + + async def fake_update_task_state_system( + db, project_id_arg, task_id_arg, comment, initial_state, final_state, updated_at + ): + # Both transitions fail — task is in an unexpected state + return None + + monkeypatch.setattr( + project_logic.ImageClassifier, + "move_task_images_to_folder", + fake_move_task_images_to_folder, + ) + + from app.tasks import task_logic + + monkeypatch.setattr( + task_logic, + "update_task_state_system", + fake_update_task_state_system, + ) + + with pytest.raises(RuntimeError, match="not in a valid state"): + await project_logic.process_drone_images( + ctx, + project_id, + task_id, + "user-123", + ) diff --git a/src/backend/tests/test_projects_routes.py b/src/backend/tests/test_projects_routes.py index 83a614994..b87ae7b26 100644 --- a/src/backend/tests/test_projects_routes.py +++ b/src/backend/tests/test_projects_routes.py @@ -6,6 +6,7 @@ from loguru import logger as log from app.projects import project_routes +from app.projects import project_schemas @pytest.mark.asyncio @@ -83,6 +84,19 @@ async def test_read_project(client, create_test_project): assert response.json()["id"] == project_id +@pytest.mark.asyncio +async def test_read_project_includes_has_gcp_flag( + client, create_test_project, monkeypatch +): + project_id = create_test_project + monkeypatch.setattr(project_schemas, "check_file_exists", lambda *_args: True) + + response = await client.get(f"/api/projects/{project_id}") + + assert response.status_code == 200 + assert response.json()["has_gcp"] is True + + @pytest.mark.asyncio async def test_read_project_centroids(client): """Test reading project centroids.""" diff --git a/src/frontend/src/api/projects.ts b/src/frontend/src/api/projects.ts index 2073320e4..d708c5abc 100644 --- a/src/frontend/src/api/projects.ts +++ b/src/frontend/src/api/projects.ts @@ -89,18 +89,18 @@ export 
const useGetProjectCentroidQuery = ( queryOptions?: Partial, ) => { return useQuery({ - queryKey: queryOptions?.queryKey - ? [ - 'all-projects-centroid', - ...Object.values(queryOptions?.queryKey || {}), - ] - : ['all-projects-centroid'], queryFn: () => getProjectCentroid( queryOptions?.queryKey ? { ...queryOptions.queryKey } : {}, ), select: (data: any) => data.data, ...queryOptions, + queryKey: queryOptions?.queryKey + ? [ + 'all-projects-centroid', + ...Object.values(queryOptions?.queryKey || {}), + ] + : ['all-projects-centroid'], }); }; diff --git a/src/frontend/src/components/CreateProject/CreateprojectLayout/index.tsx b/src/frontend/src/components/CreateProject/CreateprojectLayout/index.tsx index ef2956a11..91e84334f 100644 --- a/src/frontend/src/components/CreateProject/CreateprojectLayout/index.tsx +++ b/src/frontend/src/components/CreateProject/CreateprojectLayout/index.tsx @@ -110,16 +110,16 @@ const CreateprojectLayout = () => { name: '', // short_description: '', description: '', - outline: null, - no_fly_zones: null, - gsd_cm_px: null, - task_split_dimension: null, - is_terrain_follow: null, + outline: undefined, + no_fly_zones: undefined, + gsd_cm_px: '', + task_split_dimension: '', + is_terrain_follow: false, // task_split_type: 1, per_task_instructions: '', deadline_at: '', visibility: 0, - dem: null, + dem: undefined, requires_approval_from_manager_for_locking: false, altitude_from_ground: 0, requires_approval_from_regulator: false, @@ -199,7 +199,7 @@ const CreateprojectLayout = () => { dispatch(setCreateProjectState({ activeStep: activeStep - 1 })); }; - const { isFetching: isFetchingCountry } = useQuery({ + const { data: countryResponse, isFetching: isFetchingCountry } = useQuery({ queryFn: () => getCountry({ lon: projectCentroid?.[0] || 0, @@ -208,15 +208,16 @@ const CreateprojectLayout = () => { }), queryKey: ['country', projectCentroid?.[0], projectCentroid?.[1]], enabled: !!projectCentroid, - select(data) { - dispatch( - setCommonState({ - 
projectCountry: data?.data?.address?.country || null, - }), - ); - }, }); + useEffect(() => { + dispatch( + setCommonState({ + projectCountry: countryResponse?.data?.address?.country || null, + }), + ); + }, [countryResponse, dispatch]); + const onSubmit = (data: any) => { if (activeStep === 2) { if ( diff --git a/src/frontend/src/components/CreateProject/FormContents/GenerateTasks/index.tsx b/src/frontend/src/components/CreateProject/FormContents/GenerateTasks/index.tsx index bfc4b89e2..2a98ce311 100644 --- a/src/frontend/src/components/CreateProject/FormContents/GenerateTasks/index.tsx +++ b/src/frontend/src/components/CreateProject/FormContents/GenerateTasks/index.tsx @@ -88,7 +88,7 @@ export default function GenerateTasks({ formProps }: { formProps: any }) { placeholder="Enter Dimension (in m)" type="number" className="naxatw-mt-1" - value={dimension} + value={dimension ?? ''} min={50} max={1000} {...register('task_split_dimension', { @@ -136,8 +136,10 @@ export default function GenerateTasks({ formProps }: { formProps: any }) { Generate Tasks {!projectWaypointCountIsLoading && projectWayPoints && ( -

- The average number of waypoints is: +

+

+ The average number of waypoints is: +

-

+
)}
diff --git a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DescriptionBox/index.tsx b/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DescriptionBox/index.tsx index 5e00cd6d8..f9d73d92b 100644 --- a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DescriptionBox/index.tsx +++ b/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DescriptionBox/index.tsx @@ -1,5 +1,5 @@ import { useEffect, useMemo, useState } from 'react'; -import { useParams, useNavigate } from 'react-router-dom'; +import { useNavigate, useParams } from 'react-router-dom'; import { useDispatch } from 'react-redux'; import { toast } from 'react-toastify'; import { @@ -7,8 +7,6 @@ import { useGetTaskAssetsInfo, useGetTaskWaypointQuery, } from '@Api/tasks'; -import { useMutation, useQueryClient } from '@tanstack/react-query'; -import { postProcessImagery } from '@Services/tasks'; import { formatString } from '@Utils/index'; import { Button } from '@Components/RadixComponents/Button'; import { @@ -16,19 +14,15 @@ import { setSelectedTaskDetailToViewOrthophoto, } from '@Store/actions/droneOperatorTask'; import { useTypedSelector } from '@Store/hooks'; -import { postTaskStatus } from '@Services/project'; import DescriptionBoxComponent from './DescriptionComponent'; import QuestionBox from '../QuestionBox'; import UploadsInformation from '../UploadsInformation'; import ProgressBar from './ProgessBar'; -import DroneImageProcessingWorkflow from '../DroneImageProcessingWorkflow'; const DescriptionBox = () => { const dispatch = useDispatch(); - const queryClient = useQueryClient(); const navigate = useNavigate(); const [flyable, setFlyable] = useState('yes'); - const [isWorkflowModalOpen, setIsWorkflowModalOpen] = useState(false); const { taskId, projectId } = useParams(); const waypointMode = useTypedSelector( state => state.droneOperatorTask.waypointMode, @@ -59,52 +53,17 @@ const DescriptionBox = () => { const { data: taskAssetsInformation, - 
// isFetching: taskAssetsInfoLoading, }: Record = useGetTaskAssetsInfo( projectId as string, taskId as string, ); - const { mutate: updateStatus, isPending: statusUpdating } = useMutation< - any, - any, - any, - unknown - >({ - mutationFn: postTaskStatus, - onSuccess: () => { - queryClient.invalidateQueries({ queryKey: ['task-description'] }); - queryClient.invalidateQueries({ queryKey: ['task-assets-info'] }); - }, - onError: (err: any) => { - toast.error(err.message); - }, - }); - useEffect(() => { dispatch(resetFilesExifData()); }, [dispatch]); - const { mutate: startImageryProcess, isPending: imageProcessingStarting } = - useMutation({ - mutationFn: () => - postProcessImagery(projectId as string, taskId as string), - onSuccess: () => { - updateStatus({ - projectId, - taskId, - data: { - event: 'image_processing_start', - updated_at: new Date().toISOString(), - }, - }); - toast.success('Image processing started'); - }, - }); - const { data: taskQueryData }: Record = useGetIndividualTaskQuery(taskId as string, { - // enabled: !!taskWayPoints, select: (data: any) => { const { data: taskData } = data; @@ -178,11 +137,6 @@ const DescriptionBox = () => { }, ], }, - // { - // total_image_uploaded: taskData?.total_image_uploaded || 0, - // assets_url: taskData?.assets_url, - // state: taskData?.state, - // }, ]; return { taskDescription, taskData }; }, @@ -200,12 +154,6 @@ const DescriptionBox = () => { } }, [dispatch, taskQueryData]); - // const taskAssetsInformation = useMemo(() => { - // if (!taskDescription) return {}; - // dispatch(setTaskAssetsInformation(taskDescription?.[2])); - // return taskDescription?.[2]; - // }, [taskDescription, dispatch]); - const handleDownloadResult = () => { if (!taskAssetsInformation?.assets_url) return; try { @@ -225,6 +173,10 @@ const DescriptionBox = () => { [taskId, uploadProgress], ); + const hasImages = taskAssetsInformation?.image_count > 0; + const isLocked = taskAssetsInformation?.state === 'LOCKED_FOR_MAPPING'; + const 
hasAssets = !!taskAssetsInformation?.assets_url; + return ( <>
@@ -236,19 +188,38 @@ const DescriptionBox = () => { /> ))}

- {/* TODO - we might need to change this value if a drone is added which cannot - achieve this speed */} *This flight time was calculated using an average ground speed of 11.5 m/s.

- {/* Drone Image Processing Workflow Modal */} - setIsWorkflowModalOpen(false)} - projectId={projectId as string} - /> + {(isLocked || hasImages) && ( +
+
+ + info + +
+

+ Imagery processing moved to the project page +

+

+ Upload imagery, verify coverage, and start processing from the + main project page instead of the individual task page. +

+ +
+
+
+ )} + {/* Upload progress bar */} {taskAssetsInformation?.image_count === 0 && (progressDetails?.totalFiles ? ( { /> ))} - {taskAssetsInformation?.image_count > 0 && ( + {/* Uploads info and download section */} + {hasImages && (
{ }, { name: 'Orthophoto available', - value: taskAssetsInformation?.assets_url ? 'Yes' : 'No', + value: hasAssets ? 'Yes' : 'No', }, { name: 'Image Status', value: - // if the state is LOCKED_FOR_MAPPING and has a image count it means the images are not fully uploaded - taskAssetsInformation?.state === 'LOCKED_FOR_MAPPING' && - taskAssetsInformation?.image_count > 0 + isLocked && hasImages ? 'Image Uploading Failed' : formatString(taskAssetsInformation?.state), }, ]} /> - {taskAssetsInformation?.assets_url && ( -
+ {hasAssets && ( +
-
)} - {progressDetails?.totalFiles ? ( + {progressDetails?.totalFiles && ( - ) : ( - <> - {/* Info banner for returning users */} - {(taskAssetsInformation?.state === 'IMAGE_UPLOADED' || - (taskAssetsInformation?.state === 'LOCKED_FOR_MAPPING' && - taskAssetsInformation?.image_count > 0)) && ( -
-

- {taskAssetsInformation?.image_count} images uploaded. Ready - to process. -

-
- )} - - {/* Start Processing / Re-run Processing button — prominent position */} - {(taskAssetsInformation?.state === 'IMAGE_UPLOADED' || - (taskAssetsInformation?.state === 'LOCKED_FOR_MAPPING' && - taskAssetsInformation?.image_count > 0)) && ( -
- - {taskAssetsInformation?.state === 'LOCKED_FOR_MAPPING' && - taskAssetsInformation?.image_count > 0 && ( -

- Note: Some images may not have been uploaded due to an - issue during the upload process. However, you can - proceed with processing for the successfully uploaded - images. -

- )} -
- )} - {taskAssetsInformation?.state === 'IMAGE_PROCESSING_FAILED' && ( -
- -
- )} - - {/* Collapsible upload section */} - {(taskAssetsInformation?.state === 'IMAGE_PROCESSING_FAILED' || - taskAssetsInformation?.state === 'LOCKED_FOR_MAPPING' || - taskAssetsInformation?.state === 'IMAGE_UPLOADED') && ( -
-
- - info - -
-

- Need to upload more images? -

-

- Image uploads are now managed from the Project Details - page. Use the Drone Image Processing Workflow to upload, - classify, and process your drone imagery. -

-
-
- -
- )} - )}
)} diff --git a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/ImageClassification.tsx b/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/ImageClassification.tsx index 4e7d76f32..c50d7bf7c 100644 --- a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/ImageClassification.tsx +++ b/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/ImageClassification.tsx @@ -360,26 +360,57 @@ const ImageClassification = ({ {/* Status Summary */} {computedStats && ( -
-
- {renderValue(computedStats.uploaded, isClassifying, 'naxatw-text-gray-500')} -
Pending
-
-
- {renderValue(computedStats.processing, isClassifying, 'naxatw-text-blue-600')} -
Processing
-
-
- {renderValue(computedStats.complete, isClassifying, 'naxatw-text-green-600')} -
No Issues
-
-
- {renderValue(computedStats.issues, isClassifying, 'naxatw-text-orange-600')} -
Issues
-
-
- {renderValue(computedStats.duplicates, isClassifying, 'naxatw-text-gray-600')} -
Duplicates
+
+ {/* Progress bar during classification */} + {isClassifying && batchStatus && (batchStatus.total ?? 0) > 0 && ( +
+
+
+
+ + Classifying images... + +
+ + {computedStats.totalClassified} / {batchStatus.total ?? 0} + +
+
+
+
+

+ {computedStats.processing > 0 && `${computedStats.processing} currently processing. `} + Updates every 10 seconds. +

+
+ )} + +
+
+ {renderValue(computedStats.uploaded, isClassifying, 'naxatw-text-gray-500')} +
Pending
+
+
+ {renderValue(computedStats.processing, isClassifying, 'naxatw-text-blue-600')} +
Processing
+
+
+ {renderValue(computedStats.complete, isClassifying, 'naxatw-text-green-600')} +
No Issues
+
+
+ {renderValue(computedStats.issues, isClassifying, 'naxatw-text-orange-600')} +
Issues
+
+
+ {renderValue(computedStats.duplicates, isClassifying, 'naxatw-text-gray-600')} +
Duplicates
+
)} diff --git a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/ImageReview.tsx b/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/ImageReview.tsx index 969b4bff1..b4ab1ee53 100644 --- a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/ImageReview.tsx +++ b/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/ImageReview.tsx @@ -1,7 +1,8 @@ import { useState, useEffect, useCallback, useRef } from 'react'; import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'; -import { Map as MapLibreMap, NavigationControl, AttributionControl } from 'maplibre-gl'; -import { getBatchReview, getBatchMapData, acceptImage, BatchReviewData, BatchMapData, TaskGroup, TaskGroupImage } from '@Services/classification'; +import { Map as MapLibreMap, NavigationControl, AttributionControl, LngLatBoundsLike, Popup } from 'maplibre-gl'; +import bbox from '@turf/bbox'; +import { getProjectReview, getProjectMapData, acceptImage, ProjectReviewData, ProjectMapData, TaskGroup, TaskGroupImage, getBatchReview, getBatchMapData } from '@Services/classification'; import { FlexColumn, FlexRow } from '@Components/common/Layouts'; import Accordion from '@Components/common/Accordion'; import { Button } from '@Components/RadixComponents/Button'; @@ -10,16 +11,53 @@ import MapContainer from '@Components/common/MapLibreComponents/MapContainer'; import VectorLayer from '@Components/common/MapLibreComponents/Layers/VectorLayer'; import BaseLayerSwitcherUI from '@Components/common/BaseLayerSwitcher'; import { GeojsonType } from '@Components/common/MapLibreComponents/types'; -import AsyncPopup from '@Components/common/MapLibreComponents/NewAsyncPopup'; import TaskVerificationModal from './TaskVerificationModal'; interface ImageReviewProps { projectId: string; - batchId: string; + batchId?: string; // Optional: when 
provided, shows batch-scoped data (upload step 3); when absent, shows project-level data (verify dialog) } +const hasIssueStatus = (status?: string) => status !== 'assigned'; + const ImageReview = ({ projectId, batchId }: ImageReviewProps) => { const queryClient = useQueryClient(); + const hasFitBoundsRef = useRef(false); + const popupRef = useRef(null); + const [showOnlyIssueImages, setShowOnlyIssueImages] = useState(false); + + // Use project-level endpoints when no batchId (verify dialog), batch-scoped when batchId is present (upload step 3) + // Both return the same shape (tasks FeatureCollection, images FeatureCollection, counts) + const { + data: mapData, + isLoading: isMapDataLoading, + error: mapDataError, + isError: isMapDataError + } = useQuery({ + queryKey: batchId ? ['batchMapData', projectId, batchId] : ['projectMapData', projectId], + queryFn: () => (batchId ? getBatchMapData(projectId, batchId) : getProjectMapData(projectId)) as Promise, + enabled: !!projectId, + }); + + const { + data: reviewData, + isLoading: isReviewLoading, + error: reviewError, + isError: isReviewError + } = useQuery({ + queryKey: batchId ? ['batchReview', projectId, batchId] : ['projectReview', projectId], + queryFn: () => (batchId ? getBatchReview(projectId, batchId) : getProjectReview(projectId)) as Promise, + enabled: !!projectId, + }); + + const isLoading = isMapDataLoading || isReviewLoading; + const error = isMapDataError ? mapDataError : (isReviewError ? 
reviewError : null); + + // Reset fit bounds when data source changes + useEffect(() => { + hasFitBoundsRef.current = false; + }, [batchId, projectId]); + const [selectedImage, setSelectedImage] = useState<{ id: string; url: string; @@ -27,44 +65,53 @@ const ImageReview = ({ projectId, batchId }: ImageReviewProps) => { status: string; rejection_reason?: string; } | null>(null); - const [popupData, setPopupData] = useState>(); + const [highlightedImageId, setHighlightedImageId] = useState(null); const [map, setMap] = useState(null); const [isMapLoaded, setIsMapLoaded] = useState(false); - const mapContainerRef = useRef(null); + const [container, setContainer] = useState(null); const [verificationModal, setVerificationModal] = useState<{ isOpen: boolean; taskId: string; taskIndex: number; - }>({ isOpen: false, taskId: '', taskIndex: 0 }); - - const { data: reviewData, isLoading, error } = useQuery({ - queryKey: ['batchReview', projectId, batchId], - queryFn: () => getBatchReview(projectId, batchId), - enabled: !!projectId && !!batchId, + }>({ + isOpen: false, + taskId: '', + taskIndex: 0, }); - const { data: mapData } = useQuery({ - queryKey: ['batchMapData', projectId, batchId], - queryFn: () => getBatchMapData(projectId, batchId), - enabled: !!projectId && !!batchId, - }); + // Refs for sidebar scrolling + const imageRefs = useRef>({}); - // Initialize map only once when component mounts + // Detect when container is ready + const mapContainerRefCallback = useCallback((node: HTMLDivElement | null) => { + if (node !== null) { + setContainer(node); + } + }, []); + + // Initialize map when container is ready useEffect(() => { - if (!mapContainerRef.current || map) return; + if (!container || map) { + return; + } const mapInstance = new MapLibreMap({ - container: mapContainerRef.current, + container: container, style: { version: 8, sources: {}, layers: [] }, center: [0, 0], - zoom: 2, + zoom: 1, maxZoom: 22, attributionControl: false, renderWorldCopies: false, + 
preserveDrawingBuffer: true, + trackResize: true, }); + mapInstance.setStyle({ version: 8, sources: {}, layers: [] }); + mapInstance.on('load', () => { setIsMapLoaded(true); + mapInstance.resize(); }); // Disable rotation @@ -74,9 +121,48 @@ const ImageReview = ({ projectId, batchId }: ImageReviewProps) => { setMap(mapInstance); return () => { - mapInstance.remove(); + if (mapInstance) { + try { + mapInstance.remove(); + } catch (e) { + console.warn('Error removing map instance:', e); + } + } }; - }, []); // Empty dependency - initialize map only once on mount + }, [container]); // Re-run when container becomes available + + // Observe container resize events + useEffect(() => { + if (!map || !container) return; + + const observer = new ResizeObserver(() => { + if (map) { + map.resize(); + } + }); + + observer.observe(container); + + return () => { + observer.disconnect(); + } + }, [map, container]); + + // Fit map to task extent when ready + useEffect(() => { + if (!map || !isMapLoaded || !mapData?.tasks || hasFitBoundsRef.current) return; + hasFitBoundsRef.current = true; + try { + const [minLng, minLat, maxLng, maxLat] = bbox(mapData.tasks); + map.fitBounds([[minLng, minLat], [maxLng, maxLat]] as LngLatBoundsLike, { + padding: 40, + maxZoom: 18, + duration: 300, + }); + } catch { + // ignore invalid geometry + } + }, [map, isMapLoaded, mapData]); // Add map controls when loaded useEffect(() => { @@ -91,6 +177,140 @@ const ImageReview = ({ projectId, batchId }: ImageReviewProps) => { } }, [isMapLoaded, map]); + // Pointer cursor on image point hover + useEffect(() => { + if (!map || !isMapLoaded) return; + + const layerId = 'review-image-points-layer'; + + const onMouseEnter = () => { + map.getCanvas().style.cursor = 'pointer'; + }; + const onMouseLeave = () => { + map.getCanvas().style.cursor = ''; + }; + + map.on('mouseenter', layerId, onMouseEnter); + map.on('mouseleave', layerId, onMouseLeave); + + return () => { + map.off('mouseenter', layerId, 
onMouseEnter); + map.off('mouseleave', layerId, onMouseLeave); + }; + }, [map, isMapLoaded]); + + // Custom popup on map click (replaces AsyncPopup for reliable close behavior) + useEffect(() => { + if (!map || !isMapLoaded) return; + + const layerId = 'review-image-points-layer'; + + const handleClick = (e: any) => { + const features = map.queryRenderedFeatures(e.point, { layers: [layerId] }); + if (!features?.length) return; + + const props = features[0].properties; + const coords = (features[0].geometry as any).coordinates.slice(); + + // Close existing popup + if (popupRef.current) { + popupRef.current.remove(); + } + + const statusColors: Record = { + assigned: '#22c55e', + rejected: '#D73F3F', + unmatched: '#eab308', + invalid_exif: '#f97316', + duplicate: '#6b7280', + }; + const dotColor = statusColors[props.status] || '#3b82f6'; + + const html = ` +
+
${props.filename || 'Unknown'}
+
+ + ${(props.status || 'unknown').replace('_', ' ')} +
+ ${props.rejection_reason ? `
${props.rejection_reason}
` : ''} +
+ `; + + const newPopup = new Popup({ + closeButton: true, + closeOnClick: false, + offset: 12, + anchor: 'bottom', + maxWidth: '300px', + }) + .setLngLat(coords) + .setHTML(html) + .addTo(map); + + popupRef.current = newPopup; + + // Highlight the clicked image in sidebar + setHighlightedImageId(props.id); + + // Scroll to the image in the sidebar + setTimeout(() => { + const el = imageRefs.current[props.id]; + if (el) { + el.scrollIntoView({ behavior: 'smooth', block: 'nearest' }); + } + }, 100); + }; + + map.on('click', layerId, handleClick); + + return () => { + map.off('click', layerId, handleClick); + if (popupRef.current) { + popupRef.current.remove(); + popupRef.current = null; + } + }; + }, [map, isMapLoaded]); + + // Update map highlight when highlightedImageId changes + useEffect(() => { + if (!map || !isMapLoaded) return; + + const layerId = 'review-image-points-layer'; + + try { + if (!map.getLayer(layerId)) return; + + if (highlightedImageId) { + map.setPaintProperty(layerId, 'circle-stroke-width', [ + 'case', + ['==', ['get', 'id'], highlightedImageId], + 4, + 2, + ]); + map.setPaintProperty(layerId, 'circle-stroke-color', [ + 'case', + ['==', ['get', 'id'], highlightedImageId], + '#2563eb', + '#ffffff', + ]); + map.setPaintProperty(layerId, 'circle-radius', [ + 'case', + ['==', ['get', 'id'], highlightedImageId], + 8, + 5, + ]); + } else { + map.setPaintProperty(layerId, 'circle-stroke-width', 2); + map.setPaintProperty(layerId, 'circle-stroke-color', '#ffffff'); + map.setPaintProperty(layerId, 'circle-radius', 5); + } + } catch { + // Layer might not exist yet + } + }, [map, isMapLoaded, highlightedImageId]); + const acceptMutation = useMutation({ mutationFn: (imageId: string) => acceptImage(projectId, imageId), onSuccess: (data) => { @@ -99,8 +319,13 @@ const ImageReview = ({ projectId, batchId }: ImageReviewProps) => { } else { toast.success('Image accepted successfully'); } - queryClient.invalidateQueries({ queryKey: ['batchReview', projectId, 
batchId] }); - queryClient.invalidateQueries({ queryKey: ['batchMapData', projectId, batchId] }); + // Invalidate both batch-scoped and project-level queries + if (batchId) { + queryClient.invalidateQueries({ queryKey: ['batchReview', projectId, batchId] }); + queryClient.invalidateQueries({ queryKey: ['batchMapData', projectId, batchId] }); + } + queryClient.invalidateQueries({ queryKey: ['projectReview', projectId] }); + queryClient.invalidateQueries({ queryKey: ['projectMapData', projectId] }); setSelectedImage(null); }, onError: (error: any) => { @@ -119,6 +344,61 @@ const ImageReview = ({ projectId, batchId }: ImageReviewProps) => { }); }; + // Handle sidebar thumbnail click: highlight on map and fly to it + const handleSidebarImageClick = (image: TaskGroupImage) => { + setHighlightedImageId(image.id); + + // Find the image's coordinates on the map and fly to it + if (map && mapData?.images?.features) { + const feature = mapData.images.features.find( + (f: GeoJSON.Feature) => f.properties?.id === image.id && f.geometry + ); + if (feature && feature.geometry && 'coordinates' in feature.geometry) { + const coords = (feature.geometry as GeoJSON.Point).coordinates; + + // Close existing popup + if (popupRef.current) { + popupRef.current.remove(); + } + + const statusColors: Record = { + assigned: '#22c55e', + rejected: '#D73F3F', + unmatched: '#eab308', + invalid_exif: '#f97316', + duplicate: '#6b7280', + }; + const dotColor = statusColors[image.status] || '#3b82f6'; + + const html = ` +
+
${image.filename || 'Unknown'}
+
+ + ${(image.status || 'unknown').replace('_', ' ')} +
+ ${image.rejection_reason ? `
${image.rejection_reason}
` : ''} +
+ `; + + const newPopup = new Popup({ + closeButton: true, + closeOnClick: false, + offset: 12, + anchor: 'bottom', + maxWidth: '300px', + }) + .setLngLat(coords as [number, number]) + .setHTML(html) + .addTo(map); + + popupRef.current = newPopup; + + map.flyTo({ center: coords as [number, number], zoom: Math.max(map.getZoom(), 16), duration: 500 }); + } + } + }; + const closeModal = () => { setSelectedImage(null); }; @@ -129,15 +409,6 @@ const ImageReview = ({ projectId, batchId }: ImageReviewProps) => { } }; - const getPopupUI = useCallback(() => { - return ( -
-

{popupData?.filename || 'Unknown'}

-

Status: {popupData?.status?.replace('_', ' ') || 'Unknown'}

-
- ); - }, [popupData]); - if (isLoading) { return (
@@ -169,21 +440,102 @@ const ImageReview = ({ projectId, batchId }: ImageReviewProps) => { const isRejectedImage = selectedImage && (selectedImage.status === 'rejected' || selectedImage.status === 'invalid_exif'); const isDuplicateImage = selectedImage && selectedImage.status === 'duplicate'; + const locatedImages = mapData?.images?.features?.filter( + (feature: GeoJSON.Feature) => feature.geometry !== null, + ) || []; + + // Collect unlocated images with their thumbnail data from map API + const unlocatedImages = mapData?.images?.features?.filter( + (feature: GeoJSON.Feature) => feature.geometry === null, + ) || []; + + const filteredLocatedImages = showOnlyIssueImages + ? locatedImages.filter((feature: GeoJSON.Feature) => hasIssueStatus(feature.properties?.status)) + : locatedImages; + + const filteredUnlocatedImages = showOnlyIssueImages + ? unlocatedImages.filter((feature: GeoJSON.Feature) => hasIssueStatus(feature.properties?.status)) + : unlocatedImages; + + const locatedImagesGeojson: GeoJSON.FeatureCollection = { + type: 'FeatureCollection', + features: filteredLocatedImages as GeoJSON.Feature[], + }; + + const displayedTaskGroups = reviewData.task_groups + .map((group: TaskGroup) => { + const filteredImages = showOnlyIssueImages + ? 
group.images.filter((image) => hasIssueStatus(image.status)) + : group.images; + + if (group.task_id) { + return { + ...group, + images: filteredImages, + }; + } + + const mergedUnlocatedImages = filteredUnlocatedImages + .filter((feature: GeoJSON.Feature) => !group.images.some((img) => img.id === feature.properties?.id)) + .map((feature: GeoJSON.Feature) => { + const props = feature.properties || {}; + return { + id: props.id, + filename: props.filename || 'Unknown', + s3_key: props.s3_key || '', + thumbnail_url: props.thumbnail_url, + url: props.url, + status: props.status || 'unknown', + rejection_reason: props.rejection_reason || 'No GPS', + uploaded_at: props.uploaded_at || '', + }; + }); + + return { + ...group, + images: [...filteredImages, ...mergedUnlocatedImages], + }; + }) + .filter((group: TaskGroup) => !showOnlyIssueImages || group.images.length > 0); + + const totalIssueImages = (() => { + const groupedIssueCount = reviewData.task_groups.reduce( + (count: number, group: TaskGroup) => count + group.images.filter((image) => hasIssueStatus(image.status)).length, + 0, + ); + const rejectedGroup = reviewData.task_groups.find((group: TaskGroup) => group.task_id === null); + const extraUnlocatedIssueCount = unlocatedImages.filter((feature: GeoJSON.Feature) => { + if (!hasIssueStatus(feature.properties?.status)) { + return false; + } + if (!rejectedGroup) { + return true; + } + return !rejectedGroup.images.some((image) => image.id === feature.properties?.id); + }).length; + + return groupedIssueCount + extraUnlocatedIssueCount; + })(); + + const visibleTaskCount = displayedTaskGroups.filter((group: TaskGroup) => group.task_id).length; + return ( {/* Map and List Split View */} -
+
{/* Map Section */} -
+
+ {/* Map Container */} +
@@ -191,6 +543,7 @@ const ImageReview = ({ projectId, batchId }: ImageReviewProps) => { {/* Task polygons */} {map && isMapLoaded && mapData?.tasks && ( { 'fill-opacity': 0.4, }, }} - zoomToExtent /> )} {/* Task polygon outlines for better visibility */} {map && isMapLoaded && mapData?.tasks && ( { )} {/* Image point markers */} - {map && isMapLoaded && mapData?.images && ( + {map && isMapLoaded && locatedImagesGeojson?.features?.length > 0 && ( { }} /> )} - - {/* Popup for image points */} - ) => - feature?.source === 'review-image-points' - } - popupUI={getPopupUI} - fetchPopupData={(properties: Record) => { - setPopupData(properties); - }} - title="Image Details" - hideButton - /> - {/* Legend */} -
-

Image Status

-
-
-
- Assigned -
-
-
- Rejected -
-
-
- Unmatched + {/* Loading Overlay - appears while data is fetching */} + {isMapDataLoading && ( +
+
+
+ refresh +
+

Loading map data...

+
-
-
- Invalid EXIF + )} + + {/* Legend */} +
+

Image Status

+
+
+
+ Assigned +
+
+
+ Rejected +
+
+
+ Unmatched +
+
+
+ Invalid EXIF +
@@ -297,22 +663,36 @@ const ImageReview = ({ projectId, batchId }: ImageReviewProps) => { {/* List Section */}
-

- Review the classified images grouped by tasks. -

- +
+

+ Review the classified images grouped by tasks. +

+ +
+ + + {showOnlyIssueImages ? visibleTaskCount : reviewData.total_tasks} Tasks + - {reviewData.total_tasks} Tasks + {filteredLocatedImages.length} on Map - {reviewData.total_images} Images + {totalIssueImages} Issues
{/* Task Accordions */}
- {reviewData.task_groups.map((group: TaskGroup, index: number) => ( + {displayedTaskGroups.map((group: TaskGroup, index: number) => ( { {group.task_id ? `Task #${group.project_task_index}` : 'Rejected Images'} - {group.image_count} {group.image_count === 1 ? 'image' : 'images'} + {showOnlyIssueImages + ? `${group.images.length} ${group.images.length === 1 ? 'issue' : 'issues'}` + : `${group.images.length} ${group.images.length === 1 ? 'image' : 'images'}`} {group.is_verified && ( @@ -335,8 +717,8 @@ const ImageReview = ({ projectId, batchId }: ImageReviewProps) => { } > - {/* Verify Task Button - Only for actual tasks, not rejected images group */} - {group.task_id && ( + {/* Verify Task Button - Only for actual tasks in project-level view (not batch-scoped upload step 3) */} + {group.task_id && !batchId && (
)} - {/* Image Grid - Only loaded when accordion is open */} + {/* Image Grid */}
{group.images.map((image) => (
{ imageRefs.current[image.id] = el; }} + className={`naxatw-group naxatw-relative naxatw-aspect-square naxatw-cursor-pointer naxatw-overflow-hidden naxatw-rounded naxatw-border-2 naxatw-transition-all hover:naxatw-shadow-md ${ + highlightedImageId === image.id + ? 'naxatw-border-blue-500 naxatw-ring-2 naxatw-ring-blue-300' + : image.status === 'rejected' || image.status === 'invalid_exif' + ? 'naxatw-border-red-300 hover:naxatw-border-red-500' + : image.status === 'duplicate' + ? 'naxatw-border-gray-400 hover:naxatw-border-gray-600 naxatw-opacity-60' + : 'naxatw-border-gray-200 hover:naxatw-border-blue-500' }`} - onClick={() => handleImageClick(image)} - title={image.filename} + onClick={() => handleSidebarImageClick(image)} + onDoubleClick={() => handleImageClick(image)} + title={`${image.filename}${image.rejection_reason ? ` - ${image.rejection_reason}` : ''} (double-click to view)`} > { loading="lazy" /> {(image.status === 'rejected' || image.status === 'invalid_exif') && ( -
- Rejected +
+ {image.rejection_reason || 'Rejected'}
)} {image.status === 'duplicate' && ( @@ -418,7 +804,10 @@ const ImageReview = ({ projectId, batchId }: ImageReviewProps) => {

{selectedImage.filename}

- {isRejectedImage && selectedImage.rejection_reason && ( +

+ Status: {selectedImage.status.replace('_', ' ')} +

+ {selectedImage.rejection_reason && (

Reason: {selectedImage.rejection_reason}

@@ -450,11 +839,14 @@ const ImageReview = ({ projectId, batchId }: ImageReviewProps) => { isOpen={verificationModal.isOpen} onClose={() => setVerificationModal({ isOpen: false, taskId: '', taskIndex: 0 })} projectId={projectId} - batchId={batchId} taskId={verificationModal.taskId} taskIndex={verificationModal.taskIndex} onVerified={() => { - queryClient.invalidateQueries({ queryKey: ['batchReview', projectId, batchId] }); + if (batchId) { + queryClient.invalidateQueries({ queryKey: ['batchReview', projectId, batchId] }); + } + queryClient.invalidateQueries({ queryKey: ['projectReview', projectId] }); + queryClient.invalidateQueries({ queryKey: ['projectMapData', projectId] }); }} /> diff --git a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/ImageUpload.tsx b/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/ImageUpload.tsx index e10a42956..67c92a939 100644 --- a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/ImageUpload.tsx +++ b/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/ImageUpload.tsx @@ -3,10 +3,9 @@ import UppyFileUploader from '../UppyFileUploader'; interface ImageUploadProps { projectId: string; onUploadComplete?: (result: any, batchId?: string) => void; - onCancel?: (batchId: string) => void; } -const ImageUpload = ({ projectId, onUploadComplete, onCancel }: ImageUploadProps) => { +const ImageUpload = ({ projectId, onUploadComplete }: ImageUploadProps) => { return (
{projectId ? ( @@ -14,7 +13,6 @@ const ImageUpload = ({ projectId, onUploadComplete, onCancel }: ImageUploadProps projectId={projectId} label="" onUploadComplete={onUploadComplete} - onCancel={onCancel} allowedFileTypes={[ 'image/jpeg', 'image/jpg', diff --git a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/TaskVerificationModal.tsx b/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/TaskVerificationModal.tsx index f193095a7..3941274ae 100644 --- a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/TaskVerificationModal.tsx +++ b/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/TaskVerificationModal.tsx @@ -1,9 +1,10 @@ -import { useState, useEffect, useCallback } from 'react'; +import { useState, useEffect, useCallback, useRef } from 'react'; import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'; -import { Map as MapLibreMap, NavigationControl, AttributionControl } from 'maplibre-gl'; +import { Map as MapLibreMap, NavigationControl, AttributionControl, LngLatBoundsLike, Popup } from 'maplibre-gl'; +import bbox from '@turf/bbox'; import { toast } from 'react-toastify'; import { - getTaskVerificationData, + getProjectTaskVerificationData, markTaskAsVerified, deleteTaskImage, TaskVerificationData, @@ -14,13 +15,11 @@ import MapContainer from '@Components/common/MapLibreComponents/MapContainer'; import VectorLayer from '@Components/common/MapLibreComponents/Layers/VectorLayer'; import BaseLayerSwitcherUI from '@Components/common/BaseLayerSwitcher'; import { GeojsonType } from '@Components/common/MapLibreComponents/types'; -import AsyncPopup from '@Components/common/MapLibreComponents/NewAsyncPopup'; interface TaskVerificationModalProps { isOpen: boolean; onClose: () => void; projectId: string; - batchId: string; taskId: string; taskIndex: number; onVerified?: () => 
void; @@ -30,7 +29,6 @@ const TaskVerificationModal = ({ isOpen, onClose, projectId, - batchId, taskId, taskIndex, onVerified, @@ -39,14 +37,16 @@ const TaskVerificationModal = ({ const [map, setMap] = useState(null); const [isMapLoaded, setIsMapLoaded] = useState(false); const [isStyleReady, setIsStyleReady] = useState(false); - const [popupData, setPopupData] = useState>(); const [selectedImageId, setSelectedImageId] = useState(null); + const popupRef = useRef(null); + const imageRefs = useRef>({}); + const hasFitRef = useRef(false); - // Fetch task verification data + // Fetch task verification data (project-level, across all batches) const { data: verificationData, isLoading, refetch } = useQuery({ - queryKey: ['taskVerification', projectId, batchId, taskId], - queryFn: () => getTaskVerificationData(projectId, batchId, taskId), - enabled: isOpen && !!projectId && !!batchId && !!taskId, + queryKey: ['taskVerification', projectId, taskId], + queryFn: () => getProjectTaskVerificationData(projectId, taskId), + enabled: isOpen && !!projectId && !!taskId, }); // Reset map when modal closes @@ -56,6 +56,7 @@ const TaskVerificationModal = ({ setMap(null); setIsMapLoaded(false); setIsStyleReady(false); + hasFitRef.current = false; } }, [isOpen]); @@ -63,7 +64,6 @@ const TaskVerificationModal = ({ useEffect(() => { if (!isOpen || !verificationData || map) return; - // Use a small delay to ensure DOM is rendered after loading state changes const timer = setTimeout(() => { const container = document.getElementById('task-verification-map'); if (!container) { @@ -83,7 +83,6 @@ const TaskVerificationModal = ({ mapInstance.on('load', () => { setIsMapLoaded(true); - // Additional delay to ensure style is fully ready setTimeout(() => { if (mapInstance.getStyle()) { setIsStyleReady(true); @@ -111,6 +110,141 @@ const TaskVerificationModal = ({ } }, [isMapLoaded, map]); + // Fit to task extent with appropriate zoom (not too close) + useEffect(() => { + if (!map || !isMapLoaded || 
!isStyleReady || !verificationData?.task_geometry || hasFitRef.current) return; + hasFitRef.current = true; + + try { + const geojson = { + type: 'FeatureCollection' as const, + features: [verificationData.task_geometry], + }; + const [minLng, minLat, maxLng, maxLat] = bbox(geojson); + map.fitBounds( + [[minLng, minLat], [maxLng, maxLat]] as LngLatBoundsLike, + { + padding: 60, + maxZoom: 17, + duration: 300, + } + ); + } catch { + // ignore + } + }, [map, isMapLoaded, isStyleReady, verificationData]); + + // Pointer cursor + click handler on image points + useEffect(() => { + if (!map || !isMapLoaded) return; + + const layerId = 'task-image-points-layer'; + + const onMouseEnter = () => { + map.getCanvas().style.cursor = 'pointer'; + }; + const onMouseLeave = () => { + map.getCanvas().style.cursor = ''; + }; + + const handleClick = (e: any) => { + const features = map.queryRenderedFeatures(e.point, { layers: [layerId] }); + if (!features?.length) return; + + const props = features[0].properties; + const coords = (features[0].geometry as any).coordinates.slice(); + + // Close existing popup + if (popupRef.current) { + popupRef.current.remove(); + } + + const html = ` +
+

${props.filename}

+ ${(props.thumbnail_url || props.url) + ? `` + : ''} +

Status: ${(props.status || '').replace('_', ' ')}

+
+ `; + + const newPopup = new Popup({ + closeButton: true, + closeOnClick: false, + offset: 12, + anchor: 'bottom', + maxWidth: '300px', + }) + .setLngLat(coords) + .setHTML(html) + .addTo(map); + + popupRef.current = newPopup; + + // Highlight in sidebar + setSelectedImageId(props.id); + setTimeout(() => { + const el = imageRefs.current[props.id]; + if (el) { + el.scrollIntoView({ behavior: 'smooth', block: 'nearest' }); + } + }, 100); + }; + + map.on('mouseenter', layerId, onMouseEnter); + map.on('mouseleave', layerId, onMouseLeave); + map.on('click', layerId, handleClick); + + return () => { + map.off('mouseenter', layerId, onMouseEnter); + map.off('mouseleave', layerId, onMouseLeave); + map.off('click', layerId, handleClick); + if (popupRef.current) { + popupRef.current.remove(); + popupRef.current = null; + } + }; + }, [map, isMapLoaded]); + + // Update map highlight when selectedImageId changes + useEffect(() => { + if (!map || !isMapLoaded) return; + + const layerId = 'task-image-points-layer'; + + try { + if (!map.getLayer(layerId)) return; + + if (selectedImageId) { + map.setPaintProperty(layerId, 'circle-stroke-width', [ + 'case', + ['==', ['get', 'id'], selectedImageId], + 4, + 2, + ]); + map.setPaintProperty(layerId, 'circle-stroke-color', [ + 'case', + ['==', ['get', 'id'], selectedImageId], + '#2563eb', + '#ffffff', + ]); + map.setPaintProperty(layerId, 'circle-radius', [ + 'case', + ['==', ['get', 'id'], selectedImageId], + 8, + 6, + ]); + } else { + map.setPaintProperty(layerId, 'circle-stroke-width', 2); + map.setPaintProperty(layerId, 'circle-stroke-color', '#ffffff'); + map.setPaintProperty(layerId, 'circle-radius', 6); + } + } catch { + // Layer not ready yet + } + }, [map, isMapLoaded, selectedImageId]); + // Convert images to GeoJSON for map display const imagesGeoJson = useCallback(() => { if (!verificationData?.images) return null; @@ -141,8 +275,15 @@ const TaskVerificationModal = ({ onSuccess: () => { toast.success(`Task #${taskIndex} 
marked as fully flown`); queryClient.invalidateQueries({ queryKey: ['taskVerification'] }); - queryClient.invalidateQueries({ queryKey: ['batchReview'] }); - queryClient.invalidateQueries({ queryKey: ['processing-summary'] }); + queryClient.invalidateQueries({ queryKey: ['projectReview', projectId] }); + queryClient.invalidateQueries({ queryKey: ['projectMapData', projectId] }); + queryClient.invalidateQueries({ queryKey: ['project-detail', projectId] }); + queryClient.invalidateQueries({ + queryKey: ['projectTaskImagerySummary', projectId], + }); + queryClient.invalidateQueries({ + queryKey: ['all-task-assets-info', projectId], + }); onVerified?.(); onClose(); }, @@ -166,39 +307,47 @@ const TaskVerificationModal = ({ }, }); - const getPopupUI = useCallback( - // eslint-disable-next-line @typescript-eslint/no-unused-vars - (_properties: Record) => { - if (!popupData) { - return
Loading...
; + // Handle sidebar image click: highlight on map and fly to point + const handleSidebarImageClick = (imageId: string) => { + setSelectedImageId(imageId); + + if (map && verificationData?.images) { + const img = verificationData.images.find(i => i.id === imageId); + if (img?.location?.coordinates) { + const coords = img.location.coordinates as [number, number]; + + // Close existing popup + if (popupRef.current) { + popupRef.current.remove(); + } + + const html = ` +
+

${img.filename}

+ ${(img.thumbnail_url || img.url) + ? `` + : ''} +

Status: ${(img.status || '').replace('_', ' ')}

+
+ `; + + const newPopup = new Popup({ + closeButton: true, + closeOnClick: false, + offset: 12, + anchor: 'bottom', + maxWidth: '300px', + }) + .setLngLat(coords) + .setHTML(html) + .addTo(map); + + popupRef.current = newPopup; + + map.flyTo({ center: coords, zoom: Math.max(map.getZoom(), 17), duration: 500 }); } - - return ( -
-

- {popupData.filename} -

- {(popupData.thumbnail_url || popupData.url) && ( - {popupData.filename} - )} -

- Status: {popupData.status?.replace('_', ' ')} -

-
- ); - }, - [popupData], - ); - - const handleDeleteFromPopup = useCallback((data: Record) => { - if (data?.id) { - deleteMutation.mutate(data.id); } - }, [deleteMutation]); + }; if (!isOpen) return null; @@ -270,7 +419,6 @@ const TaskVerificationModal = ({ 'fill-opacity': 0.4, }, }} - zoomToExtent /> )} @@ -315,21 +463,6 @@ const TaskVerificationModal = ({ }} /> )} - - {/* Popup for image preview */} - ) => - feature?.source === 'task-image-points' - } - popupUI={getPopupUI} - fetchPopupData={(properties: Record) => { - setPopupData(properties); - }} - title="Image Preview" - buttonText="Delete" - handleBtnClick={handleDeleteFromPopup} - closePopupOnButtonClick - /> {/* Stats Overlay */} @@ -388,12 +521,13 @@ const TaskVerificationModal = ({ {verificationData?.images.map((image) => (
{ imageRefs.current[image.id] = el; }} + className={`naxatw-group naxatw-relative naxatw-aspect-square naxatw-cursor-pointer naxatw-overflow-hidden naxatw-rounded naxatw-border-2 naxatw-transition-all hover:naxatw-shadow-md ${ selectedImageId === image.id ? 'naxatw-border-blue-500 naxatw-ring-2 naxatw-ring-blue-200' : 'naxatw-border-gray-200' }`} - onClick={() => setSelectedImageId(image.id)} + onClick={() => handleSidebarImageClick(image.id)} >
- Click on image dots to preview. Delete any problematic images before verifying. + Click on image dots to preview. Click thumbnails to highlight on map.
{currentStep < steps.length ? ( @@ -288,10 +230,8 @@ const DroneImageProcessingWorkflow = ({
- - {/* Abort Confirmation Dialog */} {showAbortConfirmation && (
@@ -309,7 +249,7 @@ const DroneImageProcessingWorkflow = ({ +
+
+ + ); +}; diff --git a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/PopoverBox/ImageBox/ImageCard/index.tsx b/src/frontend/src/components/DroneOperatorTask/DescriptionSection/PopoverBox/ImageBox/ImageCard/index.tsx deleted file mode 100644 index fb23e1681..000000000 --- a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/PopoverBox/ImageBox/ImageCard/index.tsx +++ /dev/null @@ -1,62 +0,0 @@ -import { - setSelectedImage, - unCheckImages, -} from '@Store/actions/droneOperatorTask'; -import { useTypedDispatch } from '@Store/hooks'; - -interface IImageCardProps { - image: string; - imageName: string; - checked: boolean; - deselectImages?: number; -} -const ImageCard = ({ - image, - imageName, - checked, - deselectImages, -}: IImageCardProps) => { - const dispatch = useTypedDispatch(); - - return ( - <> -
dispatch(setSelectedImage(image))} - > -
- {/* dispatch(setSelectedImage(image))} - /> */} - - image - -
-
{ - e.stopPropagation(); - dispatch(unCheckImages(deselectImages)); - }} - > - -

- {imageName} -

-
-
- - ); -}; - -export default ImageCard; diff --git a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/PopoverBox/ImageBox/PreviewImage/index.tsx b/src/frontend/src/components/DroneOperatorTask/DescriptionSection/PopoverBox/ImageBox/PreviewImage/index.tsx deleted file mode 100644 index 75f4ad56a..000000000 --- a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/PopoverBox/ImageBox/PreviewImage/index.tsx +++ /dev/null @@ -1,16 +0,0 @@ -import { useTypedSelector } from '@Store/hooks'; - -const PreviewImage = () => { - const clickedImage = useTypedSelector( - state => state.droneOperatorTask.clickedImage, - ); - return ( - - ); -}; - -export default PreviewImage; diff --git a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/PopoverBox/ImageBox/index.tsx b/src/frontend/src/components/DroneOperatorTask/DescriptionSection/PopoverBox/ImageBox/index.tsx deleted file mode 100644 index 6002b14db..000000000 --- a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/PopoverBox/ImageBox/index.tsx +++ /dev/null @@ -1,244 +0,0 @@ -/* eslint-disable no-await-in-loop */ -import { useEffect, useRef, useState } from 'react'; -import { motion } from 'framer-motion'; -import { toast } from 'react-toastify'; -import { Button } from '@Components/RadixComponents/Button'; -import { getImageUploadLink } from '@Services/droneOperator'; -import { useMutation } from '@tanstack/react-query'; -import { - checkAllImages, - setCheckedImages, - unCheckAllImages, -} from '@Store/actions/droneOperatorTask'; -import { useTypedDispatch, useTypedSelector } from '@Store/hooks'; -import callApiSimultaneously from '@Utils/callApiSimultaneously'; -import chunkArray from '@Utils/createChunksOfArray'; -import delay from '@Utils/createDelay'; -import widthCalulator from '@Utils/percentageCalculator'; -import { postProcessImagery } from '@Services/tasks'; -import { postTaskStatus } from '@Services/project'; -import FilesUploadingPopOver from 
'../LoadingBox'; -import ImageCard from './ImageCard'; -import PreviewImage from './PreviewImage'; - -const ImageBoxPopOver = () => { - const dispatch = useTypedDispatch(); - const pathname = window.location.pathname?.split('/'); - const projectId = pathname?.[2]; - const taskId = pathname?.[4]; - - const uploadedFilesNumber = useRef(0); - const [imageObject, setImageObject] = useState([]); - const [progressBar, setProgressBar] = useState(false); - const [imagesNames, setImagesNames] = useState([]); - const [loadingWidth, setLoadingWidth] = useState(0); - const [files, setFiles] = useState([]); - const imageFiles = useTypedSelector(state => state.droneOperatorTask.files); - const clickedImage = useTypedSelector( - state => state.droneOperatorTask.clickedImage, - ); - const checkedImages = useTypedSelector( - state => state.droneOperatorTask.checkedImages, - ); - const uploadedImageType = useTypedSelector( - state => state.droneOperatorTask.uploadedImagesType, - ); - - const { mutate: updateStatus } = useMutation({ - mutationFn: postTaskStatus, - onError: (err: any) => { - toast.error(err.message); - }, - }); - - const { mutate: startImageryProcess } = useMutation({ - mutationFn: () => postProcessImagery(projectId, taskId), - onSuccess: () => { - updateStatus({ - projectId, - taskId, - data: { event: 'image_upload', updated_at: new Date().toISOString() }, - }); - toast.success('Image processing started'); - }, - }); - - // function that gets the signed urls for the images and again puts them in chunks of 4 - const { mutate } = useMutation({ - mutationFn: async (data: any) => { - const urlsData = await getImageUploadLink( - uploadedImageType === 'replace', - data, - ); - - // urls from array of objects is retrieved and stored in value - const urls = urlsData.data.map((url: any) => url.url); - const chunkedUrls = chunkArray(urls, 4); - const chunkedFiles = chunkArray(files, 4); - - // this calls api simultaneously for each chunk of files - // each chunk contains 4 
files - for (let index = 0; index < chunkedUrls.length; index++) { - const urlChunk = chunkedUrls[index]; - - await callApiSimultaneously(urlChunk, chunkedFiles[index]); - uploadedFilesNumber.current += urlChunk.length; - const width = widthCalulator(uploadedFilesNumber.current, files.length); - setLoadingWidth(width); - - // to call api in chunks of 4 with a delay of 2 seconds - if (index < chunkedUrls.length - 1) { - await delay(500); - } - } - startImageryProcess(); - }, - }); - - useEffect(() => { - // creates objectsurl of images - const uploadedFiles = imageFiles?.map(file => - // @ts-ignore - URL.createObjectURL(file), - ); - - setImageObject(uploadedFiles); - - // sets the initial state of checked images i.e. true - const initialState: { [key: number]: boolean } = {}; - uploadedFiles.forEach((_, index) => { - initialState[index] = true; - }); - dispatch(setCheckedImages(initialState)); - }, [dispatch, imageFiles]); - - // filters out the selected images and sets the selected images in the state - useEffect(() => { - // filters the checked images names - const selectedImagesNames = imageFiles - ?.map((file, index) => { - if (checkedImages[index]) { - return file.name; - } - return null; - }) - .filter(name => name !== null); - setImagesNames(selectedImagesNames); - - // set the selected image's binary file in the state - const selectedBinaryFiles = imageFiles.filter(file => - selectedImagesNames.includes(file.name), - ); - setFiles(selectedBinaryFiles); - }, [imageFiles, checkedImages]); - - function handleSubmit() { - setProgressBar(true); - const filesData = { - expiry: 5, - task_id: taskId, - image_name: imagesNames, - project_id: projectId, - }; - mutate(filesData); - } - function handleDeselectAllImages() { - if (Object.values(checkedImages).every(value => value === false)) { - return; - } - dispatch(unCheckAllImages()); - } - function handleSelectAllImages() { - if (Object.values(checkedImages).every(Boolean)) { - return; - } - 
dispatch(checkAllImages()); - } - const variants = { - visible: { opacity: 1, x: 0 }, - hidden: { opacity: 0, x: '100%' }, - }; - return ( - <> - {/* ------------------ image section ----------------- */} -
-
- {imageObject?.map((image, index) => ( - - ))} -
- - {/* ----------------- preview Image --------------------- */} - - - -
- - {/* ------------------ buttons section ----------------- */} -
-
-
- -

- Select All -

-
-
- value === false, - )} - /> -

- Deselect All -

-
-
-
- -
-
- {/* ---------- loading popover-------------- */} - - - ); -}; - -export default ImageBoxPopOver; diff --git a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/QuestionBox/index.tsx b/src/frontend/src/components/DroneOperatorTask/DescriptionSection/QuestionBox/index.tsx index fca710357..a878fd2f2 100644 --- a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/QuestionBox/index.tsx +++ b/src/frontend/src/components/DroneOperatorTask/DescriptionSection/QuestionBox/index.tsx @@ -128,32 +128,6 @@ const QuestionBox = ({ {/* Image upload disabled - use the Drone Upload Workflow instead */} {/* {flyable === 'yes' && haveNoImages && } */} - {flyable === 'yes' && haveNoImages && ( -
-
- info -
-

- Image Upload Has Moved -

-

- To upload drone images, please use the Drone Upload Workflow - from the Project Details page. This new workflow provides - better image classification and processing capabilities. -

-
-
- -
- )}
); diff --git a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/UppyFileUploader/index.tsx b/src/frontend/src/components/DroneOperatorTask/DescriptionSection/UppyFileUploader/index.tsx index 4492975ef..073dbf3da 100644 --- a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/UppyFileUploader/index.tsx +++ b/src/frontend/src/components/DroneOperatorTask/DescriptionSection/UppyFileUploader/index.tsx @@ -4,7 +4,7 @@ import { Dashboard } from '@uppy/react'; import { UppyContext } from '@uppy/react'; import { toast } from 'react-toastify'; import { authenticated, api } from '@Services/index'; -import { useTypedDispatch } from '@Store/hooks'; +import { deleteBatch } from '@Services/classification'; import '@uppy/core/css/style.min.css'; import '@uppy/dashboard/css/style.min.css'; @@ -15,7 +15,6 @@ interface UppyFileUploaderProps { taskId?: string; label?: string; onUploadComplete?: (result: any, batchId?: string) => void; - onCancel?: (batchId: string) => void; allowedFileTypes?: string[]; note?: string; staging?: boolean; // If true, uploads to user-uploads staging directory @@ -26,7 +25,6 @@ const UppyFileUploader = ({ taskId, label = 'Upload Files', onUploadComplete, - onCancel, allowedFileTypes = [ 'image/jpeg', 'image/jpg', @@ -43,7 +41,6 @@ const UppyFileUploader = ({ note = 'Drag and drop files here, or click to browse', staging = false, }: UppyFileUploaderProps) => { - const dispatch = useTypedDispatch(); // Generate a batch ID when upload starts (for staging uploads only) const batchIdRef = useRef(null); // Track if we've shown the success notification to prevent duplicates @@ -258,10 +255,18 @@ const UppyFileUploader = ({ }; // Handle cancel-all event to clean up batch when user cancels upload - const handleCancelAll = () => { + const handleCancelAll = async () => { if (staging && batchIdRef.current) { - onCancel?.(batchIdRef.current); - batchIdRef.current = null; + try { + await deleteBatch(projectId, batchIdRef.current, { 
waitForCleanup: true }); + toast.warning('Upload cancelled. Staging images cleared.'); + } catch (error: any) { + console.error('Failed to cleanup batch after cancel:', error); + toast.error('Warning: Failed to cleanup staging images'); + } finally { + batchIdRef.current = null; + notificationShownRef.current = false; + } } }; @@ -276,10 +281,10 @@ const UppyFileUploader = ({ uppy.off('complete', handleComplete); uppy.off('cancel-all', handleCancelAll); }; - }, [uppy, dispatch, onUploadComplete, onCancel, staging]); + }, [uppy, onUploadComplete, staging, projectId]); return ( -
+
{label && (

{label} diff --git a/src/frontend/src/components/IndividualProject/ModalContent/ProcessingStatusDialog.tsx b/src/frontend/src/components/IndividualProject/ModalContent/ProcessingStatusDialog.tsx index 8283ffa91..d376a6a10 100644 --- a/src/frontend/src/components/IndividualProject/ModalContent/ProcessingStatusDialog.tsx +++ b/src/frontend/src/components/IndividualProject/ModalContent/ProcessingStatusDialog.tsx @@ -1,11 +1,13 @@ -import { useCallback, useMemo, useState } from 'react'; -import { useParams } from 'react-router-dom'; +import { useCallback, useEffect, useMemo, useState } from 'react'; +import { matchPath, useLocation } from 'react-router-dom'; import { useDispatch } from 'react-redux'; import { toast } from 'react-toastify'; -import { useMutation, useQueryClient } from '@tanstack/react-query'; +import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'; +import { useGetProjectsDetailQuery } from '@Api/projects'; import { useGetAllTaskAssetsInfo } from '@Api/tasks'; import { postProcessImagery } from '@Services/tasks'; import { processAllImagery } from '@Services/project'; +import { getProjectTaskImagerySummary, TaskImagerySummary } from '@Services/classification'; import { formatString } from '@Utils/index'; import { Button } from '@Components/RadixComponents/Button'; import Icon from '@Components/common/Icon'; @@ -21,14 +23,27 @@ const stateColors: Record = { UNFLYABLE_TASK: '#9EA5AD', }; -const isProcessable = (state: string | null, imageCount: number) => - state === 'IMAGE_UPLOADED' || - state === 'IMAGE_PROCESSING_FAILED' || - (state === 'LOCKED_FOR_MAPPING' && imageCount > 0); +type ProcessingDialogTask = { + task_id: string; + task_index: number; + image_count: number; + state: string; + failure_reason?: string | null; + assets_url?: string | null; +}; + +type ProcessingDialogProjectDetail = { + total_task_count?: number; + has_gcp?: boolean; +}; const ProcessingStatusDialog = () => { - const { id } = useParams(); - const 
projectId = id as string; + const { pathname } = useLocation(); + const projectId = useMemo(() => { + const projectMatch = matchPath('/projects/:id', pathname); + const approvalMatch = matchPath('/projects/:id/approval', pathname); + return projectMatch?.params.id || approvalMatch?.params.id || ''; + }, [pathname]); const dispatch = useDispatch(); const queryClient = useQueryClient(); @@ -37,7 +52,31 @@ const ProcessingStatusDialog = () => { new Set(), ); + // allTaskAssets: S3-based data (assets_url, image_count from disk, state) const { data: allTaskAssets } = useGetAllTaskAssetsInfo(projectId); + const { data: projectDetail } = useGetProjectsDetailQuery(projectId) as { + data?: ProcessingDialogProjectDetail; + }; + + // Backend summary: authoritative source for has_ready_imagery + const { data: taskSummary } = useQuery({ + queryKey: ['projectTaskImagerySummary', projectId], + queryFn: () => getProjectTaskImagerySummary(projectId), + enabled: !!projectId, + refetchInterval: 30000, + }); + + // Build a lookup from task_id → has_ready_imagery so the backend is the + // single source of truth for readiness decisions. + const readinessMap = useMemo(() => { + const map = new Map(); + if (taskSummary) { + for (const t of taskSummary) { + map.set(t.task_id, t.has_ready_imagery); + } + } + return map; + }, [taskSummary]); const { mutateAsync: processTask } = useMutation({ mutationFn: ({ taskId }: { taskId: string }) => @@ -52,20 +91,13 @@ const ProcessingStatusDialog = () => { queryClient.invalidateQueries({ queryKey: ['all-task-assets-info', projectId], }); + queryClient.invalidateQueries({ + queryKey: ['projectTaskImagerySummary', projectId], + }); toast.success('Final processing started'); }, }); - const processableTasks = useMemo( - () => - Array.isArray(allTaskAssets) - ? 
allTaskAssets.filter((t: any) => - isProcessable(t.state, t.image_count), - ) - : [], - [allTaskAssets], - ); - const toggleTaskSelection = useCallback((taskId: string) => { setSelectedTasks(prev => { const next = new Set(prev); @@ -78,18 +110,9 @@ const ProcessingStatusDialog = () => { }); }, []); - const toggleSelectAll = useCallback(() => { - if (selectedTasks.size === processableTasks.length) { - setSelectedTasks(new Set()); - } else { - setSelectedTasks( - new Set(processableTasks.map((t: any) => t.task_id)), - ); - } - }, [selectedTasks, processableTasks]); - const handleProcessSelected = useCallback(async () => { const taskIds = Array.from(selectedTasks); + setSelectedTasks(new Set()); setProcessingTasks(new Set(taskIds)); const results = await Promise.allSettled( taskIds.map(taskId => processTask({ taskId })), @@ -106,11 +129,22 @@ const ProcessingStatusDialog = () => { if (failCount > 0) { toast.error(`Failed to start processing for ${failCount} task(s)`); } - setSelectedTasks(new Set()); + const failedTaskIds = taskIds.filter( + (_taskId, index) => results[index].status === 'rejected', + ); + if (failedTaskIds.length > 0) { + setProcessingTasks((prev) => { + const next = new Set(prev); + failedTaskIds.forEach((taskId) => next.delete(taskId)); + return next; + }); + } queryClient.invalidateQueries({ queryKey: ['all-task-assets-info', projectId], }); - setProcessingTasks(new Set()); + queryClient.invalidateQueries({ + queryKey: ['projectTaskImagerySummary', projectId], + }); }, [selectedTasks, processTask, queryClient, projectId]); const handleProcessSingle = useCallback( @@ -122,14 +156,17 @@ const ProcessingStatusDialog = () => { queryClient.invalidateQueries({ queryKey: ['all-task-assets-info', projectId], }); + queryClient.invalidateQueries({ + queryKey: ['projectTaskImagerySummary', projectId], + }); } catch { toast.error('Failed to start processing'); + setProcessingTasks(prev => { + const next = new Set(prev); + next.delete(taskId); + return next; + 
}); } - setProcessingTasks(prev => { - const next = new Set(prev); - next.delete(taskId); - return next; - }); }, [processTask, queryClient, projectId], ); @@ -147,6 +184,16 @@ const ProcessingStatusDialog = () => { } }, []); + const getProcessButtonLabel = useCallback((task: ProcessingDialogTask) => { + if (task.state === 'IMAGE_PROCESSING_FAILED') { + return 'Retry'; + } + if (task.state === 'IMAGE_PROCESSING_FINISHED') { + return 'Re-run'; + } + return 'Process'; + }, []); + const handleStartFinalProcessing = useCallback( (withGcp: boolean) => { if (withGcp) { @@ -160,13 +207,102 @@ const ProcessingStatusDialog = () => { [dispatch, startAllImageProcessing, projectId], ); - const taskList = useMemo(() => { + const totalTaskCount = useMemo(() => { + if (typeof projectDetail?.total_task_count === 'number') { + return projectDetail.total_task_count; + } + if (Array.isArray(taskSummary) && taskSummary.length > 0) { + return taskSummary.length; + } + return Array.isArray(allTaskAssets) ? allTaskAssets.length : 0; + }, [allTaskAssets, taskSummary, projectDetail]); + + const taskList = useMemo(() => { + const assetsByTaskId = new Map(); + if (Array.isArray(allTaskAssets)) { + allTaskAssets.forEach((task: any) => { + assetsByTaskId.set(task.task_id, task); + }); + } + + if (Array.isArray(taskSummary) && taskSummary.length > 0) { + return taskSummary + .filter( + (task) => + task.assigned_images > 0 || + task.task_state === 'IMAGE_UPLOADED' || + task.task_state === 'IMAGE_PROCESSING_STARTED' || + task.task_state === 'IMAGE_PROCESSING_FINISHED' || + task.task_state === 'IMAGE_PROCESSING_FAILED', + ) + .map((task) => { + const assetInfo = assetsByTaskId.get(task.task_id); + return { + task_id: task.task_id, + task_index: task.project_task_index, + image_count: task.assigned_images, + state: task.task_state, + failure_reason: task.failure_reason, + assets_url: assetInfo?.assets_url, + }; + }) + .sort((a, b) => a.task_index - b.task_index); + } + if 
(!Array.isArray(allTaskAssets)) return []; - return [...allTaskAssets].sort((a: any, b: any) => { - const aIdx = a.task_id?.localeCompare?.(b.task_id) || 0; - return aIdx; + + return [...allTaskAssets] + .filter( + (t: any) => + t.image_count > 0 || + t.state === 'IMAGE_UPLOADED' || + t.state === 'IMAGE_PROCESSING_STARTED' || + t.state === 'IMAGE_PROCESSING_FINISHED' || + t.state === 'IMAGE_PROCESSING_FAILED', + ) + .map((task: any) => ({ + task_id: task.task_id, + task_index: task.task_index, + image_count: task.image_count, + state: task.state, + failure_reason: task.failure_reason, + assets_url: task.assets_url, + })) + .sort((a, b) => { + const aIdx = a.task_id?.localeCompare?.(b.task_id) || 0; + return aIdx; + }); + }, [allTaskAssets, taskSummary]); + + const processableTasks = useMemo( + () => taskList.filter((task) => readinessMap.get(task.task_id) === true), + [taskList, readinessMap], + ); + + useEffect(() => { + setProcessingTasks((prev) => { + if (prev.size === 0) return prev; + + const next = new Set(prev); + taskList.forEach((task) => { + if (next.has(task.task_id) && task.state !== 'IMAGE_UPLOADED') { + next.delete(task.task_id); + } + }); + + return next.size === prev.size ? 
prev : next; }); - }, [allTaskAssets]); + }, [taskList]); + + const toggleSelectAll = useCallback(() => { + if (selectedTasks.size === processableTasks.length) { + setSelectedTasks(new Set()); + } else { + setSelectedTasks( + new Set(processableTasks.map((task) => task.task_id)), + ); + } + }, [selectedTasks, processableTasks]); const processedCount = useMemo( () => @@ -176,6 +312,35 @@ const ProcessingStatusDialog = () => { [taskList], ); + const allTasksProcessed = useMemo( + () => + totalTaskCount > 0 && + Array.isArray(taskSummary) && + taskSummary.length === totalTaskCount && + taskSummary.every( + (task) => task.task_state === 'IMAGE_PROCESSING_FINISHED', + ), + [taskSummary, totalTaskCount], + ); + + const finalProcessingDisabledReason = useMemo(() => { + if (isProcessingAll) { + return 'Final processing is already starting.'; + } + if (!totalTaskCount) { + return 'No project tasks are available yet.'; + } + if (!Array.isArray(taskSummary) || taskSummary.length < totalTaskCount) { + return 'Every task must have imagery and finish quick processing first.'; + } + if (!allTasksProcessed) { + return 'All tasks must reach Complete before final processing can start.'; + } + return ''; + }, [isProcessingAll, totalTaskCount, taskSummary, allTasksProcessed]); + + const hasSavedGcp = Boolean(projectDetail?.has_gcp); + return (

{/* Per-task processing section */} @@ -221,13 +386,19 @@ const ProcessingStatusDialog = () => { - {taskList.map((task: any, index: number) => { - const canProcess = isProcessable(task.state, task.image_count); + {taskList.map((task, index: number) => { + const canProcess = + readinessMap.get(task.task_id) === true && + !processingTasks.has(task.task_id) && + task.state !== 'IMAGE_PROCESSING_STARTED'; const isTaskProcessing = processingTasks.has(task.task_id) || task.state === 'IMAGE_PROCESSING_STARTED'; + const displayState = isTaskProcessing + ? 'IMAGE_PROCESSING_STARTED' + : task.state; const stateColor = - stateColors[task.state] || '#e5e7eb'; + stateColors[displayState] || '#e5e7eb'; return ( { /> - Task {index + 1} + Task {task.task_index ?? index + 1} {task.image_count} images - - {isTaskProcessing && ( - +
+ + {isTaskProcessing && ( + + )} + {displayState === 'IMAGE_PROCESSING_FINISHED' && '✓ '} + {formatString(displayState) || 'No images'} + + {task.state === 'IMAGE_PROCESSING_FAILED' && task.failure_reason && ( +

+ {task.failure_reason} +

)} - {task.state === 'IMAGE_PROCESSING_FINISHED' && '✓ '} - {formatString(task.state) || 'No images'} - +
- + {isTaskProcessing ? ( - - ) : task.state === 'IMAGE_PROCESSING_FINISHED' && - task.assets_url ? ( - - ) : canProcess ? ( - - ) : null} +
+ +
+ ) : ( +
+ {task.state === 'IMAGE_PROCESSING_FINISHED' && task.assets_url ? ( + + ) : null} + {canProcess ? ( + + ) : null} +
+ )} ); @@ -319,9 +506,10 @@ const ProcessingStatusDialog = () => {
@@ -331,8 +519,27 @@ const ProcessingStatusDialog = () => { {/* Status summary */}

{processedCount}/{taskList.length} tasks processed + {taskList.length < totalTaskCount && ( + + ({totalTaskCount - taskList.length} tasks awaiting imagery) + + )}

+ {taskList.length === 0 && ( +
+ + image_search + +

+ No tasks are ready for processing yet. +

+

+ Upload imagery and mark tasks as fully flown in the Verify Imagery step first. +

+
+ )} + {/* Divider */}
@@ -347,25 +554,55 @@ const ProcessingStatusDialog = () => {

-
+
+
+
+ + {hasSavedGcp ? 'check_circle' : 'pin_drop'} + +
+

+ {hasSavedGcp ? 'GCP points have been saved for this project.' : 'No saved GCP points yet.'} +

+

+ {hasSavedGcp + ? 'Start Final Processing will automatically include the saved GCP file.' + : 'Use With GCP to add control points before starting final processing.'} +

+
+
+
+ + {finalProcessingDisabledReason && ( +

+ {finalProcessingDisabledReason} +

+ )} +
diff --git a/src/frontend/src/components/RegulatorsApprovalPage/Description/DescriptionSection.tsx b/src/frontend/src/components/RegulatorsApprovalPage/Description/DescriptionSection.tsx index b79b950a3..739bef5fa 100644 --- a/src/frontend/src/components/RegulatorsApprovalPage/Description/DescriptionSection.tsx +++ b/src/frontend/src/components/RegulatorsApprovalPage/Description/DescriptionSection.tsx @@ -21,11 +21,15 @@ const DescriptionSection = ({ page = 'project-approval', projectData, isProjectDataLoading = false, + onOpenUpload, + onOpenVerify, onOpenWorkflow, }: { projectData: Record; page?: 'project-description' | 'project-approval'; isProjectDataLoading?: boolean; + onOpenUpload?: () => void; + onOpenVerify?: () => void; onOpenWorkflow?: () => void; }) => { const dispatch = useDispatch(); @@ -42,15 +46,6 @@ const DescriptionSection = ({ [projectData?.tasks], ); - const taskStatusSummary = useMemo(() => { - const tasks = projectData?.tasks; - if (!tasks?.length) return null; - const processed = tasks.filter( - (t: Record) => t?.state === 'IMAGE_PROCESSING_FINISHED', - ).length; - return `${processed}/${tasks.length}`; - }, [projectData?.tasks]); - const handleDownloadResult = () => { if (!projectData?.assets_url) return; try { @@ -129,30 +124,85 @@ const DescriptionSection = ({
- {page === 'project-description' && onOpenWorkflow && ( -
-
- - cloud_upload - + {page === 'project-description' && (onOpenUpload || onOpenWorkflow) && ( +
+

+ Imagery Workflow +

+ + {/* Step 1: Upload Imagery */} + + + {/* Step 2: Verify Imagery */} +
- + + {/* Step 3: Processing */} + +
+ 3 +
+
+

Processing

+

+ Start ODM processing and monitor task status +

+
+ chevron_right + + + {/* Step 4: Identify Flight Gaps (future) */} +
+
+ 4 +
+
+

Identify Flight Gaps

+

+ Coming soon +

+
+
)} @@ -161,21 +211,6 @@ const DescriptionSection = ({ projectData?.regulator_approval_status === 'APPROVED') && isAbleToStartProcessing && (
- - {projectData?.image_processing_status === 'SUCCESS' && ( <>
-
+
{children}
diff --git a/src/frontend/src/services/classification.ts b/src/frontend/src/services/classification.ts index 2030545e2..778083eca 100644 --- a/src/frontend/src/services/classification.ts +++ b/src/frontend/src/services/classification.ts @@ -131,9 +131,13 @@ export const acceptImage = async ( export const deleteBatch = async ( projectId: string, batchId: string, -): Promise<{ message: string; batch_id: string; job_id: string }> => { + options?: { waitForCleanup?: boolean }, +): Promise<{ message: string; batch_id: string; job_id?: string; deleted_count?: number; deleted_s3_count?: number }> => { const response = await authenticated(api).delete( `/projects/${projectId}/batch/${batchId}/`, + { + params: options?.waitForCleanup ? { wait_for_cleanup: true } : undefined, + }, ); return response.data; }; @@ -144,6 +148,8 @@ export interface BatchMapData { images: GeoJSON.FeatureCollection; total_tasks: number; total_images: number; + total_images_with_gps: number; + total_images_without_gps: number; } /** @@ -193,6 +199,20 @@ export const getProcessingSummary = async ( return response.data; }; +/** + * Finalize a batch - moves images to task folders WITHOUT triggering ODM. + * Called when user clicks 'Finish' without processing. 
+ */ +export const finalizeBatch = async ( + projectId: string, + batchId: string, +): Promise<{ message: string; batch_id: string; total_moved: number; task_count: number }> => { + const response = await authenticated(api).post( + `/projects/${projectId}/batch/${batchId}/finalize/`, + ); + return response.data; +}; + /** * Start batch processing - moves images to task folders and triggers ODM */ @@ -269,3 +289,86 @@ export const deleteTaskImage = async ( ); return response.data; }; + +// ─── Project-level (task-centric) endpoints ───────────────────────────────── + +export interface TaskImagerySummary { + task_id: string; + project_task_index: number; + task_state: string; + total_images: number; + assigned_images: number; + rejected_images: number; + invalid_exif_images: number; + duplicate_images: number; + unmatched_images: number; + latest_upload: string | null; + failure_reason?: string | null; + has_ready_imagery: boolean; +} + +export interface ProjectReviewData { + project_id: string; + task_groups: TaskGroup[]; + total_tasks: number; + total_images: number; +} + +export interface ProjectMapData { + project_id: string; + tasks: GeoJSON.FeatureCollection; + images: GeoJSON.FeatureCollection; + total_tasks: number; + total_images: number; + total_images_with_gps: number; + total_images_without_gps: number; +} + +/** + * Get per-task imagery summary aggregated across all batches + */ +export const getProjectTaskImagerySummary = async ( + projectId: string, +): Promise => { + const response = await authenticated(api).get( + `/projects/${projectId}/imagery/tasks/`, + ); + return response.data; +}; + +/** + * Get project-level review data: images grouped by task across all batches + */ +export const getProjectReview = async ( + projectId: string, +): Promise => { + const response = await authenticated(api).get( + `/projects/${projectId}/imagery/review/`, + ); + return response.data; +}; + +/** + * Get project-level map data: task geometries + all image points 
across batches + */ +export const getProjectMapData = async ( + projectId: string, +): Promise => { + const response = await authenticated(api).get( + `/projects/${projectId}/imagery/map-data/`, + ); + return response.data; +}; + +/** + * Get task verification data aggregated across all batches (no batch_id needed) + */ +export const getProjectTaskVerificationData = async ( + projectId: string, + taskId: string, +): Promise => { + const response = await authenticated(api).get( + `/projects/${projectId}/imagery/task/${taskId}/verification/`, + ); + return response.data; +}; diff --git a/src/frontend/src/store/actions/droneOperatorTask.ts b/src/frontend/src/store/actions/droneOperatorTask.ts index 354b41782..de21f5b84 100644 --- a/src/frontend/src/store/actions/droneOperatorTask.ts +++ b/src/frontend/src/store/actions/droneOperatorTask.ts @@ -3,13 +3,7 @@ import { droneOperatorTaskSlice } from '@Store/slices/droneOperartorTask'; export const { setSecondPage, setSecondPageState, - setSelectedImage, - setCheckedImages, - unCheckImages, showPopover, - unCheckAllImages, - checkAllImages, - setFiles, setSelectedTakeOffPointOption, setSelectedTakeOffPoint, setUploadedImagesType, diff --git a/src/frontend/src/store/slices/droneOperartorTask.ts b/src/frontend/src/store/slices/droneOperartorTask.ts index 4c868414e..b0555fb63 100644 --- a/src/frontend/src/store/slices/droneOperartorTask.ts +++ b/src/frontend/src/store/slices/droneOperartorTask.ts @@ -9,10 +9,7 @@ export interface IFilesExifData { export interface IDroneOperatorTaskState { secondPage: boolean; secondPageState: string; - clickedImage: string; - checkedImages: Record; popOver: boolean; - files: any[]; selectedTakeOffPointOption: string; selectedTakeOffPoint: any[] | string | null; uploadedImagesType: 'add' | 'replace'; @@ -31,10 +28,7 @@ export interface IDroneOperatorTaskState { const initialState: IDroneOperatorTaskState = { secondPage: false, secondPageState: 'description', - clickedImage: '', - checkedImages: 
{}, popOver: false, - files: [], selectedTakeOffPointOption: 'current_location', selectedTakeOffPoint: null, uploadedImagesType: 'add', @@ -67,32 +61,9 @@ export const droneOperatorTaskSlice = createSlice({ setSecondPageState: (state, action) => { state.secondPageState = action.payload; }, - setSelectedImage: (state, action) => { - state.clickedImage = action.payload; - }, - setCheckedImages: (state, action) => { - state.checkedImages = action.payload; - }, - unCheckImages: (state, action) => { - state.checkedImages[action.payload] = - !state.checkedImages[action.payload]; - }, showPopover: state => { state.popOver = !state.popOver; }, - unCheckAllImages: state => { - Object.keys(state.checkedImages).forEach((key: any) => { - state.checkedImages[key] = false; - }); - }, - checkAllImages: state => { - Object.keys(state.checkedImages).forEach((key: any) => { - state.checkedImages[key] = true; - }); - }, - setFiles: (state, action) => { - state.files = action.payload; - }, setSelectedTakeOffPointOption: (state, action) => { state.selectedTakeOffPointOption = action.payload; }, diff --git a/src/frontend/src/views/IndividualProject/index.tsx b/src/frontend/src/views/IndividualProject/index.tsx index 11613908e..3dae51d9f 100644 --- a/src/frontend/src/views/IndividualProject/index.tsx +++ b/src/frontend/src/views/IndividualProject/index.tsx @@ -30,7 +30,7 @@ import { setProjectState } from '@Store/actions/project'; import { useTypedDispatch, useTypedSelector } from '@Store/hooks'; import { useMutation, useQueryClient } from '@tanstack/react-query'; import hasErrorBoundary from '@Utils/hasErrorBoundary'; -import DroneImageProcessingWorkflow from '@Components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow'; +import { UploadImageryDialog, VerifyImageryDialog } from '@Components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow'; import { getRuntimeConfig } from '@/runtimeConfig'; // eslint-disable-next-line camelcase @@ -44,7 +44,9 @@ const 
getActiveTabContent = ( // eslint-disable-next-line no-unused-vars handleTableRowClick: (rowData: any) => {}, // eslint-disable-next-line no-unused-vars - onOpenWorkflow?: () => void, + onOpenUpload?: () => void, + // eslint-disable-next-line no-unused-vars + onOpenVerify?: () => void, ) => { if (activeTab === 'about') return ( @@ -52,7 +54,8 @@ const getActiveTabContent = ( projectData={data} isProjectDataLoading={isProjectDataLoading} page="project-description" - onOpenWorkflow={onOpenWorkflow} + onOpenUpload={onOpenUpload} + onOpenVerify={onOpenVerify} /> ); if (activeTab === 'tasks') @@ -89,7 +92,8 @@ const IndividualProject = () => { const [showProjectDeletePrompt, setShowProjectDeletePrompt] = useState(false); const [showDownloadOptions, setShowDownloadOptions] = useState(false); - const [isWorkflowModalOpen, setIsWorkflowModalOpen] = useState(false); + const [isUploadDialogOpen, setIsUploadDialogOpen] = useState(false); + const [isVerifyDialogOpen, setIsVerifyDialogOpen] = useState(false); const Token = localStorage.getItem('token'); const individualProjectActiveTab = useTypedSelector( @@ -295,7 +299,8 @@ const IndividualProject = () => { projectData as Record, isProjectDataFetching, handleTableRowClick, - () => setIsWorkflowModalOpen(true), + () => setIsUploadDialogOpen(true), + () => setIsVerifyDialogOpen(true), )}
@@ -350,9 +355,15 @@ const IndividualProject = () => { /> - setIsWorkflowModalOpen(false)} + setIsUploadDialogOpen(false)} + projectId={id as string} + /> + + setIsVerifyDialogOpen(false)} projectId={id as string} /> diff --git a/tasks/test b/tasks/test index ed3213dd4..9786c3728 100644 --- a/tasks/test +++ b/tasks/test @@ -28,11 +28,11 @@ default: [no-cd] backend: {{docker}} compose -f compose.test.yaml build backend - {{docker}} compose -f compose.test.yaml up --detach db dragonfly minio nodeodm arq-worker + # Must run separately to avoid exit 0 from createbuckets {{docker}} compose -f compose.test.yaml run --rm createbuckets - {{docker}} compose -f compose.test.yaml run --rm migrations + {{docker}} compose -f compose.test.yaml up --detach --wait backend arq-worker # We run like this to ensure correct cleanup after tests done (or early exit) - sh -c 'trap "just test backend-cleanup || true" EXIT; {{docker}} compose -f compose.test.yaml run --rm backend pytest' + sh -c 'trap "just test backend-cleanup || true" EXIT; {{docker}} compose -f compose.test.yaml exec -T backend pytest' # Cleanup orphan backend tests [no-cd]