|
3 | 3 | import pika |
4 | 4 | from typing import Optional |
5 | 5 |
|
6 | | -from dbmanager.dbmanager import DBManager |
| 6 | +from fastapi.responses import FileResponse |
| 7 | + |
| 8 | +from dbmanager.dbmanager import DBManager, RequestStatus |
7 | 9 | from geoquery.geoquery import GeoQuery |
8 | 10 | from geoquery.task import TaskList |
9 | 11 | from datastore.datastore import Datastore, DEFAULT_MAX_REQUEST_SIZE_GB |
|
18 | 20 | from api_utils import make_bytes_readable_dict |
19 | 21 | from validation import assert_product_exists |
20 | 22 |
|
| 23 | +from . import request |
21 | 24 |
|
22 | 25 | log = get_dds_logger(__name__) |
23 | 26 | data_store = Datastore() |
24 | 27 |
|
25 | 28 | MESSAGE_SEPARATOR = os.environ["MESSAGE_SEPARATOR"] |
26 | 29 |
|
|    30 | +def _is_estimate_enabled(dataset_id: str, product_id: str) -> bool:
| 31 | + if dataset_id in ("sentinel-2",): |
| 32 | + return False |
| 33 | + return True |
| 34 | + |
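The helper above is a hard-coded feature toggle: datasets listed in the tuple skip the size pre-flight check that normally runs before a query is queued. Its behaviour, illustrated below with one made-up dataset ID (`era5` is hypothetical; only `sentinel-2` appears in the diff):

```python
# Toggle behaviour as defined above; "era5"/"reanalysis" are hypothetical IDs.
assert _is_estimate_enabled("sentinel-2", "any-product") is False
assert _is_estimate_enabled("era5", "reanalysis") is True
```

Keeping the exclusion list in code is the simplest option; if more datasets need the exemption later, it could instead be derived from the product metadata returned by `data_store.product_metadata`.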
27 | 35 |
|
28 | 36 | @log_execution_time(log) |
29 | 37 | def get_datasets(user_roles_names: list[str]) -> list[dict]: |
@@ -213,7 +221,7 @@ def estimate( |
213 | 221 |
|
214 | 222 | @log_execution_time(log) |
215 | 223 | @assert_product_exists |
216 | | -def query( |
| 224 | +def async_query( |
217 | 225 | user_id: str, |
218 | 226 | dataset_id: str, |
219 | 227 | product_id: str, |
@@ -250,21 +258,22 @@ def query( |
250 | 258 |
|
251 | 259 | """ |
252 | 260 | log.debug("geoquery: %s", query) |
253 | | - estimated_size = estimate(dataset_id, product_id, query, "GB").get("value") |
254 | | - allowed_size = data_store.product_metadata(dataset_id, product_id).get( |
255 | | - "maximum_query_size_gb", DEFAULT_MAX_REQUEST_SIZE_GB |
256 | | - ) |
257 | | - if estimated_size > allowed_size: |
258 | | - raise exc.MaximumAllowedSizeExceededError( |
259 | | - dataset_id=dataset_id, |
260 | | - product_id=product_id, |
261 | | - estimated_size_gb=estimated_size, |
262 | | - allowed_size_gb=allowed_size, |
263 | | - ) |
264 | | - if estimated_size == 0.0: |
265 | | - raise exc.EmptyDatasetError( |
266 | | - dataset_id=dataset_id, product_id=product_id |
|   261 | +    if _is_estimate_enabled(dataset_id, product_id):
| 262 | + estimated_size = estimate(dataset_id, product_id, query, "GB").get("value") |
| 263 | + allowed_size = data_store.product_metadata(dataset_id, product_id).get( |
| 264 | + "maximum_query_size_gb", DEFAULT_MAX_REQUEST_SIZE_GB |
267 | 265 | ) |
| 266 | + if estimated_size > allowed_size: |
| 267 | + raise exc.MaximumAllowedSizeExceededError( |
| 268 | + dataset_id=dataset_id, |
| 269 | + product_id=product_id, |
| 270 | + estimated_size_gb=estimated_size, |
| 271 | + allowed_size_gb=allowed_size, |
| 272 | + ) |
| 273 | + if estimated_size == 0.0: |
| 274 | + raise exc.EmptyDatasetError( |
| 275 | + dataset_id=dataset_id, product_id=product_id |
| 276 | + ) |
268 | 277 | broker_conn = pika.BlockingConnection( |
269 | 278 | pika.ConnectionParameters( |
270 | 279 | host=os.getenv("BROKER_SERVICE_HOST", "broker") |
@@ -295,6 +304,68 @@ def query( |
295 | 304 | broker_conn.close() |
296 | 305 | return request_id |
297 | 306 |
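The lines elided between the connection setup and `broker_conn.close()` serialize the request and publish it to the broker queue for a worker to pick up. A rough sketch of such a publish step with `pika`; the queue name, message field order, and the fallback separator are assumptions rather than values taken from this diff:

```python
import os

import pika

MESSAGE_SEPARATOR = os.environ.get("MESSAGE_SEPARATOR", "\\")  # fallback only for this sketch


def publish_query_request(
    request_id: int, dataset_id: str, product_id: str, query_json: str
) -> None:
    """Hypothetical sketch of the publish step elided from the diff above."""
    broker_conn = pika.BlockingConnection(
        pika.ConnectionParameters(host=os.getenv("BROKER_SERVICE_HOST", "broker"))
    )
    channel = broker_conn.channel()
    channel.queue_declare(queue="query_queue", durable=True)  # assumed queue name
    # Assumed field order; the real service defines its own message layout.
    message = MESSAGE_SEPARATOR.join([str(request_id), dataset_id, product_id, query_json])
    channel.basic_publish(exchange="", routing_key="query_queue", body=message)
    broker_conn.close()
```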
|
| 307 | +@log_execution_time(log) |
| 308 | +@assert_product_exists |
| 309 | +def sync_query( |
| 310 | + user_id: str, |
| 311 | + dataset_id: str, |
| 312 | + product_id: str, |
| 313 | + query: GeoQuery, |
| 314 | +): |
| 315 | + """Realize the logic for the endpoint: |
| 316 | +
|
| 317 | + `POST /datasets/{dataset_id}/{product_id}/execute` |
| 318 | +
|
| 319 | + Query the data and return the result of the request. |
| 320 | +
|
| 321 | + Parameters |
| 322 | + ---------- |
| 323 | + user_id : str |
| 324 | + ID of the user executing the query |
| 325 | + dataset_id : str |
| 326 | + ID of the dataset |
| 327 | + product_id : str |
| 328 | + ID of the product |
| 329 | + query : GeoQuery |
| 330 | + Query to perform |
| 331 | +
|
| 332 | + Returns |
| 333 | + ------- |
|   334 | +    FileResponse
|   335 | +        the file produced for the finished request
| 336 | +
|
| 337 | + Raises |
| 338 | + ------- |
| 339 | + MaximumAllowedSizeExceededError |
| 340 | + if the allowed size is below the estimated one |
| 341 | + EmptyDatasetError |
| 342 | + if estimated size is zero |
| 343 | +
|
| 344 | + """ |
| 345 | + |
| 346 | + import time |
| 347 | + request_id = async_query(user_id, dataset_id, product_id, query) |
| 348 | + status, _ = DBManager().get_request_status_and_reason(request_id) |
| 349 | + log.debug("sync query: status: %s", status) |
| 350 | + while status in (RequestStatus.RUNNING, RequestStatus.QUEUED, |
| 351 | + RequestStatus.PENDING): |
| 352 | + time.sleep(1) |
| 353 | + status, _ = DBManager().get_request_status_and_reason(request_id) |
| 354 | + log.debug("sync query: status: %s", status) |
| 355 | + |
| 356 | + if status is RequestStatus.DONE: |
| 357 | + download_details = DBManager().get_download_details_for_request_id( |
| 358 | + request_id |
| 359 | + ) |
| 360 | + return FileResponse( |
| 361 | + path=download_details.location_path, |
| 362 | + filename=download_details.location_path.split(os.sep)[-1], |
| 363 | + ) |
| 364 | + raise exc.ProductRetrievingError( |
| 365 | + dataset_id=dataset_id, |
| 366 | + product_id=product_id, |
| 367 | + status=status.name) |
| 368 | + |
298 | 369 |
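Because the synchronous endpoint streams the result file back directly, a client can obtain the data in a single call. A minimal client-side sketch follows; the base URL, authentication header, request body, and output file name are assumptions for illustration, while the endpoint path comes from the docstring above:

```python
import requests

BASE_URL = "http://localhost:8080"  # hypothetical deployment address

response = requests.post(
    f"{BASE_URL}/datasets/era5/reanalysis/execute",  # dataset/product IDs are made up
    json={"variable": ["2m_temperature"]},           # request body shape is assumed
    headers={"User-Token": "<user-id>:<api-key>"},   # auth scheme is assumed
    timeout=3600,  # the server polls once per second until the request finishes
)
response.raise_for_status()
with open("result.nc", "wb") as handle:  # output name/format are assumptions
    handle.write(response.content)
```

Since `sync_query` blocks in a `time.sleep(1)` loop on the server, the HTTP connection stays open for the whole runtime of the request; the asynchronous `async_query` endpoint remains the better fit for large or slow queries.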
|
299 | 370 | @log_execution_time(log) |
300 | 371 | def run_workflow( |
|