diff --git a/openapi.json b/openapi.json index 643aedb19..66a4d334d 100644 --- a/openapi.json +++ b/openapi.json @@ -1363,6 +1363,190 @@ ] } }, + "/layerDb/providers": { + "get": { + "tags": [ + "Layers" + ], + "summary": "List all providers", + "operationId": "list_providers", + "parameters": [ + { + "name": "offset", + "in": "query", + "required": true, + "schema": { + "type": "integer", + "format": "int32", + "minimum": 0 + } + }, + { + "name": "limit", + "in": "query", + "required": true, + "schema": { + "type": "integer", + "format": "int32", + "minimum": 0 + } + } + ], + "responses": { + "200": { + "description": "OK", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/LayerProviderListing" + } + } + } + } + } + }, + "security": [ + { + "session_token": [] + } + ] + }, + "post": { + "tags": [ + "Layers" + ], + "summary": "Add a new provider", + "operationId": "add_provider", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/TypedDataProviderDefinition" + } + } + }, + "required": true + }, + "responses": { + "200": { + "$ref": "#/components/responses/IdResponse" + } + }, + "security": [ + { + "session_token": [] + } + ] + } + }, + "/layerDb/providers/{provider}": { + "get": { + "tags": [ + "Layers" + ], + "summary": "Get an existing provider's definition", + "operationId": "get_provider_definition", + "parameters": [ + { + "name": "provider", + "in": "path", + "description": "Layer provider id", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "OK", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/TypedDataProviderDefinition" + } + } + } + } + }, + "security": [ + { + "session_token": [] + } + ] + }, + "put": { + "tags": [ + "Layers" + ], + "summary": "Update an existing provider's definition", + "operationId": "update_provider_definition", + "parameters": [ + { + "name": "provider", + "in": "path", + "description": "Layer provider id", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/TypedDataProviderDefinition" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "OK" + } + }, + "security": [ + { + "session_token": [] + } + ] + }, + "delete": { + "tags": [ + "Layers" + ], + "summary": "Delete an existing provider", + "operationId": "delete_provider", + "parameters": [ + { + "name": "provider", + "in": "path", + "description": "Layer provider id", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "200": { + "description": "OK" + } + }, + "security": [ + { + "session_token": [] + } + ] + } + }, "/layers/collections": { "get": { "tags": [ @@ -3399,6 +3583,64 @@ ] } }, + "/session/project/{project}": { + "post": { + "tags": [ + "Session" + ], + "summary": "Sets the active project of the session.", + "operationId": "session_project_handler", + "parameters": [ + { + "name": "project", + "in": "path", + "description": "Project id", + "required": true, + "schema": { + "$ref": "#/components/schemas/ProjectId" + } + } + ], + "responses": { + "200": { + "description": "The project of the session was updated." 
+ } + }, + "security": [ + { + "session_token": [] + } + ] + } + }, + "/session/view": { + "post": { + "tags": [ + "Session" + ], + "operationId": "session_view_handler", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/STRectangle" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "The view of the session was updated." + } + }, + "security": [ + { + "session_token": [] + } + ] + } + }, "/spatialReferenceSpecification/{srsString}": { "get": { "tags": [ @@ -5375,6 +5617,58 @@ } } }, + "ArunaDataProviderDefinition": { + "type": "object", + "required": [ + "type", + "id", + "name", + "description", + "apiUrl", + "projectId", + "apiToken", + "filterLabel" + ], + "properties": { + "apiToken": { + "type": "string" + }, + "apiUrl": { + "type": "string" + }, + "cacheTtl": { + "$ref": "#/components/schemas/CacheTtlSeconds" + }, + "description": { + "type": "string" + }, + "filterLabel": { + "type": "string" + }, + "id": { + "$ref": "#/components/schemas/DataProviderId" + }, + "name": { + "type": "string" + }, + "priority": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "projectId": { + "type": "string" + }, + "type": { + "type": "string", + "enum": [ + "Aruna" + ] + } + } + }, "AuthCodeRequestURL": { "type": "object", "required": [ @@ -5664,7 +5958,63 @@ } } }, - "CreateDataset": { + "CopernicusDataspaceDataProviderDefinition": { + "type": "object", + "required": [ + "type", + "name", + "description", + "id", + "stacUrl", + "s3Url", + "s3AccessKey", + "s3SecretKey", + "gdalConfig" + ], + "properties": { + "description": { + "type": "string" + }, + "gdalConfig": { + "type": "array", + "items": { + "$ref": "#/components/schemas/StringPair" + } + }, + "id": { + "$ref": "#/components/schemas/DataProviderId" + }, + "name": { + "type": "string" + }, + "priority": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "s3AccessKey": { + "type": "string" + }, + "s3SecretKey": { + "type": "string" + }, + "s3Url": { + "type": "string" + }, + "stacUrl": { + "type": "string" + }, + "type": { + "type": "string", + "enum": [ + "CopernicusDataspace" + ] + } + } + }, + "CreateDataset": { "type": "object", "required": [ "dataPath", @@ -5789,6 +6139,24 @@ "type": "string", "format": "uuid" }, + "DataProviderResource": { + "type": "object", + "required": [ + "type", + "id" + ], + "properties": { + "id": { + "$ref": "#/components/schemas/DataProviderId" + }, + "type": { + "type": "string", + "enum": [ + "provider" + ] + } + } + }, "DataUsage": { "type": "object", "required": [ @@ -5843,6 +6211,39 @@ } } }, + "DatabaseConnectionConfig": { + "type": "object", + "required": [ + "host", + "port", + "database", + "schema", + "user", + "password" + ], + "properties": { + "database": { + "type": "string" + }, + "host": { + "type": "string" + }, + "password": { + "type": "string" + }, + "port": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "schema": { + "type": "string" + }, + "user": { + "type": "string" + } + } + }, "Dataset": { "type": "object", "required": [ @@ -5921,6 +6322,68 @@ "type": "string", "format": "uuid" }, + "DatasetLayerListingCollection": { + "type": "object", + "required": [ + "name", + "description", + "tags" + ], + "properties": { + "description": { + "type": "string" + }, + "name": { + "type": "string" + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "DatasetLayerListingProviderDefinition": { + "type": "object", + "required": [ 
+ "type", + "id", + "name", + "description", + "collections" + ], + "properties": { + "collections": { + "type": "array", + "items": { + "$ref": "#/components/schemas/DatasetLayerListingCollection" + } + }, + "description": { + "type": "string" + }, + "id": { + "$ref": "#/components/schemas/DataProviderId" + }, + "name": { + "type": "string" + }, + "priority": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "type": { + "type": "string", + "enum": [ + "DatasetLayerListing" + ] + } + } + }, "DatasetListing": { "type": "object", "required": [ @@ -6067,6 +6530,141 @@ "DescribeCoverage" ] }, + "EbvPortalDataProviderDefinition": { + "type": "object", + "required": [ + "type", + "name", + "description", + "baseUrl", + "data", + "overviews" + ], + "properties": { + "baseUrl": { + "type": "string", + "format": "uri" + }, + "cacheTtl": { + "$ref": "#/components/schemas/CacheTtlSeconds" + }, + "data": { + "type": "string", + "description": "Path were the `NetCDF` data can be found" + }, + "description": { + "type": "string" + }, + "name": { + "type": "string" + }, + "overviews": { + "type": "string", + "description": "Path were overview files are stored" + }, + "priority": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "type": { + "type": "string", + "enum": [ + "EbvPortal" + ] + } + } + }, + "EdrDataProviderDefinition": { + "type": "object", + "required": [ + "type", + "name", + "description", + "id", + "baseUrl" + ], + "properties": { + "baseUrl": { + "type": "string", + "format": "uri" + }, + "cacheTtl": { + "$ref": "#/components/schemas/CacheTtlSeconds" + }, + "description": { + "type": "string" + }, + "discreteVrs": { + "type": "array", + "items": { + "type": "string" + }, + "description": "List of vertical reference systems with a discrete scale" + }, + "id": { + "$ref": "#/components/schemas/DataProviderId" + }, + "name": { + "type": "string" + }, + "priority": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "provenance": { + "type": [ + "array", + "null" + ], + "items": { + "$ref": "#/components/schemas/Provenance" + } + }, + "type": { + "type": "string", + "enum": [ + "Edr" + ] + }, + "vectorSpec": { + "oneOf": [ + { + "type": "null" + }, + { + "$ref": "#/components/schemas/EdrVectorSpec" + } + ] + } + } + }, + "EdrVectorSpec": { + "type": "object", + "required": [ + "x", + "time" + ], + "properties": { + "time": { + "type": "string" + }, + "x": { + "type": "string" + }, + "y": { + "type": [ + "string", + "null" + ] + } + } + }, "ErrorResponse": { "type": "object", "required": [ @@ -6145,6 +6743,54 @@ } ] }, + "GbifDataProviderDefinition": { + "type": "object", + "required": [ + "type", + "name", + "description", + "dbConfig", + "autocompleteTimeout", + "columns" + ], + "properties": { + "autocompleteTimeout": { + "type": "integer", + "format": "int32" + }, + "cacheTtl": { + "$ref": "#/components/schemas/CacheTtlSeconds" + }, + "columns": { + "type": "array", + "items": { + "type": "string" + } + }, + "dbConfig": { + "$ref": "#/components/schemas/DatabaseConnectionConfig" + }, + "description": { + "type": "string" + }, + "name": { + "type": "string" + }, + "priority": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "type": { + "type": "string", + "enum": [ + "Gbif" + ] + } + } + }, "GdalDatasetGeoTransform": { "type": "object", "required": [ @@ -6516,6 +7162,92 @@ "GetMap" ] }, + "GfbioAbcdDataProviderDefinition": { + "type": "object", + "required": [ + "type", + "name", + "description", + "dbConfig" + ], + 
"properties": { + "cacheTtl": { + "$ref": "#/components/schemas/CacheTtlSeconds" + }, + "dbConfig": { + "$ref": "#/components/schemas/DatabaseConnectionConfig" + }, + "description": { + "type": "string" + }, + "name": { + "type": "string" + }, + "priority": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "type": { + "type": "string", + "enum": [ + "GfbioAbcd" + ] + } + } + }, + "GfbioCollectionsDataProviderDefinition": { + "type": "object", + "required": [ + "type", + "name", + "description", + "collectionApiUrl", + "collectionApiAuthToken", + "abcdDbConfig", + "pangaeaUrl" + ], + "properties": { + "abcdDbConfig": { + "$ref": "#/components/schemas/DatabaseConnectionConfig" + }, + "cacheTtl": { + "$ref": "#/components/schemas/CacheTtlSeconds" + }, + "collectionApiAuthToken": { + "type": "string" + }, + "collectionApiUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + }, + "name": { + "type": "string" + }, + "pangaeaUrl": { + "type": "string", + "format": "uri" + }, + "priority": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "type": { + "type": "string", + "enum": [ + "GfbioCollections" + ] + } + } + }, "InternalDataId": { "type": "object", "required": [ @@ -6713,6 +7445,26 @@ } } }, + "LayerProviderListing": { + "type": "object", + "required": [ + "id", + "name", + "priority" + ], + "properties": { + "id": { + "$ref": "#/components/schemas/DataProviderId" + }, + "name": { + "type": "string" + }, + "priority": { + "type": "integer", + "format": "int32" + } + } + }, "LayerResource": { "type": "object", "required": [ @@ -7271,6 +8023,48 @@ "NamedData": { "type": "string" }, + "NetCdfCfDataProviderDefinition": { + "type": "object", + "required": [ + "type", + "name", + "description", + "data", + "overviews" + ], + "properties": { + "cacheTtl": { + "$ref": "#/components/schemas/CacheTtlSeconds" + }, + "data": { + "type": "string", + "description": "Path were the `NetCDF` data can be found" + }, + "description": { + "type": "string" + }, + "name": { + "type": "string" + }, + "overviews": { + "type": "string", + "description": "Path were overview files are stored" + }, + "priority": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "type": { + "type": "string", + "enum": [ + "NetCdfCf" + ] + } + } + }, "NoGeometry": { "default": null }, @@ -7792,6 +8586,44 @@ } } }, + "PangaeaDataProviderDefinition": { + "type": "object", + "required": [ + "type", + "name", + "description", + "baseUrl", + "cacheTtl" + ], + "properties": { + "baseUrl": { + "type": "string", + "format": "uri" + }, + "cacheTtl": { + "$ref": "#/components/schemas/CacheTtlSeconds" + }, + "description": { + "type": "string" + }, + "name": { + "type": "string" + }, + "priority": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "type": { + "type": "string", + "enum": [ + "Pangaea" + ] + } + } + }, "Permission": { "type": "string", "enum": [ @@ -8571,6 +9403,9 @@ }, { "$ref": "#/components/schemas/MlModelResource" + }, + { + "$ref": "#/components/schemas/DataProviderResource" } ], "description": "A resource that is affected by a permission.", @@ -8581,7 +9416,8 @@ "layer": "#/components/schemas/LayerResource", "layerCollection": "#/components/schemas/LayerCollectionResource", "mlModel": "#/components/schemas/MlModelResource", - "project": "#/components/schemas/ProjectResource" + "project": "#/components/schemas/ProjectResource", + "provider": "#/components/schemas/DataProviderResource" } } }, @@ -8692,6 +9528,70 @@ } } }, + 
"SentinelS2L2ACogsProviderDefinition": { + "type": "object", + "required": [ + "type", + "name", + "id", + "description", + "apiUrl", + "bands", + "zones" + ], + "properties": { + "apiUrl": { + "type": "string" + }, + "bands": { + "type": "array", + "items": { + "$ref": "#/components/schemas/StacBand" + } + }, + "cacheTtl": { + "$ref": "#/components/schemas/CacheTtlSeconds" + }, + "description": { + "type": "string" + }, + "gdalRetries": { + "type": "integer", + "minimum": 0 + }, + "id": { + "$ref": "#/components/schemas/DataProviderId" + }, + "name": { + "type": "string" + }, + "priority": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "queryBuffer": { + "$ref": "#/components/schemas/StacQueryBuffer" + }, + "stacApiRetries": { + "$ref": "#/components/schemas/StacApiRetries" + }, + "type": { + "type": "string", + "enum": [ + "SentinelS2L2ACogs" + ] + }, + "zones": { + "type": "array", + "items": { + "$ref": "#/components/schemas/StacZone" + } + } + } + }, "ServerInfo": { "type": "object", "required": [ @@ -8831,6 +9731,86 @@ } } }, + "StacApiRetries": { + "type": "object", + "required": [ + "numberOfRetries", + "initialDelayMs", + "exponentialBackoffFactor" + ], + "properties": { + "exponentialBackoffFactor": { + "type": "number", + "format": "double" + }, + "initialDelayMs": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "numberOfRetries": { + "type": "integer", + "minimum": 0 + } + } + }, + "StacBand": { + "type": "object", + "required": [ + "name", + "dataType" + ], + "properties": { + "dataType": { + "$ref": "#/components/schemas/RasterDataType" + }, + "name": { + "type": "string" + }, + "noDataValue": { + "type": [ + "number", + "null" + ], + "format": "double" + } + } + }, + "StacQueryBuffer": { + "type": "object", + "description": "A struct that represents buffers to apply to stac requests", + "required": [ + "startSeconds", + "endSeconds" + ], + "properties": { + "endSeconds": { + "type": "integer", + "format": "int64" + }, + "startSeconds": { + "type": "integer", + "format": "int64" + } + } + }, + "StacZone": { + "type": "object", + "required": [ + "name", + "epsg" + ], + "properties": { + "epsg": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "name": { + "type": "string" + } + } + }, "StaticColor": { "type": "object", "required": [ @@ -9209,6 +10189,63 @@ "TypeNames": { "type": "string" }, + "TypedDataProviderDefinition": { + "oneOf": [ + { + "$ref": "#/components/schemas/ArunaDataProviderDefinition" + }, + { + "$ref": "#/components/schemas/CopernicusDataspaceDataProviderDefinition" + }, + { + "$ref": "#/components/schemas/DatasetLayerListingProviderDefinition" + }, + { + "$ref": "#/components/schemas/EbvPortalDataProviderDefinition" + }, + { + "$ref": "#/components/schemas/EdrDataProviderDefinition" + }, + { + "$ref": "#/components/schemas/GbifDataProviderDefinition" + }, + { + "$ref": "#/components/schemas/GfbioAbcdDataProviderDefinition" + }, + { + "$ref": "#/components/schemas/GfbioCollectionsDataProviderDefinition" + }, + { + "$ref": "#/components/schemas/NetCdfCfDataProviderDefinition" + }, + { + "$ref": "#/components/schemas/PangaeaDataProviderDefinition" + }, + { + "$ref": "#/components/schemas/SentinelS2L2ACogsProviderDefinition" + }, + { + "$ref": "#/components/schemas/WildliveDataConnectorDefinition" + } + ], + "discriminator": { + "propertyName": "type", + "mapping": { + "Aruna": "#/components/schemas/ArunaDataProviderDefinition", + "CopernicusDataspace": 
"#/components/schemas/CopernicusDataspaceDataProviderDefinition", + "DatasetLayerListing": "#/components/schemas/DatasetLayerListingProviderDefinition", + "EbvPortal": "#/components/schemas/EbvPortalDataProviderDefinition", + "Edr": "#/components/schemas/EdrDataProviderDefinition", + "Gbif": "#/components/schemas/GbifDataProviderDefinition", + "GfbioAbcd": "#/components/schemas/GfbioAbcdDataProviderDefinition", + "GfbioCollections": "#/components/schemas/GfbioCollectionsDataProviderDefinition", + "NetCdfCf": "#/components/schemas/NetCdfCfDataProviderDefinition", + "Pangaea": "#/components/schemas/PangaeaDataProviderDefinition", + "SentinelS2L2ACogs": "#/components/schemas/SentinelS2L2ACogsProviderDefinition", + "WildLIVE!": "#/components/schemas/WildliveDataConnectorDefinition" + } + } + }, "TypedGeometry": { "oneOf": [ { @@ -9990,6 +11027,45 @@ "2.0.0" ] }, + "WildliveDataConnectorDefinition": { + "type": "object", + "required": [ + "type", + "id", + "name", + "description" + ], + "properties": { + "apiKey": { + "type": [ + "string", + "null" + ] + }, + "description": { + "type": "string" + }, + "id": { + "$ref": "#/components/schemas/DataProviderId" + }, + "name": { + "type": "string" + }, + "priority": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "type": { + "type": "string", + "enum": [ + "WildLIVE!" + ] + } + } + }, "WmsService": { "type": "string", "enum": [ diff --git a/services/src/api/apidoc.rs b/services/src/api/apidoc.rs index 6e7510a71..1d8bed2c1 100644 --- a/services/src/api/apidoc.rs +++ b/services/src/api/apidoc.rs @@ -39,10 +39,22 @@ use crate::api::model::responses::{ UnauthorizedAdminResponse, UnauthorizedUserResponse, UnsupportedMediaTypeForJsonResponse, ZipResponse, }; +use crate::api::model::services::DatabaseConnectionConfig; +use crate::api::model::services::EdrVectorSpec; +use crate::api::model::services::LayerProviderListing; use crate::api::model::services::{ AddDataset, CreateDataset, DataPath, DatasetDefinition, MetaDataDefinition, MetaDataSuggestion, MlModel, Provenance, ProvenanceOutput, Provenances, UpdateDataset, Volume, }; +use crate::api::model::services::{ + ArunaDataProviderDefinition, CopernicusDataspaceDataProviderDefinition, + DatasetLayerListingCollection, DatasetLayerListingProviderDefinition, + EbvPortalDataProviderDefinition, EdrDataProviderDefinition, GbifDataProviderDefinition, + GfbioAbcdDataProviderDefinition, GfbioCollectionsDataProviderDefinition, + NetCdfCfDataProviderDefinition, PangaeaDataProviderDefinition, + SentinelS2L2ACogsProviderDefinition, StacApiRetries, StacBand, StacQueryBuffer, StacZone, + TypedDataProviderDefinition, +}; use crate::api::ogc::{util::OgcBoundingBox, wcs, wfs, wms}; use crate::contexts::SessionId; use crate::datasets::listing::{DatasetListing, OrderBy}; @@ -109,6 +121,13 @@ use utoipa::{Modify, OpenApi}; handlers::layers::remove_collection_from_collection, handlers::layers::remove_collection, handlers::layers::remove_layer_from_collection, + handlers::layers::add_provider, + handlers::layers::get_provider_definition, + handlers::layers::update_provider_definition, + handlers::layers::delete_provider, + handlers::layers::list_providers, + handlers::users::session_project_handler, + handlers::users::session_view_handler, handlers::layers::remove_layer, handlers::layers::search_handler, handlers::layers::update_collection, @@ -407,6 +426,27 @@ use utoipa::{Modify, OpenApi}; RasterStreamWebsocketResultType, CacheTtlSeconds, + TypedDataProviderDefinition, + ArunaDataProviderDefinition, + 
DatasetLayerListingProviderDefinition, + GbifDataProviderDefinition, + GfbioAbcdDataProviderDefinition, + GfbioCollectionsDataProviderDefinition, + EbvPortalDataProviderDefinition, + NetCdfCfDataProviderDefinition, + PangaeaDataProviderDefinition, + EdrDataProviderDefinition, + CopernicusDataspaceDataProviderDefinition, + SentinelS2L2ACogsProviderDefinition, + DatabaseConnectionConfig, + EdrVectorSpec, + StacBand, + StacZone, + StacApiRetries, + StacQueryBuffer, + DatasetLayerListingCollection, + LayerProviderListing, + PermissionRequest, Resource, Permission, diff --git a/services/src/api/handlers/layers.rs b/services/src/api/handlers/layers.rs index 4c8ef6639..4258dbcd1 100644 --- a/services/src/api/handlers/layers.rs +++ b/services/src/api/handlers/layers.rs @@ -1,5 +1,8 @@ -use crate::api::model::datatypes::{DataProviderId, LayerId}; +use super::tasks::TaskResponse; +use crate::api::model::datatypes::LayerId; use crate::api::model::responses::IdResponse; +use crate::api::model::services::LayerProviderListing; +use crate::api::model::services::TypedDataProviderDefinition; use crate::config::get_config_element; use crate::contexts::ApplicationContext; use crate::datasets::{RasterDatasetFromWorkflow, schedule_raster_dataset_from_workflow_task}; @@ -19,14 +22,13 @@ use crate::workflows::registry::WorkflowRegistry; use crate::workflows::workflow::WorkflowId; use crate::{contexts::SessionContext, layers::layer::LayerCollectionListOptions}; use actix_web::{FromRequest, HttpResponse, Responder, web}; +use geoengine_datatypes::dataset::DataProviderId; use geoengine_datatypes::primitives::{BandSelection, QueryRectangle}; use geoengine_operators::engine::WorkflowOperatorPath; use serde::{Deserialize, Serialize}; use std::sync::Arc; use utoipa::IntoParams; -use super::tasks::TaskResponse; - pub const ROOT_PROVIDER_ID: DataProviderId = DataProviderId::from_u128(0x1c3b_8042_300b_485c_95b5_0147_d9dc_068d); @@ -109,6 +111,17 @@ where ) .route("", web::put().to(update_collection::<C>)) .route("", web::delete().to(remove_collection::<C>)), + ) + .service( + web::scope("/providers") + .route("", web::post().to(add_provider::<C>)) + .route("", web::get().to(list_providers::<C>)) + .service( + web::resource("/{provider}") + .route(web::get().to(get_provider_definition::<C>)) + .route(web::put().to(update_provider_definition::<C>)) + .route(web::delete().to(delete_provider::<C>)), + ), ), ); } @@ -241,7 +254,7 @@ async fn get_layer_providers( } let root_collection = LayerCollection { id: ProviderLayerCollectionId { - provider_id: ROOT_PROVIDER_ID.into(), + provider_id: ROOT_PROVIDER_ID, collection_id: LayerCollectionId(ROOT_COLLECTION_ID.to_string()), }, name: "Layer Providers".to_string(), @@ -293,7 +306,7 @@ async fn get_layer_providers( ) ), params( - ("provider" = DataProviderId, description = "Data provider id"), + ("provider" = crate::api::model::datatypes::DataProviderId, description = "Data provider id"), ("collection" = LayerCollectionId, description = "Layer collection id"), LayerCollectionListOptions ), @@ -316,7 +329,7 @@ async fn list_collection_handler( let db = app_ctx.session_context(session).db(); - if provider == crate::layers::storage::INTERNAL_PROVIDER_ID.into() { + if provider == crate::layers::storage::INTERNAL_PROVIDER_ID { let collection = db .load_layer_collection(&item, options.into_inner()) .await?; @@ -325,7 +338,7 @@ async fn list_collection_handler( } let collection = db - .load_layer_provider(provider.into()) + .load_layer_provider(provider) .await?
.load_layer_collection(&item, options.into_inner()) .await?; @@ -354,7 +367,7 @@ async fn list_collection_handler( ) ), params( - ("provider" = DataProviderId, description = "Data provider id") + ("provider" = crate::api::model::datatypes::DataProviderId, description = "Data provider id") ), security( ("session_token" = []) @@ -375,7 +388,7 @@ async fn provider_capabilities_handler( let db = app_ctx.session_context(session).db(); - let capabilities = match provider.into() { + let capabilities = match provider { crate::layers::storage::INTERNAL_PROVIDER_ID => LayerCollectionProvider::capabilities(&db), provider => db.load_layer_provider(provider).await?.capabilities(), }; @@ -425,7 +438,7 @@ async fn provider_capabilities_handler( ) ), params( - ("provider" = DataProviderId, description = "Data provider id", example = "ce5e84db-cbf9-48a2-9a32-d4b7cc56ea74"), + ("provider" = crate::api::model::datatypes::DataProviderId, description = "Data provider id", example = "ce5e84db-cbf9-48a2-9a32-d4b7cc56ea74"), ("collection" = LayerCollectionId, description = "Layer collection id", example = "05102bb3-a855-4a37-8a8a-30026a91fef1"), SearchParameters ), @@ -449,7 +462,7 @@ async fn search_handler( let db = app_ctx.session_context(session).db(); - let collection = match provider.into() { + let collection = match provider { crate::layers::storage::INTERNAL_PROVIDER_ID => { LayerCollectionProvider::search(&db, &collection, options.into_inner()).await? } @@ -475,7 +488,7 @@ async fn search_handler( ) ), params( - ("provider" = DataProviderId, description = "Data provider id", example = "ce5e84db-cbf9-48a2-9a32-d4b7cc56ea74"), + ("provider" = crate::api::model::datatypes::DataProviderId, description = "Data provider id", example = "ce5e84db-cbf9-48a2-9a32-d4b7cc56ea74"), ("collection" = LayerCollectionId, description = "Layer collection id", example = "05102bb3-a855-4a37-8a8a-30026a91fef1"), SearchParameters ), @@ -499,7 +512,7 @@ async fn autocomplete_handler( let db = app_ctx.session_context(session).db(); - let suggestions = match provider.into() { + let suggestions = match provider { crate::layers::storage::INTERNAL_PROVIDER_ID => { LayerCollectionProvider::autocomplete_search(&db, &collection, options.into_inner()) .await? @@ -670,7 +683,7 @@ async fn autocomplete_handler( ) ), params( - ("provider" = DataProviderId, description = "Data provider id"), + ("provider" = crate::api::model::datatypes::DataProviderId, description = "Data provider id"), ("layer" = LayerCollectionId, description = "Layer id"), ), security( @@ -686,14 +699,14 @@ async fn layer_handler( let db = app_ctx.session_context(session).db(); - if provider == crate::layers::storage::INTERNAL_PROVIDER_ID.into() { + if provider == crate::layers::storage::INTERNAL_PROVIDER_ID { let collection = db.load_layer(&item.into()).await?; return Ok(web::Json(collection)); } let collection = db - .load_layer_provider(provider.into()) + .load_layer_provider(provider) .await? 
.load_layer(&item.into()) .await?; @@ -710,7 +723,7 @@ (status = 200, response = IdResponse::<WorkflowId>) ), params( - ("provider" = DataProviderId, description = "Data provider id"), + ("provider" = crate::api::model::datatypes::DataProviderId, description = "Data provider id"), ("layer" = LayerCollectionId, description = "Layer id"), ), security( @@ -725,10 +738,10 @@ async fn layer_to_workflow_id_handler( let (provider, item) = path.into_inner(); let db = app_ctx.session_context(session.clone()).db(); - let layer = match provider.into() { + let layer = match provider { crate::layers::storage::INTERNAL_PROVIDER_ID => db.load_layer(&item.into()).await?, _ => { - db.load_layer_provider(provider.into()) + db.load_layer_provider(provider) .await? .load_layer(&item.into()) .await? } }; @@ -754,7 +767,7 @@ ) ), params( - ("provider" = DataProviderId, description = "Data provider id"), + ("provider" = crate::api::model::datatypes::DataProviderId, description = "Data provider id"), ("layer" = LayerId, description = "Layer id"), ), security( @@ -773,10 +786,10 @@ async fn layer_to_dataset( let db = ctx.db(); - let layer = match provider.into() { + let layer = match provider { crate::layers::storage::INTERNAL_PROVIDER_ID => db.load_layer(&item).await?, _ => { - db.load_layer_provider(provider.into()) + db.load_layer_provider(provider) .await? .load_layer(&item) .await? } }; @@ -1181,6 +1194,165 @@ async fn remove_collection_from_collection( Ok(HttpResponse::Ok().finish()) } +/// Add a new provider +#[utoipa::path( + tag = "Layers", + post, + path = "/layerDb/providers", + params(), + request_body = TypedDataProviderDefinition, + responses( + (status = 200, response = IdResponse::<DataProviderId>) + ), + security( + ("session_token" = []) + ) +)] +async fn add_provider<C: ApplicationContext>( + session: C::Session, + app_ctx: web::Data<C>, + request: web::Json<TypedDataProviderDefinition>, +) -> Result<web::Json<IdResponse<DataProviderId>>> { + let provider = request.into_inner().into(); + + let id = app_ctx + .into_inner() + .session_context(session) + .db() + .add_layer_provider(provider) + .await?; + + Ok(web::Json(IdResponse { id })) +} + +/// List all providers +#[utoipa::path( + tag = "Layers", + get, + path = "/layerDb/providers", + params(LayerProviderListingOptions), + responses( + (status = 200, description = "OK", body = Vec<LayerProviderListing>) + ), + security( + ("session_token" = []) + ) +)] +async fn list_providers<C: ApplicationContext>( + session: C::Session, + app_ctx: web::Data<C>, + options: ValidatedQuery<LayerProviderListingOptions>, +) -> Result<web::Json<Vec<LayerProviderListing>>> { + let providers = app_ctx + .into_inner() + .session_context(session) + .db() + .list_layer_providers(options.into_inner()) + .await?; + + Ok(web::Json(providers.into_iter().map(Into::into).collect())) } + +/// Get an existing provider's definition +#[utoipa::path( + tag = "Layers", + get, + path = "/layerDb/providers/{provider}", + params( + ("provider" = uuid::Uuid, description = "Layer provider id"), + ), + responses( + (status = 200, description = "OK", body = TypedDataProviderDefinition) + ), + security( + ("session_token" = []) + ) +)] +async fn get_provider_definition<C: ApplicationContext>( + session: C::Session, + app_ctx: web::Data<C>, + path: web::Path<uuid::Uuid>, +) -> Result<web::Json<TypedDataProviderDefinition>> { + let id = DataProviderId(path.into_inner()); + + let provider = app_ctx + .into_inner() + .session_context(session) + .db() + .get_layer_provider_definition(id) + .await? + .into(); + + Ok(web::Json(provider)) +} + +/// Update an existing provider's definition +#[utoipa::path( + tag = "Layers", + put, + path = "/layerDb/providers/{provider}", + params( + ("provider" = uuid::Uuid, description = "Layer provider id"), + ), + request_body = TypedDataProviderDefinition, + responses( + (status = 200, description = "OK") + ), + security( + ("session_token" = []) + ) +)] +async fn update_provider_definition<C: ApplicationContext>( + session: C::Session, + app_ctx: web::Data<C>, + path: web::Path<uuid::Uuid>, + request: web::Json<TypedDataProviderDefinition>, +) -> Result<HttpResponse> { + let id = DataProviderId(path.into_inner()); + let definition = request.into_inner().into(); + + app_ctx + .into_inner() + .session_context(session) + .db() + .update_layer_provider_definition(id, definition) + .await?; + + Ok(HttpResponse::Ok().finish()) +} + +/// Delete an existing provider +#[utoipa::path( + tag = "Layers", + delete, + path = "/layerDb/providers/{provider}", + params( + ("provider" = uuid::Uuid, description = "Layer provider id"), + ), + responses( + (status = 200, description = "OK") + ), + security( + ("session_token" = []) + ) +)] +async fn delete_provider<C: ApplicationContext>( + session: C::Session, + app_ctx: web::Data<C>, + path: web::Path<uuid::Uuid>, +) -> Result<HttpResponse> { + let id = DataProviderId(path.into_inner()); + + app_ctx + .into_inner() + .session_context(session) + .db() + .delete_layer_provider(id) + .await?; + + Ok(HttpResponse::Ok().finish()) +} + #[cfg(test)] mod tests { @@ -1190,6 +1362,10 @@ mod tests { use crate::contexts::PostgresContext; use crate::contexts::SessionId; use crate::datasets::RasterDatasetFromWorkflowResult; + use crate::datasets::dataset_listing_provider::{ + DatasetLayerListingCollection, DatasetLayerListingProviderDefinition, + }; + use crate::datasets::external::aruna::ArunaDataProviderDefinition; use crate::ge_context; use crate::layers::layer::Layer; use crate::layers::storage::INTERNAL_PROVIDER_ID; @@ -1204,7 +1380,7 @@ mod tests { use actix_web::dev::ServiceResponse; use actix_web::{http::header, test}; use actix_web_httpauth::headers::authorization::Bearer; - use geoengine_datatypes::primitives::{CacheHint, Coordinate2D}; + use geoengine_datatypes::primitives::{CacheHint, CacheTtlSeconds, Coordinate2D}; use geoengine_datatypes::primitives::{ RasterQueryRectangle, SpatialPartition2D, TimeGranularity, TimeInterval, }; @@ -1869,6 +2045,376 @@ assert!(response.status().is_success(), "{response:?}"); } + fn default_dataset_layer_listing_provider_definition() -> DatasetLayerListingProviderDefinition { DatasetLayerListingProviderDefinition { + id: DataProviderId::from_u128(0xcbb2_1ee3_d15d_45c5_a175_6696_4adf_4e85), + name: "User Data Listing".to_string(), + description: "User specific datasets grouped by tags.".to_string(), + priority: None, + collections: vec![ + DatasetLayerListingCollection { + name: "User Uploads".to_string(), + description: "Datasets uploaded by the user.".to_string(), + tags: vec!["upload".to_string()], + }, + DatasetLayerListingCollection { + name: "Workflows".to_string(), + description: "Datasets created from workflows.".to_string(), + tags: vec!["workflow".to_string()], + }, + DatasetLayerListingCollection { + name: "All Datasets".to_string(), + description: "All datasets".to_string(), + tags: vec!["*".to_string()], + }, + ], + } + } + + #[ge_context::test] + async fn test_get_provider_definition(app_ctx: PostgresContext) { + let session = admin_login(&app_ctx).await; + let ctx = app_ctx.session_context(session.clone()); + + let session_id = session.id(); + + let dataset_listing_provider = default_dataset_layer_listing_provider_definition(); + + ctx.db() + .add_layer_provider( + crate::layers::external::TypedDataProviderDefinition::DatasetLayerListingProviderDefinition( + dataset_listing_provider.clone(), + ), + ) + .await.unwrap(); + + let req = test::TestRequest::get() + .uri("/layerDb/providers/cbb21ee3-d15d-45c5-a175-66964adf4e85") + .append_header((header::AUTHORIZATION, Bearer::new(session_id.to_string()))); + let response = send_test_request(req, app_ctx.clone()).await; + + assert!(response.status().is_success(), "{response:?}"); + + let response_provider = + serde_json::from_str::<TypedDataProviderDefinition>(&read_body_string(response).await) + .unwrap(); + assert_eq!( + response_provider, + TypedDataProviderDefinition::DatasetLayerListingProviderDefinition( + dataset_listing_provider.into() + ) + ); + } + + #[ge_context::test] + async fn test_add_provider_definition(app_ctx: PostgresContext) { + let session = admin_login(&app_ctx).await; + let ctx = app_ctx.session_context(session.clone()); + + let session_id = session.id(); + + let dataset_listing_provider = default_dataset_layer_listing_provider_definition(); + + let req = test::TestRequest::post() + .uri("/layerDb/providers") + .set_json(serde_json::json!( + &TypedDataProviderDefinition::DatasetLayerListingProviderDefinition( + dataset_listing_provider.clone().into(), + ) + )) + .append_header((header::AUTHORIZATION, Bearer::new(session_id.to_string()))); + let response = send_test_request(req, app_ctx.clone()).await; + + assert!(response.status().is_success(), "{response:?}"); + + assert_eq!( + ctx.db() + .get_layer_provider_definition( + DataProviderId::from_u128(0xcbb2_1ee3_d15d_45c5_a175_6696_4adf_4e85), + ) + .await + .unwrap(), + crate::layers::external::TypedDataProviderDefinition::DatasetLayerListingProviderDefinition( + dataset_listing_provider + ) + ); + } + + #[ge_context::test] + async fn test_update_provider_definition(app_ctx: PostgresContext) { + let session = admin_login(&app_ctx).await; + let ctx = app_ctx.session_context(session.clone()); + + let session_id = session.id(); + + let dataset_listing_provider = default_dataset_layer_listing_provider_definition(); + + ctx.db() + .add_layer_provider( + crate::layers::external::TypedDataProviderDefinition::DatasetLayerListingProviderDefinition( + dataset_listing_provider, + ), + ) + .await.unwrap(); + + let dataset_listing_provider = DatasetLayerListingProviderDefinition { + id: DataProviderId::from_u128(0xcbb2_1ee3_d15d_45c5_a175_6696_4adf_4e85), + name: "Updated User Data Listing".to_string(), + description: "Updated User specific datasets grouped by tags.".to_string(), + priority: Some(2), + collections: vec![ + DatasetLayerListingCollection { + name: "Updated User Uploads".to_string(), + description: "Datasets uploaded by the user.".to_string(), + tags: vec!["upload".to_string()], + }, + DatasetLayerListingCollection { + name: "Workflows".to_string(), + description: "Datasets created from workflows.".to_string(), + tags: vec!["workflow".to_string()], + }, + ], + }; + + let req = test::TestRequest::put() + .uri("/layerDb/providers/cbb21ee3-d15d-45c5-a175-66964adf4e85") + .set_json(serde_json::json!( + &TypedDataProviderDefinition::DatasetLayerListingProviderDefinition( + dataset_listing_provider.clone().into(), + ) + )) + .append_header((header::AUTHORIZATION, Bearer::new(session_id.to_string()))); + + let response = send_test_request(req, app_ctx.clone()).await; + + assert!(response.status().is_success(), "{response:?}"); + + assert_eq!( + ctx.db()
.get_layer_provider_definition( + DataProviderId::from_u128(0xcbb2_1ee3_d15d_45c5_a175_6696_4adf_4e85), + ) + .await + .unwrap(), + crate::layers::external::TypedDataProviderDefinition::DatasetLayerListingProviderDefinition( + dataset_listing_provider + ) + ); + } + + #[ge_context::test] + async fn test_delete_provider_definition(app_ctx: PostgresContext) { + let session = admin_login(&app_ctx).await; + let ctx = app_ctx.session_context(session.clone()); + + let session_id = session.id(); + + let dataset_listing_provider = default_dataset_layer_listing_provider_definition(); + + ctx.db() + .add_layer_provider( + crate::layers::external::TypedDataProviderDefinition::DatasetLayerListingProviderDefinition( + dataset_listing_provider, + ), + ) + .await.unwrap(); + + let req = test::TestRequest::delete() + .uri("/layerDb/providers/cbb21ee3-d15d-45c5-a175-66964adf4e85") + .append_header((header::AUTHORIZATION, Bearer::new(session_id.to_string()))); + + let response = send_test_request(req, app_ctx.clone()).await; + + assert!(response.status().is_success(), "{response:?}"); + + assert!( + ctx.db() + .get_layer_provider_definition(DataProviderId::from_u128( + 0xcbb2_1ee3_d15d_45c5_a175_6696_4adf_4e85 + ),) + .await + .is_err() + ); + } + + #[ge_context::test] + async fn test_cannot_add_existing_provider_definition(app_ctx: PostgresContext) { + let session = admin_login(&app_ctx).await; + let ctx = app_ctx.session_context(session.clone()); + + let session_id = session.id(); + + let dataset_listing_provider = default_dataset_layer_listing_provider_definition(); + + ctx.db() + .add_layer_provider( + crate::layers::external::TypedDataProviderDefinition::DatasetLayerListingProviderDefinition( + dataset_listing_provider.clone(), + ), + ) + .await.unwrap(); + + let req = test::TestRequest::post() + .uri("/layerDb/providers") + .set_json(serde_json::json!( + &TypedDataProviderDefinition::DatasetLayerListingProviderDefinition( + dataset_listing_provider.clone().into(), + ) + )) + .append_header((header::AUTHORIZATION, Bearer::new(session_id.to_string()))); + let response = send_test_request(req, app_ctx.clone()).await; + + assert!(response.status().is_client_error(), "{response:?}"); + + assert_eq!( + response.response().error().unwrap().to_string(), + crate::error::Error::ProviderIdAlreadyExists { + provider_id: DataProviderId::from_u128(0xcbb2_1ee3_d15d_45c5_a175_6696_4adf_4e85) + } + .to_string() + ); + } + + #[ge_context::test] + async fn test_cannot_update_provider_id(app_ctx: PostgresContext) { + let session = admin_login(&app_ctx).await; + let ctx = app_ctx.session_context(session.clone()); + + let session_id = session.id(); + + let dataset_listing_provider = default_dataset_layer_listing_provider_definition(); + + ctx.db() + .add_layer_provider( + crate::layers::external::TypedDataProviderDefinition::DatasetLayerListingProviderDefinition( + dataset_listing_provider, + ), + ) + .await.unwrap(); + + let dataset_listing_provider = DatasetLayerListingProviderDefinition { + id: DataProviderId::from_u128(0xcbb2_1ee3_d15d_45c5_a175_6696_4adf_4e86), + name: "Updated User Data Listing".to_string(), + description: "Updated User specific datasets grouped by tags.".to_string(), + priority: Some(2), + collections: vec![], + }; + + let req = test::TestRequest::put() + .uri("/layerDb/providers/cbb21ee3-d15d-45c5-a175-66964adf4e85") + .set_json(serde_json::json!( + &TypedDataProviderDefinition::DatasetLayerListingProviderDefinition( + dataset_listing_provider.clone().into(), + ) + )) + 
.append_header((header::AUTHORIZATION, Bearer::new(session_id.to_string()))); + + let response = send_test_request(req, app_ctx.clone()).await; + + assert!(response.status().is_client_error(), "{response:?}"); + + assert_eq!( + response.response().error().unwrap().to_string(), + crate::error::Error::ProviderIdUnmodifiable.to_string() + ); + } + + #[ge_context::test] + async fn test_cannot_update_provider_type(app_ctx: PostgresContext) { + let session = admin_login(&app_ctx).await; + let ctx = app_ctx.session_context(session.clone()); + + let session_id = session.id(); + + let dataset_listing_provider = default_dataset_layer_listing_provider_definition(); + + ctx.db() + .add_layer_provider( + crate::layers::external::TypedDataProviderDefinition::DatasetLayerListingProviderDefinition( + dataset_listing_provider, + ), + ) + .await.unwrap(); + + let aruna_provider = ArunaDataProviderDefinition { + id: DataProviderId::from_u128(0xcbb2_1ee3_d15d_45c5_a175_6696_4adf_4e85), + name: "Aruna".to_string(), + description: String::new(), + priority: None, + api_url: String::new(), + project_id: String::new(), + api_token: String::new(), + filter_label: String::new(), + cache_ttl: CacheTtlSeconds::default(), + }; + + let req = test::TestRequest::put() + .uri("/layerDb/providers/cbb21ee3-d15d-45c5-a175-66964adf4e85") + .set_json(serde_json::json!( + &TypedDataProviderDefinition::ArunaDataProviderDefinition( + aruna_provider.clone().into(), + ) + )) + .append_header((header::AUTHORIZATION, Bearer::new(session_id.to_string()))); + + let response = send_test_request(req, app_ctx.clone()).await; + + assert!(response.status().is_client_error(), "{response:?}"); + + assert_eq!( + response.response().error().unwrap().to_string(), + crate::error::Error::ProviderTypeUnmodifiable.to_string() + ); + } + + #[ge_context::test] + async fn test_cannot_update_non_existing_provider(app_ctx: PostgresContext) { + let session = admin_login(&app_ctx).await; + + let session_id = session.id(); + + let dataset_listing_provider = default_dataset_layer_listing_provider_definition(); + + let req = test::TestRequest::put() + .uri("/layerDb/providers/cbb21ee3-d15d-45c5-a175-66964adf4e85") + .set_json(serde_json::json!( + &TypedDataProviderDefinition::DatasetLayerListingProviderDefinition( + dataset_listing_provider.clone().into(), + ) + )) + .append_header((header::AUTHORIZATION, Bearer::new(session_id.to_string()))); + + let response = send_test_request(req, app_ctx.clone()).await; + + assert!(response.status().is_client_error(), "{response:?}"); + + assert_eq!( + response.response().error().unwrap().to_string(), + "A permission error occurred: Permission Owner for resource provider:cbb21ee3-d15d-45c5-a175-66964adf4e85 denied..".to_string() + ); + } + + #[ge_context::test] + async fn test_cannot_get_non_existing_provider_definition(app_ctx: PostgresContext) { + let session = admin_login(&app_ctx).await; + + let session_id = session.id(); + + let req = test::TestRequest::get() + .uri("/layerDb/providers/cbb21ee3-d15d-45c5-a175-66964adf4e85") + .append_header((header::AUTHORIZATION, Bearer::new(session_id.to_string()))); + + let response = send_test_request(req, app_ctx.clone()).await; + + assert!(response.status().is_client_error(), "{response:?}"); + + assert_eq!( + response.response().error().unwrap().to_string(), + "TokioPostgres".to_string() + ); + } + struct MockRasterWorkflowLayerDescription { workflow: Workflow, tiling_specification: TilingSpecification, diff --git a/services/src/api/handlers/permissions.rs 
b/services/src/api/handlers/permissions.rs index 74ceaee66..9e0d597bb 100644 --- a/services/src/api/handlers/permissions.rs +++ b/services/src/api/handlers/permissions.rs @@ -1,4 +1,4 @@ -use crate::api::model::datatypes::LayerId; +use crate::api::model::datatypes::{DataProviderId, LayerId}; use crate::contexts::{ApplicationContext, GeoEngineDb, SessionContext}; use crate::datasets::DatasetName; use crate::datasets::storage::DatasetDb; @@ -78,6 +78,7 @@ pub enum Resource { Project(ProjectResource), Dataset(DatasetResource), MlModel(MlModelResource), + Provider(DataProviderResource), } #[type_tag(value = "layer")] @@ -108,6 +109,13 @@ pub struct DatasetResource { pub id: DatasetName, } +#[type_tag(value = "provider")] +#[derive(Debug, PartialEq, Eq, Deserialize, Clone, ToSchema, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct DataProviderResource { + pub id: DataProviderId, +} + #[type_tag(value = "mlModel")] #[derive(Debug, PartialEq, Eq, Deserialize, Clone, ToSchema, Serialize)] #[serde(rename_all = "camelCase")] @@ -129,6 +137,7 @@ impl Resource { Ok(ResourceId::LayerCollection(layer_collection.id.clone())) } Resource::Project(project_id) => Ok(ResourceId::Project(project_id.id)), + Resource::Provider(provider_id) => Ok(ResourceId::DataProvider(provider_id.id.into())), Resource::Dataset(dataset_name) => { let dataset_id_option = db.resolve_dataset_name_to_id(&dataset_name.id).await?; dataset_id_option @@ -183,6 +192,10 @@ impl TryFrom<(String, String)> for Resource { r#type: Default::default(), id: MlModelName::from_str(&value.1)?, }), + "provider" => Resource::Provider(DataProviderResource { + r#type: Default::default(), + id: DataProviderId(Uuid::from_str(&value.1).context(error::Uuid)?), + }), _ => { return Err(Error::InvalidResourceId { resource_type: value.0, diff --git a/services/src/api/model/services.rs b/services/src/api/model/services.rs index a3c1eeb02..7680c3fa6 100644 --- a/services/src/api/model/services.rs +++ b/services/src/api/model/services.rs @@ -1,19 +1,23 @@ -use std::path::PathBuf; - +use super::datatypes::{CacheTtlSeconds, DataId, DataProviderId, GdalConfigOption, RasterDataType}; use crate::api::model::datatypes::MlModelName; use crate::api::model::operators::{ GdalMetaDataList, GdalMetaDataRegular, GdalMetaDataStatic, GdalMetadataNetCdfCf, MlModelMetadata, MockMetaData, OgrMetaData, }; use crate::datasets::DatasetName; +use crate::datasets::external::GdalRetries; use crate::datasets::storage::validate_tags; use crate::datasets::upload::{UploadId, VolumeName}; use crate::projects::Symbology; +use crate::util::parsing::deserialize_base_url; +use geoengine_macros::type_tag; use serde::{Deserialize, Serialize}; +use std::path::PathBuf; +use url::Url; use utoipa::ToSchema; use validator::{Validate, ValidationErrors}; -use super::datatypes::DataId; +pub const SECRET_REPLACEMENT: &str = "*****"; #[allow(clippy::large_enum_variant)] #[derive(Serialize, Deserialize, Debug, Clone, ToSchema, PartialEq)] @@ -214,6 +218,886 @@ pub struct Volume { pub path: Option, } +#[derive(Debug, Clone, PartialEq, Eq, ToSchema, Serialize)] +pub struct LayerProviderListing { + pub id: DataProviderId, + pub name: String, + pub priority: i16, +} + +impl From for LayerProviderListing { + fn from(value: crate::layers::storage::LayerProviderListing) -> Self { + LayerProviderListing { + id: value.id.into(), + name: value.name, + priority: value.priority, + } + } +} + +#[type_tag(value = "Aruna")] +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, ToSchema)] +#[serde(rename_all 
= "camelCase")] +pub struct ArunaDataProviderDefinition { + pub id: DataProviderId, + pub name: String, + pub description: String, + pub priority: Option, + pub api_url: String, + pub project_id: String, + pub api_token: String, + pub filter_label: String, + #[serde(default)] + pub cache_ttl: CacheTtlSeconds, +} + +impl From + for crate::datasets::external::aruna::ArunaDataProviderDefinition +{ + fn from(value: ArunaDataProviderDefinition) -> Self { + crate::datasets::external::aruna::ArunaDataProviderDefinition { + id: value.id.into(), + name: value.name, + description: value.description, + priority: value.priority, + api_url: value.api_url, + project_id: value.project_id, + api_token: value.api_token, + filter_label: value.filter_label, + cache_ttl: value.cache_ttl.into(), + } + } +} + +impl From + for ArunaDataProviderDefinition +{ + fn from(value: crate::datasets::external::aruna::ArunaDataProviderDefinition) -> Self { + ArunaDataProviderDefinition { + r#type: Default::default(), + id: value.id.into(), + name: value.name, + description: value.description, + priority: value.priority, + api_url: value.api_url, + project_id: value.project_id, + api_token: SECRET_REPLACEMENT.to_string(), + filter_label: value.filter_label, + cache_ttl: value.cache_ttl.into(), + } + } +} + +#[type_tag(value = "CopernicusDataspace")] +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct CopernicusDataspaceDataProviderDefinition { + pub name: String, + pub description: String, + pub id: DataProviderId, + pub stac_url: String, + pub s3_url: String, + pub s3_access_key: String, + pub s3_secret_key: String, + pub gdal_config: Vec, + pub priority: Option, +} + +impl From + for crate::datasets::external::copernicus_dataspace::CopernicusDataspaceDataProviderDefinition +{ + fn from(value: CopernicusDataspaceDataProviderDefinition) -> Self { + crate::datasets::external::copernicus_dataspace::CopernicusDataspaceDataProviderDefinition { + name: value.name, + description: value.description, + id: value.id.into(), + stac_url: value.stac_url, + s3_url: value.s3_url, + s3_access_key: value.s3_access_key, + s3_secret_key: value.s3_secret_key, + gdal_config: value.gdal_config, + priority: value.priority, + } + } +} + +impl + From + for CopernicusDataspaceDataProviderDefinition +{ + fn from( + value: crate::datasets::external::copernicus_dataspace::CopernicusDataspaceDataProviderDefinition, + ) -> Self { + CopernicusDataspaceDataProviderDefinition { + r#type: Default::default(), + name: value.name, + description: value.description, + id: value.id.into(), + stac_url: value.stac_url, + s3_url: value.s3_url, + s3_access_key: value.s3_access_key, + s3_secret_key: SECRET_REPLACEMENT.to_string(), + gdal_config: value.gdal_config, + priority: value.priority, + } + } +} + +#[type_tag(value = "EbvPortal")] +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct EbvPortalDataProviderDefinition { + pub name: String, + pub description: String, + pub priority: Option, + pub base_url: Url, + /// Path were the `NetCDF` data can be found + #[schema(value_type = String)] + pub data: PathBuf, + /// Path were overview files are stored + #[schema(value_type = String)] + pub overviews: PathBuf, + #[serde(default)] + pub cache_ttl: CacheTtlSeconds, +} + +impl From + for crate::datasets::external::netcdfcf::EbvPortalDataProviderDefinition +{ + fn from(value: EbvPortalDataProviderDefinition) -> Self { + 
crate::datasets::external::netcdfcf::EbvPortalDataProviderDefinition { + name: value.name, + description: value.description, + priority: value.priority, + base_url: value.base_url, + data: value.data, + overviews: value.overviews, + cache_ttl: value.cache_ttl.into(), + } + } +} + +impl From + for EbvPortalDataProviderDefinition +{ + fn from(value: crate::datasets::external::netcdfcf::EbvPortalDataProviderDefinition) -> Self { + EbvPortalDataProviderDefinition { + r#type: Default::default(), + name: value.name, + description: value.description, + priority: value.priority, + base_url: value.base_url, + data: value.data, + overviews: value.overviews, + cache_ttl: value.cache_ttl.into(), + } + } +} + +#[type_tag(value = "NetCdfCf")] +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct NetCdfCfDataProviderDefinition { + pub name: String, + pub description: String, + pub priority: Option, + /// Path were the `NetCDF` data can be found + #[schema(value_type = String)] + pub data: PathBuf, + /// Path were overview files are stored + #[schema(value_type = String)] + pub overviews: PathBuf, + #[serde(default)] + pub cache_ttl: CacheTtlSeconds, +} + +impl From + for crate::datasets::external::netcdfcf::NetCdfCfDataProviderDefinition +{ + fn from(value: NetCdfCfDataProviderDefinition) -> Self { + crate::datasets::external::netcdfcf::NetCdfCfDataProviderDefinition { + name: value.name, + description: value.description, + priority: value.priority, + data: value.data, + overviews: value.overviews, + cache_ttl: value.cache_ttl.into(), + } + } +} + +impl From + for NetCdfCfDataProviderDefinition +{ + fn from(value: crate::datasets::external::netcdfcf::NetCdfCfDataProviderDefinition) -> Self { + NetCdfCfDataProviderDefinition { + r#type: Default::default(), + name: value.name, + description: value.description, + priority: value.priority, + data: value.data, + overviews: value.overviews, + cache_ttl: value.cache_ttl.into(), + } + } +} + +#[type_tag(value = "Pangaea")] +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct PangaeaDataProviderDefinition { + pub name: String, + pub description: String, + pub priority: Option, + pub base_url: Url, + pub cache_ttl: CacheTtlSeconds, +} + +impl From + for crate::datasets::external::pangaea::PangaeaDataProviderDefinition +{ + fn from(value: PangaeaDataProviderDefinition) -> Self { + crate::datasets::external::pangaea::PangaeaDataProviderDefinition { + name: value.name, + description: value.description, + priority: value.priority, + base_url: value.base_url, + cache_ttl: value.cache_ttl.into(), + } + } +} + +impl From + for PangaeaDataProviderDefinition +{ + fn from(value: crate::datasets::external::pangaea::PangaeaDataProviderDefinition) -> Self { + PangaeaDataProviderDefinition { + r#type: Default::default(), + name: value.name, + description: value.description, + priority: value.priority, + base_url: value.base_url, + cache_ttl: value.cache_ttl.into(), + } + } +} + +#[type_tag(value = "Edr")] +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct EdrDataProviderDefinition { + pub name: String, + pub description: String, + pub priority: Option, + pub id: DataProviderId, + #[serde(deserialize_with = "deserialize_base_url")] + pub base_url: Url, + pub vector_spec: Option, + #[serde(default)] + pub cache_ttl: CacheTtlSeconds, + #[serde(default)] + /// List of vertical reference 
systems with a discrete scale + pub discrete_vrs: Vec, + pub provenance: Option>, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, ToSchema)] +pub struct EdrVectorSpec { + pub x: String, + pub y: Option, + pub time: String, +} + +impl From for crate::datasets::external::edr::EdrVectorSpec { + fn from(value: EdrVectorSpec) -> Self { + crate::datasets::external::edr::EdrVectorSpec { + x: value.x, + y: value.y, + time: value.time, + } + } +} + +impl From for EdrVectorSpec { + fn from(value: crate::datasets::external::edr::EdrVectorSpec) -> Self { + EdrVectorSpec { + x: value.x, + y: value.y, + time: value.time, + } + } +} + +impl From for crate::datasets::external::edr::EdrDataProviderDefinition { + fn from(value: EdrDataProviderDefinition) -> Self { + crate::datasets::external::edr::EdrDataProviderDefinition { + name: value.name, + description: value.description, + priority: value.priority, + id: value.id.into(), + base_url: value.base_url, + vector_spec: value.vector_spec.map(Into::into), + cache_ttl: value.cache_ttl.into(), + discrete_vrs: value.discrete_vrs, + provenance: value + .provenance + .map(|v| v.into_iter().map(Into::into).collect()), + } + } +} + +impl From for EdrDataProviderDefinition { + fn from(value: crate::datasets::external::edr::EdrDataProviderDefinition) -> Self { + EdrDataProviderDefinition { + r#type: Default::default(), + name: value.name, + description: value.description, + priority: value.priority, + id: value.id.into(), + base_url: value.base_url, + vector_spec: value.vector_spec.map(Into::into), + cache_ttl: value.cache_ttl.into(), + discrete_vrs: value.discrete_vrs, + provenance: value + .provenance + .map(|v| v.into_iter().map(Into::into).collect()), + } + } +} + +#[type_tag(value = "Gbif")] +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct GbifDataProviderDefinition { + pub name: String, + pub description: String, + pub priority: Option, + pub db_config: DatabaseConnectionConfig, + #[serde(default)] + pub cache_ttl: CacheTtlSeconds, + pub autocomplete_timeout: i32, + pub columns: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, ToSchema)] +pub struct DatabaseConnectionConfig { + pub host: String, + pub port: u16, + pub database: String, + pub schema: String, + pub user: String, + pub password: String, +} + +impl From for crate::util::postgres::DatabaseConnectionConfig { + fn from(value: DatabaseConnectionConfig) -> Self { + crate::util::postgres::DatabaseConnectionConfig { + host: value.host, + port: value.port, + database: value.database, + schema: value.schema, + user: value.user, + password: value.password, + } + } +} + +impl From for DatabaseConnectionConfig { + fn from(value: crate::util::postgres::DatabaseConnectionConfig) -> Self { + DatabaseConnectionConfig { + host: value.host, + port: value.port, + database: value.database, + schema: value.schema, + user: value.user, + password: SECRET_REPLACEMENT.to_string(), + } + } +} + +impl From + for crate::datasets::external::gbif::GbifDataProviderDefinition +{ + fn from(value: GbifDataProviderDefinition) -> Self { + crate::datasets::external::gbif::GbifDataProviderDefinition { + name: value.name, + description: value.description, + priority: value.priority, + db_config: value.db_config.into(), + cache_ttl: value.cache_ttl.into(), + autocomplete_timeout: value.autocomplete_timeout, + columns: value.columns, + } + } +} + +impl From + for GbifDataProviderDefinition +{ + fn from(value: 
+
+impl From<GbifDataProviderDefinition>
+    for crate::datasets::external::gbif::GbifDataProviderDefinition
+{
+    fn from(value: GbifDataProviderDefinition) -> Self {
+        crate::datasets::external::gbif::GbifDataProviderDefinition {
+            name: value.name,
+            description: value.description,
+            priority: value.priority,
+            db_config: value.db_config.into(),
+            cache_ttl: value.cache_ttl.into(),
+            autocomplete_timeout: value.autocomplete_timeout,
+            columns: value.columns,
+        }
+    }
+}
+
+impl From<crate::datasets::external::gbif::GbifDataProviderDefinition>
+    for GbifDataProviderDefinition
+{
+    fn from(value: crate::datasets::external::gbif::GbifDataProviderDefinition) -> Self {
+        GbifDataProviderDefinition {
+            r#type: Default::default(),
+            name: value.name,
+            description: value.description,
+            priority: value.priority,
+            db_config: value.db_config.into(),
+            cache_ttl: value.cache_ttl.into(),
+            autocomplete_timeout: value.autocomplete_timeout,
+            columns: value.columns,
+        }
+    }
+}
+
+#[type_tag(value = "GfbioAbcd")]
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct GfbioAbcdDataProviderDefinition {
+    pub name: String,
+    pub description: String,
+    pub priority: Option<i16>,
+    pub db_config: DatabaseConnectionConfig,
+    #[serde(default)]
+    pub cache_ttl: CacheTtlSeconds,
+}
+
+impl From<GfbioAbcdDataProviderDefinition>
+    for crate::datasets::external::gfbio_abcd::GfbioAbcdDataProviderDefinition
+{
+    fn from(value: GfbioAbcdDataProviderDefinition) -> Self {
+        crate::datasets::external::gfbio_abcd::GfbioAbcdDataProviderDefinition {
+            name: value.name,
+            description: value.description,
+            priority: value.priority,
+            db_config: value.db_config.into(),
+            cache_ttl: value.cache_ttl.into(),
+        }
+    }
+}
+
+impl From<crate::datasets::external::gfbio_abcd::GfbioAbcdDataProviderDefinition>
+    for GfbioAbcdDataProviderDefinition
+{
+    fn from(value: crate::datasets::external::gfbio_abcd::GfbioAbcdDataProviderDefinition) -> Self {
+        GfbioAbcdDataProviderDefinition {
+            r#type: Default::default(),
+            name: value.name,
+            description: value.description,
+            priority: value.priority,
+            db_config: value.db_config.into(),
+            cache_ttl: value.cache_ttl.into(),
+        }
+    }
+}
+
+#[type_tag(value = "GfbioCollections")]
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct GfbioCollectionsDataProviderDefinition {
+    pub name: String,
+    pub description: String,
+    pub priority: Option<i16>,
+    pub collection_api_url: Url,
+    pub collection_api_auth_token: String,
+    pub abcd_db_config: DatabaseConnectionConfig,
+    pub pangaea_url: Url,
+    #[serde(default)]
+    pub cache_ttl: CacheTtlSeconds,
+}
+
+impl From<GfbioCollectionsDataProviderDefinition>
+    for crate::datasets::external::gfbio_collections::GfbioCollectionsDataProviderDefinition
+{
+    fn from(value: GfbioCollectionsDataProviderDefinition) -> Self {
+        crate::datasets::external::gfbio_collections::GfbioCollectionsDataProviderDefinition {
+            name: value.name,
+            description: value.description,
+            priority: value.priority,
+            collection_api_url: value.collection_api_url,
+            collection_api_auth_token: value.collection_api_auth_token,
+            abcd_db_config: value.abcd_db_config.into(),
+            pangaea_url: value.pangaea_url,
+            cache_ttl: value.cache_ttl.into(),
+        }
+    }
+}
+
+impl From<crate::datasets::external::gfbio_collections::GfbioCollectionsDataProviderDefinition>
+    for GfbioCollectionsDataProviderDefinition
+{
+    fn from(
+        value: crate::datasets::external::gfbio_collections::GfbioCollectionsDataProviderDefinition,
+    ) -> Self {
+        GfbioCollectionsDataProviderDefinition {
+            r#type: Default::default(),
+            name: value.name,
+            description: value.description,
+            priority: value.priority,
+            collection_api_url: value.collection_api_url,
+            collection_api_auth_token: value.collection_api_auth_token,
+            abcd_db_config: value.abcd_db_config.into(),
+            pangaea_url: value.pangaea_url,
+            cache_ttl: value.cache_ttl.into(),
+        }
+    }
+}
+
+#[type_tag(value = "SentinelS2L2ACogs")]
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct SentinelS2L2ACogsProviderDefinition {
+    pub name: String,
+    pub id: DataProviderId,
+    pub description: String,
+    pub priority: Option<i16>,
+    pub api_url: String,
+    pub bands: Vec<StacBand>,
+    pub zones: Vec<StacZone>,
+    #[serde(default)]
+    pub stac_api_retries: StacApiRetries,
+    #[serde(default)]
+    pub gdal_retries: usize,
+    #[serde(default)]
+    pub cache_ttl: CacheTtlSeconds,
+    #[serde(default)]
+    pub query_buffer: StacQueryBuffer,
+}
+
+#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct StacBand {
+    pub name: String,
+    pub no_data_value: Option<f64>,
+    pub data_type: RasterDataType,
+}
+
+impl From<StacBand> for crate::datasets::external::sentinel_s2_l2a_cogs::StacBand {
+    fn from(value: StacBand) -> Self {
+        crate::datasets::external::sentinel_s2_l2a_cogs::StacBand {
+            name: value.name,
+            no_data_value: value.no_data_value,
+            data_type: value.data_type.into(),
+        }
+    }
+}
+
+impl From<crate::datasets::external::sentinel_s2_l2a_cogs::StacBand> for StacBand {
+    fn from(value: crate::datasets::external::sentinel_s2_l2a_cogs::StacBand) -> Self {
+        StacBand {
+            name: value.name,
+            no_data_value: value.no_data_value,
+            data_type: value.data_type.into(),
+        }
+    }
+}
+
+#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ToSchema)]
+pub struct StacZone {
+    pub name: String,
+    pub epsg: u32,
+}
+
+impl From<StacZone> for crate::datasets::external::sentinel_s2_l2a_cogs::StacZone {
+    fn from(value: StacZone) -> Self {
+        crate::datasets::external::sentinel_s2_l2a_cogs::StacZone {
+            name: value.name,
+            epsg: value.epsg,
+        }
+    }
+}
+
+impl From<crate::datasets::external::sentinel_s2_l2a_cogs::StacZone> for StacZone {
+    fn from(value: crate::datasets::external::sentinel_s2_l2a_cogs::StacZone) -> Self {
+        StacZone {
+            name: value.name,
+            epsg: value.epsg,
+        }
+    }
+}
+
+#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, ToSchema, Default)]
+#[serde(rename_all = "camelCase")]
+pub struct StacApiRetries {
+    pub number_of_retries: usize,
+    pub initial_delay_ms: u64,
+    pub exponential_backoff_factor: f64,
+}
+
+impl From<StacApiRetries> for crate::datasets::external::sentinel_s2_l2a_cogs::StacApiRetries {
+    fn from(value: StacApiRetries) -> Self {
+        crate::datasets::external::sentinel_s2_l2a_cogs::StacApiRetries {
+            number_of_retries: value.number_of_retries,
+            initial_delay_ms: value.initial_delay_ms,
+            exponential_backoff_factor: value.exponential_backoff_factor,
+        }
+    }
+}
+
+impl From<crate::datasets::external::sentinel_s2_l2a_cogs::StacApiRetries> for StacApiRetries {
+    fn from(value: crate::datasets::external::sentinel_s2_l2a_cogs::StacApiRetries) -> Self {
+        StacApiRetries {
+            number_of_retries: value.number_of_retries,
+            initial_delay_ms: value.initial_delay_ms,
+            exponential_backoff_factor: value.exponential_backoff_factor,
+        }
+    }
+}
+
+#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, ToSchema, Default)]
+#[serde(rename_all = "camelCase")]
+/// A struct that represents buffers to apply to STAC requests
+pub struct StacQueryBuffer {
+    pub start_seconds: i64,
+    pub end_seconds: i64,
+    // TODO: add also spatial buffers?
+}
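`StacApiRetries` parameterizes how failed STAC API calls are retried; the retry loop itself lives in the `sentinel_s2_l2a_cogs` provider and is not part of this patch. A sketch of the delay schedule these three fields imply, under the assumption (not confirmed by this diff) that retry n waits `initial_delay_ms * exponential_backoff_factor^n`:

```rust
// Sketch only: derives a delay schedule from the three StacApiRetries fields.
// Assumption: the n-th retry waits initial_delay_ms * factor^n milliseconds.
#[derive(Debug, Clone, Copy, Default)]
struct StacApiRetries {
    number_of_retries: usize,
    initial_delay_ms: u64,
    exponential_backoff_factor: f64,
}

fn delays_ms(r: &StacApiRetries) -> Vec<u64> {
    (0..r.number_of_retries)
        .map(|n| {
            let d = r.initial_delay_ms as f64 * r.exponential_backoff_factor.powi(n as i32);
            d.round() as u64
        })
        .collect()
}

fn main() {
    let r = StacApiRetries {
        number_of_retries: 3,
        initial_delay_ms: 100,
        exponential_backoff_factor: 2.0,
    };
    // 100ms, then 200ms, then 400ms between attempts
    assert_eq!(delays_ms(&r), vec![100, 200, 400]);
}
```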
+
+impl From<StacQueryBuffer> for crate::datasets::external::sentinel_s2_l2a_cogs::StacQueryBuffer {
+    fn from(value: StacQueryBuffer) -> Self {
+        crate::datasets::external::sentinel_s2_l2a_cogs::StacQueryBuffer {
+            start_seconds: value.start_seconds,
+            end_seconds: value.end_seconds,
+        }
+    }
+}
+
+impl From<crate::datasets::external::sentinel_s2_l2a_cogs::StacQueryBuffer> for StacQueryBuffer {
+    fn from(value: crate::datasets::external::sentinel_s2_l2a_cogs::StacQueryBuffer) -> Self {
+        StacQueryBuffer {
+            start_seconds: value.start_seconds,
+            end_seconds: value.end_seconds,
+        }
+    }
+}
+
+impl From<usize> for GdalRetries {
+    fn from(value: usize) -> Self {
+        GdalRetries {
+            number_of_retries: value,
+        }
+    }
+}
+
+impl From<GdalRetries> for usize {
+    fn from(value: GdalRetries) -> Self {
+        value.number_of_retries
+    }
+}
+
+impl From<SentinelS2L2ACogsProviderDefinition>
+    for crate::datasets::external::sentinel_s2_l2a_cogs::SentinelS2L2ACogsProviderDefinition
+{
+    fn from(value: SentinelS2L2ACogsProviderDefinition) -> Self {
+        crate::datasets::external::sentinel_s2_l2a_cogs::SentinelS2L2ACogsProviderDefinition {
+            name: value.name,
+            id: value.id.into(),
+            description: value.description,
+            priority: value.priority,
+            api_url: value.api_url,
+            bands: value.bands.into_iter().map(Into::into).collect(),
+            zones: value.zones.into_iter().map(Into::into).collect(),
+            stac_api_retries: value.stac_api_retries.into(),
+            gdal_retries: value.gdal_retries.into(),
+            cache_ttl: value.cache_ttl.into(),
+            query_buffer: value.query_buffer.into(),
+        }
+    }
+}
+
+impl From<crate::datasets::external::sentinel_s2_l2a_cogs::SentinelS2L2ACogsProviderDefinition>
+    for SentinelS2L2ACogsProviderDefinition
+{
+    fn from(
+        value: crate::datasets::external::sentinel_s2_l2a_cogs::SentinelS2L2ACogsProviderDefinition,
+    ) -> Self {
+        SentinelS2L2ACogsProviderDefinition {
+            r#type: Default::default(),
+            name: value.name,
+            id: value.id.into(),
+            description: value.description,
+            priority: value.priority,
+            api_url: value.api_url,
+            bands: value.bands.into_iter().map(Into::into).collect(),
+            zones: value.zones.into_iter().map(Into::into).collect(),
+            stac_api_retries: value.stac_api_retries.into(),
+            gdal_retries: value.gdal_retries.into(),
+            cache_ttl: value.cache_ttl.into(),
+            query_buffer: value.query_buffer.into(),
+        }
+    }
+}
+
+#[type_tag(value = "DatasetLayerListing")]
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct DatasetLayerListingProviderDefinition {
+    pub id: DataProviderId,
+    pub name: String,
+    pub description: String,
+    pub priority: Option<i16>,
+    pub collections: Vec<DatasetLayerListingCollection>,
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, ToSchema)]
+pub struct DatasetLayerListingCollection {
+    pub name: String,
+    pub description: String,
+    pub tags: Vec<String>,
+}
+
+impl From<DatasetLayerListingCollection>
+    for crate::datasets::dataset_listing_provider::DatasetLayerListingCollection
+{
+    fn from(value: DatasetLayerListingCollection) -> Self {
+        crate::datasets::dataset_listing_provider::DatasetLayerListingCollection {
+            name: value.name,
+            description: value.description,
+            tags: value.tags,
+        }
+    }
+}
+
+impl From<crate::datasets::dataset_listing_provider::DatasetLayerListingCollection>
+    for DatasetLayerListingCollection
+{
+    fn from(
+        value: crate::datasets::dataset_listing_provider::DatasetLayerListingCollection,
+    ) -> Self {
+        DatasetLayerListingCollection {
+            name: value.name,
+            description: value.description,
+            tags: value.tags,
+        }
+    }
+}
+
+impl From<DatasetLayerListingProviderDefinition>
+    for crate::datasets::dataset_listing_provider::DatasetLayerListingProviderDefinition
+{
+    fn from(value: DatasetLayerListingProviderDefinition) -> Self {
+        crate::datasets::dataset_listing_provider::DatasetLayerListingProviderDefinition {
+            id: value.id.into(),
+            name: value.name,
+            description: value.description,
+            priority: value.priority,
+            collections: value.collections.into_iter().map(Into::into).collect(),
+        }
+    }
+}
+
+impl From<crate::datasets::dataset_listing_provider::DatasetLayerListingProviderDefinition>
+    for DatasetLayerListingProviderDefinition
+{
+    fn from(
+        value: crate::datasets::dataset_listing_provider::DatasetLayerListingProviderDefinition,
+    ) -> Self {
+        DatasetLayerListingProviderDefinition {
+            r#type: Default::default(),
+            id: value.id.into(),
+            name: value.name,
+            description: value.description,
+            priority: value.priority,
+            collections: value.collections.into_iter().map(Into::into).collect(),
+        }
+    }
+}
+
+#[type_tag(value = "WildLIVE!")]
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct WildliveDataConnectorDefinition {
+    pub id: DataProviderId,
+    pub name: String,
+    pub description: String,
+    pub api_key: Option<String>,
+    pub priority: Option<i16>,
+}
+
+impl From<WildliveDataConnectorDefinition>
+    for crate::datasets::external::WildliveDataConnectorDefinition
+{
+    fn from(value: WildliveDataConnectorDefinition) -> Self {
+        crate::datasets::external::WildliveDataConnectorDefinition {
+            id: value.id.into(),
+            name: value.name,
+            description: value.description,
+            api_key: value.api_key,
+            priority: value.priority,
+        }
+    }
+}
+
+impl From<crate::datasets::external::WildliveDataConnectorDefinition>
+    for WildliveDataConnectorDefinition
+{
+    fn from(value: crate::datasets::external::WildliveDataConnectorDefinition) -> Self {
+        WildliveDataConnectorDefinition {
+            r#type: Default::default(),
+            id: value.id.into(),
+            name: value.name,
+            description: value.description,
+            api_key: Some(SECRET_REPLACEMENT.to_string()),
+            priority: value.priority,
+        }
+    }
+}
+
+#[derive(PartialEq, Debug, Serialize, Deserialize, Clone, ToSchema)]
+#[allow(clippy::enum_variant_names)] // TODO: think about better names
+#[schema(discriminator = "type")]
+#[serde(untagged)]
+pub enum TypedDataProviderDefinition {
+    ArunaDataProviderDefinition(ArunaDataProviderDefinition),
+    CopernicusDataspaceDataProviderDefinition(CopernicusDataspaceDataProviderDefinition),
+    DatasetLayerListingProviderDefinition(DatasetLayerListingProviderDefinition),
+    EbvPortalDataProviderDefinition(EbvPortalDataProviderDefinition),
+    EdrDataProviderDefinition(EdrDataProviderDefinition),
+    GbifDataProviderDefinition(GbifDataProviderDefinition),
+    GfbioAbcdDataProviderDefinition(GfbioAbcdDataProviderDefinition),
+    GfbioCollectionsDataProviderDefinition(GfbioCollectionsDataProviderDefinition),
+    NetCdfCfDataProviderDefinition(NetCdfCfDataProviderDefinition),
+    PangaeaDataProviderDefinition(PangaeaDataProviderDefinition),
+    SentinelS2L2ACogsProviderDefinition(SentinelS2L2ACogsProviderDefinition),
+    WildliveDataConnectorDefinition(WildliveDataConnectorDefinition),
+}
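`TypedDataProviderDefinition` is serialized untagged, so on the wire each variant is distinguished solely by the `"type"` field that the `type_tag` macro injects into every definition struct; this is also what the OpenAPI `discriminator` above refers to. A minimal sketch of that dispatch pattern with two hypothetical variants (simplified: here plain field shapes select the variant, whereas the real macro generates a tag type that only accepts its fixed string):

```rust
// Sketch of the untagged-enum-plus-type-tag pattern; variants are hypothetical.
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct FooDefinition {
    r#type: String, // the type_tag macro normally generates this tag field
    name: String,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct BarDefinition {
    r#type: String,
    base_url: String,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(untagged)]
enum TypedDefinition {
    Foo(FooDefinition),
    Bar(BarDefinition),
}

fn main() {
    // serde tries the variants in order; only Bar's field set matches here
    let json = r#"{ "type": "Bar", "baseUrl": "https://example.com" }"#;
    let def: TypedDefinition = serde_json::from_str(json).unwrap();
    assert!(matches!(def, TypedDefinition::Bar(_)));
}
```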
+
+impl From<TypedDataProviderDefinition> for crate::layers::external::TypedDataProviderDefinition {
+    fn from(value: TypedDataProviderDefinition) -> Self {
+        match value {
+            TypedDataProviderDefinition::ArunaDataProviderDefinition(def) => crate::layers::external::TypedDataProviderDefinition::ArunaDataProviderDefinition(def.into()),
+            TypedDataProviderDefinition::CopernicusDataspaceDataProviderDefinition(def) => crate::layers::external::TypedDataProviderDefinition::CopernicusDataspaceDataProviderDefinition(def.into()),
+            TypedDataProviderDefinition::DatasetLayerListingProviderDefinition(def) => crate::layers::external::TypedDataProviderDefinition::DatasetLayerListingProviderDefinition(def.into()),
+            TypedDataProviderDefinition::EbvPortalDataProviderDefinition(def) => crate::layers::external::TypedDataProviderDefinition::EbvPortalDataProviderDefinition(def.into()),
+            TypedDataProviderDefinition::EdrDataProviderDefinition(def) => crate::layers::external::TypedDataProviderDefinition::EdrDataProviderDefinition(def.into()),
+            TypedDataProviderDefinition::GbifDataProviderDefinition(def) => crate::layers::external::TypedDataProviderDefinition::GbifDataProviderDefinition(def.into()),
+            TypedDataProviderDefinition::GfbioAbcdDataProviderDefinition(def) => crate::layers::external::TypedDataProviderDefinition::GfbioAbcdDataProviderDefinition(def.into()),
+            TypedDataProviderDefinition::GfbioCollectionsDataProviderDefinition(def) => crate::layers::external::TypedDataProviderDefinition::GfbioCollectionsDataProviderDefinition(def.into()),
+            TypedDataProviderDefinition::NetCdfCfDataProviderDefinition(def) => crate::layers::external::TypedDataProviderDefinition::NetCdfCfDataProviderDefinition(def.into()),
+            TypedDataProviderDefinition::PangaeaDataProviderDefinition(def) => crate::layers::external::TypedDataProviderDefinition::PangaeaDataProviderDefinition(def.into()),
+            TypedDataProviderDefinition::SentinelS2L2ACogsProviderDefinition(def) => crate::layers::external::TypedDataProviderDefinition::SentinelS2L2ACogsProviderDefinition(def.into()),
+            TypedDataProviderDefinition::WildliveDataConnectorDefinition(def) => crate::layers::external::TypedDataProviderDefinition::WildliveDataConnectorDefinition(def.into()),
+        }
+    }
+}
+
+impl From<crate::layers::external::TypedDataProviderDefinition> for TypedDataProviderDefinition {
+    fn from(value: crate::layers::external::TypedDataProviderDefinition) -> Self {
+        match value {
+            crate::layers::external::TypedDataProviderDefinition::ArunaDataProviderDefinition(def) => TypedDataProviderDefinition::ArunaDataProviderDefinition(def.into()),
+            crate::layers::external::TypedDataProviderDefinition::CopernicusDataspaceDataProviderDefinition(def) => TypedDataProviderDefinition::CopernicusDataspaceDataProviderDefinition(def.into()),
+            crate::layers::external::TypedDataProviderDefinition::DatasetLayerListingProviderDefinition(def) => TypedDataProviderDefinition::DatasetLayerListingProviderDefinition(def.into()),
+            crate::layers::external::TypedDataProviderDefinition::EbvPortalDataProviderDefinition(def) => TypedDataProviderDefinition::EbvPortalDataProviderDefinition(def.into()),
+            crate::layers::external::TypedDataProviderDefinition::EdrDataProviderDefinition(def) => TypedDataProviderDefinition::EdrDataProviderDefinition(def.into()),
+            crate::layers::external::TypedDataProviderDefinition::GbifDataProviderDefinition(def) => TypedDataProviderDefinition::GbifDataProviderDefinition(def.into()),
+            crate::layers::external::TypedDataProviderDefinition::GfbioAbcdDataProviderDefinition(def) => TypedDataProviderDefinition::GfbioAbcdDataProviderDefinition(def.into()),
+            crate::layers::external::TypedDataProviderDefinition::GfbioCollectionsDataProviderDefinition(def) => TypedDataProviderDefinition::GfbioCollectionsDataProviderDefinition(def.into()),
+            crate::layers::external::TypedDataProviderDefinition::NetCdfCfDataProviderDefinition(def) => TypedDataProviderDefinition::NetCdfCfDataProviderDefinition(def.into()),
+            crate::layers::external::TypedDataProviderDefinition::PangaeaDataProviderDefinition(def) => TypedDataProviderDefinition::PangaeaDataProviderDefinition(def.into()),
+            crate::layers::external::TypedDataProviderDefinition::SentinelS2L2ACogsProviderDefinition(def) => TypedDataProviderDefinition::SentinelS2L2ACogsProviderDefinition(def.into()),
+            crate::layers::external::TypedDataProviderDefinition::WildliveDataConnectorDefinition(def) => TypedDataProviderDefinition::WildliveDataConnectorDefinition(def.into()),
+        }
+    }
+}
+
 impl From<&Volume> for crate::datasets::upload::Volume {
     fn from(value: &Volume) -> Self {
         Self {
diff --git a/services/src/contexts/migrations/current_schema.sql b/services/src/contexts/migrations/current_schema.sql
index c5bbe0882..125f61601 100644
--- a/services/src/contexts/migrations/current_schema.sql
+++ b/services/src/contexts/migrations/current_schema.sql
@@ -1129,6 +1129,7 @@ CREATE TABLE permissions (
     ) ON DELETE CASCADE,
     project_id uuid REFERENCES projects (id) ON DELETE CASCADE,
     ml_model_id uuid REFERENCES ml_models (id) ON DELETE CASCADE,
+    provider_id uuid REFERENCES layer_providers (id) ON DELETE CASCADE,
    CHECK (
        (
            (dataset_id IS NOT NULL)::integer
@@ -1136,6 +1137,7 @@ CREATE TABLE permissions (
            + (layer_collection_id IS NOT NULL)::integer
            + (project_id IS NOT NULL)::integer
            + (ml_model_id IS NOT NULL)::integer
+           + (provider_id IS NOT NULL)::integer
        ) = 1
    )
 );
@@ -1210,6 +1212,17 @@ INNER JOIN permissions AS p ON (
     r.role_id = p.role_id AND p.layer_id IS NOT NULL
 );
 
+CREATE VIEW user_permitted_providers
+AS
+SELECT
+    r.user_id,
+    p.provider_id,
+    p.permission
+FROM user_roles AS r
+INNER JOIN permissions AS p ON (
+    r.role_id = p.role_id AND p.provider_id IS NOT NULL
+);
+
 CREATE TABLE oidc_session_tokens (
     session_id uuid PRIMARY KEY REFERENCES sessions (
         id
diff --git a/services/src/contexts/migrations/migration_0020_provider_permissions.rs b/services/src/contexts/migrations/migration_0020_provider_permissions.rs
new file mode 100644
index 000000000..8bb2db5fd
--- /dev/null
+++ b/services/src/contexts/migrations/migration_0020_provider_permissions.rs
@@ -0,0 +1,26 @@
+use super::database_migration::{DatabaseVersion, Migration};
+use crate::contexts::migrations::migration_0019_ml_model_no_data::Migration0019MlModelNoData;
+use crate::error::Result;
+use async_trait::async_trait;
+use tokio_postgres::Transaction;
+
+/// This migration adds the provider permissions
+pub struct Migration0020ProviderPermissions;
+
+#[async_trait]
+impl Migration for Migration0020ProviderPermissions {
+    fn prev_version(&self) -> Option<DatabaseVersion> {
+        Some(Migration0019MlModelNoData.version())
+    }
+
+    fn version(&self) -> DatabaseVersion {
+        "0020_provider_permissions".into()
+    }
+
+    async fn migrate(&self, tx: &Transaction<'_>) -> Result<()> {
+        tx.batch_execute(include_str!("migration_0020_provider_permissions.sql"))
+            .await?;
+
+        Ok(())
+    }
+}
diff --git a/services/src/contexts/migrations/migration_0020_provider_permissions.sql b/services/src/contexts/migrations/migration_0020_provider_permissions.sql
new file mode 100644
index 000000000..572108075
--- /dev/null
+++ b/services/src/contexts/migrations/migration_0020_provider_permissions.sql
@@ -0,0 +1,30 @@
+ALTER TABLE permissions
+ADD COLUMN provider_id uuid
+REFERENCES layer_providers (id) ON DELETE CASCADE;
+
+ALTER TABLE permissions
+DROP CONSTRAINT permissions_check;
+
+ALTER TABLE permissions
+ADD CONSTRAINT permissions_check CHECK (
+    (
+        (dataset_id IS NOT NULL)::integer
+        + (layer_id IS NOT NULL)::integer
+        + (layer_collection_id IS NOT NULL)::integer
+        + (project_id IS NOT NULL)::integer
+        + (ml_model_id IS NOT NULL)::integer
+        + (provider_id IS NOT NULL)::integer
+    ) = 1
+);
+
+
+CREATE VIEW user_permitted_providers
+AS
+SELECT
+    r.user_id,
+    p.provider_id,
+    p.permission
+FROM user_roles AS r
+INNER JOIN permissions AS p ON (
+    r.role_id = p.role_id AND p.provider_id IS NOT NULL
+);
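The rebuilt CHECK constraint encodes that every permissions row points at exactly one resource: each `IS NOT NULL` test is cast to an integer and the casts must sum to 1. The same invariant sketched in Rust for illustration (the column list below merely mirrors the constraint; nothing here is part of the migration):

```rust
// Illustrative only: models the permissions CHECK constraint, which requires
// exactly one of the optional resource-id columns to be set per row.
fn exactly_one_resource(ids: &[Option<&str>]) -> bool {
    ids.iter().filter(|id| id.is_some()).count() == 1
}

fn main() {
    // a row granting a permission on a provider; all other columns are NULL
    let row = [None, None, None, None, None, Some("provider-uuid")];
    assert!(exactly_one_resource(&row));

    // invalid: two resource columns set at once
    let bad = [Some("dataset-uuid"), None, None, None, None, Some("provider-uuid")];
    assert!(!exactly_one_resource(&bad));
}
```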
diff --git a/services/src/contexts/migrations/mod.rs b/services/src/contexts/migrations/mod.rs
index 92c80b6dc..018568450 100644
--- a/services/src/contexts/migrations/mod.rs
+++ b/services/src/contexts/migrations/mod.rs
@@ -4,6 +4,7 @@ pub use crate::contexts::migrations::{
     migration_0016_merge_providers::Migration0016MergeProviders,
     migration_0017_ml_model_tensor_shape::Migration0017MlModelTensorShape,
     migration_0018_wildlive_connector::Migration0018WildliveConnector,
+    migration_0020_provider_permissions::Migration0020ProviderPermissions,
 };
 pub use database_migration::{
     DatabaseVersion, Migration, MigrationResult, initialize_database, migrate_database,
@@ -16,6 +17,7 @@ mod migration_0016_merge_providers;
 mod migration_0017_ml_model_tensor_shape;
 mod migration_0018_wildlive_connector;
 mod migration_0019_ml_model_no_data;
+mod migration_0020_provider_permissions;
 
 #[cfg(test)]
 mod schema_info;
@@ -34,6 +36,7 @@ pub fn all_migrations() -> Vec<Box<dyn Migration>> {
         Box::new(Migration0017MlModelTensorShape),
         Box::new(Migration0018WildliveConnector),
         Box::new(Migration0019MlModelNoData),
+        Box::new(Migration0020ProviderPermissions),
     ]
 }
diff --git a/services/src/datasets/dataset_listing_provider.rs b/services/src/datasets/dataset_listing_provider.rs
index 416f3a84c..267c25f26 100644
--- a/services/src/datasets/dataset_listing_provider.rs
+++ b/services/src/datasets/dataset_listing_provider.rs
@@ -1,18 +1,5 @@
 use std::{borrow::Cow, collections::HashMap, str::FromStr};
 
-use async_trait::async_trait;
-use geoengine_datatypes::{
-    dataset::{DataId, LayerId},
-    primitives::{RasterQueryRectangle, VectorQueryRectangle},
-};
-use geoengine_operators::{
-    engine::{MetaData, MetaDataProvider, RasterResultDescriptor, VectorResultDescriptor},
-    mock::MockDatasetDataSourceLoadingInfo,
-    source::{GdalLoadingInfo, OgrSourceDataset},
-};
-use postgres_types::{FromSql, ToSql};
-use serde::{Deserialize, Serialize};
-
 use crate::{
     contexts::GeoEngineDb,
     datasets::listing::DatasetProvider,
@@ -31,6 +18,18 @@ use crate::{
     util::operators::source_operator_from_dataset,
     workflows::workflow::Workflow,
 };
+use async_trait::async_trait;
+use geoengine_datatypes::{
+    dataset::{DataId, LayerId},
+    primitives::{RasterQueryRectangle, VectorQueryRectangle},
+};
+use geoengine_operators::{
+    engine::{MetaData, MetaDataProvider, RasterResultDescriptor, VectorResultDescriptor},
+    mock::MockDatasetDataSourceLoadingInfo,
+    source::{GdalLoadingInfo, OgrSourceDataset},
+};
+use postgres_types::{FromSql, ToSql};
+use serde::{Deserialize, Serialize};
 
 use geoengine_datatypes::dataset::{DataProviderId, DatasetId};
diff --git a/services/src/datasets/external/aruna/mod.rs b/services/src/datasets/external/aruna/mod.rs
index 0d41523ac..7c9e9b989 100644
--- a/services/src/datasets/external/aruna/mod.rs
+++ b/services/src/datasets/external/aruna/mod.rs
@@ -4,6 +4,19 @@ use std::marker::PhantomData;
 use std::path::PathBuf;
 use std::str::FromStr;
 
+use crate::api::model::services::SECRET_REPLACEMENT;
+use crate::contexts::GeoEngineDb;
+use crate::datasets::external::aruna::metadata::{DataType, GEMetadata, RasterInfo, VectorInfo};
+use crate::datasets::listing::ProvenanceOutput;
+use crate::layers::external::{DataProvider, DataProviderDefinition, TypedDataProviderDefinition};
+use crate::layers::layer::{
+    CollectionItem, Layer, LayerCollection, LayerCollectionListOptions, LayerListing,
+    ProviderLayerCollectionId, ProviderLayerId,
+};
+use crate::layers::listing::{
+    LayerCollectionId, LayerCollectionProvider, ProviderCapabilities, SearchCapabilities,
+};
+use crate::workflows::workflow::Workflow;
 use aruna_rust_api::api::storage::models::v2::relation::Relation as ArunaRelationEnum;
 use aruna_rust_api::api::storage::models::v2::{
     Dataset, InternalRelationVariant, KeyValue, KeyValueVariant, Object, ResourceVariant,
@@ -18,15 +31,6 @@ use aruna_rust_api::api::storage::services::v2::{
     GetDatasetRequest, GetDatasetsRequest, GetDownloadUrlRequest, GetObjectsRequest,
     GetProjectRequest,
 };
-use postgres_types::{FromSql, ToSql};
-use serde::{Deserialize, Serialize};
-use snafu::ensure;
-use tonic::codegen::InterceptedService;
-use tonic::metadata::{AsciiMetadataKey, AsciiMetadataValue};
-use tonic::service::Interceptor;
-use tonic::transport::{Channel, Endpoint};
-use tonic::{Request, Status};
-
 use geoengine_datatypes::collections::VectorDataType;
 use geoengine_datatypes::dataset::{DataId, DataProviderId, LayerId};
 use geoengine_datatypes::primitives::CacheTtlSeconds;
@@ -46,19 +50,14 @@ use geoengine_operators::source::{
     OgrSourceColumnSpec, OgrSourceDataset, OgrSourceDatasetTimeType, OgrSourceDurationSpec,
     OgrSourceErrorSpec, OgrSourceParameters, OgrSourceTimeFormat,
 };
-
-use crate::contexts::GeoEngineDb;
-use crate::datasets::external::aruna::metadata::{DataType, GEMetadata, RasterInfo, VectorInfo};
-use crate::datasets::listing::ProvenanceOutput;
-use crate::layers::external::{DataProvider, DataProviderDefinition};
-use crate::layers::layer::{
-    CollectionItem, Layer, LayerCollection, LayerCollectionListOptions, LayerListing,
-    ProviderLayerCollectionId, ProviderLayerId,
-};
-use crate::layers::listing::{
-    LayerCollectionId, LayerCollectionProvider, ProviderCapabilities, SearchCapabilities,
-};
-use crate::workflows::workflow::Workflow;
+use postgres_types::{FromSql, ToSql};
+use serde::{Deserialize, Serialize};
+use snafu::ensure;
+use tonic::codegen::InterceptedService;
+use tonic::metadata::{AsciiMetadataKey, AsciiMetadataValue};
+use tonic::service::Interceptor;
+use tonic::transport::{Channel, Endpoint};
+use tonic::{Request, Status};
 
 pub use self::error::ArunaProviderError;
 
@@ -108,6 +107,21 @@ impl<D: GeoEngineDb> DataProviderDefinition<D> for ArunaDataProviderDefinition {
     fn priority(&self) -> i16 {
         self.priority.unwrap_or(0)
     }
+
+    fn update(&self, new: TypedDataProviderDefinition) -> TypedDataProviderDefinition
+    where
+        Self: Sized,
+    {
+        match new {
+            TypedDataProviderDefinition::ArunaDataProviderDefinition(mut new) => {
+                if new.api_token == SECRET_REPLACEMENT {
+                    new.api_token.clone_from(&self.api_token);
+                }
+                TypedDataProviderDefinition::ArunaDataProviderDefinition(new)
+            }
+            _ => new,
+        }
+    }
 }
 
 /// Intercepts `gRPC` calls to the core-storage and attaches the authorization token
diff --git a/services/src/datasets/external/copernicus_dataspace/provider.rs b/services/src/datasets/external/copernicus_dataspace/provider.rs
index e29f3fdb8..258176065 100644
--- a/services/src/datasets/external/copernicus_dataspace/provider.rs
+++ b/services/src/datasets/external/copernicus_dataspace/provider.rs
@@ -32,14 +32,6 @@ use ordered_float::NotNan;
 use serde::{Deserialize, Serialize};
 use strum::{EnumIter, IntoEnumIterator};
 
-use crate::{
-    contexts::GeoEngineDb,
-    layers::{
-        external::{DataProvider, DataProviderDefinition},
-        listing::{LayerCollectionProvider, ProviderCapabilities, SearchCapabilities},
-    },
-};
-
 use super::{
     ids::{
         CopernicusDataId, CopernicusDataspaceLayerCollectionId, CopernicusDataspaceLayerId,
@@ -48,6 +40,15 @@ use super::{
     },
     sentinel2::Sentinel2Metadata,
 };
+use crate::api::model::services::SECRET_REPLACEMENT;
+use crate::layers::external::TypedDataProviderDefinition;
+use crate::{
+    contexts::GeoEngineDb,
+    layers::{
+        external::{DataProvider, DataProviderDefinition},
+        listing::{LayerCollectionProvider, ProviderCapabilities, SearchCapabilities},
+    },
+};
 
 #[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
 #[serde(rename_all = "camelCase")]
@@ -96,6 +97,22 @@ impl<D: GeoEngineDb> DataProviderDefinition<D> for CopernicusDataspaceDataProviderDefinition {
     fn priority(&self) -> i16 {
         self.priority.unwrap_or(0)
     }
+
+    fn update(&self, new: TypedDataProviderDefinition) -> TypedDataProviderDefinition
+    where
+        Self: Sized,
+    {
+        match new {
+            TypedDataProviderDefinition::CopernicusDataspaceDataProviderDefinition(mut new) => {
+                if new.s3_secret_key == SECRET_REPLACEMENT {
+                    new.s3_secret_key.clone_from(&self.s3_secret_key);
+                }
+                // TODO: also hide the access key?
+                TypedDataProviderDefinition::CopernicusDataspaceDataProviderDefinition(new)
+            }
+            _ => new,
+        }
+    }
 }
 
 #[derive(Clone, Debug)]
diff --git a/services/src/datasets/external/gbif.rs b/services/src/datasets/external/gbif.rs
index 3ab455c54..45e885dac 100644
--- a/services/src/datasets/external/gbif.rs
+++ b/services/src/datasets/external/gbif.rs
@@ -1,7 +1,8 @@
+use crate::api::model::services::SECRET_REPLACEMENT;
 use crate::contexts::GeoEngineDb;
 use crate::datasets::listing::{Provenance, ProvenanceOutput};
 use crate::error::{Error, Result};
-use crate::layers::external::{DataProvider, DataProviderDefinition};
+use crate::layers::external::{DataProvider, DataProviderDefinition, TypedDataProviderDefinition};
 use crate::layers::layer::{
     CollectionItem, Layer, LayerCollection, LayerCollectionListOptions, LayerCollectionListing,
     LayerListing, ProviderLayerCollectionId, ProviderLayerId,
@@ -88,6 +89,21 @@ impl<D: GeoEngineDb> DataProviderDefinition<D> for GbifDataProviderDefinition {
     fn priority(&self) -> i16 {
         self.priority.unwrap_or(0)
     }
+
+    fn update(&self, new: TypedDataProviderDefinition) -> TypedDataProviderDefinition
+    where
+        Self: Sized,
+    {
+        match new {
+            TypedDataProviderDefinition::GbifDataProviderDefinition(mut new) => {
+                if new.db_config.password == SECRET_REPLACEMENT {
+                    new.db_config.password.clone_from(&self.db_config.password);
+                }
+                TypedDataProviderDefinition::GbifDataProviderDefinition(new)
+            }
+            _ => new,
+        }
+    }
 }
 
 #[derive(Debug)]
diff --git a/services/src/datasets/external/gfbio_abcd.rs b/services/src/datasets/external/gfbio_abcd.rs
index dc492242a..784c8752d 100644
--- a/services/src/datasets/external/gfbio_abcd.rs
+++ b/services/src/datasets/external/gfbio_abcd.rs
@@ -1,8 +1,9 @@
+use crate::api::model::services::SECRET_REPLACEMENT;
 use crate::contexts::GeoEngineDb;
 use crate::datasets::listing::{Provenance, ProvenanceOutput};
 use crate::error::Result;
 use crate::error::{self, Error};
-use crate::layers::external::{DataProvider, DataProviderDefinition};
+use crate::layers::external::{DataProvider, DataProviderDefinition, TypedDataProviderDefinition};
 use crate::layers::layer::{
     CollectionItem, Layer, LayerCollection, LayerCollectionListOptions, LayerListing,
     ProviderLayerCollectionId, ProviderLayerId,
@@ -80,6 +81,21 @@ impl<D: GeoEngineDb> DataProviderDefinition<D> for GfbioAbcdDataProviderDefinition {
     fn priority(&self) -> i16 {
         self.priority.unwrap_or(0)
     }
+
+    fn update(&self, new: TypedDataProviderDefinition) -> TypedDataProviderDefinition
+    where
+        Self: Sized,
+    {
+        match new {
+            TypedDataProviderDefinition::GfbioAbcdDataProviderDefinition(mut new) => {
+                if new.db_config.password == SECRET_REPLACEMENT {
+                    new.db_config.password.clone_from(&self.db_config.password);
+                }
+                TypedDataProviderDefinition::GfbioAbcdDataProviderDefinition(new)
+            }
+            _ => new,
+        }
+    }
 }
 
 // TODO: make table names and column names configurable like in crawler
diff --git a/services/src/datasets/external/gfbio_collections.rs b/services/src/datasets/external/gfbio_collections.rs
index e91f3f251..6f10ad68b 100644
--- a/services/src/datasets/external/gfbio_collections.rs
+++ b/services/src/datasets/external/gfbio_collections.rs
@@ -1,10 +1,11 @@
 use super::gfbio_abcd::GfbioAbcdDataProvider;
 use super::pangaea::{PangaeaDataProvider, PangaeaMetaData};
+use crate::api::model::services::SECRET_REPLACEMENT;
 use crate::contexts::GeoEngineDb;
 use crate::datasets::listing::ProvenanceOutput;
 use crate::error::Error::ProviderDoesNotSupportBrowsing;
 use crate::error::{Error, Result};
-use crate::layers::external::{DataProvider, DataProviderDefinition};
+use crate::layers::external::{DataProvider, DataProviderDefinition, TypedDataProviderDefinition};
 use crate::layers::layer::{
     CollectionItem, Layer, LayerCollection, LayerCollectionListOptions, LayerListing,
     ProviderLayerCollectionId, ProviderLayerId,
@@ -96,6 +97,27 @@ impl<D: GeoEngineDb> DataProviderDefinition<D> for GfbioCollectionsDataProviderDefinition {
     fn priority(&self) -> i16 {
         self.priority.unwrap_or(0)
     }
+
+    fn update(&self, new: TypedDataProviderDefinition) -> TypedDataProviderDefinition
+    where
+        Self: Sized,
+    {
+        match new {
+            TypedDataProviderDefinition::GfbioCollectionsDataProviderDefinition(mut new) => {
+                if new.abcd_db_config.password == SECRET_REPLACEMENT {
+                    new.abcd_db_config
+                        .password
+                        .clone_from(&self.abcd_db_config.password);
+                }
+                if new.collection_api_auth_token == SECRET_REPLACEMENT {
+                    new.collection_api_auth_token
+                        .clone_from(&self.collection_api_auth_token);
+                }
+                TypedDataProviderDefinition::GfbioCollectionsDataProviderDefinition(new)
+            }
+            _ => new,
+        }
+    }
 }
 
 #[derive(Debug)]
diff --git a/services/src/datasets/external/mod.rs b/services/src/datasets/external/mod.rs
index 2500ccd81..2f0817137 100644
--- a/services/src/datasets/external/mod.rs
+++ b/services/src/datasets/external/mod.rs
@@ -1,12 +1,12 @@
 pub mod aruna;
-mod copernicus_dataspace;
+pub mod copernicus_dataspace;
 pub mod edr;
 pub mod gbif;
 pub mod gfbio_abcd;
 pub mod gfbio_collections;
 pub mod netcdfcf;
 pub mod pangaea;
-mod sentinel_s2_l2a_cogs;
+pub mod sentinel_s2_l2a_cogs;
 mod wildlive;
 
 pub use copernicus_dataspace::CopernicusDataspaceDataProviderDefinition;
diff --git a/services/src/datasets/external/wildlive/mod.rs b/services/src/datasets/external/wildlive/mod.rs
index c43271499..585dfaca0 100644
--- a/services/src/datasets/external/wildlive/mod.rs
+++ b/services/src/datasets/external/wildlive/mod.rs
@@ -46,6 +46,8 @@ use serde::{Deserialize, Serialize};
 use std::sync::Arc;
 use url::Url;
 
+use crate::api::model::services::SECRET_REPLACEMENT;
+use crate::layers::external::TypedDataProviderDefinition;
 pub use cache::WildliveDbCache;
 pub use error::WildliveError;
 
@@ -56,6 +58,7 @@ mod error;
 type Result<T, E = WildliveError> = std::result::Result<T, E>;
 
 #[derive(Debug, PartialEq, Clone, Deserialize, Serialize, FromSql, ToSql)]
+#[serde(rename_all = "camelCase")]
 pub struct WildliveDataConnectorDefinition {
     pub id: DataProviderId,
     pub name: String,
@@ -134,6 +137,21 @@ impl<D: GeoEngineDb> DataProviderDefinition<D> for WildliveDataConnectorDefinition {
     fn priority(&self) -> i16 {
         self.priority.unwrap_or(0)
     }
+
+    fn update(&self, new: TypedDataProviderDefinition) -> TypedDataProviderDefinition
+    where
+        Self: Sized,
+    {
+        match new {
+            TypedDataProviderDefinition::WildliveDataConnectorDefinition(mut new) => {
+                if new.api_key == Some(SECRET_REPLACEMENT.to_string()) {
+                    new.api_key.clone_from(&self.api_key);
+                }
+                TypedDataProviderDefinition::WildliveDataConnectorDefinition(new)
+            }
+            _ => new,
+        }
+    }
 }
 
 #[async_trait]
diff --git a/services/src/error.rs b/services/src/error.rs
index 92bcf924d..595668f22 100644
--- a/services/src/error.rs
+++ b/services/src/error.rs
@@ -7,7 +7,7 @@ use crate::datasets::external::netcdfcf::NetCdfCf4DProviderError;
 use crate::{layers::listing::LayerCollectionId, workflows::workflow::WorkflowId};
 use actix_web::HttpResponse;
 use actix_web::http::StatusCode;
-use geoengine_datatypes::dataset::LayerId;
+use geoengine_datatypes::dataset::{DataProviderId, LayerId};
 use geoengine_datatypes::error::ErrorSource;
 use geoengine_datatypes::util::helpers::ge_report;
 use ordered_float::FloatIsNan;
@@ -114,11 +114,11 @@ pub enum Error {
     #[snafu(display("Failed to delete the project."))]
     ProjectDeleteFailed,
     PermissionFailed,
-    #[snafu(display("A permission error occured: {source}."))]
+    #[snafu(display("A permission error occurred: {source}."))]
     PermissionDb {
         source: Box<dyn ErrorSource>,
     },
-    #[snafu(display("A role error occured: {source}."))]
+    #[snafu(display("A role error occurred: {source}."))]
     RoleDb {
         source: Box<dyn ErrorSource>,
     },
@@ -515,6 +515,17 @@ pub enum Error {
         volume_name: String,
     },
 
+    #[snafu(display("A provider with id '{}' already exists", provider_id))]
+    ProviderIdAlreadyExists {
+        provider_id: DataProviderId,
+    },
+
+    #[snafu(display("An existing provider's type cannot be modified"))]
+    ProviderTypeUnmodifiable,
+
+    #[snafu(display("An existing provider's id cannot be modified"))]
+    ProviderIdUnmodifiable,
+
     #[snafu(display("Unknown resource name {} of kind {}", name, kind))]
     UnknownResource {
         kind: String,
diff --git a/services/src/layers/add_from_directory.rs b/services/src/layers/add_from_directory.rs
index eea702b9e..b68202de7 100644
--- a/services/src/layers/add_from_directory.rs
+++ b/services/src/layers/add_from_directory.rs
@@ -1,4 +1,7 @@
+use super::storage::LayerProviderDb;
+use crate::contexts::GeoEngineDb;
 use crate::datasets::storage::{DatasetDb, DatasetDefinition};
+use crate::layers::external::DataProviderDefinition;
 use crate::layers::storage::LayerDb;
 use crate::{
     error::Result,
@@ -22,8 +25,6 @@ use std::{
 };
 use tracing::{error, info, warn};
 use uuid::Uuid;
 
-use super::storage::LayerProviderDb;
-
 pub const UNSORTED_COLLECTION_ID: Uuid = Uuid::from_u128(0xffb2_dd9e_f5ad_427c_b7f1_c9a0_c7a0_ae3f);
 
 pub async fn add_layers_from_directory<L: LayerDb>(db: &mut L, file_path: PathBuf) {
@@ -198,15 +199,31 @@ pub async fn add_layer_collections_from_directory<
     }
 }
 
-pub async fn add_providers_from_directory<D: LayerProviderDb>(db: &mut D, base_path: PathBuf) {
-    async fn add_provider_definition_from_dir_entry<D: LayerProviderDb>(
+pub async fn add_providers_from_directory<D: LayerProviderDb + PermissionDb + GeoEngineDb>(
+    db: &mut D,
+    base_path: PathBuf,
+) {
+    async fn add_provider_definition_from_dir_entry<
+        D: LayerProviderDb + PermissionDb + GeoEngineDb,
+    >(
         db: &mut D,
         entry: &DirEntry,
     ) -> Result<()> {
         let def: TypedDataProviderDefinition =
            serde_json::from_reader(BufReader::new(File::open(entry.path())?))?;
 
-        db.add_layer_provider(def).await?; // TODO: add as system user
+        db.add_layer_provider(def.clone()).await?;
+
+        let id = <TypedDataProviderDefinition as DataProviderDefinition<D>>::id(&def);
+
+        // share with users
+        db.add_permission(Role::registered_user_role_id(), id, Permission::Read)
+            .await
+            .boxed_context(crate::error::PermissionDb)?;
+        db.add_permission(Role::anonymous_role_id(), id, Permission::Read)
+            .await
+            .boxed_context(crate::error::PermissionDb)?;
+
         Ok(())
     }
diff --git a/services/src/layers/external.rs b/services/src/layers/external.rs
index 6d87626ca..7f9f3528b 100644
--- a/services/src/layers/external.rs
+++ b/services/src/layers/external.rs
@@ -45,6 +45,13 @@ pub trait DataProviderDefinition<D: GeoEngineDb>: Send + Sync + std::fmt::Debug {
     fn priority(&self) -> i16 {
         0
     }
+
+    fn update(&self, new: TypedDataProviderDefinition) -> TypedDataProviderDefinition
+    where
+        Self: Sized,
+    {
+        new
+    }
 }
 
 /// A provider of layers that are not hosted by Geo Engine itself but some external party
@@ -321,4 +328,48 @@ impl<D: GeoEngineDb> DataProviderDefinition<D> for TypedDataProviderDefinition {
             }
         }
     }
+
+    fn update(&self, other: TypedDataProviderDefinition) -> TypedDataProviderDefinition
+    where
+        Self: Sized,
+    {
+        match self {
+            TypedDataProviderDefinition::ArunaDataProviderDefinition(def) => {
+                DataProviderDefinition::<D>::update(def, other)
+            }
+            TypedDataProviderDefinition::DatasetLayerListingProviderDefinition(def) => {
+                DataProviderDefinition::<D>::update(def, other)
+            }
+            TypedDataProviderDefinition::GbifDataProviderDefinition(def) => {
+                DataProviderDefinition::<D>::update(def, other)
+            }
+            TypedDataProviderDefinition::GfbioAbcdDataProviderDefinition(def) => {
+                DataProviderDefinition::<D>::update(def, other)
+            }
+            TypedDataProviderDefinition::GfbioCollectionsDataProviderDefinition(def) => {
+                DataProviderDefinition::<D>::update(def, other)
+            }
+            TypedDataProviderDefinition::EbvPortalDataProviderDefinition(def) => {
+                DataProviderDefinition::<D>::update(def, other)
+            }
+            TypedDataProviderDefinition::NetCdfCfDataProviderDefinition(def) => {
+                DataProviderDefinition::<D>::update(def, other)
+            }
+            TypedDataProviderDefinition::PangaeaDataProviderDefinition(def) => {
+                DataProviderDefinition::<D>::update(def, other)
+            }
+            TypedDataProviderDefinition::EdrDataProviderDefinition(def) => {
+                DataProviderDefinition::<D>::update(def, other)
+            }
+            TypedDataProviderDefinition::CopernicusDataspaceDataProviderDefinition(def) => {
+                DataProviderDefinition::<D>::update(def, other)
+            }
+            TypedDataProviderDefinition::SentinelS2L2ACogsProviderDefinition(def) => {
+                DataProviderDefinition::<D>::update(def, other)
+            }
+            TypedDataProviderDefinition::WildliveDataConnectorDefinition(def) => {
+                DataProviderDefinition::<D>::update(def, other)
+            }
+        }
+    }
 }
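The trait-level `update` defaults to accepting the incoming definition wholesale; providers that carry secrets override it to copy stored secrets back in when the client echoes the placeholder. The PUT handler's flow (load old definition, apply `old.update(new)`, persist the result) then composes as below, sketched with simplified stand-in types rather than the actual service code:

```rust
// Sketch of the update flow with simplified stand-in types (the real code
// uses TypedDataProviderDefinition and runs inside a database transaction).
#[derive(Debug, Clone, PartialEq)]
struct Definition {
    name: String,
    api_token: String,
}

trait ProviderDefinition {
    // Default: take the new definition as-is.
    fn update(&self, new: Definition) -> Definition {
        new
    }
}

struct StoredProvider(Definition);

impl ProviderDefinition for StoredProvider {
    // Override: keep the stored secret if the client sent the placeholder back.
    fn update(&self, mut new: Definition) -> Definition {
        if new.api_token == "*****" { // hypothetical placeholder value
            new.api_token.clone_from(&self.0.api_token);
        }
        new
    }
}

fn main() {
    let stored = StoredProvider(Definition { name: "old".into(), api_token: "secret".into() });
    // A client renames the provider but echoes the masked token unchanged.
    let incoming = Definition { name: "new".into(), api_token: "*****".into() };
    let merged = stored.update(incoming);
    assert_eq!(merged.name, "new");
    assert_eq!(merged.api_token, "secret");
}
```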
diff --git a/services/src/layers/postgres_layer_db.rs b/services/src/layers/postgres_layer_db.rs
index faf0d3afd..47a6dbf1b 100644
--- a/services/src/layers/postgres_layer_db.rs
+++ b/services/src/layers/postgres_layer_db.rs
@@ -13,11 +13,14 @@ use super::storage::{
     LayerProviderListingOptions,
 };
 use crate::contexts::PostgresDb;
+use crate::error::Error::{
+    ProviderIdAlreadyExists, ProviderIdUnmodifiable, ProviderTypeUnmodifiable,
+};
 use crate::layers::external::DataProviderDefinition;
 use crate::permissions::{Permission, RoleId, TxPermissionDb};
 use crate::workflows::registry::TxWorkflowRegistry;
 use crate::{
-    error::{self, Result},
+    error::Result,
     layers::{
         LayerDbError,
         layer::{AddLayer, AddLayerCollection},
@@ -34,7 +37,7 @@ use bb8_postgres::tokio_postgres::{
 use geoengine_datatypes::dataset::{DataProviderId, LayerId};
 use geoengine_datatypes::error::BoxedResultExt;
 use geoengine_datatypes::util::HashMapTextTextDbType;
-use snafu::{ResultExt, ensure};
+use snafu::ResultExt;
 use std::str::FromStr;
 use tokio_postgres::Transaction;
 use tonic::async_trait;
@@ -326,7 +329,7 @@ where
 
         transaction
             .execute(
-                "UPDATE layer_collections 
+                "UPDATE layer_collections
                 SET name = $1, description = $2, properties = $3
                 WHERE id = $4;",
                 &[
@@ -350,14 +353,14 @@ fn create_search_query(full_info: bool) -> String {
         )
         SELECT DISTINCT *
         FROM (
-            SELECT 
+            SELECT
                 {}
             FROM user_permitted_layer_collections u
                 JOIN layer_collections lc ON (u.layer_collection_id = lc.id)
                 JOIN (SELECT DISTINCT child FROM collection_children JOIN parents ON (id = parent)) cc ON (id = cc.child)
             WHERE u.user_id = $4 AND name ILIKE $5
         ) u
         UNION (
-            SELECT 
+            SELECT
                 {}
             FROM user_permitted_layers ul
                 JOIN layers uc ON (ul.layer_id = uc.id)
@@ -365,7 +368,7 @@ fn create_search_query(full_info: bool) -> String {
             WHERE ul.user_id = $4 AND name ILIKE $5
         )
         ORDER BY {}name ASC
-        LIMIT $2 
+        LIMIT $2
         OFFSET $3;",
         if full_info {
             "concat(id, '') AS id,
@@ -437,8 +440,8 @@ where
             .prepare(
                 "
         SELECT name, description, properties
-        FROM user_permitted_layer_collections p 
-            JOIN layer_collections c ON (p.layer_collection_id = c.id) 
+        FROM user_permitted_layer_collections p
+            JOIN layer_collections c ON (p.layer_collection_id = c.id)
         WHERE p.user_id = $1 AND layer_collection_id = $2;",
             )
             .await?;
@@ -462,7 +465,7 @@ where
                 description,
                 properties,
                 FALSE AS is_layer
-            FROM user_permitted_layer_collections u 
+            FROM user_permitted_layer_collections u
                 JOIN layer_collections lc ON (u.layer_collection_id = lc.id)
                 JOIN collection_children cc ON (layer_collection_id = cc.child)
             WHERE u.user_id = $4 AND cc.parent = $1
@@ -474,7 +477,7 @@ where
                 properties,
                 TRUE AS is_layer
             FROM user_permitted_layers ul
-                JOIN layers uc ON (ul.layer_id = uc.id) 
+                JOIN layers uc ON (ul.layer_id = uc.id)
                 JOIN collection_layers cl ON (layer_id = cl.layer)
             WHERE ul.user_id = $4 AND cl.collection = $1
         )
@@ -566,8 +569,8 @@ where
             .prepare(
                 "
         SELECT name, description, properties
-        FROM user_permitted_layer_collections p 
-            JOIN layer_collections c ON (p.layer_collection_id = c.id) 
+        FROM user_permitted_layer_collections p
+            JOIN layer_collections c ON (p.layer_collection_id = c.id)
         WHERE p.user_id = $1 AND layer_collection_id = $2;",
             )
             .await?;
@@ -761,6 +764,38 @@ where
     }
 }
 
+impl<Tls> PostgresDb<Tls>
+where
+    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
+    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
+    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
+    Tls: 'static + Clone + MakeTlsConnect<Socket> + Send + Sync + std::fmt::Debug,
+{
+    fn clamp_prio(provider: &TypedDataProviderDefinition, prio: i16) -> i16 {
+        let clamp_prio = prio.clamp(-1000, 1000);
+
+        if prio != clamp_prio {
+            tracing::warn!(
+                "The priority of the provider {} is out of range! --> clamped {} to {}",
+                DataProviderDefinition::<Self>::name(provider),
+                prio,
+                clamp_prio
+            );
+        }
+        clamp_prio
+    }
+
+    async fn id_exists(tx: &Transaction<'_>, id: &DataProviderId) -> Result<bool> {
+        Ok(tx
+            .query_one(
+                "SELECT EXISTS(SELECT 1 FROM layer_providers WHERE id = $1)",
+                &[&id],
+            )
+            .await?
+            .get::<usize, bool>(0))
+    }
+}
+
 #[async_trait]
 impl<Tls> LayerProviderDb for PostgresDb<Tls>
 where
@@ -773,28 +808,25 @@ where
     async fn add_layer_provider(
         &self,
         provider: TypedDataProviderDefinition,
     ) -> Result<DataProviderId> {
-        ensure!(self.session.is_admin(), error::PermissionDenied);
+        let mut conn = self.conn_pool.get().await?;
+        let tx = conn.build_transaction().start().await?;
 
-        let conn = self.conn_pool.get().await?;
+        let id = DataProviderDefinition::<Self>::id(&provider);
+
+        if Self::id_exists(&tx, &id).await? {
+            return Err(ProviderIdAlreadyExists { provider_id: id });
+        }
 
         let prio = DataProviderDefinition::<Self>::priority(&provider);
 
-        let clamp_prio = prio.clamp(-1000, 1000);
-
-        if prio != clamp_prio {
-            tracing::warn!(
-                "The priority of the provider {} is out of range! --> clamped {} to {}",
-                DataProviderDefinition::<Self>::name(&provider),
-                prio,
-                clamp_prio
-            );
-        }
+        let clamp_prio = Self::clamp_prio(&provider, prio);
 
-        let stmt = conn
+        let stmt = tx
            .prepare(
                "
              INSERT INTO layer_providers (
-                  id, 
-                  type_name, 
+                  id,
+                  type_name,
                   name,
                   definition,
                   priority
              ) VALUES ($1, $2, $3, $4, $5);",
            )
            .await?;
 
-        let id = DataProviderDefinition::<Self>::id(&provider);
-        conn.execute(
+        tx.execute(
            &stmt,
            &[
                &id,
                &DataProviderDefinition::<Self>::type_name(&provider),
                &DataProviderDefinition::<Self>::name(&provider),
                &provider,
                &clamp_prio,
            ],
        )
        .await?;
+
+        let stmt = tx
+            .prepare(
+                "
+            INSERT INTO permissions (role_id, permission, provider_id)
+            VALUES ($1, $2, $3) ON CONFLICT DO NOTHING;",
+            )
+            .await?;
+
+        tx.execute(
+            &stmt,
+            &[&RoleId::from(self.session.user.id), &Permission::Owner, &id],
+        )
+        .await?;
+
+        tx.commit().await?;
+
         Ok(id)
     }
 
@@ -822,23 +870,23 @@ where
         &self,
         options: LayerProviderListingOptions,
     ) -> Result<Vec<LayerProviderListing>> {
-        // TODO: permission
         let conn = self.conn_pool.get().await?;
 
         let stmt = conn
             .prepare(
                 "
-            SELECT 
-                id, 
+            SELECT
+                id,
                 name,
                 type_name,
                 priority
-            FROM 
-                layer_providers
+            FROM
+                user_permitted_providers p
+                JOIN layer_providers l ON (p.provider_id = l.id)
             WHERE
-                priority > -1000
+                p.user_id = $3 AND priority > -1000
             ORDER BY priority desc, name ASC
-            LIMIT $1 
+            LIMIT $1
             OFFSET $2;
             ",
             )
            .await?;
 
        let rows = conn
            .query(
                &stmt,
-                &[&i64::from(options.limit), &i64::from(options.offset)],
+                &[
+                    &i64::from(options.limit),
+                    &i64::from(options.offset),
+                    &self.session.user.id,
+                ],
            )
            .await?;
 
@@ -862,7 +914,20 @@ where
     }
 
     async fn load_layer_provider(&self, id: DataProviderId) -> Result<Box<dyn DataProvider>> {
-        // TODO: permissions
+        let definition = self.get_layer_provider_definition(id).await?;
+
+        return Box::new(definition)
+            .initialize(PostgresDb {
+                conn_pool: self.conn_pool.clone(),
+                session: self.session.clone(),
+            })
+            .await;
+    }
+
+    async fn get_layer_provider_definition(
+        &self,
+        id: DataProviderId,
+    ) -> Result<TypedDataProviderDefinition> {
         let conn = self.conn_pool.get().await?;
 
         let stmt = conn
             .prepare(
                 "
             SELECT
                 definition
             FROM
-                layer_providers
+                user_permitted_providers p
+                JOIN layer_providers l ON (p.provider_id = l.id)
             WHERE
-                id = $1
+                id = $1 AND p.user_id = $2
             ",
             )
             .await?;
 
-        let row = conn.query_one(&stmt, &[&id]).await?;
-
-        let definition: TypedDataProviderDefinition = row.get(0);
+        let row = conn.query_one(&stmt, &[&id, &self.session.user.id]).await?;
 
-        return Box::new(definition)
-            .initialize(PostgresDb {
-                conn_pool: self.conn_pool.clone(),
-                session: self.session.clone(),
-            })
-            .await;
+        Ok(row.get(0))
     }
+
+    async fn update_layer_provider_definition(
+        &self,
+        id: DataProviderId,
+        provider: TypedDataProviderDefinition,
+    ) -> Result<()> {
+        if id.0 != DataProviderDefinition::<Self>::id(&provider).0 {
+            return Err(ProviderIdUnmodifiable);
+        }
+
+        let mut conn = self.conn_pool.get().await?;
+        let tx = conn.build_transaction().start().await?;
+
+        self.ensure_permission_in_tx(id.into(), Permission::Owner, &tx)
+            .await
+            .boxed_context(crate::error::PermissionDb)?;
+
+        let type_name_matches: bool = tx
+            .query_one(
+                "SELECT type_name = $2 FROM layer_providers WHERE id = $1",
+                &[&id, &DataProviderDefinition::<Self>::type_name(&provider)],
+            )
+            .await?
+            .get(0);
+
+        if !type_name_matches {
+            return Err(ProviderTypeUnmodifiable);
+        }
+
+        let old_definition = self.get_layer_provider_definition(id).await?;
+        let provider = DataProviderDefinition::<Self>::update(&old_definition, provider);
+
+        let prio = DataProviderDefinition::<Self>::priority(&provider);
+
+        let clamp_prio = Self::clamp_prio(&provider, prio);
+
+        let stmt = tx
+            .prepare(
+                "
+            UPDATE layer_providers
+            SET
+                name = $2,
+                definition = $3,
+                priority = $4
+            WHERE id = $1
+            ",
+            )
+            .await?;
+
+        tx.execute(
+            &stmt,
+            &[
+                &id,
+                &DataProviderDefinition::<Self>::name(&provider),
+                &provider,
+                &clamp_prio,
+            ],
+        )
+        .await?;
+
+        tx.commit().await?;
+
+        Ok(())
+    }
+
+    async fn delete_layer_provider(&self, id: DataProviderId) -> Result<()> {
+        let mut conn = self.conn_pool.get().await?;
+        let tx = conn.build_transaction().start().await?;
+
+        self.ensure_permission_in_tx(id.into(), Permission::Owner, &tx)
+            .await
+            .boxed_context(crate::error::PermissionDb)?;
+
+        let stmt = tx
+            .prepare(
+                "
+            DELETE FROM layer_providers
+            WHERE id = $1
+            ",
+            )
+            .await?;
+
+        tx.execute(&stmt, &[&id]).await?;
+
+        tx.commit().await?;
+
+        Ok(())
+    }
 }
diff --git a/services/src/layers/storage.rs b/services/src/layers/storage.rs
index 5586f3df6..2d9a54225 100644
--- a/services/src/layers/storage.rs
+++ b/services/src/layers/storage.rs
@@ -6,7 +6,9 @@ use crate::error::Result;
 use async_trait::async_trait;
 use geoengine_datatypes::dataset::{DataProviderId, LayerId};
 use serde::{Deserialize, Serialize};
+use utoipa::IntoParams;
 use uuid::Uuid;
+use validator::Validate;
 
 pub const INTERNAL_PROVIDER_ID: DataProviderId =
     DataProviderId::from_u128(0xce5e_84db_cbf9_48a2_9a32_d4b7_cc56_ea74);
@@ -109,7 +111,8 @@ pub struct LayerProviderListing {
     pub name: String,
     pub priority: i16,
 }
-#[derive(Debug, Clone, Serialize, Deserialize)]
+#[derive(Debug, Clone, Serialize, Deserialize, IntoParams, Validate)]
+#[into_params(parameter_in = Query)]
 // TODO: validate user input
 pub struct LayerProviderListingOptions {
     pub offset: u32,
@@ -130,5 +133,18 @@ pub trait LayerProviderDb: Send + Sync + 'static {
 
     async fn load_layer_provider(&self, id: DataProviderId) -> Result<Box<dyn DataProvider>>;
 
+    async fn get_layer_provider_definition(
+        &self,
+        id: DataProviderId,
+    ) -> Result<TypedDataProviderDefinition>;
+
+    async fn update_layer_provider_definition(
+        &self,
+        id: DataProviderId,
+        provider: TypedDataProviderDefinition,
+    ) -> Result<()>;
+
+    async fn delete_layer_provider(&self, id: DataProviderId) -> Result<()>;
+
     // TODO: share layer providers
 }
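Together with the `/layerDb/providers` routes from the OpenAPI portion of this diff, the extended trait gives providers a full create/read/update/delete lifecycle. A sketch of how a caller might exercise it, assuming a `db: impl LayerProviderDb` and a `definition` value already in scope (neither is defined in this snippet, and the `Result` alias is the crate's own):

```rust
// Illustrative flow over the extended LayerProviderDb trait; `db` and
// `definition` are assumed to exist in scope and are not defined here.
async fn provider_lifecycle<D: LayerProviderDb>(
    db: &D,
    definition: TypedDataProviderDefinition,
) -> Result<()> {
    // create; fails with ProviderIdAlreadyExists for duplicate ids
    let id = db.add_layer_provider(definition).await?;

    // read back the stored definition (secrets come back masked)
    let stored = db.get_layer_provider_definition(id).await?;

    // write it back; id and type must stay the same, otherwise
    // ProviderIdUnmodifiable / ProviderTypeUnmodifiable is returned
    db.update_layer_provider_definition(id, stored).await?;

    // remove the provider; its permission rows go away via the FK cascade
    db.delete_layer_provider(id).await
}
```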
diff --git a/services/src/permissions/mod.rs b/services/src/permissions/mod.rs
index 90c2afe52..6a47d41b7 100644
--- a/services/src/permissions/mod.rs
+++ b/services/src/permissions/mod.rs
@@ -1,16 +1,17 @@
-use crate::error::Result;
+use crate::error::{self, Error, Result};
 use crate::identifier;
 use crate::layers::listing::LayerCollectionId;
 use crate::machine_learning::MlModelId;
 use crate::projects::ProjectId;
 use crate::users::UserId;
 use async_trait::async_trait;
-use geoengine_datatypes::dataset::{DatasetId, LayerId};
+use geoengine_datatypes::dataset::{DataProviderId, DatasetId, LayerId};
 use postgres_types::{FromSql, ToSql};
 use serde::{Deserialize, Serialize};
-use snafu::Snafu;
+use snafu::{ResultExt, Snafu};
 use std::str::FromStr;
 use utoipa::ToSchema;
+use uuid::Uuid;
 
 mod postgres_permissiondb;
 
@@ -100,6 +101,7 @@ pub enum ResourceId {
     Project(ProjectId),
     DatasetId(DatasetId),
     MlModel(MlModelId),
+    DataProvider(DataProviderId),
 }
 
 impl std::fmt::Display for ResourceId {
@@ -112,6 +114,7 @@ impl std::fmt::Display for ResourceId {
             ResourceId::Project(project_id) => write!(f, "project:{}", project_id.0),
             ResourceId::DatasetId(dataset_id) => write!(f, "dataset:{}", dataset_id.0),
             ResourceId::MlModel(ml_model_id) => write!(f, "mlModel:{}", ml_model_id.0),
+            ResourceId::DataProvider(provider_id) => write!(f, "provider:{}", provider_id.0),
         }
     }
 }
@@ -140,6 +143,38 @@ impl From<MlModelId> for ResourceId {
     }
 }
 
+impl From<DataProviderId> for ResourceId {
+    fn from(provider_id: DataProviderId) -> Self {
+        ResourceId::DataProvider(provider_id)
+    }
+}
+
+impl TryFrom<(String, String)> for ResourceId {
+    type Error = Error;
+
+    fn try_from(value: (String, String)) -> Result<Self> {
+        Ok(match value.0.as_str() {
+            "layer" => ResourceId::Layer(LayerId(value.1)),
+            "layerCollection" => ResourceId::LayerCollection(LayerCollectionId(value.1)),
+            "project" => {
+                ResourceId::Project(ProjectId(Uuid::from_str(&value.1).context(error::Uuid)?))
+            }
+            "dataset" => {
+                ResourceId::DatasetId(DatasetId(Uuid::from_str(&value.1).context(error::Uuid)?))
+            }
+            "provider" => ResourceId::DataProvider(DataProviderId(
+                Uuid::from_str(&value.1).context(error::Uuid)?,
+            )),
+            _ => {
+                return Err(Error::InvalidResourceId {
+                    resource_type: value.0,
+                    resource_id: value.1,
+                });
+            }
+        })
+    }
+}
+
 #[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Clone)]
 #[serde(rename_all = "camelCase")]
 pub struct PermissionListing {
diff --git a/services/src/permissions/postgres_permissiondb.rs b/services/src/permissions/postgres_permissiondb.rs
index 3bd040092..3d1d37a89 100644
--- a/services/src/permissions/postgres_permissiondb.rs
+++ b/services/src/permissions/postgres_permissiondb.rs
@@ -32,6 +32,7 @@ impl ResourceTypeName for ResourceId {
             ResourceId::Project(_) => "project_id",
             ResourceId::DatasetId(_) => "dataset_id",
             ResourceId::MlModel(_) => "ml_model_id",
+            ResourceId::DataProvider(_) => "provider_id",
         }
     }
 
@@ -50,6 +51,7 @@ impl ResourceTypeName for ResourceId {
             ResourceId::Project(id) => Ok(id.0),
             ResourceId::DatasetId(id) => Ok(id.0),
             ResourceId::MlModel(id) => Ok(id.0),
+            ResourceId::DataProvider(id) => Ok(id.0),
         }
     }
 }
@@ -329,33 +331,60 @@ where
     ) -> Result<Vec<PermissionListing>, PermissionDbError> {
         let resource: ResourceId = resource.into();
 
-        self.ensure_permission_in_tx(resource.clone(), Permission::Owner, tx)
+        self.ensure_permission_in_tx(resource.clone(), Permission::Read, tx)
+            .await?;
+
+        // Owners see all permissions, Readers only permissions from roles they are assigned to.
+        // Owner permissions are always returned first, so Owners know immediately that they are
+        // an owner of the resource when the first entry is an Owner permission.
+        let is_owner = self
+            .has_permission_in_tx(resource.clone(), Permission::Owner, tx)
             .await?;
 
         let stmt = tx
             .prepare(&format!(
                 "
-            SELECT 
-                r.id, r.name, p.permission 
-            FROM 
-                permissions p JOIN roles r ON (p.role_id = r.id)
-            WHERE 
+            SELECT
+                r.id, r.name, p.permission
+            FROM
+                permissions p JOIN {roles} r ON (p.role_id = r.id)
+            WHERE
                 {resource_type} = $1
-            ORDER BY r.name ASC
+            ORDER BY p.permission DESC, r.name ASC
             OFFSET $2
            LIMIT $3;",
-                resource_type = resource.resource_type_name()
+                resource_type = resource.resource_type_name(),
+                roles = if is_owner {
+                    "roles"
+                } else {
+                    "(SELECT r.id, r.name \
+                      FROM user_roles ur JOIN roles r ON (ur.role_id = r.id) \
+                      WHERE ur.user_id = $4)"
+                }
             ))
             .await
             .context(PostgresPermissionDbError)?;
 
-        let rows = tx
-            .query(
+        let rows = if is_owner {
+            tx.query(
                 &stmt,
                 &[&resource.uuid()?, &(i64::from(offset)), &(i64::from(limit))],
             )
             .await
-            .context(PostgresPermissionDbError)?;
+            .context(PostgresPermissionDbError)?
+        } else {
+            tx.query(
+                &stmt,
+                &[
+                    &resource.uuid()?,
+                    &(i64::from(offset)),
+                    &(i64::from(limit)),
+                    &self.session.user.id,
+                ],
+            )
+            .await
+            .context(PostgresPermissionDbError)?
+        };
 
         let permissions = rows
            .into_iter()