diff --git a/src/actinia_core/core/grass_init.py b/src/actinia_core/core/grass_init.py index 435afd48a..67e5b7900 100644 --- a/src/actinia_core/core/grass_init.py +++ b/src/actinia_core/core/grass_init.py @@ -151,7 +151,7 @@ def set(self): try: value = self.env[key] origValue = os.getenv(key) - if origValue: + if origValue and "PATH" in key: value += ":" + origValue os.putenv(key, value) os.environ[key] = value diff --git a/src/actinia_core/processing/actinia_processing/ephemeral/persistent_processing.py b/src/actinia_core/processing/actinia_processing/ephemeral/persistent_processing.py index 1f2a36b1e..04885123b 100644 --- a/src/actinia_core/processing/actinia_processing/ephemeral/persistent_processing.py +++ b/src/actinia_core/processing/actinia_processing/ephemeral/persistent_processing.py @@ -324,42 +324,59 @@ def _update_views_in_tgis(self, tgis_db_path): con.close() del cur - def _merge_tgis_dbs(self, tgis_db_path_1, tgis_db_path_2): - """Merge two tgis sqlite.db files - - Args: - tgis_db_path_1(str): path of a tgis sqlite.db file in which the - other should be merged - tgis_db_path_2(str): path of a tgis sqlite.db file which should be - merged in tgis_db_path_1 - """ - con = sqlite3.connect(tgis_db_path_1) - con.execute(f"ATTACH '{tgis_db_path_2}' as dba") - con.execute("BEGIN") - - table_names1 = [row[1] for row in con.execute( - "SELECT * FROM sqlite_master where type='table'")] - table_names2 = [row[1] for row in con.execute( - "SELECT * FROM dba.sqlite_master where type='table'")] - - # merge databases - for table in table_names2: - if table == 'tgis_metadata': - con.execute(f"DROP TABLE {table}") - con.execute(f"CREATE TABLE {table} AS " - f"SELECT * FROM dba.{table}") - continue - # for example raster_register_xxx tables are not in both dbs - if table not in table_names1: - con.execute(f"CREATE TABLE {table} AS " - f"SELECT * FROM dba.{table}") - continue - combine = f"INSERT OR IGNORE INTO {table} SELECT * FROM dba.{table}" - con.execute(combine) - 
con.commit() - con.execute("detach database dba") - if con: - con.close() + def _change_mapsetname_in_tgistable( + self, cur, table_name, + source_mapset, target_mapset, skip_columns=[]): + columns = [row[0] for row in cur.execute( + f"SELECT * FROM {table_name}").description] + + # find PRIMARY KEY + selection = [row[1] for row in cur.execute( + f"PRAGMA table_info({table_name})") if row[-1] == 1] + if len(selection) == 0: + primary_key = "" + else: + primary_key = selection[0] + + for col in columns: + if col not in skip_columns: + update_statement = f"UPDATE {table_name} SET {col} = " \ + f"REPLACE({col}, '{source_mapset}', '{target_mapset}')" + if col == primary_key: + primary_key_vals = [row[0] for row in cur.execute( + f"SELECT {primary_key} FROM {table_name}")] + deleted_keys = list() + for p_key in primary_key_vals: + if p_key not in deleted_keys: + new_p_key = p_key.replace( + source_mapset, target_mapset) + if (source_mapset in p_key and + new_p_key in primary_key_vals): + deleted_keys.append(new_p_key) + delete_old_entry = f"DELETE FROM {table_name}" \ + f" WHERE {primary_key}='{new_p_key}'" + cur.execute(delete_old_entry) + old_row = [row for row in cur.execute( + f"SELECT * FROM {table_name} WHERE " + f"{primary_key}='{p_key}'")][0] + new_row = list() + for old_v, col in zip(old_row, columns): + if (col not in skip_columns and + isinstance(old_v, str)): + new_v = old_v.replace( + source_mapset, target_mapset) + elif old_v is None: + new_v = "NULL" + else: + new_v = old_v + new_row.append(new_v) + insert_statement = f"INSERT INTO {table_name}" \ + f" {tuple(columns)} VALUES {tuple(new_row)}" + cur.execute(insert_statement) + else: + cur.execute(update_statement) + else: + cur.execute(update_statement) def _change_mapsetname_in_tgis(self, tgis_path, source_mapset, target_mapset, target_tgis_db): @@ -381,20 +398,13 @@ def _change_mapsetname_in_tgis(self, tgis_path, source_mapset, table_names = [row[1] for row in cur.execute( "SELECT * FROM sqlite_master 
where type='table'")] for table_name in table_names: - columns = [row[0] for row in cur.execute( - f"SELECT * FROM {table_name}").description] - for col in columns: - cur.execute(f"UPDATE {table_name} SET {col} = REPLACE({col}, " - f"'{source_mapset}', '{target_mapset}')") + self._change_mapsetname_in_tgistable( + cur, table_name, source_mapset, target_mapset) con.commit() if con: con.close() del cur - # if there already exists a sqlite.db file then merge it - if target_tgis_db is not None: - self._merge_tgis_dbs(tgis_db_path, target_tgis_db) - # update views self._update_views_in_tgis(tgis_db_path) @@ -433,19 +443,31 @@ def _merge_mapset_into_target(self, source_mapset, target_mapset): if os.path.exists(source_path) is True: # Hardlink the sources into the target - stdout = subprocess.PIPE - stderr = subprocess.PIPE - - p = subprocess.Popen(["/bin/cp", "-flr", - "%s" % source_path, - "%s/." % target_path], - stdout=stdout, - stderr=stderr) - (stdout_buff, stderr_buff) = p.communicate() - if p.returncode != 0: - raise AsyncProcessError( - "Unable to merge mapsets. Error in linking:" - " stdout: %s stderr: %s" % (stdout_buff, stderr_buff)) + self._copy_folder( + source_path, target_path, + msg="merge mapsets. Error in linking") + + def _copy_folder( + self, source_path, target_path, + msg="copy temporary mapset to original location"): + try: + stdout = subprocess.PIPE + stderr = subprocess.PIPE + p = subprocess.Popen(["/bin/cp", "-fr", + "%s" % source_path, + "%s" % target_path], + stdout=stdout, + stderr=stderr) + (stdout_buff, stderr_buff) = p.communicate() + if p.returncode != 0: + raise AsyncProcessError( + f"Unable to {msg}. Copy error " + "stdout: %s stderr: %s returncode: %i" % (stdout_buff, + stderr_buff, + p.returncode)) + except Exception as e: + raise AsyncProcessError( + f"Unable to {msg}. 
Exception %s" % str(e)) def _copy_merge_tmp_mapset_to_target_mapset(self): """Copy the temporary mapset into the original location @@ -491,25 +513,7 @@ def _copy_merge_tmp_mapset_to_target_mapset(self): self._send_resource_update(message) - try: - stdout = subprocess.PIPE - stderr = subprocess.PIPE - p = subprocess.Popen(["/bin/cp", "-fr", - "%s" % source_path, - "%s" % target_path], - stdout=stdout, - stderr=stderr) - (stdout_buff, stderr_buff) = p.communicate() - if p.returncode != 0: - raise AsyncProcessError( - "Unable to copy temporary mapset to " - "original location. Copy error " - "stdout: %s stderr: %s returncode: %i" % (stdout_buff, - stderr_buff, - p.returncode)) - except Exception as e: - raise AsyncProcessError("Unable to copy temporary mapset to " - "original location. Exception %s" % str(e)) + self._copy_folder(source_path, target_path) # Merge the temp mapset into the target mapset in case the target already exists if self.target_mapset_exists is True: @@ -561,6 +565,116 @@ def _execute_process_list(self, process_list): elif process.exec_type == "python": eval(process.executable) + def _tgis_set_mapset_to_temp_mapset(self): + """Rename mapset of STRDS in tgis sqlite.db file + """ + tgis_db_path = os.path.join(self.temp_mapset_path, "tgis", "sqlite.db") + + con = sqlite3.connect(tgis_db_path) + cur = con.cursor() + table_names = [row[1] for row in cur.execute( + "SELECT * FROM sqlite_master where type='table'")] + + tables_not_to_change = [ + "raster_base", + "raster_relative_time", + "raster_absolute_time", + "raster_spatial_extent", + "raster_metadata", + "vector_base", + "vector_relative_time", + "vector_absolute_time", + "vector_spatial_extent", + "vector_metadata", + "raster3d_base", + "raster3d_relative_time", + "raster3d_absolute_time", + "raster3d_spatial_extent", + "raster3d_metadata", + "tgis_metadata", + ] + # raster_map_register_XXX + tables_change_all_mapsets = [ + "strds_base", + "strds_relative_time", + "strds_absolute_time", + 
"strds_spatial_extent", + "strds_metadata", + "stvds_base", + "stvds_relative_time", + "stvds_absolute_time", + "stvds_spatial_extent", + "stvds_metadata", + "str3ds_base", + "str3ds_relative_time", + "str3ds_absolute_time", + "str3ds_spatial_extent", + "str3ds_metadata", + + ] + tables_change_only_few_mapsets = [ + "raster_stds_register", + "vector_stds_register", + "raster3d_stds_register", + ] + + for table_name in table_names: + if table_name in tables_not_to_change: + continue + elif table_name in tables_change_all_mapsets: + self._change_mapsetname_in_tgistable( + cur, table_name, + self.target_mapset_name, self.temp_mapset_name) + elif table_name in tables_change_only_few_mapsets: + self._change_mapsetname_in_tgistable( + cur, table_name, + self.target_mapset_name, self.temp_mapset_name, + ["id"]) + con.commit() + if con: + con.close() + + def _create_temporary_grass_environment(self, source_mapset_name=None, + interim_result_mapset=None, + interim_result_file_path=None): + """Create a temporary GRASS GIS environment + + This method will: + 1. create the temporary database + 2. sets-up the GRASS environment + 3. Create temporary mapset + 4. Copies tgis db + + This method will link the required mapsets that are + defined in *self.required_mapsets* into the location. + The mapsets may be from the global and/or user database. 
+ + Args: + source_mapset_name (str): The name of the source mapset to copy the + WIND file from + interim_result_mapset (str): The path to the mapset which is saved + as interim result and should be used + as start mapset for the job resumtion + interim_result_file_path (str): The path of the interim result + temporary file path + Raises: + This method will raise an AsyncProcessError + """ + super(PersistentProcessing, self)._create_temporary_grass_environment( + source_mapset_name, + interim_result_mapset, + interim_result_file_path + ) + tgis_path = os.path.join( + self.user_location_path, + self.target_mapset_name, + "tgis" + ) + if os.path.isdir(tgis_path): + self._copy_folder( + tgis_path, os.path.join(self.temp_mapset_path, "tgis")) + self._tgis_set_mapset_to_temp_mapset() + def _execute(self, skip_permission_check=False): """Overwrite this function in subclasses diff --git a/src/actinia_core/processing/actinia_processing/ephemeral_processing.py b/src/actinia_core/processing/actinia_processing/ephemeral_processing.py index 300c792c1..30a1aabb2 100644 --- a/src/actinia_core/processing/actinia_processing/ephemeral_processing.py +++ b/src/actinia_core/processing/actinia_processing/ephemeral_processing.py @@ -1399,10 +1399,12 @@ def _create_temporary_grass_environment(self, source_mapset_name=None, mapset_name="PERMANENT") # Create the temporary mapset and switch into it - self._create_temporary_mapset(temp_mapset_name=self.temp_mapset_name, - source_mapset_name=source_mapset_name, - interim_result_mapset=interim_result_mapset, - interim_result_file_path=interim_result_file_path) + self._create_temporary_mapset( + temp_mapset_name=self.temp_mapset_name, + source_mapset_name=source_mapset_name, + interim_result_mapset=interim_result_mapset, + interim_result_file_path=interim_result_file_path + ) def _execute(self, skip_permission_check=False): """Overwrite this function in subclasses. 
diff --git a/src/actinia_core/processing/actinia_processing/persistent/strds_management.py b/src/actinia_core/processing/actinia_processing/persistent/strds_management.py index b0cb32600..2ebec847b 100644 --- a/src/actinia_core/processing/actinia_processing/persistent/strds_management.py +++ b/src/actinia_core/processing/actinia_processing/persistent/strds_management.py @@ -26,6 +26,7 @@ TODO: Integrate into the ephemeral process chain approach """ + from actinia_api.swagger2.actinia_core.schemas.strds_management import \ STRDSInfoModel, STRDSInfoResponseModel @@ -35,8 +36,9 @@ from actinia_core.models.response_models import \ StringListProcessingResultResponseModel + __license__ = "GPLv3" -__author__ = "Sören Gebbert, Carmen Tawalika" +__author__ = "Sören Gebbert, Carmen Tawalika, Anika Weinmann" __copyright__ = "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. KG" __maintainer__ = "mundialis" @@ -52,27 +54,36 @@ def _execute(self): self._setup() - pc = {"1": {"module": "t.list", - "inputs": {"type": "strds", - "column": "name"}}} + pc = { + "version": 1, + "list": [{ + "module": "t.list", + "id": f"list_strds_{self.unique_id}", + "inputs": [ + {"param": "type", "value": "strds"}, + {"param": "column", "value": "name"} + ] + }] + } # Make sure that only the current mapset is used for strds listing has_where = False if self.rdc.user_data: - for option in self.rdc.user_data: - if self.rdc.user_data[option] is not None: + for option, val in self.rdc.user_data.items(): + if val is not None: if "where" in option: - select = self.rdc.user_data[option] + \ - " AND mapset = \'%s\'" % self.mapset_name - pc["1"]["inputs"]["where"] = select + select = f"{val} AND mapset = \'{self.mapset_name}\'" + pc["list"][0]["inputs"].append( + {"param": "where", "value": select}) has_where = True else: - pc["1"]["inputs"][option] = self.rdc.user_data[option] + pc["list"][0]["inputs"].append( + {"param": option, "value": val}) if has_where is False: - select = "mapset=\'%s\'" % 
self.mapset_name - pc["1"]["inputs"]["where"] = select + select = f"mapset=\'{self.mapset_name}\'" + pc["list"][0]["inputs"].append({"param": "where", "value": select}) process_list = self._validate_process_chain(skip_permission_check=True, process_chain=pc) @@ -104,10 +115,18 @@ def _execute(self): self._setup() - pc = {"1": {"module": "t.info", - "inputs": {"type": "strds", - "input": self.map_name}, - "flags": "g"}} + pc = { + "version": 1, + "list": [{ + "module": "t.info", + "id": f"strds_info_{self.unique_id}", + "inputs": [ + {"param": "type", "value": "strds"}, + {"param": "input", "value": self.map_name} + ], + "flags": "g" + }] + } process_list = self._validate_process_chain(skip_permission_check=True, process_chain=pc) @@ -143,25 +162,34 @@ def _execute(self): self.required_mapsets.append(self.target_mapset_name) args = self.rdc.user_data - - pc = {"1": {"module": "t.remove", - "inputs": {"type": "strds", - "inputs": self.map_name}, - "flags": "f"}} - + flags = "f" if args and "recursive" in args and args["recursive"] is True: - pc["1"]["flags"] = "rf" + flags = "rf" + + pc = { + "version": 1, + "list": [{ + "id": f"remove_strds_{self.unique_id}", + "module": "t.remove", + "inputs": [ + {"param": "type", "value": "strds"}, + {"param": "inputs", "value": self.map_name} + ], + "flags": flags}] + } process_list = self._validate_process_chain(skip_permission_check=True, process_chain=pc) - - self._create_temp_database() self._check_lock_target_mapset() - self._create_grass_environment(grass_data_base=self.temp_grass_data_base, mapset_name=self.target_mapset_name) + # Init GRASS environment and create the temporary mapset + self._create_temporary_grass_environment( + source_mapset_name=self.target_mapset_name) + self._lock_temp_mapset() self._execute_process_list(process_list) + self._copy_merge_tmp_mapset_to_target_mapset() self.finish_message = "STRDS <%s> successfully deleted" % self.map_name @@ -177,21 +205,38 @@ def _execute(self): self._setup() 
self.required_mapsets.append(self.target_mapset_name) - pc_1 = {} - pc_1["1"] = {"module": "t.list", "inputs": { - "type": "strds", - "where": "id = \'%s@%s\'" % (self.map_name, self.target_mapset_name)}} + pc_1 = {"version": 1} + pc_1["list"] = [{ + "id": f"list_strds_{self.unique_id}", + "module": "t.list", + "inputs": [ + { + "param": "type", + "value": "strds" + }, + { + "param": "where", + "value": f"id = \'{self.map_name}@" + f"{self.target_mapset_name}\'" + }, + ] + }] # Check the first process chain pc_1 = self._validate_process_chain(skip_permission_check=True, process_chain=pc_1) - pc_2 = {"1": {"module": "t.create", - "inputs": {"type": "strds", - "output": self.map_name}}} - + pc_2 = { + "version": 1, + "list": [{ + "id": f"create_strds_{self.unique_id}", + "module": "t.create", + "inputs": [{"param": "type", "value": "strds"}], + "outputs": [{"param": "output", "value": self.map_name}] + }] + } if self.request_data: - for key in self.request_data: - pc_2["1"]["inputs"][key] = self.request_data[key] + for key, val in self.request_data.items(): + pc_2["list"][0]["inputs"].append({"param": key, "value": val}) pc_2 = self._validate_process_chain(skip_permission_check=True, process_chain=pc_2) diff --git a/src/actinia_core/processing/actinia_processing/persistent/strds_raster_management.py b/src/actinia_core/processing/actinia_processing/persistent/strds_raster_management.py index 4aa7c30db..81b866a88 100644 --- a/src/actinia_core/processing/actinia_processing/persistent/strds_raster_management.py +++ b/src/actinia_core/processing/actinia_processing/persistent/strds_raster_management.py @@ -153,26 +153,37 @@ def __init__(self, *args): def _execute(self): self._setup() - input_file = tempfile.NamedTemporaryFile( - dir=self.temp_file_path, delete=True, mode="w") - - for map_name in self.request_data: - line = "%s\n" % map_name - input_file.write(line) - input_file.flush() - - pc = {"1": {"module": "t.unregister", - "inputs": {"input": "%s@%s" % 
(self.map_name, self.mapset_name), - "type": "raster", - "file": input_file.name}}} + maps = [ + map if "@" in map else f"{map}@{mapset}" + for map, mapset in zip( + self.request_data, [self.mapset_name]*len(self.request_data)) + ] + pc = { + "version": 1, + "list": [{ + "id": f"strds_unregister_raster_{self.unique_id}", + "module": "t.unregister", + "inputs": [ + {"param": "input", "value": f"{self.map_name}"}, + {"param": "type", "value": "raster"}, + {"param": "maps", "value": ",".join(maps)} + ] + }] + } process_list = self._validate_process_chain(skip_permission_check=True, process_chain=pc) self._check_lock_target_mapset() - self._create_temp_database(mapsets=self.required_mapsets) - self._create_grass_environment(grass_data_base=self.temp_grass_data_base, - mapset_name=self.mapset_name) + self._create_grass_environment( + grass_data_base=self.temp_grass_data_base, + mapset_name=self.target_mapset_name) + # Init GRASS environment and create the temporary mapset + self._create_temporary_grass_environment( + source_mapset_name=self.target_mapset_name) + self._lock_temp_mapset() self._execute_process_list(process_list) + self._copy_merge_tmp_mapset_to_target_mapset() - input_file.close() + self.finish_message = f"Raster maps <{self.request_data}> " \ + f"successfully unregistered from {self.map_name}" diff --git a/tests/test_async_mapset_merging_strds.py b/tests/test_async_mapset_merging_strds.py index 0edb8e615..08a291a85 100644 --- a/tests/test_async_mapset_merging_strds.py +++ b/tests/test_async_mapset_merging_strds.py @@ -415,6 +415,7 @@ def test_create_strds_in_persistent_user_db_2(self): self.waitAsyncStatusAssertHTTP( rv, headers=self.admin_auth_header, http_status=200, status="finished") + self.server.get(URL_PREFIX + f'/locations/nc_spm_08/mapsets/{self.user_mapset}/strds') # check if strds 'modis' and 'modis2' is in mapset self.check_strds_in_mapset(['modis', 'modis2']) diff --git a/tests/test_async_processing_export_vector.py 
b/tests/test_async_processing_export_vector.py index 52ef2c709..6cb50434a 100644 --- a/tests/test_async_processing_export_vector.py +++ b/tests/test_async_processing_export_vector.py @@ -24,7 +24,6 @@ """ Tests: AsyncProcess test case """ -import pytest import unittest from flask.json import dumps as json_dumps try: diff --git a/tests/test_login.py b/tests/test_login.py index d650095c3..bff888ccc 100644 --- a/tests/test_login.py +++ b/tests/test_login.py @@ -27,7 +27,6 @@ from flask.json import loads as json_load from werkzeug.datastructures import Headers import unittest -import pytest import base64 try: from .test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX @@ -97,7 +96,6 @@ def test_list_users(self): self.assertEqual(users, "guest", "Wrong user listed") - @pytest.mark.dev def test_create_get_delete_user(self): """ Create a new user, access it as admin and as normal user and delete it diff --git a/tests/test_strds_raster_management.py b/tests/test_strds_raster_management.py index 203522c29..13a6b40fa 100644 --- a/tests/test_strds_raster_management.py +++ b/tests/test_strds_raster_management.py @@ -24,6 +24,7 @@ """ Tests: STRDS test case """ + from pprint import pprint from flask.json import loads as json_loads, dumps as json_dumps import unittest @@ -90,7 +91,6 @@ def create_raster_layer(self, location_name, mapset_name, raster_name, val): self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype) # ################### CREATE REGISTER ###################################### - def test_strds_creation_error(self): # This must fail, global mapsets are not allowed to modify @@ -148,34 +148,34 @@ def test_strds_create_register_unregister_1(self): min_min = json_loads(rv.data)["process_results"]["min_min"] max_max = json_loads(rv.data)["process_results"]["max_max"] num_maps = json_loads(rv.data)["process_results"]["number_of_maps"] - self.assertEqual(min_min, "1.0") - self.assertEqual(max_max, "3.0") - 
self.assertEqual(num_maps, "3") + self.assertEqual(min_min, "1.0", "min_min values is wrong") + self.assertEqual(max_max, "3.0", "max_max values is wrong") + self.assertEqual(num_maps, "3", "number_of_maps values is wrong") # Unregister the raster layers raster_layers = ["test_layer_1", "test_layer_2", "test_layer_3"] - rv = self.server.delete(URL_PREFIX + "/locations/%(location)s/mapsets/%(mapset)s/strds/test_strds_register/raster_layers" % {'location': location, 'mapset': new_mapset}, - data=json_dumps(raster_layers), - content_type="application/json", - headers=self.user_auth_header) - pprint(json_loads(rv.data)) - self.assertEqual(rv.status_code, 200, "HTML status code is wrong %i" % rv.status_code) - self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype) + rv2 = self.server.delete(URL_PREFIX + "/locations/%(location)s/mapsets/%(mapset)s/strds/test_strds_register/raster_layers" % {'location': location, 'mapset': new_mapset}, + data=json_dumps(raster_layers), + content_type="application/json", + headers=self.user_auth_header) + pprint(json_loads(rv2.data)) + self.assertEqual(rv2.status_code, 200, "HTML status code is wrong %i" % rv2.status_code) + self.assertEqual(rv2.mimetype, "application/json", "Wrong mimetype %s" % rv2.mimetype) # Check strds - rv = self.server.get(URL_PREFIX + "/locations/%(location)s/mapsets/%(mapset)s/strds/test_strds_register" % {'location': location, 'mapset': new_mapset}, - headers=self.user_auth_header) - pprint(json_loads(rv.data)) - self.assertEqual(rv.status_code, 200, "HTML status code is wrong %i" % rv.status_code) - self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype) - - min_min = json_loads(rv.data)["process_results"]["min_min"] - max_max = json_loads(rv.data)["process_results"]["max_max"] - num_maps = json_loads(rv.data)["process_results"]["number_of_maps"] - self.assertEqual(min_min, "None") - self.assertEqual(max_max, "None") - self.assertEqual(num_maps, "0") + 
rv3 = self.server.get(URL_PREFIX + "/locations/%(location)s/mapsets/%(mapset)s/strds/test_strds_register" % {'location': location, 'mapset': new_mapset}, + headers=self.user_auth_header) + pprint(json_loads(rv3.data)) + self.assertEqual(rv3.status_code, 200, "HTML status code is wrong %i" % rv3.status_code) + self.assertEqual(rv3.mimetype, "application/json", "Wrong mimetype %s" % rv3.mimetype) + + min_min = json_loads(rv3.data)["process_results"]["min_min"] + max_max = json_loads(rv3.data)["process_results"]["max_max"] + num_maps = json_loads(rv3.data)["process_results"]["number_of_maps"] + self.assertEqual(min_min, "None", "min_min values is not None") + self.assertEqual(max_max, "None", "max_max values is not None") + self.assertEqual(num_maps, "0", "number_of_maps values is not 0") # Delete the strds rv = self.server.delete(URL_PREFIX + '/locations/%(location)s/mapsets/%(mapset)s/strds/test_strds_register' % {'location': location, 'mapset': new_mapset}, @@ -185,7 +185,6 @@ def test_strds_create_register_unregister_1(self): self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype) # ################### LIST RASTER FROM STRDS ############################### - def test_strds_raster_layer_1(self): rv = self.server.get(strds_url + '/%s/raster_layers' % strds_data, headers=self.user_auth_header)