From 36265d3e377a6aa15fd48547972ceb80c76181dd Mon Sep 17 00:00:00 2001 From: romer8 Date: Thu, 30 Jan 2025 10:28:49 -0700 Subject: [PATCH 01/20] changed to put for new geoserver versions --- tethys_dataset_services/engines/geoserver_engine.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tethys_dataset_services/engines/geoserver_engine.py b/tethys_dataset_services/engines/geoserver_engine.py index 6b39803..7e75bd4 100644 --- a/tethys_dataset_services/engines/geoserver_engine.py +++ b/tethys_dataset_services/engines/geoserver_engine.py @@ -1501,7 +1501,7 @@ def create_sql_view_layer(self, store_id, layer_name, geometry_type, srid, sql, retries_remaining = 300 while retries_remaining > 0: - response = requests.post( + response = requests.put( url, headers=headers, auth=(self.username, self.password), From f4381ca25b62e0742ac7737f592fbd9acf025898 Mon Sep 17 00:00:00 2001 From: romer8 Date: Sat, 29 Mar 2025 19:10:43 -0600 Subject: [PATCH 02/20] gs-cloud compatible changes --- .../engines/geoserver_engine.py | 98 ++++++++++++++++++- 1 file changed, 94 insertions(+), 4 deletions(-) diff --git a/tethys_dataset_services/engines/geoserver_engine.py b/tethys_dataset_services/engines/geoserver_engine.py index 7e75bd4..f8b25ce 100644 --- a/tethys_dataset_services/engines/geoserver_engine.py +++ b/tethys_dataset_services/engines/geoserver_engine.py @@ -677,7 +677,8 @@ def reload(self, ports=None, public=True): GeoServer are running in a clustered GeoServer configuration. public (bool): Use the public geoserver endpoint if True, otherwise use the internal endpoint. """ - node_endpoints = self._get_node_endpoints(ports=ports, public=public) + node_endpoints = self._get_node_endpoints(ports=[9090], public=public) # take this out, it is hardcoded for testing. + # node_endpoints = self._get_node_endpoints(ports=ports, public=public) log.debug("Catalog Reload URLS: {0}".format(node_endpoints)) response_dict = {'success': True, 'result': None, 'error': []} @@ -1332,7 +1333,96 @@ def create_postgis_store(self, store_id, host, port, database, username, passwor return response_dict - def create_layer_from_postgis_store(self, store_id, table, debug=False): + + + + def create_layer_from_postgis_store(self, store_id, table, layer_name=None, debug=False): + """ + Add an existing PostGIS table as a feature resource to a PostGIS store that already exists. + + Args: + store_id (str): Identifier for the store to add the resource to. This can be a store name, + or "workspace:store_name" combo (e.g.: "name" or "workspace:name"). If the workspace + is not specified, the catalog's default workspace is used. + table (str): The underlying table name in the PostGIS database. A layer (feature resource) + will be created referencing this table. + layer_name (str, optional): If provided, this name will be used for the newly created layer + in GeoServer. If not provided, defaults to the same as 'table'. + debug (bool, optional): Pretty print the response dictionary to the console for debugging. + Defaults to False. + + Returns: + dict: A response dictionary with 'success', 'result', and/or 'error' keys. 
+ + Examples: + # Use the table name for layer: + engine.create_layer_from_postgis_store( + store_id='workspace:store_name', + table='table_name' + ) + + # Provide a custom layer name: + engine.create_layer_from_postgis_store( + store_id='workspace:store_name', + table='table_name', + layer_name='my_custom_layer' + ) + """ + # Extract (workspace, store_name) from the store_id + workspace, store_name = self._process_identifier(store_id) + if not workspace: + workspace = self.catalog.get_default_workspace().name + + # Verify the store exists + store_info = self.get_store(store_id, debug=debug) + if not store_info['success']: + message = f"There is no store named '{store_name}'" + if workspace: + message += f" in {workspace}" + return {'success': False, 'error': message} + + # If no layer_name was provided, default to the PostGIS table name + if not layer_name: + layer_name = table + + # Create an XML body for the new feature type in GeoServer + # The field sets the GeoServer layer (and resource) name. + xml_body = f""" + + {layer_name} + {table} + + """ + + headers = { + "Content-type": "text/xml", + "Accept": "application/xml" + } + + # POST /workspaces//datastores//featuretypes + url = self._assemble_url('workspaces', workspace, 'datastores', store_name, 'featuretypes') + response = requests.post( + url=url, + data=xml_body, + headers=headers, + auth=HTTPBasicAuth(username=self.username, password=self.password) + ) + + if response.status_code != 201: + response_dict = { + 'success': False, + 'error': f'{response.reason}({response.status_code}): {response.text}' + } + self._handle_debug(response_dict, debug) + return response_dict + + # Optionally return the store info, or you could directly query the new layer if desired + response_dict = self.get_store(store_id=store_id, debug=debug) + self._handle_debug(response_dict, debug) + return response_dict + + + def create_layer_from_postgis_store2(self, store_id, table, debug=False): """ Add an existing postgis table as a feature resource to a postgis store that already exists. @@ -2208,7 +2298,7 @@ def update_layer(self, layer_id, debug=False, **kwargs): """ # Pop tile caching properties to handle separately tile_caching = kwargs.pop('tile_caching', None) - + # breakpoint() try: # Get resource layer = self.catalog.get_layer(name=layer_id) @@ -2225,7 +2315,7 @@ def update_layer(self, layer_id, debug=False, **kwargs): # Assemble Response response_dict = {'success': True, 'result': layer_dict} - + # Handle tile caching properties (gsconfig doesn't support this) if tile_caching is not None: gwc_url = '{0}layers/{1}.xml'.format(self.gwc_endpoint, layer_id) From d83b39cdddf8553036f7965349aac43117589b67 Mon Sep 17 00:00:00 2001 From: romer8 Date: Fri, 18 Apr 2025 11:06:30 -0600 Subject: [PATCH 03/20] no need to change the geoserver relaod function --- tethys_dataset_services/engines/geoserver_engine.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tethys_dataset_services/engines/geoserver_engine.py b/tethys_dataset_services/engines/geoserver_engine.py index f8b25ce..5b11427 100644 --- a/tethys_dataset_services/engines/geoserver_engine.py +++ b/tethys_dataset_services/engines/geoserver_engine.py @@ -677,8 +677,8 @@ def reload(self, ports=None, public=True): GeoServer are running in a clustered GeoServer configuration. public (bool): Use the public geoserver endpoint if True, otherwise use the internal endpoint. 
""" - node_endpoints = self._get_node_endpoints(ports=[9090], public=public) # take this out, it is hardcoded for testing. - # node_endpoints = self._get_node_endpoints(ports=ports, public=public) + # node_endpoints = self._get_node_endpoints(ports=[9090], public=public) # take this out, it is hardcoded for testing. + node_endpoints = self._get_node_endpoints(ports=ports, public=public) log.debug("Catalog Reload URLS: {0}".format(node_endpoints)) response_dict = {'success': True, 'result': None, 'error': []} From a75bcf6330b90d38b9041a153be9407c75ec7617 Mon Sep 17 00:00:00 2001 From: romer8 Date: Tue, 29 Apr 2025 12:36:56 -0600 Subject: [PATCH 04/20] added changes back --- .../engines/geoserver_engine.py | 76 +------------------ 1 file changed, 2 insertions(+), 74 deletions(-) diff --git a/tethys_dataset_services/engines/geoserver_engine.py b/tethys_dataset_services/engines/geoserver_engine.py index f8b25ce..251f33e 100644 --- a/tethys_dataset_services/engines/geoserver_engine.py +++ b/tethys_dataset_services/engines/geoserver_engine.py @@ -677,8 +677,8 @@ def reload(self, ports=None, public=True): GeoServer are running in a clustered GeoServer configuration. public (bool): Use the public geoserver endpoint if True, otherwise use the internal endpoint. """ - node_endpoints = self._get_node_endpoints(ports=[9090], public=public) # take this out, it is hardcoded for testing. - # node_endpoints = self._get_node_endpoints(ports=ports, public=public) + + node_endpoints = self._get_node_endpoints(ports=ports, public=public) log.debug("Catalog Reload URLS: {0}".format(node_endpoints)) response_dict = {'success': True, 'result': None, 'error': []} @@ -1333,9 +1333,6 @@ def create_postgis_store(self, store_id, host, port, database, username, passwor return response_dict - - - def create_layer_from_postgis_store(self, store_id, table, layer_name=None, debug=False): """ Add an existing PostGIS table as a feature resource to a PostGIS store that already exists. @@ -1421,75 +1418,6 @@ def create_layer_from_postgis_store(self, store_id, table, layer_name=None, debu self._handle_debug(response_dict, debug) return response_dict - - def create_layer_from_postgis_store2(self, store_id, table, debug=False): - """ - Add an existing postgis table as a feature resource to a postgis store that already exists. - - Args: - store_id (string): Identifier for the store to add the resource to. Can be a store name or a workspace name combination (e.g.: "name" or "workspace:name"). Note that the workspace must be an existing workspace. If no workspace is given, the default workspace will be assigned. # noqa: E501 - table (string): Name of existing table to add as a feature resource. A layer will automatically be created for this resource. Both the resource and the layer will share the same name as the table. # noqa: E501 - debug (bool, optional): Pretty print the response dictionary to the console for debugging. Defaults to False. 
- - Returns: - (dict): Response dictionary - - Examples: - - response = engine.create_layer_from_postgis_store(store_id='workspace:store_name', table='table_name') - """ - # Process identifier - workspace, name = self._process_identifier(store_id) - - # Get default work space if none is given - if not workspace: - workspace = self.catalog.get_default_workspace().name - - # Throw error store does not exist - store = self.get_store(store_id, debug) - if not store['success']: - message = "There is no store named " + name - if workspace: - message += " in " + workspace - - response_dict = {'success': False, 'error': message} - - return response_dict - - # Prepare file - xml = """ - - {0} - - """.format(table) - - # Prepare headers - headers = { - "Content-type": "text/xml", - "Accept": "application/xml" - } - - # Prepare URL - url = self._assemble_url('workspaces', workspace, 'datastores', name, 'featuretypes') - - # Execute: POST /workspaces//datastores - response = requests.post( - url=url, - data=xml, - headers=headers, - auth=HTTPBasicAuth(username=self.username, password=self.password) - ) - - if response.status_code != 201: - response_dict = {'success': False, - 'error': '{1}({0}): {2}'.format(response.status_code, response.reason, response.text)} - - self._handle_debug(response_dict, debug) - return response_dict - - response_dict = self.get_store(store_id=store_id, debug=debug) - return response_dict - def create_sql_view_layer(self, store_id, layer_name, geometry_type, srid, sql, default_style, geometry_name='geometry', other_styles=None, parameters=None, reload_public=False, debug=False): From 183d720ea0adc446ee6cbb08e278c24a86d88392 Mon Sep 17 00:00:00 2001 From: romer8 Date: Tue, 29 Apr 2025 13:07:56 -0600 Subject: [PATCH 05/20] added black formatting --- tests/__init__.py | 2 +- tests/e2e_tests/ckan_engine_e2e_tests.py | 211 +- tests/e2e_tests/geoserver_engine_e2e_tests.py | 611 +-- tests/unit_tests/test_ckan_engine.py | 556 +-- tests/unit_tests/test_geoserver_engine.py | 3536 ++++++++++------- tests/unit_tests/test_utilities.py | 89 +- tethys_dataset_services/__init__.py | 2 +- .../base/dataset_engine_abc.py | 3 +- .../base/spatial_dataset_engine_abc.py | 3 +- .../engines/ckan_engine.py | 243 +- .../engines/geoserver_engine.py | 1878 +++++---- .../engines/hydroshare_engine.py | 6 +- tethys_dataset_services/utilities.py | 16 +- tethys_dataset_services/valid_engines.py | 10 +- 14 files changed, 4256 insertions(+), 2910 deletions(-) diff --git a/tests/__init__.py b/tests/__init__.py index 08bdcd7..0093377 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1 +1 @@ -__author__ = 'swainn' +__author__ = "swainn" diff --git a/tests/e2e_tests/ckan_engine_e2e_tests.py b/tests/e2e_tests/ckan_engine_e2e_tests.py index 133f06a..4e46d20 100644 --- a/tests/e2e_tests/ckan_engine_e2e_tests.py +++ b/tests/e2e_tests/ckan_engine_e2e_tests.py @@ -9,61 +9,68 @@ from ..test_config import TEST_CKAN_DATASET_SERVICE except ImportError: - print('ERROR: To perform tests, you must create a file in the "tests" package called "test_config.py". In this file' - 'provide a dictionary called "TEST_CKAN_DATASET_SERVICE" with keys "API_ENDPOINT" and "APIKEY".') + print( + 'ERROR: To perform tests, you must create a file in the "tests" package called "test_config.py". In this file' + 'provide a dictionary called "TEST_CKAN_DATASET_SERVICE" with keys "API_ENDPOINT" and "APIKEY".' 
+ ) exit(1) def random_string_generator(size): chars = string.ascii_lowercase + string.digits - return ''.join(random.choice(chars) for _ in range(size)) + return "".join(random.choice(chars) for _ in range(size)) class TestCkanDatasetEngine(unittest.TestCase): def setUp(self): # Auth - self.endpoint = TEST_CKAN_DATASET_SERVICE['ENDPOINT'] - self.apikey = TEST_CKAN_DATASET_SERVICE['APIKEY'] - self.username = TEST_CKAN_DATASET_SERVICE['USERNAME'] + self.endpoint = TEST_CKAN_DATASET_SERVICE["ENDPOINT"] + self.apikey = TEST_CKAN_DATASET_SERVICE["APIKEY"] + self.username = TEST_CKAN_DATASET_SERVICE["USERNAME"] # Files self.tests_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) - self.files_root = os.path.join(self.tests_root, 'files') - self.support_root = os.path.join(self.tests_root, 'support') + self.files_root = os.path.join(self.tests_root, "files") + self.support_root = os.path.join(self.tests_root, "support") # Create Test Engine - self.engine = CkanDatasetEngine(endpoint=self.endpoint, - apikey=self.apikey) + self.engine = CkanDatasetEngine(endpoint=self.endpoint, apikey=self.apikey) # Create Test Organization self.test_org = random_string_generator(10) - data_dict = { - 'name': self.test_org, - 'users': [{'name': self.username}] - } + data_dict = {"name": self.test_org, "users": [{"name": self.username}]} url, data, headers = self.engine._prepare_request( - 'organization_create', data_dict=data_dict, apikey=self.apikey + "organization_create", data_dict=data_dict, apikey=self.apikey ) status_code, response_text = self.engine._execute_request(url, data, headers) if status_code != 200: - raise requests.RequestException('Unable to create group: {}'.format(response_text)) + raise requests.RequestException( + "Unable to create group: {}".format(response_text) + ) # Create Test Dataset self.test_dataset_name = random_string_generator(10) - dataset_result = self.engine.create_dataset(name=self.test_dataset_name, version='1.0', owner_org=self.test_org) - if not dataset_result['success']: - raise requests.RequestException('Unable to create test dataset: {}'.format(dataset_result['error'])) - self.test_dataset = dataset_result['result'] + dataset_result = self.engine.create_dataset( + name=self.test_dataset_name, version="1.0", owner_org=self.test_org + ) + if not dataset_result["success"]: + raise requests.RequestException( + "Unable to create test dataset: {}".format(dataset_result["error"]) + ) + self.test_dataset = dataset_result["result"] # Create Test Resource self.test_resource_name = random_string_generator(10) - self.test_resource_url = 'http://home.byu.edu' - resource_result = self.engine.create_resource(self.test_dataset_name, - url=self.test_resource_url, format='zip') - if not resource_result['success']: - raise requests.RequestException('Unable to create test resource: {}'.format(resource_result['error'])) - self.test_resource = resource_result['result'] + self.test_resource_url = "http://home.byu.edu" + resource_result = self.engine.create_resource( + self.test_dataset_name, url=self.test_resource_url, format="zip" + ) + if not resource_result["success"]: + raise requests.RequestException( + "Unable to create test resource: {}".format(resource_result["error"]) + ) + self.test_resource = resource_result["result"] def tearDown(self): pass @@ -75,100 +82,110 @@ def test_create_dataset(self): new_dataset_name = random_string_generator(10) # Execute - result = self.engine.create_dataset(name=new_dataset_name, owner_org=self.test_org) + result = 
self.engine.create_dataset( + name=new_dataset_name, owner_org=self.test_org + ) # Verify Success - self.assertTrue(result['success']) + self.assertTrue(result["success"]) # Should return the new one - self.assertEqual(new_dataset_name, result['result']['name']) + self.assertEqual(new_dataset_name, result["result"]["name"]) # TEST search_datasets - result = self.engine.search_datasets(query={'name': new_dataset_name}, console=False) + result = self.engine.search_datasets( + query={"name": new_dataset_name}, console=False + ) # Verify Success - self.assertTrue(result['success']) + self.assertTrue(result["success"]) # Check search results - search_results = result['result']['results'] - self.assertIn(new_dataset_name, search_results[0]['name']) - self.assertIn(self.test_org, search_results[0]['organization']['name']) + search_results = result["result"]["results"] + self.assertIn(new_dataset_name, search_results[0]["name"]) + self.assertIn(self.test_org, search_results[0]["organization"]["name"]) # TEST list_datasets # Execute result = self.engine.list_datasets() # Verify Success - self.assertTrue(result['success']) - self.assertIn(new_dataset_name, result['result']) + self.assertTrue(result["success"]) + self.assertIn(new_dataset_name, result["result"]) # Delete result = self.engine.delete_dataset(dataset_id=new_dataset_name) # Check if success - self.assertTrue(result['success']) + self.assertTrue(result["success"]) def test_create_resource_file(self): # Prepare - file_name = 'upload_test.txt' + file_name = "upload_test.txt" save_name = random_string_generator(10) file_to_upload = os.path.join(self.support_root, file_name) # Execute - result = self.engine.create_resource(dataset_id=self.test_dataset_name, - name=save_name, - file=file_to_upload) + result = self.engine.create_resource( + dataset_id=self.test_dataset_name, name=save_name, file=file_to_upload + ) # Verify Success - self.assertTrue(result['success']) + self.assertTrue(result["success"]) # Verify name and url_type (which should be upload if file upload) - self.assertIn(save_name, result['result']['name']) - self.assertEqual(result['result']['url_type'], 'upload') + self.assertIn(save_name, result["result"]["name"]) + self.assertEqual(result["result"]["url_type"], "upload") # TEST search resource # Execute - result = self.engine.search_resources(query={'name': save_name}) + result = self.engine.search_resources(query={"name": save_name}) # Verify Success - self.assertTrue(result['success']) - self.assertIn(save_name, result['result']['results'][-1]['name']) + self.assertTrue(result["success"]) + self.assertIn(save_name, result["result"]["results"][-1]["name"]) # Delete - result = self.engine.delete_resource(resource_id=result['result']['results'][-1]['id']) - self.assertTrue(result['success']) + result = self.engine.delete_resource( + resource_id=result["result"]["results"][-1]["id"] + ) + self.assertTrue(result["success"]) def test_create_resource_url(self): # Prepare new_resource_name = random_string_generator(10) - new_resource_url = 'http://home.byu.edu/' + new_resource_url = "http://home.byu.edu/" # Execute - result = self.engine.create_resource(dataset_id=self.test_dataset_name, - url=new_resource_url, - name=new_resource_name) + result = self.engine.create_resource( + dataset_id=self.test_dataset_name, + url=new_resource_url, + name=new_resource_name, + ) # Verify Success - self.assertTrue(result['success']) + self.assertTrue(result["success"]) # Verify name and url_type (which should be upload if file upload) - 
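# (For a link resource created from a URL, the checks below cover the resource name and
# the stored url; url_type is only asserted in the file-upload test above, where CKAN
# reports it as "upload".)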
self.assertIn(new_resource_name, result['result']['name']) - self.assertEqual(result['result']['url'], new_resource_url) + self.assertIn(new_resource_name, result["result"]["name"]) + self.assertEqual(result["result"]["url"], new_resource_url) # TEST search resource # Execute - result = self.engine.search_resources(query={'name': new_resource_name}) + result = self.engine.search_resources(query={"name": new_resource_name}) # Verify Success - self.assertTrue(result['success']) - self.assertIn(new_resource_name, result['result']['results'][-1]['name']) - self.assertIn(new_resource_url, result['result']['results'][-1]['url']) + self.assertTrue(result["success"]) + self.assertIn(new_resource_name, result["result"]["results"][-1]["name"]) + self.assertIn(new_resource_url, result["result"]["results"][-1]["url"]) # Delete - result = self.engine.delete_resource(resource_id=result['result']['results'][-1]['id']) - self.assertTrue(result['success']) + result = self.engine.delete_resource( + resource_id=result["result"]["results"][-1]["id"] + ) + self.assertTrue(result["success"]) def test_update_dataset(self): # Setup @@ -176,36 +193,36 @@ def test_update_dataset(self): author = random_string_generator(5) # Execute - result = self.engine.update_dataset(dataset_id=self.test_dataset_name, - author=author, notes=notes) + result = self.engine.update_dataset( + dataset_id=self.test_dataset_name, author=author, notes=notes + ) # Verify Success - self.assertTrue(result['success']) + self.assertTrue(result["success"]) # Verify new property - self.assertEqual(result['result']['author'], author) - self.assertEqual(result['result']['notes'], notes) + self.assertEqual(result["result"]["author"], author) + self.assertEqual(result["result"]["notes"], notes) # TEST get_dataset # Execute result = self.engine.get_dataset(dataset_id=self.test_dataset_name) # Verify Success - self.assertTrue(result['success']) + self.assertTrue(result["success"]) # Verify Name - self.assertEqual(result['result']['name'], self.test_dataset_name) - self.assertEqual(result['result']['author'], author) - self.assertEqual(result['result']['notes'], notes) + self.assertEqual(result["result"]["name"], self.test_dataset_name) + self.assertEqual(result["result"]["author"], author) + self.assertEqual(result["result"]["notes"], notes) # TEST download_dataset location = self.files_root - result = self.engine.download_dataset(self.test_dataset_name, - location=location) + result = self.engine.download_dataset(self.test_dataset_name, location=location) # Result will return list of the file with .zip at the end. 
Check here - self.assertIn('.zip', result[0][-4:].lower()) + self.assertIn(".zip", result[0][-4:].lower()) download_file = os.path.basename(result[0]) @@ -215,59 +232,61 @@ def test_update_dataset(self): if os.path.isfile(location_final): os.remove(location_final) else: - raise AssertionError('No file has been downloaded') + raise AssertionError("No file has been downloaded") # TEST delete_dataset # Execute result = self.engine.delete_dataset(dataset_id=self.test_dataset_name) # Confirm Success - self.assertTrue(result['success']) + self.assertTrue(result["success"]) # Delete requests should return nothing - self.assertEqual(result['result'], None) + self.assertEqual(result["result"], None) def test_update_resource(self): # Get Resource ID result = self.engine.get_dataset(dataset_id=self.test_dataset_name) - resource_id = result['result']['resources'][0]['id'] + resource_id = result["result"]["resources"][0]["id"] # Setup - file_name = 'upload_test.txt' + file_name = "upload_test.txt" file_to_upload = os.path.join(self.support_root, file_name) description_new = random_string_generator(10) # Execute - result = self.engine.update_resource(resource_id=resource_id, - file=file_to_upload, - description=description_new) + result = self.engine.update_resource( + resource_id=resource_id, file=file_to_upload, description=description_new + ) # Verify Success - self.assertTrue(result['success']) + self.assertTrue(result["success"]) # Verify Name (should be the same as the file uploaded by default) - self.assertEqual(result['result']['name'], file_name) - self.assertEqual(result['result']['description'], description_new) + self.assertEqual(result["result"]["name"], file_name) + self.assertEqual(result["result"]["description"], description_new) # TEST get_resource # Execute result = self.engine.get_resource(resource_id=resource_id) # Verify Success - self.assertTrue(result['success']) + self.assertTrue(result["success"]) # Verify Properties - self.assertEqual(result['result']['name'], file_name) - self.assertEqual(result['result']['description'], description_new) + self.assertEqual(result["result"]["name"], file_name) + self.assertEqual(result["result"]["description"], description_new) # TEST download_resource location = self.files_root - result = self.engine.download_resource(resource_id=resource_id, location=location) + result = self.engine.download_resource( + resource_id=resource_id, location=location + ) # Result will return list of the file with .zip at the end. 
Check here - self.assertIn('.zip', result[-4:].lower()) + self.assertIn(".zip", result[-4:].lower()) download_file = os.path.basename(result) location_final = os.path.join(self.files_root, download_file) @@ -276,22 +295,24 @@ def test_update_resource(self): if os.path.isfile(location_final): os.remove(location_final) else: - raise AssertionError('No file has been downloaded') + raise AssertionError("No file has been downloaded") # TEST delete_resource # Execute result = self.engine.delete_resource(resource_id=resource_id) # Verify Success - self.assertTrue(result['success']) + self.assertTrue(result["success"]) # Delete requests should return nothing - self.assertEqual(result['result'], None) + self.assertEqual(result["result"], None) def test_validate(self): self.engine.validate() def test_validate_status_code(self): - self.engine2 = CkanDatasetEngine(endpoint="http://localhost:5000/api/a/action/", - apikey=TEST_CKAN_DATASET_SERVICE['APIKEY']) + self.engine2 = CkanDatasetEngine( + endpoint="http://localhost:5000/api/a/action/", + apikey=TEST_CKAN_DATASET_SERVICE["APIKEY"], + ) self.assertRaises(AssertionError, self.engine2.validate) diff --git a/tests/e2e_tests/geoserver_engine_e2e_tests.py b/tests/e2e_tests/geoserver_engine_e2e_tests.py index 177ff37..a15aee8 100644 --- a/tests/e2e_tests/geoserver_engine_e2e_tests.py +++ b/tests/e2e_tests/geoserver_engine_e2e_tests.py @@ -24,7 +24,7 @@ def random_string_generator(size): chars = string.ascii_lowercase + string.digits - return ''.join(random.choice(chars) for _ in range(size)) + return "".join(random.choice(chars) for _ in range(size)) class GeoServerDatasetEngineEnd2EndTests(unittest.TestCase): @@ -32,28 +32,32 @@ class GeoServerDatasetEngineEnd2EndTests(unittest.TestCase): def setUp(self): # Files self.tests_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) - self.files_root = os.path.join(self.tests_root, 'files') + self.files_root = os.path.join(self.tests_root, "files") # GeoServer - self.gs_endpoint = TEST_GEOSERVER_DATASET_SERVICE['ENDPOINT'] - self.gs_username = TEST_GEOSERVER_DATASET_SERVICE['USERNAME'] - self.gs_password = TEST_GEOSERVER_DATASET_SERVICE['PASSWORD'] - self.gs_public_endpoint = TEST_GEOSERVER_DATASET_SERVICE['PUBLIC_ENDPOINT'] - self.catalog = GeoServerCatalog(self.gs_endpoint, username=self.gs_username, password=self.gs_password) + self.gs_endpoint = TEST_GEOSERVER_DATASET_SERVICE["ENDPOINT"] + self.gs_username = TEST_GEOSERVER_DATASET_SERVICE["USERNAME"] + self.gs_password = TEST_GEOSERVER_DATASET_SERVICE["PASSWORD"] + self.gs_public_endpoint = TEST_GEOSERVER_DATASET_SERVICE["PUBLIC_ENDPOINT"] + self.catalog = GeoServerCatalog( + self.gs_endpoint, username=self.gs_username, password=self.gs_password + ) # Postgis - self.pg_username = TEST_POSTGIS_SERVICE['USERNAME'] - self.pg_password = TEST_POSTGIS_SERVICE['PASSWORD'] - self.pg_database = TEST_POSTGIS_SERVICE['DATABASE'] - self.pg_table_name = 'points' - self.pg_host = TEST_POSTGIS_SERVICE['HOST'] - self.pg_port = TEST_POSTGIS_SERVICE['PORT'] - self.pg_url = TEST_POSTGIS_SERVICE['URL'] - self.pg_public_url = TEST_POSTGIS_SERVICE['PUBLIC_URL'] + self.pg_username = TEST_POSTGIS_SERVICE["USERNAME"] + self.pg_password = TEST_POSTGIS_SERVICE["PASSWORD"] + self.pg_database = TEST_POSTGIS_SERVICE["DATABASE"] + self.pg_table_name = "points" + self.pg_host = TEST_POSTGIS_SERVICE["HOST"] + self.pg_port = TEST_POSTGIS_SERVICE["PORT"] + self.pg_url = TEST_POSTGIS_SERVICE["URL"] + self.pg_public_url = TEST_POSTGIS_SERVICE["PUBLIC_URL"] # Setup a testing 
workspace self.workspace_name = random_string_generator(10) - self.workspace_uri = 'http://www.tethysplatform.org/{}'.format(self.workspace_name) + self.workspace_uri = "http://www.tethysplatform.org/{}".format( + self.workspace_name + ) retries = 5 while retries > 0: @@ -61,7 +65,7 @@ def setUp(self): self.catalog.create_workspace(self.workspace_name, self.workspace_uri) break except AssertionError as e: - if 'Error persisting' in str(e) and retries > 0: + if "Error persisting" in str(e) and retries > 0: print("WARNING: FAILED TO PERSIST WORKSPACE.") retries -= 1 else: @@ -73,29 +77,29 @@ def setUp(self): self.transaction = self.connection.begin() # Create GeoServer Engine - self.endpoint = TEST_GEOSERVER_DATASET_SERVICE['ENDPOINT'] + self.endpoint = TEST_GEOSERVER_DATASET_SERVICE["ENDPOINT"] self.geoserver_engine = GeoServerSpatialDatasetEngine( endpoint=self.endpoint, - username=TEST_GEOSERVER_DATASET_SERVICE['USERNAME'], - password=TEST_GEOSERVER_DATASET_SERVICE['PASSWORD'], - public_endpoint=TEST_GEOSERVER_DATASET_SERVICE['PUBLIC_ENDPOINT'] + username=TEST_GEOSERVER_DATASET_SERVICE["USERNAME"], + password=TEST_GEOSERVER_DATASET_SERVICE["PASSWORD"], + public_endpoint=TEST_GEOSERVER_DATASET_SERVICE["PUBLIC_ENDPOINT"], ) - self.geometry_column = 'geometry' - self.geometry_type = 'Point' + self.geometry_column = "geometry" + self.geometry_type = "Point" self.srid = 4326 def assert_valid_response_object(self, response_object): # Response object should be a dictionary with the keys 'success' and either 'result' if success is True # or 'error' if success is False self.assertIsInstance(response_object, dict) - self.assertIn('success', response_object) + self.assertIn("success", response_object) - if isinstance(response_object, dict) and 'success' in response_object: - if response_object['success'] is True: - self.assertIn('result', response_object) - elif response_object['success'] is False: - self.assertIn('error', response_object) + if isinstance(response_object, dict) and "success" in response_object: + if response_object["success"] is True: + self.assertIn("result", response_object) + elif response_object["success"] is False: + self.assertIn("error", response_object) def tearDown(self): # Clean up GeoServer @@ -114,17 +118,19 @@ def setup_postgis_table(self): "id", "name", and "geometry." Use this table for the tests that require a database. """ # Clean up - delete_sql = "DROP TABLE IF EXISTS {table}".\ - format(table=self.pg_table_name) + delete_sql = "DROP TABLE IF EXISTS {table}".format(table=self.pg_table_name) self.connection.execute(delete_sql) # Create table - geom_table_sql = "CREATE TABLE IF NOT EXISTS {table} (" \ - "id integer CONSTRAINT points_primary_key PRIMARY KEY, " \ - "name varchar(20)" \ - "); " \ - "SELECT AddGeometryColumn('public', '{table}', 'geometry', 4326, 'POINT', 2);". 
\ - format(table=self.pg_table_name) + geom_table_sql = ( + "CREATE TABLE IF NOT EXISTS {table} (" + "id integer CONSTRAINT points_primary_key PRIMARY KEY, " + "name varchar(20)" + "); " + "SELECT AddGeometryColumn('public', '{table}', 'geometry', 4326, 'POINT', 2);".format( + table=self.pg_table_name + ) + ) self.connection.execute(geom_table_sql) @@ -138,10 +144,10 @@ def setup_postgis_table(self): for r in rows: sql = insert_sql.format( table=self.pg_table_name, - id=r['id'], - name=r['name'], - lat=r['lat'], - lon=r['lon'] + id=r["id"], + name=r["name"], + lat=r["lat"], + lon=r["lon"], ) self.connection.execute(sql) self.transaction.commit() @@ -152,31 +158,29 @@ def test_create_shapefile_resource_base(self): # TEST create shapefile # Setup - filename = 'test' - shapefile_name = os.path.join(self.files_root, 'shapefile', filename) + filename = "test" + shapefile_name = os.path.join(self.files_root, "shapefile", filename) workspace = self.workspace_name store_id = random_string_generator(10) - store_id_name = '{}:{}'.format(workspace, store_id) + store_id_name = "{}:{}".format(workspace, store_id) # Execute response = self.geoserver_engine.create_shapefile_resource( - store_id=store_id_name, - shapefile_base=shapefile_name, - overwrite=True + store_id=store_id_name, shapefile_base=shapefile_name, overwrite=True ) # Validate response object self.assert_valid_response_object(response) # Should succeed - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) - self.assertIn(store_id, r['name']) - self.assertIn(store_id, r['store']) + self.assertIn(store_id, r["name"]) + self.assertIn(store_id, r["store"]) # TEST list_resources @@ -187,10 +191,10 @@ def test_create_shapefile_resource_base(self): self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # Returns list self.assertIsInstance(result, list) @@ -202,31 +206,32 @@ def test_create_shapefile_resource_base(self): # Execute # Geoserver uses the store_id as the layer/resource name (not the filename) - resource_id_name = '{}:{}'.format(workspace, store_id) + resource_id_name = "{}:{}".format(workspace, store_id) response = self.geoserver_engine.get_resource(resource_id=resource_id_name) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) # Properties - self.assertIn('name', r) - self.assertEqual(store_id, r['name']) - self.assertIn(store_id, r['wfs']['shapefile']) + self.assertIn("name", r) + self.assertEqual(store_id, r["name"]) + self.assertIn(store_id, r["wfs"]["shapefile"]) # TEST delete_resource # Execute # This case the resource id is the same as the store id. 
- response = self.geoserver_engine.delete_resource(resource_id=resource_id_name, - store_id=store_id) + response = self.geoserver_engine.delete_resource( + resource_id=resource_id_name, store_id=store_id + ) # Validate response object self.assert_valid_response_object(response) @@ -242,32 +247,30 @@ def test_create_shapefile_resource_zip(self): # Test1.zip # Setup - shapefile_zip = os.path.join(self.files_root, 'shapefile', "test1.zip") + shapefile_zip = os.path.join(self.files_root, "shapefile", "test1.zip") shapefile = "test1" workspace = self.workspace_name store_id = random_string_generator(10) - store_id_name = '{}:{}'.format(workspace, store_id) + store_id_name = "{}:{}".format(workspace, store_id) # Execute response = self.geoserver_engine.create_shapefile_resource( - store_id=store_id_name, - shapefile_zip=shapefile_zip, - overwrite=True + store_id=store_id_name, shapefile_zip=shapefile_zip, overwrite=True ) # Validate response object self.assert_valid_response_object(response) # Should succeed - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type filename = os.path.splitext(os.path.basename(shapefile_zip))[0] self.assertIsInstance(r, dict) - self.assertIn(filename, r['name']) - self.assertIn(store_id, r['store']) + self.assertIn(filename, r["name"]) + self.assertIn(store_id, r["store"]) # TEST list_layers test # Execute @@ -277,43 +280,41 @@ def test_create_shapefile_resource_zip(self): self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # Returns list self.assertIsInstance(result, list) # Get the last item from result - layer_id = '{}:{}'.format(workspace, shapefile) + layer_id = "{}:{}".format(workspace, shapefile) # TEST get layers test # Execute - response = self.geoserver_engine.get_layer(layer_id=layer_id, - store_id=store_id) + response = self.geoserver_engine.get_layer(layer_id=layer_id, store_id=store_id) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) - self.assertIn(filename, r['name']) - self.assertIn(self.workspace_name, r['name']) + self.assertIn(filename, r["name"]) + self.assertIn(self.workspace_name, r["name"]) # TEST delete_layer - self.geoserver_engine.delete_layer(layer_id=layer_id, - datastore=store_id) + self.geoserver_engine.delete_layer(layer_id=layer_id, datastore=store_id) self.assert_valid_response_object(response) - self.assertTrue(response['success']) + self.assertTrue(response["success"]) def test_create_shapefile_resource_upload(self): # call methods: create_shapefile_resource, list_stores, get_store, delete_store @@ -322,37 +323,37 @@ def test_create_shapefile_resource_upload(self): # Use in memory file list: test.shp and friends # Setup - shapefile_cst = os.path.join(self.files_root, 'shapefile', 'test.cst') - shapefile_dbf = os.path.join(self.files_root, 'shapefile', 'test.dbf') - shapefile_prj = os.path.join(self.files_root, 'shapefile', 'test.prj') - shapefile_shp = os.path.join(self.files_root, 'shapefile', 'test.shp') - shapefile_shx = os.path.join(self.files_root, 'shapefile', 'test.shx') + shapefile_cst = os.path.join(self.files_root, "shapefile", "test.cst") + shapefile_dbf = 
os.path.join(self.files_root, "shapefile", "test.dbf") + shapefile_prj = os.path.join(self.files_root, "shapefile", "test.prj") + shapefile_shp = os.path.join(self.files_root, "shapefile", "test.shp") + shapefile_shx = os.path.join(self.files_root, "shapefile", "test.shx") # Workspace is given store_rand = random_string_generator(10) - store_id = '{}:{}'.format(self.workspace_name, store_rand) - - with open(shapefile_cst, 'rb') as cst_upload,\ - open(shapefile_dbf, 'rb') as dbf_upload,\ - open(shapefile_prj, 'rb') as prj_upload,\ - open(shapefile_shp, 'rb') as shp_upload,\ - open(shapefile_shx, 'rb') as shx_upload: + store_id = "{}:{}".format(self.workspace_name, store_rand) + + with open(shapefile_cst, "rb") as cst_upload, open( + shapefile_dbf, "rb" + ) as dbf_upload, open(shapefile_prj, "rb") as prj_upload, open( + shapefile_shp, "rb" + ) as shp_upload, open( + shapefile_shx, "rb" + ) as shx_upload: upload_list = [cst_upload, dbf_upload, prj_upload, shp_upload, shx_upload] response = self.geoserver_engine.create_shapefile_resource( - store_id=store_id, - shapefile_upload=upload_list, - overwrite=True + store_id=store_id, shapefile_upload=upload_list, overwrite=True ) # Should succeed - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) - self.assertIn(store_rand, r['name']) - self.assertIn(store_rand, r['store']) + self.assertIn(store_rand, r["name"]) + self.assertIn(store_rand, r["store"]) # TEST list_stores @@ -364,10 +365,10 @@ def test_create_shapefile_resource_upload(self): self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # layer group listed self.assertIn(store_rand, result) @@ -381,26 +382,28 @@ def test_create_shapefile_resource_upload(self): self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) # Properties - self.assertIn('name', r) - self.assertIn(r['name'], store_rand) - self.assertIn('workspace', r) - self.assertEqual(self.workspace_name, r['workspace']) + self.assertIn("name", r) + self.assertIn(r["name"], store_rand) + self.assertIn("workspace", r) + self.assertEqual(self.workspace_name, r["workspace"]) # TEST delete_store - response = self.geoserver_engine.delete_store(store_id=store_id, purge=True, recurse=True) + response = self.geoserver_engine.delete_store( + store_id=store_id, purge=True, recurse=True + ) # Failure Check self.assert_valid_response_object(response) - self.assertTrue(response['success']) + self.assertTrue(response["success"]) def test_create_coverage_layer_arcgrid(self): # call methods: create_coverage_layer, list_resources, get_resource, delete_resource @@ -408,31 +411,31 @@ def test_create_coverage_layer_arcgrid(self): # TEST create_coverage_layer # precip30min.zip layer_name = random_string_generator(10) - layer_id = f'{self.workspace_name}:{layer_name}' - expected_coverage_type = 'ArcGrid' - coverage_file_name = 'precip30min.zip' + layer_id = f"{self.workspace_name}:{layer_name}" + expected_coverage_type = "ArcGrid" + coverage_file_name = "precip30min.zip" coverage_file = os.path.join(self.files_root, "arc_sample", coverage_file_name) # Execute response = 
self.geoserver_engine.create_coverage_layer( layer_id=layer_id, coverage_type=expected_coverage_type, - coverage_file=coverage_file + coverage_file=coverage_file, ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) # Values - self.assertEqual(layer_id, r['name']) + self.assertEqual(layer_id, r["name"]) # TEST list_resources @@ -443,10 +446,10 @@ def test_create_coverage_layer_arcgrid(self): self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # Returns list self.assertIsInstance(result, list) @@ -457,32 +460,33 @@ def test_create_coverage_layer_arcgrid(self): # TEST get_resource # Execute - resource_id = '{}:{}'.format(self.workspace_name, layer_name) + resource_id = "{}:{}".format(self.workspace_name, layer_name) response = self.geoserver_engine.get_resource( resource_id=resource_id, - store_id=layer_name # layer and store share name (one to one approach) + store_id=layer_name, # layer and store share name (one to one approach) ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] - self.assertIn('ArcGrid', r['keywords']) - self.assertEqual(coverage_file_name.split('.')[0], r['title']) - self.assertEqual('coverage', r['resource_type']) + self.assertIn("ArcGrid", r["keywords"]) + self.assertEqual(coverage_file_name.split(".")[0], r["title"]) + self.assertEqual("coverage", r["resource_type"]) # delete_resource # TODO: delete_resource is returning a 403 error: not authorized. 
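# A possible workaround (sketch only, not verified here): because create_coverage_layer
# uses the one-to-one approach (the layer and its store share a name), the coverage could
# likely also be removed by deleting its store, as the GeoTIFF and shapefile tests in this
# file do, e.g.:
#   self.geoserver_engine.delete_store(store_id=layer_id, purge=True, recurse=True)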
# Execute - resource_id = '{}:{}'.format(self.workspace_name, layer_name) - response = self.geoserver_engine.delete_resource(resource_id=resource_id, - store_id=layer_name) + resource_id = "{}:{}".format(self.workspace_name, layer_name) + response = self.geoserver_engine.delete_resource( + resource_id=resource_id, store_id=layer_name + ) # # Validate response object self.assert_valid_response_object(response) @@ -496,32 +500,32 @@ def test_create_coverage_layer_grassgrid(self): # TEST create_coverage resource # my_grass.zip layer_name = random_string_generator(10) - layer_id = '{}:{}'.format(self.workspace_name, layer_name) - expected_coverage_type = 'GrassGrid' - coverage_file_name = 'my_grass.zip' - coverage_name = coverage_file_name.split('.')[0] + layer_id = "{}:{}".format(self.workspace_name, layer_name) + expected_coverage_type = "GrassGrid" + coverage_file_name = "my_grass.zip" + coverage_name = coverage_file_name.split(".")[0] coverage_file = os.path.join(self.files_root, "grass_ascii", coverage_file_name) # Execute response = self.geoserver_engine.create_coverage_layer( layer_id=layer_id, coverage_type=expected_coverage_type, - coverage_file=coverage_file + coverage_file=coverage_file, ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) # Tests - self.assertIn(layer_id, r['name']) + self.assertIn(layer_id, r["name"]) # TEST list_layers @@ -532,10 +536,10 @@ def test_create_coverage_layer_grassgrid(self): self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # Returns list self.assertIsInstance(result, list) @@ -546,28 +550,28 @@ def test_create_coverage_layer_grassgrid(self): # TEST get_layer # Execute - response = self.geoserver_engine.get_layer(layer_id=layer_id, - store_id=layer_name) + response = self.geoserver_engine.get_layer( + layer_id=layer_id, store_id=layer_name + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) - self.assertIn(layer_name, r['store']) - self.assertIn(self.workspace_name, r['name']) + self.assertIn(layer_name, r["store"]) + self.assertIn(self.workspace_name, r["name"]) # TEST delete_layer - self.geoserver_engine.delete_layer(layer_id=layer_id, - datastore=layer_name) + self.geoserver_engine.delete_layer(layer_id=layer_id, datastore=layer_name) self.assert_valid_response_object(response) - self.assertTrue(response['success']) + self.assertTrue(response["success"]) def test_create_coverage_layer_geotiff(self): # adem.tif @@ -576,31 +580,31 @@ def test_create_coverage_layer_geotiff(self): # TEST create_coverage_layer layer_name = random_string_generator(10) - layer_id = '{}:{}'.format(self.workspace_name, layer_name) - expected_coverage_type = 'GeoTIFF' - coverage_file_name = 'adem.tif' + layer_id = "{}:{}".format(self.workspace_name, layer_name) + expected_coverage_type = "GeoTIFF" + coverage_file_name = "adem.tif" coverage_file = os.path.join(self.files_root, coverage_file_name) # Execute response = self.geoserver_engine.create_coverage_layer( layer_id=layer_id, 
coverage_type=expected_coverage_type, - coverage_file=coverage_file + coverage_file=coverage_file, ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) # Values - self.assertEqual(layer_id, r['name']) + self.assertEqual(layer_id, r["name"]) # TEST list_stores @@ -612,10 +616,10 @@ def test_create_coverage_layer_geotiff(self): self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # TEST layer group listed self.assertIn(layer_name, result) @@ -623,64 +627,68 @@ def test_create_coverage_layer_geotiff(self): # TEST get store # Execute - response = self.geoserver_engine.get_store(store_id=layer_id) # layer_id == store_id + response = self.geoserver_engine.get_store( + store_id=layer_id + ) # layer_id == store_id # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) # Properties - self.assertIn('name', r) - self.assertIn(r['name'], layer_name) - self.assertIn('workspace', r) - self.assertEqual(self.workspace_name, r['workspace']) + self.assertIn("name", r) + self.assertIn(r["name"], layer_name) + self.assertIn("workspace", r) + self.assertEqual(self.workspace_name, r["workspace"]) # TEST delete_store - response = self.geoserver_engine.delete_store(store_id=layer_id, purge=True, recurse=True) + response = self.geoserver_engine.delete_store( + store_id=layer_id, purge=True, recurse=True + ) # Failure Check self.assert_valid_response_object(response) - self.assertTrue(response['success']) + self.assertTrue(response["success"]) def test_create_coverage_layer_world_file_tif(self): # pk50095.zip # call methods: create_coverage_layer, list_layers, get_layer, delete_layer # TEST create_coverage resource layer_name = random_string_generator(10) - layer_id = '{}:{}'.format(self.workspace_name, layer_name) - expected_coverage_type = 'WorldImage' - coverage_file_name = 'Pk50095.zip' - coverage_name = coverage_file_name.split('.')[0] + layer_id = "{}:{}".format(self.workspace_name, layer_name) + expected_coverage_type = "WorldImage" + coverage_file_name = "Pk50095.zip" + coverage_name = coverage_file_name.split(".")[0] coverage_file = os.path.join(self.files_root, "img_sample", coverage_file_name) # Execute response = self.geoserver_engine.create_coverage_layer( layer_id=layer_id, coverage_type=expected_coverage_type, - coverage_file=coverage_file + coverage_file=coverage_file, ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) # Tests - self.assertIn(layer_id, r['name']) + self.assertIn(layer_id, r["name"]) # TEST list_layers @@ -691,10 +699,10 @@ def test_create_coverage_layer_world_file_tif(self): self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # Returns list 
self.assertIsInstance(result, list) @@ -705,28 +713,28 @@ def test_create_coverage_layer_world_file_tif(self): # TEST get_layer # Execute - response = self.geoserver_engine.get_layer(layer_id=layer_id, - store_id=layer_name) + response = self.geoserver_engine.get_layer( + layer_id=layer_id, store_id=layer_name + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) - self.assertIn(layer_name, r['store']) + self.assertIn(layer_name, r["store"]) self.assertIn(self.workspace_name, self.workspace_name) # TEST delete_layer - self.geoserver_engine.delete_layer(layer_id=coverage_name, - datastore=layer_name) + self.geoserver_engine.delete_layer(layer_id=coverage_name, datastore=layer_name) self.assert_valid_response_object(response) - self.assertTrue(response['success']) + self.assertTrue(response["success"]) def test_create_layer_group(self): @@ -743,25 +751,25 @@ def test_create_layer_group(self): # expected_layer_group_id = '{}:{}'.format(self.workspace_name, random_string_generator(10)) expected_layer_group_id = random_string_generator(10) - expected_layers = ['roads', 'bugsites', 'streams'] - expected_styles = ['simple_roads', 'capitals', 'simple_streams'] + expected_layers = ["roads", "bugsites", "streams"] + expected_styles = ["simple_roads", "capitals", "simple_streams"] # TODO: create_layer_group: fails on catalog.save() when workspace is given. response = self.geoserver_engine.create_layer_group( - layer_group_id=f'sf:{expected_layer_group_id}', + layer_group_id=f"sf:{expected_layer_group_id}", layers=expected_layers, - styles=expected_styles + styles=expected_styles, ) # Should succeed self.assert_valid_response_object(response) - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Validate - result = response['result'] + result = response["result"] - self.assertEqual(result['name'], expected_layer_group_id) - self.assertEqual(result['layers'], expected_layers) - self.assertEqual(result['styles'], expected_styles) + self.assertEqual(result["name"], expected_layer_group_id) + self.assertEqual(result["layers"], expected_layers) + self.assertEqual(result["styles"], expected_styles) # TEST list_layer_groups @@ -772,10 +780,10 @@ def test_create_layer_group(self): self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # layer group listed self.assertIn(expected_layer_group_id, result) @@ -783,34 +791,36 @@ def test_create_layer_group(self): # TEST get layer_group # Execute - response = self.geoserver_engine.get_layer_group(layer_group_id=expected_layer_group_id) + response = self.geoserver_engine.get_layer_group( + layer_group_id=expected_layer_group_id + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) # List of dictionaries - self.assertIn('workspace', r) - self.assertEqual('sf', r['workspace']) - self.assertIn('layers', r) - self.assertEqual(expected_layers, r['layers']) - self.assertIn('styles', r) - self.assertEqual(expected_styles, r['styles']) - self.assertNotIn('dom', r) + 
self.assertIn("workspace", r) + self.assertEqual("sf", r["workspace"]) + self.assertIn("layers", r) + self.assertEqual(expected_layers, r["layers"]) + self.assertIn("styles", r) + self.assertEqual(expected_styles, r["styles"]) + self.assertNotIn("dom", r) # TEST delete layer group # Clean up self.geoserver_engine.delete_layer_group(layer_group_id=expected_layer_group_id) self.assert_valid_response_object(response) - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # self.assertIsNone(response['result']) def test_create_workspace(self): @@ -819,26 +829,28 @@ def test_create_workspace(self): # TEST create workspace expected_workspace_id = random_string_generator(10) - expected_uri = 'http://www.tethysplatform.org/{}'.format(expected_workspace_id) + expected_uri = "http://www.tethysplatform.org/{}".format(expected_workspace_id) # create workspace test - response = self.geoserver_engine.create_workspace(workspace_id=expected_workspace_id, uri=expected_uri) + response = self.geoserver_engine.create_workspace( + workspace_id=expected_workspace_id, uri=expected_uri + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) - self.assertIn('name', r) + self.assertIn("name", r) - self.assertEqual(expected_workspace_id, r['name']) + self.assertEqual(expected_workspace_id, r["name"]) # TEST list workspace @@ -849,10 +861,10 @@ def test_create_workspace(self): self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # TEST layer group listed self.assertIn(expected_workspace_id, result) @@ -860,54 +872,60 @@ def test_create_workspace(self): # TEST get_workspace # Execute - response = self.geoserver_engine.get_workspace(workspace_id=expected_workspace_id) + response = self.geoserver_engine.get_workspace( + workspace_id=expected_workspace_id + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) # Properties - self.assertIn('name', r) - self.assertIn(r['name'], expected_workspace_id) + self.assertIn("name", r) + self.assertIn(r["name"], expected_workspace_id) # TEST delete work_space # Do delete - response = self.geoserver_engine.delete_workspace(workspace_id=expected_workspace_id) + response = self.geoserver_engine.delete_workspace( + workspace_id=expected_workspace_id + ) # Should succeed self.assert_valid_response_object(response) - self.assertTrue(response['success']) - self.assertIsNone(response['result']) + self.assertTrue(response["success"]) + self.assertIsNone(response["result"]) def test_create_style(self): # call methods: create_style, list_styles, get_style, delete_style # TEST create_style expected_style_id_name = random_string_generator(10) - expected_style_id = '{}:{}'.format(self.workspace_name, expected_style_id_name) - style_file_name = 'point.sld' + expected_style_id = "{}:{}".format(self.workspace_name, expected_style_id_name) + style_file_name = "point.sld" sld_file_path = os.path.join(self.files_root, style_file_name) # Execute - response = 
self.geoserver_engine.create_style(style_id=expected_style_id, sld_template=sld_file_path) + response = self.geoserver_engine.create_style( + style_id=expected_style_id, sld_template=sld_file_path + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) @@ -921,10 +939,10 @@ def test_create_style(self): self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # Returns list self.assertIsInstance(result, list) @@ -941,19 +959,19 @@ def test_create_style(self): self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) # Properties - self.assertIn('name', r) - self.assertIn(r['name'], expected_style_id) - self.assertIn('workspace', r) - self.assertEqual(self.workspace_name, r['workspace']) + self.assertIn("name", r) + self.assertIn(r["name"], expected_style_id) + self.assertIn("workspace", r) + self.assertEqual(self.workspace_name, r["workspace"]) # TEST delete_style @@ -962,8 +980,8 @@ def test_create_style(self): # Should succeed self.assert_valid_response_object(response) - self.assertTrue(response['success']) - self.assertIsNone(response['result']) + self.assertTrue(response["success"]) + self.assertIsNone(response["result"]) def test_link_and_add_table(self): # call methods: link_sqlalchemy_db_to_geoserver, create_layer_from_postgis_store, list_stores, get_store, @@ -972,31 +990,27 @@ def test_link_and_add_table(self): # TEST link_sqlalchemy_db_to_geoserver store_id_name = random_string_generator(10) - store_id = '{}:{}'.format(self.workspace_name, store_id_name) + store_id = "{}:{}".format(self.workspace_name, store_id_name) sqlalchemy_engine = create_engine(self.pg_url) response = self.geoserver_engine.link_sqlalchemy_db_to_geoserver( - store_id=store_id, - sqlalchemy_engine=sqlalchemy_engine, - docker=True + store_id=store_id, sqlalchemy_engine=sqlalchemy_engine, docker=True ) # Check for success response - self.assertTrue(response['success']) + self.assertTrue(response["success"]) sqlalchemy_engine.dispose() # TEST create_layer_from_postgis_store # Execute response = self.geoserver_engine.create_layer_from_postgis_store( - store_id=store_id, - table=self.pg_table_name, - debug=True + store_id=store_id, table=self.pg_table_name, debug=True ) # Check for success response # TODO: returns an error in PostGIS 3.4: Internal Server Error(500): :java.io.IOException: Error occured calculating bounds for points - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # TEST list_stores @@ -1008,10 +1022,10 @@ def test_link_and_add_table(self): self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # layer group listed self.assertIn(store_id_name, result) @@ -1025,26 +1039,28 @@ def test_link_and_add_table(self): self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = 
response["result"] # Type self.assertIsInstance(r, dict) # Properties - self.assertIn('name', r) - self.assertIn(store_id_name, r['name']) - self.assertIn('workspace', r) - self.assertEqual(self.workspace_name, r['workspace']) + self.assertIn("name", r) + self.assertIn(store_id_name, r["name"]) + self.assertIn("workspace", r) + self.assertEqual(self.workspace_name, r["workspace"]) # TEST delete_store - response = self.geoserver_engine.delete_store(store_id=store_id, purge=True, recurse=True) + response = self.geoserver_engine.delete_store( + store_id=store_id, purge=True, recurse=True + ) # Failure Check self.assert_valid_response_object(response) - self.assertTrue(response['success']) + self.assertTrue(response["success"]) def test_create_postgis_store(self): # call methods: test_create_postgis_store (with table), list_stores, get_store, delete_store @@ -1052,7 +1068,7 @@ def test_create_postgis_store(self): # TEST test_create_postgis_store store_id_name = random_string_generator(10) - store_id = '{}:{}'.format(self.workspace_name, store_id_name) + store_id = "{}:{}".format(self.workspace_name, store_id_name) response = self.geoserver_engine.create_postgis_store( store_id=store_id, @@ -1060,12 +1076,12 @@ def test_create_postgis_store(self): port=self.pg_port, database=self.pg_database, username=self.pg_username, - password=self.pg_password + password=self.pg_password, ) - self.assertTrue(response['success']) - - # Pause to let GeoServer catch up + self.assertTrue(response["success"]) + + # Pause to let GeoServer catch up sleep(5) # TEST list_stores @@ -1077,10 +1093,10 @@ def test_create_postgis_store(self): self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # layer group listed self.assertIn(store_id_name, result) @@ -1094,26 +1110,28 @@ def test_create_postgis_store(self): self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) # Properties - self.assertIn('name', r) - self.assertIn(store_id_name, r['name']) - self.assertIn('workspace', r) - self.assertEqual(self.workspace_name, r['workspace']) + self.assertIn("name", r) + self.assertIn(store_id_name, r["name"]) + self.assertIn("workspace", r) + self.assertEqual(self.workspace_name, r["workspace"]) # TEST delete_store - response = self.geoserver_engine.delete_store(store_id=store_id, purge=True, recurse=True) + response = self.geoserver_engine.delete_store( + store_id=store_id, purge=True, recurse=True + ) # Failure Check self.assert_valid_response_object(response) - self.assertTrue(response['success']) + self.assertTrue(response["success"]) def test_create_sql_view_layer(self): # call methods: create_sql_view, list_resources, list_stores, list_layers @@ -1121,7 +1139,7 @@ def test_create_sql_view_layer(self): # TEST test_create_postgis_store store_id_name = random_string_generator(10) - store_id = '{}:{}'.format(self.workspace_name, store_id_name) + store_id = "{}:{}".format(self.workspace_name, store_id_name) response = self.geoserver_engine.create_postgis_store( store_id=store_id, @@ -1131,26 +1149,24 @@ def test_create_sql_view_layer(self): username=self.pg_username, password=self.pg_password, ) - self.assertTrue(response['success']) - + self.assertTrue(response["success"]) + # Pause to let 
GeoServer catch up before continuing sleep(5) # Create layer from postgis store response = self.geoserver_engine.create_layer_from_postgis_store( - store_id=store_id, - table=self.pg_table_name, - debug=True + store_id=store_id, table=self.pg_table_name, debug=True ) # TODO: returns an error in PostGIS 3.4: Internal Server Error(500): :java.io.IOException: Error occured calculating bounds for points - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Pause to let GeoServer catch up before continuing sleep(5) feature_type_name = random_string_generator(10) - postgis_store_id = '{}:{}'.format(self.workspace_name, store_id_name) + postgis_store_id = "{}:{}".format(self.workspace_name, store_id_name) sql = "SELECT * FROM {}".format(self.pg_table_name) geometry_type = self.geometry_type @@ -1160,19 +1176,19 @@ def test_create_sql_view_layer(self): geometry_type=geometry_type, srid=self.srid, sql=sql, - default_style='points', + default_style="points", ) - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) - self.assertIn('name', r) - self.assertIn(feature_type_name, r['name']) + self.assertIn("name", r) + self.assertIn(feature_type_name, r["name"]) # TEST list_resources @@ -1183,10 +1199,10 @@ def test_create_sql_view_layer(self): self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # Returns list self.assertIsInstance(result, list) @@ -1198,31 +1214,32 @@ def test_create_sql_view_layer(self): # Execute # Geoserver uses the store_id as the layer/resource name (not the filename) - resource_id_name = '{}:{}'.format(self.workspace_name, feature_type_name) + resource_id_name = "{}:{}".format(self.workspace_name, feature_type_name) response = self.geoserver_engine.get_resource(resource_id=resource_id_name) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) # Properties - self.assertIn('name', r) - self.assertEqual(feature_type_name, r['name']) - self.assertIn(feature_type_name, r['wfs']['shapefile']) + self.assertIn("name", r) + self.assertEqual(feature_type_name, r["name"]) + self.assertIn(feature_type_name, r["wfs"]["shapefile"]) # TEST delete_resource # Execute # This case the resource id is the same as the store id. 
- response = self.geoserver_engine.delete_resource(resource_id=resource_id_name, - store_id=store_id_name) + response = self.geoserver_engine.delete_resource( + resource_id=resource_id_name, store_id=store_id_name + ) # Validate response object self.assert_valid_response_object(response) @@ -1232,8 +1249,8 @@ def test_create_sql_view_layer(self): # self.assertTrue(response['success']) -if __name__ == '__main__': +if __name__ == "__main__": suite = unittest.TestSuite() - suite.addTest(GeoServerDatasetEngineEnd2EndTests('test_create_style')) + suite.addTest(GeoServerDatasetEngineEnd2EndTests("test_create_style")) runner = unittest.TextTestRunner() runner.run(suite) diff --git a/tests/unit_tests/test_ckan_engine.py b/tests/unit_tests/test_ckan_engine.py index c2c7a00..2167087 100644 --- a/tests/unit_tests/test_ckan_engine.py +++ b/tests/unit_tests/test_ckan_engine.py @@ -15,29 +15,31 @@ from ..test_config import TEST_CKAN_DATASET_SERVICE except ImportError: - print('ERROR: To perform tests, you must create a file in the "tests" package called "test_config.py". In this file' - 'provide a dictionary called "TEST_CKAN_DATASET_SERVICE" with keys "API_ENDPOINT" and "APIKEY".') + print( + 'ERROR: To perform tests, you must create a file in the "tests" package called "test_config.py". In this file' + 'provide a dictionary called "TEST_CKAN_DATASET_SERVICE" with keys "API_ENDPOINT" and "APIKEY".' + ) exit(1) def random_string_generator(size): chars = string.ascii_lowercase + string.digits - return ''.join(random.choice(chars) for _ in range(size)) + return "".join(random.choice(chars) for _ in range(size)) class MockJsonResponse(object): def __init__(self, status_code, success=True, result=None, json_format=True): self.status_code = status_code data = dict() - data['success'] = success + data["success"] = success if not success: - data['error'] = {'message': 'failed message'} - data['result'] = result + data["error"] = {"message": "failed message"} + data["result"] = result # data['get'] = get_data if json_format: self.text = json.dumps(data) else: - self.text = 'Not a JSON object' + self.text = "Not a JSON object" # self.encode = encode @@ -56,81 +58,85 @@ class TestCkanDatasetEngine(unittest.TestCase): def setUp(self): # Create Test Engine - self.engine = CkanDatasetEngine(endpoint=TEST_CKAN_DATASET_SERVICE['ENDPOINT'], - apikey=TEST_CKAN_DATASET_SERVICE['APIKEY']) + self.engine = CkanDatasetEngine( + endpoint=TEST_CKAN_DATASET_SERVICE["ENDPOINT"], + apikey=TEST_CKAN_DATASET_SERVICE["APIKEY"], + ) # Test Dataset Name self.test_dataset_name = random_string_generator(10) # Test Resource Variables self.test_resource_name = random_string_generator(10) - self.test_resource_url = 'http://home.byu.edu' + self.test_resource_url = "http://home.byu.edu" # File paths self.tests_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) - self.files_path = os.path.join(self.tests_path, 'files') - self.support_path = os.path.join(self.tests_path, 'support') + self.files_path = os.path.join(self.tests_path, "files") + self.support_path = os.path.join(self.tests_path, "support") def tearDown(self): pass - @mock.patch('tethys_dataset_services.engines.ckan_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.ckan_engine.requests.post") def test_list_datasets_defaults(self, mock_post): - mock_post.return_value = MockJsonResponse(200, result='Datasetname') + mock_post.return_value = MockJsonResponse(200, result="Datasetname") # Execute result = self.engine.list_datasets() # Verify Success - 
self.assertTrue(result['success']) - self.assertIn('Datasetname', result['result']) + self.assertTrue(result["success"]) + self.assertIn("Datasetname", result["result"]) - @mock.patch('tethys_dataset_services.engines.ckan_engine.log') - @mock.patch('tethys_dataset_services.engines.ckan_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.ckan_engine.log") + @mock.patch("tethys_dataset_services.engines.ckan_engine.requests.post") def test_list_datasets_defaults_no_json(self, mock_post, mock_log): - mock_post.return_value = MockJsonResponse(201, result='Datasetname', json_format=False) + mock_post.return_value = MockJsonResponse( + 201, result="Datasetname", json_format=False + ) # Execute self.engine.list_datasets() mock_log.exception.assert_called() call_args = mock_log.exception.call_args_list - self.assertIn('Status Code 201', call_args[0][0][0]) + self.assertIn("Status Code 201", call_args[0][0][0]) - @mock.patch('tethys_dataset_services.engines.ckan_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.ckan_engine.requests.post") def test_list_datasets_with_resources(self, mock_post): - mock_post.return_value = MockJsonResponse(200, result='Datasetname') + mock_post.return_value = MockJsonResponse(200, result="Datasetname") # Execute result = self.engine.list_datasets(with_resources=True) # Verify Success - self.assertTrue(result['success']) - self.assertIn('Datasetname', result['result']) + self.assertTrue(result["success"]) + self.assertIn("Datasetname", result["result"]) - @mock.patch('tethys_dataset_services.engines.ckan_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.ckan_engine.requests.post") def test_list_datasets_with_params(self, mock_post): - data_list = ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10'] + data_list = ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10"] mock_post.return_value = MockJsonResponse(200, result=data_list) # Setup limit = 10 - number_all = len(self.engine.list_datasets()['result']) + number_all = len(self.engine.list_datasets()["result"]) # Execute twice with offsets different - data_list = ['2', '3', '4', '5', '6', '7', '8', '9', '10'] + data_list = ["2", "3", "4", "5", "6", "7", "8", "9", "10"] mock_post.return_value = MockJsonResponse(200, result=data_list) result_page_1 = self.engine.list_datasets(limit=limit, offset=1, console=False) - data_list = ['3', '4', '5', '6', '7', '8', '9', '10'] + data_list = ["3", "4", "5", "6", "7", "8", "9", "10"] mock_post.return_value = MockJsonResponse(200, result=data_list) result_page_2 = self.engine.list_datasets(limit=limit, offset=2) # Verify success - self.assertTrue(result_page_1['success']) - self.assertTrue(result_page_2['success']) + self.assertTrue(result_page_1["success"]) + self.assertTrue(result_page_2["success"]) # Count the results - page_1_count = len(result_page_1['result']) - page_2_count = len(result_page_2['result']) + page_1_count = len(result_page_1["result"]) + page_2_count = len(result_page_2["result"]) # Verify count (should be less than or equal to limit) self.assertLessEqual(page_1_count, limit) @@ -140,339 +146,404 @@ def test_list_datasets_with_params(self, mock_post): if number_all > 5: self.assertNotEqual(result_page_1, result_page_2) - @mock.patch('tethys_dataset_services.engines.ckan_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.ckan_engine.requests.post") def test_search_resources(self, mock_post): - result_data = {'results': [{'format': 'ZIP'}, {'format': 'ZIP'}]} + result_data = {"results": 
[{"format": "ZIP"}, {"format": "ZIP"}]} mock_post.return_value = MockJsonResponse(200, result=result_data) # Execute - result = self.engine.search_resources(query={'format': 'zip', 'contents': 'html'}) + result = self.engine.search_resources( + query={"format": "zip", "contents": "html"} + ) # Verify Success - self.assertTrue(result['success']) + self.assertTrue(result["success"]) # Check search results if they exist - search_results = result['result']['results'] + search_results = result["result"]["results"] if len(search_results) > 1: for result in search_results: - self.assertIn('zip', result['format'].lower()) + self.assertIn("zip", result["format"].lower()) - @mock.patch('tethys_dataset_services.engines.ckan_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.ckan_engine.requests.post") def test_search_datasets(self, mock_post): - version = '1.0' - result_data = {'results': [{'version': version}, {'version': version}]} + version = "1.0" + result_data = {"results": [{"version": version}, {"version": version}]} mock_post.return_value = MockJsonResponse(200, result=result_data) # Execute - result = self.engine.search_datasets(query={'version': version}, console=False) + result = self.engine.search_datasets(query={"version": version}, console=False) # Verify Success - self.assertTrue(result['success']) + self.assertTrue(result["success"]) # Check search results if they exist - search_results = result['result']['results'] + search_results = result["result"]["results"] if len(search_results) > 1: for result in search_results: - self.assertIn('version', result) - self.assertEqual(result['version'], version) + self.assertIn("version", result) + self.assertEqual(result["version"], version) - @mock.patch('tethys_dataset_services.engines.ckan_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.ckan_engine.requests.post") def test_search_datasets_filtered(self, mock_post): - version = '1.0' - result_data = {'results': [{'version': version}, {'version': version}]} + version = "1.0" + result_data = {"results": [{"version": version}, {"version": version}]} mock_post.return_value = MockJsonResponse(200, result=result_data) # Execute - result = self.engine.search_datasets(filtered_query={'version': version}, console=False) + result = self.engine.search_datasets( + filtered_query={"version": version}, console=False + ) # Verify Success - self.assertTrue(result['success']) + self.assertTrue(result["success"]) # Check search results if they exist - search_results = result['result']['results'] + search_results = result["result"]["results"] if len(search_results) > 1: for result in search_results: - self.assertIn('version', result) - self.assertEqual(result['version'], version) + self.assertIn("version", result) + self.assertEqual(result["version"], version) - @mock.patch('tethys_dataset_services.engines.ckan_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.ckan_engine.requests.post") def test_search_datasets_no_queries(self, mock_post): - version = '1.0' - result_data = {'results': [{'version': version}, {'version': version}]} + version = "1.0" + result_data = {"results": [{"version": version}, {"version": version}]} mock_post.return_value = MockJsonResponse(200, result=result_data) # Execute self.assertRaises(Exception, self.engine.search_datasets, console=False) - @mock.patch('tethys_dataset_services.engines.ckan_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.ckan_engine.requests.post") def test_create_dataset(self, 
mock_post): # Setup new_dataset_name = random_string_generator(10) - result_data = {'name': new_dataset_name} + result_data = {"name": new_dataset_name} mock_post.return_value = MockJsonResponse(200, result=result_data) # Execute result = self.engine.create_dataset(name=new_dataset_name) # Verify Success - self.assertTrue(result['success']) + self.assertTrue(result["success"]) # Should return the new one - self.assertEqual(new_dataset_name, result['result']['name']) + self.assertEqual(new_dataset_name, result["result"]["name"]) - @mock.patch('tethys_dataset_services.engines.ckan_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.ckan_engine.requests.post") def test_create_resource_url(self, mock_post): # Setup new_resource_name = random_string_generator(5) - new_resource_url = 'http://home.byu.edu' - result_data = {'name': new_resource_name, 'url': new_resource_url, - 'id': self.test_dataset_name} + new_resource_url = "http://home.byu.edu" + result_data = { + "name": new_resource_name, + "url": new_resource_url, + "id": self.test_dataset_name, + } mock_post.return_value = MockJsonResponse(200, result=result_data) # Execute - result = self.engine.create_resource(dataset_id=self.test_dataset_name, - url=new_resource_url, - name=new_resource_name) + result = self.engine.create_resource( + dataset_id=self.test_dataset_name, + url=new_resource_url, + name=new_resource_name, + ) # Verify Success - self.assertTrue(result['success']) + self.assertTrue(result["success"]) # Verify name and url - self.assertEqual(new_resource_name, result['result']['name']) - self.assertEqual(new_resource_url, result['result']['url']) + self.assertEqual(new_resource_name, result["result"]["name"]) + self.assertEqual(new_resource_url, result["result"]["url"]) def test_create_resource_url_file(self): - file_name = 'upload_test.txt' + file_name = "upload_test.txt" file_to_upload = os.path.join(self.support_path, file_name) # Setup - new_resource_url = 'http://home.byu.edu' + new_resource_url = "http://home.byu.edu" # Execute file=file_to_upload - self.assertRaises(IOError, self.engine.create_resource, dataset_id=self.test_dataset_name, - url=new_resource_url, - file=file_to_upload) + self.assertRaises( + IOError, + self.engine.create_resource, + dataset_id=self.test_dataset_name, + url=new_resource_url, + file=file_to_upload, + ) def test_create_resource_no_url_file(self): # Execute - self.assertRaises(IOError, self.engine.create_resource, dataset_id=self.test_dataset_name) + self.assertRaises( + IOError, self.engine.create_resource, dataset_id=self.test_dataset_name + ) def test_create_resource_file_not_exist(self): - file_name = 'upload_test1.txt' + file_name = "upload_test1.txt" file_to_upload = os.path.join(self.support_path, file_name) # Execute file=file_to_upload - self.assertRaises(IOError, self.engine.create_resource, dataset_id=self.test_dataset_name, - file=file_to_upload) - - @mock.patch('tethys_dataset_services.engines.ckan_engine.requests.post') + self.assertRaises( + IOError, + self.engine.create_resource, + dataset_id=self.test_dataset_name, + file=file_to_upload, + ) + + @mock.patch("tethys_dataset_services.engines.ckan_engine.requests.post") def test_create_resource_file_upload(self, mock_post): # Prepare - file_name = 'upload_test.txt' + file_name = "upload_test.txt" file_to_upload = os.path.join(self.support_path, file_name) - result_data = {'name': file_name, 'url_type': 'upload', - 'id': self.test_dataset_name} + result_data = { + "name": file_name, + "url_type": "upload", + 
"id": self.test_dataset_name, + } mock_post.return_value = MockJsonResponse(200, result=result_data) # Execute - result = self.engine.create_resource(dataset_id=self.test_dataset_name, file=file_to_upload, console=False) + result = self.engine.create_resource( + dataset_id=self.test_dataset_name, file=file_to_upload, console=False + ) # Verify Success - self.assertTrue(result['success'], result) + self.assertTrue(result["success"], result) # Verify name and url_type (which should be upload if file upload) - self.assertEqual(result['result']['name'], 'upload_test.txt') - self.assertEqual(result['result']['url_type'], 'upload') + self.assertEqual(result["result"]["name"], "upload_test.txt") + self.assertEqual(result["result"]["url_type"], "upload") - @mock.patch('tethys_dataset_services.engines.ckan_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.ckan_engine.requests.post") def test_create_resource_file_upload_no_ext(self, mock_post): # Prepare - file_name = 'upload_test.txt' - upload_file_name = 'testfile' + file_name = "upload_test.txt" + upload_file_name = "testfile" file_to_upload = os.path.join(self.support_path, file_name) - result_data = {'name': upload_file_name, 'url_type': 'upload', 'id': self.test_dataset_name} + result_data = { + "name": upload_file_name, + "url_type": "upload", + "id": self.test_dataset_name, + } mock_post.return_value = MockJsonResponse(200, result=result_data) # Execute - result = self.engine.create_resource(dataset_id=self.test_dataset_name, - file=file_to_upload, - name=upload_file_name, - console=False) + result = self.engine.create_resource( + dataset_id=self.test_dataset_name, + file=file_to_upload, + name=upload_file_name, + console=False, + ) # Verify Success - self.assertTrue(result['success'], result) + self.assertTrue(result["success"], result) # Verify name and url_type (which should be upload if file upload) - self.assertEqual(upload_file_name, result['result']['name']) - self.assertEqual(result['result']['url_type'], 'upload') + self.assertEqual(upload_file_name, result["result"]["name"]) + self.assertEqual(result["result"]["url_type"], "upload") - @mock.patch('tethys_dataset_services.engines.ckan_engine.pprint') - @mock.patch('tethys_dataset_services.engines.ckan_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.ckan_engine.pprint") + @mock.patch("tethys_dataset_services.engines.ckan_engine.requests.post") def test_get_dataset(self, mock_post, _): - result_data = {'name': self.test_dataset_name, 'id': self.test_dataset_name} + result_data = {"name": self.test_dataset_name, "id": self.test_dataset_name} mock_post.return_value = MockJsonResponse(200, result=result_data) # Execute - result = self.engine.get_dataset(dataset_id=self.test_dataset_name, console=True) + result = self.engine.get_dataset( + dataset_id=self.test_dataset_name, console=True + ) # Verify Success - self.assertTrue(result['success']) + self.assertTrue(result["success"]) # Verify Name - self.assertEqual(result['result']['name'], self.test_dataset_name) + self.assertEqual(result["result"]["name"], self.test_dataset_name) - @mock.patch('tethys_dataset_services.engines.ckan_engine.pprint') - @mock.patch('tethys_dataset_services.engines.ckan_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.ckan_engine.pprint") + @mock.patch("tethys_dataset_services.engines.ckan_engine.requests.post") def test_get_resource(self, mock_post, mock_pprint): - result_data = {'name': self.test_dataset_name, 'url': self.test_resource_url} + 
result_data = {"name": self.test_dataset_name, "url": self.test_resource_url} mock_post.return_value = MockJsonResponse(200, result=result_data) # Execute - result = self.engine.get_resource(resource_id=self.test_resource_name, console=True) + result = self.engine.get_resource( + resource_id=self.test_resource_name, console=True + ) # Verify Success - self.assertTrue(result['success']) + self.assertTrue(result["success"]) # Verify Properties - self.assertEqual(result['result']['url'], self.test_resource_url) + self.assertEqual(result["result"]["url"], self.test_resource_url) mock_pprint.pprint.assert_called() - @mock.patch('tethys_dataset_services.engines.ckan_engine.log') - @mock.patch('tethys_dataset_services.engines.ckan_engine.pprint') - @mock.patch('tethys_dataset_services.engines.ckan_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.ckan_engine.log") + @mock.patch("tethys_dataset_services.engines.ckan_engine.pprint") + @mock.patch("tethys_dataset_services.engines.ckan_engine.requests.post") def test_get_resource_console_error(self, mock_post, mock_pprint, mock_log): - mock_pprint.pprint.side_effect = Exception('Fake Exception') + mock_pprint.pprint.side_effect = Exception("Fake Exception") - result_data = {'name': self.test_dataset_name, 'url': self.test_resource_url} + result_data = {"name": self.test_dataset_name, "url": self.test_resource_url} mock_post.return_value = MockJsonResponse(200, result=result_data) # Execute - result = self.engine.get_resource(resource_id=self.test_resource_name, console=True) + result = self.engine.get_resource( + resource_id=self.test_resource_name, console=True + ) # Verify Success - self.assertTrue(result['success']) + self.assertTrue(result["success"]) # Verify Properties - self.assertEqual(result['result']['url'], self.test_resource_url) + self.assertEqual(result["result"]["url"], self.test_resource_url) mock_pprint.pprint.assert_called() mock_log.exception.assert_called() - @mock.patch('tethys_dataset_services.engines.ckan_engine.log') - @mock.patch('tethys_dataset_services.engines.ckan_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.ckan_engine.log") + @mock.patch("tethys_dataset_services.engines.ckan_engine.requests.post") def test_get_resource_get_error(self, mock_post, mock_log): - result_data = {'name': self.test_dataset_name, 'url': self.test_resource_url} - mock_post.return_value = MockJsonResponse(200, result=result_data, success=False) + result_data = {"name": self.test_dataset_name, "url": self.test_resource_url} + mock_post.return_value = MockJsonResponse( + 200, result=result_data, success=False + ) # Execute - result = self.engine.get_resource(resource_id=self.test_resource_name, console=True) + result = self.engine.get_resource( + resource_id=self.test_resource_name, console=True + ) # Verify Success - self.assertFalse(result['success']) + self.assertFalse(result["success"]) # Verify Properties - self.assertIn('failed message', result['error']['message']) - self.assertEqual(result['result']['url'], self.test_resource_url) + self.assertIn("failed message", result["error"]["message"]) + self.assertEqual(result["result"]["url"], self.test_resource_url) mock_log.error.assert_called() - @mock.patch('tethys_dataset_services.engines.ckan_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.ckan_engine.requests.post") def test_update_dataset(self, mock_post): # Setup - test_version = '2.0' - result_data = {'version': test_version, 'resources': self.test_resource_name, - 'tags': 
'tag_test'} + test_version = "2.0" + result_data = { + "version": test_version, + "resources": self.test_resource_name, + "tags": "tag_test", + } mock_post.return_value = MockJsonResponse(200, result=result_data) # Execute - result = self.engine.update_dataset(dataset_id=self.test_dataset_name, version=test_version) + result = self.engine.update_dataset( + dataset_id=self.test_dataset_name, version=test_version + ) # Verify Success - self.assertTrue(result['success']) + self.assertTrue(result["success"]) # Verify new version property - self.assertEqual(result['result']['version'], test_version) - self.assertEqual(result['result']['resources'], self.test_resource_name,) - self.assertEqual(result['result']['tags'], 'tag_test') - - @mock.patch('tethys_dataset_services.engines.ckan_engine.requests.post') + self.assertEqual(result["result"]["version"], test_version) + self.assertEqual( + result["result"]["resources"], + self.test_resource_name, + ) + self.assertEqual(result["result"]["tags"], "tag_test") + + @mock.patch("tethys_dataset_services.engines.ckan_engine.requests.post") def test_update_resource_property_change(self, mock_post): # Setup - new_format = 'web' - result_data = {'format': new_format, 'url': self.test_resource_url} + new_format = "web" + result_data = {"format": new_format, "url": self.test_resource_url} mock_post.return_value = MockJsonResponse(200, result=result_data) # Execute - result = self.engine.update_resource(resource_id=self.test_resource_name, format=new_format) + result = self.engine.update_resource( + resource_id=self.test_resource_name, format=new_format + ) # Verify Success - self.assertTrue(result['success']) + self.assertTrue(result["success"]) # Verify new format - self.assertEqual(result['result']['format'], new_format) - self.assertEqual(result['result']['url'], self.test_resource_url) + self.assertEqual(result["result"]["format"], new_format) + self.assertEqual(result["result"]["url"], self.test_resource_url) - @mock.patch('tethys_dataset_services.engines.ckan_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.ckan_engine.requests.post") def test_update_resource_url_change(self, mock_post): # Setup - new_url = 'http://www.utah.edu' - result_data = {'url': new_url} + new_url = "http://www.utah.edu" + result_data = {"url": new_url} mock_post.return_value = MockJsonResponse(200, result=result_data) # Execute - result = self.engine.update_resource(resource_id=self.test_resource_name, url=new_url) + result = self.engine.update_resource( + resource_id=self.test_resource_name, url=new_url + ) # Verify Success - self.assertTrue(result['success']) + self.assertTrue(result["success"]) # Verify New URL Property - self.assertEqual(result['result']['url'], new_url) + self.assertEqual(result["result"]["url"], new_url) - @mock.patch('tethys_dataset_services.engines.ckan_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.ckan_engine.requests.post") def test_update_resource_file_upload(self, mock_post): # Setup - file_name = 'upload_test.txt' + file_name = "upload_test.txt" file_to_upload = os.path.join(self.support_path, file_name) - result_data = {'name': file_name, 'id': self.test_dataset_name, - 'url': self.test_resource_url} + result_data = { + "name": file_name, + "id": self.test_dataset_name, + "url": self.test_resource_url, + } mock_post.return_value = MockJsonResponse(200, result=result_data) # Execute - result = self.engine.update_resource(resource_id=self.test_resource_name, file=file_to_upload, console=False) + result = 
self.engine.update_resource( + resource_id=self.test_resource_name, file=file_to_upload, console=False + ) # Verify Success - self.assertTrue(result['success']) + self.assertTrue(result["success"]) # Verify Name (should be the same as the file uploaded by default) - self.assertEqual(result['result']['name'], file_name) - self.assertEqual(result['result']['url'], self.test_resource_url) + self.assertEqual(result["result"]["name"], file_name) + self.assertEqual(result["result"]["url"], self.test_resource_url) # URL should be different than original when file upload executes # self.assertNotEqual(result['result']['url'], self.test_resource['url']) def test_update_resource_url_file(self): - file_name = 'upload_test.txt' + file_name = "upload_test.txt" file_to_upload = os.path.join(self.support_path, file_name) # Setup - new_resource_url = 'http://home.byu.edu' + new_resource_url = "http://home.byu.edu" # Execute file=file_to_upload - self.assertRaises(IOError, self.engine.update_resource, resource_id=self.test_resource_name, - url=new_resource_url, - file=file_to_upload) + self.assertRaises( + IOError, + self.engine.update_resource, + resource_id=self.test_resource_name, + url=new_resource_url, + file=file_to_upload, + ) def test_update_resource_file_not_exist(self): - file_name = 'upload_test1.txt' + file_name = "upload_test1.txt" file_to_upload = os.path.join(self.support_path, file_name) # Execute file=file_to_upload - self.assertRaises(IOError, self.engine.update_resource, resource_id=self.test_resource_name, - file=file_to_upload) - - @mock.patch('tethys_dataset_services.engines.ckan_engine.requests.post') + self.assertRaises( + IOError, + self.engine.update_resource, + resource_id=self.test_resource_name, + file=file_to_upload, + ) + + @mock.patch("tethys_dataset_services.engines.ckan_engine.requests.post") def test_delete_resource(self, mock_post): result_data = None mock_post.return_value = MockJsonResponse(200, result=result_data) @@ -480,35 +551,38 @@ def test_delete_resource(self, mock_post): result = self.engine.delete_resource(resource_id=self.test_resource_name) # Verify Success - self.assertTrue(result['success']) + self.assertTrue(result["success"]) # Delete requests should return nothing - self.assertEqual(result['result'], None) + self.assertEqual(result["result"], None) - @mock.patch('tethys_dataset_services.engines.ckan_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.ckan_engine.requests.post") def test_delete_dataset(self, mock_post): result_data = None mock_post.return_value = MockJsonResponse(200, result=result_data) # Execute - result = self.engine.delete_dataset(dataset_id=self.test_dataset_name, console=False) + result = self.engine.delete_dataset( + dataset_id=self.test_dataset_name, console=False + ) # Confirm Success - self.assertTrue(result['success']) + self.assertTrue(result["success"]) # Delete requests should return nothing - self.assertEqual(result['result'], None) + self.assertEqual(result["result"], None) - @mock.patch('tethys_dataset_services.engines.ckan_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.ckan_engine.requests.post") def test_download_resource(self, mock_post): location = self.files_path - local_file_name = 'test_resource.test' + local_file_name = "test_resource.test" location_final = os.path.join(self.files_path, local_file_name) - result_data = {'url': self.test_resource_url} + result_data = {"url": self.test_resource_url} mock_post.return_value = MockJsonResponse(200, result=result_data) - result 
= self.engine.download_resource(self.test_resource_name, location=location, - local_file_name=local_file_name) + result = self.engine.download_resource( + self.test_resource_name, location=location, local_file_name=local_file_name + ) # Result will return the local file path. Check here self.assertEqual(location_final, result) @@ -517,16 +591,17 @@ def test_download_resource(self, mock_post): if os.path.isfile(location_final): os.remove(location_final) - @mock.patch('tethys_dataset_services.engines.ckan_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.ckan_engine.requests.post") def test_download_resource_no_location(self, mock_post): - local_file_name = 'test_resource.test' - location_check = os.path.join('./', local_file_name) + local_file_name = "test_resource.test" + location_check = os.path.join("./", local_file_name) - result_data = {'url': self.test_resource_url} + result_data = {"url": self.test_resource_url} mock_post.return_value = MockJsonResponse(200, result=result_data) - result = self.engine.download_resource(self.test_resource_name, - local_file_name=local_file_name) + result = self.engine.download_resource( + self.test_resource_name, local_file_name=local_file_name + ) # Result will return the local file path. Check here self.assertEqual(location_check, result) @@ -535,44 +610,52 @@ def test_download_resource_no_location(self, mock_post): if os.path.isfile(location_check): os.remove(location_check) - @mock.patch('tethys_dataset_services.engines.ckan_engine.CkanDatasetEngine.get_resource') + @mock.patch( + "tethys_dataset_services.engines.ckan_engine.CkanDatasetEngine.get_resource" + ) def test_download_resource_not_exist(self, mock_ckan): - mock_ckan.return_value = {'success': False} - self.assertRaises(Exception, self.engine.download_resource, self.test_dataset_name) + mock_ckan.return_value = {"success": False} + self.assertRaises( + Exception, self.engine.download_resource, self.test_dataset_name + ) mock_ckan.assert_called_with(self.test_dataset_name, console=False) - @mock.patch('sys.stdout', new_callable=StringIO) - @mock.patch('tethys_dataset_services.engines.ckan_engine.requests.get') - @mock.patch('tethys_dataset_services.engines.ckan_engine.requests.post') - def test_download_resource_request_get_exception(self, mock_post, mock_get, mock_print): - mock_get.side_effect = Exception('Requests.get Exception') + @mock.patch("sys.stdout", new_callable=StringIO) + @mock.patch("tethys_dataset_services.engines.ckan_engine.requests.get") + @mock.patch("tethys_dataset_services.engines.ckan_engine.requests.post") + def test_download_resource_request_get_exception( + self, mock_post, mock_get, mock_print + ): + mock_get.side_effect = Exception("Requests.get Exception") location = self.files_path - local_file_name = 'test_resource.test' + local_file_name = "test_resource.test" - result_data = {'url': self.test_resource_url} + result_data = {"url": self.test_resource_url} mock_post.return_value = MockJsonResponse(200, result=result_data) - self.engine.download_resource(self.test_resource_name, location=location, - local_file_name=local_file_name) + self.engine.download_resource( + self.test_resource_name, location=location, local_file_name=local_file_name + ) output = mock_print.getvalue() # check results - self.assertIn('Requests.get Exception', output) + self.assertIn("Requests.get Exception", output) - @mock.patch('tethys_dataset_services.engines.ckan_engine.warnings') - @mock.patch('tethys_dataset_services.engines.ckan_engine.requests.post') + 
@mock.patch("tethys_dataset_services.engines.ckan_engine.warnings") + @mock.patch("tethys_dataset_services.engines.ckan_engine.requests.post") def test_download_resouce(self, mock_post, mock_warnings): location = self.files_path - local_file_name = 'test_resource.test' + local_file_name = "test_resource.test" location_final = os.path.join(self.files_path, local_file_name) - result_data = {'url': self.test_resource_url} + result_data = {"url": self.test_resource_url} mock_post.return_value = MockJsonResponse(200, result=result_data) - result = self.engine.download_resouce(self.test_resource_name, location=location, - local_file_name=local_file_name) + result = self.engine.download_resouce( + self.test_resource_name, location=location, local_file_name=local_file_name + ) # Result will return None instead of the local path file. self.assertEqual(None, result) @@ -581,7 +664,7 @@ def test_download_resouce(self, mock_post, mock_warnings): if os.path.isfile(location_final): os.remove(location_final) else: - raise AssertionError('Resource has not been downloaded') + raise AssertionError("Resource has not been downloaded") # Delete the file if os.path.isfile(location_final): @@ -589,19 +672,28 @@ def test_download_resouce(self, mock_post, mock_warnings): mock_warnings.warn.assert_called() - @mock.patch('tethys_dataset_services.engines.ckan_engine.pprint') - @mock.patch('tethys_dataset_services.engines.ckan_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.ckan_engine.pprint") + @mock.patch("tethys_dataset_services.engines.ckan_engine.requests.post") def test_download_dataset(self, mock_post, _): location = self.files_path - location_final = os.path.join(self.files_path, 'resource1.txt') + location_final = os.path.join(self.files_path, "resource1.txt") result_check = [location_final] - result_data = {'resources': [{'name': 'resource1', 'id': 'resource2', - 'format': 'txt', 'url': self.test_resource_url}]} + result_data = { + "resources": [ + { + "name": "resource1", + "id": "resource2", + "format": "txt", + "url": self.test_resource_url, + } + ] + } mock_post.return_value = MockJsonResponse(200, result=result_data) - result = self.engine.download_dataset(self.test_dataset_name, location=location, - console=True) + result = self.engine.download_dataset( + self.test_dataset_name, location=location, console=True + ) # Result will return list of the local file path. 
Check here self.assertEqual(result_check, result) @@ -610,45 +702,53 @@ def test_download_dataset(self, mock_post, _): if os.path.isfile(location_final): os.remove(location_final) - @mock.patch('tethys_dataset_services.engines.ckan_engine.CkanDatasetEngine.get_dataset') + @mock.patch( + "tethys_dataset_services.engines.ckan_engine.CkanDatasetEngine.get_dataset" + ) def test_download_dataset_not_exist(self, mock_ckan): - mock_ckan.return_value = {'success': False} - self.assertRaises(Exception, self.engine.download_dataset, self.test_dataset_name) + mock_ckan.return_value = {"success": False} + self.assertRaises( + Exception, self.engine.download_dataset, self.test_dataset_name + ) mock_ckan.assert_called_with(self.test_dataset_name, console=False) def test_type(self): response = self.engine.type - expected_response = 'CKAN' + expected_response = "CKAN" # Check Response self.assertEqual(response, expected_response) def test_prepare_request(self): - method = 'resource_show' - result = self.engine._prepare_request(method, apikey=TEST_CKAN_DATASET_SERVICE['APIKEY']) + method = "resource_show" + result = self.engine._prepare_request( + method, apikey=TEST_CKAN_DATASET_SERVICE["APIKEY"] + ) # Check Result, result[0] is url, result[1] is data_dict, result[2] is headers - self.assertIn(TEST_CKAN_DATASET_SERVICE['APIKEY'], result[2]['X-CKAN-API-Key']) + self.assertIn(TEST_CKAN_DATASET_SERVICE["APIKEY"], result[2]["X-CKAN-API-Key"]) self.assertIn(method, result[0]) - @mock.patch('tethys_dataset_services.engines.ckan_engine.requests.get') + @mock.patch("tethys_dataset_services.engines.ckan_engine.requests.get") def test_validate(self, mock_get): mock_get.side_effect = requests.exceptions.MissingSchema self.assertRaises(AssertionError, self.engine.validate) - @mock.patch('tethys_dataset_services.engines.ckan_engine.requests.get') + @mock.patch("tethys_dataset_services.engines.ckan_engine.requests.get") def test_validate_status_code(self, mock_get): - self.engine = CkanDatasetEngine(endpoint="http://localhost:5000/api/3/action", - apikey=TEST_CKAN_DATASET_SERVICE['APIKEY']) + self.engine = CkanDatasetEngine( + endpoint="http://localhost:5000/api/3/action", + apikey=TEST_CKAN_DATASET_SERVICE["APIKEY"], + ) - result_data = {'resources': self.test_resource_name, 'version': '1.0'} + result_data = {"resources": self.test_resource_name, "version": "1.0"} mock_get.return_value = MockJsonResponse(201, result=result_data) self.assertRaises(AssertionError, self.engine.validate) - @mock.patch('tethys_dataset_services.engines.ckan_engine.requests.get') + @mock.patch("tethys_dataset_services.engines.ckan_engine.requests.get") def test_validate_no_version(self, mock_get): - mock_get.return_value = MockResponse(200, json='') + mock_get.return_value = MockResponse(200, json="") self.assertRaises(AssertionError, self.engine.validate) diff --git a/tests/unit_tests/test_geoserver_engine.py b/tests/unit_tests/test_geoserver_engine.py index 11c5f80..3d7dff1 100644 --- a/tests/unit_tests/test_geoserver_engine.py +++ b/tests/unit_tests/test_geoserver_engine.py @@ -14,7 +14,7 @@ def random_string_generator(size): chars = string.ascii_lowercase + string.digits - return ''.join(random.choice(chars) for _ in range(size)) + return "".join(random.choice(chars) for _ in range(size)) def mock_get_style(name, workspace=None): @@ -24,28 +24,32 @@ def mock_get_style(name, workspace=None): def mock_get_resource(name, **kwargs): - if 'workspace' or 'store' in kwargs: + if "workspace" or "store" in kwargs: mock_resource = 
mock.NonCallableMagicMock() mock_resource.name = name - if 'workspace' in kwargs: - mock_resource.workspace = kwargs['workspace'] - if 'store' in kwargs: - mock_resource.store = kwargs['store'] + if "workspace" in kwargs: + mock_resource.workspace = kwargs["workspace"] + if "store" in kwargs: + mock_resource.store = kwargs["store"] return mock_resource else: - raise AssertionError('Did not get expected keyword arguments: {}'.format(list(kwargs))) + raise AssertionError( + "Did not get expected keyword arguments: {}".format(list(kwargs)) + ) def mock_get_resource_create_postgis_feature_resource(name, **kwargs): - if 'workspace' in kwargs: + if "workspace" in kwargs: raise geoserver.catalog.FailedRequestError() - elif 'store' in kwargs: + elif "store" in kwargs: mock_resource = mock.NonCallableMagicMock() mock_resource.name = name - mock_resource.store = kwargs['store'] + mock_resource.store = kwargs["store"] return mock_resource else: - raise AssertionError('Did not get expected keyword arguments: {}'.format(list(kwargs))) + raise AssertionError( + "Did not get expected keyword arguments: {}".format(list(kwargs)) + ) class MockResponse(object): @@ -68,46 +72,52 @@ def setUp(self): # Files self.tests_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) - self.files_root = os.path.join(self.tests_root, 'files') + self.files_root = os.path.join(self.tests_root, "files") - self.shapefile_name = 'test' - self.shapefile_base = os.path.join(self.files_root, 'shapefile', self.shapefile_name) + self.shapefile_name = "test" + self.shapefile_base = os.path.join( + self.files_root, "shapefile", self.shapefile_name + ) # Create Test Engine - self.endpoint = 'http://fake.geoserver.org:8181/geoserver/rest/' - self.public_endpoint = 'http://fake.public.geoserver.org:8181/geoserver/rest/' - self.username = 'foo' - self.password = 'bar' + self.endpoint = "http://fake.geoserver.org:8181/geoserver/rest/" + self.public_endpoint = "http://fake.public.geoserver.org:8181/geoserver/rest/" + self.username = "foo" + self.password = "bar" self.auth = (self.username, self.password) - + self.engine = GeoServerSpatialDatasetEngine( endpoint=self.endpoint, username=self.username, password=self.password, - public_endpoint=self.public_endpoint + public_endpoint=self.public_endpoint, ) # Catalog - self.catalog_endpoint = 'http://localhost:8181/geoserver/' + self.catalog_endpoint = "http://localhost:8181/geoserver/" self.mock_catalog = mock.NonCallableMagicMock(gs_base_url=self.catalog_endpoint) # Workspaces - self.workspace_name = 'a-workspace' + self.workspace_name = "a-workspace" # Store - self.store_name = 'a-store' - self.mock_store = mock.NonCallableMagicMock() #: Needs to pass not callable test + self.store_name = "a-store" + self.mock_store = ( + mock.NonCallableMagicMock() + ) #: Needs to pass not callable test # the "name" attribute needs to be set after create b/c name is a constructor argument # http://blog.tunarob.com/2017/04/27/mock-name-attribute/ self.mock_store.name = self.store_name # Default Style - self.default_style_name = 'a-style' - self.mock_default_style = mock.NonCallableMagicMock(workspace=self.workspace_name) + self.default_style_name = "a-style" + self.mock_default_style = mock.NonCallableMagicMock( + workspace=self.workspace_name + ) self.mock_default_style.name = self.default_style_name # Styles - self.style_names = ['points', 'lines'] + self.style_names = ["points", "lines"] self.mock_styles = [] for sn in self.style_names: mock_style = 
mock.NonCallableMagicMock(workspace=self.workspace_name) @@ -115,7 +125,7 @@ def setUp(self): self.mock_styles.append(mock_style) # Resources - self.resource_names = ['foo', 'bar', 'goo'] + self.resource_names = ["foo", "bar", "goo"] self.mock_resources = [] for rn in self.resource_names: mock_resource = mock.NonCallableMagicMock(workspace=self.workspace_name) @@ -124,7 +134,7 @@ def setUp(self): self.mock_resources.append(mock_resource) # Layers - self.layer_names = ['baz', 'bat', 'jazz'] + self.layer_names = ["baz", "bat", "jazz"] self.mock_layers = [] for ln in self.layer_names: mock_layer = mock.NonCallableMagicMock(workspace=self.workspace_name) @@ -135,21 +145,21 @@ def setUp(self): self.mock_layers.append(mock_layer) # Layer groups - self.layer_group_names = ['boo', 'moo'] + self.layer_group_names = ["boo", "moo"] self.mock_layer_groups = [] for lgn in self.layer_group_names: mock_layer_group = mock.NonCallableMagicMock( workspace=self.workspace_name, catalog=self.mock_catalog, - dom='fake-dom', + dom="fake-dom", layers=self.layer_names, - style=self.style_names + style=self.style_names, ) mock_layer_group.name = lgn self.mock_layer_groups.append(mock_layer_group) # Workspaces - self.workspace_names = ['b-workspace', 'c-workspace'] + self.workspace_names = ["b-workspace", "c-workspace"] self.mock_workspaces = [] for wp in self.workspace_names: mock_workspace = mock.NonCallableMagicMock() @@ -157,7 +167,7 @@ def setUp(self): self.mock_workspaces.append(mock_workspace) # Stores - self.store_names = ['b-store', 'c-store'] + self.store_names = ["b-store", "c-store"] self.mock_stores = [] for sn in self.store_names: mock_store_name = mock.NonCallableMagicMock(workspace=self.workspace_name) @@ -174,15 +184,15 @@ def assert_valid_response_object(self, response_object): # Response object should be a dictionary with the keys 'success' and either 'result' if success is True # or 'error' if success is False self.assertIsInstance(response_object, dict) - self.assertIn('success', response_object) + self.assertIn("success", response_object) - if isinstance(response_object, dict) and 'success' in response_object: - if response_object['success'] is True: - self.assertIn('result', response_object) - elif response_object['success'] is False: - self.assertIn('error', response_object) + if isinstance(response_object, dict) and "success" in response_object: + if response_object["success"] is True: + self.assertIn("result", response_object) + elif response_object["success"] is False: + self.assertIn("error", response_object) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_list_resources(self, mock_catalog): mc = mock_catalog() mc.get_resources.return_value = self.mock_resources @@ -194,10 +204,10 @@ def test_list_resources(self, mock_catalog): self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # Returns list self.assertIsInstance(result, list) @@ -212,7 +222,7 @@ def test_list_resources(self, mock_catalog): mc.get_resources.assert_called_with(stores=None, workspaces=None) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_list_resources_with_properties(self, mock_catalog): mc = mock_catalog() 
mc.get_resources.return_value = self.mock_resources @@ -224,10 +234,10 @@ def test_list_resources_with_properties(self, mock_catalog): self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # Returns list self.assertIsInstance(result, list) @@ -237,16 +247,16 @@ def test_list_resources_with_properties(self, mock_catalog): self.assertIsInstance(result[0], dict) for r in result: - self.assertIn('name', r) - self.assertIn(r['name'], self.resource_names) - self.assertIn('workspace', r) - self.assertEqual(self.workspace_name, r['workspace']) - self.assertIn('store', r) - self.assertEqual(self.store_name, r['store']) + self.assertIn("name", r) + self.assertIn(r["name"], self.resource_names) + self.assertIn("workspace", r) + self.assertEqual(self.workspace_name, r["workspace"]) + self.assertIn("store", r) + self.assertEqual(self.store_name, r["store"]) mc.get_resources.assert_called_with(stores=None, workspaces=None) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_list_resources_ambiguous_error(self, mock_catalog): mc = mock_catalog() mc.get_resources.side_effect = geoserver.catalog.AmbiguousRequestError() @@ -258,11 +268,11 @@ def test_list_resources_ambiguous_error(self, mock_catalog): self.assert_valid_response_object(response) # Success - self.assertFalse(response['success']) + self.assertFalse(response["success"]) mc.get_resources.assert_called_with(stores=None, workspaces=None) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_list_resources_multiple_stores_error(self, mock_catalog): mc = mock_catalog() mc.get_resources.side_effect = TypeError() @@ -274,12 +284,12 @@ def test_list_resources_multiple_stores_error(self, mock_catalog): self.assert_valid_response_object(response) # Success - self.assertFalse(response['success']) - self.assertIn('Multiple stores found named', response['error']) + self.assertFalse(response["success"]) + self.assertIn("Multiple stores found named", response["error"]) mc.get_resources.assert_called_with(stores=None, workspaces=None) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_list_layers(self, mock_catalog): mc = mock_catalog() mc.get_layers.return_value = self.mock_layers @@ -291,10 +301,10 @@ def test_list_layers(self, mock_catalog): self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # Returns list self.assertIsInstance(result, list) @@ -309,7 +319,7 @@ def test_list_layers(self, mock_catalog): mc.get_layers.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_list_layers_with_properties(self, mock_catalog): mc = mock_catalog() mc.get_layers.return_value = self.mock_layers @@ -321,10 +331,10 @@ def test_list_layers_with_properties(self, mock_catalog): self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + 
self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # Returns list self.assertIsInstance(result, list) @@ -334,23 +344,27 @@ def test_list_layers_with_properties(self, mock_catalog): self.assertIsInstance(result[0], dict) for r in result: - self.assertIn('name', r) - self.assertIn(r['name'], self.layer_names) - self.assertIn('workspace', r) - self.assertEqual(self.workspace_name, r['workspace']) - self.assertIn('store', r) - self.assertEqual(self.store_name, r['store']) - self.assertIn('default_style', r) - w_default_style = '{}:{}'.format(self.workspace_name, self.default_style_name) - self.assertEqual(w_default_style, r['default_style']) - self.assertIn('styles', r) - w_styles = ['{}:{}'.format(self.workspace_name, style) for style in self.style_names] - for s in r['styles']: + self.assertIn("name", r) + self.assertIn(r["name"], self.layer_names) + self.assertIn("workspace", r) + self.assertEqual(self.workspace_name, r["workspace"]) + self.assertIn("store", r) + self.assertEqual(self.store_name, r["store"]) + self.assertIn("default_style", r) + w_default_style = "{}:{}".format( + self.workspace_name, self.default_style_name + ) + self.assertEqual(w_default_style, r["default_style"]) + self.assertIn("styles", r) + w_styles = [ + "{}:{}".format(self.workspace_name, style) for style in self.style_names + ] + for s in r["styles"]: self.assertIn(s, w_styles) mc.get_layers.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_list_layer_groups(self, mock_catalog): mc = mock_catalog() mc.get_layergroups.return_value = self.mock_layer_groups @@ -362,10 +376,10 @@ def test_list_layer_groups(self, mock_catalog): self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # List of strings if len(result) > 0: @@ -377,7 +391,7 @@ def test_list_layer_groups(self, mock_catalog): mc.get_layergroups.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_list_layer_groups_with_properties(self, mock_catalog): mc = mock_catalog() mc.get_layergroups.return_value = self.mock_layer_groups @@ -389,10 +403,10 @@ def test_list_layer_groups_with_properties(self, mock_catalog): self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # Returns list self.assertIsInstance(result, list) @@ -402,18 +416,18 @@ def test_list_layer_groups_with_properties(self, mock_catalog): self.assertIsInstance(result[0], dict) for r in result: - self.assertIn('name', r) - self.assertIn(r['name'], self.layer_group_names) - self.assertIn('workspace', r) - self.assertEqual(self.workspace_name, r['workspace']) - self.assertIn('catalog', r) - self.assertIn('layers', r) - self.assertEqual(self.layer_names, r['layers']) - self.assertNotIn('dom', r) + self.assertIn("name", r) + self.assertIn(r["name"], self.layer_group_names) + self.assertIn("workspace", r) + self.assertEqual(self.workspace_name, r["workspace"]) + self.assertIn("catalog", r) + self.assertIn("layers", r) + self.assertEqual(self.layer_names, r["layers"]) 
+ self.assertNotIn("dom", r) mc.get_layergroups.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_list_workspaces(self, mock_catalog): mc = mock_catalog() mc.get_workspaces.return_value = self.mock_workspaces @@ -425,10 +439,10 @@ def test_list_workspaces(self, mock_catalog): self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # List of strings if len(result) > 0: @@ -440,7 +454,7 @@ def test_list_workspaces(self, mock_catalog): mc.get_workspaces.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_list_stores(self, mock_catalog): mc = mock_catalog() mc.get_stores.return_value = self.mock_stores @@ -452,10 +466,10 @@ def test_list_stores(self, mock_catalog): self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # List of strings if len(result) > 0: @@ -467,13 +481,13 @@ def test_list_stores(self, mock_catalog): mc.get_stores.assert_called_with(workspaces=[]) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_list_stores_invalid_workspace(self, mock_catalog): mc = mock_catalog() mc.get_stores.return_value = self.mock_stores mc.get_stores.side_effect = AttributeError() - workspace = 'invalid' + workspace = "invalid" # Execute response = self.engine.list_stores(workspace=workspace, debug=self.debug) @@ -482,11 +496,11 @@ def test_list_stores_invalid_workspace(self, mock_catalog): self.assert_valid_response_object(response) # False - self.assertFalse(response['success']) - self.assertIn('Invalid workspace', response['error']) + self.assertFalse(response["success"]) + self.assertIn("Invalid workspace", response["error"]) mc.get_stores.assert_called_with(workspaces=[workspace]) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_list_styles(self, mock_catalog): mc = mock_catalog() mc.get_styles.return_value = self.mock_styles @@ -498,10 +512,10 @@ def test_list_styles(self, mock_catalog): self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # Returns list self.assertIsInstance(result, list) @@ -515,26 +529,25 @@ def test_list_styles(self, mock_catalog): self.assertIn(n, result) mc.get_styles.assert_called_with(workspaces=[]) - - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_list_styles_of_workspace(self, mock_catalog): mc = mock_catalog() mc.get_styles.return_value = self.mock_styles # Execute response = self.engine.list_styles( - workspace=self.workspace_name, - debug=self.debug + workspace=self.workspace_name, debug=self.debug ) # Validate response object 
self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # Returns list self.assertIsInstance(result, list) @@ -549,7 +562,7 @@ def test_list_styles_of_workspace(self, mock_catalog): mc.get_styles.assert_called_with(workspaces=[self.workspace_name]) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_list_styles_with_properties(self, mock_catalog): mc = mock_catalog() mc.get_styles.return_value = self.mock_styles @@ -561,10 +574,10 @@ def test_list_styles_with_properties(self, mock_catalog): self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # Returns list self.assertIsInstance(result, list) @@ -574,45 +587,48 @@ def test_list_styles_with_properties(self, mock_catalog): self.assertIsInstance(result[0], dict) for r in result: - self.assertIn('name', r) - self.assertIn(r['name'], self.style_names) - self.assertIn('workspace', r) - self.assertEqual(self.workspace_name, r['workspace']) + self.assertIn("name", r) + self.assertIn(r["name"], self.style_names) + self.assertIn("workspace", r) + self.assertEqual(self.workspace_name, r["workspace"]) mc.get_styles.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_get_resource(self, mock_catalog): mc = mock_catalog() mc.get_default_workspace().name = self.workspace_name mc.get_resource.return_value = self.mock_resources[0] # Execute - response = self.engine.get_resource(resource_id=self.resource_names[0], debug=self.debug) + response = self.engine.get_resource( + resource_id=self.resource_names[0], debug=self.debug + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) # Properties - self.assertIn('name', r) - self.assertIn(r['name'], self.resource_names) - self.assertIn('workspace', r) - self.assertEqual(self.workspace_name, r['workspace']) - self.assertIn('store', r) - self.assertEqual(self.store_name, r['store']) - - mc.get_resource.assert_called_with(name=self.resource_names[0], store=None, - workspace=self.workspace_name) + self.assertIn("name", r) + self.assertIn(r["name"], self.resource_names) + self.assertIn("workspace", r) + self.assertEqual(self.workspace_name, r["workspace"]) + self.assertIn("store", r) + self.assertEqual(self.store_name, r["store"]) + + mc.get_resource.assert_called_with( + name=self.resource_names[0], store=None, workspace=self.workspace_name + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_get_resource_with_workspace(self, mock_catalog): mc = mock_catalog() mc.get_resource.return_value = self.mock_resources[0] @@ -620,308 +636,346 @@ def test_get_resource_with_workspace(self, mock_catalog): # Execute resource_id = self.workspace_name + ":" + self.resource_names[0] - response = 
self.engine.get_resource(resource_id=resource_id, - debug=self.debug) + response = self.engine.get_resource(resource_id=resource_id, debug=self.debug) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) # Properties - self.assertIn('name', r) - self.assertIn(r['name'], self.resource_names) - self.assertIn('workspace', r) - self.assertEqual(self.workspace_name, r['workspace']) - self.assertIn('store', r) - self.assertEqual(self.store_name, r['store']) - - mc.get_resource.assert_called_with(name=self.resource_names[0], store=None, workspace=self.workspace_name) + self.assertIn("name", r) + self.assertIn(r["name"], self.resource_names) + self.assertIn("workspace", r) + self.assertEqual(self.workspace_name, r["workspace"]) + self.assertIn("store", r) + self.assertEqual(self.store_name, r["store"]) + + mc.get_resource.assert_called_with( + name=self.resource_names[0], store=None, workspace=self.workspace_name + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_get_resource_none(self, mock_catalog): mc = mock_catalog() mc.get_resource.return_value = None mc.get_default_workspace().name = self.workspace_name # Execute - response = self.engine.get_resource(resource_id=self.resource_names[0], debug=self.debug) + response = self.engine.get_resource( + resource_id=self.resource_names[0], debug=self.debug + ) # Validate response object self.assert_valid_response_object(response) # False - self.assertFalse(response['success']) + self.assertFalse(response["success"]) # Expect Error - r = response['error'] + r = response["error"] # Properties - self.assertIn('not found', r) + self.assertIn("not found", r) - mc.get_resource.assert_called_with(name=self.resource_names[0], store=None, - workspace=self.workspace_name) + mc.get_resource.assert_called_with( + name=self.resource_names[0], store=None, workspace=self.workspace_name + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_get_resource_failed_request_error(self, mock_catalog): mc = mock_catalog() - mc.get_resource.side_effect = geoserver.catalog.FailedRequestError('Failed Request') + mc.get_resource.side_effect = geoserver.catalog.FailedRequestError( + "Failed Request" + ) mc.get_default_workspace().name = self.workspace_name # Execute - response = self.engine.get_resource(resource_id=self.resource_names[0], debug=self.debug) + response = self.engine.get_resource( + resource_id=self.resource_names[0], debug=self.debug + ) # Validate response object self.assert_valid_response_object(response) # False - self.assertFalse(response['success']) + self.assertFalse(response["success"]) # Expect Error - r = response['error'] + r = response["error"] # Properties - self.assertIn('Failed Request', r) + self.assertIn("Failed Request", r) - mc.get_resource.assert_called_with(name=self.resource_names[0], store=None, - workspace=self.workspace_name) + mc.get_resource.assert_called_with( + name=self.resource_names[0], store=None, workspace=self.workspace_name + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + 
@mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_get_resource_with_store(self, mock_catalog): mc = mock_catalog() mc.get_resource.return_value = self.mock_resources[0] # Execute resource_id = self.workspace_name + ":" + self.resource_names[0] - response = self.engine.get_resource(resource_id=resource_id, - store_id=self.store_name, - debug=self.debug) + response = self.engine.get_resource( + resource_id=resource_id, store_id=self.store_name, debug=self.debug + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) # Properties - self.assertIn('name', r) - self.assertIn(r['name'], self.resource_names) - self.assertIn('workspace', r) - self.assertEqual(self.workspace_name, r['workspace']) - self.assertIn('store', r) - self.assertEqual(self.store_name, r['store']) - - mc.get_resource.assert_called_with(name=self.resource_names[0], - store=self.store_name, - workspace=self.workspace_name) - - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.get') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + self.assertIn("name", r) + self.assertIn(r["name"], self.resource_names) + self.assertIn("workspace", r) + self.assertEqual(self.workspace_name, r["workspace"]) + self.assertIn("store", r) + self.assertEqual(self.store_name, r["store"]) + + mc.get_resource.assert_called_with( + name=self.resource_names[0], + store=self.store_name, + workspace=self.workspace_name, + ) + + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.get") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_get_layer(self, mock_catalog, mock_get): mc = mock_catalog() mc.get_layer.return_value = self.mock_layers[0] - mock_get.return_value = MockResponse(200, text='bar') + mock_get.return_value = MockResponse( + 200, text="bar" + ) # Execute - response = self.engine.get_layer(layer_id=self.layer_names[0], store_id=self.store_name, - debug=self.debug) + response = self.engine.get_layer( + layer_id=self.layer_names[0], store_id=self.store_name, debug=self.debug + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) # Properties - self.assertIn('name', r) - self.assertEqual(self.layer_names[0], r['name']) - self.assertIn('store', r) - self.assertEqual(self.store_name, r['store']) - self.assertIn('default_style', r) - self.assertIn(self.default_style_name, r['default_style']) - self.assertIn('styles', r) - w_styles = ['{}:{}'.format(self.workspace_name, style) for style in self.style_names] - for s in r['styles']: + self.assertIn("name", r) + self.assertEqual(self.layer_names[0], r["name"]) + self.assertIn("store", r) + self.assertEqual(self.store_name, r["store"]) + self.assertIn("default_style", r) + self.assertIn(self.default_style_name, r["default_style"]) + self.assertIn("styles", r) + w_styles = [ + "{}:{}".format(self.workspace_name, style) for style in self.style_names + ] + for s in r["styles"]: self.assertIn(s, w_styles) - self.assertIn('tile_caching', r) - self.assertEqual({'foo': 'bar'}, r['tile_caching']) + self.assertIn("tile_caching", r) + 
self.assertEqual({"foo": "bar"}, r["tile_caching"]) mc.get_layer.assert_called_with(name=self.layer_names[0]) mock_get.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_get_layer_none(self, mock_catalog): mc = mock_catalog() mc.get_layer.return_value = None mc.get_default_workspace().name = self.workspace_name # Execute - response = self.engine.get_layer(layer_id=self.layer_names[0], store_id=self.store_name, debug=self.debug) + response = self.engine.get_layer( + layer_id=self.layer_names[0], store_id=self.store_name, debug=self.debug + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertFalse(response['success']) + self.assertFalse(response["success"]) # Extract Result - r = response['error'] + r = response["error"] - self.assertIn('not found', r) + self.assertIn("not found", r) mc.get_layer.assert_called_with(name=self.layer_names[0]) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_get_layer_failed_request_error(self, mock_catalog): mc = mock_catalog() - mc.get_layer.side_effect = geoserver.catalog.FailedRequestError('Failed Request') + mc.get_layer.side_effect = geoserver.catalog.FailedRequestError( + "Failed Request" + ) # Execute - response = self.engine.get_layer(layer_id=self.layer_names[0], - store_id=self.store_name, - debug=self.debug) + response = self.engine.get_layer( + layer_id=self.layer_names[0], store_id=self.store_name, debug=self.debug + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertFalse(response['success']) + self.assertFalse(response["success"]) # Extract Result - r = response['error'] + r = response["error"] - self.assertEqual(r, 'Failed Request') + self.assertEqual(r, "Failed Request") mc.get_layer.assert_called_with(name=self.layer_names[0]) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_get_layer_group(self, mock_catalog): mc = mock_catalog() mc.get_layergroups.return_value = self.mock_layer_groups mc._return_first_item.return_value = self.mock_layer_groups[0] # Execute - response = self.engine.get_layer_group(layer_group_id=self.layer_group_names[0], debug=self.debug) + response = self.engine.get_layer_group( + layer_group_id=self.layer_group_names[0], debug=self.debug + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) # List of dictionaries - self.assertIn('workspace', r) - self.assertEqual(self.workspace_name, r['workspace']) - self.assertIn('catalog', r) - self.assertIn('layers', r) - self.assertEqual(self.layer_names, r['layers']) - self.assertNotIn('dom', r) - - mc.get_layergroups.assert_called_with(names=self.layer_group_names[0], workspaces=[]) + self.assertIn("workspace", r) + self.assertEqual(self.workspace_name, r["workspace"]) + self.assertIn("catalog", r) + self.assertIn("layers", r) + self.assertEqual(self.layer_names, r["layers"]) + self.assertNotIn("dom", r) + + mc.get_layergroups.assert_called_with( + names=self.layer_group_names[0], workspaces=[] + ) - 
@mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_get_layer_group_with_workspace(self, mock_catalog): mc = mock_catalog() mc.get_layergroups.return_value = self.mock_layer_groups mc._return_first_item.return_value = self.mock_layer_groups[0] - layer_group_id = f'{self.workspace_name}:{self.layer_group_names[0]}' + layer_group_id = f"{self.workspace_name}:{self.layer_group_names[0]}" # Execute - response = self.engine.get_layer_group(layer_group_id=layer_group_id, debug=self.debug) + response = self.engine.get_layer_group( + layer_group_id=layer_group_id, debug=self.debug + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) # List of dictionaries - self.assertIn('workspace', r) - self.assertEqual(self.workspace_name, r['workspace']) - self.assertIn('catalog', r) - self.assertIn('layers', r) - self.assertEqual(self.layer_names, r['layers']) - self.assertNotIn('dom', r) - - mc.get_layergroups.assert_called_with(names=self.layer_group_names[0], workspaces=[self.workspace_name]) + self.assertIn("workspace", r) + self.assertEqual(self.workspace_name, r["workspace"]) + self.assertIn("catalog", r) + self.assertIn("layers", r) + self.assertEqual(self.layer_names, r["layers"]) + self.assertNotIn("dom", r) + + mc.get_layergroups.assert_called_with( + names=self.layer_group_names[0], workspaces=[self.workspace_name] + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_get_layer_group_none(self, mock_catalog): mc = mock_catalog() mc.get_layergroups.return_value = None mc._return_first_item.return_value = None # Execute - response = self.engine.get_layer_group(layer_group_id=self.layer_group_names[0], debug=self.debug) + response = self.engine.get_layer_group( + layer_group_id=self.layer_group_names[0], debug=self.debug + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertFalse(response['success']) + self.assertFalse(response["success"]) # Extract Result - r = response['error'] + r = response["error"] - self.assertIn('not found', r) + self.assertIn("not found", r) - mc.get_layergroups.assert_called_with(names=self.layer_group_names[0], workspaces=[]) + mc.get_layergroups.assert_called_with( + names=self.layer_group_names[0], workspaces=[] + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_get_layer_group_failed_request_error(self, mock_catalog): mc = mock_catalog() - mc.get_layergroups.side_effect = geoserver.catalog.FailedRequestError('Failed Request') + mc.get_layergroups.side_effect = geoserver.catalog.FailedRequestError( + "Failed Request" + ) # Execute - response = self.engine.get_layer_group(layer_group_id=self.layer_group_names[0], debug=self.debug) + response = self.engine.get_layer_group( + layer_group_id=self.layer_group_names[0], debug=self.debug + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertFalse(response['success']) + self.assertFalse(response["success"]) # Extract Result - r = response['error'] + r = 
response["error"] - self.assertEqual(r, 'Failed Request') + self.assertEqual(r, "Failed Request") - mc.get_layergroups.assert_called_with(names=self.layer_group_names[0], workspaces=[]) + mc.get_layergroups.assert_called_with( + names=self.layer_group_names[0], workspaces=[] + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_get_store(self, mock_catalog): mc = mock_catalog() mc.get_store.return_value = self.mock_stores[0] @@ -933,42 +987,48 @@ def test_get_store(self, mock_catalog): self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) # Properties - self.assertIn('name', r) - self.assertIn(r['name'], self.store_names) - self.assertIn('workspace', r) - self.assertEqual(self.workspace_name, r['workspace']) + self.assertIn("name", r) + self.assertIn(r["name"], self.store_names) + self.assertIn("workspace", r) + self.assertEqual(self.workspace_name, r["workspace"]) - mc.get_store.assert_called_with(name=self.store_names[0], workspace=self.workspace_name) + mc.get_store.assert_called_with( + name=self.store_names[0], workspace=self.workspace_name + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_get_store_failed_request_error(self, mock_catalog): mc = mock_catalog() mc.get_store.return_value = self.mock_stores[0] - mc.get_store.side_effect = geoserver.catalog.FailedRequestError('Failed Request') + mc.get_store.side_effect = geoserver.catalog.FailedRequestError( + "Failed Request" + ) mc.get_default_workspace().name = self.workspace_name # Execute response = self.engine.get_store(store_id=self.store_names[0], debug=self.debug) # Success - self.assertFalse(response['success']) + self.assertFalse(response["success"]) # Extract Result - r = response['error'] + r = response["error"] - self.assertIn('Failed Request', r) + self.assertIn("Failed Request", r) - mc.get_store.assert_called_with(name=self.store_names[0], workspace=self.workspace_name) + mc.get_store.assert_called_with( + name=self.store_names[0], workspace=self.workspace_name + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_get_store_none(self, mock_catalog): mc = mock_catalog() mc.get_store.return_value = None @@ -981,16 +1041,18 @@ def test_get_store_none(self, mock_catalog): self.assert_valid_response_object(response) # Success - self.assertFalse(response['success']) + self.assertFalse(response["success"]) # Extract Result - r = response['error'] + r = response["error"] - self.assertIn('not found', r) + self.assertIn("not found", r) - mc.get_store.assert_called_with(name=self.store_names[0], workspace=self.workspace_name) + mc.get_store.assert_called_with( + name=self.store_names[0], workspace=self.workspace_name + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_get_style(self, mock_catalog): mc = mock_catalog() mc.get_style.return_value = self.mock_styles[0] @@ -1002,23 +1064,25 @@ def test_get_style(self, mock_catalog): 
self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) # Properties - self.assertIn('name', r) - self.assertIn(r['name'], self.style_names) - self.assertIn('workspace', r) - self.assertEqual(self.workspace_name, r['workspace']) + self.assertIn("name", r) + self.assertIn(r["name"], self.style_names) + self.assertIn("workspace", r) + self.assertEqual(self.workspace_name, r["workspace"]) - mc.get_style.assert_called_with(name=self.style_names[0], workspace=self.workspace_name) + mc.get_style.assert_called_with( + name=self.style_names[0], workspace=self.workspace_name + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_get_style_none(self, mock_catalog): mc = mock_catalog() mc.get_style.return_value = None @@ -1031,19 +1095,23 @@ def test_get_style_none(self, mock_catalog): self.assert_valid_response_object(response) # Success - self.assertFalse(response['success']) + self.assertFalse(response["success"]) # Extract Result - r = response['error'] + r = response["error"] - self.assertIn('not found', r) + self.assertIn("not found", r) - mc.get_style.assert_called_with(name=self.style_names[0], workspace=self.workspace_name) + mc.get_style.assert_called_with( + name=self.style_names[0], workspace=self.workspace_name + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_get_style_failed_request_error(self, mock_catalog): mc = mock_catalog() - mc.get_style.side_effect = geoserver.catalog.FailedRequestError('Failed Request') + mc.get_style.side_effect = geoserver.catalog.FailedRequestError( + "Failed Request" + ) mc.get_default_workspace().name = self.workspace_name # Execute response = self.engine.get_style(style_id=self.style_names[0], debug=self.debug) @@ -1052,508 +1120,568 @@ def test_get_style_failed_request_error(self, mock_catalog): self.assert_valid_response_object(response) # Success - self.assertFalse(response['success']) + self.assertFalse(response["success"]) # Extract Result - r = response['error'] + r = response["error"] - self.assertIn('Failed Request', r) + self.assertIn("Failed Request", r) - mc.get_style.assert_called_with(name=self.style_names[0], workspace=self.workspace_name) + mc.get_style.assert_called_with( + name=self.style_names[0], workspace=self.workspace_name + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.get') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.get") def test_get_layer_extent(self, mock_get): - store_id = f'{self.workspace_name}:{self.store_name}' + store_id = f"{self.workspace_name}:{self.store_name}" expected_bb = [-14.23, 28.1, -50.42, 89.18] jsondict = { - 'featureType': { - 'nativeBoundingBox': {'minx': -12.23, 'miny': 22.1, 'maxx': -56.42, 'maxy': 32.18}, - 'latLonBoundingBox': {'minx': -14.23, 'miny': 28.1, 'maxx': -50.42, 'maxy': 89.18} + "featureType": { + "nativeBoundingBox": { + "minx": -12.23, + "miny": 22.1, + "maxx": -56.42, + "maxy": 32.18, + }, + "latLonBoundingBox": { + "minx": -14.23, + "miny": 28.1, + "maxx": -50.42, + "maxy": 89.18, + }, } } mock_get.return_value = MockResponse(200, json=jsondict) - rest_endpoint = 
'{endpoint}workspaces/{workspace}/datastores/{datastore}/featuretypes/{feature_name}.json'.format( # noqa: E501 + rest_endpoint = "{endpoint}workspaces/{workspace}/datastores/{datastore}/featuretypes/{feature_name}.json".format( # noqa: E501 endpoint=self.endpoint, workspace=self.workspace_name, datastore=self.store_name, - feature_name='fee' + feature_name="fee", ) - result = self.engine.get_layer_extent(store_id, 'fee', buffer_factor=1.0) + result = self.engine.get_layer_extent(store_id, "fee", buffer_factor=1.0) mock_get.assert_called_with(rest_endpoint, auth=self.auth) self.assertEqual(expected_bb, result) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.get') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.get") + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace" + ) def test_get_layer_extent_native(self, mock_workspace, mock_get): store_id = self.store_name mock_workspace().name = self.workspace_name expected_bb = [-12.23, 22.1, -56.42, 32.18] jsondict = { - 'featureType': { - 'nativeBoundingBox': {'minx': -12.23, 'miny': 22.1, 'maxx': -56.42, 'maxy': 32.18}, - 'latLonBoundingBox': {'minx': -14.23, 'miny': 28.1, 'maxx': -50.42, 'maxy': 89.18} + "featureType": { + "nativeBoundingBox": { + "minx": -12.23, + "miny": 22.1, + "maxx": -56.42, + "maxy": 32.18, + }, + "latLonBoundingBox": { + "minx": -14.23, + "miny": 28.1, + "maxx": -50.42, + "maxy": 89.18, + }, } } mock_get.return_value = MockResponse(200, json=jsondict) - rest_endpoint = '{endpoint}workspaces/{workspace}/datastores/{datastore}/featuretypes/{feature_name}.json'.format( # noqa: E501 + rest_endpoint = "{endpoint}workspaces/{workspace}/datastores/{datastore}/featuretypes/{feature_name}.json".format( # noqa: E501 endpoint=self.endpoint, workspace=self.workspace_name, datastore=self.store_name, - feature_name='fee' + feature_name="fee", + ) + result = self.engine.get_layer_extent( + store_id, "fee", native=True, buffer_factor=1.0 ) - result = self.engine.get_layer_extent(store_id, 'fee', native=True, buffer_factor=1.0) mock_get.assert_called_with(rest_endpoint, auth=self.auth) self.assertEqual(expected_bb, result) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.get') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.get") def test_get_layer_extent_feature_bbox_none(self, mock_get): - store_id = f'{self.workspace_name}:{self.store_name}' + store_id = f"{self.workspace_name}:{self.store_name}" expected_bb = [-128.583984375, 22.1874049914, -64.423828125, 52.1065051908] jsondict = {} mock_get.return_value = MockResponse(200, json=jsondict) - rest_endpoint = '{endpoint}workspaces/{workspace}/datastores/{datastore}/featuretypes/{feature_name}.json'.format( # noqa: E501 + rest_endpoint = "{endpoint}workspaces/{workspace}/datastores/{datastore}/featuretypes/{feature_name}.json".format( # noqa: E501 endpoint=self.endpoint, workspace=self.workspace_name, datastore=self.store_name, - feature_name='fee' + feature_name="fee", ) - result = self.engine.get_layer_extent(store_id, 'fee', buffer_factor=1.0) + result = self.engine.get_layer_extent(store_id, "fee", buffer_factor=1.0) mock_get.assert_called_with(rest_endpoint, auth=self.auth) self.assertEqual(expected_bb, result) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - 
@mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.get') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.get") def test_get_layer_extent_not_200(self, mock_get, mock_logger): - store_id = f'{self.workspace_name}:{self.store_name}' + store_id = f"{self.workspace_name}:{self.store_name}" mock_get.return_value = MockResponse(500) - rest_endpoint = '{endpoint}workspaces/{workspace}/datastores/{datastore}/featuretypes/{feature_name}.json'.format( # noqa: E501 + rest_endpoint = "{endpoint}workspaces/{workspace}/datastores/{datastore}/featuretypes/{feature_name}.json".format( # noqa: E501 endpoint=self.endpoint, workspace=self.workspace_name, datastore=self.store_name, - feature_name='fee' + feature_name="fee", + ) + self.assertRaises( + requests.RequestException, + self.engine.get_layer_extent, + store_id, + "fee", + buffer_factor=1.0, ) - self.assertRaises(requests.RequestException, self.engine.get_layer_extent, store_id, 'fee', buffer_factor=1.0) mock_get.assert_called_with(rest_endpoint, auth=self.auth) mock_logger.error.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_get_workspace(self, mock_catalog): mc = mock_catalog() mc.get_workspace.return_value = self.mock_workspaces[0] # Execute - response = self.engine.get_workspace(workspace_id=self.workspace_names[0], debug=self.debug) + response = self.engine.get_workspace( + workspace_id=self.workspace_names[0], debug=self.debug + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) # Properties - self.assertIn('name', r) - self.assertIn(r['name'], self.workspace_names[0]) + self.assertIn("name", r) + self.assertIn(r["name"], self.workspace_names[0]) mc.get_workspace.assert_called_with(name=self.workspace_names[0]) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_get_workspace_none(self, mock_catalog): mc = mock_catalog() mc.get_workspace.return_value = None # Execute - response = self.engine.get_workspace(workspace_id=self.workspace_names[0], debug=self.debug) + response = self.engine.get_workspace( + workspace_id=self.workspace_names[0], debug=self.debug + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertFalse(response['success']) + self.assertFalse(response["success"]) # Extract Result - r = response['error'] + r = response["error"] - self.assertIn('not found', r) + self.assertIn("not found", r) mc.get_workspace.assert_called_with(name=self.workspace_names[0]) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_get_workspace_failed_request_error(self, mock_catalog): mc = mock_catalog() - mc.get_workspace.side_effect = geoserver.catalog.FailedRequestError('Failed Request') + mc.get_workspace.side_effect = geoserver.catalog.FailedRequestError( + "Failed Request" + ) # Execute - response = self.engine.get_workspace(workspace_id=self.workspace_names[0], debug=self.debug) + response = 
self.engine.get_workspace( + workspace_id=self.workspace_names[0], debug=self.debug + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertFalse(response['success']) + self.assertFalse(response["success"]) # Extract Result - r = response['error'] + r = response["error"] - self.assertIn('Failed Request', r) + self.assertIn("Failed Request", r) mc.get_workspace.assert_called_with(name=self.workspace_names[0]) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_update_resource(self, mock_catalog): mc = mock_catalog() mc.get_resource.return_value = mock.NonCallableMagicMock( - title='foo', - geometry='points' + title="foo", geometry="points" ) # Setup resource_id = self.workspace_name + ":" + self.resource_names[0] new_title = random_string_generator(15) - new_geometry = 'lines' + new_geometry = "lines" # Execute - response = self.engine.update_resource(resource_id=resource_id, - title=new_title, - geometry=new_geometry, - debug=self.debug) + response = self.engine.update_resource( + resource_id=resource_id, + title=new_title, + geometry=new_geometry, + debug=self.debug, + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # Properties - self.assertEqual(result['title'], new_title) - self.assertEqual(result['geometry'], new_geometry) + self.assertEqual(result["title"], new_title) + self.assertEqual(result["geometry"], new_geometry) - mc.get_resource.assert_called_with(name=self.resource_names[0], store=None, workspace=self.workspace_name) + mc.get_resource.assert_called_with( + name=self.resource_names[0], store=None, workspace=self.workspace_name + ) mc.save.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_update_resource_no_workspace(self, mock_catalog): mc = mock_catalog() mc.get_resource.return_value = mock.NonCallableMagicMock( - title='foo', - geometry='points' + title="foo", geometry="points" ) mc.get_default_workspace().name = self.workspace_name # Setup resource_id = self.resource_names[0] new_title = random_string_generator(15) - new_geometry = 'lines' + new_geometry = "lines" # Execute - response = self.engine.update_resource(resource_id=resource_id, - title=new_title, - geometry=new_geometry, - debug=self.debug) + response = self.engine.update_resource( + resource_id=resource_id, + title=new_title, + geometry=new_geometry, + debug=self.debug, + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # Properties - self.assertEqual(result['title'], new_title) - self.assertEqual(result['geometry'], new_geometry) + self.assertEqual(result["title"], new_title) + self.assertEqual(result["geometry"], new_geometry) - mc.get_resource.assert_called_with(name=self.resource_names[0], store=None, workspace=self.workspace_name) + mc.get_resource.assert_called_with( + name=self.resource_names[0], store=None, workspace=self.workspace_name + ) mc.save.assert_called() - 
@mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_update_resource_style(self, mock_catalog): mc = mock_catalog() mc.get_resource.return_value = mock.NonCallableMagicMock( - styles=['style_name'], + styles=["style_name"], ) mc.get_style.side_effect = mock_get_style # Setup resource_id = self.workspace_name + ":" + self.resource_names[0] - new_styles = ['new_style_name'] + new_styles = ["new_style_name"] # Execute - response = self.engine.update_resource(resource_id=resource_id, - styles=new_styles, - debug=self.debug) + response = self.engine.update_resource( + resource_id=resource_id, styles=new_styles, debug=self.debug + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # Properties - self.assertEqual(result['styles'], new_styles) + self.assertEqual(result["styles"], new_styles) - mc.get_resource.assert_called_with(name=self.resource_names[0], store=None, workspace=self.workspace_name) + mc.get_resource.assert_called_with( + name=self.resource_names[0], store=None, workspace=self.workspace_name + ) mc.save.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_update_resource_style_colon(self, mock_catalog): mc = mock_catalog() mc.get_resource.return_value = mock.NonCallableMagicMock( - styles=['1:2'], + styles=["1:2"], ) mc.get_style.side_effect = mock_get_style # Setup resource_id = self.workspace_name + ":" + self.resource_names[0] - new_styles = ['11:22'] + new_styles = ["11:22"] # Execute - response = self.engine.update_resource(resource_id=resource_id, - styles=new_styles, - debug=self.debug) + response = self.engine.update_resource( + resource_id=resource_id, styles=new_styles, debug=self.debug + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # Properties - self.assertEqual(result['styles'], new_styles) + self.assertEqual(result["styles"], new_styles) - mc.get_resource.assert_called_with(name=self.resource_names[0], store=None, workspace=self.workspace_name) + mc.get_resource.assert_called_with( + name=self.resource_names[0], store=None, workspace=self.workspace_name + ) mc.save.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_update_resource_failed_request_error(self, mock_catalog): mc = mock_catalog() - mc.get_resource.side_effect = geoserver.catalog.FailedRequestError('Failed Request') + mc.get_resource.side_effect = geoserver.catalog.FailedRequestError( + "Failed Request" + ) # Setup resource_id = self.workspace_name + ":" + self.resource_names[0] new_title = random_string_generator(15) - new_geometry = 'lines' + new_geometry = "lines" # Execute - response = self.engine.update_resource(resource_id=resource_id, - title=new_title, - geometry=new_geometry, - debug=self.debug) + response = self.engine.update_resource( + resource_id=resource_id, + title=new_title, + geometry=new_geometry, + debug=self.debug, + ) # 
Validate response object self.assert_valid_response_object(response) # Fail - self.assertFalse(response['success']) + self.assertFalse(response["success"]) # Expect Error - r = response['error'] + r = response["error"] # Properties - self.assertIn('Failed Request', r) + self.assertIn("Failed Request", r) - mc.get_resource.assert_called_with(name=self.resource_names[0], store=None, workspace=self.workspace_name) + mc.get_resource.assert_called_with( + name=self.resource_names[0], store=None, workspace=self.workspace_name + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_update_resource_store(self, mock_catalog): mc = mock_catalog() mc.get_resource.return_value = mock.NonCallableMagicMock( - store=self.store_name, - title='foo', - geometry='points' + store=self.store_name, title="foo", geometry="points" ) # Setup resource_id = self.workspace_name + ":" + self.resource_names[0] new_title = random_string_generator(15) - new_geometry = 'lines' + new_geometry = "lines" # Execute - response = self.engine.update_resource(resource_id=resource_id, - store=self.store_name, - title=new_title, - geometry=new_geometry, - debug=self.debug) + response = self.engine.update_resource( + resource_id=resource_id, + store=self.store_name, + title=new_title, + geometry=new_geometry, + debug=self.debug, + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # Properties - self.assertEqual(result['title'], new_title) - self.assertEqual(result['geometry'], new_geometry) - self.assertEqual(result['store'], self.store_name) + self.assertEqual(result["title"], new_title) + self.assertEqual(result["geometry"], new_geometry) + self.assertEqual(result["store"], self.store_name) - mc.get_resource.assert_called_with(name=self.resource_names[0], - store=self.store_name, - workspace=self.workspace_name) + mc.get_resource.assert_called_with( + name=self.resource_names[0], + store=self.store_name, + workspace=self.workspace_name, + ) mc.save.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_update_layer(self, mock_catalog): mc = mock_catalog() mc.get_layer.return_value = mock.NonCallableMagicMock( - name=self.layer_names[0], - title='foo', - geometry='points' + name=self.layer_names[0], title="foo", geometry="points" ) # Setup new_title = random_string_generator(15) - new_geometry = 'lines' + new_geometry = "lines" # Execute - response = self.engine.update_layer(layer_id=self.layer_names[0], - title=new_title, - geometry=new_geometry, - debug=self.debug) + response = self.engine.update_layer( + layer_id=self.layer_names[0], + title=new_title, + geometry=new_geometry, + debug=self.debug, + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # Properties - self.assertEqual(result['title'], new_title) - self.assertEqual(result['geometry'], new_geometry) + self.assertEqual(result["title"], new_title) + self.assertEqual(result["geometry"], new_geometry) 
mc.get_layer.assert_called_with(name=self.layer_names[0]) mc.save.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_update_layer_failed_request_error(self, mock_catalog): mc = mock_catalog() - mc.get_layer.side_effect = geoserver.catalog.FailedRequestError('Failed Request') + mc.get_layer.side_effect = geoserver.catalog.FailedRequestError( + "Failed Request" + ) mc.get_layer.return_value = mock.NonCallableMagicMock( - name=self.layer_names[0], - title='foo', - geometry='points' + name=self.layer_names[0], title="foo", geometry="points" ) # Setup new_title = random_string_generator(15) - new_geometry = 'lines' + new_geometry = "lines" # Execute - response = self.engine.update_layer(layer_id=self.layer_names[0], - title=new_title, - geometry=new_geometry, - debug=self.debug) + response = self.engine.update_layer( + layer_id=self.layer_names[0], + title=new_title, + geometry=new_geometry, + debug=self.debug, + ) # Validate response object self.assert_valid_response_object(response) # Fail - self.assertFalse(response['success']) + self.assertFalse(response["success"]) # Expect Error - r = response['error'] + r = response["error"] # Properties - self.assertIn('Failed Request', r) + self.assertIn("Failed Request", r) mc.get_layer.assert_called_with(name=self.layer_names[0]) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_update_layer_with_tile_caching_params(self, mock_catalog, mock_post): mc = mock_catalog() mc.get_layer.return_value = mock.NonCallableMagicMock( - name=self.layer_names[0], - title='foo', - geometry='points' + name=self.layer_names[0], title="foo", geometry="points" ) mock_post.return_value = MockResponse(200) # Setup new_title = random_string_generator(15) - new_geometry = 'lines' - tile_caching = {'foo': 'bar'} + new_geometry = "lines" + tile_caching = {"foo": "bar"} # Execute - response = self.engine.update_layer(layer_id=self.layer_names[0], - title=new_title, - geometry=new_geometry, - debug=self.debug, - tile_caching=tile_caching) + response = self.engine.update_layer( + layer_id=self.layer_names[0], + title=new_title, + geometry=new_geometry, + debug=self.debug, + tile_caching=tile_caching, + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # Properties - self.assertEqual(result['title'], new_title) - self.assertEqual(result['geometry'], new_geometry) - self.assertIn('foo', result['tile_caching']) - self.assertEqual(result['tile_caching']['foo'], 'bar') + self.assertEqual(result["title"], new_title) + self.assertEqual(result["geometry"], new_geometry) + self.assertIn("foo", result["tile_caching"]) + self.assertEqual(result["tile_caching"]["foo"], "bar") mc.get_layer.assert_called_with(name=self.layer_names[0]) mc.save.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') - def test_update_layer_with_tile_caching_params_not_200(self, 
mock_catalog, mock_post): + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") + def test_update_layer_with_tile_caching_params_not_200( + self, mock_catalog, mock_post + ): mc = mock_catalog() mc.get_layer.return_value = mock.NonCallableMagicMock( - name=self.layer_names[0], - title='foo', - geometry='points' + name=self.layer_names[0], title="foo", geometry="points" ) - mock_post.return_value = MockResponse(500, text='server error') + mock_post.return_value = MockResponse(500, text="server error") # Setup new_title = random_string_generator(15) - new_geometry = 'lines' - tile_caching = {'foo': 'bar'} + new_geometry = "lines" + tile_caching = {"foo": "bar"} # Execute - response = self.engine.update_layer(layer_id=self.layer_names[0], - title=new_title, - geometry=new_geometry, - debug=self.debug, - tile_caching=tile_caching) + response = self.engine.update_layer( + layer_id=self.layer_names[0], + title=new_title, + geometry=new_geometry, + debug=self.debug, + tile_caching=tile_caching, + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertFalse(response['success']) + self.assertFalse(response["success"]) # Extract Result - self.assertIn('server error', response['error']) + self.assertIn("server error", response["error"]) mc.get_layer.assert_called_with(name=self.layer_names[0]) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_update_layer_group(self, mock_catalog): mc = mock_catalog() - mock_layer_group = mock.NonCallableMagicMock( - layers=self.layer_names - ) + mock_layer_group = mock.NonCallableMagicMock(layers=self.layer_names) mock_layer_group.name = self.layer_group_names[0] mc.get_layergroup.return_value = mock_layer_group @@ -1561,60 +1689,78 @@ def test_update_layer_group(self, mock_catalog): new_layers = random_string_generator(15) # Execute - response = self.engine.update_layer_group(layer_group_id=self.layer_group_names[0], - layers=new_layers, - debug=self.debug) + response = self.engine.update_layer_group( + layer_group_id=self.layer_group_names[0], + layers=new_layers, + debug=self.debug, + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - result = response['result'] + result = response["result"] # Properties - self.assertEqual(result['layers'], new_layers) + self.assertEqual(result["layers"], new_layers) - mc.get_layergroup.assert_called_with(name=self.layer_group_names[0], workspace=None) + mc.get_layergroup.assert_called_with( + name=self.layer_group_names[0], workspace=None + ) mc.save.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_update_layer_group_failed_request_error(self, mock_catalog): mc = mock_catalog() - mc.get_layergroup.side_effect = geoserver.catalog.FailedRequestError('Failed Request') + mc.get_layergroup.side_effect = geoserver.catalog.FailedRequestError( + "Failed Request" + ) # Setup new_layers = random_string_generator(15) # Execute - response = self.engine.update_layer_group(layer_group_id=self.mock_layer_groups[0], - layers=new_layers, - debug=self.debug) + response = self.engine.update_layer_group( + 
layer_group_id=self.mock_layer_groups[0], + layers=new_layers, + debug=self.debug, + ) # Validate response object self.assert_valid_response_object(response) # Fail - self.assertFalse(response['success']) + self.assertFalse(response["success"]) # Expect Error - r = response['error'] + r = response["error"] # Properties - self.assertIn('Failed Request', r) + self.assertIn("Failed Request", r) - mc.get_layergroup.assert_called_with(name=self.mock_layer_groups[0], workspace=None) + mc.get_layergroup.assert_called_with( + name=self.mock_layer_groups[0], workspace=None + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_layer') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.put') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.list_styles') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace') - def test_update_layer_styles(self, mock_ws, mock_list_styles, mock_put, mock_logger, mock_get_layer): + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_layer" + ) + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.put") + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.list_styles" + ) + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace" + ) + def test_update_layer_styles( + self, mock_ws, mock_list_styles, mock_put, mock_logger, mock_get_layer + ): mock_put.return_value = MockResponse(200) - mock_get_layer.return_value = {'success': True, 'result': None} + mock_get_layer.return_value = {"success": True, "result": None} mock_ws().name = self.workspace_name mock_list_styles.return_value = self.style_names layer_id = self.layer_names[0] @@ -1623,44 +1769,54 @@ def test_update_layer_styles(self, mock_ws, mock_list_styles, mock_put, mock_log self.engine.update_layer_styles(layer_id, default_style, other_styles) - expected_url = '{endpoint}layers/{layer}.xml'.format( - endpoint=self.endpoint, - layer=layer_id + expected_url = "{endpoint}layers/{layer}.xml".format( + endpoint=self.endpoint, layer=layer_id ) - expected_headers = { - "Content-type": "text/xml" - } + expected_headers = {"Content-type": "text/xml"} - with open(os.path.join(self.files_root, 'test_create_layer.xml')) as rendered: + with open(os.path.join(self.files_root, "test_create_layer.xml")) as rendered: expected_xml = rendered.read() - mock_put.assert_called_with(expected_url, headers=expected_headers, auth=self.auth, data=expected_xml) + mock_put.assert_called_with( + expected_url, headers=expected_headers, auth=self.auth, data=expected_xml + ) mock_logger.info.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.put') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.list_styles') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace') - def test_update_layer_styles_exception(self, mock_ws, mock_list_styles, mock_put, mock_logger): - mock_put.return_value = MockResponse(500, '500 exception') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + 
@mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.put") + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.list_styles" + ) + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace" + ) + def test_update_layer_styles_exception( + self, mock_ws, mock_list_styles, mock_put, mock_logger + ): + mock_put.return_value = MockResponse(500, "500 exception") mock_ws().name = self.workspace_name mock_list_styles.return_value = self.style_names layer_id = self.layer_names[0] default_style = self.style_names[0] other_styles = [self.style_names[1]] - self.assertRaises(requests.RequestException, self.engine.update_layer_styles, layer_id, default_style, - other_styles) + self.assertRaises( + requests.RequestException, + self.engine.update_layer_styles, + layer_id, + default_style, + other_styles, + ) mock_logger.error.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_delete_resource_with_workspace(self, mock_catalog): mc = mock_catalog() mc.get_resource.return_value = self.mock_resources[0] - resource_id = '{}:{}'.format(self.workspace_name, self.resource_names[0]) + resource_id = "{}:{}".format(self.workspace_name, self.resource_names[0]) # Execute response = self.engine.delete_resource(resource_id, store_id=self.mock_store) @@ -1669,12 +1825,17 @@ def test_delete_resource_with_workspace(self, mock_catalog): self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) - mc.get_resource.assert_called_with(name=self.resource_names[0], store=self.mock_store, - workspace=self.workspace_name) - mc.delete.assert_called_with(config_object=self.mock_resources[0], purge=False, recurse=False) + self.assertTrue(response["success"]) + mc.get_resource.assert_called_with( + name=self.resource_names[0], + store=self.mock_store, + workspace=self.workspace_name, + ) + mc.delete.assert_called_with( + config_object=self.mock_resources[0], purge=False, recurse=False + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_delete_resource_without_workspace(self, mock_catalog): mc = mock_catalog() mc.get_resource.return_value = self.mock_resources[0] @@ -1688,18 +1849,23 @@ def test_delete_resource_without_workspace(self, mock_catalog): self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) - mc.get_resource.assert_called_with(name=self.resource_names[0], store=self.mock_store, - workspace=self.workspace_name) - mc.delete.assert_called_with(config_object=self.mock_resources[0], purge=False, recurse=False) + self.assertTrue(response["success"]) + mc.get_resource.assert_called_with( + name=self.resource_names[0], + store=self.mock_store, + workspace=self.workspace_name, + ) + mc.delete.assert_called_with( + config_object=self.mock_resources[0], purge=False, recurse=False + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_delete_resource_error(self, mock_catalog): mc = mock_catalog() mc.get_resource.return_value = self.mock_resources[0] mc.delete.side_effect = geoserver.catalog.FailedRequestError() - resource_id = '{}:{}'.format(self.workspace_name, 
self.resource_names[0]) + resource_id = "{}:{}".format(self.workspace_name, self.resource_names[0]) # Execute response = self.engine.delete_resource(resource_id, store_id=self.mock_store) @@ -1708,17 +1874,22 @@ def test_delete_resource_error(self, mock_catalog): self.assert_valid_response_object(response) # Success - self.assertFalse(response['success']) - mc.delete.assert_called_with(config_object=self.mock_resources[0], purge=False, recurse=False) - mc.get_resource.assert_called_with(name=self.resource_names[0], store=self.mock_store, - workspace=self.workspace_name) + self.assertFalse(response["success"]) + mc.delete.assert_called_with( + config_object=self.mock_resources[0], purge=False, recurse=False + ) + mc.get_resource.assert_called_with( + name=self.resource_names[0], + store=self.mock_store, + workspace=self.workspace_name, + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_delete_resource_does_not_exist(self, mock_catalog): mc = mock_catalog() mc.get_resource.return_value = None - resource_id = '{}:{}'.format(self.workspace_name, self.resource_names[0]) + resource_id = "{}:{}".format(self.workspace_name, self.resource_names[0]) # Execute response = self.engine.delete_resource(resource_id, store_id=self.store_name) @@ -1727,13 +1898,18 @@ def test_delete_resource_does_not_exist(self, mock_catalog): self.assert_valid_response_object(response) # Success - self.assertFalse(response['success']) - self.assertIn('GeoServer object does not exist', response['error']) - mc.get_resource.assert_called_with(name=self.resource_names[0], store=self.store_name, - workspace=self.workspace_name) + self.assertFalse(response["success"]) + self.assertIn("GeoServer object does not exist", response["error"]) + mc.get_resource.assert_called_with( + name=self.resource_names[0], + store=self.store_name, + workspace=self.workspace_name, + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.delete') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.delete") + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace" + ) def test_delete_layer(self, mock_workspace, mock_delete): mock_delete.return_value = MockResponse(200) mock_workspace().name = self.workspace_name @@ -1746,75 +1922,86 @@ def test_delete_layer(self, mock_workspace, mock_delete): self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.delete') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.delete") def test_delete_layer_warning(self, mock_delete, mock_logger): mock_delete.return_value = MockResponse(404) - layer_name = f'{self.workspace_name}:{self.layer_names[0]}' + layer_name = f"{self.workspace_name}:{self.layer_names[0]}" # Execute self.engine.delete_layer(layer_name, datastore=self.store_name) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.delete') + 
@mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.delete") def test_delete_layer_exception(self, mock_delete, mock_logger): - mock_delete.return_value = MockResponse(500, '500 exception') - layer_name = f'{self.workspace_name}:{self.layer_names[0]}' + mock_delete.return_value = MockResponse(500, "500 exception") + layer_name = f"{self.workspace_name}:{self.layer_names[0]}" # Execute - self.assertRaises(requests.RequestException, self.engine.delete_layer, layer_name, datastore=self.store_name) + self.assertRaises( + requests.RequestException, + self.engine.delete_layer, + layer_name, + datastore=self.store_name, + ) mock_logger.error.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.delete') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.delete") def test_delete_layer_group(self, mock_delete): mock_delete.return_value = MockResponse(200) - group_name = f'{self.workspace_name}:{self.layer_group_names[0]}' + group_name = f"{self.workspace_name}:{self.layer_group_names[0]}" self.engine.delete_layer_group(group_name) # Validate endpoint calls - url = '{endpoint}workspaces/{w}/layergroups/{lg}'.format( - endpoint=self.endpoint, - w=self.workspace_name, - lg=self.layer_group_names[0] + url = "{endpoint}workspaces/{w}/layergroups/{lg}".format( + endpoint=self.endpoint, w=self.workspace_name, lg=self.layer_group_names[0] ) # Create feature type call mock_delete.assert_called_with(url, auth=self.auth) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.delete') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.delete") + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace" + ) def test_delete_layer_group_no_group(self, mock_workspace, mock_delete): - mock_delete.return_value = MockResponse(404, 'No such layer group') + mock_delete.return_value = MockResponse(404, "No such layer group") mock_workspace().name = self.workspace_name group_name = self.layer_group_names[0] self.engine.delete_layer_group(group_name) # Validate endpoint calls - url = '{endpoint}workspaces/{w}/layergroups/{lg}'.format( - endpoint=self.endpoint, - w=self.workspace_name, - lg=self.layer_group_names[0] + url = "{endpoint}workspaces/{w}/layergroups/{lg}".format( + endpoint=self.endpoint, w=self.workspace_name, lg=self.layer_group_names[0] ) # Create feature type call mock_delete.assert_called_with(url, auth=self.auth) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.delete') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace') - def test_delete_layer_group_exception(self, mock_workspace, mock_delete, mock_logger): - mock_delete.return_value = MockResponse(404, "These aren't the droids you're looking for...") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.delete") + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace" + ) + def test_delete_layer_group_exception( + self, mock_workspace, mock_delete, mock_logger + ): + mock_delete.return_value = MockResponse( + 404, "These aren't the droids 
you're looking for..." + ) mock_workspace().name = self.workspace_name group_name = self.layer_group_names[0] - self.assertRaises(requests.RequestException, self.engine.delete_layer_group, group_name) + self.assertRaises( + requests.RequestException, self.engine.delete_layer_group, group_name + ) mock_logger.error.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_delete_workspace(self, mock_catalog): mc = mock_catalog() mc.get_workspace.return_value = self.mock_workspaces[0] @@ -1824,13 +2011,15 @@ def test_delete_workspace(self, mock_catalog): # Should succeed self.assert_valid_response_object(response) - self.assertTrue(response['success']) - self.assertIsNone(response['result']) + self.assertTrue(response["success"]) + self.assertIsNone(response["result"]) mc.get_workspace.assert_called_with(self.workspace_names[0]) - mc.delete.assert_called_with(config_object=self.mock_workspaces[0], purge=False, recurse=False) + mc.delete.assert_called_with( + config_object=self.mock_workspaces[0], purge=False, recurse=False + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_delete_store(self, mock_catalog): mc = mock_catalog() mc.get_store.return_value = self.mock_stores[0] @@ -1841,16 +2030,22 @@ def test_delete_store(self, mock_catalog): # Should succeed self.assert_valid_response_object(response) - self.assertTrue(response['success']) - self.assertIsNone(response['result']) + self.assertTrue(response["success"]) + self.assertIsNone(response["result"]) - mc.get_store.assert_called_with(name=self.store_names[0], workspace=self.workspace_name) - mc.delete.assert_called_with(config_object=self.mock_stores[0], purge=False, recurse=False) + mc.get_store.assert_called_with( + name=self.store_names[0], workspace=self.workspace_name + ) + mc.delete.assert_called_with( + config_object=self.mock_stores[0], purge=False, recurse=False + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_delete_store_failed_request(self, mock_catalog): mc = mock_catalog() - mc.get_store.side_effect = geoserver.catalog.FailedRequestError('Failed Request') + mc.get_store.side_effect = geoserver.catalog.FailedRequestError( + "Failed Request" + ) mc.get_default_workspace().name = self.workspace_name @@ -1859,111 +2054,127 @@ def test_delete_store_failed_request(self, mock_catalog): # Failure Check self.assert_valid_response_object(response) - self.assertFalse(response['success']) - self.assertIn('Failed Request', response['error']) + self.assertFalse(response["success"]) + self.assertIn("Failed Request", response["error"]) - mc.get_store.assert_called_with(name=self.store_names[0], workspace=self.workspace_name) + mc.get_store.assert_called_with( + name=self.store_names[0], workspace=self.workspace_name + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.delete') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.delete") + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace" + ) def test_delete_coverage_store(self, mock_ws, mock_delete): 
mock_delete.return_value = MockResponse(200) mock_ws().name = self.workspace_name - coverage_name = 'foo' - url = 'workspaces/{workspace}/coveragestores/{coverage_store_name}'.format( + coverage_name = "foo" + url = "workspaces/{workspace}/coveragestores/{coverage_store_name}".format( workspace=self.workspace_name, coverage_store_name=coverage_name, ) - json = {'recurse': True, 'purge': True} + json = {"recurse": True, "purge": True} self.engine.delete_coverage_store(store_id=coverage_name) put_call_args = mock_delete.call_args_list - self.assertIn(url, put_call_args[0][1]['url']) - self.assertEqual(json, put_call_args[0][1]['params']) - self.assertEqual({"Content-type": "application/json"}, put_call_args[0][1]['headers']) + self.assertIn(url, put_call_args[0][1]["url"]) + self.assertEqual(json, put_call_args[0][1]["params"]) + self.assertEqual( + {"Content-type": "application/json"}, put_call_args[0][1]["headers"] + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.delete') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.delete") def test_delete_coverage_store_with_warning(self, mock_delete, mock_log): mock_delete.return_value = MockResponse(403) - coverage_name = f'{self.workspace_name}:foo' - url = 'workspaces/{workspace}/coveragestores/{coverage_store_name}'.format( + coverage_name = f"{self.workspace_name}:foo" + url = "workspaces/{workspace}/coveragestores/{coverage_store_name}".format( workspace=self.workspace_name, - coverage_store_name='foo', + coverage_store_name="foo", ) - json = {'recurse': True, 'purge': True} + json = {"recurse": True, "purge": True} self.engine.delete_coverage_store(store_id=coverage_name) put_call_args = mock_delete.call_args_list - self.assertIn(url, put_call_args[0][1]['url']) - self.assertEqual(json, put_call_args[0][1]['params']) - self.assertEqual({"Content-type": "application/json"}, put_call_args[0][1]['headers']) + self.assertIn(url, put_call_args[0][1]["url"]) + self.assertEqual(json, put_call_args[0][1]["params"]) + self.assertEqual( + {"Content-type": "application/json"}, put_call_args[0][1]["headers"] + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.delete') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.delete") def test_delete_coverage_store_with_error(self, mock_delete, mock_log): mock_delete.return_value = MockResponse(500) - coverage_name = f'{self.workspace_name}:foo' - url = 'workspaces/{workspace}/coveragestores/{coverage_store_name}'.format( + coverage_name = f"{self.workspace_name}:foo" + url = "workspaces/{workspace}/coveragestores/{coverage_store_name}".format( workspace=self.workspace_name, - coverage_store_name='foo', + coverage_store_name="foo", ) - json = {'recurse': True, 'purge': True} + json = {"recurse": True, "purge": True} - self.assertRaises(requests.RequestException, self.engine.delete_coverage_store, coverage_name) + self.assertRaises( + requests.RequestException, self.engine.delete_coverage_store, coverage_name + ) put_call_args = mock_delete.call_args_list - self.assertIn(url, put_call_args[0][1]['url']) - self.assertEqual(json, put_call_args[0][1]['params']) - self.assertEqual({"Content-type": "application/json"}, 
put_call_args[0][1]['headers']) + self.assertIn(url, put_call_args[0][1]["url"]) + self.assertEqual(json, put_call_args[0][1]["params"]) + self.assertEqual( + {"Content-type": "application/json"}, put_call_args[0][1]["headers"] + ) mock_log.error.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.delete') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.delete") + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace" + ) def test_delete_style(self, mock_workspace, mock_delete): mock_workspace.return_value = self.mock_workspaces[0] mock_delete.return_value = MockResponse(200) - style_id = '{}:{}'.format(self.mock_workspaces[0].name, self.mock_styles[0].name) + style_id = "{}:{}".format( + self.mock_workspaces[0].name, self.mock_styles[0].name + ) # Do delete response = self.engine.delete_style(style_id=style_id) # Should succeed self.assert_valid_response_object(response) - self.assertTrue(response['success']) - self.assertIsNone(response['result']) + self.assertTrue(response["success"]) + self.assertIsNone(response["result"]) # Delete Tests delete_call_args = mock_delete.call_args_list - expected_url = '{endpoint}workspaces/{w}/styles/{s}'.format( + expected_url = "{endpoint}workspaces/{w}/styles/{s}".format( endpoint=self.endpoint, w=self.mock_workspaces[0].name, - s=self.mock_styles[0].name + s=self.mock_styles[0].name, + ) + expected_headers = {"Content-type": "application/json"} + expected_params = {"purge": False} + self.assertEqual(expected_url, delete_call_args[0][1]["url"]) + self.assertEqual(self.auth, delete_call_args[0][1]["auth"]) + self.assertEqual(expected_headers, delete_call_args[0][1]["headers"]) + self.assertEqual(expected_params, delete_call_args[0][1]["params"]) + + mock_delete.assert_called_with( + url=expected_url, + auth=self.auth, + headers=expected_headers, + params=expected_params, ) - expected_headers = { - "Content-type": "application/json" - } - expected_params = { - 'purge': False - } - self.assertEqual(expected_url, delete_call_args[0][1]['url']) - self.assertEqual(self.auth, delete_call_args[0][1]['auth']) - self.assertEqual(expected_headers, delete_call_args[0][1]['headers']) - self.assertEqual(expected_params, delete_call_args[0][1]['params']) - - mock_delete.assert_called_with(url=expected_url, auth=self.auth, headers=expected_headers, - params=expected_params) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.delete') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.delete") def test_delete_style_warning(self, mock_delete, mock_logger): mock_delete.return_value = mock.MagicMock(status_code=404) style_id = self.mock_styles[0].name @@ -1971,19 +2182,19 @@ def test_delete_style_warning(self, mock_delete, mock_logger): self.engine.delete_style(style_id=style_id, purge=True) # Validate endpoint calls - url = '{endpoint}styles/{s}'.format(endpoint=self.endpoint, s=style_id) + url = "{endpoint}styles/{s}".format(endpoint=self.endpoint, s=style_id) - headers = { - "Content-type": "application/json" - } + headers = {"Content-type": "application/json"} - params = {'purge': True} + params = {"purge": True} # Create feature type call - 
mock_delete.assert_called_with(url=url, auth=self.auth, headers=headers, params=params) + mock_delete.assert_called_with( + url=url, auth=self.auth, headers=headers, params=params + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.delete') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.delete") def test_delete_style_exception(self, mock_delete, mock_logger): mock_delete.return_value = mock.MagicMock(status_code=500) style_id = self.mock_styles[0].name @@ -1991,57 +2202,66 @@ def test_delete_style_exception(self, mock_delete, mock_logger): self.assertRaises(requests.RequestException, self.engine.delete_style, style_id) # Validate endpoint calls - url = '{endpoint}styles/{s}'.format(endpoint=self.endpoint, s=style_id) + url = "{endpoint}styles/{s}".format(endpoint=self.endpoint, s=style_id) - headers = { - "Content-type": "application/json" - } + headers = {"Content-type": "application/json"} - params = {'purge': False} + params = {"purge": False} # Create feature type call - mock_delete.assert_called_with(url=url, auth=self.auth, headers=headers, params=params) + mock_delete.assert_called_with( + url=url, auth=self.auth, headers=headers, params=params + ) mock_logger.error.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_layer_group') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_layer_group" + ) + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") def test_create_layer_group(self, mock_post, mock_get_layer_group): mock_post.return_value = MockResponse(201) - group_name = f'{self.workspace_name}:{self.layer_group_names[0]}' + group_name = f"{self.workspace_name}:{self.layer_group_names[0]}" layer_names = self.layer_names[:2] default_styles = self.style_names self.engine.create_layer_group(group_name, layer_names, default_styles) # Validate endpoint calls - layer_group_url = 'workspaces/{w}/layergroups.json'.format( + layer_group_url = "workspaces/{w}/layergroups.json".format( w=self.workspace_name ) - with open(os.path.join(self.files_root, 'test_create_layer_group.xml')) as rendered: + with open( + os.path.join(self.files_root, "test_create_layer_group.xml") + ) as rendered: expected_xml = rendered.read() # Create feature type call post_call_args = mock_post.call_args_list # call_args[call_num][0=args|1=kwargs][arg_index|kwarg_key] self.assertIn(layer_group_url, post_call_args[0][0][0]) - self.assertEqual(expected_xml, post_call_args[0][1]['data']) + self.assertEqual(expected_xml, post_call_args[0][1]["data"]) mock_get_layer_group.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace" + ) def test_create_layer_group_exception(self, mock_workspace, mock_post, mock_logger): - mock_post.return_value = 
MockResponse(500, 'Layer group exception') + mock_post.return_value = MockResponse(500, "Layer group exception") mock_workspace().name = self.workspace_name group_name = self.layer_group_names[0] layer_names = self.layer_names[:2] default_styles = self.style_names with self.assertRaises(requests.RequestException) as error: self.engine.create_layer_group(group_name, layer_names, default_styles) - self.assertEqual('Create Layer Group Status Code 500: Layer group exception', str(error.exception)) + self.assertEqual( + "Create Layer Group Status Code 500: Layer group exception", + str(error.exception), + ) mock_logger.error.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.put') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.put") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_create_shapefile_resource(self, mock_catalog, mock_put): mock_put.return_value = MockResponse(201) mc = mock_catalog() @@ -2049,221 +2269,248 @@ def test_create_shapefile_resource(self, mock_catalog, mock_put): mc.get_resource.return_value = self.mock_resources[0] # Setup - shapefile_name = os.path.join(self.files_root, 'shapefile', 'test') + shapefile_name = os.path.join(self.files_root, "shapefile", "test") store_id = self.store_names[0] # Execute - response = self.engine.create_shapefile_resource(store_id=store_id, - shapefile_base=shapefile_name, - overwrite=True - ) + response = self.engine.create_shapefile_resource( + store_id=store_id, shapefile_base=shapefile_name, overwrite=True + ) # Should succeed - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) - self.assertIn(self.mock_resources[0].name, r['name']) - self.assertIn(self.store_name[0], r['store']) + self.assertIn(self.mock_resources[0].name, r["name"]) + self.assertIn(self.store_name[0], r["store"]) mc.get_default_workspace.assert_called_with() - mc.get_resource.assert_called_with(name=self.store_names[0], store=self.store_names[0], - workspace=self.workspace_name[0]) + mc.get_resource.assert_called_with( + name=self.store_names[0], + store=self.store_names[0], + workspace=self.workspace_name[0], + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.put') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.put") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_create_shapefile_resource_zipfile(self, mock_catalog, mock_put): mock_put.return_value = MockResponse(201) mc = mock_catalog() mc.get_resource.return_value = self.mock_resources[0] # Setup - shapefile_name = os.path.join(self.files_root, 'shapefile', 'test1.zip') + shapefile_name = os.path.join(self.files_root, "shapefile", "test1.zip") # Workspace is given - store_id = '{}:{}'.format(self.workspace_name, self.store_names[0]) + store_id = "{}:{}".format(self.workspace_name, self.store_names[0]) # Execute - response = self.engine.create_shapefile_resource(store_id=store_id, - shapefile_zip=shapefile_name, - overwrite=True, - charset='ISO - 8559 - 1', - ) + response = self.engine.create_shapefile_resource( + store_id=store_id, + shapefile_zip=shapefile_name, + overwrite=True, + charset="ISO - 8559 - 1", + ) # Should 
succeed - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) - self.assertIn(self.mock_resources[0].name, r['name']) - self.assertIn(self.store_name[0], r['store']) + self.assertIn(self.mock_resources[0].name, r["name"]) + self.assertIn(self.store_name[0], r["store"]) - mc.get_resource.assert_called_with(name='test1', store=self.store_names[0], workspace=self.workspace_name) + mc.get_resource.assert_called_with( + name="test1", store=self.store_names[0], workspace=self.workspace_name + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.put') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.put") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_create_shapefile_resource_upload(self, mock_catalog, mock_put): mock_put.return_value = MockResponse(201) mc = mock_catalog() mc.get_resource.return_value = self.mock_resources[0] # Setup - shapefile_cst = os.path.join(self.files_root, 'shapefile', 'test.cst') - shapefile_dbf = os.path.join(self.files_root, 'shapefile', 'test.dbf') - shapefile_prj = os.path.join(self.files_root, 'shapefile', 'test.prj') - shapefile_shp = os.path.join(self.files_root, 'shapefile', 'test.shp') - shapefile_shx = os.path.join(self.files_root, 'shapefile', 'test.shx') + shapefile_cst = os.path.join(self.files_root, "shapefile", "test.cst") + shapefile_dbf = os.path.join(self.files_root, "shapefile", "test.dbf") + shapefile_prj = os.path.join(self.files_root, "shapefile", "test.prj") + shapefile_shp = os.path.join(self.files_root, "shapefile", "test.shp") + shapefile_shx = os.path.join(self.files_root, "shapefile", "test.shx") # Workspace is given - store_id = '{}:{}'.format(self.workspace_name, self.store_names[0]) - - with open(shapefile_cst, 'rb') as cst_upload,\ - open(shapefile_dbf, 'rb') as dbf_upload,\ - open(shapefile_prj, 'rb') as prj_upload,\ - open(shapefile_shp, 'rb') as shp_upload,\ - open(shapefile_shx, 'rb') as shx_upload: + store_id = "{}:{}".format(self.workspace_name, self.store_names[0]) + + with open(shapefile_cst, "rb") as cst_upload, open( + shapefile_dbf, "rb" + ) as dbf_upload, open(shapefile_prj, "rb") as prj_upload, open( + shapefile_shp, "rb" + ) as shp_upload, open( + shapefile_shx, "rb" + ) as shx_upload: upload_list = [cst_upload, dbf_upload, prj_upload, shp_upload, shx_upload] - response = self.engine.create_shapefile_resource(store_id=store_id, - shapefile_upload=upload_list, - overwrite=True, - ) + response = self.engine.create_shapefile_resource( + store_id=store_id, + shapefile_upload=upload_list, + overwrite=True, + ) # Should succeed - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) - self.assertIn(self.mock_resources[0].name, r['name']) - self.assertIn(self.store_name[0], r['store']) + self.assertIn(self.mock_resources[0].name, r["name"]) + self.assertIn(self.store_name[0], r["store"]) - mc.get_resource.assert_called_with(name=self.store_names[0], store=self.store_names[0], - workspace=self.workspace_name) + mc.get_resource.assert_called_with( + name=self.store_names[0], + store=self.store_names[0], + workspace=self.workspace_name, + ) def test_create_shapefile_resource_zipfile_typeerror(self): # Setup - 
shapefile_name = os.path.join(self.files_root, 'shapefile', 'test.shp') + shapefile_name = os.path.join(self.files_root, "shapefile", "test.shp") # Workspace is given - store_id = '{}:{}'.format(self.workspace_name, self.store_name[0]) + store_id = "{}:{}".format(self.workspace_name, self.store_name[0]) # Should Fail - self.assertRaises(TypeError, - self.engine.create_shapefile_resource, - store_id=store_id, - shapefile_zip=shapefile_name, - overwrite=True) + self.assertRaises( + TypeError, + self.engine.create_shapefile_resource, + store_id=store_id, + shapefile_zip=shapefile_name, + overwrite=True, + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_create_shapefile_resource_overwrite_store_exists(self, mock_catalog): # Setup - shapefile_name = os.path.join(self.files_root, 'shapefile', 'test') - store_id = '{}:{}'.format(self.workspace_name, self.store_names[0]) + shapefile_name = os.path.join(self.files_root, "shapefile", "test") + store_id = "{}:{}".format(self.workspace_name, self.store_names[0]) # Execute - response = self.engine.create_shapefile_resource(store_id=store_id, - shapefile_base=shapefile_name, - overwrite=False - ) + response = self.engine.create_shapefile_resource( + store_id=store_id, shapefile_base=shapefile_name, overwrite=False + ) # Should Fail - self.assertFalse(response['success']) + self.assertFalse(response["success"]) # Extract Result - r = response['error'] + r = response["error"] # Check error message - error_message = 'There is already a store named ' + self.store_names[0] + ' in ' + self.workspace_name + error_message = ( + "There is already a store named " + + self.store_names[0] + + " in " + + self.workspace_name + ) self.assertIn(error_message, r) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.put') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') - def test_create_shapefile_resource_overwrite_store_not_exists(self, mock_catalog, mock_put): + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.put") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") + def test_create_shapefile_resource_overwrite_store_not_exists( + self, mock_catalog, mock_put + ): mock_put.return_value = MockResponse(201) mc = mock_catalog() mc.get_store.side_effect = geoserver.catalog.FailedRequestError() mc.get_resource.return_value = self.mock_resources[0] # Setup - shapefile_name = os.path.join(self.files_root, 'shapefile', 'test') + shapefile_name = os.path.join(self.files_root, "shapefile", "test") # Workspace is given - store_id = '{}:{}'.format(self.workspace_name, self.store_names[0]) + store_id = "{}:{}".format(self.workspace_name, self.store_names[0]) # Execute response = self.engine.create_shapefile_resource( - store_id=store_id, - shapefile_base=shapefile_name, - overwrite=False + store_id=store_id, shapefile_base=shapefile_name, overwrite=False ) # Should succeed - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) - self.assertIn(self.mock_resources[0].name, r['name']) - self.assertIn(self.store_name[0], r['store']) + self.assertIn(self.mock_resources[0].name, r["name"]) + self.assertIn(self.store_name[0], r["store"]) - mc.get_resource.assert_called_with(name=self.store_names[0], store=self.store_names[0], - 
workspace=self.workspace_name) + mc.get_resource.assert_called_with( + name=self.store_names[0], + store=self.store_names[0], + workspace=self.workspace_name, + ) def test_create_shapefile_resource_validate_shapefile_args(self): - self.assertRaises(ValueError, - self.engine.create_shapefile_resource, - store_id='foo') - self.assertRaises(ValueError, - self.engine.create_shapefile_resource, - store_id='foo', - shapefile_zip='zipfile', - shapefile_upload='su', - shapefile_base='base') - self.assertRaises(ValueError, - self.engine.create_shapefile_resource, - store_id='foo', - shapefile_upload='su', - shapefile_base='base') - self.assertRaises(ValueError, - self.engine.create_shapefile_resource, - store_id='foo', - shapefile_zip='zipfile', - shapefile_base='base') - self.assertRaises(ValueError, - self.engine.create_shapefile_resource, - store_id='foo', - shapefile_zip='zipfile', - shapefile_upload='su') - - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.put') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + self.assertRaises( + ValueError, self.engine.create_shapefile_resource, store_id="foo" + ) + self.assertRaises( + ValueError, + self.engine.create_shapefile_resource, + store_id="foo", + shapefile_zip="zipfile", + shapefile_upload="su", + shapefile_base="base", + ) + self.assertRaises( + ValueError, + self.engine.create_shapefile_resource, + store_id="foo", + shapefile_upload="su", + shapefile_base="base", + ) + self.assertRaises( + ValueError, + self.engine.create_shapefile_resource, + store_id="foo", + shapefile_zip="zipfile", + shapefile_base="base", + ) + self.assertRaises( + ValueError, + self.engine.create_shapefile_resource, + store_id="foo", + shapefile_zip="zipfile", + shapefile_upload="su", + ) + + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.put") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_create_shapefile_resource_failure(self, _, mock_put): - mock_put.return_value = MockResponse(404, reason='Failure') + mock_put.return_value = MockResponse(404, reason="Failure") # Setup - shapefile_name = os.path.join(self.files_root, 'shapefile', 'test') - store_id = '{}:{}'.format(self.workspace_name, self.store_name[0]) + shapefile_name = os.path.join(self.files_root, "shapefile", "test") + store_id = "{}:{}".format(self.workspace_name, self.store_name[0]) # Execute - response = self.engine.create_shapefile_resource(store_id=store_id, - shapefile_base=shapefile_name, - overwrite=True - ) + response = self.engine.create_shapefile_resource( + store_id=store_id, shapefile_base=shapefile_name, overwrite=True + ) # Should succeed - self.assertFalse(response['success']) + self.assertFalse(response["success"]) # Extract Result - r = response['error'] + r = response["error"] # Check Response - self.assertIn('404', r) - self.assertIn('Failure', r) + self.assertIn("404", r) + self.assertIn("Failure", r) def test_type_property(self): response = self.engine.type - expected_response = 'GEOSERVER' + expected_response = "GEOSERVER" # Check Response self.assertEqual(response, expected_response) @@ -2272,37 +2519,37 @@ def test_public_endpoint_property(self): response = self.engine.public_endpoint # Check Response - self.assertIn('.public.', response) + self.assertIn(".public.", response) def test_gwc_endpoint_property(self): response = self.engine.gwc_endpoint # Check Response - self.assertIn('/gwc/rest/', response) + self.assertIn("/gwc/rest/", response) def 
test_get_gwc_endpoint(self): response = self.engine.get_gwc_endpoint(public=False) # Check Response - self.assertIn('/gwc/rest/', response) + self.assertIn("/gwc/rest/", response) mock_engine = GeoServerSpatialDatasetEngine( endpoint=self.endpoint, username=self.username, password=self.password, - public_endpoint=self.public_endpoint[:-1] + public_endpoint=self.public_endpoint[:-1], ) response = mock_engine.get_gwc_endpoint() # Check Response with public endpoint - self.assertIn('.public.', response) - self.assertIn('/gwc/rest/', response) + self.assertIn(".public.", response) + self.assertIn("/gwc/rest/", response) def test_get_ows_endpoint(self): workspace = self.workspace_name response = self.engine.get_ows_endpoint(workspace, public=False) - expected_url_match = '/{ws}/ows/'.format(ws=workspace) + expected_url_match = "/{ws}/ows/".format(ws=workspace) # Check Response self.assertIn(expected_url_match, response) @@ -2311,83 +2558,87 @@ def test_get_ows_endpoint(self): endpoint=self.endpoint, username=self.username, password=self.password, - public_endpoint=self.public_endpoint[:-1] + public_endpoint=self.public_endpoint[:-1], ) response = mock_engine.get_ows_endpoint(workspace) # Check Response with public endpoint - self.assertIn('.public.', response) + self.assertIn(".public.", response) self.assertIn(expected_url_match, response) def test_get_wms_endpoint(self): response = self.engine.get_wms_endpoint(public=False) # Check Response - self.assertIn('/wms/', response) + self.assertIn("/wms/", response) mock_engine = GeoServerSpatialDatasetEngine( endpoint=self.endpoint, username=self.username, password=self.password, - public_endpoint=self.public_endpoint[:-1] + public_endpoint=self.public_endpoint[:-1], ) response = mock_engine.get_wms_endpoint() # Check Response with public endpoint - self.assertIn('.public.', response) - self.assertIn('/wms/', response) + self.assertIn(".public.", response) + self.assertIn("/wms/", response) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") def test_reload_ports_none(self, mock_post): mock_post.return_value = MockResponse(200) self.engine.reload() - rest_endpoint = self.public_endpoint + 'reload' + rest_endpoint = self.public_endpoint + "reload" mock_post.assert_called_with(rest_endpoint, auth=self.auth) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") def test_reload_with_ports(self, mock_post): mock_post.return_value = MockResponse(200) self.engine.reload([17300, 18000]) self.assertEqual(mock_post.call_count, 2) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") def test_reload_not_200(self, mock_post, mock_logger): - mock_post.return_value = MockResponse(500, '500 exception') + mock_post.return_value = MockResponse(500, "500 exception") response = self.engine.reload() mock_logger.error.assert_called() - self.assertEqual('Catalog Reload Status Code 500: 500 exception', response['error'][0]) + self.assertEqual( + "Catalog Reload Status Code 500: 500 exception", response["error"][0] + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - 
@mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") def test_reload_connection_error(self, mock_post, mock_logger): mock_post.side_effect = requests.ConnectionError() response = self.engine.reload() mock_logger.warning.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") def test_gwc_reload_ports_none(self, mock_post): mock_post.return_value = MockResponse(200) self.engine.gwc_reload() - rest_endpoint = self.public_endpoint.replace('rest', 'gwc/rest') + 'reload' + rest_endpoint = self.public_endpoint.replace("rest", "gwc/rest") + "reload" mock_post.assert_called_with(rest_endpoint, auth=self.auth) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") def test_gwc_reload_with_ports(self, mock_post): mock_post.return_value = MockResponse(200) self.engine.gwc_reload([17300, 18000]) self.assertEqual(mock_post.call_count, 2) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") def test_gwc_reload_not_200(self, mock_post, mock_logger): - mock_post.return_value = MockResponse(500, '500 exception') + mock_post.return_value = MockResponse(500, "500 exception") response = self.engine.gwc_reload() mock_logger.error.assert_called() - self.assertEqual('GeoWebCache Reload Status Code 500: 500 exception', response['error'][0]) + self.assertEqual( + "GeoWebCache Reload Status Code 500: 500 exception", response["error"][0] + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") def test_gwc_reload_connection_error(self, mock_post, mock_logger): mock_post.side_effect = requests.ConnectionError() response = self.engine.gwc_reload() @@ -2395,63 +2646,61 @@ def test_gwc_reload_connection_error(self, mock_post, mock_logger): def test_ini_no_slash_endpoint(self): self.engine = GeoServerSpatialDatasetEngine( - endpoint='http://localhost:8181/geoserver/rest', + endpoint="http://localhost:8181/geoserver/rest", username=self.username, - password=self.password + password=self.password, ) - expected_endpoint = 'http://localhost:8181/geoserver/gwc/rest/' + expected_endpoint = "http://localhost:8181/geoserver/gwc/rest/" # Check Response self.assertEqual(expected_endpoint, self.engine.gwc_endpoint) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.get') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.get") def test_validate(self, mock_get): # Missing Schema mock_get.side_effect = requests.exceptions.MissingSchema - self.assertRaises(AssertionError, - self.engine.validate - ) + self.assertRaises(AssertionError, self.engine.validate) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.get') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.get") def 
test_validate_401(self, mock_get): # 401 Code mock_get.return_value = MockResponse(401) - self.assertRaises(AssertionError, - self.engine.validate - ) + self.assertRaises(AssertionError, self.engine.validate) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.get') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.get") def test_validate_not_200(self, mock_get): # !201 Code mock_get.return_value = MockResponse(201) - self.assertRaises(AssertionError, - self.engine.validate - ) + self.assertRaises(AssertionError, self.engine.validate) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.get') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.get") def test_validate_not_geoserver(self, mock_get): # text mock_get.return_value = MockResponse(200, text="Bad text") self.assertRaises(AssertionError, self.engine.validate) def test_modify_tile_cache_invalid_operation(self): - layer_id = f'{self.workspace_name}:gwc_layer_name' - operation = 'invalid-operation' - self.assertRaises(ValueError, self.engine.modify_tile_cache, layer_id, operation) + layer_id = f"{self.workspace_name}:gwc_layer_name" + operation = "invalid-operation" + self.assertRaises( + ValueError, self.engine.modify_tile_cache, layer_id, operation + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace" + ) def test_modify_tile_cache_mass_truncate(self, mock_ws, mock_post, mock_logger): mock_post.return_value = mock.MagicMock(status_code=200) mock_ws().name = self.workspace_name - layer_id = 'gwc_layer_name' + layer_id = "gwc_layer_name" operation = self.engine.GWC_OP_MASS_TRUNCATE self.engine.modify_tile_cache(layer_id, operation) - url = 'masstruncate/' + url = "masstruncate/" # Create feature type call post_call_args = mock_post.call_args_list @@ -2459,55 +2708,58 @@ def test_modify_tile_cache_mass_truncate(self, mock_ws, mock_post, mock_logger): self.assertIn(url, post_call_args[0][0][0]) mock_logger.info.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") def test_modify_tile_cache_seed(self, mock_post, mock_logger): mock_post.return_value = mock.MagicMock(status_code=200) - layer_id = f'{self.workspace_name}:gwc_layer_name' + layer_id = f"{self.workspace_name}:gwc_layer_name" operation = self.engine.GWC_OP_SEED self.engine.modify_tile_cache(layer_id, operation) - url = 'seed/{workspace}:{name}.xml'.format( - workspace=self.workspace_name, - name='gwc_layer_name' + url = "seed/{workspace}:{name}.xml".format( + workspace=self.workspace_name, name="gwc_layer_name" ) # Create feature type call post_call_args = mock_post.call_args_list # call_args[call_num][0=args|1=kwargs][arg_index|kwarg_key] self.assertIn(url, post_call_args[0][0][0]) - self.assertIn(operation, post_call_args[0][1]['data']) + self.assertIn(operation, 
post_call_args[0][1]["data"]) mock_logger.info.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") def test_modify_tile_cache_reseed(self, mock_post, mock_logger): mock_post.return_value = mock.MagicMock(status_code=200) - layer_id = f'{self.workspace_name}:gwc_layer_name' + layer_id = f"{self.workspace_name}:gwc_layer_name" operation = self.engine.GWC_OP_RESEED self.engine.modify_tile_cache(layer_id, operation) - url = 'seed/{workspace}:{name}.xml'.format( - workspace=self.workspace_name, - name='gwc_layer_name' + url = "seed/{workspace}:{name}.xml".format( + workspace=self.workspace_name, name="gwc_layer_name" ) # Create feature type call post_call_args = mock_post.call_args_list # call_args[call_num][0=args|1=kwargs][arg_index|kwarg_key] self.assertIn(url, post_call_args[0][0][0]) - self.assertIn(operation, post_call_args[0][1]['data']) + self.assertIn(operation, post_call_args[0][1]["data"]) mock_logger.info.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") def test_modify_tile_cache_exception(self, mock_post, mock_logger): mock_post.return_value = mock.MagicMock(status_code=500) - layer_id = f'{self.workspace_name}:gwc_layer_name' + layer_id = f"{self.workspace_name}:gwc_layer_name" operation = self.engine.GWC_OP_MASS_TRUNCATE - self.assertRaises(requests.RequestException, self.engine.modify_tile_cache, layer_id, operation) + self.assertRaises( + requests.RequestException, + self.engine.modify_tile_cache, + layer_id, + operation, + ) - url = 'masstruncate/' + url = "masstruncate/" # Create feature type call post_call_args = mock_post.call_args_list @@ -2516,61 +2768,72 @@ def test_modify_tile_cache_exception(self, mock_post, mock_logger): mock_logger.error.assert_called() def test_terminate_tile_cache_tasks_invalid_operation(self): - layer_id = f'{self.workspace_name}:gwc_layer_name' - operation = 'invalid-operation' - self.assertRaises(ValueError, self.engine.terminate_tile_cache_tasks, layer_id, kill=operation) + layer_id = f"{self.workspace_name}:gwc_layer_name" + operation = "invalid-operation" + self.assertRaises( + ValueError, self.engine.terminate_tile_cache_tasks, layer_id, kill=operation + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace" + ) def test_terminate_tile_cache_tasks(self, mock_ws, mock_post): mock_post.return_value = mock.MagicMock(status_code=200) mock_ws().name = self.workspace_name - layer_id = 'gwc_layer_name' + layer_id = "gwc_layer_name" self.engine.terminate_tile_cache_tasks(layer_id) - url = '{endpoint}seed/{workspace}:{name}'.format( + url = "{endpoint}seed/{workspace}:{name}".format( endpoint=self.engine.get_gwc_endpoint(), workspace=self.workspace_name, - name=layer_id + name=layer_id, ) # Create feature type call - 
mock_post.assert_called_with(url, auth=self.auth, data={'kill_all': self.engine.GWC_KILL_ALL}) + mock_post.assert_called_with( + url, auth=self.auth, data={"kill_all": self.engine.GWC_KILL_ALL} + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") def test_terminate_tile_cache_tasks_exception(self, mock_post): mock_post.return_value = mock.MagicMock(status_code=500) - layer_id = f'{self.workspace_name}:gwc_layer_name' + layer_id = f"{self.workspace_name}:gwc_layer_name" - self.assertRaises(requests.RequestException, self.engine.terminate_tile_cache_tasks, layer_id) + self.assertRaises( + requests.RequestException, self.engine.terminate_tile_cache_tasks, layer_id + ) - url = '{endpoint}seed/{workspace}:{name}'.format( + url = "{endpoint}seed/{workspace}:{name}".format( endpoint=self.engine.get_gwc_endpoint(), workspace=self.workspace_name, - name='gwc_layer_name' + name="gwc_layer_name", ) # Create feature type call - mock_post.assert_called_with(url, auth=self.auth, data={'kill_all': self.engine.GWC_KILL_ALL}) + mock_post.assert_called_with( + url, auth=self.auth, data={"kill_all": self.engine.GWC_KILL_ALL} + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.get') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.get") + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace" + ) def test_query_tile_cache_tasks(self, mock_ws, mock_get): mock_response = mock.MagicMock(status_code=200) mock_ws().name = self.workspace_name - mock_response.json.return_value = {'long-array-array': [ - [1, 100, 99, 1, 1], - [10, 100, 90, 2, -2] - ]} + mock_response.json.return_value = { + "long-array-array": [[1, 100, 99, 1, 1], [10, 100, 90, 2, -2]] + } mock_get.return_value = mock_response - layer_id = 'gwc_layer_name' + layer_id = "gwc_layer_name" ret = self.engine.query_tile_cache_tasks(layer_id) - url = '{endpoint}seed/{workspace}:{name}.json'.format( + url = "{endpoint}seed/{workspace}:{name}.json".format( endpoint=self.engine.get_gwc_endpoint(), workspace=self.workspace_name, - name='gwc_layer_name' + name="gwc_layer_name", ) # Create feature type call @@ -2578,638 +2841,783 @@ def test_query_tile_cache_tasks(self, mock_ws, mock_get): self.assertIsInstance(ret, list) self.assertEqual(2, len(ret)) - self.assertEqual({'tiles_processed': 1, 'total_to_process': 100, 'num_remaining': 99, - 'task_id': 1, 'task_status': 'Running'}, ret[0]) - self.assertEqual({'tiles_processed': 10, 'total_to_process': 100, 'num_remaining': 90, - 'task_id': 2, 'task_status': -2}, ret[1]) + self.assertEqual( + { + "tiles_processed": 1, + "total_to_process": 100, + "num_remaining": 99, + "task_id": 1, + "task_status": "Running", + }, + ret[0], + ) + self.assertEqual( + { + "tiles_processed": 10, + "total_to_process": 100, + "num_remaining": 90, + "task_id": 2, + "task_status": -2, + }, + ret[1], + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.get') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.get") def test_query_tile_cache_tasks_exception(self, mock_get): mock_response = mock.MagicMock(status_code=500) mock_get.return_value = mock_response - layer_id = f'{self.workspace_name}:gwc_layer_name' - self.assertRaises(requests.RequestException, 
self.engine.query_tile_cache_tasks, layer_id) + layer_id = f"{self.workspace_name}:gwc_layer_name" + self.assertRaises( + requests.RequestException, self.engine.query_tile_cache_tasks, layer_id + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_store') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_store" + ) + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") def test_create_coverage_store(self, mock_post, _): mock_post.return_value = MockResponse(201) - store_id = f'{self.workspace_name}:foo' - coverage_type = 'ArcGrid' + store_id = f"{self.workspace_name}:foo" + coverage_type = "ArcGrid" self.engine.create_coverage_store(store_id, coverage_type) mock_post.assert_called() post_call_args = mock_post.call_args_list - url = 'workspaces/{workspace}/coveragestores'.format( + url = "workspaces/{workspace}/coveragestores".format( workspace=self.workspace_name ) - self.assertIn(url, post_call_args[0][1]['url']) - self.assertIn('foo', post_call_args[0][1]['data']) - self.assertIn(coverage_type, post_call_args[0][1]['data']) - - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_store') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace') + self.assertIn(url, post_call_args[0][1]["url"]) + self.assertIn("foo", post_call_args[0][1]["data"]) + self.assertIn(coverage_type, post_call_args[0][1]["data"]) + + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_store" + ) + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace" + ) def test_create_coverage_store_grass_grid(self, mock_workspace, mock_post, _): mock_post.return_value = MockResponse(201) mock_workspace().name = self.workspace_name - store_id = 'foo' - coverage_type = 'GrassGrid' # function converts this to ArcGrid + store_id = "foo" + coverage_type = "GrassGrid" # function converts this to ArcGrid self.engine.create_coverage_store(store_id, coverage_type) mock_post.assert_called() post_call_args = mock_post.call_args_list - url = 'workspaces/{workspace}/coveragestores'.format( + url = "workspaces/{workspace}/coveragestores".format( workspace=self.workspace_name ) - self.assertIn(url, post_call_args[0][1]['url']) - self.assertIn('foo', post_call_args[0][1]['data']) - self.assertIn('ArcGrid', post_call_args[0][1]['data']) - self.assertNotIn(coverage_type, post_call_args[0][1]['data']) - - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_store') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') + self.assertIn(url, post_call_args[0][1]["url"]) + self.assertIn("foo", post_call_args[0][1]["data"]) + self.assertIn("ArcGrid", post_call_args[0][1]["data"]) + self.assertNotIn(coverage_type, post_call_args[0][1]["data"]) + + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_store" + ) + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") def test_create_coverage_store_exception(self, mock_post, _): mock_post.return_value = MockResponse(500) - 
store_id = f'{self.workspace_name}:foo' - coverage_type = 'ArcGrid' - self.assertRaises(requests.RequestException, self.engine.create_coverage_store, store_id, coverage_type) + store_id = f"{self.workspace_name}:foo" + coverage_type = "ArcGrid" + self.assertRaises( + requests.RequestException, + self.engine.create_coverage_store, + store_id, + coverage_type, + ) def test_create_coverage_store_invalid_type(self): - store_id = f'{self.workspace_name}:foo' - coverage_type = 'INVALID_COVERAGE_TYPE' - self.assertRaises(ValueError, self.engine.create_coverage_store, store_id, coverage_type) - - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.update_layer_styles') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_layer') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.put') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace') + store_id = f"{self.workspace_name}:foo" + coverage_type = "INVALID_COVERAGE_TYPE" + self.assertRaises( + ValueError, self.engine.create_coverage_store, store_id, coverage_type + ) + + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.update_layer_styles" + ) + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_layer" + ) + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.put") + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace" + ) def test_create_coverage_layer(self, mock_workspace, mock_put, mock_get_layer, _): - coverage_name = 'adem' - expected_store_id = coverage_name # layer and store share name (one to one approach) + coverage_name = "adem" + expected_store_id = ( + coverage_name # layer and store share name (one to one approach) + ) mock_workspace.return_value = self.mock_workspaces[0] - expected_coverage_type = 'GeoTIFF' - coverage_file_name = 'adem.tif' + expected_coverage_type = "GeoTIFF" + coverage_file_name = "adem.tif" coverage_file = os.path.join(self.files_root, coverage_file_name) - mock_layer_dict = {'success': True, 'result': {'name': coverage_name, 'workspace': self.workspace_names[0]}} + mock_layer_dict = { + "success": True, + "result": {"name": coverage_name, "workspace": self.workspace_names[0]}, + } mock_get_layer.return_value = mock_layer_dict mock_put.return_value = MockResponse(201) # Execute - response = self.engine.create_coverage_layer(layer_id=coverage_name, coverage_type=expected_coverage_type, - coverage_file=coverage_file, default_style='points', debug=False) + response = self.engine.create_coverage_layer( + layer_id=coverage_name, + coverage_type=expected_coverage_type, + coverage_file=coverage_file, + default_style="points", + debug=False, + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) # Values - self.assertEqual(coverage_name, r['name']) - self.assertEqual(self.workspace_names[0], r['workspace']) + self.assertEqual(coverage_name, r["name"]) + self.assertEqual(self.workspace_names[0], r["workspace"]) mock_get_layer.assert_called_with(coverage_name, expected_store_id, False) # PUT Tests put_call_args = mock_put.call_args_list - expected_url = 
'{endpoint}workspaces/{w}/coveragestores/{s}/file.{ext}'.format( + expected_url = "{endpoint}workspaces/{w}/coveragestores/{s}/file.{ext}".format( endpoint=self.endpoint, w=self.workspace_names[0], s=expected_store_id, - ext=expected_coverage_type.lower() + ext=expected_coverage_type.lower(), ) expected_headers = { "Content-type": "application/zip", - "Accept": "application/xml" - } - expected_params = { - 'coverageName': coverage_name + "Accept": "application/xml", } - self.assertEqual(expected_url, put_call_args[0][1]['url']) - self.assertEqual(expected_headers, put_call_args[0][1]['headers']) - self.assertEqual(expected_params, put_call_args[0][1]['params']) + expected_params = {"coverageName": coverage_name} + self.assertEqual(expected_url, put_call_args[0][1]["url"]) + self.assertEqual(expected_headers, put_call_args[0][1]["headers"]) + self.assertEqual(expected_params, put_call_args[0][1]["params"]) def test_create_coverage_layer_invalid_coverage_type(self): - coverage_name = '{}:adem'.format(self.workspace_names[0]) - expected_coverage_type = 'test1' - coverage_file_name = 'adem.tif' + coverage_name = "{}:adem".format(self.workspace_names[0]) + expected_coverage_type = "test1" + coverage_file_name = "adem.tif" coverage_file = os.path.join(self.files_root, coverage_file_name) # Raise ValueError - self.assertRaises(ValueError, self.engine.create_coverage_layer, layer_id=coverage_name, - coverage_type=expected_coverage_type, coverage_file=coverage_file, debug=False) + self.assertRaises( + ValueError, + self.engine.create_coverage_layer, + layer_id=coverage_name, + coverage_type=expected_coverage_type, + coverage_file=coverage_file, + debug=False, + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_layer') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.put') + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_layer" + ) + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.put") def test_create_coverage_layer_zip_file(self, mock_put, mock_get_layer): - coverage_name = '{}:precip30min'.format(self.workspace_names[0]) - expected_store_id = 'precip30min' # layer and store share name (one to one approach) - expected_coverage_type = 'ArcGrid' - coverage_file_name = 'precip30min.zip' - coverage_file = os.path.join(self.files_root, 'arc_sample', coverage_file_name) + coverage_name = "{}:precip30min".format(self.workspace_names[0]) + expected_store_id = ( + "precip30min" # layer and store share name (one to one approach) + ) + expected_coverage_type = "ArcGrid" + coverage_file_name = "precip30min.zip" + coverage_file = os.path.join(self.files_root, "arc_sample", coverage_file_name) - mock_layer_dict = {'success': True, 'result': {'name': coverage_name, 'workspace': self.workspace_names[0]}} + mock_layer_dict = { + "success": True, + "result": {"name": coverage_name, "workspace": self.workspace_names[0]}, + } mock_get_layer.return_value = mock_layer_dict mock_put.return_value = MockResponse(201) # Execute - response = self.engine.create_coverage_layer(layer_id=coverage_name, coverage_type=expected_coverage_type, - coverage_file=coverage_file, debug=False) + response = self.engine.create_coverage_layer( + layer_id=coverage_name, + coverage_type=expected_coverage_type, + coverage_file=coverage_file, + debug=False, + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + 
self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) # Values - self.assertEqual(coverage_name, r['name']) - self.assertEqual(self.workspace_names[0], r['workspace']) + self.assertEqual(coverage_name, r["name"]) + self.assertEqual(self.workspace_names[0], r["workspace"]) mock_get_layer.assert_called_with(coverage_name, expected_store_id, False) # PUT Tests put_call_args = mock_put.call_args_list - expected_url = '{endpoint}workspaces/{w}/coveragestores/{s}/file.{ext}'.format( + expected_url = "{endpoint}workspaces/{w}/coveragestores/{s}/file.{ext}".format( endpoint=self.endpoint, w=self.workspace_names[0], s=expected_store_id, - ext=expected_coverage_type.lower() + ext=expected_coverage_type.lower(), ) expected_headers = { "Content-type": "application/zip", - "Accept": "application/xml" + "Accept": "application/xml", } - expected_params = { - 'coverageName': 'precip30min' - } - self.assertEqual(expected_url, put_call_args[0][1]['url']) - self.assertEqual(expected_headers, put_call_args[0][1]['headers']) - self.assertEqual(expected_params, put_call_args[0][1]['params']) - - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_layer') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.put') + expected_params = {"coverageName": "precip30min"} + self.assertEqual(expected_url, put_call_args[0][1]["url"]) + self.assertEqual(expected_headers, put_call_args[0][1]["headers"]) + self.assertEqual(expected_params, put_call_args[0][1]["params"]) + + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_layer" + ) + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.put") def test_create_coverage_layer_grass_grid(self, mock_put, mock_get_layer): - coverage_name = '{}:my_grass'.format(self.workspace_names[0]) - expected_store_id = 'my_grass' - expected_coverage_type = 'GrassGrid' - coverage_file_name = 'my_grass.zip' + coverage_name = "{}:my_grass".format(self.workspace_names[0]) + expected_store_id = "my_grass" + expected_coverage_type = "GrassGrid" + coverage_file_name = "my_grass.zip" coverage_file = os.path.join(self.files_root, "grass_ascii", coverage_file_name) - mock_layer_dict = {'success': True, 'result': {'name': coverage_name, 'workspace': self.workspace_names[0]}} + mock_layer_dict = { + "success": True, + "result": {"name": coverage_name, "workspace": self.workspace_names[0]}, + } mock_get_layer.return_value = mock_layer_dict mock_put.return_value = MockResponse(201) # Execute - response = self.engine.create_coverage_layer(layer_id=coverage_name, coverage_type=expected_coverage_type, - coverage_file=coverage_file, debug=False) + response = self.engine.create_coverage_layer( + layer_id=coverage_name, + coverage_type=expected_coverage_type, + coverage_file=coverage_file, + debug=False, + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) # Values - self.assertEqual(coverage_name, r['name']) - self.assertEqual(self.workspace_names[0], r['workspace']) + self.assertEqual(coverage_name, r["name"]) + self.assertEqual(self.workspace_names[0], r["workspace"]) mock_get_layer.assert_called_with(coverage_name, expected_store_id, False) # PUT Tests put_call_args = 
mock_put.call_args_list - expected_url = '{endpoint}workspaces/{w}/coveragestores/{s}/file.{ext}'.format( + expected_url = "{endpoint}workspaces/{w}/coveragestores/{s}/file.{ext}".format( endpoint=self.endpoint, w=self.workspace_names[0], s=expected_store_id, - ext='arcgrid' + ext="arcgrid", ) expected_headers = { "Content-type": "application/zip", - "Accept": "application/xml" + "Accept": "application/xml", } - expected_params = { - 'coverageName': 'my_grass' - } - self.assertEqual(expected_url, put_call_args[0][1]['url']) - self.assertEqual(expected_headers, put_call_args[0][1]['headers']) - self.assertEqual(expected_params, put_call_args[0][1]['params']) - - @mock.patch('tethys_dataset_services.engines.geoserver_engine.os.path.isdir') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.os.listdir') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_layer') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.put') - def test_create_coverage_layer_grass_grid_skip_dir(self, mock_put, mock_get_layer, mock_contents, mock_isdir): - coverage_name = '{}:my_grass'.format(self.workspace_names[0]) - expected_store_id = 'my_grass' - expected_coverage_type = 'GrassGrid' - coverage_file_name = 'my_grass.zip' + expected_params = {"coverageName": "my_grass"} + self.assertEqual(expected_url, put_call_args[0][1]["url"]) + self.assertEqual(expected_headers, put_call_args[0][1]["headers"]) + self.assertEqual(expected_params, put_call_args[0][1]["params"]) + + @mock.patch("tethys_dataset_services.engines.geoserver_engine.os.path.isdir") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.os.listdir") + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_layer" + ) + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.put") + def test_create_coverage_layer_grass_grid_skip_dir( + self, mock_put, mock_get_layer, mock_contents, mock_isdir + ): + coverage_name = "{}:my_grass".format(self.workspace_names[0]) + expected_store_id = "my_grass" + expected_coverage_type = "GrassGrid" + coverage_file_name = "my_grass.zip" mock_isdir.side_effect = [True, False] mock_contents.side_effect = [ - ['file1', coverage_file_name.replace('.zip', '.asc')], - [coverage_file_name.replace('.zip', '.prj'), coverage_file_name.replace('.zip', '.asc')] + ["file1", coverage_file_name.replace(".zip", ".asc")], + [ + coverage_file_name.replace(".zip", ".prj"), + coverage_file_name.replace(".zip", ".asc"), + ], ] coverage_file = os.path.join(self.files_root, "grass_ascii", coverage_file_name) - mock_layer_dict = {'success': True, 'result': {'name': coverage_name, 'workspace': self.workspace_names[0]}} + mock_layer_dict = { + "success": True, + "result": {"name": coverage_name, "workspace": self.workspace_names[0]}, + } mock_get_layer.return_value = mock_layer_dict mock_put.return_value = MockResponse(201) # Execute - response = self.engine.create_coverage_layer(layer_id=coverage_name, coverage_type=expected_coverage_type, - coverage_file=coverage_file, debug=False) + response = self.engine.create_coverage_layer( + layer_id=coverage_name, + coverage_type=expected_coverage_type, + coverage_file=coverage_file, + debug=False, + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) 
# Values - self.assertEqual(coverage_name, r['name']) - self.assertEqual(self.workspace_names[0], r['workspace']) + self.assertEqual(coverage_name, r["name"]) + self.assertEqual(self.workspace_names[0], r["workspace"]) mock_get_layer.assert_called_with(coverage_name, expected_store_id, False) # PUT Tests put_call_args = mock_put.call_args_list - expected_url = '{endpoint}workspaces/{w}/coveragestores/{s}/file.{ext}'.format( + expected_url = "{endpoint}workspaces/{w}/coveragestores/{s}/file.{ext}".format( endpoint=self.endpoint, w=self.workspace_names[0], s=expected_store_id, - ext='arcgrid' + ext="arcgrid", ) expected_headers = { "Content-type": "application/zip", - "Accept": "application/xml" - } - expected_params = { - 'coverageName': 'my_grass' + "Accept": "application/xml", } - self.assertEqual(expected_url, put_call_args[0][1]['url']) - self.assertEqual(expected_headers, put_call_args[0][1]['headers']) - self.assertEqual(expected_params, put_call_args[0][1]['params']) - - @mock.patch('tethys_dataset_services.engines.geoserver_engine.os.listdir') - def test_create_coverage_layer_grass_grid_exception(self, mock_working_dir_contents): - coverage_name = '{}:my_grass'.format(self.workspace_names[0]) - expected_coverage_type = 'GrassGrid' - coverage_file_name = 'my_grass.zip' - mock_working_dir_contents.return_value = [coverage_file_name, 'file2', 'file3'] + expected_params = {"coverageName": "my_grass"} + self.assertEqual(expected_url, put_call_args[0][1]["url"]) + self.assertEqual(expected_headers, put_call_args[0][1]["headers"]) + self.assertEqual(expected_params, put_call_args[0][1]["params"]) + + @mock.patch("tethys_dataset_services.engines.geoserver_engine.os.listdir") + def test_create_coverage_layer_grass_grid_exception( + self, mock_working_dir_contents + ): + coverage_name = "{}:my_grass".format(self.workspace_names[0]) + expected_coverage_type = "GrassGrid" + coverage_file_name = "my_grass.zip" + mock_working_dir_contents.return_value = [coverage_file_name, "file2", "file3"] coverage_file = os.path.join(self.files_root, "grass_ascii", coverage_file_name) # Raise ValueError - self.assertRaises(ValueError, self.engine.create_coverage_layer, layer_id=coverage_name, - coverage_type=expected_coverage_type, coverage_file=coverage_file, debug=False) + self.assertRaises( + ValueError, + self.engine.create_coverage_layer, + layer_id=coverage_name, + coverage_type=expected_coverage_type, + coverage_file=coverage_file, + debug=False, + ) def test_create_coverage_layer_grass_invalid_file(self): - coverage_name = '{}:my_grass'.format(self.workspace_names[0]) - expected_coverage_type = 'GrassGrid' - coverage_file_name = 'my_grass_invalid.zip' + coverage_name = "{}:my_grass".format(self.workspace_names[0]) + expected_coverage_type = "GrassGrid" + coverage_file_name = "my_grass_invalid.zip" coverage_file = os.path.join(self.files_root, "grass_ascii", coverage_file_name) # Execute - self.assertRaises(IOError, self.engine.create_coverage_layer, layer_id=coverage_name, - coverage_type=expected_coverage_type, coverage_file=coverage_file, debug=False) + self.assertRaises( + IOError, + self.engine.create_coverage_layer, + layer_id=coverage_name, + coverage_type=expected_coverage_type, + coverage_file=coverage_file, + debug=False, + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_layer') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.put') + @mock.patch( + 
"tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_layer" + ) + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.put") def test_create_coverage_layer_image_mosaic(self, mock_put, mock_get_layer): - coverage_name = '{}:global_mosaic'.format(self.workspace_names[0]) - expected_store_id = 'global_mosaic' # layer and store share name (one to one approach) - expected_coverage_type = 'ImageMosaic' - coverage_file_name = 'global_mosaic.zip' - coverage_file = os.path.join(self.files_root, 'mosaic_sample', coverage_file_name) + coverage_name = "{}:global_mosaic".format(self.workspace_names[0]) + expected_store_id = ( + "global_mosaic" # layer and store share name (one to one approach) + ) + expected_coverage_type = "ImageMosaic" + coverage_file_name = "global_mosaic.zip" + coverage_file = os.path.join( + self.files_root, "mosaic_sample", coverage_file_name + ) - mock_layer_dict = {'success': True, 'result': {'name': coverage_name, 'workspace': self.workspace_names[0]}} + mock_layer_dict = { + "success": True, + "result": {"name": coverage_name, "workspace": self.workspace_names[0]}, + } mock_get_layer.return_value = mock_layer_dict mock_put.return_value = MockResponse(201) # Execute - response = self.engine.create_coverage_layer(layer_id=coverage_name, coverage_type=expected_coverage_type, - coverage_file=coverage_file, debug=False) + response = self.engine.create_coverage_layer( + layer_id=coverage_name, + coverage_type=expected_coverage_type, + coverage_file=coverage_file, + debug=False, + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) # Values - self.assertEqual(coverage_name, r['name']) - self.assertEqual(self.workspace_names[0], r['workspace']) + self.assertEqual(coverage_name, r["name"]) + self.assertEqual(self.workspace_names[0], r["workspace"]) mock_get_layer.assert_called_with(coverage_name, expected_store_id, False) # PUT Tests put_call_args = mock_put.call_args_list - expected_url = '{endpoint}workspaces/{w}/coveragestores/{s}/file.{ext}'.format( + expected_url = "{endpoint}workspaces/{w}/coveragestores/{s}/file.{ext}".format( endpoint=self.endpoint, w=self.workspace_names[0], s=expected_store_id, - ext=expected_coverage_type.lower() + ext=expected_coverage_type.lower(), ) expected_headers = { "Content-type": "application/zip", - "Accept": "application/xml" + "Accept": "application/xml", } - self.assertEqual(expected_url, put_call_args[0][1]['url']) - self.assertEqual(expected_headers, put_call_args[0][1]['headers']) - - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_layer') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.put') - def test_create_coverage_layer_already_exists(self, mock_put, mock_log, mock_get_layer): - mock_put.return_value = MockResponse(500, 'already exists') - coverage_name = f'{self.workspace_name}:foo' - coverage_type = 'ArcGrid' - coverage_file = os.path.join(self.files_root, 'arc_sample', 'precip30min.asc') - self.engine.create_coverage_layer(layer_id=coverage_name, coverage_type=coverage_type, - coverage_file=coverage_file) + self.assertEqual(expected_url, put_call_args[0][1]["url"]) + self.assertEqual(expected_headers, put_call_args[0][1]["headers"]) + 
+ @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_layer" + ) + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.put") + def test_create_coverage_layer_already_exists( + self, mock_put, mock_log, mock_get_layer + ): + mock_put.return_value = MockResponse(500, "already exists") + coverage_name = f"{self.workspace_name}:foo" + coverage_type = "ArcGrid" + coverage_file = os.path.join(self.files_root, "arc_sample", "precip30min.asc") + self.engine.create_coverage_layer( + layer_id=coverage_name, + coverage_type=coverage_type, + coverage_file=coverage_file, + ) mock_put.assert_called() put_call_args = mock_put.call_args_list - url = 'workspaces/{workspace}/coveragestores/{coverage_store_name}/file.{extension}'.format( + url = "workspaces/{workspace}/coveragestores/{coverage_store_name}/file.{extension}".format( workspace=self.workspace_name, - coverage_store_name='foo', - extension=coverage_type.lower() + coverage_store_name="foo", + extension=coverage_type.lower(), ) - self.assertIn(url, put_call_args[0][1]['url']) - self.assertIn('coverageName', put_call_args[0][1]['params']) - self.assertEqual('foo', put_call_args[0][1]['params']['coverageName']) - self.assertIn('files', put_call_args[0][1]) + self.assertIn(url, put_call_args[0][1]["url"]) + self.assertIn("coverageName", put_call_args[0][1]["params"]) + self.assertEqual("foo", put_call_args[0][1]["params"]["coverageName"]) + self.assertIn("files", put_call_args[0][1]) mock_log.warning.assert_called() mock_get_layer.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.put') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.put") def test_create_coverage_layer_error_unzipping(self, mock_put, mock_log): - mock_put.return_value = MockResponse(500, 'Error occured unzipping file') - coverage_name = f'{self.workspace_name}:foo' - coverage_type = 'ArcGrid' - coverage_file = os.path.join(self.files_root, 'arc_sample', 'precip30min.asc') + mock_put.return_value = MockResponse(500, "Error occured unzipping file") + coverage_name = f"{self.workspace_name}:foo" + coverage_type = "ArcGrid" + coverage_file = os.path.join(self.files_root, "arc_sample", "precip30min.asc") self.assertRaises( requests.RequestException, self.engine.create_coverage_layer, layer_id=coverage_name, coverage_type=coverage_type, - coverage_file=coverage_file + coverage_file=coverage_file, ) num_put_calls = len(mock_put.call_args_list) self.assertEqual(5, num_put_calls) mock_log.error.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.put') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.put") def test_create_coverage_layer_error(self, mock_put, mock_log): - mock_put.return_value = MockResponse(500, '500 exception') - coverage_name = f'{self.workspace_name}:foo' - coverage_type = 'ArcGrid' - coverage_file = os.path.join(self.files_root, 'arc_sample', 'precip30min.asc') + mock_put.return_value = MockResponse(500, "500 exception") + coverage_name = f"{self.workspace_name}:foo" + coverage_type = "ArcGrid" + coverage_file = os.path.join(self.files_root, 
"arc_sample", "precip30min.asc") self.assertRaises( requests.RequestException, self.engine.create_coverage_layer, layer_id=coverage_name, coverage_type=coverage_type, - coverage_file=coverage_file + coverage_file=coverage_file, ) num_put_calls = len(mock_put.call_args_list) self.assertEqual(3, num_put_calls) mock_log.error.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.put') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.put") + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace" + ) def test_enable_time_dimension(self, mock_ws, mock_put, _): mock_response = mock.MagicMock(status_code=200) mock_ws().name = self.workspace_name mock_put.return_value = mock_response - coverage_id = 'foo' + coverage_id = "foo" self.engine.enable_time_dimension(coverage_id=coverage_id) mock_put.assert_called() put_call_args = mock_put.call_args_list - url = '{endpoint}workspaces/{workspace}/coveragestores/{coverage_name}/coverages/{coverage_name}'.format( + url = "{endpoint}workspaces/{workspace}/coveragestores/{coverage_name}/coverages/{coverage_name}".format( endpoint=self.endpoint, workspace=self.workspace_name, coverage_name=coverage_id, ) self.assertEqual(url, put_call_args[0][0][0]) - self.assertIn('data', put_call_args[0][1]) + self.assertIn("data", put_call_args[0][1]) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.put') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.put") def test_enable_time_dimension_exception(self, mock_put, mock_log): mock_response = mock.MagicMock(status_code=500) mock_put.return_value = mock_response - coverage_id = f'{self.workspace_name}:foo' - self.assertRaises(requests.RequestException, self.engine.enable_time_dimension, coverage_id) + coverage_id = f"{self.workspace_name}:foo" + self.assertRaises( + requests.RequestException, self.engine.enable_time_dimension, coverage_id + ) - url = '{endpoint}workspaces/{workspace}/coveragestores/{coverage_name}/coverages/{coverage_name}'.format( + url = "{endpoint}workspaces/{workspace}/coveragestores/{coverage_name}/coverages/{coverage_name}".format( endpoint=self.endpoint, workspace=self.workspace_name, - coverage_name='foo', + coverage_name="foo", ) put_call_args = mock_put.call_args_list self.assertEqual(url, put_call_args[0][0][0]) - self.assertIn('data', put_call_args[0][1]) + self.assertIn("data", put_call_args[0][1]) mock_log.error.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_create_workspace(self, mock_catalog): mc = mock_catalog() - expected_uri = 'http:www.example.com/b-workspace' + expected_uri = "http:www.example.com/b-workspace" mc.create_workspace.return_value = self.mock_workspaces[0] # Execute - response = self.engine.create_workspace(workspace_id=self.workspace_names[0], - uri=expected_uri) + response = self.engine.create_workspace( + workspace_id=self.workspace_names[0], uri=expected_uri + ) # Validate response object 
self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Type self.assertIsInstance(r, dict) - self.assertIn('name', r) - self.assertEqual(self.workspace_names[0], r['name']) + self.assertIn("name", r) + self.assertEqual(self.workspace_names[0], r["name"]) mc.create_workspace.assert_called_with(self.workspace_names[0], expected_uri) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_create_workspace_assertion_error(self, mock_catalog): mc = mock_catalog() - expected_uri = 'http:www.example.com/b-workspace' - mc.create_workspace.side_effect = AssertionError('AssertionError') + expected_uri = "http:www.example.com/b-workspace" + mc.create_workspace.side_effect = AssertionError("AssertionError") # Execute - response = self.engine.create_workspace(workspace_id=self.workspace_names[0], - uri=expected_uri) + response = self.engine.create_workspace( + workspace_id=self.workspace_names[0], uri=expected_uri + ) # False - self.assertFalse(response['success']) + self.assertFalse(response["success"]) # Expect Error - r = response['error'] + r = response["error"] # Properties - self.assertIn('AssertionError', r) + self.assertIn("AssertionError", r) mc.create_workspace.assert_called_with(self.workspace_names[0], expected_uri) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_style') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_style" + ) + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace" + ) def test_create_style(self, mock_workspace, mock_post, mock_get_style, mock_log): mock_post.return_value = mock.MagicMock(status_code=201) mock_workspace.return_value = self.mock_workspaces[0] - style_id = '{}:{}'.format(self.mock_workspaces[0].name, self.mock_styles[0].name) - sld_template = os.path.join(self.files_root, 'test_create_style.sld') - sld_context = {'foo': 'bar'} + style_id = "{}:{}".format( + self.mock_workspaces[0].name, self.mock_styles[0].name + ) + sld_template = os.path.join(self.files_root, "test_create_style.sld") + sld_context = {"foo": "bar"} mock_get_style.return_value = { - 'success': True, - 'result': {'name': self.mock_styles[0].name, 'workspace': self.workspace_name} + "success": True, + "result": { + "name": self.mock_styles[0].name, + "workspace": self.workspace_name, + }, } - + response = self.engine.create_style(style_id, sld_template, sld_context) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] # Values - self.assertEqual(self.mock_styles[0].name, r['name']) - self.assertEqual(self.workspace_name, r['workspace']) + self.assertEqual(self.mock_styles[0].name, r["name"]) + 
self.assertEqual(self.workspace_name, r["workspace"]) # Validate endpoint calls - style_url = 'workspaces/{w}/styles'.format(w=self.mock_workspaces[0].name) + style_url = "workspaces/{w}/styles".format(w=self.mock_workspaces[0].name) # Create feature type call post_call_args = mock_post.call_args_list self.assertIn(style_url, post_call_args[0][0][0]) mock_log.info.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_style') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_style" + ) + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") def test_create_style_cannot_find_style(self, mock_post, mock_get_style, mock_log): - mock_post.return_value = mock.MagicMock(status_code=500, text='Unable to find style for event') + mock_post.return_value = mock.MagicMock( + status_code=500, text="Unable to find style for event" + ) style_name = self.mock_styles[0].name - sld_template = os.path.join(self.files_root, 'test_create_style.sld') - sld_context = {'foo': 'bar'} + sld_template = os.path.join(self.files_root, "test_create_style.sld") + sld_context = {"foo": "bar"} - mock_get_style.return_value = { - 'success': True, - 'result': ' warnings ' - } + mock_get_style.return_value = {"success": True, "result": " warnings "} self.engine.create_style(style_name, sld_template, sld_context) # Validate endpoint calls - style_url = '{endpoint}styles'.format(endpoint=self.endpoint) + style_url = "{endpoint}styles".format(endpoint=self.endpoint) # Create feature type call post_call_args = mock_post.call_args_list self.assertIn(style_url, post_call_args[0][0][0]) mock_log.warning.assert_called() - - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") def test_create_style_exception(self, mock_post, mock_log): - mock_post.return_value = mock.MagicMock(status_code=500, text='500 exception') + mock_post.return_value = mock.MagicMock(status_code=500, text="500 exception") style_name = self.mock_styles[0].name - sld_template = os.path.join(self.files_root, 'test_create_style.sld') - sld_context = {'foo': 'bar'} + sld_template = os.path.join(self.files_root, "test_create_style.sld") + sld_context = {"foo": "bar"} - self.assertRaises(requests.RequestException, self.engine.create_style, style_name, sld_template, sld_context) + self.assertRaises( + requests.RequestException, + self.engine.create_style, + style_name, + sld_template, + sld_context, + ) mock_log.error.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") def test_create_style_other_exception(self, mock_post, mock_log): - mock_post.return_value = mock.MagicMock(status_code=504, text='504 exception') + mock_post.return_value = mock.MagicMock(status_code=504, text="504 exception") style_name = 
self.mock_styles[0].name - sld_template = os.path.join(self.files_root, 'test_create_style.sld') - sld_context = {'foo': 'bar'} + sld_template = os.path.join(self.files_root, "test_create_style.sld") + sld_context = {"foo": "bar"} with self.assertRaises(requests.RequestException) as context: self.engine.create_style(style_name, sld_template, sld_context) - self.assertEqual('Create Style Status Code 504: 504 exception', str(context.exception)) + self.assertEqual( + "Create Style Status Code 504: 504 exception", str(context.exception) + ) mock_log.error.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_style') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_style" + ) + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") def test_create_style_overwrite(self, mock_post, mock_logger, mock_get_style): """ Attempt to delete resulting in no style found is OK, so should proceed to create style. """ mock_post.return_value = mock.MagicMock(status_code=201) - self.delete_style = mock.MagicMock(side_effect=Exception('no such style')) - style_id = f'{self.workspace_name}:{self.mock_styles[0].name}' - sld_template = os.path.join(self.files_root, 'test_create_style.sld') - sld_context = {'foo': 'bar'} + self.delete_style = mock.MagicMock(side_effect=Exception("no such style")) + style_id = f"{self.workspace_name}:{self.mock_styles[0].name}" + sld_template = os.path.join(self.files_root, "test_create_style.sld") + sld_context = {"foo": "bar"} self.engine.delete_style = mock.MagicMock() mock_get_style.return_value = { - 'success': True, - 'result': {'name': self.mock_styles[0].name, 'workspace': self.workspace_name} + "success": True, + "result": { + "name": self.mock_styles[0].name, + "workspace": self.workspace_name, + }, } # Execute - response = self.engine.create_style(style_id, sld_template, sld_context, overwrite=True) + response = self.engine.create_style( + style_id, sld_template, sld_context, overwrite=True + ) # Validate response object self.assert_valid_response_object(response) # Success - self.assertTrue(response['success']) - + self.assertTrue(response["success"]) + # Extract Result - result = response['result'] + result = response["result"] # Type self.assertIsInstance(result, dict) @@ -3218,185 +3626,248 @@ def test_create_style_overwrite(self, mock_post, mock_logger, mock_get_style): self.engine.delete_style.assert_called_with(style_id, purge=True) # Validate endpoint calls - style_url = f'{self.endpoint}workspaces/{self.workspace_name}/styles' + style_url = f"{self.endpoint}workspaces/{self.workspace_name}/styles" mock_post.assert_called_with( style_url, - headers={'Content-type': 'application/vnd.ogc.sld+xml'}, + headers={"Content-type": "application/vnd.ogc.sld+xml"}, auth=self.auth, - params={'name': self.mock_styles[0].name}, - data=mock.ANY + params={"name": self.mock_styles[0].name}, + data=mock.ANY, ) # Validate SLD was rendered correctly - rendered_sld_path = os.path.join(self.files_root, 'test_create_style_rendered.sld') + rendered_sld_path = os.path.join( + self.files_root, "test_create_style_rendered.sld" + ) with open(rendered_sld_path) as rendered: rendered_sld = rendered.read() - self.assertEqual(rendered_sld, 
mock_post.call_args_list[0][1]['data']) + self.assertEqual(rendered_sld, mock_post.call_args_list[0][1]["data"]) # Verify log messages mock_logger.info.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") def test_create_style_overwrite_referenced_by_existing(self, mock_logger): - style_id = f'{self.workspace_name}:{self.mock_styles[0].name}' - sld_template = os.path.join(self.files_root, 'test_create_style.sld') - sld_context = {'foo': 'bar'} - self.engine.delete_style = mock.MagicMock(side_effect=ValueError('referenced by existing')) + style_id = f"{self.workspace_name}:{self.mock_styles[0].name}" + sld_template = os.path.join(self.files_root, "test_create_style.sld") + sld_context = {"foo": "bar"} + self.engine.delete_style = mock.MagicMock( + side_effect=ValueError("referenced by existing") + ) # Execute with self.assertRaises(ValueError) as error: - self.engine.create_style(style_id, sld_template, sld_context, overwrite=True) + self.engine.create_style( + style_id, sld_template, sld_context, overwrite=True + ) - self.assertEqual('referenced by existing', str(error.exception)) + self.assertEqual("referenced by existing", str(error.exception)) mock_logger.error.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.reload') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_layer') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.update_layer_styles') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') - def test_create_sql_view_layer(self, mock_post, mock_logger, mock_update_layer_styles, mock_get_layer, mock_reload): + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.reload" + ) + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_layer" + ) + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.update_layer_styles" + ) + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") + def test_create_sql_view_layer( + self, + mock_post, + mock_logger, + mock_update_layer_styles, + mock_get_layer, + mock_reload, + ): mock_post.side_effect = [MockResponse(201), MockResponse(200)] - store_id = f'{self.workspace_name}:foo' + store_id = f"{self.workspace_name}:foo" layer_name = self.layer_names[0] - geometry_type = 'Point' + geometry_type = "Point" srid = 4236 - sql = 'SELECT * FROM foo' - default_style = 'points' + sql = "SELECT * FROM foo" + default_style = "points" - self.engine.create_sql_view_layer(store_id, layer_name, geometry_type, srid, sql, default_style) + self.engine.create_sql_view_layer( + store_id, layer_name, geometry_type, srid, sql, default_style + ) # Validate endpoint calls - sql_view_url = 'workspaces/{workspace}/datastores/{datastore}/featuretypes'.format( - workspace=self.workspace_name, - datastore='foo' + sql_view_url = ( + "workspaces/{workspace}/datastores/{datastore}/featuretypes".format( + workspace=self.workspace_name, datastore="foo" + ) ) - gwc_layer_url = 'layers/{workspace}:{feature_name}.xml'.format( - workspace=self.workspace_name, - feature_name=layer_name + gwc_layer_url = 
"layers/{workspace}:{feature_name}.xml".format( + workspace=self.workspace_name, feature_name=layer_name ) - with open(os.path.join(self.files_root, 'test_create_layer_sql_view.xml')) as rendered: + with open( + os.path.join(self.files_root, "test_create_layer_sql_view.xml") + ) as rendered: expected_sql_xml = rendered.read() - with open(os.path.join(self.files_root, 'test_create_layer_gwc_layer.xml')) as rendered: + with open( + os.path.join(self.files_root, "test_create_layer_gwc_layer.xml") + ) as rendered: expected_gwc_lyr_xml = rendered.read() # Create feature type call post_call_args = mock_post.call_args_list self.assertIn(sql_view_url, post_call_args[0][0][0]) - self.assertEqual(expected_sql_xml, post_call_args[0][1]['data']) + self.assertEqual(expected_sql_xml, post_call_args[0][1]["data"]) # GWC Call self.assertIn(gwc_layer_url, post_call_args[1][0][0]) - self.assertEqual(expected_gwc_lyr_xml, str(post_call_args[1][1]['data'])) + self.assertEqual(expected_gwc_lyr_xml, str(post_call_args[1][1]["data"])) mock_logger.info.assert_called() mock_update_layer_styles.assert_called_with( - layer_id=f'{self.workspace_name}:{layer_name}', + layer_id=f"{self.workspace_name}:{layer_name}", default_style=default_style, - other_styles=None + other_styles=None, ) mock_get_layer.assert_called() mock_reload.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.reload') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_layer') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.update_layer_styles') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace') - def test_create_layer_create_feature_type_already_exists(self, mock_workspace, mock_post, mock_logger, - mock_update_layer_styles, mock_get_layer, mock_reload): - mock_post.side_effect = [MockResponse(500, 'already exists'), MockResponse(200)] + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.reload" + ) + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_layer" + ) + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.update_layer_styles" + ) + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace" + ) + def test_create_layer_create_feature_type_already_exists( + self, + mock_workspace, + mock_post, + mock_logger, + mock_update_layer_styles, + mock_get_layer, + mock_reload, + ): + mock_post.side_effect = [MockResponse(500, "already exists"), MockResponse(200)] mock_workspace().name = self.workspace_name - store_id = 'foo' - layer_name = self.layer_names[0] - geometry_type = 'Point' + store_id = "foo" + layer_name = self.layer_names[0] + geometry_type = "Point" srid = 4236 - sql = 'SELECT * FROM foo' - default_style = 'points' + sql = "SELECT * FROM foo" + default_style = "points" - self.engine.create_sql_view_layer(store_id, layer_name, geometry_type, srid, sql, default_style) + self.engine.create_sql_view_layer( + store_id, layer_name, geometry_type, srid, sql, 
default_style + ) # Validate endpoint calls - sql_view_url = 'workspaces/{workspace}/datastores/{datastore}/featuretypes'.format( - workspace=self.workspace_name, - datastore='foo' + sql_view_url = ( + "workspaces/{workspace}/datastores/{datastore}/featuretypes".format( + workspace=self.workspace_name, datastore="foo" + ) ) - gwc_layer_url = 'layers/{workspace}:{feature_name}.xml'.format( - workspace=self.workspace_name, - feature_name=layer_name + gwc_layer_url = "layers/{workspace}:{feature_name}.xml".format( + workspace=self.workspace_name, feature_name=layer_name ) - with open(os.path.join(self.files_root, 'test_create_layer_sql_view.xml')) as rendered: + with open( + os.path.join(self.files_root, "test_create_layer_sql_view.xml") + ) as rendered: expected_sql_xml = rendered.read() - with open(os.path.join(self.files_root, 'test_create_layer_gwc_layer.xml')) as rendered: + with open( + os.path.join(self.files_root, "test_create_layer_gwc_layer.xml") + ) as rendered: expected_gwc_lyr_xml = rendered.read() # Create feature type call post_call_args = mock_post.call_args_list self.assertIn(sql_view_url, post_call_args[0][0][0]) - self.assertEqual(expected_sql_xml, post_call_args[0][1]['data']) + self.assertEqual(expected_sql_xml, post_call_args[0][1]["data"]) # GWC Call self.assertIn(gwc_layer_url, post_call_args[1][0][0]) - self.assertEqual(expected_gwc_lyr_xml, str(post_call_args[1][1]['data'])) + self.assertEqual(expected_gwc_lyr_xml, str(post_call_args[1][1]["data"])) mock_logger.info.assert_called() mock_update_layer_styles.assert_called_with( - layer_id=f'{self.workspace_name}:{layer_name}', + layer_id=f"{self.workspace_name}:{layer_name}", default_style=default_style, - other_styles=None + other_styles=None, ) mock_get_layer.assert_called() mock_reload.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") def test_create_layer_create_sql_view_exception(self, mock_post, mock_logger): - mock_post.return_value = MockResponse(500, 'other exception') - store_id = f'{self.workspace_name}:foo' + mock_post.return_value = MockResponse(500, "other exception") + store_id = f"{self.workspace_name}:foo" layer_name = self.layer_names[0] - geometry_type = 'Point' + geometry_type = "Point" srid = 4236 - sql = 'SELECT * FROM foo' - default_style = 'points' + sql = "SELECT * FROM foo" + default_style = "points" with self.assertRaises(requests.RequestException) as error: - self.engine.create_sql_view_layer(store_id, layer_name, geometry_type, srid, sql, default_style) + self.engine.create_sql_view_layer( + store_id, layer_name, geometry_type, srid, sql, default_style + ) - self.assertEqual("Create Feature Type Status Code 500: other exception", str(error.exception)) + self.assertEqual( + "Create Feature Type Status Code 500: other exception", str(error.exception) + ) mock_logger.error.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.update_layer_styles') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.update_layer_styles" + ) + 
@mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") def test_create_sql_view_layer_gwc_error(self, mock_post, mock_logger, _): - mock_post.side_effect = [MockResponse(201)] + [MockResponse(200)] + ([MockResponse(500, 'GWC exception')] * 300) - store_id = f'{self.workspace_name}:foo' + mock_post.side_effect = ( + [MockResponse(201)] + + [MockResponse(200)] + + ([MockResponse(500, "GWC exception")] * 300) + ) + store_id = f"{self.workspace_name}:foo" layer_name = self.layer_names[0] - geometry_type = 'Point' + geometry_type = "Point" srid = 4236 - sql = 'SELECT * FROM foo' - default_style = 'points' + sql = "SELECT * FROM foo" + default_style = "points" with self.assertRaises(requests.RequestException) as error: - self.engine.create_sql_view_layer(store_id, layer_name, geometry_type, srid, sql, default_style) + self.engine.create_sql_view_layer( + store_id, layer_name, geometry_type, srid, sql, default_style + ) - self.assertEqual("Create GWC Layer Status Code 500: GWC exception", str(error.exception)) + self.assertEqual( + "Create GWC Layer Status Code 500: GWC exception", str(error.exception) + ) mock_logger.error.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_apply_changes_to_gs_object(self, mock_catalog): mc = mock_catalog() gs_object = mock.NonCallableMagicMock( layer_id=self.layer_names[0], styles=self.style_names, - default_style='d_styles' + default_style="d_styles", ) # new style - new_gs_args = {'styles': ['style1:style1a', 'style2'], 'default_style': 'dstyle1'} + new_gs_args = { + "styles": ["style1:style1a", "style2"], + "default_style": "dstyle1", + } # mock get_style to return value mc.get_style.return_value = self.mock_styles[0] @@ -3412,7 +3883,7 @@ def test_apply_changes_to_gs_object(self, mock_catalog): self.assertIn(self.mock_styles[0].name, d_style) # test default case with : - new_gs_args = {'default_style': 'dstyle1: dstyle2'} + new_gs_args = {"default_style": "dstyle1: dstyle2"} # mock get_style to return value mc.get_style.return_value = self.mock_styles[0] @@ -3427,10 +3898,10 @@ def test_apply_changes_to_gs_object(self, mock_catalog): def test_get_non_rest_endpoint(self): self.engine = GeoServerSpatialDatasetEngine( - endpoint='http://localhost:8181/geoserver/rest/', + endpoint="http://localhost:8181/geoserver/rest/", ) - expected_endpoint = 'http://localhost:8181/geoserver' + expected_endpoint = "http://localhost:8181/geoserver" endpoint = self.engine._get_non_rest_endpoint() # Check Response @@ -3438,97 +3909,130 @@ def test_get_non_rest_endpoint(self): def test_get_wms_url(self): self.engine = GeoServerSpatialDatasetEngine( - endpoint='http://localhost:8181/geoserver/rest/', + endpoint="http://localhost:8181/geoserver/rest/", ) # tiled and transparent are set as default value wms_url = self.engine._get_wms_url( layer_id=self.layer_names[0], style=self.style_names[0], - srs='EPSG:4326', - bbox='-180,-90,180,90', - version='1.1.0', - width='512', - height='512', - output_format='image/png', - tiled=False, transparent=True + srs="EPSG:4326", + bbox="-180,-90,180,90", + version="1.1.0", + width="512", + height="512", + output_format="image/png", + tiled=False, + transparent=True, ) - expected_url = 'http://localhost:8181/geoserver/wms?service=WMS&version=1.1.0&' \ - 'request=GetMap&layers={0}&styles={1}&transparent=true&' \ - 
'tiled=no&srs=EPSG:4326&bbox=-180,-90,180,90&' \ - 'width=512&height=512&format=image/png'.format(self.layer_names[0], self.style_names[0]) + expected_url = ( + "http://localhost:8181/geoserver/wms?service=WMS&version=1.1.0&" + "request=GetMap&layers={0}&styles={1}&transparent=true&" + "tiled=no&srs=EPSG:4326&bbox=-180,-90,180,90&" + "width=512&height=512&format=image/png".format( + self.layer_names[0], self.style_names[0] + ) + ) # check wms_url self.assertEqual(expected_url, wms_url) # tiled and transparent are set as default value - wms_url = self.engine._get_wms_url(layer_id=self.layer_names[0], - style=self.style_names[0], - srs='EPSG:4326', - bbox='-180,-90,180,90', - version='1.1.0', - width='512', - height='512', - output_format='image/png', - tiled=True, transparent=False) - - expected_url = 'http://localhost:8181/geoserver/wms?service=WMS&version=1.1.0&' \ - 'request=GetMap&layers={0}&styles={1}&transparent=false&' \ - 'tiled=yes&srs=EPSG:4326&bbox=-180,-90,180,90&' \ - 'width=512&height=512&format=image/png'.format(self.layer_names[0], self.style_names[0]) + wms_url = self.engine._get_wms_url( + layer_id=self.layer_names[0], + style=self.style_names[0], + srs="EPSG:4326", + bbox="-180,-90,180,90", + version="1.1.0", + width="512", + height="512", + output_format="image/png", + tiled=True, + transparent=False, + ) + + expected_url = ( + "http://localhost:8181/geoserver/wms?service=WMS&version=1.1.0&" + "request=GetMap&layers={0}&styles={1}&transparent=false&" + "tiled=yes&srs=EPSG:4326&bbox=-180,-90,180,90&" + "width=512&height=512&format=image/png".format( + self.layer_names[0], self.style_names[0] + ) + ) # check wms_url self.assertEqual(expected_url, wms_url) def test_get_wcs_url(self): self.engine = GeoServerSpatialDatasetEngine( - endpoint='http://localhost:8181/geoserver/rest/', + endpoint="http://localhost:8181/geoserver/rest/", ) - wcs_url = self.engine._get_wcs_url(resource_id=self.resource_names[0], - srs='EPSG:4326', bbox='-180,-90,180,90', - output_format='png', namespace=self.store_name, - width='512', height='512') + wcs_url = self.engine._get_wcs_url( + resource_id=self.resource_names[0], + srs="EPSG:4326", + bbox="-180,-90,180,90", + output_format="png", + namespace=self.store_name, + width="512", + height="512", + ) - expected_wcs_url = 'http://localhost:8181/geoserver/wcs?service=WCS&version=1.1.0&' \ - 'request=GetCoverage&identifier={0}&srs=EPSG:4326&' \ - 'BoundingBox=-180,-90,180,90&width=512&' \ - 'height=512&format=png&namespace={1}'.format(self.resource_names[0], self.store_name) + expected_wcs_url = ( + "http://localhost:8181/geoserver/wcs?service=WCS&version=1.1.0&" + "request=GetCoverage&identifier={0}&srs=EPSG:4326&" + "BoundingBox=-180,-90,180,90&width=512&" + "height=512&format=png&namespace={1}".format( + self.resource_names[0], self.store_name + ) + ) # check wcs_url self.assertEqual(expected_wcs_url, wcs_url) def test_get_wfs_url(self): self.engine = GeoServerSpatialDatasetEngine( - endpoint='http://localhost:8181/geoserver/rest/', + endpoint="http://localhost:8181/geoserver/rest/", ) # GML3 Case - wfs_url = self.engine._get_wfs_url(resource_id=self.resource_names[0], output_format='GML3') - expected_wfs_url = 'http://localhost:8181/geoserver/wfs?service=WFS&' \ - 'version=2.0.0&request=GetFeature&' \ - 'typeNames={0}'.format(self.resource_names[0]) + wfs_url = self.engine._get_wfs_url( + resource_id=self.resource_names[0], output_format="GML3" + ) + expected_wfs_url = ( + "http://localhost:8181/geoserver/wfs?service=WFS&" + 
"version=2.0.0&request=GetFeature&" + "typeNames={0}".format(self.resource_names[0]) + ) # check wcs_url self.assertEqual(expected_wfs_url, wfs_url) # GML2 Case - wfs_url = self.engine._get_wfs_url(resource_id=self.resource_names[0], output_format='GML2') - expected_wfs_url = 'http://localhost:8181/geoserver/wfs?service=WFS&' \ - 'version=1.0.0&request=GetFeature&' \ - 'typeNames={0}&outputFormat={1}'.format(self.resource_names[0], 'GML2') + wfs_url = self.engine._get_wfs_url( + resource_id=self.resource_names[0], output_format="GML2" + ) + expected_wfs_url = ( + "http://localhost:8181/geoserver/wfs?service=WFS&" + "version=1.0.0&request=GetFeature&" + "typeNames={0}&outputFormat={1}".format(self.resource_names[0], "GML2") + ) # check wcs_url self.assertEqual(expected_wfs_url, wfs_url) # Other format Case - wfs_url = self.engine._get_wfs_url(resource_id=self.resource_names[0], output_format='Other') - expected_wfs_url = 'http://localhost:8181/geoserver/wfs?service=WFS&' \ - 'version=2.0.0&request=GetFeature&' \ - 'typeNames={0}&outputFormat={1}'.format(self.resource_names[0], 'Other') + wfs_url = self.engine._get_wfs_url( + resource_id=self.resource_names[0], output_format="Other" + ) + expected_wfs_url = ( + "http://localhost:8181/geoserver/wfs?service=WFS&" + "version=2.0.0&request=GetFeature&" + "typeNames={0}&outputFormat={1}".format(self.resource_names[0], "Other") + ) # check wcs_url self.assertEqual(expected_wfs_url, wfs_url) - @mock.patch('sys.stdout', new_callable=StringIO) + @mock.patch("sys.stdout", new_callable=StringIO) def test_handle_debug(self, mock_print): test_object = self.style_names @@ -3543,8 +4047,7 @@ def test_transcribe_geoserver_object(self): # NAMED_OBJECTS gs_object_store = mock.NonCallableMagicMock( - store=self.store_name, - styles=self.style_names + store=self.store_name, styles=self.style_names ) store_dict = self.engine._transcribe_geoserver_object(gs_object_store) @@ -3552,12 +4055,13 @@ def test_transcribe_geoserver_object(self): self.assertIsInstance(store_dict, dict) # check properties - self.assertIn(self.store_name, store_dict['store']) - self.assertIn(self.style_names[0], store_dict['styles']) + self.assertIn(self.store_name, store_dict["store"]) + self.assertIn(self.style_names[0], store_dict["styles"]) # NAMED_OBJECTS_WITH_WORKSPACE - gs_sub_object_resource = mock.NonCallableMagicMock(workspace=self.workspace_name, - writers='test_omit_attributes') + gs_sub_object_resource = mock.NonCallableMagicMock( + workspace=self.workspace_name, writers="test_omit_attributes" + ) gs_sub_object_resource.name = self.resource_names[0] gs_object_resource = mock.NonCallableMagicMock( resource=gs_sub_object_resource, @@ -3569,9 +4073,9 @@ def test_transcribe_geoserver_object(self): self.assertIsInstance(resource_dict, dict) # check properties - resource_att = '{0}:{1}'.format(self.workspace_name, self.resource_names[0]) - self.assertIn(resource_att, resource_dict['resource']) - self.assertIn(self.default_style_name, resource_dict['default_style']) + resource_att = "{0}:{1}".format(self.workspace_name, self.resource_names[0]) + self.assertIn(resource_att, resource_dict["resource"]) + self.assertIn(self.default_style_name, resource_dict["default_style"]) # NAMED_OBJECTS_WITH_NO_WORKSPACE to Cover if sub_object.workspace is not true gs_sub_object_resource = mock.NonCallableMagicMock(workspace=None) @@ -3587,109 +4091,133 @@ def test_transcribe_geoserver_object(self): # check properties resource_att = self.resource_names[0] - self.assertIn(resource_att, 
resource_dict['resource']) - self.assertIn(self.default_style_name, resource_dict['default_style']) + self.assertIn(resource_att, resource_dict["resource"]) + self.assertIn(self.default_style_name, resource_dict["default_style"]) # resource_type with workspace gs_object_resource = mock.NonCallableMagicMock( - resource_type='featureType', + resource_type="featureType", workspace=self.workspace_name, ) gs_object_resource.name = "test_name" - resource_type_dict = self.engine._transcribe_geoserver_object(gs_object_resource) + resource_type_dict = self.engine._transcribe_geoserver_object( + gs_object_resource + ) - self.assertIn('gml3', resource_type_dict['wfs']) + self.assertIn("gml3", resource_type_dict["wfs"]) # resource_type with no workspace gs_object_resource = mock.NonCallableMagicMock( - resource_type='featureType', + resource_type="featureType", workspace=None, ) gs_object_resource.name = "test_name" - resource_type_dict = self.engine._transcribe_geoserver_object(gs_object_resource) + resource_type_dict = self.engine._transcribe_geoserver_object( + gs_object_resource + ) - self.assertIn('gml3', resource_type_dict['wfs']) + self.assertIn("gml3", resource_type_dict["wfs"]) # resource_type with no workspace and coverage - gs_sub_object_resource = mock.NonCallableMagicMock(native_bbox=['0', '1', '2', '3']) + gs_sub_object_resource = mock.NonCallableMagicMock( + native_bbox=["0", "1", "2", "3"] + ) gs_object_resource = mock.NonCallableMagicMock( resource=gs_sub_object_resource, - resource_type='coverage', + resource_type="coverage", workspace=None, ) gs_object_resource.name = "test_name" - resource_type_dict = self.engine._transcribe_geoserver_object(gs_object_resource) + resource_type_dict = self.engine._transcribe_geoserver_object( + gs_object_resource + ) - self.assertIn('png', resource_type_dict['wcs']) + self.assertIn("png", resource_type_dict["wcs"]) # resource_type with workspace and coverage -wcs - gs_sub_object_resource = mock.NonCallableMagicMock(native_bbox=['0', '1', '2', '3']) + gs_sub_object_resource = mock.NonCallableMagicMock( + native_bbox=["0", "1", "2", "3"] + ) gs_object_resource = mock.NonCallableMagicMock( resource=gs_sub_object_resource, - resource_type='coverage', + resource_type="coverage", workspace=self.workspace_name, ) gs_object_resource.name = "test_name" - resource_type_dict = self.engine._transcribe_geoserver_object(gs_object_resource) + resource_type_dict = self.engine._transcribe_geoserver_object( + gs_object_resource + ) - self.assertIn('png', resource_type_dict['wcs']) + self.assertIn("png", resource_type_dict["wcs"]) # resource_type with workspace and layer - wms - gs_sub_object_resource = mock.NonCallableMagicMock(native_bbox=['0', '1', '2', '3']) + gs_sub_object_resource = mock.NonCallableMagicMock( + native_bbox=["0", "1", "2", "3"] + ) gs_object_resource = mock.NonCallableMagicMock( resource=gs_sub_object_resource, - resource_type='layer', + resource_type="layer", workspace=self.workspace_name, - default_style=self.default_style_name + default_style=self.default_style_name, ) gs_object_resource.name = "test_name" - resource_type_dict = self.engine._transcribe_geoserver_object(gs_object_resource) + resource_type_dict = self.engine._transcribe_geoserver_object( + gs_object_resource + ) - self.assertIn('png', resource_type_dict['wms']) + self.assertIn("png", resource_type_dict["wms"]) # resource_type with workspace and layer - wms with bounds - gs_sub_object_resource = mock.NonCallableMagicMock(native_bbox=['0', '1', '2', '3']) + gs_sub_object_resource 
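One detail worth noting in the mock setup used throughout these tests: "name" cannot be passed to the MagicMock constructor because it configures the mock object itself, so the tests assign it after construction. A minimal sketch of the same pattern, with placeholder values:

from unittest import mock

gs_resource = mock.NonCallableMagicMock(
    workspace="my_workspace", native_bbox=["0", "1", "2", "3"]
)
gs_resource.name = "my_layer"  # must be set separately, not via the constructor

gs_layer = mock.NonCallableMagicMock(
    resource=gs_resource, resource_type="layer", default_style="default_style"
)
gs_layer.name = "my_layer"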
= mock.NonCallableMagicMock( + native_bbox=["0", "1", "2", "3"] + ) gs_object_resource = mock.NonCallableMagicMock( resource=gs_sub_object_resource, - bounds=['0', '1', '2', '3', '4'], - resource_type='layerGroup', + bounds=["0", "1", "2", "3", "4"], + resource_type="layerGroup", workspace=self.workspace_name, - default_style=self.default_style_name + default_style=self.default_style_name, ) gs_object_resource.name = "test_name" - resource_type_dict = self.engine._transcribe_geoserver_object(gs_object_resource) + resource_type_dict = self.engine._transcribe_geoserver_object( + gs_object_resource + ) - self.assertIn('png', resource_type_dict['wms']) + self.assertIn("png", resource_type_dict["wms"]) def test_link_sqlalchemy_db_to_geoserver(self): self.engine.create_postgis_store = mock.MagicMock() - url = 'postgresql://user:pass@localhost:5432/foo' + url = "postgresql://user:pass@localhost:5432/foo" engine = create_engine(url) - self.engine.link_sqlalchemy_db_to_geoserver(store_id=self.store_names[0], sqlalchemy_engine=engine, docker=True) + self.engine.link_sqlalchemy_db_to_geoserver( + store_id=self.store_names[0], sqlalchemy_engine=engine, docker=True + ) self.engine.create_postgis_store.assert_called_with( store_id=self.store_names[0], - host='172.17.0.1', + host="172.17.0.1", port=5432, - database='foo', - username='user', - password='pass', + database="foo", + username="user", + password="pass", max_connections=5, max_connection_idle_time=30, evictor_run_periodicity=30, validate_connections=True, - debug=False + debug=False, ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_store') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_store" + ) + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") def test_create_postgis_store_validate_connection(self, mock_post, _): mock_post.return_value = MockResponse(201) - store_id = '{}:foo'.format(self.workspace_name) - host = 'localhost' - port = '5432' - database = 'foo_db' - username = 'user' - password = 'pass' + store_id = "{}:foo".format(self.workspace_name) + host = "localhost" + port = "5432" + database = "foo_db" + username = "user" + password = "pass" max_connections = 10 max_connection_idle_time = 40 evictor_run_periodicity = 60 @@ -3711,34 +4239,56 @@ def test_create_postgis_store_validate_connection(self, mock_post, _): false - """.format('foo', host, port, database, username, password, max_connections, max_connection_idle_time, - evictor_run_periodicity) + """.format( + "foo", + host, + port, + database, + username, + password, + max_connections, + max_connection_idle_time, + evictor_run_periodicity, + ) - expected_headers = { - "Content-type": "text/xml", - "Accept": "application/xml" - } + expected_headers = {"Content-type": "text/xml", "Accept": "application/xml"} - rest_endpoint = '{endpoint}workspaces/{workspace}/datastores'.format( - endpoint=self.endpoint, - workspace=self.workspace_name + rest_endpoint = "{endpoint}workspaces/{workspace}/datastores".format( + endpoint=self.endpoint, workspace=self.workspace_name + ) + self.engine.create_postgis_store( + store_id, + host, + port, + database, + username, + password, + max_connections, + max_connection_idle_time, + evictor_run_periodicity, + ) + mock_post.assert_called_with( + url=rest_endpoint, data=xml, headers=expected_headers, auth=self.auth ) - 
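The link_sqlalchemy_db_to_geoserver test above expects the pieces of a SQLAlchemy database URL to be forwarded to create_postgis_store, with the host swapped for the Docker bridge address when docker=True. A small sketch of that URL parsing, assuming SQLAlchemy 1.4+ for the make_url import:

from sqlalchemy.engine import make_url

url = make_url("postgresql://user:pass@localhost:5432/foo")
host = "172.17.0.1"  # docker=True substitutes the Docker bridge host, as asserted above

print(url.username, url.password, host, url.port, url.database)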
self.engine.create_postgis_store(store_id, host, port, database, username, password, - max_connections, max_connection_idle_time, evictor_run_periodicity) - mock_post.assert_called_with(url=rest_endpoint, data=xml, headers=expected_headers, auth=self.auth) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_store') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace') - def test_create_postgis_store_validate_connection_false(self, mock_workspace, mock_post, _): + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_store" + ) + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace" + ) + def test_create_postgis_store_validate_connection_false( + self, mock_workspace, mock_post, _ + ): mock_post.return_value = MockResponse(201) - store_id = 'foo' + store_id = "foo" mock_workspace().name = self.workspace_name - host = 'localhost' - port = '5432' - database = 'foo_db' - username = 'user' - password = 'pass' + host = "localhost" + port = "5432" + database = "foo_db" + username = "user" + password = "pass" max_connections = 10 max_connection_idle_time = 40 evictor_run_periodicity = 60 @@ -3760,34 +4310,57 @@ def test_create_postgis_store_validate_connection_false(self, mock_workspace, mo false - """.format('foo', host, port, database, username, password, max_connections, max_connection_idle_time, - evictor_run_periodicity) + """.format( + "foo", + host, + port, + database, + username, + password, + max_connections, + max_connection_idle_time, + evictor_run_periodicity, + ) - expected_headers = { - "Content-type": "text/xml", - "Accept": "application/xml" - } + expected_headers = {"Content-type": "text/xml", "Accept": "application/xml"} - rest_endpoint = '{endpoint}workspaces/{workspace}/datastores'.format( - endpoint=self.endpoint, - workspace=self.workspace_name + rest_endpoint = "{endpoint}workspaces/{workspace}/datastores".format( + endpoint=self.endpoint, workspace=self.workspace_name + ) + self.engine.create_postgis_store( + store_id, + host, + port, + database, + username, + password, + max_connections, + max_connection_idle_time, + evictor_run_periodicity, + validate_connections=False, + ) + mock_post.assert_called_with( + url=rest_endpoint, data=xml, headers=expected_headers, auth=self.auth ) - self.engine.create_postgis_store(store_id, host, port, database, username, password, max_connections, - max_connection_idle_time, evictor_run_periodicity, validate_connections=False) - mock_post.assert_called_with(url=rest_endpoint, data=xml, headers=expected_headers, auth=self.auth) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_store') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace') - def test_create_postgis_store_expose_primary_keys_true(self, mock_workspace, mock_post, _): + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_store" + ) + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") + @mock.patch( + 
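The calls these tests assert boil down to a single REST request: a dataStore XML document POSTed to workspaces/<workspace>/datastores with the text/xml and application/xml headers shown. A trimmed, hedged sketch of that request; the real template rendered by the engine carries the full set of connection parameters (validate connections, evictor settings, and so on):

import requests

def sketch_create_datastore(endpoint, workspace, name, connection_params, auth):
    # Minimal dataStore payload; illustration only, not the engine's template.
    entries = "".join(
        '<entry key="{0}">{1}</entry>'.format(k, v)
        for k, v in connection_params.items()
    )
    xml = (
        "<dataStore><name>{0}</name><connectionParameters>{1}"
        "</connectionParameters></dataStore>".format(name, entries)
    )
    headers = {"Content-type": "text/xml", "Accept": "application/xml"}
    url = "{0}workspaces/{1}/datastores".format(endpoint, workspace)
    return requests.post(url=url, data=xml, headers=headers, auth=auth)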
"tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace" + ) + def test_create_postgis_store_expose_primary_keys_true( + self, mock_workspace, mock_post, _ + ): mock_post.return_value = MockResponse(201) - store_id = 'foo' + store_id = "foo" mock_workspace().name = self.workspace_name - host = 'localhost' - port = '5432' - database = 'foo_db' - username = 'user' - password = 'pass' + host = "localhost" + port = "5432" + database = "foo_db" + username = "user" + password = "pass" max_connections = 10 max_connection_idle_time = 40 evictor_run_periodicity = 60 @@ -3809,34 +4382,53 @@ def test_create_postgis_store_expose_primary_keys_true(self, mock_workspace, moc true - """.format('foo', host, port, database, username, password, max_connections, max_connection_idle_time, - evictor_run_periodicity) + """.format( + "foo", + host, + port, + database, + username, + password, + max_connections, + max_connection_idle_time, + evictor_run_periodicity, + ) - expected_headers = { - "Content-type": "text/xml", - "Accept": "application/xml" - } + expected_headers = {"Content-type": "text/xml", "Accept": "application/xml"} - rest_endpoint = '{endpoint}workspaces/{workspace}/datastores'.format( - endpoint=self.endpoint, - workspace=self.workspace_name + rest_endpoint = "{endpoint}workspaces/{workspace}/datastores".format( + endpoint=self.endpoint, workspace=self.workspace_name + ) + self.engine.create_postgis_store( + store_id, + host, + port, + database, + username, + password, + max_connections, + max_connection_idle_time, + evictor_run_periodicity, + validate_connections=False, + expose_primary_keys=True, + ) + mock_post.assert_called_with( + url=rest_endpoint, data=xml, headers=expected_headers, auth=self.auth ) - self.engine.create_postgis_store(store_id, host, port, database, username, password, max_connections, - max_connection_idle_time, evictor_run_periodicity, validate_connections=False, - expose_primary_keys=True) - mock_post.assert_called_with(url=rest_endpoint, data=xml, headers=expected_headers, auth=self.auth) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_store') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_store" + ) + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") def test_create_postgis_store_not_201(self, mock_post, mock_logger, _): mock_post.return_value = MockResponse(500) - store_id = '{}:foo'.format(self.workspace_name) - host = 'localhost' - port = '5432' - database = 'foo_db' - username = 'user' - password = 'pass' + store_id = "{}:foo".format(self.workspace_name) + host = "localhost" + port = "5432" + database = "foo_db" + username = "user" + password = "pass" max_connections = 10 max_connection_idle_time = 40 evictor_run_periodicity = 60 @@ -3858,121 +4450,145 @@ def test_create_postgis_store_not_201(self, mock_post, mock_logger, _): false - """.format('foo', host, port, database, username, password, max_connections, max_connection_idle_time, - evictor_run_periodicity) + """.format( + "foo", + host, + port, + database, + username, + password, + max_connections, + max_connection_idle_time, + evictor_run_periodicity, + ) - expected_headers = { - "Content-type": "text/xml", - "Accept": 
"application/xml" - } + expected_headers = {"Content-type": "text/xml", "Accept": "application/xml"} - rest_endpoint = '{endpoint}workspaces/{workspace}/datastores'.format( - endpoint=self.endpoint, - workspace=self.workspace_name + rest_endpoint = "{endpoint}workspaces/{workspace}/datastores".format( + endpoint=self.endpoint, workspace=self.workspace_name ) - self.assertRaises(requests.RequestException, self.engine.create_postgis_store, store_id, host, port, database, - username, password, max_connections, max_connection_idle_time, evictor_run_periodicity) + self.assertRaises( + requests.RequestException, + self.engine.create_postgis_store, + store_id, + host, + port, + database, + username, + password, + max_connections, + max_connection_idle_time, + evictor_run_periodicity, + ) mock_logger.error.assert_called() - mock_post.assert_called_with(url=rest_endpoint, data=xml, headers=expected_headers, auth=self.auth) + mock_post.assert_called_with( + url=rest_endpoint, data=xml, headers=expected_headers, auth=self.auth + ) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_store') - def test_create_layer_from_postgis_store(self, mock_store, mock_workspace, mock_post): + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace" + ) + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_store" + ) + def test_create_layer_from_postgis_store( + self, mock_store, mock_workspace, mock_post + ): store_id = self.store_names[0] - mock_store.return_value = {'success': True, 'result': {'name': store_id}} + mock_store.return_value = {"success": True, "result": {"name": store_id}} mock_workspace.return_value = self.mock_workspaces[0] mock_post.return_value = MockResponse(201) - table_name = 'points' + table_name = "points" - response = self.engine.create_layer_from_postgis_store(store_id=store_id, table=table_name, debug=False) + response = self.engine.create_layer_from_postgis_store( + store_id=store_id, table=table_name, debug=False + ) - expected_url = '{endpoint}workspaces/{w}/datastores/{s}/featuretypes'.format( - endpoint=self.endpoint, - w=self.workspace_names[0], - s=self.store_names[0] + expected_url = "{endpoint}workspaces/{w}/datastores/{s}/featuretypes".format( + endpoint=self.endpoint, w=self.workspace_names[0], s=self.store_names[0] ) - expected_headers = { - "Content-type": "text/xml", - "Accept": "application/xml" - } + expected_headers = {"Content-type": "text/xml", "Accept": "application/xml"} # Validate response object self.assert_valid_response_object(response) - self.assertTrue(response['success']) + self.assertTrue(response["success"]) # Extract Result - r = response['result'] + r = response["result"] - self.assertIn('name', r) - self.assertIn(self.store_names[0], r['name']) + self.assertIn("name", r) + self.assertIn(self.store_names[0], r["name"]) post_call_args = mock_post.call_args_list - self.assertEqual(expected_url, post_call_args[0][1]['url']) - self.assertEqual(expected_headers, post_call_args[0][1]['headers']) + self.assertEqual(expected_url, post_call_args[0][1]["url"]) + self.assertEqual(expected_headers, post_call_args[0][1]["headers"]) 
mock_store.assert_called_with(store_id=store_id, debug=False) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_store') + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_store" + ) def test_create_layer_from_postgis_store_fail_request(self, mock_store): - mock_store.return_value = {'success': False, 'error': ''} - store_id = '{}:{}'.format(self.workspace_names[0], self.store_names[0]) + mock_store.return_value = {"success": False, "error": ""} + store_id = "{}:{}".format(self.workspace_names[0], self.store_names[0]) - table_name = 'points' + table_name = "points" - response = self.engine.create_layer_from_postgis_store(store_id=store_id, table=table_name, debug=False) + response = self.engine.create_layer_from_postgis_store( + store_id=store_id, table=table_name, debug=False + ) # Validate response object self.assert_valid_response_object(response) - self.assertFalse(response['success']) + self.assertFalse(response["success"]) # Extract Result - r = response['error'] + r = response["error"] - self.assertIn('There is no store named', r) + self.assertIn("There is no store named", r) mock_store.assert_called_with(store_id, False) - @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') - @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_store') + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_store" + ) def test_create_layer_from_postgis_store_not_201(self, mock_store, mock_post): mock_store.return_value = self.mock_stores[0] - store_id = '{}:{}'.format(self.workspace_names[0], self.store_names[0]) + store_id = "{}:{}".format(self.workspace_names[0], self.store_names[0]) mock_post.return_value = MockResponse(500) - table_name = 'points' + table_name = "points" - response = self.engine.create_layer_from_postgis_store(store_id=store_id, table=table_name, debug=False) + response = self.engine.create_layer_from_postgis_store( + store_id=store_id, table=table_name, debug=False + ) - expected_url = '{endpoint}workspaces/{w}/datastores/{s}/featuretypes'.format( - endpoint=self.endpoint, - w=self.workspace_names[0], - s=self.store_names[0] + expected_url = "{endpoint}workspaces/{w}/datastores/{s}/featuretypes".format( + endpoint=self.endpoint, w=self.workspace_names[0], s=self.store_names[0] ) - expected_headers = { - "Content-type": "text/xml", - "Accept": "application/xml" - } + expected_headers = {"Content-type": "text/xml", "Accept": "application/xml"} # Validate response object self.assert_valid_response_object(response) - self.assertFalse(response['success']) + self.assertFalse(response["success"]) post_call_args = mock_post.call_args_list - self.assertEqual(expected_url, post_call_args[0][1]['url']) - self.assertEqual(expected_headers, post_call_args[0][1]['headers']) + self.assertEqual(expected_url, post_call_args[0][1]["url"]) + self.assertEqual(expected_headers, post_call_args[0][1]["headers"]) mock_store.assert_called_with(store_id, False) -if __name__ == '__main__': +if __name__ == "__main__": suite = unittest.TestSuite() - suite.addTest(TestGeoServerDatasetEngine('test_create_style')) + suite.addTest(TestGeoServerDatasetEngine("test_create_style")) runner = unittest.TextTestRunner() runner.run(suite) diff --git a/tests/unit_tests/test_utilities.py b/tests/unit_tests/test_utilities.py index 
39ac6f2..c34f5db 100644 --- a/tests/unit_tests/test_utilities.py +++ b/tests/unit_tests/test_utilities.py @@ -11,87 +11,94 @@ class TestUtilities(unittest.TestCase): def setUp(self): # Files self.tests_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) - self.files_root = os.path.join(self.tests_root, 'files') + self.files_root = os.path.join(self.tests_root, "files") def tearDown(self): pass def test_ConvertDictToXml(self): dict_data = { - 'note': { - 'importance': 'high', - 'todo': [ - {'type': 'active', '_text': 'Work'}, - {'type': 'active', '_text': 'Play'}, - {'type': 'active', '_text': 'Eat'}, - {'type': 'passive', '_text': 'Sleep'} + "note": { + "importance": "high", + "todo": [ + {"type": "active", "_text": "Work"}, + {"type": "active", "_text": "Play"}, + {"type": "active", "_text": "Eat"}, + {"type": "passive", "_text": "Sleep"}, ], - 'logged': 'true', - 'title': ['Happy', 'Sad'] + "logged": "true", + "title": ["Happy", "Sad"], } } result = utilities.ConvertDictToXml(dict_data) try: - xmlstr = ET.tostring(result, encoding='unicode') + xmlstr = ET.tostring(result, encoding="unicode") except LookupError: xmlstr = ET.tostring(result) # Check Result) - self.assertEqual('', xmlstr[:6]) - self.assertEqual('', xmlstr[-7:]) - self.assertIn('high', xmlstr) - self.assertIn('true', xmlstr) - self.assertIn('Happy', xmlstr) - self.assertIn('Sad', xmlstr) - self.assertIn('Workactive', xmlstr) - self.assertIn('Playactive', xmlstr) - self.assertIn('Eatactive', xmlstr) - self.assertIn('Sleeppassive', xmlstr) + self.assertEqual("", xmlstr[:6]) + self.assertEqual("", xmlstr[-7:]) + self.assertIn("high", xmlstr) + self.assertIn("true", xmlstr) + self.assertIn("Happy", xmlstr) + self.assertIn("Sad", xmlstr) + self.assertIn("Workactive", xmlstr) + self.assertIn("Playactive", xmlstr) + self.assertIn("Eatactive", xmlstr) + self.assertIn("Sleeppassive", xmlstr) def test_ConvertXmlToDict(self): - file_name = 'test.xml' + file_name = "test.xml" xml_file = os.path.join(self.files_root, file_name) dict_data = utilities.ConvertXmlToDict(root=xml_file) - solution = {'note': {'importance': 'high', - 'todo': [{'type': 'active', '_text': 'Work'}, - {'type': 'active', '_text': 'Play'}, - {'type': 'active', '_text': 'Eat'}, - {'type': 'passive', '_text': 'Sleep'}], - 'logged': 'true', 'title': ['Happy', 'Happy']}} + solution = { + "note": { + "importance": "high", + "todo": [ + {"type": "active", "_text": "Work"}, + {"type": "active", "_text": "Play"}, + {"type": "active", "_text": "Eat"}, + {"type": "passive", "_text": "Sleep"}, + ], + "logged": "true", + "title": ["Happy", "Happy"], + } + } # Check Result self.assertEqual(dict_data, solution) def test_ConvertXmlToDict_TypeError(self): - dictionary = {'1': '2'} + dictionary = {"1": "2"} self.assertRaises(TypeError, utilities.ConvertXmlToDict, root=dictionary) def test_XmlDictObject_dict(self): - dict_data = {'to_do': 'work', 'list1': ['test1', 'test2']} + dict_data = {"to_do": "work", "list1": ["test1", "test2"]} result = XmlDictObject.Wrap(dict_data) # Check setattr - self.assertEqual(str(result), '') - result._text = 'Hello, world!' + self.assertEqual(str(result), "") + result._text = "Hello, world!" 
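A round-trip usage sketch of the utilities exercised above: a plain dict rendered to an ElementTree element, and the file-based direction back to a dict using the sample file referenced in setUp (the path is relative to the repository's tests directory):

import xml.etree.ElementTree as ET

from tethys_dataset_services import utilities

data = {"note": {"importance": "high", "title": ["Happy", "Sad"]}}
root = utilities.ConvertDictToXml(data)
print(ET.tostring(root, encoding="unicode"))

parsed = utilities.ConvertXmlToDict(root="tests/files/test.xml")
print(parsed["note"]["importance"])  # "high", per the solution dict above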
result.x = 10 - result.list2 = ['test3', 'test4'] + result.list2 = ["test3", "test4"] # Check new object - self.assertEqual(str(result), 'Hello, world!') + self.assertEqual(str(result), "Hello, world!") self.assertEqual(result.x, 10) - self.assertEqual(result.to_do, 'work') - self.assertEqual(result.list1, ['test1', 'test2']) - self.assertEqual(result.list2, ['test3', 'test4']) + self.assertEqual(result.to_do, "work") + self.assertEqual(result.list1, ["test1", "test2"]) + self.assertEqual(result.list2, ["test3", "test4"]) # UnWrap new_dict = result.UnWrap() # Check new dict - self.assertEqual(new_dict['_text'], 'Hello, world!') - self.assertEqual(new_dict['x'], 10) - self.assertEqual(new_dict['list1'], ['test1', 'test2']) - self.assertEqual(new_dict['list2'], ['test3', 'test4']) + self.assertEqual(new_dict["_text"], "Hello, world!") + self.assertEqual(new_dict["x"], 10) + self.assertEqual(new_dict["list1"], ["test1", "test2"]) + self.assertEqual(new_dict["list2"], ["test3", "test4"]) diff --git a/tethys_dataset_services/__init__.py b/tethys_dataset_services/__init__.py index 08bdcd7..0093377 100644 --- a/tethys_dataset_services/__init__.py +++ b/tethys_dataset_services/__init__.py @@ -1 +1 @@ -__author__ = 'swainn' +__author__ = "swainn" diff --git a/tethys_dataset_services/base/dataset_engine_abc.py b/tethys_dataset_services/base/dataset_engine_abc.py index 2d09b2c..ae6cd84 100644 --- a/tethys_dataset_services/base/dataset_engine_abc.py +++ b/tethys_dataset_services/base/dataset_engine_abc.py @@ -16,6 +16,7 @@ class DatasetEngine: successful or not. If 'success' is True, then 'result' item will contain the resulting data. If 'success' is False, then 'error' should contain error information. """ + __metaclass__ = ABCMeta @property @@ -72,7 +73,7 @@ def __repr__(self): """ Representation of Dataset Engine object for debugging purposes. """ - return ''.format(self.type, self.endpoint) + return "".format(self.type, self.endpoint) @abstractmethod def search_datasets(self, query, **kwargs): diff --git a/tethys_dataset_services/base/spatial_dataset_engine_abc.py b/tethys_dataset_services/base/spatial_dataset_engine_abc.py index d3cbd1b..0b87be6 100644 --- a/tethys_dataset_services/base/spatial_dataset_engine_abc.py +++ b/tethys_dataset_services/base/spatial_dataset_engine_abc.py @@ -16,6 +16,7 @@ class SpatialDatasetEngine: successful or not. If 'success' is True, then 'result' item will contain the resulting data. If 'success' is False, then 'error' should contain error information. """ + __metaclass__ = ABCMeta @property @@ -72,7 +73,7 @@ def __repr__(self): """ Representation of Dataset Engine object for debugging purposes. 
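A caveat visible in both abstract base classes touched here: "__metaclass__ = ABCMeta" is the Python 2 spelling and has no effect under Python 3, so @abstractmethod is not actually enforced there. A sketch of the Python 3 form, using a hypothetical class name:

from abc import ABCMeta, abstractmethod


class DatasetEngineSketch(metaclass=ABCMeta):
    @abstractmethod
    def search_datasets(self, query, **kwargs):
        ...


# DatasetEngineSketch() now raises TypeError until search_datasets is overridden.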
""" - return ''.format(self.type, self.endpoint) + return "".format(self.type, self.endpoint) @abstractmethod def list_layers(self): diff --git a/tethys_dataset_services/engines/ckan_engine.py b/tethys_dataset_services/engines/ckan_engine.py index e5a5c52..ab1ee1b 100644 --- a/tethys_dataset_services/engines/ckan_engine.py +++ b/tethys_dataset_services/engines/ckan_engine.py @@ -10,7 +10,7 @@ from ..base import DatasetEngine -log = logging.getLogger('tethys_dataset_services.ckan_engine') +log = logging.getLogger("tethys_dataset_services.ckan_engine") class CkanDatasetEngine(DatasetEngine): @@ -23,7 +23,7 @@ def type(self): """ CKAN Dataset Engine Type """ - return 'CKAN' + return "CKAN" def _prepare_request(self, method, data_dict=None, file=None, apikey=None): """ @@ -45,18 +45,18 @@ def _prepare_request(self, method, data_dict=None, file=None, apikey=None): headers = {} if not file: - data_dict = json.dumps(data_dict).encode('ascii') - headers['Content-Type'] = 'application/json' + data_dict = json.dumps(data_dict).encode("ascii") + headers["Content-Type"] = "application/json" if apikey: apikey = str(apikey) else: apikey = str(self.apikey) - headers['X-CKAN-API-Key'] = apikey - headers['Authorization'] = apikey + headers["X-CKAN-API-Key"] = apikey + headers["Authorization"] = apikey - url = '/'.join((self.endpoint.rstrip('/'), method)) + url = "/".join((self.endpoint.rstrip("/"), method)) return url, data_dict, headers @staticmethod @@ -78,7 +78,7 @@ def _execute_request(url, data, headers, file=None): data.update(file) # data = {str(k): v for k, v in data.items()} m = MultipartEncoder(fields=data) - headers['Content-Type'] = m.content_type + headers["Content-Type"] = m.content_type r = requests.post(url, data=m, headers=headers) else: r = requests.post(url, data=data, headers=headers, files=file) @@ -100,24 +100,34 @@ def _parse_response(status, response, console=False): try: parsed = json.loads(response) if console: - if hasattr(parsed, 'get'): - if parsed.get('success'): + if hasattr(parsed, "get"): + if parsed.get("success"): try: pprint.pprint(parsed) except Exception: - log.exception('Exception encountered while trying to print debug info to the console.') + log.exception( + "Exception encountered while trying to print debug info to the console." 
+ ) else: - log.error('ERROR: {0}'.format(parsed['error']['message'])) + log.error("ERROR: {0}".format(parsed["error"]["message"])) return parsed except Exception: - log.exception('Status Code {0}: {1}'.format(status, response.encode('utf-8'))) + log.exception( + "Status Code {0}: {1}".format(status, response.encode("utf-8")) + ) return None - def execute_api_method(self, method, console=False, file=None, apikey=None, **kwargs): + def execute_api_method( + self, method, console=False, file=None, apikey=None, **kwargs + ): # Execute - url, data, headers = self._prepare_request(method=method, file=file, apikey=apikey, data_dict=kwargs) - status, response = self._execute_request(url=url, data=data, headers=headers, file=file) + url, data, headers = self._prepare_request( + method=method, file=file, apikey=apikey, data_dict=kwargs + ) + status, response = self._execute_request( + url=url, data=data, headers=headers, file=file + ) return self._parse_response(status, response, console) @@ -128,10 +138,10 @@ def _get_query_params(self, query_dict): query_terms = [] if len(query_dict.keys()) > 1: for key, value in query_dict.items(): - query_terms.append('{0}:{1}'.format(key, value)) + query_terms.append("{0}:{1}".format(key, value)) else: for key, value in query_dict.items(): - query_terms = '{0}:{1}'.format(key, value) + query_terms = "{0}:{1}".format(key, value) return query_terms def search_datasets(self, query=None, filtered_query=None, console=False, **kwargs): @@ -158,13 +168,13 @@ def search_datasets(self, query=None, filtered_query=None, console=False, **kwar # Assemble the query parameters if query: - data['q'] = self._get_query_params(query) + data["q"] = self._get_query_params(query) if filtered_query: - data['fq'] = self._get_query_params(filtered_query) + data["fq"] = self._get_query_params(filtered_query) # Execute - method = 'package_search' + method = "package_search" return self.execute_api_method(method=method, console=console, **data) def search_resources(self, query, console=False, **kwargs): @@ -186,10 +196,10 @@ def search_resources(self, query, console=False, **kwargs): data = kwargs # Assemble the query parameters - data['query'] = self._get_query_params(query) + data["query"] = self._get_query_params(query) # Execute - method = 'resource_search' + method = "resource_search" return self.execute_api_method(method=method, console=console, **data) def list_datasets(self, with_resources=False, console=False, **kwargs): @@ -212,9 +222,9 @@ def list_datasets(self, with_resources=False, console=False, **kwargs): # Execute API Method if not with_resources: - method = 'package_list' + method = "package_list" else: - method = 'current_package_list_with_resources' + method = "current_package_list_with_resources" return self.execute_api_method(method=method, console=console, **data) @@ -235,10 +245,10 @@ def get_dataset(self, dataset_id, console=False, **kwargs): """ # Assemble data dictionary data = kwargs - data['id'] = dataset_id + data["id"] = dataset_id # Execute - method = 'package_show' + method = "package_show" return self.execute_api_method(method=method, console=console, **data) def get_resource(self, resource_id, console=False, **kwargs): @@ -258,10 +268,10 @@ def get_resource(self, resource_id, console=False, **kwargs): """ # Assemble data dictionary data = kwargs - data['id'] = resource_id + data["id"] = resource_id # Execute - method = 'resource_show' + method = "resource_show" return self.execute_api_method(method=method, console=console, **data) def 
create_dataset(self, name, console=False, **kwargs): @@ -281,10 +291,10 @@ def create_dataset(self, name, console=False, **kwargs): """ # Assemble the data dictionary data = kwargs - data['name'] = name + data["name"] = name # Execute - method = 'package_create' + method = "package_create" return self.execute_api_method(method=method, console=console, **data) def create_resource(self, dataset_id, url=None, file=None, console=False, **kwargs): @@ -306,23 +316,25 @@ def create_resource(self, dataset_id, url=None, file=None, console=False, **kwar """ # Validate file and url parameters (mutually exclusive) if url and file: - raise IOError('The url and file parameters are mutually exclusive: use one, not both.') + raise IOError( + "The url and file parameters are mutually exclusive: use one, not both." + ) elif not url and not file: - raise IOError('The url or file parameter is required, but do not use both.') + raise IOError("The url or file parameter is required, but do not use both.") # Assemble the data dictionary - method = 'resource_create' + method = "resource_create" data = kwargs - data['package_id'] = dataset_id + data["package_id"] = dataset_id if url: - data['url'] = url + data["url"] = url else: - data['url'] = '' + data["url"] = "" # Default naming convention - if 'name' not in data and file: - data['name'] = os.path.basename(file) + if "name" not in data and file: + data["name"] = os.path.basename(file) # Prepare file if file: @@ -330,14 +342,18 @@ def create_resource(self, dataset_id, url=None, file=None, console=False, **kwar raise IOError('The file "{0}" does not exist.'.format(file)) else: filename, extension = os.path.splitext(file) - upload_file_name = data['name'] + upload_file_name = data["name"] if not upload_file_name.endswith(extension): upload_file_name += extension - with open(file, 'rb') as upload_file: - file = {'upload': (upload_file_name, upload_file)} - response = self.execute_api_method(method=method, console=console, file=file, **data) + with open(file, "rb") as upload_file: + file = {"upload": (upload_file_name, upload_file)} + response = self.execute_api_method( + method=method, console=console, file=file, **data + ) else: - response = self.execute_api_method(method=method, console=console, file=file, **data) + response = self.execute_api_method( + method=method, console=console, file=file, **data + ) return response @@ -358,7 +374,7 @@ def update_dataset(self, dataset_id, console=False, **kwargs): """ # Assemble the data dictionary data = kwargs - data['id'] = dataset_id + data["id"] = dataset_id # Preserve the resources and tags if not included in parameters """ @@ -367,25 +383,30 @@ def update_dataset(self, dataset_id, console=False, **kwargs): disassociated with the dataset and float off into the ether. This behavior is modified in this method so that these properties are retained by default, unless included in the parameters that are being updated. 
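A usage sketch tying together the search and create calls shown above; endpoint, API key, dataset names, and the file path are placeholders, and create_resource accepts either url or file but never both:

from tethys_dataset_services.engines.ckan_engine import CkanDatasetEngine

engine = CkanDatasetEngine(
    endpoint="https://ckan.example.com/api/3/action/", apikey="my-api-key"
)

found = engine.search_datasets(query={"name": "example"})
created = engine.create_dataset(name="example-dataset", notes="Created via the engine")
resource = engine.create_resource(
    dataset_id="example-dataset", file="/tmp/observations.csv"
)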
""" - original_url, original_data, original_headers = self._prepare_request(method='package_show', data_dict=data) - original_status, original_response = self._execute_request(url=original_url, data=original_data, - headers=original_headers) + original_url, original_data, original_headers = self._prepare_request( + method="package_show", data_dict=data + ) + original_status, original_response = self._execute_request( + url=original_url, data=original_data, headers=original_headers + ) original_result = self._parse_response(original_status, original_response) - if original_result['success']: - original_dataset = original_result['result'] + if original_result["success"]: + original_dataset = original_result["result"] - if 'resources' not in data: - data['resources'] = original_dataset['resources'] + if "resources" not in data: + data["resources"] = original_dataset["resources"] - if 'tags' not in data: - data['tags'] = original_dataset['tags'] + if "tags" not in data: + data["tags"] = original_dataset["tags"] # Execute - method = 'package_update' + method = "package_update" return self.execute_api_method(method=method, console=console, **data) - def update_resource(self, resource_id, url=None, file=None, console=False, **kwargs): + def update_resource( + self, resource_id, url=None, file=None, console=False, **kwargs + ): """ Update CKAN resource @@ -404,18 +425,20 @@ def update_resource(self, resource_id, url=None, file=None, console=False, **kwa """ # Validate file and url parameters (mutually exclusive) if url and file: - raise IOError('The url and file parameters are mutually exclusive: use one, not both.') + raise IOError( + "The url and file parameters are mutually exclusive: use one, not both." + ) # Assemble the data dictionary data = kwargs - data['id'] = resource_id + data["id"] = resource_id if url: - data['url'] = url + data["url"] = url # Default naming convention - if 'name' not in data and file: - data['name'] = os.path.basename(file) + if "name" not in data and file: + data["name"] = os.path.basename(file) # Prepare file update_file = None @@ -423,19 +446,21 @@ def update_resource(self, resource_id, url=None, file=None, console=False, **kwa if not os.path.isfile(file): raise IOError('The file "{0}" does not exist.'.format(file)) else: - update_file = open(file, 'rb') - file = {'upload': update_file} + update_file = open(file, "rb") + file = {"upload": update_file} # if not url and not file: - if 'url' not in data: + if "url" not in data: result = self.get_resource(resource_id) - if result['success']: - resource = result['result'] - data['url'] = resource['url'] + if result["success"]: + resource = result["result"] + data["url"] = resource["url"] # Execute - method = 'resource_update' - response = self.execute_api_method(method=method, console=console, file=file, **data) + method = "resource_update" + response = self.execute_api_method( + method=method, console=console, file=file, **data + ) # Clean up if update_file and not update_file.closed: @@ -460,11 +485,13 @@ def delete_dataset(self, dataset_id, console=False, file=None, **kwargs): """ # Assemble the data dictionary data = kwargs - data['id'] = dataset_id + data["id"] = dataset_id # Execute - method = 'package_delete' - return self.execute_api_method(method=method, console=console, file=file, **data) + method = "package_delete" + return self.execute_api_method( + method=method, console=console, file=file, **data + ) def delete_resource(self, resource_id, console=False, **kwargs): """ @@ -483,10 +510,10 @@ def 
delete_resource(self, resource_id, console=False, **kwargs): """ # Assemble the data dictionary data = kwargs - data['id'] = resource_id + data["id"] = resource_id # Execute - method = 'resource_delete' + method = "resource_delete" return self.execute_api_method(method=method, console=console, **data) def download_dataset(self, dataset_id, location=None, console=False, **kwargs): @@ -505,21 +532,25 @@ def download_dataset(self, dataset_id, location=None, console=False, **kwargs): A list of the files that were downloaded. """ result = self.get_dataset(dataset_id, console=console, **kwargs) - if result['success']: - dataset = result['result'] + if result["success"]: + dataset = result["result"] - location = location or dataset['name'] + location = location or dataset["name"] downloaded_resources = [] - for resource in dataset['resources']: + for resource in dataset["resources"]: downloaded_resource = self._download_resource(resource, location) downloaded_resources.append(downloaded_resource) return downloaded_resources else: - raise Exception(str(result)) # TODO: raise an error stating that dataset doesn't exist + raise Exception( + str(result) + ) # TODO: raise an error stating that dataset doesn't exist - def download_resouce(self, resource_id, location=None, local_file_name=None, console=False, **kwargs): + def download_resouce( + self, resource_id, location=None, local_file_name=None, console=False, **kwargs + ): """ Deprecated alias for download_resource method for backwards compatibility (the old method was misspelled). @@ -537,7 +568,7 @@ def download_resouce(self, resource_id, location=None, local_file_name=None, con """ warnings.warn( "This method has been deprecated because it was misspelled. Use download_resource instead.", - DeprecationWarning + DeprecationWarning, ) self.download_resource( resource_id=resource_id, @@ -547,7 +578,9 @@ def download_resouce(self, resource_id, location=None, local_file_name=None, con **kwargs ) - def download_resource(self, resource_id, location=None, local_file_name=None, console=False, **kwargs): + def download_resource( + self, resource_id, location=None, local_file_name=None, console=False, **kwargs + ): """ Download a resource from a resource id @@ -564,13 +597,17 @@ def download_resource(self, resource_id, location=None, local_file_name=None, co Path and name of the downloaded file. 
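A usage sketch of the download path above; the resource id and target directory are placeholders. The method looks the resource up first, streams its url into a local file, and returns the local path described in the docstring:

from tethys_dataset_services.engines.ckan_engine import CkanDatasetEngine

engine = CkanDatasetEngine(
    endpoint="https://ckan.example.com/api/3/action/", apikey="my-api-key"
)

local_path = engine.download_resource(
    resource_id="00000000-0000-0000-0000-000000000000",
    location="/tmp/ckan_downloads",
)
print(local_path)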
""" result = self.get_resource(resource_id, console=console, **kwargs) - if result['success']: - resource = result['result'] - downloaded_resource = self._download_resource(resource, location, local_file_name) + if result["success"]: + resource = result["result"] + downloaded_resource = self._download_resource( + resource, location, local_file_name + ) return downloaded_resource else: - raise Exception(str(result)) # TODO: raise an error stating that dataset doesn't exist + raise Exception( + str(result) + ) # TODO: raise an error stating that dataset doesn't exist def _download_resource(self, resource, location=None, local_file_name=None): """ @@ -578,8 +615,8 @@ def _download_resource(self, resource, location=None, local_file_name=None): """ # create filename with extension if not local_file_name: - local_file_name = resource['name'] or resource['id'] - local_file_name = '.'.join((local_file_name, resource['format'])) + local_file_name = resource["name"] or resource["id"] + local_file_name = ".".join((local_file_name, resource["format"])) # ensure that the location exists if location: @@ -588,15 +625,15 @@ def _download_resource(self, resource, location=None, local_file_name=None): except OSError: pass else: - location = './' + location = "./" local_file = os.path.join(location, local_file_name) - url = resource['url'] + url = resource["url"] # download resource try: r = requests.get(url, stream=True) - with open(local_file, 'wb') as f: + with open(local_file, "wb") as f: for chunk in r.iter_content(chunk_size=1024): if chunk: # filter out keep-alive new chunks f.write(chunk) @@ -611,7 +648,7 @@ def validate(self): Validate CKAN dataset engine. Will throw an error if not valid. """ # Strip off the '/action' or '/action/' portion of the endpoint URL - if self.endpoint[-1] == '/': + if self.endpoint[-1] == "/": api_endpoint = self.endpoint[:-8] else: api_endpoint = self.endpoint[:-7] @@ -619,13 +656,19 @@ def validate(self): r = requests.get(api_endpoint) except requests.exceptions.MissingSchema: - raise AssertionError('The URL "{0}" provided for the CKAN dataset service endpoint ' - 'is invalid.'.format(self.endpoint)) + raise AssertionError( + 'The URL "{0}" provided for the CKAN dataset service endpoint ' + "is invalid.".format(self.endpoint) + ) if r.status_code != 200: - raise AssertionError('The URL "{0}" is not a valid endpoint for a CKAN dataset ' - 'service.'.format(self.endpoint)) - - if 'version' not in r.json(): - raise AssertionError('The URL "{0}" is not a valid endpoint for a CKAN dataset ' - 'service.'.format(self.endpoint)) + raise AssertionError( + 'The URL "{0}" is not a valid endpoint for a CKAN dataset ' + "service.".format(self.endpoint) + ) + + if "version" not in r.json(): + raise AssertionError( + 'The URL "{0}" is not a valid endpoint for a CKAN dataset ' + "service.".format(self.endpoint) + ) diff --git a/tethys_dataset_services/engines/geoserver_engine.py b/tethys_dataset_services/engines/geoserver_engine.py index 251f33e..aa7c439 100644 --- a/tethys_dataset_services/engines/geoserver_engine.py +++ b/tethys_dataset_services/engines/geoserver_engine.py @@ -19,25 +19,31 @@ from ..utilities import ConvertDictToXml, ConvertXmlToDict from ..base import SpatialDatasetEngine -log = logging.getLogger('tds.engines.geoserver') +log = logging.getLogger("tds.engines.geoserver") class GeoServerSpatialDatasetEngine(SpatialDatasetEngine): """ Definition for GeoServer Dataset Engine objects. 
""" - XML_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'resources', 'geoserver', 'xml_templates') + + XML_PATH = os.path.join( + os.path.dirname(os.path.dirname(__file__)), + "resources", + "geoserver", + "xml_templates", + ) WARNING_STATUS_CODES = [403, 404] - GWC_OP_SEED = 'seed' - GWC_OP_RESEED = 'reseed' - GWC_OP_TRUNCATE = 'truncate' - GWC_OP_MASS_TRUNCATE = 'masstruncate' + GWC_OP_SEED = "seed" + GWC_OP_RESEED = "reseed" + GWC_OP_TRUNCATE = "truncate" + GWC_OP_MASS_TRUNCATE = "masstruncate" GWC_OPERATIONS = (GWC_OP_SEED, GWC_OP_RESEED, GWC_OP_TRUNCATE, GWC_OP_MASS_TRUNCATE) - GWC_KILL_ALL = 'all' - GWC_KILL_RUNNING = 'running' - GWC_KILL_PENDING = 'pending' + GWC_KILL_ALL = "all" + GWC_KILL_RUNNING = "running" + GWC_KILL_PENDING = "pending" GWC_KILL_OPERATIONS = (GWC_KILL_ALL, GWC_KILL_PENDING, GWC_KILL_RUNNING) GWC_STATUS_ABORTED = -1 @@ -45,43 +51,61 @@ class GeoServerSpatialDatasetEngine(SpatialDatasetEngine): GWC_STATUS_RUNNING = 1 GWC_STATUS_DONE = 2 GWC_STATUS_MAP = { - GWC_STATUS_ABORTED: 'Aborted', - GWC_STATUS_PENDING: 'Pending', - GWC_STATUS_RUNNING: 'Running', - GWC_STATUS_DONE: 'Done' + GWC_STATUS_ABORTED: "Aborted", + GWC_STATUS_PENDING: "Pending", + GWC_STATUS_RUNNING: "Running", + GWC_STATUS_DONE: "Done", } # coverage types - CT_AIG = 'AIG' - CT_ARC_GRID = 'ArcGrid' - CT_DTED = 'DTED' - CT_ECW = 'ECW' - CT_EHDR = 'EHdr' - CT_ENVIHDR = 'ENVIHdr' - CT_ERDASIMG = 'ERDASImg' - CT_GEOTIFF = 'GeoTIFF' - CT_GRASS_GRID = 'GrassGrid' - CT_GTOPO30 = 'Gtopo30' - CT_IMAGE_MOSAIC = 'ImageMosaic' - CT_IMAGE_PYRAMID = 'ImagePyramid' - CT_JP2MRSID = 'JP2MrSID' - CT_MRSID = 'MrSID' - CT_NETCDF = 'NetCDF' - CT_NITF = 'NITF' - CT_RPFTOC = 'RPFTOC' - CT_RST = 'RST' - CT_WORLD_IMAGE = 'WorldImage' - - VALID_COVERAGE_TYPES = (CT_AIG, CT_ARC_GRID, CT_DTED, CT_ECW, CT_EHDR, CT_ENVIHDR, CT_ERDASIMG, CT_GEOTIFF, - CT_GRASS_GRID, CT_GTOPO30, CT_IMAGE_MOSAIC, CT_IMAGE_PYRAMID, CT_JP2MRSID, CT_MRSID, - CT_NETCDF, CT_NITF, CT_RPFTOC, CT_RST, CT_WORLD_IMAGE) + CT_AIG = "AIG" + CT_ARC_GRID = "ArcGrid" + CT_DTED = "DTED" + CT_ECW = "ECW" + CT_EHDR = "EHdr" + CT_ENVIHDR = "ENVIHdr" + CT_ERDASIMG = "ERDASImg" + CT_GEOTIFF = "GeoTIFF" + CT_GRASS_GRID = "GrassGrid" + CT_GTOPO30 = "Gtopo30" + CT_IMAGE_MOSAIC = "ImageMosaic" + CT_IMAGE_PYRAMID = "ImagePyramid" + CT_JP2MRSID = "JP2MrSID" + CT_MRSID = "MrSID" + CT_NETCDF = "NetCDF" + CT_NITF = "NITF" + CT_RPFTOC = "RPFTOC" + CT_RST = "RST" + CT_WORLD_IMAGE = "WorldImage" + + VALID_COVERAGE_TYPES = ( + CT_AIG, + CT_ARC_GRID, + CT_DTED, + CT_ECW, + CT_EHDR, + CT_ENVIHDR, + CT_ERDASIMG, + CT_GEOTIFF, + CT_GRASS_GRID, + CT_GTOPO30, + CT_IMAGE_MOSAIC, + CT_IMAGE_PYRAMID, + CT_JP2MRSID, + CT_MRSID, + CT_NETCDF, + CT_NITF, + CT_RPFTOC, + CT_RST, + CT_WORLD_IMAGE, + ) @property def type(self): """ GeoServer Spatial Dataset Type """ - return 'GEOSERVER' + return "GEOSERVER" @property def gwc_endpoint(self): @@ -89,15 +113,21 @@ def gwc_endpoint(self): @property def catalog(self): - if not getattr(self, '_catalog', None): + if not getattr(self, "_catalog", None): self._catalog = GeoServerCatalog( - self.endpoint, - username=self.username, - password=self.password + self.endpoint, username=self.username, password=self.password ) return self._catalog - def __init__(self, endpoint, apikey=None, username=None, password=None, public_endpoint=None, node_ports=None): + def __init__( + self, + endpoint, + apikey=None, + username=None, + password=None, + public_endpoint=None, + node_ports=None, + ): """ Default constructor for Dataset Engines. 
@@ -111,18 +141,15 @@ def __init__(self, endpoint, apikey=None, username=None, password=None, public_e # Set custom property /geoserver/rest/ -> /geoserver/gwc/rest/ if public_endpoint: self.public_endpoint = public_endpoint - if '/' == endpoint[-1]: - self._gwc_endpoint = endpoint.replace('rest', 'gwc/rest') + if "/" == endpoint[-1]: + self._gwc_endpoint = endpoint.replace("rest", "gwc/rest") else: - self._gwc_endpoint = endpoint.replace('rest', 'gwc/rest/') + self._gwc_endpoint = endpoint.replace("rest", "gwc/rest/") self.node_ports = node_ports super(GeoServerSpatialDatasetEngine, self).__init__( - endpoint=endpoint, - apikey=apikey, - username=username, - password=password + endpoint=endpoint, apikey=apikey, username=username, password=password ) def __del__(self): @@ -132,26 +159,32 @@ def _apply_changes_to_gs_object(self, attributes_dict, gs_object): # Make the changes for attribute, value in attributes_dict.items(): if hasattr(gs_object, attribute): - if attribute == 'styles': + if attribute == "styles": styles_objects = [] - for style in attributes_dict['styles']: + for style in attributes_dict["styles"]: # Lookup by name and workspace - if ':' in style: - style_split = style.split(':') - styles_objects.append(self.catalog.get_style(name=style_split[1], workspace=style_split[0])) + if ":" in style: + style_split = style.split(":") + styles_objects.append( + self.catalog.get_style( + name=style_split[1], workspace=style_split[0] + ) + ) # Lookup by name only else: styles_objects.append(self.catalog.get_style(name=style)) - setattr(gs_object, 'styles', styles_objects) + setattr(gs_object, "styles", styles_objects) - elif attribute == 'default_style': - style = attributes_dict['default_style'] + elif attribute == "default_style": + style = attributes_dict["default_style"] - if ':' in style: - style_split = style.split(':') - style_object = self.catalog.get_style(name=style_split[1], workspace=style_split[0]) + if ":" in style: + style_split = style.split(":") + style_object = self.catalog.get_style( + name=style_split[1], workspace=style_split[0] + ) # Lookup by name only else: @@ -171,12 +204,12 @@ def _assemble_url(self, *args): endpoint = self.endpoint # Eliminate trailing slash if necessary - if endpoint[-1] == '/': + if endpoint[-1] == "/": endpoint = endpoint[:-1] pieces = list(args) pieces.insert(0, endpoint) - return '/'.join(pieces) + return "/".join(pieces) def _get_non_rest_endpoint(self): """ @@ -184,82 +217,128 @@ def _get_non_rest_endpoint(self): """ endpoint = self.endpoint # Eliminate trailing slash if necessary - if endpoint[-1] == '/': + if endpoint[-1] == "/": endpoint = endpoint[:-1] - if endpoint[-5:] == '/rest': + if endpoint[-5:] == "/rest": endpoint = endpoint[:-5] return endpoint - def _get_wms_url(self, layer_id, style='', srs='EPSG:4326', bbox='-180,-90,180,90', version='1.1.0', - width='512', height='512', output_format='image/png', tiled=False, transparent=True): + def _get_wms_url( + self, + layer_id, + style="", + srs="EPSG:4326", + bbox="-180,-90,180,90", + version="1.1.0", + width="512", + height="512", + output_format="image/png", + tiled=False, + transparent=True, + ): """ Assemble a WMS url. 
""" endpoint = self._get_non_rest_endpoint() if tiled: - tiled_option = 'yes' + tiled_option = "yes" else: - tiled_option = 'no' + tiled_option = "no" if transparent: - transparent_option = 'true' + transparent_option = "true" else: - transparent_option = 'false' - - wms_url = '{0}/wms?service=WMS&version={1}&request=GetMap&' \ - 'layers={2}&styles={3}&' \ - 'transparent={10}&tiled={9}&' \ - 'srs={4}&bbox={5}&' \ - 'width={6}&height={7}&' \ - 'format={8}'.format(endpoint, version, layer_id, style, srs, bbox, width, height, output_format, - tiled_option, transparent_option) + transparent_option = "false" + + wms_url = ( + "{0}/wms?service=WMS&version={1}&request=GetMap&" + "layers={2}&styles={3}&" + "transparent={10}&tiled={9}&" + "srs={4}&bbox={5}&" + "width={6}&height={7}&" + "format={8}".format( + endpoint, + version, + layer_id, + style, + srs, + bbox, + width, + height, + output_format, + tiled_option, + transparent_option, + ) + ) return wms_url - def _get_wcs_url(self, resource_id, srs='EPSG:4326', bbox='-180,-90,180,90', output_format='png', namespace=None, - width='512', height='512'): + def _get_wcs_url( + self, + resource_id, + srs="EPSG:4326", + bbox="-180,-90,180,90", + output_format="png", + namespace=None, + width="512", + height="512", + ): """ Assemble a WCS url. """ endpoint = self._get_non_rest_endpoint() - wcs_url = '{0}/wcs?service=WCS&version=1.1.0&request=GetCoverage&' \ - 'identifier={1}&' \ - 'srs={2}&BoundingBox={3}&' \ - 'width={5}&height={6}&' \ - 'format={4}'.format(endpoint, resource_id, srs, bbox, output_format, width, height) + wcs_url = ( + "{0}/wcs?service=WCS&version=1.1.0&request=GetCoverage&" + "identifier={1}&" + "srs={2}&BoundingBox={3}&" + "width={5}&height={6}&" + "format={4}".format( + endpoint, resource_id, srs, bbox, output_format, width, height + ) + ) if namespace and isinstance(namespace, str): - wcs_url = '{0}&namespace={1}'.format(wcs_url, namespace) + wcs_url = "{0}&namespace={1}".format(wcs_url, namespace) return wcs_url - def _get_wfs_url(self, resource_id, output_format='GML3'): + def _get_wfs_url(self, resource_id, output_format="GML3"): """ Assemble a WFS url. 
""" endpoint = self._get_non_rest_endpoint() - if output_format == 'GML3': - wfs_url = '{0}/wfs?service=WFS&version=2.0.0&request=GetFeature&typeNames={1}'.format(endpoint, resource_id) - elif output_format == 'GML2': - wfs_url = '{0}/wfs?service=WFS&version=1.0.0&request=GetFeature&typeNames={1}&' \ - 'outputFormat=GML2'.format(endpoint, resource_id) + if output_format == "GML3": + wfs_url = "{0}/wfs?service=WFS&version=2.0.0&request=GetFeature&typeNames={1}".format( + endpoint, resource_id + ) + elif output_format == "GML2": + wfs_url = ( + "{0}/wfs?service=WFS&version=1.0.0&request=GetFeature&typeNames={1}&" + "outputFormat=GML2".format(endpoint, resource_id) + ) else: - wfs_url = '{0}/wfs?service=WFS&version=2.0.0&request=GetFeature&typeNames={1}&' \ - 'outputFormat={2}'.format(endpoint, resource_id, output_format) + wfs_url = ( + "{0}/wfs?service=WFS&version=2.0.0&request=GetFeature&typeNames={1}&" + "outputFormat={2}".format(endpoint, resource_id, output_format) + ) return wfs_url def _get_node_endpoints(self, ports=None, public=True, gwc=False): node_endpoints = [] if not gwc: - endpoint = self.public_endpoint if public and hasattr(self, 'public_endpoint') else self.endpoint + endpoint = ( + self.public_endpoint + if public and hasattr(self, "public_endpoint") + else self.endpoint + ) else: endpoint = self.get_gwc_endpoint(public=public) - endpoint = f'{endpoint}/' if not endpoint.endswith('/') else endpoint + endpoint = f"{endpoint}/" if not endpoint.endswith("/") else endpoint if ports is None: ports = self.node_ports @@ -268,7 +347,9 @@ def _get_node_endpoints(self, ports=None, public=True, gwc=False): if ports is not None: gs_url = urlparse(endpoint) for port in ports: - node_endpoints.append(f"{gs_url.scheme}://{gs_url.hostname}:{port}{gs_url.path}") + node_endpoints.append( + f"{gs_url.scheme}://{gs_url.hostname}:{port}{gs_url.path}" + ) else: node_endpoints.append(endpoint) return node_endpoints @@ -286,25 +367,29 @@ def _handle_delete(self, identifier, gs_object, purge, recurse, debug): Handle delete calls """ # Initialize response dictionary - response_dict = {'success': False} + response_dict = {"success": False} if gs_object: try: # Execute - self.catalog.delete(config_object=gs_object, purge=purge, recurse=recurse) + self.catalog.delete( + config_object=gs_object, purge=purge, recurse=recurse + ) # Update response dictionary - response_dict['success'] = True - response_dict['result'] = None + response_dict["success"] = True + response_dict["result"] = None except geoserver.catalog.FailedRequestError as e: # Update response dictionary - response_dict['success'] = False - response_dict['error'] = str(e) + response_dict["success"] = False + response_dict["error"] = str(e) else: # Update response dictionary - response_dict['success'] = False - response_dict['error'] = 'GeoServer object does not exist: "{0}".'.format(identifier) + response_dict["success"] = False + response_dict["error"] = 'GeoServer object does not exist: "{0}".'.format( + identifier + ) self._handle_debug(response_dict, debug) return response_dict @@ -320,8 +405,7 @@ def _handle_list(self, gs_objects, with_properties, debug): names.append(gs_object.name) # Assemble Response - response_dict = {'success': True, - 'result': names} + response_dict = {"success": True, "result": names} # Handle the debug and return self._handle_debug(response_dict, debug) @@ -331,8 +415,7 @@ def _handle_list(self, gs_objects, with_properties, debug): gs_object_dicts = self._transcribe_geoserver_objects(gs_objects) # Assemble 
Response - response_dict = {'success': True, - 'result': gs_object_dicts} + response_dict = {"success": True, "result": gs_object_dicts} self._handle_debug(response_dict, debug) return response_dict @@ -346,8 +429,8 @@ def _process_identifier(self, identifier): name = identifier # Colon ':' is a delimiter between workspace and name i.e: workspace:name - if ':' in identifier: - workspace, name = identifier.split(':') + if ":" in identifier: + workspace, name = identifier.split(":") return workspace, name @@ -366,16 +449,26 @@ def _transcribe_geoserver_object(self, gs_object): Convert geoserver objects to Python dictionaries. """ # Constants - NAMED_OBJECTS = ('store', 'workspace') - NAMED_OBJECTS_WITH_WORKSPACE = ('resource', 'default_style') - OMIT_ATTRIBUTES = ('writers', 'attribution_object', 'dirty', 'dom', 'save_method') + NAMED_OBJECTS = ("store", "workspace") + NAMED_OBJECTS_WITH_WORKSPACE = ("resource", "default_style") + OMIT_ATTRIBUTES = ( + "writers", + "attribution_object", + "dirty", + "dom", + "save_method", + ) # Load into a dictionary object_dictionary = {} resource_object = None # Get the non-private attributes - attributes = [a for a in dir(gs_object) if not a.startswith('__') and not a.startswith('_')] + attributes = [ + a + for a in dir(gs_object) + if not a.startswith("__") and not a.startswith("_") + ] for attribute in attributes: if not callable(getattr(gs_object, attribute)): @@ -391,17 +484,19 @@ def _transcribe_geoserver_object(self, gs_object): # Append workspace if applicable sub_object = getattr(gs_object, attribute) # Stash resource for later use - if attribute == 'resource': + if attribute == "resource": resource_object = sub_object if sub_object and not isinstance(sub_object, str): if sub_object.workspace: try: - object_dictionary[attribute] = '{0}:{1}'.format(sub_object.workspace.name, - sub_object.name) + object_dictionary[attribute] = "{0}:{1}".format( + sub_object.workspace.name, sub_object.name + ) except AttributeError: - object_dictionary[attribute] = '{0}:{1}'.format(sub_object.workspace, - sub_object.name) + object_dictionary[attribute] = "{0}:{1}".format( + sub_object.workspace, sub_object.name + ) else: object_dictionary[attribute] = sub_object.name elif isinstance(sub_object, str): @@ -411,12 +506,12 @@ def _transcribe_geoserver_object(self, gs_object): # Omit these attributes pass - elif attribute == 'catalog': + elif attribute == "catalog": # Store URL in place of catalog - catalog_object = getattr(gs_object, 'catalog') + catalog_object = getattr(gs_object, "catalog") object_dictionary[attribute] = catalog_object.service_url - elif attribute == 'styles': + elif attribute == "styles": styles = getattr(gs_object, attribute) styles_names = [] @@ -424,7 +519,9 @@ def _transcribe_geoserver_object(self, gs_object): if style is not None: if not isinstance(style, str): if style.workspace: - styles_names.append('{0}:{1}'.format(style.workspace, style.name)) + styles_names.append( + "{0}:{1}".format(style.workspace, style.name) + ) else: styles_names.append(style.name) else: @@ -437,34 +534,36 @@ def _transcribe_geoserver_object(self, gs_object): object_dictionary[attribute] = getattr(gs_object, attribute) # Inject appropriate WFS and WMS URLs - if 'resource_type' in object_dictionary: + if "resource_type" in object_dictionary: # Feature Types Get WFS - if object_dictionary['resource_type'] == 'featureType': - if object_dictionary['workspace']: - resource_id = '{0}:{1}'.format(object_dictionary['workspace'], object_dictionary['name']) + if 
object_dictionary["resource_type"] == "featureType": + if object_dictionary["workspace"]: + resource_id = "{0}:{1}".format( + object_dictionary["workspace"], object_dictionary["name"] + ) else: - resource_id = object_dictionary['name'] - - object_dictionary['wfs'] = { - 'gml3': self._get_wfs_url(resource_id, 'GML3'), - 'gml2': self._get_wfs_url(resource_id, 'GML2'), - 'shapefile': self._get_wfs_url(resource_id, 'shape-zip'), - 'geojson': self._get_wfs_url(resource_id, 'application/json'), - 'geojsonp': self._get_wfs_url(resource_id, 'text/javascript'), - 'csv': self._get_wfs_url(resource_id, 'csv') + resource_id = object_dictionary["name"] + + object_dictionary["wfs"] = { + "gml3": self._get_wfs_url(resource_id, "GML3"), + "gml2": self._get_wfs_url(resource_id, "GML2"), + "shapefile": self._get_wfs_url(resource_id, "shape-zip"), + "geojson": self._get_wfs_url(resource_id, "application/json"), + "geojsonp": self._get_wfs_url(resource_id, "text/javascript"), + "csv": self._get_wfs_url(resource_id, "csv"), } # Coverage Types Get WCS - elif object_dictionary['resource_type'] == 'coverage': + elif object_dictionary["resource_type"] == "coverage": workspace = None - name = object_dictionary['name'] - bbox = '-180,-90,180,90' - srs = 'EPSG:4326' - width = '512' - height = '512' + name = object_dictionary["name"] + bbox = "-180,-90,180,90" + srs = "EPSG:4326" + width = "512" + height = "512" - if object_dictionary['workspace']: - workspace = object_dictionary['workspace'] + if object_dictionary["workspace"]: + workspace = object_dictionary["workspace"] if resource_object and resource_object.native_bbox: # Find the native bounding box @@ -474,40 +573,92 @@ def _transcribe_geoserver_object(self, gs_object): miny = nbbox[2] maxy = nbbox[3] srs = resource_object.projection - bbox = '{0},{1},{2},{3}'.format(minx, miny, maxx, maxy) + bbox = "{0},{1},{2},{3}".format(minx, miny, maxx, maxy) # Resize the width to be proportionate to the image aspect ratio - aspect_ratio = (float(maxx) - float(minx)) / (float(maxy) - float(miny)) + aspect_ratio = (float(maxx) - float(minx)) / ( + float(maxy) - float(miny) + ) width = str(int(aspect_ratio * float(height))) - object_dictionary['wcs'] = { - 'png': self._get_wcs_url(name, output_format='png', namespace=workspace, srs=srs, bbox=bbox), - 'gif': self._get_wcs_url(name, output_format='gif', namespace=workspace, srs=srs, bbox=bbox), - 'jpeg': self._get_wcs_url(name, output_format='jpeg', namespace=workspace, srs=srs, bbox=bbox), - 'tiff': self._get_wcs_url(name, output_format='tif', namespace=workspace, srs=srs, bbox=bbox), - 'bmp': self._get_wcs_url(name, output_format='bmp', namespace=workspace, srs=srs, bbox=bbox), - 'geotiff': self._get_wcs_url(name, output_format='geotiff', namespace=workspace, srs=srs, - bbox=bbox), - 'gtopo30': self._get_wcs_url(name, output_format='gtopo30', namespace=workspace, srs=srs, - bbox=bbox), - 'arcgrid': self._get_wcs_url(name, output_format='ArcGrid', namespace=workspace, srs=srs, - bbox=bbox), - 'arcgrid_gz': self._get_wcs_url(name, output_format='ArcGrid-GZIP', namespace=workspace, srs=srs, - bbox=bbox), + object_dictionary["wcs"] = { + "png": self._get_wcs_url( + name, + output_format="png", + namespace=workspace, + srs=srs, + bbox=bbox, + ), + "gif": self._get_wcs_url( + name, + output_format="gif", + namespace=workspace, + srs=srs, + bbox=bbox, + ), + "jpeg": self._get_wcs_url( + name, + output_format="jpeg", + namespace=workspace, + srs=srs, + bbox=bbox, + ), + "tiff": self._get_wcs_url( + name, + output_format="tif", + 
namespace=workspace, + srs=srs, + bbox=bbox, + ), + "bmp": self._get_wcs_url( + name, + output_format="bmp", + namespace=workspace, + srs=srs, + bbox=bbox, + ), + "geotiff": self._get_wcs_url( + name, + output_format="geotiff", + namespace=workspace, + srs=srs, + bbox=bbox, + ), + "gtopo30": self._get_wcs_url( + name, + output_format="gtopo30", + namespace=workspace, + srs=srs, + bbox=bbox, + ), + "arcgrid": self._get_wcs_url( + name, + output_format="ArcGrid", + namespace=workspace, + srs=srs, + bbox=bbox, + ), + "arcgrid_gz": self._get_wcs_url( + name, + output_format="ArcGrid-GZIP", + namespace=workspace, + srs=srs, + bbox=bbox, + ), } - elif object_dictionary['resource_type'] == 'layer': + elif object_dictionary["resource_type"] == "layer": # Defaults - bbox = '-180,-90,180,90' - srs = 'EPSG:4326' - width = '512' - height = '512' - style = '' + bbox = "-180,-90,180,90" + srs = "EPSG:4326" + width = "512" + height = "512" + style = "" # Layer and style - layer = object_dictionary['name'] - if 'default_style' in object_dictionary: - style = object_dictionary['default_style'] + layer = object_dictionary["name"] + if "default_style" in object_dictionary: + style = object_dictionary["default_style"] # Try to extract the bounding box from the resource which was saved earlier if resource_object and resource_object.native_bbox: @@ -518,100 +669,300 @@ def _transcribe_geoserver_object(self, gs_object): miny = nbbox[2] maxy = nbbox[3] srs = resource_object.projection - bbox = '{0},{1},{2},{3}'.format(minx, miny, maxx, maxy) + bbox = "{0},{1},{2},{3}".format(minx, miny, maxx, maxy) # Resize the width to be proportionate to the image aspect ratio - aspect_ratio = (float(maxx) - float(minx)) / (float(maxy) - float(miny)) + aspect_ratio = (float(maxx) - float(minx)) / ( + float(maxy) - float(miny) + ) width = str(int(aspect_ratio * float(height))) - object_dictionary['wms'] = { - 'png': self._get_wms_url(layer, style, bbox=bbox, srs=srs, width=width, height=height, - output_format='image/png'), - 'png8': self._get_wms_url(layer, style, bbox=bbox, srs=srs, width=width, height=height, - output_format='image/png8'), - 'jpeg': self._get_wms_url(layer, style, bbox=bbox, srs=srs, width=width, height=height, - output_format='image/jpeg'), - 'gif': self._get_wms_url(layer, style, bbox=bbox, srs=srs, width=width, height=height, - output_format='image/gif'), - 'tiff': self._get_wms_url(layer, style, bbox=bbox, srs=srs, width=width, height=height, - output_format='image/tiff'), - 'tiff8': self._get_wms_url(layer, style, bbox=bbox, srs=srs, width=width, height=height, - output_format='image/tiff8'), - 'geotiff': self._get_wms_url(layer, style, bbox=bbox, srs=srs, width=width, height=height, - output_format='image/geotiff'), - 'geotiff8': self._get_wms_url(layer, style, bbox=bbox, srs=srs, width=width, height=height, - output_format='image/geotiff8'), - 'svg': self._get_wms_url(layer, style, bbox=bbox, srs=srs, width=width, height=height, - output_format='image/svg'), - 'pdf': self._get_wms_url(layer, style, bbox=bbox, srs=srs, width=width, height=height, - output_format='application/pdf'), - 'georss': self._get_wms_url(layer, style, bbox=bbox, srs=srs, width=width, height=height, - output_format='rss'), - 'kml': self._get_wms_url(layer, style, bbox=bbox, srs=srs, width=width, height=height, - output_format='kml'), - 'kmz': self._get_wms_url(layer, style, bbox=bbox, srs=srs, width=width, height=height, - output_format='kmz'), - 'openlayers': self._get_wms_url(layer, style, bbox=bbox, srs=srs, width=width, 
height=height, - output_format='application/openlayers') + object_dictionary["wms"] = { + "png": self._get_wms_url( + layer, + style, + bbox=bbox, + srs=srs, + width=width, + height=height, + output_format="image/png", + ), + "png8": self._get_wms_url( + layer, + style, + bbox=bbox, + srs=srs, + width=width, + height=height, + output_format="image/png8", + ), + "jpeg": self._get_wms_url( + layer, + style, + bbox=bbox, + srs=srs, + width=width, + height=height, + output_format="image/jpeg", + ), + "gif": self._get_wms_url( + layer, + style, + bbox=bbox, + srs=srs, + width=width, + height=height, + output_format="image/gif", + ), + "tiff": self._get_wms_url( + layer, + style, + bbox=bbox, + srs=srs, + width=width, + height=height, + output_format="image/tiff", + ), + "tiff8": self._get_wms_url( + layer, + style, + bbox=bbox, + srs=srs, + width=width, + height=height, + output_format="image/tiff8", + ), + "geotiff": self._get_wms_url( + layer, + style, + bbox=bbox, + srs=srs, + width=width, + height=height, + output_format="image/geotiff", + ), + "geotiff8": self._get_wms_url( + layer, + style, + bbox=bbox, + srs=srs, + width=width, + height=height, + output_format="image/geotiff8", + ), + "svg": self._get_wms_url( + layer, + style, + bbox=bbox, + srs=srs, + width=width, + height=height, + output_format="image/svg", + ), + "pdf": self._get_wms_url( + layer, + style, + bbox=bbox, + srs=srs, + width=width, + height=height, + output_format="application/pdf", + ), + "georss": self._get_wms_url( + layer, + style, + bbox=bbox, + srs=srs, + width=width, + height=height, + output_format="rss", + ), + "kml": self._get_wms_url( + layer, + style, + bbox=bbox, + srs=srs, + width=width, + height=height, + output_format="kml", + ), + "kmz": self._get_wms_url( + layer, + style, + bbox=bbox, + srs=srs, + width=width, + height=height, + output_format="kmz", + ), + "openlayers": self._get_wms_url( + layer, + style, + bbox=bbox, + srs=srs, + width=width, + height=height, + output_format="application/openlayers", + ), } - elif object_dictionary['resource_type'] == 'layerGroup': + elif object_dictionary["resource_type"] == "layerGroup": # Defaults - bbox = '-180,-90,180,90' - srs = 'EPSG:4326' - width = '512' - height = '512' - style = '' + bbox = "-180,-90,180,90" + srs = "EPSG:4326" + width = "512" + height = "512" + style = "" # Layer and style - layer = object_dictionary['name'] - if 'default_style' in object_dictionary: - style = object_dictionary['default_style'] + layer = object_dictionary["name"] + if "default_style" in object_dictionary: + style = object_dictionary["default_style"] # Try to extract the bounding box from the resource which was saved earlier - if 'bounds' in object_dictionary and object_dictionary['bounds']: + if "bounds" in object_dictionary and object_dictionary["bounds"]: # Find the native bounding box - nbbox = object_dictionary['bounds'] + nbbox = object_dictionary["bounds"] minx = nbbox[0] maxx = nbbox[1] miny = nbbox[2] maxy = nbbox[3] srs = nbbox[4] - bbox = '{0},{1},{2},{3}'.format(minx, miny, maxx, maxy) + bbox = "{0},{1},{2},{3}".format(minx, miny, maxx, maxy) # Resize the width to be proportionate to the image aspect ratio - aspect_ratio = (float(maxx) - float(minx)) / (float(maxy) - float(miny)) + aspect_ratio = (float(maxx) - float(minx)) / ( + float(maxy) - float(miny) + ) width = str(int(aspect_ratio * float(height))) - object_dictionary['wms'] = { - 'png': self._get_wms_url(layer, style, bbox=bbox, srs=srs, width=width, height=height, - output_format='image/png'), - 
'png8': self._get_wms_url(layer, style, bbox=bbox, srs=srs, width=width, height=height, - output_format='image/png8'), - 'jpeg': self._get_wms_url(layer, style, bbox=bbox, srs=srs, width=width, height=height, - output_format='image/jpeg'), - 'gif': self._get_wms_url(layer, style, bbox=bbox, srs=srs, width=width, height=height, - output_format='image/gif'), - 'tiff': self._get_wms_url(layer, style, bbox=bbox, srs=srs, width=width, height=height, - output_format='image/tiff'), - 'tiff8': self._get_wms_url(layer, style, bbox=bbox, srs=srs, width=width, height=height, - output_format='image/tiff8'), - 'geptiff': self._get_wms_url(layer, style, bbox=bbox, srs=srs, width=width, height=height, - output_format='image/geotiff'), - 'geotiff8': self._get_wms_url(layer, style, bbox=bbox, srs=srs, width=width, height=height, - output_format='image/geotiff8'), - 'svg': self._get_wms_url(layer, style, bbox=bbox, srs=srs, width=width, height=height, - output_format='image/svg'), - 'pdf': self._get_wms_url(layer, style, bbox=bbox, srs=srs, width=width, height=height, - output_format='application/pdf'), - 'georss': self._get_wms_url(layer, style, bbox=bbox, srs=srs, width=width, height=height, - output_format='rss'), - 'kml': self._get_wms_url(layer, style, bbox=bbox, srs=srs, width=width, height=height, - output_format='kml'), - 'kmz': self._get_wms_url(layer, style, bbox=bbox, srs=srs, width=width, height=height, - output_format='kmz'), - 'openlayers': self._get_wms_url(layer, style, bbox=bbox, srs=srs, width=width, height=height, - output_format='application/openlayers') + object_dictionary["wms"] = { + "png": self._get_wms_url( + layer, + style, + bbox=bbox, + srs=srs, + width=width, + height=height, + output_format="image/png", + ), + "png8": self._get_wms_url( + layer, + style, + bbox=bbox, + srs=srs, + width=width, + height=height, + output_format="image/png8", + ), + "jpeg": self._get_wms_url( + layer, + style, + bbox=bbox, + srs=srs, + width=width, + height=height, + output_format="image/jpeg", + ), + "gif": self._get_wms_url( + layer, + style, + bbox=bbox, + srs=srs, + width=width, + height=height, + output_format="image/gif", + ), + "tiff": self._get_wms_url( + layer, + style, + bbox=bbox, + srs=srs, + width=width, + height=height, + output_format="image/tiff", + ), + "tiff8": self._get_wms_url( + layer, + style, + bbox=bbox, + srs=srs, + width=width, + height=height, + output_format="image/tiff8", + ), + "geptiff": self._get_wms_url( + layer, + style, + bbox=bbox, + srs=srs, + width=width, + height=height, + output_format="image/geotiff", + ), + "geotiff8": self._get_wms_url( + layer, + style, + bbox=bbox, + srs=srs, + width=width, + height=height, + output_format="image/geotiff8", + ), + "svg": self._get_wms_url( + layer, + style, + bbox=bbox, + srs=srs, + width=width, + height=height, + output_format="image/svg", + ), + "pdf": self._get_wms_url( + layer, + style, + bbox=bbox, + srs=srs, + width=width, + height=height, + output_format="application/pdf", + ), + "georss": self._get_wms_url( + layer, + style, + bbox=bbox, + srs=srs, + width=width, + height=height, + output_format="rss", + ), + "kml": self._get_wms_url( + layer, + style, + bbox=bbox, + srs=srs, + width=width, + height=height, + output_format="kml", + ), + "kmz": self._get_wms_url( + layer, + style, + bbox=bbox, + srs=srs, + width=width, + height=height, + output_format="kmz", + ), + "openlayers": self._get_wms_url( + layer, + style, + bbox=bbox, + srs=srs, + width=width, + height=height, + output_format="application/openlayers", + 
), } return object_dictionary @@ -623,14 +974,14 @@ def get_gwc_endpoint(self, public=True): Args: public (bool): return with the public endpoint if True. """ - if public and hasattr(self, 'public_endpoint'): - gs_endpoint = self.public_endpoint.replace('rest', 'gwc/rest') + if public and hasattr(self, "public_endpoint"): + gs_endpoint = self.public_endpoint.replace("rest", "gwc/rest") else: gs_endpoint = self._gwc_endpoint # Add trailing slash for consistency. - if not gs_endpoint.endswith('/'): - gs_endpoint += '/' + if not gs_endpoint.endswith("/"): + gs_endpoint += "/" return gs_endpoint @@ -642,12 +993,16 @@ def get_ows_endpoint(self, workspace, public=True): workspace (str): the name of the workspace public (bool): return with the public endpoint if True. """ - gs_endpoint = self.public_endpoint if public and hasattr(self, 'public_endpoint') else self.endpoint - gs_endpoint = gs_endpoint.replace('rest', '{0}/ows'.format(workspace)) + gs_endpoint = ( + self.public_endpoint + if public and hasattr(self, "public_endpoint") + else self.endpoint + ) + gs_endpoint = gs_endpoint.replace("rest", "{0}/ows".format(workspace)) # Add trailing slash for consistency. - if not gs_endpoint.endswith('/'): - gs_endpoint += '/' + if not gs_endpoint.endswith("/"): + gs_endpoint += "/" return gs_endpoint def get_wms_endpoint(self, public=True): @@ -657,12 +1012,16 @@ def get_wms_endpoint(self, public=True): Args: public (bool): return with the public endpoint if True. """ - gs_endpoint = self.public_endpoint if public and hasattr(self, 'public_endpoint') else self.endpoint - gs_endpoint = gs_endpoint.replace('rest', 'wms') + gs_endpoint = ( + self.public_endpoint + if public and hasattr(self, "public_endpoint") + else self.endpoint + ) + gs_endpoint = gs_endpoint.replace("rest", "wms") # Add trailing slash for consistency. 
- if not gs_endpoint.endswith('/'): - gs_endpoint += '/' + if not gs_endpoint.endswith("/"): + gs_endpoint += "/" return gs_endpoint def close(self): @@ -681,21 +1040,29 @@ def reload(self, ports=None, public=True): node_endpoints = self._get_node_endpoints(ports=ports, public=public) log.debug("Catalog Reload URLS: {0}".format(node_endpoints)) - response_dict = {'success': True, 'result': None, 'error': []} + response_dict = {"success": True, "result": None, "error": []} for endpoint in node_endpoints: try: - response = requests.post(f'{endpoint}reload', auth=(self.username, self.password)) + response = requests.post( + f"{endpoint}reload", auth=(self.username, self.password) + ) if response.status_code != 200: - msg = "Catalog Reload Status Code {0}: {1}".format(response.status_code, response.text) + msg = "Catalog Reload Status Code {0}: {1}".format( + response.status_code, response.text + ) exception = requests.RequestException(msg, response=response) log.error(exception) - response_dict['success'] = False - response_dict['error'].append(msg) + response_dict["success"] = False + response_dict["error"].append(msg) except requests.ConnectionError: - log.warning('Catalog could not be reloaded on a GeoServer node.') + log.warning("Catalog could not be reloaded on a GeoServer node.") - response_dict.pop('error', None) if not response_dict['error'] else response_dict.pop('result', None) + ( + response_dict.pop("error", None) + if not response_dict["error"] + else response_dict.pop("result", None) + ) return response_dict def gwc_reload(self, ports=None, public=True): @@ -711,33 +1078,45 @@ def gwc_reload(self, ports=None, public=True): node_endpoints = self._get_node_endpoints(ports=ports, public=public, gwc=True) log.debug("GeoWebCache Reload URLS: {0}".format(node_endpoints)) - response_dict = {'success': True, 'result': None, 'error': []} + response_dict = {"success": True, "result": None, "error": []} for endpoint in node_endpoints: retries_remaining = 3 while retries_remaining > 0: try: - response = requests.post(f'{endpoint}reload', auth=(self.username, self.password)) + response = requests.post( + f"{endpoint}reload", auth=(self.username, self.password) + ) if response.status_code != 200: - msg = "GeoWebCache Reload Status Code {0}: {1}".format(response.status_code, response.text) + msg = "GeoWebCache Reload Status Code {0}: {1}".format( + response.status_code, response.text + ) exception = requests.RequestException(msg, response=response) log.error(exception) retries_remaining -= 1 if retries_remaining == 0: - response_dict['success'] = False - response_dict['error'].append(msg) + response_dict["success"] = False + response_dict["error"].append(msg) continue except requests.ConnectionError: - log.warning('GeoWebCache could not be reloaded on a GeoServer node.') + log.warning( + "GeoWebCache could not be reloaded on a GeoServer node." + ) retries_remaining -= 1 break - response_dict.pop('error', None) if not response_dict['error'] else response_dict.pop('result', None) + ( + response_dict.pop("error", None) + if not response_dict["error"] + else response_dict.pop("result", None) + ) return response_dict - def list_resources(self, with_properties=False, store=None, workspace=None, debug=False): + def list_resources( + self, with_properties=False, store=None, workspace=None, debug=False + ): """ List the names of all resources available from the spatial dataset service. 
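The reload and gwc_reload hunks above keep this engine's response-dict convention: a "success" flag plus either a "result" value or an "error" list, with the unused key popped before the dictionary is returned. A minimal caller-side sketch, assuming an already-configured engine instance (its construction is not part of this patch):

    # "engine" is assumed to be a configured GeoServer engine instance.
    result = engine.reload(public=True)
    if result["success"]:
        print("Catalog reloaded on all GeoServer nodes")
    else:
        # On failure the dict carries an "error" list instead of "result".
        for msg in result["error"]:
            print("Catalog reload failed:", msg)

    gwc_result = engine.gwc_reload(public=True)
    print("GeoWebCache reload succeeded:", gwc_result["success"])
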
@@ -760,14 +1139,17 @@ def list_resources(self, with_properties=False, store=None, workspace=None, debu """ try: - resource_objects = self.catalog.get_resources(stores=store, workspaces=workspace) + resource_objects = self.catalog.get_resources( + stores=store, workspaces=workspace + ) return self._handle_list(resource_objects, with_properties, debug) except geoserver.catalog.AmbiguousRequestError as e: - response_object = {'success': False, - 'error': str(e)} + response_object = {"success": False, "error": str(e)} except TypeError: - response_object = {'success': False, - 'error': 'Multiple stores found named "{0}".'.format(store)} + response_object = { + "success": False, + "error": 'Multiple stores found named "{0}".'.format(store), + } self._handle_debug(response_object, debug) return response_object @@ -859,8 +1241,10 @@ def list_stores(self, workspace=None, with_properties=False, debug=False): return self._handle_list(stores, with_properties, debug) except AttributeError: - response_dict = {'success': False, - 'error': 'Invalid workspace "{0}".'.format(workspace)} + response_dict = { + "success": False, + "error": 'Invalid workspace "{0}".'.format(workspace), + } self._handle_debug(response_dict, debug) return response_dict @@ -918,20 +1302,22 @@ def get_resource(self, resource_id, store_id=None, debug=False): # Get resource try: - resource = self.catalog.get_resource(name=name, store=store_id, workspace=workspace) + resource = self.catalog.get_resource( + name=name, store=store_id, workspace=workspace + ) if not resource: - response_dict = {'success': False, - 'error': 'Resource "{0}" not found.'.format(resource_id)} + response_dict = { + "success": False, + "error": 'Resource "{0}" not found.'.format(resource_id), + } else: resource_dict = self._transcribe_geoserver_object(resource) # Assemble Response - response_dict = {'success': True, - 'result': resource_dict} + response_dict = {"success": True, "result": resource_dict} except geoserver.catalog.FailedRequestError as e: - response_dict = {'success': False, - 'error': str(e)} + response_dict = {"success": False, "error": str(e)} # Handle the debug and return self._handle_debug(response_dict, debug) @@ -960,28 +1346,28 @@ def get_layer(self, layer_id, store_id=None, debug=False): if layer and store_id: layer.store = store_id if not layer: - response_dict = {'success': False, - 'error': 'Layer "{0}" not found.'.format(layer_id)} + response_dict = { + "success": False, + "error": 'Layer "{0}" not found.'.format(layer_id), + } else: layer_dict = self._transcribe_geoserver_object(layer) # Get layer caching properties (gsconfig doesn't support this) - gwc_url = '{0}layers/{1}.xml'.format(self.gwc_endpoint, layer_id) + gwc_url = "{0}layers/{1}.xml".format(self.gwc_endpoint, layer_id) auth = (self.username, self.password) r = requests.get(gwc_url, auth=auth) if r.status_code == 200: root = ElementTree.XML(r.text) tile_caching_dict = ConvertXmlToDict(root) - layer_dict['tile_caching'] = tile_caching_dict['GeoServerLayer'] + layer_dict["tile_caching"] = tile_caching_dict["GeoServerLayer"] # Assemble Response - response_dict = {'success': True, - 'result': layer_dict} + response_dict = {"success": True, "result": layer_dict} except geoserver.catalog.FailedRequestError as e: - response_dict = {'success': False, - 'error': str(e)} + response_dict = {"success": False, "error": str(e)} # Handle the debug and return self._handle_debug(response_dict, debug) @@ -1016,22 +1402,24 @@ def get_layer_group(self, layer_group_id, debug=False): # Get 
layer group # Using get_layergroups instead of get_layergroup b/c get_layergroup # cannot handle the case where workspaces is None (always returns None) - layer_groups = self.catalog.get_layergroups(names=name, workspaces=workspaces) + layer_groups = self.catalog.get_layergroups( + names=name, workspaces=workspaces + ) layer_group = self.catalog._return_first_item(layer_groups) if not layer_group: - response_dict = {'success': False, - 'error': 'Layer Group "{0}" not found.'.format(layer_group_id)} + response_dict = { + "success": False, + "error": 'Layer Group "{0}" not found.'.format(layer_group_id), + } else: layer_group_dict = self._transcribe_geoserver_object(layer_group) # Assemble Response - response_dict = {'success': True, - 'result': layer_group_dict} + response_dict = {"success": True, "result": layer_group_dict} except geoserver.catalog.FailedRequestError as e: - response_dict = {'success': False, - 'error': str(e)} + response_dict = {"success": False, "error": str(e)} # Handle the debug and return self._handle_debug(response_dict, debug) @@ -1066,18 +1454,18 @@ def get_store(self, store_id, debug=False): store = self.catalog.get_store(name=name, workspace=workspace) if not store: - response_dict = {'success': False, - 'error': 'Store "{0}" not found.'.format(store_id)} + response_dict = { + "success": False, + "error": 'Store "{0}" not found.'.format(store_id), + } else: store_dict = self._transcribe_geoserver_object(store) # Assemble Response - response_dict = {'success': True, - 'result': store_dict} + response_dict = {"success": True, "result": store_dict} except geoserver.catalog.FailedRequestError as e: - response_dict = {'success': False, - 'error': str(e)} + response_dict = {"success": False, "error": str(e)} # Handle the debug and return self._handle_debug(response_dict, debug) @@ -1103,18 +1491,18 @@ def get_workspace(self, workspace_id, debug=False): workspace = self.catalog.get_workspace(name=workspace_id) if not workspace: - response_dict = {'success': False, - 'error': 'Workspace "{0}" not found.'.format(workspace_id)} + response_dict = { + "success": False, + "error": 'Workspace "{0}" not found.'.format(workspace_id), + } else: workspace_dict = self._transcribe_geoserver_object(workspace) # Assemble Response - response_dict = {'success': True, - 'result': workspace_dict} + response_dict = {"success": True, "result": workspace_dict} except geoserver.catalog.FailedRequestError as e: - response_dict = {'success': False, - 'error': str(e)} + response_dict = {"success": False, "error": str(e)} # Handle the debug and return self._handle_debug(response_dict, debug) @@ -1148,24 +1536,26 @@ def get_style(self, style_id, debug=False): style = self.catalog.get_style(name=name, workspace=workspace) if not style: - response_dict = {'success': False, - 'error': 'Workspace "{0}" not found.'.format(style_id)} + response_dict = { + "success": False, + "error": 'Workspace "{0}" not found.'.format(style_id), + } else: style_dict = self._transcribe_geoserver_object(style) # Assemble Response - response_dict = {'success': True, - 'result': style_dict} + response_dict = {"success": True, "result": style_dict} except geoserver.catalog.FailedRequestError as e: - response_dict = {'success': False, - 'error': str(e)} + response_dict = {"success": False, "error": str(e)} # Handle the debug and return self._handle_debug(response_dict, debug) return response_dict - def get_layer_extent(self, store_id, feature_name, native=False, buffer_factor=1.000001): + def get_layer_extent( + self, 
store_id, feature_name, native=False, buffer_factor=1.000001 + ): """ Get the legend extent for the given layer. @@ -1182,13 +1572,23 @@ def get_layer_extent(self, store_id, feature_name, native=False, buffer_factor=1 if not workspace: workspace = self.catalog.get_default_workspace().name - url = (self.endpoint + 'workspaces/' + workspace + '/datastores/' + datastore_name - + '/featuretypes/' + feature_name + '.json') + url = ( + self.endpoint + + "workspaces/" + + workspace + + "/datastores/" + + datastore_name + + "/featuretypes/" + + feature_name + + ".json" + ) response = requests.get(url, auth=(self.username, self.password)) if response.status_code != 200: - msg = "Get Layer Extent Status Code {0}: {1}".format(response.status_code, response.text) + msg = "Get Layer Extent Status Code {0}: {1}".format( + response.status_code, response.text + ) exception = requests.RequestException(msg, response=response) log.error(exception) raise exception @@ -1201,25 +1601,37 @@ def get_layer_extent(self, store_id, feature_name, native=False, buffer_factor=1 extent = [-128.583984375, 22.1874049914, -64.423828125, 52.1065051908] # Extract bounding box - if 'featureType' in json: + if "featureType" in json: if native: - if 'nativeBoundingBox' in json['featureType']: - bbox = json['featureType']['nativeBoundingBox'] + if "nativeBoundingBox" in json["featureType"]: + bbox = json["featureType"]["nativeBoundingBox"] else: - if 'latLonBoundingBox' in json['featureType']: - bbox = json['featureType']['latLonBoundingBox'] + if "latLonBoundingBox" in json["featureType"]: + bbox = json["featureType"]["latLonBoundingBox"] if bbox is not None: # minx, miny, maxx, maxy - extent = [bbox['minx'] / buffer_factor, bbox['miny'] / buffer_factor, - bbox['maxx'] * buffer_factor, bbox['maxy'] * buffer_factor] + extent = [ + bbox["minx"] / buffer_factor, + bbox["miny"] / buffer_factor, + bbox["maxx"] * buffer_factor, + bbox["maxy"] * buffer_factor, + ] return extent - def link_sqlalchemy_db_to_geoserver(self, store_id, sqlalchemy_engine, max_connections=5, - max_connection_idle_time=30, evictor_run_periodicity=30, - validate_connections=True, docker=False, debug=False, - docker_ip_address='172.17.0.1'): + def link_sqlalchemy_db_to_geoserver( + self, + store_id, + sqlalchemy_engine, + max_connections=5, + max_connection_idle_time=30, + evictor_run_periodicity=30, + validate_connections=True, + docker=False, + debug=False, + docker_ip_address="172.17.0.1", + ): """ Helper function to simplify linking postgis databases to geoservers using the sqlalchemy engine object. @@ -1244,14 +1656,26 @@ def link_sqlalchemy_db_to_geoserver(self, store_id, sqlalchemy_engine, max_conne ) if docker: - params['host'] = docker_ip_address + params["host"] = docker_ip_address response = self.create_postgis_store(**params) return response - def create_postgis_store(self, store_id, host, port, database, username, password, max_connections=5, - max_connection_idle_time=30, evictor_run_periodicity=30, validate_connections=True, - expose_primary_keys=False, debug=False): + def create_postgis_store( + self, + store_id, + host, + port, + database, + username, + password, + max_connections=5, + max_connection_idle_time=30, + evictor_run_periodicity=30, + validate_connections=True, + expose_primary_keys=False, + debug=False, + ): """ Use this method to link an existing PostGIS database to GeoServer as a feature store. Note that this method only works for data in vector formats. 
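The get_layer_extent hunk reformats the request that reads a feature type's latLonBoundingBox (or nativeBoundingBox when native=True) and returns [minx, miny, maxx, maxy], with the min values divided and the max values multiplied by buffer_factor. A hedged usage sketch, assuming an existing engine, store, and feature type; the identifiers below are placeholders:

    # Placeholder workspace, store, and feature type names for illustration.
    extent = engine.get_layer_extent(
        store_id="my_workspace:my_datastore",
        feature_name="my_feature_type",
        native=False,            # read the lat/lon bounding box
        buffer_factor=1.000001,  # pad the reported extent very slightly
    )
    minx, miny, maxx, maxy = extent
    print(f"Longitude {minx} to {maxx}, latitude {miny} to {maxy}")
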
@@ -1305,25 +1729,21 @@ def create_postgis_store(self, store_id, host, port, database, username, passwor """ # Prepare headers - headers = { - "Content-type": "text/xml", - "Accept": "application/xml" - } + headers = {"Content-type": "text/xml", "Accept": "application/xml"} # Prepare URL to create store - url = self._assemble_url('workspaces', workspace, 'datastores') + url = self._assemble_url("workspaces", workspace, "datastores") # Execute: POST /workspaces//datastores response = requests.post( - url=url, - data=xml, - headers=headers, - auth=(self.username, self.password) + url=url, data=xml, headers=headers, auth=(self.username, self.password) ) # Return with error if this doesn't work if response.status_code != 201: - msg = "Create Postgis Store Status Code {0}: {1}".format(response.status_code, response.text) + msg = "Create Postgis Store Status Code {0}: {1}".format( + response.status_code, response.text + ) exception = requests.RequestException(msg, response=response) log.error(exception) raise exception @@ -1333,7 +1753,9 @@ def create_postgis_store(self, store_id, host, port, database, username, passwor return response_dict - def create_layer_from_postgis_store(self, store_id, table, layer_name=None, debug=False): + def create_layer_from_postgis_store( + self, store_id, table, layer_name=None, debug=False + ): """ Add an existing PostGIS table as a feature resource to a PostGIS store that already exists. @@ -1372,11 +1794,11 @@ def create_layer_from_postgis_store(self, store_id, table, layer_name=None, debu # Verify the store exists store_info = self.get_store(store_id, debug=debug) - if not store_info['success']: + if not store_info["success"]: message = f"There is no store named '{store_name}'" if workspace: message += f" in {workspace}" - return {'success': False, 'error': message} + return {"success": False, "error": message} # If no layer_name was provided, default to the PostGIS table name if not layer_name: @@ -1391,24 +1813,23 @@ def create_layer_from_postgis_store(self, store_id, table, layer_name=None, debu """ - headers = { - "Content-type": "text/xml", - "Accept": "application/xml" - } + headers = {"Content-type": "text/xml", "Accept": "application/xml"} # POST /workspaces//datastores//featuretypes - url = self._assemble_url('workspaces', workspace, 'datastores', store_name, 'featuretypes') + url = self._assemble_url( + "workspaces", workspace, "datastores", store_name, "featuretypes" + ) response = requests.post( url=url, data=xml_body, headers=headers, - auth=HTTPBasicAuth(username=self.username, password=self.password) + auth=HTTPBasicAuth(username=self.username, password=self.password), ) if response.status_code != 201: response_dict = { - 'success': False, - 'error': f'{response.reason}({response.status_code}): {response.text}' + "success": False, + "error": f"{response.reason}({response.status_code}): {response.text}", } self._handle_debug(response_dict, debug) return response_dict @@ -1418,9 +1839,20 @@ def create_layer_from_postgis_store(self, store_id, table, layer_name=None, debu self._handle_debug(response_dict, debug) return response_dict - def create_sql_view_layer(self, store_id, layer_name, geometry_type, srid, sql, default_style, - geometry_name='geometry', other_styles=None, parameters=None, reload_public=False, - debug=False): + def create_sql_view_layer( + self, + store_id, + layer_name, + geometry_type, + srid, + sql, + default_style, + geometry_name="geometry", + other_styles=None, + parameters=None, + reload_public=False, + debug=False, + ): """ 
Direct call to GeoServer REST API to create SQL View feature types and layers. @@ -1445,30 +1877,30 @@ def create_sql_view_layer(self, store_id, layer_name, geometry_type, srid, sql, workspace = self.catalog.get_default_workspace().name # use store's workspace as default for layer - layer_id = f'{workspace}:{layer_name}' if ':' not in layer_name else layer_name + layer_id = f"{workspace}:{layer_name}" if ":" not in layer_name else layer_name # Template context context = { - 'workspace': workspace, - 'feature_name': layer_name, - 'datastore_name': store_name, - 'geoserver_rest_endpoint': self.endpoint, - 'sql': sql, - 'geometry_name': geometry_name, - 'geometry_type': geometry_type, - 'srid': srid, - 'parameters': parameters or [], - 'default_style': default_style, - 'other_styles': other_styles or [] + "workspace": workspace, + "feature_name": layer_name, + "datastore_name": store_name, + "geoserver_rest_endpoint": self.endpoint, + "sql": sql, + "geometry_name": geometry_name, + "geometry_type": geometry_type, + "srid": srid, + "parameters": parameters or [], + "default_style": default_style, + "other_styles": other_styles or [], } # Open sql view template - sql_view_path = os.path.join(self.XML_PATH, 'sql_view_template.xml') - url = self._assemble_url('workspaces', workspace, 'datastores', store_name, 'featuretypes') - headers = { - "Content-type": "text/xml" - } - with open(sql_view_path, 'r') as sql_view_file: + sql_view_path = os.path.join(self.XML_PATH, "sql_view_template.xml") + url = self._assemble_url( + "workspaces", workspace, "datastores", store_name, "featuretypes" + ) + headers = {"Content-type": "text/xml"} + with open(sql_view_path, "r") as sql_view_file: text = sql_view_file.read() template = Template(text) xml = template.render(context) @@ -1484,14 +1916,16 @@ def create_sql_view_layer(self, store_id, layer_name, geometry_type, srid, sql, # Raise an exception if status code is not what we expect if response.status_code == 201: - log.info('Successfully created featuretype {}'.format(layer_name)) + log.info("Successfully created featuretype {}".format(layer_name)) break - if response.status_code == 500 and 'already exists' in response.text: + if response.status_code == 500 and "already exists" in response.text: break else: retries_remaining -= 1 if retries_remaining == 0: - msg = "Create Feature Type Status Code {0}: {1}".format(response.status_code, response.text) + msg = "Create Feature Type Status Code {0}: {1}".format( + response.status_code, response.text + ) exception = requests.RequestException(msg, response=response) log.error(exception) raise exception @@ -1501,18 +1935,21 @@ def create_sql_view_layer(self, store_id, layer_name, geometry_type, srid, sql, # Add styles to new layer self.update_layer_styles( - layer_id=layer_id, - default_style=default_style, - other_styles=other_styles + layer_id=layer_id, default_style=default_style, other_styles=other_styles ) # GeoWebCache Settings - gwc_layer_path = os.path.join(self.XML_PATH, 'gwc_layer_template.xml') - url = self.get_gwc_endpoint(public=False) + 'layers/' + workspace + ':' + layer_name + '.xml' - headers = { - "Content-type": "text/xml" - } - with open(gwc_layer_path, 'r') as gwc_layer_file: + gwc_layer_path = os.path.join(self.XML_PATH, "gwc_layer_template.xml") + url = ( + self.get_gwc_endpoint(public=False) + + "layers/" + + workspace + + ":" + + layer_name + + ".xml" + ) + headers = {"Content-type": "text/xml"} + with open(gwc_layer_path, "r") as gwc_layer_file: text = gwc_layer_file.read() template = 
Template(text) xml = template.render(context) @@ -1527,13 +1964,19 @@ def create_sql_view_layer(self, store_id, layer_name, geometry_type, srid, sql, ) if response.status_code == 200: - log.info('Successfully created GeoWebCache layer {}'.format(layer_name)) + log.info("Successfully created GeoWebCache layer {}".format(layer_name)) break else: - log.warning("GWC DID NOT RETURN 200, but instead: {}. {}\n".format(response.status_code, response.text)) + log.warning( + "GWC DID NOT RETURN 200, but instead: {}. {}\n".format( + response.status_code, response.text + ) + ) retries_remaining -= 1 if retries_remaining == 0: - msg = "Create GWC Layer Status Code {0}: {1}".format(response.status_code, response.text) + msg = "Create GWC Layer Status Code {0}: {1}".format( + response.status_code, response.text + ) exception = requests.RequestException(msg, response=response) log.error(exception) raise exception @@ -1541,8 +1984,16 @@ def create_sql_view_layer(self, store_id, layer_name, geometry_type, srid, sql, response_dict = self.get_layer(layer_id, store_name, debug=debug) return response_dict - def create_shapefile_resource(self, store_id, shapefile_base=None, shapefile_zip=None, shapefile_upload=None, - overwrite=False, charset=None, debug=False): + def create_shapefile_resource( + self, + store_id, + shapefile_base=None, + shapefile_zip=None, + shapefile_upload=None, + overwrite=False, + charset=None, + debug=False, + ): """ Use this method to add shapefile resources to GeoServer. @@ -1582,24 +2033,34 @@ def create_shapefile_resource(self, store_id, shapefile_base=None, shapefile_zip """ # Validate shapefile arguments - arg_value_error_msg = 'Exactly one of the "shapefile_base", "shapefile_zip", ' \ - 'or "shapefile_upload" arguments must be specified. ' + arg_value_error_msg = ( + 'Exactly one of the "shapefile_base", "shapefile_zip", ' + 'or "shapefile_upload" arguments must be specified. ' + ) if not shapefile_base and not shapefile_zip and not shapefile_upload: - raise ValueError(arg_value_error_msg + 'None given.') + raise ValueError(arg_value_error_msg + "None given.") elif shapefile_zip and shapefile_upload and shapefile_base: - raise ValueError(arg_value_error_msg + '"shapefile_base", "shapefile_zip", and ' - '"shapefile_upload" given.') + raise ValueError( + arg_value_error_msg + '"shapefile_base", "shapefile_zip", and ' + '"shapefile_upload" given.' + ) elif shapefile_base and shapefile_zip: - raise ValueError(arg_value_error_msg + '"shapefile_base" and "shapefile_zip" given.') + raise ValueError( + arg_value_error_msg + '"shapefile_base" and "shapefile_zip" given.' + ) elif shapefile_base and shapefile_upload: - raise ValueError(arg_value_error_msg + '"shapefile_base" and "shapefile_upload" given.') + raise ValueError( + arg_value_error_msg + '"shapefile_base" and "shapefile_upload" given.' + ) elif shapefile_zip and shapefile_upload: - raise ValueError(arg_value_error_msg + '"shapefile_zip" and "shapefile_upload" given.') + raise ValueError( + arg_value_error_msg + '"shapefile_zip" and "shapefile_upload" given.' 
+ ) # Process identifier workspace, name = self._process_identifier(store_id) @@ -1616,8 +2077,7 @@ def create_shapefile_resource(self, store_id, shapefile_base=None, shapefile_zip if workspace: message += " in " + workspace - response_dict = {'success': False, - 'error': message} + response_dict = {"success": False, "error": message} self._handle_debug(response_dict, debug) return response_dict @@ -1632,19 +2092,21 @@ def create_shapefile_resource(self, store_id, shapefile_base=None, shapefile_zip # Shapefile Base Case if shapefile_base: shapefile_plus_sidecars = shapefile_and_friends(shapefile_base) - temp_archive = '{0}.zip'.format(os.path.join(os.path.split(shapefile_base)[0], name)) + temp_archive = "{0}.zip".format( + os.path.join(os.path.split(shapefile_base)[0], name) + ) - with ZipFile(temp_archive, 'w') as zfile: + with ZipFile(temp_archive, "w") as zfile: for extension, filepath in shapefile_plus_sidecars.items(): - filename = '{0}.{1}'.format(name, extension) + filename = "{0}.{1}".format(name, extension) zfile.write(filename=filepath, arcname=filename) - files = {'file': open(temp_archive, 'rb')} + files = {"file": open(temp_archive, "rb")} # Shapefile Zip Case elif shapefile_zip: if is_zipfile(shapefile_zip): - files = {'file': open(shapefile_zip, 'rb')} + files = {"file": open(shapefile_zip, "rb")} else: raise TypeError('"{0}" is not a zip archive.'.format(shapefile_zip)) @@ -1653,31 +2115,30 @@ def create_shapefile_resource(self, store_id, shapefile_base=None, shapefile_zip # Write files in memory to zipfile in memory zip_file_in_memory = BytesIO() - with ZipFile(zip_file_in_memory, 'w') as zfile: + with ZipFile(zip_file_in_memory, "w") as zfile: for file in shapefile_upload: extension = os.path.splitext(file.name)[1] - filename = '{0}{1}'.format(name, extension) + filename = "{0}{1}".format(name, extension) zfile.writestr(filename, file.read()) - files = {'file': zip_file_in_memory.getvalue()} + files = {"file": zip_file_in_memory.getvalue()} # Prepare headers - headers = { - "Content-type": "application/zip", - "Accept": "application/xml" - } + headers = {"Content-type": "application/zip", "Accept": "application/xml"} # Prepare URL - url = self._assemble_url('workspaces', workspace, 'datastores', name, 'file.shp') + url = self._assemble_url( + "workspaces", workspace, "datastores", name, "file.shp" + ) # Set params params = {} if charset: - params['charset'] = charset + params["charset"] = charset if overwrite: - params['update'] = 'overwrite' + params["update"] = "overwrite" # Execute: PUT /workspaces//datastores//file.shp response = requests.put( @@ -1685,12 +2146,12 @@ def create_shapefile_resource(self, store_id, shapefile_base=None, shapefile_zip files=files, headers=headers, params=params, - auth=HTTPBasicAuth(username=self.username, password=self.password) + auth=HTTPBasicAuth(username=self.username, password=self.password), ) # Clean up file stuff if shapefile_base or shapefile_zip: - files['file'].close() + files["file"].close() if temp_archive: os.remove(temp_archive) @@ -1700,8 +2161,12 @@ def create_shapefile_resource(self, store_id, shapefile_base=None, shapefile_zip # Wrap up with failure if response.status_code != 201: - response_dict = {'success': False, - 'error': '{1}({0}): {2}'.format(response.status_code, response.reason, response.text)} + response_dict = { + "success": False, + "error": "{1}({0}): {2}".format( + response.status_code, response.reason, response.text + ), + } self._handle_debug(response_dict, debug) return response_dict @@ -1717,11 
+2182,12 @@ def create_shapefile_resource(self, store_id, shapefile_base=None, shapefile_zip resource_id = name # Wrap up successfully - new_resource = self.catalog.get_resource(name=resource_id, store=name, workspace=workspace) + new_resource = self.catalog.get_resource( + name=resource_id, store=name, workspace=workspace + ) resource_dict = self._transcribe_geoserver_object(new_resource) - response_dict = {'success': True, - 'result': resource_dict} + response_dict = {"success": True, "result": resource_dict} self._handle_debug(response_dict, debug) return response_dict @@ -1743,8 +2209,11 @@ def create_coverage_store(self, store_id, coverage_type, debug=False): # Validate coverage type if coverage_type not in self.VALID_COVERAGE_TYPES: - raise ValueError('"{0}" is not a valid coverage_type. Use either {1}'.format( - coverage_type, ', '.join(self.VALID_COVERAGE_TYPES))) + raise ValueError( + '"{0}" is not a valid coverage_type. Use either {1}'.format( + coverage_type, ", ".join(self.VALID_COVERAGE_TYPES) + ) + ) # Black magic for grass grid support if coverage_type == self.CT_GRASS_GRID: @@ -1760,28 +2229,26 @@ def create_coverage_store(self, store_id, coverage_type, debug=False): {workspace} - """.format(name=name, type=coverage_type, workspace=workspace) + """.format( + name=name, type=coverage_type, workspace=workspace + ) # Prepare headers - headers = { - "Content-type": "text/xml", - "Accept": "application/xml" - } + headers = {"Content-type": "text/xml", "Accept": "application/xml"} # Prepare URL to create store - url = self._assemble_url('workspaces', workspace, 'coveragestores') + url = self._assemble_url("workspaces", workspace, "coveragestores") # Execute: POST /workspaces//coveragestores response = requests.post( - url=url, - data=xml, - headers=headers, - auth=(self.username, self.password) + url=url, data=xml, headers=headers, auth=(self.username, self.password) ) # Return with error if this doesn't work if response.status_code != 201: - msg = "Create Coverage Store Status Code {0}: {1}".format(response.status_code, response.text) + msg = "Create Coverage Store Status Code {0}: {1}".format( + response.status_code, response.text + ) exception = requests.RequestException(msg, response=response) log.error(exception) raise exception @@ -1791,8 +2258,15 @@ def create_coverage_store(self, store_id, coverage_type, debug=False): return response_dict - def create_coverage_layer(self, layer_id, coverage_type, coverage_file, default_style='', - other_styles=None, debug=False): + def create_coverage_layer( + self, + layer_id, + coverage_type, + coverage_file, + default_style="", + other_styles=None, + debug=False, + ): """ Create a coverage store, coverage resource, and layer in the given workspace. @@ -1813,8 +2287,11 @@ def create_coverage_layer(self, layer_id, coverage_type, coverage_file, default_ # Validate coverage type if coverage_type not in self.VALID_COVERAGE_TYPES: - exception = ValueError('"{0}" is not a valid coverage_type. Use either {1}'.format( - coverage_type, ', '.join(self.VALID_COVERAGE_TYPES))) + exception = ValueError( + '"{0}" is not a valid coverage_type. 
Use either {1}'.format( + coverage_type, ", ".join(self.VALID_COVERAGE_TYPES) + ) + ) log.error(exception) raise exception @@ -1837,11 +2314,13 @@ def create_coverage_layer(self, layer_id, coverage_type, coverage_file, default_ working_dir_contents = os.listdir(working_dir) num_working_dir_items = len(working_dir_contents) if num_working_dir_items > 2: - exception = ValueError('Expected 1 or 2 files for coverage type "{}" but got {} instead: "{}"'.format( - self.CT_GRASS_GRID, - num_working_dir_items, - '", "'.join(working_dir_contents) - )) + exception = ValueError( + 'Expected 1 or 2 files for coverage type "{}" but got {} instead: "{}"'.format( + self.CT_GRASS_GRID, + num_working_dir_items, + '", "'.join(working_dir_contents), + ) + ) log.error(exception) raise exception @@ -1851,35 +2330,37 @@ def create_coverage_layer(self, layer_id, coverage_type, coverage_file, default_ continue # Skip the projection file - if 'prj' in item: + if "prj" in item: continue # Assume other file is the raster corrupt_file = False tmp_coverage_path = os.path.join(working_dir, item) - with open(tmp_coverage_path, 'r') as item: + with open(tmp_coverage_path, "r") as item: contents = item.readlines() for line in contents[0:6]: - if 'north' in line: - north = float(line.split(':')[1].strip()) - elif 'south' in line: - south = float(line.split(':')[1].strip()) - elif 'east' in line: + if "north" in line: + north = float(line.split(":")[1].strip()) + elif "south" in line: + south = float(line.split(":")[1].strip()) + elif "east" in line: pass # we don't use east in this algorithm so skip it. - elif 'west' in line: - west = float(line.split(':')[1].strip()) - elif 'rows' in line: - rows = int(line.split(':')[1].strip()) - elif 'cols' in line: - cols = int(line.split(':')[1].strip()) + elif "west" in line: + west = float(line.split(":")[1].strip()) + elif "rows" in line: + rows = int(line.split(":")[1].strip()) + elif "cols" in line: + cols = int(line.split(":")[1].strip()) else: corrupt_file = True if corrupt_file: - exception = IOError('GRASS file could not be processed, check to ensure the GRASS grid is ' - 'correctly formatted or included.') + exception = IOError( + "GRASS file could not be processed, check to ensure the GRASS grid is " + "correctly formatted or included." 
+ ) log.error(exception) raise exception @@ -1888,11 +2369,13 @@ def create_coverage_layer(self, layer_id, coverage_type, coverage_file, default_ yllcorner = south cellsize = (north - south) / rows - header = ['ncols {0}\n'.format(cols), - 'nrows {0}\n'.format(rows), - 'xllcorner {0}\n'.format(xllcorner), - 'yllcorner {0}\n'.format(yllcorner), - 'cellsize {0}\n'.format(cellsize)] + header = [ + "ncols {0}\n".format(cols), + "nrows {0}\n".format(rows), + "xllcorner {0}\n".format(xllcorner), + "yllcorner {0}\n".format(yllcorner), + "cellsize {0}\n".format(cellsize), + ] # Strip off old header and add new one for _ in range(0, 6): @@ -1900,30 +2383,27 @@ def create_coverage_layer(self, layer_id, coverage_type, coverage_file, default_ contents = header + contents # Write the coverage to file - with open(tmp_coverage_path, 'w') as o: + with open(tmp_coverage_path, "w") as o: for line in contents: # Make sure the file ends with a new line - if line[-1] != '\n': - line = line + '\n' + if line[-1] != "\n": + line = line + "\n" o.write(line) # Prepare Files - coverage_archive_name = coverage_name + '.zip' + coverage_archive_name = coverage_name + ".zip" coverage_archive = os.path.join(working_dir, coverage_archive_name) - with ZipFile(coverage_archive, 'w') as zf: + with ZipFile(coverage_archive, "w") as zf: for item in os.listdir(working_dir): if item != coverage_archive_name: zf.write(os.path.join(working_dir, item), item) - files = {'file': open(coverage_archive, 'rb')} - content_type = 'application/zip' + files = {"file": open(coverage_archive, "rb")} + content_type = "application/zip" # Prepare headers - headers = { - "Content-type": content_type, - "Accept": "application/xml" - } + headers = {"Content-type": content_type, "Accept": "application/xml"} # Prepare URL extension = coverage_type.lower() @@ -1932,11 +2412,15 @@ def create_coverage_layer(self, layer_id, coverage_type, coverage_file, default_ extension = self.CT_ARC_GRID.lower() url = self._assemble_url( - 'workspaces', workspace, 'coveragestores', coverage_store_name, 'file.{0}'.format(extension) + "workspaces", + workspace, + "coveragestores", + coverage_store_name, + "file.{0}".format(extension), ) # Set params - params = {'coverageName': coverage_name} + params = {"coverageName": coverage_name} retries_remaining = 3 zip_error_retries = 5 @@ -1949,7 +2433,7 @@ def create_coverage_layer(self, layer_id, coverage_type, coverage_file, default_ url=url, files=files, headers=headers, - auth=(self.username, self.password) + auth=(self.username, self.password), ) else: response = requests.put( @@ -1957,17 +2441,20 @@ def create_coverage_layer(self, layer_id, coverage_type, coverage_file, default_ files=files, headers=headers, params=params, - auth=(self.username, self.password) + auth=(self.username, self.password), ) # Raise an exception if status code is not what we expect if response.status_code == 201: - log.info('Successfully created coverage {}'.format(coverage_name)) + log.info("Successfully created coverage {}".format(coverage_name)) break - if response.status_code == 500 and 'already exists' in response.text: - log.warning('Coverage already exists {}'.format(coverage_name)) + if response.status_code == 500 and "already exists" in response.text: + log.warning("Coverage already exists {}".format(coverage_name)) break - if response.status_code == 500 and 'Error occured unzipping file' in response.text: + if ( + response.status_code == 500 + and "Error occured unzipping file" in response.text + ): zip_error_retries -= 1 if 
zip_error_retries == 0: raise_error = True @@ -1977,13 +2464,15 @@ def create_coverage_layer(self, layer_id, coverage_type, coverage_file, default_ raise_error = True if raise_error: - msg = "Create Coverage Status Code {0}: {1}".format(response.status_code, response.text) + msg = "Create Coverage Status Code {0}: {1}".format( + response.status_code, response.text + ) exception = requests.RequestException(msg, response=response) log.error(exception) raise exception # Clean up - files['file'].close() + files["file"].close() if working_dir: shutil.rmtree(working_dir) @@ -1993,7 +2482,7 @@ def create_coverage_layer(self, layer_id, coverage_type, coverage_file, default_ self.update_layer_styles( layer_id=layer_id, default_style=default_style, - other_styles=other_styles + other_styles=other_styles, ) response_dict = self.get_layer(layer_id, coverage_store_name, debug) @@ -2027,20 +2516,14 @@ def create_layer_group(self, layer_group_id, layers, styles, debug=False): if not workspace: workspace = self.catalog.get_default_workspace().name - context = { - 'name': group_name, - 'layers': layers, - 'styles': styles - } + context = {"name": group_name, "layers": layers, "styles": styles} # Open layer group template - template_path = os.path.join(self.XML_PATH, 'layer_group_template.xml') - url = self._assemble_url('workspaces', workspace, 'layergroups.json') - headers = { - "Content-type": "text/xml" - } + template_path = os.path.join(self.XML_PATH, "layer_group_template.xml") + url = self._assemble_url("workspaces", workspace, "layergroups.json") + headers = {"Content-type": "text/xml"} - with open(template_path, 'r') as template_file: + with open(template_path, "r") as template_file: text = template_file.read() template = Template(text) xml = template.render(context) @@ -2053,7 +2536,9 @@ def create_layer_group(self, layer_group_id, layers, styles, debug=False): ) if response.status_code != 201: - msg = "Create Layer Group Status Code {}: {}".format(response.status_code, response.text) + msg = "Create Layer Group Status Code {}: {}".format( + response.status_code, response.text + ) exception = requests.RequestException(msg, response=response) log.error(exception) raise exception @@ -2083,17 +2568,17 @@ def create_workspace(self, workspace_id, uri, debug=False): # Do create workspace = self.catalog.create_workspace(workspace_id, uri) workspace_dict = self._transcribe_geoserver_object(workspace) - response_dict = {'success': True, - 'result': workspace_dict} + response_dict = {"success": True, "result": workspace_dict} except AssertionError as e: - response_dict = {'success': False, - 'error': str(e)} + response_dict = {"success": False, "error": str(e)} self._handle_debug(response_dict, debug) return response_dict - def create_style(self, style_id, sld_template, sld_context=None, overwrite=False, debug=False): + def create_style( + self, style_id, sld_template, sld_context=None, overwrite=False, debug=False + ): """ Create style layer from an SLD template. 
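The create_workspace, create_layer_group, and create_style hunks above follow the same response-dict pattern as the rest of the module. A rough sketch of chaining these calls, assuming a configured engine and a local SLD template on disk; every name and path below is a placeholder:

    # Placeholder names and paths. If the SLD template contains Jinja-style
    # variables, pass a matching sld_context dict to create_style.
    ws_response = engine.create_workspace("demo_ws", uri="http://www.example.com/demo_ws")
    if ws_response["success"]:
        engine.create_style(
            style_id="demo_ws:demo_style",
            sld_template="/path/to/demo_style.sld",
            overwrite=True,
        )
        engine.create_layer_group(
            layer_group_id="demo_ws:demo_group",
            layers=["demo_ws:layer_a", "demo_ws:layer_b"],
            styles=["demo_style", "demo_style"],
        )
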
@@ -2107,23 +2592,23 @@ def create_style(self, style_id, sld_template, sld_context=None, overwrite=False workspace, style_name = self._process_identifier(style_id) if workspace is None: - url = self._assemble_url('styles') + url = self._assemble_url("styles") else: - url = self._assemble_url('workspaces', workspace, 'styles') + url = self._assemble_url("workspaces", workspace, "styles") if overwrite: try: self.delete_style(style_id, purge=True) except Exception as e: - if 'referenced by existing' in str(e): + if "referenced by existing" in str(e): log.error(str(e)) raise # Use post request to create style container first - headers = {'Content-type': 'application/vnd.ogc.sld+xml'} + headers = {"Content-type": "application/vnd.ogc.sld+xml"} # Render the SLD template - with open(sld_template, 'r') as sld_file: + with open(sld_template, "r") as sld_file: text = sld_file.read() if sld_context is not None: @@ -2134,20 +2619,27 @@ def create_style(self, style_id, sld_template, sld_context=None, overwrite=False url, headers=headers, auth=(self.username, self.password), - params={'name': style_name}, - data=text + params={"name": style_name}, + data=text, ) # Raise an exception if status code is not what we expect if response.status_code == 201: - log.info('Successfully created style {}'.format(style_name)) + log.info("Successfully created style {}".format(style_name)) else: - msg = 'Create Style Status Code {0}: {1}'.format(response.status_code, response.text) + msg = "Create Style Status Code {0}: {1}".format( + response.status_code, response.text + ) if response.status_code == 500: - if 'Unable to find style for event' in response.text or 'Error persisting' in response.text: - warning_msg = 'Created style {} with warnings: {}'.format(style_name, response.text) + if ( + "Unable to find style for event" in response.text + or "Error persisting" in response.text + ): + warning_msg = "Created style {} with warnings: {}".format( + style_name, response.text + ) log.warning(warning_msg) - return {'success': True, 'result': warning_msg} + return {"success": True, "result": warning_msg} else: exception = requests.RequestException(msg, response=response) log.error(msg) @@ -2186,7 +2678,9 @@ def update_resource(self, resource_id, store=None, debug=False, **kwargs): try: # Get resource - resource = self.catalog.get_resource(name=name, store=store, workspace=workspace) + resource = self.catalog.get_resource( + name=name, store=store, workspace=workspace + ) # Make the changes updated_resource = self._apply_changes_to_gs_object(kwargs, resource) @@ -2198,12 +2692,10 @@ def update_resource(self, resource_id, store=None, debug=False, **kwargs): resource_dict = self._transcribe_geoserver_object(updated_resource) # Assemble Response - response_dict = {'success': True, - 'result': resource_dict} + response_dict = {"success": True, "result": resource_dict} except geoserver.catalog.FailedRequestError as e: - response_dict = {'success': False, - 'error': str(e)} + response_dict = {"success": False, "error": str(e)} self._handle_debug(response_dict, debug) return response_dict @@ -2225,7 +2717,7 @@ def update_layer(self, layer_id, debug=False, **kwargs): updated_layer = engine.update_layer(layer_id='workspace:layer_name', default_style='style1', styles=['style1', 'style2']) # noqa: E501 """ # Pop tile caching properties to handle separately - tile_caching = kwargs.pop('tile_caching', None) + tile_caching = kwargs.pop("tile_caching", None) # breakpoint() try: # Get resource @@ -2241,32 +2733,28 @@ def update_layer(self, 
layer_id, debug=False, **kwargs): layer_dict = self._transcribe_geoserver_object(updated_layer) # Assemble Response - response_dict = {'success': True, - 'result': layer_dict} - + response_dict = {"success": True, "result": layer_dict} + # Handle tile caching properties (gsconfig doesn't support this) if tile_caching is not None: - gwc_url = '{0}layers/{1}.xml'.format(self.gwc_endpoint, layer_id) + gwc_url = "{0}layers/{1}.xml".format(self.gwc_endpoint, layer_id) auth = (self.username, self.password) - xml = ConvertDictToXml({'GeoServerLayer': tile_caching}) + xml = ConvertDictToXml({"GeoServerLayer": tile_caching}) r = requests.post( gwc_url, auth=auth, - headers={'Content-Type': 'text/xml'}, - data=ElementTree.tostring(xml) + headers={"Content-Type": "text/xml"}, + data=ElementTree.tostring(xml), ) if r.status_code == 200: - layer_dict['tile_caching'] = tile_caching - response_dict = {'success': True, - 'result': layer_dict} + layer_dict["tile_caching"] = tile_caching + response_dict = {"success": True, "result": layer_dict} else: - response_dict = {'success': False, - 'error': r.text} + response_dict = {"success": False, "error": r.text} except geoserver.catalog.FailedRequestError as e: - response_dict = {'success': False, - 'error': str(e)} + response_dict = {"success": False, "error": str(e)} self._handle_debug(response_dict, debug) return response_dict @@ -2306,17 +2794,17 @@ def update_layer_group(self, layer_group_id, debug=False, **kwargs): layer_group_dict = self._transcribe_geoserver_object(layer_group) # Assemble Response - response_dict = {'success': True, - 'result': layer_group_dict} + response_dict = {"success": True, "result": layer_group_dict} except geoserver.catalog.FailedRequestError as e: - response_dict = {'success': False, - 'error': str(e)} + response_dict = {"success": False, "error": str(e)} self._handle_debug(response_dict, debug) return response_dict - def update_layer_styles(self, layer_id, default_style, other_styles=None, debug=False): + def update_layer_styles( + self, layer_id, default_style, other_styles=None, debug=False + ): """ Update/add styles to existing layer. 
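# Illustrative usage sketch for update_layer_styles(): the endpoint, credentials,
# layer, and style names below are placeholders and must already exist on the
# server.
from tethys_dataset_services.engines import GeoServerSpatialDatasetEngine

engine = GeoServerSpatialDatasetEngine(
    endpoint="http://localhost:8181/geoserver/rest/",
    username="admin",
    password="geoserver",
)

# Styles that live in the layer's workspace are qualified as "workspace:style"
# automatically before the rendered layer XML is submitted to the REST API.
engine.update_layer_styles(
    layer_id="my_workspace:my_layer",
    default_style="my_default_style",
    other_styles=["alternate_style_1", "alternate_style_2"],
)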
@@ -2335,26 +2823,24 @@ def update_layer_styles(self, layer_id, default_style, other_styles=None, debug= # check if layer workspace is style workspace else use styles default location lyr_ws_styles = self.list_styles(workspace=layer_workspace) if default_style in lyr_ws_styles: - default_style = '{0}:{1}'.format(layer_workspace, default_style) + default_style = "{0}:{1}".format(layer_workspace, default_style) if other_styles: for i in range(len(other_styles)): if other_styles[i] in lyr_ws_styles: - other_styles[i] = '{0}:{1}'.format(layer_workspace, other_styles[i]) + other_styles[i] = "{0}:{1}".format(layer_workspace, other_styles[i]) context = { - 'default_style': default_style, - 'other_styles': other_styles or [], - 'geoserver_rest_endpoint': self.endpoint + "default_style": default_style, + "other_styles": other_styles or [], + "geoserver_rest_endpoint": self.endpoint, } # Open layer template - layer_path = os.path.join(self.XML_PATH, 'layer_template.xml') - url = self._assemble_url('layers', '{0}.xml'.format(layer_name)) - headers = { - "Content-type": "text/xml" - } + layer_path = os.path.join(self.XML_PATH, "layer_template.xml") + url = self._assemble_url("layers", "{0}.xml".format(layer_name)) + headers = {"Content-type": "text/xml"} - with open(layer_path, 'r') as layer_file: + with open(layer_path, "r") as layer_file: text = layer_file.read() template = Template(text) xml = template.render(context) @@ -2370,12 +2856,14 @@ def update_layer_styles(self, layer_id, default_style, other_styles=None, debug= # Raise an exception if status code is not what we expect if response.status_code == 200: - log.info('Successfully created layer {}'.format(layer_name)) + log.info("Successfully created layer {}".format(layer_name)) break else: retries_remaining -= 1 if retries_remaining == 0: - msg = "Create Layer Status Code {0}: {1}".format(response.status_code, response.text) + msg = "Create Layer Status Code {0}: {1}".format( + response.status_code, response.text + ) exception = requests.RequestException(msg, response=response) log.error(exception) raise exception @@ -2384,7 +2872,9 @@ def update_layer_styles(self, layer_id, default_style, other_styles=None, debug= return response_dict - def delete_resource(self, resource_id, store_id, purge=False, recurse=False, debug=False): + def delete_resource( + self, resource_id, store_id, purge=False, recurse=False, debug=False + ): """ Delete a resource. 
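# Illustrative usage sketch for delete_resource(): the endpoint, credentials,
# and identifiers below are placeholders.
from tethys_dataset_services.engines import GeoServerSpatialDatasetEngine

engine = GeoServerSpatialDatasetEngine(
    endpoint="http://localhost:8181/geoserver/rest/",
    username="admin",
    password="geoserver",
)

# resource_id may be "name" or "workspace:name"; purge and recurse are passed
# straight through to the catalog delete call.
result = engine.delete_resource(
    resource_id="my_workspace:my_feature_type",
    store_id="my_store",
    recurse=True,
)
print(result["success"])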
@@ -2409,11 +2899,18 @@ def delete_resource(self, resource_id, store_id, purge=False, recurse=False, deb workspace = self.catalog.get_default_workspace().name # Get resource - resource = self.catalog.get_resource(name=name, store=store_id, workspace=workspace) + resource = self.catalog.get_resource( + name=name, store=store_id, workspace=workspace + ) # Handle delete - return self._handle_delete(identifier=name, gs_object=resource, purge=purge, - recurse=recurse, debug=debug) + return self._handle_delete( + identifier=name, + gs_object=resource, + purge=purge, + recurse=recurse, + debug=debug, + ) def delete_layer(self, layer_id, datastore, recurse=False): """ @@ -2429,19 +2926,16 @@ def delete_layer(self, layer_id, datastore, recurse=False): if not workspace: workspace = self.catalog.get_default_workspace().name - url = self._assemble_url('workspaces', workspace, 'datastores', datastore, 'featuretypes', name) + url = self._assemble_url( + "workspaces", workspace, "datastores", datastore, "featuretypes", name + ) # Prepare delete request - headers = { - "Content-type": "application/json" - } + headers = {"Content-type": "application/json"} - json = {'recurse': recurse} + json = {"recurse": recurse} response = requests.delete( - url, - auth=(self.username, self.password), - headers=headers, - params=json + url, auth=(self.username, self.password), headers=headers, params=json ) # Raise an exception if status code is not what we expect @@ -2449,12 +2943,14 @@ def delete_layer(self, layer_id, datastore, recurse=False): if response.status_code in self.WARNING_STATUS_CODES: pass else: - msg = "Delete Layer Status Code {0}: {1}".format(response.status_code, response.text) + msg = "Delete Layer Status Code {0}: {1}".format( + response.status_code, response.text + ) exception = requests.RequestException(msg, response=response) log.error(exception) raise exception - response_dict = {'success': True, 'result': None} + response_dict = {"success": True, "result": None} return response_dict def delete_layer_group(self, layer_group_id): @@ -2470,18 +2966,22 @@ def delete_layer_group(self, layer_group_id): if not workspace: workspace = self.catalog.get_default_workspace().name - url = self._assemble_url('workspaces', workspace, 'layergroups', '{0}'.format(group_name)) + url = self._assemble_url( + "workspaces", workspace, "layergroups", "{0}".format(group_name) + ) response = requests.delete(url, auth=(self.username, self.password)) if response.status_code != 200: if response.status_code == 404 and "No such layer group" in response.text: pass else: - msg = "Delete Layer Group Status Code {0}: {1}".format(response.status_code, response.text) + msg = "Delete Layer Group Status Code {0}: {1}".format( + response.status_code, response.text + ) exception = requests.RequestException(msg, response=response) log.error(exception) raise exception - response_dict = {'success': True, 'result': None} + response_dict = {"success": True, "result": None} return response_dict def delete_workspace(self, workspace_id, purge=False, recurse=False, debug=False): @@ -2505,8 +3005,13 @@ def delete_workspace(self, workspace_id, purge=False, recurse=False, debug=False workspace = self.catalog.get_workspace(workspace_id) # Handle delete - return self._handle_delete(identifier=workspace_id, gs_object=workspace, purge=purge, - recurse=recurse, debug=debug) + return self._handle_delete( + identifier=workspace_id, + gs_object=workspace, + purge=purge, + recurse=recurse, + debug=debug, + ) def delete_store(self, store_id, purge=False, 
recurse=False, debug=False): """ @@ -2537,12 +3042,16 @@ def delete_store(self, store_id, purge=False, recurse=False, debug=False): store = self.catalog.get_store(name=name, workspace=workspace) # Handle delete - return self._handle_delete(identifier=store_id, gs_object=store, purge=purge, - recurse=recurse, debug=debug) + return self._handle_delete( + identifier=store_id, + gs_object=store, + purge=purge, + recurse=recurse, + debug=debug, + ) except geoserver.catalog.FailedRequestError as e: # Update response dictionary - response_dict = {'success': False, - 'error': str(e)} + response_dict = {"success": False, "error": str(e)} self._handle_debug(response_dict, debug) return response_dict @@ -2568,33 +3077,30 @@ def delete_coverage_store(self, store_id, recurse=True, purge=True): workspace = self.catalog.get_default_workspace().name # Prepare headers - headers = { - "Content-type": "application/json" - } + headers = {"Content-type": "application/json"} # Prepare URL to create store - url = self._assemble_url('workspaces', workspace, 'coveragestores', name) + url = self._assemble_url("workspaces", workspace, "coveragestores", name) - json = {'recurse': recurse, 'purge': purge} + json = {"recurse": recurse, "purge": purge} # Execute: DELETE /workspaces//coveragestores/ response = requests.delete( - url=url, - headers=headers, - params=json, - auth=(self.username, self.password) + url=url, headers=headers, params=json, auth=(self.username, self.password) ) if response.status_code != 200: if response.status_code in self.WARNING_STATUS_CODES: pass else: - msg = "Delete Coverage Store Status Code {0}: {1}".format(response.status_code, response.text) + msg = "Delete Coverage Store Status Code {0}: {1}".format( + response.status_code, response.text + ) exception = requests.RequestException(msg, response=response) log.error(exception) raise exception - response_dict = {'success': True, 'result': None} + response_dict = {"success": True, "result": None} return response_dict def delete_style(self, style_id, purge=False): @@ -2611,22 +3117,17 @@ def delete_style(self, style_id, purge=False): workspace, style_name = self._process_identifier(style_id) if workspace is None: - url = self._assemble_url('styles', style_name) + url = self._assemble_url("styles", style_name) else: - url = self._assemble_url('workspaces', workspace, 'styles', style_name) + url = self._assemble_url("workspaces", workspace, "styles", style_name) # Prepare delete request - headers = { - "Content-type": "application/json" - } + headers = {"Content-type": "application/json"} - params = {'purge': purge} + params = {"purge": purge} response = requests.delete( - url=url, - auth=(self.username, self.password), - headers=headers, - params=params + url=url, auth=(self.username, self.password), headers=headers, params=params ) # Raise an exception if status code is not what we expect @@ -2634,12 +3135,14 @@ def delete_style(self, style_id, purge=False): if response.status_code in self.WARNING_STATUS_CODES: pass else: - msg = "Delete Style Status Code {0}: {1}".format(response.status_code, response.text) + msg = "Delete Style Status Code {0}: {1}".format( + response.status_code, response.text + ) exception = requests.RequestException(msg, response=response) log.error(exception) raise exception - response_dict = {'success': True, 'result': None} + response_dict = {"success": True, "result": None} return response_dict def validate(self): @@ -2650,23 +3153,41 @@ def validate(self): r = requests.get(self.endpoint, auth=(self.username, 
self.password)) except requests.exceptions.MissingSchema: - raise AssertionError('The URL "{0}" provided for the GeoServer spatial dataset service endpoint is ' - 'invalid.'.format(self.endpoint)) + raise AssertionError( + 'The URL "{0}" provided for the GeoServer spatial dataset service endpoint is ' + "invalid.".format(self.endpoint) + ) if r.status_code == 401: - raise AssertionError('The username and password of the GeoServer spatial dataset service engine are ' - 'not valid.') + raise AssertionError( + "The username and password of the GeoServer spatial dataset service engine are " + "not valid." + ) if r.status_code != 200: - raise AssertionError('The URL "{0}" is not a valid GeoServer spatial dataset service ' - 'endpoint.'.format(self.endpoint)) + raise AssertionError( + 'The URL "{0}" is not a valid GeoServer spatial dataset service ' + "endpoint.".format(self.endpoint) + ) - if 'Geoserver Configuration API' not in r.text: - raise AssertionError('The URL "{0}" is not a valid GeoServer spatial dataset service ' - 'endpoint.'.format(self.endpoint)) + if "Geoserver Configuration API" not in r.text: + raise AssertionError( + 'The URL "{0}" is not a valid GeoServer spatial dataset service ' + "endpoint.".format(self.endpoint) + ) - def modify_tile_cache(self, layer_id, operation, zoom_start=10, zoom_end=15, grid_set_id=900913, - image_format='image/png', thread_count=1, bounds=None, parameters=None): + def modify_tile_cache( + self, + layer_id, + operation, + zoom_start=10, + zoom_end=15, + grid_set_id=900913, + image_format="image/png", + thread_count=1, + bounds=None, + parameters=None, + ): """ Modify all or a portion of the GWC tile cache for given layer. Operations include seed, reseed, and truncate. @@ -2693,44 +3214,47 @@ def modify_tile_cache(self, layer_id, operation, zoom_start=10, zoom_end=15, gri workspace = self.catalog.get_default_workspace().name if operation not in self.GWC_OPERATIONS: - raise ValueError('Invalid value "{}" provided for argument "operation". Must be "{}".'.format( - operation, '" or "'.join(self.GWC_OPERATIONS)) + raise ValueError( + 'Invalid value "{}" provided for argument "operation". 
Must be "{}".'.format( + operation, '" or "'.join(self.GWC_OPERATIONS) + ) ) # Use post request to create style container first - headers = {'Content-type': 'text/xml'} + headers = {"Content-type": "text/xml"} if operation == self.GWC_OP_MASS_TRUNCATE: - url = self.get_gwc_endpoint() + 'masstruncate/' - xml_text = '{}:{}'.format(workspace, name) + url = self.get_gwc_endpoint() + "masstruncate/" + xml_text = ( + "{}:{}".format( + workspace, name + ) + ) response = requests.post( - url, - headers=headers, - auth=(self.username, self.password), - data=xml_text + url, headers=headers, auth=(self.username, self.password), data=xml_text ) else: - url = self.get_gwc_endpoint() + 'seed/' + workspace + ':' + name + '.xml' - xml = os.path.join(self.XML_PATH, 'gwc_tile_cache_operation_template.xml') + url = self.get_gwc_endpoint() + "seed/" + workspace + ":" + name + ".xml" + xml = os.path.join(self.XML_PATH, "gwc_tile_cache_operation_template.xml") # Open XML file - with open(xml, 'r') as sld_file: + with open(xml, "r") as sld_file: text = sld_file.read() # Compose XML context xml_context = { - 'workspace': workspace, - 'name': name, - 'operation': operation, - 'grid_set_id': grid_set_id, - 'zoom_start': zoom_start, - 'zoom_end': zoom_end, - 'format': image_format, - 'thread_count': thread_count, - 'parameters': parameters, - 'bounds': bounds + "workspace": workspace, + "name": name, + "operation": operation, + "grid_set_id": grid_set_id, + "zoom_start": zoom_start, + "zoom_end": zoom_end, + "format": image_format, + "thread_count": thread_count, + "parameters": parameters, + "bounds": bounds, } # Render the XML template @@ -2738,29 +3262,28 @@ def modify_tile_cache(self, layer_id, operation, zoom_start=10, zoom_end=15, gri rendered = template.render(xml_context) response = requests.post( - url, - headers=headers, - auth=(self.username, self.password), - data=rendered + url, headers=headers, auth=(self.username, self.password), data=rendered ) # Raise an exception if status code is not what we expect if response.status_code == 200: - log.info('Successfully submitted {} tile cache operation for layer {}:{}'.format( - operation, workspace, name - )) + log.info( + "Successfully submitted {} tile cache operation for layer {}:{}".format( + operation, workspace, name + ) + ) else: - msg = 'Unable to submit {} tile cache operation for layer {}:{}. {}:{}'.format( + msg = "Unable to submit {} tile cache operation for layer {}:{}. {}:{}".format( operation, workspace, name, response.status_code, response.text ) exception = requests.RequestException(msg, response=response) log.error(msg) raise exception - response_dict = {'success': True, 'result': None} + response_dict = {"success": True, "result": None} return response_dict - def terminate_tile_cache_tasks(self, layer_id, kill='all'): + def terminate_tile_cache_tasks(self, layer_id, kill="all"): """ Terminate running tile cache processes for given layer. @@ -2780,26 +3303,26 @@ def terminate_tile_cache_tasks(self, layer_id, kill='all'): workspace = self.catalog.get_default_workspace().name if kill not in self.GWC_KILL_OPERATIONS: - raise ValueError('Invalid value "{}" provided for argument "kill". Must be "{}".'.format( - kill, '" or "'.join(self.GWC_KILL_OPERATIONS)) + raise ValueError( + 'Invalid value "{}" provided for argument "kill". 
Must be "{}".'.format( + kill, '" or "'.join(self.GWC_KILL_OPERATIONS) + ) ) - url = self.get_gwc_endpoint() + 'seed/' + workspace + ':' + name + url = self.get_gwc_endpoint() + "seed/" + workspace + ":" + name response = requests.post( - url, - auth=(self.username, self.password), - data={'kill_all': kill} + url, auth=(self.username, self.password), data={"kill_all": kill} ) if response.status_code != 200: - msg = 'Unable to query tile cache status for layer {}:{}. {}:{}'.format( + msg = "Unable to query tile cache status for layer {}:{}. {}:{}".format( workspace, name, response.status_code, response.text ) exception = requests.RequestException(msg, response=response) raise exception - response_dict = {'success': True, 'result': None} + response_dict = {"success": True, "result": None} return response_dict def query_tile_cache_tasks(self, layer_id): @@ -2822,7 +3345,7 @@ def query_tile_cache_tasks(self, layer_id): if not workspace: workspace = self.catalog.get_default_workspace().name - url = self.get_gwc_endpoint() + 'seed/' + workspace + ':' + name + '.json' + url = self.get_gwc_endpoint() + "seed/" + workspace + ":" + name + ".json" status_list = [] response = requests.get( @@ -2833,20 +3356,24 @@ def query_tile_cache_tasks(self, layer_id): if response.status_code == 200: status = response.json() - if 'long-array-array' in status: - for s in status['long-array-array']: + if "long-array-array" in status: + for s in status["long-array-array"]: temp_dict = { - 'tiles_processed': s[0], - 'total_to_process': s[1], - 'num_remaining': s[2], - 'task_id': s[3], - 'task_status': self.GWC_STATUS_MAP[s[4]] if s[4] in self.GWC_STATUS_MAP else s[4] + "tiles_processed": s[0], + "total_to_process": s[1], + "num_remaining": s[2], + "task_id": s[3], + "task_status": ( + self.GWC_STATUS_MAP[s[4]] + if s[4] in self.GWC_STATUS_MAP + else s[4] + ), } status_list.append(dict(temp_dict)) return status_list else: - msg = 'Unable to terminate tile cache tasks for layer {}:{}. {}:{}'.format( + msg = "Unable to terminate tile cache tasks for layer {}:{}. 
{}:{}".format( workspace, name, response.status_code, response.text ) exception = requests.RequestException(msg, response=response) @@ -2869,10 +3396,15 @@ def enable_time_dimension(self, coverage_id): if not workspace: workspace = self.catalog.get_default_workspace().name - headers = { - "Content-type": "text/xml" - } - url = self._assemble_url('workspaces', workspace, 'coveragestores', coverage_name, 'coverages', coverage_name) + headers = {"Content-type": "text/xml"} + url = self._assemble_url( + "workspaces", + workspace, + "coveragestores", + coverage_name, + "coverages", + coverage_name, + ) data_xml = '\ true\ \ @@ -2891,11 +3423,13 @@ def enable_time_dimension(self, coverage_id): ) if response.status_code != 200: - msg = f"Enable Time Dimension Layer {coverage_name} with Status Code {response.status_code}:" \ - f" {response.text}" + msg = ( + f"Enable Time Dimension Layer {coverage_name} with Status Code {response.status_code}:" + f" {response.text}" + ) exception = requests.RequestException(msg, response=response) log.error(exception) raise exception - response_dict = {'success': True, 'result': None} + response_dict = {"success": True, "result": None} return response_dict diff --git a/tethys_dataset_services/engines/hydroshare_engine.py b/tethys_dataset_services/engines/hydroshare_engine.py index 47e66c2..a979498 100644 --- a/tethys_dataset_services/engines/hydroshare_engine.py +++ b/tethys_dataset_services/engines/hydroshare_engine.py @@ -13,7 +13,7 @@ def type(self): """ HydroShare Dataset Engine Type """ - return 'HydroShare' + return "HydroShare" def _prepare_request(self, method, data_dict=None, file=None, apikey=None): """ @@ -176,7 +176,9 @@ def update_dataset(self, dataset_id, console=False, **kwargs): """ raise NotImplementedError() - def update_resource(self, resource_id, url=None, file=None, console=False, **kwargs): + def update_resource( + self, resource_id, url=None, file=None, console=False, **kwargs + ): """ Update HydroShare file diff --git a/tethys_dataset_services/utilities.py b/tethys_dataset_services/utilities.py index 4bbafe4..3b5ea3f 100644 --- a/tethys_dataset_services/utilities.py +++ b/tethys_dataset_services/utilities.py @@ -23,10 +23,10 @@ def __setattr__(self, item, value): self.__setitem__(item, value) def __str__(self): - if '_text' in self: - return self.__getitem__('_text') + if "_text" in self: + return self.__getitem__("_text") else: - return '' + return "" @staticmethod def Wrap(x): @@ -61,8 +61,8 @@ def _ConvertDictToXmlRecurse(parent, dictitem): assert not isinstance(dictitem, type([])) if isinstance(dictitem, dict): - for (tag, child) in dictitem.items(): - if str(tag) == '_text': + for tag, child in dictitem.items(): + if str(tag) == "_text": parent.text = str(child) elif isinstance(child, type([])): # iterate through the array and convert @@ -111,14 +111,14 @@ def _ConvertXmlToDictRecurse(node, dictclass): nodedict[child.tag] = newitem if node.text is None: - text = '' + text = "" else: text = node.text.strip() if len(nodedict) > 0: # if we have a dictionary add the text as a dictionary value (if there is any) if len(text) > 0: - nodedict['_text'] = text + nodedict["_text"] = text else: # if we don't have child nodes or attributes, just set the text nodedict = text @@ -134,6 +134,6 @@ def ConvertXmlToDict(root, dictclass=XmlDictObject): if isinstance(root, basestring): root = ElementTree.parse(root).getroot() elif not isinstance(root, ElementTree.Element): - raise TypeError('Expected ElementTree.Element or file path string') + raise 
TypeError("Expected ElementTree.Element or file path string") return dictclass({root.tag: _ConvertXmlToDictRecurse(root, dictclass)}) diff --git a/tethys_dataset_services/valid_engines.py b/tethys_dataset_services/valid_engines.py index 916546f..5ba0974 100644 --- a/tethys_dataset_services/valid_engines.py +++ b/tethys_dataset_services/valid_engines.py @@ -1,4 +1,8 @@ -VALID_ENGINES = {'ckan': 'tethys_dataset_services.engines.CkanDatasetEngine', - 'hydroshare': 'tethys_dataset_services.engines.HydroShareDatasetEngine'} +VALID_ENGINES = { + "ckan": "tethys_dataset_services.engines.CkanDatasetEngine", + "hydroshare": "tethys_dataset_services.engines.HydroShareDatasetEngine", +} -VALID_SPATIAL_ENGINES = {'geoserver': 'tethys_dataset_services.engines.GeoServerSpatialDatasetEngine'} +VALID_SPATIAL_ENGINES = { + "geoserver": "tethys_dataset_services.engines.GeoServerSpatialDatasetEngine" +} From 66698820af498f9bb432a21bfa0839a5983b97d5 Mon Sep 17 00:00:00 2001 From: Nathan Date: Fri, 6 Jun 2025 10:08:47 -0600 Subject: [PATCH 06/20] WIP: Can specify default_style when using create_shapefile_resource --- .../engines/geoserver_engine.py | 39 +++++++++++++++++-- 1 file changed, 35 insertions(+), 4 deletions(-) diff --git a/tethys_dataset_services/engines/geoserver_engine.py b/tethys_dataset_services/engines/geoserver_engine.py index 5b11427..4afa861 100644 --- a/tethys_dataset_services/engines/geoserver_engine.py +++ b/tethys_dataset_services/engines/geoserver_engine.py @@ -1614,7 +1614,7 @@ def create_sql_view_layer(self, store_id, layer_name, geometry_type, srid, sql, return response_dict def create_shapefile_resource(self, store_id, shapefile_base=None, shapefile_zip=None, shapefile_upload=None, - overwrite=False, charset=None, debug=False): + overwrite=False, charset=None, default_style=None, debug=False): """ Use this method to add shapefile resources to GeoServer. @@ -1626,7 +1626,8 @@ def create_shapefile_resource(self, store_id, shapefile_base=None, shapefile_zip shapefile_zip (string, optional): Path to a zip file containing the shapefile and side cars. shapefile_upload (FileUpload list, optional): A list of Django FileUpload objects containing a shapefile and side cars that have been uploaded via multipart/form-data form. # noqa: E501 overwrite (bool, optional): Overwrite the file if it already exists. - charset (string, optional): Specify the character encoding of the file being uploaded (e.g.: ISO-8559-1) + charset (string, optional): Specify the character encoding of the file being uploaded (e.g.: ISO-8559-1). + default_style (string, optional): The name of the default style to apply to the layer. Can be a name or a workspace-name combination (e.g.: "name" or "workspace:name"). # noqa: E501 debug (bool, optional): Pretty print the response dictionary to the console for debugging. Defaults to False. Returns: @@ -1772,8 +1773,10 @@ def create_shapefile_resource(self, store_id, shapefile_base=None, shapefile_zip # Wrap up with failure if response.status_code != 201: - response_dict = {'success': False, - 'error': '{1}({0}): {2}'.format(response.status_code, response.reason, response.text)} + response_dict = { + 'success': False, + 'error': f'{response.reason}({response.status_code}): {response.text}' + } self._handle_debug(response_dict, debug) return response_dict @@ -1788,6 +1791,34 @@ def create_shapefile_resource(self, store_id, shapefile_base=None, shapefile_zip # This case uses the store name as the Resource ID. 
resource_id = name + # Set the default style + if default_style is not None: + layer_url = self._assemble_url('layers', f'{workspace}:{resource_id}.xml') + layer_headers = {"Content-Type": "application/xml"} + layer_data = f""" + + + {default_style} + + + """ + + layer_response = requests.put( + layer_url, + headers=layer_headers, + data=layer_data, + auth=HTTPBasicAuth(username=self.username, password=self.password) + ) + + if layer_response.status_code != 200: + layer_response_dict = { + 'success': False, + 'error': f'{layer_response.reason}({layer_response.status_code}): {layer_response.text}' + } + + self._handle_debug(layer_response_dict, debug) + return response_dict + # Wrap up successfully new_resource = self.catalog.get_resource(name=resource_id, store=name, workspace=workspace) resource_dict = self._transcribe_geoserver_object(new_resource) From b4000dfeb465e8396d078f617b59029dd4e8f447 Mon Sep 17 00:00:00 2001 From: Nathan Date: Wed, 11 Jun 2025 15:00:25 +0000 Subject: [PATCH 07/20] Lint with flake8 --- tests/e2e_tests/geoserver_engine_e2e_tests.py | 23 ++++++------ tests/unit_tests/test_geoserver_engine.py | 37 +++++++++---------- .../engines/geoserver_engine.py | 9 ++--- 3 files changed, 33 insertions(+), 36 deletions(-) diff --git a/tests/e2e_tests/geoserver_engine_e2e_tests.py b/tests/e2e_tests/geoserver_engine_e2e_tests.py index 177ff37..29f5927 100644 --- a/tests/e2e_tests/geoserver_engine_e2e_tests.py +++ b/tests/e2e_tests/geoserver_engine_e2e_tests.py @@ -332,11 +332,11 @@ def test_create_shapefile_resource_upload(self): store_rand = random_string_generator(10) store_id = '{}:{}'.format(self.workspace_name, store_rand) - with open(shapefile_cst, 'rb') as cst_upload,\ - open(shapefile_dbf, 'rb') as dbf_upload,\ - open(shapefile_prj, 'rb') as prj_upload,\ - open(shapefile_shp, 'rb') as shp_upload,\ - open(shapefile_shx, 'rb') as shx_upload: + with open(shapefile_cst, 'rb') as cst_upload, \ + open(shapefile_dbf, 'rb') as dbf_upload, \ + open(shapefile_prj, 'rb') as prj_upload, \ + open(shapefile_shp, 'rb') as shp_upload, \ + open(shapefile_shx, 'rb') as shx_upload: upload_list = [cst_upload, dbf_upload, prj_upload, shp_upload, shx_upload] response = self.geoserver_engine.create_shapefile_resource( store_id=store_id, @@ -499,7 +499,6 @@ def test_create_coverage_layer_grassgrid(self): layer_id = '{}:{}'.format(self.workspace_name, layer_name) expected_coverage_type = 'GrassGrid' coverage_file_name = 'my_grass.zip' - coverage_name = coverage_file_name.split('.')[0] coverage_file = os.path.join(self.files_root, "grass_ascii", coverage_file_name) # Execute @@ -995,7 +994,8 @@ def test_link_and_add_table(self): ) # Check for success response - # TODO: returns an error in PostGIS 3.4: Internal Server Error(500): :java.io.IOException: Error occured calculating bounds for points + # TODO: returns an error in PostGIS 3.4: Internal Server Error(500): + # :java.io.IOException: Error occured calculating bounds for points self.assertTrue(response['success']) # TEST list_stores @@ -1064,8 +1064,8 @@ def test_create_postgis_store(self): ) self.assertTrue(response['success']) - - # Pause to let GeoServer catch up + + # Pause to let GeoServer catch up sleep(5) # TEST list_stores @@ -1132,7 +1132,7 @@ def test_create_sql_view_layer(self): password=self.pg_password, ) self.assertTrue(response['success']) - + # Pause to let GeoServer catch up before continuing sleep(5) @@ -1143,7 +1143,8 @@ def test_create_sql_view_layer(self): debug=True ) - # TODO: returns an error in PostGIS 3.4: Internal 
Server Error(500): :java.io.IOException: Error occured calculating bounds for points + # TODO: returns an error in PostGIS 3.4: Internal Server Error(500): + # :java.io.IOException: Error occured calculating bounds for points self.assertTrue(response['success']) # Pause to let GeoServer catch up before continuing diff --git a/tests/unit_tests/test_geoserver_engine.py b/tests/unit_tests/test_geoserver_engine.py index 11c5f80..27301c5 100644 --- a/tests/unit_tests/test_geoserver_engine.py +++ b/tests/unit_tests/test_geoserver_engine.py @@ -79,7 +79,7 @@ def setUp(self): self.username = 'foo' self.password = 'bar' self.auth = (self.username, self.password) - + self.engine = GeoServerSpatialDatasetEngine( endpoint=self.endpoint, username=self.username, @@ -515,7 +515,7 @@ def test_list_styles(self, mock_catalog): self.assertIn(n, result) mc.get_styles.assert_called_with(workspaces=[]) - + @mock.patch('tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog') def test_list_styles_of_workspace(self, mock_catalog): mc = mock_catalog() @@ -1650,7 +1650,7 @@ def test_update_layer_styles_exception(self, mock_ws, mock_list_styles, mock_put default_style = self.style_names[0] other_styles = [self.style_names[1]] - self.assertRaises(requests.RequestException, self.engine.update_layer_styles, layer_id, default_style, + self.assertRaises(requests.RequestException, self.engine.update_layer_styles, layer_id, default_style, other_styles) mock_logger.error.assert_called() @@ -1959,7 +1959,7 @@ def test_delete_style(self, mock_workspace, mock_delete): self.assertEqual(expected_headers, delete_call_args[0][1]['headers']) self.assertEqual(expected_params, delete_call_args[0][1]['params']) - mock_delete.assert_called_with(url=expected_url, auth=self.auth, headers=expected_headers, + mock_delete.assert_called_with(url=expected_url, auth=self.auth, headers=expected_headers, params=expected_params) @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') @@ -2120,10 +2120,10 @@ def test_create_shapefile_resource_upload(self, mock_catalog, mock_put): # Workspace is given store_id = '{}:{}'.format(self.workspace_name, self.store_names[0]) - with open(shapefile_cst, 'rb') as cst_upload,\ - open(shapefile_dbf, 'rb') as dbf_upload,\ - open(shapefile_prj, 'rb') as prj_upload,\ - open(shapefile_shp, 'rb') as shp_upload,\ + with open(shapefile_cst, 'rb') as cst_upload, \ + open(shapefile_dbf, 'rb') as dbf_upload, \ + open(shapefile_prj, 'rb') as prj_upload, \ + open(shapefile_shp, 'rb') as shp_upload, \ open(shapefile_shx, 'rb') as shx_upload: upload_list = [cst_upload, dbf_upload, prj_upload, shp_upload, shx_upload] response = self.engine.create_shapefile_resource(store_id=store_id, @@ -2362,7 +2362,7 @@ def test_reload_not_200(self, mock_post, mock_logger): @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') def test_reload_connection_error(self, mock_post, mock_logger): mock_post.side_effect = requests.ConnectionError() - response = self.engine.reload() + self.engine.reload() mock_logger.warning.assert_called() @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') @@ -2390,7 +2390,7 @@ def test_gwc_reload_not_200(self, mock_post, mock_logger): @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') def test_gwc_reload_connection_error(self, mock_post, mock_logger): mock_post.side_effect = requests.ConnectionError() - response = self.engine.gwc_reload() + self.engine.gwc_reload() mock_logger.warning.assert_called() def 
test_ini_no_slash_endpoint(self): @@ -3104,10 +3104,10 @@ def test_create_style(self, mock_workspace, mock_post, mock_get_style, mock_log) sld_context = {'foo': 'bar'} mock_get_style.return_value = { - 'success': True, + 'success': True, 'result': {'name': self.mock_styles[0].name, 'workspace': self.workspace_name} } - + response = self.engine.create_style(style_id, sld_template, sld_context) # Validate response object @@ -3141,7 +3141,7 @@ def test_create_style_cannot_find_style(self, mock_post, mock_get_style, mock_lo sld_context = {'foo': 'bar'} mock_get_style.return_value = { - 'success': True, + 'success': True, 'result': ' warnings ' } @@ -3155,7 +3155,6 @@ def test_create_style_cannot_find_style(self, mock_post, mock_get_style, mock_lo self.assertIn(style_url, post_call_args[0][0][0]) mock_log.warning.assert_called() - @mock.patch('tethys_dataset_services.engines.geoserver_engine.log') @mock.patch('tethys_dataset_services.engines.geoserver_engine.requests.post') def test_create_style_exception(self, mock_post, mock_log): @@ -3195,7 +3194,7 @@ def test_create_style_overwrite(self, mock_post, mock_logger, mock_get_style): sld_context = {'foo': 'bar'} self.engine.delete_style = mock.MagicMock() mock_get_style.return_value = { - 'success': True, + 'success': True, 'result': {'name': self.mock_styles[0].name, 'workspace': self.workspace_name} } @@ -3207,7 +3206,7 @@ def test_create_style_overwrite(self, mock_post, mock_logger, mock_get_style): # Success self.assertTrue(response['success']) - + # Extract Result result = response['result'] @@ -3311,7 +3310,7 @@ def test_create_layer_create_feature_type_already_exists(self, mock_workspace, m mock_post.side_effect = [MockResponse(500, 'already exists'), MockResponse(200)] mock_workspace().name = self.workspace_name store_id = 'foo' - layer_name = self.layer_names[0] + layer_name = self.layer_names[0] geometry_type = 'Point' srid = 4236 sql = 'SELECT * FROM foo' @@ -3772,7 +3771,7 @@ def test_create_postgis_store_validate_connection_false(self, mock_workspace, mo endpoint=self.endpoint, workspace=self.workspace_name ) - self.engine.create_postgis_store(store_id, host, port, database, username, password, max_connections, + self.engine.create_postgis_store(store_id, host, port, database, username, password, max_connections, max_connection_idle_time, evictor_run_periodicity, validate_connections=False) mock_post.assert_called_with(url=rest_endpoint, data=xml, headers=expected_headers, auth=self.auth) @@ -3821,7 +3820,7 @@ def test_create_postgis_store_expose_primary_keys_true(self, mock_workspace, moc endpoint=self.endpoint, workspace=self.workspace_name ) - self.engine.create_postgis_store(store_id, host, port, database, username, password, max_connections, + self.engine.create_postgis_store(store_id, host, port, database, username, password, max_connections, max_connection_idle_time, evictor_run_periodicity, validate_connections=False, expose_primary_keys=True) mock_post.assert_called_with(url=rest_endpoint, data=xml, headers=expected_headers, auth=self.auth) diff --git a/tethys_dataset_services/engines/geoserver_engine.py b/tethys_dataset_services/engines/geoserver_engine.py index 4afa861..06c5441 100644 --- a/tethys_dataset_services/engines/geoserver_engine.py +++ b/tethys_dataset_services/engines/geoserver_engine.py @@ -677,7 +677,8 @@ def reload(self, ports=None, public=True): GeoServer are running in a clustered GeoServer configuration. public (bool): Use the public geoserver endpoint if True, otherwise use the internal endpoint. 
""" - # node_endpoints = self._get_node_endpoints(ports=[9090], public=public) # take this out, it is hardcoded for testing. + # take this out, it is hardcoded for testing: + # node_endpoints = self._get_node_endpoints(ports=[9090], public=public) node_endpoints = self._get_node_endpoints(ports=ports, public=public) log.debug("Catalog Reload URLS: {0}".format(node_endpoints)) @@ -1333,9 +1334,6 @@ def create_postgis_store(self, store_id, host, port, database, username, passwor return response_dict - - - def create_layer_from_postgis_store(self, store_id, table, layer_name=None, debug=False): """ Add an existing PostGIS table as a feature resource to a PostGIS store that already exists. @@ -1421,7 +1419,6 @@ def create_layer_from_postgis_store(self, store_id, table, layer_name=None, debu self._handle_debug(response_dict, debug) return response_dict - def create_layer_from_postgis_store2(self, store_id, table, debug=False): """ Add an existing postgis table as a feature resource to a postgis store that already exists. @@ -2346,7 +2343,7 @@ def update_layer(self, layer_id, debug=False, **kwargs): # Assemble Response response_dict = {'success': True, 'result': layer_dict} - + # Handle tile caching properties (gsconfig doesn't support this) if tile_caching is not None: gwc_url = '{0}layers/{1}.xml'.format(self.gwc_endpoint, layer_id) From 6eb9a5f818f0b062132c80ecc1b7934b115fd917 Mon Sep 17 00:00:00 2001 From: romer8 Date: Wed, 6 Aug 2025 11:23:29 -0600 Subject: [PATCH 08/20] rename the test top bec ompatible with tox --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 9a7f468..cc65bd4 100644 --- a/README.md +++ b/README.md @@ -71,7 +71,7 @@ docker-compose up -d ``` cd .. -tox -e e2e_geoserver_tests +tox -e e2e_gs_tests ``` 6. It is recommended that after each run, you refresh the Docker containers. 
Run the following command to remove them: From b2e5862d1b713f750e4a8df1fbc4336b83d0ffd6 Mon Sep 17 00:00:00 2001 From: romer8 Date: Wed, 6 Aug 2025 11:23:55 -0600 Subject: [PATCH 09/20] udpated to use lastest version of geoserver and postgis --- tests/docker-compose.yml | 20 ++++++-------------- tests/setup_e2e_tests.sh | 0 2 files changed, 6 insertions(+), 14 deletions(-) mode change 100644 => 100755 tests/setup_e2e_tests.sh diff --git a/tests/docker-compose.yml b/tests/docker-compose.yml index fceef0b..988a07d 100644 --- a/tests/docker-compose.yml +++ b/tests/docker-compose.yml @@ -1,7 +1,6 @@ -version: "3.3" services: postgis: - image: "postgis/postgis:12-2.5" + image: "postgis/postgis:latest" ports: - "5432:5432" environment: @@ -9,17 +8,10 @@ services: - POSTGRES_USER=postgres geoserver: - image: "tethysplatform/geoserver" + image: "docker.osgeo.org/geoserver:2.27.0" ports: - - "8181:8181" - - "8081:8081" - - "8082:8082" - - "8083:8083" - - "8084:8084" + - "8181:8080" environment: - - ENABLED_NODES=4 - - REST_NODES=1 - - MAX_MEMORY=512 - - MIN_MEMORY=512 - - NUM_CORES=2 - - MAX_TIMEOUT=60 \ No newline at end of file + - SKIP_DEMO_DATA=false + - CORS_ENABLED=true + - ENABLE_JSONP=true \ No newline at end of file diff --git a/tests/setup_e2e_tests.sh b/tests/setup_e2e_tests.sh old mode 100644 new mode 100755 From 15ee23889e57c3310f70370571b50871fb0ce74c Mon Sep 17 00:00:00 2001 From: romer8 Date: Wed, 6 Aug 2025 11:24:13 -0600 Subject: [PATCH 10/20] geoserver end to end test finished --- tests/e2e_tests/geoserver_engine_e2e_tests.py | 12 ++- .../engines/geoserver_engine.py | 74 +++++++++---------- 2 files changed, 42 insertions(+), 44 deletions(-) diff --git a/tests/e2e_tests/geoserver_engine_e2e_tests.py b/tests/e2e_tests/geoserver_engine_e2e_tests.py index a15aee8..ff4cffe 100644 --- a/tests/e2e_tests/geoserver_engine_e2e_tests.py +++ b/tests/e2e_tests/geoserver_engine_e2e_tests.py @@ -545,7 +545,7 @@ def test_create_coverage_layer_grassgrid(self): self.assertIsInstance(result, list) # Check if layer is in list - self.assertIn(layer_name, result) + self.assertIn(layer_id, result) # TEST get_layer @@ -708,7 +708,7 @@ def test_create_coverage_layer_world_file_tif(self): self.assertIsInstance(result, list) # Check if layer is in list - self.assertIn(layer_name, result) + self.assertIn(layer_id, result) # TEST get_layer @@ -751,9 +751,8 @@ def test_create_layer_group(self): # expected_layer_group_id = '{}:{}'.format(self.workspace_name, random_string_generator(10)) expected_layer_group_id = random_string_generator(10) - expected_layers = ["roads", "bugsites", "streams"] + expected_layers = ["sf:roads", "sf:bugsites", "sf:streams"] expected_styles = ["simple_roads", "capitals", "simple_streams"] - # TODO: create_layer_group: fails on catalog.save() when workspace is given. 
response = self.geoserver_engine.create_layer_group( layer_group_id=f"sf:{expected_layer_group_id}", @@ -818,7 +817,7 @@ def test_create_layer_group(self): # TEST delete layer group # Clean up - self.geoserver_engine.delete_layer_group(layer_group_id=expected_layer_group_id) + self.geoserver_engine.delete_layer_group(layer_group_id=f"sf:{expected_layer_group_id}") self.assert_valid_response_object(response) self.assertTrue(response["success"]) # self.assertIsNone(response['result']) @@ -1169,14 +1168,13 @@ def test_create_sql_view_layer(self): postgis_store_id = "{}:{}".format(self.workspace_name, store_id_name) sql = "SELECT * FROM {}".format(self.pg_table_name) geometry_type = self.geometry_type - response = self.geoserver_engine.create_sql_view_layer( store_id=postgis_store_id, layer_name=feature_type_name, geometry_type=geometry_type, srid=self.srid, sql=sql, - default_style="points", + default_style="point", ) self.assertTrue(response["success"]) diff --git a/tethys_dataset_services/engines/geoserver_engine.py b/tethys_dataset_services/engines/geoserver_engine.py index aa7c439..06457c5 100644 --- a/tethys_dataset_services/engines/geoserver_engine.py +++ b/tethys_dataset_services/engines/geoserver_engine.py @@ -2718,7 +2718,6 @@ def update_layer(self, layer_id, debug=False, **kwargs): """ # Pop tile caching properties to handle separately tile_caching = kwargs.pop("tile_caching", None) - # breakpoint() try: # Get resource layer = self.catalog.get_layer(name=layer_id) @@ -2802,9 +2801,7 @@ def update_layer_group(self, layer_group_id, debug=False, **kwargs): self._handle_debug(response_dict, debug) return response_dict - def update_layer_styles( - self, layer_id, default_style, other_styles=None, debug=False - ): + def update_layer_styles(self, layer_id, default_style, other_styles=None, debug=False): """ Update/add styles to existing layer. 
@@ -2823,24 +2820,26 @@ def update_layer_styles( # check if layer workspace is style workspace else use styles default location lyr_ws_styles = self.list_styles(workspace=layer_workspace) if default_style in lyr_ws_styles: - default_style = "{0}:{1}".format(layer_workspace, default_style) + default_style = '{0}:{1}'.format(layer_workspace, default_style) if other_styles: for i in range(len(other_styles)): if other_styles[i] in lyr_ws_styles: - other_styles[i] = "{0}:{1}".format(layer_workspace, other_styles[i]) + other_styles[i] = '{0}:{1}'.format(layer_workspace, other_styles[i]) context = { - "default_style": default_style, - "other_styles": other_styles or [], - "geoserver_rest_endpoint": self.endpoint, + 'default_style': default_style, + 'other_styles': other_styles or [], + 'geoserver_rest_endpoint': self.endpoint } # Open layer template - layer_path = os.path.join(self.XML_PATH, "layer_template.xml") - url = self._assemble_url("layers", "{0}.xml".format(layer_name)) - headers = {"Content-type": "text/xml"} + layer_path = os.path.join(self.XML_PATH, 'layer_template.xml') + url = self._assemble_url('layers', '{0}.xml'.format(layer_name)) + headers = { + "Content-type": "text/xml" + } - with open(layer_path, "r") as layer_file: + with open(layer_path, 'r') as layer_file: text = layer_file.read() template = Template(text) xml = template.render(context) @@ -2856,14 +2855,12 @@ def update_layer_styles( # Raise an exception if status code is not what we expect if response.status_code == 200: - log.info("Successfully created layer {}".format(layer_name)) + log.info('Successfully created layer {}'.format(layer_name)) break else: retries_remaining -= 1 if retries_remaining == 0: - msg = "Create Layer Status Code {0}: {1}".format( - response.status_code, response.text - ) + msg = "Create Layer Status Code {0}: {1}".format(response.status_code, response.text) exception = requests.RequestException(msg, response=response) log.error(exception) raise exception @@ -2955,34 +2952,37 @@ def delete_layer(self, layer_id, datastore, recurse=False): def delete_layer_group(self, layer_group_id): """ - Args: - layer_group_id (string): Identifier of the layer group to delete. Can be a name or a workspace-name combination (e.g.: "name" or "workspace:name"). - - """ # noqa: E501 + Delete the specified layer-group. Works around a GeoServer 500 / NPE + that occurs on workspace-qualified groups by always passing + ``recurse=true``. 
+ """ # Process identifier workspace, group_name = self._process_identifier(layer_group_id) - # Get default work space if none is given + # Fall back to default workspace if not workspace: workspace = self.catalog.get_default_workspace().name - url = self._assemble_url( - "workspaces", workspace, "layergroups", "{0}".format(group_name) + url = self._assemble_url("workspaces", workspace, "layergroups", + f"{group_name}") + response = requests.delete( + url, + auth=(self.username, self.password), + params={"recurse": "true"}, ) - response = requests.delete(url, auth=(self.username, self.password)) - if response.status_code != 200: - if response.status_code == 404 and "No such layer group" in response.text: - pass - else: - msg = "Delete Layer Group Status Code {0}: {1}".format( - response.status_code, response.text - ) - exception = requests.RequestException(msg, response=response) - log.error(exception) - raise exception - response_dict = {"success": True, "result": None} - return response_dict + # Accept 200 OK or 404 “already gone” + if response.status_code not in (200, 404): + # 404 with the expected text is OK (idempotent delete) + if not (response.status_code == 404 and + "No such layer group" in response.text): + msg = (f"Delete Layer Group Status Code {response.status_code}: " + f"{response.text}") + exc = requests.RequestException(msg, response=response) + log.error(exc) + raise exc + + return {"success": True, "result": None} def delete_workspace(self, workspace_id, purge=False, recurse=False, debug=False): """ From c5b34548fb8232daa22b494f1716e4e3ee4dfe25 Mon Sep 17 00:00:00 2001 From: romer8 Date: Wed, 6 Aug 2025 13:06:12 -0600 Subject: [PATCH 11/20] udpated e2e_test action --- .github/workflows/e2e_tests.yml | 24 ++++++------------------ 1 file changed, 6 insertions(+), 18 deletions(-) diff --git a/.github/workflows/e2e_tests.yml b/.github/workflows/e2e_tests.yml index c62f8ac..cea2e08 100644 --- a/.github/workflows/e2e_tests.yml +++ b/.github/workflows/e2e_tests.yml @@ -17,28 +17,16 @@ jobs: py: - "3.12" postgis: - - "9.6-2.5-alpine" - - "10-2.5-alpine" - - "11-2.5-alpine" - - "12-2.5-alpine" - # Not working on PostGIS 3.4 yet - # - "12-3.4-alpine" - # - "13-3.4-alpine" - # - "14-3.4-alpine" - # - "15-3.4-alpine" - # - "16-3.4-alpine" + - "latest" services: geoserver: - image: tethysplatform/geoserver + image: docker.osgeo.org/geoserver:2.27.0 env: - ENABLED_NODES: 1 - REST_NODES: 1 - MAX_MEMORY: 512 - MIN_MEMORY: 128 - NUM_CORES: 2 - MAX_TIMEOUT: 60 + SKIP_DEMO_DATA: false + CORS_ENABLED: true + ENABLE_JSONP: true ports: - - 8181:8181 + - 8181:8080 postgis: image: postgis/postgis:${{ matrix.postgis }} env: From a618546e6978c934914d7eaefde760729198ca8a Mon Sep 17 00:00:00 2001 From: romer8 Date: Wed, 6 Aug 2025 13:12:32 -0600 Subject: [PATCH 12/20] tg --- tests/setup_e2e_tests.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/setup_e2e_tests.sh b/tests/setup_e2e_tests.sh index 56ac7e9..76347d8 100755 --- a/tests/setup_e2e_tests.sh +++ b/tests/setup_e2e_tests.sh @@ -1,4 +1,5 @@ #!/usr/bin/env bash + docker-compose exec postgis /bin/bash -c "psql -U postgres -c \"CREATE DATABASE tds_tests WITH OWNER postgres;\"" docker-compose exec postgis /bin/bash -c "psql -U postgres -d tds_tests -c \"CREATE EXTENSION postgis;\"" curl -u admin:geoserver -H 'Accept: application/xml' -H 'Content-Type: application/xml' -X PUT -d 'http://127.0.0.1:8181/geoserver' http://127.0.0.1:8181/geoserver/rest/settings.xml \ No newline at end of file From 7b18ff6c5bc88e281ccc2c5a2da90364345929f0 Mon 
Sep 17 00:00:00 2001 From: romer8 Date: Sun, 10 Aug 2025 20:20:55 -0600 Subject: [PATCH 13/20] removed old python versions --- .github/workflows/unit_tests.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml index 3ce19fe..157e065 100644 --- a/.github/workflows/unit_tests.yml +++ b/.github/workflows/unit_tests.yml @@ -22,8 +22,6 @@ jobs: # - "3.13" - "3.12" - "3.11" - - "3.10" - - "3.9" steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.py }} for test From 83606046e82d4779ada2f73d588552fbb34bff6d Mon Sep 17 00:00:00 2001 From: romer8 Date: Sun, 10 Aug 2025 20:27:55 -0600 Subject: [PATCH 14/20] replacing fake publci endpoint with localhost --- tests/unit_tests/test_geoserver_engine.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/unit_tests/test_geoserver_engine.py b/tests/unit_tests/test_geoserver_engine.py index 1ffadfa..642e732 100644 --- a/tests/unit_tests/test_geoserver_engine.py +++ b/tests/unit_tests/test_geoserver_engine.py @@ -80,8 +80,8 @@ def setUp(self): ) # Create Test Engine - self.endpoint = "http://fake.geoserver.org:8181/geoserver/rest/" - self.public_endpoint = "http://fake.public.geoserver.org:8181/geoserver/rest/" + self.endpoint = "http://localhost:8181/geoserver/rest/" + self.public_endpoint = "http://localhost:8181/geoserver/rest/" self.username = "foo" self.password = "bar" self.auth = (self.username, self.password) From 8639febb8059d3460394ed48d0f878a4fee8f969 Mon Sep 17 00:00:00 2001 From: romer8 Date: Mon, 11 Aug 2025 12:08:23 -0600 Subject: [PATCH 15/20] udpated unit tests --- .github/workflows/unit_tests.yml | 2 + tests/unit_tests/test_geoserver_engine.py | 46 ++++++++++--------- .../engines/geoserver_engine.py | 23 +++++----- 3 files changed, 38 insertions(+), 33 deletions(-) diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml index 157e065..3ce19fe 100644 --- a/.github/workflows/unit_tests.yml +++ b/.github/workflows/unit_tests.yml @@ -22,6 +22,8 @@ jobs: # - "3.13" - "3.12" - "3.11" + - "3.10" + - "3.9" steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.py }} for test diff --git a/tests/unit_tests/test_geoserver_engine.py b/tests/unit_tests/test_geoserver_engine.py index 642e732..2d6a316 100644 --- a/tests/unit_tests/test_geoserver_engine.py +++ b/tests/unit_tests/test_geoserver_engine.py @@ -80,8 +80,8 @@ def setUp(self): ) # Create Test Engine - self.endpoint = "http://localhost:8181/geoserver/rest/" - self.public_endpoint = "http://localhost:8181/geoserver/rest/" + self.endpoint = "http://fake.geoserver.org:8181/geoserver/rest/" + self.public_endpoint = "http://fake.public.geoserver.org:8181/geoserver/rest/" self.username = "foo" self.password = "bar" self.auth = (self.username, self.password) @@ -1961,7 +1961,7 @@ def test_delete_layer_group(self, mock_delete): ) # Create feature type call - mock_delete.assert_called_with(url, auth=self.auth) + mock_delete.assert_called_with(url, auth=self.auth, params={'recurse': 'true'}) @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.delete") @mock.patch( @@ -1980,7 +1980,7 @@ def test_delete_layer_group_no_group(self, mock_workspace, mock_delete): ) # Create feature type call - mock_delete.assert_called_with(url, auth=self.auth) + mock_delete.assert_called_with(url, auth=self.auth, params={'recurse': 'true'}) @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") 
@mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.delete") @@ -3656,6 +3656,7 @@ def test_create_style_overwrite_referenced_by_existing(self, mock_logger): mock_logger.error.assert_called() + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.put") @mock.patch( "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.reload" ) @@ -3674,8 +3675,10 @@ def test_create_sql_view_layer( mock_update_layer_styles, mock_get_layer, mock_reload, + mock_put ): - mock_post.side_effect = [MockResponse(201), MockResponse(200)] + mock_post.side_effect = [MockResponse(201)] # featuretype create + mock_put.return_value = MockResponse(200) # GWC layer create store_id = f"{self.workspace_name}:foo" layer_name = self.layer_names[0] geometry_type = "Point" @@ -3711,10 +3714,9 @@ def test_create_sql_view_layer( self.assertIn(sql_view_url, post_call_args[0][0][0]) self.assertEqual(expected_sql_xml, post_call_args[0][1]["data"]) - # GWC Call - self.assertIn(gwc_layer_url, post_call_args[1][0][0]) - self.assertEqual(expected_gwc_lyr_xml, str(post_call_args[1][1]["data"])) - mock_logger.info.assert_called() + put_call_args = mock_put.call_args_list + self.assertIn(gwc_layer_url, put_call_args[0][0][0]) + self.assertEqual(expected_gwc_lyr_xml, str(put_call_args[0][1]["data"])) mock_update_layer_styles.assert_called_with( layer_id=f"{self.workspace_name}:{layer_name}", @@ -3735,19 +3737,22 @@ def test_create_sql_view_layer( ) @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.put") @mock.patch( "tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog.get_default_workspace" ) def test_create_layer_create_feature_type_already_exists( self, mock_workspace, + mock_put, mock_post, mock_logger, mock_update_layer_styles, mock_get_layer, mock_reload, ): - mock_post.side_effect = [MockResponse(500, "already exists"), MockResponse(200)] + mock_put.return_value = MockResponse(200) + mock_post.return_value = MockResponse(500, "already exists") mock_workspace().name = self.workspace_name store_id = 'foo' layer_name = self.layer_names[0] @@ -3785,8 +3790,9 @@ def test_create_layer_create_feature_type_already_exists( self.assertEqual(expected_sql_xml, post_call_args[0][1]["data"]) # GWC Call - self.assertIn(gwc_layer_url, post_call_args[1][0][0]) - self.assertEqual(expected_gwc_lyr_xml, str(post_call_args[1][1]["data"])) + put_call_args = mock_put.call_args_list + self.assertIn(gwc_layer_url, put_call_args[0][0][0]) + self.assertEqual(expected_gwc_lyr_xml, str(put_call_args[0][1]["data"])) mock_logger.info.assert_called() mock_update_layer_styles.assert_called_with( @@ -3821,14 +3827,12 @@ def test_create_layer_create_sql_view_exception(self, mock_post, mock_logger): @mock.patch( "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.update_layer_styles" ) - @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") - def test_create_sql_view_layer_gwc_error(self, mock_post, mock_logger, _): - mock_post.side_effect = ( - [MockResponse(201)] - + [MockResponse(200)] - + ([MockResponse(500, "GWC exception")] * 300) - ) + @mock.patch("tethys_dataset_services.engines.geoserver_engine.log") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.put") + def 
test_create_sql_view_layer_gwc_error(self, mock_put, mock_logger, mock_post, _): + mock_post.side_effect = [MockResponse(201), MockResponse(200)] + mock_put.return_value = MockResponse(500, "GWC exception") store_id = f"{self.workspace_name}:foo" layer_name = self.layer_names[0] geometry_type = "Point" @@ -4520,7 +4524,7 @@ def test_create_layer_from_postgis_store_fail_request(self, mock_store): self.assertIn("There is no store named", r) - mock_store.assert_called_with(store_id, False) + mock_store.assert_called_with(store_id, debug=False) @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") @mock.patch( @@ -4551,7 +4555,7 @@ def test_create_layer_from_postgis_store_not_201(self, mock_store, mock_post): self.assertEqual(expected_url, post_call_args[0][1]["url"]) self.assertEqual(expected_headers, post_call_args[0][1]["headers"]) - mock_store.assert_called_with(store_id, False) + mock_store.assert_called_with(store_id, debug=False) if __name__ == "__main__": diff --git a/tethys_dataset_services/engines/geoserver_engine.py b/tethys_dataset_services/engines/geoserver_engine.py index 6687478..06d55c4 100644 --- a/tethys_dataset_services/engines/geoserver_engine.py +++ b/tethys_dataset_services/engines/geoserver_engine.py @@ -2989,18 +2989,17 @@ def delete_layer_group(self, layer_group_id): params={"recurse": "true"}, ) - # Accept 200 OK or 404 “already gone” - if response.status_code not in (200, 404): - # 404 with the expected text is OK (idempotent delete) - if not (response.status_code == 404 and - "No such layer group" in response.text): - msg = (f"Delete Layer Group Status Code {response.status_code}: " - f"{response.text}") - exc = requests.RequestException(msg, response=response) - log.error(exc) - raise exc - - return {"success": True, "result": None} + if response.status_code != 200: + if response.status_code == 404 and "No such layer group" in response.text: + pass + else: + msg = "Delete Layer Group Status Code {0}: {1}".format(response.status_code, response.text) + exception = requests.RequestException(msg, response=response) + log.error(exception) + raise exception + + response_dict = {'success': True, 'result': None} + return response_dict def delete_workspace(self, workspace_id, purge=False, recurse=False, debug=False): """ From b4585883becaed5c43ed2535257314f1764567d0 Mon Sep 17 00:00:00 2001 From: romer8 Date: Mon, 11 Aug 2025 13:16:06 -0600 Subject: [PATCH 16/20] added test for shapefile resource --- tests/unit_tests/test_geoserver_engine.py | 48 +++++++++++++++++++ .../engines/geoserver_engine.py | 2 +- 2 files changed, 49 insertions(+), 1 deletion(-) diff --git a/tests/unit_tests/test_geoserver_engine.py b/tests/unit_tests/test_geoserver_engine.py index 2d6a316..89d9c36 100644 --- a/tests/unit_tests/test_geoserver_engine.py +++ b/tests/unit_tests/test_geoserver_engine.py @@ -2539,6 +2539,54 @@ def test_get_gwc_endpoint(self): self.assertIn(".public.", response) self.assertIn("/gwc/rest/", response) + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.put") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") + def test_create_shapefile_resource_default_style_success(self, mock_catalog, mock_put): + # first PUT = shapefile upload (201), second PUT = set default style (200) + mock_put.side_effect = [MockResponse(201), MockResponse(200)] + mc = mock_catalog() + mc.get_default_workspace().name = self.workspace_name[0] + mc.get_resource.return_value = self.mock_resources[0] + + shapefile_base = 
os.path.join(self.files_root, "shapefile", "test") + store_id = self.store_names[0] + + resp = self.engine.create_shapefile_resource( + store_id=store_id, + shapefile_base=shapefile_base, + overwrite=True, + default_style="points", + ) + + self.assertTrue(resp["success"]) + # ensure we hit the second PUT with the layer XML body + self.assertEqual(2, len(mock_put.call_args_list)) + second_call = mock_put.call_args_list[1] + self.assertIn("", second_call.kwargs["data"]) + self.assertIn("points", second_call.kwargs["data"]) + + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.put") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") + def test_create_shapefile_resource_default_style_failure(self, mock_catalog, mock_put): + # first PUT = shapefile upload ok, second PUT = layer update fails + mock_put.side_effect = [MockResponse(201), MockResponse(500, text="bad", reason="Oops")] + mc = mock_catalog() + mc.get_default_workspace().name = self.workspace_name[0] + mc.get_resource.return_value = self.mock_resources[0] + + shapefile_base = os.path.join(self.files_root, "shapefile", "test") + store_id = self.store_names[0] + + resp = self.engine.create_shapefile_resource( + store_id=store_id, + shapefile_base=shapefile_base, + overwrite=True, + default_style="points", + ) + + self.assertFalse(resp["success"]) + self.assertIn("Oops(500): bad", resp["error"]) + def test_get_ows_endpoint(self): workspace = self.workspace_name response = self.engine.get_ows_endpoint(workspace, public=False) diff --git a/tethys_dataset_services/engines/geoserver_engine.py b/tethys_dataset_services/engines/geoserver_engine.py index 06d55c4..b0f5d12 100644 --- a/tethys_dataset_services/engines/geoserver_engine.py +++ b/tethys_dataset_services/engines/geoserver_engine.py @@ -2196,7 +2196,7 @@ def create_shapefile_resource(self, store_id, shapefile_base=None, shapefile_zip } self._handle_debug(layer_response_dict, debug) - return response_dict + return layer_response_dict # Wrap up successfully new_resource = self.catalog.get_resource( From 835e0d49511ff6412808ed4e28f22d5e00ea56dc Mon Sep 17 00:00:00 2001 From: romer8 Date: Mon, 11 Aug 2025 13:45:27 -0600 Subject: [PATCH 17/20] update to make 3.9 python work --- .github/workflows/unit_tests.yml | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml index 3ce19fe..bae29cc 100644 --- a/.github/workflows/unit_tests.yml +++ b/.github/workflows/unit_tests.yml @@ -30,6 +30,17 @@ jobs: uses: actions/setup-python@v5 with: python-version: ${{ matrix.py }} + + - name: macOS prerequisites for psycopg2 + if: startsWith(matrix.os, 'macos') && matrix.py == '3.9' + run: | + brew update + brew install libpq + echo "/opt/homebrew/opt/libpq/bin" >> $GITHUB_PATH + echo "LDFLAGS=-L/opt/homebrew/opt/libpq/lib" >> $GITHUB_ENV + echo "CPPFLAGS=-I/opt/homebrew/opt/libpq/include" >> $GITHUB_ENV + echo "PKG_CONFIG_PATH=/opt/homebrew/opt/libpq/lib/pkgconfig" >> $GITHUB_EN + - name: Install dependencies run: | python -m pip install --upgrade pip From 4a88526a287dffd6281df4b8a841c0347cc0f068 Mon Sep 17 00:00:00 2001 From: romer8 Date: Mon, 11 Aug 2025 13:51:00 -0600 Subject: [PATCH 18/20] GITHUB_ENV typo --- .github/workflows/unit_tests.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml index bae29cc..f3c4dd7 100644 --- a/.github/workflows/unit_tests.yml +++ 
b/.github/workflows/unit_tests.yml @@ -37,9 +37,10 @@ jobs: brew update brew install libpq echo "/opt/homebrew/opt/libpq/bin" >> $GITHUB_PATH + echo "PG_CONFIG=/opt/homebrew/opt/libpq/bin/pg_config" >> $GITHUB_ENV echo "LDFLAGS=-L/opt/homebrew/opt/libpq/lib" >> $GITHUB_ENV echo "CPPFLAGS=-I/opt/homebrew/opt/libpq/include" >> $GITHUB_ENV - echo "PKG_CONFIG_PATH=/opt/homebrew/opt/libpq/lib/pkgconfig" >> $GITHUB_EN + echo "PKG_CONFIG_PATH=/opt/homebrew/opt/libpq/lib/pkgconfig" >> $GITHUB_ENV - name: Install dependencies run: | From 1569e405f255143ae3f6a3f4671ecae41d21229a Mon Sep 17 00:00:00 2001 From: romer8 Date: Mon, 11 Aug 2025 14:04:03 -0600 Subject: [PATCH 19/20] removed 3.9 and added 3.13 --- .github/workflows/unit_tests.yml | 14 +------------- 1 file changed, 1 insertion(+), 13 deletions(-) diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml index f3c4dd7..6bdd7db 100644 --- a/.github/workflows/unit_tests.yml +++ b/.github/workflows/unit_tests.yml @@ -19,11 +19,10 @@ jobs: - windows - macos py: - # - "3.13" + - "3.13" - "3.12" - "3.11" - "3.10" - - "3.9" steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.py }} for test @@ -31,17 +30,6 @@ jobs: with: python-version: ${{ matrix.py }} - - name: macOS prerequisites for psycopg2 - if: startsWith(matrix.os, 'macos') && matrix.py == '3.9' - run: | - brew update - brew install libpq - echo "/opt/homebrew/opt/libpq/bin" >> $GITHUB_PATH - echo "PG_CONFIG=/opt/homebrew/opt/libpq/bin/pg_config" >> $GITHUB_ENV - echo "LDFLAGS=-L/opt/homebrew/opt/libpq/lib" >> $GITHUB_ENV - echo "CPPFLAGS=-I/opt/homebrew/opt/libpq/include" >> $GITHUB_ENV - echo "PKG_CONFIG_PATH=/opt/homebrew/opt/libpq/lib/pkgconfig" >> $GITHUB_ENV - - name: Install dependencies run: | python -m pip install --upgrade pip From 066221235a2678a86269cecbce3952bd94097290 Mon Sep 17 00:00:00 2001 From: romer8 Date: Wed, 13 Aug 2025 12:28:01 -0600 Subject: [PATCH 20/20] added gwc on sql_views creation and updated tests for gwc --- tests/e2e_tests/geoserver_engine_e2e_tests.py | 29 ++- tests/unit_tests/test_geoserver_engine.py | 242 +++++++++++++++++- .../engines/geoserver_engine.py | 123 ++++++--- 3 files changed, 339 insertions(+), 55 deletions(-) diff --git a/tests/e2e_tests/geoserver_engine_e2e_tests.py b/tests/e2e_tests/geoserver_engine_e2e_tests.py index 1d7e057..5078021 100644 --- a/tests/e2e_tests/geoserver_engine_e2e_tests.py +++ b/tests/e2e_tests/geoserver_engine_e2e_tests.py @@ -11,7 +11,7 @@ import random import string -from time import sleep +from time import time, sleep import unittest import os from sqlalchemy.engine import create_engine @@ -1174,6 +1174,8 @@ def test_create_sql_view_layer(self): srid=self.srid, sql=sql, default_style="point", + enable_gwc=True, + gwc_method="AUTO", ) self.assertTrue(response["success"]) @@ -1188,24 +1190,23 @@ def test_create_sql_view_layer(self): self.assertIn(feature_type_name, r["name"]) # TEST list_resources - - # Execute - response = self.geoserver_engine.list_resources() - - # Validate response object - self.assert_valid_response_object(response) + deadline = time() + 30 + listed = [] + while time() < deadline: + response = self.geoserver_engine.list_resources() + self.assert_valid_response_object(response) + if response["success"] and isinstance(response["result"], list): + listed = response["result"] + if feature_type_name in listed: + break + sleep(1) # Success self.assertTrue(response["success"]) - - # Extract Result - result = response["result"] - # Returns list - 
self.assertIsInstance(result, list) - + self.assertIsInstance(listed, list) # layer listed - self.assertIn(feature_type_name, result) + self.assertIn(feature_type_name, listed) # TEST get_resources diff --git a/tests/unit_tests/test_geoserver_engine.py b/tests/unit_tests/test_geoserver_engine.py index 89d9c36..329968f 100644 --- a/tests/unit_tests/test_geoserver_engine.py +++ b/tests/unit_tests/test_geoserver_engine.py @@ -3735,7 +3735,8 @@ def test_create_sql_view_layer( default_style = "points" self.engine.create_sql_view_layer( - store_id, layer_name, geometry_type, srid, sql, default_style + store_id, layer_name, geometry_type, srid, sql, default_style, + gwc_method="PUT" # force create path; uses mock_put and avoids extra POST ) # Validate endpoint calls @@ -3799,8 +3800,8 @@ def test_create_layer_create_feature_type_already_exists( mock_get_layer, mock_reload, ): - mock_put.return_value = MockResponse(200) - mock_post.return_value = MockResponse(500, "already exists") + mock_post.side_effect = [MockResponse(500, "already exists"), MockResponse(200)] + mock_put.return_value = MockResponse(200) # ensure no accidental failure if called mock_workspace().name = self.workspace_name store_id = 'foo' layer_name = self.layer_names[0] @@ -3810,7 +3811,8 @@ def test_create_layer_create_feature_type_already_exists( default_style = "points" self.engine.create_sql_view_layer( - store_id, layer_name, geometry_type, srid, sql, default_style + store_id, layer_name, geometry_type, srid, sql, default_style, + gwc_method="POST" ) # Validate endpoint calls @@ -3838,9 +3840,9 @@ def test_create_layer_create_feature_type_already_exists( self.assertEqual(expected_sql_xml, post_call_args[0][1]["data"]) # GWC Call - put_call_args = mock_put.call_args_list - self.assertIn(gwc_layer_url, put_call_args[0][0][0]) - self.assertEqual(expected_gwc_lyr_xml, str(put_call_args[0][1]["data"])) + post_call_args = mock_post.call_args_list + self.assertIn(gwc_layer_url, post_call_args[1][0][0]) + self.assertEqual(expected_gwc_lyr_xml, str(post_call_args[1][1]["data"])) mock_logger.info.assert_called() mock_update_layer_styles.assert_called_with( @@ -3890,14 +3892,236 @@ def test_create_sql_view_layer_gwc_error(self, mock_put, mock_logger, mock_post, with self.assertRaises(requests.RequestException) as error: self.engine.create_sql_view_layer( - store_id, layer_name, geometry_type, srid, sql, default_style + store_id, layer_name, geometry_type, srid, sql, default_style, + gwc_method="PUT" ) self.assertEqual( - "Create GWC Layer Status Code 500: GWC exception", str(error.exception) + "Create/Update GWC Layer Status Code 500: GWC exception", str(error.exception) ) mock_logger.error.assert_called() + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.reload") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.update_layer_styles") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.get") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.put") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") + def test_create_sql_view_layer_gwc_invalid_method( + self, mock_post, mock_put, mock_get, mock_update_layer_styles, mock_reload + ): + # Feature type creation succeeds, then invalid gwc_method triggers ValueError + mock_post.return_value = MockResponse(201) + store_id = f"{self.workspace_name}:foo" + layer_name = self.layer_names[0] + default_style = "points" + + with 
self.assertRaises(ValueError) as err: + self.engine.create_sql_view_layer( + store_id, layer_name, "Point", 4236, "SELECT * FROM foo", "points", + gwc_method="BAD" + ) + self.assertIn("gwc_method must be one of 'AUTO', 'POST', or 'PUT'", str(err.exception)) + # ensure we didn't try to probe or call GWC after the check + mock_get.assert_not_called() + mock_put.assert_not_called() + # FT creation happened + mock_post.assert_called_once() + mock_update_layer_styles.assert_called_with( + layer_id=f"{self.workspace_name}:{layer_name}", + default_style=default_style, + other_styles=None, + ) + mock_reload.assert_called() + + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.reload") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.update_layer_styles") + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_layer" + ) + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.get") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.put") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") + def test_create_sql_view_layer_gwc_auto_probe_exists_uses_post( + self, mock_post, mock_put, mock_get, mock_get_layer, mock_update_layer_styles, mock_reload + ): + # FT create (POST 201), then AUTO probe (GET 200) -> GWC POST (modify 200) + mock_post.side_effect = [MockResponse(201), MockResponse(200)] + mock_get.return_value = MockResponse(200) + store_id = f"{self.workspace_name}:foo" + layer_name = self.layer_names[0] + default_style = "points" + + self.engine.create_sql_view_layer( + store_id, layer_name, "Point", 4236, "SELECT * FROM foo", "points", + gwc_method="AUTO" + ) + + gwc_layer_url = "layers/{workspace}:{feature_name}.xml".format( + workspace=self.workspace_name, feature_name=layer_name + ) + # second POST call should be to GWC + post_calls = mock_post.call_args_list + self.assertIn(gwc_layer_url, post_calls[1][0][0]) + mock_put.assert_not_called() + mock_update_layer_styles.assert_called_with( + layer_id=f"{self.workspace_name}:{layer_name}", + default_style=default_style, + other_styles=None, + ) + mock_get_layer.assert_called() + mock_reload.assert_called() + + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.reload") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.update_layer_styles") + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_layer" + ) + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.get") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.put") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") + def test_create_sql_view_layer_gwc_auto_probe_missing_uses_put( + self, mock_post, mock_put, mock_get, mock_get_layer, mock_update_layer_styles, mock_reload + ): + # FT create (POST 201), then AUTO probe (GET 404) -> GWC PUT (create 200) + mock_post.side_effect = [MockResponse(201)] + mock_put.return_value = MockResponse(200) + mock_get.return_value = MockResponse(404) + store_id = f"{self.workspace_name}:foo" + layer_name = self.layer_names[0] + default_style = "points" + + self.engine.create_sql_view_layer( + store_id, layer_name, "Point", 4236, "SELECT * FROM foo", "points", + gwc_method="AUTO" + ) + + gwc_layer_url = "layers/{workspace}:{feature_name}.xml".format( + 
workspace=self.workspace_name, feature_name=layer_name + ) + put_calls = mock_put.call_args_list + self.assertIn(gwc_layer_url, put_calls[0][0][0]) + # only one POST (feature type), no GWC POST + self.assertEqual(len(mock_post.call_args_list), 1) + + mock_update_layer_styles.assert_called_with( + layer_id=f"{self.workspace_name}:{layer_name}", + default_style=default_style, + other_styles=None, + ) + mock_get_layer.assert_called() + mock_reload.assert_called() + + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.reload") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.update_layer_styles") + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_layer" + ) + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.get") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.put") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") + def test_create_sql_view_layer_gwc_auto_probe_unknown_status_uses_post( + self, mock_post, mock_put, mock_get, mock_get_layer, mock_update_layer_styles, mock_reload + ): + # FT create (POST 201), then AUTO probe (GET 500) -> GWC POST (modify 200) as safe default + mock_post.side_effect = [MockResponse(201), MockResponse(200)] + mock_get.return_value = MockResponse(500) + store_id = f"{self.workspace_name}:foo" + layer_name = self.layer_names[0] + default_style = "points" + + self.engine.create_sql_view_layer( + store_id, layer_name, "Point", 4236, "SELECT * FROM foo", "points", + gwc_method="AUTO" + ) + + gwc_layer_url = "layers/{workspace}:{feature_name}.xml".format( + workspace=self.workspace_name, feature_name=layer_name + ) + self.assertIn(gwc_layer_url, mock_post.call_args_list[1][0][0]) + mock_put.assert_not_called() + + mock_update_layer_styles.assert_called_with( + layer_id=f"{self.workspace_name}:{layer_name}", + default_style=default_style, + other_styles=None, + ) + mock_get_layer.assert_called() + mock_reload.assert_called() + + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.reload") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.update_layer_styles") + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_layer" + ) + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.get") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.put") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") + def test_create_sql_view_layer_gwc_auto_probe_exception_uses_post( + self, mock_post, mock_put, mock_get, mock_get_layer, mock_update_layer_styles, mock_reload + ): + # FT create (POST 201), then AUTO probe raises -> GWC POST (modify 200) + mock_post.side_effect = [MockResponse(201), MockResponse(200)] + mock_get.side_effect = Exception("probe failed") + store_id = f"{self.workspace_name}:foo" + layer_name = self.layer_names[0] + default_style = "points" + self.engine.create_sql_view_layer( + store_id, layer_name, "Point", 4236, "SELECT * FROM foo", "points", + gwc_method="AUTO" + ) + + gwc_layer_url = "layers/{workspace}:{feature_name}.xml".format( + workspace=self.workspace_name, feature_name=layer_name + ) + self.assertIn(gwc_layer_url, mock_post.call_args_list[1][0][0]) + mock_put.assert_not_called() + mock_update_layer_styles.assert_called_with( + 
layer_id=f"{self.workspace_name}:{layer_name}", + default_style=default_style, + other_styles=None, + ) + mock_get_layer.assert_called() + mock_reload.assert_called() + + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.reload") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.update_layer_styles") + @mock.patch( + "tethys_dataset_services.engines.geoserver_engine.GeoServerSpatialDatasetEngine.get_layer" + ) + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.put") + @mock.patch("tethys_dataset_services.engines.geoserver_engine.requests.post") + def test_create_sql_view_layer_gwc_put_fallbacks_to_post_when_exists( + self, mock_post, mock_put, mock_get_layer, mock_update_layer_styles, mock_reload + ): + # FT create (POST 201) + # GWC PUT returns 409 "already exists" -> code falls back to POST next loop -> 200 + mock_post.side_effect = [MockResponse(201), MockResponse(200)] + mock_put.side_effect = [MockResponse(409, "already exists")] + store_id = f"{self.workspace_name}:foo" + layer_name = self.layer_names[0] + default_style = "points" + + self.engine.create_sql_view_layer( + store_id, layer_name, "Point", 4236, "SELECT * FROM foo", "points", + gwc_method="PUT" + ) + + gwc_layer_url = "layers/{workspace}:{feature_name}.xml".format( + workspace=self.workspace_name, feature_name=layer_name + ) + # First PUT attempted once + self.assertIn(gwc_layer_url, mock_put.call_args_list[0][0][0]) + # Then POST used after fallback (second POST call overall) + self.assertIn(gwc_layer_url, mock_post.call_args_list[1][0][0]) + + mock_update_layer_styles.assert_called_with( + layer_id=f"{self.workspace_name}:{layer_name}", + default_style=default_style, + other_styles=None, + ) + mock_get_layer.assert_called() + mock_reload.assert_called() + @mock.patch("tethys_dataset_services.engines.geoserver_engine.GeoServerCatalog") def test_apply_changes_to_gs_object(self, mock_catalog): mc = mock_catalog() diff --git a/tethys_dataset_services/engines/geoserver_engine.py b/tethys_dataset_services/engines/geoserver_engine.py index b0f5d12..9cc3a83 100644 --- a/tethys_dataset_services/engines/geoserver_engine.py +++ b/tethys_dataset_services/engines/geoserver_engine.py @@ -1850,6 +1850,9 @@ def create_sql_view_layer( parameters=None, reload_public=False, debug=False, + *, + enable_gwc=True, + gwc_method="AUTO" ): """ Direct call to GeoServer REST API to create SQL View feature types and layers. @@ -1866,6 +1869,13 @@ def create_sql_view_layer( parameters: A list of parameter dictionaries { name, default_value, regex_validator }. reload_public: (bool, optional): Reload the catalog using the public endpoint. Defaults to False. debug (bool, optional): Pretty print the response dictionary to the console for debugging. Defaults to False. + enable_gwc (bool, keyword-only): If True, create/modify the GWC layer after the feature type is created. Default: True (backward compatible). + gwc_method (str, keyword-only): + One of {"AUTO", "POST", "PUT"}: + - "AUTO": probe and pick the correct method (POST=modify, PUT=create). + - "POST": force modify. + - "PUT": force create. + Default: "AUTO". 
""" # noqa: E501 # Process identifier workspace, store_name = self._process_identifier(store_id) @@ -1937,45 +1947,94 @@ def create_sql_view_layer( ) # GeoWebCache Settings - gwc_layer_path = os.path.join(self.XML_PATH, "gwc_layer_template.xml") - url = ( - self.get_gwc_endpoint(public=False) - + "layers/" - + workspace - + ":" - + layer_name - + ".xml" - ) - headers = {"Content-type": "text/xml"} - with open(gwc_layer_path, "r") as gwc_layer_file: - text = gwc_layer_file.read() - template = Template(text) - xml = template.render(context) - - retries_remaining = 300 - while retries_remaining > 0: - response = requests.put( - url, - headers=headers, - auth=(self.username, self.password), - data=xml, + if enable_gwc: + gwc_layer_path = os.path.join(self.XML_PATH, "gwc_layer_template.xml") + gwc_url = ( + self.get_gwc_endpoint(public=False) + + "layers/" + + workspace + + ":" + + layer_name + + ".xml" ) + gwc_headers = {"Content-type": "text/xml"} + with open(gwc_layer_path, "r") as gwc_layer_file: + text = gwc_layer_file.read() + template = Template(text) + xml = template.render(context) - if response.status_code == 200: - log.info("Successfully created GeoWebCache layer {}".format(layer_name)) - break + + # Decide method, aligned with current GWC REST: + # PUT => add new layer, POST => modify existing layer + method = (gwc_method or "AUTO").upper() + if method not in {"AUTO", "POST", "PUT"}: + raise ValueError("gwc_method must be one of 'AUTO', 'POST', or 'PUT'") + + if method == "AUTO": + try: + probe = requests.get( + gwc_url, + auth=(self.username, self.password), + headers={"Accept": "application/xml"}, + ) + # Exists? -> POST (modify). Missing? -> PUT (create). + if probe.status_code == 200: + method_to_use = "POST" + elif probe.status_code == 404: + method_to_use = "PUT" + else: + method_to_use = "POST" # safe default + except Exception: + method_to_use = "POST" # safe default if probe fails else: - log.warning( - "GWC DID NOT RETURN 200, but instead: {}. {}\n".format( - response.status_code, response.text + method_to_use = method + + retries_remaining = 3 + put_fallback_done = False + + while retries_remaining > 0: + if method_to_use == "PUT": + resp = requests.put( + gwc_url, + headers=gwc_headers, + auth=(self.username, self.password), + data=xml, ) - ) + ok = resp.status_code == 200 # docs show 200 on success + else: # POST (modify) + resp = requests.post( + gwc_url, + headers=gwc_headers, + auth=(self.username, self.password), + data=xml, + ) + ok = resp.status_code == 200 + + if ok: + log.info("Successfully applied GeoWebCache layer settings for %s", layer_name) + break + + # If trying PUT but the layer already exists, switch once to POST + if ( + method_to_use == "PUT" + and not put_fallback_done + and ( + resp.status_code in (405, 409) + or "already exists" in (resp.text or "").lower() + ) + ): + log.info("GWC layer %s already exists; switching to POST.", layer_name) + method_to_use = "POST" + put_fallback_done = True + continue + + log.warning("GWC returned %s. %s\n", resp.status_code, resp.text) retries_remaining -= 1 if retries_remaining == 0: - msg = "Create GWC Layer Status Code {0}: {1}".format( - response.status_code, response.text + msg = "Create/Update GWC Layer Status Code {0}: {1}".format( + resp.status_code, resp.text ) - exception = requests.RequestException(msg, response=response) + exception = requests.RequestException(msg, response=resp) log.error(exception) raise exception