diff --git a/.env_dev b/.env_dev index 34f85b41da6..6b1b5081339 100644 --- a/.env_dev +++ b/.env_dev @@ -37,9 +37,9 @@ GEODATABASE_URL=postgis://geonode:geonode@localhost:5432/geonode_data GEONODE_DB_CONN_MAX_AGE=0 GEONODE_DB_CONN_TOUT=5 DEFAULT_BACKEND_DATASTORE=datastore -BROKER_URL=amqp://admin:admin@localhost:5672// +BROKER_URL=amqp://guest:guest@localhost:5672// CELERY_BEAT_SCHEDULER=celery.beat:PersistentScheduler -ASYNC_SIGNALS=False +ASYNC_SIGNALS=True SITEURL=http://localhost:8000/ diff --git a/geonode/upload/api/serializer.py b/geonode/upload/api/serializer.py index 2150455178f..4657ddb9f3d 100644 --- a/geonode/upload/api/serializer.py +++ b/geonode/upload/api/serializer.py @@ -50,10 +50,11 @@ class Meta: ref_name = "OverwriteImporterSerializer" model = ResourceBase view_name = "importer_upload" - fields = ImporterSerializer.Meta.fields + ( - "overwrite_existing_layer", - "resource_pk", - ) + fields = ImporterSerializer.Meta.fields + ( + "overwrite_existing_layer", + "resource_pk", + ) + overwrite_existing_layer = serializers.BooleanField(required=True) resource_pk = serializers.IntegerField(required=True) diff --git a/geonode/upload/api/views.py b/geonode/upload/api/views.py index 5c33fc58804..53f86bab425 100644 --- a/geonode/upload/api/views.py +++ b/geonode/upload/api/views.py @@ -16,7 +16,6 @@ # along with this program. If not, see . 
# ######################################################################### -import ast import logging from urllib.parse import urljoin from django.conf import settings @@ -44,7 +43,7 @@ from geonode.upload.models import UploadParallelismLimit, UploadSizeLimit from geonode.upload.utils import UploadLimitValidator from geonode.upload.api.exceptions import HandlerException, ImportException -from geonode.upload.api.serializer import ImporterSerializer, OverwriteImporterSerializer +from geonode.upload.api.serializer import ImporterSerializer from geonode.upload.celery_tasks import import_orchestrator from geonode.upload.orchestrator import orchestrator from oauth2_provider.contrib.rest_framework import OAuth2Authentication diff --git a/geonode/upload/celery_tasks.py b/geonode/upload/celery_tasks.py index 8e2568aac8a..3eea77684ce 100644 --- a/geonode/upload/celery_tasks.py +++ b/geonode/upload/celery_tasks.py @@ -170,7 +170,6 @@ def import_resource(self, execution_id, /, handler_module_path, action, **kwargs _datastore.start_import(execution_id, **kwargs) """ - The orchestrator to proceed to the next step, should be called by the handler since the call to the orchestrator can changed based on the handler called. 
See the GPKG handler gpkg_next_step task """ @@ -753,7 +752,10 @@ def rollback(self, *args, **kwargs): ) handler = import_string(handler_module_path)() - handler.rollback(exec_id, rollback_from_step, action_to_rollback, *args, **kwargs) + if exec_object.input_params.get("overwrite_existing_layer"): + logger.warning("Rollback is skipped for the overwrite") + else: + handler.rollback(exec_id, rollback_from_step, action_to_rollback, *args, **kwargs) error = find_key_recursively(kwargs, "error") or "Some issue has occured, please check the logs" orchestrator.set_as_failed(exec_id, reason=error, delete_file=False) return exec_id, kwargs diff --git a/geonode/upload/handlers/common/metadata.py b/geonode/upload/handlers/common/metadata.py index aded6cdcddd..f3598c8cc54 100644 --- a/geonode/upload/handlers/common/metadata.py +++ b/geonode/upload/handlers/common/metadata.py @@ -76,7 +76,8 @@ def extract_params_from_data(_data, action=None): return { "dataset_title": _data.pop("dataset_title", None), "skip_existing_layers": _data.pop("skip_existing_layers", "False"), - "overwrite_existing_layer": _data.pop("overwrite_existing_layer", "False"), + "overwrite_existing_layer": _data.pop("overwrite_existing_layer", False), + "resource_pk": _data.pop("resource_pk", None), "store_spatial_file": _data.pop("store_spatial_files", "True"), "source": _data.pop("source", "resource_file_upload"), }, _data diff --git a/geonode/upload/handlers/common/raster.py b/geonode/upload/handlers/common/raster.py index 1c5938201cd..3e22287550e 100644 --- a/geonode/upload/handlers/common/raster.py +++ b/geonode/upload/handlers/common/raster.py @@ -129,7 +129,8 @@ def extract_params_from_data(_data, action=None): return { "skip_existing_layers": _data.pop("skip_existing_layers", "False"), - "overwrite_existing_layer": _data.pop("overwrite_existing_layer", "False"), + "overwrite_existing_layer": _data.pop("overwrite_existing_layer", False), + "resource_pk": _data.pop("resource_pk", None), 
"store_spatial_file": _data.pop("store_spatial_files", "True"), "source": _data.pop("source", "upload"), }, _data diff --git a/geonode/upload/handlers/common/vector.py b/geonode/upload/handlers/common/vector.py index a33f6249dba..c24abb2979b 100644 --- a/geonode/upload/handlers/common/vector.py +++ b/geonode/upload/handlers/common/vector.py @@ -139,7 +139,8 @@ def extract_params_from_data(_data, action=None): return { "skip_existing_layers": _data.pop("skip_existing_layers", "False"), -"overwrite_existing_layer": _data.pop("overwrite_existing_layer", "False"), +"overwrite_existing_layer": _data.pop("overwrite_existing_layer", False), +"resource_pk": _data.pop("resource_pk", None), "store_spatial_file": _data.pop("store_spatial_files", "True"), "source": _data.pop("source", "upload"), }, _data @@ -411,6 +412,13 @@ def _select_valid_layers(self, all_layers): return layers def find_alternate_by_dataset(self, _exec_obj, layer_name, should_be_overwritten): + if _exec_obj.input_params.get("resource_pk"): + dataset = Dataset.objects.filter(pk=_exec_obj.input_params.get("resource_pk")).first() + if not dataset: + raise ImportException("The dataset selected for the overwrite does not exist") + alternate = dataset.alternate.split(":") + return alternate[-1] + workspace = DataPublisher(None).workspace dataset_available = Dataset.objects.filter(alternate__iexact=f"{workspace.name}:{layer_name}") @@ -624,7 +632,6 @@ def overwrite_geonode_resource( delete_dataset_cache(dataset.alternate) # recalculate featuretype info - DataPublisher(str(self)).cat.recalculate_featuretype(dataset) set_geowebcache_invalidate_cache(dataset_alternate=dataset.alternate) dataset = resource_manager.update(dataset.uuid, instance=dataset, files=asset.location) diff --git a/geonode/upload/handlers/shapefile/handler.py b/geonode/upload/handlers/shapefile/handler.py index dfa9a31c983..e136186d3f4 100644 --- a/geonode/upload/handlers/shapefile/handler.py +++ b/geonode/upload/handlers/shapefile/handler.py
@@ -105,7 +105,8 @@ def extract_params_from_data(_data, action=None): additional_params = { "skip_existing_layers": _data.pop("skip_existing_layers", "False"), - "overwrite_existing_layer": _data.pop("overwrite_existing_layer", "False"), + "overwrite_existing_layer": _data.pop("overwrite_existing_layer", False), + "resource_pk": _data.pop("resource_pk", None), "store_spatial_file": _data.pop("store_spatial_files", "True"), "source": _data.pop("source", "upload"), } diff --git a/geonode/upload/handlers/shapefile/serializer.py b/geonode/upload/handlers/shapefile/serializer.py index ae6616ec939..cf7aa407436 100644 --- a/geonode/upload/handlers/shapefile/serializer.py +++ b/geonode/upload/handlers/shapefile/serializer.py @@ -56,9 +56,10 @@ class Meta: ref_name = "ShapeFileSerializer" model = ResourceBase view_name = "importer_upload" - fields = ShapeFileSerializer.Meta.fields + ( - "overwrite_existing_layer", - "resource_pk", - ) + fields = ShapeFileSerializer.Meta.fields + ( + "overwrite_existing_layer", + "resource_pk", + ) + overwrite_existing_layer = serializers.BooleanField(required=True) resource_pk = serializers.IntegerField(required=True) diff --git a/geonode/upload/orchestrator.py b/geonode/upload/orchestrator.py index 368862e3383..e8c69a57a55 100644 --- a/geonode/upload/orchestrator.py +++ b/geonode/upload/orchestrator.py @@ -66,7 +66,7 @@ def get_serializer(self, _data) -> serializers.Serializer: return _serializer logger.info("specific serializer not found, fallback on the default one") is_overwrite_flow = ast.literal_eval(_data.get("overwrite_existing_layer", "False")) - return (OverwriteImporterSerializer if is_overwrite_flow else ImporterSerializer) + return OverwriteImporterSerializer if is_overwrite_flow else ImporterSerializer def load_handler(self, module_path): try: