From a126899c65488b3a9f5831dbfe23ab129f71a674 Mon Sep 17 00:00:00 2001
From: Bento007
Date: Tue, 28 May 2024 17:00:23 -0700
Subject: [PATCH] undo can_publish

---
 .happy/terraform/modules/schema_migration/main.tf | 4 ++++
 backend/layers/processing/schema_migration.py     | 8 +++++---
 .../schema_migration/test_publish_and_cleanup.py  | 2 +-
 3 files changed, 10 insertions(+), 4 deletions(-)

diff --git a/.happy/terraform/modules/schema_migration/main.tf b/.happy/terraform/modules/schema_migration/main.tf
index bdee78f14768c..1bc5f4e06eef6 100644
--- a/.happy/terraform/modules/schema_migration/main.tf
+++ b/.happy/terraform/modules/schema_migration/main.tf
@@ -353,6 +353,10 @@ resource aws_sfn_state_machine sfn_schema_migration {
               "Name": "COLLECTION_VERSION_ID",
               "Value.$": "$.collection_version_id"
             },
+            {
+              "Name": "CAN_PUBLISH",
+              "Value.$": "$.can_publish"
+            },
             {
               "Name": "TASK_TOKEN",
               "Value.$": "$$.Task.Token"
diff --git a/backend/layers/processing/schema_migration.py b/backend/layers/processing/schema_migration.py
index 0c823e7764e56..fb279464c3a41 100644
--- a/backend/layers/processing/schema_migration.py
+++ b/backend/layers/processing/schema_migration.py
@@ -175,6 +175,7 @@ def collection_migrate(self, collection_id: str, collection_version_id: str, can
             # ^^^ The top level fields are used for handling error cases in the AWS SFN.
             "datasets": [
                 {
+                    "can_publish": str(can_publish),
                     "collection_id": collection_id,
                     "collection_url": collection_url,
                     "collection_version_id": private_collection_version_id,
@@ -196,7 +197,7 @@ def collection_migrate(self, collection_id: str, collection_version_id: str, can
         self._store_sfn_response("publish_and_cleanup", version.collection_id.id, response)
         return response
 
-    def publish_and_cleanup(self, collection_version_id: str) -> list:
+    def publish_and_cleanup(self, collection_version_id: str, can_publish: bool) -> list:
         errors = []
         collection_version = self.business_logic.get_collection_version(CollectionVersionId(collection_version_id))
         object_keys_to_delete = []
@@ -259,7 +260,7 @@ def publish_and_cleanup(self, collection_version_id: str) -> list:
         self.s3_provider.delete_files(self.artifact_bucket, object_keys_to_delete)
         if errors:
             self._store_sfn_response("report/errors", collection_version_id, errors)
-        elif extra_info["can_publish"] == "true":
+        elif can_publish:
             self.business_logic.publish_collection_version(collection_version.version_id)
         return errors
 
@@ -381,8 +382,9 @@ def migrate(self, step_name) -> bool:
             )
         elif step_name == "collection_publish":
             collection_version_id = os.environ["COLLECTION_VERSION_ID"]
+            can_publish = os.environ["CAN_PUBLISH"].lower() == "true"
             publish_and_cleanup = self.error_wrapper(self.publish_and_cleanup, collection_version_id)
-            response = publish_and_cleanup(collection_version_id=collection_version_id)
+            response = publish_and_cleanup(collection_version_id=collection_version_id, can_publish=can_publish)
         elif step_name == "report":
             response = self.report()
         self.logger.info("output", extra={"response": response})
diff --git a/tests/unit/processing/schema_migration/test_publish_and_cleanup.py b/tests/unit/processing/schema_migration/test_publish_and_cleanup.py
index bc2ef2c0fe2ca..06cc3f347412f 100644
--- a/tests/unit/processing/schema_migration/test_publish_and_cleanup.py
+++ b/tests/unit/processing/schema_migration/test_publish_and_cleanup.py
@@ -135,7 +135,7 @@ def test_can_not_publish(self, mock_json, local_schema_migrate):
             ]
         )
         local_schema_migrate.business_logic.get_collection_version.return_value = collection_version
-        errors = local_schema_migrate.publish_and_cleanup(collection_version.version_id.id)
+        errors = local_schema_migrate.publish_and_cleanup(collection_version.version_id.id, False)
         assert errors == []
         local_schema_migrate.business_logic.publish_collection_version.assert_not_called()
         local_schema_migrate.s3_provider.delete_files.assert_any_call(
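
Note for reviewers: below is a minimal, runnable sketch (not part of this patch) of the flag handling being restored here. FakeBusinessLogic and run_publish_step are hypothetical stand-ins for the real business_logic object and step runner. The Step Functions task forwards CAN_PUBLISH as a string environment variable, migrate() parses it with os.environ["CAN_PUBLISH"].lower() == "true", and publish_and_cleanup only publishes when the flag is true and no errors were collected.

import os
from typing import List


class FakeBusinessLogic:
    """Illustrative stand-in for the real business_logic object (not in the codebase)."""

    def __init__(self) -> None:
        self.published: List[str] = []

    def publish_collection_version(self, version_id: str) -> None:
        self.published.append(version_id)


def publish_and_cleanup(business_logic: FakeBusinessLogic, collection_version_id: str, can_publish: bool) -> list:
    # Simplified shape of the gating logic in this patch: publish only when no
    # errors were collected and the can_publish flag is True.
    errors: list = []
    # ... dataset checks and artifact cleanup happen here in the real method ...
    if errors:
        pass  # the real code stores the errors for the report step instead of publishing
    elif can_publish:
        business_logic.publish_collection_version(collection_version_id)
    return errors


def run_publish_step(business_logic: FakeBusinessLogic) -> list:
    # Same string-to-bool parsing the patch reinstates; any value other than
    # "true" (case-insensitive) disables publishing.
    can_publish = os.environ["CAN_PUBLISH"].lower() == "true"
    collection_version_id = os.environ["COLLECTION_VERSION_ID"]
    return publish_and_cleanup(business_logic, collection_version_id, can_publish)


if __name__ == "__main__":
    # The SFN input is serialized, so the environment variable arrives as a string.
    os.environ["CAN_PUBLISH"] = "False"
    os.environ["COLLECTION_VERSION_ID"] = "example-version-id"
    bl = FakeBusinessLogic()
    assert run_publish_step(bl) == []
    assert bl.published == []  # "False" parses to can_publish=False, so nothing is published

Because the flag crosses the state machine boundary as a string, the explicit .lower() == "true" comparison keeps inputs such as "True", "true", and "false" behaving predictably rather than relying on Python truthiness of a non-empty string.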