From 3ceb302a0e6b5c355aa7cab680e4594dec2c6c38 Mon Sep 17 00:00:00 2001 From: Sushil Tiwari Date: Wed, 10 Sep 2025 13:08:08 +0545 Subject: [PATCH 1/9] feat(test): Add e2e testing for validate project and tutorial --- apps/common/utils.py | 10 + apps/project/tests/e2e_create_project_test.py | 566 ++++++++++++++++++ 2 files changed, 576 insertions(+) create mode 100644 apps/project/tests/e2e_create_project_test.py diff --git a/apps/common/utils.py b/apps/common/utils.py index 2c28bae1..fd631470 100644 --- a/apps/common/utils.py +++ b/apps/common/utils.py @@ -1,3 +1,6 @@ +import base64 +import gzip +import json import typing from django.core.files.storage import FileSystemStorage @@ -37,3 +40,10 @@ def remove_object_keys(obj: typing.Any, keys_to_ignore: list[str] | set[str]): for item in obj: remove_object_keys(item, keys_to_ignore) return obj + + +def decode_tasks(encoded_task: str) -> list[dict[str, typing.Any]]: + """Decode compressed task string back into list of dicts.""" + compressed_bytes = base64.b64decode(encoded_task) + json_bytes = gzip.decompress(compressed_bytes) + return json.loads(json_bytes.decode("utf-8")) diff --git a/apps/project/tests/e2e_create_project_test.py b/apps/project/tests/e2e_create_project_test.py new file mode 100644 index 00000000..417cad97 --- /dev/null +++ b/apps/project/tests/e2e_create_project_test.py @@ -0,0 +1,566 @@ +import typing +from datetime import datetime +from pathlib import Path + +import json5 +from django.conf import settings +from ulid import ULID + +from apps.common.utils import decode_tasks, remove_object_keys +from apps.contributor.factories import ContributorUserFactory +from apps.user.factories import UserFactory +from main.config import Config +from main.tests import TestCase + + +class TestValidateProjectE2E(TestCase): + class Mutation: + CREATE_PROJECT = """ + mutation CreateProject($data: ProjectCreateInput!) { + createProject(data: $data) { + ... 
on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on ProjectTypeMutationResponseType { + errors + ok + result { + id + firebaseId + } + } + } + } + """ + + UPDATE_PROJECT = """ + mutation UpdateProject($pk: ID!, $data: ProjectUpdateInput!) { + updateProject(pk: $pk, data: $data) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on ProjectTypeMutationResponseType { + errors + ok + result { + id + } + } + } + } + """ + + UPLOAD_PROJECT_ASSET = """ + mutation CreateProjectAsset($data: ProjectAssetCreateInput!) { + createProjectAsset(data: $data) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on ProjectAssetTypeMutationResponseType { + errors + ok + result { + id + } + } + } + } + """ + + UPDATE_PROCESSED_PROJECT = """ + mutation UpdateProcessedProject($pk: ID!, $data: ProcessedProjectUpdateInput!) { + updateProcessedProject(pk: $pk, data: $data) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on ProjectTypeMutationResponseType { + errors + ok + result { + id + } + } + } + } + """ + + UPDATE_PROJECT_STATUS = """ + mutation UpdateProjectStatus($pk: ID!, $data: ProjectStatusUpdateInput!) { + updateProjectStatus(pk: $pk, data: $data) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on ProjectTypeMutationResponseType { + errors + ok + result { + id + status + } + } + } + } + """ + + CREATE_ORGANIZATION = """ + mutation CreateOrganization($data: OrganizationCreateInput!) { + createOrganization(data: $data) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on OrganizationTypeMutationResponseType { + errors + ok + result { + id + firebaseId + } + } + } + } + """ + + CREATE_TUTORIAL = """ + mutation CreateTutorial($data: TutorialCreateInput!) { + createTutorial(data: $data) { + ... 
on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on TutorialTypeMutationResponseType { + errors + ok + result { + id + clientId + projectId + firebaseId + } + } + } + } + """ + + UPDATE_TUTORIAL = """ + mutation UpdateTutorial($data: TutorialUpdateInput!, $pk: ID!) { + updateTutorial(data: $data, pk: $pk) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on TutorialTypeMutationResponseType { + errors + ok + result { + id + } + } + } + } + """ + + UPDATE_TUTORIAL_STATUS = """ + mutation UpdateTutorialStatus($data: TutorialStatusUpdateInput!, $pk: ID!) { + updateTutorialStatus(data: $data, pk: $pk) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on TutorialTypeMutationResponseType { + errors + ok + result { + id + status + } + } + } + } + """ + + @typing.override + @classmethod + def setUpClass(cls): + super().setUpClass() + cls.firebase_helper = Config.FIREBASE_HELPER + + cls.contributor_user = ContributorUserFactory.create( + username="Ram Bahadur", + ) + + cls.user = UserFactory.create( + contributor_user=cls.contributor_user, + ) + cls.user_resource_kwargs = dict( + created_by=cls.user, + modified_by=cls.user, + ) + + def test_validate_project_e2e(self): + self._test_project( + "validate", + "assets/tests/projects/validate/project_data.json5", + ) + # TODO(susilnem): Add street data here too + # TODO(susilnem): Add more test with filters + + def _test_project(self, projectKey: str, filename: str): + self.force_login(self.user) + + # Load test data file + full_path = Path(settings.BASE_DIR, filename) + with full_path.open("r", encoding="utf-8") as f: + test_data = json5.load(f) + + # Define full path for image and AOI files + image_filename = Path(settings.BASE_DIR) / test_data["assets"]["image"] + aoi_geometry_filename = Path(settings.BASE_DIR) / test_data["assets"]["aoi"] + + # Load Project data initially. 
+ create_project_data = test_data["create_project"] + + # Create an organization and attach to project + create_organization_data = test_data["create_organization"] + with self.captureOnCommitCallbacks(execute=True): + organization_content = self.query_check( + self.Mutation.CREATE_ORGANIZATION, + variables={"data": create_organization_data}, + ) + + organization_response = organization_content["data"]["createOrganization"] + assert organization_response is not None, "Organization create response is None" + assert organization_response["ok"] + + organization_id = organization_response["result"]["id"] + organization_fb_id = organization_response["result"]["firebaseId"] + + # CHECK ORGANIZATION in firebase + + organization_fb_ref = self.firebase_helper.ref(f"/v2/organisations/{organization_fb_id}") + organization_fb_data = organization_fb_ref.get() + + # Check organization in firebase + assert organization_fb_data is not None, "organization in firebase is None" + assert isinstance(organization_fb_data, dict), "organization in firebase should be a dictionary" + + assert organization_fb_data == test_data["expected_organization_data"], ( + "Difference found for organization data in firebase." 
+ ) + + # Create project + create_project_data["requestingOrganization"] = organization_id + with self.captureOnCommitCallbacks(execute=True): + project_content = self.query_check( + self.Mutation.CREATE_PROJECT, + variables={"data": create_project_data}, + ) + + project_response = project_content["data"]["createProject"] + assert project_response is not None, "Project create response is None" + assert project_response["ok"] + + project_id = project_response["result"]["id"] + project_fb_id = project_response["result"]["firebaseId"] + project_client_id = create_project_data["clientId"] + + # Create Image Asset for cover image + image_asset_data = { + "clientId": project_client_id, + "inputType": "COVER_IMAGE", + "project": project_id, + } + with image_filename.open("rb") as img_file: + image_content = self.query_check( + self.Mutation.UPLOAD_PROJECT_ASSET, + variables={"data": image_asset_data}, + files={"imageFile": img_file}, + map={"imageFile": ["variables.data.file"]}, + ) + image_response = image_content["data"]["createProjectAsset"] + assert image_response is not None, "Image create response is None" + assert image_response["ok"] + image_id = image_response["result"]["id"] + + # Create GeoJSON Asset for AOI Geometry + aoi_asset_data = { + "clientId": str(ULID()), + "inputType": "AOI_GEOMETRY", + "project": project_id, + } + with aoi_geometry_filename.open("rb") as geo_file: + aoi_content = self.query_check( + self.Mutation.UPLOAD_PROJECT_ASSET, + variables={"data": aoi_asset_data}, + files={"geoFile": geo_file}, + map={"geoFile": ["variables.data.file"]}, + ) + aoi_response = aoi_content["data"]["createProjectAsset"] + assert aoi_response is not None, "AOI create response is None" + assert aoi_response["ok"] + aoi_id = aoi_response["result"]["id"] + + # Update project + update_project_data = test_data["update_project"] + update_project_data["image"] = image_id + update_project_data["projectTypeSpecifics"][projectKey]["objectSource"]["aoiGeometry"] = aoi_id + 
update_project_data["requestingOrganization"] = organization_id + with self.captureOnCommitCallbacks(execute=True): + update_content = self.query_check( + self.Mutation.UPDATE_PROJECT, + variables={"pk": project_id, "data": update_project_data}, + ) + update_response = update_content["data"]["updateProject"] + assert update_response["ok"], update_response["errors"] + assert update_response is not None, "Project update response is None" + + # Process project + process_project_data = { + "clientId": project_client_id, + "status": "MARKED_AS_READY", + } + with self.captureOnCommitCallbacks(execute=True): + process_project_content = self.query_check( + self.Mutation.UPDATE_PROJECT_STATUS, + variables={"pk": project_id, "data": process_project_data}, + ) + process_project_response = process_project_content["data"]["updateProjectStatus"] + assert process_project_response is not None, "Project mark as ready response is None" + assert process_project_response["ok"], process_project_response["errors"] + assert process_project_response["result"]["status"] == "MARKED_AS_READY", "Project should be marked as ready" + + # Load Tutorial data initially. 
+ create_tutorial_data = test_data["create_tutorial"] + create_tutorial_data["project"] = project_id + with self.captureOnCommitCallbacks(execute=True): + tutorial_content = self.query_check( + self.Mutation.CREATE_TUTORIAL, + variables={"data": create_tutorial_data}, + ) + + tutorial_response = tutorial_content["data"]["createTutorial"] + assert tutorial_response is not None, "Tutorial create response is None" + assert tutorial_response["ok"] + + tutorial_id = tutorial_response["result"]["id"] + tutorial_fb_id = tutorial_response["result"]["firebaseId"] + tutorial_client_id = create_tutorial_data["clientId"] + + # Update Tutorial + with self.captureOnCommitCallbacks(execute=True): + update_tutorial_content = self.query_check( + query=self.Mutation.UPDATE_TUTORIAL, + variables={ + "data": test_data["update_tutorial"], + "pk": tutorial_id, + }, + ) + update_tutorial_response = update_tutorial_content["data"]["updateTutorial"] + assert update_tutorial_response is not None, "Tutorial update response is None" + assert update_tutorial_response["ok"], update_tutorial_response["errors"] + assert update_tutorial_response is not None, "Tutorial update response is None" + + # Publish Tutorial + publish_tutorial_data = { + "clientId": tutorial_client_id, + "status": "PUBLISHED", + } + with self.captureOnCommitCallbacks(execute=True): + publish_tutorial_content = self.query_check( + self.Mutation.UPDATE_TUTORIAL_STATUS, + variables={"pk": tutorial_id, "data": publish_tutorial_data}, + ) + publish_tutorial_response = publish_tutorial_content["data"]["updateTutorialStatus"] + assert publish_tutorial_response["ok"], publish_tutorial_response["errors"] + assert publish_tutorial_response is not None, "Processed tutorial publish response is None" + assert publish_tutorial_response["result"]["status"] == "PUBLISHED", "tutorial should be published" + + # CHECK TUTORIAL, GROUP AND TASK CREATED IN FIREBASE + + tutorial_fb_ref = self.firebase_helper.ref(f"/v2/projects/{tutorial_fb_id}") 
+ tutorial_fb_data = tutorial_fb_ref.get() + + # Check tutorial in firebase + assert tutorial_fb_data is not None, "Tutorial in firebase is None" + assert isinstance(tutorial_fb_data, dict), "Tutorial in firebase should be a dictionary" + assert tutorial_fb_data["projectId"] == tutorial_fb_id, "Field 'projectId' should match firebaseId" + + ignored_tutorial_keys = {"projectId", "tutorialDraftId"} + filtered_tutorial_actual = remove_object_keys(tutorial_fb_data, ignored_tutorial_keys) + filtered_tutorial_expected = remove_object_keys(test_data["expected_tutorial_data"], ignored_tutorial_keys) + assert filtered_tutorial_actual == filtered_tutorial_expected, "Difference found for tutorial data in firebase." + + # Check group in firebase + tutorial_groups_fb_ref = self.firebase_helper.ref(f"/v2/groups/{tutorial_fb_id}/") + tutorial_groups_fb_data = tutorial_groups_fb_ref.get() + + if tutorial_groups_fb_data: + for group in iter(tutorial_groups_fb_data.values()): # type: ignore[reportAttributeAccessIssue] + assert group["projectId"] == tutorial_fb_id, "Field 'tutorialId' of each group should match firebaseId" + + ignored_group_keys = {"projectId"} + filtered_group_actual = remove_object_keys(tutorial_groups_fb_data, ignored_group_keys) + filtered_group_expected = remove_object_keys(test_data["expected_tutorial_groups_data"], ignored_tutorial_keys) + assert filtered_group_actual == filtered_group_expected, "Difference found for tutorial group data in firebase." 
+ + # Check tutorial tasks in firebase + tutorial_tasks_ref = self.firebase_helper.ref(f"/v2/tasks/{tutorial_fb_id}/") + tutorial_task_fb_data = tutorial_tasks_ref.get() + + ignored_task_keys: set[str] = {"projectId", "geometry"} + # TODO(susilnem): geometry should be present + sanitized_tasks_actual: list[dict[str, typing.Any]] = [] + sanitized_tasks_expected: list[dict[str, typing.Any]] = [] + + for group in iter(tutorial_task_fb_data.values()): # type: ignore[reportAttributeAccessIssue] + for task_fb in decode_tasks(group): + sanitized_tasks_actual.append(remove_object_keys(task_fb, ignored_task_keys)) # type: ignore[reportGeneralTypeIssues] + + for group in iter(test_data["expected_tutorial_tasks_data"].values()): + for task in decode_tasks(group): + sanitized_tasks_expected.append(remove_object_keys(task, ignored_task_keys)) # type: ignore[reportGeneralTypeIssues] + + # Sorting and comparing tasks + sanitized_tasks_actual_sorted = sorted(sanitized_tasks_actual, key=lambda t: t["taskId"]) + sanitized_tasks_expected_sorted = sorted(sanitized_tasks_expected, key=lambda t: t["taskId"]) + + assert sanitized_tasks_actual_sorted == sanitized_tasks_expected_sorted, ( + "Differences found between expected and actual tasks on tutorial in firebase." 
+ ) + + # Update processed project + update_processed_project_data = test_data["update_processed_project"] + update_processed_project_data["tutorial"] = tutorial_id + update_processed_project_data["requestingOrganization"] = organization_id + with self.captureOnCommitCallbacks(execute=True): + update_processed_project_content = self.query_check( + self.Mutation.UPDATE_PROCESSED_PROJECT, + variables={"pk": project_id, "data": update_processed_project_data}, + ) + update_processed_response = update_processed_project_content["data"]["updateProcessedProject"] + assert update_processed_response["ok"], update_processed_response["errors"] + assert update_processed_response is not None, "Processed project update response is None" + + # Publish project + publish_project_data = { + "clientId": project_client_id, + "status": "PUBLISHED", + } + with self.captureOnCommitCallbacks(execute=True): + publish_project_content = self.query_check( + self.Mutation.UPDATE_PROJECT_STATUS, + variables={"pk": project_id, "data": publish_project_data}, + ) + publish_project_response = publish_project_content["data"]["updateProjectStatus"] + assert publish_project_response["ok"], publish_project_response["errors"] + assert publish_project_response is not None, "Processed project publish response is None" + assert publish_project_response["result"]["status"] == "PUBLISHED", "Project should be published" + + # CHECK PROJECT, GROUP AND TASK CREATED IN FIREBASE + + project_fb_ref = self.firebase_helper.ref(f"/v2/projects/{project_fb_id}") + project_fb_data = project_fb_ref.get() + + # Check project in firebase + # tutorial.refresh_from_db() + assert project_fb_data is not None, "Project in firebase is None" + assert isinstance(project_fb_data, dict), "Project in firebase should be a dictionary" + assert project_fb_data["created"] is not None, "Field 'created' should be defined" + assert datetime.fromisoformat(project_fb_data["created"]), "Field 'created' should be a timestamp" + assert 
project_fb_data["projectId"] == project_fb_id, "Field 'projectId' should match firebaseId" + assert project_fb_data["tutorialId"] == tutorial_fb_id, "Field 'tutorialId' should match tutorial's firebaseId" + assert project_fb_data["createdBy"] == self.contributor_user.firebase_id, ( + "Field 'createdBy' should match contributor user's firebaseId" + ) + + ignored_project_keys = {"created", "createdBy", "projectId", "tutorialId"} + filtered_project_actual = remove_object_keys(project_fb_data, ignored_project_keys) + filtered_project_expected = remove_object_keys(test_data["expected_project_data"], ignored_project_keys) + assert filtered_project_actual == filtered_project_expected, "Difference found for project data in firebase." + + # Check group in firebase + groups_fb_ref = self.firebase_helper.ref(f"/v2/groups/{project_fb_id}/") + groups_fb_data = groups_fb_ref.get() + + if groups_fb_data: + for group in iter(groups_fb_data.values()): # type: ignore[reportAttributeAccessIssue] + assert group["projectId"] == project_fb_id, "Field 'projectId' of each group should match firebaseId" + + ignored_group_keys = {"projectId"} + filtered_group_actual = remove_object_keys(groups_fb_data, ignored_group_keys) + filtered_group_expected = remove_object_keys(test_data["expected_project_groups_data"], ignored_project_keys) + assert filtered_group_actual == filtered_group_expected, "Difference found for group data on project in firebase." 
+ + # Check tasks in firebase + project_tasks_ref = self.firebase_helper.ref(f"/v2/tasks/{project_fb_id}/") + project_tasks_fb_data = project_tasks_ref.get() + + ignored_task_keys: set[str] = {"projectId"} + sanitized_tasks_actual: list[dict[str, typing.Any]] = [] + sanitized_tasks_expected: list[dict[str, typing.Any]] = [] + + for group in iter(project_tasks_fb_data.values()): # type: ignore[reportAttributeAccessIssue] + for task_fb in decode_tasks(group): + sanitized_tasks_actual.append(remove_object_keys(task_fb, ignored_task_keys)) # type: ignore[reportGeneralTypeIssues] + + for group in iter(test_data["expected_project_tasks_data"].values()): + for task in decode_tasks(group): + sanitized_tasks_expected.append(remove_object_keys(task, ignored_task_keys)) # type: ignore[reportGeneralTypeIssues] + + # Sorting and comparing tasks + sanitized_tasks_actual_sorted = sorted(sanitized_tasks_actual, key=lambda t: t["taskId"]) + sanitized_tasks_expected_sorted = sorted(sanitized_tasks_expected, key=lambda t: t["taskId"]) + + assert sanitized_tasks_actual_sorted == sanitized_tasks_expected_sorted, ( + "Differences found between expected and actual tasks in firebase." 
+ ) From de2fcce6ecf639485fea314b83999ab7b895179d Mon Sep 17 00:00:00 2001 From: Sushil Tiwari Date: Wed, 10 Sep 2025 16:17:05 +0545 Subject: [PATCH 2/9] feat(test): Add e2e data for the street project --- .../tests/e2e_create_street_project_test.py | 563 ++++++++++++++++++ ...py => e2e_create_validate_project_test.py} | 59 +- assets | 2 +- firebase | 2 +- 4 files changed, 598 insertions(+), 28 deletions(-) create mode 100644 apps/project/tests/e2e_create_street_project_test.py rename apps/project/tests/{e2e_create_project_test.py => e2e_create_validate_project_test.py} (93%) diff --git a/apps/project/tests/e2e_create_street_project_test.py b/apps/project/tests/e2e_create_street_project_test.py new file mode 100644 index 00000000..8becded3 --- /dev/null +++ b/apps/project/tests/e2e_create_street_project_test.py @@ -0,0 +1,563 @@ +import typing +from datetime import datetime +from pathlib import Path + +import json5 +from django.conf import settings +from ulid import ULID + +from apps.common.utils import decode_tasks, remove_object_keys +from apps.contributor.factories import ContributorUserFactory +from apps.tutorial.factories import TutorialFactory +from apps.user.factories import UserFactory +from main.config import Config +from main.tests import TestCase + + +class TestStreetProjectE2E(TestCase): + class Mutation: + CREATE_PROJECT = """ + mutation CreateProject($data: ProjectCreateInput!) { + createProject(data: $data) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on ProjectTypeMutationResponseType { + errors + ok + result { + id + firebaseId + } + } + } + } + """ + + UPDATE_PROJECT = """ + mutation UpdateProject($pk: ID!, $data: ProjectUpdateInput!) { + updateProject(pk: $pk, data: $data) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... 
on ProjectTypeMutationResponseType { + errors + ok + result { + id + } + } + } + } + """ + + UPLOAD_PROJECT_ASSET = """ + mutation CreateProjectAsset($data: ProjectAssetCreateInput!) { + createProjectAsset(data: $data) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on ProjectAssetTypeMutationResponseType { + errors + ok + result { + id + } + } + } + } + """ + + UPDATE_PROCESSED_PROJECT = """ + mutation UpdateProcessedProject($pk: ID!, $data: ProcessedProjectUpdateInput!) { + updateProcessedProject(pk: $pk, data: $data) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on ProjectTypeMutationResponseType { + errors + ok + result { + id + } + } + } + } + """ + + UPDATE_PROJECT_STATUS = """ + mutation UpdateProjectStatus($pk: ID!, $data: ProjectStatusUpdateInput!) { + updateProjectStatus(pk: $pk, data: $data) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on ProjectTypeMutationResponseType { + errors + ok + result { + id + status + } + } + } + } + """ + + CREATE_ORGANIZATION = """ + mutation CreateOrganization($data: OrganizationCreateInput!) { + createOrganization(data: $data) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on OrganizationTypeMutationResponseType { + errors + ok + result { + id + firebaseId + } + } + } + } + """ + + CREATE_TUTORIAL = """ + mutation CreateTutorial($data: TutorialCreateInput!) { + createTutorial(data: $data) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on TutorialTypeMutationResponseType { + errors + ok + result { + id + clientId + projectId + firebaseId + } + } + } + } + """ + + UPDATE_TUTORIAL = """ + mutation UpdateTutorial($data: TutorialUpdateInput!, $pk: ID!) { + updateTutorial(data: $data, pk: $pk) { + ... 
on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on TutorialTypeMutationResponseType { + errors + ok + result { + id + } + } + } + } + """ + + UPDATE_TUTORIAL_STATUS = """ + mutation UpdateTutorialStatus($data: TutorialStatusUpdateInput!, $pk: ID!) { + updateTutorialStatus(data: $data, pk: $pk) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on TutorialTypeMutationResponseType { + errors + ok + result { + id + status + } + } + } + } + """ + + @typing.override + @classmethod + def setUpClass(cls): + super().setUpClass() + cls.firebase_helper = Config.FIREBASE_HELPER + + cls.contributor_user = ContributorUserFactory.create( + username="Ram Bahadur", + ) + + cls.user = UserFactory.create( + contributor_user=cls.contributor_user, + ) + cls.user_resource_kwargs = dict( + created_by=cls.user, + modified_by=cls.user, + ) + + def test_street_project_e2e(self): + self._test_project( + "assets/tests/projects/street/project_data.json5", + ) + + # TODO(susilnem): Add more test with filters + + def _test_project(self, filename: str): + self.force_login(self.user) + + # Load test data file + full_path = Path(settings.BASE_DIR, filename) + with full_path.open("r", encoding="utf-8") as f: + test_data = json5.load(f) + + # Define full path for image and AOI files + image_filename = Path(settings.BASE_DIR) / test_data["assets"]["image"] + aoi_geometry_filename = Path(settings.BASE_DIR) / test_data["assets"]["aoi"] + + # Load Project data initially. 
+ create_project_data = test_data["create_project"] + + # Create an organization and attach to project + create_organization_data = test_data["create_organization"] + with self.captureOnCommitCallbacks(execute=True): + organization_content = self.query_check( + self.Mutation.CREATE_ORGANIZATION, + variables={"data": create_organization_data}, + ) + + organization_response = organization_content["data"]["createOrganization"] + assert organization_response is not None, "Organization create response is None" + assert organization_response["ok"] + + organization_id = organization_response["result"]["id"] + organization_fb_id = organization_response["result"]["firebaseId"] + + # CHECK ORGANIZATION in firebase + + organization_fb_ref = self.firebase_helper.ref(f"/v2/organisations/{organization_fb_id}") + organization_fb_data = organization_fb_ref.get() + + # Check organization in firebase + assert organization_fb_data is not None, "organization in firebase is None" + assert isinstance(organization_fb_data, dict), "organization in firebase should be a dictionary" + + assert organization_fb_data == test_data["expected_organization_data"], ( + "Difference found for organization data in firebase." 
+ ) + + # Create project + create_project_data["requestingOrganization"] = organization_id + with self.captureOnCommitCallbacks(execute=True): + project_content = self.query_check( + self.Mutation.CREATE_PROJECT, + variables={"data": create_project_data}, + ) + + project_response = project_content["data"]["createProject"] + assert project_response is not None, "Project create response is None" + assert project_response["ok"], project_response["errors"] + + project_id = project_response["result"]["id"] + project_fb_id = project_response["result"]["firebaseId"] + project_client_id = create_project_data["clientId"] + + # Create Image Asset for cover image + image_asset_data = { + "clientId": project_client_id, + "inputType": "COVER_IMAGE", + "project": project_id, + } + with image_filename.open("rb") as img_file: + image_content = self.query_check( + self.Mutation.UPLOAD_PROJECT_ASSET, + variables={"data": image_asset_data}, + files={"imageFile": img_file}, + map={"imageFile": ["variables.data.file"]}, + ) + image_response = image_content["data"]["createProjectAsset"] + assert image_response is not None, "Image create response is None" + assert image_response["ok"] + image_id = image_response["result"]["id"] + + # Update project + update_project_data = test_data["update_project"] + update_project_data["requestingOrganization"] = organization_id + update_project_data["image"] = image_id + + # Create GeoJSON Asset for AOI Geometry + aoi_asset_data = { + "clientId": str(ULID()), + "inputType": "AOI_GEOMETRY", + "project": project_id, + } + with aoi_geometry_filename.open("rb") as geo_file: + aoi_content = self.query_check( + self.Mutation.UPLOAD_PROJECT_ASSET, + variables={"data": aoi_asset_data}, + files={"geoFile": geo_file}, + map={"geoFile": ["variables.data.file"]}, + ) + aoi_response = aoi_content["data"]["createProjectAsset"] + assert aoi_response is not None, "AOI create response is None" + assert aoi_response["ok"] + aoi_id = aoi_response["result"]["id"] + 
update_project_data["projectTypeSpecifics"]["street"]["aoiGeometry"] = aoi_id + + with self.captureOnCommitCallbacks(execute=True): + update_content = self.query_check( + self.Mutation.UPDATE_PROJECT, + variables={"pk": project_id, "data": update_project_data}, + ) + update_response = update_content["data"]["updateProject"] + assert update_response["ok"], update_response["errors"] + assert update_response is not None, "Project update response is None" + + # Process project + process_project_data = { + "clientId": project_client_id, + "status": "MARKED_AS_READY", + } + with self.captureOnCommitCallbacks(execute=True): + process_project_content = self.query_check( + self.Mutation.UPDATE_PROJECT_STATUS, + variables={"pk": project_id, "data": process_project_data}, + ) + process_project_response = process_project_content["data"]["updateProjectStatus"] + assert process_project_response is not None, "Project mark as ready response is None" + assert process_project_response["ok"], process_project_response["errors"] + assert process_project_response["result"]["status"] == "MARKED_AS_READY", "Project should be marked as ready" + + # Load Tutorial data initially. 
+ create_tutorial_data = test_data["create_tutorial"] + create_tutorial_data["project"] = project_id + with self.captureOnCommitCallbacks(execute=True): + tutorial_content = self.query_check( + self.Mutation.CREATE_TUTORIAL, + variables={"data": create_tutorial_data}, + ) + + tutorial_response = tutorial_content["data"]["createTutorial"] + assert tutorial_response is not None, "Tutorial create response is None" + assert tutorial_response["ok"] + + tutorial_id = tutorial_response["result"]["id"] + tutorial_fb_id = tutorial_response["result"]["firebaseId"] + tutorial_client_id = create_tutorial_data["clientId"] + + # Update Tutorial + with self.captureOnCommitCallbacks(execute=True): + update_tutorial_content = self.query_check( + query=self.Mutation.UPDATE_TUTORIAL, + variables={ + "data": test_data["update_tutorial"], + "pk": tutorial_id, + }, + ) + update_tutorial_response = update_tutorial_content["data"]["updateTutorial"] + assert update_tutorial_response is not None, "Tutorial update response is None" + assert update_tutorial_response["ok"], update_tutorial_response["errors"] + assert update_tutorial_response is not None, "Tutorial update response is None" + + # Publish Tutorial + publish_tutorial_data = { + "clientId": tutorial_client_id, + "status": "PUBLISHED", + } + with self.captureOnCommitCallbacks(execute=True): + publish_tutorial_content = self.query_check( + self.Mutation.UPDATE_TUTORIAL_STATUS, + variables={"pk": tutorial_id, "data": publish_tutorial_data}, + ) + publish_tutorial_response = publish_tutorial_content["data"]["updateTutorialStatus"] + assert publish_tutorial_response["ok"], publish_tutorial_response["errors"] + assert publish_tutorial_response is not None, "Processed tutorial publish response is None" + assert publish_tutorial_response["result"]["status"] == "PUBLISHED", "tutorial should be published" + + # CHECK TUTORIAL, GROUP AND TASK CREATED IN FIREBASE + + tutorial_fb_ref = self.firebase_helper.ref(f"/v2/projects/{tutorial_fb_id}") 
+ tutorial_fb_data = tutorial_fb_ref.get() + + # Check tutorial in firebase + assert tutorial_fb_data is not None, "Tutorial in firebase is None" + assert isinstance(tutorial_fb_data, dict), "Tutorial in firebase should be a dictionary" + assert tutorial_fb_data["projectId"] == tutorial_fb_id, "Field 'projectId' should match firebaseId" + + ignored_tutorial_keys = {"projectId", "tutorialDraftId"} + filtered_tutorial_actual = remove_object_keys(tutorial_fb_data, ignored_tutorial_keys) + filtered_tutorial_expected = remove_object_keys(test_data["expected_tutorial_data"], ignored_tutorial_keys) + assert filtered_tutorial_actual == filtered_tutorial_expected, "Difference found for tutorial data in firebase." + + # Check group in firebase + tutorial_groups_fb_ref = self.firebase_helper.ref(f"/v2/groups/{tutorial_fb_id}/") + tutorial_groups_fb_data = tutorial_groups_fb_ref.get() + + if tutorial_groups_fb_data: + for group in iter(tutorial_groups_fb_data.values()): # type: ignore[reportAttributeAccessIssue] + assert group["projectId"] == tutorial_fb_id, "Field 'tutorialId' of each group should match firebaseId" + + ignored_group_keys = {"projectId"} + filtered_group_actual = remove_object_keys(tutorial_groups_fb_data, ignored_group_keys) + filtered_group_expected = remove_object_keys(test_data["expected_tutorial_groups_data"], ignored_tutorial_keys) + assert filtered_group_actual == filtered_group_expected, "Difference found for tutorial group data in firebase." 
+ + # Check tutorial tasks in firebase + tutorial_tasks_ref = self.firebase_helper.ref(f"/v2/tasks/{tutorial_fb_id}/") + tutorial_task_fb_data = tutorial_tasks_ref.get() + + ignored_task_keys: set[str] = {"projectId"} + sanitized_tutorial_tasks_actual: list[dict[str, typing.Any]] = [] + sanitized_tutorial_tasks_expected: list[dict[str, typing.Any]] = [] + + for group in iter(tutorial_task_fb_data.values()): # type: ignore[reportAttributeAccessIssue] + for task_fb in decode_tasks(group): + sanitized_tutorial_tasks_actual.append(remove_object_keys(task_fb, ignored_task_keys)) # type: ignore[reportGeneralTypeIssues] + + for group in iter(test_data["expected_tutorial_tasks_data"].values()): + for task in decode_tasks(group): + sanitized_tutorial_tasks_expected.append(remove_object_keys(task, ignored_task_keys)) # type: ignore[reportGeneralTypeIssues] + + # Sorting and comparing tasks + sanitized_tasks_actual_sorted = sorted(sanitized_tutorial_tasks_actual, key=lambda t: t["taskId"]) + sanitized_tasks_expected_sorted = sorted(sanitized_tutorial_tasks_expected, key=lambda t: t["taskId"]) + + assert sanitized_tasks_actual_sorted == sanitized_tasks_expected_sorted, ( + "Differences found between expected and actual tasks on tutorial in firebase." 
+ ) + tutorial = TutorialFactory.create( + project_id=project_id, + **self.user_resource_kwargs, + ) + + # Update processed project + update_processed_project_data = test_data["update_processed_project"] + update_processed_project_data["tutorial"] = tutorial.id + update_processed_project_data["requestingOrganization"] = organization_id + with self.captureOnCommitCallbacks(execute=True): + update_processed_project_content = self.query_check( + self.Mutation.UPDATE_PROCESSED_PROJECT, + variables={"pk": project_id, "data": update_processed_project_data}, + ) + update_processed_response = update_processed_project_content["data"]["updateProcessedProject"] + assert update_processed_response["ok"], update_processed_response["errors"] + assert update_processed_response is not None, "Processed project update response is None" + + # Publish project + publish_project_data = { + "clientId": project_client_id, + "status": "PUBLISHED", + } + with self.captureOnCommitCallbacks(execute=True): + publish_project_content = self.query_check( + self.Mutation.UPDATE_PROJECT_STATUS, + variables={"pk": project_id, "data": publish_project_data}, + ) + publish_project_response = publish_project_content["data"]["updateProjectStatus"] + assert publish_project_response["ok"], publish_project_response["errors"] + assert publish_project_response is not None, "Processed project publish response is None" + assert publish_project_response["result"]["status"] == "PUBLISHED", "Project should be published" + + # CHECK PROJECT, GROUP AND TASK CREATED IN FIREBASE + + project_fb_ref = self.firebase_helper.ref(f"/v2/projects/{project_fb_id}") + project_fb_data = project_fb_ref.get() + + # Check project in firebase + # tutorial.refresh_from_db() + assert project_fb_data is not None, "Project in firebase is None" + assert isinstance(project_fb_data, dict), "Project in firebase should be a dictionary" + assert project_fb_data["created"] is not None, "Field 'created' should be defined" + assert 
datetime.fromisoformat(project_fb_data["created"]), "Field 'created' should be a timestamp"
+        assert project_fb_data["projectId"] == project_fb_id, "Field 'projectId' should match firebaseId"
+        assert project_fb_data["tutorialId"] == tutorial.firebase_id, "Field 'tutorialId' should match tutorial's firebaseId"
+        assert project_fb_data["createdBy"] == self.contributor_user.firebase_id, (
+            "Field 'createdBy' should match contributor user's firebaseId"
+        )
+
+        ignored_project_keys = {"created", "createdBy", "projectId", "tutorialId"}
+        filtered_project_actual = remove_object_keys(project_fb_data, ignored_project_keys)
+        filtered_project_expected = remove_object_keys(test_data["expected_project_data"], ignored_project_keys)
+        assert filtered_project_actual == filtered_project_expected, "Difference found for project data in firebase."
+
+        # Check group in firebase
+        groups_fb_ref = self.firebase_helper.ref(f"/v2/groups/{project_fb_id}/")
+        groups_fb_data = groups_fb_ref.get()
+
+        if groups_fb_data:
+            for group in iter(groups_fb_data.values()):  # type: ignore[reportAttributeAccessIssue]
+                assert group["projectId"] == project_fb_id, "Field 'projectId' of each group should match firebaseId"
+
+        ignored_group_keys = {"projectId"}
+        filtered_group_actual = remove_object_keys(groups_fb_data, ignored_group_keys)
+        filtered_group_expected = remove_object_keys(test_data["expected_project_groups_data"], ignored_group_keys)
+        assert filtered_group_actual == filtered_group_expected, "Difference found for group data on project in firebase."
+ + # Check tasks in firebase + tasks_ref = self.firebase_helper.ref(Config.FirebaseKeys.project_tasks(project_fb_id)) + project_tasks_fb_data = tasks_ref.get() + + if project_tasks_fb_data: + for groups in iter(project_tasks_fb_data.values()): # type: ignore[reportAttributeAccessIssue] + for task in groups: + assert task["projectId"] == project_fb_id, "Field 'projectId' of each task should match firebaseId" + + ignored_task_keys = {"projectId"} + sanitized_tasks_actual = remove_object_keys(project_tasks_fb_data, ignored_task_keys) + sanitized_tasks_expected = remove_object_keys(test_data["expected_project_tasks_data"], ignored_task_keys) + + assert sanitized_tasks_actual == sanitized_tasks_expected, ( + "Differences found between expected and actual tasks in firebase." + ) diff --git a/apps/project/tests/e2e_create_project_test.py b/apps/project/tests/e2e_create_validate_project_test.py similarity index 93% rename from apps/project/tests/e2e_create_project_test.py rename to apps/project/tests/e2e_create_validate_project_test.py index 417cad97..1b140380 100644 --- a/apps/project/tests/e2e_create_project_test.py +++ b/apps/project/tests/e2e_create_validate_project_test.py @@ -8,6 +8,7 @@ from apps.common.utils import decode_tasks, remove_object_keys from apps.contributor.factories import ContributorUserFactory +from apps.tutorial.factories import TutorialFactory from apps.user.factories import UserFactory from main.config import Config from main.tests import TestCase @@ -249,13 +250,12 @@ def setUpClass(cls): def test_validate_project_e2e(self): self._test_project( - "validate", "assets/tests/projects/validate/project_data.json5", ) - # TODO(susilnem): Add street data here too + # TODO(susilnem): Add more test with filters - def _test_project(self, projectKey: str, filename: str): + def _test_project(self, filename: str): self.force_login(self.user) # Load test data file @@ -308,7 +308,7 @@ def _test_project(self, projectKey: str, filename: str): project_response = 
project_content["data"]["createProject"] assert project_response is not None, "Project create response is None" - assert project_response["ok"] + assert project_response["ok"], project_response["errors"] project_id = project_response["result"]["id"] project_fb_id = project_response["result"]["firebaseId"] @@ -332,29 +332,32 @@ def _test_project(self, projectKey: str, filename: str): assert image_response["ok"] image_id = image_response["result"]["id"] - # Create GeoJSON Asset for AOI Geometry - aoi_asset_data = { - "clientId": str(ULID()), - "inputType": "AOI_GEOMETRY", - "project": project_id, - } - with aoi_geometry_filename.open("rb") as geo_file: - aoi_content = self.query_check( - self.Mutation.UPLOAD_PROJECT_ASSET, - variables={"data": aoi_asset_data}, - files={"geoFile": geo_file}, - map={"geoFile": ["variables.data.file"]}, - ) - aoi_response = aoi_content["data"]["createProjectAsset"] - assert aoi_response is not None, "AOI create response is None" - assert aoi_response["ok"] - aoi_id = aoi_response["result"]["id"] - # Update project update_project_data = test_data["update_project"] - update_project_data["image"] = image_id - update_project_data["projectTypeSpecifics"][projectKey]["objectSource"]["aoiGeometry"] = aoi_id update_project_data["requestingOrganization"] = organization_id + update_project_data["image"] = image_id + + # Create GeoJSON Asset for AOI Geometry + if update_project_data["projectTypeSpecifics"]["validate"]["objectSource"]["sourceType"] == "AOI_GEOJSON_FILE": + aoi_asset_data = { + "clientId": str(ULID()), + "inputType": "AOI_GEOMETRY", + "project": project_id, + } + with aoi_geometry_filename.open("rb") as geo_file: + aoi_content = self.query_check( + self.Mutation.UPLOAD_PROJECT_ASSET, + variables={"data": aoi_asset_data}, + files={"geoFile": geo_file}, + map={"geoFile": ["variables.data.file"]}, + ) + aoi_response = aoi_content["data"]["createProjectAsset"] + assert aoi_response is not None, "AOI create response is None" + assert 
aoi_response["ok"] + aoi_id = aoi_response["result"]["id"] + + update_project_data["projectTypeSpecifics"]["validate"]["objectSource"]["aoiGeometry"] = aoi_id + with self.captureOnCommitCallbacks(execute=True): update_content = self.query_check( self.Mutation.UPDATE_PROJECT, @@ -477,10 +480,14 @@ def _test_project(self, projectKey: str, filename: str): assert sanitized_tasks_actual_sorted == sanitized_tasks_expected_sorted, ( "Differences found between expected and actual tasks on tutorial in firebase." ) + tutorial = TutorialFactory.create( + project_id=project_id, + **self.user_resource_kwargs, + ) # Update processed project update_processed_project_data = test_data["update_processed_project"] - update_processed_project_data["tutorial"] = tutorial_id + update_processed_project_data["tutorial"] = tutorial.id update_processed_project_data["requestingOrganization"] = organization_id with self.captureOnCommitCallbacks(execute=True): update_processed_project_content = self.query_check( @@ -518,7 +525,7 @@ def _test_project(self, projectKey: str, filename: str): assert project_fb_data["created"] is not None, "Field 'created' should be defined" assert datetime.fromisoformat(project_fb_data["created"]), "Field 'created' should be a timestamp" assert project_fb_data["projectId"] == project_fb_id, "Field 'projectId' should match firebaseId" - assert project_fb_data["tutorialId"] == tutorial_fb_id, "Field 'tutorialId' should match tutorial's firebaseId" + assert project_fb_data["tutorialId"] == tutorial.firebase_id, "Field 'tutorialId' should match tutorial's firebaseId" assert project_fb_data["createdBy"] == self.contributor_user.firebase_id, ( "Field 'createdBy' should match contributor user's firebaseId" ) diff --git a/assets b/assets index 489599dc..1f64ac84 160000 --- a/assets +++ b/assets @@ -1 +1 @@ -Subproject commit 489599dcf84da1ddf91641423b1aedfaa7771092 +Subproject commit 1f64ac849e7146670bcef3f650280afbb40b1aba diff --git a/firebase b/firebase index 
23eadc93..f261606f 160000 --- a/firebase +++ b/firebase @@ -1 +1 @@ -Subproject commit 23eadc9377d026ff7107c9b9fb0cc1624f4f9a6f +Subproject commit f261606f6617addb17f19afcfedceb12b1ed404d From 795d154b604118c53c22e571aacd5b7f0054e3f2 Mon Sep 17 00:00:00 2001 From: Sushil Tiwari Date: Wed, 10 Sep 2025 19:14:41 +0545 Subject: [PATCH 3/9] feat(test): Add e2e testing for validate image project and tutorial --- .../tests/e2e_create_street_project_test.py | 19 +- .../tests/e2e_create_validate_project_test.py | 18 +- .../tests/e2e_validate_image_project_test.py | 556 ++++++++++++++++++ firebase | 2 +- 4 files changed, 566 insertions(+), 29 deletions(-) create mode 100644 apps/project/tests/e2e_validate_image_project_test.py diff --git a/apps/project/tests/e2e_create_street_project_test.py b/apps/project/tests/e2e_create_street_project_test.py index 8becded3..b878646b 100644 --- a/apps/project/tests/e2e_create_street_project_test.py +++ b/apps/project/tests/e2e_create_street_project_test.py @@ -8,7 +8,6 @@ from apps.common.utils import decode_tasks, remove_object_keys from apps.contributor.factories import ContributorUserFactory -from apps.tutorial.factories import TutorialFactory from apps.user.factories import UserFactory from main.config import Config from main.tests import TestCase @@ -243,10 +242,6 @@ def setUpClass(cls): cls.user = UserFactory.create( contributor_user=cls.contributor_user, ) - cls.user_resource_kwargs = dict( - created_by=cls.user, - modified_by=cls.user, - ) def test_street_project_e2e(self): self._test_project( @@ -368,7 +363,7 @@ def _test_project(self, filename: str): # Process project process_project_data = { "clientId": project_client_id, - "status": "MARKED_AS_READY", + "status": "READY_TO_PROCESS", } with self.captureOnCommitCallbacks(execute=True): process_project_content = self.query_check( @@ -376,9 +371,9 @@ def _test_project(self, filename: str): variables={"pk": project_id, "data": process_project_data}, ) process_project_response = 
process_project_content["data"]["updateProjectStatus"] - assert process_project_response is not None, "Project mark as ready response is None" + assert process_project_response is not None, "Project ready to process response is None" assert process_project_response["ok"], process_project_response["errors"] - assert process_project_response["result"]["status"] == "MARKED_AS_READY", "Project should be marked as ready" + assert process_project_response["result"]["status"] == "READY_TO_PROCESS", "Project should be ready to process" # Load Tutorial data initially. create_tutorial_data = test_data["create_tutorial"] @@ -477,14 +472,10 @@ def _test_project(self, filename: str): assert sanitized_tasks_actual_sorted == sanitized_tasks_expected_sorted, ( "Differences found between expected and actual tasks on tutorial in firebase." ) - tutorial = TutorialFactory.create( - project_id=project_id, - **self.user_resource_kwargs, - ) # Update processed project update_processed_project_data = test_data["update_processed_project"] - update_processed_project_data["tutorial"] = tutorial.id + update_processed_project_data["tutorial"] = tutorial_id update_processed_project_data["requestingOrganization"] = organization_id with self.captureOnCommitCallbacks(execute=True): update_processed_project_content = self.query_check( @@ -522,7 +513,7 @@ def _test_project(self, filename: str): assert project_fb_data["created"] is not None, "Field 'created' should be defined" assert datetime.fromisoformat(project_fb_data["created"]), "Field 'created' should be a timestamp" assert project_fb_data["projectId"] == project_fb_id, "Field 'projectId' should match firebaseId" - assert project_fb_data["tutorialId"] == tutorial.firebase_id, "Field 'tutorialId' should match tutorial's firebaseId" + assert project_fb_data["tutorialId"] == tutorial_fb_id, "Field 'tutorialId' should match tutorial's firebaseId" assert project_fb_data["createdBy"] == self.contributor_user.firebase_id, ( "Field 'createdBy' should 
match contributor user's firebaseId" ) diff --git a/apps/project/tests/e2e_create_validate_project_test.py b/apps/project/tests/e2e_create_validate_project_test.py index 1b140380..d06fd416 100644 --- a/apps/project/tests/e2e_create_validate_project_test.py +++ b/apps/project/tests/e2e_create_validate_project_test.py @@ -8,7 +8,6 @@ from apps.common.utils import decode_tasks, remove_object_keys from apps.contributor.factories import ContributorUserFactory -from apps.tutorial.factories import TutorialFactory from apps.user.factories import UserFactory from main.config import Config from main.tests import TestCase @@ -243,10 +242,6 @@ def setUpClass(cls): cls.user = UserFactory.create( contributor_user=cls.contributor_user, ) - cls.user_resource_kwargs = dict( - created_by=cls.user, - modified_by=cls.user, - ) def test_validate_project_e2e(self): self._test_project( @@ -370,7 +365,7 @@ def _test_project(self, filename: str): # Process project process_project_data = { "clientId": project_client_id, - "status": "MARKED_AS_READY", + "status": "READY_TO_PROCESS", } with self.captureOnCommitCallbacks(execute=True): process_project_content = self.query_check( @@ -380,7 +375,7 @@ def _test_project(self, filename: str): process_project_response = process_project_content["data"]["updateProjectStatus"] assert process_project_response is not None, "Project mark as ready response is None" assert process_project_response["ok"], process_project_response["errors"] - assert process_project_response["result"]["status"] == "MARKED_AS_READY", "Project should be marked as ready" + assert process_project_response["result"]["status"] == "READY_TO_PROCESS", "Project should be marked as ready" # Load Tutorial data initially. create_tutorial_data = test_data["create_tutorial"] @@ -480,14 +475,10 @@ def _test_project(self, filename: str): assert sanitized_tasks_actual_sorted == sanitized_tasks_expected_sorted, ( "Differences found between expected and actual tasks on tutorial in firebase." 
) - tutorial = TutorialFactory.create( - project_id=project_id, - **self.user_resource_kwargs, - ) # Update processed project update_processed_project_data = test_data["update_processed_project"] - update_processed_project_data["tutorial"] = tutorial.id + update_processed_project_data["tutorial"] = tutorial_id update_processed_project_data["requestingOrganization"] = organization_id with self.captureOnCommitCallbacks(execute=True): update_processed_project_content = self.query_check( @@ -519,13 +510,12 @@ def _test_project(self, filename: str): project_fb_data = project_fb_ref.get() # Check project in firebase - # tutorial.refresh_from_db() assert project_fb_data is not None, "Project in firebase is None" assert isinstance(project_fb_data, dict), "Project in firebase should be a dictionary" assert project_fb_data["created"] is not None, "Field 'created' should be defined" assert datetime.fromisoformat(project_fb_data["created"]), "Field 'created' should be a timestamp" assert project_fb_data["projectId"] == project_fb_id, "Field 'projectId' should match firebaseId" - assert project_fb_data["tutorialId"] == tutorial.firebase_id, "Field 'tutorialId' should match tutorial's firebaseId" + assert project_fb_data["tutorialId"] == tutorial_fb_id, "Field 'tutorialId' should match tutorial's firebaseId" assert project_fb_data["createdBy"] == self.contributor_user.firebase_id, ( "Field 'createdBy' should match contributor user's firebaseId" ) diff --git a/apps/project/tests/e2e_validate_image_project_test.py b/apps/project/tests/e2e_validate_image_project_test.py new file mode 100644 index 00000000..08dfc067 --- /dev/null +++ b/apps/project/tests/e2e_validate_image_project_test.py @@ -0,0 +1,556 @@ +import typing +from datetime import datetime +from pathlib import Path + +import json5 +from django.conf import settings +from ulid import ULID + +from apps.common.utils import remove_object_keys +from apps.contributor.factories import ContributorUserFactory +from 
apps.user.factories import UserFactory +from main.config import Config +from main.tests import TestCase + + +class TestValidateImageProjectE2E(TestCase): + class Mutation: + CREATE_PROJECT = """ + mutation CreateProject($data: ProjectCreateInput!) { + createProject(data: $data) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on ProjectTypeMutationResponseType { + errors + ok + result { + id + firebaseId + } + } + } + } + """ + + UPDATE_PROJECT = """ + mutation UpdateProject($pk: ID!, $data: ProjectUpdateInput!) { + updateProject(pk: $pk, data: $data) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on ProjectTypeMutationResponseType { + errors + ok + result { + id + } + } + } + } + """ + + UPLOAD_PROJECT_ASSET = """ + mutation CreateProjectAsset($data: ProjectAssetCreateInput!) { + createProjectAsset(data: $data) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on ProjectAssetTypeMutationResponseType { + errors + ok + result { + id + } + } + } + } + """ + + UPDATE_PROCESSED_PROJECT = """ + mutation UpdateProcessedProject($pk: ID!, $data: ProcessedProjectUpdateInput!) { + updateProcessedProject(pk: $pk, data: $data) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on ProjectTypeMutationResponseType { + errors + ok + result { + id + } + } + } + } + """ + + UPDATE_PROJECT_STATUS = """ + mutation UpdateProjectStatus($pk: ID!, $data: ProjectStatusUpdateInput!) { + updateProjectStatus(pk: $pk, data: $data) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on ProjectTypeMutationResponseType { + errors + ok + result { + id + status + } + } + } + } + """ + + CREATE_ORGANIZATION = """ + mutation CreateOrganization($data: OrganizationCreateInput!) { + createOrganization(data: $data) { + ... 
on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on OrganizationTypeMutationResponseType { + errors + ok + result { + id + firebaseId + } + } + } + } + """ + + CREATE_TUTORIAL = """ + mutation CreateTutorial($data: TutorialCreateInput!) { + createTutorial(data: $data) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on TutorialTypeMutationResponseType { + errors + ok + result { + id + clientId + projectId + firebaseId + } + } + } + } + """ + + UPDATE_TUTORIAL = """ + mutation UpdateTutorial($data: TutorialUpdateInput!, $pk: ID!) { + updateTutorial(data: $data, pk: $pk) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on TutorialTypeMutationResponseType { + errors + ok + result { + id + } + } + } + } + """ + + UPDATE_TUTORIAL_STATUS = """ + mutation UpdateTutorialStatus($data: TutorialStatusUpdateInput!, $pk: ID!) { + updateTutorialStatus(data: $data, pk: $pk) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... 
on TutorialTypeMutationResponseType { + errors + ok + result { + id + status + } + } + } + } + """ + + @typing.override + @classmethod + def setUpClass(cls): + super().setUpClass() + cls.firebase_helper = Config.FIREBASE_HELPER + + cls.contributor_user = ContributorUserFactory.create( + username="Ram Bahadur", + ) + + cls.user = UserFactory.create( + contributor_user=cls.contributor_user, + ) + + def test_validate_image_project_e2e(self): + self._test_project( + "assets/tests/projects/validate_image/project_data.json5", + ) + + # TODO(susilnem): Add more test with filters + def _test_project(self, filename: str): + self.force_login(self.user) + + # Load test data file + full_path = Path(settings.BASE_DIR, filename) + with full_path.open("r", encoding="utf-8") as f: + test_data = json5.load(f) + + # Define full path for image and AOI files + image_filename = Path(settings.BASE_DIR) / test_data["assets"]["image"] + coco_filename = Path(settings.BASE_DIR) / test_data["assets"]["coco_dataset"] + + # Load Project data initially. 
+ create_project_data = test_data["create_project"] + + # Create an organization and attach to project + create_organization_data = test_data["create_organization"] + with self.captureOnCommitCallbacks(execute=True): + organization_content = self.query_check( + self.Mutation.CREATE_ORGANIZATION, + variables={"data": create_organization_data}, + ) + + organization_response = organization_content["data"]["createOrganization"] + assert organization_response is not None, "Organization create response is None" + assert organization_response["ok"] + + organization_id = organization_response["result"]["id"] + organization_fb_id = organization_response["result"]["firebaseId"] + + # CHECK ORGANIZATION in firebase + + organization_fb_ref = self.firebase_helper.ref(f"/v2/organisations/{organization_fb_id}") + organization_fb_data = organization_fb_ref.get() + + # Check organization in firebase + assert organization_fb_data is not None, "organization in firebase is None" + assert isinstance(organization_fb_data, dict), "organization in firebase should be a dictionary" + + assert organization_fb_data == test_data["expected_organization_data"], ( + "Difference found for organization data in firebase." 
+ ) + + # Create project + create_project_data["requestingOrganization"] = organization_id + with self.captureOnCommitCallbacks(execute=True): + project_content = self.query_check( + self.Mutation.CREATE_PROJECT, + variables={"data": create_project_data}, + ) + + project_response = project_content["data"]["createProject"] + assert project_response is not None, "Project create response is None" + assert project_response["ok"], project_response["errors"] + + project_id = project_response["result"]["id"] + project_fb_id = project_response["result"]["firebaseId"] + project_client_id = create_project_data["clientId"] + + # Create Image Asset for cover image + image_asset_data = { + "clientId": project_client_id, + "inputType": "COVER_IMAGE", + "project": project_id, + } + with image_filename.open("rb") as img_file: + image_content = self.query_check( + self.Mutation.UPLOAD_PROJECT_ASSET, + variables={"data": image_asset_data}, + files={"imageFile": img_file}, + map={"imageFile": ["variables.data.file"]}, + ) + image_response = image_content["data"]["createProjectAsset"] + assert image_response is not None, "Image create response is None" + assert image_response["ok"] + image_id = image_response["result"]["id"] + + # Update project + update_project_data = test_data["update_project"] + update_project_data["requestingOrganization"] = organization_id + update_project_data["image"] = image_id + + # Create COCO dataset assets + if update_project_data["projectTypeSpecifics"]["validateImage"]["sourceType"] == "DATASET_FILE": + with coco_filename.open("r", encoding="utf-8") as f: + coco_data = json5.load(f) + for image in iter(coco_data["images"]): + aoi_asset_data = { + "clientId": str(ULID()), + "inputType": "OBJECT_IMAGE", + "project": project_id, + "assetTypeSpecifics": { + "objectImage": { + "image": { + "cocoUrl": image["coco_url"], + "fileName": image["file_name"], + "height": image["height"], + "id": str(image["id"]), + "width": image["width"], + }, + }, + }, + 
"externalUrl": image["coco_url"], + } + aoi_content = self.query_check( + self.Mutation.UPLOAD_PROJECT_ASSET, + variables={"data": aoi_asset_data}, + ) + aoi_response = aoi_content["data"]["createProjectAsset"] + assert aoi_response is not None, "AOI create response is None" + assert aoi_response["ok"] + + with self.captureOnCommitCallbacks(execute=True): + update_content = self.query_check( + self.Mutation.UPDATE_PROJECT, + variables={"pk": project_id, "data": update_project_data}, + ) + update_response = update_content["data"]["updateProject"] + assert update_response["ok"], update_response["errors"] + assert update_response is not None, "Project update response is None" + + # Process project + process_project_data = { + "clientId": project_client_id, + "status": "READY_TO_PROCESS", + } + with self.captureOnCommitCallbacks(execute=True): + process_project_content = self.query_check( + self.Mutation.UPDATE_PROJECT_STATUS, + variables={"pk": project_id, "data": process_project_data}, + ) + process_project_response = process_project_content["data"]["updateProjectStatus"] + assert process_project_response is not None, "Project ready to process response is None" + assert process_project_response["ok"], process_project_response["errors"] + assert process_project_response["result"]["status"] == "READY_TO_PROCESS", "Project should be ready to process" + + # Load Tutorial data initially. 
+ create_tutorial_data = test_data["create_tutorial"] + create_tutorial_data["project"] = project_id + with self.captureOnCommitCallbacks(execute=True): + tutorial_content = self.query_check( + self.Mutation.CREATE_TUTORIAL, + variables={"data": create_tutorial_data}, + ) + + tutorial_response = tutorial_content["data"]["createTutorial"] + assert tutorial_response is not None, "Tutorial create response is None" + assert tutorial_response["ok"] + + tutorial_id = tutorial_response["result"]["id"] + tutorial_fb_id = tutorial_response["result"]["firebaseId"] + tutorial_client_id = create_tutorial_data["clientId"] + + # Update Tutorial + with self.captureOnCommitCallbacks(execute=True): + update_tutorial_content = self.query_check( + query=self.Mutation.UPDATE_TUTORIAL, + variables={ + "data": test_data["update_tutorial"], + "pk": tutorial_id, + }, + ) + update_tutorial_response = update_tutorial_content["data"]["updateTutorial"] + assert update_tutorial_response is not None, "Tutorial update response is None" + assert update_tutorial_response["ok"], update_tutorial_response["errors"] + assert update_tutorial_response is not None, "Tutorial update response is None" + + # Publish Tutorial + publish_tutorial_data = { + "clientId": tutorial_client_id, + "status": "READY_TO_PUBLISH", + } + with self.captureOnCommitCallbacks(execute=True): + publish_tutorial_content = self.query_check( + self.Mutation.UPDATE_TUTORIAL_STATUS, + variables={"pk": tutorial_id, "data": publish_tutorial_data}, + ) + publish_tutorial_response = publish_tutorial_content["data"]["updateTutorialStatus"] + assert publish_tutorial_response["ok"], publish_tutorial_response["errors"] + assert publish_tutorial_response is not None, "Processed tutorial publish response is None" + assert publish_tutorial_response["result"]["status"] == "READY_TO_PUBLISH", "tutorial should be ready to published" + + # CHECK TUTORIAL, GROUP AND TASK CREATED IN FIREBASE + + tutorial_fb_ref = 
self.firebase_helper.ref(f"/v2/projects/{tutorial_fb_id}")
+        tutorial_fb_data = tutorial_fb_ref.get()
+
+        # Check tutorial in firebase
+        assert tutorial_fb_data is not None, "Tutorial in firebase is None"
+        assert isinstance(tutorial_fb_data, dict), "Tutorial in firebase should be a dictionary"
+        assert tutorial_fb_data["projectId"] == tutorial_fb_id, "Field 'projectId' should match firebaseId"
+
+        ignored_tutorial_keys = {"projectId", "tutorialDraftId"}
+        filtered_tutorial_actual = remove_object_keys(tutorial_fb_data, ignored_tutorial_keys)
+        filtered_tutorial_expected = remove_object_keys(test_data["expected_tutorial_data"], ignored_tutorial_keys)
+        assert filtered_tutorial_actual == filtered_tutorial_expected, "Difference found for tutorial data in firebase."
+
+        # Check group in firebase
+        tutorial_groups_fb_ref = self.firebase_helper.ref(f"/v2/groups/{tutorial_fb_id}/")
+        tutorial_groups_fb_data = tutorial_groups_fb_ref.get()
+
+        if tutorial_groups_fb_data:
+            for group in iter(tutorial_groups_fb_data.values()):  # type: ignore[reportAttributeAccessIssue]
+                assert group["projectId"] == tutorial_fb_id, "Field 'projectId' of each group should match firebaseId"
+
+        ignored_group_keys = {"projectId"}
+        filtered_group_actual = remove_object_keys(tutorial_groups_fb_data, ignored_group_keys)
+        filtered_group_expected = remove_object_keys(test_data["expected_tutorial_groups_data"], ignored_group_keys)
+        assert filtered_group_actual == filtered_group_expected, "Difference found for tutorial group data in firebase."
+ + # Check tutorial tasks in firebase + tutorial_tasks_ref = self.firebase_helper.ref(f"/v2/tasks/{tutorial_fb_id}/") + tutorial_task_fb_data = tutorial_tasks_ref.get() + + if tutorial_task_fb_data: + for groups in iter(tutorial_task_fb_data.values()): # type: ignore[reportAttributeAccessIssue] + for task in groups: + assert task["projectId"] == tutorial_fb_id, "Field 'projectId' of each task should match firebaseId" + + ignored_task_keys = {"projectId"} + sanitized_tasks_actual = remove_object_keys(tutorial_task_fb_data, ignored_task_keys) + sanitized_tasks_expected = remove_object_keys(test_data["expected_tutorial_tasks_data"], ignored_task_keys) + + assert sanitized_tasks_actual == sanitized_tasks_expected, ( + "Differences found between expected and actual tasks in firebase." + ) + + # Update processed project + update_processed_project_data = test_data["update_processed_project"] + update_processed_project_data["tutorial"] = tutorial_id + update_processed_project_data["requestingOrganization"] = organization_id + with self.captureOnCommitCallbacks(execute=True): + update_processed_project_content = self.query_check( + self.Mutation.UPDATE_PROCESSED_PROJECT, + variables={"pk": project_id, "data": update_processed_project_data}, + ) + update_processed_response = update_processed_project_content["data"]["updateProcessedProject"] + assert update_processed_response["ok"], update_processed_response["errors"] + assert update_processed_response is not None, "Processed project update response is None" + + # Publish project + publish_project_data = { + "clientId": project_client_id, + "status": "READY_TO_PUBLISH", + } + with self.captureOnCommitCallbacks(execute=True): + publish_project_content = self.query_check( + self.Mutation.UPDATE_PROJECT_STATUS, + variables={"pk": project_id, "data": publish_project_data}, + ) + publish_project_response = publish_project_content["data"]["updateProjectStatus"] + assert publish_project_response["ok"], 
publish_project_response["errors"] + assert publish_project_response is not None, "Processed project publish response is None" + assert publish_project_response["result"]["status"] == "READY_TO_PUBLISH", "Project should be ready to published" + + # CHECK PROJECT, GROUP AND TASK CREATED IN FIREBASE + + project_fb_ref = self.firebase_helper.ref(f"/v2/projects/{project_fb_id}") + project_fb_data = project_fb_ref.get() + + # Check project in firebase + # tutorial.refresh_from_db() + assert project_fb_data is not None, "Project in firebase is None" + assert isinstance(project_fb_data, dict), "Project in firebase should be a dictionary" + assert project_fb_data["created"] is not None, "Field 'created' should be defined" + assert datetime.fromisoformat(project_fb_data["created"]), "Field 'created' should be a timestamp" + assert project_fb_data["projectId"] == project_fb_id, "Field 'projectId' should match firebaseId" + assert project_fb_data["tutorialId"] == tutorial_fb_id, "Field 'tutorialId' should match tutorial's firebaseId" + assert project_fb_data["createdBy"] == self.contributor_user.firebase_id, ( + "Field 'createdBy' should match contributor user's firebaseId" + ) + + ignored_project_keys = {"created", "createdBy", "projectId", "tutorialId"} + filtered_project_actual = remove_object_keys(project_fb_data, ignored_project_keys) + filtered_project_expected = remove_object_keys(test_data["expected_project_data"], ignored_project_keys) + assert filtered_project_actual == filtered_project_expected, "Difference found for project data in firebase." 
+ + # Check group in firebase + groups_fb_ref = self.firebase_helper.ref(f"/v2/groups/{project_fb_id}/") + groups_fb_data = groups_fb_ref.get() + + if groups_fb_data: + for group in iter(groups_fb_data.values()): # type: ignore[reportAttributeAccessIssue] + assert group["projectId"] == project_fb_id, "Field 'projectId' of each group should match firebaseId" + + ignored_group_keys = {"projectId"} + filtered_group_actual = remove_object_keys(groups_fb_data, ignored_group_keys) + filtered_group_expected = remove_object_keys(test_data["expected_project_groups_data"], ignored_project_keys) + assert filtered_group_actual == filtered_group_expected, "Difference found for group data on project in firebase." + + # Check tasks in firebase + project_tasks_ref = self.firebase_helper.ref(f"/v2/tasks/{project_fb_id}/") + project_tasks_fb_data = project_tasks_ref.get() + if project_tasks_fb_data: + for groups in iter(project_tasks_fb_data.values()): # type: ignore[reportAttributeAccessIssue] + for task in groups: + assert task["projectId"] == project_fb_id, "Field 'projectId' of each task should match firebaseId" + + ignored_task_keys = {"projectId"} + sanitized_tasks_actual = remove_object_keys(project_tasks_fb_data, ignored_task_keys) + sanitized_tasks_expected = remove_object_keys(test_data["expected_project_tasks_data"], ignored_task_keys) + + assert sanitized_tasks_actual == sanitized_tasks_expected, ( + "Differences found between expected and actual tasks in firebase." 
+ ) diff --git a/firebase b/firebase index f261606f..23eadc93 160000 --- a/firebase +++ b/firebase @@ -1 +1 @@ -Subproject commit f261606f6617addb17f19afcfedceb12b1ed404d +Subproject commit 23eadc9377d026ff7107c9b9fb0cc1624f4f9a6f From c7adcc6762479c28f8f63ffae3475119797645c6 Mon Sep 17 00:00:00 2001 From: Sushil Tiwari Date: Thu, 25 Sep 2025 11:16:41 +0545 Subject: [PATCH 4/9] fix(project): Update e2e test for street and validate image --- apps/project/tests/e2e_create_street_project_test.py | 8 ++++---- apps/project/tests/e2e_create_validate_project_test.py | 8 ++++---- project_types/validate_image/project.py | 2 +- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/apps/project/tests/e2e_create_street_project_test.py b/apps/project/tests/e2e_create_street_project_test.py index b878646b..364426cf 100644 --- a/apps/project/tests/e2e_create_street_project_test.py +++ b/apps/project/tests/e2e_create_street_project_test.py @@ -409,7 +409,7 @@ def _test_project(self, filename: str): # Publish Tutorial publish_tutorial_data = { "clientId": tutorial_client_id, - "status": "PUBLISHED", + "status": "READY_TO_PUBLISH", } with self.captureOnCommitCallbacks(execute=True): publish_tutorial_content = self.query_check( @@ -419,7 +419,7 @@ def _test_project(self, filename: str): publish_tutorial_response = publish_tutorial_content["data"]["updateTutorialStatus"] assert publish_tutorial_response["ok"], publish_tutorial_response["errors"] assert publish_tutorial_response is not None, "Processed tutorial publish response is None" - assert publish_tutorial_response["result"]["status"] == "PUBLISHED", "tutorial should be published" + assert publish_tutorial_response["result"]["status"] == "READY_TO_PUBLISH", "tutorial should be ready to published" # CHECK TUTORIAL, GROUP AND TASK CREATED IN FIREBASE @@ -489,7 +489,7 @@ def _test_project(self, filename: str): # Publish project publish_project_data = { "clientId": project_client_id, - "status": "PUBLISHED", + "status": 
"READY_TO_PUBLISH", } with self.captureOnCommitCallbacks(execute=True): publish_project_content = self.query_check( @@ -499,7 +499,7 @@ def _test_project(self, filename: str): publish_project_response = publish_project_content["data"]["updateProjectStatus"] assert publish_project_response["ok"], publish_project_response["errors"] assert publish_project_response is not None, "Processed project publish response is None" - assert publish_project_response["result"]["status"] == "PUBLISHED", "Project should be published" + assert publish_project_response["result"]["status"] == "READY_TO_PUBLISH", "Project should be ready to published" # CHECK PROJECT, GROUP AND TASK CREATED IN FIREBASE diff --git a/apps/project/tests/e2e_create_validate_project_test.py b/apps/project/tests/e2e_create_validate_project_test.py index d06fd416..162b0bfa 100644 --- a/apps/project/tests/e2e_create_validate_project_test.py +++ b/apps/project/tests/e2e_create_validate_project_test.py @@ -411,7 +411,7 @@ def _test_project(self, filename: str): # Publish Tutorial publish_tutorial_data = { "clientId": tutorial_client_id, - "status": "PUBLISHED", + "status": "READY_TO_PUBLISH", } with self.captureOnCommitCallbacks(execute=True): publish_tutorial_content = self.query_check( @@ -421,7 +421,7 @@ def _test_project(self, filename: str): publish_tutorial_response = publish_tutorial_content["data"]["updateTutorialStatus"] assert publish_tutorial_response["ok"], publish_tutorial_response["errors"] assert publish_tutorial_response is not None, "Processed tutorial publish response is None" - assert publish_tutorial_response["result"]["status"] == "PUBLISHED", "tutorial should be published" + assert publish_tutorial_response["result"]["status"] == "READY_TO_PUBLISH", "tutorial should be ready to published" # CHECK TUTORIAL, GROUP AND TASK CREATED IN FIREBASE @@ -492,7 +492,7 @@ def _test_project(self, filename: str): # Publish project publish_project_data = { "clientId": project_client_id, - "status": 
"PUBLISHED", + "status": "READY_TO_PUBLISH", } with self.captureOnCommitCallbacks(execute=True): publish_project_content = self.query_check( @@ -502,7 +502,7 @@ def _test_project(self, filename: str): publish_project_response = publish_project_content["data"]["updateProjectStatus"] assert publish_project_response["ok"], publish_project_response["errors"] assert publish_project_response is not None, "Processed project publish response is None" - assert publish_project_response["result"]["status"] == "PUBLISHED", "Project should be published" + assert publish_project_response["result"]["status"] == "READY_TO_PUBLISH", "Project should be ready to published" # CHECK PROJECT, GROUP AND TASK CREATED IN FIREBASE diff --git a/project_types/validate_image/project.py b/project_types/validate_image/project.py index f0f373b0..08d084cd 100644 --- a/project_types/validate_image/project.py +++ b/project_types/validate_image/project.py @@ -213,7 +213,7 @@ def create_tasks(self, group: ProjectTaskGroup, raw_group: Grouping[ValidImage]) bulk_mgr.add( ProjectTask( - firebase_id=f"t{f_id}", + firebase_id=f_id, task_group_id=group.pk, geometry=None, # FIXME(tnagorra): Do we need to define all of these here? 
From ea386b2579bb79c2b8d039546e26e90e4c48d401 Mon Sep 17 00:00:00 2001 From: Sushil Tiwari Date: Thu, 25 Sep 2025 16:11:48 +0545 Subject: [PATCH 5/9] feat(pytest): Add vcr testing configurations --- .../tests/e2e_create_street_project_test.py | 2 + .../tests/e2e_create_validate_project_test.py | 2 + conftest.py | 11 + pyproject.toml | 1 + uv.lock | 189 ++++++++++++++++++ 5 files changed, 205 insertions(+) create mode 100644 conftest.py diff --git a/apps/project/tests/e2e_create_street_project_test.py b/apps/project/tests/e2e_create_street_project_test.py index 364426cf..a5213075 100644 --- a/apps/project/tests/e2e_create_street_project_test.py +++ b/apps/project/tests/e2e_create_street_project_test.py @@ -3,6 +3,7 @@ from pathlib import Path import json5 +import pytest from django.conf import settings from ulid import ULID @@ -243,6 +244,7 @@ def setUpClass(cls): contributor_user=cls.contributor_user, ) + @pytest.mark.vcr def test_street_project_e2e(self): self._test_project( "assets/tests/projects/street/project_data.json5", diff --git a/apps/project/tests/e2e_create_validate_project_test.py b/apps/project/tests/e2e_create_validate_project_test.py index 162b0bfa..8e0203e1 100644 --- a/apps/project/tests/e2e_create_validate_project_test.py +++ b/apps/project/tests/e2e_create_validate_project_test.py @@ -3,6 +3,7 @@ from pathlib import Path import json5 +import pytest from django.conf import settings from ulid import ULID @@ -243,6 +244,7 @@ def setUpClass(cls): contributor_user=cls.contributor_user, ) + @pytest.mark.vcr def test_validate_project_e2e(self): self._test_project( "assets/tests/projects/validate/project_data.json5", diff --git a/conftest.py b/conftest.py new file mode 100644 index 00000000..7091fb14 --- /dev/null +++ b/conftest.py @@ -0,0 +1,11 @@ +import pytest + + +@pytest.fixture(autouse=True) +def vcr_config(): + return { + "record_mode": "once", + "ignore_hosts": ["localhost", "firebase-test"], + "ignore_localhost": True, + "cassette_library_dir": 
"assets/tests/tests_vcr_snapshots", + } diff --git a/pyproject.toml b/pyproject.toml index 6cb7f428..01f2351b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -67,6 +67,7 @@ test = [ "pytest-icdiff", "pytest-ordering", "pytest-profiling", + "pytest-recording", ] [tool.uv.workspace] diff --git a/uv.lock b/uv.lock index b191898d..c55b1b30 100644 --- a/uv.lock +++ b/uv.lock @@ -1,6 +1,10 @@ version = 1 revision = 2 requires-python = ">=3.13.2, <3.14" +resolution-markers = [ + "platform_python_implementation != 'PyPy'", + "platform_python_implementation == 'PyPy'", +] [manifest] members = [ @@ -1285,6 +1289,7 @@ test = [ { name = "pytest-icdiff" }, { name = "pytest-ordering" }, { name = "pytest-profiling" }, + { name = "pytest-recording" }, ] [package.metadata] @@ -1345,6 +1350,7 @@ test = [ { name = "pytest-icdiff" }, { name = "pytest-ordering" }, { name = "pytest-profiling" }, + { name = "pytest-recording" }, ] [[package]] @@ -1398,6 +1404,51 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ca/91/7dc28d5e2a11a5ad804cf2b7f7a5fcb1eb5a4966d66a5d2b41aee6376543/msgpack-1.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:6d489fba546295983abd142812bda76b57e33d0b9f5d5b71c09a583285506f69", size = 72341, upload-time = "2025-06-13T06:52:27.835Z" }, ] +[[package]] +name = "multidict" +version = "6.6.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/69/7f/0652e6ed47ab288e3756ea9c0df8b14950781184d4bd7883f4d87dd41245/multidict-6.6.4.tar.gz", hash = "sha256:d2d4e4787672911b48350df02ed3fa3fffdc2f2e8ca06dd6afdf34189b76a9dd", size = 101843 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/5d/e1db626f64f60008320aab00fbe4f23fc3300d75892a3381275b3d284580/multidict-6.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f46a6e8597f9bd71b31cc708195d42b634c8527fecbcf93febf1052cacc1f16e", size = 75848 }, + { url = 
"https://files.pythonhosted.org/packages/4c/aa/8b6f548d839b6c13887253af4e29c939af22a18591bfb5d0ee6f1931dae8/multidict-6.6.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:22e38b2bc176c5eb9c0a0e379f9d188ae4cd8b28c0f53b52bce7ab0a9e534657", size = 45060 }, + { url = "https://files.pythonhosted.org/packages/eb/c6/f5e97e5d99a729bc2aa58eb3ebfa9f1e56a9b517cc38c60537c81834a73f/multidict-6.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5df8afd26f162da59e218ac0eefaa01b01b2e6cd606cffa46608f699539246da", size = 43269 }, + { url = "https://files.pythonhosted.org/packages/dc/31/d54eb0c62516776f36fe67f84a732f97e0b0e12f98d5685bebcc6d396910/multidict-6.6.4-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:49517449b58d043023720aa58e62b2f74ce9b28f740a0b5d33971149553d72aa", size = 237158 }, + { url = "https://files.pythonhosted.org/packages/c4/1c/8a10c1c25b23156e63b12165a929d8eb49a6ed769fdbefb06e6f07c1e50d/multidict-6.6.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ae9408439537c5afdca05edd128a63f56a62680f4b3c234301055d7a2000220f", size = 257076 }, + { url = "https://files.pythonhosted.org/packages/ad/86/90e20b5771d6805a119e483fd3d1e8393e745a11511aebca41f0da38c3e2/multidict-6.6.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:87a32d20759dc52a9e850fe1061b6e41ab28e2998d44168a8a341b99ded1dba0", size = 240694 }, + { url = "https://files.pythonhosted.org/packages/e7/49/484d3e6b535bc0555b52a0a26ba86e4d8d03fd5587d4936dc59ba7583221/multidict-6.6.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:52e3c8d43cdfff587ceedce9deb25e6ae77daba560b626e97a56ddcad3756879", size = 266350 }, + { url = 
"https://files.pythonhosted.org/packages/bf/b4/aa4c5c379b11895083d50021e229e90c408d7d875471cb3abf721e4670d6/multidict-6.6.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ad8850921d3a8d8ff6fbef790e773cecfc260bbfa0566998980d3fa8f520bc4a", size = 267250 }, + { url = "https://files.pythonhosted.org/packages/80/e5/5e22c5bf96a64bdd43518b1834c6d95a4922cc2066b7d8e467dae9b6cee6/multidict-6.6.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:497a2954adc25c08daff36f795077f63ad33e13f19bfff7736e72c785391534f", size = 254900 }, + { url = "https://files.pythonhosted.org/packages/17/38/58b27fed927c07035abc02befacab42491e7388ca105e087e6e0215ead64/multidict-6.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:024ce601f92d780ca1617ad4be5ac15b501cc2414970ffa2bb2bbc2bd5a68fa5", size = 252355 }, + { url = "https://files.pythonhosted.org/packages/d0/a1/dad75d23a90c29c02b5d6f3d7c10ab36c3197613be5d07ec49c7791e186c/multidict-6.6.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a693fc5ed9bdd1c9e898013e0da4dcc640de7963a371c0bd458e50e046bf6438", size = 250061 }, + { url = "https://files.pythonhosted.org/packages/b8/1a/ac2216b61c7f116edab6dc3378cca6c70dc019c9a457ff0d754067c58b20/multidict-6.6.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:190766dac95aab54cae5b152a56520fd99298f32a1266d66d27fdd1b5ac00f4e", size = 249675 }, + { url = "https://files.pythonhosted.org/packages/d4/79/1916af833b800d13883e452e8e0977c065c4ee3ab7a26941fbfdebc11895/multidict-6.6.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:34d8f2a5ffdceab9dcd97c7a016deb2308531d5f0fced2bb0c9e1df45b3363d7", size = 261247 }, + { url = "https://files.pythonhosted.org/packages/c5/65/d1f84fe08ac44a5fc7391cbc20a7cedc433ea616b266284413fd86062f8c/multidict-6.6.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:59e8d40ab1f5a8597abcef00d04845155a5693b5da00d2c93dbe88f2050f2812", size = 257960 }, + { url = 
"https://files.pythonhosted.org/packages/13/b5/29ec78057d377b195ac2c5248c773703a6b602e132a763e20ec0457e7440/multidict-6.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:467fe64138cfac771f0e949b938c2e1ada2b5af22f39692aa9258715e9ea613a", size = 250078 }, + { url = "https://files.pythonhosted.org/packages/c4/0e/7e79d38f70a872cae32e29b0d77024bef7834b0afb406ddae6558d9e2414/multidict-6.6.4-cp313-cp313-win32.whl", hash = "sha256:14616a30fe6d0a48d0a48d1a633ab3b8bec4cf293aac65f32ed116f620adfd69", size = 41708 }, + { url = "https://files.pythonhosted.org/packages/9d/34/746696dffff742e97cd6a23da953e55d0ea51fa601fa2ff387b3edcfaa2c/multidict-6.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:40cd05eaeb39e2bc8939451f033e57feaa2ac99e07dbca8afe2be450a4a3b6cf", size = 45912 }, + { url = "https://files.pythonhosted.org/packages/c7/87/3bac136181e271e29170d8d71929cdeddeb77f3e8b6a0c08da3a8e9da114/multidict-6.6.4-cp313-cp313-win_arm64.whl", hash = "sha256:f6eb37d511bfae9e13e82cb4d1af36b91150466f24d9b2b8a9785816deb16605", size = 43076 }, + { url = "https://files.pythonhosted.org/packages/64/94/0a8e63e36c049b571c9ae41ee301ada29c3fee9643d9c2548d7d558a1d99/multidict-6.6.4-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:6c84378acd4f37d1b507dfa0d459b449e2321b3ba5f2338f9b085cf7a7ba95eb", size = 82812 }, + { url = "https://files.pythonhosted.org/packages/25/1a/be8e369dfcd260d2070a67e65dd3990dd635cbd735b98da31e00ea84cd4e/multidict-6.6.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0e0558693063c75f3d952abf645c78f3c5dfdd825a41d8c4d8156fc0b0da6e7e", size = 48313 }, + { url = "https://files.pythonhosted.org/packages/26/5a/dd4ade298674b2f9a7b06a32c94ffbc0497354df8285f27317c66433ce3b/multidict-6.6.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3f8e2384cb83ebd23fd07e9eada8ba64afc4c759cd94817433ab8c81ee4b403f", size = 46777 }, + { url = 
"https://files.pythonhosted.org/packages/89/db/98aa28bc7e071bfba611ac2ae803c24e96dd3a452b4118c587d3d872c64c/multidict-6.6.4-cp313-cp313t-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:f996b87b420995a9174b2a7c1a8daf7db4750be6848b03eb5e639674f7963773", size = 229321 }, + { url = "https://files.pythonhosted.org/packages/c7/bc/01ddda2a73dd9d167bd85d0e8ef4293836a8f82b786c63fb1a429bc3e678/multidict-6.6.4-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc356250cffd6e78416cf5b40dc6a74f1edf3be8e834cf8862d9ed5265cf9b0e", size = 249954 }, + { url = "https://files.pythonhosted.org/packages/06/78/6b7c0f020f9aa0acf66d0ab4eb9f08375bac9a50ff5e3edb1c4ccd59eafc/multidict-6.6.4-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:dadf95aa862714ea468a49ad1e09fe00fcc9ec67d122f6596a8d40caf6cec7d0", size = 228612 }, + { url = "https://files.pythonhosted.org/packages/00/44/3faa416f89b2d5d76e9d447296a81521e1c832ad6e40b92f990697b43192/multidict-6.6.4-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7dd57515bebffd8ebd714d101d4c434063322e4fe24042e90ced41f18b6d3395", size = 257528 }, + { url = "https://files.pythonhosted.org/packages/05/5f/77c03b89af0fcb16f018f668207768191fb9dcfb5e3361a5e706a11db2c9/multidict-6.6.4-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:967af5f238ebc2eb1da4e77af5492219fbd9b4b812347da39a7b5f5c72c0fa45", size = 256329 }, + { url = "https://files.pythonhosted.org/packages/cf/e9/ed750a2a9afb4f8dc6f13dc5b67b514832101b95714f1211cd42e0aafc26/multidict-6.6.4-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2a4c6875c37aae9794308ec43e3530e4aa0d36579ce38d89979bbf89582002bb", size = 247928 }, + { url = 
"https://files.pythonhosted.org/packages/1f/b5/e0571bc13cda277db7e6e8a532791d4403dacc9850006cb66d2556e649c0/multidict-6.6.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:7f683a551e92bdb7fac545b9c6f9fa2aebdeefa61d607510b3533286fcab67f5", size = 245228 }, + { url = "https://files.pythonhosted.org/packages/f3/a3/69a84b0eccb9824491f06368f5b86e72e4af54c3067c37c39099b6687109/multidict-6.6.4-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:3ba5aaf600edaf2a868a391779f7a85d93bed147854925f34edd24cc70a3e141", size = 235869 }, + { url = "https://files.pythonhosted.org/packages/a9/9d/28802e8f9121a6a0804fa009debf4e753d0a59969ea9f70be5f5fdfcb18f/multidict-6.6.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:580b643b7fd2c295d83cad90d78419081f53fd532d1f1eb67ceb7060f61cff0d", size = 243446 }, + { url = "https://files.pythonhosted.org/packages/38/ea/6c98add069b4878c1d66428a5f5149ddb6d32b1f9836a826ac764b9940be/multidict-6.6.4-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:37b7187197da6af3ee0b044dbc9625afd0c885f2800815b228a0e70f9a7f473d", size = 252299 }, + { url = "https://files.pythonhosted.org/packages/3a/09/8fe02d204473e14c0af3affd50af9078839dfca1742f025cca765435d6b4/multidict-6.6.4-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e1b93790ed0bc26feb72e2f08299691ceb6da5e9e14a0d13cc74f1869af327a0", size = 246926 }, + { url = "https://files.pythonhosted.org/packages/37/3d/7b1e10d774a6df5175ecd3c92bff069e77bed9ec2a927fdd4ff5fe182f67/multidict-6.6.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a506a77ddee1efcca81ecbeae27ade3e09cdf21a8ae854d766c2bb4f14053f92", size = 243383 }, + { url = "https://files.pythonhosted.org/packages/50/b0/a6fae46071b645ae98786ab738447de1ef53742eaad949f27e960864bb49/multidict-6.6.4-cp313-cp313t-win32.whl", hash = "sha256:f93b2b2279883d1d0a9e1bd01f312d6fc315c5e4c1f09e112e4736e2f650bc4e", size = 47775 }, + { url = 
"https://files.pythonhosted.org/packages/b2/0a/2436550b1520091af0600dff547913cb2d66fbac27a8c33bc1b1bccd8d98/multidict-6.6.4-cp313-cp313t-win_amd64.whl", hash = "sha256:6d46a180acdf6e87cc41dc15d8f5c2986e1e8739dc25dbb7dac826731ef381a4", size = 53100 }, + { url = "https://files.pythonhosted.org/packages/97/ea/43ac51faff934086db9c072a94d327d71b7d8b40cd5dcb47311330929ef0/multidict-6.6.4-cp313-cp313t-win_arm64.whl", hash = "sha256:756989334015e3335d087a27331659820d53ba432befdef6a718398b0a8493ad", size = 45501 }, + { url = "https://files.pythonhosted.org/packages/fd/69/b547032297c7e63ba2af494edba695d781af8a0c6e89e4d06cf848b21d80/multidict-6.6.4-py3-none-any.whl", hash = "sha256:27d8f8e125c07cb954e54d75d04905a9bba8a439c1d84aca94949d4d03d8601c", size = 12313 }, +] + [[package]] name = "numpy" version = "2.3.2" @@ -1614,6 +1665,47 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ce/4f/5249960887b1fbe561d9ff265496d170b55a735b76724f10ef19f9e40716/prompt_toolkit-3.0.51-py3-none-any.whl", hash = "sha256:52742911fde84e2d423e2f9a4cf1de7d7ac4e51958f648d9540e0fb8db077b07", size = 387810, upload-time = "2025-04-15T09:18:44.753Z" }, ] +[[package]] +name = "propcache" +version = "0.3.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a6/16/43264e4a779dd8588c21a70f0709665ee8f611211bdd2c87d952cfa7c776/propcache-0.3.2.tar.gz", hash = "sha256:20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168", size = 44139 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/d1/8c747fafa558c603c4ca19d8e20b288aa0c7cda74e9402f50f31eb65267e/propcache-0.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ca592ed634a73ca002967458187109265e980422116c0a107cf93d81f95af945", size = 71286 }, + { url = "https://files.pythonhosted.org/packages/61/99/d606cb7986b60d89c36de8a85d58764323b3a5ff07770a99d8e993b3fa73/propcache-0.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:9ecb0aad4020e275652ba3975740f241bd12a61f1a784df044cf7477a02bc252", size = 42425 }, + { url = "https://files.pythonhosted.org/packages/8c/96/ef98f91bbb42b79e9bb82bdd348b255eb9d65f14dbbe3b1594644c4073f7/propcache-0.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7f08f1cc28bd2eade7a8a3d2954ccc673bb02062e3e7da09bc75d843386b342f", size = 41846 }, + { url = "https://files.pythonhosted.org/packages/5b/ad/3f0f9a705fb630d175146cd7b1d2bf5555c9beaed54e94132b21aac098a6/propcache-0.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1a342c834734edb4be5ecb1e9fb48cb64b1e2320fccbd8c54bf8da8f2a84c33", size = 208871 }, + { url = "https://files.pythonhosted.org/packages/3a/38/2085cda93d2c8b6ec3e92af2c89489a36a5886b712a34ab25de9fbca7992/propcache-0.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a544caaae1ac73f1fecfae70ded3e93728831affebd017d53449e3ac052ac1e", size = 215720 }, + { url = "https://files.pythonhosted.org/packages/61/c1/d72ea2dc83ac7f2c8e182786ab0fc2c7bd123a1ff9b7975bee671866fe5f/propcache-0.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:310d11aa44635298397db47a3ebce7db99a4cc4b9bbdfcf6c98a60c8d5261cf1", size = 215203 }, + { url = "https://files.pythonhosted.org/packages/af/81/b324c44ae60c56ef12007105f1460d5c304b0626ab0cc6b07c8f2a9aa0b8/propcache-0.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c1396592321ac83157ac03a2023aa6cc4a3cc3cfdecb71090054c09e5a7cce3", size = 206365 }, + { url = "https://files.pythonhosted.org/packages/09/73/88549128bb89e66d2aff242488f62869014ae092db63ccea53c1cc75a81d/propcache-0.3.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cabf5b5902272565e78197edb682017d21cf3b550ba0460ee473753f28d23c1", size = 196016 }, + { url = 
"https://files.pythonhosted.org/packages/b9/3f/3bdd14e737d145114a5eb83cb172903afba7242f67c5877f9909a20d948d/propcache-0.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0a2f2235ac46a7aa25bdeb03a9e7060f6ecbd213b1f9101c43b3090ffb971ef6", size = 205596 }, + { url = "https://files.pythonhosted.org/packages/0f/ca/2f4aa819c357d3107c3763d7ef42c03980f9ed5c48c82e01e25945d437c1/propcache-0.3.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:92b69e12e34869a6970fd2f3da91669899994b47c98f5d430b781c26f1d9f387", size = 200977 }, + { url = "https://files.pythonhosted.org/packages/cd/4a/e65276c7477533c59085251ae88505caf6831c0e85ff8b2e31ebcbb949b1/propcache-0.3.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:54e02207c79968ebbdffc169591009f4474dde3b4679e16634d34c9363ff56b4", size = 197220 }, + { url = "https://files.pythonhosted.org/packages/7c/54/fc7152e517cf5578278b242396ce4d4b36795423988ef39bb8cd5bf274c8/propcache-0.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4adfb44cb588001f68c5466579d3f1157ca07f7504fc91ec87862e2b8e556b88", size = 210642 }, + { url = "https://files.pythonhosted.org/packages/b9/80/abeb4a896d2767bf5f1ea7b92eb7be6a5330645bd7fb844049c0e4045d9d/propcache-0.3.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fd3e6019dc1261cd0291ee8919dd91fbab7b169bb76aeef6c716833a3f65d206", size = 212789 }, + { url = "https://files.pythonhosted.org/packages/b3/db/ea12a49aa7b2b6d68a5da8293dcf50068d48d088100ac016ad92a6a780e6/propcache-0.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4c181cad81158d71c41a2bce88edce078458e2dd5ffee7eddd6b05da85079f43", size = 205880 }, + { url = "https://files.pythonhosted.org/packages/d1/e5/9076a0bbbfb65d1198007059c65639dfd56266cf8e477a9707e4b1999ff4/propcache-0.3.2-cp313-cp313-win32.whl", hash = "sha256:8a08154613f2249519e549de2330cf8e2071c2887309a7b07fb56098f5170a02", size = 37220 }, + { url = 
"https://files.pythonhosted.org/packages/d3/f5/b369e026b09a26cd77aa88d8fffd69141d2ae00a2abaaf5380d2603f4b7f/propcache-0.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:e41671f1594fc4ab0a6dec1351864713cb3a279910ae8b58f884a88a0a632c05", size = 40678 }, + { url = "https://files.pythonhosted.org/packages/a4/3a/6ece377b55544941a08d03581c7bc400a3c8cd3c2865900a68d5de79e21f/propcache-0.3.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:9a3cf035bbaf035f109987d9d55dc90e4b0e36e04bbbb95af3055ef17194057b", size = 76560 }, + { url = "https://files.pythonhosted.org/packages/0c/da/64a2bb16418740fa634b0e9c3d29edff1db07f56d3546ca2d86ddf0305e1/propcache-0.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:156c03d07dc1323d8dacaa221fbe028c5c70d16709cdd63502778e6c3ccca1b0", size = 44676 }, + { url = "https://files.pythonhosted.org/packages/36/7b/f025e06ea51cb72c52fb87e9b395cced02786610b60a3ed51da8af017170/propcache-0.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74413c0ba02ba86f55cf60d18daab219f7e531620c15f1e23d95563f505efe7e", size = 44701 }, + { url = "https://files.pythonhosted.org/packages/a4/00/faa1b1b7c3b74fc277f8642f32a4c72ba1d7b2de36d7cdfb676db7f4303e/propcache-0.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f066b437bb3fa39c58ff97ab2ca351db465157d68ed0440abecb21715eb24b28", size = 276934 }, + { url = "https://files.pythonhosted.org/packages/74/ab/935beb6f1756e0476a4d5938ff44bf0d13a055fed880caf93859b4f1baf4/propcache-0.3.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1304b085c83067914721e7e9d9917d41ad87696bf70f0bc7dee450e9c71ad0a", size = 278316 }, + { url = "https://files.pythonhosted.org/packages/f8/9d/994a5c1ce4389610838d1caec74bdf0e98b306c70314d46dbe4fcf21a3e2/propcache-0.3.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab50cef01b372763a13333b4e54021bdcb291fc9a8e2ccb9c2df98be51bcde6c", size = 282619 }, + { url = 
"https://files.pythonhosted.org/packages/2b/00/a10afce3d1ed0287cef2e09506d3be9822513f2c1e96457ee369adb9a6cd/propcache-0.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad3b2a085ec259ad2c2842666b2a0a49dea8463579c606426128925af1ed725", size = 265896 }, + { url = "https://files.pythonhosted.org/packages/2e/a8/2aa6716ffa566ca57c749edb909ad27884680887d68517e4be41b02299f3/propcache-0.3.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:261fa020c1c14deafd54c76b014956e2f86991af198c51139faf41c4d5e83892", size = 252111 }, + { url = "https://files.pythonhosted.org/packages/36/4f/345ca9183b85ac29c8694b0941f7484bf419c7f0fea2d1e386b4f7893eed/propcache-0.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:46d7f8aa79c927e5f987ee3a80205c987717d3659f035c85cf0c3680526bdb44", size = 268334 }, + { url = "https://files.pythonhosted.org/packages/3e/ca/fcd54f78b59e3f97b3b9715501e3147f5340167733d27db423aa321e7148/propcache-0.3.2-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:6d8f3f0eebf73e3c0ff0e7853f68be638b4043c65a70517bb575eff54edd8dbe", size = 255026 }, + { url = "https://files.pythonhosted.org/packages/8b/95/8e6a6bbbd78ac89c30c225210a5c687790e532ba4088afb8c0445b77ef37/propcache-0.3.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:03c89c1b14a5452cf15403e291c0ccd7751d5b9736ecb2c5bab977ad6c5bcd81", size = 250724 }, + { url = "https://files.pythonhosted.org/packages/ee/b0/0dd03616142baba28e8b2d14ce5df6631b4673850a3d4f9c0f9dd714a404/propcache-0.3.2-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:0cc17efde71e12bbaad086d679ce575268d70bc123a5a71ea7ad76f70ba30bba", size = 268868 }, + { url = "https://files.pythonhosted.org/packages/c5/98/2c12407a7e4fbacd94ddd32f3b1e3d5231e77c30ef7162b12a60e2dd5ce3/propcache-0.3.2-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:acdf05d00696bc0447e278bb53cb04ca72354e562cf88ea6f9107df8e7fd9770", size = 271322 }, + { url = 
"https://files.pythonhosted.org/packages/35/91/9cb56efbb428b006bb85db28591e40b7736847b8331d43fe335acf95f6c8/propcache-0.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4445542398bd0b5d32df908031cb1b30d43ac848e20470a878b770ec2dcc6330", size = 265778 }, + { url = "https://files.pythonhosted.org/packages/9a/4c/b0fe775a2bdd01e176b14b574be679d84fc83958335790f7c9a686c1f468/propcache-0.3.2-cp313-cp313t-win32.whl", hash = "sha256:f86e5d7cd03afb3a1db8e9f9f6eff15794e79e791350ac48a8c924e6f439f394", size = 41175 }, + { url = "https://files.pythonhosted.org/packages/a4/ff/47f08595e3d9b5e149c150f88d9714574f1a7cbd89fe2817158a952674bf/propcache-0.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:9704bedf6e7cbe3c65eca4379a9b53ee6a83749f047808cbb5044d40d7d72198", size = 44857 }, + { url = "https://files.pythonhosted.org/packages/cc/35/cc0aaecf278bb4575b8555f2b137de5ab821595ddae9da9d3cd1da4072c7/propcache-0.3.2-py3-none-any.whl", hash = "sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f", size = 12663 }, +] + [[package]] name = "proto-plus" version = "1.26.1" @@ -1921,6 +2013,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e3/ac/c428c66241a144617a8af7a28e2e055e1438d23b949b62ac4b401a69fb79/pytest_profiling-1.8.1-py3-none-any.whl", hash = "sha256:3dd8713a96298b42d83de8f5951df3ada3e61b3e5d2a06956684175529e17aea", size = 9929, upload-time = "2024-11-29T19:33:02.111Z" }, ] +[[package]] +name = "pytest-recording" +version = "0.13.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, + { name = "vcrpy", version = "5.1.0", source = { registry = "https://pypi.org/simple" }, marker = "platform_python_implementation == 'PyPy'" }, + { name = "vcrpy", version = "7.0.0", source = { registry = "https://pypi.org/simple" }, marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/32/9c/f4027c5f1693847b06d11caf4b4f6bb09f22c1581ada4663877ec166b8c6/pytest_recording-0.13.4.tar.gz", hash = "sha256:568d64b2a85992eec4ae0a419c855d5fd96782c5fb016784d86f18053792768c", size = 26576 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/c2/ce34735972cc42d912173e79f200fe66530225190c06655c5632a9d88f1e/pytest_recording-0.13.4-py3-none-any.whl", hash = "sha256:ad49a434b51b1c4f78e85b1e6b74fdcc2a0a581ca16e52c798c6ace971f7f439", size = 13723 }, +] + [[package]] name = "python-crontab" version = "3.3.0" @@ -2375,6 +2481,41 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, ] +[[package]] +name = "vcrpy" +version = "5.1.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "platform_python_implementation == 'PyPy'", +] +dependencies = [ + { name = "pyyaml", marker = "platform_python_implementation == 'PyPy'" }, + { name = "wrapt", marker = "platform_python_implementation == 'PyPy'" }, + { name = "yarl", marker = "platform_python_implementation == 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a5/ea/a166a3cce4ac5958ba9bbd9768acdb1ba38ae17ff7986da09fa5b9dbc633/vcrpy-5.1.0.tar.gz", hash = "sha256:bbf1532f2618a04f11bce2a99af3a9647a32c880957293ff91e0a5f187b6b3d2", size = 84576 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/5b/3f70bcb279ad30026cc4f1df0a0491a0205a24dddd88301f396c485de9e7/vcrpy-5.1.0-py2.py3-none-any.whl", hash = "sha256:605e7b7a63dcd940db1df3ab2697ca7faf0e835c0852882142bafb19649d599e", size = 41969 }, +] + +[[package]] +name = "vcrpy" +version = "7.0.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "platform_python_implementation != 'PyPy'", +] 
+dependencies = [ + { name = "pyyaml", marker = "platform_python_implementation != 'PyPy'" }, + { name = "urllib3", marker = "platform_python_implementation != 'PyPy'" }, + { name = "wrapt", marker = "platform_python_implementation != 'PyPy'" }, + { name = "yarl", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/25/d3/856e06184d4572aada1dd559ddec3bedc46df1f2edc5ab2c91121a2cccdb/vcrpy-7.0.0.tar.gz", hash = "sha256:176391ad0425edde1680c5b20738ea3dc7fb942520a48d2993448050986b3a50", size = 85502 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/13/5d/1f15b252890c968d42b348d1e9b0aa12d5bf3e776704178ec37cceccdb63/vcrpy-7.0.0-py2.py3-none-any.whl", hash = "sha256:55791e26c18daa363435054d8b35bd41a4ac441b6676167635d1b37a71dbe124", size = 42321 }, +] + [[package]] name = "vine" version = "5.1.0" @@ -2423,3 +2564,51 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/01/77/66e54407c59d7b02a3c4e0af3783168fff8e5d61def52cda8728439d86bc/wrapt-1.17.3-cp313-cp313-win_arm64.whl", hash = "sha256:7425ac3c54430f5fc5e7b6f41d41e704db073309acfc09305816bc6a0b26bb16", size = 36896, upload-time = "2025-08-12T05:52:55.34Z" }, { url = "https://files.pythonhosted.org/packages/1f/f6/a933bd70f98e9cf3e08167fc5cd7aaaca49147e48411c0bd5ae701bb2194/wrapt-1.17.3-py3-none-any.whl", hash = "sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22", size = 23591, upload-time = "2025-08-12T05:53:20.674Z" }, ] + +[[package]] +name = "yarl" +version = "1.20.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "multidict" }, + { name = "propcache" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3c/fb/efaa23fa4e45537b827620f04cf8f3cd658b76642205162e072703a5b963/yarl-1.20.1.tar.gz", hash = "sha256:d017a4997ee50c91fd5466cef416231bb82177b93b029906cefc542ce14c35ac", size = 186428 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/8a/e1/2411b6d7f769a07687acee88a062af5833cf1966b7266f3d8dfb3d3dc7d3/yarl-1.20.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:0b5ff0fbb7c9f1b1b5ab53330acbfc5247893069e7716840c8e7d5bb7355038a", size = 131811 }, + { url = "https://files.pythonhosted.org/packages/b2/27/584394e1cb76fb771371770eccad35de400e7b434ce3142c2dd27392c968/yarl-1.20.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:14f326acd845c2b2e2eb38fb1346c94f7f3b01a4f5c788f8144f9b630bfff9a3", size = 90078 }, + { url = "https://files.pythonhosted.org/packages/bf/9a/3246ae92d4049099f52d9b0fe3486e3b500e29b7ea872d0f152966fc209d/yarl-1.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f60e4ad5db23f0b96e49c018596707c3ae89f5d0bd97f0ad3684bcbad899f1e7", size = 88748 }, + { url = "https://files.pythonhosted.org/packages/a3/25/35afe384e31115a1a801fbcf84012d7a066d89035befae7c5d4284df1e03/yarl-1.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49bdd1b8e00ce57e68ba51916e4bb04461746e794e7c4d4bbc42ba2f18297691", size = 349595 }, + { url = "https://files.pythonhosted.org/packages/28/2d/8aca6cb2cabc8f12efcb82749b9cefecbccfc7b0384e56cd71058ccee433/yarl-1.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:66252d780b45189975abfed839616e8fd2dbacbdc262105ad7742c6ae58f3e31", size = 342616 }, + { url = "https://files.pythonhosted.org/packages/0b/e9/1312633d16b31acf0098d30440ca855e3492d66623dafb8e25b03d00c3da/yarl-1.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59174e7332f5d153d8f7452a102b103e2e74035ad085f404df2e40e663a22b28", size = 361324 }, + { url = "https://files.pythonhosted.org/packages/bc/a0/688cc99463f12f7669eec7c8acc71ef56a1521b99eab7cd3abb75af887b0/yarl-1.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3968ec7d92a0c0f9ac34d5ecfd03869ec0cab0697c91a45db3fbbd95fe1b653", size = 359676 }, + { url = 
"https://files.pythonhosted.org/packages/af/44/46407d7f7a56e9a85a4c207724c9f2c545c060380718eea9088f222ba697/yarl-1.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1a4fbb50e14396ba3d375f68bfe02215d8e7bc3ec49da8341fe3157f59d2ff5", size = 352614 }, + { url = "https://files.pythonhosted.org/packages/b1/91/31163295e82b8d5485d31d9cf7754d973d41915cadce070491778d9c9825/yarl-1.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11a62c839c3a8eac2410e951301309426f368388ff2f33799052787035793b02", size = 336766 }, + { url = "https://files.pythonhosted.org/packages/b4/8e/c41a5bc482121f51c083c4c2bcd16b9e01e1cf8729e380273a952513a21f/yarl-1.20.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:041eaa14f73ff5a8986b4388ac6bb43a77f2ea09bf1913df7a35d4646db69e53", size = 364615 }, + { url = "https://files.pythonhosted.org/packages/e3/5b/61a3b054238d33d70ea06ebba7e58597891b71c699e247df35cc984ab393/yarl-1.20.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:377fae2fef158e8fd9d60b4c8751387b8d1fb121d3d0b8e9b0be07d1b41e83dc", size = 360982 }, + { url = "https://files.pythonhosted.org/packages/df/a3/6a72fb83f8d478cb201d14927bc8040af901811a88e0ff2da7842dd0ed19/yarl-1.20.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1c92f4390e407513f619d49319023664643d3339bd5e5a56a3bebe01bc67ec04", size = 369792 }, + { url = "https://files.pythonhosted.org/packages/7c/af/4cc3c36dfc7c077f8dedb561eb21f69e1e9f2456b91b593882b0b18c19dc/yarl-1.20.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d25ddcf954df1754ab0f86bb696af765c5bfaba39b74095f27eececa049ef9a4", size = 382049 }, + { url = "https://files.pythonhosted.org/packages/19/3a/e54e2c4752160115183a66dc9ee75a153f81f3ab2ba4bf79c3c53b33de34/yarl-1.20.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:909313577e9619dcff8c31a0ea2aa0a2a828341d92673015456b3ae492e7317b", size = 384774 }, + { url = 
"https://files.pythonhosted.org/packages/9c/20/200ae86dabfca89060ec6447649f219b4cbd94531e425e50d57e5f5ac330/yarl-1.20.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:793fd0580cb9664548c6b83c63b43c477212c0260891ddf86809e1c06c8b08f1", size = 374252 }, + { url = "https://files.pythonhosted.org/packages/83/75/11ee332f2f516b3d094e89448da73d557687f7d137d5a0f48c40ff211487/yarl-1.20.1-cp313-cp313-win32.whl", hash = "sha256:468f6e40285de5a5b3c44981ca3a319a4b208ccc07d526b20b12aeedcfa654b7", size = 81198 }, + { url = "https://files.pythonhosted.org/packages/ba/ba/39b1ecbf51620b40ab402b0fc817f0ff750f6d92712b44689c2c215be89d/yarl-1.20.1-cp313-cp313-win_amd64.whl", hash = "sha256:495b4ef2fea40596bfc0affe3837411d6aa3371abcf31aac0ccc4bdd64d4ef5c", size = 86346 }, + { url = "https://files.pythonhosted.org/packages/43/c7/669c52519dca4c95153c8ad96dd123c79f354a376346b198f438e56ffeb4/yarl-1.20.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:f60233b98423aab21d249a30eb27c389c14929f47be8430efa7dbd91493a729d", size = 138826 }, + { url = "https://files.pythonhosted.org/packages/6a/42/fc0053719b44f6ad04a75d7f05e0e9674d45ef62f2d9ad2c1163e5c05827/yarl-1.20.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6f3eff4cc3f03d650d8755c6eefc844edde99d641d0dcf4da3ab27141a5f8ddf", size = 93217 }, + { url = "https://files.pythonhosted.org/packages/4f/7f/fa59c4c27e2a076bba0d959386e26eba77eb52ea4a0aac48e3515c186b4c/yarl-1.20.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:69ff8439d8ba832d6bed88af2c2b3445977eba9a4588b787b32945871c2444e3", size = 92700 }, + { url = "https://files.pythonhosted.org/packages/2f/d4/062b2f48e7c93481e88eff97a6312dca15ea200e959f23e96d8ab898c5b8/yarl-1.20.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cf34efa60eb81dd2645a2e13e00bb98b76c35ab5061a3989c7a70f78c85006d", size = 347644 }, + { url = 
"https://files.pythonhosted.org/packages/89/47/78b7f40d13c8f62b499cc702fdf69e090455518ae544c00a3bf4afc9fc77/yarl-1.20.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8e0fe9364ad0fddab2688ce72cb7a8e61ea42eff3c7caeeb83874a5d479c896c", size = 323452 }, + { url = "https://files.pythonhosted.org/packages/eb/2b/490d3b2dc66f52987d4ee0d3090a147ea67732ce6b4d61e362c1846d0d32/yarl-1.20.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f64fbf81878ba914562c672024089e3401974a39767747691c65080a67b18c1", size = 346378 }, + { url = "https://files.pythonhosted.org/packages/66/ad/775da9c8a94ce925d1537f939a4f17d782efef1f973039d821cbe4bcc211/yarl-1.20.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6342d643bf9a1de97e512e45e4b9560a043347e779a173250824f8b254bd5ce", size = 353261 }, + { url = "https://files.pythonhosted.org/packages/4b/23/0ed0922b47a4f5c6eb9065d5ff1e459747226ddce5c6a4c111e728c9f701/yarl-1.20.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56dac5f452ed25eef0f6e3c6a066c6ab68971d96a9fb441791cad0efba6140d3", size = 335987 }, + { url = "https://files.pythonhosted.org/packages/3e/49/bc728a7fe7d0e9336e2b78f0958a2d6b288ba89f25a1762407a222bf53c3/yarl-1.20.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7d7f497126d65e2cad8dc5f97d34c27b19199b6414a40cb36b52f41b79014be", size = 329361 }, + { url = "https://files.pythonhosted.org/packages/93/8f/b811b9d1f617c83c907e7082a76e2b92b655400e61730cd61a1f67178393/yarl-1.20.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:67e708dfb8e78d8a19169818eeb5c7a80717562de9051bf2413aca8e3696bf16", size = 346460 }, + { url = "https://files.pythonhosted.org/packages/70/fd/af94f04f275f95da2c3b8b5e1d49e3e79f1ed8b6ceb0f1664cbd902773ff/yarl-1.20.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = 
"sha256:595c07bc79af2494365cc96ddeb772f76272364ef7c80fb892ef9d0649586513", size = 334486 }, + { url = "https://files.pythonhosted.org/packages/84/65/04c62e82704e7dd0a9b3f61dbaa8447f8507655fd16c51da0637b39b2910/yarl-1.20.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7bdd2f80f4a7df852ab9ab49484a4dee8030023aa536df41f2d922fd57bf023f", size = 342219 }, + { url = "https://files.pythonhosted.org/packages/91/95/459ca62eb958381b342d94ab9a4b6aec1ddec1f7057c487e926f03c06d30/yarl-1.20.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c03bfebc4ae8d862f853a9757199677ab74ec25424d0ebd68a0027e9c639a390", size = 350693 }, + { url = "https://files.pythonhosted.org/packages/a6/00/d393e82dd955ad20617abc546a8f1aee40534d599ff555ea053d0ec9bf03/yarl-1.20.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:344d1103e9c1523f32a5ed704d576172d2cabed3122ea90b1d4e11fe17c66458", size = 355803 }, + { url = "https://files.pythonhosted.org/packages/9e/ed/c5fb04869b99b717985e244fd93029c7a8e8febdfcffa06093e32d7d44e7/yarl-1.20.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:88cab98aa4e13e1ade8c141daeedd300a4603b7132819c484841bb7af3edce9e", size = 341709 }, + { url = "https://files.pythonhosted.org/packages/24/fd/725b8e73ac2a50e78a4534ac43c6addf5c1c2d65380dd48a9169cc6739a9/yarl-1.20.1-cp313-cp313t-win32.whl", hash = "sha256:b121ff6a7cbd4abc28985b6028235491941b9fe8fe226e6fdc539c977ea1739d", size = 86591 }, + { url = "https://files.pythonhosted.org/packages/94/c3/b2e9f38bc3e11191981d57ea08cab2166e74ea770024a646617c9cddd9f6/yarl-1.20.1-cp313-cp313t-win_amd64.whl", hash = "sha256:541d050a355bbbc27e55d906bc91cb6fe42f96c01413dd0f4ed5a5240513874f", size = 93003 }, + { url = "https://files.pythonhosted.org/packages/b4/2d/2345fce04cfd4bee161bf1e7d9cdc702e3e16109021035dbb24db654a622/yarl-1.20.1-py3-none-any.whl", hash = "sha256:83b8eb083fe4683c6115795d9fc1cfaf2cbbefb19b3a1cb68f6527460f483a77", size = 46542 }, +] From 720af6c8f00a81be8ce09385ae671c9425a4d55d Mon Sep 17 00:00:00 2001 
From: tnagorra Date: Wed, 1 Oct 2025 08:43:19 +0545 Subject: [PATCH 6/9] fix(project): sort tasks and groups before creating on firebase - compress street tasks on firebase --- project_types/base/project.py | 4 ++-- project_types/street/project.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/project_types/base/project.py b/project_types/base/project.py index 0dce8a64..2af9e75f 100644 --- a/project_types/base/project.py +++ b/project_types/base/project.py @@ -296,7 +296,7 @@ def compress_tasks_on_firebase(self) -> bool: return False def create_tasks_on_firebase(self, task_ref: FbReference): - tasks = ProjectTask.objects.filter(task_group__project_id=self.project.pk) + tasks = ProjectTask.objects.filter(task_group__project_id=self.project.pk).order_by("id") grouped_tasks: dict[str, list[dict[str, typing.Any]]] = defaultdict(list) for task in tasks.iterator(): @@ -328,7 +328,7 @@ def create_tasks_on_firebase(self, task_ref: FbReference): firebase_bulk_mgr.done() def create_groups_on_firebase(self, group_ref: FbReference): - groups = ProjectTaskGroup.objects.filter(project_id=self.project.pk) + groups = ProjectTaskGroup.objects.filter(project_id=self.project.pk).order_by("id") fb_groups: dict[str, dict[str, dict]] = {} firebase_bulk_mgr = FirebaseBulkManager(ref=group_ref) diff --git a/project_types/street/project.py b/project_types/street/project.py index a354425e..f530633e 100644 --- a/project_types/street/project.py +++ b/project_types/street/project.py @@ -225,8 +225,8 @@ def get_max_time_spend_percentile(self) -> float: # FIREBASE @typing.override - def skip_tasks_on_firebase(self) -> bool: - return False + def compress_tasks_on_firebase(self) -> bool: + return True @typing.override def get_task_specifics_for_firebase(self, task: ProjectTask): From 6d192cfa10f8c3239bec8e61800df101a149f1a5 Mon Sep 17 00:00:00 2001 From: tnagorra Date: Wed, 1 Oct 2025 08:44:23 +0545 Subject: [PATCH 7/9] test(project): refactor street, validate and validate 
image project - remove un-necessary use of remove_object_keys - disable logging of vcr logs --- ...2e_create_project_tile_map_service_test.py | 43 ++-- .../tests/e2e_create_street_project_test.py | 183 ++++++-------- ...e2e_create_validate_image_project_test.py} | 175 ++++++-------- .../tests/e2e_create_validate_project_test.py | 226 ++++++++---------- assets | 2 +- conftest.py | 10 +- 6 files changed, 280 insertions(+), 359 deletions(-) rename apps/project/tests/{e2e_validate_image_project_test.py => e2e_create_validate_image_project_test.py} (78%) diff --git a/apps/project/tests/e2e_create_project_tile_map_service_test.py b/apps/project/tests/e2e_create_project_tile_map_service_test.py index e0e4f485..8a075cd8 100644 --- a/apps/project/tests/e2e_create_project_tile_map_service_test.py +++ b/apps/project/tests/e2e_create_project_tile_map_service_test.py @@ -40,7 +40,7 @@ def pre_save_override(sender: typing.Any, instance: typing.Any, **kwargs): pre_save.disconnect(pre_save_override) -class TestProjectE2E(TestCase): +class TestTileMapServiceProjectE2E(TestCase): class Mutation: CREATE_PROJECT = """ mutation CreateProject($data: ProjectCreateInput!) 
{ @@ -287,26 +287,28 @@ class Mutation: def test_find_project_e2e(self): with create_override(): - self._test_tile_map_service( + self._test_project( "find", "assets/tests/projects/find/project_data.json5", ) def test_completeness_project_e2e(self): with create_override(): - self._test_tile_map_service( + self._test_project( "completeness", "assets/tests/projects/completeness/project_data.json5", ) def test_compare_project_e2e(self): with create_override(): - self._test_tile_map_service( + self._test_project( "compare", "assets/tests/projects/compare/project_data.json5", ) - def _test_tile_map_service(self, projectKey: str, filename: str): + # Generic functions + + def _test_project(self, projectKey: str, filename: str): # Load test data file full_path = Path(settings.BASE_DIR, filename) with full_path.open("r", encoding="utf-8") as f: @@ -358,7 +360,7 @@ def _test_tile_map_service(self, projectKey: str, filename: str): project_response = project_content["data"]["createProject"] assert project_response is not None, "Project create response is None" - assert project_response["ok"] + assert project_response["ok"], project_response["errors"] project_id = project_response["result"]["id"] project_fb_id = project_response["result"]["firebaseId"] @@ -473,7 +475,7 @@ def _test_tile_map_service(self, projectKey: str, filename: str): publish_tutorial_response = publish_tutorial_content["data"]["updateTutorialStatus"] assert publish_tutorial_response["ok"], publish_tutorial_response["errors"] assert publish_tutorial_response is not None, "Processed tutorial publish response is None" - assert publish_tutorial_response["result"]["status"] == "READY_TO_PUBLISH", "tutorial should be published" + assert publish_tutorial_response["result"]["status"] == "READY_TO_PUBLISH", "tutorial should be ready to published" tutorial_fb_ref = self.firebase_helper.ref(f"/v2/projects/{tutorial_fb_id}") tutorial_fb_data = tutorial_fb_ref.get() @@ -482,27 +484,24 @@ def _test_tile_map_service(self, 
projectKey: str, filename: str): assert tutorial_fb_data is not None, "Tutorial in firebase is None" assert isinstance(tutorial_fb_data, dict), "Tutorial in firebase should be a dictionary" - ignored_tutorial_keys = [] - filtered_tutorial_actual = remove_object_keys(tutorial_fb_data, ignored_tutorial_keys) - filtered_tutorial_expected = remove_object_keys(test_data["expected_tutorial_data"], ignored_tutorial_keys) + filtered_tutorial_actual = tutorial_fb_data + filtered_tutorial_expected = test_data["expected_tutorial_data"] assert filtered_tutorial_actual == filtered_tutorial_expected, "Difference found for tutorial data in firebase." # Check tutorial groups in firebase tutorial_groups_fb_ref = self.firebase_helper.ref(f"/v2/groups/{tutorial_fb_id}/") tutorial_groups_fb_data = tutorial_groups_fb_ref.get() - ignored_group_keys = [] - filtered_group_actual = remove_object_keys(tutorial_groups_fb_data, ignored_group_keys) - filtered_group_expected = remove_object_keys(test_data["expected_tutorial_groups_data"], ignored_tutorial_keys) + filtered_group_actual = tutorial_groups_fb_data + filtered_group_expected = test_data["expected_tutorial_groups_data"] assert filtered_group_actual == filtered_group_expected, "Difference found for tutorial group data in firebase." # Check tutorial tasks in firebase tutorial_tasks_ref = self.firebase_helper.ref(f"/v2/tasks/{tutorial_fb_id}/") tutorial_task_fb_data = tutorial_tasks_ref.get() - ignored_task_keys = [] - sanitized_tasks_actual = remove_object_keys(tutorial_task_fb_data, ignored_task_keys) - sanitized_tasks_expected = remove_object_keys(test_data["expected_tutorial_tasks_data"], ignored_task_keys) + sanitized_tasks_actual = tutorial_task_fb_data + sanitized_tasks_expected = test_data["expected_tutorial_tasks_data"] assert sanitized_tasks_actual == sanitized_tasks_expected, ( "Differences found between expected and actual tasks on tutorial in firebase." 
@@ -554,18 +553,16 @@ def _test_tile_map_service(self, projectKey: str, filename: str): groups_fb_ref = self.firebase_helper.ref(f"/v2/groups/{project_fb_id}/") groups_fb_data = groups_fb_ref.get() - ignored_group_keys = [] - filtered_group_actual = remove_object_keys(groups_fb_data, ignored_group_keys) - filtered_group_expected = remove_object_keys(test_data["expected_project_groups_data"], ignored_project_keys) + filtered_group_actual = groups_fb_data + filtered_group_expected = test_data["expected_project_groups_data"] assert filtered_group_actual == filtered_group_expected, "Difference found for group data on project in firebase." # Check project tasks in firebase project_tasks_ref = self.firebase_helper.ref(f"/v2/tasks/{project_fb_id}/") - project_tasks_db_data = project_tasks_ref.get() + project_tasks_fb_data = project_tasks_ref.get() - ignored_task_keys = [] - sanitized_tasks_actual = remove_object_keys(project_tasks_db_data, ignored_task_keys) - sanitized_tasks_expected = remove_object_keys(test_data["expected_project_tasks_data"], ignored_task_keys) + sanitized_tasks_actual = project_tasks_fb_data + sanitized_tasks_expected = test_data["expected_project_tasks_data"] assert sanitized_tasks_actual == sanitized_tasks_expected, ( "Differences found between expected and actual tasks on project in firebase." 
diff --git a/apps/project/tests/e2e_create_street_project_test.py b/apps/project/tests/e2e_create_street_project_test.py index a5213075..594084ab 100644 --- a/apps/project/tests/e2e_create_street_project_test.py +++ b/apps/project/tests/e2e_create_street_project_test.py @@ -1,18 +1,39 @@ +import logging import typing +from contextlib import contextmanager from datetime import datetime from pathlib import Path import json5 import pytest from django.conf import settings +from django.db.models.signals import pre_save from ulid import ULID from apps.common.utils import decode_tasks, remove_object_keys from apps.contributor.factories import ContributorUserFactory +from apps.project.models import Organization, Project +from apps.tutorial.models import Tutorial from apps.user.factories import UserFactory -from main.config import Config from main.tests import TestCase +logging.getLogger("vcr").setLevel(logging.WARNING) + + +@contextmanager +def create_override(): + def pre_save_override(sender: typing.Any, instance: typing.Any, **kwargs): + if sender == Tutorial: + instance.firebase_id = f"tutorial_{instance.client_id}" + elif sender in {Project, Organization}: + instance.firebase_id = instance.client_id + + pre_save.connect(pre_save_override) + try: + yield True + finally: + pre_save.disconnect(pre_save_override) + class TestStreetProjectE2E(TestCase): class Mutation: @@ -200,6 +221,7 @@ class Mutation: ok result { id + status } } } @@ -230,44 +252,38 @@ class Mutation: } """ - @typing.override - @classmethod - def setUpClass(cls): - super().setUpClass() - cls.firebase_helper = Config.FIREBASE_HELPER - - cls.contributor_user = ContributorUserFactory.create( - username="Ram Bahadur", - ) - - cls.user = UserFactory.create( - contributor_user=cls.contributor_user, - ) - - @pytest.mark.vcr + @pytest.mark.vcr("assets/tests/projects/street/cassette") def test_street_project_e2e(self): - self._test_project( - "assets/tests/projects/street/project_data.json5", - ) + # 
TODO(susilnem): Add more test with filters + with create_override(): + self._test_project( + "assets/tests/projects/street/project_data.json5", + ) - # TODO(susilnem): Add more test with filters + # Generic functions def _test_project(self, filename: str): - self.force_login(self.user) - # Load test data file full_path = Path(settings.BASE_DIR, filename) with full_path.open("r", encoding="utf-8") as f: test_data = json5.load(f) + # Create contributor user and login + contributor_user = ContributorUserFactory.create( + username="Ram Bahadur", + firebase_id=test_data["contributor_user_firebase_id"], + ) + user = UserFactory.create( + contributor_user=contributor_user, + ) + + self.force_login(user) + # Define full path for image and AOI files image_filename = Path(settings.BASE_DIR) / test_data["assets"]["image"] aoi_geometry_filename = Path(settings.BASE_DIR) / test_data["assets"]["aoi"] - # Load Project data initially. - create_project_data = test_data["create_project"] - - # Create an organization and attach to project + # Create an organization create_organization_data = test_data["create_organization"] with self.captureOnCommitCallbacks(execute=True): organization_content = self.query_check( @@ -282,21 +298,14 @@ def _test_project(self, filename: str): organization_id = organization_response["result"]["id"] organization_fb_id = organization_response["result"]["firebaseId"] - # CHECK ORGANIZATION in firebase - organization_fb_ref = self.firebase_helper.ref(f"/v2/organisations/{organization_fb_id}") organization_fb_data = organization_fb_ref.get() - - # Check organization in firebase - assert organization_fb_data is not None, "organization in firebase is None" - assert isinstance(organization_fb_data, dict), "organization in firebase should be a dictionary" - - assert organization_fb_data == test_data["expected_organization_data"], ( - "Difference found for organization data in firebase." 
- ) + assert organization_fb_data is not None, "Organization in firebase is None" # Create project + create_project_data = test_data["create_project"] create_project_data["requestingOrganization"] = organization_id + with self.captureOnCommitCallbacks(execute=True): project_content = self.query_check( self.Mutation.CREATE_PROJECT, @@ -329,11 +338,6 @@ def _test_project(self, filename: str): assert image_response["ok"] image_id = image_response["result"]["id"] - # Update project - update_project_data = test_data["update_project"] - update_project_data["requestingOrganization"] = organization_id - update_project_data["image"] = image_id - # Create GeoJSON Asset for AOI Geometry aoi_asset_data = { "clientId": str(ULID()), @@ -351,8 +355,12 @@ def _test_project(self, filename: str): assert aoi_response is not None, "AOI create response is None" assert aoi_response["ok"] aoi_id = aoi_response["result"]["id"] - update_project_data["projectTypeSpecifics"]["street"]["aoiGeometry"] = aoi_id + # Update project + update_project_data = test_data["update_project"] + update_project_data["image"] = image_id + update_project_data["projectTypeSpecifics"]["street"]["aoiGeometry"] = aoi_id + update_project_data["requestingOrganization"] = organization_id with self.captureOnCommitCallbacks(execute=True): update_content = self.query_check( self.Mutation.UPDATE_PROJECT, @@ -377,7 +385,7 @@ def _test_project(self, filename: str): assert process_project_response["ok"], process_project_response["errors"] assert process_project_response["result"]["status"] == "READY_TO_PROCESS", "Project should be ready to process" - # Load Tutorial data initially. 
+ # Create tutorial from above project create_tutorial_data = test_data["create_tutorial"] create_tutorial_data["project"] = project_id with self.captureOnCommitCallbacks(execute=True): @@ -408,7 +416,7 @@ def _test_project(self, filename: str): assert update_tutorial_response["ok"], update_tutorial_response["errors"] assert update_tutorial_response is not None, "Tutorial update response is None" - # Publish Tutorial + # Publish tutorial publish_tutorial_data = { "clientId": tutorial_client_id, "status": "READY_TO_PUBLISH", @@ -423,59 +431,41 @@ def _test_project(self, filename: str): assert publish_tutorial_response is not None, "Processed tutorial publish response is None" assert publish_tutorial_response["result"]["status"] == "READY_TO_PUBLISH", "tutorial should be ready to published" - # CHECK TUTORIAL, GROUP AND TASK CREATED IN FIREBASE - tutorial_fb_ref = self.firebase_helper.ref(f"/v2/projects/{tutorial_fb_id}") tutorial_fb_data = tutorial_fb_ref.get() # Check tutorial in firebase assert tutorial_fb_data is not None, "Tutorial in firebase is None" assert isinstance(tutorial_fb_data, dict), "Tutorial in firebase should be a dictionary" - assert tutorial_fb_data["projectId"] == tutorial_fb_id, "Field 'projectId' should match firebaseId" - ignored_tutorial_keys = {"projectId", "tutorialDraftId"} - filtered_tutorial_actual = remove_object_keys(tutorial_fb_data, ignored_tutorial_keys) - filtered_tutorial_expected = remove_object_keys(test_data["expected_tutorial_data"], ignored_tutorial_keys) + filtered_tutorial_actual = tutorial_fb_data + filtered_tutorial_expected = test_data["expected_tutorial_data"] assert filtered_tutorial_actual == filtered_tutorial_expected, "Difference found for tutorial data in firebase." 
- # Check group in firebase + # Check tutorial groups in firebase tutorial_groups_fb_ref = self.firebase_helper.ref(f"/v2/groups/{tutorial_fb_id}/") tutorial_groups_fb_data = tutorial_groups_fb_ref.get() - if tutorial_groups_fb_data: - for group in iter(tutorial_groups_fb_data.values()): # type: ignore[reportAttributeAccessIssue] - assert group["projectId"] == tutorial_fb_id, "Field 'tutorialId' of each group should match firebaseId" - - ignored_group_keys = {"projectId"} - filtered_group_actual = remove_object_keys(tutorial_groups_fb_data, ignored_group_keys) - filtered_group_expected = remove_object_keys(test_data["expected_tutorial_groups_data"], ignored_tutorial_keys) + filtered_group_actual = tutorial_groups_fb_data + filtered_group_expected = test_data["expected_tutorial_groups_data"] assert filtered_group_actual == filtered_group_expected, "Difference found for tutorial group data in firebase." # Check tutorial tasks in firebase tutorial_tasks_ref = self.firebase_helper.ref(f"/v2/tasks/{tutorial_fb_id}/") - tutorial_task_fb_data = tutorial_tasks_ref.get() - - ignored_task_keys: set[str] = {"projectId"} - sanitized_tutorial_tasks_actual: list[dict[str, typing.Any]] = [] - sanitized_tutorial_tasks_expected: list[dict[str, typing.Any]] = [] + tutorial_task_fb_data: dict[str, typing.Any] = tutorial_tasks_ref.get() # type: ignore[reportArgumentType] - for group in iter(tutorial_task_fb_data.values()): # type: ignore[reportAttributeAccessIssue] - for task_fb in decode_tasks(group): - sanitized_tutorial_tasks_actual.append(remove_object_keys(task_fb, ignored_task_keys)) # type: ignore[reportGeneralTypeIssues] + # NOTE: We want to decode the tasks before comparison + for key, value in tutorial_task_fb_data.items(): + tutorial_task_fb_data[key] = decode_tasks(value) - for group in iter(test_data["expected_tutorial_tasks_data"].values()): - for task in decode_tasks(group): - sanitized_tutorial_tasks_expected.append(remove_object_keys(task, ignored_task_keys)) # type: 
ignore[reportGeneralTypeIssues] + sanitized_tasks_actual = tutorial_task_fb_data + sanitized_tasks_expected = test_data["expected_tutorial_tasks_data"] - # Sorting and comparing tasks - sanitized_tasks_actual_sorted = sorted(sanitized_tutorial_tasks_actual, key=lambda t: t["taskId"]) - sanitized_tasks_expected_sorted = sorted(sanitized_tutorial_tasks_expected, key=lambda t: t["taskId"]) - - assert sanitized_tasks_actual_sorted == sanitized_tasks_expected_sorted, ( + assert sanitized_tasks_actual == sanitized_tasks_expected, ( "Differences found between expected and actual tasks on tutorial in firebase." ) - # Update processed project + # Update processed project: attach tutorial, organization update_processed_project_data = test_data["update_processed_project"] update_processed_project_data["tutorial"] = tutorial_id update_processed_project_data["requestingOrganization"] = organization_id @@ -501,56 +491,41 @@ def _test_project(self, filename: str): publish_project_response = publish_project_content["data"]["updateProjectStatus"] assert publish_project_response["ok"], publish_project_response["errors"] assert publish_project_response is not None, "Processed project publish response is None" - assert publish_project_response["result"]["status"] == "READY_TO_PUBLISH", "Project should be ready to published" - - # CHECK PROJECT, GROUP AND TASK CREATED IN FIREBASE + assert publish_project_response["result"]["status"] == "READY_TO_PUBLISH", "Project should be ready to publish" project_fb_ref = self.firebase_helper.ref(f"/v2/projects/{project_fb_id}") project_fb_data = project_fb_ref.get() # Check project in firebase - # tutorial.refresh_from_db() assert project_fb_data is not None, "Project in firebase is None" assert isinstance(project_fb_data, dict), "Project in firebase should be a dictionary" assert project_fb_data["created"] is not None, "Field 'created' should be defined" assert datetime.fromisoformat(project_fb_data["created"]), "Field 'created' should be a 
timestamp" - assert project_fb_data["projectId"] == project_fb_id, "Field 'projectId' should match firebaseId" - assert project_fb_data["tutorialId"] == tutorial_fb_id, "Field 'tutorialId' should match tutorial's firebaseId" - assert project_fb_data["createdBy"] == self.contributor_user.firebase_id, ( - "Field 'createdBy' should match contributor user's firebaseId" - ) - ignored_project_keys = {"created", "createdBy", "projectId", "tutorialId"} + ignored_project_keys = {"created"} filtered_project_actual = remove_object_keys(project_fb_data, ignored_project_keys) filtered_project_expected = remove_object_keys(test_data["expected_project_data"], ignored_project_keys) assert filtered_project_actual == filtered_project_expected, "Difference found for project data in firebase." - # Check group in firebase + # Check project groups in firebase groups_fb_ref = self.firebase_helper.ref(f"/v2/groups/{project_fb_id}/") groups_fb_data = groups_fb_ref.get() - if groups_fb_data: - for group in iter(groups_fb_data.values()): # type: ignore[reportAttributeAccessIssue] - assert group["projectId"] == project_fb_id, "Field 'projectId' of each group should match firebaseId" - - ignored_group_keys = {"projectId"} - filtered_group_actual = remove_object_keys(groups_fb_data, ignored_group_keys) - filtered_group_expected = remove_object_keys(test_data["expected_project_groups_data"], ignored_project_keys) + filtered_group_actual = groups_fb_data + filtered_group_expected = test_data["expected_project_groups_data"] assert filtered_group_actual == filtered_group_expected, "Difference found for group data on project in firebase." 
- # Check tasks in firebase - tasks_ref = self.firebase_helper.ref(Config.FirebaseKeys.project_tasks(project_fb_id)) - project_tasks_fb_data = tasks_ref.get() + # Check project tasks in firebase + project_tasks_ref = self.firebase_helper.ref(f"/v2/tasks/{project_fb_id}/") + project_tasks_fb_data: dict[str, typing.Any] = project_tasks_ref.get() # type: ignore[reportArgumentType] - if project_tasks_fb_data: - for groups in iter(project_tasks_fb_data.values()): # type: ignore[reportAttributeAccessIssue] - for task in groups: - assert task["projectId"] == project_fb_id, "Field 'projectId' of each task should match firebaseId" + # NOTE: We want to decode the tasks before comparison + for key, value in project_tasks_fb_data.items(): + project_tasks_fb_data[key] = decode_tasks(value) - ignored_task_keys = {"projectId"} - sanitized_tasks_actual = remove_object_keys(project_tasks_fb_data, ignored_task_keys) - sanitized_tasks_expected = remove_object_keys(test_data["expected_project_tasks_data"], ignored_task_keys) + sanitized_tasks_actual = project_tasks_fb_data + sanitized_tasks_expected = test_data["expected_project_tasks_data"] assert sanitized_tasks_actual == sanitized_tasks_expected, ( - "Differences found between expected and actual tasks in firebase." + "Differences found between expected and actual tasks on project in firebase." 
) diff --git a/apps/project/tests/e2e_validate_image_project_test.py b/apps/project/tests/e2e_create_validate_image_project_test.py similarity index 78% rename from apps/project/tests/e2e_validate_image_project_test.py rename to apps/project/tests/e2e_create_validate_image_project_test.py index 08dfc067..02114d2f 100644 --- a/apps/project/tests/e2e_validate_image_project_test.py +++ b/apps/project/tests/e2e_create_validate_image_project_test.py @@ -1,18 +1,36 @@ import typing +from contextlib import contextmanager from datetime import datetime from pathlib import Path import json5 from django.conf import settings +from django.db.models.signals import pre_save from ulid import ULID from apps.common.utils import remove_object_keys from apps.contributor.factories import ContributorUserFactory +from apps.project.models import Organization, Project +from apps.tutorial.models import Tutorial from apps.user.factories import UserFactory -from main.config import Config from main.tests import TestCase +@contextmanager +def create_override(): + def pre_save_override(sender: typing.Any, instance: typing.Any, **kwargs): + if sender == Tutorial: + instance.firebase_id = f"tutorial_{instance.client_id}" + elif sender in {Project, Organization}: + instance.firebase_id = instance.client_id + + pre_save.connect(pre_save_override) + try: + yield True + finally: + pre_save.disconnect(pre_save_override) + + class TestValidateImageProjectE2E(TestCase): class Mutation: CREATE_PROJECT = """ @@ -199,6 +217,7 @@ class Mutation: ok result { id + status } } } @@ -229,42 +248,37 @@ class Mutation: } """ - @typing.override - @classmethod - def setUpClass(cls): - super().setUpClass() - cls.firebase_helper = Config.FIREBASE_HELPER - - cls.contributor_user = ContributorUserFactory.create( - username="Ram Bahadur", - ) - - cls.user = UserFactory.create( - contributor_user=cls.contributor_user, - ) - def test_validate_image_project_e2e(self): - self._test_project( - 
"assets/tests/projects/validate_image/project_data.json5", - ) + # TODO(susilnem): Add more test with filters + with create_override(): + self._test_project( + "assets/tests/projects/validate_image/project_data.json5", + ) - # TODO(susilnem): Add more test with filters - def _test_project(self, filename: str): - self.force_login(self.user) + # Generic functions + def _test_project(self, filename: str): # Load test data file full_path = Path(settings.BASE_DIR, filename) with full_path.open("r", encoding="utf-8") as f: test_data = json5.load(f) + # Create contributor user and login + contributor_user = ContributorUserFactory.create( + username="Ram Bahadur", + firebase_id=test_data["contributor_user_firebase_id"], + ) + user = UserFactory.create( + contributor_user=contributor_user, + ) + + self.force_login(user) + # Define full path for image and AOI files image_filename = Path(settings.BASE_DIR) / test_data["assets"]["image"] coco_filename = Path(settings.BASE_DIR) / test_data["assets"]["coco_dataset"] - # Load Project data initially. 
- create_project_data = test_data["create_project"] - - # Create an organization and attach to project + # Create an organization create_organization_data = test_data["create_organization"] with self.captureOnCommitCallbacks(execute=True): organization_content = self.query_check( @@ -279,21 +293,14 @@ def _test_project(self, filename: str): organization_id = organization_response["result"]["id"] organization_fb_id = organization_response["result"]["firebaseId"] - # CHECK ORGANIZATION in firebase - organization_fb_ref = self.firebase_helper.ref(f"/v2/organisations/{organization_fb_id}") organization_fb_data = organization_fb_ref.get() - - # Check organization in firebase - assert organization_fb_data is not None, "organization in firebase is None" - assert isinstance(organization_fb_data, dict), "organization in firebase should be a dictionary" - - assert organization_fb_data == test_data["expected_organization_data"], ( - "Difference found for organization data in firebase." - ) + assert organization_fb_data is not None, "Organization in firebase is None" # Create project + create_project_data = test_data["create_project"] create_project_data["requestingOrganization"] = organization_id + with self.captureOnCommitCallbacks(execute=True): project_content = self.query_check( self.Mutation.CREATE_PROJECT, @@ -326,17 +333,12 @@ def _test_project(self, filename: str): assert image_response["ok"] image_id = image_response["result"]["id"] - # Update project - update_project_data = test_data["update_project"] - update_project_data["requestingOrganization"] = organization_id - update_project_data["image"] = image_id - - # Create COCO dataset assets - if update_project_data["projectTypeSpecifics"]["validateImage"]["sourceType"] == "DATASET_FILE": + # Create Image Asset for COCO images + if test_data["update_project"]["projectTypeSpecifics"]["validateImage"]["sourceType"] == "DATASET_FILE": with coco_filename.open("r", encoding="utf-8") as f: coco_data = json5.load(f) for 
image in iter(coco_data["images"]): - aoi_asset_data = { + image_asset_data = { "clientId": str(ULID()), "inputType": "OBJECT_IMAGE", "project": project_id, @@ -353,14 +355,18 @@ def _test_project(self, filename: str): }, "externalUrl": image["coco_url"], } - aoi_content = self.query_check( + image_content = self.query_check( self.Mutation.UPLOAD_PROJECT_ASSET, - variables={"data": aoi_asset_data}, + variables={"data": image_asset_data}, ) - aoi_response = aoi_content["data"]["createProjectAsset"] - assert aoi_response is not None, "AOI create response is None" - assert aoi_response["ok"] + image_response = image_content["data"]["createProjectAsset"] + assert image_response is not None, "image create response is None" + assert image_response["ok"] + # Update project + update_project_data = test_data["update_project"] + update_project_data["image"] = image_id + update_project_data["requestingOrganization"] = organization_id with self.captureOnCommitCallbacks(execute=True): update_content = self.query_check( self.Mutation.UPDATE_PROJECT, @@ -385,7 +391,7 @@ def _test_project(self, filename: str): assert process_project_response["ok"], process_project_response["errors"] assert process_project_response["result"]["status"] == "READY_TO_PROCESS", "Project should be ready to process" - # Load Tutorial data initially. 
+ # Create tutorial from above project create_tutorial_data = test_data["create_tutorial"] create_tutorial_data["project"] = project_id with self.captureOnCommitCallbacks(execute=True): @@ -416,7 +422,7 @@ def _test_project(self, filename: str): assert update_tutorial_response["ok"], update_tutorial_response["errors"] assert update_tutorial_response is not None, "Tutorial update response is None" - # Publish Tutorial + # Publish tutorial publish_tutorial_data = { "clientId": tutorial_client_id, "status": "READY_TO_PUBLISH", @@ -431,52 +437,37 @@ def _test_project(self, filename: str): assert publish_tutorial_response is not None, "Processed tutorial publish response is None" assert publish_tutorial_response["result"]["status"] == "READY_TO_PUBLISH", "tutorial should be ready to published" - # CHECK TUTORIAL, GROUP AND TASK CREATED IN FIREBASE - tutorial_fb_ref = self.firebase_helper.ref(f"/v2/projects/{tutorial_fb_id}") tutorial_fb_data = tutorial_fb_ref.get() # Check tutorial in firebase assert tutorial_fb_data is not None, "Tutorial in firebase is None" assert isinstance(tutorial_fb_data, dict), "Tutorial in firebase should be a dictionary" - assert tutorial_fb_data["projectId"] == tutorial_fb_id, "Field 'projectId' should match firebaseId" - ignored_tutorial_keys = {"projectId", "tutorialDraftId"} - filtered_tutorial_actual = remove_object_keys(tutorial_fb_data, ignored_tutorial_keys) - filtered_tutorial_expected = remove_object_keys(test_data["expected_tutorial_data"], ignored_tutorial_keys) + filtered_tutorial_actual = tutorial_fb_data + filtered_tutorial_expected = test_data["expected_tutorial_data"] assert filtered_tutorial_actual == filtered_tutorial_expected, "Difference found for tutorial data in firebase." 
- # Check group in firebase + # Check tutorial groups in firebase tutorial_groups_fb_ref = self.firebase_helper.ref(f"/v2/groups/{tutorial_fb_id}/") tutorial_groups_fb_data = tutorial_groups_fb_ref.get() - if tutorial_groups_fb_data: - for group in iter(tutorial_groups_fb_data.values()): # type: ignore[reportAttributeAccessIssue] - assert group["projectId"] == tutorial_fb_id, "Field 'tutorialId' of each group should match firebaseId" - - ignored_group_keys = {"projectId"} - filtered_group_actual = remove_object_keys(tutorial_groups_fb_data, ignored_group_keys) - filtered_group_expected = remove_object_keys(test_data["expected_tutorial_groups_data"], ignored_tutorial_keys) + filtered_group_actual = tutorial_groups_fb_data + filtered_group_expected = test_data["expected_tutorial_groups_data"] assert filtered_group_actual == filtered_group_expected, "Difference found for tutorial group data in firebase." # Check tutorial tasks in firebase tutorial_tasks_ref = self.firebase_helper.ref(f"/v2/tasks/{tutorial_fb_id}/") tutorial_task_fb_data = tutorial_tasks_ref.get() - if tutorial_task_fb_data: - for groups in iter(tutorial_task_fb_data.values()): # type: ignore[reportAttributeAccessIssue] - for task in groups: - assert task["projectId"] == tutorial_fb_id, "Field 'projectId' of each task should match firebaseId" - - ignored_task_keys = {"projectId"} - sanitized_tasks_actual = remove_object_keys(tutorial_task_fb_data, ignored_task_keys) - sanitized_tasks_expected = remove_object_keys(test_data["expected_tutorial_tasks_data"], ignored_task_keys) + sanitized_tasks_actual = tutorial_task_fb_data + sanitized_tasks_expected = test_data["expected_tutorial_tasks_data"] assert sanitized_tasks_actual == sanitized_tasks_expected, ( - "Differences found between expected and actual tasks in firebase." + "Differences found between expected and actual tasks on tutorial in firebase." 
) - # Update processed project + # Update processed project: attach tutorial, organization update_processed_project_data = test_data["update_processed_project"] update_processed_project_data["tutorial"] = tutorial_id update_processed_project_data["requestingOrganization"] = organization_id @@ -502,55 +493,37 @@ def _test_project(self, filename: str): publish_project_response = publish_project_content["data"]["updateProjectStatus"] assert publish_project_response["ok"], publish_project_response["errors"] assert publish_project_response is not None, "Processed project publish response is None" - assert publish_project_response["result"]["status"] == "READY_TO_PUBLISH", "Project should be ready to published" - - # CHECK PROJECT, GROUP AND TASK CREATED IN FIREBASE + assert publish_project_response["result"]["status"] == "READY_TO_PUBLISH", "Project should be ready to publish" project_fb_ref = self.firebase_helper.ref(f"/v2/projects/{project_fb_id}") project_fb_data = project_fb_ref.get() # Check project in firebase - # tutorial.refresh_from_db() assert project_fb_data is not None, "Project in firebase is None" assert isinstance(project_fb_data, dict), "Project in firebase should be a dictionary" assert project_fb_data["created"] is not None, "Field 'created' should be defined" assert datetime.fromisoformat(project_fb_data["created"]), "Field 'created' should be a timestamp" - assert project_fb_data["projectId"] == project_fb_id, "Field 'projectId' should match firebaseId" - assert project_fb_data["tutorialId"] == tutorial_fb_id, "Field 'tutorialId' should match tutorial's firebaseId" - assert project_fb_data["createdBy"] == self.contributor_user.firebase_id, ( - "Field 'createdBy' should match contributor user's firebaseId" - ) - ignored_project_keys = {"created", "createdBy", "projectId", "tutorialId"} + ignored_project_keys = {"created"} filtered_project_actual = remove_object_keys(project_fb_data, ignored_project_keys) filtered_project_expected = 
remove_object_keys(test_data["expected_project_data"], ignored_project_keys) assert filtered_project_actual == filtered_project_expected, "Difference found for project data in firebase." - # Check group in firebase + # Check project groups in firebase groups_fb_ref = self.firebase_helper.ref(f"/v2/groups/{project_fb_id}/") groups_fb_data = groups_fb_ref.get() - if groups_fb_data: - for group in iter(groups_fb_data.values()): # type: ignore[reportAttributeAccessIssue] - assert group["projectId"] == project_fb_id, "Field 'projectId' of each group should match firebaseId" - - ignored_group_keys = {"projectId"} - filtered_group_actual = remove_object_keys(groups_fb_data, ignored_group_keys) - filtered_group_expected = remove_object_keys(test_data["expected_project_groups_data"], ignored_project_keys) + filtered_group_actual = groups_fb_data + filtered_group_expected = test_data["expected_project_groups_data"] assert filtered_group_actual == filtered_group_expected, "Difference found for group data on project in firebase." - # Check tasks in firebase + # Check project tasks in firebase project_tasks_ref = self.firebase_helper.ref(f"/v2/tasks/{project_fb_id}/") project_tasks_fb_data = project_tasks_ref.get() - if project_tasks_fb_data: - for groups in iter(project_tasks_fb_data.values()): # type: ignore[reportAttributeAccessIssue] - for task in groups: - assert task["projectId"] == project_fb_id, "Field 'projectId' of each task should match firebaseId" - ignored_task_keys = {"projectId"} - sanitized_tasks_actual = remove_object_keys(project_tasks_fb_data, ignored_task_keys) - sanitized_tasks_expected = remove_object_keys(test_data["expected_project_tasks_data"], ignored_task_keys) + sanitized_tasks_actual = project_tasks_fb_data + sanitized_tasks_expected = test_data["expected_project_tasks_data"] assert sanitized_tasks_actual == sanitized_tasks_expected, ( - "Differences found between expected and actual tasks in firebase." 
+ "Differences found between expected and actual tasks on project in firebase." ) diff --git a/apps/project/tests/e2e_create_validate_project_test.py b/apps/project/tests/e2e_create_validate_project_test.py index 8e0203e1..7b73a76d 100644 --- a/apps/project/tests/e2e_create_validate_project_test.py +++ b/apps/project/tests/e2e_create_validate_project_test.py @@ -1,18 +1,41 @@ +import logging import typing + +# import unittest +from contextlib import contextmanager from datetime import datetime from pathlib import Path import json5 import pytest from django.conf import settings +from django.db.models.signals import pre_save from ulid import ULID from apps.common.utils import decode_tasks, remove_object_keys from apps.contributor.factories import ContributorUserFactory +from apps.project.models import Organization, Project +from apps.tutorial.models import Tutorial from apps.user.factories import UserFactory -from main.config import Config from main.tests import TestCase +logging.getLogger("vcr").setLevel(logging.WARNING) + + +@contextmanager +def create_override(): + def pre_save_override(sender: typing.Any, instance: typing.Any, **kwargs): + if sender == Tutorial: + instance.firebase_id = f"tutorial_{instance.client_id}" + elif sender in {Project, Organization}: + instance.firebase_id = instance.client_id + + pre_save.connect(pre_save_override) + try: + yield True + finally: + pre_save.disconnect(pre_save_override) + class TestValidateProjectE2E(TestCase): class Mutation: @@ -200,6 +223,7 @@ class Mutation: ok result { id + status } } } @@ -230,44 +254,38 @@ class Mutation: } """ - @typing.override - @classmethod - def setUpClass(cls): - super().setUpClass() - cls.firebase_helper = Config.FIREBASE_HELPER - - cls.contributor_user = ContributorUserFactory.create( - username="Ram Bahadur", - ) - - cls.user = UserFactory.create( - contributor_user=cls.contributor_user, - ) - - @pytest.mark.vcr + @pytest.mark.vcr("assets/tests/projects/validate/cassette") def 
test_validate_project_e2e(self): - self._test_project( - "assets/tests/projects/validate/project_data.json5", - ) + # TODO(susilnem): Add more test with filters + with create_override(): + self._test_project( + "assets/tests/projects/validate/project_data.json5", + ) - # TODO(susilnem): Add more test with filters + # Generic functions def _test_project(self, filename: str): - self.force_login(self.user) - # Load test data file full_path = Path(settings.BASE_DIR, filename) with full_path.open("r", encoding="utf-8") as f: test_data = json5.load(f) + # Create contributor user and login + contributor_user = ContributorUserFactory.create( + username="Ram Bahadur", + firebase_id=test_data["contributor_user_firebase_id"], + ) + user = UserFactory.create( + contributor_user=contributor_user, + ) + + self.force_login(user) + # Define full path for image and AOI files image_filename = Path(settings.BASE_DIR) / test_data["assets"]["image"] aoi_geometry_filename = Path(settings.BASE_DIR) / test_data["assets"]["aoi"] - # Load Project data initially. 
- create_project_data = test_data["create_project"] - - # Create an organization and attach to project + # Create an organization create_organization_data = test_data["create_organization"] with self.captureOnCommitCallbacks(execute=True): organization_content = self.query_check( @@ -282,21 +300,14 @@ def _test_project(self, filename: str): organization_id = organization_response["result"]["id"] organization_fb_id = organization_response["result"]["firebaseId"] - # CHECK ORGANIZATION in firebase - organization_fb_ref = self.firebase_helper.ref(f"/v2/organisations/{organization_fb_id}") organization_fb_data = organization_fb_ref.get() - - # Check organization in firebase - assert organization_fb_data is not None, "organization in firebase is None" - assert isinstance(organization_fb_data, dict), "organization in firebase should be a dictionary" - - assert organization_fb_data == test_data["expected_organization_data"], ( - "Difference found for organization data in firebase." - ) + assert organization_fb_data is not None, "Organization in firebase is None" # Create project + create_project_data = test_data["create_project"] create_project_data["requestingOrganization"] = organization_id + with self.captureOnCommitCallbacks(execute=True): project_content = self.query_check( self.Mutation.CREATE_PROJECT, @@ -329,32 +340,30 @@ def _test_project(self, filename: str): assert image_response["ok"] image_id = image_response["result"]["id"] + # Create GeoJSON Asset for AOI Geometry + aoi_asset_data = { + "clientId": str(ULID()), + "inputType": "AOI_GEOMETRY", + "project": project_id, + } + with aoi_geometry_filename.open("rb") as geo_file: + aoi_content = self.query_check( + self.Mutation.UPLOAD_PROJECT_ASSET, + variables={"data": aoi_asset_data}, + files={"geoFile": geo_file}, + map={"geoFile": ["variables.data.file"]}, + ) + aoi_response = aoi_content["data"]["createProjectAsset"] + assert aoi_response is not None, "AOI create response is None" + assert aoi_response["ok"] 
+ aoi_id = aoi_response["result"]["id"] + # Update project update_project_data = test_data["update_project"] - update_project_data["requestingOrganization"] = organization_id update_project_data["image"] = image_id - - # Create GeoJSON Asset for AOI Geometry if update_project_data["projectTypeSpecifics"]["validate"]["objectSource"]["sourceType"] == "AOI_GEOJSON_FILE": - aoi_asset_data = { - "clientId": str(ULID()), - "inputType": "AOI_GEOMETRY", - "project": project_id, - } - with aoi_geometry_filename.open("rb") as geo_file: - aoi_content = self.query_check( - self.Mutation.UPLOAD_PROJECT_ASSET, - variables={"data": aoi_asset_data}, - files={"geoFile": geo_file}, - map={"geoFile": ["variables.data.file"]}, - ) - aoi_response = aoi_content["data"]["createProjectAsset"] - assert aoi_response is not None, "AOI create response is None" - assert aoi_response["ok"] - aoi_id = aoi_response["result"]["id"] - update_project_data["projectTypeSpecifics"]["validate"]["objectSource"]["aoiGeometry"] = aoi_id - + update_project_data["requestingOrganization"] = organization_id with self.captureOnCommitCallbacks(execute=True): update_content = self.query_check( self.Mutation.UPDATE_PROJECT, @@ -375,11 +384,11 @@ def _test_project(self, filename: str): variables={"pk": project_id, "data": process_project_data}, ) process_project_response = process_project_content["data"]["updateProjectStatus"] - assert process_project_response is not None, "Project mark as ready response is None" + assert process_project_response is not None, "Project ready to process response is None" assert process_project_response["ok"], process_project_response["errors"] - assert process_project_response["result"]["status"] == "READY_TO_PROCESS", "Project should be marked as ready" + assert process_project_response["result"]["status"] == "READY_TO_PROCESS", "Project should be ready to process" - # Load Tutorial data initially. 
+ # Create tutorial from above project create_tutorial_data = test_data["create_tutorial"] create_tutorial_data["project"] = project_id with self.captureOnCommitCallbacks(execute=True): @@ -410,7 +419,7 @@ def _test_project(self, filename: str): assert update_tutorial_response["ok"], update_tutorial_response["errors"] assert update_tutorial_response is not None, "Tutorial update response is None" - # Publish Tutorial + # Publish tutorial publish_tutorial_data = { "clientId": tutorial_client_id, "status": "READY_TO_PUBLISH", @@ -425,60 +434,42 @@ def _test_project(self, filename: str): assert publish_tutorial_response is not None, "Processed tutorial publish response is None" assert publish_tutorial_response["result"]["status"] == "READY_TO_PUBLISH", "tutorial should be ready to published" - # CHECK TUTORIAL, GROUP AND TASK CREATED IN FIREBASE - tutorial_fb_ref = self.firebase_helper.ref(f"/v2/projects/{tutorial_fb_id}") tutorial_fb_data = tutorial_fb_ref.get() # Check tutorial in firebase assert tutorial_fb_data is not None, "Tutorial in firebase is None" assert isinstance(tutorial_fb_data, dict), "Tutorial in firebase should be a dictionary" - assert tutorial_fb_data["projectId"] == tutorial_fb_id, "Field 'projectId' should match firebaseId" - ignored_tutorial_keys = {"projectId", "tutorialDraftId"} - filtered_tutorial_actual = remove_object_keys(tutorial_fb_data, ignored_tutorial_keys) - filtered_tutorial_expected = remove_object_keys(test_data["expected_tutorial_data"], ignored_tutorial_keys) + filtered_tutorial_actual = tutorial_fb_data + filtered_tutorial_expected = test_data["expected_tutorial_data"] assert filtered_tutorial_actual == filtered_tutorial_expected, "Difference found for tutorial data in firebase." 
- # Check group in firebase + # Check tutorial groups in firebase tutorial_groups_fb_ref = self.firebase_helper.ref(f"/v2/groups/{tutorial_fb_id}/") tutorial_groups_fb_data = tutorial_groups_fb_ref.get() - if tutorial_groups_fb_data: - for group in iter(tutorial_groups_fb_data.values()): # type: ignore[reportAttributeAccessIssue] - assert group["projectId"] == tutorial_fb_id, "Field 'tutorialId' of each group should match firebaseId" - - ignored_group_keys = {"projectId"} - filtered_group_actual = remove_object_keys(tutorial_groups_fb_data, ignored_group_keys) - filtered_group_expected = remove_object_keys(test_data["expected_tutorial_groups_data"], ignored_tutorial_keys) + filtered_group_actual = tutorial_groups_fb_data + filtered_group_expected = test_data["expected_tutorial_groups_data"] assert filtered_group_actual == filtered_group_expected, "Difference found for tutorial group data in firebase." # Check tutorial tasks in firebase tutorial_tasks_ref = self.firebase_helper.ref(f"/v2/tasks/{tutorial_fb_id}/") - tutorial_task_fb_data = tutorial_tasks_ref.get() - - ignored_task_keys: set[str] = {"projectId", "geometry"} - # TODO(susilnem): geometry should be present - sanitized_tasks_actual: list[dict[str, typing.Any]] = [] - sanitized_tasks_expected: list[dict[str, typing.Any]] = [] + tutorial_task_fb_data: dict[str, typing.Any] = tutorial_tasks_ref.get() # type: ignore[reportArgumentType] - for group in iter(tutorial_task_fb_data.values()): # type: ignore[reportAttributeAccessIssue] - for task_fb in decode_tasks(group): - sanitized_tasks_actual.append(remove_object_keys(task_fb, ignored_task_keys)) # type: ignore[reportGeneralTypeIssues] + # FIXME(tnagorra): check why ignoring the geometry + # NOTE: We want to decode the tasks before comparison + for key, value in tutorial_task_fb_data.items(): + tutorial_task_fb_data[key] = decode_tasks(value) - for group in iter(test_data["expected_tutorial_tasks_data"].values()): - for task in decode_tasks(group): - 
sanitized_tasks_expected.append(remove_object_keys(task, ignored_task_keys)) # type: ignore[reportGeneralTypeIssues] + sanitized_tasks_actual = tutorial_task_fb_data + sanitized_tasks_expected = test_data["expected_tutorial_tasks_data"] - # Sorting and comparing tasks - sanitized_tasks_actual_sorted = sorted(sanitized_tasks_actual, key=lambda t: t["taskId"]) - sanitized_tasks_expected_sorted = sorted(sanitized_tasks_expected, key=lambda t: t["taskId"]) - - assert sanitized_tasks_actual_sorted == sanitized_tasks_expected_sorted, ( + assert sanitized_tasks_actual == sanitized_tasks_expected, ( "Differences found between expected and actual tasks on tutorial in firebase." ) - # Update processed project + # Update processed project: attach tutorial, organization update_processed_project_data = test_data["update_processed_project"] update_processed_project_data["tutorial"] = tutorial_id update_processed_project_data["requestingOrganization"] = organization_id @@ -504,9 +495,7 @@ def _test_project(self, filename: str): publish_project_response = publish_project_content["data"]["updateProjectStatus"] assert publish_project_response["ok"], publish_project_response["errors"] assert publish_project_response is not None, "Processed project publish response is None" - assert publish_project_response["result"]["status"] == "READY_TO_PUBLISH", "Project should be ready to published" - - # CHECK PROJECT, GROUP AND TASK CREATED IN FIREBASE + assert publish_project_response["result"]["status"] == "READY_TO_PUBLISH", "Project should be ready to publish" project_fb_ref = self.firebase_helper.ref(f"/v2/projects/{project_fb_id}") project_fb_data = project_fb_ref.get() @@ -516,50 +505,31 @@ def _test_project(self, filename: str): assert isinstance(project_fb_data, dict), "Project in firebase should be a dictionary" assert project_fb_data["created"] is not None, "Field 'created' should be defined" assert datetime.fromisoformat(project_fb_data["created"]), "Field 'created' should be a 
timestamp" - assert project_fb_data["projectId"] == project_fb_id, "Field 'projectId' should match firebaseId" - assert project_fb_data["tutorialId"] == tutorial_fb_id, "Field 'tutorialId' should match tutorial's firebaseId" - assert project_fb_data["createdBy"] == self.contributor_user.firebase_id, ( - "Field 'createdBy' should match contributor user's firebaseId" - ) - ignored_project_keys = {"created", "createdBy", "projectId", "tutorialId"} + ignored_project_keys = {"created"} filtered_project_actual = remove_object_keys(project_fb_data, ignored_project_keys) filtered_project_expected = remove_object_keys(test_data["expected_project_data"], ignored_project_keys) assert filtered_project_actual == filtered_project_expected, "Difference found for project data in firebase." - # Check group in firebase + # Check project groups in firebase groups_fb_ref = self.firebase_helper.ref(f"/v2/groups/{project_fb_id}/") groups_fb_data = groups_fb_ref.get() - if groups_fb_data: - for group in iter(groups_fb_data.values()): # type: ignore[reportAttributeAccessIssue] - assert group["projectId"] == project_fb_id, "Field 'projectId' of each group should match firebaseId" - - ignored_group_keys = {"projectId"} - filtered_group_actual = remove_object_keys(groups_fb_data, ignored_group_keys) - filtered_group_expected = remove_object_keys(test_data["expected_project_groups_data"], ignored_project_keys) + filtered_group_actual = groups_fb_data + filtered_group_expected = test_data["expected_project_groups_data"] assert filtered_group_actual == filtered_group_expected, "Difference found for group data on project in firebase." 
- # Check tasks in firebase + # Check project tasks in firebase project_tasks_ref = self.firebase_helper.ref(f"/v2/tasks/{project_fb_id}/") - project_tasks_fb_data = project_tasks_ref.get() - - ignored_task_keys: set[str] = {"projectId"} - sanitized_tasks_actual: list[dict[str, typing.Any]] = [] - sanitized_tasks_expected: list[dict[str, typing.Any]] = [] - - for group in iter(project_tasks_fb_data.values()): # type: ignore[reportAttributeAccessIssue] - for task_fb in decode_tasks(group): - sanitized_tasks_actual.append(remove_object_keys(task_fb, ignored_task_keys)) # type: ignore[reportGeneralTypeIssues] + project_tasks_fb_data: dict[str, typing.Any] = project_tasks_ref.get() # type: ignore[reportArgumentType] - for group in iter(test_data["expected_project_tasks_data"].values()): - for task in decode_tasks(group): - sanitized_tasks_expected.append(remove_object_keys(task, ignored_task_keys)) # type: ignore[reportGeneralTypeIssues] + # NOTE: We want to decode the tasks before comparison + for key, value in project_tasks_fb_data.items(): + project_tasks_fb_data[key] = decode_tasks(value) - # Sorting and comparing tasks - sanitized_tasks_actual_sorted = sorted(sanitized_tasks_actual, key=lambda t: t["taskId"]) - sanitized_tasks_expected_sorted = sorted(sanitized_tasks_expected, key=lambda t: t["taskId"]) + sanitized_tasks_actual = project_tasks_fb_data + sanitized_tasks_expected = test_data["expected_project_tasks_data"] - assert sanitized_tasks_actual_sorted == sanitized_tasks_expected_sorted, ( - "Differences found between expected and actual tasks in firebase." + assert sanitized_tasks_actual == sanitized_tasks_expected, ( + "Differences found between expected and actual tasks on project in firebase." 
) diff --git a/assets b/assets index 1f64ac84..9db08775 160000 --- a/assets +++ b/assets @@ -1 +1 @@ -Subproject commit 1f64ac849e7146670bcef3f650280afbb40b1aba +Subproject commit 9db08775e5b7ae0ec21e65494bb90dc7d501ee84 diff --git a/conftest.py b/conftest.py index 7091fb14..34018f23 100644 --- a/conftest.py +++ b/conftest.py @@ -2,10 +2,16 @@ @pytest.fixture(autouse=True) -def vcr_config(): +def vcr_config(request): +    marker = request.node.get_closest_marker("vcr") + +    cassette_path = None +    if marker and marker.args: +        cassette_path = marker.args[0] +     return {         "record_mode": "once",         "ignore_hosts": ["localhost", "firebase-test"],         "ignore_localhost": True, -        "cassette_library_dir": "assets/tests/tests_vcr_snapshots", +        "cassette_library_dir": cassette_path or "assets/tests/tests_vcr_snapshots",     } From 02bb7675a8c296e992c19d761be965c5037be827 Mon Sep 17 00:00:00 2001 From: tnagorra Date: Wed, 1 Oct 2025 09:54:24 +0545 Subject: [PATCH 8/9] fix(test): clear out tokens from generated cassette files --- assets     | 2 +- conftest.py | 23 +++++++++++++++++++++++ 2 files changed, 24 insertions(+), 1 deletion(-) diff --git a/assets b/assets index 9db08775..6a0e74f6 160000 --- a/assets +++ b/assets @@ -1 +1 @@ -Subproject commit 9db08775e5b7ae0ec21e65494bb90dc7d501ee84 +Subproject commit 6a0e74f68ba26716d7deaf2c5d0697704d5ca26a diff --git a/conftest.py b/conftest.py index 34018f23..2432ea10 100644 --- a/conftest.py +++ b/conftest.py @@ -1,4 +1,26 @@ +from urllib.parse import urlparse, urlunparse + import pytest +from django.http import QueryDict + + +def scrub_auth_token(request): +    # NOTE: We want to redact sensitive information from "Authorization: Token XYZ" +    authorization_header = request.headers.get("authorization") +    if authorization_header and authorization_header.lower().startswith("token"): +        request.headers["authorization"] = "Token DUMMY_TOKEN" + +    url = request.uri +    if "access_token" in url: +        parsed_url = urlparse(url) + +        query_dict = 
QueryDict(parsed_url.query, mutable=True) + query_dict["access_token"] = "DUMMY_TOKEN" # noqa: S105 + new_query = query_dict.urlencode() + + new_url = urlunparse(parsed_url._replace(query=new_query)) + request.uri = new_url + return request @pytest.fixture(autouse=True) @@ -14,4 +36,5 @@ def vcr_config(request): "ignore_hosts": ["localhost", "firebase-test"], "ignore_localhost": True, "cassette_library_dir": cassette_path or "assets/tests/tests_vcr_snapshots", + "before_record_request": scrub_auth_token, } From 3d3ce1535e55c13407bd676383433d32a38532fe Mon Sep 17 00:00:00 2001 From: tnagorra Date: Wed, 1 Oct 2025 12:38:15 +0545 Subject: [PATCH 9/9] fix(validate-image): use image id for taskId else fallback to index - add imageId for task during tutorial creation --- assets | 2 +- project_types/validate_image/project.py | 7 ++++++- project_types/validate_image/tutorial.py | 3 ++- schema.graphql | 10 ++++------ utils/asset_types/models.py | 3 +-- 5 files changed, 14 insertions(+), 11 deletions(-) diff --git a/assets b/assets index 6a0e74f6..fc1252f3 160000 --- a/assets +++ b/assets @@ -1 +1 @@ -Subproject commit 6a0e74f68ba26716d7deaf2c5d0697704d5ca26a +Subproject commit fc1252f315ac89c74b4e8e921da6964ad56d358f diff --git a/project_types/validate_image/project.py b/project_types/validate_image/project.py index 08d084cd..32691573 100644 --- a/project_types/validate_image/project.py +++ b/project_types/validate_image/project.py @@ -25,6 +25,7 @@ class ValidImage(typing.TypedDict): + source_identifier: str | None url: str file_name: str width: int | None @@ -103,6 +104,8 @@ def _validate_direct_images(self) -> list[ValidImage]: inputs: list[ValidImage] = [] for image_asset in direct_images_assets_qs.iterator(): valid_image: ValidImage = { + # NOTE: There is no identifier from source if uploaded directly + "source_identifier": None, "url": get_absolute_uri(image_asset.file), "file_name": image_asset.file.name, "width": None, @@ -147,6 +150,7 @@ def 
_validate_dataset_file(self) -> list[ValidImage]: if annotations: for annotation in annotations: valid_image: ValidImage = { + "source_identifier": asset_specifics.image.id, "url": image_asset.external_url, "file_name": asset_specifics.image.file_name, "width": asset_specifics.image.width, @@ -156,6 +160,7 @@ def _validate_dataset_file(self) -> list[ValidImage]: inputs.append(valid_image) else: valid_image: ValidImage = { + "source_identifier": asset_specifics.image.id, "url": image_asset.external_url, "file_name": asset_specifics.image.file_name, "width": None, @@ -213,7 +218,7 @@ def create_tasks(self, group: ProjectTaskGroup, raw_group: Grouping[ValidImage]) bulk_mgr.add( ProjectTask( - firebase_id=f_id, + firebase_id=feature["source_identifier"] or f_id, task_group_id=group.pk, geometry=None, # FIXME(tnagorra): Do we need to define all of these here? diff --git a/project_types/validate_image/tutorial.py b/project_types/validate_image/tutorial.py index 3fb43679..b24ad03e 100644 --- a/project_types/validate_image/tutorial.py +++ b/project_types/validate_image/tutorial.py @@ -17,6 +17,7 @@ class ValidateImageTutorialTaskProperty(BaseTutorialTaskProperty): url: str + image_id: str | None = None file_name: str width: int | None = None height: int | None = None @@ -45,7 +46,7 @@ def get_task_specifics_for_firebase(self, task: TutorialTask, index: int): projectId=self.tutorial.firebase_id, referenceAnswer=task.reference, screen=task.scenario.scenario_page_number, - taskId=f"{index}", + taskId=task_specifics.image_id or str(index), url=task_specifics.url, fileName=task_specifics.file_name, width=task_specifics.width, diff --git a/schema.graphql b/schema.graphql index bd80d124..063a587b 100644 --- a/schema.graphql +++ b/schema.graphql @@ -975,9 +975,7 @@ input ObjectImageAnnotationInput { """Numeric value as string""" id: String! - - """Numeric value as string""" - imageId: String! + imageId: String = null iscrowd: Int = null segmentation: [[Float!]!] 
= null } @@ -989,9 +987,7 @@ type ObjectImageAnnotationType { """Numeric value as string""" id: String! - - """Numeric value as string""" - imageId: String! + imageId: String iscrowd: Int segmentation: [[Float!]!] } @@ -2724,6 +2720,7 @@ input ValidateImageTutorialTaskPropertyInput { annotation: ObjectImageAnnotationInput = null fileName: String! height: Int = null + imageId: String = null url: String! width: Int = null } @@ -2732,6 +2729,7 @@ type ValidateImageTutorialTaskPropertyType { annotation: ObjectImageAnnotationType fileName: String! height: Int + imageId: String url: String! width: Int } diff --git a/utils/asset_types/models.py b/utils/asset_types/models.py index 33cf62a8..1091048b 100644 --- a/utils/asset_types/models.py +++ b/utils/asset_types/models.py @@ -37,8 +37,7 @@ class ObjectImageAnnotation(BaseModel): # NOTE: `id` is not required in coco format but we might need this to be required # NOTE: converting id and image_id to string as large integers are not supported id: custom_fields.PydanticId - # NOTE: converting id and image_id to string as large integers are not supported - image_id: custom_fields.PydanticId + image_id: custom_fields.PydanticId | None = None category_id: custom_fields.PydanticId | None = None iscrowd: custom_fields.PydanticPositiveInt | None = None segmentation: list[list[float]] | None = None