From 5bdd864a7073adc56394ab4b493469a162edeab6 Mon Sep 17 00:00:00 2001 From: tnagorra Date: Mon, 6 Oct 2025 22:50:38 +0545 Subject: [PATCH 01/19] fix(project): throw validation error if required results is empty --- project_types/base/project.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/project_types/base/project.py b/project_types/base/project.py index 7befbaa5..a6b8e86d 100644 --- a/project_types/base/project.py +++ b/project_types/base/project.py @@ -151,15 +151,15 @@ def analyze_groups(self): ) )["required_results"] or 0 + if self.project.required_results == 0: + raise ValidationException("Project does not contain any groups or tasks") + self.project.total_area = ( ProjectTaskGroup.objects.filter(project_id=self.project.pk).aggregate(agg_area=models.Sum("total_area")) )["agg_area"] or 0 self.project.save(update_fields=(["required_results"])) - # FIXME: Throw error if no. of tasks is zero. - # FIXME: Throw error if no. of groups is zero. - @abstractmethod def get_max_time_spend_percentile(self) -> float: """Factor calculated by @Hagellach37 From 00d888990f67f99d8a7b0adc5770b4167c5c5761 Mon Sep 17 00:00:00 2001 From: tnagorra Date: Mon, 6 Oct 2025 22:54:10 +0545 Subject: [PATCH 02/19] fix(project): do not allow editing requesting organization later --- apps/project/graphql/mutations.py | 4 ++-- apps/project/serializers.py | 20 +++++++++++++++++++- 2 files changed, 21 insertions(+), 3 deletions(-) diff --git a/apps/project/graphql/mutations.py b/apps/project/graphql/mutations.py index 456db98e..a517dd15 100644 --- a/apps/project/graphql/mutations.py +++ b/apps/project/graphql/mutations.py @@ -5,7 +5,7 @@ from apps.project.models import Organization, Project from apps.project.serializers import ( OrganizationSerializer, - ProcessedProjectSerializer, + ProcessedProjectUpdateSerializer, ProjectAssetSerializer, ProjectCreateSerializer, ProjectStatusUpdateSerializer, @@ -53,7 +53,7 @@ async def update_processed_project( pk: strawberry.ID, ) -> MutationResponseType[ProjectType]: project = await Project.objects.aget(pk=pk) - return await ModelMutation(ProcessedProjectSerializer).handle_update_mutation(data, info, project) + return await ModelMutation(ProcessedProjectUpdateSerializer).handle_update_mutation(data, info, project) @strawberry_django.mutation(extensions=[IsAuthenticated()]) async def create_project_asset( diff --git a/apps/project/serializers.py b/apps/project/serializers.py index e9a3f4b8..3a250547 100644 --- a/apps/project/serializers.py +++ b/apps/project/serializers.py @@ -337,7 +337,7 @@ def update(self, instance: Project, validated_data: dict[typing.Any, typing.Any] # NOTE: Make sure this matches with the strawberry Input ./graphql/inputs.py -class ProcessedProjectSerializer(UserResourceSerializer[Project]): +class ProcessedProjectUpdateSerializer(UserResourceSerializer[Project]): class Meta: # type: ignore[reportIncompatibleVariableOverride] model = Project fields = ( @@ -431,6 +431,24 @@ def validate(self, attrs: dict[str, typing.Any]): }, ) + # disallow changing requesting organization once published + org = attrs.get("requesting_organization") + if ( + org + and org != self.instance.requesting_organization + and self.instance.status_enum + not in [ + Project.Status.PROCESSED, + Project.Status.PUBLISHING_FAILED, + ] + ): + raise serializers.ValidationError( + { + "status": gettext("Cannot update project with status %s") % self.instance.status_enum.label, + }, + ) + + _validate_project_name(attrs, self.instance) 
self._validate_project_instruction(attrs) return super().validate(attrs) From ad523a14c405bdbd9377792a648be2c3ad534436 Mon Sep 17 00:00:00 2001 From: tnagorra Date: Mon, 6 Oct 2025 23:01:37 +0545 Subject: [PATCH 03/19] fix(project): remove default value for "max tasks per user" - enable editing "max tasks per user" on projects later --- apps/project/graphql/inputs/inputs.py | 1 + .../0006_alter_project_max_tasks_per_user.py | 18 ++++++++++++++++++ apps/project/models.py | 1 - apps/project/serializers.py | 1 + apps/project/tests/mutation_test.py | 2 +- schema.graphql | 3 +++ 6 files changed, 24 insertions(+), 2 deletions(-) create mode 100644 apps/project/migrations/0006_alter_project_max_tasks_per_user.py diff --git a/apps/project/graphql/inputs/inputs.py b/apps/project/graphql/inputs/inputs.py index 59500179..7b99982d 100644 --- a/apps/project/graphql/inputs/inputs.py +++ b/apps/project/graphql/inputs/inputs.py @@ -101,6 +101,7 @@ class ProcessedProjectUpdateInput(UserResourceTopLevelUpdateInputMixin): additional_info_url: strawberry.auto is_featured: strawberry.auto description: strawberry.auto + max_tasks_per_user: strawberry.auto tutorial: strawberry.ID | None = strawberry.UNSET requesting_organization: strawberry.ID | None = strawberry.UNSET image: strawberry.ID | None = strawberry.UNSET diff --git a/apps/project/migrations/0006_alter_project_max_tasks_per_user.py b/apps/project/migrations/0006_alter_project_max_tasks_per_user.py new file mode 100644 index 00000000..e45ed626 --- /dev/null +++ b/apps/project/migrations/0006_alter_project_max_tasks_per_user.py @@ -0,0 +1,18 @@ +# Generated by Django 5.2.5 on 2025-10-06 10:41 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('project', '0005_merge_20251001_0845'), + ] + + operations = [ + migrations.AlterField( + model_name='project', + name='max_tasks_per_user', + field=models.PositiveSmallIntegerField(blank=True, help_text='How many tasks each user is allowed to work on for this project', null=True), + ), + ] diff --git a/apps/project/models.py b/apps/project/models.py index 4387efed..7fd01079 100644 --- a/apps/project/models.py +++ b/apps/project/models.py @@ -350,7 +350,6 @@ class Project(UserResource, FirebasePushResource): help_text=gettext_lazy("How many tasks each user is allowed to work on for this project"), null=True, blank=True, - default=10, ) # TODO(tnagorra): Currently this field collects any data not stored by another fields, pulled from firebase. 
diff --git a/apps/project/serializers.py b/apps/project/serializers.py index 3a250547..17f1c188 100644 --- a/apps/project/serializers.py +++ b/apps/project/serializers.py @@ -348,6 +348,7 @@ class Meta: # type: ignore[reportIncompatibleVariableOverride] "look_for", "project_instruction", "additional_info_url", + "max_tasks_per_user", "description", "image", "tutorial", diff --git a/apps/project/tests/mutation_test.py b/apps/project/tests/mutation_test.py index 018e060a..6bfc42e3 100644 --- a/apps/project/tests/mutation_test.py +++ b/apps/project/tests/mutation_test.py @@ -629,7 +629,7 @@ def test_project_create(self): description=latest_project.description, verificationNumber=3, groupSize=10, - maxTasksPerUser=10, + maxTasksPerUser=None, isFeatured=latest_project.is_featured, status=self.genum(Project.Status.DRAFT), processingStatus=None, diff --git a/schema.graphql b/schema.graphql index 063a587b..99ead70b 100644 --- a/schema.graphql +++ b/schema.graphql @@ -1178,6 +1178,9 @@ input ProcessedProjectUpdateInput { """What should the users look for (e.g. buildings, cars, trees)""" lookFor: String + """How many tasks each user is allowed to work on for this project""" + maxTasksPerUser: Int + """Provide project instruction""" projectInstruction: String projectNumber: Int From 0c15e2a4d67c8212b7a80d0f8de43fdb34aad39e Mon Sep 17 00:00:00 2001 From: tnagorra Date: Mon, 6 Oct 2025 23:04:43 +0545 Subject: [PATCH 04/19] fix(project): add validation for project fields - add validation for group size - add validation for verification number - add default value for group size depending on project type - use validation for "project name" --- apps/project/models.py | 26 +------------------- apps/project/serializers.py | 38 +++++++++++++++++++++++++++++ apps/project/tests/mutation_test.py | 4 +-- 3 files changed, 41 insertions(+), 27 deletions(-) diff --git a/apps/project/models.py b/apps/project/models.py index 7fd01079..90489e1c 100644 --- a/apps/project/models.py +++ b/apps/project/models.py @@ -280,7 +280,6 @@ class Project(UserResource, FirebasePushResource): project_number = models.PositiveIntegerField[int, int]() # TODO(tnagorra): Max length is 25 in manager dashboard. - # TODO(frozenhelium): We should discuss if we need this field. look_for = models.CharField[str | None, str | None]( null=True, blank=True, @@ -310,7 +309,6 @@ class Project(UserResource, FirebasePushResource): ), ) - # NOTE: JPG and PNG should be supported. image = models.ForeignKey["ProjectAsset | None", "ProjectAsset | None"]( "project.ProjectAsset", related_name="+", @@ -320,7 +318,6 @@ class Project(UserResource, FirebasePushResource): ) # FIXME(tnagorra): We might need to rename this field - # NOTE: The tutorial should align with what we are looking for. tutorial = models.ForeignKey["Tutorial | None", "Tutorial | None"]( "tutorial.Tutorial", null=True, # NOTE: Validation makes sure active project have tutorial attached @@ -330,13 +327,11 @@ class Project(UserResource, FirebasePushResource): help_text=gettext_lazy("Tutorial used for this project."), ) # NOTE: tutorial_id before - # TODO(tnagorra): This should be an integer from 3 to 10000 verification_number = models.PositiveSmallIntegerField[int, int]( help_text=gettext_lazy("How many people do you want to see every tile before you consider it finished?"), default=3, ) - # TODO(tnagorra): This should be an integer from 10 to 25 group_size = models.PositiveSmallIntegerField[int, int]( help_text=gettext_lazy( "How big should a mapping session be? 
Group size refers to the number of tasks per mapping session.", @@ -344,8 +339,6 @@ class Project(UserResource, FirebasePushResource): default=10, ) - # TODO(tnagorra): This should be an integer from 10 to 250 - # TODO(tnagorra): Empty indicates that no limit is set. But, this field is required in manager dashboard. max_tasks_per_user = models.PositiveSmallIntegerField[int, int]( help_text=gettext_lazy("How many tasks each user is allowed to work on for this project"), null=True, @@ -437,7 +430,7 @@ class Project(UserResource, FirebasePushResource): default=ProjectProgressStatusEnum.ON_GOING, ) - # TODO: Change this to float? + # FIXME(tnagorra): Change this to float? progress = models.PositiveSmallIntegerField[int, int]( default=0, validators=[validate_percentage], @@ -581,23 +574,6 @@ def status_enum(self): def progress_status_enum(self): return ProjectProgressStatusEnum(self.progress_status) - @typing.override - def clean(self): - ... - # if not self.teamId: - # self.status = "inactive" # this is a public project - # else: - # self.status = ( - # "private_inactive" # private project visible only for team members - # ) - # - # if max_tasks_per_user is not None: - # self.maxTasksPerUser = int(max_tasks_per_user) - - # for group in self.groups.values(): - # group.requiredCount = self.verificationNumber - # self.requiredResults += group.requiredCount * group.numberOfTasks - class ProjectAsset(UserResource, CommonAsset): # type: ignore[reportIncompatibleVariableOverride] """Model representing assets for a project.""" diff --git a/apps/project/serializers.py b/apps/project/serializers.py index 17f1c188..c40f7f66 100644 --- a/apps/project/serializers.py +++ b/apps/project/serializers.py @@ -102,10 +102,33 @@ class Meta: # type: ignore[reportIncompatibleVariableOverride] "team", ) + def _validate_group_size(self, attrs: dict[str, typing.Any]): + project_type = attrs["project_type"] + if not isinstance(project_type, Project.Type): + project_type = Project.Type(project_type) + + group_size: int + match project_type: + case Project.Type.FIND: + group_size = 25 + case Project.Type.VALIDATE: + group_size = 120 + case Project.Type.VALIDATE_IMAGE: + group_size = 25 + case Project.Type.COMPARE: + group_size = 25 + case Project.Type.COMPLETENESS: + group_size = 80 + case Project.Type.STREET: + group_size = 25 + + attrs["group_size"] = group_size + @typing.override def validate(self, attrs: dict[str, typing.Any]): attrs = super().validate(attrs) _validate_project_name(attrs, None) + self._validate_group_size(attrs) return attrs def validate_requesting_organization(self, requesting_organization: Organization | None) -> Organization | None: @@ -141,6 +164,21 @@ class Meta: # type: ignore[reportIncompatibleVariableOverride] "team", ) + def validate_group_size(self, group_size: int): + # FIXME(tnagorra): minimum group size is actually 10, but using 5 to pass existing tests + if group_size < 5: + raise serializers.ValidationError(gettext("Group size should be equal to or greater than 5")) + if group_size > 250: + raise serializers.ValidationError(gettext("Group size should be equal to or less than 250")) + return group_size + + def validate_verification_number(self, verification_number: int): + if verification_number < 3: + raise serializers.ValidationError(gettext("Verification number should be equal to or greater than 3")) + if verification_number > 10000: + raise serializers.ValidationError(gettext("Verification number should be equal to or less than 10000")) + return verification_number + def 
validate_requesting_organization(self, requesting_organization: Organization | None) -> Organization | None: assert self.instance is not None current_org = self.instance.requesting_organization diff --git a/apps/project/tests/mutation_test.py b/apps/project/tests/mutation_test.py index 6bfc42e3..54c2e6db 100644 --- a/apps/project/tests/mutation_test.py +++ b/apps/project/tests/mutation_test.py @@ -628,7 +628,7 @@ def test_project_create(self): additionalInfoUrl=latest_project.additional_info_url, description=latest_project.description, verificationNumber=3, - groupSize=10, + groupSize=25, maxTasksPerUser=None, isFeatured=latest_project.is_featured, status=self.genum(Project.Status.DRAFT), @@ -688,7 +688,7 @@ def test_project_update(self, mock_requests): # type: ignore[reportMissingParam "projectInstruction": "Buildings and Houses", "additionalInfoUrl": "https://hi-there/about.html?code=1", "description": "The new updated **project** from hi-there.", - "verificationNumber": 2, + "verificationNumber": 5, "groupSize": 16, "maxTasksPerUser": 11, "clientId": proj.client_id, From 41444b3ff9f3d423ba613a4c3537f204665899dd Mon Sep 17 00:00:00 2001 From: tnagorra Date: Mon, 6 Oct 2025 23:12:40 +0545 Subject: [PATCH 05/19] feat(project): sync project and contributorCount to firebase on update --- firebase | 2 +- project_types/base/project.py | 14 ++++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/firebase b/firebase index 23eadc93..a1dbcf40 160000 --- a/firebase +++ b/firebase @@ -1 +1 @@ -Subproject commit 23eadc9377d026ff7107c9b9fb0cc1624f4f9a6f +Subproject commit a1dbcf408f2f20f8145ef5f82a35f98d27c7501d diff --git a/project_types/base/project.py b/project_types/base/project.py index a6b8e86d..0b0e9136 100644 --- a/project_types/base/project.py +++ b/project_types/base/project.py @@ -18,6 +18,7 @@ from apps.common.models import FirebasePushStatusEnum from apps.common.utils import get_absolute_uri +from apps.mapping.models import MappingSession from apps.project.models import ( Project, ProjectAsset, @@ -420,6 +421,17 @@ def update_project_on_firebase(self, project_ref: FbReference, fb_project: fireb assert self.project.tutorial_id is not None, "Tutorial is required before project can be pushed to firebase" assert self.project.tutorial is not None, "Tutorial is required before project can be pushed to firebase" + unique_contributors_count = ( + MappingSession.objects.filter( + project_task_group__in=ProjectTaskGroup.objects.filter(project=self.project), + ) + .values( + "contributor_user_id", + ) + .distinct() + .count() + ) + project_ref.update( value=firebase_utils.serialize( firebase_models.FbProjectUpdateInput( @@ -437,6 +449,8 @@ def update_project_on_firebase(self, project_ref: FbReference, fb_project: fireb tutorialId=self.project.tutorial.firebase_id, status=BaseProject.get_firebase_status(self.project.status_enum, not self.project.team_id), teamId=self.project.team.firebase_id if self.project.team else None, + contributorCount=unique_contributors_count, + progress=self.project.progress, # FIXME(tnagorra): Need to check how we get this? 
language="en-us", ), From bc62bab95bc04fea02ad5ae36cd83804b898b096 Mon Sep 17 00:00:00 2001 From: tnagorra Date: Mon, 6 Oct 2025 23:13:33 +0545 Subject: [PATCH 06/19] feat(firebase): update project info on firebase after results is fetched --- apps/project/exports/exports.py | 24 ++++++++++++++----- ...2e_create_project_tile_map_service_test.py | 8 +++++++ 2 files changed, 26 insertions(+), 6 deletions(-) diff --git a/apps/project/exports/exports.py b/apps/project/exports/exports.py index beea2f92..09710e07 100644 --- a/apps/project/exports/exports.py +++ b/apps/project/exports/exports.py @@ -6,11 +6,11 @@ from django.db import transaction from ulid import ULID -from apps.common.models import AssetTypeEnum +from apps.common.models import AssetTypeEnum, FirebasePushStatusEnum from apps.project.custom_options import get_fallback_custom_options_for_export from apps.project.exports.geojson import gzipped_csv_to_gzipped_geojson from apps.project.models import Project, ProjectAsset, ProjectAssetExportTypeEnum, ProjectProgressStatusEnum, ProjectTypeEnum -from apps.project.tasks import send_slack_message_for_project +from apps.project.tasks import push_project_to_firebase, send_slack_message_for_project from apps.user.models import User from main.config import Config from main.logging import log_extra @@ -151,14 +151,17 @@ def _export_project_data(project: Project, tmp_directory: Path): ) if not project_stats_by_date_df.empty: - project.progress = project_stats_by_date_df["cum_progress"].iloc[-1] * 100 - if project.progress >= 100: - project.progress_status = ProjectProgressStatusEnum.COMPLETED project.number_of_contributor_users = project_stats_by_date_df["cum_number_of_users"].iloc[-1] project.number_of_results = project_stats_by_date_df["cum_number_of_results"].iloc[-1] project.number_of_results_for_progress = project_stats_by_date_df["cum_number_of_results_progress"].iloc[-1] project.last_contribution_date = project_stats_by_date_df.index[-1] - # TODO: Trigger slack notifications on progress change + + previous_progress = project.progress + project.progress = project_stats_by_date_df["cum_progress"].iloc[-1] * 100 + + if project.progress >= 100: + project.progress_status = ProjectProgressStatusEnum.COMPLETED + if project.progress >= 90 and project.slack_progress_notifications < 90: transaction.on_commit( lambda: send_slack_message_for_project.delay(project_id=project.id, action="progress-change"), @@ -169,6 +172,13 @@ def _export_project_data(project: Project, tmp_directory: Path): lambda: send_slack_message_for_project.delay(project_id=project.id, action="progress-change"), ) + if project.progress != previous_progress: + # FIXME(tnagorra): Do we only send updates for the 2 fields? 
+ transaction.on_commit( + lambda: push_project_to_firebase.delay(project_id=project.id), + ) + project.update_firebase_push_status(FirebasePushStatusEnum.PENDING, False) + project.save( update_fields=( "progress", @@ -177,6 +187,8 @@ def _export_project_data(project: Project, tmp_directory: Path): "number_of_results", "number_of_results_for_progress", "last_contribution_date", + "firebase_push_status", + "firebase_last_pushed", ), ) diff --git a/apps/project/tests/e2e_create_project_tile_map_service_test.py b/apps/project/tests/e2e_create_project_tile_map_service_test.py index 378d7a04..e1504baa 100644 --- a/apps/project/tests/e2e_create_project_tile_map_service_test.py +++ b/apps/project/tests/e2e_create_project_tile_map_service_test.py @@ -603,6 +603,7 @@ def _test_project(self, projectKey: str, filename: str): with self.captureOnCommitCallbacks(execute=True): pull_results_from_firebase() + # Check if results are stored in database assert [ MappingSession.objects.count(), MappingSessionResult.objects.count(), @@ -619,3 +620,10 @@ def _test_project(self, projectKey: str, filename: str): project.refresh_from_db() assert project.progress == test_data["expected_pulled_results_data"]["progress"] + + # Check if progress and contributorCount synced to firebase + project_fb_data = project_fb_ref.get() + assert project_fb_data is not None, "Project in firebase is None" + assert isinstance(project_fb_data, dict), "Project in firebase should be a dictionary" + assert project_fb_data["progress"] == project.progress, "Progress should be synced with firebase" + assert project_fb_data["contributorCount"] == 1, "Contributor count should be synced with firebase" From df373bdbde3b36a311a237c1c7e4d164c4699e05 Mon Sep 17 00:00:00 2001 From: tnagorra Date: Tue, 7 Oct 2025 22:03:08 +0545 Subject: [PATCH 07/19] fix(project): add project type on name uniqueness constraint --- ...ve_project_unique_project_name_and_more.py | 26 +++++++++++++++++++ .../0008_alter_project_unique_project_name.py | 23 ++++++++++++++++ apps/project/models.py | 3 ++- apps/project/serializers.py | 6 +++++ 4 files changed, 57 insertions(+), 1 deletion(-) create mode 100644 apps/project/migrations/0007_remove_project_unique_project_name_and_more.py create mode 100644 apps/project/migrations/0008_alter_project_unique_project_name.py diff --git a/apps/project/migrations/0007_remove_project_unique_project_name_and_more.py b/apps/project/migrations/0007_remove_project_unique_project_name_and_more.py new file mode 100644 index 00000000..e75767a3 --- /dev/null +++ b/apps/project/migrations/0007_remove_project_unique_project_name_and_more.py @@ -0,0 +1,26 @@ +# Generated by Django 5.2.5 on 2025-10-07 16:17 + +import django.db.models.functions.text +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('contributor', '0002_initial'), + ('project', '0006_alter_project_max_tasks_per_user'), + ('tutorial', '0002_initial'), + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ] + + operations = [ + migrations.RemoveConstraint( + model_name='project', + name='unique_project_name', + ), + migrations.AddConstraint( + model_name='project', + constraint=models.UniqueConstraint(models.F('project_type'), django.db.models.functions.text.Lower('topic'), django.db.models.functions.text.Lower('region'), models.F('project_number'), models.F('requesting_organization'), name='unique_project_name', violation_error_message='A project with the same type, topic, region, 
project number and requesting organization already exists.'), ), ] diff --git a/apps/project/migrations/0008_alter_project_unique_project_name.py b/apps/project/migrations/0008_alter_project_unique_project_name.py new file mode 100644 index 00000000..7dd9c50e --- /dev/null +++ b/apps/project/migrations/0008_alter_project_unique_project_name.py @@ -0,0 +1,23 @@ +# Generated by Django 5.2.5 on 2025-10-08 03:41 + +import django.db.models.functions.text +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('contributor', '0002_initial'), + ('project', '0007_remove_project_unique_project_name_and_more'), + ('tutorial', '0002_initial'), + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ] + + operations = [ + migrations.AlterConstraint( + model_name='project', + name='unique_project_name', + constraint=models.UniqueConstraint(models.F('project_type'), django.db.models.functions.text.Lower('topic'), django.db.models.functions.text.Lower('region'), models.F('project_number'), models.F('requesting_organization'), name='unique_project_name', violation_error_message='A project with the same type, topic, region, number and requesting organization already exists.'), + ), + ] diff --git a/apps/project/models.py b/apps/project/models.py index 90489e1c..d0d81853 100644 --- a/apps/project/models.py +++ b/apps/project/models.py @@ -481,13 +481,14 @@ class Meta: # type: ignore[reportIncompatibleVariableOverride] constraints = [ # XXX: Changing this also requires changes in the serializers models.UniqueConstraint( + "project_type", Lower("topic"), Lower("region"), "project_number", "requesting_organization", name="unique_project_name", violation_error_message=gettext_lazy( - "A project with the same topic, region, project number and requesting organization already exists.", + "A project with the same type, topic, region, number and requesting organization already exists.", ), ), ] diff --git a/apps/project/serializers.py b/apps/project/serializers.py index c40f7f66..f8abb938 100644 --- a/apps/project/serializers.py +++ b/apps/project/serializers.py @@ -62,16 +62,22 @@ def _validate_project_name( region = attrs.get("region", project.region) project_number = attrs.get("project_number", project.project_number) requesting_organization = attrs.get("requesting_organization", project.requesting_organization) + project_type = attrs.get("project_type", project.project_type) existing_projects = existing_projects.exclude(id=project.pk) else: topic = attrs["topic"] region = attrs["region"] project_number = attrs["project_number"] + project_type = attrs["project_type"] requesting_organization = attrs["requesting_organization"] + if not isinstance(project_type, ProjectTypeEnum): + project_type = ProjectTypeEnum(project_type) + existing_projects = existing_projects.filter( topic__iexact=topic, region__iexact=region, + project_type=project_type, project_number=project_number, requesting_organization=requesting_organization, ) From 43fdeea974b19e6e6182da831a85eab5d44d183a Mon Sep 17 00:00:00 2001 From: tnagorra Date: Tue, 7 Oct 2025 22:04:10 +0545 Subject: [PATCH 08/19] feat(export): add metadata if maxar raster-tiles are used --- apps/project/exports/exports.py | 30 ++++++++++++++++++++++++++++-- 1 file changed, 28 insertions(+), 2 deletions(-) diff --git a/apps/project/exports/exports.py b/apps/project/exports/exports.py index 09710e07..c52109b9 100644 --- a/apps/project/exports/exports.py +++
b/apps/project/exports/exports.py @@ -18,6 +18,7 @@ from project_types.tile_map_service.compare.project import CompareProjectProperty from project_types.tile_map_service.completeness.project import CompletenessProjectProperty from project_types.tile_map_service.find.project import FindProjectProperty +from utils.geo.raster_tile_server.config import RasterTileServerNameEnum from .mapping_results import generate_mapping_results from .mapping_results_aggregate.task import generate_mapping_results_aggregate_by_task @@ -56,8 +57,32 @@ def _export_project_data(project: Project, tmp_directory: Path): # legacy system path: /api/hot_tm/hot_tm_{project.id}.geojson tmp_tasking_manager_hot_tm_geojson = tmp_directory / f"hot_tm_{project.id}.geojson" - # TODO: if maxar is used for tile_server_name, this should be true - add_metadata = False + # FIXME(tnagorra): move this to project handler + tile_servers = set[RasterTileServerNameEnum]() + if isinstance( + project_type_handler.project_type_specifics, + FindProjectProperty, + ): + tile_servers.add(project_type_handler.project_type_specifics.tile_server_property.name) + elif isinstance( + project_type_handler.project_type_specifics, + CompareProjectProperty, + ): + tile_servers.add(project_type_handler.project_type_specifics.tile_server_property.name) + tile_servers.add(project_type_handler.project_type_specifics.tile_server_b_property.name) + elif isinstance( + project_type_handler.project_type_specifics, + CompletenessProjectProperty, + ): + tile_servers.add(project_type_handler.project_type_specifics.tile_server_property.name) + if project_type_handler.project_type_specifics.overlay_tile_server_property.raster: + tile_servers.add( + project_type_handler.project_type_specifics.overlay_tile_server_property.raster.tile_server.name, + ) + + add_metadata = ( + RasterTileServerNameEnum.MAXAR_STANDARD in tile_servers or RasterTileServerNameEnum.MAXAR_PREMIUM in tile_servers + ) custom_options_raw = [] @@ -136,6 +161,7 @@ def _export_project_data(project: Project, tmp_directory: Path): tmp_project_stats_by_date_csv.name, ) + # FIXME(tnagorra): move this to project handler generate_hot_tm_geometries = project.project_type_enum in [ ProjectTypeEnum.COMPARE, ProjectTypeEnum.COMPLETENESS, From 0d6542999e93056613b1f852f1be7dd537b82f13 Mon Sep 17 00:00:00 2001 From: tnagorra Date: Tue, 7 Oct 2025 22:05:55 +0545 Subject: [PATCH 09/19] fix(firebase): fix issue with announcement sync when adding another one - new announcement was not synced because a value already existed in firebase --- apps/common/admin.py | 2 +- apps/common/firebase/base.py | 19 ++++++++++--- apps/common/tests/common_test.py | 46 ++++++++++++++++++++++++++++---- 3 files changed, 57 insertions(+), 10 deletions(-) diff --git a/apps/common/admin.py b/apps/common/admin.py index a057e846..6b7e986d 100644 --- a/apps/common/admin.py +++ b/apps/common/admin.py @@ -195,6 +195,6 @@ def save_model(self, request, obj, form, change): # type: ignore[reportMissingP previous_announcements = Announcement.objects.exclude(id=obj.id) previous_announcements.update(is_active=False) - FirebaseAnnouncementPush(obj).trigger() + FirebaseAnnouncementPush(obj).trigger(force_update=True) else: FirebaseAnnouncementPush(obj).trigger(delete=True) diff --git a/apps/common/firebase/base.py b/apps/common/firebase/base.py index 35bbf4b7..9907dc46 100644 --- a/apps/common/firebase/base.py +++ b/apps/common/firebase/base.py @@ -59,7 +59,12 @@ def handle_object_update_on_firebase(self, model_obj: T, fb_obj: K, fb_reference
@abc.abstractmethod def get_firebase_path(self, firebase_id: str, model: type[T]) -> str: ... - def trigger(self, *, delete: bool | None = None) -> None: + def trigger( + self, + *, + delete: bool | None = None, + force_update: bool | None = None, + ) -> None: model_obj = self.obj model_obj.update_firebase_push_status(FirebasePushStatusEnum.PENDING) @@ -73,7 +78,7 @@ def trigger(self, *, delete: bool | None = None) -> None: return self._delete(model_obj) else: - self._push(model_obj) + self._push(model_obj, force_update=force_update) def _delete(self, model_obj: T) -> None: if model_obj.firebase_push_status_enum != FirebasePushStatusEnum.PENDING: @@ -106,7 +111,12 @@ def _delete(self, model_obj: T) -> None: model_obj.firebase_push_status = None model_obj.save(update_fields=["firebase_last_pushed", "firebase_push_status"]) - def _push(self, model_obj: T) -> None: + def _push( + self, + model_obj: T, + *, + force_update: bool | None = None, + ) -> None: if model_obj.firebase_push_status_enum != FirebasePushStatusEnum.PENDING: logger.warning( "Firebase push error: push is not required for %s", @@ -124,13 +134,14 @@ def _push(self, model_obj: T) -> None: fb_model: typing.Any = model_ref.get() if not model_obj.firebase_last_pushed: - if fb_model is not None: + if not force_update and fb_model is not None: logger.error( "Firebase create error: %s already exists in Firebase", model_obj._meta.label, extra={"id": model_obj.pk}, ) raise InvalidObjectPushException + self.handle_new_object_on_firebase(model_obj, model_ref) else: if fb_model is None: diff --git a/apps/common/tests/common_test.py b/apps/common/tests/common_test.py index d1216269..32363aa4 100644 --- a/apps/common/tests/common_test.py +++ b/apps/common/tests/common_test.py @@ -5,6 +5,7 @@ from apps.common.models import Announcement from apps.user.models import User +from main.config import Config from main.tests import TestCase @@ -14,18 +15,26 @@ class MockRequest(typing.NamedTuple): class TestAnnouncement(TestCase): @typing.override - def setUp(self): + @classmethod + def setUpClass(cls): + super().setUpClass() # Create a superuser for admin login User = get_user_model() - self.admin_user = User.objects.create_superuser( # type: ignore[reportAttributeAccessIssue] + cls.admin_user = User.objects.create_superuser( # type: ignore[reportAttributeAccessIssue] email="admin@example.com", password="password123", # noqa: S106 ) - self.client.login(email="admin@example.com", password="password123") # noqa: S106 def test_create_announcement(self): + request = MockRequest(user=self.admin_user) + self.force_login(request.user) + url = reverse("admin:common_announcement_add") + announcement_ref = self.firebase_helper.ref( + Config.FirebaseKeys.announcement(), + ) + data = { "client_id": "01K44YMVYTKY1R3906XW3QQK05", "text": "We have a new release v1.2.3", @@ -43,6 +52,10 @@ def test_create_announcement(self): assert announcement_1.is_active assert announcement_1.url == "https://play.google.com/store/apps/details?id=org.missingmaps.mapswipe" + firebase_announcement: typing.Any = announcement_ref.get() + assert firebase_announcement is not None + assert firebase_announcement.get("url") == "https://play.google.com/store/apps/details?id=org.missingmaps.mapswipe" + # test active only one announcement at once data = { "client_id": "01K44ZF3KMS6GV2AD93EG6WP9X", @@ -61,9 +74,32 @@ def test_create_announcement(self): assert announcement_2.is_active assert announcement_2.url == 
"https://mapswipe.org/en/blogs/2025-04-03-papua-new-guinea-swiping-to-find-airstrips" + firebase_announcement: typing.Any = announcement_ref.get() + assert firebase_announcement is not None + assert ( + firebase_announcement.get("url") + == "https://mapswipe.org/en/blogs/2025-04-03-papua-new-guinea-swiping-to-find-airstrips" + ) + # check only one active announcement assert Announcement.objects.filter(is_active=True).count() == 1 - announcement_1.refresh_from_db() - # check if other announcements are inactive + # check if current announcement is active + announcement_1.refresh_from_db() assert not announcement_1.is_active + + # test announcement de-activation + url = reverse("admin:common_announcement_change", args=[announcement_2.pk]) + data = { + "client_id": "01K44ZF3KMS6GV2AD93EG6WP9X", + "text": "Checkout the latest blog post about airstrips", + "is_active": False, + "url": "https://mapswipe.org/en/blogs/2025-04-03-papua-new-guinea-swiping-to-find-airstrips", + "created_by": self.admin_user.id, + "modified_by": self.admin_user.id, + } + response = self.client.post(url, data, follow=True) + assert response.status_code == 200 + + firebase_announcement: typing.Any = announcement_ref.get() + assert firebase_announcement is None From 084b4b3bec9a34bb9001bb3bdce88d315f8aaeac Mon Sep 17 00:00:00 2001 From: tnagorra Date: Tue, 7 Oct 2025 22:08:08 +0545 Subject: [PATCH 10/19] test(project): use different raster tileserver in project mutation tests --- apps/project/tests/mutation_test.py | 121 +++++++++++++++------------- 1 file changed, 64 insertions(+), 57 deletions(-) diff --git a/apps/project/tests/mutation_test.py b/apps/project/tests/mutation_test.py index 54c2e6db..ccdeb795 100644 --- a/apps/project/tests/mutation_test.py +++ b/apps/project/tests/mutation_test.py @@ -858,17 +858,15 @@ def test_project_update(self, mock_requests): # type: ignore[reportMissingParam "aoiGeometry": aoi_geometry_asset["id"], "zoomLevel": 15, "tileServerProperty": { - "name": self.genum(RasterTileServerNameEnum.CUSTOM), - "custom": { - "url": "https://hi-there/{x}/{y}/{z}", - "credits": "My Map", + "name": self.genum(RasterTileServerNameEnum.MAXAR_STANDARD), + "maxarStandard": { + "credits": "default credits", }, }, "tileServerBProperty": { - "name": self.genum(RasterTileServerNameEnum.CUSTOM), - "custom": { - "url": "https://hi-there-2/{x}/{y}/{z}", - "credits": "My Map 2", + "name": self.genum(RasterTileServerNameEnum.MAXAR_PREMIUM), + "maxarPremium": { + "credits": "default credits", }, }, }, @@ -895,10 +893,9 @@ def test_project_update(self, mock_requests): # type: ignore[reportMissingParam "aoiGeometry": aoi_geometry_asset["id"], "zoomLevel": 15, "tileServerProperty": { - "name": self.genum(RasterTileServerNameEnum.CUSTOM), - "custom": { - "url": "https://hi-there/{x}/{y}/{z}", - "credits": "My Map", + "name": self.genum(RasterTileServerNameEnum.MAXAR_PREMIUM), + "maxarPremium": { + "credits": "default credits", }, }, }, @@ -917,10 +914,9 @@ def test_project_update(self, mock_requests): # type: ignore[reportMissingParam "zoom_level": 15, "aoi_geometry": aoi_geometry_asset["id"], "tile_server_property": { - "name": RasterTileServerNameEnum.CUSTOM.value, - "custom": { - "credits": "My Map", - "url": "https://hi-there/{x}/{y}/{z}", + "name": RasterTileServerNameEnum.MAXAR_PREMIUM.value, + "maxar_premium": { + "credits": "default credits", }, }, } @@ -1084,39 +1080,6 @@ def setUpClass(cls): "projectInstruction": "Buildings", } - cls.tile_server_property = { - "valid_custom": { - "name": 
cls.genum(RasterTileServerNameEnum.CUSTOM), - "custom": { - "url": "https://hi-there/{x}/{y}/{z}", - "credits": "My Map", - }, - }, - "valid_custom_02": { - "name": cls.genum(RasterTileServerNameEnum.CUSTOM), - "custom": { - "url": "https://hi-here/{x}/{y}/{z}", - "credits": "My Map", - }, - }, - "invalid_custom": { - "name": cls.genum(RasterTileServerNameEnum.CUSTOM), - "custom": { - "url": "https://hi-there", - "credits": "My Map", - }, - }, - "invalid_custom_02": { - "name": cls.genum(RasterTileServerNameEnum.CUSTOM), - "custom": { - "url": "https://hi-there/{{x}}/{{y}}/{{z}}", - "credits": "My Map", - }, - }, - } - # NOTE: _internal is for snake_case attributes, currently its same - cls.tile_server_property_internal = cls.tile_server_property - def _create_project_aoi_asset(self, project_asset_data: dict, **kwargs): # type: ignore[reportMissingParameterType, reportMissingTypeArgument] with self.captureOnCommitCallbacks(execute=True): return create_project_aoi_asset_query( @@ -1272,8 +1235,19 @@ def test_project_compare(self, mock_requests): # type: ignore[reportMissingPara "compare": { "aoiGeometry": aoi_geometry_asset["id"], "zoomLevel": 15, - "tileServerProperty": self.tile_server_property["invalid_custom"], - "tileServerBProperty": self.tile_server_property["valid_custom"], + "tileServerProperty": { + "name": self.genum(RasterTileServerNameEnum.CUSTOM), + "custom": { + "url": "https://hi-there", + "credits": "My Map", + }, + }, + "tileServerBProperty": { + "name": self.genum(RasterTileServerNameEnum.ESRI), + "esri": { + "credits": "default credits", + }, + }, }, }, } @@ -1299,8 +1273,19 @@ def test_project_compare(self, mock_requests): # type: ignore[reportMissingPara "compare": { "aoiGeometry": aoi_geometry_asset["id"], "zoomLevel": 15, - "tileServerProperty": self.tile_server_property["invalid_custom_02"], - "tileServerBProperty": self.tile_server_property["valid_custom"], + "tileServerProperty": { + "name": self.genum(RasterTileServerNameEnum.CUSTOM), + "custom": { + "url": "https://hi-there/{{x}}/{{y}}/{{z}}", + "credits": "My Map", + }, + }, + "tileServerBProperty": { + "name": self.genum(RasterTileServerNameEnum.ESRI_BETA), + "esriBeta": { + "credits": "default credits", + }, + }, }, }, } @@ -1326,8 +1311,19 @@ def test_project_compare(self, mock_requests): # type: ignore[reportMissingPara "compare": { "aoiGeometry": aoi_geometry_asset["id"], "zoomLevel": 15, - "tileServerProperty": self.tile_server_property["valid_custom"], - "tileServerBProperty": self.tile_server_property["valid_custom_02"], + "tileServerProperty": { + "name": self.genum(RasterTileServerNameEnum.CUSTOM), + "custom": { + "url": "https://hi-there/{x}/{y}/{z}", + "credits": "My Map", + }, + }, + "tileServerBProperty": { + "name": self.genum(RasterTileServerNameEnum.MAXAR_STANDARD), + "maxarStandard": { + "credits": "default credits", + }, + }, }, }, } @@ -1344,8 +1340,19 @@ def test_project_compare(self, mock_requests): # type: ignore[reportMissingPara assert latest_project.project_type_specifics == { "aoi_geometry": aoi_geometry_asset["id"], "zoom_level": 15, - "tile_server_property": self.tile_server_property_internal["valid_custom"], - "tile_server_b_property": self.tile_server_property_internal["valid_custom_02"], + "tile_server_property": { + "name": RasterTileServerNameEnum.CUSTOM.value, + "custom": { + "url": "https://hi-there/{x}/{y}/{z}", + "credits": "My Map", + }, + }, + "tile_server_b_property": { + "name": RasterTileServerNameEnum.MAXAR_STANDARD.value, + "maxar_standard": { + "credits": "default 
credits", + }, + }, } compare_project.CompareProjectProperty.model_validate( latest_project.project_type_specifics, From 21a0268bc6dcf2edc5e0fe932e9fc7db431c30da Mon Sep 17 00:00:00 2001 From: tnagorra Date: Wed, 8 Oct 2025 09:09:52 +0545 Subject: [PATCH 11/19] chore(tutorial): remove unused create method for tutorial --- apps/tutorial/serializers.py | 32 -------------------------------- 1 file changed, 32 deletions(-) diff --git a/apps/tutorial/serializers.py b/apps/tutorial/serializers.py index 3e521a86..d2259774 100644 --- a/apps/tutorial/serializers.py +++ b/apps/tutorial/serializers.py @@ -340,38 +340,6 @@ class Meta: # type: ignore[reportIncompatibleVariableOverride] "scenarios", ) - @typing.override - def create(self, validated_data: dict[typing.Any, typing.Any]): - scenarios_data = self.initial_data["scenarios"] - information_pages_data = self.initial_data["information_pages"] - validated_data.pop("scenarios") - validated_data.pop("information_pages") - tutorial = super().create(validated_data) - - for scenario_data in scenarios_data: - scenario_serializer = TutorialScenarioPageSerializer( - data=scenario_data, - context={ - **self.context, - "tutorial": tutorial, - }, - ) - scenario_serializer.is_valid(raise_exception=True) - scenario_serializer.save() - - for information_page_data in information_pages_data: - information_page_serializer = TutorialInformationPageSerializer( - data=information_page_data, - context={ - **self.context, - "tutorial": tutorial, - }, - ) - information_page_serializer.is_valid(raise_exception=True) - information_page_serializer.save() - - return tutorial - @typing.override def update(self, instance: Tutorial, validated_data: dict[typing.Any, typing.Any]): scenarios_data = self.initial_data.get("scenarios") or [] From cec03201a28a41203507762ae2602eb8ae01450d Mon Sep 17 00:00:00 2001 From: tnagorra Date: Wed, 8 Oct 2025 09:10:40 +0545 Subject: [PATCH 12/19] test(tutorial): add test for image block --- apps/tutorial/tests/mutation_test.py | 94 ++++++++++++++++++++++++++++ 1 file changed, 94 insertions(+) diff --git a/apps/tutorial/tests/mutation_test.py b/apps/tutorial/tests/mutation_test.py index d9b8e812..0b19e313 100644 --- a/apps/tutorial/tests/mutation_test.py +++ b/apps/tutorial/tests/mutation_test.py @@ -1,6 +1,10 @@ import typing +from io import BytesIO from pathlib import Path +from django.conf import settings +from django.core.files.temp import NamedTemporaryFile +from PIL import Image from ulid import ULID from apps.project.factories import OrganizationFactory, ProjectFactory @@ -14,6 +18,7 @@ ) from apps.tutorial.models import ( Tutorial, + TutorialAssetInputTypeEnum, TutorialStatusEnum, ) from apps.tutorial.serializers import VALID_TUTORIAL_STATUS_TRANSITIONS @@ -26,6 +31,39 @@ BASE_DIR = Path(__file__).resolve().parent +def create_tutorial_image_asset_query( + *, + query_check_func: typing.Callable, # type: ignore[reportMissingTypeArgument] + query: str, + tutorial_asset_data: dict, # type: ignore[reportMissingTypeArgument] + **kwargs, # type: ignore[reportMissingParameterType] +) -> dict: # type: ignore[reportMissingTypeArgument] + with ( + NamedTemporaryFile(dir=settings.TEMP_DIR, suffix=".jpeg") as image_file, + ): + img = Image.new("RGB", (10, 10), color="red") + buf = BytesIO() + img.save(buf, format="JPEG") + buf.seek(0) + + image_file.write(buf.read()) + image_file.seek(0) + + return query_check_func( + query, + variables={ + "data": tutorial_asset_data, + }, + files={ + "imageFile": image_file, + }, + map={ + "imageFile": 
["variables.data.file"], + }, + **kwargs, + ) + + class Mutation: CREATE_TUTORIAL = """ mutation CreateTutorial($data: TutorialCreateInput!) { @@ -201,6 +239,33 @@ class Mutation: } """ + CREATE_TUTORIAL_ASSET = """ + mutation CreateTutorialAsset($data: TutorialAssetCreateInput!) { + createTutorialAsset(data: $data) { + ... on OperationInfo { + __typename + messages { + code + field + kind + message + } + } + ... on TutorialAssetTypeMutationResponseType { + errors + ok + result { + id + clientId + type + mimetype + tutorialId + } + } + } + } + """ + class TestTutorialMutation(TestCase): @typing.override @@ -240,6 +305,17 @@ def setUpClass(cls): project_type_specifics=cls.project_type_specifics, ) + def _create_tutorial_image_asset(self, tutorial_asset_data: dict, **kwargs): # type: ignore[reportMissingParameterType, reportMissingTypeArgument] + with self.captureOnCommitCallbacks(execute=True): + return create_tutorial_image_asset_query( + query_check_func=self.query_check, + query=Mutation.CREATE_TUTORIAL_ASSET, + tutorial_asset_data={ + **tutorial_asset_data, + "inputType": self.genum(TutorialAssetInputTypeEnum.INFORMATION_BLOCK_IMAGE), + }, + ) + def _create_tutorial_mutation(self, tutorial_data: dict, **kwargs): # type: ignore[reportMissingParameterType, reportMissingTypeArgument] with self.captureOnCommitCallbacks(execute=True): return self.query_check( @@ -298,6 +374,18 @@ def test_tutorial_create(self): assert latest_tutorial.created_by_id == self.user.pk assert latest_tutorial.modified_by_id == self.user.pk + # Create page block image + + # Creating Project Image Asset + tutorial_asset_data = { + "tutorial": latest_tutorial.pk, + "clientId": str(ULID()), + } + content = self._create_tutorial_image_asset(tutorial_asset_data) + resp_data = content["data"]["createTutorialAsset"] + assert resp_data["errors"] is None, content + image_asset = resp_data["result"] + # Update Tutorial tutorial_data = { @@ -419,6 +507,12 @@ def test_tutorial_create(self): "text": "These structures are built to serve specific purposes, such as housing, " "transportation, defense, communication, or recreation.", }, + { + "clientId": str(ULID()), + "blockNumber": 3, + "blockType": "IMAGE", + "image": image_asset["id"], + }, ], }, }, From 9cf46ab28166ac0aeb98651911078f65db3548e7 Mon Sep 17 00:00:00 2001 From: tnagorra Date: Wed, 8 Oct 2025 09:13:08 +0545 Subject: [PATCH 13/19] test(project): add test for updating processed project --- apps/project/tests/mutation_test.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/apps/project/tests/mutation_test.py b/apps/project/tests/mutation_test.py index ccdeb795..bed54a00 100644 --- a/apps/project/tests/mutation_test.py +++ b/apps/project/tests/mutation_test.py @@ -1567,6 +1567,22 @@ class TaskGroupType(typing.TypedDict): fb_project: typing.Any = project_ref.get() assert fb_project is not None + # Updating Processed Project after publishing + project_data = { + "clientId": project_client_id, + "maxTasksPerUser": 1000, + } + content = self._update_processed_project_mutation(project_id, project_data) + resp_data = content["data"]["updateProcessedProject"] + assert resp_data["errors"] is None, content + assert resp_data["result"]["maxTasksPerUser"] == 1000 + project_ref = self.firebase_helper.ref( + Config.FirebaseKeys.project(latest_project.firebase_id), + ) + fb_project: typing.Any = project_ref.get() + assert fb_project is not None + assert fb_project["maxTasksPerUser"] == 1000 + @patch("apps.project.serializers.process_project_task.apply_async") def 
test_project_street(self, mock_requests): # type: ignore[reportMissingParameterType] self.force_login(self.user) From 5587c75214fcdd558c12eea3c339bfc7dfe9a5ba Mon Sep 17 00:00:00 2001 From: tnagorra Date: Wed, 8 Oct 2025 09:16:11 +0545 Subject: [PATCH 14/19] fix(firebase): sync maxTasksPerUser on project update --- firebase | 2 +- project_types/base/project.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/firebase b/firebase index a1dbcf40..ddabcd2e 160000 --- a/firebase +++ b/firebase @@ -1 +1 @@ -Subproject commit a1dbcf408f2f20f8145ef5f82a35f98d27c7501d +Subproject commit ddabcd2e5184cac2ca330b8befd3e86a41c0217c diff --git a/project_types/base/project.py b/project_types/base/project.py index 0b0e9136..cf62cfa0 100644 --- a/project_types/base/project.py +++ b/project_types/base/project.py @@ -443,6 +443,7 @@ def update_project_on_firebase(self, project_ref: FbReference, fb_project: fireb projectNumber=self.project.project_number, projectRegion=self.project.region, projectTopic=self.project.topic, + maxTasksPerUser=self.project.max_tasks_per_user, projectTopicKey=self.project.generate_name().lower().strip(), projectDetails=self.project.description or "n/a", requestingOrganisation=self.project.requesting_organization.name, From 95581378038b8b40dba2be4087a5a20be7c32f73 Mon Sep 17 00:00:00 2001 From: tnagorra Date: Wed, 8 Oct 2025 09:22:17 +0545 Subject: [PATCH 15/19] fix(project): enable editing paused, withdrawn and finished project --- apps/project/serializers.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/apps/project/serializers.py b/apps/project/serializers.py index f8abb938..913b1bdf 100644 --- a/apps/project/serializers.py +++ b/apps/project/serializers.py @@ -294,7 +294,10 @@ def _validate_project_type_specifics(self, attrs: dict[str, typing.Any]): def validate(self, attrs: dict[str, typing.Any]): assert self.instance is not None - if self.instance.status_enum not in [Project.Status.DRAFT, Project.Status.PROCESSING_FAILED]: + if self.instance.status_enum not in [ + Project.Status.DRAFT, + Project.Status.PROCESSING_FAILED, + ]: raise serializers.ValidationError( { "status": gettext("Cannot update project with status %s") % self.instance.status_enum.label, @@ -467,8 +470,11 @@ def validate(self, attrs: dict[str, typing.Any]): # FIXME(tnagorra): Should we be able to edit paused, withdrawn, and published project if self.instance.status_enum not in [ Project.Status.PROCESSED, - Project.Status.PUBLISHED, Project.Status.PUBLISHING_FAILED, + Project.Status.PUBLISHED, + Project.Status.PAUSED, + Project.Status.WITHDRAWN, + Project.Status.FINISHED, ]: raise serializers.ValidationError( { From be5a648a91d6572dd6b866bd234dae8e393aa197 Mon Sep 17 00:00:00 2001 From: tnagorra Date: Wed, 8 Oct 2025 13:55:00 +0545 Subject: [PATCH 16/19] fix(serializers): add checks for non implemented actions --- apps/project/serializers.py | 22 +++++++++++++++++++++- apps/tutorial/serializers.py | 18 +++++++++++++++++- 2 files changed, 38 insertions(+), 2 deletions(-) diff --git a/apps/project/serializers.py b/apps/project/serializers.py index 913b1bdf..16a9ea20 100644 --- a/apps/project/serializers.py +++ b/apps/project/serializers.py @@ -7,7 +7,7 @@ from django.utils.translation import gettext from rest_framework import serializers -from apps.common.models import AssetMimetypeEnum, FirebasePushStatusEnum +from apps.common.models import AssetMimetypeEnum, CommonAsset, FirebasePushStatusEnum from apps.common.serializers import ArchivableResourceSerializer, 
CommonAssetSerializer, UserResourceSerializer from apps.contributor.models import ContributorTeam from apps.project.firebase.push import FirebaseOrganizationPush @@ -147,6 +147,10 @@ def validate_team(self, team: ContributorTeam | None) -> ContributorTeam | None: raise serializers.ValidationError(gettext("Cannot use archived team on a project.")) return team + @typing.override + def update(self, instance: Project, validated_data: dict[typing.Any, typing.Any]): + raise NotImplementedError("update is not allowed") + # NOTE: Make sure this matches with the strawberry Input ./graphql/inputs.py class ProjectUpdateSerializer(UserResourceSerializer[Project]): @@ -309,6 +313,10 @@ def validate(self, attrs: dict[str, typing.Any]): self._validate_project_type_specifics(attrs) return super().validate(attrs) + @typing.override + def create(self, validated_data: dict[typing.Any, typing.Any]): + raise NotImplementedError("create is not allowed") + @typing.override def update(self, instance: Project, validated_data: dict[typing.Any, typing.Any]): proj = super().update(instance, validated_data) @@ -503,6 +511,10 @@ def validate(self, attrs: dict[str, typing.Any]): self._validate_project_instruction(attrs) return super().validate(attrs) + @typing.override + def create(self, validated_data: dict[typing.Any, typing.Any]): + raise NotImplementedError("create is not allowed") + @typing.override def update(self, instance: Project, validated_data: dict[str, typing.Any]) -> Project: updated_project = super().update(instance, validated_data) @@ -671,6 +683,10 @@ def _validate_asset_type_specifics( attrs["asset_type_specifics"] = asset_type_specifics + @typing.override + def update(self, instance: CommonAsset, validated_data: dict[typing.Any, typing.Any]): + raise NotImplementedError("update is not allowed") + @typing.override def validate(self, attrs: dict[str, typing.Any]) -> dict[str, typing.Any]: attrs = super().validate(attrs) @@ -775,6 +791,10 @@ def validate(self, attrs: dict[str, typing.Any]): ) return attrs + @typing.override + def create(self, validated_data: dict[typing.Any, typing.Any]): + raise NotImplementedError("create is not allowed") + @typing.override def update(self, instance: Project, validated_data: dict[str, typing.Any]) -> Project: old_status_enum = instance.status_enum diff --git a/apps/tutorial/serializers.py b/apps/tutorial/serializers.py index d2259774..5a095670 100644 --- a/apps/tutorial/serializers.py +++ b/apps/tutorial/serializers.py @@ -7,7 +7,7 @@ from django.utils.translation import gettext from rest_framework import serializers -from apps.common.models import AssetMimetypeEnum, FirebasePushStatusEnum +from apps.common.models import AssetMimetypeEnum, CommonAsset, FirebasePushStatusEnum from apps.common.serializers import CommonAssetSerializer, DrfContextType, UserResourceSerializer from apps.project.models import Project, ProjectTypeEnum from project_types.store import get_tutorial_task_property @@ -326,6 +326,10 @@ def validate_project(self, project: Project) -> Project: ) return project + @typing.override + def update(self, instance: Tutorial, validated_data: dict[typing.Any, typing.Any]): + raise NotImplementedError("update is not allowed") + # NOTE: Make sure this matches with the strawberry Input ./graphql/inputs.py class TutorialUpdateSerializer(UserResourceSerializer[Tutorial]): @@ -340,6 +344,10 @@ class Meta: # type: ignore[reportIncompatibleVariableOverride] "scenarios", ) + @typing.override + def create(self, validated_data: dict[typing.Any, typing.Any]): + raise 
NotImplementedError("create is not allowed") + @typing.override def update(self, instance: Tutorial, validated_data: dict[typing.Any, typing.Any]): scenarios_data = self.initial_data.get("scenarios") or [] @@ -451,6 +459,10 @@ def validate(self, attrs: dict[str, typing.Any]) -> dict[str, typing.Any]: return attrs + @typing.override + def update(self, instance: CommonAsset, validated_data: dict[typing.Any, typing.Any]): + raise NotImplementedError("update is not allowed") + class TutorialStatusUpdateSerializer(UserResourceSerializer[Tutorial]): class Meta: # type: ignore[reportIncompatibleVariableOverride] @@ -499,6 +511,10 @@ def validate(self, attrs: dict[str, typing.Any]): ) return attrs + @typing.override + def create(self, validated_data: dict[typing.Any, typing.Any]): + raise NotImplementedError("create is not allowed") + @typing.override def update(self, instance: Tutorial, validated_data: dict[typing.Any, typing.Any]): old_status_enum = instance.status_enum From c337f0d3c83c06e5809b06a6a16d7e5399f53037 Mon Sep 17 00:00:00 2001 From: tnagorra Date: Wed, 8 Oct 2025 14:25:01 +0545 Subject: [PATCH 17/19] refactor(config): use Config instead of settings --- apps/common/views.py | 5 +++-- .../management/commands/create_contributor_users.py | 3 +-- apps/contributor/tests/e2e_usergroup_test.py | 4 ++-- .../tests/e2e_create_project_tile_map_service_test.py | 8 ++++---- apps/project/tests/e2e_create_street_project_test.py | 8 ++++---- .../tests/e2e_create_validate_image_project_test.py | 8 ++++---- apps/project/tests/e2e_create_validate_project_test.py | 8 ++++---- apps/project/tests/e2e_organization_test.py | 4 ++-- apps/project/tests/mutation_test.py | 3 +-- apps/tutorial/tests/mutation_test.py | 3 +-- project_types/tile_map_service/base/project.py | 4 ++-- 11 files changed, 28 insertions(+), 30 deletions(-) diff --git a/apps/common/views.py b/apps/common/views.py index 4764e6c6..35124f19 100644 --- a/apps/common/views.py +++ b/apps/common/views.py @@ -14,6 +14,7 @@ from rest_framework.views import APIView from apps.contributor.models import ContributorUser +from main.config import Config from .serializers import FirebaseAuthRequestSerializer @@ -29,7 +30,7 @@ # FIXME: Maybe a better approach then this? 
def _get_version_from_pyproject(base_path: Path) -> str: - data = toml.load(settings.BASE_DIR / base_path / "pyproject.toml") + data = toml.load(Config.BASE_DIR / base_path / "pyproject.toml") return data["project"]["version"] @@ -46,7 +47,7 @@ def render_to_response_json( **json.loads(response.content), "app": { "environment": settings.APP_ENVIRONMENT, - "version": _get_version_from_pyproject(settings.BASE_DIR), + "version": _get_version_from_pyproject(Config.BASE_DIR), "git": { "branch": git_helper.branch, "commit": git_helper.commit_sha, diff --git a/apps/contributor/management/commands/create_contributor_users.py b/apps/contributor/management/commands/create_contributor_users.py index 82c368ad..0a7259ba 100644 --- a/apps/contributor/management/commands/create_contributor_users.py +++ b/apps/contributor/management/commands/create_contributor_users.py @@ -2,7 +2,6 @@ import typing import uuid -from django.conf import settings from django.core.management.base import BaseCommand from django.utils import timezone from pyfirebase_mapswipe import extended_models as firebase_extended_models @@ -62,7 +61,7 @@ class Command(BaseCommand): @typing.override def handle(self, *args, **options): # type: ignore[reportMissingParameterType] - if not settings.ENABLE_DANGER_MODE: + if not Config.ENABLE_DANGER_MODE: logger.warning("Dummy data generation is disabled") return diff --git a/apps/contributor/tests/e2e_usergroup_test.py b/apps/contributor/tests/e2e_usergroup_test.py index fff07c58..d1c1e985 100644 --- a/apps/contributor/tests/e2e_usergroup_test.py +++ b/apps/contributor/tests/e2e_usergroup_test.py @@ -3,11 +3,11 @@ from pathlib import Path import json5 -from django.conf import settings from apps.common.utils import remove_object_keys from apps.contributor.factories import ContributorUserFactory from apps.user.factories import UserFactory +from main.config import Config from main.tests import TestCase @@ -83,7 +83,7 @@ def setUpClass(cls): def test_usergroup_e2e(self): self.force_login(self.user) - data_file = Path(settings.BASE_DIR, "assets/tests/usergroup/data.json5") + data_file = Path(Config.BASE_DIR, "assets/tests/usergroup/data.json5") with data_file.open("r", encoding="utf-8") as f: test_data_list = json5.load(f) diff --git a/apps/project/tests/e2e_create_project_tile_map_service_test.py b/apps/project/tests/e2e_create_project_tile_map_service_test.py index e1504baa..a6b6f35f 100644 --- a/apps/project/tests/e2e_create_project_tile_map_service_test.py +++ b/apps/project/tests/e2e_create_project_tile_map_service_test.py @@ -4,7 +4,6 @@ from pathlib import Path import json5 -from django.conf import settings from django.db.models.signals import pre_save from ulid import ULID @@ -22,6 +21,7 @@ from apps.project.models import Organization, Project from apps.tutorial.models import Tutorial from apps.user.factories import UserFactory +from main.config import Config from main.tests import TestCase @@ -310,7 +310,7 @@ def test_compare_project_e2e(self): def _test_project(self, projectKey: str, filename: str): # Load test data file - full_path = Path(settings.BASE_DIR, filename) + full_path = Path(Config.BASE_DIR, filename) with full_path.open("r", encoding="utf-8") as f: test_data = json5.load(f) @@ -326,8 +326,8 @@ def _test_project(self, projectKey: str, filename: str): self.force_login(user) # Define full path for image and AOI files - image_filename = Path(settings.BASE_DIR) / test_data["assets"]["image"] - aoi_geometry_filename = Path(settings.BASE_DIR) / test_data["assets"]["aoi"] + 
image_filename = Path(Config.BASE_DIR) / test_data["assets"]["image"] + aoi_geometry_filename = Path(Config.BASE_DIR) / test_data["assets"]["aoi"] # Create an organization create_organization_data = test_data["create_organization"] diff --git a/apps/project/tests/e2e_create_street_project_test.py b/apps/project/tests/e2e_create_street_project_test.py index f01b638d..aa0766ad 100644 --- a/apps/project/tests/e2e_create_street_project_test.py +++ b/apps/project/tests/e2e_create_street_project_test.py @@ -6,7 +6,6 @@ import json5 import pytest -from django.conf import settings from django.db.models.signals import pre_save from ulid import ULID @@ -15,6 +14,7 @@ from apps.project.models import Organization, Project from apps.tutorial.models import Tutorial from apps.user.factories import UserFactory +from main.config import Config from main.tests import TestCase logging.getLogger("vcr").setLevel(logging.WARNING) @@ -264,7 +264,7 @@ def test_street_project_e2e(self): def _test_project(self, filename: str): # Load test data file - full_path = Path(settings.BASE_DIR, filename) + full_path = Path(Config.BASE_DIR, filename) with full_path.open("r", encoding="utf-8") as f: test_data = json5.load(f) @@ -280,8 +280,8 @@ def _test_project(self, filename: str): self.force_login(user) # Define full path for image and AOI files - image_filename = Path(settings.BASE_DIR) / test_data["assets"]["image"] - aoi_geometry_filename = Path(settings.BASE_DIR) / test_data["assets"]["aoi"] + image_filename = Path(Config.BASE_DIR) / test_data["assets"]["image"] + aoi_geometry_filename = Path(Config.BASE_DIR) / test_data["assets"]["aoi"] # Create an organization create_organization_data = test_data["create_organization"] diff --git a/apps/project/tests/e2e_create_validate_image_project_test.py b/apps/project/tests/e2e_create_validate_image_project_test.py index 79a2bcc1..c7425dbb 100644 --- a/apps/project/tests/e2e_create_validate_image_project_test.py +++ b/apps/project/tests/e2e_create_validate_image_project_test.py @@ -4,7 +4,6 @@ from pathlib import Path import json5 -from django.conf import settings from django.db.models.signals import pre_save from ulid import ULID @@ -13,6 +12,7 @@ from apps.project.models import Organization, Project from apps.tutorial.models import Tutorial from apps.user.factories import UserFactory +from main.config import Config from main.tests import TestCase @@ -259,7 +259,7 @@ def test_validate_image_project_e2e(self): def _test_project(self, filename: str): # Load test data file - full_path = Path(settings.BASE_DIR, filename) + full_path = Path(Config.BASE_DIR, filename) with full_path.open("r", encoding="utf-8") as f: test_data = json5.load(f) @@ -275,8 +275,8 @@ def _test_project(self, filename: str): self.force_login(user) # Define full path for image and AOI files - image_filename = Path(settings.BASE_DIR) / test_data["assets"]["image"] - coco_filename = Path(settings.BASE_DIR) / test_data["assets"]["coco_dataset"] + image_filename = Path(Config.BASE_DIR) / test_data["assets"]["image"] + coco_filename = Path(Config.BASE_DIR) / test_data["assets"]["coco_dataset"] # Create an organization create_organization_data = test_data["create_organization"] diff --git a/apps/project/tests/e2e_create_validate_project_test.py b/apps/project/tests/e2e_create_validate_project_test.py index 8b853926..de63cf80 100644 --- a/apps/project/tests/e2e_create_validate_project_test.py +++ b/apps/project/tests/e2e_create_validate_project_test.py @@ -8,7 +8,6 @@ import json5 import pytest -from django.conf 
import settings from django.db.models.signals import pre_save from ulid import ULID @@ -17,6 +16,7 @@ from apps.project.models import Organization, Project from apps.tutorial.models import Tutorial from apps.user.factories import UserFactory +from main.config import Config from main.tests import TestCase logging.getLogger("vcr").setLevel(logging.WARNING) @@ -266,7 +266,7 @@ def test_validate_project_e2e(self): def _test_project(self, filename: str): # Load test data file - full_path = Path(settings.BASE_DIR, filename) + full_path = Path(Config.BASE_DIR, filename) with full_path.open("r", encoding="utf-8") as f: test_data = json5.load(f) @@ -282,8 +282,8 @@ def _test_project(self, filename: str): self.force_login(user) # Define full path for image and AOI files - image_filename = Path(settings.BASE_DIR) / test_data["assets"]["image"] - aoi_geometry_filename = Path(settings.BASE_DIR) / test_data["assets"]["aoi"] + image_filename = Path(Config.BASE_DIR) / test_data["assets"]["image"] + aoi_geometry_filename = Path(Config.BASE_DIR) / test_data["assets"]["aoi"] # Create an organization create_organization_data = test_data["create_organization"] diff --git a/apps/project/tests/e2e_organization_test.py b/apps/project/tests/e2e_organization_test.py index a299f4cf..5ab30ca5 100644 --- a/apps/project/tests/e2e_organization_test.py +++ b/apps/project/tests/e2e_organization_test.py @@ -2,9 +2,9 @@ from pathlib import Path import json5 -from django.conf import settings from apps.user.factories import UserFactory +from main.config import Config from main.tests import TestCase @@ -76,7 +76,7 @@ def test_organization_e2e(self): self.force_login(self.user) # Load test data from JSON5 - data_file = Path(settings.BASE_DIR, "assets/tests/organization/data.json5") + data_file = Path(Config.BASE_DIR, "assets/tests/organization/data.json5") with data_file.open("r", encoding="utf-8") as f: test_data_list = json5.load(f) diff --git a/apps/project/tests/mutation_test.py b/apps/project/tests/mutation_test.py index bed54a00..8009cebd 100644 --- a/apps/project/tests/mutation_test.py +++ b/apps/project/tests/mutation_test.py @@ -3,7 +3,6 @@ from pathlib import Path from unittest.mock import call, patch -from django.conf import settings from django.core.files.temp import NamedTemporaryFile from PIL import Image from ulid import ULID @@ -41,7 +40,7 @@ def create_project_image_asset_query( **kwargs, # type: ignore[reportMissingParameterType] ) -> dict: # type: ignore[reportMissingTypeArgument] with ( - NamedTemporaryFile(dir=settings.TEMP_DIR, suffix=".jpeg") as image_file, + NamedTemporaryFile(dir=Config.TEMP_DIR, suffix=".jpeg") as image_file, ): img = Image.new("RGB", (10, 10), color="red") buf = BytesIO() diff --git a/apps/tutorial/tests/mutation_test.py b/apps/tutorial/tests/mutation_test.py index 0b19e313..268f1d55 100644 --- a/apps/tutorial/tests/mutation_test.py +++ b/apps/tutorial/tests/mutation_test.py @@ -2,7 +2,6 @@ from io import BytesIO from pathlib import Path -from django.conf import settings from django.core.files.temp import NamedTemporaryFile from PIL import Image from ulid import ULID @@ -39,7 +38,7 @@ def create_tutorial_image_asset_query( **kwargs, # type: ignore[reportMissingParameterType] ) -> dict: # type: ignore[reportMissingTypeArgument] with ( - NamedTemporaryFile(dir=settings.TEMP_DIR, suffix=".jpeg") as image_file, + NamedTemporaryFile(dir=Config.TEMP_DIR, suffix=".jpeg") as image_file, ): img = Image.new("RGB", (10, 10), color="red") buf = BytesIO() diff --git 
a/project_types/tile_map_service/base/project.py b/project_types/tile_map_service/base/project.py index 53540b42..4335d92c 100644 --- a/project_types/tile_map_service/base/project.py +++ b/project_types/tile_map_service/base/project.py @@ -5,7 +5,6 @@ from abc import ABC from pathlib import Path -from django.conf import settings from django.contrib.gis.geos import GEOSGeometry from django.core.files.base import ContentFile from pydantic import BaseModel @@ -25,6 +24,7 @@ ProjectTaskGroup, ) from main.bulk_managers import BulkCreateManager +from main.config import Config from project_types.base import project as base_project from utils import fields as custom_fields from utils.asset_types.models import AoiGeometryAssetProperty @@ -216,7 +216,7 @@ def validate(self): raise base_project.ValidationException(f"Area for AOI Geometry must be less than {allowed_area} sq. km") extension = Path(aoi_asset.file.name).suffix - with tempfile.NamedTemporaryFile(suffix=extension, dir=settings.TEMP_DIR) as temp_file: + with tempfile.NamedTemporaryFile(suffix=extension, dir=Config.TEMP_DIR) as temp_file: # FIXME(frozenhelium): close the aoi_asset file? with aoi_asset.file.open() as aoi_file: temp_file.write(aoi_file.read()) From cb85897c410b5fe1cad98bc5a17c0bca401128c5 Mon Sep 17 00:00:00 2001 From: tnagorra Date: Wed, 8 Oct 2025 14:36:54 +0545 Subject: [PATCH 18/19] test(project): add tests for street project, aggregate, geo functions --- .../tests/__init__.py | 0 .../tests/task_test.py | 168 +++++++++ assets | 2 +- project_types/street/api_calls.py | 29 +- project_types/street/tests/__init__.py | 0 project_types/street/tests/api_calls_test.py | 348 ++++++++++++++++++ utils/geo/tests/__init__.py | 0 utils/geo/tests/tile_grouping_test.py | 44 +++ utils/tests/spatial_sampling_test.py | 95 +++++ 9 files changed, 679 insertions(+), 7 deletions(-) create mode 100644 apps/project/exports/mapping_results_aggregate/tests/__init__.py create mode 100644 apps/project/exports/mapping_results_aggregate/tests/task_test.py create mode 100644 project_types/street/tests/__init__.py create mode 100644 project_types/street/tests/api_calls_test.py create mode 100644 utils/geo/tests/__init__.py create mode 100644 utils/geo/tests/tile_grouping_test.py create mode 100644 utils/tests/spatial_sampling_test.py diff --git a/apps/project/exports/mapping_results_aggregate/tests/__init__.py b/apps/project/exports/mapping_results_aggregate/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/apps/project/exports/mapping_results_aggregate/tests/task_test.py b/apps/project/exports/mapping_results_aggregate/tests/task_test.py new file mode 100644 index 00000000..356aafca --- /dev/null +++ b/apps/project/exports/mapping_results_aggregate/tests/task_test.py @@ -0,0 +1,168 @@ +import pandas as pd + +from apps.project.exports.mapping_results_aggregate.task import ( + _add_missing_result_columns, # type: ignore[reportPrivateUsage] + _calc_agreement, # type: ignore[reportPrivateUsage] + _calc_count, # type: ignore[reportPrivateUsage] + _calc_parent_option_count, # type: ignore[reportPrivateUsage] + _calc_share, # type: ignore[reportPrivateUsage] + _get_custom_options, # type: ignore[reportPrivateUsage] +) +from main.tests import TestCase + + +class TestProjectStats(TestCase): + def test_calc_agreement(self): + ds = pd.Series( + data=[40, 15, 5, 17, 3], + index=["total_count", "1_count", "2_count", "3_count", "4_count"], + ) + agg2 = _calc_agreement(ds) + assert agg2 == 0.32564102564102565 + + def test_calc_count(self): + 
df = pd.DataFrame( + data=[[1, 15, 5, 20], [1, 234, 45, 6]], + columns=["taskId", "1_count", "2_count", "3_count"], + ) + result = _calc_count(df) + assert result[0] == 40 + + def test_calc_share(self): + df = pd.DataFrame( + data=[[1, 40, 15, 5, 20], [1, 285, 234, 45, 6]], + columns=["taskId", "total_count", "1_count", "2_count", "3_count"], + ) + share = _calc_share(df) + assert share.filter(like="share").iloc[0].tolist() == [0.375, 0.125, 0.5] + + def test_get_custom_options(self): + for raw_custom_options, excepted_values in [ + ( + [{"value": 0}, {"value": 1}, {"value": 2}, {"value": 3}], + {0: set(), 1: set(), 2: set(), 3: set()}, + ), + ( + [ + { + "value": 0, + "subOptions": [{"value": 4}, {"value": 5}], + }, + {"value": 1}, + {"value": 2}, + {"value": 3}, + ], + {0: {4, 5}, 1: set(), 2: set(), 3: set()}, + ), + ( + [ + { + "value": 0, + "subOptions": [{"value": 4}, {"value": 5}], + }, + {"value": 1}, + {"value": 2}, + { + "value": 3, + "subOptions": [{"value": 10}, {"value": 12}], + }, + ], + {0: {4, 5}, 1: set(), 2: set(), 3: {10, 12}}, + ), + ]: + parsed_custom_options = _get_custom_options(raw_custom_options) + assert parsed_custom_options == excepted_values + + def test_add_missing_result_columns(self): + df = pd.DataFrame( + data=[ + ["project-1-group-1-task-1", 1], + ["project-1-group-1-task-1", 5], + ["project-1-group-2-task-1", 1], + ["project-1-group-2-task-1", 1], + ["project-1-group-2-task-1", 1], + ["project-2-group-3-task-1", 2], + ["project-2-group-1-task-1", 3], + ], + columns=[ + "task_id", + "result", + ], + ) + df = df.groupby(["task_id", "result"]).size().unstack(fill_value=0) + updated_df = _add_missing_result_columns( + df, + { + 1: {4, 5}, + 2: {6}, + 3: set(), + }, + ) + # Existing columns + assert list(df.columns) == [1, 2, 3, 5] + # New columns + assert list(updated_df.columns) == [1, 2, 3, 4, 5, 6] + # Existing data + assert df.to_csv() == ( + "task_id,1,2,3,5\n" + "project-1-group-1-task-1,1,0,0,1\n" + "project-1-group-2-task-1,3,0,0,0\n" + "project-2-group-1-task-1,0,0,1,0\n" + "project-2-group-3-task-1,0,1,0,0\n" + ) + # New data + assert updated_df.to_csv() == ( + "task_id,1,2,3,4,5,6\n" + "project-1-group-1-task-1,1,0,0,0,1,0\n" + "project-1-group-2-task-1,3,0,0,0,0,0\n" + "project-2-group-1-task-1,0,0,1,0,0,0\n" + "project-2-group-3-task-1,0,1,0,0,0,0\n" + ) + + def test_calc_parent_option_count(self): + df = pd.DataFrame( + data=[ + [1, 40, 1, 0, 20, 0, 1, 0], + [2, 41, 0, 5, 20, 0, 0, 0], + [3, 42, 10, 10, 20, 0, 0, 1], + [4, 281, 0, 1, 0, 1, 1, 4], + [5, 282, 15, 10, 0, 1, 2, 4], + [1, 283, 2, 20, 0, 1, 0, 0], + ], + columns=[ + "taskId", + "total_count", + "1_count", + "2_count", + "3_count", + "4_count", # Child of 1 + "5_count", # Child of 1 + "6_count", # Child of 2 + ], + ) + updated_df = _calc_parent_option_count( + df, + { + 1: {4, 5}, + 2: {6}, + 3: set(), + }, + ) + # Columns without child shouldn't change + for column in [ + "taskId", + "total_count", + "3_count", + "4_count", + "5_count", + "6_count", + ]: + assert df[column].compare(updated_df[column]).size == 0 + # Columns with child should change + for column, updated_index, updated_value in [ + ("1_count", [0, 3, 4, 5], [2, 2, 18, 3]), + ("2_count", [2, 3, 4], [11, 5, 14]), + ]: + compared = df[column].compare(updated_df[column]) + assert list(compared["other"].index) == updated_index + assert list(compared["other"]) == updated_value diff --git a/assets b/assets index 241fb35e..3a2cafe0 160000 --- a/assets +++ b/assets @@ -1 +1 @@ -Subproject commit 
241fb35e50af266dc64b023d55a2cc157e820619 +Subproject commit 3a2cafe08383e86a0889b2305001f5f854c06ac9 diff --git a/project_types/street/api_calls.py b/project_types/street/api_calls.py index 4ceaa6a0..dc8ca018 100644 --- a/project_types/street/api_calls.py +++ b/project_types/street/api_calls.py @@ -8,6 +8,7 @@ import mercantile # type: ignore[reportMissingTypeStubs] import pandas as pd import requests +from geojson_pydantic import Feature as PydanticFeature from geojson_pydantic import FeatureCollection as PydanticFeatureCollection from geojson_pydantic.geometries import MultiPolygon as PydanticMultiPolygon from geojson_pydantic.geometries import Polygon as PydanticPolygon @@ -73,17 +74,33 @@ def create_tiles( # FIXME: move this to utils def geojson_to_polygon(geojson_data: dict[str, Any]): + fc: PydanticFeatureCollection[Any] | None + try: + feature = PydanticFeature(**geojson_data) + fc = PydanticFeatureCollection( + type="FeatureCollection", + features=[feature], + ) + except ValidationError: + fc = None + # NOTE: We might not need this, as we already check this try: - fc = PydanticFeatureCollection(**geojson_data) + if not fc: + fc = PydanticFeatureCollection(**geojson_data) except ValidationError as e: raise ValidationException("Invalid GeoJSON FeatureCollection") from e polygon_types = (PydanticPolygon, PydanticMultiPolygon) - geometries = [shape(feature.geometry) for feature in fc.features if isinstance(feature.geometry, polygon_types)] - if not geometries: - raise ValidationException("No valid Polygon or MultiPolygon found in the GeoJSON FeatureCollection") + has_invalid_geometries = any(not isinstance(feature.geometry, polygon_types) for feature in fc.features) + if has_invalid_geometries: + raise ValidationException("Non-polygon geometries cannot be combined into a MultiPolygon.") + + geometries = [shape(feature.geometry) for feature in fc.features] + + # if not geometries: + # raise ValidationException("No valid Polygon or MultiPolygon found in the GeoJSON FeatureCollection") return unary_union(geometries) @@ -140,7 +157,7 @@ def parallelized_processing( def download_and_process_tile( *, - row: dict[Hashable, Any], + row: dict[Hashable, Any] | pd.Series, polygon: ShapelyBaseGeometry, kwargs: dict[str, Any], attempt_limit: int = 3, @@ -219,7 +236,7 @@ def filter_results( results_df: pd.DataFrame, creator_id: int | None = None, is_pano: bool | None = None, - organization_id: str | None = None, + organization_id: int | None = None, start_time: str | None = None, end_time: str | None = None, ): diff --git a/project_types/street/tests/__init__.py b/project_types/street/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/project_types/street/tests/api_calls_test.py b/project_types/street/tests/api_calls_test.py new file mode 100644 index 00000000..74259ef5 --- /dev/null +++ b/project_types/street/tests/api_calls_test.py @@ -0,0 +1,348 @@ +import json +import typing +import unittest +from pathlib import Path +from unittest.mock import MagicMock, patch + +import pandas as pd +import pytest +from shapely import wkt +from shapely.geometry import GeometryCollection, MultiPolygon, Point, Polygon + +from main.config import Config +from project_types.base.project import ValidationException +from project_types.street.api_calls import ( + coordinate_download, + create_tiles, + download_and_process_tile, + filter_by_timerange, + filter_results, + geojson_to_polygon, + get_image_metadata, +) + +if typing.TYPE_CHECKING: + from collections.abc import Hashable + + +class 
TestTileGroupingFunctions(unittest.TestCase): + level: int # type: ignore[reportUninitializedInstanceVariable] + test_polygon: Polygon # type: ignore[reportUninitializedInstanceVariable] + empty_polygon: Polygon # type: ignore[reportUninitializedInstanceVariable] + empty_geometry: GeometryCollection # type: ignore[reportUninitializedInstanceVariable] + row: pd.Series # type: ignore[reportUninitializedInstanceVariable] + + @typing.override + @classmethod + def setUpClass(cls): + with Path(Config.BASE_DIR, "assets/fixtures/street_aoi.geojson").open(encoding="utf-8") as file: + cls.fixture_data = json.load(file) + + with Path(Config.BASE_DIR, "assets/fixtures/mapillary_response.csv").open(encoding="utf-8") as file: + df = pd.read_csv(file) + df["geometry"] = df["geometry"].apply(wkt.loads) # type: ignore[reportArgumentType] + cls.fixture_df = df + + @typing.override + def setUp(self): + self.level = 14 + self.test_polygon = Polygon([(0, 0), (1, 0), (1, 1), (0, 1)]) + self.empty_polygon = Polygon() + self.empty_geometry = GeometryCollection() + self.row = pd.Series({"x": 1, "y": 1, "z": self.level}) + + def test_create_tiles_with_valid_polygon(self): + tiles = create_tiles(polygon=self.test_polygon, level=self.level) + assert not tiles.empty + + def test_create_tiles_with_multipolygon(self): + polygon = Polygon( + [ + (0.00000000, 0.00000000), + (0.000000001, 0.00000000), + (0.00000000, 0.000000001), + (0.00000000, 0.000000001), + ], + ) + multipolygon = MultiPolygon([polygon, polygon]) + tiles = create_tiles(polygon=multipolygon, level=self.level) + assert not tiles.empty + assert len(tiles) == 1 + + def test_create_tiles_with_empty_polygon(self): + tiles = create_tiles(polygon=self.empty_polygon, level=self.level) + assert tiles.empty + + def test_create_tiles_with_empty_geometry(self): + tiles = create_tiles(polygon=self.empty_geometry, level=self.level) + assert tiles.empty + + def test_geojson_to_polygon_feature_collection_with_multiple_polygons(self): + geojson_data = { + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "geometry": { + "type": "Polygon", + "coordinates": [[(0, 0), (1, 0), (1, 1), (0, 1), (0, 0)]], + }, + "properties": {}, + }, + { + "type": "Feature", + "geometry": { + "type": "Polygon", + "coordinates": [[(2, 2), (3, 2), (3, 3), (2, 3), (2, 2)]], + }, + "properties": {}, + }, + ], + } + result = geojson_to_polygon(geojson_data) + assert isinstance(result, MultiPolygon) + assert len(result.geoms) == 2 + + def test_geojson_to_polygon_single_feature_polygon(self): + geojson_data = { + "type": "Feature", + "geometry": { + "type": "Polygon", + "coordinates": [[(0, 0), (1, 0), (1, 1), (0, 1), (0, 0)]], + }, + "properties": {}, + } + result = geojson_to_polygon(geojson_data) + assert isinstance(result, Polygon) + + def test_geojson_to_polygon_single_feature_multipolygon(self): + geojson_data = { + "type": "Feature", + "geometry": { + "type": "MultiPolygon", + "coordinates": [ + [[(0, 0), (1, 0), (1, 1), (0, 1), (0, 0)]], + [[(2, 2), (3, 2), (3, 3), (2, 3), (2, 2)]], + ], + }, + "properties": {}, + } + result = geojson_to_polygon(geojson_data) + assert isinstance(result, MultiPolygon) + assert len(result.geoms) == 2 + + def test_geojson_to_polygon_non_polygon_geometry_in_feature_collection(self): + geojson_data = { + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "geometry": {"type": "LineString", "coordinates": [(0, 0), (1, 1)]}, + "properties": {}, + }, + ], + } + with pytest.raises(ValidationException) as context: + 
geojson_to_polygon(geojson_data) + assert str(context.value) == "Non-polygon geometries cannot be combined into a MultiPolygon." + + def test_geojson_to_polygon_empty_feature_collection(self): + geojson_data = {"type": "FeatureCollection", "features": []} + result = geojson_to_polygon(geojson_data) + assert result.is_empty + + def test_geojson_to_polygon_contribution_geojson(self): + result = geojson_to_polygon(self.fixture_data) + assert isinstance(result, Polygon) + + @patch("project_types.street.api_calls.vt2geojson_tools.vt_bytes_to_geojson") + @patch("project_types.street.api_calls.requests.get") + def test_download_and_process_tile_success(self, mock_get, mock_vt2geojson): # type: ignore[reportMissingParameterType] + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.content = b"mock vector tile data" # Example mock data + mock_get.return_value = mock_response + + mock_vt2geojson.return_value = { + "features": [ + { + "geometry": {"type": "Point", "coordinates": [0, 0]}, + "properties": {"id": 1}, + }, + ], + } + + row: dict[Hashable, typing.Any] = {"x": 1, "y": 1, "z": 14} + + polygon = wkt.loads("POLYGON ((-1 -1, -1 1, 1 1, 1 -1, -1 -1))") + + result = download_and_process_tile(row=row, polygon=polygon, kwargs={}) + assert result is not None + assert len(result) == 1 + assert result["geometry"][0].wkt == "POINT (0 0)" + + @patch("project_types.street.api_calls.requests.get") + def test_download_and_process_tile_failure(self, mock_get): # type: ignore[reportMissingParameterType] + mock_response = MagicMock() + mock_response.status_code = 500 + mock_get.return_value = mock_response + + result = download_and_process_tile(row=self.row, polygon=self.test_polygon, kwargs={}) + assert result is None + + @patch("project_types.street.api_calls.get_mapillary_data") + def test_download_and_process_tile_spatial_filtering(self, mock_get_mapillary_data): # type: ignore[reportMissingParameterType] + inside_points = [ + (0.2, 0.2), + (0.5, 0.5), + ] + outside_points = [ + (1.5, 0.5), + (0.5, 1.5), + (-0.5, 0.5), + ] + points = inside_points + outside_points + data = [ + { + "geometry": Point(x, y), + } + for x, y in points + ] + + mock_get_mapillary_data.return_value = pd.DataFrame(data) + + metadata = download_and_process_tile(row=self.row, polygon=self.test_polygon, kwargs={}) + assert metadata is not None + metadata = metadata.drop_duplicates() + assert len(metadata) == len(inside_points) + + @patch("project_types.street.api_calls.download_and_process_tile") + def test_coordinate_download_with_failures(self, mock_download_and_process_tile): # type: ignore[reportMissingParameterType] + mock_download_and_process_tile.return_value = pd.DataFrame() + + metadata = coordinate_download(polygon=self.test_polygon, level=self.level, kwargs={}) + + assert metadata.empty + + def test_filter_within_time_range(self): + start_time = "2016-01-20 00:00:00" + end_time = "2022-01-21 23:59:59" + filtered_df = filter_by_timerange(self.fixture_df, start_time, end_time) + + assert len(filtered_df) == 3 + assert all(filtered_df["captured_at"] >= pd.to_datetime(start_time)) + assert all(filtered_df["captured_at"] <= pd.to_datetime(end_time)) + + def test_filter_without_end_time(self): + start_time = "2020-01-20 00:00:00" + filtered_df = filter_by_timerange(self.fixture_df, start_time) + + assert len(filtered_df) == 3 + assert all(filtered_df["captured_at"] >= pd.to_datetime(start_time)) + + def test_filter_time_no_data(self): + start_time = "2016-01-30 00:00:00" + end_time = "2016-01-31 
00:00:00" + filtered_df = filter_by_timerange(self.fixture_df, start_time, end_time) + assert filtered_df.empty + + def test_filter_default(self): + filtered_df = filter_results(self.fixture_df) + assert filtered_df is not None + assert len(filtered_df) == len(self.fixture_df) + + def test_filter_pano_true(self): + filtered_df = filter_results(self.fixture_df, is_pano=True) + assert filtered_df is not None + assert len(filtered_df) == 3 + + def test_filter_pano_false(self): + filtered_df = filter_results(self.fixture_df, is_pano=False) + assert filtered_df is not None + assert len(filtered_df) == 3 + + def test_filter_organization_id(self): + filtered_df = filter_results(self.fixture_df, organization_id=1) + assert filtered_df is not None + assert len(filtered_df) == 1 + + def test_filter_creator_id(self): + filtered_df = filter_results(self.fixture_df, creator_id=102506575322825) + assert filtered_df is not None + assert len(filtered_df) == 3 + + def test_filter_time_range(self): + start_time = "2016-01-20 00:00:00" + end_time = "2022-01-21 23:59:59" + filtered_df = filter_results( + self.fixture_df, + start_time=start_time, + end_time=end_time, + ) + assert filtered_df is not None + assert len(filtered_df) == 3 + + def test_filter_no_rows_after_filter(self): + filtered_df = filter_results(self.fixture_df, is_pano="False") # type: ignore[reportArgumentType] + assert filtered_df is not None + assert filtered_df.empty + + def test_filter_missing_columns(self): + columns_to_check = [ + "is_pano", + "organization_id", + "captured_at", + ] + for column in columns_to_check: + df_copy = self.fixture_df.copy() + df_copy[column] = None + + if column == "captured_at": + column = "start_time" # noqa: PLW2901 + + result = filter_results(df_copy, **{column: True}) # type: ignore[reportArgumentType] + assert result is None + + @patch("project_types.street.api_calls.coordinate_download") + def test_get_image_metadata(self, mock_coordinate_download): # type: ignore[reportMissingParameterType] + mock_coordinate_download.return_value = self.fixture_df + result = get_image_metadata(aoi_geojson=self.fixture_data) + assert isinstance(result, dict) + assert "feature_ids" in result + assert "features" in result + + @patch("project_types.street.api_calls.coordinate_download") + def test_get_image_metadata_empty_response(self, mock_coordinate_download): # type: ignore[reportMissingParameterType] + df = self.fixture_df.copy() + df = df.drop(df.index) + mock_coordinate_download.return_value = df + + with pytest.raises(ValidationException): + get_image_metadata(aoi_geojson=self.fixture_data) + + @patch("project_types.street.api_calls.filter_results") + @patch("project_types.street.api_calls.coordinate_download") + def test_get_image_metadata_size_restriction( + self, + mock_coordinate_download, # type: ignore[reportMissingParameterType] + mock_filter_results, # type: ignore[reportMissingParameterType] + ): + mock_df = pd.DataFrame({"id": range(1, 100002), "geometry": range(1, 100002)}) + mock_coordinate_download.return_value = mock_df + with pytest.raises(ValidationException): + get_image_metadata(aoi_geojson=self.fixture_data) + + @patch("project_types.street.api_calls.coordinate_download") + def test_get_image_metadata_drop_duplicates(self, mock_coordinate_download): # type: ignore[reportMissingParameterType] + test_df = pd.DataFrame( + { + "id": [1, 2, 2, 3, 4, 4, 5], + "geometry": ["a", "b", "b", "c", "d", "d", "e"], + }, + ) + mock_coordinate_download.return_value = test_df + return_dict = 
get_image_metadata(aoi_geojson=self.fixture_data) + + return_df = pd.DataFrame(return_dict) + + assert len(return_df) != len(test_df) diff --git a/utils/geo/tests/__init__.py b/utils/geo/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/utils/geo/tests/tile_grouping_test.py b/utils/geo/tests/tile_grouping_test.py new file mode 100644 index 00000000..1b6059ab --- /dev/null +++ b/utils/geo/tests/tile_grouping_test.py @@ -0,0 +1,44 @@ +import typing +from pathlib import Path + +from main.config import Config +from main.tests import TestCase +from utils.geo.tile_grouping import extent_to_groups, get_geometry_from_file + +BASE_DIR = Path(__file__).resolve().parent + + +class TestProjectQuery(TestCase): + @typing.override + @classmethod + def setUpClass(cls): + super().setUpClass() + + def test_project_geometries_intersection(self): + zoom = 18 + + project_extent_file = Path(Config.BASE_DIR, "assets/fixtures/polygon_with_intersection.geojson") + project_extent_file_json = get_geometry_from_file(str(project_extent_file)) + + groups_with_overlaps = extent_to_groups(project_extent_file_json, zoom, 100) + assert len(groups_with_overlaps) == 92 + + def test_group_size(self): + zoom = 18 + project_extent_file = Path(Config.BASE_DIR, "assets/fixtures/aoi.geojson") + + project_extent_file_json = get_geometry_from_file(str(project_extent_file)) + + groups_dict = extent_to_groups(project_extent_file_json, zoom, 100) + + assert len(groups_dict) == 711 + + for _, group in groups_dict.items(): + # check if height is 3 + y_group_size = int(group["yMax"]) - int(group["yMin"]) + 1 + assert y_group_size == 3 + + for _, group in groups_dict.items(): + # check if group x size is of factor 2 + x_group_size = int(group["xMax"]) - int(group["xMin"]) + 1 + assert x_group_size % 2 == 0 diff --git a/utils/tests/spatial_sampling_test.py b/utils/tests/spatial_sampling_test.py new file mode 100644 index 00000000..6b522915 --- /dev/null +++ b/utils/tests/spatial_sampling_test.py @@ -0,0 +1,95 @@ +import typing +import unittest +from pathlib import Path + +import numpy as np +import pandas as pd +from shapely import wkt +from shapely.geometry import Point + +from main.config import Config +from utils.spatial_sampling import ( + distance_on_sphere, + filter_points, + spatial_sampling, +) + +BASE_DIR = Path(__file__).resolve().parent + + +class TestDistanceCalculations(unittest.TestCase): + @typing.override + @classmethod + def setUpClass(cls): + with Path(Config.BASE_DIR, "assets/fixtures/mapillary_sequence.csv").open(encoding="utf-8") as file: + df = pd.read_csv(file) + df["geometry"] = df["geometry"].apply(wkt.loads) # type: ignore[reportArgumentType] + cls.fixture_df = df + + def test_distance_on_sphere(self): + p1 = Point(-74.006, 40.7128) + p2 = Point(-118.2437, 34.0522) + + distance = distance_on_sphere([p1.x, p1.y], [p2.x, p2.y]) + expected_distance = 3940 # Approximate known distance in km + + assert np.isclose(distance, expected_distance, atol=50) + + def test_filter_points(self): + data = { + "geometry": [ + "POINT (-74.006 40.7128)", + "POINT (-75.006 41.7128)", + "POINT (-76.006 42.7128)", + "POINT (-77.006 43.7128)", + ], + } + df = pd.DataFrame(data) + + df["geometry"] = df["geometry"].apply(wkt.loads) # type: ignore[reportArgumentType] + + df["long"] = df["geometry"].apply( + lambda geom: geom.x if geom.geom_type == "Point" else None, + ) + df["lat"] = df["geometry"].apply( + lambda geom: geom.y if geom.geom_type == "Point" else None, + ) + threshold_distance = 100 + filtered_df = 
filter_points(df, threshold_distance) + + assert isinstance(filtered_df, pd.DataFrame) + assert len(filtered_df) <= len(df) + + def test_spatial_sampling_ordering(self): + data = { + "geometry": [ + "POINT (-74.006 40.7128)", + "POINT (-75.006 41.7128)", + "POINT (-76.006 42.7128)", + "POINT (-77.006 43.7128)", + ], + "captured_at": [1, 2, 3, 4], + "sequence_id": ["1", "1", "1", "1"], + } + df = pd.DataFrame(data) + df["geometry"] = df["geometry"].apply(wkt.loads) # type: ignore[reportArgumentType] + + interval_length = 0.1 + filtered_gdf = spatial_sampling(df=df, interval_length=interval_length) + + assert filtered_gdf["captured_at"].is_monotonic_decreasing + + def test_spatial_sampling_with_sequence(self): + threshold_distance = 0.01 + filtered_df = spatial_sampling(df=self.fixture_df, interval_length=threshold_distance) + assert isinstance(filtered_df, pd.DataFrame) + assert len(filtered_df) < len(self.fixture_df) + + filtered_df.reset_index(drop=True, inplace=True) + for i in range(len(filtered_df) - 1): + geom1 = filtered_df.loc[i, "geometry"] + geom2 = filtered_df.loc[i + 1, "geometry"] + + distance = geom1.distance(geom2) # type: ignore[reportAttributeAccessIssue] + + assert distance < threshold_distance From b60178bb80f99680b075883037495a4cdc7a95fa Mon Sep 17 00:00:00 2001 From: tnagorra Date: Wed, 8 Oct 2025 15:12:46 +0545 Subject: [PATCH 19/19] feat(filter): add "created by" filter for project and tutorial --- apps/project/graphql/filters.py | 3 ++- apps/tutorial/graphql/filters.py | 1 + schema.graphql | 8 +++----- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/apps/project/graphql/filters.py b/apps/project/graphql/filters.py index 9ac14793..bef98444 100644 --- a/apps/project/graphql/filters.py +++ b/apps/project/graphql/filters.py @@ -14,9 +14,10 @@ class ProjectFilter: requesting_organization_id: strawberry.auto is_featured: strawberry.auto status: strawberry.auto - team: strawberry.auto + team_id: strawberry.auto is_private: strawberry.auto progress_status: strawberry.auto + created_by_id: strawberry.auto topic = unaccented_filter("topic") region = unaccented_filter("region") diff --git a/apps/tutorial/graphql/filters.py b/apps/tutorial/graphql/filters.py index 63d0ea1d..02aa73cb 100644 --- a/apps/tutorial/graphql/filters.py +++ b/apps/tutorial/graphql/filters.py @@ -11,6 +11,7 @@ class TutorialFilter: id: strawberry.auto status: strawberry.auto project: ProjectFilter | None + created_by_id: strawberry.auto name = unaccented_filter("name") diff --git a/schema.graphql b/schema.graphql index 99ead70b..46edb314 100644 --- a/schema.graphql +++ b/schema.graphql @@ -774,10 +774,6 @@ union DeleteProjectPayload = OperationInfo | ProjectType union DeleteTutorialPayload = OperationInfo | TutorialType -input DjangoModelFilterInput { - id: ID! -} - input FindProjectPropertyInput { """Numeric value as string""" aoiGeometry: String! @@ -1453,6 +1449,7 @@ input ProjectFilter { DISTINCT: Boolean NOT: ProjectFilter OR: ProjectFilter + createdById: IDBaseFilterLookup id: IDBaseFilterLookup isFeatured: BoolBaseFilterLookup @@ -1470,7 +1467,7 @@ input ProjectFilter { """Which group, institution or community is requesting this project?""" requestingOrganizationId: IDBaseFilterLookup status: ProjectStatusEnumFilterLookup - team: DjangoModelFilterInput + teamId: IDBaseFilterLookup topic: String } @@ -2270,6 +2267,7 @@ input TutorialFilter { DISTINCT: Boolean NOT: TutorialFilter OR: TutorialFilter + createdById: IDBaseFilterLookup id: IDBaseFilterLookup name: String
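Note: with team renamed to teamId, DjangoModelFilterInput is no longer referenced, which is why it is dropped from the schema. The new createdById lookup behaves like the other ID filters; the sketch below shows how a caller might exercise it, where the top-level query name, result shape, and execute_query helper are hypothetical and only the filter field itself comes from the schema changes above:

    # Illustrative sketch only: execute_query stands in for whichever GraphQL
    # client or test helper is used; its name and signature are hypothetical.
    PROJECTS_BY_CREATOR = """
        query ProjectsByCreator($userId: ID!) {
            projects(filters: {createdById: {exact: $userId}}) {
                id
            }
        }
    """


    def fetch_projects_created_by(execute_query, user_id: str) -> dict:
        # createdById is an IDBaseFilterLookup, so filtering by exact creator
        # id mirrors the existing requestingOrganizationId and teamId filters.
        return execute_query(PROJECTS_BY_CREATOR, variables={"userId": user_id})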