diff --git a/apps/common/tasks.py b/apps/common/tasks.py index 871b70ff..84b84d50 100644 --- a/apps/common/tasks.py +++ b/apps/common/tasks.py @@ -17,7 +17,7 @@ def clear_expired_django_sessions(): if not acquired: logger.warning("Clear expired django sessions") return - management.call_command("clearsessions", verbosity=0) + management.call_command("clearsessions", verbosity=0) @shared_task diff --git a/apps/community_dashboard/graphql/queries.py b/apps/community_dashboard/graphql/queries.py index 9d08ad7e..a0e74715 100644 --- a/apps/community_dashboard/graphql/queries.py +++ b/apps/community_dashboard/graphql/queries.py @@ -5,11 +5,11 @@ import strawberry from asgiref.sync import sync_to_async from django.db import models -from django.shortcuts import aget_object_or_404 from django.utils import timezone from apps.community_dashboard.models import AggregatedUserGroupStatData, AggregatedUserStatData -from apps.contributor.models import ContributorUser +from apps.contributor.models import ContributorUser, ContributorUserGroup +from utils.graphql.inputs import FirebaseOrInternalIdInputType from .types import ( AggregateHelper, @@ -117,17 +117,19 @@ async def community_filtered_stats( ) -> CommunityFilteredStats: return CommunityFilteredStats(date_range=date_range) + # By Internal ID @strawberry.field async def community_user_stats( self, - firebase_id: strawberry.ID, + user_id: FirebaseOrInternalIdInputType, ) -> ContributorUserStats: - user = await aget_object_or_404(ContributorUser, firebase_id=firebase_id) + user = await FirebaseOrInternalIdInputType.aget_object_or_404(ContributorUser, object_id=user_id) return ContributorUserStats(user=user) @strawberry.field async def community_user_group_stats( self, - user_group_id: strawberry.ID, + user_group_id: FirebaseOrInternalIdInputType, ) -> ContributorUserGroupStats: - return ContributorUserGroupStats(user_group_id=int(user_group_id)) + user_group = await FirebaseOrInternalIdInputType.aget_object_or_404(ContributorUserGroup, object_id=user_group_id) + return ContributorUserGroupStats(user_group=user_group) diff --git a/apps/community_dashboard/graphql/types.py b/apps/community_dashboard/graphql/types.py index e0c50093..8ed20f90 100644 --- a/apps/community_dashboard/graphql/types.py +++ b/apps/community_dashboard/graphql/types.py @@ -11,7 +11,7 @@ from django_cte import With # type: ignore[reportMissingTypeStubs] from apps.community_dashboard.models import AggregatedUserGroupStatData, AggregatedUserStatData -from apps.contributor.models import ContributorUser +from apps.contributor.models import ContributorUser, ContributorUserGroup from apps.project.models import Project, ProjectTypeEnum from utils.graphql.inputs import DateRangeInput from utils.graphql.types import AreaSqKm, GenericJSON @@ -349,6 +349,10 @@ def __post_init__(self, user: ContributorUser): async def id(self) -> strawberry.ID: return typing.cast("strawberry.ID", self._user.pk) + @strawberry.field + async def firebase_id(self) -> strawberry.ID: + return typing.cast("strawberry.ID", self._user.firebase_id) + @strawberry.field async def stats(self) -> ContributorUserStatType: # TODO: Cache this @@ -423,15 +427,15 @@ def __post_init__(self, date_range: DateRangeInput | None, user_group_id: int): @strawberry.type class ContributorUserGroupStats: - user_group_id: InitVar[int] + user_group: InitVar[ContributorUserGroup] _user_group_id: strawberry.Private[int] = dataclass_field(init=False) _ug_qs: strawberry.Private[models.QuerySet[AggregatedUserGroupStatData]] = 
dataclass_field(init=False) - def __post_init__(self, user_group_id: int): - self._user_group_id = user_group_id + def __post_init__(self, user_group: ContributorUserGroup): + self._user_group_id = user_group.pk self._ug_qs = AggregatedUserGroupStatData.objects.filter( - user_group_id=user_group_id, + user_group_id=user_group.pk, ) @strawberry.field diff --git a/apps/community_dashboard/tasks.py b/apps/community_dashboard/tasks.py index 48fc4fc6..7e3d4cbb 100644 --- a/apps/community_dashboard/tasks.py +++ b/apps/community_dashboard/tasks.py @@ -19,4 +19,4 @@ def update_aggregated_data(): logger.warning("Community Dashboard update aggregate already running") return - UpdateAggregateCommand().handle() + UpdateAggregateCommand().handle() diff --git a/apps/community_dashboard/tests/query_test.py b/apps/community_dashboard/tests/query_test.py index f4ca7431..2fad9a32 100644 --- a/apps/community_dashboard/tests/query_test.py +++ b/apps/community_dashboard/tests/query_test.py @@ -256,7 +256,7 @@ def test_filtered_community_stats(self): def test_user_group_aggregated_calc(self): query = """ query MyQuery($userGroupId: ID!) { - communityUserGroupStats(userGroupId: $userGroupId) { + communityUserGroupStats(userGroupId: {id: $userGroupId}) { stats { totalAreaSwiped totalContributors @@ -306,7 +306,7 @@ def test_user_group_aggregated_calc(self): def test_user_group_query(self): query = """ query MyQuery($userGroupId: ID!, $pagination: OffsetPaginationInput!) { - contributorUserGroup(id: $userGroupId) { + contributorUserGroup(userGroupId: {id: $userGroupId}) { id name createdAt @@ -502,13 +502,13 @@ def test_user_query(self): $toDate: Date!, ) { - contributorUserByFirebaseId(firebaseId: $firebaseId) { + contributorUser(userId: {firebaseId: $firebaseId}) { id firebaseId username } - communityUserStats(firebaseId: $firebaseId) { + communityUserStats(userId: {firebaseId: $firebaseId}) { id stats { totalSwipes @@ -644,7 +644,7 @@ def test_user_query(self): ) assert { - "contributorUserByFirebaseId": { + "contributorUser": { "id": self.gID(contributor_user.pk), "firebaseId": contributor_user.firebase_id, "username": contributor_user.username, diff --git a/apps/contributor/firebase/pull.py b/apps/contributor/firebase/pull.py index 1b37c943..f14764c5 100644 --- a/apps/contributor/firebase/pull.py +++ b/apps/contributor/firebase/pull.py @@ -5,6 +5,7 @@ from pyfirebase_mapswipe import extended_models as firebase_ext_models from pyfirebase_mapswipe import models as firebase_models +from apps.common.models import FirebasePushStatusEnum from apps.contributor.models import ( ContributorUser, ContributorUserGroupMembershipLogActionEnum, @@ -41,6 +42,9 @@ def pull_users_from_firebase(): username=valid_user.username, created_at=valid_user.created, modified_at=valid_user.created, + # NOTE: Setting firebase_last_pushed so that we can send updates to firebase. 
+ firebase_last_pushed=datetime.datetime.now(), + firebase_push_status=FirebasePushStatusEnum.SUCCESS, ) users_to_pull.append(user) diff --git a/apps/contributor/graphql/queries.py b/apps/contributor/graphql/queries.py index 1258057d..1738222f 100644 --- a/apps/contributor/graphql/queries.py +++ b/apps/contributor/graphql/queries.py @@ -3,11 +3,11 @@ import strawberry import strawberry_django from django.db.models import QuerySet -from django.shortcuts import aget_object_or_404 from strawberry_django.pagination import OffsetPaginated from strawberry_django.permissions import IsAuthenticated from apps.contributor.models import ContributorTeam, ContributorUser, ContributorUserGroup, ContributorUserGroupMembership +from utils.graphql.inputs import FirebaseOrInternalIdInputType from .filters import ( ContributorTeamFilter, @@ -31,18 +31,25 @@ class Query: filters=ContributorUserFilter, ) - contributor_user: ContributorUserType = strawberry_django.field() - - contributor_user_group: ContributorUserGroupType = strawberry_django.field() - # Team contributor_team: ContributorTeamType = strawberry_django.field() @strawberry.field - async def contributor_user_by_firebase_id(self, firebase_id: strawberry.ID) -> ContributorUserType: - obj = await aget_object_or_404(ContributorUser, firebase_id=firebase_id) + async def contributor_user( + self, + user_id: FirebaseOrInternalIdInputType, + ) -> ContributorUserType: + obj = await FirebaseOrInternalIdInputType.aget_object_or_404(ContributorUser, object_id=user_id) return typing.cast("ContributorUserType", obj) + @strawberry.field + async def contributor_user_group( + self, + user_group_id: FirebaseOrInternalIdInputType, + ) -> ContributorUserGroupType: + obj = await FirebaseOrInternalIdInputType.aget_object_or_404(ContributorUserGroup, object_id=user_group_id) + return typing.cast("ContributorUserGroupType", obj) + # --- Paginated # --- ContributorUserGroup @strawberry_django.offset_paginated( diff --git a/apps/contributor/tasks.py b/apps/contributor/tasks.py index 92feb63f..24bb6706 100644 --- a/apps/contributor/tasks.py +++ b/apps/contributor/tasks.py @@ -15,7 +15,7 @@ def pull_users_from_firebase_task(): logger.warning("Pull users from firebase is already running") return - pull_users_from_firebase() + pull_users_from_firebase() @shared_task @@ -25,4 +25,4 @@ def pull_user_group_memberships_from_firebase_task(): logger.warning("Pull user group memberships from firebase is already running") return - pull_user_group_memberships_from_firebase() + pull_user_group_memberships_from_firebase() diff --git a/apps/existing_database/management/commands/loaddata_from_existing_database.py b/apps/existing_database/management/commands/loaddata_from_existing_database.py index 6285535b..785e15da 100644 --- a/apps/existing_database/management/commands/loaddata_from_existing_database.py +++ b/apps/existing_database/management/commands/loaddata_from_existing_database.py @@ -532,10 +532,11 @@ def parse_project_name( def parse_project_status(existing_project: existing_db_models.Project) -> ProjectStatusEnum: assert existing_project.status is not None return { - "inactive": ProjectStatusEnum.PAUSED, + "inactive": ProjectStatusEnum.DISCARDED, "active": ProjectStatusEnum.PUBLISHED, "private_active": ProjectStatusEnum.PUBLISHED, "private_finished": ProjectStatusEnum.FINISHED, + "private_inactive": ProjectStatusEnum.DISCARDED, "finished": ProjectStatusEnum.FINISHED, "archived": ProjectStatusEnum.WITHDRAWN, }[existing_project.status] @@ -573,6 +574,8 @@ def create_project( 
requesting_organization=get_organization_by_name(requesting_organization, bot_user), created_by_id=get_user_by_contributor_user_firebase_id(existing_project.created_by, fallback=bot_user), modified_by_id=get_user_by_contributor_user_firebase_id(existing_project.created_by, fallback=bot_user), + project_type_specifics=existing_project.project_type_specifics, + description=existing_project.project_details.strip() if existing_project.project_details else "", ) # Progress metadata @@ -986,7 +989,7 @@ def handle_project(self): output_field=GeometryField(geography=True), ), ) - / 100_000, + / 1_000_000, ) self.stdout.write("\n") diff --git a/apps/mapping/models.py b/apps/mapping/models.py index 80d7d02d..0f60ca85 100644 --- a/apps/mapping/models.py +++ b/apps/mapping/models.py @@ -11,7 +11,9 @@ class MappingSessionClientTypeEnum(models.IntegerChoices): - """Enum representing client type used during a mapping session.""" + """Enum representing client type used during a mapping session. + https://github.com/react-native-device-info/react-native-device-info#getsystemname. + """ UNKNOWN = 0, "Unknown" MOBILE_ANDROID = 1, "Mobile (Android)" @@ -23,6 +25,8 @@ def get_client_type(cls, value: str) -> "MappingSessionClientTypeEnum": return { "mobile-android": cls.MOBILE_ANDROID, "mobile-ios": cls.MOBILE_IOS, + "mobile-iphone os": cls.MOBILE_IOS, + "mobile-ipados": cls.MOBILE_IOS, "web": cls.WEB, }.get(value, cls.UNKNOWN) diff --git a/apps/mapping/tasks.py b/apps/mapping/tasks.py index c36d78f7..0f1a77a0 100644 --- a/apps/mapping/tasks.py +++ b/apps/mapping/tasks.py @@ -19,4 +19,4 @@ def pull_mapping_session_from_firebase(): logger.warning("Mapping Session Pull from Firebase already running") return - pull_results_from_firebase() + pull_results_from_firebase() diff --git a/apps/project/exports/exports.py b/apps/project/exports/exports.py index 641dda56..6925ec66 100644 --- a/apps/project/exports/exports.py +++ b/apps/project/exports/exports.py @@ -188,12 +188,12 @@ def _export_project_data(project: Project, tmp_directory: Path): if project.progress >= 100: project.progress_status = ProjectProgressStatusEnum.COMPLETED - if project.progress >= 90 and project.slack_progress_notifications < 90: + if project.progress >= 90 and (project.slack_progress_notifications or 0) < 90: transaction.on_commit( lambda: send_slack_message_for_project.delay(project_id=project.id, action="progress-change"), ) - if project.progress >= 100 and project.slack_progress_notifications < 100: + if project.progress >= 100 and (project.slack_progress_notifications or 0) < 100: transaction.on_commit( lambda: send_slack_message_for_project.delay(project_id=project.id, action="progress-change"), ) @@ -201,7 +201,7 @@ def _export_project_data(project: Project, tmp_directory: Path): if project.progress != previous_progress: # FIXME(tnagorra): Do we only send updates for the 2 fields? transaction.on_commit( - lambda: push_project_to_firebase.delay(project_id=project.id), + lambda: push_project_to_firebase.delay(project_id=project.id, only_stats=True), ) project.update_firebase_push_status(FirebasePushStatusEnum.PENDING, False) diff --git a/apps/project/exports/mapping_results.py b/apps/project/exports/mapping_results.py index cdad664f..1919cd4c 100644 --- a/apps/project/exports/mapping_results.py +++ b/apps/project/exports/mapping_results.py @@ -77,7 +77,7 @@ def generate_mapping_results(*, destination_filename: Path, project: Project) -> logger.info("there are no results for this project %s", project.id) else: # TODO: Is this required? 
- df["timestamp"] = pd.to_datetime(df["timestamp"]) + df["timestamp"] = pd.to_datetime(df["timestamp"], format="ISO8601") df["day"] = df["timestamp"].apply(lambda x: datetime.datetime(year=x.year, month=x.month, day=x.day)) logger.info("added day attribute for results for %s", project.id) return df diff --git a/apps/project/graphql/queries.py b/apps/project/graphql/queries.py index 482c82d8..adeb2025 100644 --- a/apps/project/graphql/queries.py +++ b/apps/project/graphql/queries.py @@ -54,8 +54,9 @@ def _get_vector_tile_server_type(enum: VectorTileServerNameEnumWithoutCustom): raster=[ _get_raster_tile_server_type(RasterTileServerNameEnum.BING), _get_raster_tile_server_type(RasterTileServerNameEnum.MAPBOX), - _get_raster_tile_server_type(RasterTileServerNameEnum.MAXAR_STANDARD), - _get_raster_tile_server_type(RasterTileServerNameEnum.MAXAR_PREMIUM), + # NOTE: Disabled because it's not working for 2+ years + # _get_raster_tile_server_type(RasterTileServerNameEnum.MAXAR_STANDARD), + # _get_raster_tile_server_type(RasterTileServerNameEnum.MAXAR_PREMIUM), _get_raster_tile_server_type(RasterTileServerNameEnum.ESRI), _get_raster_tile_server_type(RasterTileServerNameEnum.ESRI_BETA), ], @@ -154,8 +155,9 @@ def public_projects( ) -> QuerySet[Project]: return Project.objects.filter( status__in=[ - Project.Status.FINISHED, Project.Status.PUBLISHED, + Project.Status.PAUSED, + Project.Status.FINISHED, ], ).all() diff --git a/apps/project/graphql/types/types.py b/apps/project/graphql/types/types.py index a1ee2ed9..d72f8370 100644 --- a/apps/project/graphql/types/types.py +++ b/apps/project/graphql/types/types.py @@ -167,7 +167,6 @@ class ProjectType(UserResourceTypeMixin, ProjectExportAssetTypeMixin, FirebasePu processing_status: strawberry.auto status_message: strawberry.auto - total_area: strawberry.auto team: ContributorTeamType | None is_private: strawberry.auto required_results: strawberry.auto @@ -181,6 +180,8 @@ class ProjectType(UserResourceTypeMixin, ProjectExportAssetTypeMixin, FirebasePu number_of_results_for_progress: strawberry.auto last_contribution_date: strawberry.auto + total_area: strawberry.auto = strawberry.field(deprecation_reason="Use AOI Geometry instead") + @strawberry_django.field( description=str(Project._meta.get_field("progress").help_text), # type: ignore[reportAttributeAccessIssue] ) diff --git a/apps/project/migrations/0010_alter_projecttask_unique_together.py b/apps/project/migrations/0010_alter_projecttask_unique_together.py new file mode 100644 index 00000000..571e3726 --- /dev/null +++ b/apps/project/migrations/0010_alter_projecttask_unique_together.py @@ -0,0 +1,17 @@ +# Generated by Django 5.2.5 on 2025-10-13 07:55 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('project', '0009_merge_20251008_1121'), + ] + + operations = [ + migrations.AlterUniqueTogether( + name='projecttask', + unique_together=set(), + ), + ] diff --git a/apps/project/models.py b/apps/project/models.py index 363145f9..44aa399c 100644 --- a/apps/project/models.py +++ b/apps/project/models.py @@ -465,7 +465,7 @@ class Project(UserResource, FirebasePushResource): help_text=gettext_lazy("Timestamp of the base slack message"), ) - slack_progress_notifications = models.PositiveIntegerField[int, int]( + slack_progress_notifications = models.PositiveIntegerField[int | None, int | None]( null=True, blank=True, help_text=gettext_lazy("Stores the last progress checkpoint notified via Slack."), @@ -692,12 +692,14 @@ class 
ProjectTask(FirebasePushResource): id: int task_group_id: int - class Meta: # type: ignore[reportIncompatibleVariableOverride] - unique_together = ( - # FIXME(tnagorra): Should we use project instead of task_group here? - "task_group", - "firebase_id", - ) + # FIXME: Quick fix involves removing uniqueness constraint + # As firebase_id for tasks are derived from user input, + # we should discuss if we need db level uniqueness + # class Meta: + # unique_together = ( + # "task_group", + # "firebase_id", + # ) @typing.override def __str__(self): diff --git a/apps/project/serializers.py b/apps/project/serializers.py index 09ca461a..65e8640f 100644 --- a/apps/project/serializers.py +++ b/apps/project/serializers.py @@ -114,9 +114,9 @@ def _validate_group_size(self, attrs: dict[str, typing.Any]): group_size: int match project_type: case Project.Type.FIND: - group_size = 25 - case Project.Type.VALIDATE: group_size = 120 + case Project.Type.VALIDATE: + group_size = 25 case Project.Type.VALIDATE_IMAGE: group_size = 25 case Project.Type.COMPARE: diff --git a/apps/project/slack_messages.py b/apps/project/slack_messages.py index ac5710f3..7f99b10c 100644 --- a/apps/project/slack_messages.py +++ b/apps/project/slack_messages.py @@ -44,8 +44,8 @@ def format_tutorial_link(tutorial: Tutorial | None): return f"<{tutorial_url}|{tutorial.name}>" @staticmethod - def format_project_name(project: Project): - if project.is_private: + def format_project_name(project: Project, *, show_private_indicator=False): + if project.is_private and show_private_indicator: return f"{project.generate_name()} :lock:" return project.generate_name() @@ -122,7 +122,7 @@ def get_message_for_project_progress( website_url = Config.WebsiteKeys.project(firebase_id=project.firebase_id) if progress >= 100: - text = "Project reached 100%" + text = f"Project '{SlackMessage.format_project_name(project)}' reached 100%" blocks = [ { "type": "header", @@ -163,7 +163,7 @@ def get_message_for_project_progress( } if 90 <= progress < 100: - text = "Progress reached 90%" + text = f"Project '{SlackMessage.format_project_name(project)}' reached 90%" blocks = [ { "type": "header", @@ -185,7 +185,7 @@ def get_message_for_project_progress( "blocks": blocks, } - text = f"Project reached {project.progress}%" + text = f"Project '{SlackMessage.format_project_name(project)}' reached {project.progress}%" blocks = [ { "type": "section", @@ -213,7 +213,7 @@ def get_message_for_project_publish( "type": "header", "text": { "type": "plain_text", - "text": SlackMessage.format_project_name(project), + "text": SlackMessage.format_project_name(project, show_private_indicator=True), }, }, SlackMessage.get_project_information_block(project), @@ -275,22 +275,11 @@ def get_message_for_project_status( if action_failed: text = "Project status could not be updated!" 
- heading = "Project status updated :pushpin:" - if action_failed: - heading = "Project status could not be updated :x:" - description = f"The project has been *{status_label}* by {username} at {modified_at}" if action_failed: description = f"The project could be *{status_label}* by {username} at {modified_at}" blocks = [ - { - "type": "header", - "text": { - "type": "plain_text", - "text": heading, - }, - }, { "type": "section", "text": { diff --git a/apps/project/tasks.py b/apps/project/tasks.py index 6e084d2c..28cddc96 100644 --- a/apps/project/tasks.py +++ b/apps/project/tasks.py @@ -24,23 +24,23 @@ def process_project_task(project_id: int): logger.warning("Project(id: %s) processing is already running", project_id) return None - project = Project.objects.get(pk=project_id) - project_type_handler = get_project_type_handler(project.project_type_enum)(project) - project_type_handler.process_project() - return True + project = Project.objects.get(pk=project_id) + project_type_handler = get_project_type_handler(project.project_type_enum)(project) + project_type_handler.process_project() + return True @shared_task -def push_project_to_firebase(project_id: int): +def push_project_to_firebase(project_id: int, *, only_stats: bool = False): with CeleryLock.redis_lock(CeleryLock.Key.PUSH_PROJECT_TO_FIREBASE.format(project_id)) as acquired: if not acquired: logger.warning("Project(id: %s) push project to firebase already running", project_id) return None - project = Project.objects.get(pk=project_id) - project_type_handler = get_project_type_handler(project.project_type_enum)(project) - project_type_handler.push_project_on_firebase() - return True + project = Project.objects.get(pk=project_id) + project_type_handler = get_project_type_handler(project.project_type_enum)(project) + project_type_handler.push_project_on_firebase(only_stats=only_stats) + return True # TODO: How to trigger this? Scheduled or trigger by pull_mapping_session_from_firebase task? 
@@ -62,9 +62,9 @@ def generate_project_exports( logger.warning("Project(id: %s) exports generate already running", project.id) return None - project_type_handler = get_project_type_handler(project.project_type_enum)(project) - project_type_handler.generate_exports() - return True + project_type_handler = get_project_type_handler(project.project_type_enum)(project) + project_type_handler.generate_exports() + return True @shared_task @@ -74,8 +74,8 @@ def regenerate_global_project_assets(): logger.warning("regenerate_global_project_assets already running") return None - overall_stats.generate() - return True + overall_stats.generate() + return True @shared_task diff --git a/apps/project/tests/mutation_test.py b/apps/project/tests/mutation_test.py index 0a24aa37..f406ac4a 100644 --- a/apps/project/tests/mutation_test.py +++ b/apps/project/tests/mutation_test.py @@ -627,7 +627,7 @@ def test_project_create(self): additionalInfoUrl=latest_project.additional_info_url, description=latest_project.description, verificationNumber=3, - groupSize=25, + groupSize=120, maxTasksPerUser=None, isFeatured=latest_project.is_featured, status=self.genum(Project.Status.DRAFT), @@ -1394,7 +1394,7 @@ class TaskGroupType(typing.TypedDict): "firebase_id": "g101", "number_of_tasks": 18, "required_count": 10, - "total_area": 210.10735845202447, + "total_area": 21.010735845202447, "project_type_specifics": { "x_max": 24152, "x_min": 24147, @@ -1406,7 +1406,7 @@ class TaskGroupType(typing.TypedDict): "firebase_id": "g102", "number_of_tasks": 24, "required_count": 10, - "total_area": 280.2915392364502, + "total_area": 28.02915392364502, "project_type_specifics": { "x_max": 24153, "x_min": 24146, @@ -1418,7 +1418,7 @@ class TaskGroupType(typing.TypedDict): "firebase_id": "g103", "number_of_tasks": 24, "required_count": 10, - "total_area": 280.4398676951218, + "total_area": 28.043986769512177, "project_type_specifics": { "x_max": 24153, "x_min": 24146, @@ -1430,7 +1430,7 @@ class TaskGroupType(typing.TypedDict): "firebase_id": "g104", "number_of_tasks": 6, "required_count": 10, - "total_area": 70.14703242812156, + "total_area": 7.014703242812157, "project_type_specifics": { "x_max": 24150, "x_min": 24149, diff --git a/apps/tutorial/tasks.py b/apps/tutorial/tasks.py index 96273866..2e64c2e2 100644 --- a/apps/tutorial/tasks.py +++ b/apps/tutorial/tasks.py @@ -16,7 +16,7 @@ def push_tutorial_to_firebase(tutorial_id: int): logger.warning("Tutorial(id: %s) push tutorial to firebase already running", tutorial_id) return None - tutorial = Tutorial.objects.get(pk=tutorial_id) - tutorial_type_handler = get_tutorial_type_handler(tutorial.project.project_type_enum)(tutorial) - tutorial_type_handler.push_tutorial_on_firebase() - return True + tutorial = Tutorial.objects.get(pk=tutorial_id) + tutorial_type_handler = get_tutorial_type_handler(tutorial.project.project_type_enum)(tutorial) + tutorial_type_handler.push_tutorial_on_firebase() + return True diff --git a/firebase b/firebase index b7b1b68d..93b3ffc5 160000 --- a/firebase +++ b/firebase @@ -1 +1 @@ -Subproject commit b7b1b68d271be3bdaa2ccad30b7503f814be2645 +Subproject commit 93b3ffc5bdcca6c0c1ab0075ef82f6a6f0c9f525 diff --git a/project_types/base/project.py b/project_types/base/project.py index d5369065..731c2268 100644 --- a/project_types/base/project.py +++ b/project_types/base/project.py @@ -9,7 +9,8 @@ from django.contrib.gis.db.models.functions import Area from django.core.files.base import ContentFile from django.db import models -from django.db.models.functions import 
Cast +from django.db.models.expressions import Subquery +from django.db.models.functions import Cast, Coalesce from firebase_admin.db import Reference as FbReference # type: ignore[reportMissingTypeStubs] from pydantic import BaseModel, ConfigDict from pyfirebase_mapswipe import extended_models as firebase_ext_models @@ -115,51 +116,78 @@ def analyze_groups(self): # Update number_of_tasks self.project.update_processing_status(Project.ProcessingStatus.ANALYZING_GROUPS_AND_TASK, True) + # Check for uniqueness for project tasks + groups = ProjectTaskGroup.objects.filter( + project_id=self.project.pk, + ).values("id") + duplicated_task_ids_qs = ( + ProjectTask.objects.filter(task_group__in=Subquery(groups)) + .values("firebase_id") + .annotate(firebase_id_count=models.Count("id")) + .filter(firebase_id_count__gt=1) + ) + duplicated_task_ids_count = duplicated_task_ids_qs.count() + if duplicated_task_ids_count > 0: + MAX_EXAMPLES = 5 + duplicated_task_ids_examples = duplicated_task_ids_qs.values_list("firebase_id", flat=True)[:MAX_EXAMPLES] + examples = ", ".join(duplicated_task_ids_examples) + error_message = f"There are {duplicated_task_ids_count} tasks with duplicate identifiers: {examples}" + if duplicated_task_ids_count > MAX_EXAMPLES: + error_message += ", ..." + raise ValidationException(error_message) + + # Calculating aggregates on groups project_task_groups_qs = ProjectTaskGroup.objects.filter(project_id=self.project.pk) project_task_groups_qs.update( - number_of_tasks=models.Subquery( - ProjectTask.objects.filter(task_group_id=models.OuterRef("id")) - .values("task_group_id") - .annotate(total_tasks=models.Count("*")) - .values("total_tasks")[:1], + number_of_tasks=Coalesce( + models.Subquery( + ProjectTask.objects.filter(task_group_id=models.OuterRef("id")) + .values("task_group_id") + .annotate(total_tasks=models.Count("*")) + .values("total_tasks")[:1], + ), + models.Value(0), ), - total_area=models.Subquery( - ProjectTask.objects.filter(task_group_id=models.OuterRef("id")) - .values("task_group_id") - .annotate( - total_task_group_area=models.Sum( - Area( - Cast( - "geometry", - output_field=GeometryField(geography=True), + total_area=Coalesce( + models.Subquery( + ProjectTask.objects.filter(task_group_id=models.OuterRef("id")) + .values("task_group_id") + .annotate( + total_task_group_area=models.Sum( + Area( + Cast( + "geometry", + output_field=GeometryField(geography=True), + ), ), - ), + ) + / 1_000_000, ) - / 100_000, - ) - .values("total_task_group_area")[:1], + .values("total_task_group_area")[:1], + ), + models.Value(0), ), + required_count=self.project.verification_number, ) - # NOTE: After number_of_tasks is calculated + # NOTE: calculation of time_spent_max_allowed depends on number_of_tasks project_task_groups_qs.update( - required_count=self.project.verification_number, time_spent_max_allowed=(models.F("number_of_tasks") * self.get_max_time_spend_percentile()), ) + # Calculation aggregates on project self.project.required_results = ( ProjectTaskGroup.objects.filter(project_id=self.project.pk).aggregate( required_results=models.Sum("number_of_tasks") * self.project.verification_number, ) )["required_results"] or 0 - - if self.project.required_results == 0: - raise ValidationException("Project does not contain any groups or tasks") - self.project.total_area = ( ProjectTaskGroup.objects.filter(project_id=self.project.pk).aggregate(agg_area=models.Sum("total_area")) )["agg_area"] or 0 - self.project.save(update_fields=(["required_results"])) + if 
self.project.required_results == 0: + raise ValidationException("Project does not contain any groups or tasks") + + self.project.save(update_fields=(["required_results", "total_area"])) @abstractmethod def get_max_time_spend_percentile(self) -> float: @@ -424,10 +452,27 @@ def create_project_on_firebase(self, project_ref: FbReference): }, ) - def update_project_on_firebase(self, project_ref: FbReference, fb_project: firebase_ext_models.FbProject): + def update_project_on_firebase( + self, + project_ref: FbReference, + fb_project: firebase_ext_models.FbProject, + *, + only_stats: bool = False, + ): assert self.project.tutorial_id is not None, "Tutorial is required before project can be pushed to firebase" assert self.project.tutorial is not None, "Tutorial is required before project can be pushed to firebase" + if only_stats: + project_ref.update( + value=firebase_utils.serialize( + firebase_models.FbProjectUpdateStatsInput( + contributorCount=self.project.number_of_contributor_users, + progress=self.project.progress, + ), + ), + ) + return + project_ref.update( value=firebase_utils.serialize( firebase_models.FbProjectUpdateInput( @@ -454,7 +499,7 @@ def update_project_on_firebase(self, project_ref: FbReference, fb_project: fireb ), ) - def _push_project_on_firebase(self): + def _push_project_on_firebase(self, *, only_stats: bool = False): if self.project.status_enum not in [ Project.Status.READY_TO_PUBLISH, Project.Status.PUBLISHED, @@ -497,11 +542,11 @@ class RelaxedModel(firebase_ext_models.FbProject): valid_project = RelaxedModel.model_validate(obj=fb_project) valid_project = firebase_ext_models.FbProject.model_validate(obj=valid_project) - self.update_project_on_firebase(project_ref, valid_project) + self.update_project_on_firebase(project_ref, valid_project, only_stats=only_stats) - def push_project_on_firebase(self): + def push_project_on_firebase(self, *, only_stats: bool = False): try: - self._push_project_on_firebase() + self._push_project_on_firebase(only_stats=only_stats) except Exception as ex: if isinstance(ex, ValidationException): logger.warning( diff --git a/project_types/validate/project.py b/project_types/validate/project.py index a2d2cb0d..08cf2dff 100644 --- a/project_types/validate/project.py +++ b/project_types/validate/project.py @@ -28,7 +28,7 @@ from main.config import Config from project_types.base import project as base_project from project_types.tile_map_service.base.project import create_json_dump -from project_types.validate.api_calls import ohsome +from project_types.validate.api_calls import ValidateApiCallError, ohsome from utils import fields as custom_fields from utils.asset_types.models import AoiGeometryAssetProperty from utils.common import Grouping, clean_up_none_keys, to_groups @@ -144,24 +144,47 @@ def get_aoi_geometry_asset(self) -> ProjectAsset | None: ) def _get_object_geometry_from_ohsome(self, geojson: dict): # type: ignore[reportMissingTypeArgument] - feature_collection = PydanticFeatureCollection.model_validate(geojson) + try: + feature_collection = PydanticFeatureCollection.model_validate(geojson) + except Exception as e: + raise base_project.ValidationException( + "AOI GeoJSON should be a valid feature collection of polygon or multi-polygon", + ) from e + ohsome_request = { "endpoint": "elements/geometry", "filter": self.project_type_specifics.object_source.ohsome_filter, } - return ohsome( - ohsome_request, - feature_collection.model_dump_json(), - properties="tags, metadata", - ) + try: + geojson_result = ohsome( + ohsome_request, + 
feature_collection.model_dump_json(), + properties="tags, metadata", + ) + except ValidateApiCallError as e: + # NOTE: Handles calls from OHSOME, OSMCHA and OSM + raise base_project.ValidationException("Failed to fetch data from OHSOME/OSMCHA/OSM") from e + except requests.JSONDecodeError as e: + # NOTE: Handles calls from OHSOME and OSMCHA + # OSM responds in XML format + raise base_project.ValidationException( + "OHSOME/OSMCHA did not respond with a valid JSON", + ) from e + + try: + return convert_json_dict_to_features(geojson_result) + except Exception as e: + raise base_project.ValidationException( + "OHSOME did not respond with a valid feature collection of polygon or multi-polygon", + ) from e def _validate_aoi_geojson_file(self): if self.project_type_specifics.object_source.aoi_geometry is None: - raise base_project.ValidationException("AOI Geometry is missing for validate geojson file") + raise base_project.ValidationException("AOI Geometry is missing") if self.project_type_specifics.object_source.ohsome_filter is None: - raise base_project.ValidationException("Ohsome filter is missing for validate geojson file") + raise base_project.ValidationException("Ohsome filter is missing") aoi_asset = self.project.aoi_geometry_input_asset if not aoi_asset: @@ -175,14 +198,9 @@ def _validate_aoi_geojson_file(self): with aoi_asset.file.open() as aoi_file: aoi_geojson = json.loads(aoi_file.read()) - geojson_result = self._get_object_geometry_from_ohsome(aoi_geojson) - # TODO(tnagorra): Also store intermediate geometries? - try: - return convert_json_dict_to_features(geojson_result) - except Exception as e: - raise base_project.ValidationException("Invalid Feature Collection") from e + return self._get_object_geometry_from_ohsome(aoi_geojson) def _validate_object_geojson_url(self): url = self.project_type_specifics.object_source.object_geojson_url @@ -192,17 +210,25 @@ def _validate_object_geojson_url(self): logger.info("Fetching object geojson from %s", url) # FIXME(frozenhelium): use predefined timeout duration + # FIXME(tnagorra): handle timeout error response = requests.get(url, timeout=500) if response.status_code != 200: - logger.warning("Failed to fetch object geojson from %s", url) + raise base_project.ValidationException( + f"Failed to fetch object geojson from {url}", + ) logger.info("Successfully fetched object geojson from %s", url) - geojson = response.json() + try: + geojson = response.json() + except Exception as e: + raise base_project.ValidationException("GeoJSON URL did not respond with valid JSON") from e try: features, geometry_collection = convert_json_dict_to_geometry_collection(geojson) except Exception as e: - raise base_project.ValidationException("Invalid Feature Collection") from e + raise base_project.ValidationException( + "GeoJSON URL did not respond with a valid feature collection of polygon or multi-polygon", + ) from e # TODO(tnagorra): Also store intermediate geometries? # TODO(tnagorra): Also create a input geometry? 
@@ -242,14 +268,23 @@ def _validate_tasking_manager(self): raise base_project.ValidationException("HOT Tasking Manager Project ID is missing") hot_tm_url = f"{Config.HOT_TASKING_MANAGER_PROJECT_API_LINK}projects/{hot_tm_id}/queries/aoi/?as_file=false" + logger.info("Fetching AOI geojson on HOT from %s", hot_tm_url) # FIXME(frozenhelium): use predefined timeout duration + # FIXME(tnagorra): handle timeout error aoi_result = requests.get(hot_tm_url, timeout=500) if aoi_result.status_code != 200: - logger.warning("Failed to fetch AOI geojson from HOT for tm_id %s", hot_tm_id) + raise base_project.ValidationException( + f"Failed to fetch AOI GeoJSON from HOT Tasking Manager for tm_id {hot_tm_id}", + ) logger.info("Successfully fetched AOI geojson from HOT for tm_id %s", hot_tm_id) + try: + geometry_dict = aoi_result.json() + except Exception as e: + raise base_project.ValidationException("HOT Tasking Manager did not respond with a valid JSON") from e + aoi_geojson = { "type": "FeatureCollection", "metadata": { @@ -259,7 +294,7 @@ def _validate_tasking_manager(self): "features": [ { "type": "Feature", - "geometry": aoi_result.json(), + "geometry": geometry_dict, "properties": { "hot_tm_project_id": hot_tm_id, }, @@ -267,6 +302,8 @@ def _validate_tasking_manager(self): ], } + # TODO(tnagorra): Add area validation for the AOI + # TODO(tnagorra): Also create a input geometry? # TODO(tnagorra): Also store intermediate geometries? geometry = GEOSGeometry(aoi_result.text, srid=4326) @@ -296,12 +333,7 @@ def _validate_tasking_manager(self): self.project.centroid = geometry_center self.project.save(update_fields=["aoi_geometry", "total_area", "bbox", "centroid"]) - geojson_result = self._get_object_geometry_from_ohsome(aoi_geojson) - - try: - return convert_json_dict_to_features(geojson_result) - except Exception as e: - raise base_project.ValidationException("Invalid Feature Collection") from e + return self._get_object_geometry_from_ohsome(aoi_geojson) @typing.override def validate(self) -> list[AoiFeature]: diff --git a/project_types/validate_image/project.py b/project_types/validate_image/project.py index 66de3c93..932b9072 100644 --- a/project_types/validate_image/project.py +++ b/project_types/validate_image/project.py @@ -150,7 +150,7 @@ def _validate_dataset_file(self) -> list[ValidImage]: if annotations: for annotation in annotations: valid_image: ValidImage = { - "source_identifier": asset_specifics.image.id, + "source_identifier": annotation.id, "url": image_asset.external_url, "file_name": asset_specifics.image.file_name, "width": asset_specifics.image.width, diff --git a/release.sh b/release.sh index b28486a2..34165d65 100755 --- a/release.sh +++ b/release.sh @@ -92,7 +92,7 @@ echo "Preparing $version_tag..." # update the version msg="# managed by release.sh" sed -E -i "s/^version = .* $msg$/version = \"${version_tag#v}\" $msg/" "./pyproject.toml" -uv sync +uv sync --all-groups --all-extras git add ./pyproject.toml ./uv.lock sed -E -i "s/^version: .* $msg$/version: ${version_tag#v}-SET-BY-CICD $msg/" "./helm/Chart.yaml" diff --git a/schema.graphql b/schema.graphql index 8ab6df98..e536cc8f 100644 --- a/schema.graphql +++ b/schema.graphql @@ -663,6 +663,7 @@ type ContributorUserStatType { type ContributorUserStats { filteredStats(dateRange: DateRangeInput = null): ContributorUserFilteredStats! + firebaseId: ID! id: ID! stats: ContributorUserStatType! @@ -804,6 +805,11 @@ type FindTutorialTaskPropertyType { tileZ: Int! 
} +input FirebaseOrInternalIdInputType @oneOf { + firebaseId: ID + id: ID +} + interface FirebasePushResourceTypeMixin { firebaseId: String! firebaseLastPushed: DateTime @@ -1820,7 +1826,7 @@ type ProjectType implements UserResourceTypeMixin & ProjectExportAssetTypeMixin statusMessage: String team: ContributorTeamType topic: String! - totalArea: Float + totalArea: Float @deprecated(reason: "Use AOI Geometry instead") """Tutorial used for this project.""" tutorial: TutorialType @@ -2009,13 +2015,12 @@ type Query { """Stats from last 30 days.""" communityStatsLatest: CommunityStatsType! - communityUserGroupStats(userGroupId: ID!): ContributorUserGroupStats! - communityUserStats(firebaseId: ID!): ContributorUserStats! + communityUserGroupStats(userGroupId: FirebaseOrInternalIdInputType!): ContributorUserGroupStats! + communityUserStats(userId: FirebaseOrInternalIdInputType!): ContributorUserStats! contributorTeam(id: ID!): ContributorTeamType! contributorTeams(includeAll: Boolean! = false, filters: ContributorTeamFilter, order: ContributorTeamOrder, pagination: OffsetPaginationInput): ContributorTeamTypeOffsetPaginated! @isAuthenticated - contributorUser(id: ID!): ContributorUserType! - contributorUserByFirebaseId(firebaseId: ID!): ContributorUserType! - contributorUserGroup(id: ID!): ContributorUserGroupType! + contributorUser(userId: FirebaseOrInternalIdInputType!): ContributorUserType! + contributorUserGroup(userGroupId: FirebaseOrInternalIdInputType!): ContributorUserGroupType! contributorUserGroupMembers(includeAll: Boolean! = false, filters: ContributorUserGroupMembershipFilter, order: ContributorUserGroupMembershipOrder, pagination: OffsetPaginationInput): ContributorUserGroupMembershipTypeOffsetPaginated! contributorUserGroups(includeAll: Boolean! = false, filters: ContributorUserGroupFilter, order: ContributorUserGroupOrder, pagination: OffsetPaginationInput): ContributorUserGroupTypeOffsetPaginated! contributorUsers(pagination: OffsetPaginationInput, filters: ContributorUserFilter, order: ContributorUserOrder): ContributorUserTypeOffsetPaginated! diff --git a/utils/geo/transform.py b/utils/geo/transform.py index b2b0f1e5..e7830941 100644 --- a/utils/geo/transform.py +++ b/utils/geo/transform.py @@ -22,7 +22,7 @@ def to_2d(geom: GEOSGeometry) -> GEOSGeometry: def get_area_of_geometry(geom: GeometryCollection | GEOSGeometry): area_m2: float = geom.transform(6933, clone=True).area - return area_m2 / 1000_000 + return area_m2 / 1_000_000 def get_polygon_of_extent(extent: tuple[float, float, float, float]): diff --git a/utils/graphql/inputs.py b/utils/graphql/inputs.py index 69501658..8f219c9e 100644 --- a/utils/graphql/inputs.py +++ b/utils/graphql/inputs.py @@ -1,9 +1,28 @@ import datetime import strawberry +from django.db import models +from django.shortcuts import aget_object_or_404 @strawberry.input class DateRangeInput: from_date: datetime.date to_date: datetime.date + + +@strawberry.input(one_of=True) +class FirebaseOrInternalIdInputType: + id: strawberry.Maybe[strawberry.ID] + firebase_id: strawberry.Maybe[strawberry.ID] + + @staticmethod + async def aget_object_or_404[M: models.Model]( + model: type[M], + object_id: "FirebaseOrInternalIdInputType", + ) -> M: + if object_id.id is not None: + return await aget_object_or_404(model, id=object_id.id.value) + if object_id.firebase_id is not None: + return await aget_object_or_404(model, firebase_id=object_id.firebase_id.value) + raise Exception("This should never be called")
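
Note: the FirebaseOrInternalIdInputType added above is declared with one_of=True (exposed as @oneOf in schema.graphql), so a caller supplies exactly one of id or firebaseId per lookup. A minimal query sketch against the updated schema, using only fields already exercised in the tests above; the literal IDs ("abc123", "42") are placeholders:

query ExampleLookups {
  contributorUser(userId: { firebaseId: "abc123" }) {
    id
    firebaseId
    username
  }
  communityUserGroupStats(userGroupId: { id: "42" }) {
    stats {
      totalContributors
      totalAreaSwiped
    }
  }
}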