From 2b87dc3320ee1b4c4f312054ea6e51254467adcb Mon Sep 17 00:00:00 2001
From: Anson He <60114875+ansonjwhe@users.noreply.github.com>
Date: Sat, 17 Feb 2024 19:14:02 -0500
Subject: [PATCH] ASP dashboard list view and onsite contact migration (#92)

---
 .github/workflows/lint.yml | 3 +-
 .gitignore | 3 +-
 .idea/.gitignore | 3 +
 .idea/feeding-canadian-kids.iml | 15 +
 .../inspectionProfiles/profiles_settings.xml | 6 +
 .idea/misc.xml | 7 +
 .idea/modules.xml | 8 +
 .idea/vcs.xml | 6 +
 README.md | 13 +-
 backend/Dockerfile | 1 -
 backend/Makefile | 6 +-
 backend/app/graphql/__init__.py | 16 +-
 backend/app/graphql/meal_request.py | 47 +-
 .../app/graphql/onsite_contact_mutations.py | 96 ++
 backend/app/graphql/onsite_contact_queries.py | 20 +
 backend/app/graphql/services.py | 6 +-
 backend/app/graphql/types.py | 18 +-
 backend/app/models/meal_request.py | 31 +-
 backend/app/models/onsite_contact.py | 33 +
 backend/app/models/user.py | 2 +-
 backend/app/models/user_info.py | 6 +-
 backend/app/resources/meal_request_dto.py | 22 +-
 backend/app/resources/onsite_contact_dto.py | 31 +
 backend/app/resources/validate_utils.py | 4 +-
 .../implementations/meal_request_service.py | 43 +-
 .../onboarding_request_service.py | 4 +-
 .../implementations/onsite_contact_service.py | 80 ++
 .../services/implementations/user_service.py | 23 +-
 .../app/services/interfaces/auth_service.py | 4 +-
 .../interfaces/meal_request_service.py | 19 +-
 .../interfaces/onsite_contact_service.py | 51 +
 .../app/utilities/location_to_coordinates.py | 29 +-
 backend/mypy.ini | 6 +
 backend/setup.cfg | 3 +-
 backend/tests/graphql/conftest.py | 60 +-
 backend/tests/graphql/mock_test_data.py | 89 +-
 .../tests/graphql/test_all_user_mutations.py | 42 +-
 .../tests/graphql/test_all_user_queries.py | 9 +-
 backend/tests/graphql/test_meal_request.py | 140 ++-
 .../tests/graphql/test_onboarding_request.py | 38 +-
 backend/tests/graphql/test_onsite_contact.py | 168 ++++
 backend/tests/unit/test_models.py | 18 +-
 backend/typings/graphene/__init__.pyi | 87 ++
 backend/typings/graphene/pyutils/__init__.pyi | 0
 .../typings/graphene/pyutils/dataclasses.pyi | 94 ++
 backend/typings/graphene/pyutils/version.pyi | 7 +
 backend/typings/graphene/relay/__init__.pyi | 17 +
 backend/typings/graphene/relay/connection.pyi | 54 ++
 backend/typings/graphene/relay/mutation.pyi | 19 +
 backend/typings/graphene/relay/node.pyi | 59 ++
 backend/typings/graphene/types/__init__.pyi | 60 ++
 backend/typings/graphene/types/argument.pyi | 24 +
 backend/typings/graphene/types/base.pyi | 28 +
 backend/typings/graphene/types/base64.pyi | 10 +
 backend/typings/graphene/types/context.pyi | 2 +
 backend/typings/graphene/types/datetime.pyi | 26 +
 backend/typings/graphene/types/decimal.pyi | 10 +
 .../typings/graphene/types/definitions.pyi | 23 +
 backend/typings/graphene/types/dynamic.pyi | 13 +
 backend/typings/graphene/types/enum.pyi | 36 +
 backend/typings/graphene/types/field.pyi | 39 +
 backend/typings/graphene/types/generic.pyi | 11 +
 backend/typings/graphene/types/inputfield.pyi | 23 +
 .../graphene/types/inputobjecttype.pyi | 29 +
 backend/typings/graphene/types/interface.pyi | 20 +
 backend/typings/graphene/types/json.pyi | 10 +
 .../typings/graphene/types/mountedtype.pyi | 6 +
 backend/typings/graphene/types/mutation.pyi | 38 +
 backend/typings/graphene/types/objecttype.pyi | 32 +
 backend/typings/graphene/types/resolver.pyi | 8 +
 backend/typings/graphene/types/scalars.pyi | 62 ++
 backend/typings/graphene/types/schema.pyi | 89 ++
 backend/typings/graphene/types/structures.pyi | 15 +
 backend/typings/graphene/types/union.pyi | 20 +
 .../typings/graphene/types/unmountedtype.pyi | 13 +
 backend/typings/graphene/types/utils.pyi | 9 +
 backend/typings/graphene/types/uuid.pyi | 10 +
 backend/typings/graphene/utils/__init__.pyi | 0
 backend/typings/graphene/utils/crunch.pyi | 4 +
 backend/typings/graphene/utils/dataloader.pyi | 39 +
 .../typings/graphene/utils/deduplicator.pyi | 3 +
 backend/typings/graphene/utils/deprecated.pyi | 6 +
 .../graphene/utils/get_unbound_function.pyi | 1 +
 .../graphene/utils/is_introspection_key.pyi | 1 +
 .../typings/graphene/utils/module_loading.pyi | 4 +
 .../typings/graphene/utils/orderedtype.pyi | 12 +
 backend/typings/graphene/utils/props.pyi | 4 +
 .../graphene/utils/resolve_only_args.pyi | 3 +
 .../typings/graphene/utils/str_converters.pyi | 2 +
 .../graphene/utils/subclass_with_meta.pyi | 8 +
 backend/typings/graphene/utils/thenables.pyi | 2 +
 .../typings/graphene/utils/trim_docstring.pyi | 1 +
 .../typings/graphene/validation/__init__.pyi | 4 +
 .../graphene/validation/depth_limit.pyi | 34 +
 .../validation/disable_introspection.pyi | 6 +
 backend/typings/mongoengine/__init__.pyi | 205 ++
 backend/typings/mongoengine/base/__init__.pyi | 38 +
 backend/typings/mongoengine/base/common.pyi | 8 +
 .../mongoengine/base/datastructures.pyi | 86 ++
 backend/typings/mongoengine/base/document.pyi | 25 +
 backend/typings/mongoengine/base/fields.pyi | 72 ++
 .../typings/mongoengine/base/metaclasses.pyi | 14 +
 backend/typings/mongoengine/base/utils.pyi | 6 +
 backend/typings/mongoengine/common.pyi | 0
 backend/typings/mongoengine/connection.pyi | 38 +
 .../typings/mongoengine/context_managers.pyi | 90 ++
 backend/typings/mongoengine/dereference.pyi | 33 +
 backend/typings/mongoengine/document.pyi | 89 ++
 backend/typings/mongoengine/errors.pyi | 40 +
 backend/typings/mongoengine/fields.pyi | 463 +++++++++
 .../typings/mongoengine/mongodb_support.pyi | 9 +
 .../typings/mongoengine/pymongo_support.pyi | 14 +
 .../typings/mongoengine/queryset/__init__.pyi | 43 +
 backend/typings/mongoengine/queryset/base.pyi | 120 +++
 .../mongoengine/queryset/field_list.pyi | 22 +
 .../typings/mongoengine/queryset/manager.pyi | 12 +
 .../typings/mongoengine/queryset/queryset.pyi | 28 +
 .../mongoengine/queryset/transform.pyi | 8 +
 .../typings/mongoengine/queryset/visitor.pyi | 48 +
 backend/typings/mongoengine/signals.pyi | 33 +
 e2e-tests/test_entity_gql.py | 4 +-
 e2e-tests/test_simple_entity_gql.py | 2 +-
 e2e-tests/test_user_gql.py | 4 +-
 frontend/.eslintrc.js | 7 +-
 frontend/Dockerfile | 1 -
 frontend/babel.config.js | 1 +
 frontend/package.json | 12 +-
 frontend/src/App.tsx | 22 +-
 frontend/src/Routes.tsx | 4 +-
 .../asp/requests/CreateMealRequest.tsx | 23 +-
 .../SchedulingFormReviewAndSubmit.tsx | 14 +-
 .../components/asp/requests/TitleSection.tsx | 4 +-
 frontend/src/components/auth/Join.tsx | 654 ++++++-------
 frontend/src/components/auth/JoinSuccess.tsx | 4 +-
 frontend/src/components/auth/Login.tsx | 18 +-
 frontend/src/components/auth/SetPassword.tsx | 17 +-
 frontend/src/components/common/Footer.tsx | 4 +-
 frontend/src/components/common/Header.tsx | 282 +++---
 .../src/components/common/LoadingSpinner.tsx | 19 +
 .../components/common/OnsiteStaffSection.tsx | 305 +++---
 .../components/crud/DisplayTableContainer.tsx | 6 +-
 .../SimpleEntityDisplayTableContainer.tsx | 6 +-
 .../src/components/mealrequest/ListView.tsx | 546 +++++++++++
 frontend/src/constants/Routes.ts | 2 +-
 frontend/src/pages/ASPDashboard.tsx | 118 +++
 frontend/src/pages/CreatePage.tsx | 4 +-
 frontend/src/pages/Dashboard.tsx | 60 --
frontend/src/pages/Default.tsx | 50 +- frontend/src/pages/DisplayPage.tsx | 4 +- frontend/src/pages/EditMealRequestForm.tsx | 609 ++++++++---- frontend/src/pages/MealDonorDashboard.tsx | 4 +- frontend/src/pages/NotFound.tsx | 4 +- frontend/src/pages/Settings.tsx | 899 ++++++++++-------- frontend/src/pages/SimpleEntityCreatePage.tsx | 4 +- .../src/pages/SimpleEntityDisplayPage.tsx | 4 +- frontend/src/pages/SimpleEntityUpdatePage.tsx | 4 +- frontend/src/pages/UpdatePage.tsx | 4 +- .../src/pages/__tests__/Dashboard.test.tsx | 18 +- frontend/src/theme/components/text.ts | 7 + frontend/src/types/MealRequestTypes.ts | 50 + frontend/src/types/UserTypes.ts | 14 +- frontend/src/utils/AuthUtils.ts | 3 +- frontend/src/utils/GraphQLUtils.ts | 15 + frontend/src/utils/ValidationUtils.ts | 16 +- frontend/src/utils/useGetOnsiteContacts.ts | 59 ++ frontend/src/utils/useIsAdmin.ts | 11 + frontend/src/utils/useIsMealDonor.ts | 11 + frontend/tsconfig.json | 6 +- frontend/yarn.lock | 859 ++++++++++++++++- update_secret_files.py | 1 + 170 files changed, 7048 insertions(+), 1661 deletions(-) create mode 100644 .idea/.gitignore create mode 100644 .idea/feeding-canadian-kids.iml create mode 100644 .idea/inspectionProfiles/profiles_settings.xml create mode 100644 .idea/misc.xml create mode 100644 .idea/modules.xml create mode 100644 .idea/vcs.xml create mode 100644 backend/app/graphql/onsite_contact_mutations.py create mode 100644 backend/app/graphql/onsite_contact_queries.py create mode 100644 backend/app/models/onsite_contact.py create mode 100644 backend/app/resources/onsite_contact_dto.py create mode 100644 backend/app/services/implementations/onsite_contact_service.py create mode 100644 backend/app/services/interfaces/onsite_contact_service.py create mode 100644 backend/mypy.ini create mode 100644 backend/tests/graphql/test_onsite_contact.py create mode 100644 backend/typings/graphene/__init__.pyi create mode 100644 backend/typings/graphene/pyutils/__init__.pyi create mode 100644 backend/typings/graphene/pyutils/dataclasses.pyi create mode 100644 backend/typings/graphene/pyutils/version.pyi create mode 100644 backend/typings/graphene/relay/__init__.pyi create mode 100644 backend/typings/graphene/relay/connection.pyi create mode 100644 backend/typings/graphene/relay/mutation.pyi create mode 100644 backend/typings/graphene/relay/node.pyi create mode 100644 backend/typings/graphene/types/__init__.pyi create mode 100644 backend/typings/graphene/types/argument.pyi create mode 100644 backend/typings/graphene/types/base.pyi create mode 100644 backend/typings/graphene/types/base64.pyi create mode 100644 backend/typings/graphene/types/context.pyi create mode 100644 backend/typings/graphene/types/datetime.pyi create mode 100644 backend/typings/graphene/types/decimal.pyi create mode 100644 backend/typings/graphene/types/definitions.pyi create mode 100644 backend/typings/graphene/types/dynamic.pyi create mode 100644 backend/typings/graphene/types/enum.pyi create mode 100644 backend/typings/graphene/types/field.pyi create mode 100644 backend/typings/graphene/types/generic.pyi create mode 100644 backend/typings/graphene/types/inputfield.pyi create mode 100644 backend/typings/graphene/types/inputobjecttype.pyi create mode 100644 backend/typings/graphene/types/interface.pyi create mode 100644 backend/typings/graphene/types/json.pyi create mode 100644 backend/typings/graphene/types/mountedtype.pyi create mode 100644 backend/typings/graphene/types/mutation.pyi create mode 100644 backend/typings/graphene/types/objecttype.pyi 
create mode 100644 backend/typings/graphene/types/resolver.pyi create mode 100644 backend/typings/graphene/types/scalars.pyi create mode 100644 backend/typings/graphene/types/schema.pyi create mode 100644 backend/typings/graphene/types/structures.pyi create mode 100644 backend/typings/graphene/types/union.pyi create mode 100644 backend/typings/graphene/types/unmountedtype.pyi create mode 100644 backend/typings/graphene/types/utils.pyi create mode 100644 backend/typings/graphene/types/uuid.pyi create mode 100644 backend/typings/graphene/utils/__init__.pyi create mode 100644 backend/typings/graphene/utils/crunch.pyi create mode 100644 backend/typings/graphene/utils/dataloader.pyi create mode 100644 backend/typings/graphene/utils/deduplicator.pyi create mode 100644 backend/typings/graphene/utils/deprecated.pyi create mode 100644 backend/typings/graphene/utils/get_unbound_function.pyi create mode 100644 backend/typings/graphene/utils/is_introspection_key.pyi create mode 100644 backend/typings/graphene/utils/module_loading.pyi create mode 100644 backend/typings/graphene/utils/orderedtype.pyi create mode 100644 backend/typings/graphene/utils/props.pyi create mode 100644 backend/typings/graphene/utils/resolve_only_args.pyi create mode 100644 backend/typings/graphene/utils/str_converters.pyi create mode 100644 backend/typings/graphene/utils/subclass_with_meta.pyi create mode 100644 backend/typings/graphene/utils/thenables.pyi create mode 100644 backend/typings/graphene/utils/trim_docstring.pyi create mode 100644 backend/typings/graphene/validation/__init__.pyi create mode 100644 backend/typings/graphene/validation/depth_limit.pyi create mode 100644 backend/typings/graphene/validation/disable_introspection.pyi create mode 100644 backend/typings/mongoengine/__init__.pyi create mode 100644 backend/typings/mongoengine/base/__init__.pyi create mode 100644 backend/typings/mongoengine/base/common.pyi create mode 100644 backend/typings/mongoengine/base/datastructures.pyi create mode 100644 backend/typings/mongoengine/base/document.pyi create mode 100644 backend/typings/mongoengine/base/fields.pyi create mode 100644 backend/typings/mongoengine/base/metaclasses.pyi create mode 100644 backend/typings/mongoengine/base/utils.pyi create mode 100644 backend/typings/mongoengine/common.pyi create mode 100644 backend/typings/mongoengine/connection.pyi create mode 100644 backend/typings/mongoengine/context_managers.pyi create mode 100644 backend/typings/mongoengine/dereference.pyi create mode 100644 backend/typings/mongoengine/document.pyi create mode 100644 backend/typings/mongoengine/errors.pyi create mode 100644 backend/typings/mongoengine/fields.pyi create mode 100644 backend/typings/mongoengine/mongodb_support.pyi create mode 100644 backend/typings/mongoengine/pymongo_support.pyi create mode 100644 backend/typings/mongoengine/queryset/__init__.pyi create mode 100644 backend/typings/mongoengine/queryset/base.pyi create mode 100644 backend/typings/mongoengine/queryset/field_list.pyi create mode 100644 backend/typings/mongoengine/queryset/manager.pyi create mode 100644 backend/typings/mongoengine/queryset/queryset.pyi create mode 100644 backend/typings/mongoengine/queryset/transform.pyi create mode 100644 backend/typings/mongoengine/queryset/visitor.pyi create mode 100644 backend/typings/mongoengine/signals.pyi create mode 100644 frontend/babel.config.js create mode 100644 frontend/src/components/common/LoadingSpinner.tsx create mode 100644 frontend/src/components/mealrequest/ListView.tsx create mode 100644 
frontend/src/pages/ASPDashboard.tsx delete mode 100644 frontend/src/pages/Dashboard.tsx create mode 100644 frontend/src/types/MealRequestTypes.ts create mode 100644 frontend/src/utils/GraphQLUtils.ts create mode 100644 frontend/src/utils/useGetOnsiteContacts.ts create mode 100644 frontend/src/utils/useIsAdmin.ts create mode 100644 frontend/src/utils/useIsMealDonor.ts diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 674ee866..dd771895 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -1,4 +1,3 @@ - name: Lint codebase on: @@ -52,4 +51,4 @@ jobs: - name: Lint backend if: steps.changes.outputs.backend == 'true' working-directory: ./backend - run: pip install black flake8 && python -m black --check . && python -m flake8 . + run: pip install black flake8 && python -m black --check . --exclude ".*typings.*|test_csv.py" && flake8 . diff --git a/.gitignore b/.gitignore index 093f0d82..0a014b41 100644 --- a/.gitignore +++ b/.gitignore @@ -11,4 +11,5 @@ **/*.cache **/*.egg-info **/*.eslintcache -**/*.swp \ No newline at end of file +**/*.swp +**/.mypy_cache/** \ No newline at end of file diff --git a/.idea/.gitignore b/.idea/.gitignore new file mode 100644 index 00000000..26d33521 --- /dev/null +++ b/.idea/.gitignore @@ -0,0 +1,3 @@ +# Default ignored files +/shelf/ +/workspace.xml diff --git a/.idea/feeding-canadian-kids.iml b/.idea/feeding-canadian-kids.iml new file mode 100644 index 00000000..5fdd65ba --- /dev/null +++ b/.idea/feeding-canadian-kids.iml @@ -0,0 +1,15 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/inspectionProfiles/profiles_settings.xml b/.idea/inspectionProfiles/profiles_settings.xml new file mode 100644 index 00000000..105ce2da --- /dev/null +++ b/.idea/inspectionProfiles/profiles_settings.xml @@ -0,0 +1,6 @@ + + + + \ No newline at end of file diff --git a/.idea/misc.xml b/.idea/misc.xml new file mode 100644 index 00000000..a377feb7 --- /dev/null +++ b/.idea/misc.xml @@ -0,0 +1,7 @@ + + + + + + \ No newline at end of file diff --git a/.idea/modules.xml b/.idea/modules.xml new file mode 100644 index 00000000..0e711090 --- /dev/null +++ b/.idea/modules.xml @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/.idea/vcs.xml b/.idea/vcs.xml new file mode 100644 index 00000000..35eb1ddf --- /dev/null +++ b/.idea/vcs.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/README.md b/README.md index 97b17fbe..8d1ee1aa 100644 --- a/README.md +++ b/README.md @@ -60,7 +60,18 @@ docker-compose up --build The backend runs at http://localhost:5000 and the frontend runs at http://localhost:3000. -If you need to login as a user, there is a user in the development db: `test@test.com` with password `12345678`. +### Test Users + +Password For all test accounts: `12345678` + +Test ASP: +Email: `shahan.neda+asptest1@gmail.com` + +Test Donor: +Email: `shahan.neda+mealdonortest1@gmail.com` + +Test Admin: +Email: `shahan.neda+testadmin1@gmail.com` ## Useful Commands diff --git a/backend/Dockerfile b/backend/Dockerfile index 34a970ff..e65a574a 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -1,5 +1,4 @@ FROM python:3.8 - WORKDIR /app COPY requirements.txt ./ diff --git a/backend/Makefile b/backend/Makefile index 1515aded..dfaaad05 100644 --- a/backend/Makefile +++ b/backend/Makefile @@ -4,8 +4,8 @@ test: docker ps -qf name=fck_backend | grep "." > /dev/null || (echo "****** Run \`docker-compose up\` first! 
******" && false) # Add -s to the end of this next line to see all prints when testing # i.e "... python -m pytest -s" - docker exec -it fck_backend /bin/bash -c "pip install -r requirements.txt && python -m pytest" - + docker exec -it fck_backend /bin/bash -c "pip install -r requirements.txt && python -m pytest -s" lint: docker ps -qf name=fck_backend | grep "." > /dev/null || (echo "****** Run \`docker-compose up\` first! ******" && false) - docker exec -it fck_backend /bin/bash -c "black . && flake8 ." + docker exec -it fck_backend /bin/bash -c "black . && flake8 . --exclude **/typings/**" + diff --git a/backend/app/graphql/__init__.py b/backend/app/graphql/__init__.py index b937225e..332b619f 100644 --- a/backend/app/graphql/__init__.py +++ b/backend/app/graphql/__init__.py @@ -2,6 +2,10 @@ import os from flask import current_app + + +from .onsite_contact_mutations import OnsiteContactMutations +from .onsite_contact_queries import OnsiteContactQueries from .example import ExampleQueries, ExampleMutations from .user_queries import UserQueries from .user_mutations import UserMutations @@ -9,6 +13,7 @@ from ..services.implementations.user_service import UserService from ..services.implementations.email_service import EmailService from ..services.implementations.auth_service import AuthService +from ..services.implementations.onsite_contact_service import OnsiteContactService from .auth import AuthMutations from .meal_request import MealRequestMutations, MealRequestQueries from ..services.implementations.meal_request_service import MealRequestService @@ -24,6 +29,7 @@ class RootQuery( UserQueries, OnboardingRequestQueries, MealRequestQueries, + OnsiteContactQueries, ): pass @@ -35,6 +41,7 @@ class RootMutation( OnboardingRequestMutations, MealRequestMutations, UserMutations, + OnsiteContactMutations, ): pass @@ -48,7 +55,6 @@ class RootMutation( def init_app(app): with app.app_context(): # Add your services here: services["service_name"] = ... 
- services["user_service"] = UserService(logger=current_app.logger) services["email_service"] = EmailService( logger=current_app.logger, credentials={ @@ -60,6 +66,13 @@ def init_app(app): sender_email=os.getenv("MAILER_USER"), display_name="Feeding Canadian Kids", ) + services["onsite_contact_service"] = OnsiteContactService( + logger=current_app.logger + ) + services["user_service"] = UserService( + logger=current_app.logger, + onsite_contact_service=services["onsite_contact_service"], + ) services["auth_service"] = AuthService( logger=current_app.logger, user_service=services["user_service"], @@ -69,4 +82,3 @@ def init_app(app): logger=current_app.logger, email_service=services["email_service"] ) services["meal_request_service"] = MealRequestService(logger=current_app.logger) - services["user_service"] = UserService(logger=current_app.logger) diff --git a/backend/app/graphql/meal_request.py b/backend/app/graphql/meal_request.py index 3bf2326a..363d4378 100644 --- a/backend/app/graphql/meal_request.py +++ b/backend/app/graphql/meal_request.py @@ -1,15 +1,14 @@ import graphene from .types import ( - ContactInput, - Contact, Mutation, MutationList, + OnsiteContact, QueryList, SortDirection, User, ) -from ..models.meal_request import MealStatus, MEAL_STATUSES +from ..models.meal_request import MEAL_STATUSES_ENUMS, MealStatus from ..graphql.services import services # Input Types @@ -34,8 +33,9 @@ class MealInfoResponse(graphene.ObjectType): class CreateMealRequestResponse(graphene.ObjectType): id = graphene.ID() drop_off_datetime = graphene.DateTime(required=True) - status = graphene.String(required=True) + status = graphene.Field(graphene.Enum.from_enum(MealStatus), required=True) meal_info = graphene.Field(MealInfoResponse, required=True) + onsite_staff = graphene.List(OnsiteContact) class DonationInfo(graphene.ObjectType): @@ -48,11 +48,11 @@ class DonationInfo(graphene.ObjectType): class MealRequestResponse(graphene.ObjectType): id = graphene.ID() requestor = graphene.Field(User) - status = graphene.String() + status = graphene.Field(graphene.Enum.from_enum(MealStatus), required=True) drop_off_datetime = graphene.DateTime() drop_off_location = graphene.String() meal_info = graphene.Field(MealInfoResponse) - onsite_staff = graphene.List(Contact) + onsite_staff = graphene.List(OnsiteContact) date_created = graphene.DateTime() date_updated = graphene.DateTime() delivery_instructions = graphene.String() @@ -70,7 +70,7 @@ class Arguments: drop_off_time = graphene.Time(required=True) drop_off_location = graphene.String(required=True) delivery_instructions = graphene.String(default_value=None) - onsite_staff = graphene.List(ContactInput, required=True) + onsite_staff = graphene.List(graphene.String, default_value=[]) # return values meal_requests = graphene.List(CreateMealRequestResponse) @@ -102,12 +102,12 @@ def mutate( class UpdateMealRequest(Mutation): class Arguments: meal_request_id = graphene.ID(required=True) - requestor = graphene.ID(required=False) + requestor_id = graphene.ID(required=False) drop_off_datetime = graphene.DateTime(required=False) meal_info = MealTypeInput() drop_off_location = graphene.String() delivery_instructions = graphene.String() - onsite_staff = graphene.List(ContactInput) + onsite_staff = graphene.List(graphene.String) # return values meal_request = graphene.Field(MealRequestResponse) @@ -116,7 +116,7 @@ def mutate( self, info, meal_request_id, - requestor=None, + requestor_id: str, drop_off_datetime=None, meal_info=None, drop_off_location=None, @@ -124,7 
+124,7 @@ def mutate( onsite_staff=None, ): result = services["meal_request_service"].update_meal_request( - requestor=requestor, + requestor_id=requestor_id, meal_info=meal_info, drop_off_datetime=drop_off_datetime, drop_off_location=drop_off_location, @@ -138,7 +138,7 @@ def mutate( class CommitToMealRequest(Mutation): class Arguments: - requester = graphene.ID(required=True) + requestor = graphene.ID(required=True) meal_request_ids = graphene.List(graphene.ID, required=True) meal_description = graphene.String(required=True) additional_info = graphene.String(default_value=None) @@ -148,13 +148,13 @@ class Arguments: def mutate( self, info, - requester, + requestor, meal_request_ids, meal_description, additional_info=None, ): result = services["meal_request_service"].commit_to_meal_request( - donor_id=requester, + donor_id=requestor, meal_request_ids=meal_request_ids, meal_description=meal_description, additional_info=additional_info, @@ -175,15 +175,32 @@ class MealRequestQueries(QueryList): requestor_id=graphene.ID(required=True), min_drop_off_date=graphene.Date(default_value=None), max_drop_off_date=graphene.Date(default_value=None), + # status=graphene.List(graphene.Enum.from_enum(MealStatus)), status=graphene.List( graphene.Enum.from_enum(MealStatus), - default_value=MEAL_STATUSES, + # MealStatus, + default_value=MEAL_STATUSES_ENUMS, ), offset=graphene.Int(default_value=0), limit=graphene.Int(default_value=None), sort_by_date_direction=SortDirection(default_value=SortDirection.ASCENDING), ) + getMealRequestById = graphene.Field( + MealRequestResponse, + requestor_id=graphene.ID(required=True), + id=graphene.ID(required=True), + ) + + def resolve_getMealRequestById( + self, + info, + requestor_id: str, + id: str, + ): + meal_request = services["meal_request_service"].get_meal_request_by_id(id) + return meal_request + def resolve_getMealRequestsByRequestorId( self, info, diff --git a/backend/app/graphql/onsite_contact_mutations.py b/backend/app/graphql/onsite_contact_mutations.py new file mode 100644 index 00000000..d305cb58 --- /dev/null +++ b/backend/app/graphql/onsite_contact_mutations.py @@ -0,0 +1,96 @@ +import graphene +from .services import services +from .types import ( + Mutation, + MutationList, + OnsiteContact, +) + + +class CreateOnsiteContact(Mutation): + class Arguments: + requestor_id = graphene.String(required=True) + organization_id = graphene.String(required=True) + name = graphene.String(required=True) + phone = graphene.String(required=True) + email = graphene.String(required=True) + + onsite_contact = graphene.Field(OnsiteContact) + + def mutate(self, info, requestor_id, organization_id, name, phone, email): + onsite_contact_service = services["onsite_contact_service"] + user_service = services["user_service"] + requestor_auth_id = user_service.get_auth_id_by_user_id(requestor_id) + requestor_role = user_service.get_user_role_by_auth_id(requestor_auth_id) + + if requestor_role == "Admin" or requestor_id == organization_id: + onsite_contact_dto = onsite_contact_service.create_onsite_contact( + organization_id, + name, + email, + phone, + ) + return CreateOnsiteContact(onsite_contact_dto) + + +class UpdateOnsiteContact(Mutation): + class Arguments: + requestor_id = graphene.String(required=True) + id = graphene.String(required=True) + name = graphene.String() + phone = graphene.String() + email = graphene.String() + + onsite_contact = graphene.Field(OnsiteContact) + + def mutate(self, info, requestor_id, id, name=None, phone=None, email=None): + # TODO: Add 
requestor_id check + user_service = services["user_service"] + onsite_contact_service = services["onsite_contact_service"] + + requestor_auth_id = user_service.get_auth_id_by_user_id(requestor_id) + requestor_role = user_service.get_user_role_by_auth_id(requestor_auth_id) + onsite_contact = onsite_contact_service.get_onsite_contact_by_id(id) + organization_id = onsite_contact.organization_id + + if requestor_role != "Admin" and requestor_id != organization_id: + raise Exception("Unauthorized") + + updated_onsite_contact = onsite_contact_service.update_onsite_contact_by_id( + id, name, email, phone + ) + return UpdateOnsiteContact(updated_onsite_contact) + + +class DeleteOnsiteContact(Mutation): + class Arguments: + requestor_id = graphene.String(required=True) + id = graphene.String(required=True) + + success = graphene.Boolean() + + def mutate(self, info, requestor_id, id): + onsite_contact_service = services["onsite_contact_service"] + user_service = services["user_service"] + + requestor_auth_id = user_service.get_auth_id_by_user_id(requestor_id) + requestor_role = user_service.get_user_role_by_auth_id(requestor_auth_id) + onsite_contact = onsite_contact_service.get_onsite_contact_by_id(id) + organization_id = onsite_contact.organization_id + + if requestor_role != "Admin" and requestor_id != organization_id: + raise Exception("Unauthorized") + + try: + onsite_contact_service.delete_onsite_contact_by_id(id) + success = True + except Exception as e: + print(e) + success = False + return DeleteOnsiteContact(success) + + +class OnsiteContactMutations(MutationList): + createOnsiteContact = CreateOnsiteContact.Field() + updateOnsiteContact = UpdateOnsiteContact.Field() + deleteOnsiteContact = DeleteOnsiteContact.Field() diff --git a/backend/app/graphql/onsite_contact_queries.py b/backend/app/graphql/onsite_contact_queries.py new file mode 100644 index 00000000..b0afec49 --- /dev/null +++ b/backend/app/graphql/onsite_contact_queries.py @@ -0,0 +1,20 @@ +import graphene +from .services import services +from .types import OnsiteContact, QueryList + + +class OnsiteContactQueries(QueryList): + get_onsite_contact_for_user_by_id = graphene.Field( + graphene.List(OnsiteContact), user_id=graphene.String(required=True) + ) + get_onsite_contact_by_id = graphene.Field( + OnsiteContact, id=graphene.String(required=True) + ) + + def resolve_get_onsite_contact_for_user_by_id(self, info, user_id): + return services["onsite_contact_service"].get_onsite_contacts_for_user_by_id( + user_id + ) + + def resolve_get_onsite_contact_by_id(self, info, id): + return services["onsite_contact_service"].get_onsite_contact_by_id(id) diff --git a/backend/app/graphql/services.py b/backend/app/graphql/services.py index d4745591..afe665db 100644 --- a/backend/app/graphql/services.py +++ b/backend/app/graphql/services.py @@ -1,4 +1,6 @@ from typing import TypedDict + +from ..services.interfaces.onsite_contact_service import IOnsiteContactService from ..services.interfaces.user_service import IUserService from ..services.interfaces.auth_service import IAuthService from ..services.interfaces.email_service import IEmailService @@ -17,6 +19,7 @@ class ServicesObject(TypedDict): auth_service: IAuthService onboarding_request_service: IOnboardingRequestService meal_request_service: IMealRequestService + onsite_contact_service: IOnsiteContactService services: ServicesObject = { @@ -25,4 +28,5 @@ class ServicesObject(TypedDict): "auth_service": None, "onboarding_request_service": None, "meal_request_service": None, -} + 
"onsite_contact_service": None, +} # type: ignore diff --git a/backend/app/graphql/types.py b/backend/app/graphql/types.py index f63bc94f..be15adc4 100644 --- a/backend/app/graphql/types.py +++ b/backend/app/graphql/types.py @@ -16,7 +16,9 @@ class QueryList(graphene.ObjectType, metaclass=LogErrors(graphene.ObjectType)): # get around this, we declare the method initially, then delete it from the # class definition once the validation logic has completed. This way, subclasses # still get the validation logic. -class Mutation(graphene.Mutation, metaclass=LogErrors(graphene.Mutation)): +class Mutation( + graphene.Mutation, metaclass=LogErrors(graphene.Mutation) +): # type: ignore def mutate(self): pass @@ -57,9 +59,11 @@ class UserInfo(graphene.ObjectType): role = graphene.String() role_info = graphene.Field(RoleInfo) primary_contact = graphene.Field(Contact) - onsite_contacts = graphene.List(Contact) + initial_onsite_contacts = graphene.List(Contact) active = graphene.Boolean() + # def resolve_onsite_contacts() + class ContactInput(graphene.InputObjectType): name = graphene.String(required=True) @@ -89,7 +93,7 @@ class UserInfoInput(graphene.InputObjectType): role = graphene.String(required=True) role_info = graphene.Field(RoleInfoInput) primary_contact = graphene.Field(ContactInput, required=True) - onsite_contacts = graphene.List(ContactInput, required=True) + initial_onsite_contacts = graphene.List(ContactInput, required=True) active = graphene.Boolean() @@ -98,6 +102,14 @@ class User(graphene.ObjectType): info = graphene.Field(UserInfo) +class OnsiteContact(graphene.ObjectType): + id = graphene.ID() + organization_id = graphene.String() + name = graphene.String() + email = graphene.String() + phone = graphene.String() + + class ASPDistance(graphene.ObjectType): id = graphene.String() info = graphene.Field(UserInfo) diff --git a/backend/app/models/meal_request.py b/backend/app/models/meal_request.py index 40b76635..2deaa118 100644 --- a/backend/app/models/meal_request.py +++ b/backend/app/models/meal_request.py @@ -2,8 +2,9 @@ from datetime import datetime from enum import Enum +from app.models.onsite_contact import OnsiteContact + from .user import User -from .user_info import Contact class MealStatus(Enum): @@ -13,7 +14,8 @@ class MealStatus(Enum): CANCELLED = "Cancelled" -MEAL_STATUSES = [status.value for status in MealStatus] +MEAL_STATUSES_ENUMS = [status for status in MealStatus] +MEAL_STATUSES_STRINGS = [status.value for status in MealStatus] # Information on the requested meals, provided by the ASP @@ -32,18 +34,34 @@ class DonationInfo(mg.EmbeddedDocument): class MealRequest(mg.Document): requestor = mg.ReferenceField(User, required=True) + # status = mg.EnumField(MealStatus, default=MealStatus.OPEN) status = mg.StringField( - choices=MEAL_STATUSES, required=True, default=MealStatus.OPEN.value + choices=MEAL_STATUSES_STRINGS, required=True, default=MealStatus.OPEN.value ) + drop_off_datetime = mg.DateTimeField(required=True) drop_off_location = mg.StringField(required=True) meal_info = mg.EmbeddedDocumentField(MealInfo, required=True) - onsite_staff = mg.EmbeddedDocumentListField(Contact, required=True) + + # https://docs.mongoengine.org/apireference.html#mongoengine.fields.ReferenceField + onsite_staff = mg.ListField( + mg.ReferenceField(OnsiteContact, required=True, reverse_delete_rule=4) + ) # 4 = PULL date_created = mg.DateTimeField(required=True, default=datetime.utcnow) date_updated = mg.DateTimeField(required=True, default=datetime.utcnow) delivery_instructions = 
mg.StringField(default=None) donation_info = mg.EmbeddedDocumentField(DonationInfo, default=None) + def validate_onsite_contacts(self): + if self.onsite_staff: + # Try to fetch the referenced document to ensure it exists, will throw an error if it doesn't + for contact in self.onsite_staff: + contact = OnsiteContact.objects(id=contact.id).first() + if not contact or contact.organization_id != self.requestor.id: + raise Exception( + f"onsite contact {contact.id} not found or not associated with the requestor's organization" + ) + def to_serializable_dict(self): """ Returns a dict representation of the document that is JSON serializable @@ -53,6 +71,11 @@ def to_serializable_dict(self): meal_request_dict = self.to_mongo().to_dict() id = meal_request_dict.pop("_id", None) meal_request_dict["id"] = str(id) + contacts = [contact.to_mongo().to_dict() for contact in self.onsite_staff] + for contact in contacts: + id = contact.pop("_id") + contact["id"] = id + meal_request_dict["onsite_staff"] = contacts return meal_request_dict meta = {"collection": "meal_requests"} diff --git a/backend/app/models/onsite_contact.py b/backend/app/models/onsite_contact.py new file mode 100644 index 00000000..a1671760 --- /dev/null +++ b/backend/app/models/onsite_contact.py @@ -0,0 +1,33 @@ +import mongoengine as mg + +from app.resources.onsite_contact_dto import OnsiteContactDTO + + +class OnsiteContact(mg.Document): + organization_id = mg.ObjectIdField(required=True) + name = mg.StringField(required=True) + email = mg.StringField(required=True) + phone = mg.StringField(required=True) + + def to_serializable_dict(self): + """ + Returns a dict representation of the document that is JSON serializable + ObjectId must be converted to a string. + """ + + dict = self.to_mongo().to_dict() + id = dict.pop("_id", None) + dict["id"] = str(id) + + # MongoDB ObjectId type is hard to work with graphQL so convert to a string + dict["organization_id"] = str(dict["organization_id"]) + + return dict + + def to_dto(self) -> OnsiteContactDTO: + return OnsiteContactDTO(**self.to_serializable_dict()) # type: ignore # + + def __str__(self): + return f"OnsiteContact({self.id}, {self.organization_id}, {self.name}, {self.email}, {self.phone})" + + meta = {"collection": "onsite_contacts"} diff --git a/backend/app/models/user.py b/backend/app/models/user.py index 6f3b3da3..d5a3ec38 100644 --- a/backend/app/models/user.py +++ b/backend/app/models/user.py @@ -1,6 +1,6 @@ import mongoengine as mg -from .user_info import UserInfo +from app.models.user_info import UserInfo class User(mg.Document): diff --git a/backend/app/models/user_info.py b/backend/app/models/user_info.py index 0aea8735..3520ba1b 100644 --- a/backend/app/models/user_info.py +++ b/backend/app/models/user_info.py @@ -44,7 +44,11 @@ class UserInfo(mg.EmbeddedDocument): role = mg.StringField(choices=USERINFO_ROLES, required=True) role_info = mg.EmbeddedDocumentField(RoleInfo) primary_contact = mg.EmbeddedDocumentField(Contact, required=True) - onsite_contacts = mg.EmbeddedDocumentListField(Contact, required=True) + + # This information is given as part of the onboarding request + # When a user actually signs up, these contacts get turned into separate OnsiteContact documents + initial_onsite_contacts = mg.EmbeddedDocumentListField(Contact, required=False) + active = mg.BooleanField(default=True) meta = {"allow_inheritance": True} diff --git a/backend/app/resources/meal_request_dto.py b/backend/app/resources/meal_request_dto.py index 3acb28de..0aed09f4 100644 --- 
a/backend/app/resources/meal_request_dto.py +++ b/backend/app/resources/meal_request_dto.py @@ -1,6 +1,6 @@ import datetime -from ..models.meal_request import MEAL_STATUSES +from ..models.meal_request import MEAL_STATUSES_STRINGS from .validate_utils import ( validate_contact, validate_donation_info, @@ -49,15 +49,17 @@ def validate(self): validate_user(self.requestor, "requestor", error_list) - if type(self.status) is not str: - error_list.append("The status supplied is not a string.") - - if self.status not in MEAL_STATUSES: - error_list.append( - "The status {self_status} is not one of {valid_statuses}".format( - self_status=self.status, valid_statuses=", ".join(MEAL_STATUSES) - ) - ) + # if type(self.status) is not str: + # error_list.append("The status supplied is not a string.") + + if self.status not in MEAL_STATUSES_STRINGS: + # TODO: revisit this + pass + # error_list.append( + # "The status {self_status} is not one of {valid_statuses}".format( + # self_status=self.status, valid_statuses=", ".join(MEAL_STATUSES) + # ) + # ) if type(self.drop_off_datetime) is not datetime.datetime: error_list.append( diff --git a/backend/app/resources/onsite_contact_dto.py b/backend/app/resources/onsite_contact_dto.py new file mode 100644 index 00000000..f43816db --- /dev/null +++ b/backend/app/resources/onsite_contact_dto.py @@ -0,0 +1,31 @@ +class OnsiteContactDTO: + def __init__( + self, id: str, organization_id: str, name: str, email: str, phone: str + ): + self.id = id + self.organization_id = organization_id + self.name = name + self.email = email + self.phone = phone + + error_list = self.validate() + if len(error_list) > 0: + error_message = "\n".join(error_list) + raise Exception(error_message) + + def validate(self): + error_list = [] + + if type(self.id) is not str: + error_list.append("The id supplied is not a string.") + + for field in [self.name, self.email, self.phone, self.organization_id]: + if type(field) is not str: + error_list.append( + f'The field "{field}" in onsite_contact {self.id} is not a string.' + ) + elif field == "": + error_list.append( + f'The field "{field}" in onsite_contact {self.id} must not be an empty string.' 
+ ) + return error_list diff --git a/backend/app/resources/validate_utils.py b/backend/app/resources/validate_utils.py index aabf510b..91aa9d59 100644 --- a/backend/app/resources/validate_utils.py +++ b/backend/app/resources/validate_utils.py @@ -79,7 +79,7 @@ def validate_userinfo(userinfo, error_list): "role", "role_info", "primary_contact", - "onsite_contacts", + "initial_onsite_contacts", "active", ] if not isinstance(userinfo, dict): @@ -94,7 +94,7 @@ def validate_userinfo(userinfo, error_list): for key, val in userinfo.items(): if key == "primary_contact": error_list = validate_contact(val, "info.primary_contact", error_list) - elif key == "onsite_contacts": + elif key == "initial_onsite_contacts": if not isinstance(val, list): error_list.append("The info.onsite_contacts supplied is not a list.") else: diff --git a/backend/app/services/implementations/meal_request_service.py b/backend/app/services/implementations/meal_request_service.py index 888b0b93..d623aaf3 100644 --- a/backend/app/services/implementations/meal_request_service.py +++ b/backend/app/services/implementations/meal_request_service.py @@ -1,4 +1,4 @@ -from ...models.user_info import Contact +from typing import List from ...models.meal_request import MealInfo, MealRequest from ..interfaces.meal_request_service import IMealRequestService from datetime import datetime @@ -22,7 +22,7 @@ def create_meal_request( drop_off_time, drop_off_location, delivery_instructions, - onsite_staff, + onsite_staff: List[str], ): try: # Create MealRequests @@ -32,6 +32,7 @@ def create_meal_request( meal_requests = [] for request_date in request_dates: + # print("creaing!") new_meal_request = MealRequest( requestor=requestor, meal_info=meal_info, @@ -40,6 +41,7 @@ def create_meal_request( delivery_instructions=delivery_instructions, onsite_staff=onsite_staff, ) + new_meal_request.validate_onsite_contacts() new_meal_request.save() meal_requests.append(new_meal_request.to_serializable_dict()) except Exception as error: @@ -49,7 +51,7 @@ def create_meal_request( def update_meal_request( self, - requestor, + requestor_id, meal_info, drop_off_datetime, drop_off_location, @@ -58,13 +60,13 @@ def update_meal_request( meal_request_id, ): original_meal_request: MealRequest = MealRequest.objects( - id=meal_request_id + id=meal_request_id, requestor=requestor_id ).first() - if not original_meal_request: - raise Exception(f"meal request with id {meal_request_id} not found") - if requestor is not None: - original_meal_request.requestor = requestor + if not original_meal_request: + raise Exception( + f"meal request with id {meal_request_id} by {requestor_id} not found" + ) if drop_off_datetime is not None: original_meal_request.drop_off_datetime = drop_off_datetime @@ -82,18 +84,18 @@ def update_meal_request( original_meal_request.delivery_instructions = delivery_instructions if onsite_staff is not None: - original_meal_request.onsite_staff = [ - Contact(name=staff.name, phone=staff.phone, email=staff.email) - for staff in onsite_staff - ] + original_meal_request.onsite_staff = onsite_staff requestor = original_meal_request.requestor + # Does validation, meal_request_dto = self.convert_meal_request_to_dto( original_meal_request, requestor ) + original_meal_request.validate_onsite_contacts() original_meal_request.save() + return meal_request_dto def commit_to_meal_request( @@ -102,7 +104,7 @@ def commit_to_meal_request( meal_request_ids: [str], meal_description: str, additional_info: str, - ) -> [MealRequestDTO]: + ) -> List[MealRequestDTO]: try: donor = 
User.objects(id=donor_id).first() if not donor: @@ -169,11 +171,12 @@ def get_meal_requests_by_requestor_id( requestor_id, min_drop_off_date, max_drop_off_date, - status, + status: List[MealStatus], offset, limit, sort_by_date_direction, ): + status_value_list = list(map(lambda stat: stat.value, status)) try: sort_prefix = "+" if sort_by_date_direction == SortDirection.DESCENDING: @@ -182,8 +185,8 @@ def get_meal_requests_by_requestor_id( requestor = User.objects(id=requestor_id).first() requests = MealRequest.objects( requestor=requestor, - status__in=status, - ).order_by(f"{sort_prefix}date_created") + status__in=status_value_list, + ).order_by(f"{sort_prefix}drop_off_datetime") # Filter results by optional parameters. # Since we want to filter these optionally (i.e. filter only if specified), @@ -212,3 +215,11 @@ def get_meal_requests_by_requestor_id( except Exception as error: self.logger.error(str(error)) raise error + + def get_meal_request_by_id(self, id: str) -> MealRequestDTO: + meal_request = MealRequest.objects(id=id).first() + meal_request_dto = self.convert_meal_request_to_dto( + meal_request, meal_request.requestor + ) + + return meal_request_dto diff --git a/backend/app/services/implementations/onboarding_request_service.py b/backend/app/services/implementations/onboarding_request_service.py index 56c32559..5fcc3764 100644 --- a/backend/app/services/implementations/onboarding_request_service.py +++ b/backend/app/services/implementations/onboarding_request_service.py @@ -36,10 +36,9 @@ def create_onboarding_request(self, userInfo: UserInfo): role=userInfo.role, role_info=userInfo.role_info, primary_contact=userInfo.primary_contact, - onsite_contacts=userInfo.onsite_contacts, + initial_onsite_contacts=userInfo.initial_onsite_contacts, active=userInfo.active, ) - # Create OnboardingRequest object new_onboarding_request = OnboardingRequest( info=user_info, @@ -112,7 +111,6 @@ def approve_onboarding_request(self, request_id): referenced_onboarding_request.status = ( ONBOARDING_REQUEST_STATUS_APPROVED # approve the onboarding request ) - referenced_onboarding_request.save() # save the changes recipient_email = referenced_onboarding_request.info.email diff --git a/backend/app/services/implementations/onsite_contact_service.py b/backend/app/services/implementations/onsite_contact_service.py new file mode 100644 index 00000000..dc981b4d --- /dev/null +++ b/backend/app/services/implementations/onsite_contact_service.py @@ -0,0 +1,80 @@ +from typing import List, Optional + +from app.models.onsite_contact import OnsiteContact +from app.resources.onsite_contact_dto import OnsiteContactDTO + +from ..interfaces.onsite_contact_service import IOnsiteContactService + +from ...models.user import User + + +class OnsiteContactService(IOnsiteContactService): + def __init__(self, logger): + self.logger = logger + + def get_onsite_contact_by_id( + self, + id: str, + ): + onsite_staff = OnsiteContact.objects(id=id).first() + if onsite_staff: + return onsite_staff.to_dto() + else: + return None + + def get_onsite_contacts_for_user_by_id( + self, + user_id: str, + ) -> List[OnsiteContactDTO]: + onsite_staff = OnsiteContact.objects(organization_id=user_id).all() + return [staff.to_dto() for staff in onsite_staff] + + def delete_onsite_contact_by_id( + self, + id: str, + ): + OnsiteContact.objects(id=id).delete() + + def update_onsite_contact_by_id( + self, + id: str, + name: Optional[str], + email: Optional[str], + phone: Optional[str], + ) -> OnsiteContactDTO: + existing: OnsiteContact = 
OnsiteContact.objects(id=id).first() + if not existing: + raise Exception(f'onsite contact with id "{id}" not found') + if name is not None: + existing.name = name + if email is not None: + existing.email = email + if phone is not None: + existing.phone = phone + existing.save() + existing.to_serializable_dict() + + return existing.to_dto() + + def create_onsite_contact( + self, organization_id: str, name: str, email: str, phone: str + ) -> OnsiteContactDTO: + try: + # Check that the organization does exist + organization = User.objects(id=organization_id).first() + if not organization: + raise Exception(f'organization with id "{organization_id}" not found') + + new_contact = OnsiteContact( + organization_id=organization_id, + name=name, + email=email, + phone=phone, + ) + new_contact.save() + + return new_contact.to_dto() + + except Exception as error: + self.logger.error(str(error)) + raise error diff --git a/backend/app/services/implementations/user_service.py b/backend/app/services/implementations/user_service.py index 39080d20..716955bd 100644 --- a/backend/app/services/implementations/user_service.py +++ b/backend/app/services/implementations/user_service.py @@ -1,4 +1,7 @@ import firebase_admin.auth +from app.models.user_info import UserInfo + +from app.services.interfaces.onsite_contact_service import IOnsiteContactService from ...models.onboarding_request import OnboardingRequest from ..interfaces.user_service import IUserService from ...models.user import User @@ -12,7 +15,7 @@ class UserService(IUserService): UserService implementation with user management methods """ - def __init__(self, logger): + def __init__(self, logger, onsite_contact_service: IOnsiteContactService): """ Create an instance of UserService @@ -20,6 +23,7 @@ def __init__(self, logger): :type logger: logger """ self.logger = logger + self.onsite_contact_service = onsite_contact_service def get_user_by_id(self, user_id): try: @@ -173,13 +177,24 @@ def create_user(self, create_user_dto): ) try: + user_info: UserInfo = ( + OnboardingRequest.objects(id=create_user_dto.request_id) + .first() + .info + ) new_user: User = User( auth_id=firebase_user.uid, - info=OnboardingRequest.objects(id=create_user_dto.request_id) - .first() - .info, + info=user_info, ) new_user.save() + for onsite_contact in user_info.initial_onsite_contacts: + self.onsite_contact_service.create_onsite_contact( + organization_id=new_user.id, + name=onsite_contact.name, + email=onsite_contact.email, + phone=onsite_contact.phone, + ) + except Exception as mongo_error: # rollback user creation in Firebase try: diff --git a/backend/app/services/interfaces/auth_service.py b/backend/app/services/interfaces/auth_service.py index 1f0f3938..db2bab2d 100644 --- a/backend/app/services/interfaces/auth_service.py +++ b/backend/app/services/interfaces/auth_service.py @@ -1,5 +1,7 @@ from abc import ABC, abstractmethod +from app.resources.auth_dto import AuthDTO + class IAuthService(ABC): """ @@ -7,7 +9,7 @@ class IAuthService(ABC): """ @abstractmethod - def generate_token(self, email, password): + def generate_token(self, email, password) -> AuthDTO: """ Generate a short-lived JWT access token and a long-lived refresh token when supplied user's email and password diff --git a/backend/app/services/interfaces/meal_request_service.py b/backend/app/services/interfaces/meal_request_service.py index 5e13e304..ccc74d1b 100644 --- a/backend/app/services/interfaces/meal_request_service.py +++ b/backend/app/services/interfaces/meal_request_service.py @@ -1,5 +1,5 @@ 
from abc import ABC, abstractmethod -from typing import Union +from typing import Union, List from ...resources.meal_request_dto import MealRequestDTO @@ -18,7 +18,7 @@ def create_meal_request( drop_off_time, drop_off_location, delivery_instructions, - onsite_staff, + onsite_staff: List[str], ): """Create a new MealRequest object and corresponding MealRequests @@ -33,12 +33,12 @@ def create_meal_request( @abstractmethod def update_meal_request( self, - requestor, + requestor_id: str, meal_info, drop_off_datetime, drop_off_location, delivery_instructions, - onsite_staff, + onsite_staff: List[str], meal_request_id, ): pass @@ -47,10 +47,17 @@ def update_meal_request( def commit_to_meal_request( self, donor_id: str, - meal_request_ids: [str], + meal_request_ids: List[str], meal_description: str, additional_info: Union[str, None], - ) -> [MealRequestDTO]: + ) -> List[MealRequestDTO]: + pass + + @abstractmethod + def get_meal_request_by_id( + self, + id: str, + ) -> MealRequestDTO: pass @abstractmethod diff --git a/backend/app/services/interfaces/onsite_contact_service.py b/backend/app/services/interfaces/onsite_contact_service.py new file mode 100644 index 00000000..6656fe33 --- /dev/null +++ b/backend/app/services/interfaces/onsite_contact_service.py @@ -0,0 +1,51 @@ +from abc import ABC, abstractmethod +from typing import Optional, List + +from ...resources.onsite_contact_dto import OnsiteContactDTO + + +class IOnsiteContactService(ABC): + """ + OnsiteContactService for handling requests relating to onsite contacts + """ + + @abstractmethod + def create_onsite_contact( + self, + organization_id: str, + name: str, + email: str, + phone: str, + ) -> OnsiteContactDTO: + pass + + @abstractmethod + def update_onsite_contact_by_id( + self, + id: str, + name: Optional[str], + email: Optional[str], + phone: Optional[str], + ) -> OnsiteContactDTO: + pass + + @abstractmethod + def delete_onsite_contact_by_id( + self, + id: str, + ): + pass + + @abstractmethod + def get_onsite_contacts_for_user_by_id( + self, + user_id: str, + ) -> List[OnsiteContactDTO]: + pass + + @abstractmethod + def get_onsite_contact_by_id( + self, + id: str, + ) -> OnsiteContactDTO: + pass diff --git a/backend/app/utilities/location_to_coordinates.py b/backend/app/utilities/location_to_coordinates.py index aab1c2ef..de83b9ab 100644 --- a/backend/app/utilities/location_to_coordinates.py +++ b/backend/app/utilities/location_to_coordinates.py @@ -1,18 +1,31 @@ +import os + import requests GEOCODE_API_URL = "https://geocode.maps.co/search" +GEOCODE_API_KEY = os.getenv("GEOCODING_API_KEY") def getGeocodeFromAddress(organization_address): + if "PYTEST_CURRENT_TEST" in os.environ: + print("MOCKING LATITUDE IN TESTING") + return [-11.1, 11.1] + response = requests.get( - '{base_url}?q="{address}"'.format( - base_url=GEOCODE_API_URL, address=organization_address + '{base_url}?q="{address}"&api_key={api_key}'.format( + base_url=GEOCODE_API_URL, + address=organization_address, + api_key=GEOCODE_API_KEY, ) ) + try: + if response.status_code != 200: + raise Exception("Failed to get coordinates from Geocode API") - response_json = response.json() - - if len(response_json) == 0: - raise Exception("Failed to get coordinates from Geocode API") - - return [float(response_json[0]["lon"]), float(response_json[0]["lat"])] + response_json = response.json() + if len(response_json) == 0: + raise Exception("Failed to get coordinates from Geocode API") + return [float(response_json[0]["lon"]), float(response_json[0]["lat"])] + except Exception as e: + 
print("Failed when getting geoencoding from address!") + raise e diff --git a/backend/mypy.ini b/backend/mypy.ini new file mode 100644 index 00000000..21d459ba --- /dev/null +++ b/backend/mypy.ini @@ -0,0 +1,6 @@ +[mypy] +ignore_missing_imports = True +exclude = (?x)( + .conftest.| + .mock_test_data. + ) \ No newline at end of file diff --git a/backend/setup.cfg b/backend/setup.cfg index 8dd399ab..8bbe463d 100644 --- a/backend/setup.cfg +++ b/backend/setup.cfg @@ -1,3 +1,4 @@ [flake8] max-line-length = 88 -extend-ignore = E203 +extend-ignore = E203,E501,W503 +exclude = /home/runner/work/feeding-canadian-kids/feeding-canadian-kids/backend/typings/graphene/relay/mutation.pyi diff --git a/backend/tests/graphql/conftest.py b/backend/tests/graphql/conftest.py index 6d7ce7e0..2791bc56 100644 --- a/backend/tests/graphql/conftest.py +++ b/backend/tests/graphql/conftest.py @@ -2,6 +2,7 @@ from app.models.user import User from app.models.onboarding_request import OnboardingRequest from app.models.meal_request import MealRequest +from app.models.onsite_contact import OnsiteContact from tests.graphql.mock_test_data import ( MOCK_INFO1_SNAKE, MOCK_INFO2_SNAKE, @@ -22,7 +23,7 @@ def graphql_schema(): yield graphql_schema -@pytest.fixture(scope="session", autouse=True) +@pytest.fixture(scope="function", autouse=True) def onboarding_request_setup(): onboarding_request_1 = OnboardingRequest( info=MOCK_INFO1_SNAKE, status="Pending" @@ -37,20 +38,32 @@ def onboarding_request_setup(): onboarding_request_2.delete() -@pytest.fixture(scope="session", autouse=True) +@pytest.fixture(scope="function", autouse=True) def user_setup(): - user_1 = User(**MOCK_USER1_SNAKE).save() - user_2 = User(**MOCK_USER2_SNAKE).save() - user_3 = User(**MOCK_USER3_SNAKE).save() + users = [] + for MOCK_USER in [MOCK_USER1_SNAKE, MOCK_USER2_SNAKE, MOCK_USER3_SNAKE]: + user = User(**MOCK_USER) + user.save() - yield user_1, user_2, user_3 + # onsite_contact_1 = OnsiteContact(**MOCK_ONSITE_CONTACT_1) + # onsite_contact_1.organization_id = user.id + # onsite_contact_1.save() - user_1.delete() - user_2.delete() - user_3.delete() + # onsite_contact_2 = OnsiteContact(**MOCK_ONSITE_CONTACT_2) + # onsite_contact_2.organization_id = user.id + # onsite_contact_2.save() + # user.info.onsite_contacts.extend([onsite_contact_1.id, onsite_contact_2.id]) + # user.save() + users.append(user) -@pytest.fixture(scope="session", autouse=True) + yield users + + for user in users: + user.delete() + + +@pytest.fixture(scope="function", autouse=True) def meal_request_setup(user_setup): requestor, donor, _ = user_setup meal_request = MealRequest(requestor=requestor, **MOCK_MEALREQUEST1_SNAKE).save() @@ -60,3 +73,30 @@ def meal_request_setup(user_setup): requestor.delete() donor.delete() meal_request.delete() + + +@pytest.fixture(scope="function", autouse=True) +def onsite_contact_setup(user_setup): + asp, donor, _ = user_setup + asp_onsite_contact = OnsiteContact( + name="Sample Contact", + email="sample@test.com", + phone="123-456-7890", + organization_id=asp.id, + ).save() + asp_onsite_contact2 = OnsiteContact( + name="Sample Contact 2", + email="sample2@test.com", + phone="123-456-7890", + organization_id=asp.id, + ).save() + donor_onsite_contact = OnsiteContact( + name="Sample Contact 2", + email="sample2@test.com", + phone="123-333-7890", + organization_id=donor.id, + ).save() + + yield asp, donor, [asp_onsite_contact, asp_onsite_contact2], donor_onsite_contact + asp_onsite_contact.delete() + donor_onsite_contact.delete() diff --git 
a/backend/tests/graphql/mock_test_data.py b/backend/tests/graphql/mock_test_data.py index 33b1421b..c88b4fc9 100644 --- a/backend/tests/graphql/mock_test_data.py +++ b/backend/tests/graphql/mock_test_data.py @@ -1,9 +1,12 @@ +# NOTE: The coodinates have been mocked out in testing MOCK_INFO1_SNAKE = { "email": "test1@organization.com", "organization_address": "255 King St N", "organization_name": "Test1 Org", "organization_desc": "Testing123", - "organization_coordinates": [-80.52565465, 43.477876300000005], + "organization_coordinates": [-11.1, 11.1], + # Real Coordinates: + # "organization_coordinates": [-80.52565465, 43.477876300000005], "role": "ASP", "role_info": { "asp_info": { @@ -16,19 +19,29 @@ "phone": "123456", "email": "jessie123@gmail.com", }, - "onsite_contacts": [ - {"name": "abc", "phone": "123-456-7890", "email": "abc@uwblueprint.org"}, - {"name": "Jane Doe", "phone": "111-222-3333", "email": "example@domain.com"}, - ], + "initial_onsite_contacts": [], "active": True, } +MOCK_ONSITE_CONTACT_1 = { + "name": "abc", + "phone": "123-456-7890", + "email": "abc@uwblueprint.org", +} +MOCK_ONSITE_CONTACT_2 = { + "name": "Jane Doe", + "phone": "111-222-3333", + "email": "example@domain.com", +} + MOCK_INFO1_CAMEL = { "email": "test1@organization.com", "organizationAddress": "255 King St N", "organizationName": "Test1 Org", "organizationDesc": "Testing123", - "organizationCoordinates": [-80.52565465, 43.477876300000005], + "organizationCoordinates": [-11.1, 11.1], + # Real Coordinates: + # "organizationCoordinates": [-80.52565465, 43.477876300000005], "role": "ASP", "roleInfo": { "aspInfo": { @@ -41,10 +54,7 @@ "phone": "123456", "email": "jessie123@gmail.com", }, - "onsiteContacts": [ - {"name": "abc", "phone": "123-456-7890", "email": "abc@uwblueprint.org"}, - {"name": "Jane Doe", "phone": "111-222-3333", "email": "example@domain.com"}, - ], + "initialOnsiteContacts": [], "active": True, } @@ -53,7 +63,9 @@ "organization_address": "370 Highland Rd W", "organization_name": "Test2 Org", "organization_desc": "Testing123", - "organization_coordinates": [-80.5118701, 43.4384664], + "organization_coordinates": [-11.1, 11.1], + # Real Coordinates: + # "organization_coordinates": [-80.5118701, 43.4384664], "role": "Donor", "role_info": { "asp_info": None, @@ -67,10 +79,11 @@ "phone": "98765", "email": "goose@gmail.com", }, - "onsite_contacts": [ - {"name": "def", "phone": "098-765-4321", "email": "def@uwblueprint.org"}, - {"name": "John Doe", "phone": "444-555-6666", "email": "elpmaxe@niamod.moc"}, - ], + "initial_onsite_contacts": [], + # "initial_onsite_contacts": [ + # {"name": "def", "phone": "098-765-4321", "email": "def@uwblueprint.org"}, + # {"name": "John Doe", "phone": "444-555-6666", "email": "elpmaxe@niamod.moc"}, + # ], "active": True, } @@ -79,7 +92,9 @@ "organizationAddress": "370 Highland Rd W", "organizationName": "Test2 Org", "organizationDesc": "Testing123", - "organizationCoordinates": [-80.5118701, 43.4384664], + "organizationCoordinates": [-11.1, 11.1], + # Real Coordinates: + # "organizationCoordinates": [-80.5118701, 43.4384664], "role": "Donor", "roleInfo": { "aspInfo": None, @@ -93,10 +108,11 @@ "phone": "98765", "email": "goose@gmail.com", }, - "onsiteContacts": [ - {"name": "def", "phone": "098-765-4321", "email": "def@uwblueprint.org"}, - {"name": "John Doe", "phone": "444-555-6666", "email": "elpmaxe@niamod.moc"}, - ], + "initialOnsiteContacts": [], + # "initialOnsiteContacts": [ + # {"name": "def", "phone": "098-765-4321", "email": "def@uwblueprint.org"}, + # 
{"name": "John Doe", "phone": "444-555-6666", "email": "elpmaxe@niamod.moc"}, + # ], "active": True, } @@ -105,7 +121,9 @@ "organization_address": "170 University Ave W", "organization_name": "Test3 Org", "organization_desc": "Testing 123", - "organization_coordinates": [-80.5373252901463, 43.472995850000004], + "organization_coordinates": [-11.1, 11.1], + # Real Coordinates: + # "organization_coordinates": [-80.5373252901463, 43.472995850000004], "role": "Admin", "role_info": None, "primary_contact": { @@ -113,10 +131,11 @@ "phone": "13579", "email": "anon@gmail.com", }, - "onsite_contacts": [ - {"name": "ghi", "phone": "135-792-4680", "email": "ghi@uwblueprint.org"}, - {"name": "Jack Doe", "phone": "777-888-999", "email": "com@domain.email"}, - ], + "initial_onsite_contacts": [], + # "initial_onsite_contacts": [ + # {"name": "ghi", "phone": "135-792-4680", "email": "ghi@uwblueprint.org"}, + # {"name": "Jack Doe", "phone": "777-888-999", "email": "com@domain.email"}, + # ], "active": False, } @@ -125,7 +144,9 @@ "organizationAddress": "170 University Ave W", "organizationName": "Test3 Org", "organizationDesc": "Testing 123", - "organizationCoordinates": [-80.5373252901463, 43.472995850000004], + # Real Coordinates: + # "organizationCoordinates": [-80.5373252901463, 43.472995850000004], + "organizationCoordinates": [-11.1, 11.1], "role": "Admin", "roleInfo": None, "primaryContact": { @@ -133,10 +154,11 @@ "phone": "13579", "email": "anon@gmail.com", }, - "onsiteContacts": [ - {"name": "ghi", "phone": "135-792-4680", "email": "ghi@uwblueprint.org"}, - {"name": "Jack Doe", "phone": "777-888-999", "email": "com@domain.email"}, - ], + "initialOnsiteContacts": [], + # "initialOnsiteContacts": [ + # {"name": "ghi", "phone": "135-792-4680", "email": "ghi@uwblueprint.org"}, + # {"name": "Jack Doe", "phone": "777-888-999", "email": "com@domain.email"}, + # ], "active": False, } @@ -163,13 +185,6 @@ "portions": 10, "dietary_restrictions": "Vegan", }, - "onsite_staff": [ - { - "name": "Test name", - "email": "test@gmail.com", - "phone": "1234567890", - } - ], "date_created": "2023-03-31T00:00:00", "date_updated": "2023-03-31T00:00:00", "delivery_instructions": "Test instructions", diff --git a/backend/tests/graphql/test_all_user_mutations.py b/backend/tests/graphql/test_all_user_mutations.py index 4473b7b1..00a9ae73 100644 --- a/backend/tests/graphql/test_all_user_mutations.py +++ b/backend/tests/graphql/test_all_user_mutations.py @@ -30,18 +30,7 @@ def test_update_user_by_id(user_setup, mocker): phone: "13579", email: "anon@gmail.com", }}, - onsiteContacts: [ - {{ - name: "ghi", - phone: "135-792-4680", - email: "ghi@uwblueprint.org" - }}, - {{ - name: "Jack Doe", - phone: "777-888-999", - email: "com@domain.email" - }}, - ], + initialOnsiteContacts: [], active: false }} ) {{ @@ -68,7 +57,7 @@ def test_update_user_by_id(user_setup, mocker): phone email }} - onsiteContacts {{ + initialOnsiteContacts {{ name phone email @@ -108,18 +97,7 @@ def test_update_user_by_id(user_setup, mocker): phone: "123456", email: "jessie123@gmail.com" }}, - onsiteContacts: [ - {{ - name: "abc", - phone: "123-456-7890", - email: "abc@uwblueprint.org" - }}, - {{ - name: "Jane Doe", - phone: "111-222-3333", - email: "example@domain.com" - }} - ], + initialOnsiteContacts: [], active: true }} ) {{ @@ -146,7 +124,7 @@ def test_update_user_by_id(user_setup, mocker): phone email }} - onsiteContacts {{ + initialOnsiteContacts {{ name phone email @@ -193,7 +171,7 @@ def test_number_of_kids_cant_be_set_negative(user_setup, mocker): 
phone: "123456", email: "jessie123@gmail.com" }}, - onsiteContacts: [ + initialOnsiteContacts: [ {{ name: "abc", phone: "123-456-7890", @@ -231,7 +209,7 @@ def test_number_of_kids_cant_be_set_negative(user_setup, mocker): phone email }} - onsiteContacts {{ + initialOnsiteContacts {{ name phone email @@ -282,7 +260,7 @@ def test_activate_user_by_id(user_setup, mocker): phone email }} - onsiteContacts {{ + initialOnsiteContacts {{ name phone email @@ -328,7 +306,7 @@ def test_deactivate_user_by_id(user_setup, mocker): phone email }} - onsiteContacts {{ + initialOnsiteContacts {{ name phone email @@ -370,7 +348,7 @@ def test_deactivate_user_by_id(user_setup, mocker): phone email }} - onsiteContacts {{ + initialOnsiteContacts {{ name phone email @@ -413,7 +391,7 @@ def test_deactivate_user_by_id(user_setup, mocker): phone email }} - onsiteContacts {{ + initialOnsiteContacts {{ name phone email diff --git a/backend/tests/graphql/test_all_user_queries.py b/backend/tests/graphql/test_all_user_queries.py index 33edca9d..bbffceb5 100644 --- a/backend/tests/graphql/test_all_user_queries.py +++ b/backend/tests/graphql/test_all_user_queries.py @@ -33,7 +33,7 @@ def test_all_users(user_setup): phone email } - onsiteContacts { + initialOnsiteContacts { name phone email @@ -45,7 +45,6 @@ def test_all_users(user_setup): ) assert len(executed.data["getAllUsers"]) == 3 - print(executed.data["getAllUsers"]) user_result1 = executed.data["getAllUsers"][0] assert user_result1["id"] == str(user_1.id) assert user_result1["info"] == MOCK_INFO1_CAMEL @@ -84,7 +83,7 @@ def test_all_users_filter_by_role(user_setup): phone email } - onsiteContacts { + initialOnsiteContacts { name phone email @@ -127,7 +126,7 @@ def test_get_user_by_id(user_setup): phone email }} - onsiteContacts {{ + initialOnsiteContacts {{ name phone email @@ -171,7 +170,7 @@ def test_get_user_by_id(user_setup): # phone # email # }} -# onsiteContacts {{ +# initialOnsiteContacts {{ # name # phone # email diff --git a/backend/tests/graphql/test_meal_request.py b/backend/tests/graphql/test_meal_request.py index 2acf9e75..1d9bd53c 100644 --- a/backend/tests/graphql/test_meal_request.py +++ b/backend/tests/graphql/test_meal_request.py @@ -8,8 +8,21 @@ """ -def test_create_meal_request(meal_request_setup): - requestor, _, _ = meal_request_setup +def compare_returned_onsite_contact(result, onsite_contact): + assert result["id"] == str(onsite_contact.id) + assert result["name"] == onsite_contact.name + assert result["email"] == onsite_contact.email + assert result["phone"] == onsite_contact.phone + assert result["organizationId"] == str(onsite_contact.organization_id) + + +def test_create_meal_request(meal_request_setup, onsite_contact_setup): + ( + asp, + donor, + [asp_onsite_contact, asp_onsite_contact2], + donor_onsite_contact, + ) = onsite_contact_setup mutation = f""" mutation testCreateMealRequest {{ @@ -21,19 +34,8 @@ def test_create_meal_request(meal_request_setup): portions: 40, dietaryRestrictions: "7 gluten free, 7 no beef", }}, - onsiteStaff: [ - {{ - name: "John Doe", - email: "john.doe@example.com", - phone: "+1234567890" - }}, - {{ - name: "Jane Smith", - email: "jane.smith@example.com", - phone: "+9876543210" - }} - ], - requestorId: "{str(requestor.id)}", + onsiteStaff: ["{asp_onsite_contact.id}", "{asp_onsite_contact2.id}"], + requestorId: "{str(asp.id)}", requestDates: [ "2023-06-01", "2023-06-02", @@ -48,15 +50,21 @@ def test_create_meal_request(meal_request_setup): portions dietaryRestrictions }} + onsiteStaff{{ + id + name + email + 
phone + organizationId + }} }} }} }} """ result = graphql_schema.execute(mutation) - assert result.errors is None - assert result.data["createMealRequest"]["mealRequests"][0]["status"] == "Open" + assert result.data["createMealRequest"]["mealRequests"][0]["status"] == "OPEN" assert ( result.data["createMealRequest"]["mealRequests"][0]["mealInfo"]["portions"] == 40 @@ -76,11 +84,72 @@ def test_create_meal_request(meal_request_setup): == "2023-06-02T16:30:00+00:00" ) + created_onsite_contacts = result.data["createMealRequest"]["mealRequests"][0][ + "onsiteStaff" + ] + expected_onsite_contacts = ( + [asp_onsite_contact, asp_onsite_contact2] + if created_onsite_contacts[0]["id"] == str(asp_onsite_contact.id) + else [asp_onsite_contact2, asp_onsite_contact] + ) + for created, expected in zip(created_onsite_contacts, expected_onsite_contacts): + assert created["id"] == str(expected.id) + assert created["name"] == str(expected.name) + assert created["email"] == str(expected.email) + assert created["phone"] == str(expected.phone) + assert created["organizationId"] == str(expected.organization_id) + # Delete the created meal requests for meal_request in result.data["createMealRequest"]["mealRequests"]: MealRequest.objects(id=meal_request["id"]).delete() +def test_create_meal_request_fails_invalid_onsite_contact( + meal_request_setup, onsite_contact_setup +): + asp, donor, asp_onsite_contact, donor_onsite_contact = onsite_contact_setup + + counter_before = MealRequest.objects().count() + mutation = f""" + mutation testCreateMealRequest {{ + createMealRequest( + deliveryInstructions: "Leave at front door", + dropOffLocation: "123 Main Street", + dropOffTime: "16:30:00Z", + mealInfo: {{ + portions: 40, + dietaryRestrictions: "7 gluten free, 7 no beef", + }}, + onsiteStaff: ["{asp_onsite_contact}, fdsfdja"], + requestorId: "{str(asp.id)}", + requestDates: [ + "2023-06-01", + "2023-06-02", + ], + ) + {{ + mealRequests {{ + status + id + dropOffDatetime + mealInfo {{ + portions + dietaryRestrictions + }} + onsiteStaff{{ + id + }} + }} + }} + }} + """ + + result = graphql_schema.execute(mutation) + assert result.errors is not None + counter_after = MealRequest.objects().count() + assert counter_before == counter_after + + # Happy path: A donor commits to fulfilling one meal request def test_commit_to_meal_request(meal_request_setup): _, donor, meal_request = meal_request_setup @@ -88,7 +157,7 @@ def test_commit_to_meal_request(meal_request_setup): mutation = f""" mutation testCommitToMealRequest {{ commitToMealRequest( - requester: "{str(donor.id)}", + requestor: "{str(donor.id)}", mealRequestIds: ["{str(meal_request.id)}"], mealDescription: "Pizza", additionalInfo: "No nuts" @@ -132,10 +201,7 @@ def test_commit_to_meal_request(meal_request_setup): assert result.errors is None # Verify that the meal request's status was updated - assert ( - result.data["commitToMealRequest"]["mealRequests"][0]["status"] - == MealStatus.UPCOMING.value - ) + assert result.data["commitToMealRequest"]["mealRequests"][0]["status"] == "UPCOMING" # Verify that the meal request's donationInfo was populated correctly assert result.data["commitToMealRequest"]["mealRequests"][0]["donationInfo"][ @@ -165,11 +231,12 @@ def test_commit_to_meal_request_fails_for_non_donor(meal_request_setup): for role in INVALID_USERINFO_ROLES: donor.info.role = role + donor.save() mutation = f""" mutation testCommitToMealRequest {{ commitToMealRequest( - requester: "{str(donor.id)}", + requestor: "{str(donor.id)}", mealRequestIds: 
["{str(meal_request.id)}"], mealDescription: "Pizza", additionalInfo: "No nuts" @@ -200,11 +267,12 @@ def test_commit_to_meal_request_fails_if_not_open(meal_request_setup): for meal_status in INVALID_MEAL_STATUSES: meal_request.status = meal_status + meal_request.save() mutation = f""" mutation testCommitToMealRequest {{ commitToMealRequest( - requester: "{str(donor.id)}", + requestor: "{str(donor.id)}", mealRequestIds: ["{str(meal_request.id)}"], mealDescription: "Pizza", additionalInfo: "No nuts" @@ -221,9 +289,13 @@ def test_commit_to_meal_request_fails_if_not_open(meal_request_setup): assert result.errors is not None -def test_update_meal_request(meal_request_setup): +def test_update_meal_request(onsite_contact_setup, meal_request_setup): + requestor, _, asp_onsite_contacts, donor_onsite_contact = onsite_contact_setup _, _, meal_request = meal_request_setup + onsite_contact1 = asp_onsite_contacts[0] + onsite_contact2 = asp_onsite_contacts[1] + updatedDateTime = "2023-10-31T16:45:00+00:00" updatedDeliveryInstructions = "Updated delivery instructions" updatedDropOffLocation = "Updated drop off location" @@ -231,13 +303,11 @@ def test_update_meal_request(meal_request_setup): "portions": 11, "dietaryRestrictions": "No nuts", } - updatedOnsiteStaff = [ - {"name": "test", "email": "test@test.com", "phone": "604-441-1171"} - ] mutation = f""" mutation testUpdateMealRequest{{ updateMealRequest( + requestorId:"{str(requestor.id)}", mealRequestId:"{meal_request.id}", deliveryInstructions:"{updatedDeliveryInstructions}", dropOffDatetime: "{updatedDateTime}", @@ -246,11 +316,7 @@ def test_update_meal_request(meal_request_setup): portions: {updatedMealInfo["portions"]}, dietaryRestrictions: "{updatedMealInfo["dietaryRestrictions"]}", }}, - onsiteStaff:[{{ - name: "{updatedOnsiteStaff[0]["name"]}", - email: "{updatedOnsiteStaff[0]["email"]}", - phone: "{updatedOnsiteStaff[0]["phone"]}" - }}] + onsiteStaff: ["{onsite_contact1.id}","{onsite_contact2.id}"] ) {{ mealRequest{{ @@ -263,9 +329,11 @@ def test_update_meal_request(meal_request_setup): dietaryRestrictions }} onsiteStaff{{ + id name email phone + organizationId }} donationInfo{{ donor{{ @@ -284,11 +352,13 @@ def test_update_meal_request(meal_request_setup): assert result.errors is None updatedMealRequest = result.data["updateMealRequest"]["mealRequest"] - assert updatedMealRequest["dropOffLocation"] == updatedDropOffLocation assert updatedMealRequest["deliveryInstructions"] == updatedDeliveryInstructions assert updatedMealRequest["mealInfo"] == updatedMealInfo - assert updatedMealRequest["onsiteStaff"] == updatedOnsiteStaff + returned_onsite_contacts = updatedMealRequest["onsiteStaff"] + compare_returned_onsite_contact(returned_onsite_contacts[0], onsite_contact1) + compare_returned_onsite_contact(returned_onsite_contacts[1], onsite_contact2) + assert updatedMealRequest["dropOffDatetime"] == updatedDateTime diff --git a/backend/tests/graphql/test_onboarding_request.py b/backend/tests/graphql/test_onboarding_request.py index acf525f4..d2815475 100644 --- a/backend/tests/graphql/test_onboarding_request.py +++ b/backend/tests/graphql/test_onboarding_request.py @@ -22,18 +22,7 @@ def test_create_onboarding_request(): phone: "13579", email: "anon@gmail.com", }, - onsiteContacts: [ - { - name: "ghi", - phone: "135-792-4680", - email: "ghi@uwblueprint.org" - }, - { - name: "Jack Doe", - phone: "777-888-999", - email: "com@domain.email" - }, - ], + initialOnsiteContacts: [], active: false } ) { @@ -60,7 +49,7 @@ def test_create_onboarding_request(): 
phone email } - onsiteContacts { + initialOnsiteContacts { name phone email @@ -104,18 +93,7 @@ def test_create_onboarding_request_with_existing_email_errors(): phone: "123456", email: "jessie123@gmail.com" }}, - onsiteContacts: [ - {{ - name: "abc", - phone: "123-456-7890", - email: "abc@uwblueprint.org" - }}, - {{ - name: "Jane Doe", - phone: "111-222-3333", - email: "example@domain.com" - }} - ], + initialOnsiteContacts: [], active: true }} ) {{ @@ -142,7 +120,7 @@ def test_create_onboarding_request_with_existing_email_errors(): phone email }} - onsiteContacts {{ + initialOnsiteContacts {{ name phone email @@ -187,7 +165,7 @@ def test_get_all_requests(onboarding_request_setup): phone email } - onsiteContacts { + initialOnsiteContacts { name phone email @@ -239,7 +217,7 @@ def test_filter_requests_by_role(onboarding_request_setup): phone email } - onsiteContacts { + initialOnsiteContacts { name phone email @@ -286,7 +264,7 @@ def test_filter_requests_by_status(onboarding_request_setup): phone email } - onsiteContacts { + initialOnsiteContacts { name phone email @@ -333,7 +311,7 @@ def test_get_requests_by_id(onboarding_request_setup): phone email }} - onsiteContacts {{ + initialOnsiteContacts {{ name phone email diff --git a/backend/tests/graphql/test_onsite_contact.py b/backend/tests/graphql/test_onsite_contact.py new file mode 100644 index 00000000..a9b69e35 --- /dev/null +++ b/backend/tests/graphql/test_onsite_contact.py @@ -0,0 +1,168 @@ +from app.graphql import schema as graphql_schema +from app.models.onsite_contact import OnsiteContact + +""" +Tests for ONsite contact and query/mutation logic +Running graphql_schema.execute(...) also tests the service logic +""" + + +def compare_returned_onsite_contact(result, onsite_contact): + assert result["id"] == str(onsite_contact.id) + assert result["name"] == onsite_contact.name + assert result["email"] == onsite_contact.email + assert result["phone"] == onsite_contact.phone + assert result["organizationId"] == str(onsite_contact.organization_id) + + +def test_create_onsite_contact(onsite_contact_setup): + asp, donor, onsite_contacts, _ = onsite_contact_setup + + mutation = f""" + mutation c{{ + createOnsiteContact( + email: "bob@test.com", + name: "Bob Cat", + organizationId: "{donor.id}", + phone: "604-433-1111", + requestorId: "{donor.id}" + ){{ + onsiteContact{{ + id + name + email + phone + organizationId + }} + }} + }} + """ + result = graphql_schema.execute(mutation) + assert result.errors is None + return_result_contact = result.data["createOnsiteContact"]["onsiteContact"] + db_result = OnsiteContact.objects( + organization_id=donor.id, id=return_result_contact["id"] + ).first() + + for contact in [return_result_contact, db_result]: + assert contact["name"] == "Bob Cat" + assert contact["email"] == "bob@test.com" + assert contact["phone"] == "604-433-1111" + + assert return_result_contact["organizationId"] == str(donor.id) + assert db_result.organization_id == donor.id + + +def test_update_onsite_contact(onsite_contact_setup): + asp, donor, asp_onsite_contacts, donor_onsite_contact = onsite_contact_setup + + # Test for the update mutation + mutation = f""" + mutation u{{ + updateOnsiteContact( + id: "{donor_onsite_contact.id}", + name: "Updated Bob Cat", + email: "updated_bob@test.com", + phone: "604-433-2222", + requestorId: "{donor.id}" + ){{ + onsiteContact{{ + id + name + email + phone + }} + }} + }} + """ + result = graphql_schema.execute(mutation) + assert result.errors is None + updated_result_contact = 
result.data["updateOnsiteContact"]["onsiteContact"] + updated_db_result = OnsiteContact.objects(id=donor_onsite_contact["id"]).first() + + for contact in [updated_result_contact, updated_db_result]: + assert contact["name"] == "Updated Bob Cat" + assert contact["email"] == "updated_bob@test.com" + assert contact["phone"] == "604-433-2222" + + +def test_delete_onsite_contact(onsite_contact_setup): + asp, donor, onsite_contacts, _ = onsite_contact_setup + onsite_contact = onsite_contacts[0] + + # Test for the delete mutation + mutation = f""" + mutation d{{ + deleteOnsiteContact( + id: "{onsite_contact.id}", + requestorId: "{asp.id}" + ){{ + success + }} + }} + """ + result = graphql_schema.execute(mutation) + assert result.errors is None + deleted_db_result = OnsiteContact.objects(id=onsite_contact["id"]).first() + + assert deleted_db_result is None + + +def test_get_onsite_contacts_for_user_by_id(onsite_contact_setup): + asp, donor, onsite_contacts, _ = onsite_contact_setup + onsite_contact = onsite_contacts[0] + + # asp = User.objects(id=asp.id).get() + # asp.info.onsite_contacts = [] + # asp.save() + + # Test for the get_onsite_contact_for_user_by_id query + query = f""" + query q{{ + getOnsiteContactForUserById( + userId: "{asp.id}" + ){{ + id + name + email + phone + }} + }} + """ + result = graphql_schema.execute(query) + assert result.errors is None + result = result.data["getOnsiteContactForUserById"][0] + + assert result["id"] == str(onsite_contact.id) + assert result["name"] == onsite_contact.name + assert result["email"] == onsite_contact.email + assert result["phone"] == onsite_contact.phone + + +def test_get_onsite_contact_by_id(onsite_contact_setup): + asp, donor, onsite_contacts, _ = onsite_contact_setup + onsite_contact = onsite_contacts[0] + + # Test for the get_onsite_contact_by_id query + query = f""" + query q{{ + getOnsiteContactById( + id: "{onsite_contact.id}" + ){{ + id + name + email + phone + organizationId + }} + }} + """ + result = graphql_schema.execute(query) + assert result.errors is None + result = result.data["getOnsiteContactById"] + + assert result["id"] == str(onsite_contact.id) + assert result["name"] == onsite_contact.name + assert result["email"] == onsite_contact.email + assert result["phone"] == onsite_contact.phone + assert result["organizationId"] == str(onsite_contact.organization_id) diff --git a/backend/tests/unit/test_models.py b/backend/tests/unit/test_models.py index 51fd9315..cf7c0b83 100644 --- a/backend/tests/unit/test_models.py +++ b/backend/tests/unit/test_models.py @@ -27,7 +27,7 @@ "phone": "123-456-7890", "email": "ansonjwhe@gmail.com", }, - "onsite_contacts": [ + "initial_onsite_contacts": [ {"name": "Abu", "phone": "123-456-7890", "email": "abu@uwblueprint.org"}, {"name": "Jane Doe", "phone": "111-222-3333", "email": "example@domain.com"}, ], @@ -81,20 +81,4 @@ def test_create_onboarding_request(): onboarding_request.info.primary_contact.email == test_user_info["primary_contact"]["email"] ) - assert len(onboarding_request.info.onsite_contacts) == len( - test_user_info["onsite_contacts"] - ) - for i in range(len(test_user_info["onsite_contacts"])): - assert ( - onboarding_request.info.onsite_contacts[i].name - == test_user_info["onsite_contacts"][i]["name"] - ) - assert ( - onboarding_request.info.onsite_contacts[i].phone - == test_user_info["onsite_contacts"][i]["phone"] - ) - assert ( - onboarding_request.info.onsite_contacts[i].email - == test_user_info["onsite_contacts"][i]["email"] - ) assert onboarding_request.info.active == 
test_user_info["active"] diff --git a/backend/typings/graphene/__init__.pyi b/backend/typings/graphene/__init__.pyi new file mode 100644 index 00000000..f939f2f5 --- /dev/null +++ b/backend/typings/graphene/__init__.pyi @@ -0,0 +1,87 @@ +from .relay import ( + ClientIDMutation as ClientIDMutation, + Connection as Connection, + ConnectionField as ConnectionField, + GlobalID as GlobalID, + Node as Node, + PageInfo as PageInfo, + is_node as is_node, +) +from .types import ( + Argument as Argument, + Base64 as Base64, + BigInt as BigInt, + Boolean as Boolean, + Context as Context, + Date as Date, + DateTime as DateTime, + Decimal as Decimal, + Dynamic as Dynamic, + Enum as Enum, + Field as Field, + Float as Float, + ID as ID, + InputField as InputField, + InputObjectType as InputObjectType, + Int as Int, + Interface as Interface, + JSONString as JSONString, + List as List, + Mutation as Mutation, + NonNull as NonNull, + ObjectType as ObjectType, + ResolveInfo as ResolveInfo, + Scalar as Scalar, + Schema as Schema, + String as String, + Time as Time, + UUID as UUID, + Union as Union, +) +from .utils.module_loading import lazy_import as lazy_import +from .utils.resolve_only_args import resolve_only_args as resolve_only_args +from _typeshed import Incomplete + +__all__ = [ + "__version__", + "Argument", + "Base64", + "BigInt", + "Boolean", + "ClientIDMutation", + "Connection", + "ConnectionField", + "Context", + "Date", + "DateTime", + "Decimal", + "Dynamic", + "Enum", + "Field", + "Float", + "GlobalID", + "ID", + "InputField", + "InputObjectType", + "Int", + "Interface", + "JSONString", + "List", + "Mutation", + "Node", + "NonNull", + "ObjectType", + "PageInfo", + "ResolveInfo", + "Scalar", + "Schema", + "String", + "Time", + "UUID", + "Union", + "is_node", + "lazy_import", + "resolve_only_args", +] + +__version__: Incomplete diff --git a/backend/typings/graphene/pyutils/__init__.pyi b/backend/typings/graphene/pyutils/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/backend/typings/graphene/pyutils/dataclasses.pyi b/backend/typings/graphene/pyutils/dataclasses.pyi new file mode 100644 index 00000000..8e789d8f --- /dev/null +++ b/backend/typings/graphene/pyutils/dataclasses.pyi @@ -0,0 +1,94 @@ +from _typeshed import Incomplete + +__all__ = [ + "dataclass", + "field", + "Field", + "FrozenInstanceError", + "InitVar", + "MISSING", + "fields", + "asdict", + "astuple", + "make_dataclass", + "replace", + "is_dataclass", +] + +class FrozenInstanceError(AttributeError): ... +class _HAS_DEFAULT_FACTORY_CLASS: ... +class _MISSING_TYPE: ... + +MISSING: Incomplete + +class _FIELD_BASE: + name: Incomplete + def __init__(self, name) -> None: ... + +class _InitVarMeta(type): + def __getitem__(self, params): ... + +class InitVar(metaclass=_InitVarMeta): ... + +class Field: + name: Incomplete + type: Incomplete + default: Incomplete + default_factory: Incomplete + init: Incomplete + repr: Incomplete + hash: Incomplete + compare: Incomplete + metadata: Incomplete + def __init__( + self, default, default_factory, init, repr, hash, compare, metadata + ) -> None: ... + def __set_name__(self, owner, name) -> None: ... + +class _DataclassParams: + init: Incomplete + repr: Incomplete + eq: Incomplete + order: Incomplete + unsafe_hash: Incomplete + frozen: Incomplete + def __init__(self, init, repr, eq, order, unsafe_hash, frozen) -> None: ... 
+ +def field( + *, + default=..., + default_factory=..., + init: bool = True, + repr: bool = True, + hash: Incomplete | None = None, + compare: bool = True, + metadata: Incomplete | None = None +): ... +def dataclass( + _cls: Incomplete | None = None, + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False +): ... +def fields(class_or_instance): ... +def is_dataclass(obj): ... +def asdict(obj, *, dict_factory=...): ... +def astuple(obj, *, tuple_factory=...): ... +def make_dataclass( + cls_name, + fields, + *, + bases=(), + namespace: Incomplete | None = None, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False +): ... +def replace(obj, **changes): ... diff --git a/backend/typings/graphene/pyutils/version.pyi b/backend/typings/graphene/pyutils/version.pyi new file mode 100644 index 00000000..ba28483b --- /dev/null +++ b/backend/typings/graphene/pyutils/version.pyi @@ -0,0 +1,7 @@ +from _typeshed import Incomplete + +def get_version(version: Incomplete | None = None): ... +def get_main_version(version: Incomplete | None = None): ... +def get_complete_version(version: Incomplete | None = None): ... +def get_docs_version(version: Incomplete | None = None): ... +def get_git_changeset(): ... diff --git a/backend/typings/graphene/relay/__init__.pyi b/backend/typings/graphene/relay/__init__.pyi new file mode 100644 index 00000000..ed99ded3 --- /dev/null +++ b/backend/typings/graphene/relay/__init__.pyi @@ -0,0 +1,17 @@ +from .connection import ( + Connection as Connection, + ConnectionField as ConnectionField, + PageInfo as PageInfo, +) +from .mutation import ClientIDMutation as ClientIDMutation +from .node import GlobalID as GlobalID, Node as Node, is_node as is_node + +__all__ = [ + "Node", + "is_node", + "GlobalID", + "ClientIDMutation", + "Connection", + "ConnectionField", + "PageInfo", +] diff --git a/backend/typings/graphene/relay/connection.pyi b/backend/typings/graphene/relay/connection.pyi new file mode 100644 index 00000000..1f615d1d --- /dev/null +++ b/backend/typings/graphene/relay/connection.pyi @@ -0,0 +1,54 @@ +from ..types import ( + Boolean as Boolean, + Enum as Enum, + Int as Int, + Interface as Interface, + List as List, + NonNull as NonNull, + Scalar as Scalar, + String as String, + Union as Union, +) +from ..types.field import Field as Field +from ..types.objecttype import ( + ObjectType as ObjectType, + ObjectTypeOptions as ObjectTypeOptions, +) +from ..utils.thenables import maybe_thenable as maybe_thenable +from .node import is_node as is_node +from _typeshed import Incomplete + +class PageInfo(ObjectType): + class Meta: + description: str + has_next_page: Incomplete + has_previous_page: Incomplete + start_cursor: Incomplete + end_cursor: Incomplete + +def page_info_adapter(startCursor, endCursor, hasPreviousPage, hasNextPage): ... + +class ConnectionOptions(ObjectTypeOptions): + node: Incomplete + +class Connection(ObjectType): + class Meta: + abstract: bool + @classmethod + def __init_subclass_with_meta__( + cls, node: Incomplete | None = None, name: Incomplete | None = None, **options + ): ... + +def connection_adapter(cls, edges, pageInfo): ... + +class IterableConnectionField(Field): + def __init__(self, type_, *args, **kwargs) -> None: ... + @property + def type(self): ... + @classmethod + def resolve_connection(cls, connection_type, args, resolved): ... 
+ @classmethod + def connection_resolver(cls, resolver, connection_type, root, info, **args): ... + def wrap_resolve(self, parent_resolver): ... + +ConnectionField = IterableConnectionField diff --git a/backend/typings/graphene/relay/mutation.pyi b/backend/typings/graphene/relay/mutation.pyi new file mode 100644 index 00000000..7fd7cd52 --- /dev/null +++ b/backend/typings/graphene/relay/mutation.pyi @@ -0,0 +1,19 @@ +from ..types import Field as Field, InputObjectType as InputObjectType, String as String +from ..types.mutation import Mutation as Mutation +from ..utils.thenables import maybe_thenable as maybe_thenable +from _typeshed import Incomplete + +class ClientIDMutation(Mutation): + class Meta: + abstract: bool + @classmethod + def __init_subclass_with_meta__( + cls, + output: Incomplete | None = None, + input_fields: Incomplete | None = None, + arguments: Incomplete | None = None, + name: Incomplete | None = None, + **options + ) -> None: ... + @classmethod + def mutate(cls, root, info, input): ... diff --git a/backend/typings/graphene/relay/node.pyi b/backend/typings/graphene/relay/node.pyi new file mode 100644 index 00000000..cb747c6b --- /dev/null +++ b/backend/typings/graphene/relay/node.pyi @@ -0,0 +1,59 @@ +from ..types import ( + Field as Field, + ID as ID, + Interface as Interface, + ObjectType as ObjectType, +) +from ..types.interface import InterfaceOptions as InterfaceOptions +from ..types.utils import get_type as get_type +from _typeshed import Incomplete + +def is_node(objecttype): ... + +class GlobalID(Field): + node: Incomplete + parent_type_name: Incomplete + def __init__( + self, + node: Incomplete | None = None, + parent_type: Incomplete | None = None, + required: bool = True, + *args, + **kwargs + ) -> None: ... + @staticmethod + def id_resolver( + parent_resolver, + node, + root, + info, + parent_type_name: Incomplete | None = None, + **args + ): ... + def wrap_resolve(self, parent_resolver): ... + +class NodeField(Field): + node_type: Incomplete + field_type: Incomplete + def __init__(self, node, type_: bool = False, **kwargs) -> None: ... + def wrap_resolve(self, parent_resolver): ... + +class AbstractNode(Interface): + class Meta: + abstract: bool + @classmethod + def __init_subclass_with_meta__(cls, **options) -> None: ... + +class Node(AbstractNode): + @classmethod + def Field(cls, *args, **kwargs): ... + @classmethod + def node_resolver(cls, only_type, root, info, id): ... + @classmethod + def get_node_from_global_id( + cls, info, global_id, only_type: Incomplete | None = None + ): ... + @classmethod + def from_global_id(cls, global_id): ... + @classmethod + def to_global_id(cls, type_, id): ... 
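The .pyi files being added under backend/typings are handwritten stubs that give mypy type information for graphene, which presumably does not ship annotations the checker can use on its own. As a rough, hypothetical illustration (the class below is invented and not part of this patch), the stubs are what let ordinary graphene declarations type-check instead of collapsing to Any:

import graphene

# Hypothetical example only: with the backend/typings stubs on mypy's search
# path, ObjectType, ID, and String resolve to the stubbed signatures above
# rather than to Any.
class OnsiteContactNode(graphene.ObjectType):
    id = graphene.ID(required=True)
    name = graphene.String()
    email = graphene.String()
    phone = graphene.String()
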
diff --git a/backend/typings/graphene/types/__init__.pyi b/backend/typings/graphene/types/__init__.pyi new file mode 100644 index 00000000..e6e94ffe --- /dev/null +++ b/backend/typings/graphene/types/__init__.pyi @@ -0,0 +1,60 @@ +from .argument import Argument as Argument +from .base64 import Base64 as Base64 +from .context import Context as Context +from .datetime import Date as Date, DateTime as DateTime, Time as Time +from .decimal import Decimal as Decimal +from .dynamic import Dynamic as Dynamic +from .enum import Enum as Enum +from .field import Field as Field +from .inputfield import InputField as InputField +from .inputobjecttype import InputObjectType as InputObjectType +from .interface import Interface as Interface +from .json import JSONString as JSONString +from .mutation import Mutation as Mutation +from .objecttype import ObjectType as ObjectType +from .scalars import ( + BigInt as BigInt, + Boolean as Boolean, + Float as Float, + ID as ID, + Int as Int, + Scalar as Scalar, + String as String, +) +from .schema import Schema as Schema +from .structures import List as List, NonNull as NonNull +from .union import Union as Union +from .uuid import UUID as UUID +from graphql import GraphQLResolveInfo as ResolveInfo + +__all__ = [ + "Argument", + "Base64", + "BigInt", + "Boolean", + "Context", + "Date", + "DateTime", + "Decimal", + "Dynamic", + "Enum", + "Field", + "Float", + "ID", + "InputField", + "InputObjectType", + "Int", + "Interface", + "JSONString", + "List", + "Mutation", + "NonNull", + "ObjectType", + "ResolveInfo", + "Scalar", + "Schema", + "String", + "Time", + "UUID", + "Union", +] diff --git a/backend/typings/graphene/types/argument.pyi b/backend/typings/graphene/types/argument.pyi new file mode 100644 index 00000000..44c189f5 --- /dev/null +++ b/backend/typings/graphene/types/argument.pyi @@ -0,0 +1,24 @@ +from .dynamic import Dynamic as Dynamic +from .mountedtype import MountedType as MountedType +from .structures import NonNull as NonNull +from .utils import get_type as get_type +from _typeshed import Incomplete + +class Argument(MountedType): + name: Incomplete + default_value: Incomplete + description: Incomplete + def __init__( + self, + type_, + default_value=..., + description: Incomplete | None = None, + name: Incomplete | None = None, + required: bool = False, + _creation_counter: Incomplete | None = None, + ) -> None: ... + @property + def type(self): ... + def __eq__(self, other): ... + +def to_arguments(args, extra_args: Incomplete | None = None): ... diff --git a/backend/typings/graphene/types/base.pyi b/backend/typings/graphene/types/base.pyi new file mode 100644 index 00000000..de6fd2ff --- /dev/null +++ b/backend/typings/graphene/types/base.pyi @@ -0,0 +1,28 @@ +from ..utils.subclass_with_meta import ( + SubclassWithMeta as SubclassWithMeta, + SubclassWithMeta_Meta as SubclassWithMeta_Meta, +) +from ..utils.trim_docstring import trim_docstring as trim_docstring +from _typeshed import Incomplete + +class BaseOptions: + name: str + description: str + class_type: Incomplete + def __init__(self, class_type) -> None: ... + def freeze(self) -> None: ... + def __setattr__(self, name, value) -> None: ... + +BaseTypeMeta = SubclassWithMeta_Meta + +class BaseType(SubclassWithMeta): + @classmethod + def create_type(cls, class_name, **options): ... + @classmethod + def __init_subclass_with_meta__( + cls, + name: Incomplete | None = None, + description: Incomplete | None = None, + _meta: Incomplete | None = None, + **_kwargs + ) -> None: ... 
diff --git a/backend/typings/graphene/types/base64.pyi b/backend/typings/graphene/types/base64.pyi new file mode 100644 index 00000000..f6e6d13e --- /dev/null +++ b/backend/typings/graphene/types/base64.pyi @@ -0,0 +1,10 @@ +from .scalars import Scalar as Scalar +from _typeshed import Incomplete + +class Base64(Scalar): + @staticmethod + def serialize(value): ... + @classmethod + def parse_literal(cls, node, _variables: Incomplete | None = None): ... + @staticmethod + def parse_value(value): ... diff --git a/backend/typings/graphene/types/context.pyi b/backend/typings/graphene/types/context.pyi new file mode 100644 index 00000000..97effe22 --- /dev/null +++ b/backend/typings/graphene/types/context.pyi @@ -0,0 +1,2 @@ +class Context: + def __init__(self, **params) -> None: ... diff --git a/backend/typings/graphene/types/datetime.pyi b/backend/typings/graphene/types/datetime.pyi new file mode 100644 index 00000000..d6dcffe9 --- /dev/null +++ b/backend/typings/graphene/types/datetime.pyi @@ -0,0 +1,26 @@ +from .scalars import Scalar as Scalar +from _typeshed import Incomplete + +class Date(Scalar): + @staticmethod + def serialize(date): ... + @classmethod + def parse_literal(cls, node, _variables: Incomplete | None = None): ... + @staticmethod + def parse_value(value): ... + +class DateTime(Scalar): + @staticmethod + def serialize(dt): ... + @classmethod + def parse_literal(cls, node, _variables: Incomplete | None = None): ... + @staticmethod + def parse_value(value): ... + +class Time(Scalar): + @staticmethod + def serialize(time): ... + @classmethod + def parse_literal(cls, node, _variables: Incomplete | None = None): ... + @classmethod + def parse_value(cls, value): ... diff --git a/backend/typings/graphene/types/decimal.pyi b/backend/typings/graphene/types/decimal.pyi new file mode 100644 index 00000000..26236e30 --- /dev/null +++ b/backend/typings/graphene/types/decimal.pyi @@ -0,0 +1,10 @@ +from .scalars import Scalar as Scalar +from _typeshed import Incomplete + +class Decimal(Scalar): + @staticmethod + def serialize(dec): ... + @classmethod + def parse_literal(cls, node, _variables: Incomplete | None = None): ... + @staticmethod + def parse_value(value): ... diff --git a/backend/typings/graphene/types/definitions.pyi b/backend/typings/graphene/types/definitions.pyi new file mode 100644 index 00000000..0946a001 --- /dev/null +++ b/backend/typings/graphene/types/definitions.pyi @@ -0,0 +1,23 @@ +from _typeshed import Incomplete +from graphql import ( + GraphQLEnumType, + GraphQLInputObjectType, + GraphQLInterfaceType, + GraphQLObjectType, + GraphQLScalarType, + GraphQLUnionType, +) + +class GrapheneGraphQLType: + graphene_type: Incomplete + def __init__(self, *args, **kwargs) -> None: ... + +class GrapheneInterfaceType(GrapheneGraphQLType, GraphQLInterfaceType): ... +class GrapheneUnionType(GrapheneGraphQLType, GraphQLUnionType): ... +class GrapheneObjectType(GrapheneGraphQLType, GraphQLObjectType): ... +class GrapheneScalarType(GrapheneGraphQLType, GraphQLScalarType): ... + +class GrapheneEnumType(GrapheneGraphQLType, GraphQLEnumType): + def serialize(self, value): ... + +class GrapheneInputObjectType(GrapheneGraphQLType, GraphQLInputObjectType): ... 
diff --git a/backend/typings/graphene/types/dynamic.pyi b/backend/typings/graphene/types/dynamic.pyi new file mode 100644 index 00000000..46c3248b --- /dev/null +++ b/backend/typings/graphene/types/dynamic.pyi @@ -0,0 +1,13 @@ +from .mountedtype import MountedType as MountedType +from _typeshed import Incomplete + +class Dynamic(MountedType): + type: Incomplete + with_schema: Incomplete + def __init__( + self, + type_, + with_schema: bool = False, + _creation_counter: Incomplete | None = None, + ) -> None: ... + def get_type(self, schema: Incomplete | None = None): ... diff --git a/backend/typings/graphene/types/enum.pyi b/backend/typings/graphene/types/enum.pyi new file mode 100644 index 00000000..2b67fd3e --- /dev/null +++ b/backend/typings/graphene/types/enum.pyi @@ -0,0 +1,36 @@ +from .base import BaseOptions as BaseOptions, BaseType as BaseType +from .unmountedtype import UnmountedType as UnmountedType +from _typeshed import Incomplete +from graphene.utils.subclass_with_meta import ( + SubclassWithMeta_Meta as SubclassWithMeta_Meta, +) + +def eq_enum(self, other): ... + +EnumType: Incomplete + +class EnumOptions(BaseOptions): + enum: Enum + deprecation_reason: Incomplete + +class EnumMeta(SubclassWithMeta_Meta): + def __new__(cls, name_, bases, classdict, **options): ... + def get(cls, value): ... + def __getitem__(cls, value): ... + def __prepare__(name, bases, **kwargs): ... + def __call__(cls, *args, **kwargs): ... + def from_enum( + cls, + enum, + name: Incomplete | None = None, + description: Incomplete | None = None, + deprecation_reason: Incomplete | None = None, + ): ... + +class Enum(UnmountedType, BaseType, metaclass=EnumMeta): + @classmethod + def __init_subclass_with_meta__( + cls, enum: Incomplete | None = None, _meta: Incomplete | None = None, **options + ) -> None: ... + @classmethod + def get_type(cls): ... diff --git a/backend/typings/graphene/types/field.pyi b/backend/typings/graphene/types/field.pyi new file mode 100644 index 00000000..96c54211 --- /dev/null +++ b/backend/typings/graphene/types/field.pyi @@ -0,0 +1,39 @@ +from ..utils.deprecated import warn_deprecation as warn_deprecation +from .argument import Argument as Argument, to_arguments as to_arguments +from .mountedtype import MountedType as MountedType +from .resolver import default_resolver as default_resolver +from .structures import NonNull as NonNull +from .unmountedtype import UnmountedType as UnmountedType +from .utils import get_type as get_type +from _typeshed import Incomplete + +base_type = type + +def source_resolver(source, root, info, **args): ... + +class Field(MountedType): + name: Incomplete + args: Incomplete + resolver: Incomplete + deprecation_reason: Incomplete + description: Incomplete + default_value: Incomplete + def __init__( + self, + type_, + args: Incomplete | None = None, + resolver: Incomplete | None = None, + source: Incomplete | None = None, + deprecation_reason: Incomplete | None = None, + name: Incomplete | None = None, + description: Incomplete | None = None, + required: bool = False, + _creation_counter: Incomplete | None = None, + default_value: Incomplete | None = None, + **extra_args + ) -> None: ... + @property + def type(self): ... + get_resolver: Incomplete + def wrap_resolve(self, parent_resolver): ... + def wrap_subscribe(self, parent_subscribe): ... 
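The Enum stub just above appears to matter for the updated meal request tests, which now expect enum names such as "OPEN" and "UPCOMING" in GraphQL responses rather than raw status strings. A minimal, hypothetical sketch of that pattern (the class name and values are invented, not taken from this patch):

import graphene

# Hypothetical sketch: exposing a status as a graphene Enum makes queries
# return the member name ("OPEN"), which is the string the tests assert on.
class MealStatusEnum(graphene.Enum):
    OPEN = "Open"
    UPCOMING = "Upcoming"
    CLOSED = "Closed"
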
diff --git a/backend/typings/graphene/types/generic.pyi b/backend/typings/graphene/types/generic.pyi new file mode 100644 index 00000000..38b6b7ab --- /dev/null +++ b/backend/typings/graphene/types/generic.pyi @@ -0,0 +1,11 @@ +from .scalars import Scalar as Scalar +from _typeshed import Incomplete +from graphene.types.scalars import MAX_INT as MAX_INT, MIN_INT as MIN_INT + +class GenericScalar(Scalar): + @staticmethod + def identity(value): ... + serialize = identity + parse_value = identity + @staticmethod + def parse_literal(ast, _variables: Incomplete | None = None): ... diff --git a/backend/typings/graphene/types/inputfield.pyi b/backend/typings/graphene/types/inputfield.pyi new file mode 100644 index 00000000..795ce7e7 --- /dev/null +++ b/backend/typings/graphene/types/inputfield.pyi @@ -0,0 +1,23 @@ +from .mountedtype import MountedType as MountedType +from .structures import NonNull as NonNull +from .utils import get_type as get_type +from _typeshed import Incomplete + +class InputField(MountedType): + name: Incomplete + deprecation_reason: Incomplete + default_value: Incomplete + description: Incomplete + def __init__( + self, + type_, + name: Incomplete | None = None, + default_value=..., + deprecation_reason: Incomplete | None = None, + description: Incomplete | None = None, + required: bool = False, + _creation_counter: Incomplete | None = None, + **extra_args + ) -> None: ... + @property + def type(self): ... diff --git a/backend/typings/graphene/types/inputobjecttype.pyi b/backend/typings/graphene/types/inputobjecttype.pyi new file mode 100644 index 00000000..02be9410 --- /dev/null +++ b/backend/typings/graphene/types/inputobjecttype.pyi @@ -0,0 +1,29 @@ +from .base import BaseOptions as BaseOptions, BaseType as BaseType +from .inputfield import InputField as InputField +from .unmountedtype import UnmountedType as UnmountedType +from .utils import yank_fields_from_attrs as yank_fields_from_attrs +from _typeshed import Incomplete +from typing import Dict + +MYPY: bool + +class InputObjectTypeOptions(BaseOptions): + fields: Dict[str, InputField] + container: InputObjectTypeContainer + +class InputObjectTypeContainer(dict, BaseType): + class Meta: + abstract: bool + def __init__(self, *args, **kwargs) -> None: ... + def __init_subclass__(cls, *args, **kwargs) -> None: ... + +class InputObjectType(UnmountedType, BaseType): + @classmethod + def __init_subclass_with_meta__( + cls, + container: Incomplete | None = None, + _meta: Incomplete | None = None, + **options + ) -> None: ... + @classmethod + def get_type(cls): ... diff --git a/backend/typings/graphene/types/interface.pyi b/backend/typings/graphene/types/interface.pyi new file mode 100644 index 00000000..a912e281 --- /dev/null +++ b/backend/typings/graphene/types/interface.pyi @@ -0,0 +1,20 @@ +from .base import BaseOptions as BaseOptions, BaseType as BaseType +from .field import Field as Field +from .utils import yank_fields_from_attrs as yank_fields_from_attrs +from _typeshed import Incomplete +from typing import Dict, Iterable, Type + +MYPY: bool + +class InterfaceOptions(BaseOptions): + fields: Dict[str, Field] + interfaces: Iterable[Type[Interface]] + +class Interface(BaseType): + @classmethod + def __init_subclass_with_meta__( + cls, _meta: Incomplete | None = None, interfaces=(), **options + ) -> None: ... + @classmethod + def resolve_type(cls, instance, info): ... + def __init__(self, *args, **kwargs) -> None: ... 
diff --git a/backend/typings/graphene/types/json.pyi b/backend/typings/graphene/types/json.pyi new file mode 100644 index 00000000..c055e0d5 --- /dev/null +++ b/backend/typings/graphene/types/json.pyi @@ -0,0 +1,10 @@ +from .scalars import Scalar as Scalar +from _typeshed import Incomplete + +class JSONString(Scalar): + @staticmethod + def serialize(dt): ... + @staticmethod + def parse_literal(node, _variables: Incomplete | None = None): ... + @staticmethod + def parse_value(value): ... diff --git a/backend/typings/graphene/types/mountedtype.pyi b/backend/typings/graphene/types/mountedtype.pyi new file mode 100644 index 00000000..7fafb514 --- /dev/null +++ b/backend/typings/graphene/types/mountedtype.pyi @@ -0,0 +1,6 @@ +from ..utils.orderedtype import OrderedType as OrderedType +from .unmountedtype import UnmountedType as UnmountedType + +class MountedType(OrderedType): + @classmethod + def mounted(cls, unmounted): ... diff --git a/backend/typings/graphene/types/mutation.pyi b/backend/typings/graphene/types/mutation.pyi new file mode 100644 index 00000000..1e692c1b --- /dev/null +++ b/backend/typings/graphene/types/mutation.pyi @@ -0,0 +1,38 @@ +from ..utils.deprecated import warn_deprecation as warn_deprecation +from ..utils.get_unbound_function import get_unbound_function as get_unbound_function +from ..utils.props import props as props +from .argument import Argument as Argument +from .field import Field as Field +from .interface import Interface as Interface +from .objecttype import ObjectType as ObjectType, ObjectTypeOptions as ObjectTypeOptions +from .utils import yank_fields_from_attrs as yank_fields_from_attrs +from _typeshed import Incomplete +from typing import Callable, Dict, Iterable, Type + +MYPY: bool + +class MutationOptions(ObjectTypeOptions): + arguments: Dict[str, Argument] + output: Type[ObjectType] + resolver: Callable + interfaces: Iterable[Type[Interface]] + +class Mutation(ObjectType): + @classmethod + def __init_subclass_with_meta__( + cls, + interfaces=(), + resolver: Incomplete | None = None, + output: Incomplete | None = None, + arguments: Incomplete | None = None, + _meta: Incomplete | None = None, + **options + ) -> None: ... + @classmethod + def Field( + cls, + name: Incomplete | None = None, + description: Incomplete | None = None, + deprecation_reason: Incomplete | None = None, + required: bool = False, + ): ... diff --git a/backend/typings/graphene/types/objecttype.pyi b/backend/typings/graphene/types/objecttype.pyi new file mode 100644 index 00000000..e9131ced --- /dev/null +++ b/backend/typings/graphene/types/objecttype.pyi @@ -0,0 +1,32 @@ +from ..pyutils.dataclasses import field as field, make_dataclass as make_dataclass +from .base import ( + BaseOptions as BaseOptions, + BaseType as BaseType, + BaseTypeMeta as BaseTypeMeta, +) +from .field import Field as Field +from .interface import Interface as Interface +from .utils import yank_fields_from_attrs as yank_fields_from_attrs +from _typeshed import Incomplete +from typing import Dict, Iterable, Type + +MYPY: bool + +class ObjectTypeOptions(BaseOptions): + fields: Dict[str, Field] + interfaces: Iterable[Type[Interface]] + +class ObjectTypeMeta(BaseTypeMeta): + def __new__(cls, name_, bases, namespace, **options): ... + +class ObjectType(BaseType, metaclass=ObjectTypeMeta): + @classmethod + def __init_subclass_with_meta__( + cls, + interfaces=(), + possible_types=(), + default_resolver: Incomplete | None = None, + _meta: Incomplete | None = None, + **options + ) -> None: ... 
+ is_type_of: Incomplete diff --git a/backend/typings/graphene/types/resolver.pyi b/backend/typings/graphene/types/resolver.pyi new file mode 100644 index 00000000..e0a8c72d --- /dev/null +++ b/backend/typings/graphene/types/resolver.pyi @@ -0,0 +1,8 @@ +def attr_resolver(attname, default_value, root, info, **args): ... +def dict_resolver(attname, default_value, root, info, **args): ... +def dict_or_attr_resolver(attname, default_value, root, info, **args): ... + +default_resolver = dict_or_attr_resolver + +def set_default_resolver(resolver) -> None: ... +def get_default_resolver(): ... diff --git a/backend/typings/graphene/types/scalars.pyi b/backend/typings/graphene/types/scalars.pyi new file mode 100644 index 00000000..ae3ae45b --- /dev/null +++ b/backend/typings/graphene/types/scalars.pyi @@ -0,0 +1,62 @@ +from .base import BaseOptions as BaseOptions, BaseType as BaseType +from .unmountedtype import UnmountedType as UnmountedType +from _typeshed import Incomplete +from typing import Any + +class ScalarOptions(BaseOptions): ... + +class Scalar(UnmountedType, BaseType): + @classmethod + def __init_subclass_with_meta__(cls, **options) -> None: ... + serialize: Incomplete + parse_value: Incomplete + parse_literal: Incomplete + @classmethod + def get_type(cls): ... + +MAX_INT: int +MIN_INT: int + +class Int(Scalar): + @staticmethod + def coerce_int(value): ... + serialize = coerce_int + parse_value = coerce_int + @staticmethod + def parse_literal(ast, _variables: Incomplete | None = None): ... + +class BigInt(Scalar): + @staticmethod + def coerce_int(value): ... + serialize = coerce_int + parse_value = coerce_int + @staticmethod + def parse_literal(ast, _variables: Incomplete | None = None): ... + +class Float(Scalar): + @staticmethod + def coerce_float(value: Any) -> float: ... + serialize = coerce_float + parse_value = coerce_float + @staticmethod + def parse_literal(ast, _variables: Incomplete | None = None): ... + +class String(Scalar): + @staticmethod + def coerce_string(value): ... + serialize = coerce_string + parse_value = coerce_string + @staticmethod + def parse_literal(ast, _variables: Incomplete | None = None): ... + +class Boolean(Scalar): + serialize = bool + parse_value = bool + @staticmethod + def parse_literal(ast, _variables: Incomplete | None = None): ... + +class ID(Scalar): + serialize = str + parse_value = str + @staticmethod + def parse_literal(ast, _variables: Incomplete | None = None): ... 
diff --git a/backend/typings/graphene/types/schema.pyi b/backend/typings/graphene/types/schema.pyi new file mode 100644 index 00000000..8c38df4d --- /dev/null +++ b/backend/typings/graphene/types/schema.pyi @@ -0,0 +1,89 @@ +from ..utils.get_unbound_function import get_unbound_function as get_unbound_function +from ..utils.str_converters import to_camel_case as to_camel_case +from .definitions import ( + GrapheneEnumType as GrapheneEnumType, + GrapheneGraphQLType as GrapheneGraphQLType, + GrapheneInputObjectType as GrapheneInputObjectType, + GrapheneInterfaceType as GrapheneInterfaceType, + GrapheneObjectType as GrapheneObjectType, + GrapheneScalarType as GrapheneScalarType, + GrapheneUnionType as GrapheneUnionType, +) +from .dynamic import Dynamic as Dynamic +from .enum import Enum as Enum +from .field import Field as Field +from .inputobjecttype import InputObjectType as InputObjectType +from .interface import Interface as Interface +from .objecttype import ObjectType as ObjectType +from .resolver import get_default_resolver as get_default_resolver +from .scalars import ( + Boolean as Boolean, + Float as Float, + ID as ID, + Int as Int, + Scalar as Scalar, + String as String, +) +from .structures import List as List, NonNull as NonNull +from .union import Union as Union +from .utils import get_field_as as get_field_as +from _typeshed import Incomplete + +introspection_query: Incomplete +IntrospectionSchema: Incomplete + +def assert_valid_root_type(type_) -> None: ... +def is_graphene_type(type_): ... +def is_type_of_from_possible_types(possible_types, root, _info): ... +def identity_resolve(root, info, **arguments): ... + +class TypeMap(dict): + auto_camelcase: Incomplete + query: Incomplete + mutation: Incomplete + subscription: Incomplete + types: Incomplete + def __init__( + self, + query: Incomplete | None = None, + mutation: Incomplete | None = None, + subscription: Incomplete | None = None, + types: Incomplete | None = None, + auto_camelcase: bool = True, + ) -> None: ... + def add_type(self, graphene_type): ... + @staticmethod + def create_scalar(graphene_type): ... + @staticmethod + def create_enum(graphene_type): ... + def create_objecttype(self, graphene_type): ... + def create_interface(self, graphene_type): ... + def create_inputobjecttype(self, graphene_type): ... + def construct_union(self, graphene_type): ... + def get_name(self, name): ... + def create_fields_for_type(self, graphene_type, is_input_type: bool = False): ... + def get_function_for_type(self, graphene_type, func_name, name, default_value): ... + def resolve_type(self, resolve_type_func, type_name, root, info, _type): ... + +class Schema: + query: Incomplete + mutation: Incomplete + subscription: Incomplete + graphql_schema: Incomplete + def __init__( + self, + query: Incomplete | None = None, + mutation: Incomplete | None = None, + subscription: Incomplete | None = None, + types: Incomplete | None = None, + directives: Incomplete | None = None, + auto_camelcase: bool = True, + ) -> None: ... + def __getattr__(self, type_name): ... + def lazy(self, _type): ... + def execute(self, *args, **kwargs): ... + async def execute_async(self, *args, **kwargs): ... + async def subscribe(self, query, *args, **kwargs): ... + def introspect(self): ... + +def normalize_execute_kwargs(kwargs): ... 
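The Schema stub above covers the execute entry point the new tests lean on: they import schema from app.graphql and run query/mutation strings directly against it. A minimal sketch of that pattern (the contact id is a placeholder, and a configured MongoDB connection is assumed, as in the test suite):

from app.graphql import schema as graphql_schema

contact_id = "REPLACE_WITH_EXISTING_ID"  # placeholder for an OnsiteContact id
result = graphql_schema.execute(
    f'query {{ getOnsiteContactById(id: "{contact_id}") {{ id name email phone }} }}'
)
assert result.errors is None  # the tests in this patch check errors the same way
print(result.data["getOnsiteContactById"])
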
diff --git a/backend/typings/graphene/types/structures.pyi b/backend/typings/graphene/types/structures.pyi new file mode 100644 index 00000000..809b5564 --- /dev/null +++ b/backend/typings/graphene/types/structures.pyi @@ -0,0 +1,15 @@ +from .unmountedtype import UnmountedType as UnmountedType +from .utils import get_type as get_type + +class Structure(UnmountedType): + def __init__(self, of_type, *args, **kwargs) -> None: ... + @property + def of_type(self): ... + def get_type(self): ... + +class List(Structure): + def __eq__(self, other): ... + +class NonNull(Structure): + def __init__(self, *args, **kwargs) -> None: ... + def __eq__(self, other): ... diff --git a/backend/typings/graphene/types/union.pyi b/backend/typings/graphene/types/union.pyi new file mode 100644 index 00000000..1d17fdcd --- /dev/null +++ b/backend/typings/graphene/types/union.pyi @@ -0,0 +1,20 @@ +from .base import BaseOptions as BaseOptions, BaseType as BaseType +from .objecttype import ObjectType as ObjectType +from .unmountedtype import UnmountedType as UnmountedType +from _typeshed import Incomplete +from typing import Iterable, Type + +MYPY: bool + +class UnionOptions(BaseOptions): + types: Iterable[Type[ObjectType]] + +class Union(UnmountedType, BaseType): + @classmethod + def __init_subclass_with_meta__( + cls, types: Incomplete | None = None, **options + ) -> None: ... + @classmethod + def get_type(cls): ... + @classmethod + def resolve_type(cls, instance, info): ... diff --git a/backend/typings/graphene/types/unmountedtype.pyi b/backend/typings/graphene/types/unmountedtype.pyi new file mode 100644 index 00000000..52b48781 --- /dev/null +++ b/backend/typings/graphene/types/unmountedtype.pyi @@ -0,0 +1,13 @@ +from ..utils.orderedtype import OrderedType as OrderedType +from _typeshed import Incomplete + +class UnmountedType(OrderedType): + args: Incomplete + kwargs: Incomplete + def __init__(self, *args, **kwargs) -> None: ... + def get_type(self) -> None: ... + def mount_as(self, _as): ... + def Field(self): ... + def InputField(self): ... + def Argument(self): ... + def __eq__(self, other): ... diff --git a/backend/typings/graphene/types/utils.pyi b/backend/typings/graphene/types/utils.pyi new file mode 100644 index 00000000..1b4a290e --- /dev/null +++ b/backend/typings/graphene/types/utils.pyi @@ -0,0 +1,9 @@ +from ..utils.module_loading import import_string as import_string +from .mountedtype import MountedType as MountedType +from .unmountedtype import UnmountedType as UnmountedType +from _typeshed import Incomplete + +def get_field_as(value, _as: Incomplete | None = None): ... +def yank_fields_from_attrs(attrs, _as: Incomplete | None = None, sort: bool = True): ... +def get_type(_type): ... +def get_underlying_type(_type): ... diff --git a/backend/typings/graphene/types/uuid.pyi b/backend/typings/graphene/types/uuid.pyi new file mode 100644 index 00000000..30159903 --- /dev/null +++ b/backend/typings/graphene/types/uuid.pyi @@ -0,0 +1,10 @@ +from .scalars import Scalar as Scalar +from _typeshed import Incomplete + +class UUID(Scalar): + @staticmethod + def serialize(uuid): ... + @staticmethod + def parse_literal(node, _variables: Incomplete | None = None): ... + @staticmethod + def parse_value(value): ... 
diff --git a/backend/typings/graphene/utils/__init__.pyi b/backend/typings/graphene/utils/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/backend/typings/graphene/utils/crunch.pyi b/backend/typings/graphene/utils/crunch.pyi new file mode 100644 index 00000000..2f965dce --- /dev/null +++ b/backend/typings/graphene/utils/crunch.pyi @@ -0,0 +1,4 @@ +def to_key(value): ... +def insert(value, index, values): ... +def flatten(data, index, values): ... +def crunch(data): ... diff --git a/backend/typings/graphene/utils/dataloader.pyi b/backend/typings/graphene/utils/dataloader.pyi new file mode 100644 index 00000000..8488f938 --- /dev/null +++ b/backend/typings/graphene/utils/dataloader.pyi @@ -0,0 +1,39 @@ +from _typeshed import Incomplete +from typing import NamedTuple + +class Loader(NamedTuple): + key: Incomplete + future: Incomplete + +def iscoroutinefunctionorpartial(fn): ... + +class DataLoader: + batch: bool + max_batch_size: int + cache: bool + batch_load_fn: Incomplete + get_cache_key: Incomplete + def __init__( + self, + batch_load_fn: Incomplete | None = None, + batch: Incomplete | None = None, + max_batch_size: Incomplete | None = None, + cache: Incomplete | None = None, + get_cache_key: Incomplete | None = None, + cache_map: Incomplete | None = None, + loop: Incomplete | None = None, + ) -> None: ... + @property + def loop(self): ... + def load(self, key: Incomplete | None = None): ... + def do_resolve_reject(self, key, future) -> None: ... + def load_many(self, keys): ... + def clear(self, key): ... + def clear_all(self): ... + def prime(self, key, value): ... + +def enqueue_post_future_job(loop, loader) -> None: ... +def get_chunks(iterable_obj, chunk_size: int = 1): ... +def dispatch_queue(loader) -> None: ... +async def dispatch_queue_batch(loader, queue): ... +def failed_dispatch(loader, queue, error) -> None: ... diff --git a/backend/typings/graphene/utils/deduplicator.pyi b/backend/typings/graphene/utils/deduplicator.pyi new file mode 100644 index 00000000..dced923a --- /dev/null +++ b/backend/typings/graphene/utils/deduplicator.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def deflate(node, index: Incomplete | None = None, path: Incomplete | None = None): ... diff --git a/backend/typings/graphene/utils/deprecated.pyi b/backend/typings/graphene/utils/deprecated.pyi new file mode 100644 index 00000000..f02961be --- /dev/null +++ b/backend/typings/graphene/utils/deprecated.pyi @@ -0,0 +1,6 @@ +from _typeshed import Incomplete + +string_types: Incomplete + +def warn_deprecation(text) -> None: ... +def deprecated(reason): ... diff --git a/backend/typings/graphene/utils/get_unbound_function.pyi b/backend/typings/graphene/utils/get_unbound_function.pyi new file mode 100644 index 00000000..c86c6d96 --- /dev/null +++ b/backend/typings/graphene/utils/get_unbound_function.pyi @@ -0,0 +1 @@ +def get_unbound_function(func): ... diff --git a/backend/typings/graphene/utils/is_introspection_key.pyi b/backend/typings/graphene/utils/is_introspection_key.pyi new file mode 100644 index 00000000..dfbc96d2 --- /dev/null +++ b/backend/typings/graphene/utils/is_introspection_key.pyi @@ -0,0 +1 @@ +def is_introspection_key(key): ... 
diff --git a/backend/typings/graphene/utils/module_loading.pyi b/backend/typings/graphene/utils/module_loading.pyi new file mode 100644 index 00000000..109c5dee --- /dev/null +++ b/backend/typings/graphene/utils/module_loading.pyi @@ -0,0 +1,4 @@ +from _typeshed import Incomplete + +def import_string(dotted_path, dotted_attributes: Incomplete | None = None): ... +def lazy_import(dotted_path, dotted_attributes: Incomplete | None = None): ... diff --git a/backend/typings/graphene/utils/orderedtype.pyi b/backend/typings/graphene/utils/orderedtype.pyi new file mode 100644 index 00000000..c426e378 --- /dev/null +++ b/backend/typings/graphene/utils/orderedtype.pyi @@ -0,0 +1,12 @@ +from _typeshed import Incomplete + +class OrderedType: + creation_counter: int + def __init__(self, _creation_counter: Incomplete | None = None) -> None: ... + @staticmethod + def gen_counter(): ... + def reset_counter(self) -> None: ... + def __eq__(self, other): ... + def __lt__(self, other): ... + def __gt__(self, other): ... + def __hash__(self): ... diff --git a/backend/typings/graphene/utils/props.pyi b/backend/typings/graphene/utils/props.pyi new file mode 100644 index 00000000..11ebcfbd --- /dev/null +++ b/backend/typings/graphene/utils/props.pyi @@ -0,0 +1,4 @@ +class _OldClass: ... +class _NewClass: ... + +def props(x): ... diff --git a/backend/typings/graphene/utils/resolve_only_args.pyi b/backend/typings/graphene/utils/resolve_only_args.pyi new file mode 100644 index 00000000..bd699bf2 --- /dev/null +++ b/backend/typings/graphene/utils/resolve_only_args.pyi @@ -0,0 +1,3 @@ +from .deprecated import deprecated as deprecated + +def resolve_only_args(func): ... diff --git a/backend/typings/graphene/utils/str_converters.pyi b/backend/typings/graphene/utils/str_converters.pyi new file mode 100644 index 00000000..6f19101f --- /dev/null +++ b/backend/typings/graphene/utils/str_converters.pyi @@ -0,0 +1,2 @@ +def to_camel_case(snake_str): ... +def to_snake_case(name): ... diff --git a/backend/typings/graphene/utils/subclass_with_meta.pyi b/backend/typings/graphene/utils/subclass_with_meta.pyi new file mode 100644 index 00000000..ec72b8f1 --- /dev/null +++ b/backend/typings/graphene/utils/subclass_with_meta.pyi @@ -0,0 +1,8 @@ +from .props import props as props + +class SubclassWithMeta_Meta(type): ... + +class SubclassWithMeta(metaclass=SubclassWithMeta_Meta): + def __init_subclass__(cls, **meta_options) -> None: ... + @classmethod + def __init_subclass_with_meta__(cls, **meta_options) -> None: ... diff --git a/backend/typings/graphene/utils/thenables.pyi b/backend/typings/graphene/utils/thenables.pyi new file mode 100644 index 00000000..797f13b5 --- /dev/null +++ b/backend/typings/graphene/utils/thenables.pyi @@ -0,0 +1,2 @@ +def await_and_execute(obj, on_resolve): ... +def maybe_thenable(obj, on_resolve): ... diff --git a/backend/typings/graphene/utils/trim_docstring.pyi b/backend/typings/graphene/utils/trim_docstring.pyi new file mode 100644 index 00000000..ceff5b4f --- /dev/null +++ b/backend/typings/graphene/utils/trim_docstring.pyi @@ -0,0 +1 @@ +def trim_docstring(docstring): ... 
diff --git a/backend/typings/graphene/validation/__init__.pyi b/backend/typings/graphene/validation/__init__.pyi new file mode 100644 index 00000000..ceda0425 --- /dev/null +++ b/backend/typings/graphene/validation/__init__.pyi @@ -0,0 +1,4 @@ +from .depth_limit import depth_limit_validator as depth_limit_validator +from .disable_introspection import DisableIntrospection as DisableIntrospection + +__all__ = ["DisableIntrospection", "depth_limit_validator"] diff --git a/backend/typings/graphene/validation/depth_limit.pyi b/backend/typings/graphene/validation/depth_limit.pyi new file mode 100644 index 00000000..25691b12 --- /dev/null +++ b/backend/typings/graphene/validation/depth_limit.pyi @@ -0,0 +1,34 @@ +from ..utils.is_introspection_key import is_introspection_key as is_introspection_key +from graphql.language import ( + DefinitionNode as DefinitionNode, + FieldNode, + FragmentDefinitionNode, + Node as Node, + OperationDefinitionNode, +) +from graphql.validation import ValidationContext as ValidationContext +from typing import Callable, Dict, List, Optional, Pattern, Union + +IgnoreType = Union[Callable[[str], bool], Pattern, str] + +def depth_limit_validator( + max_depth: int, + ignore: Optional[List[IgnoreType]] = None, + callback: Callable[[Dict[str, int]], None] = None, +): ... +def get_fragments( + definitions: List[DefinitionNode], +) -> Dict[str, FragmentDefinitionNode]: ... +def get_queries_and_mutations( + definitions: List[DefinitionNode], +) -> Dict[str, OperationDefinitionNode]: ... +def determine_depth( + node: Node, + fragments: Dict[str, FragmentDefinitionNode], + depth_so_far: int, + max_depth: int, + context: ValidationContext, + operation_name: str, + ignore: Optional[List[IgnoreType]] = None, +) -> int: ... +def is_ignored(node: FieldNode, ignore: Optional[List[IgnoreType]] = None) -> bool: ... diff --git a/backend/typings/graphene/validation/disable_introspection.pyi b/backend/typings/graphene/validation/disable_introspection.pyi new file mode 100644 index 00000000..7dd36e02 --- /dev/null +++ b/backend/typings/graphene/validation/disable_introspection.pyi @@ -0,0 +1,6 @@ +from ..utils.is_introspection_key import is_introspection_key as is_introspection_key +from graphql.language import FieldNode as FieldNode +from graphql.validation import ValidationRule + +class DisableIntrospection(ValidationRule): + def enter_field(self, node: FieldNode, *_args): ... 
diff --git a/backend/typings/mongoengine/__init__.pyi b/backend/typings/mongoengine/__init__.pyi new file mode 100644 index 00000000..05761cd8 --- /dev/null +++ b/backend/typings/mongoengine/__init__.pyi @@ -0,0 +1,205 @@ +from mongoengine.connection import * +from mongoengine.document import * +from mongoengine.errors import * +from mongoengine.fields import * +from mongoengine.queryset import * +from mongoengine.signals import * + +__all__ = [ + "Document", + "EmbeddedDocument", + "DynamicDocument", + "DynamicEmbeddedDocument", + "OperationError", + "InvalidCollectionError", + "NotUniqueError", + "MapReduceDocument", + "StringField", + "URLField", + "EmailField", + "IntField", + "LongField", + "FloatField", + "DecimalField", + "BooleanField", + "DateTimeField", + "DateField", + "ComplexDateTimeField", + "EmbeddedDocumentField", + "ObjectIdField", + "GenericEmbeddedDocumentField", + "DynamicField", + "ListField", + "SortedListField", + "EmbeddedDocumentListField", + "DictField", + "MapField", + "ReferenceField", + "CachedReferenceField", + "LazyReferenceField", + "GenericLazyReferenceField", + "GenericReferenceField", + "BinaryField", + "GridFSError", + "GridFSProxy", + "FileField", + "ImageGridFsProxy", + "ImproperlyConfigured", + "ImageField", + "GeoPointField", + "PointField", + "LineStringField", + "PolygonField", + "SequenceField", + "UUIDField", + "EnumField", + "MultiPointField", + "MultiLineStringField", + "MultiPolygonField", + "GeoJsonBaseField", + "Decimal128Field", + "DEFAULT_CONNECTION_NAME", + "DEFAULT_DATABASE_NAME", + "ConnectionFailure", + "connect", + "disconnect", + "disconnect_all", + "get_connection", + "get_db", + "register_connection", + "QuerySet", + "QuerySetNoCache", + "Q", + "queryset_manager", + "QuerySetManager", + "QueryFieldList", + "DO_NOTHING", + "NULLIFY", + "CASCADE", + "DENY", + "PULL", + "DoesNotExist", + "InvalidQueryError", + "MultipleObjectsReturned", + "NotUniqueError", + "OperationError", + "pre_init", + "post_init", + "pre_save", + "pre_save_post_validation", + "post_save", + "pre_delete", + "post_delete", + "NotRegistered", + "InvalidDocumentError", + "LookUpError", + "DoesNotExist", + "MultipleObjectsReturned", + "InvalidQueryError", + "OperationError", + "NotUniqueError", + "BulkWriteError", + "FieldDoesNotExist", + "ValidationError", + "SaveConditionError", + "DeprecatedError", +] + +# Names in __all__ with no definition: +# BinaryField +# BooleanField +# BulkWriteError +# CASCADE +# CachedReferenceField +# ComplexDateTimeField +# ConnectionFailure +# DEFAULT_CONNECTION_NAME +# DEFAULT_DATABASE_NAME +# DENY +# DO_NOTHING +# DateField +# DateTimeField +# Decimal128Field +# DecimalField +# DeprecatedError +# DictField +# Document +# DoesNotExist +# DoesNotExist +# DynamicDocument +# DynamicEmbeddedDocument +# DynamicField +# EmailField +# EmbeddedDocument +# EmbeddedDocumentField +# EmbeddedDocumentListField +# EnumField +# FieldDoesNotExist +# FileField +# FloatField +# GenericEmbeddedDocumentField +# GenericLazyReferenceField +# GenericReferenceField +# GeoJsonBaseField +# GeoPointField +# GridFSError +# GridFSProxy +# ImageField +# ImageGridFsProxy +# ImproperlyConfigured +# IntField +# InvalidCollectionError +# InvalidDocumentError +# InvalidQueryError +# InvalidQueryError +# LazyReferenceField +# LineStringField +# ListField +# LongField +# LookUpError +# MapField +# MapReduceDocument +# MultiLineStringField +# MultiPointField +# MultiPolygonField +# MultipleObjectsReturned +# MultipleObjectsReturned +# NULLIFY +# NotRegistered +# 
NotUniqueError +# NotUniqueError +# NotUniqueError +# ObjectIdField +# OperationError +# OperationError +# OperationError +# PULL +# PointField +# PolygonField +# Q +# QueryFieldList +# QuerySet +# QuerySetManager +# QuerySetNoCache +# ReferenceField +# SaveConditionError +# SequenceField +# SortedListField +# StringField +# URLField +# UUIDField +# ValidationError +# connect +# disconnect +# disconnect_all +# get_connection +# get_db +# post_delete +# post_init +# post_save +# pre_delete +# pre_init +# pre_save +# pre_save_post_validation +# queryset_manager +# register_connection diff --git a/backend/typings/mongoengine/base/__init__.pyi b/backend/typings/mongoengine/base/__init__.pyi new file mode 100644 index 00000000..aab2b104 --- /dev/null +++ b/backend/typings/mongoengine/base/__init__.pyi @@ -0,0 +1,38 @@ +from mongoengine.base.common import * +from mongoengine.base.datastructures import * +from mongoengine.base.document import * +from mongoengine.base.fields import * +from mongoengine.base.metaclasses import * + +__all__ = [ + "UPDATE_OPERATORS", + "_document_registry", + "get_document", + "BaseDict", + "BaseList", + "EmbeddedDocumentList", + "LazyReference", + "BaseDocument", + "BaseField", + "ComplexBaseField", + "ObjectIdField", + "GeoJsonBaseField", + "DocumentMetaclass", + "TopLevelDocumentMetaclass", +] + +# Names in __all__ with no definition: +# BaseDict +# BaseDocument +# BaseField +# BaseList +# ComplexBaseField +# DocumentMetaclass +# EmbeddedDocumentList +# GeoJsonBaseField +# LazyReference +# ObjectIdField +# TopLevelDocumentMetaclass +# UPDATE_OPERATORS +# _document_registry +# get_document diff --git a/backend/typings/mongoengine/base/common.pyi b/backend/typings/mongoengine/base/common.pyi new file mode 100644 index 00000000..15873b0f --- /dev/null +++ b/backend/typings/mongoengine/base/common.pyi @@ -0,0 +1,8 @@ +from _typeshed import Incomplete + +__all__ = ["UPDATE_OPERATORS", "get_document", "_document_registry"] + +UPDATE_OPERATORS: Incomplete +_document_registry: Incomplete + +def get_document(name): ... diff --git a/backend/typings/mongoengine/base/datastructures.pyi b/backend/typings/mongoengine/base/datastructures.pyi new file mode 100644 index 00000000..77eac409 --- /dev/null +++ b/backend/typings/mongoengine/base/datastructures.pyi @@ -0,0 +1,86 @@ +from _typeshed import Incomplete +from bson import DBRef +from collections.abc import Generator + +__all__ = [ + "BaseDict", + "StrictDict", + "BaseList", + "EmbeddedDocumentList", + "LazyReference", +] + +class BaseDict(dict): + def __init__(self, dict_items, instance, name) -> None: ... + def get(self, key, default: Incomplete | None = None): ... + def __getitem__(self, key): ... + __setitem__: Incomplete + __delattr__: Incomplete + __delitem__: Incomplete + pop: Incomplete + clear: Incomplete + update: Incomplete + popitem: Incomplete + setdefault: Incomplete + +class BaseList(list): + def __init__(self, list_items, instance, name) -> None: ... + def __getitem__(self, key): ... + def __iter__(self): ... + def __setitem__(self, key, value) -> None: ... + append: Incomplete + extend: Incomplete + insert: Incomplete + pop: Incomplete + remove: Incomplete + reverse: Incomplete + sort: Incomplete + __delitem__: Incomplete + __iadd__: Incomplete + __imul__: Incomplete + +class EmbeddedDocumentList(BaseList): + def __init__(self, list_items, instance, name) -> None: ... + def filter(self, **kwargs): ... + def exclude(self, **kwargs): ... + def count(self): ... + def get(self, **kwargs): ... + def first(self): ... 
+ def create(self, **values): ... + def save(self, *args, **kwargs) -> None: ... + def delete(self): ... + def update(self, **update): ... + +class StrictDict: + def __init__(self, **kwargs) -> None: ... + def __getitem__(self, key): ... + def __setitem__(self, key, value) -> None: ... + def __contains__(self, key) -> bool: ... + def get(self, key, default: Incomplete | None = None): ... + def pop(self, key, default: Incomplete | None = None): ... + def iteritems(self) -> Generator[Incomplete, None, None]: ... + def items(self): ... + def iterkeys(self): ... + def keys(self): ... + def __iter__(self): ... + def __len__(self) -> int: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + @classmethod + def create(cls, allowed_keys): ... + +class LazyReference(DBRef): + def fetch(self, force: bool = False): ... + @property + def pk(self): ... + document_type: Incomplete + passthrough: Incomplete + def __init__( + self, + document_type, + pk, + cached_doc: Incomplete | None = None, + passthrough: bool = False, + ) -> None: ... + def __getitem__(self, name): ... + def __getattr__(self, name): ... diff --git a/backend/typings/mongoengine/base/document.pyi b/backend/typings/mongoengine/base/document.pyi new file mode 100644 index 00000000..bd9b35f8 --- /dev/null +++ b/backend/typings/mongoengine/base/document.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +__all__ = ["BaseDocument", "NON_FIELD_ERRORS"] + +NON_FIELD_ERRORS: str + +class BaseDocument: + STRICT: bool + def __init__(self, *args, **values) -> None: ... + def __delattr__(self, *args, **kwargs) -> None: ... + def __setattr__(self, name, value) -> None: ... + def __iter__(self): ... + def __getitem__(self, name): ... + def __setitem__(self, name, value) -> None: ... + def __contains__(self, name) -> bool: ... + def __len__(self) -> int: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def clean(self) -> None: ... + def get_text_score(self): ... + def to_mongo(self, use_db_field: bool = True, fields: Incomplete | None = None): ... + def validate(self, clean: bool = True) -> None: ... + def to_json(self, *args, **kwargs): ... + @classmethod + def from_json(cls, json_data, created: bool = False, **kwargs): ... diff --git a/backend/typings/mongoengine/base/fields.pyi b/backend/typings/mongoengine/base/fields.pyi new file mode 100644 index 00000000..dcaff72f --- /dev/null +++ b/backend/typings/mongoengine/base/fields.pyi @@ -0,0 +1,72 @@ +from _typeshed import Incomplete + +__all__ = ["BaseField", "ComplexBaseField", "ObjectIdField", "GeoJsonBaseField"] + +class BaseField: + name: Incomplete + creation_counter: int + auto_creation_counter: int + db_field: Incomplete + required: Incomplete + default: Incomplete + unique: Incomplete + unique_with: Incomplete + primary_key: Incomplete + validation: Incomplete + choices: Incomplete + null: Incomplete + sparse: Incomplete + def __init__( + self, + db_field: Incomplete | None = None, + required: bool = False, + default: Incomplete | None = None, + unique: bool = False, + unique_with: Incomplete | None = None, + primary_key: bool = False, + validation: Incomplete | None = None, + choices: Incomplete | None = None, + null: bool = False, + sparse: bool = False, + **kwargs + ) -> None: ... + def __get__(self, instance, owner): ... + def __set__(self, instance, value) -> None: ... + def error( + self, + message: str = "", + errors: Incomplete | None = None, + field_name: Incomplete | None = None, + ) -> None: ... + def to_python(self, value): ... 
+ def to_mongo(self, value): ... + def prepare_query_value(self, op, value): ... + def validate(self, value, clean: bool = True) -> None: ... + @property + def owner_document(self): ... + @owner_document.setter + def owner_document(self, owner_document) -> None: ... + +class ComplexBaseField(BaseField): + field: Incomplete + def __init__(self, field: Incomplete | None = None, **kwargs) -> None: ... + def __set__(self, instance, value): ... + def __get__(self, instance, owner): ... + def to_python(self, value): ... + def to_mongo( + self, value, use_db_field: bool = True, fields: Incomplete | None = None + ): ... + def validate(self, value) -> None: ... + def prepare_query_value(self, op, value): ... + def lookup_member(self, member_name): ... + +class ObjectIdField(BaseField): + def to_python(self, value): ... + def to_mongo(self, value): ... + def prepare_query_value(self, op, value): ... + def validate(self, value) -> None: ... + +class GeoJsonBaseField(BaseField): + def __init__(self, auto_index: bool = True, *args, **kwargs) -> None: ... + def validate(self, value): ... + def to_mongo(self, value): ... diff --git a/backend/typings/mongoengine/base/metaclasses.pyi b/backend/typings/mongoengine/base/metaclasses.pyi new file mode 100644 index 00000000..26c39a64 --- /dev/null +++ b/backend/typings/mongoengine/base/metaclasses.pyi @@ -0,0 +1,14 @@ +__all__ = ["DocumentMetaclass", "TopLevelDocumentMetaclass"] + +class DocumentMetaclass(type): + def __new__(mcs, name, bases, attrs): ... + +class TopLevelDocumentMetaclass(DocumentMetaclass): + def __new__(mcs, name, bases, attrs): ... + @classmethod + def get_auto_id_names(mcs, new_class): ... + +class MetaDict(dict): + def merge(self, new_options) -> None: ... + +class BasesTuple(tuple): ... diff --git a/backend/typings/mongoengine/base/utils.pyi b/backend/typings/mongoengine/base/utils.pyi new file mode 100644 index 00000000..24e4ce33 --- /dev/null +++ b/backend/typings/mongoengine/base/utils.pyi @@ -0,0 +1,6 @@ +class LazyRegexCompiler: + def __init__(self, pattern, flags: int = 0) -> None: ... + @property + def compiled_regex(self): ... + def __get__(self, instance, owner): ... + def __set__(self, instance, value) -> None: ... diff --git a/backend/typings/mongoengine/common.pyi b/backend/typings/mongoengine/common.pyi new file mode 100644 index 00000000..e69de29b diff --git a/backend/typings/mongoengine/connection.pyi b/backend/typings/mongoengine/connection.pyi new file mode 100644 index 00000000..c564c256 --- /dev/null +++ b/backend/typings/mongoengine/connection.pyi @@ -0,0 +1,38 @@ +from _typeshed import Incomplete + +__all__ = [ + "DEFAULT_CONNECTION_NAME", + "DEFAULT_DATABASE_NAME", + "ConnectionFailure", + "connect", + "disconnect", + "disconnect_all", + "get_connection", + "get_db", + "register_connection", +] + +DEFAULT_CONNECTION_NAME: str +DEFAULT_DATABASE_NAME: str + +class ConnectionFailure(Exception): ... + +def register_connection( + alias, + db: Incomplete | None = None, + name: Incomplete | None = None, + host: Incomplete | None = None, + port: Incomplete | None = None, + read_preference=..., + username: Incomplete | None = None, + password: Incomplete | None = None, + authentication_source: Incomplete | None = None, + authentication_mechanism: Incomplete | None = None, + authmechanismproperties: Incomplete | None = None, + **kwargs +) -> None: ... +def disconnect(alias=...) -> None: ... +def disconnect_all() -> None: ... +def get_connection(alias=..., reconnect: bool = False): ... 
+def get_db(alias=..., reconnect: bool = False): ...
+def connect(db: Incomplete | None = None, alias=..., **kwargs): ...
diff --git a/backend/typings/mongoengine/context_managers.pyi b/backend/typings/mongoengine/context_managers.pyi
new file mode 100644
index 00000000..cf02060e
--- /dev/null
+++ b/backend/typings/mongoengine/context_managers.pyi
@@ -0,0 +1,91 @@
+import types
+from _typeshed import Incomplete
+from collections.abc import Generator
+
+__all__ = [
+    "switch_db",
+    "switch_collection",
+    "no_dereference",
+    "no_sub_classes",
+    "query_counter",
+    "set_write_concern",
+    "set_read_write_concern",
+]
+
+class switch_db:
+    cls: Incomplete
+    collection: Incomplete
+    db_alias: Incomplete
+    ori_db_alias: Incomplete
+    def __init__(self, cls, db_alias) -> None: ...
+    def __enter__(self): ...
+    def __exit__(
+        self,
+        t: type[BaseException] | None,
+        value: BaseException | None,
+        traceback: types.TracebackType | None,
+    ) -> None: ...
+
+class switch_collection:
+    cls: Incomplete
+    ori_collection: Incomplete
+    ori_get_collection_name: Incomplete
+    collection_name: Incomplete
+    def __init__(self, cls, collection_name) -> None: ...
+    def __enter__(self): ...
+    def __exit__(
+        self,
+        t: type[BaseException] | None,
+        value: BaseException | None,
+        traceback: types.TracebackType | None,
+    ) -> None: ...
+
+class no_dereference:
+    cls: Incomplete
+    deref_fields: Incomplete
+    def __init__(self, cls) -> None: ...
+    def __enter__(self): ...
+    def __exit__(
+        self,
+        t: type[BaseException] | None,
+        value: BaseException | None,
+        traceback: types.TracebackType | None,
+    ): ...
+
+class no_sub_classes:
+    cls: Incomplete
+    cls_initial_subclasses: Incomplete
+    def __init__(self, cls) -> None: ...
+    def __enter__(self): ...
+    def __exit__(
+        self,
+        t: type[BaseException] | None,
+        value: BaseException | None,
+        traceback: types.TracebackType | None,
+    ) -> None: ...
+
+class query_counter:
+    db: Incomplete
+    initial_profiling_level: Incomplete
+    def __init__(self, alias=...) -> None: ...
+    def __enter__(self): ...
+    def __exit__(
+        self,
+        t: type[BaseException] | None,
+        value: BaseException | None,
+        traceback: types.TracebackType | None,
+    ) -> None: ...
+    def __eq__(self, value): ...
+    def __ne__(self, value): ...
+    def __lt__(self, value): ...
+    def __le__(self, value): ...
+    def __gt__(self, value): ...
+    def __ge__(self, value): ...
+    def __int__(self) -> int: ...
+
+def set_write_concern(
+    collection, write_concerns
+) -> Generator[Incomplete, None, None]: ...
+def set_read_write_concern(
+    collection, write_concerns, read_concerns
+) -> Generator[Incomplete, None, None]: ...
diff --git a/backend/typings/mongoengine/dereference.pyi b/backend/typings/mongoengine/dereference.pyi new file mode 100644 index 00000000..3be09122 --- /dev/null +++ b/backend/typings/mongoengine/dereference.pyi @@ -0,0 +1,33 @@ +from _typeshed import Incomplete +from mongoengine.base import ( + BaseDict as BaseDict, + BaseList as BaseList, + EmbeddedDocumentList as EmbeddedDocumentList, + TopLevelDocumentMetaclass as TopLevelDocumentMetaclass, + get_document as get_document, +) +from mongoengine.base.datastructures import LazyReference as LazyReference +from mongoengine.connection import get_db as get_db +from mongoengine.document import ( + Document as Document, + EmbeddedDocument as EmbeddedDocument, +) +from mongoengine.fields import ( + DictField as DictField, + ListField as ListField, + MapField as MapField, + ReferenceField as ReferenceField, +) +from mongoengine.queryset import QuerySet as QuerySet + +class DeReference: + max_depth: Incomplete + reference_map: Incomplete + object_map: Incomplete + def __call__( + self, + items, + max_depth: int = 1, + instance: Incomplete | None = None, + name: Incomplete | None = None, + ): ... diff --git a/backend/typings/mongoengine/document.pyi b/backend/typings/mongoengine/document.pyi new file mode 100644 index 00000000..ef0c7cd2 --- /dev/null +++ b/backend/typings/mongoengine/document.pyi @@ -0,0 +1,89 @@ +from _typeshed import Incomplete +from mongoengine.base import BaseDocument, DocumentMetaclass, TopLevelDocumentMetaclass +from mongoengine.queryset import ( + NotUniqueError as NotUniqueError, + OperationError as OperationError, +) + +__all__ = [ + "Document", + "EmbeddedDocument", + "DynamicDocument", + "DynamicEmbeddedDocument", + "OperationError", + "InvalidCollectionError", + "NotUniqueError", + "MapReduceDocument", +] + +class InvalidCollectionError(Exception): ... + +class EmbeddedDocument(BaseDocument, metaclass=DocumentMetaclass): + my_metaclass = DocumentMetaclass + __hash__: Incomplete + def __init__(self, *args, **kwargs) -> None: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def to_mongo(self, *args, **kwargs): ... + +class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass): + my_metaclass = TopLevelDocumentMetaclass + @property + def pk(self): ... + @pk.setter + def pk(self, value): ... + def __hash__(self): ... + def to_mongo(self, *args, **kwargs): ... + def modify(self, query: Incomplete | None = None, **update): ... + def save( + self, + force_insert: bool = False, + validate: bool = True, + clean: bool = True, + write_concern: Incomplete | None = None, + cascade: Incomplete | None = None, + cascade_kwargs: Incomplete | None = None, + _refs: Incomplete | None = None, + save_condition: Incomplete | None = None, + signal_kwargs: Incomplete | None = None, + **kwargs + ): ... + def cascade_save(self, **kwargs) -> None: ... + def update(self, **kwargs): ... + def delete( + self, signal_kwargs: Incomplete | None = None, **write_concern + ) -> None: ... + def switch_db(self, db_alias, keep_created: bool = True): ... + def switch_collection(self, collection_name, keep_created: bool = True): ... + def select_related(self, max_depth: int = 1): ... + def reload(self, *fields, **kwargs): ... + def to_dbref(self): ... + @classmethod + def register_delete_rule(cls, document_cls, field_name, rule) -> None: ... + @classmethod + def drop_collection(cls) -> None: ... + @classmethod + def create_index(cls, keys, background: bool = False, **kwargs): ... + @classmethod + def ensure_indexes(cls) -> None: ... 
+ @classmethod + def list_indexes(cls): ... + @classmethod + def compare_indexes(cls): ... + @staticmethod + def objects(**kwargs): ... + +class DynamicDocument(Document, metaclass=TopLevelDocumentMetaclass): + my_metaclass = TopLevelDocumentMetaclass + def __delattr__(self, *args, **kwargs) -> None: ... + +class DynamicEmbeddedDocument(EmbeddedDocument, metaclass=DocumentMetaclass): + my_metaclass = DocumentMetaclass + def __delattr__(self, *args, **kwargs) -> None: ... + +class MapReduceDocument: + key: Incomplete + value: Incomplete + def __init__(self, document, collection, key, value) -> None: ... + @property + def object(self): ... diff --git a/backend/typings/mongoengine/errors.pyi b/backend/typings/mongoengine/errors.pyi new file mode 100644 index 00000000..6fdbbf34 --- /dev/null +++ b/backend/typings/mongoengine/errors.pyi @@ -0,0 +1,40 @@ +from _typeshed import Incomplete + +__all__ = [ + "NotRegistered", + "InvalidDocumentError", + "LookUpError", + "DoesNotExist", + "MultipleObjectsReturned", + "InvalidQueryError", + "OperationError", + "NotUniqueError", + "BulkWriteError", + "FieldDoesNotExist", + "ValidationError", + "SaveConditionError", + "DeprecatedError", +] + +class MongoEngineException(Exception): ... +class NotRegistered(MongoEngineException): ... +class InvalidDocumentError(MongoEngineException): ... +class LookUpError(AttributeError): ... +class DoesNotExist(MongoEngineException): ... +class MultipleObjectsReturned(MongoEngineException): ... +class InvalidQueryError(MongoEngineException): ... +class OperationError(MongoEngineException): ... +class NotUniqueError(OperationError): ... +class BulkWriteError(OperationError): ... +class SaveConditionError(OperationError): ... +class FieldDoesNotExist(MongoEngineException): ... + +class ValidationError(AssertionError): + errors: Incomplete + field_name: Incomplete + message: Incomplete + def __init__(self, message: str = "", **kwargs) -> None: ... + def __getattribute__(self, name): ... + def to_dict(self): ... + +class DeprecatedError(MongoEngineException): ... 
diff --git a/backend/typings/mongoengine/fields.pyi b/backend/typings/mongoengine/fields.pyi new file mode 100644 index 00000000..7cb9937f --- /dev/null +++ b/backend/typings/mongoengine/fields.pyi @@ -0,0 +1,463 @@ +from _typeshed import Incomplete +from mongoengine.base import ( + BaseField, + ComplexBaseField, + GeoJsonBaseField as GeoJsonBaseField, + ObjectIdField as ObjectIdField, +) + +__all__ = [ + "StringField", + "URLField", + "EmailField", + "IntField", + "LongField", + "FloatField", + "DecimalField", + "BooleanField", + "DateTimeField", + "DateField", + "ComplexDateTimeField", + "EmbeddedDocumentField", + "ObjectIdField", + "GenericEmbeddedDocumentField", + "DynamicField", + "ListField", + "SortedListField", + "EmbeddedDocumentListField", + "DictField", + "MapField", + "ReferenceField", + "CachedReferenceField", + "LazyReferenceField", + "GenericLazyReferenceField", + "GenericReferenceField", + "BinaryField", + "GridFSError", + "GridFSProxy", + "FileField", + "ImageGridFsProxy", + "ImproperlyConfigured", + "ImageField", + "GeoPointField", + "PointField", + "LineStringField", + "PolygonField", + "SequenceField", + "UUIDField", + "EnumField", + "MultiPointField", + "MultiLineStringField", + "MultiPolygonField", + "GeoJsonBaseField", + "Decimal128Field", +] + +class StringField(BaseField): + regex: Incomplete + max_length: Incomplete + min_length: Incomplete + def __init__( + self, + regex: Incomplete | None = None, + max_length: Incomplete | None = None, + min_length: Incomplete | None = None, + **kwargs + ) -> None: ... + def to_python(self, value): ... + def validate(self, value) -> None: ... + def lookup_member(self, member_name) -> None: ... + def prepare_query_value(self, op, value): ... + +class URLField(StringField): + url_regex: Incomplete + schemes: Incomplete + def __init__( + self, + url_regex: Incomplete | None = None, + schemes: Incomplete | None = None, + **kwargs + ) -> None: ... + def validate(self, value) -> None: ... + +class EmailField(StringField): + USER_REGEX: Incomplete + UTF8_USER_REGEX: Incomplete + DOMAIN_REGEX: Incomplete + error_msg: str + domain_whitelist: Incomplete + allow_utf8_user: Incomplete + allow_ip_domain: Incomplete + def __init__( + self, + domain_whitelist: Incomplete | None = None, + allow_utf8_user: bool = False, + allow_ip_domain: bool = False, + *args, + **kwargs + ) -> None: ... + def validate_user_part(self, user_part): ... + def validate_domain_part(self, domain_part): ... + def validate(self, value) -> None: ... + +class IntField(BaseField): + def __init__( + self, + min_value: Incomplete | None = None, + max_value: Incomplete | None = None, + **kwargs + ) -> None: ... + def to_python(self, value): ... + def validate(self, value) -> None: ... + def prepare_query_value(self, op, value): ... + +class LongField(IntField): + def to_mongo(self, value): ... + +class FloatField(BaseField): + def __init__( + self, + min_value: Incomplete | None = None, + max_value: Incomplete | None = None, + **kwargs + ) -> None: ... + def to_python(self, value): ... + def validate(self, value) -> None: ... + def prepare_query_value(self, op, value): ... + +class DecimalField(BaseField): + min_value: Incomplete + max_value: Incomplete + force_string: Incomplete + precision: Incomplete + rounding: Incomplete + def __init__( + self, + min_value: Incomplete | None = None, + max_value: Incomplete | None = None, + force_string: bool = False, + precision: int = 2, + rounding=..., + **kwargs + ) -> None: ... + def to_python(self, value): ... 
+ def to_mongo(self, value): ... + def validate(self, value) -> None: ... + def prepare_query_value(self, op, value): ... + +class BooleanField(BaseField): + def to_python(self, value): ... + def validate(self, value) -> None: ... + +class DateTimeField(BaseField): + def validate(self, value) -> None: ... + def to_mongo(self, value): ... + def prepare_query_value(self, op, value): ... + +class DateField(DateTimeField): + def to_mongo(self, value): ... + def to_python(self, value): ... + +class ComplexDateTimeField(StringField): + separator: Incomplete + format: Incomplete + def __init__(self, separator: str = ",", **kwargs) -> None: ... + def __get__(self, instance, owner): ... + def __set__(self, instance, value) -> None: ... + def validate(self, value) -> None: ... + def to_python(self, value): ... + def to_mongo(self, value): ... + def prepare_query_value(self, op, value): ... + +class EmbeddedDocumentField(BaseField): + document_type_obj: Incomplete + def __init__(self, document_type, **kwargs) -> None: ... + @property + def document_type(self): ... + def to_python(self, value): ... + def to_mongo( + self, value, use_db_field: bool = True, fields: Incomplete | None = None + ): ... + def validate(self, value, clean: bool = True) -> None: ... + def lookup_member(self, member_name): ... + def prepare_query_value(self, op, value): ... + +class GenericEmbeddedDocumentField(BaseField): + def prepare_query_value(self, op, value): ... + def to_python(self, value): ... + def validate(self, value, clean: bool = True): ... + def lookup_member(self, member_name): ... + def to_mongo( + self, document, use_db_field: bool = True, fields: Incomplete | None = None + ): ... + +class DynamicField(BaseField): + def to_mongo( + self, value, use_db_field: bool = True, fields: Incomplete | None = None + ): ... + def to_python(self, value): ... + def lookup_member(self, member_name): ... + def prepare_query_value(self, op, value): ... + def validate(self, value, clean: bool = True) -> None: ... + +class ListField(ComplexBaseField): + max_length: Incomplete + def __init__( + self, + field: Incomplete | None = None, + max_length: Incomplete | None = None, + **kwargs + ) -> None: ... + def __get__(self, instance, owner): ... + def validate(self, value) -> None: ... + def prepare_query_value(self, op, value): ... + +class EmbeddedDocumentListField(ListField): + def __init__(self, document_type, **kwargs) -> None: ... + +class SortedListField(ListField): + def __init__(self, field, **kwargs) -> None: ... + def to_mongo( + self, value, use_db_field: bool = True, fields: Incomplete | None = None + ): ... + +class DictField(ComplexBaseField): + def __init__(self, field: Incomplete | None = None, *args, **kwargs) -> None: ... + def validate(self, value) -> None: ... + def lookup_member(self, member_name): ... + def prepare_query_value(self, op, value): ... + +class MapField(DictField): + def __init__(self, field: Incomplete | None = None, *args, **kwargs) -> None: ... + +class ReferenceField(BaseField): + dbref: Incomplete + document_type_obj: Incomplete + reverse_delete_rule: Incomplete + def __init__( + self, document_type, dbref: bool = False, reverse_delete_rule=..., **kwargs + ) -> None: ... + @property + def document_type(self): ... + def __get__(self, instance, owner): ... + def to_mongo(self, document): ... + def to_python(self, value): ... + def prepare_query_value(self, op, value): ... + def validate(self, value) -> None: ... + def lookup_member(self, member_name): ... 
+ +class CachedReferenceField(BaseField): + auto_sync: Incomplete + document_type_obj: Incomplete + fields: Incomplete + def __init__( + self, + document_type, + fields: Incomplete | None = None, + auto_sync: bool = True, + **kwargs + ) -> None: ... + def start_listener(self) -> None: ... + def on_document_pre_save(self, sender, document, created, **kwargs) -> None: ... + def to_python(self, value): ... + @property + def document_type(self): ... + def __get__(self, instance, owner): ... + def to_mongo( + self, document, use_db_field: bool = True, fields: Incomplete | None = None + ): ... + def prepare_query_value(self, op, value): ... + def validate(self, value) -> None: ... + def lookup_member(self, member_name): ... + def sync_all(self) -> None: ... + +class GenericReferenceField(BaseField): + choices: Incomplete + def __init__(self, *args, **kwargs) -> None: ... + def __get__(self, instance, owner): ... + def validate(self, value) -> None: ... + def to_mongo(self, document): ... + def prepare_query_value(self, op, value): ... + +class BinaryField(BaseField): + max_bytes: Incomplete + def __init__(self, max_bytes: Incomplete | None = None, **kwargs) -> None: ... + def __set__(self, instance, value): ... + def to_mongo(self, value): ... + def validate(self, value) -> None: ... + def prepare_query_value(self, op, value): ... + +class EnumField(BaseField): + def __init__(self, enum, **kwargs) -> None: ... + def validate(self, value): ... + def to_python(self, value): ... + def __set__(self, instance, value): ... + def to_mongo(self, value): ... + def prepare_query_value(self, op, value): ... + +class GridFSError(Exception): ... + +class GridFSProxy: + grid_id: Incomplete + key: Incomplete + instance: Incomplete + db_alias: Incomplete + collection_name: Incomplete + newfile: Incomplete + gridout: Incomplete + def __init__( + self, + grid_id: Incomplete | None = None, + key: Incomplete | None = None, + instance: Incomplete | None = None, + db_alias=..., + collection_name: str = "fs", + ) -> None: ... + def __getattr__(self, name): ... + def __get__(self, instance, value): ... + def __bool__(self) -> bool: ... + def __copy__(self): ... + def __deepcopy__(self, memo): ... + def __eq__(self, other): ... + def __ne__(self, other): ... + @property + def fs(self): ... + def get(self, grid_id: Incomplete | None = None): ... + def new_file(self, **kwargs) -> None: ... + def put(self, file_obj, **kwargs) -> None: ... + def write(self, string) -> None: ... + def writelines(self, lines) -> None: ... + def read(self, size: int = -1): ... + def delete(self) -> None: ... + def replace(self, file_obj, **kwargs) -> None: ... + def close(self) -> None: ... + +class FileField(BaseField): + proxy_class = GridFSProxy + collection_name: Incomplete + db_alias: Incomplete + def __init__(self, db_alias=..., collection_name: str = "fs", **kwargs) -> None: ... + def __get__(self, instance, owner): ... + def __set__(self, instance, value) -> None: ... + def get_proxy_obj( + self, + key, + instance, + db_alias: Incomplete | None = None, + collection_name: Incomplete | None = None, + ): ... + def to_mongo(self, value): ... + def to_python(self, value): ... + def validate(self, value) -> None: ... + +class ImageGridFsProxy(GridFSProxy): + def put(self, file_obj, **kwargs): ... + def delete(self, *args, **kwargs): ... + @property + def size(self): ... + @property + def format(self): ... + @property + def thumbnail(self): ... + def write(self, *args, **kwargs) -> None: ... 
+ def writelines(self, *args, **kwargs) -> None: ... + +class ImproperlyConfigured(Exception): ... + +class ImageField(FileField): + proxy_class = ImageGridFsProxy + def __init__( + self, + size: Incomplete | None = None, + thumbnail_size: Incomplete | None = None, + collection_name: str = "images", + **kwargs + ) -> None: ... + +class SequenceField(BaseField): + COLLECTION_NAME: str + VALUE_DECORATOR = int + collection_name: Incomplete + db_alias: Incomplete + sequence_name: Incomplete + value_decorator: Incomplete + def __init__( + self, + collection_name: Incomplete | None = None, + db_alias: Incomplete | None = None, + sequence_name: Incomplete | None = None, + value_decorator: Incomplete | None = None, + *args, + **kwargs + ) -> None: ... + def generate(self): ... + def set_next_value(self, value): ... + def get_next_value(self): ... + def get_sequence_name(self): ... + def __get__(self, instance, owner): ... + def __set__(self, instance, value): ... + def prepare_query_value(self, op, value): ... + def to_python(self, value): ... + +class UUIDField(BaseField): + def __init__(self, binary: bool = True, **kwargs) -> None: ... + def to_python(self, value): ... + def to_mongo(self, value): ... + def prepare_query_value(self, op, value): ... + def validate(self, value) -> None: ... + +class GeoPointField(BaseField): + def validate(self, value) -> None: ... + +class PointField(GeoJsonBaseField): ... +class LineStringField(GeoJsonBaseField): ... +class PolygonField(GeoJsonBaseField): ... +class MultiPointField(GeoJsonBaseField): ... +class MultiLineStringField(GeoJsonBaseField): ... +class MultiPolygonField(GeoJsonBaseField): ... + +class LazyReferenceField(BaseField): + dbref: Incomplete + passthrough: Incomplete + document_type_obj: Incomplete + reverse_delete_rule: Incomplete + def __init__( + self, + document_type, + passthrough: bool = False, + dbref: bool = False, + reverse_delete_rule=..., + **kwargs + ) -> None: ... + @property + def document_type(self): ... + def build_lazyref(self, value): ... + def __get__(self, instance, owner): ... + def to_mongo(self, value): ... + def to_python(self, value): ... + def validate(self, value) -> None: ... + def prepare_query_value(self, op, value): ... + def lookup_member(self, member_name): ... + +class GenericLazyReferenceField(GenericReferenceField): + passthrough: Incomplete + def __init__(self, *args, **kwargs) -> None: ... + def build_lazyref(self, value): ... + def __get__(self, instance, owner): ... + def validate(self, value): ... + def to_mongo(self, document): ... + +class Decimal128Field(BaseField): + DECIMAL_CONTEXT: Incomplete + min_value: Incomplete + max_value: Incomplete + def __init__( + self, + min_value: Incomplete | None = None, + max_value: Incomplete | None = None, + **kwargs + ) -> None: ... + def to_mongo(self, value): ... + def to_python(self, value): ... + def validate(self, value) -> None: ... + def prepare_query_value(self, op, value): ... diff --git a/backend/typings/mongoengine/mongodb_support.pyi b/backend/typings/mongoengine/mongodb_support.pyi new file mode 100644 index 00000000..f12c224b --- /dev/null +++ b/backend/typings/mongoengine/mongodb_support.pyi @@ -0,0 +1,9 @@ +from _typeshed import Incomplete +from mongoengine.connection import get_connection as get_connection + +MONGODB_34: Incomplete +MONGODB_36: Incomplete +MONGODB_42: Incomplete +MONGODB_44: Incomplete + +def get_mongodb_version(): ... 
diff --git a/backend/typings/mongoengine/pymongo_support.pyi b/backend/typings/mongoengine/pymongo_support.pyi new file mode 100644 index 00000000..811f1ba0 --- /dev/null +++ b/backend/typings/mongoengine/pymongo_support.pyi @@ -0,0 +1,14 @@ +from _typeshed import Incomplete + +PYMONGO_VERSION: Incomplete +LEGACY_JSON_OPTIONS: Incomplete + +def count_documents( + collection, + filter, + skip: Incomplete | None = None, + limit: Incomplete | None = None, + hint: Incomplete | None = None, + collation: Incomplete | None = None, +): ... +def list_collection_names(db, include_system_collections: bool = False): ... diff --git a/backend/typings/mongoengine/queryset/__init__.pyi b/backend/typings/mongoengine/queryset/__init__.pyi new file mode 100644 index 00000000..cc1c705c --- /dev/null +++ b/backend/typings/mongoengine/queryset/__init__.pyi @@ -0,0 +1,43 @@ +from mongoengine.errors import * +from mongoengine.queryset.field_list import * +from mongoengine.queryset.manager import * +from mongoengine.queryset.queryset import * +from mongoengine.queryset.transform import * +from mongoengine.queryset.visitor import * + +__all__ = [ + "QuerySet", + "QuerySetNoCache", + "Q", + "queryset_manager", + "QuerySetManager", + "QueryFieldList", + "DO_NOTHING", + "NULLIFY", + "CASCADE", + "DENY", + "PULL", + "DoesNotExist", + "InvalidQueryError", + "MultipleObjectsReturned", + "NotUniqueError", + "OperationError", +] + +# Names in __all__ with no definition: +# CASCADE +# DENY +# DO_NOTHING +# DoesNotExist +# InvalidQueryError +# MultipleObjectsReturned +# NULLIFY +# NotUniqueError +# OperationError +# PULL +# Q +# QueryFieldList +# QuerySet +# QuerySetManager +# QuerySetNoCache +# queryset_manager diff --git a/backend/typings/mongoengine/queryset/base.pyi b/backend/typings/mongoengine/queryset/base.pyi new file mode 100644 index 00000000..d02248e6 --- /dev/null +++ b/backend/typings/mongoengine/queryset/base.pyi @@ -0,0 +1,120 @@ +from _typeshed import Incomplete +from collections.abc import Generator + +__all__ = ["BaseQuerySet", "DO_NOTHING", "NULLIFY", "CASCADE", "DENY", "PULL"] + +DO_NOTHING: int +NULLIFY: int +CASCADE: int +DENY: int +PULL: int + +class BaseQuerySet: + def __init__(self, document, collection) -> None: ... + def __call__(self, q_obj: Incomplete | None = None, **query): ... + def __getitem__(self, key): ... + def __iter__(self): ... + def __bool__(self) -> bool: ... + def all(self): ... + def filter(self, *q_objs, **query): ... + def search_text(self, text, language: Incomplete | None = None): ... + def get(self, *q_objs, **query): ... + def create(self, **kwargs): ... + def first(self): ... + def insert( + self, + doc_or_docs, + load_bulk: bool = True, + write_concern: Incomplete | None = None, + signal_kwargs: Incomplete | None = None, + ): ... + def count(self, with_limit_and_skip: bool = False): ... + def delete( + self, + write_concern: Incomplete | None = None, + _from_doc_delete: bool = False, + cascade_refs: Incomplete | None = None, + ): ... + def update( + self, + upsert: bool = False, + multi: bool = True, + write_concern: Incomplete | None = None, + read_concern: Incomplete | None = None, + full_result: bool = False, + **update + ): ... + def upsert_one( + self, + write_concern: Incomplete | None = None, + read_concern: Incomplete | None = None, + **update + ): ... + def update_one( + self, + upsert: bool = False, + write_concern: Incomplete | None = None, + full_result: bool = False, + **update + ): ... 
+ def modify( + self, + upsert: bool = False, + full_response: bool = False, + remove: bool = False, + new: bool = False, + **update + ): ... + def with_id(self, object_id): ... + def in_bulk(self, object_ids): ... + def none(self): ... + def no_sub_classes(self): ... + def using(self, alias): ... + def clone(self): ... + def select_related(self, max_depth: int = 1): ... + def limit(self, n): ... + def skip(self, n): ... + def hint(self, index: Incomplete | None = None): ... + def collation(self, collation: Incomplete | None = None): ... + def batch_size(self, size): ... + def distinct(self, field): ... + def only(self, *fields): ... + def exclude(self, *fields): ... + def fields(self, _only_called: bool = False, **kwargs): ... + def all_fields(self): ... + def order_by(self, *keys): ... + def clear_cls_query(self): ... + def comment(self, text): ... + def explain(self): ... + def snapshot(self, enabled): ... + def allow_disk_use(self, enabled): ... + def timeout(self, enabled): ... + def read_preference(self, read_preference): ... + def read_concern(self, read_concern): ... + def scalar(self, *fields): ... + def values_list(self, *fields): ... + def as_pymongo(self): ... + def max_time_ms(self, ms): ... + def to_json(self, *args, **kwargs): ... + def from_json(self, json_data): ... + def aggregate(self, pipeline, *suppl_pipeline, **kwargs): ... + def map_reduce( + self, + map_f, + reduce_f, + output, + finalize_f: Incomplete | None = None, + limit: Incomplete | None = None, + scope: Incomplete | None = None, + ) -> Generator[Incomplete, None, None]: ... + def exec_js(self, code, *fields, **options): ... + def where(self, where_clause): ... + def sum(self, field): ... + def average(self, field): ... + def item_frequencies( + self, field, normalize: bool = False, map_reduce: bool = True + ): ... + def __next__(self): ... + def rewind(self) -> None: ... + def __deepcopy__(self, memo): ... + def no_dereference(self): ... diff --git a/backend/typings/mongoengine/queryset/field_list.pyi b/backend/typings/mongoengine/queryset/field_list.pyi new file mode 100644 index 00000000..e6e8cfc3 --- /dev/null +++ b/backend/typings/mongoengine/queryset/field_list.pyi @@ -0,0 +1,22 @@ +from _typeshed import Incomplete + +__all__ = ["QueryFieldList"] + +class QueryFieldList: + ONLY: int + EXCLUDE: int + value: Incomplete + fields: Incomplete + always_include: Incomplete + slice: Incomplete + def __init__( + self, + fields: Incomplete | None = None, + value=..., + always_include: Incomplete | None = None, + _only_called: bool = False, + ) -> None: ... + def __add__(self, f): ... + def __bool__(self) -> bool: ... + def as_dict(self): ... + def reset(self) -> None: ... diff --git a/backend/typings/mongoengine/queryset/manager.pyi b/backend/typings/mongoengine/queryset/manager.pyi new file mode 100644 index 00000000..aa641461 --- /dev/null +++ b/backend/typings/mongoengine/queryset/manager.pyi @@ -0,0 +1,12 @@ +from _typeshed import Incomplete +from mongoengine.queryset.queryset import QuerySet + +__all__ = ["queryset_manager", "QuerySetManager"] + +class QuerySetManager: + get_queryset: Incomplete + default = QuerySet + def __init__(self, queryset_func: Incomplete | None = None) -> None: ... + def __get__(self, instance, owner): ... + +def queryset_manager(func): ... 
diff --git a/backend/typings/mongoengine/queryset/queryset.pyi b/backend/typings/mongoengine/queryset/queryset.pyi new file mode 100644 index 00000000..ad2ddd08 --- /dev/null +++ b/backend/typings/mongoengine/queryset/queryset.pyi @@ -0,0 +1,28 @@ +from mongoengine.queryset.base import ( + BaseQuerySet, + CASCADE as CASCADE, + DENY as DENY, + DO_NOTHING as DO_NOTHING, + NULLIFY as NULLIFY, + PULL as PULL, +) + +__all__ = [ + "QuerySet", + "QuerySetNoCache", + "DO_NOTHING", + "NULLIFY", + "CASCADE", + "DENY", + "PULL", +] + +class QuerySet(BaseQuerySet): + def __iter__(self): ... + def __len__(self) -> int: ... + def count(self, with_limit_and_skip: bool = False): ... + def no_cache(self): ... + +class QuerySetNoCache(BaseQuerySet): + def cache(self): ... + def __iter__(self): ... diff --git a/backend/typings/mongoengine/queryset/transform.pyi b/backend/typings/mongoengine/queryset/transform.pyi new file mode 100644 index 00000000..8c217685 --- /dev/null +++ b/backend/typings/mongoengine/queryset/transform.pyi @@ -0,0 +1,8 @@ +from _typeshed import Incomplete + +__all__ = ["query", "update", "STRING_OPERATORS"] + +STRING_OPERATORS: Incomplete + +def query(_doc_cls: Incomplete | None = None, **kwargs): ... +def update(_doc_cls: Incomplete | None = None, **update): ... diff --git a/backend/typings/mongoengine/queryset/visitor.pyi b/backend/typings/mongoengine/queryset/visitor.pyi new file mode 100644 index 00000000..729f0405 --- /dev/null +++ b/backend/typings/mongoengine/queryset/visitor.pyi @@ -0,0 +1,48 @@ +from _typeshed import Incomplete +from mongoengine.errors import InvalidQueryError + +__all__ = ["Q", "QNode"] + +class QNodeVisitor: + def visit_combination(self, combination): ... + def visit_query(self, query): ... + +class DuplicateQueryConditionsError(InvalidQueryError): ... + +class SimplificationVisitor(QNodeVisitor): + def visit_combination(self, combination): ... + +class QueryCompilerVisitor(QNodeVisitor): + document: Incomplete + def __init__(self, document) -> None: ... + def visit_combination(self, combination): ... + def visit_query(self, query): ... + +class QNode: + AND: int + OR: int + def to_query(self, document): ... + def accept(self, visitor) -> None: ... + @property + def empty(self): ... + def __or__(self, other): ... + def __and__(self, other): ... + +class QCombination(QNode): + operation: Incomplete + children: Incomplete + def __init__(self, operation, children) -> None: ... + def __bool__(self) -> bool: ... + def accept(self, visitor): ... + @property + def empty(self): ... + def __eq__(self, other): ... + +class Q(QNode): + query: Incomplete + def __init__(self, **query) -> None: ... + def __bool__(self) -> bool: ... + def __eq__(self, other): ... + def accept(self, visitor): ... + @property + def empty(self): ... diff --git a/backend/typings/mongoengine/signals.pyi b/backend/typings/mongoengine/signals.pyi new file mode 100644 index 00000000..856929e2 --- /dev/null +++ b/backend/typings/mongoengine/signals.pyi @@ -0,0 +1,33 @@ +from _typeshed import Incomplete + +__all__ = [ + "pre_init", + "post_init", + "pre_save", + "pre_save_post_validation", + "post_save", + "pre_delete", + "post_delete", +] + +class Namespace: + def signal(self, name, doc: Incomplete | None = None): ... + +class _FakeSignal: + name: Incomplete + __doc__: Incomplete + def __init__(self, name, doc: Incomplete | None = None) -> None: ... 
+ send: Incomplete + connect: Incomplete + disconnect: Incomplete + has_receivers_for: Incomplete + receivers_for: Incomplete + temporarily_connected_to: Incomplete + +pre_init: Incomplete +post_init: Incomplete +pre_save: Incomplete +pre_save_post_validation: Incomplete +post_save: Incomplete +pre_delete: Incomplete +post_delete: Incomplete diff --git a/e2e-tests/test_entity_gql.py b/e2e-tests/test_entity_gql.py index d8bb055b..880e17f6 100644 --- a/e2e-tests/test_entity_gql.py +++ b/e2e-tests/test_entity_gql.py @@ -110,7 +110,7 @@ def create_entity(backend_url, auth_header, body, fs, file, filename_field): enumField fileName } - } + } """ # https://github.com/jaydenseric/graphql-multipart-request-spec operations = json.dumps( @@ -137,7 +137,7 @@ def create_entity(backend_url, auth_header, body, fs, file, filename_field): boolField enumField } - } + } """ response = requests.post( f"{backend_url}/graphql", diff --git a/e2e-tests/test_simple_entity_gql.py b/e2e-tests/test_simple_entity_gql.py index 5951263e..1dacf704 100644 --- a/e2e-tests/test_simple_entity_gql.py +++ b/e2e-tests/test_simple_entity_gql.py @@ -59,7 +59,7 @@ def create_entity(backend_url, auth_header, body): boolField enumField } - } + } """ response = requests.post( f"{backend_url}/graphql", diff --git a/e2e-tests/test_user_gql.py b/e2e-tests/test_user_gql.py index b75a796d..3e174154 100644 --- a/e2e-tests/test_user_gql.py +++ b/e2e-tests/test_user_gql.py @@ -78,7 +78,7 @@ def create_user(backend_url, auth_header, body): email role } - } + } """ response = requests.post( f"{backend_url}/graphql", @@ -104,7 +104,7 @@ def update_user(backend_url, auth_header, id, body): email role } - } + } """ response = requests.post( f"{backend_url}/graphql", diff --git a/frontend/.eslintrc.js b/frontend/.eslintrc.js index 2492c4b8..2bdb6ac7 100644 --- a/frontend/.eslintrc.js +++ b/frontend/.eslintrc.js @@ -20,14 +20,14 @@ module.exports = { "airbnb", "airbnb-typescript", "prettier", - "plugin:prettier/recommended", + // "plugin:prettier/recommended", "plugin:react/recommended", "plugin:react-hooks/recommended", "plugin:@typescript-eslint/eslint-recommended", "plugin:@typescript-eslint/recommended", ], rules: { - "prettier/prettier": ["warn", { endOfLine: "auto" }], + // "prettier/prettier": ["warn", { endOfLine: "auto" }], "react/require-default-props": "off", "react/no-array-index-key": "off", "jsx-a11y/click-events-have-key-events": "off", @@ -41,6 +41,7 @@ module.exports = { }, ], "sort-imports": ["error", { ignoreDeclarationSort: true }], + "prefer-destructuring": ["off"], "import/order": [ "error", { @@ -57,6 +58,8 @@ module.exports = { }, }, ], + "import/prefer-default-export": ["off"], + "@typescript-eslint/naming-convention": "off", }, ignorePatterns: ["build/*"], }; diff --git a/frontend/Dockerfile b/frontend/Dockerfile index 38afd3ce..efa3710d 100644 --- a/frontend/Dockerfile +++ b/frontend/Dockerfile @@ -1,6 +1,5 @@ FROM node:20-slim - WORKDIR /app COPY package.json yarn.lock ./ diff --git a/frontend/babel.config.js b/frontend/babel.config.js new file mode 100644 index 00000000..721e8b82 --- /dev/null +++ b/frontend/babel.config.js @@ -0,0 +1 @@ +module.exports = { presets: ["@babel/preset-env"] }; diff --git a/frontend/package.json b/frontend/package.json index 14ee70d5..99bc8321 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -3,6 +3,7 @@ "version": "0.1.0", "private": true, "dependencies": { + "@babel/preset-env": "^7.22.14", "@apollo/client": "^3.8.0", "@chakra-ui/icons": "^2.0.17", "@chakra-ui/react": 
"^2.4.1", @@ -12,6 +13,7 @@ "@react-oauth/google": "^0.5.0", "@rjsf/bootstrap-4": "^2.5.1", "@rjsf/core": "^2.5.1", + "@table-library/react-table-library": "^4.1.4", "@tanstack/react-table": "^8.7.0", "apollo-upload-client": "^17.0.0", "axios": "^0.21.1", @@ -24,6 +26,7 @@ "react": "^18.2.0", "react-bootstrap": "^1.5.2", "react-dom": "^18.2.0", + "react-icons": "^4.11.0", "react-json-schema": "^1.2.2", "react-jsonschema-form": "^1.8.1", "react-multi-date-picker": "^4.4.1", @@ -35,8 +38,8 @@ "scripts": { "start": "react-scripts start", "build": "react-scripts build", - "test": "react-scripts test", - "test:ci": " react-scripts test --watchAll=false", + "test": "react-scripts test --transformIgnorePatterns \"node_modules/(?!@codemirror)/\"", + "test:ci": " react-scripts test --watchAll=false --transformIgnorePatterns \"node_modules/(?!@codemirror)/\" --verbose", "eject": "react-scripts eject", "lint": "eslint . --ext .ts,.tsx,.js,.jsx --cache", "fix": "eslint . --ext .ts,.tsx,.js,.jsx --fix --cache" @@ -47,6 +50,11 @@ "react-app/jest" ] }, + "jest": { + "transform": { + "^.+\\.[t|j]sx?$": "babel-jest" + } + }, "browserslist": { "production": [ ">0.2%", diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx index 83bacb69..4da6f724 100644 --- a/frontend/src/App.tsx +++ b/frontend/src/App.tsx @@ -24,13 +24,21 @@ const App = (): React.ReactElement => { // Providers for app-specific state like contexts are here. // For providers for libraries like Apollo and OAuth, see index.tsx. return ( - - -
- -