diff --git a/.github/workflows/django.yml b/.github/workflows/django.yml index c9a56e5b3e..627a9e1879 100644 --- a/.github/workflows/django.yml +++ b/.github/workflows/django.yml @@ -21,6 +21,15 @@ jobs: --health-interval 10s --health-timeout 5s --health-retries 5 + redis: + image: redis + options: >- + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 6379:6379 steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} @@ -60,7 +69,7 @@ jobs: ${{ runner.os }}-build-${{ env.cache-name }}- ${{ runner.os }}-build- ${{ runner.os }}- - - name: check a4 hashes equal + - name: Check a4 hashes equal run: | ./scripts/a4-check.sh - name: Install Dependencies diff --git a/changelog/2223.md b/changelog/2223.md new file mode 100644 index 0000000000..ae65f0e4d3 --- /dev/null +++ b/changelog/2223.md @@ -0,0 +1,3 @@ +### Added + +- logger in apps init file diff --git a/changelog/7275.md b/changelog/7275.md index f5620e0ea2..6eae80d3ba 100644 --- a/changelog/7275.md +++ b/changelog/7275.md @@ -1,5 +1,5 @@ -### Changed +### Added -- improves performance of the api endpoint `api/plans/` by prefetching projects -- for details, see `docs/performance_of_projects_overview.md` +- enables caching for api endpoints `api/{plans,extprojects,projects}/` +- caches are expired by signals and by periodic tasks, for details, see `docs/api_caching.md` diff --git a/docs/api_caching.md b/docs/api_caching.md new file mode 100644 index 0000000000..7166162fcb --- /dev/null +++ b/docs/api_caching.md @@ -0,0 +1,60 @@ +## Background + +We have noticed that the page load of `mein.berlin.de/projekte/` is pretty slow with about 6s for 550 projects. 
Three API calls are particularly slow: + +- https://mein.berlin.de/api/projects/?status=pastParticipation 2.811s +- https://mein.berlin.de/api/plans/ 3.613s +- https://mein.berlin.de/api/extprojects/ 5.041s + +These paths correspond to the following api views: + +- `projects/api.py::ProjectListViewSet` +- `plans/api.py::PlansListViewSet` +- `extprojects/api.py::ExternalProjectListViewSet` + +We decided to start caching the endpoints with redis. + +## Developer Notes + +The cache target is the `list` method of the following views: + +- `ExternalProjectListViewSet` +- `PlansListViewSet` +- `ProjectListViewSet` +- `PrivateProjectListViewSet` + +Cache keys expire after a timeout (default value 1h) or if a context-specific signal is received (e.g. cache keys for projects are deleted if the signal for a saved project is detected). + +The cache keys for projects are constructed by the view namespace and their status if it exists: +- `projects_activeParticipation` +- `projects_pastParticipation` +- `projects_futureParticipation` +- `private_projects` +- `extprojects` +- `plans` + +## Celery tasks + +A periodic task checks for projects that will become either active or past in the next 10 minutes. +- schedule_reset_cache_for_projects() + +In case of projects becoming active the cache is cleared for: +- `projects_activeParticipation` +- `projects_futureParticipation` +- `private_projects` +- `extprojects` + +In case of projects becoming past the cache is cleared for: +- `projects_activeParticipation` +- `projects_pastParticipation` +- `private_projects` +- `extprojects` + +In production, we use django's built-in [Redis](https://docs.djangoproject.com/en/4.2/topics/cache/#redis) as cache backend (see `settings/production.py::CACHES`). For development and testing the cache backend is the default, that is [local memory](https://docs.djangoproject.com/en/4.2/topics/cache/#local-memory-caching). 
If you want to enable redis cache for local development, then copy the production settings to your `settings/local.py`. + +files: +- `./meinberlin/apps/plans/api.py` +- `./meinberlin/apps/extprojects/api.py` +- `./meinberlin/apps/projects/api.py` +- `./meinberlin/apps/projects/tasks.py` +- `./meinberlin/config/settings/production.py` diff --git a/meinberlin/apps/__init__.py b/meinberlin/apps/__init__.py index e69de29bb2..eea436a379 100644 --- a/meinberlin/apps/__init__.py +++ b/meinberlin/apps/__init__.py @@ -0,0 +1,3 @@ +import logging + +logger = logging.getLogger(__name__) diff --git a/meinberlin/apps/bplan/tasks.py b/meinberlin/apps/bplan/tasks.py index b9b29c6db8..9cff1d38fa 100644 --- a/meinberlin/apps/bplan/tasks.py +++ b/meinberlin/apps/bplan/tasks.py @@ -1,15 +1,13 @@ import json -import logging import urllib from celery import shared_task from adhocracy4.administrative_districts.models import AdministrativeDistrict from adhocracy4.projects.models import Topic +from meinberlin.apps import logger from meinberlin.apps.bplan.models import Bplan -logger = logging.getLogger(__name__) - def get_features_from_bplan_api(endpoint): url = "https://bplan-prod.liqd.net/api/" + endpoint diff --git a/meinberlin/apps/extprojects/api.py b/meinberlin/apps/extprojects/api.py index 87c1681572..42984b6b4d 100644 --- a/meinberlin/apps/extprojects/api.py +++ b/meinberlin/apps/extprojects/api.py @@ -1,5 +1,7 @@ +from django.core.cache import cache from django.utils import timezone from rest_framework import viewsets +from rest_framework.response import Response from adhocracy4.projects.enums import Access from meinberlin.apps.extprojects.models import ExternalProject @@ -18,3 +20,13 @@ def get_queryset(self): def get_serializer(self, *args, **kwargs): now = timezone.now() return ExternalProjectSerializer(now=now, *args, **kwargs) + + def list(self, request, *args, **kwargs): + data = cache.get("extprojects") + if data is None: + queryset = 
self.filter_queryset(self.get_queryset()) + serializer = self.get_serializer(queryset, many=True) + data = serializer.data + cache.set("extprojects", data) + + return Response(data) diff --git a/meinberlin/apps/extprojects/apps.py b/meinberlin/apps/extprojects/apps.py index f9f064b6d4..bdffe45c4b 100644 --- a/meinberlin/apps/extprojects/apps.py +++ b/meinberlin/apps/extprojects/apps.py @@ -4,3 +4,6 @@ class Config(AppConfig): name = "meinberlin.apps.extprojects" label = "meinberlin_extprojects" + + def ready(self): + import meinberlin.apps.extprojects.signals # noqa:F401 diff --git a/meinberlin/apps/extprojects/signals.py b/meinberlin/apps/extprojects/signals.py new file mode 100644 index 0000000000..82b147b434 --- /dev/null +++ b/meinberlin/apps/extprojects/signals.py @@ -0,0 +1,12 @@ +from django.core.cache import cache +from django.db.models.signals import post_delete +from django.db.models.signals import post_save +from django.dispatch import receiver + +from .models import ExternalProject + + +@receiver(post_save, sender=ExternalProject) +@receiver(post_delete, sender=ExternalProject) +def reset_cache(sender, instance, *args, **kwargs): + cache.delete("extprojects") diff --git a/meinberlin/apps/plans/api.py b/meinberlin/apps/plans/api.py index 822d04c78a..40e39daef5 100644 --- a/meinberlin/apps/plans/api.py +++ b/meinberlin/apps/plans/api.py @@ -1,4 +1,6 @@ +from django.core.cache import cache from rest_framework import viewsets +from rest_framework.response import Response from meinberlin.apps.plans.models import Plan from meinberlin.apps.plans.serializers import PlanSerializer @@ -9,3 +11,13 @@ class PlansListViewSet(viewsets.ReadOnlyModelViewSet): def get_queryset(self): return Plan.objects.filter(is_draft=False).prefetch_related("projects") + + def list(self, request, *args, **kwargs): + data = cache.get("plans") + if data is None: + queryset = self.filter_queryset(self.get_queryset()) + serializer = self.get_serializer(queryset, many=True) + data = 
serializer.data + cache.set("plans", data) + + return Response(data) diff --git a/meinberlin/apps/plans/apps.py b/meinberlin/apps/plans/apps.py index 979032a2af..ccd214e7c6 100644 --- a/meinberlin/apps/plans/apps.py +++ b/meinberlin/apps/plans/apps.py @@ -4,3 +4,6 @@ class Config(AppConfig): name = "meinberlin.apps.plans" label = "meinberlin_plans" + + def ready(self): + import meinberlin.apps.plans.signals # noqa:F401 diff --git a/meinberlin/apps/plans/signals.py b/meinberlin/apps/plans/signals.py new file mode 100644 index 0000000000..5018dc9681 --- /dev/null +++ b/meinberlin/apps/plans/signals.py @@ -0,0 +1,12 @@ +from django.core.cache import cache +from django.db.models.signals import post_delete +from django.db.models.signals import post_save +from django.dispatch import receiver + +from .models import Plan + + +@receiver(post_save, sender=Plan) +@receiver(post_delete, sender=Plan) +def reset_cache(sender, instance, *args, **kwargs): + cache.delete("plans") diff --git a/meinberlin/apps/projects/api.py b/meinberlin/apps/projects/api.py index b0cfaffcdf..f2e0f26bd8 100644 --- a/meinberlin/apps/projects/api.py +++ b/meinberlin/apps/projects/api.py @@ -1,7 +1,9 @@ +from django.core.cache import cache from django.db.models import Q from django.utils import timezone from django_filters.rest_framework import DjangoFilterBackend from rest_framework import viewsets +from rest_framework.response import Response from adhocracy4.projects.enums import Access from adhocracy4.projects.models import Project @@ -39,6 +41,18 @@ def get_queryset(self): ) return projects + def list(self, request, *args, **kwargs): + statustype = "" + if "status" in self.request.GET: + statustype = self.request.GET["status"] + data = cache.get("projects_" + statustype) + if data is None: + queryset = self.filter_queryset(self.get_queryset()) + serializer = self.get_serializer(queryset, many=True) + data = serializer.data + cache.set("projects_" + statustype, data) + return Response(data) + def 
get_serializer(self, *args, **kwargs): if "status" in self.request.GET: statustype = self.request.GET["status"] @@ -64,9 +78,12 @@ def __init__(self, *args, **kwargs): self.now = now def get_queryset(self): - private_projects = Project.objects.filter( - is_draft=False, is_archived=False, access=Access.PRIVATE - ) + private_projects = cache.get("private_projects") + if private_projects is None: + private_projects = Project.objects.filter( + is_draft=False, is_archived=False, access=Access.PRIVATE + ) + cache.set("private_projects", private_projects) if private_projects: not_allowed_projects = [ project.id diff --git a/meinberlin/apps/projects/apps.py b/meinberlin/apps/projects/apps.py index ae82a48939..eb63c64047 100644 --- a/meinberlin/apps/projects/apps.py +++ b/meinberlin/apps/projects/apps.py @@ -6,6 +6,7 @@ class Config(AppConfig): label = "meinberlin_projects" def ready(self): - from . import overwrites + import meinberlin.apps.projects.signals # noqa:F401 + from meinberlin.apps.projects import overwrites overwrites.overwrite_access_enum_label() diff --git a/meinberlin/apps/projects/filters.py b/meinberlin/apps/projects/filters.py index 15de46c1a5..b966e8f8d8 100644 --- a/meinberlin/apps/projects/filters.py +++ b/meinberlin/apps/projects/filters.py @@ -3,7 +3,6 @@ class StatusFilter(filters.BaseFilterBackend): def filter_queryset(self, request, queryset, view): - now = view.now if "status" in request.GET: diff --git a/meinberlin/apps/projects/signals.py b/meinberlin/apps/projects/signals.py new file mode 100644 index 0000000000..a5e0b0268c --- /dev/null +++ b/meinberlin/apps/projects/signals.py @@ -0,0 +1,49 @@ +from django.core.cache import cache +from django.db.models.signals import post_delete +from django.db.models.signals import post_save +from django.dispatch import receiver + +from adhocracy4.dashboard import signals as a4dashboard_signals +from adhocracy4.projects.models import Project +from meinberlin.apps.projects.tasks import get_next_projects_end 
+from meinberlin.apps.projects.tasks import get_next_projects_start + + +@receiver(a4dashboard_signals.project_created) +@receiver(a4dashboard_signals.project_published) +@receiver(a4dashboard_signals.project_unpublished) +def post_dashboard_signal_delete(sender, project, user, **kwargs): + cache.delete_many( + [ + "projects_activeParticipation", + "projects_futureParticipation", + "projects_pastParticipation", + "private_projects", + "extprojects", + ] + ) + + +@receiver(post_save, sender=Project) +@receiver(post_delete, sender=Project) +def post_save_delete(sender, instance, *args, **kwargs): + """ + Delete cache for project list views. + Capture any new phases that may got created/updated while saving a project. + """ + + cache.delete_many( + [ + "projects_activeParticipation", + "projects_futureParticipation", + "projects_pastParticipation", + "private_projects", + "extprojects", + ] + ) + + # set cache for the next projects that will be published in the next 10min + get_next_projects_start() + + # set cache for the next project that ends and should be unpublished + get_next_projects_end() diff --git a/meinberlin/apps/projects/tasks.py b/meinberlin/apps/projects/tasks.py new file mode 100644 index 0000000000..547e7a2fe7 --- /dev/null +++ b/meinberlin/apps/projects/tasks.py @@ -0,0 +1,195 @@ +from datetime import datetime +from datetime import timedelta +from datetime import timezone + +from celery import shared_task +from django.core.cache import cache + +from adhocracy4.phases.models import Phase +from meinberlin.apps import logger + + +def get_next_projects_start() -> list: + """ + Helper function to query the db and retrieve the + phases for projects that will start in the next 10min. + + Returns: + A list with the phases timestamp and remaining seconds. 
+ """ + now = datetime.now(tz=timezone.utc) # tz is UTC + phases = ( + Phase.objects.filter( + module__is_draft=False, + start_date__isnull=False, + start_date__range=[now, now + timedelta(minutes=10)], + ) + .order_by("start_date") + .all() + ) + list_format_phases = [] + if phases: + for phase in phases: + # compare now with next start date + phase_start_date = phase.start_date.astimezone(timezone.utc) + remain_time = phase_start_date - now + str_phase = phase_start_date.strftime("%Y-%m-%d, %H:%M:%S %Z") + list_format_phases.append( + (str_phase, remain_time.total_seconds(), "future") + ) + + # set the redis key: value + cache.set("next_projects_start", list_format_phases) + + return list_format_phases + + +def get_next_projects_end() -> list: + """ + Helper function to query the db and + retrieve the earliest phase that will end. + + Returns: + A list with the phases timestamp and remaining seconds. + """ + now = datetime.now(tz=timezone.utc) # tz is UTC + phases = ( + Phase.objects.filter( + module__is_draft=False, + end_date__isnull=False, + end_date__range=[now, now + timedelta(minutes=10)], + ) + .order_by("end_date") + .all() + ) + + list_format_phases = [] + if phases: + for phase in phases: + # compare now with next start date + phase_end_date = phase.end_date.astimezone(timezone.utc) + remain_time = phase_end_date - now + str_phase = phase_end_date.strftime("%Y-%m-%d, %H:%M:%S %Z") + list_format_phases.append( + (str_phase, remain_time.total_seconds(), "active") + ) + + # set the redis key: value + cache.set("next_projects_end", list_format_phases) + + return list_format_phases + + +@shared_task(name="schedule_reset_cache_for_projects") +def schedule_reset_cache_for_projects() -> bool: + """The task is set via celery beat every 10 minutes in + settings/production.txt. + + Returns: + A boolean indicating if there are projects + becoming past or active in the next 10 minutes + and when the cache will be cleared. 
+ + The task logs an info message with a list + of the phases timestamps. + """ + + msg = "Projects will be removed from cache: " + success = False + starts = False + ends = False + + # check if redis has cache for past projects ending + list_projects_end = cache.get("next_projects_end") + if not list_projects_end: + list_projects_end = get_next_projects_end() + + # check if redis has cache for future projects starting + list_projects_start = cache.get("next_projects_start") + if not list_projects_start: + list_projects_start = get_next_projects_start() + + list_timestamps = list_projects_end + list_projects_start + + if list_timestamps: + for timestamp in list_timestamps: + project_phase = timestamp[0] + remain_time = timestamp[1] + project_status = timestamp[2] + if project_status == "future": + starts = True + else: + ends = True + # schedule cache clear for the seconds between now and next end + reset_cache_for_projects.apply_async([starts, ends], countdown=remain_time) + msg += f""" + {project_status} {project_phase} in {remain_time/60} minutes + """ + success = True + else: + msg += "None" + + logger.info(msg) + return success + + +@shared_task +def reset_cache_for_projects(starts: bool, ends: bool) -> str: + """ + Task called by schedule_reset_cache_for_projects + and clears cache for projects. + + Returns: + A message indicating the participation + status of the projects that the cache + succeeded or failed to clear along with + other relevant types of projects. 
+ """ + + msg = "Clear cache " + if starts: + # remove redis key next_project_start + cache.delete("next_projects_start") + cache.delete_many( + [ + "projects_activeParticipation", + "projects_futureParticipation", + "private_projects", + "extprojects", + ] + ) + if cache.get_many( + [ + "projects_activeParticipation", + "projects_futureParticipation", + "private_projects", + "extprojects", + ] + ): + msg += "failed for future projects becoming active" + else: + msg += "succeeded for future projects becoming active" + if ends: + # remove redis key next_projects_end + cache.delete("next_projects_end") + cache.delete_many( + [ + "projects_activeParticipation", + "projects_pastParticipation", + "private_projects", + "extprojects", + ] + ) + if cache.get_many( + [ + "projects_activeParticipation", + "projects_pastParticipation", + "private_projects", + "extprojects", + ] + ): + msg += "failed for active projects becoming past" + else: + msg += "succeeded for active projects becoming past" + logger.info(msg) + return msg diff --git a/meinberlin/config/settings/dev.py b/meinberlin/config/settings/dev.py index 12c155de9d..60ca30c9e2 100644 --- a/meinberlin/config/settings/dev.py +++ b/meinberlin/config/settings/dev.py @@ -9,6 +9,13 @@ # SECURITY WARNING: keep the secret key used in production secret! 
SECRET_KEY = "qid$h1o8&wh#p(j)lifis*5-rf@lbiy8%^3l4x%@b$z(tli@ab" +CACHES = { + "default": { + "BACKEND": "django.core.cache.backends.locmem.LocMemCache", + "LOCATION": "unique-snowflake", + } +} + CELERY_TASK_ALWAYS_EAGER = True try: diff --git a/meinberlin/config/settings/production.py b/meinberlin/config/settings/production.py index 072711b3d9..e452a93049 100644 --- a/meinberlin/config/settings/production.py +++ b/meinberlin/config/settings/production.py @@ -1,6 +1,17 @@ +from celery.schedules import crontab + from .base import * DEBUG = False + +CACHES = { + "default": { + "BACKEND": "django.core.cache.backends.redis.RedisCache", + "LOCATION": "redis://127.0.0.1:6379/1", # defaut is 0 and is taken by celery for backend results + "TIMEOUT": 60, + } +} + STORAGES = { "default": { "BACKEND": "django.core.files.storage.FileSystemStorage", @@ -30,3 +41,11 @@ CKEDITOR_CONFIGS["video-editor"]["embed_provider"] = CKEDITOR_URL except NameError: pass + +CELERY_BEAT_SCHEDULE = { + "update-cache-for-projects-every-10-mim": { + "task": "schedule_reset_cache_for_projects", + "schedule": crontab(minute="*/10"), + "args": (), + }, +} diff --git a/meinberlin/config/settings/travis.py b/meinberlin/config/settings/travis.py index 858051b8bb..6f5e0727c2 100644 --- a/meinberlin/config/settings/travis.py +++ b/meinberlin/config/settings/travis.py @@ -1,5 +1,12 @@ from .test import * +CACHES = { + "default": { + "BACKEND": "django.core.cache.backends.redis.RedisCache", + "LOCATION": "redis://127.0.0.1:6379/1", + } +} + DATABASES = { "default": { "ENGINE": "django.db.backends.postgresql", diff --git a/requirements/prod.txt b/requirements/prod.txt index efad5af4fe..64cf70a43d 100644 --- a/requirements/prod.txt +++ b/requirements/prod.txt @@ -1,5 +1,4 @@ -r base.txt -django-redis==5.3.0 hiredis==2.2.3 gunicorn==20.1.0 psycopg[c]==3.1.12 diff --git a/tests/conftest.py b/tests/conftest.py index 6b863b7092..df4de65404 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,6 +1,7 @@ 
import factory import pytest from celery import Celery +from django.core.cache import cache from django.urls import reverse from pytest_factoryboy import register from rest_framework.test import APIClient @@ -91,3 +92,9 @@ def logout_url(): @pytest.fixture def signup_url(): return reverse("account_signup") + + +@pytest.fixture(scope="function", autouse=True) +def cache_clear(): + yield cache + cache.clear() diff --git a/tests/projects/test_api_caching.py b/tests/projects/test_api_caching.py new file mode 100644 index 0000000000..8112daba9c --- /dev/null +++ b/tests/projects/test_api_caching.py @@ -0,0 +1,178 @@ +import pytest +from dateutil.parser import parse +from django.core.cache import cache +from django.urls import reverse +from django.utils import timezone + +from adhocracy4.projects.enums import Access +from adhocracy4.test.factories import PhaseFactory +from adhocracy4.test.factories import ProjectFactory +from meinberlin.test.factories.extprojects import ExternalProjectFactory +from meinberlin.test.factories.plans import PlanFactory + + +@pytest.mark.django_db +@pytest.mark.parametrize( + "namespace,url_name,factory,factory_kwargs", + [ + ("plans", "plans-list", PlanFactory, {}), + ( + "extprojects", + "extprojects-list", + ExternalProjectFactory, + {"access": Access.PUBLIC, "is_draft": False, "is_archived": False}, + ), + ], +) +def test_calling_plans_extprojects_list_creates_cached_value( + client, namespace, url_name, factory, factory_kwargs, django_assert_num_queries +): + n_objects = 3 + cache_key = namespace + cache_value_before = cache.get(cache_key) + + objects = factory.create_batch(size=n_objects, **factory_kwargs) + + # check cache is set when calling the endpoint + url = reverse(url_name) + response = client.get(url) + cache_value_after = cache.get(cache_key) + + assert response.status_code == 200 + assert cache_value_before is None + assert len(cache_value_after) == len(objects) == n_objects + assert response.status_code == 200 + assert 
response.data == cache_value_after + + # check if query cache refrains from hitting the db + with django_assert_num_queries(0): + response = client.get(url) + assert response.status_code == 200 + assert len(response.data) == len(objects) == n_objects + + # check cache is clear when updating an object + obj = objects[0] # fetch the first object + obj.config_name = "admin" + obj.save() + cache_value_post_saving = cache.get(cache_key) + assert cache_value_post_saving is None + + # check cache is clear when deleting an object + obj = objects[0] # fetch the first object + obj.delete() + cache_value_post_saving = cache.get(cache_key) + assert cache_value_post_saving is None + + # check cache is set when calling the endpoint + response = client.get(url) + cache_value_after = cache.get(cache_key) + assert len(cache_value_after) == len(objects) - 1 == n_objects - 1 + + # check cache is clear when creating a new object + factory.create(**factory_kwargs) + cache_value_post_saving = cache.get(cache_key) + assert cache_value_post_saving is None + + # check cache is set when calling the endpoint + response = client.get(url) + cache_value_after = cache.get(cache_key) + assert len(cache_value_after) == len(objects) == n_objects + + cache.delete(cache_key) + + +@pytest.mark.django_db +@pytest.mark.parametrize( + "namespace,statustype,url_name,phase_factory,factory", + [ + ( + "projects_", + "activeParticipation", + "projects-list", + PhaseFactory, + ProjectFactory, + ), + ( + "projects_", + "futureParticipation", + "projects-list", + PhaseFactory, + ProjectFactory, + ), + ( + "projects_", + "pastParticipation", + "projects-list", + PhaseFactory, + ProjectFactory, + ), + ], +) +def test_calling_list_api_creates_cached_value( + client, + namespace, + statustype, + url_name, + phase_factory, + factory, + django_assert_num_queries, +): + n_objects = 3 + cache_key = namespace + statustype + cache_value_before = cache.get(cache_key) + + objects = factory.create_batch(size=n_objects) + + # make dates to work 
with phase_factory + now = parse("2023-10-11 18:00:00 UTC") + yesterday = now - timezone.timedelta(days=1) + tomorrow = now + timezone.timedelta(days=1) + last_week = now - timezone.timedelta(days=7) + next_week = now + timezone.timedelta(days=7) + + # assign phases to projects + if "active" in statustype: + # make them active projects + for project_active in objects: + phase_factory( + start_date=last_week, + end_date=next_week, + module__project=project_active, + ) + if "future" in statustype: + # make them future projects + for project_future in objects: + phase_factory( + start_date=tomorrow, + end_date=next_week, + module__project=project_future, + ) + if "past" in statustype: + # make them past projects + for project_past in objects: + phase_factory( + start_date=last_week, + end_date=yesterday, + module__project=project_past, + ) + + from freezegun import freeze_time + + with freeze_time(now): + url = reverse(url_name) + f"?status={statustype}" + response = client.get(url) + cache_value_after = cache.get(cache_key) + + assert response.status_code == 200 + assert cache_value_before is None + assert len(cache_value_after) == len(objects) == n_objects + assert response.status_code == 200 + assert response.data == cache_value_after + + with django_assert_num_queries(0): + url = reverse(url_name) + f"?status={statustype}" + response = client.get(url) + assert response.status_code == 200 + assert response.data == cache_value_after + + cache.delete(cache_key) diff --git a/tests/projects/test_tasks_caching.py b/tests/projects/test_tasks_caching.py new file mode 100644 index 0000000000..6a378f7f0f --- /dev/null +++ b/tests/projects/test_tasks_caching.py @@ -0,0 +1,116 @@ +from datetime import datetime +from datetime import timedelta +from datetime import timezone + +import pytest +from django.core.cache import cache +from freezegun import freeze_time + +from meinberlin.apps.projects.tasks import get_next_projects_end +from meinberlin.apps.projects.tasks import 
get_next_projects_start +from meinberlin.apps.projects.tasks import schedule_reset_cache_for_projects + + +@pytest.mark.django_db +def test_task_schedule_reset_cache_for_projects_becoming_active( + client, phase_factory, project_factory, django_assert_num_queries +): + n_objects = 6 + objects = project_factory.create_batch(size=n_objects) + + # make dates to work with phase_factory + now = datetime.now(tz=timezone.utc) + last_week = now - timedelta(days=7) + next_week = now + timedelta(days=7) + + # make active projects + active_projects = objects[:3] + for proj in active_projects: + phase_factory( + start_date=last_week, + end_date=next_week, + module__project=proj, + ) + # make future projects + count = 2 + future_projects = objects[3:6] + for proj in future_projects: + phase_factory( + start_date=now + timedelta(minutes=count), + end_date=next_week, + module__project=proj, + ) + count += 2 + + # check function get_next_projects_start + with django_assert_num_queries(1): + future_projects_timestamps = get_next_projects_start() + assert len(future_projects) == len(future_projects_timestamps) + + # call celery task inline as a function + with freeze_time(now): + next_projects_start = cache.get("next_projects_start") + assert next_projects_start is not None + assert next_projects_start == future_projects_timestamps + + result = schedule_reset_cache_for_projects() + assert result is True + + with freeze_time(now + timedelta(minutes=11)): + result = schedule_reset_cache_for_projects() + assert result is False + + next_projects_start = cache.get("next_projects_start") + assert next_projects_start is None + + +@pytest.mark.django_db +def test_task_schedule_reset_cache_for_projects_becoming_past( + client, phase_factory, project_factory, django_assert_num_queries +): + n_objects = 4 + objects = project_factory.create_batch(size=n_objects) + + # make dates to work with phase_factory + now = datetime.now(tz=timezone.utc) + last_week = now - timedelta(days=7) + next_week = 
now + timedelta(days=7) + + # make active projects + active_projects = objects[:3] + for proj in active_projects: + phase_factory( + start_date=last_week, + end_date=next_week, + module__project=proj, + ) + + # make project that will become past in the next 10 mins + become_past_project = objects[3] + phase_factory( + start_date=last_week, + end_date=now + timedelta(minutes=5), + module__project=become_past_project, + ) + # check function get_next_projects_start + with django_assert_num_queries(1): + project_timestamps = get_next_projects_end() + + str_end_date = become_past_project.end_date.strftime("%Y-%m-%d, %H:%M:%S %Z") + assert str_end_date == project_timestamps[0][0] + + # call celery task inline as a function + with freeze_time(now): + next_projects_end = cache.get("next_projects_end") + assert next_projects_end is not None + assert next_projects_end == project_timestamps + + result = schedule_reset_cache_for_projects() + assert result is True + + with freeze_time(now + timedelta(minutes=11)): + result = schedule_reset_cache_for_projects() + assert result is False + + next_projects_end = cache.get("next_projects_end") + assert next_projects_end is None