From 973697891160b446af3ac0b2b741498134f303a2 Mon Sep 17 00:00:00 2001
From: ronardcaktus
Date: Wed, 23 Oct 2024 13:44:39 -0400
Subject: [PATCH] Update documentation, remove django-cache-machine and references, commit Colin's notes

---
 .github/workflows/deploy.yaml  |  2 +-
 docs/data-import.rst           |  4 ++--
 docs/dev-setup.rst             |  2 +-
 nc/data/importer.py            |  5 ++---
 nc/filters.py                  |  2 +-
 nc/models.py                   | 25 ++++++-------------------
 nc/notebooks/requirements.txt  |  2 +-
 nc/prime_cache.py              |  4 ++--
 nc/tasks.py                    | 10 ++++++++++
 nc/views/main.py               |  6 ------
 requirements/base/base.in      |  3 +--
 requirements/base/base.txt     |  6 ++----
 requirements/deploy/deploy.txt |  2 +-
 requirements/dev/dev.txt       | 12 +++++-------
 requirements/test/test.txt     | 11 +----------
 15 files changed, 36 insertions(+), 60 deletions(-)

diff --git a/.github/workflows/deploy.yaml b/.github/workflows/deploy.yaml
index c9698ea5..1561ffe0 100755
--- a/.github/workflows/deploy.yaml
+++ b/.github/workflows/deploy.yaml
@@ -2,7 +2,7 @@ name: deploy

 on:
   push:
-    branches: [main, develop, CU-8686edfh0-performance]
+    branches: [main, develop, CU-86897a79g-cache-misses]

 jobs:
   deploy:
diff --git a/docs/data-import.rst b/docs/data-import.rst
index 17e6e3bb..6e5c86c3 100644
--- a/docs/data-import.rst
+++ b/docs/data-import.rst
@@ -25,8 +25,8 @@ To load an existing database dump on S3, run:
 .. code-block:: bash

     dropdb traffic_stops_nc && createdb -E UTF-8 traffic_stops_nc
-    aws s3 cp s3://traffic-stops/traffic_stops_nc.dump .
-    pg_restore -Ox -d traffic_stops_nc traffic_stops_nc.dump
+    aws s3 cp s3://forwardjustice-trafficstops/trafficstops-staging_database.dump .
+    pg_restore -Ox -d traffic_stops_nc trafficstops-staging_database.dump


 Raw NC Data (slower)
diff --git a/docs/dev-setup.rst b/docs/dev-setup.rst
index fd83b77f..a09c3eae 100755
--- a/docs/dev-setup.rst
+++ b/docs/dev-setup.rst
@@ -66,7 +66,7 @@ Getting Started (Manual)
 Run PostgreSQL in Docker::

-    docker-compose up -d
+    docker compose up -d db redis

 This will create a PostgreSQL server with multiple databases (see
 ``docker-entrypoint.postgres.sql``).
diff --git a/nc/data/importer.py b/nc/data/importer.py
index 2e86d44e..e3a299af 100755
--- a/nc/data/importer.py
+++ b/nc/data/importer.py
@@ -14,7 +14,6 @@
 from nc.data import copy_nc
 from nc.models import Agency, Search, Stop, StopSummary
-from nc.prime_cache import run as prime_cache_run
 from tsdata.dataset_facts import compute_dataset_facts
 from tsdata.sql import drop_constraints_and_indexes
 from tsdata.utils import call, download_and_unzip_data, line_count, unzip_data
@@ -100,8 +99,8 @@ def run(url, destination=None, zip_path=None, min_stop_id=None, max_stop_id=None
     logger.info("Materialized view updated")

     # prime the query cache for large NC agencies
-    if prime_cache:
-        prime_cache_run()
+    # if prime_cache:
+    #     prime_cache_run()


 def truncate_input_data(destination, min_stop_id, max_stop_id):
diff --git a/nc/filters.py b/nc/filters.py
index 242041fb..146eada6 100755
--- a/nc/filters.py
+++ b/nc/filters.py
@@ -6,7 +6,7 @@

 class DriverStopsFilter(filters.FilterSet):
     agency = filters.ModelChoiceFilter(
-        queryset=models.Agency.objects.no_cache().all(),
+        queryset=models.Agency.objects.all(),
         label="Agency",
         method="filter_agency",
         required=True,
diff --git a/nc/models.py b/nc/models.py
index 73d24498..bda0df43 100755
--- a/nc/models.py
+++ b/nc/models.py
@@ -1,4 +1,3 @@
-from caching.base import CachingManager, CachingMixin
 from django.db import models
 from django.utils.html import format_html
 from django_pgviews import view as pg
@@ -133,7 +132,7 @@ class DriverEthnicity(models.TextChoices):
 )


-class Stop(CachingMixin, models.Model):
+class Stop(models.Model):
     stop_id = models.PositiveIntegerField(primary_key=True)
     agency_description = models.CharField(max_length=100)
     agency = models.ForeignKey("Agency", null=True, related_name="stops", on_delete=models.CASCADE)
@@ -151,10 +150,8 @@ class Stop(CachingMixin, models.Model):
     stop_location = models.CharField(max_length=15)  # todo: keys
     stop_city = models.CharField(max_length=20)

-    objects = CachingManager()
-

-class Person(CachingMixin, models.Model):
+class Person(models.Model):
     person_id = models.IntegerField(primary_key=True)
     stop = models.ForeignKey(Stop, on_delete=models.CASCADE)
     type = models.CharField(max_length=2, choices=PERSON_TYPE_CHOICES)
@@ -163,10 +160,8 @@ class Person(CachingMixin, models.Model):
     ethnicity = models.CharField(max_length=2, choices=ETHNICITY_CHOICES)
     race = models.CharField(max_length=2, choices=RACE_CHOICES)

-    objects = CachingManager()
-

-class Search(CachingMixin, models.Model):
+class Search(models.Model):
     search_id = models.IntegerField(primary_key=True)
     stop = models.ForeignKey(Stop, on_delete=models.CASCADE)
     person = models.ForeignKey(Person, on_delete=models.CASCADE)
@@ -179,10 +174,8 @@ class Search(CachingMixin, models.Model):
     personal_property_siezed = models.BooleanField(default=False)
     other_property_sized = models.BooleanField(default=False)

-    objects = CachingManager()
-

-class Contraband(CachingMixin, models.Model):
+class Contraband(models.Model):
     contraband_id = models.IntegerField(primary_key=True)
     search = models.ForeignKey(Search, on_delete=models.CASCADE)
     person = models.ForeignKey(Person, on_delete=models.CASCADE)
@@ -198,27 +191,21 @@ class Contraband(CachingMixin, models.Model):
     weapons = models.FloatField(default=0, null=True)
     dollar_amount = models.FloatField(default=0, null=True)

-    objects = CachingManager()
-

-class SearchBasis(CachingMixin, models.Model):
+class SearchBasis(models.Model):
     search_basis_id = models.IntegerField(primary_key=True)
     search = models.ForeignKey(Search, on_delete=models.CASCADE)
     person = models.ForeignKey(Person, on_delete=models.CASCADE)
     stop = models.ForeignKey(Stop, on_delete=models.CASCADE)
     basis = models.CharField(max_length=4, choices=SEARCH_BASIS_CHOICES)

-    objects = CachingManager()
-

-class Agency(CachingMixin, models.Model):
+class Agency(models.Model):
     name = models.CharField(max_length=255)
     # link to CensusProfile (no cross-database foreign key)
     census_profile_id = models.CharField(max_length=16, blank=True, default="")
     last_reported_stop = models.DateField(null=True)

-    objects = CachingManager()
-
     class Meta(object):
         verbose_name_plural = "Agencies"
diff --git a/nc/notebooks/requirements.txt b/nc/notebooks/requirements.txt
index 78aead6d..407e1321 100644
--- a/nc/notebooks/requirements.txt
+++ b/nc/notebooks/requirements.txt
@@ -5,4 +5,4 @@ jupyter-dash==0.4.2
 jupyter_server>=2.0.0
 dash==2.8.1
 plotly==5.13.0
-psycopg2==2.9.3
+psycopg2==2.9.9
diff --git a/nc/prime_cache.py b/nc/prime_cache.py
index fc124a33..80dd3307 100755
--- a/nc/prime_cache.py
+++ b/nc/prime_cache.py
@@ -148,8 +148,7 @@ def __repr__(self):
 class AgencyStopsPrimer(CachePrimer):
     def get_queryset(self):
         qs = list(
-            Stop.objects.no_cache()
-            .annotate(agency_name=F("agency_description"))
+            Stop.objects.annotate(agency_name=F("agency_description"))
             .values("agency_name", "agency_id")
             .annotate(num_stops=Count("stop_id"))
             .order_by("-num_stops")
@@ -226,6 +225,7 @@ def run(

     if clear_cache:
         logger.info("Clearing cache")
+        # TODO: Change to create CloudFront invalidation
         cache.clear()

     if not skip_agencies:
diff --git a/nc/tasks.py b/nc/tasks.py
index eda1871b..b50c85c7 100755
--- a/nc/tasks.py
+++ b/nc/tasks.py
@@ -44,3 +44,13 @@ def download_and_import_nc_dataset():
     nc_dataset.report_email_2 = settings.NC_AUTO_IMPORT_MONITORS[1]
     nc_dataset.save()
     import_dataset.delay(nc_dataset.pk)
+
+
+@app.task
+def prime_cache():
+    pass
+
+
+@app.task
+def prime_agency():
+    pass
diff --git a/nc/views/main.py b/nc/views/main.py
index 21c602b2..a5a49cdf 100644
--- a/nc/views/main.py
+++ b/nc/views/main.py
@@ -19,7 +19,6 @@
 from rest_framework.decorators import action
 from rest_framework.response import Response
 from rest_framework.views import APIView
-from rest_framework_extensions.cache.decorators import cache_response
 from rest_framework_extensions.key_constructor import bits
 from rest_framework_extensions.key_constructor.constructors import DefaultObjectKeyConstructor
@@ -202,14 +201,12 @@ def query(self, results, group_by, filter_=None):
         results.add(**data)

     @action(detail=True, methods=["get"])
-    @cache_response(key_func=query_cache_key_func)
     def stops(self, request, pk=None):
         results = GroupedData(by="year", defaults=GROUP_DEFAULTS)
         self.query(results, group_by=("year", "driver_race", "driver_ethnicity"))
         return Response(results.flatten())

     @action(detail=True, methods=["get"])
-    @cache_response(key_func=query_cache_key_func)
     def stops_by_reason(self, request, pk=None):
         response = {}
         # stops
@@ -227,7 +224,6 @@ def stops_by_reason(self, request, pk=None):
         return Response(response)

     @action(detail=True, methods=["get"])
-    @cache_response(key_func=query_cache_key_func)
     def use_of_force(self, request, pk=None):
         results = GroupedData(by="year", defaults=GROUP_DEFAULTS)
         q = Q(search_type__isnull=False) & Q(engage_force="t")
@@ -235,7 +231,6 @@ def use_of_force(self, request, pk=None):
         return Response(results.flatten())

     @action(detail=True, methods=["get"])
-    @cache_response(key_func=query_cache_key_func)
     def searches(self, request, pk=None):
         results = GroupedData(by="year", defaults=GROUP_DEFAULTS)
         q = Q(search_type__isnull=False)
@@ -243,7 +238,6 @@ def searches(self, request, pk=None):
         return Response(results.flatten())

     @action(detail=True, methods=["get"])
-    @cache_response(key_func=query_cache_key_func)
     def searches_by_type(self, request, pk=None):
         results = GroupedData(by=("search_type", "year"), defaults=GROUP_DEFAULTS)
         q = Q(search_type__isnull=False)
diff --git a/requirements/base/base.in b/requirements/base/base.in
index ec0b326d..5a51907b 100755
--- a/requirements/base/base.in
+++ b/requirements/base/base.in
@@ -8,7 +8,6 @@ boto
 boto3==1.34.100
 botocore==1.34.100
 click==8.1.7
-django-cache-machine==1.2.0
 django-ckeditor==6.7.0
 django-click==2.3.0
 django-crispy-forms
@@ -22,7 +21,7 @@ django-storages==1.13.2
 djangorestframework==3.12.4
 dj-database-url
 drf-extensions==0.7.1
-psycopg2<2.9
+psycopg2==2.9.9
 requests==2.32.3
 urllib3==2.2.1
 six
diff --git a/requirements/base/base.txt b/requirements/base/base.txt
index 113a2cdb..da6aaec8 100755
--- a/requirements/base/base.txt
+++ b/requirements/base/base.txt
@@ -1,5 +1,5 @@
 #
-# This file is autogenerated by pip-compile with Python 3.10
+# This file is autogenerated by pip-compile with Python 3.11
 # by the following command:
 #
 #    pip-compile --output-file=requirements/base/base.txt requirements/base/base.in
@@ -55,8 +55,6 @@ django==3.2.25
     #   django-redis
     #   django-storages
     #   djangorestframework
-django-cache-machine==1.2.0
-    # via -r requirements/base/base.in
 django-ckeditor==6.7.0
     # via -r requirements/base/base.in
 django-click==2.3.0
@@ -101,7 +99,7 @@ pandas==2.2.2
     # via -r requirements/base/base.in
 prompt-toolkit==3.0.47
     # via click-repl
-psycopg2==2.8.6
+psycopg2==2.9.9
     # via -r requirements/base/base.in
 python-dateutil==2.9.0.post0
     # via
diff --git a/requirements/deploy/deploy.txt b/requirements/deploy/deploy.txt
index b2cec8a1..9d8a361b 100644
--- a/requirements/deploy/deploy.txt
+++ b/requirements/deploy/deploy.txt
@@ -1,5 +1,5 @@
 #
-# This file is autogenerated by pip-compile with Python 3.10
+# This file is autogenerated by pip-compile with Python 3.11
 # by the following command:
 #
 #    pip-compile --output-file=requirements/deploy/deploy.txt requirements/deploy/deploy.in
diff --git a/requirements/dev/dev.txt b/requirements/dev/dev.txt
index 99e784e2..6bffed34 100755
--- a/requirements/dev/dev.txt
+++ b/requirements/dev/dev.txt
@@ -1,5 +1,5 @@
 #
-# This file is autogenerated by pip-compile with Python 3.10
+# This file is autogenerated by pip-compile with Python 3.11
 # by the following command:
 #
 #    pip-compile --output-file=requirements/dev/dev.txt requirements/dev/dev.in
@@ -13,7 +13,9 @@ ansible==9.5.1
 ansible-core==2.16.7
     # via ansible
 appnope==0.1.0
-    # via -r requirements/dev/dev.in
+    # via
+    #   -r requirements/dev/dev.in
+    #   ipython
 argh==0.26.2
     # via sphinx-autobuild
 asgiref==3.5.2
@@ -151,8 +153,6 @@ pexpect==4.8.0
     # via ipython
 pickleshare==0.7.5
     # via ipython
-pkgutil-resolve-name==1.3.10
-    # via jsonschema
 port-for==0.3.1
     # via sphinx-autobuild
 prompt-toolkit==3.0.47
@@ -273,9 +273,7 @@ traitlets==4.3.3
 troposphere==3.1.1
     # via -r requirements/dev/dev.in
 typing-extensions==4.12.2
-    # via
-    #   -c requirements/dev/../test/test.txt
-    #   kubernetes-validate
+    # via kubernetes-validate
 urllib3==2.2.1
     # via
     #   -c requirements/dev/../base/base.txt
diff --git a/requirements/test/test.txt b/requirements/test/test.txt
index 34c9ae9b..286b2009 100755
--- a/requirements/test/test.txt
+++ b/requirements/test/test.txt
@@ -1,5 +1,5 @@
 #
-# This file is autogenerated by pip-compile with Python 3.10
+# This file is autogenerated by pip-compile with Python 3.11
 # by the following command:
 #
 #    pip-compile --output-file=requirements/test/test.txt requirements/test/test.in
@@ -18,8 +18,6 @@ coverage[toml]==7.5.3
     #   pytest-cov
 distlib==0.3.6
     # via virtualenv
-exceptiongroup==1.2.1
-    # via pytest
 factory-boy==2.12.0
     # via -r requirements/test/test.in
 faker==4.1.0
@@ -83,12 +81,5 @@ six==1.15.0
     #   python-dateutil
 text-unidecode==1.3
     # via faker
-tomli==2.0.1
-    # via
-    #   black
-    #   coverage
-    #   pytest
-typing-extensions==4.12.2
-    # via black
 virtualenv==20.17.1
     # via pre-commit