From 454e227303cbb1df5065457a11cd08bd15b1643a Mon Sep 17 00:00:00 2001
From: Peter Siemens
Date: Sat, 6 Jan 2024 00:52:52 -0800
Subject: [PATCH] Re-enable App Engine memcache

---
 app.yaml                      |  5 +-
 config/settings/production.py | 25 ++-------
 requirements.txt              |  1 +
 ubyssey/cache.py              | 99 +++++++++++++++++++++++++++++++++++
 ubyssey/wsgi.py               |  4 +-
 5 files changed, 110 insertions(+), 24 deletions(-)
 create mode 100644 ubyssey/cache.py

diff --git a/app.yaml b/app.yaml
index 9deaf1c59..e69259d7b 100644
--- a/app.yaml
+++ b/app.yaml
@@ -7,6 +7,8 @@ default_expiration: "365d"
 automatic_scaling:
   max_instances: 6
 
+app_engine_apis: true
+
 handlers:
 - url: /robots.txt
   static_files: robots.txt
@@ -14,6 +16,3 @@ handlers:
 - url: .*
   script: auto
   secure: always
-
-vpc_access_connector:
-  name: projects/ubyssey-prd/locations/us-central1/connectors/ubyssey-connector
diff --git a/config/settings/production.py b/config/settings/production.py
index c7284773c..910f5cf69 100644
--- a/config/settings/production.py
+++ b/config/settings/production.py
@@ -19,31 +19,16 @@
 
 # Sessions are used to anonymously keep track of individual site visitors
 SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db'
 
-# We use Redis as a cache backend, as recommended by Wagtail here:
-# https://docs.wagtail.org/en/v2.10.2/advanced_topics/performance.html#cache
-#
-# We previously used the Memcache service bundled with Google App Engine,
-# but it is now considered a legacy service and does not work seamlessly with Django.
-#
-# TODO: switch to built-in Redis backend after upgrading Django.
-# Ref: https://github.com/ubyssey/ubyssey.ca/issues/1340
-#
 CACHES = {
-    "default": {
-        "BACKEND": "django_redis.cache.RedisCache",
-        "LOCATION": "redis://%s:%s" % (REDIS_HOST, REDIS_PORT),
-        "TIMEOUT": 3600, # 1 hour
+    'default': {
+        'BACKEND': 'ubyssey.cache.AppEngineMemcacheCache',
+        'TIMEOUT': 3600, # 1 hour
     },
-    # The "renditions" cache is for Wagtail image renditions.
-    # Ref: https://docs.wagtail.org/en/v2.10.2/advanced_topics/performance.html#caching-image-renditions
     "renditions": {
-        "BACKEND": "django_redis.cache.RedisCache",
-        "LOCATION": "redis://%s:%s" % (REDIS_HOST, REDIS_PORT),
-        "TIMEOUT": 3600, # 1 hour
+        'BACKEND': 'ubyssey.cache.AppEngineMemcacheCache',
+        'TIMEOUT': 3600, # 1 hour
     }
 }
-# WAGTAIL_CACHE_IGNORE_COOKIES = False
-# WAGTAIL_CACHE_IGNORE_QS = None
 
 ADS_TXT_URL = 'https://ubyssey.storage.googleapis.com/ads.txt'
diff --git a/requirements.txt b/requirements.txt
index aa2428089..bc4fb184e 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -36,3 +36,4 @@ ipython
 django-redis==5.4.0
 redis==5.0.1
 hiredis==2.2.3
+appengine-python-standard>=1.0.0
diff --git a/ubyssey/cache.py b/ubyssey/cache.py
new file mode 100644
index 000000000..d407b6756
--- /dev/null
+++ b/ubyssey/cache.py
@@ -0,0 +1,99 @@
+"""
+Django Cache Backend for the Google App Engine Memcache API.
+"""
+import time
+import logging
+
+from django.core.cache.backends.base import BaseCache
+
+from google.appengine.api import memcache
+
+class AppEngineMemcacheCache(BaseCache):
+
+    def __init__(self, location, params):
+        super(AppEngineMemcacheCache, self).__init__(params)
+
+    def _get_memcache_timeout(self, timeout):
+        """
+        Memcached deals with long (> 30 days) timeouts in a special
+        way. Call this function to obtain a safe value for your timeout.
+        """
+        timeout = timeout or self.default_timeout
+        if timeout > 2592000: # 60*60*24*30, 30 days
+            # See http://code.google.com/p/memcached/wiki/FAQ
+            # "You can set expire times up to 30 days in the future. After that
+            # memcached interprets it as a date, and will expire the item after
+            # said date. This is a simple (but obscure) mechanic."
+            #
+            # This means that we have to switch to absolute timestamps.
+            timeout += int(time.time())
+        return timeout
+
+    def add(self, key, value, timeout=None, version=None):
+        key = self.make_key(key, version=version)
+        self.validate_key(key)
+        return memcache.add(key, value, self._get_memcache_timeout(timeout))
+
+    def get(self, key, default=None, version=None):
+        key = self.make_key(key, version=version)
+        self.validate_key(key)
+
+        val = memcache.get(key)
+        if val is None:
+            return default
+        return val
+
+    def get_many(self, keys, version=None):
+        # Build a list (not a lazy map) so the keys can be reused after get_multi.
+        new_keys = [self.make_key(key, version=version) for key in keys]
+        ret = memcache.get_multi(new_keys)
+        if ret:
+            # Translate the versioned keys back to the keys the caller passed in.
+            m = dict(zip(new_keys, keys))
+            ret = {m[k]: v for k, v in ret.items()}
+        return ret
+
+    def incr(self, key, delta=1, version=None):
+        key = self.make_key(key, version=version)
+
+        val = memcache.incr(key, delta)
+        if val is None:
+            raise ValueError("Key '%s' not found" % key)
+        return val
+
+    def decr(self, key, delta=1, version=None):
+        key = self.make_key(key, version=version)
+
+        val = memcache.decr(key, delta)
+        if val is None:
+            raise ValueError("Key '%s' not found" % key)
+        return val
+
+    def set(self, key, value, timeout=None, version=None):
+        key = self.make_key(key, version=version)
+        self.validate_key(key)
+
+        # memcache rejects values larger than 1mb, so don't let an oversized
+        # pickled value raise and take the whole response down
+        try:
+            memcache.set(key, value, self._get_memcache_timeout(timeout))
+        except Exception:
+            logging.warning('Could not save response in memcache - too large')
+
+    def delete(self, key, version=None):
+        key = self.make_key(key, version=version)
+        self.validate_key(key)
+
+        memcache.delete(key)
+
+    def set_many(self, data, timeout=0, version=None):
+        safe_data = {}
+        for key, value in data.items():
+            key = self.make_key(key, version=version)
+            safe_data[key] = value
+        memcache.set_multi(safe_data, self._get_memcache_timeout(timeout))
+
+    def delete_many(self, keys, version=None):
+        memcache.delete_multi([self.make_key(key, version=version) for key in keys])
+
+    def clear(self):
+        return memcache.flush_all()
diff --git a/ubyssey/wsgi.py b/ubyssey/wsgi.py
index ee192cfc0..97150dc3b 100644
--- a/ubyssey/wsgi.py
+++ b/ubyssey/wsgi.py
@@ -2,6 +2,8 @@
 
 from django.core.wsgi import get_wsgi_application
 
+from google.appengine.api import wrap_wsgi_app
+
 os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production")
 
-application = get_wsgi_application()
+application = wrap_wsgi_app(get_wsgi_application())
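
Note (not part of the patch): a minimal smoke-test sketch for the re-enabled backend, assuming the 'default' cache alias configured in config/settings/production.py and a hypothetical check_memcache() helper. The bundled memcache API is only available on App Engine once app_engine_apis is enabled and the WSGI app is wrapped, so something like this would run from a view or management command after deploying, not locally:

    from django.core.cache import caches

    def check_memcache():
        # Hypothetical helper: exercises ubyssey.cache.AppEngineMemcacheCache
        # through Django's normal cache API.
        cache = caches['default']
        cache.set('memcache-smoke-test', 'ok', timeout=60)  # expires after 60 seconds
        assert cache.get('memcache-smoke-test') == 'ok'
        assert cache.get_many(['memcache-smoke-test']) == {'memcache-smoke-test': 'ok'}
        cache.delete('memcache-smoke-test')
        assert cache.get('memcache-smoke-test') is None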