Skip to content

Commit

Permalink
Merge pull request #1390 from psiemens/memcache
Browse files Browse the repository at this point in the history
Re-enable App Engine memcache
  • Loading branch information
psiemens authored Jan 6, 2024
2 parents 4d747ca + 454e227 commit 2c66783
Show file tree
Hide file tree
Showing 5 changed files with 110 additions and 24 deletions.
5 changes: 2 additions & 3 deletions app.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -7,13 +7,12 @@ default_expiration: "365d"
automatic_scaling:
max_instances: 6

app_engine_apis: true

handlers:
- url: /robots.txt
static_files: robots.txt
upload: robots.txt
- url: .*
script: auto
secure: always

vpc_access_connector:
name: projects/ubyssey-prd/locations/us-central1/connectors/ubyssey-connector
25 changes: 5 additions & 20 deletions config/settings/production.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,31 +19,16 @@
# Sessions are used to anonymously keep track of individual site visitors
SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db'

# We use Redis as a cache backend, as recommended by Wagtail here:
# https://docs.wagtail.org/en/v2.10.2/advanced_topics/performance.html#cache
#
# We previously used the Memcache service bundled with Google App Engine,
# but it is now considered a legacy service and does not work seamlessly with Django.
#
# TODO: switch to built-in Redis backend after upgrading Django.
# Ref: https://github.com/ubyssey/ubyssey.ca/issues/1340
#
CACHES = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": "redis://%s:%s" % (REDIS_HOST, REDIS_PORT),
"TIMEOUT": 3600, # 1 hour
'default': {
'BACKEND': 'ubyssey.cache.AppEngineMemcacheCache',
'TIMEOUT': 3600, # 1 hour
},
# The "renditions" cache is for Wagtail image renditions.
# Ref: https://docs.wagtail.org/en/v2.10.2/advanced_topics/performance.html#caching-image-renditions
"renditions": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": "redis://%s:%s" % (REDIS_HOST, REDIS_PORT),
"TIMEOUT": 3600, # 1 hour
'BACKEND': 'ubyssey.cache.AppEngineMemcacheCache',
'TIMEOUT': 3600, # 1 hour
}
}
# WAGTAIL_CACHE_IGNORE_COOKIES = False
# WAGTAIL_CACHE_IGNORE_QS = None

ADS_TXT_URL = 'https://ubyssey.storage.googleapis.com/ads.txt'

Expand Down
1 change: 1 addition & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -36,3 +36,4 @@ ipython
django-redis==5.4.0
redis==5.0.1
hiredis==2.2.3
appengine-python-standard>=1.0.0
99 changes: 99 additions & 0 deletions ubyssey/cache.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,99 @@
"""
Django Cache Backend for the Google App Engine Memcache API.
"""
import time
import logging

from django.core.cache.backends.base import BaseCache

from google.appengine.api import memcache

class AppEngineMemcacheCache(BaseCache):
    """Django cache backend backed by the App Engine bundled memcache API.

    All keys are namespaced through ``BaseCache.make_key()`` so Django's
    ``KEY_PREFIX`` / ``VERSION`` settings behave the same as with the
    stock backends.
    """

    def __init__(self, location, params):
        # `location` is accepted for interface compatibility with other
        # Django cache backends, but the App Engine memcache service has
        # no configurable address, so it is ignored.
        super(AppEngineMemcacheCache, self).__init__(params)

    def _get_memcache_timeout(self, timeout):
        """Return an expiry value safe to pass to memcache.

        Memcached treats expiry values larger than 30 days as absolute
        Unix timestamps, so longer relative timeouts must be converted
        to an absolute timestamp before being sent.
        """
        timeout = timeout or self.default_timeout
        if timeout > 2592000:  # 60*60*24*30 seconds = 30 days
            # "You can set expire times up to 30 days in the future. After
            # that memcached interprets it as a date, and will expire the
            # item after said date." — so switch to an absolute timestamp.
            timeout += int(time.time())
        return timeout

    def add(self, key, value, timeout=None, version=None):
        # BUG FIX: the previous implementation forwarded the *raw* key and
        # passed `timeout=None, version=None` as keyword arguments to
        # memcache.add(), which neither applies the key prefix/version nor
        # honours the requested timeout (the memcache API's expiry
        # parameter is positional `time`; it has no `timeout`/`version`).
        key = self.make_key(key, version=version)
        self.validate_key(key)
        return memcache.add(key, value, self._get_memcache_timeout(timeout))

    def get(self, key, default=None, version=None):
        key = self.make_key(key, version=version)
        self.validate_key(key)

        val = memcache.get(key)
        if val is None:
            return default
        return val

    def get_many(self, keys, version=None):
        # BUG FIX: this previously used map(), which returns a one-shot
        # iterator on Python 3. After get_multi() consumed it, the
        # new_keys -> keys mapping built below was empty and hits were
        # returned under the internal (prefixed) key names. A list can be
        # iterated more than once.
        new_keys = [self.make_key(key, version=version) for key in keys]
        ret = memcache.get_multi(new_keys)
        if ret:
            # Translate the namespaced keys back to the caller's keys.
            reverse_map = dict(zip(new_keys, keys))
            ret = {reverse_map[k]: v for k, v in ret.items()}
        return ret

    def incr(self, key, delta=1, version=None):
        key = self.make_key(key, version=version)
        self.validate_key(key)

        val = memcache.incr(key, delta)
        if val is None:
            # Match the stock Django memcached backend: incr on a missing
            # key raises rather than silently creating it.
            raise ValueError("Key '%s' not found" % key)
        return val

    def decr(self, key, delta=1, version=None):
        key = self.make_key(key, version=version)
        self.validate_key(key)

        val = memcache.decr(key, delta)
        if val is None:
            raise ValueError("Key '%s' not found" % key)
        return val

    def set(self, key, value, timeout=None, version=None):
        key = self.make_key(key, version=version)
        self.validate_key(key)

        # memcache rejects pickled values larger than ~1 MB; treat that as
        # a best-effort cache miss rather than a hard error. Narrowed from
        # a bare `except:` so KeyboardInterrupt/SystemExit still propagate.
        try:
            memcache.set(key, value, self._get_memcache_timeout(timeout))
        except Exception:
            logging.warning('Could not save response in memcache - too large')

    def delete(self, key, version=None):
        key = self.make_key(key, version=version)
        self.validate_key(key)

        memcache.delete(key)

    def set_many(self, data, timeout=None, version=None):
        # NOTE: the original signature defaulted timeout=0, but a falsy
        # timeout already falls through to default_timeout inside
        # _get_memcache_timeout(), so None is behaviour-identical while
        # matching the other methods' signatures.
        safe_data = {}
        for key, value in data.items():
            key = self.make_key(key, version=version)
            self.validate_key(key)
            safe_data[key] = value
        memcache.set_multi(safe_data, self._get_memcache_timeout(timeout))

    def delete_many(self, keys, version=None):
        memcache.delete_multi(
            [self.make_key(key, version=version) for key in keys]
        )

    def clear(self):
        return memcache.flush_all()
4 changes: 3 additions & 1 deletion ubyssey/wsgi.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@

from django.core.wsgi import get_wsgi_application

from google.appengine.api import wrap_wsgi_app

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production")

application = get_wsgi_application()
application = wrap_wsgi_app(get_wsgi_application())

0 comments on commit 2c66783

Please sign in to comment.