Add an s3 cache backend to the content_harvester
amywieliczka committed Sep 25, 2024
1 parent 982b93f commit 1745d17
Showing 2 changed files with 49 additions and 1 deletion.
6 changes: 5 additions & 1 deletion content_harvester/by_record.py
@@ -15,10 +15,14 @@

from . import settings
from . import derivatives
+from .s3_cache import S3Cache

from rikolti.utils.storage import upload_file

-persistent_cache = dict()
+persistent_cache = S3Cache(
+    os.environ.get('CONTENT_CACHE_BUCKET', 'rikolti-content-component-cache')
+)
in_memory_cache = dict()


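With this change, persistent_cache is backed by S3 instead of an in-process dict; the bucket name is read from the CONTENT_CACHE_BUCKET environment variable and falls back to 'rikolti-content-component-cache'. A minimal sketch of how such a cache might be exercised (the key and payload are hypothetical, not taken from by_record.py), assuming boto3 can find AWS credentials:

    import os

    from content_harvester.s3_cache import S3Cache

    persistent_cache = S3Cache(
        os.environ.get('CONTENT_CACHE_BUCKET', 'rikolti-content-component-cache')
    )

    # Hypothetical usage: cache the raw bytes of a fetched file, keyed by its source URL.
    source_url = 'https://example.org/images/record-123.jpg'  # hypothetical key
    if source_url not in persistent_cache:
        persistent_cache[source_url] = b'...downloaded bytes...'  # hypothetical payload
    cached_bytes = persistent_cache.get(source_url)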
44 changes: 44 additions & 0 deletions content_harvester/s3_cache.py
@@ -0,0 +1,44 @@
import boto3

class S3Cache(object):
    """
    Create a dictionary-like interface to an s3 bucket
    """
    def __init__(self, bucket_name, **kwargs):
        self.bucket_name = bucket_name
        self.s3 = boto3.client('s3', **kwargs)

    def __getitem__(self, key):
        # Note: a missing key surfaces as the underlying boto3 error
        # (e.g. NoSuchKey), not the KeyError a plain dict would raise.
        try:
            response = self.s3.get_object(Bucket=self.bucket_name, Key=key)
            return response['Body'].read()
        except Exception as e:
            print(f"{self} error getting {key}")
            raise e

    def get(self, key, default=None):
        # dict.get semantics: return default rather than raising.
        try:
            response = self.s3.get_object(Bucket=self.bucket_name, Key=key)
            return response['Body'].read()
        except Exception:
            return default

    def __setitem__(self, key, data):
        # Best-effort write: errors are logged and swallowed rather than raised.
        try:
            self.s3.put_object(Bucket=self.bucket_name, Key=key, Body=data)
        except Exception as e:
            print(f"{self} error setting {key}: {e}")

    def __contains__(self, key):
        # Membership check via a HEAD request; any error reads as "not cached".
        try:
            self.s3.head_object(Bucket=self.bucket_name, Key=key)
            return True
        except Exception:
            return False

    def __delitem__(self, key):
        self.s3.delete_object(Bucket=self.bucket_name, Key=key)

    def __repr__(self):
        return f"S3Cache(bucket_name={self.bucket_name})"
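
Because S3Cache mimics a dict's core operations, it can stand in wherever a dict was used as a cache. A short sketch of the interface as implemented above; the bucket and keys are illustrative and boto3 credentials are assumed:

    from content_harvester.s3_cache import S3Cache

    cache = S3Cache('rikolti-content-component-cache')  # bucket name is illustrative

    cache['thumbnails/abc123'] = b'\x89PNG...'  # put_object; errors are logged, not raised
    if 'thumbnails/abc123' in cache:            # head_object; any error reads as False
        data = cache['thumbnails/abc123']       # get_object; re-raises boto3 errors
    missing = cache.get('no-such-key')          # returns None instead of raising
    del cache['thumbnails/abc123']              # delete_object

Values come back as raw bytes from get_object, so callers are responsible for any decoding or deserialization.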
