Skip to content

Commit 9fbec68

Browse files
authored
Merge pull request #245 from DerekTBrown/feat-default-creds
feat: consolidate s3 logic, use default providers chain
2 parents 3732370 + 7fe4957 commit 9fbec68

File tree

3 files changed

+45
-34
lines changed

3 files changed

+45
-34
lines changed

grafana_backup/s3_common.py

Lines changed: 39 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,39 @@
1+
import boto3
2+
from botocore.exceptions import NoCredentialsError, ClientError
3+
4+
def get_boto_session(settings) -> boto3.Session:
    """Build a boto3 Session from *settings*.

    Explicit credentials (``AWS_ACCESS_KEY_ID`` / ``AWS_SECRET_ACCESS_KEY``)
    are used only when both are present; otherwise boto3's default
    credentials provider chain (env vars, shared config, instance role, ...)
    takes over. ``AWS_DEFAULT_REGION`` is always forwarded as the region.
    """
    session_kwargs = {"region_name": settings.get("AWS_DEFAULT_REGION")}

    key_id = settings.get("AWS_ACCESS_KEY_ID")
    secret = settings.get("AWS_SECRET_ACCESS_KEY")

    # Only pass credentials through when both halves were configured;
    # a partial pair falls back to the default provider chain.
    if key_id is not None and secret is not None:
        session_kwargs["aws_access_key_id"] = key_id
        session_kwargs["aws_secret_access_key"] = secret

    return boto3.Session(**session_kwargs)
21+
22+
def get_s3_resource(settings):
    """Return an S3 service resource for *settings*.

    The session comes from :func:`get_boto_session`; ``AWS_ENDPOINT_URL``
    (may be ``None``) allows pointing at S3-compatible stores such as MinIO.
    """
    session = get_boto_session(settings)
    return session.resource(
        service_name="s3",
        endpoint_url=settings.get("AWS_ENDPOINT_URL"),
    )
30+
31+
32+
def get_s3_object(settings, s3_file_name):
    """Return the S3 Object for *s3_file_name* under the configured prefix.

    The object key is ``<AWS_S3_BUCKET_KEY>/<s3_file_name>`` inside the
    bucket named by ``AWS_S3_BUCKET_NAME``. No network call happens here;
    the Object is a lazy resource handle.
    """
    bucket_name = settings.get('AWS_S3_BUCKET_NAME')
    key_prefix = settings.get('AWS_S3_BUCKET_KEY')

    object_key = '{0}/{1}'.format(key_prefix, s3_file_name)
    return get_s3_resource(settings).Object(bucket_name, object_key)

grafana_backup/s3_download.py

Lines changed: 4 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -2,24 +2,13 @@
22
from botocore.exceptions import NoCredentialsError, ClientError
33
from io import BytesIO
44

5+
from grafana_backup.s3_common import get_s3_object
56

67
def main(args, settings):
78
arg_archive_file = args.get("<archive_file>", None)
8-
aws_s3_bucket_name = settings.get("AWS_S3_BUCKET_NAME")
9-
aws_s3_bucket_key = settings.get("AWS_S3_BUCKET_KEY")
10-
aws_default_region = settings.get("AWS_DEFAULT_REGION")
11-
aws_access_key_id = settings.get("AWS_ACCESS_KEY_ID")
12-
aws_secret_access_key = settings.get("AWS_SECRET_ACCESS_KEY")
13-
aws_endpoint_url = settings.get("AWS_ENDPOINT_URL")
149

15-
session = boto3.Session(
16-
aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, region_name=aws_default_region
17-
)
18-
19-
s3 = session.resource(service_name="s3", endpoint_url=aws_endpoint_url)
20-
21-
s3_path = "{0}/{1}".format(aws_s3_bucket_key, arg_archive_file)
22-
s3_object = s3.Object(aws_s3_bucket_name, s3_path)
10+
aws_s3_bucket_name = settings.get('AWS_S3_BUCKET_NAME')
11+
s3_object = get_s3_object(settings, s3_file_name=arg_archive_file)
2312

2413
try:
2514
# .read() left off on purpose, tarfile.open() takes care of that part
@@ -28,7 +17,7 @@ def main(args, settings):
2817
print("Download from S3 was successful")
2918
except ClientError as e:
3019
if e.response["Error"]["Code"] == "NoSuchKey":
31-
print("Error: Key {0} does not exist in bucket {1}".format(s3_path, aws_s3_bucket_name))
20+
print("Error: Key {0} does not exist in bucket {1}".format(s3_object.key, aws_s3_bucket_name))
3221
return False
3322
raise e
3423
except NoCredentialsError:

grafana_backup/s3_upload.py

Lines changed: 2 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -1,33 +1,16 @@
11
import boto3
22
from botocore.exceptions import NoCredentialsError
33

4+
from grafana_backup.s3_common import get_s3_object
45

56
def main(args, settings):
6-
aws_s3_bucket_name = settings.get('AWS_S3_BUCKET_NAME')
7-
aws_s3_bucket_key = settings.get('AWS_S3_BUCKET_KEY')
8-
aws_default_region = settings.get('AWS_DEFAULT_REGION')
9-
aws_access_key_id = settings.get('AWS_ACCESS_KEY_ID')
10-
aws_secret_access_key = settings.get('AWS_SECRET_ACCESS_KEY')
11-
aws_endpoint_url = settings.get('AWS_ENDPOINT_URL')
12-
137
backup_dir = settings.get('BACKUP_DIR')
148
timestamp = settings.get('TIMESTAMP')
159

1610
s3_file_name = '{0}.tar.gz'.format(timestamp)
1711
archive_file = '{0}/{1}'.format(backup_dir, s3_file_name)
1812

19-
session = boto3.Session(
20-
aws_access_key_id=aws_access_key_id,
21-
aws_secret_access_key=aws_secret_access_key,
22-
region_name=aws_default_region
23-
)
24-
25-
s3 = session.resource(
26-
service_name='s3',
27-
endpoint_url=aws_endpoint_url
28-
)
29-
30-
s3_object = s3.Object(aws_s3_bucket_name, '{0}/{1}'.format(aws_s3_bucket_key, s3_file_name))
13+
s3_object = get_s3_object(settings, s3_file_name=s3_file_name)
3114

3215
try:
3316
s3_object.put(Body=open(archive_file, 'rb'))

0 commit comments

Comments
 (0)