From c3b2263c442c3692196a6e2c786917d71c5cb318 Mon Sep 17 00:00:00 2001 From: Vivian Ta Date: Thu, 24 Aug 2017 11:18:47 -0700 Subject: [PATCH] Parameterizing more values so it's more versatile. --- README.md | 50 +++++++++++++++++++++++++++----------------------- files/main.sh | 45 +++++++++++++++++++++++++++------------------ 2 files changed, 54 insertions(+), 41 deletions(-) diff --git a/README.md b/README.md index a9941f4..142d27e 100755 --- a/README.md +++ b/README.md @@ -28,52 +28,56 @@ You can find an automated build of this container on the Docker Hub: https://hub ## Optional inputs If access for your instance, task, etc. is configured through an IAM role you may omit the following inputs: - AWS_KEY= - AWS_SECRET= + AWS_ACCESS_KEY= + AWS_SECRET_KEY= ## Copy from local to S3: - AWS_KEY= - AWS_SECRET= - BUCKET=s3://garland.public.bucket/database2/ + AWS_ACCESS_KEY= + AWS_SECRET_KEY= + S3_BUCKET=garland-bucket + SRC_S3=s3folder LOCAL_FILE=/tmp/database docker run \ - --env aws_key=${AWS_KEY} \ - --env aws_secret=${AWS_SECRET} \ - --env cmd=sync-local-to-s3 \ - --env DEST_S3=${BUCKET} \ + --env AWS_ACCESS_KEY=${AWS_ACCESS_KEY} \ + --env AWS_SECRET_KEY=${AWS_SECRET_KEY} \ + --env S3_CMD=sync-local-to-s3 \ + --env S3_BUCKET=${S3_BUCKET} \ + --env SRC_S3=${SRC_S3} \ -v ${LOCAL_FILE}:/opt/src \ garland/docker-s3cmd -* Change `LOCAL_FILE` to file/folder you want to upload to S3 +* Change `LOCAL_FILE` to file/folder you want to upload to S3. 
## Copy from S3 to local: - AWS_KEY= - AWS_SECRET= - BUCKET=s3://garland.public.bucket/database + AWS_ACCESS_KEY= + AWS_SECRET_KEY= + S3_BUCKET=garland-bucket + SRC_S3=s3folder LOCAL_FILE=/tmp docker run \ - --env aws_key=${AWS_KEY} \ - --env aws_secret=${AWS_SECRET} \ - --env cmd=sync-s3-to-local \ - --env SRC_S3=${BUCKET} \ + --env AWS_ACCESS_KEY=${AWS_ACCESS_KEY} \ + --env AWS_SECRET_KEY=${AWS_SECRET_KEY} \ + --env S3_CMD=sync-s3-to-local \ + --env S3_BUCKET=${S3_BUCKET} \ + --env SRC_S3=${SRC_S3} \ -v ${LOCAL_FILE}:/opt/dest \ garland/docker-s3cmd -* Change `LOCAL_FILE` to the file/folder where you want to download the files from S3 to the local computer +* Change `LOCAL_FILE` to the file/folder where you want to download the files from S3 to the local computer. When using this option, it automatically grabs the latest back up from S3 in that bucket. ## Run interactively with s3cmd - AWS_KEY= - AWS_SECRET= + AWS_ACCESS_KEY= + AWS_SECRET_KEY= docker run -it \ - --env aws_key=${AWS_KEY} \ - --env aws_secret=${AWS_SECRET} \ - --env cmd=interactive \ + --env AWS_ACCESS_KEY=${AWS_ACCESS_KEY} \ + --env AWS_SECRET_KEY=${AWS_SECRET_KEY} \ + --env S3_CMD=interactive \ -v /:/opt/dest \ garland/docker-s3cmd /bin/sh diff --git a/files/main.sh b/files/main.sh index dcfdaed..1b1e15f 100755 --- a/files/main.sh +++ b/files/main.sh @@ -8,18 +8,18 @@ S3CMD_PATH=/opt/s3cmd/s3cmd # # Check for required parameters # -if [ -z "${aws_key}" ]; then +if [ -z "${AWS_ACCESS_KEY}" ]; then echo "The environment variable key is not set. Attempting to create empty creds file to use role." - aws_key="" + AWS_ACCESS_KEY="" fi -if [ -z "${aws_secret}" ]; then +if [ -z "${AWS_SECRET_KEY}" ]; then echo "The environment variable secret is not set." - aws_secret="" - security_token="" + AWS_SECRET_KEY="" + AWS_SECURITY_TOKEN="" fi -if [ -z "${cmd}" ]; then +if [ -z "${S3_CMD}" ]; then echo "ERROR: The environment variable cmd is not set." 
exit 1 fi @@ -28,11 +28,11 @@ fi # # Replace key and secret in the /.s3cfg file with the one the user provided # echo "" >> /.s3cfg -echo "access_key = ${aws_key}" >> /.s3cfg -echo "secret_key = ${aws_secret}" >> /.s3cfg +echo "access_key = ${AWS_ACCESS_KEY}" >> /.s3cfg +echo "secret_key = ${AWS_SECRET_KEY}" >> /.s3cfg -if [ -z "${security_token}" ]; then - echo "security_token = ${aws_security_token}" >> /.s3cfg +if [ -n "${AWS_SECURITY_TOKEN}" ]; then + echo "security_token = ${AWS_SECURITY_TOKEN}" >> /.s3cfg fi # @@ -43,22 +43,31 @@ if [ "${s3_host_base}" != "" ]; then echo "host_base = ${s3_host_base}" >> /.s3cfg fi -# Chevk if we want to run in interactive mode or not -if [ "${cmd}" != "interactive" ]; then - +# Check if we want to run in interactive mode or not +if [ "${S3_CMD}" != "interactive" ]; then # # sync-s3-to-local - copy from s3 to local # - if [ "${cmd}" = "sync-s3-to-local" ]; then - echo ${src-s3} - ${S3CMD_PATH} --config=/.s3cfg sync ${SRC_S3} /opt/dest/ + if [ "${S3_CMD}" = "sync-s3-to-local" ]; then + # Grabbing latest S3 file in provided path + S3_FILE=`${S3CMD_PATH} --config=/.s3cfg ls s3://${S3_BUCKET}/${SRC_S3}/ | sort | tail -n 1 | awk -F"/" '{print $NF}'` + + echo "Latest S3 file: ${S3_FILE}" + if [ -n "$(ls -A /opt/dest)" ] + then + echo "Backup is already present in mounted directory" + exit 0 + else + echo "Downloading from S3..." + ${S3CMD_PATH} --config=/.s3cfg sync s3://${S3_BUCKET}/${SRC_S3}/${S3_FILE} /opt/dest/ + fi fi # # sync-local-to-s3 - copy from local to s3 # - if [ "${S3_CMD}" = "sync-local-to-s3" ]; then - ${S3CMD_PATH} --config=/.s3cfg sync /opt/src/ ${DEST_S3} + if [ "${S3_CMD}" = "sync-local-to-s3" ]; then + ${S3CMD_PATH} --config=/.s3cfg sync /opt/src/ s3://${S3_BUCKET}/${SRC_S3}/ fi else # Copy file over to the default location where S3cmd is looking for the config file