update action and improve logging #104

Status: Open · wants to merge 4 commits into base: master
83 changes: 45 additions & 38 deletions .github/workflows/build-latest.yaml
@@ -13,7 +13,7 @@ on:
branches:
- master
jobs:
run-scenario-tests:
build-backup-image:
if: |
github.actor != 'dependabot[bot]' &&
!(
@@ -29,9 +29,6 @@ jobs:
- 3
postgisMinorRelease:
- 5
scenario:
- restore
- s3
steps:
- uses: actions/checkout@v4
- name: Set up QEMU
@@ -47,16 +44,47 @@ jobs:
push: false
load: true
tags: kartoza/pg-backup:manual-build
outputs: type=docker,dest=/tmp/pg-backup.tar
build-args: |
POSTGRES_MAJOR_VERSION=${{ matrix.postgresMajorVersion }}
POSTGIS_MAJOR_VERSION=${{ matrix.postgisMajorVersion }}
POSTGIS_MINOR_VERSION=${{ matrix.postgisMinorRelease }}
cache-from: |
type=gha,scope=test
type=gha,scope=prod
type=gha,scope=base
type=gha,scope=test
type=gha,scope=prod
type=gha,scope=base
cache-to: type=gha,scope=test
target: postgis-backup-test
- name: Upload artifact
uses: actions/upload-artifact@v4
with:
name: kartoza-pg-backup
path: /tmp/pg-backup.tar

run-scenario-tests:
if: |
github.actor != 'dependabot[bot]' &&
!(
contains(github.event.pull_request.title, '[skip-release]') ||
contains(github.event.comment.body, '/skiprelease')
)
runs-on: ubuntu-latest
needs: [ build-backup-image ]
strategy:
matrix:
scenario:
- restore
- s3
steps:
- uses: actions/checkout@v4
- name: Download artifact
uses: actions/download-artifact@v4
with:
name: kartoza-pg-backup
path: /tmp
- name: Load image
run: |
docker load --input /tmp/pg-backup.tar

- name: Run scenario test ${{ matrix.scenario }}
working-directory: scenario_tests/${{ matrix.scenario }}
@@ -76,21 +104,17 @@ jobs:
contains(github.event.comment.body, '/skiprelease')
)
runs-on: ubuntu-latest
needs: [ run-scenario-tests ]
strategy:
matrix:
postgresMajorVersion:
- 17
postgisMajorVersion:
- 3
postgisMinorRelease:
- 5
needs: [ build-backup-image ]
steps:
- uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Download artifact
uses: actions/download-artifact@v4
with:
name: kartoza-pg-backup
path: /tmp
- name: Load image
run: |
docker load --input /tmp/pg-backup.tar
- name: Login to DockerHub
uses: docker/login-action@v3
with:
@@ -106,21 +130,4 @@ jobs:
type=ref,event=branch
type=ref,event=pr

- name: Build image for testing
id: docker_build_testing_image
uses: docker/build-push-action@v6
with:
context: .
file: Dockerfile
push: true
tags: |
${{ steps.docker_meta.outputs.tags }}-${{ matrix.postgresMajorVersion }}-${{ matrix.postgisMajorVersion }}.${{ matrix.postgisMinorRelease }}
build-args: |
POSTGRES_MAJOR_VERSION=${{ matrix.postgresMajorVersion }}
POSTGIS_MAJOR_VERSION=${{ matrix.postgisMajorVersion }}
POSTGIS_MINOR_VERSION=${{ matrix.postgisMinorRelease }}
cache-from: |
type=gha,scope=test
type=gha,scope=prod
cache-to: type=gha,scope=test
target: postgis-backup-test
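The first hunk splits the old single job in two: the image is built once, exported to a tar via outputs: type=docker,dest=/tmp/pg-backup.tar, published with actions/upload-artifact, and re-imported by the downstream jobs with docker load. A minimal local sketch of the same hand-off, using the workflow's tag and tar path (docker save stands in for the buildx export):

    # Build once, export the image to a tar, then load it where the tests run.
    docker build -t kartoza/pg-backup:manual-build .
    docker save kartoza/pg-backup:manual-build -o /tmp/pg-backup.tar
    # On the consuming side (a separate CI job or machine):
    docker load --input /tmp/pg-backup.tar
    docker run --rm kartoza/pg-backup:manual-build true

Building once and fanning the tar out to the scenario matrix avoids rebuilding the image for every scenario.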

19 changes: 11 additions & 8 deletions .github/workflows/deploy-image.yaml
@@ -47,9 +47,11 @@ jobs:
- name: Check if image exists on Docker Hub
id: check_hub_image_exists
run: |
docker login --username ${{ secrets.DOCKERHUB_USERNAME }} --password ${{ secrets.DOCKERHUB_PASSWORD }}
echo ${{ secrets.DOCKERHUB_PASSWORD }} > /tmp/credentials.txt
cat /tmp/credentials.txt | docker login --username ${{ secrets.DOCKERHUB_USERNAME }} --password-stdin
TOKEN=$(curl -s -H "Content-Type: application/json" -X POST -d '{"username": "'${{ secrets.DOCKERHUB_USERNAME }}'", "password": "'${{ secrets.DOCKERHUB_PASSWORD }}'"}' https://hub.docker.com/v2/users/login/ | jq -r .token)
check_image=$(curl --silent -f --head -lL https://hub.docker.com/v2/repositories/kartoza/pg-backup/tags/${{ matrix.postgresMajorVersion }}-${{ matrix.postgisMajorVersion }}.${{ matrix.postgisMinorRelease }}/ | head -n 1 | cut -d ' ' -f2) >> $GITHUB_OUTPUT
rm /tmp/credentials.txt
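Piping the password to docker login --password-stdin keeps the secret out of the process list, which is the point of this change. A variant sketch that also avoids the temporary file in /tmp, assuming the secrets are exported as environment variables:

    # Stdin login without writing credentials to disk; printf keeps the
    # secret out of argv, so no /tmp/credentials.txt cleanup is needed.
    printf '%s' "$DOCKERHUB_PASSWORD" | docker login --username "$DOCKERHUB_USERNAME" --password-stdin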

- name: Build prod image
id: docker_build_prod
@@ -85,11 +87,12 @@ jobs:
needs: [ deploy-image ]
strategy:
matrix:
geoserverMajorVersion:
- 2
geoserverMinorVersion:
- minor: 26
patch: 2
postgresMajorVersion:
- 17
postgisMajorVersion:
- 3
postgisMinorRelease:
- 5
steps:
- name: Checkout code
id: git_checkout
@@ -99,7 +102,7 @@

- name: Get Current Date
id: current_date
run: echo "formatted=$(date -u +%Y.%m.%d)" >> $GITHUB_OUTPUT
run: echo "formatted=$(date -u +%Y-%m-%d)" >> $GITHUB_OUTPUT

- name: Get Latest Commit Hash
id: latest_commit_hash
@@ -108,6 +111,6 @@
- name: publish_release
id: tag_releases
run: |
gh release create v${{ matrix.postgresMajorVersion }}.${{ matrix.postgisMajorVersion }}.${{ matrix.postgisMinorRelease }}--v${{ steps.current_date.outputs.formatted }}--${{ steps.latest_commit_hash.outputs.commit }} --notes ${{ steps.latest_commit_hash.outputs.commit }} --target master --repo $GITHUB_REPOSITORY
gh release create v${{ matrix.postgresMajorVersion }}.${{ matrix.postgisMajorVersion }}.${{ matrix.postgisMinorRelease }}--${{ steps.current_date.outputs.formatted }}--${{ steps.latest_commit_hash.outputs.commit }} --notes ${{ steps.latest_commit_hash.outputs.commit }} --target master --repo $GITHUB_REPOSITORY
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
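The release step now formats the date with hyphens and drops the second v that used to precede it. Reconstructing the resulting tag name, with the matrix values from this workflow and a hypothetical commit hash:

    # 17 / 3 / 5 come from the build matrix; the commit hash is illustrative.
    POSTGRES_MAJOR=17; POSTGIS_MAJOR=3; POSTGIS_MINOR=5
    DATE=$(date -u +%Y-%m-%d)   # e.g. 2025-01-15
    COMMIT=abc1234
    echo "v${POSTGRES_MAJOR}.${POSTGIS_MAJOR}.${POSTGIS_MINOR}--${DATE}--${COMMIT}"
    # -> v17.3.5--2025-01-15--abc1234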
2 changes: 1 addition & 1 deletion build_data/backups-cron
@@ -1,4 +1,4 @@
# Run the backups at 11pm each night
${CRON_SCHEDULE} /backup-scripts/backups.sh > /var/log/cron.out 2>&1
${CRON_SCHEDULE} /backup-scripts/backups.sh > ${CONSOLE_LOGGING_OUTPUT}

# We need a blank line here for it to be a valid cron file
2 changes: 1 addition & 1 deletion build_data/backups-cron-default
@@ -1,4 +1,4 @@
# Run the backups at 11pm each night
0 23 * * * /backup-scripts/backups.sh > /var/log/cron.out 2>&1
0 23 * * * /backup-scripts/backups.sh > ${CONSOLE_LOGGING_OUTPUT}

# We need a blank line here for it to be a valid cron file
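Both cron templates now write through ${CONSOLE_LOGGING_OUTPUT}, a placeholder that start.sh (later in this diff) resolves to either the container's stdout or the previous log file. For illustration, the rendered default line under each setting:

    # CONSOLE_LOGGING=TRUE: cron output reaches PID 1's stdout, i.e. docker logs.
    0 23 * * * /backup-scripts/backups.sh > /proc/1/fd/1 2>&1
    # CONSOLE_LOGGING=FALSE: behaviour is unchanged from before this PR.
    0 23 * * * /backup-scripts/backups.sh > /var/log/cron.out 2>&1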
3 changes: 2 additions & 1 deletion docker-compose.yml
@@ -29,7 +29,8 @@ services:
- POSTGRES_PASS=docker
- POSTGRES_PORT=5432
- RUN_AS_ROOT=true
#- CRON_SCHEDULE="*/5 * * * *"
- CRON_SCHEDULE="*/5 * * * *"
- CONSOLE_LOGGING=TRUE
#- DB_DUMP_ENCRYPTION=true
restart: on-failure
depends_on:
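With CRON_SCHEDULE and CONSOLE_LOGGING both enabled in the compose file, a backup fires every five minutes and its output lands on the container's log stream. A usage sketch; the service name dbbackups is assumed, not taken from this diff:

    docker compose up -d
    docker compose logs -f dbbackups   # hypothetical service name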
14 changes: 7 additions & 7 deletions scripts/backups.sh
@@ -81,17 +81,17 @@ function backup_db() {
else
export FILENAME=${MYBASEDIR}/"${ARCHIVE_FILENAME}.${DB}.dmp"
fi
echo "Backing up $DB" >>/var/log/cron.log
echo "Backing up $DB" >> ${CONSOLE_LOGGING_OUTPUT}
if [ -z "${DB_TABLES:-}" ]; then
if [[ "${DB_DUMP_ENCRYPTION}" =~ [Tt][Rr][Uu][Ee] ]];then
PGPASSWORD=${POSTGRES_PASS} pg_dump ${PG_CONN_PARAMETERS} ${DUMP_ARGS} -d ${DB} | openssl enc -aes-256-cbc -pass pass:${DB_DUMP_ENCRYPTION_PASS_PHRASE} -pbkdf2 -iter 10000 -md sha256 -out ${FILENAME}
else
PGPASSWORD=${POSTGRES_PASS} pg_dump ${PG_CONN_PARAMETERS} ${DUMP_ARGS} -d ${DB} > ${FILENAME}
fi
echo "Backing up $FILENAME done" >>/var/log/cron.log
echo "Backing up $FILENAME done" >> ${CONSOLE_LOGGING_OUTPUT}
if [[ ${STORAGE_BACKEND} == "S3" ]]; then
gzip $FILENAME
echo "Backing up $FILENAME to s3://${BUCKET}/" >>/var/log/cron.log
echo "Backing up $FILENAME to s3://${BUCKET}/" >> ${CONSOLE_LOGGING_OUTPUT}
${EXTRA_PARAMS}
rm ${MYBACKUPDIR}/*.dmp.gz
fi
@@ -118,7 +118,7 @@ if [[ ${STORAGE_BACKEND} == "S3" ]]; then

# Backup globals Always get the latest
PGPASSWORD=${POSTGRES_PASS} pg_dumpall ${PG_CONN_PARAMETERS} --globals-only | s3cmd put - s3://${BUCKET}/globals.sql
echo "Sync globals.sql to ${BUCKET} bucket " >>/var/log/cron.log
echo "Sync globals.sql to ${BUCKET} bucket " >> ${CONSOLE_LOGGING_OUTPUT}
backup_db "s3cmd sync -r ${MYBASEDIR}/* s3://${BUCKET}/"

elif [[ ${STORAGE_BACKEND} =~ [Ff][Ii][Ll][Ee] ]]; then
@@ -129,14 +129,14 @@ elif [[ ${STORAGE_BACKEND} =~ [Ff][Ii][Ll][Ee] ]]; then

fi

echo "Backup running to $MYBACKUPDIR" >>/var/log/cron.log
echo "Backup running to $MYBACKUPDIR" >> ${CONSOLE_LOGGING_OUTPUT}


if [ "${REMOVE_BEFORE:-}" ]; then
TIME_MINUTES=$((REMOVE_BEFORE * 24 * 60))
if [[ ${STORAGE_BACKEND} == "FILE" ]]; then
echo "Removing following backups older than ${REMOVE_BEFORE} days" >>/var/log/cron.log
find ${MYBASEDIR}/* -type f -mmin +${TIME_MINUTES} -delete &>>/var/log/cron.log
echo "Removing following backups older than ${REMOVE_BEFORE} days" >> ${CONSOLE_LOGGING_OUTPUT}
find ${MYBASEDIR}/* -type f -mmin +${TIME_MINUTES} -delete &>> ${CONSOLE_LOGGING_OUTPUT}
elif [[ ${STORAGE_BACKEND} == "S3" ]]; then
# Credits https://shout.setfive.com/2011/12/05/deleting-files-older-than-specified-time-with-s3cmd-and-bash/
clean_s3bucket "${BUCKET}" "${REMOVE_BEFORE} days"
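The encrypted-dump branch of backup_db is untouched by this PR; for completeness, a hedged sketch of the matching decryption call, reusing the exact cipher parameters from the pg_dump pipeline above (the filenames are illustrative):

    # Decrypt a dump produced with DB_DUMP_ENCRYPTION=true; the parameters must
    # mirror the encryption side: aes-256-cbc, PBKDF2, 10000 iterations, SHA-256.
    openssl enc -d -aes-256-cbc -pass pass:"$DB_DUMP_ENCRYPTION_PASS_PHRASE" \
      -pbkdf2 -iter 10000 -md sha256 -in gis.dmp -out gis_decrypted.dmp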
29 changes: 21 additions & 8 deletions scripts/start.sh
@@ -118,6 +118,10 @@ if [ -z "${DB_DUMP_ENCRYPTION}" ]; then
DB_DUMP_ENCRYPTION=FALSE
fi

if [ -z "${CONSOLE_LOGGING}" ]; then
CONSOLE_LOGGING=FALSE
fi

file_env 'DB_DUMP_ENCRYPTION_PASS_PHRASE'
if [ -z "${DB_DUMP_ENCRYPTION_PASS_PHRASE}" ]; then
STRING_LENGTH=30
@@ -128,20 +132,16 @@ fi


function cron_config() {
if [[ ! -f /backup-scripts/backups-cron ]]; then
# If it doesn't exists, copy from ${EXTRA_CONF_DIR} directory if exists
if [[ -f ${EXTRA_CONFIG_DIR}/backups-cron ]]; then
cp -f ${EXTRA_CONFIG_DIR}/backups-cron /backup-scripts
else
if [[ -f ${EXTRA_CONFIG_DIR}/backups-cron ]]; then
envsubst < ${EXTRA_CONFIG_DIR}/backups-cron > /backup-scripts/backups-cron
else
# default value
if [ -z "${CRON_SCHEDULE}" ]; then
cp /build_data/backups-cron-default /backup-scripts/backups-cron
envsubst < /build_data/backups-cron-default > /backup-scripts/backups-cron
else
envsubst < /build_data/backups-cron > /backup-scripts/backups-cron
fi
fi
fi

}
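cron_config now renders every template through envsubst instead of copying files verbatim, so ${CRON_SCHEDULE} and ${CONSOLE_LOGGING_OUTPUT} are substituted when the container starts. The mechanism in isolation, with assumed values:

    # envsubst replaces exported variables inside the template text:
    export CRON_SCHEDULE='*/5 * * * *'
    export CONSOLE_LOGGING_OUTPUT='/proc/1/fd/1 2>&1'
    echo '${CRON_SCHEDULE} /backup-scripts/backups.sh > ${CONSOLE_LOGGING_OUTPUT}' | envsubst
    # -> */5 * * * * /backup-scripts/backups.sh > /proc/1/fd/1 2>&1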

function directory_checker() {
@@ -167,8 +167,14 @@ function non_root_permission() {

mkdir -p ${DEFAULT_EXTRA_CONF_DIR}
# Copy settings for cron file
if [[ ${CONSOLE_LOGGING} =~ [Tt][Rr][Uu][Ee] ]];then
export CONSOLE_LOGGING_OUTPUT='/proc/1/fd/1 2>&1'
else
export CONSOLE_LOGGING_OUTPUT='/var/log/cron.out 2>&1'
fi

cron_config

function configure_env_variables() {
echo "
export PATH=\"${PATH}\"
@@ -196,11 +202,18 @@ DB_DUMP_ENCRYPTION="${DB_DUMP_ENCRYPTION}"
export PG_CONN_PARAMETERS=\"${PG_CONN_PARAMETERS}\"
export DBLIST=\"${DBLIST}\"
" > /backup-scripts/pgenv.sh

echo "Start script running with these environment options"
set | grep PG

}
configure_env_variables
if [[ ${CONSOLE_LOGGING} =~ [Tt][Rr][Uu][Ee] ]];then
sed -i 's#${CONSOLE_LOGGING_OUTPUT}#/proc/1/fd/1 2>\&1#g' /backup-scripts/backups.sh
else
sed -i 's#${CONSOLE_LOGGING_OUTPUT}#/var/log/cron.out 2>\&1#g' /backup-scripts/backups.sh
fi

# Fix variables not interpolated
sed -i "s/'//g" /backup-scripts/backups-cron
sed -i 's/\"//g' /backup-scripts/backups-cron
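A note on the closing sed rewrite: backups.sh cannot simply expand ${CONSOLE_LOGGING_OUTPUT} at run time, because an unquoted expansion containing 2>&1 makes bash raise an ambiguous redirect instead of honouring the embedded redirection. Rewriting the literal text once at startup sidesteps that. The effect on one line of backups.sh, for illustration:

    # before: echo "Backing up $DB" >> ${CONSOLE_LOGGING_OUTPUT}
    # after (CONSOLE_LOGGING=TRUE):
    #   echo "Backing up $DB" >> /proc/1/fd/1 2>&1
    sed -i 's#${CONSOLE_LOGGING_OUTPUT}#/proc/1/fd/1 2>\&1#g' /backup-scripts/backups.sh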