test deploy on openshift
hannah-macdonald1 committed Feb 1, 2024
1 parent 6cde014 commit f120391
Showing 5 changed files with 93 additions and 30 deletions.
17 changes: 16 additions & 1 deletion .github/workflows/.merge.yml
@@ -4,6 +4,12 @@ on:
  pull_request:
    branches: [dev]
    types: [closed]
  # temp change to force a deployment
  push:
    branches:
      - "*"
  commit:

  workflow_dispatch:

concurrency:
@@ -59,12 +65,16 @@ jobs:
      packages: write
    strategy:
      matrix:
        package: [vhers-virus-scan]
        package: [vhers-virus-scan, db-cleanup]
        include:
          - package: vhers-virus-scan
            build_context: ./
            triggers: ('vhers-virus-scan/')
            build_file: Dockerfile
          - package: db-cleanup
            build_context: ./containers/db_cleanup
            triggers: ('vhers-virus-scan/containers/db_cleanup')
            build_file: Dockerfile
    timeout-minutes: 10
    steps:
      - uses: bcgov-nr/action-builder-ghcr@v2.0.0
@@ -90,6 +100,11 @@ jobs:
          - name: vhers-virus-scan
            file: ./openshift.deploy.yml
            overwrite: true
        name: [db-cleanup]
        include:
          - name: db-cleanup
            file: ./containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml
            overwrite: true

    steps:
      - uses: bcgov-nr/action-deployer-openshift@v2.0.0
50 changes: 32 additions & 18 deletions containers/db_cleanup/cleanup.js
@@ -28,36 +28,50 @@ const fileTimeString = `${now.toDate()}-${now.hour().toPrecision(2)}:${now.minut
const retainUntilString = retainUntil.format('YYYY-MM-DD HH:mm:ss.SSS ZZ');

// COPY logs that are about to be deleted to csv
// \\g /deleted/vhers-audit-log.csv
// TODO: Automate storing them elsewhere?
// try {
// pg.raw(`\\COPY public.vhers_audit_log TO '/deleted/vhers-audit-log.csv' WITH (FORMAT CSV, HEADER);`).then();
// } catch (err) {
// console.log(err);
// exit(1); // cannot continue without saving backup
// }
// pg.raw(`COPY public.vhers_audit_log TO STDOUT WITH (FORMAT CSV, HEADER)`).then(
// (ret)=>{console.log(ret); process.exit(0);},
// (err) => {console.log(err); process.exit(1);}
// );

// pool.connect(function (err, client, done) {
// var stream = client.query(to(`COPY public.vhers_audit_log TO STDOUT`))
// // var fileStream = fs.createReadStream('/deleted/vhers-audit-log.csv')
// // fileStream.on('error', done)
// stream.on('error', done)
// stream.on('finish', done)
// // fileStream.pipe(stream)
// });

// const fs = require('node:fs');
// const fs = require('fs');
// const csv = require('csv');
// const path = require('path');
// const EOL = require('os').EOL;
// const { Pool } = require('pg');
// const { to } = require('pg-copy-streams');

// var pool = new Pool({
// const pool = new Pool({
// host: process.env.DB_HOST,
// port: process.env.DB_PORT,
// user: process.env.DB_USERNAME,
// database: process.env.DB_NAME,
// password: process.env.DB_PASSWORD,
// });

// const exec = require('child_process').exec;
// const outFile = path.join( __dirname, 'vhers_audit_log.csv');
// const writeStream = fs.createWriteStream(outFile);

// const parse = csv.parse();

// const transform = csv.transform((row, cb) => {
// row.push('NEW_COL');
// result = row.join(',') + EOL;
// cb(null, result);
// });

// pool.connect(function (err, client, done) {
// const stream = client.query(to(`COPY public.vhers_audit_log TO STDOUT WITH (FORMAT CSV, HEADER)`))
// // var fileStream = fs.createReadStream('/deleted/vhers-audit-log.csv')
// // // fileStream.on('error', done)
// // stream.on('error', done)
// // stream.on('finish', done)
// // // fileStream.pipe(stream)
// stream.pipe(parse).pipe(transform).pipe(writeStream);
// stream.on('end', done)
// stream.on('error', done)
// });

// Delete the logs
pg('vhers_audit_log').where('created_at', '<', retainUntilString).delete().then(
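The commented-out export attempts above stop short of a working pipeline. For reference, a minimal sketch of the same idea: stream the table to a CSV file with pg-copy-streams before the delete runs. The Pool settings, output path, and error handling here are assumptions rather than part of the commit, and pg plus pg-copy-streams would still need to be added to the package's dependencies.

// Sketch only: stream public.vhers_audit_log to a CSV file before deleting rows.
// Assumes the same DB_* environment variables the CronJob template injects.
const fs = require('fs');
const path = require('path');
const { Pool } = require('pg');
const { to } = require('pg-copy-streams');

const pool = new Pool({
  host: process.env.DB_HOST,
  port: process.env.DB_PORT,
  user: process.env.DB_USERNAME, // the template exports DB_USER; use whichever name the script reads
  password: process.env.DB_PASSWORD,
  database: process.env.DB_NAME,
});

async function dumpAuditLog(outFile) {
  const client = await pool.connect();
  try {
    const copyStream = client.query(
      to('COPY public.vhers_audit_log TO STDOUT WITH (FORMAT CSV, HEADER)')
    );
    const fileStream = fs.createWriteStream(outFile);
    // Resolve only once every row has been flushed to disk.
    await new Promise((resolve, reject) => {
      copyStream.on('error', reject);
      fileStream.on('error', reject);
      fileStream.on('finish', resolve);
      copyStream.pipe(fileStream);
    });
  } finally {
    client.release();
  }
}

dumpAuditLog(path.join(__dirname, 'vhers_audit_log.csv')).catch((err) => {
  console.log(err);
  process.exit(1); // cannot continue without saving a backup
});

Note that \COPY is a psql client feature, so issuing it through the driver as in the first commented attempt cannot work; COPY ... TO STDOUT via pg-copy-streams is the driver-level equivalent, and because COPY already emits CSV the csv.parse/csv.transform step from the commented code is omitted in this sketch.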
20 changes: 9 additions & 11 deletions containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml
@@ -68,8 +68,10 @@ objects:
    metadata:
      name: db-cleanup-cronjob
    spec:
      schedule: "30 2 * * SUN"
      startingDeadlineSeconds: 3600
      # schedule: "30 2 * * SUN"
      # Different schedule for test purposes
      schedule: "*/10 * * * *"
      startingDeadlineSeconds: 200
      concurrencyPolicy: Forbid
      successfulJobsHistoryLimit: 3
      failedJobsHistoryLimit: 3
@@ -90,27 +92,27 @@ objects:
                      valueFrom:
                        secretKeyRef:
                          name: patroni-ha-postgres-instance
                          key: app-db-name
                          key: app-db-name-dev
                    - name: DB_HOST
                      valueFrom:
                        secretKeyRef:
                          name: patroni-ha-postgres-instance
                          key: app-db-hostname
                          key: db-hostname-dev
                    - name: DB_PORT
                      valueFrom:
                        secretKeyRef:
                          name: patroni-ha-postgres-instance
                          key: app-db-port
                          key: db-port-dev
                    - name: DB_USER
                      valueFrom:
                        secretKeyRef:
                          name: patroni-ha-postgres-instance
                          key: superuser-username
                          key: superuser-username-dev
                    - name: DB_PASSWORD
                      valueFrom:
                        secretKeyRef:
                          name: patroni-ha-postgres-instance
                          key: superuser-password
                          key: superuser-password-dev
                    - name: RENTENTION_MONTHS
                      valueFrom:
                        secretKeyRef:
@@ -126,8 +128,4 @@
                    requests:
                      memory: "${MEMORY_REQUEST}"
                      cpu: "${CPU_REQUEST}"
              volumes:
                - name: output
                  persistentVolumeClaim:
                    claimName: db-deletion-dump
              restartPolicy: OnFailure
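For context on how these variables are consumed, here is a minimal sketch of building the knex instance (named pg in cleanup.js) from the CronJob's DB_* environment, assuming dotenv for local runs; whether the script reads DB_USER (as exported here) or DB_USERNAME (as in the commented-out Pool config) is not visible in the diff.

// Sketch: construct the `pg` knex instance used by cleanup.js from the injected env vars.
require('dotenv').config(); // no-op in the cluster; convenient for local testing
const knex = require('knex');

const pg = knex({
  client: 'pg',
  connection: {
    host: process.env.DB_HOST,
    port: Number(process.env.DB_PORT),
    user: process.env.DB_USER, // assumption; see note above about DB_USERNAME
    password: process.env.DB_PASSWORD,
    database: process.env.DB_NAME,
  },
});

module.exports = pg;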
35 changes: 35 additions & 0 deletions containers/db_cleanup/package-lock.json

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions containers/db_cleanup/package.json
@@ -9,6 +9,7 @@
  "author": "",
  "license": "ISC",
  "dependencies": {
    "csv": "^6.3.6",
    "dayjs": "^1.11.10",
    "dotenv": "^16.4.1",
    "knex": "^3.1.0",
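The dayjs, dotenv, and knex dependencies listed above back the retention logic that the cleanup.js hunk truncates. A hedged sketch of how the cut-off and delete could fit together, assuming RENTENTION_MONTHS (spelled as in the template) holds the number of months of audit logs to keep and that ./db exports the knex instance from the previous sketch:

// Sketch: compute the retention cut-off and remove older vhers_audit_log rows.
const dayjs = require('dayjs');
const pg = require('./db'); // hypothetical module exporting the knex instance

const retentionMonths = parseInt(process.env.RENTENTION_MONTHS, 10);
const now = dayjs();
const retainUntil = now.subtract(retentionMonths, 'month');
const retainUntilString = retainUntil.format('YYYY-MM-DD HH:mm:ss.SSS ZZ');

pg('vhers_audit_log')
  .where('created_at', '<', retainUntilString)
  .delete()
  .then(
    (count) => {
      console.log(`Deleted ${count} rows created before ${retainUntilString}`);
      process.exit(0);
    },
    (err) => {
      console.log(err);
      process.exit(1);
    }
  );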
