From fe2fb3024400d713420dde1c9ec4116c851ab994 Mon Sep 17 00:00:00 2001
From: Matthew Bystedt
Date: Tue, 23 Apr 2024 09:38:41 -0700
Subject: [PATCH] fix: compress skip flipped logic

---
 README.md                   |  2 +-
 src/constants.ts            |  5 ++-
 src/cron/backup.ts          |  9 +++--
 src/index.broker.ts         |  0
 src/index.ts                |  9 +++--
 src/services/job.service.ts | 71 ++++++++++++++++++-------------------
 6 files changed, 48 insertions(+), 48 deletions(-)
 delete mode 100644 src/index.broker.ts

diff --git a/README.md b/README.md
index 1c1137d..8173efc 100644
--- a/README.md
+++ b/README.md
@@ -26,7 +26,7 @@ Rotated files are appended with the file's change date and the current UTC times
 
 The environment variable `CRON_COMPRESS` is used to schedule the compression of the rotated files. The each file is compressed into a 'tgz' archive.
 
-This stage can run frequently with little cost.
+This stage can run frequently with little cost. If you wish to skip this stage, set the environment variable `COMPRESS_SKIP` to be true.
 
 ### Stage 3 - Backup log file
 
diff --git a/src/constants.ts b/src/constants.ts
index a95b6b2..f331af5 100644
--- a/src/constants.ts
+++ b/src/constants.ts
@@ -3,9 +3,8 @@
 export const CRON_COMPRESS = process.env.CRON_COMPRESS ?? '*/10 * * * *';
 export const CRON_BACKUP = process.env.CRON_BACKUP ?? '*/20 * * * *';
 export const CRON_JANITOR = process.env.CRON_JANITOR ?? '*/10 * * * *';
-export const RUN_ONCE = process.env.RUN_ONCE ?? 'false';
-
-export const COMPRESS_ENABLED = process.env.COMPRESS_ENABLED ?? 'true';
+export const RUN_ONCE = process.env.RUN_ONCE === 'true';
+export const COMPRESS_SKIP = process.env.COMPRESS_SKIP === 'true';
 
 export const LOGROTATE_DIRECTORY = process.env.LOGROTATE_DIRECTORY ?? 'logs';
 export const LOGROTATE_STATUSFILE =
diff --git a/src/cron/backup.ts b/src/cron/backup.ts
index 99a94b4..be7ab42 100644
--- a/src/cron/backup.ts
+++ b/src/cron/backup.ts
@@ -12,7 +12,7 @@ import {
   BROKER_SERVICE,
   BROKER_USER,
   DB_FILE_STATUS,
-  COMPRESS_ENABLED,
+  COMPRESS_SKIP,
   OBJECT_STORAGE_ACCESS_KEY,
   OBJECT_STORAGE_BUCKET,
   OBJECT_STORAGE_ENABLED,
@@ -52,10 +52,9 @@ const objectstorageMetadata: ItemBucketMetadata =
     : undefined;
 
 export async function backup(db: DatabaseService) {
-  const dbFileStatus =
-    COMPRESS_ENABLED === 'true'
-      ? DB_FILE_STATUS.Compressed
-      : DB_FILE_STATUS.Rotated;
+  const dbFileStatus = COMPRESS_SKIP
+    ? DB_FILE_STATUS.Rotated
+    : DB_FILE_STATUS.Compressed;
   console.log('backup: start');
   const result = await db.all<{
     id: number;
diff --git a/src/index.broker.ts b/src/index.broker.ts
deleted file mode 100644
index e69de29..0000000
diff --git a/src/index.ts b/src/index.ts
index a8c0f58..39865f0 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -1,3 +1,4 @@
+import { RUN_ONCE } from './constants';
 import { DatabaseService } from './services/database.service';
 import { JobService } from './services/job.service';
 
@@ -13,10 +14,12 @@ async function main() {
       'when launching node to enable forced garbage collection.',
     );
   }
-  const runOnce = process.env.RUN_ONCE === 'true';
-  const compressEnabled = process.env.COMPRESS_ENABLED === 'true';
 
-  jobService.run(runOnce, compressEnabled);
+  if (RUN_ONCE) {
+    await jobService.runOnce();
+  } else {
+    await jobService.runCron();
+  }
 }
 
 main();
diff --git a/src/services/job.service.ts b/src/services/job.service.ts
index 196bf9c..e6296f5 100644
--- a/src/services/job.service.ts
+++ b/src/services/job.service.ts
@@ -1,5 +1,6 @@
 import { Cron } from 'croner';
 import {
+  COMPRESS_SKIP,
   CRON_BACKUP,
   CRON_COMPRESS,
   CRON_JANITOR,
@@ -24,36 +25,11 @@ export class JobService {
     this.db = db;
   }
 
-  rotate = async () => {
-    await rotateLogs(this.db);
-    runGarbageCollection();
-  };
-
-  compress = async () => {
-    console.log('start compress');
-    await syncLogsDb(this.db);
-    await compress(this.db);
-    runGarbageCollection();
-  };
-
-  backup = async () => {
-    console.log('start backup');
-    await syncLogsDb(this.db);
-    await backup(this.db);
-    runGarbageCollection();
-  };
-
-  janitor = async () => {
-    await syncLogsDb(this.db);
-    await removeOldLogs(this.db);
-    runGarbageCollection();
-  };
-
-  private async runOnceJob(compressEnabled: boolean) {
+  public async runOnce() {
     // Stage 1: Rotate log
     await this.rotate();
     // Stage 2: Compress files - optional
-    if (compressEnabled) {
+    if (!COMPRESS_SKIP) {
       await this.compress();
     }
     // Stage 3: Backup
@@ -62,23 +38,46 @@ export class JobService {
     await this.janitor();
   }
 
-  private async cronJobs(compressEnabled: boolean) {
+  public async runCron() {
     const rotateCronJob = Cron(CRON_ROTATE, this.rotate);
-
-    if (compressEnabled) {
-      const compressCronJob = Cron(CRON_COMPRESS, this.compress);
-      console.log(`Compress job next run: ${compressCronJob.nextRun()}`);
-    }
+    const compressCronJob = COMPRESS_SKIP
+      ? null
+      : Cron(CRON_COMPRESS, this.compress);
     const backupCronJob = Cron(CRON_BACKUP, this.backup);
     const janitorCronJob = Cron(CRON_JANITOR, this.janitor);
 
     console.log(`Backup job next run: ${backupCronJob.nextRun()}`);
+    if (compressCronJob) {
+      console.log(`Compress job next run: ${compressCronJob.nextRun()}`);
+    } else {
+      console.log(`Compress job next run: stage skipped`);
+    }
     console.log(`Rotate job next run: ${rotateCronJob.nextRun()}`);
     console.log(`Janitor job next run: ${janitorCronJob.nextRun()}`);
   }
 
-  async run(runOnce: boolean, compressEnabled: boolean) {
-    if (runOnce) this.runOnceJob(compressEnabled);
-    else this.cronJobs(compressEnabled);
+  private async rotate() {
+    await rotateLogs(this.db);
+    runGarbageCollection();
+  }
+
+  private async compress() {
+    console.log('start compress');
+    await syncLogsDb(this.db);
+    await compress(this.db);
+    runGarbageCollection();
+  }
+
+  private async backup() {
+    console.log('start backup');
+    await syncLogsDb(this.db);
+    await backup(this.db);
+    runGarbageCollection();
+  }
+
+  private async janitor() {
+    await syncLogsDb(this.db);
+    await removeOldLogs(this.db);
+    runGarbageCollection();
  }
 }
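
The core of the change is the status-selection rule in src/cron/backup.ts: when compression is
skipped, rotated files never become archives, so backup must pick up files that are still in the
Rotated state. A minimal TypeScript sketch of that corrected rule follows; the enum values and the
log line are placeholders for illustration only (the real DB_FILE_STATUS is whatever backup.ts
imports), so treat this as a sketch rather than repo code.

// Illustrative sketch only; enum values are assumed, not taken from the repository.
enum DB_FILE_STATUS {
  Rotated = 1,
  Compressed = 2,
}

// Mirrors src/constants.ts: only the literal string 'true' enables the skip.
const COMPRESS_SKIP = process.env.COMPRESS_SKIP === 'true';

// Skip enabled  -> back up files still in the Rotated state.
// Skip disabled -> back up the Compressed archives produced by stage 2.
const dbFileStatus = COMPRESS_SKIP
  ? DB_FILE_STATUS.Rotated
  : DB_FILE_STATUS.Compressed;

console.log(`backup selects files with status: ${DB_FILE_STATUS[dbFileStatus]}`);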