diff --git a/src/constants.ts b/src/constants.ts index f843063..a95b6b2 100644 --- a/src/constants.ts +++ b/src/constants.ts @@ -3,6 +3,10 @@ export const CRON_COMPRESS = process.env.CRON_COMPRESS ?? '*/10 * * * *'; export const CRON_BACKUP = process.env.CRON_BACKUP ?? '*/20 * * * *'; export const CRON_JANITOR = process.env.CRON_JANITOR ?? '*/10 * * * *'; +export const RUN_ONCE = process.env.RUN_ONCE ?? 'false'; + +export const COMPRESS_ENABLED = process.env.COMPRESS_ENABLED ?? 'true'; + export const LOGROTATE_DIRECTORY = process.env.LOGROTATE_DIRECTORY ?? 'logs'; export const LOGROTATE_STATUSFILE = process.env.LOGROTATE_STATUSFILE ?? 'cron.db'; diff --git a/src/cron/backup.ts b/src/cron/backup.ts index 64c94e7..23d9d97 100644 --- a/src/cron/backup.ts +++ b/src/cron/backup.ts @@ -12,6 +12,7 @@ import { BROKER_SERVICE, BROKER_USER, DB_FILE_STATUS, + COMPRESS_ENABLED, OBJECT_STORAGE_ACCESS_KEY, OBJECT_STORAGE_BUCKET, OBJECT_STORAGE_ENABLED, @@ -51,6 +52,10 @@ const objectstorageMetadata: ItemBucketMetadata = : undefined; export async function backup(db: DatabaseService) { + const dbFileStatus = + COMPRESS_ENABLED === 'true' + ? DB_FILE_STATUS.Compressed + : DB_FILE_STATUS.Rotated; console.log('backup: start'); const result = await db.all<{ id: number; @@ -63,7 +68,7 @@ export async function backup(db: DatabaseService) { WHERE status = ? 
ORDER BY id DESC `, - [DB_FILE_STATUS.Compressed], + [dbFileStatus], ); if (result.rows.length === 0) { diff --git a/src/index.ts b/src/index.ts index c3348b9..a8c0f58 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,26 +1,11 @@ -import { Cron } from 'croner'; -import { - CRON_BACKUP, - CRON_COMPRESS, - CRON_JANITOR, - CRON_ROTATE, -} from './constants'; -import { backup } from './cron/backup'; import { DatabaseService } from './services/database.service'; -import { rotateLogs } from './cron/rotate'; -import { removeOldLogs, syncLogsDb } from './cron/janitor'; -import { compress } from './cron/compress'; +import { JobService } from './services/job.service'; console.log('Starting...'); -function runGarbageCollection() { - if (global.gc) { - global.gc(); - } -} - async function main() { const db = await DatabaseService.create(); + const jobService = new JobService(db); if (!global.gc) { console.log( @@ -28,30 +13,10 @@ async function main() { 'when launching node to enable forced garbage collection.', ); } + const runOnce = process.env.RUN_ONCE === 'true'; + const compressEnabled = process.env.COMPRESS_ENABLED === 'true'; - const rotateJob = Cron(CRON_ROTATE, async () => { - await rotateLogs(db); - runGarbageCollection(); - }); - const compressJob = Cron(CRON_COMPRESS, async () => { - await syncLogsDb(db); - await compress(db); - runGarbageCollection(); - }); - const backupJob = Cron(CRON_BACKUP, async () => { - await syncLogsDb(db); - await backup(db); - runGarbageCollection(); - }); - const janitorJob = Cron(CRON_JANITOR, async () => { - await syncLogsDb(db); - await removeOldLogs(db); - runGarbageCollection(); - }); - console.log(`Rotate job next run: ${rotateJob.nextRun()}`); - console.log(`Compress job next run: ${compressJob.nextRun()}`); - console.log(`Backup job next run: ${backupJob.nextRun()}`); - console.log(`Janitor job next run: ${janitorJob.nextRun()}`); + jobService.run(runOnce, compressEnabled); } main(); diff --git a/src/services/job.service.ts 
b/src/services/job.service.ts new file mode 100644 index 0000000..196bf9c --- /dev/null +++ b/src/services/job.service.ts @@ -0,0 +1,84 @@ +import { Cron } from 'croner'; +import { + CRON_BACKUP, + CRON_COMPRESS, + CRON_JANITOR, + CRON_ROTATE, +} from '../constants'; +import { backup } from '../cron/backup'; +import { rotateLogs } from '../cron/rotate'; +import { removeOldLogs, syncLogsDb } from '../cron/janitor'; +import { compress } from '../cron/compress'; +import { DatabaseService } from './database.service'; + +function runGarbageCollection() { + if (global.gc) { + global.gc(); + } +} + +export class JobService { + private db: DatabaseService; + + constructor(db: DatabaseService) { + this.db = db; + } + + rotate = async () => { + await rotateLogs(this.db); + runGarbageCollection(); + }; + + compress = async () => { + console.log('start compress'); + await syncLogsDb(this.db); + await compress(this.db); + runGarbageCollection(); + }; + + backup = async () => { + console.log('start backup'); + await syncLogsDb(this.db); + await backup(this.db); + runGarbageCollection(); + }; + + janitor = async () => { + await syncLogsDb(this.db); + await removeOldLogs(this.db); + runGarbageCollection(); + }; + + private async runOnceJob(compressEnabled: boolean) { + // Stage 1: Rotate log + await this.rotate(); + // Stage 2: Compress files - optional + if (compressEnabled) { + await this.compress(); + } + // Stage 3: Backup + await this.backup(); + // Stage 4: Janitor + await this.janitor(); + } + + private async cronJobs(compressEnabled: boolean) { + const rotateCronJob = Cron(CRON_ROTATE, this.rotate); + + if (compressEnabled) { + const compressCronJob = Cron(CRON_COMPRESS, this.compress); + console.log(`Compress job next run: ${compressCronJob.nextRun()}`); + } + const backupCronJob = Cron(CRON_BACKUP, this.backup); + const janitorCronJob = Cron(CRON_JANITOR, this.janitor); + + console.log(`Backup job next run: ${backupCronJob.nextRun()}`); + console.log(`Rotate job next 
 run: ${rotateCronJob.nextRun()}`); + console.log(`Janitor job next run: ${janitorCronJob.nextRun()}`); + } + + async run(runOnce: boolean, compressEnabled: boolean) { + if (runOnce) await this.runOnceJob(compressEnabled); + else await this.cronJobs(compressEnabled); + } +}