Commit

Merge pull request #20 from bcgov/feat/addRunOnce
feat: add options for compress and run once
GraceRuan authored Mar 11, 2024
2 parents f59ff7b + 3569419 commit 266d7b4
Showing 4 changed files with 99 additions and 41 deletions.
4 changes: 4 additions & 0 deletions src/constants.ts
@@ -3,6 +3,10 @@ export const CRON_COMPRESS = process.env.CRON_COMPRESS ?? '*/10 * * * *';
export const CRON_BACKUP = process.env.CRON_BACKUP ?? '*/20 * * * *';
export const CRON_JANITOR = process.env.CRON_JANITOR ?? '*/10 * * * *';

export const RUN_ONCE = process.env.RUN_ONCE ?? 'false';

export const COMPRESS_ENABLED = process.env.COMPRESS_ENABLED ?? 'true';

export const LOGROTATE_DIRECTORY = process.env.LOGROTATE_DIRECTORY ?? 'logs';
export const LOGROTATE_STATUSFILE =
process.env.LOGROTATE_STATUSFILE ?? 'cron.db';
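
For context, a minimal sketch (not part of the commit) of how these string-valued flags are consumed. Both new constants keep their raw string form with a default; backup.ts below compares the exported constant against 'true', while index.ts reads process.env directly:

import { COMPRESS_ENABLED, RUN_ONCE } from './constants';

// RUN_ONCE defaults to 'false', COMPRESS_ENABLED to 'true'; anything other
// than the exact string 'true' leaves the corresponding behaviour off.
const runOnce = RUN_ONCE === 'true';
const compressEnabled = COMPRESS_ENABLED === 'true';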
7 changes: 6 additions & 1 deletion src/cron/backup.ts
@@ -12,6 +12,7 @@ import {
BROKER_SERVICE,
BROKER_USER,
DB_FILE_STATUS,
COMPRESS_ENABLED,
OBJECT_STORAGE_ACCESS_KEY,
OBJECT_STORAGE_BUCKET,
OBJECT_STORAGE_ENABLED,
@@ -51,6 +52,10 @@ const objectstorageMetadata: ItemBucketMetadata =
: undefined;

export async function backup(db: DatabaseService) {
const dbFileStatus =
COMPRESS_ENABLED === 'true'
? DB_FILE_STATUS.Compressed
: DB_FILE_STATUS.Rotated;
console.log('backup: start');
const result = await db.all<{
id: number;
@@ -63,7 +68,7 @@ export async function backup(db: DatabaseService) {
WHERE status = ?
ORDER BY id DESC
`,
[DB_FILE_STATUS.Compressed],
[dbFileStatus],
);

if (result.rows.length === 0) {
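
For orientation only: the status selection above assumes rotated and compressed file states tracked in the database. DB_FILE_STATUS is imported in the same statement as COMPRESS_ENABLED, but its definition is not shown in this diff, so the member values in this sketch are hypothetical:

// Hypothetical shape for illustration; the real DB_FILE_STATUS lives elsewhere in the repo.
enum DbFileStatusSketch {
  Rotated = 1,    // rotated but not yet compressed
  Compressed = 2, // compressed and ready for backup
}

// With COMPRESS_ENABLED set to anything other than 'true', backup() now selects
// rotated files directly instead of waiting for compressed copies that the
// disabled compress stage would never produce.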
45 changes: 5 additions & 40 deletions src/index.ts
@@ -1,57 +1,22 @@
import { Cron } from 'croner';
import {
CRON_BACKUP,
CRON_COMPRESS,
CRON_JANITOR,
CRON_ROTATE,
} from './constants';
import { backup } from './cron/backup';
import { DatabaseService } from './services/database.service';
import { rotateLogs } from './cron/rotate';
import { removeOldLogs, syncLogsDb } from './cron/janitor';
import { compress } from './cron/compress';
import { JobService } from './services/job.service';

console.log('Starting...');

function runGarbageCollection() {
if (global.gc) {
global.gc();
}
}

async function main() {
const db = await DatabaseService.create();
const jobService = new JobService(db);

if (!global.gc) {
console.log(
'Garbage collection unavailable. Pass --expose-gc ' +
'when launching node to enable forced garbage collection.',
);
}
const runOnce = process.env.RUN_ONCE === 'true';
const compressEnabled = process.env.COMPRESS_ENABLED === 'true';

const rotateJob = Cron(CRON_ROTATE, async () => {
await rotateLogs(db);
runGarbageCollection();
});
const compressJob = Cron(CRON_COMPRESS, async () => {
await syncLogsDb(db);
await compress(db);
runGarbageCollection();
});
const backupJob = Cron(CRON_BACKUP, async () => {
await syncLogsDb(db);
await backup(db);
runGarbageCollection();
});
const janitorJob = Cron(CRON_JANITOR, async () => {
await syncLogsDb(db);
await removeOldLogs(db);
runGarbageCollection();
});
console.log(`Rotate job next run: ${rotateJob.nextRun()}`);
console.log(`Compress job next run: ${compressJob.nextRun()}`);
console.log(`Backup job next run: ${backupJob.nextRun()}`);
console.log(`Janitor job next run: ${janitorJob.nextRun()}`);
jobService.run(runOnce, compressEnabled);
}

main();
84 changes: 84 additions & 0 deletions src/services/job.service.ts
@@ -0,0 +1,84 @@
import { Cron } from 'croner';
import {
CRON_BACKUP,
CRON_COMPRESS,
CRON_JANITOR,
CRON_ROTATE,
} from '../constants';
import { backup } from '../cron/backup';
import { rotateLogs } from '../cron/rotate';
import { removeOldLogs, syncLogsDb } from '../cron/janitor';
import { compress } from '../cron/compress';
import { DatabaseService } from './database.service';

function runGarbageCollection() {
if (global.gc) {
global.gc();
}
}

export class JobService {
private db: DatabaseService;

constructor(db: DatabaseService) {
this.db = db;
}

rotate = async () => {
await rotateLogs(this.db);
runGarbageCollection();
};

compress = async () => {
console.log('start compress');
await syncLogsDb(this.db);
await compress(this.db);
runGarbageCollection();
};

backup = async () => {
console.log('start backup');
await syncLogsDb(this.db);
await backup(this.db);
runGarbageCollection();
};

janitor = async () => {
await syncLogsDb(this.db);
await removeOldLogs(this.db);
runGarbageCollection();
};

private async runOnceJob(compressEnabled: boolean) {
// Stage 1: Rotate log
await this.rotate();
// Stage 2: Compress files - optional
if (compressEnabled) {
await this.compress();
}
// Stage 3: Backup
await this.backup();
// Stage 4: Janitor
await this.janitor();
}

private async cronJobs(compressEnabled: boolean) {
const rotateCronJob = Cron(CRON_ROTATE, this.rotate);

if (compressEnabled) {
const compressCronJob = Cron(CRON_COMPRESS, this.compress);
console.log(`Compress job next run: ${compressCronJob.nextRun()}`);
}
const backupCronJob = Cron(CRON_BACKUP, this.backup);
const janitorCronJob = Cron(CRON_JANITOR, this.janitor);

console.log(`Backup job next run: ${backupCronJob.nextRun()}`);
console.log(`Rotate job next run: ${rotateCronJob.nextRun()}`);
console.log(`Janitor job next run: ${janitorCronJob.nextRun()}`);
}

async run(runOnce: boolean, compressEnabled: boolean) {
if (runOnce) this.runOnceJob(compressEnabled);
else this.cronJobs(compressEnabled);
}
}
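
A usage sketch (illustrative only) mirroring how index.ts wires up the new service. Note that run() fires the selected mode without returning its promise, so callers cannot await completion of the run-once pipeline:

import { DatabaseService } from './services/database.service';
import { JobService } from './services/job.service';

async function demo() {
  const db = await DatabaseService.create();
  const jobs = new JobService(db);
  // runOnce=true: rotate -> (compress, if enabled) -> backup -> janitor, once.
  // runOnce=false: register the four Croner schedules instead.
  jobs.run(true, false); // run once, skip the compress stage
}

demo();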
