From 8ba3e75f57e57b3fda6e376cbbac900fb282c379 Mon Sep 17 00:00:00 2001
From: Blayne Chard
Date: Mon, 16 Dec 2024 13:13:30 +1300
Subject: [PATCH] refactor: add in index.ts for default handler

---
 .../src/__test__/analytics.test.ts            |  6 ++---
 .../lambda-analytic-cloudfront/src/handler.ts | 24 +++++++++----------
 .../lambda-analytic-cloudfront/src/index.ts   |  6 +++++
 3 files changed, 21 insertions(+), 15 deletions(-)
 create mode 100644 packages/lambda-analytic-cloudfront/src/index.ts

diff --git a/packages/lambda-analytic-cloudfront/src/__test__/analytics.test.ts b/packages/lambda-analytic-cloudfront/src/__test__/analytics.test.ts
index 29e960974..5c2e62a04 100644
--- a/packages/lambda-analytic-cloudfront/src/__test__/analytics.test.ts
+++ b/packages/lambda-analytic-cloudfront/src/__test__/analytics.test.ts
@@ -7,7 +7,7 @@ import { Client } from '@elastic/elasticsearch';
 
 import { getYesterday } from '../date.js';
 import { Elastic } from '../elastic.js';
-import { handler } from '../handler.js';
+import { main } from '../handler.js';
 import { LogStats } from '../log.stats.js';
 
 import { LogData } from './log.data.js';
@@ -59,7 +59,7 @@ describe('analytic lambda', () => {
 
     await fsa.write(new URL(`mem://source/cfid.${shortDate}/data.txt.gz`), gzipSync(LogData));
 
-    await handler();
+    await main();
 
     // One call to insert
     assert.equal(operations.length, 1);
@@ -105,7 +105,7 @@ describe('analytic lambda', () => {
 
     await fsa.write(new URL(`mem://source/cfid.${shortDate}/data.txt.gz`), gzipSync(LogData));
 
-    const ret = await handler().catch((e: Error) => e);
+    const ret = await main().catch((e: Error) => e);
 
     assert.equal(String(ret), 'Error: Failed to index');
 
diff --git a/packages/lambda-analytic-cloudfront/src/handler.ts b/packages/lambda-analytic-cloudfront/src/handler.ts
index e67edcfa7..299aabc01 100644
--- a/packages/lambda-analytic-cloudfront/src/handler.ts
+++ b/packages/lambda-analytic-cloudfront/src/handler.ts
@@ -2,6 +2,7 @@ import { promisify } from 'node:util';
 import { gzip } from 'node:zlib';
 
 import { Env, fsa, LogConfig } from '@basemaps/shared';
+import { LambdaRequest } from '@linzjs/lambda';
 import pLimit from 'p-limit';
 import { basename } from 'path';
 
@@ -31,15 +32,14 @@ function getEnvUrl(env: string): URL {
   }
 }
 
-export async function handler(): Promise<void> {
+export async function main(req: LambdaRequest): Promise<void> {
   const SourceLocation = getEnvUrl(Env.Analytics.CloudFrontSourceBucket);
   const CacheLocation = getEnvUrl(Env.Analytics.CacheBucket);
   const CloudFrontId = Env.get(Env.Analytics.CloudFrontId);
 
   const MaxToProcess = Env.getNumber(Env.Analytics.MaxRecords, 24 * 7 * 4); // Process 4 weeks of logs by default
 
-  const logger = LogConfig.get();
-  logger.info(
+  req.log.info(
     { source: SourceLocation.href, cacheLocation: CacheLocation.href, cloudFrontId: CloudFrontId },
     'log:index:start',
   );
@@ -70,19 +70,19 @@ export async function handler(): Promise<void> {
     const promise = hourQ(async () => {
       // Cache file exists skip processing
       if (await fsa.exists(cacheUrl)) {
-        logger.debug({ prefix }, 'log:prefix:skip');
+        req.log.debug({ prefix }, 'log:prefix:skip');
         return;
       }
 
       const startTime = performance.now();
-      logger.trace({ prefix }, 'log:prefix:start');
+      req.log.trace({ prefix }, 'log:prefix:start');
 
       const logPrefix = new URL(`${CloudFrontId}.${prefix}`, SourceLocation);
       const stats = new Map<string, LogStats>();
 
       const logFiles = await fsa.toArray(fsa.list(logPrefix));
       if (logFiles.length === 0) {
-        logger.info({ prefix }, 'log:prefix:no-files');
+        req.log.info({ prefix }, 'log:prefix:no-files');
         return;
       }
 
@@ -93,7 +93,7 @@ export async function handler(): Promise<void> {
 
           const fileStartTime = performance.now();
           const fileLines = await FileProcess.process(lf, stats);
-          logger.trace(
+          req.log.trace(
             {
               prefix: prefix,
               file: basename(lf.pathname),
@@ -113,11 +113,11 @@ export async function handler(): Promise<void> {
 
       // Extrac thte values
       const allStats = [...stats.values()];
-      await Elastic.insert(prefix, allStats, logger);
+      await Elastic.insert(prefix, allStats, req.log);
 
       // Ensure everything is indexed into elasticsearch before writing the cache to disk
       await fsa.write(cacheUrl, await gzipPromise(JSON.stringify(allStats)));
-      logger.info(
+      req.log.info(
         {
           prefix: prefix,
           files: logFiles.length,
@@ -134,17 +134,17 @@ export async function handler(): Promise<void> {
 
   const rets = await Promise.allSettled(todo);
 
-  // If anythign fails to index write the errors out to a log file at the cache location
+  // If anything fails to index write the errors out to a log file at the cache location
   if (Elastic.errors.length > 0) {
     const errorLocation = new URL(`./errors-${new Date().toISOString()}.json`, CacheLocation);
-    logger.fatal({ errorLocation: errorLocation.href }, 'log:index:failed');
+    req.log.fatal({ errorLocation: errorLocation.href }, 'log:index:failed');
     await fsa.write(errorLocation, JSON.stringify(Elastic.errors));
   }
 
   let failed = false;
   for (const ret of rets) {
     if (ret.status !== 'rejected') continue;
-    logger.fatal({ err: ret.reason }, 'log:index:failed');
+    req.log.fatal({ err: ret.reason }, 'log:index:failed');
     failed = true;
   }
   if (failed) throw new Error('Failed to index');
diff --git a/packages/lambda-analytic-cloudfront/src/index.ts b/packages/lambda-analytic-cloudfront/src/index.ts
new file mode 100644
index 000000000..8fe889cc7
--- /dev/null
+++ b/packages/lambda-analytic-cloudfront/src/index.ts
@@ -0,0 +1,6 @@
+import { LogConfig } from '@basemaps/shared';
+import { lf } from '@linzjs/lambda';
+
+import { main } from './handler.js';
+
+export const handler = lf.handler(main, { tracePercent: 0, rejectOnError: true }, LogConfig.get());