From c9ac1934b5c14905293e9a8bf6335e164bc27d96 Mon Sep 17 00:00:00 2001 From: Harry Minsky Date: Mon, 4 Nov 2024 16:02:13 -0500 Subject: [PATCH] Refactor and migrate BulkUpload to BulkUploadTask This commit renames BulkUpload (and all derived identifiers) to BulkUploadTask, and moves the /bulkUploads routes into a new tasks router, which is intended to contain all task routes moving forward. --- CHANGELOG.md | 8 + ...nt.test.ts => bulkUploadTasks.int.test.ts} | 114 +++++----- .../bulk_upload_task_to_json.sql | 25 +++ .../initialization/bulk_upload_to_json.sql | 25 --- ...name-bulk_uploads-to-bulk_upload_tasks.sql | 2 + .../createBulkUploadTask.ts} | 14 +- .../operations/bulkUploadTasks/index.ts | 4 + .../loadBulkUploadTask.ts} | 12 +- .../loadBulkUploadTaskBundle.ts} | 12 +- .../updateBulkUploadTask.ts} | 16 +- src/database/operations/bulkUploads/index.ts | 4 - src/database/operations/index.ts | 2 +- .../insertOne.sql | 4 +- .../queries/bulkUploadTasks/selectById.sql | 3 + .../selectWithPagination.sql | 8 +- .../updateById.sql | 4 +- .../queries/bulkUploads/selectById.sql | 3 - ...Handlers.ts => bulkUploadTasksHandlers.ts} | 40 ++-- src/jobQueue.ts | 8 +- src/openapi.json | 26 +-- src/routers/bulkUploadsRouter.ts | 19 -- src/routers/index.ts | 4 +- src/routers/tasksRouter.ts | 19 ++ .../empty.csv | 0 .../invalidShortCode.csv | 0 .../missingEmail.csv | 0 .../validCsvTemplate.csv | 0 .../validCsvTemplateWithChangemakers.csv | 0 ...t.ts => processBulkUploadTask.int.test.ts} | 208 +++++++++--------- ....ts => processBulkUploadTask.unit.test.ts} | 6 +- src/tasks/index.ts | 2 +- ...BulkUpload.ts => processBulkUploadTask.ts} | 99 +++++---- .../{BulkUpload.ts => BulkUploadTask.ts} | 34 ++- src/types/TaskStatus.ts | 9 + src/types/index.ts | 3 +- 35 files changed, 382 insertions(+), 355 deletions(-) rename src/__tests__/{bulkUploads.int.test.ts => bulkUploadTasks.int.test.ts} (82%) create mode 100644 src/database/initialization/bulk_upload_task_to_json.sql delete mode 100644 src/database/initialization/bulk_upload_to_json.sql create mode 100644 src/database/migrations/0040-rename-bulk_uploads-to-bulk_upload_tasks.sql rename src/database/operations/{bulkUploads/createBulkUpload.ts => bulkUploadTasks/createBulkUploadTask.ts} (62%) create mode 100644 src/database/operations/bulkUploadTasks/index.ts rename src/database/operations/{bulkUploads/loadBulkUpload.ts => bulkUploadTasks/loadBulkUploadTask.ts} (59%) rename src/database/operations/{bulkUploads/loadBulkUploadBundle.ts => bulkUploadTasks/loadBulkUploadTaskBundle.ts} (72%) rename src/database/operations/{bulkUploads/updateBulkUpload.ts => bulkUploadTasks/updateBulkUploadTask.ts} (62%) delete mode 100644 src/database/operations/bulkUploads/index.ts rename src/database/queries/{bulkUploads => bulkUploadTasks}/insertOne.sql (58%) create mode 100644 src/database/queries/bulkUploadTasks/selectById.sql rename src/database/queries/{bulkUploads => bulkUploadTasks}/selectWithPagination.sql (58%) rename src/database/queries/{bulkUploads => bulkUploadTasks}/updateById.sql (62%) delete mode 100644 src/database/queries/bulkUploads/selectById.sql rename src/handlers/{bulkUploadsHandlers.ts => bulkUploadTasksHandlers.ts} (75%) delete mode 100644 src/routers/bulkUploadsRouter.ts create mode 100644 src/routers/tasksRouter.ts rename src/tasks/__tests__/fixtures/{processBulkUpload => processBulkUploadTask}/empty.csv (100%) rename src/tasks/__tests__/fixtures/{processBulkUpload => processBulkUploadTask}/invalidShortCode.csv (100%) rename 
src/tasks/__tests__/fixtures/{processBulkUpload => processBulkUploadTask}/missingEmail.csv (100%) rename src/tasks/__tests__/fixtures/{processBulkUpload => processBulkUploadTask}/validCsvTemplate.csv (100%) rename src/tasks/__tests__/fixtures/{processBulkUpload => processBulkUploadTask}/validCsvTemplateWithChangemakers.csv (100%) rename src/tasks/__tests__/{processBulkUpload.int.test.ts => processBulkUploadTask.int.test.ts} (73%) rename src/tasks/__tests__/{processBulkUpload.unit.test.ts => processBulkUploadTask.unit.test.ts} (54%) rename src/tasks/{processBulkUpload.ts => processBulkUploadTask.ts} (77%) rename src/types/{BulkUpload.ts => BulkUploadTask.ts} (53%) create mode 100644 src/types/TaskStatus.ts diff --git a/CHANGELOG.md b/CHANGELOG.md index 5c1275a9..9c2f3587 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Upgraded to use OpenAPI Specification 3.1. +## 0.17.0 2024-11-07 + +### Changed + +- `BulkUpload` is now `BulkUploadTask`. +- `GET /bulkUploads` and `POST /bulkUploads` are now `GET /tasks/bulkUploads` and `POST /tasks/bulkUploads`. + Future jobs for the graphile-worker will be routed under `/tasks` as well. + ## 0.16.0 2024-11-7 ### Added diff --git a/src/__tests__/bulkUploads.int.test.ts b/src/__tests__/bulkUploadTasks.int.test.ts similarity index 82% rename from src/__tests__/bulkUploads.int.test.ts rename to src/__tests__/bulkUploadTasks.int.test.ts index 1409ffca..60348ad1 100644 --- a/src/__tests__/bulkUploads.int.test.ts +++ b/src/__tests__/bulkUploadTasks.int.test.ts @@ -1,7 +1,7 @@ import request from 'supertest'; import { app } from '../app'; import { - createBulkUpload, + createBulkUploadTask, createUser, loadSystemSource, loadSystemUser, @@ -13,66 +13,66 @@ import { mockJwtWithoutSub as authHeaderWithNoSub, mockJwtWithAdminRole as authHeaderWithAdminRole, } from '../test/mockJwt'; -import { BulkUploadStatus, keycloakUserIdToString } from '../types'; +import { TaskStatus, keycloakUserIdToString } from '../types'; -describe('/bulkUploads', () => { +describe('/tasks/bulkUploads', () => { describe('GET /', () => { it('requires authentication', async () => { - await request(app).get('/bulkUploads').expect(401); + await request(app).get('/tasks/bulkUploads').expect(401); }); it('requires a user', async () => { await request(app) - .get('/bulkUploads') + .get('/tasks/bulkUploads') .set(authHeaderWithNoSub) .expect(401); }); it('returns an empty Bundle when no data is present', async () => { - await request(app).get('/bulkUploads').set(authHeader).expect(200, { + await request(app).get('/tasks/bulkUploads').set(authHeader).expect(200, { total: 0, entries: [], }); }); - it('returns bulk uploads associated with the requesting user', async () => { + it('returns bulk upload tasks associated with the requesting user', async () => { const systemUser = await loadSystemUser(); const systemSource = await loadSystemSource(); const testUser = await loadTestUser(); const thirdUser = await createUser({ keycloakUserId: '123e4567-e89b-12d3-a456-426614174000', }); - await createBulkUpload({ + await createBulkUploadTask({ sourceId: systemSource.id, fileName: 'foo.csv', sourceKey: '96ddab90-1931-478d-8c02-a1dc80ae01e5-foo', - status: BulkUploadStatus.PENDING, + status: TaskStatus.PENDING, createdBy: testUser.keycloakUserId, }); - await createBulkUpload({ + await createBulkUploadTask({ sourceId: systemSource.id, fileName: 'bar.csv', sourceKey: '96ddab90-1931-478d-8c02-a1dc80ae01e5-bar', - status: 
BulkUploadStatus.COMPLETED, + status: TaskStatus.COMPLETED, createdBy: testUser.keycloakUserId, }); - await createBulkUpload({ + await createBulkUploadTask({ sourceId: systemSource.id, fileName: 'baz.csv', sourceKey: '96ddab90-1931-478d-8c02-a1dc80ae01e5-baz', - status: BulkUploadStatus.COMPLETED, + status: TaskStatus.COMPLETED, createdBy: systemUser.keycloakUserId, }); - await createBulkUpload({ + await createBulkUploadTask({ sourceId: systemSource.id, fileName: 'boop.csv', sourceKey: '96ddab90-1931-478d-8c02-a1dc80ae01e5-boop', - status: BulkUploadStatus.COMPLETED, + status: TaskStatus.COMPLETED, createdBy: thirdUser.keycloakUserId, }); await request(app) - .get('/bulkUploads') + .get('/tasks/bulkUploads') .set(authHeader) .expect(200) .expect((res) => @@ -86,7 +86,7 @@ describe('/bulkUploads', () => { fileName: 'bar.csv', fileSize: null, sourceKey: '96ddab90-1931-478d-8c02-a1dc80ae01e5-bar', - status: BulkUploadStatus.COMPLETED, + status: TaskStatus.COMPLETED, createdAt: expectTimestamp, createdBy: testUser.keycloakUserId, }, @@ -97,7 +97,7 @@ describe('/bulkUploads', () => { fileName: 'foo.csv', fileSize: null, sourceKey: '96ddab90-1931-478d-8c02-a1dc80ae01e5-foo', - status: BulkUploadStatus.PENDING, + status: TaskStatus.PENDING, createdAt: expectTimestamp, createdBy: testUser.keycloakUserId, }, @@ -112,23 +112,23 @@ describe('/bulkUploads', () => { const anotherUser = await createUser({ keycloakUserId: '123e4567-e89b-12d3-a456-426614174000', }); - await createBulkUpload({ + await createBulkUploadTask({ sourceId: systemSource.id, fileName: 'foo.csv', sourceKey: '96ddab90-1931-478d-8c02-a1dc80ae01e5-foo', - status: BulkUploadStatus.PENDING, + status: TaskStatus.PENDING, createdBy: testUser.keycloakUserId, }); - await createBulkUpload({ + await createBulkUploadTask({ sourceId: systemSource.id, fileName: 'bar.csv', sourceKey: '96ddab90-1931-478d-8c02-a1dc80ae01e5-bar', - status: BulkUploadStatus.COMPLETED, + status: TaskStatus.COMPLETED, createdBy: anotherUser.keycloakUserId, }); await request(app) - .get('/bulkUploads') + .get('/tasks/bulkUploads') .set(authHeaderWithAdminRole) .expect(200) .expect((res) => @@ -142,7 +142,7 @@ describe('/bulkUploads', () => { fileName: 'bar.csv', fileSize: null, sourceKey: '96ddab90-1931-478d-8c02-a1dc80ae01e5-bar', - status: BulkUploadStatus.COMPLETED, + status: TaskStatus.COMPLETED, createdAt: expectTimestamp, createdBy: anotherUser.keycloakUserId, }, @@ -153,7 +153,7 @@ describe('/bulkUploads', () => { fileName: 'foo.csv', fileSize: null, sourceKey: '96ddab90-1931-478d-8c02-a1dc80ae01e5-foo', - status: BulkUploadStatus.PENDING, + status: TaskStatus.PENDING, createdAt: expectTimestamp, createdBy: testUser.keycloakUserId, }, @@ -162,30 +162,30 @@ describe('/bulkUploads', () => { ); }); - it('returns uploads for specified createdBy user', async () => { + it('returns upload tasks for specified createdBy user', async () => { const systemSource = await loadSystemSource(); const testUser = await loadTestUser(); const anotherUser = await createUser({ keycloakUserId: '123e4567-e89b-12d3-a456-426614174000', }); - await createBulkUpload({ + await createBulkUploadTask({ sourceId: systemSource.id, fileName: 'foo.csv', sourceKey: '96ddab90-1931-478d-8c02-a1dc80ae01e5-foo', - status: BulkUploadStatus.PENDING, + status: TaskStatus.PENDING, createdBy: testUser.keycloakUserId, }); - await createBulkUpload({ + await createBulkUploadTask({ sourceId: systemSource.id, fileName: 'bar.csv', sourceKey: '96ddab90-1931-478d-8c02-a1dc80ae01e5-bar', - status: 
BulkUploadStatus.COMPLETED, + status: TaskStatus.COMPLETED, createdBy: anotherUser.keycloakUserId, }); await request(app) .get( - `/bulkUploads?createdBy=${keycloakUserIdToString(anotherUser.keycloakUserId)}`, + `/tasks/bulkUploads?createdBy=${keycloakUserIdToString(anotherUser.keycloakUserId)}`, ) .set(authHeaderWithAdminRole) .expect(200) @@ -200,7 +200,7 @@ describe('/bulkUploads', () => { fileName: 'bar.csv', fileSize: null, sourceKey: '96ddab90-1931-478d-8c02-a1dc80ae01e5-bar', - status: BulkUploadStatus.COMPLETED, + status: TaskStatus.COMPLETED, createdAt: expectTimestamp, createdBy: anotherUser.keycloakUserId, }, @@ -209,29 +209,29 @@ describe('/bulkUploads', () => { ); }); - it('returns uploads for the admin user when createdBy is set to me as an admin', async () => { + it('returns upload tasks for the admin user when createdBy is set to me as an admin', async () => { const systemSource = await loadSystemSource(); const testUser = await loadTestUser(); const anotherUser = await createUser({ keycloakUserId: '123e4567-e89b-12d3-a456-426614174000', }); - await createBulkUpload({ + await createBulkUploadTask({ sourceId: systemSource.id, fileName: 'foo.csv', sourceKey: '96ddab90-1931-478d-8c02-a1dc80ae01e5-foo', - status: BulkUploadStatus.PENDING, + status: TaskStatus.PENDING, createdBy: testUser.keycloakUserId, }); - await createBulkUpload({ + await createBulkUploadTask({ sourceId: systemSource.id, fileName: 'bar.csv', sourceKey: '96ddab90-1931-478d-8c02-a1dc80ae01e5-bar', - status: BulkUploadStatus.COMPLETED, + status: TaskStatus.COMPLETED, createdBy: anotherUser.keycloakUserId, }); await request(app) - .get(`/bulkUploads?createdBy=me`) + .get(`/tasks/bulkUploads?createdBy=me`) .set(authHeaderWithAdminRole) .expect(200) .expect((res) => @@ -245,7 +245,7 @@ describe('/bulkUploads', () => { fileName: 'foo.csv', fileSize: null, sourceKey: '96ddab90-1931-478d-8c02-a1dc80ae01e5-foo', - status: BulkUploadStatus.PENDING, + status: TaskStatus.PENDING, createdAt: expectTimestamp, createdBy: testUser.keycloakUserId, }, @@ -259,17 +259,17 @@ describe('/bulkUploads', () => { const testUser = await loadTestUser(); await Array.from(Array(20)).reduce(async (p, _, i) => { await p; - await createBulkUpload({ + await createBulkUploadTask({ sourceId: systemSource.id, fileName: `bar-${i + 1}.csv`, sourceKey: 'unprocessed/96ddab90-1931-478d-8c02-a1dc80ae01e5-bar', - status: BulkUploadStatus.COMPLETED, + status: TaskStatus.COMPLETED, createdBy: testUser.keycloakUserId, }); }, Promise.resolve()); await request(app) - .get('/bulkUploads') + .get('/tasks/bulkUploads') .query({ _page: 2, _count: 5, @@ -288,7 +288,7 @@ describe('/bulkUploads', () => { fileSize: null, sourceKey: 'unprocessed/96ddab90-1931-478d-8c02-a1dc80ae01e5-bar', - status: BulkUploadStatus.COMPLETED, + status: TaskStatus.COMPLETED, createdAt: expectTimestamp, createdBy: testUser.keycloakUserId, }, @@ -300,7 +300,7 @@ describe('/bulkUploads', () => { fileSize: null, sourceKey: 'unprocessed/96ddab90-1931-478d-8c02-a1dc80ae01e5-bar', - status: BulkUploadStatus.COMPLETED, + status: TaskStatus.COMPLETED, createdAt: expectTimestamp, createdBy: testUser.keycloakUserId, }, @@ -312,7 +312,7 @@ describe('/bulkUploads', () => { fileSize: null, sourceKey: 'unprocessed/96ddab90-1931-478d-8c02-a1dc80ae01e5-bar', - status: BulkUploadStatus.COMPLETED, + status: TaskStatus.COMPLETED, createdAt: expectTimestamp, createdBy: testUser.keycloakUserId, }, @@ -324,7 +324,7 @@ describe('/bulkUploads', () => { fileSize: null, sourceKey: 
'unprocessed/96ddab90-1931-478d-8c02-a1dc80ae01e5-bar', - status: BulkUploadStatus.COMPLETED, + status: TaskStatus.COMPLETED, createdAt: expectTimestamp, createdBy: testUser.keycloakUserId, }, @@ -336,7 +336,7 @@ describe('/bulkUploads', () => { fileSize: null, sourceKey: 'unprocessed/96ddab90-1931-478d-8c02-a1dc80ae01e5-bar', - status: BulkUploadStatus.COMPLETED, + status: TaskStatus.COMPLETED, createdAt: expectTimestamp, createdBy: testUser.keycloakUserId, }, @@ -348,21 +348,21 @@ describe('/bulkUploads', () => { describe('POST /', () => { it('requires authentication', async () => { - await request(app).post('/bulkUploads').expect(401); + await request(app).post('/tasks/bulkUploads/').expect(401); }); it('requires a user', async () => { await request(app) - .post('/bulkUploads') + .post('/tasks/bulkUploads/') .set(authHeaderWithNoSub) .expect(401); }); - it('creates exactly one bulk upload', async () => { + it('creates exactly one bulk upload task', async () => { const systemSource = await loadSystemSource(); - const before = await loadTableMetrics('bulk_uploads'); + const before = await loadTableMetrics('bulk_upload_tasks'); const result = await request(app) - .post('/bulkUploads') + .post('/tasks/bulkUploads/') .type('application/json') .set(authHeader) .send({ @@ -371,7 +371,7 @@ describe('/bulkUploads', () => { sourceKey: 'unprocessed/96ddab90-1931-478d-8c02-a1dc80ae01e5-bar', }) .expect(201); - const after = await loadTableMetrics('bulk_uploads'); + const after = await loadTableMetrics('bulk_upload_tasks'); const testUser = await loadTestUser(); expect(before.count).toEqual(0); @@ -392,7 +392,7 @@ describe('/bulkUploads', () => { it('returns 400 bad request when no file name is provided', async () => { const systemSource = await loadSystemSource(); const result = await request(app) - .post('/bulkUploads') + .post('/tasks/bulkUploads') .type('application/json') .set(authHeader) .send({ @@ -409,7 +409,7 @@ describe('/bulkUploads', () => { it('returns 400 bad request when an invalid file name is provided', async () => { const systemSource = await loadSystemSource(); const result = await request(app) - .post('/bulkUploads') + .post('/tasks/bulkUploads') .type('application/json') .set(authHeader) .send({ @@ -426,7 +426,7 @@ describe('/bulkUploads', () => { it('returns 400 bad request when an invalid source key is provided', async () => { const result = await request(app) - .post('/bulkUploads') + .post('/tasks/bulkUploads') .type('application/json') .set(authHeader) .send({ @@ -442,7 +442,7 @@ describe('/bulkUploads', () => { it('returns 400 bad request when no source key is provided', async () => { const result = await request(app) - .post('/bulkUploads') + .post('/tasks/bulkUploads') .type('application/json') .set(authHeader) .send({ diff --git a/src/database/initialization/bulk_upload_task_to_json.sql b/src/database/initialization/bulk_upload_task_to_json.sql new file mode 100644 index 00000000..8fb86943 --- /dev/null +++ b/src/database/initialization/bulk_upload_task_to_json.sql @@ -0,0 +1,25 @@ +SELECT drop_function('bulk_upload_task_to_json'); + +CREATE FUNCTION bulk_upload_task_to_json(bulk_upload_task bulk_upload_tasks) +RETURNS JSONB AS $$ +DECLARE + source_json JSONB; +BEGIN + SELECT source_to_json(sources.*) + INTO source_json + FROM sources + WHERE sources.id = bulk_upload_task.source_id; + + RETURN jsonb_build_object( + 'id', bulk_upload_task.id, + 'sourceId', bulk_upload_task.source_id, + 'source', source_json, + 'fileName', bulk_upload_task.file_name, + 'sourceKey', 
bulk_upload_task.source_key, + 'status', bulk_upload_task.status, + 'fileSize', bulk_upload_task.file_size, + 'createdBy', bulk_upload_task.created_by, + 'createdAt', to_json(bulk_upload_task.created_at)::jsonb + ); +END; +$$ LANGUAGE plpgsql; diff --git a/src/database/initialization/bulk_upload_to_json.sql b/src/database/initialization/bulk_upload_to_json.sql deleted file mode 100644 index 81563508..00000000 --- a/src/database/initialization/bulk_upload_to_json.sql +++ /dev/null @@ -1,25 +0,0 @@ -SELECT drop_function('bulk_upload_to_json'); - -CREATE FUNCTION bulk_upload_to_json(bulk_upload bulk_uploads) -RETURNS JSONB AS $$ -DECLARE - source_json JSONB; -BEGIN - SELECT source_to_json(sources.*) - INTO source_json - FROM sources - WHERE sources.id = bulk_upload.source_id; - - RETURN jsonb_build_object( - 'id', bulk_upload.id, - 'sourceId', bulk_upload.source_id, - 'source', source_json, - 'fileName', bulk_upload.file_name, - 'sourceKey', bulk_upload.source_key, - 'status', bulk_upload.status, - 'fileSize', bulk_upload.file_size, - 'createdBy', bulk_upload.created_by, - 'createdAt', to_json(bulk_upload.created_at)::jsonb - ); -END; -$$ LANGUAGE plpgsql; diff --git a/src/database/migrations/0040-rename-bulk_uploads-to-bulk_upload_tasks.sql b/src/database/migrations/0040-rename-bulk_uploads-to-bulk_upload_tasks.sql new file mode 100644 index 00000000..0854e3ea --- /dev/null +++ b/src/database/migrations/0040-rename-bulk_uploads-to-bulk_upload_tasks.sql @@ -0,0 +1,2 @@ +ALTER TABLE bulk_uploads RENAME TO bulk_upload_tasks; +ALTER TYPE bulk_upload_status RENAME TO task_status; diff --git a/src/database/operations/bulkUploads/createBulkUpload.ts b/src/database/operations/bulkUploadTasks/createBulkUploadTask.ts similarity index 62% rename from src/database/operations/bulkUploads/createBulkUpload.ts rename to src/database/operations/bulkUploadTasks/createBulkUploadTask.ts index fb2a5d2f..74b8add9 100644 --- a/src/database/operations/bulkUploads/createBulkUpload.ts +++ b/src/database/operations/bulkUploadTasks/createBulkUploadTask.ts @@ -1,17 +1,17 @@ import { db } from '../../db'; import type { JsonResultSet, - BulkUpload, - InternallyWritableBulkUpload, + BulkUploadTask, + InternallyWritableBulkUploadTask, } from '../../../types'; -export const createBulkUpload = async ( - createValues: InternallyWritableBulkUpload, -): Promise => { +export const createBulkUploadTask = async ( + createValues: InternallyWritableBulkUploadTask, +): Promise => { const { sourceId, fileName, sourceKey, status, createdBy } = createValues; - const result = await db.sql>( - 'bulkUploads.insertOne', + const result = await db.sql>( + 'bulkUploadTasks.insertOne', { sourceId, fileName, diff --git a/src/database/operations/bulkUploadTasks/index.ts b/src/database/operations/bulkUploadTasks/index.ts new file mode 100644 index 00000000..9dbeb965 --- /dev/null +++ b/src/database/operations/bulkUploadTasks/index.ts @@ -0,0 +1,4 @@ +export * from './createBulkUploadTask'; +export * from './loadBulkUploadTask'; +export * from './loadBulkUploadTaskBundle'; +export * from './updateBulkUploadTask'; diff --git a/src/database/operations/bulkUploads/loadBulkUpload.ts b/src/database/operations/bulkUploadTasks/loadBulkUploadTask.ts similarity index 59% rename from src/database/operations/bulkUploads/loadBulkUpload.ts rename to src/database/operations/bulkUploadTasks/loadBulkUploadTask.ts index b1b2e54a..e32f06a2 100644 --- a/src/database/operations/bulkUploads/loadBulkUpload.ts +++ 
b/src/database/operations/bulkUploadTasks/loadBulkUploadTask.ts @@ -1,10 +1,12 @@ import { db } from '../../db'; import { NotFoundError } from '../../../errors'; -import type { JsonResultSet, BulkUpload } from '../../../types'; +import type { JsonResultSet, BulkUploadTask } from '../../../types'; -export const loadBulkUpload = async (id: number): Promise => { - const bulkUploadsQueryResult = await db.sql>( - 'bulkUploads.selectById', +export const loadBulkUploadTask = async ( + id: number, +): Promise => { + const bulkUploadsQueryResult = await db.sql>( + 'bulkUploadTasks.selectById', { id, }, @@ -12,7 +14,7 @@ export const loadBulkUpload = async (id: number): Promise => { const { object } = bulkUploadsQueryResult.rows[0] ?? {}; if (object === undefined) { throw new NotFoundError(`Entity not found`, { - entityType: 'BulkUpload', + entityType: 'BulkUploadTask', entityId: id, }); } diff --git a/src/database/operations/bulkUploads/loadBulkUploadBundle.ts b/src/database/operations/bulkUploadTasks/loadBulkUploadTaskBundle.ts similarity index 72% rename from src/database/operations/bulkUploads/loadBulkUploadBundle.ts rename to src/database/operations/bulkUploadTasks/loadBulkUploadTaskBundle.ts index 37ad4491..4a437266 100644 --- a/src/database/operations/bulkUploads/loadBulkUploadBundle.ts +++ b/src/database/operations/bulkUploadTasks/loadBulkUploadTaskBundle.ts @@ -2,22 +2,22 @@ import { loadBundle } from '../generic/loadBundle'; import type { JsonResultSet, Bundle, - BulkUpload, + BulkUploadTask, AuthContext, KeycloakUserId, } from '../../../types'; -export const loadBulkUploadBundle = async ( +export const loadBulkUploadTaskBundle = async ( authContext: AuthContext | undefined, createdBy: KeycloakUserId | undefined, limit: number | undefined, offset: number, -): Promise> => { +): Promise> => { const authContextKeycloakUserId = authContext?.user.keycloakUserId; const authContextIsAdministrator = authContext?.role.isAdministrator; - const bundle = await loadBundle>( - 'bulkUploads.selectWithPagination', + const bundle = await loadBundle>( + 'bulkUploadTasks.selectWithPagination', { authContextIsAdministrator, authContextKeycloakUserId, @@ -25,7 +25,7 @@ export const loadBulkUploadBundle = async ( limit, offset, }, - 'bulk_uploads', + 'bulk_upload_tasks', ); const entries = bundle.entries.map((entry) => entry.object); return { diff --git a/src/database/operations/bulkUploads/updateBulkUpload.ts b/src/database/operations/bulkUploadTasks/updateBulkUploadTask.ts similarity index 62% rename from src/database/operations/bulkUploads/updateBulkUpload.ts rename to src/database/operations/bulkUploadTasks/updateBulkUploadTask.ts index e3c0b40e..459748ad 100644 --- a/src/database/operations/bulkUploads/updateBulkUpload.ts +++ b/src/database/operations/bulkUploadTasks/updateBulkUploadTask.ts @@ -2,22 +2,22 @@ import { db } from '../../db'; import { NotFoundError } from '../../../errors'; import type { JsonResultSet, - BulkUpload, - InternallyWritableBulkUpload, + BulkUploadTask, + InternallyWritableBulkUploadTask, } from '../../../types'; -export const updateBulkUpload = async ( +export const updateBulkUploadTask = async ( id: number, - updateValues: Partial, -): Promise => { + updateValues: Partial, +): Promise => { const { fileSize, sourceKey, status } = updateValues; const defaultValues = { fileSize: -1, sourceKey: '', status: '', }; - const result = await db.sql>( - 'bulkUploads.updateById', + const result = await db.sql>( + 'bulkUploadTasks.updateById', { ...defaultValues, id, @@ -29,7 +29,7 @@ export 
const updateBulkUpload = async ( const { object } = result.rows[0] ?? {}; if (object === undefined) { throw new NotFoundError(`Entity not found`, { - entityType: 'BulkUpload', + entityType: 'BulkUploadTask', entityId: id, }); } diff --git a/src/database/operations/bulkUploads/index.ts b/src/database/operations/bulkUploads/index.ts deleted file mode 100644 index 0855d3a8..00000000 --- a/src/database/operations/bulkUploads/index.ts +++ /dev/null @@ -1,4 +0,0 @@ -export * from './createBulkUpload'; -export * from './loadBulkUpload'; -export * from './loadBulkUploadBundle'; -export * from './updateBulkUpload'; diff --git a/src/database/operations/index.ts b/src/database/operations/index.ts index 4dd103dc..920bef00 100644 --- a/src/database/operations/index.ts +++ b/src/database/operations/index.ts @@ -2,7 +2,7 @@ export * from './applicationFormFields'; export * from './applicationForms'; export * from './baseFieldLocalization'; export * from './baseFields'; -export * from './bulkUploads'; +export * from './bulkUploadTasks'; export * from './changemakerProposals'; export * from './changemakers'; export * from './dataProviders'; diff --git a/src/database/queries/bulkUploads/insertOne.sql b/src/database/queries/bulkUploadTasks/insertOne.sql similarity index 58% rename from src/database/queries/bulkUploads/insertOne.sql rename to src/database/queries/bulkUploadTasks/insertOne.sql index 740662f2..f1926d69 100644 --- a/src/database/queries/bulkUploads/insertOne.sql +++ b/src/database/queries/bulkUploadTasks/insertOne.sql @@ -1,4 +1,4 @@ -INSERT INTO bulk_uploads ( +INSERT INTO bulk_upload_tasks ( source_id, file_name, source_key, @@ -12,4 +12,4 @@ VALUES ( :status, :createdBy ) -RETURNING bulk_upload_to_json(bulk_uploads) AS "object"; +RETURNING bulk_upload_task_to_json(bulk_upload_tasks) AS "object"; diff --git a/src/database/queries/bulkUploadTasks/selectById.sql b/src/database/queries/bulkUploadTasks/selectById.sql new file mode 100644 index 00000000..34ff44af --- /dev/null +++ b/src/database/queries/bulkUploadTasks/selectById.sql @@ -0,0 +1,3 @@ +SELECT bulk_upload_task_to_json(bulk_upload_tasks.*) as "object" +FROM bulk_upload_tasks +WHERE id = :id; diff --git a/src/database/queries/bulkUploads/selectWithPagination.sql b/src/database/queries/bulkUploadTasks/selectWithPagination.sql similarity index 58% rename from src/database/queries/bulkUploads/selectWithPagination.sql rename to src/database/queries/bulkUploadTasks/selectWithPagination.sql index 833a3f31..30d02794 100644 --- a/src/database/queries/bulkUploads/selectWithPagination.sql +++ b/src/database/queries/bulkUploadTasks/selectWithPagination.sql @@ -1,18 +1,18 @@ -SELECT bulk_upload_to_json(bulk_uploads.*) as "object" -FROM bulk_uploads +SELECT bulk_upload_task_to_json(bulk_upload_tasks.*) as "object" +FROM bulk_upload_tasks WHERE CASE WHEN :createdBy::UUID IS NULL THEN true ELSE - bulk_uploads.created_by = :createdBy + bulk_upload_tasks.created_by = :createdBy END AND CASE WHEN :authContextKeycloakUserId::UUID IS NULL THEN true ELSE ( - bulk_uploads.created_by = :authContextKeycloakUserId + bulk_upload_tasks.created_by = :authContextKeycloakUserId OR :authContextIsAdministrator::boolean ) END diff --git a/src/database/queries/bulkUploads/updateById.sql b/src/database/queries/bulkUploadTasks/updateById.sql similarity index 62% rename from src/database/queries/bulkUploads/updateById.sql rename to src/database/queries/bulkUploadTasks/updateById.sql index 5e0d16b4..358d96aa 100644 --- a/src/database/queries/bulkUploads/updateById.sql +++ 
b/src/database/queries/bulkUploadTasks/updateById.sql @@ -1,8 +1,8 @@ -UPDATE bulk_uploads +UPDATE bulk_upload_tasks SET file_size = COALESCE(:fileSize, file_size), source_key = COALESCE(:sourceKey, source_key), status = COALESCE(:status, status) WHERE id = :id -RETURNING bulk_upload_to_json(bulk_uploads) AS "object"; +RETURNING bulk_upload_task_to_json(bulk_upload_tasks) AS "object"; diff --git a/src/database/queries/bulkUploads/selectById.sql b/src/database/queries/bulkUploads/selectById.sql deleted file mode 100644 index 9b30a35c..00000000 --- a/src/database/queries/bulkUploads/selectById.sql +++ /dev/null @@ -1,3 +0,0 @@ -SELECT bulk_upload_to_json(bulk_uploads.*) as "object" -FROM bulk_uploads -WHERE id = :id; diff --git a/src/handlers/bulkUploadsHandlers.ts b/src/handlers/bulkUploadTasksHandlers.ts similarity index 75% rename from src/handlers/bulkUploadsHandlers.ts rename to src/handlers/bulkUploadTasksHandlers.ts index a6397149..7d8ce4bf 100644 --- a/src/handlers/bulkUploadsHandlers.ts +++ b/src/handlers/bulkUploadTasksHandlers.ts @@ -1,14 +1,14 @@ import { assertSourceExists, - createBulkUpload, + createBulkUploadTask, getLimitValues, - loadBulkUploadBundle, + loadBulkUploadTaskBundle, } from '../database'; import { - BulkUploadStatus, + TaskStatus, isAuthContext, isTinyPgErrorWithQueryContext, - isWritableBulkUpload, + isWritableBulkUploadTask, } from '../types'; import { DatabaseError, @@ -21,11 +21,11 @@ import { extractCreatedByParameters, extractPaginationParameters, } from '../queryParameters'; -import { addProcessBulkUploadJob } from '../jobQueue'; +import { addProcessBulkUploadTaskJob } from '../jobQueue'; import { S3_UNPROCESSED_KEY_PREFIX } from '../s3Client'; import type { Request, Response, NextFunction } from 'express'; -const postBulkUpload = ( +const postBulkUploadTask = ( req: Request, res: Response, next: NextFunction, @@ -34,11 +34,11 @@ const postBulkUpload = ( next(new FailedMiddlewareError('Unexpected lack of auth context.')); return; } - if (!isWritableBulkUpload(req.body)) { + if (!isWritableBulkUploadTask(req.body)) { next( new InputValidationError( 'Invalid request body.', - isWritableBulkUpload.errors ?? [], + isWritableBulkUploadTask.errors ?? 
[], ), ); return; @@ -56,21 +56,21 @@ const postBulkUpload = ( assertSourceExists(sourceId) .then(async () => { - const bulkUpload = await createBulkUpload({ + const bulkUploadTask = await createBulkUploadTask({ sourceId, fileName, sourceKey, - status: BulkUploadStatus.PENDING, + status: TaskStatus.PENDING, createdBy, }); - await addProcessBulkUploadJob({ - bulkUploadId: bulkUpload.id, + await addProcessBulkUploadTaskJob({ + bulkUploadId: bulkUploadTask.id, }); - res.status(201).contentType('application/json').send(bulkUpload); + res.status(201).contentType('application/json').send(bulkUploadTask); }) .catch((error: unknown) => { if (isTinyPgErrorWithQueryContext(error)) { - next(new DatabaseError('Error creating bulk upload.', error)); + next(new DatabaseError('Error creating bulk upload task.', error)); return; } if (error instanceof NotFoundError) { @@ -88,7 +88,7 @@ const postBulkUpload = ( }); }; -const getBulkUploads = ( +const getBulkUploadTasks = ( req: Request, res: Response, next: NextFunction, @@ -101,17 +101,17 @@ const getBulkUploads = ( const { offset, limit } = getLimitValues(paginationParameters); const { createdBy } = extractCreatedByParameters(req); (async () => { - const bulkUploadBundle = await loadBulkUploadBundle( + const bulkUploadTaskBundle = await loadBulkUploadTaskBundle( req, createdBy, limit, offset, ); - res.status(200).contentType('application/json').send(bulkUploadBundle); + res.status(200).contentType('application/json').send(bulkUploadTaskBundle); })().catch((error: unknown) => { if (isTinyPgErrorWithQueryContext(error)) { - next(new DatabaseError('Error retrieving bulk uploads.', error)); + next(new DatabaseError('Error retrieving bulk upload tasks.', error)); return; } next(error); @@ -119,6 +119,6 @@ const getBulkUploads = ( }; export const bulkUploadsHandlers = { - postBulkUpload, - getBulkUploads, + postBulkUploadTask, + getBulkUploadTasks, }; diff --git a/src/jobQueue.ts b/src/jobQueue.ts index ada3ea5b..2553e2d9 100644 --- a/src/jobQueue.ts +++ b/src/jobQueue.ts @@ -1,13 +1,13 @@ import { Logger, quickAddJob, run, runMigrations } from 'graphile-worker'; import { getLogger } from './logger'; import { db } from './database/db'; -import { processBulkUpload } from './tasks'; +import { processBulkUploadTask } from './tasks'; import type { ProcessBulkUploadJobPayload } from './types'; const logger = getLogger(__filename); enum JobType { - PROCESS_BULK_UPLOAD = 'processBulkUpload', + PROCESS_BULK_UPLOAD = 'processBulkUploadTask', } export const jobQueueLogger = new Logger((scope) => (level, message, meta) => { @@ -37,7 +37,7 @@ export const startJobQueue = async () => { noHandleSignals: false, pollInterval: 1000, taskList: { - processBulkUpload, + processBulkUploadTask, }, }); runner.promise.catch((err) => { @@ -61,6 +61,6 @@ export const addJob = async (jobType: JobType, payload: unknown) => payload, ); -export const addProcessBulkUploadJob = async ( +export const addProcessBulkUploadTaskJob = async ( payload: ProcessBulkUploadJobPayload, ) => addJob(JobType.PROCESS_BULK_UPLOAD, payload); diff --git a/src/openapi.json b/src/openapi.json index b1719163..c7634e6c 100644 --- a/src/openapi.json +++ b/src/openapi.json @@ -108,7 +108,7 @@ } }, "schemas": { - "BulkUpload": { + "BulkUploadTask": { "type": "object", "properties": { "id": { @@ -174,7 +174,7 @@ "createdAt" ] }, - "BulkUploadBundle": { + "BulkUploadTaskBundle": { "allOf": [ { "$ref": "#/components/schemas/Bundle" @@ -185,7 +185,7 @@ "entries": { "type": "array", "items": { - "$ref": 
"#/components/schemas/BulkUpload" + "$ref": "#/components/schemas/BulkUploadTask" } } }, @@ -1253,11 +1253,11 @@ } } }, - "/bulkUploads": { + "/tasks/bulkUploads": { "get": { - "operationId": "getBulkUploads", - "summary": "Gets a list of bulk uploads.", - "tags": ["Bulk Uploads"], + "operationId": "getBulkUploadTasks", + "summary": "Gets a list of bulk upload tasks.", + "tags": ["Tasks", "Bulk Uploads"], "security": [ { "auth": [] @@ -1274,7 +1274,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BulkUploadBundle" + "$ref": "#/components/schemas/BulkUploadTaskBundle" } } } @@ -1292,8 +1292,8 @@ } }, "post": { - "operationId": "addBulkUpload", - "summary": "Registers a bulk upload. This lets PDC ingest data previously uploaded via a POST to the /presignedPostRequests endpoint.", + "operationId": "addBulkUploadTask", + "summary": "Registers a bulk upload task. This lets PDC ingest data previously uploaded via a POST to the /presignedPostRequests endpoint.", "tags": ["Bulk Uploads"], "security": [ { @@ -1305,7 +1305,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BulkUpload" + "$ref": "#/components/schemas/BulkUploadTask" } } } @@ -1316,7 +1316,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/BulkUpload" + "$ref": "#/components/schemas/BulkUploadTask" } } } @@ -1564,7 +1564,7 @@ }, "responses": { "201": { - "description": "The presigned post request. Use the contents of this JSON to POST form data to S3. After posting data to S3, use the /bulkUploads endpoint to let PDC ingest the data.", + "description": "The presigned post request. Use the contents of this JSON to POST form data to S3. After posting data to S3, use the /bulkUploadTasks endpoint to let PDC ingest the data.", "content": { "application/json": { "schema": { diff --git a/src/routers/bulkUploadsRouter.ts b/src/routers/bulkUploadsRouter.ts deleted file mode 100644 index 7ccda0d0..00000000 --- a/src/routers/bulkUploadsRouter.ts +++ /dev/null @@ -1,19 +0,0 @@ -import express from 'express'; -import { bulkUploadsHandlers } from '../handlers/bulkUploadsHandlers'; -import { requireAuthentication } from '../middleware'; - -const bulkUploadsRouter = express.Router(); - -bulkUploadsRouter.post( - '/', - requireAuthentication, - bulkUploadsHandlers.postBulkUpload, -); - -bulkUploadsRouter.get( - '/', - requireAuthentication, - bulkUploadsHandlers.getBulkUploads, -); - -export { bulkUploadsRouter }; diff --git a/src/routers/index.ts b/src/routers/index.ts index 8ca251ce..1dad92c0 100644 --- a/src/routers/index.ts +++ b/src/routers/index.ts @@ -1,7 +1,6 @@ import express from 'express'; import { applicationFormsRouter } from './applicationFormsRouter'; import { baseFieldsRouter } from './baseFieldsRouter'; -import { bulkUploadsRouter } from './bulkUploadsRouter'; import { dataProvidersRouter } from './dataProviderRouter'; import { fundersRouter } from './fundersRouter'; import { opportunitiesRouter } from './opportunitiesRouter'; @@ -12,6 +11,7 @@ import { presignedPostRequestsRouter } from './presignedPostRequestsRouter'; import { proposalsRouter } from './proposalsRouter'; import { proposalVersionsRouter } from './proposalVersionsRouter'; import { sourcesRouter } from './sourcesRouter'; +import { tasksRouter } from './tasksRouter'; import { usersRouter } from './usersRouter'; import { documentationRouter } from './documentationRouter'; @@ -19,7 +19,7 @@ const rootRouter = express.Router(); rootRouter.use('/applicationForms', 
applicationFormsRouter); rootRouter.use('/baseFields', baseFieldsRouter); -rootRouter.use('/bulkUploads', bulkUploadsRouter); +rootRouter.use('/tasks', tasksRouter); rootRouter.use('/changemakers', changemakersRouter); rootRouter.use('/changemakerProposals', changemakerProposalsRouter); rootRouter.use('/dataProviders', dataProvidersRouter); diff --git a/src/routers/tasksRouter.ts b/src/routers/tasksRouter.ts new file mode 100644 index 00000000..bc5aa95f --- /dev/null +++ b/src/routers/tasksRouter.ts @@ -0,0 +1,19 @@ +import express from 'express'; +import { bulkUploadsHandlers } from '../handlers/bulkUploadTasksHandlers'; +import { requireAuthentication } from '../middleware'; + +const tasksRouter = express.Router(); + +tasksRouter.post( + '/bulkUploads', + requireAuthentication, + bulkUploadsHandlers.postBulkUploadTask, +); + +tasksRouter.get( + '/bulkUploads', + requireAuthentication, + bulkUploadsHandlers.getBulkUploadTasks, +); + +export { tasksRouter }; diff --git a/src/tasks/__tests__/fixtures/processBulkUpload/empty.csv b/src/tasks/__tests__/fixtures/processBulkUploadTask/empty.csv similarity index 100% rename from src/tasks/__tests__/fixtures/processBulkUpload/empty.csv rename to src/tasks/__tests__/fixtures/processBulkUploadTask/empty.csv diff --git a/src/tasks/__tests__/fixtures/processBulkUpload/invalidShortCode.csv b/src/tasks/__tests__/fixtures/processBulkUploadTask/invalidShortCode.csv similarity index 100% rename from src/tasks/__tests__/fixtures/processBulkUpload/invalidShortCode.csv rename to src/tasks/__tests__/fixtures/processBulkUploadTask/invalidShortCode.csv diff --git a/src/tasks/__tests__/fixtures/processBulkUpload/missingEmail.csv b/src/tasks/__tests__/fixtures/processBulkUploadTask/missingEmail.csv similarity index 100% rename from src/tasks/__tests__/fixtures/processBulkUpload/missingEmail.csv rename to src/tasks/__tests__/fixtures/processBulkUploadTask/missingEmail.csv diff --git a/src/tasks/__tests__/fixtures/processBulkUpload/validCsvTemplate.csv b/src/tasks/__tests__/fixtures/processBulkUploadTask/validCsvTemplate.csv similarity index 100% rename from src/tasks/__tests__/fixtures/processBulkUpload/validCsvTemplate.csv rename to src/tasks/__tests__/fixtures/processBulkUploadTask/validCsvTemplate.csv diff --git a/src/tasks/__tests__/fixtures/processBulkUpload/validCsvTemplateWithChangemakers.csv b/src/tasks/__tests__/fixtures/processBulkUploadTask/validCsvTemplateWithChangemakers.csv similarity index 100% rename from src/tasks/__tests__/fixtures/processBulkUpload/validCsvTemplateWithChangemakers.csv rename to src/tasks/__tests__/fixtures/processBulkUploadTask/validCsvTemplateWithChangemakers.csv diff --git a/src/tasks/__tests__/processBulkUpload.int.test.ts b/src/tasks/__tests__/processBulkUploadTask.int.test.ts similarity index 73% rename from src/tasks/__tests__/processBulkUpload.int.test.ts rename to src/tasks/__tests__/processBulkUploadTask.int.test.ts index 09cb354b..50333b53 100644 --- a/src/tasks/__tests__/processBulkUpload.int.test.ts +++ b/src/tasks/__tests__/processBulkUploadTask.int.test.ts @@ -2,10 +2,10 @@ import nock from 'nock'; import { requireEnv } from 'require-env-variable'; import { createBaseField, - loadBulkUpload, + loadBulkUploadTask, loadProposalBundle, loadApplicationFormBundle, - createBulkUpload, + createBulkUploadTask, loadSystemUser, loadChangemakerBundle, loadChangemakerProposalBundle, @@ -14,17 +14,17 @@ import { } from '../../database'; import { s3Client } from '../../s3Client'; import { getMockJobHelpers } from 
'../../test/mockGraphileWorker'; -import { processBulkUpload } from '../processBulkUpload'; +import { processBulkUploadTask } from '../processBulkUploadTask'; import { BaseFieldDataType, BaseFieldScope, - BulkUploadStatus, + TaskStatus, Proposal, } from '../../types'; import { expectTimestamp, NO_LIMIT, NO_OFFSET } from '../../test/utils'; import type { - BulkUpload, - InternallyWritableBulkUpload, + BulkUploadTask, + InternallyWritableBulkUploadTask, Changemaker, } from '../../types'; @@ -48,19 +48,19 @@ const getS3Path = () => (S3_PATH_STYLE === 'true' ? `/${S3_BUCKET}` : ''); const getS3KeyPath = (key: string) => `${getS3Path()}/${key}`; -const createTestBulkUpload = async ( - overrideValues?: Partial, -): Promise => { +const createTestBulkUploadTask = async ( + overrideValues?: Partial, +): Promise => { const systemUser = await loadSystemUser(); const systemSource = await loadSystemSource(); const defaultValues = { fileName: 'bar.csv', sourceId: systemSource.id, sourceKey: TEST_UNPROCESSED_SOURCE_KEY, - status: BulkUploadStatus.PENDING, + status: TaskStatus.PENDING, createdBy: systemUser.keycloakUserId, }; - return createBulkUpload({ + return createBulkUploadTask({ ...defaultValues, ...overrideValues, }); @@ -114,18 +114,18 @@ const mockS3DeleteObjectReply = async (sourceKey: string) => .query({ 'x-id': 'DeleteObject' }) .reply(204); -const mockS3ResponsesForBulkUploadProcessing = async ( - bulkUpload: BulkUpload, +const mockS3ResponsesForBulkUploadTaskProcessing = async ( + bulkUploadTask: BulkUploadTask, bulkUploadFilePath: string, ) => { const getRequest = await mockS3GetObjectReplyWithFile( - bulkUpload.sourceKey, + bulkUploadTask.sourceKey, bulkUploadFilePath, ); const copyRequest = await mockS3CopyObjectReply( - `bulk-uploads/${bulkUpload.id}`, + `bulk-uploads/${bulkUploadTask.id}`, ); - const deleteRequest = await mockS3DeleteObjectReply(bulkUpload.sourceKey); + const deleteRequest = await mockS3DeleteObjectReply(bulkUploadTask.sourceKey); return { getRequest, copyRequest, @@ -133,17 +133,17 @@ const mockS3ResponsesForBulkUploadProcessing = async ( }; }; -describe('processBulkUpload', () => { +describe('processBulkUploadTask', () => { it('should attempt to access the contents of the sourceKey associated with the specified bulk upload', async () => { const sourceKey = TEST_UNPROCESSED_SOURCE_KEY; - const bulkUpload = await createTestBulkUpload({ sourceKey }); - const requests = await mockS3ResponsesForBulkUploadProcessing( - bulkUpload, - `${__dirname}/fixtures/processBulkUpload/validCsvTemplate.csv`, + const bulkUploadTask = await createTestBulkUploadTask({ sourceKey }); + const requests = await mockS3ResponsesForBulkUploadTaskProcessing( + bulkUploadTask, + `${__dirname}/fixtures/processBulkUploadTask/validCsvTemplate.csv`, ); - await processBulkUpload( + await processBulkUploadTask( { - bulkUploadId: bulkUpload.id, + bulkUploadId: bulkUploadTask.id, }, getMockJobHelpers(), ); @@ -153,14 +153,14 @@ describe('processBulkUpload', () => { it('should attempt to copy the contents of the sourceKey associated with the specified bulk upload to a processed location', async () => { await createTestBaseFields(); const sourceKey = TEST_UNPROCESSED_SOURCE_KEY; - const bulkUpload = await createTestBulkUpload({ sourceKey }); - const requests = await mockS3ResponsesForBulkUploadProcessing( - bulkUpload, - `${__dirname}/fixtures/processBulkUpload/validCsvTemplate.csv`, + const bulkUploadTask = await createTestBulkUploadTask({ sourceKey }); + const requests = await 
mockS3ResponsesForBulkUploadTaskProcessing( + bulkUploadTask, + `${__dirname}/fixtures/processBulkUploadTask/validCsvTemplate.csv`, ); - await processBulkUpload( + await processBulkUploadTask( { - bulkUploadId: bulkUpload.id, + bulkUploadId: bulkUploadTask.id, }, getMockJobHelpers(), ); @@ -170,14 +170,14 @@ describe('processBulkUpload', () => { it('should attempt to delete the unprocessed file of the sourceKey associated with the specified bulk upload', async () => { await createTestBaseFields(); const sourceKey = TEST_UNPROCESSED_SOURCE_KEY; - const bulkUpload = await createTestBulkUpload({ sourceKey }); - const requests = await mockS3ResponsesForBulkUploadProcessing( - bulkUpload, - `${__dirname}/fixtures/processBulkUpload/validCsvTemplate.csv`, + const bulkUploadTask = await createTestBulkUploadTask({ sourceKey }); + const requests = await mockS3ResponsesForBulkUploadTaskProcessing( + bulkUploadTask, + `${__dirname}/fixtures/processBulkUploadTask/validCsvTemplate.csv`, ); - await processBulkUpload( + await processBulkUploadTask( { - bulkUploadId: bulkUpload.id, + bulkUploadId: bulkUploadTask.id, }, getMockJobHelpers(), ); @@ -186,20 +186,20 @@ describe('processBulkUpload', () => { it('should fail if the sourceKey is not accessible', async () => { const sourceKey = TEST_UNPROCESSED_SOURCE_KEY; - const bulkUpload = await createTestBulkUpload({ sourceKey }); + const bulkUploadTask = await createTestBulkUploadTask({ sourceKey }); const sourceRequest = nock(await getS3Endpoint()) .get(getS3KeyPath(sourceKey)) .query({ 'x-id': 'GetObject' }) .reply(404); - await processBulkUpload( - { bulkUploadId: bulkUpload.id }, + await processBulkUploadTask( + { bulkUploadId: bulkUploadTask.id }, getMockJobHelpers(), ); - const updatedBulkUpload = await loadBulkUpload(bulkUpload.id); - expect(updatedBulkUpload).toMatchObject({ - status: BulkUploadStatus.FAILED, + const updatedBulkUploadTask = await loadBulkUploadTask(bulkUploadTask.id); + expect(updatedBulkUploadTask).toMatchObject({ + status: TaskStatus.FAILED, fileSize: null, }); expect(sourceRequest.isDone()).toEqual(true); @@ -207,20 +207,20 @@ describe('processBulkUpload', () => { it('should not process, and fail, if the sourceKey is not in the unprocessed namespace', async () => { const sourceKey = TEST_BULK_UPLOAD_SOURCE_KEY; - const bulkUpload = await createTestBulkUpload({ sourceKey }); - const requests = await mockS3ResponsesForBulkUploadProcessing( - bulkUpload, - `${__dirname}/fixtures/processBulkUpload/validCsvTemplate.csv`, + const bulkUploadTask = await createTestBulkUploadTask({ sourceKey }); + const requests = await mockS3ResponsesForBulkUploadTaskProcessing( + bulkUploadTask, + `${__dirname}/fixtures/processBulkUploadTask/validCsvTemplate.csv`, ); - await processBulkUpload( - { bulkUploadId: bulkUpload.id }, + await processBulkUploadTask( + { bulkUploadId: bulkUploadTask.id }, getMockJobHelpers(), ); - const updatedBulkUpload = await loadBulkUpload(bulkUpload.id); + const updatedBulkUpload = await loadBulkUploadTask(bulkUploadTask.id); expect(updatedBulkUpload).toMatchObject({ - status: BulkUploadStatus.FAILED, + status: TaskStatus.FAILED, fileSize: null, }); expect(requests.getRequest.isDone()).toEqual(false); @@ -228,42 +228,42 @@ describe('processBulkUpload', () => { it('should not process or modify processing status if the bulk upload is not PENDING', async () => { const sourceKey = TEST_UNPROCESSED_SOURCE_KEY; - const bulkUpload = await createTestBulkUpload({ + const bulkUploadTask = await createTestBulkUploadTask({ sourceKey, - 
status: BulkUploadStatus.IN_PROGRESS, + status: TaskStatus.IN_PROGRESS, }); - const requests = await mockS3ResponsesForBulkUploadProcessing( - bulkUpload, - `${__dirname}/fixtures/processBulkUpload/validCsvTemplate.csv`, + const requests = await mockS3ResponsesForBulkUploadTaskProcessing( + bulkUploadTask, + `${__dirname}/fixtures/processBulkUploadTask/validCsvTemplate.csv`, ); - await processBulkUpload( - { bulkUploadId: bulkUpload.id }, + await processBulkUploadTask( + { bulkUploadId: bulkUploadTask.id }, getMockJobHelpers(), ); - const updatedBulkUpload = await loadBulkUpload(bulkUpload.id); - expect(updatedBulkUpload.status).toEqual(BulkUploadStatus.IN_PROGRESS); + const updatedBulkUpload = await loadBulkUploadTask(bulkUploadTask.id); + expect(updatedBulkUpload.status).toEqual(TaskStatus.IN_PROGRESS); expect(requests.getRequest.isDone()).toEqual(false); }); it('should fail if the csv contains an invalid short code', async () => { await createTestBaseFields(); const sourceKey = TEST_UNPROCESSED_SOURCE_KEY; - const bulkUpload = await createTestBulkUpload({ sourceKey }); - await mockS3ResponsesForBulkUploadProcessing( - bulkUpload, - `${__dirname}/fixtures/processBulkUpload/invalidShortCode.csv`, + const bulkUploadTask = await createTestBulkUploadTask({ sourceKey }); + await mockS3ResponsesForBulkUploadTaskProcessing( + bulkUploadTask, + `${__dirname}/fixtures/processBulkUploadTask/invalidShortCode.csv`, ); - await processBulkUpload( + await processBulkUploadTask( { - bulkUploadId: bulkUpload.id, + bulkUploadId: bulkUploadTask.id, }, getMockJobHelpers(), ); - const updatedBulkUpload = await loadBulkUpload(bulkUpload.id); - expect(updatedBulkUpload).toMatchObject({ - status: BulkUploadStatus.FAILED, + const updatedBulkUploadTask = await loadBulkUploadTask(bulkUploadTask.id); + expect(updatedBulkUploadTask).toMatchObject({ + status: TaskStatus.FAILED, fileSize: 97, }); }); @@ -271,14 +271,14 @@ describe('processBulkUpload', () => { it('should move the csv file to processed location if the csv contains an invalid short code', async () => { await createTestBaseFields(); const sourceKey = TEST_UNPROCESSED_SOURCE_KEY; - const bulkUpload = await createTestBulkUpload({ sourceKey }); - const requests = await mockS3ResponsesForBulkUploadProcessing( - bulkUpload, - `${__dirname}/fixtures/processBulkUpload/invalidShortCode.csv`, + const bulkUploadTask = await createTestBulkUploadTask({ sourceKey }); + const requests = await mockS3ResponsesForBulkUploadTaskProcessing( + bulkUploadTask, + `${__dirname}/fixtures/processBulkUploadTask/invalidShortCode.csv`, ); - await processBulkUpload( + await processBulkUploadTask( { - bulkUploadId: bulkUpload.id, + bulkUploadId: bulkUploadTask.id, }, getMockJobHelpers(), ); @@ -289,21 +289,21 @@ describe('processBulkUpload', () => { it('should have a proper failed state if the csv is empty', async () => { await createTestBaseFields(); const sourceKey = TEST_UNPROCESSED_SOURCE_KEY; - const bulkUpload = await createTestBulkUpload({ sourceKey }); - await mockS3ResponsesForBulkUploadProcessing( - bulkUpload, - `${__dirname}/fixtures/processBulkUpload/empty.csv`, + const bulkUploadTask = await createTestBulkUploadTask({ sourceKey }); + await mockS3ResponsesForBulkUploadTaskProcessing( + bulkUploadTask, + `${__dirname}/fixtures/processBulkUploadTask/empty.csv`, ); - await processBulkUpload( + await processBulkUploadTask( { - bulkUploadId: bulkUpload.id, + bulkUploadId: bulkUploadTask.id, }, getMockJobHelpers(), ); - const updatedBulkUpload = await 
loadBulkUpload(bulkUpload.id); + const updatedBulkUpload = await loadBulkUploadTask(bulkUploadTask.id); expect(updatedBulkUpload).toMatchObject({ - status: BulkUploadStatus.FAILED, + status: TaskStatus.FAILED, fileSize: 0, }); }); @@ -311,21 +311,21 @@ describe('processBulkUpload', () => { it('should update the file size for the bulk upload if the sourceKey is accessible and contains a valid CSV', async () => { await createTestBaseFields(); const sourceKey = TEST_UNPROCESSED_SOURCE_KEY; - const bulkUpload = await createTestBulkUpload({ sourceKey }); - await mockS3ResponsesForBulkUploadProcessing( - bulkUpload, - `${__dirname}/fixtures/processBulkUpload/validCsvTemplate.csv`, + const bulkUploadTask = await createTestBulkUploadTask({ sourceKey }); + await mockS3ResponsesForBulkUploadTaskProcessing( + bulkUploadTask, + `${__dirname}/fixtures/processBulkUploadTask/validCsvTemplate.csv`, ); - expect(bulkUpload.fileSize).toBe(null); + expect(bulkUploadTask.fileSize).toBe(null); - await processBulkUpload( + await processBulkUploadTask( { - bulkUploadId: bulkUpload.id, + bulkUploadId: bulkUploadTask.id, }, getMockJobHelpers(), ); - const updatedBulkUpload = await loadBulkUpload(bulkUpload.id); - expect(updatedBulkUpload).toMatchObject({ + const updatedBulkUploadTask = await loadBulkUploadTask(bulkUploadTask.id); + expect(updatedBulkUploadTask).toMatchObject({ fileSize: 93, }); }); @@ -335,22 +335,22 @@ describe('processBulkUpload', () => { const systemSource = await loadSystemSource(); const systemUser = await loadSystemUser(); const sourceKey = TEST_UNPROCESSED_SOURCE_KEY; - const bulkUpload = await createTestBulkUpload({ + const bulkUploadTask = await createTestBulkUploadTask({ sourceKey, createdBy: systemUser.keycloakUserId, }); - const requests = await mockS3ResponsesForBulkUploadProcessing( - bulkUpload, - `${__dirname}/fixtures/processBulkUpload/validCsvTemplate.csv`, + const requests = await mockS3ResponsesForBulkUploadTaskProcessing( + bulkUploadTask, + `${__dirname}/fixtures/processBulkUploadTask/validCsvTemplate.csv`, ); - await processBulkUpload( + await processBulkUploadTask( { - bulkUploadId: bulkUpload.id, + bulkUploadId: bulkUploadTask.id, }, getMockJobHelpers(), ); - const updatedBulkUpload = await loadBulkUpload(bulkUpload.id); + const updatedBulkUploadTask = await loadBulkUploadTask(bulkUploadTask.id); const { entries: [opportunity], @@ -559,7 +559,7 @@ describe('processBulkUpload', () => { total: 0, }); - expect(updatedBulkUpload.status).toEqual(BulkUploadStatus.COMPLETED); + expect(updatedBulkUploadTask.status).toEqual(TaskStatus.COMPLETED); expect(requests.getRequest.isDone()).toEqual(true); expect(requests.copyRequest.isDone()).toEqual(true); expect(requests.deleteRequest.isDone()).toEqual(true); @@ -568,15 +568,15 @@ describe('processBulkUpload', () => { it('should create changemakers and changemaker-proposal relationships', async () => { await createTestBaseFields(); const sourceKey = TEST_UNPROCESSED_SOURCE_KEY; - const bulkUpload = await createTestBulkUpload({ sourceKey }); - await mockS3ResponsesForBulkUploadProcessing( - bulkUpload, - `${__dirname}/fixtures/processBulkUpload/validCsvTemplateWithChangemakers.csv`, + const bulkUploadTask = await createTestBulkUploadTask({ sourceKey }); + await mockS3ResponsesForBulkUploadTaskProcessing( + bulkUploadTask, + `${__dirname}/fixtures/processBulkUploadTask/validCsvTemplateWithChangemakers.csv`, ); - await processBulkUpload( + await processBulkUploadTask( { - bulkUploadId: bulkUpload.id, + bulkUploadId: bulkUploadTask.id, }, 
 			getMockJobHelpers(),
 		);
diff --git a/src/tasks/__tests__/processBulkUpload.unit.test.ts b/src/tasks/__tests__/processBulkUploadTask.unit.test.ts
similarity index 54%
rename from src/tasks/__tests__/processBulkUpload.unit.test.ts
rename to src/tasks/__tests__/processBulkUploadTask.unit.test.ts
index e90a9ded..bf231fea 100644
--- a/src/tasks/__tests__/processBulkUpload.unit.test.ts
+++ b/src/tasks/__tests__/processBulkUploadTask.unit.test.ts
@@ -1,10 +1,10 @@
 import { getMockJobHelpers } from '../../test/mockGraphileWorker';
-import { processBulkUpload } from '../processBulkUpload';
+import { processBulkUploadTask } from '../processBulkUploadTask';
 
-describe('processBulkUpload', () => {
+describe('processBulkUploadTask', () => {
 	it('should not error when passed an invalid payload', async () => {
 		await expect(
-			processBulkUpload({}, getMockJobHelpers()),
+			processBulkUploadTask({}, getMockJobHelpers()),
 		).resolves.not.toThrow();
 	});
 });
diff --git a/src/tasks/index.ts b/src/tasks/index.ts
index ed461e3f..c3d92c30 100644
--- a/src/tasks/index.ts
+++ b/src/tasks/index.ts
@@ -1 +1 @@
-export * from './processBulkUpload';
+export * from './processBulkUploadTask';
diff --git a/src/tasks/processBulkUpload.ts b/src/tasks/processBulkUploadTask.ts
similarity index 77%
rename from src/tasks/processBulkUpload.ts
rename to src/tasks/processBulkUploadTask.ts
index e8072a52..261f84b0 100644
--- a/src/tasks/processBulkUpload.ts
+++ b/src/tasks/processBulkUploadTask.ts
@@ -18,11 +18,11 @@ import {
 	createProposalFieldValue,
 	createProposalVersion,
 	loadBaseFields,
-	loadBulkUpload,
+	loadBulkUploadTask,
 	loadChangemakerByTaxId,
-	updateBulkUpload,
+	updateBulkUploadTask,
 } from '../database/operations';
-import { BulkUploadStatus, isProcessBulkUploadJobPayload } from '../types';
+import { TaskStatus, isProcessBulkUploadJobPayload } from '../types';
 import { fieldValueIsValid } from '../fieldValidation';
 import type { Readable } from 'stream';
 import type { GetObjectCommandOutput } from '@aws-sdk/client-s3';
@@ -30,7 +30,7 @@ import type { JobHelpers, Logger } from 'graphile-worker';
 import type { FileResult } from 'tmp-promise';
 import type {
 	ApplicationFormField,
-	BulkUpload,
+	BulkUploadTask,
 	Opportunity,
 	Changemaker,
 	ProposalFieldValue,
@@ -81,7 +81,7 @@ const downloadS3ObjectToTemporaryStorage = async (
 	return temporaryFile;
 };
 
-const loadShortCodesFromBulkUploadCsv = async (
+const loadShortCodesFromBulkUploadTaskCsv = async (
 	csvPath: string,
 ): Promise<string[]> => {
 	let shortCodes: string[] = [];
@@ -123,7 +123,7 @@ const assertShortCodesAreValid = async (
 const assertCsvContainsValidShortCodes = async (
 	csvPath: string,
 ): Promise<void> => {
-	const shortCodes = await loadShortCodesFromBulkUploadCsv(csvPath);
+	const shortCodes = await loadShortCodesFromBulkUploadTaskCsv(csvPath);
 	if (shortCodes.length === 0) {
 		throw new Error('No short codes detected in the first row of the CSV');
 	}
@@ -146,23 +146,25 @@ const assertCsvContainsRowsOfEqualLength = async (
 	await finished(parser);
 };
 
-const assertBulkUploadCsvIsValid = async (csvPath: string): Promise<void> => {
+const assertBulkUploadTaskCsvIsValid = async (
+	csvPath: string,
+): Promise<void> => {
 	await assertCsvContainsValidShortCodes(csvPath);
 	await assertCsvContainsRowsOfEqualLength(csvPath);
 };
 
-const createOpportunityForBulkUpload = async (
-	bulkUpload: BulkUpload,
+const createOpportunityForBulkUploadTask = async (
+	bulkUploadTask: BulkUploadTask,
 ): Promise<Opportunity> =>
 	createOpportunity({
-		title: `Bulk Upload (${bulkUpload.createdAt})`,
+		title: `Bulk Upload (${bulkUploadTask.createdAt})`,
 	});
 
-const createApplicationFormFieldsForBulkUpload = async (
+const createApplicationFormFieldsForBulkUploadTask = async (
 	csvPath: string,
 	applicationFormId: number,
 ): Promise<ApplicationFormField[]> => {
-	const shortCodes = await loadShortCodesFromBulkUploadCsv(csvPath);
+	const shortCodes = await loadShortCodesFromBulkUploadTaskCsv(csvPath);
 	const baseFields = await loadBaseFields();
 	const applicationFormFields = await Promise.all(
 		shortCodes.map(async (shortCode, index) => {
@@ -186,8 +188,8 @@ const createApplicationFormFieldsForBulkUpload = async (
 	return applicationFormFields;
 };
 
-const getProcessedKey = (bulkUpload: BulkUpload): string =>
-	`${S3_BULK_UPLOADS_KEY_PREFIX}/${bulkUpload.id}`;
+const getProcessedKey = (bulkUploadTask: BulkUploadTask): string =>
+	`${S3_BULK_UPLOADS_KEY_PREFIX}/${bulkUploadTask.id}`;
 
 const getChangemakerTaxIdIndex = (columns: string[]): number =>
 	columns.indexOf(CHANGEMAKER_TAX_ID_SHORT_CODE);
@@ -211,7 +213,7 @@ const createOrLoadChangemaker = async (
 	return undefined;
 };
 
-export const processBulkUpload = async (
+export const processBulkUploadTask = async (
 	payload: unknown,
 	helpers: JobHelpers,
 ): Promise<void> => {
@@ -224,52 +226,59 @@ export const processBulkUpload = async (
 	helpers.logger.debug(
 		`Started processBulkUpload Job for Bulk Upload ID ${payload.bulkUploadId}`,
 	);
-	const bulkUpload = await loadBulkUpload(payload.bulkUploadId);
-	if (bulkUpload.status !== BulkUploadStatus.PENDING) {
+	const bulkUploadTask = await loadBulkUploadTask(payload.bulkUploadId);
+	if (bulkUploadTask.status !== TaskStatus.PENDING) {
 		helpers.logger.warn(
 			'Bulk upload cannot be processed because it is not in a PENDING state',
-			{ bulkUpload },
+			{ bulkUploadTask },
 		);
 		return;
 	}
 
-	if (!bulkUpload.sourceKey.startsWith(S3_UNPROCESSED_KEY_PREFIX)) {
+	if (!bulkUploadTask.sourceKey.startsWith(S3_UNPROCESSED_KEY_PREFIX)) {
 		helpers.logger.info(
-			`Bulk upload cannot be processed because the associated sourceKey does not begin with ${S3_UNPROCESSED_KEY_PREFIX}`,
-			{ bulkUpload },
+			`Bulk upload task cannot be processed because the associated sourceKey does not begin with ${S3_UNPROCESSED_KEY_PREFIX}`,
+			{ bulkUploadTask },
 		);
-		await updateBulkUpload(bulkUpload.id, { status: BulkUploadStatus.FAILED });
+		await updateBulkUploadTask(bulkUploadTask.id, {
+			status: TaskStatus.FAILED,
+		});
 		return;
 	}
 
 	let bulkUploadFile: FileResult;
 	let bulkUploadHasFailed = false;
 	try {
-		await updateBulkUpload(bulkUpload.id, {
-			status: BulkUploadStatus.IN_PROGRESS,
+		await updateBulkUploadTask(bulkUploadTask.id, {
+			status: TaskStatus.IN_PROGRESS,
 		});
 		bulkUploadFile = await downloadS3ObjectToTemporaryStorage(
-			bulkUpload.sourceKey,
+			bulkUploadTask.sourceKey,
 			helpers.logger,
 		);
 	} catch (err) {
 		helpers.logger.warn('Download of bulk upload file from S3 failed', { err });
-		await updateBulkUpload(bulkUpload.id, { status: BulkUploadStatus.FAILED });
+		await updateBulkUploadTask(bulkUploadTask.id, {
+			status: TaskStatus.FAILED,
+		});
 		return;
 	}
 
-	const shortCodes = await loadShortCodesFromBulkUploadCsv(bulkUploadFile.path);
+	const shortCodes = await loadShortCodesFromBulkUploadTaskCsv(
+		bulkUploadFile.path,
+	);
 	const changemakerNameIndex = getChangemakerNameIndex(shortCodes);
 	const changemakerTaxIdIndex = getChangemakerTaxIdIndex(shortCodes);
 
 	try {
-		await assertBulkUploadCsvIsValid(bulkUploadFile.path);
-		const opportunity = await createOpportunityForBulkUpload(bulkUpload);
+		await assertBulkUploadTaskCsvIsValid(bulkUploadFile.path);
+		const opportunity =
+			await createOpportunityForBulkUploadTask(bulkUploadTask);
 		const applicationForm = await createApplicationForm({
 			opportunityId: opportunity.id,
 		});
 		const applicationFormFields =
-			await createApplicationFormFieldsForBulkUpload(
+			await createApplicationFormFieldsForBulkUploadTask(
 				bulkUploadFile.path,
 				applicationForm.id,
 			);
@@ -284,13 +293,13 @@ export const processBulkUpload = async (
 			const proposal = await createProposal({
 				opportunityId: opportunity.id,
 				externalId: `${recordNumber}`,
-				createdBy: bulkUpload.createdBy,
+				createdBy: bulkUploadTask.createdBy,
 			});
 			const proposalVersion = await createProposalVersion({
 				proposalId: proposal.id,
 				applicationFormId: applicationForm.id,
-				sourceId: bulkUpload.sourceId,
-				createdBy: bulkUpload.createdBy,
+				sourceId: bulkUploadTask.sourceId,
+				createdBy: bulkUploadTask.createdBy,
 			});
 
 			const changemakerName = record[changemakerNameIndex];
@@ -339,10 +348,10 @@ export const processBulkUpload = async (
 	try {
 		const fileStats = await fs.promises.stat(bulkUploadFile.path);
 		const fileSize = fileStats.size;
-		await updateBulkUpload(bulkUpload.id, { fileSize });
+		await updateBulkUploadTask(bulkUploadTask.id, { fileSize });
 	} catch (err) {
 		helpers.logger.warn(
-			`Unable to update the fileSize for bulkUpload ${bulkUpload.id}`,
+			`Unable to update the fileSize for bulkUploadTask ${bulkUploadTask.id}`,
 			{ err },
 		);
 	}
@@ -357,8 +366,8 @@ export const processBulkUpload = async (
 	}
 
 	try {
-		const copySource = `${S3_BUCKET}/${bulkUpload.sourceKey}`;
-		const copyDestination = getProcessedKey(bulkUpload);
+		const copySource = `${S3_BUCKET}/${bulkUploadTask.sourceKey}`;
+		const copyDestination = getProcessedKey(bulkUploadTask);
 		await s3Client.copyObject({
 			Bucket: S3_BUCKET,
 			CopySource: copySource,
@@ -366,21 +375,25 @@ export const processBulkUpload = async (
 		});
 		await s3Client.deleteObject({
 			Bucket: S3_BUCKET,
-			Key: bulkUpload.sourceKey,
+			Key: bulkUploadTask.sourceKey,
+		});
+		await updateBulkUploadTask(bulkUploadTask.id, {
+			sourceKey: copyDestination,
 		});
-		await updateBulkUpload(bulkUpload.id, { sourceKey: copyDestination });
 	} catch (err) {
 		helpers.logger.warn(
-			`Moving the bulk upload file to final processed destination failed (${bulkUploadFile.path})`,
+			`Moving the bulk upload task file to final processed destination failed (${bulkUploadFile.path})`,
 			{ err },
 		);
 	}
 
 	if (bulkUploadHasFailed) {
-		await updateBulkUpload(bulkUpload.id, { status: BulkUploadStatus.FAILED });
+		await updateBulkUploadTask(bulkUploadTask.id, {
+			status: TaskStatus.FAILED,
+		});
 	} else {
-		await updateBulkUpload(bulkUpload.id, {
-			status: BulkUploadStatus.COMPLETED,
+		await updateBulkUploadTask(bulkUploadTask.id, {
+			status: TaskStatus.COMPLETED,
 		});
 	}
 };
diff --git a/src/types/BulkUpload.ts b/src/types/BulkUploadTask.ts
similarity index 53%
rename from src/types/BulkUpload.ts
rename to src/types/BulkUploadTask.ts
index a3a934c7..6b43aac7 100644
--- a/src/types/BulkUpload.ts
+++ b/src/types/BulkUploadTask.ts
@@ -1,35 +1,28 @@
 import { ajv } from '../ajv';
+import { TaskStatus } from './TaskStatus';
 import type { JSONSchemaType } from 'ajv';
 import type { Writable } from './Writable';
 import type { Source } from './Source';
 import type { KeycloakUserId } from './KeycloakUserId';
 
-enum BulkUploadStatus {
-	PENDING = 'pending',
-	IN_PROGRESS = 'in_progress',
-	COMPLETED = 'completed',
-	FAILED = 'failed',
-	CANCELED = 'canceled',
-}
-
-interface BulkUpload {
+interface BulkUploadTask {
 	readonly id: number;
 	sourceId: number;
 	readonly source: Source;
 	fileName: string;
 	sourceKey: string;
-	readonly status: BulkUploadStatus;
+	readonly status: TaskStatus;
 	readonly fileSize?: number | null; // see https://github.com/ajv-validator/ajv/issues/2163
 	readonly createdAt: string;
 	readonly createdBy: KeycloakUserId;
 }
 
-type WritableBulkUpload = Writable<BulkUpload>;
+type WritableBulkUploadTask = Writable<BulkUploadTask>;
 
-type InternallyWritableBulkUpload = WritableBulkUpload &
-	Pick;
+type InternallyWritableBulkUploadTask = WritableBulkUploadTask &
+	Pick;
 
-const writableBulkUploadSchema: JSONSchemaType<WritableBulkUpload> = {
+const writableBulkUploadTaskSchema: JSONSchemaType<WritableBulkUploadTask> = {
 	type: 'object',
 	properties: {
 		sourceId: {
@@ -47,13 +40,12 @@ const writableBulkUploadSchema: JSONSchemaType<WritableBulkUpload> = {
 	required: ['sourceId', 'fileName', 'sourceKey'],
 };
 
-const isWritableBulkUpload = ajv.compile(writableBulkUploadSchema);
+const isWritableBulkUploadTask = ajv.compile(writableBulkUploadTaskSchema);
 
 export {
-	BulkUpload,
-	BulkUploadStatus,
-	InternallyWritableBulkUpload,
-	WritableBulkUpload,
-	isWritableBulkUpload,
-	writableBulkUploadSchema,
+	BulkUploadTask,
+	InternallyWritableBulkUploadTask,
+	WritableBulkUploadTask,
+	isWritableBulkUploadTask,
+	writableBulkUploadTaskSchema,
 };
diff --git a/src/types/TaskStatus.ts b/src/types/TaskStatus.ts
new file mode 100644
index 00000000..0b55d8f4
--- /dev/null
+++ b/src/types/TaskStatus.ts
@@ -0,0 +1,9 @@
+enum TaskStatus {
+	PENDING = 'pending',
+	IN_PROGRESS = 'in_progress',
+	COMPLETED = 'completed',
+	FAILED = 'failed',
+	CANCELED = 'canceled',
+}
+
+export { TaskStatus };
diff --git a/src/types/index.ts b/src/types/index.ts
index 4787af3e..3d11618a 100644
--- a/src/types/index.ts
+++ b/src/types/index.ts
@@ -3,7 +3,7 @@ export * from './ApplicationFormField';
 export * from './AuthContext';
 export * from './BaseField';
 export * from './BaseFieldLocalization';
-export * from './BulkUpload';
+export * from './BulkUploadTask';
 export * from './Bundle';
 export * from './Changemaker';
 export * from './ChangemakerProposal';
@@ -30,6 +30,7 @@ export * from './ProposalVersion';
 export * from './ShortCode';
 export * from './Source';
 export * from './TableMetrics';
+export * from './TaskStatus';
 export * from './TinyPgErrorWithQueryContext';
 export * from './User';
 export * from './UserChangemakerPermission';