diff --git a/.github/workflows/build-and-deploy-consumer.yaml b/.github/workflows/build-and-deploy-consumer.yaml new file mode 100644 index 0000000..09964e6 --- /dev/null +++ b/.github/workflows/build-and-deploy-consumer.yaml @@ -0,0 +1,55 @@ +name: Build and Deploy Consumer to Production +on: + push: + branches: ["master", "feature/cd", "feat/delete-files"] +jobs: + build: + runs-on: ubuntu-latest + steps: + - name: Check Out Repo + uses: actions/checkout@v4 + - name: Login to DockerHub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + - name: Build and push to DockerHub + uses: docker/build-push-action@v5 + with: + context: . + file: ./Dockerfile + push: true + tags: ${{ secrets.DOCKERHUB_USERNAME }}/background-tasks:${{ github.sha }} + deploy: + needs: build + runs-on: ubuntu-latest + environment: + name: production + steps: + - uses: actions/checkout@master + - name: Update delete items deployment image + uses: steebchen/kubectl@v2.0.0 + with: + config: ${{ secrets.KUBE_CONFIG_DATA }} + version: v1.27.4 + command: set image --record deployment/delete-files-consumer delete-files-consumer=${{ secrets.DOCKERHUB_USERNAME }}/background-tasks:${{ github.sha }} + - name: Verify successful deployment + uses: steebchen/kubectl@v2.0.0 + with: + config: ${{ secrets.KUBE_CONFIG_DATA }} + version: v1.27.4 + command: rollout status deployment/delete-files-consumer + - name: Update delete file versions deployment image + uses: steebchen/kubectl@v2.0.0 + with: + config: ${{ secrets.KUBE_CONFIG_DATA }} + version: v1.27.4 + command: set image --record deployment/delete-file-versions-consumer delete-file-versions-consumer=${{ secrets.DOCKERHUB_USERNAME }}/background-tasks:${{ github.sha }} + - name: Verify successful deployment + uses: steebchen/kubectl@v2.0.0 + with: + config: ${{ secrets.KUBE_CONFIG_DATA }} + version: v1.27.4 + command: rollout status deployment/delete-file-versions-consumer diff --git a/.github/workflows/build-and-deploy-producer.yaml b/.github/workflows/build-and-deploy-producer.yaml index fb2531f..4d95698 100644 --- a/.github/workflows/build-and-deploy-producer.yaml +++ b/.github/workflows/build-and-deploy-producer.yaml @@ -1,7 +1,7 @@ name: build & deploy on: push: - branches: ["master", "feature/cd"] + branches: ["master", "feature/cd", "feat/delete-files"] jobs: build: runs-on: ubuntu-latest @@ -19,25 +19,37 @@ jobs: uses: docker/build-push-action@v5 with: context: ./ - file: ./producer.Dockerfile + file: ./Dockerfile push: true - tags: ${{ secrets.DOCKERHUB_USERNAME }}/drive-background-tasks-producer:${{ github.sha }} + tags: ${{ secrets.DOCKERHUB_USERNAME }}/background-tasks:${{ github.sha }} deploy: needs: build runs-on: ubuntu-latest environment: name: production steps: - - uses: actions/checkout@master - - name: Update deployment image + # - uses: actions/checkout@master + # - name: Update delete files/folders deployment image + # uses: steebchen/kubectl@v2.0.0 + # with: + # config: ${{ secrets.KUBE_CONFIG_DATA }} + # version: v1.27.4 + # command: set image --record deployment/background-tasks-producer background-tasks-producer=${{ secrets.DOCKERHUB_USERNAME }}/drive-background-tasks:${{ github.sha }} + # - name: Verify successful deployment + # uses: steebchen/kubectl@v2.0.0 + # with: + # config: ${{ secrets.KUBE_CONFIG_DATA }} + # version: v1.27.4 + # command: rollout status deployment/background-tasks-producer + - name: Update 
delete file versions deployment image uses: steebchen/kubectl@v2.0.0 with: config: ${{ secrets.KUBE_CONFIG_DATA }} version: v1.27.4 - command: set image --record deployment/background-tasks-producer background-tasks-producer=${{ secrets.DOCKERHUB_USERNAME }}/drive-background-tasks-producer:${{ github.sha }} + command: set image --record deployment/delete-file-versions-producer delete-file-versions-producer=${{ secrets.DOCKERHUB_USERNAME }}/background-tasks:${{ github.sha }} - name: Verify succesful deployment uses: steebchen/kubectl@v2.0.0 with: config: ${{ secrets.KUBE_CONFIG_DATA }} version: v1.27.4 - command: rollout status deployment/background-tasks-producer \ No newline at end of file + command: rollout status deployment/delete-file-versions-producer diff --git a/.github/workflows/deploy-and-deploy-consumer.yaml b/.github/workflows/deploy-and-deploy-consumer.yaml deleted file mode 100644 index 34b005a..0000000 --- a/.github/workflows/deploy-and-deploy-consumer.yaml +++ /dev/null @@ -1,43 +0,0 @@ -name: build & deploy -on: - push: - branches: ["master", "feature/cd"] -jobs: - build: - runs-on: ubuntu-latest - steps: - - name: Check Out Repo - uses: actions/checkout@v4 - - name: Login to DockerHub - uses: docker/login-action@v3 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - name: Build and push to DockerHub - uses: docker/build-push-action@v5 - with: - context: ./ - file: ./consumer.Dockerfile - push: true - tags: ${{ secrets.DOCKERHUB_USERNAME }}/drive-background-tasks-consumer:${{ github.sha }} - deploy: - needs: build - runs-on: ubuntu-latest - environment: - name: production - steps: - - uses: actions/checkout@master - - name: Update deployment image - uses: steebchen/kubectl@v2.0.0 - with: - config: ${{ secrets.KUBE_CONFIG_DATA }} - version: v1.27.4 - command: set image --record deployment/background-tasks-consumer background-tasks-consumer=${{ secrets.DOCKERHUB_USERNAME }}/drive-background-tasks-consumer:${{ github.sha }} - - name: Verify successful deployment - uses: steebchen/kubectl@v2.0.0 - with: - config: ${{ secrets.KUBE_CONFIG_DATA }} - version: v1.27.4 - command: rollout status deployment/background-tasks-consumer \ No newline at end of file diff --git a/producer.Dockerfile b/Dockerfile similarity index 85% rename from producer.Dockerfile rename to Dockerfile index 25903e5..b38d2fa 100644 --- a/producer.Dockerfile +++ b/Dockerfile @@ -9,4 +9,4 @@ COPY . . RUN yarn && yarn build && yarn --production && yarn cache clean # Start server -CMD yarn start:prod:producer \ No newline at end of file +CMD yarn start:prod diff --git a/consumer.Dockerfile b/consumer.Dockerfile index 7bb64c3..614e329 100644 --- a/consumer.Dockerfile +++ b/consumer.Dockerfile @@ -1,4 +1,4 @@ -FROM node:iron-slim +FROM node:24 LABEL author="internxt" WORKDIR /app @@ -9,4 +9,4 @@ COPY . . 
RUN yarn && yarn build && yarn --production && yarn cache clean # Start server -CMD yarn start:prod:consumer \ No newline at end of file +CMD yarn start:prod diff --git a/index.ts b/index.ts index c4734a1..0f0ff69 100644 --- a/index.ts +++ b/index.ts @@ -3,50 +3,34 @@ import amqp from 'amqplib'; import { v4 } from 'uuid'; import { createLogger } from './src/utils'; -import { Consumer } from './src/consumer'; -import { Producer } from './src/producer'; import { DriveDatabase } from './src/drive'; -import { DeletedFoldersIterator } from './src/tasks/process-folder-deletion/deleted-folders.iterator'; +import { taskTypes, tasks } from './src/tasks'; +import { ProcessType } from './src/tasks/process'; -const [,, ...args] = process.argv; -const [type] = args; +config(); -if (!type) { - console.error('Missing argument: type'); - process.exit(1); -} +const taskType = process.env.TASK_TYPE as undefined | string; -if (type !== 'producer' && type !== 'consumer') { - console.error('Invalid argument: type. Accepted values are "producer" or "consumer"'); +if (!taskType || !taskTypes.includes(taskType)) { + console.error(`Invalid or missing task type. Expected ${ + taskTypes.map(t => `'${t}'`).join(', ') + } but got '${taskType}'`); process.exit(1); } -const processId = v4(); -const logger = createLogger(processId); -config(); -const amqpServer = process.env.AMQP_SERVER; -const queueName = process.env.MARK_DELETED_ITEMS_QUEUE_NAME; -const maxEnqueuedItems = process.env.TASK_MARK_DELETED_ITEMS_PRODUCER_MAX_ENQUEUED_ITEMS; -const maxConcurrentItems = process.env.TASK_MARK_DELETED_ITEMS_CONSUMER_MAX_CONCURRENT_ITEMS; +const processType = process.env.PROCESS_TYPE as undefined | ProcessType; -if (!maxEnqueuedItems) { - logger.log('Missing env var: TASK_MARK_DELETED_ITEMS_PRODUCER_MAX_ENQUEUED_ITEMS'); +if (!processType || !Object.values(ProcessType).includes(processType)) { + console.error(`Invalid or missing process type. Expected ${ + Object.values(ProcessType).map(t => `'${t}'`).join(', ') + } but got '${processType}'`); process.exit(1); } -if (!maxConcurrentItems) { - logger.log('Missing env var: TASK_MARK_DELETED_ITEMS_CONSUMER_MAX_CONCURRENT_ITEMS'); - process.exit(1); -} +const processId = v4(); +const logger = createLogger(processId); -logger.log(`params: process_type -> ${type}, env -> ${ - JSON.stringify({ - maxConcurrentItems, - maxEnqueuedItems, - queueName - }) -}`); +const amqpServer = process.env.QUEUE_SERVER; let db: DriveDatabase; let connection: amqp.Connection; @@ -75,7 +59,7 @@ function handleStop() { } } -async function start(): Promise<{ connection: amqp.Connection, db: DriveDatabase }> { +async function start(): Promise<void> { db = new DriveDatabase(); logger.log('(drive-db) connecting ...'); @@ -86,63 +70,13 @@ async function start(): Promise<{ connection: amqp.Connection, db: DriveDatabase connection = await amqp.connect(amqpServer as string); logger.log('(rabbit) connected !'); - return { connection, db }; + await tasks[taskType as string](processType as 'consumer' | 'producer', { db }, connection); } -start().then(({ connection, db }) => { - if (type === 'producer') { - const deletedFoldersIterator = new DeletedFoldersIterator(db); - - return connection.createChannel().then((channel) => { - const producer = new Producer( - channel, - queueName as string, - deletedFoldersIterator, - maxEnqueuedItems ? parseInt(maxEnqueuedItems as string) : undefined, - ); - - producer.on('enqueue', (item) => { - logger.log(`enqueued item: + ${JSON.stringify(item)}`, 'producer'); - }); - - producer.on('queue-full', () => { - logger.log(`queue full, waiting 1s...`, 'producer'); - }); - - return producer.run(); - }); - } else { - connection.createChannel().then((channel) => { - const consumer = new Consumer<{ - folder_id: string, - processed: boolean, - created_at: Date, - updated_at: Date, - processed_at: Date, - }>( - channel, - queueName as string, - async (taskPayload) => { - logger.log(`received item: + ${JSON.stringify(taskPayload)}`, 'consumer'); - - await db.markChildrenFilesAsDeleted(taskPayload.folder_id); - await db.markChildrenFoldersAsDeleted(taskPayload.folder_id); - await db.markDeletedFolderAsProcessed([taskPayload.folder_id]); - }, - maxConcurrentItems ? parseInt(maxConcurrentItems as string) : undefined, - ); - - consumer.on('error', ({ err, msg }) => { - logger.error(`error processing item: ${JSON.stringify(msg.content)}`, err, 'consumer'); - }); - - consumer.run(); - }); - } -}).catch((err) => { +start().catch((err) => { logger.error('Error starting', err); process.exit(1); -}) +}); process.on('uncaughtException', (err) => { logger.error('Uncaught exception', err);
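The rewritten bootstrap above replaces the old CLI argument with two environment variables: `TASK_TYPE` selects an entry from the task registry and `PROCESS_TYPE` decides whether it runs as a producer or a consumer. A minimal, self-contained sketch of that dispatch pattern (the registry stubs and `main` here are illustrative, not code from this PR):

```ts
// Env-driven dispatch, mirroring what index.ts now does with tasks/taskTypes.
type ProcessType = 'producer' | 'consumer';
type Task = (processType: ProcessType) => Promise<void>;

const registry: Record<string, Task> = {
  'delete-files': async (p) => console.log(`delete-files as ${p}`),
  'delete-folders': async (p) => console.log(`delete-folders as ${p}`),
  'delete-file-versions': async (p) => console.log(`delete-file-versions as ${p}`),
};

async function main(): Promise<void> {
  const taskType = process.env.TASK_TYPE ?? '';
  const processType = process.env.PROCESS_TYPE;

  if (!(taskType in registry)) {
    // Same failure mode as index.ts: refuse to start on an unknown task type.
    console.error(`Invalid or missing task type: '${taskType}'`);
    process.exit(1);
  }
  if (processType !== 'producer' && processType !== 'consumer') {
    console.error(`Invalid or missing process type: '${processType}'`);
    process.exit(1);
  }

  await registry[taskType](processType);
}

main().catch((err) => { console.error(err); process.exit(1); });
```

One image can then back every deployment: running with, say, `TASK_TYPE=delete-files PROCESS_TYPE=consumer` yields the delete-files consumer, which is why the workflows above push a single `background-tasks` tag instead of separate producer and consumer images.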
diff --git a/package.json b/package.json index 4def583..17bb998 100644 --- a/package.json +++ b/package.json @@ -4,15 +4,15 @@ "main": "index.js", "license": "MIT", "scripts": { - "start:dev:producer": "ts-node index.ts producer", - "start:dev:consumer": "ts-node index.ts consumer", - "start:prod:producer": "node dist/index.js producer", - "start:prod:consumer": "node dist/index.js consumer", + "start:dev": "ts-node index.ts", + "start:prod": "node dist/index.js", "build": "tsc" }, "dependencies": { "amqplib": "^0.10.3", + "axios": "^1.6.1", "dotenv": "^16.3.1", + "jsonwebtoken": "^9.0.2", "pg": "^8.11.3", "ts-node": "^10.9.1", "typescript": "^5.2.2", @@ -20,6 +20,7 @@ }, "devDependencies": { "@types/amqplib": "^0.10.3", + "@types/jsonwebtoken": "^9.0.5", "@types/node": "^20.8.9", "@types/pg": "^8.10.7", "@types/uuid": "^9.0.6" diff --git a/src/drive.ts b/src/drive.ts index 56f44ab..5bf7d3d 100644 --- a/src/drive.ts +++ b/src/drive.ts @@ -49,6 +49,47 @@ export class DriveDatabase { await this.client.end(); } + async getDeletedFiles(): Promise<{ + fileId: string; + processed: boolean, + createdAt: Date, + updatedAt: Date, + processedAt: Date, + networkFileId: string, + }[]> { + const query = 'SELECT * FROM deleted_files WHERE processed = false AND enqueued = false LIMIT 100'; + + const result = await this.client.query(query); + + return result.rows.map(r => ({ + fileId: r.file_id, + processed: r.processed, + createdAt: r.created_at, + updatedAt: r.updated_at, + processedAt: r.processed_at, + networkFileId: r.network_file_id, + })); + } + + async setFilesAsEnqueued(fileIds: string[]): Promise<void> { + const query = ` + UPDATE deleted_files + SET enqueued = true, enqueued_at = NOW(), updated_at = NOW() + WHERE file_id IN (${fileIds.map((fileId) => `'${fileId}'`).join(', ')}) + `; + + await this.client.query(query); + } + + async markDeletedFilesAsProcessed(uuids: string[]): Promise<void> { + const query = ` + UPDATE deleted_files + SET processed = true, processed_at = NOW(), updated_at = NOW() + WHERE file_id IN (${uuids.map((uuid) => `'${uuid}'`).join(', ')}) + `; + + await this.client.query(query); + } + async getChildrenFoldersOfDeletedFolders(): Promise<{ folder_id: string, processed: boolean, @@ -142,4 +183,114 @@ count = result.rowCount; } while (count === 1000); } + + /** + * Gets network file IDs for existing file versions in batches + * @param fileIds + */ + async getFileVersionsByFileId(fileIds: string[]): Promise< + { + id: string; + fileId: string; + networkFileId: string; + }[] + > { + const placeholders = fileIds.map((_, i) => `$${i + 1}`).join(", "); + const query = ` + SELECT network_file_id, file_id, id + FROM file_versions + WHERE file_id IN (${placeholders}) + AND status = 'EXISTS' + `; + + const result = await this.client.query(query, fileIds); + + return result.rows.map((r) => ({ + id: r.id, + networkFileId: r.network_file_id, + fileId: r.file_id, + })); + } + + + /** + * Mark file versions as deleted + * @param versionIds + */ + async markFileVersionsAsDeleted(versionIds: string[]): Promise<number> { + const placeholders = versionIds.map((_, i) => `$${i + 1}`).join(", "); + if (placeholders.length === 0) { + return 0; + } + + const query = ` + UPDATE file_versions + SET status = 'DELETED', updated_at = NOW() + WHERE id IN (${placeholders}) + AND status = 'EXISTS' + `; + const result = await this.client.query(query, versionIds); + + return result.rowCount; + } + + /** + * Gets deleted file versions pending processing + * @returns Array of deleted file versions + */ + async getDeletedFileVersions(): Promise<{ + fileVersionId: string; + fileId: string; + networkFileId: string; + size: bigint; + processed: boolean; + enqueued: boolean; + createdAt: Date; + updatedAt: Date; + processedAt: Date; + }[]> { + const query = 'SELECT * FROM deleted_file_versions WHERE processed = false AND enqueued = false LIMIT 100'; + + const result = await this.client.query(query); + + return result.rows.map(r => ({ + fileVersionId: r.file_version_id, + fileId: r.file_id, + networkFileId: r.network_file_id, + size: r.size, + processed: r.processed, + enqueued: r.enqueued, + createdAt: r.created_at, + updatedAt: r.updated_at, + processedAt: r.processed_at, + })); + } + + /** + * Mark file versions as enqueued for deletion + * @param versionIds + */ + async setFileVersionsAsEnqueued(versionIds: string[]): Promise<void> { + const placeholders = versionIds.map((_, i) => `$${i + 1}`).join(", "); + const query = ` + UPDATE deleted_file_versions + SET enqueued = true, enqueued_at = NOW(), updated_at = NOW() + WHERE file_version_id IN (${placeholders}) + `; + await this.client.query(query, versionIds); + } + + /** + * Mark deleted file versions as processed after successful deletion from network + * @param versionIds + */ + async markDeletedFileVersionsAsProcessed(versionIds: string[]): Promise<void> { + const placeholders = versionIds.map((_, i) => `$${i + 1}`).join(", "); + const query = ` + UPDATE deleted_file_versions + SET processed = true, processed_at = NOW(), updated_at = NOW() + WHERE file_version_id IN (${placeholders}) + `; + await this.client.query(query, versionIds); + } } \ No newline at end of file
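All of the new write paths in `DriveDatabase` share one batch-update shape: build a `$1..$n` placeholder list, pass the IDs as bind parameters, and stamp the relevant timestamp columns. A standalone sketch of that pattern under those assumptions (the `markEnqueued` helper is illustrative; only the `deleted_files` table and `enqueued`/`enqueued_at` columns come from the diff):

```ts
import { Client } from 'pg';

// Batch flag-update in the style of setFileVersionsAsEnqueued: parameterized
// placeholders ($1..$n) with the IDs passed as values, never interpolated.
async function markEnqueued(client: Client, fileIds: string[]): Promise<number> {
  if (fileIds.length === 0) return 0; // skip the round trip on an empty batch

  const placeholders = fileIds.map((_, i) => `$${i + 1}`).join(', ');
  const result = await client.query(
    `UPDATE deleted_files
     SET enqueued = true, enqueued_at = NOW(), updated_at = NOW()
     WHERE file_id IN (${placeholders})`,
    fileIds,
  );

  return result.rowCount ?? 0;
}
```

Worth noting: `setFilesAsEnqueued` and `markDeletedFilesAsProcessed` in the diff still interpolate quoted IDs straight into the SQL string, while the file-version methods use placeholders; the parameterized form above is the safer of the two.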
diff --git a/src/network.ts b/src/network.ts new file mode 100644 index 0000000..695c724 --- /dev/null +++ b/src/network.ts @@ -0,0 +1,38 @@ +import axios, { AxiosRequestConfig } from 'axios'; +import { sign } from 'jsonwebtoken'; + +export type DeleteFilesResponse = { + message: { + confirmed: string[], + notConfirmed: string[] + } +} + +export function signToken(duration: string, secret: string): string { + return sign( + {}, + Buffer.from(secret, 'base64').toString('utf8'), + { + algorithm: 'RS256', + expiresIn: duration + } + ); +} + +export function deleteFiles(endpoint: string, fileIds: string[]): Promise<DeleteFilesResponse> { + const params: AxiosRequestConfig = { + headers: { + 'Content-Type': 'application/json', + 'Authorization': `Bearer ${signToken( + '5m', + process.env.NETWORK_GATEWAY_DELETE_FILES_SECRET as string + )}` + }, + data: { + files: fileIds + } + }; + + return axios.delete(endpoint, params) + .then((res) => res.data); +}
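`deleteFiles` above signs a short-lived RS256 token from a base64-encoded private key and sends the file IDs in the body of a DELETE request; with axios the body has to travel in the config's `data` field, exactly as the diff does, because `axios.delete(url, config)` takes no separate payload argument. A small sketch of how a caller might drive it (the `deleteBatch` wrapper is illustrative; env var names match the diff):

```ts
import { deleteFiles } from './src/network';

// Illustrative caller. Assumes NETWORK_GATEWAY_DELETE_FILES_SECRET holds a
// base64-encoded RS256 private key, which is what signToken expects.
async function deleteBatch(networkFileIds: string[]): Promise<string[]> {
  const endpoint = process.env.NETWORK_GATEWAY_DELETE_FILES_ENDPOINT as string;

  const res = await deleteFiles(endpoint, networkFileIds);

  // The gateway confirms deletions per file; only confirmed IDs should be
  // marked as processed, which is what the task consumers below do.
  return res.message.confirmed;
}
```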
diff --git a/src/producer.ts b/src/producer.ts index 271ce57..e38ef53 100644 --- a/src/producer.ts +++ b/src/producer.ts @@ -21,7 +21,7 @@ export class Producer extends EventEmitter { this.channel.sendToQueue( this.queueName, Buffer.from( - JSON.stringify(item), + (item as any[]).length ? JSON.stringify({ payload: item }) : JSON.stringify(item), ), ); diff --git a/src/tasks/file-deletion/deleted-files.iterator.ts b/src/tasks/file-deletion/deleted-files.iterator.ts new file mode 100644 index 0000000..ba12380 --- /dev/null +++ b/src/tasks/file-deletion/deleted-files.iterator.ts @@ -0,0 +1,36 @@ +import { DriveDatabase } from "../../drive"; + +export class DeletedFilesIterator { + constructor(private readonly db: DriveDatabase) {} + + async * [Symbol.asyncIterator]() { + let n = 10; + + do { + const rows = await this.db.getDeletedFiles(); + if (rows.length === 0) { + // Wait for a short period before checking for new data. + console.log('No data to process, waiting 1s...'); + await new Promise(resolve => setTimeout(resolve, 1000)); + } else { + await this.db.setFilesAsEnqueued(rows.map(row => row.fileId)); + while (rows.length >= n) { + const chunk = rows.splice(0, n); + yield chunk; + } + + if (rows.length > 0) { + yield rows; + } + } + } while (true); + } +}
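The iterator above turns table polling into an async iterable: it claims a page of rows by flagging them `enqueued`, then yields them in chunks of `n`, sleeping one second whenever the table is empty. A self-contained sketch of the same chunking loop with the database swapped for a stub (`chunkedSource` and `fetchPage` are hypothetical names, not part of the PR):

```ts
// Chunked async generator in the style of DeletedFilesIterator.
async function* chunkedSource<T>(fetchPage: () => Promise<T[]>, n: number): AsyncGenerator<T[]> {
  for (;;) {
    const rows = await fetchPage();
    if (rows.length === 0) {
      // Idle backoff, as in the iterator's empty-table branch.
      await new Promise((resolve) => setTimeout(resolve, 1000));
      continue;
    }
    while (rows.length >= n) yield rows.splice(0, n); // full chunks first
    if (rows.length > 0) yield rows; // then the trailing partial chunk
  }
}

async function demo(): Promise<void> {
  const pages: number[][] = [[1, 2, 3, 4, 5]];
  const fetchPage = async () => pages.shift() ?? [];
  const it = chunkedSource(fetchPage, 2);

  console.log((await it.next()).value); // [ 1, 2 ]
  console.log((await it.next()).value); // [ 3, 4 ]
  console.log((await it.next()).value); // [ 5 ]
}

demo().catch(console.error);
```

One consequence of this design: rows are flagged as enqueued before they are ever published, so a crash between the `UPDATE` and the publish can leave rows enqueued-but-unqueued; that appears to be the trade-off accepted in exchange for never double-publishing a row.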
diff --git a/src/tasks/file-deletion/index.ts b/src/tasks/file-deletion/index.ts new file mode 100644 index 0000000..6221eca --- /dev/null +++ b/src/tasks/file-deletion/index.ts @@ -0,0 +1,103 @@ +import { v4 } from 'uuid'; + +import { createLogger } from '../../utils'; +import { Consumer } from '../../consumer'; +import { Producer } from '../../producer'; +import { DeletedFilesIterator } from './deleted-files.iterator'; +import { TaskFunction } from '../task'; +import { deleteFiles } from '../../network'; + +const task: TaskFunction = async ( + processType, + drive, + connection, +) => { + const processId = v4(); + const logger = createLogger(processId); + + const queueName = `${process.env.TASK_TYPE}-${process.env.NODE_ENV}`; + const maxEnqueuedItems = process.env.TASK_DELETE_FILES_PRODUCER_MAX_ENQUEUED_ITEMS; + const maxConcurrentItems = process.env.TASK_DELETE_FILES_CONSUMER_MAX_CONCURRENT_ITEMS; + + if (!maxEnqueuedItems) { + logger.log('Missing env var: TASK_DELETE_FILES_PRODUCER_MAX_ENQUEUED_ITEMS'); + process.exit(1); + } + + if (!maxConcurrentItems) { + logger.log('Missing env var: TASK_DELETE_FILES_CONSUMER_MAX_CONCURRENT_ITEMS'); + process.exit(1); + } + + logger.log(`params: process_type -> ${processType}, env -> ${ + JSON.stringify({ + maxConcurrentItems, + maxEnqueuedItems, + queueName + }) + }`); + + + if (processType === 'producer') { + const deletedFilesIterator = new DeletedFilesIterator(drive.db); + + return connection.createChannel().then((channel) => { + const producer = new Producer( + channel, + queueName as string, + deletedFilesIterator, + maxEnqueuedItems ? parseInt(maxEnqueuedItems as string) : undefined, + ); + + producer.on('enqueue', (item) => { + logger.log(`enqueued item: + ${JSON.stringify(item)}`, 'producer'); + }); + + producer.on('queue-full', () => { + logger.log(`queue full, waiting 1s...`, 'producer'); + }); + + return producer.run(); + }); + } else { + connection.createChannel().then((channel) => { + const consumer = new Consumer<{ + payload: { + fileId: string, + processed: boolean, + createdAt: Date, + updatedAt: Date, + processedAt: Date, + networkFileId: string, + }[] + }>( + channel, + queueName as string, + async (task) => { + logger.log(`received item: + ${JSON.stringify(task)}`, 'consumer'); + + const networkFileIdsToDelete = task.payload.map((file) => file.networkFileId); + const fileIdsToDelete = task.payload.map((file) => file.fileId); + + const res = await deleteFiles(process.env.NETWORK_GATEWAY_DELETE_FILES_ENDPOINT as string, networkFileIdsToDelete); + const fileIdsDeletedSuccessfully = res.message.confirmed; + const filesToMarkAsProcessed = task.payload.filter((file) => fileIdsDeletedSuccessfully.includes(file.networkFileId)); + + await drive.db.markDeletedFilesAsProcessed(filesToMarkAsProcessed.map(f => f.fileId)); + + const fileVersionsData = await drive.db.getFileVersionsByFileId(fileIdsToDelete); + await drive.db.markFileVersionsAsDeleted(fileVersionsData.map(fv => fv.id)); + }, + maxConcurrentItems ? parseInt(maxConcurrentItems as string) : undefined, + ); + + consumer.on('error', ({ err, msg }) => { + logger.error(`error processing item: ${JSON.stringify(msg.content)}`, err, 'consumer'); + }); + + consumer.run(); + }); + } +} + +export default task; diff --git a/src/tasks/file-version-deletion/deleted-file-versions.iterator.ts b/src/tasks/file-version-deletion/deleted-file-versions.iterator.ts new file mode 100644 index 0000000..70b776c --- /dev/null +++ b/src/tasks/file-version-deletion/deleted-file-versions.iterator.ts @@ -0,0 +1,38 @@ +import { DriveDatabase } from "../../drive"; + +export class DeletedFileVersionsIterator { + constructor(private readonly db: DriveDatabase) {} + + async * [Symbol.asyncIterator]() { + let n = 50; + + do { + const rows = await this.db.getDeletedFileVersions(); + if (rows.length === 0) { + console.log('No file versions to process, waiting 1s...'); + await new Promise(resolve => setTimeout(resolve, 1000)); + } else { + await this.db.setFileVersionsAsEnqueued(rows.map(row => row.fileVersionId)); + while (rows.length >= n) { + const chunk = rows.splice(0, n); + yield chunk; + } + + if (rows.length > 0) { + yield rows; + } + } + } while (true); + } +} diff --git a/src/tasks/file-version-deletion/index.ts b/src/tasks/file-version-deletion/index.ts new file mode 100644 index 0000000..badc4a2 --- /dev/null +++ b/src/tasks/file-version-deletion/index.ts @@ -0,0 +1,102 @@ +import { v4 } from 'uuid'; + +import { createLogger } from '../../utils'; +import { Consumer } from '../../consumer'; +import { Producer } from '../../producer'; +import { DeletedFileVersionsIterator } from './deleted-file-versions.iterator'; +import { TaskFunction } from '../task'; +import { deleteFiles } from '../../network'; + +const task: TaskFunction = async ( + processType, + drive, + connection, +) => { + const processId = v4(); + const logger = createLogger(processId); + + const queueName = 
`${process.env.TASK_TYPE}-${process.env.NODE_ENV}`; + const maxEnqueuedItems = process.env.TASK_DELETE_FILE_VERSIONS_PRODUCER_MAX_ENQUEUED_ITEMS; + const maxConcurrentItems = process.env.TASK_DELETE_FILE_VERSIONS_CONSUMER_MAX_CONCURRENT_ITEMS; + + if (!maxEnqueuedItems) { + logger.log('Missing env var: TASK_DELETE_FILE_VERSIONS_PRODUCER_MAX_ENQUEUED_ITEMS'); + process.exit(1); + } + + if (!maxConcurrentItems) { + logger.log('Missing env var: TASK_DELETE_FILE_VERSIONS_CONSUMER_MAX_CONCURRENT_ITEMS'); + process.exit(1); + } + + logger.log(`params: process_type -> ${processType}, env -> ${ + JSON.stringify({ + maxConcurrentItems, + maxEnqueuedItems, + queueName + }) + }`); + + + if (processType === 'producer') { + const deletedFileVersionsIterator = new DeletedFileVersionsIterator(drive.db); + + return connection.createChannel().then((channel) => { + const producer = new Producer( + channel, + queueName as string, + deletedFileVersionsIterator, + maxEnqueuedItems ? parseInt(maxEnqueuedItems as string) : undefined, + ); + + producer.on('enqueue', (item) => { + logger.log(`enqueued item: + ${JSON.stringify(item)}`, 'producer'); + }); + + producer.on('queue-full', () => { + logger.log(`queue full, waiting 1s...`, 'producer'); + }); + + return producer.run(); + }); + } else { + connection.createChannel().then((channel) => { + const consumer = new Consumer<{ + payload: { + fileVersionId: string, + fileId: string, + networkFileId: string, + size: bigint, + processed: boolean, + enqueued: boolean, + createdAt: Date, + updatedAt: Date, + processedAt: Date, + }[] + }>( + channel, + queueName as string, + async (task) => { + logger.log(`received item: + ${JSON.stringify(task)}`, 'consumer'); + + const networkFileIdsToDelete = task.payload.map((version) => version.networkFileId); + + const res = await deleteFiles(process.env.NETWORK_GATEWAY_DELETE_FILES_ENDPOINT as string, networkFileIdsToDelete); + const versionIdsDeletedSuccessfully = res.message.confirmed; + const versionsToMarkAsProcessed = task.payload.filter((version) => versionIdsDeletedSuccessfully.includes(version.networkFileId)); + + await drive.db.markDeletedFileVersionsAsProcessed(versionsToMarkAsProcessed.map(v => v.fileVersionId)); + }, + maxConcurrentItems ? 
parseInt(maxConcurrentItems as string) : undefined, + ); + + consumer.on('error', ({ err, msg }) => { + logger.error(`error processing item: ${JSON.stringify(msg.content)}`, err, 'consumer'); + }); + + consumer.run(); + }); + } +} + +export default task; diff --git a/src/tasks/process-folder-deletion/deleted-folders.iterator.ts b/src/tasks/folder-deletion/deleted-folders.iterator.ts similarity index 100% rename from src/tasks/process-folder-deletion/deleted-folders.iterator.ts rename to src/tasks/folder-deletion/deleted-folders.iterator.ts diff --git a/src/tasks/folder-deletion/index.ts b/src/tasks/folder-deletion/index.ts new file mode 100644 index 0000000..19a4b1e --- /dev/null +++ b/src/tasks/folder-deletion/index.ts @@ -0,0 +1,72 @@ +import { v4 } from 'uuid'; + +import { DeletedFoldersIterator } from './deleted-folders.iterator'; +import { Producer } from '../../producer'; +import { Consumer } from '../../consumer'; +import { createLogger } from '../../utils'; +import { TaskFunction } from '../task'; + +const task: TaskFunction = async( + processType, + drive, + connection, +) => { + const processId = v4(); + const logger = createLogger(processId); + + const queueName = process.env.MARK_DELETED_ITEMS_QUEUE_NAME; + const maxEnqueuedItems = process.env.TASK_MARK_DELETED_ITEMS_PRODUCER_MAX_ENQUEUED_ITEMS; + const maxConcurrentItems = process.env.TASK_MARK_DELETED_ITEMS_CONSUMER_MAX_CONCURRENT_ITEMS; + + if (processType === 'producer') { + const deletedFoldersIterator = new DeletedFoldersIterator(drive.db); + + return connection.createChannel().then((channel) => { + const producer = new Producer( + channel, + queueName as string, + deletedFoldersIterator, + maxEnqueuedItems ? parseInt(maxEnqueuedItems as string) : undefined, + ); + + producer.on('enqueue', (item) => { + logger.log(`enqueued item: + ${JSON.stringify(item)}`, 'producer'); + }); + + producer.on('queue-full', () => { + logger.log(`queue full, waiting 1s...`, 'producer'); + }); + + return producer.run(); + }); + } else { + connection.createChannel().then((channel) => { + const consumer = new Consumer<{ + folder_id: string, + processed: boolean, + created_at: Date, + updated_at: Date, + processed_at: Date, + }>( + channel, + queueName as string, + async (taskPayload) => { + logger.log(`received item: + ${JSON.stringify(taskPayload)}`, 'consumer'); + + await drive.db.markChildrenFilesAsDeleted(taskPayload.folder_id); + await drive.db.markChildrenFoldersAsDeleted(taskPayload.folder_id); + await drive.db.markDeletedFolderAsProcessed([taskPayload.folder_id]); + }, + maxConcurrentItems ? 
parseInt(maxConcurrentItems as string) : undefined, + ); + + consumer.on('error', ({ err, msg }) => { + logger.error(`error processing item: ${JSON.stringify(msg.content)}`, err, 'consumer'); + }); + + consumer.run(); + }); + } +} + +export default task; diff --git a/src/tasks/index.ts b/src/tasks/index.ts new file mode 100644 index 0000000..356eb8e --- /dev/null +++ b/src/tasks/index.ts @@ -0,0 +1,13 @@ +import { TaskFunction } from './task'; + +import fileDeletion from './file-deletion'; +import folderDeletion from './folder-deletion'; +import fileVersionDeletion from './file-version-deletion'; + +export const tasks: Record<string, TaskFunction> = { + 'delete-files': fileDeletion, + 'delete-folders': folderDeletion, + 'delete-file-versions': fileVersionDeletion, +}; + +export const taskTypes = Object.keys(tasks); diff --git a/src/tasks/process.ts b/src/tasks/process.ts new file mode 100644 index 0000000..9f14b86 --- /dev/null +++ b/src/tasks/process.ts @@ -0,0 +1,4 @@ +export enum ProcessType { + Consumer = 'consumer', + Producer = 'producer', +} diff --git a/src/tasks/task.ts b/src/tasks/task.ts new file mode 100644 index 0000000..c499332 --- /dev/null +++ b/src/tasks/task.ts @@ -0,0 +1,11 @@ +import amqp from 'amqplib'; + +import { DriveDatabase } from '../drive'; + +export type TaskFunction = ( + processType: 'producer' | 'consumer', + drive: { + db: DriveDatabase + }, + connection: amqp.Connection, +) => Promise<void>; diff --git a/yarn.lock b/yarn.lock index 6646fdd..08cef29 100644 --- a/yarn.lock +++ b/yarn.lock @@ -63,6 +63,13 @@ dependencies: "@types/node" "*" +"@types/jsonwebtoken@^9.0.5": + version "9.0.5" + resolved "https://registry.yarnpkg.com/@types/jsonwebtoken/-/jsonwebtoken-9.0.5.tgz#0bd9b841c9e6c5a937c17656e2368f65da025588" + integrity sha512-VRLSGzik+Unrup6BsouBeHsf4d1hOEgYWTm/7Nmw1sXoN1+tRly/Gy/po3yeahnP4jfnQWWAhQAqcNfH7ngOkA== + dependencies: + "@types/node" "*" + "@types/node@*", "@types/node@^20.8.9": version "20.8.9" resolved "https://registry.yarnpkg.com/@types/node/-/node-20.8.9.tgz#646390b4fab269abce59c308fc286dcd818a2b08" @@ -109,6 +116,25 @@ arg@^4.1.0: resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089" integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA== +asynckit@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== + +axios@^1.6.1: + version "1.6.1" + resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.1.tgz#76550d644bf0a2d469a01f9244db6753208397d7" + integrity sha512-vfBmhDpKafglh0EldBEbVuoe7DyAavGSLWhuSm5ZSEKQnHhBf0xAAwybbNH1IkrJNGnS/VG4I5yxig1pCEXE4g== + dependencies: + follow-redirects "^1.15.0" + form-data "^4.0.0" + proxy-from-env "^1.1.0" + +buffer-equal-constant-time@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz#f8e71132f7ffe6e01a5c9697a4c6f3e48d5cc819" + integrity sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA== + buffer-more-ints@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/buffer-more-ints/-/buffer-more-ints-1.0.0.tgz#ef4f8e2dddbad429ed3828a9c55d44f05c611422" @@ -119,6 +145,13 @@ buffer-writer@2.0.0: resolved "https://registry.yarnpkg.com/buffer-writer/-/buffer-writer-2.0.0.tgz#ce7eb81a38f7829db09c873f2fbb792c0c98ec04" integrity 
sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw== +combined-stream@^1.0.8: + version "1.0.8" + resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" + integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== + dependencies: + delayed-stream "~1.0.0" + core-util-is@~1.0.0: version "1.0.3" resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" @@ -136,6 +169,11 @@ debug@^4.3.4: dependencies: ms "2.1.2" +delayed-stream@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== + diff@^4.0.1: version "4.0.2" resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" @@ -146,6 +184,27 @@ dotenv@^16.3.1: resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.3.1.tgz#369034de7d7e5b120972693352a3bf112172cc3e" integrity sha512-IPzF4w4/Rd94bA9imS68tZBaYyBWSCE47V1RGuMrB94iyTOIEwRmVL2x/4An+6mETpLrKJ5hQkB8W4kFAadeIQ== +ecdsa-sig-formatter@1.0.11: + version "1.0.11" + resolved "https://registry.yarnpkg.com/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz#ae0f0fa2d85045ef14a817daa3ce9acd0489e5bf" + integrity sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ== + dependencies: + safe-buffer "^5.0.1" + +follow-redirects@^1.15.0: + version "1.15.3" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.3.tgz#fe2f3ef2690afce7e82ed0b44db08165b207123a" + integrity sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q== + +form-data@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452" + integrity sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.8" + mime-types "^2.1.12" + inherits@~2.0.1: version "2.0.4" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" @@ -156,16 +215,108 @@ isarray@0.0.1: resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" integrity sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ== +jsonwebtoken@^9.0.2: + version "9.0.2" + resolved "https://registry.yarnpkg.com/jsonwebtoken/-/jsonwebtoken-9.0.2.tgz#65ff91f4abef1784697d40952bb1998c504caaf3" + integrity sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ== + dependencies: + jws "^3.2.2" + lodash.includes "^4.3.0" + lodash.isboolean "^3.0.3" + lodash.isinteger "^4.0.4" + lodash.isnumber "^3.0.3" + lodash.isplainobject "^4.0.6" + lodash.isstring "^4.0.1" + lodash.once "^4.0.0" + ms "^2.1.1" + semver "^7.5.4" + +jwa@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/jwa/-/jwa-1.4.1.tgz#743c32985cb9e98655530d53641b66c8645b039a" + integrity sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA== + dependencies: + buffer-equal-constant-time "1.0.1" + ecdsa-sig-formatter "1.0.11" + safe-buffer "^5.0.1" + +jws@^3.2.2: + 
version "3.2.2" + resolved "https://registry.yarnpkg.com/jws/-/jws-3.2.2.tgz#001099f3639468c9414000e99995fa52fb478304" + integrity sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA== + dependencies: + jwa "^1.4.1" + safe-buffer "^5.0.1" + +lodash.includes@^4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/lodash.includes/-/lodash.includes-4.3.0.tgz#60bb98a87cb923c68ca1e51325483314849f553f" + integrity sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w== + +lodash.isboolean@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz#6c2e171db2a257cd96802fd43b01b20d5f5870f6" + integrity sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg== + +lodash.isinteger@^4.0.4: + version "4.0.4" + resolved "https://registry.yarnpkg.com/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz#619c0af3d03f8b04c31f5882840b77b11cd68343" + integrity sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA== + +lodash.isnumber@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz#3ce76810c5928d03352301ac287317f11c0b1ffc" + integrity sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw== + +lodash.isplainobject@^4.0.6: + version "4.0.6" + resolved "https://registry.yarnpkg.com/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz#7c526a52d89b45c45cc690b88163be0497f550cb" + integrity sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA== + +lodash.isstring@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/lodash.isstring/-/lodash.isstring-4.0.1.tgz#d527dfb5456eca7cc9bb95d5daeaf88ba54a5451" + integrity sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw== + +lodash.once@^4.0.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/lodash.once/-/lodash.once-4.1.1.tgz#0dd3971213c7c56df880977d504c88fb471a97ac" + integrity sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg== + +lru-cache@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" + integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== + dependencies: + yallist "^4.0.0" + make-error@^1.1.1: version "1.3.6" resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2" integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw== +mime-db@1.52.0: + version "1.52.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" + integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== + +mime-types@^2.1.12: + version "2.1.35" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + dependencies: + mime-db "1.52.0" + ms@2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" integrity 
sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== +ms@^2.1.1: + version "2.1.3" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" + integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== + obuf@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/obuf/-/obuf-1.1.2.tgz#09bea3343d41859ebd446292d11c9d4db619084e" @@ -301,6 +452,11 @@ postgres-range@^1.1.1: resolved "https://registry.yarnpkg.com/postgres-range/-/postgres-range-1.1.3.tgz#9ccd7b01ca2789eb3c2e0888b3184225fa859f76" integrity sha512-VdlZoocy5lCP0c/t66xAfclglEapXPCIVhqqJRncYpvbCgImF0w67aPKfbqUMr72tO2k5q0TdTZwCLjPTI6C9g== +proxy-from-env@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2" + integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg== + querystringify@^2.1.1: version "2.2.0" resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" @@ -321,11 +477,23 @@ requires-port@^1.0.0: resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" integrity sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ== +safe-buffer@^5.0.1: + version "5.2.1" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" + integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== + safe-buffer@~5.1.2: version "5.1.2" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== +semver@^7.5.4: + version "7.5.4" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e" + integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA== + dependencies: + lru-cache "^6.0.0" + split2@^4.1.0: version "4.2.0" resolved "https://registry.yarnpkg.com/split2/-/split2-4.2.0.tgz#c9c5920904d148bab0b9f67145f245a86aadbfa4" @@ -388,6 +556,11 @@ xtend@^4.0.0: resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== +yallist@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" + integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== + yn@3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50"