From 0e3441d5ebb493e1e7b70222283e4154f77d417f Mon Sep 17 00:00:00 2001 From: larry-internxt Date: Fri, 27 Dec 2024 15:09:02 +0100 Subject: [PATCH 1/9] improved progress reporting and migrated upload from node-fetch to axios --- package.json | 4 +-- src/commands/download-file.ts | 3 +- src/commands/upload-file.ts | 10 +++--- src/services/network/download.service.ts | 6 ++-- .../network/network-facade.service.ts | 31 ++++++---------- src/services/network/upload.service.ts | 20 ++++++----- src/webdav/handlers/GET.handler.ts | 19 +++++----- yarn.lock | 35 +------------------ 8 files changed, 44 insertions(+), 84 deletions(-) diff --git a/package.json b/package.json index a121a8c3..0adee561 100644 --- a/package.json +++ b/package.json @@ -54,7 +54,6 @@ "express-basic-auth": "1.2.1", "fast-xml-parser": "4.5.1", "mime-types": "2.1.35", - "node-fetch": "2.7.0", "openpgp": "5.11.2", "pm2": "5.4.3", "range-parser": "^1.2.1", @@ -75,8 +74,7 @@ "@types/express": "5.0.0", "@types/mime-types": "2.1.4", "@types/node": "22.10.2", - "@types/node-fetch": "2.6.12", - "@types/range-parser": "^1.2.7", + "@types/range-parser": "1.2.7", "@vitest/coverage-istanbul": "2.1.8", "@vitest/spy": "2.1.8", "eslint": "9.17.0", diff --git a/src/commands/download-file.ts b/src/commands/download-file.ts index a4d06ba0..233e2083 100644 --- a/src/commands/download-file.ts +++ b/src/commands/download-file.ts @@ -79,12 +79,13 @@ export default class DownloadFile extends Command { user.bucket, user.mnemonic, driveFile.fileId, + driveFile.size, StreamUtils.writeStreamToWritableStream(fileWriteStream), undefined, { abortController: new AbortController(), progressCallback: (progress) => { - progressBar.update(progress); + progressBar.update(progress * 0.99); }, }, ); diff --git a/src/commands/upload-file.ts b/src/commands/upload-file.ts index 3f8a82b4..b63f86a4 100644 --- a/src/commands/upload-file.ts +++ b/src/commands/upload-file.ts @@ -74,14 +74,14 @@ export default class UploadFile extends Command { CLIUtils.done(); - const timer = CLIUtils.timer(); // 2. Upload file to the Network const fileStream = createReadStream(filePath); + const timer = CLIUtils.timer(); const progressBar = CLIUtils.progress({ format: 'Uploading file [{bar}] {percentage}%', linewrap: true, }); - progressBar.start(1, 0); + progressBar.start(100, 0); const [uploadPromise, abortable] = await networkFacade.uploadFromStream( user.bucket, user.mnemonic, @@ -89,7 +89,7 @@ export default class UploadFile extends Command { fileStream, { progressCallback: (progress) => { - progressBar.update(progress); + progressBar.update(progress * 0.99); }, }, ); @@ -100,7 +100,6 @@ export default class UploadFile extends Command { }); const uploadResult = await uploadPromise; - progressBar.stop(); // 3. 
Create the file in Drive const fileInfo = path.parse(filePath); @@ -115,6 +114,9 @@ export default class UploadFile extends Command { name: '', }); + progressBar.update(100); + progressBar.stop(); + const uploadTime = timer.stop(); this.log('\n'); // eslint-disable-next-line max-len diff --git a/src/services/network/download.service.ts b/src/services/network/download.service.ts index 1907e30a..2df324eb 100644 --- a/src/services/network/download.service.ts +++ b/src/services/network/download.service.ts @@ -5,6 +5,7 @@ export class DownloadService { async downloadFile( url: string, + size: number, options: { progressCallback?: (progress: number) => void; abortController?: AbortController; @@ -14,9 +15,8 @@ export class DownloadService { const response = await axios.get(url, { responseType: 'stream', onDownloadProgress(progressEvent) { - if (options.progressCallback && progressEvent.total) { - const reportedProgress = progressEvent.loaded / progressEvent.total; - + if (options.progressCallback && progressEvent.loaded) { + const reportedProgress = Math.round((progressEvent.loaded / size) * 100); options.progressCallback(reportedProgress); } }, diff --git a/src/services/network/network-facade.service.ts b/src/services/network/network-facade.service.ts index 339523f5..209427f4 100644 --- a/src/services/network/network-facade.service.ts +++ b/src/services/network/network-facade.service.ts @@ -16,7 +16,6 @@ import { UploadService } from './upload.service'; import { DownloadService } from './download.service'; import { ValidationService } from '../validation.service'; import { HashStream } from '../../utils/hash.utils'; -import { ProgressTransform } from '../../utils/stream.utils'; import { RangeOptions } from '../../utils/network.utils'; export class NetworkFacade { @@ -54,6 +53,7 @@ export class NetworkFacade { bucketId: string, mnemonic: string, fileId: string, + size: number, to: WritableStream, rangeOptions?: RangeOptions, options?: DownloadOptions, @@ -67,10 +67,6 @@ export class NetworkFacade { options.progressCallback(progress); }; - const onDownloadProgress = (progress: number) => { - onProgress(progress); - }; - const decryptFile: DecryptFileFunction = async (_, key, iv) => { let startOffsetByte; if (rangeOptions) { @@ -96,8 +92,8 @@ export class NetworkFacade { throw new Error('Download aborted'); } - const encryptedContentStream = await this.downloadService.downloadFile(downloadable.url, { - progressCallback: onDownloadProgress, + const encryptedContentStream = await this.downloadService.downloadFile(downloadable.url, size, { + progressCallback: onProgress, abortController: options?.abortController, rangeHeader: rangeOptions?.range, }); @@ -142,11 +138,7 @@ export class NetworkFacade { const hashStream = new HashStream(); const abortable = options?.abortController ?? 
new AbortController(); let encryptionTransform: Transform; - const progressTransform = new ProgressTransform({ totalBytes: size }, (progress) => { - if (options?.progressCallback) { - options.progressCallback(progress * 0.95); - } - }); + let hash: Buffer; const onProgress: UploadProgressCallback = (progress: number) => { if (!options?.progressCallback) return; @@ -165,15 +157,14 @@ export class NetworkFacade { }; const uploadFile: UploadFileFunction = async (url) => { - await this.uploadService.uploadFile(url, encryptionTransform.pipe(progressTransform), { + await this.uploadService.uploadFile(url, size, encryptionTransform, { abortController: abortable, - progressCallback: () => { - // No progress here, we are using the progressTransform - }, + progressCallback: onProgress, }); - - return hashStream.getHash().toString('hex'); + hash = hashStream.getHash(); + return hash.toString('hex'); }; + const uploadOperation = async () => { const uploadResult = await NetworkUpload.uploadFile( this.network, @@ -184,12 +175,10 @@ export class NetworkFacade { encryptFile, uploadFile, ); - const fileHash: Buffer = Buffer.from(''); - onProgress(1); return { fileId: uploadResult, - hash: fileHash, + hash: hash, }; }; diff --git a/src/services/network/upload.service.ts b/src/services/network/upload.service.ts index 836d39f0..cda9f333 100644 --- a/src/services/network/upload.service.ts +++ b/src/services/network/upload.service.ts @@ -1,20 +1,22 @@ import { Readable } from 'node:stream'; -import fetch from 'node-fetch'; -import { AbortSignal } from 'node-fetch/externals'; +import axios from 'axios'; import { UploadOptions } from '../../types/network.types'; export class UploadService { public static readonly instance: UploadService = new UploadService(); - async uploadFile(url: string, from: Readable, options: UploadOptions): Promise<{ etag: string }> { - const response = await fetch(url, { - method: 'PUT', - body: from, - signal: options.abortController?.signal as AbortSignal, + async uploadFile(url: string, size: number, from: Readable, options: UploadOptions): Promise<{ etag: string }> { + const response = await axios.put(url, from, { + signal: options.abortController?.signal, + onUploadProgress: (progressEvent) => { + if (options.progressCallback && progressEvent.loaded) { + const reportedProgress = Math.round((progressEvent.loaded / size) * 100); + options.progressCallback(reportedProgress); + } + }, }); - const etag = response.headers.get('etag'); - options.progressCallback(1); + const etag = response.headers['etag']; if (!etag) { throw new Error('Missing Etag in response when uploading file'); } diff --git a/src/webdav/handlers/GET.handler.ts b/src/webdav/handlers/GET.handler.ts index 9b72dceb..ce85a6e0 100644 --- a/src/webdav/handlers/GET.handler.ts +++ b/src/webdav/handlers/GET.handler.ts @@ -43,15 +43,6 @@ export class GETRequestHandler implements WebDavMethodHandler { const { user } = await authService.getAuthDetails(); webdavLogger.info(`[GET] [${driveFile.uuid}] Network ready for download`); - const writable = new WritableStream({ - write(chunk) { - res.write(chunk); - }, - close() { - res.end(); - }, - }); - const range = req.headers['range']; const rangeOptions = NetworkUtils.parseRangeHeader({ range, @@ -66,10 +57,20 @@ export class GETRequestHandler implements WebDavMethodHandler { res.header('Content-Type', 'application/octet-stream'); res.header('Content-length', contentLength.toString()); + const writable = new WritableStream({ + write(chunk) { + res.write(chunk); + }, + close() { + 
res.end(); + }, + }); + const [executeDownload] = await networkFacade.downloadToStream( driveFile.bucket, user.mnemonic, driveFile.fileId, + contentLength, writable, rangeOptions, ); diff --git a/yarn.lock b/yarn.lock index fc5e7037..a2da60fb 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2937,14 +2937,6 @@ dependencies: "@types/node" "*" -"@types/node-fetch@2.6.12": - version "2.6.12" - resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.12.tgz#8ab5c3ef8330f13100a7479e2cd56d3386830a03" - integrity sha512-8nneRWKCg3rMtF69nLQJnOYUcbafYeFSjqkw3jCRLsqkWFlHaoQrr5mXmofFGOx3DKn7UfmBMyov8ySvLRVldA== - dependencies: - "@types/node" "*" - form-data "^4.0.0" - "@types/node-forge@^1.3.0": version "1.3.11" resolved "https://registry.yarnpkg.com/@types/node-forge/-/node-forge-1.3.11.tgz#0972ea538ddb0f4d9c2fa0ec5db5724773a604da" @@ -2971,7 +2963,7 @@ resolved "https://registry.yarnpkg.com/@types/qs/-/qs-6.9.17.tgz#fc560f60946d0aeff2f914eb41679659d3310e1a" integrity sha512-rX4/bPcfmvxHDv0XjfJELTTr+iB+tn032nPILqHm5wbthUUUuVtNGGqzhya9XUxjTP8Fpr0qYgSZZKxGY++svQ== -"@types/range-parser@*", "@types/range-parser@^1.2.7": +"@types/range-parser@*", "@types/range-parser@1.2.7": version "1.2.7" resolved "https://registry.yarnpkg.com/@types/range-parser/-/range-parser-1.2.7.tgz#50ae4353eaaddc04044279812f52c8c65857dbcb" integrity sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ== @@ -6540,13 +6532,6 @@ node-addon-api@^7.0.0: resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-7.1.1.tgz#1aba6693b0f255258a049d621329329322aad558" integrity sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ== -node-fetch@2.7.0: - version "2.7.0" - resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d" - integrity sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A== - dependencies: - whatwg-url "^5.0.0" - node-forge@^1: version "1.3.1" resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3" @@ -8143,11 +8128,6 @@ touch@^3.1.0: resolved "https://registry.yarnpkg.com/touch/-/touch-3.1.1.tgz#097a23d7b161476435e5c1344a95c0f75b4a5694" integrity sha512-r0eojU4bI8MnHr8c5bNo7lJDdI2qXlWWJk6a9EAFG7vbhTjElYhBVS3/miuE0uOuoLdb8Mc/rVfsmm6eo5o9GA== -tr46@~0.0.3: - version "0.0.3" - resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" - integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw== - triple-beam@^1.3.0: version "1.4.1" resolved "https://registry.yarnpkg.com/triple-beam/-/triple-beam-1.4.1.tgz#6fde70271dc6e5d73ca0c3b24e2d92afb7441984" @@ -8530,19 +8510,6 @@ web-streams-polyfill@~3.0.3: resolved "https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-3.0.3.tgz#f49e487eedeca47a207c1aee41ee5578f884b42f" integrity sha512-d2H/t0eqRNM4w2WvmTdoeIvzAUSpK7JmATB8Nr2lb7nQ9BTIJVjbQ/TRFVEh2gUH1HwclPdoPtfMoFfetXaZnA== -webidl-conversions@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" - integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== - -whatwg-url@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" - 
integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw== - dependencies: - tr46 "~0.0.3" - webidl-conversions "^3.0.0" - which-boxed-primitive@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" From adf228590be47b95064654a39d96f4a2da5d74eb Mon Sep 17 00:00:00 2001 From: larry-internxt Date: Fri, 27 Dec 2024 15:21:45 +0100 Subject: [PATCH 2/9] fixed MKCOL functionality when creating folders that do not end with / --- src/webdav/handlers/MKCOL.handler.ts | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/webdav/handlers/MKCOL.handler.ts b/src/webdav/handlers/MKCOL.handler.ts index d847299d..44578f94 100644 --- a/src/webdav/handlers/MKCOL.handler.ts +++ b/src/webdav/handlers/MKCOL.handler.ts @@ -7,7 +7,6 @@ import { webdavLogger } from '../../utils/logger.utils'; import { XMLUtils } from '../../utils/xml.utils'; import { AsyncUtils } from '../../utils/async.utils'; import { DriveFolderItem } from '../../types/drive.types'; -import { MethodNotAllowed } from '../../utils/errors.utils'; export class MKCOLRequestHandler implements WebDavMethodHandler { constructor( @@ -21,8 +20,6 @@ export class MKCOLRequestHandler implements WebDavMethodHandler { const { driveDatabaseManager, driveFolderService } = this.dependencies; const resource = await WebDavUtils.getRequestedResource(req); - if (resource.type === 'file') throw new MethodNotAllowed('Files cannot be created with MKCOL. Use PUT instead.'); - webdavLogger.info(`[MKCOL] Request received for ${resource.type} at ${resource.url}`); const parentResource = await WebDavUtils.getRequestedResource(resource.parentPath); @@ -34,7 +31,7 @@ export class MKCOLRequestHandler implements WebDavMethodHandler { })) as DriveFolderItem; const [createFolder] = driveFolderService.createFolder({ - plainName: resource.name, + plainName: resource.path.base, parentFolderUuid: parentFolderItem.uuid, }); From 8c36d5e56c00186cbae21bb1fcf35eecd9585364 Mon Sep 17 00:00:00 2001 From: larry-internxt Date: Fri, 27 Dec 2024 17:26:43 +0100 Subject: [PATCH 3/9] added chance as testing random examples provider --- package.json | 2 + test/commands/login.test.ts | 4 +- test/fixtures/auth.fixture.ts | 77 +++-- test/fixtures/common.fixture.ts | 13 - test/fixtures/drive-database.fixture.ts | 51 ++-- test/fixtures/drive.fixture.ts | 273 ++++++++++-------- test/fixtures/login.fixture.ts | 23 +- test/fixtures/webdav.fixture.ts | 3 +- test/services/auth.service.test.ts | 26 +- .../drive/drive-folder.service.test.ts | 7 +- .../services/network/download.service.test.ts | 19 +- .../network/network-facade.service.test.ts | 40 ++- test/services/network/upload.service.test.ts | 19 +- test/webdav/handlers/GET.handler.test.ts | 2 + yarn.lock | 10 + 15 files changed, 331 insertions(+), 238 deletions(-) delete mode 100644 test/fixtures/common.fixture.ts diff --git a/package.json b/package.json index 0adee561..b8dee266 100644 --- a/package.json +++ b/package.json @@ -70,6 +70,7 @@ "@internxt/prettier-config": "internxt/prettier-config#v1.0.2", "@oclif/test": "4.1.4", "@openpgp/web-stream-tools": "0.0.11-patch-0", + "@types/chance": "1.1.6", "@types/cli-progress": "3.11.6", "@types/express": "5.0.0", "@types/mime-types": "2.1.4", @@ -77,6 +78,7 @@ "@types/range-parser": "1.2.7", "@vitest/coverage-istanbul": "2.1.8", "@vitest/spy": "2.1.8", + "chance": "1.1.12", "eslint": "9.17.0", "husky": "9.1.7", "lint-staged": "15.2.11",
diff --git a/test/commands/login.test.ts b/test/commands/login.test.ts index 4450bd67..07ec7fd9 100644 --- a/test/commands/login.test.ts +++ b/test/commands/login.test.ts @@ -135,7 +135,7 @@ describe('Login Command', () => { .spyOn(CLIUtils, 'getValueFromFlag') .mockResolvedValueOnce(UserLoginFixture.email) // email .mockResolvedValueOnce(UserLoginFixture.password) // password - .mockResolvedValueOnce(UserLoginFixture.twoFactor) // two factor code + .mockResolvedValueOnce(UserLoginFixture.tfaCode!) // two factor code .mockRejectedValue(new Error()); // default const is2FaNeededSpy = vi.spyOn(AuthService.instance, 'is2FANeeded').mockResolvedValue(true); const doLoginSpy = vi.spyOn(AuthService.instance, 'doLogin').mockResolvedValue(UserCredentialsFixture); @@ -149,7 +149,7 @@ describe('Login Command', () => { const result = await Login.run([ `--email="${UserLoginFixture.email}"`, `--password="${UserLoginFixture.password}"`, - `--twofactor="${UserLoginFixture.twoFactor}"`, + `--twofactor="${UserLoginFixture.tfaCode}"`, ]); expect(result).to.be.deep.equal(expected); diff --git a/test/fixtures/auth.fixture.ts b/test/fixtures/auth.fixture.ts index 2383fe1e..9b4c6484 100644 --- a/test/fixtures/auth.fixture.ts +++ b/test/fixtures/auth.fixture.ts @@ -1,41 +1,68 @@ import { UserSettings } from '@internxt/sdk/dist/shared/types/userSettings'; -import crypto from 'node:crypto'; +import Chance from 'chance'; +import { generateMnemonic } from 'bip39'; + +const randomDataGenerator = new Chance(); export const UserFixture: UserSettings = { - userId: crypto.randomBytes(16).toString('hex'), - uuid: crypto.randomBytes(16).toString('hex'), - email: crypto.randomBytes(16).toString('hex'), - name: crypto.randomBytes(16).toString('hex'), - lastname: crypto.randomBytes(16).toString('hex'), - username: crypto.randomBytes(16).toString('hex'), - bridgeUser: crypto.randomBytes(16).toString('hex'), - bucket: crypto.randomBytes(16).toString('hex'), - backupsBucket: crypto.randomBytes(16).toString('hex'), - root_folder_id: crypto.randomInt(1, 9999), - rootFolderId: crypto.randomBytes(16).toString('hex'), - rootFolderUuid: crypto.randomBytes(16).toString('hex'), + userId: randomDataGenerator.natural({ min: 1 }).toString(), + uuid: randomDataGenerator.guid({ version: 4 }), + email: randomDataGenerator.email(), + name: randomDataGenerator.name(), + lastname: randomDataGenerator.name(), + username: randomDataGenerator.word(), + bridgeUser: randomDataGenerator.email(), + bucket: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), + backupsBucket: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), + root_folder_id: randomDataGenerator.natural({ min: 1 }), + rootFolderId: randomDataGenerator.guid({ version: 4 }), + rootFolderUuid: randomDataGenerator.guid({ version: 4 }), sharedWorkspace: false, - credit: crypto.randomInt(1, 9999), - mnemonic: crypto.randomBytes(16).toString('hex'), - privateKey: crypto.randomBytes(16).toString('hex'), - publicKey: crypto.randomBytes(16).toString('hex'), - revocationKey: crypto.randomBytes(16).toString('hex'), + credit: randomDataGenerator.natural({ min: 1, max: 9999 }), + mnemonic: generateMnemonic(), + privateKey: randomDataGenerator.string({ + length: randomDataGenerator.integer({ min: 500, max: 1000 }), + pool: 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789', + }), + publicKey: randomDataGenerator.string({ + length: randomDataGenerator.integer({ min: 500, max: 1000 }), + pool: 
'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789', + }), + revocationKey: randomDataGenerator.string({ + length: randomDataGenerator.integer({ min: 500, max: 1000 }), + pool: 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789', + }), teams: false, appSumoDetails: null, registerCompleted: true, hasReferralsProgram: false, - createdAt: new Date(), - avatar: crypto.randomBytes(16).toString('hex'), + createdAt: randomDataGenerator.date(), + avatar: randomDataGenerator.url(), emailVerified: true, keys: { ecc: { - privateKey: crypto.randomBytes(16).toString('hex'), - publicKey: crypto.randomBytes(16).toString('hex'), - revocationKey: crypto.randomBytes(16).toString('hex'), + privateKey: randomDataGenerator.string({ + length: randomDataGenerator.integer({ min: 500, max: 1000 }), + pool: 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789', + }), + publicKey: randomDataGenerator.string({ + length: randomDataGenerator.integer({ min: 500, max: 1000 }), + pool: 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789', + }), + revocationKey: randomDataGenerator.string({ + length: randomDataGenerator.integer({ min: 500, max: 1000 }), + pool: 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789', + }), }, kyber: { - privateKyberKey: crypto.randomBytes(16).toString('hex'), - publicKyberKey: crypto.randomBytes(16).toString('hex'), + privateKyberKey: randomDataGenerator.string({ + length: randomDataGenerator.integer({ min: 500, max: 1000 }), + pool: 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789', + }), + publicKyberKey: randomDataGenerator.string({ + length: randomDataGenerator.integer({ min: 500, max: 1000 }), + pool: 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789', + }), }, }, }; diff --git a/test/fixtures/common.fixture.ts b/test/fixtures/common.fixture.ts deleted file mode 100644 index 0fe62d76..00000000 --- a/test/fixtures/common.fixture.ts +++ /dev/null @@ -1,13 +0,0 @@ -export const CommonFixture = { - createObjectId: () => { - const timestamp = ((new Date().getTime() / 1000) | 0).toString(16); - return ( - timestamp + - 'xxxxxxxxxxxxxxxx' - .replace(/[x]/g, function () { - return ((Math.random() * 16) | 0).toString(16); - }) - .toLowerCase() - ); - }, -}; diff --git a/test/fixtures/drive-database.fixture.ts b/test/fixtures/drive-database.fixture.ts index c251a259..18e56eeb 100644 --- a/test/fixtures/drive-database.fixture.ts +++ b/test/fixtures/drive-database.fixture.ts @@ -4,37 +4,46 @@ import { DriveFolder } from '../../src/services/database/drive-folder/drive-fold import { DriveDatabaseManager } from '../../src/services/database/drive-database-manager.service'; import { DriveFileRepository } from '../../src/services/database/drive-file/drive-file.repository'; import { DriveFolderRepository } from '../../src/services/database/drive-folder/drive-folder.repository'; -import { randomInt, randomUUID } from 'node:crypto'; +import Chance from 'chance'; + +const randomDataGenerator = new Chance(); export const getDriveFileDatabaseFixture = (): DriveFile => { + const createdAt = randomDataGenerator.date(); + const name = randomDataGenerator.word(); + const ext = randomDataGenerator.word(); + const filePath = `/${name}.${ext}`; const object: DriveFile = new DriveFile({ - id: randomInt(2000), - name: `file_${new Date().getTime().toString()}`, - uuid: randomUUID(), - relativePath: `file_${new Date().getTime().toString()}.txt`, - createdAt: new Date(), - updatedAt: new Date(), + uuid: randomDataGenerator.guid({ 
version: 4 }), + id: randomDataGenerator.natural({ min: 1 }), + name: name, + type: ext, + relativePath: filePath, + createdAt, + updatedAt: new Date(randomDataGenerator.date({ min: createdAt })), + fileId: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), + bucket: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), + size: randomDataGenerator.natural({ min: 1 }), + folderId: randomDataGenerator.natural({ min: 1 }), + folderUuid: randomDataGenerator.guid({ version: 4 }), status: 'EXISTS', - fileId: `file_id_${new Date().getTime().toString()}`, - folderId: randomInt(2000), - bucket: new Date().getTime().toString(), - size: randomInt(2000), - folderUuid: randomUUID(), - type: 'txt', }); return object; }; export const getDriveFolderDatabaseFixture = (): DriveFolder => { + const createdAt = randomDataGenerator.date(); + const name = randomDataGenerator.word(); + const folderPath = `/${name}`; const object: DriveFolder = new DriveFolder({ - id: randomInt(2000), - name: `folder_${new Date().getTime().toString()}`, - uuid: randomUUID(), - relativePath: '', - createdAt: new Date(), - updatedAt: new Date(), - parentId: randomInt(2000), - parentUuid: randomUUID(), + id: randomDataGenerator.natural({ min: 1 }), + name: name, + uuid: randomDataGenerator.guid({ version: 4 }), + relativePath: folderPath, + createdAt: createdAt, + updatedAt: new Date(randomDataGenerator.date({ min: createdAt })), + parentId: randomDataGenerator.natural({ min: 1 }), + parentUuid: randomDataGenerator.guid({ version: 4 }), status: 'EXISTS', }); diff --git a/test/fixtures/drive.fixture.ts b/test/fixtures/drive.fixture.ts index 3be53093..9292ae5e 100644 --- a/test/fixtures/drive.fixture.ts +++ b/test/fixtures/drive.fixture.ts @@ -7,194 +7,236 @@ import { FileStatus, FolderMeta, } from '@internxt/sdk/dist/drive/storage/types'; -import { getDefaultWordlist, wordlists } from 'bip39'; -import crypto, { randomInt, randomUUID } from 'node:crypto'; import { DriveFileItem, DriveFolderItem } from '../../src/types/drive.types'; import { DriveFile } from '../../src/services/database/drive-file/drive-file.domain'; import { DriveFileAttributes } from '../../src/services/database/drive-file/drive-file.attributes'; import { DriveFolderAttributes } from '../../src/services/database/drive-folder/drive-folder.attributes'; import { DriveFolder } from '../../src/services/database/drive-folder/drive-folder.domain'; +import Chance from 'chance'; -const wordlist = wordlists[getDefaultWordlist()]; -const fileTypes = ['png', 'jpg', 'docx', 'pdf', 'mp4', 'mp3']; +const randomDataGenerator = new Chance(); + +export const FileTypesFixture = [ + 'png', + 'jpg', + 'jpeg', + 'gif', + 'bmp', + 'webp', + 'tiff', + 'svg', + 'docx', + 'xlsx', + 'pptx', + 'pdf', + 'mp4', + 'mkv', + 'mov', + 'avi', + 'mp3', + 'wav', + 'flac', + 'ogg', + 'txt', + 'zip', + 'rar', + 'tar', + 'gz', + 'tgz', + 'iso', + 'exe', + 'apk', + 'deb', +]; export const newFolderItem = (attributes?: Partial): DriveFolderItem => { + const createdAt = randomDataGenerator.date(); const folder: DriveFolderItem = { - id: randomInt(1, 100000), - uuid: randomUUID(), - parentId: randomInt(1, 100000), - bucket: crypto.randomBytes(16).toString('hex'), - name: wordlist[randomInt(wordlist.length)], - encryptedName: crypto.randomBytes(16).toString('hex'), - createdAt: new Date(), - updatedAt: new Date(), + id: randomDataGenerator.natural({ min: 1 }), + uuid: randomDataGenerator.guid({ version: 4 }), + bucket: randomDataGenerator.string({ length: 24, pool: 
'abcdef0123456789' }), + name: randomDataGenerator.word(), + encryptedName: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), + createdAt: createdAt, + updatedAt: new Date(randomDataGenerator.date({ min: createdAt })), status: 'EXISTS', - parentUuid: randomUUID(), + parentId: randomDataGenerator.bool({ likelihood: 50 }) ? randomDataGenerator.natural({ min: 1 }) : null, + parentUuid: randomDataGenerator.bool({ likelihood: 50 }) ? randomDataGenerator.guid({ version: 4 }) : null, }; return { ...folder, ...attributes }; }; export const newFileItem = (attributes?: Partial): DriveFileItem => { + const createdAt = randomDataGenerator.date(); const file: DriveFileItem = { - id: randomInt(1, 100000), - uuid: crypto.randomBytes(16).toString('hex'), - fileId: crypto.randomBytes(16).toString('hex'), - folderId: randomInt(1, 100000), - bucket: crypto.randomBytes(16).toString('hex'), - name: wordlist[randomInt(wordlist.length)], - encryptedName: crypto.randomBytes(16).toString('hex'), - createdAt: new Date(), - updatedAt: new Date(), - size: randomInt(1, 10000), - type: fileTypes[randomInt(fileTypes.length)], + id: randomDataGenerator.natural({ min: 1 }), + uuid: randomDataGenerator.guid({ version: 4 }), + fileId: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), + folderId: randomDataGenerator.natural({ min: 1 }), + bucket: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), + name: randomDataGenerator.word(), + encryptedName: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), + createdAt: createdAt, + updatedAt: new Date(randomDataGenerator.date({ min: createdAt })), + size: randomDataGenerator.natural({ min: 1 }), + type: randomDataGenerator.pickone(FileTypesFixture), status: FileStatus.EXISTS, - folderUuid: randomUUID(), + folderUuid: randomDataGenerator.guid({ version: 4 }), }; return { ...file, ...attributes }; }; export const newFolderMeta = (attributes?: Partial): FolderMeta => { + const createdAt = randomDataGenerator.date(); const folder: FolderMeta = { - bucket: crypto.randomBytes(16).toString('hex'), - createdAt: new Date().toString(), - created_at: new Date().toString(), + bucket: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), + createdAt: createdAt.toString(), + created_at: createdAt.toString(), deleted: false, deletedAt: null, deleted_at: null, encryptVersion: EncryptionVersion.Aes03, encrypt_version: EncryptionVersion.Aes03, - id: randomInt(1, 100000), - name: crypto.randomBytes(16).toString('hex'), + id: randomDataGenerator.natural({ min: 1 }), + name: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), parent: null, - parentId: randomInt(1, 100000), - parent_id: randomInt(1, 100000), - plainName: wordlist[randomInt(wordlist.length)], - plain_name: wordlist[randomInt(wordlist.length)], + parentId: randomDataGenerator.natural({ min: 1 }), + parent_id: randomDataGenerator.natural({ min: 1 }), + plainName: randomDataGenerator.word(), + plain_name: randomDataGenerator.word(), removed: false, removedAt: null, removed_at: null, - size: randomInt(1, 10000), + size: 0, type: 'folder', - updatedAt: new Date().toString(), - updated_at: new Date().toString(), + updatedAt: new Date(randomDataGenerator.date({ min: createdAt })).toString(), + updated_at: new Date(randomDataGenerator.date({ min: createdAt })).toString(), user: null, - userId: randomInt(1, 100000), - user_id: randomInt(1, 100000), - uuid: randomUUID(), - parentUuid: randomUUID(), - parent_uuid: randomUUID(), - 
creation_time: new Date().toString(), - modification_time: new Date().toString(), + userId: randomDataGenerator.natural({ min: 1 }), + user_id: randomDataGenerator.natural({ min: 1 }), + uuid: randomDataGenerator.guid({ version: 4 }), + parentUuid: randomDataGenerator.guid({ version: 4 }), + parent_uuid: randomDataGenerator.guid({ version: 4 }), + creation_time: new Date(randomDataGenerator.date({ min: createdAt })).toString(), + modification_time: new Date(randomDataGenerator.date({ min: createdAt })).toString(), }; return { ...folder, ...attributes }; }; export const newFileMeta = (attributes?: Partial): FileMeta => { + const createdAt = randomDataGenerator.date(); const file: FileMeta = { - bucket: crypto.randomBytes(16).toString('hex'), - createdAt: new Date().toString(), - created_at: new Date().toString(), + bucket: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), + createdAt: createdAt.toString(), + created_at: createdAt.toString(), deleted: false, deletedAt: null, encrypt_version: EncryptionVersion.Aes03, - fileId: crypto.randomBytes(16).toString('hex'), - folderId: randomInt(1, 100000), - folder_id: randomInt(1, 100000), - id: randomInt(1, 100000), - name: crypto.randomBytes(16).toString('hex'), - plain_name: wordlist[randomInt(wordlist.length)], - plainName: wordlist[randomInt(wordlist.length)], - size: randomInt(1, 10000), - type: fileTypes[randomInt(fileTypes.length)], - updatedAt: new Date().toString(), + fileId: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), + folderId: randomDataGenerator.natural({ min: 1 }), + folder_id: randomDataGenerator.natural({ min: 1 }), + id: randomDataGenerator.natural({ min: 1 }), + name: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), + plain_name: randomDataGenerator.word(), + plainName: randomDataGenerator.word(), + size: randomDataGenerator.natural({ min: 1 }), + type: randomDataGenerator.pickone(FileTypesFixture), + updatedAt: new Date(randomDataGenerator.date({ min: createdAt })).toString(), status: FileStatus.EXISTS, thumbnails: [], currentThumbnail: null, - uuid: crypto.randomBytes(16).toString('hex'), - folderUuid: crypto.randomBytes(16).toString('hex'), + uuid: randomDataGenerator.guid({ version: 4 }), + folderUuid: randomDataGenerator.guid({ version: 4 }), }; return { ...file, ...attributes }; }; export const newPaginatedFolder = (attributes?: Partial): FetchPaginatedFolder => { + const createdAt = randomDataGenerator.date(); const folder: FetchPaginatedFolder = { - bucket: crypto.randomBytes(16).toString('hex'), - createdAt: new Date(), + bucket: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), + createdAt: createdAt, deleted: false, deletedAt: null, encryptVersion: EncryptionVersion.Aes03, - id: randomInt(1, 100000), - name: crypto.randomBytes(16).toString('hex'), + id: randomDataGenerator.natural({ min: 1 }), + name: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), parent: null, - parentId: randomInt(1, 100000), - plainName: wordlist[randomInt(wordlist.length)], + parentId: randomDataGenerator.natural({ min: 1 }), + plainName: randomDataGenerator.word(), removed: false, removedAt: null, - updatedAt: new Date(), + updatedAt: new Date(randomDataGenerator.date({ min: createdAt })), user: null, - userId: randomInt(1, 100000), - uuid: randomUUID(), - parentUuid: randomUUID(), + userId: randomDataGenerator.natural({ min: 1 }), + uuid: randomDataGenerator.guid({ version: 4 }), + parentUuid: randomDataGenerator.guid({ version: 4 }), }; 
return { ...folder, ...attributes }; }; export const newPaginatedFile = (attributes?: Partial): FetchPaginatedFile => { + const createdAt = randomDataGenerator.date(); const file: FetchPaginatedFile = { - bucket: crypto.randomBytes(16).toString('hex'), - createdAt: new Date(), + bucket: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), + createdAt: createdAt, deleted: false, deletedAt: null, encryptVersion: EncryptionVersion.Aes03, - fileId: crypto.randomBytes(16).toString('hex'), - folderId: randomInt(1, 100000), - id: randomInt(1, 100000), - name: crypto.randomBytes(16).toString('hex'), - plainName: wordlist[randomInt(wordlist.length)], - size: BigInt(randomInt(1, 10000)), - type: fileTypes[randomInt(fileTypes.length)], - updatedAt: new Date(), + fileId: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), + folderId: randomDataGenerator.natural({ min: 1 }), + id: randomDataGenerator.natural({ min: 1 }), + name: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), + plainName: randomDataGenerator.word(), + size: BigInt(randomDataGenerator.natural({ min: 1 })), + type: randomDataGenerator.pickone(FileTypesFixture), + updatedAt: new Date(randomDataGenerator.date({ min: createdAt })), status: FileStatus.EXISTS, thumbnails: [], - uuid: randomUUID(), - folderUuid: randomUUID(), + uuid: randomDataGenerator.guid({ version: 4 }), + folderUuid: randomDataGenerator.guid({ version: 4 }), removed: false, removedAt: null, - userId: randomInt(1, 100000), - modificationTime: new Date(), + userId: randomDataGenerator.natural({ min: 1 }), + modificationTime: new Date(randomDataGenerator.date({ min: createdAt })), }; return { ...file, ...attributes }; }; export const newDriveFolder = (attributes?: Partial): DriveFolder => { + const createdAt = randomDataGenerator.date(); + const name = randomDataGenerator.word(); const folder: DriveFolderAttributes = { - id: randomInt(1, 100000), - name: crypto.randomBytes(16).toString('hex'), - uuid: crypto.randomBytes(16).toString('hex'), - relativePath: crypto.randomBytes(16).toString('hex'), - parentId: randomInt(1, 100000), - parentUuid: crypto.randomBytes(16).toString('hex'), - createdAt: new Date(), - updatedAt: new Date(), + id: randomDataGenerator.natural({ min: 1 }), + name: name, + uuid: randomDataGenerator.guid({ version: 4 }), + relativePath: `/${name}`, + parentId: randomDataGenerator.natural({ min: 1 }), + parentUuid: randomDataGenerator.guid({ version: 4 }), + createdAt: createdAt, + updatedAt: new Date(randomDataGenerator.date({ min: createdAt })), status: FileStatus.EXISTS, }; return new DriveFolder({ ...folder, ...attributes }); }; export const newDriveFile = (attributes?: Partial): DriveFile => { + const createdAt = randomDataGenerator.date(); + const name = randomDataGenerator.word(); + const type = randomDataGenerator.pickone(FileTypesFixture); const file: DriveFileAttributes = { - id: randomInt(1, 100000), - name: crypto.randomBytes(16).toString('hex'), - type: fileTypes[randomInt(fileTypes.length)], - uuid: crypto.randomBytes(16).toString('hex'), - fileId: crypto.randomBytes(16).toString('hex'), - folderId: randomInt(1, 100000), - folderUuid: crypto.randomBytes(16).toString('hex'), - bucket: crypto.randomBytes(16).toString('hex'), - relativePath: crypto.randomBytes(16).toString('hex'), - createdAt: new Date(), - updatedAt: new Date(), - size: randomInt(1, 10000), + id: randomDataGenerator.natural({ min: 1 }), + name: name, + type: type, + uuid: randomDataGenerator.guid({ version: 4 }), + fileId: 
randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), + folderId: randomDataGenerator.natural({ min: 1 }), + folderUuid: randomDataGenerator.guid({ version: 4 }), + bucket: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), + relativePath: `/${name}.${type}`, + createdAt: createdAt, + updatedAt: new Date(randomDataGenerator.date({ min: createdAt })), + size: randomDataGenerator.natural({ min: 1 }), status: FileStatus.EXISTS, }; return new DriveFile({ ...file, ...attributes }); @@ -213,24 +255,25 @@ export const generateSubcontent = (uuid: string, countFolders: number, countFile }; export const newCreateFolderResponse = (attributes?: Partial): CreateFolderResponse => { + const createdAt = randomDataGenerator.date(); const folder: CreateFolderResponse = { - id: randomInt(1, 100000), - parentId: randomInt(1, 100000), - parentUuid: randomUUID(), - name: crypto.randomBytes(16).toString('hex'), - bucket: crypto.randomBytes(16).toString('hex'), - userId: randomInt(1, 100000), + id: randomDataGenerator.natural({ min: 1 }), + parentId: randomDataGenerator.natural({ min: 1 }), + parentUuid: randomDataGenerator.guid({ version: 4 }), + name: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), + bucket: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), + userId: randomDataGenerator.natural({ min: 1 }), encryptVersion: EncryptionVersion.Aes03, deleted: false, deletedAt: null, - createdAt: new Date(), - updatedAt: new Date(), - uuid: randomUUID(), - plainName: wordlist[randomInt(wordlist.length)], + createdAt: createdAt, + updatedAt: new Date(randomDataGenerator.date({ min: createdAt })), + uuid: randomDataGenerator.guid({ version: 4 }), + plainName: randomDataGenerator.word(), removed: false, removedAt: null, - creationTime: new Date(), - modificationTime: new Date(), + creationTime: new Date(randomDataGenerator.date({ min: createdAt })), + modificationTime: new Date(randomDataGenerator.date({ min: createdAt })), }; return { ...folder, ...attributes }; }; diff --git a/test/fixtures/login.fixture.ts b/test/fixtures/login.fixture.ts index 2b942ba4..ec2cd755 100644 --- a/test/fixtures/login.fixture.ts +++ b/test/fixtures/login.fixture.ts @@ -1,23 +1,26 @@ -import { randomBytes, randomInt } from 'node:crypto'; import { UserFixture } from './auth.fixture'; import { LoginCredentials } from '../../src/types/command.types'; import { SdkManagerApiSecurity } from '../../src/services/sdk-manager.service'; +import Chance from 'chance'; +import { LoginDetails } from '@internxt/sdk'; -export const UserLoginFixture = { - email: `${randomBytes(8).toString('hex')}@${randomBytes(8).toString('hex')}.com`, - password: randomBytes(16).toString('hex'), - twoFactor: randomInt(0, 999999).toString().padStart(6, '0'), +const randomDataGenerator = new Chance(); + +export const UserLoginFixture: LoginDetails = { + email: UserFixture.email, + password: randomDataGenerator.string({ length: 32 }), + tfaCode: randomDataGenerator.natural({ min: 0, max: 999999 }).toString().padStart(6, '0'), }; export const ApiSecurityFixture: SdkManagerApiSecurity = { - newToken: randomBytes(16).toString('hex'), - token: randomBytes(16).toString('hex'), + newToken: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), + token: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), }; export const UserCredentialsFixture: LoginCredentials = { - user: { ...UserFixture, email: UserLoginFixture.email }, + user: UserFixture, token: 
ApiSecurityFixture.token, newToken: ApiSecurityFixture.newToken, - lastLoggedInAt: randomBytes(16).toString('hex'), - lastTokenRefreshAt: randomBytes(16).toString('hex'), + lastLoggedInAt: randomDataGenerator.date().toISOString(), + lastTokenRefreshAt: randomDataGenerator.date().toISOString(), }; diff --git a/test/fixtures/webdav.fixture.ts b/test/fixtures/webdav.fixture.ts index f9301b4a..91e9500d 100644 --- a/test/fixtures/webdav.fixture.ts +++ b/test/fixtures/webdav.fixture.ts @@ -5,11 +5,10 @@ import { WebDavRequestedResource } from '../../src/types/webdav.types'; import path from 'node:path'; export const createWebDavRequestFixture = (request: T): T & Request => { - const userSettings = UserSettingsFixture; return getMockReq({ // @ts-expect-error - User is not defined in the Request type from the sinon-express-mock package user: request.user ?? { - rootFolderId: userSettings.root_folder_id, + rootFolderId: UserSettingsFixture.root_folder_id, }, ...request, }); diff --git a/test/services/auth.service.test.ts b/test/services/auth.service.test.ts index 3173bc5b..778945c6 100644 --- a/test/services/auth.service.test.ts +++ b/test/services/auth.service.test.ts @@ -1,6 +1,6 @@ import { beforeEach, describe, expect, it, vi } from 'vitest'; import crypto from 'node:crypto'; -import { Auth, LoginDetails, SecurityDetails } from '@internxt/sdk'; +import { Auth, SecurityDetails } from '@internxt/sdk'; import { AuthService } from '../../src/services/auth.service'; import { KeysService } from '../../src/services/keys.service'; import { CryptoService } from '../../src/services/crypto.service'; @@ -14,7 +14,7 @@ import { LoginCredentials, MissingCredentialsError, } from '../../src/types/command.types'; -import { UserCredentialsFixture } from '../fixtures/login.fixture'; +import { UserCredentialsFixture, UserLoginFixture } from '../fixtures/login.fixture'; import { fail } from 'node:assert'; describe('Auth service', () => { @@ -24,9 +24,7 @@ describe('Auth service', () => { it('When user logs in, then login user credentials are generated', async () => { const loginResponse = { - token: crypto.randomBytes(16).toString('hex'), - newToken: crypto.randomBytes(16).toString('hex'), - user: UserFixture, + ...UserCredentialsFixture, userTeam: null, }; const mockDate = new Date().toISOString(); @@ -39,11 +37,7 @@ describe('Auth service', () => { vi.spyOn(CryptoService.instance, 'decryptTextWithKey').mockReturnValue(loginResponse.user.mnemonic); vi.spyOn(Date.prototype, 'toISOString').mockReturnValue(mockDate); - const responseLogin = await AuthService.instance.doLogin( - loginResponse.user.email, - crypto.randomBytes(16).toString('hex'), - '', - ); + const responseLogin = await AuthService.instance.doLogin(UserLoginFixture.email, UserLoginFixture.password, ''); const expectedResponseLogin: LoginCredentials = { user: { ...loginResponse.user, privateKey: Buffer.from(loginResponse.user.privateKey).toString('base64') }, @@ -56,17 +50,11 @@ describe('Auth service', () => { }); it('When user logs in and credentials are not correct, then an error is thrown', async () => { - const loginDetails: LoginDetails = { - email: crypto.randomBytes(16).toString('hex'), - password: crypto.randomBytes(8).toString('hex'), - tfaCode: crypto.randomInt(1, 999999).toString().padStart(6, '0'), - }; - const loginStub = vi.spyOn(Auth.prototype, 'login').mockRejectedValue(new Error('Login failed')); vi.spyOn(SdkManager.instance, 'getAuth').mockReturnValue(Auth.prototype); try { - await AuthService.instance.doLogin(loginDetails.email, 
loginDetails.password, loginDetails.tfaCode || ''); + await AuthService.instance.doLogin(UserLoginFixture.email, UserLoginFixture.password, UserLoginFixture.tfaCode); fail('Expected function to throw an error, but it did not.'); } catch { /* no op */ @@ -75,7 +63,7 @@ describe('Auth service', () => { }); it('When two factor authentication is enabled, then it is returned from is2FANeeded functionality', async () => { - const email = crypto.randomBytes(16).toString('hex'); + const email = UserLoginFixture.email; const securityDetails: SecurityDetails = { encryptedSalt: crypto.randomBytes(16).toString('hex'), tfaEnabled: true, @@ -90,7 +78,7 @@ describe('Auth service', () => { }); it('When email is not correct when checking two factor authentication, then an error is thrown', async () => { - const email = crypto.randomBytes(16).toString('hex'); + const email = UserLoginFixture.email; const securityStub = vi.spyOn(Auth.prototype, 'securityDetails').mockRejectedValue(new Error()); vi.spyOn(SdkManager.instance, 'getAuth').mockReturnValue(Auth.prototype); diff --git a/test/services/drive/drive-folder.service.test.ts b/test/services/drive/drive-folder.service.test.ts index 697af896..43a5f000 100644 --- a/test/services/drive/drive-folder.service.test.ts +++ b/test/services/drive/drive-folder.service.test.ts @@ -1,5 +1,4 @@ import { beforeEach, describe, expect, it, vi } from 'vitest'; -import { randomUUID } from 'node:crypto'; import { Storage } from '@internxt/sdk/dist/drive'; import { DriveFolderService } from '../../../src/services/drive/drive-folder.service'; import { SdkManager } from '../../../src/services/sdk-manager.service'; @@ -41,8 +40,8 @@ describe('Drive folder Service', () => { }); it('When folder content is requested, then all its subfolders and subfiles are returned', async () => { - const parentUuid = randomUUID(); - const subContentFixture = generateSubcontent(parentUuid, 112, 117); //112 subfolders and 117 subfiles + const parentFolder = newFolderMeta(); + const subContentFixture = generateSubcontent(parentFolder.uuid, 112, 117); //112 subfolders and 117 subfiles const requestCancelerMock = { cancel: () => {} }; vi.spyOn(Storage.prototype, 'getFolderFoldersByUuid').mockImplementation((_: string, offset) => { @@ -73,7 +72,7 @@ describe('Drive folder Service', () => { }); vi.spyOn(SdkManager.instance, 'getStorage').mockReturnValue(Storage.prototype); - const resultContent = await sut.getFolderContent(parentUuid); + const resultContent = await sut.getFolderContent(parentFolder.uuid); expect(subContentFixture).to.deep.equal(resultContent); }); diff --git a/test/services/network/download.service.test.ts b/test/services/network/download.service.test.ts index a577764c..240bd9ae 100644 --- a/test/services/network/download.service.test.ts +++ b/test/services/network/download.service.test.ts @@ -2,16 +2,18 @@ import { beforeEach, describe, expect, it, vi } from 'vitest'; import { DownloadService } from '../../../src/services/network/download.service'; import { Readable } from 'node:stream'; import axios from 'axios'; +import Chance from 'chance'; describe('Download Service', () => { const sut = DownloadService.instance; + const randomDataGenerator = new Chance(); beforeEach(() => { vi.restoreAllMocks(); }); it('When a file is downloaded, should return a ReadableStream', async () => { - const fileContent = Buffer.from('file-content'); + const fileContent = Buffer.from(randomDataGenerator.string({ length: 64 })); const readableContent = new Readable({ read() { this.push(fileContent); @@ 
-20,7 +22,7 @@ describe('Download Service', () => { }); vi.spyOn(axios, 'get').mockResolvedValue({ data: readableContent }); - const readable = await sut.downloadFile('https://example.com/file', {}); + const readable = await sut.downloadFile('https://example.com/file', fileContent.length, {}); const reader = readable.getReader(); @@ -30,7 +32,7 @@ describe('Download Service', () => { }); it('When a file is downloaded, progress should be reported', async () => { - const fileContent = Buffer.from('file-content'); + const fileContent = Buffer.from(randomDataGenerator.string({ length: 64 })); const options = { progressCallback: vi.fn(), }; @@ -42,12 +44,17 @@ describe('Download Service', () => { }); vi.spyOn(axios, 'get').mockImplementation((_, config) => { - config?.onDownloadProgress?.({ loaded: 100, total: 100, bytes: 100, lengthComputable: true }); + config?.onDownloadProgress?.({ + loaded: fileContent.length, + total: fileContent.length, + bytes: fileContent.length, + lengthComputable: true, + }); return Promise.resolve({ data: readableContent }); }); - await sut.downloadFile('https://example.com/file', options); + await sut.downloadFile('https://example.com/file', fileContent.length, options); - expect(options.progressCallback).toHaveBeenCalledWith(1); + expect(options.progressCallback).toHaveBeenCalledWith(100); }); }); diff --git a/test/services/network/network-facade.service.test.ts b/test/services/network/network-facade.service.test.ts index 7998bbc6..23f9dd74 100644 --- a/test/services/network/network-facade.service.test.ts +++ b/test/services/network/network-facade.service.test.ts @@ -4,12 +4,14 @@ import { NetworkFacade } from '../../../src/services/network/network-facade.serv import { SdkManager } from '../../../src/services/sdk-manager.service'; import path from 'node:path'; import { createReadStream } from 'node:fs'; +import fs from 'node:fs/promises'; import { UploadService } from '../../../src/services/network/upload.service'; import { CryptoService } from '../../../src/services/crypto.service'; import { DownloadService } from '../../../src/services/network/download.service'; import { Readable } from 'node:stream'; import axios from 'axios'; import { fail } from 'node:assert'; +import { UserFixture } from '../../fixtures/auth.fixture'; describe('Network Facade Service', () => { beforeEach(() => { @@ -31,6 +33,7 @@ describe('Network Facade Service', () => { CryptoService.instance, ); const file = path.join(process.cwd(), 'test/fixtures/test-content.fixture.txt'); + const fileStat = await fs.stat(file); const readStream = createReadStream(file); const options = { progressCallback: vi.fn(), @@ -38,9 +41,9 @@ describe('Network Facade Service', () => { }; const result = await sut.uploadFromStream( - 'f1858bc9675f9e4f7ab29429', - 'animal fog wink trade december thumb sight cousin crunch plunge captain enforce letter creek text', - 100, + UserFixture.bucket, + UserFixture.mnemonic, + fileStat.size, readStream, options, ); @@ -57,6 +60,7 @@ describe('Network Facade Service', () => { CryptoService.instance, ); const file = path.join(process.cwd(), 'test/fixtures/test-content.fixture.txt'); + const fileStat = await fs.stat(file); const readStream = createReadStream(file); const options = { progressCallback: vi.fn(), @@ -65,9 +69,9 @@ describe('Network Facade Service', () => { vi.spyOn(NetworkUpload, 'uploadFile').mockResolvedValue('uploaded_file_id'); const [executeUpload] = await sut.uploadFromStream( - 'f1858bc9675f9e4f7ab29429', - 'animal fog wink trade december thumb sight cousin 
crunch plunge captain enforce letter creek text', - 100, + UserFixture.bucket, + UserFixture.mnemonic, + fileStat.size, readStream, options, ); @@ -92,7 +96,7 @@ describe('Network Facade Service', () => { index: '29f07b8914d8353b663ab783f4bbe9950fdde680a69524405790cecca9c549f9', bucket: bucket, created: new Date(), - size: 100, + size: encryptedContent.length, shards: [ { url: 'https://doesnotexists.com/file', @@ -120,6 +124,7 @@ describe('Network Facade Service', () => { // eslint-disable-next-line max-len 'index course habit soon assist dragon tragic helmet salute stuff later twice consider grit pulse cement obvious trick sponsor stereo hello win royal more', 'f1858bc9675f9e4f7ab29429', + encryptedContent.length, writable, ); @@ -131,7 +136,7 @@ describe('Network Facade Service', () => { it('When a file download is aborted, should abort the download', async () => { const encryptedContent = Buffer.from('b6ccfa381c150f3a4b65245bffa4d84087', 'hex'); - const bucket = 'cd8abd7e8b13081660b58dbe'; + const readableContent = new ReadableStream({ pull(controller) { controller.enqueue(encryptedContent); @@ -142,9 +147,9 @@ describe('Network Facade Service', () => { const networkMock = getNetworkMock(); vi.spyOn(networkMock, 'getDownloadLinks').mockResolvedValue({ index: '29f07b8914d8353b663ab783f4bbe9950fdde680a69524405790cecca9c549f9', - bucket: bucket, + bucket: UserFixture.bucket, created: new Date(), - size: 100, + size: encryptedContent.length, shards: [ { url: 'https://doesnotexists.com/file', @@ -162,10 +167,11 @@ describe('Network Facade Service', () => { const writable = new WritableStream(); const [executeDownload, abort] = await sut.downloadToStream( - bucket, + UserFixture.bucket, // eslint-disable-next-line max-len 'index course habit soon assist dragon tragic helmet salute stuff later twice consider grit pulse cement obvious trick sponsor stereo hello win royal more', 'f1858bc9675f9e4f7ab29429', + encryptedContent.length, writable, ); @@ -194,7 +200,7 @@ describe('Network Facade Service', () => { index: '29f07b8914d8353b663ab783f4bbe9950fdde680a69524405790cecca9c549f9', bucket: bucket, created: new Date(), - size: 100, + size: encryptedContent.length, shards: [ { url: 'https://doesnotexists.com/file', @@ -214,7 +220,12 @@ describe('Network Facade Service', () => { const options = { progressCallback: vi.fn() }; vi.spyOn(axios, 'get').mockImplementation((_, config) => { - config?.onDownloadProgress?.({ loaded: 100, total: 100, bytes: 100, lengthComputable: true }); + config?.onDownloadProgress?.({ + loaded: encryptedContent.length, + total: encryptedContent.length, + bytes: encryptedContent.length, + lengthComputable: true, + }); return Promise.resolve({ data: readableContent }); }); @@ -223,6 +234,7 @@ describe('Network Facade Service', () => { // eslint-disable-next-line max-len 'index course habit soon assist dragon tragic helmet salute stuff later twice consider grit pulse cement obvious trick sponsor stereo hello win royal more', 'f1858bc9675f9e4f7ab29429', + encryptedContent.length, writable, undefined, options, @@ -230,6 +242,6 @@ describe('Network Facade Service', () => { await executeDownload; - expect(options.progressCallback).toHaveBeenCalledWith(1); + expect(options.progressCallback).toHaveBeenCalledWith(100); }); }); diff --git a/test/services/network/upload.service.test.ts b/test/services/network/upload.service.test.ts index 3eac29bb..ed4fd057 100644 --- a/test/services/network/upload.service.test.ts +++ b/test/services/network/upload.service.test.ts @@ -1,10 +1,12 @@ 
import { beforeEach, describe, expect, it, vi } from 'vitest'; import { UploadService } from '../../../src/services/network/upload.service'; import nock from 'nock'; +import Chance from 'chance'; import { Readable } from 'node:stream'; describe('Upload Service', () => { const sut = UploadService.instance; + const randomDataGenerator = new Chance(); beforeEach(() => { vi.restoreAllMocks(); @@ -12,9 +14,10 @@ describe('Upload Service', () => { it('When a file is uploaded and etag is missing, should throw an error', async () => { const url = 'https://example.com/upload'; + const fileContent = Buffer.from(randomDataGenerator.string({ length: 64 })); const data = new Readable({ read() { - this.push('test content'); + this.push(fileContent); this.push(null); }, }); @@ -26,7 +29,7 @@ describe('Upload Service', () => { nock('https://example.com').put('/upload').reply(200, '', {}); try { - await sut.uploadFile(url, data, options); + await sut.uploadFile(url, fileContent.length, data, options); } catch (error) { expect((error as Error).message).to.contain('Missing Etag'); } @@ -34,9 +37,10 @@ describe('Upload Service', () => { it('When a file is uploaded and etag is returned, the etag should be returned', async () => { const url = 'https://example.com/upload'; + const fileContent = Buffer.from(randomDataGenerator.string({ length: 64 })); const data = new Readable({ read() { - this.push('test content'); + this.push(fileContent); this.push(null); }, }); @@ -49,15 +53,16 @@ describe('Upload Service', () => { etag: 'test-etag', }); - const result = await sut.uploadFile(url, data, options); + const result = await sut.uploadFile(url, fileContent.length, data, options); expect(result.etag).to.be.equal('test-etag'); }); it('When a file is uploaded, should update the progress', async () => { const url = 'https://example.com/upload'; + const fileContent = Buffer.from(randomDataGenerator.string({ length: 64 })); const data = new Readable({ read() { - this.push('test content'); + this.push(fileContent); this.push(null); }, }); @@ -70,8 +75,8 @@ describe('Upload Service', () => { etag: 'test-etag', }); - await sut.uploadFile(url, data, options); - expect(options.progressCallback).toHaveBeenCalledWith(1); + await sut.uploadFile(url, fileContent.length, data, options); + expect(options.progressCallback).toHaveBeenCalledWith(100); }); it('When a file is uploaded and the upload is aborted, should cancel the request', async () => { diff --git a/test/webdav/handlers/GET.handler.test.ts b/test/webdav/handlers/GET.handler.test.ts index 2be73414..145c928c 100644 --- a/test/webdav/handlers/GET.handler.test.ts +++ b/test/webdav/handlers/GET.handler.test.ts @@ -134,6 +134,7 @@ describe('GET request handler', () => { mockFile.bucket, mockAuthDetails.user.mnemonic, mockFile.fileId, + mockFile.size, expect.any(Object), undefined, ); @@ -205,6 +206,7 @@ describe('GET request handler', () => { mockFile.bucket, mockAuthDetails.user.mnemonic, mockFile.fileId, + mockSize - rangeStart, expect.any(Object), expectedRangeOptions, ); diff --git a/yarn.lock b/yarn.lock index a2da60fb..a238c5c2 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2834,6 +2834,11 @@ "@types/connect" "*" "@types/node" "*" +"@types/chance@1.1.6": + version "1.1.6" + resolved "https://registry.yarnpkg.com/@types/chance/-/chance-1.1.6.tgz#2fe3de58742629602c3fbab468093b27207f04ad" + integrity sha512-V+pm3stv1Mvz8fSKJJod6CglNGVqEQ6OyuqitoDkWywEODM/eJd1eSuIp9xt6DrX8BWZ2eDSIzbw1tPCUTvGbQ== + "@types/cli-progress@3.11.6": version "3.11.6" resolved 
"https://registry.yarnpkg.com/@types/cli-progress/-/cli-progress-3.11.6.tgz#94b334ebe4190f710e51c1bf9b4fedb681fa9e45" @@ -3716,6 +3721,11 @@ chalk@~5.3.0: resolved "https://registry.yarnpkg.com/chalk/-/chalk-5.3.0.tgz#67c20a7ebef70e7f3970a01f90fa210cb6860385" integrity sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w== +chance@1.1.12: + version "1.1.12" + resolved "https://registry.yarnpkg.com/chance/-/chance-1.1.12.tgz#6a263cf241674af50a1b903357f9d328a6f252fb" + integrity sha512-vVBIGQVnwtUG+SYe0ge+3MvF78cvSpuCOEUJr7sVEk2vSBuMW6OXNJjSzdtzrlxNUEaoqH2GBd5Y/+18BEB01Q== + change-case@^4: version "4.1.2" resolved "https://registry.yarnpkg.com/change-case/-/change-case-4.1.2.tgz#fedfc5f136045e2398c0410ee441f95704641e12" From 793b72bee7e166ff392df11285fe120ef5537f11 Mon Sep 17 00:00:00 2001 From: larry-internxt Date: Tue, 7 Jan 2025 13:30:18 +0100 Subject: [PATCH 4/9] added app version to log --- src/webdav/webdav-server.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/webdav/webdav-server.ts b/src/webdav/webdav-server.ts index af8e1714..9ce2ebb7 100644 --- a/src/webdav/webdav-server.ts +++ b/src/webdav/webdav-server.ts @@ -170,8 +170,8 @@ export class WebDavServer { server.requestTimeout = 15 * 60 * 1000; server.listen(configs.port, () => { - webdavLogger.info( - `Internxt WebDav server listening at ${configs.protocol}://${ConfigService.WEBDAV_LOCAL_URL}:${configs.port}`, + webdavLogger.info(`Internxt ${SdkManager.getAppDetails().clientVersion} WebDav server ` + + `listening at ${configs.protocol}://${ConfigService.WEBDAV_LOCAL_URL}:${configs.port}`, ); }); }; From cf660c7d37731d8d480cb2b9e269fd4af9f1b338 Mon Sep 17 00:00:00 2001 From: larry-internxt Date: Mon, 13 Jan 2025 16:30:24 +0100 Subject: [PATCH 5/9] Revert "added chance as testing random examples provider" This reverts commit 8c36d5e56c00186cbae21bb1fcf35eecd9585364. 
--- package.json | 2 - test/commands/login.test.ts | 4 +- test/fixtures/auth.fixture.ts | 77 ++--- test/fixtures/common.fixture.ts | 13 + test/fixtures/drive-database.fixture.ts | 51 ++-- test/fixtures/drive.fixture.ts | 273 ++++++++---------- test/fixtures/login.fixture.ts | 23 +- test/fixtures/webdav.fixture.ts | 3 +- test/services/auth.service.test.ts | 26 +- .../drive/drive-folder.service.test.ts | 7 +- .../services/network/download.service.test.ts | 19 +- .../network/network-facade.service.test.ts | 40 +-- test/services/network/upload.service.test.ts | 19 +- test/webdav/handlers/GET.handler.test.ts | 2 - yarn.lock | 10 - 15 files changed, 238 insertions(+), 331 deletions(-) create mode 100644 test/fixtures/common.fixture.ts diff --git a/package.json b/package.json index b8dee266..0adee561 100644 --- a/package.json +++ b/package.json @@ -70,7 +70,6 @@ "@internxt/prettier-config": "internxt/prettier-config#v1.0.2", "@oclif/test": "4.1.4", "@openpgp/web-stream-tools": "0.0.11-patch-0", - "@types/chance": "1.1.6", "@types/cli-progress": "3.11.6", "@types/express": "5.0.0", "@types/mime-types": "2.1.4", @@ -78,7 +77,6 @@ "@types/range-parser": "1.2.7", "@vitest/coverage-istanbul": "2.1.8", "@vitest/spy": "2.1.8", - "chance": "1.1.12", "eslint": "9.17.0", "husky": "9.1.7", "lint-staged": "15.2.11", diff --git a/test/commands/login.test.ts b/test/commands/login.test.ts index 07ec7fd9..4450bd67 100644 --- a/test/commands/login.test.ts +++ b/test/commands/login.test.ts @@ -135,7 +135,7 @@ describe('Login Command', () => { .spyOn(CLIUtils, 'getValueFromFlag') .mockResolvedValueOnce(UserLoginFixture.email) // email .mockResolvedValueOnce(UserLoginFixture.password) // password - .mockResolvedValueOnce(UserLoginFixture.tfaCode!) // two factor code + .mockResolvedValueOnce(UserLoginFixture.twoFactor) // two factor code .mockRejectedValue(new Error()); // default const is2FaNeededSpy = vi.spyOn(AuthService.instance, 'is2FANeeded').mockResolvedValue(true); const doLoginSpy = vi.spyOn(AuthService.instance, 'doLogin').mockResolvedValue(UserCredentialsFixture); @@ -149,7 +149,7 @@ describe('Login Command', () => { const result = await Login.run([ `--email="${UserLoginFixture.email}"`, `--password="${UserLoginFixture.password}"`, - `--twofactor="${UserLoginFixture.tfaCode}"`, + `--twofactor="${UserLoginFixture.twoFactor}"`, ]); expect(result).to.be.deep.equal(expected); diff --git a/test/fixtures/auth.fixture.ts b/test/fixtures/auth.fixture.ts index 9b4c6484..2383fe1e 100644 --- a/test/fixtures/auth.fixture.ts +++ b/test/fixtures/auth.fixture.ts @@ -1,68 +1,41 @@ import { UserSettings } from '@internxt/sdk/dist/shared/types/userSettings'; -import Chance from 'chance'; -import { generateMnemonic } from 'bip39'; - -const randomDataGenerator = new Chance(); +import crypto from 'node:crypto'; export const UserFixture: UserSettings = { - userId: randomDataGenerator.natural({ min: 1 }).toString(), - uuid: randomDataGenerator.guid({ version: 4 }), - email: randomDataGenerator.email(), - name: randomDataGenerator.name(), - lastname: randomDataGenerator.name(), - username: randomDataGenerator.word(), - bridgeUser: randomDataGenerator.email(), - bucket: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), - backupsBucket: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), - root_folder_id: randomDataGenerator.natural({ min: 1 }), - rootFolderId: randomDataGenerator.guid({ version: 4 }), - rootFolderUuid: randomDataGenerator.guid({ version: 4 }), + userId: 
crypto.randomBytes(16).toString('hex'), + uuid: crypto.randomBytes(16).toString('hex'), + email: crypto.randomBytes(16).toString('hex'), + name: crypto.randomBytes(16).toString('hex'), + lastname: crypto.randomBytes(16).toString('hex'), + username: crypto.randomBytes(16).toString('hex'), + bridgeUser: crypto.randomBytes(16).toString('hex'), + bucket: crypto.randomBytes(16).toString('hex'), + backupsBucket: crypto.randomBytes(16).toString('hex'), + root_folder_id: crypto.randomInt(1, 9999), + rootFolderId: crypto.randomBytes(16).toString('hex'), + rootFolderUuid: crypto.randomBytes(16).toString('hex'), sharedWorkspace: false, - credit: randomDataGenerator.natural({ min: 1, max: 9999 }), - mnemonic: generateMnemonic(), - privateKey: randomDataGenerator.string({ - length: randomDataGenerator.integer({ min: 500, max: 1000 }), - pool: 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789', - }), - publicKey: randomDataGenerator.string({ - length: randomDataGenerator.integer({ min: 500, max: 1000 }), - pool: 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789', - }), - revocationKey: randomDataGenerator.string({ - length: randomDataGenerator.integer({ min: 500, max: 1000 }), - pool: 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789', - }), + credit: crypto.randomInt(1, 9999), + mnemonic: crypto.randomBytes(16).toString('hex'), + privateKey: crypto.randomBytes(16).toString('hex'), + publicKey: crypto.randomBytes(16).toString('hex'), + revocationKey: crypto.randomBytes(16).toString('hex'), teams: false, appSumoDetails: null, registerCompleted: true, hasReferralsProgram: false, - createdAt: randomDataGenerator.date(), - avatar: randomDataGenerator.url(), + createdAt: new Date(), + avatar: crypto.randomBytes(16).toString('hex'), emailVerified: true, keys: { ecc: { - privateKey: randomDataGenerator.string({ - length: randomDataGenerator.integer({ min: 500, max: 1000 }), - pool: 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789', - }), - publicKey: randomDataGenerator.string({ - length: randomDataGenerator.integer({ min: 500, max: 1000 }), - pool: 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789', - }), - revocationKey: randomDataGenerator.string({ - length: randomDataGenerator.integer({ min: 500, max: 1000 }), - pool: 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789', - }), + privateKey: crypto.randomBytes(16).toString('hex'), + publicKey: crypto.randomBytes(16).toString('hex'), + revocationKey: crypto.randomBytes(16).toString('hex'), }, kyber: { - privateKyberKey: randomDataGenerator.string({ - length: randomDataGenerator.integer({ min: 500, max: 1000 }), - pool: 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789', - }), - publicKyberKey: randomDataGenerator.string({ - length: randomDataGenerator.integer({ min: 500, max: 1000 }), - pool: 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789', - }), + privateKyberKey: crypto.randomBytes(16).toString('hex'), + publicKyberKey: crypto.randomBytes(16).toString('hex'), }, }, }; diff --git a/test/fixtures/common.fixture.ts b/test/fixtures/common.fixture.ts new file mode 100644 index 00000000..0fe62d76 --- /dev/null +++ b/test/fixtures/common.fixture.ts @@ -0,0 +1,13 @@ +export const CommonFixture = { + createObjectId: () => { + const timestamp = ((new Date().getTime() / 1000) | 0).toString(16); + return ( + timestamp + + 'xxxxxxxxxxxxxxxx' + .replace(/[x]/g, function () { + return ((Math.random() * 16) | 0).toString(16); + }) + .toLowerCase() + ); + }, 
+}; diff --git a/test/fixtures/drive-database.fixture.ts b/test/fixtures/drive-database.fixture.ts index 18e56eeb..c251a259 100644 --- a/test/fixtures/drive-database.fixture.ts +++ b/test/fixtures/drive-database.fixture.ts @@ -4,46 +4,37 @@ import { DriveFolder } from '../../src/services/database/drive-folder/drive-fold import { DriveDatabaseManager } from '../../src/services/database/drive-database-manager.service'; import { DriveFileRepository } from '../../src/services/database/drive-file/drive-file.repository'; import { DriveFolderRepository } from '../../src/services/database/drive-folder/drive-folder.repository'; -import Chance from 'chance'; - -const randomDataGenerator = new Chance(); +import { randomInt, randomUUID } from 'node:crypto'; export const getDriveFileDatabaseFixture = (): DriveFile => { - const createdAt = randomDataGenerator.date(); - const name = randomDataGenerator.word(); - const ext = randomDataGenerator.word(); - const filePath = `/${name}.${ext}`; const object: DriveFile = new DriveFile({ - uuid: randomDataGenerator.guid({ version: 4 }), - id: randomDataGenerator.natural({ min: 1 }), - name: name, - type: ext, - relativePath: filePath, - createdAt, - updatedAt: new Date(randomDataGenerator.date({ min: createdAt })), - fileId: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), - bucket: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), - size: randomDataGenerator.natural({ min: 1 }), - folderId: randomDataGenerator.natural({ min: 1 }), - folderUuid: randomDataGenerator.guid({ version: 4 }), + id: randomInt(2000), + name: `file_${new Date().getTime().toString()}`, + uuid: randomUUID(), + relativePath: `file_${new Date().getTime().toString()}.txt`, + createdAt: new Date(), + updatedAt: new Date(), status: 'EXISTS', + fileId: `file_id_${new Date().getTime().toString()}`, + folderId: randomInt(2000), + bucket: new Date().getTime().toString(), + size: randomInt(2000), + folderUuid: randomUUID(), + type: 'txt', }); return object; }; export const getDriveFolderDatabaseFixture = (): DriveFolder => { - const createdAt = randomDataGenerator.date(); - const name = randomDataGenerator.word(); - const folderPath = `/${name}`; const object: DriveFolder = new DriveFolder({ - id: randomDataGenerator.natural({ min: 1 }), - name: name, - uuid: randomDataGenerator.guid({ version: 4 }), - relativePath: folderPath, - createdAt: createdAt, - updatedAt: new Date(randomDataGenerator.date({ min: createdAt })), - parentId: randomDataGenerator.natural({ min: 1 }), - parentUuid: randomDataGenerator.guid({ version: 4 }), + id: randomInt(2000), + name: `folder_${new Date().getTime().toString()}`, + uuid: randomUUID(), + relativePath: '', + createdAt: new Date(), + updatedAt: new Date(), + parentId: randomInt(2000), + parentUuid: randomUUID(), status: 'EXISTS', }); diff --git a/test/fixtures/drive.fixture.ts b/test/fixtures/drive.fixture.ts index 9292ae5e..3be53093 100644 --- a/test/fixtures/drive.fixture.ts +++ b/test/fixtures/drive.fixture.ts @@ -7,236 +7,194 @@ import { FileStatus, FolderMeta, } from '@internxt/sdk/dist/drive/storage/types'; +import { getDefaultWordlist, wordlists } from 'bip39'; +import crypto, { randomInt, randomUUID } from 'node:crypto'; import { DriveFileItem, DriveFolderItem } from '../../src/types/drive.types'; import { DriveFile } from '../../src/services/database/drive-file/drive-file.domain'; import { DriveFileAttributes } from '../../src/services/database/drive-file/drive-file.attributes'; import { DriveFolderAttributes } from 
'../../src/services/database/drive-folder/drive-folder.attributes'; import { DriveFolder } from '../../src/services/database/drive-folder/drive-folder.domain'; -import Chance from 'chance'; -const randomDataGenerator = new Chance(); - -export const FileTypesFixture = [ - 'png', - 'jpg', - 'jpeg', - 'gif', - 'bmp', - 'webp', - 'tiff', - 'svg', - 'docx', - 'xlsx', - 'pptx', - 'pdf', - 'mp4', - 'mkv', - 'mov', - 'avi', - 'mp3', - 'wav', - 'flac', - 'ogg', - 'txt', - 'zip', - 'rar', - 'tar', - 'gz', - 'tgz', - 'iso', - 'exe', - 'apk', - 'deb', -]; +const wordlist = wordlists[getDefaultWordlist()]; +const fileTypes = ['png', 'jpg', 'docx', 'pdf', 'mp4', 'mp3']; export const newFolderItem = (attributes?: Partial<DriveFolderItem>): DriveFolderItem => { - const createdAt = randomDataGenerator.date(); const folder: DriveFolderItem = { - id: randomDataGenerator.natural({ min: 1 }), - uuid: randomDataGenerator.guid({ version: 4 }), - bucket: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), - name: randomDataGenerator.word(), - encryptedName: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), - createdAt: createdAt, - updatedAt: new Date(randomDataGenerator.date({ min: createdAt })), + id: randomInt(1, 100000), + uuid: randomUUID(), + parentId: randomInt(1, 100000), + bucket: crypto.randomBytes(16).toString('hex'), + name: wordlist[randomInt(wordlist.length)], + encryptedName: crypto.randomBytes(16).toString('hex'), + createdAt: new Date(), + updatedAt: new Date(), status: 'EXISTS', - parentId: randomDataGenerator.bool({ likelihood: 50 }) ? randomDataGenerator.natural({ min: 1 }) : null, - parentUuid: randomDataGenerator.bool({ likelihood: 50 }) ? randomDataGenerator.guid({ version: 4 }) : null, + parentUuid: randomUUID(), }; return { ...folder, ...attributes }; }; export const newFileItem = (attributes?: Partial<DriveFileItem>): DriveFileItem => { - const createdAt = randomDataGenerator.date(); const file: DriveFileItem = { - id: randomDataGenerator.natural({ min: 1 }), - uuid: randomDataGenerator.guid({ version: 4 }), - fileId: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), - folderId: randomDataGenerator.natural({ min: 1 }), - bucket: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), - name: randomDataGenerator.word(), - encryptedName: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), - createdAt: createdAt, - updatedAt: new Date(randomDataGenerator.date({ min: createdAt })), - size: randomDataGenerator.natural({ min: 1 }), - type: randomDataGenerator.pickone(FileTypesFixture), + id: randomInt(1, 100000), + uuid: crypto.randomBytes(16).toString('hex'), + fileId: crypto.randomBytes(16).toString('hex'), + folderId: randomInt(1, 100000), + bucket: crypto.randomBytes(16).toString('hex'), + name: wordlist[randomInt(wordlist.length)], + encryptedName: crypto.randomBytes(16).toString('hex'), + createdAt: new Date(), + updatedAt: new Date(), + size: randomInt(1, 10000), + type: fileTypes[randomInt(fileTypes.length)], status: FileStatus.EXISTS, - folderUuid: randomDataGenerator.guid({ version: 4 }), + folderUuid: randomUUID(), }; return { ...file, ...attributes }; }; export const newFolderMeta = (attributes?: Partial<FolderMeta>): FolderMeta => { - const createdAt = randomDataGenerator.date(); const folder: FolderMeta = { - bucket: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), - createdAt: createdAt.toString(), - created_at: createdAt.toString(), + bucket: crypto.randomBytes(16).toString('hex'), + createdAt: new 
Date().toString(), + created_at: new Date().toString(), deleted: false, deletedAt: null, deleted_at: null, encryptVersion: EncryptionVersion.Aes03, encrypt_version: EncryptionVersion.Aes03, - id: randomDataGenerator.natural({ min: 1 }), - name: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), + id: randomInt(1, 100000), + name: crypto.randomBytes(16).toString('hex'), parent: null, - parentId: randomDataGenerator.natural({ min: 1 }), - parent_id: randomDataGenerator.natural({ min: 1 }), - plainName: randomDataGenerator.word(), - plain_name: randomDataGenerator.word(), + parentId: randomInt(1, 100000), + parent_id: randomInt(1, 100000), + plainName: wordlist[randomInt(wordlist.length)], + plain_name: wordlist[randomInt(wordlist.length)], removed: false, removedAt: null, removed_at: null, - size: 0, + size: randomInt(1, 10000), type: 'folder', - updatedAt: new Date(randomDataGenerator.date({ min: createdAt })).toString(), - updated_at: new Date(randomDataGenerator.date({ min: createdAt })).toString(), + updatedAt: new Date().toString(), + updated_at: new Date().toString(), user: null, - userId: randomDataGenerator.natural({ min: 1 }), - user_id: randomDataGenerator.natural({ min: 1 }), - uuid: randomDataGenerator.guid({ version: 4 }), - parentUuid: randomDataGenerator.guid({ version: 4 }), - parent_uuid: randomDataGenerator.guid({ version: 4 }), - creation_time: new Date(randomDataGenerator.date({ min: createdAt })).toString(), - modification_time: new Date(randomDataGenerator.date({ min: createdAt })).toString(), + userId: randomInt(1, 100000), + user_id: randomInt(1, 100000), + uuid: randomUUID(), + parentUuid: randomUUID(), + parent_uuid: randomUUID(), + creation_time: new Date().toString(), + modification_time: new Date().toString(), }; return { ...folder, ...attributes }; }; export const newFileMeta = (attributes?: Partial<FileMeta>): FileMeta => { - const createdAt = randomDataGenerator.date(); const file: FileMeta = { - bucket: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), - createdAt: createdAt.toString(), - created_at: createdAt.toString(), + bucket: crypto.randomBytes(16).toString('hex'), + createdAt: new Date().toString(), + created_at: new Date().toString(), deleted: false, deletedAt: null, encrypt_version: EncryptionVersion.Aes03, - fileId: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), - folderId: randomDataGenerator.natural({ min: 1 }), - folder_id: randomDataGenerator.natural({ min: 1 }), - id: randomDataGenerator.natural({ min: 1 }), - name: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), - plain_name: randomDataGenerator.word(), - plainName: randomDataGenerator.word(), - size: randomDataGenerator.natural({ min: 1 }), - type: randomDataGenerator.pickone(FileTypesFixture), - updatedAt: new Date(randomDataGenerator.date({ min: createdAt })).toString(), + fileId: crypto.randomBytes(16).toString('hex'), + folderId: randomInt(1, 100000), + folder_id: randomInt(1, 100000), + id: randomInt(1, 100000), + name: crypto.randomBytes(16).toString('hex'), + plain_name: wordlist[randomInt(wordlist.length)], + plainName: wordlist[randomInt(wordlist.length)], + size: randomInt(1, 10000), + type: fileTypes[randomInt(fileTypes.length)], + updatedAt: new Date().toString(), status: FileStatus.EXISTS, thumbnails: [], currentThumbnail: null, - uuid: randomDataGenerator.guid({ version: 4 }), - folderUuid: randomDataGenerator.guid({ version: 4 }), + uuid: crypto.randomBytes(16).toString('hex'), + folderUuid: 
crypto.randomBytes(16).toString('hex'), }; return { ...file, ...attributes }; }; export const newPaginatedFolder = (attributes?: Partial<FetchPaginatedFolder>): FetchPaginatedFolder => { - const createdAt = randomDataGenerator.date(); const folder: FetchPaginatedFolder = { - bucket: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), - createdAt: createdAt, + bucket: crypto.randomBytes(16).toString('hex'), + createdAt: new Date(), deleted: false, deletedAt: null, encryptVersion: EncryptionVersion.Aes03, - id: randomDataGenerator.natural({ min: 1 }), - name: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), + id: randomInt(1, 100000), + name: crypto.randomBytes(16).toString('hex'), parent: null, - parentId: randomDataGenerator.natural({ min: 1 }), - plainName: randomDataGenerator.word(), + parentId: randomInt(1, 100000), + plainName: wordlist[randomInt(wordlist.length)], removed: false, removedAt: null, - updatedAt: new Date(randomDataGenerator.date({ min: createdAt })), + updatedAt: new Date(), user: null, - userId: randomDataGenerator.natural({ min: 1 }), - uuid: randomDataGenerator.guid({ version: 4 }), - parentUuid: randomDataGenerator.guid({ version: 4 }), + userId: randomInt(1, 100000), + uuid: randomUUID(), + parentUuid: randomUUID(), }; return { ...folder, ...attributes }; }; export const newPaginatedFile = (attributes?: Partial<FetchPaginatedFile>): FetchPaginatedFile => { - const createdAt = randomDataGenerator.date(); const file: FetchPaginatedFile = { - bucket: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), - createdAt: createdAt, + bucket: crypto.randomBytes(16).toString('hex'), + createdAt: new Date(), deleted: false, deletedAt: null, encryptVersion: EncryptionVersion.Aes03, - fileId: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), - folderId: randomDataGenerator.natural({ min: 1 }), - id: randomDataGenerator.natural({ min: 1 }), - name: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), - plainName: randomDataGenerator.word(), - size: BigInt(randomDataGenerator.natural({ min: 1 })), - type: randomDataGenerator.pickone(FileTypesFixture), - updatedAt: new Date(randomDataGenerator.date({ min: createdAt })), + fileId: crypto.randomBytes(16).toString('hex'), + folderId: randomInt(1, 100000), + id: randomInt(1, 100000), + name: crypto.randomBytes(16).toString('hex'), + plainName: wordlist[randomInt(wordlist.length)], + size: BigInt(randomInt(1, 10000)), + type: fileTypes[randomInt(fileTypes.length)], + updatedAt: new Date(), status: FileStatus.EXISTS, thumbnails: [], - uuid: randomDataGenerator.guid({ version: 4 }), - folderUuid: randomDataGenerator.guid({ version: 4 }), + uuid: randomUUID(), + folderUuid: randomUUID(), removed: false, removedAt: null, - userId: randomDataGenerator.natural({ min: 1 }), - modificationTime: new Date(randomDataGenerator.date({ min: createdAt })), + userId: randomInt(1, 100000), + modificationTime: new Date(), }; return { ...file, ...attributes }; }; export const newDriveFolder = (attributes?: Partial<DriveFolder>): DriveFolder => { - const createdAt = randomDataGenerator.date(); - const name = randomDataGenerator.word(); const folder: DriveFolderAttributes = { - id: randomDataGenerator.natural({ min: 1 }), - name: name, - uuid: randomDataGenerator.guid({ version: 4 }), - relativePath: `/${name}`, - parentId: randomDataGenerator.natural({ min: 1 }), - parentUuid: randomDataGenerator.guid({ version: 4 }), - createdAt: createdAt, - updatedAt: new Date(randomDataGenerator.date({ min: createdAt })), + id: 
randomInt(1, 100000), + name: crypto.randomBytes(16).toString('hex'), + uuid: crypto.randomBytes(16).toString('hex'), + relativePath: crypto.randomBytes(16).toString('hex'), + parentId: randomInt(1, 100000), + parentUuid: crypto.randomBytes(16).toString('hex'), + createdAt: new Date(), + updatedAt: new Date(), status: FileStatus.EXISTS, }; return new DriveFolder({ ...folder, ...attributes }); }; export const newDriveFile = (attributes?: Partial<DriveFile>): DriveFile => { - const createdAt = randomDataGenerator.date(); - const name = randomDataGenerator.word(); - const type = randomDataGenerator.pickone(FileTypesFixture); const file: DriveFileAttributes = { - id: randomDataGenerator.natural({ min: 1 }), - name: name, - type: type, - uuid: randomDataGenerator.guid({ version: 4 }), - fileId: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), - folderId: randomDataGenerator.natural({ min: 1 }), - folderUuid: randomDataGenerator.guid({ version: 4 }), - bucket: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), - relativePath: `/${name}.${type}`, - createdAt: createdAt, - updatedAt: new Date(randomDataGenerator.date({ min: createdAt })), - size: randomDataGenerator.natural({ min: 1 }), + id: randomInt(1, 100000), + name: crypto.randomBytes(16).toString('hex'), + type: fileTypes[randomInt(fileTypes.length)], + uuid: crypto.randomBytes(16).toString('hex'), + fileId: crypto.randomBytes(16).toString('hex'), + folderId: randomInt(1, 100000), + folderUuid: crypto.randomBytes(16).toString('hex'), + bucket: crypto.randomBytes(16).toString('hex'), + relativePath: crypto.randomBytes(16).toString('hex'), + createdAt: new Date(), + updatedAt: new Date(), + size: randomInt(1, 10000), status: FileStatus.EXISTS, }; return new DriveFile({ ...file, ...attributes }); @@ -255,25 +213,24 @@ export const generateSubcontent = (uuid: string, countFolders: number, countFile }; export const newCreateFolderResponse = (attributes?: Partial<CreateFolderResponse>): CreateFolderResponse => { - const createdAt = randomDataGenerator.date(); const folder: CreateFolderResponse = { - id: randomDataGenerator.natural({ min: 1 }), - parentId: randomDataGenerator.natural({ min: 1 }), - parentUuid: randomDataGenerator.guid({ version: 4 }), - name: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), - bucket: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), - userId: randomDataGenerator.natural({ min: 1 }), + id: randomInt(1, 100000), + parentId: randomInt(1, 100000), + parentUuid: randomUUID(), + name: crypto.randomBytes(16).toString('hex'), + bucket: crypto.randomBytes(16).toString('hex'), + userId: randomInt(1, 100000), encryptVersion: EncryptionVersion.Aes03, deleted: false, deletedAt: null, - createdAt: createdAt, - updatedAt: new Date(randomDataGenerator.date({ min: createdAt })), - uuid: randomDataGenerator.guid({ version: 4 }), - plainName: randomDataGenerator.word(), + createdAt: new Date(), + updatedAt: new Date(), + uuid: randomUUID(), + plainName: wordlist[randomInt(wordlist.length)], removed: false, removedAt: null, - creationTime: new Date(randomDataGenerator.date({ min: createdAt })), - modificationTime: new Date(randomDataGenerator.date({ min: createdAt })), + creationTime: new Date(), + modificationTime: new Date(), }; return { ...folder, ...attributes }; }; diff --git a/test/fixtures/login.fixture.ts b/test/fixtures/login.fixture.ts index ec2cd755..2b942ba4 100644 --- a/test/fixtures/login.fixture.ts +++ b/test/fixtures/login.fixture.ts @@ -1,26 +1,23 @@ +import { 
randomBytes, randomInt } from 'node:crypto'; import { UserFixture } from './auth.fixture'; import { LoginCredentials } from '../../src/types/command.types'; import { SdkManagerApiSecurity } from '../../src/services/sdk-manager.service'; -import Chance from 'chance'; -import { LoginDetails } from '@internxt/sdk'; -const randomDataGenerator = new Chance(); - -export const UserLoginFixture: LoginDetails = { - email: UserFixture.email, - password: randomDataGenerator.string({ length: 32 }), - tfaCode: randomDataGenerator.natural({ min: 0, max: 999999 }).toString().padStart(6, '0'), +export const UserLoginFixture = { + email: `${randomBytes(8).toString('hex')}@${randomBytes(8).toString('hex')}.com`, + password: randomBytes(16).toString('hex'), + twoFactor: randomInt(0, 999999).toString().padStart(6, '0'), }; export const ApiSecurityFixture: SdkManagerApiSecurity = { - newToken: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), - token: randomDataGenerator.string({ length: 24, pool: 'abcdef0123456789' }), + newToken: randomBytes(16).toString('hex'), + token: randomBytes(16).toString('hex'), }; export const UserCredentialsFixture: LoginCredentials = { - user: UserFixture, + user: { ...UserFixture, email: UserLoginFixture.email }, token: ApiSecurityFixture.token, newToken: ApiSecurityFixture.newToken, - lastLoggedInAt: randomDataGenerator.date().toISOString(), - lastTokenRefreshAt: randomDataGenerator.date().toISOString(), + lastLoggedInAt: randomBytes(16).toString('hex'), + lastTokenRefreshAt: randomBytes(16).toString('hex'), }; diff --git a/test/fixtures/webdav.fixture.ts b/test/fixtures/webdav.fixture.ts index 91e9500d..f9301b4a 100644 --- a/test/fixtures/webdav.fixture.ts +++ b/test/fixtures/webdav.fixture.ts @@ -5,10 +5,11 @@ import { WebDavRequestedResource } from '../../src/types/webdav.types'; import path from 'node:path'; export const createWebDavRequestFixture = <T>(request: T): T & Request => { + const userSettings = UserSettingsFixture; return getMockReq({ // @ts-expect-error - User is not defined in the Request type from the sinon-express-mock package user: request.user ?? 
{ - rootFolderId: UserSettingsFixture.root_folder_id, + rootFolderId: userSettings.root_folder_id, }, ...request, }); diff --git a/test/services/auth.service.test.ts b/test/services/auth.service.test.ts index 778945c6..3173bc5b 100644 --- a/test/services/auth.service.test.ts +++ b/test/services/auth.service.test.ts @@ -1,6 +1,6 @@ import { beforeEach, describe, expect, it, vi } from 'vitest'; import crypto from 'node:crypto'; -import { Auth, SecurityDetails } from '@internxt/sdk'; +import { Auth, LoginDetails, SecurityDetails } from '@internxt/sdk'; import { AuthService } from '../../src/services/auth.service'; import { KeysService } from '../../src/services/keys.service'; import { CryptoService } from '../../src/services/crypto.service'; @@ -14,7 +14,7 @@ import { LoginCredentials, MissingCredentialsError, } from '../../src/types/command.types'; -import { UserCredentialsFixture, UserLoginFixture } from '../fixtures/login.fixture'; +import { UserCredentialsFixture } from '../fixtures/login.fixture'; import { fail } from 'node:assert'; describe('Auth service', () => { @@ -24,7 +24,9 @@ describe('Auth service', () => { it('When user logs in, then login user credentials are generated', async () => { const loginResponse = { - ...UserCredentialsFixture, + token: crypto.randomBytes(16).toString('hex'), + newToken: crypto.randomBytes(16).toString('hex'), + user: UserFixture, userTeam: null, }; const mockDate = new Date().toISOString(); @@ -37,7 +39,11 @@ describe('Auth service', () => { vi.spyOn(CryptoService.instance, 'decryptTextWithKey').mockReturnValue(loginResponse.user.mnemonic); vi.spyOn(Date.prototype, 'toISOString').mockReturnValue(mockDate); - const responseLogin = await AuthService.instance.doLogin(UserLoginFixture.email, UserLoginFixture.password, ''); + const responseLogin = await AuthService.instance.doLogin( + loginResponse.user.email, + crypto.randomBytes(16).toString('hex'), + '', + ); const expectedResponseLogin: LoginCredentials = { user: { ...loginResponse.user, privateKey: Buffer.from(loginResponse.user.privateKey).toString('base64') }, @@ -50,11 +56,17 @@ describe('Auth service', () => { }); it('When user logs in and credentials are not correct, then an error is thrown', async () => { + const loginDetails: LoginDetails = { + email: crypto.randomBytes(16).toString('hex'), + password: crypto.randomBytes(8).toString('hex'), + tfaCode: crypto.randomInt(1, 999999).toString().padStart(6, '0'), + }; + const loginStub = vi.spyOn(Auth.prototype, 'login').mockRejectedValue(new Error('Login failed')); vi.spyOn(SdkManager.instance, 'getAuth').mockReturnValue(Auth.prototype); try { - await AuthService.instance.doLogin(UserLoginFixture.email, UserLoginFixture.password, UserLoginFixture.tfaCode); + await AuthService.instance.doLogin(loginDetails.email, loginDetails.password, loginDetails.tfaCode || ''); fail('Expected function to throw an error, but it did not.'); } catch { /* no op */ @@ -63,7 +75,7 @@ describe('Auth service', () => { }); it('When two factor authentication is enabled, then it is returned from is2FANeeded functionality', async () => { - const email = UserLoginFixture.email; + const email = crypto.randomBytes(16).toString('hex'); const securityDetails: SecurityDetails = { encryptedSalt: crypto.randomBytes(16).toString('hex'), tfaEnabled: true, @@ -78,7 +90,7 @@ describe('Auth service', () => { }); it('When email is not correct when checking two factor authentication, then an error is thrown', async () => { - const email = UserLoginFixture.email; + const email = 
crypto.randomBytes(16).toString('hex'); const securityStub = vi.spyOn(Auth.prototype, 'securityDetails').mockRejectedValue(new Error()); vi.spyOn(SdkManager.instance, 'getAuth').mockReturnValue(Auth.prototype); diff --git a/test/services/drive/drive-folder.service.test.ts b/test/services/drive/drive-folder.service.test.ts index 43a5f000..697af896 100644 --- a/test/services/drive/drive-folder.service.test.ts +++ b/test/services/drive/drive-folder.service.test.ts @@ -1,4 +1,5 @@ import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { randomUUID } from 'node:crypto'; import { Storage } from '@internxt/sdk/dist/drive'; import { DriveFolderService } from '../../../src/services/drive/drive-folder.service'; import { SdkManager } from '../../../src/services/sdk-manager.service'; @@ -40,8 +41,8 @@ describe('Drive folder Service', () => { }); it('When folder content is requested, then all its subfolders and subfiles are returned', async () => { - const parentFolder = newFolderMeta(); - const subContentFixture = generateSubcontent(parentFolder.uuid, 112, 117); //112 subfolders and 117 subfiles + const parentUuid = randomUUID(); + const subContentFixture = generateSubcontent(parentUuid, 112, 117); //112 subfolders and 117 subfiles const requestCancelerMock = { cancel: () => {} }; vi.spyOn(Storage.prototype, 'getFolderFoldersByUuid').mockImplementation((_: string, offset) => { @@ -72,7 +73,7 @@ describe('Drive folder Service', () => { }); vi.spyOn(SdkManager.instance, 'getStorage').mockReturnValue(Storage.prototype); - const resultContent = await sut.getFolderContent(parentFolder.uuid); + const resultContent = await sut.getFolderContent(parentUuid); expect(subContentFixture).to.deep.equal(resultContent); }); diff --git a/test/services/network/download.service.test.ts b/test/services/network/download.service.test.ts index 240bd9ae..a577764c 100644 --- a/test/services/network/download.service.test.ts +++ b/test/services/network/download.service.test.ts @@ -2,18 +2,16 @@ import { beforeEach, describe, expect, it, vi } from 'vitest'; import { DownloadService } from '../../../src/services/network/download.service'; import { Readable } from 'node:stream'; import axios from 'axios'; -import Chance from 'chance'; describe('Download Service', () => { const sut = DownloadService.instance; - const randomDataGenerator = new Chance(); beforeEach(() => { vi.restoreAllMocks(); }); it('When a file is downloaded, should return a ReadableStream', async () => { - const fileContent = Buffer.from(randomDataGenerator.string({ length: 64 })); + const fileContent = Buffer.from('file-content'); const readableContent = new Readable({ read() { this.push(fileContent); @@ -22,7 +20,7 @@ describe('Download Service', () => { }); vi.spyOn(axios, 'get').mockResolvedValue({ data: readableContent }); - const readable = await sut.downloadFile('https://example.com/file', fileContent.length, {}); + const readable = await sut.downloadFile('https://example.com/file', {}); const reader = readable.getReader(); @@ -32,7 +30,7 @@ describe('Download Service', () => { }); it('When a file is downloaded, progress should be reported', async () => { - const fileContent = Buffer.from(randomDataGenerator.string({ length: 64 })); + const fileContent = Buffer.from('file-content'); const options = { progressCallback: vi.fn(), }; @@ -44,17 +42,12 @@ describe('Download Service', () => { }); vi.spyOn(axios, 'get').mockImplementation((_, config) => { - config?.onDownloadProgress?.({ - loaded: fileContent.length, - total: fileContent.length, - 
bytes: fileContent.length, - lengthComputable: true, - }); + config?.onDownloadProgress?.({ loaded: 100, total: 100, bytes: 100, lengthComputable: true }); return Promise.resolve({ data: readableContent }); }); - await sut.downloadFile('https://example.com/file', fileContent.length, options); + await sut.downloadFile('https://example.com/file', options); - expect(options.progressCallback).toHaveBeenCalledWith(100); + expect(options.progressCallback).toHaveBeenCalledWith(1); }); }); diff --git a/test/services/network/network-facade.service.test.ts b/test/services/network/network-facade.service.test.ts index 23f9dd74..7998bbc6 100644 --- a/test/services/network/network-facade.service.test.ts +++ b/test/services/network/network-facade.service.test.ts @@ -4,14 +4,12 @@ import { NetworkFacade } from '../../../src/services/network/network-facade.serv import { SdkManager } from '../../../src/services/sdk-manager.service'; import path from 'node:path'; import { createReadStream } from 'node:fs'; -import fs from 'node:fs/promises'; import { UploadService } from '../../../src/services/network/upload.service'; import { CryptoService } from '../../../src/services/crypto.service'; import { DownloadService } from '../../../src/services/network/download.service'; import { Readable } from 'node:stream'; import axios from 'axios'; import { fail } from 'node:assert'; -import { UserFixture } from '../../fixtures/auth.fixture'; describe('Network Facade Service', () => { beforeEach(() => { @@ -33,7 +31,6 @@ describe('Network Facade Service', () => { CryptoService.instance, ); const file = path.join(process.cwd(), 'test/fixtures/test-content.fixture.txt'); - const fileStat = await fs.stat(file); const readStream = createReadStream(file); const options = { progressCallback: vi.fn(), @@ -41,9 +38,9 @@ describe('Network Facade Service', () => { }; const result = await sut.uploadFromStream( - UserFixture.bucket, - UserFixture.mnemonic, - fileStat.size, + 'f1858bc9675f9e4f7ab29429', + 'animal fog wink trade december thumb sight cousin crunch plunge captain enforce letter creek text', + 100, readStream, options, ); @@ -60,7 +57,6 @@ describe('Network Facade Service', () => { CryptoService.instance, ); const file = path.join(process.cwd(), 'test/fixtures/test-content.fixture.txt'); - const fileStat = await fs.stat(file); const readStream = createReadStream(file); const options = { progressCallback: vi.fn(), @@ -69,9 +65,9 @@ describe('Network Facade Service', () => { vi.spyOn(NetworkUpload, 'uploadFile').mockResolvedValue('uploaded_file_id'); const [executeUpload] = await sut.uploadFromStream( - UserFixture.bucket, - UserFixture.mnemonic, - fileStat.size, + 'f1858bc9675f9e4f7ab29429', + 'animal fog wink trade december thumb sight cousin crunch plunge captain enforce letter creek text', + 100, readStream, options, ); @@ -96,7 +92,7 @@ describe('Network Facade Service', () => { index: '29f07b8914d8353b663ab783f4bbe9950fdde680a69524405790cecca9c549f9', bucket: bucket, created: new Date(), - size: encryptedContent.length, + size: 100, shards: [ { url: 'https://doesnotexists.com/file', @@ -124,7 +120,6 @@ describe('Network Facade Service', () => { // eslint-disable-next-line max-len 'index course habit soon assist dragon tragic helmet salute stuff later twice consider grit pulse cement obvious trick sponsor stereo hello win royal more', 'f1858bc9675f9e4f7ab29429', - encryptedContent.length, writable, ); @@ -136,7 +131,7 @@ describe('Network Facade Service', () => { it('When a file download is aborted, should abort the 
download', async () => { const encryptedContent = Buffer.from('b6ccfa381c150f3a4b65245bffa4d84087', 'hex'); - + const bucket = 'cd8abd7e8b13081660b58dbe'; const readableContent = new ReadableStream({ pull(controller) { controller.enqueue(encryptedContent); @@ -147,9 +142,9 @@ describe('Network Facade Service', () => { const networkMock = getNetworkMock(); vi.spyOn(networkMock, 'getDownloadLinks').mockResolvedValue({ index: '29f07b8914d8353b663ab783f4bbe9950fdde680a69524405790cecca9c549f9', - bucket: UserFixture.bucket, + bucket: bucket, created: new Date(), - size: encryptedContent.length, + size: 100, shards: [ { url: 'https://doesnotexists.com/file', @@ -167,11 +162,10 @@ describe('Network Facade Service', () => { const writable = new WritableStream(); const [executeDownload, abort] = await sut.downloadToStream( - UserFixture.bucket, + bucket, // eslint-disable-next-line max-len 'index course habit soon assist dragon tragic helmet salute stuff later twice consider grit pulse cement obvious trick sponsor stereo hello win royal more', 'f1858bc9675f9e4f7ab29429', - encryptedContent.length, writable, ); @@ -200,7 +194,7 @@ describe('Network Facade Service', () => { index: '29f07b8914d8353b663ab783f4bbe9950fdde680a69524405790cecca9c549f9', bucket: bucket, created: new Date(), - size: encryptedContent.length, + size: 100, shards: [ { url: 'https://doesnotexists.com/file', @@ -220,12 +214,7 @@ describe('Network Facade Service', () => { const options = { progressCallback: vi.fn() }; vi.spyOn(axios, 'get').mockImplementation((_, config) => { - config?.onDownloadProgress?.({ - loaded: encryptedContent.length, - total: encryptedContent.length, - bytes: encryptedContent.length, - lengthComputable: true, - }); + config?.onDownloadProgress?.({ loaded: 100, total: 100, bytes: 100, lengthComputable: true }); return Promise.resolve({ data: readableContent }); }); @@ -234,7 +223,6 @@ describe('Network Facade Service', () => { // eslint-disable-next-line max-len 'index course habit soon assist dragon tragic helmet salute stuff later twice consider grit pulse cement obvious trick sponsor stereo hello win royal more', 'f1858bc9675f9e4f7ab29429', - encryptedContent.length, writable, undefined, options, @@ -242,6 +230,6 @@ describe('Network Facade Service', () => { await executeDownload; - expect(options.progressCallback).toHaveBeenCalledWith(100); + expect(options.progressCallback).toHaveBeenCalledWith(1); }); }); diff --git a/test/services/network/upload.service.test.ts b/test/services/network/upload.service.test.ts index ed4fd057..3eac29bb 100644 --- a/test/services/network/upload.service.test.ts +++ b/test/services/network/upload.service.test.ts @@ -1,12 +1,10 @@ import { beforeEach, describe, expect, it, vi } from 'vitest'; import { UploadService } from '../../../src/services/network/upload.service'; import nock from 'nock'; -import Chance from 'chance'; import { Readable } from 'node:stream'; describe('Upload Service', () => { const sut = UploadService.instance; - const randomDataGenerator = new Chance(); beforeEach(() => { vi.restoreAllMocks(); @@ -14,10 +12,9 @@ describe('Upload Service', () => { it('When a file is uploaded and etag is missing, should throw an error', async () => { const url = 'https://example.com/upload'; - const fileContent = Buffer.from(randomDataGenerator.string({ length: 64 })); const data = new Readable({ read() { - this.push(fileContent); + this.push('test content'); this.push(null); }, }); @@ -29,7 +26,7 @@ describe('Upload Service', () => { 
nock('https://example.com').put('/upload').reply(200, '', {}); try { - await sut.uploadFile(url, fileContent.length, data, options); + await sut.uploadFile(url, data, options); } catch (error) { expect((error as Error).message).to.contain('Missing Etag'); } @@ -37,10 +34,9 @@ describe('Upload Service', () => { it('When a file is uploaded and etag is returned, the etag should be returned', async () => { const url = 'https://example.com/upload'; - const fileContent = Buffer.from(randomDataGenerator.string({ length: 64 })); const data = new Readable({ read() { - this.push(fileContent); + this.push('test content'); this.push(null); }, }); @@ -53,16 +49,15 @@ describe('Upload Service', () => { etag: 'test-etag', }); - const result = await sut.uploadFile(url, fileContent.length, data, options); + const result = await sut.uploadFile(url, data, options); expect(result.etag).to.be.equal('test-etag'); }); it('When a file is uploaded, should update the progress', async () => { const url = 'https://example.com/upload'; - const fileContent = Buffer.from(randomDataGenerator.string({ length: 64 })); const data = new Readable({ read() { - this.push(fileContent); + this.push('test content'); this.push(null); }, }); @@ -75,8 +70,8 @@ describe('Upload Service', () => { etag: 'test-etag', }); - await sut.uploadFile(url, fileContent.length, data, options); - expect(options.progressCallback).toHaveBeenCalledWith(100); + await sut.uploadFile(url, data, options); + expect(options.progressCallback).toHaveBeenCalledWith(1); }); it('When a file is uploaded and the upload is aborted, should cancel the request', async () => { diff --git a/test/webdav/handlers/GET.handler.test.ts b/test/webdav/handlers/GET.handler.test.ts index 145c928c..2be73414 100644 --- a/test/webdav/handlers/GET.handler.test.ts +++ b/test/webdav/handlers/GET.handler.test.ts @@ -134,7 +134,6 @@ describe('GET request handler', () => { mockFile.bucket, mockAuthDetails.user.mnemonic, mockFile.fileId, - mockFile.size, expect.any(Object), undefined, ); @@ -206,7 +205,6 @@ describe('GET request handler', () => { mockFile.bucket, mockAuthDetails.user.mnemonic, mockFile.fileId, - mockSize - rangeStart, expect.any(Object), expectedRangeOptions, ); diff --git a/yarn.lock b/yarn.lock index a238c5c2..a2da60fb 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2834,11 +2834,6 @@ "@types/connect" "*" "@types/node" "*" -"@types/chance@1.1.6": - version "1.1.6" - resolved "https://registry.yarnpkg.com/@types/chance/-/chance-1.1.6.tgz#2fe3de58742629602c3fbab468093b27207f04ad" - integrity sha512-V+pm3stv1Mvz8fSKJJod6CglNGVqEQ6OyuqitoDkWywEODM/eJd1eSuIp9xt6DrX8BWZ2eDSIzbw1tPCUTvGbQ== - "@types/cli-progress@3.11.6": version "3.11.6" resolved "https://registry.yarnpkg.com/@types/cli-progress/-/cli-progress-3.11.6.tgz#94b334ebe4190f710e51c1bf9b4fedb681fa9e45" @@ -3721,11 +3716,6 @@ chalk@~5.3.0: resolved "https://registry.yarnpkg.com/chalk/-/chalk-5.3.0.tgz#67c20a7ebef70e7f3970a01f90fa210cb6860385" integrity sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w== -chance@1.1.12: - version "1.1.12" - resolved "https://registry.yarnpkg.com/chance/-/chance-1.1.12.tgz#6a263cf241674af50a1b903357f9d328a6f252fb" - integrity sha512-vVBIGQVnwtUG+SYe0ge+3MvF78cvSpuCOEUJr7sVEk2vSBuMW6OXNJjSzdtzrlxNUEaoqH2GBd5Y/+18BEB01Q== - change-case@^4: version "4.1.2" resolved "https://registry.yarnpkg.com/change-case/-/change-case-4.1.2.tgz#fedfc5f136045e2398c0410ee441f95704641e12" From 512d6c0459d1e2e5c6cd55e57fc8966eca854f05 Mon Sep 17 00:00:00 2001 From: 
larry-internxt Date: Mon, 13 Jan 2025 18:09:46 +0100 Subject: [PATCH 6/9] refactor: update progress callback handling in download and upload services --- src/services/network/download.service.ts | 7 +++---- src/services/network/network-facade.service.ts | 16 +++++++++------- src/services/network/upload.service.ts | 5 ++--- src/types/network.types.ts | 3 ++- test/services/network/download.service.test.ts | 2 +- .../network/network-facade.service.test.ts | 12 ++++++++++-- test/services/network/upload.service.test.ts | 12 ++++++++---- test/webdav/handlers/GET.handler.test.ts | 2 ++ 8 files changed, 37 insertions(+), 22 deletions(-) diff --git a/src/services/network/download.service.ts b/src/services/network/download.service.ts index 2df324eb..028cc2a7 100644 --- a/src/services/network/download.service.ts +++ b/src/services/network/download.service.ts @@ -1,13 +1,13 @@ import axios from 'axios'; +import { DownloadProgressCallback } from '../../types/network.types'; export class DownloadService { static readonly instance = new DownloadService(); async downloadFile( url: string, - size: number, options: { - progressCallback?: (progress: number) => void; + progressCallback?: DownloadProgressCallback; abortController?: AbortController; rangeHeader?: string; }, @@ -16,8 +16,7 @@ export class DownloadService { responseType: 'stream', onDownloadProgress(progressEvent) { if (options.progressCallback && progressEvent.loaded) { - const reportedProgress = Math.round((progressEvent.loaded / size) * 100); - options.progressCallback(reportedProgress); + options.progressCallback(progressEvent.loaded); } }, headers: { diff --git a/src/services/network/network-facade.service.ts b/src/services/network/network-facade.service.ts index 209427f4..f1ae3829 100644 --- a/src/services/network/network-facade.service.ts +++ b/src/services/network/network-facade.service.ts @@ -10,7 +10,7 @@ import { import { Environment } from '@internxt/inxt-js'; import { randomBytes } from 'node:crypto'; import { Readable, Transform } from 'node:stream'; -import { DownloadOptions, UploadOptions, UploadProgressCallback } from '../../types/network.types'; +import { DownloadOptions, UploadOptions, UploadProgressCallback, DownloadProgressCallback } from '../../types/network.types'; import { CryptoService } from '../crypto.service'; import { UploadService } from './upload.service'; import { DownloadService } from './download.service'; @@ -62,9 +62,10 @@ export class NetworkFacade { let fileStream: ReadableStream; const abortable = options?.abortController ?? 
new AbortController(); - const onProgress: UploadProgressCallback = (progress: number) => { + const onProgress: DownloadProgressCallback = (loadedBytes: number) => { if (!options?.progressCallback) return; - options.progressCallback(progress); + const reportedProgress = Math.round((loadedBytes / size) * 100); + options.progressCallback(reportedProgress); }; const decryptFile: DecryptFileFunction = async (_, key, iv) => { @@ -92,7 +93,7 @@ export class NetworkFacade { throw new Error('Download aborted'); } - const encryptedContentStream = await this.downloadService.downloadFile(downloadable.url, size, { + const encryptedContentStream = await this.downloadService.downloadFile(downloadable.url, { progressCallback: onProgress, abortController: options?.abortController, rangeHeader: rangeOptions?.range, @@ -140,9 +141,10 @@ export class NetworkFacade { let encryptionTransform: Transform; let hash: Buffer; - const onProgress: UploadProgressCallback = (progress: number) => { + const onProgress: UploadProgressCallback = (loadedBytes: number) => { if (!options?.progressCallback) return; - options.progressCallback(progress); + const reportedProgress = Math.round((loadedBytes / size) * 100); + options.progressCallback(reportedProgress); }; const encryptFile: EncryptFileFunction = async (_, key, iv) => { @@ -157,7 +159,7 @@ export class NetworkFacade { }; const uploadFile: UploadFileFunction = async (url) => { - await this.uploadService.uploadFile(url, size, encryptionTransform, { + await this.uploadService.uploadFile(url, encryptionTransform, { abortController: abortable, progressCallback: onProgress, }); diff --git a/src/services/network/upload.service.ts b/src/services/network/upload.service.ts index cda9f333..f11a2a4e 100644 --- a/src/services/network/upload.service.ts +++ b/src/services/network/upload.service.ts @@ -5,13 +5,12 @@ import { UploadOptions } from '../../types/network.types'; export class UploadService { public static readonly instance: UploadService = new UploadService(); - async uploadFile(url: string, size: number, from: Readable, options: UploadOptions): Promise<{ etag: string }> { + async uploadFile(url: string, from: Readable, options: UploadOptions): Promise<{ etag: string }> { const response = await axios.put(url, from, { signal: options.abortController?.signal, onUploadProgress: (progressEvent) => { if (options.progressCallback && progressEvent.loaded) { - const reportedProgress = Math.round((progressEvent.loaded / size) * 100); - options.progressCallback(reportedProgress); + options.progressCallback(progressEvent.loaded); } }, }); diff --git a/src/types/network.types.ts b/src/types/network.types.ts index 7a668dcc..800fe09e 100644 --- a/src/types/network.types.ts +++ b/src/types/network.types.ts @@ -3,7 +3,8 @@ export interface NetworkCredentials { pass: string; } -export type UploadProgressCallback = (progress: number) => void; +export type DownloadProgressCallback = (downloadedBytes: number) => void; +export type UploadProgressCallback = (uploadedBytes: number) => void; export interface NetworkOperationBaseOptions { progressCallback: UploadProgressCallback; abortController?: AbortController; diff --git a/test/services/network/download.service.test.ts b/test/services/network/download.service.test.ts index a577764c..8a74caf7 100644 --- a/test/services/network/download.service.test.ts +++ b/test/services/network/download.service.test.ts @@ -48,6 +48,6 @@ describe('Download Service', () => { await sut.downloadFile('https://example.com/file', options); - 
expect(options.progressCallback).toHaveBeenCalledWith(1); + expect(options.progressCallback).toHaveBeenCalledWith(100); }); }); diff --git a/test/services/network/network-facade.service.test.ts b/test/services/network/network-facade.service.test.ts index 7998bbc6..94ba3f24 100644 --- a/test/services/network/network-facade.service.test.ts +++ b/test/services/network/network-facade.service.test.ts @@ -120,6 +120,7 @@ describe('Network Facade Service', () => { // eslint-disable-next-line max-len 'index course habit soon assist dragon tragic helmet salute stuff later twice consider grit pulse cement obvious trick sponsor stereo hello win royal more', 'f1858bc9675f9e4f7ab29429', + encryptedContent.length, writable, ); @@ -166,6 +167,7 @@ describe('Network Facade Service', () => { // eslint-disable-next-line max-len 'index course habit soon assist dragon tragic helmet salute stuff later twice consider grit pulse cement obvious trick sponsor stereo hello win royal more', 'f1858bc9675f9e4f7ab29429', + encryptedContent.length, writable, ); @@ -214,7 +216,12 @@ describe('Network Facade Service', () => { const options = { progressCallback: vi.fn() }; vi.spyOn(axios, 'get').mockImplementation((_, config) => { - config?.onDownloadProgress?.({ loaded: 100, total: 100, bytes: 100, lengthComputable: true }); + config?.onDownloadProgress?.({ + loaded: encryptedContent.length, + total: encryptedContent.length, + bytes: encryptedContent.length, + lengthComputable: true + }); return Promise.resolve({ data: readableContent }); }); @@ -223,6 +230,7 @@ describe('Network Facade Service', () => { // eslint-disable-next-line max-len 'index course habit soon assist dragon tragic helmet salute stuff later twice consider grit pulse cement obvious trick sponsor stereo hello win royal more', 'f1858bc9675f9e4f7ab29429', + encryptedContent.length, writable, undefined, options, @@ -230,6 +238,6 @@ describe('Network Facade Service', () => { await executeDownload; - expect(options.progressCallback).toHaveBeenCalledWith(1); + expect(options.progressCallback).toHaveBeenCalledWith(100); }); }); diff --git a/test/services/network/upload.service.test.ts b/test/services/network/upload.service.test.ts index 3eac29bb..4999dfd7 100644 --- a/test/services/network/upload.service.test.ts +++ b/test/services/network/upload.service.test.ts @@ -2,6 +2,7 @@ import { beforeEach, describe, expect, it, vi } from 'vitest'; import { UploadService } from '../../../src/services/network/upload.service'; import nock from 'nock'; import { Readable } from 'node:stream'; +import crypto from 'node:crypto'; describe('Upload Service', () => { const sut = UploadService.instance; @@ -12,9 +13,10 @@ describe('Upload Service', () => { it('When a file is uploaded and etag is missing, should throw an error', async () => { const url = 'https://example.com/upload'; + const file = crypto.randomBytes(16).toString('hex'); const data = new Readable({ read() { - this.push('test content'); + this.push(file); this.push(null); }, }); @@ -34,9 +36,10 @@ describe('Upload Service', () => { it('When a file is uploaded and etag is returned, the etag should be returned', async () => { const url = 'https://example.com/upload'; + const file = crypto.randomBytes(16).toString('hex'); const data = new Readable({ read() { - this.push('test content'); + this.push(file); this.push(null); }, }); @@ -55,9 +58,10 @@ describe('Upload Service', () => { it('When a file is uploaded, should update the progress', async () => { const url = 'https://example.com/upload'; + const file = 
crypto.randomBytes(16).toString('hex'); const data = new Readable({ read() { - this.push('test content'); + this.push(file); this.push(null); }, }); @@ -71,7 +75,7 @@ }); await sut.uploadFile(url, data, options); - expect(options.progressCallback).toHaveBeenCalledWith(1); + expect(options.progressCallback).toHaveBeenCalledWith(file.length); }); it('When a file is uploaded and the upload is aborted, should cancel the request', async () => { diff --git a/test/webdav/handlers/GET.handler.test.ts b/test/webdav/handlers/GET.handler.test.ts index 2be73414..145c928c 100644 --- a/test/webdav/handlers/GET.handler.test.ts +++ b/test/webdav/handlers/GET.handler.test.ts @@ -134,6 +134,7 @@ describe('GET request handler', () => { mockFile.bucket, mockAuthDetails.user.mnemonic, mockFile.fileId, + mockFile.size, expect.any(Object), undefined, ); @@ -205,6 +206,7 @@ describe('GET request handler', () => { mockFile.bucket, mockAuthDetails.user.mnemonic, mockFile.fileId, + mockSize - rangeStart, expect.any(Object), expectedRangeOptions, ); From bfaa6378db57c84f4deeecd306dabb579d8d7e91 Mon Sep 17 00:00:00 2001 From: larry-internxt Date: Mon, 13 Jan 2025 18:18:20 +0100 Subject: [PATCH 7/9] refactor: simplify encryption transformation in network facade service --- src/services/network/network-facade.service.ts | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/src/services/network/network-facade.service.ts b/src/services/network/network-facade.service.ts index f1ae3829..04b4bb10 100644 --- a/src/services/network/network-facade.service.ts +++ b/src/services/network/network-facade.service.ts @@ -148,14 +148,11 @@ export class NetworkFacade { }; const encryptFile: EncryptFileFunction = async (_, key, iv) => { - encryptionTransform = from - .pipe( - await this.cryptoService.getEncryptionTransform( - Buffer.from(key as ArrayBuffer), - Buffer.from(iv as ArrayBuffer), - ), - ) - .pipe(hashStream); + const encryptionCipher = this.cryptoService.getEncryptionTransform( + Buffer.from(key as ArrayBuffer), + Buffer.from(iv as ArrayBuffer), + ); + encryptionTransform = from.pipe(encryptionCipher).pipe(hashStream); }; const uploadFile: UploadFileFunction = async (url) => { From eeca79746577afd82ceccda9713deb0280a4e41e Mon Sep 17 00:00:00 2001 From: larry-internxt Date: Mon, 13 Jan 2025 18:24:08 +0100 Subject: [PATCH 8/9] refactor: convert getEncryptionTransform to an arrow function --- src/services/crypto.service.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/services/crypto.service.ts b/src/services/crypto.service.ts index ad5829cb..b43dd6f7 100644 --- a/src/services/crypto.service.ts +++ b/src/services/crypto.service.ts @@ -166,11 +166,11 @@ export class CryptoService { return decryptedStream; } - public async getEncryptionTransform(key: Buffer, iv: Buffer): Promise<Transform> { + public getEncryptionTransform = (key: Buffer, iv: Buffer): Transform => { const cipher = createCipheriv('aes-256-ctr', key, iv); - return cipher; - } + }; + /** * Generates the key and the iv by transforming a secret and a salt. * It will generate the same key and iv if the same secret and salt is used. 
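Patches 7 and 8 together reduce the upload encryption step to a synchronous cipher lookup that is piped straight into the hash stream. A rough sketch of that shape, assuming aes-256-ctr as in the diff and substituting a plain PassThrough for the project's HashStream:

    import { createCipheriv, randomBytes } from 'node:crypto';
    import { PassThrough, Readable, Transform } from 'node:stream';

    // Same shape as the arrow function in patch 8: key and iv in, a Transform out.
    const getEncryptionTransform = (key: Buffer, iv: Buffer): Transform =>
      createCipheriv('aes-256-ctr', key, iv);

    const key = randomBytes(32); // 256-bit key for aes-256-ctr
    const iv = randomBytes(16);  // 128-bit counter block
    const hashStream = new PassThrough(); // stand-in for the project's HashStream

    // Patch 7's simplified pipeline: source -> cipher -> hash stream.
    const source = Readable.from(['plain text payload']);
    source.pipe(getEncryptionTransform(key, iv)).pipe(hashStream);
    hashStream.on('data', (chunk) => console.log('encrypted bytes:', chunk.length));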
From ba071426344394314329cce8749bfdccb549d2a0 Mon Sep 17 00:00:00 2001 From: larry-internxt Date: Tue, 14 Jan 2025 13:51:40 +0100 Subject: [PATCH 9/9] test: add progress reporting for file uploads in network facade --- .../network/network-facade.service.test.ts | 66 ++++++++++++++++++- 1 file changed, 65 insertions(+), 1 deletion(-) diff --git a/test/services/network/network-facade.service.test.ts b/test/services/network/network-facade.service.test.ts index 94ba3f24..69fad07c 100644 --- a/test/services/network/network-facade.service.test.ts +++ b/test/services/network/network-facade.service.test.ts @@ -10,6 +10,8 @@ import { DownloadService } from '../../../src/services/network/download.service' import { Readable } from 'node:stream'; import axios from 'axios'; import { fail } from 'node:assert'; +import crypto from 'node:crypto'; +import { HashStream } from '../../../src/utils/hash.utils'; describe('Network Facade Service', () => { beforeEach(() => { @@ -77,6 +79,68 @@ describe('Network Facade Service', () => { expect(uploadResult.fileId).to.be.equal('uploaded_file_id'); }); + it('When a file is uploaded, then it should report progress', async () => { + const bucket = 'f1858bc9675f9e4f7ab29429'; + const networkMock = getNetworkMock(); + + const sut = new NetworkFacade( + networkMock, + UploadService.instance, + DownloadService.instance, + CryptoService.instance, + ); + const file = crypto.randomBytes(16).toString('hex'); + const readStream = new Readable({ + read() { + this.push(file); + this.push(null); + }, + }); + const options = { + progressCallback: vi.fn(), + abortController: new AbortController(), + }; + + vi.spyOn(HashStream.prototype, 'getHash').mockImplementation(() => Buffer.from('')); + + vi.spyOn(axios, 'put').mockImplementation((_, __, config) => { + config?.onUploadProgress?.({ + loaded: file.length, + total: file.length, + bytes: file.length, + lengthComputable: true, + }); + return Promise.resolve({ + data: readStream, + headers: { + etag: 'any-etag', + }, + }); + }); + + vi.spyOn(networkMock, 'startUpload').mockResolvedValue({ + uploads: [{ index: 0, url: 'any-url', uuid: 'any-uuid', urls: [] }], + }); + + vi.spyOn(networkMock, 'finishUpload') + // @ts-expect-error - We only mock the properties we need + .mockResolvedValue({ + id: 'any-id', + }); + + const [executeUpload] = await sut.uploadFromStream( + bucket, + 'animal fog wink trade december thumb sight cousin crunch plunge captain enforce letter creek text', + file.length, + readStream, + options, + ); + + await executeUpload; + + expect(options.progressCallback).toHaveBeenCalledWith(100); + }); + it('When a file is downloaded, should write it to a stream', async () => { const encryptedContent = Buffer.from('b6ccfa381c150f3a4b65245bffa4d84087', 'hex'); const bucket = 'cd8abd7e8b13081660b58dbe'; @@ -220,7 +284,7 @@ describe('Network Facade Service', () => { loaded: encryptedContent.length, total: encryptedContent.length, bytes: encryptedContent.length, - lengthComputable: true + lengthComputable: true, }); return Promise.resolve({ data: readableContent }); });
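The common thread in patches 6 and 9 is that the transport layer now reports raw byte counts and the facade converts them to a rounded percentage, which is what the new upload test asserts with toHaveBeenCalledWith(100). A simplified sketch of that conversion, reusing only the UploadProgressCallback type from the diffs above (the toPercentCallback helper is illustrative, not part of the codebase):

    type UploadProgressCallback = (uploadedBytes: number) => void;

    // Wraps a percentage-based callback around a byte-based one,
    // mirroring the facade's percent = round(loadedBytes / size * 100).
    const toPercentCallback = (size: number, onPercent: (percent: number) => void): UploadProgressCallback => {
      return (uploadedBytes) => onPercent(Math.round((uploadedBytes / size) * 100));
    };

    // A 2048-byte upload reported at 1024 and then 2048 bytes logs 50% and then 100%.
    const report = toPercentCallback(2048, (p) => console.log(`${p}%`));
    report(1024);
    report(2048);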