diff --git a/package.json b/package.json index d632ca698b..ae05468985 100644 --- a/package.json +++ b/package.json @@ -21,6 +21,7 @@ "dependencies": { "@aws-sdk/client-s3": "^3.705.0", "@aws-sdk/credential-providers": "^3.864.0", + "@aws-sdk/middleware-retry": "^3.374.0", "@azure/storage-blob": "^12.28.0", "@hapi/joi": "^17.1.1", "@smithy/node-http-handler": "^3.0.0", diff --git a/tests/functional/sse-kms-migration/arnPrefix.js b/tests/functional/sse-kms-migration/arnPrefix.js index da93645ddc..d8d2328c77 100644 --- a/tests/functional/sse-kms-migration/arnPrefix.js +++ b/tests/functional/sse-kms-migration/arnPrefix.js @@ -34,8 +34,8 @@ describe('SSE KMS arnPrefix', () => { ? bkt.kmsKeyInfo.masterKeyArn : bkt.kmsKeyInfo.masterKeyId; } - await helpers.s3.createBucket(({ Bucket: bkt.name })).promise(); - await helpers.s3.createBucket(({ Bucket: bkt.vname })).promise(); + await helpers.s3.createBucket(({ Bucket: bkt.name })); + await helpers.s3.createBucket(({ Bucket: bkt.vname })); if (bktConf.deleteSSE) { await scenarios.deleteBucketSSEBeforeEach(bkt.name, log); await scenarios.deleteBucketSSEBeforeEach(bkt.vname, log); @@ -46,12 +46,12 @@ describe('SSE KMS arnPrefix', () => { Bucket: bkt.name, ServerSideEncryptionConfiguration: helpers.hydrateSSEConfig({ algo: bktConf.algo, masterKeyId: bkt.kmsKey }), - }).promise(); + }); await helpers.s3.putBucketEncryption({ Bucket: bkt.vname, ServerSideEncryptionConfiguration: helpers.hydrateSSEConfig({ algo: bktConf.algo, masterKeyId: bkt.kmsKey }), - }).promise(); + }); } // Put an object for each SSE conf in each bucket @@ -77,7 +77,7 @@ describe('SSE KMS arnPrefix', () => { before('setup', async () => { console.log('Run arnPrefix', { profile: helpers.credsProfile, accessKeyId: helpers.s3.config.credentials.accessKeyId }); - const allBuckets = (await helpers.s3.listBuckets().promise()).Buckets.map(b => b.Name); + const allBuckets = ((await helpers.s3.listBuckets()).Buckets || []).map(b => b.Name); console.log('List buckets:', 
allBuckets); await helpers.MD.setup(); copyKmsKey = (await helpers.createKmsKey(log)).masterKeyArn; @@ -92,13 +92,13 @@ describe('SSE KMS arnPrefix', () => { } catch (e) { void e; } // init copy bucket - await helpers.s3.createBucket(({ Bucket: copyBkt })).promise(); - await helpers.s3.createBucket(({ Bucket: mpuCopyBkt })).promise(); + await helpers.s3.createBucket(({ Bucket: copyBkt })); + await helpers.s3.createBucket(({ Bucket: mpuCopyBkt })); await helpers.s3.putBucketEncryption({ Bucket: copyBkt, ServerSideEncryptionConfiguration: helpers.hydrateSSEConfig({ algo: 'aws:kms', masterKeyId: copyKmsKey }), - }).promise(); - await helpers.s3.putObject({ Bucket: copyBkt, Key: copyObj, Body: 'BODY(copy)' }).promise(); + }); + await helpers.s3.putObject({ Bucket: copyBkt, Key: copyObj, Body: 'BODY(copy)' }); // Prepare every buckets with 1 object (for copy) await Promise.all(scenarios.testCases.map(async bktConf => this.initBucket(bktConf))); @@ -192,7 +192,7 @@ describe('SSE KMS arnPrefix', () => { it('should encrypt MPU and put 2 encrypted parts', async () => { const mpuKey = `${obj.name}-mpu`; const mpu = await helpers.s3.createMultipartUpload( - helpers.putObjParams(bkt.name, mpuKey, objConf, obj.kmsKey)).promise(); + helpers.putObjParams(bkt.name, mpuKey, objConf, obj.kmsKey)); const partsBody = [`${obj.body}-MPU1`, `${obj.body}-MPU2`]; const newParts = []; for (const [index, body] of partsBody.entries()) { @@ -220,7 +220,7 @@ describe('SSE KMS arnPrefix', () => { it('should encrypt MPU and copy an encrypted parts from encrypted bucket', async () => { const mpuKey = `${obj.name}-mpucopy`; const mpu = await helpers.s3.createMultipartUpload( - helpers.putObjParams(bkt.name, mpuKey, objConf, obj.kmsKey)).promise(); + helpers.putObjParams(bkt.name, mpuKey, objConf, obj.kmsKey)); const part1 = await scenarios.tests.mpuUploadPartCopy({ UploadId: mpu.UploadId, Bucket: bkt.name, @@ -251,7 +251,7 @@ describe('SSE KMS arnPrefix', () => { it('should encrypt MPU and copy an 
encrypted range parts from encrypted bucket', async () => { const mpuKey = `${obj.name}-mpucopy`; const mpu = await helpers.s3.createMultipartUpload( - helpers.putObjParams(bkt.name, mpuKey, objConf, obj.kmsKey)).promise(); + helpers.putObjParams(bkt.name, mpuKey, objConf, obj.kmsKey)); // source body is "BODY(copy)" // [copy, BODY] const sourceRanges = ['bytes=5-8', 'bytes=0-3']; @@ -283,9 +283,9 @@ describe('SSE KMS arnPrefix', () => { it(`should PutObject versioned with SSE ${obj.name}`, async () => { // ensure versioned bucket is empty await helpers.bucketUtil.empty(bkt.vname); - let { Versions } = await helpers.s3.listObjectVersions({ Bucket: bkt.vname }).promise(); + let { Versions } = await helpers.s3.listObjectVersions({ Bucket: bkt.vname }); // regularly count versioned objects - assert.strictEqual(Versions.length, 0); + assert.strictEqual(Versions?.length, 0); const bodyBase = `BODY(${obj.name})-base`; await helpers.putEncryptedObject(bkt.vname, obj.name, objConf, obj.kmsKey, bodyBase); @@ -293,23 +293,23 @@ describe('SSE KMS arnPrefix', () => { await scenarios.assertObjectSSE( { ...baseAssertion, Body: bodyBase }, { objConf, obj }, { bktConf, bkt }, {}, 'after'); - ({ Versions } = await helpers.s3.listObjectVersions({ Bucket: bkt.vname }).promise()); + ({ Versions } = await helpers.s3.listObjectVersions({ Bucket: bkt.vname })); assert.strictEqual(Versions.length, 1); await helpers.s3.putBucketVersioning({ Bucket: bkt.vname, VersioningConfiguration: { Status: 'Enabled' }, - }).promise(); + }); const bodyV1 = `BODY(${obj.name})-v1`; const v1 = await helpers.putEncryptedObject(bkt.vname, obj.name, objConf, obj.kmsKey, bodyV1); const bodyV2 = `BODY(${obj.name})-v2`; const v2 = await helpers.putEncryptedObject(bkt.vname, obj.name, objConf, obj.kmsKey, bodyV2); - ({ Versions } = await helpers.s3.listObjectVersions({ Bucket: bkt.vname }).promise()); + ({ Versions } = await helpers.s3.listObjectVersions({ Bucket: bkt.vname })); 
assert.strictEqual(Versions.length, 3); - const current = await helpers.s3.headObject({ Bucket: bkt.vname, Key: obj.name }).promise(); + const current = await helpers.s3.headObject({ Bucket: bkt.vname, Key: obj.name }); assert.strictEqual(current.VersionId, v2.VersionId); // ensure versioning as expected - ({ Versions } = await helpers.s3.listObjectVersions({ Bucket: bkt.vname }).promise()); + ({ Versions } = await helpers.s3.listObjectVersions({ Bucket: bkt.vname })); assert.strictEqual(Versions.length, 3); await scenarios.assertObjectSSE( @@ -324,12 +324,12 @@ describe('SSE KMS arnPrefix', () => { await scenarios.assertObjectSSE( { ...baseAssertion, VersionId: v2.VersionId, Body: bodyV2 }, { objConf, obj }, { bktConf, bkt }, {}, 'after'); - ({ Versions } = await helpers.s3.listObjectVersions({ Bucket: bkt.vname }).promise()); + ({ Versions } = await helpers.s3.listObjectVersions({ Bucket: bkt.vname })); assert.strictEqual(Versions.length, 3); await helpers.s3.putBucketVersioning({ Bucket: bkt.vname, VersioningConfiguration: { Status: 'Suspended' }, - }).promise(); + }); // should be fine after version suspension await scenarios.assertObjectSSE( @@ -344,7 +344,7 @@ describe('SSE KMS arnPrefix', () => { await scenarios.assertObjectSSE( { ...baseAssertion, VersionId: v2.VersionId, Body: bodyV2 }, { objConf, obj }, { bktConf, bkt }, {}, 'after'); - ({ Versions } = await helpers.s3.listObjectVersions({ Bucket: bkt.vname }).promise()); + ({ Versions } = await helpers.s3.listObjectVersions({ Bucket: bkt.vname })); assert.strictEqual(Versions.length, 3); // put a new null version @@ -356,7 +356,7 @@ describe('SSE KMS arnPrefix', () => { await scenarios.assertObjectSSE( { ...baseAssertion, Body: bodyFinal }, { objConf, obj }, { bktConf, bkt }, 'null', 'after'); - ({ Versions } = await helpers.s3.listObjectVersions({ Bucket: bkt.vname }).promise()); + ({ Versions } = await helpers.s3.listObjectVersions({ Bucket: bkt.vname })); assert.strictEqual(Versions.length, 3); }); 
})); @@ -367,10 +367,10 @@ describe('SSE KMS arnPrefix', () => { Bucket: mpuCopyBkt, // AES256 because input key is broken for now ServerSideEncryptionConfiguration: helpers.hydrateSSEConfig({ algo: 'AES256' }), - }).promise(); + }); const mpuKey = 'mpucopy'; const mpu = await helpers.s3.createMultipartUpload( - helpers.putObjParams(mpuCopyBkt, mpuKey, {}, null)).promise(); + helpers.putObjParams(mpuCopyBkt, mpuKey, {}, null)); const copyPartArg = { UploadId: mpu.UploadId, Bucket: mpuCopyBkt, @@ -388,7 +388,7 @@ describe('SSE KMS arnPrefix', () => { ...copyPartArg, PartNumber: partNumber, CopySource: `${bkt.name}/${obj.name}`, - }).promise(); + }); return { partNumber, body: obj.body, res: res.CopyPartResult }; })); @@ -403,7 +403,7 @@ describe('SSE KMS arnPrefix', () => { MultipartUpload: { Parts: parts.map(part => ({ PartNumber: part.partNumber, ETag: part.res.ETag })), }, - }).promise(); + }); const assertion = { Bucket: mpuCopyBkt, Key: mpuKey, @@ -421,11 +421,11 @@ describe('ensure MPU use good SSE', () => { before(async () => { kmsKeympuKmsBkt = (await helpers.createKmsKey(log)).masterKeyArn; await helpers.MD.setup(); - await helpers.s3.createBucket({ Bucket: mpuKmsBkt }).promise(); + await helpers.s3.createBucket({ Bucket: mpuKmsBkt }); await helpers.s3.putBucketEncryption({ Bucket: mpuKmsBkt, ServerSideEncryptionConfiguration: - helpers.hydrateSSEConfig({ algo: 'aws:kms', masterKeyId: kmsKeympuKmsBkt }) }).promise(); + helpers.hydrateSSEConfig({ algo: 'aws:kms', masterKeyId: kmsKeympuKmsBkt }) }); }); after(async () => { @@ -435,11 +435,11 @@ describe('ensure MPU use good SSE', () => { it('mpu upload part should fail with sse header', async () => { const key = 'mpuKeyBadUpload'; const mpu = await helpers.s3.createMultipartUpload({ - Bucket: mpuKmsBkt, Key: key }).promise(); + Bucket: mpuKmsBkt, Key: key }); const res = await promisify(makeRequest)({ method: 'PUT', - hostname: helpers.s3.endpoint.hostname, - port: helpers.s3.endpoint.port, + hostname: 
helpers.s3.config.endpoint.hostname, + port: helpers.s3.config.endpoint.port, path: `/${mpuKmsBkt}/${key}`, headers: { 'content-length': 4, @@ -465,7 +465,7 @@ describe('ensure MPU use good SSE', () => { const key = 'mpuKey'; const mpuKms = (await helpers.createKmsKey(log)).masterKeyArn; const mpu = await helpers.s3.createMultipartUpload({ - Bucket: mpuKmsBkt, Key: key, ServerSideEncryption: 'aws:kms', SSEKMSKeyId: mpuKms }).promise(); + Bucket: mpuKmsBkt, Key: key, ServerSideEncryption: 'aws:kms', SSEKMSKeyId: mpuKms }); assert.strictEqual(mpu.ServerSideEncryption, 'aws:kms'); assert.strictEqual(mpu.SSEKMSKeyId, helpers.getKey(mpuKms)); @@ -560,25 +560,25 @@ describe('KMS error', () => { } before(async () => { - await helpers.s3.createBucket({ Bucket }).promise(); + await helpers.s3.createBucket({ Bucket }); await helpers.s3.putObject({ ...helpers.putObjParams(Bucket, 'plaintext', {}, null), Body: body, - }).promise(); + }); mpuPlaintext = await helpers.s3.createMultipartUpload( - helpers.putObjParams(Bucket, 'mpuPlaintext', {}, null)).promise(); + helpers.putObjParams(Bucket, 'mpuPlaintext', {}, null)); ({ masterKeyId, masterKeyArn } = await helpers.createKmsKey(log)); await helpers.putEncryptedObject(Bucket, Key, sseConfig, masterKeyArn, body); // ensure we can decrypt and read the object - const obj = await helpers.s3.getObject({ Bucket, Key }).promise(); + const obj = await helpers.s3.getObject({ Bucket, Key }); assert.strictEqual(obj.Body.toString(), body); mpuEncrypted = await helpers.s3.createMultipartUpload( - helpers.putObjParams(Bucket, 'mpuEncrypted', sseConfig, masterKeyArn)).promise(); + helpers.putObjParams(Bucket, 'mpuEncrypted', sseConfig, masterKeyArn)); // make key unavailable await helpers.destroyKmsKey(masterKeyArn, log); @@ -602,12 +602,12 @@ describe('KMS error', () => { }, { action: 'getObject', kmsAction: 'Decrypt', - fct: async () => helpers.s3.getObject({ Bucket, Key }).promise(), + fct: async () => helpers.s3.getObject({ Bucket, Key }) , 
}, { action: 'copyObject', detail: ' when getting from source', kmsAction: 'Decrypt', fct: async () => - helpers.s3.copyObject({ Bucket, Key: 'copy', CopySource: `${Bucket}/${Key}` }).promise(), + helpers.s3.copyObject({ Bucket, Key: 'copy', CopySource: `${Bucket}/${Key}` }) , }, { action: 'copyObject', detail: ' when putting to destination', kmsAction: 'Encrypt', @@ -617,12 +617,12 @@ describe('KMS error', () => { CopySource: `${Bucket}/plaintext`, ServerSideEncryption: 'aws:kms', SSEKMSKeyId: masterKeyArn, - }).promise(), + }) , }, { action: 'createMPU', kmsAction: 'Encrypt', fct: async ({ masterKeyArn }) => helpers.s3.createMultipartUpload( - helpers.putObjParams(Bucket, 'mpuKeyEncryptedFail', sseConfig, masterKeyArn)).promise(), + helpers.putObjParams(Bucket, 'mpuKeyEncryptedFail', sseConfig, masterKeyArn)) , }, { action: 'mpu uploadPartCopy', detail: ' when getting from source', kmsAction: 'Decrypt', @@ -632,7 +632,7 @@ describe('KMS error', () => { Key: 'mpuPlaintext', PartNumber: 1, CopySource: `${Bucket}/${Key}`, - }).promise(), + }) , }, { action: 'mpu uploadPart', detail: ' when putting to destination', kmsAction: 'Encrypt', @@ -642,7 +642,7 @@ describe('KMS error', () => { Key: 'mpuEncrypted', PartNumber: 1, Body: body, - }).promise(), + }), }, { action: 'mpu uploadPartCopy', detail: ' when putting to destination', kmsAction: 'Encrypt', @@ -652,7 +652,7 @@ describe('KMS error', () => { Key: 'mpuEncrypted', PartNumber: 1, CopySource: `${Bucket}/plaintext`, - }).promise(), + }), }, ]; diff --git a/tests/functional/sse-kms-migration/beforeMigration.js b/tests/functional/sse-kms-migration/beforeMigration.js index e2c06272d1..2c45aca6bd 100644 --- a/tests/functional/sse-kms-migration/beforeMigration.js +++ b/tests/functional/sse-kms-migration/beforeMigration.js @@ -40,20 +40,20 @@ describe('SSE KMS before migration', () => { ? 
bkt.kmsKeyInfo.masterKeyArn : bkt.kmsKeyInfo.masterKeyId; } - await helpers.s3.createBucket(({ Bucket: bkt.name })).promise(); - await helpers.s3.createBucket(({ Bucket: bkt.vname })).promise(); + await helpers.s3.createBucket({ Bucket: bkt.name }); + await helpers.s3.createBucket({ Bucket: bkt.vname }); if (bktConf.algo) { // bucket encryption will be asserted in bucket test await helpers.s3.putBucketEncryption({ Bucket: bkt.name, ServerSideEncryptionConfiguration: helpers.hydrateSSEConfig({ algo: bktConf.algo, masterKeyId: bkt.kmsKey }), - }).promise(); + }); await helpers.s3.putBucketEncryption({ Bucket: bkt.vname, ServerSideEncryptionConfiguration: helpers.hydrateSSEConfig({ algo: bktConf.algo, masterKeyId: bkt.kmsKey }), - }).promise(); + }); } // Put an object for each SSE conf in each bucket @@ -79,18 +79,18 @@ describe('SSE KMS before migration', () => { before(async () => { console.log('Run before migration', { profile: helpers.credsProfile, accessKeyId: helpers.s3.config.credentials.accessKeyId }); - const allBuckets = (await helpers.s3.listBuckets().promise()).Buckets.map(b => b.Name); + const allBuckets = ((await helpers.s3.listBuckets({})).Buckets || []).map(b => b.Name); console.log('List buckets:', allBuckets); await promisify(metadata.setup.bind(metadata))(); // init copy bucket - await helpers.s3.createBucket(({ Bucket: copyBkt })).promise(); - await helpers.s3.createBucket(({ Bucket: mpuCopyBkt })).promise(); + await helpers.s3.createBucket({ Bucket: copyBkt }); + await helpers.s3.createBucket({ Bucket: mpuCopyBkt }); await helpers.s3.putBucketEncryption({ Bucket: copyBkt, ServerSideEncryptionConfiguration: helpers.hydrateSSEConfig({ algo: 'aws:kms', masterKeyId: copyKmsKey }), - }).promise(); - await helpers.s3.putObject({ Bucket: copyBkt, Key: copyObj, Body: 'BODY(copy)' }).promise(); + }); + await helpers.s3.putObject({ Bucket: copyBkt, Key: copyObj, Body: 'BODY(copy)' }); // Prepare every buckets with 1 object (for copy) await 
Promise.all(scenarios.testCases.map(async bktConf => this.initBucket(bktConf))); @@ -172,7 +172,7 @@ describe('SSE KMS before migration', () => { optionalSkip('should encrypt MPU and put 2 encrypted parts', async () => { const mpuKey = `${obj.name}-mpu`; const mpu = await helpers.s3.createMultipartUpload( - helpers.putObjParams(bkt.name, mpuKey, objConf, obj.kmsKey)).promise(); + helpers.putObjParams(bkt.name, mpuKey, objConf, obj.kmsKey)); const partsBody = [`${obj.body}-MPU1`, `${obj.body}-MPU2`]; const newParts = []; for (const [index, body] of partsBody.entries()) { @@ -200,7 +200,7 @@ describe('SSE KMS before migration', () => { optionalSkip('should encrypt MPU and copy an encrypted parts from encrypted bucket', async () => { const mpuKey = `${obj.name}-mpucopy`; const mpu = await helpers.s3.createMultipartUpload( - helpers.putObjParams(bkt.name, mpuKey, objConf, obj.kmsKey)).promise(); + helpers.putObjParams(bkt.name, mpuKey, objConf, obj.kmsKey)); const part1 = await scenarios.tests.mpuUploadPartCopy({ UploadId: mpu.UploadId, Bucket: bkt.name, @@ -231,7 +231,7 @@ describe('SSE KMS before migration', () => { optionalSkip('should encrypt MPU and copy an encrypted range parts from encrypted bucket', async () => { const mpuKey = `${obj.name}-mpucopyrange`; const mpu = await helpers.s3.createMultipartUpload( - helpers.putObjParams(bkt.name, mpuKey, objConf, obj.kmsKey)).promise(); + helpers.putObjParams(bkt.name, mpuKey, objConf, obj.kmsKey)); // source body is "BODY(copy)" // [copy, BODY] const sourceRanges = ['bytes=5-8', 'bytes=0-3']; @@ -263,13 +263,13 @@ describe('SSE KMS before migration', () => { optionalSkip('should prepare empty encrypted MPU without completion', async () => { const mpuKey = `${obj.name}-migration-mpu-empty`; await helpers.s3.createMultipartUpload( - helpers.putObjParams(bkt.name, mpuKey, objConf, obj.kmsKey)).promise(); + helpers.putObjParams(bkt.name, mpuKey, objConf, obj.kmsKey)); }); - optionalSkip('should prepare encrypte MPU and 
put 2 encrypted parts without completion', async () => { + optionalSkip('should prepare encrypted MPU and put 2 encrypted parts without completion', async () => { const mpuKey = `${obj.name}-migration-mpu`; const mpu = await helpers.s3.createMultipartUpload( - helpers.putObjParams(bkt.name, mpuKey, objConf, obj.kmsKey)).promise(); + helpers.putObjParams(bkt.name, mpuKey, objConf, obj.kmsKey)); const partsBody = [`${obj.body}-MPU1`, `${obj.body}-MPU2`]; for (const [index, body] of partsBody.entries()) { await scenarios.tests.mpuUploadPart({ @@ -286,7 +286,7 @@ describe('SSE KMS before migration', () => { 'from encrypted bucket without completion', async () => { const mpuKey = `${obj.name}-migration-mpucopy`; const mpu = await helpers.s3.createMultipartUpload( - helpers.putObjParams(bkt.name, mpuKey, objConf, obj.kmsKey)).promise(); + helpers.putObjParams(bkt.name, mpuKey, objConf, obj.kmsKey)); await scenarios.tests.mpuUploadPartCopy({ UploadId: mpu.UploadId, Bucket: bkt.name, @@ -303,11 +303,11 @@ describe('SSE KMS before migration', () => { }, mpu, objConf.algo || bktConf.algo, 'before'); }); - optionalSkip('should prepare encrypte MPU and copy an encrypted range parts ' + + optionalSkip('should prepare encrypted MPU and copy an encrypted range parts ' + 'from encrypted bucket without completion', async () => { const mpuKey = `${obj.name}-migration-mpucopyrange`; const mpu = await helpers.s3.createMultipartUpload( - helpers.putObjParams(bkt.name, mpuKey, objConf, obj.kmsKey)).promise(); + helpers.putObjParams(bkt.name, mpuKey, objConf, obj.kmsKey)); // source body is "BODY(copy)" // [copy, BODY] const sourceRanges = ['bytes=5-8', 'bytes=0-3']; @@ -326,7 +326,7 @@ describe('SSE KMS before migration', () => { it(`should PutObject versioned with SSE ${obj.name}`, async () => { // ensure versioned bucket is empty await helpers.bucketUtil.empty(bkt.vname); - let { Versions } = await helpers.s3.listObjectVersions({ Bucket: bkt.vname }).promise(); + let { Versions } = 
await helpers.s3.listObjectVersions({ Bucket: bkt.vname }) || []; // regularly count versioned objects assert.strictEqual(Versions.length, 0); @@ -336,23 +336,23 @@ describe('SSE KMS before migration', () => { await scenarios.assertObjectSSE( { ...baseAssertion, Body: bodyBase }, { objConf, obj }, { bktConf, bkt }); - ({ Versions } = await helpers.s3.listObjectVersions({ Bucket: bkt.vname }).promise()); + ({ Versions } = await helpers.s3.listObjectVersions({ Bucket: bkt.vname })); assert.strictEqual(Versions.length, 1); await helpers.s3.putBucketVersioning({ Bucket: bkt.vname, VersioningConfiguration: { Status: 'Enabled' }, - }).promise(); + }); const bodyV1 = `BODY(${obj.name})-v1`; const v1 = await helpers.putEncryptedObject(bkt.vname, obj.name, objConf, obj.kmsKey, bodyV1); const bodyV2 = `BODY(${obj.name})-v2`; const v2 = await helpers.putEncryptedObject(bkt.vname, obj.name, objConf, obj.kmsKey, bodyV2); - ({ Versions } = await helpers.s3.listObjectVersions({ Bucket: bkt.vname }).promise()); + ({ Versions } = await helpers.s3.listObjectVersions({ Bucket: bkt.vname })); assert.strictEqual(Versions.length, 3); - const current = await helpers.s3.headObject({ Bucket: bkt.vname, Key: obj.name }).promise(); + const current = await helpers.s3.headObject({ Bucket: bkt.vname, Key: obj.name }); assert.strictEqual(current.VersionId, v2.VersionId); // ensure versioning as expected - ({ Versions } = await helpers.s3.listObjectVersions({ Bucket: bkt.vname }).promise()); + ({ Versions } = await helpers.s3.listObjectVersions({ Bucket: bkt.vname })); assert.strictEqual(Versions.length, 3); await scenarios.assertObjectSSE( @@ -363,12 +363,12 @@ describe('SSE KMS before migration', () => { { ...baseAssertion, VersionId: v1.VersionId, Body: bodyV1 }, { objConf, obj }, { bktConf, bkt }); await scenarios.assertObjectSSE( { ...baseAssertion, VersionId: v2.VersionId, Body: bodyV2 }, { objConf, obj }, { bktConf, bkt }); - ({ Versions } = await helpers.s3.listObjectVersions({ Bucket: 
bkt.vname }).promise()); + ({ Versions } = await helpers.s3.listObjectVersions({ Bucket: bkt.vname })); assert.strictEqual(Versions.length, 3); await helpers.s3.putBucketVersioning({ Bucket: bkt.vname, VersioningConfiguration: { Status: 'Suspended' }, - }).promise(); + }); // should be fine after version suspension await scenarios.assertObjectSSE( @@ -379,7 +379,7 @@ describe('SSE KMS before migration', () => { { ...baseAssertion, VersionId: v1.VersionId, Body: bodyV1 }, { objConf, obj }, { bktConf, bkt }); await scenarios.assertObjectSSE( { ...baseAssertion, VersionId: v2.VersionId, Body: bodyV2 }, { objConf, obj }, { bktConf, bkt }); - ({ Versions } = await helpers.s3.listObjectVersions({ Bucket: bkt.vname }).promise()); + ({ Versions } = await helpers.s3.listObjectVersions({ Bucket: bkt.vname })); assert.strictEqual(Versions.length, 3); // put a new null version @@ -389,7 +389,7 @@ describe('SSE KMS before migration', () => { { ...baseAssertion, Body: bodyFinal }, { objConf, obj }, { bktConf, bkt }); // null await scenarios.assertObjectSSE( { ...baseAssertion, Body: bodyFinal }, { objConf, obj }, { bktConf, bkt }, 'null'); - ({ Versions } = await helpers.s3.listObjectVersions({ Bucket: bkt.vname }).promise()); + ({ Versions } = await helpers.s3.listObjectVersions({ Bucket: bkt.vname })); assert.strictEqual(Versions.length, 3); }); })); @@ -401,10 +401,10 @@ describe('SSE KMS before migration', () => { Bucket: mpuCopyBkt, // AES256 because input key is broken for now ServerSideEncryptionConfiguration: helpers.hydrateSSEConfig({ algo: 'AES256' }), - }).promise(); + }); const mpuKey = 'mpucopy'; const mpu = await helpers.s3.createMultipartUpload( - helpers.putObjParams(mpuCopyBkt, mpuKey, {}, null)).promise(); + helpers.putObjParams(mpuCopyBkt, mpuKey, {}, null)); const copyPartArg = { UploadId: mpu.UploadId, Bucket: mpuCopyBkt, @@ -422,7 +422,7 @@ describe('SSE KMS before migration', () => { ...copyPartArg, PartNumber: partNumber, CopySource: 
`${bkt.name}/${obj.name}`, - }).promise(); + }); return { partNumber, body: obj.body, res: res.CopyPartResult }; })); diff --git a/tests/functional/sse-kms-migration/cleanup.js b/tests/functional/sse-kms-migration/cleanup.js index a184255e0b..d1e5cb32e2 100644 --- a/tests/functional/sse-kms-migration/cleanup.js +++ b/tests/functional/sse-kms-migration/cleanup.js @@ -18,7 +18,7 @@ describe('SSE KMS Cleanup', () => { it('Empty and delete buckets for SSE KMS Migration', async () => { console.log('Run cleanup', { profile: helpers.credsProfile, accessKeyId: helpers.s3.config.credentials.accessKeyId }); - const allBuckets = (await helpers.s3.listBuckets().promise()).Buckets.map(b => b.Name); + const allBuckets = ((await helpers.s3.listBuckets()).Buckets || []).map(b => b.Name); console.log('List buckets:', allBuckets); await helpers.MD.setup(); diff --git a/tests/functional/sse-kms-migration/helpers.js b/tests/functional/sse-kms-migration/helpers.js index bd9ad70c91..d471829861 100644 --- a/tests/functional/sse-kms-migration/helpers.js +++ b/tests/functional/sse-kms-migration/helpers.js @@ -1,5 +1,28 @@ const getConfig = require('../aws-node-sdk/test/support/config'); -const { S3 } = require('aws-sdk'); +const { S3Client, + CreateBucketCommand, + DeleteBucketCommand, + PutBucketEncryptionCommand, + GetBucketEncryptionCommand, + CopyObjectCommand, + ListObjectVersionsCommand, + HeadObjectCommand, + ListBucketsCommand, + ListMultipartUploadsCommand, + ListPartsCommand, + GetObjectCommand, + PutObjectCommand, + CreateMultipartUploadCommand, + UploadPartCommand, + UploadPartCopyCommand, + CompleteMultipartUploadCommand, + PutBucketVersioningCommand, + HeadBucketCommand, +} = require('@aws-sdk/client-s3'); +const { NodeHttpHandler } = require('@smithy/node-http-handler'); +const { StandardRetryStrategy } = require('@aws-sdk/middleware-retry'); +const { Agent: HttpAgent } = require('http'); +const { Agent: HttpsAgent } = require('https'); const kms = 
require('../../../lib/kms/wrapper'); const { promisify } = require('util'); const { DummyRequestLogger } = require('../../unit/helpers'); @@ -14,20 +14,121 @@ function getKey(key) { return config.kmsHideScalityArn ? getKeyIdFromArn(key) : key; } -// for Integration use default profile, in cloudserver use vault profile +// For Integration use default profile, in cloudserver use vault profile const credsProfile = process.env.S3_END_TO_END === 'true' ? 'default' : 'vault'; -const s3config = getConfig(credsProfile, { signatureVersion: 'v4' }); -const s3 = new S3(s3config); + +// Create custom agents with specific pooling settings +const httpAgent = new HttpAgent({ + keepAlive: true, + keepAliveMsecs: 30000, + maxSockets: 50, + maxFreeSockets: 10, + timeout: 120000, +}); + +const httpsAgent = new HttpsAgent({ + keepAlive: true, + keepAliveMsecs: 30000, + maxSockets: 50, + maxFreeSockets: 10, + timeout: 120000, +}); + +const s3config = { + ...getConfig(credsProfile, {}), + requestHandler: new NodeHttpHandler({ + connectionTimeout: 120000, + socketTimeout: 120000, + httpAgent, + httpsAgent, + }), + maxAttempts: 8, + retryStrategy: new StandardRetryStrategy(() => Promise.resolve(8), { + // maxAttempts is supplied via the provider (first argument) above + retryDecider: error => + ( + error.code === 'ECONNREFUSED' || + error.code === 'ECONNRESET' || + error.name === 'TimeoutError' || + error.message?.includes('socket hang up') || + error.code === 'ThrottlingException' || + error.code === 'RequestTimeout' + ) + , + delayDecider: attempts => Math.min(1000 * Math.pow(2, attempts), 30000), // Exponential backoff + }), +}; + +const s3Client = new S3Client(s3config); + +// Remove logger middleware if present (identify() entries are "name: tags") +if (s3Client.middlewareStack.identify().some(id => id.includes('loggerMiddleware'))) { + s3Client.middlewareStack.remove('loggerMiddleware'); +} + const bucketUtil = new BucketUtility(credsProfile); +// Wrapper for SDK v3 commands to return promises directly +const wrap = exec => exec(); +const s3 = { + createBucket: params => wrap(() => s3Client.send(new 
CreateBucketCommand(params))), + putBucketEncryption: params => wrap(() => s3Client.send(new PutBucketEncryptionCommand(params))), + getBucketEncryption: params => wrap(() => s3Client.send(new GetBucketEncryptionCommand(params))), + putObject: params => wrap(() => s3Client.send(new PutObjectCommand(params))), + getObject: params => wrap(async () => { + const response = await s3Client.send(new GetObjectCommand(params)); + const body = await response.Body.transformToString(); + return { ...response, Body: body }; + }), + listBuckets: params => wrap(() => s3Client.send(new ListBucketsCommand(params || {}))), + copyObject: params => wrap(() => s3Client.send(new CopyObjectCommand(params))), + listObjectVersions: params => wrap(async () => { + const response = await s3Client.send(new ListObjectVersionsCommand(params)); + return { + ...response, + Versions: response.Versions || [], + DeleteMarkers: response.DeleteMarkers || [], + CommonPrefixes: response.CommonPrefixes || [] + }; + }), + headObject: params => wrap(() => s3Client.send(new HeadObjectCommand(params))), + createMultipartUpload: params => wrap(() => s3Client.send(new CreateMultipartUploadCommand(params))), + uploadPart: params => wrap(() => s3Client.send(new UploadPartCommand(params))), + uploadPartCopy: params => wrap(() => s3Client.send(new UploadPartCopyCommand(params))), + completeMultipartUpload: params => wrap(() => s3Client.send(new CompleteMultipartUploadCommand(params))), + putBucketVersioning: params => wrap(() => s3Client.send(new PutBucketVersioningCommand(params))), + headBucket: params => wrap(() => s3Client.send(new HeadBucketCommand(params))), + listMultipartUploads: params => wrap(async () => { + const response = await s3Client.send(new ListMultipartUploadsCommand(params)); + return { + ...response, + Uploads: response.Uploads || [], + CommonPrefixes: response.CommonPrefixes || [] + }; + }), + listParts: params => wrap(() => s3Client.send(new ListPartsCommand(params))), + _compat: 
bucketUtil.s3, + config: { + credentials: s3config.credentials || { + accessKeyId: s3config.accessKeyId, + secretAccessKey: s3config.secretAccessKey, + }, + endpoint: { + hostname: s3config.endpoint.hostname, + port: s3config.port, + }, + }, +}; + function hydrateSSEConfig({ algo: SSEAlgorithm, masterKeyId: KMSMasterKeyID }) { - // stringify and parse to strip undefined values + // Stringify and parse to strip undefined values return JSON.parse(JSON.stringify({ Rules: [{ - ApplyServerSideEncryptionByDefault: { - SSEAlgorithm, - KMSMasterKeyID, - }, - }] })); + ApplyServerSideEncryptionByDefault: { + SSEAlgorithm, + KMSMasterKeyID, + }, + }], + })); } function putObjParams(Bucket, Key, sseConfig, kmsKeyId) { @@ -51,29 +175,29 @@ const MD = { }; async function getBucketSSE(Bucket) { - const sse = await s3.getBucketEncryption({ Bucket }).promise(); - return sse - .ServerSideEncryptionConfiguration - .Rules[0] - .ApplyServerSideEncryptionByDefault; + try { + const sse = await s3Client.send(new GetBucketEncryptionCommand({ Bucket })); + return sse.ServerSideEncryptionConfiguration.Rules[0].ApplyServerSideEncryptionByDefault; + } catch (error) { + if (error.name === 'ServerSideEncryptionConfigurationNotFoundError') { + return null; + } + throw error; + } } async function putEncryptedObject(Bucket, Key, sseConfig, kmsKeyId, Body) { - return s3.putObject({ + return s3Client.send(new PutObjectCommand({ ...putObjParams(Bucket, Key, sseConfig, kmsKeyId), Body, - }).promise(); + })); } async function getObjectMDSSE(Bucket, Key) { const objMD = await MD.getObject(Bucket, Key, {}, log); - - const sse = objMD['x-amz-server-side-encryption']; - const key = objMD['x-amz-server-side-encryption-aws-kms-key-id']; - return { - ServerSideEncryption: sse, - SSEKMSKeyId: key, + ServerSideEncryption: objMD['x-amz-server-side-encryption'], + SSEKMSKeyId: objMD['x-amz-server-side-encryption-aws-kms-key-id'], }; } @@ -97,7 +221,7 @@ const destroyKmsKey = promisify(kms.destroyBucketKey); async 
function cleanup(Bucket) { await bucketUtil.empty(Bucket); - await s3.deleteBucket({ Bucket }).promise(); + await s3Client.send(new DeleteBucketCommand({ Bucket })); } module.exports = { @@ -105,6 +229,7 @@ module.exports = { getKey, credsProfile, s3, + s3Client, bucketUtil, hydrateSSEConfig, putObjParams, diff --git a/tests/functional/sse-kms-migration/load.js b/tests/functional/sse-kms-migration/load.js index 9ae2b43eed..5a88b6eb50 100644 --- a/tests/functional/sse-kms-migration/load.js +++ b/tests/functional/sse-kms-migration/load.js @@ -103,12 +103,12 @@ describe(`KMS load (kmip cluster ${KMS_NODES} nodes): ${OBJECT_NUMBER const Bucket = `kms-load-${i}`; const { masterKeyArn } = await helpers.createKmsKey(log); - await helpers.s3.createBucket({ Bucket }).promise(); + await helpers.s3.createBucket({ Bucket }); await helpers.s3.putBucketEncryption({ Bucket, ServerSideEncryptionConfiguration: helpers.hydrateSSEConfig({ algo: 'aws:kms', masterKeyId: masterKeyArn }), - }).promise(); + }); return { Bucket, masterKeyArn }; })); @@ -184,7 +184,7 @@ describe(`KMS load (kmip cluster ${KMS_NODES} nodes): ${OBJECT_NUMBER await (Promise.all( buckets.map(async ({ Bucket }) => Promise.all( new Array(OBJECT_NUMBER).fill(0).map(async (_, i) => - helpers.s3.putObject({ Bucket, Key: `obj-${i}`, Body: `body-${i}` }).promise()) + helpers.s3.putObject({ Bucket, Key: `obj-${i}`, Body: `body-${i}` })) )) )); await assertRepartition(closePromise); @@ -194,7 +194,7 @@ describe(`KMS load (kmip cluster ${KMS_NODES} nodes): ${OBJECT_NUMBER await Promise.all( buckets.map(async ({ Bucket }) => Promise.all( new Array(OBJECT_NUMBER).fill(0).map(async (_, i) => - helpers.s3.getObject({ Bucket, Key: `obj-${i}` }).promise()) + helpers.s3.getObject({ Bucket, Key: `obj-${i}` })) )) ); await assertRepartition(closePromise); diff --git a/tests/functional/sse-kms-migration/migration.js b/tests/functional/sse-kms-migration/migration.js index 1fedacaee0..f34748c223 100644 --- 
a/tests/functional/sse-kms-migration/migration.js +++ b/tests/functional/sse-kms-migration/migration.js @@ -23,7 +23,7 @@ async function assertObjectSSE( { arnPrefix = kms.arnPrefix, put, headers } = { arnPrefix: kms.arnPrefix }, ) { const sseMD = await helpers.getObjectMDSSE(Bucket, Key); - const head = await helpers.s3.headObject({ Bucket, Key, VersionId }).promise(); + const head = await helpers.s3.headObject({ Bucket, Key, VersionId }); const sseMDMigrated = await helpers.getObjectMDSSE(Bucket, Key); const expectedKey = `${sseMD.SSEKMSKeyId && isScalityKmsArn(sseMD.SSEKMSKeyId) ? '' : arnPrefix}${sseMD.SSEKMSKeyId}`; @@ -58,7 +58,7 @@ async function assertObjectSSE( } // always verify GetObject as well to ensure acurate decryption - const get = await helpers.s3.getObject({ Bucket, Key, ...(VersionId && { VersionId }) }).promise(); + const get = await helpers.s3.getObject({ Bucket, Key, ...(VersionId && { VersionId }) }); assert.strictEqual(get.Body.toString(), Body); } @@ -86,8 +86,8 @@ describe('SSE KMS migration', () => { ? 
bkt.kmsKeyInfo.masterKeyArn : bkt.kmsKeyInfo.masterKeyId; } - await helpers.s3.headBucket(({ Bucket: bkt.name })).promise(); - await helpers.s3.headBucket(({ Bucket: bkt.vname })).promise(); + await helpers.s3.headBucket(({ Bucket: bkt.name })); + await helpers.s3.headBucket(({ Bucket: bkt.vname })); if (bktConf.algo) { const bktSSE = await helpers.getBucketSSE(bkt.name); assert.strictEqual(bktSSE.SSEAlgorithm, bktConf.algo); @@ -127,12 +127,12 @@ describe('SSE KMS migration', () => { before('setup', async () => { console.log('Run migration', { profile: helpers.credsProfile, accessKeyId: helpers.s3.config.credentials.accessKeyId }); - const allBuckets = (await helpers.s3.listBuckets().promise()).Buckets.map(b => b.Name); + const allBuckets = (await helpers.s3.listBuckets()).Buckets.map(b => b.Name); console.log('List buckets:', allBuckets); await helpers.MD.setup(); - await helpers.s3.headBucket({ Bucket: copyBkt }).promise(); - await helpers.s3.headBucket(({ Bucket: mpuCopyBkt })).promise(); - const copySSE = await helpers.s3.getBucketEncryption({ Bucket: copyBkt }).promise(); + await helpers.s3.headBucket({ Bucket: copyBkt }); + await helpers.s3.headBucket(({ Bucket: mpuCopyBkt })); + const copySSE = await helpers.s3.getBucketEncryption({ Bucket: copyBkt }); const { SSEAlgorithm, KMSMasterKeyID } = copySSE .ServerSideEncryptionConfiguration.Rules[0].ApplyServerSideEncryptionByDefault; assert.strictEqual(SSEAlgorithm, 'aws:kms'); @@ -209,7 +209,7 @@ describe('SSE KMS migration', () => { const mpus = {}; before('retrieve MPUS', async () => { - const listed = await helpers.s3.listMultipartUploads({ Bucket: bkt.name }).promise(); + const listed = await helpers.s3.listMultipartUploads({ Bucket: bkt.name }); assert.strictEqual(listed.IsTruncated, false, 'Too much MPUs, need to loop on pagination'); for (const mpu of listed.Uploads) { mpus[mpu.Key] = mpu.UploadId; @@ -267,7 +267,7 @@ describe('SSE KMS migration', () => { const mpuOverviewMDSSE = await 
helpers.getObjectMDSSE(MPUBucketName, longMPUIdentifier); const existingParts = await helpers.s3.listParts({ - Bucket: bkt.name, Key: mpuKey, UploadId: uploadId }).promise(); + Bucket: bkt.name, Key: mpuKey, UploadId: uploadId }); const partCount = (existingParts.Parts || []).length || 0; assert.strictEqual(existingParts.IsTruncated, false, 'Too much parts, need to loop on pagination'); assert.strictEqual(partCount, expectedExistingParts); @@ -384,7 +384,7 @@ describe('SSE KMS migration', () => { it('should finish ongoing encrypted MPU by copy parts from all bkt and objects matrice', async () => { const mpuKey = 'mpucopy'; - const listed = await helpers.s3.listMultipartUploads({ Bucket: mpuCopyBkt }).promise(); + const listed = await helpers.s3.listMultipartUploads({ Bucket: mpuCopyBkt }); assert.strictEqual(listed.IsTruncated, false, 'Too much MPUs, need to loop on pagination'); assert.strictEqual(listed.Uploads.length, 1, 'There should be only one MPU for global copy'); const uploadId = listed.Uploads[0].UploadId; @@ -394,7 +394,7 @@ describe('SSE KMS migration', () => { Key: mpuKey, }; - const existingParts = await helpers.s3.listParts(copyPartArg).promise(); + const existingParts = await helpers.s3.listParts(copyPartArg); const partCount = (existingParts.Parts || []).length || 0; assert.strictEqual(existingParts.IsTruncated, false, 'Too much parts, need to loop on pagination'); assert.strictEqual(partCount, scenarios.testCases.length * scenarios.testCasesObj.length); @@ -411,7 +411,7 @@ describe('SSE KMS migration', () => { ...copyPartArg, PartNumber: partNumber, CopySource: `${bkt.name}/${obj.name}`, - }).promise(); + }); return { partNumber, body: obj.body, res: res.CopyPartResult }; })); @@ -429,7 +429,7 @@ describe('SSE KMS migration', () => { ...parts.map(part => ({ PartNumber: part.partNumber, ETag: part.res.ETag })), ], }, - }).promise(); + }); const assertion = { Bucket: mpuCopyBkt, Key: mpuKey, diff --git 
a/tests/functional/sse-kms-migration/scenarios.js b/tests/functional/sse-kms-migration/scenarios.js index cf8f9e923b..0b14234286 100644 --- a/tests/functional/sse-kms-migration/scenarios.js +++ b/tests/functional/sse-kms-migration/scenarios.js @@ -48,7 +48,7 @@ async function assertObjectSSE( { arnPrefix = kms.arnPrefix, headers } = { arnPrefix: kms.arnPrefix }, testCase, ) { - const head = await helpers.s3.headObject({ Bucket, Key, VersionId }).promise(); + const head = await helpers.s3.headObject({ Bucket, Key, VersionId }); const sseMD = await helpers.getObjectMDSSE(Bucket, Key); const arnPrefixReg = new RegExp(`^${arnPrefix}`); @@ -83,7 +83,7 @@ async function assertObjectSSE( } // always verify GetObject as well to ensure accurate decryption - const get = await helpers.s3.getObject({ Bucket, Key, ...(VersionId && { VersionId }) }).promise(); + const get = await helpers.s3.getObject({ Bucket, Key, ...(VersionId && { VersionId }) }); assert.strictEqual(get.Body.toString(), Body); } @@ -96,10 +96,12 @@ async function deleteBucketSSEBeforeEach(bktName, log) { } async function getBucketSSEError(Bucket) { - await assert.rejects(helpers.s3.getBucketEncryption({ Bucket }).promise(), err => { - assert.strictEqual(err.code, 'ServerSideEncryptionConfigurationNotFoundError'); - return true; - }); + try { + await helpers.s3.getBucketEncryption({ Bucket }); + throw new Error('Expected error but got success'); + } catch (err) { + assert.strictEqual(err.name, 'ServerSideEncryptionConfigurationNotFoundError'); + } } // testCase should be one of before, migration, after @@ -199,7 +201,7 @@ async function copyObjectAndSSE( body: 'BODY(copy)', }, ]; - const headers = await helpers.s3.copyObject(tests[index].copyArgs).promise(); + const headers = await helpers.s3.copyObject(tests[index].copyArgs); let forcedSSE; if (forceBktSSE) { @@ -233,6 +235,10 @@ async function copyObjectAndSSE( // check MPU headers against the MPU overview MD // because there is no migration for ongoing MPU 
function assertMPUSSEHeaders(actual, expected, algo) { + // eslint-disable-next-line no-console + console.log('actual', actual); + // eslint-disable-next-line no-console + console.log('expected', expected); if (algo) { assert.strictEqual(actual.ServerSideEncryption, algo); } @@ -253,7 +259,7 @@ async function mpuUploadPart({ UploadId, Bucket, Key, Body, PartNumber }, mpuOve Body, Key, PartNumber, - }).promise(); + }); testCase !== 'before' && assertMPUSSEHeaders(part, mpuOverviewMDSSE, algo); return part; } @@ -270,24 +276,45 @@ async function mpuUploadPartCopy( PartNumber, CopySource, CopySourceRange, - }).promise(); + }); testCase !== 'before' && assertMPUSSEHeaders(part, mpuOverviewMDSSE, algo); return part; } // before has no headers to assert async function mpuComplete({ UploadId, Bucket, Key }, { existingParts, newParts }, mpuOverviewMDSSE, algo, testCase) { + const extractETag = part => { + // For uploadPartCopy responses, ETag is in CopyPartResult + if (part.CopyPartResult && part.CopyPartResult.ETag) { + return part.CopyPartResult.ETag; + } + // For regular uploadPart responses, ETag is at the top level + if (part.ETag) { + return part.ETag; + } + // If neither exists, throw an error for debugging + throw new Error(`Could not find ETag in part: ${JSON.stringify(part)}`); + }; + + // Build the parts array with proper ETag extraction + const allParts = [ + ...existingParts.map(part => ({ + PartNumber: part.PartNumber, + ETag: extractETag(part) + })), + ...newParts.map((part, idx) => ({ + PartNumber: existingParts.length + idx + 1, + ETag: extractETag(part) + })), + ]; const complete = await helpers.s3.completeMultipartUpload({ UploadId, Bucket, Key, MultipartUpload: { - Parts: [ - ...existingParts.map(part => ({ PartNumber: part.PartNumber, ETag: part.ETag })), - ...newParts.map((part, idx) => ({ PartNumber: existingParts.length + idx + 1, ETag: part.ETag })), - ], + Parts: allParts, }, - }).promise(); + }); testCase !== 'before' && 
assertMPUSSEHeaders(complete, mpuOverviewMDSSE, algo); return complete; } diff --git a/yarn.lock b/yarn.lock index 4ab1d8e30c..5e6c9f8f2e 100644 --- a/yarn.lock +++ b/yarn.lock @@ -489,6 +489,15 @@ "@smithy/types" "^4.5.0" tslib "^2.6.2" +"@aws-sdk/middleware-retry@^3.374.0": + version "3.374.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-retry/-/middleware-retry-3.374.0.tgz#2e80bad67338a3bd3c7dd7364e16482b08c9ffda" + integrity sha512-ZnT84qnT+Zmelv7y6hAqgAEaZgpGlrvf/+rchNWT0oG4duxI5bLWcRi9U88Jz7G8JgNQcGKJqPfC6oogCd7p8w== + dependencies: + "@smithy/middleware-retry" "^1.0.3" + tslib "^2.5.0" + uuid "^8.3.2" + "@aws-sdk/middleware-sdk-s3@3.894.0": version "3.894.0" resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.894.0.tgz#92debe07448ef9c446f024f5f843956e89eb1d11" @@ -1660,6 +1669,19 @@ "@smithy/util-middleware" "^4.1.1" tslib "^2.6.2" +"@smithy/middleware-retry@^1.0.3": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@smithy/middleware-retry/-/middleware-retry-1.1.0.tgz#084f70df112f22b5bfa0de8faaa14a5dcf22149e" + integrity sha512-lINKYxIvT+W20YFOtHBKeGm7npuJg0/YCoShttU7fVpsmU+a2rdb9zrJn1MHqWfUL6DhTAWGa0tH2O7l4XrDcw== + dependencies: + "@smithy/protocol-http" "^1.2.0" + "@smithy/service-error-classification" "^1.1.0" + "@smithy/types" "^1.2.0" + "@smithy/util-middleware" "^1.1.0" + "@smithy/util-retry" "^1.1.0" + tslib "^2.5.0" + uuid "^8.3.2" + "@smithy/middleware-retry@^4.2.4": version "4.3.0" resolved "https://registry.yarnpkg.com/@smithy/middleware-retry/-/middleware-retry-4.3.0.tgz#453c9668b013fbfa900957857f74f3b15936b384" @@ -1732,6 +1754,14 @@ "@smithy/types" "^4.5.0" tslib "^2.6.2" +"@smithy/protocol-http@^1.2.0": + version "1.2.0" + resolved "https://registry.yarnpkg.com/@smithy/protocol-http/-/protocol-http-1.2.0.tgz#a554e4dabb14508f0bc2cdef9c3710e2b294be04" + integrity sha512-GfGfruksi3nXdFok5RhgtOnWe5f6BndzYfmEXISD+5gAGdayFGpjWu5pIqIweTudMtse20bGbc+7MFZXT1Tb8Q== + dependencies: + 
"@smithy/types" "^1.2.0" + tslib "^2.5.0" + "@smithy/protocol-http@^4.1.8": version "4.1.8" resolved "https://registry.yarnpkg.com/@smithy/protocol-http/-/protocol-http-4.1.8.tgz#0461758671335f65e8ff3fc0885ab7ed253819c9" @@ -1774,6 +1804,11 @@ "@smithy/types" "^4.5.0" tslib "^2.6.2" +"@smithy/service-error-classification@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@smithy/service-error-classification/-/service-error-classification-1.1.0.tgz#264dd432ae513b3f2ad9fc6f461deda8c516173c" + integrity sha512-OCTEeJ1igatd5kFrS2VDlYbainNNpf7Lj1siFOxnRWqYOP9oNvC5HOJBd3t+Z8MbrmehBtuDJ2QqeBsfeiNkww== + "@smithy/service-error-classification@^4.1.2": version "4.1.2" resolved "https://registry.yarnpkg.com/@smithy/service-error-classification/-/service-error-classification-4.1.2.tgz#06839c332f4620a4b80c78a0c32377732dc6697a" @@ -2003,6 +2038,13 @@ dependencies: tslib "^2.6.2" +"@smithy/util-middleware@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@smithy/util-middleware/-/util-middleware-1.1.0.tgz#9f186489437ca2ef753c5e1de2930f76fd1edc14" + integrity sha512-6hhckcBqVgjWAqLy2vqlPZ3rfxLDhFWEmM7oLh2POGvsi7j0tHkbN7w4DFhuBExVJAbJ/qqxqZdRY6Fu7/OezQ== + dependencies: + tslib "^2.5.0" + "@smithy/util-middleware@^2.2.0": version "2.2.0" resolved "https://registry.yarnpkg.com/@smithy/util-middleware/-/util-middleware-2.2.0.tgz#80cfad40f6cca9ffe42a5899b5cb6abd53a50006" @@ -2027,6 +2069,14 @@ "@smithy/types" "^4.5.0" tslib "^2.6.2" +"@smithy/util-retry@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@smithy/util-retry/-/util-retry-1.1.0.tgz#f6e62ec7d7d30f1dd9608991730ba7a86e445047" + integrity sha512-ygQW5HBqYXpR3ua09UciS0sL7UGJzGiktrKkOuEJwARoUuzz40yaEGU6xd9Gs7KBmAaFC8gMfnghHtwZ2nyBCQ== + dependencies: + "@smithy/service-error-classification" "^1.1.0" + tslib "^2.5.0" + "@smithy/util-retry@^4.1.2": version "4.1.2" resolved 
"https://registry.yarnpkg.com/@smithy/util-retry/-/util-retry-4.1.2.tgz#8d28c27cf69643e173c75cc18ff0186deb7cefed"