diff --git a/cumulus/tasks/discover-granules/package.json b/cumulus/tasks/discover-granules/package.json
index afcd6c0b2a9..b5105da3a35 100644
--- a/cumulus/tasks/discover-granules/package.json
+++ b/cumulus/tasks/discover-granules/package.json
@@ -23,7 +23,6 @@
   "ava": {
     "files": "tests",
     "color": false,
-    "serial": true,
     "babel": "inherit",
     "require": [
       "babel-polyfill",
diff --git a/cumulus/tasks/discover-granules/schemas/config.json b/cumulus/tasks/discover-granules/schemas/config.json
index b868cbe3bb0..f3c16c63a00 100644
--- a/cumulus/tasks/discover-granules/schemas/config.json
+++ b/cumulus/tasks/discover-granules/schemas/config.json
@@ -53,6 +53,9 @@
           }
         }
       }
+    },
+    "useList": {
+      "type": "boolean"
     }
   }
 }
diff --git a/cumulus/tasks/discover-granules/tests/index.js b/cumulus/tasks/discover-granules/tests/index.js
index c6e53b850ee..3ebd4f4d4a6 100644
--- a/cumulus/tasks/discover-granules/tests/index.js
+++ b/cumulus/tasks/discover-granules/tests/index.js
@@ -7,7 +7,7 @@ const mur = require('./fixtures/mur.json');
 const { cloneDeep } = require('lodash');
 const { recursivelyDeleteS3Bucket, s3 } = require('@cumulus/common/aws');
 const {
-  findGitRepoRootDirectory,
+  findTmpTestDataDirectory,
   randomString,
   validateConfig,
   validateOutput
@@ -39,8 +39,7 @@ test('discover granules using SFTP', async (t) => {
   const providerPath = randomString();
 
   // Figure out the directory paths that we're working with
-  const gitRepoRootDirectory = await findGitRepoRootDirectory();
-  const providerPathDirectory = path.join(gitRepoRootDirectory, 'tmp-test-data', providerPath);
+  const providerPathDirectory = path.join(await findTmpTestDataDirectory(), providerPath);
 
   // Create providerPathDirectory and internal bucket
   await Promise.all([
@@ -98,8 +97,7 @@ test('discover granules using HTTP', async (t) => {
   const providerPath = randomString();
 
   // Figure out the directory paths that we're working with
-  const gitRepoRootDirectory = await findGitRepoRootDirectory();
-  const providerPathDirectory = path.join(gitRepoRootDirectory, 'tmp-test-data', providerPath);
+  const providerPathDirectory = path.join(await findTmpTestDataDirectory(), providerPath);
 
   // Create providerPathDirectory and internal bucket
   await Promise.all([
@@ -153,8 +151,7 @@ test('discover granules using S3', async (t) => {
   const providerPath = randomString();
 
   // Figure out the directory paths that we're working with
-  const gitRepoRootDirectory = await findGitRepoRootDirectory();
-  const providerPathDirectory = path.join(gitRepoRootDirectory, 'tmp-test-data', providerPath);
+  const providerPathDirectory = path.join(await findTmpTestDataDirectory(), providerPath);
 
   // Create providerPathDirectory and internal bucket
   await Promise.all([
diff --git a/cumulus/tasks/discover-pdrs/index.js b/cumulus/tasks/discover-pdrs/index.js
index 627fc65b0d2..eebad25ddda 100644
--- a/cumulus/tasks/discover-pdrs/index.js
+++ b/cumulus/tasks/discover-pdrs/index.js
@@ -29,7 +29,8 @@ function discoverPdrs(event) {
     stack,
     bucket,
     collection,
-    provider
+    provider,
+    config.useList
   );
 
   log.debug('Starting PDR discovery');
diff --git a/cumulus/tasks/discover-pdrs/package.json b/cumulus/tasks/discover-pdrs/package.json
index 1a66583a21c..e14677b992b 100644
--- a/cumulus/tasks/discover-pdrs/package.json
+++ b/cumulus/tasks/discover-pdrs/package.json
@@ -26,7 +26,6 @@
       "babel-polyfill",
       "babel-register"
     ],
-    "serial": true,
     "color": false
   },
   "babel": {
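A note on the recurring `useList` addition: both discovery tasks now accept an optional boolean `useList` in their config schemas and thread it through to the FTP client (see `packages/ingest/ftp.js` at the end of this diff). A task config that opts in might look like the sketch below, where every value is illustrative and only the keys come from the schemas in this diff:

```js
// Hypothetical discover-pdrs task config; values are made up for illustration.
const config = {
  stack: 'test-stack',
  bucket: 'internal-bucket',
  collection: { name: 'MOD09GQ', granuleIdExtraction: '^(.*)$' },
  provider: { id: 'MODAPS', protocol: 'ftp', host: 'localhost' },
  useList: true // new optional flag; treated as false when omitted
};
```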
diff --git a/cumulus/tasks/discover-pdrs/schemas/config.json b/cumulus/tasks/discover-pdrs/schemas/config.json
index c9e0df45d00..b705af728af 100644
--- a/cumulus/tasks/discover-pdrs/schemas/config.json
+++ b/cumulus/tasks/discover-pdrs/schemas/config.json
@@ -39,6 +39,9 @@
         "name": { "type": "string" },
         "granuleIdExtraction": { "type": "string" }
       }
+    },
+    "useList": {
+      "type": "boolean"
     }
   }
 }
diff --git a/cumulus/tasks/discover-pdrs/tests/index.js b/cumulus/tasks/discover-pdrs/tests/index.js
index b051757ccd5..d12d9a91527 100644
--- a/cumulus/tasks/discover-pdrs/tests/index.js
+++ b/cumulus/tasks/discover-pdrs/tests/index.js
@@ -11,7 +11,8 @@ const input = require('./fixtures/input.json');
 
 const { recursivelyDeleteS3Bucket, s3 } = require('@cumulus/common/aws');
 const {
-  findGitRepoRootDirectory,
+  findTestDataDirectory,
+  findTmpTestDataDirectory,
   randomString,
   validateConfig,
   validateOutput
@@ -21,6 +22,7 @@ test('test pdr discovery with FTP assuming all PDRs are new', async (t) => {
   const event = cloneDeep(input);
   event.config.bucket = randomString();
   event.config.collection.provider_path = '/pdrs';
+  event.config.useList = true;
   event.config.provider = {
     id: 'MODAPS',
     protocol: 'ftp',
@@ -110,6 +112,7 @@ test('test pdr discovery with FTP assuming some PDRs are new', async (t) => {
   };
 
   const newPayload = cloneDeep(input);
+  newPayload.config.useList = true;
   newPayload.config.provider = provider;
   newPayload.config.collection.provider_path = '/pdrs';
   newPayload.input = {};
@@ -153,9 +156,8 @@ test('test pdr discovery with HTTP assuming some PDRs are new', async (t) => {
   const providerPath = randomString();
 
   // Figure out the directory paths that we're working with
-  const gitRepoRootDirectory = await findGitRepoRootDirectory();
-  const testDataDirectory = path.join(gitRepoRootDirectory, 'packages', 'test-data', 'pdrs');
-  const providerPathDirectory = path.join(gitRepoRootDirectory, 'tmp-test-data', providerPath);
+  const testDataDirectory = path.join(await findTestDataDirectory(), 'pdrs');
+  const providerPathDirectory = path.join(await findTmpTestDataDirectory(), providerPath);
 
   // Create providerPathDirectory and internal bucket
   await Promise.all([
@@ -225,9 +227,8 @@ test('test pdr discovery with SFTP assuming some PDRs are new', async (t) => {
   const providerPath = randomString();
 
   // Figure out the directory paths that we're working with
-  const gitRepoRootDirectory = await findGitRepoRootDirectory();
-  const testDataDirectory = path.join(gitRepoRootDirectory, 'packages', 'test-data', 'pdrs');
-  const providerPathDirectory = path.join(gitRepoRootDirectory, 'tmp-test-data', providerPath);
+  const testDataDirectory = path.join(await findTestDataDirectory(), 'pdrs');
+  const providerPathDirectory = path.join(await findTmpTestDataDirectory(), providerPath);
 
   // Create providerPathDirectory and internal bucket
   await Promise.all([
diff --git a/cumulus/tasks/discover-pdrs/tests/s3.js b/cumulus/tasks/discover-pdrs/tests/s3.js
index 0e91cdcd7ed..50a2b253c4e 100644
--- a/cumulus/tasks/discover-pdrs/tests/s3.js
+++ b/cumulus/tasks/discover-pdrs/tests/s3.js
@@ -21,7 +21,8 @@ test.beforeEach(async (t) => {
         id: randomString(),
         protocol: 's3'
       },
-      stack: randomString()
+      stack: randomString(),
+      useList: true
     }
   };
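Worth calling out: both tasks' `package.json` files drop `"serial": true` from their AVA config. That is viable because each test above provisions uniquely named resources (`randomString()` buckets and provider paths), so nothing is shared between concurrently running tests. Schematically, the isolation pattern looks like this sketch (the test body here is a placeholder, not a real test from this diff):

```js
const test = require('ava');
const { s3 } = require('@cumulus/common/aws');
const { randomString } = require('@cumulus/common/test-utils');

test('each test owns its own fixtures', async (t) => {
  // A unique bucket per test means AVA can run tests concurrently
  // without the "serial": true escape hatch.
  const bucket = randomString();
  await s3().createBucket({ Bucket: bucket }).promise();
  t.pass();
});
```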
diff --git a/cumulus/tasks/parse-pdr/tests/parse_pdrs_test.js b/cumulus/tasks/parse-pdr/tests/parse_pdrs_test.js
index e7de137474b..faa41c50ecd 100644
--- a/cumulus/tasks/parse-pdr/tests/parse_pdrs_test.js
+++ b/cumulus/tasks/parse-pdr/tests/parse_pdrs_test.js
@@ -9,7 +9,8 @@ const test = require('ava');
 const { recursivelyDeleteS3Bucket, s3 } = require('@cumulus/common/aws');
 const { cloneDeep } = require('lodash');
 const {
-  findGitRepoRootDirectory,
+  findTestDataDirectory,
+  findTmpTestDataDirectory,
   randomString,
   validateConfig,
   validateInput,
@@ -19,7 +20,12 @@
 const { parsePdr } = require('../index');
 
 test('parse PDR from FTP endpoint', async (t) => {
-  const provider = {
+  const internalBucketName = randomString();
+
+  const newPayload = cloneDeep(modis);
+
+  newPayload.config.bucket = internalBucketName;
+  newPayload.config.provider = {
     id: 'MODAPS',
     protocol: 'ftp',
     host: 'localhost',
@@ -27,21 +33,13 @@ test('parse PDR from FTP endpoint', async (t) => {
     password: 'testpass'
   };
 
-  const pdrName = 'MOD09GQ.PDR';
-
-  const newPayload = cloneDeep(modis);
-  newPayload.config.provider = provider;
-
-  const internalBucketName = randomString();
-  newPayload.config.bucket = internalBucketName;
-
   await validateConfig(t, newPayload.config);
 
   return s3().createBucket({ Bucket: internalBucketName }).promise()
     .then(() => parsePdr(newPayload))
     .then((output) => {
       t.is(output.granules.length, output.granulesCount);
-      t.is(output.pdr.name, pdrName);
+      t.is(output.pdr.name, newPayload.input.pdr.name);
       t.is(output.filesCount, 2);
       return output;
     })
@@ -61,8 +59,8 @@ test('parse PDR from HTTP endpoint', async (t) => {
   const providerPath = randomString();
 
   // Figure out the directory paths that we're working with
-  const gitRepoRootDirectory = await findGitRepoRootDirectory();
-  const providerPathDirectory = path.join(gitRepoRootDirectory, 'tmp-test-data', providerPath);
+  const testDataDirectory = path.join(await findTestDataDirectory(), 'pdrs');
+  const providerPathDirectory = path.join(await findTmpTestDataDirectory(), providerPath);
 
   // Create providerPathDirectory and internal bucket
   await Promise.all([
@@ -73,7 +71,7 @@ test('parse PDR from HTTP endpoint', async (t) => {
   const pdrName = 'MOD09GQ.PDR';
 
   await fs.copy(
-    path.join(gitRepoRootDirectory, 'packages', 'test-data', 'pdrs', pdrName),
+    path.join(testDataDirectory, pdrName),
     path.join(providerPathDirectory, pdrName));
 
   const newPayload = cloneDeep(modis);
@@ -118,10 +116,7 @@
 test('parse PDR from SFTP endpoint', async (t) => {
   const internalBucketName = randomString();
   const providerPath = randomString();
-
-  // Figure out the directory paths that we're working with
-  const gitRepoRootDirectory = await findGitRepoRootDirectory();
-  const providerPathDirectory = path.join(gitRepoRootDirectory, 'tmp-test-data', providerPath);
+  const providerPathDirectory = path.join(await findTmpTestDataDirectory(), providerPath);
 
   // Create providerPathDirectory and internal bucket
   await Promise.all([
@@ -131,10 +126,6 @@ test('parse PDR from SFTP endpoint', async (t) => {
 
   const pdrName = 'MOD09GQ.PDR';
 
-  await fs.copy(
-    path.join(gitRepoRootDirectory, 'packages', 'test-data', 'pdrs', pdrName),
-    path.join(providerPathDirectory, pdrName));
-
   const newPayload = cloneDeep(modis);
   newPayload.config.bucket = internalBucketName;
   newPayload.config.provider = {
@@ -158,7 +149,14 @@ test('parse PDR from SFTP endpoint', async (t) => {
   await validateConfig(t, newPayload.config);
 
   try {
+    // Stage the file to be downloaded
+    const testDataDirectory = path.join(await findTestDataDirectory(), 'pdrs');
+    await fs.copy(
+      path.join(testDataDirectory, pdrName),
+      path.join(providerPathDirectory, pdrName));
+
     const output = await parsePdr(newPayload);
+
     await validateOutput(t, output);
     t.is(output.granules.length, output.granulesCount);
     t.is(output.pdr.name, pdrName);
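The tests above all converge on the same stage-then-fetch pattern: copy a fixture from `packages/test-data` into a uniquely named directory under `.tmp-test-data`, which the Docker test servers expose (see the `docker-compose.yml` change below). A sketch of that pattern as a helper; `stageFixture` is a name invented here for illustration, not something this diff adds:

```js
'use strict';

const path = require('path');
const fs = require('fs-extra');
const {
  findTestDataDirectory, // resolves to <repo>/packages/test-data
  findTmpTestDataDirectory, // resolves to <repo>/.tmp-test-data
  randomString
} = require('@cumulus/common/test-utils');

// Copy a PDR fixture into a per-test directory served by the test containers.
async function stageFixture(filename) {
  const providerPath = randomString();
  const source = path.join(await findTestDataDirectory(), 'pdrs', filename);
  const destination = path.join(await findTmpTestDataDirectory(), providerPath, filename);
  await fs.copy(source, destination); // fs-extra creates missing parent dirs
  return providerPath; // e.g. reachable at http://localhost:8080/<providerPath>/<filename>
}
```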
diff --git a/cumulus/tasks/sync-granule/tests/.eslintrc.json b/cumulus/tasks/sync-granule/tests/.eslintrc.json
new file mode 100644
index 00000000000..ada42bca77f
--- /dev/null
+++ b/cumulus/tasks/sync-granule/tests/.eslintrc.json
@@ -0,0 +1,5 @@
+{
+  "rules": {
+    "no-param-reassign": "off"
+  }
+}
diff --git a/cumulus/tasks/sync-granule/tests/sync_granule_test.js b/cumulus/tasks/sync-granule/tests/sync_granule_test.js
index 9422fb9224f..82c651a8c5e 100644
--- a/cumulus/tasks/sync-granule/tests/sync_granule_test.js
+++ b/cumulus/tasks/sync-granule/tests/sync_granule_test.js
@@ -1,26 +1,64 @@
 'use strict';
 
-const aws = require('@cumulus/common/aws');
 const fs = require('fs-extra');
 const test = require('ava');
 const errors = require('@cumulus/common/errors');
 const path = require('path');
-
 const payload = require('@cumulus/test-data/payloads/new-message-schema/ingest.json');
 const payloadChecksumFile = require('@cumulus/test-data/payloads/new-message-schema/ingest-checksumfile.json'); // eslint-disable-line max-len
+const { recursivelyDeleteS3Bucket, s3 } = require('@cumulus/common/aws');
+const { cloneDeep } = require('lodash');
 const {
-  findGitRepoRootDirectory,
+  findTestDataDirectory,
+  findTmpTestDataDirectory,
   randomString,
   validateConfig,
   validateInput,
   validateOutput
 } = require('@cumulus/common/test-utils');
-const { cloneDeep } = require('lodash');
 
 const { syncGranule } = require('../index');
 
+// Setup buckets and the test event
+test.beforeEach(async (t) => {
+  t.context.internalBucketName = randomString();
+  t.context.protectedBucketName = randomString();
+  t.context.privateBucketName = randomString();
+
+  await Promise.all([
+    s3().createBucket({ Bucket: t.context.internalBucketName }).promise(),
+    s3().createBucket({ Bucket: t.context.privateBucketName }).promise(),
+    s3().createBucket({ Bucket: t.context.protectedBucketName }).promise()
+  ]);
+
+  t.context.event = cloneDeep(payload);
+
+  t.context.event.config.buckets.internal = t.context.internalBucketName;
+  t.context.event.config.buckets.private = t.context.privateBucketName;
+  t.context.event.config.buckets.protected = t.context.protectedBucketName;
+});
+
+// Clean up
+test.afterEach.always((t) => Promise.all([
+  recursivelyDeleteS3Bucket(t.context.internalBucketName),
+  recursivelyDeleteS3Bucket(t.context.privateBucketName),
+  recursivelyDeleteS3Bucket(t.context.protectedBucketName)
+]));
+
+test('error when provider info is missing', async (t) => {
+  delete t.context.event.config.provider;
+
+  try {
+    await syncGranule(t.context.event);
+    t.fail();
+  }
+  catch (error) {
+    t.true(error instanceof errors.ProviderNotFound);
+  }
+});
+
 test('download Granule from FTP endpoint', async (t) => {
-  const provider = {
+  t.context.event.config.provider = {
     id: 'MODAPS',
     protocol: 'ftp',
     host: 'localhost',
@@ -28,217 +66,220 @@ test('download Granule from FTP endpoint', async (t) => {
     password: 'testpass'
   };
 
-  const newPayload = cloneDeep(payload);
-  newPayload.config.provider = provider;
-
-  const protectedBucketName = randomString();
-  const internalBucketName = randomString();
-
-  newPayload.config.buckets.protected = protectedBucketName;
-  newPayload.config.buckets.internal = internalBucketName;
-
-  await validateInput(t, newPayload.input);
-  await validateConfig(t, newPayload.config);
-
-  return aws.s3().createBucket({ Bucket: protectedBucketName }).promise()
-    .then(() => aws.s3().createBucket({ Bucket: internalBucketName }).promise())
-    .then(() => syncGranule(newPayload))
-    .then((output) => validateOutput(t, output).then(() => output))
-    .then((output) => {
-      t.is(output.granules.length, 1);
-      t.is(output.granules[0].files.length, 1);
-      t.is(
-        output.granules[0].files[0].filename,
-        `s3://${protectedBucketName}/MOD09GQ.A2017224.h27v08.006.2017227165029.hdf`
-      );
-
-      return aws.recursivelyDeleteS3Bucket(internalBucketName);
-    })
-    .catch((e) => {
-      if (e instanceof errors.RemoteResourceError) {
-        t.pass('ignoring this test. Test server seems to be down');
-      }
-      else throw e;
-    });
-});
+  await validateConfig(t, t.context.event.config);
+  await validateInput(t, t.context.event.input);
 
-test('download Granule from HTTP endpoint', async (t) => {
-  const granuleUrlPath = randomString();
+  try {
+    const output = await syncGranule(t.context.event);
 
-  // Figure out the directory paths that we're working with
-  const gitRepoRootDirectory = await findGitRepoRootDirectory();
-  const tmpTestDataDirectory = path.join(gitRepoRootDirectory, 'tmp-test-data', granuleUrlPath);
+    await validateOutput(t, output);
 
-  const granuleFilename = 'MOD09GQ.A2017224.h27v08.006.2017227165029.hdf';
+    t.is(output.granules.length, 1);
+    t.is(output.granules[0].files.length, 1);
+    t.is(
+      output.granules[0].files[0].filename,
+      `s3://${t.context.protectedBucketName}/MOD09GQ.A2017224.h27v08.006.2017227165029.hdf`
+    );
+  }
+  catch (e) {
+    if (e instanceof errors.RemoteResourceError) {
+      t.pass('ignoring this test. Test server seems to be down');
+    }
+    else throw e;
+  }
+});
 
-  const event = cloneDeep(payload);
-  event.config.buckets.internal = randomString();
-  event.config.buckets.protected = randomString();
-  event.config.provider = {
+test('download Granule from HTTP endpoint', async (t) => {
+  const granulePath = randomString();
+  const localGranulePath = path.join(await findTmpTestDataDirectory(), granulePath);
+
+  t.context.event.config.provider = {
     id: 'MODAPS',
     protocol: 'http',
     host: 'http://localhost:8080'
   };
-  event.input.granules[0].files[0].path = `/${granuleUrlPath}`;
+  t.context.event.input.granules[0].files[0].path = `/${granulePath}`;
 
-  await validateConfig(t, event.config);
-  await validateInput(t, event.input);
+  await validateConfig(t, t.context.event.config);
+  await validateInput(t, t.context.event.input);
 
-  await fs.ensureDir(tmpTestDataDirectory);
+  await fs.mkdir(localGranulePath);
   try {
-    await Promise.all([
-      fs.copy(
-        path.join(gitRepoRootDirectory, 'packages', 'test-data', 'granules', granuleFilename),
-        path.join(tmpTestDataDirectory, granuleFilename)),
-      aws.s3().createBucket({ Bucket: event.config.buckets.internal }).promise(),
-      aws.s3().createBucket({ Bucket: event.config.buckets.protected }).promise()
-    ]);
+    const granuleFilename = t.context.event.input.granules[0].files[0].name;
 
-    const output = await syncGranule(event);
+    // Stage the file to be downloaded
+    await fs.copy(
+      path.join(await findTestDataDirectory(), 'granules', granuleFilename),
+      path.join(localGranulePath, granuleFilename)
+    );
+
+    const output = await syncGranule(t.context.event);
+
+    await validateOutput(t, output);
 
-    await validateOutput(t, output);
     t.is(output.granules.length, 1);
     t.is(output.granules[0].files.length, 1);
     t.is(
       output.granules[0].files[0].filename,
-      `s3://${event.config.buckets.protected}/${granuleFilename}`
+      `s3://${t.context.protectedBucketName}/${granuleFilename}`
     );
   }
   catch (e) {
     if (e instanceof errors.RemoteResourceError) {
      t.pass('ignoring this test. Test server seems to be down');
     }
-    else t.fail(e);
+    else throw e;
   }
   finally {
-    await Promise.all([
-      fs.remove(tmpTestDataDirectory),
-      aws.recursivelyDeleteS3Bucket(event.config.buckets.internal),
-      aws.recursivelyDeleteS3Bucket(event.config.buckets.protected)
-    ]);
+    await fs.remove(localGranulePath);
   }
 });
 
-test('download granule from S3 provider', async (t) => {
-  const internalBucket = randomString();
-  const protectedBucket = randomString();
-  const sourceBucket = randomString();
+test('download Granule from SFTP endpoint', async (t) => {
+  const granulePath = randomString();
+  const localGranulePath = path.join(await findTmpTestDataDirectory(), granulePath);
 
-  const granuleFilePath = randomString();
-  const granuleFileName = payload.input.granules[0].files[0].name;
+  t.context.event.config.provider = {
+    id: 'MODAPS',
+    protocol: 'sftp',
+    host: 'localhost',
+    port: 2222,
+    username: 'user',
+    password: 'password'
+  };
 
-  // Create required buckets
-  await Promise.all([
-    aws.s3().createBucket({ Bucket: internalBucket }).promise(),
-    aws.s3().createBucket({ Bucket: protectedBucket }).promise(),
-    aws.s3().createBucket({ Bucket: sourceBucket }).promise()
-  ]);
+  // The test-data prefix is required because of the way that the sftp
+  // container is configured in docker-compose.yml.
+  t.context.event.input.granules[0].files[0].path = `test-data/${granulePath}`;
 
-  // Stage the file that's going to be downloaded
-  await aws.s3().putObject({
-    Bucket: sourceBucket,
-    Key: `${granuleFilePath}/${granuleFileName}`,
-    Body: fs.createReadStream(`../../../packages/test-data/granules/${granuleFileName}`)
-  }).promise();
+  await validateConfig(t, t.context.event.config);
+  await validateInput(t, t.context.event.input);
 
-  const event = Object.assign({}, payload);
-  event.config.provider = {
+  await fs.mkdir(localGranulePath);
+  try {
+    const granuleFilename = t.context.event.input.granules[0].files[0].name;
+
+    // Stage the file to be downloaded
+    await fs.copy(
+      path.join(await findTestDataDirectory(), 'granules', granuleFilename),
+      path.join(localGranulePath, granuleFilename)
+    );
+
+    const output = await syncGranule(t.context.event);
+
+    await validateOutput(t, output);
+
+    t.is(output.granules.length, 1);
+    t.is(output.granules[0].files.length, 1);
+    t.is(
+      output.granules[0].files[0].filename,
+      `s3://${t.context.protectedBucketName}/${granuleFilename}`
+    );
+  }
+  catch (e) {
+    if (e instanceof errors.RemoteResourceError) {
+      t.pass('ignoring this test. Test server seems to be down');
+    }
+    else throw e;
+  }
+  finally {
+    await fs.remove(localGranulePath);
+  }
+});
+
+test('download granule from S3 provider', async (t) => {
+  const granuleFilePath = randomString();
+  const granuleFileName = payload.input.granules[0].files[0].name;
+
+  t.context.event.config.provider = {
     id: 'MODAPS',
     protocol: 's3',
-    host: sourceBucket
+    host: randomString()
   };
-  event.config.buckets.internal = internalBucket;
-  event.config.buckets.protected = protectedBucket;
-  event.input.granules[0].files[0].path = granuleFilePath;
+  t.context.event.input.granules[0].files[0].path = granuleFilePath;
+
+  await validateConfig(t, t.context.event.config);
+  await validateInput(t, t.context.event.input);
 
-  await validateConfig(t, event.config);
-  await validateInput(t, event.input);
+  await s3().createBucket({ Bucket: t.context.event.config.provider.host }).promise();
 
-  let output;
   try {
-    output = await syncGranule(event);
+    // Stage the file that's going to be downloaded
+    await s3().putObject({
+      Bucket: t.context.event.config.provider.host,
+      Key: `${granuleFilePath}/${granuleFileName}`,
+      Body: fs.createReadStream(`../../../packages/test-data/granules/${granuleFileName}`)
+    }).promise();
+
+    const output = await syncGranule(t.context.event);
+
+    await validateOutput(t, output);
+
+    t.is(output.granules.length, 1);
+    t.is(output.granules[0].files.length, 1);
+    t.is(
+      output.granules[0].files[0].filename,
+      `s3://${t.context.protectedBucketName}/${granuleFileName}` // eslint-disable-line max-len
+    );
   }
   finally {
     // Clean up
-    await Promise.all([
-      aws.recursivelyDeleteS3Bucket(internalBucket),
-      aws.recursivelyDeleteS3Bucket(protectedBucket),
-      aws.recursivelyDeleteS3Bucket(sourceBucket)
-    ]);
+    await recursivelyDeleteS3Bucket(t.context.event.config.provider.host);
   }
-
-  await validateOutput(t, output);
-  t.is(output.granules.length, 1);
-  t.is(output.granules[0].files.length, 1);
-  t.is(
-    output.granules[0].files[0].filename,
-    `s3://${protectedBucket}/MOD09GQ.A2017224.h27v08.006.2017227165029.hdf`
-  );
 });
 
-test('download granule over HTTP with checksum in file', async (t) => {
-  const granuleUrlPath = randomString();
-
-  // Figure out the directory paths that we're working with
-  const gitRepoRootDirectory = await findGitRepoRootDirectory();
-  const tmpTestDataDirectory = path.join(gitRepoRootDirectory, 'tmp-test-data', granuleUrlPath);
-
-  const granuleFilename = '20160115-MODIS_T-JPL-L2P-T2016015000000.L2_LAC_GHRSST_N-v01.nc.bz2';
-  const checksumFilename = '20160115-MODIS_T-JPL-L2P-T2016015000000.L2_LAC_GHRSST_N-v01.nc.bz2.md5';
-
+test('download granule with checksum in file from an HTTP endpoint', async (t) => {
   const event = cloneDeep(payloadChecksumFile);
-  event.config.buckets.internal = randomString();
-  event.config.buckets.private = randomString();
-  event.config.buckets.protected = randomString();
+
+  event.config.buckets.internal = t.context.internalBucketName;
+  event.config.buckets.private = t.context.privateBucketName;
+  event.config.buckets.protected = t.context.protectedBucketName;
   event.config.provider = {
     id: 'MODAPS',
     protocol: 'http',
     host: 'http://localhost:8080'
   };
-  event.input.granules[0].files[0].path = `/${granuleUrlPath}`;
-  event.input.granules[0].files[1].path = `/${granuleUrlPath}`;
 
-  await validateConfig(t, event.config);
-  await validateInput(t, event.input);
+  const granulePath = randomString();
+  event.input.granules[0].files[0].path = `/${granulePath}`;
+  event.input.granules[0].files[1].path = `/${granulePath}`;
+
+  await validateConfig(t, event.config);
+  await validateInput(t, event.input);
 
-  await fs.ensureDir(tmpTestDataDirectory);
+  const localGranulePath = path.join(await findTmpTestDataDirectory(), granulePath);
+  await fs.mkdir(localGranulePath);
 
   try {
+    // Stage the files to be downloaded
+    const sourceDir = path.join(await findTestDataDirectory(), 'granules');
+    const granuleFilename = event.input.granules[0].files[0].name;
+    const checksumFilename = event.input.granules[0].files[1].name;
     await Promise.all([
-      fs.copy(
-        path.join(gitRepoRootDirectory, 'packages', 'test-data', 'granules', granuleFilename),
-        path.join(tmpTestDataDirectory, granuleFilename)),
-      fs.copy(
-        path.join(gitRepoRootDirectory, 'packages', 'test-data', 'granules', checksumFilename),
-        path.join(tmpTestDataDirectory, checksumFilename)),
-      aws.s3().createBucket({ Bucket: event.config.buckets.internal }).promise(),
-      aws.s3().createBucket({ Bucket: event.config.buckets.private }).promise(),
-      aws.s3().createBucket({ Bucket: event.config.buckets.protected }).promise()
+      fs.copy(path.join(sourceDir, granuleFilename),
+        path.join(localGranulePath, granuleFilename)),
+      fs.copy(path.join(sourceDir, checksumFilename),
+        path.join(localGranulePath, checksumFilename))
     ]);
 
     const output = await syncGranule(event);
 
-    await validateOutput(t, output);
+    await validateOutput(t, output);
+
     t.is(output.granules.length, 1);
     t.is(output.granules[0].files.length, 1);
-    t.is(
-      output.granules[0].files[0].filename,
-      `s3://${event.config.buckets.private}/${granuleFilename}` // eslint-disable-line max-len
-    );
+    t.is(output.granules[0].files[0].filename,
+      `s3://${t.context.privateBucketName}/${granuleFilename}`);
   }
   catch (e) {
     if (e instanceof errors.RemoteResourceError) {
       t.pass('ignoring this test. Test server seems to be down');
     }
-    else t.fail(e);
+    else throw e;
   }
   finally {
-    await Promise.all([
-      fs.remove(tmpTestDataDirectory),
-      aws.recursivelyDeleteS3Bucket(event.config.buckets.internal),
-      aws.recursivelyDeleteS3Bucket(event.config.buckets.private),
-      aws.recursivelyDeleteS3Bucket(event.config.buckets.protected)
-    ]);
+    // Clean up
+    await fs.remove(localGranulePath);
   }
 });
@@ -252,7 +293,7 @@ test('download granule over HTTP with checksum in file', async (t) => {
 // sinon.stub(S3, 'fileExists').callsFake(() => true);
 // const uploaded = sinon.stub(S3, 'upload').callsFake(() => '/test/test.hd');
 
-// const newPayload = Object.assign({}, payload);
+// const newPayload = cloneDeep(payload);
 // newPayload.provider = provider;
 // handler(newPayload, {}, (e, r) => {
 //   S3.fileExists.restore();
@@ -271,7 +312,7 @@ test('download granule over HTTP with checksum in file', async (t) => {
 // sinon.stub(S3, 'fileExists').callsFake(() => true);
 // const uploaded = sinon.stub(S3, 'upload').callsFake(() => '/test/test.hd');
 
-// const newPayload = Object.assign({}, payload);
+// const newPayload = cloneDeep(payload);
 // newPayload.config.collection.duplicateHandling = 'skip';
 // handler(newPayload, {}, (e, r) => {
 //   S3.fileExists.restore();
diff --git a/docker-compose.yml b/docker-compose.yml
index 44bc6a1cb45..dafe9723fcf 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -21,12 +21,12 @@ services:
     ports:
       - "8080:80"
     volumes:
-      - "./tmp-test-data:/var/www/html:ro"
+      - "./.tmp-test-data:/var/www/html:ro"
 
   sftp:
     image: atmoz/sftp
     ports:
       - "2222:22"
     volumes:
-      - "./tmp-test-data:/home/user/test-data:ro"
+      - "./.tmp-test-data:/home/user/test-data:ro"
     command: user:password
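The rename to the dot-prefixed `./.tmp-test-data` keeps the scratch directory out of casual listings; both containers still mount it read-only. As we read the compose file, the mapping it sets up is (paths here are examples):

```js
// A file staged locally at
//   <repo>/.tmp-test-data/<providerPath>/MOD09GQ.PDR
// is exposed by the test containers as
//   http://localhost:8080/<providerPath>/MOD09GQ.PDR              (http -> /var/www/html)
//   sftp://user@localhost:2222/test-data/<providerPath>/MOD09GQ.PDR (sftp -> /home/user/test-data)
// which is why the SFTP tests above prefix provider paths with "test-data/".
```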
diff --git a/packages/common/test-utils.js b/packages/common/test-utils.js
index 9a3ac908ac4..4f32d078d4e 100644
--- a/packages/common/test-utils.js
+++ b/packages/common/test-utils.js
@@ -182,13 +182,9 @@ exports.validateOutput = validateOutput;
 
 /**
  * Determine the path of the current git repo
  *
- * @param {string} dirname - the directory to start searching from. Defaults to
- *   `process.cwd()`
- * @returns {string} - the filesystem path of the current git repo
+ * @returns {Promise.<string>} - the filesystem path of the current git repo
  */
 async function findGitRepoRootDirectory(dirname) {
-  if (dirname === undefined) return findGitRepoRootDirectory(path.dirname(process.cwd()));
-
   if (await fs.pathExists(path.join(dirname, '.git'))) return dirname;
 
   // This indicates that we've reached the root of the filesystem
@@ -199,3 +195,25 @@
   return findGitRepoRootDirectory(path.dirname(dirname));
 }
 exports.findGitRepoRootDirectory = findGitRepoRootDirectory;
+
+/**
+ * Determine the path of the .tmp-test-data directory
+ *
+ * @returns {Promise.<string>} - the filesystem path of the .tmp-test-data directory
+ */
+function findTmpTestDataDirectory() {
+  return exports.findGitRepoRootDirectory(process.cwd())
+    .then((gitRepoRoot) => path.join(gitRepoRoot, '.tmp-test-data'));
+}
+exports.findTmpTestDataDirectory = findTmpTestDataDirectory;
+
+/**
+ * Determine the path of the packages/test-data directory
+ *
+ * @returns {Promise.<string>} - the filesystem path of the packages/test-data directory
+ */
+function findTestDataDirectory() {
+  return exports.findGitRepoRootDirectory(process.cwd())
+    .then((gitRepoRoot) => path.join(gitRepoRoot, 'packages', 'test-data'));
+}
+exports.findTestDataDirectory = findTestDataDirectory;
diff --git a/packages/ingest/ftp.js b/packages/ingest/ftp.js
index 673110b7254..e99ce88f2e3 100644
--- a/packages/ingest/ftp.js
+++ b/packages/ingest/ftp.js
@@ -16,7 +16,8 @@ module.exports.ftpMixin = (superclass) => class extends superclass {
       host: this.host,
       port: this.port || 21,
       user: this.username || 'anonymous',
-      pass: this.password || 'password'
+      pass: this.password || 'password',
+      useList: this.useList || false
     };
 
     this.connected = false;
diff --git a/packages/ingest/pdr.js b/packages/ingest/pdr.js
index ba3a52a3000..afe3d5b747b 100644
--- a/packages/ingest/pdr.js
+++ b/packages/ingest/pdr.js
@@ -25,6 +25,7 @@ class Discover {
     bucket,
     collection,
     provider,
+    useList = false,
     folder = 'pdrs',
   ) {
     if (this.constructor === Discover) {
@@ -36,6 +37,7 @@ class Discover {
     this.collection = collection;
     this.provider = provider;
     this.folder = folder;
+    this.useList = useList;
 
     // get authentication information
     this.port = get(this.provider, 'port', 21);
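Taken together, the `useList` plumbing runs: task config, to the `Discover` constructor in `packages/ingest/pdr.js`, to an instance property, to the FTP client options in `packages/ingest/ftp.js`. Below is a condensed, runnable sketch of that flow. The class shape mirrors the diff, but the mixin internals are simplified, all values are illustrative, and the note about `useList` switching directory listings from `STAT` to `LIST` is our reading of the jsftp client's documented option of the same name, not something this diff states:

```js
'use strict';

// Condensed sketch (simplified, illustrative values) of how useList travels
// from a task's config down to the FTP client options.
class Discover {
  constructor(stack, bucket, collection, provider, useList = false, folder = 'pdrs') {
    this.stack = stack;
    this.bucket = bucket;
    this.collection = collection;
    this.provider = provider;
    this.useList = useList;
    this.folder = folder;
    this.host = provider.host;
    this.port = provider.port;
    this.username = provider.username;
    this.password = provider.password;
  }

  // Roughly what the ftpMixin now builds for its client connection:
  ftpClientOptions() {
    return {
      host: this.host,
      port: this.port || 21,
      user: this.username || 'anonymous',
      pass: this.password || 'password',
      useList: this.useList || false // opt into LIST-based directory listings
    };
  }
}

// Mirrors the call site in cumulus/tasks/discover-pdrs/index.js:
const discover = new Discover(
  'test-stack',
  'internal-bucket',
  { name: 'MOD09GQ' },
  { protocol: 'ftp', host: 'localhost', username: 'testuser', password: 'testpass' },
  true // config.useList
);

console.log(discover.ftpClientOptions());
// => { host: 'localhost', port: 21, user: 'testuser', pass: 'testpass', useList: true }
```

Note the positional argument: because `useList` was inserted ahead of the `folder` default parameter, callers that previously passed `folder` positionally would need updating; the one call site changed in this diff passes `config.useList` and leaves `folder` at its default.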