Skip to content

Commit

Permalink
#40: misc improvements
Browse files Browse the repository at this point in the history
bugfix, minor refactor, combine validate and generate, add support for @OData field in payload
  • Loading branch information
mohit-s96 committed Sep 19, 2023
1 parent 6f3b6f6 commit a3679a6
Show file tree
Hide file tree
Showing 5 changed files with 262 additions and 107 deletions.
3 changes: 1 addition & 2 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -104,6 +104,5 @@ dist
.tern-port

config.json
output
errors
reso-schema-validation-temp
.DS_Store
29 changes: 11 additions & 18 deletions index.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
#! /usr/bin/env node

const { generate, validate } = require('./lib/schema');
const { schema } = require('./lib/schema');
const { restore } = require('./lib/restore-utils');
const { runTests } = require('./lib/batch-test-runner');
const { findVariations, computeVariations } = require('./lib/find-variations/index.js');
Expand All @@ -11,24 +11,17 @@ if (require?.main === module) {
program.name('reso-certification-utils').description('Command line batch-testing and restore utils').version('0.0.3');

program
.command('generate')
.command('schema')
.option('-g, --generate', 'Generate a schema for payload validation')
.option('-v, --validate', 'Validate one or multiple payloads with a schema')
.option('-m, --metadataPath <string>', 'Path to the metadata report JSON file')
.option('-o, --outputPath <string>', 'Path tho the directory to store the generated schema')
.option('-a, --additionalProperties', 'Pass this flag to allow additional properties in the schema')
.description('Generate schema from a given metadata report')
.action(generate);

program
.command('validate')
.option('-m, --metadataPath <string>', 'Path to the metadata report JSON file')
.option('-p, --payloadPath <string>', 'Path to the payload that needs to be validated')
.option('-s, --schemaPath <string>', 'Path to the generated JSON schema')
.option('-e, --errorPath <string>', 'Path to save error reports in case of failed validation. Defaults to "./errors"')
.option('-a, --additionalProperties', 'Pass this flag to allow additional properties in the schema')
.option('-z, --zipFilePath <string>', 'Path to a zip file containing JSON payloads')
.option('-dv, --version <string>', 'The data dictionary version of the metadata report. Defaults to 1.7')
.description('Validate a payload against a schema')
.action(validate);
.option('-o, --outputPath <string>', 'Path tho the directory to store the generated schema. Defaults to "./"')
.option('-a, --additionalProperties', 'Pass this flag to allow additional properties in the schema. False by default')
.option('-dv, --ddVersion <string>', 'The DD version of the metadata report')
.option('-p, --payloadPath <string>', 'Path to the payload file OR directory/zip containing files that need to be validated')
.option('-r, --resourceName <string>', 'Resource name to validate against. Required if --version is passed when validating.')
.description('Generate a schema or validate a payload against a schema')
.action(schema);

program
.command('restore')
Expand Down
238 changes: 159 additions & 79 deletions lib/schema/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,14 @@ const chalk = require('chalk');
const { promises: fs } = require('fs');
const { generateSchema } = require('./generate');
const path = require('path');
const { validatePayload } = require('./validate');
const { extractFilesFromZip } = require('../../common');
const { validatePayload, isValidDdVersion } = require('./validate');
const { CURRENT_DATA_DICTIONARY_VERSION } = require('../../common');
const { readDirectory } = require('../restore-utils');
const { getReferenceMetadata } = require('reso-certification-etl');
const { processFiles } = require('./utils');

const OUTPUT_DIR = 'output';
const OUTPUT_DIR = 'reso-schema-validation-temp';
const ERROR_REPORT = 'schema-validation-report.json';

const readFile = async filePath => {
try {
Expand Down Expand Up @@ -43,98 +45,172 @@ const writeFile = async (path, data) => {
}
};

const generate = async ({ metadataPath = '', outputPath = '', additionalProperties = false }) => {
const schema = async ({
metadataPath = '',
outputPath = '.',
additionalProperties = false,
generate,
validate,
ddVersion,
payloadPath,
resourceName
}) => {
try {
const metadataReportJson = JSON.parse((await readFile(metadataPath)) || null);
if (!metadataReportJson) {
console.log(chalk.redBright.bold('Invalid metadata file'));
if ((!generate && !validate) || (generate && validate)) {
console.log(chalk.redBright('Only one of --generate (-g) or --validate (-v) should be passed'));
return;
}
const schema = generateSchema(metadataReportJson, additionalProperties);
if (!schema) {
console.log(chalk.redBright.bold('Error generating JSON schema from the given metadata report'));

if (metadataPath && ddVersion) {
console.log(chalk.redBright('Only one of --metadataPath (-m) or --ddVersion (-dv) should be present'));
return;
}
// write schema to the output path
const fileName = 'schema-' + (metadataPath?.split('/')?.at(-1) || '');
const success = await writeFile(path.join(outputPath, fileName), JSON.stringify(schema));
if (!success) {
console.log(chalk.redBright.bold('Error writing generated schema to the given location'));

const version = ddVersion ?? CURRENT_DATA_DICTIONARY_VERSION;

if (!isValidDdVersion(version)) {
console.log(chalk.redBright(`Invalid DD Version ${version}`));
return;
}
console.log(chalk.greenBright.bold(`Schema successfully generated and saved in ${outputPath}/${fileName}`));

if (generate) {
let metadataReport = null;
if (!metadataPath) {
metadataReport = getReferenceMetadata(version);
if (!metadataReport) {
console.log(chalk.redBright(`Invalid version ${version}`));
return;
}
}
const result = await generateSchemaFromMetadata({
metadataPath,
metadataReport,
additionalProperties
});
if (result?.schema) {
const { schema } = result;

// write schema to the output path
const fileName = 'schema-' + (metadataPath?.split('/')?.at(-1) || 'metadata.json');
const success = await writeFile(path.join(outputPath, fileName), JSON.stringify(schema));
if (!success) {
console.log(chalk.redBright.bold('Error writing generated schema to the given location'));
return;
}
console.log(chalk.greenBright.bold(`Schema successfully generated and saved in ${outputPath}/${fileName}`));
}

return;
}

if (validate) {
if (!payloadPath) {
console.log(chalk.redBright('Invalid path to payloads'));
return;
}

if ((ddVersion && !resourceName) || (resourceName && !ddVersion)) {
console.log(chalk.redBright('Resource name (-r, --resourceName) and version (-dv, ddVersion) should be passed together'));
return;
}

try {
await fs.rm(OUTPUT_DIR, { recursive: true, force: true });
} catch {
/**ignore */
}

const outputPathExists = await createDirectoryIfNotPresent(OUTPUT_DIR);
if (!outputPathExists) {
throw new Error('Unable to create output directory for extracted files');
}

const { error } = (await processFiles({ inputPath: payloadPath, outputPath: OUTPUT_DIR })) || {};
if (error) {
console.log(chalk.redBright('Invalid payload path'));
return;
}

await validatePayloads({
metadataPath,
payloadPath: OUTPUT_DIR,
additionalProperties,
version: ddVersion,
resourceName
});
}
} catch (error) {
console.log(error);
console.log(chalk.redBright.bold('Something went wrong while generating the schema'));
console.log(chalk.redBright('SOmething wen wrong while processing'));
}
};

const validate = async ({
metadataPath = '',
payloadPath = '',
schemaPath = '',
errorPath = 'errors',
const generateSchemaFromMetadata = async ({
metadataPath = '', // This can be ignored when calling in a lib. `metadataReport` can be passed instead.
additionalProperties = false,
zipFilePath = '',
version = '1.7'
metadataReport
}) => {
if (zipFilePath) {
try {
await fs.rm(OUTPUT_DIR, { recursive: true, force: true });
} catch {
/**ignore */
try {
const metadataReportJson = metadataPath ? JSON.parse((await readFile(metadataPath)) || null) : metadataReport;
if (!metadataReportJson) {
console.log(chalk.redBright.bold('Invalid metadata file'));
return;
}
const outputPathExists = await createDirectoryIfNotPresent(OUTPUT_DIR);
if (!outputPathExists) {
throw new Error('Unable to create output directory for extracted files');
const schema = generateSchema(metadataReportJson, additionalProperties);
if (!schema) {
console.log(chalk.redBright.bold('Error generating JSON schema from the given metadata report'));
return;
}
//TODO: make this be independent of CLI usage.
await extractFilesFromZip({ outputPath: OUTPUT_DIR, zipPath: zipFilePath });
const files = await readDirectory(OUTPUT_DIR);

if (!files.length) throw new Error(`No JSON files found in the archive at ${zipFilePath}`);

await validatePayloadAndGenerateResults({
errorPath,
schemaPath,
payloadPaths: files.map(f => path.join(OUTPUT_DIR, f)),
version,
metadataPath,
additionalProperties
});
} else {
await validatePayloadAndGenerateResults({
errorPath,
payloadPaths: [payloadPath],
schemaPath,
version,
metadataPath,
additionalProperties
});
return { schema };
} catch (error) {
console.log(error);
console.log(chalk.redBright.bold('Something went wrong while generating the schema'));
}
};

async function validatePayloadAndGenerateResults({ schemaPath, payloadPaths, errorPath, version, metadataPath, additionalProperties }) {
/**
 * Validates the JSON payload files found in a directory against a schema
 * generated from either a metadata report file or the RESO reference metadata.
 *
 * @param {Object} opts
 * @param {string} [opts.metadataPath] - Path to a metadata report JSON file.
 * @param {string} [opts.payloadPath] - Directory containing the payload JSON files.
 * @param {boolean} [opts.additionalProperties] - Allow extra properties in the generated schema.
 * @param {string} [opts.version] - Data Dictionary version (passed through with resourceName).
 * @param {string} [opts.resourceName] - Resource name to validate against.
 * @throws {Error} When no JSON files are found at payloadPath.
 */
const validatePayloads = async ({ metadataPath = '', payloadPath = '', additionalProperties = false, version, resourceName }) => {
  //TODO: make this be independent of CLI usage.
  const files = await readDirectory(payloadPath);

  if (!files.length) throw new Error(`No JSON files found at ${payloadPath}`);

  await validatePayloadAndGenerateResults({
    errorPath: '.',
    // BUGFIX: join against payloadPath (the directory that was actually listed)
    // rather than the hard-coded OUTPUT_DIR, so this works for any directory.
    payloadPaths: files.map(f => path.join(payloadPath, f)),
    version,
    metadataPath,
    additionalProperties,
    resourceName
  });
};

const validatePayloadAndGenerateResults = async ({
payloadPaths,
errorPath,
version,
metadataPath,
additionalProperties,
resourceName
}) => {
try {
let schemaJson = schemaPath ? JSON.parse((await readFile(schemaPath)) || null) : null;
let schemaJson = null;

let metadataJson = metadataPath ? JSON.parse((await readFile(metadataPath)) || null) : null;
if (!metadataJson) {
// use RESO metadata report instead
metadataJson = getReferenceMetadata(version);
schemaJson = generateSchema(metadataJson, additionalProperties);
} else {
schemaJson = generateSchema(metadataJson, additionalProperties);
}

if (!schemaJson) {
let metadataJson = metadataPath ? JSON.parse((await readFile(metadataPath)) || null) : null;
if (!metadataJson) {
// use RESO metadata report instead
metadataJson = getReferenceMetadata(version);
schemaJson = generateSchema(metadataJson, true);
} else {
schemaJson = generateSchema(metadataJson, additionalProperties);
}
if (!schemaJson) {
console.log(
chalk.bgRed.bold(
'Unable to generate a schema file. Pass the schema/metadata file in the options or check for invalid DD version.'
)
);
return;
}
console.log(
chalk.bgRed.bold('Unable to generate a schema file. Pass the schema/metadata file in the options or check for invalid DD version.')
);
return;
}

const payloadsJson = {};
for (const payloadPath of payloadPaths) {
const payloadJson = JSON.parse((await readFile(payloadPath)) || null);
Expand All @@ -149,13 +225,18 @@ async function validatePayloadAndGenerateResults({ schemaPath, payloadPaths, err
console.log(chalk.redBright.bold('No payloads could be found'));
return;
}
const result = validatePayload(payloadsJson, schemaJson);
if (result.errors) {
const result = validatePayload({
payloads: payloadsJson,
schema: schemaJson,
resourceNameFromArgs: resourceName,
versionFromArgs: version
});
if (result?.errors) {
const errorDirectoryExists = await createDirectoryIfNotPresent(errorPath);
if (!errorDirectoryExists) throw new Error('Unable to create error directory');

const success = await writeFile(
path.join(errorPath, 'error-report.json'),
path.join(errorPath, ERROR_REPORT),
JSON.stringify({
...(result.errors || {})
})
Expand All @@ -173,9 +254,8 @@ async function validatePayloadAndGenerateResults({ schemaPath, payloadPaths, err
console.log(error);
console.log(chalk.redBright.bold('Something went wrong while validating the payload'));
}
}
};

module.exports = {
validate,
generate
schema
};
47 changes: 47 additions & 0 deletions lib/schema/utils.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
const fs = require('fs');
const fsPromises = fs.promises;
const path = require('path');
const { extractFilesFromZip } = require('../../common');

/**
 * Stages payload input for validation: a single file is processed directly,
 * while every entry of a directory is processed in turn.
 *
 * @param {Object} opts
 * @param {string} opts.inputPath - File or directory to process.
 * @param {string} opts.outputPath - Directory where processed files are placed.
 * @returns {Promise<{error: Error}|undefined>} `{ error }` on failure, otherwise undefined.
 */
const processFiles = async ({ inputPath, outputPath }) => {
  try {
    const stats = await fsPromises.stat(inputPath);

    // Single file: hand it straight to the per-file processor.
    if (stats.isFile()) {
      await processFile({ filePath: inputPath, outputPath });
      return;
    }

    // Anything that is neither a file nor a directory is reported and skipped.
    if (!stats.isDirectory()) {
      console.error(`Unsupported file type: ${inputPath}`);
      return;
    }

    // Directory: process each entry sequentially.
    const entries = await fsPromises.readdir(inputPath);
    for (const entry of entries) {
      await processFile({
        filePath: path.join(inputPath, entry),
        outputPath
      });
    }
  } catch (error) {
    console.log(error);
    return { error };
  }
};

/**
 * Routes a single input file into the staging output directory.
 * - .json files are copied as-is.
 * - .zip archives are extracted into the output directory.
 * Anything else is reported and skipped.
 *
 * @param {Object} opts
 * @param {string} opts.filePath - Path to the input file.
 * @param {string} opts.outputPath - Directory receiving the processed file(s).
 */
const processFile = async ({ filePath, outputPath }) => {
  // Normalize case so payloads like "DATA.JSON" or "ARCHIVE.ZIP" are not rejected.
  const ext = path.extname(filePath).toLowerCase();

  if (ext === '.json') {
    await fsPromises.copyFile(filePath, path.join(outputPath, path.basename(filePath)));
  } else if (ext === '.zip') {
    await extractFilesFromZip({
      zipPath: filePath,
      outputPath
    });
  } else {
    console.error(`Unsupported file type: ${filePath}`);
  }
};

// Public API: only the batch entry point is exported; processFile stays internal.
module.exports = {
  processFiles
};
Loading

0 comments on commit a3679a6

Please sign in to comment.