Skip to content

Commit

Permalink
#102: initial changes for rcf
Browse files Browse the repository at this point in the history
report generation works via cli when -c is passed

still needs to work with the lib
  • Loading branch information
mohit-s96 committed Sep 24, 2024
1 parent 3e188d7 commit 45af53b
Show file tree
Hide file tree
Showing 5 changed files with 364 additions and 49 deletions.
7 changes: 4 additions & 3 deletions common.js
Original file line number Diff line number Diff line change
Expand Up @@ -358,7 +358,7 @@ const buildMetadataMap = ({ fields = [], lookups = [] } = {}) => {
...fields.reduce(
(
acc,
{ resourceName, fieldName, type, isExpansion = false, isComplexType = false, annotations, typeName = '', nullable = true, isCollection = false }
{ resourceName, fieldName, type, isExpansion = false, isComplexType = false, annotations, typeName = '', nullable = true, ...rest }
) => {
if (!acc[resourceName]) {
acc[resourceName] = {};
Expand All @@ -381,10 +381,11 @@ const buildMetadataMap = ({ fields = [], lookups = [] } = {}) => {
typeName,
nullable,
isExpansion,
isCollection,
isLookupField,
isComplexType: isComplexType || (!isExpansion && !type?.startsWith('Edm.') && !isLookupField),
ddWikiUrl
annotations,
ddWikiUrl,
...rest
};

if (isLookupField && lookupMap?.[type]) {
Expand Down
1 change: 1 addition & 0 deletions index.js
Original file line number Diff line number Diff line change
Expand Up @@ -137,6 +137,7 @@ if (require?.main === module) {
.option('-a, --additionalProperties', 'Pass this flag to allow additional properties in the schema. False by default')
.option('-v, --version <string>', 'The DD version of the metadata report')
.option('-p, --payloadPath <string>', 'Path to the payload file OR directory/zip containing files that need to be validated')
.option('-c, --createReports', 'Option to generate metadata and availability reports for RCF testing')
.option('-r, --resourceName <string>', 'Resource name to validate against. Required if --version is passed when validating.')
.description('Generate a schema or validate a payload against a schema')
.action(options => schema({ ...options, fromCli: FROM_CLI }));
Expand Down
294 changes: 294 additions & 0 deletions lib/schema/create-report.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,294 @@
const { getMetadata } = require('@reso/reso-certification-etl/lib/common');
const { createReplicationStateServiceInstance } = require('../../common');
const { generateJsonSchema } = require('./generate');
// const payload = require('/Users/mohit/Downloads/reso-replication-output/Property/2023-11-15T08-21-39.464Z/page-1.json');
// const payload = require('/Users/mohit/Downloads/Property/2024-03-31T20-33-25.511Z/page-8.json');
const replicationInstance = createReplicationStateServiceInstance();

/**
 * Infer the most specific OData Edm numeric type for a JavaScript number.
 *
 * Integers are bucketed into Edm.Int16/Int32/Int64 by value range; any
 * non-integral number is Edm.Decimal with an inferred `scale` (digits after
 * the decimal point) and `precision` (total digit count).
 *
 * NOTE(review): numbers that stringify in exponential notation (e.g. 1e-7)
 * will produce misleading scale/precision — confirm payloads can't contain
 * such values.
 *
 * @param {number} num - The number to analyze.
 * @returns {{ type: string, scale?: number, precision?: number }}
 */
const analyzeNumber = num => {
  const result = {};

  if (Number.isInteger(num)) {
    if (num >= -32768 && num <= 32767) {
      result.type = 'Edm.Int16';
    } else if (num >= -2147483648 && num <= 2147483647) {
      result.type = 'Edm.Int32';
    } else {
      result.type = 'Edm.Int64';
    }
  } else {
    result.type = 'Edm.Decimal';

    // Use the absolute value so a leading '-' sign is not counted toward
    // precision (previously -1.25 reported precision 4 instead of 3).
    const digits = Math.abs(num).toString();
    const [, decimal] = digits.split('.');
    result.scale = decimal ? decimal.length : 0;
    result.precision = digits.replace('.', '').length;
  }

  return result;
};
/**
 * Recursively infer an OData type descriptor for a payload value.
 *
 * Arrays yield a collection descriptor containing the inferred type of each
 * element; scalars map to their Edm primitive type; `null` is reported as
 * nullable; any remaining object is treated as an expansion.
 *
 * @param {*} value - Any JSON-compatible payload value.
 * @returns {Object} Type descriptor ({ type } and/or { types, isCollection, isExpansion, nullable, ... }).
 * @throws {Error} If the value has a type this function does not handle.
 */
const inferType = value => {
  if (Array.isArray(value)) {
    const types = value.map(element => inferType(element));
    return {
      types,
      isCollection: true,
      isExpansion: types.some(descriptor => descriptor.isExpansion)
    };
  }

  // null must be handled before the typeof switch: typeof null === 'object'.
  if (value === null) {
    return { type: 'null', nullable: true };
  }

  switch (typeof value) {
    case 'boolean':
      return { type: 'Edm.Boolean' };
    case 'number':
      return analyzeNumber(value);
    case 'string':
      return { type: 'Edm.String' };
    case 'object':
      return { type: 'object', isExpansion: true };
    default:
      // unreachable
      throw Error('Unreachable code: Invalid Type');
  }
};

/**
 * Walk a payload and collect every observed raw value per resource/field
 * into `cache` (mutated in place): cache[resourceName][fieldName] = [values].
 *
 * Expansions are recursed into instead of being cached on the parent:
 * declared expansions (per metadataMap) recurse under their typeName, while
 * undeclared object values recurse under the field name itself. OData
 * annotation keys (starting with '@') are skipped.
 *
 * @param {Object} payload - { value: [...] } collection or a single record.
 * @param {Object} cache - Accumulator, mutated in place.
 * @param {string} resourceName - Resource the records belong to.
 * @param {Object} metadataMap - resourceName -> fieldName -> field metadata.
 */
const buildPayloadCache = (payload, cache, resourceName, metadataMap) => {
  const records = Array.isArray(payload.value) ? payload.value : [payload];

  for (const record of records) {
    for (const [fieldName, fieldValue] of Object.entries(record)) {
      const fieldMetadata = metadataMap?.[resourceName]?.[fieldName];
      const { isExpansion: inferredExpansion } = inferType(fieldValue);

      if (fieldMetadata?.isExpansion) {
        // Known expansion: recurse under the declared model name.
        buildPayloadCache({ value: fieldValue }, cache, fieldMetadata?.typeName, metadataMap);
      } else if (inferredExpansion) {
        // Undeclared expansion: recurse under the field name itself.
        buildPayloadCache({ value: fieldValue }, cache, fieldName, metadataMap);
      } else {
        if (!cache[resourceName]) {
          cache[resourceName] = {};
        }
        if (fieldName.startsWith('@')) continue;
        if (!cache[resourceName][fieldName]) {
          cache[resourceName][fieldName] = [];
        }
        cache[resourceName][fieldName].push(fieldValue);
      }
    }
  }
};

/**
 * Build a RESO Data Dictionary metadata report from a Data Availability
 * report, the generated JSON schema, and the raw payload.
 *
 * Fields and lookup values present in the schema's MetadataMap are emitted
 * as standard (RESO) items; anything else is treated as local — local
 * fields get their type inferred from the values observed in the payload.
 *
 * @param {Object} args
 * @param {Object} args.daReport - Availability report ({ fields, lookupValues, version }).
 * @param {Object} args.schema - Generated JSON schema (expects definitions.MetadataMap).
 * @param {Object} args.payload - { value: [...] } collection or a single record.
 * @param {string} args.resourceName - Top-level resource being processed.
 * @returns {Object} DD metadata report ({ description, generatedOn, version, fields, lookups }).
 */
const generateDDReport = ({ daReport, schema, payload, resourceName }) => {
  const { MetadataMap } = schema.definitions || {};
  // Default both collections so a sparse availability report can't crash the generator.
  const { fields = [], lookupValues = [] } = daReport || {};
  const ddFields = [],
    ddLookups = [];
  const localFields = [],
    localLookups = [];
  const cache = {};

  buildPayloadCache(payload, cache, resourceName, MetadataMap);
  lookupValues.forEach(l => {
    const { resourceName, fieldName, lookupValue } = l;
    // Resolve by annotated lookup value first, then by legacy OData value.
    const lookup =
      MetadataMap?.[resourceName]?.[fieldName]?.lookupValues?.[lookupValue] ??
      MetadataMap?.[resourceName]?.[fieldName]?.legacyODataValues?.[lookupValue] ??
      {};
    const { type: localLookupName } = MetadataMap?.[resourceName]?.[fieldName] ?? {};
    const isReso = Object.keys(lookup).length > 0;
    const { ddWikiUrl, type } = lookup;
    if (isReso) {
      const lookupObj = {
        lookupName: type,
        lookupValue,
        type: 'Edm.Int32',
        annotations: [
          {
            term: 'RESO.DDWikiUrl',
            value: ddWikiUrl
          }
        ]
      };
      ddLookups.push(lookupObj);
    } else {
      localLookups.push({
        lookupName: localLookupName,
        lookupValue,
        type: 'Edm.Int32'
      });
    }
  });
  fields.forEach(f => {
    const { resourceName, fieldName } = f;
    const isReso = !!MetadataMap?.[resourceName]?.[fieldName];
    // Destructure to strip map-internal keys (lookup tables, wiki URL, etc.)
    // before spreading the remaining metadata into the report entry.
    const { ddWikiUrl, legacyODataValues, isLookupField, lookupValues, ...fieldMetadata } = MetadataMap?.[resourceName]?.[fieldName] ?? {};
    if (isReso) {
      const fieldObject = {
        fieldName,
        resourceName,
        ...fieldMetadata
      };
      ddFields.push(fieldObject);
    } else {
      localFields.push({ fieldName, resourceName });
    }
  });
  // Infer metadata for local (non-RESO) fields from observed payload values.
  localFields.forEach(({ fieldName, resourceName }) => {
    const inferredMetadata = {
      resourceName,
      fieldName
    };
    // A field reported as available may be absent from this payload slice —
    // guard the cache lookup so we don't throw on a missing bucket.
    const observedValues = cache?.[resourceName]?.[fieldName] ?? [];
    observedValues.forEach(v => {
      const { type, types, isCollection, nullable, scale, precision, isExpansion } = inferType(v);
      if (isCollection) {
        inferredMetadata.isCollection = true;
        const typeNames = [...new Set(types.map(t => t.type))];
        if (typeNames.length > 2) {
          throw new Error('Impossible condition found');
        }
        if (typeNames.includes('null') || nullable) {
          inferredMetadata.nullable = true;
        }
        const nonNullTypes = typeNames.filter(x => x !== 'null');
        if (nonNullTypes.includes('object')) {
          inferredMetadata.isExpansion = true;
        }
        // eslint-disable-next-line prefer-destructuring
        inferredMetadata.type = nonNullTypes[0];
      } else {
        if (type === 'null') return;
        if (type?.startsWith('Edm.Int')) {
          // Widen integer types across samples; 'Edm.Int16' < 'Edm.Int32'
          // < 'Edm.Int64' holds lexicographically, so string compare works.
          if (!inferredMetadata.type || inferredMetadata.type < type) {
            inferredMetadata.type = type;
          }
        } else {
          inferredMetadata.type = type;
        }
        if (isExpansion) {
          inferredMetadata.isExpansion = true;
          inferredMetadata.type = 'Custom Type';
        }
        if (nullable) {
          inferredMetadata.nullable = nullable;
        }
        // Track the maximum observed scale/precision for decimals.
        if (scale) {
          if (!inferredMetadata.scale || inferredMetadata.scale < scale) {
            inferredMetadata.scale = scale;
          }
        }
        if (precision) {
          if (!inferredMetadata.precision || inferredMetadata.precision < precision) {
            inferredMetadata.precision = precision;
          }
        }
        // Track the maximum observed string length.
        if (type === 'Edm.String') {
          const { length } = v;
          if (!inferredMetadata.maxLength || inferredMetadata.maxLength < length) {
            inferredMetadata.maxLength = length;
          }
        }
      }
    });
    ddFields.push(inferredMetadata);
  });
  return {
    description: 'RESO Data Dictionary Metadata Report',
    generatedOn: new Date().toISOString(),
    version: daReport?.version,
    fields: ddFields,
    lookups: [...ddLookups, ...localLookups]
  };
};

/**
 * Scan the top level of a payload and describe each expansion field found.
 *
 * Declared expansions (per metadataMap) are reported under their declared
 * model name with their declared type/collection flags; undeclared object
 * values are reported under the field name with type 'Custom Type'. Each
 * (modelName, fieldName) pair is reported at most once.
 *
 * @param {Object} payload - { value: [...] } collection or a single record.
 * @param {string} resourceName - Resource the records belong to.
 * @param {Object} metadataMap - resourceName -> fieldName -> field metadata.
 * @returns {Array<{ fieldName: string, modelName: string, isCollection: boolean, type: string }>}
 */
const expansionInfoFromPayload = (payload, resourceName, metadataMap) => {
  const expansionInfoMap = {};
  const records = Array.isArray(payload.value) ? payload.value : [payload];

  for (const record of records) {
    for (const [fieldName, value] of Object.entries(record)) {
      const fieldMetadata = metadataMap?.[resourceName]?.[fieldName];
      const { isExpansion: inferredExpansion, isCollection } = inferType(value);

      let modelName;
      let info;
      if (fieldMetadata?.isExpansion) {
        modelName = fieldMetadata?.typeName;
        info = { isCollection: fieldMetadata?.isCollection, type: fieldMetadata?.type };
      } else if (inferredExpansion) {
        modelName = fieldName;
        info = { isCollection, type: 'Custom Type' };
      } else {
        continue;
      }

      if (!expansionInfoMap[modelName]) {
        expansionInfoMap[modelName] = {};
      }
      // First occurrence wins for each (modelName, fieldName) pair.
      if (!expansionInfoMap[modelName][fieldName]) {
        expansionInfoMap[modelName][fieldName] = info;
      }
    }
  }

  return Object.entries(expansionInfoMap).flatMap(([modelName, fieldsForModel]) =>
    Object.entries(fieldsForModel).map(([fieldName, { isCollection, type }]) => ({ fieldName, modelName, isCollection, type }))
  );
};

/**
 * Generate both RCF reports — the Data Dictionary metadata report and the
 * Data Availability report — for a single resource payload.
 *
 * @param {Object} args
 * @param {Object} args.payload - { value: [...] } collection or a single record.
 * @param {string} args.version - DD version used to fetch reference metadata.
 * @param {string} args.resourceName - Resource the payload belongs to.
 * @returns {Promise<{ ddReport: Object, daReport: Object }>}
 */
const generateRcfReports = async ({ payload, version, resourceName }) => {
  const { scorePayload, consolidateResults } = require('../replication/utils');

  const metadataReport = getMetadata(version);
  const schema = await generateJsonSchema({
    metadataReportJson: metadataReport
  });

  // Use the resource actually being processed (was hard-coded to 'Property')
  // so expansions resolve against the correct slice of the metadata map.
  const expansionInfo = expansionInfoFromPayload(payload, resourceName, schema.definitions.MetadataMap);

  replicationInstance.setMetadataMap(metadataReport);
  scorePayload({
    expansionInfo,
    jsonData: payload,
    replicationStateServiceInstance: replicationInstance,
    resourceName
  });

  const daReport = {
    description: 'RESO Data Availability Report',
    version,
    generatedOn: new Date().toISOString(),
    ...consolidateResults({
      resourceAvailabilityMap: replicationInstance.getResourceAvailabilityMap(),
      responses: replicationInstance.getResponses(),
      topLevelResourceCounts: replicationInstance.getTopLevelResourceCounts()
    })
  };

  const ddReport = generateDDReport({ daReport, schema, payload, resourceName });

  // Expansion fields are not covered by generateDDReport — append them here.
  expansionInfo.forEach(({ fieldName, isCollection, modelName, type }) => {
    ddReport.fields.push({
      resourceName,
      fieldName,
      typeName: modelName,
      isCollection,
      isExpansion: true,
      type
    });
  });

  return { ddReport, daReport };
};

/**
 * Merge multiple DD metadata reports into one by concatenating their
 * `fields` and `lookups`. All other top-level metadata (description,
 * version, generatedOn, ...) is taken from the first report.
 *
 * Unlike the naive reduce, this does not mutate the input reports, skips
 * missing fields/lookups arrays instead of concatenating `undefined`
 * elements, and returns `undefined` for an empty input.
 *
 * @param {Array<Object>} reports - DD reports to combine.
 * @returns {Object|undefined} The combined report, or undefined if `reports` is empty.
 */
const combineDDReports = (reports = []) => {
  if (reports.length === 0) return undefined;
  const [first, ...rest] = reports;
  return rest.reduce(
    (acc, curr) => ({
      ...acc,
      fields: (acc.fields ?? []).concat(curr?.fields ?? []),
      lookups: (acc.lookups ?? []).concat(curr?.lookups ?? [])
    }),
    { ...first }
  );
};

// Public API: report generation entry point and the report combiner.
// Internal helpers (inferType, buildPayloadCache, etc.) are intentionally
// not exported.
module.exports = {
  generateRcfReports,
  combineDDReports
};
Loading

0 comments on commit 45af53b

Please sign in to comment.