const { execFileSync } = require('child_process');
const { readFileSync, readdirSync, writeFileSync } = require('fs');
const path = require('path');

// Node version directories are named with just the major version number (e.g. "18")
const nodeDirRegex = /^\d+$/;

// Given a "repo:tag", returns the array of architectures the image supports,
// taken from the "Architectures:" line of `bashbrew cat` output
const fetchImageArches = (repoTag) => execFileSync('bashbrew', [
  'cat', repoTag,
], { encoding: 'utf8' }).split('\n')
  .find((line) => line.startsWith('Architectures:'))
  .split(':')[1]
  .trim()
  .split(/\s*,\s*/);
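// Illustrative call (actual values depend on the image's manifest):
//   fetchImageArches('buildpack-deps:bookworm') // => ['amd64', 'arm32v7', 'arm64v8', ...]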

// Parses an "architectures" file into an object like:
// {
//   arch1: ['variant1', 'variant2'],
//   //...
// }
// The slice(1) drops the first match, i.e. the "bashbrew-arch  variants" header row
const parseArchitecturesFile = (file) => Object.fromEntries(
  [...readFileSync(file, 'utf8').matchAll(/^(?<arch>\S+)\s+(?<variants>\S+)$/mg)]
    .slice(1)
    .map(({ groups: { arch, variants } }) => [arch, variants.split(',')]),
);
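// For reference, the parsed file looks roughly like this (values are illustrative):
//   bashbrew-arch  variants
//   amd64          bookworm,alpine3.20
//   arm64v8        bookworm,alpine3.20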

// Takes in an object like:
//   {
//     arch1: ['variant1', 'variant2'],
//     // ...
//   }
// and returns an object like:
//   {
//     variant1: ['arch1', 'arch2'],
//     // ...
//   }
const invertObject = (obj) => Object.entries(obj)
  .reduce((acc, [key, vals]) => vals.reduce((valAcc, val) => {
    const { [val]: keys, ...rest } = valAcc;
    return {
      ...rest,
      [val]: keys
        ? [...keys, key]
        : [key],
    };
  }, acc), {});

// Returns a list of the child directories in the given path
const getChildDirectories = (parent) => readdirSync(parent, { withFileTypes: true })
  .filter((dirent) => dirent.isDirectory())
  .map(({ name }) => path.resolve(parent, name));

// Returns the child directories whose names look like node version numbers (e.g. "18")
const getNodeVersionDirs = (base) => getChildDirectories(base)
  .filter((childPath) => nodeDirRegex.test(path.basename(childPath)));

// Compares two arrays as sets; assumes neither contains duplicates
const areArraysEquivalent = (arches1, arches2) => arches1.length === arches2.length
    && arches1.every((arch) => arches2.includes(arch));

// Returns the paths of Dockerfiles that are at: base/*/Dockerfile
const getDockerfilesInChildDirs = (base) => getChildDirectories(base)
  .map((childDir) => path.resolve(childDir, 'Dockerfile'));

// Given a path to a Dockerfile like .../14/variant/Dockerfile, this will return "variant"
const getVariantFromPath = (file) => path.basename(path.dirname(file));

// Returns the image named in the Dockerfile's first FROM line
const getBaseImageFromDockerfile = (file) => readFileSync(file, 'utf8')
  .match(/^FROM (\S+)/m)[1];
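// e.g. for a Dockerfile whose first FROM line is "FROM buildpack-deps:bookworm"
// (an illustrative base image), this returns 'buildpack-deps:bookworm'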

// Given a dockerfile, this function returns an array like [variant, [arch1, arch2, ...]]
const getVariantAndArches = (dockerfile) => {
  const variant = getVariantFromPath(dockerfile);
  const baseImage = getBaseImageFromDockerfile(dockerfile);
  const arches = fetchImageArches(baseImage);

  // TODO: filter by arches node supports
  return [variant, arches];
};

// Reads the stored "architectures" file and returns it keyed by variant instead of arch
const getStoredVariantArches = (file) => {
  const storedArchVariants = parseArchitecturesFile(file);
  return invertObject(storedArchVariants);
};

// True when both objects list the same variants and each variant supports the same arches
const areVariantArchesEquivalent = (current, stored) => Object.keys(current).length
  === Object.keys(stored).length
    && Object.entries(current).every(
      ([variant, arches]) => stored[variant] && areArraysEquivalent(arches, stored[variant]),
    );

// Formats one row of the "architectures" file, padding so the variants column starts at variantOffset
const formatEntry = ([arch, variants], variantOffset) => `${arch}${' '.repeat(variantOffset - arch.length)}${variants.join(',')}`;
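// Illustrative call: formatEntry(['amd64', ['bookworm', 'alpine3.20']], 15)
// => 'amd64          bookworm,alpine3.20' (ten spaces of padding after the arch)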

const storeArchitectures = (variantArches, architecturesFile) => {
  const archVariants = invertObject(variantArches);
  const data = {
    'bashbrew-arch': ['variants'],
    ...archVariants,
  };

  const maxKeyLength = Math.max(...Object.keys(data).map((key) => key.length));
  // Variants start 2 spaces after the longest key
  const variantOffset = maxKeyLength + 2;

  const str = Object.entries(data)
    .map((entry) => formatEntry(entry, variantOffset))
    .join('\n');

  writeFileSync(architecturesFile, str);

  // Just here for debugging purposes
  console.log(str);
  console.log('\n\n');
};

// Regenerates the "architectures" file for one node version directory;
// returns true if the stored file was outdated and had to be rewritten
const updateNodeDirArches = (nodeDir) => {
  const dockerfiles = getDockerfilesInChildDirs(nodeDir);

  const currentVariantArches = Object.fromEntries(dockerfiles.map(getVariantAndArches));
  const architecturesFile = path.resolve(nodeDir, 'architectures');
  const storedVariantArches = getStoredVariantArches(architecturesFile);

  if (areVariantArchesEquivalent(currentVariantArches, storedVariantArches)) {
    console.log('Architectures up-to-date: ', nodeDir);
    return false;
  }

  console.log('Architectures outdated: ', nodeDir);
  storeArchitectures(currentVariantArches, architecturesFile);

  return true;
};

// Updates every node version directory; returns true if any of them changed
const updateArchitectures = () => {
  const nodeDirs = getNodeVersionDirs(__dirname);
  const dirsUpdated = nodeDirs.map(updateNodeDirArches);
  return dirsUpdated.some((updated) => updated);
};

module.exports = updateArchitectures;
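// Callers can use the exported function's return value to tell whether any
// "architectures" file needed rewriting, e.g.:
//   const anyOutdated = updateArchitectures();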