diff --git a/package.json b/package.json index ecb92e9..0f3f239 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "hash-backup", - "version": "1.0.0", + "version": "1.0.1", "description": "A hash based incremental file backupper.", "main": "src/hash_backup.js", "scripts": { diff --git a/src/hash_backup.js b/src/hash_backup.js index 1740050..e658d38 100644 --- a/src/hash_backup.js +++ b/src/hash_backup.js @@ -319,8 +319,8 @@ async function _setFileToBackup(backupDir, backupDirInfo, fileHash, fileBytes) { throw new Error(`Error: invalid compression algorithm ${backupDirInfo.compression.algorithm}`); } - if (storeBytes.length > fileBytes.length) { - console.debug(`Not compressed with ${backupDirInfo.compression.algorithm} as file increases in size from ${fileBytes.length} to ${storeBytes.length} bytes`); + if (storeBytes.length >= fileBytes.length) { + console.debug(`Not compressed with ${backupDirInfo.compression.algorithm} as file increases or stays the same size from ${fileBytes.length} to ${storeBytes.length} bytes`); storeBytes = fileBytes; resultAlgo = null; } else { diff --git a/todo.txt b/todo.txt index 9d98fbf..58190fc 100644 --- a/todo.txt +++ b/todo.txt @@ -1,3 +1,4 @@ +split single nodejs file into many files add check for valid hash or compression algorythm and valid compression level modify command that can modify backup dir parameters including upgrading or downgrading hash backup format implement backup command ignore-symlinks arg set to true, in-memory arg set to false, check-duplicate-hash set to false @@ -19,7 +20,6 @@ add auto purge option to remove command compress stored json files add checksums memoize fsmetajson gets and maybe make into function -split single nodejs file into many files hash backup version 3 can have 2 modes, human readable json files and minified compressed json files add alternate stream support on windows allow subfolders in backups folder @@ -28,3 +28,6 @@ add lzma support to node-hash-backup as an 
optional dependency add ultimate compression mode that tests every algo with max settings and uses the one with the smallest filesize change in-memory to in-memory-cutoff, filesize in bytes above which it will not be in memory; add max-compress-cutoff above which no max compression; add compress-cutoff above or maybe below which no compression at all add archive wide toggle for write protection on the archive files +extensive test suite, first part tests each facet of each feature, second part generates many random file setups and tests backup and restore of them, checking for exact metadata accuracy; can do both types of tests in temp folders obtained via Node.js os.tmpdir() +check to make sure stopping program halfway does not cause broken data state anywhere +make sure all async subfunctions / function calls in every async function are awaited