-
Notifications
You must be signed in to change notification settings - Fork 25
Expand file tree
/
Copy pathsimple.js
More file actions
102 lines (85 loc) · 3.06 KB
/
simple.js
File metadata and controls
102 lines (85 loc) · 3.06 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
/// Parameters for generating the PAR2 file
// array of filenames to protect against
var files = ['simple.js'];
// PAR2 output file
var par2output = 'some_file.par2';
// PAR2 block size
var sliceSize = 512*1024; // 512KB
// number of recovery blocks to generate
var recoverySlices = 8;

/// Include libraries
var ParPar = require('../');
var fs = require('fs');
var async = require('async');

/// PAR2 requires some file info before it can start generating anything, specifically
/// the file size and the MD5 of the first 16KB of each file
// ParPar.fileInfo() is a convenience function which gives us all that info
ParPar.fileInfo(files, function(err, info) {
	if(err) return console.error('Error: ', err);

	/// once we have the info, we can create a PAR2 instance
	var par2 = new ParPar.PAR2(info, sliceSize);
	// tell PAR2 how many recovery blocks we want - need to specify this before pumping
	// through any data, otherwise no recovery blocks will be generated
	par2.setRecoverySlices(recoverySlices);

	// buffer to hold read data
	// note that we create the buffer outside the loop. Not only does this reduce
	// overheads with reallocation, it's nicer to the GC
	// NOTE: Buffer.allocUnsafe replaces the deprecated `new Buffer(size)` constructor;
	// leaving the contents uninitialized is harmless here because only the bytes
	// actually filled by fs.read (buf.slice(0, bytesRead)) are ever consumed
	var buf = Buffer.allocUnsafe(sliceSize);

	// loop through each file; NOTE: files may be a re-ordered version of the files array you supplied it!
	var pFiles = par2.getFiles();
	async.eachSeries(pFiles, function(file, cb) {
		console.log('Processing file: ' + file.name);
		fs.open(file.name, 'r', function(err, fd) {
			if(err) return cb(err);

			// read in all data and send it to file.process()
			var eof = false;
			async.until(function(){return eof;}, function(cb) {
				fs.read(fd, buf, 0, sliceSize, null, function(err, bytesRead) {
					if(err) return cb(err);
					if(!bytesRead) {
						// zero bytes read => end of file; stop the until-loop
						eof = true;
						return cb();
					}
					// pump data
					file.process(buf.slice(0, bytesRead), cb);
				});
			}, function(err) {
				// file fully read or error occurred; close the descriptor in BOTH
				// cases (previously a read/process error returned early and leaked
				// the fd), and propagate the first error encountered
				fs.close(fd, function(closeErr) {
					cb(err || closeErr);
				});
			});
		});
	}, function(err) {
		// small adapter IIFE so the final cleanup/reporting callback below runs
		// on every exit path (early error or successful write)
		(function(cb) {
			if(err) return cb(err);

			/// we need to indicate that we're done with sending input; .finish() will prepare the recovery data
			par2.finish(function() {
				// all data processed, write out PAR2
				var data = [];
				// note that the ordering of PAR2 packets is arbitrary

				// pull each recovery slice in turn and prepend its packet header
				async.timesSeries(recoverySlices, function(i, cb) {
					par2.getNextRecoveryData(function(idx, recData) {
						par2.getRecoveryPacketHeader(recData, function(recHeader) {
							data.push(Buffer.concat([recHeader, recData.data]));
							recData.release(); // hand the slice buffer back to ParPar
							cb();
						});
					});
				}, function() {
					// per-file description and checksum packets
					pFiles.forEach(function(file) {
						data.push(file.makePacketDescription());
						data.push(file.getPacketChecksums());
					});
					// main packet + creator packet, then write the whole archive out
					data.push(par2.getPacketMain());
					data.push(par2.makePacketCreator('ParPar example'));
					fs.writeFile(par2output, Buffer.concat(data), cb);
				});
			});
		})(function(err) {
			// always release ParPar's internal resources, then report the outcome
			par2.close();
			if(err) {
				console.error('Error: ', err);
			} else {
				console.log('Complete!');
			}
		});
	});
});