node-archiver
node-archiver copied to clipboard
Archiver with a custom readable stream is not working?
Hi! I am planning to use Archiver in an open-source protocol. I can't get it working with a custom readable stream. Here is my code. What am I doing wrong?
var archiver = require('archiver');
var fs = require('fs')
// Create a write stream that will receive the finished archive bytes.
const output = fs.createWriteStream(__dirname + '/example.zip');

// Configure a zip archive with maximum zlib compression.
const archive = archiver('zip', {
  zlib: { level: 9 },
});

// 'close' fires once the underlying file descriptor has been closed,
// i.e. every archive byte has been flushed to disk.
output.on('close', () => {
  console.log(archive.pointer() + ' total bytes');
  console.log('archiver has been finalized and the output file descriptor has closed.');
});

// Standard Node.js stream 'end' event (not archiver-specific): fires
// when the data source has been fully drained.
// @see: https://nodejs.org/api/stream.html#stream_event_end
output.on('end', () => {
  console.log('Data has been drained');
});

// Surface non-blocking problems (e.g. stat failures); anything else is fatal.
archive.on('warning', (err) => {
  if (err.code === 'ENOENT') {
    // log warning
  } else {
    // throw error
    throw err;
  }
});

// Never swallow hard archiver errors.
archive.on('error', (err) => {
  throw err;
});

// Route all archive output into the file stream.
archive.pipe(output);
/* Implementation */
var stream = require('stream');
// Custom Readable stream that emits the integers 0..100 as UTF-8 text,
// one number per chunk.
function Num(options) {
  // Initialize Readable's per-instance state.
  stream.Readable.call(this, options);
  this._start = 0;
  this._end = 100;
  this._curr = this._start;
}
// Inherit the Readable prototype.
Num.prototype = Object.create(stream.Readable.prototype);
// BUG FIX: `constructor` must point back at Num itself, not at
// stream.Readable — otherwise `new Num().constructor` misidentifies
// the type. (The standard prototypal-inheritance pattern restores the
// subclass constructor after Object.create.)
Num.prototype.constructor = Num;
// Push exactly one chunk per _read() call; Node re-invokes _read as the
// internal buffer drains. Signal EOF with push(null) after the final value.
Num.prototype._read = function() {
  var num = this._curr;
  var buf = Buffer.from(num.toString(), 'utf-8');
  this.push(buf);
  this._curr++;
  if (num === this._end) {
    // All numbers from _start through _end have been pushed.
    this.push(null);
  }
};
// Usage: hand the readable stream to archiver untouched.
var num = new Num();
// BUG FIX: do NOT attach a 'data' listener here. Adding one switches the
// stream into flowing mode immediately, so every chunk is emitted to the
// console listener and discarded BEFORE archiver begins reading the
// stream when finalize() runs — which is why file1.txt came out empty
// (0 bytes). Archiver must be the sole consumer of the stream; if you
// need to observe the data too, pipe through a PassThrough instead.
archive.append(num, { name: 'file1.txt' });
archive.finalize();
It generates a zip file of 134 bytes. When I open the archive, I see my file 'file1.txt', but it is empty and its size is 0 bytes.
I am facing this exact issue. When I .append() a stream, it will not fully process all chunks of data.