Close getting fired before finishing Transform on all files
I have a zip file with multiple files. The problem is that 'close' fires before all the transforms are done.
What I want is to run the code in unzipParser.on('close', ...) only after all entries have been processed by transform: function(entry, e, cb).
Also note that I do not know how many files are in the zip; it can be any number.
My current example (not working, because 'close' is fired before all entries have been read through the transform function):
var fs = require('fs');
var stream = require('stream');
var unzip = require('unzip-stream');

var workfolder = "/mytest";
var originalname = "mytest.zip";
var uType = "";

var unzipParser = unzip.Parse();

unzipParser.on('error', function (err) {
    console.error('Error unzipping: ' + err);
});

unzipParser.on('close', function () {
    // res is the Express response object from the enclosing route handler
    if (uType == "") {
        return res.status(200).send({
            success: false,
            message: 'ZIP file does not contain SHP or TAB files'
        });
    } else {
        res.status(200).json({
            success: true,
            message: 'Valid'
        });
    }
});

fs.createReadStream(workfolder + "/" + originalname)
    .pipe(unzipParser)
    .pipe(stream.Transform({
        objectMode: true,
        transform: function (entry, e, cb) {   // e is the (unused) encoding argument
            var tFileName = entry.path;
            var tType = entry.type;            // 'Directory' or 'File'
            var tSize = entry.size;
            if (tType === "File") {
                var tFileExt = tFileName.substr(tFileName.lastIndexOf('.') + 1);
                tFileExt = tFileExt.toUpperCase().trim();
                if (tFileExt == "SHP" || tFileExt == "TAB") {
                    uType = tFileExt;
                }
            }
            entry.pipe(fs.createWriteStream(workfolder + "/" + tFileName)).on('finish', cb);
        }
    }));
Right, at the moment the only guarantee the Parse stream gives is that 'end' is emitted after all entries, not necessarily after the transform callbacks have finished.
Here's one way to deal with it using promises: https://github.com/mhr3/unzip-stream/issues/22#issuecomment-429256365
But I agree that your example should just work; unfortunately, because of multiple levels of buffering, it doesn't.
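Here is a minimal sketch of that promise-based idea, not the exact code from the linked comment: each entry's write is wrapped in a Promise, and the final check runs only after Promise.all() resolves once the transform stream has finished. The console.log at the end stands in for the original res.status(...) response, and the SHP/TAB check is kept from the question.

var fs = require('fs');
var stream = require('stream');
var unzip = require('unzip-stream');

var workfolder = "/mytest";
var originalname = "mytest.zip";
var uType = "";
var entryWrites = [];   // one Promise per extracted file

fs.createReadStream(workfolder + "/" + originalname)
    .pipe(unzip.Parse())
    .pipe(new stream.Transform({
        objectMode: true,
        transform: function (entry, encoding, cb) {
            if (entry.type === "File") {
                var tFileExt = entry.path.substr(entry.path.lastIndexOf('.') + 1).toUpperCase().trim();
                if (tFileExt === "SHP" || tFileExt === "TAB") {
                    uType = tFileExt;
                }
                // Track the write, but call cb() right away so the parser is not blocked.
                entryWrites.push(new Promise(function (resolve, reject) {
                    entry.pipe(fs.createWriteStream(workfolder + "/" + entry.path))
                        .on('finish', resolve)
                        .on('error', reject);
                }));
            } else {
                entry.autodrain();   // skip directories and other non-file entries
            }
            cb();
        }
    }))
    .on('finish', function () {
        // 'finish' fires once every entry has gone through the transform;
        // the writes themselves may still be pending, so wait for them here.
        Promise.all(entryWrites).then(function () {
            if (uType === "") {
                console.log('ZIP file does not contain SHP or TAB files');
            } else {
                console.log('Valid');
            }
        }).catch(function (err) {
            console.error('Error writing extracted files: ' + err);
        });
    });

Because the transform callback returns immediately, the unzip parser never stalls; the outstanding writes are tracked separately and awaited at the end.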
Shouldn't it be like below?
diff --git a/lib/parser-stream.js b/lib/parser-stream.js
index f124407..0205488 100644
--- a/lib/parser-stream.js
+++ b/lib/parser-stream.js
@@ -20,6 +20,9 @@ function ParserStream(opts) {
     this.unzipStream.on('error', function(error) {
         self.emit('error', error);
     });
+    this.unzipStream.on('end', function() {
+        process.nextTick(function() { self.emit('close'); });
+    });
 }
 
 util.inherits(ParserStream, Transform);
@@ -31,7 +34,6 @@ ParserStream.prototype._transform = function (chunk, encoding, cb) {
 ParserStream.prototype._flush = function (cb) {
     var self = this;
     this.unzipStream.end(function() {
-        process.nextTick(function() { self.emit('close'); });
         cb();
     });
 }
The current implementation emits the 'close' event more than once,
probably on every _flush() call.
However, according to the Node.js documentation,
the 'close' event on stream.Readable/stream.Writable signals that no more events will be emitted.
I think 'close' should be fired only once, with no events after it.
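For illustration only, one way to enforce a single 'close' is to route every emit through a guarded helper; the _emitClose and _closeEmitted names below are made up for this sketch and are not part of the library:

// Hypothetical helper inside parser-stream.js: emits 'close' at most once,
// no matter how many code paths end up calling it.
ParserStream.prototype._emitClose = function () {
    var self = this;
    if (self._closeEmitted) return;              // already emitted, do nothing
    self._closeEmitted = true;
    process.nextTick(function () { self.emit('close'); });
};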
Also, according to the Node.js documentation, the 'end' event may not always be emitted;
you shouldn't rely on it being fired in every case, should you?
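On the consumer side, one way to avoid depending on 'end' alone is stream.finished() (available since Node 10), which invokes its callback on 'end'/'finish', on 'error', and on premature close. A rough sketch, reusing the unzipParser variable from the example above:

var stream = require('stream');

stream.finished(unzipParser, function (err) {
    if (err) {
        // Covers 'error' and premature close as well as a missing 'end'.
        return console.error('Unzip did not complete: ' + err);
    }
    console.log('Parser is done; no more events are expected.');
});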
Oh, but some tests start to fail with this change. It isn't that simple, is it?