node-archiver
Draining issue while piping large files
I am trying to create and download a zip of large encrypted files on the fly. Sometimes it works fine, but sometimes I get the archiver's drain event. I don't know why this happens: Node's pipe() handles backpressure automatically, so why is there an issue in my case? Also, please let me know whether I am finalizing the archiver in the right place.
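For reference, "drain" is not an error by itself: it is the standard Writable-stream signal that a write() returned false because the destination's buffer was full, and that writing may resume. pipe() honours this contract automatically by pausing the source. A minimal sketch of that contract in plain Node.js streams, with no archiver involved:

// Backpressure sketch in plain Node.js streams: write() returns false when
// the destination's internal buffer is full, and "drain" fires once it
// empties and writing may resume. pipe() implements this loop internally.
const { once } = require("events");

async function writeAll(writable, chunks) {
  for (const chunk of chunks) {
    if (!writable.write(chunk)) {
      await once(writable, "drain"); // pause until the buffer flushes
    }
  }
  writable.end();
}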
Here is the sample code:
const Archiver = require("archiver");
const Request = require("request");   // HTTP client the sample assumes
const Throttle = require("throttle"); // bandwidth-throttling stream

const handleEntries = ({ elem, uniqueFiles, archive, speedLimit }) => {
  return new Promise(resolve => {
    let fileName = elem.fileName;
    const url = elem.url;
    const decipher = elem.decipher;
    // De-duplicate names: repeats become name(1).ext, name(2).ext, ...
    if (uniqueFiles[fileName] || uniqueFiles[fileName] === 0) {
      uniqueFiles[fileName]++;
    } else {
      uniqueFiles[fileName] = 0;
    }
    if (uniqueFiles[fileName]) {
      const lastDotIndex = fileName.lastIndexOf(".");
      const name = fileName.substring(0, lastDotIndex);
      const extension = fileName.substring(lastDotIndex + 1);
      fileName = `${name}(${uniqueFiles[fileName]}).${extension}`;
    }
    // Keep a handle on the request itself: "complete", "request", "response"
    // and "socket" are request events, so their listeners must stay on this
    // object; they never fire on the streams it is piped into.
    const req = Request(url);
    let readableStream = req;
    // Throttle the download to speedLimit bytes per second, if set
    if (speedLimit) {
      const throttle = new Throttle({ bps: Number(speedLimit) });
      readableStream = readableStream.pipe(throttle);
    }
    // If the file is encrypted, decrypt it before appending it to the zip
    readableStream = decipher ? readableStream.pipe(decipher) : readableStream;
    archive.append(readableStream, { name: fileName });
    // Errors do not propagate across pipe(), so listen on both ends
    const onError = error => {
      console.log("Request stream event error fileName : ", fileName, " error : ", error);
      // readableStream.unpipe();
      // readableStream.destroy();
      // Resolve rather than reject so one failed file does not abort the loop
      resolve("done");
    };
    if (readableStream !== req) readableStream.on("error", onError);
    readableStream.on("pipe", () => {
      console.log("Request stream event pipe : ", fileName);
    });
    req
      .on("complete", () => {
        console.log("Request stream event complete : ", fileName);
        // readableStream.unpipe();
        // readableStream.destroy();
        resolve("done");
      })
      .on("error", onError)
      .on("request", () => {
        console.log("Request stream event request : ", fileName);
      })
      .on("response", () => {
        console.log("Request stream event response : ", fileName);
      })
      .on("socket", socket => {
        socket.setKeepAlive(true);
        console.log("Request stream event socket : ", fileName);
      });
  });
};
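A side note on the completion signal: if the intent is to resolve only after archiver has finished consuming the appended stream, archiver's own "entry" event is a more direct signal than the HTTP client's "complete". A sketch, assuming the same archive instance and unique entry names (which the de-duplication logic above guarantees):

// Sketch: resolve when archiver reports it has fully consumed this entry.
// Assumes entry names are unique within the archive.
const waitForEntry = (archive, fileName) =>
  new Promise(resolve => {
    const onEntry = entry => {
      if (entry.name === fileName) {
        archive.removeListener("entry", onEntry);
        resolve(entry);
      }
    };
    archive.on("entry", onEntry);
  });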
const useArchiver = async ({ resp, urls, speedLimit, outputFileName }) => {
  resp.writeHead(200, {
    "Content-Type": "application/zip",
    "Content-Disposition": `attachment; filename="${outputFileName}"`,
    "Access-Control-Allow-Origin": "*",
    "Access-Control-Allow-Methods": "GET, POST, OPTIONS"
  });
  const uniqueFiles = {};
  // zlib level 0 stores entries without compression
  const archive = Archiver("zip", { zlib: { level: 0 } });
  archive.pipe(resp);
  archive
    .on("close", () => {
      console.log("archive stream event close");
      // archive.unpipe();
      // archive.destroy();
    })
    .on("drain", () => {
      console.log("archive stream event drain");
    })
    .on("entry", result => {
      console.log("archive stream event entry : ", result.stats);
    })
    .on("error", error => {
      console.log("archive stream event error : ", error);
      // No reject() exists in an async function's scope; abort the download instead
      resp.destroy(error);
      // archive.unpipe();
      // archive.destroy();
    })
    .on("finish", () => {
      console.log("archive stream event finish");
      // archive.unpipe();
      // archive.destroy();
    })
    .on("pipe", () => {
      console.log("archive stream event pipe");
    })
    .on("progress", async result => {
      console.log("archive stream event progress : ", result.entries);
      // Finalize once every queued entry has been fully processed
      if (urls.length === result.entries.total && urls.length === result.entries.processed) {
        await archive.finalize();
        console.log("finalized : ", urls[0]);
      }
    })
    .on("unpipe", () => {
      console.log("archive stream event unpipe");
    })
    .on("warning", result => {
      console.log("archive stream event warning : ", result);
    });
  for (const elem of urls) {
    await handleEntries({ elem, uniqueFiles, archive, speedLimit });
  }
};
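For completeness, here is roughly how useArchiver would be exercised; the Express app, the route, and the urls entries are hypothetical placeholders, not part of the code above:

// Hypothetical usage of useArchiver; the route and the urls array are
// illustrative placeholders only.
const express = require("express");
const app = express();

app.get("/download", async (req, res) => {
  const urls = [
    { fileName: "a.pdf", url: "https://example.com/a.enc", decipher: null },
    { fileName: "a.pdf", url: "https://example.com/other/a.enc", decipher: null }
  ];
  await useArchiver({ resp: res, urls, speedLimit: 0, outputFileName: "files.zip" });
});

app.listen(3000);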