File is incomplete when `.addEntry(stream, { relativePath })` is called with multiple streams from async functions
Problem
I want to create a `.tgz` file from two streams, but the resulting file is incomplete when `TgzStream.addEntry(stream, { relativePath })` is invoked inside asynchronous functions.
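Distilled to its core, the failing pattern looks like this (a sketch; `stream1`, `stream2`, and `fileStream` stand for the streams created in the full runnable tests below):

```js
// stream1, stream2, fileStream: as created in the tests below.
const tgzStream = new TgzStream();

// An entry registered in a microtask still makes it into the archive...
Promise.resolve().then(() => {
  tgzStream.addEntry(stream1, { relativePath: 'xx.log' });
});

// ...but an entry registered in a later macrotask, after pump() has
// started draining tgzStream, is silently missing from the archive.
setTimeout(() => {
  tgzStream.addEntry(stream2, { relativePath: 'yy.log' });
}, 0);

pump(tgzStream, fileStream, err => { /* archive only contains xx.log */ });
```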
Reproduction
I have written two test cases to reproduce the issue; the second one triggers the bug.
Put the test code below into `test/tgz/stream.test.js`, run `npm test`, then decompress the generated `.tgz` file and check its contents yourself.
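Alternatively, the check can be scripted with the library's own `uncompress` API (a sketch; `destFile` is the archive path logged by the test):

```js
const compressing = require('compressing');
const fs = require('fs');
const os = require('os');
const path = require('path');

// destFile: the archive path logged by the failing test.
// Extract into a fresh temp directory and list what actually came out.
const destDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tgz-check-'));
compressing.tgz.uncompress(destFile, destDir).then(() => {
  // Expected: [ 'xx.log', 'yy.log' ]; the second test only yields [ 'xx.log' ].
  console.log(fs.readdirSync(destDir));
});
```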
Test code
```js
// The generated .tgz file contains `xx.log` and `yy.log`.
it.only('.addEntry(stream, { relativePath }) with multi streams', done => {
const destFile = path.join(os.tmpdir(), uuid.v4() + '.tgz');
const fileStream = fs.createWriteStream(destFile);
console.log('dest', destFile);
mm(console, 'warn', msg => {
assert(msg === 'You should specify the size of streamming data by opts.size to prevent all streaming data from loading into memory. If you are sure about memory cost, pass opts.supressSizeWarning: true to suppress this warning');
});
const { Readable } = require('stream');
const tgzStream = new TgzStream();
const stream = new Readable({
encoding: 'utf8',
read() {
[ '123213', 'asdasdasd' ].forEach(str => {
this.push(str);
this.push('\n');
});
this.push(null);
},
});
tgzStream.addEntry(stream, { relativePath: 'xx.log' });
const stream2 = new Readable({
encoding: 'utf8',
read() {
[ '98765', 'mnbv' ].forEach(str => {
this.push(str);
this.push('\n');
});
this.push(null);
},
});
tgzStream.addEntry(stream2, { relativePath: 'yy.log' });
pump(tgzStream, fileStream, err => {
assert(!err);
assert(fs.existsSync(destFile));
done();
});
});
```
```js
// The generated .tgz file only contains `xx.log`.
it.only('.addEntry(stream, { relativePath }) with multi streams by async functions', done => {
const destFile = path.join(os.tmpdir(), uuid.v4() + '.tgz');
const fileStream = fs.createWriteStream(destFile);
console.log('dest', destFile);
mm(console, 'warn', msg => {
assert(msg === 'You should specify the size of streamming data by opts.size to prevent all streaming data from loading into memory. If you are sure about memory cost, pass opts.supressSizeWarning: true to suppress this warning');
});
const { Readable } = require('stream');
const tgzStream = new TgzStream();
Promise.resolve().then(() => {
const stream = new Readable({
encoding: 'utf8',
read() {
[ '123213', 'asdasdasd' ].forEach(str => {
this.push(str);
this.push('\n');
});
this.push(null);
},
});
tgzStream.addEntry(stream, { relativePath: 'xx.log' });
});
const delay = timeout => new Promise(resolve => setTimeout(resolve, timeout));
delay(0).then(() => {
const stream = new Readable({
encoding: 'utf8',
read() {
[ '98765', 'mnbv' ].forEach(str => {
this.push(str);
this.push('\n');
});
this.push(null);
},
});
tgzStream.addEntry(stream, { relativePath: 'yy.log' });
});
pump(tgzStream, fileStream, err => {
assert(!err);
assert(fs.existsSync(destFile));
done();
});
});
```
Expected
Both test cases should generate a `.tgz` file containing the two files `xx.log` and `yy.log`.
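A possible workaround (an untested sketch, reusing `tgzStream`, `fileStream`, `delay`, `assert`, and `done` from the second test; `makeStream` is a hypothetical helper wrapping the `Readable` setup above): wait for all asynchronous additions to settle before calling `pump`, so draining only starts once every entry is registered.

```js
// makeStream: hypothetical helper that builds a Readable pushing the
// given strings line by line, as in the tests above.
const addFirst = Promise.resolve().then(() => {
  tgzStream.addEntry(makeStream([ '123213', 'asdasdasd' ]), { relativePath: 'xx.log' });
});
const addSecond = delay(0).then(() => {
  tgzStream.addEntry(makeStream([ '98765', 'mnbv' ]), { relativePath: 'yy.log' });
});

// Only start draining once every entry has been registered.
Promise.all([ addFirst, addSecond ]).then(() => {
  pump(tgzStream, fileStream, err => {
    assert(!err);
    done();
  });
});
```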
Environments
- node: v8.9.1
- compressing: 1.3.1
@shaoshuai0102 @popomore Could you take a look at this issue?