Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions .github/workflows/nodejs.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,14 @@ on:
pull_request:
branches: [ master ]

merge_group:

jobs:
Job:
name: Node.js
uses: node-modules/github-actions/.github/workflows/node-test.yml@master
with:
os: 'ubuntu-latest, macos-latest, windows-latest'
version: '18, 20, 22, 24'
secrets:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
5 changes: 2 additions & 3 deletions .github/workflows/release.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,7 @@ on:

jobs:
release:
name: Node.js
uses: node-modules/github-actions/.github/workflows/node-release.yml@master
name: NPM
uses: node-modules/github-actions/.github/workflows/npm-release.yml@master
secrets:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
GIT_TOKEN: ${{ secrets.GIT_TOKEN }}
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -7,3 +7,4 @@ test/fixtures/chinese-path-test.zip
.DS_Store
yarn.lock
!test/fixtures/symlink/node_modules
pnpm-lock.yaml
49 changes: 31 additions & 18 deletions lib/zip/uncompress_stream.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

// https://github.com/thejoshwolfe/yauzl#no-streaming-unzip-api

const debug = require('util').debuglog('compressing/zip/uncompress_stream');
const yauzl = require('@eggjs/yauzl');
const stream = require('stream');
const UncompressBaseStream = require('../base_write_stream');
Expand Down Expand Up @@ -38,12 +39,20 @@ class ZipUncompressStream extends UncompressBaseStream {
if (this._zipFileNameEncoding === 'utf-8') {
this._zipFileNameEncoding = 'utf8';
}
this._finalCallback = err => {
if (err) {
debug('finalCallback, error: %j', err);
return this.emit('error', err);
}
this.emit('finish');
};

this[YAUZL_CALLBACK] = this[YAUZL_CALLBACK].bind(this);

const sourceType = utils.sourceType(opts.source);

const yauzlOpts = this._yauzlOpts = Object.assign({}, DEFAULTS, opts.yauzl);
debug('sourceType: %s, yauzlOpts: %j', sourceType, yauzlOpts);
if (sourceType === 'file') {
yauzl.open(opts.source, yauzlOpts, this[YAUZL_CALLBACK]);
return;
Expand All @@ -60,27 +69,26 @@ class ZipUncompressStream extends UncompressBaseStream {
.catch(e => this.emit('error', e));
return;
}

this.on('pipe', srcStream => {
srcStream.unpipe(srcStream);

utils.streamToBuffer(srcStream)
.then(buf => {
this._chunks.push(buf);
buf = Buffer.concat(this._chunks);
yauzl.fromBuffer(buf, yauzlOpts, this[YAUZL_CALLBACK]);
})
.catch(e => this.emit('error', e));
});
}

_write(chunk) {
// push to _chunks array, this will only happen once, for stream will be unpiped.
// Writable-stream hook: buffer every incoming chunk in memory.
// The whole zip payload must be assembled before parsing because yauzl
// has no streaming unzip API (see the link at the top of this file).
// NOTE(review): `debug` and `this._chunks` are initialized elsewhere in
// this file (module scope / constructor) — confirm against full source.
_write(chunk, _encoding, callback) {
this._chunks.push(chunk);
debug('write size: %d, chunks: %d', chunk.length, this._chunks.length);
// Signal the stream machinery that this chunk has been consumed.
callback();
}

// Writable-stream finalizer: all chunks have arrived, so concatenate the
// buffered pieces and hand the complete archive to yauzl for parsing.
_final(callback) {
const buf = Buffer.concat(this._chunks);
debug('final, buf size: %d, chunks: %d', buf.length, this._chunks.length);
// Completion is deferred: the stored callback is invoked later by the
// YAUZL_CALLBACK handler once the archive is fully read (or on error).
// NOTE(review): this overwrites the _finalCallback assigned in the
// constructor path — presumably intentional for the piped-write flow;
// verify against the full file.
this._finalCallback = callback;
yauzl.fromBuffer(buf, this._yauzlOpts, this[YAUZL_CALLBACK]);
}

[YAUZL_CALLBACK](err, zipFile) {
if (err) return this.emit('error', err);
if (err) {
debug('yauzl error', err);
return this._finalCallback(err);
}

zipFile.readEntry();

Expand All @@ -106,17 +114,22 @@ class ZipUncompressStream extends UncompressBaseStream {

if (type === 'file') {
zipFile.openReadStream(entry, (err, readStream) => {
if (err) return this.emit('error', err);
if (err) {
debug('file, error: %j', err);
return this._finalCallback(err);
}
debug('file, header: %j', header);
this.emit('entry', header, readStream, next);
});
} else { // directory
const placeholder = new stream.Readable({ read() {} });
debug('directory, header: %j', header);
this.emit('entry', header, placeholder, next);
setImmediate(() => placeholder.emit('end'));
}
})
.on('end', () => this.emit('finish'))
.on('error', err => this.emit('error', err));
.on('end', () => this._finalCallback())
.on('error', err => this._finalCallback(err));

function next() {
zipFile.readEntry();
Expand Down
10 changes: 5 additions & 5 deletions test/gzip/file_stream.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ describe('test/gzip/file_stream.test.js', () => {
const sourceFile = path.join(__dirname, '..', 'fixtures', 'xx.log');
const sourceStream = fs.createReadStream(sourceFile);
const destFile = path.join(os.tmpdir(), uuid.v4() + '.log.gz');
console.log('destFile', destFile);
// console.log('destFile', destFile);
const gzipStream = new compressing.gzip.FileStream();
const destStream = fs.createWriteStream(destFile);
pump(sourceStream, gzipStream, destStream, err => {
Expand All @@ -24,7 +24,7 @@ describe('test/gzip/file_stream.test.js', () => {
it('should compress according to file path', done => {
const sourceFile = path.join(__dirname, '..', 'fixtures', 'xx.log');
const destFile = path.join(os.tmpdir(), uuid.v4() + '.log.gz');
console.log('destFile', destFile);
// console.log('destFile', destFile);
const gzipStream = new compressing.gzip.FileStream({ source: sourceFile });
const destStream = fs.createWriteStream(destFile);
pump(gzipStream, destStream, err => {
Expand All @@ -44,14 +44,14 @@ describe('test/gzip/file_stream.test.js', () => {

const destFile = path.join(os.tmpdir(), uuid.v4() + '.log.gz');
await fs.promises.writeFile(destFile, Buffer.concat(gzipChunks));
console.log(destFile);
// console.log(destFile);
});

it('should compress buffer', done => {
const sourceFile = path.join(__dirname, '..', 'fixtures', 'xx.log');
const sourceBuffer = fs.readFileSync(sourceFile);
const destFile = path.join(os.tmpdir(), uuid.v4() + '.log.gz');
console.log('destFile', destFile);
// console.log('destFile', destFile);
const destStream = fs.createWriteStream(destFile);
const gzipStream = new compressing.gzip.FileStream({ source: sourceBuffer });
pump(gzipStream, destStream, err => {
Expand All @@ -66,7 +66,7 @@ describe('test/gzip/file_stream.test.js', () => {
const sourceFile = path.join(__dirname, '..', 'fixtures', 'xx.log');
const sourceStream = fs.createReadStream(sourceFile);
const destFile = path.join(os.tmpdir(), uuid.v4() + '.log.gz');
console.log('destFile', destFile);
// console.log('destFile', destFile);
const destStream = fs.createWriteStream(destFile);
const gzipStream = new compressing.gzip.FileStream({ source: sourceStream });
pump(gzipStream, destStream, err => {
Expand Down
6 changes: 3 additions & 3 deletions test/gzip/index.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ describe('test/gzip/index.test.js', () => {
it('gzip.compressFile(file, stream)', async () => {
const sourceFile = path.join(__dirname, '..', 'fixtures', 'xx.log');
const destFile = path.join(os.tmpdir(), uuid.v4() + '.log.gz');
console.log('destFile', destFile);
// console.log('destFile', destFile);
const fileStream = fs.createWriteStream(destFile);
await compressing.gzip.compressFile(sourceFile, fileStream);
assert(fs.existsSync(destFile));
Expand All @@ -38,7 +38,7 @@ describe('test/gzip/index.test.js', () => {
const sourceFile = path.join(__dirname, '..', 'fixtures', 'xx.log');
const sourceBuffer = fs.readFileSync(sourceFile);
const destFile = path.join(os.tmpdir(), uuid.v4() + '.log.gz');
console.log('destFile', destFile);
// console.log('destFile', destFile);
const fileStream = fs.createWriteStream(destFile);
await compressing.gzip.compressFile(sourceBuffer, fileStream);
assert(fs.existsSync(destFile));
Expand All @@ -48,7 +48,7 @@ describe('test/gzip/index.test.js', () => {
const sourceFile = path.join(__dirname, '..', 'fixtures', 'xx.log');
const sourceStream = fs.createReadStream(sourceFile);
const destFile = path.join(os.tmpdir(), uuid.v4() + '.log.gz');
console.log('destFile', destFile);
// console.log('destFile', destFile);
const fileStream = fs.createWriteStream(destFile);
await compressing.gzip.compressFile(sourceStream, fileStream);
assert(fs.existsSync(destFile));
Expand Down
4 changes: 2 additions & 2 deletions test/tar/file_stream.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ describe('test/tar/file_stream.test.js', () => {
const sourceFile = path.join(__dirname, '..', 'fixtures', 'xx.log');
const sourceStream = fs.createReadStream(sourceFile);
const destFile = path.join(os.tmpdir(), uuid.v4() + '.tar');
console.log('dest', destFile);
// console.log('dest', destFile);

mm(console, 'warn', msg => {
assert(msg === 'You should specify the size of streamming data by opts.size to prevent all streaming data from loading into memory. If you are sure about memory cost, pass opts.suppressSizeWarning: true to suppress this warning');
Expand All @@ -34,7 +34,7 @@ describe('test/tar/file_stream.test.js', () => {
const sourceFile = path.join(__dirname, '..', 'fixtures', 'xx.log');
const sourceStream = fs.createReadStream(sourceFile);
const destFile = path.join(os.tmpdir(), uuid.v4() + '.tar');
console.log('dest', destFile);
// console.log('dest', destFile);

mm(console, 'warn', msg => {
assert(!msg);
Expand Down
26 changes: 13 additions & 13 deletions test/tar/index.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ describe('test/tar/index.test.js', () => {
it('tar.compressFile(file, stream)', async () => {
const sourceFile = path.join(__dirname, '..', 'fixtures', 'xx.log');
const destFile = path.join(os.tmpdir(), uuid.v4() + '.tar');
console.log('dest', destFile);
// console.log('dest', destFile);
const fileStream = fs.createWriteStream(destFile);
await compressing.tar.compressFile(sourceFile, fileStream);
assert(fs.existsSync(destFile));
Expand All @@ -25,7 +25,7 @@ describe('test/tar/index.test.js', () => {
it('tar.compressFile(file, stream, { relativePath })', async () => {
const sourceFile = path.join(__dirname, '..', 'fixtures', 'xx.log');
const destFile = path.join(os.tmpdir(), uuid.v4() + '.tar');
console.log('dest', destFile);
// console.log('dest', destFile);
const fileStream = fs.createWriteStream(destFile);
await compressing.tar.compressFile(sourceFile, fileStream, { relativePath: 'dd/dd.log' });
assert(fs.existsSync(destFile));
Expand All @@ -35,7 +35,7 @@ describe('test/tar/index.test.js', () => {
it('tar.compressFile(file, stream) should error if file not exist', async () => {
const sourceFile = path.join(__dirname, '..', 'fixtures', 'not-exist.log');
const destFile = path.join(os.tmpdir(), uuid.v4() + '.tar');
console.log('dest', destFile);
// console.log('dest', destFile);
const fileStream = fs.createWriteStream(destFile);
let err;
try {
Expand Down Expand Up @@ -65,7 +65,7 @@ describe('test/tar/index.test.js', () => {
const sourceFile = path.join(__dirname, '..', 'fixtures', 'xx.log');
const sourceStream = fs.createReadStream(sourceFile);
const destFile = path.join(os.tmpdir(), uuid.v4() + '.tar');
console.log('dest', destFile);
// console.log('dest', destFile);
const fileStream = fs.createWriteStream(destFile);
mm(console, 'warn', msg => {
assert(msg === 'You should specify the size of streamming data by opts.size to prevent all streaming data from loading into memory. If you are sure about memory cost, pass opts.suppressSizeWarning: true to suppress this warning');
Expand All @@ -78,7 +78,7 @@ describe('test/tar/index.test.js', () => {
const sourceFile = path.join(__dirname, '..', 'fixtures', 'xx.log');
const sourceStream = fs.createReadStream(sourceFile);
const destFile = path.join(os.tmpdir(), uuid.v4() + '.tar');
console.log('destFile', destFile);
// console.log('destFile', destFile);
const fileStream = fs.createWriteStream(destFile);
mm(console, 'warn', msg => {
assert(!msg);
Expand All @@ -91,7 +91,7 @@ describe('test/tar/index.test.js', () => {
const sourceFile = path.join(__dirname, '..', 'fixtures', 'xx.log');
const sourceBuffer = fs.readFileSync(sourceFile);
const destFile = path.join(os.tmpdir(), uuid.v4() + '.tar');
console.log('dest', destFile);
// console.log('dest', destFile);
const fileStream = fs.createWriteStream(destFile);
await compressing.tar.compressFile(sourceBuffer, fileStream, { relativePath: 'xx.log' });
assert(fs.existsSync(destFile));
Expand All @@ -101,7 +101,7 @@ describe('test/tar/index.test.js', () => {
const sourceFile = path.join(__dirname, '..', 'fixtures/xxx/bin');
const originStat = fs.statSync(sourceFile);
const destFile = path.join(os.tmpdir(), uuid.v4() + '.tar');
console.log('dest', destFile);
// console.log('dest', destFile);
const fileStream = fs.createWriteStream(destFile);
await compressing.tar.compressFile(sourceFile, fileStream);
assert(fs.existsSync(destFile));
Expand All @@ -111,7 +111,7 @@ describe('test/tar/index.test.js', () => {
await compressing.tar.uncompress(destFile, destDir);
const stat = fs.statSync(path.join(destDir, 'bin'));
assert(stat.mode === originStat.mode);
console.log(destDir);
// console.log(destDir);
});

});
Expand All @@ -120,7 +120,7 @@ describe('test/tar/index.test.js', () => {
it('tar.compressDir(dir, destFile)', async () => {
const sourceDir = path.join(__dirname, '..', 'fixtures');
const destFile = path.join(os.tmpdir(), uuid.v4() + '.tar');
console.log('dest', destFile);
// console.log('dest', destFile);
await compressing.tar.compressDir(sourceDir, destFile);
assert(fs.existsSync(destFile));
});
Expand All @@ -129,7 +129,7 @@ describe('test/tar/index.test.js', () => {
const sourceDir = path.join(__dirname, '..', 'fixtures');
const destFile = path.join(os.tmpdir(), uuid.v4() + '.tar');
const destStream = fs.createWriteStream(destFile);
console.log('dest', destFile);
// console.log('dest', destFile);
await compressing.tar.compressDir(sourceDir, destStream);
assert(fs.existsSync(destFile));
});
Expand All @@ -138,15 +138,15 @@ describe('test/tar/index.test.js', () => {
const sourceDir = path.join(__dirname, '..', 'fixtures');
const destFile = path.join(os.tmpdir(), uuid.v4() + '.tar');
const destStream = fs.createWriteStream(destFile);
console.log('dest', destFile);
// console.log('dest', destFile);
await compressing.tar.compressDir(sourceDir, destStream, { ignoreBase: true });
assert(fs.existsSync(destFile));
});

it('tar.compressDir(dir, destStream) should return promise', async () => {
const sourceDir = path.join(__dirname, '..', 'fixtures');
const destFile = path.join(os.tmpdir(), uuid.v4() + '.tar');
console.log('dest', destFile);
// console.log('dest', destFile);
await compressing.tar.compressDir(sourceDir, destFile);
assert(fs.existsSync(destFile));
});
Expand All @@ -155,7 +155,7 @@ describe('test/tar/index.test.js', () => {
const sourceDir = path.join(__dirname, '..', 'fixtures');
const destFile = path.join(os.tmpdir(), uuid.v4() + '.tar');
const destStream = fs.createWriteStream(destFile);
console.log('dest', destFile);
// console.log('dest', destFile);
setImmediate(() => {
destStream.emit('error', new Error('xxx'));
});
Expand Down
Loading
Loading