From 6c596c29b455f96dae5bd5888d5cf6e347ca2922 Mon Sep 17 00:00:00 2001 From: Ziggy Jonsson Date: Sat, 11 May 2024 13:05:07 -0400 Subject: [PATCH] Move open to async/await --- lib/Open/directory.js | 317 ++++++++++++++++++++---------------------- lib/Open/unzip.js | 85 ++++++----- 2 files changed, 191 insertions(+), 211 deletions(-) diff --git a/lib/Open/directory.js b/lib/Open/directory.js index 0ffae26..44963da 100644 --- a/lib/Open/directory.js +++ b/lib/Open/directory.js @@ -11,29 +11,27 @@ var parseBuffer = require('../parseBuffer'); var signature = Buffer.alloc(4); signature.writeUInt32LE(0x06054b50,0); -function getCrxHeader(source) { +async function getCrxHeader(source) { var sourceStream = source.stream(0).pipe(PullStream()); - return sourceStream.pull(4).then(function(data) { - var signature = data.readUInt32LE(0); - if (signature === 0x34327243) { - var crxHeader; - return sourceStream.pull(12).then(function(data) { - crxHeader = parseBuffer.parse(data, [ - ['version', 4], - ['pubKeyLength', 4], - ['signatureLength', 4], - ]); - }).then(function() { - return sourceStream.pull(crxHeader.pubKeyLength +crxHeader.signatureLength); - }).then(function(data) { - crxHeader.publicKey = data.slice(0,crxHeader.pubKeyLength); - crxHeader.signature = data.slice(crxHeader.pubKeyLength); - crxHeader.size = 16 + crxHeader.pubKeyLength +crxHeader.signatureLength; - return crxHeader; - }); - } - }); + let data = await sourceStream.pull(4); + var signature = data.readUInt32LE(0); + if (signature === 0x34327243) { + var crxHeader; + data = await sourceStream.pull(12); + crxHeader = parseBuffer.parse(data, [ + ['version', 4], + ['pubKeyLength', 4], + ['signatureLength', 4], + ]); + + data = await sourceStream.pull(crxHeader.pubKeyLength +crxHeader.signatureLength); + + crxHeader.publicKey = data.slice(0,crxHeader.pubKeyLength); + crxHeader.signature = data.slice(crxHeader.pubKeyLength); + crxHeader.size = 16 + crxHeader.pubKeyLength +crxHeader.signatureLength; + return 
crxHeader; + } } // Zip64 File Format Notes: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT @@ -77,160 +75,147 @@ function parseZip64DirRecord (dir64record) { return vars } -module.exports = function centralDirectory(source, options) { +module.exports = async function centralDirectory(source, options) { var endDir = PullStream(), records = PullStream(), tailSize = (options && options.tailSize) || 80, - sourceSize, crxHeader, startOffset, vars; if (options && options.crx) - crxHeader = getCrxHeader(source); - - return source.size() - .then(function(size) { - sourceSize = size; - - source.stream(Math.max(0,size-tailSize)) - .on('error', function (error) { endDir.emit('error', error) }) - .pipe(endDir); - - return endDir.pull(signature); - }) - .then(function() { - return Promise.props({directory: endDir.pull(22), crxHeader: crxHeader}); - }) - .then(function(d) { - var data = d.directory; - startOffset = d.crxHeader && d.crxHeader.size || 0; - - vars = parseBuffer.parse(data, [ - ['signature', 4], - ['diskNumber', 2], - ['diskStart', 2], - ['numberOfRecordsOnDisk', 2], - ['numberOfRecords', 2], - ['sizeOfCentralDirectory', 4], - ['offsetToStartOfCentralDirectory', 4], - ['commentLength', 2], - ]); - - // Is this zip file using zip64 format? Use same check as Go: - // https://github.com/golang/go/blob/master/src/archive/zip/reader.go#L503 - // For zip64 files, need to find zip64 central directory locator header to extract - // relative offset for zip64 central directory record. 
- if (vars.numberOfRecords == 0xffff|| vars.numberOfRecords == 0xffff || - vars.offsetToStartOfCentralDirectory == 0xffffffff) { - - // Offset to zip64 CDL is 20 bytes before normal CDR - const zip64CDLSize = 20 - const zip64CDLOffset = sourceSize - (tailSize - endDir.match + zip64CDLSize) - const zip64CDLStream = PullStream(); - - source.stream(zip64CDLOffset).pipe(zip64CDLStream); - - return zip64CDLStream.pull(zip64CDLSize) - .then(function (d) { return getZip64CentralDirectory(source, d) }) - .then(function (dir64record) { - vars = parseZip64DirRecord(dir64record) - }) - } else { - vars.offsetToStartOfCentralDirectory += startOffset; + crxHeader = await getCrxHeader(source); + + const sourceSize = await source.size(); + + source.stream(Math.max(0,sourceSize-tailSize)) + .on('error', function (error) { endDir.emit('error', error) }) + .pipe(endDir); + + await endDir.pull(signature); + + var data = await endDir.pull(22); + startOffset = crxHeader && crxHeader.size || 0; + + vars = parseBuffer.parse(data, [ + ['signature', 4], + ['diskNumber', 2], + ['diskStart', 2], + ['numberOfRecordsOnDisk', 2], + ['numberOfRecords', 2], + ['sizeOfCentralDirectory', 4], + ['offsetToStartOfCentralDirectory', 4], + ['commentLength', 2], + ]); + + // Is this zip file using zip64 format? Use same check as Go: + // https://github.com/golang/go/blob/master/src/archive/zip/reader.go#L503 + // For zip64 files, need to find zip64 central directory locator header to extract + // relative offset for zip64 central directory record. 
+  if (vars.numberOfRecords == 0xffff ||
+    vars.offsetToStartOfCentralDirectory == 0xffffffff) {
+
+    // Offset to zip64 CDL is 20 bytes before normal CDR
+    const zip64CDLSize = 20
+    const zip64CDLOffset = sourceSize - (tailSize - endDir.match + zip64CDLSize)
+    const zip64CDLStream = PullStream();
+
+    source.stream(zip64CDLOffset).pipe(zip64CDLStream);
+
+    const d = await zip64CDLStream.pull(zip64CDLSize);
+    const dir64record = await getZip64CentralDirectory(source, d);
+
+    vars = parseZip64DirRecord(dir64record)
+
+  } else {
+    vars.offsetToStartOfCentralDirectory += startOffset;
+  }
+
+  if (vars.commentLength) {
+    const comment = await endDir.pull(vars.commentLength);
+    vars.comment = comment.toString('utf8');
+  }
+
+  source.stream(vars.offsetToStartOfCentralDirectory).pipe(records);
+
+  vars.extract = async function(opts) {
+    if (!opts || !opts.path) throw new Error('PATH_MISSING');
+    // make sure path is normalized before using it
+    opts.path = path.resolve(path.normalize(opts.path));
+    const files = await vars.files;
+
+    return Promise.map(files, function(entry) {
+      if (entry.type == 'Directory') return;
+
+      // to avoid zip slip (writing outside of the destination), we resolve
+      // the target path, and make sure it's nested in the intended
+      // destination, or not extract it otherwise.
+      var extractPath = path.join(opts.path, entry.path);
+      if (extractPath.indexOf(opts.path) != 0) {
+        return;
+      }
+      var writer = opts.getWriter ? 
opts.getWriter({path: extractPath}) : Writer({ path: extractPath }); + + return new Promise(function(resolve, reject) { + entry.stream(opts.password) + .on('error',reject) + .pipe(writer) + .on('close',resolve) + .on('error',reject); }); - }) - .then(function() { - source.stream(vars.offsetToStartOfCentralDirectory).pipe(records); - - vars.extract = function(opts) { - if (!opts || !opts.path) throw new Error('PATH_MISSING'); - // make sure path is normalized before using it - opts.path = path.resolve(path.normalize(opts.path)); - return vars.files.then(function(files) { - return Promise.map(files, function(entry) { - if (entry.type == 'Directory') return; - - // to avoid zip slip (writing outside of the destination), we resolve - // the target path, and make sure it's nested in the intended - // destination, or not extract it otherwise. - var extractPath = path.join(opts.path, entry.path); - if (extractPath.indexOf(opts.path) != 0) { - return; - } - var writer = opts.getWriter ? opts.getWriter({path: extractPath}) : Writer({ path: extractPath }); - - return new Promise(function(resolve, reject) { - entry.stream(opts.password) - .on('error',reject) - .pipe(writer) - .on('close',resolve) - .on('error',reject); - }); - }, { concurrency: opts.concurrency > 1 ? 
opts.concurrency : 1 }); - }); - }; - - vars.files = Promise.mapSeries(Array(vars.numberOfRecords),function() { - return records.pull(46).then(function(data) { - var vars = vars = parseBuffer.parse(data, [ - ['signature', 4], - ['versionMadeBy', 2], - ['versionsNeededToExtract', 2], - ['flags', 2], - ['compressionMethod', 2], - ['lastModifiedTime', 2], - ['lastModifiedDate', 2], - ['crc32', 4], - ['compressedSize', 4], - ['uncompressedSize', 4], - ['fileNameLength', 2], - ['extraFieldLength', 2], - ['fileCommentLength', 2], - ['diskNumber', 2], - ['internalFileAttributes', 2], - ['externalFileAttributes', 4], - ['offsetToLocalFileHeader', 4], - ]); - - vars.offsetToLocalFileHeader += startOffset; - vars.lastModifiedDateTime = parseDateTime(vars.lastModifiedDate, vars.lastModifiedTime); - - return records.pull(vars.fileNameLength).then(function(fileNameBuffer) { - vars.pathBuffer = fileNameBuffer; - vars.path = fileNameBuffer.toString('utf8'); - vars.isUnicode = (vars.flags & 0x800) != 0; - return records.pull(vars.extraFieldLength); - }) - .then(function(extraField) { - vars.extra = parseExtraField(extraField, vars); - return records.pull(vars.fileCommentLength); - }) - .then(function(comment) { - vars.comment = comment; - vars.type = (vars.uncompressedSize === 0 && /[\/\\]$/.test(vars.path)) ? 'Directory' : 'File'; - vars.stream = function(_password) { - var totalSize = 30 - + 10 // add an extra buffer - + (vars.extraFieldLength || 0) - + (vars.fileNameLength || 0) - + vars.compressedSize; - - return unzip(source, vars.offsetToLocalFileHeader,_password, vars, totalSize); - }; - vars.buffer = function(_password) { - return BufferStream(vars.stream(_password)); - }; - return vars; - }); - }); - }); - - return Promise.props(vars); + }, { concurrency: opts.concurrency > 1 ? 
opts.concurrency : 1 });
+  };
+
+  vars.files = Promise.mapSeries(Array(vars.numberOfRecords),async function() {
+    const data = await records.pull(46);
+    var vars = parseBuffer.parse(data, [
+      ['signature', 4],
+      ['versionMadeBy', 2],
+      ['versionsNeededToExtract', 2],
+      ['flags', 2],
+      ['compressionMethod', 2],
+      ['lastModifiedTime', 2],
+      ['lastModifiedDate', 2],
+      ['crc32', 4],
+      ['compressedSize', 4],
+      ['uncompressedSize', 4],
+      ['fileNameLength', 2],
+      ['extraFieldLength', 2],
+      ['fileCommentLength', 2],
+      ['diskNumber', 2],
+      ['internalFileAttributes', 2],
+      ['externalFileAttributes', 4],
+      ['offsetToLocalFileHeader', 4],
+    ]);
+
+    vars.offsetToLocalFileHeader += startOffset;
+    vars.lastModifiedDateTime = parseDateTime(vars.lastModifiedDate, vars.lastModifiedTime);
+
+    const fileNameBuffer = await records.pull(vars.fileNameLength);
+    vars.pathBuffer = fileNameBuffer;
+    vars.path = fileNameBuffer.toString('utf8');
+    vars.isUnicode = (vars.flags & 0x800) != 0;
+    const extraField = await records.pull(vars.extraFieldLength);
+
+    vars.extra = parseExtraField(extraField, vars);
+    const comment = await records.pull(vars.fileCommentLength);
+
+    vars.comment = comment;
+    vars.type = (vars.uncompressedSize === 0 && /[\/\\]$/.test(vars.path)) ? 
'Directory' : 'File'; + vars.stream = function(_password) { + var totalSize = 30 + + 10 // add an extra buffer + + (vars.extraFieldLength || 0) + + (vars.fileNameLength || 0) + + vars.compressedSize; + + return unzip(source, vars.offsetToLocalFileHeader,_password, vars, totalSize); + }; + vars.buffer = function(_password) { + return BufferStream(vars.stream(_password)); + }; + return vars; }); + + return Promise.props(vars); }; diff --git a/lib/Open/unzip.js b/lib/Open/unzip.js index 6205ab4..0d92d62 100644 --- a/lib/Open/unzip.js +++ b/lib/Open/unzip.js @@ -16,9 +16,12 @@ module.exports = function unzip(source, offset, _password, directoryVars, length entry.emit('error', e); }); - entry.vars = file.pull(30) - .then(function(data) { - var vars = parseBuffer.parse(data, [ + // Create a separate promise chain to pipe into entry + // This allows us to return entry synchronously + Promise.resolve() + .then(async function () { + const data = await file.pull(30); + let vars = parseBuffer.parse(data, [ ['signature', 4], ['versionsNeededToExtract', 2], ['flags', 2], @@ -34,50 +37,42 @@ module.exports = function unzip(source, offset, _password, directoryVars, length vars.lastModifiedDateTime = parseDateTime(vars.lastModifiedDate, vars.lastModifiedTime); - return file.pull(vars.fileNameLength) - .then(function(fileName) { - vars.fileName = fileName.toString('utf8'); - return file.pull(vars.extraFieldLength); - }) - .then(function(extraField) { - var checkEncryption; - vars.extra = parseExtraField(extraField, vars); - // Ignore logal file header vars if the directory vars are available - if (directoryVars && directoryVars.compressedSize) vars = directoryVars; - - if (vars.flags & 0x01) checkEncryption = file.pull(12) - .then(function(header) { - if (!_password) - throw new Error('MISSING_PASSWORD'); - - var decrypt = Decrypt(); - - String(_password).split('').forEach(function(d) { - decrypt.update(d); - }); - - for (var i=0; i < header.length; i++) - header[i] = 
decrypt.decryptByte(header[i]); - - vars.decrypt = decrypt; - vars.compressedSize -= 12; - - var check = (vars.flags & 0x8) ? (vars.lastModifiedTime >> 8) & 0xff : (vars.crc32 >> 24) & 0xff; - if (header[11] !== check) - throw new Error('BAD_PASSWORD'); - - return vars; - }); - - return Promise.resolve(checkEncryption) - .then(function() { - entry.emit('vars',vars); - return vars; - }); + const fileName = await file.pull(vars.fileNameLength); + + vars.fileName = fileName.toString('utf8'); + const extraField = await file.pull(vars.extraFieldLength); + + var checkEncryption; + vars.extra = parseExtraField(extraField, vars); + // Ignore logal file header vars if the directory vars are available + if (directoryVars && directoryVars.compressedSize) vars = directoryVars; + + if (vars.flags & 0x01) { + const header = await file.pull(12) + + if (!_password) + throw new Error('MISSING_PASSWORD'); + + var decrypt = Decrypt(); + + String(_password).split('').forEach(function(d) { + decrypt.update(d); }); - }); - entry.vars.then(function(vars) { + for (var i=0; i < header.length; i++) + header[i] = decrypt.decryptByte(header[i]); + + vars.decrypt = decrypt; + vars.compressedSize -= 12; + + var check = (vars.flags & 0x8) ? (vars.lastModifiedTime >> 8) & 0xff : (vars.crc32 >> 24) & 0xff; + if (header[11] !== check) + throw new Error('BAD_PASSWORD'); + }; + + + entry.emit('vars',vars); + var fileSizeKnown = !(vars.flags & 0x08) || vars.compressedSize > 0, eof;