fix eslint
ZJONSSON committed May 11, 2024
1 parent 1c4ba07 commit c1cdc58
Showing 3 changed files with 90 additions and 101 deletions.
77 changes: 37 additions & 40 deletions lib/Open/directory.js
@@ -12,22 +12,21 @@ const signature = Buffer.alloc(4);
 signature.writeUInt32LE(0x06054b50, 0);

 async function getCrxHeader(source) {
-  var sourceStream = source.stream(0).pipe(PullStream());
+  const sourceStream = source.stream(0).pipe(PullStream());

   let data = await sourceStream.pull(4);
-  var signature = data.readUInt32LE(0);
+  const signature = data.readUInt32LE(0);
   if (signature === 0x34327243) {
-    var crxHeader;
     data = await sourceStream.pull(12);
-    crxHeader = parseBuffer.parse(data, [
+    const crxHeader = parseBuffer.parse(data, [
       ['version', 4],
       ['pubKeyLength', 4],
       ['signatureLength', 4],
     ]);

     data = await sourceStream.pull(crxHeader.pubKeyLength +crxHeader.signatureLength);

-    crxHeader.publicKey = data.slice(0,crxHeader.pubKeyLength);
+    crxHeader.publicKey = data.slice(0, crxHeader.pubKeyLength);
     crxHeader.signature = data.slice(crxHeader.pubKeyLength);
     crxHeader.size = 16 + crxHeader.pubKeyLength +crxHeader.signatureLength;
     return crxHeader;
@@ -76,26 +75,24 @@ function parseZip64DirRecord (dir64record) {
 }

 module.exports = async function centralDirectory(source, options) {
-  var endDir = PullStream(),
-      records = PullStream(),
-      tailSize = (options && options.tailSize) || 80,
-      crxHeader,
-      startOffset,
-      vars;
+  const endDir = PullStream();
+  const records = PullStream();
+  const tailSize = (options && options.tailSize) || 80;
+  let crxHeader, vars;

   if (options && options.crx)
     crxHeader = await getCrxHeader(source);

   const sourceSize = await source.size();

-  source.stream(Math.max(0,sourceSize-tailSize))
-    .on('error', function (error) { endDir.emit('error', error) })
+  source.stream(Math.max(0, sourceSize-tailSize))
+    .on('error', function (error) { endDir.emit('error', error); })
     .pipe(endDir);

   await endDir.pull(signature);

-  var data = await endDir.pull(22);
-  startOffset = crxHeader && crxHeader.size || 0;
+  const data = await endDir.pull(22);
+  const startOffset = crxHeader && crxHeader.size || 0;

   vars = parseBuffer.parse(data, [
     ['signature', 4],
@@ -116,16 +113,16 @@ module.exports = async function centralDirectory(source, options) {
       vars.offsetToStartOfCentralDirectory == 0xffffffff) {

     // Offset to zip64 CDL is 20 bytes before normal CDR
-    const zip64CDLSize = 20
-    const zip64CDLOffset = sourceSize - (tailSize - endDir.match + zip64CDLSize)
+    const zip64CDLSize = 20;
+    const zip64CDLOffset = sourceSize - (tailSize - endDir.match + zip64CDLSize);
     const zip64CDLStream = PullStream();

     source.stream(zip64CDLOffset).pipe(zip64CDLStream);

-    const d = await zip64CDLStream.pull(zip64CDLSize)
+    const d = await zip64CDLStream.pull(zip64CDLSize);
     const dir64record = await getZip64CentralDirectory(source, d);;

-    vars = parseZip64DirRecord(dir64record)
+    vars = parseZip64DirRecord(dir64record);

   } else {
     vars.offsetToStartOfCentralDirectory += startOffset;
@@ -143,32 +140,32 @@ module.exports = async function centralDirectory(source, options) {
     // make sure path is normalized before using it
     opts.path = path.resolve(path.normalize(opts.path));
     const files = await vars.files;
-    return Promise.map(files, function(entry) {
+    return Bluebird.map(files, function(entry) {

       if (entry.type == 'Directory') return;

       // to avoid zip slip (writing outside of the destination), we resolve
       // the target path, and make sure it's nested in the intended
       // destination, or not extract it otherwise.
-      var extractPath = path.join(opts.path, entry.path);
+      const extractPath = path.join(opts.path, entry.path);
       if (extractPath.indexOf(opts.path) != 0) {
         return;
       }
-      var writer = opts.getWriter ? opts.getWriter({path: extractPath}) : Writer({ path: extractPath });
+      const writer = opts.getWriter ? opts.getWriter({path: extractPath}) : Writer({ path: extractPath });

       return new Promise(function(resolve, reject) {
         entry.stream(opts.password)
-          .on('error',reject)
+          .on('error', reject)
           .pipe(writer)
-          .on('close',resolve)
-          .on('error',reject);
+          .on('close', resolve)
+          .on('error', reject);
       });
     }, { concurrency: opts.concurrency > 1 ? opts.concurrency : 1 });
   };

-  vars.files = Promise.mapSeries(Array(vars.numberOfRecords),async function() {
-    const data = await records.pull(46)
-    var vars = vars = parseBuffer.parse(data, [
+  vars.files = Bluebird.mapSeries(Array(vars.numberOfRecords), async function() {
+    const data = await records.pull(46);
+    const vars = parseBuffer.parse(data, [
       ['signature', 4],
       ['versionMadeBy', 2],
       ['versionsNeededToExtract', 2],
@@ -198,25 +195,25 @@ module.exports = async function centralDirectory(source, options) {
     const extraField = await records.pull(vars.extraFieldLength);

     vars.extra = parseExtraField(extraField, vars);
-    const comment = await records.pull(vars.fileCommentLength);
+    const comment = await records.pull(vars.fileCommentLength);

     vars.comment = comment;
-    vars.type = (vars.uncompressedSize === 0 && /[\/\\]$/.test(vars.path)) ? 'Directory' : 'File';
-    var padding = options && options.padding || 1000;
+    vars.type = (vars.uncompressedSize === 0 && /[/\\]$/.test(vars.path)) ? 'Directory' : 'File';
+    const padding = options && options.padding || 1000;
     vars.stream = function(_password) {
-      var totalSize = 30
+      const totalSize = 30
         + padding // add an extra buffer
-        + (vars.extraFieldLength || 0)
+        + (vars.extraFieldLength || 0)
         + (vars.fileNameLength || 0)
        + vars.compressedSize;

-      return unzip(source, vars.offsetToLocalFileHeader,_password, vars, totalSize);
+      return unzip(source, vars.offsetToLocalFileHeader, _password, vars, totalSize);
     };
     vars.buffer = function(_password) {
       return BufferStream(vars.stream(_password));
     };
     return vars;
   });

-  return Promise.props(vars);
+  return Bluebird.props(vars);
 };
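For orientation, the API assembled above is consumed roughly as in the following sketch (illustrative usage only, based on the extract()/files code in this diff and on test/office-files.js below; the archive path and output directory are placeholders):

const unzip = require('../'); // the package root, as required in the tests

async function listAndExtract() {
  // Open.file parses just the central directory built by this module
  const directory = await unzip.Open.file('example.zip');
  // each record from the mapSeries loop above becomes an entry with path/size and stream()/buffer()
  directory.files.forEach(file => console.log(file.path, file.uncompressedSize));
  // extract() applies the zip-slip guard shown above before writing each entry
  await directory.extract({ path: 'out', concurrency: 4 });
}

listAndExtract().catch(console.error);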
99 changes: 49 additions & 50 deletions lib/Open/unzip.js
@@ -7,8 +7,8 @@ const parseDateTime = require('../parseDateTime');
 const parseBuffer = require('../parseBuffer');

 module.exports = function unzip(source, offset, _password, directoryVars, length, _entry) {
-  var file = PullStream(),
-      entry = _entry || Stream.PassThrough();
+  const file = PullStream();
+  const entry = _entry || Stream.PassThrough();

   const req = source.stream(offset, length);
   req.pipe(file).on('error', function(e) {
@@ -34,12 +34,12 @@ module.exports = function unzip(source, offset, _password, directoryVars, length
       ['extraFieldLength', 2],
     ]);

-    var localSize = 30
+    const localSize = 30
       + 100 // add extra padding
-      + (vars.extraFieldLength || 0)
+      + (vars.extraFieldLength || 0)
       + (vars.fileNameLength || 0)
       + vars.compressedSize;

     if (localSize > length) {
       entry.emit('streamRetry', localSize);
       return unzip(source, offset, _password, directoryVars, localSize, entry);
@@ -48,76 +48,75 @@ module.exports = function unzip(source, offset, _password, directoryVars, length
     vars.lastModifiedDateTime = parseDateTime(vars.lastModifiedDate, vars.lastModifiedTime);

     const fileName = await file.pull(vars.fileNameLength);

     vars.fileName = fileName.toString('utf8');
     const extraField = await file.pull(vars.extraFieldLength);

-    var checkEncryption;

     vars.extra = parseExtraField(extraField, vars);
     // Ignore logal file header vars if the directory vars are available
     if (directoryVars && directoryVars.compressedSize) vars = directoryVars;

     if (vars.flags & 0x01) {
-      const header = await file.pull(12)
+      const header = await file.pull(12);

       if (!_password)
         throw new Error('MISSING_PASSWORD');

-      var decrypt = Decrypt();
+      const decrypt = Decrypt();

       String(_password).split('').forEach(function(d) {
         decrypt.update(d);
       });

-      for (var i=0; i < header.length; i++)
+      for (let i=0; i < header.length; i++)
         header[i] = decrypt.decryptByte(header[i]);

       vars.decrypt = decrypt;
       vars.compressedSize -= 12;

-      var check = (vars.flags & 0x8) ? (vars.lastModifiedTime >> 8) & 0xff : (vars.crc32 >> 24) & 0xff;
+      const check = (vars.flags & 0x8) ? (vars.lastModifiedTime >> 8) & 0xff : (vars.crc32 >> 24) & 0xff;
       if (header[11] !== check)
         throw new Error('BAD_PASSWORD');
     };

-    entry.emit('vars',vars);
-
-    var fileSizeKnown = !(vars.flags & 0x08) || vars.compressedSize > 0,
-        eof;
-
-    const inflater = vars.compressionMethod ? zlib.createInflateRaw() : Stream.PassThrough();
-
-    if (fileSizeKnown) {
-      entry.size = vars.uncompressedSize;
-      eof = vars.compressedSize;
-    } else {
-      eof = Buffer.alloc(4);
-      eof.writeUInt32LE(0x08074b50, 0);
-    }
-
-    let stream = file.stream(eof);
-
-    if (vars.decrypt)
-      stream = stream.pipe(vars.decrypt.stream());
-
-    stream
-      .pipe(inflater)
-      .on('error', function(err) { entry.emit('error', err);})
-      .pipe(entry)
-      .on('finish', function() {
-        if(req.destroy)
-          req.destroy();
-        else if (req.abort)
-          req.abort();
-        else if (req.close)
-          req.close();
-        else if (req.push)
-          req.push();
-        else
-          console.log('warning - unable to close stream');
-      });
-  })
+    entry.emit('vars', vars);
+
+    const fileSizeKnown = !(vars.flags & 0x08) || vars.compressedSize > 0;
+    let eof;
+
+    const inflater = vars.compressionMethod ? zlib.createInflateRaw() : Stream.PassThrough();
+
+    if (fileSizeKnown) {
+      entry.size = vars.uncompressedSize;
+      eof = vars.compressedSize;
+    } else {
+      eof = Buffer.alloc(4);
+      eof.writeUInt32LE(0x08074b50, 0);
+    }
+
+    let stream = file.stream(eof);
+
+    if (vars.decrypt)
+      stream = stream.pipe(vars.decrypt.stream());
+
+    stream
+      .pipe(inflater)
+      .on('error', function(err) { entry.emit('error', err);})
+      .pipe(entry)
+      .on('finish', function() {
+        if(req.destroy)
+          req.destroy();
+        else if (req.abort)
+          req.abort();
+        else if (req.close)
+          req.close();
+        else if (req.push)
+          req.push();
+        else
+          console.log('warning - unable to close stream');
+      });
+  })
   .catch(function(e) {
     entry.emit('error', e);
   });
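A companion sketch for the decryption path above (illustrative only; it assumes an archive whose first entry is password-protected, and relies on the MISSING_PASSWORD/BAD_PASSWORD checks shown in this file):

const unzip = require('../'); // as in the tests

async function readFirstEncryptedEntry(archivePath, password) {
  const directory = await unzip.Open.file(archivePath);
  // buffer(password) wraps vars.stream(_password), which feeds Decrypt() above;
  // no password throws MISSING_PASSWORD, a wrong one throws BAD_PASSWORD
  return directory.files[0].buffer(password);
}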
15 changes: 4 additions & 11 deletions test/office-files.js
@@ -1,17 +1,10 @@
-var test = require('tap').test;
-var fs = require('fs');
-var path = require('path');
-var unzip = require('../');
-var il = require('iconv-lite');
-var Promise = require('bluebird');
-var NoopStream = require('../lib/NoopStream');
+const test = require('tap').test;
+const path = require('path');
+const unzip = require('../');
+const NoopStream = require('../lib/NoopStream');

-test("get content a docx file without errors", async function (t) {
-  var archive = path.join(__dirname, '../testData/office/testfile.docx');
+test("get content a docx file without errors", async function () {
+  const archive = path.join(__dirname, '../testData/office/testfile.docx');

   const directory = await unzip.Open.file(archive);
   await Promise.all(directory.files.map(file => file.buffer()));
@@ -25,7 +18,7 @@ test("get content a xlsx file without errors", async function () {
 });

 test("stream retries when the local file header indicates bigger size than central directory", async function (t) {
-  var archive = path.join(__dirname, '../testData/office/testfile.xlsx');
+  const archive = path.join(__dirname, '../testData/office/testfile.xlsx');
   let retries = 0, size;
   const directory = await unzip.Open.file(archive, {padding: 10});
   const stream = directory.files[0].stream();
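This last test exercises the 'streamRetry' event emitted by lib/Open/unzip.js when a local file header reports a bigger size than the central directory; a minimal listener looks like the sketch below (the archive path is a placeholder):

const unzip = require('../');

async function watchRetries(archivePath) {
  const directory = await unzip.Open.file(archivePath, {padding: 10});
  const stream = directory.files[0].stream();
  stream.on('streamRetry', size => {
    // unzip() has re-requested the entry with the corrected (larger) size
    console.log('retried with', size);
  });
}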
