(https://github.com/cthackers)",
- "bugs": {
- "email": "sy@another-d-mention.ro",
- "url": "https://github.com/cthackers/adm-zip/issues"
- },
- "license": "MIT",
- "files": [
- "adm-zip.js",
- "headers",
- "methods",
- "util",
- "zipEntry.js",
- "zipFile.js",
- "LICENSE"
- ],
- "main": "adm-zip.js",
- "repository": {
- "type": "git",
- "url": "https://github.com/cthackers/adm-zip.git"
- },
- "engines": {
- "node": ">=12.0"
- },
- "devDependencies": {
- "chai": "^4.3.4",
- "iconv-lite": "^0.6.3",
- "mocha": "^10.2.0",
- "prettier": "^3.3.2",
- "rimraf": "^3.0.2"
- }
-}
diff --git a/node_modules/adm-zip/util/constants.js b/node_modules/adm-zip/util/constants.js
deleted file mode 100644
index 119954bae8..0000000000
--- a/node_modules/adm-zip/util/constants.js
+++ /dev/null
@@ -1,142 +0,0 @@
-module.exports = {
- /* The local file header */
- LOCHDR : 30, // LOC header size
- LOCSIG : 0x04034b50, // "PK\003\004"
- LOCVER : 4, // version needed to extract
- LOCFLG : 6, // general purpose bit flag
- LOCHOW : 8, // compression method
- LOCTIM : 10, // modification time (2 bytes time, 2 bytes date)
- LOCCRC : 14, // uncompressed file crc-32 value
- LOCSIZ : 18, // compressed size
- LOCLEN : 22, // uncompressed size
- LOCNAM : 26, // filename length
- LOCEXT : 28, // extra field length
-
- /* The Data descriptor */
- EXTSIG : 0x08074b50, // "PK\007\008"
- EXTHDR : 16, // EXT header size
- EXTCRC : 4, // uncompressed file crc-32 value
- EXTSIZ : 8, // compressed size
- EXTLEN : 12, // uncompressed size
-
- /* The central directory file header */
- CENHDR : 46, // CEN header size
- CENSIG : 0x02014b50, // "PK\001\002"
- CENVEM : 4, // version made by
- CENVER : 6, // version needed to extract
- CENFLG : 8, // encrypt, decrypt flags
- CENHOW : 10, // compression method
- CENTIM : 12, // modification time (2 bytes time, 2 bytes date)
- CENCRC : 16, // uncompressed file crc-32 value
- CENSIZ : 20, // compressed size
- CENLEN : 24, // uncompressed size
- CENNAM : 28, // filename length
- CENEXT : 30, // extra field length
- CENCOM : 32, // file comment length
- CENDSK : 34, // volume number start
- CENATT : 36, // internal file attributes
- CENATX : 38, // external file attributes (host system dependent)
- CENOFF : 42, // LOC header offset
-
- /* The entries in the end of central directory */
- ENDHDR : 22, // END header size
- ENDSIG : 0x06054b50, // "PK\005\006"
- ENDSUB : 8, // number of entries on this disk
- ENDTOT : 10, // total number of entries
- ENDSIZ : 12, // central directory size in bytes
- ENDOFF : 16, // offset of first CEN header
- ENDCOM : 20, // zip file comment length
-
- END64HDR : 20, // zip64 END header size
- END64SIG : 0x07064b50, // zip64 Locator signature, "PK\006\007"
- END64START : 4, // number of the disk with the start of the zip64
- END64OFF : 8, // relative offset of the zip64 end of central directory
- END64NUMDISKS : 16, // total number of disks
-
- ZIP64SIG : 0x06064b50, // zip64 signature, "PK\006\006"
- ZIP64HDR : 56, // zip64 record minimum size
- ZIP64LEAD : 12, // leading bytes at the start of the record, not counted by the value stored in ZIP64SIZE
- ZIP64SIZE : 4, // zip64 size of the central directory record
- ZIP64VEM : 12, // zip64 version made by
- ZIP64VER : 14, // zip64 version needed to extract
- ZIP64DSK : 16, // zip64 number of this disk
- ZIP64DSKDIR : 20, // number of the disk with the start of the record directory
- ZIP64SUB : 24, // number of entries on this disk
- ZIP64TOT : 32, // total number of entries
- ZIP64SIZB : 40, // zip64 central directory size in bytes
- ZIP64OFF : 48, // offset of start of central directory with respect to the starting disk number
- ZIP64EXTRA : 56, // extensible data sector
-
- /* Compression methods */
- STORED : 0, // no compression
- SHRUNK : 1, // shrunk
- REDUCED1 : 2, // reduced with compression factor 1
- REDUCED2 : 3, // reduced with compression factor 2
- REDUCED3 : 4, // reduced with compression factor 3
- REDUCED4 : 5, // reduced with compression factor 4
- IMPLODED : 6, // imploded
- // 7 reserved for Tokenizing compression algorithm
- DEFLATED : 8, // deflated
- ENHANCED_DEFLATED: 9, // enhanced deflated
- PKWARE : 10,// PKWare DCL imploded
- // 11 reserved by PKWARE
- BZIP2 : 12, // compressed using BZIP2
- // 13 reserved by PKWARE
- LZMA : 14, // LZMA
- // 15-17 reserved by PKWARE
- IBM_TERSE : 18, // compressed using IBM TERSE
- IBM_LZ77 : 19, // IBM LZ77 z
- AES_ENCRYPT : 99, // WinZIP AES encryption method
-
- /* General purpose bit flag */
- // values can obtained with expression 2**bitnr
- FLG_ENC : 1, // Bit 0: encrypted file
- FLG_COMP1 : 2, // Bit 1, compression option
- FLG_COMP2 : 4, // Bit 2, compression option
- FLG_DESC : 8, // Bit 3, data descriptor
- FLG_ENH : 16, // Bit 4, enhanced deflating
- FLG_PATCH : 32, // Bit 5, indicates that the file is compressed patched data.
- FLG_STR : 64, // Bit 6, strong encryption (patented)
- // Bits 7-10: Currently unused.
- FLG_EFS : 2048, // Bit 11: Language encoding flag (EFS)
- // Bit 12: Reserved by PKWARE for enhanced compression.
- // Bit 13: encrypted the Central Directory (patented).
- // Bits 14-15: Reserved by PKWARE.
- FLG_MSK : 4096, // mask header values
-
- /* Load type */
- FILE : 2,
- BUFFER : 1,
- NONE : 0,
-
- /* 4.5 Extensible data fields */
- EF_ID : 0,
- EF_SIZE : 2,
-
- /* Header IDs */
- ID_ZIP64 : 0x0001,
- ID_AVINFO : 0x0007,
- ID_PFS : 0x0008,
- ID_OS2 : 0x0009,
- ID_NTFS : 0x000a,
- ID_OPENVMS : 0x000c,
- ID_UNIX : 0x000d,
- ID_FORK : 0x000e,
- ID_PATCH : 0x000f,
- ID_X509_PKCS7 : 0x0014,
- ID_X509_CERTID_F : 0x0015,
- ID_X509_CERTID_C : 0x0016,
- ID_STRONGENC : 0x0017,
- ID_RECORD_MGT : 0x0018,
- ID_X509_PKCS7_RL : 0x0019,
- ID_IBM1 : 0x0065,
- ID_IBM2 : 0x0066,
- ID_POSZIP : 0x4690,
-
- EF_ZIP64_OR_32 : 0xffffffff,
- EF_ZIP64_OR_16 : 0xffff,
- EF_ZIP64_SUNCOMP : 0,
- EF_ZIP64_SCOMP : 8,
- EF_ZIP64_RHO : 16,
- EF_ZIP64_DSN : 24
-};
diff --git a/node_modules/adm-zip/util/decoder.js b/node_modules/adm-zip/util/decoder.js
deleted file mode 100644
index d8b98f0848..0000000000
--- a/node_modules/adm-zip/util/decoder.js
+++ /dev/null
@@ -1,5 +0,0 @@
-module.exports = {
- efs: true,
- encode: (data) => Buffer.from(data, "utf8"),
- decode: (data) => data.toString("utf8")
-};
diff --git a/node_modules/adm-zip/util/errors.js b/node_modules/adm-zip/util/errors.js
deleted file mode 100644
index ad594ed628..0000000000
--- a/node_modules/adm-zip/util/errors.js
+++ /dev/null
@@ -1,63 +0,0 @@
-const errors = {
- /* Header error messages */
- INVALID_LOC: "Invalid LOC header (bad signature)",
- INVALID_CEN: "Invalid CEN header (bad signature)",
- INVALID_END: "Invalid END header (bad signature)",
-
- /* Descriptor */
- DESCRIPTOR_NOT_EXIST: "No descriptor present",
- DESCRIPTOR_UNKNOWN: "Unknown descriptor format",
- DESCRIPTOR_FAULTY: "Descriptor data is malformed",
-
- /* ZipEntry error messages*/
- NO_DATA: "Nothing to decompress",
- BAD_CRC: "CRC32 checksum failed {0}",
- FILE_IN_THE_WAY: "There is a file in the way: {0}",
- UNKNOWN_METHOD: "Invalid/unsupported compression method",
-
- /* Inflater error messages */
- AVAIL_DATA: "inflate::Available inflate data did not terminate",
- INVALID_DISTANCE: "inflate::Invalid literal/length or distance code in fixed or dynamic block",
- TO_MANY_CODES: "inflate::Dynamic block code description: too many length or distance codes",
- INVALID_REPEAT_LEN: "inflate::Dynamic block code description: repeat more than specified lengths",
- INVALID_REPEAT_FIRST: "inflate::Dynamic block code description: repeat lengths with no first length",
- INCOMPLETE_CODES: "inflate::Dynamic block code description: code lengths codes incomplete",
- INVALID_DYN_DISTANCE: "inflate::Dynamic block code description: invalid distance code lengths",
- INVALID_CODES_LEN: "inflate::Dynamic block code description: invalid literal/length code lengths",
- INVALID_STORE_BLOCK: "inflate::Stored block length did not match one's complement",
- INVALID_BLOCK_TYPE: "inflate::Invalid block type (type == 3)",
-
- /* ADM-ZIP error messages */
- CANT_EXTRACT_FILE: "Could not extract the file",
- CANT_OVERRIDE: "Target file already exists",
- DISK_ENTRY_TOO_LARGE: "Number of disk entries is too large",
- NO_ZIP: "No zip file was loaded",
- NO_ENTRY: "Entry doesn't exist",
- DIRECTORY_CONTENT_ERROR: "A directory cannot have content",
- FILE_NOT_FOUND: 'File not found: "{0}"',
- NOT_IMPLEMENTED: "Not implemented",
- INVALID_FILENAME: "Invalid filename",
- INVALID_FORMAT: "Invalid or unsupported zip format. No END header found",
- INVALID_PASS_PARAM: "Incompatible password parameter",
- WRONG_PASSWORD: "Wrong Password",
-
- /* ADM-ZIP */
- COMMENT_TOO_LONG: "Comment is too long", // Comment can be max 65535 bytes long (NOTE: some non-US characters may take more space)
- EXTRA_FIELD_PARSE_ERROR: "Extra field parsing error"
-};
-
-// template
-function E(message) {
- return function (...args) {
- if (args.length) { // Allow {0} .. {9} arguments in error message, based on argument number
- message = message.replace(/\{(\d)\}/g, (_, n) => args[n] || '');
- }
-
- return new Error('ADM-ZIP: ' + message);
- };
-}
-
-// Init errors with template
-for (const msg of Object.keys(errors)) {
- exports[msg] = E(errors[msg]);
-}
diff --git a/node_modules/adm-zip/util/fattr.js b/node_modules/adm-zip/util/fattr.js
deleted file mode 100644
index 720b82787c..0000000000
--- a/node_modules/adm-zip/util/fattr.js
+++ /dev/null
@@ -1,76 +0,0 @@
-const pth = require("path");
-
-module.exports = function (/*String*/ path, /*Utils object*/ { fs }) {
- var _path = path || "",
- _obj = newAttr(),
- _stat = null;
-
- function newAttr() {
- return {
- directory: false,
- readonly: false,
- hidden: false,
- executable: false,
- mtime: 0,
- atime: 0
- };
- }
-
- if (_path && fs.existsSync(_path)) {
- _stat = fs.statSync(_path);
- _obj.directory = _stat.isDirectory();
- _obj.mtime = _stat.mtime;
- _obj.atime = _stat.atime;
- _obj.executable = (0o111 & _stat.mode) !== 0; // file is executable who ever har right not just owner
- _obj.readonly = (0o200 & _stat.mode) === 0; // readonly if owner has no write right
- _obj.hidden = pth.basename(_path)[0] === ".";
- } else {
- console.warn("Invalid path: " + _path);
- }
-
- return {
- get directory() {
- return _obj.directory;
- },
-
- get readOnly() {
- return _obj.readonly;
- },
-
- get hidden() {
- return _obj.hidden;
- },
-
- get mtime() {
- return _obj.mtime;
- },
-
- get atime() {
- return _obj.atime;
- },
-
- get executable() {
- return _obj.executable;
- },
-
- decodeAttributes: function () {},
-
- encodeAttributes: function () {},
-
- toJSON: function () {
- return {
- path: _path,
- isDirectory: _obj.directory,
- isReadOnly: _obj.readonly,
- isHidden: _obj.hidden,
- isExecutable: _obj.executable,
- mTime: _obj.mtime,
- aTime: _obj.atime
- };
- },
-
- toString: function () {
- return JSON.stringify(this.toJSON(), null, "\t");
- }
- };
-};
diff --git a/node_modules/adm-zip/util/index.js b/node_modules/adm-zip/util/index.js
deleted file mode 100644
index 10119cfede..0000000000
--- a/node_modules/adm-zip/util/index.js
+++ /dev/null
@@ -1,5 +0,0 @@
-module.exports = require("./utils");
-module.exports.Constants = require("./constants");
-module.exports.Errors = require("./errors");
-module.exports.FileAttr = require("./fattr");
-module.exports.decoder = require("./decoder");
diff --git a/node_modules/adm-zip/util/utils.js b/node_modules/adm-zip/util/utils.js
deleted file mode 100644
index 568076983f..0000000000
--- a/node_modules/adm-zip/util/utils.js
+++ /dev/null
@@ -1,336 +0,0 @@
-const fsystem = require("fs");
-const pth = require("path");
-const Constants = require("./constants");
-const Errors = require("./errors");
-const isWin = typeof process === "object" && "win32" === process.platform;
-
-const is_Obj = (obj) => typeof obj === "object" && obj !== null;
-
-// generate CRC32 lookup table
-const crcTable = new Uint32Array(256).map((t, c) => {
- for (let k = 0; k < 8; k++) {
- if ((c & 1) !== 0) {
- c = 0xedb88320 ^ (c >>> 1);
- } else {
- c >>>= 1;
- }
- }
- return c >>> 0;
-});
-
-// UTILS functions
-
-function Utils(opts) {
- this.sep = pth.sep;
- this.fs = fsystem;
-
- if (is_Obj(opts)) {
- // custom filesystem
- if (is_Obj(opts.fs) && typeof opts.fs.statSync === "function") {
- this.fs = opts.fs;
- }
- }
-}
-
-module.exports = Utils;
-
-// INSTANTIABLE functions
-
-Utils.prototype.makeDir = function (/*String*/ folder) {
- const self = this;
-
- // Sync - make directories tree
- function mkdirSync(/*String*/ fpath) {
- let resolvedPath = fpath.split(self.sep)[0];
- fpath.split(self.sep).forEach(function (name) {
- if (!name || name.substr(-1, 1) === ":") return;
- resolvedPath += self.sep + name;
- var stat;
- try {
- stat = self.fs.statSync(resolvedPath);
- } catch (e) {
- self.fs.mkdirSync(resolvedPath);
- }
- if (stat && stat.isFile()) throw Errors.FILE_IN_THE_WAY(`"${resolvedPath}"`);
- });
- }
-
- mkdirSync(folder);
-};
-
-Utils.prototype.writeFileTo = function (/*String*/ path, /*Buffer*/ content, /*Boolean*/ overwrite, /*Number*/ attr) {
- const self = this;
- if (self.fs.existsSync(path)) {
- if (!overwrite) return false; // cannot overwrite
-
- var stat = self.fs.statSync(path);
- if (stat.isDirectory()) {
- return false;
- }
- }
- var folder = pth.dirname(path);
- if (!self.fs.existsSync(folder)) {
- self.makeDir(folder);
- }
-
- var fd;
- try {
- fd = self.fs.openSync(path, "w", 0o666); // 0666
- } catch (e) {
- self.fs.chmodSync(path, 0o666);
- fd = self.fs.openSync(path, "w", 0o666);
- }
- if (fd) {
- try {
- self.fs.writeSync(fd, content, 0, content.length, 0);
- } finally {
- self.fs.closeSync(fd);
- }
- }
- self.fs.chmodSync(path, attr || 0o666);
- return true;
-};
-
-Utils.prototype.writeFileToAsync = function (/*String*/ path, /*Buffer*/ content, /*Boolean*/ overwrite, /*Number*/ attr, /*Function*/ callback) {
- if (typeof attr === "function") {
- callback = attr;
- attr = undefined;
- }
-
- const self = this;
-
- self.fs.exists(path, function (exist) {
- if (exist && !overwrite) return callback(false);
-
- self.fs.stat(path, function (err, stat) {
- if (exist && stat.isDirectory()) {
- return callback(false);
- }
-
- var folder = pth.dirname(path);
- self.fs.exists(folder, function (exists) {
- if (!exists) self.makeDir(folder);
-
- self.fs.open(path, "w", 0o666, function (err, fd) {
- if (err) {
- self.fs.chmod(path, 0o666, function () {
- self.fs.open(path, "w", 0o666, function (err, fd) {
- self.fs.write(fd, content, 0, content.length, 0, function () {
- self.fs.close(fd, function () {
- self.fs.chmod(path, attr || 0o666, function () {
- callback(true);
- });
- });
- });
- });
- });
- } else if (fd) {
- self.fs.write(fd, content, 0, content.length, 0, function () {
- self.fs.close(fd, function () {
- self.fs.chmod(path, attr || 0o666, function () {
- callback(true);
- });
- });
- });
- } else {
- self.fs.chmod(path, attr || 0o666, function () {
- callback(true);
- });
- }
- });
- });
- });
- });
-};
-
-Utils.prototype.findFiles = function (/*String*/ path) {
- const self = this;
-
- function findSync(/*String*/ dir, /*RegExp*/ pattern, /*Boolean*/ recursive) {
- if (typeof pattern === "boolean") {
- recursive = pattern;
- pattern = undefined;
- }
- let files = [];
- self.fs.readdirSync(dir).forEach(function (file) {
- const path = pth.join(dir, file);
- const stat = self.fs.statSync(path);
-
- if (!pattern || pattern.test(path)) {
- files.push(pth.normalize(path) + (stat.isDirectory() ? self.sep : ""));
- }
-
- if (stat.isDirectory() && recursive) files = files.concat(findSync(path, pattern, recursive));
- });
- return files;
- }
-
- return findSync(path, undefined, true);
-};
-
-/**
- * Callback for showing if everything was done.
- *
- * @callback filelistCallback
- * @param {Error} err - Error object
- * @param {string[]} list - was request fully completed
- */
-
-/**
- *
- * @param {string} dir
- * @param {filelistCallback} cb
- */
-Utils.prototype.findFilesAsync = function (dir, cb) {
- const self = this;
- let results = [];
- self.fs.readdir(dir, function (err, list) {
- if (err) return cb(err);
- let list_length = list.length;
- if (!list_length) return cb(null, results);
- list.forEach(function (file) {
- file = pth.join(dir, file);
- self.fs.stat(file, function (err, stat) {
- if (err) return cb(err);
- if (stat) {
- results.push(pth.normalize(file) + (stat.isDirectory() ? self.sep : ""));
- if (stat.isDirectory()) {
- self.findFilesAsync(file, function (err, res) {
- if (err) return cb(err);
- results = results.concat(res);
- if (!--list_length) cb(null, results);
- });
- } else {
- if (!--list_length) cb(null, results);
- }
- }
- });
- });
- });
-};
-
-Utils.prototype.getAttributes = function () {};
-
-Utils.prototype.setAttributes = function () {};
-
-// STATIC functions
-
-// crc32 single update (it is part of crc32)
-Utils.crc32update = function (crc, byte) {
- return crcTable[(crc ^ byte) & 0xff] ^ (crc >>> 8);
-};
-
-Utils.crc32 = function (buf) {
- if (typeof buf === "string") {
- buf = Buffer.from(buf, "utf8");
- }
-
- let len = buf.length;
- let crc = ~0;
- for (let off = 0; off < len; ) crc = Utils.crc32update(crc, buf[off++]);
- // xor and cast as uint32 number
- return ~crc >>> 0;
-};
-
-Utils.methodToString = function (/*Number*/ method) {
- switch (method) {
- case Constants.STORED:
- return "STORED (" + method + ")";
- case Constants.DEFLATED:
- return "DEFLATED (" + method + ")";
- default:
- return "UNSUPPORTED (" + method + ")";
- }
-};
-
-/**
- * removes ".." style path elements
- * @param {string} path - fixable path
- * @returns string - fixed filepath
- */
-Utils.canonical = function (/*string*/ path) {
- if (!path) return "";
- // trick normalize think path is absolute
- const safeSuffix = pth.posix.normalize("/" + path.split("\\").join("/"));
- return pth.join(".", safeSuffix);
-};
-
-/**
- * fix file names in achive
- * @param {string} path - fixable path
- * @returns string - fixed filepath
- */
-
-Utils.zipnamefix = function (path) {
- if (!path) return "";
- // trick normalize think path is absolute
- const safeSuffix = pth.posix.normalize("/" + path.split("\\").join("/"));
- return pth.posix.join(".", safeSuffix);
-};
-
-/**
- *
- * @param {Array} arr
- * @param {function} callback
- * @returns
- */
-Utils.findLast = function (arr, callback) {
- if (!Array.isArray(arr)) throw new TypeError("arr is not array");
-
- const len = arr.length >>> 0;
- for (let i = len - 1; i >= 0; i--) {
- if (callback(arr[i], i, arr)) {
- return arr[i];
- }
- }
- return void 0;
-};
-
-// make abolute paths taking prefix as root folder
-Utils.sanitize = function (/*string*/ prefix, /*string*/ name) {
- prefix = pth.resolve(pth.normalize(prefix));
- var parts = name.split("/");
- for (var i = 0, l = parts.length; i < l; i++) {
- var path = pth.normalize(pth.join(prefix, parts.slice(i, l).join(pth.sep)));
- if (path.indexOf(prefix) === 0) {
- return path;
- }
- }
- return pth.normalize(pth.join(prefix, pth.basename(name)));
-};
-
-// converts buffer, Uint8Array, string types to buffer
-Utils.toBuffer = function toBuffer(/*buffer, Uint8Array, string*/ input, /* function */ encoder) {
- if (Buffer.isBuffer(input)) {
- return input;
- } else if (input instanceof Uint8Array) {
- return Buffer.from(input);
- } else {
- // expect string all other values are invalid and return empty buffer
- return typeof input === "string" ? encoder(input) : Buffer.alloc(0);
- }
-};
-
-Utils.readBigUInt64LE = function (/*Buffer*/ buffer, /*int*/ index) {
- var slice = Buffer.from(buffer.slice(index, index + 8));
- slice.swap64();
-
- return parseInt(`0x${slice.toString("hex")}`);
-};
-
-Utils.fromDOS2Date = function (val) {
- return new Date(((val >> 25) & 0x7f) + 1980, Math.max(((val >> 21) & 0x0f) - 1, 0), Math.max((val >> 16) & 0x1f, 1), (val >> 11) & 0x1f, (val >> 5) & 0x3f, (val & 0x1f) << 1);
-};
-
-Utils.fromDate2DOS = function (val) {
- let date = 0;
- let time = 0;
- if (val.getFullYear() > 1979) {
- date = (((val.getFullYear() - 1980) & 0x7f) << 9) | ((val.getMonth() + 1) << 5) | val.getDate();
- time = (val.getHours() << 11) | (val.getMinutes() << 5) | (val.getSeconds() >> 1);
- }
- return (date << 16) | time;
-};
-
-Utils.isWin = isWin; // Do we have windows system
-Utils.crcTable = crcTable;
diff --git a/node_modules/adm-zip/zipEntry.js b/node_modules/adm-zip/zipEntry.js
deleted file mode 100644
index e7804b6c2d..0000000000
--- a/node_modules/adm-zip/zipEntry.js
+++ /dev/null
@@ -1,405 +0,0 @@
-var Utils = require("./util"),
- Headers = require("./headers"),
- Constants = Utils.Constants,
- Methods = require("./methods");
-
-module.exports = function (/** object */ options, /*Buffer*/ input) {
- var _centralHeader = new Headers.EntryHeader(),
- _entryName = Buffer.alloc(0),
- _comment = Buffer.alloc(0),
- _isDirectory = false,
- uncompressedData = null,
- _extra = Buffer.alloc(0),
- _extralocal = Buffer.alloc(0),
- _efs = true;
-
- // assign options
- const opts = options;
-
- const decoder = typeof opts.decoder === "object" ? opts.decoder : Utils.decoder;
- _efs = decoder.hasOwnProperty("efs") ? decoder.efs : false;
-
- function getCompressedDataFromZip() {
- //if (!input || !Buffer.isBuffer(input)) {
- if (!input || !(input instanceof Uint8Array)) {
- return Buffer.alloc(0);
- }
- _extralocal = _centralHeader.loadLocalHeaderFromBinary(input);
- return input.slice(_centralHeader.realDataOffset, _centralHeader.realDataOffset + _centralHeader.compressedSize);
- }
-
- function crc32OK(data) {
- // if bit 3 (0x08) of the general-purpose flags field is set, then the CRC-32 and file sizes are not known when the local header is written
- if (!_centralHeader.flags_desc) {
- if (Utils.crc32(data) !== _centralHeader.localHeader.crc) {
- return false;
- }
- } else {
- const descriptor = {};
- const dataEndOffset = _centralHeader.realDataOffset + _centralHeader.compressedSize;
- // no descriptor after compressed data, instead new local header
- if (input.readUInt32LE(dataEndOffset) == Constants.LOCSIG || input.readUInt32LE(dataEndOffset) == Constants.CENSIG) {
- throw Utils.Errors.DESCRIPTOR_NOT_EXIST();
- }
-
- // get decriptor data
- if (input.readUInt32LE(dataEndOffset) == Constants.EXTSIG) {
- // descriptor with signature
- descriptor.crc = input.readUInt32LE(dataEndOffset + Constants.EXTCRC);
- descriptor.compressedSize = input.readUInt32LE(dataEndOffset + Constants.EXTSIZ);
- descriptor.size = input.readUInt32LE(dataEndOffset + Constants.EXTLEN);
- } else if (input.readUInt16LE(dataEndOffset + 12) === 0x4b50) {
- // descriptor without signature (we check is new header starting where we expect)
- descriptor.crc = input.readUInt32LE(dataEndOffset + Constants.EXTCRC - 4);
- descriptor.compressedSize = input.readUInt32LE(dataEndOffset + Constants.EXTSIZ - 4);
- descriptor.size = input.readUInt32LE(dataEndOffset + Constants.EXTLEN - 4);
- } else {
- throw Utils.Errors.DESCRIPTOR_UNKNOWN();
- }
-
- // check data integrity
- if (descriptor.compressedSize !== _centralHeader.compressedSize || descriptor.size !== _centralHeader.size || descriptor.crc !== _centralHeader.crc) {
- throw Utils.Errors.DESCRIPTOR_FAULTY();
- }
- if (Utils.crc32(data) !== descriptor.crc) {
- return false;
- }
-
- // @TODO: zip64 bit descriptor fields
- // if bit 3 is set and any value in local header "zip64 Extended information" extra field are set 0 (place holder)
- // then 64-bit descriptor format is used instead of 32-bit
- // central header - "zip64 Extended information" extra field should store real values and not place holders
- }
- return true;
- }
-
- function decompress(/*Boolean*/ async, /*Function*/ callback, /*String, Buffer*/ pass) {
- if (typeof callback === "undefined" && typeof async === "string") {
- pass = async;
- async = void 0;
- }
- if (_isDirectory) {
- if (async && callback) {
- callback(Buffer.alloc(0), Utils.Errors.DIRECTORY_CONTENT_ERROR()); //si added error.
- }
- return Buffer.alloc(0);
- }
-
- var compressedData = getCompressedDataFromZip();
-
- if (compressedData.length === 0) {
- // File is empty, nothing to decompress.
- if (async && callback) callback(compressedData);
- return compressedData;
- }
-
- if (_centralHeader.encrypted) {
- if ("string" !== typeof pass && !Buffer.isBuffer(pass)) {
- throw Utils.Errors.INVALID_PASS_PARAM();
- }
- compressedData = Methods.ZipCrypto.decrypt(compressedData, _centralHeader, pass);
- }
-
- var data = Buffer.alloc(_centralHeader.size);
-
- switch (_centralHeader.method) {
- case Utils.Constants.STORED:
- compressedData.copy(data);
- if (!crc32OK(data)) {
- if (async && callback) callback(data, Utils.Errors.BAD_CRC()); //si added error
- throw Utils.Errors.BAD_CRC();
- } else {
- //si added otherwise did not seem to return data.
- if (async && callback) callback(data);
- return data;
- }
- case Utils.Constants.DEFLATED:
- var inflater = new Methods.Inflater(compressedData, _centralHeader.size);
- if (!async) {
- const result = inflater.inflate(data);
- result.copy(data, 0);
- if (!crc32OK(data)) {
- throw Utils.Errors.BAD_CRC(`"${decoder.decode(_entryName)}"`);
- }
- return data;
- } else {
- inflater.inflateAsync(function (result) {
- result.copy(result, 0);
- if (callback) {
- if (!crc32OK(result)) {
- callback(result, Utils.Errors.BAD_CRC()); //si added error
- } else {
- callback(result);
- }
- }
- });
- }
- break;
- default:
- if (async && callback) callback(Buffer.alloc(0), Utils.Errors.UNKNOWN_METHOD());
- throw Utils.Errors.UNKNOWN_METHOD();
- }
- }
-
- function compress(/*Boolean*/ async, /*Function*/ callback) {
- if ((!uncompressedData || !uncompressedData.length) && Buffer.isBuffer(input)) {
- // no data set or the data wasn't changed to require recompression
- if (async && callback) callback(getCompressedDataFromZip());
- return getCompressedDataFromZip();
- }
-
- if (uncompressedData.length && !_isDirectory) {
- var compressedData;
- // Local file header
- switch (_centralHeader.method) {
- case Utils.Constants.STORED:
- _centralHeader.compressedSize = _centralHeader.size;
-
- compressedData = Buffer.alloc(uncompressedData.length);
- uncompressedData.copy(compressedData);
-
- if (async && callback) callback(compressedData);
- return compressedData;
- default:
- case Utils.Constants.DEFLATED:
- var deflater = new Methods.Deflater(uncompressedData);
- if (!async) {
- var deflated = deflater.deflate();
- _centralHeader.compressedSize = deflated.length;
- return deflated;
- } else {
- deflater.deflateAsync(function (data) {
- compressedData = Buffer.alloc(data.length);
- _centralHeader.compressedSize = data.length;
- data.copy(compressedData);
- callback && callback(compressedData);
- });
- }
- deflater = null;
- break;
- }
- } else if (async && callback) {
- callback(Buffer.alloc(0));
- } else {
- return Buffer.alloc(0);
- }
- }
-
- function readUInt64LE(buffer, offset) {
- return (buffer.readUInt32LE(offset + 4) << 4) + buffer.readUInt32LE(offset);
- }
-
- function parseExtra(data) {
- try {
- var offset = 0;
- var signature, size, part;
- while (offset + 4 < data.length) {
- signature = data.readUInt16LE(offset);
- offset += 2;
- size = data.readUInt16LE(offset);
- offset += 2;
- part = data.slice(offset, offset + size);
- offset += size;
- if (Constants.ID_ZIP64 === signature) {
- parseZip64ExtendedInformation(part);
- }
- }
- } catch (error) {
- throw Utils.Errors.EXTRA_FIELD_PARSE_ERROR();
- }
- }
-
- //Override header field values with values from the ZIP64 extra field
- function parseZip64ExtendedInformation(data) {
- var size, compressedSize, offset, diskNumStart;
-
- if (data.length >= Constants.EF_ZIP64_SCOMP) {
- size = readUInt64LE(data, Constants.EF_ZIP64_SUNCOMP);
- if (_centralHeader.size === Constants.EF_ZIP64_OR_32) {
- _centralHeader.size = size;
- }
- }
- if (data.length >= Constants.EF_ZIP64_RHO) {
- compressedSize = readUInt64LE(data, Constants.EF_ZIP64_SCOMP);
- if (_centralHeader.compressedSize === Constants.EF_ZIP64_OR_32) {
- _centralHeader.compressedSize = compressedSize;
- }
- }
- if (data.length >= Constants.EF_ZIP64_DSN) {
- offset = readUInt64LE(data, Constants.EF_ZIP64_RHO);
- if (_centralHeader.offset === Constants.EF_ZIP64_OR_32) {
- _centralHeader.offset = offset;
- }
- }
- if (data.length >= Constants.EF_ZIP64_DSN + 4) {
- diskNumStart = data.readUInt32LE(Constants.EF_ZIP64_DSN);
- if (_centralHeader.diskNumStart === Constants.EF_ZIP64_OR_16) {
- _centralHeader.diskNumStart = diskNumStart;
- }
- }
- }
-
- return {
- get entryName() {
- return decoder.decode(_entryName);
- },
- get rawEntryName() {
- return _entryName;
- },
- set entryName(val) {
- _entryName = Utils.toBuffer(val, decoder.encode);
- var lastChar = _entryName[_entryName.length - 1];
- _isDirectory = lastChar === 47 || lastChar === 92;
- _centralHeader.fileNameLength = _entryName.length;
- },
-
- get efs() {
- if (typeof _efs === "function") {
- return _efs(this.entryName);
- } else {
- return _efs;
- }
- },
-
- get extra() {
- return _extra;
- },
- set extra(val) {
- _extra = val;
- _centralHeader.extraLength = val.length;
- parseExtra(val);
- },
-
- get comment() {
- return decoder.decode(_comment);
- },
- set comment(val) {
- _comment = Utils.toBuffer(val, decoder.encode);
- _centralHeader.commentLength = _comment.length;
- if (_comment.length > 0xffff) throw Utils.Errors.COMMENT_TOO_LONG();
- },
-
- get name() {
- var n = decoder.decode(_entryName);
- return _isDirectory
- ? n
- .substr(n.length - 1)
- .split("/")
- .pop()
- : n.split("/").pop();
- },
- get isDirectory() {
- return _isDirectory;
- },
-
- getCompressedData: function () {
- return compress(false, null);
- },
-
- getCompressedDataAsync: function (/*Function*/ callback) {
- compress(true, callback);
- },
-
- setData: function (value) {
- uncompressedData = Utils.toBuffer(value, Utils.decoder.encode);
- if (!_isDirectory && uncompressedData.length) {
- _centralHeader.size = uncompressedData.length;
- _centralHeader.method = Utils.Constants.DEFLATED;
- _centralHeader.crc = Utils.crc32(value);
- _centralHeader.changed = true;
- } else {
- // folders and blank files should be stored
- _centralHeader.method = Utils.Constants.STORED;
- }
- },
-
- getData: function (pass) {
- if (_centralHeader.changed) {
- return uncompressedData;
- } else {
- return decompress(false, null, pass);
- }
- },
-
- getDataAsync: function (/*Function*/ callback, pass) {
- if (_centralHeader.changed) {
- callback(uncompressedData);
- } else {
- decompress(true, callback, pass);
- }
- },
-
- set attr(attr) {
- _centralHeader.attr = attr;
- },
- get attr() {
- return _centralHeader.attr;
- },
-
- set header(/*Buffer*/ data) {
- _centralHeader.loadFromBinary(data);
- },
-
- get header() {
- return _centralHeader;
- },
-
- packCentralHeader: function () {
- _centralHeader.flags_efs = this.efs;
- _centralHeader.extraLength = _extra.length;
- // 1. create header (buffer)
- var header = _centralHeader.centralHeaderToBinary();
- var addpos = Utils.Constants.CENHDR;
- // 2. add file name
- _entryName.copy(header, addpos);
- addpos += _entryName.length;
- // 3. add extra data
- _extra.copy(header, addpos);
- addpos += _centralHeader.extraLength;
- // 4. add file comment
- _comment.copy(header, addpos);
- return header;
- },
-
- packLocalHeader: function () {
- let addpos = 0;
- _centralHeader.flags_efs = this.efs;
- _centralHeader.extraLocalLength = _extralocal.length;
- // 1. construct local header Buffer
- const localHeaderBuf = _centralHeader.localHeaderToBinary();
- // 2. localHeader - crate header buffer
- const localHeader = Buffer.alloc(localHeaderBuf.length + _entryName.length + _centralHeader.extraLocalLength);
- // 2.1 add localheader
- localHeaderBuf.copy(localHeader, addpos);
- addpos += localHeaderBuf.length;
- // 2.2 add file name
- _entryName.copy(localHeader, addpos);
- addpos += _entryName.length;
- // 2.3 add extra field
- _extralocal.copy(localHeader, addpos);
- addpos += _extralocal.length;
-
- return localHeader;
- },
-
- toJSON: function () {
- const bytes = function (nr) {
- return "<" + ((nr && nr.length + " bytes buffer") || "null") + ">";
- };
-
- return {
- entryName: this.entryName,
- name: this.name,
- comment: this.comment,
- isDirectory: this.isDirectory,
- header: _centralHeader.toJSON(),
- compressedData: bytes(input),
- data: bytes(uncompressedData)
- };
- },
-
- toString: function () {
- return JSON.stringify(this.toJSON(), null, "\t");
- }
- };
-};
diff --git a/node_modules/adm-zip/zipFile.js b/node_modules/adm-zip/zipFile.js
deleted file mode 100644
index a52db94380..0000000000
--- a/node_modules/adm-zip/zipFile.js
+++ /dev/null
@@ -1,446 +0,0 @@
-const ZipEntry = require("./zipEntry");
-const Headers = require("./headers");
-const Utils = require("./util");
-
-module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) {
- var entryList = [],
- entryTable = {},
- _comment = Buffer.alloc(0),
- mainHeader = new Headers.MainHeader(),
- loadedEntries = false;
- var password = null;
- const temporary = new Set();
-
- // assign options
- const opts = options;
-
- const { noSort, decoder } = opts;
-
- if (inBuffer) {
- // is a memory buffer
- readMainHeader(opts.readEntries);
- } else {
- // none. is a new file
- loadedEntries = true;
- }
-
- function makeTemporaryFolders() {
- const foldersList = new Set();
-
- // Make list of all folders in file
- for (const elem of Object.keys(entryTable)) {
- const elements = elem.split("/");
- elements.pop(); // filename
- if (!elements.length) continue; // no folders
- for (let i = 0; i < elements.length; i++) {
- const sub = elements.slice(0, i + 1).join("/") + "/";
- foldersList.add(sub);
- }
- }
-
- // create missing folders as temporary
- for (const elem of foldersList) {
- if (!(elem in entryTable)) {
- const tempfolder = new ZipEntry(opts);
- tempfolder.entryName = elem;
- tempfolder.attr = 0x10;
- tempfolder.temporary = true;
- entryList.push(tempfolder);
- entryTable[tempfolder.entryName] = tempfolder;
- temporary.add(tempfolder);
- }
- }
- }
-
- function readEntries() {
- loadedEntries = true;
- entryTable = {};
- if (mainHeader.diskEntries > (inBuffer.length - mainHeader.offset) / Utils.Constants.CENHDR) {
- throw Utils.Errors.DISK_ENTRY_TOO_LARGE();
- }
- entryList = new Array(mainHeader.diskEntries); // total number of entries
- var index = mainHeader.offset; // offset of first CEN header
- for (var i = 0; i < entryList.length; i++) {
- var tmp = index,
- entry = new ZipEntry(opts, inBuffer);
- entry.header = inBuffer.slice(tmp, (tmp += Utils.Constants.CENHDR));
-
- entry.entryName = inBuffer.slice(tmp, (tmp += entry.header.fileNameLength));
-
- if (entry.header.extraLength) {
- entry.extra = inBuffer.slice(tmp, (tmp += entry.header.extraLength));
- }
-
- if (entry.header.commentLength) entry.comment = inBuffer.slice(tmp, tmp + entry.header.commentLength);
-
- index += entry.header.centralHeaderSize;
-
- entryList[i] = entry;
- entryTable[entry.entryName] = entry;
- }
- temporary.clear();
- makeTemporaryFolders();
- }
-
- function readMainHeader(/*Boolean*/ readNow) {
- var i = inBuffer.length - Utils.Constants.ENDHDR, // END header size
- max = Math.max(0, i - 0xffff), // 0xFFFF is the max zip file comment length
- n = max,
- endStart = inBuffer.length,
- endOffset = -1, // Start offset of the END header
- commentEnd = 0;
-
- // option to search header form entire file
- const trailingSpace = typeof opts.trailingSpace === "boolean" ? opts.trailingSpace : false;
- if (trailingSpace) max = 0;
-
- for (i; i >= n; i--) {
- if (inBuffer[i] !== 0x50) continue; // quick check that the byte is 'P'
- if (inBuffer.readUInt32LE(i) === Utils.Constants.ENDSIG) {
- // "PK\005\006"
- endOffset = i;
- commentEnd = i;
- endStart = i + Utils.Constants.ENDHDR;
- // We already found a regular signature, let's look just a bit further to check if there's any zip64 signature
- n = i - Utils.Constants.END64HDR;
- continue;
- }
-
- if (inBuffer.readUInt32LE(i) === Utils.Constants.END64SIG) {
- // Found a zip64 signature, let's continue reading the whole zip64 record
- n = max;
- continue;
- }
-
- if (inBuffer.readUInt32LE(i) === Utils.Constants.ZIP64SIG) {
- // Found the zip64 record, let's determine it's size
- endOffset = i;
- endStart = i + Utils.readBigUInt64LE(inBuffer, i + Utils.Constants.ZIP64SIZE) + Utils.Constants.ZIP64LEAD;
- break;
- }
- }
-
- if (endOffset == -1) throw Utils.Errors.INVALID_FORMAT();
-
- mainHeader.loadFromBinary(inBuffer.slice(endOffset, endStart));
- if (mainHeader.commentLength) {
- _comment = inBuffer.slice(commentEnd + Utils.Constants.ENDHDR);
- }
- if (readNow) readEntries();
- }
-
- function sortEntries() {
- if (entryList.length > 1 && !noSort) {
- entryList.sort((a, b) => a.entryName.toLowerCase().localeCompare(b.entryName.toLowerCase()));
- }
- }
-
- return {
- /**
- * Returns an array of ZipEntry objects existent in the current opened archive
- * @return Array
- */
- get entries() {
- if (!loadedEntries) {
- readEntries();
- }
- return entryList.filter((e) => !temporary.has(e));
- },
-
- /**
- * Archive comment
- * @return {String}
- */
- get comment() {
- return decoder.decode(_comment);
- },
- set comment(val) {
- _comment = Utils.toBuffer(val, decoder.encode);
- mainHeader.commentLength = _comment.length;
- },
-
- getEntryCount: function () {
- if (!loadedEntries) {
- return mainHeader.diskEntries;
- }
-
- return entryList.length;
- },
-
- forEach: function (callback) {
- this.entries.forEach(callback);
- },
-
- /**
- * Returns a reference to the entry with the given name or null if entry is inexistent
- *
- * @param entryName
- * @return ZipEntry
- */
- getEntry: function (/*String*/ entryName) {
- if (!loadedEntries) {
- readEntries();
- }
- return entryTable[entryName] || null;
- },
-
- /**
- * Adds the given entry to the entry list
- *
- * @param entry
- */
- setEntry: function (/*ZipEntry*/ entry) {
- if (!loadedEntries) {
- readEntries();
- }
- entryList.push(entry);
- entryTable[entry.entryName] = entry;
- mainHeader.totalEntries = entryList.length;
- },
-
- /**
- * Removes the file with the given name from the entry list.
- *
- * If the entry is a directory, then all nested files and directories will be removed
- * @param entryName
- * @returns {void}
- */
- deleteFile: function (/*String*/ entryName, withsubfolders = true) {
- if (!loadedEntries) {
- readEntries();
- }
- const entry = entryTable[entryName];
- const list = this.getEntryChildren(entry, withsubfolders).map((child) => child.entryName);
-
- list.forEach(this.deleteEntry);
- },
-
- /**
- * Removes the entry with the given name from the entry list.
- *
- * @param {string} entryName
- * @returns {void}
- */
- deleteEntry: function (/*String*/ entryName) {
- if (!loadedEntries) {
- readEntries();
- }
- const entry = entryTable[entryName];
- const index = entryList.indexOf(entry);
- if (index >= 0) {
- entryList.splice(index, 1);
- delete entryTable[entryName];
- mainHeader.totalEntries = entryList.length;
- }
- },
-
- /**
- * Iterates and returns all nested files and directories of the given entry
- *
- * @param entry
- * @return Array
- */
- getEntryChildren: function (/*ZipEntry*/ entry, subfolders = true) {
- if (!loadedEntries) {
- readEntries();
- }
- if (typeof entry === "object") {
- if (entry.isDirectory && subfolders) {
- const list = [];
- const name = entry.entryName;
-
- for (const zipEntry of entryList) {
- if (zipEntry.entryName.startsWith(name)) {
- list.push(zipEntry);
- }
- }
- return list;
- } else {
- return [entry];
- }
- }
- return [];
- },
-
- /**
- * How many child elements entry has
- *
- * @param {ZipEntry} entry
- * @return {integer}
- */
- getChildCount: function (entry) {
- if (entry && entry.isDirectory) {
- const list = this.getEntryChildren(entry);
- return list.includes(entry) ? list.length - 1 : list.length;
- }
- return 0;
- },
-
- /**
- * Returns the zip file
- *
- * @return Buffer
- */
- compressToBuffer: function () {
- if (!loadedEntries) {
- readEntries();
- }
- sortEntries();
-
- const dataBlock = [];
- const headerBlocks = [];
- let totalSize = 0;
- let dindex = 0;
-
- mainHeader.size = 0;
- mainHeader.offset = 0;
- let totalEntries = 0;
-
- for (const entry of this.entries) {
- // compress data and set local and entry header accordingly. Reason why is called first
- const compressedData = entry.getCompressedData();
- entry.header.offset = dindex;
-
- // 1. construct local header
- const localHeader = entry.packLocalHeader();
-
- // 2. offsets
- const dataLength = localHeader.length + compressedData.length;
- dindex += dataLength;
-
- // 3. store values in sequence
- dataBlock.push(localHeader);
- dataBlock.push(compressedData);
-
- // 4. construct central header
- const centralHeader = entry.packCentralHeader();
- headerBlocks.push(centralHeader);
- // 5. update main header
- mainHeader.size += centralHeader.length;
- totalSize += dataLength + centralHeader.length;
- totalEntries++;
- }
-
- totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length
- // point to end of data and beginning of central directory first record
- mainHeader.offset = dindex;
- mainHeader.totalEntries = totalEntries;
-
- dindex = 0;
- const outBuffer = Buffer.alloc(totalSize);
- // write data blocks
- for (const content of dataBlock) {
- content.copy(outBuffer, dindex);
- dindex += content.length;
- }
-
- // write central directory entries
- for (const content of headerBlocks) {
- content.copy(outBuffer, dindex);
- dindex += content.length;
- }
-
- // write main header
- const mh = mainHeader.toBinary();
- if (_comment) {
- _comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment
- }
- mh.copy(outBuffer, dindex);
-
- // Since we update entry and main header offsets,
- // they are no longer valid and we have to reset content
- // (Issue 64)
-
- inBuffer = outBuffer;
- loadedEntries = false;
-
- return outBuffer;
- },
-
- toAsyncBuffer: function (/*Function*/ onSuccess, /*Function*/ onFail, /*Function*/ onItemStart, /*Function*/ onItemEnd) {
- try {
- if (!loadedEntries) {
- readEntries();
- }
- sortEntries();
-
- const dataBlock = [];
- const centralHeaders = [];
- let totalSize = 0;
- let dindex = 0;
- let totalEntries = 0;
-
- mainHeader.size = 0;
- mainHeader.offset = 0;
-
- const compress2Buffer = function (entryLists) {
- if (entryLists.length > 0) {
- const entry = entryLists.shift();
- const name = entry.entryName + entry.extra.toString();
- if (onItemStart) onItemStart(name);
- entry.getCompressedDataAsync(function (compressedData) {
- if (onItemEnd) onItemEnd(name);
- entry.header.offset = dindex;
-
- // 1. construct local header
- const localHeader = entry.packLocalHeader();
-
- // 2. offsets
- const dataLength = localHeader.length + compressedData.length;
- dindex += dataLength;
-
- // 3. store values in sequence
- dataBlock.push(localHeader);
- dataBlock.push(compressedData);
-
- // central header
- const centalHeader = entry.packCentralHeader();
- centralHeaders.push(centalHeader);
- mainHeader.size += centalHeader.length;
- totalSize += dataLength + centalHeader.length;
- totalEntries++;
-
- compress2Buffer(entryLists);
- });
- } else {
- totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length
- // point to end of data and beginning of central directory first record
- mainHeader.offset = dindex;
- mainHeader.totalEntries = totalEntries;
-
- dindex = 0;
- const outBuffer = Buffer.alloc(totalSize);
- dataBlock.forEach(function (content) {
- content.copy(outBuffer, dindex); // write data blocks
- dindex += content.length;
- });
- centralHeaders.forEach(function (content) {
- content.copy(outBuffer, dindex); // write central directory entries
- dindex += content.length;
- });
-
- const mh = mainHeader.toBinary();
- if (_comment) {
- _comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment
- }
-
- mh.copy(outBuffer, dindex); // write main header
-
- // Since we update entry and main header offsets, they are no
- // longer valid and we have to reset content using our new buffer
- // (Issue 64)
-
- inBuffer = outBuffer;
- loadedEntries = false;
-
- onSuccess(outBuffer);
- }
- };
-
- compress2Buffer(Array.from(this.entries));
- } catch (e) {
- onFail(e);
- }
- }
- };
-};
diff --git a/package-lock.json b/package-lock.json
index 099d1b3c57..78ecf03807 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -24,7 +24,7 @@
"@schemastore/package": "0.0.10",
"@types/node-forge": "^1.3.11",
"@types/uuid": "^10.0.0",
- "adm-zip": "^0.5.16",
+ "archiver": "^7.0.1",
"check-disk-space": "^3.4.0",
"console-log-level": "^1.4.1",
"del": "^6.1.1",
@@ -49,7 +49,7 @@
"@eslint/eslintrc": "^3.3.1",
"@eslint/js": "^9.23.0",
"@microsoft/eslint-formatter-sarif": "^3.1.0",
- "@types/adm-zip": "^0.5.7",
+ "@types/archiver": "^6.0.3",
"@types/console-log-level": "^1.4.5",
"@types/follow-redirects": "^1.14.4",
"@types/get-folder-size": "^2.0.0",
@@ -1938,14 +1938,14 @@
"node": ">=4"
}
},
- "node_modules/@types/adm-zip": {
- "version": "0.5.7",
- "resolved": "https://registry.npmjs.org/@types/adm-zip/-/adm-zip-0.5.7.tgz",
- "integrity": "sha512-DNEs/QvmyRLurdQPChqq0Md4zGvPwHerAJYWk9l2jCbD1VPpnzRJorOdiq4zsw09NFbYnhfsoEhWtxIzXpn2yw==",
+ "node_modules/@types/archiver": {
+ "version": "6.0.3",
+ "resolved": "https://registry.npmjs.org/@types/archiver/-/archiver-6.0.3.tgz",
+ "integrity": "sha512-a6wUll6k3zX6qs5KlxIggs1P1JcYJaTCx2gnlr+f0S1yd2DoaEwoIK10HmBaLnZwWneBz+JBm0dwcZu0zECBcQ==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@types/node": "*"
+ "@types/readdir-glob": "*"
}
},
"node_modules/@types/aws-lambda": {
@@ -2004,6 +2004,16 @@
"@types/node": "*"
}
},
+ "node_modules/@types/readdir-glob": {
+ "version": "1.1.5",
+ "resolved": "https://registry.npmjs.org/@types/readdir-glob/-/readdir-glob-1.1.5.tgz",
+ "integrity": "sha512-raiuEPUYqXu+nvtY2Pe8s8FEmZ3x5yAH4VkLdihcPdalvsHltomrRC9BzuStrJ9yk06470hS0Crw0f1pXqD+Hg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/node": "*"
+ }
+ },
"node_modules/@types/semver": {
"version": "7.7.0",
"resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.7.0.tgz",
@@ -2703,14 +2713,6 @@
"node": ">=0.4.0"
}
},
- "node_modules/adm-zip": {
- "version": "0.5.16",
- "resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.5.16.tgz",
- "integrity": "sha512-TGw5yVi4saajsSEgz25grObGHEUaDrniwvA2qwSC060KfqGPdglhvPMA2lPIoxs3PQIItj2iag35fONcQqgUaQ==",
- "engines": {
- "node": ">=12.0"
- }
- },
"node_modules/agent-base": {
"version": "7.1.3",
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz",
diff --git a/package.json b/package.json
index eb379bfc10..565bff7bc1 100644
--- a/package.json
+++ b/package.json
@@ -37,7 +37,7 @@
"@schemastore/package": "0.0.10",
"@types/node-forge": "^1.3.11",
"@types/uuid": "^10.0.0",
- "adm-zip": "^0.5.16",
+ "archiver": "^7.0.1",
"check-disk-space": "^3.4.0",
"console-log-level": "^1.4.1",
"del": "^6.1.1",
@@ -62,7 +62,7 @@
"@eslint/eslintrc": "^3.3.1",
"@eslint/js": "^9.23.0",
"@microsoft/eslint-formatter-sarif": "^3.1.0",
- "@types/adm-zip": "^0.5.7",
+ "@types/archiver": "^6.0.3",
"@types/console-log-level": "^1.4.5",
"@types/follow-redirects": "^1.14.4",
"@types/get-folder-size": "^2.0.0",
diff --git a/src/debug-artifacts.ts b/src/debug-artifacts.ts
index 153b95f634..4215619575 100644
--- a/src/debug-artifacts.ts
+++ b/src/debug-artifacts.ts
@@ -4,7 +4,7 @@ import * as path from "path";
import * as artifact from "@actions/artifact";
import * as artifactLegacy from "@actions/artifact-legacy";
import * as core from "@actions/core";
-import AdmZip from "adm-zip";
+import archiver from "archiver";
import del from "del";
import { getOptionalInput, getTemporaryDirectory } from "./actions-util";
@@ -344,9 +344,24 @@ async function createPartialDatabaseBundle(
if (fs.existsSync(databaseBundlePath)) {
await del(databaseBundlePath, { force: true });
}
- const zip = new AdmZip();
- zip.addLocalFolder(databasePath);
- zip.writeZip(databaseBundlePath);
+ const output = fs.createWriteStream(databaseBundlePath);
+ const zip = archiver("zip");
+
+ zip.on("error", (err) => {
+ throw err;
+ });
+
+ zip.on("warning", (err) => {
+ // Ignore ENOENT warnings. There's nothing anyone can do about it.
+ if (err.code !== "ENOENT") {
+ throw err;
+ }
+ });
+
+ zip.pipe(output);
+ zip.directory(databasePath, false);
+ await zip.finalize();
+
return databaseBundlePath;
}
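For reference, the archiver-based bundling introduced in src/debug-artifacts.ts follows the stream-piping pattern sketched below. This is a minimal standalone sketch, not code from this change: zipDirectory is a hypothetical helper, and the paths are placeholders. One step it makes explicit is waiting for the output stream's "close" event, since archiver's finalize() may resolve before the destination file has been fully flushed to disk.

import * as fs from "fs";
import archiver from "archiver";

// Zip the contents of `sourceDir` into the file at `bundlePath` (placeholder paths).
async function zipDirectory(sourceDir: string, bundlePath: string): Promise<void> {
  const output = fs.createWriteStream(bundlePath);
  const zip = archiver("zip");

  // Reject on hard errors; surface warnings other than missing files (ENOENT).
  const done = new Promise<void>((resolve, reject) => {
    output.on("close", resolve); // destination file fully written
    zip.on("error", reject);
    zip.on("warning", (err) => {
      if (err.code !== "ENOENT") reject(err);
    });
  });

  zip.pipe(output);
  zip.directory(sourceDir, false); // false: place entries at the archive root
  await zip.finalize();            // no more entries will be appended
  await done;                      // wait until the output stream has closed
}

Usage under these assumptions would be along the lines of `await zipDirectory(databasePath, databaseBundlePath)`.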