'use strict';

var utils = require('../utils');
var GenericWorker = require('../stream/GenericWorker');
var utf8 = require('../utf8');
var crc32 = require('../crc32');
var signature = require('../signature');

/**
 * Transform an integer into a string in hexadecimal.
 * @private
 * @param {number} dec the number to convert.
 * @param {number} bytes the number of bytes to generate.
 * @returns {string} the result.
 */
var decToHex = function(dec, bytes) {
    var hex = "", i;
    for (i = 0; i < bytes; i++) {
        hex += String.fromCharCode(dec & 0xff);
        // little-endian: emit the low byte first, then shift it out
        dec = dec >>> 8;
    }
    return hex;
};

/**
 * Generate the UNIX part of the external file attributes.
 * @param {Object} unixPermissions the unix permissions or null.
 * @param {Boolean} isDir true if the entry is a directory, false otherwise.
 * @return {Number} a 32 bit integer.
 *
 * adapted from http://unix.stackexchange.com/questions/14705/the-zip-formats-external-file-attribute :
 *
 * TTTTsstrwxrwxrwx0000000000ADVSHR
 * ^^^^____________________________ file type, see zipinfo.c (UNX_*)
 *     ^^^_________________________ setuid, setgid, sticky
 *        ^^^^^^^^^________________ permissions
 *                 ^^^^^^^^^^______ not used ?
 *                           ^^^^^^ DOS attribute bits : Archive, Directory, Volume label, System file, Hidden, Read only
 */
var generateUnixExternalFileAttr = function(unixPermissions, isDir) {

    var result = unixPermissions;
    if (!unixPermissions) {
        // I can't use octal values in strict mode, hence the hexa.
        //  040775 => 0x41fd
        // 0100664 => 0x81b4
        result = isDir ? 0x41fd : 0x81b4;
    }
    // the UNIX bits live in the high 16 bits of the external attributes
    return (result & 0xFFFF) << 16;
};

/**
 * Generate the DOS part of the external file attributes.
 * @param {Object} dosPermissions the dos permissions or null.
 * @param {Boolean} isDir true if the entry is a directory, false otherwise.
 * @return {Number} a 32 bit integer.
 *
 * Bit 0     Read-Only
 * Bit 1     Hidden
 * Bit 2     System
 * Bit 3     Volume Label
 * Bit 4     Directory
 * Bit 5     Archive
 */
var generateDosExternalFileAttr = function(dosPermissions, isDir) {
    // the dir flag is already set for compatibility
    return (dosPermissions || 0) & 0x3F;
};

/**
 * Generate the various parts used in the construction of the final zip file.
 * @param {Object} streamInfo the hash with informations about the compressed file.
 * @param {Boolean} streamedContent is the content streamed ?
 * @param {Boolean} streamingEnded is the stream finished ?
 * @param {number} offset the current offset from the start of the zip file.
 * @param {String} platform let's pretend we are this platform (change platform dependents fields)
 * @param {Function} encodeFileName the function to encode the file name / comment.
 * @return {Object} the zip parts.
 */
var generateZipParts = function(streamInfo, streamedContent, streamingEnded, offset, platform, encodeFileName) {
    var file = streamInfo['file'],
        compression = streamInfo['compression'],
        useCustomEncoding = encodeFileName !== utf8.utf8encode,
        encodedFileName = utils.transformTo("string", encodeFileName(file.name)),
        utfEncodedFileName = utils.transformTo("string", utf8.utf8encode(file.name)),
        comment = file.comment,
        encodedComment = utils.transformTo("string", encodeFileName(comment)),
        utfEncodedComment = utils.transformTo("string", utf8.utf8encode(comment)),
        useUTF8ForFileName = utfEncodedFileName.length !== file.name.length,
        useUTF8ForComment = utfEncodedComment.length !== comment.length,
        dosTime,
        dosDate,
        extraFields = "",
        unicodePathExtraField = "",
        unicodeCommentExtraField = "",
        dir = file.dir,
        date = file.date;

    var dataInfo = {
        crc32: 0,
        compressedSize: 0,
        uncompressedSize: 0
    };

    // if the content is streamed, the sizes/crc32 are only available AFTER
    // the end of the stream.
    if (!streamedContent || streamingEnded) {
        dataInfo.crc32 = streamInfo['crc32'];
        dataInfo.compressedSize = streamInfo['compressedSize'];
        dataInfo.uncompressedSize = streamInfo['uncompressedSize'];
    }

    var bitflag = 0;
    if (streamedContent) {
        // Bit 3: the sizes/crc32 are set to zero in the local header.
        // The correct values are put in the data descriptor immediately
        // following the compressed data.
        bitflag |= 0x0008;
    }
    if (!useCustomEncoding && (useUTF8ForFileName || useUTF8ForComment)) {
        // Bit 11: Language encoding flag (EFS).
        bitflag |= 0x0800;
    }

    var extFileAttr = 0;
    var versionMadeBy = 0;
    if (dir) {
        // dos or unix, we set the dos dir flag
        extFileAttr |= 0x00010;
    }
    if (platform === "UNIX") {
        versionMadeBy = 0x031E; // UNIX, version 3.0
        extFileAttr |= generateUnixExternalFileAttr(file.unixPermissions, dir);
    } else { // DOS or other, fallback to DOS
        versionMadeBy = 0x0014; // DOS, version 2.0
        extFileAttr |= generateDosExternalFileAttr(file.dosPermissions, dir);
    }

    // date
    // @see http://www.delorie.com/djgpp/doc/rbinter/it/52/13.html
    // @see http://www.delorie.com/djgpp/doc/rbinter/it/65/16.html
    // @see http://www.delorie.com/djgpp/doc/rbinter/it/66/16.html

    dosTime = date.getUTCHours();
    dosTime = dosTime << 6;
    dosTime = dosTime | date.getUTCMinutes();
    dosTime = dosTime << 5;
    dosTime = dosTime | date.getUTCSeconds() / 2; // DOS stores seconds with 2s granularity

    dosDate = date.getUTCFullYear() - 1980; // DOS epoch is 1980
    dosDate = dosDate << 4;
    dosDate = dosDate | (date.getUTCMonth() + 1);
    dosDate = dosDate << 5;
    dosDate = dosDate | date.getUTCDate();

    if (useUTF8ForFileName) {
        // set the unicode path extra field. unzip needs at least one extra
        // field to correctly handle unicode path, so using the path is as good
        // as any other information. This could improve the situation with
        // other archive managers too.
        // This field is usually used without the utf8 flag, with a non
        // unicode path in the header (winrar, winzip). This helps (a bit)
        // with the messy Windows' default compressed folders feature but
        // breaks on p7zip which doesn't seek the unicode path extra field.
        // So for now, UTF-8 everywhere !
        unicodePathExtraField =
            // Version
            decToHex(1, 1) +
            // NameCRC32
            decToHex(crc32(encodedFileName), 4) +
            // UnicodeName
            utfEncodedFileName;

        extraFields +=
            // Info-ZIP Unicode Path Extra Field
            "\x75\x70" +
            // size
            decToHex(unicodePathExtraField.length, 2) +
            // content
            unicodePathExtraField;
    }

    if (useUTF8ForComment) {
        unicodeCommentExtraField =
            // Version
            decToHex(1, 1) +
            // CommentCRC32
            decToHex(crc32(encodedComment), 4) +
            // UnicodeName
            utfEncodedComment;

        extraFields +=
            // Info-ZIP Unicode Comment Extra Field
            "\x75\x63" +
            // size
            decToHex(unicodeCommentExtraField.length, 2) +
            // content
            unicodeCommentExtraField;
    }

    var header = "";

    // version needed to extract
    header += "\x0A\x00";
    // general purpose bit flag
    header += decToHex(bitflag, 2);
    // compression method
    header += compression.magic;
    // last mod file time
    header += decToHex(dosTime, 2);
    // last mod file date
    header += decToHex(dosDate, 2);
    // crc-32
    header += decToHex(dataInfo.crc32, 4);
    // compressed size
    header += decToHex(dataInfo.compressedSize, 4);
    // uncompressed size
    header += decToHex(dataInfo.uncompressedSize, 4);
    // file name length
    header += decToHex(encodedFileName.length, 2);
    // extra field length
    header += decToHex(extraFields.length, 2);

    var fileRecord = signature.LOCAL_FILE_HEADER + header + encodedFileName + extraFields;

    var dirRecord = signature.CENTRAL_FILE_HEADER +
        // version made by (00: DOS)
        decToHex(versionMadeBy, 2) +
        // file header (common to file and central directory)
        header +
        // file comment length
        decToHex(encodedComment.length, 2) +
        // disk number start
        "\x00\x00" +
        // internal file attributes TODO
        "\x00\x00" +
        // external file attributes
        decToHex(extFileAttr, 4) +
        // relative offset of local header
        decToHex(offset, 4) +
        // file name
        encodedFileName +
        // extra field
        extraFields +
        // file comment
        encodedComment;

    return {
        fileRecord: fileRecord,
        dirRecord: dirRecord
    };
};

/**
 * Generate the EOCD record.
 * @param {Number} entriesCount the number of entries in the zip file.
 * @param {Number} centralDirLength the length (in bytes) of the central dir.
 * @param {Number} localDirLength the length (in bytes) of the local dir.
 * @param {String} comment the zip file comment as a binary string.
 * @param {Function} encodeFileName the function to encode the comment.
 * @return {String} the EOCD record.
 */
var generateCentralDirectoryEnd = function(entriesCount, centralDirLength, localDirLength, comment, encodeFileName) {
    var dirEnd = "";
    var encodedComment = utils.transformTo("string", encodeFileName(comment));

    // end of central dir signature
    dirEnd = signature.CENTRAL_DIRECTORY_END +
        // number of this disk
        "\x00\x00" +
        // number of the disk with the start of the central directory
        "\x00\x00" +
        // total number of entries in the central directory on this disk
        decToHex(entriesCount, 2) +
        // total number of entries in the central directory
        decToHex(entriesCount, 2) +
        // size of the central directory   4 bytes
        decToHex(centralDirLength, 4) +
        // offset of start of central directory with respect to the starting disk number
        decToHex(localDirLength, 4) +
        // .ZIP file comment length
        decToHex(encodedComment.length, 2) +
        // .ZIP file comment
        encodedComment;

    return dirEnd;
};

/**
 * Generate data descriptors for a file entry.
 * @param {Object} streamInfo the hash generated by a worker, containing informations
 * on the file entry.
 * @return {String} the data descriptors.
 */
var generateDataDescriptors = function(streamInfo) {
    var descriptor = "";
    descriptor = signature.DATA_DESCRIPTOR +
        // crc-32                          4 bytes
        decToHex(streamInfo['crc32'], 4) +
        // compressed size                 4 bytes
        decToHex(streamInfo['compressedSize'], 4) +
        // uncompressed size               4 bytes
        decToHex(streamInfo['uncompressedSize'], 4);

    return descriptor;
};

/**
 * A worker to concatenate other workers to create a zip file.
 * @param {Boolean} streamFiles `true` to stream the content of the files,
 * `false` to accumulate it.
 * @param {String} comment the comment to use.
 * @param {String} platform the platform to use, "UNIX" or "DOS".
 * @param {Function} encodeFileName the function to encode file names and comments.
 */
function ZipFileWorker(streamFiles, comment, platform, encodeFileName) {
    GenericWorker.call(this, "ZipFileWorker");
    // The number of bytes written so far. This doesn't count accumulated chunks.
    this.bytesWritten = 0;
    // The comment of the zip file
    this.zipComment = comment;
    // The platform "generating" the zip file.
    this.zipPlatform = platform;
    // the function to encode file names and comments.
    this.encodeFileName = encodeFileName;
    // Should we stream the content of the files ?
    this.streamFiles = streamFiles;
    // If `streamFiles` is false, we will need to accumulate the content of the
    // files to calculate sizes / crc32 (and write them before the content).
    // This boolean indicates if we are accumulating chunks (it will change a lot
    // during the lifetime of this worker).
    this.accumulate = false;
    // The buffer receiving chunks when accumulating content.
    this.contentBuffer = [];
    // The list of generated directory records.
    this.dirRecords = [];
    // The offset (in bytes) from the beginning of the zip file for the current source.
    this.currentSourceOffset = 0;
    // The total number of entries in this zip file.
    this.entriesCount = 0;
    // the name of the file currently being added, null when handling the end of the zip file.
    // Used for the emited metadata.
    this.currentFile = null;

    this._sources = [];
}
utils.inherits(ZipFileWorker, GenericWorker);

/**
 * @see GenericWorker.push
 */
ZipFileWorker.prototype.push = function(chunk) {

    var currentFilePercent = chunk.meta.percent || 0;
    var entriesCount = this.entriesCount;
    var remainingFiles = this._sources.length;

    if (this.accumulate) {
        this.contentBuffer.push(chunk);
    } else {
        this.bytesWritten += chunk.data.length;

        GenericWorker.prototype.push.call(this, {
            data: chunk.data,
            meta: {
                currentFile: this.currentFile,
                percent: entriesCount ? (currentFilePercent + 100 * (entriesCount - remainingFiles - 1)) / entriesCount : 100
            }
        });
    }
};

/**
 * The worker started a new source (an other worker).
 * @param {Object} streamInfo the streamInfo object from the new source.
 */
ZipFileWorker.prototype.openedSource = function(streamInfo) {
    this.currentSourceOffset = this.bytesWritten;
    this.currentFile = streamInfo.name;

    var streamedContent = this.streamFiles && !streamInfo.dir;

    // don't stream folders (because they don't have any content)
    if (streamedContent) {
        var record = generateZipParts(streamInfo, streamedContent, false, this.currentSourceOffset, this.zipPlatform, this.encodeFileName);
        this.push({
            data: record.fileRecord,
            meta: {percent: 0}
        });
    } else {
        // we need to wait for the whole file before pushing anything
        this.accumulate = true;
    }
};

/**
 * The worker finished a source (an other worker).
 * @param {Object} streamInfo the streamInfo object from the finished source.
 */
ZipFileWorker.prototype.closedSource = function(streamInfo) {
    this.accumulate = false;
    var streamedContent = this.streamFiles && !streamInfo.dir;
    var record = generateZipParts(streamInfo, streamedContent, true, this.currentSourceOffset, this.zipPlatform, this.encodeFileName);

    this.dirRecords.push(record.dirRecord);
    if (streamedContent) {
        // after the streamed file, we put data descriptors
        this.push({
            data: generateDataDescriptors(streamInfo),
            meta: {percent: 100}
        });
    } else {
        // the content wasn't streamed, we need to push everything now
        // first the file record, then the content
        this.push({
            data: record.fileRecord,
            meta: {percent: 0}
        });
        while (this.contentBuffer.length) {
            this.push(this.contentBuffer.shift());
        }
    }
    this.currentFile = null;
};

/**
 * @see GenericWorker.flush
 */
ZipFileWorker.prototype.flush = function() {

    var localDirLength = this.bytesWritten;
    for (var i = 0; i < this.dirRecords.length; i++) {
        this.push({
            data: this.dirRecords[i],
            meta: {percent: 100}
        });
    }
    var centralDirLength = this.bytesWritten - localDirLength;

    var dirEnd = generateCentralDirectoryEnd(this.dirRecords.length, centralDirLength, localDirLength, this.zipComment, this.encodeFileName);

    this.push({
        data: dirEnd,
        meta: {percent: 100}
    });
};

/**
 * Prepare the next source to be read.
 */
ZipFileWorker.prototype.prepareNextSource = function() {
    this.previous = this._sources.shift();
    this.openedSource(this.previous.streamInfo);
    if (this.isPaused) {
        this.previous.pause();
    } else {
        this.previous.resume();
    }
};

/**
 * @see GenericWorker.registerPrevious
 */
ZipFileWorker.prototype.registerPrevious = function(previous) {
    this._sources.push(previous);
    var self = this;

    previous.on('data', function(chunk) {
        self.processChunk(chunk);
    });
    previous.on('end', function() {
        self.closedSource(self.previous.streamInfo);
        if (self._sources.length) {
            self.prepareNextSource();
        } else {
            self.end();
        }
    });
    previous.on('error', function(e) {
        self.error(e);
    });
    return this;
};

/**
 * @see GenericWorker.resume
 */
ZipFileWorker.prototype.resume = function() {
    if (!GenericWorker.prototype.resume.call(this)) {
        return false;
    }

    if (!this.previous && this._sources.length) {
        this.prepareNextSource();
        return true;
    }
    if (!this.previous && !this._sources.length && !this.generatedError) {
        this.end();
        return true;
    }
};

/**
 * @see GenericWorker.error
 */
ZipFileWorker.prototype.error = function(e) {
    var sources = this._sources;
    if (!GenericWorker.prototype.error.call(this, e)) {
        return false;
    }
    for (var i = 0; i < sources.length; i++) {
        try {
            sources[i].error(e);
        } catch (e) {
            // the `error` exploded, nothing to do
        }
    }
    return true;
};

/**
 * @see GenericWorker.lock
 */
ZipFileWorker.prototype.lock = function() {
    GenericWorker.prototype.lock.call(this);
    var sources = this._sources;
    for (var i = 0; i < sources.length; i++) {
        sources[i].lock();
    }
};

module.exports = ZipFileWorker;