it("should raises an error when don't pass a second required argument", () => {
// $ExpectError (must pass in a module)
Archiver.registerFormat('zip');
});
});
before(() => {
try {
archiver.registerFormat('zip-encrypted', require('../'));
} catch (e) {
// already registered
}
});
before(() => {
try {
archiver.registerFormat('zip-encrypted', require('../lib/zip-encrypted'));
} catch (e) {
// already registered
}
});
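// A minimal, self-contained sketch of using the 'zip-encrypted' format registered in the
// hooks above, assuming the archiver and archiver-zip-encrypted packages; the output path,
// password, and file entry are hypothetical.
const archiverLib = require('archiver');
const fsLib = require('fs');
try {
  archiverLib.registerFormat('zip-encrypted', require('archiver-zip-encrypted'));
} catch (e) {
  // already registered
}
const encrypted = archiverLib.create('zip-encrypted', {
  zlib: { level: 8 },
  encryptionMethod: 'aes256', // 'zip20' is the other method the plugin supports
  password: 'secret'
});
encrypted.pipe(fsLib.createWriteStream('encrypted-example.zip'));
encrypted.append('hello world', { name: 'hello.txt' });
encrypted.finalize();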
Archiver("zip", { gzp: true });
Archiver("zip", { gzip: true });
// $ExpectError (values of options should use correct type)
Archiver("zip", { statConcurrency: "1" });
Archiver("zip", { statConcurrency: 1 });
// $ExpectError (must pass in a format)
Archiver.create();
Archiver.create("zip");
Archiver.create("zip", {});
// $ExpectError (must pass in a format and module)
Archiver.registerFormat();
// $ExpectError (must pass in a module)
Archiver.registerFormat("zip");
Archiver.registerFormat("zip", () => {});
const options = {
statConcurrency: 1,
allowHalfOpen: true,
readableObjectMode: true,
writeableObjectMode: true,
decodeStrings: true,
encoding: "test",
highWaterMark: 1,
objectmode: true,
comment: "test",
forceLocalTime: true,
forceZip64: true,
store: true,
zlib: {},
};
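// A minimal sketch (not part of the original test) of passing such an options object to
// the factory and to create():
Archiver("zip", options);
Archiver.create("zip", options);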
export default async function createZip({ zipPath, zipContents }: CreateZipOptions) {
const zipStream = fs.createWriteStream(zipPath);
const archive = archiver('zip', { zlib: { level: 9 } });
return new Promise((resolve, reject) => {
// listen for all archive data to be written
// 'close' event is fired only when a file descriptor is involved
zipStream.on('close', () => {
const totalBytes = archive.pointer();
console.log(`${totalBytes} total bytes`);
console.log('archiver has been finalized and the output file descriptor has closed.');
resolve({ totalBytes });
});
// This event is fired when the data source has been drained, regardless of what that source was.
// It is not part of this library; it comes from the Node.js Stream API.
// @see: https://nodejs.org/api/stream.html#stream_event_end
zipStream.on('end', () => {
console.log('Data has been drained');
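// The createZip example above is cut off here. A minimal, self-contained sketch (not the
// original implementation) of the same pattern end to end: resolve on 'close', surface
// errors, pipe before appending, then finalize. The { name, data } entry shape is assumed.
const fsSketch = require('fs');
const archiverSketch = require('archiver');
function createZipSketch(zipPath, entries) {
  return new Promise((resolve, reject) => {
    const out = fsSketch.createWriteStream(zipPath);
    const zip = archiverSketch('zip', { zlib: { level: 9 } });
    out.on('close', () => resolve({ totalBytes: zip.pointer() })); // all archive data written
    zip.on('error', reject); // surface archiver errors through the promise
    zip.pipe(out); // wire the archive to the output before appending entries
    entries.forEach(({ name, data }) => zip.append(data, { name }));
    zip.finalize(); // signal that no more entries will be added
  });
}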
return new Promise((resolve) => {
console.log('1. Compressing...'); // eslint-disable-line
const output = createWriteStream(compressedProjectLocation);
const archive = archiver('zip');
archive.pipe(output);
archive
.directory('config', 'config')
.directory('node_modules', 'node_modules')
.directory('public', 'public')
.file('package.json')
.file('index.js')
.finalize();
output.on('close', () => {
console.log(' - Compressing was successful'); // eslint-disable-line
resolve();
});
archive.on('error', (err) => {
throw new Error('Could not compress: ' + err);
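// Note (not from the original): throwing inside an 'error' listener is not caught by the
// surrounding Promise executor and surfaces as an uncaught exception. The more common
// pattern is to reject instead, e.g. archive.on('error', (err) => reject(err)), with the
// executor written as new Promise((resolve, reject) => { ... }).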
if (fileType === FILE_TYPE.SOURCE_FILES) {
if (result && result.experiment && result.experiment.sources) {
files = result.experiment.sources.map(source => {
return {
name: source[0],
file_id: source[1]
}
});
} else {
res.status(500).json({message: 'Error: Unable to fetch source files for runId: ' + runId});
}
} else {
// fileType: artifacts
files = result.artifacts;
}
const archive = archiver('zip', {
zlib: { level: 5 } // Sets the compression level.
});
const fileName = `${fileType}-${runId}.zip`; // ex: source-files-1.zip
const dirName = `${fileType}-${runId}`; // ex: source-files-1
archive.on('error', function(err) {
/* eslint-disable no-console */
console.error('An error occurred: ', err);
res.status(500);
next(err);
});
files.forEach(function(file) {
const readStream = gfs.createReadStream({
_id: file.file_id
});
// error handling, e.g. file does not exist
readStream.on('error', function (err) {
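// (Assumed continuation, not part of the original snippet.) Such a handler typically
// forwards the stream error (for example next(err)), appends each readStream to the
// archive with archive.append(readStream, { name: dirName + '/' + file.name }), and after
// the forEach streams the archive to the client via res.attachment(fileName),
// archive.pipe(res), and archive.finalize().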
output.on('end', function () { });
archive.on('warning', function (err) { console.log('Backup warning: ' + err); });
archive.on('error', function (err) { console.log('Backup error: ' + err); });
archive.pipe(output);
archive.file(newBackupPath + '.archive', { name: newBackupFile + '.archive' });
archive.directory(parent.datapath, 'meshcentral-data');
archive.finalize();
} catch (ex) { console.log(ex); }
});
} else {
// Perform a NeDB backup
var archiver = require('archiver');
var output = parent.fs.createWriteStream(newAutoBackupPath + '.zip');
var archive = null;
if (parent.config.settings.autobackup && (typeof parent.config.settings.autobackup.zippassword == 'string')) {
try { archiver.registerFormat('zip-encrypted', require("archiver-zip-encrypted")); } catch (ex) { }
archive = archiver.create('zip-encrypted', { zlib: { level: 9 }, encryptionMethod: 'aes256', password: parent.config.settings.autobackup.zippassword });
} else {
archive = archiver('zip', { zlib: { level: 9 } });
}
output.on('close', function () { obj.performingBackup = false; });
output.on('end', function () { });
archive.on('warning', function (err) { console.log('Backup warning: ' + err); });
archive.on('error', function (err) { console.log('Backup error: ' + err); });
archive.pipe(output);
archive.directory(parent.datapath, 'meshcentral-data');
archive.finalize();
}
// Remove old backups
if (parent.config.settings.autobackup && (typeof parent.config.settings.autobackup.keeplastdaysbackup == 'number')) {
var cutoffDate = new Date();
s3Zip.archiveStream = function (stream, filesS3, filesZip) {
const self = this
const folder = this.folder || ''
if (this.registerFormat) {
archiver.registerFormat(this.registerFormat, this.formatModule)
}
const archive = archiver(this.format || 'zip', this.archiverOpts || {})
archive.on('error', function (err) {
self.debug && console.log('archive error', err)
})
stream
.on('data', function (file) {
if (file.path[file.path.length - 1] === '/') {
self.debug && console.log('don\'t append to zip', file.path)
return
}
let fname
if (filesZip) {
// Place files_s3[i] into the archive as files_zip[i]
const i = filesS3.indexOf(file.path.startsWith(folder) ? file.path.substr(folder.length) : file.path)
fname = (i >= 0 && i < filesZip.length) ? filesZip[i] : file.path
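// (Assumed continuation, not part of the original snippet.) Once the entry name is
// resolved, s3Zip would typically append the downloaded object to the archive with
// archive.append(...) and call archive.finalize() when the source stream emits 'end'.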
exports.tar = function(files, done) {
if (typeof exports.options.archive !== 'string' || exports.options.archive.length === 0) {
grunt.fail.warn('Unable to compress; no valid archive file was specified.');
return;
}
var mode = exports.options.mode;
if (mode === 'tgz') {
mode = 'tar';
exports.options.gzip = true;
}
var archive = archiver.create(mode, exports.options);
var dest = exports.options.archive;
var dataWhitelist = ['comment', 'date', 'mode', 'store', 'gid', 'uid'];
var sourcePaths = {};
// Ensure dest folder exists
grunt.file.mkdir(path.dirname(dest));
// Where to write the file
var destStream = fs.createWriteStream(dest);
archive.on('error', function(err) {
grunt.log.error(err);
grunt.fail.warn('Archiving failed.');
});
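// (Assumed continuation, not part of the original task.) The remaining steps in such a
// Grunt task are typically: archive.pipe(destStream); add each expanded source with
// archive.file() or archive.directory(); call archive.finalize(); and invoke done() from
// the destStream 'close' handler so Grunt knows the async task has finished.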