generateZipFile: function (files, _event, platformSpecificManifest, zipOptions) {
    var destStream = temp.createWriteStream(),
        archive = archiver('zip', zipOptions || {});
    return new Promise(function(resolve, reject) {
        // Resolve on close
        destStream.on('close', function () {
            resolve(destStream.path);
        });
        // Reject on Error
        archive.on('error', reject);
        // Add the files
        var filesBulk = [];
        files.forEach(function(file){
            if(file.dest === 'package.json' && platformSpecificManifest){

return new Promise(function(resolve, reject) {
    var tempFileStream = temp.createWriteStream({
        dir: tempAvatarDirectory
    });
    tempFileStream
        .on('finish', function() {
            var tempFilePath = tempFileStream.path;
            // Store it on our map as a cache lookup
            urlFilePathCacheMap.set(url, tempFilePath);
            resolve(tempFilePath);
        })
        .on('error', function(err) {
            reject(err);
        });

request({ url: url }, function (error, response, body) {
    if (error) {
        // Usually means we never got a response - server is down, no DNS entry, etc.
        _endDownload(downloadId, Errors.NO_SERVER_RESPONSE);
        return;
    }
    if (response.statusCode !== 200) {
        _endDownload(downloadId, [Errors.BAD_HTTP_STATUS, response.statusCode]);
        return;
    }
    var stream = temp.createWriteStream("brackets");
    if (!stream) {
        _endDownload(downloadId, Errors.CANNOT_WRITE_TEMP);
        return;
    }
    pendingDownloads[downloadId].localPath = stream.path;
    pendingDownloads[downloadId].outStream = stream;
    stream.write(body);
    _endDownload(downloadId);
});

return new Promise((resolve, reject) => {
    const {pathname} = url.parse(fileURL); // gives /pub/whatever/example.png
    const basename = path.basename(pathname); // gives example.png
    const writeStream = temp.createWriteStream({suffix: basename});
    const outputPath = writeStream.path;
    writeStream
        .on("error", reject)
        .on("finish", () => {
            resolve(outputPath);
        });
    request.get(fileURL)
        .on("error", reject)
        .pipe(writeStream);
});
};
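The fragment above resolves with the temporary file's path once the write stream finishes. A self-contained sketch of the same pattern is shown here for reference; the downloadToTemp name and the example URL are placeholders, and it assumes the request and temp modules already used in the other snippets on this page.

const path = require('path');
const url = require('url');
const request = require('request');
const temp = require('temp');

// Sketch: stream a remote file into a temp file whose name ends with the
// remote basename, and resolve with that temp path.
function downloadToTemp(fileURL) {
    return new Promise((resolve, reject) => {
        const {pathname} = url.parse(fileURL);
        const writeStream = temp.createWriteStream({suffix: path.basename(pathname)});
        writeStream.on('error', reject);
        writeStream.on('finish', () => resolve(writeStream.path));
        request.get(fileURL).on('error', reject).pipe(writeStream);
    });
}

// Placeholder usage:
downloadToTemp('https://example.com/pub/whatever/example.png')
    .then((tmpPath) => console.log('saved to', tmpPath))
    .catch((err) => console.error(err));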

function testCreateWriteStream() {
    const stream = temp.createWriteStream("HelloStreamAffix");
    stream.write("data");
    const stream2 = temp.createWriteStream();
}
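The test above exercises both call forms. As a rough sketch based on the npm temp package's documented affixes argument (worth confirming against the installed version), createWriteStream accepts either a string affix or an object with prefix, suffix, and dir, and the returned write stream exposes the generated file's location via .path:

const temp = require('temp').track(); // track() so the temp files are cleaned up on process exit

// String affix, as in the "brackets", "HelloStreamAffix", and 'dpm-' snippets on this page.
const s1 = temp.createWriteStream('example-affix');
s1.end('hello');
console.log(s1.path); // path of the generated file under the OS temp directory

// Object affix with prefix/suffix, as in the chrome2calltree and avatar-cache snippets.
const s2 = temp.createWriteStream({ prefix: 'report-', suffix: '.log' });
s2.end('done\n');
console.log(s2.path); // e.g. something like /tmp/report-<random>.log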

createTmpFS = function(data) {
    var stream = temp.createWriteStream()
    fs.writeFileSync(stream.path, jsyaml.safeDump(data))
    return stream.path
}

var inStream;
if (args.infile) {
    inStream = fs.createReadStream(args.infile);
} else {
    if (process.stdin.isTTY) {
        parser.printHelp();
        process.exit(1);
    }
    inStream = process.stdin;
}

var outStream;
if (args.outfile) {
    outStream = fs.createWriteStream(args.outfile);
} else if (args.kcachegrind) {
    outStream = temp.createWriteStream({
        'prefix': 'chrome2calltree',
        'suffix': '.log'
    });
} else {
    outStream = process.stdout;
}
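In the fragment above the rest of the program only needs a writable stream; when the args.kcachegrind branch is taken, the file's location can be recovered afterwards from the stream's .path. A minimal sketch of that follow-up step, using placeholder output rather than the project's real conversion result:

const temp = require('temp');

// Sketch (not the project's code): write output to a temp .log file and
// report its location once the stream has finished.
const outStream = temp.createWriteStream({ prefix: 'chrome2calltree', suffix: '.log' });
outStream.on('finish', function() {
    // Streams from temp.createWriteStream() expose the file they were opened
    // on via .path; process.stdout, by contrast, has no such property.
    console.error('output written to ' + outStream.path);
});
outStream.end('placeholder output\n');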

var readEntireStream = function(stream, cb) {
    var buffer = "";
    stream.resume();
    stream.setEncoding("utf8");
    stream.on("data", function(chunk) {
        buffer += chunk;
    });
    stream.on("end", function() {
        cb(buffer);
    });
};
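readEntireStream buffers a stream as UTF-8 and hands the full contents to a callback; a typical call site, sketched with a placeholder callback, would look like:

// Example usage: slurp stdin and report how much was read.
readEntireStream(process.stdin, function(contents) {
    console.error('read ' + contents.length + ' characters');
});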

doc.date = (new Date).toISOString();

var data = doc.resources.filter(function(x){return 'path' in x});
var dataPaths = data.map(function(x){return x.path});
var dataNames = data.map(function(x){return x.name});

// compress everything (not ignored) but the data and the package.json
var ignore = new Ignore({
    path: root,
    ignoreFiles: ['.gitignore', '.npmignore', '.dpmignore'].map(function(x){return path.resolve(root, x)})
});
ignore.addIgnoreRules(dataPaths.concat(['package.json', '.git', 'data_modules', 'node_modules']), 'custom-rules');

// write tarball in a temp dir
var ws = ignore.pipe(tar.Pack()).pipe(zlib.createGzip()).pipe(temp.createWriteStream('dpm-'));
ws.on('error', callback);
ws.on('finish', function(){
    dataPaths = dataPaths.map(function(p){return path.resolve(root, p);});
    dataPaths.push(ws.path);
    // get stats
    async.map(dataPaths, fs.stat, function(err, stats){
        if(err) return callback(err);
        // append _attachments to datapackage
        doc._attachments = {
            'debug.tar.gz': {follows: true, length: (stats.pop()).size, 'content_type': 'application/x-gtar', _stream: fs.createReadStream(dataPaths.pop())}
        };
        dataPaths.forEach(function(p, i){
            doc._attachments[dataNames[i] + path.extname(p)] = {

var req = https.get(url, function(res) {
    if (res.statusCode !== 200) {
        _endDownload(downloadId, [Errors.BAD_HTTP_STATUS, res.statusCode]);
        return;
    }
    var stream = temp.createWriteStream("brackets");
    if (!stream) {
        _endDownload(downloadId, Errors.CANNOT_WRITE_TEMP);
        return;
    }
    pendingDownloads[downloadId].localPath = stream.path;
    pendingDownloads[downloadId].outStream = stream;
    res.on("data", function(d) {
        stream.write(d);
    });
    res.on("end", function () {
        _endDownload(downloadId);
    });
}).on("error", function(e) {

                complete: '=',
                incomplete: '-',
                width: 20,
                total: len
            });
        } else {
            bar.total += len;
        }
    }
});
rq.on('data', function(chunk) {
    len && bar && bar.tick(chunk.length);
});

if (extention === '.zip') {
    stream = temp.createWriteStream();
    stream.on('close', function() {
        if (done.promise.isRejected()) return;
        self.extractZip(stream.path, cachepath).then(self.stripRootFolder).then(function() {
            self.moveExtractedFilesToDestination(cachepath, destination).then(function() {
                done.resolve();
            });
        });
    });
    rq.pipe(stream);
}

if (extention === '.gz') {
    rq.on('response', function(res) {
        if (res.statusCode !== 200) return;