// Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
// NOTE(review): this snippet is truncated — the function is never closed and the
// final console.log is cut off mid-expression. Code left byte-identical.
// Runs every test in `manifest.tests` with at most `workers` in flight at once,
// then reports a pass summary.
function executeTests(manifest, callback) {
async.mapLimit(manifest.tests, workers,
// 1.2.1 Execute an individual test
function (test, callback) {
// Fetch the action stream and the expected-result stream before comparing.
async.series({
actionStream: self._fetch.bind(self, test.action),
resultStream: self._fetch.bind(self, test.result),
},
function (error, results) {
if (error) return callback(error);
self._performTest(test, results.actionStream, callback);
});
},
// 1.2.2 Show the summary of all performed tests
function showSummary(error, tests) {
// Sum each test's `success` flag into a total score — presumably 0/1
// per test; verify against _performTest's result shape.
var score = tests.reduce(function (sum, test) { return sum + test.success; }, 0);
// Skipped tests still go through _verifyResult for reporting.
manifest.skipped.forEach(function (test) { self._verifyResult(test); });
console.log(('* passed ' + score +
/**
 * Aborts every in-progress multipart upload in `mpus`, strictly one at a time.
 *
 * @param {object} authInfo - authentication context forwarded to the abort call
 * @param {string} bucketName - bucket holding the uploads
 * @param {object[]} mpus - multipart-upload records; each `mpu.key` encodes
 *   `overview${sep}${objectKey}${sep}${uploadId}`
 * @param {object} log - request logger forwarded to the abort call
 * @param {function} cb - final callback, invoked once all aborts finish or
 *   with the first error encountered
 */
function _deleteOngoingMPUs(authInfo, bucketName, mpus, log, cb) {
    async.mapLimit(mpus, 1, (mpu, done) => {
        // Keys written before the splitter change still use the legacy separator.
        const separator = mpu.key.includes(oldSplitter) ? oldSplitter : splitter;
        // Key layout: `overview${sep}${objectKey}${sep}${uploadId}` — index 0 is
        // the "overview" prefix, which we discard.
        const keyParts = mpu.key.split(separator);
        const objectKey = keyParts[1];
        const uploadId = keyParts[2];
        abortMultipartUpload(authInfo, bucketName, objectKey, uploadId, log, done);
    }, cb);
}
/** NOTE(review): truncated doc comment from the original file — closed here so it does not swallow the code below. */
// NOTE(review): this snippet is also truncated — the exec() call and the
// surrounding callbacks are never closed. Code left byte-identical.
// Diffs `branch` against FS_BRANCH and builds added/missing commit lists.
gitHelper.util.diffCommits(branch, FS_BRANCH, repo, function(err, info) {
if (err) {
// NOTACOMMIT appears to mean one side has no commits yet — treated as an empty diff.
if (err.code == 'NOTACOMMIT')
return callback(null, { added: [], missing: [] }); // empty result
return callback(err);
}
// find commit notes with info on what belongs to this changeset
async.mapLimit(info.added.concat(info.missing), 20, function(change, callback) {
gitHelper.util.readCommitInfo(repo, change.commitId, NAMESPACE, function(err, info) {
// Attach the note (if any) to the change record; this inner `info`
// shadows the outer diff result.
change.note = info && info.notes;
callback(err, info);
});
}, function(err) {
if (err) return callback(err);
// FIXME: not necessary anymore when every FS access is committed too
exec('git', ['ls-files', '-mdso', '--exclude-standard'], { cwd: repo }, function(err, changes) {
if (err) return callback(err);
// Any dirty working-tree files on a non-FS branch get a synthetic entry.
if ((branch != FS_BRANCH) && (changes[0].trim() != '')) {
// artificial commit for uncommitted changes
info.missing.unshift({
commitId: null, message: '[Filesystem changes]', note: null
});
}
}
// NOTE(review): fragment — the enclosing function's signature is outside this
// view; code left byte-identical.
// Serve the cached healthcheck response while still fresh (non-startup only).
if (!flightCheckOnStartUp && checkStatus.response &&
Date.now() - checkStatus.time
< externalBackendHealthCheckInterval) {
return process.nextTick(cb, null, checkStatus.response);
}
let locationsToCheck;
if (flightCheckOnStartUp) {
// check all locations if flight check on start up
locationsToCheck = locations;
} else {
// Steady-state: probe one randomly chosen location — presumably to
// spread load across backends; confirm with callers.
const randomLocation = locations[Math.floor(Math.random() *
locations.length)];
locationsToCheck = [randomLocation];
}
// Healthcheck up to 5 locations concurrently.
return async.mapLimit(locationsToCheck, 5, (location, next) => {
const client = clients[location];
client.healthcheck(location, next, flightCheckOnStartUp);
}, (err, results) => {
if (err) {
return cb(err);
}
// Cache the result with a timestamp for subsequent non-startup calls.
if (!flightCheckOnStartUp) {
checkStatus.response = results;
checkStatus.time = Date.now();
}
return cb(null, results);
});
},
// NOTE(review): fragment — `translateAzureMetaHeaders` has no body here, and the
// `}, callback => {` below belongs to a different, unrelated snippet that is
// itself truncated. Code left byte-identical.
translateAzureMetaHeaders(metaHeaders, tags) {
}, callback => {
// Skip hydration entirely when similar-record loading is disabled.
if (!loadSimilarRecords) {
return process.nextTick(callback);
}
// Hydrate up to 4 similar records concurrently.
async.mapLimit(this.similarRecords, 4, (similar, callback) => {
// Already hydrated — hand it back asynchronously for consistent ordering.
if (similar.recordModel) {
return process.nextTick(() => callback(null, similar));
}
recordModel(this.type).findById(similar.record, (err, record) => {
/* istanbul ignore if */
// Errors and missing records yield undefined; filtered out downstream.
if (err || !record) {
return callback();
}
similar.recordModel = record;
callback(null, similar);
});
}, (err, similar) => {
// We filter out any invalid/un-found records
// TODO: We should log out some details on when this
// Searches the history of every repo matching `repoRegex`, with concurrency
// bounded by bosco.concurrency.network, and collects per-repo results.
// Individual failures are reported via bosco.error but never propagated —
// a failed repo simply contributes an undefined result.
function searchRepoHistories(callback) {
    async.mapLimit(repos, bosco.concurrency.network, (repo, next) => {
        // Skip repos outside the user-supplied filter.
        if (!repo.match(repoRegex)) return next();
        const path = bosco.getRepoPath(repo);
        searchRepoHistory(bosco, args, repo, path, (err, result) => {
            // err.code is 1 when nothing is found.
            if (err && err.code !== 1) {
                bosco.error(err.message.substring(0, err.message.indexOf('\n')));
            }
            next(null, result);
        });
    }, callback);
}
// NOTE(review): fragment — the enclosing function and the Promise's closing
// `})` are outside this view. Code left byte-identical. TypeScript (typed
// callback parameters below), despite the .js metadata hint.
return new Promise((resolve, reject) => {
// No blocks requested: resolve immediately with an empty array (callers
// otherwise receive an object keyed by block number — note the asymmetry).
if (blockNumbers.length === 0) return resolve([]);
console.log(`Fetching blocks details from ${blockNumbers[0]} to ${blockNumbers[blockNumbers.length - 1]}`);
let fetchedBlockCount = 0;
let highestBlockFetched = 0;
// Download block details with bounded parallelism.
mapLimit(blockNumbers, BLOCK_DOWNLOAD_PARALLEL_LIMIT, async (blockNumber, nextBlockNumber) => {
try {
const block = await augur.provider.getBlock(blockNumber);
// A null block is treated as a fatal provider-sync issue.
if (block == null) return nextBlockNumber(new Error(`Block ${blockNumber} returned null response. This is usually an issue with a partially sync'd parity warp node. See: https://github.com/paritytech/parity-ethereum/issues/7411`));
fetchedBlockCount++;
// Progress log every 10 fetched blocks.
if (fetchedBlockCount % 10 === 0) console.log(`Fetched ${fetchedBlockCount} / ${blockNumbers.length} block details (current: ${highestBlockFetched})`);
if (blockNumber > highestBlockFetched) highestBlockFetched = blockNumber;
nextBlockNumber(undefined, [blockNumber, block]);
} catch (e) {
return nextBlockNumber(new Error("Could not get block"));
}
}, (err: Error | undefined, blockDetails: Array<[number, BlockDetail]>) => {
if (err) return reject(err);
// Index the [blockNumber, block] pairs by block number for direct lookup.
const blockDetailsByBlock = _.fromPairs(blockDetails);
resolve(blockDetailsByBlock);
});
_createAndPushEntry(objectMds, done) {
if (objectMds.length > 0) {
return async.mapLimit(objectMds, 10, (objectMd, cb) => {
const objectMdEntry = this.createEntry.createPutEntry(objectMd,
this._targetZenkoBucket);
return cb(null, objectMdEntry);
}, (err, entries) => {
if (err) {
this.log.error('error sending objectMd to kafka', {
method: 'IngestionProducer._createAndPushEntry',
error: err,
});
}
return done(err, entries);
});
}
return done(null, []);
}
// Runs the `upstream` check for every repo matching `repoRegex`, with
// concurrency bounded by bosco.concurrency.network. Errors from individual
// repos are deliberately dropped — the completion callback takes no error
// and always succeeds.
function changedRepos(cb) {
    async.mapLimit(repos, bosco.concurrency.network, (repo, next) => {
        const repoPath = bosco.getRepoPath(repo);
        // Repos outside the filter are skipped outright.
        if (!repo.match(repoRegex)) return next();
        upstream(bosco, repoPath, next);
    }, () => cb());
}
// NOTE(review): fragment — the closing braces of `concat` (and any else branch
// for the empty-file-list case) are outside this view. Code left byte-identical.
// Reads every source file in data.files and writes their concatenation to
// data.destPath.
function concat(data, callback) {
if (data.files && data.files.length) {
// Read up to 1000 files concurrently; mapLimit preserves input order.
async.mapLimit(data.files, 1000, function (ref, next) {
fs.readFile(ref.srcPath, function (err, buffer) {
if (err) {
return next(err);
}
next(null, buffer.toString());
});
}, function (err, files) {
if (err) {
return callback(err);
}
// Join with EOL + ';' — presumably the ';' guards against files that end
// without a statement terminator; confirm against consumers of destPath.
var output = files.join(os.EOL + ';');
fs.writeFile(data.destPath, output, callback);
});