export const addTagsToEntities: ImportGroupsAsTagsThunk = () => (
  dispatch,
  getState
) => {
  dispatch(importTagsActions.started());
  const entities = getEntitiesWithGroupsToImport(getState());

  const linodeAccumulator = createAccumulator('linode', dispatch);
  const domainAccumulator = createAccumulator('domain', dispatch);

  Bluebird.join(
    Bluebird.reduce(entities.linodes, linodeAccumulator, {
      success: [],
      errors: []
    }),
    Bluebird.reduce(entities.domains, domainAccumulator, {
      success: [],
      errors: []
    }),
    dispatch,
    handleAccumulatedResponsesAndErrors
  )
    .then((totalErrors: TagError[]) => {
      if (isEmpty(totalErrors)) {
        storage.hasImportedGroups.set();
      }
    })
    .catch(() =>
      dispatch(
        // Errors from individual requests will be accumulated and passed to .then(); hitting
        // this block indicates something went wrong with .reduce() or .join().
        // It's unclear under what circumstances this could ever actually fire.
        // Assumed completion of the truncated snippet: dispatch the failure action
        // paired with started() above.
        importTagsActions.failed([
          { entityId: '', reason: 'There was an error importing your display groups.' }
        ])
      )
    );
};
export const enableAllBackups: EnableAllBackupsThunk = () => (
  dispatch,
  getState
) => {
  const { entities } = getState().__resources.linodes;

  const linodesWithoutBackups = entities.filter(
    linode => !linode.backups.enabled
  );

  dispatch(handleEnable());

  Bluebird.reduce(linodesWithoutBackups, gatherResponsesAndErrors, {
    success: [],
    errors: []
  })
    .then(response => {
      if (response.errors && !isEmpty(response.errors)) {
        dispatch(handleEnableError(response));
      } else {
        dispatch(handleEnableSuccess(response.success));
      }
      dispatch(updateMultipleLinodes(response.success));
      // GA Event
      sendBackupsEnabledEvent(
        `Enabled backups for ${response.success.length} Linodes`
      );
    })
    .catch(() =>
      // Assumed completion of the truncated snippet: surface a generic failure
      // if the reduce chain itself rejects.
      dispatch(
        handleEnableError({
          success: [],
          errors: [{ linodeId: 0, reason: 'There was an error enabling backups.' }]
        })
      )
    );
};
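Both Linode thunks above fold per-item API calls into a { success, errors } accumulator. The reducers themselves (createAccumulator, gatherResponsesAndErrors) are not shown; a minimal sketch of the pattern, with enableBackups standing in as an assumed API call:

// Hypothetical reducer for Bluebird.reduce: attempt one request per item and
// collect outcomes instead of failing fast. enableBackups is an assumed API call.
const gatherResponsesAndErrors = (accumulator, linode) =>
  enableBackups(linode.id)
    .then(() => ({
      ...accumulator,
      success: [...accumulator.success, linode]
    }))
    .catch(error => ({
      ...accumulator,
      errors: [...accumulator.errors, { linodeId: linode.id, reason: error.message }]
    }));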
const Promise = require('bluebird');
const _ = require('lodash');

sequence = function sequence(tasks, modelOptions, logger) {
  // utils/sequence.js does not offer an option to pass cloned arguments,
  // so each task receives its own deep copy of modelOptions and cannot
  // leak mutations into the next task.
  return Promise.reduce(tasks, function (results, task) {
    return task(_.cloneDeep(modelOptions), logger)
      .then(function (result) {
        results.push(result);
        return results;
      });
  }, []);
};
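To illustrate why the deep clone matters: even if one task mutates its options, the next task still sees the original values. A usage sketch with hypothetical tasks:

// Hypothetical tasks; each may freely mutate its own copy of the options.
const tasks = [
  (options, logger) => Promise.resolve(logger.info('step one', options.transacting)),
  (options, logger) => Promise.resolve(logger.info('step two', options.transacting))
];

sequence(tasks, { transacting: null }, console)
  .then(results => console.log(`${results.length} tasks completed in order`));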
const statistics = _.cloneDeep(this._accumulator);

// calculate utilizations
let utilization = (statistics.summary.allocations.time / duration) * 100;
statistics.summary.allocations.utilization = utilization;
utilization = (statistics.summary.maintenance.time / duration) * 100;
statistics.summary.maintenance.utilization = utilization;

// calculate day-based utilizations; resolving on process.nextTick yields to
// the event loop between dates so a long date range does not block it
const reducer = (accumulator, date) => {
  const obj = accumulator.dates[date];
  utilization = (obj.allocations.time / duration) * 100;
  obj.allocations.utilization = utilization;
  utilization = (obj.maintenance.time / duration) * 100;
  obj.maintenance.utilization = utilization;
  return new Promise(resolve => process.nextTick(() => resolve(accumulator)));
};
return Promise.reduce(Object.keys(statistics.dates), reducer, statistics);
}
.then(function (delegates) {
  delegates_rewards = delegates;
  return Promise.reduce(blocks, function (blocks, b) {
    var pk;
    pk = b.generatorPublicKey.toString('hex');
    if (blocks[pk]) {
      // Accumulate onto the existing per-delegate entry.
      blocks[pk].rewards += Number(b.reward);
    } else {
      blocks[pk] = {
        pk: pk,
        rewards: Number(b.reward)
      };
    }
    return blocks;
  }, {})
    .then(function (blocks) {
      expect(delegates_rewards).to.deep.equal(blocks);
    });
});
function batchedIdQuery (environment, type, ids) {
  const method = METHODS[type].method
  const entityTypeName = METHODS[type].name

  return Promise.reduce(getIdBatches(ids), (fullResponse, batch) => {
    return environment[method]({
      'sys.id[in]': batch,
      limit: batch.split(',').length
    })
      .then((response) => {
        fullResponse = [
          ...fullResponse,
          ...response.items
        ]
        logEmitter.emit('info', `Fetched ${fullResponse.length} of ${response.total} ${entityTypeName}`)
        return fullResponse
      })
  }, [])
}
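The helper getIdBatches is not shown above; a minimal sketch, assuming each batch is a comma-joined list of ids capped at a fixed size:

// Hypothetical helper: chunk ids into comma-joined query strings.
// BATCH_SIZE is an assumption; the real implementation may instead cap by URL length.
const BATCH_SIZE = 100

function getIdBatches (ids) {
  const batches = []
  for (let i = 0; i < ids.length; i += BATCH_SIZE) {
    batches.push(ids.slice(i, i + BATCH_SIZE).join(','))
  }
  return batches
}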
var mainPromise = bluebird.all(fkQueries).then(function createRecords() {
  // Insert one priority level at a time: records in a level may reference
  // ids created by the levels inserted before it.
  return bluebird.reduce(prioritized, function(buildingFinalResult, priorityLevel) {
    priorityLevel = resolveDependencies(buildingFinalResult, priorityLevel);
    priorityLevel = unescape(priorityLevel);
    var priorityLevelPromises = insertRecords(knexInst, priorityLevel, options.unique, options.showWarning);
    return bluebird.all(priorityLevelPromises).then(function(levelResults) {
      return addToFinalResult(buildingFinalResult, levelResults, withSpecIds);
    });
  }, {}).then(function(finalResult) {
    return stripSpecIds(finalResult);
  });
});
function applyGroupBy(opts, images) {
  opts.logger('Applying the groups...');

  // Apply each grouping function in turn; a failing groupFn simply leaves
  // the image without that group rather than rejecting the whole chain.
  return Promise.reduce(opts.groupBy, (images, groupFn) => {
    return Promise.map(images, (image) => {
      return groupFn(image)
        .then(group => {
          image.groups.push(group);
          return image;
        })
        .catch(() => image);
    });
  }, images).then(images => [opts, images]);
}
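A grouping function here is anything that maps an image to a promise of a group label. A hypothetical example that buckets images by orientation:

// Hypothetical groupFn: resolves to a label that applyGroupBy pushes onto image.groups.
const byOrientation = (image) =>
  Promise.resolve(image.width >= image.height ? 'landscape' : 'portrait');

// opts.groupBy would then be an array such as [byOrientation].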
getGridFsFileByMetaDataMainId(mongoUrl, mainId).then(function (files) {
  var resultArry = [];
  resultArry[0] = "success";
  var attachments_SizeByte = 0;
  if (files.length > 0) {
    Promise.reduce(files, function (total, item) {
      return new Promise(function (resolve, reject) {
        // Only fetch files below the configured (or default) attachment size limit.
        if ((attachmentSize && item.length < attachmentSize) || (!attachmentSize && item.length < maxAttachmentSize)) {
          getGridFsStreams(mongoUrl, item._id).then(function (result) {
            if (result) {
              var resultObj = {};
              resultObj.filename = item.filename;
              resultObj.data = result;
              resultArry.push(resultObj);
              attachments_SizeByte += item.length;
              return resolve(resultArry);
            } else {
              return reject('Fail get gridfs streams (' + currentData + '/' + mongodataCount + '), DocId: ' + mainId + ', fileId: ' + item._id.toString());
            }
          }).catch(function (err) {
            return reject(err);
          });
        } else {
          // Assumed completion of the truncated snippet: oversized files are skipped.
          return resolve(resultArry);
        }
      });
    }, resultArry);
  }
});
import Promise from 'bluebird';
import fs from 'fs-extra'; // assumed import: a promise-returning fs (e.g. fs-extra)

// Recursively collect every file path under dir, depth-first.
export async function getLocalFilesInDir (dir) {
  const files = await fs.readdir(dir);
  return Promise.reduce(files, async (acc, file) => {
    const name = dir + '/' + file;
    const stats = await fs.stat(name);
    return stats.isDirectory()
      ? acc.concat(await getLocalFilesInDir(name))
      : acc.concat(name);
  }, []);
}
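A usage sketch, assuming a relative directory path:

// Walk ./assets and print every file found, in depth-first order.
getLocalFilesInDir('./assets')
  .then(files => files.forEach(f => console.log(f)))
  .catch(err => console.error('Failed to walk directory:', err));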