// with setup out of the way, start generating the recovery info
console.log('Generating recovery...');
// run in chunked mode: the chunker object lets us feed data in pieces instead of whole slices
var chunker = par2.startChunking(recoverySlices);
// set the chunking size
chunker.setChunkSize(chunkSize);
var numChunks = Math.ceil(sliceSize / chunkSize);
// loop through all the chunks
var sliceOffset = 0; // where we're at within each slice
var buf = Buffer.alloc(chunkSize); // new Buffer() is deprecated
async.timesSeries(numChunks, function(n, cb) {
// the final chunk may be smaller than chunkSize; shrink to fit
if(sliceOffset + chunkSize > sliceSize) {
chunkSize = sliceSize - sliceOffset;
chunker.setChunkSize(chunkSize);
buf = buf.slice(0, chunkSize);
}
// loop through each file
async.eachSeries(pFiles, function(file, cb) {
fs.open(file.name, 'r', function(err, fd) {
if(err) return cb(err);
// we need to read data from the file in a chunked fashion
var filePos = sliceOffset;
// for each slice in the file, read a chunk
async.timesSeries(file.numSlices, function(sliceNum, cb) {
	// read this slice's chunk of the file
	fs.read(fd, buf, 0, chunkSize, filePos, function(err) {
		if(err) return cb(err);
		filePos += sliceSize; // same offset within the next slice
		// hand the chunk to the chunker; `process` is an assumed method name,
		// substitute whatever per-chunk call your par2 binding exposes
		chunker.process(buf, cb);
	});
}, function(err) {
	fs.close(fd, function(closeErr) { cb(err || closeErr); });
});
});
}, function(err) {
	if(err) return cb(err);
	sliceOffset += chunkSize; // advance within each slice for the next pass
	cb();
});
}, function(err) {
	if(err) throw err;
	// finalize/write out the chunker's recovery data here (API-specific)
	console.log('Recovery data generated.');
});
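Every snippet on this page leans on the same `async.timesSeries` contract: the task receives the iteration index plus a callback, iterations run strictly one at a time, and the final callback receives the collected results in order. A minimal self-contained sketch (the delay and strings are illustrative):

const async = require('async');

async.timesSeries(3, function (n, next) {
	// each iteration starts only after the previous one called next()
	setTimeout(function () {
		next(null, 'iteration ' + n);
	}, 10);
}, function (err, results) {
	if (err) throw err;
	console.log(results); // ['iteration 0', 'iteration 1', 'iteration 2']
});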

// Next example: a cassandra-driver metadata test. The stubbed connection below
// counts queries so the test can assert that getAggregates caches its result.
// (Requires for assert, async, Metadata, Encoder and clientOptions are omitted
// in this excerpt; the test name below is assumed.)
it('should query once and cache the aggregate', function (done) {
	var called = 0;
	var rows = [{
		"keyspace_name": "ks_udf",
		"aggregate_name": "sum",
		"signature": ["bigint"],
		"argument_types": ["org.apache.cassandra.db.marshal.LongType"],
		"final_func": null,
		"initcond": Buffer.from([0, 0, 0, 0, 0, 0, 0, 0]), // new Buffer() is deprecated
		"return_type": "org.apache.cassandra.db.marshal.LongType",
		"state_func": "plus",
		"state_type": "org.apache.cassandra.db.marshal.LongType"
	}];
var cc = {
query: function (q, cb) {
called++;
setImmediate(function () {
cb(null, {rows: rows});
});
},
getEncoder: function () { return new Encoder(4, {}); }
};
var metadata = new Metadata(clientOptions.defaultOptions(), cc);
metadata.keyspaces['ks_udf'] = { aggregates: {}};
async.timesSeries(10, function (n, next) {
metadata.getAggregates('ks_udf', 'sum', function (err, funcArray) {
assert.ifError(err);
assert.ok(funcArray);
assert.strictEqual(funcArray.length, 1);
next();
});
}, function (err) {
assert.ifError(err);
assert.strictEqual(called, 1);
done();
});
});
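The assertion that `called` stays at 1 holds because the driver caches the aggregate after the first fetch, and `timesSeries` guarantees the first call has completed (and populated the cache) before the second begins. A stripped-down model of that behavior; the names here are hypothetical, not the driver's API:

function CachingLookup(fetchFn) {
	var cache = {};
	this.get = function (key, cb) {
		if (cache.hasOwnProperty(key)) {
			return setImmediate(cb, null, cache[key]); // cache hit: no query
		}
		fetchFn(key, function (err, value) {
			if (err) return cb(err);
			cache[key] = value; // first result is cached for later calls
			cb(null, value);
		});
	};
}

With a parallel iterator like `async.times`, this naive cache would issue two queries before the first response lands, which is exactly why the test iterates serially.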
it('should query the following times if it was previously not found', function (done) {
	helpers.createAndWait(table, function (err) {
		if (err) return done(err);
		async.timesSeries(50, function (n, cb) { batchWrite(name, n, cb); }, done);
	});
});
localCon.open(function (err) {
assert.ok(!err, err);
async.timesSeries(10, function (n, callback) {
localCon.close(callback);
}, done);
});
});
broker.subscribe('s1', function(err, subscription) {
assert.ifError(err);
async.timesSeries(3, function(index, cb) {
subscription.cancel(cb);
}, done);
});
});
fs.readFile(absoluteFilePath, (err, fileData) => {
	if (err) throw err;
var partSize = 1024 * 1024 * 5;
var parts = Math.ceil(fileData.length / partSize);
async.timesSeries(parts, (partNum, next) => {
var rangeStart = partNum*partSize;
var end = Math.min(rangeStart + partSize, fileData.length);
console.log("uploading ", fileName, " % ", (partNum/parts).toFixed(2));
partNum++;
async.retry((retryCb) => {
s3.uploadPart({
Body: fileData.slice(rangeStart, end),
Bucket: bucketName,
Key: fileName,
PartNumber: partNumber,
UploadId: multipart.UploadId
}, (err, mData) => {
retryCb(err, mData);
});
}, (err, data) => {
	if (err) return next(err);
	// keep the ETag; it's required to complete the multipart upload
	next(null, { ETag: data.ETag, PartNumber: partNumber });
});
}, (err, partETags) => {
	if (err) throw err;
	// standard finishing step: stitch the uploaded parts together
	s3.completeMultipartUpload({
		Bucket: bucketName,
		Key: fileName,
		UploadId: multipart.UploadId,
		MultipartUpload: { Parts: partETags }
	}, (err) => {
		if (err) throw err;
		console.log("upload complete:", fileName);
	});
});
});
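The bare `async.retry(task, callback)` form above falls back to the library default of five immediate attempts. Passing an options object makes the policy explicit; the values below are illustrative and `params` stands in for the part-upload request built above:

async.retry({ times: 3, interval: 200 }, (retryCb) => {
	s3.uploadPart(params, retryCb);
}, (err, data) => {
	// reached after a success, or after 3 failed attempts spaced 200 ms apart
});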
const selectNRandomNonUniqueUsers = function (howMany, callback) {
	// draw `howMany` random users one at a time; duplicates are allowed
	async.timesSeries(howMany, function (n, next) {
		User.randomInstance("ddr:User", function (err, user) {
			next(err, user);
		});
	}, function (err, users) {
		callback(err, users);
	});
};
setup(options, () => {
// rounds run serially; within each round, M timed runs fire in parallel
async.timesSeries(N - 5, (n, cb) => {
	async.times(M, (m, tcb) => {
marky.mark(options.name + n + m)
testFnUser(n, () => {
const entry = marky.stop(options.name + n + m)
stats[options.name].push(entry.duration)
tcb()
})
}, cb)
}, () => {
complete(options, done)
})
})
}
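The harness above only accumulates raw durations in `stats[options.name]`; reducing them to summary figures is straightforward. A small sketch, assuming each entry in `stats` is an array of millisecond durations:

function summarize(durations) {
	const sorted = durations.slice().sort((a, b) => a - b);
	const mean = durations.reduce((sum, d) => sum + d, 0) / durations.length;
	return {
		mean: mean,
		median: sorted[Math.floor(sorted.length / 2)],
		min: sorted[0],
		max: sorted[sorted.length - 1]
	};
}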