Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
// Verifies that the DataSet view's statistics helpers agree with the
// reference simple-statistics functions when the field `a` holds deeply
// nested numeric arrays (the helpers are expected to flatten first).
it('statistics methods on fields of array', () => {
// Build 10 rows (i = 1~10) whose `a` field nests numbers three levels deep.
const data = Array.from({ length: 10 }, (_, idx) => {
const i = idx + 1;
return {
a: [i, i + 10, [-i, i * i, [-i * i, 1 / i]]],
};
});
const dv = new DataSet.View().source(data);
// Reference values: the column flattened to a plain numeric array.
const values = flattenDeep(dv.getColumn('a'));
expect(dv.max('a')).to.equal(max(values));
expect(dv.min('a')).to.equal(min(values));
expect(dv.mean('a')).to.equal(mean(values));
expect(dv.average('a')).to.equal(mean(values));
expect(dv.median('a')).to.equal(median(values));
expect(dv.mode('a')).to.equal(mode(values));
expect(dv.quantile('a', 0.5)).to.equal(quantile(values, 0.5));
expect(dv.quantiles('a', [0, 0.1, 0.5])).to.eql(map([0, 0.1, 0.5], (p) => quantile(values, p)));
expect(dv.quantilesByFraction('a', 4)).to.eql(map([0, 0.25, 0.5, 0.75, 1], (p) => quantile(values, p)));
expect(dv.standardDeviation('a')).to.equal(standardDeviation(values));
expect(dv.sum('a')).to.equal(sum(values));
expect(dv.variance('a')).to.equal(variance(values));
expect(dv.range('a')).to.eql([min(values), max(values)]);
});
});
// NOTE(review): fragment — this is the completion callback of a benchmark run
// whose opening call is not visible in this chunk.
}, function (err, result) {
// Finish the progress line started elsewhere.
process.stdout.write(' DONE!\n\n');
if (err) {
console.log(err);
process.exit(1);
}
// `result` appears to be an array of per-request response times in ms,
// summarized with simple-statistics (`ss`).
console.log('Response Times:');
console.log('Min: ' + Math.floor(ss.min(result)) + 'ms');
console.log('Max: ' + Math.floor(ss.max(result)) + 'ms');
console.log('95th: ' + Math.floor(ss.quantile(result, 0.95)) + 'ms');
console.log('Std Dev: ' + Math.floor(ss.standardDeviation(result)) + 'ms');
console.log('\nRedis Cache:');
redisStats(api);
console.log('\nUser Cache:');
userStats(api);
// NOTE(review): process.exit(0) makes the shutdown() call below unreachable —
// the existing TODO acknowledges this; move shutdown before exit to fix.
process.exit(0); // TODO shutdown cleanly
api.client._client.shutdown();
});
}
// Prints a summary of a numeric array to the console: mean/min/max plus the
// 25th, 50th, 75th and 90th percentiles, computed with simple-statistics (`ss`).
function printSummary (arr) {
const lines = [
`mean: ${ss.mean(arr)}`,
`min: ${ss.min(arr)}`,
`max: ${ss.max(arr)}`,
`---`,
`25%: ${ss.quantile(arr, 0.25)}`,
`50%: ${ss.quantile(arr, 0.50)}`,
`75%: ${ss.quantile(arr, 0.75)}`,
`90%: ${ss.quantile(arr, 0.9)}`
]
// Emit each summary line in order (same output as logging them one by one).
for (const line of lines) {
console.log(line)
}
}
// NOTE(review): fragment — the counters and *Times arrays below are defined
// outside this chunk. Each section is skipped when no samples were collected.
// Latency statistics for getTransactionsByChannelId requests.
if (getTransactionsByChannelIdCount > 0) {
console.log(`\ngetTransactionsByChannelId statistics`);
console.log(`average: ${simpleStatistics.mean(getTransactionsByChannelIdTimes)}`);
console.log(`median: ${simpleStatistics.median(getTransactionsByChannelIdTimes)}`);
console.log(`min: ${simpleStatistics.min(getTransactionsByChannelIdTimes)}`);
console.log(`max: ${simpleStatistics.max(getTransactionsByChannelIdTimes)}`);
console.log(
`standard deviation: ${simpleStatistics.standardDeviation(getTransactionsByChannelIdTimes)}`,
);
}
// Latency statistics for persistTransaction requests.
if (persistTransactionCount > 0) {
console.log(`\npersistTransaction statistics`);
console.log(`average: ${simpleStatistics.mean(persistTransactionTimes)}`);
console.log(`median: ${simpleStatistics.median(persistTransactionTimes)}`);
console.log(`min: ${simpleStatistics.min(persistTransactionTimes)}`);
console.log(`max: ${simpleStatistics.max(persistTransactionTimes)}`);
console.log(
`standard deviation: ${simpleStatistics.standardDeviation(persistTransactionTimes)}`,
);
}
});
// NOTE(review): fragment — the head of this chain (presumably a lodash
// `_.chain(...).countBy(...)`) is not visible; the visible tail picks the
// most frequent `type` by count.
.map((value, key) => ({ count: value, type: key }))
.sortBy('count')
.reverse()
.head()
.get('type')
.value();
} catch (e) {
console.log(e);
}
// True when any analyzed field sample contained image URLs.
fieldStat.hasImageUrls = fieldAnalysis.some(fa => Boolean(fa.hasImageUrls));
// Length statistics across the analyzed samples (mean rounded to 2 decimals).
const lengths = _.map(fieldAnalysis, 'length');
fieldStat.minLength = ss.min(lengths);
fieldStat.maxLength = ss.max(lengths);
fieldStat.meanLength = ss.mean(lengths).toFixed(2);
return fieldStat;
});
// When the input stream is exhausted, print per-operation request counts and
// latency statistics collected while reading. NOTE(review): fragment — the
// counters/time arrays are defined outside this chunk, and the handler is
// truncated below (the persistTransaction section is cut off).
readlineInterface.on('close', () => {
// Overall request total across all three operations.
console.log(
`${getChannelsByTopicCount +
getTransactionsByChannelIdCount +
persistTransactionCount} requests`,
);
console.log(`${getChannelsByTopicCount} getChannelsByTopic requests`);
console.log(`${getTransactionsByChannelIdCount} getTransactionsByChannelId requests`);
console.log(`${persistTransactionCount} persistTransaction requests`);
// Per-operation statistics; each section is skipped when no samples exist.
if (getChannelsByTopicCount > 0) {
console.log(`\ngetChannelsByTopic statistics`);
console.log(`average: ${simpleStatistics.mean(getChannelsByTopicTimes)}`);
console.log(`median: ${simpleStatistics.median(getChannelsByTopicTimes)}`);
console.log(`min: ${simpleStatistics.min(getChannelsByTopicTimes)}`);
console.log(`max: ${simpleStatistics.max(getChannelsByTopicTimes)}`);
console.log(
`standard deviation: ${simpleStatistics.standardDeviation(getChannelsByTopicTimes)}`,
);
}
if (getTransactionsByChannelIdCount > 0) {
console.log(`\ngetTransactionsByChannelId statistics`);
console.log(`average: ${simpleStatistics.mean(getTransactionsByChannelIdTimes)}`);
console.log(`median: ${simpleStatistics.median(getTransactionsByChannelIdTimes)}`);
console.log(`min: ${simpleStatistics.min(getTransactionsByChannelIdTimes)}`);
console.log(`max: ${simpleStatistics.max(getTransactionsByChannelIdTimes)}`);
console.log(
`standard deviation: ${simpleStatistics.standardDeviation(getTransactionsByChannelIdTimes)}`,
);
}
// NOTE(review): truncated — the rest of this section is cut off in SOURCE.
if (persistTransactionCount > 0) {
console.log(`\npersistTransaction statistics`);
// Computes quantile class breaks for attribute `z` of a GeoJSON feature
// collection `fc`, presumably for choropleth styling (`colors`, `style`).
// NOTE(review): fragment — the function body is truncated mid for-loop below.
quantiles: function(fc, z, numBreaks, colors, style){
// Collect the numeric attribute `z` from every feature (underscore chain;
// `pluck` implies an old underscore/lodash version).
var vals = _.chain(fc.features)
.pluck('properties')
.pluck(z)
.value()
var min = ss.min(vals)
var max = ss.max(vals)
// Fraction of the data per break, e.g. 4 breaks -> 0.25 steps.
var interval = 1 / numBreaks
var quants = [0]
var currentBreak = 0
// NOTE(review): SOURCE is cut off here — the loop header is incomplete.
for(var i=0;i
// NOTE(review): fragment — the enclosing function's opening is not visible.
// Keep only signature events that have an associated transaction hash.
const processingLogs = collectedSignatures.filter(x => x.eventTransactionHash)
// Index the foreign-sender transactions by event transaction hash for O(1) lookup.
const txSentMap = senderForeign
.filter(x => x.eventTransactionHash)
.reduce((acc, x) => {
acc[x.eventTransactionHash] = x
return acc
}, {})
// Per-event latency: time the matching tx was sent minus time the event was seen.
// NOTE(review): throws if a processing log's hash is absent from txSentMap
// (`txSentMap[...]` would be undefined) — confirm the inputs always overlap.
const times = processingLogs.map(x => txSentMap[x.eventTransactionHash].time - x.time)
// Summary statistics over the collected latencies.
return {
count: times.length,
mean: mean(times),
median: median(times),
min: min(times),
max: max(times)
}
}
// Spatial aggregation: for each polygon, take the maximum of `inField` over
// the points that fall inside it (turf-style `t.inside`) and store the result
// on the polygon's `outField` property. NOTE(review): O(polygons * points).
_.each(polyFC.features, function(poly){
if(!poly.properties){
poly.properties = {}
}
var values = []
_.each(ptFC.features, function(pt){
if (t.inside(pt, poly)) {
values.push(pt.properties[inField]);
}
})
// NOTE(review): if no points fall inside, `values` is empty — confirm
// ss.max's behavior on an empty array is acceptable here (it may throw).
poly.properties[outField] = ss.max(values)
})
// Node-style completion callback with the mutated feature collection.
done(null, polyFC)
// NOTE(review): fragment — the enclosing function's opening is not visible.
// Mirrors the foreign-side latency computation, but for signature requests
// matched against home-sender transactions.
const processingLogs = signatureRequests.filter(x => x.eventTransactionHash)
// Index the home-sender transactions by event transaction hash for O(1) lookup.
const txSentMap = senderHome
.filter(x => x.eventTransactionHash)
.reduce((acc, x) => {
acc[x.eventTransactionHash] = x
return acc
}, {})
// Per-event latency: time the matching tx was sent minus time the event was seen.
// NOTE(review): throws if a request's hash is absent from txSentMap — confirm
// the inputs always overlap.
const times = processingLogs.map(x => txSentMap[x.eventTransactionHash].time - x.time)
// Summary statistics over the collected latencies.
return {
count: times.length,
mean: mean(times),
median: median(times),
min: min(times),
max: max(times)
}
}