// Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
// Stream a variance/covariance (VCV) CSV and push variance / covariance values
// into `factors` for every pair of correlation-factor ids it recognises.
// Expected row layout (no headers): [factorIdA, factorIdB, value].
// NOTE(review): this fragment is TRUNCATED — the stream handlers, the function
// body, and the returned deferred are cut off before the next unrelated fragment.
function loadVcvValues(vcvReadStream, factors){
var deferred = Q.defer();
let correlIds = factors.getCorrelFactorIds();
console.log('Load vcv data for Arria');
csv
.fromStream(vcvReadStream)
.on("data", function(row){
// check factors are in key factors
if (correlIds.includes(row[0]) && correlIds.includes(row[1])){
// see if variance
// Diagonal entry (same id twice) => variance.
if (row[0] == row[1]){
let ids = FactorCollection.convertFromCorrelId(row[0])
factors.setVariance(parseFloat(row[2]), ids[0], ids[1]);
// The shocked factor's variance doubles as its self-covariance.
if (row[0] == factors.shockedFactorId){
factors.setCovariance(parseFloat(row[2]), factors.shockedFactorId);
}
} // else correl
else {
// Off-diagonal entry: only keep pairs involving the shocked factor.
if (row.slice(0,2).includes(factors.shockedFactorId)){
let otherFactor = (row[0] == factors.shockedFactorId) ? row[1] : row[0];
let ids = FactorCollection.convertFromCorrelId(otherFactor)
// For each CSV path, stream it through fast-csv, filter each row via
// DB_OBJ.filter using tokens extracted from the filename (advanced/totals or a
// 4-digit year), then index rows by player name into DB_OBJ.
// NOTE(review): this fragment is TRUNCATED — the "data" handler, the async.each
// callback, and the stream end-handling are cut off before the next fragment.
async.each(paths, function (path, _aCallback) {
// Create File Stream
var inputStream = fs.createReadStream(path);
// Read in CSV file
fast_csv.fromStream(inputStream,{
headers: true,
ignoreEmpty: true
})
.transform(function(data){
// Pull "advanced"/"totals" and/or a 4-digit season year out of the filename.
var regex = path.match(/(advanced|totals)|(\d{4})/g);
return DB_OBJ.filter(data, regex);
})
.on("data", function(data){
var name = data.Player,
yr = data.Season,
stat = data.statType;
// Does Player exist?
if (!DB_OBJ.hasOwnProperty(name)) {
// Player doesn't exist
// Script fragment: read a taxi-trip CSV (path from argv[2]) and bucket
// consecutive rows by medallion into rawData[taxiCount].
// NOTE(review): duplicated verbatim later in this file; `fs` is used but not
// required here — presumably required in an unseen part of the original file.
var csv = require('fast-csv');
var request = require("request")
var polyline = require('polyline');
// CSV path supplied on the command line: node <script> <inputFile>
var inputFile = process.argv[2];
var stream = fs.createReadStream(inputFile);
var outputArray=[];
// rawData[i] holds all consecutive rows sharing one medallion value.
var rawData = [];
var previousMedallion = null;
// Starts at -1 so the first row's medallion change bumps it to index 0.
var taxiCount = -1;
var apiCallArray = [];
csv
.fromStream(stream, {
headers: true
})
// NOTE(review): "record" is the legacy fast-csv event name (pre-v1 "data").
.on("record", function (data) {
// New medallion => start a new bucket. Assumes input is sorted/grouped
// by medallion — TODO confirm.
if (data.medallion != previousMedallion) {
taxiCount++;
rawData[taxiCount]=[];
//console.log("i: " + taxiCount);
}
rawData[taxiCount].push(data);
previousMedallion = data.medallion;
})
// Script fragment: read GTFS routes.txt into `routes`, then (on end) decorate
// each GeoJSON feature in local.json with a route id and color looked up from
// those routes.
// NOTE(review): TRUNCATED — the forEach/end handlers are cut off before the
// next fragment; `routes = []` creates an implicit global (no var/let/const).
var csv = require("fast-csv");
var fs = require("fs");
var source = require('./local.json');
var stream = fs.createReadStream(__dirname + '/routes.txt');
routes = [];
//read each line of routes.txt, push each to routes[],
csv
.fromStream(stream, {headers : true})
.on("data", function(data){
routes.push(data);
})
.on("end", function(){
source.features.forEach(function(feature) {
var p = feature.properties;
// Drop the ArcGIS-style OBJECTID before re-attaching properties.
delete p.OBJECTID;
// lookupColor is defined elsewhere — presumably maps route number
// to {route_id, color}; verify against the full file.
var newData = lookupColor(p.Route_Numb);
p.Route_ID = newData.route_id;
p.color = newData.color;
feature.properties = p;
console.log(feature);
// NOTE(review): this fragment is a VERBATIM DUPLICATE of the taxi-medallion
// fragment earlier in this file (same requires, same grouping loop). If both
// copies really live in one file, the re-declarations shadow/clobber the
// earlier state — almost certainly a paste error; keep only one copy.
var csv = require('fast-csv');
var request = require("request")
var polyline = require('polyline');
var inputFile = process.argv[2];
var stream = fs.createReadStream(inputFile);
var outputArray=[];
var rawData = [];
var previousMedallion = null;
var taxiCount = -1;
var apiCallArray = [];
csv
.fromStream(stream, {
headers: true
})
.on("record", function (data) {
// Group consecutive rows by medallion into rawData[taxiCount].
if (data.medallion != previousMedallion) {
taxiCount++;
rawData[taxiCount]=[];
//console.log("i: " + taxiCount);
}
rawData[taxiCount].push(data);
previousMedallion = data.medallion;
})
// Stream a headered key-factor definitions CSV; for each row, build a
// `categories` object whose fields depend on the row's CATEGORY column
// (index => region; credit spread => sector/rating/tenor; rates => tenor).
// NOTE(review): TRUNCATED — the switch has no default/closing brace and the
// function never resolves/returns the deferred before the next fragment.
function loadKeyFactors(factorsFileReadStream, factors){
console.log('Loading key factors definitions for Arria');
var deferred = Q.defer();
csv
.fromStream(factorsFileReadStream, {headers:true, strictColumnHandling:false})
.on("data", function(row){
//console.log(row);
let categories = {};
// CATEGORY drives which columns are meaningful for this factor.
switch(row['CATEGORY'].toLowerCase()){
case 'index':
categories.region = row['REGION'];
break;
case 'credit spread':
categories.sector = row['SECTOR'];
categories.rating = row['RATING'];
categories.tenor = row['TENOR'];
break;
case 'rates':
categories.tenor = row['TENOR'];
break;
// Stream a tab-delimited, unquoted CSV into dataToDB[fileType]; on the first
// row, capture the header names and pick dataToDB.key (the record identifier),
// remapping any existing 'fileFasta' records keyed by 'phylovizFastaID'.
// NOTE(review): TRUNCATED — the data handler and function are cut off before
// the next fragment; `callback`/`callbackLaunched` are declared but their use
// is outside this view.
function readCSVfile(pathToFile, fileType, dataToDB, callback){
var stream = fs.createReadStream(pathToFile);
dataToDB[fileType] = [];
var getHeaders = true;
var identifier;
var headers = [];
var callbackLaunched = false;
// quote: null disables quote handling entirely for the TSV input.
csv.fromStream(stream, {headers : true, delimiter:'\t', quote: null})
.on("data", function(data){
if (getHeaders){
// NOTE(review): `i` is never declared — this for-in leaks an
// implicit global; should be `for (var i in data)`.
for (i in data) headers.push(i);
identifier = headers[0];
// The profile file's first column becomes the DB key.
if (fileType == 'fileProfile'){
dataToDB.key = identifier;
//headers.shift();
}
// Metadata file may override the key when none (or the fasta
// placeholder key) was set.
if((dataToDB.key == undefined || dataToDB.key == 'phylovizFastaID') && fileType == 'fileMetadata') dataToDB.key = identifier.trim();
// Re-key previously loaded fasta records under the new key name.
if (dataToDB.key == 'phylovizFastaID'){
for (i in dataToDB['fileFasta']){
dataToDB['fileFasta'][i][dataToDB.key] = dataToDB['fileFasta'][i]['phylovizFastaID'];
delete dataToDB['fileFasta'][i]['phylovizFastaID'];
}
}
// Read the wallet expenses CSV (headerless: date, reason, category, credit,
// debit) and accumulate total credit and debit for a summary.
// NOTE(review): TRUNCATED — cut off inside `var result = [` in the end
// handler; xdgBasedir/conf/numberIsNan come from requires outside this view
// (numberIsNan is presumably the number-is-nan package — verify).
function showStats () {
var expensesFilepath = xdgBasedir.data + '/wallet/expenses.csv';
var stream = fs.createReadStream(expensesFilepath);
var totalCredit = 0.0;
var totalDebit = 0.0;
var totalStashed = conf.get('stashed');
csv
.fromStream(stream)
.transform(function (data) {
// Map positional columns to named fields; non-numeric amounts
// become 0.0 instead of NaN.
return {
'date': data[0],
'reason': data[1],
'category': data[2],
'credit': numberIsNan(parseFloat(data[3])) ? 0.0 : parseFloat(data[3]),
'debit': numberIsNan(parseFloat(data[4])) ? 0.0 : parseFloat(data[4])
};
})
.on('data', function (data) {
totalDebit += data.debit;
totalCredit += data.credit;
})
.on('end', function () {
var result = [
/**
 * Stream a headered CSV file row-by-row.
 *
 * @param {string} csvPath - Path of the CSV file to read.
 * @param {Function} cb - Called once per parsed row object.
 * @param {Function} completedCb - Called after the final row has been emitted.
 */
module.exports.csvForEach = function (csvPath, cb, completedCb) {
  const input = fs.createReadStream(csvPath);
  const parser = csv.fromStream(input, { headers: true });
  parser.on('data', cb);
  parser.on('end', completedCb);
};
// Collect every parsed row of pupilCensusCSV into an array and await the
// result. Part of an enclosing async function not visible here (`pr` and
// `stream` are assigned without declaration — presumably declared above).
// NOTE(review): the try/catch is dead weight — `resolve(csvDataArray)` cannot
// throw, so `reject` is unreachable; a stream 'error' handler is what's
// actually missing (an I/O failure currently leaves this promise pending).
pr = await new Promise((resolve, reject) => {
const csvDataArray = []
stream = fs.createReadStream(pupilCensusCSV)
csv.fromStream(stream)
.on('data', (data) => {
csvDataArray.push(data)
})
.on('end', async () => {
try {
return resolve(csvDataArray)
} catch (error) {
reject(error)
}
})
})
return pr