Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
// Typings exercise for d3-dsv (dtslint-style test fragment): verifies the
// inferred return types of csvParseRows, with and without a row-mapper.
// The exact statement forms matter — each assignment is a type-check.
num = parseMappedArray[0].length;
// csvParseRows(...) ============================================================================
// without row mapper -----------------------------------------------------------------------
parseRowsArray = d3Dsv.csvParseRows(csvTestString);
str = parseRowsArray[0][0]; // 'Year' of first row
// date = parseRowsArray[0][0]; // fails, return value is string
// with row mapper ---------------------------------------------------------------------------
parseRowsMappedArray = d3Dsv.csvParseRows(csvTestString, (rawRow, index) => {
// The mapper receives the raw string cells plus the row index; the typings
// must propagate the mapped element type (ParsedTestObject) to the result.
let rr: string[] = rawRow;
let i: number = index;
let pr: ParsedTestObject;
pr = {
year: new Date(+rr[0], 0, 1), // first cell is a year string -> Jan 1 of that year
make: rr[1],
model: rr[2],
length: +rr[3] // numeric coercion via unary plus
};
return pr;
});
// With a mapper the parsed rows expose the Date-typed 'year' property.
date = parseRowsMappedArray[0].year;
// Parses a user-supplied CSV of (region id, value) rows for the given map
// geography. For 'United States' the id column is normalized via
// this._validateFips; for custom uploads, rows whose id is not a known map
// feature or whose value is <= 0 / NaN are filtered out and collected into
// badMapIds / badValueIds for error reporting.
// NOTE(review): fragment is truncated — the body of the final
// `if (badMapIds.length || badValueIds.length)` branch is not visible here.
parseCsv(csv, geography, customUpload) {
const mapResource = geographyResource.getMapResource(geography)
const features = mapResource.getUniqueFeatureIds()
const badMapIds = []
const badValueIds = []
csv = csv.trim()
let parsed
if (geography === 'United States') {
// US ids are FIPS codes; validate/normalize before use.
parsed = csvParseRows(csv, d => [this._validateFips(d[0]), parseFloat(d[1])])
} else {
parsed = csvParseRows(csv, d => [d[0], parseFloat(d[1])])
}
if (customUpload) {
// extra data validation for custom uploads
parsed = parsed.filter(row => {
const hasId = (features.indexOf(row[0]) > -1)
if (!hasId) {
badMapIds.push(row[0])
}
// Reject non-positive and non-numeric values (NaN fails both comparisons,
// hence the explicit isNaN check).
if (row[1] <= 0 || isNaN(row[1])) {
badValueIds.push(row[0])
}
return hasId && row[1] > 0
})
if (badMapIds.length || badValueIds.length) {
// Parses CSV text into an array of rows, drops the header row, and converts
// (in place) every cell that parses as a number from string to number.
// Cells that are not numeric are left as strings.
// NOTE(review): fragment is truncated — the function's closing brace is not
// visible in this view.
function parseCSV(text) {
const csv = csvParseRows(text);
// Remove header
if (csv.length > 0) {
csv.shift();
}
for (const row of csv) {
// NOTE(review): for..in over an array iterates indices as strings; it works
// here, but for..of with an index (or row.map) is the idiomatic form. Also
// parseFloat accepts leading-numeric strings like "12abc" — confirm that is
// acceptable for this data.
for (const key in row) {
const number = parseFloat(row[key]);
if (!Number.isNaN(number)) {
row[key] = number;
}
}
}
return csv;
// Builds chart data from raw CSV text. The first line (header) supplies the
// column keys; for scatterplots the 4th/5th header columns are treated as a
// grouping key and a dot-sizing key and removed from the value keys.
// NOTE(review): fragment is truncated — the csvParse row-mapper body and the
// rest of the function are not visible here.
export function parse(csv, inputDateFormat, index, stacked, type) {
let val;
const firstVals = {};
// csv.match(/^.*$/m)[0] extracts only the first line of the CSV;
// csvParseRows(...)[0] then splits that line into its header cells.
const keys = csvParseRows(csv.match(/^.*$/m)[0])[0];
let groupingKey, dotSizingKey;
const isScatterplot = type && type === 'scatterplot';
if (isScatterplot) {
// NOTE(review): `keys.length > 3` and `keys.length >= 4` are the same test,
// yet keys[4] only exists when length >= 5 — the second guard looks like it
// was meant to be `keys.length > 4`; dotSizingKey may be set to undefined
// when exactly 4 columns are present. Confirm against upstream intent.
if (keys.length > 3) groupingKey = keys[3];
if (keys.length >= 4) dotSizingKey = keys[4];
}
if (groupingKey) keys.splice(keys.indexOf(groupingKey), 1);
if (dotSizingKey) keys.splice(keys.indexOf(dotSizingKey), 1);
const data = csvParse(csv, (d, i) => {
const obj = {};
// Fragment of an async table-config builder (the function header is not
// visible in this view): validates the required metadata fields (source,
// header, format), then — for the 'csv' format — resolves the storage path,
// reads the file, and parses it into a 2-D string array with trimmed cells.
if (!metadata.source) {
throw new Error('Malformed metadata, property "source" is required.');
}
if (!metadata.header) {
throw new Error('Malformed metadata, property "header" is required.');
}
if (!metadata.format) {
throw new Error('Malformed metadata, property "format" is required.');
}
let data: string[][] = [];
// `metadata.header` was verified truthy above, so the `|| []` fallback is
// effectively dead here; harmless defensive default.
const labels = metadata.header || [];
switch (metadata.format) {
case 'csv':
const path = WorkflowParser.parseStoragePath(metadata.source);
// Trim the whole file first so a trailing newline does not yield an empty
// final row; then trim each individual cell.
data = csvParseRows((await Apis.readFile(path)).trim()).map(r => r.map(c => c.trim()));
break;
default:
throw new Error('Unsupported table format: ' + metadata.format);
}
return {
data,
labels,
type: PlotType.TABLE,
};
}
// Object-literal fragment (enclosing declaration not visible): a parser hook
// that splits fetched text into rows with d3's csvParseRows.
parser: d => csvParseRows(d)
};
// Joins airport location data with rendition flight records: builds a lookup
// from airport code (CSV column 4) to { lat, lon } (columns 6 and 7), then
// maps each rendition row's 'Departure Airport' / 'Arrival Airport' fields to
// coordinate pairs. Column indices presumably match the OpenFlights airport
// CSV layout — TODO confirm against the data source.
// NOTE(review): fragment is truncated — the .map callback and the function's
// closing braces are not fully visible here.
const addRenditions = (airportData, renditions) => {
const airports = csvParseRows(airportData)
.reduce((obj, airport) => {
obj[airport[4]] = {
lat: airport[6],
lon: airport[7],
};
return obj;
}, {});
// Renditions CSV has a header row, so csvParse (not csvParseRows) is used to
// access cells by column name.
const routes = csvParse(renditions).map((v) => {
const dep = v['Departure Airport'];
const arr = v['Arrival Airport'];
return {
from: airports[dep],
to: airports[arr],
};
// Fragment of an async confusion-matrix config builder. The signature is cut
// off above this view, and the bare `): Promise {` has presumably lost its
// generic type argument (e.g. Promise<...>) during extraction — confirm
// against the original source. Validates metadata, reads a CSV of
// (target, predicted, count) rows, builds a label -> index map, and requires
// exactly labels^2 rows before filling the square matrix.
// NOTE(review): fragment is truncated — the forEach body and the rest of the
// function are not visible here.
): Promise {
if (!metadata.source) {
throw new Error('Malformed metadata, property "source" is required.');
}
if (!metadata.labels) {
throw new Error('Malformed metadata, property "labels" is required.');
}
if (!metadata.schema) {
throw new Error('Malformed metadata, property "schema" missing.');
}
if (!Array.isArray(metadata.schema)) {
throw new Error('"schema" must be an array of {"name": string, "type": string} objects');
}
const path = WorkflowParser.parseStoragePath(metadata.source);
// Trim so a trailing newline does not produce an empty final row.
const csvRows = csvParseRows((await Apis.readFile(path)).trim());
const labels = metadata.labels;
const labelIndex: { [label: string]: number } = {};
let index = 0;
labels.forEach(l => {
labelIndex[l] = index++;
});
// A full confusion matrix over N labels has one row per (target, predicted)
// pair, i.e. N^2 rows.
if (labels.length ** 2 !== csvRows.length) {
throw new Error(
`Data dimensions ${csvRows.length} do not match the number of labels passed ${labels.length}`,
);
}
// N x N matrix initialized with undefined cells, filled from the CSV below.
const data = Array.from(Array(labels.length), () => new Array(labels.length));
csvRows.forEach(([target, predicted, count]) => {
const i = labelIndex[target.trim()];
// Builds an ROC-curve plot config: validates metadata and its schema, reads
// the CSV source, locates the fpr / tpr / threshold columns by schema field
// name, and maps each row into a labeled dataset point.
// NOTE(review): the bare `Promise` return type has presumably lost its
// generic argument (e.g. Promise<ViewerConfig>) during extraction — confirm
// against the original source. Fragment is also truncated — the dataset
// mapping and the rest of the method are not visible here.
public static async buildRocCurveConfig(metadata: PlotMetadata): Promise {
if (!metadata.source) {
throw new Error('Malformed metadata, property "source" is required.');
}
if (!metadata.schema) {
throw new Error('Malformed metadata, property "schema" is required.');
}
if (!Array.isArray(metadata.schema)) {
throw new Error('Malformed schema, must be an array of {"name": string, "type": string}');
}
const path = WorkflowParser.parseStoragePath(metadata.source);
// Trim so a trailing newline does not produce an empty final row.
const stringData = csvParseRows((await Apis.readFile(path)).trim());
const fprIndex = metadata.schema.findIndex(field => field.name === 'fpr');
if (fprIndex === -1) {
throw new Error('Malformed schema, expected to find a column named "fpr"');
}
const tprIndex = metadata.schema.findIndex(field => field.name === 'tpr');
if (tprIndex === -1) {
throw new Error('Malformed schema, expected to find a column named "tpr"');
}
// startsWith allows variants such as "threshold" or "thresholds".
const thresholdIndex = metadata.schema.findIndex(field => field.name.startsWith('threshold'));
if (thresholdIndex === -1) {
throw new Error('Malformed schema, expected to find a column named "threshold"');
}
const dataset = stringData.map(row => ({
label: row[thresholdIndex].trim(),