How to use @fast-csv/parse - 10 common examples

To help you get started, we've selected ten @fast-csv/parse examples based on common ways the library is used in public projects.


Example 1: validate.example.js (C2FO/fast-csv, examples/parsing-js/examples)
const { EOL } = require('os');
const csv = require('@fast-csv/parse');

const CSV_STRING = ['firstName,lastName', 'bob,yukon', 'sally,yukon', 'timmy,yukon'].join(EOL);

const stream = csv
    .parse({ headers: true })
    .validate(data => data.firstName !== 'bob')
    .on('error', error => console.error(error))
    .on('data', row => console.log(`Valid [row=${JSON.stringify(row)}]`))
    .on('data-invalid', (row, rowNumber) =>
        console.log(`Invalid [rowNumber=${rowNumber}] [row=${JSON.stringify(row)}]`),
    )
    .on('end', rowCount => console.log(`Parsed ${rowCount} rows`));

stream.write(CSV_STRING);
stream.end();

// Output:
// Invalid [rowNumber=1] [row={"firstName":"bob","lastName":"yukon"}]
// Valid [row={"firstName":"sally","lastName":"yukon"}]
// Valid [row={"firstName":"timmy","lastName":"yukon"}]
Example 2: custom_headers.example.js (C2FO/fast-csv, examples/parsing-js/examples)
const { EOL } = require('os');
const csv = require('@fast-csv/parse');

const CSV_STRING = ['a1,b1', 'a2,b2'].join(EOL);

const stream = csv
    .parse({ headers: ['a', 'b'] })
    .on('error', error => console.error(error))
    .on('data', row => console.log(row))
    .on('end', rowCount => console.log(`Parsed ${rowCount} rows`));

stream.write(CSV_STRING);
stream.end();

// Output:
// { a: 'a1', b: 'b1' }
// { a: 'a2', b: 'b2' }
// Parsed 2 rows
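
If the input already contains a header row that you want to discard in favor of your own names, the parser also has a renameHeaders option. Below is a minimal sketch of that variant; the option name and behavior follow the @fast-csv/parse documentation, so verify against the version you use:

const { EOL } = require('os');
const csv = require('@fast-csv/parse');

// The first line ('col_a,col_b') is a header row we want to replace with ['a', 'b'].
const CSV_STRING = ['col_a,col_b', 'a1,b1', 'a2,b2'].join(EOL);

const stream = csv
    .parse({ headers: ['a', 'b'], renameHeaders: true }) // skip the first row, key rows by the provided headers
    .on('error', error => console.error(error))
    .on('data', row => console.log(row))
    .on('end', rowCount => console.log(`Parsed ${rowCount} rows`));

stream.write(CSV_STRING);
stream.end();

// Expected output (assuming renameHeaders behaves as documented):
// { a: 'a1', b: 'b1' }
// { a: 'a2', b: 'b2' }
// Parsed 2 rows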
Example 3: validate_async.example.js (C2FO/fast-csv, examples/parsing-js/examples)
const { EOL } = require('os');
const csv = require('@fast-csv/parse');

const CSV_STRING = ['firstName,lastName', 'bob,yukon', 'sally,yukon', 'timmy,yukon'].join(EOL);

const stream = csv
    .parse({ headers: true })
    .validate((row, cb) => {
        setImmediate(() => cb(null, row.firstName !== 'bob'));
    })
    .on('error', error => console.error(error))
    .on('data', row => console.log(`Valid [row=${JSON.stringify(row)}]`))
    .on('data-invalid', row => console.log(`Invalid [row=${JSON.stringify(row)}]`))
    .on('end', rowCount => console.log(`Parsed ${rowCount} rows`));

stream.write(CSV_STRING);
stream.end();

// Output:
// Invalid [row={"firstName":"bob","lastName":"yukon"}]
// Valid [row={"firstName":"sally","lastName":"yukon"}]
// Valid [row={"firstName":"timmy","lastName":"yukon"}]
Example 4: ignore_empty_rows.example.js (C2FO/fast-csv, examples/parsing-js/examples)
const { EOL } = require('os');
const csv = require('@fast-csv/parse');

const CSV_STRING = ['a1,b1', ',', 'a2,b2', '   ,\t', ''].join(EOL);

const stream = csv
    .parse({ ignoreEmpty: true })
    .on('error', error => console.error(error))
    .on('data', row => console.log(row))
    .on('end', rowCount => console.log(`Parsed ${rowCount} rows`));

stream.write(CSV_STRING);
stream.end();

// Output:
// [ 'a1', 'b1' ]
// [ 'a2', 'b2' ]
// Parsed 2 rows
Example 5: transform.example.js (C2FO/fast-csv, examples/parsing-js/examples)
const { EOL } = require('os');
const csv = require('@fast-csv/parse');

const CSV_STRING = ['firstName,lastName', 'bob,yukon', 'sally,yukon', 'timmy,yukon'].join(EOL);

const stream = csv
    .parse({ headers: true })
    .transform(data => ({
        firstName: data.firstName.toUpperCase(),
        lastName: data.lastName.toUpperCase(),
        properName: `${data.firstName} ${data.lastName}`,
    }))
    .on('error', error => console.error(error))
    .on('data', row => console.log(JSON.stringify(row)))
    .on('end', rowCount => console.log(`Parsed ${rowCount} rows`));

stream.write(CSV_STRING);
stream.end();

// Output:
// {"firstName":"BOB","lastName":"YUKON","properName":"bob yukon"}
// {"firstName":"SALLY","lastName":"YUKON","properName":"sally yukon"}
Example 6: max_rows.example.js (C2FO/fast-csv, examples/parsing-js/examples)
const csv = require('@fast-csv/parse');

const rows = [
    'header1,header2\n',
    'col1,col1\n',
    'col2,col2\n',
    'col3,col3\n',
    'col4,col4\n',
    'col5,col5\n',
    'col6,col6\n',
    'col7,col7\n',
    'col8,col8\n',
    'col9,col9\n',
    'col10,col10',
];

const stream = csv
    .parse({ headers: true, maxRows: 5 })
    .on('error', error => console.error(error))
    .on('data', row => console.log(row))
    .on('end', rowCount => console.log(`Parsed ${rowCount} rows`));

rows.forEach(row => stream.write(row));
stream.end();

// Output:
// { header1: 'col1', header2: 'col1' }
// { header1: 'col2', header2: 'col2' }
// { header1: 'col3', header2: 'col3' }
// { header1: 'col4', header2: 'col4' }
// { header1: 'col5', header2: 'col5' }
// Parsed 5 rows
Example 7: skip_lines.example.js (C2FO/fast-csv, examples/parsing-js/examples)
const csv = require('@fast-csv/parse');

const rows = [
    'skip1_header1,skip1_header2\n',
    'skip2_header1,skip2_header2\n',
    'header1,header2\n',
    'col1,col1\n',
    'col2,col2\n',
    'col3,col3\n',
    'col4,col4\n',
];

const stream = csv
    .parse({ headers: true, skipLines: 2 })
    .on('error', error => console.error(error))
    .on('data', row => console.log(row))
    .on('end', rowCount => console.log(`Parsed ${rowCount} rows`));

rows.forEach(row => stream.write(row));
stream.end();

// Output:
// { header1: 'col1', header2: 'col1' }
// { header1: 'col2', header2: 'col2' }
// { header1: 'col3', header2: 'col3' }
// { header1: 'col4', header2: 'col4' }
// Parsed 4 rows
Example 8: first_row_as_headers.example.js (C2FO/fast-csv, examples/parsing-js/examples)
const { EOL } = require('os');
const csv = require('@fast-csv/parse');

const CSV_STRING = ['a,b', 'a1,b1', 'a2,b2'].join(EOL);

const stream = csv
    .parse({ headers: true })
    .on('error', error => console.error(error))
    .on('data', row => console.log(row))
    .on('end', rowCount => console.log(`Parsed ${rowCount} rows`));

stream.write(CSV_STRING);
stream.end();

// Output:
// { a: 'a1', b: 'b1' }
// { a: 'a2', b: 'b2' }
// Parsed 2 rows
Example 9: skip_rows.example.js (C2FO/fast-csv, examples/parsing-js/examples)
const csv = require('@fast-csv/parse');

const rows = [
    'header1,header2\n',
    'col1,col1\n',
    'col2,col2\n',
    'col3,col3\n',
    'col4,col4\n',
    'col5,col5\n',
    'col6,col6\n',
];

const stream = csv
    .parse({ headers: true, skipRows: 2 })
    .on('error', error => console.error(error))
    .on('data', row => console.log(row))
    .on('end', rowCount => console.log(`Parsed ${rowCount} rows`));

rows.forEach(row => stream.write(row));
stream.end();

// Output:
// { header1: 'col3', header2: 'col3' }
// { header1: 'col4', header2: 'col4' }
// { header1: 'col5', header2: 'col5' }
// { header1: 'col6', header2: 'col6' }
// Parsed 4 rows
Example 10: transform_headers.example.js (C2FO/fast-csv, examples/parsing-js/examples)
const { EOL } = require('os');
const csv = require('@fast-csv/parse');

const CSV_STRING = ['header1,header2', 'a1,b1', 'a2,b2'].join(EOL);

const stream = csv
    .parse({
        headers: headers => headers.map(h => h.toUpperCase()),
    })
    .on('error', error => console.error(error))
    .on('data', row => console.log(row))
    .on('end', rowCount => console.log(`Parsed ${rowCount} rows`));

stream.write(CSV_STRING);
stream.end();

// Output:
// { HEADER1: 'a1', HEADER2: 'b1' }
// { HEADER1: 'a2', HEADER2: 'b2' }
// Parsed 2 rows
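
Every snippet above writes an in-memory string into the parser stream, which keeps the examples self-contained. In a real project you will more often parse a file. A minimal sketch, assuming a hypothetical local file named data.csv with a header row; both the stream-piping form and parseFile are part of the @fast-csv/parse API:

const fs = require('fs');
const path = require('path');
const csv = require('@fast-csv/parse');

// Pipe a readable file stream into the parser instead of calling stream.write().
fs.createReadStream(path.resolve(__dirname, 'data.csv')) // data.csv is a placeholder file name
    .pipe(csv.parse({ headers: true }))
    .on('error', error => console.error(error))
    .on('data', row => console.log(row))
    .on('end', rowCount => console.log(`Parsed ${rowCount} rows`));

// Equivalent shorthand using parseFile:
// csv.parseFile(path.resolve(__dirname, 'data.csv'), { headers: true })
//     .on('data', row => console.log(row))
//     .on('end', rowCount => console.log(`Parsed ${rowCount} rows`));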

@fast-csv/parse is the parsing package of fast-csv, published under the MIT license.