With Node.js I want to parse a .csv file of 10000 records and do some operation on each row. I tried using http://www.adaltas.com/projects/node-csv. I couldn't get this to pause at each record, perform the operation, and then move on to the next one.
Try the line-by-line npm plugin:
npm install line-by-line --save
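A minimal sketch of how line-by-line reads a file one line at a time and lets you pause between lines (the file name is a placeholder, and splitting each CSV line into fields is left to you):

const LineByLineReader = require('line-by-line');
const lr = new LineByLineReader('file.csv');

lr.on('line', function (line) {
    // stop emitting further lines while this one is processed
    lr.pause();

    // do your operation on the row here, then resume
    lr.resume();
});

lr.on('end', function () {
    console.log('All lines are read, file is closed now.');
});

lr.on('error', function (err) {
    console.log(err);
});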
It seems like you need a stream-based solution. Such libraries already exist, so before reinventing the wheel, try this one, which also includes validation support: https://www.npmjs.org/package/fast-csv
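For example, a minimal sketch of the validation support, using the same fast-csv fromPath API as the answer further down (the file name and the validation rule are placeholders):

const csv = require('fast-csv');

csv.fromPath('my.csv', { headers: true })
    .validate(function (row) {
        // placeholder rule: reject rows without an id field
        return row.id !== undefined;
    })
    .on('data-invalid', function (row) {
        console.log('Invalid row skipped:', row);
    })
    .on('data', function (row) {
        // do some operation on each valid row
    })
    .on('end', function () {
        console.log('Done parsing.');
    });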
This is my solution to fetch and parse a CSV file from an external URL:
const parse = require('csv-parse/lib/sync');
const axios = require('axios');

const readCSV = (module.exports.readCSV = async (path) => {
    try {
        // fetch the raw CSV text ('blob' is a browser-only responseType,
        // so request plain text when running under Node.js)
        const res = await axios({ url: path, method: 'GET', responseType: 'text' });
        // parse synchronously into an array of objects keyed by the header row
        const records = parse(res.data, {
            columns: true,
            skip_empty_lines: true
        });
        return records;
    } catch (e) {
        console.log('Error reading CSV:', e.message);
    }
});
readCSV('https://urltofilecsv');
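Note that csv-parse/lib/sync parses the whole response in memory and returns the complete array of records at once. That is fine for a few thousand rows, but it does not give you the pause/resume control of a streaming parser.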
In order to pause the stream in fast-csv, you can do the following:
const csv = require('fast-csv');

let csvstream = csv.fromPath(filePath, { headers: true })
    .on("data", function (row) {
        // stop the stream while this row is processed
        csvstream.pause();
        // do some heavy work,
        // then resume the stream when done
        csvstream.resume();
    })
    .on("end", function () {
        console.log("We are done!");
    })
    .on("error", function (error) {
        console.log(error);
    });
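If the heavy work is asynchronous, call resume() from its completion callback rather than synchronously after starting it, otherwise the stream resumes before the work is done. A minimal sketch, assuming a hypothetical processRow(row, done) helper for the async operation:

let csvstream = csv.fromPath(filePath, { headers: true })
    .on("data", function (row) {
        csvstream.pause();
        // processRow is a hypothetical async operation on the row;
        // only resume once its callback fires
        processRow(row, function (err) {
            if (err) {
                console.log(err);
            }
            csvstream.resume();
        });
    })
    .on("end", function () {
        console.log("We are done!");
    });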