I'm currently learning how to parse a JSON object to a CSV file using the json2csv node module. I have never worked with JSON before, so this is all new to me.
My JSON
Your input is in the wrong format; it should look like the example in the json2csv documentation:
var json2csv = require('json2csv');
var fs = require('fs');

var json = [
  {
    "car": "Audi",
    "price": 40000,
    "color": "blue"
  }, {
    "car": "BMW",
    "price": 35000,
    "color": "black"
  }, {
    "car": "Porsche",
    "price": 60000,
    "color": "green"
  }
];

json2csv({ data: json, fields: ['car', 'price', 'color'] }, function(err, csv) {
  if (err) return console.log(err);
  fs.writeFile('file.csv', csv, function(err) {
    if (err) throw err;
    console.log('file saved');
  });
});
The content of the resulting file.csv should be:
car, price, color
"Audi", 40000, "blue"
"BMW", 35000, "black"
"Porsche", 60000, "green"
You can also point json2csv at a sub-array within your JSON object by addressing it with .<name> after your JSON object in the data option of json2csv.
In your case, this might look something like:
const json2csv = require('json2csv');
const fs = require('fs');

var json = {
  "car": [
    {
      "name": "Audi",
      "price": "40000",
      "color": "blue"
    }
  ]
};

json2csv({ data: json.car, fields: ['name', 'price', 'color'] }, function(err, csv) {
  if (err) return console.log(err);
  fs.writeFile('cars.csv', csv, function(err) {
    if (err) throw err;
    console.log('cars file saved');
  });
});
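The same trick works for deeper nesting, because data is just whatever array you hand to json2csv via an ordinary JavaScript property lookup. A hypothetical sketch, assuming the array sits one level further down (the "dealer" key is made up for illustration):

var json2csv = require('json2csv');

var json = {
  "dealer": {
    "car": [
      { "name": "Audi", "price": "40000", "color": "blue" }
    ]
  }
};

// Any property path that resolves to an array of flat objects will do.
json2csv({ data: json.dealer.car, fields: ['name', 'price', 'color'] }, function(err, csv) {
  if (err) return console.log(err);
  console.log(csv);
});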
json2csv only supports a flat structure where fields are direct children of the JSON root.
If you wish to change that, consider cloning the code and doing something like the following:
// createColumnContent function changed from the original json2csv code
var createColumnContent = function(params, str, callback) {
  params.data.forEach(function(data_element) {
    // if null or empty object, do nothing
    if (data_element && Object.getOwnPropertyNames(data_element).length > 0) {
      var line = '';
      var eol = os.EOL || '\n';
      params.fields.forEach(function(field_element) {
        // here, instead of a direct child, getByPath supports multiple levels of subnodes
        line += getByPath(data_element, field_element.split('.'), 0) + params.del;
      });
      // remove the last delimiter
      line = line.substring(0, line.length - 1);
      line = line.replace(/\\"/g, '""');
      str += eol + line;
    }
  });
  callback(str);
};

// walk the object along the dot-separated path, one level per recursive call
var getByPath = function(data_element, path, position) {
  if (data_element.hasOwnProperty(path[position])) {
    if (position === path.length - 1) {
      return JSON.stringify(data_element[path[position]]);
    } else {
      return getByPath(data_element[path[position]], path, position + 1);
    }
  } else {
    return '';
  }
};
Usage:
json2csv({ data: json, fields: ['car.name.0', 'car.price.0', 'car.color.0'] }, function(err, csv) {
  if (err) return console.log(err);
  fs.writeFile('file.csv', csv, function(err) {
    if (err) throw err;
    console.log('file saved');
  });
});
Output file content:
"car.name.0","car.price.0","car.color.0"
"Audi","40000","blue"
As a side note, to clone and work with your own version:
git clone https://github.com/zeMirco/json2csv.git
Add your changes, and then, to use the changed version:
npm install /local/path/to/repo
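Alternatively, npm supports local paths in package.json, so you can record the dependency there and have it survive reinstalls. A minimal sketch, assuming your fork is checked out at ../json2csv next to your project (that path is just an example):

{
  "dependencies": {
    "json2csv": "file:../json2csv"
  }
}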
I don't know about you, but I like small packages that just work as expected without a lot of extra configuration. Try jsonexport; it works really well with objects, arrays, etc., and it's fast!
Install
npm i --save jsonexport
Usage
const jsonexport = require('jsonexport');
const fs = require('fs');

jsonexport({
  "car": [
    {
      "name": "Audi",
      "price": "40000",
      "color": "blue"
    }
  ]
}, function(err, csv) {
  if (err) return console.error(err);
  fs.writeFile('cars.csv', csv, function(err) {
    if (err) return console.error(err);
    console.log('cars.csv saved');
  });
});
https://github.com/kauegimenes/jsonexport
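jsonexport also accepts a plain array of objects directly, which keeps the headers flat since they come straight from the object keys. A minimal sketch using the same cars as above:

const jsonexport = require('jsonexport');

const cars = [
  { name: 'Audi', price: '40000', color: 'blue' },
  { name: 'BMW', price: '35000', color: 'black' }
];

// With a flat array, each object becomes one CSV row.
jsonexport(cars, function(err, csv) {
  if (err) return console.error(err);
  console.log(csv);
});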
const { Parser } = require('json2csv');

let myCars = {
  "car": {
    "name": ["Audi"],
    "price": ["40000"],
    "color": ["blue"]
  }
};

let fields = ["car.name", "car.price", "car.color"];

const parser = new Parser({
  fields,
  unwind: ["car.name", "car.price", "car.color"]
});

const csv = parser.parse(myCars);
console.log('output', csv);
This will print the CSV output to the console.
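If you want a file rather than console output, the resulting string can be written out the same way as in the other answers; a minimal sketch:

const fs = require('fs');

// csv here is the string produced by parser.parse(myCars) above
fs.writeFileSync('cars.csv', csv);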
Check out Underscore.js's pluck method: if you pass _.pluck(json, 'car') to json2csv's data instead of the original json, you should get what you're looking for.
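A minimal sketch of that idea, assuming (hypothetically) that your JSON is an array where each element wraps the flat car object under a "car" key:

var _ = require('underscore');
var json2csv = require('json2csv');
var fs = require('fs');

// Hypothetical input shape: each element wraps the flat car object under "car".
var json = [
  { car: { name: 'Audi', price: 40000, color: 'blue' } },
  { car: { name: 'BMW', price: 35000, color: 'black' } }
];

// _.pluck pulls the "car" property out of every element, leaving a flat
// array of objects that json2csv can handle directly.
var cars = _.pluck(json, 'car');

json2csv({ data: cars, fields: ['name', 'price', 'color'] }, function(err, csv) {
  if (err) return console.log(err);
  fs.writeFile('cars.csv', csv, function(err) {
    if (err) throw err;
    console.log('file saved');
  });
});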