I want to convert JSON which has value arrays. Here is response.json:
{
"rows": [
[
"New Visitor",
"(not set)",
"(not set)",
"0"
],
[
"New Visitor",
"(not set)",
"(not set)",
"mobile"
],
[
"New Visitor",
"(not set)",
"(not set)",
"mobile"
],
[
"New Visitor",
"(not set)",
"(not set)",
"mobile",
]
]
}
Now I want to convert this data into name.csv:
"New Visitor","(not set)","(not set)","0"
"New Visitor","(not set)","(not set)","mobile"
"New Visitor","(not set)","(not set)","mobile"
"New Visitor","(not set)","(not set)","mobile"
Please give me suggestions using Node.js.
First, we import the native file system module (fs). Then we build the CSV string from the rows array ourselves, wrapping each value in double quotes, and write it to disk with fs.writeFile, whose callback tells us whether the file was saved.
'use strict';
var fs = require('fs');
let myObj = {
"rows": [
[
"New , Visitor",
"(not set)",
"(not set)",
"0"
],
[
"New Visitor",
"(not set)",
"(not set)",
"mobile"
],
[
"New Visitor",
"(not set)",
"(not set)",
"mobile"
],
[
"New Visitor",
"(not set)",
"(not set)",
"mobile",
]
]
}
// 1. One way - if you want the results to be in double quotes and the values may contain commas
// choose another string to temporarily replace commas if necessary
let stringToReplaceComas = '!!!!';
myObj.rows.map((singleRow) => {
singleRow.map((value, index) => {
singleRow[index] = value.replace(/,/g, stringToReplaceComas);
})
})
let csv = `"${myObj.rows.join('"\n"').replace(/,/g, '","')}"`;
// // or like this
// let csv = `"${myObj.rows.join('"\n"').split(',').join('","')}"`;
csv = csv.replace(new RegExp(`${stringToReplaceComas}`, 'g'), ',');
// // 2. Another way - if you don't need the double quotes in the generated csv and you don't have commas in the rows' values
// let csv = myObj.rows.join('\n')
fs.writeFile('name.csv', csv, 'utf8', function(err) {
if (err) {
console.log('Some error occurred - file either not saved or corrupted file saved.');
} else {
console.log('It\'s saved!');
}
});
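As a design note, the placeholder trick can be avoided entirely by quoting each field individually and escaping any embedded double quotes, which is closer to the usual CSV convention (RFC 4180). A minimal sketch, reusing the fs and myObj from above (escapeField and csvAlt are names introduced here just for illustration):
// Quote every field and escape embedded double quotes,
// so commas inside values need no placeholder replacement.
const escapeField = (value) => `"${String(value).replace(/"/g, '""')}"`;
const csvAlt = myObj.rows
    .map((row) => row.map(escapeField).join(',')) // each row becomes "a","b","c","d"
    .join('\n'); // rows become newline-separated lines
fs.writeFile('name.csv', csvAlt, 'utf8', function (err) {
    if (err) {
        console.log('Some error occurred - file either not saved or corrupted file saved.');
    } else {
        console.log('It\'s saved!');
    }
});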
Alternatively, you can use an existing library, e.g. https://github.com/mrodrig/json-2-csv, https://github.com/wdavidw/node-csv, https://github.com/wdavidw/node-csv-stringify
An example using json-2-csv (https://github.com/mrodrig/json-2-csv):
'use strict';
const fs = require('fs'); // needed below for fs.writeFile
const converter = require('json-2-csv');
let myObj = {
"rows": [
{
value1: "New Visitor",
value2: "(not set)",
value3: "(not set)",
value4: "0"
},
{
value1: "New Visitor",
value2: "(not set)",
value3: "(not set)",
value4: "mobile"
},
{
value1: "New Visitor",
value2: "(not set)",
value3: "(not set)",
value4: "mobile"
},
{
value1: "New Visitor",
value2: "(not set)",
value3: "(not set)",
value4: "mobile",
}
]
}
let json2csvCallback = function (err, csv) {
if (err) throw err;
fs.writeFile('name.csv', csv, 'utf8', function(err) {
if (err) {
console.log('Some error occurred - file either not saved or corrupted file saved.');
} else {
console.log('It\'s saved!');
}
});
};
converter.json2csv(myObj.rows, json2csvCallback, {
prependHeader: false // removes the generated header of "value1,value2,value3,value4" (in case you don't want it)
});
An example using csv-stringify (https://github.com/wdavidw/node-csv-stringify):
'use strict';
var stringify = require('csv-stringify');
var fs = require('fs');
let myObj = {
"rows": [
[
"New Visitor",
"(not set)",
"(not set)",
"0"
],
[
"New Visitor",
"(not set)",
"(not set)",
"mobile"
],
[
"New Visitor",
"(not set)",
"(not set)",
"mobile"
],
[
"New Visitor",
"(not set)",
"(not set)",
"mobile",
]
]
}
stringify(myObj.rows, function(err, output) {
if (err) throw err;
fs.writeFile('name.csv', output, 'utf8', function(err) {
if (err) {
console.log('Some error occurred - file either not saved or corrupted file saved.');
} else {
console.log('It\'s saved!');
}
});
});
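The desired output in the question wraps every field in double quotes. csv-stringify has a quoted option for that; the sketch below is a hedged variation of the example above (verify the option against the documentation of the version you have installed):
// Sketch: ask csv-stringify to wrap the non-empty fields in double quotes.
stringify(myObj.rows, { quoted: true }, function(err, output) {
    if (err) throw err;
    fs.writeFile('name.csv', output, 'utf8', function(err) {
        if (err) {
            console.log('Some error occurred - file either not saved or corrupted file saved.');
        } else {
            console.log('It\'s saved!');
        }
    });
});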
Step 1: Read.
If you need to read the JSON from a file (as indicated by your inclusion of the filename response.json
in your post), you will require the Node.js FileSystem API:
const fs = require('fs'); // Require Node.js FileSystem API.
const JSONFile = fs.readFileSync('response.json'); // Read the file synchronously.
Note: If you prefer, you can read the file asynchronously with fs.readFile()
and perform the conversion in a callback function.
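For example, a minimal sketch of the asynchronous variant, with the rest of the conversion happening inside the callback:
const fs = require('fs'); // Require Node.js FileSystem API.

fs.readFile('response.json', 'utf8', (err, contents) => {
    if (err) return console.error(err); // Bail out if the file could not be read.
    const JSONasPOJO = JSON.parse(contents); // Parse JSON into a plain object.
    // ...convert to CSV and write the file here (Steps 2 and 3 below).
});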
Step 2: Convert.
Whether you read your JSON from a local file or GET it from a server, you will need to parse it into a Plain Old JavaScript Object first using the JSON.parse
method:
const JSONasPOJO = JSON.parse(JSONFile); // Parse JSON into POJO.
Then perform a series of joins on the child arrays and parent array:
SEE EDIT BELOW
/* THIS IS UNNECESSARY FOR "COMMA" SEPARATED VALUES
const CSVString = JSONasPOJO
.rows // Get `rows`, which is an array.
.map( // Map returns a new array.
row => row.join(',') // Each child array becomes a comma-separated string.
)
.join('\n'); // Parent array becomes a newline-separated string...
// ...of comma-separated strings.
// It is now a single CSV string!
*/
EDIT:
While the previous code certainly works, it is unnecessary to use .map and .join on the child arrays. As @Relu demonstrates, a single .join on the parent array is sufficient: .join must return a string, so JavaScript automatically converts each child array into a comma-separated string.
You could still use the .map pattern above if you want to join the child arrays with something other than a comma, or to add quoting around each field, as in the sketch below.
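For instance, a minimal sketch that reproduces the fully quoted output shown in the question (the quote-escaping is a general safeguard; the sample data does not actually need it):
// Quote each field (escaping any embedded double quotes) before joining.
const CSVString = JSONasPOJO.rows
    .map(row => row.map(field => `"${String(field).replace(/"/g, '""')}"`).join(','))
    .join('\n'); // Each line looks like "New Visitor","(not set)","(not set)","mobile"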
Otherwise:
var CSVString = JSONasPOJO.rows.join('\n'); // Array becomes a newline-separated...
// ...string of comma-separated strings.
// It is now a single CSV string!
Here, we can see that conversion in action:
const JSONasPOJO = {
"rows": [
[
"New Visitor",
"(not set)",
"(not set)",
"0"
],
[
"New Visitor",
"(not set)",
"(not set)",
"mobile"
],
[
"New Visitor",
"(not set)",
"(not set)",
"mobile"
],
[
"New Visitor",
"(not set)",
"(not set)",
"mobile" // NOTE: Here I removed a trailing comma,
// ...which is invalid JSON!
]
]
}
const CSVString = JSONasPOJO.rows.join('\n');
console.log(CSVString);
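Running that snippet logs the rows joined with commas, one row per line:
New Visitor,(not set),(not set),0
New Visitor,(not set),(not set),mobile
New Visitor,(not set),(not set),mobile
New Visitor,(not set),(not set),mobile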
Step 3: Write.
Using the FileSystem API again, write to a file, and log an error or a success message:
fs.writeFile('name.csv', CSVString, err => {
if (err) return console.log(err);
console.log('FILE SUCCESSFULLY WRITTEN!\n');
});
Note: Here, I demonstrate the asynchronous pattern using a callback to log my error and success messages. If you prefer, you can write the file synchronously with fs.writeFileSync().
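A minimal synchronous sketch (wrapped in try/catch because fs.writeFileSync throws on failure instead of passing an error to a callback):
try {
    fs.writeFileSync('name.csv', CSVString);
    console.log('FILE SUCCESSFULLY WRITTEN!\n');
} catch (err) {
    console.log(err);
}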
I like to add plenty of console.log() messages to my Node.js scripts. Putting it all together:
const fs = require('fs');
const inFilename = 'response.json',
outFilename = 'name.csv';
console.log(`Preparing to read from ${inFilename} …`);
const JSONContents = fs.readFileSync(inFilename);
console.log(`READ:\n${JSONContents}`);
console.log('Preparing to parse as JSON …');
const JSONasPOJO = JSON.parse(JSONContents);
console.log('PARSED:', JSONasPOJO); // Logging the object directly avoids printing "[object Object]".
console.log('Preparing to convert into CSV …');
const CSVString = JSONasPOJO.rows.join('\n');
console.log(`CONVERTED:\n${CSVString}`);
console.log(`Preparing to write to ${outFilename} …`);
fs.writeFile(outFilename, CSVString, err => {
if (err) return console.error(err);
console.log('FILE SUCCESSFULLY WRITTEN!');
});