Hey, I am trying to export a CSV from Node.js (pulling the data from MongoDB). I already have the data being pulled and separated by commas and all, but now I am trying to figure out how to actually send it. I am sticking this code in my routes file. Any advice on how to take the array of data and send it to the user straight for download on request?
Here is the code (I attempted the bottom part, the second function in the waterfall):
exports.downloadContacts = function(req, res) {
  async.waterfall([
    function(callback) {
      var source = [];
      // Look up the current user's friends, then load the matching User documents
      Friend.find({ userId: req.signedCookies.userid }, function(err, friends) {
        if (err) {
          console.log('err with friends for download');
        } else {
          var userMap = {};
          var friendIds = friends.map(function(user) {
            userMap[user.friend_id] = user;
            return user.friend_id;
          });
          console.log(friends);
          User.find({ _id: { $in: friendIds } }, function(err, users) {
            if (err) {
              console.log(err);
            } else {
              for (var i = 0; i < users.length; i++) {
                console.log('users');
                //console.log(users[i]);
                source.push(users[i].firstNameTrue, users[i].lastNameTrue, users[i].emailTrue, users[i].phone, users[i].emailList, users[i].phoneList);
              }
              console.log(source);
              callback(null, source);
            }
          });
        }
      });
    }
  ],
  function(err, source) {
    var result = [];
    res.contentType('csv');
    csv()
      .from(source)
      .on('data', function(data) {
        result.push(data.join());
      })
      .on('end', function() {
        res.send(result.join('\n'));
      });
  });
};
Here is what I did:
var json2csv = require('json2csv');

var fields = ['name', 'phone', 'mobile', 'email', 'address', 'notes'];
var fieldNames = ['Name', 'Phone', 'Mobile', 'Email', 'Address', 'Notes'];

var data = json2csv({ data: docs, fields: fields, fieldNames: fieldNames });

res.attachment('filename.csv');
res.status(200).send(data);
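For context, this is roughly the shape of the route it lives in. Treat it as a sketch only: the Contact model, its query, and the field names are placeholders for whatever produces your docs array.

var json2csv = require('json2csv');

exports.downloadContacts = function(req, res) {
  // Placeholder query; anything that yields an array of objects (docs) works here
  Contact.find({ userId: req.signedCookies.userid }, function(err, docs) {
    if (err) {
      return res.status(500).send('could not load contacts');
    }

    var fields = ['name', 'phone', 'mobile', 'email', 'address', 'notes'];
    var fieldNames = ['Name', 'Phone', 'Mobile', 'Email', 'Address', 'Notes'];
    var data = json2csv({ data: docs, fields: fields, fieldNames: fieldNames });

    // attachment() sets Content-Disposition and picks text/csv from the extension
    res.attachment('contacts.csv');
    res.status(200).send(data);
  });
};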
Have you tried something like this, with the content type set to "application/octet-stream"?
res.set('Content-Type', 'application/octet-stream');
res.send(<your data>);
or simply
res.send(Buffer.from(<your data>));
Express send() docs.
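If you want the browser to treat the response as a file download, you can also add a Content-Disposition header. A rough sketch, where csvString stands in for whatever comma-joined data you already built:

res.set('Content-Type', 'application/octet-stream');
// Suggest a filename so the browser downloads the response instead of displaying it
res.set('Content-Disposition', 'attachment; filename="contacts.csv"');
res.send(Buffer.from(csvString));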
The json2csv package has been updated since the highest-voted answer was written; the newer version has slightly different syntax:
const { Parser } = require('json2csv');

const fields = [
  { label: 'header 1', value: 'field1_name' },
  { label: 'header 2', value: 'field2_name' }
];

const json2csv = new Parser({ fields: fields });

try {
  const csv = json2csv.parse(data);
  res.attachment('data.csv');
  res.status(200).send(csv);
} catch (error) {
  console.log('error:', error.message);
  res.status(500).send(error.message);
}
res.attachment is a function, not an attribute. I needed to remove the equals sign for this to work.
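To illustrate what that comment means (csv here is the string produced by json2csv.parse above):

// Wrong: treating attachment as a property
// res.attachment = 'data.csv';

// Right: calling it; Express then sets the Content-Disposition header
// (and the Content-Type, based on the .csv extension)
res.attachment('data.csv');
res.status(200).send(csv);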