I've been struggling for several hours now trying to import a CSV file, uploaded from the client using meteor-file and parsed server-side with node-csv. I basically need to populate my collection with data from a CSV file uploaded by the user.
/server/filehandler.js:
Meteor.methods({
  'uploadFile': function (file) {
    if (file.start === 0) {
      console.log(file.name);
      console.log(file.type);
      console.log(file.size);
    }
    file.save('/home/russell/tmp', {});
    var buffer = new Buffer(file.data);
    CSV().from(
      buffer.toString(),
      {comment: '#', delimiter: ',', quote: ''}
    )
    .to.array(function (data) {
      //console.log(data);
      for (var row = 0; row < data.length; row++) {
        console.log(data[row]);
        newRecord = {
          'firstname': data[row][0],
          'lastname': data[row][1],
          'email': data[row][2],
          'emailshort': data[row][3],
          'emailmain': data[row][4],
          'domain': data[row][5]
        };
        console.log(newRecord);
        reas.insert(newRecord); // *** _dynamic_meteor ERROR here!
      }
    });
  } // uploadFile
});
The console.log output tells me that the CSV-to-array conversion is fine.
The collection reas is set up in /lib/models.js - /lib is at the same level as /server & /client.
I've tried declaring a global variable outside of the Meteor method and storing the result of the conversion in it, and I've also tried using Session.set(), but I just can't seem to get at the results of the conversion outside of the method.
Thanks.
My /libs/models.js looks like this:
reas = new Meteor.Collection("RegisteredEmailAddresses");

/* checks to see if the current user making the request to update is the admin user */
function adminUser(userId) {
  var adminUser = Meteor.users.findOne({username: "admin"});
  return (userId && adminUser && userId === adminUser._id);
}

reas.allow({
  insert: function (userId, doc) {
    return adminUser(userId);
  },
  update: function (userId, docs, fields, modifier) {
    return adminUser(userId);
  },
  remove: function (userId, docs) {
    return adminUser(userId);
  }
});
EUREKA MOMENT?!
Shouldn't that be /lib not /libs? Maybe reas is not being defined in time?
If I leave in the line
reas.insert(newRecord);
I get the error message below. If I remove that line, I don't.
Error message:
W2036-20:56:29.463(1)? (STDERR) packages/mongo-livedata.js:1862
W2036-20:56:29.471(1)? (STDERR) throw e;
W2036-20:56:29.475(1)? (STDERR) ^
W2036-20:56:29.953(1)? (STDERR) Error: Meteor code must always run within a Fiber. Try wrapping callbacks that you pass to non-Meteor libraries with Meteor.bindEnvironment.
W2036-20:56:29.958(1)? (STDERR) at Object.Meteor.bindEnvironment (packages/meteor/dynamics_nodejs.js:60)
W2036-20:56:29.958(1)? (STDERR) at null.<anonymous> (packages/meteor/helpers.js:108)
W2036-20:56:29.959(1)? (STDERR) at MongoConnection.(anonymous function) [as insert] (packages/mongo-livedata/mongo_driver.js:491)
W2036-20:56:29.964(1)? (STDERR) at Meteor.Collection.(anonymous function) [as insert] (packages/mongo-livedata/collection.js:448)
W2036-20:56:29.965(1)? (STDERR) at app/server/server.js:37:20
W2036-20:56:29.966(1)? (STDERR) at null.<anonymous> (/home/russell/.meteorite/packages/node-csv-npm/Dsyko/meteor-node-csv/01be0e3e834a4f033121cb3fcc92c2697741170d/.build/npm/node_modules/csv/lib/to.js:274:14)
W2036-20:56:29.967(1)? (STDERR) at EventEmitter.emit (events.js:95:17)
W2036-20:56:29.971(1)? (STDERR) at null.<anonymous> (/home/russell/.meteorite/packages/node-csv-npm/Dsyko/meteor-node-csv/01be0e3e834a4f033121cb3fcc92c2697741170d/.build/npm/node_modules/csv/lib/index.js:214:17)
W2036-20:56:29.972(1)? (STDERR) at EventEmitter.emit (events.js:92:17)
W2036-20:56:29.975(1)? (STDERR) at Transformer.end (/home/russell/.meteorite/packages/node-csv-npm/Dsyko/meteor-node-csv/01be0e3e834a4f033121cb3fcc92c2697741170d/.build/npm/node_modules/csv/lib/transformer.js:241:17)
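The error message itself points at the fix: the callback passed to node-csv runs outside a Fiber, so anything that touches a Meteor collection inside it has to be wrapped with Meteor.bindEnvironment. A minimal sketch of that wrapping, assuming the same buffer, CSV() API and reas collection from the method above:

// Sketch only: wrap the async node-csv callback so reas.insert runs inside a Fiber.
CSV().from(
  buffer.toString(),
  {comment: '#', delimiter: ',', quote: ''}
)
.to.array(Meteor.bindEnvironment(function (data) {
  for (var row = 0; row < data.length; row++) {
    reas.insert({
      'firstname': data[row][0],
      'lastname': data[row][1]
      // ... remaining fields as in the question
    });
  }
}, function (error) {
  // second argument to bindEnvironment handles exceptions thrown by the callback
  console.log('Error in bound callback:', error);
}));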
I wanted to find a solution that didn't load the entire CSV file into memory, for use with large datasets. Here's my solution that uses Meteor.bindEnvironment, along with node-csv, to parse a CSV file into a Meteor Collection.
Thanks to the folks on #meteor for the help.
var csv = Meteor.require('CSV');
var fs = Meteor.require('fs');
var path = Npm.require('path');

function loadData() {
  var basepath = path.resolve('.').split('.meteor')[0];
  // Stream the CSV from disk instead of loading the whole file into memory
  csv().from.stream(
    fs.createReadStream(basepath + 'server/data/enron_data.csv'),
    {'escape': '\\'})
    // bindEnvironment keeps the insert running inside a Fiber
    .on('record', Meteor.bindEnvironment(function (row, index) {
      Emails.insert({
        'sender_id': row[0]
        // etc.
      });
    }, function (error) {
      console.log('Error in bindEnvironment:', error);
    }))
    .on('error', function (err) {
      console.log('Error reading CSV:', err);
    })
    .on('end', function (count) {
      console.log(count, 'records read');
    });
}
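For completeness, one way to trigger this (not shown in the original answer) would be to call loadData() from a server-side Meteor.startup block, so the import runs once when the server boots:

// Hypothetical usage: run the streaming import once at server startup.
if (Meteor.isServer) {
  Meteor.startup(function () {
    loadData();
  });
}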
So it turns out that because CSV() uses a callback and runs asynchronous code, I needed to use a 'Future'. For more explanation see http://gist.io/3443021
Here's my working code:
Meteor.methods({
  'uploadFile': function (file) {
    var Future = Npm.require('fibers/future');

    console.log(file.name + ' ' + file.type + ' ' + file.size);
    file.save('/home/russell/tmp', {});
    var buffer = new Buffer(file.data);

    // Set up the Future
    var fut = new Future();

    // Convert buffer (a CSV file) to an array
    CSV().from(
      buffer.toString(),
      {comment: '#', delimiter: ',', quote: ''}
    )
    .to.array(function (data) {
      var newRecords = [];
      for (var row = 0; row < data.length; row++) {
        console.log(data[row]);
        var newRecord = {
          'firstname': data[row][0],
          'lastname': data[row][1],
          'email': data[row][2],
          'emailshort': data[row][3],
          'emailmain': data[row][4],
          'domain': data[row][5]
        };
        //console.log(newRecord);
        newRecords.push(newRecord);
      }
      // at the end of the CSV callback,
      // return newRecords via the Future
      fut['return'](newRecords);
    });

    // Wait for the results of the conversion
    var results = fut.wait();
    console.log('results================');
    console.log(results);

    // now insert the new records from the file into our collection
    if (results.length) {
      for (var i in results) {
        reas.insert(results[i]);
      }
    }

    console.log('reas now looks like =====================');
    console.log(reas.find({}).fetch());
  } // uploadFile
});
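The client side isn't shown here; the upload itself goes through meteor-file, whose file object also supplies the save() call used in the method above, so a plain object wouldn't cover that part. Purely as a hypothetical illustration of the shape of the data the method reads (name, type, size, start, data), a direct Meteor.call from a file input might look like this:

// Hypothetical client-side sketch (meteor-file normally handles this part):
// read a chosen file and send its contents to the 'uploadFile' method.
Template.upload.events({
  'change input[type=file]': function (event) {
    var f = event.target.files[0];
    var reader = new FileReader();
    reader.onload = function () {
      Meteor.call('uploadFile', {
        name: f.name,
        type: f.type,
        size: f.size,
        start: 0,
        data: reader.result // ArrayBuffer with the CSV contents
      }, function (err) {
        if (err) console.log('uploadFile failed:', err);
      });
    };
    reader.readAsArrayBuffer(f);
  }
});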