I'm trying to import a table from MySQL into MongoDB directly, without any schema changes.
I wrote a small Node.js script for that, and my issue is with the way I implemented it.
Maybe I'm hitting some limit on MongoDB inserts by calling insert inside a loop. I think this problem would not have come up if it were the other way around (maybe not!).
So here's the thing: the MySQL table has more than 100,000 rows, but once the loop passes around 30,000 iterations the number of inserted documents falls behind.
So if there are 100,000 rows in the MySQL table, after a complete import using the script below I end up with only about 37,000 documents at most.
My strong suspicion is that it's either the node script / node-mongodb connector, some bug in the script itself, or, lastly, a limit on concurrent inserts in MongoDB.
I'm pasting the script below, hoping someone can suggest a way around it.
Thanks,
var http = require('http'),
    mysql = require('mysql'),
    mongo = require('mongodb').MongoClient,
    format = require('util').format;

var connection = mysql.createConnection({
    user: "xxx",
    password: "xxx",
    database: "mydb"
});
connection.connect();

var query = "select * from mytable";
var mysqlrows = '';

connection.query(query, function(err, rows, fields){
    if (err) throw err;
    console.log(rows.length + ' rows found.');

    mongo.connect('mongodb://root:[email protected]:27017/mydb', function(err, db){
        if (err) throw err;

        var collection = db.collection('mytable');
        for (var i = 0; i < rows.length; i++)
        {
            //console.log(JSON.stringify(rows[i]));
            (function(i){
                collection.insert(rows[i], function(err, docs){});
                console.log(i);
            })(i);
        }
        db.close();
    });
});
connection.end();
insert can take an array of docs; have you tried just passing rows into insert directly instead of one by one?
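For example, here is a minimal sketch of that suggestion, assuming the same callback-style driver API used in the question: hand the whole rows array to insert in one call, and tear down the connections only inside its callback.

connection.query(query, function(err, rows, fields){
    if (err) throw err;
    mongo.connect('mongodb://root:[email protected]:27017/mydb', function(err, db){
        if (err) throw err;
        // One round trip: insert accepts an array of documents.
        db.collection('mytable').insert(rows, function(err, result){
            if (err) throw err;
            console.log('inserted ' + rows.length + ' rows');
            db.close();        // close only after the insert has completed
            connection.end();  // same for the MySQL connection
        });
    });
});

This also sidesteps what is likely eating your documents: in the original script, db.close() and connection.end() run immediately, while the per-row inserts are still in flight asynchronously. For a table this size you may still want to chunk rows into smaller batches, since a single insert command can run into MongoDB's message size limits.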