Insert document loop - RangeError: Maximum call stack size exceeded

I am taking my very first steps with Node and MongoDB, and I have recently hit this RangeError wall.

Here's what I am trying to do: I have a file that contains a list of countries that I would like to add to my MongoDB database. This would be part of my "seed" mechanism to get the app running.

I load the JSON and then iterate through the collection of objects, adding them one by one to the 'Countries' collection. However, every time I run the code, I get a "RangeError: Maximum call stack size exceeded".

I have googled around, but none of the suggested solutions seem to apply to me. My guess is there is something wrong with my insertCountry function...

Anyways, here's my code:

var mongoose = require('mongoose');
var countries = require('./seed/countries.json');


// mongodb
var Country = mongoose.Schema({
    name: String,
    code: String,
    extra: [Extra]
});

var Extra = mongoose.Schema({
    exampleField: Boolean,
    anotherField: Boolean
});


var mCountry = mongoose.model('Countries', Country);
var mExtra = mongoose.model('Extras', Extra);

// do connection
mongoose.connect('...');

var db = mongoose.connection;
db.on('error', console.error.bind(console, 'connection error'));
db.once('open', function callback() {

});


// async function
var insertCountry = function(document, callback) {

    db.model('Countries').count({code: document.code}, function (err, count) {
        if (count < 1) {
            db.collection('Countries').insert(document, function (err, result) {
                if (!err) {
                    console.log('country ' + document.name + ' added');
                }
                else {
                    console.log('- [' + document.name + '] ' + err);
                }
            });
        }
        callback(null,document);
    });
};


// doing countries
var Country = mongoose.model('Countries');
var Extras = mongoose.model('Extras');


for(i = 0; i < countries.length; i++) 
{

    nCountry = new Country();
    nCountry.name = countries[i].name;
    nCountry.code = countries[i].code;
    nCountry.benefits = new Extras();
    nCountry.benefits.exampleField = false;
    nCountry.benefits.anotherField = false;

    insertCountry(nCountry, function (err, value) {
        console.log(value.name + ' added to collection (callback)');
    });
}

I have been using some guides I found to build this, so this might not be optimal code. Any best practices, standards, guides or tutorials you can share are most welcome!

Your callback is in the wrong place: it does not wait for the insert operation to complete before firing its own callback. Altering your code:

var insertCountry = function(document, callback) {

    db.model('Countries').count({code: document.code}, function (err, count) {
        if (count < 1) {
            db.collection('Countries').insert(document, function (err, result) {
                if (!err) {
                    console.log('country ' + document.name + ' added');
                }
                else {
                    console.log('- [' + document.name + '] ' + err);
                }
                // only signal completion once the insert has actually finished
                callback(null,document);
            });
        }
        else {
            // nothing to insert, but still signal completion
            callback(null,document);
        }
    });
};

That is part of your problem, but it does not completely solve it. The other part is the loop, which also does not wait for the wrapping function to complete before moving on to the next iteration. You want something like async.eachSeries in order to wait for each insert to complete before performing the next one. This is mostly why you are exceeding the call stack:

var async = require('async');   // npm install async

async.eachSeries(
    countries,
    function(current,callback) {
       // make your nCountry object from "current", then
       insertCountry(nCountry,function(err,value) {
          // do something, then signal this iteration as done
          callback(err);
       });
    },
    function(err) {
       // called when everything is done; err is set if anything failed
       console.log( "done" );
    }
);

There is really still an issue with the array, which must be reasonably large if you are exceeding the call stack limit. You should probably look at using event streams to process the source rather than loading everything into memory in an array.

Personally, if you were just trying not to insert duplicates for a field and had MongoDB 2.6 available, I would just use the Bulk Operations API with "unordered operations" and allow non-fatal failures on the duplicate keys. Coupled with the fact that bulk operations are sent in "batches" and not one at a time, this is much more efficient than checking for the document's presence on every request:

var countrySchema = mongoose.Schema({
    name: String,
    code: { type: String, unique: true },  // define a unique index on the code
    extra: [Extra]
});

// the bulk API below needs the compiled model, not the schema
var Country = mongoose.model('Countries', countrySchema);

var insertCountries = function(countries,callback) {
    var bulk = Country.collection.initializeUnorderedBulkOp();
    var counter = 0;

    async.eachSeries(
        countries,
        function(current,callback) {
            // same object construction as before to build nCountry
            bulk.insert(nCountry);
            counter++;

            // only send once every 1000
            if ( counter % 1000 == 0 ) {
                bulk.execute(function(err,result) {
                   // err should generally not be set,
                   // but result would contain any duplicate key errors
                   // along with the other insert responses

                   // reset the bulk op and move on to the next batch
                   bulk = Country.collection.initializeUnorderedBulkOp();
                   callback();
                });
            } else {
                callback();
            }
        },
        function(err) {
            // send anything still queued
            if ( counter % 1000 != 0 ) {
                bulk.execute(function(err,result) {
                    // same as before, but no need to reset
                    callback(err);
                });
            } else {
                callback(err);
            }
        }
    );
};

mongoose.on("open",function(err,conn) {
    insertCountries(countries,function(err) {
        console.log("done");
    });
});

Keep in mind that, unlike the methods implemented directly on the mongoose models, the native driver methods require that a connection is actually established before they can be called. Mongoose "queues up" model operations for you, but otherwise you need something to make sure the connection is actually open. The "open" event is used for that in the example above.
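As a small illustration of that difference, here is a minimal sketch (the connection string and the country values are made up for the example): a mongoose model call issued before the connection opens is simply buffered, while anything on Model.collection goes straight to the native driver and should wait for "open":

var mongoose = require('mongoose');

var Country = mongoose.model('Countries', mongoose.Schema({
    name: String,
    code: { type: String, unique: true }
}));

mongoose.connect('mongodb://localhost/test');   // hypothetical connection string

// Safe even though the connection may not be open yet: mongoose buffers it.
Country.create({ name: 'Portugal', code: 'PT' }, function(err, doc) {
    if (!err) console.log('created ' + doc.name);
});

// Native driver access is NOT buffered, so wait for the "open" event first.
mongoose.connection.on('open', function() {
    Country.collection.count({}, function(err, count) {
        console.log(count + ' documents in the collection');
    });
});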

Take a look at event streams as well. If you are constructing an array large enough to cause a problem by missing callback execution then you probably should not be loading it all in memory from whatever your source is. Stream processing that source combined with an approach as shown above should provide efficient loading.
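
As a rough sketch of that idea, assuming the seed file is a single JSON array and that the JSONStream package (not something your current code uses) is installed, you could stream the documents off disk and feed them into unordered bulk inserts in batches, pausing the stream while each batch executes. This reuses the Country model and connection from above:

var fs = require('fs');
var JSONStream = require('JSONStream');   // npm install JSONStream

mongoose.connection.on('open', function() {
    var bulk = Country.collection.initializeUnorderedBulkOp();
    var counter = 0;

    var stream = fs.createReadStream('./seed/countries.json')
        .pipe(JSONStream.parse('*'));     // emits one country object at a time

    stream.on('data', function(doc) {
        bulk.insert(doc);
        counter++;

        // flush every 1000 documents, pausing the stream while the batch runs
        if ( counter % 1000 == 0 ) {
            stream.pause();
            bulk.execute(function(err,result) {
                // duplicate key errors show up in result rather than aborting
                bulk = Country.collection.initializeUnorderedBulkOp();
                stream.resume();
            });
        }
    });

    stream.on('end', function() {
        // send anything still queued
        if ( counter % 1000 != 0 ) {
            bulk.execute(function(err,result) {
                console.log('done');
            });
        } else {
            console.log('done');
        }
    });
});

That way only a small window of the source file is ever held in memory, and the inserts still go over the wire in batches rather than one document at a time.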