[SOLVED] MongoDB insert/update error dup key_id

Hello everyone, after a few weeks of absence, I come back to you again to ask you for help on a new problem.

I want to recover data from a MySQL database and transform it into JSON for integration into my MongoDB database.

When it’is a single insertion it works, but when it’is 45000 insertion it does not work and the following message is displayed :

(node:31032) UnhandledPromiseRejectionWarning: WriteError({"code":11000,"index":0,"errmsg":"E11000 duplicate key error collection: meteor.depart index: _id_ dup key: { : ObjectId('5b1527ee6161057938e0aef0') }","op":{"jourExpl
oitation":"2018-06-03T22:00:00.000Z","nomArret":"REPUBLIQUE","numeroArret":3005,"heureTheoriqueDepart":"2018-06-04T02:49:55.000Z","nomDestination":"GARE CENTRALE","numeroVehicule":1,"sens":"R","retard":"00:00:00","avance":"00:00:03","debutPerturbation":"1970-01
-01T00:00:00.000Z","finPerturbation":"1970-01-01T00:00:00.000Z","estParti":false,"_id":"5b1527ee6161057938e0aef0"}})

here is my code in server side :

imports/startup/server/mysqlData/index.js

SyncedCron.add({
    name: 'Synchronisation des données MongoDB depuis la BDSI.',
    schedule: function (parser) {
        // parser is a later.parse object
        return parser.text("at 13:52 pm");
    },
    job: function ()
    {
        import './upsertHoraire';
    }
});
SyncedCron.start();

and upsertHoraire.js :

let Future     = require("fibers/future");
let mysql      = require('mysql');
let sql = mysql.createConnection({
    host     : '127.0.0.1',
    user     : 'root',
    password : '',
    database : 'bdsi'
});

// Get all arrets data in distinct
let rawArrets = Arret.rawCollection();
let arretsDistinct = Meteor.wrapAsync(rawArrets.distinct, rawArrets);
let arrets = arretsDistinct("nomArret");

// global var used for insertMany();
manyInsert = Depart.rawCollection();


sql.connect();

for (let arret of arrets) {

 // array to contain my JSON data arret by arret.
    dataJsonInsert = [];

// sql to recover all useful data by arret var.
    sql.query('SELECT /*+ MAX_EXECUTION_TIME(60000) */ DISTINCT LIGNE.mnemoCom AS numeroVehicule, ARRET.mnemol AS numeroArret,HORAIRE.htd AS heureTheoriqueDepart, ARRET.nom AS nomArret, DEST.sens AS sens, DEST.nom AS nomDestination, HORAIRE.jour AS jourExploitation, VEHICULE.avance AS avance, VEHICULE.retard AS retard FROM DEST INNER JOIN (HORAIRE INNER JOIN (VEHICULE INNER JOIN (LIGNE INNER JOIN (CHAINAGE INNER JOIN (ARRET INNER JOIN ARRETCHN ON ARRET.ref = ARRETCHN.arret) ON CHAINAGE.ref = ARRETCHN.chainage) ON LIGNE.ref = CHAINAGE.ligne) ON VEHICULE.ligne = LIGNE.ref) ON HORAIRE.arretchn = arretchn.ref) ON (DEST.ligne = LIGNE.ref) AND (DEST.ref = CHAINAGE.dest) WHERE (((ARRET.nom)="' + arret + '" AND (HORAIRE.jour BETWEEN CURDATE() AND CURDATE()+2) AND ((CHAINAGE.type)="C") AND (ARRET.nom)<>DEST.nom)) GROUP BY LIGNE.mnemoCom, ARRET.mnemol, HORAIRE.htd, DEST.sens, DEST.nom, HORAIRE.jour, ARRET.nom ORDER BY HORAIRE.htd ASC, LIGNE.mnemoCom ASC, DEST.nom ASC', function (error, results, fields) {
        if (error)
            console.log("Exception : " + error);
        else {

            let upSert = function () {

                for (let result of results) {
                    /**
                     * transforme l'heure en date complète année-mois-jour heure:minute:seconde
                     * @type {Date}
                     */
                    let getDate = new Date(result.jourExploitation);
                    let hours = result.heureTheoriqueDepart.replace(/:/g, ',').split(',');
                    getDate.setHours(hours[0], hours[1], hours[2]);

                    /**
                     * Si les données existent dans mongoDB on effectue un update, sinon un insert.
                     */
                    let dataExist = Depart.find({
                        'numeroArret': parseInt(result.numeroArret),
                        'jourExploitation': result.jourExploitation,
                        'heureTheoriqueDepart': getDate,
                        "numeroVehicule": parseInt(result.numeroVehicule)
                    }, {limit: 1, fields: {"heureTheoriqueDepart": 1}, transform: null}).fetch();

                        if (dataExist.length === 0)
                        {
                         dataJsonInsert.push({
                            'jourExploitation': result.jourExploitation,
                            'nomArret': result.nomArret,
                            'numeroArret': parseInt(result.numeroArret),
                            'heureTheoriqueDepart': getDate,
                            'nomDestination': result.nomDestination,
                            'numeroVehicule': parseInt(result.numeroVehicule),
                            'sens': result.sens,
                            'retard': result.retard,
                            'avance': result.avance,
                            'debutPerturbation': new Date('1970-01-01 01:00:00.000'),
                            'finPerturbation': new Date('1970-01-01 01:00:00.000'),
                            'estParti': false
                        });
                    }
                }
            }.future();

            try {
                Future.task(function () {
                    upSert().wait();
                  // here my insert and error msg returned :(
                    if (dataJsonInsert.length > 0) 
                        manyInsert.insertMany(dataJsonInsert);
                }).detach();
            }
            catch (e) {
                console.log(e);
            }
        }
    });
}
sql.end();

I tried many things but always the same problem. Queries are executed too quickly.

thanks to everyone who will take the time to help me!

ps : Pay no attention to the variable dataExist :slight_smile:

As you are using insertMany, you could try the forceServerObjectId flag, which should always be unique:

manyInsert.insertMany(dataJsonInsert, { forceServerObjectId: true });

http://mongodb.github.io/node-mongodb-native/3.0/api/Collection.html#insertMany

2 Likes

It works, as always… soon, I won’t have the words to thank you!

2 Likes