I was testing the limits of MongoDB to see whether it would work for an upcoming project, and I noticed that upserts are pretty slow compared to inserts.
Of course I would expect them to be slower, but not (almost) an order of magnitude slower (7,400 versus 55,000 ops/sec). Here is the code (Node.js native driver, run on my desktop) that I used:
(async function() {
    // connect to a throwaway database and create a unique index on `text`
    let db = await require('mongodb').MongoClient.connect('mongodb://localhost:27017/mongo-benchmark-8764824692947');
    await db.collection('text').createIndex({text: 1}, {unique: true});

    let batch = db.collection('text').initializeOrderedBulkOp();
    let totalOpCount = 0;
    let batchOpCount = 0;
    let start = Date.now();

    while(1) {
        totalOpCount++;
        batchOpCount++;

        // flush the batch every 1000 ops and log the running throughput
        if(batchOpCount === 1000) {
            await batch.execute();
            batch = db.collection('text').initializeOrderedBulkOp();
            batchOpCount = 0;
            let secondsElapsed = (Date.now() - start) / 1000;
            console.log(`(${Math.round(totalOpCount/secondsElapsed)} ops per sec) (${totalOpCount} total ops)`);
        }

        // upsert a random number in [0, 1000000): insert it if it isn't
        // there yet, leave the existing document untouched otherwise
        let text = Math.floor(Math.random() * 1000000);
        batch.find({text}).upsert().updateOne({$setOnInsert: {text}});

        // stop after ~500k ops (the last partial batch is simply discarded)
        if(totalOpCount > 500000) {
            console.log("<< finished >>");
            await db.dropCollection('text');
            db.close();
            break;
        }
    }
})();
You can easily run it yourself by pasting it into index.js, running npm init -y and npm install --save mongodb, and then node .
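For comparison, a plain-insert version of the same loop would look roughly like the sketch below. This is not the exact code I ran for the insert numbers; the separate collection name and the omission of the unique index are just for illustration, so that duplicate keys don't abort the ordered batch.

(async function() {
    // same setup, but no unique index and plain inserts instead of upserts
    let db = await require('mongodb').MongoClient.connect('mongodb://localhost:27017/mongo-benchmark-8764824692947');
    let batch = db.collection('text_insert').initializeOrderedBulkOp();
    let totalOpCount = 0;
    let batchOpCount = 0;
    let start = Date.now();

    while(1) {
        totalOpCount++;
        batchOpCount++;

        if(batchOpCount === 1000) {
            await batch.execute();
            batch = db.collection('text_insert').initializeOrderedBulkOp();
            batchOpCount = 0;
            let secondsElapsed = (Date.now() - start) / 1000;
            console.log(`(${Math.round(totalOpCount/secondsElapsed)} ops per sec) (${totalOpCount} total ops)`);
        }

        // plain insert of a random number, no upsert machinery involved
        batch.insert({text: Math.floor(Math.random() * 1000000)});

        if(totalOpCount > 500000) {
            console.log("<< finished >>");
            await db.dropCollection('text_insert');
            db.close();
            break;
        }
    }
})();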
Upserts in mongo seem to be much slower than I expected. Is this normal, or am I doing something wrong, and is there a way to get closer to plain insert speed? Thanks!
Note: I'm using $setOnInsert because I only want to insert a document when it doesn't already exist, without touching documents that are already there.
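In other words, each batched op is equivalent to a single updateOne with upsert: true (a sketch, assuming the same db and text variables as in the script above):

// equivalent single-operation form of one batched op from the script above
await db.collection('text').updateOne(
    {text},                     // match on the text value
    {$setOnInsert: {text}},     // only set fields when a new document is inserted
    {upsert: true}              // insert if no matching document exists
);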