I have a loop like this:
var i, j, temparray, chunk = 200;
for (i = 0, j = document.mainarray.length; i < j; i += chunk) {
    temparray = document.mainarray.slice(i, i + chunk);
    var docs = collection.find({ id: { "$in": temparray } }).toArray();
    docs.then(function(singleDoc) {
        if (singleDoc) {
            console.log("single doc length : " + singleDoc.length);
            var t, len;
            for (t = 0, len = singleDoc.length; t < len; t++) {
                fs.appendFile("C:/Users/x/Desktop/names.txt", singleDoc[t].name + "\n", function(err) {
                    if (err) {
                        return console.log(err);
                    }
                });
            }
        }
    });
}
The loop runs twice: the first iteration fetches 200 elements and the second fetches 130. But when I open the .txt file, I only see 130 names. I suspect that, because of the asynchronous nature of Node.js, only the second chunk of the array is actually processed. What should I do to make sure every chunk of the array is processed? Thanks in advance.
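Would it help to wait for each chunk to finish before starting the next one? Here is a rough, untested sketch of what I mean, assuming fs.promises is available and that collection.find(...).toArray() returns a promise as in my code above:

    const fsp = require("fs").promises;

    async function writeAllNames() {
        var chunk = 200;
        for (var i = 0; i < document.mainarray.length; i += chunk) {
            var temparray = document.mainarray.slice(i, i + chunk);
            // wait for this chunk's query before moving on to the next chunk
            var docs = await collection.find({ id: { "$in": temparray } }).toArray();
            for (var t = 0; t < docs.length; t++) {
                // await each append so no write is dropped or interleaved
                await fsp.appendFile("C:/Users/x/Desktop/names.txt", docs[t].name + "\n");
            }
        }
    }

Is that the right direction, or is something else going on?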
EDIT: Here is my updated code:
var generalArr = [];
var i, j, temparray, chunk = 200;
for (i = 0, j = document.mainarray.length; i < j; i += chunk) {
    temparray = document.mainarray.slice(i, i + chunk);
    generalArr.push(temparray);
}

async.each(generalArr, function(item, callback) {
    var docs = collection.find({ id: { "$in": item } }).toArray();
    docs.then(function(singleDoc) {
        if (singleDoc) {
            console.log("single doc length : " + singleDoc.length);
            var t, len;
            for (t = 0, len = singleDoc.length; t < len; t++) {
                fs.appendFile("C:/Users/x/Desktop/names.txt", singleDoc[t].name + "\n", function(err) {
                    if (err) {
                        return console.log(err);
                    }
                });
            }
        }
    });
    callback(null);
});
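I also wondered whether I should only call callback after the query has resolved, something like this (just a sketch; I am not sure this is the correct use of async.each):

    async.each(generalArr, function(item, callback) {
        collection.find({ id: { "$in": item } }).toArray()
            .then(function(singleDoc) {
                if (singleDoc) {
                    for (var t = 0; t < singleDoc.length; t++) {
                        fs.appendFile("C:/Users/x/Desktop/names.txt", singleDoc[t].name + "\n", function(err) {
                            if (err) console.log(err);
                        });
                    }
                }
                callback(null); // signal done only after the query resolves
            })
            .catch(callback);
    }, function(err) {
        if (err) console.log(err);
    });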
When I change this line:
var docs = collection.find({ id: { "$in": item}}).toArray();
To this line:
var docs = collection.find({ id: { "$in": item}}).project({ name: 1 }).toArray();
It works and I can print all the names. I think there is a memory problem when I run the query without .project(). How can I make this work without using .project()? Should I increase some memory limit? Thanks in advance.
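Alternatively, would iterating the cursor one document at a time avoid loading everything into memory at once? Something like this sketch, assuming the driver's cursor.forEach(iterator, endCallback) form:

    var cursor = collection.find({ id: { "$in": item } });
    cursor.forEach(function(doc) {
        // only one document is held in memory at a time
        fs.appendFile("C:/Users/x/Desktop/names.txt", doc.name + "\n", function(err) {
            if (err) console.log(err);
        });
    }, function(err) {
        if (err) console.log(err); // called once all documents are processed
    });

Would that be a reasonable way around the problem?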