Node.js: limiting the number of concurrent connections

I am creating an application that will make about a million calls to a remote API server. Will I be able to limit the number of connections, for example, to 10? Can I set the maximum number of sockets to 10?

I am trying to understand what these options do:

keepAlive: false, maxSockets: 999, maxFreeSockets: 1 

They appear in Node's http.request() call in the following code:

 var http = require('http');

 var inputData = [];
 for (var i = 1; i <= 5000; i++) {
     inputData.push('number' + i);
 }

 var options = {
     host: "localhost",
     port: 80,
     path: "/text.txt",
     keepAlive: false,
     maxSockets: 999,
     maxFreeSockets: 1
 };

 function fetchData(number) {
     return new Promise(function (resolve, reject) {
         var fetch = function (resp) {
             var body = '';
             resp.on('data', function (chunk) {
                 body += chunk;
             });
             resp.on('end', function () {
                 console.log(resp);
                 resolve();
             });
             resp.on('error', function (err) {
                 console.log('error');
             });
         };
         var req = http.request(options, fetch);
         req.end();
     });
 }

 Promise.all(inputData.map(number => fetchData(number)))
     .then(function (results) {
         console.log('finished');
     })
     .catch(function (error) {
         console.log('there was an error');
         console.log(error);
     });
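From what I can tell, these three settings actually belong to an http.Agent rather than to the plain request options, so I am not sure they have any effect where I put them. A minimal sketch of how I understand they would be attached (same localhost endpoint as above, with the 10-socket limit from my question):

 // Sketch of my current understanding (may be wrong - this is what I'm asking):
 // keepAlive, maxSockets and maxFreeSockets are http.Agent options, so they
 // only take effect when passed to an Agent that the request then uses.
 var http = require('http');

 var agent = new http.Agent({
     keepAlive: false,  // do not reuse sockets between requests
     maxSockets: 10,    // at most 10 concurrent sockets per host
     maxFreeSockets: 1  // at most 1 idle socket kept around (only matters with keepAlive)
 });

 var options = {
     host: "localhost",
     port: 80,
     path: "/text.txt",
     agent: agent       // without this line the three settings above are ignored
 };

 http.get(options, function (resp) {
     resp.resume(); // drain the response so the socket is freed
 });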
2 answers

You really don't want to fire 1,000,000 requests and somehow hope that maxSockets keeps it to 100 at a time. There are a whole bunch of reasons why that is not a good approach. Instead, you should use code of your own that limits the number of in-flight connections to 100 at a time.

There are several ways to do this:

  • Write your own code that starts 100 requests and then, each time one finishes, starts the next one.

  • Use Bluebird's Promise.map(), which has a built-in concurrency option that limits how many operations run at once (a short sketch follows this list).

  • Use the Async library's async.mapLimit(), which likewise has a built-in concurrency limit.
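For illustration, here is a minimal sketch of the Bluebird option, assuming the same inputData array and a promise-returning fetchData() like the one in the question:

 var Promise = require('bluebird');

 // run fetchData over inputData with at most 100 requests in flight at once
 Promise.map(inputData, function (number) {
     return fetchData(number);
 }, { concurrency: 100 }).then(function (results) {
     console.log('finished');
 });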

As for writing code for this, you can do something like this:

 function fetchAll() {
     var start = 1;
     var end = 1000000;
     var concurrentMax = 100;
     var concurrentCnt = 0;
     var cntr = start;
     return new Promise(function (resolve, reject) {
         // start up requests until the max concurrent requests are going
         function run() {
             while (cntr < end && concurrentCnt < concurrentMax) {
                 ++concurrentCnt;
                 fetchData(cntr++).then(function () {
                     --concurrentCnt;
                     run();
                 }, function (err) {
                     --concurrentCnt;
                     // decide what to do with the error here
                     // to continue processing more requests, call run() here
                     // to stop processing more requests, call reject(err) here
                 });
             }
             if (cntr >= end && concurrentCnt === 0) {
                 // all requests are done here
                 resolve();
             }
         }
         run();
     });
 }
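You would then kick it off like this (assuming fetchData() is your promise-returning request function):

 fetchAll().then(function () {
     console.log('all requests finished');
 }, function (err) {
     console.log('stopped because of an error:');
     console.log(err);
 });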

I decided to use the async library.

Here is my complete solution:

 var async = require('async');
 var http = require('http');

 var inputData = [];
 for (var i = 1; i <= 2000; i++) {
     inputData.push('number' + i);
 }

 var options = {
     host: "o2.pl",
     path: "/static/desktop.css?v=0.0.417",
     port: 80
 };

 function fetchData(number, callback) {
     var fetch = function (resp) {
         var body = '';
         resp.on('data', function (chunk) {
             body += chunk;
         });
         resp.on('end', function () {
             process.stdout.write('.');
             callback(null, body); // tell mapLimit this item is done
         });
         resp.on('error', function (err) {
             console.log('error');
             console.log(err);
             callback(err);
         });
     };
     var req = http.request(options, fetch);
     req.end();
 }

 async.mapLimit(inputData, 100, fetchData, function (err, result) {
     console.log('finished');
 });
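The signature is async.mapLimit(coll, limit, iteratee, callback): at most limit iteratees run at once, and if each one calls back with (null, result), the final callback receives the results in the original input order. A tiny self-contained sketch of the shape (a delayed echo instead of a real HTTP request):

 var async = require('async');

 async.mapLimit([1, 2, 3, 4, 5], 2, function (n, cb) {
     // stand-in for an HTTP request that takes some time
     setTimeout(function () {
         cb(null, n * 10);
     }, 100);
 }, function (err, results) {
     console.log(results); // [ 10, 20, 30, 40, 50 ] - input order is preserved
 });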

Thank you for your help.

