nodejs multiple http requests in loop

I'm trying to make a simple feed reader in Node and I'm facing a problem with multiple requests in node.js. For example, I have an array of URLs, something like:

urls = ["http://url1", "http://url2", "http://url3", ...];

Now I want to get the contents of each URL. My first idea was to use for(var i in urls), but that's not a good idea. The best option would be to do it asynchronously, but I don't know how to do that.

Any ideas?


I got this code:

// Fetch every URL and collect the response bodies.
// NOTE: `let` (block-scoped) is essential here — with `var`, the loop
// finishes before any callback fires, so `i` would equal urls.length
// inside every callback and `urls[i]` would be undefined.
var data = [];
for (let i = 0; i < urls.length; i++) {
    http.get(urls[i], function (response) {
        console.log('Response: ', response.statusCode, ' from url: ', urls[i]);
        var body = '';
        // Body arrives in chunks; accumulate until 'end' fires.
        response.on('data', function (chunk) {
            body += chunk;
        });
        response.on('end', function () {
            data.push(body); // full body for this URL is now available
        });
    }).on('error', function (e) {
        console.log('Error: ', e.message);
    });
}
The problem is that the "http.get..." line is called first for each element in the loop, and only afterwards is the response.on('data') event fired for each response, and after that response.on('end'). It makes a mess and I don't know how to handle this.



By default, Node HTTP requests are asynchronous. You can start them sequentially in your code and call a function that runs when all requests are done. You can either do it by hand (count the finished vs. started requests) or use async.js.

This is the no-dependency way (error checking omitted):

var http = require('http');
var urls = ["", ""];
var responses = [];
var completed_requests = 0;

// Fire all requests; each callback pushes its response and bumps the
// counter. When the counter reaches urls.length, every request has
// finished and the responses array is complete.
// `let` avoids leaking `i` as an implicit global.
for (let i in urls) {
    http.get(urls[i], function (res) {
        responses.push(res);
        completed_requests++;
        if (completed_requests == urls.length) {
            // All downloads done, process responses array
        }
    });
}
You need to check that the end event (data complete) has fired exactly as many times as there are requests... Here's a working example:

var http = require('http');
var urls = ['', ''];
var completed_requests = 0;

urls.forEach(function (url) {
  // One chunk list per request, captured by this callback's closure.
  var responses = [];
  http.get(url, function (res) {
    res.on('data', function (chunk) {
      responses.push(chunk); // accumulate body chunks for this URL
    });
    res.on('end', function () {
      // Post-increment: fires the "all done" branch on the last request.
      if (completed_requests++ == urls.length - 1) {
        // All downloads are completed
        console.log('body:', responses.join());
      }
    });
  });
});

You can use any promise library with a ".all" implementation. I use the RSVP library; it's simple enough.

// List of files to fetch; each entry pairs a source URL with the
// local filename to write the download to.
var downloadFileList = [{ url: 'http://stuff', dataname: 'filename to download' }];

// Map every entry to a promise that resolves when its file has been
// fully written to disk, and rejects on a request error.
var ddownload = downloadFileList.map(function (id) {
  var dataname = id.dataname;
  var url = id.url;
  return new RSVP.Promise(function (fulfill, reject) {
    var stream = fs.createWriteStream(dataname);
    stream.on('close', function () {
      console.log(dataname + ' downloaded');
      fulfill();
    });
    request(url).on('error', function (err) {
      reject(err);
    }).pipe(stream);
  });
});

// ddownload is an array, so allSettled is the right combinator
// (hashSettled expects an object of promises). Neither is a
// constructor, so no `new`.
RSVP.allSettled(ddownload);

The problem can be easily solved using a closure. Make a function to handle the request and call that function in the loop. Every time the function is called, it has its own lexical scope and, using the closure, it is able to retain the URL even after the loop ends. And even if the response arrives in streams, the closure handles that too.

const request = require("request");

// Build the request options for one item; the base URL is prefixed
// to the per-item suffix.
function getTheUrl(data) {
    var options = {
        url: "" + data
    };
    return options;
}

// Fire the request; `url` is captured in this call's own closure, so
// each invocation logs the URL it was actually called with even
// though the responses come back asynchronously.
function consoleTheResult(url) {
    request(url, function (err, res, body) {
        console.log(url);
    });
}

for (var i = 0; i < 10; i++) {
    consoleTheResult(getTheUrl(i));
}

I know this is an old question, but I think a better solution would be to use JavaScript's Promise.all():

const request = require('request-promise');
const urls = ["", ""];
// Kick off every request at once; map gives one promise per URL.
const promises = urls.map((url) => request(url));
// Promise.all resolves once every request has resolved, with the
// results in the same order as `urls` (fail-fast on first rejection).
Promise.all(promises).then((data) => {
    // data = [body1, body2]
});

Recent Questions

Top Questions

Home Tags Terms of Service Privacy Policy DMCA Contact Us

©2020 All rights reserved.