How do I loop over multiple URLs in the request options in Node.js?

368 Views Asked by At

// NOTE(review): a JavaScript object literal cannot hold more than one value per
// key — each duplicate 'url' entry below silently overwrites the previous one,
// so only the LAST url (page=11) is ever requested. Every page must be
// requested individually (see the answers below).
var request = require('request');
var options = {
  'method': 'GET',
  'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=1',//To get all the users data from the repos
  'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=2',
  'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=3',
  'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=4',
  'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=5',
  'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=6',
  'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=7',
  'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=8',
  'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=9',
  'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=10',
  'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=11',
  
  'headers': {
    'Accept': 'application/vnd.github.mercy-preview+json',//to get topics of the repos
    'Authorization': 'Bxxx xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
    'User-Agent' : 'sxxxxxxxxxxxxx'
  }
};
// Only ONE request is made here — for whichever url survived the overwrites.
request(options, function (error, response) { 
  if (error) throw new Error(error);
  console.log(response.body);
});

In the code above I want to loop over the URLs continuously until the last page. If anyone knows how to use pagination for this, please help me out.

3

There are 3 best solutions below

0
Niklas E. On BEST ANSWER

You cannot have multiple attributes for one object key. You have to call every URL individually. I solved this using asynchronous code, because looping with callback functions is confusing and dangerous with regard to the call stack.

const request = require('request');

// Configuration for the url generation; the placeholders in the template
// below are substituted per page before each request (see the loop further down).
const perPages = 100;  // repos per page (100 is the GitHub API maximum)
const startPage = 1;   // first page to fetch
const endPage = 11;    // last page to fetch
const url = 'https://api.github.com/orgs/organizationName/repos?per_page=%perPages%&page=%page%';

// define a asyncronous call for one url
/**
 * Perform a single GET request against one URL.
 * Wraps the callback-style `request` API in a Promise so callers can `await` it.
 *
 * @param {string} url - Fully substituted URL for one page of results.
 * @returns {Promise<object>} Resolves with the full response object; rejects on request error.
 */
async function callOneUrl(url) {
  // Per-request options; the headers are identical for every page.
  const options = {
    method: 'GET',
    url: url,
    headers: {
      // Preview media type so the response includes repo topics.
      Accept: 'application/vnd.github.mercy-preview+json',
      Authorization: 'Bxxx xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
      'User-Agent': 'sxxxxxxxxxxxxx'
    }
  };
  return new Promise((resolve, reject) => {
    request(options, function (error, response) {
      if (error) return reject(error);
      resolve(response);
    });
  });
}

// call each url with a for loop
// Call each url with a for loop. Awaiting inside the loop keeps the requests
// strictly sequential (page 1 completes before page 2 starts).
(async () => {
  for (let i = startPage; i <= endPage; i++) {
    // Substitute both placeholders for this page, then await the response.
    const response = await callOneUrl(
      url.replace('%perPages%', perPages).replace('%page%', i)
    );
    // handle response here
    console.log(response.body);
  }
})().catch((error) => {
  // FIX: the original rethrew inside the async IIFE, which only produced an
  // unhandled promise rejection (and `new Error(error)` discarded the original
  // stack). Handle the rejection explicitly and keep the original error intact.
  console.error(error);
  process.exitCode = 1;
});
3
Terry Lennox On

You can download a list of repos with a do...while loop. We'll set a maximum number of pages to download and exit when we reach either this or the last page.

I would suggest using the request-promise-native package to allow us to use the very nice async-await syntax.

Now, I've given the example of downloading repos for the mongodb org. You can easily replace with whatever one you wish.

I would also note that the request library is now deprecated; we can still use it, of course, but we should consider replacing it in the future.

We now also log the repo information and save it to the output file.

const rp = require("request-promise-native");
const fs = require("fs");

/**
 * Download the list of repos for a GitHub organisation, page by page,
 * and save the accumulated list to a JSON file.
 *
 * @param {string} org - GitHub organisation name, e.g. "mongodb".
 * @param {string} outputFile - Path the collected repo list is written to.
 */
async function downloadRepoInformation(org, outputFile) {

    let repoList = [];
    let page = 0;
    const resultsPerPage = 20;
    const maxPages = 10; // safety cap so we never download unboundedly
    const uri = `https://api.github.com/${"orgs"}/${org}/repos`.replace('/orgs/', '/orgs/') && `https://api.github.com/orgs/${org}/repos`;

    do {
        try {
            // `++page` advances BEFORE the request, so we fetch pages 1, 2, ...
            let response = await rp.get({ uri, json: true, qs: { per_page: resultsPerPage, page: ++page }, headers: {"User-Agent" : "request"} });
            console.log(`downloadRepoInformation: Downloaded page: ${page}, repos: ${response.length}...`);
            repoList = repoList.concat(response);
            console.log("downloadRepoInformation: response", JSON.stringify(response, null, 2));
            console.log("downloadRepoInformation: repoList.length:", repoList.length);
            // A short page means GitHub has no further results — stop early.
            if (response.length < resultsPerPage) {
                console.log(`downloadRepoInformation: Last page reached: exiting loop...`);
                break;
            }
        } catch (error) {
            console.error(`downloadRepoInformation: An error occurred:`, error);
            break;
        }
    // FIX: the original condition `page <= maxPages` ran one extra iteration
    // (after fetching page 10 it still fetched page 11). `<` caps the
    // download at exactly maxPages pages.
    } while (page < maxPages)

    console.log("downloadRepoInformation: download complete: repoList.length:", repoList.length);
    console.log("downloadRepoInformation: Saving to file:", outputFile);
    fs.writeFileSync(outputFile, JSON.stringify(repoList, null, 4));
}

// Example invocation: download repos for the "mongodb" org.
downloadRepoInformation("mongodb", "./repolist.json");
0
SNG On
const request = require('request-promise');
const urls = ["http://www.google.com", "http://www.example.com"];
// Fire all requests in parallel; each map entry is a pending Promise.
const promises = urls.map(url => request(url));
Promise.all(promises)
    .then((data) => {
        // data = [body1, body2] — the RESOLVED response bodies, in url order
        // (the original comment wrongly said it was an array of promises).
    })
    .catch((error) => {
        // FIX: Promise.all rejects as soon as any single request fails;
        // without this handler that rejection would be unhandled.
        console.error(error);
    });

Apart from the above, you can also use async.eachSeries, async.parallel, etc.