const puppeteer = require('puppeteer');
const fs = require('fs');
const endOfLine = require('os').EOL;
// Base URL of the paginated tag listing; the 1-based page number is appended.
const pageStart = "https://toster.ru/tags/?page=";
// CSS selector matching the tag-name links on each listing page.
const selector = ".card__head-title a";
// Output file; lines are appended (stream is opened with flag 'a' below).
const fileName = "tags.txt";
// Number of listing pages to scrape — hard-coded; update if the site grows.
const pages = 62;
// Scrape the tag names from all `pages` listing pages concurrently (one
// browser tab per page) and append them, numbered, to `fileName`.
(async () => {
  const browser = await puppeteer.launch();
  try {
    const promises = [];
    for (let i = 1; i <= pages; i++) {
      promises.push(browser.newPage().then(async page => {
        const url = pageStart + i;
        console.log(url);
        await page.goto(url, {timeout: 100000});
        try {
          // $$eval already passes a real Array — no Array.from needed.
          return await page.$$eval(selector, elements =>
            elements.map(element => element.innerText)
          );
        } finally {
          // Close the tab once scraped so we don't hold 62 pages open.
          await page.close();
        }
      }));
    }
    // One array of tag names per page; flatten into a single list.
    // (Fix: was `pagesDataByLines = reduce(...)` — an implicit global,
    // an O(n^2) spread-flatten, and a throw on an empty result.)
    const pagesData = await Promise.all(promises);
    const pagesDataByLines = pagesData.flat();
    const logger = fs.createWriteStream(fileName, {
      flags: 'a'
    });
    pagesDataByLines.forEach((line, index) => {
      logger.write(`${index}. ${line} ${endOfLine}`);
    });
    // Flush and close the stream so data isn't lost when the process exits.
    logger.end();
  } finally {
    // Always release the Chromium process, even if a navigation failed.
    await browser.close();
  }
})();
// First time writing a scraper, so there's a 99.99% chance something here is hacky — but it works =)