Я пробую создать асинхронный парсер на python с использованием библиотеки asyncio и aiohttp. Но появляется ошибка "raise RuntimeError('Event loop is closed')
RuntimeError: Event loop is closed"
Кто знает, в чём дело, — помогите, пожалуйста. Вот код:
import asyncio
import csv
import datetime
import json
import sys
import time

import aiohttp
import fake_useragent
import requests
from aiogram import Bot, types
from aiogram.dispatcher import Dispatcher
from aiogram.utils import executor
from aiogram.types import ReplyKeyboardRemove, ReplyKeyboardMarkup, KeyboardButton, InlineKeyboardMarkup, InlineKeyboardButton
from bs4 import BeautifulSoup
# Shared accumulator: each page task appends its own list of result
# <div> tags, so after the run this is a list of lists (one per page).
olx_items = [
]
async def get_page_data(session, page, url):
    """Fetch one paginated listings page and collect its item tags.

    Downloads ``url?page=<page>`` through the shared *session*, parses the
    HTML and appends the list of matching ``<div>`` tags to the module-level
    ``olx_items`` accumulator (one list per page).
    """
    request_headers = {
        'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
        'user-agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Mobile Safari/537.36'
    }
    page_url = url + f'?page={page}'
    async with session.get(url=page_url, headers=request_headers) as response:
        html = await response.text()
    page_soup = BeautifulSoup(html, 'lxml')
    found_items = page_soup.find_all('div', class_='css-1sw7q4x')
    olx_items.append(found_items)
async def gather_data():
    """Scrape every listings page concurrently.

    Fetches the first page to read the total page count from the pagination
    widget, then schedules one ``get_page_data`` task per page and awaits
    them all. Results accumulate in the module-level ``olx_items``.
    """
    url = 'https://www.olx.ua/d/nedvizhimost/kvartiry/'
    headers = {
        'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
        'user-agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Mobile Safari/537.36'
    }
    async with aiohttp.ClientSession() as session:
        # Use "async with" so the response is released back to the
        # connection pool; the original awaited session.get() directly
        # and never released the connection.
        async with session.get(url=url, headers=headers) as response:
            soup = BeautifulSoup(await response.text(), 'lxml')
        # The last <li> of the pagination list holds the total page count.
        pagination = int(
            soup.find('div', class_='pagination-wrapper')
                .find('ul', class_='pagination-list css-1vdlgt7')
                .find_all('li', class_='pagination-list-item')[-1]
                .text
        )
        tasks = [
            asyncio.create_task(get_page_data(session, page, url))
            for page in range(1, pagination + 1)
        ]
        await asyncio.gather(*tasks)
def main():
    """Entry point: run the async scraper to completion.

    Fix for the reported "RuntimeError: Event loop is closed": on Windows
    the default ProactorEventLoop is torn down by asyncio.run() while
    aiohttp's underlying transports are still finishing their close
    callbacks, which then hit the already-closed loop. The selector-based
    event loop does not have this problem, so install it before starting
    the loop (no-op on other platforms).
    """
    if sys.platform.startswith('win'):
        asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
    asyncio.run(gather_data())


if __name__ == '__main__':
    main()
    print(olx_items)