import asyncio, aiohttp
# Cap on the number of simultaneous in-flight requests.
# NOTE(review): created at import time, before any event loop is running —
# on Python < 3.10 asyncio primitives bind to the loop present at creation;
# confirm the target Python version or move creation inside the coroutine.
sem = asyncio.Semaphore(10)
async def fetch(session, url):
    """Download *url* with *session* and return the response body length.

    Concurrency across calls is throttled by the module-level ``sem``
    semaphore; prints a short progress line once the body is read.
    """
    async with sem, session.get(url) as response:
        body = await response.text()
        print(url, "done")
        return len(body)
async def urls_to_symbols_count(urls):
    """Fetch every URL concurrently; return body lengths in input order."""
    async with aiohttp.ClientSession() as session:
        pending = (fetch(session, u) for u in urls)
        # gather preserves the order of its arguments, so results align with urls.
        return await asyncio.gather(*pending)
if __name__ == "__main__":
    urls = ["https://google.com/", "https://yandex.ru/", "https://toster.ru/"] * 80
    # asyncio.run() creates, runs, and closes the event loop itself.
    # The previous get_event_loop() + run_until_complete() pattern is
    # deprecated (warns since 3.10; get_event_loop() with no running loop
    # raises in newer versions) and also never closed the loop.
    lens = asyncio.run(urls_to_symbols_count(urls))
    # NOTE: urls contains each address 80 times, so the dict collapses to
    # one entry per distinct URL (keeping the last measured length).
    print(dict(zip(urls, lens)))