import asyncio
import random
from multiprocessing import Pool
from concurrent.futures import ProcessPoolExecutor
from loguru import logger
HOST = 'localhost'
# A fixed unprivileged port. Computing the port with random.randint at import
# time is a bug under the 'spawn' start method (the default on Windows/macOS):
# each pool worker re-imports this module and draws a DIFFERENT random port,
# so the client can never reach the server. A single fixed value is shared by
# both processes regardless of start method.
PORT = 54321
def run_async(func):
    """Call *func* to obtain a coroutine and drive it on a fresh event loop.

    Used as a picklable entry point so each worker process can run its own
    asyncio program.
    """
    coroutine = func()
    asyncio.run(coroutine)
async def read_all(reader: asyncio.StreamReader) -> bytes:
    """Read from *reader* until EOF and return everything received.

    Chunks are collected in a list and joined once at the end; repeated
    ``bytes += chunk`` concatenation is quadratic in the total size.

    NOTE: this returns only when the peer closes (or half-closes) the
    connection — callers must ensure the other side actually signals EOF.
    """
    chunks = []
    while not reader.at_eof():
        chunks.append(await reader.read(1024))
    return b''.join(chunks)
async def server_handler(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    """Per-connection handler: read the full request, send one response, close.

    Closing the writer at the end is essential: the client reads the response
    until EOF, so leaving the connection open would block the client forever.
    """
    logger.debug('server start receiving...')
    # Blocks until the client half-closes its side (write_eof / close).
    data = await read_all(reader)
    logger.info(f'server received {data!r}')
    data = b'server\'s response'
    logger.debug(f'server preparing to send {data!r}')
    writer.write(data)
    await writer.drain()
    logger.info(f'server sent {data!r}')
    # Signal EOF to the client so its read-until-EOF loop can finish.
    writer.close()
    await writer.wait_closed()
async def run_server():
    """Bind a TCP server on (HOST, PORT) and serve connections until cancelled."""
    logger.debug('server is starting...')
    tcp_server = await asyncio.start_server(server_handler, HOST, PORT)
    logger.debug('server started')
    # The async-with block guarantees the server socket is closed on exit.
    async with tcp_server:
        logger.debug('serving server forever')
        await tcp_server.serve_forever()
async def run_client():
    """Periodically connect to the server, send one request, log the reply.

    Fixes a deadlock in the original loop: the server reads the request until
    EOF, but the client never signaled EOF, so the server never responded and
    both sides waited on each other forever. Each iteration now opens a fresh
    connection, half-closes it after sending (``write_eof``), reads the full
    response, and closes the socket before sleeping.
    """
    while True:
        logger.debug('client is starting...')
        reader, writer = await asyncio.open_connection(HOST, PORT)
        logger.debug('client started')
        data = b'client\'s request'
        logger.debug(f'client preparing to send {data!r}')
        writer.write(data)
        await writer.drain()
        # Half-close: tells the server "request complete" so its
        # read-until-EOF loop can return and it can answer.
        writer.write_eof()
        logger.info(f'client sent {data!r}')
        logger.debug('client starting receiving...')
        data = await read_all(reader)
        logger.info(f'client received {data!r}')
        writer.close()
        await writer.wait_closed()
        await asyncio.sleep(3)
if __name__ == '__main__':
    # Run the server and the client in two separate OS processes; each process
    # builds its own event loop via run_async. (A ProcessPoolExecutor with
    # executor.map(run_async, ...) behaves equivalently here.)
    # NOTE: run_server never returns, so pool.map blocks until interrupted.
    with Pool(processes=2) as pool:
        pool.map(run_async, [run_server, run_client])