# Fan out one fetcher thread per last name so every search page is
# requested concurrently rather than one after another.
workers = []
for lastname in lastnames:
    # assumes search_host already ends with the query prefix (e.g. "...?q=") — TODO confirm
    lastname_page = search_host + lastname
    # NOTE(review): never populated in this chunk; result collection
    # presumably happens elsewhere (inside recieve_page_html?) — verify.
    lastname_paths = []
    worker = threading.Thread(target=recieve_page_html, args=(lastname_page,))
    worker.start()
    workers.append(worker)

# Block until every fetcher thread has finished.
for w in workers:
    w.join()
# Route both plain-HTTP and HTTPS traffic through the same SOCKS endpoint.
proxies = {
    scheme: f'socks{socks_type}://{host}:{port}'
    for scheme in ("http", "https")
}
import time
import threading
import random
def longworker(min_seconds=10, max_seconds=20):
    """Simulate a long-running worker by sleeping for a random duration.

    Sleeps a whole number of seconds chosen uniformly from the inclusive
    range [min_seconds, max_seconds], then reports how long it ran.

    Args:
        min_seconds: Lower bound (inclusive) of the sleep duration.
            Defaults to 10, matching the original hard-coded behavior.
        max_seconds: Upper bound (inclusive) of the sleep duration.
            Defaults to 20, matching the original hard-coded behavior.
    """
    t = random.randint(min_seconds, max_seconds)
    time.sleep(t)
    # Message intentionally left in Russian: "worker ran for {} seconds".
    print('воркер работал {} секунд'.format(t))
# Run one worker in the background as a daemon thread (so it cannot keep
# the process alive by itself) and wait at most 15 seconds for it.
w = threading.Thread(target=longworker, daemon=True)
w.start()
w.join(timeout=15)
# BUG FIX: Thread.isAlive() was deprecated and removed in Python 3.9;
# is_alive() is the supported spelling on every Python 3 version.
if w.is_alive():
    # The worker overran its 15-second budget; abandon it and exit
    # (the daemon flag lets the interpreter terminate anyway).
    print('Завершаем работу не дождавшись воркера')
    exit()