# TODO: make this script use all 4 CPU cores (it currently uses only one) and
# process 10 million lines in ~3 seconds (requires multiprocessing, not asyncio).
import os
import csv
import openpyxl
import re
from flask import Flask, request, jsonify, send_file
import requests
import secrets
import time
import asyncio
app = Flask(__name__)
# Base URL of this service itself (NOTE(review): https on 127.0.0.1 will not have a valid cert).
host = "https://127.0.0.1:5000"
token = "" # Replace with your Hostinger API access token
# Hostinger DNS API endpoint used by add_dns_record().
url = "https://hpanel.hostinger.com/api/dns/v1/direct/zone/resource-records?gaid=GA1.1.451463002.1704653339"
# Headers sent with every Hostinger API request; bearer token is empty until configured above.
headers = {
    "Authorization": f"Bearer {token}",
    "Content-Type": "application/json"
}
cookies = {}  # Extra cookies forwarded to the Hostinger API (none by default).
visited_files = set() # Set of already-scanned files (NOTE(review): never written to anywhere in this file)
def generate_key():
    """Return a random license-style key: four URL-safe tokens joined by dashes.

    Each token encodes 4 random bytes (6 base64url characters), so the key is
    always 27 characters long. Uses the ``secrets`` module, which is suitable
    for security-sensitive tokens.
    """
    parts = [secrets.token_urlsafe(4) for _ in range(4)]
    return '-'.join(parts)
def delete_temp_file():
    """Housekeeping loop: remove ``temp_result.txt`` every 10 minutes if present.

    Runs forever and blocks its caller, so it is intended to be started in a
    background thread. NOTE(review): nothing in this file actually starts it.
    """
    scratch = 'temp_result.txt'
    while True:
        time.sleep(600)
        # EAFP: attempting the delete avoids the exists/remove race.
        try:
            os.remove(scratch)
        except FileNotFoundError:
            pass
async def process_file(file_path, phone_number, results):
    """Scan *file_path* line by line for *phone_number*.

    On the first matching line, appends ``(basename, stripped line)`` to
    *results* and returns True immediately. If the whole file is scanned
    without a match (capped at 100M lines), the scan duration is appended to
    ``visited_files.txt`` and False is returned. Missing or undecodable files
    are reported to stdout and also yield False.

    NOTE(review): despite being declared async, this performs blocking file
    I/O with no awaits, so it will stall the event loop on large files.
    """
    try:
        with open(file_path, 'r', encoding='utf-8') as fh:
            started = time.time()
            scanned = 0
            for row in fh:
                if phone_number in row:
                    results.append((os.path.basename(file_path), row.strip()))
                    # First hit wins — stop reading this file.
                    return True
                scanned += 1
                # Safety cap: never read more than 100 million lines per file.
                if scanned == 100000000:
                    break
            elapsed_time = time.time() - started
            # Timing log is only written when the file yielded no match.
            with open('visited_files.txt', 'a') as log_file:
                log_file.write(f"{file_path}: {elapsed_time} seconds\n")
    except FileNotFoundError:
        print(f"File {file_path} not found.")
    except UnicodeDecodeError as e:
        print(f"Error decoding file {file_path}: {e}")
    return False
async def search_phone_number_in_files_async(phone_number, results, root_dir='test'):
    """Walk *root_dir* recursively and scan every file for *phone_number*.

    Matches are collected into *results* by process_file() (at most one match
    per file, since process_file returns True on its first hit).

    Fixes over the previous version:
    - removed the always-empty ``tasks`` list and the no-op
      ``asyncio.gather(*tasks)`` — no tasks were ever scheduled;
    - a match no longer ``break``s out of the per-directory loop, which used
      to silently skip the remaining files in that directory; we simply move
      on to the next file, as the original comment intended;
    - *root_dir* is now a parameter (default ``'test'`` preserves the old
      behavior) so callers/tests can point it at any directory.
    """
    for dirpath, _dirnames, filenames in os.walk(root_dir):
        for file_name in filenames:
            await process_file(os.path.join(dirpath, file_name), phone_number, results)
@app.route('/api/user/generate_key')
def generate_and_return_key():
    """HTTP wrapper around generate_key(): respond with ``{"key": <new key>}``."""
    return jsonify({'key': generate_key()})
@app.route('/api/domain/add_dns_record', methods=['POST'])
def add_dns_record():
    """Create an A record for a subdomain of illussion.fun via the Hostinger API.

    Expects a JSON body ``{"domain": <record name>, "ip": <address>}``.
    Returns 400 when either field is missing (the old code crashed with a
    TypeError on ``"" + None``), 200 on upstream success, or the upstream
    status code and an error message otherwise.
    """
    # silent=True: malformed/absent JSON yields None instead of raising.
    data = request.get_json(silent=True) or {}
    domain = data.get('domain')
    ip = data.get('ip')
    # Validate before building the payload instead of crashing on concat.
    if not domain or not ip:
        return jsonify({"error": "Both 'domain' and 'ip' are required."}), 400
    payload = {
        "domain": "illussion.fun",
        "resource_records": [
            {
                "name": domain,
                "type": "A",
                "ttl": 600,
                "records": [
                    {
                        "content": ip,
                        "disabled": False
                    }
                ]
            }
        ],
        "overwrite": False
    }
    response = requests.patch(url, json=payload, headers=headers, cookies=cookies)
    if response.status_code == 200:
        return jsonify({"message": "DNS ok."}), 200
    else:
        return jsonify({"error": f" DNS dsdas: {response.status_code}"}), response.status_code
def search_phone_number(phone_number):
    """Synchronous entry point: return a list of (filename, line) matches for *phone_number*."""
    hits = []
    asyncio.run(search_phone_number_in_files_async(phone_number, hits))
    return hits
def write_to_file(data):
    """Write search results to ``temp/probiv_telegram_onix_<timestamp>.txt``.

    *data* is an iterable of ``(source_filename, matched_line)`` pairs.
    Returns the bare file name (without the ``temp/`` prefix) — the download
    route re-adds the ``temp`` directory when serving the file.

    Fixes:
    - the old ``f"Database Name: (unknown)\\n"`` was an f-string with no
      placeholder and the ``filename`` loop variable was unused — the source
      file name is now actually interpolated;
    - ``temp/`` is created on demand so the first run does not crash with
      FileNotFoundError.
    """
    timestamp = int(time.time())
    os.makedirs('temp', exist_ok=True)
    temp_filename = f'temp/probiv_telegram_onix_{timestamp}.txt'
    output_filename = f'probiv_telegram_onix_{timestamp}.txt'
    with open(temp_filename, 'w') as file:
        for filename, line in data:
            file.write(f"Database Name: {filename}\n")
            file.write("=" * 50 + "\n")
            file.write(f"{line}\n")
            file.write("=" * 50 + "\n\n")
    return output_filename
@app.route('/api/probiv/telegram', methods=['POST'])
def process_request():
    """Search all databases for the posted phone number and return a download link."""
    phone = request.form.get('phone_number')
    report_name = write_to_file(search_phone_number(phone))
    return jsonify({'download_link': f'http://127.0.0.1:5000/api/probiv/download/{report_name}'})
@app.route('/api/probiv/download/<filename>', methods=['GET'])
def download_file(filename):
    """Serve a previously generated report from the ``temp/`` directory.

    Security fix: the default ``<filename>`` converter blocks ``/``, but a
    bare ``..`` (or a name with a backslash on Windows) could still escape
    ``temp/`` through ``os.path.join`` — reject anything that is not a plain
    file name. Also uses ``isfile`` so directories are never handed to
    send_file.
    """
    # Path-traversal guard: only a plain base name is acceptable.
    if filename != os.path.basename(filename) or filename in ('.', '..'):
        return jsonify({'error': 'File not found'}), 404
    file_path = os.path.join(os.getcwd(), 'temp', filename)
    if os.path.isfile(file_path):
        return send_file(file_path, as_attachment=True)
    return jsonify({'error': 'File not found'}), 404
if __name__ == '__main__':
    # NOTE(review): debug=True together with host='0.0.0.0' exposes the
    # Werkzeug interactive debugger to the whole network — never run this
    # configuration in production.
    app.run(debug=True, host='0.0.0.0')