import requests
from bs4 import BeautifulSoup
# For each site, check whether a robots.txt file exists and log the result:
# reachable (2xx/3xx) URLs go to base.txt, failures to base_no.txt.
# NOTE: the original fetched every URL twice (once for .text, once for the
# status check) and built an unused BeautifulSoup object; both removed.
urls = ['http://site1.ru/', 'http://site2.ru/', 'http://site3.ru/', 'http://site4.ru/']
for url in urls:
    url += 'robots.txt'
    # A requests.Response is truthy exactly when status_code < 400.
    response = requests.get(url)
    if response:
        with open("base.txt", "a") as f:
            f.write(url + ' ' + "Файл найден" + "\n")
    else:
        with open("base_no.txt", "a") as f:
            f.write(url + ' ' + "Файл не найден" + "\n")
import json
# Build a small users payload and serialize it to a compact JSON string.
# NOTE: the original ended with a bare dict literal (the expected output
# pasted as code) — a no-op expression statement; kept here as a comment.
data = {
    'users': [
        {
            'id': 1,
            'name': 'user1',
        },
        {
            'id': 2,
            'name': 'user2',
        },
    ],
}
json_str = json.dumps(data)
# Resulting JSON:
# {"users": [{"id": 1, "name": "user1"}, {"id": 2, "name": "user2"}]}