import requests
from bs4 import BeautifulSoup
import csv
# Browser-like request headers so the site returns the full HTML page
headers = {
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/104.0.5112.102 Safari/537.36 Edg/104.0.1293.63',
}
url = 'https://old.baltbet.ru/BetInfoSE.aspx?id=688835725'
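# Fetch the bet info page using the headers defined above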
response = requests.get(url, headers=headers)
# Parse the results table and write each row's cell text to data.csv
soup = BeautifulSoup(response.text, 'html.parser')
rows = soup.find('table', class_='betinfo2').find_all('tr')

with open('data.csv', 'w', newline='', encoding='utf-8') as file:
    writer = csv.writer(file)
    for row in rows:
        columns = row.find_all('td')
        if not columns:
            continue  # skip rows without <td> cells (e.g. header rows)
        data_list = [column.text.strip() for column in columns]
        writer.writerow(data_list)
        print(data_list)