# -*- coding: utf-8 -*-
# g-portal logs downloader for scum servers
import json
import asyncio
from bs4 import BeautifulSoup
from configparser import RawConfigParser
from datetime import datetime
import cfscrape
import requests
import warnings
# Suppress DeprecationWarnings at runtime — presumably emitted by the scraping
# dependencies and/or the asyncio event-loop APIs used below (TODO confirm);
# note this also hides any genuinely useful deprecation notices.
warnings.filterwarnings("ignore", category=DeprecationWarning)
def log(text):
    """Print *text* with a HH:MM:SS prefix and append it to logs_connect.txt.

    Parameters
    ----------
    text : str
        Message to echo on stdout and persist to the local activity log.
    """
    now = datetime.now()
    print('[%s] %s' % (now.strftime('%H:%M:%S'), text))
    # 'a' creates the file on first use; 'with' guarantees the handle is
    # closed (the original also did a pointless f.seek(0) right before close).
    with open('logs_connect.txt', 'a', encoding="utf-8") as f:
        # BUG FIX: original format was '%d.%m.%Y %H:%S' (hour:SECOND), which
        # silently dropped the minutes; '%H:%M:%S' is clearly what was meant.
        datesave = now.strftime('%d.%m.%Y %H:%M:%S')
        f.write(f'[{datesave}] {text}\n')
def help():
    """Print instructions for filling in the scumlogs.ini credentials file."""
    for message in (
        '\nPlease edit scumlogs.ini and include your g-portal credentials, use:',
        ' user = gportal email or username',
        ' password = gportal password',
        ' serverid = gportal server id',
        ' loc = com (for gportal international) or us (for gportal us)',
        ' folder = blank for local or path folder to store your log files',
        ' leave the rest of the parameters as is\n',
    ):
        print(message)
def load_configini():
    """Load the [GPORTAL] section of scumlogs.ini into the global ``configini``.

    Deliberately opens the file explicitly so a missing scumlogs.ini raises
    FileNotFoundError — callers rely on that to fall back to a blank config.
    """
    global configini
    parser = RawConfigParser()
    with open('scumlogs.ini', 'r', encoding="utf-8") as handle:
        parser.read_file(handle)
    configini = dict(parser['GPORTAL'])
def save_configini():
    """Write the global ``configini`` dict back to scumlogs.ini under [GPORTAL]."""
    writer = RawConfigParser()
    writer.add_section('GPORTAL')
    for key, value in configini.items():
        writer.set('GPORTAL', key, value)
    with open('scumlogs.ini', 'w', encoding="utf-8") as handle:
        writer.write(handle)
def get_token():
    """Post the credentials from ``configini`` to URL_LOGIN and scrape a token.

    The token is extracted from the first <script> tag of the login response.
    Returns the token string, or None after logging the error if any step of
    the request/parse fails.
    """
    with cfscrape.create_scraper() as scraper:
        try:
            credentials = {
                '_method': 'POST',
                'login': configini['user'],
                'password': configini['password'],
                'rememberme': '1',
            }
            browser_headers = {
                'Cache-Control': 'max-age=0',
                'Upgrade-Insecure-Requests': '1',
                'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko); charset=UTF-8',
            }
            login_page = scraper.post(URL_LOGIN, headers=browser_headers, data=credentials).text
            first_script = str(BeautifulSoup(login_page, 'html.parser').find_all('script')[0])
            # The token sits between the second '=' and the next double quote.
            return first_script.split('=')[2].split('"')[0]
        except Exception as ter:
            log(f'get_token error: {ter}')
async def read_logs():
    """Download new SCUM server log files from g-portal and append them locally.

    Reads credentials/state from the global ``configini`` (scumlogs.ini), logs
    in, fetches the server's logs page, and appends not-yet-seen log lines to
    per-type files under ``configini['folder']``.  The ``*_file``/``*_line``
    config keys record the last file and last line processed per log type so
    repeated runs only append new content.

    NOTE(review): the parsing of the page's 'data-logs' attribute is commented
    out and ``logs`` is hard-coded to an empty list, so the download loop below
    currently never executes — the function only dumps the fetched page
    (apparently a debugging state; confirm before relying on it).
    """
    log('=====================================================================')
    # Every key that must exist in configini; *_file/*_line hold resume state.
    values = ('user','password','serverid','loc','folder','admin_file','admin_line','chat_file','chat_line','kill_file','kill_line','login_file','login_line','violations_file','violations_line')
    print('scumlogs v1.0, scum server logs downloader from gportal')
    try:
        load_configini()
    except:
        # Missing/unreadable scumlogs.ini: start from an empty configuration.
        global configini
        configini = {}
    # Backfill any missing keys with '' so later lookups never KeyError.
    for value in values:
        if value not in configini:
            configini[value] = ''
    # Normalize the folder so a filename can be appended directly.
    if configini['folder'] != '':
        if configini['folder'][-1:] != '/' and configini['folder'][-1:] != '\\':
            configini['folder'] = configini['folder'] + '/'
    save_configini()
    # URL_LOGIN is global because get_token() reads it.
    global URL_LOGIN
    if configini['loc'] == 'com':
        loc = 'eur'
        URL_LOGIN = 'https://id2.g-portal.com/login?redirect=https://www.g-portal.com/:regionPrefix/auth/login?redirectAfterLogin=%2F&defaultRegion=EU'
    else:
        loc = 'int'
        URL_LOGIN = 'https://id2.g-portal.com/login?redirect=https://www.g-portal.com/:regionPrefix/auth/login?redirectAfterLogin=%2F&defaultRegion=US'
    URL_LOGS = 'https://www.g-portal.com/{0}/server/scum/{1}/logs'.format(loc, configini['serverid'])
    # e.g. https://www.g-portal.com/eur/server/scum/500119/logs
    headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko); charset=UTF-8'}
    with cfscrape.create_scraper() as session:
        print(session)
        try:
            log('connecting g-portal...')
            token = get_token()
            log(f'Token: {token}')
            #payload = {'_method': 'POST', 'login': configini['user'], 'password': configini['password'], 'rememberme': '1'}  # worked previously
            #payload = {'_method': 'POST', 'login': configini['user'], 'password': configini['password'], 'rememberme': '1', 'captcha_token': str(token)}#token
            payload = {'_method': 'POST', 'login': configini['user'], 'password': configini['password'], 'captcha_token': token, 'rememberme': '1'}#token
            # First request logs in (cookies kept by the session); the second
            # fetches the logs page with the authenticated session.
            raw_response = session.post(URL_LOGIN, headers=headers, data=payload, allow_redirects=True)
            raw_response = session.get(URL_LOGS, headers=headers, timeout=(10, 20))#verify=True,
            #log(f'Acces: {raw_response}')
            response = raw_response.text
            #html = BeautifulSoup(response, 'html.parser')
            html = BeautifulSoup(response, 'lxml')
            log(html.prettify())
            # Disabled scraping of the page's 'data-logs' JSON attribute:
            #server = html.find('div', {'class': 'server'})
            #print(f'SERVER: {server}')
            #select = html.find('div', {'class': 'wrapper logs'})
            #print(f'SELECT: {select}')
            #loglist = select['data-logs']
            #logs = json.loads(loglist)
            logs = []  # NOTE(review): hard-coded empty -> the loop below is dead code.
            print(range(len(logs)))
            for i in range(len(logs)):
                # getid: server-side path of log file i+1; id: its basename
                # (everything after 'Logs/').
                getid = logs["file_" + str(i + 1)]
                id = (getid[int(getid.find('Logs')) + 5:])
                type = id.split('_')[0]  # admin / chat / kill / login / violations
                # Skip files lexicographically older than the last processed one.
                if configini[type + '_file'] != '':
                    if id < configini[type + '_file']:
                        continue
                # Ask g-portal for the file's content.
                payload = {'_method': 'POST', 'load': 'true', 'ExtConfig[config]': getid}
                raw_response = session.post(URL_LOGS, headers=headers, data=payload)
                response = raw_response.text
                content = json.loads(response)
                lines = content["ExtConfig"]["content"].splitlines()
                filename = configini['folder'] + id
                file = open(filename, "a+", encoding='utf-8')
                found = False
                writing = False
                for line in lines:
                    # When re-reading the file we stopped in last time, skip
                    # everything up to (and including) the last saved line,
                    # then append only what follows it.
                    if id == configini[type + '_file'] and not found:
                        if line == configini[type + '_line']:
                            found = True
                        continue
                    else:
                        file.write(line + '\n')
                        writing = True
                if writing:
                    if found:
                        log('updating {}'.format(id))
                    else:
                        log('creating {}'.format(id))
                file.close()
                # Persist resume state: last file and last line written.
                configini[type + '_file'] = id
                configini[type + '_line'] = lines[-1]
                save_configini()
        except Exception as rl_er:
            log(f'error connecting, check connectivity and scumlogs.ini: {rl_er}')
            help()
if __name__ == '__main__':
    # asyncio.run() creates, runs, and closes the event loop in one call.
    # The previous get_event_loop()/run_until_complete()/close() pattern is
    # deprecated since Python 3.10 and is a likely source of the
    # DeprecationWarning this script filters at the top of the file.
    asyncio.run(read_logs())