# NOTE: imports reconstructed from the calls used below; log() and the
# yellow/green/red colour helpers are assumed to be defined elsewhere in this project.
import json
import os
from configparser import RawConfigParser
from random import randint

import cloudscraper
from bs4 import BeautifulSoup
from selenium import webdriver
from selenium.webdriver.chrome.options import Options


async def backup_grab_logs():
    # load ini
    config = RawConfigParser()
    with open('settings.ini', 'r', encoding="utf-8") as ini:
        config.read_file(ini)
    user_name = config['GPORTAL']['user_name']
    password = config['GPORTAL']['password']
    login_url = 'https://id2.g-portal.com/login?redirect=https://www.g-portal.com/:regionPrefix/auth/login?redirectAfterLogin=%2F&defaultRegion=US'
    logs_url = config['GPORTAL']['log_url']
    # create logs folder if it doesn't exist
    if not os.path.exists(config['GPORTAL']['logs_folder']):
        os.makedirs(config['GPORTAL']['logs_folder'])
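    # Configure Chrome for headless operation and mask the usual Selenium
    # automation fingerprints before launching the browser.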
    options = Options()
    chrome_bin = os.environ.get("GOOGLE_CHROME_BIN")
    if chrome_bin:  # only override the binary path when the env var is actually set
        options.binary_location = chrome_bin
    options.add_experimental_option("excludeSwitches", ["enable-logging", "enable-automation"])
    options.add_experimental_option('useAutomationExtension', False)
    options.add_argument("--disable-blink-features=AutomationControlled")
    options.add_argument("--headless")  # prevent an actual browser window from opening
    options.add_argument("--disable-dev-shm-usage")
    options.add_argument("--no-sandbox")
    # the options object must be passed in, otherwise none of the settings above take effect
    browser = webdriver.Chrome(executable_path=r'chromedriver.exe', options=options)
    log(yellow("Connecting to G-Portal & downloading log files (backup method)."))
    browser.get(login_url)
    browser.find_element_by_id("login").send_keys(user_name)
    browser.find_element_by_id("password").send_keys(password)
    browser.implicitly_wait(randint(3, 6))  # random wait time to prevent pattern trigger
    browser.find_element_by_class_name("submit").click()
    browser.get(logs_url)
    browser.implicitly_wait(randint(3, 6))  # random wait time to prevent pattern trigger
    browser_cookies = browser.get_cookies()
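    # The logs page embeds the list of available log files as JSON in a
    # data-logs attribute; parse it and download each file in turn.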
    try:
        html = BeautifulSoup(browser.page_source, 'html.parser')
        select = html.find('div', {'class': 'wrapper logs'})
        loglist = select['data-logs']
        logs = json.loads(loglist)
        # sort log files by type
        for i in range(len(logs)):
            getid = logs["file_" + str(i + 1)]
            id = getid[getid.find('Logs') + 5:]
            type = id.split('_')[0]
            if config['GPORTAL'][type + '_file'] != '':
                if id < config['GPORTAL'][type + '_file']:
                    continue
            # download specific log file & create locally
            with cloudscraper.create_scraper() as session:
                # pass the Selenium browser cookies to the cloudscraper session
                for cookie in browser_cookies:
                    session.cookies.set(cookie['name'], cookie['value'])
                data = {'_method': 'POST', 'load': 'true', 'ExtConfig[config]': getid}
                headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66'}
                raw_response = session.post(logs_url, headers=headers, data=data)
                response = raw_response.text
                content = json.loads(response)
                lines = content["ExtConfig"]["content"].splitlines()
                file_path = config['GPORTAL']['logs_folder'] + id
                file = open(file_path, "a+", encoding='utf-8')
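                # Incremental download: if this file was fetched before, skip every line
                # up to and including the last line recorded in settings.ini, then append
                # only the lines that are new since that point.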
                found = False
                writing = False
                for line in lines:
                    # check name of log file
                    if id == config['GPORTAL'][type + '_file'] and not found:
                        # compare line to last line downloaded
                        if line == config['GPORTAL'][type + '_line']:
                            found = True
                        continue
                    else:
                        # add new line to file
                        file.write(line + '\n')
                        writing = True
                if writing:
                    if found:
                        log(green("Updating {}".format(id)))
                    else:
                        log(green("Creating {}".format(id)))
                file.close()
                config['GPORTAL'][type + '_file'] = id
                config['GPORTAL'][type + '_line'] = lines[-1]
                with open('settings.ini', 'w', encoding="utf-8") as update:
                    config.write(update)
                # pass cookies back to browser
                '''dict_resp_cookies = session.cookies.get_dict()
                response_cookies_browser = [{'name': name, 'value': value} for name, value in dict_resp_cookies.items()]
                c = [browser.add_cookie(c) for c in response_cookies_browser]'''
    except Exception:
        log(red('Failed to obtain logs from G-Portal (backup method)!'))
    browser.close()
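
# Usage sketch (an assumption, not part of the original paste): backup_grab_logs()
# is a coroutine, so it has to be awaited. If the surrounding project does not
# already drive it from its own event loop, it could be run standalone like this:
if __name__ == '__main__':
    import asyncio
    asyncio.run(backup_grab_logs())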