MMMonster

GPortal auth 27/11

Nov 29th, 2020
import json
import os
from configparser import RawConfigParser
from random import randint
from time import sleep

import cloudscraper
from bs4 import BeautifulSoup
from selenium import webdriver
from selenium.webdriver.chrome.options import Options

# log(), red(), green() and yellow() are assumed to be logging/colour helpers
# defined elsewhere in this project.


async def backup_grab_logs():
    # load settings.ini
    config = RawConfigParser()
    with open('settings.ini', 'r', encoding="utf-8") as ini:
        config.read_file(ini)
    user_name = config['GPORTAL']['user_name']
    password = config['GPORTAL']['password']
    login_url = 'https://id2.g-portal.com/login?redirect=https://www.g-portal.com/:regionPrefix/auth/login?redirectAfterLogin=%2F&defaultRegion=US'
    logs_url = config['GPORTAL']['log_url']

    # create the logs folder if it doesn't exist
    if not os.path.exists(config['GPORTAL']['logs_folder']):
        os.makedirs(config['GPORTAL']['logs_folder'])

    # Chrome options: hide the automation fingerprint and run headless
    options = Options()
    chrome_bin = os.environ.get("GOOGLE_CHROME_BIN")
    if chrome_bin:  # only override the binary path if the env var is set
        options.binary_location = chrome_bin
    options.add_experimental_option("excludeSwitches", ["enable-logging", "enable-automation"])
    options.add_experimental_option('useAutomationExtension', False)
    options.add_argument("--disable-blink-features=AutomationControlled")
    options.add_argument("--headless")  # prevent an actual browser window from opening
    options.add_argument("--disable-dev-shm-usage")
    options.add_argument("--no-sandbox")
    # the options object must be passed in, otherwise headless mode never applies
    browser = webdriver.Chrome(executable_path=r'chromedriver.exe', options=options)
    log(yellow("Connecting to G-Portal & downloading log files (backup method)."))
    browser.get(login_url)
    browser.find_element_by_id("login").send_keys(user_name)
    browser.find_element_by_id("password").send_keys(password)
    sleep(randint(3, 6))  # random pause to avoid a predictable request pattern (implicitly_wait only sets a lookup timeout, it does not pause)
    browser.find_element_by_class_name("submit").click()
    browser.get(logs_url)
    sleep(randint(3, 6))  # random pause to avoid a predictable request pattern
    browser_cookies = browser.get_cookies()
    try:
        # the logs page stores the file list as JSON in a data attribute
        html = BeautifulSoup(browser.page_source, 'html.parser')
        select = html.find('div', {'class': 'wrapper logs'})
        logs = json.loads(select['data-logs'])
        # walk the log files; the filename prefix gives the log type
        for i in range(len(logs)):
            getid = logs["file_" + str(i + 1)]
            id = getid[getid.find('Logs') + 5:]
            type = id.split('_')[0]
            # skip files older than the last one downloaded for this type
            if config['GPORTAL'][type + '_file'] != '' and id < config['GPORTAL'][type + '_file']:
                continue
            # download this log file & write it locally
            with cloudscraper.create_scraper() as session:
                # pass the browser's session cookies to the scraper
                for cookie in browser_cookies:
                    session.cookies.set(cookie['name'], cookie['value'])
                data = {'_method': 'POST', 'load': 'true', 'ExtConfig[config]': getid}
                headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66'}
                raw_response = session.post(logs_url, headers=headers, data=data)
                content = json.loads(raw_response.text)
                lines = content["ExtConfig"]["content"].splitlines()
                file_path = config['GPORTAL']['logs_folder'] + id
                file = open(file_path, "a+", encoding='utf-8')
                found = False
                writing = False
                for line in lines:
                    # if this is the file we downloaded last time, skip lines
                    # until we reach the last line already written
                    if id == config['GPORTAL'][type + '_file'] and not found:
                        if line == config['GPORTAL'][type + '_line']:
                            found = True
                            continue
                    else:
                        # everything after that point is new, append it
                        file.write(line + '\n')
                        writing = True
                if writing:
                    if found:
                        log(green("Updating {}".format(id)))
                    else:
                        log(green("Creating {}".format(id)))
                file.close()
                # remember the newest file and its last line for the next run
                config['GPORTAL'][type + '_file'] = id
                if lines:  # guard against an empty log file
                    config['GPORTAL'][type + '_line'] = lines[-1]

            # persist the updated markers after each file
            with open('settings.ini', 'w', encoding="utf-8") as update:
                config.write(update)

        # pass cookies back to the browser (disabled)
        '''
        dict_resp_cookies = session.cookies.get_dict()
        response_cookies_browser = [{'name': name, 'value': value} for name, value in dict_resp_cookies.items()]
        c = [browser.add_cookie(c) for c in response_cookies_browser]
        '''

    except Exception as error:
        log(red('Failed to obtain logs from G-Portal (backup method): {}'.format(error)))

    browser.quit()  # quit() also shuts down the chromedriver process
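
For reference, here is a minimal sketch of the settings.ini layout the function reads and writes, and one way to invoke the coroutine. Only the key names come from the code above; the values, the example "admin" log type, and the URL are placeholders you would replace for your own server.

# Sketch only: write a template settings.ini with the keys backup_grab_logs()
# expects, then run the coroutine. Every value below is a placeholder, not a
# real account, URL or path; only the key names are taken from the function above.
import asyncio
from configparser import RawConfigParser

template = RawConfigParser()
template['GPORTAL'] = {
    'user_name': 'you@example.com',             # G-Portal account e-mail (placeholder)
    'password': 'changeme',                     # account password (placeholder)
    'log_url': 'https://www.g-portal.com/...',  # your server's logs page (placeholder)
    'logs_folder': 'logs/',                     # local folder for downloaded log files
    # one *_file / *_line pair per log type the server exposes, e.g. for "admin_..." files:
    'admin_file': '',                           # last file downloaded for this type
    'admin_line': '',                           # last line written from that file
}
with open('settings.ini', 'w', encoding='utf-8') as ini:
    template.write(ini)

asyncio.run(backup_grab_logs())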