#!/usr/bin/env python

import logging
import socket
import sys
import time
from urllib.parse import urlparse

import pandas as pd
import pymysql
import requests
import urllib3

# Silence the InsecureRequestWarning triggered by verify=False below
urllib3.disable_warnings()

API = 'https://api.ssllabs.com/api/v2/'

def check_version(url):
    """Fall back to a direct request to guess the HTTP version of a site."""
    response = requests.get(url, timeout=10, verify=False)

    # If the server sends an Upgrade header, treat that as an HTTP/2 hint
    http2 = response.headers.get("upgrade")

    if http2 is None:
        # urllib3 reports the raw protocol version as 10 (HTTP/1.0) or 11 (HTTP/1.1)
        if response.raw.version == 10:
            return "1.0"
        else:
            return "1.1"
    else:
        return "2"

def check_http(results, host):
    """Read the negotiated protocols from the SSL Labs results.

    Falls back to check_version() when the endpoint details are unavailable.
    """
    try:
        endpoints = results["endpoints"]
        df_endpoints = pd.DataFrame(endpoints)
        protocols = df_endpoints.details[0]['npnProtocols']

        if "h2" in protocols or "h2c" in protocols:
            return "2"
        elif "1.1" in protocols:
            return "1.1"
        else:
            return "1.0"
    except Exception:
        return check_version(host)

def requestAPI(path, payload=None):
    '''
       This is a helper method that takes the path to the relevant
       API call and the user-defined payload and requests the
       data/server test from Qualys SSL Labs.
       Returns JSON formatted data
    '''
    # Avoid a mutable default argument
    if payload is None:
        payload = {}

    url = API + path

    try:
        response = requests.get(url, params=payload)
    except requests.exceptions.RequestException:
        logging.exception('Request failed.')
        sys.exit(1)

    data = response.json()
    return data

def resultsFromCache(host, publish='off', startNew='off', fromCache='on', all='done'):
    path = 'analyze'
    payload = {
        'host': host,
        'publish': publish,
        'startNew': startNew,
        'fromCache': fromCache,
        'all': all
    }
    data = requestAPI(path, payload)
    return data

def newScan(host, publish='off', startNew='on', all='done', ignoreMismatch='on'):
    """Start a fresh SSL Labs scan and poll until it finishes."""
    path = 'analyze'
    payload = {
        'host': host,
        'publish': publish,
        'startNew': startNew,
        'all': all,
        'ignoreMismatch': ignoreMismatch
    }

    results = requestAPI(path, payload)

    # Drop startNew so the polling requests below don't keep restarting the scan
    payload.pop('startNew')

    # Poll every 30 seconds until the assessment is finished (or fails)
    while results['status'] not in ('READY', 'ERROR'):
        time.sleep(30)
        results = requestAPI(path, payload)

    return results

def response_time(url):
    """Return the response time of the site in seconds."""
    return requests.get(url).elapsed.total_seconds()

def check_ipv6(url):
    # List that will hold the IPv4 and (if available) IPv6 addresses
    ip = []

    # Parse the URL to keep only the network location (hostname)
    parsed_url = urlparse(url).netloc

    # Resolve the IPv4 address
    response = socket.gethostbyname(parsed_url)
    ip.append(response)

    try:
        # Try to resolve an IPv6 address as well
        ipv6_info = socket.getaddrinfo(parsed_url, None, family=socket.AF_INET6)

        # getaddrinfo returns a list of tuples; the sockaddr's first field is the address
        ip.append(ipv6_info[-1][-1][0])

    except Exception:
        ip.append("none")

    return ip

def check_uptime(url):
    try:
        # Fetch the page and treat a 200 response as "up"
        response = requests.get(url)
        return response.status_code == 200

    except requests.RequestException:
        return False

def check_site(url):
    # Run the SSL Labs scan and the individual checks for one site
    results = newScan(url)
    http = check_http(results, url)
    up = check_uptime(url)
    response = response_time(url)
    ips = check_ipv6(url)

    # Connect to the database
    connection = pymysql.connect(host='localhost',
                                 user='root',
                                 password='Azerty123',
                                 db='websites',
                                 charset='utf8mb4',
                                 cursorclass=pymysql.cursors.DictCursor)
    try:
        with connection.cursor() as cursor:
            # Store the collected statistics for this site
            sql = "INSERT INTO statistics (url, http_version, response_time, ipv4, ipv6, up) VALUES (%s,%s,%s,%s,%s,%s)"
            cursor.execute(sql, (url, http, response, ips[0], ips[1], up))
            connection.commit()
    finally:
        connection.close()


file = "websites.csv"
websites = pd.read_csv(file)

# Iterating over a DataFrame yields column names, not rows, so walk the
# first column instead (assumes the URLs live in the first CSV column)
for site in websites.iloc[:, 0]:
    check_site(site)
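
The script expects an existing MySQL database called websites with a statistics table, plus a websites.csv file next to the script. The paste does not show either, so the following is only a minimal setup sketch; the column types and the id primary key are assumptions inferred from the INSERT statement above.

    #!/usr/bin/env python
    # One-time setup sketch: create the assumed `statistics` table
    # (assumes the `websites` database already exists)
    import pymysql

    connection = pymysql.connect(host='localhost',
                                 user='root',
                                 password='Azerty123',
                                 db='websites',
                                 charset='utf8mb4')
    try:
        with connection.cursor() as cursor:
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS statistics (
                    id            INT AUTO_INCREMENT PRIMARY KEY,
                    url           VARCHAR(255),
                    http_version  VARCHAR(8),
                    response_time FLOAT,
                    ipv4          VARCHAR(45),
                    ipv6          VARCHAR(45),
                    up            TINYINT(1)
                )
            """)
        connection.commit()
    finally:
        connection.close()

websites.csv is expected to list one URL per row in its first column, for example:

    url
    https://example.com
    https://www.wikipedia.org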