Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- import os #File exsistance
- import xml.etree.cElementTree as ETX # for config file
- import requests # for login and html
- from bs4 import BeautifulSoup # to parse html
class GradesParser(object):
    """Log into TeacherEase, fetch a student's progress page, and parse grades.

    State is deliberately class-level and shared across instances: one
    requests session (so login cookies persist), one logged-in flag, and
    one config-file path.
    """

    Sesh = requests.session()   # shared HTTP session; holds the login cookies
    LoggedIn = False            # shared login-state flag
    Config_File = "config.xml"  # credentials file path

    def __init__(self, AutoRun=False):
        """If AutoRun is true, run the full config/login/logout cycle.

        Steps: ensure a config file exists, read the credentials from it,
        log in, and (on success) log straight back out.
        """
        if AutoRun:
            self.CheckConfig()
            creds = self.ReadConfig()
            if self.Login(creds.get('username'), creds.get('password')):
                self.LogOut()

    def CheckConfig(self):
        """Ensure the config file exists; prompt the user and create it if not."""
        if not os.path.isfile(GradesParser.Config_File):
            username = input("Please enter your username for config file creation: ")
            password = input("Password: ")
            self.CreateConfig(username, password)

    def CreateConfig(self, user, password):
        """Write <Grades><username>..</username><password>..</password></Grades>.

        NOTE(security): the password is stored in plain text on disk.
        """
        root = ETX.Element("Grades")
        ETX.SubElement(root, "username").text = user
        ETX.SubElement(root, "password").text = password
        # BUG FIX: ElementTree.write() returns None, so the old
        # print(tree.write(...)) only ever printed "None"; just write the file.
        ETX.ElementTree(root).write(GradesParser.Config_File)

    def ReadConfig(self):
        """Return {'username': ..., 'password': ...} read from the config file."""
        root = ETX.parse(GradesParser.Config_File).getroot()
        # The first two children are the <username> and <password> elements.
        return {child.tag: child.text for child in root[:2]}

    def Login(self, user, passw):
        """POST credentials; return True on success, False on failure.

        Detection trick: TeacherEase redirects a FAILED login back to the
        default login page, so we fetch an unauthenticated baseline copy of
        a restricted page and compare it against what the session sees.
        Identical text means the login did not take.
        """
        payload = dict(password=passw, email=user)
        GradesParser.Sesh.post('https://www.teacherease.com/common/LoginResponse.aspx', data=payload)
        # Unauthenticated baseline — module-level get on purpose, NOT the session.
        LoginPage = requests.get('http://www.teacherease.com/parents/main.aspx')
        Restricted = GradesParser.Sesh.get('http://www.teacherease.com/parents/main.aspx')
        if LoginPage.text == Restricted.text:
            # BUG FIX: this previously returned 1 (truthy!), so callers doing
            # `if self.Login(...)` treated a FAILED login as a success.
            print("FATAL: Login Failed!")
            return False
        GradesParser.LoggedIn = True
        return True

    def LogOut(self):
        """GET the logout URL; return True on success, False if not logged in."""
        if not self.CheckLoggedIn():
            print("Error: You must be logged in first.")
            return False
        GradesParser.Sesh.get("http://www.teacherease.com/common/logout.aspx")
        GradesParser.LoggedIn = False
        # BUG FIX: the old exit() call here killed the interpreter, so any
        # caller code after LogOut() (e.g. printing the grades) never ran.
        return True

    def CheckLoggedIn(self):
        """Return the shared logged-in flag as a bool."""
        return GradesParser.LoggedIn

    def GetGradeData(self):
        """Return the raw HTML of the student progress page, or None when not logged in."""
        # BUG FIX: the old code tested `self.CheckLoggedIn` — the bound method
        # object, which is always truthy — instead of calling it.
        if self.CheckLoggedIn():
            # BUG FIX: return the page text directly; the old repr(...) wrapped
            # the HTML in quotes and backslash-escaped it before parsing.
            return GradesParser.Sesh.get("http://www.teacherease.com/parents/StudentProgressSummary.aspx?s=2557880").text
        print("Error: You must be logged in first.")
        return None

    def ParseGradeData(self, HTML):
        """Parse progress-page HTML and return a list of grade strings.

        The third nested <table> inside body > table holds the grade rows;
        each <tr> is one entry, and the first row is a junk/header row.
        """
        # Explicit parser avoids bs4's "no parser specified" warning and
        # platform-dependent parser selection.
        soup = BeautifulSoup(HTML, "html.parser")
        Table = soup.body.table.find_all("table")[2]
        # Skip the junk first row; collect the text of the rest.
        return [row.text for row in Table.find_all("tr")[1:]]
def main():
    """Load credentials, log in, print the parsed grade list, then log out."""
    parser = GradesParser()
    parser.CheckConfig()
    creds = parser.ReadConfig()
    data = []
    if parser.Login(creds.get('username'), creds.get('password')):
        data = parser.ParseGradeData(parser.GetGradeData())
    # BUG FIX: print BEFORE logging out — GradesParser.LogOut() historically
    # called exit(), which made a trailing print(data) unreachable.
    print(data)
    if parser.CheckLoggedIn():
        parser.LogOut()


if __name__ == "__main__":
    main()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement