Not a member of Pastebin yet?
Sign up — it unlocks many cool features!
- >>> import requests
- >>> login_data = {'formPosted':'1', 'login_email':'me@example.com', 'password':'pw'}
- >>> r = requests.post('https://localhost/login.py', login_data)
- >>>
- >>> r.text
- u'You are being redirected <a href="profilePage?_ck=1349394964">here</a>'
- >>> r.cookies
- {'session_id_myapp': '127-0-0-1-825ff22a-6ed1-453b-aebc-5d3cf2987065'}
- >>>
- >>> r2 = requests.get('https://localhost/profile_data.json', ...)
- s = requests.session()
- s.post('https://localhost/login.py', login_data)
- #logged in! cookies saved for future requests.
- r2 = s.get('https://localhost/profile_data.json', ...)
- #cookies sent automatically!
- #do whatever, s will keep your cookies intact :)
- import urllib2
- import urllib
- from cookielib import CookieJar
- cj = CookieJar()
- opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
- # input-type values from the html form
- formdata = { "username" : username, "password": password, "form-id" : "1234" }
- data_encoded = urllib.urlencode(formdata)
- response = opener.open("https://page.com/login.php", data_encoded)
- content = response.read()
- >>> url = 'http://httpbin.org/cookies'
- >>> cookies = dict(cookies_are='working')
- >>> r = requests.get(url, cookies=cookies)
- >>> r.text
- '{"cookies": {"cookies_are": "working"}}'
- import pickle
- import datetime
- import os
- from urllib.parse import urlparse
- import requests
- class MyLoginSession:
- """
- a class which handles and saves login sessions. It also keeps track of proxy settings.
- It does also maintine a cache-file for restoring session data from earlier
- script executions.
- """
- def __init__(self,
- loginUrl,
- loginData,
- loginTestUrl,
- loginTestString,
- sessionFileAppendix = '_session.dat',
- maxSessionTimeSeconds = 30 * 60,
- proxies = None,
- userAgent = 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1',
- debug = True):
- """
- save some information needed to login the session
- you'll have to provide 'loginTestString' which will be looked for in the
- responses html to make sure, you've properly been logged in
- 'proxies' is of format { 'https' : 'https://user:pass@server:port', 'http' : ...
- 'loginData' will be sent as post data (dictionary of id : value).
- 'maxSessionTimeSeconds' will be used to determine when to re-login.
- """
- urlData = urlparse(loginUrl)
- self.proxies = proxies
- self.loginData = loginData
- self.loginUrl = loginUrl
- self.loginTestUrl = loginTestUrl
- self.maxSessionTime = maxSessionTimeSeconds
- self.sessionFile = urlData.netloc + sessionFileAppendix
- self.userAgent = userAgent
- self.loginTestString = loginTestString
- self.debug = debug
- self.login()
- def modification_date(self, filename):
- """
- return last file modification date as datetime object
- """
- t = os.path.getmtime(filename)
- return datetime.datetime.fromtimestamp(t)
- def login(self, forceLogin = False):
- """
- login to a session. Try to read last saved session from cache file. If this fails
- do proper login. If the last cache access was too old, also perform a proper login.
- Always updates session cache file.
- """
- wasReadFromCache = False
- if self.debug:
- print('loading or generating session...')
- if os.path.exists(self.sessionFile) and not forceLogin:
- time = self.modification_date(self.sessionFile)
- # only load if file less than 30 minutes old
- lastModification = (datetime.datetime.now() - time).seconds
- if lastModification < self.maxSessionTime:
- with open(self.sessionFile, "rb") as f:
- self.session = pickle.load(f)
- wasReadFromCache = True
- if self.debug:
- print("loaded session from cache (last access %ds ago) "
- % lastModification)
- if not wasReadFromCache:
- self.session = requests.Session()
- self.session.headers.update({'user-agent' : self.userAgent})
- res = self.session.post(self.loginUrl, data = self.loginData, proxies = self.proxies)
- if self.debug:
- print('created new session with login' )
- self.saveSessionToCache()
- # test login
- res = self.session.get(self.loginTestUrl)
- if res.text.lower().find(self.loginTestString.lower()) < 0:
- raise Exception("could not log into provided site '%s'"
- " (did not find successful login string)"
- % self.loginUrl)
- def saveSessionToCache(self):
- """
- save session to a cache file
- """
- # always save (to update timeout)
- with open(self.sessionFile, "wb") as f:
- pickle.dump(self.session, f)
- if self.debug:
- print('updated session cache-file %s' % self.sessionFile)
- def retrieveContent(self, url, method = "get", postData = None):
- """
- return the content of the url with respect to the session.
- If 'method' is not 'get', the url will be called with 'postData'
- as a post request.
- """
- if method == 'get':
- res = self.session.get(url , proxies = self.proxies)
- else:
- res = self.session.post(url , data = postData, proxies = self.proxies)
- # the session has been updated on the server, so also update in cache
- self.saveSessionToCache()
- return res
- if __name__ == "__main__":
- # proxies = {'https' : 'https://user:pass@server:port',
- # 'http' : 'http://user:pass@server:port'}
- loginData = {'user' : 'usr',
- 'password' : 'pwd'}
- loginUrl = 'https://...'
- loginTestUrl = 'https://...'
- successStr = 'Hello Tom'
- s = MyLoginSession(loginUrl, loginData, loginTestUrl, successStr,
- #proxies = proxies
- )
- res = s.retrieveContent('https://....')
- print(res.text)
- import requests
- import json
- authUrl = 'https://whatever.com/login'
- #The subsequent url
- testUrl = 'https://whatever.com/someEndpoint'
- #Whatever you are posting
- login_data = {'formPosted':'1', 'login_email':'me@example.com', 'password':'pw'}
- #The auth token or any other data that we will recieve from the authRequest.
- token = ''
- # Post the loginRequest
- loginRequest = requests.post(authUrl,login_data)
- print loginRequest.text
- # Save the request content to your variable. In this case I needed a field called token.
- token = str(json.loads(loginRequest.content)['token'])
- print token
- # Verify successfull login
- print loginRequest.status_code
- #Create your RequestsCookieJar for your subsequent requests and add the cookie
- jar = requests.cookies.RequestsCookieJar()
- jar.set('LWSSO_COOKIE_KEY', token)
- #Execute your next request(s) with the RequestCookieJar set
- r = requests.get(testUrl, cookies=jar)
- print(r.text)
- print(r.status_code)
- import requests
- username = "my_user_name"
- password = "my_super_secret"
- url = "https://www.my_base_url.com"
- the_page_i_want = "/my_json_data_page"
- session = requests.Session()
- # retrieve cookie value
- resp = session.get(url+'/login')
- csrf_token = resp.cookies['csrftoken']
- # login, add referer
- resp = session.post(url+"/login",
- data={
- 'username': username,
- 'password': password,
- 'csrfmiddlewaretoken': csrf_token,
- 'next': the_page_i_want,
- },
- headers=dict(Referer=url+"/login"))
- print(resp.json())
Add Comment
Please sign in to add a comment.