vissu295

Cricinfo fantasy league

Apr 18th, 2011
#! /usr/bin/python
#----------------------------------------------------------------------
#
# Author:      Laszlo Nagy
#
# Copyright:   (c) 2005 by Szoftver Messias Bt.
# Licence:     BSD style
#
#
#----------------------------------------------------------------------
# Cricinfo Fantasy League
#----------------------------------------------------------------------
#
# Author:      Kasi Viswanadh Yakkala
#
# Usage:       http://www.viswanadh.com/2011/04/cricinfo-fantasy-league.html
#
#----------------------------------------------------------------------

import os
import getopt
import re
import md5
import urllib
import urllib2
import mimetypes
#from gzip import GzipFile
import cStringIO
from cPickle import loads,dumps
import cookielib
import random
import sys
import base64

# Fantasy team id and Cricinfo login credentials: edit these before running.
tid = 1063062 # Team XI
login_email = "name@domain.com"
password = "password"

class MozillaCacher(object):
    """A dictionary-like object that can cache results on a storage device."""
    def __init__(self,cachedir='.cache'):
        self.cachedir = cachedir
        if not os.path.isdir(cachedir):
            os.mkdir(cachedir)
    def name2fname(self,name):
        return os.path.join(self.cachedir,name)
    def __getitem__(self,name):
        if not isinstance(name,str):
            raise TypeError()
        fname = self.name2fname(name)
        if os.path.isfile(fname):
            return file(fname,'rb').read()
        else:
            raise IndexError()
    def __setitem__(self,name,value):
        if not isinstance(name,str):
            raise TypeError()
        fname = self.name2fname(name)
        if os.path.isfile(fname):
            os.unlink(fname)
        f = file(fname,'wb+')
        try:
            f.write(value)
        finally:
            f.close()
    def __delitem__(self,name):
        if not isinstance(name,str):
            raise TypeError()
        fname = self.name2fname(name)
        if os.path.isfile(fname):
            os.unlink(fname)
    def __iter__(self):
        raise NotImplementedError()
    def has_key(self,name):
        return os.path.isfile(self.name2fname(name))

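# Illustrative sketch (not part of the original paste): MozillaCacher behaves like
# a dict keyed by string and stores each value as a file under the cache
# directory, so a page fetched once can be re-read without hitting the network.
# The key and value below are placeholders:
#
#   cache = MozillaCacher('.cache')        # creates the directory if needed
#   cache['abc123'] = '<html>...</html>'   # writes the page to .cache/abc123
#   if cache.has_key('abc123'):
#       html = cache['abc123']             # reads it back later
#   del cache['abc123']                    # removes the cached file
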
class MozillaEmulator(object):
    def __init__(self,cacher={},trycount=0):
        """Create a new MozillaEmulator object.

        @param cacher: A dictionary-like object that can cache search results on a storage device.
            You can use a simple dictionary here, but it is not recommended.
            You can also put None here to disable caching completely.
        @param trycount: The download() method will retry the operation if it fails. You can specify -1 for infinite retrying.
            A value of 0 means no retrying. A value of 1 means one retry, etc."""
        self.cacher = cacher
        self.cookies = cookielib.CookieJar()
        self.debug = False
        self.trycount = trycount
    def _hash(self,data):
        h = md5.new()
        h.update(data)
        return h.hexdigest()

    def build_opener(self,url,postdata=None,extraheaders={},forbid_redirect=False):
        txheaders = {
            'Accept':'text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5',
            'Accept-Language':'en,hu;q=0.8,en-us;q=0.5,hu-hu;q=0.3',
#            'Accept-Encoding': 'gzip, deflate',
            'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.7',
#            'Keep-Alive': '300',
#            'Connection': 'keep-alive',
#            'Cache-Control': 'max-age=0',
        }
        for key,value in extraheaders.iteritems():
            txheaders[key] = value
        req = urllib2.Request(url, postdata, txheaders)
        self.cookies.add_cookie_header(req)
        if forbid_redirect:
            redirector = HTTPNoRedirector()
        else:
            redirector = urllib2.HTTPRedirectHandler()

        http_handler = urllib2.HTTPHandler(debuglevel=self.debug)
        https_handler = urllib2.HTTPSHandler(debuglevel=self.debug)

        u = urllib2.build_opener(http_handler,https_handler,urllib2.HTTPCookieProcessor(self.cookies),redirector)
        u.addheaders = [('User-Agent','Mozilla/5.0 (Windows; U; Windows NT 5.1; hu-HU; rv:1.7.8) Gecko/20050511 Firefox/1.0.4')]
        if postdata is not None:
            req.add_data(postdata)
        return (req,u)

    def download(self,url,postdata=None,extraheaders={},forbid_redirect=False,
            trycount=None,fd=None,onprogress=None,only_head=False):
        """Download a URL with GET or POST methods.

        @param postdata: It can be a string that will be POST-ed to the URL.
            When None is given, the method will be GET instead.
        @param extraheaders: You can add/modify HTTP headers with a dict here.
        @param forbid_redirect: Set this flag if you do not want to handle
            HTTP 301 and 302 redirects.
        @param trycount: Specify the maximum number of retries here.
            0 means no retry on error. Using -1 means infinite retrying.
            None means the default value (that is self.trycount).
        @param fd: You can pass a file descriptor here. In this case,
            the data will be written into the file. Please note that
            when you save the raw data into a file then it won't be cached.
        @param onprogress: A function that has two parameters:
            the size of the resource and the downloaded size. This will be
            called for each 1KB chunk. (If the HTTP header does not contain
            the content-length field, then the size parameter will be zero!)
        @param only_head: Create the openerdirector and return it. In other
            words, this will not retrieve any content except HTTP headers.

        @return: The raw HTML page data, unless fd was specified. When fd
            was given, the return value is undefined.
        """
        if trycount is None:
            trycount = self.trycount
        cnt = 0
        while True:
            try:
                key = self._hash(url)
                if (self.cacher is None) or (not self.cacher.has_key(key)):
                    req,u = self.build_opener(url,postdata,extraheaders,forbid_redirect)
                    openerdirector = u.open(req)
                    if self.debug:
                        print req.get_method(),url
                        print openerdirector.code,openerdirector.msg
                        print openerdirector.headers
                    self.cookies.extract_cookies(openerdirector,req)
                    if only_head:
                        return openerdirector
                    if openerdirector.headers.has_key('content-length'):
                        length = long(openerdirector.headers['content-length'])
                    else:
                        length = 0
                    dlength = 0
                    if fd:
                        while True:
                            data = openerdirector.read(1024)
                            dlength += len(data)
                            fd.write(data)
                            if onprogress:
                                onprogress(length,dlength)
                            if not data:
                                break
                    else:
                        data = ''
                        while True:
                            newdata = openerdirector.read(1024)
                            dlength += len(newdata)
                            data += newdata
                            if onprogress:
                                onprogress(length,dlength)
                            if not newdata:
                                break
                        #data = openerdirector.read()
                        if self.cacher is not None:
                            self.cacher[key] = data
                else:
                    data = self.cacher[key]
                #try:
                #    d2= GzipFile(fileobj=cStringIO.StringIO(data)).read()
                #    data = d2
                #except IOError:
                #    pass
                return data
            except urllib2.URLError:
                cnt += 1
                if (trycount > -1) and (trycount < cnt):
                    raise
                # Retry :-)
                if self.debug:
                    print "MozillaEmulator: urllib2.URLError, retrying ",cnt


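    # Illustrative sketch (not part of the original class): typical calls to
    # download(). The URLs and form string below are placeholders.
    #
    #   dl = MozillaEmulator(cacher=None)                          # None disables caching
    #   html = dl.download("http://example.com/")                  # plain GET
    #   html = dl.download("http://example.com/login", "a=1&b=2")  # POST form data
    #   head = dl.download("http://example.com/", only_head=True)  # headers only
    #   html = dl.download("http://example.com/", trycount=3, forbid_redirect=True)
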
    def post_multipart(self,url,fields, files, forbid_redirect=True):
        """Post fields and files to an http host as multipart/form-data.
        fields is a sequence of (name, value) elements for regular form fields.
        files is a sequence of (name, filename, value) elements for data to be uploaded as files
        Return the server's response page.
        """
        content_type, post_data = encode_multipart_formdata(fields, files)
        result = self.download(url,post_data,{
            'Content-Type': content_type,
            'Content-Length': str(len(post_data))
        },forbid_redirect=forbid_redirect
        )
        return result


class HTTPNoRedirector(urllib2.HTTPRedirectHandler):
    """This is a custom http redirect handler that FORBIDS redirection."""
    def http_error_302(self, req, fp, code, msg, headers):
        e = urllib2.HTTPError(req.get_full_url(), code, msg, headers, fp)
        if e.code in (301,302):
            if 'location' in headers:
                newurl = headers.getheaders('location')[0]
            elif 'uri' in headers:
                newurl = headers.getheaders('uri')[0]
            e.newurl = newurl
        raise e
    # Treat permanent (301) redirects the same way; the base class would
    # otherwise still follow them through its own http_error_301 handler.
    http_error_301 = http_error_302


def encode_multipart_formdata(fields, files):
    """
    fields is a sequence of (name, value) elements for regular form fields.
    files is a sequence of (name, filename, value) elements for data to be uploaded as files
    Return (content_type, body) ready for httplib.HTTP instance
    """
    BOUNDARY = '----------ThIs_Is_tHe_bouNdaRY_$'
    CRLF = '\r\n'
    L = []
    for (key, value) in fields:
        L.append('--' + BOUNDARY)
        L.append('Content-Disposition: form-data; name="%s"' % key)
        L.append('')
        L.append(value)
    for (key, filename, value) in files:
        L.append('--' + BOUNDARY)
        L.append('Content-Disposition: form-data; name="%s"; filename="%s"' % (key, filename))
        L.append('Content-Type: %s' % get_content_type(filename))
        L.append('')
        L.append(value)
    L.append('--' + BOUNDARY + '--')
    L.append('')
    body = CRLF.join(L)
    content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
    return content_type, body

def get_content_type(filename):
    return mimetypes.guess_type(filename)[0] or 'application/octet-stream'

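# Illustrative sketch (not part of the original paste): building and posting a
# multipart/form-data body. The URL, field names and file contents below are
# placeholders.
#
#   fields = [("game_id", "93"), ("team_id", str(tid))]
#   files  = [("logo", "logo.png", open("logo.png", "rb").read())]
#   content_type, body = encode_multipart_formdata(fields, files)
#   # or let MozillaEmulator set the headers and send it in one call:
#   dl = MozillaEmulator()
#   response = dl.post_multipart("http://example.com/upload", fields, files)
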
class Table:
    def __init__(self,title,headers,rows):
        self.title=title
        self.headers=headers
        self.rows=rows
        self.nrows=len(self.rows)
        self.fieldlen=[]

        ncols=len(headers)

        for i in range(ncols):
            max=0
            for j in rows:
                if len(str(j[i]))>max: max=len(str(j[i]))
            self.fieldlen.append(max)

        for i in range(len(headers)):
            if len(str(headers[i]))>self.fieldlen[i]: self.fieldlen[i]=len(str(headers[i]))


        self.width=sum(self.fieldlen)+(ncols-1)*3+4

    def __str__(self):
        bar="-"*self.width
        title="| "+self.title+" "*(self.width-3-(len(self.title)))+"|"
        out=[bar,title,bar]
        header=""
        for i in range(len(self.headers)):
            header+="| %s" %(str(self.headers[i])) +" "*(self.fieldlen[i]-len(str(self.headers[i])))+" "
        header+="|"
        out.append(header)
        out.append(bar)
        for i in self.rows:
            line=""
            for j in range(len(i)):
                line+="| %s" %(str(i[j])) +" "*(self.fieldlen[j]-len(str(i[j])))+" "
            out.append(line+"|")

        out.append(bar)
        return "\r\n".join(out)

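# Illustrative sketch (not part of the original paste): Table is used below to
# print the player listings. The rows here are made-up placeholders:
#
#   print Table('Batsmen', ["Name", "ID", "Team", "Cost"],
#               [["Player One", "11111", "IND", "9.5"],
#                ["Player Two", "22222", "AUS", "8.0"]])
#
# which renders an ASCII box along the lines of:
#
#   ------------------------------------
#   | Batsmen                          |
#   ------------------------------------
#   | Name       | ID    | Team | Cost |
#   ------------------------------------
#   | Player One | 11111 | IND  | 9.5  |
#   | Player Two | 22222 | AUS  | 8.0  |
#   ------------------------------------
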
def fantasy_pick(new_trump_id = 0,replace_list = {},pplayers = False):
    dl = MozillaEmulator()
    # Make sure that we get cookies from the server before logging in
    frontpage = dl.download("http://fantasy.espncricinfo.com/fantasy/fantasyleague/user/home.html",{},{},False)
    # Log in to the cricinfo server
    post_data = "email="+login_email+"&password="+password+"&remember=Y"
    page = dl.download("http://fantasy.espncricinfo.com/fantasy/fantasyleague/user/login.html",post_data,{'Referer':'http://fantasy.espncricinfo.com/fantasy/fantasyleague/user/home.html'},False)
    page = dl.download("http://fantasy.espncricinfo.com/fantasy/fantasyleague/user/create_team.html?game_id=93;team_id="+str(tid),post_data,{'Referer':'http://fantasy.espncricinfo.com/fantasy/fantasyleague/user/team_page.html?game_id=93;team_id='+str(tid)},False)
    team_name = re.search('var original_team_name = "(.*?)"',page,re.DOTALL).group(1)
    bat_print,bowl_print,ar_print,wk_print = [],[],[],[]
    # Scrape the player arrays embedded in the page's JavaScript: each match is a
    # (name, cost, team, player id, object id, country type) tuple.
    batsmen = re.findall('batsmen\[\d+\] = "(.{0,100}?)";.{0,100}?batcost\[\d+\] = (.{0,100}?);.{0,100}?batsmenteam\[\d+\] = "(.{0,100}?)";.{0,100}?batsmenids\[\d+\] = "(.{0,100}?)";.{0,100}?objids_batsmen\[\d+\] = "(.{0,100}?)";.{0,100}?country_type_batsmen\[\d+\] = "(.{0,1}?)"',page,re.MULTILINE|re.DOTALL)
    bowlers = re.findall('bowler\[\d+\] = "(.{0,100}?)";.{0,100}?bowlcost\[\d+\] = (.{0,100}?);.{0,100}?bowlerteam\[\d+\] = "(.{0,100}?)";.{0,100}?bowlerids\[\d+\] = "(.{0,100}?)";.{0,100}?objids_bowlers\[\d+\] = "(.{0,100}?)";.{0,100}?country_type_bowlers\[\d+\] = "(.{0,1}?)"',page,re.MULTILINE|re.DOTALL)
    ars = re.findall('allrounder\[\d+\] = "(.{0,100}?)";.{0,100}?allcost\[\d+\] = (.{0,100}?);.{0,100}?arteam\[\d+\] = "(.{0,100}?)";.{0,100}?arids\[\d+\] = "(.{0,100}?)";.{0,100}?objids_ar\[\d+\] = "(.{0,100}?)";.{0,100}?country_type_ar\[\d+\] = "(.{0,1}?)"',page,re.MULTILINE|re.DOTALL)
    wks = re.findall('wicketkeeper\[\d+\] = "(.{0,100}?)";.{0,100}?wkcost\[\d+\] = (.{0,100}?);.{0,100}?wkteam\[\d+\] = "(.{0,100}?)";.{0,100}?wkids\[\d+\] = "(.{0,100}?)";.{0,100}?objids_wk\[\d+\] = "(.{0,100}?)";.{0,100}?country_type_wk\[\d+\] = "(.{0,1}?)"',page,re.MULTILINE|re.DOTALL)
    all_player_ids,player_ids = {},{}
    for batsman in batsmen:
        all_player_ids[batsman[3]] = batsman[0]
        bat_print.append([batsman[0],batsman[3],batsman[2],batsman[1]])
    for bowler in bowlers:
        all_player_ids[bowler[3]] = bowler[0]
        bowl_print.append([bowler[0],bowler[3],bowler[2],bowler[1]])
    for ar in ars:
        all_player_ids[ar[3]] = ar[0]
        ar_print.append([ar[0],ar[3],ar[2],ar[1]])
    for wk in wks:
        all_player_ids[wk[3]] = wk[0]
        wk_print.append([wk[0],wk[3],wk[2],wk[1]])
    if pplayers:
        print Table('Batsmen', ["Name","ID","Team","Cost"],bat_print)
        print Table('Bowlers', ["Name","ID","Team","Cost"],bowl_print)
        print Table('All rounders', ["Name","ID","Team","Cost"],ar_print)
        print Table('Wicket keepers', ["Name","ID","Team","Cost"],wk_print)
    # The ids of the currently selected squad, as embedded in the page
    player_m = re.finditer('original_player_ids\[\d+\] *= *(\d+)',page)
    players = ""
    team_changed = False
    for player_g in player_m:
        player = player_g.group(1)
        player_ids[player] = all_player_ids[player]
        if replace_list.has_key(player):
            team_changed = True
            print ' Replacing* ',player_ids[player],' with ',all_player_ids[replace_list[player]]
            del player_ids[player]
            player = replace_list[player]
            player_ids[player] = all_player_ids[player]
        players=players+player+','
        print 'Player Name: ',player_ids[player],'(',player,')'
    players = players[0:-1]
    trump_id = re.search('var trumpplayerid *= *(\d+);',page).group(1)
    print '\nTrump player: ', player_ids[trump_id],'(',trump_id,')'
    if new_trump_id !=0 or team_changed:
        if new_trump_id == 0:
            new_trump_id = trump_id
        print 'Changing trump player to ',player_ids[str(new_trump_id)],'(',new_trump_id,')'
        # Submit the updated squad and/or trump card back to the server
        post_data = "game_id=93&key=edit&team_id="+str(tid)+"&player_ids="+players+"&teamname="+team_name+"&trumpcard="+str(new_trump_id)+"&email_alert=yes&player_update_alert=yes"
        if team_changed:
            print '* - Replacement succeeds only when player restrictions like cost, overseas quota and category of the players are satisfied'
            post_data = post_data+"&team_changed=yes"
        page = dl.download("http://fantasy.espncricinfo.com/fantasy/fantasyleague/user/create_team.html",post_data,{},False)
    sys.exit(0)

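# Illustrative sketch (not part of the original script): fantasy_pick() can also
# be called directly; the player ids below are made-up placeholders (real ids
# come from the -p / --print listing). Note that it ends with sys.exit(0).
#
#   fantasy_pick(pplayers=True)                     # only list the available players
#   fantasy_pick(new_trump_id=12345)                # change the trump player
#   fantasy_pick(replace_list={'11111': '22222'})   # swap player 11111 for 22222
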
# HOW TO USE
# Main function
if __name__ == "__main__":
    opts, args = getopt.getopt(sys.argv[1:],"t:r:p",["trump=","replace=","print"])
    replace_list,new_trump,pplayers = {},0,False
    for opt,arg in opts:
        if opt in ("-t","--trump"):
            try:
                new_trump = int(arg)
            except:
                pass
        elif opt in ("-r","--replace"):
            try:
                p = arg.split('-')
                replace_list[p[0]] = p[1]
            except:
                replace_list = {}
        elif opt in ("-p","--print"):
            pplayers = True
    fantasy_pick(new_trump,replace_list,pplayers)
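
# Illustrative command-line usage (inferred from the option parsing above; the
# script name and player ids are placeholders):
#
#   python cricinfo_fantasy.py -p                   # print all available players with ids
#   python cricinfo_fantasy.py -t 12345             # make player 12345 the trump card
#   python cricinfo_fantasy.py -r 11111-22222       # replace player 11111 with player 22222
#   python cricinfo_fantasy.py -t 12345 -r 11111-22222 -p
#
# Edit tid, login_email and password at the top of the script before running.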