SHARE
TWEET

Untitled

a guest Apr 26th, 2018 82 Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
  1. from lxml import html
  2. from collections import OrderedDict
  3. import re
  4. import requests
  5. import praw
  6. import config
  7. import time
  8. import os
  9. import requests
  10. import random
# Module-level accumulator that response() fills with the scraped stats.
# NOTE(review): this dict is shared across all lookups, so a failed search
# leaves the previous player's values in place — consider a fresh dict
# per request.
player_info = OrderedDict()
  12.  
  13. def bot_login():
  14.     print("Loggin in...")
  15.     r = praw.Reddit(username = config.username,
  16.             password = config.password,
  17.             client_id = config.client_id,
  18.             client_secret = config.client_secret,
  19.             user_agent = "NFL Player Stats Bot v0.1")
  20.     print("Logged in!")
  21.  
  22.     return r
  23.  
  24. def response(player, player_info):
  25.     if player is not None:
  26.         search_string = "http://www.nfl.com/players/search?category=name&filter={}&playerType=current".format(player.replace(" ", "+"))
  27.         search = requests.get(search_string)
  28.  
  29.         search_tree = html.fromstring(search.content)
  30.         print(search_tree)
  31. # --------------------------------------------------------------------------------
  32.         doesitwork = search_tree.xpath('//div[@id="searchResults"]//@href')
  33.         if doesitwork:
  34.             search_result = search_tree.xpath('//div[@id="searchResults"]//@href')[0]
  35.             print("found")
  36.         else:
  37.             search_result = None
  38.             print("not found")
  39. # --------------------------------------------------------------------------------
  40.         if search_result is not None:
  41.  
  42.             print("finding player")
  43.  
  44.             page = requests.get("http://www.nfl.com" + str(search_tree.xpath('//div[@id="searchResults"]//@href')[0]))
  45.             tree = html.fromstring(page.content)
  46.             bio = tree.xpath('//div[@class="player-info"]//p//text()')
  47.  
  48.             player_info["Name"] = str(tree.xpath('//div[@class="player-info"]//span[@class="player-name"]/text()')[0].encode("ascii", errors="ignore").decode()).strip()
  49.             print("player name: " + player_info["Name"])
  50.             player_info["Number"] = str(tree.xpath('//div[@class="player-info"]//span[@class="player-number"]/text()')[0].encode("ascii", errors="ignore").decode()).strip()
  51.  
  52.             player_info["Height"] = str(bio[bio.index("Height") + 1].encode("ascii", errors="ignore").decode()).split(": ",1)[1].strip().replace("-", "ft ").strip()
  53.  
  54.             player_info["Weight"] = str(bio[bio.index("Weight") + 1].encode("ascii", errors="ignore").decode()).split(": ",1)[1].strip()
  55.  
  56.             player_info["Age"] = str(bio[bio.index("Age") + 1].encode("ascii", errors="ignore").decode()).split(": ",1)[1].strip()
  57.  
  58.             player_info["D.O.B"] = str(bio[bio.index("Born") + 1].encode("ascii", errors="ignore").decode()).split(": ",1)[1].strip()
  59.  
  60.             player_info["Seasons Played"] = str(bio[bio.index("Experience") + 1].encode("ascii", errors="ignore").decode()).split(": ",1)[1].strip()
  61.  
  62.             player_info["College"] = str(bio[bio.index("College") + 1].encode("ascii", errors="ignore").decode()).split(": ",1)[1].strip()
  63.  
  64.             player_info["High School"] = str(bio[bio.index("High School") + 1].encode("ascii", errors="ignore").decode()).split(": ",1)[1].strip()
  65.  
  66.             player_info["Current Team"] = "[{}] ({}) ".format(str(tree.xpath('//div[@class="player-info"]//p[@class="player-team-links"]//@href')[1]), str(tree.xpath('//div[@class="player-info"]//p[@class="player-team-links"]//a/text()')[0]).strip())
  67.  
  68.         # player_info["seasons_played_for_current_team"] = ""
  69.         # player_info["career_total_stats"] = ""
  70.         # player_info["games_played"] = ""
  71.         # player_info["game_victories"] = ""
  72.         # player_info[""] = ""
  73.  
  74.         return player_info
  75.  
  76. def comment_message(response_message, player_info):
  77.     for k,v in player_info.iteritems():
  78.         response_message = response_message + ("> {}: {}\n\n".format(str(k), str(v)))
  79.  
  80.     return response_message
  81.  
  82. def run_bot(r, comments_replied_to):
  83.     print("Obtaining 250 comments...")
  84.     for comment in r.subreddit('test').comments(limit=250):
  85.         match = re.search(r"\[\[(.*?)\]\]", comment.body)
  86.         if(match) and comment.id not in comments_replied_to and comment.author != r.user.me():
  87.             comment_reply = comment_message("Player Stats: \n\n", response(match.group(1), player_info))
  88.             comment.reply(comment_reply)
  89.             print("Replied to comment " + comment.id)
  90.             comments_replied_to.append(comment.id)
  91.  
  92.             with open("comments_replied_to.text", "a") as f:
  93.                 f.write(comment.id + "\n")
  94.  
  95.     print("Sleeping for 10 seconds...")
  96.     #Sleep for 10 seconds...
  97.     time.sleep(10)
  98.  
  99. def get_saved_comments():
  100.     if not os.path.isfile("comments_replied_to.txt"):
  101.         comments_replied_to = []
  102.     else:
  103.         with open("comments_replied_to.txt", "r") as f:
  104.             comments_replied_to = f.read()
  105.             comments_replied_to = comments_replied_to.split("\n")
  106.             comments_replied_to = filter(None, comments_replied_to)
  107.  
  108.     return comments_replied_to
  109.  
  110. r = bot_login()
  111. comments_replied_to = get_saved_comments()
  112. print(comments_replied_to)
  113.  
  114. while True:
  115.     run_bot(r, comments_replied_to)
RAW Paste Data
We use cookies for various purposes including analytics. By continuing to use Pastebin, you agree to our use of cookies as described in the Cookies Policy. OK, I Understand
 
Top