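# Reddit bot that watches a subreddit for comments containing [[Player Name]]
# and replies with basic bio details scraped from the player's NFL.com profile.
# Third-party dependencies: praw, requests, lxml.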
from lxml import html
from collections import OrderedDict
import re
import requests
import praw
import config
import time
import os
import random

player_info = OrderedDict()

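# bot_login() below reads Reddit credentials from a local config.py module.
# A minimal sketch of that file (attribute names taken from the accesses in
# bot_login; the values are placeholders to fill in yourself):
#
#   username = "your_reddit_username"
#   password = "your_reddit_password"
#   client_id = "your_app_client_id"
#   client_secret = "your_app_client_secret"
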
def bot_login():
    # Authenticate against Reddit using the credentials stored in config.py.
    print("Logging in...")
    r = praw.Reddit(username=config.username,
                    password=config.password,
                    client_id=config.client_id,
                    client_secret=config.client_secret,
                    user_agent="NFL Player Stats Bot v0.1")
    print("Logged in!")

    return r

def response(player, player_info):
    # Look the requested player up on NFL.com and fill player_info with the
    # bio fields scraped from their profile page. Clear any fields left over
    # from a previous lookup so a failed search does not reuse stale data.
    player_info.clear()

    if player is not None:
        search = requests.get("http://www.nfl.com/players/search?category=name&filter={}&playerType=current".format(player.replace(" ", "+")))
        search_tree = html.fromstring(search.content)

        # xpath() returns a list of matches, so test for a non-empty result
        # rather than comparing the list to True.
        search_links = search_tree.xpath('//div[@id="searchResults"]//@href')
        search_result = search_links[0] if search_links else None

        if search_result is not None:
            page = requests.get("http://www.nfl.com" + str(search_result))
            tree = html.fromstring(page.content)
            bio = tree.xpath('//div[@class="player-info"]//p//text()')

            player_info["Name"] = str(tree.xpath('//div[@class="player-info"]//span[@class="player-name"]/text()')[0].encode("ascii", errors="ignore").decode()).strip()
            player_info["Number"] = str(tree.xpath('//div[@class="player-info"]//span[@class="player-number"]/text()')[0].encode("ascii", errors="ignore").decode()).strip()
            player_info["Height"] = str(bio[bio.index("Height") + 1].encode("ascii", errors="ignore").decode()).split(": ", 1)[1].strip().replace("-", "ft ")
            player_info["Weight"] = str(bio[bio.index("Weight") + 1].encode("ascii", errors="ignore").decode()).split(": ", 1)[1].strip()
            player_info["Age"] = str(bio[bio.index("Age") + 1].encode("ascii", errors="ignore").decode()).split(": ", 1)[1].strip()
            player_info["D.O.B"] = str(bio[bio.index("Born") + 1].encode("ascii", errors="ignore").decode()).split(": ", 1)[1].strip()
            player_info["Seasons Played"] = str(bio[bio.index("Experience") + 1].encode("ascii", errors="ignore").decode()).split(": ", 1)[1].strip()
            player_info["College"] = str(bio[bio.index("College") + 1].encode("ascii", errors="ignore").decode()).split(": ", 1)[1].strip()
            player_info["High School"] = str(bio[bio.index("High School") + 1].encode("ascii", errors="ignore").decode()).split(": ", 1)[1].strip()
            # Reddit Markdown links are [text](url), so put the team name first
            # and build an absolute URL from the (assumed site-relative) team href.
            team_name = str(tree.xpath('//div[@class="player-info"]//p[@class="player-team-links"]//a/text()')[0]).strip()
            team_href = str(tree.xpath('//div[@class="player-info"]//p[@class="player-team-links"]//@href')[1])
            player_info["Current Team"] = "[{}](http://www.nfl.com{})".format(team_name, team_href)

    # player_info["seasons_played_for_current_team"] = ""
    # player_info["career_total_stats"] = ""
    # player_info["games_played"] = ""
    # player_info["game_victories"] = ""

    # Return the dict even when the search found nothing, so the caller always
    # gets something iterable back.
    return player_info

def comment_message(response_message, player_info):
    # Render each scraped field as a Markdown blockquote line in the reply body.
    for k, v in player_info.items():
        response_message = response_message + "> {}: {}\n\n".format(str(k), str(v))

    return response_message

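# The reply body built above is one blockquote line per scraped field, roughly
# (values shown are placeholders):
#
#   Player Stats:
#
#   > Name: <player name>
#
#   > Number: <jersey number>
#
#   > Height: <height>
#   ...
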
def run_bot(r, comments_replied_to):
    print("Obtaining 25 comments...")

    for comment in r.subreddit('test').comments(limit=25):
        # Trigger on comments containing [[Player Name]], skipping anything the
        # bot has already answered and the bot's own comments.
        if "[[" in comment.body and comment.id not in comments_replied_to and comment.author != r.user.me():
            print("found")
            player = comment.body.partition("[[")[2].partition("]]")[0]

            comment_reply = comment_message("Player Stats: \n\n", response(player, player_info))

            comment.reply(comment_reply)
            print("Replied to comment " + comment.id)

            comments_replied_to.append(comment.id)

            # Persist the id so a restart does not reply to the same comment again.
            with open("comments_replied_to.txt", "a") as f:
                f.write(comment.id + "\n")

    print("Sleeping for 10 seconds...")
    time.sleep(10)

def get_saved_comments():
    # Load previously-answered comment ids; return a list so run_bot can append to it.
    if not os.path.isfile("comments_replied_to.txt"):
        comments_replied_to = []
    else:
        with open("comments_replied_to.txt", "r") as f:
            comments_replied_to = f.read().split("\n")
            # filter() is a lazy iterator in Python 3, so materialise a list of non-empty ids.
            comments_replied_to = list(filter(None, comments_replied_to))

    return comments_replied_to

r = bot_login()
comments_replied_to = get_saved_comments()
print(comments_replied_to)

while True:
    run_bot(r, comments_replied_to)
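# Example trigger: a comment on r/test such as "How tall is [[Drew Brees]]?"
# makes the bot look up the text between the brackets and reply with the
# scraped bio fields.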