#!/usr/bin/python
# -*- coding: utf-8 -*-

'''
Search Architecture:
 - Have a list of accounts
 - Create an "overseer" thread
 - Search Overseer:
   - Tracks incoming new location values
   - Tracks the "paused" state
   - During a pause or on a new location, clears the current search queue
   - Starts search_worker threads
 - Search Worker Threads each:
   - Have a unique API login
   - Listen to the same Queue for areas to scan
   - Can re-login as needed
   - Share a global lock for map parsing
'''
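
# A minimal sketch of how the rest of the app might start the overseer. The caller
# (e.g. a runserver.py entry point) is an assumption not shown in this paste; only the
# argument order comes from search_overseer_thread's signature below:
#
#     search_thread = Thread(target=search_overseer_thread,
#                            name='search_overseer',
#                            args=(args, new_location_queue, pause_bit, encryption_lib_path))
#     search_thread.daemon = True
#     search_thread.start()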

import logging
import math
import json
import geojson
import random
import time


from operator import itemgetter
from threading import Thread, Lock
from queue import Queue, Empty

from pgoapi import PGoApi
from pgoapi.utilities import f2i
from pgoapi import utilities as util
from pgoapi.exceptions import AuthException

from .models import parse_map

log = logging.getLogger(__name__)

TIMESTAMP = '\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000'


def get_new_coords(init_loc, distance, bearing):
    """ Given an initial lat/lng, a distance (in km), and a bearing (in degrees),
    this will calculate the resulting lat/lng coordinates.
    """
    R = 6378.1  # km radius of the earth
    bearing = math.radians(bearing)

    init_coords = [math.radians(init_loc[0]), math.radians(init_loc[1])]  # convert lat/lng to radians

    new_lat = math.asin(math.sin(init_coords[0]) * math.cos(distance / R) +
                        math.cos(init_coords[0]) * math.sin(distance / R) * math.cos(bearing))

    new_lon = init_coords[1] + math.atan2(math.sin(bearing) * math.sin(distance / R) * math.cos(init_coords[0]),
                                          math.cos(distance / R) - math.sin(init_coords[0]) * math.sin(new_lat))

    return [math.degrees(new_lat), math.degrees(new_lon)]

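# For example (hypothetical coordinates), get_new_coords([40.0, -105.0], 0.07, 90)
# returns a point roughly 70 m due east of (40.0, -105.0).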


def generate_location_steps(initial_loc, step_count):
    # Bearing (degrees)
    NORTH = 0
    EAST = 90
    SOUTH = 180
    WEST = 270

    pulse_radius = 0.07  # km - radius of the player's heartbeat is 70 m
    xdist = math.sqrt(3) * pulse_radius  # dist between column centers
    ydist = 3 * (pulse_radius / 2)  # dist between row centers

    yield (initial_loc[0], initial_loc[1], 0)  # insert initial location

    ring = 1
    loc = initial_loc
    while ring < step_count:
        # Set loc to start at top left
        loc = get_new_coords(loc, ydist, NORTH)
        loc = get_new_coords(loc, xdist / 2, WEST)
        for direction in range(6):
            for i in range(ring):
                if direction == 0:  # RIGHT
                    loc = get_new_coords(loc, xdist, EAST)
                if direction == 1:  # DOWN + RIGHT
                    loc = get_new_coords(loc, ydist, SOUTH)
                    loc = get_new_coords(loc, xdist / 2, EAST)
                if direction == 2:  # DOWN + LEFT
                    loc = get_new_coords(loc, ydist, SOUTH)
                    loc = get_new_coords(loc, xdist / 2, WEST)
                if direction == 3:  # LEFT
                    loc = get_new_coords(loc, xdist, WEST)
                if direction == 4:  # UP + LEFT
                    loc = get_new_coords(loc, ydist, NORTH)
                    loc = get_new_coords(loc, xdist / 2, WEST)
                if direction == 5:  # UP + RIGHT
                    loc = get_new_coords(loc, ydist, NORTH)
                    loc = get_new_coords(loc, xdist / 2, EAST)
                yield (loc[0], loc[1], 0)
        ring += 1
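
# With the 70 m pulse radius above, a step_count of N yields 1 + 3*N*(N-1) scan
# locations in a hex spiral around the start point (e.g. N=2 -> 7 steps, N=3 -> 19).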


#
# A fake search loop which does....nothing!
#
def fake_search_loop():
    while True:
        log.info('Fake search loop running')
        time.sleep(10)


# EFFICIENT SPAWN SCAN CODE STARTS HERE
def curSec():
    # Seconds elapsed in the current hour (0-3599), in UTC
    return (60 * time.gmtime().tm_min) + time.gmtime().tm_sec


def timeDif(a, b):
    # Circular difference a - b on the hour clock, kept in the range -1800 to +1800 secs
    dif = a - b
    if dif < -1800:
        dif += 3600
    if dif > 1800:
        dif -= 3600
    return dif
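
# For example, timeDif(100, 3500) == 200: the wrap-around means 100 s past the hour
# reads as 200 s after a spawn 3500 s into the previous hour.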


def SbSearch(Slist, T):
    # Binary search to find the lowest index with the required value,
    # or the index of the next value up
    first = 0
    last = len(Slist) - 1
    while first < last:
        mp = (first + last) // 2
        if Slist[mp]['time'] < T:
            first = mp + 1
        else:
            last = mp
    return first
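
# For example, with spawn times [10, 300, 1200, 2400] already sorted,
# SbSearch(spawns, 900) returns index 2 (the 1200-second spawn, the first one due at or after 900).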


Shash = {}
# EFFICIENT SPAWN SCAN CODE ENDS HERE


# The main search loop that keeps an eye on the overall process
def search_overseer_thread(args, new_location_queue, pause_bit, encryption_lib_path):
    global spawns, Shash, going  # This is also part of the efficient spawn scan code
    log.info('Search overseer starting')

    search_items_queue = Queue()
    parse_lock = Lock()

    # Create a search_worker_thread per account
    log.info('Starting search worker threads')
    for i, account in enumerate(args.accounts):
        log.debug('Starting search worker thread %d for user %s', i, account['username'])
        t = Thread(target=search_worker_thread,
                   name='search_worker_{}'.format(i),
                   args=(args, account, search_items_queue, parse_lock,
                         encryption_lib_path))
        t.daemon = True
        t.start()

    # A place to track the current location
    current_location = False

    # EFFICIENT SPAWN SCAN CODE STARTS HERE
    # FIXME: add arg for switching
    # Load spawn points
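    # The file layout is an assumption based on the keys read below: a JSON list of objects
    # with 'lat', 'lng' and 'time', where 'time' is the spawn's offset in seconds into each
    # hour (0-3599). Hypothetical example:
    #     [{"lat": 40.7580, "lng": -73.9855, "time": 1740},
    #      {"lat": 40.7614, "lng": -73.9776, "time": 2520}]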
    with open('spawns.json') as file:
        spawns = json.load(file)  # the with-block closes the file for us

    for spawn in spawns:
        hash = '{},{}'.format(spawn['time'], spawn['lng'])
        Shash[spawn['lng']] = spawn['time']

    # Sort spawn points by their time of hour
    spawns.sort(key=itemgetter('time'))
    log.info('total of %d spawns to track', len(spawns))

    # Find start position
    pos = SbSearch(spawns, (curSec() + 3540) % 3600)
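
    # (curSec() + 3540) % 3600 is "one minute ago" on the hour clock, so the start position
    # is the first spawn due to be at least 60 seconds old; the loop below waits for that age
    # before queueing a spawn's steps, and the workers skip anything over 14 minutes old.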
    while True:
        while timeDif(curSec(), spawns[pos]['time']) < 60:
            time.sleep(1)
        location = [spawns[pos]['lat'], spawns[pos]['lng'], 0]
        for step, step_location in enumerate(generate_location_steps(location, args.step_limit), 1):
            log.debug('Queueing step %d @ %f/%f/%f', pos, step_location[0], step_location[1], step_location[2])
            search_args = (step, step_location, spawns[pos]['time'])
            search_items_queue.put(search_args)
        pos = (pos + 1) % len(spawns)
        if pos == 0:
            while not search_items_queue.empty():
                log.info('search_items_queue not empty; waiting 10 secs before restarting at top of hour')
                time.sleep(10)
            log.info('restarting from top of list and finding current time')
            pos = SbSearch(spawns, (curSec() + 3540) % 3600)


def search_worker_thread(args, account, search_items_queue, parse_lock, encryption_lib_path):

    # The forever loop for the thread
    while True:
        try:
            log.debug('Entering search loop')

            # Create the API instance this will use
            api = PGoApi()
            if args.proxy:
                api.set_proxy({'http': args.proxy, 'https': args.proxy})

            # Get current time
            loop_start_time = int(round(time.time() * 1000))

            # The forever loop for the searches
            while True:

                # Grab the next thing to search (when available)
                # (slight change for the efficient spawn code: spawntime is part of each queue item)
                step, step_location, spawntime = search_items_queue.get()

                # If we have more than one account, stagger the logins so they occur evenly over scan_delay
                if len(args.accounts) > 1:
                    if len(args.accounts) > args.scan_delay:  # force ~1 second delay between threads if you have many accounts
                        delay = args.accounts.index(account) \
                            + ((random.random() - .5) / 2) if args.accounts.index(account) > 0 else 0
                    else:
                        delay = (args.scan_delay / len(args.accounts)) * args.accounts.index(account)
                    log.info('Delaying thread startup for %.2f seconds', delay)
                    time.sleep(delay)

                log.info('Search step %d beginning (queue size is %d)', step, search_items_queue.qsize())
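
                # For example, with 3 accounts and the default scan_delay of 5, worker threads
                # 0, 1 and 2 wait roughly 0, 1.7 and 3.3 seconds respectively before each step.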

                if timeDif(curSec(), spawntime) < 840:  # if we aren't 14 mins too late - part of the efficient spawn scan code
                    # Let the api know where we intend to be for this loop
                    api.set_position(*step_location)

                    # The loop to try very hard to scan this step
                    failed_total = 0
                    while True:

                        # After so many attempts, let's get out of here
                        if failed_total >= args.scan_retries:
                            # I am choosing to NOT place this item back in the queue
                            # otherwise we could get a "bad scan" area and be stuck
                            # on this overall loop forever. Better to lose one cell
                            # than have the scanner, essentially, halt.
                            log.error('Search step %d went over max scan_retries; abandoning', step)
                            break

                        # Increase sleep delay between each failed scan
                        # By default scan_delay=5, scan_retries=5 so
                        # we'd see timeouts of 5, 10, 15, 20, 25
                        sleep_time = args.scan_delay * (1 + failed_total)

                        # Ok, let's get started -- check our login status
                        check_login(args, account, api, step_location)

                        api.activate_signature(encryption_lib_path)

                        # Make the actual request (finally!)
                        response_dict = map_request(api, step_location)

                        # G'damnit, nothing back. Mark it up, sleep, carry on
                        if not response_dict:
                            log.error('Search step %d area download failed, retrying request in %g seconds', step, sleep_time)
                            failed_total += 1
                            time.sleep(sleep_time)
                            continue

                        # Got the response, lock for parsing and do so (or fail, whatever)
                        with parse_lock:
                            try:
                                parse_map(response_dict, step_location)
                                log.debug('Search step %s completed', step)
                                search_items_queue.task_done()

                                # If there's any time left between the start time and the time when we should be kicking off the next
                                # loop, hang out until it's up.
                                sleep_delay_remaining = loop_start_time + (args.scan_delay * 1000) - int(round(time.time() * 1000))
                                if sleep_delay_remaining < 0:
                                    time.sleep(args.scan_delay)
                                else:
                                    time.sleep(sleep_delay_remaining / 1000)

                                loop_start_time = int(round(time.time() * 1000))
                                break  # All done, get out of the request-retry loop
                            except KeyError:
                                log.exception('Search step %s map parsing failed, retrying request in %g seconds', step, sleep_time)
                                failed_total += 1
                                time.sleep(sleep_time)
                else:
                    log.info("Can't keep up; skipping this spawn")

        # Catch any process exceptions, log them, and continue the thread
        except Exception as e:
            log.exception('Exception in search_worker: %s. Username: %s', e, account['username'])


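# check_login re-authenticates a worker's API session when its login ticket is close to
# expiring (less than a minute left), retrying up to args.login_retries times before giving up.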
def check_login(args, account, api, position):

    # Logged in? Enough time left? Cool!
    if api._auth_provider and api._auth_provider._ticket_expire:
        remaining_time = api._auth_provider._ticket_expire / 1000 - time.time()
        if remaining_time > 60:
            log.debug('Credentials remain valid for another %f seconds', remaining_time)
            return

    # Try to login (a few times, but don't get stuck here)
    i = 0
    api.set_position(position[0], position[1], position[2])
    while True:
        try:
            api.set_authentication(provider=account['auth_service'], username=account['username'], password=account['password'])
            break
        except AuthException:
            i += 1
            if i >= args.login_retries:
                raise TooManyLoginAttempts('Exceeded login attempts')
            log.error('Failed to login to Pokemon Go with account %s. Trying again in %g seconds', account['username'], args.login_delay)
            time.sleep(args.login_delay)

    log.debug('Login for account %s successful', account['username'])
    # Sleep a bit so the first search step doesn't come back as 0/0/0
    time.sleep(10)


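# map_request wraps pgoapi's get_map_objects call: it asks for the map cells covering the
# given position (util.get_cell_ids computes the covering cell ids) and returns False on any
# error so the calling retry loop can count the failure and try again.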
def map_request(api, position):
    try:
        cell_ids = util.get_cell_ids(position[0], position[1])
        timestamps = [0, ] * len(cell_ids)
        return api.get_map_objects(latitude=f2i(position[0]),
                                   longitude=f2i(position[1]),
                                   since_timestamp_ms=timestamps,
                                   cell_id=cell_ids)
    except Exception as e:
        log.warning('Exception while downloading map: %s', e)
        return False


class TooManyLoginAttempts(Exception):
    pass