Advertisement
mindtuga

models.py

Dec 28th, 2016
112
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 86.89 KB | None | 0 0
  1. #!/usr/bin/python
  2. # -*- coding: utf-8 -*-
  3.  
  4. import logging
  5. import itertools
  6. import calendar
  7. import sys
  8. import traceback
  9. import gc
  10. import time
  11. import geopy
  12. import math
  13. from peewee import SqliteDatabase, InsertQuery, \
  14. Check, CompositeKey, ForeignKeyField, \
  15. IntegerField, CharField, DoubleField, BooleanField, \
  16. DateTimeField, fn, DeleteQuery, FloatField, SQL, TextField, JOIN
  17. from playhouse.flask_utils import FlaskDB
  18. from playhouse.pool import PooledMySQLDatabase
  19. from playhouse.shortcuts import RetryOperationalError
  20. from playhouse.migrate import migrate, MySQLMigrator, SqliteMigrator
  21. from datetime import datetime, timedelta
  22. from base64 import b64encode
  23. from cachetools import TTLCache
  24. from cachetools import cached
  25.  
  26. from . import config
  27. from .utils import get_pokemon_name, get_pokemon_rarity, get_pokemon_types, get_args, \
  28. cellid, in_radius, date_secs, clock_between, secs_between, get_move_name, get_move_damage, \
  29. get_move_energy, get_move_type
  30. from .transform import transform_from_wgs_to_gcj, get_new_coords
  31. from .customLog import printPokemon
log = logging.getLogger(__name__)

# Parsed command-line/config arguments, shared by every model below.
args = get_args()
# Flask-peewee database wrapper; bound to a real DB in init_database().
flaskDb = FlaskDB()
# Memoization cache for expensive aggregate queries (see Pokemon.get_seen):
# at most 100 entries, each kept for 5 minutes.
cache = TTLCache(maxsize=100, ttl=60 * 5)

# Bump this whenever the table layout changes so migrations can run.
db_schema_version = 11
  39.  
  40.  
class MyRetryDB(RetryOperationalError, PooledMySQLDatabase):
    """Pooled MySQL database that retries queries dropped by the server.

    Combines playhouse's connection pool with RetryOperationalError so a
    stale/lost connection is transparently reopened and the query re-run.
    """
    pass
  43.  
  44.  
  45. def init_database(app):
  46. if args.db_type == 'mysql':
  47. log.info('Connecting to MySQL database on %s:%i', args.db_host, args.db_port)
  48. connections = args.db_max_connections
  49. if hasattr(args, 'accounts'):
  50. connections *= len(args.accounts)
  51. db = MyRetryDB(
  52. args.db_name,
  53. user=args.db_user,
  54. password=args.db_pass,
  55. host=args.db_host,
  56. port=args.db_port,
  57. max_connections=connections,
  58. stale_timeout=300)
  59. else:
  60. log.info('Connecting to local SQLite database')
  61. db = SqliteDatabase(args.db)
  62.  
  63. app.config['DATABASE'] = db
  64. flaskDb.init_app(app)
  65.  
  66. return db
  67.  
  68.  
  69. class BaseModel(flaskDb.Model):
  70.  
  71. @classmethod
  72. def get_all(cls):
  73. results = [m for m in cls.select().dicts()]
  74. if args.china:
  75. for result in results:
  76. result['latitude'], result['longitude'] = \
  77. transform_from_wgs_to_gcj(
  78. result['latitude'], result['longitude'])
  79. return results
  80.  
  81.  
class Pokemon(BaseModel):
    """A wild pokemon sighting, keyed by its (base64) API encounter id."""

    # We are base64 encoding the ids delivered by the api,
    # because they are too big for sqlite to handle.
    encounter_id = CharField(primary_key=True, max_length=50)
    spawnpoint_id = CharField(index=True)
    pokemon_id = IntegerField(index=True)
    latitude = DoubleField()
    longitude = DoubleField()
    disappear_time = DateTimeField(index=True)
    # IVs and moves are only known after an encounter, hence nullable.
    individual_attack = IntegerField(null=True)
    individual_defense = IntegerField(null=True)
    individual_stamina = IntegerField(null=True)
    move_1 = IntegerField(null=True)
    move_2 = IntegerField(null=True)
    last_modified = DateTimeField(null=True, index=True, default=datetime.utcnow)

    class Meta:
        # Non-unique composite index to speed up bounding-box queries.
        indexes = ((('latitude', 'longitude'), False),)

    @staticmethod
    def get_active(swLat, swLng, neLat, neLng, timestamp=0, oSwLat=None, oSwLng=None, oNeLat=None, oNeLng=None):
        """Return not-yet-expired pokemon as a list of dicts.

        Branches, in priority order:
        - no viewport given: all active pokemon;
        - timestamp > 0: only pokemon modified since that epoch-ms timestamp
          and inside the (swLat..neLat, swLng..neLng) box;
        - old viewport (oSw*/oNe*) given: pokemon in the new box but NOT in
          the old one (newly uncovered area only);
        - otherwise: all active pokemon inside the box.
        Display fields (name, rarity, types) are added to each dict.
        """
        now_date = datetime.utcnow()
        # now_secs = date_secs(now_date)
        query = Pokemon.select()
        if not (swLat and swLng and neLat and neLng):
            query = (query
                     .where(Pokemon.disappear_time > now_date)
                     .dicts())
        elif timestamp > 0:
            # If timestamp is known only load modified pokemon.
            query = (query
                     .where(((Pokemon.last_modified > datetime.utcfromtimestamp(timestamp / 1000)) &
                             (Pokemon.disappear_time > now_date)) &
                            ((Pokemon.latitude >= swLat) &
                             (Pokemon.longitude >= swLng) &
                             (Pokemon.latitude <= neLat) &
                             (Pokemon.longitude <= neLng)))
                     .dicts())
        elif oSwLat and oSwLng and oNeLat and oNeLng:
            # Send Pokemon in view but exclude those within old boundaries. Only send newly uncovered Pokemon.
            query = (query
                     .where(((Pokemon.disappear_time > now_date) &
                             (((Pokemon.latitude >= swLat) &
                               (Pokemon.longitude >= swLng) &
                               (Pokemon.latitude <= neLat) &
                               (Pokemon.longitude <= neLng))) &
                             ~((Pokemon.disappear_time > now_date) &
                               (Pokemon.latitude >= oSwLat) &
                               (Pokemon.longitude >= oSwLng) &
                               (Pokemon.latitude <= oNeLat) &
                               (Pokemon.longitude <= oNeLng))))
                     .dicts())
        else:
            query = (Pokemon
                     .select()
                     # add 1 hour buffer to include spawnpoints that persist after tth, like shsh
                     .where((Pokemon.disappear_time > now_date) &
                            (((Pokemon.latitude >= swLat) &
                              (Pokemon.longitude >= swLng) &
                              (Pokemon.latitude <= neLat) &
                              (Pokemon.longitude <= neLng))))
                     .dicts())

        # Performance: Disable the garbage collector prior to creating a (potentially) large dict with append().
        gc.disable()

        pokemons = []
        for p in list(query):

            p['pokemon_name'] = get_pokemon_name(p['pokemon_id'])
            p['pokemon_rarity'] = get_pokemon_rarity(p['pokemon_id'])
            p['pokemon_types'] = get_pokemon_types(p['pokemon_id'])
            if args.china:
                # Convert WGS-84 to GCJ-02 for Chinese map display.
                p['latitude'], p['longitude'] = \
                    transform_from_wgs_to_gcj(p['latitude'], p['longitude'])
            pokemons.append(p)

        # Re-enable the GC.
        gc.enable()

        return pokemons

    @staticmethod
    def get_active_by_id(ids, swLat, swLng, neLat, neLng):
        """Return active pokemon whose pokemon_id is in `ids` (list),
        optionally restricted to the given bounding box."""
        if not (swLat and swLng and neLat and neLng):
            query = (Pokemon
                     .select()
                     .where((Pokemon.pokemon_id << ids) &
                            (Pokemon.disappear_time > datetime.utcnow()))
                     .dicts())
        else:
            query = (Pokemon
                     .select()
                     .where((Pokemon.pokemon_id << ids) &
                            (Pokemon.disappear_time > datetime.utcnow()) &
                            (Pokemon.latitude >= swLat) &
                            (Pokemon.longitude >= swLng) &
                            (Pokemon.latitude <= neLat) &
                            (Pokemon.longitude <= neLng))
                     .dicts())

        # Performance: Disable the garbage collector prior to creating a (potentially) large dict with append().
        gc.disable()

        pokemons = []
        for p in query:
            p['pokemon_name'] = get_pokemon_name(p['pokemon_id'])
            p['pokemon_rarity'] = get_pokemon_rarity(p['pokemon_id'])
            p['pokemon_types'] = get_pokemon_types(p['pokemon_id'])
            if args.china:
                p['latitude'], p['longitude'] = \
                    transform_from_wgs_to_gcj(p['latitude'], p['longitude'])
            pokemons.append(p)

        # Re-enable the GC.
        gc.enable()

        return pokemons

    @classmethod
    @cached(cache)
    def get_seen(cls, timediff):
        """Return {'pokemon': [...], 'total': n} of species seen since
        `timediff` (a timedelta back from now), with per-species counts and
        the location of each species' most recent appearance.

        Results are memoized in the module TTL cache for 5 minutes.
        NOTE(review): when timediff is falsy it is compared against
        disappear_time unconverted — confirm callers always pass a timedelta.
        """
        if timediff:
            timediff = datetime.utcnow() - timediff
        # Subquery: per-species count and latest disappear time.
        pokemon_count_query = (Pokemon
                               .select(Pokemon.pokemon_id,
                                       fn.COUNT(Pokemon.pokemon_id).alias('count'),
                                       fn.MAX(Pokemon.disappear_time).alias('lastappeared')
                                       )
                               .where(Pokemon.disappear_time > timediff)
                               .group_by(Pokemon.pokemon_id)
                               .alias('counttable')
                               )
        # Join back to pick the row matching each species' last appearance.
        query = (Pokemon
                 .select(Pokemon.pokemon_id,
                         Pokemon.disappear_time,
                         Pokemon.latitude,
                         Pokemon.longitude,
                         pokemon_count_query.c.count)
                 .join(pokemon_count_query, on=(Pokemon.pokemon_id == pokemon_count_query.c.pokemon_id))
                 .distinct()
                 .where(Pokemon.disappear_time == pokemon_count_query.c.lastappeared)
                 .dicts()
                 )

        # Performance: Disable the garbage collector prior to creating a (potentially) large dict with append().
        gc.disable()

        pokemons = []
        total = 0
        for p in query:
            p['pokemon_name'] = get_pokemon_name(p['pokemon_id'])
            pokemons.append(p)
            total += p['count']

        # Re-enable the GC.
        gc.enable()

        return {'pokemon': pokemons, 'total': total}

    @classmethod
    def get_appearances(cls, pokemon_id, timediff):
        '''
        :param pokemon_id: id of pokemon that we need appearances for
        :param timediff: limiting period of the selection
        :return: list of pokemon appearances over a selected period
        '''
        if timediff:
            timediff = datetime.utcnow() - timediff
        # Group by spawnpoint+location so 'count' is per-spawnpoint.
        query = (Pokemon
                 .select(Pokemon.latitude, Pokemon.longitude, Pokemon.pokemon_id, fn.Count(Pokemon.spawnpoint_id).alias('count'), Pokemon.spawnpoint_id)
                 .where((Pokemon.pokemon_id == pokemon_id) &
                        (Pokemon.disappear_time > timediff)
                        )
                 .group_by(Pokemon.latitude, Pokemon.longitude, Pokemon.pokemon_id, Pokemon.spawnpoint_id)
                 .dicts()
                 )

        return list(query)

    @classmethod
    def get_appearances_times_by_spawnpoint(cls, pokemon_id, spawnpoint_id, timediff):
        '''
        :param pokemon_id: id of pokemon that we need appearances times for
        :param spawnpoint_id: spawnpoing id we need appearances times for
        :param timediff: limiting period of the selection
        :return: list of time appearances over a selected period
        '''
        if timediff:
            timediff = datetime.utcnow() - timediff
        query = (Pokemon
                 .select(Pokemon.disappear_time)
                 .where((Pokemon.pokemon_id == pokemon_id) &
                        (Pokemon.spawnpoint_id == spawnpoint_id) &
                        (Pokemon.disappear_time > timediff)
                        )
                 .order_by(Pokemon.disappear_time.asc())
                 .tuples()
                 )

        # Flatten the single-column tuples into a plain list of datetimes.
        return list(itertools.chain(*query))

    @classmethod
    def get_spawn_time(cls, disappear_time):
        """Map disappearance seconds-into-hour to appearance seconds:
        spawns last 15 minutes, so appearance = disappearance - 15 min,
        i.e. +2700s wrapped on the hour."""
        return (disappear_time + 2700) % 3600

    @classmethod
    def get_spawnpoints(cls, swLat, swLng, neLat, neLng, timestamp=0, oSwLat=None, oSwLng=None, oNeLat=None, oNeLng=None):
        """Return distinct spawnpoints (with appearance 'time' and sighting
        count) for the map, using the same viewport/timestamp branch logic
        as get_active()."""
        query = Pokemon.select(Pokemon.latitude, Pokemon.longitude, Pokemon.spawnpoint_id, (date_secs(Pokemon.disappear_time)).alias('time'), fn.Count(Pokemon.spawnpoint_id).alias('count'))

        if timestamp > 0:
            query = (query
                     .where(((Pokemon.last_modified > datetime.utcfromtimestamp(timestamp / 1000))) &
                            ((Pokemon.latitude >= swLat) &
                             (Pokemon.longitude >= swLng) &
                             (Pokemon.latitude <= neLat) &
                             (Pokemon.longitude <= neLng)))
                     .dicts())
        elif oSwLat and oSwLng and oNeLat and oNeLng:
            # Send spawnpoints in view but exclude those within old boundaries. Only send newly uncovered spawnpoints.
            query = (query
                     .where((((Pokemon.latitude >= swLat) &
                              (Pokemon.longitude >= swLng) &
                              (Pokemon.latitude <= neLat) &
                              (Pokemon.longitude <= neLng))) &
                            ~((Pokemon.latitude >= oSwLat) &
                              (Pokemon.longitude >= oSwLng) &
                              (Pokemon.latitude <= oNeLat) &
                              (Pokemon.longitude <= oNeLng)))
                     .dicts())
        elif swLat and swLng and neLat and neLng:
            query = (query
                     .where((Pokemon.latitude <= neLat) &
                            (Pokemon.latitude >= swLat) &
                            (Pokemon.longitude >= swLng) &
                            (Pokemon.longitude <= neLng)
                            ))

        query = query.group_by(Pokemon.latitude, Pokemon.longitude, Pokemon.spawnpoint_id, SQL('time'))

        queryDict = query.dicts()
        spawnpoints = {}

        # Collapse rows to one entry per spawnpoint, keeping the most
        # frequently observed disappear time; a spawnpoint seen with several
        # distinct times is flagged 'special'.
        for sp in queryDict:
            key = sp['spawnpoint_id']
            disappear_time = cls.get_spawn_time(sp.pop('time'))
            count = int(sp['count'])

            if key not in spawnpoints:
                spawnpoints[key] = sp
            else:
                spawnpoints[key]['special'] = True

            if 'time' not in spawnpoints[key] or count >= spawnpoints[key]['count']:
                spawnpoints[key]['time'] = disappear_time
                spawnpoints[key]['count'] = count

        # 'count' was only needed to pick the winning time; drop it.
        for sp in spawnpoints.values():
            del sp['count']

        return list(spawnpoints.values())

    @classmethod
    def get_spawnpoints_in_hex(cls, center, steps):
        """Return spawnpoints within a hex of `steps` scan circles around
        `center`, each with its APPEARANCE time (seconds into hour)."""
        log.info('Finding spawn points {} steps away'.format(steps))

        n, e, s, w = hex_bounds(center, steps)

        query = (Pokemon
                 .select(Pokemon.latitude.alias('lat'),
                         Pokemon.longitude.alias('lng'),
                         (date_secs(Pokemon.disappear_time)).alias('time'),
                         Pokemon.spawnpoint_id
                         ))
        query = (query.where((Pokemon.latitude <= n) &
                             (Pokemon.latitude >= s) &
                             (Pokemon.longitude >= w) &
                             (Pokemon.longitude <= e)
                             ))
        # Sqlite doesn't support distinct on columns.
        if args.db_type == 'mysql':
            query = query.distinct(Pokemon.spawnpoint_id)
        else:
            query = query.group_by(Pokemon.spawnpoint_id)

        s = list(query.dicts())

        # The distance between scan circles of radius 70 in a hex is 121.2436
        # steps - 1 to account for the center circle then add 70 for the edge.
        step_distance = ((steps - 1) * 121.2436) + 70
        # Compare spawnpoint list to a circle with radius steps * 120.
        # Uses the direct geopy distance between the center and the spawnpoint.
        filtered = []

        for idx, sp in enumerate(s):
            if geopy.distance.distance(center, (sp['lat'], sp['lng'])).meters <= step_distance:
                filtered.append(s[idx])

        # At this point, 'time' is DISAPPEARANCE time, we're going to morph it to APPEARANCE time.
        for location in filtered:
            # examples: time shifted
            # 0 ( 0 + 2700) = 2700 % 3600 = 2700 (0th minute to 45th minute, 15 minutes prior to appearance as time wraps around the hour.)
            # 1800 (1800 + 2700) = 4500 % 3600 = 900 (30th minute, moved to arrive at 15th minute.)
            # todo: this DOES NOT ACCOUNT for pokemons that appear sooner and live longer, but you'll _always_ have at least 15 minutes, so it works well enough.
            location['time'] = cls.get_spawn_time(location['time'])

        return filtered
  389.  
  390.  
class Pokestop(BaseModel):
    """A pokestop fort; lure state is tracked via active_fort_modifier."""

    pokestop_id = CharField(primary_key=True, max_length=50)
    enabled = BooleanField()
    latitude = DoubleField()
    longitude = DoubleField()
    last_modified = DateTimeField(index=True)
    # Null when no lure is active.
    lure_expiration = DateTimeField(null=True, index=True)
    active_fort_modifier = CharField(max_length=50, null=True)
    last_updated = DateTimeField(null=True, index=True, default=datetime.utcnow)

    class Meta:
        # Non-unique composite index to speed up bounding-box queries.
        indexes = ((('latitude', 'longitude'), False),)

    @staticmethod
    def get_stops(swLat, swLng, neLat, neLng, timestamp=0, oSwLat=None, oSwLng=None, oNeLat=None, oNeLng=None, lured=False):
        """Return pokestops as a list of dicts.

        Branches, in priority order: no viewport (everything); timestamp > 0
        (only stops updated since that epoch-ms time, inside the box);
        old viewport + lured (lured stops newly uncovered by the viewport
        change); old viewport (any stops newly uncovered); lured (lured
        stops in the box); otherwise all stops in the box.
        """

        query = Pokestop.select(Pokestop.active_fort_modifier, Pokestop.enabled, Pokestop.latitude, Pokestop.longitude, Pokestop.last_modified, Pokestop.lure_expiration, Pokestop.pokestop_id)

        if not (swLat and swLng and neLat and neLng):
            query = (query
                     .dicts())
        elif timestamp > 0:
            query = (query
                     .where(((Pokestop.last_updated > datetime.utcfromtimestamp(timestamp / 1000))) &
                            (Pokestop.latitude >= swLat) &
                            (Pokestop.longitude >= swLng) &
                            (Pokestop.latitude <= neLat) &
                            (Pokestop.longitude <= neLng))
                     .dicts())
        elif oSwLat and oSwLng and oNeLat and oNeLng and lured:
            query = (query
                     .where((((Pokestop.latitude >= swLat) &
                              (Pokestop.longitude >= swLng) &
                              (Pokestop.latitude <= neLat) &
                              (Pokestop.longitude <= neLng)) &
                             (Pokestop.active_fort_modifier.is_null(False))) &
                            ~((Pokestop.latitude >= oSwLat) &
                              (Pokestop.longitude >= oSwLng) &
                              (Pokestop.latitude <= oNeLat) &
                              (Pokestop.longitude <= oNeLng)) &
                            (Pokestop.active_fort_modifier.is_null(False)))
                     .dicts())
        elif oSwLat and oSwLng and oNeLat and oNeLng:
            # Send stops in view but exclude those within old boundaries. Only send newly uncovered stops.
            query = (query
                     .where(((Pokestop.latitude >= swLat) &
                             (Pokestop.longitude >= swLng) &
                             (Pokestop.latitude <= neLat) &
                             (Pokestop.longitude <= neLng)) &
                            ~((Pokestop.latitude >= oSwLat) &
                              (Pokestop.longitude >= oSwLng) &
                              (Pokestop.latitude <= oNeLat) &
                              (Pokestop.longitude <= oNeLng)))
                     .dicts())
        elif lured:
            # NOTE(review): this branch is only reached when timestamp == 0,
            # so the last_updated comparison is against the epoch — confirm
            # that is intended rather than dropping the time filter.
            query = (query
                     .where(((Pokestop.last_updated > datetime.utcfromtimestamp(timestamp / 1000))) &
                            ((Pokestop.latitude >= swLat) &
                             (Pokestop.longitude >= swLng) &
                             (Pokestop.latitude <= neLat) &
                             (Pokestop.longitude <= neLng)) &
                            (Pokestop.active_fort_modifier.is_null(False)))
                     .dicts())

        else:
            query = (query
                     .where((Pokestop.latitude >= swLat) &
                            (Pokestop.longitude >= swLng) &
                            (Pokestop.latitude <= neLat) &
                            (Pokestop.longitude <= neLng))
                     .dicts())

        # Performance: Disable the garbage collector prior to creating a (potentially) large dict with append().
        gc.disable()

        pokestops = []
        for p in query:
            if args.china:
                # Convert WGS-84 to GCJ-02 for Chinese map display.
                p['latitude'], p['longitude'] = \
                    transform_from_wgs_to_gcj(p['latitude'], p['longitude'])
            pokestops.append(p)

        # Re-enable the GC.
        gc.enable()

        return pokestops
  477.  
  478.  
class Gym(BaseModel):
    """A gym fort plus helpers to fetch membership/detail info."""

    # Team id constants as delivered by the API.
    UNCONTESTED = 0
    TEAM_MYSTIC = 1
    TEAM_VALOR = 2
    TEAM_INSTINCT = 3

    gym_id = CharField(primary_key=True, max_length=50)
    team_id = IntegerField()
    guard_pokemon_id = IntegerField()
    gym_points = IntegerField()
    enabled = BooleanField()
    latitude = DoubleField()
    longitude = DoubleField()
    last_modified = DateTimeField(index=True)
    last_scanned = DateTimeField(default=datetime.utcnow)

    class Meta:
        # Non-unique composite index to speed up bounding-box queries.
        indexes = ((('latitude', 'longitude'), False),)

    @staticmethod
    def get_gyms(swLat, swLng, neLat, neLng, timestamp=0, oSwLat=None, oSwLng=None, oNeLat=None, oNeLng=None):
        """Return gyms keyed by gym_id, each dict enriched with 'name'
        (from GymDetails, None if unknown) and 'pokemon' (defenders from
        GymMember/GymPokemon/Trainer scanned after the gym last changed).

        Viewport/timestamp branching mirrors Pokemon.get_active().
        """
        if not (swLat and swLng and neLat and neLng):
            results = (Gym
                       .select()
                       .dicts())
        elif timestamp > 0:
            # If timestamp is known only send last scanned Gyms.
            results = (Gym
                       .select()
                       .where(((Gym.last_scanned > datetime.utcfromtimestamp(timestamp / 1000)) &
                               (Gym.latitude >= swLat) &
                               (Gym.longitude >= swLng) &
                               (Gym.latitude <= neLat) &
                               (Gym.longitude <= neLng)))
                       .dicts())
        elif oSwLat and oSwLng and oNeLat and oNeLng:
            # Send gyms in view but exclude those within old boundaries. Only send newly uncovered gyms.
            results = (Gym
                       .select()
                       .where(((Gym.latitude >= swLat) &
                               (Gym.longitude >= swLng) &
                               (Gym.latitude <= neLat) &
                               (Gym.longitude <= neLng)) &
                              ~((Gym.latitude >= oSwLat) &
                                (Gym.longitude >= oSwLng) &
                                (Gym.latitude <= oNeLat) &
                                (Gym.longitude <= oNeLng)))
                       .dicts())

        else:
            results = (Gym
                       .select()
                       .where((Gym.latitude >= swLat) &
                              (Gym.longitude >= swLng) &
                              (Gym.latitude <= neLat) &
                              (Gym.longitude <= neLng))
                       .dicts())

        # Performance: Disable the garbage collector prior to creating a (potentially) large dict with append().
        gc.disable()

        gyms = {}
        gym_ids = []
        for g in results:
            g['name'] = None
            g['pokemon'] = []
            gyms[g['gym_id']] = g
            gym_ids.append(g['gym_id'])

        if len(gym_ids) > 0:
            # Attach current defenders; only members scanned after the gym
            # itself last changed are considered current.
            pokemon = (GymMember
                       .select(
                           GymMember.gym_id,
                           GymPokemon.cp.alias('pokemon_cp'),
                           GymPokemon.pokemon_id,
                           Trainer.name.alias('trainer_name'),
                           Trainer.level.alias('trainer_level'))
                       .join(Gym, on=(GymMember.gym_id == Gym.gym_id))
                       .join(GymPokemon, on=(GymMember.pokemon_uid == GymPokemon.pokemon_uid))
                       .join(Trainer, on=(GymPokemon.trainer_name == Trainer.name))
                       .where(GymMember.gym_id << gym_ids)
                       .where(GymMember.last_scanned > Gym.last_modified)
                       .order_by(GymMember.gym_id, GymPokemon.cp)
                       .dicts())

            for p in pokemon:
                p['pokemon_name'] = get_pokemon_name(p['pokemon_id'])
                gyms[p['gym_id']]['pokemon'].append(p)

            # Attach gym names where details have been scanned.
            details = (GymDetails
                       .select(
                           GymDetails.gym_id,
                           GymDetails.name)
                       .where(GymDetails.gym_id << gym_ids)
                       .dicts())

            for d in details:
                gyms[d['gym_id']]['name'] = d['name']

        # Re-enable the GC.
        gc.enable()

        return gyms

    @staticmethod
    def get_gym(id):
        """Return one gym (by gym_id) as a dict with details and a full
        defender list (moves + IVs), ordered by CP descending.

        Raises the model's DoesNotExist if the gym is unknown.
        Note: parameter `id` shadows the builtin.
        """
        result = (Gym
                  .select(Gym.gym_id,
                          Gym.team_id,
                          GymDetails.name,
                          GymDetails.description,
                          Gym.guard_pokemon_id,
                          Gym.gym_points,
                          Gym.latitude,
                          Gym.longitude,
                          Gym.last_modified,
                          Gym.last_scanned)
                  .join(GymDetails, JOIN.LEFT_OUTER, on=(Gym.gym_id == GymDetails.gym_id))
                  .where(Gym.gym_id == id)
                  .dicts()
                  .get())

        result['guard_pokemon_name'] = get_pokemon_name(result['guard_pokemon_id']) if result['guard_pokemon_id'] else ''
        result['pokemon'] = []

        # Current defenders: only members scanned after the gym last changed.
        pokemon = (GymMember
                   .select(GymPokemon.cp.alias('pokemon_cp'),
                           GymPokemon.pokemon_id,
                           GymPokemon.pokemon_uid,
                           GymPokemon.move_1,
                           GymPokemon.move_2,
                           GymPokemon.iv_attack,
                           GymPokemon.iv_defense,
                           GymPokemon.iv_stamina,
                           Trainer.name.alias('trainer_name'),
                           Trainer.level.alias('trainer_level'))
                   .join(Gym, on=(GymMember.gym_id == Gym.gym_id))
                   .join(GymPokemon, on=(GymMember.pokemon_uid == GymPokemon.pokemon_uid))
                   .join(Trainer, on=(GymPokemon.trainer_name == Trainer.name))
                   .where(GymMember.gym_id == id)
                   .where(GymMember.last_scanned > Gym.last_modified)
                   .order_by(GymPokemon.cp.desc())
                   .dicts())

        for p in pokemon:
            p['pokemon_name'] = get_pokemon_name(p['pokemon_id'])

            # Resolve move ids to display data.
            p['move_1_name'] = get_move_name(p['move_1'])
            p['move_1_damage'] = get_move_damage(p['move_1'])
            p['move_1_energy'] = get_move_energy(p['move_1'])
            p['move_1_type'] = get_move_type(p['move_1'])

            p['move_2_name'] = get_move_name(p['move_2'])
            p['move_2_damage'] = get_move_damage(p['move_2'])
            p['move_2_energy'] = get_move_energy(p['move_2'])
            p['move_2_type'] = get_move_type(p['move_2'])

            result['pokemon'].append(p)

        return result
  639.  
  640.  
class ScannedLocation(BaseModel):
    """A scan circle and the state of its five 12-minute scan 'bands'.

    This module targets Python 2 (it relies on `reduce`, dict.iteritems and
    list-returning filter/map below).
    """

    cellid = CharField(primary_key=True, max_length=50)
    latitude = DoubleField()
    longitude = DoubleField()
    last_modified = DateTimeField(index=True, default=datetime.utcnow, null=True)
    # marked true when all five bands have been completed
    done = BooleanField(default=False)

    # Five scans/hour is required to catch all spawns
    # Each scan must be at least 12 minutes from the previous check,
    # with a 2 minute window during which the scan can be done

    # default of -1 is for bands not yet scanned
    band1 = IntegerField(default=-1)
    band2 = IntegerField(default=-1)
    band3 = IntegerField(default=-1)
    band4 = IntegerField(default=-1)
    band5 = IntegerField(default=-1)

    # midpoint is the center of the bands relative to band 1
    # e.g., if band 1 is 10.4 min, and band 4 is 34.0 min, midpoint is -0.2 min in minsec
    # extra 10 seconds in case of delay in recording now time
    midpoint = IntegerField(default=0)

    # width is how wide the valid window is. Default is 0, max is 2 min
    # e.g., if band 1 is 10.4 min, and band 4 is 34.0 min, midpoint is 0.4 min in minsec
    width = IntegerField(default=0)

    class Meta:
        indexes = ((('latitude', 'longitude'), False),)
        # Keep band/midpoint/width values inside their valid ranges at the
        # database level.
        constraints = [Check('band1 >= -1'), Check('band1 < 3600'),
                       Check('band2 >= -1'), Check('band2 < 3600'),
                       Check('band3 >= -1'), Check('band3 < 3600'),
                       Check('band4 >= -1'), Check('band4 < 3600'),
                       Check('band5 >= -1'), Check('band5 < 3600'),
                       Check('midpoint >= -130'), Check('midpoint <= 130'),
                       Check('width >= 0'), Check('width <= 130')]

    @staticmethod
    def get_recent(swLat, swLng, neLat, neLng, timestamp=0, oSwLat=None, oSwLng=None, oNeLat=None, oNeLng=None):
        """Return scanned locations modified in the last 15 minutes as a
        list of dicts, with the same viewport/timestamp branching used by
        the other map queries."""
        activeTime = (datetime.utcnow() - timedelta(minutes=15))
        if timestamp > 0:
            query = (ScannedLocation
                     .select()
                     .where(((ScannedLocation.last_modified >= datetime.utcfromtimestamp(timestamp / 1000))) &
                            (ScannedLocation.latitude >= swLat) &
                            (ScannedLocation.longitude >= swLng) &
                            (ScannedLocation.latitude <= neLat) &
                            (ScannedLocation.longitude <= neLng))
                     .dicts())
        elif oSwLat and oSwLng and oNeLat and oNeLng:
            # Send scannedlocations in view but exclude those within old boundaries. Only send newly uncovered scannedlocations.
            query = (ScannedLocation
                     .select()
                     .where((((ScannedLocation.last_modified >= activeTime)) &
                             (ScannedLocation.latitude >= swLat) &
                             (ScannedLocation.longitude >= swLng) &
                             (ScannedLocation.latitude <= neLat) &
                             (ScannedLocation.longitude <= neLng)) &
                            ~(((ScannedLocation.last_modified >= activeTime)) &
                              (ScannedLocation.latitude >= oSwLat) &
                              (ScannedLocation.longitude >= oSwLng) &
                              (ScannedLocation.latitude <= oNeLat) &
                              (ScannedLocation.longitude <= oNeLng)))
                     .dicts())
        else:
            query = (ScannedLocation
                     .select()
                     .where((ScannedLocation.last_modified >= activeTime) &
                            (ScannedLocation.latitude >= swLat) &
                            (ScannedLocation.longitude >= swLng) &
                            (ScannedLocation.latitude <= neLat) &
                            (ScannedLocation.longitude <= neLng))
                     .order_by(ScannedLocation.last_modified.asc())
                     .dicts())

        return list(query)

    # DB format of a new location
    @staticmethod
    def new_loc(loc):
        """Return the default row dict for a fresh (lat, lng) location."""
        return {'cellid': cellid(loc),
                'latitude': loc[0],
                'longitude': loc[1],
                'done': False,
                'band1': -1,
                'band2': -1,
                'band3': -1,
                'band4': -1,
                'band5': -1,
                'width': 0,
                'midpoint': 0,
                'last_modified': None}

    # Used to update bands
    @staticmethod
    def db_format(scan, band, nowms):
        """Record `nowms` (seconds into the hour) into band `band` of the
        scan dict and recompute 'done' (true once all five bands are set)."""
        scan.update({'band' + str(band): nowms})
        # `reduce` is the Python 2 builtin.
        scan['done'] = reduce(lambda x, y: x and (scan['band' + str(y)] > -1), range(1, 6), True)
        return scan

    # Shorthand helper for DB dict
    @staticmethod
    def _q_init(scan, start, end, kind, sp_id=None):
        return {'loc': scan['loc'], 'kind': kind, 'start': start, 'end': end, 'step': scan['step'], 'sp': sp_id}

    # return value of a particular scan from loc, or default dict if not found
    @classmethod
    def get_by_loc(cls, loc):
        query = (cls
                 .select()
                 .where((ScannedLocation.latitude == loc[0]) &
                        (ScannedLocation.longitude == loc[1]))
                 .dicts())

        # Fall back to a fresh default row when nothing matches.
        return query[0] if len(list(query)) else cls.new_loc(loc)

    # Check if spawn points in a list are in any of the existing spannedlocation records
    # Otherwise, search through the spawn point list, and update scan_spawn_point dict for DB bulk upserting
    @classmethod
    def link_spawn_points(cls, scans, initial, spawn_points, distance, scan_spawn_point, force=False):
        """Populate `scan_spawn_point` (mutated in place) with links from
        each not-yet-done scan cell to every spawn point within `distance`
        of its location. `force` re-links even completed cells."""
        for cell, scan in scans.iteritems():
            if initial[cell]['done'] and not force:
                continue

            for sp in spawn_points:
                if in_radius((sp['latitude'], sp['longitude']), scan['loc'], distance):
                    scan_spawn_point[cell + sp['id']] = {'spawnpoint': sp['id'],
                                                         'scannedlocation': cell}

    # return list of dicts for upcoming valid band times
    @classmethod
    def linked_spawn_points(cls, cell):
        """Return the spawn points linked (via ScanSpawnPoint) to `cell`."""

        # unable to use a normal join, since MySQL produces foreignkey constraint errors when
        # trying to upsert fields that are foreignkeys on another table

        query = (SpawnPoint
                 .select()
                 .join(ScanSpawnPoint)
                 .join(cls)
                 .where(cls.cellid == cell).dicts())

        return list(query)

    # return list of dicts for upcoming valid band times
    @staticmethod
    def visible_forts(step_location):
        """True if any gym or pokestop lies within 0.9 km of the step."""
        distance = 0.9
        n, e, s, w = hex_bounds(step_location, radius=distance * 1000)
        for g in Gym.get_gyms(s, w, n, e).values():
            if in_radius((g['latitude'], g['longitude']), step_location, distance):
                return True

        for g in Pokestop.get_stops(s, w, n, e):
            if in_radius((g['latitude'], g['longitude']), step_location, distance):
                return True

        return False

    # return list of dicts for upcoming valid band times
    @classmethod
    def get_times(cls, scan, now_date):
        """Return a one-element list with the next unfilled band window for
        this scan location (or [] if the location is done / nothing due).
        Note: locals `max`/`min` shadow the builtins within this method."""
        s = cls.get_by_loc(scan['loc'])
        if s['done']:
            return []

        max = 3600 * 2 + 250  # greater than maximum possible value
        min = {'end': max}

        nowms = date_secs(now_date)
        # Band 1 has no constraint yet: the whole next hour is valid.
        if s['band1'] == -1:
            return [cls._q_init(scan, nowms, nowms + 3599, 'band')]

        # Find next window
        basems = s['band1']
        for i in range(2, 6):
            ms = s['band' + str(i)]

            # skip bands already done
            if ms > -1:
                continue

            # Bands are 720 s (12 min) apart; the valid window narrows as
            # observed 'width' grows, with 10 s of slack for recording delay.
            radius = 120 - s['width'] / 2
            end = (basems + s['midpoint'] + radius + (i - 1) * 720 - 10) % 3600
            end = end if end >= nowms else end + 3600

            if end < min['end']:
                min = cls._q_init(scan, end - radius * 2 + 10, end, 'band')

        return [min] if min['end'] < max else []

    # Checks if now falls within an unfilled band for a scanned location
    # Returns the updated scan location dict
    @classmethod
    def update_band(cls, scan):
        now_date = datetime.utcnow()
        scan['last_modified'] = now_date

        if scan['done']:
            return scan

        now_secs = date_secs(now_date)
        if scan['band1'] == -1:
            return cls.db_format(scan, 1, now_secs)

        # calc if number falls in band with remaining points
        basems = scan['band1']
        delta = (now_secs - basems - scan['midpoint']) % 3600
        band = int(round(delta / 12 / 60.0) % 5) + 1

        # Check if that band already filled
        if scan['band' + str(band)] > -1:
            return scan

        # Check if this result falls within the band 2 min window
        offset = (delta + 1080) % 720 - 360
        if abs(offset) > 120 - scan['width'] / 2:
            return scan

        # find band midpoint/width
        scan = cls.db_format(scan, band, now_secs)
        # Python 2 filter/map return lists here.
        bts = [scan['band' + str(i)] for i in range(1, 6)]
        bts = filter(lambda ms: ms > -1, bts)
        bts_delta = map(lambda ms: (ms - basems) % 3600, bts)
        bts_offsets = map(lambda ms: (ms + 1080) % 720 - 360, bts_delta)
        min_scan = min(bts_offsets)
        max_scan = max(bts_offsets)
        scan['width'] = max_scan - min_scan
        # Py2 integer division: midpoint truncates toward negative infinity.
        scan['midpoint'] = (max_scan + min_scan) / 2

        return scan

    @classmethod
    def bands_filled(cls, locations):
        """Count how many bands are filled across `locations`, an iterable
        of (step, loc) pairs (loc is taken from index 1)."""
        filled = 0
        for e in locations:
            sl = cls.get_by_loc(e[1])
            bands = [sl['band' + str(i)] for i in range(1, 6)]
            filled += reduce(lambda x, y: x + (y > -1), bands, 0)

        return filled

    @classmethod
    def reset_bands(cls, scan_loc):
        """Clear all five bands on the dict in place and mark it not done."""
        scan_loc['done'] = False
        scan_loc['last_modified'] = datetime.utcnow()
        for i in range(1, 6):
            scan_loc['band' + str(i)] = -1

    @classmethod
    def select_in_hex(cls, center, steps):
        """Return scanned locations inside the hex of `steps` circles
        around `center`, clipping the bounding box at the hex diagonals."""
        # should be a way to delegate this to SpawnPoint.select_in_hex, but w/e

        R = 6378.1  # km radius of the earth
        hdist = ((steps * 120.0) - 50.0) / 1000.0
        n, e, s, w = hex_bounds(center, steps)

        # get all spawns in that box
        sp = list(cls
                  .select()
                  .where((cls.latitude <= n) &
                         (cls.latitude >= s) &
                         (cls.longitude >= w) &
                         (cls.longitude <= e))
                  .dicts())

        # for each spawn work out if it is in the hex (clipping the diagonals)
        in_hex = []
        for spawn in sp:
            # get the offset from the center of each spawn in km
            offset = [math.radians(spawn['latitude'] - center[0]) * R,
                      math.radians(spawn['longitude'] - center[1]) * (R * math.cos(math.radians(center[0])))]
            # check agains the 4 lines that make up the diagonals
            if (offset[1] + (offset[0] * 0.5)) > hdist:  # too far ne
                continue
            if (offset[1] - (offset[0] * 0.5)) > hdist:  # too far se
                continue
            if ((offset[0] * 0.5) - offset[1]) > hdist:  # too far nw
                continue
            if ((0 - offset[1]) - (offset[0] * 0.5)) > hdist:  # too far sw
                continue
            # if it gets to here its a good spawn
            in_hex.append(spawn)
        return in_hex
  926.  
  927.  
class MainWorker(BaseModel):
    """Status row for a top-level worker process, keyed by worker name."""
    worker_name = CharField(primary_key=True, max_length=50)
    message = CharField()
    # presumably the scheduling/search method the worker runs — confirm with writer
    method = CharField(max_length=50)
    last_modified = DateTimeField(index=True)
  933.  
  934.  
class WorkerStatus(BaseModel):
    """Per-account scan statistics; one row per account username."""
    username = CharField(primary_key=True, max_length=50)
    worker_name = CharField(index=True, max_length=50)
    success = IntegerField()
    fail = IntegerField()
    no_items = IntegerField()
    skip = IntegerField()
    last_modified = DateTimeField(index=True)
    message = CharField(max_length=255)
    last_scan_date = DateTimeField(index=True)
    latitude = DoubleField(null=True)
    longitude = DoubleField(null=True)

    @staticmethod
    def db_format(status, name='status_worker_db'):
        """Map an in-memory status dict onto this table's columns.

        'name' is used as worker_name when the dict doesn't carry one.
        Note the in-memory dict uses the key 'noitems' for what is
        stored in the 'no_items' column.
        """
        status['worker_name'] = status.get('worker_name', name)
        return {'username': status['username'],
                'worker_name': status['worker_name'],
                'success': status['success'],
                'fail': status['fail'],
                'no_items': status['noitems'],
                'skip': status['skip'],
                'last_modified': datetime.utcnow(),
                'message': status['message'],
                'last_scan_date': status.get('last_scan_date', datetime.utcnow()),
                'latitude': status.get('latitude', None),
                'longitude': status.get('longitude', None)}

    @staticmethod
    def get_recent():
        """Return status dicts for workers modified in the last 5 minutes."""
        query = (WorkerStatus
                 .select()
                 .where((WorkerStatus.last_modified >=
                         (datetime.utcnow() - timedelta(minutes=5))))
                 .order_by(WorkerStatus.username)
                 .dicts())

        status = []
        for s in query:
            status.append(s)

        return status

    @staticmethod
    def get_worker(username, loc=False):
        """Return the stored status dict for an account, or a fresh default.

        loc, when provided, is indexed as loc[0]/loc[1] (lat, lng) to
        seed the default dict's position.
        """
        query = (WorkerStatus
                 .select()
                 .where((WorkerStatus.username == username))
                 .dicts())

        # Sometimes it appears peewee is slow to load, and this produces an Exception
        # Retry after a second to give peewee time to load
        while True:
            try:
                result = query[0] if len(query) else {
                    'username': username,
                    'success': 0,
                    'fail': 0,
                    'no_items': 0,
                    'skip': 0,
                    'last_modified': datetime.utcnow(),
                    'message': 'New account {} loaded'.format(username),
                    'last_scan_date': datetime.utcnow(),
                    'latitude': loc[0] if loc else None,
                    'longitude': loc[1] if loc else None
                }
                break
            except Exception as e:
                log.error('Exception in get_worker under account {} Exception message: {}'.format(username, e))
                traceback.print_exc(file=sys.stdout)
                time.sleep(1)

        return result
  1008.  
  1009.  
class SpawnPoint(BaseModel):
    """A pokemon spawn location and what is known of its timing cycle.

    Times in this model are expressed as seconds after the hour
    (0..3599); the hour is split into four 15-minute quartiles.
    """
    id = CharField(primary_key=True, max_length=50)
    latitude = DoubleField()
    longitude = DoubleField()
    last_scanned = DateTimeField(index=True)
    # kind gives the four quartiles of the spawn, as 's' for seen or 'h' for hidden
    # for example, a 30 minute spawn is 'hhss'
    kind = CharField(max_length=4, default='hhhs')

    # links shows whether a pokemon encounter id changes between quartiles or stays the same
    # both 1x45 and 1x60h3 have the kind of 'sssh', but the different links shows when the
    # encounter id changes
    # same encounter id is shared between two quartiles, links shows a '+',
    # a different encounter id between two quartiles is a '-'
    # For the hidden times, an 'h' is used. Until determined, '?' is used.
    # Note index is shifted by a half. links[0] is the link between kind[0] and kind[1],
    # and so on. links[3] is the link between kind[3] and kind[0]
    links = CharField(max_length=4, default='????')

    # count consecutive times spawn should have been seen, but wasn't
    # if too high, will not be scheduled for review, and treated as inactive
    missed_count = IntegerField(default=0)

    # next 2 fields are to narrow down on the valid TTH window
    # seconds after the hour of the latest pokemon seen time within the hour
    latest_seen = IntegerField()

    # seconds after the hour of the earliest time wasn't seen after an appearance
    earliest_unseen = IntegerField()

    class Meta:
        indexes = ((('latitude', 'longitude'), False),)
        constraints = [Check('earliest_unseen >= 0'), Check('earliest_unseen < 3600'),
                       Check('latest_seen >= 0'), Check('latest_seen < 3600')]

    # Returns the spawn point dict from ID, or a new dict if not found
    @classmethod
    def get_by_id(cls, id, latitude=0, longitude=0):
        """Fetch a spawnpoint row as a dict, or a default dict for a new
        spawnpoint (last_scanned None doubles as the 'new' flag)."""
        query = (cls
                 .select()
                 .where(cls.id == id)
                 .dicts())

        return query[0] if query else {
            'id': id,
            'latitude': latitude,
            'longitude': longitude,
            'last_scanned': None,  # Null value used as new flag
            'kind': 'hhhs',
            'links': '????',
            'missed_count': 0,
            'latest_seen': None,
            'earliest_unseen': None

        }

    # Confirm if tth has been found
    @staticmethod
    def tth_found(sp):
        # fully identified when the seen/unseen window has collapsed to a point
        return sp['latest_seen'] == sp['earliest_unseen']

    # return [start, end] in seconds after the hour for the spawn, despawn time of a spawnpoint
    @classmethod
    def start_end(cls, sp, spawn_delay=0, links=False):
        """Compute [start, end] seconds-after-the-hour for a spawnpoint.

        spawn_delay shifts the start forward; 'links' may be passed to
        override sp['links'].  When no explicit links are given and the
        TTH isn't pinned down yet, a 60s safety margin is added to end.
        """
        links_arg = links
        links = links if links else str(sp['links'])

        if links == '????':  # clean up for old data
            links = str(sp['kind'].replace('s', '?'))

        # make some assumptions if link not fully identified
        if links.count('-') == 0:
            links = links[:-1] + '-'

        links = links.replace('?', '+')

        # force the last link to be the despawn boundary
        links = links[:-1] + '-'
        plus_or_minus = links.index('+') if links.count('+') else links.index('-')
        start = sp['earliest_unseen'] - (4 - plus_or_minus) * 900 + spawn_delay
        no_tth_adjust = 60 if not links_arg and not cls.tth_found(sp) else 0
        end = sp['latest_seen'] - (3 - links.index('-')) * 900 + no_tth_adjust
        return [start % 3600, end % 3600]

    # Return a list of dicts with the next spawn times
    @classmethod
    def get_times(cls, cell, scan, now_date, scan_delay):
        """Build scan-queue items for the spawnpoints linked to a cell.

        Queues a 'spawn' item per active spawnpoint (skipping those
        missed more than 5 times), plus a 'TTH' item when the despawn
        time still needs narrowing down.
        """
        l = []  # accumulated queue items
        now_secs = date_secs(now_date)
        for sp in ScannedLocation.linked_spawn_points(cell):

            # treated as inactive after too many consecutive misses
            if sp['missed_count'] > 5:
                continue

            endpoints = SpawnPoint.start_end(sp, scan_delay)
            cls.add_if_not_scanned('spawn', l, sp, scan, endpoints[0], endpoints[1], now_date, now_secs)

            # check to see if still searching for valid TTH
            if cls.tth_found(sp):
                continue

            # add a spawnpoint check between latest seen and earliest seen
            start = sp['latest_seen'] + scan_delay
            end = sp['earliest_unseen']

            cls.add_if_not_scanned('TTH', l, sp, scan, start, end, now_date, now_secs)

        return l

    @classmethod
    def add_if_not_scanned(cls, kind, l, sp, scan, start, end, now_date, now_secs):
        """Append a queue item for [start, end] to l unless the spawnpoint
        was already scanned since that window opened."""
        # make sure later than now_secs
        while end < now_secs:
            start, end = start + 3600, end + 3600

        # ensure start before end
        while start > end:
            start -= 3600

        # NOTE(review): get_by_id() returns last_scanned=None for unknown
        # spawnpoints, which would make this subtraction raise — presumably
        # sp already exists in the DB whenever this runs; verify.
        if (now_date - cls.get_by_id(sp['id'])['last_scanned']).total_seconds() > now_secs - start:
            l.append(ScannedLocation._q_init(scan, start, end, kind, sp['id']))

    # given seconds after the hour and a spawnpoint dict, return which quartile of the
    # spawnpoint the secs falls in
    @staticmethod
    def get_quartile(secs, sp):
        return int(((secs - sp['earliest_unseen'] + 15 * 60 + 3600 - 1) % 3600) / 15 / 60)

    @classmethod
    def select_in_hex(cls, center, steps):
        """Return spawnpoint dicts inside the hex around center.

        NOTE(review): duplicated logic with ScannedLocation.select_in_hex;
        candidate for a shared helper.
        """
        R = 6378.1  # km radius of the earth
        hdist = ((steps * 120.0) - 50.0) / 1000.0
        n, e, s, w = hex_bounds(center, steps)

        # get all spawns in that box
        sp = list(cls
                  .select()
                  .where((cls.latitude <= n) &
                         (cls.latitude >= s) &
                         (cls.longitude >= w) &
                         (cls.longitude <= e))
                  .dicts())

        # for each spawn work out if it is in the hex (clipping the diagonals)
        in_hex = []
        for spawn in sp:
            # get the offset from the center of each spawn in km
            offset = [math.radians(spawn['latitude'] - center[0]) * R,
                      math.radians(spawn['longitude'] - center[1]) * (R * math.cos(math.radians(center[0])))]
            # check against the 4 lines that make up the diagonals
            if (offset[1] + (offset[0] * 0.5)) > hdist:  # too far ne
                continue
            if (offset[1] - (offset[0] * 0.5)) > hdist:  # too far se
                continue
            if ((offset[0] * 0.5) - offset[1]) > hdist:  # too far nw
                continue
            if ((0 - offset[1]) - (offset[0] * 0.5)) > hdist:  # too far sw
                continue
            # if it gets to here its a good spawn
            in_hex.append(spawn)
        return in_hex
  1171.  
  1172.  
class ScanSpawnPoint(BaseModel):
    """Many-to-many link between ScannedLocation and SpawnPoint rows."""
    # removing ForeignKeyField due to MSQL issues with upserting rows that are foreignkeys for other tables
    # scannedlocation = ForeignKeyField(ScannedLocation)
    # spawnpoint = ForeignKeyField(SpawnPoint)

    scannedlocation = ForeignKeyField(ScannedLocation, null=True)
    spawnpoint = ForeignKeyField(SpawnPoint, null=True)

    class Meta:
        # each (spawnpoint, scannedlocation) pair appears at most once
        primary_key = CompositeKey('spawnpoint', 'scannedlocation')
  1183.  
  1184.  
class SpawnpointDetectionData(BaseModel):
    """One row per pokemon sighting at a spawnpoint; the accumulated
    sightings are used to classify the spawnpoint's duty cycle."""
    id = CharField(primary_key=True, max_length=54)
    encounter_id = CharField(max_length=54)  # removed ForeignKeyField since it caused MySQL issues
    spawnpoint_id = CharField(max_length=54)  # removed ForeignKeyField since it caused MySQL issues
    scan_time = DateTimeField()
    tth_secs = IntegerField(null=True)

    @staticmethod
    def set_default_earliest_unseen(sp):
        # default window: assume the pokemon stays up to 14 min past latest_seen
        sp['earliest_unseen'] = (sp['latest_seen'] + 14 * 60) % 3600

    @classmethod
    def classify(cls, sp, scan_loc, now_secs, sighting=None):
        """Update sp's kind/links/latest_seen/earliest_unseen in place from
        all recorded sightings of this spawnpoint (plus the optional
        not-yet-saved 'sighting' dict)."""

        # to reduce CPU usage, give an initial reading of 15 min spawns if not done with initial scan of location
        if not scan_loc['done']:
            sp['kind'] = 'hhhs'
            if not sp['earliest_unseen']:
                sp['latest_seen'] = now_secs
                cls.set_default_earliest_unseen(sp)

            elif clock_between(sp['latest_seen'], now_secs, sp['earliest_unseen']):
                sp['latest_seen'] = now_secs

            return

        # get past sightings
        query = list(cls.select()
                     .where(cls.spawnpoint_id == sp['id'])
                     .dicts())

        if sighting:
            query.append(sighting)

        # make a record of links, so we can reset earliest_unseen if it changes
        old_kind = str(sp['kind'])

        # make a sorted list of the seconds after the hour
        seen_secs = sorted(map(lambda x: date_secs(x['scan_time']), query))

        # add the first seen_secs to the end as a clock wrap around
        if seen_secs:
            seen_secs.append(seen_secs[0] + 3600)

        # make a list of gaps between sightings
        gap_list = [seen_secs[i + 1] - seen_secs[i] for i in range(len(seen_secs) - 1)]

        # NOTE(review): assumes at least one sighting exists here — max() on
        # an empty gap_list would raise; confirm callers guarantee this.
        max_gap = max(gap_list)

        # an hour (60 min) minus the largest gap in minutes gives us the duration the spawn was there
        # round up to the nearest 15 min interval for our current best duration guess
        duration = (int((59 - max_gap / 60.0) / 15) + 1) * 15

        # if the second largest gap is larger than 15 minutes, then there are two gaps that are
        # greater than 15 min, so it must be a double-spawn
        if len(gap_list) > 4 and sorted(gap_list)[-2] > 900:
            sp['kind'] = 'hshs'
            sp['links'] = 'h?h?'

        else:
            # convert the duration into a 'hhhs', 'hhss', 'hsss', 'ssss' string accordingly
            # 's' is for seen, 'h' is for hidden
            sp['kind'] = ''.join(['s' if i > (3 - duration / 15) else 'h' for i in range(0, 4)])

        # assume no hidden times
        sp['links'] = sp['kind'].replace('s', '?')

        if sp['kind'] != 'ssss':

            if not sp['earliest_unseen'] or sp['earliest_unseen'] != sp['latest_seen']:

                # new latest seen will be just before max_gap
                sp['latest_seen'] = seen_secs[gap_list.index(max_gap)]

                # if we don't have a earliest_unseen yet or the kind of spawn has changed, reset
                # set to latest_seen + 14 min
                if not sp['earliest_unseen'] or sp['kind'] != old_kind:
                    cls.set_default_earliest_unseen(sp)

            return

        # only ssss spawns from here below

        sp['links'] = '+++-'
        if sp['earliest_unseen'] == sp['latest_seen']:
            return

        # make a sight_list of dicts like
        # {date: first seen time,
        # delta: duration of sighting,
        # same: whether encounter ID was same or different over that time}

        # for 60 min spawns ('ssss'), the largest gap doesn't give the earliest spawn point,
        # because a pokemon is always there
        # use the union of all intervals where the same encounter ID was seen to find the latest_seen
        # If a different encounter ID was seen, then the complement of that interval was the same
        # ID, so union that complement as well

        sight_list = [{'date': query[i]['scan_time'],
                       'delta': query[i + 1]['scan_time'] - query[i]['scan_time'],
                       'same': query[i + 1]['encounter_id'] == query[i]['encounter_id']}
                      for i in range(len(query) - 1)
                      if query[i + 1]['scan_time'] - query[i]['scan_time'] < timedelta(hours=1)]

        start_end_list = []
        for s in sight_list:
            if s['same']:
                # get the seconds past the hour for start and end times
                start = date_secs(s['date'])
                end = (start + int(s['delta'].total_seconds())) % 3600

            else:
                # convert diff range to same range by taking the clock complement
                start = date_secs(s['date'] + s['delta']) % 3600
                end = date_secs(s['date'])

            start_end_list.append([start, end])

        # Take the union of all the ranges
        while True:
            # union is list of unions of ranges with the same encounter id
            union = []
            for start, end in start_end_list:
                if not union:
                    union.append([start, end])
                    continue
                # cycle through all ranges in union, since it might overlap with any of them
                for u in union:
                    if clock_between(u[0], start, u[1]):
                        u[1] = end if not(clock_between(u[0], end, u[1])) else u[1]
                    elif clock_between(u[0], end, u[1]):
                        u[0] = start if not(clock_between(u[0], start, u[1])) else u[0]
                    elif union.count([start, end]) == 0:
                        union.append([start, end])

            # Are no more unions possible?
            if union == start_end_list:
                break
            else:
                start_end_list = union  # Make another pass looking for unions

        # if more than one disparate union, take the largest as our starting point
        # NOTE(review): relies on the Python 2 builtin reduce(); Python 3
        # would need functools.reduce.
        union = reduce(lambda x, y: x if (x[1] - x[0]) % 3600 > (y[1] - y[0]) % 3600 else y,
                       union, [0, 3600])
        sp['latest_seen'] = union[1]
        sp['earliest_unseen'] = union[0]
        log.info('1x60: appear %d, despawn %d, duration: %d min', union[0], union[1], ((union[1] - union[0]) % 3600) / 60)

    # expand the seen times for 30 minute spawnpoints based on scans when spawn wasn't there
    # return true if spawnpoint dict changed
    @classmethod
    def unseen(cls, sp, now_secs):
        """Narrow the TTH window: if 'now' falls inside the current
        seen/unseen window, pull earliest_unseen back to now_secs."""

        # return if we already have a tth
        if sp['latest_seen'] == sp['earliest_unseen']:
            return False

        # if now_secs is later than the latest seen return
        if not clock_between(sp['latest_seen'], now_secs, sp['earliest_unseen']):
            return False

        sp['earliest_unseen'] = now_secs

        return True

    # expand a 30 minute spawn with a new seen point based on which endpoint it is closer to
    # return true if sp changed
    @classmethod
    def clock_extend(cls, sp, new_secs):
        """Widen the seen interval toward new_secs, moving whichever
        endpoint is closer.

        NOTE(review): reads sp['earliest_seen'], a key the SpawnPoint
        model above does not declare — confirm which dicts reach here.
        """
        # check if this is a new earliest time
        if clock_between(sp['earliest_seen'], new_secs, sp['latest_seen']):
            return False

        # extend earliest or latest seen depending on which is closer to the new point
        if secs_between(new_secs, sp['earliest_seen']) < secs_between(new_secs, sp['latest_seen']):
            sp['earliest_seen'] = new_secs
        else:
            sp['latest_seen'] = new_secs

        return True
  1365.  
  1366.  
class Versions(flaskDb.Model):
    """Key/value table with no primary key — presumably holds the schema
    version (see db_schema_version in this module); confirm with users."""
    key = CharField()
    val = IntegerField()

    class Meta:
        primary_key = False
  1373.  
  1374.  
class GymMember(BaseModel):
    """Link row associating a gym with a defending pokemon's uid."""
    gym_id = CharField(index=True)
    pokemon_uid = CharField()
    last_scanned = DateTimeField(default=datetime.utcnow)

    class Meta:
        # no primary key: multiple rows per gym are expected
        primary_key = False
  1382.  
  1383.  
class GymPokemon(BaseModel):
    """Stats for an individual pokemon seen defending a gym."""
    pokemon_uid = CharField(primary_key=True, max_length=50)
    pokemon_id = IntegerField()
    cp = IntegerField()
    trainer_name = CharField()
    num_upgrades = IntegerField(null=True)
    move_1 = IntegerField(null=True)
    move_2 = IntegerField(null=True)
    height = FloatField(null=True)
    weight = FloatField(null=True)
    stamina = IntegerField(null=True)
    stamina_max = IntegerField(null=True)
    cp_multiplier = FloatField(null=True)
    additional_cp_multiplier = FloatField(null=True)
    # individual values (IVs)
    iv_defense = IntegerField(null=True)
    iv_stamina = IntegerField(null=True)
    iv_attack = IntegerField(null=True)
    last_seen = DateTimeField(default=datetime.utcnow)
  1402.  
  1403.  
class Trainer(BaseModel):
    """A trainer observed at a gym, keyed by trainer name."""
    name = CharField(primary_key=True, max_length=50)
    team = IntegerField()
    level = IntegerField()
    last_seen = DateTimeField(default=datetime.utcnow)
  1409.  
  1410.  
class GymDetails(BaseModel):
    """Display details (name/description/image url) for a gym."""
    gym_id = CharField(primary_key=True, max_length=50)
    name = CharField()
    description = TextField(null=True, default="")
    url = CharField()
    last_scanned = DateTimeField(default=datetime.utcnow)
  1417.  
  1418.  
  1419. def hex_bounds(center, steps=None, radius=None):
  1420. # Make a box that is (70m * step_limit * 2) + 70m away from the center point
  1421. # Rationale is that you need to travel
  1422. sp_dist = 0.07 * (2 * steps + 1) if steps else radius
  1423. n = get_new_coords(center, sp_dist, 0)[0]
  1424. e = get_new_coords(center, sp_dist, 90)[1]
  1425. s = get_new_coords(center, sp_dist, 180)[0]
  1426. w = get_new_coords(center, sp_dist, 270)[1]
  1427. return (n, e, s, w)
  1428.  
  1429.  
  1430. # todo: this probably shouldn't _really_ be in "models" anymore, but w/e
  1431. def parse_map(args, map_dict, step_location, db_update_queue, wh_update_queue, api, now_date):
  1432. pokemons = {}
  1433. pokestops = {}
  1434. gyms = {}
  1435. skipped = 0
  1436. stopsskipped = 0
  1437. forts = []
  1438. wild_pokemon = []
  1439. nearby_pokemons = []
  1440. spawn_points = {}
  1441. scan_spawn_points = {}
  1442. sightings = {}
  1443. new_spawn_points = []
  1444. sp_id_list = []
  1445. now_secs = date_secs(now_date)
  1446.  
  1447. # consolidate the individual lists in each cell into one list of pokemon and a list of forts
  1448. cells = map_dict['responses']['GET_MAP_OBJECTS']['map_cells']
  1449. for cell in cells:
  1450. nearby_pokemons += cell.get('nearby_pokemons', [])
  1451. if config['parse_pokemon']:
  1452. wild_pokemon += cell.get('wild_pokemons', [])
  1453.  
  1454. if config['parse_pokestops'] or config['parse_gyms']:
  1455. forts += cell.get('forts', [])
  1456.  
  1457. if not len(nearby_pokemons) and not len(wild_pokemon):
  1458. log.warning('Nothing on nearby_pokemons or wild. Speed violation?')
  1459. log.info("Common causes: not using -speed, deleting or dropping the WorkerStatus table without waiting before restarting, or there really aren't any pokemon in 200m")
  1460.  
  1461. scan_loc = ScannedLocation.get_by_loc(step_location)
  1462. done_already = scan_loc['done']
  1463. ScannedLocation.update_band(scan_loc)
  1464. just_completed = not done_already and scan_loc['done']
  1465.  
  1466. if len(wild_pokemon):
  1467. encounter_ids = [b64encode(str(p['encounter_id'])) for p in wild_pokemon]
  1468. # For all the wild Pokemon we found check if an active Pokemon is in the database.
  1469. query = (Pokemon
  1470. .select(Pokemon.encounter_id, Pokemon.spawnpoint_id)
  1471. .where((Pokemon.disappear_time > datetime.utcnow()) & (Pokemon.encounter_id << encounter_ids))
  1472. .dicts())
  1473.  
  1474. # Store all encounter_ids and spawnpoint_id for the pokemon in query (all thats needed to make sure its unique).
  1475. encountered_pokemon = [(p['encounter_id'], p['spawnpoint_id']) for p in query]
  1476.  
  1477. for p in wild_pokemon:
  1478.  
  1479. sp = SpawnPoint.get_by_id(p['spawn_point_id'], p['latitude'], p['longitude'])
  1480. spawn_points[p['spawn_point_id']] = sp
  1481. sp['missed_count'] = 0
  1482.  
  1483. sighting = {
  1484. 'id': b64encode(str(p['encounter_id'])) + '_' + str(now_secs),
  1485. 'encounter_id': b64encode(str(p['encounter_id'])),
  1486. 'spawnpoint_id': p['spawn_point_id'],
  1487. 'scan_time': now_date,
  1488. 'tth_secs': None
  1489. }
  1490.  
  1491. sp_id_list.append(p['spawn_point_id']) # keep a list of sp_ids to return
  1492.  
  1493. # time_till_hidden_ms was overflowing causing a negative integer.
  1494. # It was also returning a value above 3.6M ms.
  1495. if 0 < p['time_till_hidden_ms'] < 3600000:
  1496. d_t_secs = date_secs(datetime.utcfromtimestamp((p['last_modified_timestamp_ms'] + p['time_till_hidden_ms']) / 1000.0))
  1497. if sp['latest_seen'] != sp['earliest_unseen']:
  1498. log.info('TTH found for spawnpoint %s', sp['id'])
  1499. sighting['tth_secs'] = d_t_secs
  1500.  
  1501. # only update when TTH is seen for the first time
  1502. # just before pokemon migrations, Niantic sets all TTH to the exact time of the migration
  1503. # not the normal despawn time
  1504. sp['latest_seen'] = d_t_secs
  1505. sp['earliest_unseen'] = d_t_secs
  1506.  
  1507. scan_spawn_points[scan_loc['cellid'] + sp['id']] = {'spawnpoint': sp['id'],
  1508. 'scannedlocation': scan_loc['cellid']}
  1509. if not sp['last_scanned']:
  1510. log.info('New Spawn Point found!')
  1511. new_spawn_points.append(sp)
  1512.  
  1513. # if we found a new spawnpoint after the location was already fully scanned
  1514. # either it's new, or we had a bad scan. Either way, rescan the loc
  1515. if scan_loc['done'] and not just_completed:
  1516. log.warning('Location was fully scanned, and yet a brand new spawnpoint found.')
  1517. log.warning('Redoing scan of this location to identify new spawnpoint.')
  1518. ScannedLocation.reset_bands(scan_loc)
  1519.  
  1520. if (not SpawnPoint.tth_found(sp) or sighting['tth_secs'] or not scan_loc['done'] or just_completed):
  1521. SpawnpointDetectionData.classify(sp, scan_loc, now_secs, sighting)
  1522. sightings[p['encounter_id']] = sighting
  1523.  
  1524. sp['last_scanned'] = datetime.utcfromtimestamp(p['last_modified_timestamp_ms'] / 1000.0)
  1525.  
  1526. if (b64encode(str(p['encounter_id'])), p['spawn_point_id']) in encountered_pokemon:
  1527. # If pokemon has been encountered before dont process it.
  1528. skipped += 1
  1529. continue
  1530.  
  1531. start_end = SpawnPoint.start_end(sp, 1)
  1532. seconds_until_despawn = (start_end[1] - now_secs) % 3600
  1533. disappear_time = now_date + timedelta(seconds=seconds_until_despawn)
  1534.  
  1535. printPokemon(p['pokemon_data']['pokemon_id'], p['latitude'], p['longitude'], disappear_time)
  1536.  
  1537. # Scan for IVs and moves.
  1538. encounter_result = None
  1539. if (args.encounter and (p['pokemon_data']['pokemon_id'] in args.encounter_whitelist or
  1540. p['pokemon_data']['pokemon_id'] not in args.encounter_blacklist and not args.encounter_whitelist)):
  1541. time.sleep(args.encounter_delay)
  1542. # Set up encounter request envelope
  1543. req = api.create_request()
  1544. encounter_result = req.encounter(encounter_id=p['encounter_id'],
  1545. spawn_point_id=p['spawn_point_id'],
  1546. player_latitude=step_location[0],
  1547. player_longitude=step_location[1])
  1548. encounter_result = req.check_challenge()
  1549. encounter_result = req.get_hatched_eggs()
  1550. encounter_result = req.get_inventory()
  1551. encounter_result = req.check_awarded_badges()
  1552. encounter_result = req.download_settings()
  1553. encounter_result = req.get_buddy_walked()
  1554. encounter_result = req.call()
  1555.  
  1556. pokemons[p['encounter_id']] = {
  1557. 'encounter_id': b64encode(str(p['encounter_id'])),
  1558. 'spawnpoint_id': p['spawn_point_id'],
  1559. 'pokemon_id': p['pokemon_data']['pokemon_id'],
  1560. 'latitude': p['latitude'],
  1561. 'longitude': p['longitude'],
  1562. 'disappear_time': disappear_time,
  1563. 'individual_attack': None,
  1564. 'individual_defense': None,
  1565. 'individual_stamina': None,
  1566. 'move_1': None,
  1567. 'move_2': None
  1568. }
  1569.  
  1570. if encounter_result is not None and 'wild_pokemon' in encounter_result['responses']['ENCOUNTER']:
  1571. pokemon_info = encounter_result['responses']['ENCOUNTER']['wild_pokemon']['pokemon_data']
  1572. pokemons[p['encounter_id']].update({
  1573. 'individual_attack': pokemon_info.get('individual_attack', 0),
  1574. 'individual_defense': pokemon_info.get('individual_defense', 0),
  1575. 'individual_stamina': pokemon_info.get('individual_stamina', 0),
  1576. 'move_1': pokemon_info['move_1'],
  1577. 'move_2': pokemon_info['move_2'],
  1578. })
  1579.  
  1580. if args.webhooks:
  1581.  
  1582. wh_poke = pokemons[p['encounter_id']].copy()
  1583. wh_poke.update({
  1584. 'disappear_time': calendar.timegm(disappear_time.timetuple()),
  1585. 'last_modified_time': p['last_modified_timestamp_ms'],
  1586. 'time_until_hidden_ms': p['time_till_hidden_ms']
  1587. })
  1588. wh_update_queue.put(('pokemon', wh_poke))
  1589.  
  1590. if len(forts):
  1591. if config['parse_pokestops']:
  1592. stop_ids = [f['id'] for f in forts if f.get('type') == 1]
  1593. if len(stop_ids) > 0:
  1594. query = (Pokestop
  1595. .select(Pokestop.pokestop_id, Pokestop.last_modified)
  1596. .where((Pokestop.pokestop_id << stop_ids))
  1597. .dicts())
  1598. encountered_pokestops = [(f['pokestop_id'], int((f['last_modified'] - datetime(1970, 1, 1)).total_seconds())) for f in query]
  1599.  
  1600. for f in forts:
  1601. if config['parse_pokestops'] and f.get('type') == 1: # Pokestops.
  1602. if 'active_fort_modifier' in f:
  1603. lure_expiration = datetime.utcfromtimestamp(
  1604. f['last_modified_timestamp_ms'] / 1000.0) + timedelta(minutes=30)
  1605. active_fort_modifier = f['active_fort_modifier']
  1606. if args.webhooks and args.webhook_updates_only:
  1607. wh_update_queue.put(('pokestop', {
  1608. 'pokestop_id': b64encode(str(f['id'])),
  1609. 'enabled': f['enabled'],
  1610. 'latitude': f['latitude'],
  1611. 'longitude': f['longitude'],
  1612. 'last_modified_time': f['last_modified_timestamp_ms'],
  1613. 'lure_expiration': calendar.timegm(lure_expiration.timetuple()),
  1614. 'active_fort_modifier': active_fort_modifier
  1615. }))
  1616. else:
  1617. lure_expiration, active_fort_modifier = None, None
  1618.  
  1619. # Send all pokestops to webhooks.
  1620. if args.webhooks and not args.webhook_updates_only:
  1621. # Explicitly set 'webhook_data', in case we want to change the information pushed to webhooks,
  1622. # similar to above and previous commits.
  1623. l_e = None
  1624.  
  1625. if lure_expiration is not None:
  1626. l_e = calendar.timegm(lure_expiration.timetuple())
  1627.  
  1628. wh_update_queue.put(('pokestop', {
  1629. 'pokestop_id': b64encode(str(f['id'])),
  1630. 'enabled': f['enabled'],
  1631. 'latitude': f['latitude'],
  1632. 'longitude': f['longitude'],
  1633. 'last_modified': f['last_modified_timestamp_ms'],
  1634. 'lure_expiration': l_e,
  1635. 'active_fort_modifier': active_fort_modifier
  1636. }))
  1637.  
  1638. if (f['id'], int(f['last_modified_timestamp_ms'] / 1000.0)) in encountered_pokestops:
  1639. # If pokestop has been encountered before and hasn't changed dont process it.
  1640. stopsskipped += 1
  1641. continue
  1642.  
  1643. pokestops[f['id']] = {
  1644. 'pokestop_id': f['id'],
  1645. 'enabled': f['enabled'],
  1646. 'latitude': f['latitude'],
  1647. 'longitude': f['longitude'],
  1648. 'last_modified': datetime.utcfromtimestamp(
  1649. f['last_modified_timestamp_ms'] / 1000.0),
  1650. 'lure_expiration': lure_expiration,
  1651. 'active_fort_modifier': active_fort_modifier
  1652. }
  1653.  
  1654. elif config['parse_gyms'] and f.get('type') is None: # Currently, there are only stops and gyms
  1655. # Send gyms to webhooks.
  1656. if args.webhooks and not args.webhook_updates_only:
  1657. # Explicitly set 'webhook_data', in case we want to change the information pushed to webhooks,
  1658. # similar to above and previous commits.
  1659. wh_update_queue.put(('gym', {
  1660. 'gym_id': b64encode(str(f['id'])),
  1661. 'team_id': f.get('owned_by_team', 0),
  1662. 'guard_pokemon_id': f.get('guard_pokemon_id', 0),
  1663. 'gym_points': f.get('gym_points', 0),
  1664. 'enabled': f['enabled'],
  1665. 'latitude': f['latitude'],
  1666. 'longitude': f['longitude'],
  1667. 'last_modified': f['last_modified_timestamp_ms']
  1668. }))
  1669.  
  1670. gyms[f['id']] = {
  1671. 'gym_id': f['id'],
  1672. 'team_id': f.get('owned_by_team', 0),
  1673. 'guard_pokemon_id': f.get('guard_pokemon_id', 0),
  1674. 'gym_points': f.get('gym_points', 0),
  1675. 'enabled': f['enabled'],
  1676. 'latitude': f['latitude'],
  1677. 'longitude': f['longitude'],
  1678. 'last_modified': datetime.utcfromtimestamp(
  1679. f['last_modified_timestamp_ms'] / 1000.0),
  1680. }
  1681.  
  1682. log.info('Parsing found %d pokemons, %d pokestops, and %d gyms.',
  1683. len(pokemons) + skipped,
  1684. len(pokestops) + stopsskipped,
  1685. len(gyms))
  1686.  
  1687. log.debug('Skipped %d Pokemons and %d pokestops.', skipped, stopsskipped)
  1688.  
  1689. # look for spawnpoints within scan_loc that are not here to see if can narrow down tth window
  1690. for sp in ScannedLocation.linked_spawn_points(scan_loc['cellid']):
  1691. if sp['id'] in sp_id_list:
  1692. sp = spawn_points[sp['id']] # Don't overwrite changes from this parse with DB version
  1693. else:
  1694. if SpawnpointDetectionData.unseen(sp, now_secs):
  1695. spawn_points[sp['id']] = sp
  1696. endpoints = SpawnPoint.start_end(sp, args.spawn_delay)
  1697. if clock_between(endpoints[0], now_secs, endpoints[1]):
  1698. sp['missed_count'] += 1
  1699. spawn_points[sp['id']] = sp
  1700. log.warning('%s kind spawnpoint %s has no pokemon %d times in a row',
  1701. sp['kind'], sp['id'], sp['missed_count'])
  1702. log.info('Possible causes: Still doing initial scan, or super rare double spawnpoint during hidden period, or Niantic has removed spawnpoint')
  1703.  
  1704. if (not SpawnPoint.tth_found(sp) and scan_loc['done'] and
  1705. (sp['earliest_unseen'] - sp['latest_seen'] - args.spawn_delay) % 3600 < 60):
  1706. log.warning('Spawnpoint %s was unable to locate a TTH, with only %ss after pokemon last seen',
  1707. sp['id'], (sp['earliest_unseen'] - sp['latest_seen']) % 3600)
  1708. log.info('Embiggening search for TTH by 15 minutes to try again')
  1709. if sp['id'] not in sp_id_list:
  1710. SpawnpointDetectionData.classify(sp, scan_loc, now_secs)
  1711. sp['latest_seen'] = (sp['latest_seen'] - 60) % 3600
  1712. sp['earliest_unseen'] = (sp['earliest_unseen'] + 14 * 60) % 3600
  1713. spawn_points[sp['id']] = sp
  1714.  
  1715. db_update_queue.put((ScannedLocation, {0: scan_loc}))
  1716.  
  1717. if len(pokemons):
  1718. db_update_queue.put((Pokemon, pokemons))
  1719. if len(pokestops):
  1720. db_update_queue.put((Pokestop, pokestops))
  1721. if len(gyms):
  1722. db_update_queue.put((Gym, gyms))
  1723. if len(spawn_points):
  1724. db_update_queue.put((SpawnPoint, spawn_points))
  1725. db_update_queue.put((ScanSpawnPoint, scan_spawn_points))
  1726. if len(sightings):
  1727. db_update_queue.put((SpawnpointDetectionData, sightings))
  1728.  
  1729. return {
  1730. 'count': len(wild_pokemon) + len(forts),
  1731. 'gyms': gyms,
  1732. 'sp_id_list': sp_id_list,
  1733. 'bad_scan': False
  1734. }
  1735.  
  1736.  
  1737. def parse_gyms(args, gym_responses, wh_update_queue, db_update_queue):
  1738. gym_details = {}
  1739. gym_members = {}
  1740. gym_pokemon = {}
  1741. trainers = {}
  1742.  
  1743. i = 0
  1744. for g in gym_responses.values():
  1745. gym_state = g['gym_state']
  1746. gym_id = gym_state['fort_data']['id']
  1747.  
  1748. gym_details[gym_id] = {
  1749. 'gym_id': gym_id,
  1750. 'name': g['name'],
  1751. 'description': g.get('description'),
  1752. 'url': g['urls'][0],
  1753. }
  1754.  
  1755. if args.webhooks:
  1756. webhook_data = {
  1757. 'id': gym_id,
  1758. 'latitude': gym_state['fort_data']['latitude'],
  1759. 'longitude': gym_state['fort_data']['longitude'],
  1760. 'team': gym_state['fort_data'].get('owned_by_team', 0),
  1761. 'name': g['name'],
  1762. 'description': g.get('description'),
  1763. 'url': g['urls'][0],
  1764. 'pokemon': [],
  1765. }
  1766.  
  1767. for member in gym_state.get('memberships', []):
  1768. gym_members[i] = {
  1769. 'gym_id': gym_id,
  1770. 'pokemon_uid': member['pokemon_data']['id'],
  1771. }
  1772.  
  1773. gym_pokemon[i] = {
  1774. 'pokemon_uid': member['pokemon_data']['id'],
  1775. 'pokemon_id': member['pokemon_data']['pokemon_id'],
  1776. 'cp': member['pokemon_data']['cp'],
  1777. 'trainer_name': member['trainer_public_profile']['name'],
  1778. 'num_upgrades': member['pokemon_data'].get('num_upgrades', 0),
  1779. 'move_1': member['pokemon_data'].get('move_1'),
  1780. 'move_2': member['pokemon_data'].get('move_2'),
  1781. 'height': member['pokemon_data'].get('height_m'),
  1782. 'weight': member['pokemon_data'].get('weight_kg'),
  1783. 'stamina': member['pokemon_data'].get('stamina'),
  1784. 'stamina_max': member['pokemon_data'].get('stamina_max'),
  1785. 'cp_multiplier': member['pokemon_data'].get('cp_multiplier'),
  1786. 'additional_cp_multiplier': member['pokemon_data'].get('additional_cp_multiplier', 0),
  1787. 'iv_defense': member['pokemon_data'].get('individual_defense', 0),
  1788. 'iv_stamina': member['pokemon_data'].get('individual_stamina', 0),
  1789. 'iv_attack': member['pokemon_data'].get('individual_attack', 0),
  1790. 'last_seen': datetime.utcnow(),
  1791. }
  1792.  
  1793. trainers[i] = {
  1794. 'name': member['trainer_public_profile']['name'],
  1795. 'team': gym_state['fort_data']['owned_by_team'],
  1796. 'level': member['trainer_public_profile']['level'],
  1797. 'last_seen': datetime.utcnow(),
  1798. }
  1799.  
  1800. if args.webhooks:
  1801. webhook_data['pokemon'].append({
  1802. 'pokemon_uid': member['pokemon_data']['id'],
  1803. 'pokemon_id': member['pokemon_data']['pokemon_id'],
  1804. 'cp': member['pokemon_data']['cp'],
  1805. 'num_upgrades': member['pokemon_data'].get('num_upgrades', 0),
  1806. 'move_1': member['pokemon_data'].get('move_1'),
  1807. 'move_2': member['pokemon_data'].get('move_2'),
  1808. 'height': member['pokemon_data'].get('height_m'),
  1809. 'weight': member['pokemon_data'].get('weight_kg'),
  1810. 'stamina': member['pokemon_data'].get('stamina'),
  1811. 'stamina_max': member['pokemon_data'].get('stamina_max'),
  1812. 'cp_multiplier': member['pokemon_data'].get('cp_multiplier'),
  1813. 'additional_cp_multiplier': member['pokemon_data'].get('additional_cp_multiplier', 0),
  1814. 'iv_defense': member['pokemon_data'].get('individual_defense', 0),
  1815. 'iv_stamina': member['pokemon_data'].get('individual_stamina', 0),
  1816. 'iv_attack': member['pokemon_data'].get('individual_attack', 0),
  1817. 'trainer_name': member['trainer_public_profile']['name'],
  1818. 'trainer_level': member['trainer_public_profile']['level'],
  1819. })
  1820.  
  1821. i += 1
  1822. if args.webhooks:
  1823. wh_update_queue.put(('gym_details', webhook_data))
  1824.  
  1825. # All this database stuff is synchronous (not using the upsert queue) on purpose.
  1826. # Since the search workers load the GymDetails model from the database to determine if a gym
  1827. # needs rescanned, we need to be sure the GymDetails get fully committed to the database before moving on.
  1828. #
  1829. # We _could_ synchronously upsert GymDetails, then queue the other tables for
  1830. # upsert, but that would put that Gym's overall information in a weird non-atomic state.
  1831.  
  1832. # Upsert all the models.
  1833. if len(gym_details):
  1834. db_update_queue.put((GymDetails, gym_details))
  1835. if len(gym_pokemon):
  1836. db_update_queue.put((GymPokemon, gym_pokemon))
  1837. if len(trainers):
  1838. db_update_queue.put((Trainer, trainers))
  1839.  
  1840. # This needs to be completed in a transaction, because we don't wany any other thread or process
  1841. # to mess with the GymMembers for the gyms we're updating while we're updating the bridge table.
  1842. with flaskDb.database.transaction():
  1843. # Get rid of all the gym members, we're going to insert new records.
  1844. if len(gym_details):
  1845. DeleteQuery(GymMember).where(GymMember.gym_id << gym_details.keys()).execute()
  1846.  
  1847. # Insert new gym members.
  1848. if len(gym_members):
  1849. db_update_queue.put((GymMember, gym_members))
  1850.  
  1851. log.info('Upserted %d gyms and %d gym members',
  1852. len(gym_details),
  1853. len(gym_members))
  1854.  
  1855.  
  1856. def db_updater(args, q, db):
  1857. # The forever loop.
  1858. while True:
  1859. try:
  1860.  
  1861. while True:
  1862. try:
  1863. flaskDb.connect_db()
  1864. break
  1865. except Exception as e:
  1866. log.warning('%s... Retrying', e)
  1867.  
  1868. # Loop the queue.
  1869. while True:
  1870. model, data = q.get()
  1871. bulk_upsert(model, data, db)
  1872. q.task_done()
  1873. log.debug('Upserted to %s, %d records (upsert queue remaining: %d)',
  1874. model.__name__,
  1875. len(data),
  1876. q.qsize())
  1877. if q.qsize() > 50:
  1878. log.warning("DB queue is > 50 (@%d); try increasing --db-threads", q.qsize())
  1879.  
  1880. except Exception as e:
  1881. log.exception('Exception in db_updater: %s', e)
  1882.  
  1883.  
  1884. def clean_db_loop(args):
  1885. while True:
  1886. try:
  1887. query = (MainWorker
  1888. .delete()
  1889. .where((ScannedLocation.last_modified <
  1890. (datetime.utcnow() - timedelta(minutes=30)))))
  1891. query.execute()
  1892.  
  1893. query = (WorkerStatus
  1894. .delete()
  1895. .where((ScannedLocation.last_modified <
  1896. (datetime.utcnow() - timedelta(minutes=30)))))
  1897. query.execute()
  1898.  
  1899. # Remove active modifier from expired lured pokestops.
  1900. query = (Pokestop
  1901. .update(lure_expiration=None, active_fort_modifier=None)
  1902. .where(Pokestop.lure_expiration < datetime.utcnow()))
  1903. query.execute()
  1904.  
  1905. # If desired, clear old pokemon spawns.
  1906. if args.purge_data > 0:
  1907. query = (Pokemon
  1908. .delete()
  1909. .where((Pokemon.disappear_time <
  1910. (datetime.utcnow() - timedelta(hours=args.purge_data)))))
  1911. query.execute()
  1912.  
  1913. log.info('Regular database cleaning complete')
  1914. time.sleep(60)
  1915. except Exception as e:
  1916. log.exception('Exception in clean_db_loop: %s', e)
  1917.  
  1918.  
  1919. def bulk_upsert(cls, data, db):
  1920. num_rows = len(data.values())
  1921. i = 0
  1922.  
  1923. if args.db_type == 'mysql':
  1924. step = 120
  1925. else:
  1926. # SQLite has a default max number of parameters of 999,
  1927. # so we need to limit how many rows we insert for it.
  1928. step = 50
  1929.  
  1930. while i < num_rows:
  1931. log.debug('Inserting items %d to %d', i, min(i + step, num_rows))
  1932. try:
  1933. # Turn off FOREIGN_KEY_CHECKS on MySQL, because it apparently is unable
  1934. # to recognize strings to update unicode keys for foriegn key fields,
  1935. # thus giving lots of foreign key constraint errors
  1936. if args.db_type == 'mysql':
  1937. db.execute_sql('SET FOREIGN_KEY_CHECKS=0;')
  1938.  
  1939. InsertQuery(cls, rows=data.values()[i:min(i + step, num_rows)]).upsert().execute()
  1940.  
  1941. if args.db_type == 'mysql':
  1942. db.execute_sql('SET FOREIGN_KEY_CHECKS=1;')
  1943.  
  1944. except Exception as e:
  1945. # if there is a DB table constraint error, dump the data and don't retry
  1946. # unrecoverable error strings:
  1947. unrecoverable = ['constraint', 'has no attribute', 'peewee.IntegerField object at']
  1948. has_unrecoverable = filter(lambda x: x in str(e), unrecoverable)
  1949. if has_unrecoverable:
  1950. log.warning('%s. Data is:', e)
  1951. log.warning(data.items())
  1952. else:
  1953. log.warning('%s... Retrying', e)
  1954. time.sleep(1)
  1955. continue
  1956.  
  1957. i += step
  1958.  
  1959.  
  1960. def create_tables(db):
  1961. db.connect()
  1962. verify_database_schema(db)
  1963. db.create_tables([Pokemon, Pokestop, Gym, ScannedLocation, GymDetails, GymMember, GymPokemon,
  1964. Trainer, MainWorker, WorkerStatus, SpawnPoint, ScanSpawnPoint, SpawnpointDetectionData], safe=True)
  1965. db.close()
  1966.  
  1967.  
  1968. def drop_tables(db):
  1969. db.connect()
  1970. db.drop_tables([Pokemon, Pokestop, Gym, ScannedLocation, Versions, GymDetails, GymMember, GymPokemon,
  1971. Trainer, MainWorker, WorkerStatus, SpawnPoint, ScanSpawnPoint, SpawnpointDetectionData, Versions], safe=True)
  1972. db.close()
  1973.  
  1974.  
  1975. def verify_database_schema(db):
  1976. if not Versions.table_exists():
  1977. db.create_tables([Versions])
  1978.  
  1979. if ScannedLocation.table_exists():
  1980. # Versions table didn't exist, but there were tables. This must mean the user
  1981. # is coming from a database that existed before we started tracking the schema
  1982. # version. Perform a full upgrade.
  1983. InsertQuery(Versions, {Versions.key: 'schema_version', Versions.val: 0}).execute()
  1984. database_migrate(db, 0)
  1985. else:
  1986. InsertQuery(Versions, {Versions.key: 'schema_version', Versions.val: db_schema_version}).execute()
  1987.  
  1988. else:
  1989. db_ver = Versions.get(Versions.key == 'schema_version').val
  1990.  
  1991. if db_ver < db_schema_version:
  1992. database_migrate(db, db_ver)
  1993.  
  1994. elif db_ver > db_schema_version:
  1995. log.error("Your database version (%i) appears to be newer than the code supports (%i).",
  1996. db_ver, db_schema_version)
  1997. log.error("Please upgrade your code base or drop all tables in your database.")
  1998. sys.exit(1)
  1999.  
  2000.  
  2001. def database_migrate(db, old_ver):
  2002. # Update database schema version.
  2003. Versions.update(val=db_schema_version).where(Versions.key == 'schema_version').execute()
  2004.  
  2005. log.info("Detected database version %i, updating to %i", old_ver, db_schema_version)
  2006.  
  2007. # Perform migrations here.
  2008. migrator = None
  2009. if args.db_type == 'mysql':
  2010. migrator = MySQLMigrator(db)
  2011. else:
  2012. migrator = SqliteMigrator(db)
  2013.  
  2014. # No longer necessary, we're doing this at schema 4 as well.
  2015. # if old_ver < 1:
  2016. # db.drop_tables([ScannedLocation])
  2017.  
  2018. if old_ver < 2:
  2019. migrate(migrator.add_column('pokestop', 'encounter_id', CharField(max_length=50, null=True)))
  2020.  
  2021. if old_ver < 3:
  2022. migrate(
  2023. migrator.add_column('pokestop', 'active_fort_modifier', CharField(max_length=50, null=True)),
  2024. migrator.drop_column('pokestop', 'encounter_id'),
  2025. migrator.drop_column('pokestop', 'active_pokemon_id')
  2026. )
  2027.  
  2028. if old_ver < 4:
  2029. db.drop_tables([ScannedLocation])
  2030.  
  2031. if old_ver < 5:
  2032. # Some pokemon were added before the 595 bug was "fixed".
  2033. # Clean those up for a better UX.
  2034. query = (Pokemon
  2035. .delete()
  2036. .where(Pokemon.disappear_time >
  2037. (datetime.utcnow() - timedelta(hours=24))))
  2038. query.execute()
  2039.  
  2040. if old_ver < 6:
  2041. migrate(
  2042. migrator.add_column('gym', 'last_scanned', DateTimeField(null=True)),
  2043. )
  2044.  
  2045. if old_ver < 7:
  2046. migrate(
  2047. migrator.drop_column('gymdetails', 'description'),
  2048. migrator.add_column('gymdetails', 'description', TextField(null=True, default=""))
  2049. )
  2050.  
  2051. if old_ver < 8:
  2052. migrate(
  2053. migrator.add_column('pokemon', 'individual_attack', IntegerField(null=True, default=0)),
  2054. migrator.add_column('pokemon', 'individual_defense', IntegerField(null=True, default=0)),
  2055. migrator.add_column('pokemon', 'individual_stamina', IntegerField(null=True, default=0)),
  2056. migrator.add_column('pokemon', 'move_1', IntegerField(null=True, default=0)),
  2057. migrator.add_column('pokemon', 'move_2', IntegerField(null=True, default=0))
  2058. )
  2059.  
  2060. if old_ver < 9:
  2061. migrate(
  2062. migrator.add_column('pokemon', 'last_modified', DateTimeField(null=True, index=True)),
  2063. migrator.add_column('pokestop', 'last_updated', DateTimeField(null=True, index=True))
  2064. )
  2065.  
  2066. if old_ver < 10:
  2067. # Information in ScannedLocation and Member Status probably out of date,
  2068. # so drop and re-create with new schema
  2069.  
  2070. db.drop_tables([ScannedLocation])
  2071. db.drop_tables([WorkerStatus])
  2072.  
  2073. if old_ver < 11:
  2074.  
  2075. db.drop_tables([ScanSpawnPoint])
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement