Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- # osu! ranked/loved map checker by G3T v1.1
import collections
import datetime
import hashlib
import json
import os
import time
import urllib.error
import urllib.request
# Starting date for API paging — presumably on/before the earliest ranked
# map (osu! launched around 2007); TODO confirm against the API.
start_date_stamp = '2007-10-07'
# Format used both for the API 'since' parameter and for parsing API dates.
date_format = '%Y-%m-%d'
# Current paging cursor; advanced as pages of beatmaps are downloaded.
page_date = datetime.datetime.strptime(start_date_stamp, date_format)
# osu! API key; filled in from the 'api_key' cache file or a user prompt.
api_key = ''
# md5/mtime cache: relative .osu path -> (mtime, md5), persisted to disk.
db = {}
def get_next_date(ordered_dict, fmt='%Y-%m-%d'):
    """Return the approval date of the most recently added beatmap.

    Previously this read the module-level ``date_format`` global; the format
    is now a parameter whose default matches that global, so existing callers
    are unaffected.

    Args:
        ordered_dict: insertion-ordered mapping of beatmaps; each value is a
            dict whose 'approved_date' looks like 'YYYY-MM-DD HH:MM:SS'.
        fmt: strptime format for the date portion.

    Returns:
        datetime.datetime parsed from the date part of the last entry's
        'approved_date'.
    """
    last_key = next(reversed(ordered_dict))
    # Keep only the 'YYYY-MM-DD' part, dropping the time-of-day.
    date_str = ordered_dict[last_key]['approved_date'].split(' ')[0]
    return datetime.datetime.strptime(date_str, fmt)
def get_page(key, date):
    """Fetch one page of standard-mode beatmaps from the osu! API.

    Args:
        key: osu! API key.
        date: datetime; only maps approved on/after this date are returned
            (passed as the API's 'since' parameter, formatted with the
            module-level ``date_format``).

    Returns:
        bytes: the raw JSON response body.

    Raises:
        urllib.error.HTTPError: on a non-success HTTP status (e.g. bad key).
    """
    url_base = 'https://osu.ppy.sh/api/get_beatmaps?'
    url = url_base + 'k=' + key + '&since=' + date.strftime(date_format) + '&m=0'
    # Context manager ensures the HTTP response is closed (the original
    # leaked the connection object).
    with urllib.request.urlopen(url) as page:
        return page.read()
def get_api_key():
    """Prompt the user for an osu! API key until a valid one is entered.

    Each candidate key is validated by issuing a test request via
    ``get_page``; an HTTP error response (presumably an auth failure — the
    API rejects bad keys) triggers another prompt. Uses a loop rather than
    the original unbounded recursion, so repeated bad input cannot exhaust
    the stack.

    Returns:
        str: a key accepted by the API.
    """
    print('The api key can be found at https://osu.ppy.sh/p/api')
    while True:
        api_key = input('Key: ')
        try:
            get_page(api_key, page_date)  # response body discarded; only success matters
        except urllib.error.HTTPError:
            print('That key is invalid, try again.')
        else:
            return api_key
def md5(fname):
    """Return the hexadecimal MD5 digest of the file at *fname*.

    The file is read in 4 KiB chunks so arbitrarily large beatmap files can
    be hashed without loading them fully into memory.
    """
    digest = hashlib.md5()
    with open(fname, "rb") as handle:
        chunk = handle.read(4096)
        while chunk:
            digest.update(chunk)
            chunk = handle.read(4096)
    return digest.hexdigest()
# Get API key: reuse a previously cached key from the 'api_key' file if it
# exists, otherwise prompt the user (get_api_key validates it against the
# API) and cache the accepted key for future runs.
if os.access('api_key', os.F_OK):
    with open('api_key', 'r') as key_file:
        api_key = key_file.readline().strip()
else:
    api_key = get_api_key()
    with open('api_key', 'w') as key_file:
        key_file.write(api_key)
# Get md5/mtime db: load the JSON cache mapping each relative .osu path to
# its (mtime, md5) pair, so files unchanged since the last run need not be
# re-hashed. If the cache file is absent, db stays the empty dict set above.
if os.access('md5_mtime_db', os.F_OK):
    with open('md5_mtime_db', 'r') as mmdb:
        db = json.loads(mmdb.read())
# Scan the Songs folder and collect the md5 of every .osu file present.
# md5s is a dict used as a set (keys are digests, values are None); the
# md5/mtime cache (db) lets unchanged files be skipped, and is rewritten
# once the scan completes.
walk = next(os.walk(os.path.join('.', 'Songs')))  # portable, was '.\\Songs'
dirs = walk[1]
path = walk[0]
md5s = {}
for i, d in enumerate(dirs):
    print('Scanning songs folder... [maps: {:d} ({:.2f}%)]'.format(len(md5s), (i / len(dirs)) * 100), end='\r')
    dir_path = os.path.join(path, d)
    files = next(os.walk(dir_path))[2]
    for f in files:
        # endswith('.osu') — the original f[-3:] == 'osu' also matched
        # filenames that merely end in the letters "osu" without the dot.
        if f.endswith('.osu'):
            rel_path = os.path.join(d, f)
            abs_path = os.path.join(dir_path, f)
            f_mtime = os.path.getmtime(abs_path)
            if (rel_path in db) and (db[rel_path][0] == f_mtime):
                # Cache hit: mtime unchanged, reuse the stored digest.
                md5s[db[rel_path][1]] = None
            else:
                f_md5 = md5(abs_path)
                md5s[f_md5] = None
                db[rel_path] = (f_mtime, f_md5)
with open('md5_mtime_db', 'w') as mmdb:
    mmdb.write(json.dumps(db))
# Trailing spaces pad over the longer progress line written with end='\r'.
print('Scanning songs folder... [maps: {:d} (100%)] '.format(len(md5s)))
# Get a dictionary of all ranked standard maps, keyed by file md5.
# Resumes from a cached 'ranked_maps.json' when present by advancing the
# paging cursor to the newest cached map's approval date. Pages through the
# API by repeatedly moving 'since' forward, stopping once a request adds no
# new maps (the map count reaches a fixed point).
if os.access('ranked_maps.json', os.F_OK):
    with open('ranked_maps.json', 'r') as rmj:
        maps = collections.OrderedDict(json.loads(rmj.read()))
    page_date = get_next_date(maps)
else:
    maps = collections.OrderedDict([])
num_maps = -1
while (num_maps != len(maps)):
    num_maps = len(maps)
    print('Downloading ranked/loved map list... [maps: {:d}]'.format(num_maps), end='\r')
    page = get_page(api_key, page_date)
    parsed_page = json.loads(page)
    for bm in parsed_page:
        # Skip approved == '3' (presumably "qualified", i.e. not yet
        # ranked/loved — verify against the osu! API docs); everything
        # else is stored, keyed by the beatmap file's md5.
        if bm['approved'] != '3':
            maps[bm['file_md5']] = bm
    page_date = get_next_date(maps)
    # Pause between requests to stay under the API rate limit.
    time.sleep(1.1)
print('Downloading ranked/loved map list... [maps: {:d}]'.format(num_maps))
with open('ranked_maps.json', 'w') as rmj:
    rmj.write(json.dumps(maps))
# Tally map statuses and report any ranked/loved maps whose md5 was not
# found in the Songs folder scan. `missing` is a dict used as an
# insertion-ordered set of beatmapset ids.
missing = {}
ranked = 0
loved = 0
errors = 0
for file_hash, bm in maps.items():
    status = bm['approved']
    if status == '4':
        loved += 1
    elif status in ('1', '2'):
        ranked += 1
    else:
        errors += 1
    if file_hash not in md5s:
        missing[bm['beatmapset_id']] = None
print('Map composition:')
print(' Ranked: ' + str(ranked))
print(' Loved: ' + str(loved))
print(' Unknown: ' + str(errors))
if not missing:
    print('All ranked/loved maps accounted for.')
else:
    print('Missing maps by song id:')
    for set_id in missing:
        print(set_id)
input('Press any key to exit...')
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement