Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- import csv
- import os
- import requests
- import sh
- import shutil
- import sys
- import zipfile
# One shared HTTP session for all requests, so downloads to the flair
# server can reuse pooled connections.
SESSION = requests.Session()
- def get_image_cached(filename, size=60):
- cache_directory = os.path.join("cache", "full%d" % size)
- if not os.path.exists(cache_directory):
- os.makedirs(cache_directory)
- cache_filename = os.path.join(cache_directory, filename + ".png")
- if os.path.exists(cache_filename):
- print "Cache hit: %s" % cache_filename
- return cache_filename
- try:
- url = (u"https://flair.redditcfb.com/full%d/%s.png"
- % (size, filename))
- print "Downloading %s..." % url
- req = SESSION.get(url)
- req.raise_for_status()
- with open(cache_filename, 'wb') as fd:
- for chunk in req.iter_content(64*1024):
- fd.write(chunk)
- orig_size = fd.tell()
- print "Downloaded %d bytes." % orig_size
- try:
- sh.pngout("/y", cache_filename, _ok_code=[0,1,2])
- except sh.CommandNotFound:
- pass
- try:
- sh.optipng("-o7", cache_filename)
- except sh.CommandNotFound:
- pass
- new_size = os.path.getsize(cache_filename)
- size_delta = orig_size - new_size
- size_delta_percent = (size_delta * 100.0) / orig_size
- print ("Optimized to %d bytes. Saved %d bytes (%.1f%%)."
- % (new_size, size_delta, size_delta_percent))
- return cache_filename
- except Exception as e:
- stdout = getattr(e, 'stdout')
- if stdout:
- print stdout
- try:
- os.remove(cache_filename)
- except OSError:
- pass
- raise
def read_utf8_csv(csvfile, *args, **kwargs):
    """Yield csv.DictReader rows with every value decoded from UTF-8.

    Python 2's csv module only hands back byte strings; keys are left
    untouched, values become unicode.  Extra arguments are passed through
    to csv.DictReader.
    """
    for record in csv.DictReader(csvfile, *args, **kwargs):
        yield dict((key, unicode(value, 'utf-8'))
                   for (key, value) in record.iteritems())
def remake_all_dirs():
    """Recreate the output trees ('standard', 'mega') and their archives.

    Any previous output directory is wiped.  Each pack gets the three
    emote subdirectories and a freshly-truncated zip file, which is
    registered in the module-level ARCHIVES map for later writes.
    """
    for pack in ('standard', 'mega'):
        if os.path.isdir(pack):
            shutil.rmtree(pack)
        os.mkdir(pack)
        for sub in ('#f', '#i', '#l'):
            os.mkdir(os.path.join(pack, sub))
        # ZIP_STORED: entries are stored uncompressed.
        ARCHIVES[pack] = zipfile.ZipFile(pack + '.zip', 'w',
                                         zipfile.ZIP_STORED)
def clear_cache():
    """Delete the local download cache directory, if one exists."""
    cache_dir = 'cache'
    if os.path.isdir(cache_dir):
        shutil.rmtree(cache_dir)
def maybe(s):
    """Treat the literal string u'None' as a missing value.

    Returns None when s is the string u'None', otherwise s unchanged.
    """
    if s == u'None':
        return None
    return s
# Pack name -> open zipfile.ZipFile.  Populated by remake_all_dirs(),
# written to by copy(), and closed by process_all().
ARCHIVES = {}
- def copy(cache, packtype, imgtype, filename=None):
- if not filename:
- parts = imgtype.split('/')
- imgtype = parts[0]
- filename = parts[1]
- emote_path = os.path.join(imgtype, filename) + '.png'
- dest = os.path.join(packtype, emote_path)
- if not os.path.isfile(dest):
- print " -> " + dest
- shutil.copy(cache, dest)
- if packtype in ARCHIVES:
- archive = ARCHIVES[packtype]
- archive.write(dest, emote_path.encode('utf-8'))
- else:
- print "ERR -> " + dest + " (already exists)"
def process_one_flair(row):
    """Process one teamsheet row, copying its sprite into the packs.

    Rows are filtered first: a flair must be displayable or have a
    shortcut/letter, and inline shortcuts must start with '#'.  Letter
    emotes and inline '#'-shortcuts go into both packs; non-inline
    (discontinued) shortcuts only into 'mega'; plain inline flairs from
    the Postseason/Rivalries divisions land under '#f'.
    """
    display = row['Selectdisplay'].lower() == 'true'
    inline = row['Selectinline'].lower() == 'true'
    width = int(row['Width'])
    filename = row['Filename']
    shortcut = maybe(row['Shortcutinline'])
    letter = maybe(row['Shortcutletter'])
    flair1 = row['Flair1']
    division = row['Division']
    # NOTE(review): the original also read row['Conference'] into an
    # unused local; dropped.
    if not display and not shortcut and not letter:
        return
    if display and not (shortcut or inline):
        return
    if shortcut and not shortcut.startswith('#'):
        return
    cache = get_image_cached(filename, size=width)
    if letter:
        copy(cache, 'standard', letter)
        copy(cache, 'mega', letter)
    if shortcut:
        if inline:
            copy(cache, 'standard', shortcut)
        copy(cache, 'mega', shortcut)  # include discontinued
    elif inline:
        # Relay will ignore everything that follows a dash.
        name = flair1.split('-')[0]
        if division in ('Postseason', 'Rivalries'):
            # Include rivalry trophies and bowl logos.
            copy(cache, 'standard', '#f', name)
            copy(cache, 'mega', '#f', name)
- def process_all():
- print "Downloading flair database... ",
- sys.stdout.flush()
- csvreq = SESSION.get('https://flair.redditcfb.com/teamsheet.csv')
- csvreq.raise_for_status()
- lines = list(csvreq.iter_lines())
- print '%d flairs loaded.' % len(lines)
- print "Cleaning up state..."
- remake_all_dirs()
- if not os.path.isdir('cache'):
- os.mkdir('cache')
- teamsheet_path = os.path.join('cache', 'teamsheet.csv')
- with open(teamsheet_path, 'w') as fh:
- fh.writelines(lines)
- try:
- for row in read_utf8_csv(lines):
- process_one_flair(row)
- finally:
- for archive in ARCHIVES.itervalues():
- archive.close()
- if __name__ == "__main__":
- os.chdir(os.path.dirname(os.path.abspath(__file__)))
- process_all()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement