Advertisement
Guest User

Untitled

a guest
Oct 10th, 2017
123
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
Python 43.71 KB | None | 0 0
  1. #!/usr/bin/python3
  2. # -*- coding: utf-8 -*-
  3.  
  4. """
  5. USAGE: Set up as described below, then just run the script somewhere in the
  6.       background, screen works nicely for this.
  7.  
  8. Non-standard dependencies: sqlite3, tld
  9.  
  10. It is recommended to wrap this in the following script:
  11. #!/bin/bash
  12. RET=1
  13. while [ $RET -eq 1 ]
  14. do
  15.        removalbot
  16.        RET=$?
  17. done
  18.  
  19. Ctrl+C is handled as success, expected failures have exit code 2.
  20. Likely programming errors have exit code 3.
  21.  
  22. Files:
  23. - CONFDIR, defined below
  24. - CONFDIR/maintainer, your reddit username
  25. - CONFDIR/secrets in the format "user\npass\nclient_id\nclient_secret"
  26. - CONFDIR/version
  27. - CONFDIR/safedomains, empty or newline-separated domains
  28. - CONFDIR/unknowndomains, empty
  29. - CONFDIR/posts.db, sqlite3 database
  30.  
  31. Be aware that you need to set up oauth to use this and gather the client ID and secret for the login process.
  32.  
  33. DB schema:
  34. CREATE TABLE notify
  35.    (author TEXT UNIQUE);
  36. CREATE TABLE comments
  37.    (name TEXT UNIQUE PRIMARY KEY,
  38.     subreddit TEXT,
  39.     author TEXT,
  40.     created_utc INTEGER,
  41.     link_id TEXT,
  42.     body TEXT);
  43. CREATE TABLE comments_deleted
  44.    (name TEXT UNIQUE PRIMARY KEY,
  45.     subreddit TEXT,
  46.     author TEXT,
  47.     created_utc INTEGER,
  48.     link_id TEXT,
  49.     body TEXT,
  50.     spotted INTEGER);
  51. CREATE TABLE flairs
  52.    (name TEXT UNIQUE PRIMARY KEY,
  53.     flair_template_id TEXT UNIQUE);
  54. CREATE TABLE times
  55.    (name TEXT UNIQUE PRIMARY KEY,
  56.     time INTEGER);
  57. CREATE TABLE submissions
  58.    (name TEXT UNIQUE PRIMARY KEY,
  59.     subreddit TEXT,
  60.     author TEXT,
  61.     created_utc INTEGER,
  62.     link_flair_text TEXT,
  63.     title TEXT,
  64.     domain TEXT,
  65.     selftext TEXT);
  66. CREATE TABLE submissions_deleted
  67.    (name TEXT UNIQUE PRIMARY KEY,
  68.     subreddit TEXT,
  69.     author TEXT,
  70.     created_utc INTEGER,
  71.     link_flair_text TEXT,
  72.     title TEXT,
  73.     domain TEXT,
  74.     selftext TEXT,
  75.     spotted INTEGER);
  76.  
  77. # TODO: shadowbanned vs deleted users
  78. # TODO: log.db
  79. # TODO: deletion.db
  80. """
  81.  
# Global runtime switches.
LOGGING = True          # when True, newlog() creates real files under LOGDIR
DUMMY = False           # when True, skip state-changing reddit API calls (setflair, send_pm)
SUBMIT_ERRORS = False   # NOTE(review): not referenced in the visible portion of this file
  85.  
  86. import os
  87. import sys
  88. import time
  89. import json
  90. import urllib.request, urllib.parse, urllib.error
  91. import urllib.request, urllib.error, urllib.parse
  92. import ssl
  93. import socket
  94. import re
  95. import traceback
  96. import sqlite3
  97. import tld
  98. from random import sample
  99. from datetime import datetime
  100. from contextlib import closing
  101. from sys import stderr
  102. from html.parser import HTMLParser
  103.  
# Time unit helpers, in seconds.
MINUTE = 60
HOUR = MINUTE * 60
DAY = HOUR * 24

# Fail fast on hung network calls instead of blocking forever.
socket.setdefaulttimeout(10)

# Thread ids whose comment authors are collected by update_notify().
NOTIFY_THREADS = ['3rmc4v']
# all in seconds
NEW_SPACING = 10                # spacing between checks for new posts
DELETION_SPACING = 10 * MINUTE  # spacing between deletion comparisons
FLAIR_SPACING = 24 * HOUR       # spacing between flair refreshes
SUBSCRIBER_SPACING = 12 * HOUR  # spacing between subscriber updates
ALLOWED_TITLE_LENGTH = 300      # reddit's maximum submission title length
INTROLEN = 100                  # NOTE(review): unused in the visible code

CONFDIR = '/etc/removalbot'
PIDFILE = "/tmp/removalbot.pid"
LOGDIR = os.path.join(CONFDIR, 'log')
# Accounts (mostly bots) whose posts are never tracked; see fetch().
IGNORE = ["godwins_law_bot", "totes_meta_bot", "redditbots", "ttumblrbots",
          "autowikibot", "SRScreenshot", "MRSPArchiver", "AutoModerator",
          "image_linker_bot", "SmallSubBot", "autourbanbot",
          "note-to-self-bot", "ObamaRobot", "TotesMessenger",
          "TweetsInCommentsBot", "TweetPoster", "JoeBidenBot",
          "smilesbot", "DailMail_Bot", "TrollaBot", "TotesHuman",
          "youtubefactsbot", "imgurtranscriber", "isreactionary_bot",
          "iscuck_bot", "author", "reginaldtato", "NotTheOnionBot",
          "rSGSpolice", "hwsbot", "yes_it_is_weird", "r_PictureGame",
          "prairiechicken2", "domoarigatobtfcboto", "SkydivingHaylz",
          "I_Like_Spaghetti", "STEALTHM0UNTAIN", "Google_Panda",
          "AakashMasani", "Forestl", "lurkattwork", "drgoku282",
          "texasmommie", "Really_Like_Pancakes", "BlaineWolfe",
          "Blassie098", "ghort98765", "GustavoFrings", "WritingPromptsRobot",
          "sontato", "ramsesniblick3rd", "300BlackoutSober",
          "flair_your_post_bot", "GoomyTooOP", "arbutus_", "foamed",
          "DumbCollegeStudent", "[deleted]", "GOTradeRuleBot",
          "ShadowBanCheckBot", "ShadowBannedBot", "Shiny_Sylveon",
          "PaidBot", "xbamsod", "enriquepaz13", "Moskau50", "PornOverlord",
          "ConvertsToMetric", "removalbot"]

# Liberal URL matcher used to replace links in removed-post bodies/titles.
URLREGEX = r'''(?i)\b((?:[a-z][\w-]+:(?:/{1,3}|[a-z0-9%])|www\d{0,3}[.]|[a-z0-9
   .\-]+[.][a-z]{2,4}/)(?:[^\s()<>]+|\(([^\s()<>]+|(\([^\s()<>]+\)))*\))+(?:\(
   ([^\s()<>]+|(\([^\s()<>]+\)))*\)|[^\s`!()\[\]{};:'".,<>?«»“”‘’]))'''

# NOTE(review): not referenced in the visible portion of this file.
DOMAIN_BLACKLIST = ["malformed.domain", "goo.gl", "tinyurl.com"]

# Force all time handling into UTC (time.strftime/localtime, datetime.now).
os.environ['TZ'] = 'UTC'
time.tzset()

# Record our pid so the wrapper script can find/kill the running bot.
pid = str(os.getpid())
with open(PIDFILE, 'w') as f:
    f.write(pid)


# Exception names treated as likely programming errors (exit code 3)
# by errorhook below.
ALSO_FATAL = ['SyntaxError', 'NameError', 'IndexError', 'TypeError',
              'ValueError', 'UnboundLocalError']
  159.  
  160.  
  161. def errorhook(extype, value, trace):
  162.     os.unlink(PIDFILE)
  163.     print(traceback.print_exception(extype, value, trace))
  164.     if extype.__name__ == "KeyboardInterrupt":
  165.         exit(0)
  166.     elif extype.__name__ == "FatalError":
  167.         send_pm(maintainer, "REMOVALBOT CRASHED", "Reason: " +
  168.                 extype.__name__+": " + value.message)
  169.         writefile(conf('error'), extype.__name__+": " + value.message)
  170.         exit(2)
  171.     elif extype.__name__ in ALSO_FATAL:
  172.         send_pm(maintainer, "REMOVALBOT CRASHED", "Reason: " +
  173.                 extype.__name__+": " + value.message)
  174.         writefile(conf('error'), extype.__name__+": " + value.message)
  175.         exit(3)
  176.  
  177. sys.excepthook = errorhook
  178.  
  179.  
  180. class FatalError(Exception):
  181.     def __init__(self, message):
  182.         self.message = message
  183.         Exception.__init__(self, message)
  184.  
  185.  
  186. def current_epoch():
  187.     return (datetime.now() - datetime.utcfromtimestamp(0)).total_seconds()
  188.  
  189.  
  190. def epoch_to_string(epoch=None, tech=False, short=False):
  191.     if epoch is None:
  192.         epoch = current_epoch()
  193.     try:
  194.         epoch = float(epoch)
  195.     except:
  196.         epoch = 0
  197.     if tech:
  198.         model = "%y%m%d-%H%M"
  199.     elif short:
  200.         model = "%m-%d %H:%M"
  201.     else:
  202.         model = "%Y-%m-%d %H:%M %Z"
  203.     return time.strftime(model, time.localtime(epoch))
  204.  
  205.  
  206. def conf(name):
  207.     return str(os.path.join(CONFDIR, name))
  208.  
  209.  
  210. def newlog(name):
  211.     if LOGGING:
  212.         name = str(os.path.join(LOGDIR, name))
  213.         with closing(open(name, 'w')):
  214.             pass
  215.         return name
  216.     else:
  217.         return '/dev/null'
  218.  
  219.  
  220. def readfile(f):
  221.     with closing(open(f)) as f:
  222.         return f.read()
  223.  
  224.  
  225. def writefile(f, data):
  226.     with closing(open(f, 'w')) as f:
  227.         f.write(data)
  228.  
  229.  
  230. def censor(s, fraction):
  231.     num = int(round(fraction * len(s)))
  232.     change_locs = set(sample(list(range(len(s))), num))
  233.     changed = ('*' if i in change_locs else c for i, c in enumerate(s))
  234.     return ''.join(changed)
  235.  
  236.  
  237. def getv(query, args=()):
  238.     return (c.execute(query, args).fetchone() or (None,))[0]
  239.  
  240.  
  241. def getlast(what):
  242.     return getv('SELECT time FROM times WHERE name=?', (what,))
  243.  
  244.  
  245. def setlast(what, utc):
  246.     c.execute('INSERT OR REPLACE INTO times VALUES (?, ?)', (what, utc))
  247.     db.commit()
  248.  
  249.  
  250. def login():
  251.     print("> Logging in ", end=' ')
  252.     sys.stdout.flush()
  253.     secrets = readfile(conf('secrets')).split()
  254.     username = secrets[0]
  255.     password = secrets[1]
  256.     client_id = secrets[2]
  257.     client_secret = secrets[3]
  258.  
  259.     post_data = {"grant_type": "password",
  260.                  "username": username,
  261.                  "password": password}
  262.     headers = {"User-Agent": USERAGENT}
  263.     url = "https://www.reddit.com/api/v1/access_token"
  264.  
  265.     password_mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
  266.     password_mgr.add_password(None, url, client_id, client_secret)
  267.     handler = urllib.request.HTTPBasicAuthHandler(password_mgr)
  268.     opener = urllib.request.build_opener(handler)
  269.  
  270.     response = reddit(url, opener=opener.open,
  271.                       post_data=post_data, headers=headers,
  272.                       raw=True)
  273.  
  274.     if not ('token_type' in list(response.keys()) and
  275.             'access_token' in list(response.keys())):
  276.         print(response, file=stderr)
  277.         raise FatalError("Authorization Failed")
  278.     token = response['token_type']+" "+response['access_token']
  279.     print("- done")
  280.     return {"Authorization": token, "User-Agent": USERAGENT}
  281.  
  282.  
  283. def mkrequest(url, headers=None, post_data=None):
  284.     if not post_data:
  285.         return urllib.request.Request(url, None, headers)
  286.     else:
  287.         for k, v in post_data.items():
  288.             if isinstance(v, str):
  289.                 v = v.encode('utf8')
  290.             post_data[k] = v
  291.         post_data = urllib.parse.urlencode(post_data).encode('utf-8')
  292.         return urllib.request.Request(url, post_data, headers)
  293.  
  294.  
  295. def errordir(e):
  296.     for attr in dir(e):
  297.         print(attr, getattr(e, attr), file=stderr)
  298.  
  299.  
def reddit(url, opener=urllib.request.urlopen,
           headers=None, post_data=None,
           raw=False, catch_also=[]):
    """Perform a reddit API request with retry, rate limiting and re-auth.

    Returns the parsed JSON body, unwrapped to its 'data' member unless
    *raw* is true.  HTTP status codes in *catch_also* plus a built-in set
    of transient errors are retried after a 5s pause; a 401 triggers a
    fresh login.  Unexpected exceptions are dumped via errordir and
    re-raised.  NOTE(review): the mutable default catch_also=[] is shared
    across calls - harmless as long as no caller mutates it.
    """
    global auth
    global requests_used
    global requests_remaining
    global requests_reset
    # Transient/recoverable HTTP statuses, plus caller-supplied extras.
    catch = [400, 401, 500, 502, 503, 504, 521] + catch_also
    while True:
        try:
            request = mkrequest(url, headers=headers, post_data=post_data)
            response = opener(request)
            # Reddit's rate-limit bookkeeping headers (may be absent).
            requests_remaining = response.headers.get('x-ratelimit-remaining')
            requests_used = response.headers.get('x-ratelimit-used')
            requests_reset = response.headers.get('x-ratelimit-reset')
            if requests_remaining and requests_reset:
                remaining = float(requests_remaining)
                reset = int(requests_reset)
                # Spread the remaining allowance over the reset window.
                if remaining < reset:
                    time.sleep(float(reset - remaining) /
                               remaining)
            j = json.loads(response.read().decode('utf-8'))
            if not raw:
                j = j['data']
            break
        except (ValueError, KeyError,
                socket.timeout, socket.error,
                ssl.SSLError,
                urllib.error.HTTPError, urllib.error.URLError) as e:
            print('<' + type(e).__name__ + ': ', end=' ')
            if type(e).__name__ == 'HTTPError':
                print(str(e.code) + '!>', end=' ')
                if e.code not in catch:
                    raise
                elif e.code == 401:
                    # Expired OAuth token: log in again and retry with
                    # the fresh auth headers.
                    print("(Token expired)", end=' ')
                    auth = login()
                    headers = auth
            elif type(e).__name__ == 'URLError':
                # Only TLS handshake hiccups are retried; anything else
                # (DNS failure etc.) is fatal to this request.
                if "handshake operation" not in str(e.reason):
                    raise
            else:
                print('!>', end=' ')
            sys.stdout.flush()
            time.sleep(5)
        except Exception as e:
            # Unexpected exception type: dump everything and re-raise.
            print(file=stderr)
            print("ERROR!", file=stderr)
            print(file=stderr)
            errordir(e)
            raise
    return j
  352.  
  353.  
  354. def fetch(query, lastseen, model, kind, catch_also=[]):
  355.     pagecount = 1
  356.     log = ()
  357.     after = ''
  358.  
  359.     class Done(Exception):
  360.         pass
  361.     try:
  362.         while True:
  363.             if pagecount % 10 == 0 and kind != 'noupdate':
  364.                 print('u(', end=' ')
  365.                 sys.stdout.flush()
  366.                 if kind == 'comments':
  367.                     new = fetch_posts_since(c_lastcheck=log[0][3], quiet=True)
  368.                     log = new + log
  369.                 elif kind == 'submissions':
  370.                     new = fetch_posts_since(s_lastcheck=log[0][3], quiet=True)
  371.                     log = new + log
  372.                 print(')', end=' ')
  373.                 sys.stdout.flush
  374.  
  375.             print('p'+str(pagecount), end=' ')
  376.             sys.stdout.flush()
  377.  
  378.             response = reddit(query+after, headers=auth, catch_also=catch_also)
  379.  
  380.             for child in [c['data'] for c in response['children']]:
  381.  
  382.                 if child['created_utc'] <= lastseen or \
  383.                    current_epoch() - child['created_utc'] > DAY * 7:
  384.                     raise Done
  385.                 if child['author'] in IGNORE:
  386.                     continue
  387.  
  388.                 fields = []
  389.                 for field in model:
  390.                     fieldtype = field[0]
  391.                     fieldname = field[1]
  392.                     if fieldtype is bool:
  393.                         fields += [child[fieldname] and 1 or 0]
  394.                     else:
  395.                         fields += [fieldtype(child[fieldname])]
  396.  
  397.                 log += (tuple(fields),)
  398.  
  399.             if not response['after']:
  400.                 raise Done
  401.             else:
  402.                 after = '&after='+response['after']
  403.             pagecount += 1
  404.     except Done:
  405.         print(sum([len(field) for post in log for field in post if type(field) is str]))
  406.         sys.stdout.flush()
  407.         return tuple(sorted(list(log), key=lambda post: post[3], reverse=True))
  408.  
  409.  
  410. def fetch_posts_since(c_lastcheck=None, s_lastcheck=None, quiet=False):
  411.     baseurl = 'https://oauth.reddit.com/user/removalbot/m/monitor'
  412.     if c_lastcheck is not None:
  413.         if not quiet:
  414.             start = current_epoch()
  415.             print("Reading comments -", end=' ')
  416.             sys.stdout.flush()
  417.         newcomments = fetch(baseurl+'/comments/.json?sort=new&limit=100',
  418.                             c_lastcheck,
  419.                             ((str, 'name'),
  420.                              (str, 'subreddit'),
  421.                              (str, 'author'),
  422.                              (float, 'created_utc'),
  423.                              (str, 'link_id'),
  424.                              (str, 'body')),
  425.                             kind='comments',
  426.                             catch_also=[403])
  427.         if not quiet:
  428.             print("- "+str(len(newcomments))+" comments new")
  429.             print("Read in " + str(current_epoch() - start))
  430.  
  431.     if s_lastcheck is not None:
  432.         if not quiet:
  433.             start = current_epoch()
  434.             print("Reading submissions -", end=' ')
  435.             sys.stdout.flush()
  436.         newsubmissions = fetch(baseurl+'/new/.json?limit=100',
  437.                                s_lastcheck,
  438.                                ((str, 'name'),
  439.                                 (str, 'subreddit'),
  440.                                 (str, 'author'),
  441.                                 (float, 'created_utc'),
  442.                                 (str, 'link_flair_text'),
  443.                                 (str, 'title'),
  444.                                 (str, 'domain'),
  445.                                 (str, 'selftext')),
  446.                                kind='submissions',
  447.                                catch_also=[403])
  448.         if not quiet:
  449.             print("- "+str(len(newsubmissions))+" submissions new")
  450.             print("Read in " + str(current_epoch() - start))
  451.  
  452.     if c_lastcheck is not None and s_lastcheck is not None:
  453.         return newcomments, newsubmissions
  454.     elif c_lastcheck is None:
  455.         return newsubmissions
  456.     elif s_lastcheck is None:
  457.         return newcomments
  458.     else:
  459.         raise FatalError("Malformed post retrieval request")
  460.  
  461.  
def get_new():
    """Fetch posts newer than the newest stored ones and insert them."""
    print("> Checking for new posts,", end=' ')
    # NOTE(review): the precedence here computes
    # current_epoch() - (last_deletion_check * DELETION_SPACING); verify
    # against how last_deletion_check is maintained in the main loop.
    nextdelcheck = str(DELETION_SPACING - int(current_epoch() -
                       last_deletion_check * DELETION_SPACING))
    print("next comparison in "+nextdelcheck+"s                               ")

    global c_lastcheck
    global s_lastcheck
    global known_comments
    global known_submissions

    # Newest timestamps we already hold define the fetch cutoffs.
    c_lastcheck = getv('SELECT MAX(created_utc) FROM comments')
    s_lastcheck = getv('SELECT MAX(created_utc) FROM submissions')

    newcomments, newsubmissions = fetch_posts_since(c_lastcheck=c_lastcheck,
                                                    s_lastcheck=s_lastcheck)

    for comment in newcomments:
        c.execute('INSERT OR IGNORE INTO comments VALUES (?,?,?,?,?,?)',
                  comment)
    for submission in newsubmissions:
        c.execute('INSERT OR IGNORE INTO submissions VALUES (?,?,?,?,?,?,?,?)',
                  submission)
    db.commit()
  486.  
  487. def recurse_into_get_authors(subthread):
  488.     subthread = subthread['data']['children']
  489.     notify = []
  490.     for post in subthread:
  491.         post = post['data']
  492.         if post['author'] != '[deleted]':
  493.             notify += [post['author']]
  494.         if 'replies' in list(post.keys()) and post['replies']:
  495.             notify += recurse_into_get_authors(post['replies'])
  496.     return notify
  497.  
  498.  
  499. def update_notify():
  500.     global notify
  501.     print("> Updating users to notify", end=' ')
  502.     sys.stdout.flush()
  503.     notify = []
  504.     for thread in NOTIFY_THREADS:
  505.         thread = 'https://oauth.reddit.com/r/removalbot/comments/'+thread
  506.         pm_thread = reddit(thread+'.json',
  507.                            headers=auth, raw=True)
  508.         notify += recurse_into_get_authors(pm_thread[1])
  509.     notify += [maintainer]
  510.     notify = list(set(notify))
  511.     c.execute('DELETE FROM notify')
  512.     for user in notify:
  513.         c.execute('INSERT INTO notify VALUES (?)', (user,))
  514.     db.commit()
  515.     print("- done (" + str(len(notify)) + " users: " + ", ".join(notify) + ")")
  516.  
  517.  
  518. def update_flairs():
  519.     print("> Fetching flairs", end=' ')
  520.     sys.stdout.flush()
  521.     latestposturl = 'https://oauth.reddit.com/r/removalbot/new/.json?limit=1'
  522.     latestpostname = reddit(latestposturl, headers=auth)
  523.     latestpostname = latestpostname['children'][0]['data']['name']
  524.     flairurl = 'https://oauth.reddit.com/r/removalbot/api/flairselector'
  525.     post_data = {'link': latestpostname}
  526.     flairchoices = reddit(flairurl,
  527.                           post_data=post_data, headers=auth,
  528.                           raw=True)['choices']
  529.     c.execute('DELETE FROM flairs')
  530.     for flair in flairchoices:
  531.         c.execute('INSERT INTO flairs VALUES (?,?)',
  532.                   (flair['flair_text'], flair['flair_template_id']))
  533.     db.commit()
  534.     print("- done")
  535.  
  536.  
  537. def get_fullname_new(response):
  538.     response = response['jquery']
  539.     is_redirect = False
  540.     fullname_new = None
  541.     for line in response:
  542.         if is_redirect:
  543.             fullname_new = 't3_'+line[3][0].split('/')[-3]
  544.             break
  545.         if line[2] == 'attr' and line[3] == 'redirect':
  546.             is_redirect = True
  547.     if fullname_new:
  548.         return fullname_new
  549.     else:
  550.         print(response)
  551.         raise FatalError("Malformed response from reddit")
  552.  
  553.  
  554. def setflair(fullname, text):
  555.     fid = getv('SELECT flair_template_id FROM flairs WHERE name=?', (text,))
  556.     if not fid:
  557.         return False
  558.     post_data = {'link': fullname,
  559.                  'api_type': 'json',
  560.                  'flair_template_id': fid}
  561.     if not DUMMY:
  562.         reddit('https://oauth.reddit.com/r/removalbot/api/selectflair',
  563.                post_data=post_data, headers=auth, raw=True)
  564.     return True
  565.  
  566.  
  567. def send_pm(to, subject, text):
  568.     post_data = {'api_type': 'json',
  569.                  'subject': subject,
  570.                  'text': text,
  571.                  'to': to}
  572.     print("Sending PM '"+subject+"' to /u/"+to, end=' ')
  573.     sys.stdout.flush()
  574.     if not DUMMY:
  575.         reddit('https://oauth.reddit.com/api/compose',
  576.                post_data=post_data, headers=auth, raw=True)
  577.     print("- sent")
  578.  
  579.  
  580. def timestring(span):
  581.     spanstring = ''
  582.     times = ((60.0, 'min'), (60.0, 'h'), (24.0, 'd'))
  583.     for t in times:
  584.         span /= t[0]
  585.         if span > 5:
  586.             spanstring = str(round(span, 1)) + t[1]
  587.     return spanstring
  588.  
  589.  
  590. def compare_update(kind, newposts):
  591.     new_oldestposts = {}
  592.     for post in newposts:
  593.         if post[1] not in list(new_oldestposts.keys()) or \
  594.            post[3] < new_oldestposts[post[1]]:
  595.             new_oldestposts[post[1]] = post[3]
  596.  
  597.     fell = 0
  598.     old_newestposts = {}
  599.     for sub in list(new_oldestposts.keys()):
  600.         old_newestposts[sub] = getv('SELECT MAX(created_utc) \
  601.                                     FROM ' + kind + ' \
  602.                                     WHERE subreddit=?', (sub,))
  603.         fell += getv('SELECT COUNT(name) FROM ' + kind + ' WHERE \
  604.                      subreddit=? AND created_utc<=?',
  605.                      (sub, new_oldestposts[sub]))
  606.         c.execute('DELETE FROM ' + kind + ' WHERE \
  607.                   subreddit=? AND created_utc<=?',
  608.                   (sub, new_oldestposts[sub]))
  609.  
  610.     db.commit()
  611.  
  612.     new_ids = tuple(str(post[0]) for post in newposts)
  613.  
  614.     deleted = tuple(c.execute('SELECT name FROM ' + kind + ' \
  615.                               WHERE name NOT IN ' + str(new_ids)))
  616.  
  617.     if deleted:
  618.         print("Deleted "+kind+": " + ', '.join([post[0].split('_')[1]
  619.                                                for post in deleted]))
  620.  
  621.     c.execute('INSERT OR IGNORE INTO ' + kind + '_deleted \
  622.               SELECT *, STRFTIME("%s", "now") FROM ' + kind + ' \
  623.               WHERE name NOT IN ' + str(new_ids))
  624.  
  625.     c.execute('DELETE FROM '+kind)
  626.  
  627.     db.commit()
  628.  
  629.     for post in newposts:
  630.         c.execute('INSERT OR IGNORE INTO ' + kind + ' VALUES ' +
  631.                   ('(' + ','.join(['?'] * len(post)) + ')'),
  632.                   post)
  633.  
  634.     new = 0
  635.     for sub in list(old_newestposts.keys()):
  636.         new += getv('SELECT COUNT(name) FROM ' + kind + ' WHERE \
  637.                     subreddit=? AND created_utc>?',
  638.                     (sub, old_newestposts[sub]))
  639.  
  640.     total = getv('SELECT COUNT(name) FROM ' + kind)
  641.  
  642.     db.commit()
  643.  
  644.     print(str(len(deleted)) + ' ' + kind + ' deleted,', end=' ')
  645.     print(str(new) + ' new,', end=' ')
  646.     print(str(fell) + ' fell out -', end=' ')
  647.     print(str(total) + ' total')
  648.  
  649.  
  650. def check_deletions():
  651.  
  652.     print("> Checking for deletions at " + str(int(current_epoch())) + "                   ")
  653.  
  654.     (newcomments, newsubmissions) = fetch_posts_since(c_lastcheck=0,
  655.                                                       s_lastcheck=0)
  656.  
  657.     compare_update('comments', newcomments)
  658.     compare_update('submissions', newsubmissions)
  659.  
  660. def check_user_deletion(post, kind):
  661.     safe_domains = readfile(conf('safedomains')).strip().split()
  662.     u = readfile(conf('unknowndomains')).strip().split('\n')
  663.     unknown_domains = {}
  664.     for d in u:
  665.         if not d:
  666.             continue
  667.         d = d.strip().split()
  668.         unknown_domains[d[0]] = int(d[1])
  669.  
  670.     print("> Checking for user deletion of " + kind + " " + post[0] + \
  671.         (" from " + post[4] if kind == 'comment' else '') + \
  672.         " in " + post[1] + ",", end=' ')
  673.     left = getv('SELECT COUNT(*) FROM (SELECT name FROM comments_deleted \
  674.                     UNION SELECT name FROM submissions_deleted)')-1
  675.     print(str(left or 'no') + " more left to check -", end=' ')
  676.     sys.stdout.flush()
  677.  
  678.     spotted = post[-1]
  679.  
  680.     name = post[0].split('_')[1]
  681.     sub = post[1]
  682.     author = post[2]
  683.     posted = post[3]
  684.  
  685.     compare = tuple(comment[0] for comment in
  686.                     tuple(c.execute('SELECT created_utc FROM ' + kind + 's \
  687.                                    WHERE subreddit=? \
  688.                                    ORDER BY created_utc ASC', (sub,))))
  689.     mincompare = min(compare) if compare else current_epoch() - DAY*7
  690.     compare = compare[-int(len(compare)*0.95)] if compare \
  691.         else current_epoch() - DAY*7
  692.  
  693.     print(str(round((current_epoch() - posted) / HOUR, 2)) + "h vs cutoff " + \
  694.         str(round((current_epoch() - compare) / HOUR, 2)) + "h, oldest " + \
  695.         str(round((current_epoch() - mincompare) / HOUR, 2)) + "h -", end=' ')
  696.     sys.stdout.flush()
  697.     if posted <= compare:
  698.         print("too old")
  699.         return False
  700.  
  701.     title = ''
  702.     if kind == 'comment':
  703.         link_id = post[4].split('_')[1]
  704.         content = post[5]
  705.         baseurl = 'https://oauth.reddit.com/user/'+author
  706.         usercomments = {}
  707.         shadowbanned = False
  708.         try:
  709.             usercomments = fetch(baseurl+'/comments/.json?sort=new&limit=100',
  710.                                  posted - 30,
  711.                                  ((str, 'name'),
  712.                                   (str, 'subreddit'),
  713.                                   (str, 'author'),
  714.                                   (float, 'created_utc'),
  715.                                   (str, 'link_id'),
  716.                                   (str, 'body')),
  717.                                  kind='noupdate')
  718.         except urllib.error.HTTPError as e:
  719.             if e.code in [403, 404]:
  720.                 print("- " + author + " shadowbanned or deleted", end=' ')
  721.                 url = 'https://oauth.reddit.com/r/' + sub + '/comments/' + \
  722.                       link_id + '/comment/' + name + '/.json'
  723.                 try:
  724.                     reddit(url, headers=auth, raw=True)
  725.                     print("- deleted")
  726.                     return False
  727.                 except urllib.error.HTTPError as e:
  728.                     if e.code in [403, 404]:
  729.                         print("- shadowbanned", end=' ')
  730.                         shadowbanned = True
  731.                     else:
  732.                         raise
  733.             else:
  734.                 raise
  735.         usercomments = {comment[0]: comment
  736.                         for comment in usercomments}
  737.         if post[0] not in list(usercomments.keys()) and not shadowbanned:
  738.             print("- deleted by "+author+" ("+((content[:47]+"...")
  739.                                                if len(content) > 50
  740.                                                else content).replace('\n',
  741.                                                                      ' / ')+")")
  742.             return False
  743.         else:
  744.             if not shadowbanned:
  745.                 post = usercomments[post[0]]
  746.                 name = post[0].split('_')[1]
  747.                 sub = post[1]
  748.                 author = post[2]
  749.                 posted = post[3]
  750.                 link_id = post[4].split('_')[1]
  751.                 content = post[5]
  752.             print("- deleted by mods")
  753.             title1 = str(epoch_to_string(short=True) + " - '")
  754.             title2 = str("' by /u/" + author +
  755.                              " removed from /r/" + sub)
  756.             lower_frame_boundary = spotted - DELETION_SPACING
  757.             upper_frame_boundary = spotted
  758.             frame_lower = round((float(lower_frame_boundary) - posted) / 60)
  759.             if frame_lower < 0:
  760.                 frame_lower = 0
  761.             frame_upper = round((float(upper_frame_boundary) - posted) / 60)
  762.             if frame_lower != frame_upper:
  763.                 frame = str(int(frame_lower)) + "-" + str(int(frame_upper))
  764.             else:
  765.                 frame = str(int(frame_lower))
  766.             title2 += " within " + frame + "min"
  767.             if shadowbanned:
  768.                 title2 += " (user shadowbanned)"
  769.             restlen = ALLOWED_TITLE_LENGTH - (len(title1) + len(title2))
  770.             intro = re.sub(r'&gt;.*\n', '[quote]', content)
  771.  
  772.             intro = re.sub(r'\[([^\]]*)\]\([^\)]*\)', r'[\1]', intro)
  773.             intro = re.sub(URLREGEX, '[link]', intro)
  774.  
  775.             intro = intro.replace('/r/', 'r/')
  776.             intro = intro.replace('/u/', 'u/')
  777.             intro = re.sub(r' +', ' ', intro)
  778.             intro = re.sub(r'[ \n/][ \n/]+', ' / ', intro)
  779.             intro = intro.strip(' \n/')
  780.  
  781.             links = []
  782.             for url in re.finditer(r'\[([^\]]*)\][ \n]?\(([^\)]*)\)', content):
  783.                 links += [url.group(2)]
  784.                 content = content.replace(url.group(0),
  785.                                           '[' + url.group(1) + ']^^' +
  786.                                           str(len(links)) + ' ')
  787.             for url in re.finditer(URLREGEX, content):
  788.                 links += [url.group(0)]
  789.                 content = content.replace(url.group(0),
  790.                                           '[link]^^' + str(len(links)) + ' ')
  791.  
  792.             if len(intro) > restlen:
  793.                 intro = str(intro[:restlen-3].strip(' ./,') + "...")
  794.             else:
  795.                 intro = str(intro)
  796.             title = title1 + intro + title2
  797.             title = title[:ALLOWED_TITLE_LENGTH]
  798.             body = "'''\n\n"+content+"\n\n'''\n\n"
  799.             if post[4] == 'None':
  800.                 body = "No link could be determined."
  801.                 link = "Unknown"
  802.             else:
  803.                 linkbase = "/r/" + sub + "/comments/" + link_id + \
  804.                            "/comment/" + name + "?context=999"
  805.                 link = "https://reddit.com" + linkbase
  806.                 goldfishlink = "http://r.go1dfish.me" + linkbase
  807.                 unredditlink = "https://unreddit.com" + linkbase
  808.                 body += "[Context Link](" + link + ")\n\n"
  809.                 body += "[Go1dfish undelete link](" + goldfishlink + ")\n\n"
  810.                 body += "[unreddit undelete link](" + unredditlink + ")"
  811.             body += "\n\nAuthor: /u/" + author
  812.             if links:
  813.                 body += "\n\n"
  814.                 unknowns = False
  815.                 for l in range(len(links)):
  816.                     try:
  817.                         domain = tld.get_tld(links[l])
  818.                     except tld.exceptions.TldBadUrl:
  819.                         domain = 'reddit.com'
  820.                     except (tld.exceptions.TldDomainNotFound, ValueError):
  821.                         domain = 'malformed.domain'
  822.                         print("Malformed domain: " + links[l])
  823.                     if domain in safe_domains:
  824.                         body += str(l+1) + ': ' + links[l] + '  \n'
  825.                     else:
  826.                         unknowns = True
  827.                         if domain not in DOMAIN_BLACKLIST:
  828.                             if domain in list(unknown_domains.keys()):
  829.                                 unknown_domains[domain] += 1
  830.                             else:
  831.                                 unknown_domains[domain] = 1
  832.                             with closing(open(conf('unknowndomains'),
  833.                                          'w')) as f:
  834.                                 for d in unknown_domains:
  835.                                     f.write(d+' '+str(unknown_domains[d])+'\n')
  836.                         oblink = re.sub(r'.*://', '', links[l])
  837.                         if domain != "maldormed.domain":
  838.                             oblink = censor(oblink, 0.25)
  839.                         body += str(l+1) + ': `' + oblink + '`  \n'
  840.                 if unknowns:
  841.                     body += "\nUnknown links are censored to prevent \
  842.                        spreading illicit content."
  843.             print(title, end=' ')
  844.  
  845.     elif kind == 'submission':
  846.         reason = post[4]
  847.         subject = post[5]
  848.         domain = post[6]
  849.         selftext = post[7]
  850.         baseurl = 'https://oauth.reddit.com/user/'+author
  851.         shadowbanned = False
  852.         usersubmissions = {}
  853.         try:
  854.             usersubmissions = fetch(baseurl +
  855.                                     '/submitted/.json?sort=new&limit=100',
  856.                                     posted - 30,
  857.                                     ((str, 'name'),
  858.                                      (str, 'subreddit'),
  859.                                      (str, 'author'),
  860.                                      (float, 'created_utc'),
  861.                                      (str, 'link_flair_text'),
  862.                                      (str, 'title'),
  863.                                      (str, 'domain'),
  864.                                      (str, 'selftext')),
  865.                                     kind='noupdate')
  866.         except urllib.error.HTTPError as e:
  867.             if e.code in [403, 404]:
  868.                 print("- " + author + " shadowbanned or deleted", end=' ')
  869.                 url = 'https://oauth.reddit.com/r/' + sub + '/comments/' + \
  870.                       name + '/.json'
  871.                 try:
  872.                     reddit(url, headers=auth, raw=True)
  873.                     print("- deleted")
  874.                     return False
  875.                 except urllib.error.HTTPError as e:
  876.                     if e.code in [403, 404]:
  877.                         print("- shadowbanned", end=' ')
  878.                         shadowbanned = True
  879.                     else:
  880.                         raise
  881.             else:
  882.                 raise
  883.         usersubmissions = {submission[0]: submission
  884.                            for submission in usersubmissions}
  885.         if post[0] not in list(usersubmissions.keys()) and not shadowbanned:
  886.             print("- deleted by "+author+" ("+((subject[:47]+"...")
  887.                                                if len(subject) > 50
  888.                                                else subject)+")")
  889.             return False
  890.         else:
  891.             reason = 'None'
  892.             if not shadowbanned and not post[7]:
  893.                 post = usersubmissions[post[0]]
  894.                 name = post[0].split('_')[1]
  895.                 sub = post[1]
  896.                 author = post[2]
  897.                 posted = post[3]
  898.                 reason = post[4]
  899.                 subject = post[5]
  900.                 domain = post[6]
  901.                 selftext = post[7]
  902.             reason = reason.lower()
  903.             if 'removed' in reason:
  904.                 reason = reason.replace('removed', '').strip(' -|—')
  905.                 reason = reason.capitalize() or 'None'
  906.                 reason = " - reason: "+reason
  907.             else:
  908.                 reason = ''
  909.             print("- deleted by mods" + reason)
  910.             lower_frame_boundary = spotted - DELETION_SPACING
  911.             upper_frame_boundary = spotted
  912.             frame_lower = round((float(lower_frame_boundary) - posted) / 60)
  913.             if frame_lower < 0:
  914.                 frame_lower = 0
  915.             frame_upper = round((float(upper_frame_boundary) - posted) / 60)
  916.             if frame_lower != frame_upper:
  917.                 frame = str(int(frame_lower)) + "-" + str(int(frame_upper))
  918.             else:
  919.                 frame = str(int(frame_lower))
  920.             title1 = str(epoch_to_string(short=True) + " - '")
  921.             title2 = str("' (" + domain + ") by /u/" + author +
  922.                              " removed from /r/" + sub +
  923.                              " within " + frame + "min" + reason)
  924.             if shadowbanned:
  925.                 title2 += " (user shadowbanned)"
  926.             restlen = ALLOWED_TITLE_LENGTH - (len(title1) + len(title2))
  927.             if len(subject) > restlen:
  928.                 intro = str(subject[:restlen-3].strip(' ./') + "...")
  929.             else:
  930.                 intro = str(subject)
  931.             title = title1 + intro + title2
  932.             title = title[:ALLOWED_TITLE_LENGTH]
  933.             linkbase = "/r/" + sub + "/comments/" + name
  934.             link = "https://reddit.com/" + linkbase
  935.             goldfishlink = "http://r.go1dfish.me" + linkbase
  936.             unredditlink = "https://unreddit.com" + linkbase
  937.             body = ""
  938.             links = []
  939.             if selftext:
  940.                 links = []
  941.                 for url in re.finditer(r'\[([^\]]*)\][ \n]?\(([^\)]*)\)',
  942.                                        selftext):
  943.                     links += [url.group(2)]
  944.                     selftext = selftext.replace(url.group(0),
  945.                                                 '[' + url.group(1) + ']^^' +
  946.                                                 str(len(links)) + ' ')
  947.                 for url in re.finditer(URLREGEX, selftext):
  948.                     links += [url.group(0)]
  949.                     selftext = selftext.replace(url.group(0),
  950.                                                 '[link]^^' +
  951.                                                 str(len(links)) + ' ')
  952.                 body = "'''\n\n" + selftext + "\n\n'''\n\n"
  953.             body += "[" + subject + "](" + link + ")\n\n"
  954.             body += "[Go1dfish undelete link](" + goldfishlink + ")\n\n"
  955.             body += "[unreddit undelete link](" + unredditlink + ")\n\n"
  956.             body += "Author: /u/" + author
  957.             if links:
  958.                 body += "\n\n"
  959.                 unknowns = False
  960.                 for l in range(len(links)):
  961.                     try:
  962.                         domain = tld.get_tld(links[l])
  963.                     except tld.exceptions.TldBadUrl:
  964.                         domain = 'reddit.com'
  965.                     except (tld.exceptions.TldDomainNotFound, ValueError):
  966.                         domain = 'malformed.domain'
  967.                         print("Malformed domain: " + links[l])
  968.                     if domain in safe_domains:
  969.                         body += str(l+1) + ': ' + links[l] + '  \n'
  970.                     else:
  971.                         unknowns = True
  972.                         if domain not in DOMAIN_BLACKLIST:
  973.                             if domain in list(unknown_domains.keys()):
  974.                                 unknown_domains[domain] += 1
  975.                             else:
  976.                                 unknown_domains[domain] = 1
  977.                             with closing(open(conf('unknowndomains'),
  978.                                          'w')) as f:
  979.                                 for d in unknown_domains:
  980.                                     f.write(d+' '+str(unknown_domains[d])+'\n')
  981.                         oblink = re.sub(r'.*://', '', links[l])
  982.                         if domain != "maldormed.domain":
  983.                             oblink = censor(oblink, 0.25)
  984.                         body += str(l+1) + ': `' + oblink + '`  \n'
  985.                 if unknowns:
  986.                     body += "\nUnknown links are censored to prevent \
  987.                        spreading illicit content."
  988.             print(title, end=' ')
  989.  
  990.     h = HTMLParser()
  991.     title = h.unescape(title)
  992.     body = h.unescape(body)
  993.     if len(body) > 40000:
  994.         body = body[:39900] + '[... post size limit of 40,000 characters reached]'
  995.     post_data = {'sr': 'removalbot', 'title': title,
  996.                  'kind': 'self', 'text': body}
  997.     if not DUMMY:
  998.         response = reddit('https://oauth.reddit.com/api/submit',
  999.                           post_data=post_data, headers=auth, raw=True)
  1000.     print("- submitted", end=' ')
  1001.     sys.stdout.flush()
  1002.     if not DUMMY:
  1003.         fullname_new = get_fullname_new(response)
  1004.         if setflair(fullname_new, kind+'-'+sub.lower()):
  1005.             print("- flaired")
  1006.         else:
  1007.             print("- no flair")
  1008.  
  1009.     if not shadowbanned and getv('SELECT author FROM notify WHERE author=?',
  1010.                                  (author,)):
  1011.         pm_subject = "Your "+kind+" was deleted from /r/"+sub
  1012.         if kind == 'comment':
  1013.             content = content.strip(' \n')
  1014.             if '\n\n' in content:
  1015.                 content = '\n\n'+content+'\n\n'
  1016.         else:
  1017.             content = '  \n&nbsp;**Reason**: '+str(reason)
  1018.         content = h.unescape(content)
  1019.         pm_body = 'Hello, **'+author+'**!\n\n&nbsp;\n\n'
  1020.         pm_body += 'Your '+kind+' appears to have been deleted '
  1021.         pm_body += 'from **/r/' + sub + '** by the moderators, '
  1022.         pm_body += '/u/AutoModerator or the administrators.\n\n'
  1023.         pm_body += '&nbsp;**'+kind.capitalize()+'**: '+content+'  \n'
  1024.         pm_body += '&nbsp;**Posted at**: '+epoch_to_string(posted)+'  \n'
  1025.         pm_body += '&nbsp;**Delay until deletion**: '+frame+'min  \n'
  1026.         pm_body += '&nbsp;**Link**: '+str(link)+'\n\n&nbsp;\n\n'
  1027.         pm_body += 'Have a nice day!  \n'
  1028.         pm_body += '/u/removalbot\n\n'
  1029.         pm_body += '----\n\n'
  1030.         pm_body += '^(Note that the deletion may have been accidental '
  1031.         pm_body += 'or its detection a false positive caused by heavy load '
  1032.         pm_body += 'on reddit\'s servers.)  \n'
  1033.         pm_body += '^^^This ^^^is ^^^an ^^^automated ^^^message ^^^from '
  1034.         pm_body += '^^^/r/removalbot.'
  1035.         send_pm(author, pm_subject, pm_body)
  1036.  
  1037.     return True
  1038.  
  1039. requests_used = None
  1040. requests_remaining = None
  1041. requests_reset = None
  1042.  
  1043. maintainer = readfile(conf('maintainer')).strip()
  1044. version = readfile(conf('version')).strip()
  1045. USERAGENT = 'removalbot by /u/'+maintainer+', v'+version
  1046.  
  1047. os.chdir(CONFDIR)
  1048.  
  1049. db = sqlite3.connect(conf('posts.db'))
  1050. c = db.cursor()
  1051.  
  1052. auth = login()
  1053.  
  1054. update_flairs()
  1055.  
  1056. if os.path.isfile(conf('error')) and \
  1057.    readfile(conf('error')) != 'KeyboardInterrupt':
  1058.     failed = os.path.getctime(conf('error'))
  1059.     failed = epoch_to_string(epoch=failed)
  1060.     reason = readfile(conf('error'))
  1061.     send_pm(maintainer, "REMOVALBOT CRASHED", "Reason: "+reason)
  1062.     e = reason.split(':')[0]
  1063.     print(e, e in ALSO_FATAL)
  1064.     if not DUMMY and (SUBMIT_ERRORS or e == "FatalError" or e in ALSO_FATAL):
  1065.         print("> Submitting error "+reason, end=' ')
  1066.         sys.stdout.flush()
  1067.         title = "[!] Bot encountered an error at " + failed + \
  1068.             ", reason: " + reason
  1069.         post_data = {'sr': 'removalbot', 'kind': 'self', 'title': title}
  1070.         if reason != "FatalError":
  1071.             post_data['text'] = "Such errors usually indicate that reddit is \
  1072.                                 overloaded or in maintenance mode, i. e. \
  1073.                                 they are unavoidable.\n\nA database is used \
  1074.                                 to minimize the impact but cannot negate it."
  1075.         else:
  1076.             post_data['text'] = "The bot encountered a fatal error. This \
  1077.                                 should not happen. The maintainer has been \
  1078.                                 notified; until further action, the bot is \
  1079.                                 suspended."
  1080.         response = reddit('https://oauth.reddit.com/api/submit',
  1081.                           post_data=post_data, headers=auth, raw=True)
  1082.         setflair(get_fullname_new(response), 'error')
  1083.         print("- done")
  1084.     else:
  1085.         print("> Encountered error "+reason)
  1086.     os.remove(conf('error'))
  1087.  
  1088. update_notify()
  1089.  
# Main loop.  Each iteration runs AT MOST ONE maintenance task (note the
# elif chain): a task fires when its integer time bucket
# (now // *_SPACING) has advanced since the value stored via setlast().
# When no task is due, one row of the deleted-post queues is processed.
while True:
    now = int(current_epoch())
    last_deletion_check = getlast('deletion')
    last_new_check = getlast('new')
    last_subscriber_check = getlast('subscribers')
    last_flair_check = getlast('flair')
    # NOTE(review): 'changed' is never read anywhere in this visible loop
    # body — confirm it is dead and can be dropped.
    changed = False
    if int(now / DELETION_SPACING) != last_deletion_check:
        check_deletions()
        setlast('deletion', int(now / DELETION_SPACING))
    elif int(now / NEW_SPACING) != last_new_check:
        get_new()
        setlast('new', int(now / NEW_SPACING))
    elif int(now / SUBSCRIBER_SPACING) != last_subscriber_check:
        update_notify()
        setlast('subscribers', int(now / SUBSCRIBER_SPACING))
    elif int(now / FLAIR_SPACING) != last_flair_check:
        update_flairs()
        setlast('flair', int(now / FLAIR_SPACING))
    else:
        # No maintenance due: take one queued deletion (submissions have
        # priority over comments) and post it via check_user_deletion().
        deleted_submission = \
            c.execute('SELECT * FROM submissions_deleted LIMIT 1').fetchone()
        deleted_comment = \
            c.execute('SELECT * FROM comments_deleted LIMIT 1').fetchone()
        # Rows that will remain after the one handled this iteration.
        left = getv('SELECT COUNT(*) FROM (SELECT name FROM comments_deleted \
                UNION SELECT name FROM submissions_deleted)')-1
        if deleted_submission:
            check_user_deletion(deleted_submission, 'submission')
            # The row is dropped regardless of whether a post was made.
            c.execute('DELETE FROM submissions_deleted WHERE name=?',
                      (deleted_submission[0],))
        elif deleted_comment:
            check_user_deletion(deleted_comment, 'comment')
            c.execute('DELETE FROM comments_deleted WHERE name=?',
                      (deleted_comment[0],))
        # Ask SQLite to release unused memory back to the OS.
        c.execute('PRAGMA shrink_memory')
        if not deleted_submission and not deleted_comment:
            # Idle: nothing queued — skip the status printout below.
            time.sleep(0.1)
            continue
        elif left == 0:
            # Queue fully drained; report how many unknown domains were
            # logged during this batch.
            c_unknown = readfile(conf('unknowndomains')).strip().split('\n')
            print("> Done undeleting, " + str(len(c_unknown)) + \
                " unknown domains logged")

    print()
    # Rate-limit status line.  requests_* are presumably strings captured
    # from reddit's rate-limit response headers — TODO confirm where they
    # are assigned (not visible in this excerpt).
    if requests_remaining and requests_reset and requests_used:
        requests_remaining = str(int(float(requests_remaining)))
        print("Used: " + requests_used + ", remaining: " + \
              requests_remaining + " in " + requests_reset + "s", end=' ')
        remaining = float(requests_remaining)
        reset = int(requests_reset)
        # NOTE(review): divides by 'reset' (and by 'remaining' below);
        # either being 0 raises ZeroDivisionError — confirm upstream
        # guarantees non-zero values.
        print("(" + str(round(remaining / reset, 2)) + "/s)", end=' ')
        if remaining < reset:
            # NOTE(review): a sleep time is computed and printed, but no
            # time.sleep() occurs in the visible lines — confirm the
            # actual throttling happens past this excerpt.
            sleeptime = str(int(round(float(reset - remaining) /
                                remaining, 3) * 1000))
            print("- OVERLOAD, sleep " + sleeptime + "ms")
        else:
            # '\r' rewinds so the next status line overwrites this one.
            print("- OK\r", end=' ')
        sys.stdout.flush()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement