Advertisement
havalqandiel

oppnlld

Dec 16th, 2016
152
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 21.40 KB | None | 0 0
  1. # -*- coding: utf-8 -*-
  2. """
  3. openload.io urlresolver plugin
  4. Copyright (C) 2015 tknorris
  5.  
  6. This program is free software: you can redistribute it and/or modify
  7. it under the terms of the GNU General Public License as published by
  8. the Free Software Foundation, either version 3 of the License, or
  9. (at your option) any later version.
  10.  
  11. This program is distributed in the hope that it will be useful,
  12. but WITHOUT ANY WARRANTY; without even the implied warranty of
  13. MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  14. GNU General Public License for more details.
  15.  
  16. You should have received a copy of the GNU General Public License
  17. along with this program. If not, see <http://www.gnu.org/licenses/>.
  18. """
  19.  
  20. import cookielib
  21. import gzip
  22. import re
  23. import StringIO
  24. import urllib
  25. import urllib2
  26. import socket
  27.  
  28. # Set Global timeout - Useful for slow connections and Putlocker.
  29. socket.setdefaulttimeout(10)
  30.  
  31. class Net:
  32. '''
  33. This class wraps :mod:`urllib2` and provides an easy way to make http
  34. requests while taking care of cookies, proxies, gzip compression and
  35. character encoding.
  36.  
  37. Example::
  38.  
  39. from addon.common.net import Net
  40. net = Net()
  41. response = net.http_GET('http://xbmc.org')
  42. print response.content
  43. '''
  44.  
  45. _cj = cookielib.LWPCookieJar()
  46. _proxy = None
  47. _user_agent = 'Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0'
  48. _http_debug = False
  49.  
  50. def __init__(self, cookie_file='', proxy='', user_agent='', http_debug=False):
  51. '''
  52. Kwargs:
  53. cookie_file (str): Full path to a file to be used to load and save
  54. cookies to.
  55.  
  56. proxy (str): Proxy setting (eg.
  57. ``'http://user:pass@example.com:1234'``)
  58.  
  59. user_agent (str): String to use as the User Agent header. If not
  60. supplied the class will use a default user agent (chrome)
  61.  
  62. http_debug (bool): Set ``True`` to have HTTP header info written to
  63. the XBMC log for all requests.
  64. '''
  65. if cookie_file:
  66. self.set_cookies(cookie_file)
  67. if proxy:
  68. self.set_proxy(proxy)
  69. if user_agent:
  70. self.set_user_agent(user_agent)
  71. self._http_debug = http_debug
  72. self._update_opener()
  73.  
  74. def set_cookies(self, cookie_file):
  75. '''
  76. Set the cookie file and try to load cookies from it if it exists.
  77.  
  78. Args:
  79. cookie_file (str): Full path to a file to be used to load and save
  80. cookies to.
  81. '''
  82. try:
  83. self._cj.load(cookie_file, ignore_discard=True)
  84. self._update_opener()
  85. return True
  86. except:
  87. return False
  88.  
  89. def get_cookies(self):
  90. '''Returns A dictionary containing all cookie information by domain.'''
  91. return self._cj._cookies
  92.  
  93. def save_cookies(self, cookie_file):
  94. '''
  95. Saves cookies to a file.
  96.  
  97. Args:
  98. cookie_file (str): Full path to a file to save cookies to.
  99. '''
  100. self._cj.save(cookie_file, ignore_discard=True)
  101.  
  102. def set_proxy(self, proxy):
  103. '''
  104. Args:
  105. proxy (str): Proxy setting (eg.
  106. ``'http://user:pass@example.com:1234'``)
  107. '''
  108. self._proxy = proxy
  109. self._update_opener()
  110.  
  111. def get_proxy(self):
  112. '''Returns string containing proxy details.'''
  113. return self._proxy
  114.  
  115. def set_user_agent(self, user_agent):
  116. '''
  117. Args:
  118. user_agent (str): String to use as the User Agent header.
  119. '''
  120. self._user_agent = user_agent
  121.  
  122. def get_user_agent(self):
  123. '''Returns user agent string.'''
  124. return self._user_agent
  125.  
  126. def _update_opener(self):
  127. '''
  128. Builds and installs a new opener to be used by all future calls to
  129. :func:`urllib2.urlopen`.
  130. '''
  131. if self._http_debug:
  132. http = urllib2.HTTPHandler(debuglevel=1)
  133. else:
  134. http = urllib2.HTTPHandler()
  135.  
  136. if self._proxy:
  137. opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self._cj),
  138. urllib2.ProxyHandler({'http':
  139. self._proxy}),
  140. urllib2.HTTPBasicAuthHandler(),
  141. http)
  142.  
  143. else:
  144. opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self._cj),
  145. urllib2.HTTPBasicAuthHandler(),
  146. http)
  147. urllib2.install_opener(opener)
  148.  
  149. def http_GET(self, url, headers={}, compression=True):
  150. '''
  151. Perform an HTTP GET request.
  152.  
  153. Args:
  154. url (str): The URL to GET.
  155.  
  156. Kwargs:
  157. headers (dict): A dictionary describing any headers you would like
  158. to add to the request. (eg. ``{'X-Test': 'testing'}``)
  159.  
  160. compression (bool): If ``True`` (default), try to use gzip
  161. compression.
  162.  
  163. Returns:
  164. An :class:`HttpResponse` object containing headers and other
  165. meta-information about the page and the page content.
  166. '''
  167. return self._fetch(url, headers=headers, compression=compression)
  168.  
  169. def http_POST(self, url, form_data, headers={}, compression=True):
  170. '''
  171. Perform an HTTP POST request.
  172.  
  173. Args:
  174. url (str): The URL to POST.
  175.  
  176. form_data (dict): A dictionary of form data to POST.
  177.  
  178. Kwargs:
  179. headers (dict): A dictionary describing any headers you would like
  180. to add to the request. (eg. ``{'X-Test': 'testing'}``)
  181.  
  182. compression (bool): If ``True`` (default), try to use gzip
  183. compression.
  184.  
  185. Returns:
  186. An :class:`HttpResponse` object containing headers and other
  187. meta-information about the page and the page content.
  188. '''
  189. return self._fetch(url, form_data, headers=headers, compression=compression)
  190.  
  191. def http_HEAD(self, url, headers={}):
  192. '''
  193. Perform an HTTP HEAD request.
  194.  
  195. Args:
  196. url (str): The URL to GET.
  197.  
  198. Kwargs:
  199. headers (dict): A dictionary describing any headers you would like
  200. to add to the request. (eg. ``{'X-Test': 'testing'}``)
  201.  
  202. Returns:
  203. An :class:`HttpResponse` object containing headers and other
  204. meta-information about the page.
  205. '''
  206. request = urllib2.Request(url)
  207. request.get_method = lambda: 'HEAD'
  208. request.add_header('User-Agent', self._user_agent)
  209. for key in headers:
  210. request.add_header(key, headers[key])
  211. response = urllib2.urlopen(request)
  212. return HttpResponse(response)
  213.  
  214. def _fetch(self, url, form_data={}, headers={}, compression=True):
  215. '''
  216. Perform an HTTP GET or POST request.
  217.  
  218. Args:
  219. url (str): The URL to GET or POST.
  220.  
  221. form_data (dict): A dictionary of form data to POST. If empty, the
  222. request will be a GET, if it contains form data it will be a POST.
  223.  
  224. Kwargs:
  225. headers (dict): A dictionary describing any headers you would like
  226. to add to the request. (eg. ``{'X-Test': 'testing'}``)
  227.  
  228. compression (bool): If ``True`` (default), try to use gzip
  229. compression.
  230.  
  231. Returns:
  232. An :class:`HttpResponse` object containing headers and other
  233. meta-information about the page and the page content.
  234. '''
  235. req = urllib2.Request(url)
  236. if form_data:
  237. if isinstance(form_data, basestring):
  238. form_data = form_data
  239. else:
  240. form_data = urllib.urlencode(form_data, True)
  241. req = urllib2.Request(url, form_data)
  242. req.add_header('User-Agent', self._user_agent)
  243. for key in headers:
  244. req.add_header(key, headers[key])
  245. if compression:
  246. req.add_header('Accept-Encoding', 'gzip')
  247. req.add_unredirected_header('Host', req.get_host())
  248. response = urllib2.urlopen(req)
  249. return HttpResponse(response)
  250.  
  251. class HttpResponse:
  252. '''
  253. This class represents a resoponse from an HTTP request.
  254.  
  255. The content is examined and every attempt is made to properly encode it to
  256. Unicode.
  257.  
  258. .. seealso::
  259. :meth:`Net.http_GET`, :meth:`Net.http_HEAD` and :meth:`Net.http_POST`
  260. '''
  261.  
  262. content = ''
  263. '''Unicode encoded string containing the body of the reposne.'''
  264.  
  265. def __init__(self, response):
  266. '''
  267. Args:
  268. response (:class:`mimetools.Message`): The object returned by a call
  269. to :func:`urllib2.urlopen`.
  270. '''
  271. self._response = response
  272.  
  273. @property
  274. def content(self):
  275. html = self._response.read()
  276. encoding = None
  277. try:
  278. if self._response.headers['content-encoding'].lower() == 'gzip':
  279. html = gzip.GzipFile(fileobj=StringIO.StringIO(html)).read()
  280. except:
  281. pass
  282.  
  283. try:
  284. content_type = self._response.headers['content-type']
  285. if 'charset=' in content_type:
  286. encoding = content_type.split('charset=')[-1]
  287. except:
  288. pass
  289.  
  290. r = re.search('<meta\s+http-equiv="Content-Type"\s+content="(?:.+?);\s+charset=(.+?)"', html, re.IGNORECASE)
  291. if r:
  292. encoding = r.group(1)
  293.  
  294. if encoding is not None:
  295. try: html = html.decode(encoding)
  296. except: pass
  297. return html
  298.  
  299. def get_headers(self, as_dict=False):
  300. '''Returns headers returned by the server.
  301. If as_dict is True, headers are returned as a dictionary otherwise a list'''
  302. if as_dict:
  303. return dict([(item[0].title(), item[1]) for item in self._response.info().items()])
  304. else:
  305. return self._response.info().headers
  306.  
  307. def get_url(self):
  308. '''
  309. Return the URL of the resource retrieved, commonly used to determine if
  310. a redirect was followed.
  311. '''
  312. return self._response.geturl()
  313.  
  314.  
  315. import re
  316. import urllib2
  317. #from urlresolver9 import common
  318. #from urlresolver9.resolver import UrlResolver, ResolverError
  319. from HTMLParser import HTMLParser
  320. import time
  321. import urllib
  322. import base64
  323. #from lib.png import Reader as PNGReader
  324.  
# Desktop Firefox User-Agent sent with all openload requests (the site is
# fetched as a regular browser would see it).
FF_USER_AGENT = 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:49.0) Gecko/20100101 Firefox/49.0'
  326.  
class OpenLoadResolver():
    '''Resolves an openload.io/openload.co embed page to a direct stream URL.

    Works by scraping the embed page HTML and emulating the site's
    JavaScript link-obfuscation in Python.
    '''
    name = "openload"
    domains = ["openload.io", "openload.co"]
    # Captures (domain, media id) from ".../embed/<id>" or ".../f/<id>" URLs.
    pattern = '(?://|\.)(openload\.(?:io|co))/(?:embed|f)/([0-9a-zA-Z-_]+)'

    def __init__(self):
        # HTTP helper defined earlier in this file (cookies, gzip, UA).
        self.net = Net()

    def get_media_url(self, host, media_id):
        '''Fetch the embed page for media_id and return the final video URL.

        Retries the fetch/decode up to two more times (2s apart) when the
        decoded link contains punctuation that indicates a failed decode,
        then follows redirects on the /stream/ URL to get the CDN address.
        Re-raises any exception after printing it.
        '''
        try:

            myurl = 'http://openload.co/embed/%s' % media_id
            HTTP_HEADER = {
                'User-Agent': FF_USER_AGENT,
                'Referer': myurl}  # 'Connection': 'keep-alive'
            html = self.net.http_GET(myurl, headers=HTTP_HEADER).content
            mylink = self.get_mylink(html)
            # A correctly decoded link contains none of these characters;
            # their presence means the decode went wrong -> refetch and retry.
            if set('[<>=!@#$%^&*()+{}":;\']+$').intersection(mylink):
                #common.log_utils.log_notice('############################## ERROR A openload mylink: %s' % (mylink))
                time.sleep(2)
                html = self.net.http_GET(myurl, headers=HTTP_HEADER).content
                mylink = self.get_mylink(html)
                if set('[<>=!@#$%^&*()+{}":;\']+$').intersection(mylink):
                    #common.log_utils.log_notice('############################## ERROR A openload mylink: %s' % (mylink))
                    time.sleep(2)
                    html = self.net.http_GET(myurl, headers=HTTP_HEADER).content
                    mylink = self.get_mylink(html)

            #common.log_utils.log_notice('A openload mylink: %s' % mylink)
            #print "Mylink", mylink, urllib.quote_plus(mylink)
            videoUrl = 'https://openload.co/stream/{0}?mime=true'.format(mylink)
            #common.log_utils.log_notice('A openload resolve parse: %s' % videoUrl)

            #dtext = videoUrl.replace('https', 'http')
            headers = {'User-Agent': HTTP_HEADER['User-Agent'], 'Referer': myurl}
            # Open the stream URL only to follow redirects; geturl() yields
            # the final CDN location, the body is never read.
            req = urllib2.Request(videoUrl, None, headers)
            res = urllib2.urlopen(req)
            videoUrl = res.geturl()
            res.close()

            return videoUrl
            # video_url = 'https://openload.co/stream/%s?mime=true' % myvidurl

        except Exception as e:
            #common.log_utils.log_notice('Exception during openload resolve parse: %s' % e)
            print("Error", e)
            raise

    def get_url(self, host, media_id):
        '''Return the canonical embed-page URL for a media id.'''
        return 'http://openload.io/embed/%s' % media_id

    def get_mylink(self, html):
        '''Extract and decode the obfuscated stream token from embed HTML.

        The page hides the token as a long digit string inside a <span>;
        each 5-digit group encodes one character (see loop below).
        '''
        try:
            html = html.encode('utf-8')
        except:
            pass
        if any(x in html for x in ['We are sorry', 'File not found']):
            raise Exception('The file was removed')

        n = re.findall('<span id="(.*?)">(.*?)</span>', html)
        print "y",n
        # Digit payload is the text of the first matched span.
        id = n[0][1]

        def parseInt(sin):
            # Leading-integer parse, mimicking JavaScript's parseInt().
            m = re.search(r'^(\d+)[.,]?\d*?', str(sin))
            return int(m.groups()[-1]) if m and not callable(sin) else None

        #id = '0311103128141621815820119150520011012136101190210112156191831907609106180800810519060010720506202080101111807006144060791010518090141001810619090141071804600099170791110218177170930612518146081221612200162201241516319'
        # First two digits form the key; then every 5-digit group is
        # chr(first3 - key * next2).
        firstTwoChars = parseInt(id[0:2])
        num = 2;
        txt = ''
        while num < len(id):
            print "NUM", num
            txt += chr(parseInt(id[num:num + 3]) - firstTwoChars * parseInt(id[num + 3:num + 3 + 2]))
            print "NUM", txt

            num += 5

        #alina = 'https://openload.co/stream/' + txt;
        return txt

        # NOTE(review): everything below is unreachable (after the return
        # above) — remnants of an older AAEncode-based decoding scheme.
        # It references an undefined name `y` and calls exit(); kept as-is.
        #magic = ord(y[-1])
        #y = " ".join(y.split(chr(magic - 1)))
        #y = chr(magic - 1).join(y.split(y[-1]))
        #y = chr(magic).join(y.split(" "))
        #enc_data = y
        #print enc_data
        #enc_data = HTMLParser().unescape(enc_data)
        enc_data = HTMLParser().unescape(y)

        res = []
        for c in enc_data:
            j = ord(c)
            if j >= 33 and j <= 126:
                # Printable ASCII: ROT-47-style shift by 14 within 33..126.
                j = ((j + 14) % 94)
                j = j + 33
            res += chr(j)
        mylink = ''.join(res)

        tmp100 = re.findall('<script type="text/javascript">(゚ω゚.*?)</script>', html, re.DOTALL)
        encdata = ''
        tmpEncodedData = tmp100[0].split('┻━┻')
        for tmpItem in tmpEncodedData:
            try:
                encdata += self.decodeOpenLoad(tmpItem)
            except:
                pass

        print "AAAAA",encdata

        exit()
        encnumbers = re.findall('return(.*?);', encdata, re.DOTALL)
        print encnumbers

        #https://openload.co/stream/rZ04_L_uRuU~1478308714~95.160.0.0~VWnfq0ig?mime=true
        #https://openload.co/stream/JlSTfXTluk8~1478209703~46.169.0.0~49kqoQ-2?mime=true')

        encnumbers1 = re.findall('(\d+).*?(\d+)', encnumbers[0])[0]
        encnumbers2 = re.findall('(\d+) \- (\d+)', encnumbers[1])[0]
        encnumbers4 = re.findall('(\d+)', encnumbers[3])[0]

        number1 = int(encnumbers1[0]) + int(encnumbers1[1])
        number2 = int(encnumbers2[0]) - int(encnumbers2[1]) + number1
        number4 = int(encnumbers4[0])
        number3 = number2 - number4

        print "num1", number1
        print "num2", number2
        print "num4", number4
        print "num3", number3
        print "a",len(mylink)-number2
        # var str =
        # tmp.substring(0, tmp.length - number2())
        # + String.fromCharCode(tmp.slice(0 - number2()).charCodeAt(0) + number3())
        # + tmp.substring(tmp.length - number2() + 1);
        # mylink = ''.join(res)[0:-3] + chr(ord(''.join(res)[-1]) -2 3)

        #https://openload.co/stream/ExatdBfcJ38~1478307277~95.160.0.0~hppYZUHF?mime=true
        mynewlink1 = mylink[0:-number2]
        mynewlink2 = chr(ord(mylink[-number2])+number3)
        mynewlink3 = mylink[len(mylink)-number2+1:]
        print "my2", mynewlink1,mynewlink2,mynewlink3
        mynewlink = mynewlink1+mynewlink2+mynewlink3

        return mynewlink

    # If you want to use the code for openload please at least put the info from were you take it:
    # for example: "Code take from plugin IPTVPlayer: "https://gitlab.com/iptvplayer-for-e2/iptvplayer-for-e2/"
    # It will be very nice if you send also email to me samsamsam@o2.pl and inform were this code will be used
    # start https://github.com/whitecream01/WhiteCream-V0.0.1/blob/master/plugin.video.uwc/plugin.video.uwc-1.0.51.zip?raw=true
    def decode(self, encoded):
        '''Decode backslash-separated octal escapes; non-octal parts pass through.'''
        tab = encoded.split('\\')
        ret = ''
        for item in tab:
            try:
                ret += chr(int(item, 8))
            except Exception:
                ret += item
        return ret

    def base10toN(self, num, n):
        '''Convert a base-10 integer to a base-n string (digits >9 become a-z,
        digits >=36 are rendered as "(digit)"). Mirrors JS Number.toString(n).
        '''
        num_rep = {10: 'a', 11: 'b', 12: 'c', 13: 'd', 14: 'e', 15: 'f', 16: 'g', 17: 'h', 18: 'i', 19: 'j', 20: 'k',
                   21: 'l', 22: 'm', 23: 'n', 24: 'o', 25: 'p', 26: 'q', 27: 'r', 28: 's', 29: 't', 30: 'u', 31: 'v',
                   32: 'w', 33: 'x', 34: 'y', 35: 'z'}
        new_num_string = ''
        current = num
        while current != 0:
            remainder = current % n
            if 36 > remainder > 9:
                remainder_string = num_rep[remainder]
            elif remainder >= 36:
                remainder_string = '(' + str(remainder) + ')'
            else:
                remainder_string = str(remainder)
            new_num_string = remainder_string + new_num_string
            current = current / n
        return new_num_string

    def decodeOpenLoad(self, aastring):
        # decodeOpenLoad made by mortael, please leave this line for proper credit :)
        # aastring = re.search(r"<video(?:.|\s)*?<script\s[^>]*?>((?:.|\s)*?)</script", html, re.DOTALL | re.IGNORECASE).group(1)
        #
        # Translates openload's AAEncode ("aaencode" emoticon-obfuscated JS)
        # into plain text by textually replacing each emoticon expression
        # with the numeric literal it evaluates to. The replace ORDER is
        # significant (longer expressions must be substituted first).
        aastring = aastring.replace("(゚Д゚)[゚ε゚]+(o゚ー゚o)+ ((c^_^o)-(c^_^o))+ (-~0)+ (゚Д゚) ['c']+ (-~-~1)+", "")
        aastring = aastring.replace("((゚ー゚) + (゚ー゚) + (゚Θ゚))", "9")
        aastring = aastring.replace("((゚ー゚) + (゚ー゚))", "8")
        aastring = aastring.replace("((゚ー゚) + (o^_^o))", "7")
        aastring = aastring.replace("((c^_^o)-(c^_^o))", "0")
        aastring = aastring.replace("((゚ー゚) + (゚Θ゚))", "5")
        aastring = aastring.replace("(゚ー゚)", "4")
        aastring = aastring.replace("((o^_^o) - (゚Θ゚))", "2")
        aastring = aastring.replace("(o^_^o)", "3")
        aastring = aastring.replace("(゚Θ゚)", "1")
        aastring = aastring.replace("(+!+[])", "1")
        aastring = aastring.replace("(c^_^o)", "0")
        aastring = aastring.replace("(0+0)", "0")
        aastring = aastring.replace("(゚Д゚)[゚ε゚]", "\\")
        aastring = aastring.replace("(3 +3 +0)", "6")
        aastring = aastring.replace("(3 - 1 +0)", "2")
        aastring = aastring.replace("(!+[]+!+[])", "2")
        aastring = aastring.replace("(-~-~2)", "4")
        aastring = aastring.replace("(-~-~1)", "3")
        aastring = aastring.replace("(-~0)", "1")
        aastring = aastring.replace("(-~1)", "2")
        aastring = aastring.replace("(-~3)", "4")
        aastring = aastring.replace("(0-0)", "0")

        aastring = aastring.replace("(゚Д゚).゚ω゚ノ", "10")
        aastring = aastring.replace("(゚Д゚).゚Θ゚ノ", "11")
        aastring = aastring.replace("(゚Д゚)[\'c\']", "12")
        aastring = aastring.replace("(゚Д゚).゚ー゚ノ", "13")
        aastring = aastring.replace("(゚Д゚).゚Д゚ノ", "14")
        aastring = aastring.replace("(゚Д゚)[゚Θ゚]", "15")

        # The payload is a backslash-escaped octal string after the first "\+".
        decodestring = re.search(r"\\\+([^(]+)", aastring, re.DOTALL | re.IGNORECASE).group(1)
        decodestring = "\\+" + decodestring
        decodestring = decodestring.replace("+", "")
        decodestring = decodestring.replace(" ", "")

        decodestring = self.decode(decodestring)
        decodestring = decodestring.replace("\\/", "/")

        # Emulate JS "(a+b).toString(base)" fragments left in the decoded text.
        if 'toString' in decodestring:
            base = re.compile(r"toString\(a\+(\d+)", re.DOTALL | re.IGNORECASE).findall(decodestring)[0]
            base = int(base)
            match = re.compile(r"(\(\d[^)]+\))", re.DOTALL | re.IGNORECASE).findall(decodestring)
            for repl in match:
                match1 = re.compile(r"(\d+),(\d+)", re.DOTALL | re.IGNORECASE).findall(repl)
                base2 = base + int(match1[0][0])
                repl2 = self.base10toN(int(match1[0][1]), base2)
                decodestring = decodestring.replace(repl, repl2)
            decodestring = decodestring.replace("+", "")
            decodestring = decodestring.replace("\"", "")
        return decodestring
  566.  
  567. def openloadresolv(url):
  568. #url = 'https://openload.co/embed/aFNMCPye31g/'
  569. tt= '1'
  570. try:
  571. media_id = re.findall('embed/(.*?)/', url)[0]; tt = '2'
  572. except:
  573. pass
  574. if tt == '1':
  575. media_id = re.findall('embed/(.*?)$', url)[0]
  576. sos = OpenLoadResolver()
  577. host = 'Host'
  578. self = ''
  579. media_url = sos.get_media_url( host, media_id)
  580. return media_url
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement