# -*- coding: utf-8 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Channel for jkanime
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------

import urlparse,urllib2,urllib,re
import os, sys

from core import logger
from core import config
from core import scrapertools
from core.item import Item
from servers import servertools

DEBUG = config.get_setting("debug")

__category__ = "A"
__type__ = "generic"
__title__ = "JKanime"
__channel__ = "jkanime"
__language__ = "ES"
__creationdate__ = "20121015"

def isGeneric():
    return True

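# Each menu entry below is an Item whose "action" attribute names the function
# in this module that pelisalacarta will call when the user selects the entry.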
def mainlist(item):
    logger.info("[jkanime.py] mainlist")

    itemlist = []
    itemlist.append( Item(channel=__channel__, action="ultimos" , title="Últimos"           , url="http://jkanime.net/" ))
    itemlist.append( Item(channel=__channel__, action="letras"  , title="Listado Alfabetico", url="http://jkanime.net/" ))
    itemlist.append( Item(channel=__channel__, action="generos" , title="Listado por Genero", url="http://jkanime.net/" ))
    itemlist.append( Item(channel=__channel__, action="search"  , title="Buscar" ))

    return itemlist

def search(item,texto):
    logger.info("[jkanime.py] search")
    if item.url=="":
        item.url="http://jkanime.net/buscar/%s/"
    texto = texto.replace(" ","+")
    item.url = item.url % texto
    try:
        return series(item)
    # Catch the exception so that a failing channel does not break the global search
    except:
        for line in sys.exc_info():
            logger.error( "%s" % line )
        return []

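# For example, a search for "full metal" fetches
# http://jkanime.net/buscar/full+metal/ and reuses series() to parse the results.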
def ultimos(item):
    logger.info("[jkanime.py] ultimos")
    itemlist = []
    data = scrapertools.cache_page(item.url)
    data = scrapertools.get_match(data,'<ul class="latestul">(.*?)</ul>')

    patron = '<a href="([^"]+)">([^<]+)<'
    matches = re.compile(patron,re.DOTALL).findall(data)

    for scrapedurl,scrapedtitle in matches:
        title = scrapedtitle.strip()
        url = urlparse.urljoin(item.url,scrapedurl)
        thumbnail = ""
        plot = ""
        if (DEBUG): logger.info("title=["+title+"], url=["+url+"], thumbnail=["+thumbnail+"]")

        itemlist.append( Item(channel=__channel__, action="episodios" , title=title , url=url, thumbnail=thumbnail, plot=plot))

    return itemlist

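# Genre index: the links are scraped from the <div class="genres"> block of the home page.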
def generos(item):
    logger.info("[jkanime.py] generos")
    itemlist = []

    data = scrapertools.cache_page(item.url)
    data = scrapertools.get_match(data,'<div class="genres">(.*?)</div>')

    patron = '<a href="([^"]+)">([^<]+)</a>'
    matches = re.compile(patron,re.DOTALL).findall(data)

    for scrapedurl,scrapedtitle in matches:
        title = scrapedtitle
        url = urlparse.urljoin(item.url,scrapedurl)
        thumbnail = ""
        plot = ""
        if (DEBUG): logger.info("title=["+title+"], url=["+url+"], thumbnail=["+thumbnail+"]")

        itemlist.append( Item(channel=__channel__, action="series" , title=title , url=url, thumbnail=thumbnail, plot=plot))

    return itemlist

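# Alphabetical index: the letter links are scraped from the <ul class="animelet"> block.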
def letras(item):
    logger.info("[jkanime.py] letras")
    itemlist = []

    data = scrapertools.cache_page(item.url)
    data = scrapertools.get_match(data,'<ul class="animelet">(.*?)</ul>')

    patron = '<a href="([^"]+)">([^<]+)</a>'
    matches = re.compile(patron,re.DOTALL).findall(data)

    for scrapedurl,scrapedtitle in matches:
        title = scrapedtitle
        url = urlparse.urljoin(item.url,scrapedurl)
        thumbnail = ""
        plot = ""
        if (DEBUG): logger.info("title=["+title+"], url=["+url+"], thumbnail=["+thumbnail+"]")

        itemlist.append( Item(channel=__channel__, action="series" , title=title , url=url, thumbnail=thumbnail, plot=plot))

    return itemlist

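# Series listing, also reused for search results: each result is one
# <table class="search"> block like the sample quoted below.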
def series(item):
    logger.info("[jkanime.py] series")

    # Download the page
    data = scrapertools.cache_page(item.url)

    # Extract the entries
    '''
    <table class="search">
    <tr>
    <td rowspan="2">
    <a href="http://jkanime.net/basilisk-kouga-ninpou-chou/"><img src="http://jkanime.net/assets/images/animes/thumbnail/basilisk-kouga-ninpou-chou.jpg" width="50" /></a>
    </td>
    <td><a class="titl" href="http://jkanime.net/basilisk-kouga-ninpou-chou/">Basilisk: Kouga Ninpou Chou</a></td>
    <td rowspan="2" style="width:50px; text-align:center;">Serie</td>
    <td rowspan="2" style="width:50px; text-align:center;" >24 Eps</td>
    </tr>
    <tr>
    <td><p>Basilisk, considerada una de las mejores series del genero ninja, nos narra la historia de dos clanes ninja separados por el odio entre dos familias. Los actuales representantes, Kouga Danjo del clan Kouga y Ogen del clan&#8230; <a class="next" href="http://jkanime.net/basilisk-kouga-ninpou-chou/">seguir leyendo</a></p></td>
    </tr>
    </table>
    '''
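    # The pattern captures, for each result block: url, thumbnail, title,
    # the type column ("Serie"), the episode count ("24 Eps") and the plot cell.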
    patron  = '<table class="search[^<]+'
    patron += '<tr[^<]+'
    patron += '<td[^<]+'
    patron += '<a href="([^"]+)"><img src="([^"]+)"[^<]+</a>[^<]+'
    patron += '</td>[^<]+'
    patron += '<td><a[^>]+>([^<]+)</a></td>[^<]+'
    patron += '<td[^>]+>([^<]+)</td>[^<]+'
    patron += '<td[^>]+>([^<]+)</td>[^<]+'
    patron += '</tr>[^<]+'
    patron += '<tr>[^<]+'
    patron += '<td>(.*?)</td>'
    matches = re.compile(patron,re.DOTALL).findall(data)
    itemlist = []

    for scrapedurl, scrapedthumbnail,scrapedtitle,line1,line2,scrapedplot in matches:
        title = scrapedtitle.strip()+" ("+line1.strip()+") ("+line2.strip()+")"
        extra = line2.strip()
        url = urlparse.urljoin(item.url,scrapedurl)
        thumbnail = scrapedthumbnail
        plot = scrapertools.htmlclean(scrapedplot)
        if (DEBUG): logger.info("title=["+title+"], url=["+url+"], thumbnail=["+thumbnail+"]")

        itemlist.append( Item(channel=__channel__, action="episodios" , title=title , url=url, thumbnail=thumbnail, fanart=thumbnail, plot=plot, extra=extra, viewmode="movie_with_plot"))

    # Link to the next page of results, if there is one
    try:
        siguiente = scrapertools.get_match(data,'<a class="listsiguiente" href="([^"]+)" >Resultados Siguientes')
        scrapedurl = urlparse.urljoin(item.url,siguiente)
        scrapedtitle = ">> Pagina Siguiente"
        scrapedthumbnail = ""
        scrapedplot = ""

        itemlist.append( Item(channel=__channel__, action="series", title=scrapedtitle , url=scrapedurl , thumbnail=scrapedthumbnail , plot=scrapedplot , folder=True) )
    except:
        pass
    return itemlist

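# Episode lists are not in the page HTML: the site pages them through an AJAX
# endpoint, /ajax/pagination_episodes/<idserie>/<page>/, which returns JSON
# (sample below) until an empty list "[]" marks the last page.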
def episodios(item):
    logger.info("[jkanime.py] episodios")
    itemlist = []

    # Download the page
    data = scrapertools.cache_page(item.url)
    scrapedplot = scrapertools.get_match(data,'<meta name="description" content="([^"]+)"/>')
    scrapedthumbnail = scrapertools.get_match(data,'<meta property="og.image" content="([^"]+)"/>')
    idserie = scrapertools.get_match(data,"ajax/pagination_episodes/(\d+)/")
    logger.info("idserie="+idserie)
    numero_pag = 0
    flag = 1
    while flag==1:
        numero_pag += 1
        # Request the AJAX page with browser-like headers
        headers = []
        headers.append( [ "User-Agent" , "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:16.0) Gecko/20100101 Firefox/16.0" ] )
        headers.append( [ "Referer" , item.url ] )
        data = scrapertools.cache_page("http://jkanime.net/ajax/pagination_episodes/"+idserie+"/"+str(numero_pag)+"/", headers=headers)
        logger.info("data="+data)
        if data!="[]":

            '''
            [{"id":"14199","title":"GetBackers - 1","number":"1","animes_id":"122","timestamp":"2012-01-04 16:59:30"},{"id":"14200","title":"GetBackers - 2","number":"2","animes_id":"122","timestamp":"2012-01-04 16:59:30"},{"id":"14201","title":"GetBackers - 3","number":"3","animes_id":"122","timestamp":"2012-01-04 16:59:30"},{"id":"14202","title":"GetBackers - 4","number":"4","animes_id":"122","timestamp":"2012-01-04 16:59:30"},{"id":"14203","title":"GetBackers - 5","number":"5","animes_id":"122","timestamp":"2012-01-04 16:59:30"},{"id":"14204","title":"GetBackers - 6","number":"6","animes_id":"122","timestamp":"2012-01-04 16:59:30"},{"id":"14205","title":"GetBackers - 7","number":"7","animes_id":"122","timestamp":"2012-01-04 16:59:30"},{"id":"14206","title":"GetBackers - 8","number":"8","animes_id":"122","timestamp":"2012-01-04 16:59:30"},{"id":"14207","title":"GetBackers - 9","number":"9","animes_id":"122","timestamp":"2012-01-04 16:59:30"},{"id":"14208","title":"GetBackers - 10","number":"10","animes_id":"122","timestamp":"2012-01-04 16:59:30"}]
            '''
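            # Each JSON entry yields an (id, title, number, animes_id) tuple,
            # e.g. ("14199", "GetBackers - 1", "1", "122").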
            patron = '"id"\:"(\d+)","title"\:"([^"]+)","number"\:"(\d+)","animes_id"\:"(\d+)"'
            matches = re.compile(patron,re.DOTALL).findall(data)

            # Episode URLs are the serie URL plus the episode number, e.g. http://jkanime.net/get-backers/1/
            for id,scrapedtitle,numero,animes_id in matches:
                title = scrapedtitle.strip()
                url = urlparse.urljoin(item.url,numero)
                thumbnail = scrapedthumbnail
                plot = scrapedplot
                if (DEBUG): logger.info("title=["+title+"], url=["+url+"], thumbnail=["+thumbnail+"]")

                itemlist.append( Item(channel=__channel__, action="findvideos" , title=title , url=url, thumbnail=thumbnail, fanart=thumbnail, plot=plot))
        else:
            flag = 0

    return itemlist

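# The episode page embeds its mirrors as JavaScript variables (sp1.e for
# 180upload, spu.e for upafile) and, for the site's own player, a flashvars
# "file=..." parameter; each block below tries one of them.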
def findvideos(item):
    logger.info("[jkanime.py] findvideos")
    itemlist = []

    # Download the page
    data = scrapertools.cache_page(item.url)

    #180upload: sp1.e=hh7pmxk553kj
    try:
        code = scrapertools.get_match(data,"sp1.e=([a-z0-9]+)")
        mediaurl = "http://180upload.com/"+code
        itemlist.append( Item(channel=__channel__, action="play" , title="Ver en 180upload" , url=mediaurl, thumbnail=item.thumbnail, fanart=item.thumbnail, plot=item.plot, server="one80upload", folder=False))
    except:
        pass

    #upafile: spu.e=idyoybh552bf
    try:
        code = scrapertools.get_match(data,"spu.e=([a-z0-9]+)")
        mediaurl = "http://upafile.com/"+code
        itemlist.append( Item(channel=__channel__, action="play" , title="Ver en upafile" , url=mediaurl, thumbnail=item.thumbnail, fanart=item.thumbnail, plot=item.plot, server="upafile", folder=False))
    except:
        pass

    # Site's own player: flashvars="file=<url>&..."
    try:
        mediaurl = scrapertools.get_match(data,'flashvars\="file\=([^\&]+)\&')
        itemlist.append( Item(channel=__channel__, action="play" , title="Ver en jkanime" , url=mediaurl, thumbnail=item.thumbnail, fanart=item.thumbnail, plot=item.plot, server="directo", folder=False))
    except:
        pass

    return itemlist

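# For the "directo" server the scraped URL is only a redirector: the real
# stream URL comes back in the "location" response header when the link is
# requested with the jkplayer SWF as Referer.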
def play(item):
    logger.info("[jkanime.py] play")

    itemlist = []

    if item.server=="directo":
        headers = []
        headers.append(["User-Agent","Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:18.0) Gecko/20100101 Firefox/18.0"])
        headers.append(["Referer","http://jkanime.net/assets/images/players/jkplayer.swf"])
        location = scrapertools.get_header_from_response( item.url , headers=headers , header_to_get="location" )
        logger.info("location="+location)
        #http://jkanime.net/stream/jkget/00e47553476031a35fd158881ca9d49f/32021b728c40bb5779190e0a95b72d40/?t=6e
        itemlist.append( Item(channel=__channel__, action="play" , title=item.title , url=location, thumbnail=item.thumbnail, fanart=item.thumbnail, plot=item.plot, server="directo", folder=False))
    else:
        itemlist.append( Item(channel=__channel__, action="play" , title=item.title , url=item.url, thumbnail=item.thumbnail, fanart=item.thumbnail, plot=item.plot, server=item.server, folder=False))
    return itemlist

# Automatic channel check: this function must return True if everything in the channel is OK.
def test():
    bien = True

    # mainlist
    mainlist_items = mainlist(Item())

    # Check that every option returns something (except the search entry)
    for mainlist_item in mainlist_items:
        if mainlist_item.action!="search":
            itemlist = globals()[mainlist_item.action](mainlist_item)
            if len(itemlist)==0:
                return False

    # Check whether any episode from the "Últimos" listing returns mirrors
    series_items = ultimos(mainlist_items[0])
    episodios_items = episodios(series_items[0])

    bien = False
    for episodio_item in episodios_items:
        mirrors = findvideos(item=episodio_item)
        if len(mirrors)>0:
            bien = True
            break

    return bien