Advertisement
g6man

DaumMovie

Aug 31st, 2018
101
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
Python 9.61 KB | None | 0 0
  1. # -*- coding: utf-8 -*-
  2. # Daum Movie
  3.  
  4. import urllib
  5. import urlparse
  6. import unicodedata
  7.  
# Relative API paths on the proxy host (joined against Prefs['base_url']
# in get_json_data). Each template takes exactly one %s substitution.
DAUM_MOVIE_SRCH = "movie_search.php?name=%s"    # %s: URL-encoded movie title
DAUM_MOVIE_DETAIL = "movie_detail.php?id=%s"    # %s: Daum movie id
DAUM_MOVIE_CAST = "movie_cast.php?id=%s"        # %s: Daum movie id
DAUM_MOVIE_PHOTO = "movie_photo.php?id=%s"      # %s: Daum movie id

DAUM_TV_SRCH = "tv_search.php?name=%s"          # %s: URL-encoded show name
DAUM_TV_DETAIL = "tv_detail.php?id=%s"          # %s: Daum TV id
DAUM_TV_CAST = "tv_cast.php?id=%s"              # %s: Daum TV id
DAUM_TV_EPISODE = "tv_episode.php?id=%s"        # %s: Daum TV id

# Upper bound on response size passed to JSON.ObjectFromURL (10 MiB).
JSON_MAX_SIZE = 10 * 1024 * 1024
  19.  
# Map Korean (KMRB) content-rating labels as they appear on Daum to both
# the KMRB code and a rough MPAA equivalent; update_daum_movie selects one
# of the two via the 'use_mpaa' preference. Keys must stay byte-identical
# to the labels the API returns.
DAUM_CR_TO_MPAA_CR = {
    u'전체관람가': {            # "General audiences"
        'KMRB': 'kr/A',
        'MPAA': 'G'
    },
    u'12세이상관람가': {        # "12 and over"
        'KMRB': 'kr/12',
        'MPAA': 'PG'
    },
    u'15세이상관람가': {        # "15 and over"
        'KMRB': 'kr/15',
        'MPAA': 'PG-13'
    },
    u'청소년관람불가': {        # "No minors admitted"
        'KMRB': 'kr/R',
        'MPAA': 'R'
    },
    u'제한상영가': {            # "Restricted screening" — seen on "One Summer Night" (2016)
        'KMRB': 'kr/X',
        'MPAA': 'NC-17'
    }
}
  42.  
  43.  
  44. def Start():
  45.     HTTP.CacheTime = CACHE_1HOUR * 12
  46.     HTTP.Headers['Accept'] = 'text/html, application/json'
  47.  
  48.  
  49. ####################################################################################################
  50. def to_int(value):
  51.     try:
  52.         return int(value)
  53.     except ValueError:
  54.         return 0
  55.  
  56.  
  57. def get_json_data(url):
  58.     try:
  59.         base_url = Prefs['base_url']
  60.         url = urlparse.urljoin(base_url, url)
  61.         json = JSON.ObjectFromURL(url=url, max_size=JSON_MAX_SIZE)
  62.     except:
  63.         Log("Can't get JSON, url=%s" % url)
  64.         return None
  65.  
  66.     if 'error' in json:
  67.         Log('JSON is error, url=%s, errorMsg=%s' % (url, json['error']))
  68.         return None
  69.  
  70.     if not json or 'data' not in json:
  71.         Log("JSON is empty, url=%s" % url)
  72.         return False
  73.  
  74.     return json['data']
  75.  
  76.  
  77. def update_photo(metadata, url):
  78.     max_poster = int(Prefs['max_num_posters'])
  79.     max_art = int(Prefs['max_num_arts'])
  80.     poster_count = 0
  81.     art_count = 0
  82.  
  83.     data = get_json_data(url)
  84.     if not data:
  85.         return False
  86.  
  87.     for photo_url in data['posters']:        
  88.         if poster_count >= max_poster:
  89.             break
  90.  
  91.         try:
  92.             metadata.posters[photo_url] = Proxy.Preview(HTTP.Request(photo_url), sort_order=poster_count)
  93.             poster_count += 1
  94.         except:
  95.             pass
  96.  
  97.     for photo_url in data['arts']:        
  98.         if art_count >= max_art:
  99.             break
  100.  
  101.         try:
  102.             metadata.art[photo_url] = Proxy.Preview(HTTP.Request(photo_url), sort_order=art_count)
  103.             art_count += 1
  104.         except:
  105.             pass
  106.  
  107.     Log('Total %d posters, %d artworks' % (poster_count, art_count))
  108.  
  109.     return bool(poster_count)
  110.  
  111.  
  112. def update_cast(cast_list, metadata):
  113.     if not cast_list:
  114.         return
  115.  
  116.     metadata.clear()
  117.     for cast in cast_list:
  118.         new_metadata = metadata.new()
  119.         if 'role' in cast:
  120.             new_metadata.role = cast['role']
  121.  
  122.         if 'name' in cast:
  123.             new_metadata.name = cast['name']
  124.  
  125.         if 'photo' in cast:
  126.             new_metadata.photo = cast['photo']
  127.  
  128.  
  129. def search_daum_movie(results, media, lang):
  130.     media_name = media.name
  131.     media_name = unicodedata.normalize('NFKC', unicode(media_name)).strip()
  132.     Log("search: %s %s" % (media_name, media.year))
  133.  
  134.     url = DAUM_MOVIE_SRCH % urllib.quote(media_name.encode('utf8'))
  135.     search_data = get_json_data(url)
  136.     if not search_data:
  137.         return
  138.  
  139.     media_year = to_int(media.year)
  140.     for item in search_data:
  141.         year = str(item['year'])
  142.         id = str(item['id'])
  143.         title = item['title']        
  144.         year_diff = abs(media_year - to_int(year))
  145.         match_title = title == media_name
  146.  
  147.         if len(search_data) == 1:
  148.             score = 80
  149.         else:
  150.             if year_diff == 0:
  151.                 score = 95 if match_title else 75
  152.             elif year_diff == 1:
  153.                 score = 90 if match_title else 70
  154.             else:
  155.                 score = 15 if match_title else 10
  156.  
  157.         Log('ID=%s, media_name=%s, title=%s, year=%s, score=%d' % (id, media_name, title, year, score))
  158.         results.Append(MetadataSearchResult(id=id, name=title, year=year, score=score, lang=lang))
  159.  
  160.  
def update_daum_movie(metadata):
    """Fill movie *metadata* from the Daum proxy's detail/cast/photo endpoints.

    metadata.id must already hold the Daum movie id chosen during search.
    """
    # (1) from detail page
    url = DAUM_MOVIE_DETAIL % metadata.id
    data = get_json_data(url)
    if not data:
        return

    # Choose MPAA-style ratings when the user opted in; otherwise KMRB codes.
    cr_key = 'MPAA' if Prefs['use_mpaa'] else 'KMRB'
    metadata.title = data['title']
    # NOTE(review): assumes 'year', 'rating' and 'duration' are always present
    # and numeric in the response — a missing/None value raises here; verify
    # against the API.
    metadata.year = int(data['year'])
    metadata.original_title = data['original_title']
    metadata.rating = float(data['rating'])
    metadata.tagline = data['tagline']
    metadata.genres.clear()
    for genre in data['genres']:
        metadata.genres.add(genre)
    metadata.countries.clear()
    for country in data['countries']:
        metadata.countries.add(country)
    if data['originally_available_at']:
        metadata.originally_available_at = Datetime.ParseDate(data['originally_available_at']).date()
    metadata.duration = int(data['duration'])
    if data['content_rating']:
        cr = data['content_rating']
        # Pattern is Korean for 'US <rating> rating': when Daum quotes a US
        # rating, use it verbatim instead of the Korean label.
        match = Regex(u'미국 (.*) 등급').search(cr)
        if match:
            metadata.content_rating = match.group(1)
        elif cr in DAUM_CR_TO_MPAA_CR:
            metadata.content_rating = DAUM_CR_TO_MPAA_CR[cr][cr_key]
        else:
            # Unknown Korean label: keep it, namespaced under 'kr/'.
            metadata.content_rating = 'kr/' + cr
    metadata.summary = data['summary']
    # Remember the detail-page poster as a fallback for step (3).
    poster_url = data['poster_url']

    # (2) cast crew
    url = DAUM_MOVIE_CAST % metadata.id
    cast_data = get_json_data(url)
    if cast_data:
        update_cast(cast_data['directors'], metadata.directors)
        update_cast(cast_data['producers'], metadata.producers)
        update_cast(cast_data['writers'], metadata.writers)
        update_cast(cast_data['roles'], metadata.roles)

    # (3) from photo page
    # Only fall back to the single detail-page poster when the photo endpoint
    # produced no posters at all.
    if not update_photo(metadata, DAUM_MOVIE_PHOTO % metadata.id) and poster_url:
        try:
            poster = HTTP.Request(poster_url)
            metadata.posters[poster_url] = Proxy.Media(poster)
        except:
            # Best-effort: a failed poster download is non-fatal.
            pass
  211.  
  212.  
  213. def search_daum_tv(results, media, lang):
  214.     media_name = media.show
  215.     media_name = unicodedata.normalize('NFKC', unicode(media_name)).strip()
  216.     Log("search: %s %s" % (media_name, media.year))
  217.  
  218.     url = DAUM_TV_SRCH % (urllib.quote(media_name.encode('utf8')))
  219.     search_data = get_json_data(url)
  220.     if not search_data:
  221.         return
  222.  
  223.     for item in search_data:
  224.         year = str(item['year'])
  225.         id = str(item['id'])
  226.         title = item['title']
  227.         if year == media.year:
  228.             score = 95
  229.         elif len(search_data) == 1:
  230.             score = 80
  231.         else:
  232.             score = 10
  233.  
  234.         Log('ID=%s, media_name=%s, title=%s, year=%s, score=%d' % (id, media_name, title, year, score))
  235.         results.Append(MetadataSearchResult(id=id, name=title, year=year, score=score, lang=lang))
  236.  
  237.  
def update_daum_tv(metadata):
    """Fill TV-show *metadata* and its episodes from the Daum proxy.

    metadata.id must already hold the Daum TV id chosen during search.
    """
    # (1) from detail page
    url = DAUM_TV_DETAIL % metadata.id
    data = get_json_data(url)
    if not data:
        return

    metadata.title = data['title']
    metadata.original_title = data['original_title']
    # NOTE(review): assumes 'rating' is always present and numeric — a
    # missing/None value raises here; verify against the API.
    metadata.rating = float(data['rating'])
    metadata.genres.clear()
    for genre in data['genres']:
        metadata.genres.add(genre)
    if data['originally_available_at']:
        metadata.originally_available_at = Datetime.ParseDate(data['originally_available_at']).date()
    metadata.summary = data['summary']
    poster_url = data['poster_url']
    if poster_url:
        try:
            poster = HTTP.Request(poster_url)
            metadata.posters[poster_url] = Proxy.Media(poster)
        except:
            # Best-effort: a failed poster download is non-fatal.
            pass

    # (2) cast crew
    url = DAUM_TV_CAST % metadata.id
    cast_data = get_json_data(url)
    if not cast_data:
        # Substitute empty lists so the episode loop below can still index
        # 'directors'/'producers'/'writers' without re-checking.
        cast_data = {
            'roles': [],
            'directors': [],
            'producers': [],
            'writers': [],
        }

    update_cast(cast_data['roles'], metadata.roles)

    # (3) from episode page
    url = DAUM_TV_EPISODE % metadata.id
    episode_data = get_json_data(url)
    if not episode_data:
        return

    for item in episode_data:
        # NOTE(review): 'seasons'/'episodes' look like scalar indices despite
        # the plural key names — confirm against the API response shape.
        episode = metadata.seasons[item['seasons']].episodes[item['episodes']]
        episode.title = item['title']
        episode.summary = item['summary']
        if item['originally_available_at']:
            episode.originally_available_at = Datetime.ParseDate(item['originally_available_at'], '%Y%m%d').date()
        episode.rating = float(item['rating'])

        # NOTE(review): show-level crew is copied onto every episode —
        # presumably no per-episode crew is available; verify.
        update_cast(cast_data['directors'], episode.directors)
        update_cast(cast_data['producers'], episode.producers)
        update_cast(cast_data['writers'], episode.writers)
  293.  
  294. ####################################################################################################
class DaumMovieAgent(Agent.Movies):
    """Plex movie metadata agent backed by the Daum Movie proxy API."""

    name = "Daum Movie"
    primary_provider = True
    languages = [Locale.Language.Korean]
    # Accept artwork/extras contributed by the local-media agent.
    accepts_from = ['com.plexapp.agents.localmedia']

    def search(self, results, media, lang, manual=False):
        # Delegate to the module-level search helper.
        return search_daum_movie(results, media, lang)

    def update(self, metadata, media, lang):
        # Delegate to the module-level update helper.
        update_daum_movie(metadata)
  306.  
  307.  
class DaumMovieTvAgent(Agent.TV_Shows):
    """Plex TV-show metadata agent backed by the Daum Movie proxy API."""

    name = "Daum Movie"
    primary_provider = True
    languages = [Locale.Language.Korean]
    # Accept artwork/extras contributed by the local-media agent.
    accepts_from = ['com.plexapp.agents.localmedia']

    def search(self, results, media, lang, manual=False):
        # Delegate to the module-level search helper.
        return search_daum_tv(results, media, lang)

    def update(self, metadata, media, lang):
        # Delegate to the module-level update helper.
        update_daum_tv(metadata)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement