Advertisement
Guest User

Old YouTubeCore.py

a guest
Oct 3rd, 2012
193
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 37.76 KB | None | 0 0
  1. '''
  2. YouTube plugin for XBMC
  3. Copyright (C) 2010-2011 Tobias Ussing And Henrik Mosgaard Jensen
  4.  
  5. This program is free software: you can redistribute it and/or modify
  6. it under the terms of the GNU General Public License as published by
  7. the Free Software Foundation, either version 3 of the License, or
  8. (at your option) any later version.
  9.  
  10. This program is distributed in the hope that it will be useful,
  11. but WITHOUT ANY WARRANTY; without even the implied warranty of
  12. MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  13. GNU General Public License for more details.
  14.  
  15. You should have received a copy of the GNU General Public License
  16. along with this program. If not, see <http://www.gnu.org/licenses/>.
  17. '''
  18.  
  19. import sys
  20. import urllib
  21. import urllib2
  22. import re
  23. import time
  24. import socket
  25. import xml.dom.minidom as minidom
  26. try:
  27. import simplejson as json
  28. except ImportError:
  29. import json
  30.  
  31. # ERRORCODES:
  32. # 200 = OK
  33. # 303 = See other (returned an error message)
  34. # 500 = uncaught error
  35.  
  36.  
  37. class url2request(urllib2.Request):
  38. """Workaround for using DELETE with urllib2"""
  39. def __init__(self, url, method, data=None, headers={}, origin_req_host=None, unverifiable=False):
  40. self._method = method
  41. urllib2.Request.__init__(self, url, data, headers, origin_req_host, unverifiable)
  42.  
  43. def get_method(self):
  44. if self._method:
  45. return self._method
  46. else:
  47. return urllib2.Request.get_method(self)
  48.  
  49.  
class YouTubeCore():
    """Low-level GData/YouTube API access layer used by the rest of the plugin."""

    # Developer key sent as the X-GData-Key header on API requests.
    APIKEY = "AI39si6hWF7uOkKh4B9OEAX-gK337xbwR9Vax-cdeF9CF9iNAcQftT8NVhEXaORRLHAmHxj6GjM-Prw04odK4FxACFfKkiH9lg"

    #===============================================================================
    # The time parameter restricts the search to videos uploaded within the specified time.
    # Valid values for this parameter are today (1 day), this_week (7 days), this_month (1 month) and all_time.
    # The default value for this parameter is all_time.
    #
    # This parameter is supported for search feeds as well as for the top_rated, top_favorites, most_viewed,
    # most_popular, most_discussed and most_responded standard feeds.
    #===============================================================================

    # Shared URL templates; "thumbnail" is filled with a video id.
    urls = {}
    urls['batch'] = "http://gdata.youtube.com/feeds/api/videos/batch"
    urls['thumbnail'] = "http://i.ytimg.com/vi/%s/0.jpg"
  66. def __init__(self):
  67. self.settings = sys.modules["__main__"].settings
  68. self.language = sys.modules["__main__"].language
  69. self.plugin = sys.modules["__main__"].plugin
  70. self.dbg = sys.modules["__main__"].dbg
  71. self.storage = sys.modules["__main__"].storage
  72. self.cache = sys.modules["__main__"].cache
  73. self.login = sys.modules["__main__"].login
  74. self.utils = sys.modules["__main__"].utils
  75. self.common = sys.modules["__main__"].common
  76. urllib2.install_opener(sys.modules["__main__"].opener)
  77.  
  78. timeout = [5, 10, 15, 20, 25][int(self.settings.getSetting("timeout"))]
  79. if not timeout:
  80. timeout = "15"
  81. socket.setdefaulttimeout(float(timeout))
  82. return None
  83.  
  84. def delete_favorite(self, params={}):
  85. self.common.log("")
  86. get = params.get
  87.  
  88. delete_url = "http://gdata.youtube.com/feeds/api/users/default/favorites"
  89. delete_url += "/" + get('editid')
  90. result = self._fetchPage({"link": delete_url, "api": "true", "login": "true", "auth": "true", "method": "DELETE"})
  91. return (result["content"], result["status"])
  92.  
  93. def remove_contact(self, params={}):
  94. self.common.log("")
  95. get = params.get
  96. delete_url = "http://gdata.youtube.com/feeds/api/users/default/contacts"
  97. delete_url += "/" + get("contact")
  98. result = self._fetchPage({"link": delete_url, "api": "true", "login": "true", "auth": "true", "method": "DELETE"})
  99. return (result["content"], result["status"])
  100.  
  101. def remove_subscription(self, params={}):
  102. self.common.log("")
  103. get = params.get
  104. delete_url = "http://gdata.youtube.com/feeds/api/users/default/subscriptions"
  105. delete_url += "/" + get("editid")
  106. result = self._fetchPage({"link": delete_url, "api": "true", "login": "true", "auth": "true", "method": "DELETE"})
  107. return (result["content"], result["status"])
  108.  
  109. def add_contact(self, params={}):
  110. self.common.log("")
  111. get = params.get
  112. url = "http://gdata.youtube.com/feeds/api/users/default/contacts"
  113. add_request = '<?xml version="1.0" encoding="UTF-8"?> <entry xmlns="http://www.w3.org/2005/Atom" xmlns:yt="http://gdata.youtube.com/schemas/2007"><yt:username>%s</yt:username></entry>' % get("contact")
  114. result = self._fetchPage({"link": url, "api": "true", "login": "true", "auth": "true", "request": add_request})
  115. return (result["content"], result["status"])
  116.  
  117. def add_favorite(self, params={}):
  118. get = params.get
  119. url = "http://gdata.youtube.com/feeds/api/users/default/favorites"
  120. add_request = '<?xml version="1.0" encoding="UTF-8"?><entry xmlns="http://www.w3.org/2005/Atom"><id>%s</id></entry>' % get("videoid")
  121. result = self._fetchPage({"link": url, "api": "true", "login": "true", "auth": "true", "request": add_request})
  122. return (result["content"], result["status"])
  123.  
  124. def add_subscription(self, params={}):
  125. self.common.log("")
  126. get = params.get
  127. url = "http://gdata.youtube.com/feeds/api/users/default/subscriptions"
  128. add_request = '<?xml version="1.0" encoding="UTF-8"?><entry xmlns="http://www.w3.org/2005/Atom" xmlns:yt="http://gdata.youtube.com/schemas/2007"> <category scheme="http://gdata.youtube.com/schemas/2007/subscriptiontypes.cat" term="user"/><yt:username>%s</yt:username></entry>' % get("channel")
  129. result = self._fetchPage({"link": url, "api": "true", "login": "true", "auth": "true", "request": add_request})
  130. return (result["content"], result["status"])
  131.  
  132. def add_playlist(self, params={}):
  133. get = params.get
  134. url = "http://gdata.youtube.com/feeds/api/users/default/playlists"
  135. add_request = '<?xml version="1.0" encoding="UTF-8"?><entry xmlns="http://www.w3.org/2005/Atom" xmlns:yt="http://gdata.youtube.com/schemas/2007"><title type="text">%s</title><summary>%s</summary></entry>' % (get("title"), get("summary"))
  136. result = self._fetchPage({"link": url, "api": "true", "login": "true", "auth": "true", "request": add_request})
  137. return (result["content"], result["status"])
  138.  
  139. def del_playlist(self, params={}):
  140. self.common.log("")
  141. get = params.get
  142. url = "http://gdata.youtube.com/feeds/api/users/default/playlists/%s" % (get("playlist"))
  143. result = self._fetchPage({"link": url, "api": "true", "login": "true", "auth": "true", "method": "DELETE"})
  144. return (result["content"], result["status"])
  145.  
  146. def add_to_playlist(self, params={}):
  147. get = params.get
  148. self.common.log("")
  149. url = "http://gdata.youtube.com/feeds/api/playlists/%s" % get("playlist")
  150. add_request = '<?xml version="1.0" encoding="UTF-8"?><entry xmlns="http://www.w3.org/2005/Atom" xmlns:yt="http://gdata.youtube.com/schemas/2007"><id>%s</id></entry>' % get("videoid")
  151. result = self._fetchPage({"link": url, "api": "true", "login": "true", "auth": "true", "request": add_request})
  152. return (result["content"], result["status"])
  153.  
  154. def remove_from_playlist(self, params={}):
  155. self.common.log("")
  156. get = params.get
  157. url = "http://gdata.youtube.com/feeds/api/playlists/%s/%s" % (get("playlist"), get("playlist_entry_id"))
  158. result = self._fetchPage({"link": url, "api": "true", "login": "true", "auth": "true", "method": "DELETE"})
  159. return (result["content"], result["status"])
  160.  
  161. def remove_from_watch_later(self, params={}):
  162. self.common.log("")
  163. get = params.get
  164. url = "https://gdata.youtube.com/feeds/api/users/default/watch_later/%s" % get("playlist_entry_id")
  165. result = self._fetchPage({"link": url, "api": "true", "login": "true", "auth": "true", "method": "DELETE"})
  166. return (result["content"], result["status"])
  167.  
    def set_video_watched(self, params={}):
        # NOTE(review): despite the name, this issues a DELETE against the
        # authenticated user's watch_later feed keyed by params['videoid'] -
        # i.e. it removes the video from Watch Later (presumably once playback
        # has started). Confirm intent against callers before renaming.
        self.common.log("")
        get = params.get
        url = "https://gdata.youtube.com/feeds/api/users/default/watch_later/%s" % get("videoid")
        result = self._fetchPage({"link": url, "api": "true", "login": "true", "auth": "true", "method": "DELETE"})
        return (result["content"], result["status"])
  174.  
  175. def getCategoriesFolderInfo(self, xml, params={}):
  176. self.common.log("")
  177.  
  178. dom = minidom.parseString(xml)
  179. entries = dom.getElementsByTagName("atom:category")
  180.  
  181. folders = []
  182. for node in entries:
  183. folder = {}
  184.  
  185. if node.getElementsByTagName("yt:deprecated"):
  186. continue
  187. folder['Title'] = node.getAttribute("label")
  188.  
  189. folder['category'] = node.getAttribute("term")
  190. folder["icon"] = "explore"
  191. folder["thumbnail"] = "explore"
  192. folder["feed"] = "feed_category"
  193.  
  194. folders.append(folder)
  195.  
  196. return folders
  197.  
    def getFolderInfo(self, xml, params={}):
        """Parse a GData Atom feed of folder-like entries (contacts,
        subscriptions, playlists, ...) into a list of folder dicts.

        xml    -- raw Atom feed body returned by the API.
        params -- plugin navigation parameters; params["feed"] and
                  params["user_feed"] decide which extra keys each folder gets.
        Returns a list of dicts; a "Next" folder is appended when the feed is
        paginated.
        """
        get = params.get

        dom = minidom.parseString(xml)
        links = dom.getElementsByTagName("link")
        entries = dom.getElementsByTagName("entry")
        show_next = False

        # Find out if there are more pages: a rel="next" link means pagination.
        if (len(links)):
            for link in links:
                lget = link.attributes.get
                if (lget("rel").value == "next"):
                    show_next = True
                    break

        folders = []
        for node in entries:
            folder = {}

            if get("feed") != "feed_categories":
                folder["login"] = "true"
            # Strip the activity-feed prefixes so only the plain name remains.
            folder['Title'] = node.getElementsByTagName("title").item(0).firstChild.nodeValue.replace('Activity of : ', '').replace('Videos published by : ', '').encode("utf-8")
            folder['published'] = self._getNodeValue(node, "published", "2008-07-05T19:56:35.000-07:00")

            # The trailing segment of the atom id is the edit id used by
            # the remove_* / del_* methods.
            if node.getElementsByTagName("id"):
                entryid = self._getNodeValue(node, "id", "")
                entryid = entryid[entryid.rfind(":") + 1:]
                folder["editid"] = entryid

            thumb = ""
            if get("user_feed") == "contacts":
                folder["thumbnail"] = "user"
                folder["contact"] = folder["Title"]
                folder["store"] = "contact_options"
                folder["folder"] = "true"

            if get("user_feed") == "subscriptions":
                folder["channel"] = folder["Title"]

            if get("user_feed") == "playlists":
                folder['playlist'] = self._getNodeValue(node, 'yt:playlistId', '')
                folder["user_feed"] = "playlist"

            # Prefer a previously stored thumbnail when one exists.
            params["thumb"] = "true"
            thumb = self.storage.retrieve(params, "thumbnail", folder)
            if thumb:
                folder["thumbnail"] = thumb

            folders.append(folder)

        if show_next:
            self.utils.addNextFolder(folders, params)

        return folders
  253.  
  254. def getBatchDetailsOverride(self, items, params={}):
  255. ytobjects = []
  256. videoids = []
  257.  
  258. for video in items:
  259. for k, v in video.items():
  260. if k == "videoid":
  261. videoids.append(v)
  262.  
  263. (ytobjects, status) = self.getBatchDetails(videoids, params)
  264.  
  265. for video in items:
  266. videoid = video["videoid"]
  267. for item in ytobjects:
  268. if item['videoid'] == videoid:
  269. for k, v in video.items():
  270. item[k] = v
  271.  
  272. while len(items) > len(ytobjects):
  273. ytobjects.append({'videoid': 'false'})
  274.  
  275. return (ytobjects, 200)
  276.  
  277. def getBatchDetailsThumbnails(self, items, params={}):
  278. ytobjects = []
  279. videoids = []
  280.  
  281. for (videoid, thumb) in items:
  282. videoids.append(videoid)
  283.  
  284. (tempobjects, status) = self.getBatchDetails(videoids, params)
  285.  
  286. for i in range(0, len(items)):
  287. (videoid, thumbnail) = items[i]
  288. for item in tempobjects:
  289. if item['videoid'] == videoid:
  290. item['thumbnail'] = thumbnail
  291. ytobjects.append(item)
  292. break
  293.  
  294. while len(items) > len(ytobjects):
  295. ytobjects.append({'videoid': 'false'})
  296.  
  297. return (ytobjects, 200)
  298.  
    def getBatchDetails(self, items, params={}):
        """Resolve a list of video id strings to full video dicts via the GData
        batch endpoint, using (and refreshing) the "videoidcache".

        items  -- iterable of video ids; falsy entries are skipped.
        params -- navigation params forwarded to getVideoInfo.
        Returns (ytobjects, status): status is 200 when anything was resolved,
        otherwise 500.
        """
        self.common.log("params: " + repr(params))
        self.common.log("items: " + str(len(items)))
        # Batch request envelope; <entry> elements are appended in between.
        request_start = "<feed xmlns='http://www.w3.org/2005/Atom'\n xmlns:media='http://search.yahoo.com/mrss/'\n xmlns:batch='http://schemas.google.com/gdata/batch'\n xmlns:yt='http://gdata.youtube.com/schemas/2007'>\n <batch:operation type='query'/> \n"
        request_end = "</feed>"

        video_request = ""

        ytobjects = []
        status = 500
        i = 1  # 1-based count of entries pending in the current batch.

        # Cache holds repr()'d video dicts keyed by videoid, aligned with items.
        temp_objs = self.cache.getMulti("videoidcache", items)

        for index, videoid in enumerate(items):
            if index < len(temp_objs):
                if temp_objs[index]:
                    # HACK: eval() of cached repr() output - executes arbitrary
                    # code if the cache is ever tampered with; ast.literal_eval
                    # would be the safe equivalent for these plain dicts.
                    ytobjects.append(eval(temp_objs[index]))
                    continue
            if videoid:
                video_request += "<entry> \n <id>http://gdata.youtube.com/feeds/api/videos/" + videoid + "</id>\n</entry> \n"
                if i == 50:  # Flush a full batch.
                    final_request = request_start + video_request + request_end
                    rstat = 403
                    # Retry while the batch response signals quota errors (403).
                    while rstat == 403:
                        result = self._fetchPage({"link": "http://gdata.youtube.com/feeds/api/videos/batch", "api": "true", "request": final_request})
                        rstat = self.common.parseDOM(result["content"], "batch:status", ret="code")
                        if len(rstat) > 0:
                            if int(rstat[len(rstat) - 1]) == 403:
                                self.common.log("quota exceeded. Waiting 5 seconds. " + repr(rstat))
                                rstat = 403
                                time.sleep(5)

                    temp = self.getVideoInfo(result["content"], params)
                    ytobjects += temp
                    video_request = ""
                    i = 1
                i += 1

        if i > 1:
            # Flush the final partial batch (no quota retry on this path).
            final_request = request_start + video_request + request_end
            result = self._fetchPage({"link": "http://gdata.youtube.com/feeds/api/videos/batch", "api": "true", "request": final_request})

            temp = self.getVideoInfo(result["content"], params)
            ytobjects += temp

        # Refresh the cache with everything resolved this run.
        save_data = {}
        for item in ytobjects:
            save_data[item["videoid"]] = repr(item)
        self.cache.setMulti("videoidcache", save_data)

        if len(ytobjects) > 0:
            status = 200

        self.common.log("ytobjects: " + str(len(ytobjects)))

        return (ytobjects, status)
  356.  
  357. #===============================================================================
  358. #
  359. # Internal functions to YouTubeCore.py
  360. #
  361. # Return should be value(True for bool functions), or False if failed.
  362. #
  363. # False MUST be handled properly in External functions
  364. #
  365. #===============================================================================
  366.  
    def _fetchPage(self, params={}):  # This does not handle cookie timeout for _httpLogin
        """Fetch a URL described by params and return a result dict.

        Recognised params keys: link, api, login, auth, method, request,
        url_data, proxy, referer, no-language-cookie, no_verify_age, error.
        Returns {"status", "content", "error", ...} with status 200 on
        success, 303 for a reported error message, 500 otherwise. Retries by
        recursing with params["error"] incremented; gives up after 3 attempts.
        """
        if self.settings.getSetting("force_proxy") == "true" and self.settings.getSetting("proxy"):
            params["proxy"] = self.settings.getSetting("proxy")

        get = params.get
        link = get("link")
        ret_obj = {"status": 500, "content": "", "error": 0}
        cookie = ""

        # NOTE(review): the "and False" makes this branch dead code; params is
        # always logged in full (including any request payload).
        if (get("url_data") or get("request") or get("hidden")) and False:
            self.common.log("called for : " + repr(params['link']))
        else:
            self.common.log("called for : " + repr(params))

        # Append the OAuth2 token as a query parameter when auth is requested.
        if get("auth", "false") == "true":
            self.common.log("got auth")
            if self._getAuth():
                if link.find("?") > -1:
                    link += "&oauth_token=" + self.settings.getSetting("oauth2_access_token")
                else:
                    link += "?oauth_token=" + self.settings.getSetting("oauth2_access_token")

                self.common.log("updated link: " + link)
            else:
                self.common.log("couldn't get login token")

        # Abort when there is no link or too many retries have accumulated.
        if not link or get("error", 0) > 2:
            self.common.log("giving up")
            return ret_obj

        # Build the request object: form POST, GData XML POST, or plain verb.
        if get("url_data"):
            request = urllib2.Request(link, urllib.urlencode(get("url_data")))
            request.add_header('Content-Type', 'application/x-www-form-urlencoded')
        elif get("request", "false") == "false":
            if get("proxy"):
                proxy = get("proxy")
                link = proxy + urllib.quote(link)
                self.common.log("got proxy: %s" % link)
            else:
                self.common.log("got default: %s" % link)

            request = url2request(link, get("method", "GET"))
        else:
            self.common.log("got request")
            request = urllib2.Request(link, get("request"))
            request.add_header('X-GData-Client', "")
            request.add_header('Content-Type', 'application/atom+xml')
            request.add_header('Content-Length', str(len(get("request"))))

        # NOTE(review): when the proxy setting is "" the find("") test is
        # always true, so a Referer header is added from an empty proxy string.
        if get("proxy") or link.find(self.settings.getSetting("proxy")) > -1:
            proxy = self.settings.getSetting("proxy")
            referer = proxy[:proxy.rfind("/")]
            self.common.log("Added proxy refer: %s" % referer)

            request.add_header('Referer', referer)

        if get("api", "false") == "true":
            self.common.log("got api")
            request.add_header('GData-Version', '2.1')
            request.add_header('X-GData-Key', 'key=' + self.APIKEY)
        else:
            request.add_header('User-Agent', self.common.USERAGENT)

        # Force English results unless explicitly disabled.
        if get("no-language-cookie", "false") == "false":
            cookie += "PREF=f1=50000000&hl=en;"

        if get("login", "false") == "true":
            self.common.log("got login")
            if (self.settings.getSetting("username") == "" or self.settings.getSetting("user_password") == ""):
                self.common.log("_fetchPage, login required but no credentials provided")
                ret_obj["status"] = 303
                ret_obj["content"] = self.language(30622)
                return ret_obj

            # This should be a call to self.login._httpLogin()
            if self.settings.getSetting("login_info") == "":
                if isinstance(self.login, str):
                    self.login = sys.modules["__main__"].login
                self.login._httpLogin()

            if self.settings.getSetting("login_info") != "":
                info = self.settings.getSetting("login_info")
                SID = self.settings.getSetting("SID")
                cookie += 'LOGIN_INFO=' + info + ';SID=' + SID + ';'

        if get("referer", "false") != "false":
            self.common.log("Added referer: %s" % get("referer"))
            request.add_header('Referer', get("referer"))

        try:
            self.common.log("connecting to server... %s" % link)

            if cookie:
                self.common.log("Setting cookie: " + cookie)
                request.add_header('Cookie', cookie)

            con = urllib2.urlopen(request)

            ret_obj["content"] = con.read()
            ret_obj["location"] = link
            ret_obj["new_url"] = con.geturl()
            ret_obj["header"] = str(con.info())
            con.close()

            self.common.log("Result: %s " % repr(ret_obj), 9)

            # Return result if it isn't age restricted
            if (ret_obj["content"].find("verify-actions") == -1 and ret_obj["content"].find("verify-age-actions") == -1):
                self.common.log("done")
                ret_obj["status"] = 200
                return ret_obj
            else:
                self.common.log("found verify age request: " + repr(params))
                # We need login to verify age
                if not get("login"):
                    params["error"] = get("error", 0)
                    params["login"] = "true"
                    return self._fetchPage(params)
                elif get("no_verify_age", "false") == "false":
                    ret_obj["status"] = 303
                    ret_obj["content"] = self.language(30606)
                    return self._verifyAge(link, ret_obj["new_url"], params)
                else:
                    #ret_obj["status"] = 303
                    #ret_obj["content"] = self.language(30606)
                    return ret_obj

        except urllib2.HTTPError, e:
            cont = False
            err = str(e)
            msg = e.read()

            self.common.log("HTTPError : " + err)
            # NOTE(review): "or True" forces this log line for every HTTP
            # error, not only code 400.
            if e.code == 400 or True:
                self.common.log("Unhandled HTTPError : [%s] %s " % (e.code, msg), 1)

            # XML error bodies: look for quota/throttle codes and back off.
            if msg.find("<?xml") > -1:
                acted = False

                dom = minidom.parseString(msg)
                self.common.log(str(len(msg)))
                domains = dom.getElementsByTagName("domain")
                codes = dom.getElementsByTagName("code")
                for domain in domains:
                    self.common.log(repr(domain.firstChild.nodeValue), 5)
                    if domain.firstChild.nodeValue == "yt:quota":
                        self.common.log("Hit quota... sleeping for 100 seconds")
                        time.sleep(100)
                        acted = True

                if not acted:
                    for code in codes:
                        self.common.log(repr(code.firstChild.nodeValue), 5)
                        if code.firstChild.nodeValue == "too_many_recent_calls":
                            self.common.log("Hit quota... sleeping for 10 seconds")
                            time.sleep(10)
                            acted = True

            else:  # Legacy this.
                if msg.find("yt:quota") > 1:
                    self.common.log("Hit quota... sleeping for 10 seconds")
                    time.sleep(10)
                elif msg.find("too_many_recent_calls") > 1:
                    self.common.log("Hit quota... sleeping for 10 seconds")
                    time.sleep(10)
                elif err.find("Token invalid") > -1:
                    self.common.log("refreshing token")
                    self._oRefreshToken()
                elif err.find("User Rate Limit Exceeded") > -1:
                    self.common.log("Hit limit... Sleeping for 10 seconds")
                    time.sleep(10)
                else:
                    if e.fp:
                        cont = e.fp.read()
                        self.common.log("HTTPError - Headers: " + str(e.headers) + " - Content: " + cont)

            # Retry with an incremented error counter (capped by the guard above).
            params["error"] = get("error", 0) + 1
            ret_obj = self._fetchPage(params)

            if cont and ret_obj["content"] == "":
                ret_obj["content"] = cont
                ret_obj["status"] = 303

            return ret_obj

        except urllib2.URLError, e:
            err = str(e)
            self.common.log("URLError : " + err)
            # SSL failures are not retried; surface a translated error message.
            if err.find("SSL") > -1:
                ret_obj["status"] = 303
                ret_obj["content"] = self.language(30629)
                ret_obj["error"] = 3  # Tell _findErrors that we have an error
                return ret_obj

            time.sleep(3)
            params["error"] = get("error", 0) + 1
            ret_obj = self._fetchPage(params)
            return ret_obj

        except socket.timeout:
            self.common.log("Socket timeout")
            return ret_obj
  569.  
    def _findErrors(self, ret, silent=False):
        """Scan a _fetchPage result for a human-readable error message.

        Checks, in order: login error divs, 2-factor error divs, the
        "unavailable" message, API quota errors, and _httpLogin's bad-password
        span. Returns the stripped message string, the raw content when
        ret["error"] >= 3, or False when nothing was found.
        """
        self.common.log("")

        ## Couldn't find 2 factor or normal login
        error = self.common.parseDOM(ret['content'], "div", attrs={"class": "errormsg"})
        if len(error) == 0:
            # An error in 2-factor
            self.common.log("1")
            error = self.common.parseDOM(ret['content'], "div", attrs={"class": "error smaller"})
        if len(error) == 0:
            self.common.log("2")
            error = self.common.parseDOM(ret['content'], "div", attrs={"id": "unavailable-message"})
        if len(error) == 0 and ret['content'].find("yt:quota") > -1:
            self.common.log("3")
            # Api quota
            html = self.common.parseDOM(ret['content'], "error")
            error = self.common.parseDOM(html, "code")

        if len(error) == 0:  # Bad password for _httpLogin.
            error = self.common.parseDOM(ret['content'], "span", attrs={"class": "errormsg"})

            # Has a link. Lets remove that.
            if len(error) == 1:
                if error[0].find("<") > -1:
                    error[0] = error[0][0:error[0].find("<")]

        if len(error) > 0:
            self.common.log("4")
            error = error[0]
            # Drop anything after "[" and unescape &#39; before returning.
            error = urllib.unquote(error[0:error.find("[")]).replace("&#39;", "'")
            self.common.log("returning error : " + error.strip())
            return error.strip()

        # If no error was found. But fetchPage has an error level of 3+, return the fetchPage content.
        if len(error) == 0 and ret["error"] >= 3:
            self.common.log("Returning error from _fetchPage : " + repr(ret))
            return ret["content"]

        if not silent:
            self.common.log("couldn't find any errors: " + repr(ret))

        return False
  612.  
    def _verifyAge(self, org_link, next_url, params={}):
        """Walk YouTube's age-verification flow (up to 4 page fetches).

        org_link -- the original video page URL to re-fetch once verified.
        next_url -- the URL the verification redirect pointed at.
        Returns a _fetchPage-style result dict, or an error dict
        {"content", "status": 303} when verification keeps failing.
        """
        self.common.log("org_link : " + org_link + " - next_url: " + next_url)
        fetch_options = {"link": next_url, "no_verify_age": "true", "login": "true"}
        verified = False
        step = 0
        ret = {}
        while not verified and fetch_options and step < 6:
            self.common.log("Step : " + str(step))
            step += 1

            # Too many hops: give up and report whatever error the page shows.
            if step == 5:
                return {"content": self._findErrors(ret), "status": 303}

            ret = self._fetchPage(fetch_options)
            fetch_options = False

            # Check if we should login.
            new_url = self.common.parseDOM(ret["content"].replace("\n", " "), "form", attrs={"id": "gaia_loginform"}, ret="action")

            if len(new_url) == 1:
                if isinstance(self.login, str):
                    self.login = sys.modules["__main__"].login
                self.login._httpLogin({"page": ret})

            # Variant 1: explicit confirm-age form; POST it back with the
            # hidden fields plus the page's XSRF token.
            new_url = self.common.parseDOM(ret["content"], "form", attrs={"id": "confirm-age-form"}, ret="action")

            if len(new_url) > 0:
                self.common.log("Part A - Type 1")
                self.common.log("BLA:" + repr(ret))
                new_url = "http://www.youtube.com/" + new_url[0]
                next_url = self.common.parseDOM(ret["content"], "input", attrs={"name": "next_url"}, ret="value")
                set_racy = self.common.parseDOM(ret["content"], "input", attrs={"name": "set_racy"}, ret="value")
                # The XSRF token is embedded in inline javascript, not the form.
                session_token_start = ret["content"].find("'XSRF_TOKEN': '") + len("'XSRF_TOKEN': '")
                session_token_stop = ret["content"].find("',", session_token_start)
                session_token = ret["content"][session_token_start:session_token_stop]

                fetch_options = {"link": new_url, "no_verify_age": "true", "login": "true", "url_data": {"next_url": next_url[0], "set_racy": set_racy[0], "session_token": session_token}}
                continue
            else:
                # Variant 2: a "verify" button linking to the confirmation URL.
                self.common.log("Part A - Type 2")
                actions = self.common.parseDOM(ret["content"], "div", attrs={"id": "verify-actions"})
                if len(actions) > 0:
                    new_url = self.common.parseDOM(actions, "button", attrs={"type": "button"}, ret="href")
                    fetch_options = {"link": new_url[0].replace("&amp;", "&"), "no_verify_age": "true", "login": "true"}
                    continue

                new_url = self.common.parseDOM(ret["content"], "button", attrs={"href": "/verify.*?"}, ret="href")
                if len(new_url) > 0:
                    # Resolve the relative href against the scheme+host of the
                    # URL we actually landed on.
                    target_url = ret["new_url"]
                    if target_url.rfind("/") > 10:
                        target_url = target_url[:target_url.find("/", 10)]
                    else:
                        target_url += "/"

                    fetch_options = {"link": target_url + new_url[0], "no_verify_age": "true", "login": "true"}
                    continue

            # A player config on the page means the video is now accessible.
            if ret["content"].find("PLAYER_CONFIG") > -1:
                self.common.log("Found PLAYER_CONFIG. Verify successful")
                return ret

            if not fetch_options:
                self.common.log("Nothing hit, assume we are verified: " + repr(ret))
                fetch_options = {"link": org_link, "no_verify_age": "true", "login": "true"}
                return self._fetchPage(fetch_options)

        self.common.log("Done")
  680.  
    def _oRefreshToken(self):
        """Exchange the stored OAuth2 refresh token for a new access token.

        On success stores oauth2_access_token / oauth2_expires_at and returns
        True. On failure falls back to a clean interactive login and returns
        False; also returns False when no refresh token is stored.
        """
        self.common.log("")
        # Refresh token
        if self.settings.getSetting("oauth2_refresh_token"):
            url = "https://accounts.google.com/o/oauth2/token"
            # NOTE(review): client id/secret are hard-coded installed-app
            # credentials shipped with the plugin.
            data = {"client_id": "208795275779.apps.googleusercontent.com",
                    "client_secret": "sZn1pllhAfyonULAWfoGKCfp",
                    "refresh_token": self.settings.getSetting("oauth2_refresh_token"),
                    "grant_type": "refresh_token"}
            # Clear the old token first so a failed refresh can't reuse it.
            self.settings.setSetting("oauth2_access_token", "")
            ret = self._fetchPage({"link": url, "no-language-cookie": "true", "url_data": data})
            if ret["status"] == 200:
                oauth = ""
                try:
                    oauth = json.loads(ret["content"])
                except:
                    self.common.log("Except: " + repr(ret))
                    return False

                self.common.log("- returning, got result a: " + repr(oauth))

                self.settings.setSetting("oauth2_access_token", oauth["access_token"])
                # Store the absolute expiry time (epoch seconds) as a string.
                self.settings.setSetting("oauth2_expires_at", str(int(oauth["expires_in"]) + time.time()))
                self.common.log("Success")
                return True
            else:
                self.common.log("Failure, Trying a clean login")
                if isinstance(self.login, str):
                    self.login = sys.modules["__main__"].login
                self.login.login({"new": "true"})
                return False

        self.common.log("didn't even try")

        return False
  716.  
  717. def _getAuth(self):
  718. now = time.time()
  719. if self.settings.getSetting("oauth2_expires_at"):
  720. expire_at = float(self.settings.getSetting("oauth2_expires_at"))
  721. else:
  722. expire_at = now
  723.  
  724. self.common.log("Oauth expires in %s seconds" % int(expire_at - now))
  725.  
  726. if expire_at <= now:
  727. self._oRefreshToken()
  728.  
  729. auth = self.settings.getSetting("oauth2_access_token")
  730. self.common.log("oauth2_access_token: " + repr(auth), 5)
  731.  
  732. if (auth):
  733. self.common.log("returning stored auth")
  734. return auth
  735. else:
  736. if isinstance(self.login, str):
  737. self.login = sys.modules["__main__"].login
  738.  
  739. (result, status) = self.login.login()
  740.  
  741. if status == 200:
  742. self.common.log("returning new auth")
  743. return self.settings.getSetting("oauth2_access_token")
  744.  
  745. self.common.log("failed because login failed")
  746.  
  747. return False
  748.  
  749. def _getNodeAttribute(self, node, tag, attribute, default=""):
  750. if node.getElementsByTagName(tag).item(0):
  751. if node.getElementsByTagName(tag).item(0).hasAttribute(attribute):
  752. return node.getElementsByTagName(tag).item(0).getAttribute(attribute)
  753. return default
  754.  
  755. def _getNodeValue(self, node, tag, default=""):
  756. if node.getElementsByTagName(tag).item(0):
  757. if node.getElementsByTagName(tag).item(0).firstChild:
  758. return node.getElementsByTagName(tag).item(0).firstChild.nodeValue
  759. return default
  760.  
    def getVideoInfo(self, xml, params={}):
        """Parse a GData Atom feed (or batch response) into video dicts.

        Each dict carries videoid, Title, Plot, Date, Duration, Rating, etc.;
        unplayable entries keep videoid == "false" so callers can filter them.
        Also refreshes the "videoidcache" and appends a "Next" entry for
        paginated feeds.
        """
        dom = minidom.parseString(xml)
        self.common.log(str(len(xml)))
        links = dom.getElementsByTagName("link")
        entries = dom.getElementsByTagName("entry")
        # Batch responses use the namespaced tag name.
        if (not entries):
            entries = dom.getElementsByTagName("atom:entry")
        show_next = False

        # find out if there are more pages
        if (len(links)):
            for link in links:
                lget = link.attributes.get
                if (lget("rel").value == "next"):
                    show_next = True
                    break

        ytobjects = []
        for node in entries:
            video = {}

            # Resolve the video id: yt:videoid, then the media content src,
            # then the v= parameter of the entry's link.
            video['videoid'] = self._getNodeValue(node, "yt:videoid", "false")
            if video['videoid'] == "false":
                video['videoid'] = self._getNodeAttribute(node, "content", "src", "false")
                video['videoid'] = video['videoid'][video['videoid'].rfind("/") + 1:]

            if video['videoid'] == "false" and node.getElementsByTagName("link").item(0):
                video['videolink'] = node.getElementsByTagName("link").item(0).getAttribute('href')
                match = re.match('.*?v=(.*)\&.*', video['videolink'])
                if match:
                    video['videoid'] = match.group(1)

            # Trailing segment of the atom id = playlist entry id.
            if node.getElementsByTagName("id"):
                entryid = self._getNodeValue(node, "id", "")
                entryid = entryid[entryid.rfind(":") + 1:]
                video["playlist_entry_id"] = entryid

            if node.getElementsByTagName("yt:state").item(0):
                state = self._getNodeAttribute(node, "yt:state", 'name', 'Unknown Name')

                # Ignore unplayable items.
                if (state == 'deleted' or state == 'rejected'):
                    video['videoid'] = "false"

                # Get reason for why we can't playback the file.
                if node.getElementsByTagName("yt:state").item(0).hasAttribute('reasonCode'):
                    reason = self._getNodeAttribute(node, "yt:state", 'reasonCode', 'Unknown reasonCode')
                    value = self._getNodeValue(node, "yt:state", "Unknown reasonValue").encode('utf-8')
                    if reason == "private":
                        video['videoid'] = "false"
                    elif reason == 'requesterRegion':
                        video['videoid'] = "false"
                    elif reason != 'limitedSyndication':
                        self.common.log("removing video, reason: %s value: %s" % (reason, value))
                        video['videoid'] = "false"

            # Encode to UTF-8 byte strings so downstream XBMC calls get str,
            # not unicode objects.
            video['Title'] = self._getNodeValue(node, "media:title", "Unknown Title").encode('utf-8')
            video['Plot'] = self._getNodeValue(node, "media:description", "Unknown Plot").encode("utf-8")
            video['Date'] = self._getNodeValue(node, "published", "Unknown Date").encode("utf-8")
            video['user'] = self._getNodeValue(node, "name", "Unknown Name").encode("utf-8")

            # media:credit is not set for favorites, playlists
            video['Studio'] = self._getNodeValue(node, "media:credit", "").encode("utf-8")
            if video['Studio'] == "":
                video['Studio'] = self._getNodeValue(node, "name", "Unknown Uploader").encode("utf-8")

            duration = int(self._getNodeAttribute(node, "yt:duration", 'seconds', '0'))
            video['Duration'] = "%02d:%02d" % (int(duration / 60), int(duration % 60))
            video['Rating'] = float(self._getNodeAttribute(node, "gd:rating", 'average', "0.0"))
            video['count'] = int(self._getNodeAttribute(node, "yt:statistics", 'viewCount', "0"))
            infoString = ""
            if video['Date'] != "Unknown Date":
                # Drop the fractional-seconds/timezone suffix before parsing.
                c = time.strptime(video['Date'][:video['Date'].find(".")], "%Y-%m-%dT%H:%M:%S")
                video['Date'] = time.strftime("%d-%m-%Y", c)
                infoString += "Date Uploaded: " + time.strftime("%Y-%m-%d %H:%M:%S", c) + ", "
            infoString += "View count: " + str(video['count'])
            video['Plot'] = infoString + "\n" + video['Plot']
            video['Genre'] = self._getNodeAttribute(node, "media:category", "label", "Unknown Genre").encode("utf-8")

            # The rel="edit" link's trailing segment is the edit id.
            edit_links = node.getElementsByTagName("link")
            if edit_links:
                for edit_link in edit_links:
                    if edit_link.getAttribute('rel') == 'edit':
                        obj = edit_link.getAttribute('href')
                        video['editid'] = obj[obj.rfind('/') + 1:]

            video['thumbnail'] = self.urls["thumbnail"] % video['videoid']

            # Watched/unwatched overlay previously stored for this video.
            overlay = self.storage.retrieveValue("vidstatus-" + video['videoid'])
            if overlay:
                video['Overlay'] = int(overlay)

            if video['videoid'] == "false":
                self.common.log("videoid set to false : " + repr(video))

            ytobjects.append(video)

        if show_next:
            self.utils.addNextFolder(ytobjects, params)

        self.common.log("Done: " + str(len(ytobjects)))
        # Cache the parsed dicts as repr() strings keyed by videoid.
        save_data = {}
        for item in ytobjects:
            if "videoid" in item:
                save_data[item["videoid"]] = repr(item)
        self.cache.setMulti("videoidcache", save_data)
        return ytobjects
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement