from __future__ import absolute_import, division, unicode_literals

try:  # Python 3
-    from urllib.error import HTTPError
    from urllib.parse import quote_plus, unquote
-    from urllib.request import build_opener, install_opener, ProxyHandler, Request, urlopen
+    from urllib.request import build_opener, install_opener, ProxyHandler, urlopen
except ImportError:  # Python 2
    from urllib import quote_plus
-    from urllib2 import build_opener, install_opener, ProxyHandler, Request, HTTPError, unquote, urlopen
+    from urllib2 import build_opener, install_opener, ProxyHandler, unquote, urlopen

from data import CHANNELS
from helperobjects import TitleItem
-from kodiutils import (delete_cached_thumbnail, get_cache, get_global_setting, get_proxies, get_setting,
-                       has_addon, localize, localize_from_data, log, log_error, ok_dialog, ttl, update_cache,
-                       url_for)
-from statichelper import (add_https_method, convert_html_to_kodilabel, find_entry, from_unicode, play_url_to_id,
-                          program_to_url, realpage, to_unicode, strip_newlines, url_to_program)
+from kodiutils import (delete_cached_thumbnail, get_global_setting, get_proxies, get_setting,
+                       has_addon, localize, localize_from_data, log, url_for)
from metadata import Metadata
+from statichelper import (add_https_method, convert_html_to_kodilabel, find_entry, from_unicode, play_url_to_id,
+                          program_to_url, realpage, strip_newlines, url_to_program)
+from utils import get_cache, get_cached_url_json, get_url_json, ttl, update_cache


class ApiHelper:
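
The new import line pulls get_cache, get_cached_url_json, get_url_json, ttl and update_cache from a utils module whose contents are not part of this commit. As a reading aid, here is a minimal sketch of what the call sites below assume the two fetch helpers do, reconstructed from the inline code they replace; everything beyond the imported names is an assumption, not the add-on's actual utils.py.

from json import loads

try:  # Python 3
    from urllib.parse import unquote
    from urllib.request import urlopen
except ImportError:  # Python 2
    from urllib2 import unquote, urlopen

from kodiutils import log
from statichelper import to_unicode


def get_url_json(url):
    ''' Fetch a URL and return its decoded JSON payload, or None when decoding fails (sketch) '''
    log(2, 'URL get: {url}', url=unquote(url))
    try:
        return loads(to_unicode(urlopen(url).read()))
    except (TypeError, ValueError):  # No JSON object could be decoded
        return None


def get_cached_url_json(url, cache, ttl=None):
    ''' Return a fresh cached copy when available, else fetch the URL and refresh the cache (sketch) '''
    # get_cache() and update_cache() are the add-on's existing cache helpers,
    # assumed to live in this same utils module (see the new import line).
    json_data = get_cache(cache, ttl=ttl)  # Try the cache if it is fresh
    if not json_data:
        json_data = get_url_json(url)
        if json_data is not None:
            update_cache(cache, json_data)
    return json_data
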
@@ -57,16 +56,10 @@ def get_tvshows(self, category=None, channel=None, feature=None):
        if not category and not channel and not feature:
            params['facets[transcodingStatus]'] = 'AVAILABLE'  # Required for getting results in Suggests API
        cache_file = 'programs.json'
-        tvshows = get_cache(cache_file, ttl=ttl('indirect'))  # Try the cache if it is fresh
-        if not tvshows:
-            from json import loads
-            querystring = '&'.join('{}={}'.format(key, value) for key, value in list(params.items()))
-            suggest_url = self._VRTNU_SUGGEST_URL + '?' + querystring
-            log(2, 'URL get: {url}', url=unquote(suggest_url))
-            tvshows = loads(to_unicode(urlopen(suggest_url).read()))
-            update_cache(cache_file, tvshows)

-        return tvshows
+        querystring = '&'.join('{}={}'.format(key, value) for key, value in list(params.items()))
+        suggest_url = self._VRTNU_SUGGEST_URL + '?' + querystring
+        return get_cached_url_json(url=suggest_url, cache=cache_file, ttl=ttl('indirect'))

    def list_tvshows(self, category=None, channel=None, feature=None, use_favorites=False):
        ''' List all TV shows for a given category, channel or feature, optionally filtered by favorites '''
@@ -413,8 +406,7 @@ def get_episode_by_air_date(self, channel_name, start_date, end_date=None):
        schedule_date = onairdate
        schedule_datestr = schedule_date.isoformat().split('T')[0]
        url = 'https://www.vrt.be/bin/epg/schedule.%s.json' % schedule_datestr
-        from json import loads
-        schedule_json = loads(to_unicode(urlopen(url).read()))
+        schedule_json = get_url_json(url)
        episodes = schedule_json.get(channel.get('id'), [])
        if not episodes:
            return None
@@ -569,35 +561,10 @@ def get_episodes(self, program=None, season=None, episodes=None, category=None,
        # Construct VRT NU Search API Url and get api data
        querystring = '&'.join('{}={}'.format(key, value) for key, value in list(params.items()))
        search_url = self._VRTNU_SEARCH_URL + '?' + querystring.replace(' ', '%20')  # Only encode spaces to minimize url length
-
-        from json import loads
        if cache_file:
-            # Get api data from cache if it is fresh
-            search_json = get_cache(cache_file, ttl=ttl('indirect'))
-            if not search_json:
-                log(2, 'URL get: {url}', url=unquote(search_url))
-                req = Request(search_url)
-                try:
-                    search_json = loads(to_unicode(urlopen(req).read()))
-                except (TypeError, ValueError):  # No JSON object could be decoded
-                    return []
-                except HTTPError as exc:
-                    url_length = len(req.get_selector())
-                    if exc.code == 413 and url_length > 8192:
-                        ok_dialog(heading='HTTP Error 413', message=localize(30967))
-                        log_error('HTTP Error 413: Exceeded maximum url length: '
-                                  'VRT Search API url has a length of {length} characters.', length=url_length)
-                        return []
-                    if exc.code == 400 and 7600 <= url_length <= 8192:
-                        ok_dialog(heading='HTTP Error 400', message=localize(30967))
-                        log_error('HTTP Error 400: Probably exceeded maximum url length: '
-                                  'VRT Search API url has a length of {length} characters.', length=url_length)
-                        return []
-                    raise
-                update_cache(cache_file, search_json)
+            search_json = get_cached_url_json(url=search_url, cache=cache_file, ttl=ttl('indirect'))
        else:
-            log(2, 'URL get: {url}', url=unquote(search_url))
-            search_json = loads(to_unicode(urlopen(search_url).read()))
+            search_json = get_url_json(url=search_url)

        # Check for multiple seasons
        seasons = None
@@ -619,8 +586,9 @@ def get_episodes(self, program=None, season=None, episodes=None, category=None,
        if all_items and total_results > api_page_size:
            for api_page in range(1, api_pages):
                api_page_url = search_url + '&from=' + str(api_page * api_page_size + 1)
-                api_page_json = loads(to_unicode(urlopen(api_page_url).read()))
-                episodes += api_page_json.get('results', [{}])
+                api_page_json = get_url_json(api_page_url)
+                if api_page_json:
+                    episodes += api_page_json.get('results', [{}])

        # Return episodes
        return episodes
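
The try/except removed above also handled HTTP 413 and 400 responses for over-long Search API urls with a user-facing dialog, so that responsibility now has to live in the shared helper. The guard added to the pagination loop (if api_page_json:) suggests get_url_json returns an empty value rather than raising when a request fails. Below is a hedged sketch of how the helper could absorb the removed handling, extending the get_url_json sketch after the import block; the fail parameter and the use of len(url) are assumptions, and the real utils.py may differ.

def get_url_json(url, fail=None):
    ''' Fetch a URL and return its decoded JSON payload, or `fail` when the request cannot be served (sketch) '''
    # Same imports as the earlier sketch, plus HTTPError, ok_dialog, localize and
    # log_error, i.e. the names this commit removed from apihelper.py's imports.
    # `fail` is a hypothetical name for the value returned on errors.
    log(2, 'URL get: {url}', url=unquote(url))
    try:
        json_data = loads(to_unicode(urlopen(url).read()))
    except (TypeError, ValueError):  # No JSON object could be decoded
        return fail
    except HTTPError as exc:
        url_length = len(url)  # The removed call-site code measured the request selector instead
        if exc.code == 413 and url_length > 8192:
            ok_dialog(heading='HTTP Error 413', message=localize(30967))
            log_error('HTTP Error 413: Exceeded maximum url length: '
                      'VRT Search API url has a length of {length} characters.', length=url_length)
            return fail
        if exc.code == 400 and 7600 <= url_length <= 8192:
            ok_dialog(heading='HTTP Error 400', message=localize(30967))
            log_error('HTTP Error 400: Probably exceeded maximum url length: '
                      'VRT Search API url has a length of {length} characters.', length=url_length)
            return fail
        raise
    return json_data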