Skip to content

Commit a94bff0

Browse files
committed
Attempt to use json.load(fdesc) again
This PR includes: - Use json.load(fdesc) - Early exit in tokenresolver - Move caching functionality to utils.py - Fix 2 issues - Return default value on failure - Fix more json.load() calls for Python 3.5
1 parent 85d065a commit a94bff0

18 files changed

+533
-558
lines changed

resources/lib/addon.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -10,9 +10,8 @@
1010
except ImportError: # Python 2
1111
from urllib import unquote_plus
1212

13-
from kodiutils import localize, log_access, notification
14-
from statichelper import from_unicode, to_unicode
15-
from utils import refresh_caches
13+
from kodiutils import localize, log_access, notification, refresh_caches
14+
from utils import from_unicode, to_unicode
1615

1716
plugin = Plugin() # pylint: disable=invalid-name
1817

resources/lib/addon_entry.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,8 @@
33
''' This is the actual VRT NU video plugin entry point '''
44

55
from __future__ import absolute_import, division, unicode_literals
6-
import kodiutils
76
import xbmcaddon
7+
import kodiutils
88

99
kodiutils.ADDON = xbmcaddon.Addon()
1010

resources/lib/apihelper.py

Lines changed: 20 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -13,12 +13,12 @@
1313

1414
from data import CHANNELS
1515
from helperobjects import TitleItem
16-
from kodiutils import (delete_cached_thumbnail, get_global_setting, get_proxies, get_setting,
17-
has_addon, localize, localize_from_data, log, url_for)
16+
from kodiutils import (delete_cached_thumbnail, get_cache, get_cached_url_json, get_global_setting,
17+
get_proxies, get_setting, get_url_json, has_addon, localize, localize_from_data,
18+
log, ttl, update_cache, url_for)
1819
from metadata import Metadata
19-
from statichelper import (add_https_method, convert_html_to_kodilabel, find_entry, from_unicode, play_url_to_id,
20-
program_to_url, realpage, strip_newlines, url_to_program)
21-
from utils import get_cache, get_cached_url_json, get_url_json, ttl, update_cache
20+
from utils import (add_https_proto, html_to_kodilabel, find_entry, from_unicode, play_url_to_id,
21+
program_to_url, realpage, strip_newlines, url_to_program)
2222

2323

2424
class ApiHelper:
@@ -38,7 +38,7 @@ def __init__(self, _favorites, _resumepoints):
3838

3939
def get_tvshows(self, category=None, channel=None, feature=None):
4040
''' Get all TV shows for a given category, channel or feature, optionally filtered by favorites '''
41-
params = dict()
41+
params = {}
4242

4343
if category:
4444
params['facets[categories]'] = category
@@ -59,7 +59,7 @@ def get_tvshows(self, category=None, channel=None, feature=None):
5959

6060
querystring = '&'.join('{}={}'.format(key, value) for key, value in list(params.items()))
6161
suggest_url = self._VRTNU_SUGGEST_URL + '?' + querystring
62-
return get_cached_url_json(url=suggest_url, cache=cache_file, ttl=ttl('indirect'))
62+
return get_cached_url_json(url=suggest_url, cache=cache_file, ttl=ttl('indirect'), fail=[])
6363

6464
def list_tvshows(self, category=None, channel=None, feature=None, use_favorites=False):
6565
''' List all TV shows for a given category, channel or feature, optionally filtered by favorites '''
@@ -144,7 +144,7 @@ def __map_episodes(self, episodes, titletype=None, season=None, use_favorites=Fa
144144
highlight = episode.get('highlight')
145145
if highlight:
146146
for key in highlight:
147-
episode[key] = convert_html_to_kodilabel(highlight.get(key)[0])
147+
episode[key] = html_to_kodilabel(highlight.get(key)[0])
148148

149149
list_item, sort, ascending = self.episode_to_listitem(episode, program, cache_file, titletype)
150150
episode_items.append(list_item)
@@ -261,7 +261,7 @@ def get_upnext(self, info):
261261

262262
# Get all episodes from current program and sort by program, seasonTitle and episodeNumber
263263
episodes = sorted(self.get_episodes(keywords=program), key=lambda k: (k.get('program'), k.get('seasonTitle'), k.get('episodeNumber')))
264-
upnext = dict()
264+
upnext = {}
265265
for episode in episodes:
266266
if ep_id.get('whatson_id') == episode.get('whatsonId') or \
267267
ep_id.get('video_id') == episode.get('videoId') or \
@@ -406,7 +406,7 @@ def get_episode_by_air_date(self, channel_name, start_date, end_date=None):
406406
schedule_date = onairdate
407407
schedule_datestr = schedule_date.isoformat().split('T')[0]
408408
url = 'https://www.vrt.be/bin/epg/schedule.%s.json' % schedule_datestr
409-
schedule_json = get_url_json(url)
409+
schedule_json = get_url_json(url, fail={})
410410
episodes = schedule_json.get(channel.get('id'), [])
411411
if not episodes:
412412
return None
@@ -562,15 +562,16 @@ def get_episodes(self, program=None, season=None, episodes=None, category=None,
562562
querystring = '&'.join('{}={}'.format(key, value) for key, value in list(params.items()))
563563
search_url = self._VRTNU_SEARCH_URL + '?' + querystring.replace(' ', '%20') # Only encode spaces to minimize url length
564564
if cache_file:
565-
search_json = get_cached_url_json(url=search_url, cache=cache_file, ttl=ttl('indirect'))
565+
search_json = get_cached_url_json(url=search_url, cache=cache_file, ttl=ttl('indirect'), fail={})
566566
else:
567-
search_json = get_url_json(url=search_url)
567+
search_json = get_url_json(url=search_url, fail={})
568568

569569
# Check for multiple seasons
570-
seasons = None
570+
seasons = []
571571
if 'facets[seasonTitle]' not in unquote(search_url):
572-
facets = search_json.get('facets', dict()).get('facets')
573-
seasons = next((f.get('buckets', []) for f in facets if f.get('name') == 'seasons' and len(f.get('buckets', [])) > 1), None)
572+
facets = search_json.get('facets', {}).get('facets')
573+
if facets:
574+
seasons = next((f.get('buckets', []) for f in facets if f.get('name') == 'seasons' and len(f.get('buckets', [])) > 1), None)
574575

575576
episodes = search_json.get('results', [{}])
576577
show_seasons = bool(season != 'allseasons')
@@ -587,7 +588,7 @@ def get_episodes(self, program=None, season=None, episodes=None, category=None,
587588
for api_page in range(1, api_pages):
588589
api_page_url = search_url + '&from=' + str(api_page * api_page_size + 1)
589590
api_page_json = get_url_json(api_page_url)
590-
if api_page_json:
591+
if api_page_json is not None:
591592
episodes += api_page_json.get('results', [{}])
592593

593594
# Return episodes
@@ -610,7 +611,7 @@ def list_channels(self, channels=None, live=True):
610611
continue
611612

612613
context_menu = []
613-
art_dict = dict()
614+
art_dict = {}
614615

615616
# Try to use the white icons for thumbnails (used for icons as well)
616617
if has_addon('resource.images.studios.white'):
@@ -681,7 +682,7 @@ def list_youtube(channels=None):
681682
continue
682683

683684
context_menu = []
684-
art_dict = dict()
685+
art_dict = {}
685686

686687
# Try to use the white icons for thumbnails (used for icons as well)
687688
if has_addon('resource.images.studios.white'):
@@ -818,7 +819,7 @@ def get_category_thumbnail(element):
818819
''' Return a category thumbnail, if available '''
819820
if get_setting('showfanart', 'true') == 'true':
820821
raw_thumbnail = element.find(class_='media').get('data-responsive-image', 'DefaultGenre.png')
821-
return add_https_method(raw_thumbnail)
822+
return add_https_proto(raw_thumbnail)
822823
return 'DefaultGenre.png'
823824

824825
@staticmethod

resources/lib/favorites.py

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -11,9 +11,9 @@
1111
except ImportError: # Python 2
1212
from urllib2 import build_opener, install_opener, ProxyHandler, Request, unquote, urlopen
1313

14-
from kodiutils import (container_refresh, get_proxies, get_setting, has_credentials, input_down,
15-
localize, log, log_error, multiselect, notification, ok_dialog)
16-
from utils import get_cache, get_url_json, invalidate_caches, update_cache
14+
from kodiutils import (container_refresh, get_cache, get_proxies, get_setting, get_url_json,
15+
has_credentials, input_down, invalidate_caches, localize, log, log_error,
16+
multiselect, notification, ok_dialog, update_cache)
1717

1818

1919
class Favorites:
@@ -45,7 +45,7 @@ def refresh(self, ttl=None):
4545
}
4646
favorites_url = 'https://video-user-data.vrt.be/favorites'
4747
favorites_json = get_url_json(url=favorites_url, cache='favorites.json', headers=headers)
48-
if favorites_json:
48+
if favorites_json is not None:
4949
self._favorites = favorites_json
5050

5151
def update(self, program, title, value=True):
@@ -70,9 +70,9 @@ def update(self, program, title, value=True):
7070
'Referer': 'https://www.vrt.be/vrtnu',
7171
}
7272

73-
from statichelper import program_to_url
74-
payload = dict(isFavorite=value, programUrl=program_to_url(program, 'short'), title=title)
7573
from json import dumps
74+
from utils import program_to_url
75+
payload = dict(isFavorite=value, programUrl=program_to_url(program, 'short'), title=title)
7676
data = dumps(payload).encode('utf-8')
7777
program_id = self.program_to_id(program)
7878
log(2, 'URL post: https://video-user-data.vrt.be/favorites/{program_id}', program_id=program_id)
@@ -124,12 +124,12 @@ def titles(self):
124124

125125
def programs(self):
126126
''' Return all favorite programs '''
127-
from statichelper import url_to_program
127+
from utils import url_to_program
128128
return [url_to_program(value.get('value').get('programUrl')) for value in list(self._favorites.values()) if value.get('value').get('isFavorite')]
129129

130130
def manage(self):
131131
''' Allow the user to unselect favorites to be removed from the listing '''
132-
from statichelper import url_to_program
132+
from utils import url_to_program
133133
self.refresh(ttl=0)
134134
if not self._favorites:
135135
ok_dialog(heading=localize(30418), message=localize(30419)) # No favorites found

resources/lib/kodiutils.py

Lines changed: 176 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,10 +4,12 @@
44

55
from __future__ import absolute_import, division, unicode_literals
66
from contextlib import contextmanager
7+
from sys import version_info
8+
79
import xbmc
810
import xbmcaddon
911
import xbmcplugin
10-
from statichelper import from_unicode, to_unicode
12+
from utils import from_unicode, to_unicode
1113

1214
ADDON = xbmcaddon.Addon()
1315

@@ -750,3 +752,176 @@ def jsonrpc(**kwargs):
750752
if 'jsonrpc' not in kwargs:
751753
kwargs.update(jsonrpc='2.0')
752754
return loads(xbmc.executeJSONRPC(dumps(kwargs)))
755+
756+
757+
def human_delta(seconds):
    ''' Return a human-readable representation of a duration given in seconds.

    Only the two most significant units are shown, e.g. '2 days and 3 hours',
    '1 hour and 5 minutes', '1 minute and 30 seconds' or '45 seconds'.
    '''
    # divmod replaces the floor/modulo pairs from the original in one step each
    days, seconds = divmod(seconds, 24 * 60 * 60)
    hours, seconds = divmod(seconds, 60 * 60)
    if days:
        return '%d day%s and %d hour%s' % (days, 's' if days != 1 else '', hours, 's' if hours != 1 else '')
    minutes, seconds = divmod(seconds, 60)
    if hours:
        return '%d hour%s and %d minute%s' % (hours, 's' if hours != 1 else '', minutes, 's' if minutes != 1 else '')
    if minutes:
        return '%d minute%s and %d second%s' % (minutes, 's' if minutes != 1 else '', seconds, 's' if seconds != 1 else '')
    return '%d second%s' % (seconds, 's' if seconds != 1 else '')
773+
774+
775+
def get_cache(path, ttl=None):  # pylint: disable=redefined-outer-name
    ''' Return the cached JSON content for a cache file, if it is still fresh.

    path: cache file name, relative to the add-on cache directory
    ttl: maximum age in seconds; None forces the cached content regardless of age
    Returns the decoded JSON object, or None when caching is disabled, the file
    is missing or expired, or its content cannot be decoded as JSON.
    '''
    # Respect the user setting that disables HTTP caching entirely
    if get_setting('usehttpcaching', 'true') == 'false':
        return None

    fullpath = get_cache_path() + path
    if not exists(fullpath):
        return None

    from time import localtime, mktime
    # NOTE(review): st_mtime is invoked as a method — assumes a Kodi xbmcvfs
    # Stat object (where st_mtime() is callable), not os.stat_result — confirm
    mtime = stat_file(fullpath).st_mtime()
    now = mktime(localtime())
    # An expired entry is treated the same as a missing one
    if ttl and now >= mtime + ttl:
        return None

    if ttl is None:
        log(3, "Cache '{path}' is forced from cache.", path=path)
    else:
        log(3, "Cache '{path}' is fresh, expires in {time}.", path=path, time=human_delta(mtime + ttl - now))
    with open_file(fullpath, 'r') as fdesc:
        try:
            return get_json_data(fdesc)
        except ValueError as exc:  # No JSON object could be decoded
            log_error('JSON Error: {exc}', exc=exc)
            return None
800+
801+
802+
def update_cache(path, data):
    ''' Store JSON data in the cache file, avoiding the write when unchanged.

    path: cache file name, relative to the add-on cache directory
    data: JSON-serializable object to store
    '''
    if get_setting('usehttpcaching', 'true') == 'false':
        return

    from hashlib import md5
    from json import dump, dumps
    fullpath = get_cache_path() + path
    if exists(fullpath):
        with open_file(fullpath) as fdesc:
            # Digest of the current cache content, used for change detection
            md5_cache = md5(fdesc.read().encode('utf-8')).digest()
    else:
        md5_cache = None
    # Create cache directory if missing
    if not exists(get_cache_path()):
        mkdirs(get_cache_path())

    # Avoid writes if possible (i.e. SD cards)
    # NOTE: compare digests, not hashlib objects — hash objects have no __eq__,
    # so the original `md5(a) != md5(b)` was always True and rewrote every time
    if md5_cache != md5(dumps(data).encode('utf-8')).digest():
        log(3, "Write cache '{path}'.", path=path)
        with open_file(fullpath, 'w') as fdesc:
            dump(data, fdesc)
    else:
        # Only bump the timestamp; must use the full path, not the relative name
        from os import utime
        log(3, "Cache '{path}' has not changed, updating mtime only.", path=path)
        utime(fullpath)
831+
832+
833+
def ttl(kind='direct'):
    ''' Return the HTTP cache time-to-live in seconds for a kind of relation '''
    # Map each known kind to its settings key and default value (in minutes)
    setting_for_kind = {
        'direct': ('httpcachettldirect', 5),
        'indirect': ('httpcachettlindirect', 60),
    }
    entry = setting_for_kind.get(kind)
    if entry is None:
        # Unknown kind: fall back to five minutes
        return 5 * 60
    setting_name, default_minutes = entry
    return int(get_setting(setting_name, default_minutes)) * 60
840+
841+
842+
def get_json_data(response):
    ''' Return the JSON object parsed from a file-like HTTP response.

    On Python 3.0–3.5 json.load() cannot handle bytes, so the payload is read
    and decoded to str first; elsewhere the stream is passed straight through.
    '''
    from json import load, loads
    # NOTE: the original upper bound (3, 5, 9) wrongly excluded Python 3.5.10+;
    # compare against the next minor version instead
    if (3, 0) <= version_info < (3, 6):  # the JSON object must be str, not 'bytes'
        return loads(to_unicode(response.read()))
    return load(response)
850+
851+
852+
def get_url_json(url, cache=None, headers=None, data=None, fail=None):
    ''' Fetch a URL and return the parsed JSON payload.

    url: the URL to request
    cache: optional cache file name a successful response is written to
    headers: optional dict of HTTP request headers
    data: optional request body (bytes); when given, a POST is performed
    fail: value to return when the response cannot be fetched or decoded
    '''
    try:  # Python 3
        from urllib.error import HTTPError
        from urllib.parse import unquote
        from urllib.request import urlopen, Request
    except ImportError:  # Python 2
        from urllib2 import HTTPError, unquote, urlopen, Request

    if headers is None:
        headers = {}
    log(2, 'URL get: {url}', url=unquote(url))
    req = Request(url, headers=headers)
    if data is not None:
        req.data = data
    try:
        json_data = get_json_data(urlopen(req))
    except ValueError as exc:  # No JSON object could be decoded
        log_error('JSON Error: {exc}', exc=exc)
        return fail
    except HTTPError as exc:
        if hasattr(req, 'selector'):  # Python 3.4+
            url_length = len(req.selector)
        else:  # Python 2.7
            url_length = len(req.get_selector())
        if exc.code == 413 and url_length > 8192:
            ok_dialog(heading='HTTP Error 413', message=localize(30967))
            log_error('HTTP Error 413: Exceeded maximum url length: '
                      'VRT Search API url has a length of {length} characters.', length=url_length)
            return fail
        if exc.code == 400 and 7600 <= url_length <= 8192:
            ok_dialog(heading='HTTP Error 400', message=localize(30967))
            log_error('HTTP Error 400: Probably exceeded maximum url length: '
                      'VRT Search API url has a length of {length} characters.', length=url_length)
            return fail
        # Some API errors still carry a JSON body worth returning to the caller
        try:
            return get_json_data(exc)
        except ValueError as json_exc:  # renamed: don't shadow the HTTPError
            log_error('JSON Error: {exc}', exc=json_exc)
            return fail
        # NOTE: the original ended this handler with an unreachable bare
        # `raise` (every path above returns); it has been removed
    else:
        if cache:
            update_cache(cache, json_data)
        return json_data
897+
898+
899+
def get_cached_url_json(url, cache, headers=None, ttl=None, fail=None):  # pylint: disable=redefined-outer-name
    ''' Return the cached API data when still fresh, otherwise fetch it over HTTP '''
    cached = get_cache(cache, ttl=ttl)
    if cached is None:
        # Cache miss or stale entry: fetch anew; get_url_json refreshes the cache
        return get_url_json(url, cache=cache, headers=headers, fail=fail)
    return cached
906+
907+
908+
def refresh_caches(cache_file=None):
    ''' Drop the user-data caches (plus an optional extra cache file), then refresh the container and notify the user '''
    user_caches = ['favorites.json', 'oneoff.json', 'resume_points.json']
    # Include the caller-supplied cache file, avoiding a duplicate entry
    if cache_file and cache_file not in user_caches:
        user_caches.append(cache_file)
    invalidate_caches(*user_caches)
    container_refresh()
    notification(message=localize(30981))
916+
917+
918+
def invalidate_caches(*caches):
    ''' Delete every cache file that matches any of the given fnmatch patterns '''
    from fnmatch import filter as fnmatch_filter
    _, files = listdir(get_cache_path())
    # Collect the set of cached files matching at least one pattern
    stale = set()
    for pattern in caches:
        stale.update(fnmatch_filter(files, pattern))
    for filename in stale:
        delete(get_cache_path() + filename)

0 commit comments

Comments
 (0)