@@ -4,10 +4,12 @@
 
 from __future__ import absolute_import, division, unicode_literals
 from contextlib import contextmanager
+from sys import version_info
+
 import xbmc
 import xbmcaddon
 import xbmcplugin
-from statichelper import from_unicode, to_unicode
+from utils import from_unicode, to_unicode
 
 ADDON = xbmcaddon.Addon()
 
@@ -750,3 +752,176 @@ def jsonrpc(**kwargs): |
     if 'jsonrpc' not in kwargs:
         kwargs.update(jsonrpc='2.0')
     return loads(xbmc.executeJSONRPC(dumps(kwargs)))
+
+
+def human_delta(seconds):
+    ''' Return a human-readable representation of the TTL '''
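+    # e.g. human_delta(3700) returns '1 hour and 1 minute'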
+    from math import floor
+    days = int(floor(seconds / (24 * 60 * 60)))
+    seconds = seconds % (24 * 60 * 60)
+    hours = int(floor(seconds / (60 * 60)))
+    seconds = seconds % (60 * 60)
+    if days:
+        return '%d day%s and %d hour%s' % (days, 's' if days != 1 else '', hours, 's' if hours != 1 else '')
+    minutes = int(floor(seconds / 60))
+    seconds = seconds % 60
+    if hours:
+        return '%d hour%s and %d minute%s' % (hours, 's' if hours != 1 else '', minutes, 's' if minutes != 1 else '')
+    if minutes:
+        return '%d minute%s and %d second%s' % (minutes, 's' if minutes != 1 else '', seconds, 's' if seconds != 1 else '')
+    return '%d second%s' % (seconds, 's' if seconds != 1 else '')
+
+
+def get_cache(path, ttl=None):  # pylint: disable=redefined-outer-name
+    ''' Get the content from cache, if it's still fresh '''
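+    # Returns the parsed JSON data, or None when caching is disabled, the file is missing or stale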
+    if get_setting('usehttpcaching', 'true') == 'false':
+        return None
+
+    fullpath = get_cache_path() + path
+    if not exists(fullpath):
+        return None
+
+    from time import localtime, mktime
+    mtime = stat_file(fullpath).st_mtime()
+    now = mktime(localtime())
+    if ttl and now >= mtime + ttl:
+        return None
+
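+    # A ttl of None never expires, so the cached copy is always used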
+    if ttl is None:
+        log(3, "Cache '{path}' is forced, skipping the TTL check.", path=path)
+    else:
+        log(3, "Cache '{path}' is fresh, expires in {time}.", path=path, time=human_delta(mtime + ttl - now))
+    with open_file(fullpath, 'r') as fdesc:
+        try:
+            return get_json_data(fdesc)
+        except ValueError as exc:  # No JSON object could be decoded
+            log_error('JSON Error: {exc}', exc=exc)
+            return None
+
+
+def update_cache(path, data):
+    ''' Update the cache, if necessary '''
+    if get_setting('usehttpcaching', 'true') == 'false':
+        return
+
+    from hashlib import md5
+    from json import dump, dumps
+    fullpath = get_cache_path() + path
+    if exists(fullpath):
+        with open_file(fullpath) as fdesc:
+            cachefile = fdesc.read().encode('utf-8')
+        md5_cache = md5(cachefile).hexdigest()
+    else:
+        # No cached copy yet, force a write below
+        md5_cache = None
+        # Create cache directory if missing
+        if not exists(get_cache_path()):
+            mkdirs(get_cache_path())
+
+    # Avoid writes if possible (i.e. SD cards)
+    # Compare hex digests: hashlib objects themselves never compare equal
+    if md5_cache != md5(dumps(data).encode('utf-8')).hexdigest():
+        log(3, "Write cache '{path}'.", path=path)
+        with open_file(fullpath, 'w') as fdesc:
+            dump(data, fdesc)
+    else:
+        # Update timestamp so the TTL is extended
+        from os import utime
+        log(3, "Cache '{path}' has not changed, updating mtime only.", path=path)
+        utime(fullpath, None)  # Pass None explicitly for Python 2 compatibility
+
+
+def ttl(kind='direct'):
+    ''' Return the HTTP cache ttl in seconds based on the kind of relation '''
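+    # Setting values are stored in minutes; convert to seconds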
+    if kind == 'direct':
+        return int(get_setting('httpcachettldirect', 5)) * 60
+    if kind == 'indirect':
+        return int(get_setting('httpcachettlindirect', 60)) * 60
+    return 5 * 60
+
+
+def get_json_data(response):
+    ''' Return a JSON object from an HTTP response or file object '''
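+    # json.load() only accepts a bytes stream from Python 3.6 onwards,
+    # so older Python 3 releases decode the payload to str first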
+    from json import load, loads
+    if (3, 0) <= version_info < (3, 6):  # the JSON object must be str, not 'bytes'
+        json_data = loads(to_unicode(response.read()))
+    else:
+        json_data = load(response)
+    return json_data
+
+
+def get_url_json(url, cache=None, headers=None, data=None, fail=None):
+    ''' Return HTTP data '''
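+    # 'fail' is returned when the response cannot be parsed or a known HTTP error occurs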
+    try:  # Python 3
+        from urllib.error import HTTPError
+        from urllib.parse import unquote
+        from urllib.request import urlopen, Request
+    except ImportError:  # Python 2
+        from urllib2 import HTTPError, unquote, urlopen, Request
+
+    if headers is None:
+        headers = dict()
+    log(2, 'URL get: {url}', url=unquote(url))
+    req = Request(url, headers=headers)
+    if data is not None:
+        req.data = data
+    try:
+        json_data = get_json_data(urlopen(req))
+    except ValueError as exc:  # No JSON object could be decoded
+        log_error('JSON Error: {exc}', exc=exc)
+        return fail
+    except HTTPError as exc:
+        if hasattr(req, 'selector'):  # Python 3.4+
+            url_length = len(req.selector)
+        else:  # Python 2.7
+            url_length = len(req.get_selector())
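+        # The VRT Search API appears to reject request lines longer than 8192 characters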
+        if exc.code == 413 and url_length > 8192:
+            ok_dialog(heading='HTTP Error 413', message=localize(30967))
+            log_error('HTTP Error 413: Exceeded maximum url length: '
+                      'VRT Search API url has a length of {length} characters.', length=url_length)
+            return fail
+        if exc.code == 400 and 7600 <= url_length <= 8192:
+            ok_dialog(heading='HTTP Error 400', message=localize(30967))
+            log_error('HTTP Error 400: Probably exceeded maximum url length: '
+                      'VRT Search API url has a length of {length} characters.', length=url_length)
+            return fail
+        # The error document itself may be JSON (e.g. an API error message), so try parsing it
+        try:
+            return get_json_data(exc)
+        except ValueError as exc:  # No JSON object could be decoded
+            log_error('JSON Error: {exc}', exc=exc)
+            return fail
+    else:
+        if cache:
+            update_cache(cache, json_data)
+        return json_data
+
+
+def get_cached_url_json(url, cache, headers=None, ttl=None, fail=None):  # pylint: disable=redefined-outer-name
+    ''' Return data from cache, if any, else make an HTTP request '''
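+    # A fresh HTTP response is written back to the cache by get_url_json()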
+    # Get api data from cache if it is fresh
+    json_data = get_cache(cache, ttl=ttl)
+    if json_data is not None:
+        return json_data
+    return get_url_json(url, cache=cache, headers=headers, fail=fail)
+
+
+def refresh_caches(cache_file=None):
+    ''' Invalidate the needed caches and refresh container '''
+    files = ['favorites.json', 'oneoff.json', 'resume_points.json']
+    if cache_file and cache_file not in files:
+        files.append(cache_file)
+    invalidate_caches(*files)
+    container_refresh()
+    notification(message=localize(30981))
+
+
+def invalidate_caches(*caches):
+    ''' Invalidate multiple cache files '''
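+    # Cache names may contain fnmatch-style wildcards, e.g. 'program-*.json'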
+    import fnmatch
+    _, files = listdir(get_cache_path())
+    # Invalidate caches related to menu list refreshes
+    removes = set()
+    for expr in caches:
+        removes.update(fnmatch.filter(files, expr))
+    for filename in removes:
+        delete(get_cache_path() + filename)