|
10 | 10 | from .. import app |
11 | 11 |
|
12 | 12 |
|
def get(url, background_update_interval, cache_expiration_timeout, headers=None):
    """
    Return the cached response body for *url*, refreshing it in the background.

    Returns None if such URL is not (yet) cached.
    Cached entries will be removed after the specified period of inactivity.
    The timing parameters should not change between calls;
    otherwise, their actual values may vary unpredictably within the supplied ranges.

    :param url: URL whose cached content is wanted; also used as the cache key.
    :param background_update_interval: minimum number of seconds between
        background refresh attempts for this URL.
    :param cache_expiration_timeout: seconds of inactivity after which the cached
        entry disappears; must be strictly greater than the update interval.
    :param headers: optional mapping of extra HTTP request headers for the fetch.
    :raises ValueError: if ``background_update_interval`` is not lower than
        ``cache_expiration_timeout``.
    """
    if background_update_interval >= cache_expiration_timeout:
        raise ValueError("The background update interval must be lower than the data expiration timeout")

    # First, decide if it is time to launch a background update yet.
    # Due to the race condition, we may accidentally start multiple updates concurrently, but this is fine.
    lock_key = "lock-" + url
    if not expiring_storage.read(lock_key):
        expiring_storage.write(lock_key, True, timeout=float(background_update_interval))
        # Pass the arguments via args= instead of capturing them in a lambda —
        # clearer and avoids an unnecessary closure.
        # daemon=False so an in-flight download may finish on interpreter exit.
        threading.Thread(
            target=_do_background_update,
            args=(url, headers, cache_expiration_timeout),
            daemon=False,
        ).start()

    return expiring_storage.read(url)
35 | 34 |
|
36 | 35 |
|
def _do_background_update(url, headers, cache_expiration_timeout):
    """
    Fetch *url* and store the response body in the expiring cache.

    Intended to run in a background thread spawned by :func:`get`.
    Logs the download size and elapsed time on success.

    :param url: the URL to download.
    :param headers: optional mapping of extra HTTP request headers (may be None).
    :param cache_expiration_timeout: seconds before the freshly cached entry expires.
    """
    app.logger.info("Initiating background update from %r with headers %r", url, headers)

    started_at = time.monotonic()
    req = urllib.request.Request(url, headers=headers or {})
    # Close the HTTP response deterministically instead of leaking the socket
    # until garbage collection.
    # NOTE(review): urlopen() is called without timeout=, so a stalled server
    # can hang this background thread indefinitely — consider adding one.
    with urllib.request.urlopen(req) as response:
        data = response.read()

    expiring_storage.write(url, data, timeout=float(cache_expiration_timeout))

    app.logger.info(
        "Background update OK: saved %.1f KiB from %r in %.3f seconds",
        len(data) / 1024,
        url,
        time.monotonic() - started_at,
    )
0 commit comments