@@ -30,7 +30,7 @@ def default_response():
 
 
 def test_default_filename_from_uri(default_response):
-    cd = CacheDownloader().bind("https://example.com/schema1.json")
+    cd = CacheDownloader("downloads").bind("https://example.com/schema1.json")
     assert cd._filename == "schema1.json"
 
 
@@ -76,7 +76,7 @@ def fake_expanduser(path):
     monkeypatch.setattr(platform, "system", fakesystem)
     monkeypatch.setattr(os.path, "expanduser", fake_expanduser)
 
-    cd = CacheDownloader()
+    cd = CacheDownloader("downloads")
     assert cd._cache_dir == expect_value
 
     if sysname == "Darwin":
@@ -114,7 +114,7 @@ def test_cachedownloader_cached_file(tmp_path, monkeypatch, default_response):
     f.write_text("{}")
 
     # set the cache_dir to the tmp dir (so that cache_dir will always be set)
-    cd = CacheDownloader(cache_dir=tmp_path).bind(str(f))
+    cd = CacheDownloader(tmp_path).bind(str(f))
     # patch the downloader to skip any download "work"
     monkeypatch.setattr(
         cd._downloader, "_download", lambda file_uri, filename, response_ok: str(f)
@@ -128,7 +128,7 @@ def test_cachedownloader_cached_file(tmp_path, monkeypatch, default_response):
 def test_cachedownloader_on_success(get_download_cache_loc, disable_cache):
     add_default_response()
     f = get_download_cache_loc("schema1.json")
-    cd = CacheDownloader(disable_cache=disable_cache).bind(
+    cd = CacheDownloader("downloads", disable_cache=disable_cache).bind(
         "https://example.com/schema1.json"
     )
 
@@ -171,7 +171,7 @@ def test_cachedownloader_succeeds_after_few_errors(
     )
     add_default_response()
     f = get_download_cache_loc("schema1.json")
-    cd = CacheDownloader(disable_cache=disable_cache).bind(
+    cd = CacheDownloader("downloads", disable_cache=disable_cache).bind(
         "https://example.com/schema1.json"
     )
 
@@ -205,7 +205,7 @@ def test_cachedownloader_fails_after_many_errors(
     )
     add_default_response()  # never reached, the 11th response
     f = get_download_cache_loc("schema1.json")
-    cd = CacheDownloader(disable_cache=disable_cache).bind(
+    cd = CacheDownloader("downloads", disable_cache=disable_cache).bind(
         "https://example.com/schema1.json"
     )
     with pytest.raises(FailedDownloadError):
@@ -226,6 +226,7 @@ def test_cachedownloader_retries_on_bad_data(get_download_cache_loc, disable_cac
     add_default_response()
     f = get_download_cache_loc("schema1.json")
     cd = CacheDownloader(
+        "downloads",
         disable_cache=disable_cache,
     ).bind(
         "https://example.com/schema1.json",
@@ -281,7 +282,7 @@ def fake_mktime(*args):
     if file_exists:
         inject_cached_download(uri, original_file_contents)
 
-    cd = CacheDownloader().bind(uri)
+    cd = CacheDownloader("downloads").bind(uri)
 
     # if the file already existed, it will not be overwritten by the cachedownloader
     # so the returned value for both the downloader and a direct file read should be the
@@ -326,7 +327,7 @@ def dummy_validate_bytes(data):
 
     # construct a downloader pointed at the schema and file, expecting a cache hit
     # and use the above validation method
-    cd = CacheDownloader().bind(
+    cd = CacheDownloader("downloads").bind(
         "https://example.com/schema1.json",
         validation_callback=dummy_validate_bytes,
     )