@@ -261,28 +261,29 @@ def test_query_caching(self):
        os.mkdir(os.path.expanduser('~/dj_query_cache'))

        with dj.config(query_cache=os.path.expanduser('~/dj_query_cache')):
+           conn = schema.TTest3.connection
            # insert sample data and load cache
            schema.TTest3.insert([dict(key=100 + i, value=200 + i) for i in range(2)])
-           dj.conn().set_query_cache(query_cache='main')
+           conn.set_query_cache(query_cache='main')
            cached_res = schema.TTest3().fetch()
            # attempt to insert while caching enabled
            try:
                schema.TTest3.insert([dict(key=200 + i, value=400 + i) for i in range(2)])
-               assert False, 'Insert allowed which query caching enabled'
+               assert False, 'Insert allowed while query caching enabled'
            except dj.DataJointError:
-               dj.conn().set_query_cache()
+               conn.set_query_cache()
            # insert new data
            schema.TTest3.insert([dict(key=600 + i, value=800 + i) for i in range(2)])
            # re-enable cache to access old results
-           dj.conn().set_query_cache(query_cache='main')
+           conn.set_query_cache(query_cache='main')
            previous_cache = schema.TTest3().fetch()
            # verify properly cached and how to refresh results
            assert all([c == p for c, p in zip(cached_res, previous_cache)])
-           dj.conn().set_query_cache()
+           conn.set_query_cache()
            uncached_res = schema.TTest3().fetch()
            assert len(uncached_res) > len(cached_res)
            # purge query cache
-           dj.conn().purge_query_cache()
+           conn.purge_query_cache()

        # reset cache directory state (will fail if purge was unsuccessful)
        os.rmdir(os.path.expanduser('~/dj_query_cache'))