@@ -309,9 +309,9 @@ def _need_checksum(local_filepath):
             return limit is None or Path(local_filepath).stat().st_size < limit

         if filepath_hash is not None:
-            relative_filepath, contents_hash = (self & {"hash": filepath_hash}).fetch1(
-                "filepath", "contents_hash"
-            )
+            relative_filepath, contents_hash, size = (
+                self & {"hash": filepath_hash}
+            ).fetch1("filepath", "contents_hash", "size")
             external_path = self._make_external_filepath(relative_filepath)
             local_filepath = Path(self.spec["stage"]).absolute() / relative_filepath

@@ -329,10 +329,12 @@ def _need_checksum(local_filepath):
                         f"'{local_filepath}' downloaded but did not pass checksum'"
                     )
                 if not _need_checksum(local_filepath):
-                    logger.warning(
-                        f"Warning skipped checksum for file with hash: {contents_hash}"
-                    )
-                    # This will turn into a proper logger when we implement the datajoint logger
+                    logger.warning(f"Skipped checksum for file with hash: {contents_hash}")
+                    if size != Path(local_filepath).stat().st_size:
+                        # this should never happen without outside interference
+                        raise DataJointError(
+                            f"'{local_filepath}' downloaded but size is not the same (skipped checksum due to config)'"
+                        )
             return str(local_filepath), contents_hash

     # --- UTILITIES ---
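For readers tracing the change: fetching the stored size alongside filepath and contents_hash lets the download path fall back to a byte-count comparison whenever _need_checksum decides the file is too large for a full checksum. A minimal usage sketch, assuming the limit is read from dj.config under the key filepath_checksum_size_limit (the key name is not visible in this hunk and is an assumption):

import datajoint as dj

# Assumed config key: files larger than this many bytes skip the full
# content checksum on download and are verified by stored size instead.
dj.config["filepath_checksum_size_limit"] = 1024**3  # ~1 GiB

With such a limit set, a large downloaded file whose byte count differs from the recorded size still raises DataJointError even though its checksum was never computed.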