@@ -339,17 +339,10 @@ def fetch_entries(self):
         fetch_file_path = os.path.join(self.path, "fetch.txt")

         if isfile(fetch_file_path):
-            fetch_file = open(fetch_file_path, 'rb')
-
-            try:
+            with open(fetch_file_path, 'rb') as fetch_file:
                 for line in fetch_file:
                     parts = line.strip().split(None, 2)
                     yield (parts[0], parts[1], parts[2])
-            except Exception:
-                fetch_file.close()
-                raise
-
-            fetch_file.close()

     def files_to_be_fetched(self):
         for f, size, path in self.fetch_entries():
@@ -549,13 +542,10 @@ def _validate_bagittxt(self):
         Verify that bagit.txt conforms to specification
         """
         bagit_file_path = os.path.join(self.path, "bagit.txt")
-        bagit_file = open(bagit_file_path, 'r')
-        try:
+        with open(bagit_file_path, 'r') as bagit_file:
             first_line = bagit_file.readline()
             if first_line.startswith(BOM):
                 raise BagValidationError("bagit.txt must not contain a byte-order mark")
-        finally:
-            bagit_file.close()


 class BagError(Exception):
@@ -622,33 +612,25 @@ def _calculate_file_hashes(full_path, f_hashers):
         raise BagValidationError("%s does not exist" % full_path)

     try:
-        try:
-            f = open(full_path, 'rb')
+        with open(full_path, 'rb') as f:
             while True:
                 block = f.read(1048576)
                 if not block:
                     break
                 for i in list(f_hashers.values()):
                     i.update(block)
-        except IOError as e:
-            raise BagValidationError("could not read %s: %s" % (full_path, str(e)))
-        except OSError as e:
-            raise BagValidationError("could not read %s: %s" % (full_path, str(e)))
-    finally:
-        try:
-            f.close()
-        except:
-            pass
+    except IOError as e:
+        raise BagValidationError("could not read %s: %s" % (full_path, str(e)))
+    except OSError as e:
+        raise BagValidationError("could not read %s: %s" % (full_path, str(e)))

     return dict(
         (alg, h.hexdigest()) for alg, h in list(f_hashers.items())
     )


 def _load_tag_file(tag_file_name):
-    tag_file = open(tag_file_name, 'r')
-
-    try:
+    with open(tag_file_name, 'r') as tag_file:
         # Store duplicate tags as list of vals
         # in order of parsing under the same key.
         tags = {}
@@ -663,9 +645,6 @@ def _load_tag_file(tag_file_name):
                 tags[name].append(value)
         return tags

-    finally:
-        tag_file.close()
-
 def _parse_tags(file):
     """Parses a tag file, according to RFC 2822. This
     includes line folding, permitting extra-long
@@ -768,15 +747,14 @@ def _make_tagmanifest_file(alg, bag_dir):
     for f in files:
         if re.match('^tagmanifest-.+\.txt$', f):
             continue
-        fh = open(join(bag_dir, f), 'rb')
-        m = _hasher(alg)
-        while True:
-            bytes = fh.read(16384)
-            if not bytes:
-                break
-            m.update(bytes)
-        checksums.append((m.hexdigest(), f))
-        fh.close()
+        with open(join(bag_dir, f), 'rb') as fh:
+            m = _hasher(alg)
+            while True:
+                bytes = fh.read(16384)
+                if not bytes:
+                    break
+                m.update(bytes)
+            checksums.append((m.hexdigest(), f))

     with open(join(bag_dir, tagmanifest_file), 'w') as tagmanifest:
         for digest, filename in checksums:
@@ -845,16 +823,16 @@ def _hasher(algorithm='md5'):
     return m

 def _manifest_line(filename, algorithm='md5'):
-    fh = open(filename, 'rb')
-    m = _hasher(algorithm)
-
-    total_bytes = 0
-    while True:
-        bytes = fh.read(16384)
-        total_bytes += len(bytes)
-        if not bytes: break
-        m.update(bytes)
-    fh.close()
+    with open(filename, 'rb') as fh:
+        m = _hasher(algorithm)
+
+        total_bytes = 0
+        while True:
+            bytes = fh.read(16384)
+            total_bytes += len(bytes)
+            if not bytes:
+                break
+            m.update(bytes)

     return (m.hexdigest(), _decode_filename(filename), total_bytes)

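Every hunk above applies the same refactoring: manual open()/close() bookkeeping guarded by try/except/finally is replaced with a with statement, which closes the file both on normal exit and when an exception escapes the block. Below is a minimal standalone sketch of that pattern, reading a file in fixed-size blocks and feeding a hash; the function name, block size, and 'example.bin' path are illustrative only, not part of bagit.py.

    import hashlib

    def block_digest(path, algorithm='md5', block_size=16384):
        # The context manager closes the handle on return or on exception,
        # standing in for the explicit try/finally + close() pairs removed above.
        m = hashlib.new(algorithm)
        with open(path, 'rb') as fh:
            while True:
                block = fh.read(block_size)
                if not block:
                    break
                m.update(block)
        return m.hexdigest()

    if __name__ == '__main__':
        # 'example.bin' is a hypothetical file used only for this sketch.
        print(block_digest('example.bin'))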