@@ -141,6 +141,17 @@ def mask_password(url, secret="*****"):
141141 return url .replace (parsed .password , secret )
142142
143143
def non_negative_int(value):
    """Argparse type validator: accept only integers >= 0.

    Converts *value* (a command-line string) to int and raises
    argparse.ArgumentTypeError for non-integer or negative input, so
    argparse reports a clean usage error instead of a traceback.
    """
    try:
        parsed = int(value)
    except ValueError:
        raise argparse.ArgumentTypeError(f"'{value}' is not a valid integer")
    if parsed >= 0:
        return parsed
    raise argparse.ArgumentTypeError(f"{value} must be 0 or greater")
153+
154+
144155def parse_args (args = None ):
145156 parser = argparse .ArgumentParser (description = "Backup a github account" )
146157 parser .add_argument ("user" , metavar = "USER" , type = str , help = "github username" )
@@ -468,7 +479,7 @@ def parse_args(args=None):
468479 parser .add_argument (
469480 "--retries" ,
470481 dest = "max_retries" ,
471- type = int ,
482+ type = non_negative_int ,
472483 default = 5 ,
473484 help = "maximum number of retries for API calls (default: 5)" ,
474485 )
@@ -626,7 +637,7 @@ def retrieve_data(args, template, query_args=None, paginated=True):
626637 def _extract_next_page_url (link_header ):
627638 for link in link_header .split ("," ):
628639 if 'rel="next"' in link :
629- return link [link .find ("<" ) + 1 : link .find (">" )]
640+ return link [link .find ("<" ) + 1 : link .find (">" )]
630641 return None
631642
632643 def fetch_all () -> Generator [dict , None , None ]:
@@ -635,7 +646,7 @@ def fetch_all() -> Generator[dict, None, None]:
635646 while True :
636647 # FIRST: Fetch response
637648
638- for attempt in range (args .max_retries ):
649+ for attempt in range (args .max_retries + 1 ):
639650 request = _construct_request (
640651 per_page = per_page if paginated else None ,
641652 query_args = query_args ,
@@ -658,10 +669,10 @@ def fetch_all() -> Generator[dict, None, None]:
658669 TimeoutError ,
659670 ) as e :
660671 logger .warning (f"{ type (e ).__name__ } reading response" )
661- if attempt < args .max_retries - 1 :
672+ if attempt < args .max_retries :
662673 delay = calculate_retry_delay (attempt , {})
663674 logger .warning (
664- f"Retrying read in { delay :.1f} s (attempt { attempt + 1 } /{ args .max_retries } )"
675+ f"Retrying read in { delay :.1f} s (attempt { attempt + 1 } /{ args .max_retries + 1 } )"
665676 )
666677 time .sleep (delay )
667678 continue # Next retry attempt
@@ -687,10 +698,10 @@ def fetch_all() -> Generator[dict, None, None]:
687698 )
688699 else :
689700 logger .error (
690- f"Failed to read response after { args .max_retries } attempts for { next_url or template } "
701+ f"Failed to read response after { args .max_retries + 1 } attempts for { next_url or template } "
691702 )
692703 raise Exception (
693- f"Failed to read response after { args .max_retries } attempts for { next_url or template } "
704+ f"Failed to read response after { args .max_retries + 1 } attempts for { next_url or template } "
694705 )
695706
696707 # SECOND: Process and paginate
@@ -734,41 +745,49 @@ def is_retryable_status(status_code, headers):
734745 return int (headers .get ("x-ratelimit-remaining" , 1 )) < 1
735746 return False
736747
737- for attempt in range (max_retries ):
748+ for attempt in range (max_retries + 1 ):
738749 try :
739750 return urlopen (request , context = https_ctx )
740751
741752 except HTTPError as exc :
742753 # HTTPError can be used as a response-like object
743754 if not is_retryable_status (exc .code , exc .headers ):
744- logger .error (f"API Error: { exc .code } { exc .reason } for { request .full_url } " )
755+ logger .error (
756+ f"API Error: { exc .code } { exc .reason } for { request .full_url } "
757+ )
745758 raise # Non-retryable error
746759
747- if attempt >= max_retries - 1 :
748- logger .error (f"HTTP { exc .code } failed after { max_retries } attempts for { request .full_url } " )
760+ if attempt >= max_retries :
761+ logger .error (
762+ f"HTTP { exc .code } failed after { max_retries + 1 } attempts for { request .full_url } "
763+ )
749764 raise
750765
751766 delay = calculate_retry_delay (attempt , exc .headers )
752767 logger .warning (
753768 f"HTTP { exc .code } ({ exc .reason } ), retrying in { delay :.1f} s "
754- f"(attempt { attempt + 1 } /{ max_retries } ) for { request .full_url } "
769+ f"(attempt { attempt + 1 } /{ max_retries + 1 } ) for { request .full_url } "
755770 )
756771 if auth is None and exc .code in (403 , 429 ):
757772 logger .info ("Hint: Authenticate to raise your GitHub rate limit" )
758773 time .sleep (delay )
759774
760775 except (URLError , socket .error ) as e :
761- if attempt >= max_retries - 1 :
762- logger .error (f"Connection error failed after { max_retries } attempts: { e } for { request .full_url } " )
776+ if attempt >= max_retries :
777+ logger .error (
778+ f"Connection error failed after { max_retries + 1 } attempts: { e } for { request .full_url } "
779+ )
763780 raise
764781 delay = calculate_retry_delay (attempt , {})
765782 logger .warning (
766783 f"Connection error: { e } , retrying in { delay :.1f} s "
767- f"(attempt { attempt + 1 } /{ max_retries } ) for { request .full_url } "
784+ f"(attempt { attempt + 1 } /{ max_retries + 1 } ) for { request .full_url } "
768785 )
769786 time .sleep (delay )
770787
771- raise Exception (f"Request failed after { max_retries } attempts" ) # pragma: no cover
788+ raise Exception (
789+ f"Request failed after { max_retries + 1 } attempts"
790+ ) # pragma: no cover
772791
773792
774793def _construct_request (per_page , query_args , template , auth , as_app = None , fine = False ):
@@ -1584,9 +1603,7 @@ def filter_repositories(args, unfiltered_repositories):
15841603 repositories = [r for r in repositories if not r .get ("archived" )]
15851604 if args .starred_skip_size_over is not None :
15861605 if args .starred_skip_size_over <= 0 :
1587- logger .warning (
1588- "--starred-skip-size-over must be greater than 0, ignoring"
1589- )
1606+ logger .warning ("--starred-skip-size-over must be greater than 0, ignoring" )
15901607 else :
15911608 size_limit_kb = args .starred_skip_size_over * 1024
15921609 filtered = []
@@ -1595,7 +1612,9 @@ def filter_repositories(args, unfiltered_repositories):
15951612 size_mb = r .get ("size" , 0 ) / 1024
15961613 logger .info (
15971614 "Skipping starred repo {0} ({1:.0f} MB) due to --starred-skip-size-over {2}" .format (
1598- r .get ("full_name" , r .get ("name" )), size_mb , args .starred_skip_size_over
1615+ r .get ("full_name" , r .get ("name" )),
1616+ size_mb ,
1617+ args .starred_skip_size_over ,
15991618 )
16001619 )
16011620 else :
0 commit comments