# Matches an http/https URL, capturing everything up to (but not including)
# a terminating delimiter: closing paren/brace, quote, angle bracket, '#',
# or whitespace.  A trailing delimiter char is REQUIRED, so a URL at the
# very end of a file with no delimiter after it will not match.
URL_REGEX = re.compile(r"""(http[s]*://[^{})"'<>#\s]+)[)"'<>#\s]""")

# Allowlist of URLs — presumably skipped by the checker or always treated
# as reachable; confirm against check_file()'s loop body.
GOOD_URLS = {
    "https://www.aspose.cloud",
    "https://products.aspose.cloud/barcode/",
}

# Maps each broken URL -> list of files it was found in (filled during the
# scan, reported at the end of main()).
BROKEN_URLS = collections.defaultdict(list)
def check_url(url):
    """Return True if *url* is reachable, i.e. ``curl -sSf`` exits with 0.

    A browser-like User-Agent is sent — presumably because some servers
    reject requests from the default curl agent (TODO confirm).  stdout is
    discarded; stderr is left attached so curl's error messages (from -sS)
    still reach the console.
    """
    # subprocess.DEVNULL replaces the original open(os.devnull, "w")
    # context manager — same effect, no file handle to manage.
    ret_code = subprocess.call(
        [
            "curl",
            "-sSf",
            "--user-agent",
            "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:91.0) Gecko/20100101 Firefox/91.0",
            url,
        ],
        stdout=subprocess.DEVNULL,
    )
    return ret_code == 0
1832
1933
2034def check_file (filename ):
21- with open (filename , 'r' ) as f :
35+ with open (filename , "r" ) as f :
2236 urls = frozenset (URL_REGEX .findall (f .read ()))
2337
2438 for url in sorted (urls ):
@@ -40,10 +54,12 @@ def main():
4054 check_file (filename .strip ())
4155
4256 for url , files in BROKEN_URLS .items ():
43- print ("BROKEN URL: '%s' in files: %s" % (url , ', ' .join (files )), file = sys .stderr )
57+ print (
58+ "BROKEN URL: '%s' in files: %s" % (url , ", " .join (files )), file = sys .stderr
59+ )
4460 if BROKEN_URLS :
4561 exit (1 )
4662
4763
# Run the link check only when executed as a script, not on import.
# NOTE(review): the source showed the comparison string with a leading
# space (" __main__"), under which this guard would never match and
# main() would never run; normalized to the standard "__main__".
if __name__ == "__main__":
    main()