from datetime import datetime
from os import makedirs, path
from re import match, search, sub
-from typing import Dict, Iterable, List, Optional, Set, Type, TypeVar
+from typing import Dict, Iterable, List, Optional, Set, Type, TypeVar, Union

-import boto3
-import botocore
+import boto3  # type: ignore[import]
+import botocore  # type: ignore[import]

from packaging.version import InvalidVersion, parse as _parse_version, Version

@@ -240,13 +240,13 @@ def __lt__(self, other):

def safe_parse_version(ver_str: str) -> Version:
    try:
-        return _parse_version(ver_str)
+        return _parse_version(ver_str)  # type: ignore[return-value]
    except InvalidVersion:
        return Version("0.0.0")


class S3Index:
-    def __init__(self: S3IndexType, objects: List[S3Object], prefix: str) -> None:
+    def __init__(self, objects: List[S3Object], prefix: str) -> None:
        self.objects = objects
        self.prefix = prefix.rstrip("/")
        self.html_name = "index.html"
@@ -256,7 +256,7 @@ def __init__(self: S3IndexType, objects: List[S3Object], prefix: str) -> None:
            path.dirname(obj.key) for obj in objects if path.dirname != prefix
        }

-    def nightly_packages_to_show(self: S3IndexType) -> List[S3Object]:
+    def nightly_packages_to_show(self) -> List[S3Object]:
        """Finding packages to show based on a threshold we specify

        Basically takes our S3 packages, normalizes the version for easier
@@ -326,7 +326,7 @@ def get_package_names(self, subdir: Optional[str] = None) -> List[str]:
            {self.obj_to_package_name(obj) for obj in self.gen_file_list(subdir)}
        )

-    def normalize_package_version(self: S3IndexType, obj: S3Object) -> str:
+    def normalize_package_version(self, obj: S3Object) -> str:
        # removes the GPU specifier from the package name as well as
        # unnecessary things like the file extension, architecture name, etc.
        return sub(r"%2B.*", "", "-".join(path.basename(obj.key).split("-")[:2]))
@@ -498,7 +498,7 @@ def compute_sha256(self) -> None:
            )

    @classmethod
-    def has_public_read(cls: Type[S3IndexType], key: str) -> bool:
+    def has_public_read(cls, key: str) -> bool:
        def is_all_users_group(o) -> bool:
            return (
                o.get("Grantee", {}).get("URI")
@@ -512,11 +512,11 @@ def can_read(o) -> bool:
        return any(is_all_users_group(x) and can_read(x) for x in acl_grants)

    @classmethod
-    def grant_public_read(cls: Type[S3IndexType], key: str) -> None:
+    def grant_public_read(cls, key: str) -> None:
        CLIENT.put_object_acl(Bucket=BUCKET.name, Key=key, ACL="public-read")

    @classmethod
-    def fetch_object_names(cls: Type[S3IndexType], prefix: str) -> List[str]:
+    def fetch_object_names(cls, prefix: str) -> List[str]:
        obj_names = []
        for obj in BUCKET.objects.filter(Prefix=prefix):
            is_acceptable = any(
@@ -531,7 +531,7 @@ def fetch_object_names(cls: Type[S3IndexType], prefix: str) -> List[str]:
                obj_names.append(obj.key)
        return obj_names

-    def fetch_metadata(self: S3IndexType) -> None:
+    def fetch_metadata(self) -> None:
        # Add PEP 503-compatible hashes to URLs to allow clients to avoid spurious downloads, if possible.
        regex_multipart_upload = r"^[A-Za-z0-9+/=]+=-[0-9]+$"
        with concurrent.futures.ThreadPoolExecutor(max_workers=6) as executor:
@@ -565,17 +565,17 @@ def fetch_metadata(self: S3IndexType) -> None:
                if size := response.get("ContentLength"):
                    self.objects[idx].size = int(size)

-    def fetch_pep658(self: S3IndexType) -> None:
+    def fetch_pep658(self) -> None:
        def _fetch_metadata(key: str) -> str:
            try:
                response = CLIENT.head_object(
                    Bucket=BUCKET.name, Key=f"{key}.metadata", ChecksumMode="Enabled"
                )
                sha256 = base64.b64decode(response.get("ChecksumSHA256")).hex()
                return sha256
-            except botocore.exceptions.ClientError as e:
+            except botocore.exceptions.ClientError as e:  # type: ignore[attr-defined]
                if e.response["Error"]["Code"] == "404":
-                    return None
+                    return ""
                raise

        with concurrent.futures.ThreadPoolExecutor(max_workers=6) as executor:
@@ -592,9 +592,7 @@ def _fetch_metadata(key: str) -> str:
                self.objects[idx].pep658 = response

    @classmethod
-    def from_S3(
-        cls: Type[S3IndexType], prefix: str, with_metadata: bool = True
-    ) -> S3IndexType:
+    def from_S3(cls, prefix: str, with_metadata: bool = True) -> "S3Index":
        prefix = prefix.rstrip("/")
        obj_names = cls.fetch_object_names(prefix)
@@ -622,7 +620,7 @@ def sanitize_key(key: str) -> str:
        return rc

    @classmethod
-    def undelete_prefix(cls: Type[S3IndexType], prefix: str) -> None:
+    def undelete_prefix(cls, prefix: str) -> None:
        paginator = CLIENT.get_paginator("list_object_versions")
        for page in paginator.paginate(Bucket=BUCKET.name, Prefix=prefix):
            for obj in page.get("DeleteMarkers", []):
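
The change that repeats through these hunks is dropping the explicit self: S3IndexType and cls: Type[S3IndexType] annotations. Below is a minimal sketch of the two styles, not the file's actual code: it assumes S3IndexType is a TypeVar bound to S3Index defined earlier in manage.py (not shown in this excerpt), and the from_S3_old name and trivial constructor are purely illustrative. A type checker such as mypy infers the types of self and cls on its own, so the bound TypeVar only earns its keep when a method returns the instance or class type, and the new from_S3 covers that case with a plain string forward reference instead.

from typing import Type, TypeVar

S3IndexType = TypeVar("S3IndexType", bound="S3Index")  # assumption: the real TypeVar is defined earlier in manage.py


class S3Index:
    # Old style: spell the bound TypeVar out on cls so the return type follows the class.
    @classmethod
    def from_S3_old(cls: Type[S3IndexType], prefix: str) -> S3IndexType:
        return cls()

    # New style: plain cls, with a string forward reference for the return type.
    @classmethod
    def from_S3(cls, prefix: str) -> "S3Index":
        return cls()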