@@ -35,8 +35,8 @@ def __init__(self, cfg: config.Config = None):
         self.bucket_manager = qiniu.BucketManager(self.auth, preferred_scheme="https")
 
     def get_object_url(
-        self, bucket: str, key: str, disable_ssl: bool = False, expires: int = 3600
-    ) -> list[str]:
+        self, bucket: str, key: str, disable_ssl: bool = False, expires: int = 3600
+    ) -> list[dict[str, Any]]:
         """
         Get the object.
         :param disable_ssl:
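The return type widens from `list[str]` to `list[dict[str, Any]]`. Judging from the dicts this diff builds further down, each entry pairs the URL with its domain class; a sketch of the new shape (the hosts are made-up placeholders):

```python
[
    {"object_url": "https://cdn.example.com/my-key", "domain_type": "cdn"},
    {"object_url": "https://src.example.com/my-key", "domain_type": "origin"},
]
```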
@@ -47,7 +47,8 @@ def get_object_url(
         Returns object info
         """
         # Get the download domains for the bucket
-        domains_list, domain_response = self.bucket_manager.bucket_domain(bucket)
+        domains_getter = getattr(self.bucket_manager, "_BucketManager__uc_do_with_retrier")
+        domains_list, domain_response = domains_getter('/v3/domains?tbl={0}'.format(bucket))
         if domain_response.status_code != 200:
             raise Exception(
                 f"get bucket domain error:{domain_response.exception} reqId:{domain_response.req_id}"
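The public `bucket_domain` call is swapped for the SDK's internal retrier, fetched through its name-mangled attribute: a double-underscore method `__uc_do_with_retrier` defined on `BucketManager` is only reachable from outside the class as `_BucketManager__uc_do_with_retrier`. A minimal sketch of the mangling rule, independent of the qiniu SDK:

```python
class BucketManager:
    def __uc_do_with_retrier(self, path):
        # Double leading underscore: Python rewrites this attribute name
        # to _BucketManager__uc_do_with_retrier at class-definition time.
        return f"GET {path}"

m = BucketManager()
getter = getattr(m, "_BucketManager__uc_do_with_retrier")
print(getter("/v3/domains?tbl=my-bucket"))  # GET /v3/domains?tbl=my-bucket
```

Reaching into a private helper like this is brittle across SDK upgrades, but it is the only way to the richer `/v3/domains` payload that the loop below consumes.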
@@ -59,7 +60,21 @@ def get_object_url(
             )
 
         http_schema = "https" if not disable_ssl else "http"
-        object_public_urls = {f"{http_schema}://{url}/{key}" for url in domains_list}
+        object_public_urls = []
+        for domain in domains_list:
+            # Skip domains that have been frozen
+            freeze_types = domain.get("freeze_types")
+            if freeze_types is not None:
+                continue
+
+            domain_url = domain.get("domain")
+            if domain_url is None:
+                continue
+
+            object_public_urls.append({
+                "object_url": f"{http_schema}://{domain_url}/{key}",
+                "domain_type": "cdn" if domain.get("domaintype") is None or domain.get("domaintype") == 0 else "origin"
+            })
 
         object_urls = []
         bucket_info, bucket_info_response = self.bucket_manager.bucket_info(bucket)
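The flat set of URL strings becomes a loop over the `/v3/domains` records, so frozen domains can be skipped and the domain class kept. Assuming each record carries the fields the loop reads (`freeze_types`, `domain`, `domaintype`; the sample values below are invented), the filtering can be exercised on its own:

```python
# Hypothetical /v3/domains records, shaped after the fields the loop reads.
domains_list = [
    {"domain": "cdn.example.com", "domaintype": 0},
    {"domain": "frozen.example.com", "domaintype": 0, "freeze_types": ["ICP"]},
    {"domain": "src.example.com", "domaintype": 1},
]

urls = []
for domain in domains_list:
    if domain.get("freeze_types") is not None:  # frozen: unusable for download
        continue
    host = domain.get("domain")
    if host is None:
        continue
    urls.append({
        "object_url": f"https://{host}/my-key",
        # None or 0 is treated as a CDN domain, anything else as origin
        "domain_type": "cdn" if domain.get("domaintype") in (None, 0) else "origin",
    })
# urls keeps cdn.example.com ("cdn") and src.example.com ("origin")
```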
@@ -68,11 +83,15 @@ def get_object_url(
                 f"get bucket domain error:{bucket_info_response.exception} reqId:{bucket_info_response.req_id}"
             )
         if bucket_info["private"] != 0:
-            for url in object_public_urls:
-                object_urls.append(self.auth.private_download_url(url, expires=expires))
+            for url_info in object_public_urls:
+                public_url = url_info.get("object_url")
+                if public_url is None:
+                    continue
+                url_info["object_url"] = self.auth.private_download_url(public_url, expires=expires)
+                object_urls.append(url_info)
         else:
-            for url in object_public_urls:
-                object_urls.append(url)
+            for url_info in object_public_urls:
+                object_urls.append(url_info)
         return object_urls
 
     async def list_buckets(self, prefix: Optional[str] = None) -> List[dict]:
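For private buckets, each entry's URL is now re-signed in place rather than collected separately. `private_download_url` is the qiniu SDK's standard signing helper; a minimal usage sketch with placeholder credentials:

```python
import qiniu

auth = qiniu.Auth("ACCESS_KEY", "SECRET_KEY")  # placeholders, not real keys
# Appends the e= deadline and token= signature query parameters to the URL:
signed = auth.private_download_url("https://cdn.example.com/my-key", expires=3600)
```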
@@ -82,11 +101,11 @@ async def list_buckets(self, prefix: Optional[str] = None) -> List[dict]:
         max_buckets = 50
 
         async with self.s3_session.client(
-            "s3",
-            aws_access_key_id=self.config.access_key,
-            aws_secret_access_key=self.config.secret_key,
-            endpoint_url=self.config.endpoint_url,
-            region_name=self.config.region_name,
+            "s3",
+            aws_access_key_id=self.config.access_key,
+            aws_secret_access_key=self.config.secret_key,
+            endpoint_url=self.config.endpoint_url,
+            region_name=self.config.region_name,
         ) as s3:
             if self.config.buckets:
                 # If buckets are configured, only return those
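`self.s3_session.client(...)` used as an async context manager matches an aioboto3-style session; assuming that is what `s3_session` holds, a self-contained listing call looks like this (endpoint and credentials are placeholders):

```python
import asyncio
import aioboto3

async def main() -> None:
    session = aioboto3.Session()
    async with session.client(
        "s3",
        aws_access_key_id="AK",                 # placeholder
        aws_secret_access_key="SK",             # placeholder
        endpoint_url="https://s3.example.com",  # placeholder
        region_name="us-east-1",
    ) as s3:
        resp = await s3.list_buckets()
        print([b["Name"] for b in resp.get("Buckets", [])])

asyncio.run(main())
```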
@@ -116,7 +135,7 @@ async def list_buckets(self, prefix: Optional[str] = None) -> List[dict]:
         return buckets[:max_buckets]
 
     async def list_objects(
-        self, bucket: str, prefix: str = "", max_keys: int = 20, start_after: str = ""
+        self, bucket: str, prefix: str = "", max_keys: int = 20, start_after: str = ""
     ) -> List[dict]:
         """
         List objects in a specific bucket using async client with pagination
@@ -138,11 +157,11 @@ async def list_objects(
             max_keys = 100
 
         async with self.s3_session.client(
-            "s3",
-            aws_access_key_id=self.config.access_key,
-            aws_secret_access_key=self.config.secret_key,
-            endpoint_url=self.config.endpoint_url,
-            region_name=self.config.region_name,
+            "s3",
+            aws_access_key_id=self.config.access_key,
+            aws_secret_access_key=self.config.secret_key,
+            endpoint_url=self.config.endpoint_url,
+            region_name=self.config.region_name,
         ) as s3:
             response = await s3.list_objects_v2(
                 Bucket=bucket,
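The hunk cuts off mid-call, so the remaining arguments are not shown here. For reference, a generic `list_objects_v2` request wired to the parameters `list_objects` accepts (illustrative, not the author's exact code):

```python
async def list_page(s3, bucket: str, prefix: str, max_keys: int, start_after: str):
    # s3 is an open client, e.g. from the session shown earlier
    response = await s3.list_objects_v2(
        Bucket=bucket,
        Prefix=prefix,           # maps to the prefix parameter
        MaxKeys=max_keys,        # maps to max_keys (capped at 100 above)
        StartAfter=start_after,  # maps to start_after for pagination
    )
    return response.get("Contents", [])
```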
@@ -153,7 +172,7 @@ async def list_objects(
             return response.get("Contents", [])
 
     async def get_object(
-        self, bucket: str, key: str, max_retries: int = 3
+        self, bucket: str, key: str, max_retries: int = 3
     ) -> Dict[str, Any]:
         """
         Get object from S3 using streaming to handle large files and PDFs reliably.
@@ -169,12 +188,12 @@ async def get_object(
         while attempt < max_retries:
             try:
                 async with self.s3_session.client(
-                    "s3",
-                    aws_access_key_id=self.config.access_key,
-                    aws_secret_access_key=self.config.secret_key,
-                    endpoint_url=self.config.endpoint_url,
-                    region_name=self.config.region_name,
-                    config=self.s3_config,
+                    "s3",
+                    aws_access_key_id=self.config.access_key,
+                    aws_secret_access_key=self.config.secret_key,
+                    endpoint_url=self.config.endpoint_url,
+                    region_name=self.config.region_name,
+                    config=self.s3_config,
                 ) as s3:
                     # Get the object and its stream
                     response = await s3.get_object(Bucket=bucket, Key=key)
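The docstring promises streaming for large files; with an aioboto3-style client the returned `Body` is an async stream that can be drained in chunks rather than loaded in one call. A sketch under that assumption (the chunk size and names are illustrative):

```python
async def read_object(s3, bucket: str, key: str) -> bytes:
    response = await s3.get_object(Bucket=bucket, Key=key)
    chunks = []
    async with response["Body"] as stream:          # async streaming body
        while True:
            chunk = await stream.read(1024 * 1024)  # 1 MiB per read
            if not chunk:
                break
            chunks.append(chunk)
    return b"".join(chunks)
```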
@@ -196,7 +215,7 @@ async def get_object(
 
                 attempt += 1
                 if attempt < max_retries:
-                    wait_time = 2 ** attempt
+                    wait_time = 2**attempt
                     logger.warning(
                         f"Attempt {attempt} failed, retrying in {wait_time} seconds: {str(e)}"
                     )
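The retry delay grows exponentially: `2**attempt` waits 2 seconds after the first failure and 4 after the second, with no wait after the final attempt. The same pattern, reduced to a reusable sketch:

```python
import asyncio

async def with_backoff(op, max_retries: int = 3):
    attempt = 0
    while attempt < max_retries:
        try:
            return await op()
        except Exception:
            attempt += 1
            if attempt >= max_retries:
                raise
            await asyncio.sleep(2**attempt)  # 2 s, then 4 s between attempts
```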