@@ -125,7 +125,7 @@ async def get_package_maintainers(package: str) -> list[str]:
     if response.status_code == 200:
         html = response.text
         soup = BeautifulSoup(html, "html.parser")
-        maintainers = soup.find_all("a", class_="package-header__author-link")
+        maintainers = soup.find_all("span", class_="sidebar-section__maintainer")
     if not maintainers:
         return ["unknown (blocked by fastly?)"]
     return [a.text.strip() for a in maintainers]
@@ -156,20 +156,23 @@ async def main():
 
     async with trio.open_nursery() as nursery:
         targets = []
-        for org, repo in todo:
+        semaphore = trio.Semaphore(10)  # Throttle to 10 concurrent requests
+        for org, repo in todo[:10]:
 
             async def _loc(targets, org, repo):
-                maintainers = await get_package_maintainers(repo)
-                targets.append(
-                    (
-                        org,
-                        repo,
+                async with semaphore:  # Wait for semaphore to be available
+                    maintainers = await get_package_maintainers(repo)
+                    targets.append(
                         (
-                            await asks.get(f"https://pypi.org/pypi/{repo}/json")
-                        ).status_code,
-                        maintainers,
+                            org,
+                            repo,
+                            (
+                                await asks.get(f"https://pypi.org/pypi/{repo}/json")
+                            ).status_code,
+                            maintainers,
+                        )
                     )
-                )
+                    print(".", end="", flush=True)
 
             nursery.start_soon(_loc, targets, org, repo)
 
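For context, here is a minimal, standalone sketch of the throttling pattern this commit introduces: a trio.Semaphore capped at 10 gates how many task bodies run at once, even though every task is started immediately in the nursery. The HTTP call is replaced with trio.sleep so the sketch depends only on trio; the names fetch_one and items are illustrative and not from the original script.

import trio


async def fetch_one(semaphore: trio.Semaphore, item: str, results: list) -> None:
    async with semaphore:  # at most 10 of these bodies execute concurrently
        await trio.sleep(0.1)  # stand-in for the real `await asks.get(...)` call
        results.append(item)
        print(".", end="", flush=True)


async def main() -> None:
    items = [f"package-{i}" for i in range(50)]
    results: list[str] = []
    semaphore = trio.Semaphore(10)
    async with trio.open_nursery() as nursery:
        for item in items:
            nursery.start_soon(fetch_one, semaphore, item, results)
    print(f"\ncollected {len(results)} results")


if __name__ == "__main__":
    trio.run(main)

Starting every task up front and letting the semaphore do the rate limiting keeps the nursery code simple, at the cost of holding one task object per work item in memory.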