@@ -20,6 +20,7 @@ load("@bazel_features//:features.bzl", "bazel_features")
 load("//python/private:auth.bzl", "get_auth")
 load("//python/private:envsubst.bzl", "envsubst")
 load("//python/private:normalize_name.bzl", "normalize_name")
+load("//python/private:text_util.bzl", "render")
 load(":parse_simpleapi_html.bzl", "parse_simpleapi_html")
 
 def simpleapi_download(ctx, *, attr, cache, parallel_download = True):
@@ -64,14 +65,20 @@ def simpleapi_download(ctx, *, attr, cache, parallel_download = True):
 
     # NOTE @aignas 2024-03-31: we are not merging results from multiple indexes
     # to replicate how `pip` would handle this case.
-    async_downloads = {}
     contents = {}
     index_urls = [attr.index_url] + attr.extra_index_urls
-    for pkg in attr.sources:
-        pkg_normalized = normalize_name(pkg)
 
-        success = False
-        for index_url in index_urls:
+    found_on_index = {}
+    warn_overrides = False
+    for i, index_url in enumerate(index_urls):
+        if i != 0:
+            # Warn the user about a potential fix for the overrides
+            warn_overrides = True
+
+        async_downloads = {}
+        sources = [pkg for pkg in attr.sources if pkg not in found_on_index]
+        for pkg in sources:
+            pkg_normalized = normalize_name(pkg)
             result = _read_simpleapi(
                 ctx = ctx,
                 url = "{}/{}/".format(
@@ -84,42 +91,44 @@ def simpleapi_download(ctx, *, attr, cache, parallel_download = True):
             )
             if hasattr(result, "wait"):
                 # We will process it in a separate loop:
-                async_downloads.setdefault(pkg_normalized, []).append(
-                    struct(
-                        pkg_normalized = pkg_normalized,
-                        wait = result.wait,
-                    ),
+                async_downloads[pkg] = struct(
+                    pkg_normalized = pkg_normalized,
+                    wait = result.wait,
                 )
-                continue
-
-            if result.success:
+            else:
                 contents[pkg_normalized] = result.output
-                success = True
-                break
-
-        if not async_downloads and not success:
-            fail("Failed to download metadata from urls: {}".format(
-                ", ".join(index_urls),
-            ))
-
-    if not async_downloads:
-        return contents
-
-    # If we use `block` == False, then we need to have a second loop that is
-    # collecting all of the results as they were being downloaded in parallel.
-    for pkg, downloads in async_downloads.items():
-        success = False
-        for download in downloads:
+                found_on_index[pkg] = index_url
+
+        if not async_downloads:
+            continue
+
+        # If we use `block` == False, then we need to have a second loop that is
+        # collecting all of the results as they were being downloaded in parallel.
+        for pkg, download in async_downloads.items():
             result = download.wait()
 
-            if result.success and download.pkg_normalized not in contents:
+            if result.success:
                 contents[download.pkg_normalized] = result.output
-                success = True
-
-        if not success:
-            fail("Failed to download metadata from urls: {}".format(
-                ", ".join(index_urls),
-            ))
+                found_on_index[pkg] = index_url
+
+    failed_sources = [pkg for pkg in attr.sources if pkg not in found_on_index]
+    if failed_sources:
+        fail("Failed to download metadata for {} from urls: {}".format(
+            failed_sources,
+            index_urls,
+        ))
+
+    if warn_overrides:
+        index_url_overrides = {
+            pkg: found_on_index[pkg]
+            for pkg in attr.sources
+            if found_on_index[pkg] != attr.index_url
+        }
+
+        # buildifier: disable=print
+        print("You can use the following `index_url_overrides` to avoid the 404 warnings:\n{}".format(
+            render.dict(index_url_overrides),
+        ))
 
     return contents
 
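The two-phase structure above (queue everything, then `wait()`) relies on Bazel's non-blocking repository downloads: `_read_simpleapi` can return either a finished result or a pending handle, which is why the code branches on `hasattr(result, "wait")`. A minimal sketch of that start-then-collect pattern, assuming a `repository_ctx` on a Bazel version where `download` accepts `block = False` and returns a handle with a `wait()` method (the helper name and output paths are illustrative, not from this change):

```starlark
# Sketch only: the start-then-wait pattern used by the loops above.
# Assumes `ctx.download(block = False)` returns a waitable handle.
def _fetch_in_parallel(ctx, urls):
    # First pass: start every download without blocking.
    pending = {
        url: ctx.download(
            url = url,
            output = "metadata-{}".format(i),  # illustrative output path
            block = False,
        )
        for i, url in enumerate(urls)
    }

    # Second pass: collect results; the downloads have been progressing
    # in parallel while the first loop kept issuing requests.
    return {url: handle.wait() for url, handle in pending.items()}
```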
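The new warning is meant to be actionable: `render.dict` prints the `found_on_index` mapping as a Starlark dict literal that users can paste back into their configuration. A hypothetical example of what that could look like in a `MODULE.bazel` file; the `experimental_*` attribute names, package, and URLs below are assumptions for illustration, not part of this change:

```starlark
# MODULE.bazel -- hypothetical usage of the printed overrides.
pip = use_extension("@rules_python//python/extensions:pip.bzl", "pip")
pip.parse(
    hub_name = "pypi",
    python_version = "3.11",
    requirements_lock = "//:requirements_lock.txt",
    experimental_index_url = "https://pypi.org/simple",
    experimental_extra_index_urls = ["https://download.pytorch.org/whl/cpu"],
    # Pasting the printed mapping here pins each package to the index
    # that actually served it, so the primary index is no longer
    # probed (and no longer 404s) for these packages.
    experimental_index_url_overrides = {
        "torch": "https://download.pytorch.org/whl/cpu",
    },
)
use_repo(pip, "pypi")
```

With the overrides in place, subsequent fetches skip the first-index miss entirely, which is the "potential fix" the `warn_overrides` flag gates.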