@@ -168,13 +168,34 @@ async def upload_outputs(
168168
169169 # when having multiple directories it is important to
170170 # run the compression in parallel to guarantee better performance
171+ async def _archive_dir_notified (
172+ dir_to_compress : Path , destination : Path , port_key : ServicePortKey
173+ ) -> None :
174+ # Errors and cancellation can also be triggered from archiving
175+ try :
176+ await archive_dir (
177+ dir_to_compress = dir_to_compress ,
178+ destination = destination ,
179+ compress = False ,
180+ store_relative_path = True ,
181+ progress_bar = sub_progress ,
182+ )
183+ except CancelledError :
184+ await port_notifier .send_output_port_upload_was_aborted (
185+ port_key
186+ )
187+ raise
188+ except Exception :
189+ await port_notifier .send_output_port_upload_finished_with_error (
190+ port_key
191+ )
192+ raise
193+
171194 archiving_tasks .append (
172- archive_dir (
195+ _archive_dir_notified (
173196 dir_to_compress = src_folder ,
174197 destination = tmp_file ,
175- compress = False ,
176- store_relative_path = True ,
177- progress_bar = sub_progress ,
198+ port_key = port .key ,
178199 )
179200 )
180201 ports_values [port .key ] = (
@@ -197,26 +218,7 @@ async def upload_outputs(
197218 logger .debug ("No file %s to fetch port values from" , data_file )
198219
199220 if archiving_tasks :
200- # NOTE: if one archiving task fails/cancelled all the ports are affected
201- # setting all other ports as finished with error/cancelled
202- try :
203- await logged_gather (* archiving_tasks )
204- except CancelledError :
205- await logged_gather (
206- * (
207- port_notifier .send_output_port_upload_was_aborted (p .key )
208- for p in ports_to_set
209- )
210- )
211- raise
212- except Exception :
213- await logged_gather (
214- * (
215- port_notifier .send_output_port_upload_finished_with_error (p .key )
216- for p in ports_to_set
217- )
218- )
219- raise
221+ await logged_gather (* archiving_tasks )
220222
221223 await PORTS .set_multiple (
222224 ports_values ,
0 commit comments