@@ -123,7 +123,7 @@ def determine_key_type(bucket, key, profile):
123123 # The file itself may be a prefix of another file (e.g. abc.vcf.gz vs abc.vcf.gz.tbi)
124124 # but it doesn't matter.
125125 else :
126- return 'File'
126+ return 'File'
127127 else :
128128 # data_file is a folder
129129 return 'Folder'
@@ -217,7 +217,7 @@ def download_workflow():
217217 return
218218 local_wfdir = os .environ .get ('LOCAL_WFDIR' )
219219 subprocess .call (['mkdir' , '-p' , local_wfdir ])
220-
220+
221221 if language in ['wdl' , 'wdl_v1' , 'wdl_draft2' ]:
222222 main_wf = os .environ .get ('MAIN_WDL' , '' )
223223 wf_files = os .environ .get ('WDL_FILES' , '' )
@@ -239,10 +239,10 @@ def download_workflow():
239239 wf_files = [wf_files ]
240240 wf_files .append (main_wf )
241241 wf_url = wf_url .rstrip ('/' )
242-
242+
243243 print ("main workflow file: %s" % main_wf )
244244 print ("workflow files: " + str (wf_files ))
245-
245+
246246 s3 = boto3 .client ('s3' )
247247 for wf_file in wf_files :
248248 target = "%s/%s" % (local_wfdir , wf_file )
@@ -262,7 +262,7 @@ def download_workflow():
262262 targetdir = re .sub ('[^/]+$' , '' , target )
263263 subprocess .call (["mkdir" , "-p" , targetdir ])
264264 s3 .download_file (Bucket = bucket_name , Key = key , Filename = target )
265-
265+
266266
267267def read_md5file (md5file ):
268268 with open (md5file , 'r' ) as md5_f :
@@ -340,7 +340,7 @@ def update_postrun_json_init(json_old, json_new):
340340
341341
342342def update_postrun_json_upload_output (json_old , execution_metadata_file , md5file , json_new ,
343- language = 'cwl_v1' , strict = True , upload = True ):
343+ language = 'cwl_v1' , strict = True , upload = True , endpoint_url = None ):
344344 """Update postrun json with output files.
345345 if strict is set false, it does not check execution metadata is required for cwl/wdl."""
346346 # read old json file and prepare postrunjson skeleton
@@ -362,18 +362,18 @@ def update_postrun_json_upload_output(json_old, execution_metadata_file, md5file
362362
363363 # upload output to S3 (this also updates postrun json)
364364 if upload :
365- upload_output (prj )
365+ upload_output (prj , endpoint_url = endpoint_url )
366366
367367 # write to new json file
368368 write_postrun_json (json_new , prj )
369369
370370
def upload_output(prj, endpoint_url=None):
    """Upload the run's output files to their configured targets.

    Thin wrapper that forwards the output section of the postrun json
    (``prj.Job.Output``), the S3 encryption flag from the run config, and
    an optional custom S3 ``endpoint_url`` to ``upload_to_output_target``,
    which does the actual per-file uploading.
    """
    output_section = prj.Job.Output
    encrypt = prj.config.encrypt_s3_upload
    upload_to_output_target(output_section, encrypt, endpoint_url=endpoint_url)
375375
376- def upload_to_output_target (prj_out , encrypt_s3_upload = False ):
376+ def upload_to_output_target (prj_out , encrypt_s3_upload = False , endpoint_url = None ):
377377 # parsing output_target and uploading output files to output target
378378 output_bucket = prj_out .output_bucket_directory
379379 output_argnames = prj_out .output_files .keys ()
@@ -388,7 +388,7 @@ def upload_to_output_target(prj_out, encrypt_s3_upload=False):
388388 target .parse_custom_target (k , output_target [k ])
389389 if target .is_valid :
390390 print ("Target is valid. Uploading.." )
391- target .upload_to_s3 (encrypt_s3_upload = encrypt_s3_upload )
391+ target .upload_to_s3 (encrypt_s3_upload = encrypt_s3_upload , endpoint_url = endpoint_url )
392392 else :
393393 raise Exception ("Invalid target %s -> %s: failed to upload" % k , output_target [k ])
394394 else :
@@ -397,17 +397,17 @@ def upload_to_output_target(prj_out, encrypt_s3_upload=False):
397397 target .parse_cwl_target (k , output_target .get (k , '' ), prj_out .output_files )
398398 if target .is_valid :
399399 print ("Target is valid. Uploading.." )
400- target .upload_to_s3 (encrypt_s3_upload = encrypt_s3_upload )
400+ target .upload_to_s3 (encrypt_s3_upload = encrypt_s3_upload , endpoint_url = endpoint_url )
401401 prj_out .output_files [k ].add_target (target .dest )
402-
402+
403403 # upload secondary files
404404 secondary_output_files = prj_out .output_files [k ].secondaryFiles
405405 if secondary_output_files :
406406 stlist = SecondaryTargetList (output_bucket )
407407 stlist .parse_target_values (prj_out .secondary_output_target .get (k , []))
408408 stlist .reorder_by_source ([sf .path for sf in secondary_output_files ])
409409 for st in stlist .secondary_targets :
410- st .upload_to_s3 (encrypt_s3_upload = encrypt_s3_upload )
410+ st .upload_to_s3 (encrypt_s3_upload = encrypt_s3_upload , endpoint_url = endpoint_url )
411411 for i , sf in enumerate (secondary_output_files ):
412412 sf .add_target (stlist .secondary_targets [i ].dest )
413413 else :
def update_postrun_json_final(json_old, json_new, logfile=None):
    """Finalize the postrun json: status, time stamps, parsed commands,
    and input/tmp/output sizes."""
    # Read the existing postrun json, finalize it in place, then persist
    # the result to the new json file.
    prj = read_postrun_json(json_old)
    postrun_json_final(prj, logfile=logfile)
    write_postrun_json(json_new, prj)
432432
0 commit comments