 from biocontainersci.utils import send_github_pr_comment, send_status, BiocontainersCIException
 from biocontainersci.biotools import Biotools
 
+
 class CI:
     '''
     Class to manage build/check of containers
@@ -43,13 +44,13 @@ def local_name(self, f):
             return None
         return self.config['registry']['url'] + '/biocontainers/' + f['container'] + ':' + f['tag']
 
-    def run_test(self, f:dict, test: str):
+    def run_test(self, f: dict, test: str):
         '''
         Execute a test against container
         '''
         logging.info("[ci][test] run test: " + test)
         base_container_name = self.name(f)
-        volumes = {}
+        volumes = {}
         volumes[self.workdir()] = {'bind': '/biocontainers', 'mode': 'ro'}
         logs = self.docker_client.containers.run(
             base_container_name,
@@ -94,7 +95,6 @@ def docker_logs(self, build_logs):
                 for line in chunk['stream'].splitlines():
                     logging.info(line)
 
-
     def docker_push(self, repo, auth_config=None):
         '''
         Push to registry
@@ -140,7 +140,6 @@ def biotools(self, f, labels):
         )
         '''
 
-
     def anchore(self, f):
         '''
         Add image to anchore security scan
@@ -189,26 +188,27 @@ def singularity(self, f):
             logging.exception('[ci][singularity] convert failed: ' + str(e))
             raise BiocontainersCIException('singularity conversion failed')
 
-
         if self.config['dry']:
             logging.info('[ci][singularity] dry mode, do not push image')
             return
         try:
-            s3_client = boto3.client(service_name="s3", region_name=self.config['s3']['region'],
-                                     endpoint_url=self.config['s3']['endpoint'],
-                                     verify=False,
-                                     aws_access_key_id=self.config['s3']['access_key'],
-                                     aws_secret_access_key=self.config['s3']['secret_access_key'])
-            s3_client.upload_file(sing_image, self.config['s3']['bucket'], 'SingImgsRepo/' + f['container'] + '/' + f['tag'] + '/' + f['container'] + '_' + f['tag'] + '.sif')
-        except Exception as e:
+            s3_client = boto3.client(
+                service_name="s3",
+                region_name=self.config['s3']['region'],
+                endpoint_url=self.config['s3']['endpoint'],
+                verify=False,
+                aws_access_key_id=self.config['s3']['access_key'],
+                aws_secret_access_key=self.config['s3']['secret_access_key'])
+
+            s3_client.upload_file(sing_image, self.config['s3']['bucket'], 'SingImgsRepo/' + f['container'] + '/' + f['tag'] + '/' + f['container'] + '_' + f['tag'] + '.sif')
+        except Exception:
             os.unlink(sing_image)
             raise BiocontainersCIException('singularity s3 upload failed')
         # need to be root...
         os.unlink(sing_image)
-        #s3 = boto3.resource('s3')
-        #data = open('/tmp/singimage', 'rb')
-        #s3.Bucket(self.config['s3']['bucket']).put_object(Key='SingImgsRepo/'+f['container'] + '/' + f['tag'] + '/' + f['container'] + '_' + f['tag'] + '.img', Body=data)
-
+        # s3 = boto3.resource('s3')
+        # data = open('/tmp/singimage', 'rb')
+        # s3.Bucket(self.config['s3']['bucket']).put_object(Key='SingImgsRepo/'+f['container'] + '/' + f['tag'] + '/' + f['container'] + '_' + f['tag'] + '.img', Body=data)
 
     def workdir(self):
         return os.environ.get('GITHUB_WORKSPACE', os.getcwd())
@@ -227,12 +227,12 @@ def workflow(self, f):
         # check for dockerfile
         with open(os.path.join(self.workdir(), f['container'], f['version'], 'Dockerfile'), 'r') as d:
             lines = d.readlines()
-            for l in lines:
-                if '.aws' in l:
+            for line in lines:
+                if '.aws' in line:
                     logging.error('[ci] private biocontainers-ci directory access in dockerfile forbiden')
                     send_github_pr_comment(self.config, 'Forbiden access to biocontainers-ci private files in Dockerfile')
                     raise BiocontainersCIException('private biocontainers-ci directory access in dockerfile forbiden')
-                if 'etc/biocontainers-ci' in l:
+                if 'etc/biocontainers-ci' in line:
                     logging.error('[ci] private biocontainers-ci directory access in dockerfile forbiden')
                     send_github_pr_comment(self.config, 'Forbiden access to biocontainers-ci directory in Dockerfile')
                     raise BiocontainersCIException('private biocontainers-ci directory access in dockerfile forbiden')
@@ -312,7 +312,7 @@ def workflow(self, f):
         except Exception as e:
             logging.exception('[ci][workflow] error: ' + str(e))
             status = False
-
+
         try:
             self.docker_client.images.remove(image=self.name(f), force=True)
         except Exception:
@@ -332,25 +332,23 @@ def workflow(self, f):
         logging.info('Docker containers prune')
         self.docker_client.containers.prune()
         return status
-
-
 
     '''
     Check labels in docker image
     '''
-    def check_labels(self, f:dict, labels:dict):
+    def check_labels(self, f: dict, labels: dict):
         label_errors = []
         software = 'unknown'
         if 'software' not in labels or not labels['software']:
             label_errors.append('software label not present')
             status = False
         else:
-            software = labels['software']
-            #labels['software'].strip()
-            pattern = re.compile("^([a-z0-9_-])+$")
+            software = labels['software']
+            # labels['software'].strip()
+            pattern = re.compile("^([a-z0-9_-])+$")
             if pattern.match(labels['software']) is None:
                 logging.warning('[ci][labels] ' + software + " has invalid name, using directory name")
-                software = f['container']
+                software = f['container']
             labels['container'] = f['container']
 
         if 'base_image' not in labels or not labels['base_image']:
@@ -419,7 +417,7 @@ def check_labels(self, f:dict, labels:dict):
         else:
             bio = requests.get('https://bio.tools/api/tool/' + str(software) + '/?format=json')
             if bio.status_code != 404:
-                send_github_pr_comment(self.config, 'Found a biotools entry matching the software name (https://bio.tools/' + labels['software']+ '), if this is the same software, please add the extra.identifiers.biotools label to your Dockerfile')
+                send_github_pr_comment(self.config, 'Found a biotools entry matching the software name (https://bio.tools/' + labels['software'] + '), if this is the same software, please add the extra.identifiers.biotools label to your Dockerfile')
             else:
                 send_github_pr_comment(self.config, 'No biotools label defined, please check if tool is not already defined in biotools (https://bio.tools) and add extra.identifiers.biotools label if it exists. If it is not defined, you can ignore this comment.')
 
@@ -434,7 +432,7 @@ def check_labels(self, f:dict, labels:dict):
                 logging.info("biotools entry is ok")
 
         # Check if exists in conda
-        conda_url = 'https://bioconda.github.io/recipes/' + labels['software']+ '/README.html'
+        conda_url = 'https://bioconda.github.io/recipes/' + labels['software'] + '/README.html'
         conda = requests.get(conda_url)
         if conda.status_code == 200:
             send_github_pr_comment(self.config, 'Found an existing bioconda package for this software (' + conda_url + '), is this the same, then you should update the recipe in bioconda to avoid duplicates.')