1212
1313from googleapiclient .discovery import build
1414from googleapiclient .http import MediaFileUpload
15+
1516from glob import glob
1617import inspect
1718import imp
1819import json
20+ import logging
21+
1922from oauth2client import client
2023from oauth2client .service_account import ServiceAccountCredentials
21- from glob import glob
24+
2225import os
2326import re
2427import requests
2932import uuid
3033import zipfile
3134
32-
3335api_base = "http://www.singularity-hub.org/api"
3436
37+ # Log everything to stdout
38+ logging .basicConfig (stream = sys .stdout )
3539
3640def google_drive_connect (credential ):
3741
@@ -41,7 +45,7 @@ def google_drive_connect(credential):
4145
4246 # If the user has a credential object, check if it's good
4347 if credential .invalid is True :
44- # This probably won't work, needs to be redone on shub server
48+ logging . warning ( 'Storage credential not valid, refreshing.' )
4549 credential .refresh ()
4650
4751 # Authorize with http
@@ -91,19 +95,23 @@ def create_file(drive_service,folder_id,file_path,file_name=None,verbose=True):
9195 'parents' : [ folder_id ]
9296 }
9397
98+ logging .info ('Creating file %s in folder %s with mimetype %s' , file_name ,
99+ folder_id ,
100+ mimetype )
94101 media = MediaFileUpload (file_path ,
95102 mimetype = mimetype ,
96103 resumable = True )
104+
97105 return drive_service .files ().create (body = file_metadata ,
98106 media_body = media ,
99107 fields = 'id' ).execute ()
100108
101109
def permissions_callback(request_id, response, exception):
    """Handle the outcome of one batched Drive permissions request.

    Presumably invoked once per request by the API client's batch
    executor (see ``set_reader_permissions``) — TODO confirm.
    Logs the error on failure, otherwise the granted permission id.
    ``request_id`` is part of the callback signature but unused here.
    """
    if exception:
        logging.error(exception)
        return
    logging.info("Permission Id: %s", response.get('id'))
107115
108116
109117def set_reader_permissions (drive_service ,file_ids ):
@@ -145,11 +153,14 @@ def get_folder(drive_service,folder_name=None,create=True):
145153
146154 for folder in folders ['files' ]:
147155 if folder ['name' ] == folder_name :
156+ logging .info ("Found folder %s in storage" ,folder_name )
148157 return folder
149158
159+ logging .info ("Did not find %s in storage." ,folder_name )
150160 folder = None
151161
152162 if create == True :
163+ logging .info ("Creating folder %s." ,folder_name )
153164 folder = create_folder (drive_service ,folder_name )
154165
155166 return folder
@@ -167,20 +178,24 @@ def get_download_links(build_files):
167178 return links
168179
169180
170- def google_drive_setup (drive_service ,image_path = None ):
181+ def google_drive_setup (drive_service ,image_path = None , base_folder = None ):
171182 '''google_drive_setup will connect to a Google drive, check for the singularity
172183 folder, and if it doesn't exist, create it, along with other collection and image
173184 metadata. The final upload folder for the image and other stuffs is returned
174185 :param image_path: should be the path to the image, from within the singularity-hub folder
175186 (eg, www.github.com/vsoch/singularity-images). If not defined, a folder with the commit id
176187 will be created in the base of the singularity-hub google drive folder
188+ :param base_folder: the parent (base) folder to write to, default is singularity-hub
177189 '''
178- singularity_folder = get_folder (drive_service ,folder_name = 'singularity-hub' )
190+ if base_folder == None :
191+ base_folder = 'singularity-hub'
192+ singularity_folder = get_folder (drive_service ,folder_name = base_folder )
193+ logging .info ("Base folder set to %s" ,base_folder )
179194
180195 # If the user wants a more custom path
181196 if image_path != None :
182-
183197 folders = [x .strip (" " ) for x in image_path .split ("/" )]
198+ logging .info ("Storage path set to %s" ,"=>" .join (folders ))
184199 parent_folder = singularity_folder ['id' ]
185200
186201 for folder in folders :
@@ -211,42 +226,56 @@ def run_build(build_dir=None,spec_file=None,repo_url=None,token=None,
211226 :: note: this function is currently configured to work with Google Compute
212227 Engine metadata api, and should (will) be customized if needed to work elsewhere
213228 '''
229+ # Default spec file is Singularity
230+ if spec_file == None :
231+ spec_file = "Singularity"
214232
233+ # If no build directory is specified, make a temporary one
215234 if build_dir == None :
216235 build_dir = tempfile .mkdtemp ()
236+ logging .warning ('Build directory not set, using %s' ,build_dir )
237+ else :
238+ logging .info ('Build directory set to %s' ,build_dir )
217239
218240 # Get variables from the environment
219241 if commit == None :
220242 commit = get_build_metadata (key = 'commit' )
221- if repo_url == None :
222- repo_url = get_build_metadata (key = 'repo_url' )
223- if repo_id == None :
224- repo_url = get_build_metadata (key = 'repo_id' )
225- if credential == None :
226- credential = get_build_metadata (key = 'credential' ,
227- return_text = False )
228- if build_url == None :
229- response_url = get_build_metadata (key = 'response_url' )
230-
231- # Token is a secret to send back to server to accept response
232- if token == None :
233- token = get_build_metadata (key = 'token' ,
234- return_text = False )
243+ logging .info ('Build directory set to %s' ,build_dir )
244+
245+ # cycle through each one, check for metadata from build api
246+ metadata = [{'key' : 'repo_url' , 'value' : repo_url , 'return_text' : False },
247+ {'key' : 'repo_id' , 'value' , repo_id , 'return_text' : True },
248+ {'key' : 'credential' , 'value' , credential , 'return_text' : True },
249+ {'key' : 'response_url' , 'value' , reponse_url , 'return_text' : True },
250+ {'key' : 'token' , 'value' , token , 'return_text' : False }]
251+
252+
253+ # Obtain values from build
254+ for item in metadata :
255+ if item ['value' ] == None :
256+ logging .warning ('%s not found in function call.' ,item ['key' ])
257+ item ['value' ] = get_build_metadata (key = item ['key' ],
258+ return_text = item ['return_text' ])
259+ logging .info ('%s is set to %s' ,item ['key' ],item ['value' ])
260+
235261
236262 # Download the repo and image
237263 repo = download_repo (repo_url = repo_url ,
238264 destination = build_dir )
239265
240266 os .chdir (build_dir )
241267 if commit != None :
268+ logging .info ('Checking out commit %s' ,commit )
242269 os .system ('git checkout %s .' % (commit ))
243270
244271 # From here on out commit is used as a unique id, if we don't have one, randomly make one
245- if commit == None :
272+ else commit == None :
246273 commit = uuid .uuid4 ().__str__ ()
274+ logging .warning ("commit still not found in build, setting unique id to %s" ,commit )
247275
248- if os .path .exists (spec_file ):
249276
277+ if os .path .exists (spec_file ):
278+ logging .info ("Found spec file %s in repository" ,spec_file )
250279 image_package = build_from_spec (spec = spec_file ,
251280 name = commit ,
252281 size = None ,
@@ -255,6 +284,7 @@ def run_build(build_dir=None,spec_file=None,repo_url=None,token=None,
255284
256285 # If doesn't error, run google_drive_setup and upload image
257286 if os .path .exists (image_package ):
287+ logging .info ("Package %s successfully built" ,image_package )
258288 dest_dir = "%s/build" % (build_dir )
259289 os .mkdir (dest_dir )
260290 with zipfile .ZipFile (image_package ) as zf :
@@ -263,6 +293,7 @@ def run_build(build_dir=None,spec_file=None,repo_url=None,token=None,
263293 # The path to the images on google drive will be the github url/commit folder
264294 image_path = "%s/%s" % (re .sub ('^http.+//www[.]' ,'' ,repo_url ),commit )
265295 build_files = glob ("%s/*" % (dest_dir ))
296+ logging .info ("Sending build files %s to storage" ,'\n ' .join (build_files ))
266297 drive_service = google_drive_connect (credential )
267298 upload_folder = google_drive_setup (drive_service = drive_service ,
268299 image_path = image_path )
@@ -296,6 +327,12 @@ def run_build(build_dir=None,spec_file=None,repo_url=None,token=None,
296327 response = api_put (url = response_url ,
297328 data = response ,
298329 token = token )
330+
331+ else :
332+ # Tell the user what is actually there
333+ present_files = glob ("*" )
334+ logging .error ("Build file %s not found in repository" ,spec_file )
335+ logging .info ("Found files are %s" ,"\n " .join (present_files ))
299336
300337
301338#####################################################################################
@@ -316,6 +353,9 @@ def get_build_metadata(key,return_text=True):
316353 if return_text == True :
317354 return api_get (url = url ,headers = headers ).text
318355 return api_get (url = url ,headers = headers ).json ()
356+ else :
357+ logging .error ("Error retrieving metadata %s, returned response %s" , key ,
358+ response .status_code )
319359 return None
320360
321361
@@ -361,6 +401,6 @@ def sniff_extension(file_path,verbose=True):
361401 mime_type = mime_types ['txt' ]
362402
363403 if verbose == True :
364- print ("%s --> %s" % ( file_path ,mime_type ) )
404+ logging . info ("%s --> %s" , file_path , mime_type )
365405
366406 return mime_type
0 commit comments