1414from googleapiclient .http import MediaFileUpload
1515
1616from glob import glob
17+ import httplib2
1718import inspect
1819import imp
1920import json
3536api_base = "http://www.singularity-hub.org/api"
3637
3738# Log everything to stdout
38- logging .basicConfig (stream = sys .stdout )
39+ logging .basicConfig (stream = sys .stdout , level = logging . DEBUG )
3940
4041def google_drive_connect (credential ):
4142
4243 # If it's a dict, assume json and load into credential
44+ if isinstance (credential ,str ):
45+ credential = json .loads (credential )
46+
4347 if isinstance (credential ,dict ):
4448 credential = client .Credentials .new_from_json (json .dumps (credential ))
4549
@@ -102,9 +106,11 @@ def create_file(drive_service,folder_id,file_path,file_name=None,verbose=True):
102106 mimetype = mimetype ,
103107 resumable = True )
104108
105- return drive_service .files ().create (body = file_metadata ,
109+ new_file = drive_service .files ().create (body = file_metadata ,
106110 media_body = media ,
107111 fields = 'id' ).execute ()
112+ new_file ['name' ] = file_name
113+ return new_file
108114
109115
110116def permissions_callback (request_id , response , exception ):
@@ -169,7 +175,10 @@ def get_folder(drive_service,folder_name=None,create=True):
169175def get_download_links (build_files ):
170176 '''get_files will use a drive_service to return a list of build file objects
171177 :param build_files: a list of build_files, each a dictionary with an id for the file
178+ :returns links: a list of dictionaries with included file links
172179 '''
180+ if not isinstance (build_files ,list ):
181+ build_files = [build_files ]
173182 links = []
174183 for build_file in build_files :
175184 link = "https://drive.google.com/uc?export=download&id=%s" % (build_file ['id' ])
@@ -206,10 +215,11 @@ def google_drive_setup(drive_service,image_path=None,base_folder=None):
206215 parent_folder = singularity_folder ['id' ]
207216
208217 return singularity_folder
209-
210218
211- def run_build (build_dir = None ,spec_file = None ,repo_url = None ,token = None ,
212- repo_id = None ,commit = None ,credential = None ,verbose = True ,response_url = None ):
219+
220+ def run_build (build_dir = None ,spec_file = None ,repo_url = None ,token = None ,size = None ,
221+ repo_id = None ,commit = None ,credential = None ,verbose = True ,response_url = None ,
222+ logfile = None ):
213223 '''run_build will generate the Singularity build from a spec_file from a repo_url.
214224 If no arguments are required, the metadata api is queried for the values.
215225 :param build_dir: directory to do the build in. If not specified,
@@ -218,9 +228,11 @@ def run_build(build_dir=None,spec_file=None,repo_url=None,token=None,
218228 :param repo_url: the url to download the repo from
219229 :param repo_id: the repo_id to uniquely identify the repo (in case name changes)
220230 :param commit: the commit to checkout
231+ :param size: the size of the image to build. If none set, builds default 1024.
221232 :param credential: the credential to send the image to.
222233 :param verbose: print out extra details as we go (default True)
223234 :param token: a token to send back to the server to authenticate adding the build
235+ :param logfile: path to a logfile to read and include path in response to server.
224236 :param response_url: the build url to send the response back to. Should also come
225237 from metadata. If not specified, no response is sent
226238 :: note: this function is currently configured to work with Google Compute
@@ -237,48 +249,40 @@ def run_build(build_dir=None,spec_file=None,repo_url=None,token=None,
237249 else :
238250 logging .info ('Build directory set to %s' ,build_dir )
239251
240- # Get variables from the environment
241- if commit == None :
242- commit = get_build_metadata (key = 'commit' )
243- logging .info ('Build directory set to %s' ,build_dir )
244-
245- # cycle through each one, check for metadata from build api
252+ # Get variables from the instance metadata API
246253 metadata = [{'key' : 'repo_url' , 'value' : repo_url , 'return_text' : False },
247- {'key' : 'repo_id' , 'value' , repo_id , 'return_text' : True },
248- {'key' : 'credential' , 'value' , credential , 'return_text' : True },
249- {'key' : 'response_url' , 'value' , reponse_url , 'return_text' : True },
250- {'key' : 'token' , 'value' , token , 'return_text' : False }]
251-
254+ {'key' : 'repo_id' , 'value' : repo_id , 'return_text' : True },
255+ {'key' : 'credential' , 'value' : credential , 'return_text' : True },
256+ {'key' : 'response_url' , 'value' : response_url , 'return_text' : True },
257+ {'key' : 'token' , 'value' : token , 'return_text' : False },
258+ {'key' : 'commit' , 'value' : commit , 'return_text' : True },
259+ {'key' : 'spec_file' , 'value' : spec_file , 'return_text' : True },
260+ {'key' : 'size' , 'value' : size , 'return_text' : True },
261+ {'key' : 'logfile' , 'value' : logfile , 'return_text' : True }]
252262
253263 # Obtain values from build
254- for item in metadata :
255- if item ['value' ] == None :
256- logging .warning ('%s not found in function call.' ,item ['key' ])
257- item ['value' ] = get_build_metadata (key = item ['key' ],
258- return_text = item ['return_text' ])
259- logging .info ('%s is set to %s' ,item ['key' ],item ['value' ])
260-
264+ params = get_build_params (metadata )
261265
262266 # Download the repo and image
263- repo = download_repo (repo_url = repo_url ,
267+ repo = download_repo (repo_url = params [ ' repo_url' ] ,
264268 destination = build_dir )
265269
266270 os .chdir (build_dir )
267- if commit != None :
268- logging .info ('Checking out commit %s' ,commit )
269- os .system ('git checkout %s .' % (commit ))
271+ if params [ ' commit' ] != None :
272+ logging .info ('Checking out commit %s' ,params [ ' commit' ] )
273+ os .system ('git checkout %s .' % (params [ ' commit' ] ))
270274
271275 # From here on out commit is used as a unique id, if we don't have one, randomly make one
272- else commit == None :
273- commit = uuid .uuid4 ().__str__ ()
274- logging .warning ("commit still not found in build, setting unique id to %s" ,commit )
276+ else :
277+ params [ ' commit' ] = uuid .uuid4 ().__str__ ()
278+ logging .warning ("commit still not found in build, setting unique id to %s" ,params [ ' commit' ] )
275279
276280
277- if os .path .exists (spec_file ):
278- logging .info ("Found spec file %s in repository" ,spec_file )
279- image_package = build_from_spec (spec = spec_file ,
280- name = commit ,
281- size = None ,
281+ if os .path .exists (params [ ' spec_file' ] ):
282+ logging .info ("Found spec file %s in repository" ,params [ ' spec_file' ] )
283+ image_package = build_from_spec (spec = params [ ' spec_file' ] ,
284+ name = params [ ' commit' ] ,
285+ size = params [ 'size' ] ,
282286 sudopw = '' , # with root should not need sudo
283287 output_folder = build_dir )
284288
@@ -291,10 +295,10 @@ def run_build(build_dir=None,spec_file=None,repo_url=None,token=None,
291295 zf .extractall (dest_dir )
292296
293297 # The path to the images on google drive will be the github url/commit folder
294- image_path = "%s/%s" % (re .sub ('^http.+//www[.]' ,'' ,repo_url ), commit )
298+ image_path = "%s/%s" % (re .sub ('^http.+//www[.]' ,'' ,params [ ' repo_url' ]), params [ ' commit' ] )
295299 build_files = glob ("%s/*" % (dest_dir ))
296300 logging .info ("Sending build files %s to storage" ,'\n ' .join (build_files ))
297- drive_service = google_drive_connect (credential )
301+ drive_service = google_drive_connect (params [ ' credential' ] )
298302 upload_folder = google_drive_setup (drive_service = drive_service ,
299303 image_path = image_path )
300304
@@ -304,34 +308,41 @@ def run_build(build_dir=None,spec_file=None,repo_url=None,token=None,
304308 drive_file = create_file (drive_service ,
305309 folder_id = upload_folder ['id' ],
306310 file_path = build_file )
307- drive_file ['name' ] = os .path .basename (build_file )
308311 files .append (drive_file )
309312
310313 # Set readable permissions
311314 set_reader_permissions (drive_service ,files )
312315
313316 # Get metadata to return to singularity-hub
314317 download_links = get_download_links (build_files = files )
318+
319+ # If the user has specified a log file, include with data/response
320+ if logfile != None :
321+ log_file = create_file (drive_service ,
322+ folder_id = upload_folder ['id' ],
323+ file_path = logfile )
324+ files .append (log_file )
325+ download_links = download_links + get_download_links (build_files = log_file )
315326
316327 # Finally, package everything to send back to shub
317- response = {"files" :download_links ,
318- "repo_url" :repo_url ,
319- "commit" :commit ,
320- "repo_id" :repo_id }
328+ response = {"files" : download_links ,
329+ "repo_url" : params [ ' repo_url' ] ,
330+ "commit" : params [ ' commit' ] ,
331+ "repo_id" : params [ ' repo_id' ] }
321332
322333 if token != None :
323- response ['token' ] = token
334+ response ['token' ] = params [ ' token' ]
324335
325336 # Send it back!
326- if response_url != None :
327- response = api_put (url = response_url ,
337+ if params [ ' response_url' ] != None :
338+ response = api_put (url = params [ ' response_url' ] ,
328339 data = response ,
329- token = token )
340+ token = params [ ' token' ] )
330341
331342 else :
332343 # Tell the user what is actually there
333344 present_files = glob ("*" )
334- logging .error ("Build file %s not found in repository" ,spec_file )
345+ logging .error ("Build file %s not found in repository" ,params [ ' spec_file' ] )
335346 logging .info ("Found files are %s" ,"\n " .join (present_files ))
336347
337348
@@ -340,25 +351,49 @@ def run_build(build_dir=None,spec_file=None,repo_url=None,token=None,
340351#####################################################################################
341352
342353
def get_build_metadata(key):
    '''get_build_metadata will return metadata about an instance from within it.
    Queries the Google Compute Engine instance metadata server for a custom
    instance attribute.
    :param key: the attribute key to look up
    :returns: the response text on success (HTTP 200), or None on error
    '''
    headers = {"Metadata-Flavor":"Google"}
    url = "http://metadata.google.internal/computeMetadata/v1/instance/attributes/%s" %(key)
    response = api_get(url=url,headers=headers)

    # Successful query returns the result
    if response.status_code == 200:
        # Don't echo secrets (the credential) into the log
        if key != "credential":
            logging.info('Metadata response is %s',response.text)
        return response.text
    else:
        logging.error("Error retrieving metadata %s, returned response %s", key,
                      response.status_code)
        return None
360372
361373
def get_build_params(metadata):
    '''get_build_params resolves the parameters needed for a build: any item
    whose 'value' was not provided in the function call is looked up in the
    instance metadata API via get_build_metadata.
    :param metadata: a list, each item a dictionary of metadata, in format:
        metadata = [{'key': 'repo_url', 'value': repo_url, 'return_text': False },
                    {'key': 'repo_id', 'value': repo_id, 'return_text': True },
                    {'key': 'credential', 'value': credential, 'return_text': True },
                    {'key': 'response_url', 'value': response_url, 'return_text': True },
                    {'key': 'token', 'value': token, 'return_text': False },
                    {'key': 'commit', 'value': commit, 'return_text': True }]
    :returns params: dict mapping each 'key' to its resolved value
    '''
    params = dict()
    for item in metadata:
        # Fall back to the instance metadata API for missing values
        if item['value'] is None:
            logging.warning('%s not found in function call.',item['key'])
            item['value'] = get_build_metadata(key=item['key'])
        params[item['key']] = item['value']
        # Never log the credential value
        if item['key'] != 'credential':
            logging.info('%s is set to %s',item['key'],item['value'])
    return params
395+
396+
362397def sniff_extension (file_path ,verbose = True ):
363398 '''sniff_extension will attempt to determine the file type based on the extension,
364399 and return the proper mimetype
0 commit comments