+import sys
 import os
 import codecs
 import shutil
-import requests
+import logging
 
+import requests
+import click
 from flask import Flask, request, json, make_response, send_file
 
 from kepler_model.util.train_types import get_valid_feature_groups, ModelOutputType, FeatureGroups, FeatureGroup, PowerSourceMap, weight_support_trainers
 from kepler_model.util.saver import WEIGHT_FILENAME
 from kepler_model.train import NodeTypeSpec, NodeTypeIndexCollection
 
+logger = logging.getLogger(__name__)
 
 ###############################################
 # model request #
@@ -41,10 +45,7 @@ def __init__(self, metrics, output_type, source="rapl-sysfs", node_type=-1, weig
 
 
 ###########################################
-
-MODEL_SERVER_PORT = 8100
-MODEL_SERVER_PORT = getConfig("MODEL_SERVER_PORT", MODEL_SERVER_PORT)
-MODEL_SERVER_PORT = int(MODEL_SERVER_PORT)
+MODEL_SERVER_PORT = int(getConfig("MODEL_SERVER_PORT", "8100"))
 
 # pipelineName and nodeCollection are global dict values set at initial state (load_init_pipeline)
 ## pipelineName: map of energy_source to target pipeline name
@@ -79,28 +80,28 @@ def select_best_model(spec, valid_groupath, filters, energy_source, pipeline_nam
     if len(model_names) > 0 and len(candidates) == 0:
         # loosen all spec
         candidates = get_largest_candidates(model_names, pipeline_name, nodeCollection, energy_source)
-        print("no matched models, select from large candidates: ", candidates)
+        logger.info("no matched models, select from large candidates: %s", candidates)
         if candidates is None:
-            print("no large candidates, select from all availables")
+            logger.warning("no large candidates, select from all available models")
             candidates = model_names
     for model_name in candidates:
         model_savepath = os.path.join(valid_groupath, model_name)
         metadata = load_json(model_savepath, METADATA_FILENAME)
         if metadata is None or not is_valid_model(metadata, filters) or ERROR_KEY not in metadata:
             # invalid metadata
-            print("invalid", is_valid_model(metadata, filters), metadata)
+            logger.warning("invalid metadata %s: %s", is_valid_model(metadata, filters), metadata)
             continue
         if weight:
             response = load_weight(model_savepath)
             if response is None:
                 # fail to get weight file
-                print("weight failed", model_savepath)
+                logger.warning("weight failed: %s", model_savepath)
                 continue
         else:
             response = get_archived_file(valid_groupath, model_name)
             if not os.path.exists(response):
                 # archived model file does not exist
-                print("archived failed", response)
+                logger.warning("archive failed: %s", response)
                 continue
         if best_cadidate is None or best_cadidate[ERROR_KEY] > metadata[ERROR_KEY]:
             best_cadidate = metadata
@@ -115,7 +116,7 @@ def select_best_model(spec, valid_groupath, filters, energy_source, pipeline_nam
 @app.route(MODEL_SERVER_MODEL_REQ_PATH, methods=["POST"])
 def get_model():
     model_request = request.get_json()
-    print("get request /model: {}".format(model_request))
+    logger.info("get request /model: %s", model_request)
     req = ModelRequest(**model_request)
     energy_source = req.source
     # TODO: revisit if there is more than one rapl energy source
@@ -221,21 +222,21 @@ def set_pipelines():
     pipeline_path = get_pipeline_path(model_toppath, pipeline_name=pipeline_name)
     global nodeCollection
     nodeCollection[pipeline_name] = NodeTypeIndexCollection(pipeline_path)
-    print("initial pipeline is loaded to {}".format(pipeline_path))
+    logger.info("initial pipeline is loaded to %s", pipeline_path)
     for energy_source in PowerSourceMap.keys():
         if os.path.exists(os.path.join(pipeline_path, energy_source)):
             pipelineName[energy_source] = pipeline_name
-            print("set pipeline {} for {}".format(pipeline_name, energy_source))
+            logger.info("set pipeline %s for %s", pipeline_name, energy_source)
 
 
 # load_init_pipeline: load pipeline from URLs and set pipeline variables
 def load_init_pipeline():
     for initial_pipeline_url in initial_pipeline_urls:
-        print("try downloading archieved pipeline from URL: {}".format(initial_pipeline_url))
+        logger.info("downloading archived pipeline from URL: %s", initial_pipeline_url)
         response = requests.get(initial_pipeline_url)
-        print(response)
+        logger.debug("response: %s", response)
        if response.status_code != 200:
-            print("failed to download archieved pipeline.")
+            logger.error("failed to download archived pipeline - %s", initial_pipeline_url)
             return
         # delete existing default pipeline
         basename = os.path.basename(initial_pipeline_url)
@@ -253,17 +254,27 @@ def load_init_pipeline():
             shutil.unpack_archive(tmp_filepath, pipeline_path)
             unpack_zip_files(pipeline_path)
         except Exception as e:
-            print("failed to unpack downloaded pipeline: ", e)
+            logger.error("failed to unpack downloaded pipeline: %s", e)
             return
         # remove downloaded zip
         os.remove(tmp_filepath)
     set_pipelines()
 
 
-def run():
+@click.command()
+@click.option(
+    "--log-level",
+    "-l",
+    type=click.Choice(["debug", "info", "warn", "error"]),
+    default="info",
+    required=False,
+)
+def run(log_level: str):
+    level = getattr(logging, log_level.upper())
+    logging.basicConfig(level=level)
     load_init_pipeline()
     app.run(host="0.0.0.0", port=MODEL_SERVER_PORT)
 
 
 if __name__ == "__main__":
-    run()
+    sys.exit(run())
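
For context, here is a minimal standalone sketch of the new entrypoint wiring introduced by this commit, reduced so the log-level plumbing can be tried in isolation. It is not part of the diff: the script name, the `main` function, and the log messages below are hypothetical; only the click and logging calls that appear in the commit are exercised.

    # sketch.py - hypothetical standalone reproduction of the run() wiring above
    import logging
    import sys

    import click

    logger = logging.getLogger(__name__)


    @click.command()
    @click.option(
        "--log-level",
        "-l",
        type=click.Choice(["debug", "info", "warn", "error"]),
        default="info",
    )
    def main(log_level: str):
        # click.Choice has already validated the value, so getattr() cannot
        # fail here; note that "warn".upper() resolves to logging.WARN, which
        # the stdlib keeps as an alias of logging.WARNING.
        logging.basicConfig(level=getattr(logging, log_level.upper()))
        logger.debug("visible only with --log-level debug")
        logger.info("log level set to %s", log_level)


    if __name__ == "__main__":
        sys.exit(main())

A side effect worth noting: the commit also switches from eager `str.format()` calls to logging's `%s`-style arguments, which are interpolated only when a record actually passes the level check.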