 from kepler_model.estimate.archived_model import get_achived_model
 from kepler_model.estimate.model.model import load_downloaded_model
 from kepler_model.estimate.model_server_connector import is_model_server_enabled, make_request
-from kepler_model.train.profiler.node_type_index import get_machine_spec
+from kepler_model.train.profiler.node_type_index import NodeTypeSpec, discover_spec_values, get_machine_spec
 from kepler_model.util.config import SERVE_SOCKET, download_path, set_env_from_model_config
-from kepler_model.util.loader import get_download_output_path
+from kepler_model.util.loader import get_download_output_path, load_metadata
 from kepler_model.util.train_types import ModelOutputType, convert_enery_source, is_output_type_supported
 
 ###############################################
@@ -42,7 +42,7 @@ def __init__(self, metrics, values, output_type, source, system_features, system
 
 loaded_model = dict()
 
-def handle_request(data: str, machine_spec=None) -> dict:
+def handle_request(data: str, machine_spec=None, discovered_core=None) -> dict:
     try:
         power_request = json.loads(data, object_hook=lambda d: PowerRequest(**d))
     except Exception as e:
@@ -94,22 +94,41 @@ def handle_request(data: str, machine_spec=None) -> dict:
         if loaded_item is not None and loaded_item.estimator is not None:
             loaded_model[output_type.name][power_request.energy_source] = loaded_item
             logger.info(f"set model {loaded_item.model_name} for {output_type.name} ({power_request.energy_source})")
+        else:
+            msg = f"load item for {power_request.energy_source} is none"
+            logger.error(msg)
+            return {"powers": dict(), "msg": msg}
 
     model = loaded_model[output_type.name][power_request.energy_source]
     powers, msg = model.get_power(power_request.datapoint)
     if msg != "":
         logger.info(f"{model.model_name} failed to predict; removed: {msg}")
         if output_path != "" and os.path.exists(output_path):
             shutil.rmtree(output_path)
-
-    return {"powers": powers, "msg": msg}
-
+    response = {"powers": powers, "msg": msg}
+    # add core_ratio if applicable
+    core_ratio = 1
+    if discovered_core is not None and discovered_core > 0:
+        metadata = load_metadata(output_path)
+        if metadata is not None and "machine_spec" in metadata:
+            model_spec = NodeTypeSpec(**metadata["machine_spec"])
+            model_cores = model_spec.get_cores()
+            if model_cores > 0:
+                core_ratio = discovered_core / model_cores
+                logger.debug(f"model cores: {model_cores}")
+                logger.debug(f"metadata: {metadata}")
+    response["core_ratio"] = core_ratio
+
+    return response
 
 class EstimatorServer:
     def __init__(self, socket_path, machine_spec):
         self.socket_path = socket_path
         self.machine_spec = machine_spec
-        logger.info(f"initialize EstimatorServer with spec={machine_spec}")
+        spec_values = discover_spec_values()
+        discovered_spec = NodeTypeSpec(**spec_values)
+        self.discovered_core = discovered_spec.get_cores()
+        logger.info(f"initialize EstimatorServer with spec={machine_spec}, discovered_core={self.discovered_core}")
 
     def start(self):
         s = self.socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
@@ -135,11 +154,10 @@ def accepted(self, connection):
             if shunk is None or shunk.decode()[-1] == "}":
                 break
         decoded_data = data.decode()
-        y = handle_request(decoded_data, self.machine_spec)
+        y = handle_request(decoded_data, self.machine_spec, self.discovered_core)
         response = json.dumps(y)
         connection.send(response.encode())
 
-
 def clean_socket():
     logger.info("clean socket")
     if os.path.exists(SERVE_SOCKET):
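
Note: the sketch below is illustrative only and not part of this diff. It shows how a consumer of the estimator response might apply the new core_ratio field, which scales predictions by the ratio of discovered cores to the cores of the machine the model was trained on. The scale_powers helper and the assumed response shape (powers as a mapping of component name to a list of values) are assumptions for the example, not code from the PR.

# Illustrative sketch, not part of this PR: apply the "core_ratio" field
# returned by handle_request to scale predicted powers.
# Assumption: "powers" maps a power component name to a list of numeric values.
def scale_powers(response: dict) -> dict:
    ratio = response.get("core_ratio", 1)  # fall back to 1 when the field is absent
    return {component: [p * ratio for p in values] for component, values in response["powers"].items()}

# example usage with a hypothetical response
scaled = scale_powers({"powers": {"package": [2.5, 3.0]}, "msg": "", "core_ratio": 1.5})
print(scaled)  # {'package': [3.75, 4.5]}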