Skip to content

Commit ef88478

Browse files
author
Sunil Thaha
authored
Merge pull request #380 from sthaha/fix-estimator-logs
fix(estimator): crash when logging
2 parents cb563bf + c57af08 commit ef88478

File tree

2 files changed: +24 additions, −21 deletions

src/kepler_model/estimate/estimator.py

Lines changed: 8 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,7 @@ def handle_request(data):
4747
try:
4848
power_request = json.loads(data, object_hook=lambda d: PowerRequest(**d))
4949
except Exception as e:
50-
logger.error("fail to handle request: {}".format(e))
50+
logger.error(f"fail to handle request: {e}")
5151
msg = "fail to handle request: {}".format(e)
5252
return {"powers": dict(), "msg": msg}
5353

@@ -70,7 +70,7 @@ def handle_request(data):
7070
current_trainer = loaded_model[output_type.name][power_request.energy_source].trainer_name
7171
request_trainer = current_trainer != power_request.trainer_name
7272
if request_trainer:
73-
logger.info("try obtaining the requesting trainer {} (current: {})".format(power_request.trainer_name, current_trainer))
73+
logger.info(f"try obtaining the requesting trainer {power_request.trainer_name} (current: {current_trainer})")
7474
if power_request.energy_source not in loaded_model[output_type.name] or request_trainer:
7575
output_path = get_download_output_path(download_path, power_request.energy_source, output_type)
7676
if not os.path.exists(output_path):
@@ -84,20 +84,20 @@ def handle_request(data):
8484
logger.error(msg)
8585
return {"powers": dict(), "msg": msg}
8686
else:
87-
logger.info("load model from config: ", output_path)
87+
logger.info(f"load model from config: {output_path}")
8888
else:
89-
logger.info("load model from model server: %s", output_path)
89+
logger.info(f"load model from model server: {output_path}")
9090
loaded_item = load_downloaded_model(power_request.energy_source, output_type)
9191
if loaded_item is not None and loaded_item.estimator is not None:
9292
loaded_model[output_type.name][power_request.energy_source] = loaded_item
93-
logger.info("set model {0} for {2} ({1})".format(loaded_item.model_name, output_type.name, power_request.energy_source))
93+
logger.info(f"set model {loaded_item.model_name} for {output_type.name} ({power_request.energy_source})")
9494
# remove loaded model
9595
shutil.rmtree(output_path)
9696

9797
model = loaded_model[output_type.name][power_request.energy_source]
9898
powers, msg = model.get_power(power_request.datapoint)
9999
if msg != "":
100-
logger.info("{} fail to predict, removed: {}".format(model.model_name, msg))
100+
logger.info(f"{model.model_name} failed to predict; removed: {msg}")
101101
if output_path != "" and os.path.exists(output_path):
102102
shutil.rmtree(output_path)
103103
return {"powers": powers, "msg": msg}
@@ -111,7 +111,7 @@ def start(self):
111111
s = self.socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
112112
s.bind(self.socket_path)
113113
s.listen(1)
114-
logger.info("started serving on {}".format(self.socket_path))
114+
logger.info(f"started serving on {self.socket_path}")
115115
try:
116116
while True:
117117
connection, _ = s.accept()
@@ -121,8 +121,7 @@ def start(self):
121121
os.remove(self.socket_path)
122122
sys.stdout.write("close socket\n")
123123
except Exception as e:
124-
logger.error("fail to close socket: ", e)
125-
pass
124+
logger.error(f"fail to close socket: {e}")
126125

127126
def accepted(self, connection):
128127
data = b""

src/kepler_model/server/model_server.py

Lines changed: 16 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -80,28 +80,28 @@ def select_best_model(spec, valid_groupath, filters, energy_source, pipeline_nam
8080
if len(model_names) > 0 and len(candidates) == 0:
8181
# loosen all spec
8282
candidates = get_largest_candidates(model_names, pipeline_name, nodeCollection, energy_source)
83-
logger.info("no matched models, select from large candidates: %s", candidates)
83+
logger.info(f"no matched models; selecting from large candidates: {candidates}")
8484
if candidates is None:
85-
logger.warn("no large candidates, select from all availables")
85+
logger.warn("no large candidates; selecting from all available")
8686
candidates = model_names
8787
for model_name in candidates:
8888
model_savepath = os.path.join(valid_groupath, model_name)
8989
metadata = load_json(model_savepath, METADATA_FILENAME)
9090
if metadata is None or not is_valid_model(metadata, filters) or ERROR_KEY not in metadata:
9191
# invalid metadata
92-
logger.warn("invalid metadata %s : %s", is_valid_model(metadata, filters), metadata)
92+
logger.warn(f"invalid metadata {is_valid_model(metadata, filters)} : {metadata}")
9393
continue
9494
if weight:
9595
response = load_weight(model_savepath)
9696
if response is None:
9797
# fail to get weight file
98-
logger.warn("weight failed: %s", model_savepath)
98+
logger.warn(f"weight failed: {model_savepath}")
9999
continue
100100
else:
101101
response = get_archived_file(valid_groupath, model_name)
102102
if not os.path.exists(response):
103103
# archived model file does not exists
104-
logger.warn("archive failed: %s", response)
104+
logger.warn(f"archive failed: {response}")
105105
continue
106106
if best_cadidate is None or best_cadidate[ERROR_KEY] > metadata[ERROR_KEY]:
107107
best_cadidate = metadata
@@ -116,7 +116,7 @@ def select_best_model(spec, valid_groupath, filters, energy_source, pipeline_nam
116116
@app.route(MODEL_SERVER_MODEL_REQ_PATH, methods=["POST"])
117117
def get_model():
118118
model_request = request.get_json()
119-
logger.info("get request /model: %s", model_request)
119+
logger.info(f"get request /model: {model_request}")
120120
req = ModelRequest(**model_request)
121121
energy_source = req.source
122122
# TODO: need revisit if get more than one rapl energy source
@@ -222,22 +222,26 @@ def set_pipelines():
222222
pipeline_path = get_pipeline_path(model_toppath, pipeline_name=pipeline_name)
223223
global nodeCollection
224224
nodeCollection[pipeline_name] = NodeTypeIndexCollection(pipeline_path)
225-
logger.info("initial pipeline is loaded to %s", pipeline_path)
225+
logger.info(f"initial pipeline is loaded to {pipeline_path}")
226226
for energy_source in PowerSourceMap.keys():
227227
if os.path.exists(os.path.join(pipeline_path, energy_source)):
228228
pipelineName[energy_source] = pipeline_name
229-
logger.info("set pipeline %s for %s", pipeline_name, energy_source)
229+
logger.info(f"set pipeline {pipeline_name} for {energy_source}")
230230

231231

232232
# load_init_pipeline: load pipeline from URLs and set pipeline variables
233233
def load_init_pipeline():
234234
for initial_pipeline_url in initial_pipeline_urls:
235-
logger.info("downloading archived pipeline from URL: %s", initial_pipeline_url)
235+
logger.info(f"downloading archived pipeline from URL: {initial_pipeline_url}")
236236
response = requests.get(initial_pipeline_url)
237-
logger.debug("response: %s", response)
237+
238+
if logger.isEnabledFor(logging.DEBUG):
239+
logger.debug(f"response: {response}")
240+
238241
if response.status_code != 200:
239-
logger.error("failed to download archieved pipeline - %s", initial_pipeline_url)
242+
logger.error(f"failed to download archived pipeline - status code: {response.status_code}, url: {initial_pipeline_url}")
240243
return
244+
241245
# delete existing default pipeline
242246
basename = os.path.basename(initial_pipeline_url)
243247
pipeline_name = basename.split(".zip")[0]
@@ -254,7 +258,7 @@ def load_init_pipeline():
254258
shutil.unpack_archive(tmp_filepath, pipeline_path)
255259
unpack_zip_files(pipeline_path)
256260
except Exception as e:
257-
logger.error("failed to unpack downloaded pipeline: %s", e)
261+
logger.error(f"failed to unpack downloaded pipeline: {e}")
258262
return
259263
# remove downloaded zip
260264
os.remove(tmp_filepath)

0 commit comments

Comments (0)