Commit 5963d03

Clean up command prompt logging with Sharrow turned on (#1014)
* Demoted multiple repetitive logging messages in core.flow from "INFO" to "DEBUG"
* Downgraded repetitive logging message in skim dataset
1 parent 7401f5a commit 5963d03
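
These messages are not lost; a run that still wants the old verbosity can lower the threshold on the affected loggers (or use equivalent entries in a logging configuration file, if the model setup uses one). A minimal sketch, assuming the two modules follow the conventional `logging.getLogger(__name__)` pattern so their logger names match the module paths:

```python
import logging

# Assumption: flow.py and skim_dataset.py create module-level loggers via
# logging.getLogger(__name__), so the logger names match the dotted module paths.
logging.basicConfig(level=logging.DEBUG)  # ensure a handler passes DEBUG records
logging.getLogger("activitysim.core.flow").setLevel(logging.DEBUG)
logging.getLogger("activitysim.core.skim_dataset").setLevel(logging.DEBUG)
```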

File tree

2 files changed (+13 lines, -13 lines)

activitysim/core/flow.py

Lines changed: 11 additions & 11 deletions
```diff
@@ -51,15 +51,15 @@
 
 @contextlib.contextmanager
 def logtime(tag, tag2=""):
-    logger.info(f"begin {tag} {tag2}")
+    logger.debug(f"begin {tag} {tag2}")
     t0 = time.time()
     try:
         yield
     except Exception:
         logger.error(f"error in {tag} after {timedelta(seconds=time.time()-t0)} {tag2}")
         raise
     else:
-        logger.info(f"completed {tag} in {timedelta(seconds=time.time()-t0)} {tag2}")
+        logger.debug(f"completed {tag} in {timedelta(seconds=time.time()-t0)} {tag2}")
 
 
 class TimeLogger:
```
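
For context, `logtime` wraps a block of work and reports its duration; after this change only the routine begin/completed bookkeeping drops to DEBUG, while failures still log at ERROR. A minimal self-contained version of the same pattern (paraphrasing the diff, not the exact ActivitySim source):

```python
import contextlib
import logging
import time
from datetime import timedelta

logger = logging.getLogger(__name__)


@contextlib.contextmanager
def logtime(tag, tag2=""):
    # Routine begin/completed messages are DEBUG, so an INFO-level run stays quiet.
    logger.debug(f"begin {tag} {tag2}")
    t0 = time.time()
    try:
        yield
    except Exception:
        # Failures are still reported loudly, with the elapsed time.
        logger.error(f"error in {tag} after {timedelta(seconds=time.time() - t0)} {tag2}")
        raise
    else:
        logger.debug(f"completed {tag} in {timedelta(seconds=time.time() - t0)} {tag2}")


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    with logtime("example step"):
        time.sleep(0.1)
```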
```diff
@@ -271,11 +271,11 @@ def skims_mapping(
     primary_origin_col_name=None,
     predigitized_time_periods=False,
 ):
-    logger.info("loading skims_mapping")
-    logger.info(f"- orig_col_name: {orig_col_name}")
-    logger.info(f"- dest_col_name: {dest_col_name}")
-    logger.info(f"- stop_col_name: {stop_col_name}")
-    logger.info(f"- primary_origin_col_name: {primary_origin_col_name}")
+    logger.debug("loading skims_mapping")
+    logger.debug(f"- orig_col_name: {orig_col_name}")
+    logger.debug(f"- dest_col_name: {dest_col_name}")
+    logger.debug(f"- stop_col_name: {stop_col_name}")
+    logger.debug(f"- primary_origin_col_name: {primary_origin_col_name}")
     skim_dataset = state.get_injectable("skim_dataset")
     if zone_layer == "maz" or zone_layer is None:
         odim = "omaz" if "omaz" in skim_dataset.dims else "otaz"
@@ -297,7 +297,7 @@ def skims_mapping(
     else:
         raise ValueError(f"unknown zone layer {zone_layer!r}")
     if zone_layer:
-        logger.info(f"- zone_layer: {zone_layer}")
+        logger.debug(f"- zone_layer: {zone_layer}")
     if (
         orig_col_name is not None
         and dest_col_name is not None
@@ -574,7 +574,7 @@ def _apply_filter(_dataset, renames: list):
     if choosers is None:
         logger.info(f"empty flow on {trace_label}")
     else:
-        logger.info(f"{len(choosers)} chooser rows on {trace_label}")
+        logger.debug(f"{len(choosers)} chooser rows on {trace_label}")
     flow_tree = sh.DataTree(df=[] if choosers is None else choosers)
     idx_name = choosers.index.name or "index"
     rename_dataset_cols = [
@@ -598,7 +598,7 @@ def _apply_filter(_dataset, renames: list):
         )
         flow_tree.root_dataset = flow_tree.root_dataset  # apply the filter
     else:
-        logger.info(
+        logger.debug(
             f"{len(choosers)} chooser rows and {len(interacts)} interact rows on {trace_label}"
         )
         top = sh.dataset.from_named_objects(
@@ -697,7 +697,7 @@ def _apply_filter(_dataset, renames: list):
     for i, v in extra_vars.items():
         readme += f"\n - {i}: {v}"
 
-    logger.info(f"setting up sharrow flow {trace_label}")
+    logger.debug(f"setting up sharrow flow {trace_label}")
     extra_hash_data = ()
     if zone_layer:
         extra_hash_data += (zone_layer,)
```

activitysim/core/skim_dataset.py

Lines changed: 2 additions & 2 deletions
```diff
@@ -253,15 +253,15 @@ def set_df(self, df):
             and np.issubdtype(df[self.time_key].dtype, np.integer)
             and df[self.time_key].max() < self.dataset.dims["time_period"]
         ):
-            logger.info(f"natural use for time_period={self.time_key}")
+            logger.debug(f"natural use for time_period={self.time_key}")
             positions["time_period"] = df[self.time_key]
         elif (
             df[self.time_key].dtype == "category"
             and df[self.time_key].dtype == self.time_label_dtype
         ):
             positions["time_period"] = df[self.time_key].cat.codes
         else:
-            logger.info(f"vectorize lookup for time_period={self.time_key}")
+            logger.debug(f"vectorize lookup for time_period={self.time_key}")
             positions["time_period"] = pd.Series(
                 np.vectorize(self.time_map.get, "I")(df[self.time_key], 0),
                 index=df.index,
```
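
The two demoted messages label the two lookup paths for `time_period`: integer codes small enough to index the time dimension are used directly ("natural use"), while anything else is mapped label-by-label through `time_map` ("vectorize lookup"). A rough standalone illustration of the fallback path, using a made-up `time_map` rather than the skim dataset's real one:

```python
import numpy as np
import pandas as pd

# Hypothetical label-to-index mapping standing in for the dataset's time_map.
time_map = {"EA": 0, "AM": 1, "MD": 2, "PM": 3, "EV": 4}

df = pd.DataFrame({"out_period": ["AM", "MD", "PM", "XX"]})

# Fallback path: map each label through the dict with a vectorized .get(),
# defaulting unknown labels to position 0, as the diff's .get(..., 0) does.
positions = pd.Series(
    np.vectorize(time_map.get, "I")(df["out_period"], 0),
    index=df.index,
)
print(positions.tolist())  # [1, 2, 3, 0]
```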
