Skip to content

Commit e4357a1

Browse files
clarkdykang and meta-codesync[bot]
authored and committed
Remove logger level settings (#1036)
Summary: Pull Request resolved: #1036 - Please see comments in D83398754. It is recommended to set the logging level at the top level only, so remove the module-level debug setting in: - `early_stop_checker`, which also avoids spamming the logs. - `env`, also changing `debug` logs to `info`, since this is usually a one-off setting in the flow and is not expected to spam the logs. Reviewed By: galrotem Differential Revision: D83985240 fbshipit-source-id: da6a9fa87f04464aa30d6d2c28e4cc24f004a502
1 parent d6c41e0 commit e4357a1

File tree

2 files changed

+4
-6
lines changed

2 files changed

+4
-6
lines changed

torchtnt/utils/early_stop_checker.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,6 @@
1414
from typing_extensions import final, Literal
1515

1616
_log: logging.Logger = logging.getLogger(__name__)
17-
_log.setLevel(logging.DEBUG)
1817

1918

2019
@final

torchtnt/utils/env.py

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,6 @@
2626
from typing_extensions import Literal
2727

2828
_log: logging.Logger = logging.getLogger(__name__)
29-
_log.setLevel(logging.DEBUG) # Set logger level to DEBUG to see all messages
3029

3130

3231
def _check_dist_env() -> bool:
@@ -136,23 +135,23 @@ def seed(seed: int, deterministic: Optional[Union[str, int]] = None) -> None:
136135
raise ValueError(
137136
f"Invalid seed value provided: {seed}. Value must be in the range [{min_val}, {max_val}]"
138137
)
139-
_log.debug(f"Setting seed to {seed}")
138+
_log.info(f"Setting seed to {seed}")
140139

141140
torch.manual_seed(seed)
142141
np.random.seed(seed)
143142
random.seed(seed)
144143
os.environ["PYTHONHASHSEED"] = str(seed)
145144

146145
if deterministic is not None:
147-
_log.debug(f"Setting deterministic debug mode to {deterministic}")
146+
_log.info(f"Setting deterministic debug mode to {deterministic}")
148147
torch.set_deterministic_debug_mode(deterministic)
149148
deterministic_debug_mode = torch.get_deterministic_debug_mode()
150149
if deterministic_debug_mode == 0:
151-
_log.debug("Disabling cuDNN deterministic mode")
150+
_log.info("Disabling cuDNN deterministic mode")
152151
torch.backends.cudnn.deterministic = False
153152
torch.backends.cudnn.benchmark = True
154153
else:
155-
_log.debug("Enabling cuDNN deterministic mode")
154+
_log.info("Enabling cuDNN deterministic mode")
156155
torch.backends.cudnn.deterministic = True
157156
torch.backends.cudnn.benchmark = False
158157
# reference: https://docs.nvidia.com/cuda/cublas/index.html#cublasApi_reproducibility

0 commit comments

Comments
 (0)