Skip to content

Commit ed9c4de

Browse files
authored
[cadence] add logging init to cadence tests
Differential Revision: D77624992 Pull Request resolved: #12177
1 parent 2d095b8 commit ed9c4de

File tree

1 file changed

+10
-0
lines changed

1 file changed

+10
-0
lines changed

backends/cadence/aot/tests/test_memory_passes.py

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
66

77
# pyre-strict
88

9+
import logging
910
import math
1011
import unittest
1112
from typing import cast, List, Optional, Sequence
@@ -39,6 +40,15 @@
3940

4041

4142
class TestMemPlanningPasses(unittest.TestCase):
43+
def setUp(self) -> None:
    """Configure root logging before each test.

    Installs a timestamped log format so test output from the memory
    passes is readable. `force=True` replaces any handlers previously
    installed by the test runner or an earlier test, ensuring the
    format applies consistently across the whole suite.
    """
    logging.basicConfig(
        format="%(asctime)s,%(msecs)d %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s",
        datefmt="%Y-%m-%d:%H:%M:%S",
        # Pass the level constant directly; the original round-tripped it
        # through logging.getLevelName(logging.INFO) -> "INFO" -> INFO,
        # a reverse use of getLevelName the logging docs discourage.
        level=logging.INFO,
        force=True,
    )
    return super().setUp()
51+
4252
def test_calculate_peak_memory_pass(self) -> None:
4353
class PeakMemoryTestModel(torch.nn.Module):
4454
def __init__(self, input_dim: int, hidden_dim: int, output_dim: int):

0 commit comments

Comments
 (0)