Commit 6f60c65

Revert "[dynamo] Log guard latency (pytorch#145132)"

This reverts commit 0a310d7. pytorch#145132 was reverted on behalf of https://github.com/anijain2305 due to CI failures observed after the PR was merged ([comment](pytorch#145132 (comment))).

1 parent f0e9f87 · commit 6f60c65

4 files changed: +23, -31 lines

test/dynamo/test_utils.py

Lines changed: 0 additions & 3 deletions
@@ -212,8 +212,6 @@ def test_dynamo_timed(self, mock_time, mock_time_ns):
         # much easier.
         raw = dataclasses.asdict(compilation_events[0])
         del raw["feature_usage"]
-        # guard_latency_us is not deterministic
-        del raw["guard_latency_us"]
         self.assertExpectedInline(
             pprint.pformat(raw),
             """\
@@ -294,7 +292,6 @@ def test_dynamo_timed(self, mock_time, mock_time_ns):
         # Second event is for the backward
         raw = dataclasses.asdict(compilation_events[1])
         del raw["feature_usage"]
-        del raw["guard_latency_us"]
         self.assertExpectedInline(
             pprint.pformat(raw),
             """\

torch/_dynamo/guards.py

Lines changed: 0 additions & 2 deletions
@@ -48,7 +48,6 @@
     TensorProperty,
     TensorPropertySource,
 )
-from torch._dynamo.utils import CompileEventLogger
 from torch._guards import (
     CompileContext,
     CompileId,
@@ -2481,7 +2480,6 @@ def cleanup_builder(weak_b):
             self.guard_manager.root, output_graph.local_scope, 50
         )
         guards_log.debug("Guard eval latency = %s us", f"{latency:.2f}")
-        CompileEventLogger.compilation_metric(guard_latency_us=latency)
 
         # NB - We have to very careful of cleaning up here. Because of the
         # invalidate function, we can create a weakref finalizer that keeps
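
The deleted call is the logging half of the feature: the surrounding code, partially visible in the hunk, computes a guard-evaluation latency in microseconds, and the removed line forwarded it to the compilation-metrics sink. A minimal sketch of that measure-then-record shape; `run_guards` and `record_metric` are hypothetical stand-ins for the real guard manager and `CompileEventLogger.compilation_metric`, and reading the `50` in the hunk as an iteration count is an assumption:

```python
import time

def run_guards() -> bool:
    return True  # stand-in for evaluating the installed guards

def record_metric(**kwargs) -> None:
    print("compilation metric:", kwargs)  # stand-in for the metrics sink

N = 50  # assumed: repeated evaluations for a stable estimate
start = time.perf_counter()
for _ in range(N):
    run_guards()
latency = (time.perf_counter() - start) / N * 1e6  # microseconds per evaluation

print(f"Guard eval latency = {latency:.2f} us")  # mirrors the guards_log.debug line
record_metric(guard_latency_us=latency)          # the call this commit deletes
```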

torch/_dynamo/testing.py

Lines changed: 23 additions & 25 deletions
@@ -181,32 +181,30 @@ def insert_nops(instructions: list[Any], code_options: Any) -> None:
         instructions.insert(0, create_instruction("NOP"))
         instructions.insert(0, create_instruction("NOP"))
 
-    metrics_context = torch._dynamo.utils.get_metrics_context()
-    with torch._dynamo.utils.dynamo_timed("debug_insert_nops"), metrics_context:
-        if is_generator(frame.f_code):
-            return None
-
-        debug_checks(frame.f_code)
-        code = transform_code_object(frame.f_code, insert_nops)
-        graph = OutputGraph(
-            code_options={},
-            compiler_fn=None,
-            root_tx=None,
-            export=False,
-            export_constraints=None,
-            frame_state={"_id": 0},
-            # TODO: shouldn't this be f_locals/f_globals from frame?
-            local_scope=locals(),
-            global_scope=globals(),
-            f_code=frame.f_code,
-            torch_function_mode_stack=[],
-        )
+    if is_generator(frame.f_code):
+        return None
 
-        return GuardedCode(
-            code,
-            CheckFunctionManager(frame.f_code, graph).guard_manager,  # type: ignore[arg-type]
-            CompileId(frame_id=0, frame_compile_id=0),
-        )
+    debug_checks(frame.f_code)
+    code = transform_code_object(frame.f_code, insert_nops)
+    graph = OutputGraph(
+        code_options={},
+        compiler_fn=None,
+        root_tx=None,
+        export=False,
+        export_constraints=None,
+        frame_state={"_id": 0},
+        # TODO: shouldn't this be f_locals/f_globals from frame?
+        local_scope=locals(),
+        global_scope=globals(),
+        f_code=frame.f_code,
+        torch_function_mode_stack=[],
+    )
+
+    return GuardedCode(
+        code,
+        CheckFunctionManager(frame.f_code, graph).guard_manager,  # type: ignore[arg-type]
+        CompileId(frame_id=0, frame_compile_id=0),
+    )
 
 
 class CompileCounter:
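
What the revert undoes here is an instrumentation wrapper: pytorch#145132 had nested the body of the debug backend inside `dynamo_timed` plus a metrics context, and the revert dedents it back out. A minimal sketch of that wrap-the-body pattern, with `timed_region` as a hypothetical stand-in for `torch._dynamo.utils.dynamo_timed` and `compile_frame` standing in for the transform-and-return body:

```python
import contextlib
import time

@contextlib.contextmanager
def timed_region(name: str):
    # Hypothetical stand-in for dynamo_timed: time the enclosed block.
    start = time.perf_counter()
    try:
        yield
    finally:
        print(f"{name} took {(time.perf_counter() - start) * 1e6:.1f} us")

def compile_frame() -> str:
    return "guarded_code"  # stand-in for transform + GuardedCode construction

def debug_insert_nops_before_revert():
    # Pre-revert shape: the whole body runs inside the timing/metrics context.
    with timed_region("debug_insert_nops"):
        return compile_frame()

def debug_insert_nops_after_revert():
    # Post-revert shape: same body, no instrumentation wrapper.
    return compile_frame()

debug_insert_nops_before_revert()
debug_insert_nops_after_revert()
```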

torch/_dynamo/utils.py

Lines changed: 0 additions & 1 deletion
@@ -1186,7 +1186,6 @@ class CompilationMetrics:
     tensorify_float_attempt: Optional[bool] = None
     tensorify_float_success: Optional[bool] = None
     tensorify_float_failure: Optional[set[str]] = None
-    guard_latency_us: Optional[float] = None
 
     @classmethod
     def create(cls, metrics: dict[str, Any]):
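
The removed line is a single optional column on the metrics dataclass; the neighboring fields all default to None, so an instance can be built from whichever metrics a given compile actually recorded. A minimal sketch of that pattern — `Metrics` and its `create()` filter are illustrative guesses, not the real `CompilationMetrics`:

```python
from dataclasses import dataclass, fields
from typing import Optional

@dataclass
class Metrics:
    tensorify_float_attempt: Optional[bool] = None
    guard_latency_us: Optional[float] = None  # the field this commit removes

    @classmethod
    def create(cls, metrics: dict):
        # Keep only keys that are declared fields; a guess at what the real
        # CompilationMetrics.create filters on.
        declared = {f.name for f in fields(cls)}
        return cls(**{k: v for k, v in metrics.items() if k in declared})

m = Metrics.create({"guard_latency_us": 87.5, "not_a_field": 1})
assert m.guard_latency_us == 87.5 and m.tensorify_float_attempt is None
```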
