
Commit a7e496a

Revert "[dynamo] Record the pre-graph bytecode using fast record function event (pytorch#154769)"
This reverts commit 409c396. Reverted pytorch#154769 on behalf of https://github.com/seemethere because it fails internal tests; see [fburl.com/diff/67gyp7gp](https://fburl.com/diff/67gyp7gp) ([comment](pytorch#154769 (comment)))
1 parent: b86aaaa


4 files changed (+2, -48 lines)


test/inductor/test_compiled_autograd.py

Lines changed: 0 additions & 5 deletions
@@ -103,15 +103,10 @@ def reset():
 
 class TestCompiledAutograd(TestCase):
     def setUp(self) -> None:
-        self.exit_stack = contextlib.ExitStack()
-        self.exit_stack.enter_context(
-            config.patch("record_pre_graph_bytecode_in_traces", False)
-        )
         super().setUp()
         reset()
 
     def tearDown(self) -> None:
-        self.exit_stack.close()
         super().tearDown()
         reset()
 
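
The removed lines follow a standard test-fixture pattern: an `ExitStack` opened in `setUp` holds a `config.patch` override so `tearDown` can unwind it. A minimal sketch of that pattern, assuming `config` is `torch._dynamo.config` (where the flag lived before this revert) and substituting `wrap_top_frame`, a flag that still exists, so the sketch stays runnable:

```python
import contextlib
from torch._dynamo import config

# ExitStack collects cleanups; enter_context applies the override now and
# registers its restoration for later (what setUp did before this revert).
exit_stack = contextlib.ExitStack()
exit_stack.enter_context(config.patch("wrap_top_frame", True))
# ... test body runs with the override active ...
exit_stack.close()  # restores the original value (what tearDown did)
```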

torch/_dynamo/codegen.py

Lines changed: 1 addition & 25 deletions
@@ -23,7 +23,7 @@
 import torch.nn
 from torch.utils._ordered_set import OrderedSet
 
-from . import config, graph_break_hints, utils
+from . import graph_break_hints, utils
 from .bytecode_transformation import (
     add_push_null,
     add_push_null_call_function_ex,
@@ -613,18 +613,6 @@ def collect_temp_source(source):
             if arg.source is not None:
                 collect_temp_source(arg.source)
 
-        cm_var = None
-        if config.record_pre_graph_bytecode_in_traces:
-            # Record the pregraph bytecode start
-            self.add_push_null(
-                lambda: self.load_import_from(
-                    utils.__name__, "record_pregraph_bytecode_enter"
-                )
-            )
-            self.extend_output(create_call_function(0, False))
-            cm_var = self.new_var()
-            self.store(cm_var)
-
         for arg in graphargs:
             if arg.pass_arg_as_tensor:
                 self.add_push_null(
@@ -640,18 +628,6 @@ def collect_temp_source(source):
             else:
                 self.call_reconstruct(arg)
 
-        if config.record_pre_graph_bytecode_in_traces:
-            # Record the pregraph bytecode end
-            self.add_push_null(
-                lambda: self.load_import_from(
-                    utils.__name__, "record_pregraph_bytecode_exit"
-                )
-            )
-            assert cm_var is not None
-            self.extend_output([self.create_load(cm_var)])
-            self.extend_output(create_call_function(1, False))
-            self.pop_top()
-
         self.extend_output(create_call_function(len(graphargs), False))
 
     def load_import_from(self, module_name, object_name) -> None:
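
When the removed flag was on, the emitted bytecode bracketed graph-argument reconstruction with the two helpers deleted from `torch/_dynamo/utils.py` (below). A hedged sketch of the runtime effect of that bytecode, not of codegen itself; these helpers no longer exist after this revert:

```python
from torch._dynamo import utils

# enter helper: pushed, called with no args, result stored in a temp (cm_var)
cm = utils.record_pregraph_bytecode_enter()
# ... the bytecode reconstructing each graph arg executed here ...
# exit helper: pushed, cm_var loaded, called with one arg, result popped
utils.record_pregraph_bytecode_exit(cm)
```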

torch/_dynamo/config.py

Lines changed: 0 additions & 3 deletions
@@ -615,9 +615,6 @@ def default_debug_dir_root():
 # wrapper. This ensures that nn.module hooks are also compiled in the same frame.
 wrap_top_frame = False
 
-# record pre-graph bytecode in profile traces
-record_pre_graph_bytecode_in_traces = True
-
 # HACK: this is for testing custom ops profiling only
 _custom_ops_profile: Optional[Any] = None
 
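
Flags in this file are usually overridden with `config.patch`, the same helper the removed test setup used; it also works as a context manager that restores the prior value on exit. A small sketch using `wrap_top_frame` from the diff context above:

```python
import torch._dynamo.config as dynamo_config

# patch() returns a context manager; the original value is restored on exit.
with dynamo_config.patch("wrap_top_frame", True):
    ...  # compile/run with the override active
```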

torch/_dynamo/utils.py

Lines changed: 1 addition & 15 deletions
@@ -47,7 +47,7 @@
 import warnings
 import weakref
 from collections import Counter, OrderedDict
-from contextlib import AbstractContextManager, contextmanager
+from contextlib import contextmanager
 from dataclasses import is_dataclass
 from functools import lru_cache
 from types import MethodWrapperType
@@ -4670,17 +4670,3 @@ def maybe_disable_inference_mode_for_fake_prop() -> Generator[None, None, None]:
 
 def is_node_meta_valid(node: Optional[torch.fx.Node]) -> bool:
     return node is None or "example_value" in node.meta or "val" in node.meta
-
-
-def record_pregraph_bytecode_enter() -> AbstractContextManager[None]:
-    cm: AbstractContextManager[None] = (
-        torch._C._profiler._RecordFunctionFast("Pregraph bytecode")
-        if torch.autograd.profiler._is_profiler_enabled
-        else contextlib.nullcontext()
-    )
-    cm.__enter__()
-    return cm
-
-
-def record_pregraph_bytecode_exit(cm: AbstractContextManager[None]) -> None:
-    cm.__exit__(None, None, None)
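
The deleted helpers split one context manager across two calls so separately generated bytecode could open and later close a profiler range. Used as an ordinary `with` block, the same pattern looks roughly like this; `_RecordFunctionFast` is a private API, referenced here only because the deleted code uses it:

```python
import contextlib
import torch

def pregraph_event() -> contextlib.AbstractContextManager:
    # Low-overhead profiler event; a no-op when no profiler is running,
    # exactly as the deleted record_pregraph_bytecode_enter chose.
    return (
        torch._C._profiler._RecordFunctionFast("Pregraph bytecode")
        if torch.autograd.profiler._is_profiler_enabled
        else contextlib.nullcontext()
    )

with torch.profiler.profile() as prof:
    with pregraph_event():  # appears as "Pregraph bytecode" in the trace
        torch.randn(8).sum()
print(prof.key_averages())
```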
