Skip to content

Commit 3779028

Browse files
authored
Allow for HOP to be in the etrecord graph
Differential Revision: D80118856 Pull Request resolved: #13385
1 parent fc87462 commit 3779028

File tree

1 file changed

+11
-0
lines changed

1 file changed

+11
-0
lines changed

devtools/debug_format/et_schema.py

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,11 @@
2929
OperatorNode,
3030
ValueNode,
3131
)
32+
33+
from torch._higher_order_ops.auto_functionalize import (
34+
auto_functionalized,
35+
auto_functionalized_v2,
36+
)
3237
from torch._subclasses import FakeTensor
3338

3439

@@ -121,6 +126,12 @@ def _parse_args( # noqa: C901
121126
# pyre-ignore
122127
named_args = node.target._schema.arguments
123128

129+
if node.op == "call_function" and (
130+
node.target == auto_functionalized or node.target == auto_functionalized_v2
131+
):
132+
# for functionalized HOPs, args for the corresponding functional op are stored in kwargs
133+
args = tuple(kwargs.values())
134+
124135
for index, arg in enumerate(args):
125136
if isinstance(arg, torch.fx.node.Node):
126137
if arg.target == exir.memory.alloc:

0 commit comments

Comments
 (0)