Commit 41491c0

Fix test_mtl_e2e_test_a16w8_two_layers_turing_3_1_1k
Differential Revision: D90353766
Pull Request resolved: #16513

1 parent: 10f72fc

File tree

1 file changed: +6 -6 lines changed

backends/cadence/aot/replace_ops.py

Lines changed: 6 additions & 6 deletions
@@ -1665,13 +1665,13 @@ def maybe_remove_or_replace(self, node: torch.fx.Node) -> bool:
 
     def call(self, graph_module: torch.fx.GraphModule) -> PassResult:
         result = super().call(graph_module)
-        # If this pass made modifications, fuse any cascaded view ops that may have been created
-        if result.modified:
-            fuse_cascaded_result = FuseCascadedViewOps().call(result.graph_module)
 
-            # True because we are in the 'if modified' block
-            return PassResult(fuse_cascaded_result.graph_module, True)
-        return result
+        # TODO: I tried conditionally running this only if the above made any modifications,
+        # but for whatever reason was getting numerical failures in
+        # test_mtl_e2e_test_a16w8_two_layers_turing_3_1_1k. Always running this pass
+        # resolved that issue.
+        fuse_cascaded_result = FuseCascadedViewOps().call(result.graph_module)
+        return PassResult(fuse_cascaded_result.graph_module, True)
 
 
 @register_cadence_pass(CadencePassAttribute(opt_level=2))
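For readers unfamiliar with the pattern in the new code: a torch.fx pass returns a PassResult, and a follow-up pass is chained by calling it on result.graph_module. Below is a minimal, hypothetical sketch of that composition using the PassBase/PassResult API from torch.fx.passes.infra.pass_base; AddZeroRemovalPass and ChainedPass are illustrative names, not code from this commit.

    import operator

    import torch
    import torch.fx
    from torch.fx.passes.infra.pass_base import PassBase, PassResult


    class AddZeroRemovalPass(PassBase):
        # Hypothetical base pass: removes redundant `x + 0` nodes.
        def call(self, graph_module: torch.fx.GraphModule) -> PassResult:
            modified = False
            for node in list(graph_module.graph.nodes):
                if (
                    node.op == "call_function"
                    and node.target is operator.add
                    and len(node.args) == 2
                    and node.args[1] == 0
                ):
                    node.replace_all_uses_with(node.args[0])
                    graph_module.graph.erase_node(node)
                    modified = True
            graph_module.recompile()
            return PassResult(graph_module, modified)


    class ChainedPass(AddZeroRemovalPass):
        # Mirrors the commit's structure: run the base pass, then always
        # chain the follow-up pass rather than gating it on result.modified.
        def call(self, graph_module: torch.fx.GraphModule) -> PassResult:
            result = super().call(graph_module)
            follow_up = AddZeroRemovalPass().call(result.graph_module)
            return PassResult(follow_up.graph_module, True)


    class M(torch.nn.Module):
        def forward(self, x):
            return (x + 0) + 0


    gm = torch.fx.symbolic_trace(M())
    res = ChainedPass().call(gm)
    print(res.graph_module.code)  # both `+ 0` nodes are gone

As in the diff, the chained version always runs the follow-up pass and reports modified=True unconditionally; the TODO in the commit explains why gating the follow-up on result.modified was reverted.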
