1 file changed: +6 −6 lines

@@ -1665,13 +1665,13 @@ def maybe_remove_or_replace(self, node: torch.fx.Node) -> bool:
 
     def call(self, graph_module: torch.fx.GraphModule) -> PassResult:
         result = super().call(graph_module)
-        # If this pass made modifications, fuse any cascaded view ops that may have been created
-        if result.modified:
-            fuse_cascaded_result = FuseCascadedViewOps().call(result.graph_module)
 
-            # True because we are in the 'if modified' block
-            return PassResult(fuse_cascaded_result.graph_module, True)
-        return result
+        # TODO: I tried conditionally running this only if the above made any modifications,
+        # but for whatever reason was getting numerical failures in
+        # test_mtl_e2e_test_a16w8_two_layers_turing_3_1_1k. Always running this pass
+        # resolved that issue.
+        fuse_cascaded_result = FuseCascadedViewOps().call(result.graph_module)
+        return PassResult(fuse_cascaded_result.graph_module, True)
 
 
 @register_cadence_pass(CadencePassAttribute(opt_level=2))
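The patched call() now chains FuseCascadedViewOps after the parent pass unconditionally and reports the graph as modified. Below is a minimal sketch of that chaining pattern against PyTorch's torch.fx pass infrastructure; it is not the repository's code. The helper name run_then_fuse and the base_pass/fusion_pass parameters are hypothetical, and only PassResult and torch.fx.GraphModule are real PyTorch APIs here.

# Minimal sketch of the pass-chaining pattern in the diff above (assumptions noted).
# `base_pass` and `fusion_pass` are any objects exposing a
# `call(graph_module) -> PassResult` interface, e.g. the repo's FuseCascadedViewOps.
import torch
from torch.fx.passes.infra.pass_base import PassResult


def run_then_fuse(base_pass, fusion_pass, graph_module: torch.fx.GraphModule) -> PassResult:
    # Run the primary transformation first.
    result = base_pass.call(graph_module)
    # Always run the view-fusion pass on its output, mirroring the patch,
    # which no longer gates this step on `result.modified`.
    fused = fusion_pass.call(result.graph_module)
    # The patch reports modified=True unconditionally; a stricter variant
    # could return `result.modified or fused.modified` instead.
    return PassResult(fused.graph_module, True)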