We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 41f4ebc commit bbf563c (Copy full SHA for bbf563c)
_unittests/ut_tasks/try_export.py
@@ -222,7 +222,7 @@ def _config_reduction(config, task):
222
self.assertIn('"PackedMultiHeadAttention"', str(model))
223
elif attention == "BIGMASK":
224
self.assertNotIn('"PackedMultiHeadAttention"', str(model))
225
- self.assertNotIn("MultiHeadAttention", str(model))
+ self.assertIn("MultiHeadAttention", str(model))
226
self.assertNotIn("Loop", {n.op_type for n in model.graph.node})
227
elif attention == "LOOPMHA":
228
0 commit comments