File tree — 1 file changed: +7 −2 lines changed
backends/xnnpack/test/ops — 1 file changed: +7 −2 lines changed

@@ -24,17 +24,22 @@ def _test_softmax(self, inputs):
         # as xnnpack only supports softmax on the last dimension.
         valid_dims = [len(inputs[0]) - 1, -1]

+        dynamic_shape = {}
+        for i in range(len(inputs[0].shape)):
+            dynamic_shape[i] = torch.export.Dim(f"dynamic_dim{i}", min=1, max=100)
+        dynamic_shape = (dynamic_shape,)
+
         for dim in valid_dims:
             (
-                Tester(self.Softmax(dim), inputs)
+                Tester(self.Softmax(dim), inputs, dynamic_shapes=dynamic_shape)
                 .export()
                 .check_count({"torch.ops.aten.softmax": 1})
                 .to_edge_transform_and_lower()
                 .check_count({"torch.ops.higher_order.executorch_call_delegate": 1})
                 .check_not(["executorch_exir_dialects_edge__ops_aten__softmax_default"])
                 .to_executorch()
                 .serialize()
-                .run_method_and_compare_outputs()
+                .run_method_and_compare_outputs(num_runs=5)
             )

     def test_fp16_softmax(self):
You can’t perform that action at this time.
0 commit comments