Skip to content

Commit 4867e28

Browse files
committed
Move check for recomposition of LayerNorm without bias + add to canonicalization test, as it is implemented as a canonicalization pattern
Signed-off-by: Rickert, Jonas <[email protected]>
1 parent cfe885d commit 4867e28

File tree

2 files changed

+16
-15
lines changed

2 files changed

+16
-15
lines changed

test/mlir/onnx/onnx_canonicalization.mlir

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2375,4 +2375,20 @@ func.func @test_batchnormv9_f32_no_var_mean_use(%arg0: tensor<100x3x10x10xf32>)
23752375
// CHECK: [[VAR_14_:%.+]] = "onnx.Add"([[VAR_10_]], [[VAR_13_]]) : (tensor<100x3x10x10xf32>, tensor<3x1x1xf32>) -> tensor<100x3x10x10xf32>
23762376
// CHECK: return [[VAR_14_]] : tensor<100x3x10x10xf32>
23772377

2378+
// -----
2379+
2380+
// Recognize the bias and fold into LayerNorm.
2381+
func.func @layernorm_without_bias(%arg0: tensor<1x384x768xf32>, %arg1: tensor<768xf32>, %bias: tensor<768xf32>) -> tensor<1x384x768xf32> {
2382+
%0 = "onnx.NoValue"() {value} : () -> none
2383+
%NormScaled, %Mean, %InvStdDev = "onnx.LayerNormalization"(%arg0, %arg1, %0) {axis = 2 : si64, epsilon = 1.200000e+00 : f32, stash_type = 1 : si64} : (tensor<1x384x768xf32>, tensor<768xf32>, none) -> (tensor<1x384x768xf32>, none, none)
2384+
%Y = "onnx.Add"(%bias, %NormScaled) : (tensor<768xf32>, tensor<1x384x768xf32>) -> tensor<1x384x768xf32>
2385+
return %Y : tensor<1x384x768xf32>
2386+
// mlir2FileCheck.py
2387+
// CHECK-LABEL: func.func @layernorm_without_bias
2388+
// CHECK-SAME: ([[PARAM_0_:%.+]]: tensor<1x384x768xf32>, [[PARAM_1_:%.+]]: tensor<768xf32>, [[PARAM_2_:%.+]]: tensor<768xf32>) -> tensor<1x384x768xf32> {
2389+
// CHECK: [[Y_:%.+]], [[Mean_:%.+]], [[VAR_InvStdDev_:%.+]] = "onnx.LayerNormalization"([[PARAM_0_]], [[PARAM_1_]], [[PARAM_2_]]) {axis = 2 : si64, epsilon = 1.200000e+00 : f32, stash_type = 1 : si64} : (tensor<1x384x768xf32>, tensor<768xf32>, tensor<768xf32>) -> (tensor<1x384x768xf32>, none, none)
2390+
// CHECK: return [[Y_]] : tensor<1x384x768xf32>
2391+
// CHECK: }
2392+
}
2393+
23782394

test/mlir/onnx/onnx_recompose.mlir

Lines changed: 0 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -293,21 +293,6 @@ func.func @layernorm_with_bias_switched(%x: tensor<1x384x768xf32>, %scale: tenso
293293
// CHECK: }
294294
}
295295

296-
// -----
297-
298-
// Recognize the bias and fold into LayerNorm.
299-
func.func @layernorm_without_bias(%arg0: tensor<1x384x768xf32>, %arg1: tensor<768xf32>, %bias: tensor<768xf32>) -> tensor<1x384x768xf32> {
300-
%0 = "onnx.NoValue"() {value} : () -> none
301-
%NormScaled, %Mean, %InvStdDev = "onnx.LayerNormalization"(%arg0, %arg1, %0) {axis = 2 : si64, epsilon = 1.200000e+00 : f32, stash_type = 1 : si64} : (tensor<1x384x768xf32>, tensor<768xf32>, none) -> (tensor<1x384x768xf32>, none, none)
302-
%Y = "onnx.Add"(%bias, %NormScaled) : (tensor<768xf32>, tensor<1x384x768xf32>) -> tensor<1x384x768xf32>
303-
return %Y : tensor<1x384x768xf32>
304-
// mlir2FileCheck.py
305-
// CHECK-LABEL: func.func @layernorm_without_bias
306-
// CHECK-SAME: ([[PARAM_0_:%.+]]: tensor<1x384x768xf32>, [[PARAM_1_:%.+]]: tensor<768xf32>, [[PARAM_2_:%.+]]: tensor<768xf32>) -> tensor<1x384x768xf32> {
307-
// CHECK: [[Y_:%.+]], [[Mean_:%.+]], [[VAR_InvStdDev_:%.+]] = "onnx.LayerNormalization"([[PARAM_0_]], [[PARAM_1_]], [[PARAM_2_]]) {axis = 2 : si64, epsilon = 1.200000e+00 : f32, stash_type = 1 : si64} : (tensor<1x384x768xf32>, tensor<768xf32>, tensor<768xf32>) -> (tensor<1x384x768xf32>, none, none)
308-
// CHECK: return [[Y_]] : tensor<1x384x768xf32>
309-
// CHECK: }
310-
}
311296

312297
// -----
313298

0 commit comments

Comments
 (0)