Commit b3424d5

[mlir][tosa] Add missing verifier for tosa.pad
This PR adds a missing verifier for `tosa.pad`, ensuring that the padding shape matches [rank(input), 2].
1 parent: 7b23f41
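
For illustration, a minimal valid use under the new constraint (a sketch, not taken from this commit; the function name and the dynamic result type are assumed): a rank-3 input requires a padding operand of shape 3x2.

func.func @pad_example(%arg0: tensor<13x21x3xf32>, %arg1: tensor<3x2xi32>) -> tensor<?x?x?xf32> {
  // The padding operand has shape [rank(input1), 2] = [3, 2], so it satisfies the new verifier.
  %0 = tosa.pad %arg0, %arg1 : (tensor<13x21x3xf32>, tensor<3x2xi32>) -> tensor<?x?x?xf32>
  return %0 : tensor<?x?x?xf32>
}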

3 files changed: +28 −5 lines

mlir/include/mlir/Dialect/Tosa/IR/TosaOps.td

Lines changed: 1 addition & 1 deletion
@@ -1566,7 +1566,7 @@ def Tosa_PadOp : Tosa_InferShapedTypeOp<"pad"> {
 
   let arguments = (ins
     Tosa_RankedTensor:$input1,
-    Tosa_Int32Or64Tensor:$padding,
+    2DTensorOf<[Tosa_Int32Or64]>:$padding,
     Optional<Tosa_ScalarTensor>:$pad_const,
     OptionalAttr<Tosa_PadOpQuantizationAttr>:$quantization_info
   );

mlir/lib/Dialect/Tosa/IR/TosaOps.cpp

Lines changed: 10 additions & 3 deletions
@@ -823,13 +823,20 @@ LogicalResult tosa::PadOp::inferReturnTypeComponents(
 LogicalResult tosa::PadOp::verify() {
   RankedTensorType inputType = getInput1().getType();
   RankedTensorType outputType = getOutput().getType();
-  TensorType paddingType = getPadding().getType();
+  RankedTensorType paddingType = getPadding().getType();
 
   if (inputType.getRank() != outputType.getRank())
     return emitOpError() << "expect same input and output tensor rank.";
 
-  if (paddingType.hasRank() && paddingType.getRank() != 2)
-    return emitOpError() << "expect 'padding' tensor rank equal to 2.";
+  if (!paddingType.isDynamicDim(0) &&
+      paddingType.getDimSize(0) != inputType.getRank())
+    return emitOpError() << "expected padding tensor dim 0 to have size "
+                         << inputType.getRank() << " (input rank) but got size "
+                         << paddingType.getDimSize(0);
+
+  if (!paddingType.isDynamicDim(1) && paddingType.getDimSize(1) != 2)
+    return emitOpError() << "expected padding tensor dim 1 to have size 2 "
+                         << "but got size " << paddingType.getDimSize(1);
 
   return success();
 }
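
Each of the new checks is guarded by isDynamicDim, so a padding operand with a dynamic dimension still verifies. A sketch of that case (illustrative only, not part of this commit; the function name is assumed):

func.func @pad_dynamic_padding_dim(%arg0: tensor<13x21x3xf32>, %arg1: tensor<?x2xi32>) -> tensor<?x?x?xf32> {
  // Dim 0 of the padding operand is dynamic, so the size-vs-input-rank check is skipped.
  %0 = tosa.pad %arg0, %arg1 : (tensor<13x21x3xf32>, tensor<?x2xi32>) -> tensor<?x?x?xf32>
  return %0 : tensor<?x?x?xf32>
}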

mlir/test/Dialect/Tosa/invalid.mlir

Lines changed: 17 additions & 1 deletion
@@ -103,7 +103,7 @@ func.func @test_pad_io_rank_mismatch(%arg0: tensor<13x21xf32>, %arg1: tensor<2x2
 // -----
 
 func.func @test_pad_invalid_padding_rank(%arg0: tensor<13x21xf32>, %arg1: tensor<2xi32>) {
-  // expected-error@+1 {{'tosa.pad' op expect 'padding' tensor rank equal to 2.}}
+  // expected-error@+1 {{'tosa.pad' op operand #1 must be 2D tensor of 32-bit signless integer or 64-bit signless integer values, but got 'tensor<2xi32>'}}
   %1 = tosa.pad %arg0, %arg1 : (tensor<13x21xf32>, tensor<2xi32>) -> tensor<13x21xf32>
   return
 }
@@ -119,6 +119,22 @@ func.func @test_pad_invalid_padConst_rank(%arg0: tensor<13x21xf32>, %arg1: tenso
 
 // -----
 
+func.func @test_pad_padding_shape_mismatch(%arg0: tensor<13x21x3xf32>, %arg1: tensor<2x2xi32>) -> tensor<13x21x3xf32> {
+  // expected-error@+1 {{'tosa.pad' op expected padding tensor dim 0 to have size 3 (input rank) but got size 2}}
+  %0 = tosa.pad %arg0, %arg1 : (tensor<13x21x3xf32>, tensor<2x2xi32>) -> tensor<13x21x3xf32>
+  return %0 : tensor<13x21x3xf32>
+}
+
+// -----
+
+func.func @test_pad_padding_shape_mismatch(%arg0: tensor<13x21x3xf32>, %arg1: tensor<3x1xi32>) -> tensor<13x21x3xf32> {
+  // expected-error@+1 {{'tosa.pad' op expected padding tensor dim 1 to have size 2 but got size 1}}
+  %0 = tosa.pad %arg0, %arg1 : (tensor<13x21x3xf32>, tensor<3x1xi32>) -> tensor<13x21x3xf32>
+  return %0 : tensor<13x21x3xf32>
+}
+
+// -----
+
 func.func @test_transpose_non_const(%arg0: tensor<13x21x3xf32>, %arg1: tensor<3xi32>) -> tensor<3x13x21xf32> {
   // expected-error@+1 {{'tosa.transpose' op perms of transpose is not constant}}
   %0 = tosa.transpose %arg0, %arg1 : (tensor<13x21x3xf32>, tensor<3xi32>) -> tensor<3x13x21xf32>
