Skip to content

Commit db13ece

Browse files
authored
Merge pull request #374 from Xilinx/bump_to_37263b6c
[AutoBump] Merge with fixes of 37263b6 (Sep 04) (21)
2 parents 379903b + 61c982f commit db13ece

File tree

3 files changed

+29
-106
lines changed

3 files changed

+29
-106
lines changed

mlir/include/mlir/Dialect/Tosa/IR/TosaOps.td

Lines changed: 1 addition & 8 deletions
Original file line number | Diff line number | Diff line change
@@ -1579,7 +1579,7 @@ def Tosa_ConcatOp : Tosa_InferTensorTypeOp<"concat"> {
15791579
//===----------------------------------------------------------------------===//
15801580
// Operator: pad
15811581
//===----------------------------------------------------------------------===//
1582-
def Tosa_PadOp : Tosa_InferShapedTypeOp<"pad", [InferTensorType]> {
1582+
def Tosa_PadOp : Tosa_InferShapedTypeOp<"pad"> {
15831583
let summary = "Pads a tensor with value specified.";
15841584

15851585
let description = [{
@@ -1619,13 +1619,6 @@ def Tosa_PadOp : Tosa_InferShapedTypeOp<"pad", [InferTensorType]> {
16191619
let hasCanonicalizer = 1;
16201620
let hasFolder = 1;
16211621
let hasVerifier = 1;
1622-
1623-
let extraClassDeclaration = [{
1624-
/// Returns true when two result types are compatible for this op;
1625-
/// Method used by InferTypeOpInterface.
1626-
static bool isCompatibleReturnTypes(TypeRange l, TypeRange r);
1627-
}];
1628-
16291622
}
16301623

16311624
//===----------------------------------------------------------------------===//

mlir/lib/Dialect/Tosa/IR/TosaOps.cpp

Lines changed: 12 additions & 35 deletions
Original file line number | Diff line number | Diff line change
@@ -800,8 +800,6 @@ LogicalResult tosa::PadOp::inferReturnTypeComponents(
800800
MLIRContext *context, ::std::optional<Location> location,
801801
PadOp::Adaptor adaptor,
802802
SmallVectorImpl<ShapedTypeComponents> &inferredReturnShapes) {
803-
804-
Type inputType = getElementTypeOrSelf(adaptor.getInput1());
805803
ShapeAdaptor inputShape(adaptor.getInput1().getType());
806804
ShapeAdaptor paddingShape(adaptor.getPadding().getType());
807805
SmallVector<int64_t> outputShape;
@@ -822,17 +820,15 @@ LogicalResult tosa::PadOp::inferReturnTypeComponents(
822820
}
823821

824822
outputShape.resize(paddingShape.getDimSize(0), ShapedType::kDynamic);
825-
inferredReturnShapes.push_back(
826-
ShapedTypeComponents(outputShape, inputType));
823+
inferredReturnShapes.push_back(ShapedTypeComponents(outputShape));
827824
return success();
828825
}
829826

830827
DenseIntElementsAttr paddings;
831828
// If the paddings value is not a constant, all dimensions must be dynamic.
832829
if (!matchPattern(adaptor.getPadding(), m_Constant(&paddings))) {
833830
outputShape.resize(inputShape.getRank(), ShapedType::kDynamic);
834-
inferredReturnShapes.push_back(
835-
ShapedTypeComponents(outputShape, inputType));
831+
inferredReturnShapes.push_back(ShapedTypeComponents(outputShape));
836832
return success();
837833
}
838834

@@ -852,39 +848,21 @@ LogicalResult tosa::PadOp::inferReturnTypeComponents(
852848
paddingValues[i * 2 + 1]);
853849
}
854850

855-
inferredReturnShapes.push_back(ShapedTypeComponents(outputShape, inputType));
851+
inferredReturnShapes.push_back(ShapedTypeComponents(outputShape));
856852
return success();
857853
}
858854

859-
LogicalResult PadOp::verify() {
860-
ShapedType inputType = llvm::cast<ShapedType>(getInput1().getType());
861-
if (inputType.hasRank() && inputType.getRank() == 0) {
862-
return emitOpError() << "input tensor rank must not be 0";
863-
}
855+
LogicalResult tosa::PadOp::verify() {
856+
RankedTensorType inputType = getInput1().getType();
857+
RankedTensorType outputType = getOutput().getType();
858+
TensorType paddingType = getPadding().getType();
864859

865-
ShapedType paddingType = llvm::cast<ShapedType>(getPadding().getType());
866-
if (paddingType.hasRank()) {
867-
if (paddingType.getRank() != 2) {
868-
return emitOpError() << "paddings must be a tensor of rank 2";
869-
}
870-
if (inputType.hasRank() && !paddingType.isDynamicDim(0) &&
871-
inputType.getRank() != paddingType.getDimSize(0)) {
872-
return emitOpError() << "paddings must be a tensor of shape ["
873-
<< inputType.getRank() << ", 2]";
874-
}
875-
if (!paddingType.isDynamicDim(1) && paddingType.getDimSize(1) != 2) {
876-
return emitOpError() << "paddings must be a tensor of shape ["
877-
<< inputType.getRank() << ", 2]";
878-
}
860+
if (inputType.getRank() != outputType.getRank())
861+
return emitOpError() << "expect same input and output tensor rank.";
862+
863+
if (paddingType.hasRank() && paddingType.getRank() != 2)
864+
return emitOpError() << "expect 'padding' tensor rank equal to 2.";
879865

880-
DenseIntElementsAttr paddings;
881-
if (matchPattern(getPadding(), m_Constant(&paddings))) {
882-
if (llvm::any_of(paddings,
883-
[](auto val) { return val.getSExtValue() < 0; })) {
884-
return emitOpError() << "number of pad elements must be positive";
885-
}
886-
}
887-
}
888866
return success();
889867
}
890868

@@ -1441,7 +1419,6 @@ REDUCE_SHAPE_INFER(tosa::ReduceProdOp)
14411419
REDUCE_SHAPE_INFER(tosa::ReduceSumOp)
14421420
#undef REDUCE_SHAPE_INFER
14431421
COMPATIBLE_RETURN_TYPES(tosa::ConcatOp)
1444-
COMPATIBLE_RETURN_TYPES(tosa::PadOp)
14451422
#undef COMPATIBLE_RETURN_TYPES
14461423

14471424
template <typename T>

mlir/test/Dialect/Tosa/invalid.mlir

Lines changed: 16 additions & 63 deletions
Original file line number | Diff line number | Diff line change
@@ -105,83 +105,36 @@ func.func @test_pad_non_const(%arg0: tensor<13x21x3xf32>, %arg1: tensor<3x2xi32>
105105

106106
// -----
107107

108-
func.func @test_pad_non_const(%arg0: tensor<13x21x3xi8>, %arg1: tensor<i8>) -> tensor<?x?x?xi8> {
108+
func.func @test_pad_non_const(%arg0: tensor<13x21x3xi8>, %arg1: tensor<i8>) -> tensor<13x21x3xi8> {
109109
%0 = "tosa.const"() {value = dense<[[0, 0], [0, 1], [0, 1]]> : tensor<3x2xi32>} : () -> tensor<3x2xi32>
110110
// expected-error@+1 {{'tosa.pad' op pad_const of pad is not constant}}
111-
%1 = tosa.pad %arg0, %0, %arg1 : (tensor<13x21x3xi8>, tensor<3x2xi32>, tensor<i8>) -> tensor<?x?x?xi8>
112-
return %1 : tensor<?x?x?xi8>
111+
%1 = tosa.pad %arg0, %0, %arg1 : (tensor<13x21x3xi8>, tensor<3x2xi32>, tensor<i8>) -> tensor<13x21x3xi8>
112+
return %1 : tensor<13x21x3xi8>
113113
}
114114

115115
// -----
116116

117-
func.func @test_pad_output_shape_mismatch(%arg0: tensor<13x21x3xf32>) -> tensor<13x21x3xf32> {
118-
%0 = "tosa.const"() {value = dense<[[1, 1], [1, 1], [1, 1]]> : tensor<3x2xi32>} : () -> tensor<3x2xi32>
119-
// expected-error@+2 {{'tosa.pad' op failed to infer returned types}}
120-
// expected-error@+1 {{'tosa.pad' op inferred type(s) 'tensor<15x23x5xf32>' are incompatible with return type(s) of operation 'tensor<13x21x3xf32>}}
121-
%1 = tosa.pad %arg0, %0 : (tensor<13x21x3xf32>, tensor<3x2xi32>) -> tensor<13x21x3xf32>
122-
return %1 : tensor<13x21x3xf32>
123-
}
124-
125-
// -----
126-
127-
func.func @test_pad_type_mismatch(%arg0: tensor<13x21x3xf32>) -> tensor<15x23x5xi32> {
128-
%0 = "tosa.const"() {value = dense<[[1, 1], [1, 1], [1, 1]]> : tensor<3x2xi32>} : () -> tensor<3x2xi32>
129-
// expected-error@+2 {{'tosa.pad' op failed to infer returned types}}
130-
// expected-error@+1 {{'tosa.pad' op inferred type(s) 'tensor<15x23x5xf32>' are incompatible with return type(s) of operation 'tensor<15x23x5xi32>}}
131-
%1 = tosa.pad %arg0, %0 : (tensor<13x21x3xf32>, tensor<3x2xi32>) -> tensor<15x23x5xi32>
132-
return %1 : tensor<15x23x5xi32>
133-
}
134-
135-
// -----
136-
137-
func.func @test_pad_incorret_padding_rank(%arg0: tensor<13x21xf32>) -> tensor<13x21xf32> {
138-
%0 = "tosa.const"() {value = dense<[0, 1]> : tensor<2xi32>} : () -> tensor<2xi32>
139-
// expected-error@+1 {{'tosa.pad' op paddings must be a tensor of rank 2}}
140-
%1 = tosa.pad %arg0, %0 : (tensor<13x21xf32>, tensor<2xi32>) -> tensor<13x21xf32>
141-
return %1 : tensor<13x21xf32>
142-
}
143-
144-
// -----
145-
146-
func.func @test_pad_incorret_padding_shape(%arg0: tensor<13x21xf32>) -> tensor<13x21xf32> {
147-
%0 = "tosa.const"() {value = dense<[[0, 0], [0, 1], [0, 1], [1, 1]]> : tensor<4x2xi32>} : () -> tensor<4x2xi32>
148-
// expected-error@+1 {{'tosa.pad' op paddings must be a tensor of shape [2, 2]}}
149-
%1 = tosa.pad %arg0, %0 : (tensor<13x21xf32>, tensor<4x2xi32>) -> tensor<13x21xf32>
150-
return %1 : tensor<13x21xf32>
151-
}
152-
153-
// -----
154-
155-
func.func @test_pad_incorret_padding_shape(%arg0: tensor<13x21xf32>) -> tensor<13x21xf32> {
156-
%0 = "tosa.const"() {value = dense<[[0, 0, 0, 1], [0, 1, 1, 1]]> : tensor<2x4xi32>} : () -> tensor<2x4xi32>
157-
// expected-error@+1 {{'tosa.pad' op paddings must be a tensor of shape [2, 2]}}
158-
%1 = tosa.pad %arg0, %0 : (tensor<13x21xf32>, tensor<2x4xi32>) -> tensor<13x21xf32>
159-
return %1 : tensor<13x21xf32>
160-
}
161-
162-
// -----
163-
164-
func.func @test_pad_negative_padding(%arg0: tensor<13x21xf32>) -> tensor<?x?xf32> {
165-
%0 = "tosa.const"() {value = dense<[[0, 0], [0, -1]]> : tensor<2x2xi32>} : () -> tensor<2x2xi32>
166-
// expected-error@+1 {{'tosa.pad' op number of pad elements must be positive}}
167-
%1 = tosa.pad %arg0, %0 : (tensor<13x21xf32>, tensor<2x2xi32>) -> tensor<?x?xf32>
168-
return %1 : tensor<?x?xf32>
117+
func.func @test_pad_io_rank_mismatch(%arg0: tensor<13x21xf32>, %arg1: tensor<2x2xi32>) {
118+
// expected-error@+1 {{'tosa.pad' op expect same input and output tensor rank.}}
119+
%1 = tosa.pad %arg0, %arg1 : (tensor<13x21xf32>, tensor<2x2xi32>) -> tensor<13x21x3xf32>
120+
return
169121
}
170122

171123
// -----
172124

173-
func.func @test_pad_incorrect_input(%arg0: f32, %arg1: i32) -> f32 {
174-
// expected-error@+1 {{'tosa.pad' op operand #0 must be ranked tensor of number values, but got 'f32'}}
175-
%1 = tosa.pad %arg0, %arg1 : (f32, i32) -> f32
176-
return %1 : f32
125+
func.func @test_pad_invalid_padding_rank(%arg0: tensor<13x21xf32>, %arg1: tensor<2xi32>) {
126+
// expected-error@+1 {{'tosa.pad' op expect 'padding' tensor rank equal to 2.}}
127+
%1 = tosa.pad %arg0, %arg1 : (tensor<13x21xf32>, tensor<2xi32>) -> tensor<13x21xf32>
128+
return
177129
}
178130

179131
// -----
180132

181-
func.func @test_pad_zero_rank_input(%arg0: tensor<f32>, %arg1: tensor<i32>) -> tensor<f32> {
182-
// expected-error@+1 {{'tosa.pad' op input tensor rank must not be 0}}
183-
%1 = tosa.pad %arg0, %arg1 : (tensor<f32>, tensor<i32>) -> tensor<f32>
184-
return %1 : tensor<f32>
133+
func.func @test_pad_invalid_padConst_rank(%arg0: tensor<13x21xf32>, %arg1: tensor<2x2xi32>) {
134+
%0 = "tosa.const"() {value = dense<3.14> : tensor<1xf32>} : () -> tensor<1xf32>
135+
// expected-error@+1 {{'tosa.pad' op operand #2 must be 0D tensor of number values, but got 'tensor<1xf32>'}}
136+
%1 = tosa.pad %arg0, %arg1, %0 : (tensor<13x21xf32>, tensor<2x2xi32>, tensor<1xf32>) -> tensor<13x21xf32>
137+
return
185138
}
186139

187140
// -----

0 commit comments

Comments (0)