File tree Expand file tree Collapse file tree 2 files changed +7
-12
lines changed
include/mlir/Dialect/Tensor/Utils Expand file tree Collapse file tree 2 files changed +7
-12
lines changed Original file line number Diff line number Diff line change 1414namespace mlir {
1515namespace tensor {
1616
17- // Return a PadOp that pads `source` to `type` size. Output sizes (from `type`)
18- // are assumed to be static and greater than the potentially dynamic input sizes
19- // (from `source`). The op performs "high" padding (i.e. it adds trailing padding
20- // values until the desired size is met).
17+ // Return a PadOp that pads `source` to `type` size where the static
18+ // sizes are assumed to be greater than the dynamic sizes. If `type` has dynamic
19+ // dimensions the padding width is set to zero. The op performs "high" padding
20+ // (i.e. it adds trailing padding values until the desired size is met).
2121PadOp createPadHighOp (RankedTensorType type, Value source, Value pad,
2222 bool nofold, Location loc, OpBuilder &builder);
2323
Original file line number Diff line number Diff line change @@ -24,17 +24,12 @@ using namespace mlir::tensor;
2424PadOp mlir::tensor::createPadHighOp (RankedTensorType type, Value source,
2525 Value pad, bool nofold, Location loc,
2626 OpBuilder &b) {
27-
28- // TODO: Either relax or turn this into a failure
29- assert (!ShapedType::isDynamicShape (type.getShape ()) &&
30- " The output type is dynamic - that's not supported ATM." );
31-
32- // Init "low" and "high" padding values ("low" is kept as is, "high" is
33- // computed below).
3427 SmallVector<OpFoldResult> low (type.getRank (), b.getIndexAttr (0 ));
3528 SmallVector<OpFoldResult> high (type.getRank (), b.getIndexAttr (0 ));
36-
3729 for (const auto &en : enumerate(type.getShape ())) {
30+ // Pad only the static dimensions of the result tensor type.
31+ if (ShapedType::isDynamic (en.value ()))
32+ continue ;
3833 // Compute the padding width.
3934 AffineExpr d0;
4035 bindDims (b.getContext (), d0);
You can’t perform that action at this time.
0 commit comments