diff --git a/e2e_testing/xfail_sets.py b/e2e_testing/xfail_sets.py
index 204632619deb..aedcb113330b 100644
--- a/e2e_testing/xfail_sets.py
+++ b/e2e_testing/xfail_sets.py
@@ -302,6 +302,12 @@
     "FakeQuantizePerTensorAffineCachemaskModule_basic",
 }
 
+# Tests that already pass on nightly but still fail on the latest stable release.
+if torch_version_for_comparison() < version.parse("2.1.0.dev"):
+    TORCHDYNAMO_XFAIL_SET = TORCHDYNAMO_XFAIL_SET.union({
+        "EmptyModule_sizeZeroDim",
+    })
+
 TORCHDYNAMO_CRASHING_SET = {
     # No upstream decompositions.
     # %6:4 = torch.operator "aten._embedding_bag_forward_only"(%1, %3, %5, %false, %int0, %false, %none, %false, %int-1) : (!torch.tensor<*,f32>, !torch.tensor<*,si64>, !torch.tensor<*,si64>, !torch.bool, !torch.int, !torch.bool, !torch.none, !torch.bool, !torch.int) -> (!torch.tensor, !torch.tensor, !torch.tensor, !torch.tensor)
@@ -696,6 +702,7 @@
     "EmptyModule_falsePinMemory",
     "EmptyModule_int",
     "EmptyModule_float",
+    "EmptyModule_sizeZeroDim",
     "NewEmptyModuleBool_basic",
     "NewEmptyModuleDefaultDtype_basic",
     "NewEmptyModuleFalsePinMemory_basic",
diff --git a/lib/Conversion/TorchToTosa/TorchToTosa.cpp b/lib/Conversion/TorchToTosa/TorchToTosa.cpp
index 4e19c700482b..2b639d9522b4 100644
--- a/lib/Conversion/TorchToTosa/TorchToTosa.cpp
+++ b/lib/Conversion/TorchToTosa/TorchToTosa.cpp
@@ -5392,6 +5392,14 @@ LogicalResult ConvertAtenOp::matchAndRewrite(
   auto resultType =
       typeConverter->convertType(op.getType()).template cast();
 
+  // TOSA does not support zero-sized dimensions, so we cannot lower this op
+  // while preserving the result shape.
+  if (llvm::any_of(resultType.getShape(),
+                   [](int64_t dimSize) { return dimSize == 0; })) {
+    return rewriter.notifyMatchFailure(
+        op, "Cannot lower tensors with 0-sized dimensions to TOSA.");
+  }
+
   DenseElementsAttr emptyVal;
   if (op.getDtype().getType().template isa()) {
     emptyVal = DenseFPElementsAttr::get(resultType, {0.0F});
diff --git a/python/torch_mlir_e2e_test/test_suite/constant_alloc.py b/python/torch_mlir_e2e_test/test_suite/constant_alloc.py
index 1b92c8f17135..b3bd2b08287a 100644
--- a/python/torch_mlir_e2e_test/test_suite/constant_alloc.py
+++ b/python/torch_mlir_e2e_test/test_suite/constant_alloc.py
@@ -308,6 +308,25 @@ def EmptyModule_falsePinMemory(module, tu: TestUtils):
     module.forward()
 
 
+class EmptySizeZeroDimTensorModule(torch.nn.Module):
+
+    def __init__(self):
+        super().__init__()
+
+    @export
+    @annotate_args([
+        None,
+    ])
+    def forward(self):
+        return torch.empty((3, 0, 4),
+                           memory_format=torch.contiguous_format)
+
+
+@register_test_case(module_factory=lambda: EmptySizeZeroDimTensorModule())
+def EmptyModule_sizeZeroDim(module, tu: TestUtils):
+    module.forward()
+
+
 # ==============================================================================