From eeddbb03230d6829c77943830012a8e8151d1297 Mon Sep 17 00:00:00 2001 From: Songhao Jia Date: Fri, 24 Jan 2025 10:25:18 -0800 Subject: [PATCH] remove redundant check_valid_op in EdgeDialectVerifier Summary: check_valid_op member function in EXIREdgeDialectVerifier is actually never used: it has been overridden in the EXIREdgeDialectVerifier constructor. Remove it for better structure. Differential Revision: D68611967 --- exir/verification/verifier.py | 27 +-------------------------- 1 file changed, 1 insertion(+), 26 deletions(-) diff --git a/exir/verification/verifier.py b/exir/verification/verifier.py index 8e71d30761e..2ad453ffede 100644 --- a/exir/verification/verifier.py +++ b/exir/verification/verifier.py @@ -129,7 +129,7 @@ def check_valid_op(self, op): 2. Sometimes inference and training gives slightly different op set. Try adding `with torch.no_grad():` context manager if you are export for inference only. -3. If the error persists after 2, this is likely caused by torch.export() + core ATen decomposition producing unexpected operators for your model. +3. If the error persists after 2, this is likely caused by torch.export() + core ATen decomposition producing unexpected operators for your model. If you believe this operator should be included into core ATen opset, please create an issue in https://github.com/pytorch/pytorch/issues and add `module: core aten` tag. """ ) @@ -274,31 +274,6 @@ def check_additional(self, gm: GraphModule) -> None: _check_tensors_are_contiguous(gm) _check_tensor_args_matching_op_allowed_dtype(gm) - def check_valid_op(self, op): - if isinstance(op, OpOverload): - # TODO These special ops should be removable easily. 
- if op.namespace in ( - "quantized_decomposed", - "boltnn_nimble", - "nimble", - "quantized", - "dim_order_ops", - ) or op in ( - torch.ops.aten.mkldnn_rnn_layer.default, - torch.ops.aten._upsample_bilinear2d_aa.default, - torch.ops.aten.quantize_per_tensor.default, - torch.ops.aten.dequantize.self, - torch.ops.aten.max.default, - torch.ops.aten.full_like.default, # TODO(T183507359) - ): - return - if torch.Tag.core not in op.tags and torch.Tag.view_copy not in op.tags: - # NOTE(qihan): whether view_copy operators are marked as canonical is still under - # discussion. - raise SpecViolationError( - f"Operator {op.__module__}.{op.__name__} is not Aten Canonical." - ) - def is_valid(self, gm: GraphModule) -> bool: try: self(gm)