Skip to content

Commit f0cc259

Browse files
neuropilot-captain authored and MediaTek-ai committed
Fix lintrunner error
1 parent 4d86272 commit f0cc259

File tree

3 files changed

+4
-3
lines changed

3 files changed

+4
-3
lines changed

backends/mediatek/partitioner.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,7 @@ def is_node_supported(self, _, node: torch.fx.Node) -> bool:
4646
op_type = node.target.__name__
4747

4848
# Skip until we can handle the dimension order representation
49-
if op_type == 'aten._to_copy.default':
49+
if op_type == "aten._to_copy.default":
5050
return False
5151

5252
if op_type in self._op_types_to_skip or node.name in self._op_names_to_skip:

backends/mediatek/preprocess.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,7 @@ def preprocess(
4848
) -> PreprocessResult:
4949

5050
# Make sure all inputs are contiguous_format or NCHW or default dim order
51-
print('here')
51+
print("here")
5252
assert_default_dim_order(edge_program.graph_module)
5353

5454
name_to_node_mappings = {node.name: node for node in edge_program.graph.nodes}

backends/mediatek/runtime/NeuronBackend.cpp

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -114,7 +114,8 @@ Error NeuronExecuTorchDelegate::execute(
114114
for (int i = 0; i < inputCount; i++) {
115115
auto tensor_in = args[i]->toTensor();
116116
ET_CHECK_OR_RETURN_ERROR(
117-
runtime::is_contiguous_dim_order(tensor_in.dim_order().data(), tensor_in.dim()),
117+
runtime::is_contiguous_dim_order(
118+
tensor_in.dim_order().data(), tensor_in.dim()),
118119
Internal,
119120
"Expecting default dim_order but got a non default dim_order tensor for external input %u",
120121
i);

0 commit comments

Comments (0)