Skip to content

Commit 96ea729

Browse files
chong-chen01 and Jiseong-oh
authored and committed
Make partitioner and preprocess work
1. Make compilation pass (script build.sh) 2. Enable simple function for partitioner 3. Fix errors in preprocess phase. Co-authored-by: chong-chen <[email protected]>
1 parent c46b081 commit 96ea729

File tree

8 files changed

+99
-38
lines changed

8 files changed

+99
-38
lines changed

backends/samsung/CMakeLists.txt

Lines changed: 13 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -11,8 +11,8 @@ set(CMAKE_CXX_STANDARD_REQUIRED ON)
1111

1212
get_filename_component(EXECUTORCH_SOURCE_DIR "${CMAKE_CURRENT_LIST_DIR}/../.." ABSOLUTE)
1313

14-
if(NOT DEFINED EXYNOS_AI_LITECORE_PATH)
15-
message(FATAL_ERROR "Please define EXYNOS_AI_LIRECORE_PATH by adding cmake parameter -DEXYNOS_AI_LITECORE_PATH=<...>")
14+
if(NOT DEFINED EXYNOS_AI_LITECORE_ROOT)
15+
message(FATAL_ERROR "Please define EXYNOS_AI_LIRECORE_PATH by adding cmake parameter -DEXYNOS_AI_LITECORE_ROOT=<...>")
1616
endif()
1717
if(CMAKE_TOOLCHAIN_FILE MATCHES ".*(iOS|ios\.toolchain)\.cmake$")
1818
message(FATAL_ERROR "IOS is not supported on Exynos.")
@@ -32,8 +32,12 @@ if(CMAKE_BUILD_TYPE STREQUAL "Release")
3232
add_definitions(-DNDEBUG)
3333
endif()
3434

35-
include_directories(${EXECUTORCH_SOURCE_DIR}/..)
36-
include_directories(${EXYNOS_AI_LITECORE_PATH})
35+
include_directories(
36+
${EXECUTORCH_SOURCE_DIR}/..
37+
${EXECUTORCH_SOURCE_DIR}/runtime/core/portable_type/c10
38+
${EXYNOS_AI_LITECORE_ROOT}
39+
)
40+
add_compile_definitions(C10_USING_CUSTOM_GENERATED_MACROS)
3741

3842
if(${ANDROID})
3943
find_library(android_log log)
@@ -50,10 +54,10 @@ if(${CMAKE_SYSTEM_PROCESSOR} MATCHES "x86_64")
5054
)
5155
add_library(PyEnnWrapperAdaptor MODULE)
5256

53-
find_library(GG_API_LIB NAMES graphgen_api HINTS ${EXYNOS_AI_LITECORE_PATH}/lib64/)
57+
find_library(GG_API_LIB NAMES graphgen_api HINTS ${EXYNOS_AI_LITECORE_ROOT}/lib64/)
5458
add_library(graphgen_api SHARED IMPORTED GLOBAL)
5559
set_target_properties(graphgen_api PROPERTIES
56-
INTERFACE_INCLUDE_DIRECTORIES "${EXYNOS_AI_LITECORE_PATH}/include"
60+
INTERFACE_INCLUDE_DIRECTORIES "${EXYNOS_AI_LITECORE_ROOT}/include"
5761
IMPORTED_LOCATION "${GG_API_LIB}"
5862
)
5963

@@ -96,10 +100,10 @@ if(${CMAKE_SYSTEM_PROCESSOR} MATCHES "x86_64")
96100
# PyGraphWrapperAdaptor
97101
add_library(PyGraphWrapperAdaptor MODULE)
98102
#
99-
find_library(GRAPH_WRAPPER_LIB NAMES graph_wrapper HINTS ${EXYNOS_AI_LITECORE_PATH}/lib64/)
103+
find_library(GRAPH_WRAPPER_LIB NAMES graph_wrapper HINTS ${EXYNOS_AI_LITECORE_ROOT}/lib64/)
100104
add_library(graph_wrapper SHARED IMPORTED GLOBAL)
101105
set_target_properties(graph_wrapper PROPERTIES
102-
INTERFACE_INCLUDE_DIRECTORIES "${EXYNOS_AI_LITECORE_PATH}/include"
106+
INTERFACE_INCLUDE_DIRECTORIES "${EXYNOS_AI_LITECORE_ROOT}/include"
103107
IMPORTED_LOCATION "${GRAPH_WRAPPER_LIB}"
104108
)
105109
set_target_properties(PyGraphWrapperAdaptor PROPERTIES CXX_VISIBILITY_PRESET hidden)
@@ -120,6 +124,7 @@ if(${ANDROID})
120124
add_library(enn_backend STATIC)
121125
target_link_libraries(enn_backend PRIVATE enn_logging)
122126
target_link_options_shared_lib(enn_backend)
127+
target_compile_options(enn_backend PRIVATE -Wno-deprecated-declarations)
123128

124129
set(__enn_executor_runner_srcs ${EXECUTORCH_SOURCE_DIR}/examples/samsung/executor_runner/enn_executor_runner.cpp)
125130
add_executable(enn_executor_runner ${__enn_executor_runner_srcs})

backends/samsung/aot/PyEnnWrapperAdaptor.h

Lines changed: 16 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,9 @@
1313
#include <pybind11/numpy.h>
1414
#include <pybind11/pybind11.h>
1515

16+
#include <executorch/backends/samsung/compile_options_def_generated.h>
17+
#include <executorch/backends/samsung/runtime/logging.h>
18+
1619
#include <iostream>
1720
#include <memory>
1821
#include <vector>
@@ -29,18 +32,26 @@ class PyEnnWrapper {
2932

3033
void Init(const py::bytes& compile_opts) {
3134
graphgen_instance_ = graphgen_create();
35+
option_buf_ = enn_option::GetEnnExecuTorchOptions(
36+
compile_opts.cast<std::string_view>().data());
3237
}
3338

3439
bool IsNodeSupportedByBackend() {
35-
return False;
40+
return false;
3641
}
3742

3843
py::array_t<char> Compile(const py::array_t<char>& model_buffer) {
3944
if (graphgen_instance_ == nullptr) {
4045
ENN_LOG_ERROR("Please call `Init()` first before compile.");
4146
return py::array_t<char>();
4247
}
43-
48+
auto soc_name = option_buf_->chipset();
49+
if (graphgen_initialize_context(graphgen_instance_, soc_name) !=
50+
GraphGenResult::SUCCESS) {
51+
ENN_LOG_ERROR(
52+
"Unsupported Soc (%d), please check your chipset version.", soc_name);
53+
return py::array_t<char>();
54+
}
4455

4556
auto m_buf_info = model_buffer.request();
4657
auto* model_buf_ptr = reinterpret_cast<uint8_t*>(m_buf_info.ptr);
@@ -52,7 +63,7 @@ class PyEnnWrapper {
5263
return py::array_t<char>();
5364
}
5465

55-
auto result = py::array_t<char>({nnc_buffer->size}, {sizeof(char)});
66+
auto result = py::array_t<char>({nnc_buffer->size}, {sizeof(char)});
5667
auto result_buf = result.request();
5768
memcpy(result_buf.ptr, nnc_buffer->addr, nnc_buffer->size);
5869

@@ -73,6 +84,8 @@ class PyEnnWrapper {
7384
private:
7485
// pointer to enn software entry
7586
void* graphgen_instance_ = nullptr;
87+
// enn compilation option buf
88+
const enn_option::EnnExecuTorchOptions* option_buf_ = nullptr;
7689
};
7790
} // namespace enn
7891
} // namespace executor

backends/samsung/build.sh

Lines changed: 11 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ function usage() {
1818
<command> <argument> <description>
1919
2020
--sdk The path of downloaded ENN SDK, which is required for building.
21-
Or export EXYNOS_AI_LITECORE_ROOT=/path/to/enn_sdk_xxx
21+
Or export EXYNOS_AI_LITECORE_ROOT=/path/to/xxx
2222
--ndk The path of Android NDK, or export ANDROID_NDK_ROOT=/path/to/ndk.
2323
2424
--build, -b [x86_64, android, all] Default is all, x86_64 target to offline compilation,
@@ -34,14 +34,16 @@ function build_x86_64() {
3434
exit 1
3535
fi
3636

37-
echo EXYNOS_AI_LITECORE_ROOT: ${EXYNOS_AI_LITECORE_ROOT}
38-
echo ANDROID_NDK_ROOT: ${ANDROID_NDK_ROOT}
37+
echo "EXYNOS_AI_LITECORE_ROOT: ${EXYNOS_AI_LITECORE_ROOT}"
38+
echo "ANDROID_NDK_ROOT: ${ANDROID_NDK_ROOT}"
3939

4040
cmake \
4141
-DCMAKE_INSTALL_PREFIX=${X86_64_BUILD_DIR} \
4242
-DEXYNOS_AI_LITECORE_ROOT=${EXYNOS_AI_LITECORE_ROOT} \
43-
-DEXECUTORCH_BUILD_ENN=ON \
43+
-DEXECUTORCH_BUILD_SAMSUNG=ON \
4444
-DEXECUTORCH_BUILD_DEVTOOLS=ON \
45+
-DEXECUTORCH_BUILD_EXTENSION_DATA_LOADER=ON \
46+
-DEXECUTORCH_BUILD_EXTENSION_FLAT_TENSOR=ON \
4547
-DEXECUTORCH_BUILD_EXTENSION_MODULE=ON \
4648
-DEXECUTORCH_BUILD_EXTENSION_TENSOR=ON \
4749
-S ${PROJECT_DIR} \
@@ -70,12 +72,15 @@ function build_android() {
7072
-DCMAKE_TOOLCHAIN_FILE="${ANDROID_NDK_ROOT}/build/cmake/android.toolchain.cmake" \
7173
-DANDROID_ABI="${ANDROID_ABI}" \
7274
-DCMAKE_BUILD_TYPE=Release \
73-
-DEXECUTORCH_BUILD_ENN=ON \
75+
-DEXECUTORCH_BUILD_SAMSUNG=ON \
7476
-DEXYNOS_AI_LITECORE_ROOT=${EXYNOS_AI_LITECORE_ROOT} \
75-
-DEXECUTORCH_BUILD_EXTENSION_DATA_LOADER=ON \
7677
-DEXECUTORCH_BUILD_EXTENSION_MODULE=ON \
78+
-DEXECUTORCH_BUILD_EXTENSION_DATA_LOADER=ON \
79+
-DEXECUTORCH_BUILD_EXTENSION_FLAT_TENSOR=ON \
7780
-DEXECUTORCH_BUILD_EXTENSION_TENSOR=ON \
7881
-DEXECUTORCH_ENABLE_LOGGING=1 \
82+
-DEXECUTORCH_BUILD_DEVTOOLS=ON \
83+
-DEXECUTORCH_ENABLE_EVENT_TRACER=ON \
7984
-S ${PROJECT_DIR} \
8085
-B ${ANDROID_BUILD_DIR}
8186

backends/samsung/enn_preprocess.py

Lines changed: 13 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,12 @@
1515
)
1616
from executorch.backends.samsung.serialization.enn_graph_schema import EnnGraph
1717
from executorch.backends.samsung.utils.utils import get_compile_spec
18+
from executorch.backends.transforms.addmm_mm_to_linear import AddmmToLinearTransform
19+
from executorch.backends.transforms.fuse_batch_norm_with_conv import (
20+
FuseBatchNormWithConvPass,
21+
)
22+
23+
from executorch.backends.transforms.remove_getitem_op import RemoveGetItemPass
1824

1925
from executorch.exir.backend.backend_details import (
2026
BackendDetails,
@@ -40,12 +46,17 @@ def preprocess(
4046
)
4147
enn_wrapper.Init(option_spec.value)
4248

43-
enn_preprocess_passes = PassManager(passes=[])
49+
enn_preprocess_passes = PassManager(
50+
passes=[
51+
FuseBatchNormWithConvPass(edge_program),
52+
AddmmToLinearTransform(),
53+
RemoveGetItemPass(),
54+
]
55+
)
4456
pass_result = enn_preprocess_passes(edge_program.graph_module)
4557
assert pass_result is not None
4658

4759
enn_graph = EnnGraph()
48-
enn_graph.init("UnknownName", "")
4960
# node visitors
5061
node_visitors = get_node_visitors(edge_program)
5162

@@ -71,7 +82,6 @@ def preprocess(
7182
raise RuntimeError(f"{node.op}" " is not supported in ENN Delegate")
7283

7384
# Compile Graph
74-
enn_wrapper.Destroy()
7585
enn_graph.finish()
7686
ser_buf = enn_graph.serialize()
7787
enn_context_binary = enn_wrapper.Compile(ser_buf)

backends/samsung/partition/enn_partitioner.py

Lines changed: 20 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -3,15 +3,18 @@
33
#
44
# This source code is licensed under the BSD-style license found in the
55
# LICENSE file in the root directory of this source tree.
6-
76
import logging
8-
from typing import Any, Callable, Dict, List, Optional, Tuple
7+
from typing import Any, Dict, List
8+
9+
import executorch.backends.samsung.builders.node_visitor as node_visitor
910

1011
import executorch.backends.samsung.python.PyEnnWrapperAdaptor as PyEnnWrapper
1112

1213
import torch
1314
from executorch.backends.samsung.enn_preprocess import EnnBackend
14-
15+
from executorch.backends.samsung.serialization.compile_options import (
16+
ENN_COMPILE_OPTION_TITLE,
17+
)
1518
from executorch.backends.samsung.utils.utils import get_compile_spec
1619
from executorch.exir.backend.backend_details import CompileSpec
1720
from executorch.exir.backend.canonical_partitioners.pattern_op_partitioner import (
@@ -23,9 +26,15 @@
2326
PartitionResult,
2427
)
2528
from executorch.exir.backend.utils import tag_constant_data
29+
30+
from executorch.exir.dialects._ops import ops as exir_ops
2631
from torch.fx.passes.infra.partitioner import Partition
2732
from torch.fx.passes.operator_support import OperatorSupportBase
2833

34+
SUPPORTED_OPS = [
35+
exir_ops.edge.aten.addmm.default,
36+
]
37+
2938

3039
class EnnOperatorSupport(OperatorSupportBase):
3140

@@ -36,7 +45,10 @@ def __init__(
3645
):
3746
self.edge_program = edge_program
3847
self.enn_wrapper = PyEnnWrapper.EnnWrapper()
39-
option_spec = get_compile_spec(compile_specs, "Exynos compile", required=True)
48+
self.node_visitors = node_visitor.get_node_visitors(edge_program)
49+
option_spec = get_compile_spec(
50+
compile_specs, ENN_COMPILE_OPTION_TITLE, required=True
51+
)
4052
self.enn_wrapper.Init(option_spec.value)
4153

4254
def is_node_supported(self, _, node: torch.fx.Node) -> bool:
@@ -50,6 +62,9 @@ def is_node_supported(self, _, node: torch.fx.Node) -> bool:
5062
]:
5163
return False
5264

65+
if node.target in SUPPORTED_OPS or node.target.__name__ in self.node_visitors:
66+
return True
67+
5368
supported = self.enn_wrapper.IsNodeSupportedByBackend()
5469
return supported
5570

@@ -67,7 +82,7 @@ def __init__(self, compile_specs: List[CompileSpec]):
6782
def generate_partitions(
6883
self, edge_program: torch.export.ExportedProgram
6984
) -> List[Any]:
70-
85+
self.op_support_checker = EnnOperatorSupport(edge_program, self.compile_specs)
7186
return generate_partitions_from_list_of_nodes(
7287
edge_program.graph_module,
7388
op_support=self.op_support_checker,

backends/samsung/python/.gitignore

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
# exclude the pybind libraries
2+
*
3+
4+
# keep folder for ease of use
5+
!.gitignore

backends/samsung/utils/export_utils.py

Lines changed: 13 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -4,26 +4,31 @@
44
# This source code is licensed under the BSD-style license found in the
55
# LICENSE file in the root directory of this source tree.
66

7-
from typing import List, Optional, Tuple
7+
from typing import Optional, Tuple
88

99
import executorch.exir as exir
1010
import torch
11-
from executorch.exir import EdgeCompileConfig, ExportedProgram
11+
from executorch.backends.samsung.partition.enn_partitioner import EnnPartitioner
12+
from executorch.backends.transforms.remove_clone_ops import RemoveCloneOpsTransform
13+
from executorch.exir import EdgeCompileConfig
1214
from executorch.exir.backend.backend_details import CompileSpec
13-
from executorch.exir.program._program import (
14-
to_edge_transform_and_lower,
15-
)
15+
from executorch.exir.program._program import to_edge_transform_and_lower
1616

1717

1818
def to_edge_transform_and_lower_to_enn(
1919
module: torch.nn.Module,
2020
inputs: Tuple[torch.Tensor],
21-
custom_pass_config: List[PassType] = None,
2221
compile_specs: Optional[CompileSpec] = None,
2322
) -> exir.ExecutorchProgramManager:
24-
assert compile_specs is not None, "For now, we must deliver complile specs"
23+
assert (
24+
compile_specs is not None
25+
), "Please provide compile specifications for enn backend"
2526
prog = torch.export.export(module, inputs)
27+
28+
ahead_pass_list = [RemoveCloneOpsTransform()]
2629
return to_edge_transform_and_lower(
2730
prog,
28-
compile_config=[],
31+
ahead_pass_list,
32+
{"forward": [EnnPartitioner(compile_specs)]},
33+
compile_config=EdgeCompileConfig(_skip_dim_order=True),
2934
)

examples/samsung/executor_runner/enn_executor_runner.cpp

Lines changed: 8 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
/*
2-
* Copyright (c) 2025 Samsung Electronics Co. LTD
3-
* All rights reserved
2+
* Copyright (c) Meta Platforms, Inc. and affiliates.
3+
* Copyright (c) 2025 Samsung Electronics Co. LTD
4+
* All rights reserved
45
*
56
* This source code is licensed under the BSD-style license found in the
67
* LICENSE file in the root directory of this source tree.
@@ -265,9 +266,11 @@ int main(int argc, char** argv) {
265266
status = method->get_outputs(outputs.data(), outputs.size());
266267
ET_CHECK(status == Error::Ok);
267268

268-
for (size_t output_index = 0; output_index < method->outputs_size(); ++output_index) {
269-
// Save the results to given directory in order.
270-
saveOutput(output_tensor, output_index);
269+
for (size_t output_index = 0; output_index < method->outputs_size();
270+
++output_index) {
271+
auto output_tensor = outputs[output_index].toTensor();
272+
// Save the results to given directory in order.
273+
saveOutput(output_tensor, output_index);
271274
}
272275

273276
return 0;

0 commit comments

Comments (0)