diff --git a/CMakeLists.txt b/CMakeLists.txt
index 8a9102848de..b3b4c33b842 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -181,16 +181,16 @@ option(EXECUTORCH_BUILD_KERNELS_CUSTOM_AOT "Build the custom ops lib for AOT"
 )
 
 option(EXECUTORCH_BUILD_EXTENSION_DATA_LOADER "Build the Data Loader extension"
-       OFF
+       ON
 )
 
-option(EXECUTORCH_BUILD_EXTENSION_MODULE "Build the Module extension" OFF)
+option(EXECUTORCH_BUILD_EXTENSION_MODULE "Build the Module extension" ON)
 
 option(EXECUTORCH_BUILD_EXTENSION_RUNNER_UTIL "Build the Runner Util extension"
        OFF
 )
 
-option(EXECUTORCH_BUILD_EXTENSION_TENSOR "Build the Tensor extension" OFF)
+option(EXECUTORCH_BUILD_EXTENSION_TENSOR "Build the Tensor extension" ON)
 
 option(EXECUTORCH_BUILD_EXTENSION_TRAINING "Build the training extension" OFF)
 
@@ -834,6 +834,23 @@ if(EXECUTORCH_BUILD_EXECUTOR_RUNNER)
   target_compile_options(executor_runner PUBLIC ${_common_compile_options})
 endif()
 
+if(EXECUTORCH_BUILD_EXTENSION_MODULE)
+  # Build the Module example runner.
+  set(_module_libs executorch gflags)
+  list(APPEND _module_libs
+       portable_ops_lib
+       portable_kernels
+       extension_data_loader
+       extension_tensor
+       extension_module_static
+  )
+
+  add_executable(module_runner examples/portable/module/module.cpp)
+  target_link_libraries(module_runner ${_module_libs})
+  target_compile_options(module_runner PUBLIC ${_common_compile_options})
+
+endif()
+
 if(EXECUTORCH_BUILD_VULKAN)
   add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/backends/vulkan)
 endif()
diff --git a/examples/portable/module/module.cpp b/examples/portable/module/module.cpp
new file mode 100644
index 00000000000..26ac4ecb715
--- /dev/null
+++ b/examples/portable/module/module.cpp
@@ -0,0 +1,28 @@
+#include <executorch/extension/module/module.h>
+#include <executorch/extension/tensor/tensor.h>
+
+#include <iostream>
+using namespace ::executorch::extension;
+
+int main(int argc, char** argv) {
+  // Create a Module.
+  Module module(
+      "/data/users/lfq/executorch/extension/module/test/resources/add.pte");
+
+  // Wrap the input data with a Tensor.
+  float input[1] = {4.f};
+  auto tensor = from_blob(input, {1});
+
+  auto err = module.set_inputs({tensor, tensor});
+
+  // Perform an inference.
+  const auto result = module.forward();
+
+  // Check for success or failure.
+  if (result.ok()) {
+    // Retrieve the output data.
+    const auto output = result->at(0).toTensor().const_data_ptr<float>();
+    std::cout << "Output: " << output[0] << std::endl;
+  }
+  return 0;
+}
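With the extension options flipped ON as above, building the runner should be the usual top-level CMake flow, e.g. `cmake -B cmake-out .` followed by `cmake --build cmake-out --target module_runner` (`cmake-out` is an arbitrary build directory name, not something this patch requires). The resulting binary typically lands at `cmake-out/module_runner` and runs the example against the hard-coded .pte path in module.cpp.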
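For reference, a minimal sketch of the same flow with the error paths checked, since module.cpp above discards the Error returned by set_inputs and silently exits if forward() fails. The headers and the from_blob/set_inputs/forward APIs are the ones used in the diff; "add.pte" is a hypothetical stand-in for a local model path:

#include <executorch/extension/module/module.h>
#include <executorch/extension/tensor/tensor.h>

#include <iostream>

using namespace ::executorch::extension;

int main() {
  // Load a model; "add.pte" is a placeholder path for illustration.
  Module module("add.pte");

  // Wrap the input data with a Tensor, as in the example above.
  float input[1] = {4.f};
  auto tensor = from_blob(input, {1});

  // set_inputs returns an Error; bail out instead of discarding it.
  if (module.set_inputs({tensor, tensor}) !=
      ::executorch::runtime::Error::Ok) {
    std::cerr << "set_inputs failed" << std::endl;
    return 1;
  }

  // forward returns a Result; check it before dereferencing.
  const auto result = module.forward();
  if (!result.ok()) {
    std::cerr << "forward failed" << std::endl;
    return 1;
  }

  // Retrieve and print the output data.
  const auto output = result->at(0).toTensor().const_data_ptr<float>();
  std::cout << "Output: " << output[0] << std::endl;
  return 0;
}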