23 changes: 20 additions & 3 deletions CMakeLists.txt
@@ -181,16 +181,16 @@ option(EXECUTORCH_BUILD_KERNELS_CUSTOM_AOT "Build the custom ops lib for AOT"
)

option(EXECUTORCH_BUILD_EXTENSION_DATA_LOADER "Build the Data Loader extension"
-  OFF
+  ON
)

-option(EXECUTORCH_BUILD_EXTENSION_MODULE "Build the Module extension" OFF)
+option(EXECUTORCH_BUILD_EXTENSION_MODULE "Build the Module extension" ON)

option(EXECUTORCH_BUILD_EXTENSION_RUNNER_UTIL "Build the Runner Util extension"
OFF
)

-option(EXECUTORCH_BUILD_EXTENSION_TENSOR "Build the Tensor extension" OFF)
+option(EXECUTORCH_BUILD_EXTENSION_TENSOR "Build the Tensor extension" ON)

option(EXECUTORCH_BUILD_EXTENSION_TRAINING "Build the training extension" OFF)

@@ -834,6 +834,23 @@ if(EXECUTORCH_BUILD_EXECUTOR_RUNNER)
target_compile_options(executor_runner PUBLIC ${_common_compile_options})
endif()

+if(TRUE)
+  # Build Module
+  set(_module_libs executorch gflags)
+  list(APPEND _module_libs
+       portable_ops_lib
+       portable_kernels
+       extension_data_loader
+       extension_tensor
+       extension_module_static
+  )
+
+  add_executable(module_runner examples/portable/module/module.cpp)
+  target_link_libraries(module_runner ${_module_libs})
+  target_compile_options(module_runner PUBLIC ${_common_compile_options})
+
+endif()

if(EXECUTORCH_BUILD_VULKAN)
add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/backends/vulkan)
endif()
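For reference (not part of this change): the module_runner block added above is wrapped in if(TRUE), so it always builds. A minimal sketch of how it could instead be gated behind a dedicated flag, assuming a hypothetical EXECUTORCH_BUILD_MODULE_RUNNER option that follows the option() pattern used earlier in this file:

# Sketch only: EXECUTORCH_BUILD_MODULE_RUNNER is a hypothetical option name,
# not defined anywhere in this diff.
option(EXECUTORCH_BUILD_MODULE_RUNNER "Build the Module example runner" ON)

if(EXECUTORCH_BUILD_MODULE_RUNNER)
  set(_module_libs executorch gflags)
  list(APPEND _module_libs portable_ops_lib portable_kernels
       extension_data_loader extension_tensor extension_module_static
  )

  add_executable(module_runner examples/portable/module/module.cpp)
  target_link_libraries(module_runner ${_module_libs})
  target_compile_options(module_runner PUBLIC ${_common_compile_options})
endif()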
28 changes: 28 additions & 0 deletions examples/portable/module/module.cpp
@@ -0,0 +1,28 @@
#include <executorch/extension/module/module.h>
#include <executorch/extension/tensor/tensor.h>

#include <iostream>
using namespace ::executorch::extension;

int main(int argc, char** argv) {
  // Create a Module from the exported add.pte program.
  Module module(
      "/data/users/lfq/executorch/extension/module/test/resources/add.pte");

  // Wrap the input data with a Tensor.
  float input[1] = {4.f};
  auto tensor = from_blob(input, {1});

  // Set both inputs of the method to the same tensor and check the result.
  const auto err = module.set_inputs({tensor, tensor});
  if (err != ::executorch::runtime::Error::Ok) {
    std::cerr << "Failed to set inputs." << std::endl;
    return 1;
  }

  // Perform an inference.
  const auto result = module.forward();

  // Check for success or failure.
  if (result.ok()) {
    // Retrieve the output data.
    const auto output = result->at(0).toTensor().const_data_ptr<float>();
    std::cout << "Output: " << output[0] << std::endl;
  }
  return 0;
}
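For reference (not part of this change), a minimal variant of the same example, assuming the Module extension's forward() overload that takes the inputs directly, so the separate set_inputs() call is not needed. The model path here is a placeholder; substitute the location of your own exported add.pte.

#include <executorch/extension/module/module.h>
#include <executorch/extension/tensor/tensor.h>

#include <iostream>

int main() {
  using namespace ::executorch::extension;

  // Placeholder path; point this at your own exported add.pte.
  Module module("add.pte");

  // Wrap the input data with a Tensor.
  float input[1] = {4.f};
  auto tensor = from_blob(input, {1});

  // Pass both inputs directly to forward() instead of set_inputs() + forward().
  const auto result = module.forward({tensor, tensor});
  if (!result.ok()) {
    std::cerr << "forward() failed." << std::endl;
    return 1;
  }

  // Retrieve and print the output data.
  std::cout << "Output: "
            << result->at(0).toTensor().const_data_ptr<float>()[0] << std::endl;
  return 0;
}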