4 changes: 2 additions & 2 deletions extension/flat_tensor/test/targets.bzl
@@ -35,8 +35,8 @@ def define_common_targets(is_fbcode=False):
# The tests use this var to find the program file to load. This uses
# an fbcode target path because the authoring/export tools
# intentionally don't work in xplat (since they're host-only tools).
"ET_MODULE_LINEAR_PROGRAM": "$(location fbcode//executorch/test/models:exported_programs_with_data_separated[ModuleLinear.pte])",
"ET_MODULE_LINEAR_DATA": "$(location fbcode//executorch/test/models:exported_programs_with_data_separated[ModuleLinear.ptd])",
"ET_MODULE_LINEAR_PROGRAM": "$(location fbcode//executorch/test/models:exported_program_and_data[ModuleLinear.pte])",
"ET_MODULE_LINEAR_DATA": "$(location fbcode//executorch/test/models:exported_program_and_data[ModuleLinear.ptd])",
}

runtime.cxx_test(
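For context, a minimal sketch (not part of this PR) of how a test binary might consume variables like these at runtime. The helper name and error handling are illustrative; method_test.cpp below reads similarly named `*_PATH` variables defined in its own build rule.

```cpp
#include <cstdlib>

#include <executorch/extension/data_loader/file_data_loader.h>
#include <executorch/runtime/core/error.h>
#include <executorch/runtime/core/result.h>

using executorch::extension::FileDataLoader;
using executorch::runtime::Error;
using executorch::runtime::Result;

// Sketch: open the exported .pte/.ptd file pointed to by an environment
// variable that the build rule resolves via $(location ...).
Result<FileDataLoader> open_from_env(const char* env_var) {
  // e.g. env_var == "ET_MODULE_LINEAR_PROGRAM" or "ET_MODULE_LINEAR_DATA"
  const char* path = std::getenv(env_var);
  if (path == nullptr) {
    return Error::NotFound;
  }
  return FileDataLoader::from(path);
}
```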
10 changes: 7 additions & 3 deletions runtime/executor/method.cpp
@@ -16,6 +16,7 @@
#include <executorch/runtime/backend/interface.h>
#include <executorch/runtime/core/event_tracer_hooks.h>
#include <executorch/runtime/core/exec_aten/util/tensor_util.h>
#include <executorch/runtime/core/named_data_map.h>
#include <executorch/runtime/core/span.h>
#include <executorch/runtime/executor/memory_manager.h>
#include <executorch/runtime/executor/platform_memory_allocator.h>
@@ -414,7 +415,8 @@ Error Method::parse_values() {
auto t = deserialization::parseTensor(
program_,
memory_manager_,
static_cast<const executorch_flatbuffer::Tensor*>(val));
static_cast<const executorch_flatbuffer::Tensor*>(val),
named_data_map_);
if (!t.ok()) {
ET_LOG(
Error,
@@ -607,7 +609,8 @@ Result<Method> Method::load(
executorch_flatbuffer::ExecutionPlan* s_plan,
const Program* program,
MemoryManager* memory_manager,
EventTracer* event_tracer) {
EventTracer* event_tracer,
const NamedDataMap* named_data_map) {
MemoryAllocator* temp_allocator = memory_manager->temp_allocator();
if (temp_allocator == nullptr) {
PlatformMemoryAllocator* platform_allocator =
@@ -619,7 +622,8 @@ Result<Method> Method::load(
new (platform_allocator) PlatformMemoryAllocator();
temp_allocator = platform_allocator;
}
Method method(program, memory_manager, event_tracer, temp_allocator);
Method method(
program, memory_manager, event_tracer, temp_allocator, named_data_map);

Error err = method.init(s_plan);
if (err != Error::Ok) {
10 changes: 8 additions & 2 deletions runtime/executor/method.h
@@ -11,6 +11,7 @@
#include <executorch/runtime/core/evalue.h>
#include <executorch/runtime/core/event_tracer.h>
#include <executorch/runtime/core/exec_aten/exec_aten.h>
#include <executorch/runtime/core/named_data_map.h>
#include <executorch/runtime/core/span.h>
#include <executorch/runtime/executor/memory_manager.h>
#include <executorch/runtime/executor/method_meta.h>
@@ -54,6 +55,7 @@ class Method final {
program_(rhs.program_),
memory_manager_(rhs.memory_manager_),
temp_allocator_(rhs.temp_allocator_),
named_data_map_(rhs.named_data_map_),
serialization_plan_(rhs.serialization_plan_),
event_tracer_(rhs.event_tracer_),
n_value_(rhs.n_value_),
@@ -271,11 +273,13 @@
const Program* program,
MemoryManager* memory_manager,
EventTracer* event_tracer,
MemoryAllocator* temp_allocator)
MemoryAllocator* temp_allocator,
const NamedDataMap* named_data_map)
: step_state_(),
program_(program),
memory_manager_(memory_manager),
temp_allocator_(temp_allocator),
named_data_map_(named_data_map),
serialization_plan_(nullptr),
event_tracer_(event_tracer),
n_value_(0),
@@ -291,7 +295,8 @@
executorch_flatbuffer::ExecutionPlan* s_plan,
const Program* program,
MemoryManager* memory_manager,
EventTracer* event_tracer);
EventTracer* event_tracer,
const NamedDataMap* named_data_map);

/**
* Initialize the method from its serialized representation.
@@ -317,6 +322,7 @@
const Program* program_;
MemoryManager* memory_manager_;
MemoryAllocator* temp_allocator_;
const NamedDataMap* named_data_map_;
executorch_flatbuffer::ExecutionPlan* serialization_plan_;
EventTracer* event_tracer_;

6 changes: 4 additions & 2 deletions runtime/executor/program.cpp
@@ -240,7 +240,8 @@ Result<const char*> Program::get_method_name(size_t plan_index) const {
Result<Method> Program::load_method(
const char* method_name,
MemoryManager* memory_manager,
EventTracer* event_tracer) const {
EventTracer* event_tracer,
const NamedDataMap* named_data_map) const {
EXECUTORCH_SCOPE_PROF("Program::load_method");
internal::event_tracer_create_event_block(event_tracer, "Default");
internal::EventTracerProfileMethodScope event_tracer_scope =
@@ -257,7 +258,8 @@ Result<Method> Program::load_method(
if (!plan.ok()) {
return plan.error();
}
return Method::load(plan.get(), this, memory_manager, event_tracer);
return Method::load(
plan.get(), this, memory_manager, event_tracer, named_data_map);
}

Result<MethodMeta> Program::method_meta(const char* method_name) const {
3 changes: 2 additions & 1 deletion runtime/executor/program.h
@@ -132,7 +132,8 @@ class Program final {
Result<Method> load_method(
const char* method_name,
MemoryManager* memory_manager,
EventTracer* event_tracer = nullptr) const;
EventTracer* event_tracer = nullptr,
const NamedDataMap* named_data_map = nullptr) const;

/**
* Gathers metadata for the named method.
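The added parameter defaults to `nullptr`, so existing call sites compile and behave as before; callers whose weights live in a separate `.ptd` file pass a `NamedDataMap`. A minimal sketch of the new call shape (the wrapper function is illustrative, not part of the PR):

```cpp
#include <executorch/runtime/core/named_data_map.h>
#include <executorch/runtime/executor/memory_manager.h>
#include <executorch/runtime/executor/program.h>

using executorch::runtime::MemoryManager;
using executorch::runtime::Method;
using executorch::runtime::NamedDataMap;
using executorch::runtime::Program;
using executorch::runtime::Result;

// Sketch: load "forward" from a program whose constant tensors are stored
// externally. Passing nullptr (the default) for `data_map` preserves the old
// behavior for programs with embedded constants.
Result<Method> load_forward(
    const Program& program,
    MemoryManager& memory_manager,
    const NamedDataMap* data_map) {
  return program.load_method(
      "forward", &memory_manager, /*event_tracer=*/nullptr, data_map);
}
```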
1 change: 1 addition & 0 deletions runtime/executor/targets.bzl
@@ -79,6 +79,7 @@ def define_common_targets():
":memory_manager",
"//executorch/runtime/backend:interface",
"//executorch/runtime/core:core",
"//executorch/runtime/core:named_data_map",
"//executorch/runtime/core:evalue" + aten_suffix,
"//executorch/runtime/core:event_tracer" + aten_suffix,
"//executorch/runtime/core/exec_aten:lib" + aten_suffix,
6 changes: 4 additions & 2 deletions runtime/executor/tensor_parser.h
@@ -21,7 +21,8 @@ namespace deserialization {
ET_NODISCARD Result<executorch::aten::Tensor> parseTensor(
const Program* program,
MemoryManager* memory_manager,
const executorch_flatbuffer::Tensor* s_tensor);
const executorch_flatbuffer::Tensor* s_tensor,
const NamedDataMap* named_data_map = nullptr);

ET_NODISCARD Result<BoxedEvalueList<executorch::aten::Tensor>> parseTensorList(
const flatbuffers::Vector<int32_t>* tensor_indices,
@@ -108,7 +109,8 @@ ET_NODISCARD Result<void*> getTensorDataPtr(
const executorch_flatbuffer::Tensor* s_tensor,
const Program* program,
size_t nbytes,
HierarchicalAllocator* allocator);
HierarchicalAllocator* allocator,
const NamedDataMap* named_data_map = nullptr);

} // namespace deserialization
} // namespace runtime
10 changes: 8 additions & 2 deletions runtime/executor/tensor_parser_aten.cpp
@@ -10,6 +10,7 @@

#include <executorch/runtime/core/exec_aten/util/dim_order_util.h>
#include <executorch/runtime/core/exec_aten/util/scalar_type_util.h>
#include <executorch/runtime/core/named_data_map.h>
#include <executorch/runtime/executor/memory_manager.h>
#include <executorch/runtime/executor/program.h>
#include <executorch/runtime/platform/profiler.h>
@@ -31,7 +32,8 @@ void deleteNothing(void*) {}
Result<at::Tensor> parseTensor(
const Program* program,
MemoryManager* memory_manager,
const executorch_flatbuffer::Tensor* s_tensor) {
const executorch_flatbuffer::Tensor* s_tensor,
const NamedDataMap* named_data_map) {
EXECUTORCH_SCOPE_PROF("TensorParser::parseTensor");

ET_CHECK_OR_RETURN_ERROR(
@@ -102,7 +104,11 @@ Result<at::Tensor> parseTensor(
} else {
// Now that we know how big the tensor is, find and assign its memory.
Result<void*> data_ptr = getTensorDataPtr(
s_tensor, program, tensor.nbytes(), memory_manager->planned_memory());
s_tensor,
program,
tensor.nbytes(),
memory_manager->planned_memory(),
named_data_map);
if (!data_ptr.ok()) {
ET_LOG(
Error,
77 changes: 74 additions & 3 deletions runtime/executor/tensor_parser_exec_aten.cpp
@@ -19,6 +19,8 @@ namespace executorch {
namespace runtime {
namespace deserialization {

using executorch::aten::ScalarType;
using executorch::runtime::TensorLayout;
// Provides access to private Program methods.
class TensorParser final {
public:
@@ -113,7 +115,8 @@ ET_NODISCARD Result<void*> getTensorDataPtr(
const executorch_flatbuffer::Tensor* s_tensor,
const Program* program,
size_t nbytes,
HierarchicalAllocator* allocator) {
HierarchicalAllocator* allocator,
const NamedDataMap* named_data_map) {
auto data_buffer_idx = s_tensor->data_buffer_idx();
const executorch_flatbuffer::AllocationDetails* allocation_info =
s_tensor->allocation_info();
@@ -132,8 +135,76 @@ ET_NODISCARD Result<void*> getTensorDataPtr(
}
return planned_ptr;

// Constant
} else if (data_buffer_idx > 0 && allocation_info == nullptr) {
}
// Constant, stored externally.
else if (
allocation_info == nullptr && s_tensor->extra_tensor_info() != nullptr &&
s_tensor->extra_tensor_info()->location() ==
executorch_flatbuffer::TensorDataLocation::EXTERNAL) {
// Check that fqn is not null.
ET_CHECK_OR_RETURN_ERROR(
s_tensor->extra_tensor_info()->fully_qualified_name() != nullptr,
InvalidExternalData,
"Fully qualified name of external tensor is null");
// Look up tensor in named data map.
Result<const TensorLayout> tensor_layout_res = named_data_map->get_metadata(
s_tensor->extra_tensor_info()->fully_qualified_name()->c_str());
if (!tensor_layout_res.ok()) {
return tensor_layout_res.error();
}
const TensorLayout& tensor_layout = tensor_layout_res.get();

// Compatibility checking.
ET_CHECK_OR_RETURN_ERROR(
static_cast<ScalarType>(s_tensor->scalar_type()) ==
tensor_layout.scalar_type(),
InvalidExternalData,
"Scalar type mismatch. Expected %hhd, got %hhd.",
static_cast<int8_t>(s_tensor->scalar_type()),
static_cast<int8_t>(tensor_layout.scalar_type()));
ET_CHECK_OR_RETURN_ERROR(
nbytes == tensor_layout.nbytes(),
InvalidExternalData,
"Nbytes mismatch. Expected %zu, got %zu.",
nbytes,
tensor_layout.nbytes());
int dim = s_tensor->sizes()->size();
ET_CHECK_OR_RETURN_ERROR(
dim == tensor_layout.sizes().size(),
InvalidExternalData,
"Dim mismatch. Expected %d, got %zu.",
dim,
tensor_layout.sizes().size());
for (int i = 0; i < dim; i++) {
ET_CHECK_OR_RETURN_ERROR(
s_tensor->sizes()->Get(i) == tensor_layout.sizes()[i],
InvalidExternalData,
"Sizes mismatch. Expected %d, got %d for size at index %d.",
s_tensor->sizes()->Get(i),
tensor_layout.sizes()[i],
i);
ET_CHECK_OR_RETURN_ERROR(
s_tensor->dim_order()->Get(i) == tensor_layout.dim_order()[i],
InvalidExternalData,
"Dim order mismatch. Expected %d, got %d for dim at index %d.",
s_tensor->dim_order()->Get(i),
tensor_layout.dim_order()[i],
i);
}

Result<FreeableBuffer> data_res = named_data_map->get_data(
s_tensor->extra_tensor_info()->fully_qualified_name()->c_str());
if (!data_res.ok()) {
return data_res.error();
}
// The const_cast is 'ok' here because program and runtime should guarantee
// that this data is never modified. Temporary until we introduce the
// `get_and_persist_data` API from TODO(T214294528).
return const_cast<void*>(static_cast<const void*>(data_res.get().data()));
}

// Constant, stored in PTE file.
else if (data_buffer_idx > 0 && allocation_info == nullptr) {
auto const_data =
program->get_constant_buffer_data(data_buffer_idx, nbytes);
if (!const_data.ok()) {
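This is the core of the feature: a constant tensor whose `extra_tensor_info()` marks it as `EXTERNAL` is resolved through the `NamedDataMap` by its fully qualified name, its stored `TensorLayout` is validated against the serialized tensor (scalar type, nbytes, sizes, dim order), and the buffer returned by `get_data()` backs the tensor. A condensed, hedged sketch of that lookup contract follows; the helper name and simplified checks are illustrative, and the real branch above also validates sizes and dim order element by element.

```cpp
#include <executorch/runtime/core/error.h>
#include <executorch/runtime/core/freeable_buffer.h>
#include <executorch/runtime/core/named_data_map.h>
#include <executorch/runtime/core/result.h>

using executorch::runtime::Error;
using executorch::runtime::FreeableBuffer;
using executorch::runtime::NamedDataMap;
using executorch::runtime::Result;
using executorch::runtime::TensorLayout;

// Sketch: fetch an externally-stored tensor by its fully qualified name and
// check that the stored layout matches what the program expects.
Result<FreeableBuffer> resolve_external(
    const NamedDataMap& map, const char* fqn, size_t expected_nbytes) {
  Result<const TensorLayout> layout = map.get_metadata(fqn);
  if (!layout.ok()) {
    return layout.error();
  }
  if (layout.get().nbytes() != expected_nbytes) {
    return Error::InvalidExternalData;
  }
  return map.get_data(fqn);  // FreeableBuffer wrapping the weight data
}
```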
7 changes: 5 additions & 2 deletions runtime/executor/tensor_parser_portable.cpp
@@ -11,6 +11,7 @@
#include <executorch/runtime/core/exec_aten/exec_aten.h>
#include <executorch/runtime/core/exec_aten/util/dim_order_util.h>
#include <executorch/runtime/core/exec_aten/util/scalar_type_util.h>
#include <executorch/runtime/core/named_data_map.h>
#include <executorch/runtime/executor/memory_manager.h>
#include <executorch/runtime/executor/program.h>
#include <executorch/runtime/platform/profiler.h>
@@ -27,7 +28,8 @@ using torch::executor::TensorImpl;
Result<Tensor> parseTensor(
const Program* program,
MemoryManager* memory_manager,
const executorch_flatbuffer::Tensor* s_tensor) {
const executorch_flatbuffer::Tensor* s_tensor,
const NamedDataMap* named_data_map) {
EXECUTORCH_SCOPE_PROF("TensorParser::parseTensor");
auto method_allocator = memory_manager->method_allocator();

@@ -146,7 +148,8 @@ Result<Tensor> parseTensor(
s_tensor,
program,
tensor_impl->nbytes(),
memory_manager->planned_memory());
memory_manager->planned_memory(),
named_data_map);
if (!data_ptr.ok()) {
ET_LOG(
Error,
33 changes: 33 additions & 0 deletions runtime/executor/test/method_test.cpp
@@ -10,6 +10,7 @@
#include <filesystem>

#include <executorch/extension/data_loader/file_data_loader.h>
#include <executorch/extension/flat_tensor/data_map.h>
#include <executorch/extension/runner_util/inputs.h>
#include <executorch/runtime/core/exec_aten/exec_aten.h>
#include <executorch/runtime/executor/method.h>
@@ -21,6 +22,7 @@

using namespace ::testing;
using executorch::aten::ArrayRef;
using executorch::extension::DataMap;
using executorch::extension::prepare_input_tensors;
using executorch::runtime::Error;
using executorch::runtime::EValue;
@@ -52,6 +54,21 @@ class MethodTest : public ::testing::Test {
{module_name, std::make_unique<Program>(std::move(program.get()))});
}

void load_data_map(const char* path, const char* module_name) {
// Create a loader for the serialized data map.
Result<FileDataLoader> loader = FileDataLoader::from(path);
ASSERT_EQ(loader.error(), Error::Ok);
loaders_.insert(
{module_name,
std::make_unique<FileDataLoader>(std::move(loader.get()))});

Result<DataMap> data_map = DataMap::load(loaders_[module_name].get());
EXPECT_EQ(data_map.error(), Error::Ok);

data_maps_.insert(
{module_name, std::make_unique<DataMap>(std::move(data_map.get()))});
}

void SetUp() override {
executorch::runtime::runtime_init();

@@ -63,6 +80,10 @@ class MethodTest : public ::testing::Test {
load_program(
std::getenv("DEPRECATED_ET_MODULE_LINEAR_CONSTANT_BUFFER_PATH"),
"linear_constant_buffer");

load_program(
std::getenv("ET_MODULE_LINEAR_PROGRAM_PATH"), "linear_program");
load_data_map(std::getenv("ET_MODULE_LINEAR_DATA_PATH"), "linear_data");
}

private:
@@ -71,6 +92,7 @@ class MethodTest : public ::testing::Test {

protected:
std::unordered_map<std::string, std::unique_ptr<Program>> programs_;
std::unordered_map<std::string, std::unique_ptr<DataMap>> data_maps_;
};

TEST_F(MethodTest, MoveTest) {
@@ -303,6 +325,17 @@ TEST_F(MethodTest, ConstantBufferTest) {
ASSERT_EQ(err, Error::Ok);
}

TEST_F(MethodTest, ProgramDataSeparationTest) {
ManagedMemoryManager mmm(kDefaultNonConstMemBytes, kDefaultRuntimeMemBytes);
Result<Method> method = programs_["linear_program"]->load_method(
"forward", &mmm.get(), nullptr, data_maps_["linear_data"].get());
ASSERT_EQ(method.error(), Error::Ok);

// Can execute the method.
Error err = method->execute();
ASSERT_EQ(err, Error::Ok);
}

/*
* TODO(T161163608): Test is disabled due to a resize bug in tensor_index_out of
* the portable op lib
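The new ProgramDataSeparationTest exercises the full flow: a FileDataLoader opens the `.ptd` file, `DataMap::load` wraps it (DataMap implements the `NamedDataMap` interface), and the map is passed to `load_method`. Outside the test fixture the same flow might look like the sketch below; the file paths are placeholders, and the loaders are declared first so they outlive the `Program`, `DataMap`, and `Method` that reference them, mirroring how the fixture keeps `loaders_` alive alongside `data_maps_`.

```cpp
#include <executorch/extension/data_loader/file_data_loader.h>
#include <executorch/extension/flat_tensor/data_map.h>
#include <executorch/runtime/executor/memory_manager.h>
#include <executorch/runtime/executor/program.h>

using executorch::extension::DataMap;
using executorch::extension::FileDataLoader;
using executorch::runtime::Error;
using executorch::runtime::MemoryManager;
using executorch::runtime::Method;
using executorch::runtime::Program;
using executorch::runtime::Result;

// Sketch: run a method whose weights live in a separate .ptd file.
Error run_separated(MemoryManager& memory_manager) {
  Result<FileDataLoader> program_loader = FileDataLoader::from("ModuleLinear.pte");
  Result<FileDataLoader> data_loader = FileDataLoader::from("ModuleLinear.ptd");
  if (!program_loader.ok()) {
    return program_loader.error();
  }
  if (!data_loader.ok()) {
    return data_loader.error();
  }

  Result<Program> program = Program::load(&program_loader.get());
  Result<DataMap> data_map = DataMap::load(&data_loader.get());
  if (!program.ok()) {
    return program.error();
  }
  if (!data_map.ok()) {
    return data_map.error();
  }

  Result<Method> method = program->load_method(
      "forward", &memory_manager, /*event_tracer=*/nullptr, &data_map.get());
  if (!method.ok()) {
    return method.error();
  }
  return method->execute();
}
```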