
Commit 06722c2

Add debug buffer support for pybindings
Add a debug buffer to PyModule, pass it to the event tracer, and collect it back after execution.

Differential Revision: [D81751067](https://our.internmc.facebook.com/intern/diff/D81751067/)

[ghstack-poisoned]
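For orientation, the sketch below shows the wiring this commit introduces, in isolation: the caller allocates and owns the debug buffer, hands the ETDumpGen event tracer a non-owning Span over it, raises the tracer's debug level to capture intermediate outputs, and passes the tracer into the Module. This is a minimal illustration, not code from the commit; the header paths, using-declarations, and the helper name `load_with_debug_buffer` are assumptions, while the individual calls mirror the diff below.

```cpp
// Minimal sketch of the new wiring (not part of the diff).
// Header paths and using-declarations are assumptions for illustration;
// the member calls mirror the PyModule constructors in this commit.
#include <executorch/devtools/etdump/etdump_flatcc.h>
#include <executorch/extension/data_loader/buffer_data_loader.h>
#include <executorch/extension/module/module.h>
#include <executorch/runtime/core/event_tracer.h>
#include <executorch/runtime/core/span.h>

#include <cstddef>
#include <cstdint>
#include <memory>

using executorch::extension::BufferDataLoader;
using executorch::extension::Module;
using executorch::runtime::EventTracerDebugLogLevel;
using executorch::runtime::Span;

// The caller owns the debug buffer; the tracer only holds a non-owning Span,
// so the buffer must outlive the tracer.
std::unique_ptr<Module> load_with_debug_buffer(
    const void* ptr,
    size_t ptr_len,
    size_t debug_buffer_size,
    std::unique_ptr<uint8_t[]>& debug_buffer_out /* kept alive by the caller */) {
  auto event_tracer = std::make_unique<torch::executor::ETDumpGen>();
  if (debug_buffer_size > 0) {
    // Allocate the buffer and point the tracer at it, as the updated
    // PyModule constructors do.
    debug_buffer_out = std::make_unique<uint8_t[]>(debug_buffer_size);
    event_tracer->set_debug_buffer(
        Span<uint8_t>(debug_buffer_out.get(), debug_buffer_size));
    event_tracer->set_event_tracer_debug_level(
        EventTracerDebugLogLevel::kIntermediateOutputs);
  }
  // Hand the tracer to the Module, mirroring load_module_from_buffer() below.
  auto loader = std::make_unique<BufferDataLoader>(ptr, ptr_len);
  return std::make_unique<Module>(
      std::move(loader),
      nullptr, // memory_allocator
      nullptr, // temp_allocator
      std::move(event_tracer),
      nullptr); // data_map_loader
}
```

Because the tracer only sees a Span, the buffer has to outlive it; that is why PyModule now stores debug_buffer_ and debug_buffer_size_ as members rather than letting Module manage the buffer.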
1 parent 1189938 commit 06722c2

File tree: 1 file changed (+55, −26 lines)

extension/pybindings/pybindings.cpp

Lines changed: 55 additions & 26 deletions
@@ -160,23 +160,21 @@ void setup_output_storage(
 inline std::unique_ptr<Module> load_module_from_buffer(
     const void* ptr,
     size_t ptr_len,
-    bool enable_etdump,
-    size_t debug_buffer_size,
+    std::unique_ptr<runtime::EventTracer> event_tracer,
     Program::Verification program_verification) {
   EXECUTORCH_SCOPE_PROF("load_module_from_buffer");
   auto loader = std::make_unique<BufferDataLoader>(ptr, ptr_len);
   return std::make_unique<Module>(
       std::move(loader),
       nullptr, // memory_allocator
       nullptr, // temp_allocator
-      enable_etdump ? std::make_unique<torch::executor::ETDumpGen>() : nullptr,
+      std::move(event_tracer), // event_tracer
       nullptr); // data_map_loader
 }

 inline std::unique_ptr<Module> load_module_from_file(
     const std::string& path,
-    bool enable_etdump,
-    size_t debug_buffer_size,
+    std::unique_ptr<runtime::EventTracer> event_tracer,
     Program::Verification program_verification) {
   EXECUTORCH_SCOPE_PROF("load_module_from_file");

@@ -193,7 +191,7 @@ inline std::unique_ptr<Module> load_module_from_file(
       std::move(loader),
       nullptr, // memory_allocator
       nullptr, // temp_allocator
-      enable_etdump ? std::make_unique<torch::executor::ETDumpGen>() : nullptr,
+      std::move(event_tracer), // event_tracer
       nullptr); // data_map_loader
 }

@@ -489,12 +487,23 @@ struct PyModule final {
       size_t debug_buffer_size = 0,
       Program::Verification program_verification =
           Program::Verification::InternalConsistency)
-      : module_(load_module_from_buffer(
-            buffer.cast<std::string_view>().data(),
-            py::len(buffer),
-            enable_etdump,
-            debug_buffer_size,
-            program_verification)) {}
+      : debug_buffer_size_(debug_buffer_size) {
+    std::unique_ptr<torch::executor::ETDumpGen> event_tracer = enable_etdump
+        ? std::make_unique<torch::executor::ETDumpGen>()
+        : nullptr;
+    if (enable_etdump && debug_buffer_size > 0) {
+      debug_buffer_ = std::make_unique<uint8_t[]>(debug_buffer_size);
+      event_tracer->set_debug_buffer(
+          Span<uint8_t>(debug_buffer_.get(), debug_buffer_size));
+      event_tracer->set_event_tracer_debug_level(
+          EventTracerDebugLogLevel::kIntermediateOutputs);
+    }
+    module_ = load_module_from_buffer(
+        buffer.cast<std::string_view>().data(),
+        py::len(buffer),
+        std::move(event_tracer),
+        program_verification);
+  }

   explicit PyModule(
       const void* ptr,
@@ -503,24 +512,41 @@ struct PyModule final {
       size_t debug_buffer_size = 0,
       Program::Verification program_verification =
           Program::Verification::InternalConsistency)
-      : module_(load_module_from_buffer(
-            ptr,
-            ptr_len,
-            enable_etdump,
-            debug_buffer_size,
-            program_verification)) {}
+      : debug_buffer_size_(debug_buffer_size) {
+    std::unique_ptr<torch::executor::ETDumpGen> event_tracer = enable_etdump
+        ? std::make_unique<torch::executor::ETDumpGen>()
+        : nullptr;
+    if (enable_etdump && debug_buffer_size > 0) {
+      debug_buffer_ = std::make_unique<uint8_t[]>(debug_buffer_size);
+      event_tracer->set_debug_buffer(
+          Span<uint8_t>(debug_buffer_.get(), debug_buffer_size));
+      event_tracer->set_event_tracer_debug_level(
+          EventTracerDebugLogLevel::kIntermediateOutputs);
+    }
+    module_ = load_module_from_buffer(
+        ptr, ptr_len, std::move(event_tracer), program_verification);
+  }

   explicit PyModule(
       const std::string& path,
       bool enable_etdump,
       size_t debug_buffer_size = 0,
       Program::Verification program_verification =
           Program::Verification::InternalConsistency)
-      : module_(load_module_from_file(
-            path,
-            enable_etdump,
-            debug_buffer_size,
-            program_verification)) {}
+      : debug_buffer_size_(debug_buffer_size) {
+    std::unique_ptr<torch::executor::ETDumpGen> event_tracer = enable_etdump
+        ? std::make_unique<torch::executor::ETDumpGen>()
+        : nullptr;
+    if (enable_etdump && debug_buffer_size > 0) {
+      debug_buffer_ = std::make_unique<uint8_t[]>(debug_buffer_size);
+      event_tracer->set_debug_buffer(
+          Span<uint8_t>(debug_buffer_.get(), debug_buffer_size));
+      event_tracer->set_event_tracer_debug_level(
+          EventTracerDebugLogLevel::kIntermediateOutputs);
+    }
+    module_ = load_module_from_file(
+        path, std::move(event_tracer), program_verification);
+  }

   PyModule(const PyModule&) = delete;
   PyModule& operator=(const PyModule&) = delete;
@@ -693,10 +719,9 @@ struct PyModule final {
       // Also write out the debug buffer to a separate file if requested.
       std::string debug_buffer_path_str =
          py::cast<std::string>(debug_buffer_path);
-      const auto debug_buffer = module_->debug_buffer();
-      if (debug_buffer.size() > 0) {
+      if (debug_buffer_ && debug_buffer_size_ > 0) {
        write_data_to_file(
-            debug_buffer_path_str, debug_buffer.data(), debug_buffer.size());
+            debug_buffer_path_str, debug_buffer_.get(), debug_buffer_size_);
      }
    }
  } else {
@@ -746,6 +771,10 @@ struct PyModule final {
   // bundled programs.
   std::vector<std::optional<TensorPtr>> output_tensors_;

+  // Hold onto the debug_buffer_ for the event_tracer.
+  std::unique_ptr<uint8_t[]> debug_buffer_;
+  size_t debug_buffer_size_;
+
   void allocate_output_tensors(const std::string& method_name) {
     auto method_meta_result = module_->method_meta(method_name);
     THROW_IF_ERROR(
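The other half of the change is collecting the buffer back after execution: the last two hunks make PyModule write its own debug_buffer_ to the requested path instead of asking Module for it. The sketch below illustrates that collection step on its own, assuming an ETDumpGen tracer and a caller-owned buffer set up as in the constructors above. It is an assumption-labeled illustration, not code from this commit; in particular, `ETDumpGen::get_etdump_data()` and the std::ofstream writes stand in for the `write_data_to_file` helper that pybindings.cpp actually uses.

```cpp
// Rough sketch of collecting trace results after execution (assumptions noted inline).
#include <executorch/devtools/etdump/etdump_flatcc.h> // assumed header path

#include <cstddef>
#include <cstdint>
#include <fstream>
#include <string>

void dump_trace_results(
    torch::executor::ETDumpGen& event_tracer,
    const uint8_t* debug_buffer,
    size_t debug_buffer_size,
    const std::string& etdump_path,
    const std::string& debug_buffer_path) {
  // get_etdump_data() returning a {buf, size} result is an assumption about
  // the ETDumpGen API; it is not part of this commit.
  auto result = event_tracer.get_etdump_data();
  if (result.buf != nullptr && result.size > 0) {
    std::ofstream etdump_file(etdump_path, std::ios::binary);
    etdump_file.write(
        static_cast<const char*>(result.buf),
        static_cast<std::streamsize>(result.size));
  }
  // Mirrors the updated PyModule logic: the debug buffer is only written out
  // when one was actually allocated.
  if (debug_buffer != nullptr && debug_buffer_size > 0) {
    std::ofstream debug_file(debug_buffer_path, std::ios::binary);
    debug_file.write(
        reinterpret_cast<const char*>(debug_buffer),
        static_cast<std::streamsize>(debug_buffer_size));
  }
}
```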
