107 changes: 73 additions & 34 deletions runtime/__init__.py
@@ -12,13 +12,10 @@
from pathlib import Path

import torch
from executorch.runtime import Verification, Runtime, Program, Method
from executorch.runtime import Runtime, Program, Method

et_runtime: Runtime = Runtime.get()
program: Program = et_runtime.load_program(
Path("/tmp/program.pte"),
verification=Verification.Minimal,
)
program: Program = et_runtime.load_program(Path("/tmp/program.pte"))
print("Program methods:", program.method_names)
forward: Method = program.load_method("forward")

@@ -40,21 +37,23 @@

Example usage with ETDump generation:

Note: ETDump requires building ExecuTorch with event tracing enabled
(CMake option ``EXECUTORCH_ENABLE_EVENT_TRACER=ON``).

.. code-block:: python

from pathlib import Path
import os

import torch
from executorch.runtime import Verification, Runtime, Program, Method
from executorch.runtime import Runtime, Program, Method

# Create program with etdump generation enabled
et_runtime: Runtime = Runtime.get()
program: Program = et_runtime.load_program(
Path("/tmp/program.pte"),
verification=Verification.Minimal,
enable_etdump=True,
debug_buffer_size=1e7, # A large buffer size to ensure that all debug info is captured
debug_buffer_size=int(1e7), # 10MB buffer to capture all debug info
)

# Load method and execute
@@ -76,10 +75,37 @@

.. code-block:: text

Program methods: {'forward'}
ETDump file created: True
Debug file created: True
Directory contents: ['program.pte', 'etdump_output.etdp', 'debug_output.bin']

Example usage with backend and operator introspection:

.. code-block:: python

from executorch.runtime import Runtime

runtime = Runtime.get()

# Check available backends
backends = runtime.backend_registry.registered_backend_names
print(f"Available backends: {backends}")

# Check if a specific backend is available
if runtime.backend_registry.is_available("XnnpackBackend"):
print("XNNPACK backend is available")

# List all registered operators
operators = runtime.operator_registry.operator_names
print(f"Number of registered operators: {len(operators)}")

Example output:

.. code-block:: text

Available backends: ['XnnpackBackend', ...] # Depends on your build configuration
XNNPACK backend is available
Number of registered operators: 247 # Depends on linked kernels
"""

import functools
@@ -113,19 +139,22 @@ def execute(self, inputs: Sequence[Any]) -> Sequence[Any]:
"""Executes the method with the given inputs.

Args:
inputs: The inputs to the method.
inputs: A sequence of input values, typically torch.Tensor objects.

Returns:
The outputs of the method.
A list of output values, typically torch.Tensor objects.
"""
return self._method(inputs)

@property
def metadata(self) -> MethodMeta:
"""Gets the metadata for the method.

The metadata includes information about input and output specifications,
such as tensor shapes, data types, and memory requirements.

Returns:
The metadata for the method.
The MethodMeta object containing method specifications.
"""
return self._method.method_meta()

@@ -148,9 +177,7 @@ def __init__(self, program: ExecuTorchProgram, data: Optional[bytes]) -> None:

@property
def method_names(self) -> Set[str]:
"""
Returns method names of the `Program` as a set of strings.
"""
"""Returns method names of the Program as a set of strings."""
return set(self._methods.keys())

def load_method(self, name: str) -> Optional[Method]:
@@ -170,13 +197,13 @@ def load_method(self, name: str) -> Optional[Method]:
return method

def metadata(self, method_name: str) -> MethodMeta:
"""Gets the metadata for the specified method.
"""Gets the metadata for the specified method without loading it.

Args:
method_name: The name of the method.

Returns:
The outputs of the method.
The metadata for the method, including input/output specifications.
"""
return self._program.method_meta(method_name)

@@ -201,14 +228,17 @@ def __init__(self, legacy_module: ModuleType) -> None:

@property
def registered_backend_names(self) -> List[str]:
"""
Returns the names of all registered backends as a list of strings.
"""
"""Returns the names of all registered backends as a list of strings."""
return self._legacy_module._get_registered_backend_names()

def is_available(self, backend_name: str) -> bool:
"""
Returns the names of all registered backends as a list of strings.
"""Checks if a specific backend is available in the runtime.

Args:
backend_name: The name of the backend to check (e.g., "XnnpackBackend").

Returns:
True if the backend is available, False otherwise.
"""
return self._legacy_module._is_available(backend_name)

@@ -222,9 +252,7 @@ def __init__(self, legacy_module: ModuleType) -> None:

@property
def operator_names(self) -> Set[str]:
"""
Returns the names of all registered operators as a set of strings.
"""
"""Returns the names of all registered operators as a set of strings."""
return set(self._legacy_module._get_operator_names())


@@ -233,6 +261,10 @@ class Runtime:

This can be used to concurrently load and execute any number of ExecuTorch
programs and methods.

Attributes:
backend_registry: Registry for querying available hardware backends.
operator_registry: Registry for querying available operators/kernels.
"""

@staticmethod
@@ -261,11 +293,17 @@ def load_program(
"""Loads an ExecuTorch program from a PTE binary.

Args:
data: The binary program data to load; typically PTE data.
verification: level of program verification to perform.
data: The binary program data to load. Can be a file path (str or Path),
bytes/bytearray, or a file-like object.
verification: Level of program verification to perform (Minimal or InternalConsistency).
Default is InternalConsistency.
enable_etdump: If True, enables ETDump profiling for runtime performance analysis.
Default is False.
debug_buffer_size: Size of the debug buffer in bytes for ETDump data.
Only used when enable_etdump=True. Default is 0.

Returns:
The loaded program.
The loaded Program instance.
"""
if isinstance(data, (Path, str)):
p = self._legacy_module._load_program(
@@ -275,20 +313,21 @@
program_verification=verification,
)
return Program(p, data=None)
elif isinstance(data, BinaryIO):
data_bytes = data.read()
elif isinstance(data, bytearray):
data_bytes = bytes(data)
elif isinstance(data, bytes):
data_bytes = data
elif isinstance(data, bytearray):
data_bytes = bytes(data)
elif hasattr(data, "read"):
# File-like object with read() method
data_bytes = data.read()
else:
raise TypeError(
f"Expected data to be bytes, bytearray, a path to a .pte file, or a file-like object, but got {type(data).__name__}."
)
p = self._legacy_module._load_program_from_buffer(
data_bytes,
enable_etdump=False,
debug_buffer_size=0,
enable_etdump=enable_etdump,
debug_buffer_size=debug_buffer_size,
program_verification=verification,
)

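For reviewers, a minimal usage sketch of what this diff changes in `load_program`: buffer inputs (bytes, bytearray, or any object exposing `read()`) are now accepted, and `enable_etdump`/`debug_buffer_size` are forwarded on that path instead of being hardcoded off. It assumes a serialized program exists at the `/tmp/program.pte` placeholder path used in the module docstring above.

```python
import io
from pathlib import Path

from executorch.runtime import Runtime

runtime = Runtime.get()
pte_path = Path("/tmp/program.pte")  # placeholder path from the module docstring

# Path-based loading, unchanged by this diff.
program_from_path = runtime.load_program(pte_path)

# Buffer-based loading: any object with read() (e.g. io.BytesIO) now dispatches
# to the buffer path, and the ETDump options are forwarded rather than ignored.
program_from_stream = runtime.load_program(
    io.BytesIO(pte_path.read_bytes()),
    enable_etdump=True,
    debug_buffer_size=int(1e7),  # 10 MB, matching the docstring example
)

print("Methods:", program_from_stream.method_names)
```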
28 changes: 28 additions & 0 deletions runtime/test/test_runtime.py
@@ -4,6 +4,7 @@
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

import io
import tempfile
import unittest
from pathlib import Path
@@ -76,3 +77,30 @@ def test_add(program):
with open(f.name, "rb") as f:
program = runtime.load_program(f.read())
test_add(program)

def test_load_program_with_file_like_objects(self):
"""Regression test: Ensure file-like objects (BytesIO, etc.) work correctly.
Previously, isinstance(data, BinaryIO) check didn't work because BinaryIO
is a typing protocol. Fixed by using hasattr(data, 'read') duck-typing.
"""
ep, inputs = create_program(ModuleAdd())
runtime = Runtime.get()

def test_add(program):
method = program.load_method("forward")
outputs = method.execute(inputs)
self.assertTrue(torch.allclose(outputs[0], inputs[0] + inputs[1]))

# Test with BytesIO
bytesio = io.BytesIO(ep.buffer)
program = runtime.load_program(bytesio)
test_add(program)

# Test with bytes
program = runtime.load_program(bytes(ep.buffer))
test_add(program)

# Test with bytearray
program = runtime.load_program(bytearray(ep.buffer))
test_add(program)
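As background for the regression test above, the sketch below illustrates why the old `isinstance(data, BinaryIO)` dispatch missed real file-like objects while `hasattr(data, 'read')` duck-typing matches them. The buffer contents are placeholders, and the exact `typing.BinaryIO` behavior described in the comments is an assumption about CPython rather than something this PR asserts.

```python
import io
from typing import BinaryIO

buf = io.BytesIO(b"pte-like bytes")  # hypothetical stand-in for PTE program data

# typing.BinaryIO is an annotation helper, not a base class that io.BytesIO
# inherits from or registers against, so this check typically does not match.
print(isinstance(buf, BinaryIO))  # expected: False

# Duck-typing on read() matches BytesIO, open(..., "rb") handles, and other
# stream-like objects, which is what the fixed dispatch relies on.
print(hasattr(buf, "read"))  # expected: True
```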
66 changes: 66 additions & 0 deletions runtime/test/test_runtime_etdump_gen.py
@@ -4,6 +4,7 @@
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

import io
import os
import tempfile
import unittest
@@ -95,3 +96,68 @@ def test_etdump_generation(self):
self.assertGreater(
len(run_data.events), 0, "Run data should contain at least one event"
)

def test_etdump_params_with_bytes_and_buffer(self):
"""Regression test: Ensure enable_etdump and debug_buffer_size work with bytes/buffer.

Previously, when loading from bytes/bytearray/file-like objects, these parameters
were hardcoded to False/0 instead of using the provided values.
"""
ep, inputs = create_program(ModuleAdd())
runtime = Runtime.get()

with tempfile.TemporaryDirectory() as temp_dir:
etdump_path = os.path.join(temp_dir, "etdump_output.etdp")
debug_path = os.path.join(temp_dir, "debug_output.bin")

def test_etdump_with_data(data, data_type):
"""Helper to test ETDump with different data types."""
# Load program with etdump enabled
program = runtime.load_program(
data,
verification=Verification.Minimal,
enable_etdump=True,
debug_buffer_size=int(1e7),
)

# Execute the method
method = program.load_method("forward")
outputs = method.execute(inputs)

# Verify computation
self.assertTrue(
torch.allclose(outputs[0], inputs[0] + inputs[1]),
f"Computation failed for {data_type}",
)

# Write etdump result
program.write_etdump_result_to_file(etdump_path, debug_path)

# Verify files were created
self.assertTrue(
os.path.exists(etdump_path),
f"ETDump file not created for {data_type}",
)
self.assertTrue(
os.path.exists(debug_path),
f"Debug file not created for {data_type}",
)

# Verify etdump file is not empty
etdump_size = os.path.getsize(etdump_path)
self.assertGreater(
etdump_size, 0, f"ETDump file is empty for {data_type}"
)

# Clean up for next test
os.remove(etdump_path)
os.remove(debug_path)

# Test with bytes
test_etdump_with_data(ep.buffer, "bytes")

# Test with bytearray
test_etdump_with_data(bytearray(ep.buffer), "bytearray")

# Test with BytesIO (file-like object)
test_etdump_with_data(io.BytesIO(ep.buffer), "BytesIO")