From 8fcf6b2d13b3193456616ce4c81643b8c6a6f8b3 Mon Sep 17 00:00:00 2001 From: lucylq Date: Fri, 18 Apr 2025 13:59:07 -0700 Subject: [PATCH] Update module wrapper so that params are explicitly registered to the wrapper Seeing an issue with the linear module where the FQNs for constants disappear. Registering the wrapped module as `self.fn` (which makes it a submodule of the wrapper) means that its parameters are registered to the wrapper; the method to call is stored separately as the string `self.method_name`. cc @angelayi ``` File "/data/users/lfq/fbsource/buck-out/v2/gen/fbcode/1af94fa701700343/executorch/test/models/__export_delegated_program__/export_delegated_program#link-tree/torch/export/_trace.py", line 1980, in _export_for_training export_artifact = export_func( File "/data/users/lfq/fbsource/buck-out/v2/gen/fbcode/1af94fa701700343/executorch/test/models/__export_delegated_program__/export_delegated_program#link-tree/torch/export/_trace.py", line 1473, in _strict_export _replace_param_buffer_names(param_buffer_table, export_graph_signature) File "/data/users/lfq/fbsource/buck-out/v2/gen/fbcode/1af94fa701700343/executorch/test/models/__export_delegated_program__/export_delegated_program#link-tree/torch/export/_trace.py", line 272, in _replace_param_buffer_names spec.target = param_buffer_table[spec.target] KeyError: 'L__self___fn___self___linear.weight' ``` Differential Revision: [D73279618](https://our.internmc.facebook.com/intern/diff/D73279618/) [ghstack-poisoned] --- test/models/export_delegated_program.py | 23 +++++++++++++++++------ test/models/targets.bzl | 1 + 2 files changed, 18 insertions(+), 6 deletions(-) diff --git a/test/models/export_delegated_program.py b/test/models/export_delegated_program.py index 139e6357b14..47b7be38f12 100644 --- a/test/models/export_delegated_program.py +++ b/test/models/export_delegated_program.py @@ -87,6 +87,18 @@ def get_random_inputs(self) -> Sequence[torch.Tensor]: n = 10 # to create a large tensor return (torch.ones(n, n, n), 2 * torch.ones(n, n, n), 3 * torch.ones(n, n, n)) + +class ModuleLinear(torch.nn.Module): + def 
__init__(self): + super().__init__() + self.linear = torch.nn.Linear(3, 3) + + def forward(self, x: torch.Tensor): + return self.linear(x) + + def get_random_inputs(self): + return (torch.randn(3),) + # # Backends @@ -114,7 +126,7 @@ def export_module_to_program( extract_delegate_segments: bool, constant_tensor_alignment: Optional[int] = None, delegate_alignment: Optional[int] = None, - method: str = "forward", + method_name: str = "forward", ) -> ExecutorchProgramManager: eager_module = module_class().eval() inputs = () @@ -122,16 +134,15 @@ def export_module_to_program( inputs = eager_module.get_random_inputs() # type: ignore[operator] class WrapperModule(torch.nn.Module): - def __init__(self, fn): + def __init__(self, fn, method_name=method_name): super().__init__() self.fn = fn + self.method_name = method_name def forward(self, *args, **kwargs): - return self.fn(*args, **kwargs) + return getattr(self.fn, self.method_name)(*args, **kwargs) - exported_program = export( - WrapperModule(getattr(eager_module, method)), args=inputs, strict=True - ) + exported_program = export(WrapperModule(eager_module), args=inputs, strict=True) edge_config = EdgeCompileConfig(_check_ir_validity=False) et_config = exir.ExecutorchBackendConfig( diff --git a/test/models/targets.bzl b/test/models/targets.bzl index 95ed0d4302f..46d1374919e 100644 --- a/test/models/targets.bzl +++ b/test/models/targets.bzl @@ -155,6 +155,7 @@ def define_common_targets(): "ModuleAddMul", "ModuleAddLarge", "ModuleSubLarge", + "ModuleLinear", ] # Name of the backend to use when exporting delegated programs.