
Commit ffc9a90

gmagogsfm authored and facebook-github-bot committed
executorch/exir/program/test (pytorch#7397)
Summary: Pull Request resolved: pytorch#7397

Reviewed By: avikchaudhuri, ydwu4

Differential Revision: D67383235
1 parent f341da8 commit ffc9a90
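Every hunk in this commit applies the same mechanical change: each call to torch.export.export (imported directly as export in most files) now passes strict=True explicitly instead of relying on the default. A minimal sketch of the pattern; the TinyModel module below is hypothetical and only illustrates the call-site change:

import torch
from torch.export import export

class TinyModel(torch.nn.Module):  # hypothetical module, for illustration only
    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return x + 1

example_inputs = (torch.randn(2, 2),)

# Before: exported = export(TinyModel(), example_inputs)
# After: the strict flag is spelled out so the tracing mode is explicit at each call site.
exported = export(TinyModel(), example_inputs, strict=True)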

35 files changed: +233 -279 lines changed

backends/apple/coreml/runtime/test/export_stateful_model.py

Lines changed: 1 addition & 1 deletion
@@ -47,7 +47,7 @@ def main() -> None:
         torch.randn((1, embedding_dim)),
         torch.tensor([0]),
     )
-    exported_model = export(model, example_inputs)
+    exported_model = export(model, example_inputs, strict=True)
     edge_program_manager = exir.to_edge(exported_model)
     compile_specs = CoreMLBackend.generate_compile_specs(
         compute_precision=ct.precision.FLOAT16,

devtools/backend_debug/tests/test_delegation_info.py

Lines changed: 1 addition & 1 deletion
@@ -31,7 +31,7 @@ def forward(self, a, x, b):

         m = Model()
         inputs = (torch.randn(2, 2), torch.randn(2, 2), torch.randn(2, 2))
-        edge = to_edge(torch.export.export(m, inputs)).to_backend(
+        edge = to_edge(torch.export.export(m, inputs, strict=True)).to_backend(
             AddMulPartitionerDemo()
         )
         delegation_info = get_delegation_info(edge.exported_program().graph_module)

devtools/bundled_program/util/test_util.py

Lines changed: 1 addition & 0 deletions
@@ -271,6 +271,7 @@ def get_common_executorch_program() -> (
         m_name: export(
             StatefulWrapperModule(eager_model, getattr(eager_model, m_name)),
             capture_inputs[m_name],
+            strict=True,
         )
         for m_name in eager_model.method_names
     }

devtools/etrecord/tests/etrecord_test.py

Lines changed: 1 addition & 1 deletion
@@ -69,7 +69,7 @@ def get_test_model_with_bundled_program(self):

     def get_test_model_with_manager(self):
         f = models.BasicSinMax()
-        aten_dialect = export(f, f.get_random_inputs())
+        aten_dialect = export(f, f.get_random_inputs(), strict=True)
         edge_program: EdgeProgramManager = to_edge(
             aten_dialect, compile_config=EdgeCompileConfig(_check_ir_validity=False)
         )

docs/source/tutorials_source/devtools-integration-tutorial.py

Lines changed: 2 additions & 5 deletions
@@ -89,10 +89,7 @@ def forward(self, x):

 model = Net()

-aten_model: ExportedProgram = export(
-    model,
-    (torch.randn(1, 1, 32, 32),),
-)
+aten_model: ExportedProgram = export(model, (torch.randn(1, 1, 32, 32),), strict=True)

 edge_program_manager: EdgeProgramManager = to_edge(
     aten_model, compile_config=EdgeCompileConfig(_check_ir_validity=True)
@@ -141,7 +138,7 @@ def forward(self, x):

 # Step 1: ExecuTorch Program Export
 m_name = "forward"
-method_graphs = {m_name: export(model, (torch.randn(1, 1, 32, 32),))}
+method_graphs = {m_name: export(model, (torch.randn(1, 1, 32, 32),), strict=True)}

 # Step 2: Construct Method Test Suites
 inputs = [[torch.randn(1, 1, 32, 32)] for _ in range(2)]

docs/source/tutorials_source/export-to-executorch-tutorial.py

Lines changed: 15 additions & 13 deletions
@@ -66,7 +66,7 @@ def forward(self, x: torch.Tensor) -> torch.Tensor:


 example_args = (torch.randn(1, 3, 256, 256),)
-aten_dialect: ExportedProgram = export(SimpleConv(), example_args)
+aten_dialect: ExportedProgram = export(SimpleConv(), example_args, strict=True)
 print(aten_dialect)

 ######################################################################
@@ -101,7 +101,7 @@ def forward(self, x: torch.Tensor, y: torch.Tensor) -> torch.Tensor:


 example_args = (torch.randn(3, 3), torch.randn(3, 3))
-aten_dialect: ExportedProgram = export(Basic(), example_args)
+aten_dialect: ExportedProgram = export(Basic(), example_args, strict=True)

 # Works correctly
 print(aten_dialect.module()(torch.ones(3, 3), torch.ones(3, 3)))
@@ -131,7 +131,7 @@ def forward(self, x: torch.Tensor, y: torch.Tensor) -> torch.Tensor:
 dim1_x = Dim("dim1_x", min=1, max=10)
 dynamic_shapes = {"x": {1: dim1_x}, "y": {1: dim1_x}}
 aten_dialect: ExportedProgram = export(
-    Basic(), example_args, dynamic_shapes=dynamic_shapes
+    Basic(), example_args, dynamic_shapes=dynamic_shapes, strict=True
 )
 print(aten_dialect)

@@ -213,7 +213,7 @@ def forward(self, x: torch.Tensor, y: torch.Tensor) -> torch.Tensor:
 print("Quantized Graph")
 print(converted_graph)

-aten_dialect: ExportedProgram = export(converted_graph, example_args)
+aten_dialect: ExportedProgram = export(converted_graph, example_args, strict=True)
 print("ATen Dialect Graph")
 print(aten_dialect)

@@ -243,7 +243,7 @@ def forward(self, x: torch.Tensor, y: torch.Tensor) -> torch.Tensor:
 from executorch.exir import EdgeProgramManager, to_edge

 example_args = (torch.randn(1, 3, 256, 256),)
-aten_dialect: ExportedProgram = export(SimpleConv(), example_args)
+aten_dialect: ExportedProgram = export(SimpleConv(), example_args, strict=True)

 edge_program: EdgeProgramManager = to_edge(aten_dialect)
 print("Edge Dialect Graph")
@@ -267,10 +267,10 @@ def forward(self, x):


 encode_args = (torch.randn(1, 10),)
-aten_encode: ExportedProgram = export(Encode(), encode_args)
+aten_encode: ExportedProgram = export(Encode(), encode_args, strict=True)

 decode_args = (torch.randn(1, 5),)
-aten_decode: ExportedProgram = export(Decode(), decode_args)
+aten_decode: ExportedProgram = export(Decode(), decode_args, strict=True)

 edge_program: EdgeProgramManager = to_edge(
     {"encode": aten_encode, "decode": aten_decode}
@@ -291,7 +291,7 @@ def forward(self, x):
 # rather than the ``torch.ops.aten`` namespace.

 example_args = (torch.randn(1, 3, 256, 256),)
-aten_dialect: ExportedProgram = export(SimpleConv(), example_args)
+aten_dialect: ExportedProgram = export(SimpleConv(), example_args, strict=True)
 edge_program: EdgeProgramManager = to_edge(aten_dialect)
 print("Edge Dialect Graph")
 print(edge_program.exported_program())
@@ -357,7 +357,7 @@ def forward(self, x):

 # Export and lower the module to Edge Dialect
 example_args = (torch.ones(1),)
-aten_dialect: ExportedProgram = export(LowerableModule(), example_args)
+aten_dialect: ExportedProgram = export(LowerableModule(), example_args, strict=True)
 edge_program: EdgeProgramManager = to_edge(aten_dialect)
 to_be_lowered_module = edge_program.exported_program()

@@ -423,7 +423,7 @@ def forward(self, x):


 example_args = (torch.ones(1),)
-aten_dialect: ExportedProgram = export(ComposedModule(), example_args)
+aten_dialect: ExportedProgram = export(ComposedModule(), example_args, strict=True)
 edge_program: EdgeProgramManager = to_edge(aten_dialect)
 exported_program = edge_program.exported_program()
 print("Edge Dialect graph")
@@ -461,7 +461,7 @@ def forward(self, a, x, b):


 example_args = (torch.randn(2, 2), torch.randn(2, 2), torch.randn(2, 2))
-aten_dialect: ExportedProgram = export(Foo(), example_args)
+aten_dialect: ExportedProgram = export(Foo(), example_args, strict=True)
 edge_program: EdgeProgramManager = to_edge(aten_dialect)
 exported_program = edge_program.exported_program()
 print("Edge Dialect graph")
@@ -495,7 +495,7 @@ def forward(self, a, x, b):


 example_args = (torch.randn(2, 2), torch.randn(2, 2), torch.randn(2, 2))
-aten_dialect: ExportedProgram = export(Foo(), example_args)
+aten_dialect: ExportedProgram = export(Foo(), example_args, strict=True)
 edge_program: EdgeProgramManager = to_edge(aten_dialect)
 exported_program = edge_program.exported_program()
 delegated_program = edge_program.to_backend(AddMulPartitionerDemo())
@@ -577,7 +577,9 @@ def forward(self, x):
 pre_autograd_aten_dialect = export_for_training(M(), example_args).module()
 # Optionally do quantization:
 # pre_autograd_aten_dialect = convert_pt2e(prepare_pt2e(pre_autograd_aten_dialect, CustomBackendQuantizer))
-aten_dialect: ExportedProgram = export(pre_autograd_aten_dialect, example_args)
+aten_dialect: ExportedProgram = export(
+    pre_autograd_aten_dialect, example_args, strict=True
+)
 edge_program: exir.EdgeProgramManager = exir.to_edge(aten_dialect)
 # Optionally do delegation:
 # edge_program = edge_program.to_backend(CustomBackendPartitioner)

examples/apple/coreml/scripts/export.py

Lines changed: 6 additions & 3 deletions
@@ -88,7 +88,9 @@ def partition_module_to_coreml(module):

 def lower_module_to_coreml(module, compile_specs, example_inputs):
     module = module.eval()
-    edge = to_edge(export(module, example_inputs), compile_config=_EDGE_COMPILE_CONFIG)
+    edge = to_edge(
+        export(module, example_inputs, strict=True), compile_config=_EDGE_COMPILE_CONFIG
+    )
     # All of the subsequent calls on the edge_dialect_graph generated above (such as delegation or
     # to_executorch()) are done in place and the graph is also modified in place. For debugging purposes
     # we would like to keep a copy of the original edge dialect graph and hence we create a deepcopy of
@@ -107,7 +109,8 @@ def lower_module_to_coreml(module, compile_specs, example_inputs):
 def export_lowered_module_to_executorch_program(lowered_module, example_inputs):
     lowered_module(*example_inputs)
     exec_prog = to_edge(
-        export(lowered_module, example_inputs), compile_config=_EDGE_COMPILE_CONFIG
+        export(lowered_module, example_inputs, strict=True),
+        compile_config=_EDGE_COMPILE_CONFIG,
     ).to_executorch(config=exir.ExecutorchBackendConfig(extract_delegate_segments=True))

     return exec_prog
@@ -170,7 +173,7 @@ def main():

     if args.use_partitioner:
         model.eval()
-        exir_program_aten = torch.export.export(model, example_inputs)
+        exir_program_aten = torch.export.export(model, example_inputs, strict=True)

         edge_program_manager = exir.to_edge(exir_program_aten)
         edge_copy = copy.deepcopy(edge_program_manager)

examples/apple/coreml/scripts/inspector_utils.py

Lines changed: 2 additions & 7 deletions
@@ -65,9 +65,7 @@ def build_devtools_runner_including_coreml(
     build_devtools_runner_command: str = (
         "./examples/devtools/build_example_runner.sh --coreml"
     )
-    build_command: str = (
-        f"{cd_root_command} && {conda_activate_env_command} && {build_devtools_runner_command}"
-    )
+    build_command: str = f"{cd_root_command} && {conda_activate_env_command} && {build_devtools_runner_command}"
     subprocess.run(
         f'bash -c "{build_command}"', shell=True, check=True
     ).check_returncode()
@@ -87,10 +85,7 @@ def to_core_aten(
     module: torch.nn.Module,
     example_inputs: Tuple[Value, ...],
 ) -> ExportedProgram:
-    core_aten_program = export(
-        mod=module,
-        args=example_inputs,
-    )
+    core_aten_program = export(mod=module, args=example_inputs, strict=True)
     return core_aten_program


examples/devtools/scripts/gen_sample_etrecord.py

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -31,10 +31,7 @@
3131

3232
def gen_etrecord(model: torch.nn.Module, inputs: Any, output_path=None):
3333
f = model
34-
aten_dialect: ExportedProgram = export(
35-
f,
36-
inputs,
37-
)
34+
aten_dialect: ExportedProgram = export(f, inputs, strict=True)
3835
edge_program: EdgeProgramManager = to_edge(
3936
aten_dialect, compile_config=EdgeCompileConfig(_check_ir_validity=True)
4037
)

examples/llm_manual/export_nanogpt.py

Lines changed: 1 addition & 1 deletion
@@ -30,7 +30,7 @@
 m = export_for_training(
     model, example_inputs, dynamic_shapes=dynamic_shape
 ).module()
-traced_model = export(m, example_inputs, dynamic_shapes=dynamic_shape)
+traced_model = export(m, example_inputs, dynamic_shapes=dynamic_shape, strict=True)

 # Convert the model into a runnable ExecuTorch program.
 # To be further lowered to Xnnpack backend, `traced_model` needs xnnpack-specific edge compile config
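For context, the traced_model produced above is then lowered into a runnable ExecuTorch program, as the trailing comments note. The continuation below is not part of this diff; it is a rough sketch assuming the to_edge / XnnpackPartitioner flow used in the ExecuTorch examples, with the compile config chosen here only for illustration:

from executorch.backends.xnnpack.partition.xnnpack_partitioner import XnnpackPartitioner
from executorch.exir import EdgeCompileConfig, to_edge

# Convert to Edge dialect; relaxing IR validity checks is an illustrative choice, not taken from this commit.
edge_manager = to_edge(traced_model, compile_config=EdgeCompileConfig(_check_ir_validity=False))
# Delegate supported subgraphs to the XNNPACK backend, then serialize the program.
edge_manager = edge_manager.to_backend(XnnpackPartitioner())
et_program = edge_manager.to_executorch()

with open("nanogpt.pte", "wb") as f:
    f.write(et_program.buffer)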
