From 69cda5288b97c9acfe48acb6bc851be3999a272e Mon Sep 17 00:00:00 2001
From: Shangdi Yu
Date: Mon, 7 Oct 2024 14:25:10 -0700
Subject: [PATCH] Migrate to training IR in executorch tests (#5875)

Summary:
As titled: migrate executorch tests from torch._export.capture_pre_autograd_graph
to the training IR export, torch.export.export_for_training. Also bump the
PyTorch nightly pin.

Clone of D63845664; this time it does not export to a fork.

Reviewed By: mergennachin, tugsbayasgalan

Differential Revision: D63994058
---
 .ci/docker/ci_commit_pins/pytorch.txt           | 2 +-
 examples/apple/mps/scripts/mps_example.py       | 2 +-
 examples/models/phi-3-mini/export_phi-3-mini.py | 6 +++---
 exir/tests/test_passes.py                       | 4 ++--
 install_requirements.py                         | 2 +-
 5 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/.ci/docker/ci_commit_pins/pytorch.txt b/.ci/docker/ci_commit_pins/pytorch.txt
index af80fa602c7..21a0ea5d478 100644
--- a/.ci/docker/ci_commit_pins/pytorch.txt
+++ b/.ci/docker/ci_commit_pins/pytorch.txt
@@ -1 +1 @@
-4b2970f7cd3cdd56883cacf116a8693862f89db5
+d1b87e26e5c4343f5b56bb1e6f89b479b389bfac
diff --git a/examples/apple/mps/scripts/mps_example.py b/examples/apple/mps/scripts/mps_example.py
index d6416e0ffc8..dfb958dce53 100644
--- a/examples/apple/mps/scripts/mps_example.py
+++ b/examples/apple/mps/scripts/mps_example.py
@@ -166,7 +166,7 @@ def get_model_config(args):
 
     # pre-autograd export. eventually this will become torch.export
     with torch.no_grad():
-        model = torch._export.capture_pre_autograd_graph(model, example_inputs)
+        model = torch.export.export_for_training(model, example_inputs).module()
         edge: EdgeProgramManager = export_to_edge(
             model,
             example_inputs,
diff --git a/examples/models/phi-3-mini/export_phi-3-mini.py b/examples/models/phi-3-mini/export_phi-3-mini.py
index c2e97a21b1e..305b83457dc 100644
--- a/examples/models/phi-3-mini/export_phi-3-mini.py
+++ b/examples/models/phi-3-mini/export_phi-3-mini.py
@@ -15,13 +15,13 @@
 
 from executorch.backends.xnnpack.partition.xnnpack_partitioner import XnnpackPartitioner
 from executorch.backends.xnnpack.utils.configs import get_xnnpack_edge_compile_config
 from executorch.exir import to_edge
-from torch._export import capture_pre_autograd_graph
 
 from torch.ao.quantization.quantize_pt2e import convert_pt2e, prepare_pt2e
 from torch.ao.quantization.quantizer.xnnpack_quantizer import (
     get_symmetric_quantization_config,
     XNNPACKQuantizer,
 )
+from torch.export import export_for_training
 
 from transformers import Phi3ForCausalLM
@@ -64,9 +64,9 @@ def export(args) -> None:
         xnnpack_quantizer = XNNPACKQuantizer()
         xnnpack_quantizer.set_global(xnnpack_quant_config)
 
-        model = capture_pre_autograd_graph(
+        model = export_for_training(
             model, example_inputs, dynamic_shapes=dynamic_shapes
-        )
+        ).module()
         model = prepare_pt2e(model, xnnpack_quantizer)  # pyre-fixme[6]
         model(*example_inputs)
         model = convert_pt2e(model)
diff --git a/exir/tests/test_passes.py b/exir/tests/test_passes.py
index 79578763475..d039db51876 100644
--- a/exir/tests/test_passes.py
+++ b/exir/tests/test_passes.py
@@ -1413,10 +1413,10 @@ def quantize_model(
     m_eager: torch.nn.Module, example_inputs: Tuple[torch.Tensor]
 ) -> Tuple[EdgeProgramManager, int, int]:
     # program capture
-    m = torch._export.capture_pre_autograd_graph(
+    m = torch.export.export_for_training(
         m_eager,
         example_inputs,
-    )
+    ).module()
 
     quantizer = XNNPACKQuantizer()
     quantization_config = get_symmetric_quantization_config()
diff --git a/install_requirements.py b/install_requirements.py
index 2fb4d410db3..5c6777e783d 100644
--- a/install_requirements.py
+++ b/install_requirements.py
@@ -94,7 +94,7 @@ def python_is_compatible():
 # NOTE: If a newly-fetched version of the executorch repo changes the value of
 # NIGHTLY_VERSION, you should re-run this script to install the necessary
 # package versions.
-NIGHTLY_VERSION = "dev20241002"
+NIGHTLY_VERSION = "dev20241007"
 
 # The pip repository that hosts nightly torch packages.
 TORCH_NIGHTLY_URL = "https://download.pytorch.org/whl/nightly/cpu"
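
Note: every code change in this patch is the same mechanical substitution, so a
short sketch of the pattern may help reviewers. The toy module and example
inputs below are illustrative only and not part of the patch; the API calls are
the ones the diff itself uses. capture_pre_autograd_graph returned a
torch.fx.GraphModule directly, while export_for_training returns an
ExportedProgram, hence the trailing .module() call to recover the GraphModule
that prepare_pt2e/convert_pt2e expect.

    import torch

    class M(torch.nn.Module):
        def forward(self, x: torch.Tensor) -> torch.Tensor:
            return torch.nn.functional.relu(x)

    model = M().eval()
    example_inputs = (torch.randn(1, 3),)

    # Old API (deprecated): returned a torch.fx.GraphModule directly.
    # model = torch._export.capture_pre_autograd_graph(model, example_inputs)

    # New API: export_for_training returns an ExportedProgram; .module()
    # unwraps it back into a GraphModule suitable for PT2E quantization.
    model = torch.export.export_for_training(model, example_inputs).module()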