diff --git a/.ci/docker/ci_commit_pins/pytorch.txt b/.ci/docker/ci_commit_pins/pytorch.txt
index af80fa602c7..21a0ea5d478 100644
--- a/.ci/docker/ci_commit_pins/pytorch.txt
+++ b/.ci/docker/ci_commit_pins/pytorch.txt
@@ -1 +1 @@
-4b2970f7cd3cdd56883cacf116a8693862f89db5
+d1b87e26e5c4343f5b56bb1e6f89b479b389bfac
diff --git a/examples/models/phi-3-mini/export_phi-3-mini.py b/examples/models/phi-3-mini/export_phi-3-mini.py
index c2e97a21b1e..305b83457dc 100644
--- a/examples/models/phi-3-mini/export_phi-3-mini.py
+++ b/examples/models/phi-3-mini/export_phi-3-mini.py
@@ -15,13 +15,13 @@
 from executorch.backends.xnnpack.partition.xnnpack_partitioner import XnnpackPartitioner
 from executorch.backends.xnnpack.utils.configs import get_xnnpack_edge_compile_config
 from executorch.exir import to_edge
-from torch._export import capture_pre_autograd_graph
 from torch.ao.quantization.quantize_pt2e import convert_pt2e, prepare_pt2e
 from torch.ao.quantization.quantizer.xnnpack_quantizer import (
     get_symmetric_quantization_config,
     XNNPACKQuantizer,
 )
 
+from torch.export import export_for_training
 from transformers import Phi3ForCausalLM
 
 
@@ -64,9 +64,9 @@ def export(args) -> None:
     xnnpack_quantizer = XNNPACKQuantizer()
     xnnpack_quantizer.set_global(xnnpack_quant_config)
 
-    model = capture_pre_autograd_graph(
+    model = export_for_training(
         model, example_inputs, dynamic_shapes=dynamic_shapes
-    )
+    ).module()
     model = prepare_pt2e(model, xnnpack_quantizer)  # pyre-fixme[6]
     model(*example_inputs)
     model = convert_pt2e(model)
diff --git a/install_requirements.py b/install_requirements.py
index 2fb4d410db3..5c6777e783d 100644
--- a/install_requirements.py
+++ b/install_requirements.py
@@ -94,7 +94,7 @@ def python_is_compatible():
 # NOTE: If a newly-fetched version of the executorch repo changes the value of
 # NIGHTLY_VERSION, you should re-run this script to install the necessary
 # package versions.
-NIGHTLY_VERSION = "dev20241002"
+NIGHTLY_VERSION = "dev20241007"
 
 # The pip repository that hosts nightly torch packages.
 TORCH_NIGHTLY_URL = "https://download.pytorch.org/whl/nightly/cpu"
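
For readers migrating similar export scripts, here is a minimal, self-contained sketch of the PT2E quantization flow after the switch from torch._export.capture_pre_autograd_graph to torch.export.export_for_training. The ToyModel, its input shape, and the omission of dynamic_shapes are hypothetical simplifications for illustration; the actual script above exports Phi3ForCausalLM with dynamic shapes.

import torch
from torch.ao.quantization.quantize_pt2e import convert_pt2e, prepare_pt2e
from torch.ao.quantization.quantizer.xnnpack_quantizer import (
    get_symmetric_quantization_config,
    XNNPACKQuantizer,
)
from torch.export import export_for_training


class ToyModel(torch.nn.Module):
    # Hypothetical stand-in for Phi3ForCausalLM, used only to keep the sketch runnable.
    def __init__(self) -> None:
        super().__init__()
        self.linear = torch.nn.Linear(8, 8)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return self.linear(x)


example_inputs = (torch.randn(1, 8),)

xnnpack_quantizer = XNNPACKQuantizer()
xnnpack_quantizer.set_global(get_symmetric_quantization_config())

# export_for_training returns an ExportedProgram, so .module() is needed to get
# the GraphModule that prepare_pt2e expects; capture_pre_autograd_graph used to
# return that module directly.
model = export_for_training(ToyModel(), example_inputs).module()
model = prepare_pt2e(model, xnnpack_quantizer)
model(*example_inputs)  # calibration run over the example inputs
model = convert_pt2e(model)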