Commit 2d7811f

angelayilucylq authored and committed

remove args

1 parent d3bcadb · commit 2d7811f

6 files changed, +22 -17 lines changed
Lines changed: 1 addition & 1 deletion

@@ -1 +1 @@
-80ca6dd892613fd4f1dee9040b8273ddeadb1c50
+2ea4b56ec872424e486c4fe2d55da061067a2ed3

examples/models/llama3_2_vision/text_decoder/test/test_text_decoder.py

Lines changed: 0 additions & 2 deletions

@@ -74,8 +74,6 @@ def test_llama3_2_text_decoder_aoti(self) -> None:
         with tempfile.TemporaryDirectory() as tmpdir:
             path = torch._inductor.aoti_compile_and_package(
                 ep,
-                model.get_example_inputs(),
-                kwargs=model.get_example_kwarg_inputs(),
                 package_path=os.path.join(tmpdir, "text_decoder.pt2"),
             )
             encoder_aoti = torch._inductor.aoti_load_package(path)
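Note: the same argument removal appears in the vision encoder and positional embedding tests below. On the newer nightly, torch._inductor.aoti_compile_and_package takes only the ExportedProgram plus packaging options; the example inputs are already captured at torch.export.export time, so they are no longer passed to the packaging call. A minimal sketch of the updated flow, using a toy module (TinyModel, the tensor shapes, and the file name are illustrative, not from this repo):

# Sketch of the post-change AOTI flow; TinyModel and the paths are made up for illustration.
import os
import tempfile

import torch


class TinyModel(torch.nn.Module):
    def forward(self, x):
        return x * 2


model = TinyModel()
example_inputs = (torch.randn(4),)

# Example inputs are captured once, at export time...
ep = torch.export.export(model, example_inputs)

with tempfile.TemporaryDirectory() as tmpdir:
    # ...so packaging only needs the ExportedProgram and a destination path.
    path = torch._inductor.aoti_compile_and_package(
        ep,
        package_path=os.path.join(tmpdir, "tiny_model.pt2"),
    )
    runner = torch._inductor.aoti_load_package(path)
    print(runner(torch.randn(4)))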

examples/models/llama3_2_vision/vision_encoder/test/test_vision_encoder.py

Lines changed: 0 additions & 1 deletion

@@ -36,7 +36,6 @@ def test_flamingo_vision_encoder(self) -> None:
         with tempfile.TemporaryDirectory() as tmpdir:
             path = torch._inductor.aoti_compile_and_package(
                 ep,
-                model.get_example_inputs(),
                 package_path=os.path.join(tmpdir, "vision_encoder.pt2"),
             )
             print(path)

extension/llm/export/builder.py

Lines changed: 18 additions & 9 deletions

@@ -184,15 +184,24 @@ def export(self) -> "LLMEdgeManager":
         # 2. torch.no_grad() is for getting rid of the dropout (not sure why training ops will show up)
         with torch.nn.attention.sdpa_kernel([SDPBackend.MATH]), torch.no_grad():
             if hasattr(self.args, "qnn") and self.args.qnn:
-                # TODO: this is temporary and export_for_training doesn't work with qnn either. We need a
-                # functional graph. See issue https://github.com/pytorch/executorch/pull/4627 for more details
-                exported_module = torch.export.export(
-                    self.model,
-                    self.example_inputs,
-                    self.example_kwarg_inputs,
-                    dynamic_shapes=dynamic_shape,
-                    strict=True,
-                )
+                # TODO: this is temporary, as qnn flow does not work with new, non-functional export IR.
+                # See issue: https://github.com/pytorch/executorch/issues/7373
+                from unittest.mock import patch
+
+                with patch.object(
+                    torch._utils_internal,
+                    "export_training_ir_rollout_check",
+                    return_value=False,
+                ):
+                    # TODO: this is temporary and export_for_training doesn't work with qnn either. We need a
+                    # functional graph. See issue https://github.com/pytorch/executorch/pull/4627 for more details
+                    exported_module = torch.export.export(
+                        self.model,
+                        self.example_inputs,
+                        self.example_kwarg_inputs,
+                        dynamic_shapes=dynamic_shape,
+                        strict=True,
+                    )
             else:
                 logging.info("Exporting with:")
                 logging.info(f"inputs: {self.example_inputs}")

extension/llm/modules/test/test_position_embeddings.py

Lines changed: 0 additions & 1 deletion

@@ -177,7 +177,6 @@ def test_tiled_token_positional_embedding_aoti(self):
         with tempfile.TemporaryDirectory() as tmpdir:
             path = torch._inductor.aoti_compile_and_package(
                 tpe_ep,
-                (self.x, self.aspect_ratio),
                 package_path=os.path.join(tmpdir, "tpe.pt2"),
             )
             tpe_aoti = load_package(path)

install_requirements.py

Lines changed: 3 additions & 3 deletions

@@ -112,7 +112,7 @@ def python_is_compatible():
 # NOTE: If a newly-fetched version of the executorch repo changes the value of
 # NIGHTLY_VERSION, you should re-run this script to install the necessary
 # package versions.
-NIGHTLY_VERSION = "dev20241206"
+NIGHTLY_VERSION = "dev20241218"
 
 # The pip repository that hosts nightly torch packages.
 TORCH_NIGHTLY_URL = "https://download.pytorch.org/whl/nightly/cpu"

@@ -124,7 +124,7 @@ def python_is_compatible():
     # been installed on CI before this step, so pip won't reinstall them
     f"torch==2.6.0.{NIGHTLY_VERSION}" if USE_PYTORCH_NIGHTLY else "torch",
     (
-        f"torchvision==0.20.0.{NIGHTLY_VERSION}"
+        f"torchvision==0.22.0.{NIGHTLY_VERSION}"
         if USE_PYTORCH_NIGHTLY
         else "torchvision"
     ),  # For testing.

@@ -135,7 +135,7 @@ def python_is_compatible():
 # TODO: Make each example publish its own requirements.txt
 EXAMPLES_REQUIREMENTS = [
     "timm==1.0.7",
-    f"torchaudio==2.5.0.{NIGHTLY_VERSION}" if USE_PYTORCH_NIGHTLY else "torchaudio",
+    f"torchaudio==2.6.0.{NIGHTLY_VERSION}" if USE_PYTORCH_NIGHTLY else "torchaudio",
     "torchsr==1.0.4",
     "transformers==4.46.1",
 ]
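Note: torch, torchvision, and torchaudio all share the single NIGHTLY_VERSION pin, so the three version prefixes have to move together whenever the nightly date is bumped. A quick sketch of what the updated pins expand to when USE_PYTORCH_NIGHTLY is set (pin values copied from this diff; the printed list is just illustrative):

# Expansion of the bumped pins; the version strings come from this commit.
NIGHTLY_VERSION = "dev20241218"
USE_PYTORCH_NIGHTLY = True

pins = [
    f"torch==2.6.0.{NIGHTLY_VERSION}" if USE_PYTORCH_NIGHTLY else "torch",
    f"torchvision==0.22.0.{NIGHTLY_VERSION}" if USE_PYTORCH_NIGHTLY else "torchvision",
    f"torchaudio==2.6.0.{NIGHTLY_VERSION}" if USE_PYTORCH_NIGHTLY else "torchaudio",
]
print(pins)
# ['torch==2.6.0.dev20241218', 'torchvision==0.22.0.dev20241218', 'torchaudio==2.6.0.dev20241218']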
