Skip to content
This repository was archived by the owner on Sep 10, 2025. It is now read-only.

Commit bcdfc54

Browse files
committed
Bump PyTorch pin to 20241111
1 parent e30aaa0 commit bcdfc54

File tree

4 files changed

+7
-5
lines changed

4 files changed

+7
-5
lines changed

install/install_requirements.sh

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -47,10 +47,10 @@ fi
4747
# NOTE: If a newly-fetched version of the executorch repo changes the value of
4848
# PYTORCH_NIGHTLY_VERSION, you should re-run this script to install the necessary
4949
# package versions.
50-
PYTORCH_NIGHTLY_VERSION=dev20241002
50+
PYTORCH_NIGHTLY_VERSION=dev20241111
5151

5252
# Nightly version for torchvision
53-
VISION_NIGHTLY_VERSION=dev20241002
53+
VISION_NIGHTLY_VERSION=dev20241111
5454

5555
# Nightly version for torchtune
5656
TUNE_NIGHTLY_VERSION=dev20241010

torchchat/cli/builder.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -402,6 +402,7 @@ def _load_checkpoint(builder_args: BuilderArgs):
402402
os.path.join(builder_args.checkpoint_dir, cp_name),
403403
map_location=builder_args.device,
404404
mmap=True,
405+
weights_only=False,
405406
)
406407
)
407408
checkpoint = {}
@@ -706,4 +707,4 @@ def tokenizer_setting_to_name(tiktoken: bool, tokenizers: bool) -> str:
706707
return "TikToken"
707708
if tokenizers:
708709
return "Tokenizers"
709-
return "SentencePiece"
710+
return "SentencePiece"

torchchat/distributed/checkpoint.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -96,6 +96,7 @@ def _load_checkpoints_from_storage(
9696
checkpoint_path,
9797
map_location=builder_args.device,
9898
mmap=True,
99+
weights_only=False,
99100
)
100101

101102

torchchat/export.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -122,7 +122,7 @@ def export_for_server(
122122
from executorch.exir.tracer import Value
123123

124124
from torch._export import capture_pre_autograd_graph
125-
from torch.export import export, ExportedProgram
125+
from torch.export import export_for_training, ExportedProgram
126126

127127
from torchchat.model import apply_rotary_emb, Attention
128128
from torchchat.utils.build_utils import get_precision
@@ -238,7 +238,7 @@ def _to_core_aten(
238238
raise ValueError(
239239
f"Expected passed in model to be an instance of fx.GraphModule, got {type(model)}"
240240
)
241-
core_aten_ep = export(model, example_inputs, dynamic_shapes=dynamic_shapes)
241+
core_aten_ep = export_for_training(model, example_inputs, dynamic_shapes=dynamic_shapes)
242242
if verbose:
243243
logging.info(f"Core ATen graph:\n{core_aten_ep.graph}")
244244
return core_aten_ep

0 commit comments

Comments
 (0)