
Commit d85657c

Remove call to capture_pre_autograd_graph
Parent: f34359a

1 file changed, 1 addition and 2 deletions

torchchat/export.py

@@ -125,7 +125,6 @@ def export_for_server(
     )
     from executorch.exir.tracer import Value
 
-    from torch._export import capture_pre_autograd_graph
     from torch.export import export, export_for_training, ExportedProgram
 
     from torchchat.model import apply_rotary_emb, Attention
@@ -316,7 +315,7 @@ def export_for_et(model, device, output_path) -> str:
     with torch.nn.attention.sdpa_kernel(
         [torch.nn.attention.SDPBackend.MATH]
     ), torch.no_grad():
-        m = capture_pre_autograd_graph(model, input, dynamic_shapes=dynamic_shapes)
+        m = export_for_training(model, input, dynamic_shapes=dynamic_shapes)
 
         edge_manager = export_to_edge(
             m,
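
For context, capture_pre_autograd_graph lived in the private torch._export namespace and was deprecated in favor of the public torch.export.export_for_training API, which this commit adopts. Below is a minimal, self-contained sketch of the same migration pattern on a toy module; TinyModel and example_inputs are illustrative assumptions, not torchchat code.

# Sketch of the API migration this commit performs.
# TinyModel and example_inputs are hypothetical, for illustration only.
import torch
from torch.export import export_for_training


class TinyModel(torch.nn.Module):
    def __init__(self):
        super().__init__()
        self.linear = torch.nn.Linear(4, 4)

    def forward(self, x):
        return self.linear(x)


model = TinyModel().eval()
example_inputs = (torch.randn(2, 4),)

# Old call, removed by this commit (private, since-deprecated API):
#   from torch._export import capture_pre_autograd_graph
#   m = capture_pre_autograd_graph(model, example_inputs)

# New call: export_for_training returns an ExportedProgram; its .module()
# yields a GraphModule comparable to what capture_pre_autograd_graph returned.
ep = export_for_training(model, example_inputs)
graph_module = ep.module()
print(type(ep), type(graph_module))

Note that in the diff above the ExportedProgram is handed directly to torchchat's export_to_edge helper; whether a .module() call is needed downstream depends on what that helper expects.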
