
Commit 49f813b

Improved comments
1 parent f3e80b3

2 files changed: +3 −2 lines changed

thunder/dynamo/compiler.py

Lines changed: 2 additions & 1 deletion
@@ -135,7 +135,8 @@ def __call__(self, gm: torch.fx.GraphModule, sample_args: list[torch.SymInt, tor

         remove_empty_autocast(gm)

-        # Convert tag_activation_checkpoint operators, which is merely a tagger for torch.compile stack, to actual checkpoint calls
+        # Convert tag_activation_checkpoint operators, which is meaningless in eager mode, to actual checkpoint calls
+        # This will not be needed when we have found a way to make tag_activation_checkpoint fall back to PyTorch's backend
         convert_checkpoint_tags(gm)

         # The whole graph may not be supported by `thunder`, so we split it in `thunder` supported sections
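For context, a minimal sketch of where these tag_activation_checkpoint nodes come from. The backend name show_graph and the function f are illustrative only; this assumes a recent PyTorch where Dynamo captures non-reentrant torch.utils.checkpoint.checkpoint calls as the tag_activation_checkpoint higher-order op:

import torch
import torch.utils.checkpoint

def show_graph(gm, example_inputs):
    # Inspection-only backend: print the captured FX graph, then run it
    # unchanged. The checkpointed region appears as a single call_function
    # node targeting torch.ops.higher_order.tag_activation_checkpoint.
    gm.graph.print_tabular()
    return gm.forward

@torch.compile(backend=show_graph)
def f(x, w):
    # Non-reentrant checkpointing is what Dynamo records as the tag HOP.
    return torch.utils.checkpoint.checkpoint(
        lambda t: torch.sin(t) @ w, x, use_reentrant=False
    )

f(torch.randn(4, 4, requires_grad=True), torch.randn(4, 4))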

thunder/dynamo/utils.py

Lines changed: 1 addition & 1 deletion
@@ -635,7 +635,7 @@ def convert_checkpoint_tags(gm: torch.fx.GraphModule):
     """
     Replaces tag_activation_checkpoint operators with torch.utils.checkpoint.checkpoint calls.

-    tag_activation_checkpoint only marks nodes for recomputation within torch.compile but does not execute checkpointing itself.
+    tag_activation_checkpoint only marks nodes for torch.compile stack but does not execute actual checkpointing in eager mode.
     """
     for n in gm.graph.nodes:
         if n.op == "call_function" and n.target is torch.ops.higher_order.tag_activation_checkpoint:
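As a rough illustration of what such a conversion could look like, here is a hedged sketch, not thunder's actual implementation. It assumes the HOP node's first argument is the wrapped, callable submodule, that retargeting the node in place is sufficient, and that use_reentrant=False matches the semantics the tag was recorded with:

import torch
import torch.utils.checkpoint

def convert_checkpoint_tags_sketch(gm: torch.fx.GraphModule) -> None:
    # Retarget each tag_activation_checkpoint node so that running the
    # graph eagerly performs real activation checkpointing.
    for n in gm.graph.nodes:
        if n.op == "call_function" and n.target is torch.ops.higher_order.tag_activation_checkpoint:
            # torch.utils.checkpoint.checkpoint(fn, *args) actually
            # recomputes fn in the backward pass, unlike the tag-only HOP.
            n.target = torch.utils.checkpoint.checkpoint
            # Assumption in this sketch: force the non-reentrant variant.
            n.kwargs = {**n.kwargs, "use_reentrant": False}
    gm.graph.lint()
    gm.recompile()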
