
Commit aba69ef (1 parent: 290cc39)

deleted changes that don't belong to this PR

File tree

1 file changed: +0 additions, -10 deletions


py/torch_tensorrt/dynamo/_compiler.py

Lines changed: 0 additions & 10 deletions
@@ -885,16 +885,6 @@ def preserve_module_specs(
     # Iterate over all components that can be accelerated
     # Generate the corresponding TRT Module for those

-    # Here we delete the frozen parameters from the graph module. Note this does not affect the submodules. We are going to delete the frozen parameters from the submodules in the convert_module function.
-    # This is done to release CPU memory.
-    for attr in dir(gm):
-        if attr.startswith("_frozen_param"):
-            delattr(gm, attr)
-
-    from torch_tensorrt.dynamo.conversion._ConverterRegistry import DYNAMO_CONVERTERS
-
-    DYNAMO_CONVERTERS.disallowed_targets = set()
-
     for name, _ in partitioned_module.named_children():
         submodule = getattr(partitioned_module, name)
         # filter on the GraphModule
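For reference, the block removed here (deferred to another PR) follows a common pattern for releasing CPU memory held by lifted constants on a torch.fx GraphModule: delete the top-level "_frozen_param*" attributes, leaving submodules untouched. Below is a minimal, self-contained sketch of that pattern; the helper name drop_frozen_params and the toy module are chosen for illustration and are not part of the torch_tensorrt code.

import torch
from torch import fx, nn


class _Toy(nn.Module):
    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return x + 1


def drop_frozen_params(gm: fx.GraphModule) -> None:
    # Delete attributes named "_frozen_param*" from the top-level module only.
    # In the removed code, submodules were cleaned up later (in convert_module),
    # so they are intentionally left alone here as well.
    for attr in dir(gm):
        if attr.startswith("_frozen_param"):
            delattr(gm, attr)


if __name__ == "__main__":
    gm = fx.symbolic_trace(_Toy())
    # Simulate a lifted constant of the kind export/freezing attaches to the module.
    gm.register_buffer("_frozen_param0", torch.zeros(4))
    drop_frozen_params(gm)
    assert not hasattr(gm, "_frozen_param0")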
