Commit e0b788f

removed unload_before and unload_after option
1 parent 505b204 commit e0b788f

2 files changed: +2 -13 lines changed

2 files changed

+2
-13
lines changed

pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 [project]
 name = "comfyui_tensorrt"
 description = "TensorRT Node for ComfyUI\nThis node enables the best performance on NVIDIA RTX™ Graphics Cards (GPUs) for Stable Diffusion by leveraging NVIDIA TensorRT."
-version = "0.1.1"
+version = "0.1.2"
 license = "LICENSE"
 dependencies = [
     "tensorrt>=10.0.1",

tensorrt_convert.py

Lines changed: 1 addition & 12 deletions
@@ -153,15 +153,11 @@ def _convert(
         context_max,
         num_video_frames,
         is_static: bool,
-        unload_before: bool = True,
-        unload_after: bool = True,
     ):
         output_onnx = os.path.normpath(
             os.path.join(self.temp_dir, str(time.time()), "model.onnx")
         )

-        if unload_before:
-            comfy.model_management.unload_all_models()
         comfy.model_management.load_models_gpu([model], force_patch_weights=True)
         unet = model.model.diffusion_model

@@ -286,8 +282,7 @@ def forward(self, x, timesteps, context, y=None):
             dynamic_axes=dynamic_axes,
         )

-        if unload_after:
-            comfy.model_management.unload_all_models()
+        comfy.model_management.unload_all_models()
         comfy.model_management.soft_empty_cache()

         # TRT conversion starts here
@@ -606,8 +601,6 @@ def INPUT_TYPES(s):
                     "step": 1,
                 },
             ),
-            "unload_before": ("BOOLEAN", {"default": True}),
-            "unload_after": ("BOOLEAN", {"default": True}),
         },
     }

@@ -620,8 +613,6 @@ def convert(
         width_opt,
         context_opt,
         num_video_frames,
-        unload_before: bool = True,
-        unload_after: bool = True,
     ):
         return super()._convert(
             model,
@@ -640,8 +631,6 @@ def convert(
             context_opt,
             num_video_frames,
             is_static=True,
-            unload_before=unload_before,
-            unload_after=unload_after,
         )

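For context, here is a minimal sketch of the model-management flow in the node's _convert method after this commit, condensed from the hunks above. It assumes a ComfyUI environment where comfy.model_management is importable; the ONNX-export arguments are elided, and export_unet is a hypothetical placeholder for that step.

# Sketch only: condensed from the diff above, not the full _convert body.
# Assumes ComfyUI is installed so that comfy.model_management resolves.
import comfy.model_management


def _convert_flow(model, export_unet):
    # With unload_before/unload_after removed, the model is loaded
    # unconditionally before export...
    comfy.model_management.load_models_gpu([model], force_patch_weights=True)
    unet = model.model.diffusion_model

    export_unet(unet)  # placeholder for the torch.onnx.export step

    # ...and all models are unloaded unconditionally afterwards,
    # followed by a soft cache flush.
    comfy.model_management.unload_all_models()
    comfy.model_management.soft_empty_cache()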

0 commit comments
