Skip to content

Commit 325e740

Browse files
committed
remove unused function (dead `create_custom_forward` helper)
1 parent b79928d commit 325e740

File tree

1 file changed

+0
-10
lines changed

1 file changed

+0
-10
lines changed

src/diffusers/models/transformers/transformer_ltx.py

Lines changed: 0 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -412,16 +412,6 @@ def forward(
412412

413413
for block in self.transformer_blocks:
414414
if torch.is_grad_enabled() and self.gradient_checkpointing:
415-
416-
def create_custom_forward(module, return_dict=None):
417-
def custom_forward(*inputs):
418-
if return_dict is not None:
419-
return module(*inputs, return_dict=return_dict)
420-
else:
421-
return module(*inputs)
422-
423-
return custom_forward
424-
425415
hidden_states = self._gradient_checkpointing_func(
426416
block,
427417
hidden_states,

0 commit comments

Comments
 (0)