1 parent b79928d · commit 325e740
src/diffusers/models/transformers/transformer_ltx.py
@@ -412,16 +412,6 @@ def forward(
         for block in self.transformer_blocks:
             if torch.is_grad_enabled() and self.gradient_checkpointing:
-
-                def create_custom_forward(module, return_dict=None):
-                    def custom_forward(*inputs):
-                        if return_dict is not None:
-                            return module(*inputs, return_dict=return_dict)
-                        else:
-                            return module(*inputs)
-
-                    return custom_forward
-
                 hidden_states = self._gradient_checkpointing_func(
                     block,
                     hidden_states,
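The surviving call site hands each `block` directly to `self._gradient_checkpointing_func`, so the `create_custom_forward` closure removed here was dead code: judging from the removed lines, its only job was to bind an optional `return_dict` keyword, which the older reentrant checkpoint API could not pass through positionally. Below is a minimal, self-contained sketch of the resulting pattern, not the diffusers implementation: `TinyTransformer` and `TinyBlock` are hypothetical names, and the choice to default `_gradient_checkpointing_func` to non-reentrant `torch.utils.checkpoint.checkpoint` is an assumption about how such a hook is typically wired.

```python
# Sketch only: illustrates gradient checkpointing via a stored checkpoint
# callable, without a per-call-site closure wrapper. Not diffusers code.
from functools import partial

import torch
import torch.nn as nn
import torch.utils.checkpoint


class TinyBlock(nn.Module):
    """A stand-in for a transformer block (hypothetical)."""

    def __init__(self, dim: int):
        super().__init__()
        self.proj = nn.Linear(dim, dim)

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        return nn.functional.gelu(self.proj(hidden_states))


class TinyTransformer(nn.Module):
    def __init__(self, dim: int = 32, num_blocks: int = 2):
        super().__init__()
        self.blocks = nn.ModuleList(TinyBlock(dim) for _ in range(num_blocks))
        self.gradient_checkpointing = False
        # Assumption: the model stores one checkpointing callable; here it
        # defaults to non-reentrant torch.utils.checkpoint.checkpoint.
        self._gradient_checkpointing_func = partial(
            torch.utils.checkpoint.checkpoint, use_reentrant=False
        )

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        for block in self.blocks:
            if torch.is_grad_enabled() and self.gradient_checkpointing:
                # An nn.Module is itself callable, so it can be passed to the
                # checkpoint function directly; no closure wrapper is needed.
                hidden_states = self._gradient_checkpointing_func(block, hidden_states)
            else:
                hidden_states = block(hidden_states)
        return hidden_states


model = TinyTransformer()
model.gradient_checkpointing = True
out = model(torch.randn(4, 32, requires_grad=True))
out.sum().backward()  # block activations are recomputed during backward
```

With the checkpoint function held in one attribute and called with purely positional arguments, every call site reduces to a single line, which is exactly what this commit's deletion leaves behind.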