
Commit 704aef4

remove unused
1 parent 6bc1dd5 commit 704aef4

File tree

2 files changed: 0 additions, 40 deletions


src/diffusers/guiders/guider_utils.py

Lines changed: 0 additions & 39 deletions
@@ -20,7 +20,6 @@
 
 
 if TYPE_CHECKING:
-    from ..models.attention_processor import AttentionProcessor
     from ..pipelines.modular_pipeline import BlockState
 
 
@@ -214,41 +213,3 @@ def rescale_noise_cfg(noise_cfg, noise_pred_text, guidance_rescale=0.0):
     # mix with the original results from guidance by factor guidance_rescale to avoid "plain looking" images
     noise_cfg = guidance_rescale * noise_pred_rescaled + (1 - guidance_rescale) * noise_cfg
     return noise_cfg
-
-
-def _default_prepare_inputs(denoiser: torch.nn.Module, num_conditions: int, *args: Union[Tuple[torch.Tensor], List[torch.Tensor]]) -> Tuple[List[torch.Tensor], ...]:
-    """
-    Prepares the inputs for the denoiser by ensuring that the conditional and unconditional inputs are correctly
-    prepared based on required number of conditions. This function is used in the `prepare_inputs` method of the
-    `BaseGuidance` class.
-
-    Either tensors or tuples/lists of tensors can be provided. If a tuple/list is provided, it should contain two elements:
-    - The first element is the conditional input.
-    - The second element is the unconditional input or None.
-
-    If only the conditional input is provided, it will be repeated for all batches.
-
-    If both conditional and unconditional inputs are provided, they are alternated as batches of data.
-    """
-    list_of_inputs = []
-    for arg in args:
-        if arg is None or isinstance(arg, torch.Tensor):
-            list_of_inputs.append([arg] * num_conditions)
-        elif isinstance(arg, (tuple, list)):
-            if len(arg) != 2:
-                raise ValueError(
-                    f"Expected a tuple or list of length 2, but got {len(arg)} for argument {arg}. Please provide a tuple/list of length 2 "
-                    f"with the first element being the conditional input and the second element being the unconditional input or None."
-                )
-            if arg[1] is None:
-                # Only conditioning inputs for all batches
-                list_of_inputs.append([arg[0]] * num_conditions)
-            else:
-                # Alternating conditional and unconditional inputs as batches
-                inputs = [arg[i % 2] for i in range(num_conditions)]
-                list_of_inputs.append(inputs)
-        else:
-            raise ValueError(
-                f"Expected a tensor, tuple, or list, but got {type(arg)} for argument {arg}. Please provide a tensor, tuple, or list."
-            )
-    return tuple(list_of_inputs)
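For context on what was removed: _default_prepare_inputs repeated a lone tensor for every condition and alternated (conditional, unconditional) pairs across the requested number of conditions. A minimal standalone sketch of that pairing behaviour, not part of this commit; the helper name, tensor shapes, and num_conditions=2 here are illustrative assumptions:

import torch

# Simplified restatement of the removed helper's pairing rule: a bare tensor is
# reused for every condition, while a (cond, uncond) pair is alternated.
def prepare_inputs_sketch(num_conditions, *args):
    prepared = []
    for arg in args:
        if arg is None or isinstance(arg, torch.Tensor):
            prepared.append([arg] * num_conditions)
        else:
            cond, uncond = arg
            if uncond is None:
                prepared.append([cond] * num_conditions)
            else:
                prepared.append([arg[i % 2] for i in range(num_conditions)])
    return tuple(prepared)

latents = torch.randn(1, 4, 64, 64)
cond_emb = torch.randn(1, 77, 768)
uncond_emb = torch.zeros(1, 77, 768)

latent_batches, emb_batches = prepare_inputs_sketch(2, latents, (cond_emb, uncond_emb))
# The latents are shared by both conditions; the embeddings alternate cond/uncond.
assert latent_batches[0] is latent_batches[1]
assert emb_batches[0] is cond_emb and emb_batches[1] is uncond_emb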

src/diffusers/schedulers/scheduling_euler_discrete.py

Lines changed: 0 additions & 1 deletion
@@ -584,7 +584,6 @@ def step(
         s_noise: float = 1.0,
         generator: Optional[torch.Generator] = None,
         return_dict: bool = True,
-        _model_output_uncond: Optional[torch.Tensor] = None,
     ) -> Union[EulerDiscreteSchedulerOutput, Tuple]:
         """
         Predict the sample from the previous timestep by reversing the SDE. This function propagates the diffusion
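With the private _model_output_uncond argument gone, step() keeps only its documented public parameters. A minimal sketch of a denoising loop against that public signature, not taken from this commit; the zero tensor stands in for a real model prediction, and the shapes and step count are assumptions:

import torch
from diffusers import EulerDiscreteScheduler

scheduler = EulerDiscreteScheduler()
scheduler.set_timesteps(30)

# Start from pure noise scaled to the scheduler's initial sigma.
sample = torch.randn(1, 4, 64, 64) * scheduler.init_noise_sigma
for t in scheduler.timesteps:
    model_input = scheduler.scale_model_input(sample, t)
    model_output = torch.zeros_like(model_input)  # stand-in for a denoiser's prediction
    sample = scheduler.step(model_output, t, sample).prev_sample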
