We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 473bbad · commit 2714043 (Copy full SHA for 2714043)
src/diffusers/loaders/lora_base.py
@@ -564,12 +564,6 @@ def set_adapters(
         for adapter_name, weights in zip(adapter_names, adapter_weights):
             if isinstance(weights, dict):
                 component_adapter_weights = weights.pop(component, None)
-
-                if component_adapter_weights is not None and not hasattr(self, component):
-                    logger.warning(
-                        f"Lora weight dict contains {component} weights but will be ignored because pipeline does not have {component}."
-                    )

             if component_adapter_weights is not None and component not in invert_list_adapters[adapter_name]:
                 logger.warning(
                     (
0 commit comments