@@ -192,11 +192,6 @@ def load_lora_adapter(self, pretrained_model_name_or_path_or_dict, prefix="trans
         from peft import LoraConfig, inject_adapter_in_model, set_peft_model_state_dict
         from peft.tuners.tuners_utils import BaseTunerLayer
 
-        try:
-            from peft.utils.constants import FULLY_QUALIFIED_PATTERN_KEY_PREFIX
-        except ImportError:
-            FULLY_QUALIFIED_PATTERN_KEY_PREFIX = None
-
         cache_dir = kwargs.pop("cache_dir", None)
         force_download = kwargs.pop("force_download", False)
         proxies = kwargs.pop("proxies", None)
@@ -261,22 +256,16 @@ def load_lora_adapter(self, pretrained_model_name_or_path_or_dict, prefix="trans
                 # Cannot figure out rank from lora layers that don't have at least 2 dimensions.
                 # Bias layers in LoRA only have a single dimension
                 if "lora_B" in key and val.ndim > 1:
-                    # Support to handle cases where layer patterns are treated as full layer names
-                    # was added later in PEFT. So, we handle it accordingly.
-                    # TODO: when we fix the minimal PEFT version for Diffusers,
-                    # we should remove `_maybe_adjust_config()`.
-                    if FULLY_QUALIFIED_PATTERN_KEY_PREFIX:
-                        rank[f"{FULLY_QUALIFIED_PATTERN_KEY_PREFIX}{key}"] = val.shape[1]
-                    else:
-                        rank[key] = val.shape[1]
+                    # TODO: revisit this after https://github.com/huggingface/peft/pull/2382 is merged.
+                    rank[key] = val.shape[1]
 
             if network_alphas is not None and len(network_alphas) >= 1:
                 alpha_keys = [k for k in network_alphas.keys() if k.startswith(f"{prefix}.")]
                 network_alphas = {k.replace(f"{prefix}.", ""): v for k, v in network_alphas.items() if k in alpha_keys}
 
             lora_config_kwargs = get_peft_kwargs(rank, network_alpha_dict=network_alphas, peft_state_dict=state_dict)
-            if not FULLY_QUALIFIED_PATTERN_KEY_PREFIX:
-                lora_config_kwargs = _maybe_adjust_config(lora_config_kwargs)
+            # TODO: revisit this after https://github.com/huggingface/peft/pull/2382 is merged.
+            lora_config_kwargs = _maybe_adjust_config(lora_config_kwargs)
 
             if "use_dora" in lora_config_kwargs:
                 if lora_config_kwargs["use_dora"]:
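
Note (not part of the commit): the `rank[key] = val.shape[1]` lookup kept by this diff works because of how LoRA factorizes a weight update. For a layer with weight of shape (out_features, in_features), lora_A has shape (r, in_features) and lora_B has shape (out_features, r), so the second axis of any 2-D lora_B tensor is the adapter rank r. Below is a minimal, self-contained sketch of the same extraction; the module names (to_q, to_k) and ranks are illustrative, not taken from the commit.

import torch

# Toy LoRA state dict. lora_A is (rank, in_features) and lora_B is
# (out_features, rank), so the rank can be read off lora_B's second axis.
state_dict = {
    "to_q.lora_A.weight": torch.zeros(4, 320),
    "to_q.lora_B.weight": torch.zeros(320, 4),
    "to_k.lora_A.weight": torch.zeros(8, 320),
    "to_k.lora_B.weight": torch.zeros(320, 8),
    "to_q.lora_B.bias": torch.zeros(320),  # 1-D, filtered out by the ndim > 1 guard
}

rank = {}
for key, val in state_dict.items():
    # Same guard as the hunk above: only 2-D lora_B weights carry the rank.
    if "lora_B" in key and val.ndim > 1:
        rank[key] = val.shape[1]

print(rank)  # {'to_q.lora_B.weight': 4, 'to_k.lora_B.weight': 8}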