Skip to content

Commit f27fbce

Browse files
committed
another attempt to fix circular import
1 parent abf28d5 commit f27fbce

File tree

1 file changed

+4
-1
lines changed

1 file changed

+4
-1
lines changed

src/diffusers/loaders/lora_base.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,6 @@
2525
from huggingface_hub import model_info
2626
from huggingface_hub.constants import HF_HUB_OFFLINE
2727

28-
from ..hooks.group_offloading import _is_group_offload_enabled, _maybe_remove_and_reapply_group_offloading
2928
from ..models.modeling_utils import ModelMixin, load_state_dict
3029
from ..utils import (
3130
USE_PEFT_BACKEND,
@@ -331,6 +330,8 @@ def _load_lora_into_text_encoder(
331330
hotswap: bool = False,
332331
metadata=None,
333332
):
333+
from ..hooks.group_offloading import _maybe_remove_and_reapply_group_offloading
334+
334335
if not USE_PEFT_BACKEND:
335336
raise ValueError("PEFT backend is required for this method.")
336337

@@ -442,6 +443,8 @@ def _func_optionally_disable_offloading(_pipeline):
442443
tuple:
443444
A tuple indicating if `is_model_cpu_offload` or `is_sequential_cpu_offload` or `is_group_offload` is True.
444445
"""
446+
from ..hooks.group_offloading import _is_group_offload_enabled
447+
445448
is_model_cpu_offload = False
446449
is_sequential_cpu_offload = False
447450
is_group_offload = False

0 commit comments

Comments
 (0)