Skip to content

Commit 73be58a

Browse files
author
Lincoln Stein
committed
fix issue #3293
1 parent 5a7d11b commit 73be58a

File tree

2 files changed: +7 −4 lines changed

2 files changed: +7 −4 lines changed

ldm/invoke/_version.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
-__version__='2.3.5-rc1'
+__version__='2.3.5-rc2'

ldm/modules/kohya_lora_manager.py

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@
 from torch.utils.hooks import RemovableHandle
 from transformers import CLIPTextModel

-from ..invoke.globals import global_lora_models_dir
+from ..invoke.globals import global_lora_models_dir, Globals
 from ..invoke.devices import choose_torch_device

 """
@@ -456,10 +456,12 @@ def load_from_dict(self, state_dict):


 class KohyaLoraManager:
-    lora_path = Path(global_lora_models_dir())
-    vector_length_cache_path = lora_path / '.vectorlength.cache'
+    lora_path = None
+    vector_length_cache_path = None

     def __init__(self, pipe):
+        self.lora_path = Path(global_lora_models_dir())
+        self.vector_length_cache_path = self.lora_path / '.vectorlength.cache'
         self.unet = pipe.unet
         self.wrapper = LoRAModuleWrapper(pipe.unet, pipe.text_encoder)
         self.text_encoder = pipe.text_encoder
@@ -566,6 +568,7 @@ def vector_length_from_checkpoint_file(self, checkpoint_path: Path) -> int:
 class LoraVectorLengthCache(object):
     def __init__(self, cache_path: Path):
         self.cache_path = cache_path
+        print(f'DEBUG: lock path = {Path(cache_path.parent, ".cachelock")}')
         self.lock = FileLock(Path(cache_path.parent, ".cachelock"))
         self.cache = {}

0 commit comments

Comments (0)