Skip to content

Commit a07f054

Browse files
committed
Add missing info on hypernetwork/embedding model log
Mentioned here: #1528 (comment). Also group the saving logic into one function.
1 parent ab05a74 commit a07f054

File tree

2 files changed

+47
-23
lines changed

2 files changed

+47
-23
lines changed

modules/hypernetworks/hypernetwork.py

Lines changed: 21 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -361,6 +361,7 @@ def train_hypernetwork(hypernetwork_name, learn_rate, batch_size, data_root, log
361361
images_dir = None
362362

363363
hypernetwork = shared.loaded_hypernetwork
364+
checkpoint = sd_models.select_checkpoint()
364365

365366
ititial_step = hypernetwork.step or 0
366367
if ititial_step > steps:
@@ -449,9 +450,9 @@ def train_hypernetwork(hypernetwork_name, learn_rate, batch_size, data_root, log
449450

450451
if hypernetwork_dir is not None and steps_done % save_hypernetwork_every == 0:
451452
# Before saving, change name to match current checkpoint.
452-
hypernetwork.name = f'{hypernetwork_name}-{steps_done}'
453-
last_saved_file = os.path.join(hypernetwork_dir, f'{hypernetwork.name}.pt')
454-
hypernetwork.save(last_saved_file)
453+
hypernetwork_name_every = f'{hypernetwork_name}-{steps_done}'
454+
last_saved_file = os.path.join(hypernetwork_dir, f'{hypernetwork_name_every}.pt')
455+
save_hypernetwork(hypernetwork, checkpoint, hypernetwork_name, last_saved_file)
455456

456457
textual_inversion.write_loss(log_directory, "hypernetwork_loss.csv", hypernetwork.step, len(ds), {
457458
"loss": f"{previous_mean_loss:.7f}",
@@ -512,13 +513,23 @@ def train_hypernetwork(hypernetwork_name, learn_rate, batch_size, data_root, log
512513
"""
513514

514515
report_statistics(loss_dict)
515-
checkpoint = sd_models.select_checkpoint()
516516

517-
hypernetwork.sd_checkpoint = checkpoint.hash
518-
hypernetwork.sd_checkpoint_name = checkpoint.model_name
519-
# Before saving for the last time, change name back to the base name (as opposed to the save_hypernetwork_every step-suffixed naming convention).
520-
hypernetwork.name = hypernetwork_name
521-
filename = os.path.join(shared.cmd_opts.hypernetwork_dir, f'{hypernetwork.name}.pt')
522-
hypernetwork.save(filename)
517+
filename = os.path.join(shared.cmd_opts.hypernetwork_dir, f'{hypernetwork_name}.pt')
518+
save_hypernetwork(hypernetwork, checkpoint, hypernetwork_name, filename)
523519

524520
return hypernetwork, filename
521+
522+
def save_hypernetwork(hypernetwork, checkpoint, hypernetwork_name, filename):
    """Save *hypernetwork* to *filename*, stamping checkpoint metadata first.

    Before calling ``hypernetwork.save`` this sets the object's ``name``,
    ``sd_checkpoint`` (checkpoint hash) and ``sd_checkpoint_name`` so the
    saved file records which model it was trained against.  On success the
    new metadata is kept on the in-memory object; if saving fails for any
    reason, the previous values are restored and the exception is re-raised
    unchanged so the caller still sees the original error.
    """
    old_hypernetwork_name = hypernetwork.name
    # getattr with a default replaces the hasattr/ternary dance; the
    # attributes may be absent on objects loaded from older files.
    old_sd_checkpoint = getattr(hypernetwork, "sd_checkpoint", None)
    old_sd_checkpoint_name = getattr(hypernetwork, "sd_checkpoint_name", None)
    try:
        hypernetwork.sd_checkpoint = checkpoint.hash
        hypernetwork.sd_checkpoint_name = checkpoint.model_name
        hypernetwork.name = hypernetwork_name
        hypernetwork.save(filename)
    except BaseException:
        # Deliberately broad (equivalent to a bare ``except:`` but lint-clean,
        # PEP 8/E722): even on KeyboardInterrupt the in-memory object must be
        # rolled back before the exception propagates.
        hypernetwork.sd_checkpoint = old_sd_checkpoint
        hypernetwork.sd_checkpoint_name = old_sd_checkpoint_name
        hypernetwork.name = old_hypernetwork_name
        raise

modules/textual_inversion/textual_inversion.py

Lines changed: 26 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -119,7 +119,7 @@ def process_file(path, filename):
119119
vec = emb.detach().to(devices.device, dtype=torch.float32)
120120
embedding = Embedding(vec, name)
121121
embedding.step = data.get('step', None)
122-
embedding.sd_checkpoint = data.get('hash', None)
122+
embedding.sd_checkpoint = data.get('sd_checkpoint', None)
123123
embedding.sd_checkpoint_name = data.get('sd_checkpoint_name', None)
124124
self.register_embedding(embedding, shared.sd_model)
125125

@@ -259,6 +259,7 @@ def train_embedding(embedding_name, learn_rate, batch_size, data_root, log_direc
259259
hijack = sd_hijack.model_hijack
260260

261261
embedding = hijack.embedding_db.word_embeddings[embedding_name]
262+
checkpoint = sd_models.select_checkpoint()
262263

263264
ititial_step = embedding.step or 0
264265
if ititial_step > steps:
@@ -314,9 +315,9 @@ def train_embedding(embedding_name, learn_rate, batch_size, data_root, log_direc
314315

315316
if embedding_dir is not None and steps_done % save_embedding_every == 0:
316317
# Before saving, change name to match current checkpoint.
317-
embedding.name = f'{embedding_name}-{steps_done}'
318-
last_saved_file = os.path.join(embedding_dir, f'{embedding.name}.pt')
319-
embedding.save(last_saved_file)
318+
embedding_name_every = f'{embedding_name}-{steps_done}'
319+
last_saved_file = os.path.join(embedding_dir, f'{embedding_name_every}.pt')
320+
save_embedding(embedding, checkpoint, embedding_name_every, last_saved_file, remove_cached_checksum=True)
320321
embedding_yet_to_be_embedded = True
321322

322323
write_loss(log_directory, "textual_inversion_loss.csv", embedding.step, len(ds), {
@@ -397,14 +398,26 @@ def train_embedding(embedding_name, learn_rate, batch_size, data_root, log_direc
397398
</p>
398399
"""
399400

400-
checkpoint = sd_models.select_checkpoint()
401-
402-
embedding.sd_checkpoint = checkpoint.hash
403-
embedding.sd_checkpoint_name = checkpoint.model_name
404-
embedding.cached_checksum = None
405-
# Before saving for the last time, change name back to base name (as opposed to the save_embedding_every step-suffixed naming convention).
406-
embedding.name = embedding_name
407-
filename = os.path.join(shared.cmd_opts.embeddings_dir, f'{embedding.name}.pt')
408-
embedding.save(filename)
401+
filename = os.path.join(shared.cmd_opts.embeddings_dir, f'{embedding_name}.pt')
402+
save_embedding(embedding, checkpoint, embedding_name, filename, remove_cached_checksum=True)
409403

410404
return embedding, filename
405+
406+
def save_embedding(embedding, checkpoint, embedding_name, filename, remove_cached_checksum=True):
    """Save *embedding* to *filename*, stamping checkpoint metadata first.

    Before calling ``embedding.save`` this sets the object's ``name``,
    ``sd_checkpoint`` (checkpoint hash) and ``sd_checkpoint_name`` so the
    saved file records which model it was trained against.  When
    *remove_cached_checksum* is true, ``cached_checksum`` is cleared so a
    stale checksum is never written out.  On success the new metadata is
    kept; if saving fails for any reason, every touched attribute is
    restored and the exception is re-raised unchanged.
    """
    old_embedding_name = embedding.name
    # getattr with a default replaces the hasattr/ternary dance; the
    # attributes may be absent on embeddings loaded from older files.
    old_sd_checkpoint = getattr(embedding, "sd_checkpoint", None)
    old_sd_checkpoint_name = getattr(embedding, "sd_checkpoint_name", None)
    old_cached_checksum = getattr(embedding, "cached_checksum", None)
    try:
        embedding.sd_checkpoint = checkpoint.hash
        embedding.sd_checkpoint_name = checkpoint.model_name
        if remove_cached_checksum:
            embedding.cached_checksum = None
        embedding.name = embedding_name
        embedding.save(filename)
    except BaseException:
        # Deliberately broad (equivalent to a bare ``except:`` but lint-clean,
        # PEP 8/E722): even on KeyboardInterrupt the in-memory object must be
        # rolled back before the exception propagates.
        embedding.sd_checkpoint = old_sd_checkpoint
        embedding.sd_checkpoint_name = old_sd_checkpoint_name
        embedding.name = old_embedding_name
        embedding.cached_checksum = old_cached_checksum
        raise

0 commit comments

Comments
 (0)