
Commit 62e3d71

Rework the code to not use the walrus operator, because Colab's Python 3.7 does not support it
1 parent b8f2dfe commit 62e3d71

File tree

1 file changed: +5 −2 lines


modules/hypernetworks/hypernetwork.py

Lines changed: 5 additions & 2 deletions
```diff
@@ -429,13 +429,16 @@ def train_hypernetwork(hypernetwork_name, learn_rate, batch_size, data_root, log
     weights = hypernetwork.weights()
     for weight in weights:
         weight.requires_grad = True
+
     # Here we use optimizer from saved HN, or we can specify as UI option.
-    if (optimizer_name := hypernetwork.optimizer_name) in optimizer_dict:
+    if hypernetwork.optimizer_name in optimizer_dict:
         optimizer = optimizer_dict[hypernetwork.optimizer_name](params=weights, lr=scheduler.learn_rate)
+        optimizer_name = hypernetwork.optimizer_name
     else:
-        print(f"Optimizer type {optimizer_name} is not defined!")
+        print(f"Optimizer type {hypernetwork.optimizer_name} is not defined!")
         optimizer = torch.optim.AdamW(params=weights, lr=scheduler.learn_rate)
         optimizer_name = 'AdamW'
+
     if hypernetwork.optimizer_state_dict:  # This line must be changed if Optimizer type can be different from saved optimizer.
         try:
             optimizer.load_state_dict(hypernetwork.optimizer_state_dict)
```
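
For context: assignment expressions (the walrus operator `:=`, PEP 572) were introduced in Python 3.8, so the removed line is a `SyntaxError` on the Python 3.7 interpreter Colab shipped at the time. Below is a minimal self-contained sketch of the pattern and its 3.7-compatible rewrite; `pick_optimizer` and the two-entry `optimizer_dict` are illustrative stand-ins, not the module's actual definitions.

```python
import torch

# Illustrative stand-in for the module's optimizer_dict (assumption: the real
# dict maps several optimizer names; two entries suffice to show the pattern).
optimizer_dict = {"AdamW": torch.optim.AdamW, "SGD": torch.optim.SGD}

# Python 3.8+ form (shown as a comment so this file still parses on 3.7):
# the walrus operator binds optimizer_name and tests membership in one expression.
#
#     if (optimizer_name := hypernetwork.optimizer_name) in optimizer_dict:
#         ...

def pick_optimizer(name, params, lr):
    """Python 3.7-compatible rewrite, mirroring the commit: test the name
    directly, then assign the resulting optimizer name separately in each
    branch so it is still defined for later use (e.g. logging or saving)."""
    if name in optimizer_dict:
        return optimizer_dict[name](params=params, lr=lr), name
    print(f"Optimizer type {name} is not defined!")
    return torch.optim.AdamW(params=params, lr=lr), "AdamW"

# Usage example with a dummy parameter list.
params = [torch.nn.Parameter(torch.zeros(4))]
optimizer, optimizer_name = pick_optimizer("AdamW", params, lr=1e-3)
print(optimizer_name, type(optimizer).__name__)  # -> AdamW AdamW
```

The rewrite trades one line of brevity for compatibility: the name is bound explicitly in each branch instead of inside the condition, which is exactly what the diff above does with `optimizer_name`.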
