Skip to content

Commit 37ba007

Browse files
Merge branch 'master' into feat/allow-origins
2 parents b8435e6 + c9b2eef commit 37ba007

File tree

9 files changed

+108
-28
lines changed

modules/api/api.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -218,6 +218,10 @@ def get_config(self):
218218
return options
219219

220220
def set_config(self, req: OptionsModel):
221+
# currently req has all options fields even if you send a dict like { "send_seed": false }, which means it will
222+
# overwrite all options with default values.
223+
raise RuntimeError('Setting options via API is not supported')
224+
221225
reqDict = vars(req)
222226
for o in reqDict:
223227
setattr(shared.opts, o, reqDict[o])

modules/api/models.py

Lines changed: 13 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import inspect
22
from pydantic import BaseModel, Field, create_model
3-
from typing import Any, Optional, Union
3+
from typing import Any, Optional
44
from typing_extensions import Literal
55
from inflection import underscore
66
from modules.processing import StableDiffusionProcessingTxt2Img, StableDiffusionProcessingImg2Img
@@ -185,22 +185,22 @@ class ProgressResponse(BaseModel):
185185
for key in _options:
186186
if(_options[key].dest != 'help'):
187187
flag = _options[key]
188-
_type = str
189-
if(_options[key].default != None): _type = type(_options[key].default)
188+
_type = str
189+
if _options[key].default is not None: _type = type(_options[key].default)
190190
flags.update({flag.dest: (_type,Field(default=flag.default, description=flag.help))})
191191

192192
FlagsModel = create_model("Flags", **flags)
193193

194194
class SamplerItem(BaseModel):
195195
name: str = Field(title="Name")
196-
aliases: list[str] = Field(title="Aliases")
196+
aliases: list[str] = Field(title="Aliases")
197197
options: dict[str, str] = Field(title="Options")
198198

199199
class UpscalerItem(BaseModel):
200200
name: str = Field(title="Name")
201-
model_name: str | None = Field(title="Model Name")
202-
model_path: str | None = Field(title="Path")
203-
model_url: str | None = Field(title="URL")
201+
model_name: Optional[str] = Field(title="Model Name")
202+
model_path: Optional[str] = Field(title="Path")
203+
model_url: Optional[str] = Field(title="URL")
204204

205205
class SDModelItem(BaseModel):
206206
title: str = Field(title="Title")
@@ -211,21 +211,21 @@ class SDModelItem(BaseModel):
211211

212212
class HypernetworkItem(BaseModel):
213213
name: str = Field(title="Name")
214-
path: str | None = Field(title="Path")
214+
path: Optional[str] = Field(title="Path")
215215

216216
class FaceRestorerItem(BaseModel):
217217
name: str = Field(title="Name")
218-
cmd_dir: str | None = Field(title="Path")
218+
cmd_dir: Optional[str] = Field(title="Path")
219219

220220
class RealesrganItem(BaseModel):
221221
name: str = Field(title="Name")
222-
path: str | None = Field(title="Path")
223-
scale: int | None = Field(title="Scale")
222+
path: Optional[str] = Field(title="Path")
223+
scale: Optional[int] = Field(title="Scale")
224224

225225
class PromptStyleItem(BaseModel):
226226
name: str = Field(title="Name")
227-
prompt: str | None = Field(title="Prompt")
228-
negative_prompt: str | None = Field(title="Negative Prompt")
227+
prompt: Optional[str] = Field(title="Prompt")
228+
negative_prompt: Optional[str] = Field(title="Negative Prompt")
229229

230230
class ArtistItem(BaseModel):
231231
name: str = Field(title="Name")

modules/extensions.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -34,8 +34,11 @@ def __init__(self, name, path, enabled=True):
3434
if repo is None or repo.bare:
3535
self.remote = None
3636
else:
37-
self.remote = next(repo.remote().urls, None)
38-
self.status = 'unknown'
37+
try:
38+
self.remote = next(repo.remote().urls, None)
39+
self.status = 'unknown'
40+
except Exception:
41+
self.remote = None
3942

4043
def list_files(self, subdir, extension):
4144
from modules import scripts

modules/hypernetworks/hypernetwork.py

Lines changed: 50 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,8 @@
2222
from statistics import stdev, mean
2323

2424

25+
optimizer_dict = {optim_name : cls_obj for optim_name, cls_obj in inspect.getmembers(torch.optim, inspect.isclass) if optim_name != "Optimizer"}
26+
2527
class HypernetworkModule(torch.nn.Module):
2628
multiplier = 1.0
2729
activation_dict = {
@@ -142,6 +144,8 @@ def __init__(self, name=None, enable_sizes=None, layer_structure=None, activatio
142144
self.use_dropout = use_dropout
143145
self.activate_output = activate_output
144146
self.last_layer_dropout = kwargs['last_layer_dropout'] if 'last_layer_dropout' in kwargs else True
147+
self.optimizer_name = None
148+
self.optimizer_state_dict = None
145149

146150
for size in enable_sizes or []:
147151
self.layers[size] = (
@@ -163,6 +167,7 @@ def weights(self):
163167

164168
def save(self, filename):
165169
state_dict = {}
170+
optimizer_saved_dict = {}
166171

167172
for k, v in self.layers.items():
168173
state_dict[k] = (v[0].state_dict(), v[1].state_dict())
@@ -178,8 +183,15 @@ def save(self, filename):
178183
state_dict['sd_checkpoint_name'] = self.sd_checkpoint_name
179184
state_dict['activate_output'] = self.activate_output
180185
state_dict['last_layer_dropout'] = self.last_layer_dropout
181-
186+
187+
if self.optimizer_name is not None:
188+
optimizer_saved_dict['optimizer_name'] = self.optimizer_name
189+
182190
torch.save(state_dict, filename)
191+
if shared.opts.save_optimizer_state and self.optimizer_state_dict:
192+
optimizer_saved_dict['hash'] = sd_models.model_hash(filename)
193+
optimizer_saved_dict['optimizer_state_dict'] = self.optimizer_state_dict
194+
torch.save(optimizer_saved_dict, filename + '.optim')
183195

184196
def load(self, filename):
185197
self.filename = filename
@@ -202,6 +214,18 @@ def load(self, filename):
202214
print(f"Activate last layer is set to {self.activate_output}")
203215
self.last_layer_dropout = state_dict.get('last_layer_dropout', False)
204216

217+
optimizer_saved_dict = torch.load(self.filename + '.optim', map_location = 'cpu') if os.path.exists(self.filename + '.optim') else {}
218+
self.optimizer_name = optimizer_saved_dict.get('optimizer_name', 'AdamW')
219+
print(f"Optimizer name is {self.optimizer_name}")
220+
if sd_models.model_hash(filename) == optimizer_saved_dict.get('hash', None):
221+
self.optimizer_state_dict = optimizer_saved_dict.get('optimizer_state_dict', None)
222+
else:
223+
self.optimizer_state_dict = None
224+
if self.optimizer_state_dict:
225+
print("Loaded existing optimizer from checkpoint")
226+
else:
227+
print("No saved optimizer exists in checkpoint")
228+
205229
for size, sd in state_dict.items():
206230
if type(size) == int:
207231
self.layers[size] = (
@@ -219,11 +243,11 @@ def load(self, filename):
219243

220244
def list_hypernetworks(path):
221245
res = {}
222-
for filename in glob.iglob(os.path.join(path, '**/*.pt'), recursive=True):
246+
for filename in sorted(glob.iglob(os.path.join(path, '**/*.pt'), recursive=True)):
223247
name = os.path.splitext(os.path.basename(filename))[0]
224248
# Prevent a hypothetical "None.pt" from being listed.
225249
if name != "None":
226-
res[name] = filename
250+
res[name + f"({sd_models.model_hash(filename)})"] = filename
227251
return res
228252

229253

@@ -358,6 +382,7 @@ def train_hypernetwork(hypernetwork_name, learn_rate, batch_size, data_root, log
358382
shared.state.textinfo = "Initializing hypernetwork training..."
359383
shared.state.job_count = steps
360384

385+
hypernetwork_name = hypernetwork_name.rsplit('(', 1)[0]
361386
filename = os.path.join(shared.cmd_opts.hypernetwork_dir, f'{hypernetwork_name}.pt')
362387

363388
log_directory = os.path.join(log_directory, datetime.datetime.now().strftime("%Y-%m-%d"), hypernetwork_name)
@@ -404,8 +429,19 @@ def train_hypernetwork(hypernetwork_name, learn_rate, batch_size, data_root, log
404429
weights = hypernetwork.weights()
405430
for weight in weights:
406431
weight.requires_grad = True
407-
# if optimizer == "AdamW": or else Adam / AdamW / SGD, etc...
408-
optimizer = torch.optim.AdamW(weights, lr=scheduler.learn_rate)
432+
# Here we use optimizer from saved HN, or we can specify as UI option.
433+
if (optimizer_name := hypernetwork.optimizer_name) in optimizer_dict:
434+
optimizer = optimizer_dict[hypernetwork.optimizer_name](params=weights, lr=scheduler.learn_rate)
435+
else:
436+
print(f"Optimizer type {optimizer_name} is not defined!")
437+
optimizer = torch.optim.AdamW(params=weights, lr=scheduler.learn_rate)
438+
optimizer_name = 'AdamW'
439+
if hypernetwork.optimizer_state_dict: # This line must be changed if Optimizer type can be different from saved optimizer.
440+
try:
441+
optimizer.load_state_dict(hypernetwork.optimizer_state_dict)
442+
except RuntimeError as e:
443+
print("Cannot resume from saved optimizer!")
444+
print(e)
409445

410446
steps_without_grad = 0
411447

@@ -467,7 +503,11 @@ def train_hypernetwork(hypernetwork_name, learn_rate, batch_size, data_root, log
467503
# Before saving, change name to match current checkpoint.
468504
hypernetwork_name_every = f'{hypernetwork_name}-{steps_done}'
469505
last_saved_file = os.path.join(hypernetwork_dir, f'{hypernetwork_name_every}.pt')
506+
hypernetwork.optimizer_name = optimizer_name
507+
if shared.opts.save_optimizer_state:
508+
hypernetwork.optimizer_state_dict = optimizer.state_dict()
470509
save_hypernetwork(hypernetwork, checkpoint, hypernetwork_name, last_saved_file)
510+
hypernetwork.optimizer_state_dict = None # dereference it after saving, to save memory.
471511

472512
textual_inversion.write_loss(log_directory, "hypernetwork_loss.csv", hypernetwork.step, len(ds), {
473513
"loss": f"{previous_mean_loss:.7f}",
@@ -530,8 +570,12 @@ def train_hypernetwork(hypernetwork_name, learn_rate, batch_size, data_root, log
530570
report_statistics(loss_dict)
531571

532572
filename = os.path.join(shared.cmd_opts.hypernetwork_dir, f'{hypernetwork_name}.pt')
573+
hypernetwork.optimizer_name = optimizer_name
574+
if shared.opts.save_optimizer_state:
575+
hypernetwork.optimizer_state_dict = optimizer.state_dict()
533576
save_hypernetwork(hypernetwork, checkpoint, hypernetwork_name, filename)
534-
577+
del optimizer
578+
hypernetwork.optimizer_state_dict = None # dereference it after saving, to save memory.
535579
return hypernetwork, filename
536580

537581
def save_hypernetwork(hypernetwork, checkpoint, hypernetwork_name, filename):

modules/hypernetworks/ui.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@
99
from modules.hypernetworks import hypernetwork
1010

1111
not_available = ["hardswish", "multiheadattention"]
12-
keys = ["linear"] + list(x for x in hypernetwork.HypernetworkModule.activation_dict.keys() if x not in not_available)
12+
keys = list(x for x in hypernetwork.HypernetworkModule.activation_dict.keys() if x not in not_available)
1313

1414
def create_hypernetwork(name, enable_sizes, overwrite_old, layer_structure=None, activation_func=None, weight_init=None, add_layer_norm=False, use_dropout=False):
1515
# Remove illegal characters from name.

modules/shared.py

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -87,6 +87,9 @@
8787
parser.add_argument("--device-id", type=str, help="Select the default CUDA device to use (export CUDA_VISIBLE_DEVICES=0,1,etc might be needed before)", default=None)
8888
parser.add_argument("--administrator", action='store_true', help="Administrator rights", default=False)
8989
parser.add_argument("--cors-allow-origins", type=str, help="Allowed CORS origins", default=None)
90+
parser.add_argument("--tls-keyfile", type=str, help="Partially enables TLS, requires --tls-certfile to fully function", default=None)
91+
parser.add_argument("--tls-certfile", type=str, help="Partially enables TLS, requires --tls-keyfile to fully function", default=None)
92+
parser.add_argument("--server-name", type=str, help="Sets hostname of server", default=None)
9093

9194
cmd_opts = parser.parse_args()
9295
restricted_opts = {
@@ -318,6 +321,7 @@ def options_section(section_identifier, options_dict):
318321

319322
options_templates.update(options_section(('training', "Training"), {
320323
"unload_models_when_training": OptionInfo(False, "Move VAE and CLIP to RAM when training if possible. Saves VRAM."),
324+
"save_optimizer_state": OptionInfo(False, "Saves Optimizer state as separate *.optim file. Training can be resumed with HN itself and matching optim file."),
321325
"dataset_filename_word_regex": OptionInfo("", "Filename word regex"),
322326
"dataset_filename_join_string": OptionInfo(" ", "Filename join string"),
323327
"training_image_repeats_per_epoch": OptionInfo(1, "Number of repeats for a single input image per epoch; used only for displaying epoch number", gr.Number, {"precision": 0}),
@@ -407,7 +411,8 @@ def __setattr__(self, key, value):
407411
if key in self.data or key in self.data_labels:
408412
assert not cmd_opts.freeze_settings, "changing settings is disabled"
409413

410-
comp_args = opts.data_labels[key].component_args
414+
info = opts.data_labels.get(key, None)
415+
comp_args = info.component_args if info else None
411416
if isinstance(comp_args, dict) and comp_args.get('visible', True) is False:
412417
raise RuntimeError(f"not possible to set {key} because it is restricted")
413418

modules/ui_extensions.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -188,7 +188,7 @@ def refresh_available_extensions_from_data():
188188

189189
code += f"""
190190
<tr>
191-
<td><a href="{html.escape(url)}">{html.escape(name)}</a></td>
191+
<td><a href="{html.escape(url)}" target="_blank">{html.escape(name)}</a></td>
192192
<td>{html.escape(description)}</td>
193193
<td>{install_code}</td>
194194
</tr>

modules/upscaler.py

Lines changed: 10 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -57,10 +57,18 @@ def upscale(self, img: PIL.Image, scale: int, selected_model: str = None):
5757
self.scale = scale
5858
dest_w = img.width * scale
5959
dest_h = img.height * scale
60+
6061
for i in range(3):
61-
if img.width > dest_w and img.height > dest_h:
62-
break
62+
shape = (img.width, img.height)
63+
6364
img = self.do_upscale(img, selected_model)
65+
66+
if shape == (img.width, img.height):
67+
break
68+
69+
if img.width >= dest_w and img.height >= dest_h:
70+
break
71+
6472
if img.width != dest_w or img.height != dest_h:
6573
img = img.resize((int(dest_w), int(dest_h)), resample=LANCZOS)
6674

webui.py

Lines changed: 18 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@
3535
import modules.hypernetworks.hypernetwork
3636

3737
queue_lock = threading.Lock()
38-
38+
server_name = "0.0.0.0" if cmd_opts.listen else cmd_opts.server_name
3939

4040
def wrap_queued_call(func):
4141
def f(*args, **kwargs):
@@ -86,6 +86,20 @@ def initialize():
8686
shared.opts.onchange("sd_hypernetwork", wrap_queued_call(lambda: modules.hypernetworks.hypernetwork.load_hypernetwork(shared.opts.sd_hypernetwork)))
8787
shared.opts.onchange("sd_hypernetwork_strength", modules.hypernetworks.hypernetwork.apply_strength)
8888

89+
if cmd_opts.tls_keyfile is not None and cmd_opts.tls_keyfile is not None:
90+
91+
try:
92+
if not os.path.exists(cmd_opts.tls_keyfile):
93+
print("Invalid path to TLS keyfile given")
94+
if not os.path.exists(cmd_opts.tls_certfile):
95+
print(f"Invalid path to TLS certfile: '{cmd_opts.tls_certfile}'")
96+
except TypeError:
97+
cmd_opts.tls_keyfile = cmd_opts.tls_certfile = None
98+
print("TLS setup invalid, running webui without TLS")
99+
else:
100+
print("Running with TLS")
101+
102+
89103
# make the program just exit at ctrl+c without waiting for anything
90104
def sigint_handler(sig, frame):
91105
print(f'Interrupted with signal {sig} in {frame}')
@@ -138,8 +152,10 @@ def webui():
138152

139153
app, local_url, share_url = demo.launch(
140154
share=cmd_opts.share,
141-
server_name="0.0.0.0" if cmd_opts.listen else None,
155+
server_name=server_name,
142156
server_port=cmd_opts.port,
157+
ssl_keyfile=cmd_opts.tls_keyfile,
158+
ssl_certfile=cmd_opts.tls_certfile,
143159
debug=cmd_opts.gradio_debug,
144160
auth=[tuple(cred.split(':')) for cred in cmd_opts.gradio_auth.strip('"').split(',')] if cmd_opts.gradio_auth else None,
145161
inbrowser=cmd_opts.autolaunch,

0 commit comments

Comments (0)