Skip to content

Commit cb24c68

Browse files
committed
Merge branch 'main_upstream' into develop_upstream
2 parents 64c29a8 + ab01104 commit cb24c68

File tree

6 files changed

+41
-27
lines changed

6 files changed

+41
-27
lines changed

Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -23,7 +23,7 @@ RUN chown -R user:user /content
2323
WORKDIR /content
2424
USER user
2525

26-
COPY . /content/app
26+
COPY --chown=user:user . /content/app
2727
RUN mv /content/app/models /content/app/models.org
2828

2929
CMD [ "sh", "-c", "/content/entrypoint.sh ${CMDARGS}" ]

css/style.css

Lines changed: 13 additions & 9 deletions
Original file line number | Diff line number | Diff line change
@@ -74,31 +74,35 @@ progress::after {
7474
text-align: right;
7575
width: 215px;
7676
}
77+
div:has(> #positive_prompt) {
78+
border: none;
79+
}
7780

78-
.type_row{
79-
height: 80px !important;
81+
#positive_prompt {
82+
padding: 1px;
83+
background: var(--background-fill-primary);
8084
}
8185

82-
.type_row_half{
83-
height: 32px !important;
86+
.type_row {
87+
height: 84px !important;
8488
}
8589

86-
.scroll-hide{
87-
resize: none !important;
90+
.type_row_half {
91+
height: 34px !important;
8892
}
8993

90-
.refresh_button{
94+
.refresh_button {
9195
border: none !important;
9296
background: none !important;
9397
font-size: none !important;
9498
box-shadow: none !important;
9599
}
96100

97-
.advanced_check_row{
101+
.advanced_check_row {
98102
width: 250px !important;
99103
}
100104

101-
.min_check{
105+
.min_check {
102106
min-width: min(1px, 100%) !important;
103107
}
104108

ldm_patched/contrib/external_custom_sampler.py

Lines changed: 1 addition & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -107,8 +107,7 @@ def INPUT_TYPES(s):
107107
def get_sigmas(self, model, steps, denoise):
108108
start_step = 10 - int(10 * denoise)
109109
timesteps = torch.flip(torch.arange(1, 11) * 100 - 1, (0,))[start_step:start_step + steps]
110-
ldm_patched.modules.model_management.load_models_gpu([model])
111-
sigmas = model.model.model_sampling.sigma(timesteps)
110+
sigmas = model.model_sampling.sigma(timesteps)
112111
sigmas = torch.cat([sigmas, sigmas.new_zeros([1])])
113112
return (sigmas, )
114113

modules/sample_hijack.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -175,7 +175,7 @@ def calculate_sigmas_scheduler_hacked(model, scheduler_name, steps):
175175
elif scheduler_name == "sgm_uniform":
176176
sigmas = normal_scheduler(model, steps, sgm=True)
177177
elif scheduler_name == "turbo":
178-
sigmas = SDTurboScheduler().get_sigmas(namedtuple('Patcher', ['model'])(model=model), steps=steps, denoise=1.0)[0]
178+
sigmas = SDTurboScheduler().get_sigmas(model=model, steps=steps, denoise=1.0)[0]
179179
elif scheduler_name == "align_your_steps":
180180
model_type = 'SDXL' if isinstance(model.latent_format, ldm_patched.modules.latent_formats.SDXL) else 'SD1'
181181
sigmas = AlignYourStepsScheduler().get_sigmas(model_type=model_type, steps=steps, denoise=1.0)[0]

readme.md

Lines changed: 23 additions & 12 deletions
Original file line number | Diff line number | Diff line change
@@ -370,25 +370,36 @@ entry_with_update.py [-h] [--listen [IP]] [--port PORT]
370370
[--web-upload-size WEB_UPLOAD_SIZE]
371371
[--hf-mirror HF_MIRROR]
372372
[--external-working-path PATH [PATH ...]]
373-
[--output-path OUTPUT_PATH] [--temp-path TEMP_PATH]
373+
[--output-path OUTPUT_PATH]
374+
[--temp-path TEMP_PATH]
374375
[--cache-path CACHE_PATH] [--in-browser]
375-
[--disable-in-browser] [--gpu-device-id DEVICE_ID]
376+
[--disable-in-browser]
377+
[--gpu-device-id DEVICE_ID]
376378
[--async-cuda-allocation | --disable-async-cuda-allocation]
377-
[--disable-attention-upcast] [--all-in-fp32 | --all-in-fp16]
379+
[--disable-attention-upcast]
380+
[--all-in-fp32 | --all-in-fp16]
378381
[--unet-in-bf16 | --unet-in-fp16 | --unet-in-fp8-e4m3fn | --unet-in-fp8-e5m2]
379-
[--vae-in-fp16 | --vae-in-fp32 | --vae-in-bf16]
382+
[--vae-in-fp16 | --vae-in-fp32 | --vae-in-bf16]
383+
[--vae-in-cpu]
380384
[--clip-in-fp8-e4m3fn | --clip-in-fp8-e5m2 | --clip-in-fp16 | --clip-in-fp32]
381-
[--directml [DIRECTML_DEVICE]] [--disable-ipex-hijack]
385+
[--directml [DIRECTML_DEVICE]]
386+
[--disable-ipex-hijack]
382387
[--preview-option [none,auto,fast,taesd]]
383388
[--attention-split | --attention-quad | --attention-pytorch]
384389
[--disable-xformers]
385-
[--always-gpu | --always-high-vram | --always-normal-vram |
386-
--always-low-vram | --always-no-vram | --always-cpu [CPU_NUM_THREADS]]
387-
[--always-offload-from-vram] [--disable-server-log]
388-
[--debug-mode] [--is-windows-embedded-python]
389-
[--disable-server-info] [--share] [--preset PRESET]
390-
[--language LANGUAGE] [--disable-offload-from-vram]
391-
[--theme THEME] [--disable-image-log]
390+
[--always-gpu | --always-high-vram | --always-normal-vram |
391+
--always-low-vram | --always-no-vram | --always-cpu [CPU_NUM_THREADS]]
392+
[--always-offload-from-vram]
393+
[--pytorch-deterministic] [--disable-server-log]
394+
[--debug-mode] [--is-windows-embedded-python]
395+
[--disable-server-info] [--multi-user] [--share]
396+
[--preset PRESET] [--disable-preset-selection]
397+
[--language LANGUAGE]
398+
[--disable-offload-from-vram] [--theme THEME]
399+
[--disable-image-log] [--disable-analytics]
400+
[--disable-metadata] [--disable-preset-download]
401+
[--enable-describe-uov-image]
402+
[--always-download-new-model]
392403
```
393404

394405
## Advanced Features

webui.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -112,10 +112,10 @@ def generate_clicked(task: worker.AsyncTask):
112112
gallery = gr.Gallery(label='Gallery', show_label=False, object_fit='contain', visible=True, height=768,
113113
elem_classes=['resizable_area', 'main_view', 'final_gallery', 'image_gallery'],
114114
elem_id='final_gallery')
115-
with gr.Row(elem_classes='type_row'):
115+
with gr.Row():
116116
with gr.Column(scale=17):
117117
prompt = gr.Textbox(show_label=False, placeholder="Type prompt here or paste parameters.", elem_id='positive_prompt',
118-
container=False, autofocus=True, elem_classes='type_row', lines=1024)
118+
autofocus=True, lines=3)
119119

120120
default_prompt = modules.config.default_prompt
121121
if isinstance(default_prompt, str) and default_prompt != '':

0 commit comments

Comments (0)