Skip to content

Commit 2bc8dd2

Browse files
authored
Merge branch 'main' into main
2 parents 58d05d6 + 9f06a0d commit 2bc8dd2

File tree

93 files changed

+781
-970
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

93 files changed

+781
-970
lines changed

examples/advanced_diffusion_training/README.md

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -67,6 +67,17 @@ write_basic_config()
6767
When running `accelerate config`, if we specify torch compile mode to True there can be dramatic speedups.
6868
Note also that we use PEFT library as backend for LoRA training, make sure to have `peft>=0.6.0` installed in your environment.
6969

70+
Lastly, we recommend logging into your HF account so that your trained LoRA is automatically uploaded to the hub:
71+
```bash
72+
huggingface-cli login
73+
```
74+
This command will prompt you for a token. Copy-paste yours from your [settings/tokens](https://huggingface.co/settings/tokens), and press Enter.
75+
76+
> [!NOTE]
77+
> In the examples below we use `wandb` to document the training runs. To do the same, make sure to install `wandb`:
78+
> `pip install wandb`
79+
> Alternatively, you can use other tools / train without reporting by modifying the flag `--report_to="wandb"`.
80+
7081
### Pivotal Tuning
7182
**Training with text encoder(s)**
7283

examples/advanced_diffusion_training/README_flux.md

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -65,6 +65,17 @@ write_basic_config()
6565
When running `accelerate config`, if we specify torch compile mode to True there can be dramatic speedups.
6666
Note also that we use PEFT library as backend for LoRA training, make sure to have `peft>=0.6.0` installed in your environment.
6767

68+
Lastly, we recommend logging into your HF account so that your trained LoRA is automatically uploaded to the hub:
69+
```bash
70+
huggingface-cli login
71+
```
72+
This command will prompt you for a token. Copy-paste yours from your [settings/tokens](https://huggingface.co/settings/tokens), and press Enter.
73+
74+
> [!NOTE]
75+
> In the examples below we use `wandb` to document the training runs. To do the same, make sure to install `wandb`:
76+
> `pip install wandb`
77+
> Alternatively, you can use other tools / train without reporting by modifying the flag `--report_to="wandb"`.
78+
6879
### Target Modules
6980
When LoRA was first adapted from language models to diffusion models, it was applied to the cross-attention layers in the Unet that relate the image representations with the prompts that describe them.
7081
More recently, SOTA text-to-image diffusion models replaced the Unet with a diffusion Transformer(DiT). With this change, we may also want to explore

examples/community/adaptive_mask_inpainting.py

Lines changed: 9 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -416,10 +416,14 @@ def __init__(
416416
" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead."
417417
)
418418

419-
is_unet_version_less_0_9_0 = hasattr(unet.config, "_diffusers_version") and version.parse(
420-
version.parse(unet.config._diffusers_version).base_version
421-
) < version.parse("0.9.0.dev0")
422-
is_unet_sample_size_less_64 = hasattr(unet.config, "sample_size") and unet.config.sample_size < 64
419+
is_unet_version_less_0_9_0 = (
420+
unet is not None
421+
and hasattr(unet.config, "_diffusers_version")
422+
and version.parse(version.parse(unet.config._diffusers_version).base_version) < version.parse("0.9.0.dev0")
423+
)
424+
is_unet_sample_size_less_64 = (
425+
unet is not None and hasattr(unet.config, "sample_size") and unet.config.sample_size < 64
426+
)
423427
if is_unet_version_less_0_9_0 and is_unet_sample_size_less_64:
424428
deprecation_message = (
425429
"The configuration file of the unet has set the default `sample_size` to smaller than"
@@ -438,7 +442,7 @@ def __init__(
438442
unet._internal_dict = FrozenDict(new_config)
439443

440444
# Check shapes, assume num_channels_latents == 4, num_channels_mask == 1, num_channels_masked == 4
441-
if unet.config.in_channels != 9:
445+
if unet is not None and unet.config.in_channels != 9:
442446
logger.info(f"You have loaded a UNet with {unet.config.in_channels} input channels which.")
443447

444448
self.register_modules(

examples/community/composable_stable_diffusion.py

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -132,10 +132,14 @@ def __init__(
132132
" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead."
133133
)
134134

135-
is_unet_version_less_0_9_0 = hasattr(unet.config, "_diffusers_version") and version.parse(
136-
version.parse(unet.config._diffusers_version).base_version
137-
) < version.parse("0.9.0.dev0")
138-
is_unet_sample_size_less_64 = hasattr(unet.config, "sample_size") and unet.config.sample_size < 64
135+
is_unet_version_less_0_9_0 = (
136+
unet is not None
137+
and hasattr(unet.config, "_diffusers_version")
138+
and version.parse(version.parse(unet.config._diffusers_version).base_version) < version.parse("0.9.0.dev0")
139+
)
140+
is_unet_sample_size_less_64 = (
141+
unet is not None and hasattr(unet.config, "sample_size") and unet.config.sample_size < 64
142+
)
139143
if is_unet_version_less_0_9_0 and is_unet_sample_size_less_64:
140144
deprecation_message = (
141145
"The configuration file of the unet has set the default `sample_size` to smaller than"

examples/community/instaflow_one_step.py

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -152,10 +152,14 @@ def __init__(
152152
" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead."
153153
)
154154

155-
is_unet_version_less_0_9_0 = hasattr(unet.config, "_diffusers_version") and version.parse(
156-
version.parse(unet.config._diffusers_version).base_version
157-
) < version.parse("0.9.0.dev0")
158-
is_unet_sample_size_less_64 = hasattr(unet.config, "sample_size") and unet.config.sample_size < 64
155+
is_unet_version_less_0_9_0 = (
156+
unet is not None
157+
and hasattr(unet.config, "_diffusers_version")
158+
and version.parse(version.parse(unet.config._diffusers_version).base_version) < version.parse("0.9.0.dev0")
159+
)
160+
is_unet_sample_size_less_64 = (
161+
unet is not None and hasattr(unet.config, "sample_size") and unet.config.sample_size < 64
162+
)
159163
if is_unet_version_less_0_9_0 and is_unet_sample_size_less_64:
160164
deprecation_message = (
161165
"The configuration file of the unet has set the default `sample_size` to smaller than"

examples/community/ip_adapter_face_id.py

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -234,10 +234,14 @@ def __init__(
234234
" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead."
235235
)
236236

237-
is_unet_version_less_0_9_0 = hasattr(unet.config, "_diffusers_version") and version.parse(
238-
version.parse(unet.config._diffusers_version).base_version
239-
) < version.parse("0.9.0.dev0")
240-
is_unet_sample_size_less_64 = hasattr(unet.config, "sample_size") and unet.config.sample_size < 64
237+
is_unet_version_less_0_9_0 = (
238+
unet is not None
239+
and hasattr(unet.config, "_diffusers_version")
240+
and version.parse(version.parse(unet.config._diffusers_version).base_version) < version.parse("0.9.0.dev0")
241+
)
242+
is_unet_sample_size_less_64 = (
243+
unet is not None and hasattr(unet.config, "sample_size") and unet.config.sample_size < 64
244+
)
241245
if is_unet_version_less_0_9_0 and is_unet_sample_size_less_64:
242246
deprecation_message = (
243247
"The configuration file of the unet has set the default `sample_size` to smaller than"

examples/community/llm_grounded_diffusion.py

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -379,10 +379,14 @@ def __init__(
379379
" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead."
380380
)
381381

382-
is_unet_version_less_0_9_0 = hasattr(unet.config, "_diffusers_version") and version.parse(
383-
version.parse(unet.config._diffusers_version).base_version
384-
) < version.parse("0.9.0.dev0")
385-
is_unet_sample_size_less_64 = hasattr(unet.config, "sample_size") and unet.config.sample_size < 64
382+
is_unet_version_less_0_9_0 = (
383+
unet is not None
384+
and hasattr(unet.config, "_diffusers_version")
385+
and version.parse(version.parse(unet.config._diffusers_version).base_version) < version.parse("0.9.0.dev0")
386+
)
387+
is_unet_sample_size_less_64 = (
388+
unet is not None and hasattr(unet.config, "sample_size") and unet.config.sample_size < 64
389+
)
386390
if is_unet_version_less_0_9_0 and is_unet_sample_size_less_64:
387391
deprecation_message = (
388392
"The configuration file of the unet has set the default `sample_size` to smaller than"

examples/community/lpw_stable_diffusion.py

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -539,10 +539,14 @@ def __init__(
539539
" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead."
540540
)
541541

542-
is_unet_version_less_0_9_0 = hasattr(unet.config, "_diffusers_version") and version.parse(
543-
version.parse(unet.config._diffusers_version).base_version
544-
) < version.parse("0.9.0.dev0")
545-
is_unet_sample_size_less_64 = hasattr(unet.config, "sample_size") and unet.config.sample_size < 64
542+
is_unet_version_less_0_9_0 = (
543+
unet is not None
544+
and hasattr(unet.config, "_diffusers_version")
545+
and version.parse(version.parse(unet.config._diffusers_version).base_version) < version.parse("0.9.0.dev0")
546+
)
547+
is_unet_sample_size_less_64 = (
548+
unet is not None and hasattr(unet.config, "sample_size") and unet.config.sample_size < 64
549+
)
546550
if is_unet_version_less_0_9_0 and is_unet_sample_size_less_64:
547551
deprecation_message = (
548552
"The configuration file of the unet has set the default `sample_size` to smaller than"

examples/community/lpw_stable_diffusion_xl.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -678,7 +678,11 @@ def __init__(
678678
self.mask_processor = VaeImageProcessor(
679679
vae_scale_factor=self.vae_scale_factor, do_normalize=False, do_binarize=True, do_convert_grayscale=True
680680
)
681-
self.default_sample_size = self.unet.config.sample_size
681+
self.default_sample_size = (
682+
self.unet.config.sample_size
683+
if hasattr(self, "unet") and self.unet is not None and hasattr(self.unet.config, "sample_size")
684+
else 128
685+
)
682686

683687
add_watermarker = add_watermarker if add_watermarker is not None else is_invisible_watermark_available()
684688

examples/community/matryoshka.py

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -3793,10 +3793,14 @@ def __init__(
37933793
# new_config["clip_sample"] = False
37943794
# scheduler._internal_dict = FrozenDict(new_config)
37953795

3796-
is_unet_version_less_0_9_0 = hasattr(unet.config, "_diffusers_version") and version.parse(
3797-
version.parse(unet.config._diffusers_version).base_version
3798-
) < version.parse("0.9.0.dev0")
3799-
is_unet_sample_size_less_64 = hasattr(unet.config, "sample_size") and unet.config.sample_size < 64
3796+
is_unet_version_less_0_9_0 = (
3797+
unet is not None
3798+
and hasattr(unet.config, "_diffusers_version")
3799+
and version.parse(version.parse(unet.config._diffusers_version).base_version) < version.parse("0.9.0.dev0")
3800+
)
3801+
is_unet_sample_size_less_64 = (
3802+
unet is not None and hasattr(unet.config, "sample_size") and unet.config.sample_size < 64
3803+
)
38003804
if is_unet_version_less_0_9_0 and is_unet_sample_size_less_64:
38013805
deprecation_message = (
38023806
"The configuration file of the unet has set the default `sample_size` to smaller than"

0 commit comments

Comments
 (0)