Skip to content

Commit 8e2c4cd

Browse files
Deprecate sample size (#1406)
* up * up * fix * uP * more fixes * up * uP * up * up * uP * fix final tests
1 parent bb2c64a commit 8e2c4cd

20 files changed

+407
-26
lines changed

src/diffusers/configuration_utils.py

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -91,9 +91,6 @@ class ConfigMixin:
9191
def register_to_config(self, **kwargs):
9292
if self.config_name is None:
9393
raise NotImplementedError(f"Make sure that {self.__class__} has defined a class name `config_name`")
94-
kwargs["_class_name"] = self.__class__.__name__
95-
kwargs["_diffusers_version"] = __version__
96-
9794
# Special case for `kwargs` used in deprecation warning added to schedulers
9895
# TODO: remove this when we remove the deprecation warning, and the `kwargs` argument,
9996
# or solve in a more general way.
@@ -462,7 +459,7 @@ def extract_init_dict(cls, config_dict, **kwargs):
462459
unused_kwargs = {**config_dict, **kwargs}
463460

464461
# 7. Define "hidden" config parameters that were saved for compatible classes
465-
hidden_config_dict = {k: v for k, v in original_dict.items() if k not in init_dict and not k.startswith("_")}
462+
hidden_config_dict = {k: v for k, v in original_dict.items() if k not in init_dict}
466463

467464
return init_dict, unused_kwargs, hidden_config_dict
468465

@@ -493,6 +490,9 @@ def to_json_string(self) -> str:
493490
`str`: String containing all the attributes that make up this configuration instance in JSON format.
494491
"""
495492
config_dict = self._internal_dict if hasattr(self, "_internal_dict") else {}
493+
config_dict["_class_name"] = self.__class__.__name__
494+
config_dict["_diffusers_version"] = __version__
495+
496496
return json.dumps(config_dict, indent=2, sort_keys=True) + "\n"
497497

498498
def to_json_file(self, json_file_path: Union[str, os.PathLike]):
@@ -520,6 +520,7 @@ def register_to_config(init):
520520
def inner_init(self, *args, **kwargs):
521521
# Ignore private kwargs in the init.
522522
init_kwargs = {k: v for k, v in kwargs.items() if not k.startswith("_")}
523+
config_init_kwargs = {k: v for k, v in kwargs.items() if k.startswith("_")}
523524
init(self, *args, **init_kwargs)
524525
if not isinstance(self, ConfigMixin):
525526
raise RuntimeError(
@@ -545,6 +546,7 @@ def inner_init(self, *args, **kwargs):
545546
if k not in ignore and k not in new_kwargs
546547
}
547548
)
549+
new_kwargs = {**config_init_kwargs, **new_kwargs}
548550
getattr(self, "register_to_config")(**new_kwargs)
549551

550552
return inner_init
@@ -562,7 +564,7 @@ def init(self, *args, **kwargs):
562564
)
563565

564566
# Ignore private kwargs in the init. Retrieve all passed attributes
565-
init_kwargs = {k: v for k, v in kwargs.items() if not k.startswith("_")}
567+
init_kwargs = {k: v for k, v in kwargs.items()}
566568

567569
# Retrieve default values
568570
fields = dataclasses.fields(self)

src/diffusers/modeling_utils.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -448,7 +448,7 @@ def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.P
448448
if low_cpu_mem_usage:
449449
# Instantiate model with empty weights
450450
with accelerate.init_empty_weights():
451-
model, unused_kwargs = cls.from_config(
451+
config, unused_kwargs = cls.load_config(
452452
config_path,
453453
cache_dir=cache_dir,
454454
return_unused_kwargs=True,
@@ -462,6 +462,7 @@ def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.P
462462
device_map=device_map,
463463
**kwargs,
464464
)
465+
model = cls.from_config(config, **unused_kwargs)
465466

466467
# if device_map is None, load the state dict and move the params from meta device to the cpu
467468
if device_map is None:
@@ -482,7 +483,7 @@ def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.P
482483
"error_msgs": [],
483484
}
484485
else:
485-
model, unused_kwargs = cls.from_config(
486+
config, unused_kwargs = cls.load_config(
486487
config_path,
487488
cache_dir=cache_dir,
488489
return_unused_kwargs=True,
@@ -496,6 +497,7 @@ def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.P
496497
device_map=device_map,
497498
**kwargs,
498499
)
500+
model = cls.from_config(config, **unused_kwargs)
499501

500502
state_dict = load_state_dict(model_file)
501503
model, missing_keys, unexpected_keys, mismatched_keys, error_msgs = cls._load_pretrained_model(

src/diffusers/pipelines/alt_diffusion/pipeline_alt_diffusion.py

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@
1818
import torch
1919

2020
from diffusers.utils import is_accelerate_available
21+
from packaging import version
2122
from transformers import CLIPFeatureExtractor, XLMRobertaTokenizer
2223

2324
from ...configuration_utils import FrozenDict
@@ -132,6 +133,27 @@ def __init__(
132133
" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead."
133134
)
134135

136+
is_unet_version_less_0_9_0 = hasattr(unet.config, "_diffusers_version") and version.parse(
137+
version.parse(unet.config._diffusers_version).base_version
138+
) < version.parse("0.9.0.dev0")
139+
is_unet_sample_size_less_64 = hasattr(unet.config, "sample_size") and unet.config.sample_size < 64
140+
if is_unet_version_less_0_9_0 and is_unet_sample_size_less_64:
141+
deprecation_message = (
142+
"The configuration file of the unet has set the default `sample_size` to smaller than"
143+
" 64 which seems highly unlikely .If you're checkpoint is a fine-tuned version of any of the"
144+
" following: \n- CompVis/stable-diffusion-v1-4 \n- CompVis/stable-diffusion-v1-3 \n-"
145+
" CompVis/stable-diffusion-v1-2 \n- CompVis/stable-diffusion-v1-1 \n- runwayml/stable-diffusion-v1-5"
146+
" \n- runwayml/stable-diffusion-inpainting \n you should change 'sample_size' to 64 in the"
147+
" configuration file. Please make sure to update the config accordingly as leaving `sample_size=32`"
148+
" in the config might lead to incorrect results in future versions. If you have downloaded this"
149+
" checkpoint from the Hugging Face Hub, it would be very nice if you could open a Pull request for"
150+
" the `unet/config.json` file"
151+
)
152+
deprecate("sample_size<64", "1.0.0", deprecation_message, standard_warn=False)
153+
new_config = dict(unet.config)
154+
new_config["sample_size"] = 64
155+
unet._internal_dict = FrozenDict(new_config)
156+
135157
self.register_modules(
136158
vae=vae,
137159
text_encoder=text_encoder,

src/diffusers/pipelines/alt_diffusion/pipeline_alt_diffusion_img2img.py

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@
2020

2121
import PIL
2222
from diffusers.utils import is_accelerate_available
23+
from packaging import version
2324
from transformers import CLIPFeatureExtractor, XLMRobertaTokenizer
2425

2526
from ...configuration_utils import FrozenDict
@@ -145,6 +146,27 @@ def __init__(
145146
" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead."
146147
)
147148

149+
is_unet_version_less_0_9_0 = hasattr(unet.config, "_diffusers_version") and version.parse(
150+
version.parse(unet.config._diffusers_version).base_version
151+
) < version.parse("0.9.0.dev0")
152+
is_unet_sample_size_less_64 = hasattr(unet.config, "sample_size") and unet.config.sample_size < 64
153+
if is_unet_version_less_0_9_0 and is_unet_sample_size_less_64:
154+
deprecation_message = (
155+
"The configuration file of the unet has set the default `sample_size` to smaller than"
156+
" 64 which seems highly unlikely .If you're checkpoint is a fine-tuned version of any of the"
157+
" following: \n- CompVis/stable-diffusion-v1-4 \n- CompVis/stable-diffusion-v1-3 \n-"
158+
" CompVis/stable-diffusion-v1-2 \n- CompVis/stable-diffusion-v1-1 \n- runwayml/stable-diffusion-v1-5"
159+
" \n- runwayml/stable-diffusion-inpainting \n you should change 'sample_size' to 64 in the"
160+
" configuration file. Please make sure to update the config accordingly as leaving `sample_size=32`"
161+
" in the config might lead to incorrect results in future versions. If you have downloaded this"
162+
" checkpoint from the Hugging Face Hub, it would be very nice if you could open a Pull request for"
163+
" the `unet/config.json` file"
164+
)
165+
deprecate("sample_size<64", "1.0.0", deprecation_message, standard_warn=False)
166+
new_config = dict(unet.config)
167+
new_config["sample_size"] = 64
168+
unet._internal_dict = FrozenDict(new_config)
169+
148170
self.register_modules(
149171
vae=vae,
150172
text_encoder=text_encoder,

src/diffusers/pipelines/stable_diffusion/pipeline_cycle_diffusion.py

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@
2020

2121
import PIL
2222
from diffusers.utils import is_accelerate_available
23+
from packaging import version
2324
from transformers import CLIPFeatureExtractor, CLIPTextModel, CLIPTokenizer
2425

2526
from ...configuration_utils import FrozenDict
@@ -176,6 +177,26 @@ def __init__(
176177
"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety"
177178
" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead."
178179
)
180+
is_unet_version_less_0_9_0 = hasattr(unet.config, "_diffusers_version") and version.parse(
181+
version.parse(unet.config._diffusers_version).base_version
182+
) < version.parse("0.9.0.dev0")
183+
is_unet_sample_size_less_64 = hasattr(unet.config, "sample_size") and unet.config.sample_size < 64
184+
if is_unet_version_less_0_9_0 and is_unet_sample_size_less_64:
185+
deprecation_message = (
186+
"The configuration file of the unet has set the default `sample_size` to smaller than"
187+
" 64 which seems highly unlikely .If you're checkpoint is a fine-tuned version of any of the"
188+
" following: \n- CompVis/stable-diffusion-v1-4 \n- CompVis/stable-diffusion-v1-3 \n-"
189+
" CompVis/stable-diffusion-v1-2 \n- CompVis/stable-diffusion-v1-1 \n- runwayml/stable-diffusion-v1-5"
190+
" \n- runwayml/stable-diffusion-inpainting \n you should change 'sample_size' to 64 in the"
191+
" configuration file. Please make sure to update the config accordingly as leaving `sample_size=32`"
192+
" in the config might lead to incorrect results in future versions. If you have downloaded this"
193+
" checkpoint from the Hugging Face Hub, it would be very nice if you could open a Pull request for"
194+
" the `unet/config.json` file"
195+
)
196+
deprecate("sample_size<64", "1.0.0", deprecation_message, standard_warn=False)
197+
new_config = dict(unet.config)
198+
new_config["sample_size"] = 64
199+
unet._internal_dict = FrozenDict(new_config)
179200

180201
self.register_modules(
181202
vae=vae,

src/diffusers/pipelines/stable_diffusion/pipeline_flax_stable_diffusion.py

Lines changed: 23 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@
2323
from flax.core.frozen_dict import FrozenDict
2424
from flax.jax_utils import unreplicate
2525
from flax.training.common_utils import shard
26+
from packaging import version
2627
from PIL import Image
2728
from transformers import CLIPFeatureExtractor, CLIPTokenizer, FlaxCLIPTextModel
2829

@@ -34,7 +35,7 @@
3435
FlaxLMSDiscreteScheduler,
3536
FlaxPNDMScheduler,
3637
)
37-
from ...utils import logging
38+
from ...utils import deprecate, logging
3839
from . import FlaxStableDiffusionPipelineOutput
3940
from .safety_checker_flax import FlaxStableDiffusionSafetyChecker
4041

@@ -97,6 +98,27 @@ def __init__(
9798
" information, please have a look at https://github.com/huggingface/diffusers/pull/254 ."
9899
)
99100

101+
is_unet_version_less_0_9_0 = hasattr(unet.config, "_diffusers_version") and version.parse(
102+
version.parse(unet.config._diffusers_version).base_version
103+
) < version.parse("0.9.0.dev0")
104+
is_unet_sample_size_less_64 = hasattr(unet.config, "sample_size") and unet.config.sample_size < 64
105+
if is_unet_version_less_0_9_0 and is_unet_sample_size_less_64:
106+
deprecation_message = (
107+
"The configuration file of the unet has set the default `sample_size` to smaller than"
108+
" 64 which seems highly unlikely .If you're checkpoint is a fine-tuned version of any of the"
109+
" following: \n- CompVis/stable-diffusion-v1-4 \n- CompVis/stable-diffusion-v1-3 \n-"
110+
" CompVis/stable-diffusion-v1-2 \n- CompVis/stable-diffusion-v1-1 \n- runwayml/stable-diffusion-v1-5"
111+
" \n- runwayml/stable-diffusion-inpainting \n you should change 'sample_size' to 64 in the"
112+
" configuration file. Please make sure to update the config accordingly as leaving `sample_size=32`"
113+
" in the config might lead to incorrect results in future versions. If you have downloaded this"
114+
" checkpoint from the Hugging Face Hub, it would be very nice if you could open a Pull request for"
115+
" the `unet/config.json` file"
116+
)
117+
deprecate("sample_size<64", "1.0.0", deprecation_message, standard_warn=False)
118+
new_config = dict(unet.config)
119+
new_config["sample_size"] = 64
120+
unet._internal_dict = FrozenDict(new_config)
121+
100122
self.register_modules(
101123
vae=vae,
102124
text_encoder=text_encoder,

src/diffusers/pipelines/stable_diffusion/pipeline_onnx_stable_diffusion.py

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@
1818
import numpy as np
1919
import torch
2020

21+
from packaging import version
2122
from transformers import CLIPFeatureExtractor, CLIPTokenizer
2223

2324
from ...configuration_utils import FrozenDict
@@ -98,6 +99,27 @@ def __init__(
9899
" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead."
99100
)
100101

102+
is_unet_version_less_0_9_0 = hasattr(unet.config, "_diffusers_version") and version.parse(
103+
version.parse(unet.config._diffusers_version).base_version
104+
) < version.parse("0.9.0.dev0")
105+
is_unet_sample_size_less_64 = hasattr(unet.config, "sample_size") and unet.config.sample_size < 64
106+
if is_unet_version_less_0_9_0 and is_unet_sample_size_less_64:
107+
deprecation_message = (
108+
"The configuration file of the unet has set the default `sample_size` to smaller than"
109+
" 64 which seems highly unlikely .If you're checkpoint is a fine-tuned version of any of the"
110+
" following: \n- CompVis/stable-diffusion-v1-4 \n- CompVis/stable-diffusion-v1-3 \n-"
111+
" CompVis/stable-diffusion-v1-2 \n- CompVis/stable-diffusion-v1-1 \n- runwayml/stable-diffusion-v1-5"
112+
" \n- runwayml/stable-diffusion-inpainting \n you should change 'sample_size' to 64 in the"
113+
" configuration file. Please make sure to update the config accordingly as leaving `sample_size=32`"
114+
" in the config might lead to incorrect results in future versions. If you have downloaded this"
115+
" checkpoint from the Hugging Face Hub, it would be very nice if you could open a Pull request for"
116+
" the `unet/config.json` file"
117+
)
118+
deprecate("sample_size<64", "1.0.0", deprecation_message, standard_warn=False)
119+
new_config = dict(unet.config)
120+
new_config["sample_size"] = 64
121+
unet._internal_dict = FrozenDict(new_config)
122+
101123
self.register_modules(
102124
vae_encoder=vae_encoder,
103125
vae_decoder=vae_decoder,

src/diffusers/pipelines/stable_diffusion/pipeline_onnx_stable_diffusion_img2img.py

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@
1919
import torch
2020

2121
import PIL
22+
from packaging import version
2223
from transformers import CLIPFeatureExtractor, CLIPTokenizer
2324

2425
from ...configuration_utils import FrozenDict
@@ -134,6 +135,27 @@ def __init__(
134135
" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead."
135136
)
136137

138+
is_unet_version_less_0_9_0 = hasattr(unet.config, "_diffusers_version") and version.parse(
139+
version.parse(unet.config._diffusers_version).base_version
140+
) < version.parse("0.9.0.dev0")
141+
is_unet_sample_size_less_64 = hasattr(unet.config, "sample_size") and unet.config.sample_size < 64
142+
if is_unet_version_less_0_9_0 and is_unet_sample_size_less_64:
143+
deprecation_message = (
144+
"The configuration file of the unet has set the default `sample_size` to smaller than"
145+
" 64 which seems highly unlikely .If you're checkpoint is a fine-tuned version of any of the"
146+
" following: \n- CompVis/stable-diffusion-v1-4 \n- CompVis/stable-diffusion-v1-3 \n-"
147+
" CompVis/stable-diffusion-v1-2 \n- CompVis/stable-diffusion-v1-1 \n- runwayml/stable-diffusion-v1-5"
148+
" \n- runwayml/stable-diffusion-inpainting \n you should change 'sample_size' to 64 in the"
149+
" configuration file. Please make sure to update the config accordingly as leaving `sample_size=32`"
150+
" in the config might lead to incorrect results in future versions. If you have downloaded this"
151+
" checkpoint from the Hugging Face Hub, it would be very nice if you could open a Pull request for"
152+
" the `unet/config.json` file"
153+
)
154+
deprecate("sample_size<64", "1.0.0", deprecation_message, standard_warn=False)
155+
new_config = dict(unet.config)
156+
new_config["sample_size"] = 64
157+
unet._internal_dict = FrozenDict(new_config)
158+
137159
self.register_modules(
138160
vae_encoder=vae_encoder,
139161
vae_decoder=vae_decoder,

src/diffusers/pipelines/stable_diffusion/pipeline_onnx_stable_diffusion_inpaint.py

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@
1919
import torch
2020

2121
import PIL
22+
from packaging import version
2223
from transformers import CLIPFeatureExtractor, CLIPTokenizer
2324

2425
from ...configuration_utils import FrozenDict
@@ -148,6 +149,27 @@ def __init__(
148149
" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead."
149150
)
150151

152+
is_unet_version_less_0_9_0 = hasattr(unet.config, "_diffusers_version") and version.parse(
153+
version.parse(unet.config._diffusers_version).base_version
154+
) < version.parse("0.9.0.dev0")
155+
is_unet_sample_size_less_64 = hasattr(unet.config, "sample_size") and unet.config.sample_size < 64
156+
if is_unet_version_less_0_9_0 and is_unet_sample_size_less_64:
157+
deprecation_message = (
158+
"The configuration file of the unet has set the default `sample_size` to smaller than"
159+
" 64 which seems highly unlikely .If you're checkpoint is a fine-tuned version of any of the"
160+
" following: \n- CompVis/stable-diffusion-v1-4 \n- CompVis/stable-diffusion-v1-3 \n-"
161+
" CompVis/stable-diffusion-v1-2 \n- CompVis/stable-diffusion-v1-1 \n- runwayml/stable-diffusion-v1-5"
162+
" \n- runwayml/stable-diffusion-inpainting \n you should change 'sample_size' to 64 in the"
163+
" configuration file. Please make sure to update the config accordingly as leaving `sample_size=32`"
164+
" in the config might lead to incorrect results in future versions. If you have downloaded this"
165+
" checkpoint from the Hugging Face Hub, it would be very nice if you could open a Pull request for"
166+
" the `unet/config.json` file"
167+
)
168+
deprecate("sample_size<64", "1.0.0", deprecation_message, standard_warn=False)
169+
new_config = dict(unet.config)
170+
new_config["sample_size"] = 64
171+
unet._internal_dict = FrozenDict(new_config)
172+
151173
self.register_modules(
152174
vae_encoder=vae_encoder,
153175
vae_decoder=vae_decoder,

0 commit comments

Comments
 (0)