From fe205a67f49a0892e5caa3c65c7049f43f8609ef Mon Sep 17 00:00:00 2001
From: Daniel Hipke
Date: Tue, 17 Dec 2024 02:15:58 +0000
Subject: [PATCH 01/23] Add no_mmap arg.

---
 src/diffusers/loaders/single_file.py        | 3 +++
 src/diffusers/loaders/single_file_utils.py  | 3 ++-
 src/diffusers/models/model_loading_utils.py | 7 +++++--
 3 files changed, 10 insertions(+), 3 deletions(-)

diff --git a/src/diffusers/loaders/single_file.py b/src/diffusers/loaders/single_file.py
index c0cbfc713857..149be4ebcc04 100644
--- a/src/diffusers/loaders/single_file.py
+++ b/src/diffusers/loaders/single_file.py
@@ -308,6 +308,8 @@ def from_single_file(cls, pretrained_model_link_or_path, **kwargs):
                     hosted on the Hub.
                 - A path to a *directory* (for example `./my_pipeline_directory/`) containing the pipeline
                     component configs in Diffusers format.
+            no_mmap ('bool', *optional*, defaults to 'False'):
+                Whether to use mmap when loading the model.
             kwargs (remaining dictionary of keyword arguments, *optional*):
                 Can be used to overwrite load and saveable variables (the pipeline components of the specific pipeline
                 class). The overwritten components are passed directly to the pipelines `__init__` method. See example
@@ -383,6 +385,7 @@ def from_single_file(cls, pretrained_model_link_or_path, **kwargs):
             cache_dir=cache_dir,
             local_files_only=local_files_only,
             revision=revision,
+            no_mmap=no_mmap,
         )

         if config is None:
diff --git a/src/diffusers/loaders/single_file_utils.py b/src/diffusers/loaders/single_file_utils.py
index 21ff2841700d..24fb3452986d 100644
--- a/src/diffusers/loaders/single_file_utils.py
+++ b/src/diffusers/loaders/single_file_utils.py
@@ -364,6 +364,7 @@ def load_single_file_checkpoint(
     cache_dir=None,
     local_files_only=None,
     revision=None,
+    no_mmap=False,
 ):
     if os.path.isfile(pretrained_model_link_or_path):
         pretrained_model_link_or_path = pretrained_model_link_or_path
@@ -381,7 +382,7 @@ def load_single_file_checkpoint(
             revision=revision,
         )

-    checkpoint = load_state_dict(pretrained_model_link_or_path)
+    checkpoint = load_state_dict(pretrained_model_link_or_path, no_mmap)

     # some checkpoints contain the model state dict under a "state_dict" key
     while "state_dict" in checkpoint:
diff --git a/src/diffusers/models/model_loading_utils.py b/src/diffusers/models/model_loading_utils.py
index 546c0eb4d840..a4d5c082e433 100644
--- a/src/diffusers/models/model_loading_utils.py
+++ b/src/diffusers/models/model_loading_utils.py
@@ -127,7 +127,7 @@ def _fetch_remapped_cls_from_config(config, old_class):
         return old_class


-def load_state_dict(checkpoint_file: Union[str, os.PathLike], variant: Optional[str] = None):
+def load_state_dict(checkpoint_file: Union[str, os.PathLike], variant: Optional[str] = None, no_mmap: bool = False):
     """
     Reads a checkpoint file, returning properly formatted errors if they arise.
     """
@@ -138,7 +138,10 @@ def load_state_dict(checkpoint_file: Union[str, os.PathLike], variant: Optional[
     try:
         file_extension = os.path.basename(checkpoint_file).split(".")[-1]
         if file_extension == SAFETENSORS_FILE_EXTENSION:
-            return safetensors.torch.load_file(checkpoint_file, device="cpu")
+            if no_mmap:
+                return safetensors.torch.load(open(checkpoint_file, "rb"), device="cpu")
+            else:
+                return safetensors.torch.load_file(checkpoint_file, device="cpu")
         else:
             weights_only_kwarg = {"weights_only": True} if is_torch_version(">=", "1.13") else {}
             return torch.load(
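For context on what this first patch toggles: `safetensors.torch.load_file` memory-maps the checkpoint, while `safetensors.torch.load` deserializes from bytes that have already been read into memory. A minimal standalone sketch of the two paths follows (the checkpoint path is hypothetical; note that `safetensors.torch.load` takes bytes and has no `device` argument, so the `open(...)`/`device="cpu"` call added above is corrected later in this series):

```python
import safetensors.torch

checkpoint_file = "model.safetensors"  # hypothetical path

# mmap-backed load: pages are faulted in lazily as tensors are accessed,
# which can be slow on network mounts or spinning disks.
state_dict = safetensors.torch.load_file(checkpoint_file, device="cpu")

# no-mmap load: one sequential read, then deserialization from memory.
with open(checkpoint_file, "rb") as f:
    state_dict = safetensors.torch.load(f.read())
```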
""" @@ -138,7 +138,10 @@ def load_state_dict(checkpoint_file: Union[str, os.PathLike], variant: Optional[ try: file_extension = os.path.basename(checkpoint_file).split(".")[-1] if file_extension == SAFETENSORS_FILE_EXTENSION: - return safetensors.torch.load_file(checkpoint_file, device="cpu") + if no_mmap: + return safetensors.torch.load(open(checkpoint_file, "rb"), device="cpu") + else: + return safetensors.torch.load_file(checkpoint_file, device="cpu") else: weights_only_kwarg = {"weights_only": True} if is_torch_version(">=", "1.13") else {} return torch.load( From a6b4d8fdd3ac9856715c45deb32888f352d57f49 Mon Sep 17 00:00:00 2001 From: Daniel Hipke Date: Tue, 17 Dec 2024 02:27:24 +0000 Subject: [PATCH 02/23] Fix arg parsing. --- src/diffusers/loaders/single_file.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/diffusers/loaders/single_file.py b/src/diffusers/loaders/single_file.py index 149be4ebcc04..1d6c6c570bff 100644 --- a/src/diffusers/loaders/single_file.py +++ b/src/diffusers/loaders/single_file.py @@ -357,6 +357,7 @@ def from_single_file(cls, pretrained_model_link_or_path, **kwargs): local_files_only = kwargs.pop("local_files_only", False) revision = kwargs.pop("revision", None) torch_dtype = kwargs.pop("torch_dtype", None) + no_mmap = kwargs.pop("no_mmap", False) is_legacy_loading = False From 3cf01bf7e603ff2f12e9f2c73352bd8d250cfb32 Mon Sep 17 00:00:00 2001 From: Daniel Hipke Date: Wed, 18 Dec 2024 16:11:11 +0000 Subject: [PATCH 03/23] Update another method to force no mmap. --- src/diffusers/loaders/single_file.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/diffusers/loaders/single_file.py b/src/diffusers/loaders/single_file.py index 1d6c6c570bff..666d9e8be139 100644 --- a/src/diffusers/loaders/single_file.py +++ b/src/diffusers/loaders/single_file.py @@ -106,6 +106,7 @@ def load_single_file_sub_model( subfolder=name, torch_dtype=torch_dtype, local_files_only=local_files_only, + no_mmap=True, **kwargs, ) From 2e082427897eda5f62ae9097b8e0446a7c5cd70d Mon Sep 17 00:00:00 2001 From: Daniel Hipke Date: Wed, 18 Dec 2024 16:31:35 +0000 Subject: [PATCH 04/23] logging --- src/diffusers/loaders/single_file.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/diffusers/loaders/single_file.py b/src/diffusers/loaders/single_file.py index 666d9e8be139..de3c1d28f16e 100644 --- a/src/diffusers/loaders/single_file.py +++ b/src/diffusers/loaders/single_file.py @@ -99,6 +99,7 @@ def load_single_file_sub_model( if original_config: cached_model_config_path = None + logger.warning("dhipke1") loaded_sub_model = load_method( pretrained_model_link_or_path_or_dict=checkpoint, original_config=original_config, @@ -111,6 +112,7 @@ def load_single_file_sub_model( ) elif is_transformers_model and is_clip_model_in_single_file(class_obj, checkpoint): + logger.warning("dhipke2") loaded_sub_model = create_diffusers_clip_model_from_ldm( class_obj, checkpoint=checkpoint, @@ -142,6 +144,8 @@ def load_single_file_sub_model( ) else: + logger.warning("dhipke3") + if not hasattr(class_obj, "from_pretrained"): raise ValueError( ( @@ -359,6 +363,7 @@ def from_single_file(cls, pretrained_model_link_or_path, **kwargs): revision = kwargs.pop("revision", None) torch_dtype = kwargs.pop("torch_dtype", None) no_mmap = kwargs.pop("no_mmap", False) + logger.warning("no_mmap: " + no_mmap) is_legacy_loading = False From bcca53b851b6dc371adbe5fe030b329db1b123d9 Mon Sep 17 00:00:00 2001 From: Daniel Hipke Date: Wed, 18 Dec 2024 16:46:07 +0000 Subject: [PATCH 05/23] logging2 --- 
 src/diffusers/loaders/single_file.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/diffusers/loaders/single_file.py b/src/diffusers/loaders/single_file.py
index de3c1d28f16e..099462182f19 100644
--- a/src/diffusers/loaders/single_file.py
+++ b/src/diffusers/loaders/single_file.py
@@ -363,7 +363,7 @@ def from_single_file(cls, pretrained_model_link_or_path, **kwargs):
         revision = kwargs.pop("revision", None)
         torch_dtype = kwargs.pop("torch_dtype", None)
         no_mmap = kwargs.pop("no_mmap", False)
-        logger.warning("no_mmap: " + no_mmap)
+        logger.warning("no_mmap: " + str(no_mmap))

         is_legacy_loading = False

From c895d8615a7318f7070d5c87781feaf7ec6b9918 Mon Sep 17 00:00:00 2001
From: Daniel Hipke
Date: Wed, 18 Dec 2024 17:14:09 +0000
Subject: [PATCH 06/23] propagate no_mmap

---
 src/diffusers/loaders/single_file_model.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/src/diffusers/loaders/single_file_model.py b/src/diffusers/loaders/single_file_model.py
index 78ce47273d8f..540d57dc87df 100644
--- a/src/diffusers/loaders/single_file_model.py
+++ b/src/diffusers/loaders/single_file_model.py
@@ -170,6 +170,8 @@ def from_single_file(cls, pretrained_model_link_or_path_or_dict: Optional[str] =
             revision (`str`, *optional*, defaults to `"main"`):
                 The specific model version to use. It can be a branch name, a tag name, a commit id, or any identifier
                 allowed by Git.
+            no_mmap ('bool', *optional*, defaults to 'False'):
+                Whether to use mmap when loading the model.
             kwargs (remaining dictionary of keyword arguments, *optional*):
                 Can be used to overwrite load and saveable variables (for example the pipeline components of the
                 specific pipeline class). The overwritten components are directly passed to the pipelines `__init__`
@@ -214,6 +216,8 @@ def from_single_file(cls, pretrained_model_link_or_path_or_dict: Optional[str] =
         subfolder = kwargs.pop("subfolder", None)
         revision = kwargs.pop("revision", None)
         torch_dtype = kwargs.pop("torch_dtype", None)
+        no_mmap = kwargs.pop("no_mmap", False)
+        logger.warning("no_mmap2: " + str(no_mmap))

         if isinstance(pretrained_model_link_or_path_or_dict, dict):
             checkpoint = pretrained_model_link_or_path_or_dict
@@ -226,6 +230,7 @@ def from_single_file(cls, pretrained_model_link_or_path_or_dict: Optional[str] =
             cache_dir=cache_dir,
             local_files_only=local_files_only,
             revision=revision,
+            no_mmap=no_mmap,
         )

         mapping_functions = SINGLE_FILE_LOADABLE_CLASSES[mapping_class_name]

From c081e0bddf9570f28c422bd017d063d0e89d707c Mon Sep 17 00:00:00 2001
From: Daniel Hipke
Date: Wed, 18 Dec 2024 17:28:16 +0000
Subject: [PATCH 07/23] logging3

---
 src/diffusers/models/model_loading_utils.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/src/diffusers/models/model_loading_utils.py b/src/diffusers/models/model_loading_utils.py
index a4d5c082e433..deee275b9238 100644
--- a/src/diffusers/models/model_loading_utils.py
+++ b/src/diffusers/models/model_loading_utils.py
@@ -139,8 +139,10 @@ def load_state_dict(checkpoint_file: Union[str, os.PathLike], variant: Optional[
         file_extension = os.path.basename(checkpoint_file).split(".")[-1]
         if file_extension == SAFETENSORS_FILE_EXTENSION:
             if no_mmap:
+                logger.warning("fast load: " + str(checkpoint_file))
                 return safetensors.torch.load(open(checkpoint_file, "rb"), device="cpu")
             else:
+                logger.warning("slow load: " + str(checkpoint_file))
                 return safetensors.torch.load_file(checkpoint_file, device="cpu")
         else:
             weights_only_kwarg = {"weights_only": True} if is_torch_version(">=", "1.13") else {}
             return torch.load(
From 7231c282e0e76a5aac2a9df74c639371e57ade36 Mon Sep 17 00:00:00 2001
From: Daniel Hipke
Date: Wed, 18 Dec 2024 17:39:20 +0000
Subject: [PATCH 08/23] propagate no_mmap

---
 src/diffusers/loaders/single_file_model.py | 2 +-
 src/diffusers/loaders/single_file_utils.py | 3 ++-
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/src/diffusers/loaders/single_file_model.py b/src/diffusers/loaders/single_file_model.py
index 540d57dc87df..0c3208b463f4 100644
--- a/src/diffusers/loaders/single_file_model.py
+++ b/src/diffusers/loaders/single_file_model.py
@@ -318,7 +318,7 @@ def from_single_file(cls, pretrained_model_link_or_path_or_dict: Optional[str] =
             unexpected_keys = load_model_dict_into_meta(model, diffusers_format_checkpoint, dtype=torch_dtype)

         else:
-            _, unexpected_keys = model.load_state_dict(diffusers_format_checkpoint, strict=False)
+            _, unexpected_keys = model.load_state_dict(diffusers_format_checkpoint, strict=False, no_mmap=no_mmap)

         if model._keys_to_ignore_on_load_unexpected is not None:
             for pat in model._keys_to_ignore_on_load_unexpected:
diff --git a/src/diffusers/loaders/single_file_utils.py b/src/diffusers/loaders/single_file_utils.py
index 24fb3452986d..ffee972dd04a 100644
--- a/src/diffusers/loaders/single_file_utils.py
+++ b/src/diffusers/loaders/single_file_utils.py
@@ -382,7 +382,8 @@ def load_single_file_checkpoint(
             revision=revision,
         )

-    checkpoint = load_state_dict(pretrained_model_link_or_path, no_mmap)
+    logger.warn("before load_state_dict. no_mmap: " + str(no_mmap))
+    checkpoint = load_state_dict(pretrained_model_link_or_path, no_mmap=no_mmap)

     # some checkpoints contain the model state dict under a "state_dict" key
     while "state_dict" in checkpoint:

From 0c472b2fada2815bfa8b9c381f070170732e8590 Mon Sep 17 00:00:00 2001
From: Daniel Hipke
Date: Wed, 18 Dec 2024 17:51:37 +0000
Subject: [PATCH 09/23] logging4

---
 src/diffusers/loaders/single_file_model.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/src/diffusers/loaders/single_file_model.py b/src/diffusers/loaders/single_file_model.py
index 0c3208b463f4..bfd495317a70 100644
--- a/src/diffusers/loaders/single_file_model.py
+++ b/src/diffusers/loaders/single_file_model.py
@@ -234,8 +234,9 @@ def from_single_file(cls, pretrained_model_link_or_path_or_dict: Optional[str] =
         )

         mapping_functions = SINGLE_FILE_LOADABLE_CLASSES[mapping_class_name]
-
+        logger.warning(mapping_functions)
         checkpoint_mapping_fn = mapping_functions["checkpoint_mapping_fn"]
+        logger.warning(checkpoint_mapping_fn)
         if original_config is not None:
             if "config_mapping_fn" in mapping_functions:
                 config_mapping_fn = mapping_functions["config_mapping_fn"]
@@ -255,6 +256,7 @@ def from_single_file(cls, pretrained_model_link_or_path_or_dict: Optional[str] =
             # If original_config is a URL or filepath fetch the original_config dict
             original_config = fetch_original_config(original_config, local_files_only=local_files_only)

+            logger.warn("hi1")
             config_mapping_kwargs = _get_mapping_function_kwargs(config_mapping_fn, **kwargs)
             diffusers_model_config = config_mapping_fn(
                 original_config=original_config, checkpoint=checkpoint, **config_mapping_kwargs
             )
@@ -312,14 +314,17 @@ def from_single_file(cls, pretrained_model_link_or_path_or_dict: Optional[str] =
         ctx = init_empty_weights if is_accelerate_available() else nullcontext
         with ctx():
+            logger.warn("hi2")
             model = cls.from_config(diffusers_model_config)
+            logger.warn("hi3")

         if is_accelerate_available():
             unexpected_keys = load_model_dict_into_meta(model, diffusers_format_checkpoint, dtype=torch_dtype)

         else:
             _, unexpected_keys = model.load_state_dict(diffusers_format_checkpoint, strict=False, no_mmap=no_mmap)
+            logger.warn("hi4")

         if model._keys_to_ignore_on_load_unexpected is not None:
             for pat in model._keys_to_ignore_on_load_unexpected:
                 unexpected_keys = [k for k in unexpected_keys if re.search(pat, k) is None]

From c4d4d60cd28ff4539d6cecef1d04ae479b6c605d Mon Sep 17 00:00:00 2001
From: Daniel Hipke
Date: Wed, 18 Dec 2024 17:56:32 +0000
Subject: [PATCH 10/23] fix open call

---
 src/diffusers/models/model_loading_utils.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/diffusers/models/model_loading_utils.py b/src/diffusers/models/model_loading_utils.py
index deee275b9238..06a60d4cc839 100644
--- a/src/diffusers/models/model_loading_utils.py
+++ b/src/diffusers/models/model_loading_utils.py
@@ -140,7 +140,7 @@ def load_state_dict(checkpoint_file: Union[str, os.PathLike], variant: Optional[
         if file_extension == SAFETENSORS_FILE_EXTENSION:
             if no_mmap:
                 logger.warning("fast load: " + str(checkpoint_file))
-                return safetensors.torch.load(open(checkpoint_file, "rb"), device="cpu")
+                return safetensors.torch.load(open(checkpoint_file, "rb").read())
             else:
                 logger.warning("slow load: " + str(checkpoint_file))
                 return safetensors.torch.load_file(checkpoint_file, device="cpu")
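The fix works because `safetensors.torch.load` only needs the raw bytes. One caveat with the one-liner is that the file handle from `open(...)` is left for the garbage collector to close; a hedged equivalent that closes it deterministically might look like this (hypothetical helper, not part of the patch):

```python
import safetensors.torch


def load_without_mmap(checkpoint_file: str) -> dict:
    """Read the whole checkpoint sequentially and deserialize from memory."""
    # Equivalent to safetensors.torch.load(open(checkpoint_file, "rb").read()),
    # but the context manager closes the handle as soon as the read finishes.
    with open(checkpoint_file, "rb") as f:
        data = f.read()
    return safetensors.torch.load(data)  # tensors are created on CPU
```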
From 4f84222d1eb3776e12ffdaf44169bd4e9c36b6ca Mon Sep 17 00:00:00 2001
From: Daniel Hipke
Date: Thu, 19 Dec 2024 16:59:04 +0000
Subject: [PATCH 11/23] clean up logging

---
 src/diffusers/loaders/single_file.py       | 5 -----
 src/diffusers/loaders/single_file_model.py | 7 -------
 src/diffusers/loaders/single_file_utils.py | 1 -
 3 files changed, 13 deletions(-)

diff --git a/src/diffusers/loaders/single_file.py b/src/diffusers/loaders/single_file.py
index 099462182f19..666d9e8be139 100644
--- a/src/diffusers/loaders/single_file.py
+++ b/src/diffusers/loaders/single_file.py
@@ -99,7 +99,6 @@ def load_single_file_sub_model(
     if original_config:
         cached_model_config_path = None

-        logger.warning("dhipke1")
         loaded_sub_model = load_method(
             pretrained_model_link_or_path_or_dict=checkpoint,
             original_config=original_config,
@@ -112,7 +111,6 @@ def load_single_file_sub_model(
         )

     elif is_transformers_model and is_clip_model_in_single_file(class_obj, checkpoint):
-        logger.warning("dhipke2")
         loaded_sub_model = create_diffusers_clip_model_from_ldm(
             class_obj,
             checkpoint=checkpoint,
@@ -144,8 +142,6 @@ def load_single_file_sub_model(
         )

     else:
-        logger.warning("dhipke3")
-
         if not hasattr(class_obj, "from_pretrained"):
             raise ValueError(
                 (
@@ -363,7 +359,6 @@ def from_single_file(cls, pretrained_model_link_or_path, **kwargs):
         revision = kwargs.pop("revision", None)
         torch_dtype = kwargs.pop("torch_dtype", None)
         no_mmap = kwargs.pop("no_mmap", False)
-        logger.warning("no_mmap: " + str(no_mmap))

         is_legacy_loading = False

diff --git a/src/diffusers/loaders/single_file_model.py b/src/diffusers/loaders/single_file_model.py
index bfd495317a70..eb72d549f30e 100644
--- a/src/diffusers/loaders/single_file_model.py
+++ b/src/diffusers/loaders/single_file_model.py
@@ -217,7 +216,6 @@ def from_single_file(cls, pretrained_model_link_or_path_or_dict: Optional[str] =
         revision = kwargs.pop("revision", None)
         torch_dtype = kwargs.pop("torch_dtype", None)
         no_mmap = kwargs.pop("no_mmap", False)
-        logger.warning("no_mmap2: " + str(no_mmap))

         if isinstance(pretrained_model_link_or_path_or_dict, dict):
             checkpoint = pretrained_model_link_or_path_or_dict
@@ -234,9 +233,7 @@ def from_single_file(cls, pretrained_model_link_or_path_or_dict: Optional[str] =
         )

         mapping_functions = SINGLE_FILE_LOADABLE_CLASSES[mapping_class_name]
-        logger.warning(mapping_functions)
         checkpoint_mapping_fn = mapping_functions["checkpoint_mapping_fn"]
-        logger.warning(checkpoint_mapping_fn)
         if original_config is not None:
             if "config_mapping_fn" in mapping_functions:
                 config_mapping_fn = mapping_functions["config_mapping_fn"]
@@ -256,7 +253,6 @@ def from_single_file(cls, pretrained_model_link_or_path_or_dict: Optional[str] =
             # If original_config is a URL or filepath fetch the original_config dict
             original_config = fetch_original_config(original_config, local_files_only=local_files_only)

-            logger.warn("hi1")
             config_mapping_kwargs = _get_mapping_function_kwargs(config_mapping_fn, **kwargs)
             diffusers_model_config = config_mapping_fn(
                 original_config=original_config, checkpoint=checkpoint, **config_mapping_kwargs
             )
@@ -314,17 +310,14 @@ def from_single_file(cls, pretrained_model_link_or_path_or_dict: Optional[str] =
         ctx = init_empty_weights if is_accelerate_available() else nullcontext
         with ctx():
-            logger.warn("hi2")
             model = cls.from_config(diffusers_model_config)
-            logger.warn("hi3")

         if is_accelerate_available():
             unexpected_keys = load_model_dict_into_meta(model, diffusers_format_checkpoint, dtype=torch_dtype)

         else:
             _, unexpected_keys = model.load_state_dict(diffusers_format_checkpoint, strict=False, no_mmap=no_mmap)
-            logger.warn("hi4")

         if model._keys_to_ignore_on_load_unexpected is not None:
             for pat in model._keys_to_ignore_on_load_unexpected:
                 unexpected_keys = [k for k in unexpected_keys if re.search(pat, k) is None]
diff --git a/src/diffusers/loaders/single_file_utils.py b/src/diffusers/loaders/single_file_utils.py
index ffee972dd04a..4586e7d07b79 100644
--- a/src/diffusers/loaders/single_file_utils.py
+++ b/src/diffusers/loaders/single_file_utils.py
@@ -382,7 +382,6 @@ def load_single_file_checkpoint(
             revision=revision,
         )

-    logger.warn("before load_state_dict. no_mmap: " + str(no_mmap))
     checkpoint = load_state_dict(pretrained_model_link_or_path, no_mmap=no_mmap)

     # some checkpoints contain the model state dict under a "state_dict" key

From 5fab6d18b953b22785050bfe13761664b9e602bd Mon Sep 17 00:00:00 2001
From: Daniel Hipke
Date: Thu, 19 Dec 2024 17:14:48 +0000
Subject: [PATCH 12/23] cleanup

---
 src/diffusers/loaders/single_file.py       | 6 ++++--
 src/diffusers/loaders/single_file_model.py | 4 +++-
 2 files changed, 7 insertions(+), 3 deletions(-)

diff --git a/src/diffusers/loaders/single_file.py b/src/diffusers/loaders/single_file.py
index 666d9e8be139..54d61023e9cc 100644
--- a/src/diffusers/loaders/single_file.py
+++ b/src/diffusers/loaders/single_file.py
@@ -106,7 +106,7 @@ def load_single_file_sub_model(
             subfolder=name,
             torch_dtype=torch_dtype,
             local_files_only=local_files_only,
-            no_mmap=True,
+            no_mmap=no_mmap,
             **kwargs,
         )

@@ -310,7 +310,8 @@ def from_single_file(cls, pretrained_model_link_or_path, **kwargs):
                 - A path to a *directory* (for example `./my_pipeline_directory/`) containing the pipeline
                     component configs in Diffusers format.
             no_mmap ('bool', *optional*, defaults to 'False'):
-                Whether to use mmap when loading the model.
+                Whether to disable mmap when loading the model. This option can perform better when the model is on
+                a network mount or hard drive, which may not handle the seeky-ness of mmap very well.
             kwargs (remaining dictionary of keyword arguments, *optional*):
                 Can be used to overwrite load and saveable variables (the pipeline components of the specific pipeline
                 class). The overwritten components are passed directly to the pipelines `__init__` method. See example
@@ -509,6 +510,7 @@ def load_module(name, value):
                     original_config=original_config,
                     local_files_only=local_files_only,
                     is_legacy_loading=is_legacy_loading,
+                    no_mmap=no_mmap,
                     **kwargs,
                 )
             except SingleFileComponentError as e:
diff --git a/src/diffusers/loaders/single_file_model.py b/src/diffusers/loaders/single_file_model.py
index eb72d549f30e..f392b0492bb1 100644
--- a/src/diffusers/loaders/single_file_model.py
+++ b/src/diffusers/loaders/single_file_model.py
@@ -171,7 +171,8 @@ def from_single_file(cls, pretrained_model_link_or_path_or_dict: Optional[str] =
                 The specific model version to use. It can be a branch name, a tag name, a commit id, or any identifier
                 allowed by Git.
             no_mmap ('bool', *optional*, defaults to 'False'):
-                Whether to use mmap when loading the model.
+                Whether to disable mmap when loading the model. This option can perform better when the model is on
+                a network mount or hard drive, which may not handle the seeky-ness of mmap very well.
             kwargs (remaining dictionary of keyword arguments, *optional*):
                 Can be used to overwrite load and saveable variables (for example the pipeline components of the
                 specific pipeline class). The overwritten components are directly passed to the pipelines `__init__`
@@ -234,6 +234,7 @@ def from_single_file(cls, pretrained_model_link_or_path_or_dict: Optional[str] =
         )

         mapping_functions = SINGLE_FILE_LOADABLE_CLASSES[mapping_class_name]
+
         checkpoint_mapping_fn = mapping_functions["checkpoint_mapping_fn"]
         if original_config is not None:
             if "config_mapping_fn" in mapping_functions:
                 config_mapping_fn = mapping_functions["config_mapping_fn"]

From 1d8cf69ac8e173dde9a3c3f9bf428406748acd03 Mon Sep 17 00:00:00 2001
From: Daniel Hipke
Date: Thu, 19 Dec 2024 17:20:19 +0000
Subject: [PATCH 13/23] fix missing arg

---
 src/diffusers/loaders/single_file.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/diffusers/loaders/single_file.py b/src/diffusers/loaders/single_file.py
index 54d61023e9cc..7b614e97f6fa 100644
--- a/src/diffusers/loaders/single_file.py
+++ b/src/diffusers/loaders/single_file.py
@@ -60,6 +60,7 @@ def load_single_file_sub_model(
     local_files_only=False,
     torch_dtype=None,
     is_legacy_loading=False,
+    no_mmap=False,
     **kwargs,
 ):
     if is_pipeline_module:

From 5ef288fdb3e1128fc1e5c578c59610d958401c7e Mon Sep 17 00:00:00 2001
From: Daniel Hipke
Date: Thu, 19 Dec 2024 17:25:40 +0000
Subject: [PATCH 14/23] update logging and comments

---
 src/diffusers/loaders/single_file.py        | 4 ++--
 src/diffusers/loaders/single_file_model.py  | 4 ++--
 src/diffusers/models/model_loading_utils.py | 2 --
 3 files changed, 4 insertions(+), 6 deletions(-)

diff --git a/src/diffusers/loaders/single_file.py b/src/diffusers/loaders/single_file.py
index 7b614e97f6fa..b277eb1b5e2b 100644
--- a/src/diffusers/loaders/single_file.py
+++ b/src/diffusers/loaders/single_file.py
@@ -311,8 +311,8 @@ def from_single_file(cls, pretrained_model_link_or_path, **kwargs):
                 - A path to a *directory* (for example `./my_pipeline_directory/`) containing the pipeline
                     component configs in Diffusers format.
             no_mmap ('bool', *optional*, defaults to 'False'):
-                Whether to disable mmap when loading the model. This option can perform better when the model is on
-                a network mount or hard drive, which may not handle the seeky-ness of mmap very well.
+                Whether to disable mmap when loading a Safetensors model. This option can perform better when the model
+                is on a network mount or hard drive, which may not handle the seeky-ness of mmap very well.
             kwargs (remaining dictionary of keyword arguments, *optional*):
                 Can be used to overwrite load and saveable variables (the pipeline components of the specific pipeline
                 class). The overwritten components are passed directly to the pipelines `__init__` method. See example
diff --git a/src/diffusers/loaders/single_file_model.py b/src/diffusers/loaders/single_file_model.py
index f392b0492bb1..a2eab5e6b60f 100644
--- a/src/diffusers/loaders/single_file_model.py
+++ b/src/diffusers/loaders/single_file_model.py
@@ -171,8 +171,8 @@ def from_single_file(cls, pretrained_model_link_or_path_or_dict: Optional[str] =
                 The specific model version to use. It can be a branch name, a tag name, a commit id, or any identifier
                 allowed by Git.
             no_mmap ('bool', *optional*, defaults to 'False'):
-                Whether to disable mmap when loading the model. This option can perform better when the model is on
-                a network mount or hard drive, which may not handle the seeky-ness of mmap very well.
+                Whether to disable mmap when loading a Safetensors model. This option can perform better when the model
+                is on a network mount or hard drive, which may not handle the seeky-ness of mmap very well.
             kwargs (remaining dictionary of keyword arguments, *optional*):
                 Can be used to overwrite load and saveable variables (for example the pipeline components of the
                 specific pipeline class). The overwritten components are directly passed to the pipelines `__init__`
diff --git a/src/diffusers/models/model_loading_utils.py b/src/diffusers/models/model_loading_utils.py
index 06a60d4cc839..c9fa90252527 100644
--- a/src/diffusers/models/model_loading_utils.py
+++ b/src/diffusers/models/model_loading_utils.py
@@ -139,10 +139,8 @@ def load_state_dict(checkpoint_file: Union[str, os.PathLike], variant: Optional[
         file_extension = os.path.basename(checkpoint_file).split(".")[-1]
         if file_extension == SAFETENSORS_FILE_EXTENSION:
             if no_mmap:
-                logger.warning("fast load: " + str(checkpoint_file))
                 return safetensors.torch.load(open(checkpoint_file, "rb").read())
             else:
-                logger.warning("slow load: " + str(checkpoint_file))
                 return safetensors.torch.load_file(checkpoint_file, device="cpu")
         else:
             weights_only_kwarg = {"weights_only": True} if is_torch_version(">=", "1.13") else {}

From f80644d1c052852a2940014f49a5e5ed968a9e6e Mon Sep 17 00:00:00 2001
From: Daniel Hipke
Date: Fri, 20 Dec 2024 21:21:46 +0000
Subject: [PATCH 15/23] Rename to disable_mmap and update other references.

---
 src/diffusers/loaders/single_file.py        | 12 ++++++------
 src/diffusers/loaders/single_file_model.py  | 10 ++++++----
 src/diffusers/loaders/single_file_utils.py  |  4 ++--
 src/diffusers/models/model_loading_utils.py |  6 ++++--
 src/diffusers/models/modeling_utils.py      |  8 ++++++--
 5 files changed, 24 insertions(+), 16 deletions(-)

diff --git a/src/diffusers/loaders/single_file.py b/src/diffusers/loaders/single_file.py
index b277eb1b5e2b..68a5c21ca732 100644
--- a/src/diffusers/loaders/single_file.py
+++ b/src/diffusers/loaders/single_file.py
@@ -60,7 +60,7 @@ def load_single_file_sub_model(
     local_files_only=False,
     torch_dtype=None,
     is_legacy_loading=False,
-    no_mmap=False,
+    disable_mmap=False,
     **kwargs,
 ):
     if is_pipeline_module:
@@ -107,7 +107,7 @@ def load_single_file_sub_model(
             subfolder=name,
             torch_dtype=torch_dtype,
             local_files_only=local_files_only,
-            no_mmap=no_mmap,
+            disable_mmap=disable_mmap,
             **kwargs,
         )

@@ -310,7 +310,7 @@ def from_single_file(cls, pretrained_model_link_or_path, **kwargs):
                     hosted on the Hub.
                 - A path to a *directory* (for example `./my_pipeline_directory/`) containing the pipeline
                     component configs in Diffusers format.
-            no_mmap ('bool', *optional*, defaults to 'False'):
+            disable_mmap ('bool', *optional*, defaults to 'False'):
                 Whether to disable mmap when loading a Safetensors model. This option can perform better when the model
                 is on a network mount or hard drive, which may not handle the seeky-ness of mmap very well.
             kwargs (remaining dictionary of keyword arguments, *optional*):
@@ -360,7 +360,7 @@ def from_single_file(cls, pretrained_model_link_or_path, **kwargs):
         local_files_only = kwargs.pop("local_files_only", False)
         revision = kwargs.pop("revision", None)
         torch_dtype = kwargs.pop("torch_dtype", None)
-        no_mmap = kwargs.pop("no_mmap", False)
+        disable_mmap = kwargs.pop("disable_mmap", False)

         is_legacy_loading = False
@@ -389,7 +389,7 @@ def from_single_file(cls, pretrained_model_link_or_path, **kwargs):
             cache_dir=cache_dir,
             local_files_only=local_files_only,
             revision=revision,
-            no_mmap=no_mmap,
+            disable_mmap=disable_mmap,
         )

         if config is None:
@@ -511,7 +511,7 @@ def load_module(name, value):
                     original_config=original_config,
                     local_files_only=local_files_only,
                     is_legacy_loading=is_legacy_loading,
-                    no_mmap=no_mmap,
+                    disable_mmap=disable_mmap,
                     **kwargs,
                 )
             except SingleFileComponentError as e:
diff --git a/src/diffusers/loaders/single_file_model.py b/src/diffusers/loaders/single_file_model.py
index 9abc730722c4..fe9f2e14ceeb 100644
--- a/src/diffusers/loaders/single_file_model.py
+++ b/src/diffusers/loaders/single_file_model.py
@@ -177,7 +177,7 @@ def from_single_file(cls, pretrained_model_link_or_path_or_dict: Optional[str] =
             revision (`str`, *optional*, defaults to `"main"`):
                 The specific model version to use. It can be a branch name, a tag name, a commit id, or any identifier
                 allowed by Git.
-            no_mmap ('bool', *optional*, defaults to 'False'):
+            disable_mmap ('bool', *optional*, defaults to 'False'):
                 Whether to disable mmap when loading a Safetensors model. This option can perform better when the model
                 is on a network mount or hard drive, which may not handle the seeky-ness of mmap very well.
             kwargs (remaining dictionary of keyword arguments, *optional*):
@@ -226,7 +226,7 @@ def from_single_file(cls, pretrained_model_link_or_path_or_dict: Optional[str] =
         torch_dtype = kwargs.pop("torch_dtype", None)
         quantization_config = kwargs.pop("quantization_config", None)
         device = kwargs.pop("device", None)
-        no_mmap = kwargs.pop("no_mmap", False)
+        disable_mmap = kwargs.pop("disable_mmap", False)

         if isinstance(pretrained_model_link_or_path_or_dict, dict):
             checkpoint = pretrained_model_link_or_path_or_dict
@@ -239,7 +239,7 @@ def from_single_file(cls, pretrained_model_link_or_path_or_dict: Optional[str] =
             cache_dir=cache_dir,
             local_files_only=local_files_only,
             revision=revision,
-            no_mmap=no_mmap,
+            disable_mmap=disable_mmap,
         )
         if quantization_config is not None:
             hf_quantizer = DiffusersAutoQuantizer.from_config(quantization_config)
@@ -361,7 +361,9 @@ def from_single_file(cls, pretrained_model_link_or_path_or_dict: Optional[str] =
             )

         else:
-            _, unexpected_keys = model.load_state_dict(diffusers_format_checkpoint, strict=False, no_mmap=no_mmap)
+            _, unexpected_keys = model.load_state_dict(
+                diffusers_format_checkpoint, strict=False, disable_mmap=disable_mmap
+            )

         if model._keys_to_ignore_on_load_unexpected is not None:
             for pat in model._keys_to_ignore_on_load_unexpected:
diff --git a/src/diffusers/loaders/single_file_utils.py b/src/diffusers/loaders/single_file_utils.py
index 46791a9c2f32..a21686b7b388 100644
--- a/src/diffusers/loaders/single_file_utils.py
+++ b/src/diffusers/loaders/single_file_utils.py
@@ -375,7 +375,7 @@ def load_single_file_checkpoint(
     cache_dir=None,
     local_files_only=None,
     revision=None,
-    no_mmap=False,
+    disable_mmap=False,
 ):
     if os.path.isfile(pretrained_model_link_or_path):
         pretrained_model_link_or_path = pretrained_model_link_or_path
@@ -393,7 +393,7 @@ def load_single_file_checkpoint(
             revision=revision,
         )

-    checkpoint = load_state_dict(pretrained_model_link_or_path, no_mmap=no_mmap)
+    checkpoint = load_state_dict(pretrained_model_link_or_path, disable_mmap=disable_mmap)

     # some checkpoints contain the model state dict under a "state_dict" key
     while "state_dict" in checkpoint:
diff --git a/src/diffusers/models/model_loading_utils.py b/src/diffusers/models/model_loading_utils.py
index 6e9ff8261761..cc8e81a0b90c 100644
--- a/src/diffusers/models/model_loading_utils.py
+++ b/src/diffusers/models/model_loading_utils.py
@@ -131,7 +131,9 @@ def _fetch_remapped_cls_from_config(config, old_class):
         return old_class


-def load_state_dict(checkpoint_file: Union[str, os.PathLike], variant: Optional[str] = None, no_mmap: bool = False):
+def load_state_dict(
+    checkpoint_file: Union[str, os.PathLike], variant: Optional[str] = None, disable_mmap: bool = False
+):
     """
     Reads a checkpoint file, returning properly formatted errors if they arise.
     """
@@ -142,7 +144,7 @@ def load_state_dict(checkpoint_file: Union[str, os.PathLike], variant: Optional[
     try:
         file_extension = os.path.basename(checkpoint_file).split(".")[-1]
         if file_extension == SAFETENSORS_FILE_EXTENSION:
-            if no_mmap:
+            if disable_mmap:
                 return safetensors.torch.load(open(checkpoint_file, "rb").read())
             else:
                 return safetensors.torch.load_file(checkpoint_file, device="cpu")
         else:
             weights_only_kwarg = {"weights_only": True} if is_torch_version(">=", "1.13") else {}
             return torch.load(
diff --git a/src/diffusers/models/modeling_utils.py b/src/diffusers/models/modeling_utils.py
index 872d4d73d41f..e16f44bc73d3 100644
--- a/src/diffusers/models/modeling_utils.py
+++ b/src/diffusers/models/modeling_utils.py
@@ -541,6 +541,9 @@ def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.P
                 If set to `None`, the `safetensors` weights are downloaded if they're available **and** if the
                 `safetensors` library is installed. If set to `True`, the model is forcibly loaded from `safetensors`
                 weights. If set to `False`, `safetensors` weights are not loaded.
+            disable_mmap ('bool', *optional*, defaults to 'False'):
+                Whether to disable mmap when loading a Safetensors model. This option can perform better when the model
+                is on a network mount or hard drive, which may not handle the seeky-ness of mmap very well.

@@ -586,6 +589,7 @@ def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.P
         variant = kwargs.pop("variant", None)
         use_safetensors = kwargs.pop("use_safetensors", None)
         quantization_config = kwargs.pop("quantization_config", None)
+        disable_mmap = kwargs.pop("disable_mmap", False)

         allow_pickle = False
         if use_safetensors is None:
@@ -865,7 +869,7 @@ def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.P
                     # TODO (sayakpaul, SunMarc): remove this after model loading refactor
                     else:
                         param_device = torch.device(torch.cuda.current_device())
-                    state_dict = load_state_dict(model_file, variant=variant)
+                    state_dict = load_state_dict(model_file, variant=variant, disable_mmap=disable_mmap)
                     model._convert_deprecated_attention_blocks(state_dict)

                     # move the params from meta device to cpu
@@ -965,7 +969,7 @@ def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.P
             else:
                 model = cls.from_config(config, **unused_kwargs)

-                state_dict = load_state_dict(model_file, variant=variant)
+                state_dict = load_state_dict(model_file, variant=variant, disable_mmap=disable_mmap)
                 model._convert_deprecated_attention_blocks(state_dict)

                 model, missing_keys, unexpected_keys, mismatched_keys, error_msgs = cls._load_pretrained_model(
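With the rename in place, the flag reaches `from_single_file` and `from_pretrained` as a plain keyword argument. A usage sketch (the checkpoint path is hypothetical, and the exact pipeline class depends on the checkpoint):

```python
import torch
from diffusers import StableDiffusionXLPipeline

# Force one sequential read of the checkpoint instead of lazy mmap page
# faults, which tends to help on network mounts and hard drives.
pipe = StableDiffusionXLPipeline.from_single_file(
    "/mnt/models/sd_xl_base_1.0.safetensors",  # hypothetical path
    torch_dtype=torch.float16,
    disable_mmap=True,
)
```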
""" @@ -142,7 +144,7 @@ def load_state_dict(checkpoint_file: Union[str, os.PathLike], variant: Optional[ try: file_extension = os.path.basename(checkpoint_file).split(".")[-1] if file_extension == SAFETENSORS_FILE_EXTENSION: - if no_mmap: + if disable_mmap: return safetensors.torch.load(open(checkpoint_file, "rb").read()) else: return safetensors.torch.load_file(checkpoint_file, device="cpu") diff --git a/src/diffusers/models/modeling_utils.py b/src/diffusers/models/modeling_utils.py index 872d4d73d41f..e16f44bc73d3 100644 --- a/src/diffusers/models/modeling_utils.py +++ b/src/diffusers/models/modeling_utils.py @@ -541,6 +541,9 @@ def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.P If set to `None`, the `safetensors` weights are downloaded if they're available **and** if the `safetensors` library is installed. If set to `True`, the model is forcibly loaded from `safetensors` weights. If set to `False`, `safetensors` weights are not loaded. + disable_mmap ('bool', *optional*, defaults to 'False'): + Whether to disable mmap when loading a Safetensors model. This option can perform better when the model + is on a network mount or hard drive, which may not handle the seeky-ness of mmap very well. @@ -586,6 +589,7 @@ def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.P variant = kwargs.pop("variant", None) use_safetensors = kwargs.pop("use_safetensors", None) quantization_config = kwargs.pop("quantization_config", None) + disable_mmap = kwargs.pop("disable_mmap", False) allow_pickle = False if use_safetensors is None: @@ -865,7 +869,7 @@ def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.P # TODO (sayakpaul, SunMarc): remove this after model loading refactor else: param_device = torch.device(torch.cuda.current_device()) - state_dict = load_state_dict(model_file, variant=variant) + state_dict = load_state_dict(model_file, variant=variant, disable_mmap=disable_mmap) model._convert_deprecated_attention_blocks(state_dict) # move the params from meta device to cpu @@ -965,7 +969,7 @@ def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.P else: model = cls.from_config(config, **unused_kwargs) - state_dict = load_state_dict(model_file, variant=variant) + state_dict = load_state_dict(model_file, variant=variant, disable_mmap=disable_mmap) model._convert_deprecated_attention_blocks(state_dict) model, missing_keys, unexpected_keys, mismatched_keys, error_msgs = cls._load_pretrained_model( From ffe5abae5e8287f176139cab7287dc9ef9edc48c Mon Sep 17 00:00:00 2001 From: Sayak Paul Date: Fri, 20 Dec 2024 18:32:22 +0530 Subject: [PATCH 16/23] [Docs] Update ltx_video.md to remove generator from `from_pretrained()` (#10316) Update ltx_video.md to remove generator from `from_pretrained()` --- docs/source/en/api/pipelines/ltx_video.md | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/source/en/api/pipelines/ltx_video.md b/docs/source/en/api/pipelines/ltx_video.md index 211cd3007d1e..a925b848706e 100644 --- a/docs/source/en/api/pipelines/ltx_video.md +++ b/docs/source/en/api/pipelines/ltx_video.md @@ -79,7 +79,6 @@ transformer = LTXVideoTransformer3DModel.from_single_file( pipe = LTXPipeline.from_pretrained( "Lightricks/LTX-Video", transformer=transformer, - generator=torch.manual_seed(0), torch_dtype=torch.bfloat16, ) pipe.enable_model_cpu_offload() From 3fc4a422e54aa79a47d6ab1689cda9dd2a00131e Mon Sep 17 00:00:00 2001 From: Leojc Date: Fri, 20 Dec 2024 23:22:32 +0800 Subject: [PATCH 17/23] docs: fix a 
Subject: [PATCH 17/23] docs: fix a mistake in docstring (#10319)

Update pipeline_hunyuan_video.py

docs: fix a mistake
---
 src/diffusers/pipelines/hunyuan_video/pipeline_hunyuan_video.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/diffusers/pipelines/hunyuan_video/pipeline_hunyuan_video.py b/src/diffusers/pipelines/hunyuan_video/pipeline_hunyuan_video.py
index 4423ccf97932..6e0541e938ba 100644
--- a/src/diffusers/pipelines/hunyuan_video/pipeline_hunyuan_video.py
+++ b/src/diffusers/pipelines/hunyuan_video/pipeline_hunyuan_video.py
@@ -143,7 +143,7 @@ class HunyuanVideoPipeline(DiffusionPipeline, HunyuanVideoLoraLoaderMixin):
     Args:
         text_encoder ([`LlamaModel`]):
             [Llava Llama3-8B](https://huggingface.co/xtuner/llava-llama-3-8b-v1_1-transformers).
-        tokenizer_2 (`LlamaTokenizer`):
+        tokenizer (`LlamaTokenizer`):
             Tokenizer from [Llava Llama3-8B](https://huggingface.co/xtuner/llava-llama-3-8b-v1_1-transformers).
         transformer ([`HunyuanVideoTransformer3DModel`]):
             Conditional Transformer to denoise the encoded image latents.

From 9e887b478aebcadbfd242c2f06d329d60f3e262c Mon Sep 17 00:00:00 2001
From: Aditya Raj
Date: Fri, 20 Dec 2024 20:59:58 +0530
Subject: [PATCH 18/23] [BUG FIX] [Stable Audio Pipeline] Resolve
 torch.Tensor.new_zeros() TypeError in function prepare_latents caused by
 audio_vae_length (#10306)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

[BUG FIX] [Stable Audio Pipeline] TypeError: new_zeros(): argument 'size'
failed to unpack the object at pos 3 with error "type must be tuple of
ints,but got float"

torch.Tensor.new_zeros() takes a single argument size (int...) – a list,
tuple, or torch.Size of integers defining the shape of the output tensor.

in function prepare_latents:
audio_vae_length = self.transformer.config.sample_size * self.vae.hop_length
audio_shape = (batch_size // num_waveforms_per_prompt, audio_channels, audio_vae_length)
...
audio = initial_audio_waveforms.new_zeros(audio_shape)

audio_vae_length evaluates to float because self.transformer.config.sample_size
returns a float

Co-authored-by: hlky
---
 src/diffusers/pipelines/stable_audio/pipeline_stable_audio.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/diffusers/pipelines/stable_audio/pipeline_stable_audio.py b/src/diffusers/pipelines/stable_audio/pipeline_stable_audio.py
index cef63cf7e63d..5d773b614a5c 100644
--- a/src/diffusers/pipelines/stable_audio/pipeline_stable_audio.py
+++ b/src/diffusers/pipelines/stable_audio/pipeline_stable_audio.py
@@ -446,7 +446,7 @@ def prepare_latents(
                 f"`initial_audio_waveforms` must be of shape `(batch_size, num_channels, audio_length)` or `(batch_size, audio_length)` but has `{initial_audio_waveforms.ndim}` dimensions"
             )

-        audio_vae_length = self.transformer.config.sample_size * self.vae.hop_length
+        audio_vae_length = int(self.transformer.config.sample_size) * self.vae.hop_length
         audio_shape = (batch_size // num_waveforms_per_prompt, audio_channels, audio_vae_length)

         # check num_channels
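To see why the `int(...)` cast is needed: `Tensor.new_zeros` accepts only integer sizes, so a float slipping into the shape tuple raises the `TypeError` quoted in the commit message. A minimal reproduction with illustrative values:

```python
import torch

t = torch.empty(1)
sample_size, hop_length = 1024.0, 2048  # sample_size arrives as a float from the config

try:
    t.new_zeros((1, 2, sample_size * hop_length))  # float at pos 3 of the size tuple
except TypeError as err:
    print(err)  # "type must be tuple of ints, but got float"

audio = t.new_zeros((1, 2, int(sample_size) * hop_length))  # the patch's fix
print(audio.shape)  # torch.Size([1, 2, 2097152])
```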
From dbbcd0f343c7b87c444ea757b5e100c674ca5123 Mon Sep 17 00:00:00 2001
From: Steven Liu <59462357+stevhliu@users.noreply.github.com>
Date: Fri, 20 Dec 2024 08:30:21 -0800
Subject: [PATCH 19/23] [docs] Fix quantization links (#10323)

Update overview.md
---
 docs/source/en/quantization/overview.md | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/docs/source/en/quantization/overview.md b/docs/source/en/quantization/overview.md
index 3eef5238f1ce..794098e210a6 100644
--- a/docs/source/en/quantization/overview.md
+++ b/docs/source/en/quantization/overview.md
@@ -33,8 +33,8 @@ If you are new to the quantization field, we recommend you to check out these be
 ## When to use what?

 Diffusers currently supports the following quantization methods.
- [BitsandBytes](./bitsandbytes.md)
- [TorchAO](./torchao.md)
- [GGUF](./gguf.md)
+ [BitsandBytes](./bitsandbytes)
+ [TorchAO](./torchao)
+ [GGUF](./gguf)

 [This resource](https://huggingface.co/docs/transformers/main/en/quantization/overview#when-to-use-what) provides a good overview of the pros and cons of different quantization techniques.
From dfebda27c25b5b650674338d721524c2fd227e1b Mon Sep 17 00:00:00 2001
From: Junsong Chen
Date: Sat, 21 Dec 2024 01:21:34 +0800
Subject: [PATCH 20/23] [Sana]add 2K related model for Sana (#10322)

add 2K related model for Sana
---
 scripts/convert_sana_to_diffusers.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/scripts/convert_sana_to_diffusers.py b/scripts/convert_sana_to_diffusers.py
index c1045a98a51a..dc553681678b 100644
--- a/scripts/convert_sana_to_diffusers.py
+++ b/scripts/convert_sana_to_diffusers.py
@@ -25,6 +25,7 @@
 CTX = init_empty_weights if is_accelerate_available else nullcontext

 ckpt_ids = [
+    "Efficient-Large-Model/Sana_1600M_2Kpx_BF16/checkpoints/Sana_1600M_2Kpx_BF16.pth",
     "Efficient-Large-Model/Sana_1600M_1024px_MultiLing/checkpoints/Sana_1600M_1024px_MultiLing.pth",
     "Efficient-Large-Model/Sana_1600M_1024px_BF16/checkpoints/Sana_1600M_1024px_BF16.pth",
     "Efficient-Large-Model/Sana_1600M_512px_MultiLing/checkpoints/Sana_1600M_512px_MultiLing.pth",
@@ -265,9 +266,9 @@ def main(args):
         "--image_size",
         default=1024,
         type=int,
-        choices=[512, 1024],
+        choices=[512, 1024, 2048],
         required=False,
-        help="Image size of pretrained model, 512 or 1024.",
+        help="Image size of pretrained model, 512, 1024 or 2048.",
     )
     parser.add_argument(
         "--model_type", default="SanaMS_1600M_P1_D20", type=str, choices=["SanaMS_1600M_P1_D20", "SanaMS_600M_P1_D28"]

From 6720c51ae78f7231c315b4ab04fbf5704d3c30a7 Mon Sep 17 00:00:00 2001
From: Daniel Hipke
Date: Sun, 22 Dec 2024 22:59:26 -0800
Subject: [PATCH 21/23] Update src/diffusers/loaders/single_file_model.py

Co-authored-by: Dhruv Nair
---
 src/diffusers/loaders/single_file_model.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/diffusers/loaders/single_file_model.py b/src/diffusers/loaders/single_file_model.py
index fe9f2e14ceeb..70178fde6cbf 100644
--- a/src/diffusers/loaders/single_file_model.py
+++ b/src/diffusers/loaders/single_file_model.py
@@ -362,7 +362,7 @@ def from_single_file(cls, pretrained_model_link_or_path_or_dict: Optional[str] =
         else:
             _, unexpected_keys = model.load_state_dict(
-                diffusers_format_checkpoint, strict=False, disable_mmap=disable_mmap
+                diffusers_format_checkpoint, strict=False
             )

         if model._keys_to_ignore_on_load_unexpected is not None:
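This revert is what makes the earlier `disable_mmap=disable_mmap` pass-through from patches 08 and 15 safe to drop: `torch.nn.Module.load_state_dict` has a fixed signature (roughly `load_state_dict(state_dict, strict=True, assign=False)` in recent PyTorch), so it rejects unknown keyword arguments; by the time it runs, the state dict has already been read from disk, leaving the flag nothing to control. A minimal illustration:

```python
import torch

model = torch.nn.Linear(2, 2)
state_dict = model.state_dict()

model.load_state_dict(state_dict, strict=False)  # fine

try:
    model.load_state_dict(state_dict, strict=False, disable_mmap=True)  # unsupported kwarg
except TypeError as err:
    print(err)  # load_state_dict() got an unexpected keyword argument 'disable_mmap'
```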
From 2926158e7ff23f13e5992755e89e284abb239819 Mon Sep 17 00:00:00 2001
From: Daniel Hipke
Date: Sun, 22 Dec 2024 22:59:42 -0800
Subject: [PATCH 22/23] Update src/diffusers/loaders/single_file.py

Co-authored-by: Dhruv Nair
---
 src/diffusers/loaders/single_file.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/diffusers/loaders/single_file.py b/src/diffusers/loaders/single_file.py
index 68a5c21ca732..71161cc58529 100644
--- a/src/diffusers/loaders/single_file.py
+++ b/src/diffusers/loaders/single_file.py
@@ -312,7 +312,7 @@ def from_single_file(cls, pretrained_model_link_or_path, **kwargs):
                     component configs in Diffusers format.
             disable_mmap ('bool', *optional*, defaults to 'False'):
                 Whether to disable mmap when loading a Safetensors model. This option can perform better when the model
-                is on a network mount or hard drive, which may not handle the seeky-ness of mmap very well.
+                is on a network mount or hard drive.
             kwargs (remaining dictionary of keyword arguments, *optional*):
                 Can be used to overwrite load and saveable variables (the pipeline components of the specific pipeline
                 class). The overwritten components are passed directly to the pipelines `__init__` method. See example

From 22b33708a152f101075f4c15e538e2bfab5825fd Mon Sep 17 00:00:00 2001
From: DN6
Date: Fri, 10 Jan 2025 13:57:09 +0530
Subject: [PATCH 23/23] make style

---
 src/diffusers/loaders/single_file_model.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/src/diffusers/loaders/single_file_model.py b/src/diffusers/loaders/single_file_model.py
index 4d294b262a42..0c998bab5e0f 100644
--- a/src/diffusers/loaders/single_file_model.py
+++ b/src/diffusers/loaders/single_file_model.py
@@ -367,9 +367,7 @@ def from_single_file(cls, pretrained_model_link_or_path_or_dict: Optional[str] =
             )

         else:
-            _, unexpected_keys = model.load_state_dict(
-                diffusers_format_checkpoint, strict=False
-            )
+            _, unexpected_keys = model.load_state_dict(diffusers_format_checkpoint, strict=False)

         if model._keys_to_ignore_on_load_unexpected is not None:
             for pat in model._keys_to_ignore_on_load_unexpected: