Skip to content

Commit 4f84222

Browse files
committed
clean up logging
1 parent c4d4d60 commit 4f84222

File tree

3 files changed

+0
-13
lines changed

3 files changed

+0
-13
lines changed

src/diffusers/loaders/single_file.py

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -99,7 +99,6 @@ def load_single_file_sub_model(
9999
if original_config:
100100
cached_model_config_path = None
101101

102-
logger.warning("dhipke1")
103102
loaded_sub_model = load_method(
104103
pretrained_model_link_or_path_or_dict=checkpoint,
105104
original_config=original_config,
@@ -112,7 +111,6 @@ def load_single_file_sub_model(
112111
)
113112

114113
elif is_transformers_model and is_clip_model_in_single_file(class_obj, checkpoint):
115-
logger.warning("dhipke2")
116114
loaded_sub_model = create_diffusers_clip_model_from_ldm(
117115
class_obj,
118116
checkpoint=checkpoint,
@@ -144,8 +142,6 @@ def load_single_file_sub_model(
144142
)
145143

146144
else:
147-
logger.warning("dhipke3")
148-
149145
if not hasattr(class_obj, "from_pretrained"):
150146
raise ValueError(
151147
(
@@ -363,7 +359,6 @@ def from_single_file(cls, pretrained_model_link_or_path, **kwargs):
363359
revision = kwargs.pop("revision", None)
364360
torch_dtype = kwargs.pop("torch_dtype", None)
365361
no_mmap = kwargs.pop("no_mmap", False)
366-
logger.warning("no_mmap: " + str(no_mmap))
367362

368363
is_legacy_loading = False
369364

src/diffusers/loaders/single_file_model.py

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -217,7 +217,6 @@ def from_single_file(cls, pretrained_model_link_or_path_or_dict: Optional[str] =
217217
revision = kwargs.pop("revision", None)
218218
torch_dtype = kwargs.pop("torch_dtype", None)
219219
no_mmap = kwargs.pop("no_mmap", False)
220-
logger.warning("no_mmap2: " + str(no_mmap))
221220

222221
if isinstance(pretrained_model_link_or_path_or_dict, dict):
223222
checkpoint = pretrained_model_link_or_path_or_dict
@@ -234,9 +233,7 @@ def from_single_file(cls, pretrained_model_link_or_path_or_dict: Optional[str] =
234233
)
235234

236235
mapping_functions = SINGLE_FILE_LOADABLE_CLASSES[mapping_class_name]
237-
logger.warning(mapping_functions)
238236
checkpoint_mapping_fn = mapping_functions["checkpoint_mapping_fn"]
239-
logger.warning(checkpoint_mapping_fn)
240237
if original_config is not None:
241238
if "config_mapping_fn" in mapping_functions:
242239
config_mapping_fn = mapping_functions["config_mapping_fn"]
@@ -256,7 +253,6 @@ def from_single_file(cls, pretrained_model_link_or_path_or_dict: Optional[str] =
256253
# If original_config is a URL or filepath fetch the original_config dict
257254
original_config = fetch_original_config(original_config, local_files_only=local_files_only)
258255

259-
logger.warn("hi1")
260256
config_mapping_kwargs = _get_mapping_function_kwargs(config_mapping_fn, **kwargs)
261257
diffusers_model_config = config_mapping_fn(
262258
original_config=original_config, checkpoint=checkpoint, **config_mapping_kwargs
@@ -314,17 +310,14 @@ def from_single_file(cls, pretrained_model_link_or_path_or_dict: Optional[str] =
314310

315311
ctx = init_empty_weights if is_accelerate_available() else nullcontext
316312
with ctx():
317-
logger.warn("hi2")
318313
model = cls.from_config(diffusers_model_config)
319314

320-
logger.warn("hi3")
321315
if is_accelerate_available():
322316
unexpected_keys = load_model_dict_into_meta(model, diffusers_format_checkpoint, dtype=torch_dtype)
323317

324318
else:
325319
_, unexpected_keys = model.load_state_dict(diffusers_format_checkpoint, strict=False, no_mmap=no_mmap)
326320

327-
logger.warn("hi4")
328321
if model._keys_to_ignore_on_load_unexpected is not None:
329322
for pat in model._keys_to_ignore_on_load_unexpected:
330323
unexpected_keys = [k for k in unexpected_keys if re.search(pat, k) is None]

src/diffusers/loaders/single_file_utils.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -382,7 +382,6 @@ def load_single_file_checkpoint(
382382
revision=revision,
383383
)
384384

385-
logger.warn("before load_state_dict. no_mmap: " + str(no_mmap))
386385
checkpoint = load_state_dict(pretrained_model_link_or_path, no_mmap=no_mmap)
387386

388387
# some checkpoints contain the model state dict under a "state_dict" key

0 commit comments

Comments (0)