1 parent 582af9b · commit a6ee660
src/diffusers/models/model_loading_utils.py

@@ -256,7 +256,7 @@ def load_model_dict_into_meta(
         if is_accelerate_version(">=", "1.9.0.dev0"):
             set_module_kwargs["non_blocking"] = True
-            set_module_kwargs["_empty_cache"] = False
+            set_module_kwargs["clear_cache"] = False
 
         # For compatibility with PyTorch load_state_dict which converts state dict dtype to existing dtype in model, and which
         # uses `param.copy_(input_param)` that preserves the contiguity of the parameter in the model.
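The change swaps the `_empty_cache` keyword for the `clear_cache` name in the kwargs that `load_model_dict_into_meta` forwards to accelerate. A minimal sketch of the version-gated construction is below; it assumes `is_accelerate_version` is importable from `diffusers.utils` and that accelerate >= 1.9.0.dev0 accepts the keyword names shown in the diff. The helper name `build_set_module_kwargs` is illustrative, not part of diffusers.

```python
# Minimal sketch of the version-gated kwargs shown in the diff, not the full
# load_model_dict_into_meta implementation. Assumes `is_accelerate_version`
# is importable from diffusers.utils and that accelerate >= 1.9.0.dev0
# understands the `non_blocking` and `clear_cache` keyword names.
from diffusers.utils import is_accelerate_version


def build_set_module_kwargs() -> dict:
    """Build the extra kwargs forwarded to accelerate when placing tensors."""
    set_module_kwargs = {}
    if is_accelerate_version(">=", "1.9.0.dev0"):
        # Newer accelerate supports asynchronous copies; per this commit,
        # `clear_cache` replaces the previously used `_empty_cache` flag.
        set_module_kwargs["non_blocking"] = True
        set_module_kwargs["clear_cache"] = False
    return set_module_kwargs
```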