We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent af544d1 · commit 5d614c7 — Copy full SHA for 5d614c7
src/diffusers/hooks/layerwise_casting.py
@@ -60,7 +60,7 @@ def initialize_hook(self, module: torch.nn.Module):
60
module.to(dtype=self.storage_dtype, non_blocking=self.non_blocking)
61
return module
62
63
- def deinitialize_hook(self, module: torch.nn.Module):
+ def deinitalize_hook(self, module: torch.nn.Module):
64
raise NotImplementedError(
65
"LayerwiseCastingHook does not support deinitialization. A model once enabled with layerwise casting will "
66
"have casted its weights to a lower precision dtype for storage. Casting this back to the original dtype "
0 commit comments