@@ -267,7 +267,7 @@ def align_maybe_tensor_dtype(input: Any, dtype: torch.dtype) -> Any:
     return input
 
 
-class LayerwiseUpcastingGranualarity(str, Enum):
+class LayerwiseUpcastingGranularity(str, Enum):
     r"""
     An enumeration class that defines the granularity of the layerwise upcasting process.
 
@@ -349,17 +349,17 @@ def apply_layerwise_upcasting(
     module: torch.nn.Module,
     storage_dtype: torch.dtype,
     compute_dtype: torch.dtype,
-    granularity: LayerwiseUpcastingGranualarity = LayerwiseUpcastingGranualarity.PYTORCH_LAYER,
+    granularity: LayerwiseUpcastingGranularity = LayerwiseUpcastingGranularity.PYTORCH_LAYER,
     skip_modules_pattern: List[str] = [],
     skip_modules_classes: List[Type[torch.nn.Module]] = [],
 ) -> torch.nn.Module:
-    if granularity == LayerwiseUpcastingGranualarity.DIFFUSERS_MODEL:
+    if granularity == LayerwiseUpcastingGranularity.DIFFUSERS_MODEL:
         return _apply_layerwise_upcasting_diffusers_model(module, storage_dtype, compute_dtype)
-    if granularity == LayerwiseUpcastingGranualarity.DIFFUSERS_LAYER:
+    if granularity == LayerwiseUpcastingGranularity.DIFFUSERS_LAYER:
         return _apply_layerwise_upcasting_diffusers_layer(
             module, storage_dtype, compute_dtype, skip_modules_pattern, skip_modules_classes
         )
-    if granularity == LayerwiseUpcastingGranualarity.PYTORCH_LAYER:
+    if granularity == LayerwiseUpcastingGranularity.PYTORCH_LAYER:
         return _apply_layerwise_upcasting_pytorch_layer(
             module, storage_dtype, compute_dtype, skip_modules_pattern, skip_modules_classes
         )