From fa6665711e12a44d3198dad48b22a523c5fcce5b Mon Sep 17 00:00:00 2001
From: sayakpaul
Date: Wed, 9 Apr 2025 21:04:36 +0530
Subject: [PATCH 1/3] dummy

---
 src/diffusers/models/modeling_utils.py | 17 +++++++++--------
 1 file changed, 9 insertions(+), 8 deletions(-)

diff --git a/src/diffusers/models/modeling_utils.py b/src/diffusers/models/modeling_utils.py
index 2a22bc09ad7a..b8cf7822d6b7 100644
--- a/src/diffusers/models/modeling_utils.py
+++ b/src/diffusers/models/modeling_utils.py
@@ -340,16 +340,17 @@ def set_use_npu_flash_attention(self, valid: bool) -> None:
         Set the switch for the npu flash attention.
         """
 
-        def fn_recursive_set_npu_flash_attention(module: torch.nn.Module):
-            if hasattr(module, "set_use_npu_flash_attention"):
-                module.set_use_npu_flash_attention(valid)
+        # def fn_recursive_set_npu_flash_attention(module: torch.nn.Module):
+        #     if hasattr(module, "set_use_npu_flash_attention"):
+        #         module.set_use_npu_flash_attention(valid)
 
-            for child in module.children():
-                fn_recursive_set_npu_flash_attention(child)
+        #     for child in module.children():
+        #         fn_recursive_set_npu_flash_attention(child)
 
-        for module in self.children():
-            if isinstance(module, torch.nn.Module):
-                fn_recursive_set_npu_flash_attention(module)
+        # for module in self.children():
+        #     if isinstance(module, torch.nn.Module):
+        #         fn_recursive_set_npu_flash_attention(module)
+        pass
 
     def enable_npu_flash_attention(self) -> None:
         r"""

From 7a9cf80d67134400909f1589f75d51dce28bacf7 Mon Sep 17 00:00:00 2001
From: sayakpaul
Date: Wed, 9 Apr 2025 21:06:59 +0530
Subject: [PATCH 2/3] mess up styling

---
 src/diffusers/models/modeling_utils.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/src/diffusers/models/modeling_utils.py b/src/diffusers/models/modeling_utils.py
index b8cf7822d6b7..2941f07e7e41 100644
--- a/src/diffusers/models/modeling_utils.py
+++ b/src/diffusers/models/modeling_utils.py
@@ -26,8 +26,7 @@
 from contextlib import ExitStack, contextmanager
 from functools import wraps
 from pathlib import Path
-from typing import Any, Callable, ContextManager, Dict, List, Optional, Tuple, Type, Union
-
+from typing import Any,Callable, ContextManager, Dict, List, Optional, Tuple, Type, Union
 import safetensors
 import torch
 import torch.utils.checkpoint

From 2c46122efc55ad195eba51892b96b115f601bd1c Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
Date: Wed, 9 Apr 2025 15:39:09 +0000
Subject: [PATCH 3/3] Apply style fixes

---
 src/diffusers/models/modeling_utils.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/diffusers/models/modeling_utils.py b/src/diffusers/models/modeling_utils.py
index 2941f07e7e41..b8cf7822d6b7 100644
--- a/src/diffusers/models/modeling_utils.py
+++ b/src/diffusers/models/modeling_utils.py
@@ -26,7 +26,8 @@
 from contextlib import ExitStack, contextmanager
 from functools import wraps
 from pathlib import Path
-from typing import Any,Callable, ContextManager, Dict, List, Optional, Tuple, Type, Union
+from typing import Any, Callable, ContextManager, Dict, List, Optional, Tuple, Type, Union
+
 import safetensors
 import torch
 import torch.utils.checkpoint