Skip to content

Commit 3b2e85d

Browse files
committed
update
1 parent 12b4edc commit 3b2e85d

File tree

5 files changed

+60
-12
lines changed

5 files changed

+60
-12
lines changed

src/diffusers/models/attention.py

Lines changed: 56 additions & 8 deletions
Original file line number | Diff line number | Diff line change
@@ -12,19 +12,15 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15-
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
15+
from typing import Callable, Dict, Optional, Tuple, Union
1616

1717
import torch
1818
import torch.nn as nn
1919
import torch.nn.functional as F
2020

21-
from ..utils import deprecate, logging
21+
from ..utils import logging
2222
from ..utils.import_utils import is_torch_npu_available, is_torch_xla_available, is_xformers_available
23-
from ..utils.torch_utils import maybe_allow_in_graph
24-
from .activations import GEGLU, GELU, ApproximateGELU, FP32SiLU, LinearActivation, SwiGLU
25-
from .attention_processor import Attention, AttentionProcessor, JointAttnProcessor2_0
26-
from .embeddings import SinusoidalPositionalEmbedding
27-
from .normalization import AdaLayerNorm, AdaLayerNormContinuous, AdaLayerNormZero, RMSNorm, SD35AdaLayerNormZeroX
23+
from .attention_processor import AttentionProcessor
2824

2925

3026
if is_xformers_available():
@@ -511,78 +507,130 @@ def norm_encoder_hidden_states(self, encoder_hidden_states: torch.Tensor) -> tor
511507

512508
def _chunked_feed_forward(*args, **kwargs):
513509
"""Backward compatibility stub. Use transformers.modeling_common._chunked_feed_forward instead."""
510+
logger.warning(
511+
"Importing `_chunked_feed_forward` from `diffusers.models.attention` is deprecated and will be removed in a future version. "
512+
"Please use `from diffusers.models.transformers.modeling_common import _chunked_feed_forward` instead."
513+
)
514514
from .transformers.modeling_common import _chunked_feed_forward as _actual_chunked_feed_forward
515+
515516
return _actual_chunked_feed_forward(*args, **kwargs)
516517

517518

518519
class GatedSelfAttentionDense:
519520
r"""
520521
Backward compatibility stub. Use transformers.modeling_common.GatedSelfAttentionDense instead.
521522
"""
523+
522524
def __new__(cls, *args, **kwargs):
525+
logger.warning(
526+
"Importing `GatedSelfAttentionDense` from `diffusers.models.attention` is deprecated and will be removed in a future version. "
527+
"Please use `from diffusers.models.transformers.modeling_common import GatedSelfAttentionDense` instead."
528+
)
523529
from .transformers.modeling_common import GatedSelfAttentionDense
530+
524531
return GatedSelfAttentionDense(*args, **kwargs)
525532

526533

527534
class JointTransformerBlock:
528535
r"""
529536
Backward compatibility stub. Use transformers.modeling_common.JointTransformerBlock instead.
530537
"""
538+
531539
def __new__(cls, *args, **kwargs):
540+
logger.warning(
541+
"Importing `JointTransformerBlock` from `diffusers.models.attention` is deprecated and will be removed in a future version. "
542+
"Please use `from diffusers.models.transformers.modeling_common import JointTransformerBlock` instead."
543+
)
532544
from .transformers.modeling_common import JointTransformerBlock
545+
533546
return JointTransformerBlock(*args, **kwargs)
534547

535548

536549
class BasicTransformerBlock:
537550
r"""
538551
Backward compatibility stub. Use transformers.modeling_common.BasicTransformerBlock instead.
539552
"""
553+
540554
def __new__(cls, *args, **kwargs):
555+
logger.warning(
556+
"Importing `BasicTransformerBlock` from `diffusers.models.attention` is deprecated and will be removed in a future version. "
557+
"Please use `from diffusers.models.transformers.modeling_common import BasicTransformerBlock` instead."
558+
)
541559
from .transformers.modeling_common import BasicTransformerBlock
560+
542561
return BasicTransformerBlock(*args, **kwargs)
543562

544563

545564
class LuminaFeedForward:
546565
r"""
547566
Backward compatibility stub. Use transformers.modeling_common.LuminaFeedForward instead.
548567
"""
568+
549569
def __new__(cls, *args, **kwargs):
570+
logger.warning(
571+
"Importing `LuminaFeedForward` from `diffusers.models.attention` is deprecated and will be removed in a future version. "
572+
"Please use `from diffusers.models.transformers.modeling_common import LuminaFeedForward` instead."
573+
)
550574
from .transformers.modeling_common import LuminaFeedForward
575+
551576
return LuminaFeedForward(*args, **kwargs)
552577

553578

554579
class TemporalBasicTransformerBlock:
555580
r"""
556581
Backward compatibility stub. Use transformers.modeling_common.TemporalBasicTransformerBlock instead.
557582
"""
583+
558584
def __new__(cls, *args, **kwargs):
585+
logger.warning(
586+
"Importing `TemporalBasicTransformerBlock` from `diffusers.models.attention` is deprecated and will be removed in a future version. "
587+
"Please use `from diffusers.models.transformers.modeling_common import TemporalBasicTransformerBlock` instead."
588+
)
559589
from .transformers.modeling_common import TemporalBasicTransformerBlock
590+
560591
return TemporalBasicTransformerBlock(*args, **kwargs)
561592

562593

563594
class SkipFFTransformerBlock:
564595
r"""
565596
Backward compatibility stub. Use transformers.modeling_common.SkipFFTransformerBlock instead.
566597
"""
598+
567599
def __new__(cls, *args, **kwargs):
600+
logger.warning(
601+
"Importing `SkipFFTransformerBlock` from `diffusers.models.attention` is deprecated and will be removed in a future version. "
602+
"Please use `from diffusers.models.transformers.modeling_common import SkipFFTransformerBlock` instead."
603+
)
568604
from .transformers.modeling_common import SkipFFTransformerBlock
605+
569606
return SkipFFTransformerBlock(*args, **kwargs)
570607

571608

572609
class FreeNoiseTransformerBlock:
573610
r"""
574611
Backward compatibility stub. Use transformers.modeling_common.FreeNoiseTransformerBlock instead.
575612
"""
613+
576614
def __new__(cls, *args, **kwargs):
615+
logger.warning(
616+
"Importing `FreeNoiseTransformerBlock` from `diffusers.models.attention` is deprecated and will be removed in a future version. "
617+
"Please use `from diffusers.models.transformers.modeling_common import FreeNoiseTransformerBlock` instead."
618+
)
577619
from .transformers.modeling_common import FreeNoiseTransformerBlock
578-
return FreeNoiseTransformerBlock(*args, **kwargs)
579620

621+
return FreeNoiseTransformerBlock(*args, **kwargs)
580622

581623

582624
class FeedForward:
583625
r"""
584626
Backward compatibility stub. Use transformers.modeling_common.FeedForward instead.
585627
"""
628+
586629
def __new__(cls, *args, **kwargs):
630+
logger.warning(
631+
"Importing `FeedForward` from `diffusers.models.attention` is deprecated and will be removed in a future version. "
632+
"Please use `from diffusers.models.transformers.modeling_common import FeedForward` instead."
633+
)
587634
from .transformers.modeling_common import FeedForward
635+
588636
return FeedForward(*args, **kwargs)

src/diffusers/models/transformers/dit_transformer_2d.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -19,10 +19,10 @@
1919

2020
from ...configuration_utils import ConfigMixin, register_to_config
2121
from ...utils import logging
22-
from .modeling_common import BasicTransformerBlock
2322
from ..embeddings import PatchEmbed
2423
from ..modeling_outputs import Transformer2DModelOutput
2524
from ..modeling_utils import ModelMixin
25+
from .modeling_common import BasicTransformerBlock
2626

2727

2828
logger = logging.get_logger(__name__) # pylint: disable=invalid-name

src/diffusers/models/transformers/modeling_common.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1255,4 +1255,4 @@ def forward(self, hidden_states: torch.Tensor, *args, **kwargs) -> torch.Tensor:
12551255
deprecate("scale", "1.0.0", deprecation_message)
12561256
for module in self.net:
12571257
hidden_states = module(hidden_states)
1258-
return hidden_states
1258+
return hidden_states

src/diffusers/models/transformers/transformer_2d.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -19,11 +19,11 @@
1919

2020
from ...configuration_utils import LegacyConfigMixin, register_to_config
2121
from ...utils import deprecate, logging
22-
from .modeling_common import BasicTransformerBlock
2322
from ..embeddings import ImagePositionalEmbeddings, PatchEmbed, PixArtAlphaTextProjection
2423
from ..modeling_outputs import Transformer2DModelOutput
2524
from ..modeling_utils import LegacyModelMixin
2625
from ..normalization import AdaLayerNormSingle
26+
from .modeling_common import BasicTransformerBlock
2727

2828

2929
logger = logging.get_logger(__name__) # pylint: disable=invalid-name

src/diffusers/models/transformers/transformer_sd3.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -20,7 +20,6 @@
2020
from ...loaders import FromOriginalModelMixin, PeftAdapterMixin, SD3Transformer2DLoadersMixin
2121
from ...utils import USE_PEFT_BACKEND, logging, scale_lora_layers, unscale_lora_layers
2222
from ...utils.torch_utils import maybe_allow_in_graph
23-
from .modeling_common import FeedForward, JointTransformerBlock
2423
from ..attention_processor import (
2524
Attention,
2625
AttentionProcessor,
@@ -31,6 +30,7 @@
3130
from ..modeling_outputs import Transformer2DModelOutput
3231
from ..modeling_utils import ModelMixin
3332
from ..normalization import AdaLayerNormContinuous, AdaLayerNormZero
33+
from .modeling_common import FeedForward, JointTransformerBlock
3434

3535

3636
logger = logging.get_logger(__name__) # pylint: disable=invalid-name

0 commit comments

Comments (0)