# Copyright 2025 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import torch

from ._common import _ALL_TRANSFORMER_BLOCK_IDENTIFIERS, _ATTENTION_CLASSES, _FEEDFORWARD_CLASSES
 | 19 | + | 
 | 20 | +def _get_identifiable_transformer_blocks_in_module(module: torch.nn.Module):  | 
 | 21 | +    module_list_with_transformer_blocks = []  | 
 | 22 | +    for name, submodule in module.named_modules():  | 
 | 23 | +        name_endswith_identifier = any(name.endswith(identifier) for identifier in _ALL_TRANSFORMER_BLOCK_IDENTIFIERS)  | 
 | 24 | +        is_modulelist = isinstance(submodule, torch.nn.ModuleList)  | 
 | 25 | +        if name_endswith_identifier and is_modulelist:  | 
 | 26 | +            module_list_with_transformer_blocks.append((name, submodule))  | 
 | 27 | +    return module_list_with_transformer_blocks  | 
 | 28 | + | 
 | 29 | + | 
 | 30 | +def _get_identifiable_attention_layers_in_module(module: torch.nn.Module):  | 
 | 31 | +    attention_layers = []  | 
 | 32 | +    for name, submodule in module.named_modules():  | 
 | 33 | +        if isinstance(submodule, _ATTENTION_CLASSES):  | 
 | 34 | +            attention_layers.append((name, submodule))  | 
 | 35 | +    return attention_layers  | 
 | 36 | + | 
 | 37 | + | 
 | 38 | +def _get_identifiable_feedforward_layers_in_module(module: torch.nn.Module):  | 
 | 39 | +    feedforward_layers = []  | 
 | 40 | +    for name, submodule in module.named_modules():  | 
 | 41 | +        if isinstance(submodule, _FEEDFORWARD_CLASSES):  | 
 | 42 | +            feedforward_layers.append((name, submodule))  | 
 | 43 | +    return feedforward_layers  | 