From a07a9dbfc3f61c0865dc2ef5700c9dbb2759f243 Mon Sep 17 00:00:00 2001
From: Kyle Sayers
Date: Thu, 20 Nov 2025 00:29:26 +0000
Subject: [PATCH] remove

Signed-off-by: Kyle Sayers
---
 .../modifiers/utils/pytorch_helpers.py       | 31 +------------------
 1 file changed, 1 insertion(+), 30 deletions(-)

diff --git a/src/llmcompressor/modifiers/utils/pytorch_helpers.py b/src/llmcompressor/modifiers/utils/pytorch_helpers.py
index d755689782..2f67058438 100644
--- a/src/llmcompressor/modifiers/utils/pytorch_helpers.py
+++ b/src/llmcompressor/modifiers/utils/pytorch_helpers.py
@@ -10,12 +10,8 @@
 from typing import Dict
 
 import torch
-from torch.nn import Module
 
-__all__ = [
-    "apply_pad_mask_to_batch",
-    "is_moe_model",
-]
+__all__ = ["apply_pad_mask_to_batch"]
 
 
 def apply_pad_mask_to_batch(batch: Dict[str, torch.Tensor]) -> Dict[str, torch.Tensor]:
@@ -35,28 +31,3 @@ def apply_pad_mask_to_batch(batch: Dict[str, torch.Tensor]) -> Dict[str, torch.T
             batch[key] = batch[key] * batch["attention_mask"]
 
     return batch
-
-
-def is_moe_model(model: Module) -> bool:
-    """
-    Check if the model is a mixture of experts model
-
-    :param model: the model to check
-    :return: True if the model is a mixture of experts model
-    """
-
-    # Check for MoE components
-    for _, module in model.named_modules():
-        module_name = module.__class__.__name__
-        if "MoE" in module_name or "Expert" in module_name:
-            return True
-
-    # Check config for MoE attributes
-    if hasattr(model, "config"):
-        if any(
-            "moe" in attr.lower() or "expert" in attr.lower()
-            for attr in dir(model.config)
-        ):
-            return True
-
-    return False