|
| 1 | +import torch |
1 | 2 | import traceback |
2 | 3 | import logging |
| 4 | +from graph_net.imp_util import load_module |
| 5 | +from graph_net.torch.decompose_util import fold_range_to_submodule |
3 | 6 | from graph_net.torch.graph_decomposer import NaiveDecomposerExtractor |
4 | 7 | from graph_net.torch.graph_fusibility_status import ( |
5 | 8 | GraphFusibilityStatus, |
6 | 9 | GraphFusibility, |
7 | 10 | ) |
| 11 | +from graph_net.torch.fx_graph_module_util import get_torch_module_and_inputs |
| 12 | +from graph_net.torch.fx_graph_cache_util import ( |
| 13 | + parse_immutable_model_path_into_sole_graph_module, |
| 14 | +) |
8 | 15 |
|
9 | 16 | logger = logging.getLogger(__name__) |
10 | 17 |
|
@@ -32,3 +39,64 @@ def __call__(self, model_path): |
32 | 39 | traceback.print_exc() |
33 | 40 | print("--------------------------\n") |
34 | 41 | return False |
| 42 | + |
| 43 | + |
class FullyFusibleSubGraphPredicator:
    """Decide whether a node range of an FX graph module is fully fusible.

    The predicate folds the requested node range into a submodule, wraps that
    submodule with a user-supplied "fully fusible" decorator (loaded
    dynamically from the configured python file), and then replays the
    rewritten graph module on the model's recorded inputs.  The decorator is
    expected to report its verdict by raising ``GraphFusibilityStatus``.
    """

    def __init__(self, config):
        # Accept a missing config for convenience; normalization happens below.
        if config is None:
            config = {}
        # BUGFIX: _make_config takes individual keyword parameters, not a
        # single dict — the raw config must be unpacked into kwargs, otherwise
        # construction fails with a TypeError for the missing required args.
        self.config = self._make_config(**config)
        # Build the decorator from the *normalized* config so defaults
        # (e.g. an empty decorator config) are already filled in.
        self.nn_module_fully_fusible_decorator = (
            self._make_nn_module_fully_fusible_decorator(self.config)
        )
        model_path = self.config["model_path"]
        # Only the recorded inputs are needed for replay in __call__; the
        # loaded torch module itself is discarded.
        _, inputs = get_torch_module_and_inputs(model_path)
        self.traced_module = parse_immutable_model_path_into_sole_graph_module(
            model_path
        )
        self.inputs = inputs

    def _make_nn_module_fully_fusible_decorator(self, config):
        """Dynamically load and instantiate the configured decorator class."""
        py_module = load_module(config["nn_module_fully_fusible_decorator_path"])
        decorator_cls = getattr(
            py_module, config["nn_module_fully_fusible_decorator_class_name"]
        )
        return decorator_cls(config["nn_module_fully_fusible_decorator_config"])

    def _make_config(
        self,
        model_path,
        nn_module_fully_fusible_decorator_path,
        nn_module_fully_fusible_decorator_class_name,
        nn_module_fully_fusible_decorator_config=None,
    ):
        """Build the canonical config dict, defaulting the decorator config.

        ``nn_module_fully_fusible_decorator_config`` defaults to ``{}``
        (created per call — never a shared mutable default).
        """
        if nn_module_fully_fusible_decorator_config is None:
            nn_module_fully_fusible_decorator_config = {}
        return {
            "model_path": model_path,
            "nn_module_fully_fusible_decorator_path": nn_module_fully_fusible_decorator_path,
            "nn_module_fully_fusible_decorator_class_name": nn_module_fully_fusible_decorator_class_name,
            "nn_module_fully_fusible_decorator_config": nn_module_fully_fusible_decorator_config,
        }

    def __call__(self, gm: torch.fx.GraphModule, start_node_idx, end_node_idx):
        """Return True/False for the fusibility of nodes in the given range.

        Runs the rewritten module; the decorator signals its verdict by
        raising ``GraphFusibilityStatus``.  Any other exception is handled
        best-effort: the traceback is printed and False is returned.
        NOTE(review): if the decorator raises nothing, this falls through and
        returns None — presumably the decorator always raises; confirm.
        """
        try:
            rewritten_gm: torch.fx.GraphModule = fold_range_to_submodule(
                gm,
                start_node_idx=start_node_idx,
                end_node_idx=end_node_idx,
                submodule_hook=self.nn_module_fully_fusible_decorator,
            )
            # Replaying on the recorded inputs triggers the decorated
            # submodule, which raises GraphFusibilityStatus.
            rewritten_gm(*self.inputs)
        except GraphFusibilityStatus as status:
            if status.graph_fusibility == GraphFusibility.kFullyFusible:
                return True
            elif status.graph_fusibility == GraphFusibility.kNotFullyFusible:
                return False
            else:
                raise NotImplementedError(f"{status.graph_fusibility=}")
        except Exception:
            print("\n--- Custom Error Handler ---")
            traceback.print_exc()
            print("--------------------------\n")
            return False
0 commit comments