|
from __future__ import annotations

import copy
import os
import sys
import tempfile
from typing import Callable

from graph_net.dynamic_dim_constraints import DynamicDimConstraints
from graph_net.imp_util import load_module
from graph_net.tensor_meta import TensorMeta
| 8 | + |
| 9 | + |
class UpdateInputTensorConstraints:
    """Infer and persist dynamic input-tensor constraints for a model directory.

    Two user-supplied predicate objects are loaded dynamically (by file path
    and class name from the config):
      * a "data input predicator" deciding whether a named input carries data
        (as opposed to e.g. weights) — TODO confirm exact semantics against
        the configured class;
      * a "model runnable predicator" deciding whether a model directory is
        still runnable under candidate constraints.
    Calling the instance with a model path symbolizes data-input dims as far
    as possible and writes the result to ``input_tensor_constraints.py``.
    """

    def __init__(self, config=None):
        # Normalize/validate the raw config dict via _make_config before
        # constructing the two predicate objects.
        if config is None:
            config = {}
        self.config = self._make_config(**config)
        self.data_input_predicator = self._make_data_input_predicator(self.config)
        self.model_runnable_predicator = self._make_model_runnable_predicator(
            self.config
        )

    def _make_data_input_predicator(self, config):
        # Dynamically import the module and instantiate the configured class.
        module = load_module(config["data_input_predicator_filepath"])
        cls = getattr(module, config["data_input_predicator_class_name"])
        return cls(config["data_input_predicator_config"])

    def _make_model_runnable_predicator(self, config):
        # Same dynamic-load pattern as _make_data_input_predicator.
        module = load_module(config["model_runnable_predicator_filepath"])
        cls = getattr(module, config["model_runnable_predicator_class_name"])
        return cls(config["model_runnable_predicator_config"])

    def _make_config(
        self,
        data_input_predicator_filepath,
        model_runnable_predicator_filepath,
        data_input_predicator_class_name="DataInputPredicator",
        data_input_predicator_config=None,
        model_runnable_predicator_class_name="ModelRunner",
        model_runnable_predicator_config=None,
    ):
        """Return a normalized config dict with all keys present.

        ``None`` sub-configs are replaced with fresh empty dicts (avoids the
        shared-mutable-default pitfall).
        """
        if data_input_predicator_config is None:
            data_input_predicator_config = {}
        if model_runnable_predicator_config is None:
            model_runnable_predicator_config = {}
        return {
            "data_input_predicator_filepath": data_input_predicator_filepath,
            "data_input_predicator_class_name": data_input_predicator_class_name,
            "data_input_predicator_config": data_input_predicator_config,
            "model_runnable_predicator_filepath": model_runnable_predicator_filepath,
            "model_runnable_predicator_class_name": model_runnable_predicator_class_name,
            "model_runnable_predicator_config": model_runnable_predicator_config,
        }

    def __call__(self, model_path):
        """Symbolize data-input dims for *model_path* and persist the result."""
        tensor_metas = self._get_tensor_metas(model_path)
        dyn_dim_cstr = make_dyn_dim_cstr_from_tensor_metas(tensor_metas)

        def data_input_predicator(input_var_name):
            return self.data_input_predicator(model_path, input_var_name)

        def is_dyn_dim_cstr_feasible(dyn_dim_cstr):
            return self._is_dyn_dim_cstr_feasible(
                model_path, tensor_metas, dyn_dim_cstr
            )

        dyn_dim_cstr = symbolize_data_input_dims(
            dyn_dim_cstr,
            is_data_input=data_input_predicator,
            is_dyn_dim_cstr_feasible=is_dyn_dim_cstr_feasible,
        )
        self._save_dyn_dim_cstr(dyn_dim_cstr, model_path)

    def _save_dyn_dim_cstr(self, dyn_dim_cstr, model_path):
        # Serialize the constraints as Python source next to the model files.
        cstr_code = dyn_dim_cstr.serialize_to_py_str()
        with open(os.path.join(model_path, "input_tensor_constraints.py"), "w") as fp:
            fp.write(cstr_code)

    def _get_tensor_metas(self, model_path):
        # Inputs first, then weights; callers rely on this concatenation order.
        make = TensorMeta.unserialize_from_py_file
        return [
            *make(os.path.join(model_path, "input_meta.py")),
            *make(os.path.join(model_path, "weight_meta.py")),
        ]

    def _is_dyn_dim_cstr_feasible(
        self, model_path, tensor_metas, dyn_dim_cstr: DynamicDimConstraints
    ):
        """Check feasibility by materializing a scratch model dir and running it.

        Builds a temporary copy of the model with metas reified from
        *dyn_dim_cstr*, then asks the model-runnable predicator.
        """
        # Deep-copy so the caller's metas are never mutated.
        tensor_metas = copy.deepcopy(tensor_metas)
        update_tensor_metas_by_dyn_dim_cstr(tensor_metas, dyn_dim_cstr)
        weight_meta_code = "\n".join(
            tensor_meta.serialize_to_py_str() for tensor_meta in tensor_metas
        )
        with tempfile.TemporaryDirectory() as tmpdir:
            for filename in ["graph_net.json", "model.py"]:
                # Read then write via context managers: the original
                # `f.write(open(...).read())` leaked the source file handle.
                with open(os.path.join(model_path, filename)) as src:
                    content = src.read()
                with open(os.path.join(tmpdir, filename), "w") as dst:
                    dst.write(content)
            # All metas (inputs + weights) go into weight_meta.py; input_meta.py
            # is intentionally emptied — presumably the runner reads only
            # weight_meta.py here; TODO confirm.
            with open(os.path.join(tmpdir, "input_meta.py"), "w") as f:
                f.write("")
            with open(os.path.join(tmpdir, "weight_meta.py"), "w") as f:
                f.write(weight_meta_code)
            return self.model_runnable_predicator(tmpdir)
| 102 | + |
| 103 | + |
def update_tensor_metas_by_dyn_dim_cstr(
    tensor_metas: list[TensorMeta], dyn_dim_cstr: DynamicDimConstraints
):
    """Overwrite each meta's shape and max value with the reified ones.

    Mutates *tensor_metas* in place; the i-th meta receives the i-th reified
    shape / max value from *dyn_dim_cstr*. Lengths must match exactly.
    """
    reified_shapes = dyn_dim_cstr.get_reified_input_shapes()
    reified_max_values = dyn_dim_cstr.get_reified_input_max_values()
    assert len(tensor_metas) == len(reified_shapes)
    assert len(tensor_metas) == len(reified_max_values)
    for meta, shape, max_val in zip(tensor_metas, reified_shapes, reified_max_values):
        meta.shape = shape
        meta.max_val = max_val
| 114 | + |
| 115 | + |
def make_dyn_dim_cstr_from_tensor_metas(tensor_metas: list[TensorMeta]):
    """Build DynamicDimConstraints from tensor metas.

    Pairs every meta's shape (and max value) with its name, preserving the
    order of *tensor_metas*.
    """
    named_shapes = [(meta.shape, meta.name) for meta in tensor_metas]
    named_max_values = [(meta.max_val, meta.name) for meta in tensor_metas]
    return DynamicDimConstraints.make_by_named_inputs(
        named_shapes=named_shapes,
        named_max_values=named_max_values,
    )
4 | 133 |
|
5 | 134 |
|
def symbolize_data_input_dims(
    dyn_dim_cstr: DynamicDimConstraints,
    is_data_input: Callable[[str], bool],
    is_dyn_dim_cstr_feasible: Callable[[DynamicDimConstraints], bool],
) -> DynamicDimConstraints | None:
    """Symbolize data-input dimensions as much as possible.

    For every distinct dim value observed on data inputs, try to replace it
    with a symbol; a symbolization is kept only if the constraint system
    accepts an alternative example value (``dim + 1``) and the resulting
    model is still runnable per *is_dyn_dim_cstr_feasible*.

    is_data_input: called with an input variable name, returns True if that
        input carries data (as opposed to e.g. weights).
    Returns the constraints with all accepted symbolizations applied; when no
    dim can be symbolized, the input constraints are returned unchanged.
    (The declared ``| None`` return is kept for caller compatibility, but
    this implementation never returns None.)
    """
    unique_dims = set()

    def collect_filter_fn(input_name, input_idx, axis, dim):
        # Record dims of data inputs only.  Returning False means
        # "do not symbolize", so this pass purely collects candidates.
        if is_data_input(input_name):
            unique_dims.add(dim)
        return False

    # Collect candidate dims into `unique_dims`; must produce no symbol.
    assert dyn_dim_cstr.symbolize(collect_filter_fn) is None
    for picked_dim in unique_dims:
        cur_dyn_dim_cstr = copy.deepcopy(dyn_dim_cstr)

        # Bind picked_dim as a default to avoid the late-binding-closure
        # pitfall (harmless here, but safe against future refactors).
        def filter_fn(input_name, input_idx, axis, dim, picked_dim=picked_dim):
            return is_data_input(input_name) and dim == picked_dim

        symbol = cur_dyn_dim_cstr.symbolize(filter_fn)
        if symbol is None:
            continue
        # Probe with a different example value to verify the symbol is
        # genuinely dynamic.
        sym2example_value = {symbol: picked_dim + 1}
        if not cur_dyn_dim_cstr.check_delta_symbol2example_value(sym2example_value):
            continue
        # Apply the probe value only to a scratch copy: the kept result
        # (cur_dyn_dim_cstr) retains the original example values.
        tmp_dyn_dim_cstr = copy.deepcopy(cur_dyn_dim_cstr)
        tmp_dyn_dim_cstr.update_symbol2example_value(sym2example_value)
        if not is_dyn_dim_cstr_feasible(tmp_dyn_dim_cstr):
            continue
        dyn_dim_cstr = cur_dyn_dim_cstr
    return dyn_dim_cstr
0 commit comments