from deepmd.env import tf

def transform(args):
    # Load the freshly initialized (raw) graph and the previously trained (old) graph.
    new_graph = load_graph(args.raw_model)
    old_graph = load_graph(args.old_model)
    print("%d ops in the raw graph\n%d ops in the old graph" % (len(new_graph.node), len(old_graph.node)))
    transform_node = load_data(new_graph, old_graph)
    # Copy the trained tensors of the matching nodes from the old graph into the raw graph.
    for node in new_graph.node:
        if node.name in transform_node:
            print("%s is passed from old graph to raw graph" % node.name)
            node.attr["value"].tensor.CopyFrom(transform_node[node.name].attr["value"].tensor)
    with tf.gfile.GFile(args.output, mode='wb') as f:
        f.write(new_graph.SerializeToString())
    print("the output model is saved in %s" % args.output)

def load_graph(graphName):
    # Parse a frozen model file into a GraphDef; importing it into a scratch
    # Graph also verifies that the protobuf describes a valid TensorFlow graph.
    graph_def = tf.GraphDef()
    with open(graphName, "rb") as f:
        graph_def.ParseFromString(f.read())
    with tf.Graph().as_default():
        tf.import_graph_def(graph_def, name="")
    return graph_def

def load_data(new_graph, old_graph):
    # Collect the transferable variable nodes from both graphs and make sure
    # they agree in number, shape, and precision before any copying is done.
    new_graph_node = load_transform_node(new_graph)
    old_graph_node = load_transform_node(old_graph)
    if len(new_graph_node) != len(old_graph_node):
        raise RuntimeError("New graph and original graph have different network structures\n")
    for nodeName in old_graph_node.keys():
        check_dim(new_graph_node, old_graph_node, nodeName)
        check_precision(new_graph_node, old_graph_node, nodeName)
    return old_graph_node


def check_precision(new_graph_node, old_graph_node, nodeName):
    # The stored tensors must share the same dtype, otherwise the copied
    # weights would be reinterpreted at the wrong precision.
    new_graph_precision = new_graph_node[nodeName].attr["value"].tensor.dtype
    old_graph_precision = old_graph_node[nodeName].attr["value"].tensor.dtype
    if new_graph_precision != old_graph_precision:
        raise RuntimeError("New graph and original graph have different " + nodeName + " precision\n")

def check_dim(new_graph_node, old_graph_node, nodeName):
    # The stored tensors must have identical shapes for CopyFrom to be valid.
    new_graph_dim = new_graph_node[nodeName].attr["value"].tensor.tensor_shape
    old_graph_dim = old_graph_node[nodeName].attr["value"].tensor.tensor_shape
    if new_graph_dim != old_graph_dim:
        raise RuntimeError("New graph and original graph have different " + nodeName + " dim\n")


def load_transform_node(graph):
    transform_node = {}
    # Variable nodes that carry trained parameters: the filter (embedding)
    # network, the fitting network, and the final output layer, all for type 0.
    filter_w = ["filter_type_0/matrix_{}_0".format(i) for i in range(1, 10)]
    filter_b = ["filter_type_0/bias_{}_0".format(i) for i in range(1, 10)]
    fitting_w = ["layer_{}_type_0/matrix".format(i) for i in range(0, 10)]
    fitting_b = ["layer_{}_type_0/bias".format(i) for i in range(0, 10)]
    fitting_idt = ["layer_{}_type_0/idt".format(i) for i in range(0, 10)]
    final_layer = ["final_layer_type_0/bias", "final_layer_type_0/matrix"]
    transform_node_list = filter_w + filter_b + fitting_w + fitting_b + fitting_idt + final_layer
    for node in graph.node:
        if node.name in transform_node_list:
            transform_node[node.name] = node
    return transform_node
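
# A minimal sketch of how transform() could be exposed as a command-line entry
# point. The flag names below are assumptions for illustration; transform()
# itself only requires that args carries raw_model, old_model, and output.
if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser(
        description="Copy trained tensors from an old frozen model into a raw one.")
    parser.add_argument("--raw-model", dest="raw_model", required=True,
                        help="frozen graph whose variables will be overwritten")
    parser.add_argument("--old-model", dest="old_model", required=True,
                        help="frozen graph providing the trained variables")
    parser.add_argument("--output", dest="output", required=True,
                        help="path where the transformed model is saved")
    transform(parser.parse_args())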