@@ -91,18 +91,22 @@ def dump_gru_layer(self, f, hf):
 CuDNNGRU.dump_layer = dump_gru_layer
 GRU.dump_layer = dump_gru_layer
 
+def dump_dense_layer_impl(name, weights, bias, activation, f, hf):
+    printVector(f, weights, name + '_weights')
+    printVector(f, bias, name + '_bias')
+    f.write('const DenseLayer {} = {{\n   {}_bias,\n   {}_weights,\n   {}, {}, ACTIVATION_{}\n}};\n\n'
+            .format(name, name, name, weights.shape[0], weights.shape[1], activation))
+    hf.write('#define {}_OUT_SIZE {}\n'.format(name.upper(), weights.shape[1]))
+    hf.write('extern const DenseLayer {};\n\n'.format(name));
+
 def dump_dense_layer(self, f, hf):
     name = self.name
     print("printing layer " + name + " of type " + self.__class__.__name__)
     weights = self.get_weights()
-    printVector(f, weights[0], name + '_weights')
-    printVector(f, weights[-1], name + '_bias')
     activation = self.activation.__name__.upper()
-    f.write('const DenseLayer {} = {{\n   {}_bias,\n   {}_weights,\n   {}, {}, ACTIVATION_{}\n}};\n\n'
-            .format(name, name, name, weights[0].shape[0], weights[0].shape[1], activation))
-    hf.write('#define {}_OUT_SIZE {}\n'.format(name.upper(), weights[0].shape[1]))
-    hf.write('extern const DenseLayer {};\n\n'.format(name));
+    dump_dense_layer_impl(name, weights[0], weights[1], activation, f, hf)
     return False
+
 Dense.dump_layer = dump_dense_layer
 
 def dump_mdense_layer(self, f, hf):
@@ -141,15 +145,18 @@ def dump_conv1d_layer(self, f, hf):
 Conv1D.dump_layer = dump_conv1d_layer
 
 
+def dump_embedding_layer_impl(name, weights, f, hf):
+    printVector(f, weights, name + '_weights')
+    f.write('const EmbeddingLayer {} = {{\n   {}_weights,\n   {}, {}\n}};\n\n'
+            .format(name, name, weights.shape[0], weights.shape[1]))
+    hf.write('#define {}_OUT_SIZE {}\n'.format(name.upper(), weights.shape[1]))
+    hf.write('extern const EmbeddingLayer {};\n\n'.format(name));
+
 def dump_embedding_layer(self, f, hf):
     name = self.name
     print("printing layer " + name + " of type " + self.__class__.__name__)
-    weights = self.get_weights()
-    printVector(f, weights[0], name + '_weights')
-    f.write('const EmbeddingLayer {} = {{\n   {}_weights,\n   {}, {}\n}};\n\n'
-            .format(name, name, weights[0].shape[0], weights[0].shape[1]))
-    hf.write('#define {}_OUT_SIZE {}\n'.format(name.upper(), weights[0].shape[1]))
-    hf.write('extern const EmbeddingLayer {};\n\n'.format(name));
+    weights = self.get_weights()[0]
+    dump_embedding_layer_impl(name, weights, f, hf)
     return False
 Embedding.dump_layer = dump_embedding_layer
 
@@ -178,6 +185,21 @@ def dump_embedding_layer(self, f, hf):
 hf.write('/*This file is automatically generated from a Keras model*/\n\n')
 hf.write('#ifndef RNN_DATA_H\n#define RNN_DATA_H\n\n#include "nnet.h"\n\n')
 
+embed_size = lpcnet.embed_size
+
+E = model.get_layer('embed_sig').get_weights()[0]
+W = model.layers[18].get_weights()[0][:embed_size,:]
+dump_embedding_layer_impl('gru_a_embed_sig', np.dot(E, W), f, hf)
+W = model.layers[18].get_weights()[0][embed_size:2*embed_size,:]
+dump_embedding_layer_impl('gru_a_embed_pred', np.dot(E, W), f, hf)
+E = model.get_layer('embed_exc').get_weights()[0]
+W = model.layers[18].get_weights()[0][2*embed_size:3*embed_size,:]
+dump_embedding_layer_impl('gru_a_embed_exc', np.dot(E, W), f, hf)
+W = model.layers[18].get_weights()[0][3*embed_size:,:]
+#FIXME: dump only half the biases
+b = model.layers[18].get_weights()[2]
+dump_dense_layer_impl('gru_a_dense_feature', W, b, 'LINEAR', f, hf)
+
 layer_list = []
 for i, layer in enumerate(model.layers):
     if layer.dump_layer(f, hf):