@@ -140,148 +140,167 @@ def hsigmoidWithCustomTree(x, w, ptable, pcode, label, bias, num_classes):
140
140
return pre_output , out
141
141
142
142
143
- # class TestHSigmoidOp(OpTest):
144
- # def setUp(self):
145
- # self.op_type = "hierarchical_sigmoid"
146
- # num_classes = 6
147
- # feature_size = 8
148
- # batch_size = 4
149
- # x = np.random.random((batch_size, feature_size)).astype("float32") * 2
150
- # w = np.random.random(
151
- # (num_classes - 1, feature_size)).astype("float32") * 2
152
- # label = np.random.randint(0, num_classes, (batch_size, 1))
153
- # bias = np.random.random((1, num_classes - 1)).astype("float32")
154
- # self.attrs = {'num_classes': num_classes, 'is_sparse': False}
155
- # self.inputs = {'X': x, 'W': w, 'Label': label, 'Bias': bias}
156
- # pre_output, out = hsigmoid(x, w, label, bias, num_classes)
157
- # self.outputs = {'PreOut': pre_output, 'Out': out}
158
-
159
- # def test_check_output(self):
160
- # self.check_output()
161
-
162
- # def test_check_grad(self):
163
- # self.check_grad(['Bias', 'X', 'W'], ['Out'], no_grad_set=set('Label'))
164
-
165
- # class TestHSigmoidOpSparse(OpTest):
166
- # def setUp(self ):
167
- # self.op_type = "hierarchical_sigmoid"
168
- # num_classes = 6 #using 1,2,3,4,5,6 to build a huffman tree and select 1,2,5,6 as sample
169
- # feature_size = 8
170
- # batch_size = 4
171
- # x = np.random.random((batch_size, feature_size)).astype("float32") * 2
172
- # w = np.random.random(
173
- # ( num_classes - 1, feature_size)).astype("float32") * 2
174
- # label = np.array([0, 1, 4, 5])
175
- # ptable = np.array(
176
- # [(0, 2, -1, -1, -1), (0, 1, 3, -1, -1), (0, 1, 4, -1, -1),
177
- # (0, 2, -1, -1,
178
- # -1)]) #np.array to store 1,2,5,6s' non-leaf path(root -> leaf)
179
- # pcode = np.array([(0, 0, -1, -1, -1), (1, 1, 1, -1, -1), (
180
- # 1, 0, 0, -1, -1), (0, 1, -1, -1, -1)]) #np.array to store
181
- # bias = np.random.random((1, num_classes - 1)).astype("float32")
182
- # self.attrs = {'num_classes': num_classes, 'is_sparse': True}
183
- # self.inputs = {
184
- # 'X': x,
185
- # 'W': w,
186
- # 'PTable': ptable,
187
- # 'PCode': pcode,
188
- # 'Label': label,
189
- # 'Bias': bias
190
- # }
191
- # pre_output, out = hsigmoidWithCustomTree(x, w, ptable, pcode, label,
192
- # bias, num_classes)
193
- # self.outputs = {'PreOut': pre_output, 'Out': out}
194
-
195
- # def test_check_output(self):
196
- # print("checking output in CostumTree")
197
- # self.check_output()
198
-
199
-
200
- class TestHSigmoidOpWithSparseGrad ():
201
- def hs_net_conf (self ):
202
- emb = fluid .layers .data (name = "x" , shape = [3 ], dtype = 'int64' )
143
class TestHSigmoidOp(OpTest):
    """Dense hierarchical_sigmoid, checked against the numpy reference.

    Builds a random default (Huffman-free) tree case and compares the op's
    forward output and gradients with the python `hsigmoid` reference.
    """

    def setUp(self):
        self.op_type = "hierarchical_sigmoid"
        num_classes, feature_size, batch_size = 6, 8, 4
        # Random inputs scaled to [0, 2) to keep pre-activations non-trivial.
        features = np.random.random(
            (batch_size, feature_size)).astype("float32") * 2
        weights = np.random.random(
            (num_classes - 1, feature_size)).astype("float32") * 2
        labels = np.random.randint(0, num_classes, (batch_size, 1))
        bias_vec = np.random.random((1, num_classes - 1)).astype("float32")
        self.attrs = {'num_classes': num_classes, 'is_sparse': False}
        self.inputs = {
            'X': features,
            'W': weights,
            'Label': labels,
            'Bias': bias_vec
        }
        # Expected outputs come from the python reference implementation.
        pre_out, out = hsigmoid(features, weights, labels, bias_vec,
                                num_classes)
        self.outputs = {'PreOut': pre_out, 'Out': out}

    def test_check_output(self):
        self.check_output()

    def test_check_grad(self):
        # Label is an integer input, so it is excluded from the grad check.
        self.check_grad(['Bias', 'X', 'W'], ['Out'], no_grad_set=set('Label'))
166
class TestHSigmoidOpSparse(OpTest):
    """hierarchical_sigmoid with a custom tree and sparse weight updates.

    Uses classes 1..6 arranged as a huffman tree; samples 1, 2, 5, 6 and
    feeds their root-to-leaf paths (PTable) and left/right codes (PCode)
    explicitly, then compares against the custom-tree python reference.
    """

    def setUp(self):
        self.op_type = "hierarchical_sigmoid"
        #using 1,2,3,4,5,6 to build a huffman tree and select 1,2,5,6 as sample
        num_classes = 6
        feature_size = 8
        batch_size = 4
        features = np.random.random(
            (batch_size, feature_size)).astype("float32")
        weights = np.random.random(
            (num_classes - 1, feature_size)).astype("float32")
        labels = np.array([0, 1, 4, 5])
        #np.array to store 1,2,5,6s' non-leaf path(root -> leaf)
        path_table = np.array([(0, 2, -1, -1, -1), (0, 1, 3, -1, -1),
                               (0, 1, 4, -1, -1), (0, 2, -1, -1, -1)])
        #np.array to store the left/right code along each path
        path_code = np.array([(0, 0, -1, -1, -1), (1, 1, 1, -1, -1),
                              (1, 0, 0, -1, -1), (0, 1, -1, -1, -1)])
        bias_vec = np.random.random((1, num_classes - 1)).astype("float32")
        self.attrs = {'num_classes': num_classes, 'is_sparse': True}
        self.inputs = {
            'X': features,
            'W': weights,
            'PTable': path_table,
            'PCode': path_code,
            'Label': labels,
            'Bias': bias_vec
        }
        pre_out, out = hsigmoidWithCustomTree(features, weights, path_table,
                                              path_code, labels, bias_vec,
                                              num_classes)
        self.outputs = {'PreOut': pre_out, 'Out': out}

    def test_check_output(self):
        print("checking output in CostumTree")
        self.check_output()
200
class TestHSigmoidOpWithSparseGrad(unittest.TestCase):
    """Checks that sparse and dense gradient updates of hsigmoid agree.

    Trains the same tiny custom-tree network twice — once with
    ``is_sparse=False`` and once with ``is_sparse=True`` — under a fixed
    random seed, and requires the per-step losses to be identical.
    """

    def hs_net_conf(self, is_sparse):
        """Build the network; returns (avg_cost, feed_var_list).

        ``is_sparse`` selects sparse vs. dense gradient updates in the
        hsigmoid layer only.
        """
        input_word = fluid.layers.data(name="x", shape=[1], dtype='int64')
        ptable = fluid.layers.data(name='ptable', shape=[3], dtype='int64')
        pcode = fluid.layers.data(name='pcode', shape=[3], dtype='int64')
        label = fluid.layers.data(name='label', shape=[1], dtype='int64')

        data_list = [input_word, ptable, pcode, label]

        # NOTE(review): the embedding is always dense here; only the
        # hsigmoid layer toggles sparsity — confirm this is intentional.
        emb = fluid.layers.embedding(
            input=input_word,
            is_sparse=False,
            size=[3, 3],
            param_attr=fluid.ParamAttr(initializer=fluid.initializer.Normal(
                scale=1 / math.sqrt(3))))

        cost = fluid.layers.hsigmoid(
            input=emb,
            label=label,
            non_leaf_num=3,
            ptable=ptable,
            pcode=pcode,
            is_costum=True,
            is_sparse=is_sparse)

        avg_cost = fluid.layers.reduce_mean(cost)

        return avg_cost, data_list

    def training_test(self, is_sparse):
        """Run 10 SGD steps on fixed data; return the list of fetched losses."""
        with fluid.program_guard(fluid.Program(), fluid.Program()):
            start_up = fluid.default_startup_program()
            start_up.random_seed = 1  # Fix random seed so both runs match.
            x = np.arange(6).reshape(6)
            ptable = np.array([(1, 2, -1), (1, 2, -1)])
            pcode = np.array([(1, 0, -1), (0, 0, -1)])
            label = np.array([1, 4])

            loss, data_list = self.hs_net_conf(is_sparse)
            optimizer = fluid.optimizer.SGD(learning_rate=1e-3)
            optimizer.minimize(loss)

            main_program = fluid.default_main_program()
            place = fluid.CPUPlace()
            feeder = fluid.DataFeeder(feed_list=data_list, place=place)
            exe = fluid.Executor(place)

            exe.run(start_up)
            result = list()
            for i in range(10):
                # Alternate between the two fixed samples each step.
                data = [([[x[i % 2]]], [list(ptable[i % 2])],
                         [list(pcode[i % 2])], [label[i % 2]])]

                loss_val = exe.run(main_program,
                                   feed=feeder.feed(data),
                                   fetch_list=[loss])
                result.append(loss_val)
            return result

    def test_hs_grad_with_sparse(self):
        dense_result = self.training_test(is_sparse=False)
        sparse_result = self.training_test(is_sparse=True)
        # Compare losses step by step with explicit unittest assertions:
        # a bare `assert` is stripped under `python -O`, and `==` between
        # lists of numpy arrays is fragile (elementwise truth ambiguity).
        self.assertEqual(len(dense_result), len(sparse_result))
        for dense_loss, sparse_loss in zip(dense_result, sparse_result):
            self.assertTrue(np.array_equal(dense_loss[0], sparse_loss[0]))
266
class TestHSigmoidOpWithCostumTree(OpTest):
    """hierarchical_sigmoid with a custom tree and dense weight updates.

    Same tree layout as the sparse test (classes 1..6, samples 1, 2, 5, 6)
    but with ``is_sparse=False``, and additionally checks gradients.
    """

    def setUp(self):
        self.op_type = "hierarchical_sigmoid"
        #using 1,2,3,4,5,6 to build a huffman tree and select 1,2,5,6 as sample
        num_classes = 6
        feature_size = 8
        batch_size = 4
        features = np.random.random(
            (batch_size, feature_size)).astype("float32") * 2
        weights = np.random.random(
            (num_classes - 1, feature_size)).astype("float32") * 2
        labels = np.array([0, 1, 4, 5])
        #np.array to store 1,2,5,6s' non-leaf path(root -> leaf)
        path_table = np.array([(0, 2, -1, -1, -1), (0, 1, 3, -1, -1),
                               (0, 1, 4, -1, -1), (0, 2, -1, -1, -1)])
        #np.array to store the left/right code along each path
        path_code = np.array([(0, 0, -1, -1, -1), (1, 1, 1, -1, -1),
                              (1, 0, 0, -1, -1), (0, 1, -1, -1, -1)])
        bias_vec = np.random.random((1, num_classes - 1)).astype("float32")
        self.attrs = {'num_classes': num_classes, 'is_sparse': False}
        self.inputs = {
            'X': features,
            'W': weights,
            'PTable': path_table,
            'PCode': path_code,
            'Label': labels,
            'Bias': bias_vec
        }
        pre_out, out = hsigmoidWithCustomTree(features, weights, path_table,
                                              path_code, labels, bias_vec,
                                              num_classes)
        self.outputs = {'PreOut': pre_out, 'Out': out}

    def test_check_output(self):
        print("checking output in CostumTree")
        self.check_output()

    def test_check_grad(self):
        print("checking outputGrad in CostumTree")
        self.check_grad(['Bias', 'X', 'W'], ['Out'], no_grad_set=set('Label'))
303
+
# Run all test cases in this module when executed directly.
if __name__ == '__main__':
    unittest.main()
0 commit comments