@@ -105,5 +105,107 @@ def test_check_grad(self):
            ["X"], "Y", max_relative_error=0.05, numeric_grad_delta=0.001)


+class TestCrossEntropyOp4(OpTest):
+    """Test high rank tensor cross-entropy with discrete one-hot labels.
+    """
+
+    def setUp(self):
+        self.op_type = "cross_entropy"
+        shape = [10, 2, 4]
+        ins_num = np.prod(np.array(shape))
+        class_num = 10
+
+        X_2d = randomize_probability(ins_num, class_num, dtype='float64')
+
+        label_2d = np.random.randint(0, class_num, (ins_num, 1), dtype="int64")
+        cross_entropy_2d = np.asmatrix(
+            [[-np.log(X_2d[i][label_2d[i][0]])] for i in range(X_2d.shape[0])],
+            dtype="float64")
+
+        X = X_2d.reshape(shape + [class_num])
+        label = label_2d.reshape(shape + [1])
+        cross_entropy = np.array(cross_entropy_2d).reshape(shape + [1])
+
+        self.inputs = {"X": X, "Label": label}
+        self.outputs = {"Y": cross_entropy}
+        self.attrs = {"soft_label": False}
+
+    def test_check_output(self):
+        self.check_output()
+
+    def test_check_grad(self):
+        self.check_grad(["X"], "Y", numeric_grad_delta=0.001)
+
+
+class TestCrossEntropyOp5(OpTest):
+    """Test high rank tensor cross-entropy with vectorized soft labels.
+    """
+
+    def setUp(self):
+        self.op_type = "cross_entropy"
+        shape = [4, 3]
+        ins_num = np.prod(np.array(shape))
+        class_num = 37
+
+        X_2d = randomize_probability(ins_num, class_num)
+        label_2d = np.random.uniform(0.1, 1.0,
+                                     [ins_num, class_num]).astype("float32")
+        label_2d /= label_2d.sum(axis=1, keepdims=True)
+        cross_entropy_2d = (-label_2d * np.log(X_2d)).sum(
+            axis=1, keepdims=True).astype("float32")
+
+        X = X_2d.reshape(shape + [class_num])
+        label = label_2d.reshape(shape + [class_num])
+        cross_entropy = np.array(cross_entropy_2d).reshape(shape + [1])
+
+        self.inputs = {"X": X, "Label": label}
+        self.outputs = {"Y": cross_entropy}
+        self.attrs = {"soft_label": True}
+
+    def test_check_output(self):
+        self.check_output()
+
+    def test_check_grad(self):
+        self.check_grad(
+            ["X"], "Y", max_relative_error=0.05, numeric_grad_delta=0.001)
+
+
+class TestCrossEntropyOp6(OpTest):
+    """Test high rank tensor cross-entropy with vectorized one-hot representation of labels.
+    """
+
+    def setUp(self):
+        self.op_type = "cross_entropy"
+        shape = [4, 3, 2]
+        ins_num = np.prod(np.array(shape))
+        class_num = 17
+
+        X_2d = randomize_probability(ins_num, class_num)
+        label_index_2d = np.random.randint(
+            0, class_num, (ins_num), dtype="int32")
+        label_2d = np.zeros(X_2d.shape)
+        label_2d[np.arange(ins_num), label_index_2d] = 1
+
+        cross_entropy_2d = np.asmatrix(
+            [[-np.log(X_2d[i][label_index_2d[i]])]
+             for i in range(X_2d.shape[0])],
+            dtype="float32")
+
+        X = X_2d.reshape(shape + [class_num])
+        label = label_2d.reshape(shape + [class_num])
+        cross_entropy = np.array(cross_entropy_2d).reshape(shape + [1])
+
+        self.inputs = {"X": X, "Label": label.astype(np.float32)}
+        self.outputs = {"Y": cross_entropy}
+        self.attrs = {"soft_label": True}
+
+    def test_check_output(self):
+        self.check_output()
+
+    def test_check_grad(self):
+        self.check_grad(
+            ["X"], "Y", max_relative_error=0.05, numeric_grad_delta=0.001)
+
+
 if __name__ == "__main__":
     unittest.main()
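
For reference, the new tests all follow the same pattern: compute the expected cross-entropy on a flattened 2D batch, then reshape inputs, labels, and the expected output to the high-rank shape. A minimal standalone sketch of that reference math (variable names `probs`, `hard_labels`, and `soft_labels` are illustrative, not from the patch):

    import numpy as np

    batch, class_num = 6, 4
    probs = np.random.uniform(0.1, 1.0, (batch, class_num))
    probs /= probs.sum(axis=1, keepdims=True)  # each row is a probability distribution

    # Hard (discrete) labels, soft_label=False: Y[i] = -log(X[i, label[i]])
    hard_labels = np.random.randint(0, class_num, (batch, 1), dtype="int64")
    hard_ce = -np.log(probs[np.arange(batch), hard_labels[:, 0]]).reshape(batch, 1)

    # Soft labels, soft_label=True: Y[i] = -sum_j label[i, j] * log(X[i, j])
    soft_labels = np.random.uniform(0.1, 1.0, (batch, class_num))
    soft_labels /= soft_labels.sum(axis=1, keepdims=True)
    soft_ce = (-soft_labels * np.log(probs)).sum(axis=1, keepdims=True)

Reshaping `probs`, the labels, and the resulting `(batch, 1)` cross-entropy to an arbitrary leading shape (e.g. `[10, 2, 4]`) yields the high-rank cases exercised by TestCrossEntropyOp4 through TestCrossEntropyOp6.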