|
21 | 21 | from scipy.special import expit, erf
|
22 | 22 | import paddle
|
23 | 23 | import paddle.fluid as fluid
|
24 |
| -import paddle.nn as nn |
25 |
| -import paddle.nn.functional as functional |
26 | 24 | from paddle.fluid import compiler, Program, program_guard
|
27 | 25 |
|
28 | 26 |
|
@@ -1203,140 +1201,5 @@ def test_check_grad(self):
|
1203 | 1201 | create_test_act_fp16_class(TestSwish)
|
1204 | 1202 | create_test_act_fp16_class(TestHardSwish)
|
1205 | 1203 |
|
1206 |
| - |
1207 |
| -class TestNNReluAPI(unittest.TestCase): |
1208 |
| - def setUp(self): |
1209 |
| - self.init_data() |
1210 |
| - |
1211 |
| - def init_data(self): |
1212 |
| - self.x_shape = [10, 12] |
1213 |
| - self.x = np.random.uniform(-1, 1, self.x_shape).astype(np.float32) |
1214 |
| - self.y = self.ref_forward(self.x) |
1215 |
| - |
1216 |
| - def ref_forward(self, x): |
1217 |
| - return np.maximum(x, 0) |
1218 |
| - |
1219 |
| - def ref_backward(self, y, dy): |
1220 |
| - y_t = y.copy() |
1221 |
| - y_t[y_t > 0] = 1 |
1222 |
| - return y_t * dy |
1223 |
| - |
1224 |
| - def check_api(self, place=fluid.CPUPlace(), inplace=False): |
1225 |
| - main_program = Program() |
1226 |
| - myrelu = nn.ReLU(inplace) |
1227 |
| - with fluid.program_guard(main_program): |
1228 |
| - x = fluid.data(name='x', shape=self.x_shape) |
1229 |
| - x.stop_gradient = False |
1230 |
| - y = myrelu(x) |
1231 |
| - fluid.backward.append_backward(fluid.layers.mean(y)) |
1232 |
| - exe = fluid.Executor(place) |
1233 |
| - out = exe.run(main_program, |
1234 |
| - feed={'x': self.x}, |
1235 |
| - fetch_list=[y, y.grad_name, x.grad_name]) |
1236 |
| - self.assertTrue(np.allclose(out[0], self.y)) |
1237 |
| - self.assertTrue(np.allclose(out[2], self.ref_backward(self.y, out[1]))) |
1238 |
| - |
1239 |
| - with fluid.dygraph.guard(place): |
1240 |
| - x = fluid.dygraph.to_variable(self.x) |
1241 |
| - y = myrelu(x) |
1242 |
| - self.assertTrue(np.allclose(y.numpy(), self.y)) |
1243 |
| - |
1244 |
| - def test_check_api(self): |
1245 |
| - places = [fluid.CPUPlace()] |
1246 |
| - if core.is_compiled_with_cuda(): |
1247 |
| - places.append(fluid.CUDAPlace(0)) |
1248 |
| - for place in places: |
1249 |
| - for inplace in [True, False]: |
1250 |
| - self.check_api(place, inplace) |
1251 |
| - |
1252 |
| - |
1253 |
| -class TestNNFunctionalReluAPI(unittest.TestCase): |
1254 |
| - def setUp(self): |
1255 |
| - self.init_data() |
1256 |
| - |
1257 |
| - def init_data(self): |
1258 |
| - self.x_shape = [10, 12] |
1259 |
| - self.x = np.random.uniform(-1, 1, self.x_shape).astype(np.float32) |
1260 |
| - self.y = self.ref_forward(self.x) |
1261 |
| - |
1262 |
| - def ref_forward(self, x): |
1263 |
| - return np.maximum(x, 0) |
1264 |
| - |
1265 |
| - def test_check_api(self): |
1266 |
| - main_program = Program() |
1267 |
| - with fluid.program_guard(main_program): |
1268 |
| - x = fluid.data(name='x', shape=self.x_shape) |
1269 |
| - y = functional.relu(x) |
1270 |
| - exe = fluid.Executor(fluid.CPUPlace()) |
1271 |
| - out = exe.run(main_program, feed={'x': self.x}, fetch_list=[y]) |
1272 |
| - self.assertTrue(np.allclose(out[0], self.y)) |
1273 |
| - |
1274 |
| - |
1275 |
| -class TestNNSigmoidAPI(unittest.TestCase): |
1276 |
| - def setUp(self): |
1277 |
| - self.init_data() |
1278 |
| - |
1279 |
| - def init_data(self): |
1280 |
| - self.x_shape = [10, 15] |
1281 |
| - self.x = np.random.uniform(-1, 1, self.x_shape).astype(np.float32) |
1282 |
| - self.y = self.ref_forward(self.x) |
1283 |
| - |
1284 |
| - def ref_forward(self, x): |
1285 |
| - return 1 / (1 + np.exp(-x)) |
1286 |
| - |
1287 |
| - def ref_backward(self, y, dy): |
1288 |
| - return dy * y * (1 - y) |
1289 |
| - |
1290 |
| - def check_api(self, place=fluid.CPUPlace(), inplace=False): |
1291 |
| - main_program = Program() |
1292 |
| - mysigmoid = nn.Sigmoid(inplace) |
1293 |
| - with fluid.program_guard(main_program): |
1294 |
| - x = fluid.data(name='x', shape=self.x_shape) |
1295 |
| - x.stop_gradient = False |
1296 |
| - y = mysigmoid(x) |
1297 |
| - fluid.backward.append_backward(fluid.layers.mean(y)) |
1298 |
| - exe = fluid.Executor(place) |
1299 |
| - out = exe.run(main_program, |
1300 |
| - feed={'x': self.x}, |
1301 |
| - fetch_list=[y, y.grad_name, x.grad_name]) |
1302 |
| - self.assertTrue(np.allclose(out[0], self.y)) |
1303 |
| - self.assertTrue(np.allclose(out[2], self.ref_backward(self.y, out[1]))) |
1304 |
| - |
1305 |
| - with fluid.dygraph.guard(place): |
1306 |
| - x = fluid.dygraph.to_variable(self.x) |
1307 |
| - y = mysigmoid(x) |
1308 |
| - self.assertTrue(np.allclose(y.numpy(), self.y)) |
1309 |
| - |
1310 |
| - def test_check_api(self): |
1311 |
| - places = [fluid.CPUPlace()] |
1312 |
| - if core.is_compiled_with_cuda(): |
1313 |
| - places.append(fluid.CUDAPlace(0)) |
1314 |
| - for place in places: |
1315 |
| - for inplace in [True, False]: |
1316 |
| - self.check_api(place, inplace) |
1317 |
| - |
1318 |
| - |
1319 |
| -class TestNNFunctionalSigmoidAPI(unittest.TestCase): |
1320 |
| - def setUp(self): |
1321 |
| - self.init_data() |
1322 |
| - |
1323 |
| - def init_data(self): |
1324 |
| - self.x_shape = [10, 15] |
1325 |
| - self.x = np.random.uniform(-1, 1, self.x_shape).astype(np.float32) |
1326 |
| - self.y = self.ref_forward(self.x) |
1327 |
| - |
1328 |
| - def ref_forward(self, x): |
1329 |
| - return 1 / (1 + np.exp(-x)) |
1330 |
| - |
1331 |
| - def test_check_api(self): |
1332 |
| - main_program = Program() |
1333 |
| - with fluid.program_guard(main_program): |
1334 |
| - x = fluid.data(name='x', shape=self.x_shape) |
1335 |
| - y = functional.sigmoid(x) |
1336 |
| - exe = fluid.Executor(fluid.CPUPlace()) |
1337 |
| - out = exe.run(main_program, feed={'x': self.x}, fetch_list=[y]) |
1338 |
| - self.assertTrue(np.allclose(out[0], self.y)) |
1339 |
| - |
1340 |
| - |
# Script entry point: discover and run every unittest case in this module.
if __name__ == "__main__":
    unittest.main()
|
0 commit comments