|
6 | 6 | import tensorlayer as tl |
7 | 7 |
|
8 | 8 |
|
class PReLU_Layer_Test(unittest.TestCase):
    """Shape and parameter-count checks for ``tl.layers.PReluLayer``.

    Builds two stacked networks once per class:
      * ``net1`` — Dense(10) followed by a per-channel PReLU (one alpha per unit).
      * ``net2`` — Dense(30) on top of ``net1``, followed by a channel-shared
        PReLU (a single scalar alpha).
    """

    @classmethod
    def setUpClass(cls):
        # Placeholder input: batch of 30-dim vectors (batch size unspecified).
        placeholder = tf.placeholder(tf.float32, shape=[None, 30])

        input_layer = tl.layers.InputLayer(placeholder, name='input')

        dense_1 = tl.layers.DenseLayer(input_layer, n_units=10, name='dense_1')
        # Per-channel PReLU: expected to add one alpha parameter per unit.
        cls.net1 = tl.layers.PReluLayer(dense_1, name='prelu_1')

        cls.net1.print_layers()
        cls.net1.print_params(False)

        dense_2 = tl.layers.DenseLayer(cls.net1, n_units=30, name='dense_2')
        # Channel-shared PReLU: expected to add a single scalar alpha.
        cls.net2 = tl.layers.PReluLayer(dense_2, channel_shared=True, name='prelu_2')

        cls.net2.print_layers()
        cls.net2.print_params(False)

    @classmethod
    def tearDownClass(cls):
        # Clear the default graph so later test classes start from scratch.
        tf.reset_default_graph()

    def test_net1(self):
        """Dense(10) + per-channel PReLU: 3 layers, 3 params, alpha shape [10]."""
        self.assertEqual(len(self.net1.all_layers), 3)
        self.assertEqual(len(self.net1.all_params), 3)
        # 30*10 weights + 10 biases + 10 alphas = 320 parameters.
        self.assertEqual(self.net1.count_params(), 320)
        self.assertEqual(self.net1.outputs.get_shape().as_list()[1:], [10])

        alpha_shape = self.net1.all_params[-1].get_shape().as_list()
        self.assertEqual(alpha_shape, [10])

    def test_net2(self):
        """Stacked Dense(30) + shared PReLU: 5 layers, 6 params, scalar alpha."""
        self.assertEqual(len(self.net2.all_layers), 5)
        self.assertEqual(len(self.net2.all_params), 6)
        # 320 (net1) + 10*30 weights + 30 biases + 1 shared alpha = 651.
        self.assertEqual(self.net2.count_params(), 651)
        self.assertEqual(self.net2.outputs.get_shape().as_list()[1:], [30])

        alpha_shape = self.net2.all_params[-1].get_shape().as_list()
        self.assertEqual(alpha_shape, [1])
| 51 | + |
| 52 | + |
class PRelu6_Layer_Test(unittest.TestCase):
    """Shape and parameter-count checks for ``tl.layers.PRelu6Layer``.

    Mirrors the PReLU test: ``net1`` uses a per-channel PReLU6 after a
    Dense(10) layer; ``net2`` stacks a Dense(30) layer plus a channel-shared
    PReLU6 (single scalar alpha) on top of ``net1``.
    """

    @classmethod
    def setUpClass(cls):
        # Placeholder input: batch of 30-dim vectors (batch size unspecified).
        placeholder = tf.placeholder(tf.float32, shape=[None, 30])

        input_layer = tl.layers.InputLayer(placeholder, name='input')

        dense_1 = tl.layers.DenseLayer(input_layer, n_units=10, name='dense_1')
        # Per-channel PReLU6: expected to add one alpha parameter per unit.
        cls.net1 = tl.layers.PRelu6Layer(dense_1, name='prelu6_1')

        cls.net1.print_layers()
        cls.net1.print_params(False)

        dense_2 = tl.layers.DenseLayer(cls.net1, n_units=30, name='dense_2')
        # Channel-shared PReLU6: expected to add a single scalar alpha.
        cls.net2 = tl.layers.PRelu6Layer(dense_2, channel_shared=True, name='prelu6_2')

        cls.net2.print_layers()
        cls.net2.print_params(False)

    @classmethod
    def tearDownClass(cls):
        # Clear the default graph so later test classes start from scratch.
        tf.reset_default_graph()

    def test_net1(self):
        """Dense(10) + per-channel PReLU6: 3 layers, 3 params, alpha shape [10]."""
        self.assertEqual(len(self.net1.all_layers), 3)
        self.assertEqual(len(self.net1.all_params), 3)
        # 30*10 weights + 10 biases + 10 alphas = 320 parameters.
        self.assertEqual(self.net1.count_params(), 320)
        self.assertEqual(self.net1.outputs.get_shape().as_list()[1:], [10])

        alpha_shape = self.net1.all_params[-1].get_shape().as_list()
        self.assertEqual(alpha_shape, [10])

    def test_net2(self):
        """Stacked Dense(30) + shared PReLU6: 5 layers, 6 params, scalar alpha."""
        self.assertEqual(len(self.net2.all_layers), 5)
        self.assertEqual(len(self.net2.all_params), 6)
        # 320 (net1) + 10*30 weights + 30 biases + 1 shared alpha = 651.
        self.assertEqual(self.net2.count_params(), 651)
        self.assertEqual(self.net2.outputs.get_shape().as_list()[1:], [30])

        alpha_shape = self.net2.all_params[-1].get_shape().as_list()
        self.assertEqual(alpha_shape, [1])
| 95 | + |
| 96 | + |
class PTRelu6_Layer_Test(unittest.TestCase):
    """Shape and parameter-count checks for ``tl.layers.PTRelu6Layer``.

    PTReLU6 is expected to learn two parameter sets per layer (the asserted
    param counts are double those of PReLU6): ``net1`` uses a per-channel
    variant after Dense(10); ``net2`` stacks Dense(30) plus a channel-shared
    variant on top of ``net1``.
    """

    @classmethod
    def setUpClass(cls):
        # Placeholder input: batch of 30-dim vectors (batch size unspecified).
        placeholder = tf.placeholder(tf.float32, shape=[None, 30])

        input_layer = tl.layers.InputLayer(placeholder, name='input')

        dense_1 = tl.layers.DenseLayer(input_layer, n_units=10, name='dense_1')
        # Per-channel PTReLU6: expected to add two parameter tensors of size 10.
        cls.net1 = tl.layers.PTRelu6Layer(dense_1, name='ptrelu6_1')

        cls.net1.print_layers()
        cls.net1.print_params(False)

        dense_2 = tl.layers.DenseLayer(cls.net1, n_units=30, name='dense_2')
        # Channel-shared PTReLU6: expected to add two scalar parameters.
        cls.net2 = tl.layers.PTRelu6Layer(dense_2, channel_shared=True, name='ptrelu6_2')

        cls.net2.print_layers()
        cls.net2.print_params(False)

    @classmethod
    def tearDownClass(cls):
        # Clear the default graph so later test classes start from scratch.
        tf.reset_default_graph()

    def test_net1(self):
        """Dense(10) + per-channel PTReLU6: 3 layers, 4 params, last shape [10]."""
        self.assertEqual(len(self.net1.all_layers), 3)
        self.assertEqual(len(self.net1.all_params), 4)
        # 30*10 weights + 10 biases + 2*10 activation params = 330.
        self.assertEqual(self.net1.count_params(), 330)
        self.assertEqual(self.net1.outputs.get_shape().as_list()[1:], [10])

        act_param_shape = self.net1.all_params[-1].get_shape().as_list()
        self.assertEqual(act_param_shape, [10])

    def test_net2(self):
        """Stacked Dense(30) + shared PTReLU6: 5 layers, 8 params, scalar last."""
        self.assertEqual(len(self.net2.all_layers), 5)
        self.assertEqual(len(self.net2.all_params), 8)
        # 330 (net1) + 10*30 weights + 30 biases + 2 shared params = 662.
        self.assertEqual(self.net2.count_params(), 662)
        self.assertEqual(self.net2.outputs.get_shape().as_list()[1:], [30])

        act_param_shape = self.net2.all_params[-1].get_shape().as_list()
        self.assertEqual(act_param_shape, [1])
52 | 139 |
|
53 | 140 |
|
54 | 141 | if __name__ == '__main__': |
|
0 commit comments