|
 # See the License for the specific language governing permissions and
 # limitations under the License.

+from __future__ import print_function
 import paddle
+import paddle.fluid as fluid
 import unittest
 import numpy as np
 from op_test import OpTest
+from paddle.fluid import Program, program_guard
+from paddle.fluid.layer_helper import LayerHelper


-def ref_logsumexp(x, axis=None, keepdim=False, reduce_all=False):
-    if isinstance(axis, int):
-        axis = (axis, )
-    elif isinstance(axis, list):
-        axis = tuple(axis)
-    if reduce_all:
-        axis = None
-    out = np.log(np.exp(x).sum(axis=axis, keepdims=keepdim))
-    return out
-
-
-class TestLogsumexp(OpTest):
-    def setUp(self):
-        self.op_type = 'logsumexp'
-        self.shape = [2, 3, 4, 5]
-        self.dtype = 'float64'
-        self.axis = [-1]
-        self.keepdim = False
-        self.reduce_all = False
-        self.set_attrs()
-
-        np.random.seed(10)
-        x = np.random.uniform(-1, 1, self.shape).astype(self.dtype)
-        out = ref_logsumexp(x, self.axis, self.keepdim, self.reduce_all)
-
-        self.inputs = {'X': x}
-        self.outputs = {'Out': out}
-        self.attrs = {
-            'dim': self.axis,
-            'keep_dim': self.keepdim,
-            'reduce_all': self.reduce_all
-        }
-
-    def set_attrs(self):
-        pass
-
-    def test_check_output(self):
-        self.check_output()
-
-    def test_check_grad(self):
-        self.check_grad(['X'], ['Out'])
-
-
-class TestLogsumexp_shape(TestLogsumexp):
-    def set_attrs(self):
-        self.shape = [4, 5, 6]
-
-
-class TestLogsumexp_axis(TestLogsumexp):
-    def set_attrs(self):
-        self.axis = [0, -1]
-
-
-class TestLogsumexp_axis_all(TestLogsumexp):
-    def set_attrs(self):
-        self.axis = [0, 1, 2, 3]
-
-
-class TestLogsumexp_keepdim(TestLogsumexp):
-    def set_attrs(self):
-        self.keepdim = True
-
-
-class TestLogsumexp_reduce_all(TestLogsumexp):
-    def set_attrs(self):
-        self.reduce_all = True
-
-
-class TestLogsumexpError(unittest.TestCase):
+class TestLogSumOpError(unittest.TestCase):
     def test_errors(self):
-        with paddle.static.program_guard(paddle.static.Program()):
-            self.assertRaises(TypeError, paddle.logsumexp, 1)
-            x1 = paddle.data(name='x1', shape=[120], dtype="int32")
-            self.assertRaises(TypeError, paddle.logsumexp, x1)
-
-
-class TestLogsumexpAPI(unittest.TestCase):
-    def setUp(self):
-        self.shape = [2, 3, 4, 5]
-        self.x = np.random.uniform(-1, 1, self.shape).astype(np.float32)
-        self.place = paddle.CUDAPlace(0) if paddle.fluid.core.is_compiled_with_cuda() \
-            else paddle.CPUPlace()
-
-    def api_case(self, axis=None, keepdim=False):
-        out_ref = ref_logsumexp(self.x, axis, keepdim)
-        with paddle.static.program_guard(paddle.static.Program()):
-            x = paddle.data('X', self.shape)
-            out = paddle.logsumexp(x, axis, keepdim)
-            exe = paddle.static.Executor(self.place)
-            res = exe.run(feed={'X': self.x}, fetch_list=[out])
-        self.assertTrue(np.allclose(res[0], out_ref))
-
-        paddle.disable_static(self.place)
-        x = paddle.to_variable(self.x)
-        out = paddle.logsumexp(x, axis, keepdim)
-        self.assertTrue(np.allclose(out.numpy(), out_ref))
-        paddle.enable_static()
-
-    def test_api(self):
-        self.api_case()
-        self.api_case(2)
-        self.api_case([-1])
-        self.api_case([2, -3])
-        self.api_case((0, 1, -1))
-        self.api_case(keepdim=True)
-
-    def test_alias(self):
-        paddle.disable_static(self.place)
-        x = paddle.to_variable(self.x)
-        out1 = paddle.logsumexp(x)
-        out2 = paddle.tensor.logsumexp(x)
-        out3 = paddle.tensor.math.logsumexp(x)
-        out_ref = ref_logsumexp(self.x)
-        for out in [out1, out2, out3]:
-            self.assertTrue(np.allclose(out.numpy(), out_ref))
-        paddle.enable_static()
+        with program_guard(Program(), Program()):
+
+            x1 = fluid.layers.data(name='x1', shape=[120], dtype="uint8")
+            self.assertRaises(Exception, paddle.logsumexp, x1)
+
+            x2 = fluid.layers.data(name='x2', shape=[2, 3], dtype="int")
+            self.assertRaises(Exception, paddle.logsumexp, x2)
+
+            x3 = fluid.layers.data(name='x3', shape=[3], dtype="float16")
+            self.assertRaises(Exception, paddle.logsumexp, x3)
+
+
+class TestLogSumExpOp(unittest.TestCase):
+    def test_dygraph(self):
+        with fluid.dygraph.guard():
+            np_x = np.random.uniform(0.1, 1, [123]).astype(np.float32)
+            x = fluid.dygraph.to_variable(np_x)
+            self.assertTrue(
+                np.allclose(
+                    paddle.logsumexp(x).numpy(), np.log(np.sum(np.exp(np_x)))))
+
+            np_x = np.random.uniform(0.1, 1, [2, 3, 4]).astype(np.float32)
+            x = fluid.dygraph.to_variable(np_x)
+            self.assertTrue(
+                np.allclose(
+                    paddle.logsumexp(x, [1, 2]).numpy(),
+                    np.log(np.sum(np.exp(np_x), axis=(1, 2)))))
+
+            np_x = np.random.uniform(0.1, 1, [2, 3, 4]).astype(np.float32)
+            x = fluid.dygraph.to_variable(np_x)
+            self.assertTrue(
+                np.allclose(
+                    paddle.logsumexp(x, [2]).numpy(),
+                    np.log(np.sum(np.exp(np_x), axis=2))))
+
+            np_x = np.random.uniform(0.1, 1, [2, 3, 4]).astype(np.float32)
+            x = fluid.dygraph.to_variable(np_x)
+            self.assertTrue(
+                np.allclose(
+                    paddle.logsumexp(
+                        x, keepdim=True).numpy(),
+                    np.log(np.sum(np.exp(np_x), keepdims=True))))


 if __name__ == '__main__':
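A note on the reference math these checks exercise: both the removed ref_logsumexp helper and the new inline assertions use the naive identity logsumexp(x) = log(sum(exp(x))), which is exact for the small uniform(0.1, 1) inputs drawn here but overflows in exp once entries of x grow large. For reference only, a minimal NumPy sketch of the standard max-shifted form; the logsumexp_ref name and its signature are illustrative and not part of this commit:

import numpy as np


def logsumexp_ref(x, axis=None, keepdims=False):
    # Illustrative sketch, not part of the diff above.
    # Stable form: logsumexp(x) = m + log(sum(exp(x - m))) with m = max(x),
    # so np.exp never sees a positive argument and cannot overflow.
    m = np.max(x, axis=axis, keepdims=True)
    s = np.sum(np.exp(x - m), axis=axis, keepdims=keepdims)
    if not keepdims:
        m = np.squeeze(m, axis=axis)
    return np.log(s) + m


# Agrees with the naive form used by the tests on well-behaved inputs.
np_x = np.random.uniform(0.1, 1, [2, 3, 4]).astype(np.float32)
assert np.allclose(logsumexp_ref(np_x, axis=(1, 2)),
                   np.log(np.sum(np.exp(np_x), axis=(1, 2))))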
|
|