
Commit a8e355a

revert logsumexp op (#27145)
1 parent 847f93f commit a8e355a

File tree

6 files changed: +56 additions, -300 deletions


paddle/fluid/operators/reduce_ops/logsumexp_op.cc

Lines changed: 0 additions & 63 deletions
This file was deleted.

paddle/fluid/operators/reduce_ops/logsumexp_op.cu

Lines changed: 0 additions & 21 deletions
This file was deleted.

paddle/fluid/operators/reduce_ops/logsumexp_op.h

Lines changed: 0 additions & 58 deletions
This file was deleted.

paddle/fluid/operators/reduce_ops/logsumexp_op.part.cu

Lines changed: 0 additions & 22 deletions
This file was deleted.

python/paddle/fluid/tests/unittests/test_logsumexp.py

Lines changed: 47 additions & 115 deletions
@@ -12,128 +12,60 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import print_function
 import paddle
+import paddle.fluid as fluid
 import unittest
 import numpy as np
 from op_test import OpTest
+from paddle.fluid import Program, program_guard
+from paddle.fluid.layer_helper import LayerHelper
 
 
-def ref_logsumexp(x, axis=None, keepdim=False, reduce_all=False):
-    if isinstance(axis, int):
-        axis = (axis, )
-    elif isinstance(axis, list):
-        axis = tuple(axis)
-    if reduce_all:
-        axis = None
-    out = np.log(np.exp(x).sum(axis=axis, keepdims=keepdim))
-    return out
-
-
-class TestLogsumexp(OpTest):
-    def setUp(self):
-        self.op_type = 'logsumexp'
-        self.shape = [2, 3, 4, 5]
-        self.dtype = 'float64'
-        self.axis = [-1]
-        self.keepdim = False
-        self.reduce_all = False
-        self.set_attrs()
-
-        np.random.seed(10)
-        x = np.random.uniform(-1, 1, self.shape).astype(self.dtype)
-        out = ref_logsumexp(x, self.axis, self.keepdim, self.reduce_all)
-
-        self.inputs = {'X': x}
-        self.outputs = {'Out': out}
-        self.attrs = {
-            'dim': self.axis,
-            'keep_dim': self.keepdim,
-            'reduce_all': self.reduce_all
-        }
-
-    def set_attrs(self):
-        pass
-
-    def test_check_output(self):
-        self.check_output()
-
-    def test_check_grad(self):
-        self.check_grad(['X'], ['Out'])
-
-
-class TestLogsumexp_shape(TestLogsumexp):
-    def set_attrs(self):
-        self.shape = [4, 5, 6]
-
-
-class TestLogsumexp_axis(TestLogsumexp):
-    def set_attrs(self):
-        self.axis = [0, -1]
-
-
-class TestLogsumexp_axis_all(TestLogsumexp):
-    def set_attrs(self):
-        self.axis = [0, 1, 2, 3]
-
-
-class TestLogsumexp_keepdim(TestLogsumexp):
-    def set_attrs(self):
-        self.keepdim = True
-
-
-class TestLogsumexp_reduce_all(TestLogsumexp):
-    def set_attrs(self):
-        self.reduce_all = True
-
-
-class TestLogsumexpError(unittest.TestCase):
+class TestLogSumOpError(unittest.TestCase):
     def test_errors(self):
-        with paddle.static.program_guard(paddle.static.Program()):
-            self.assertRaises(TypeError, paddle.logsumexp, 1)
-            x1 = paddle.data(name='x1', shape=[120], dtype="int32")
-            self.assertRaises(TypeError, paddle.logsumexp, x1)
-
-
-class TestLogsumexpAPI(unittest.TestCase):
-    def setUp(self):
-        self.shape = [2, 3, 4, 5]
-        self.x = np.random.uniform(-1, 1, self.shape).astype(np.float32)
-        self.place = paddle.CUDAPlace(0) if paddle.fluid.core.is_compiled_with_cuda() \
-            else paddle.CPUPlace()
-
-    def api_case(self, axis=None, keepdim=False):
-        out_ref = ref_logsumexp(self.x, axis, keepdim)
-        with paddle.static.program_guard(paddle.static.Program()):
-            x = paddle.data('X', self.shape)
-            out = paddle.logsumexp(x, axis, keepdim)
-            exe = paddle.static.Executor(self.place)
-            res = exe.run(feed={'X': self.x}, fetch_list=[out])
-        self.assertTrue(np.allclose(res[0], out_ref))
-
-        paddle.disable_static(self.place)
-        x = paddle.to_variable(self.x)
-        out = paddle.logsumexp(x, axis, keepdim)
-        self.assertTrue(np.allclose(out.numpy(), out_ref))
-        paddle.enable_static()
-
-    def test_api(self):
-        self.api_case()
-        self.api_case(2)
-        self.api_case([-1])
-        self.api_case([2, -3])
-        self.api_case((0, 1, -1))
-        self.api_case(keepdim=True)
-
-    def test_alias(self):
-        paddle.disable_static(self.place)
-        x = paddle.to_variable(self.x)
-        out1 = paddle.logsumexp(x)
-        out2 = paddle.tensor.logsumexp(x)
-        out3 = paddle.tensor.math.logsumexp(x)
-        out_ref = ref_logsumexp(self.x)
-        for out in [out1, out2, out3]:
-            self.assertTrue(np.allclose(out.numpy(), out_ref))
-        paddle.enable_static()
+        with program_guard(Program(), Program()):
+
+            x1 = fluid.layers.data(name='x1', shape=[120], dtype="uint8")
+            self.assertRaises(Exception, paddle.logsumexp, x1)
+
+            x2 = fluid.layers.data(name='x2', shape=[2, 3], dtype="int")
+            self.assertRaises(Exception, paddle.logsumexp, x2)
+
+            x3 = fluid.layers.data(name='x3', shape=[3], dtype="float16")
+            self.assertRaises(Exception, paddle.logsumexp, x3)
+
+
+class TestLogSumExpOp(unittest.TestCase):
+    def test_dygraph(self):
+        with fluid.dygraph.guard():
+            np_x = np.random.uniform(0.1, 1, [123]).astype(np.float32)
+            x = fluid.dygraph.to_variable(np_x)
+            self.assertTrue(
+                np.allclose(
+                    paddle.logsumexp(x).numpy(), np.log(np.sum(np.exp(np_x)))))
+
+            np_x = np.random.uniform(0.1, 1, [2, 3, 4]).astype(np.float32)
+            x = fluid.dygraph.to_variable(np_x)
+            self.assertTrue(
+                np.allclose(
+                    paddle.logsumexp(x, [1, 2]).numpy(),
+                    np.log(np.sum(np.exp(np_x), axis=(1, 2)))))
+
+            np_x = np.random.uniform(0.1, 1, [2, 3, 4]).astype(np.float32)
+            x = fluid.dygraph.to_variable(np_x)
+            self.assertTrue(
+                np.allclose(
+                    paddle.logsumexp(x, [2]).numpy(),
+                    np.log(np.sum(np.exp(np_x), axis=(2)))))
+
+            np_x = np.random.uniform(0.1, 1, [2, 3, 4]).astype(np.float32)
+            x = fluid.dygraph.to_variable(np_x)
+            self.assertTrue(
+                np.allclose(
+                    paddle.logsumexp(
+                        x, keepdim=True).numpy(),
+                    np.log(np.sum(np.exp(np_x), keepdims=True))))
 
 
 if __name__ == '__main__':
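The restored tests validate paddle.logsumexp in dygraph mode against a plain NumPy reference, log(sum(exp(x))). A condensed, runnable sketch of that check, assuming a Paddle build from this era (the helper name numpy_logsumexp is ours, not the test file's):

    import numpy as np
    import paddle
    import paddle.fluid as fluid

    def numpy_logsumexp(np_x, axis=None, keepdims=False):
        # NumPy reference the tests compare against: log(sum(exp(x))).
        return np.log(np.sum(np.exp(np_x), axis=axis, keepdims=keepdims))

    with fluid.dygraph.guard():
        np_x = np.random.uniform(0.1, 1, [2, 3, 4]).astype(np.float32)
        x = fluid.dygraph.to_variable(np_x)
        # Reduce over axes 1 and 2, mirroring the second assertion above.
        assert np.allclose(paddle.logsumexp(x, [1, 2]).numpy(),
                           numpy_logsumexp(np_x, axis=(1, 2)))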

python/paddle/tensor/math.py

Lines changed: 9 additions & 21 deletions
@@ -86,7 +86,6 @@
         'floor',
         'increment',
         'log',
-        'logsumexp',
         'mul',
         'multiplex',
         'pow',
@@ -1177,35 +1176,24 @@ def logsumexp(x, axis=None, keepdim=False, name=None):
         .. code-block:: python
 
             import paddle
+            import numpy as np
 
             paddle.disable_static()
-
-            x = paddle.to_tensor([[-1.5, 0., 2.], [3., 1.2, -2.4]])
+
+            x = np.array([[-1.5, 0., 2.], [3., 1.2, -2.4]])
+            x = paddle.to_tensor(x)
             out1 = paddle.logsumexp(x) # [3.4691226]
             out2 = paddle.logsumexp(x, 1) # [2.15317821, 3.15684602]
 
     """
-    if isinstance(axis, int):
-        axis = [axis]
-    reduce_all = True if axis is None \
-        or len(axis)==0 \
-        or len(axis) == len(x.shape) else False
-    if axis is None or len(axis) == 0:
-        axis = [0]
-
-    if in_dygraph_mode():
-        return core.ops.logsumexp(x, 'dim', axis, 'keep_dim', keepdim,
-                                  'reduce_all', reduce_all)
-
-    check_variable_and_dtype(x, 'x',
+    if not in_dygraph_mode():
+        check_variable_and_dtype(x, 'x',
                              ['float32', 'float64'],
                              'logsumexp')
 
-    helper = LayerHelper('logsumexp', **locals())
-    attrs = {'dim': axis, 'keep_dim': keepdim, 'reduce_all': reduce_all}
-    out = helper.create_variable_for_type_inference(x.dtype)
-    helper.append_op(
-        type='logsumexp', inputs={'X': x}, outputs={'Out': out}, attrs=attrs)
+    out = paddle.exp(x, name)
+    out = paddle.sum(out, axis=axis, keepdim=keepdim, name=name)
+    out = paddle.log(out, name)
     return out
 
 
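After this revert, paddle.logsumexp is computed at the Python level as the composition exp -> sum -> log seen in the diff above, rather than through the dedicated fused operator that the deleted C++ files implemented. A minimal NumPy sketch of the same arithmetic, checked against the docstring example (naive_logsumexp is an illustrative name, not Paddle's API):

    import numpy as np

    def naive_logsumexp(x, axis=None, keepdim=False):
        # Same composition the reverted implementation uses: exp -> sum -> log.
        # Note: without the usual max-subtraction trick, exp(x) can overflow
        # for large inputs; the docstring values are small enough to be safe.
        return np.log(np.exp(x).sum(axis=axis, keepdims=keepdim))

    x = np.array([[-1.5, 0., 2.], [3., 1.2, -2.4]])
    print(naive_logsumexp(x))          # ~3.4691226, matches out1 above
    print(naive_logsumexp(x, axis=1))  # ~[2.15317821, 3.15684602], matches out2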
