Commit 9fec4bc

remove complexvariable (#29390) (#29417)
* rm complexvariable
* modify test_var_base unittest
* remove duplicated codes
1 parent f223c78 commit 9fec4bc

19 files changed: +61 additions, −520 deletions

python/paddle/__init__.py

Lines changed: 0 additions & 2 deletions
@@ -32,7 +32,6 @@
 monkey_patch_math_varbase()
 import paddle.framework
 from .framework import VarBase as Tensor
-from .framework import ComplexVariable as ComplexTensor
 import paddle.compat
 import paddle.distributed
 import paddle.sysconfig
@@ -43,7 +42,6 @@
 import paddle.optimizer
 import paddle.metric
 import paddle.device
-import paddle.incubate.complex as complex
 import paddle.regularizer
 
 # TODO: define alias in tensor and framework directory
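
The two deletions above are the old complex type's entire public surface: the `paddle.ComplexTensor` alias and the `paddle.incubate.complex` namespace (exposed as `paddle.complex`). A minimal migration sketch, assuming a build that includes this commit and that complex data is now carried by the ordinary `Tensor`/`VarBase` path, as the updated unit tests further below suggest:

import numpy as np
import paddle

# paddle.ComplexTensor and paddle.complex are gone after this commit;
# complex values are assumed to live in a regular Tensor instead.
x_np = np.array([1.0 + 2.0j, 0.2], dtype=np.complex64)
x = paddle.to_tensor(x_np)   # assumption: yields a complex64 Tensor, not a ComplexTensor
print(type(x))               # an ordinary paddle Tensor / VarBase
print(x.numpy())             # round-trips the complex values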

python/paddle/fluid/dygraph/base.py

Lines changed: 15 additions & 34 deletions
@@ -593,12 +593,12 @@ def to_variable(value, name=None, zero_copy=None, dtype=None):
     r"""
     :api_attr: imperative
 
-    The API will create a ``Variable`` or ``ComplexVariable`` object from
-    tuple, list, numpy\.ndarray, Variable or ComplexVariable object.
+    The API will create a ``Variable`` object from
+    tuple, list, numpy\.ndarray or Variable object.
 
     Parameters:
-        value(tuple|list|ndarray|Variable|Tensor|ComplexVariable): Initial data.
-            Can be a list, tuple, NumPy ndarray, Variable, Tensor, ComplexVariable.
+        value(tuple|list|ndarray|Variable|Tensor): Initial data.
+            Can be a list, tuple, NumPy ndarray, Variable, Tensor.
             The shape can be multi-dimensional. The data type is one of
             numpy\.{float16, float32, float64, int16, int32, int64,
             uint8, uint16, complex64, complex128}.
@@ -613,10 +613,9 @@ def to_variable(value, name=None, zero_copy=None, dtype=None):
             'int32' , 'int64' , 'uint8' . Default: None.
 
     Returns:
-        Variable or ComplexVariable: If ``value`` is a tuple/list/numpy\.ndarray object,
+        Variable : If ``value`` is a tuple/list/numpy\.ndarray object,
         return ``Tensor`` created from the corresponding numpy\.ndarray object, which has
-        same data type and shape with ``value``. If ``value`` is a Variable or ComplexVariable
-        object, just return ``value``.
+        same data type and shape with ``value``.
 
 
     Examples:
@@ -647,13 +646,12 @@ def to_variable(value, name=None, zero_copy=None, dtype=None):
 
     """
     support_type = (list, tuple, np.ndarray, core.VarBase, framework.Variable,
-                    framework.ComplexVariable, core.Tensor, core.LoDTensor)
+                    core.Tensor, core.LoDTensor)
     if not isinstance(value, support_type):
         raise TypeError(
             "The type of 'value' in fluid.dygraph.to_variable must be %s, but received %s."
             % (support_type, type(value)))
-    if isinstance(value, (core.VarBase, framework.Variable,
-                          framework.ComplexVariable)):
+    if isinstance(value, (core.VarBase, framework.Variable)):
         return value
     elif isinstance(value, (core.Tensor, core.LoDTensor)):
         return core.VarBase(value)
@@ -682,27 +680,10 @@ def to_variable(value, name=None, zero_copy=None, dtype=None):
         if value.dtype != dtype:
             value = value.astype(dtype)
 
-        if np.iscomplexobj(value):
-            if not name:
-                name = framework.unique_name.generate('_generated_var')
-            real_var = core.VarBase(
-                value=value.real,
-                place=framework._current_expected_place(),
-                persistable=False,
-                zero_copy=zero_copy,
-                name=name + ".real")
-            imag_var = core.VarBase(
-                value=value.imag,
-                place=framework._current_expected_place(),
-                persistable=False,
-                zero_copy=zero_copy,
-                name=name + ".imag")
-            return framework.ComplexVariable(real_var, imag_var)
-        else:
-            py_var = core.VarBase(
-                value=value,
-                place=framework._current_expected_place(),
-                persistable=False,
-                zero_copy=zero_copy,
-                name=name if name else '')
-            return py_var
+        py_var = core.VarBase(
+            value=value,
+            place=framework._current_expected_place(),
+            persistable=False,
+            zero_copy=zero_copy,
+            name=name if name else '')
+        return py_var
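
The deleted branch is what used to split a complex ndarray into two `core.VarBase` objects (`<name>.real` and `<name>.imag`) and wrap them in a `ComplexVariable`; complex input now takes the same `core.VarBase` constructor as real input. A short sketch of the resulting behaviour, patterned on the updated unit tests below (dygraph mode and complex `VarBase` support are assumed):

import numpy as np
import paddle.fluid.dygraph as dg

# complex64 ndarray built the same way the unit tests build their inputs
x = (np.random.rand(2, 3).astype("float32") +
     1j * np.random.rand(2, 3).astype("float32"))

with dg.guard():
    var_x = dg.to_variable(x)                     # one VarBase, no .real/.imag pair
    np.testing.assert_allclose(var_x.numpy(), x)  # complex data round-trips intact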

python/paddle/fluid/dygraph/math_op_patch.py

Lines changed: 1 addition & 8 deletions
@@ -15,7 +15,7 @@
 from __future__ import print_function
 
 from .. import core
-from ..framework import Variable, convert_np_dtype_to_dtype_, _varbase_creator, ComplexVariable
+from ..framework import Variable, convert_np_dtype_to_dtype_, _varbase_creator
 from ..layers.layer_function_generator import OpProtoHolder
 from . import no_grad
 
@@ -170,13 +170,6 @@ def _binary_creator_(method_name,
                      reverse=False,
                      scalar_method=None):
     def __impl__(self, other_var):
-        # 0. check tensor and ComplexVariable opetator
-        if isinstance(other_var, ComplexVariable):
-            # need import paddle in closure
-            import paddle
-            math_op = getattr(paddle.incubate.complex.tensor, op_type)
-            return math_op(self, other_var)
-
         # 1. scalar exists cases
         # we need combine the tensor.dtype and scalar.dtype, cast correct object
         if isinstance(other_var, float):
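
With the `ComplexVariable` check gone from `__impl__`, the overloaded operators on `VarBase` no longer re-dispatch complex operands to `paddle.incubate.complex.tensor`; they fall through to the regular elementwise ops. A sketch of what the reworked tests exercise, assuming the elementwise kernels in this build accept complex inputs:

import numpy as np
import paddle.fluid.dygraph as dg

a = np.random.rand(4, 5) + 1j * np.random.rand(4, 5)   # complex128
b = np.random.rand(4, 5) + 1j * np.random.rand(4, 5)

with dg.guard():
    va, vb = dg.to_variable(a), dg.to_variable(b)
    # +, -, *, / all go through the ordinary _binary_creator_ path now
    np.testing.assert_allclose((va * vb).numpy(), a * b)
    np.testing.assert_allclose((va / vb).numpy(), a / b)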

python/paddle/fluid/framework.py

Lines changed: 0 additions & 92 deletions
@@ -51,7 +51,6 @@
     'is_compiled_with_cuda',
     'is_compiled_with_xpu',
     'Variable',
-    'ComplexVariable',
     'load_op_library',
     'require_version',
     'device_guard',
@@ -1783,97 +1782,6 @@ def get_all_op_protos():
     return ret_values
 
 
-class ComplexVariable(object):
-    """
-    The ComplexTensor defined on the complex number domain. It contains two common
-    real number Tensor as its members, :attr:`real` and :attr:`imag`
-    holding the real part and imaginary part of complex numbers respectively.
-
-    **Notes**:
-        **The constructor of ComplexTensor should not be invoked directly.**
-
-    Args:
-        real (Tensor): The Tensor holding real-part data.
-        imag (Tensor): The Tensor holding imaginery-part data.
-
-    Examples:
-        .. code-block:: python
-
-            import paddle
-            x = paddle.to_tensor([1.0+2.0j, 0.2])
-            print(x.name, x.dtype, x.shape)
-            # ({'real': 'generated_tensor_0.real', 'imag': 'generated_tensor_0.imag'}, complex64, [2])
-            print(x)
-            # ComplexTensor[real](shape=[2], dtype=float32, place=CUDAPlace(0), stop_gradient=True,
-            #     [ 1., 0.20000000])
-            # ComplexTensor[imag](shape=[2], dtype=float32, place=CUDAPlace(0), stop_gradient=True,
-            #     [2., 0.])
-            print(type(x))
-            # <class 'paddle.ComplexTensor'>
-    """
-
-    def __new__(cls, *arg, **kwargs):
-        cls.__module__ = "paddle"
-        cls.__name__ = "ComplexTensor"
-        return super(ComplexVariable, cls).__new__(cls)
-
-    def __init__(self, real, imag):
-        assert real.shape == imag.shape, "The real part and imaginary part " \
-            "of a ComplexVariable should have the same shape!"
-        assert real.dtype == imag.dtype, "The real part and imaginary part " \
-            "of a ComplexVariable should have the same data type!"
-
-        self.real = real
-        self.imag = imag
-        if self.real.dtype in [
-                core.VarDesc.VarType.FP16, core.VarDesc.VarType.FP32
-        ]:
-            self._dtype = "complex64"
-        else:
-            self._dtype = "complex128"
-        self._shape = self.real.shape
-
-    def __getitem__(self, idx):
-        return ComplexVariable(self.real[idx], self.imag[idx])
-
-    @property
-    def dtype(self):
-        return self._dtype
-
-    @property
-    def shape(self):
-        return self._shape
-
-    @property
-    def name(self):
-        return {"real": self.real.name, "imag": self.imag.name}
-
-    @name.setter
-    def name(self, name):
-        # rename
-        if isinstance(name, str):
-            self.real.name = name + ".real"
-            self.imag.name = name + ".imag"
-        elif (isinstance(name, tuple) or isinstance(name,
-                                                    list)) and len(name) == 2:
-            self.real.name, self.imag.name = name[0], name[1]
-        else:
-            raise ValueError(
-                "An invalid name assigned to the ComplexVariable, "
-                "which must be a string, or a tuple or a list with length 2!")
-
-    def numpy(self):
-        return self.real.numpy() + 1j * self.imag.numpy()
-
-    def __str__(self):
-        from paddle.tensor.to_string import to_string
-        return "ComplexTensor containing:\n{real}\n{imag}".format(
-            real=to_string(self.real, "[real part]Tensor"),
-            imag=to_string(self.imag, "[imag part]Tensor"))
-
-    __repr__ = __str__
-
-
 class OpProtoHolder(object):
     """
     A global variable to hold all OpProtos from C++ as a map
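
The removed class reassembled its value in `numpy()` as `real + 1j * imag` and kept the `real`/`imag` tensors side by side. With no wrapper left, the complex ndarray presumably comes straight out of `Tensor.numpy()`, and the two parts can be recovered on the NumPy side. A hedged sketch (only `to_variable` and `numpy` are Paddle calls, the rest is plain NumPy):

import numpy as np
import paddle.fluid.dygraph as dg

x = np.array([1.0 + 2.0j, 0.2 + 0.0j], dtype=np.complex64)

with dg.guard():
    t = dg.to_variable(x)
    out = t.numpy()                              # complex array, no manual reassembly
    real_part, imag_part = out.real, out.imag    # NumPy views of the two parts
    print(real_part, imag_part)                  # [1.  0.2] [2.  0.]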

python/paddle/fluid/tests/unittests/test_complex_elementwise_layers.py

Lines changed: 4 additions & 64 deletions
@@ -19,14 +19,6 @@
 import paddle
 import paddle.fluid as fluid
 import paddle.fluid.dygraph as dg
-from paddle import complex as cpx
-
-layers = {
-    "add": cpx.elementwise_add,
-    "sub": cpx.elementwise_sub,
-    "mul": cpx.elementwise_mul,
-    "div": cpx.elementwise_div,
-}
 
 paddle_apis = {
     "add": paddle.add,
@@ -43,26 +35,10 @@ def setUp(self):
         if fluid.core.is_compiled_with_cuda():
             self._places.append(paddle.CUDAPlace(0))
 
-    def calc(self, x, y, op, place):
-        with dg.guard(place):
-            var_x = dg.to_variable(x)
-            var_y = dg.to_variable(y)
-            return layers[op](var_x, var_y).numpy()
-
     def paddle_calc(self, x, y, op, place):
         with dg.guard(place):
-            x_t = paddle.Tensor(
-                value=x,
-                place=place,
-                persistable=False,
-                zero_copy=False,
-                stop_gradient=True)
-            y_t = paddle.Tensor(
-                value=y,
-                place=place,
-                persistable=False,
-                zero_copy=False,
-                stop_gradient=True)
+            x_t = dg.to_variable(x)
+            y_t = dg.to_variable(y)
             return paddle_apis[op](x_t, y_t).numpy()
 
     def assert_check(self, pd_result, np_result, place):
@@ -72,13 +48,6 @@ def assert_check(self, pd_result, np_result, place):
                 format(place, pd_result[~np.isclose(pd_result, np_result)],
                        np_result[~np.isclose(pd_result, np_result)]))
 
-    def compare_by_complex_api(self, x, y):
-        for place in self._places:
-            self.assert_check(self.calc(x, y, "add", place), x + y, place)
-            self.assert_check(self.calc(x, y, "sub", place), x - y, place)
-            self.assert_check(self.calc(x, y, "mul", place), x * y, place)
-            self.assert_check(self.calc(x, y, "div", place), x / y, place)
-
     def compare_by_basic_api(self, x, y):
         for place in self._places:
             self.assert_check(
@@ -90,7 +59,7 @@ def compare_by_basic_api(self, x, y):
             self.assert_check(
                 self.paddle_calc(x, y, "div", place), x / y, place)
 
-    def compare_op_by_complex_api(self, x, y):
+    def compare_op_by_basic_api(self, x, y):
         for place in self._places:
             with dg.guard(place):
                 var_x = dg.to_variable(x)
@@ -100,37 +69,14 @@ def compare_op_by_complex_api(self, x, y):
                 self.assert_check((var_x * var_y).numpy(), x * y, place)
                 self.assert_check((var_x / var_y).numpy(), x / y, place)
 
-    def compare_op_by_basic_api(self, x, y):
-        for place in self._places:
-            with dg.guard(place):
-                x_t = paddle.Tensor(
-                    value=x,
-                    place=place,
-                    persistable=False,
-                    zero_copy=False,
-                    stop_gradient=True)
-                y_t = paddle.Tensor(
-                    value=y,
-                    place=place,
-                    persistable=False,
-                    zero_copy=False,
-                    stop_gradient=True)
-                self.assert_check((x_t + y_t).numpy(), x + y, place)
-                self.assert_check((x_t - y_t).numpy(), x - y, place)
-                self.assert_check((x_t * y_t).numpy(), x * y, place)
-                self.assert_check((x_t / y_t).numpy(), x / y, place)
-
     def test_complex_xy(self):
         for dtype in self._dtypes:
             x = rand([2, 3, 4, 5]).astype(dtype) + 1j * rand(
                 [2, 3, 4, 5]).astype(dtype)
             y = rand([2, 3, 4, 5]).astype(dtype) + 1j * rand(
                 [2, 3, 4, 5]).astype(dtype)
 
-            self.compare_by_complex_api(x, y)
-            self.compare_op_by_complex_api(x, y)
-
-            self.compare_op_by_complex_api(x, y)
+            self.compare_by_basic_api(x, y)
             self.compare_op_by_basic_api(x, y)
 
     def test_complex_x_real_y(self):
@@ -139,9 +85,6 @@ def test_complex_x_real_y(self):
                 [2, 3, 4, 5]).astype(dtype)
             y = rand([4, 5]).astype(dtype)
 
-            self.compare_by_complex_api(x, y)
-            self.compare_op_by_complex_api(x, y)
-
             # promote types cases
             self.compare_by_basic_api(x, y)
             self.compare_op_by_basic_api(x, y)
@@ -151,9 +94,6 @@ def test_real_x_complex_y(self):
             x = rand([2, 3, 4, 5]).astype(dtype)
             y = rand([5]).astype(dtype) + 1j * rand([5]).astype(dtype)
 
-            self.compare_by_complex_api(x, y)
-            self.compare_op_by_complex_api(x, y)
-
             # promote types cases
             self.compare_by_basic_api(x, y)
             self.compare_op_by_basic_api(x, y)
