@@ -4263,14 +4263,18 @@ def reshape(x, shape, actual_shape=None, act=None, inplace=True, name=None):
4263
4263
say :attr:`actual_shape` has a higher priority
4264
4264
than :attr:`shape`.
4265
4265
act (str): The non-linear activation to be applied to output variable.
4266
- inplace(bool): If this flag is set true, a new output tensor is created
4267
- whose data is copied from input x, otherwise the output
4268
- shares data with input without copying.
4266
+ inplace(bool): If this flag is set true, the output
4267
+ shares data with input without copying, otherwise
4268
+ a new output tensor is created
4269
+ whose data is copied from input x.
4269
4270
name (str): The name of this layer. It is optional.
4270
4271
4271
4272
Returns:
4272
4273
Variable: The output tensor.
4273
4274
4275
+ Raises:
4276
+ TypeError: if actual_shape is neither Variable nor None.
4277
+
4274
4278
Examples:
4275
4279
.. code-block:: python
4276
4280
@@ -4282,6 +4286,11 @@ def reshape(x, shape, actual_shape=None, act=None, inplace=True, name=None):
4282
4286
4283
4287
if not (isinstance (shape , list ) or isinstance (shape , tuple )):
4284
4288
raise ValueError ("Input shape must be a python list or tuple." )
4289
+ inputs = {"X" : x }
4290
+ if isinstance (actual_shape , Variable ):
4291
+ inputs ["Shape" ] = actual_shape
4292
+ elif actual_shape is not None :
4293
+ raise TypeError ("actual_shape should either be Variable or None" )
4285
4294
4286
4295
# Validate the shape
4287
4296
unk_dim_idx = - 1
@@ -4302,9 +4311,7 @@ def reshape(x, shape, actual_shape=None, act=None, inplace=True, name=None):
4302
4311
reshaped = helper .create_tmp_variable (dtype = x .dtype )
4303
4312
helper .append_op (
4304
4313
type = "reshape" ,
4305
- inputs = {"X" : x ,
4306
- "Shape" : actual_shape }
4307
- if isinstance (actual_shape , Variable ) else {"X" : x },
4314
+ inputs = inputs ,
4308
4315
attrs = {"shape" : shape ,
4309
4316
"inplace" : inplace },
4310
4317
outputs = {"Out" : reshaped })
0 commit comments