@@ -4266,14 +4266,18 @@ def reshape(x, shape, actual_shape=None, act=None, inplace=True, name=None):
4266
4266
say :attr:`actual_shape` has a higher priority
4267
4267
than :attr:`shape`.
4268
4268
act (str): The non-linear activation to be applied to output variable.
4269
- inplace(bool): If this flag is set true, a new output tensor is created
4270
- whose data is copied from input x, otherwise the output
4271
- shares data with input without copying.
4269
+ inplace(bool): If this flag is set True, the output
4270
+ shares data with input without copying, otherwise
4271
+ a new output tensor is created
4272
+ whose data is copied from input x.
4272
4273
name (str): The name of this layer. It is optional.
4273
4274
4274
4275
Returns:
4275
4276
Variable: The output tensor.
4276
4277
4278
+ Raises:
4279
+ TypeError: if actual_shape is neither Variable nor None.
4280
+
4277
4281
Examples:
4278
4282
.. code-block:: python
4279
4283
@@ -4285,6 +4289,11 @@ def reshape(x, shape, actual_shape=None, act=None, inplace=True, name=None):
4285
4289
4286
4290
if not (isinstance (shape , list ) or isinstance (shape , tuple )):
4287
4291
raise ValueError ("Input shape must be a python lsit or tuple." )
4292
+ inputs = {"X" : x }
4293
+ if isinstance (actual_shape , Variable ):
4294
+ inputs ["Shape" ] = actual_shape
4295
+ elif actual_shape is not None :
4296
+ raise TypeError ("actual_shape should either be Variable or None" )
4288
4297
4289
4298
# Validate the shape
4290
4299
unk_dim_idx = - 1
@@ -4305,9 +4314,7 @@ def reshape(x, shape, actual_shape=None, act=None, inplace=True, name=None):
4305
4314
reshaped = helper .create_tmp_variable (dtype = x .dtype )
4306
4315
helper .append_op (
4307
4316
type = "reshape" ,
4308
- inputs = {"X" : x ,
4309
- "Shape" : actual_shape }
4310
- if isinstance (actual_shape , Variable ) else {"X" : x },
4317
+ inputs = inputs ,
4311
4318
attrs = {"shape" : shape ,
4312
4319
"inplace" : inplace },
4313
4320
outputs = {"Out" : reshaped })
0 commit comments