Commit 5dc5175

Add Python API for expand op. (#13453)

* Add Python API for expand op.
* Fix unit test.
* Remove 'out' from arguments and fix code style.
* Fix API.spec.
* Fix API.
* Fix unit test.
Parent: 253f618

File tree

3 files changed: +59 lines, -0 lines

paddle/fluid/API.spec

Lines changed: 1 addition & 0 deletions
@@ -168,6 +168,7 @@ paddle.fluid.layers.stack ArgSpec(args=['x', 'axis'], varargs=None, keywords=Non
 paddle.fluid.layers.pad2d ArgSpec(args=['input', 'paddings', 'mode', 'pad_value', 'data_format', 'name'], varargs=None, keywords=None, defaults=([0, 0, 0, 0], 'constant', 0.0, 'NCHW', None))
 paddle.fluid.layers.unstack ArgSpec(args=['x', 'axis', 'num'], varargs=None, keywords=None, defaults=(0, None))
 paddle.fluid.layers.sequence_enumerate ArgSpec(args=['input', 'win_size', 'pad_value', 'name'], varargs=None, keywords=None, defaults=(0, None))
+paddle.fluid.layers.expand ArgSpec(args=['x', 'expand_times', 'name'], varargs=None, keywords=None, defaults=(None,))
 paddle.fluid.layers.sequence_concat ArgSpec(args=['input', 'name'], varargs=None, keywords=None, defaults=(None,))
 paddle.fluid.layers.data ArgSpec(args=['name', 'shape', 'append_batch_size', 'dtype', 'lod_level', 'type', 'stop_gradient'], varargs=None, keywords=None, defaults=(True, 'float32', 0, VarType.LOD_TENSOR, True))
 paddle.fluid.layers.open_files ArgSpec(args=['filenames', 'shapes', 'lod_levels', 'dtypes', 'thread_num', 'buffer_size', 'pass_num', 'is_test'], varargs=None, keywords=None, defaults=(None, None, 1, None))

python/paddle/fluid/layers/nn.py

Lines changed: 51 additions & 0 deletions
@@ -113,6 +113,7 @@
     'pad2d',
     'unstack',
     'sequence_enumerate',
+    'expand',
     'sequence_concat',
 ]

@@ -6118,3 +6119,53 @@ def unstack(x, axis=0, num=None):
         attrs={'axis': axis,
                'num': num})
     return outs
+
+
+def expand(x, expand_times, name=None):
+    """The expand operator tiles the input a given number of times. The number
+    of times for each dimension is set through the attribute 'expand_times'.
+    The rank of x must be in [1, 6], and the size of 'expand_times' must be
+    the same as x's rank. Following is a use case:
+
+    .. code-block:: text
+
+        Input(X) is a 3-D tensor with shape [2, 3, 1]:
+
+            [
+                [[1], [2], [3]],
+                [[4], [5], [6]]
+            ]
+
+        Attr(expand_times): [1, 2, 2]
+
+        Output(Out) is a 3-D tensor with shape [2, 6, 2]:
+
+            [
+                [[1, 1], [2, 2], [3, 3], [1, 1], [2, 2], [3, 3]],
+                [[4, 4], [5, 5], [6, 6], [4, 4], [5, 5], [6, 6]]
+            ]
+
+    Args:
+        x (Variable): A tensor with rank in [1, 6].
+        expand_times (list|tuple): Expand times number for each dimension.
+
+    Returns:
+        Variable: The expanded variable, which is a LoDTensor. After expanding, the size of each dimension of Output(Out) is equal to the size of the corresponding dimension of Input(X) multiplied by the corresponding value given in expand_times.
+
+    Examples:
+        .. code-block:: python
+
+            x = fluid.layers.data(name='x', shape=[10], dtype='float32')
+            out = fluid.layers.expand(x=x, expand_times=[1, 2, 2])
+    """
+    helper = LayerHelper('expand', input=x, **locals())
+    dtype = helper.input_dtype(input_param_name='x')
+    out = helper.create_tmp_variable(dtype)
+    helper.append_op(
+        type='expand',
+        inputs={'X': x},
+        outputs={'Out': out},
+        attrs={'expand_times': expand_times})
+    return out
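For reference, the tiling semantics described in the docstring above match NumPy's tile; the short sketch below (an illustration only, not part of the commit) reproduces the [2, 3, 1] -> [2, 6, 2] example with expand_times = [1, 2, 2]:

    import numpy as np

    # Input(X): 3-D tensor with shape [2, 3, 1], as in the docstring example.
    x = np.array([[[1], [2], [3]],
                  [[4], [5], [6]]])

    # With expand_times = [1, 2, 2], dimension i of the output has size
    # x.shape[i] * expand_times[i], giving shape (2, 6, 2).
    out = np.tile(x, (1, 2, 2))

    print(out.shape)  # (2, 6, 2)
    print(out[0])     # [[1 1] [2 2] [3 3] [1 1] [2 2] [3 3]]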

python/paddle/fluid/tests/unittests/test_layers.py

Lines changed: 7 additions & 0 deletions
@@ -565,6 +565,13 @@ def test_cross_entropy(self):
             out = layers.cross_entropy(x, label, False, 4)
             self.assertIsNotNone(out)
 
+    def test_expand(self):
+        program = Program()
+        with program_guard(program):
+            x = layers.data(name="input", shape=[10], dtype='int32')
+            out = layers.expand(x, [1, 2])
+        print(str(program))
+
 
 if __name__ == '__main__':
     unittest.main()
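The new test only constructs the program and prints it. As a minimal sketch of how one might additionally execute the op and check the expanded shape (an illustration only, not part of the commit; the float32 dtype, feed shape, and CPU executor are assumed choices for the fluid API of this era):

    import numpy as np
    import paddle.fluid as fluid

    program = fluid.Program()
    with fluid.program_guard(program):
        # shape=[10] plus the implicit batch dimension gives a rank-2 input,
        # so expand_times must also have two entries.
        x = fluid.layers.data(name='input', shape=[10], dtype='float32')
        out = fluid.layers.expand(x, [1, 2])

    exe = fluid.Executor(fluid.CPUPlace())
    result, = exe.run(program,
                      feed={'input': np.random.rand(4, 10).astype('float32')},
                      fetch_list=[out])
    print(result.shape)  # expected (4, 20): batch dim tiled 1x, feature dim tiled 2x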
