Skip to content

Commit 3959023

Browse files
committed
Enhance layer_function_generator
* Generated functions can take `*args` as inputs.
1 parent 50a6e7c commit 3959023

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

46 files changed

+75
-72
lines changed

python/paddle/v2/fluid/layers/layer_function_generator.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -155,7 +155,7 @@ def infer_and_check_dtype(op_proto, **kwargs):
155155

156156
return dtype
157157

158-
def func(**kwargs):
158+
def func(*args, **kwargs):
159159
helper = LayerHelper(op_type, **kwargs)
160160

161161
dtype = infer_and_check_dtype(op_proto, **kwargs)
@@ -166,6 +166,9 @@ def func(**kwargs):
166166
val = kwargs.pop(name, [])
167167
if not isinstance(val, list) and not isinstance(val, tuple):
168168
val = [val]
169+
if len(val) == 0 and len(args) != 0:
170+
val = args[0]
171+
args = args[1:]
169172
inputs[ipt.name] = val
170173

171174
outputs = dict()

python/paddle/v2/fluid/layers/tensor.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -160,8 +160,8 @@ def sums(input, out=None):
160160
a0 = layers.array_read(array=tmp, i=i)
161161
i = layers.increment(x=i)
162162
a1 = layers.array_read(array=tmp, i=i)
163-
mean_a0 = layers.mean(x=a0)
164-
mean_a1 = layers.mean(x=a1)
163+
mean_a0 = layers.mean(a0)
164+
mean_a1 = layers.mean(a1)
165165
a_sum = layers.sums(input=[mean_a0, mean_a1])
166166
"""
167167
helper = LayerHelper('sum', **locals())

python/paddle/v2/fluid/tests/book/notest_rnn_encoder_decoer.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -147,7 +147,7 @@ def seq_to_seq_net():
147147
label = fluid.layers.data(
148148
name='label_sequence', shape=[1], dtype='int64', lod_level=1)
149149
cost = fluid.layers.cross_entropy(input=prediction, label=label)
150-
avg_cost = fluid.layers.mean(x=cost)
150+
avg_cost = fluid.layers.mean(cost)
151151

152152
return avg_cost, prediction
153153

python/paddle/v2/fluid/tests/book/test_fit_a_line.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ def train(use_cuda, save_dirname):
2929
y = fluid.layers.data(name='y', shape=[1], dtype='float32')
3030

3131
cost = fluid.layers.square_error_cost(input=y_predict, label=y)
32-
avg_cost = fluid.layers.mean(x=cost)
32+
avg_cost = fluid.layers.mean(cost)
3333

3434
sgd_optimizer = fluid.optimizer.SGD(learning_rate=0.001)
3535
sgd_optimizer.minimize(avg_cost)

python/paddle/v2/fluid/tests/book/test_image_classification.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -110,7 +110,7 @@ def train(net_type, use_cuda, save_dirname):
110110

111111
predict = fluid.layers.fc(input=net, size=classdim, act='softmax')
112112
cost = fluid.layers.cross_entropy(input=predict, label=label)
113-
avg_cost = fluid.layers.mean(x=cost)
113+
avg_cost = fluid.layers.mean(cost)
114114
acc = fluid.layers.accuracy(input=predict, label=label)
115115

116116
# Test program

python/paddle/v2/fluid/tests/book/test_label_semantic_roles.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -164,7 +164,7 @@ def train(use_cuda, save_dirname=None):
164164
label=target,
165165
param_attr=fluid.ParamAttr(
166166
name='crfw', learning_rate=mix_hidden_lr))
167-
avg_cost = fluid.layers.mean(x=crf_cost)
167+
avg_cost = fluid.layers.mean(crf_cost)
168168

169169
# TODO(qiao)
170170
# check other optimizers and check why out will be NAN

python/paddle/v2/fluid/tests/book/test_machine_translation.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -178,7 +178,7 @@ def train_main(use_cuda, is_sparse):
178178
label = pd.data(
179179
name="target_language_next_word", shape=[1], dtype='int64', lod_level=1)
180180
cost = pd.cross_entropy(input=rnn_out, label=label)
181-
avg_cost = pd.mean(x=cost)
181+
avg_cost = pd.mean(cost)
182182

183183
optimizer = fluid.optimizer.Adagrad(learning_rate=1e-4)
184184
optimizer.minimize(avg_cost)

python/paddle/v2/fluid/tests/book/test_recognize_digits.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,7 @@ def parse_arg():
4848
def loss_net(hidden, label):
4949
prediction = fluid.layers.fc(input=hidden, size=10, act='softmax')
5050
loss = fluid.layers.cross_entropy(input=prediction, label=label)
51-
avg_loss = fluid.layers.mean(x=loss)
51+
avg_loss = fluid.layers.mean(loss)
5252
acc = fluid.layers.accuracy(input=prediction, label=label)
5353
return prediction, avg_loss, acc
5454

@@ -101,8 +101,8 @@ def train(nn_type, use_cuda, parallel, save_dirname, save_param_filename):
101101

102102
avg_loss, acc = pd()
103103
# get mean loss and acc through every devices.
104-
avg_loss = fluid.layers.mean(x=avg_loss)
105-
acc = fluid.layers.mean(x=acc)
104+
avg_loss = fluid.layers.mean(avg_loss)
105+
acc = fluid.layers.mean(acc)
106106
else:
107107
prediction, avg_loss, acc = net_conf(img, label)
108108

python/paddle/v2/fluid/tests/book/test_recommender_system.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -147,7 +147,7 @@ def model():
147147

148148
label = layers.data(name='score', shape=[1], dtype='float32')
149149
square_cost = layers.square_error_cost(input=scale_infer, label=label)
150-
avg_cost = layers.mean(x=square_cost)
150+
avg_cost = layers.mean(square_cost)
151151

152152
return scale_infer, avg_cost
153153

python/paddle/v2/fluid/tests/book/test_understand_sentiment.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,7 @@ def convolution_net(data, label, input_dim, class_dim=2, emb_dim=32,
4242
size=class_dim,
4343
act="softmax")
4444
cost = fluid.layers.cross_entropy(input=prediction, label=label)
45-
avg_cost = fluid.layers.mean(x=cost)
45+
avg_cost = fluid.layers.mean(cost)
4646
accuracy = fluid.layers.accuracy(input=prediction, label=label)
4747
return avg_cost, accuracy, prediction
4848

@@ -82,7 +82,7 @@ def gate_common(ipt, hidden, size):
8282
last = fluid.layers.sequence_last_step(rnn())
8383
prediction = fluid.layers.fc(input=last, size=class_dim, act="softmax")
8484
cost = fluid.layers.cross_entropy(input=prediction, label=label)
85-
avg_cost = fluid.layers.mean(x=cost)
85+
avg_cost = fluid.layers.mean(cost)
8686
accuracy = fluid.layers.accuracy(input=prediction, label=label)
8787
return avg_cost, accuracy, prediction
8888

@@ -119,7 +119,7 @@ def stacked_lstm_net(data,
119119
size=class_dim,
120120
act='softmax')
121121
cost = fluid.layers.cross_entropy(input=prediction, label=label)
122-
avg_cost = fluid.layers.mean(x=cost)
122+
avg_cost = fluid.layers.mean(cost)
123123
accuracy = fluid.layers.accuracy(input=prediction, label=label)
124124
return avg_cost, accuracy, prediction
125125

@@ -158,8 +158,8 @@ def train(word_dict, net_method, use_cuda, parallel=False, save_dirname=None):
158158
pd.write_output(acc)
159159

160160
cost, acc = pd()
161-
cost = fluid.layers.mean(x=cost)
162-
acc_out = fluid.layers.mean(x=acc)
161+
cost = fluid.layers.mean(cost)
162+
acc_out = fluid.layers.mean(acc)
163163
prediction = None
164164
assert save_dirname is None
165165

0 commit comments

Comments (0)