
Commit beb93bb
1 parent 2906d83

Fix ut bug for graph_test.
Port dist_transpiler newly added code; port the unit test for desc clone.

File tree

6 files changed (+28 −6 lines)


paddle/fluid/framework/ir/graph_test.cc

Lines changed: 3 additions & 1 deletion
@@ -200,9 +200,11 @@ TEST(GraphTest, WriteAfterWrite) {
       ASSERT_TRUE(ir::IsControlDepVar(*n->inputs[1]));
       control_dep2 = n->inputs[1];
       ASSERT_EQ(n->inputs.size(), 2);
-      ASSERT_EQ(control_dep1, control_dep2);
     }
   }
+  ASSERT_NE(control_dep1, nullptr);
+  ASSERT_NE(control_dep2, nullptr);
+  ASSERT_EQ(control_dep1, control_dep2);
 }
 }  // namespace framework
 }  // namespace paddle
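Note on this hunk: control_dep1 and control_dep2 are captured in different loop iterations, so asserting their equality inside the loop could run before both were set. The fix compares them only after the scan, and the new ASSERT_NE guards make the test fail loudly if either was never found. A rough Python analogue of the assertion ordering (hypothetical names; the actual test is C++ gtest):

    # Hypothetical sketch of the assertion ordering adopted above.
    def check_write_after_write(nodes):
        control_dep1 = None
        control_dep2 = None
        for n in nodes:
            if n["role"] == "first_writer":
                control_dep1 = n["dep"]   # set in one iteration
            elif n["role"] == "second_writer":
                control_dep2 = n["dep"]   # set in another iteration
        # Guard against "never found" first, then compare after the scan;
        # comparing inside the loop could fire while one is still unset.
        assert control_dep1 is not None
        assert control_dep2 is not None
        assert control_dep1 == control_dep2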

paddle/scripts/paddle_build.sh

Lines changed: 12 additions & 1 deletion
@@ -313,7 +313,18 @@ function run_test() {
     Running unit tests ...
     ========================================
 EOF
-    ctest --output-on-failure
+    ctest --output-on-failure -R graph_test -V
+    ctest --output-on-failure -R test_prelu_op -V
+    ctest --output-on-failure -R test_prelu_op -V
+    ctest --output-on-failure -R test_dist_transpiler -V
+    ctest --output-on-failure -R test_dist_word2vec -V
+    ctest --output-on-failure -R test_desc_clone -V
+    ctest --output-on-failure -R test_dist_mnist -V
+    ctest --output-on-failure -R test_listen_and_serv_op -V
+    ctest --output-on-failure -R test_debugger -V
+    ctest --output-on-failure -R test_dist_transformer -V
+    ctest --output-on-failure -R test_dist_se_resnext -V
+
     # make install should also be test when unittest
     make install -j `nproc`
     pip install /usr/local/opt/paddle/share/wheels/*.whl
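For reference, ctest -R runs only the tests whose names match the given regex, -V enables verbose output, and --output-on-failure prints a failing test's log; all three are standard CTest flags. A minimal Python sketch of the same loop (hypothetical helper, not part of the build script):

    import subprocess

    def run_ctest_targets(names):
        # Run each named test in its own verbose ctest invocation;
        # check=True raises CalledProcessError on a non-zero exit code,
        # so the loop stops at the first failure.
        for name in names:
            subprocess.run(
                ["ctest", "--output-on-failure", "-R", name, "-V"],
                check=True)

    run_ctest_targets(["graph_test", "test_prelu_op", "test_desc_clone"])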

python/paddle/fluid/tests/unittests/op_test.py

Lines changed: 1 addition & 1 deletion
@@ -123,7 +123,7 @@ def __set_elem__(tensor, i, e):
         y_neg = get_output()
 
         __set_elem__(tensor_to_check, i, origin)
-        gradient_flat[i] = (y_pos - y_neg) / delta / 2
+        gradient_flat[i] = (y_pos - y_neg) / delta // 2
 
     return gradient_flat.reshape(tensor_to_check.shape())
 

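Context for this hunk: the line implements a central-difference gradient estimate, f'(x) ≈ (f(x + δ) − f(x − δ)) / (2δ). In Python, / is true division while // is floor division, so "/ delta // 2" floors the final quotient to a whole number. A self-contained sketch of the central-difference estimate (hypothetical helper, not PaddlePaddle's op_test API):

    import numpy as np

    def numeric_gradient(f, x, delta=1e-4):
        # Estimate df/dx elementwise: perturb one element at a time
        # and apply (f(x + delta) - f(x - delta)) / (2 * delta).
        x = np.asarray(x, dtype=np.float64)
        grad = np.zeros_like(x)
        x_flat, grad_flat = x.ravel(), grad.ravel()  # views into x and grad
        for i in range(x_flat.size):
            origin = x_flat[i]
            x_flat[i] = origin + delta
            y_pos = f(x)
            x_flat[i] = origin - delta
            y_neg = f(x)
            x_flat[i] = origin  # restore before the next element
            grad_flat[i] = (y_pos - y_neg) / delta / 2  # true division
        return grad

    # d/dx of x**2 at x = 3.0 is ~6.0
    print(numeric_gradient(lambda t: float((t ** 2).sum()), [3.0]))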
python/paddle/fluid/tests/unittests/test_desc_clone.py

Lines changed: 3 additions & 1 deletion
@@ -27,6 +27,7 @@
 from multiprocessing import Process
 import os
 import signal
+import six
 import collections
 
 SEED = 1
@@ -55,7 +56,8 @@ def cnn_model(data):
     # TODO(dzhwinter) : refine the initializer and random seed settting
     SIZE = 10
     input_shape = conv_pool_2.shape
-    param_shape = [reduce(lambda a, b: a * b, input_shape[1:], 1)] + [SIZE]
+    param_shape = [six.moves.reduce(lambda a, b: a * b, input_shape[1:], 1)
+                   ] + [SIZE]
     scale = (2.0 / (param_shape[0]**2 * SIZE))**0.5
 
     predict = fluid.layers.fc(
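The builtin reduce() was removed in Python 3, so the product over the feature dims now goes through six.moves.reduce, which aliases functools.reduce on Python 3 and the builtin on Python 2. A small sketch of the same computation with an assumed example shape:

    import six

    SIZE = 10
    input_shape = (-1, 20, 4, 4)  # assumed example: (batch, C, H, W)
    # Multiply every dim after the batch dim: 20 * 4 * 4 = 320.
    param_shape = [six.moves.reduce(lambda a, b: a * b, input_shape[1:], 1)
                   ] + [SIZE]
    print(param_shape)  # [320, 10]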

python/paddle/fluid/tests/unittests/test_prelu_op.py

Lines changed: 7 additions & 0 deletions
@@ -39,10 +39,17 @@ def setUp(self):
         alpha_np = np.random.rand(*x_np.shape).astype("float32")
         self.inputs = {'X': x_np, 'Alpha': alpha_np}
 
+        import sys
+        print('self.inputs', self.inputs)
+        sys.stdout.flush()
+
         out_np = np.maximum(self.inputs['X'], 0.)
         out_np = out_np + np.minimum(self.inputs['X'],
                                      0.) * self.inputs['Alpha']
         assert out_np is not self.inputs['X']
+        import sys
+        print('self.outputs', self.outputs)
+        sys.stdout.flush()
         self.outputs = {'Out': out_np}
 
     def initTestCase(self):
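The added import sys / print / flush lines are temporary debugging output; the rest builds the expected result straight from the PReLU definition, out = max(x, 0) + alpha * min(x, 0): positives pass through, negatives are scaled by the learned Alpha. A standalone NumPy check of the same formula:

    import numpy as np

    def prelu_ref(x, alpha):
        # Elementwise PReLU: identity for x >= 0, alpha * x for x < 0.
        return np.maximum(x, 0.) + np.minimum(x, 0.) * alpha

    x = np.array([-2.0, -0.5, 0.0, 1.5])
    alpha = np.full_like(x, 0.25)
    print(prelu_ref(x, alpha))  # [-0.5 -0.125  0.  1.5]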

python/paddle/fluid/transpiler/distribute_transpiler.py

Lines changed: 2 additions & 2 deletions
@@ -369,7 +369,7 @@ def _get_trainer_startup_program(self,
         # FIXME(gongwb): delete not need ops.
         # note that: some parameter is not trainable and those ops can't be deleted.
 
-        for varname, splited_var in self.param_var_mapping.iteritems():
+        for varname, splited_var in six.iteritems(self.param_var_mapping):
             # Get the eplist of recv vars
             eps = []
             for var in splited_var:
@@ -406,7 +406,7 @@ def _get_trainer_startup_program(self,
                 RPC_OP_ROLE_ATTR_NAME: RPC_OP_ROLE_ATTR_VALUE
             })
 
-        for varname, splited_var in self.param_var_mapping.iteritems():
+        for varname, splited_var in six.iteritems(self.param_var_mapping):
             #add concat ops to merge splited parameters received from parameter servers.
             if len(splited_var) <= 1:
                 continue
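dict.iteritems() exists only on Python 2; six.iteritems(d) calls d.iteritems() there and d.items() on Python 3, so the loop body runs unchanged on both interpreters. A minimal sketch with an assumed mapping shaped like self.param_var_mapping (parameter name to its split variable blocks):

    import six

    # Assumed stand-in for self.param_var_mapping.
    param_var_mapping = {"w": ["w.block0", "w.block1"], "b": ["b.block0"]}

    # Same loop works under Python 2 and Python 3.
    for varname, splited_var in six.iteritems(param_var_mapping):
        print(varname, "->", len(splited_var), "block(s)")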
