Commit 7296522

Merge pull request #12818 from velconia/fix_python3_CI_job
Fix python3 CI job
2 parents: f63368d + 656c77e

File tree: 3 files changed, +15 -11 lines

paddle/fluid/framework/ir/graph_test.cc
python/paddle/fluid/tests/unittests/test_desc_clone.py
python/paddle/fluid/transpiler/distribute_transpiler.py

paddle/fluid/framework/ir/graph_test.cc

Lines changed: 3 additions & 1 deletion
@@ -200,9 +200,11 @@ TEST(GraphTest, WriteAfterWrite) {
       ASSERT_TRUE(ir::IsControlDepVar(*n->inputs[1]));
       control_dep2 = n->inputs[1];
       ASSERT_EQ(n->inputs.size(), 2);
-      ASSERT_EQ(control_dep1, control_dep2);
     }
   }
+  ASSERT_NE(control_dep1, nullptr);
+  ASSERT_NE(control_dep2, nullptr);
+  ASSERT_EQ(control_dep1, control_dep2);
 }
 }  // namespace framework
 }  // namespace paddle
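Note on this hunk: the old ASSERT_EQ(control_dep1, control_dep2) sat inside the node loop, so on a run where that branch was never reached the assertion was skipped and the test passed vacuously. The new code null-checks both control-dependency vars and compares them exactly once, after the loop. A minimal Python sketch of the same pitfall (illustrative names, not Paddle APIs):

def check_inside_loop(items):
    # Anti-pattern: if items is empty, the assert never executes.
    for x in items:
        assert x > 0

def check_after_loop(items):
    # Safer: record what was seen, then assert unconditionally.
    seen = None
    for x in items:
        seen = x
    assert seen is not None, "loop body never ran"
    assert seen > 0

check_inside_loop([])   # passes silently -- nothing was checked
# check_after_loop([])  # would fail loudly, as intended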

python/paddle/fluid/tests/unittests/test_desc_clone.py

Lines changed: 10 additions & 8 deletions
@@ -27,6 +27,7 @@
 from multiprocessing import Process
 import os
 import signal
+import six
 import collections
 
 SEED = 1
@@ -55,7 +56,8 @@ def cnn_model(data):
     # TODO(dzhwinter) : refine the initializer and random seed settting
     SIZE = 10
     input_shape = conv_pool_2.shape
-    param_shape = [reduce(lambda a, b: a * b, input_shape[1:], 1)] + [SIZE]
+    param_shape = [six.moves.reduce(lambda a, b: a * b, input_shape[1:], 1)
+                   ] + [SIZE]
     scale = (2.0 / (param_shape[0]**2 * SIZE))**0.5
 
     predict = fluid.layers.fc(
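Why this hunk is needed: Python 3 removed the reduce builtin (it now lives in functools), so the bare reduce call breaks under the python3 CI job. six.moves.reduce resolves to the builtin on Python 2 and to functools.reduce on Python 3. A small sketch of the shape arithmetic, with a made-up input shape (the real one comes from conv_pool_2):

import six

input_shape = (64, 20, 4, 4)  # illustrative (N, C, H, W); not the actual test shape
SIZE = 10
# Product of all dimensions after the batch axis: 20 * 4 * 4 = 320.
flat = six.moves.reduce(lambda a, b: a * b, input_shape[1:], 1)
param_shape = [flat] + [SIZE]
print(param_shape)  # [320, 10] -- weight shape of the final fc layer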
@@ -108,7 +110,7 @@ def get_transpiler(trainer_id, main_program, pserver_endpoints, trainers):
 
 
 def operator_equal(a, b):
-    for k, v in a.__dict__.iteritems():
+    for k, v in six.iteritems(a.__dict__):
         if isinstance(v, fluid.framework.Program) or \
                 isinstance(v, fluid.framework.Block):
             continue
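The same root cause as the reduce change: dict.iteritems() no longer exists in Python 3. six.iteritems(d) dispatches to d.iteritems() on Python 2 and d.items() on Python 3, so one spelling works on both. For example:

import six

d = {"op_type": "fc", "num_inputs": 2}
for k, v in six.iteritems(d):  # d.iteritems() on Py2, d.items() on Py3
    print(k, v)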
@@ -118,8 +120,8 @@ def operator_equal(a, b):
             raise ValueError("In operator_equal not equal:{0}\n".format(k))
 
         elif isinstance(v, collections.OrderedDict):
-            v0 = sorted(v.iteritems(), key=lambda x: x[0])
-            v1 = sorted(b.__dict__[k].iteritems(), key=lambda x: x[0])
+            v0 = sorted(six.iteritems(v), key=lambda x: x[0])
+            v1 = sorted(six.iteritems(b.__dict__[k]), key=lambda x: x[0])
 
             if v0 != v1:
                 raise ValueError("In operator_equal not equal:{0}\n".format(k))
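A note on the sorted(...) calls the hunk preserves: sorting the (key, value) pairs by key before comparing makes the check independent of insertion order, which matters because the two OrderedDicts may have been built in different orders even when they hold the same entries. A sketch:

import collections
import six

a = collections.OrderedDict([("w", 1), ("b", 2)])
b = collections.OrderedDict([("b", 2), ("w", 1)])  # same entries, different order
v0 = sorted(six.iteritems(a), key=lambda x: x[0])
v1 = sorted(six.iteritems(b), key=lambda x: x[0])
assert v0 == v1  # equal once order is normalized; a == b would be False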
@@ -131,7 +133,7 @@
 
 
 def block_equal(a, b):
-    for k, v in a.__dict__.iteritems():
+    for k, v in six.iteritems(a.__dict__):
         if isinstance(v, core.ProgramDesc) or isinstance(
                 v, fluid.framework.Program) or isinstance(v, core.BlockDesc):
             continue
@@ -143,8 +145,8 @@ def block_equal(a, b):
             assert (len(a.ops) == len(b.ops))
 
         elif isinstance(v, collections.OrderedDict):
-            v0 = sorted(v.iteritems(), key=lambda x: x[0])
-            v1 = sorted(b.__dict__[k].iteritems(), key=lambda x: x[0])
+            v0 = sorted(six.iteritems(v), key=lambda x: x[0])
+            v1 = sorted(six.iteritems(b.__dict__[k]), key=lambda x: x[0])
 
             if v0 != v1:
                 raise ValueError("In block_equal not equal:{0}\n".format(k))
@@ -156,7 +158,7 @@
 
 
 def program_equal(a, b):
-    for k, v in a.__dict__.iteritems():
+    for k, v in six.iteritems(a.__dict__):
         if isinstance(v, core.ProgramDesc):
             continue
 
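All three helpers patched here (operator_equal, block_equal, program_equal) follow the same recipe: walk a.__dict__ with six.iteritems, skip wrapper types, and compare everything else against b. A condensed, hypothetical version of that recipe, not the actual Paddle code (Obj and attrs_equal are illustrative):

import collections
import six

def attrs_equal(a, b, skip_types=()):
    # Compare two objects attribute by attribute, Py2/Py3-portable.
    for k, v in six.iteritems(a.__dict__):
        if skip_types and isinstance(v, skip_types):
            continue  # e.g. Program/Block wrappers in the real helpers
        other = b.__dict__[k]
        if isinstance(v, collections.OrderedDict):
            v0 = sorted(six.iteritems(v), key=lambda x: x[0])
            v1 = sorted(six.iteritems(other), key=lambda x: x[0])
            if v0 != v1:
                raise ValueError("not equal: {0}".format(k))
        elif v != other:
            raise ValueError("not equal: {0}".format(k))
    return True

class Obj(object):
    def __init__(self, **kw):
        self.__dict__.update(kw)

assert attrs_equal(Obj(name="fc", n=2), Obj(n=2, name="fc"))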

python/paddle/fluid/transpiler/distribute_transpiler.py

Lines changed: 2 additions & 2 deletions
@@ -369,7 +369,7 @@ def _get_trainer_startup_program(self,
         # FIXME(gongwb): delete not need ops.
         # note that: some parameter is not trainable and those ops can't be deleted.
 
-        for varname, splited_var in self.param_var_mapping.iteritems():
+        for varname, splited_var in six.iteritems(self.param_var_mapping):
             # Get the eplist of recv vars
             eps = []
             for var in splited_var:
@@ -406,7 +406,7 @@ def _get_trainer_startup_program(self,
                 RPC_OP_ROLE_ATTR_NAME: RPC_OP_ROLE_ATTR_VALUE
             })
 
-        for varname, splited_var in self.param_var_mapping.iteritems():
+        for varname, splited_var in six.iteritems(self.param_var_mapping):
             #add concat ops to merge splited parameters received from parameter servers.
             if len(splited_var) <= 1:
                 continue
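The transpiler change is the same iteritems fix applied to self.param_var_mapping, which (per the surrounding comments) maps each parameter name to the list of its split slices received from parameter servers; this assumes six is already imported at the top of distribute_transpiler.py, which the hunk does not show. A hedged sketch of the iteration shape, with hypothetical data:

import six

# Hypothetical stand-in for self.param_var_mapping: param name -> split slices.
param_var_mapping = {
    "fc_0.w_0": ["fc_0.w_0.block0", "fc_0.w_0.block1"],
    "fc_0.b_0": ["fc_0.b_0"],  # small variable, never split
}
for varname, splited_var in six.iteritems(param_var_mapping):
    if len(splited_var) <= 1:
        continue  # nothing to concat back together
    print("concat", splited_var, "->", varname)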
