Commit 718033e

Merge remote-tracking branch 'ups/develop' into refine/op/peephole
2 parents: 40dbd97 + ad4d965

5 files changed: +16 −6 lines

doc/fluid/new_docs/user_guides/howto/debug/visualdl.md

Lines changed: 1 addition & 0 deletions

@@ -104,6 +104,7 @@ visualDL --logdir=scratch_log --port=8080
 
 # Visit http://127.0.0.1:8080
 ```
+If you see `TypeError: __init__() got an unexpected keyword argument 'file'`, the installed protobuf is older than 3.5; running `pip install --upgrade protobuf` resolves it.
 
 If you still run into installation problems inside a virtual environment, try the following approaches.
 
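
As a quick sanity check before upgrading, the installed protobuf version can be inspected directly. The snippet below is a minimal sketch, not part of the patch, and assumes only that protobuf is importable as `google.protobuf`.

```python
# Minimal sketch (not from the patch): fail early if the installed protobuf
# is older than 3.5, which is what triggers the TypeError mentioned above.
import google.protobuf

major, minor = (int(x) for x in google.protobuf.__version__.split('.')[:2])
if (major, minor) < (3, 5):
    raise RuntimeError(
        "protobuf %s is too old for VisualDL; run `pip install --upgrade protobuf`"
        % google.protobuf.__version__)
print("protobuf %s is new enough" % google.protobuf.__version__)
```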

paddle/scripts/paddle_build.sh

Lines changed: 1 addition & 1 deletion

@@ -500,7 +500,7 @@ EOF
 EOF
 
     if [[ ${WITH_GPU} == "ON" ]]; then
-        NCCL_DEPS="apt-get install -y --allow-downgrades libnccl2=2.1.2-1+cuda${CUDA_MAJOR} libnccl-dev=2.1.2-1+cuda${CUDA_MAJOR} &&"
+        NCCL_DEPS="apt-get install -y --allow-downgrades libnccl2=2.2.13-1+cuda${CUDA_MAJOR} libnccl-dev=2.2.13-1+cuda${CUDA_MAJOR} &&"
     else
         NCCL_DEPS=""
     fi

python/paddle/dataset/image.py

Lines changed: 2 additions & 4 deletions

@@ -104,7 +104,7 @@ def batch_images_from_tar(data_file,
             pickle.dump(
                 output,
                 open('%s/batch_%d' % (out_path, file_id), 'wb'),
-                protocol=pickle.HIGHEST_PROTOCOL)
+                protocol=2)
             file_id += 1
             data = []
             labels = []
@@ -113,9 +113,7 @@ def batch_images_from_tar(data_file,
     output['label'] = labels
     output['data'] = data
     pickle.dump(
-        output,
-        open('%s/batch_%d' % (out_path, file_id), 'wb'),
-        protocol=pickle.HIGHEST_PROTOCOL)
+        output, open('%s/batch_%d' % (out_path, file_id), 'wb'), protocol=2)
 
     with open(meta_file, 'a') as meta:
         for file in os.listdir(out_path):
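
The switch from `pickle.HIGHEST_PROTOCOL` to `protocol=2` matters for Python 2/3 interoperability: under Python 3, `HIGHEST_PROTOCOL` is 3 or higher and such files cannot be loaded by Python 2, while protocol 2 is readable by both. A small illustrative sketch (the file name `batch_0` is only an example, not taken from the dataset code):

```python
# Illustrative sketch: a protocol=2 pickle can be loaded by both Python 2 and
# Python 3; HIGHEST_PROTOCOL under Python 3 writes files Python 2 cannot read.
import pickle

output = {'label': [0, 1], 'data': [b'img0', b'img1']}
with open('batch_0', 'wb') as f:
    pickle.dump(output, f, protocol=2)

with open('batch_0', 'rb') as f:
    print(pickle.load(f))
```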

python/paddle/fluid/tests/unittests/test_dist_base.py

Lines changed: 10 additions & 1 deletion

@@ -55,6 +55,7 @@ def run_pserver(self, args):
         pserver_prog = t.get_pserver_program(args.current_endpoint)
         startup_prog = t.get_startup_program(args.current_endpoint,
                                              pserver_prog)
+
         place = fluid.CPUPlace()
         exe = fluid.Executor(place)
         exe.run(startup_prog)
@@ -147,6 +148,8 @@ def runtime_main(test_class):
 
 
 import paddle.compat as cpt
+import socket
+from contextlib import closing
 
 
 class TestDistBase(unittest.TestCase):
@@ -156,13 +159,19 @@ def _setup_config(self):
     def setUp(self):
         self._trainers = 2
         self._pservers = 2
-        self._ps_endpoints = "127.0.0.1:9123,127.0.0.1:9124"
+        self._ps_endpoints = "127.0.0.1:%s,127.0.0.1:%s" % (
+            self._find_free_port(), self._find_free_port())
         self._python_interp = "python"
         self._sync_mode = True
         self._mem_opt = False
         self._use_reduce = False
         self._setup_config()
 
+    def _find_free_port(self):
+        with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
+            s.bind(('', 0))
+            return s.getsockname()[1]
+
     def start_pserver(self, model_file, check_error_log):
         ps0_ep, ps1_ep = self._ps_endpoints.split(",")
         ps_cmd = "%s %s --role pserver --endpoints %s --trainer_id 0 --current_endpoint %s --trainers %d --is_dist"
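
The fixed ports 9123/9124 could collide with other jobs on the same CI machine; binding to port 0 lets the OS hand out an unused port instead. A self-contained sketch of the same pattern (note the small race window between closing the socket and the pserver re-binding the port):

```python
# Standalone version of the free-port pattern used in the test: bind to port 0
# so the OS assigns an unused port, read it back, then release the socket.
import socket
from contextlib import closing

def find_free_port():
    with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
        s.bind(('', 0))
        return s.getsockname()[1]

ps_endpoints = "127.0.0.1:%s,127.0.0.1:%s" % (find_free_port(), find_free_port())
print(ps_endpoints)
```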

python/paddle/fluid/tests/unittests/test_parallel_executor_fetch_feed.py

Lines changed: 2 additions & 0 deletions

@@ -85,6 +85,7 @@ def parallel_exe(self, train_inputs, seed, use_cuda):
             assert not math.isnan(np.sum(ret[i])) and \
                 not math.isinf(np.sum(ret[i]))
 
+    @unittest.skip(reason="CI timeout")
     def test_fetch_op(self):
         tst_reader = paddle.batch(flowers.test(use_xmap=False), batch_size=16)
         tst_reader_iter = tst_reader()
@@ -139,6 +140,7 @@ def parallel_exe(self, use_cuda, seed):
             if batch_id == 2:
                 break
 
+    @unittest.skip(reason="CI timeout")
     def test_feed_op(self):
         os.environ['CPU_NUM'] = str(4)
         if core.is_compiled_with_cuda():
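
For reference, `@unittest.skip` disables a test unconditionally and reports the given reason instead of running the body; a minimal standalone example, unrelated to the Paddle test classes themselves:

```python
# Minimal standalone example of the decorator used above: the body never runs
# and the reason string appears in the test report as "skipped".
import unittest

class SkipDemo(unittest.TestCase):
    @unittest.skip(reason="CI timeout")
    def test_slow(self):
        self.fail("never runs")

if __name__ == '__main__':
    unittest.main()
```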
