Commit d23bae7

Merge branch 'develop' of github.com:baidu/Paddle into feature/c_api

2 parents: fe8d5ff + 9e6c8cd
7 files changed: +46 additions, −11 deletions

cmake/external/python.cmake

Lines changed: 4 additions & 0 deletions
```diff
@@ -26,6 +26,10 @@ IF(PYTHONLIBS_FOUND AND PYTHONINTERP_FOUND)
   find_python_module(wheel REQUIRED)
   find_python_module(google.protobuf REQUIRED)
   FIND_PACKAGE(NumPy REQUIRED)
+  IF(${PY_GOOGLE.PROTOBUF_VERSION} VERSION_LESS "3.0.0")
+    MESSAGE(FATAL_ERROR "Found Python Protobuf ${PY_GOOGLE.PROTOBUF_VERSION} < 3.0.0, "
+            "please use pip to upgrade protobuf.")
+  ENDIF(${PY_GOOGLE.PROTOBUF_VERSION} VERSION_LESS "3.0.0")
 ELSE(PYTHONLIBS_FOUND AND PYTHONINTERP_FOUND)
   ##################################### PYTHON ########################################
   SET(PYTHON_SOURCES_DIR ${THIRD_PARTY_PATH}/python)
```
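The effect of this gate can be sketched in plain Python (a minimal sketch, not part of the commit; it assumes `google.protobuf` is importable and uses `distutils`' `LooseVersion` as a stand-in for CMake's `VERSION_LESS` comparison):

```python
# Minimal sketch of the CMake protobuf gate, assuming google.protobuf is
# installed. LooseVersion stands in for CMake's VERSION_LESS comparison.
from distutils.version import LooseVersion

import google.protobuf

if LooseVersion(google.protobuf.__version__) < LooseVersion("3.0.0"):
    raise SystemExit(
        "Found Python Protobuf %s < 3.0.0, please use pip to upgrade protobuf."
        % google.protobuf.__version__)
```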

cmake/python_module.cmake

Lines changed: 13 additions & 0 deletions
```diff
@@ -26,5 +26,18 @@ function(find_python_module module)
   if(NOT PY_${module_upper}_FOUND AND ${module}_FIND_REQUIRED)
     message(FATAL_ERROR "python module ${module} is not found")
   endif()
+
+  execute_process(COMMAND "${PYTHON_EXECUTABLE}" "-c"
+    "import sys, ${module}; sys.stdout.write(${module}.__version__)"
+    OUTPUT_VARIABLE _${module}_version
+    RESULT_VARIABLE _${module}_status
+    ERROR_QUIET
+    OUTPUT_STRIP_TRAILING_WHITESPACE)
+  if(NOT _${module}_status)
+    set(PY_${module_upper}_VERSION ${_${module}_version} CACHE STRING
+      "Version of Python module ${module}")
+  endif(NOT _${module}_status)
+
   set(PY_${module_upper}_FOUND ${PY_${module_upper}_FOUND} PARENT_SCOPE)
+  set(PY_${module_upper}_VERSION ${PY_${module_upper}_VERSION} PARENT_SCOPE)
 endfunction(find_python_module)
```
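The probe that `execute_process` runs is an ordinary Python one-liner; its behaviour can be reproduced outside CMake (a sketch, not from the commit; it assumes the target module exposes `__version__`, as `google.protobuf` does):

```python
# Sketch of the version probe that execute_process runs, assuming the target
# module exposes __version__ (google.protobuf does). A zero exit status is
# what lets the if(NOT _${module}_status) branch cache the captured version.
import subprocess
import sys

module = "google.protobuf"
probe = "import sys, %s; sys.stdout.write(%s.__version__)" % (module, module)
result = subprocess.run([sys.executable, "-c", probe],
                        capture_output=True, text=True)
if result.returncode == 0:
    print("PY_GOOGLE.PROTOBUF_VERSION =", result.stdout.strip())
```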

doc/howto/usage/k8s/k8s_distributed_cn.md

Lines changed: 4 additions & 0 deletions
````diff
@@ -159,6 +159,8 @@ docker build -t your_repo/paddle:mypaddle .
 docker push your_repo/paddle:mypaddle
 ```
 
+Note that `your_repo` in the commands above is the address of the reader's Docker image registry and must be replaced with their own. The rest of this document uses `your_repo/paddle:mypaddle` to refer to the image built in this step.
+
 ### Uploading the training files
 
 This document uses PaddlePaddle's official [recommendation demo](http://www.paddlepaddle.org/doc/demo/index.html#recommendation) as the training content. We place the training files and data in a directory named after the job name and upload it to the MFS shared storage. Once done, the contents on MFS look roughly like this:
@@ -244,6 +246,8 @@ spec:
 
 `CONF_PADDLE_GRADIENT_NUM` is the number of training nodes, i.e. the `--num_gradient_servers` argument.
 
+A detailed description of these parameters can be found [here](http://www.paddlepaddle.org/doc/ui/cmd_argument/detail_introduction.html#parameter-server-and-distributed-communication).
+
 Once the YAML file is written, the job can be created with the Kubernetes command-line tool:
 
 ```bash
````

paddle/math/Matrix.cpp

Lines changed: 18 additions & 6 deletions
```diff
@@ -1311,7 +1311,9 @@ void GpuMatrix::paramReluForward(Matrix& data, Matrix& W) {
   real* w = W.getData();
   size_t numElements = data.getWidth();
   size_t numSamples = data.getHeight();
-  size_t partial_sum = numElements / (W.getHeight() * W.getWidth());
+  size_t paraSize = W.getHeight() * W.getWidth();
+  CHECK(!(numElements % paraSize));  // this check from ParameterReluLayer::init
+  size_t partial_sum = numElements / paraSize;
   real* output = getData();
   hl_param_relu_forward(output, input, w, numElements, numSamples, partial_sum);
 }
@@ -1324,7 +1326,9 @@ void GpuMatrix::paramReluBackwardW(Matrix& oGrad, Matrix& data) {
   real* wgrad = data_;
   size_t numElements = data.getWidth();
   size_t numSamples = data.getHeight();
-  size_t partial_sum = numElements / (this->getHeight() * this->getWidth());
+  size_t paraSize = this->getHeight() * this->getWidth();
+  CHECK(!(numElements % paraSize));  // this check from ParameterReluLayer::init
+  size_t partial_sum = numElements / paraSize;
   hl_param_relu_backward_w(
       wgrad, ograd, input, numElements, numSamples, partial_sum);
 }
@@ -1336,7 +1340,9 @@ void GpuMatrix::paramReluBackwardDiff(Matrix& oGrad, Matrix& data, Matrix& W) {
   real* w = W.getData();
   size_t numElements = data.getWidth();
   size_t numSamples = data.getHeight();
-  size_t partial_sum = numElements / (W.getHeight() * W.getWidth());
+  size_t paraSize = W.getHeight() * W.getWidth();
+  CHECK(!(numElements % paraSize));  // this check from ParameterReluLayer::init
+  size_t partial_sum = numElements / paraSize;
   hl_param_relu_backward_diff(
       ograd, input, w, diff, numElements, numSamples, partial_sum);
 }
@@ -3764,7 +3770,9 @@ void CpuMatrix::paramReluForward(Matrix& data, Matrix& W) {
   real* w = W.getData();
   size_t numElements = data.getWidth();
   size_t numSamples = data.getHeight();
-  size_t partial_sum = numElements / (W.getHeight() * W.getWidth());
+  size_t paraSize = W.getHeight() * W.getWidth();
+  CHECK(!(numElements % paraSize));  // this check from ParameterReluLayer::init
+  size_t partial_sum = numElements / paraSize;
   for (size_t n = 0, k = 0; n < numSamples; ++n) {
     for (size_t i = 0; i < numElements; ++i, ++k) {
       data_[k] = input[k] > 0 ? input[k] : input[k] * w[i / partial_sum];
@@ -3778,7 +3786,9 @@ void CpuMatrix::paramReluBackwardW(Matrix& oGrad, Matrix& data) {
   real* wgrad = data_;
   size_t numElements = data.getWidth();
   size_t numSamples = data.getHeight();
-  size_t partial_sum = numElements / (this->getHeight() * this->getWidth());
+  size_t paraSize = this->getHeight() * this->getWidth();
+  CHECK(!(numElements % paraSize));  // this check from ParameterReluLayer::init
+  size_t partial_sum = numElements / paraSize;
   for (size_t n = 0, k = 0; n < numSamples; ++n) {
     for (size_t i = 0; i < numElements; ++i, ++k) {
       wgrad[i / partial_sum] += ograd[k] * (input[k] > 0 ? 0 : input[k]);
@@ -3793,7 +3803,9 @@ void CpuMatrix::paramReluBackwardDiff(Matrix& oGrad, Matrix& data, Matrix& W) {
   real* w = W.getData();
   size_t numElements = data.getWidth();
   size_t numSamples = data.getHeight();
-  size_t partial_sum = numElements / (W.getHeight() * W.getWidth());
+  size_t paraSize = W.getHeight() * W.getWidth();
+  CHECK(!(numElements % paraSize));  // this check from ParameterReluLayer::init
+  size_t partial_sum = numElements / paraSize;
   for (size_t n = 0, k = 0; n < numSamples; ++n) {
     for (size_t i = 0; i < numElements; ++i, ++k) {
       diff[k] += ograd[k] * (input[k] > 0 ? 1 : w[i / partial_sum]);
```
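All six hunks guard the same indexing scheme: each run of `partial_sum` consecutive elements in a row shares one learned slope, so `numElements` must divide evenly by the weight size, which is exactly what the new `CHECK` enforces. A NumPy stand-in for the CPU forward path (illustrative only, not PaddlePaddle code; names mirror the C++ above) makes the grouping explicit:

```python
# NumPy sketch of CpuMatrix::paramReluForward's indexing (not PaddlePaddle
# code). Each run of partial_sum consecutive elements in a row shares one
# slope w[j], so num_elements must be an exact multiple of the weight size.
import numpy as np

def param_relu_forward(data, w):
    num_samples, num_elements = data.shape
    para_size = w.size
    assert num_elements % para_size == 0  # the new CHECK in Matrix.cpp
    partial_sum = num_elements // para_size
    out = np.empty_like(data)
    for n in range(num_samples):
        for i in range(num_elements):
            slope = w.flat[i // partial_sum]
            out[n, i] = data[n, i] if data[n, i] > 0 else data[n, i] * slope
    return out

x = np.random.randn(10, 40).astype(np.float32)
w = np.full((2, 2), 0.25, dtype=np.float32)  # 40 % 4 == 0, so this is legal
y = param_relu_forward(x, w)
```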

paddle/math/tests/test_Matrix.cpp

Lines changed: 3 additions & 2 deletions
```diff
@@ -224,10 +224,11 @@ void testParamReluBackwardW(int height, int width, int w_height, int w_width) {
 }
 
 TEST(Matrix, paramRelu) {
-  for (auto height : {10, 100}) {
-    for (auto width : {10, 100}) {
+  for (auto height : {10, 40, 100}) {
+    for (auto width : {10, 40, 100}) {
       for (auto w_height : {1, 2}) {
         for (auto w_width : {1, 2}) {
+          if (width % (w_height * w_width)) continue;
           testParamReluForward(height, width, w_height, w_width);
           testParamReluBackwardW(height, width, w_height, w_width);
         }
```

paddle/math/tests/test_matrixCompare.cpp

Lines changed: 3 additions & 2 deletions
```diff
@@ -773,10 +773,11 @@ void testParamReluBackwardDiff(int height,
 }
 
 TEST(Matrix, paramReluBackwardDiff) {
-  for (auto height : {10, 100}) {
-    for (auto width : {10, 100}) {
+  for (auto height : {10, 40, 100}) {
+    for (auto width : {10, 40, 100}) {
       for (auto w_height : {1, 2}) {
         for (auto w_width : {1, 2}) {
+          if (width % (w_height * w_width)) continue;
           testParamReluBackwardDiff(height, width, w_height, w_width);
         }
       }
```
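The added `continue` in both test files mirrors the new `CHECK`: with 40 in the size lists, some (width, w_height, w_width) combinations are no longer evenly divisible, and those are skipped rather than tripping the assertion. A quick enumeration of the grid (plain Python, for illustration only) shows which combinations survive:

```python
# Enumerate the width/weight-shape grid from the two TEST cases above and
# apply the same divisibility filter the patch adds; a "skip" combination
# would otherwise violate the new CHECK in Matrix.cpp.
for width in (10, 40, 100):
    for w_height in (1, 2):
        for w_width in (1, 2):
            para_size = w_height * w_width
            status = "skip" if width % para_size else "run"
            print(f"width={width:3d} w_height={w_height} w_width={w_width} -> {status}")
```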

paddle/setup.py.in

Lines changed: 1 addition & 1 deletion
```diff
@@ -70,6 +70,6 @@ setup(name="py_paddle",
       include_dirs = include_dirs,
       install_requires = [
         'numpy>=1.8.0',   # The numpy is required.
-        'protobuf>=2.4.1' # The paddle protobuf version
+        'protobuf>=3.0.0' # The paddle protobuf version
       ],
 )
```
