Skip to content

Commit ac29d00

Browse files
author
ranqiu
committed
Update doc of layers.py
2 parents de2bc5d + 4adc8a7 commit ac29d00

File tree

195 files changed

+3068
-312
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

195 files changed

+3068
-312
lines changed

.gitignore

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -21,7 +21,7 @@ third_party/
2121
cmake-build-*
2222

2323
# generated while compiling
24-
python/paddle/v2/framework/core.so
24+
python/paddle/v2/fluid/core.so
2525
paddle/pybind/pybind.h
2626
CMakeFiles
2727
cmake_install.cmake

paddle/framework/backward.cc

Lines changed: 27 additions & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -377,6 +377,12 @@ std::vector<std::unique_ptr<OpDescBind>> MakeOpGrad(
377377
return grad_op_descs;
378378
}
379379

380+
static BlockDescBind* CreateStepBlock(
381+
ProgramDescBind& program_desc,
382+
std::unordered_set<std::string>* no_grad_vars,
383+
std::unordered_map<std::string, std::string>* grad_to_var,
384+
int step_block_idx);
385+
380386
std::vector<std::unique_ptr<OpDescBind>> MakeBlockBackward(
381387
ProgramDescBind& program_desc, int block_idx,
382388
std::unordered_set<std::string>* no_grad_vars,
@@ -392,13 +398,13 @@ std::vector<std::unique_ptr<OpDescBind>> MakeBlockBackward(
392398

393399
if ((*it)->Type() == "recurrent") {
394400
int step_block_idx = (*it)->GetBlockAttr("step_block");
395-
auto backward_block_op_descs = MakeBlockBackward(
396-
program_desc, step_block_idx, no_grad_vars, grad_to_var);
401+
BlockDescBind* backward_block = CreateStepBlock(
402+
program_desc, no_grad_vars, grad_to_var, step_block_idx);
403+
op_grads = MakeOpGrad(*it, no_grad_vars, grad_to_var, {backward_block});
404+
} else if ((*it)->Type() == "conditional_block") {
397405
BlockDescBind* backward_block =
398-
program_desc.AppendBlock(*program_desc.MutableBlock(step_block_idx));
399-
for (auto& ptr : backward_block_op_descs) {
400-
backward_block->AppendAllocatedOp(std::move(ptr));
401-
}
406+
CreateStepBlock(program_desc, no_grad_vars, grad_to_var,
407+
(*it)->GetBlockAttr("block"));
402408
op_grads = MakeOpGrad(*it, no_grad_vars, grad_to_var, {backward_block});
403409
} else {
404410
op_grads = MakeOpGrad(*it, no_grad_vars, grad_to_var);
@@ -449,6 +455,21 @@ std::vector<std::unique_ptr<OpDescBind>> MakeBlockBackward(
449455
return backward_descs;
450456
}
451457

458+
static BlockDescBind* CreateStepBlock(
459+
ProgramDescBind& program_desc,
460+
std::unordered_set<std::string>* no_grad_vars,
461+
std::unordered_map<std::string, std::string>* grad_to_var,
462+
int step_block_idx) {
463+
auto backward_block_op_descs = MakeBlockBackward(program_desc, step_block_idx,
464+
no_grad_vars, grad_to_var);
465+
BlockDescBind* backward_block =
466+
program_desc.AppendBlock(*program_desc.MutableBlock(step_block_idx));
467+
for (auto& ptr : backward_block_op_descs) {
468+
backward_block->AppendAllocatedOp(move(ptr));
469+
}
470+
return backward_block;
471+
}
472+
452473
ParamGradInfoMap AppendBackward(
453474
ProgramDescBind& program_desc, const VarDescBind& target,
454475
const std::unordered_set<std::string>& no_grad_vars) {

paddle/framework/var_type.h

Lines changed: 22 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -27,10 +27,32 @@ inline VarDesc::VarType ToVarType(std::type_index type) {
2727
return VarDesc_VarType_LOD_RANK_TABLE;
2828
} else if (type.hash_code() == typeid(LoDTensorArray).hash_code()) {
2929
return VarDesc_VarType_LOD_TENSOR_ARRAY;
30+
} else if (type.hash_code() == typeid(SelectedRows).hash_code()) {
31+
return VarDesc_VarType_SELECTED_ROWS;
3032
} else {
3133
PADDLE_THROW("ToVarType:Unsupported type %s", type.name());
3234
}
3335
}
3436

37+
template <typename Visitor>
38+
inline void VisitVarType(const Variable& var, Visitor visitor) {
39+
switch (ToVarType(var.Type())) {
40+
case VarDesc_VarType_LOD_TENSOR:
41+
visitor(var.Get<framework::LoDTensor>());
42+
return;
43+
case VarDesc_VarType_LOD_RANK_TABLE:
44+
visitor(var.Get<LoDRankTable>());
45+
return;
46+
case VarDesc_VarType_LOD_TENSOR_ARRAY:
47+
visitor(var.Get<LoDTensorArray>());
48+
return;
49+
case VarDesc_VarType_SELECTED_ROWS:
50+
visitor(var.Get<SelectedRows>());
51+
return;
52+
default:
53+
PADDLE_THROW("Not supported visit type, %d", ToVarType(var.Type()));
54+
}
55+
}
56+
3557
} // namespace framework
3658
} // namespace paddle

paddle/gserver/layers/MKLDNNAddtoLayer.cpp

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -54,7 +54,6 @@ void MKLDNNAddtoLayer::reshape(
5454
ow = iw;
5555
reshapeOutput(oh, ow);
5656
resizeOutput(bs, oc * oh * ow);
57-
printSizeInfo();
5857
}
5958

6059
void MKLDNNAddtoLayer::resetFwd(std::vector<primitive>& pipeline,

paddle/gserver/layers/MKLDNNBatchNormLayer.cpp

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -125,7 +125,6 @@ void MKLDNNBatchNormLayer::reshape(
125125
<< "Input channel can not be changed";
126126
reshapeOutput(oh, ow);
127127
resizeOutput(bs, oc * oh * ow);
128-
printSizeInfo();
129128
}
130129

131130
void MKLDNNBatchNormLayer::resetFwd(std::vector<primitive>& pipeline,

paddle/gserver/layers/MKLDNNConvLayer.cpp

Lines changed: 0 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -102,8 +102,6 @@ void MKLDNNConvLayer::reshape(
102102

103103
reshapeOutput(oh, ow);
104104
resizeOutput(bs, oc * oh * ow);
105-
106-
printSizeInfo();
107105
}
108106

109107
void MKLDNNConvLayer::resetFwd(std::vector<primitive>& pipeline,

paddle/gserver/layers/MKLDNNConvLayer.h

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -92,7 +92,7 @@ class MKLDNNConvLayer : public MKLDNNLayer {
9292
void printSizeInfo() override {
9393
MKLDNNLayer::printSizeInfo();
9494
VLOG(MKLDNN_SIZES) << getName() << ": fh: " << fh_ << ", fw: " << fw_
95-
<< ": ph: " << ph_ << ", pw: " << pw_ << ", sh: " << sh_
95+
<< ", ph: " << ph_ << ", pw: " << pw_ << ", sh: " << sh_
9696
<< ", sw: " << sw_ << ", dh: " << dh_ << ", dw: " << dw_;
9797
}
9898

paddle/gserver/layers/MKLDNNFcLayer.cpp

Lines changed: 0 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -84,8 +84,6 @@ void MKLDNNFcLayer::reshape(
8484

8585
reshapeOutput(oh, ow);
8686
resizeOutput(bs, oc);
87-
88-
printSizeInfo();
8987
}
9088

9189
void MKLDNNFcLayer::resetFwd(std::vector<primitive>& pipeline,

paddle/gserver/layers/MKLDNNPoolLayer.cpp

Lines changed: 0 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -71,8 +71,6 @@ void MKLDNNPoolLayer::reshape(
7171
reshapeOutput(oh, ow);
7272

7373
resizeOutput(bs, oc * oh * ow);
74-
75-
printSizeInfo();
7674
}
7775

7876
void MKLDNNPoolLayer::resetFwd(std::vector<primitive>& pipeline,

paddle/gserver/layers/ROIPoolLayer.cpp

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -98,7 +98,7 @@ void ROIPoolLayer::forward(PassType passType) {
9898
size_t roiStartH = round(bottomROIs[2] * spatialScale_);
9999
size_t roiEndW = round(bottomROIs[3] * spatialScale_);
100100
size_t roiEndH = round(bottomROIs[4] * spatialScale_);
101-
CHECK_GE(roiBatchIdx, 0);
101+
CHECK_GE(roiBatchIdx, 0UL);
102102
CHECK_LT(roiBatchIdx, batchSize);
103103
size_t roiHeight = std::max(roiEndH - roiStartH + 1, 1UL);
104104
size_t roiWidth = std::max(roiEndW - roiStartW + 1, 1UL);

0 commit comments

Comments (0)