Commit 5c892db

remove unused code
refine comments and bias handling; fix typo and TODO
1 parent 4f41eaf commit 5c892db

4 files changed (+37, -38 lines)

paddle/gserver/layers/MKLDNNConvLayer.cpp

Lines changed: 4 additions & 4 deletions
@@ -210,11 +210,11 @@ void MKLDNNConvLayer::resetFwdBuffers(
 
   resetWithMatrix(wgt, weight_->getW(), pd->weights_primitive_desc());
 
-  bias = nullptr;
-  if (biases_ == nullptr || biases_->getW() == nullptr) {
-    return;
+  if (biases_ && biases_->getW()) {
+    resetWithMatrix(bias, biases_->getW(), pd->bias_primitive_desc());
+  } else {
+    bias = nullptr;
   }
-  resetWithMatrix(bias, biases_->getW(), pd->bias_primitive_desc());
 }
 
 void MKLDNNConvLayer::resetFwdPipeline(
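
All three bias-buffer hunks in this commit (here and in the fc layer below) apply the same refactor: instead of clearing bias and returning early, the reset is a single if/else, so every path through the function leaves bias well defined. A minimal standalone sketch of the pattern, with hypothetical names (Buffer, Params, resetBiasOld/resetBiasNew) standing in for the MKLDNNMatrix machinery:

#include <memory>

// Hypothetical stand-ins for MKLDNNMatrixPtr and the layer's parameters.
using Buffer = std::shared_ptr<int>;
struct Params {
  Buffer weights;  // may be null when the layer has no bias
};

// Before: clear, then early-return. The clear and the reset sit far apart,
// and a variant that forgets the clear leaves `bias` stale (see the fc diff).
void resetBiasOld(Buffer& bias, const Params* biases) {
  bias = nullptr;
  if (biases == nullptr || biases->weights == nullptr) {
    return;
  }
  bias = biases->weights;
}

// After: one if/else, mirroring the commit. Every path assigns `bias`.
void resetBiasNew(Buffer& bias, const Params* biases) {
  if (biases && biases->weights) {
    bias = biases->weights;
  } else {
    bias = nullptr;
  }
}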

paddle/gserver/layers/MKLDNNFcLayer.cpp

Lines changed: 9 additions & 12 deletions
@@ -134,10 +134,6 @@ void MKLDNNFcLayer::resetFwdBuffers(MKLDNNMatrixPtr& in,
   CHECK(in);
   in->downSpatial();
 
-  // if (extInVal_) {
-  //   extInVal_->downSpatial();
-  // }
-
   auto outPD =
       MKLDNNMatrix::createPrimitiveDesc({bs_, oc_}, format::nc, engine_);
   resetOutValue(out, outPD);
@@ -153,11 +149,12 @@
   resetWithMatrix(wgt, weight_->getW(), wgtPD);
   wgt->downSpatial();
 
-  if (biases_ == nullptr || biases_->getW() == nullptr) {
-    return;
+  if (biases_ && biases_->getW()) {
+    auto biasPD = MKLDNNMatrix::createPrimitiveDesc({oc_}, format::x, engine_);
+    resetWithMatrix(bias, biases_->getW(), biasPD);
+  } else {
+    bias = nullptr;
   }
-  auto biasPD = MKLDNNMatrix::createPrimitiveDesc({oc_}, format::x, engine_);
-  resetWithMatrix(bias, biases_->getW(), biasPD);
 }
 
 void MKLDNNFcLayer::resetFwdPD(std::shared_ptr<fc_fwd::primitive_desc>& pd,
@@ -207,11 +204,11 @@ void MKLDNNFcLayer::resetBwdBuffers(MKLDNNMatrixPtr& in,
   CHECK(wgtVal_);
   resetWithMatrix(wgt, weight_->getWGrad(), wgtVal_->getPrimitiveDesc());
 
-  bias = nullptr;
-  if (biasVal_ == nullptr) {
-    return;
+  if (biasVal_) {
+    resetWithMatrix(bias, biases_->getWGrad(), biasVal_->getPrimitiveDesc());
+  } else {
+    bias = nullptr;
   }
-  resetWithMatrix(bias, biases_->getWGrad(), biasVal_->getPrimitiveDesc());
 }
 
 void MKLDNNFcLayer::resetBwdWgtPD(
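
The forward-buffer hunk here appears to be the "bias fix" from the commit message: unlike the conv and backward variants, the old code returned early without first clearing bias, so a bias-less layer could leave the output argument stale. The if/else form assigns bias on both paths, as in the resetBiasNew sketch above. (The first hunk simply deletes a commented-out extInVal_->downSpatial() block that was dead code.)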

paddle/gserver/layers/MKLDNNLayer.cpp

Lines changed: 4 additions & 3 deletions
@@ -60,7 +60,7 @@ void MKLDNNLayer::forward(PassType passType) {
     resetFwd(pipelineFwd_, inVal_, wgtVal_, biasVal_, outVal_);
     // MKLDNNLayer output value should be MKLDNNMatrix
     // so external output value is necessary.
-    // then external input value is not necessary,
+    // Then external input value is not necessary,
     // since input may be mkldnn internal buffer.
     CHECK(extOutVal_) << "external output value is necessary";
     output_.value = std::dynamic_pointer_cast<Matrix>(extOutVal_);
@@ -235,8 +235,8 @@ void MKLDNNLayer::resetInGrad(MKLDNNMatrixPtr& in,
   in = MKLDNNMatrix::create(intPD, inMat);
   Argument& arg = input->getOutput(this->getName());
   arg.grad = std::dynamic_pointer_cast<Matrix>(in);
-  CHECK(inVal_ != nullptr && inVal_->getPrimitiveDesc() == intPD)
-      << "should have internal input value and primitive desc must equal";
+  CHECK(inVal_);
+  CHECK(inVal_->getPrimitiveDesc() == intPD) << "the primitive desc must equal";
   if (inputIsOnlyMKLDNN()) {
     return;
   }
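
Splitting the combined CHECK is more than style: with glog-style CHECK macros (which Paddle uses), each failing condition reports its own expression, so a null inVal_ is immediately distinguishable from a primitive-desc mismatch. A minimal sketch, assuming glog; the names checkSeparately, value, and descMatches are illustrative, not from the diff:

#include <glog/logging.h>

// Illustrative only: two separate CHECKs pinpoint which invariant broke.
void checkSeparately(const int* value, bool descMatches) {
  // On failure prints "Check failed: value" -- the null pointer is named.
  CHECK(value);
  // On failure prints "Check failed: descMatches the primitive desc must equal".
  CHECK(descMatches) << "the primitive desc must equal";
}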
@@ -246,6 +246,7 @@
     return;
   }
   // need create reorder
+  // TODO(TJ): add macro definition to simplify it
   CHECK(extInVal_ != nullptr && isPaddleFormat(extInVal_->getFormat()))
       << "should have external input value and the format must be nchw(nc)";
   extInGrad_ = MKLDNNMatrix::create(extInVal_->getPrimitiveDesc(), inMat);

paddle/gserver/layers/MKLDNNLayer.h

Lines changed: 20 additions & 19 deletions
@@ -58,14 +58,15 @@ class MKLDNNLayer : public Layer {
   std::vector<mkldnn::primitive> pipelineFwd_;
   std::vector<mkldnn::primitive> pipelineBwd_;
 
-  /// value and grad are seperated as internal and external buffers.
-  /// each MKLDNNLayer must init or reset internal buffer at least,
-  /// and the external buffer format is always nchw of nc(when h==w==1),
-  /// which is the same format as paddle.
-  /// The output_.value and output_.grad always save the external data,
-  /// when mixed with cpu device.
-  /// When all layers are mkldnn layers, they could save internal data.
-  /// below MKLDNNMatrix buffers are all internal buffers
+  /* Value and grad are seperated as internal and external buffers.
+   * Each MKLDNNLayer must init or reset internal buffer at least,
+   * and the external buffer format is always nchw of nc(when h==w==1),
+   * which is the same format as paddle.
+   * The output_.value and output_.grad always save the external data,
+   * when mixed with cpu device.
+   * When all layers are mkldnn layers, they could save internal data.
+   */
+  // below MKLDNNMatrix buffers are all internal buffers
   MKLDNNMatrixPtr inVal_;
   MKLDNNMatrixPtr inGrad_;
   MKLDNNMatrixPtr outVal_;
@@ -120,8 +121,8 @@
   ~MKLDNNLayer() {}
 
   virtual bool init(const LayerMap& layerMap, const ParameterMap& parameterMap);
-  void forward(PassType passType) override;
-  void backward(const UpdateCallback& callback) override;
+  virtual void forward(PassType passType);
+  virtual void backward(const UpdateCallback& callback);
 
   /**
    * reshape the input image sizes
@@ -217,15 +218,15 @@
    * reset output grad from internal primitive desc.
    * merge grad if necessary.
    * reset both internal and external buffer and create reorder if necessary.
-   * note: about merge grad, when this layer has serval outputs,
+   * note: about merge grad, when this layer has several outputs,
    * it could not be mixed with cpu device,
    * since it can not get memory desc from cpu device.
    */
   void resetOutGrad(MKLDNNMatrixPtr& out, mkldnn::memory::primitive_desc intPD);
 
   /**
    * reset the merge grad primitive if necessary.
-   * note: do not support the grads are mixed with cpu device,
+   * note: do not support the grads mixed with cpu device,
    * since it can not get memory desc from cpu device.
    */
   void resetMergeGrad(MKLDNNMatrixPtr& out);
@@ -313,17 +314,17 @@
    * print the mkldnn memory format of grad
    */
   virtual void printGradFormat() {
-    if (extInGrad_) {
-      VLOG(MKLDNN_FMTS) << extInGrad_->getFormat() << " <<< ";
-    }
-    if (inGrad_) {
-      VLOG(MKLDNN_FMTS) << inGrad_->getFormat() << " <<<";
+    if (extOutGrad_) {
+      VLOG(MKLDNN_FMTS) << extOutGrad_->getFormat();
     }
     if (outGrad_) {
       VLOG(MKLDNN_FMTS) << outGrad_->getFormat() << " <<< ";
     }
-    if (extOutGrad_) {
-      VLOG(MKLDNN_FMTS) << extOutGrad_->getFormat();
+    if (inGrad_) {
+      VLOG(MKLDNN_FMTS) << inGrad_->getFormat() << " <<<";
+    }
+    if (extInGrad_) {
+      VLOG(MKLDNN_FMTS) << extInGrad_->getFormat() << " <<< ";
     }
     if (wgtGrad_) {
       VLOG(MKLDNN_FMTS) << "Weight grad format: " << wgtGrad_->getFormat();
