
Commit 784e242

Author: gaoyuan
Remove redundancy codes
1 parent: 57c355a

File tree

4 files changed: +11 / −7 lines

doc/api/v2/config/layer.rst

Lines changed: 6 additions & 0 deletions

@@ -109,6 +109,12 @@ sum_to_one_norm
     :members: sum_to_one_norm
     :noindex:
 
+cross_channel_norm
+---------------
+.. automodule:: paddle.v2.layer
+    :members: cross_channel_norm
+    :noindex:
+
 Recurrent Layers
 ================
 
paddle/gserver/layers/CrossChannelNormLayer.cpp

Lines changed: 0 additions & 2 deletions

@@ -40,7 +40,6 @@ void CrossChannelNormLayer::forward(PassType passType) {
   normBuffer_->addScalar(*normBuffer_, 1e-6);
   inV->square2(*dataBuffer_);
   for (size_t i = 0; i < batchSize; i++) {
-    spatialBuffer_->zeroMem();
     MatrixPtr inTmp = Matrix::create(
         inV->getData() + i * dataDim, channels_, spatialDim, false, useGpu_);
     MatrixPtr dataTmp = Matrix::create(dataBuffer_->getData() + i * dataDim,
@@ -80,7 +79,6 @@ void CrossChannelNormLayer::backward(const UpdateCallback& callback) {
   scaleDiff_->zeroMem();
   for (size_t i = 0; i < batchSize; i++) {
     spatialBuffer_->zeroMem();
-    channelBuffer_->zeroMem();
     // propagate to param.
     MatrixPtr dataBufferTmp =
         Matrix::create(dataBuffer_->getData() + i * dataDim,
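For context, the computation this file implements (per the header comment in NormLayer.h below) is a per-sample L2 normalization across channels followed by a trainable per-channel rescale. In that formulation each temporary is fully overwritten before it is read, which appears to be why the removed zeroMem() calls were redundant, consistent with the commit message. A minimal NumPy sketch of the forward pass follows; the shapes, function name, and epsilon placement are illustrative assumptions, not Paddle API:

    import numpy as np

    def cross_channel_norm_forward(x, scale, eps=1e-6):
        """Per-sample L2 normalization across channels, then a trainable
        per-channel rescale. Shapes: x is (batch, channels, spatial),
        scale is (channels,). Illustrative sketch only; the eps mirrors
        the 1e-6 addScalar call in forward(), though its exact placement
        in the C++ code may differ.
        """
        # L2 norm over the channel axis at every spatial position
        # (the role played by dataBuffer_/spatialBuffer_ above).
        norm = np.sqrt((x ** 2).sum(axis=1, keepdims=True)) + eps
        # Normalize, then rescale each channel by its trainable factor.
        return x / norm * scale[None, :, None]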

paddle/gserver/layers/NormLayer.h

Lines changed: 4 additions & 5 deletions

@@ -66,11 +66,10 @@ class ResponseNormLayer : public NormLayer {
 };
 
 /**
- * This layer applys normalize across the channels of each sample to a
- * conv layer's output and scale the output by a group of trainable factors
- * which dimensions equal to the channel's number.
- * - Input: One and only one input layer are accepted. The input layer must be
- *   be a data output layer.
+ * This layer applys normalization across the channels of each sample to a
+ * conv layer's output, and scales the output by a group of trainable factors
+ * whose dimensions equal to the number of channels.
+ * - Input: One and only one input layer are accepted.
 * - Output: The normalized data of the input data.
 * Reference:
 *   Wei Liu, Dragomir Anguelov, Dumitru Erhan, Christian Szegedy, Scott Reed,
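Since the header comment stresses that the scaling factors are trainable, the matching gradients may help readers follow the backward hunk above. The NumPy sketch below is a hand-derived illustration of what CrossChannelNormLayer::backward() computes (grad_scale playing the role of scaleDiff_), under the same illustrative shapes as before; it is not the actual Paddle implementation:

    import numpy as np

    def cross_channel_norm_backward(x, scale, grad_out, eps=1e-6):
        """Gradients of y = scale * x / ||x||_2, norm taken across channels.

        Shapes: x and grad_out are (batch, channels, spatial);
        scale is (channels,). Illustrative sketch only.
        """
        norm = np.sqrt((x ** 2).sum(axis=1, keepdims=True)) + eps
        x_hat = x / norm
        # Per-channel scale gradient: reduce over batch and spatial
        # positions (the accumulation scaleDiff_ performs in C++).
        grad_scale = (grad_out * x_hat).sum(axis=(0, 2))
        # Input gradient: chain rule through the normalization.
        g = grad_out * scale[None, :, None]
        grad_x = g / norm - x * (g * x).sum(axis=1, keepdims=True) / norm ** 3
        return grad_x, grad_scale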

python/paddle/trainer_config_helpers/layers.py

Lines changed: 1 addition & 0 deletions

@@ -1015,6 +1015,7 @@ def cross_channel_norm_layer(input, name=None, param_attr=None):
     This layer applys normalize across the channels of each sample to
     a conv layer's output and scale the output by a group of trainable
     factors which dimensions equal to the channel's number.
+
     :param name: The Layer Name.
     :type name: basestring
     :param input: The input layer.
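For readers unfamiliar with this helper, a hypothetical v1-style configuration using it is sketched below. Only the cross_channel_norm_layer signature comes from the hunk above; the surrounding layers, names, sizes, and parameter values are invented for illustration:

    # Hypothetical config sketch; layer names and sizes are invented.
    from paddle.trainer_config_helpers import *

    data = data_layer(name='image', size=3 * 300 * 300)
    conv = img_conv_layer(input=data, num_channels=3, filter_size=3,
                          num_filters=64, act=ReluActivation())
    # Normalize conv activations across channels; param_attr configures
    # the trainable per-channel scale (one factor per channel).
    norm = cross_channel_norm_layer(input=conv, name='conv_norm',
                                    param_attr=ParamAttr(initial_mean=20.0,
                                                         initial_std=0.0))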
