Skip to content

Commit b97931e

Browse files
committed
Merge pull request #1136 from vpisarev:dnn5
2 parents 3908909 + 7578908 commit b97931e

File tree

7 files changed

+594
-551
lines changed

7 files changed

+594
-551
lines changed

modules/dnn/samples/torch_enet.cpp

Lines changed: 10 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -98,14 +98,19 @@ int main(int argc, char **argv)
9898
net.setBlob("", inputBlob); //set the network input
9999
//! [Set input blob]
100100

101+
const int N = 3;
101102
TickMeter tm;
102-
tm.start();
103103

104104
//! [Make forward pass]
105-
net.forward(); //compute output
106-
//! [Make forward pass]
107-
108-
tm.stop();
105+
for( int i = 0; i < N; i++ )
106+
{
107+
TickMeter tm_;
108+
tm_.start();
109+
net.forward(); //compute output
110+
tm_.stop();
111+
if( i == 0 || tm_.getTimeTicks() < tm.getTimeTicks() )
112+
tm = tm_;
113+
}
109114

110115
//! [Gather output]
111116

modules/dnn/src/layers/batch_norm_layer.cpp

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -41,6 +41,15 @@ class BatchNormLayerImpl : public BatchNormLayer
41 41          Mat* inp = inputs[i];
42 42          outputs[i].create(inp->dims, &inp->size.p[0], inp->type());
43 43      }
   44 +
   45 +    varMeanScale = 1.f;
   46 +    if (!hasWeights && !hasBias) {
   47 +        varMeanScale = *blobs[2].ptr<float>();
   48 +        if (varMeanScale != 0)
   49 +            varMeanScale = 1/varMeanScale;
   50 +    }
   51 +
   52 +    cv::pow(blobs[1]*varMeanScale + epsilon, -0.5, invStdMat);
44 53  }
45 54
46 55  void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs)
@@ -52,16 +61,6 @@ class BatchNormLayerImpl : public BatchNormLayer
52 61      int weightsBlobIndex = 2;
53 62      int biasBlobIndex = weightsBlobIndex + hasWeights;
54 63
55    -    float varMeanScale = 1;
56    -    if (!hasWeights && !hasBias) {
57    -        varMeanScale = *blobs[2].ptr<float>();
58    -        if (varMeanScale != 0)
59    -            varMeanScale = 1/varMeanScale;
60    -    }
61    -
62    -    Mat invStdMat;
63    -    cv::pow(blobs[1]*varMeanScale + epsilon, -0.5, invStdMat);
64    -
65 64      int rows = inpBlob.size[2];
66 65      int cols = inpBlob.size[3];
67 66

@@ -92,7 +91,8 @@ class BatchNormLayerImpl : public BatchNormLayer
92 91      }
93 92
94 93      bool hasWeights, hasBias;
95    -    float epsilon;
   94 +    float epsilon, varMeanScale;
   95 +    Mat invStdMat;
96 96  };
97 97
98 98  Ptr<BatchNormLayer> BatchNormLayer::create(const LayerParams& params)

0 commit comments

Comments
 (0)