Skip to content

Commit 2abf45d

Browse files
dkurt authored and vpisarev committed
Made some deep learning layers params are public (#1134)
1 parent 78fabfe commit 2abf45d

File tree

6 files changed

+21
-10
lines changed

6 files changed

+21
-10
lines changed

modules/dnn/include/opencv2/dnn/all_layers.hpp

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -329,6 +329,8 @@ namespace dnn
329329
class CV_EXPORTS ReLULayer : public Layer
330330
{
331331
public:
332+
float negativeSlope;
333+
332334
static Ptr<ReLULayer> create(const LayerParams &params);
333335
};
334336

@@ -365,6 +367,8 @@ namespace dnn
365367
class CV_EXPORTS PowerLayer : public Layer
366368
{
367369
public:
370+
float power, scale, shift;
371+
368372
static Ptr<PowerLayer> create(const LayerParams &params);
369373
};
370374

@@ -395,18 +399,27 @@ namespace dnn
395399
class CV_EXPORTS BatchNormLayer : public Layer
396400
{
397401
public:
402+
bool hasWeights, hasBias;
403+
float epsilon;
404+
398405
static Ptr<BatchNormLayer> create(const LayerParams &params);
399406
};
400407

401408
class CV_EXPORTS MaxUnpoolLayer : public Layer
402409
{
403410
public:
411+
Size poolKernel;
412+
Size poolPad;
413+
Size poolStride;
414+
404415
static Ptr<MaxUnpoolLayer> create(const LayerParams &params);
405416
};
406417

407418
class CV_EXPORTS ScaleLayer : public Layer
408419
{
409420
public:
421+
bool hasBias;
422+
410423
static Ptr<ScaleLayer> create(const LayerParams& params);
411424
};
412425

modules/dnn/src/layers/batch_norm_layer.cpp

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -91,9 +91,6 @@ class BatchNormLayerImpl : public BatchNormLayer
9191
}
9292
return flops;
9393
}
94-
95-
bool hasWeights, hasBias;
96-
float epsilon;
9794
};
9895

9996
Ptr<BatchNormLayer> BatchNormLayer::create(const LayerParams& params)

modules/dnn/src/layers/blank_layer.cpp

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,10 @@ namespace dnn
4747
class BlankLayerImpl : public BlankLayer
4848
{
4949
public:
50-
BlankLayerImpl(const LayerParams&) {}
50+
BlankLayerImpl(const LayerParams& params)
51+
{
52+
setParamsFrom(params);
53+
}
5154

5255
bool getMemoryShapes(const std::vector<MatShape> &inputs,
5356
const int requiredOutputs,

modules/dnn/src/layers/elementwise_layers.cpp

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -261,6 +261,7 @@ Ptr<ReLULayer> ReLULayer::create(const LayerParams& params)
261261
float negativeSlope = params.get<float>("negative_slope", 0.f);
262262
Ptr<ReLULayer> l(new ElementWiseLayer<ReLUFunctor>(true, ReLUFunctor(negativeSlope)));
263263
l->setParamsFrom(params);
264+
l->negativeSlope = negativeSlope;
264265

265266
return l;
266267
}
@@ -306,6 +307,9 @@ Ptr<PowerLayer> PowerLayer::create(const LayerParams& params)
306307
(PowerLayer*)(new ElementWiseLayer<PowerFunctor1>(false, PowerFunctor1(scale, shift))) :
307308
(PowerLayer*)(new ElementWiseLayer<PowerFunctor>(true, PowerFunctor(power, scale, shift))));
308309
l->setParamsFrom(params);
310+
l->power = power;
311+
l->scale = scale;
312+
l->shift = shift;
309313

310314
return l;
311315
}

modules/dnn/src/layers/max_unpooling_layer.cpp

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -81,10 +81,6 @@ class MaxUnpoolLayerImpl : public MaxUnpoolLayer
8181
}
8282
}
8383
}
84-
85-
Size poolKernel;
86-
Size poolPad;
87-
Size poolStride;
8884
};
8985

9086
Ptr<MaxUnpoolLayer> MaxUnpoolLayer::create(const LayerParams& params)

modules/dnn/src/layers/scale_layer.cpp

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -67,8 +67,6 @@ class ScaleLayerImpl : public ScaleLayer
6767
}
6868
return flops;
6969
}
70-
71-
bool hasBias;
7270
};
7371

7472

0 commit comments

Comments (0)