Skip to content

Commit d5ac902

Browse files
committed
Fix memory allocation performed outside of layers.
1 parent a4cc801 commit d5ac902

File tree

4 files changed

+31
-22
lines changed

4 files changed

+31
-22
lines changed

modules/dnn/src/layers/blank_layer.cpp

Lines changed: 7 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -49,18 +49,16 @@ class BlankLayerImpl : public BlankLayer
4949
public:
5050
BlankLayerImpl(const LayerParams&) {}
5151

52-
void allocate(const std::vector<Mat*> &inputs, std::vector<Mat> &outputs)
52+
bool getMemoryShapes(const std::vector<MatShape> &inputs,
53+
const int requiredOutputs,
54+
std::vector<MatShape> &outputs,
55+
std::vector<MatShape> &internals) const
5356
{
54-
outputs.resize(inputs.size());
55-
for (size_t i = 0; i < inputs.size(); i++)
56-
outputs[i] = *inputs[i];
57+
Layer::getMemoryShapes(inputs, requiredOutputs, outputs, internals);
58+
return true;
5759
}
5860

59-
void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
60-
{
61-
for (size_t i = 0; i < inputs.size(); i++)
62-
outputs[i] = *inputs[i];
63-
}
61+
void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals) {}
6462
};
6563

6664
Ptr<BlankLayer> BlankLayer::create(const LayerParams& params)

modules/dnn/src/layers/elementwise_layers.cpp

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -37,9 +37,9 @@ class ElementWiseLayer : public Func::Layer
3737
ElementWiseLayer(bool run_parallel_=false, const Func &f=Func()) : func(f), run_parallel(run_parallel_) {}
3838

3939
bool getMemoryShapes(const std::vector<MatShape> &inputs,
40-
const int requiredOutputs,
41-
std::vector<MatShape> &outputs,
42-
std::vector<MatShape> &internals) const
40+
const int requiredOutputs,
41+
std::vector<MatShape> &outputs,
42+
std::vector<MatShape> &internals) const
4343
{
4444
Layer::getMemoryShapes(inputs, requiredOutputs, outputs, internals);
4545
return true;

modules/dnn/src/layers/pooling_layer.cpp

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -203,7 +203,13 @@ class PoolingLayerImpl : public PoolingLayer
203203
CV_Assert(inputs.size() != 0);
204204
Size in(inputs[0][3], inputs[0][2]), out;
205205

206-
if (padMode.empty()) {
206+
if (globalPooling)
207+
{
208+
out.height = 1;
209+
out.width = 1;
210+
}
211+
else if (padMode.empty())
212+
{
207213
//Yeah, something strange Caffe scheme-)
208214
out.height = static_cast<int>(ceil(static_cast<float>(in.height + 2 * pad.height -
209215
kernel.height) / stride.height)) + 1;

modules/dnn/src/layers/reshape_layer.cpp

Lines changed: 14 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -149,6 +149,7 @@ class ReshapeLayerImpl : public ReshapeLayer
149149
outputs.push_back(MatShape());
150150
computeShapeByReshapeMask(inputs[i], newShapeDesc, newShapeRange, outputs.back());
151151
}
152+
internals = outputs;
152153

153154
return true;
154155
}
@@ -160,23 +161,28 @@ class ReshapeLayerImpl : public ReshapeLayer
160161
Mat srcBlob = *inputs[0];
161162
int dims = srcBlob.dims;
162163
MatShape inputShape = shape(srcBlob), outShape = shape(outputs[0]);
163-
bool channelsReduced = dims > (int)outShape.size() ||
164-
(dims == 4 && inputShape[1] > outShape[1]);
165-
performReordering = enableReordering && dims == 4 && channelsReduced;
164+
165+
// input.total() == output.total(). So if reordering is required,
166+
// one of the sizes will not be equal.
167+
// Example where reordering is required: from 1x128x4x4 to 1x2048
168+
// Example where reordering is NOT required: from 1x1024x1x1 to 1x1024.
169+
bool reorderingRequire = false;
170+
const int minDims = min(dims, (int)outShape.size());
171+
for (int i = 0; !reorderingRequire && i < minDims; ++i)
172+
reorderingRequire = inputShape[i] != outShape[i];
173+
performReordering = enableReordering && reorderingRequire;
166174
}
167175

168176
void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
169177
{
170178
for (size_t i = 0; i < inputs.size(); i++)
171179
{
172180
Mat srcBlob = *inputs[i];
173-
MatShape inputShape = shape(srcBlob), outShape = shape(outputs[i]);
181+
MatShape inputShape = shape(srcBlob);
174182

175183
if (performReordering)
176184
{
177-
Mat reordered_blob(inputShape, srcBlob.type());
178-
179-
float *dstData = reordered_blob.ptr<float>();
185+
float *dstData = internals[i].ptr<float>();
180186
const float *srcData = srcBlob.ptr<float>();
181187

182188
int num = inputShape[0], channels = inputShape[1], height = inputShape[2], width = inputShape[3];
@@ -196,8 +202,7 @@ class ReshapeLayerImpl : public ReshapeLayer
196202
}
197203
}
198204
}
199-
200-
outputs[i] = reordered_blob.reshape(1, outShape);
205+
internals[i].copyTo(outputs[i]);
201206
}
202207
}
203208
}

0 commit comments

Comments
 (0)