Skip to content

Commit aa0d806

Browse files
arrybnmshabunin
authored and committed
Added possibility of getting any intermediate blob with thrifty memory management
1 parent b18e357 commit aa0d806

18 files changed

+350
-205
lines changed

modules/dnn/include/opencv2/dnn/dnn.hpp

Lines changed: 29 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -337,19 +337,35 @@ namespace dnn //! This namespace is used for dnn module functionlaity.
337337
* In fact, this layer provides the only way to pass user data into the network.
338338
* As any other layer, this layer can label its outputs and this function provides an easy way to do this.
339339
*/
340-
CV_WRAP void setNetInputs(const std::vector<String> &inputBlobNames);
340+
CV_WRAP void setInputsNames(const std::vector<String> &inputBlobNames);
341341

342-
/** @brief Initializes and allocates all layers. */
343-
CV_WRAP void allocate();
344-
345-
/** @brief Runs forward pass to compute output of layer @p toLayer.
342+
/** @brief Runs forward pass to compute output of layer with name @p outputName.
343+
* @param outputName name for layer which output is needed to get
344+
* @return blob for first output of specified layer.
346345
* @details By default runs forward pass for the whole network.
347346
*/
348-
CV_WRAP void forward(LayerId toLayer = String());
349-
/** @brief Runs forward pass to compute output of layer @p toLayer, but computations start from @p startLayer */
350-
void forward(LayerId startLayer, LayerId toLayer);
351-
/** @overload */
352-
void forward(const std::vector<LayerId> &startLayers, const std::vector<LayerId> &toLayers);
347+
CV_WRAP Mat forward(const String& outputName = String());
348+
349+
/** @brief Runs forward pass to compute output of layer with name @p outputName.
350+
* @param outputBlobs contains all output blobs for specified layer.
351+
* @param outputName name for layer which output is needed to get
352+
* @details If @p outputName is empty, runs forward pass for the whole network.
353+
*/
354+
CV_WRAP void forward(std::vector<Mat>& outputBlobs, const String& outputName = String());
355+
356+
/** @brief Runs forward pass to compute outputs of layers listed in @p outBlobNames.
357+
* @param outputBlobs contains blobs for first outputs of specified layers.
358+
* @param outBlobNames names for layers which outputs are needed to get
359+
*/
360+
CV_WRAP void forward(std::vector<Mat>& outputBlobs,
361+
const std::vector<String>& outBlobNames);
362+
363+
/** @brief Runs forward pass to compute outputs of layers listed in @p outBlobNames.
364+
* @param outputBlobs contains all output blobs for each layer specified in @p outBlobNames.
365+
* @param outBlobNames names for layers which outputs are needed to get
366+
*/
367+
CV_WRAP void forward(std::vector<std::vector<Mat> >& outputBlobs,
368+
const std::vector<String>& outBlobNames);
353369

354370
//TODO:
355371
/** @brief Optimized forward.
@@ -369,7 +385,7 @@ namespace dnn //! This namespace is used for dnn module functionlaity.
369385
* specific target. For layers that not represented in scheduling file
370386
* or if no manual scheduling used at all, automatic scheduling will be applied.
371387
*/
372-
void compileHalide(const std::string& scheduler = "");
388+
void setHalideScheduler(const String& scheduler);
373389

374390
/**
375391
* @brief Ask network to use specific computation backend where it supported.
@@ -379,19 +395,13 @@ namespace dnn //! This namespace is used for dnn module functionlaity.
379395
void setPreferableBackend(int backendId);
380396

381397
/** @brief Sets the new value for the layer output blob
382-
* @param outputName descriptor of the updating layer output blob.
398+
* @param name descriptor of the updating layer output blob.
383399
* @param blob new blob.
384400
* @see connect(String, String) to know format of the descriptor.
385401
* @note If updating blob is not empty then @p blob must have the same shape,
386402
* because network reshaping is not implemented yet.
387403
*/
388-
CV_WRAP void setBlob(String outputName, const Mat &blob);
389-
390-
/** @brief Returns the layer output blob.
391-
* @param outputName the descriptor of the returning layer output blob.
392-
* @see connect(String, String)
393-
*/
394-
CV_WRAP Mat getBlob(String outputName);
404+
CV_WRAP void setInput(const Mat &blob, const String& name = "");
395405

396406
/** @brief Sets the new value for the learned param of the layer.
397407
* @param layer name or id of the layer.

modules/dnn/misc/python/pyopencv_dnn.hpp

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@ typedef dnn::DictValue LayerId;
33
typedef std::vector<dnn::MatShape> vector_MatShape;
44
typedef std::vector<std::vector<dnn::MatShape> > vector_vector_MatShape;
55
typedef std::vector<size_t> vector_size_t;
6+
typedef std::vector<std::vector<Mat> > vector_vector_Mat;
67

78
template<>
89
bool pyopencv_to(PyObject *o, dnn::DictValue &dv, const char *name)

modules/dnn/samples/caffe_googlenet.cpp

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -119,16 +119,14 @@ int main(int argc, char **argv)
119119
//! [Prepare blob]
120120

121121
//! [Set input blob]
122-
net.setBlob(".data", inputBlob); //set the network input
122+
net.setInput(inputBlob, "data"); //set the network input
123123
//! [Set input blob]
124124

125125
//! [Make forward pass]
126-
net.forward(); //compute output
126+
Mat prob = net.forward("prob"); //compute output
127127
//! [Make forward pass]
128128

129129
//! [Gather output]
130-
Mat prob = net.getBlob("prob"); //gather output of "prob" layer
131-
132130
int classId;
133131
double classProb;
134132
getMaxClass(prob, &classId, &classProb);//find the best class

modules/dnn/samples/fcn_semsegm.cpp

Lines changed: 2 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -134,19 +134,16 @@ int main(int argc, char **argv)
134134
//! [Prepare blob]
135135

136136
//! [Set input blob]
137-
net.setBlob(".data", inputBlob); //set the network input
137+
net.setInput(inputBlob, "data"); //set the network input
138138
//! [Set input blob]
139139

140140
//! [Make forward pass]
141141
double t = (double)cv::getTickCount();
142-
net.forward(); //compute output
142+
Mat score = net.forward("score"); //compute output
143143
t = (double)cv::getTickCount() - t;
144144
printf("processing time: %.1fms\n", t*1000./getTickFrequency());
145145
//! [Make forward pass]
146146

147-
//! [Gather output]
148-
Mat score = net.getBlob("score");
149-
150147
Mat colorize;
151148
colorizeSegmentation(score, colors, colorize);
152149
Mat show;

modules/dnn/samples/squeezenet_halide.cpp

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -93,23 +93,23 @@ int main(int argc, char **argv)
9393
//! [Prepare blob]
9494

9595
//! [Set input blob]
96-
net.setBlob("", inputBlob); // Set the network input.
96+
net.setInput(inputBlob); // Set the network input.
9797
//! [Set input blob]
9898

9999
//! [Enable Halide backend]
100100
net.setPreferableBackend(DNN_BACKEND_HALIDE); // Tell engine to use Halide where it possible.
101101
//! [Enable Halide backend]
102102

103103
//! [Compile Halide pipeline]
104-
net.compileHalide(); // Compile Halide pipeline.
104+
// net.compileHalide(); // Compile Halide pipeline.
105105
//! [Compile Halide pipeline]
106106

107107
//! [Make forward pass]
108-
net.forward(); // Compute output.
108+
Mat prob = net.forward("prob"); // Compute output.
109109
//! [Make forward pass]
110110

111111
//! [Gather output]
112-
Mat prob = net.getBlob("prob"); // Gather output of "prob" layer.
112+
// net.getBlob(); // Gather output of "prob" layer.
113113

114114
int classId;
115115
double classProb;

modules/dnn/samples/ssd_object_detection.cpp

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -108,15 +108,13 @@ int main(int argc, char** argv)
108108
//! [Prepare blob]
109109

110110
//! [Set input blob]
111-
net.setBlob(".data", inputBlob); //set the network input
111+
net.setInput(inputBlob, "data"); //set the network input
112112
//! [Set input blob]
113113

114114
//! [Make forward pass]
115-
net.forward(); //compute output
115+
Mat detection = net.forward("detection_out"); //compute output
116116
//! [Make forward pass]
117117

118-
//! [Gather output]
119-
Mat detection = net.getBlob("detection_out");
120118
Mat detectionMat(detection.size[2], detection.size[3], CV_32F, detection.ptr<float>());
121119

122120
float confidenceThreshold = parser.get<float>("min_confidence");

modules/dnn/samples/tf_inception.cpp

Lines changed: 3 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@ const String keys =
2626
"https://storage.googleapis.com/download.tensorflow.org/models/inception5h.zip }"
2727
"{model m |tensorflow_inception_graph.pb| path to TensorFlow .pb model file }"
2828
"{image i || path to image file }"
29-
"{i_blob | .input | input blob name) }"
29+
"{i_blob | input | input blob name) }"
3030
"{o_blob | softmax2 | output blob name) }"
3131
"{c_names c | imagenet_comp_graph_label_strings.txt | path to file with classnames for class id }"
3232
"{result r || path to save output blob (optional, binary format, NCHW order) }"
@@ -101,21 +101,18 @@ int main(int argc, char **argv)
101101
//! [Prepare blob]
102102
inputBlob -= 117.0;
103103
//! [Set input blob]
104-
net.setBlob(inBlobName, inputBlob); //set the network input
104+
net.setInput(inputBlob, inBlobName); //set the network input
105105
//! [Set input blob]
106106

107107
cv::TickMeter tm;
108108
tm.start();
109109

110110
//! [Make forward pass]
111-
net.forward(); //compute output
111+
Mat result = net.forward(outBlobName); //compute output
112112
//! [Make forward pass]
113113

114114
tm.stop();
115115

116-
//! [Gather output]
117-
Mat result = net.getBlob(outBlobName); //gather output of "prob" layer
118-
119116
if (!resultFile.empty()) {
120117
CV_Assert(result.isContinuous());
121118

modules/dnn/samples/torch_enet.cpp

Lines changed: 3 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -73,32 +73,19 @@ int main(int argc, char **argv)
7373
//! [Prepare blob]
7474

7575
//! [Set input blob]
76-
net.setBlob("", inputBlob); //set the network input
76+
net.setInput(inputBlob, ""); //set the network input
7777
//! [Set input blob]
7878

79-
const int N = 3;
8079
TickMeter tm;
8180

82-
//! [Make forward pass]
83-
for( int i = 0; i < N; i++ )
84-
{
85-
TickMeter tm_;
86-
tm_.start();
87-
net.forward(); //compute output
88-
tm_.stop();
89-
if( i == 0 || tm_.getTimeTicks() < tm.getTimeTicks() )
90-
tm = tm_;
91-
}
92-
93-
//! [Gather output]
94-
9581
String oBlob = net.getLayerNames().back();
9682
if (!parser.get<String>("o_blob").empty())
9783
{
9884
oBlob = parser.get<String>("o_blob");
9985
}
10086

101-
Mat result = net.getBlob(oBlob); //gather output of "prob" layer
87+
//! [Make forward pass]
88+
Mat result = net.forward(oBlob);
10289

10390
if (!resultFile.empty()) {
10491
CV_Assert(result.isContinuous());

modules/dnn/src/caffe/caffe_importer.cpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -277,7 +277,7 @@ class CaffeImporter : public Importer
277277
addedBlobs.push_back(BlobNote(net.input(inNum), 0, inNum));
278278
netInputs[inNum] = net.input(inNum);
279279
}
280-
dstNet.setNetInputs(netInputs);
280+
dstNet.setInputsNames(netInputs);
281281
}
282282

283283
for (int li = 0; li < layersSize; li++)

0 commit comments

Comments (0)