@@ -324,7 +324,6 @@ struct LayerData
         // add logging info
         params.name = name;
         params.type = type;
-        skip = false;
     }
 
     int id;
@@ -347,7 +346,6 @@ struct LayerData
     std::map<int, bool> skipFlags;
 
     int flag;
-    bool skip;
 
     Ptr<Layer> getLayerInstance()
     {
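These two hunks drop the single `skip` boolean in favor of the per-backend `skipFlags` map that `LayerData` already carried, so a fusion decision made for the default CPU path no longer leaks into other backends. A minimal runnable sketch of the idea, with stand-in enum values rather than the real `cv::dnn` backend IDs:

```cpp
#include <map>

// Stand-ins for the cv::dnn backend IDs; illustration only.
enum Backend { DNN_BACKEND_DEFAULT = 0, DNN_BACKEND_HALIDE = 1 };

int main()
{
    std::map<int, bool> skipFlags;            // per-backend, as in LayerData
    skipFlags[DNN_BACKEND_DEFAULT] = true;    // fused into a neighbor on the CPU path
    // The other backend keeps its own entry, so the same layer can still run there;
    // operator[] default-inserts false for backends nobody touched.
    return skipFlags[DNN_BACKEND_HALIDE] ? 1 : 0;
}
```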
@@ -666,18 +664,39 @@ struct Net::Impl
         }
     }
 
-    void setUpNet(const std::vector<LayerPin>& blobsToKeep_ = std::vector<LayerPin>())
+    void clear()
     {
-        if (!netWasAllocated || this->blobsToKeep != blobsToKeep_)
+        MapIdToLayerData::iterator it;
+        for (it = layers.begin(); it != layers.end(); it++)
         {
-            MapIdToLayerData::iterator it;
-            for (it = layers.begin(); it != layers.end(); it++)
+            if (it->second.id != 0) {
+                it->second.outputBlobs.clear();
+                it->second.internals.clear();
+            }
+            it->second.skipFlags.clear();
+            it->second.consumers.clear();
+            Ptr<ConvolutionLayer> convLayer = it->second.layerInstance.dynamicCast<ConvolutionLayer>();
+
+            if( !convLayer.empty() )
             {
-                if (it->second.id != 0) {
-                    it->second.outputBlobs.clear();
-                    it->second.internals.clear();
-                }
+                convLayer->setActivation(Ptr<ActivationLayer>());
+                convLayer->setBatchNorm(Ptr<BatchNormLayer>());
+            }
+
+            Ptr<PoolingLayer> poolingLayer = it->second.layerInstance.dynamicCast<PoolingLayer>();
+            if( !poolingLayer.empty() )
+            {
+                poolingLayer->computeMaxIdx = true;
             }
+        }
+    }
+
+
+    void setUpNet(const std::vector<LayerPin>& blobsToKeep_ = std::vector<LayerPin>())
+    {
+        if (!netWasAllocated || this->blobsToKeep != blobsToKeep_)
+        {
+            clear();
 
             allocateLayers(blobsToKeep_);
             computeNetOutputLayers();
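The new `clear()` exists because `setUpNet()` can run more than once (the guard re-triggers whenever `blobsToKeep` changes), and re-allocation has to start from an unfused graph: each optimization applied during setup has a matching reset here, and previously fused layers are detached by assigning an empty `cv::Ptr`. A self-contained sketch of that detach idiom, using a hypothetical stand-in type instead of the real layer classes:

```cpp
#include <opencv2/core.hpp>
#include <cstdio>

struct DummyActivation { };   // hypothetical stand-in for ActivationLayer

int main()
{
    cv::Ptr<DummyActivation> fused = cv::makePtr<DummyActivation>();
    std::printf("fused attached: %d\n", !fused.empty());   // 1
    // The reset clear() performs via setActivation()/setBatchNorm():
    fused = cv::Ptr<DummyActivation>();
    std::printf("fused attached: %d\n", !fused.empty());   // 0
    return 0;
}
```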
@@ -1005,69 +1024,41 @@ struct Net::Impl
         ld.flag = 1;
     }
 
-    void allocateLayers(const std::vector<LayerPin>& blobsToKeep_)
+    void fuseLayers(const std::vector<LayerPin>& blobsToKeep_)
     {
-        MapIdToLayerData::iterator it;
-        for (it = layers.begin(); it != layers.end(); it++)
-            it->second.flag = 0;
-
-        CV_Assert(!layers[0].outputBlobs.empty());
-        ShapesVec inputShapes;
-        for(int i = 0; i < layers[0].outputBlobs.size(); i++)
-        {
-            CV_Assert(layers[0].outputBlobs[i].total());
-            inputShapes.push_back(shape(layers[0].outputBlobs[i]));
-        }
-        LayersShapesMap layersShapes;
-        getLayersShapes(inputShapes, layersShapes);
-
-        blobManager.reset();
-        for (it = layers.begin(); it != layers.end(); ++it)
-        {
-            const LayerData& ld = it->second;
-            blobManager.addReferences(ld.inputBlobsId);
-        }
-
-        for (int i = 0; i < blobsToKeep_.size(); i++)
-        {
-            blobManager.addReference(blobsToKeep_[i]);
-        }
-
-        for (it = layers.begin(); it != layers.end(); it++)
-        {
-            int lid = it->first;
-            allocateLayer(lid, layersShapes);
-        }
-
         // scan through all the layers. If there is a convolution layer followed by an activation layer,
         // we try to embed the activation into the convolution and disable separate execution of the activation.
         std::vector<String> outnames;
+        std::set<LayerPin> pinsToKeep(blobsToKeep_.begin(),
+                                      blobsToKeep_.end());
+        MapIdToLayerData::iterator it;
         for (it = layers.begin(); it != layers.end(); it++)
         {
             int lid = it->first;
             LayerData& ld = layers[lid];
-            if( ld.skip )
+            if( ld.skipFlags[DNN_BACKEND_DEFAULT] )
             {
-                // printf("skipping %s\n", ld.layerInstance->name.c_str());
                 continue;
             }
-            // printf("analyzing %s\n", ld.layerInstance->name.c_str());
            if( ld.consumers.size() == 0 )
                outnames.push_back(ld.layerInstance->name);
            Ptr<ConvolutionLayer> convLayer = ld.layerInstance.dynamicCast<ConvolutionLayer>();
-            if( !convLayer.empty() && ld.consumers.size() == 1 )
+            LayerPin lp(lid, 0);
+            if( !convLayer.empty() && ld.consumers.size() == 1 &&
+                pinsToKeep.count(lp) == 0 )
            {
                LayerData* nextData = &layers[ld.consumers[0].lid];
                Ptr<BatchNormLayer> nextBNormLayer =
                    nextData->layerInstance.dynamicCast<BatchNormLayer>();
-                if( !nextBNormLayer.empty() )
+                LayerPin lpNext(ld.consumers[0].lid, 0);
+                if( !nextBNormLayer.empty() && pinsToKeep.count(lpNext) == 0 )
                {
                    LayerData* bnormData = nextData;
                    nextData = 0;
                    if( convLayer->setBatchNorm(nextBNormLayer) )
                    {
-                        // printf("fused convolution (%s) and batch norm (%s)\n", convLayer->name.c_str(), nextBNormLayer->name.c_str());
-                        bnormData->skip = true;
+                        bnormData->skipFlags[DNN_BACKEND_DEFAULT] = true;
+                        ld.outputBlobs = layers[lpNext.lid].outputBlobs;
                        if( bnormData->consumers.size() == 1 )
                            nextData = &layers[bnormData->consumers[0].lid];
                    }
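For background on why handing the batch-norm layer to `setBatchNorm()` is sound: in inference mode batch norm is a per-channel affine map, `y = gamma*(x - mean)/sqrt(var + eps) + beta`, so applying it after a convolution is equivalent to rescaling that channel's weights and shifting its bias. A scalar check of that algebra (general math, not this patch's implementation, which delegates the details to the convolution layer):

```cpp
// Checking that conv followed by batch norm equals a conv with folded
// parameters: s = gamma / sqrt(var + eps), w' = w*s, b' = (b - mean)*s + beta.
#include <cmath>
#include <cstdio>

int main()
{
    double w = 0.5, b = 0.1;                                  // conv weight/bias
    double gamma = 2.0, beta = -1.0, mean = 0.3, var = 4.0, eps = 1e-5;
    double x = 1.7;                                           // an input value

    double conv    = w * x + b;
    double unfused = gamma * (conv - mean) / std::sqrt(var + eps) + beta;

    double s     = gamma / std::sqrt(var + eps);
    double fused = (w * s) * x + ((b - mean) * s + beta);     // folded conv

    std::printf("unfused=%.12f fused=%.12f\n", unfused, fused);   // identical
    return 0;
}
```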
@@ -1079,8 +1070,8 @@ struct Net::Impl
 
                 if( !nextActivLayer.empty() && convLayer->setActivation(nextActivLayer) )
                 {
-                    // printf("fused convolution (%s) and activation (%s)\n", convLayer->name.c_str(), nextActivLayer->name.c_str());
-                    nextData->skip = true;
+                    nextData->skipFlags[DNN_BACKEND_DEFAULT] = true;
+                    ld.outputBlobs = layers[lpNext.lid].outputBlobs;
                 }
             }
             Ptr<PoolingLayer> poolingLayer = ld.layerInstance.dynamicCast<PoolingLayer>();
@@ -1096,10 +1087,43 @@ struct Net::Impl
                 poolingLayer->computeMaxIdx = false;
             }
         }
-        /* printf("outputs: ");
-        for( size_t j = 0; j < outnames.size(); j++ )
-            printf("%s ", outnames[j].c_str());
-        printf("\n"); */
+    }
+
+    void allocateLayers(const std::vector<LayerPin>& blobsToKeep_)
+    {
+        MapIdToLayerData::iterator it;
+        for (it = layers.begin(); it != layers.end(); it++)
+            it->second.flag = 0;
+
+        CV_Assert(!layers[0].outputBlobs.empty());
+        ShapesVec inputShapes;
+        for(int i = 0; i < layers[0].outputBlobs.size(); i++)
+        {
+            CV_Assert(layers[0].outputBlobs[i].total());
+            inputShapes.push_back(shape(layers[0].outputBlobs[i]));
+        }
+        LayersShapesMap layersShapes;
+        getLayersShapes(inputShapes, layersShapes);
+
+        blobManager.reset();
+        for (it = layers.begin(); it != layers.end(); ++it)
+        {
+            const LayerData& ld = it->second;
+            blobManager.addReferences(ld.inputBlobsId);
+        }
+
+        for (int i = 0; i < blobsToKeep_.size(); i++)
+        {
+            blobManager.addReference(blobsToKeep_[i]);
+        }
+
+        for (it = layers.begin(); it != layers.end(); it++)
+        {
+            int lid = it->first;
+            allocateLayer(lid, layersShapes);
+        }
+
+        fuseLayers(blobsToKeep_);
     }
 
     void forwardLayer(LayerData &ld)
@@ -1109,7 +1133,7 @@ struct Net::Impl
         if (preferableBackend == DNN_BACKEND_DEFAULT ||
             !layer->supportBackend(preferableBackend))
         {
-            if( !ld.skip )
+            if( !ld.skipFlags[DNN_BACKEND_DEFAULT] )
                 layer->forward(ld.inputBlobs, ld.outputBlobs, ld.internals);
         }
         else if (!ld.skipFlags[preferableBackend])
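With the map in place, the forward path reads the skip decision for exactly the backend it is about to execute: the default branch consults `DNN_BACKEND_DEFAULT`, the backend branch consults `preferableBackend`. A condensed, runnable sketch of that dispatch with stand-in types (not the real `forwardLayer()`):

```cpp
#include <cstdio>
#include <map>

enum Backend { DNN_BACKEND_DEFAULT = 0, DNN_BACKEND_HALIDE = 1 };

struct FakeLayerData { std::map<int, bool> skipFlags; };   // stand-in for LayerData

// Mirrors the branch structure of the patched forwardLayer(); illustration only.
void forwardLayer(FakeLayerData& ld, int preferableBackend, bool backendSupported)
{
    if (preferableBackend == DNN_BACKEND_DEFAULT || !backendSupported)
    {
        if (!ld.skipFlags[DNN_BACKEND_DEFAULT])
            std::printf("run default-path kernel\n");
    }
    else if (!ld.skipFlags[preferableBackend])
        std::printf("run backend-%d node\n", preferableBackend);
}

int main()
{
    FakeLayerData ld;
    ld.skipFlags[DNN_BACKEND_DEFAULT] = true;     // fused away on the CPU path
    forwardLayer(ld, DNN_BACKEND_DEFAULT, true);  // prints nothing: skipped
    forwardLayer(ld, DNN_BACKEND_HALIDE, true);   // still runs on the other backend
    return 0;
}
```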
@@ -1300,20 +1324,6 @@ void Net::connect(String _outPin, String _inPin)
     impl->connect(outPin.lid, outPin.oid, inpPin.lid, inpPin.oid);
 }
 
-//void Net::forward(LayerId toLayer)
-//{
-//    if (!impl->netWasAllocated)
-//    {
-//        impl->setUpNet();
-
-//    }
-
-//    if (toLayer.isString() && toLayer.get<String>().empty())
-//        impl->forwardAll();
-//    else
-//        impl->forwardLayer(impl->getLayerData(toLayer));
-//}
-
 Mat Net::forward(const String& outputName)
 {
     String layerName = outputName;
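None of this changes the public API: the first `Net::forward()` call still triggers `setUpNet()`, which now clears, allocates, and fuses in that order, and the `pinsToKeep` check keeps fusion from swallowing an output the caller asked for. Typical usage of this era's API (file and layer names are placeholders):

```cpp
#include <opencv2/dnn.hpp>
#include <opencv2/imgcodecs.hpp>

int main()
{
    using namespace cv;
    using namespace cv::dnn;

    // Placeholder model/input names.
    Net net = readNetFromCaffe("deploy.prototxt", "model.caffemodel");
    Mat img = imread("input.jpg");

    net.setInput(blobFromImage(img));
    Mat prob = net.forward("prob");   // first call: clear() + allocateLayers() + fuseLayers()
    return 0;
}
```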