Skip to content

Commit 10194eb

Browse files
committed
flownet2 patch: fix mismatched and missing layer type() strings.
1 parent 9346f47 commit 10194eb

10 files changed: 12 lines added, 3 lines removed

include/caffe/layers/channel_norm_layer.hpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ class ChannelNormLayer : public Layer<Dtype> {
1919
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
2020
const vector<Blob<Dtype>*>& top);
2121

22-
virtual inline const char* type() const { return "NormLayer"; }
22+
virtual inline const char* type() const { return "ChannelNorm"; }
2323
virtual inline int MinBottomBlobs() const { return 1; }
2424
virtual inline int ExactNumTopBlobs() const { return 1; }
2525

include/caffe/layers/data_augmentation_layer.hpp

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@ class DataAugmentationLayer : public AugmentationLayerBase<Dtype>, public Layer<
2525
const vector<Blob<Dtype>*>& top);
2626
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
2727
const vector<Blob<Dtype>*>& top);
28+
virtual inline const char* type() const { return "DataAugmentation"; }
2829
virtual void adjust_blobs(vector<Blob<Dtype>*> blobs);
2930
virtual inline bool AllowBackward() const { LOG(WARNING) << "DataAugmentationLayer does not do backward."; return false; }
3031

include/caffe/layers/float_reader_layer.hpp

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@ class FloatReaderLayer : public Layer<Dtype> {
2020
const vector<Blob<Dtype>*>& top);
2121
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
2222
const vector<Blob<Dtype>*>& top);
23+
virtual inline const char* type() const { return "FloatReader"; }
2324

2425
protected:
2526
int dataXSize_;

include/caffe/layers/flow_augmentation_layer.hpp

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@ class FlowAugmentationLayer : public AugmentationLayerBase<Dtype>, public Layer<
2525
const vector<Blob<Dtype>*>& top);
2626
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
2727
const vector<Blob<Dtype>*>& top);
28+
virtual inline const char* type() const { return "FlowAugmentation"; }
2829
virtual inline bool AllowBackward() const { LOG(WARNING) << "FlowAugmentationLayer does not do backward."; return false; }
2930

3031

include/caffe/layers/flow_warp_layer.hpp

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@ class FlowWarpLayer : public Layer<Dtype> {
1919
const vector<Blob<Dtype>*>& top);
2020
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
2121
const vector<Blob<Dtype>*>& top);
22+
virtual inline const char* type() const { return "FlowWarp"; }
2223

2324
protected:
2425
virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,

include/caffe/layers/generate_augmentation_parameters_layer.hpp

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@ class GenerateAugmentationParametersLayer : public AugmentationLayerBase<Dtype>,
2424
const vector<Blob<Dtype>*>& top);
2525
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
2626
const vector<Blob<Dtype>*>& top);
27+
virtual inline const char* type() const { return "GenerateAugmentationParameters"; }
2728
virtual inline bool AllowBackward() const { LOG(WARNING) << "GenerateAugmentationParametersLayer does not do backward."; return false; }
2829

2930
protected:

include/caffe/layers/lpq_loss_layer.hpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@ class LpqLossLayer : public LossLayer<Dtype> {
3737
const vector<Blob<Dtype>*>& bottom,
3838
const vector<Blob<Dtype>*>& top);
3939

40-
virtual inline const char* type() const { return "L1Loss"; }
40+
virtual inline const char* type() const { return "LpqLoss"; }
4141

4242
virtual inline bool AllowForceBackward(
4343
const int bottom_index) const

include/caffe/layers/mean_layer.hpp

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@ class MeanLayer : public Layer<Dtype> {
1919
const vector<Blob<Dtype>*>& top);
2020
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
2121
const vector<Blob<Dtype>*>& top);
22+
virtual inline const char* type() const { return "Mean"; }
2223

2324
protected:
2425
Blob<Dtype> mean_;

include/caffe/layers/resample_layer.hpp

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,7 @@ class ResampleLayer : public Layer<Dtype> {
2222
virtual inline int MinBottomBlobs() const { return 1; }
2323
virtual inline int MaxBottomBlobs() const { return 2; }
2424
virtual inline int ExactNumTopBlobs() const { return 1; }
25+
virtual inline const char* type() const { return "Resample"; }
2526
virtual inline bool AllowBackward() const { LOG(WARNING) << "ResampleLayer does not do backward."; return false; }
2627

2728
protected:

src/caffe/net.cpp

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -81,6 +81,8 @@ void Net<Dtype>::Init(const NetParameter& in_param) {
8181
}
8282
layers_.push_back(LayerRegistry<Dtype>::CreateLayer(layer_param));
8383
layer_names_.push_back(layer_param.name());
84+
Layer<Dtype>* layer = layers_[layer_id].get();
85+
layer->SetNet(this);
8486
LOG_IF(INFO, Caffe::root_solver())
8587
<< "Creating Layer " << layer_param.name();
8688
bool need_backward = false;
@@ -106,7 +108,7 @@ void Net<Dtype>::Init(const NetParameter& in_param) {
106108
// If the layer specifies that AutoTopBlobs() -> true and the LayerParameter
107109
// specified fewer than the required number (as specified by
108110
// ExactNumTopBlobs() or MinTopBlobs()), allocate them here.
109-
Layer<Dtype>* layer = layers_[layer_id].get();
111+
//Layer<Dtype>* layer = layers_[layer_id].get();
110112
if (layer->AutoTopBlobs()) {
111113
const int needed_num_top =
112114
std::max(layer->MinTopBlobs(), layer->ExactNumTopBlobs());

0 commit comments

Comments (0)