Skip to content

Commit 1776fe7

Browse files
committed
xlu: Should use xlu_param instead of relu_param
Signed-off-by: Huaqi Fang <[email protected]>
1 parent 1412e44 commit 1776fe7

File tree

2 files changed

+10
-18
lines changed

2 files changed

+10
-18
lines changed

include/caffe/layers/xlu_layer.hpp

Lines changed: 2 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ template <typename Dtype>
1717
class XLULayer : public Layer<Dtype> {
1818
public:
1919
/**
20-
* @param param provides XLUParameter relu_param,
20+
* @param param provides XLUParameter xlu_param,
2121
* with XLULayer options:
2222
* - negative_slope (\b optional, default 0).
2323
* the value @f$ \nu @f$ by which negative values are multiplied.
@@ -47,8 +47,6 @@ class XLULayer : public Layer<Dtype> {
4747
*/
4848
virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
4949
const vector<Blob<Dtype>*>& top);
50-
virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
51-
const vector<Blob<Dtype>*>& top);
5250

5351
/**
5452
* @brief Computes the error gradient w.r.t. the XLU inputs.
@@ -80,10 +78,8 @@ class XLULayer : public Layer<Dtype> {
8078
*/
8179
virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
8280
const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
83-
virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
84-
const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
8581
};
8682

8783
} // namespace caffe
8884

89-
#endif // CAFFE_RELU_LAYER_HPP_
85+
#endif // CAFFE_XLU_LAYER_HPP_

src/caffe/layers/xlu_layer.cpp

Lines changed: 8 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -21,10 +21,10 @@ void XLULayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
2121
const Dtype* bottom_data = bottom[0]->cpu_data();
2222
Dtype* top_data = top[0]->mutable_cpu_data();
2323
const int count = bottom[0]->count();
24-
Dtype negative_slope = this->layer_param_.relu_param().negative_slope();
25-
Dtype relu6 = this->layer_param_.relu_param().relu6(); //CUSTOMIZATION
26-
Dtype maximum = this->layer_param_.relu_param().maximum(); //CUSTOMIZATION
27-
Dtype minimum = this->layer_param_.relu_param().minimum(); //CUSTOMIZATION
24+
Dtype negative_slope = this->layer_param_.xlu_param().negative_slope();
25+
Dtype relu6 = this->layer_param_.xlu_param().relu6(); //CUSTOMIZATION
26+
Dtype maximum = this->layer_param_.xlu_param().maximum(); //CUSTOMIZATION
27+
Dtype minimum = this->layer_param_.xlu_param().minimum(); //CUSTOMIZATION
2828
if (bottom.size() > 1) //bottom[1] provides the maximum case
2929
maximum = bottom[1]->cpu_data()[0];
3030
for (int i = 0; i < count; ++i) {
@@ -50,10 +50,10 @@ void XLULayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
5050
const Dtype* top_diff = top[0]->cpu_diff();
5151
Dtype* bottom_diff = bottom[0]->mutable_cpu_diff();
5252
const int count = bottom[0]->count();
53-
Dtype negative_slope = this->layer_param_.relu_param().negative_slope();
54-
Dtype relu6 = this->layer_param_.relu_param().relu6(); //CUSTOMIZATION
55-
Dtype maximum = this->layer_param_.relu_param().maximum(); //CUSTOMIZATION
56-
Dtype minimum = this->layer_param_.relu_param().minimum(); //CUSTOMIZATION
53+
Dtype negative_slope = this->layer_param_.xlu_param().negative_slope();
54+
Dtype relu6 = this->layer_param_.xlu_param().relu6(); //CUSTOMIZATION
55+
Dtype maximum = this->layer_param_.xlu_param().maximum(); //CUSTOMIZATION
56+
Dtype minimum = this->layer_param_.xlu_param().minimum(); //CUSTOMIZATION
5757
if (bottom.size() > 1) //bottom[1] provides the maximum case
5858
maximum = bottom[1]->cpu_data()[0];
5959
for (int i = 0; i < count; ++i) {
@@ -70,10 +70,6 @@ void XLULayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
7070
}
7171

7272

73-
#ifdef CPU_ONLY
74-
STUB_GPU(XLULayer);
75-
#endif
76-
7773
INSTANTIATE_CLASS(XLULayer);
7874
REGISTER_LAYER_CLASS(XLU);
7975

0 commit comments

Comments (0)