Yes, you can. In src/caffe/proto/caffe.proto add the line:
message ReLUParameter {
  ...
  optional float threshold = 3 [default = 0]; // add this line
  ...
}
and in src/caffe/layers/relu_layer.cpp, make the corresponding small modifications:
template <typename Dtype>
void ReLULayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
    const vector<Blob<Dtype>*>& top) {
  ...
  Dtype threshold = this->layer_param_.relu_param().threshold();
  for (int i = 0; i < count; ++i) {
    // Shift the input by the threshold, then apply the usual leaky-ReLU rule.
    top_data[i] = (bottom_data[i] > threshold) ? (bottom_data[i] - threshold) :
        (negative_slope * (bottom_data[i] - threshold));
  }
}
template <typename Dtype>
void ReLULayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
    const vector<bool>& propagate_down,
    const vector<Blob<Dtype>*>& bottom) {
  if (propagate_down[0]) {
    ...
    Dtype threshold = this->layer_param_.relu_param().threshold();
    for (int i = 0; i < count; ++i) {
      // The local gradient is 1 above the threshold and negative_slope at or below it.
      bottom_diff[i] = top_diff[i] * ((bottom_data[i] > threshold)
          + negative_slope * (bottom_data[i] <= threshold));
    }
  }
}
and similarly, the GPU code in src/caffe/layers/relu_layer.cu should be modified; a sketch follows.
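Here is a minimal sketch of what those relu_layer.cu changes could look like, mirroring the CPU logic above. The kernel names (ReLUForward, ReLUBackward) and the launch macros (CUDA_KERNEL_LOOP, CAFFE_GET_BLOCKS, CAFFE_CUDA_NUM_THREADS, CUDA_POST_KERNEL_CHECK) are the ones used in upstream Caffe; passing threshold as an extra kernel argument is my assumed wiring, not a verified patch:

template <typename Dtype>
__global__ void ReLUForward(const int n, const Dtype* in, Dtype* out,
    Dtype negative_slope, Dtype threshold) {  // threshold argument is the addition
  CUDA_KERNEL_LOOP(index, n) {
    // Same shifted leaky-ReLU rule as Forward_cpu above.
    out[index] = in[index] > threshold ? (in[index] - threshold) :
        negative_slope * (in[index] - threshold);
  }
}

template <typename Dtype>
__global__ void ReLUBackward(const int n, const Dtype* in_diff,
    const Dtype* in_data, Dtype* out_diff, Dtype negative_slope,
    Dtype threshold) {
  CUDA_KERNEL_LOOP(index, n) {
    // Same gradient rule as Backward_cpu above.
    out_diff[index] = in_diff[index] * ((in_data[index] > threshold)
        + (in_data[index] <= threshold) * negative_slope);
  }
}

template <typename Dtype>
void ReLULayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
    const vector<Blob<Dtype>*>& top) {
  const Dtype* bottom_data = bottom[0]->gpu_data();
  Dtype* top_data = top[0]->mutable_gpu_data();
  const int count = bottom[0]->count();
  Dtype negative_slope = this->layer_param_.relu_param().negative_slope();
  // Read the new proto field and forward it to the kernel.
  Dtype threshold = this->layer_param_.relu_param().threshold();
  ReLUForward<Dtype><<<CAFFE_GET_BLOCKS(count), CAFFE_CUDA_NUM_THREADS>>>(
      count, bottom_data, top_data, negative_slope, threshold);
  CUDA_POST_KERNEL_CHECK;
}

Backward_gpu would fetch threshold the same way and pass it to ReLUBackward.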
After recompiling caffe and pycaffe, you can use the new parameter in a ReLU layer in your net.prototxt, for example:
layer {
  name: "threshold_relu"
  type: "ReLU"
  bottom: "input"
  top: "output"
  relu_param { threshold: 1 }
}