ghmc_loss_layer.hpp
#ifndef CAFFE_GHMC_LOSS_LAYERS_HPP_
#define CAFFE_GHMC_LOSS_LAYERS_HPP_

#include <vector>

#include "caffe/blob.hpp"
#include "caffe/layer.hpp"
#include "caffe/proto/caffe.pb.h"

#include "caffe/layers/loss_layer.hpp"
#include "caffe/layers/softmax_layer.hpp"

namespace caffe {
template <typename Dtype>
class GhmcLossLayer : public LossLayer<Dtype> {
 public:
  explicit GhmcLossLayer(const LayerParameter& param)
      : LossLayer<Dtype>(param) {}
  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
      const vector<Blob<Dtype>*>& top);
  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
      const vector<Blob<Dtype>*>& top);

  virtual inline const char* type() const { return "GhmcLoss"; }
  virtual inline int ExactNumBottomBlobs() const { return -1; }
  virtual inline int MinBottomBlobs() const { return 1; }
  virtual inline int MaxBottomBlobs() const { return 2; }
  /**
   * Unlike most loss layers, in the GhmcLossLayer we can backpropagate
   * to both inputs -- override to return true and always allow force_backward.
   */
  virtual inline bool AllowForceBackward(const int bottom_index) const {
    return true;
  }
 protected:
  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
      const vector<Blob<Dtype>*>& top);
  // virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
  //     const vector<Blob<Dtype>*>& top);
  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
  // virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
  //     const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);

  /// Read the normalization mode parameter and compute the normalizer based
  /// on the blob size and the count of valid (non-ignored) outputs.
  virtual Dtype get_normalizer(
      LossParameter_NormalizationMode normalization_mode, int valid_count);
  shared_ptr<Layer<Dtype> > softmax_layer_;   // internal SoftmaxLayer mapping predictions to a probability distribution
  vector<Blob<Dtype>*> softmax_bottom_vec_;   // bottom vector holder for the internal softmax call
  vector<Blob<Dtype>*> softmax_top_vec_;      // top vector holder for the internal softmax call
  Blob<Dtype> prob_;                          // softmax output
  bool has_ignore_label_;                     // whether to ignore instances with a particular label
  int ignore_label_;                          // the label value to ignore
  LossParameter_NormalizationMode normalization_;  // how the output loss is normalized
  int softmax_axis_, outer_num_, inner_num_;  // softmax axis and the outer/inner counts around it
  int m_;                                     // number of gradient bins (M in GHM-C; inferred)
  int count;                                  // sample count used for the weighting (inferred)
  float* r_num;                               // per-bin statistics for the gradient density estimate (inferred)
  Dtype alpha;                                // momentum of the moving-average gradient density (inferred)
  Blob<Dtype> diff_ce;                        // gradient of the cross-entropy term
  Blob<Dtype> beta;                           // per-sample GHM weight, beta = N / GD(g)
};
} // namespace caffe
#endif // CAFFE_GHMC_LOSS_LAYERS_HPP_
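
For context, the member `beta` above holds the per-sample GHM-C weight beta = N / GD(g). Below is a minimal sketch of how such a weight can be computed following the general GHM-C formulation; the function name `GhmcBeta`, its arguments, and the use of plain std::vector are illustrative assumptions and are not taken from this layer's .cpp implementation.

#include <algorithm>
#include <vector>

// Illustrative sketch (not this layer's actual code): compute per-sample
// GHM-C weights beta_i = N / GD(g_i), where g_i = 1 - p_i is the gradient
// norm of cross-entropy w.r.t. the logit of the true class and GD is the
// gradient density estimated over num_bins equal-width bins on [0, 1].
std::vector<float> GhmcBeta(const std::vector<float>& prob_true, int num_bins) {
  const int n = static_cast<int>(prob_true.size());
  std::vector<int> bin_count(num_bins, 0);
  std::vector<int> bin_of(n);
  for (int i = 0; i < n; ++i) {
    float g = 1.0f - prob_true[i];                      // gradient norm in [0, 1]
    int b = std::min(static_cast<int>(g * num_bins), num_bins - 1);
    bin_of[i] = b;
    ++bin_count[b];
  }
  std::vector<float> beta(n, 0.0f);
  for (int i = 0; i < n; ++i) {
    int c = bin_count[bin_of[i]];
    if (c > 0) {
      float gd = static_cast<float>(c) * num_bins;      // GD(g) ~ bin count / bin width, width = 1/M
      beta[i] = static_cast<float>(n) / gd;             // beta = N / GD(g)
    }
  }
  return beta;
}

In the layer itself, a weight of this form would presumably be applied to the cross-entropy term in Forward_cpu and to its gradient in Backward_cpu, with the density estimate possibly smoothed by the alpha momentum member; the exact behavior depends on the .cpp implementation, which this header does not show.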