proposal_layer.hpp
// --------------------------------------------------------
// Proposal Layer C++ Implementation
// Written by Zou Jinyi
// --------------------------------------------------------
#ifndef CAFFE_PROPOSAL_LAYER_HPP_
#define CAFFE_PROPOSAL_LAYER_HPP_

#include <vector>

#include "caffe/blob.hpp"
#include "caffe/layer.hpp"
#include "caffe/proto/caffe.pb.h"

// Convenience macros used by the implementation. Note that function-like
// min/max macros can clash with std::min/std::max in files that include
// this header.
#define max(a, b) (((a) > (b)) ? (a) : (b))
#define min(a, b) (((a) < (b)) ? (a) : (b))

namespace caffe {
/**
 * @brief Provides ROIs by writing them to the top blob directly.
 *
 * This layer produces ROIs generated from the anchors;
 * Backward and Reshape are no-ops.
 */
template <typename Dtype>
class ProposalLayer : public Layer<Dtype> {
 public:
  explicit ProposalLayer(const LayerParameter& param)
      : Layer<Dtype>(param) {}
  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
      const vector<Blob<Dtype>*>& top);
  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
      const vector<Blob<Dtype>*>& top) {}

  virtual inline const char* type() const { return "Proposal"; }
  virtual inline int ExactNumBottomBlobs() const { return 3; }
  virtual inline int MinBottomBlobs() const { return 3; }
  virtual inline int MaxBottomBlobs() const { return 3; }
  virtual inline int ExactNumTopBlobs() const { return 1; }
  virtual inline int MinTopBlobs() const { return 1; }
  virtual inline int MaxTopBlobs() const { return 1; }

 protected:
  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
      const vector<Blob<Dtype>*>& top);
  /// @brief Forward_gpu is not implemented; Caffe's default falls back to Forward_cpu.
  // virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
  //     const vector<Blob<Dtype>*>& top) {
  //   NOT_IMPLEMENTED;
  // }
  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
    NOT_IMPLEMENTED;
  }
  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
    NOT_IMPLEMENTED;
  }

  virtual void Generate_anchors();
  virtual void _whctrs(vector<float> anchor, vector<float>& ctrs);
  virtual void _ratio_enum(vector<float> anchor, vector<float>& anchors_ratio);
  virtual void _mkanchors(vector<float> ctrs, vector<float>& anchors);
  virtual void _scale_enum(vector<float> anchors_ratio, vector<float>& anchor_boxes);
  virtual void bbox_transform_inv(int img_width, int img_height,
      vector<vector<float> > bbox, vector<vector<float> > select_anchor,
      vector<vector<float> >& pred);
  virtual void apply_nms(vector<vector<float> >& pred_boxes, vector<float>& confidence);

  int feat_stride_;             // stride of the feature map relative to the input image
  int anchor_base_size_;        // base anchor size in pixels
  vector<float> anchor_scale_;  // anchor scales
  vector<float> anchor_ratio_;  // anchor aspect ratios
  int max_rois_;                // maximum number of ROIs to output
  vector<float> anchor_boxes_;  // base anchors produced by Generate_anchors()
};
} // namespace caffe
#endif // CAFFE_PROPOSAL_LAYER_HPP_
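
For reference, below is a minimal sketch (not part of this header) of how Faster R-CNN-style anchor generation, i.e. the kind of work done by the _whctrs / _mkanchors / _ratio_enum / _scale_enum helpers declared above, is commonly implemented. The names whctrs, mkanchor, and generate_anchors and the base_size / ratios / scales parameters are illustrative assumptions; the actual definitions live in the accompanying proposal_layer.cpp and may differ in detail.

#include <cmath>
#include <vector>

// Convert an anchor (x1, y1, x2, y2) into (width, height, x_center, y_center).
static void whctrs(const std::vector<float>& anchor, std::vector<float>* ctrs) {
  const float w = anchor[2] - anchor[0] + 1.0f;
  const float h = anchor[3] - anchor[1] + 1.0f;
  ctrs->assign({w, h, anchor[0] + 0.5f * (w - 1.0f), anchor[1] + 0.5f * (h - 1.0f)});
}

// Convert (width, height, x_center, y_center) back to (x1, y1, x2, y2)
// and append the four coordinates to the flat anchor list.
static void mkanchor(float w, float h, float xc, float yc,
                     std::vector<float>* anchors) {
  anchors->push_back(xc - 0.5f * (w - 1.0f));
  anchors->push_back(yc - 0.5f * (h - 1.0f));
  anchors->push_back(xc + 0.5f * (w - 1.0f));
  anchors->push_back(yc + 0.5f * (h - 1.0f));
}

// Enumerate one anchor per (ratio, scale) pair around a base_size x base_size box.
static std::vector<float> generate_anchors(int base_size,
                                           const std::vector<float>& ratios,
                                           const std::vector<float>& scales) {
  std::vector<float> base_anchor = {0.0f, 0.0f, base_size - 1.0f, base_size - 1.0f};
  std::vector<float> ctrs;
  whctrs(base_anchor, &ctrs);  // ctrs = {w, h, xc, yc}

  std::vector<float> anchors;  // flat (x1, y1, x2, y2) per anchor
  for (size_t r = 0; r < ratios.size(); ++r) {
    // Change the aspect ratio while keeping the area roughly constant.
    const float area = ctrs[0] * ctrs[1];
    const float ws = std::floor(std::sqrt(area / ratios[r]) + 0.5f);
    const float hs = std::floor(ws * ratios[r] + 0.5f);
    for (size_t s = 0; s < scales.size(); ++s) {
      mkanchor(ws * scales[s], hs * scales[s], ctrs[2], ctrs[3], &anchors);
    }
  }
  return anchors;  // ratios.size() * scales.size() anchors
}

With a typical RPN configuration (base_size 16, ratios {0.5, 1, 2}, scales {8, 16, 32}) this yields 9 anchors per feature-map location, which a proposal layer like the one declared above typically shifts by feat_stride_ across the feature map, adjusts with the predicted deltas (bbox_transform_inv), and prunes with non-maximum suppression (apply_nms).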