Fix bug when building without C++11 #212

Open · wants to merge 7 commits into base: ms
7 changes: 7 additions & 0 deletions CMakeLists.txt
@@ -41,6 +41,7 @@ caffe_option(USE_LEVELDB "Build with levelDB" ON)
caffe_option(USE_LMDB "Build with lmdb" ON)
caffe_option(ALLOW_LMDB_NOLOCK "Allow MDB_NOLOCK when reading LMDB files (only if necessary)" OFF)
caffe_option(USE_OPENMP "Link with OpenMP (when your BLAS wants OpenMP and you get linker errors)" OFF)
caffe_option(HAVE_BINDING "Build the Caffe binding" ON)

# ---[ Dependencies
include(cmake/Dependencies.cmake)
@@ -50,6 +51,8 @@ if(UNIX OR APPLE)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fPIC -Wall")
endif()

set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -D_MWAITXINTRIN_H_INCLUDED")

caffe_set_caffe_link()

if(USE_libstdcpp)
@@ -107,6 +110,10 @@ add_subdirectory(python)
add_subdirectory(matlab)
add_subdirectory(docs)

if (HAVE_BINDING)
add_subdirectory(windows/caffe.binding)
endif()

# ---[ Linter target
add_custom_target(lint COMMAND ${CMAKE_COMMAND} -P ${PROJECT_SOURCE_DIR}/cmake/lint.cmake)

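The `-std=c++11` flag added above forces C++11 mode on older GCC toolchains where C++98 is still the default (the `-D_MWAITXINTRIN_H_INCLUDED` define works around a known GCC 5.x / CUDA header clash). Note that the line sits outside the `if(UNIX OR APPLE)` guard, so it is also passed to compilers that do not accept `-std=`. As a minimal sketch, not part of this PR, the following program prints which standard a build actually picked up:

// check_standard.cpp - hypothetical helper, not part of this PR.
// __cplusplus expands to 199711L under C++98 and 201103L under C++11.
#include <iostream>

int main() {
  std::cout << "__cplusplus = " << __cplusplus << std::endl;
  return 0;
}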
6 changes: 6 additions & 0 deletions README.md
@@ -64,6 +64,12 @@ After you have built solution with Matlab support, in order to use it you have t
### Build
Now, you should be able to build `.\windows\Caffe.sln`


## Ubuntu 16.04 CMake build
`cmake . -DCUDA_NVCC_FLAGS="-D_FORCE_INLINES"`

`make`

## License and Citation

Caffe is released under the [BSD 2-Clause license](https://github.com/BVLC/caffe/blob/master/LICENSE).
4 changes: 2 additions & 2 deletions python/caffe/_caffe.cpp
@@ -188,7 +188,7 @@ namespace caffe {
bp::object labels_obj) {
// check that this network has an input MemoryDataLayer
shared_ptr<MemoryDataLayer<Dtype> > md_layer =
boost::dynamic_pointer_cast<MemoryDataLayer<Dtype>>(net->layers()[0]);
boost::dynamic_pointer_cast<MemoryDataLayer<Dtype> >(net->layers()[0]);
if (!md_layer) {
throw std::runtime_error("set_input_arrays may only be called if the"
" first layer is a MemoryDataLayer");
@@ -588,4 +588,4 @@ namespace caffe {
import_array1();
}

} // namespace caffe
} // namespace caffe
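The added space in `MemoryDataLayer<Dtype> >` is the whole fix: before C++11, consecutive `>` characters closing nested templates are lexed as the right-shift operator `>>`, so C++98 compilers reject the unspaced form. A minimal sketch of the rule, assuming only the standard library:

#include <vector>

// C++98 parses ">>" as the shift operator, so nested template arguments
// must be closed as "> >"; C++11 changed the grammar to allow ">>" here.
std::vector<std::vector<int> > ok_everywhere;  // valid in C++98 and C++11
#if __cplusplus >= 201103L
std::vector<std::vector<int>> cxx11_only;      // valid only from C++11 on
#endif

int main() { return 0; }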
5 changes: 3 additions & 2 deletions src/caffe/common.cpp
@@ -139,10 +139,11 @@ void* Caffe::RNG::generator() {
#else // Normal GPU + CPU Caffe.

Caffe::Caffe()
: cublas_handle_(NULL), curand_generator_(NULL), random_generator_(),
: cublas_handle_(NULL), curand_generator_(NULL),
#ifdef USE_CUDNN
cudnn_handle_(NULL),
cudnn_handle_(NULL),
#endif
random_generator_(),
mode_(Caffe::CPU),
solver_count_(1), solver_rank_(0), multiprocess_(false) {
// Try to create a cublas handler, and report an error if failed (but we will
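Moving `random_generator_()` below the `#ifdef USE_CUDNN` block is not cosmetic: C++ initializes members in declaration order, not in the order the initializer list names them, and GCC's `-Wreorder` (enabled by the `-Wall` already set in CMakeLists.txt) warns when the two disagree. A minimal sketch of the warning, with hypothetical member names:

// reorder.cpp - hypothetical example, not from Caffe.
// Compile with: g++ -Wall -c reorder.cpp
class Handles {
 public:
  // Members are initialized in declaration order: a_ first, then b_.
  Handles() : b_(0), a_(0) {}  // -Wreorder: 'b_' will be initialized after 'a_'
 private:
  int a_;
  int b_;
};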
8 changes: 7 additions & 1 deletion src/caffe/layers/batch_contrastive_loss_layer.cpp
@@ -26,15 +26,21 @@ void BatchContrastiveLossLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& botto
CHECK_EQ(bottom[0]->num(), bottom[0]->channels());
if (top.size() >= 2) {
// positive distance, negative distance.
#if __cplusplus < 201103L
int arr[] = { 2 };
vector<int> shape(arr,arr+sizeof(arr)/sizeof(int));
top[1]->Reshape(shape);
#else
top[1]->Reshape({ 2 });
#endif
}
}

template <typename Dtype>
void BatchContrastiveLossLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top) {
const Dtype* bottom_data = bottom[0]->cpu_data();
Dtype* bottom_diff = bottom[0]->mutable_cpu_diff();
// Dtype* bottom_diff = bottom[0]->mutable_cpu_diff();
const Dtype* label = bottom[1]->cpu_data();
int num = bottom[0]->num();
Dtype positive_distance = Dtype(0);
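This `#if __cplusplus < 201103L` block is the pattern the PR applies to every `Reshape` call site: `Blob::Reshape` takes a `vector<int>`, and the brace form `Reshape({ 2 })` relies on C++11 initializer lists, so the fallback builds an equivalent vector through the iterator-pair constructor. A self-contained sketch of the idiom, with illustrative shape values:

#include <vector>

std::vector<int> make_shape() {
#if __cplusplus < 201103L
  // C++98: fill the vector from a stack array via the (first, last) constructor.
  int arr[] = { 2, 3, 4 };
  return std::vector<int>(arr, arr + sizeof(arr) / sizeof(arr[0]));
#else
  // C++11: brace-enclosed initializer list.
  return { 2, 3, 4 };
#endif
}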
20 changes: 19 additions & 1 deletion src/caffe/layers/general_constrastive_loss_layer.cpp
@@ -36,15 +36,33 @@ void GeneralContrastiveLossLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bot
if (top.size() >= 2) {
if (add_intra_mae_) {
// positive distance, negative distance, intra_mae.
#if __cplusplus < 201103L
int arr[] = { 3 };
vector<int> shape(arr,arr+sizeof(arr)/sizeof(int));
top[1]->Reshape(shape);
#else
top[1]->Reshape({ 3 });
#endif
}
else {
// positive distance, negative distance.
#if __cplusplus < 201103L
int arr[] = { 2 };
vector<int> shape(arr,arr+sizeof(arr)/sizeof(int));
top[1]->Reshape(shape);
#else
top[1]->Reshape({ 2 });
#endif
}
}
if (max_negative_only_) {
#if __cplusplus < 201103L
int arr[] = { bottom[0]->num() };
vector<int> shape(arr,arr+sizeof(arr)/sizeof(int));
max_negative_index_.Reshape(shape);
#else
max_negative_index_.Reshape({ bottom[0]->num() });
#endif
}
}

@@ -59,7 +77,7 @@ void GeneralContrastiveLossLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>&
int num = bottom[0]->num();
int count = bottom[0]->count();
int dim = count / num;
Dtype weighted_count = num * (abs(positive_weight_) + (dim - 1)*abs(negative_weight_));
// Dtype weighted_count = num * (abs(positive_weight_) + (dim - 1)*abs(negative_weight_));
Dtype positive_distance = Dtype(0);
Dtype negative_distance = Dtype(0);
max_positive_index_ = 0;
12 changes: 12 additions & 0 deletions src/caffe/layers/general_triplet_loss_layer.cpp
@@ -27,10 +27,22 @@ void GeneralTripletLossLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
LossLayer<Dtype>::Reshape(bottom, top);
if (top.size() >= 2) {
// positive distance, negative distance.
#if __cplusplus < 201103L
int arr[] = { 2 };
vector<int> shape(arr,arr+sizeof(arr)/sizeof(int));
top[1]->Reshape(shape);
#else
top[1]->Reshape({ 2 });
#endif
}
if (hardest_only_) {
#if __cplusplus < 201103L
int arr[] = { bottom[0]->num() };
vector<int> shape(arr,arr+sizeof(arr)/sizeof(int));
hardest_index_.Reshape(shape);
#else
hardest_index_.Reshape({ bottom[0]->num() });
#endif
}
}

6 changes: 6 additions & 0 deletions src/caffe/layers/hotspot_layer.cpp
@@ -38,7 +38,13 @@ void HotspotLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
height_ = bottom[1]->height();
width_ = bottom[1]->width();
}
#if __cplusplus < 201103L
int arr[] = { bottom[0]->num(), num_point, height_, width_ };
vector<int> shape(arr,arr+sizeof(arr)/sizeof(int));
top[0]->Reshape(shape);
#else
top[0]->Reshape({ bottom[0]->num(), num_point, height_, width_ });
#endif
}

template <typename Dtype>
36 changes: 36 additions & 0 deletions src/caffe/layers/image_data_layer.cpp
@@ -55,9 +55,16 @@ void ImageDataLayer<Dtype>::DataLayerSetUp(const vector<Blob<Dtype>*>& bottom,
if (top.size() == 3) {
num_samples_ = vector<int>(max_label + 1);
class_weights_ = vector<Dtype>(max_label + 1);
#if __cplusplus < 201103L
for (vector<std::pair<std::string, int> >::iterator l = lines_.begin(); l != lines_.end(); ++l) {
num_samples_[l->second]++;
}
#else
for (auto l : lines_) {
num_samples_[l.second]++;
}
#endif

Dtype mean_sample_num = (Dtype)lines_.size() / (Dtype)(max_label + 1);
Dtype min_weight = 9999, max_weight = 0;
for (int i = 0; i < num_samples_.size(); i++) {
@@ -76,10 +83,17 @@ void ImageDataLayer<Dtype>::DataLayerSetUp(const vector<Blob<Dtype>*>& bottom,
if (balance_) {
num_samples_ = vector<int>(max_label + 1);
filename_by_class_ = vector<vector<std::pair<std::string, int> > >(max_label + 1);
#if __cplusplus < 201103L
for (vector<std::pair<std::string, int> >::iterator l = lines_.begin(); l != lines_.end(); ++l) {
num_samples_[l->second]++;
filename_by_class_[l->second].push_back(std::make_pair(l->first, 0));
}
#else
for (auto l : lines_) {
num_samples_[l.second]++;
filename_by_class_[l.second].push_back(std::make_pair(l.first, 0));
}
#endif
class_id_ = 0;
}

@@ -194,6 +208,20 @@ void ImageDataLayer<Dtype>::load_batch(Batch<Dtype>* batch) {

if (balance_) {
int pick_index = (caffe_rng_rand() % num_samples_[class_id_]) + 1;
#if __cplusplus < 201103L
vector<std::pair<std::string, int> >* samples = &filename_by_class_[class_id_];
for (vector<std::pair<std::string, int> >::iterator sample = samples->begin(); sample != samples->end(); ++sample) {
if (sample->second == 0) {
pick_index--;
if (pick_index == 0) {
this_line = std::make_pair(sample->first, class_id_);
sample->second = 1;
num_samples_[class_id_]--;
break;
}
}
}
#else
for (auto& sample : filename_by_class_[class_id_]) {
if (sample.second == 0) {
pick_index--;
@@ -205,12 +233,20 @@ void ImageDataLayer<Dtype>::load_batch(Batch<Dtype>* batch) {
}
}
}
#endif
CHECK_GT(this_line.first.size(), 0);
if (num_samples_[class_id_] == 0) {
num_samples_[class_id_] = filename_by_class_[class_id_].size();
#if __cplusplus < 201103L
vector<std::pair<std::string, int> >* samples = &filename_by_class_[class_id_];
for (vector<std::pair<std::string, int> >::iterator sample = samples->begin(); sample != samples->end(); ++sample) {
sample->second = 0;
}
#else
for (auto& sample : filename_by_class_[class_id_]) {
sample.second = 0;
}
#endif
}
}
else {
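Each range-based `for` in this file is expanded into an explicit iterator loop for C++98. The detail worth noting is mutability: the `auto& sample` loops write through the reference, so the fallback must use a non-const `iterator` (not a `const_iterator`) for the writes to `sample->second` to reach the container. A reduced sketch of the translation, with a stand-in element type:

#include <string>
#include <utility>
#include <vector>

typedef std::vector<std::pair<std::string, int> > Samples;

void mark_all_unused(Samples& samples) {
#if __cplusplus < 201103L
  // C++98 equivalent of "for (auto& sample : samples)".
  for (Samples::iterator sample = samples.begin(); sample != samples.end(); ++sample) {
    sample->second = 0;
  }
#else
  for (auto& sample : samples) {
    sample.second = 0;
  }
#endif
}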
10 changes: 5 additions & 5 deletions src/caffe/layers/insanity_layer.cpp
@@ -13,7 +13,7 @@ void InsanityLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
CHECK_GE(bottom[0]->num_axes(), 2)
<< "Number of axes of bottom blob must be >=2.";
InsanityParameter insanity_param_ = this->layer_param().insanity_param();
int channels = bottom[0]->channels();
// int channels = bottom[0]->channels();
lb_ = insanity_param_.lb();
ub_ = insanity_param_.ub();
CHECK_GT(ub_, lb_) << "upper bound must > lower bound.";
@@ -40,8 +40,8 @@ void InsanityLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
const Dtype* bottom_data = bottom[0]->cpu_data();
Dtype* top_data = top[0]->mutable_cpu_data();
const int count = bottom[0]->count();
const int dim = bottom[0]->count(2);
const int channels = bottom[0]->channels();
// const int dim = bottom[0]->count(2);
// const int channels = bottom[0]->channels();
Dtype* slope_data = alpha.mutable_cpu_data();

// For in-place computation
@@ -73,8 +73,8 @@ void InsanityLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
const Dtype* slope_data = alpha.cpu_data();
const Dtype* top_diff = top[0]->cpu_diff();
const int count = bottom[0]->count();
const int dim = bottom[0]->count(2);
const int channels = bottom[0]->channels();
// const int dim = bottom[0]->count(2);
// const int channels = bottom[0]->channels();

// For in-place computation
if (top[0] == bottom[0] && lb_ < 0) {
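Commenting out `dim` and `channels` silences GCC's `-Wunused-variable` (also part of `-Wall`) once the values are no longer referenced. A sketch of an alternative that keeps the declaration in place, assuming the value may be wanted again later; the cast-to-void idiom is valid C++98:

void forward_sketch(int count) {
  int dim = count / 2;  // currently unused, kept for future use
  (void)dim;            // suppresses -Wunused-variable without deleting the line
}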
2 changes: 1 addition & 1 deletion src/caffe/layers/label_specific_rescale.cpp
@@ -55,7 +55,7 @@ void LabelSpecificRescaleLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>&
const vector<bool>& propagate_down,
const vector<Blob<Dtype>*>& bottom) {
if (propagate_down[0]) {
const Dtype* bottom_data = bottom[0]->cpu_data();
// const Dtype* bottom_data = bottom[0]->cpu_data();
const Dtype* label_data = bottom[1]->cpu_data();
const Dtype* top_diff = top[0]->cpu_diff();
Dtype* bottom_diff = bottom[0]->mutable_cpu_diff();
37 changes: 37 additions & 0 deletions src/caffe/layers/multi_label_image_data_layer.cpp
@@ -8,7 +8,11 @@
#include <string>
#include <utility>
#include <vector>
#if __cplusplus >= 201103L
#include <random>
#else
#include <boost/random.hpp>
#endif

#include "caffe/layers/multi_label_image_data_layer.hpp"
#include "caffe/util/benchmark.hpp"
@@ -23,7 +27,11 @@ namespace caffe {
this->StopInternalThread();
}

#if __cplusplus >= 201103L
typedef std::mt19937 RANDOM_ENGINE;
#else
typedef boost::mt19937 RANDOM_ENGINE;
#endif

template <typename Dtype>
void extract_face(cv::Mat& input_image, Dtype* points, int point_count,
@@ -40,14 +48,27 @@ namespace caffe {
double face_scale = 2 * sqrt((face_center.x - mouth_center.x) * (face_center.x - mouth_center.x)
+ (face_center.y - mouth_center.y) * (face_center.y - mouth_center.y));
RANDOM_ENGINE prnd(time(NULL));

#if __cplusplus >= 201103L
face_center.x += std::uniform_int_distribution<int>(-max_random_shift, max_random_shift)(prnd);
face_center.y += std::uniform_int_distribution<int>(-max_random_shift, max_random_shift)(prnd);
std::uniform_real_distribution<float> rand_uniform(0, 1);
#else
face_center.x += boost::random::uniform_int_distribution<int>(-max_random_shift, max_random_shift)(prnd);
face_center.y += boost::random::uniform_int_distribution<int>(-max_random_shift, max_random_shift)(prnd);
boost::random::uniform_real_distribution<float> rand_uniform(0, 1);
#endif

// shear
float s = rand_uniform(prnd) * max_shear_ratio * 2 - max_shear_ratio;
// rotate
#if __cplusplus >= 201103L
int angle = std::uniform_int_distribution<int>(
-max_rotate_angle, max_rotate_angle)(prnd);
#else
int angle = boost::random::uniform_int_distribution<int>(
-max_rotate_angle, max_rotate_angle)(prnd);
#endif
float a = cos(angle / 180.0 * CV_PI);
float b = sin(angle / 180.0 * CV_PI);
// scale
@@ -61,7 +82,11 @@ namespace caffe {
float ws = ratio * hs;
int flip = 1;
if (face_mirror) {
#if __cplusplus >= 201103L
flip = std::uniform_int_distribution<int>(0, 1)(prnd)* 2 - 1;
#else
flip = boost::random::uniform_int_distribution<int>(0, 1)(prnd)* 2 - 1;
#endif
}
hs *= flip;

@@ -149,10 +174,17 @@ namespace caffe {
if (balance_) {
num_samples_ = vector<int>(max_label + 1);
filename_by_class_ = vector<vector<std::pair<std::string, shared_ptr<vector<Dtype> > > > >(max_label + 1);
#if __cplusplus >= 201103L
for (auto& l : lines_) {
num_samples_[(*l.second)[balance_by_]]++;
filename_by_class_[(*l.second)[balance_by_]].push_back(l);
}
#else
for (typename vector<std::pair<std::string, shared_ptr<vector<Dtype> > > >::iterator l = lines_.begin(); l != lines_.end(); ++l) {
num_samples_[(*l->second)[balance_by_]]++;
filename_by_class_[(*l->second)[balance_by_]].push_back(*l);
}
#endif
class_id_ = 0;
}

@@ -194,7 +226,12 @@ namespace caffe {
<< top[0]->channels() << "," << top[0]->height() << ","
<< top[0]->width();
// label
#if __cplusplus >= 201103L
vector<int> label_shape = { batch_size - label_cut_start_ - label_cut_end_, label_count };
#else
int arr[] = { batch_size - label_cut_start_ - label_cut_end_, label_count };
vector<int> label_shape(arr,arr+sizeof(arr)/sizeof(int));
#endif
top[1]->Reshape(label_shape);
for (int i = 0; i < this->prefetch_.size(); ++i) {
this->prefetch_[i]->label_.Reshape(label_shape);
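`<random>` is itself a C++11 header, so the fallback switches to Boost.Random, whose `mt19937` engine and distribution templates mirror the standard ones closely; in this file only the include and the namespaces change. A condensed sketch of the portability pattern used above:

#include <ctime>
#if __cplusplus >= 201103L
#include <random>
typedef std::mt19937 RANDOM_ENGINE;
#else
#include <boost/random.hpp>
typedef boost::mt19937 RANDOM_ENGINE;
#endif

// Draw a shift in [-max_shift, max_shift], as extract_face does for jittering.
int random_shift(int max_shift) {
  RANDOM_ENGINE prnd(static_cast<unsigned int>(std::time(NULL)));
#if __cplusplus >= 201103L
  return std::uniform_int_distribution<int>(-max_shift, max_shift)(prnd);
#else
  return boost::random::uniform_int_distribution<int>(-max_shift, max_shift)(prnd);
#endif
}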