Commit 866a2f91 authored by Guolin Ke's avatar Guolin Ke
Browse files

use ".empty()" to check container

parent 1466f907
...@@ -151,7 +151,7 @@ public: ...@@ -151,7 +151,7 @@ public:
* \return Pointer of weights * \return Pointer of weights
*/ */
inline const float* weights() const { inline const float* weights() const {
if (weights_.size() > 0) { if (!weights_.empty()) {
return weights_.data(); return weights_.data();
} else { } else {
return nullptr; return nullptr;
...@@ -166,7 +166,7 @@ public: ...@@ -166,7 +166,7 @@ public:
* \return Pointer of data boundaries on queries * \return Pointer of data boundaries on queries
*/ */
inline const data_size_t* query_boundaries() const { inline const data_size_t* query_boundaries() const {
if (query_boundaries_.size() > 0) { if (!query_boundaries_.empty()) {
return query_boundaries_.data(); return query_boundaries_.data();
} else { } else {
return nullptr; return nullptr;
...@@ -184,7 +184,7 @@ public: ...@@ -184,7 +184,7 @@ public:
* \return Pointer of weights for queries * \return Pointer of weights for queries
*/ */
inline const float* query_weights() const { inline const float* query_weights() const {
if (query_weights_.size() > 0) { if (!query_weights_.empty()) {
return query_weights_.data(); return query_weights_.data();
} else { } else {
return nullptr; return nullptr;
...@@ -196,7 +196,7 @@ public: ...@@ -196,7 +196,7 @@ public:
* \return Pointer of initial scores * \return Pointer of initial scores
*/ */
inline const float* init_score() const { inline const float* init_score() const {
if (init_score_.size() > 0) { if (!init_score_.empty()) {
return init_score_.data(); return init_score_.data();
} else { } else {
return nullptr; return nullptr;
......
...@@ -48,7 +48,7 @@ public: ...@@ -48,7 +48,7 @@ public:
bin_mapper_.reset(new BinMapper(memory_ptr)); bin_mapper_.reset(new BinMapper(memory_ptr));
memory_ptr += bin_mapper_->SizesInByte(); memory_ptr += bin_mapper_->SizesInByte();
data_size_t num_data = num_all_data; data_size_t num_data = num_all_data;
if (local_used_indices.size() > 0) { if (!local_used_indices.empty()) {
num_data = static_cast<data_size_t>(local_used_indices.size()); num_data = static_cast<data_size_t>(local_used_indices.size());
} }
if (is_sparse_) { if (is_sparse_) {
......
...@@ -13,7 +13,7 @@ template<typename VAL_T> ...@@ -13,7 +13,7 @@ template<typename VAL_T>
class ArrayArgs { class ArrayArgs {
public: public:
inline static size_t ArgMax(const std::vector<VAL_T>& array) { inline static size_t ArgMax(const std::vector<VAL_T>& array) {
if (array.size() <= 0) { if (array.empty()) {
return 0; return 0;
} }
size_t argMax = 0; size_t argMax = 0;
...@@ -26,7 +26,7 @@ public: ...@@ -26,7 +26,7 @@ public:
} }
inline static size_t ArgMin(const std::vector<VAL_T>& array) { inline static size_t ArgMin(const std::vector<VAL_T>& array) {
if (array.size() <= 0) { if (array.empty()) {
return 0; return 0;
} }
size_t argMin = 0; size_t argMin = 0;
......
...@@ -25,7 +25,7 @@ inline char tolower(char in) { ...@@ -25,7 +25,7 @@ inline char tolower(char in) {
} }
inline static std::string& Trim(std::string& str) { inline static std::string& Trim(std::string& str) {
if (str.size() <= 0) { if (str.empty()) {
return str; return str;
} }
str.erase(str.find_last_not_of(" \f\n\r\t\v") + 1); str.erase(str.find_last_not_of(" \f\n\r\t\v") + 1);
...@@ -34,7 +34,7 @@ inline static std::string& Trim(std::string& str) { ...@@ -34,7 +34,7 @@ inline static std::string& Trim(std::string& str) {
} }
inline static std::string& RemoveQuotationSymbol(std::string& str) { inline static std::string& RemoveQuotationSymbol(std::string& str) {
if (str.size() <= 0) { if (str.empty()) {
return str; return str;
} }
str.erase(str.find_last_not_of("'\"") + 1); str.erase(str.find_last_not_of("'\"") + 1);
...@@ -242,7 +242,7 @@ inline static std::vector<T2> ArrayCast(const std::vector<T>& arr) { ...@@ -242,7 +242,7 @@ inline static std::vector<T2> ArrayCast(const std::vector<T>& arr) {
template<typename T> template<typename T>
inline static std::string ArrayToString(const std::vector<T>& arr, char delimiter) { inline static std::string ArrayToString(const std::vector<T>& arr, char delimiter) {
if (arr.size() <= 0) { if (arr.empty()) {
return std::string(""); return std::string("");
} }
std::stringstream str_buf; std::stringstream str_buf;
...@@ -291,7 +291,7 @@ inline static std::vector<T> StringToArray(const std::string& str, char delimite ...@@ -291,7 +291,7 @@ inline static std::vector<T> StringToArray(const std::string& str, char delimite
template<typename T> template<typename T>
inline static std::string Join(const std::vector<T>& strs, const char* delimiter) { inline static std::string Join(const std::vector<T>& strs, const char* delimiter) {
if (strs.size() <= 0) { if (strs.empty()) {
return std::string(""); return std::string("");
} }
std::stringstream ss; std::stringstream ss;
......
...@@ -65,7 +65,7 @@ void Application::LoadParameters(int argc, char** argv) { ...@@ -65,7 +65,7 @@ void Application::LoadParameters(int argc, char** argv) {
if (params.count("config_file") > 0) { if (params.count("config_file") > 0) {
TextReader<size_t> config_reader(params["config_file"].c_str(), false); TextReader<size_t> config_reader(params["config_file"].c_str(), false);
config_reader.ReadAllLines(); config_reader.ReadAllLines();
if (config_reader.Lines().size() > 0) { if (!config_reader.Lines().empty()) {
for (auto& line : config_reader.Lines()) { for (auto& line : config_reader.Lines()) {
// remove str after "#" // remove str after "#"
if (line.size() > 0 && std::string::npos != line.find_first_of("#")) { if (line.size() > 0 && std::string::npos != line.find_first_of("#")) {
...@@ -146,7 +146,7 @@ void Application::LoadData() { ...@@ -146,7 +146,7 @@ void Application::LoadData() {
train_metric_.shrink_to_fit(); train_metric_.shrink_to_fit();
if (config_.metric_types.size() > 0) { if (!config_.metric_types.empty()) {
// only when have metrics then need to construct validation data // only when have metrics then need to construct validation data
// Add validation data, if it exists // Add validation data, if it exists
......
...@@ -135,7 +135,7 @@ void GBDT::AddValidDataset(const Dataset* valid_data, ...@@ -135,7 +135,7 @@ void GBDT::AddValidDataset(const Dataset* valid_data,
void GBDT::Bagging(int iter, const int curr_class) { void GBDT::Bagging(int iter, const int curr_class) {
// if need bagging // if need bagging
if (out_of_bag_data_indices_.size() > 0 && iter % gbdt_config_->bagging_freq == 0) { if (!out_of_bag_data_indices_.empty() && iter % gbdt_config_->bagging_freq == 0) {
// if doesn't have query data // if doesn't have query data
if (train_data_->metadata().query_boundaries() == nullptr) { if (train_data_->metadata().query_boundaries() == nullptr) {
bag_data_cnt_ = bag_data_cnt_ =
...@@ -188,7 +188,7 @@ void GBDT::Bagging(int iter, const int curr_class) { ...@@ -188,7 +188,7 @@ void GBDT::Bagging(int iter, const int curr_class) {
void GBDT::UpdateScoreOutOfBag(const Tree* tree, const int curr_class) { void GBDT::UpdateScoreOutOfBag(const Tree* tree, const int curr_class) {
// we need to predict out-of-bag socres of data for boosting // we need to predict out-of-bag socres of data for boosting
if (out_of_bag_data_indices_.size() > 0) { if (!out_of_bag_data_indices_.empty()) {
train_score_updater_->AddScore(tree, out_of_bag_data_indices_.data(), out_of_bag_data_cnt_, curr_class); train_score_updater_->AddScore(tree, out_of_bag_data_indices_.data(), out_of_bag_data_cnt_, curr_class);
} }
} }
......
...@@ -53,12 +53,12 @@ void BinMapper::FindBin(std::vector<double>* values, size_t total_sample_cnt, in ...@@ -53,12 +53,12 @@ void BinMapper::FindBin(std::vector<double>* values, size_t total_sample_cnt, in
std::sort(ref_values.begin(), ref_values.end()); std::sort(ref_values.begin(), ref_values.end());
// push zero in the front // push zero in the front
if (ref_values.size() == 0 || (ref_values[0] > 0.0f && zero_cnt > 0)) { if (ref_values.empty() || (ref_values[0] > 0.0f && zero_cnt > 0)) {
distinct_values.push_back(0); distinct_values.push_back(0);
counts.push_back(zero_cnt); counts.push_back(zero_cnt);
} }
if (ref_values.size() > 0) { if (!ref_values.empty()) {
distinct_values.push_back(ref_values[0]); distinct_values.push_back(ref_values[0]);
counts.push_back(1); counts.push_back(1);
} }
...@@ -79,7 +79,7 @@ void BinMapper::FindBin(std::vector<double>* values, size_t total_sample_cnt, in ...@@ -79,7 +79,7 @@ void BinMapper::FindBin(std::vector<double>* values, size_t total_sample_cnt, in
} }
// push zero in the back // push zero in the back
if (ref_values.size() > 0 && ref_values.back() < 0.0f && zero_cnt > 0) { if (!ref_values.empty() && ref_values.back() < 0.0f && zero_cnt > 0) {
distinct_values.push_back(0); distinct_values.push_back(0);
counts.push_back(zero_cnt); counts.push_back(zero_cnt);
} }
......
...@@ -59,7 +59,7 @@ void DatasetLoader::SetHeader(const char* filename) { ...@@ -59,7 +59,7 @@ void DatasetLoader::SetHeader(const char* filename) {
} }
} }
if (feature_names_.size() > 0) { if (!feature_names_.empty()) {
// erase label column name // erase label column name
feature_names_.erase(feature_names_.begin() + label_idx_); feature_names_.erase(feature_names_.begin() + label_idx_);
for (size_t i = 0; i < feature_names_.size(); ++i) { for (size_t i = 0; i < feature_names_.size(); ++i) {
...@@ -473,7 +473,7 @@ Dataset* DatasetLoader::CostructFromSampleData(std::vector<std::vector<double>>& ...@@ -473,7 +473,7 @@ Dataset* DatasetLoader::CostructFromSampleData(std::vector<std::vector<double>>&
} }
dataset->features_.shrink_to_fit(); dataset->features_.shrink_to_fit();
// fill feature_names_ if not header // fill feature_names_ if not header
if (feature_names_.size() <= 0) { if (feature_names_.empty()) {
for (int i = 0; i < dataset->num_total_features_; ++i) { for (int i = 0; i < dataset->num_total_features_; ++i) {
std::stringstream str_buf; std::stringstream str_buf;
str_buf << "Column_" << i; str_buf << "Column_" << i;
...@@ -493,7 +493,7 @@ void DatasetLoader::CheckDataset(const Dataset* dataset) { ...@@ -493,7 +493,7 @@ void DatasetLoader::CheckDataset(const Dataset* dataset) {
if (dataset->num_data_ <= 0) { if (dataset->num_data_ <= 0) {
Log::Fatal("Data file %s is empty", dataset->data_filename_); Log::Fatal("Data file %s is empty", dataset->data_filename_);
} }
if (dataset->features_.size() <= 0) { if (dataset->features_.empty()) {
Log::Fatal("No usable features in data file %s", dataset->data_filename_); Log::Fatal("No usable features in data file %s", dataset->data_filename_);
} }
} }
...@@ -641,7 +641,7 @@ void DatasetLoader::ConstructBinMappersFromTextData(int rank, int num_machines, ...@@ -641,7 +641,7 @@ void DatasetLoader::ConstructBinMappersFromTextData(int rank, int num_machines,
CHECK(group_idx_ < 0 || group_idx_ < dataset->num_total_features_); CHECK(group_idx_ < 0 || group_idx_ < dataset->num_total_features_);
// fill feature_names_ if not header // fill feature_names_ if not header
if (feature_names_.size() <= 0) { if (feature_names_.empty()) {
for (int i = 0; i < dataset->num_total_features_; ++i) { for (int i = 0; i < dataset->num_total_features_; ++i) {
std::stringstream str_buf; std::stringstream str_buf;
str_buf << "Column_" << i; str_buf << "Column_" << i;
...@@ -849,7 +849,7 @@ void DatasetLoader::ExtractFeaturesFromFile(const char* filename, const Parser* ...@@ -849,7 +849,7 @@ void DatasetLoader::ExtractFeaturesFromFile(const char* filename, const Parser*
// parser // parser
parser->ParseOneLine(lines[i].c_str(), &oneline_features, &tmp_label); parser->ParseOneLine(lines[i].c_str(), &oneline_features, &tmp_label);
// set initial score // set initial score
if (init_score.size() > 0) { if (!init_score.empty()) {
std::vector<double> oneline_init_score = predict_fun_(oneline_features); std::vector<double> oneline_init_score = predict_fun_(oneline_features);
for (int k = 0; k < dataset->num_class_; ++k) { for (int k = 0; k < dataset->num_class_; ++k) {
init_score[k * dataset->num_data_ + start_idx + i] = static_cast<float>(oneline_init_score[k]); init_score[k * dataset->num_data_ + start_idx + i] = static_cast<float>(oneline_init_score[k]);
...@@ -875,7 +875,7 @@ void DatasetLoader::ExtractFeaturesFromFile(const char* filename, const Parser* ...@@ -875,7 +875,7 @@ void DatasetLoader::ExtractFeaturesFromFile(const char* filename, const Parser*
} }
}; };
TextReader<data_size_t> text_reader(filename, io_config_.has_header); TextReader<data_size_t> text_reader(filename, io_config_.has_header);
if (used_data_indices.size() > 0) { if (!used_data_indices.empty()) {
// only need part of data // only need part of data
text_reader.ReadPartAndProcessParallel(used_data_indices, process_fun); text_reader.ReadPartAndProcessParallel(used_data_indices, process_fun);
} else { } else {
...@@ -884,7 +884,7 @@ void DatasetLoader::ExtractFeaturesFromFile(const char* filename, const Parser* ...@@ -884,7 +884,7 @@ void DatasetLoader::ExtractFeaturesFromFile(const char* filename, const Parser*
} }
// metadata_ will manage space of init_score // metadata_ will manage space of init_score
if (init_score.size() > 0) { if (!init_score.empty()) {
dataset->metadata_.SetInitScore(init_score.data(), dataset->num_data_ * dataset->num_class_); dataset->metadata_.SetInitScore(init_score.data(), dataset->num_data_ * dataset->num_class_);
} }
dataset->FinishLoad(); dataset->FinishLoad();
......
...@@ -126,7 +126,7 @@ public: ...@@ -126,7 +126,7 @@ public:
void LoadFromMemory(const void* memory, const std::vector<data_size_t>& local_used_indices) override { void LoadFromMemory(const void* memory, const std::vector<data_size_t>& local_used_indices) override {
const VAL_T* mem_data = reinterpret_cast<const VAL_T*>(memory); const VAL_T* mem_data = reinterpret_cast<const VAL_T*>(memory);
if (local_used_indices.size() > 0) { if (!local_used_indices.empty()) {
for (int i = 0; i < num_data_; ++i) { for (int i = 0; i < num_data_; ++i) {
data_[i] = mem_data[local_used_indices[i]]; data_[i] = mem_data[local_used_indices[i]];
} }
......
...@@ -31,7 +31,7 @@ void Metadata::Init(data_size_t num_data, int num_class, int weight_idx, int que ...@@ -31,7 +31,7 @@ void Metadata::Init(data_size_t num_data, int num_class, int weight_idx, int que
num_class_ = num_class; num_class_ = num_class;
label_ = std::vector<float>(num_data_); label_ = std::vector<float>(num_data_);
if (weight_idx >= 0) { if (weight_idx >= 0) {
if (weights_.size() > 0) { if (!weights_.empty()) {
Log::Info("Using weights in data file, ignoring the additional weights file"); Log::Info("Using weights in data file, ignoring the additional weights file");
weights_.clear(); weights_.clear();
} }
...@@ -40,11 +40,11 @@ void Metadata::Init(data_size_t num_data, int num_class, int weight_idx, int que ...@@ -40,11 +40,11 @@ void Metadata::Init(data_size_t num_data, int num_class, int weight_idx, int que
std::fill(weights_.begin(), weights_.end(), 0.0f); std::fill(weights_.begin(), weights_.end(), 0.0f);
} }
if (query_idx >= 0) { if (query_idx >= 0) {
if (query_boundaries_.size() > 0) { if (!query_boundaries_.empty()) {
Log::Info("Using query id in data file, ignoring the additional query file"); Log::Info("Using query id in data file, ignoring the additional query file");
query_boundaries_.clear(); query_boundaries_.clear();
} }
if (query_weights_.size() > 0) { query_weights_.clear(); } if (!query_weights_.empty()) { query_weights_.clear(); }
queries_ = std::vector<data_size_t>(num_data_); queries_ = std::vector<data_size_t>(num_data_);
std::fill(queries_.begin(), queries_.end(), 0); std::fill(queries_.begin(), queries_.end(), 0);
} }
...@@ -59,7 +59,7 @@ void Metadata::Init(const Metadata& fullset, const data_size_t* used_indices, da ...@@ -59,7 +59,7 @@ void Metadata::Init(const Metadata& fullset, const data_size_t* used_indices, da
label_[i] = fullset.label_[used_indices[i]]; label_[i] = fullset.label_[used_indices[i]];
} }
if (fullset.weights_.size() > 0) { if (!fullset.weights_.empty()) {
weights_ = std::vector<float>(num_used_indices); weights_ = std::vector<float>(num_used_indices);
num_weights_ = num_used_indices; num_weights_ = num_used_indices;
for (data_size_t i = 0; i < num_used_indices; i++) { for (data_size_t i = 0; i < num_used_indices; i++) {
...@@ -69,7 +69,7 @@ void Metadata::Init(const Metadata& fullset, const data_size_t* used_indices, da ...@@ -69,7 +69,7 @@ void Metadata::Init(const Metadata& fullset, const data_size_t* used_indices, da
num_weights_ = 0; num_weights_ = 0;
} }
if (fullset.init_score_.size() > 0) { if (!fullset.init_score_.empty()) {
init_score_ = std::vector<float>(num_used_indices); init_score_ = std::vector<float>(num_used_indices);
num_init_score_ = num_used_indices; num_init_score_ = num_used_indices;
for (data_size_t i = 0; i < num_used_indices; i++) { for (data_size_t i = 0; i < num_used_indices; i++) {
...@@ -79,7 +79,7 @@ void Metadata::Init(const Metadata& fullset, const data_size_t* used_indices, da ...@@ -79,7 +79,7 @@ void Metadata::Init(const Metadata& fullset, const data_size_t* used_indices, da
num_init_score_ = 0; num_init_score_ = 0;
} }
if (fullset.query_boundaries_.size() > 0) { if (!fullset.query_boundaries_.empty()) {
std::vector<data_size_t> used_query; std::vector<data_size_t> used_query;
data_size_t data_idx = 0; data_size_t data_idx = 0;
for (data_size_t qid = 0; qid < num_queries_ && data_idx < num_used_indices; ++qid) { for (data_size_t qid = 0; qid < num_queries_ && data_idx < num_used_indices; ++qid) {
...@@ -114,7 +114,7 @@ void Metadata::Init(const Metadata& fullset, const data_size_t* used_indices, da ...@@ -114,7 +114,7 @@ void Metadata::Init(const Metadata& fullset, const data_size_t* used_indices, da
} }
void Metadata::PartitionLabel(const std::vector<data_size_t>& used_indices) { void Metadata::PartitionLabel(const std::vector<data_size_t>& used_indices) {
if (used_indices.size() <= 0) { if (used_indices.empty()) {
return; return;
} }
auto old_label = label_; auto old_label = label_;
...@@ -127,8 +127,8 @@ void Metadata::PartitionLabel(const std::vector<data_size_t>& used_indices) { ...@@ -127,8 +127,8 @@ void Metadata::PartitionLabel(const std::vector<data_size_t>& used_indices) {
} }
void Metadata::CheckOrPartition(data_size_t num_all_data, const std::vector<data_size_t>& used_data_indices) { void Metadata::CheckOrPartition(data_size_t num_all_data, const std::vector<data_size_t>& used_data_indices) {
if (used_data_indices.size() == 0) { if (used_data_indices.empty()) {
if (queries_.size() > 0) { if (!queries_.empty()) {
// need convert query_id to boundaries // need convert query_id to boundaries
std::vector<data_size_t> tmp_buffer; std::vector<data_size_t> tmp_buffer;
data_size_t last_qid = -1; data_size_t last_qid = -1;
...@@ -154,21 +154,21 @@ void Metadata::CheckOrPartition(data_size_t num_all_data, const std::vector<data ...@@ -154,21 +154,21 @@ void Metadata::CheckOrPartition(data_size_t num_all_data, const std::vector<data
queries_.clear(); queries_.clear();
} }
// check weights // check weights
if (weights_.size() > 0 && num_weights_ != num_data_) { if (!weights_.empty() && num_weights_ != num_data_) {
weights_.clear(); weights_.clear();
num_weights_ = 0; num_weights_ = 0;
Log::Fatal("Weights size doesn't match data size"); Log::Fatal("Weights size doesn't match data size");
} }
// check query boundries // check query boundries
if (query_boundaries_.size() > 0 && query_boundaries_[num_queries_] != num_data_) { if (!query_boundaries_.empty() && query_boundaries_[num_queries_] != num_data_) {
query_boundaries_.clear(); query_boundaries_.clear();
num_queries_ = 0; num_queries_ = 0;
Log::Fatal("Query size doesn't match data size"); Log::Fatal("Query size doesn't match data size");
} }
// contain initial score file // contain initial score file
if (init_score_.size() > 0 && num_init_score_ != num_data_) { if (!init_score_.empty() && num_init_score_ != num_data_) {
init_score_.clear(); init_score_.clear();
num_init_score_ = 0; num_init_score_ = 0;
Log::Fatal("Initial score size doesn't match data size"); Log::Fatal("Initial score size doesn't match data size");
...@@ -182,21 +182,21 @@ void Metadata::CheckOrPartition(data_size_t num_all_data, const std::vector<data ...@@ -182,21 +182,21 @@ void Metadata::CheckOrPartition(data_size_t num_all_data, const std::vector<data
Log::Fatal("Weights size doesn't match data size"); Log::Fatal("Weights size doesn't match data size");
} }
// check query boundries // check query boundries
if (query_boundaries_.size() > 0 && query_boundaries_[num_queries_] != num_all_data) { if (!query_boundaries_.empty() && query_boundaries_[num_queries_] != num_all_data) {
query_boundaries_.clear(); query_boundaries_.clear();
num_queries_ = 0; num_queries_ = 0;
Log::Fatal("Query size doesn't match data size"); Log::Fatal("Query size doesn't match data size");
} }
// contain initial score file // contain initial score file
if (init_score_.size() > 0 && num_init_score_ != num_all_data) { if (!init_score_.empty() && num_init_score_ != num_all_data) {
init_score_.clear(); init_score_.clear();
num_init_score_ = 0; num_init_score_ = 0;
Log::Fatal("Initial score size doesn't match data size"); Log::Fatal("Initial score size doesn't match data size");
} }
// get local weights // get local weights
if (weights_.size() > 0) { if (!weights_.empty()) {
auto old_weights = weights_; auto old_weights = weights_;
num_weights_ = num_data_; num_weights_ = num_data_;
weights_ = std::vector<float>(num_data_); weights_ = std::vector<float>(num_data_);
...@@ -207,7 +207,7 @@ void Metadata::CheckOrPartition(data_size_t num_all_data, const std::vector<data ...@@ -207,7 +207,7 @@ void Metadata::CheckOrPartition(data_size_t num_all_data, const std::vector<data
} }
// get local query boundaries // get local query boundaries
if (query_boundaries_.size() > 0) { if (!query_boundaries_.empty()) {
std::vector<data_size_t> used_query; std::vector<data_size_t> used_query;
data_size_t data_idx = 0; data_size_t data_idx = 0;
for (data_size_t qid = 0; qid < num_queries_ && data_idx < num_used_data; ++qid) { for (data_size_t qid = 0; qid < num_queries_ && data_idx < num_used_data; ++qid) {
...@@ -240,7 +240,7 @@ void Metadata::CheckOrPartition(data_size_t num_all_data, const std::vector<data ...@@ -240,7 +240,7 @@ void Metadata::CheckOrPartition(data_size_t num_all_data, const std::vector<data
} }
// get local initial scores // get local initial scores
if (init_score_.size() > 0) { if (!init_score_.empty()) {
auto old_scores = init_score_; auto old_scores = init_score_;
num_init_score_ = num_data_; num_init_score_ = num_data_;
init_score_ = std::vector<float>(num_init_score_ * num_class_); init_score_ = std::vector<float>(num_init_score_ * num_class_);
...@@ -269,7 +269,7 @@ void Metadata::SetInitScore(const float* init_score, data_size_t len) { ...@@ -269,7 +269,7 @@ void Metadata::SetInitScore(const float* init_score, data_size_t len) {
if (len != num_data_ * num_class_) { if (len != num_data_ * num_class_) {
Log::Fatal("Initial score size doesn't match data size"); Log::Fatal("Initial score size doesn't match data size");
} }
if (init_score_.size() > 0) { init_score_.clear(); } if (!init_score_.empty()) { init_score_.clear(); }
num_init_score_ = num_data_; num_init_score_ = num_data_;
init_score_ = std::vector<float>(len); init_score_ = std::vector<float>(len);
for (data_size_t i = 0; i < len; ++i) { for (data_size_t i = 0; i < len; ++i) {
...@@ -285,7 +285,7 @@ void Metadata::SetLabel(const float* label, data_size_t len) { ...@@ -285,7 +285,7 @@ void Metadata::SetLabel(const float* label, data_size_t len) {
if (num_data_ != len) { if (num_data_ != len) {
Log::Fatal("len of label is not same with #data"); Log::Fatal("len of label is not same with #data");
} }
if (label_.size() > 0) { label_.clear(); } if (!label_.empty()) { label_.clear(); }
label_ = std::vector<float>(num_data_); label_ = std::vector<float>(num_data_);
for (data_size_t i = 0; i < num_data_; ++i) { for (data_size_t i = 0; i < num_data_; ++i) {
label_[i] = label[i]; label_[i] = label[i];
...@@ -303,7 +303,7 @@ void Metadata::SetWeights(const float* weights, data_size_t len) { ...@@ -303,7 +303,7 @@ void Metadata::SetWeights(const float* weights, data_size_t len) {
if (num_data_ != len) { if (num_data_ != len) {
Log::Fatal("len of weights is not same with #data"); Log::Fatal("len of weights is not same with #data");
} }
if (weights_.size() > 0) { weights_.clear(); } if (!weights_.empty()) { weights_.clear(); }
num_weights_ = num_data_; num_weights_ = num_data_;
weights_ = std::vector<float>(num_weights_); weights_ = std::vector<float>(num_weights_);
for (data_size_t i = 0; i < num_weights_; ++i) { for (data_size_t i = 0; i < num_weights_; ++i) {
...@@ -327,7 +327,7 @@ void Metadata::SetQueryBoundaries(const data_size_t* query_boundaries, data_size ...@@ -327,7 +327,7 @@ void Metadata::SetQueryBoundaries(const data_size_t* query_boundaries, data_size
if (num_data_ != sum) { if (num_data_ != sum) {
Log::Fatal("sum of query counts is not same with #data"); Log::Fatal("sum of query counts is not same with #data");
} }
if (query_boundaries_.size() > 0) { query_boundaries_.clear(); } if (!query_boundaries_.empty()) { query_boundaries_.clear(); }
num_queries_ = len; num_queries_ = len;
query_boundaries_ = std::vector<data_size_t>(num_queries_); query_boundaries_ = std::vector<data_size_t>(num_queries_);
for (data_size_t i = 0; i < num_queries_; ++i) { for (data_size_t i = 0; i < num_queries_; ++i) {
...@@ -348,7 +348,7 @@ void Metadata::SetQueryId(const data_size_t* query_id, data_size_t len) { ...@@ -348,7 +348,7 @@ void Metadata::SetQueryId(const data_size_t* query_id, data_size_t len) {
if (num_data_ != len) { if (num_data_ != len) {
Log::Fatal("len of query id is not same with #data"); Log::Fatal("len of query id is not same with #data");
} }
if (queries_.size() > 0) { queries_.clear(); } if (!queries_.empty()) { queries_.clear(); }
queries_ = std::vector<data_size_t>(num_data_); queries_ = std::vector<data_size_t>(num_data_);
for (data_size_t i = 0; i < num_weights_; ++i) { for (data_size_t i = 0; i < num_weights_; ++i) {
queries_[i] = query_id[i]; queries_[i] = query_id[i];
...@@ -385,7 +385,7 @@ void Metadata::LoadWeights() { ...@@ -385,7 +385,7 @@ void Metadata::LoadWeights() {
weight_filename.append(".weight"); weight_filename.append(".weight");
TextReader<size_t> reader(weight_filename.c_str(), false); TextReader<size_t> reader(weight_filename.c_str(), false);
reader.ReadAllLines(); reader.ReadAllLines();
if (reader.Lines().size() <= 0) { if (reader.Lines().empty()) {
return; return;
} }
Log::Info("Loading weights..."); Log::Info("Loading weights...");
...@@ -405,7 +405,7 @@ void Metadata::LoadInitialScore() { ...@@ -405,7 +405,7 @@ void Metadata::LoadInitialScore() {
init_score_filename.append(".init"); init_score_filename.append(".init");
TextReader<size_t> reader(init_score_filename.c_str(), false); TextReader<size_t> reader(init_score_filename.c_str(), false);
reader.ReadAllLines(); reader.ReadAllLines();
if (reader.Lines().size() <= 0) { if (reader.Lines().empty()) {
return; return;
} }
Log::Info("Loading initial scores..."); Log::Info("Loading initial scores...");
...@@ -441,7 +441,7 @@ void Metadata::LoadQueryBoundaries() { ...@@ -441,7 +441,7 @@ void Metadata::LoadQueryBoundaries() {
query_filename.append(".query"); query_filename.append(".query");
TextReader<size_t> reader(query_filename.c_str(), false); TextReader<size_t> reader(query_filename.c_str(), false);
reader.ReadAllLines(); reader.ReadAllLines();
if (reader.Lines().size() <= 0) { if (reader.Lines().empty()) {
return; return;
} }
Log::Info("Loading query boundaries..."); Log::Info("Loading query boundaries...");
...@@ -481,19 +481,19 @@ void Metadata::LoadFromMemory(const void* memory) { ...@@ -481,19 +481,19 @@ void Metadata::LoadFromMemory(const void* memory) {
num_queries_ = *(reinterpret_cast<const data_size_t*>(mem_ptr)); num_queries_ = *(reinterpret_cast<const data_size_t*>(mem_ptr));
mem_ptr += sizeof(num_queries_); mem_ptr += sizeof(num_queries_);
if (label_.size() > 0) { label_.clear(); } if (!label_.empty()) { label_.clear(); }
label_ = std::vector<float>(num_data_); label_ = std::vector<float>(num_data_);
std::memcpy(label_.data(), mem_ptr, sizeof(float)*num_data_); std::memcpy(label_.data(), mem_ptr, sizeof(float)*num_data_);
mem_ptr += sizeof(float)*num_data_; mem_ptr += sizeof(float)*num_data_;
if (num_weights_ > 0) { if (num_weights_ > 0) {
if (weights_.size() > 0) { weights_.clear(); } if (!weights_.empty()) { weights_.clear(); }
weights_ = std::vector<float>(num_weights_); weights_ = std::vector<float>(num_weights_);
std::memcpy(weights_.data(), mem_ptr, sizeof(float)*num_weights_); std::memcpy(weights_.data(), mem_ptr, sizeof(float)*num_weights_);
mem_ptr += sizeof(float)*num_weights_; mem_ptr += sizeof(float)*num_weights_;
} }
if (num_queries_ > 0) { if (num_queries_ > 0) {
if (query_boundaries_.size() > 0) { query_boundaries_.clear(); } if (!query_boundaries_.empty()) { query_boundaries_.clear(); }
query_boundaries_ = std::vector<data_size_t>(num_queries_ + 1); query_boundaries_ = std::vector<data_size_t>(num_queries_ + 1);
std::memcpy(query_boundaries_.data(), mem_ptr, sizeof(data_size_t)*(num_queries_ + 1)); std::memcpy(query_boundaries_.data(), mem_ptr, sizeof(data_size_t)*(num_queries_ + 1));
mem_ptr += sizeof(data_size_t)*(num_queries_ + 1); mem_ptr += sizeof(data_size_t)*(num_queries_ + 1);
...@@ -506,10 +506,10 @@ void Metadata::SaveBinaryToFile(FILE* file) const { ...@@ -506,10 +506,10 @@ void Metadata::SaveBinaryToFile(FILE* file) const {
fwrite(&num_weights_, sizeof(num_weights_), 1, file); fwrite(&num_weights_, sizeof(num_weights_), 1, file);
fwrite(&num_queries_, sizeof(num_queries_), 1, file); fwrite(&num_queries_, sizeof(num_queries_), 1, file);
fwrite(label_.data(), sizeof(float), num_data_, file); fwrite(label_.data(), sizeof(float), num_data_, file);
if (weights_.size() > 0) { if (!weights_.empty()) {
fwrite(weights_.data(), sizeof(float), num_weights_, file); fwrite(weights_.data(), sizeof(float), num_weights_, file);
} }
if (query_boundaries_.size() > 0) { if (!query_boundaries_.empty()) {
fwrite(query_boundaries_.data(), sizeof(data_size_t), num_queries_ + 1, file); fwrite(query_boundaries_.data(), sizeof(data_size_t), num_queries_ + 1, file);
} }
...@@ -519,10 +519,10 @@ size_t Metadata::SizesInByte() const { ...@@ -519,10 +519,10 @@ size_t Metadata::SizesInByte() const {
size_t size = sizeof(num_data_) + sizeof(num_weights_) size_t size = sizeof(num_data_) + sizeof(num_weights_)
+ sizeof(num_queries_); + sizeof(num_queries_);
size += sizeof(float) * num_data_; size += sizeof(float) * num_data_;
if (weights_.size() > 0) { if (!weights_.empty()) {
size += sizeof(float) * num_weights_; size += sizeof(float) * num_weights_;
} }
if (query_boundaries_.size() > 0) { if (!query_boundaries_.empty()) {
size += sizeof(data_size_t) * (num_queries_ + 1); size += sizeof(data_size_t) * (num_queries_ + 1);
} }
return size; return size;
......
...@@ -239,7 +239,7 @@ public: ...@@ -239,7 +239,7 @@ public:
deltas_.shrink_to_fit(); deltas_.shrink_to_fit();
vals_.shrink_to_fit(); vals_.shrink_to_fit();
if (local_used_indices.size() <= 0) { if (local_used_indices.empty()) {
// generate fast index // generate fast index
GetFastIndex(); GetFastIndex();
} else { } else {
......
...@@ -73,7 +73,7 @@ Linkers::~Linkers() { ...@@ -73,7 +73,7 @@ Linkers::~Linkers() {
void Linkers::ParseMachineList(const char * filename) { void Linkers::ParseMachineList(const char * filename) {
TextReader<size_t> machine_list_reader(filename, false); TextReader<size_t> machine_list_reader(filename, false);
machine_list_reader.ReadAllLines(); machine_list_reader.ReadAllLines();
if (machine_list_reader.Lines().size() <= 0) { if (machine_list_reader.Lines().empty()) {
Log::Fatal("Machine list file %s doesn't exist", filename); Log::Fatal("Machine list file %s doesn't exist", filename);
} }
...@@ -97,7 +97,7 @@ void Linkers::ParseMachineList(const char * filename) { ...@@ -97,7 +97,7 @@ void Linkers::ParseMachineList(const char * filename) {
client_ips_.push_back(str_after_split[0]); client_ips_.push_back(str_after_split[0]);
client_ports_.push_back(atoi(str_after_split[1].c_str())); client_ports_.push_back(atoi(str_after_split[1].c_str()));
} }
if (client_ips_.size() == 0) { if (client_ips_.empty()) {
Log::Fatal("Machine list file doesn't contain any ip and port. \ Log::Fatal("Machine list file doesn't contain any ip and port. \
Please check it again"); Please check it again");
} }
......
...@@ -125,7 +125,7 @@ void DataParallelTreeLearner::FindBestThresholds() { ...@@ -125,7 +125,7 @@ void DataParallelTreeLearner::FindBestThresholds() {
// construct local histograms // construct local histograms
#pragma omp parallel for schedule(guided) #pragma omp parallel for schedule(guided)
for (int feature_index = 0; feature_index < num_features_; ++feature_index) { for (int feature_index = 0; feature_index < num_features_; ++feature_index) {
if ((is_feature_used_.size() > 0 && is_feature_used_[feature_index] == false)) continue; if ((!is_feature_used_.empty() && is_feature_used_[feature_index] == false)) continue;
// construct histograms for smaller leaf // construct histograms for smaller leaf
if (ordered_bins_[feature_index] == nullptr) { if (ordered_bins_[feature_index] == nullptr) {
smaller_leaf_histogram_array_[feature_index].Construct(smaller_leaf_splits_->data_indices(), smaller_leaf_histogram_array_[feature_index].Construct(smaller_leaf_splits_->data_indices(),
......
...@@ -311,7 +311,7 @@ void SerialTreeLearner::FindBestThresholds() { ...@@ -311,7 +311,7 @@ void SerialTreeLearner::FindBestThresholds() {
#pragma omp parallel for schedule(guided) #pragma omp parallel for schedule(guided)
for (int feature_index = 0; feature_index < num_features_; feature_index++) { for (int feature_index = 0; feature_index < num_features_; feature_index++) {
// feature is not used // feature is not used
if ((is_feature_used_.size() > 0 && is_feature_used_[feature_index] == false)) continue; if ((!is_feature_used_.empty() && is_feature_used_[feature_index] == false)) continue;
// if parent(larger) leaf cannot split at current feature // if parent(larger) leaf cannot split at current feature
if (parent_leaf_histogram_array_ != nullptr && !parent_leaf_histogram_array_[feature_index].is_splittable()) { if (parent_leaf_histogram_array_ != nullptr && !parent_leaf_histogram_array_[feature_index].is_splittable()) {
smaller_leaf_histogram_array_[feature_index].set_is_splittable(false); smaller_leaf_histogram_array_[feature_index].set_is_splittable(false);
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment