Unverified Commit 9b61166f authored by Nikita Titov's avatar Nikita Titov Committed by GitHub
Browse files

fixed cpplint errors about spaces and newlines (#2481)

parent 6036e07d
......@@ -72,7 +72,8 @@ namespace LightGBM {
}
std::vector<double> GreedyFindBin(const double* distinct_values, const int* counts,
int num_distinct_values, int max_bin, size_t total_cnt, int min_data_in_bin) {
int num_distinct_values, int max_bin,
size_t total_cnt, int min_data_in_bin) {
std::vector<double> bin_upper_bound;
CHECK(max_bin > 0);
if (num_distinct_values <= max_bin) {
......@@ -150,7 +151,9 @@ namespace LightGBM {
}
std::vector<double> FindBinWithPredefinedBin(const double* distinct_values, const int* counts,
int num_distinct_values, int max_bin, size_t total_sample_cnt, int min_data_in_bin, const std::vector<double>& forced_upper_bounds) {
int num_distinct_values, int max_bin,
size_t total_sample_cnt, int min_data_in_bin,
const std::vector<double>& forced_upper_bounds) {
std::vector<double> bin_upper_bound;
// get list of distinct values
......@@ -305,7 +308,8 @@ namespace LightGBM {
}
std::vector<double> FindBinWithZeroAsOneBin(const double* distinct_values, const int* counts, int num_distinct_values,
int max_bin, size_t total_sample_cnt, int min_data_in_bin, const std::vector<double>& forced_upper_bounds) {
int max_bin, size_t total_sample_cnt, int min_data_in_bin,
const std::vector<double>& forced_upper_bounds) {
if (forced_upper_bounds.empty()) {
return FindBinWithZeroAsOneBin(distinct_values, counts, num_distinct_values, max_bin, total_sample_cnt, min_data_in_bin);
} else {
......@@ -315,7 +319,8 @@ namespace LightGBM {
}
void BinMapper::FindBin(double* values, int num_sample_values, size_t total_sample_cnt,
int max_bin, int min_data_in_bin, int min_split_data, BinType bin_type, bool use_missing, bool zero_as_missing,
int max_bin, int min_data_in_bin, int min_split_data, BinType bin_type,
bool use_missing, bool zero_as_missing,
const std::vector<double>& forced_upper_bounds) {
int na_cnt = 0;
int tmp_num_sample_values = 0;
......
......@@ -4,7 +4,6 @@
*/
#include <LightGBM/dataset_loader.h>
#include <LightGBM/json11.hpp>
#include <LightGBM/network.h>
#include <LightGBM/utils/array_args.h>
#include <LightGBM/utils/log.h>
......@@ -12,6 +11,8 @@
#include <fstream>
#include <LightGBM/json11.hpp>
using namespace json11;
namespace LightGBM {
......@@ -270,7 +271,9 @@ Dataset* DatasetLoader::LoadFromFileAlignWithOtherDataset(const char* filename,
return dataset.release();
}
Dataset* DatasetLoader::LoadFromBinFile(const char* data_filename, const char* bin_filename, int rank, int num_machines, int* num_global_data, std::vector<data_size_t>* used_data_indices) {
Dataset* DatasetLoader::LoadFromBinFile(const char* data_filename, const char* bin_filename,
int rank, int num_machines, int* num_global_data,
std::vector<data_size_t>* used_data_indices) {
auto dataset = std::unique_ptr<Dataset>(new Dataset());
auto reader = VirtualFileReader::Make(bin_filename);
dataset->data_filename_ = data_filename;
......@@ -470,13 +473,11 @@ Dataset* DatasetLoader::LoadFromBinFile(const char* data_filename, const char* b
mem_ptr += sizeof(int);
dataset->forced_bin_bounds_[i] = std::vector<double>();
const double* tmp_ptr_forced_bounds = reinterpret_cast<const double*>(mem_ptr);
for (int j = 0; j < num_bounds; ++j) {
double bound = tmp_ptr_forced_bounds[j];
dataset->forced_bin_bounds_[i].push_back(bound);
}
mem_ptr += num_bounds * sizeof(double);
}
// read size of meta data
......@@ -821,7 +822,9 @@ std::vector<std::string> DatasetLoader::SampleTextDataFromMemory(const std::vect
return out;
}
std::vector<std::string> DatasetLoader::SampleTextDataFromFile(const char* filename, const Metadata& metadata, int rank, int num_machines, int* num_global_data, std::vector<data_size_t>* used_data_indices) {
std::vector<std::string> DatasetLoader::SampleTextDataFromFile(const char* filename, const Metadata& metadata,
int rank, int num_machines, int* num_global_data,
std::vector<data_size_t>* used_data_indices) {
const data_size_t sample_cnt = static_cast<data_size_t>(config_.bin_construct_sample_cnt);
TextReader<data_size_t> text_reader(filename, config_.header);
std::vector<std::string> out_data;
......@@ -867,7 +870,9 @@ std::vector<std::string> DatasetLoader::SampleTextDataFromFile(const char* filen
return out_data;
}
void DatasetLoader::ConstructBinMappersFromTextData(int rank, int num_machines, const std::vector<std::string>& sample_data, const Parser* parser, Dataset* dataset) {
void DatasetLoader::ConstructBinMappersFromTextData(int rank, int num_machines,
const std::vector<std::string>& sample_data,
const Parser* parser, Dataset* dataset) {
std::vector<std::vector<double>> sample_values;
std::vector<std::vector<int>> sample_indices;
std::vector<std::pair<int, double>> oneline_features;
......@@ -906,7 +911,8 @@ void DatasetLoader::ConstructBinMappersFromTextData(int rank, int num_machines,
// get forced split
std::string forced_bins_path = config_.forcedbins_filename;
std::vector<std::vector<double>> forced_bin_bounds = DatasetLoader::GetForcedBins(forced_bins_path, dataset->num_total_features_,
std::vector<std::vector<double>> forced_bin_bounds = DatasetLoader::GetForcedBins(forced_bins_path,
dataset->num_total_features_,
categorical_features_);
// check the range of label_idx, weight_idx and group_idx
......@@ -1149,7 +1155,8 @@ void DatasetLoader::ExtractFeaturesFromMemory(std::vector<std::string>* text_dat
}
/*! \brief Extract local features from file */
void DatasetLoader::ExtractFeaturesFromFile(const char* filename, const Parser* parser, const std::vector<data_size_t>& used_data_indices, Dataset* dataset) {
void DatasetLoader::ExtractFeaturesFromFile(const char* filename, const Parser* parser,
const std::vector<data_size_t>& used_data_indices, Dataset* dataset) {
std::vector<double> init_score;
if (predict_fun_ != nullptr) {
init_score = std::vector<double>(dataset->num_data_ * num_class_);
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment