Commit 7f0d5358 authored by Guolin Ke's avatar Guolin Ke
Browse files

Change fatal output to stderr. Move newline handling into the logger.

parent 12d884ff
...@@ -18,7 +18,7 @@ namespace LightGBM { ...@@ -18,7 +18,7 @@ namespace LightGBM {
#ifndef CHECK_NOTNULL #ifndef CHECK_NOTNULL
#define CHECK_NOTNULL(pointer) \ #define CHECK_NOTNULL(pointer) \
if ((pointer) == nullptr) LightGBM::Log::Fatal(#pointer " Can't be NULL\n"); if ((pointer) == nullptr) LightGBM::Log::Fatal(#pointer " Can't be NULL");
#endif #endif
// A enumeration type of log message levels. The values are ordered: // A enumeration type of log message levels. The values are ordered:
...@@ -67,8 +67,12 @@ public: ...@@ -67,8 +67,12 @@ public:
static void Fatal(const char *format, ...) { static void Fatal(const char *format, ...) {
va_list val; va_list val;
va_start(val, format); va_start(val, format);
Write(LogLevel::Fatal, "Fatal", format, val); fprintf(stderr, "[LightGBM] [Fatel] ");
vfprintf(stderr, format, val);
fprintf(stderr, "\n");
fflush(stderr);
va_end(val); va_end(val);
exit(1);
} }
private: private:
...@@ -78,11 +82,8 @@ private: ...@@ -78,11 +82,8 @@ private:
// write to STDOUT // write to STDOUT
printf("[LightGBM] [%s] ", level_str); printf("[LightGBM] [%s] ", level_str);
vprintf(format, val); vprintf(format, val);
printf("\n");
fflush(stdout); fflush(stdout);
if (level == LogLevel::Fatal) {
exit(1);
}
} }
} }
......
...@@ -69,7 +69,7 @@ void Application::LoadParameters(int argc, char** argv) { ...@@ -69,7 +69,7 @@ void Application::LoadParameters(int argc, char** argv) {
params[key] = value; params[key] = value;
} }
else { else {
Log::Error("Unknown parameter in command line: %s\n", argv[i]); Log::Error("Unknown parameter in command line: %s", argv[i]);
} }
} }
// check for alias // check for alias
...@@ -105,7 +105,7 @@ void Application::LoadParameters(int argc, char** argv) { ...@@ -105,7 +105,7 @@ void Application::LoadParameters(int argc, char** argv) {
} }
} }
} else { } else {
Log::Error("Config file: %s doesn't exist, will ignore\n", Log::Error("Config file: %s doesn't exist, will ignore",
params["config_file"].c_str()); params["config_file"].c_str());
} }
} }
...@@ -113,7 +113,7 @@ void Application::LoadParameters(int argc, char** argv) { ...@@ -113,7 +113,7 @@ void Application::LoadParameters(int argc, char** argv) {
ParameterAlias::KeyAliasTransform(&params); ParameterAlias::KeyAliasTransform(&params);
// load configs // load configs
config_.Set(params); config_.Set(params);
Log::Info("Loading parameters .. finished\n"); Log::Info("Loading parameters .. finished");
} }
void Application::LoadData() { void Application::LoadData() {
...@@ -201,7 +201,7 @@ void Application::LoadData() { ...@@ -201,7 +201,7 @@ void Application::LoadData() {
} }
auto end_time = std::chrono::high_resolution_clock::now(); auto end_time = std::chrono::high_resolution_clock::now();
// output used time on each iteration // output used time on each iteration
Log::Info("Finish loading data, use %f seconds \n", Log::Info("Finish loading data, use %f seconds",
std::chrono::duration<double, std::milli>(end_time - start_time) * 1e-3); std::chrono::duration<double, std::milli>(end_time - start_time) * 1e-3);
} }
...@@ -209,7 +209,7 @@ void Application::InitTrain() { ...@@ -209,7 +209,7 @@ void Application::InitTrain() {
if (config_.is_parallel) { if (config_.is_parallel) {
// need init network // need init network
Network::Init(config_.network_config); Network::Init(config_.network_config);
Log::Info("Finish network initialization\n"); Log::Info("Finish network initialization");
// sync global random seed for feature patition // sync global random seed for feature patition
if (config_.boosting_type == BoostingType::kGBDT) { if (config_.boosting_type == BoostingType::kGBDT) {
GBDTConfig* gbdt_config = GBDTConfig* gbdt_config =
...@@ -240,13 +240,13 @@ void Application::InitTrain() { ...@@ -240,13 +240,13 @@ void Application::InitTrain() {
boosting_->AddDataset(valid_datas_[i], boosting_->AddDataset(valid_datas_[i],
ConstPtrInVectorWarpper<Metric>(valid_metrics_[i])); ConstPtrInVectorWarpper<Metric>(valid_metrics_[i]));
} }
Log::Info("Finish training initilization.\n"); Log::Info("Finish training initilization.");
} }
void Application::Train() { void Application::Train() {
Log::Info("Start train\n"); Log::Info("Start train");
boosting_->Train(); boosting_->Train();
Log::Info("Finish train\n"); Log::Info("Finish train");
} }
...@@ -254,14 +254,14 @@ void Application::Predict() { ...@@ -254,14 +254,14 @@ void Application::Predict() {
// create predictor // create predictor
Predictor predictor(boosting_, config_.io_config.is_sigmoid); Predictor predictor(boosting_, config_.io_config.is_sigmoid);
predictor.Predict(config_.io_config.data_filename.c_str(), config_.io_config.output_result.c_str()); predictor.Predict(config_.io_config.data_filename.c_str(), config_.io_config.output_result.c_str());
Log::Info("Finish predict.\n"); Log::Info("Finish predict.");
} }
void Application::InitPredict() { void Application::InitPredict() {
boosting_ = boosting_ =
Boosting::CreateBoosting(config_.boosting_type, config_.boosting_config); Boosting::CreateBoosting(config_.boosting_type, config_.boosting_config);
LoadModel(); LoadModel();
Log::Info("Finish predict initilization.\n"); Log::Info("Finish predict initilization.");
} }
void Application::LoadModel() { void Application::LoadModel() {
......
...@@ -124,14 +124,14 @@ public: ...@@ -124,14 +124,14 @@ public:
(const char* buffer, std::vector<std::pair<int, double>>* feature) { (const char* buffer, std::vector<std::pair<int, double>>* feature) {
parser->ParseOneLine(buffer, feature, &tmp_label); parser->ParseOneLine(buffer, feature, &tmp_label);
}; };
Log::Info("Start prediction for data %s with labels\n", data_filename); Log::Info("Start prediction for data %s with labels", data_filename);
} else { } else {
// parse function without label // parse function without label
parser_fun = [this, &parser] parser_fun = [this, &parser]
(const char* buffer, std::vector<std::pair<int, double>>* feature) { (const char* buffer, std::vector<std::pair<int, double>>* feature) {
parser->ParseOneLine(buffer, feature); parser->ParseOneLine(buffer, feature);
}; };
Log::Info("Start prediction for data %s without label\n", data_filename); Log::Info("Start prediction for data %s without label", data_filename);
} }
std::function<double(const std::vector<std::pair<int, double>>&)> predict_fun; std::function<double(const std::vector<std::pair<int, double>>&)> predict_fun;
if (is_simgoid_) { if (is_simgoid_) {
......
...@@ -149,7 +149,7 @@ void GBDT::Bagging(int iter) { ...@@ -149,7 +149,7 @@ void GBDT::Bagging(int iter) {
bag_data_cnt_ = cur_left_cnt; bag_data_cnt_ = cur_left_cnt;
out_of_bag_data_cnt_ = num_data_ - bag_data_cnt_; out_of_bag_data_cnt_ = num_data_ - bag_data_cnt_;
} }
Log::Info("re-bagging, using %d data to train\n", bag_data_cnt_); Log::Info("re-bagging, using %d data to train", bag_data_cnt_);
// set bagging data to tree learner // set bagging data to tree learner
tree_learner_->SetBaggingData(bag_data_indices_, bag_data_cnt_); tree_learner_->SetBaggingData(bag_data_indices_, bag_data_cnt_);
} }
...@@ -175,7 +175,7 @@ void GBDT::Train() { ...@@ -175,7 +175,7 @@ void GBDT::Train() {
Tree * new_tree = TrainOneTree(); Tree * new_tree = TrainOneTree();
// if cannot learn a new tree, then stop // if cannot learn a new tree, then stop
if (new_tree->num_leaves() <= 1) { if (new_tree->num_leaves() <= 1) {
Log::Info("Can't training anymore, there isn't any leaf meets split requirements.\n"); Log::Info("Can't training anymore, there isn't any leaf meets split requirements.");
break; break;
} }
// shrinkage by learning rate // shrinkage by learning rate
...@@ -204,11 +204,11 @@ void GBDT::Train() { ...@@ -204,11 +204,11 @@ void GBDT::Train() {
} }
auto end_time = std::chrono::high_resolution_clock::now(); auto end_time = std::chrono::high_resolution_clock::now();
// output used time per iteration // output used time per iteration
Log::Info("%f seconds elapsed, finished %d iteration\n", std::chrono::duration<double, Log::Info("%f seconds elapsed, finished %d iteration", std::chrono::duration<double,
std::milli>(end_time - start_time) * 1e-3, iter + 1); std::milli>(end_time - start_time) * 1e-3, iter + 1);
if (is_early_stopping) { if (is_early_stopping) {
// close file with an early-stopping message // close file with an early-stopping message
Log::Info("Early stopping at iteration %d, the best iteration round is %d\n", iter + 1, iter + 1 - early_stopping_round_); Log::Info("Early stopping at iteration %d, the best iteration round is %d", iter + 1, iter + 1 - early_stopping_round_);
fclose(output_model_file); fclose(output_model_file);
return; return;
} }
...@@ -307,7 +307,7 @@ void GBDT::ModelsFromString(const std::string& model_str, int num_used_model) { ...@@ -307,7 +307,7 @@ void GBDT::ModelsFromString(const std::string& model_str, int num_used_model) {
} }
} }
if (i == lines.size()) { if (i == lines.size()) {
Log::Fatal("Model file doesn't contain max_feature_idx\n"); Log::Fatal("Model file doesn't contain max_feature_idx");
return; return;
} }
// get sigmoid parameter // get sigmoid parameter
...@@ -346,7 +346,7 @@ void GBDT::ModelsFromString(const std::string& model_str, int num_used_model) { ...@@ -346,7 +346,7 @@ void GBDT::ModelsFromString(const std::string& model_str, int num_used_model) {
} }
} }
Log::Info("%d models has been loaded\n\n", models_.size()); Log::Info("%d models has been loaded\n", models_.size());
} }
double GBDT::PredictRaw(const double* value) const { double GBDT::PredictRaw(const double* value) const {
......
...@@ -21,7 +21,7 @@ Dataset::Dataset(const char* data_filename, const char* init_score_filename, ...@@ -21,7 +21,7 @@ Dataset::Dataset(const char* data_filename, const char* init_score_filename,
CheckCanLoadFromBin(); CheckCanLoadFromBin();
if (is_loading_from_binfile_ && predict_fun != nullptr) { if (is_loading_from_binfile_ && predict_fun != nullptr) {
Log::Info("Cannot performing initialization of prediction by using binary file, using text file instead\n"); Log::Info("Cannot performing initialization of prediction by using binary file, using text file instead");
is_loading_from_binfile_ = false; is_loading_from_binfile_ = false;
} }
...@@ -38,7 +38,7 @@ Dataset::Dataset(const char* data_filename, const char* init_score_filename, ...@@ -38,7 +38,7 @@ Dataset::Dataset(const char* data_filename, const char* init_score_filename,
} else { } else {
// only need to load initilize score, other meta data will load from bin flie // only need to load initilize score, other meta data will load from bin flie
metadata_.Init(init_score_filename); metadata_.Init(init_score_filename);
Log::Info("Loading data set from binary file\n"); Log::Info("Loading data set from binary file");
parser_ = nullptr; parser_ = nullptr;
text_reader_ = nullptr; text_reader_ = nullptr;
} }
...@@ -209,7 +209,7 @@ void Dataset::ConstructBinMappers(int rank, int num_machines, const std::vector< ...@@ -209,7 +209,7 @@ void Dataset::ConstructBinMappers(int rank, int num_machines, const std::vector<
num_data_, is_enable_sparse_)); num_data_, is_enable_sparse_));
} else { } else {
// if feature is trival(only 1 bin), free spaces // if feature is trival(only 1 bin), free spaces
Log::Error("Feature %d only contains one value, will be ignored\n", i); Log::Error("Feature %d only contains one value, will be ignored", i);
delete bin_mappers[i]; delete bin_mappers[i];
} }
} }
...@@ -489,7 +489,7 @@ void Dataset::SaveBinaryFile() { ...@@ -489,7 +489,7 @@ void Dataset::SaveBinaryFile() {
Log::Fatal("Cannot write binary data to %s ", bin_filename.c_str()); Log::Fatal("Cannot write binary data to %s ", bin_filename.c_str());
} }
Log::Info("Saving data to binary file: %s\n", data_filename_); Log::Info("Saving data to binary file: %s", data_filename_);
// get size of header // get size of header
size_t size_of_header = sizeof(global_num_data_) + sizeof(is_enable_sparse_) size_t size_of_header = sizeof(global_num_data_) + sizeof(is_enable_sparse_)
......
...@@ -61,7 +61,7 @@ void Metadata::CheckOrPartition(data_size_t num_all_data, const std::vector<data ...@@ -61,7 +61,7 @@ void Metadata::CheckOrPartition(data_size_t num_all_data, const std::vector<data
if (used_data_indices.size() == 0) { if (used_data_indices.size() == 0) {
// check weights // check weights
if (weights_ != nullptr && num_weights_ != num_data_) { if (weights_ != nullptr && num_weights_ != num_data_) {
Log::Error("Initial weight size doesn't equal to data, weights will be ignored\n"); Log::Error("Initial weight size doesn't equal to data, weights will be ignored");
delete[] weights_; delete[] weights_;
num_weights_ = 0; num_weights_ = 0;
weights_ = nullptr; weights_ = nullptr;
...@@ -69,7 +69,7 @@ void Metadata::CheckOrPartition(data_size_t num_all_data, const std::vector<data ...@@ -69,7 +69,7 @@ void Metadata::CheckOrPartition(data_size_t num_all_data, const std::vector<data
// check query boundries // check query boundries
if (query_boundaries_ != nullptr && query_boundaries_[num_queries_] != num_data_) { if (query_boundaries_ != nullptr && query_boundaries_[num_queries_] != num_data_) {
Log::Error("Initial query size doesn't equal to data, queies will be ignored\n"); Log::Error("Initial query size doesn't equal to data, queies will be ignored");
delete[] query_boundaries_; delete[] query_boundaries_;
num_queries_ = 0; num_queries_ = 0;
query_boundaries_ = nullptr; query_boundaries_ = nullptr;
...@@ -78,21 +78,21 @@ void Metadata::CheckOrPartition(data_size_t num_all_data, const std::vector<data ...@@ -78,21 +78,21 @@ void Metadata::CheckOrPartition(data_size_t num_all_data, const std::vector<data
// contain initial score file // contain initial score file
if (init_score_ != nullptr && num_init_score_ != num_data_) { if (init_score_ != nullptr && num_init_score_ != num_data_) {
delete[] init_score_; delete[] init_score_;
Log::Error("Initial score size doesn't equal to data, score file will be ignored\n"); Log::Error("Initial score size doesn't equal to data, score file will be ignored");
num_init_score_ = 0; num_init_score_ = 0;
} }
} else { } else {
data_size_t num_used_data = static_cast<data_size_t>(used_data_indices.size()); data_size_t num_used_data = static_cast<data_size_t>(used_data_indices.size());
// check weights // check weights
if (weights_ != nullptr && num_weights_ != num_all_data) { if (weights_ != nullptr && num_weights_ != num_all_data) {
Log::Error("Initial weights size doesn't equal to data, weights will be ignored\n"); Log::Error("Initial weights size doesn't equal to data, weights will be ignored");
delete[] weights_; delete[] weights_;
num_weights_ = 0; num_weights_ = 0;
weights_ = nullptr; weights_ = nullptr;
} }
// check query boundries // check query boundries
if (query_boundaries_ != nullptr && query_boundaries_[num_queries_] != num_all_data) { if (query_boundaries_ != nullptr && query_boundaries_[num_queries_] != num_all_data) {
Log::Error("Initial query size doesn't equal to data , queries will be ignored\n"); Log::Error("Initial query size doesn't equal to data , queries will be ignored");
delete[] query_boundaries_; delete[] query_boundaries_;
num_queries_ = 0; num_queries_ = 0;
query_boundaries_ = nullptr; query_boundaries_ = nullptr;
...@@ -100,7 +100,7 @@ void Metadata::CheckOrPartition(data_size_t num_all_data, const std::vector<data ...@@ -100,7 +100,7 @@ void Metadata::CheckOrPartition(data_size_t num_all_data, const std::vector<data
// contain initial score file // contain initial score file
if (init_score_ != nullptr && num_init_score_ != num_all_data) { if (init_score_ != nullptr && num_init_score_ != num_all_data) {
Log::Error("Initial score size doesn't equal to data , initial scores will be ignored\n"); Log::Error("Initial score size doesn't equal to data , initial scores will be ignored");
delete[] init_score_; delete[] init_score_;
num_init_score_ = 0; num_init_score_ = 0;
} }
...@@ -131,10 +131,10 @@ void Metadata::CheckOrPartition(data_size_t num_all_data, const std::vector<data ...@@ -131,10 +131,10 @@ void Metadata::CheckOrPartition(data_size_t num_all_data, const std::vector<data
used_query.push_back(qid); used_query.push_back(qid);
data_idx += len; data_idx += len;
} else { } else {
Log::Fatal("Data partition error, data didn't match queies\n"); Log::Fatal("Data partition error, data didn't match queies");
} }
} else { } else {
Log::Fatal("Data partition error, data didn't match queies\n"); Log::Fatal("Data partition error, data didn't match queies");
} }
} }
data_size_t * old_query_boundaries = query_boundaries_; data_size_t * old_query_boundaries = query_boundaries_;
...@@ -182,7 +182,7 @@ void Metadata::LoadWeights() { ...@@ -182,7 +182,7 @@ void Metadata::LoadWeights() {
if (reader.Lines().size() <= 0) { if (reader.Lines().size() <= 0) {
return; return;
} }
Log::Info("Start loading weights\n"); Log::Info("Start loading weights");
num_weights_ = static_cast<data_size_t>(reader.Lines().size()); num_weights_ = static_cast<data_size_t>(reader.Lines().size());
weights_ = new float[num_weights_]; weights_ = new float[num_weights_];
for (data_size_t i = 0; i < num_weights_; ++i) { for (data_size_t i = 0; i < num_weights_; ++i) {
...@@ -198,7 +198,7 @@ void Metadata::LoadInitialScore() { ...@@ -198,7 +198,7 @@ void Metadata::LoadInitialScore() {
TextReader<size_t> reader(init_score_filename_); TextReader<size_t> reader(init_score_filename_);
reader.ReadAllLines(); reader.ReadAllLines();
Log::Info("Start loading initial scores\n"); Log::Info("Start loading initial scores");
num_init_score_ = static_cast<data_size_t>(reader.Lines().size()); num_init_score_ = static_cast<data_size_t>(reader.Lines().size());
init_score_ = new score_t[num_init_score_]; init_score_ = new score_t[num_init_score_];
double tmp = 0.0f; double tmp = 0.0f;
...@@ -218,7 +218,7 @@ void Metadata::LoadQueryBoundaries() { ...@@ -218,7 +218,7 @@ void Metadata::LoadQueryBoundaries() {
if (reader.Lines().size() <= 0) { if (reader.Lines().size() <= 0) {
return; return;
} }
Log::Info("Start loading query boundries\n"); Log::Info("Start loading query boundries");
query_boundaries_ = new data_size_t[reader.Lines().size() + 1]; query_boundaries_ = new data_size_t[reader.Lines().size() + 1];
num_queries_ = static_cast<data_size_t>(reader.Lines().size()); num_queries_ = static_cast<data_size_t>(reader.Lines().size());
query_boundaries_[0] = 0; query_boundaries_[0] = 0;
...@@ -233,7 +233,7 @@ void Metadata::LoadQueryWeights() { ...@@ -233,7 +233,7 @@ void Metadata::LoadQueryWeights() {
if (weights_ == nullptr || query_boundaries_ == nullptr) { if (weights_ == nullptr || query_boundaries_ == nullptr) {
return; return;
} }
Log::Info("Start loading query weights\n"); Log::Info("Start loading query weights");
query_weights_ = new float[num_queries_]; query_weights_ = new float[num_queries_];
for (data_size_t i = 0; i < num_queries_; ++i) { for (data_size_t i = 0; i < num_queries_; ++i) {
query_weights_[i] = 0.0f; query_weights_[i] = 0.0f;
......
...@@ -28,7 +28,7 @@ public: ...@@ -28,7 +28,7 @@ public:
: num_data_(num_data) { : num_data_(num_data) {
default_bin_ = static_cast<VAL_T>(default_bin); default_bin_ = static_cast<VAL_T>(default_bin);
if (default_bin_ != 0) { if (default_bin_ != 0) {
Log::Info("Warning: Having sparse feature with negative values. Will let negative values equal zero as well\n"); Log::Info("Warning: Having sparse feature with negative values. Will let negative values equal zero as well");
} }
#pragma omp parallel #pragma omp parallel
#pragma omp master #pragma omp master
......
...@@ -72,7 +72,7 @@ public: ...@@ -72,7 +72,7 @@ public:
} }
score_t loss = sum_loss / sum_weights_; score_t loss = sum_loss / sum_weights_;
if (output_freq_ > 0 && iter % output_freq_ == 0){ if (output_freq_ > 0 && iter % output_freq_ == 0){
Log::Info("Iteration:%d, %s's %s: %f\n", iter, name, PointWiseLossCalculator::Name(), loss); Log::Info("Iteration:%d, %s's %s: %f", iter, name, PointWiseLossCalculator::Name(), loss);
} }
return loss; return loss;
} }
...@@ -230,7 +230,7 @@ public: ...@@ -230,7 +230,7 @@ public:
auc = accum / (sum_pos *(sum_weights_ - sum_pos)); auc = accum / (sum_pos *(sum_weights_ - sum_pos));
} }
if (output_freq_ > 0 && iter % output_freq_ == 0){ if (output_freq_ > 0 && iter % output_freq_ == 0){
Log::Info("Iteration:%d, %s's %s: %f\n", iter, name, "auc", auc); Log::Info("Iteration:%d, %s's %s: %f", iter, name, "auc", auc);
} }
return auc; return auc;
} }
......
...@@ -57,7 +57,7 @@ void DCGCalculator::CalMaxDCG(const std::vector<data_size_t>& ks, ...@@ -57,7 +57,7 @@ void DCGCalculator::CalMaxDCG(const std::vector<data_size_t>& ks,
std::vector<data_size_t> label_cnt(label_gain_.size(), 0); std::vector<data_size_t> label_cnt(label_gain_.size(), 0);
// counts for all labels // counts for all labels
for (data_size_t i = 0; i < num_data; ++i) { for (data_size_t i = 0; i < num_data; ++i) {
if (static_cast<size_t>(label[i]) >= label_cnt.size()) { Log::Fatal("label excel %d\n", label[i]); } if (static_cast<size_t>(label[i]) >= label_cnt.size()) { Log::Fatal("label excel %d", label[i]); }
++label_cnt[static_cast<int>(label[i])]; ++label_cnt[static_cast<int>(label[i])];
} }
double cur_result = 0.0; double cur_result = 0.0;
......
...@@ -135,7 +135,7 @@ public: ...@@ -135,7 +135,7 @@ public:
result_ss << "NDCG@" << eval_at_[j] << ":" << result[j] << "\t"; result_ss << "NDCG@" << eval_at_[j] << ":" << result[j] << "\t";
} }
if (output_freq_ > 0 && iter % output_freq_ == 0){ if (output_freq_ > 0 && iter % output_freq_ == 0){
Log::Info("Iteration:%d, Test:%s, %s \n", iter, name, result_ss.str().c_str()); Log::Info("Iteration:%d, Test:%s, %s ", iter, name, result_ss.str().c_str());
} }
return result[0]; return result[0];
} }
......
...@@ -73,7 +73,7 @@ Linkers::~Linkers() { ...@@ -73,7 +73,7 @@ Linkers::~Linkers() {
} }
} }
TcpSocket::Finalize(); TcpSocket::Finalize();
Log::Info("Network using %f seconds\n", network_time_ * 1e-3); Log::Info("Network using %f seconds", network_time_ * 1e-3);
} }
void Linkers::ParseMachineList(const char * filename) { void Linkers::ParseMachineList(const char * filename) {
...@@ -95,7 +95,7 @@ void Linkers::ParseMachineList(const char * filename) { ...@@ -95,7 +95,7 @@ void Linkers::ParseMachineList(const char * filename) {
continue; continue;
} }
if (client_ips_.size() >= static_cast<size_t>(num_machines_)) { if (client_ips_.size() >= static_cast<size_t>(num_machines_)) {
Log::Error("The #machine in machine_list is larger than parameter num_machines, the redundant will ignored\n"); Log::Error("The #machine in machine_list is larger than parameter num_machines, the redundant will ignored");
break; break;
} }
str_after_split[0] = Common::Trim(str_after_split[0]); str_after_split[0] = Common::Trim(str_after_split[0]);
...@@ -104,17 +104,17 @@ void Linkers::ParseMachineList(const char * filename) { ...@@ -104,17 +104,17 @@ void Linkers::ParseMachineList(const char * filename) {
client_ports_.push_back(atoi(str_after_split[1].c_str())); client_ports_.push_back(atoi(str_after_split[1].c_str()));
} }
if (client_ips_.size() != static_cast<size_t>(num_machines_)) { if (client_ips_.size() != static_cast<size_t>(num_machines_)) {
Log::Error("The world size is bigger the #machine in machine list, change world size to %d .\n", client_ips_.size()); Log::Error("The world size is bigger the #machine in machine list, change world size to %d .", client_ips_.size());
num_machines_ = static_cast<int>(client_ips_.size()); num_machines_ = static_cast<int>(client_ips_.size());
} }
} }
void Linkers::TryBind(int port) { void Linkers::TryBind(int port) {
Log::Info("try to bind port %d.\n", port); Log::Info("try to bind port %d.", port);
if (listener_->Bind(port)) { if (listener_->Bind(port)) {
Log::Info("Binding port %d success.\n", port); Log::Info("Binding port %d success.", port);
} else { } else {
Log::Fatal("Binding port %d failed.\n", port); Log::Fatal("Binding port %d failed.", port);
} }
} }
...@@ -125,7 +125,7 @@ void Linkers::SetLinker(int rank, const TcpSocket& socket) { ...@@ -125,7 +125,7 @@ void Linkers::SetLinker(int rank, const TcpSocket& socket) {
} }
void Linkers::ListenThread(int incoming_cnt) { void Linkers::ListenThread(int incoming_cnt) {
Log::Info("Listening...\n"); Log::Info("Listening...");
char buffer[100]; char buffer[100];
int connected_cnt = 0; int connected_cnt = 0;
while (connected_cnt < incoming_cnt) { while (connected_cnt < incoming_cnt) {
...@@ -192,7 +192,7 @@ void Linkers::Construct() { ...@@ -192,7 +192,7 @@ void Linkers::Construct() {
if (cur_socket.Connect(client_ips_[out_rank].c_str(), client_ports_[out_rank])) { if (cur_socket.Connect(client_ips_[out_rank].c_str(), client_ports_[out_rank])) {
break; break;
} else { } else {
Log::Error("Connect to rank %d failed, wait for %d milliseconds\n", out_rank, connect_fail_delay_time); Log::Error("Connect to rank %d failed, wait for %d milliseconds", out_rank, connect_fail_delay_time);
std::this_thread::sleep_for(std::chrono::milliseconds(connect_fail_delay_time)); std::this_thread::sleep_for(std::chrono::milliseconds(connect_fail_delay_time));
} }
} }
...@@ -217,7 +217,7 @@ bool Linkers::CheckLinker(int rank) { ...@@ -217,7 +217,7 @@ bool Linkers::CheckLinker(int rank) {
void Linkers::PrintLinkers() { void Linkers::PrintLinkers() {
for (int i = 0; i < num_machines_; ++i) { for (int i = 0; i < num_machines_; ++i) {
if (CheckLinker(i)) { if (CheckLinker(i)) {
Log::Info("Connected to rank %d.\n", i); Log::Info("Connected to rank %d.", i);
} }
} }
} }
......
...@@ -60,7 +60,7 @@ public: ...@@ -60,7 +60,7 @@ public:
TcpSocket() { TcpSocket() {
sockfd_ = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP); sockfd_ = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
if (sockfd_ == INVALID_SOCKET) { if (sockfd_ == INVALID_SOCKET) {
Log::Fatal("Socket construct error\n"); Log::Fatal("Socket construct error");
return; return;
} }
ConfigSocket(); ConfigSocket();
...@@ -69,7 +69,7 @@ public: ...@@ -69,7 +69,7 @@ public:
explicit TcpSocket(SOCKET socket) { explicit TcpSocket(SOCKET socket) {
sockfd_ = socket; sockfd_ = socket;
if (sockfd_ == INVALID_SOCKET) { if (sockfd_ == INVALID_SOCKET) {
Log::Fatal("Passed socket error\n"); Log::Fatal("Passed socket error");
return; return;
} }
ConfigSocket(); ConfigSocket();
...@@ -97,11 +97,11 @@ public: ...@@ -97,11 +97,11 @@ public:
#if defined(_WIN32) #if defined(_WIN32)
WSADATA wsa_data; WSADATA wsa_data;
if (WSAStartup(MAKEWORD(2, 2), &wsa_data) == -1) { if (WSAStartup(MAKEWORD(2, 2), &wsa_data) == -1) {
Log::Fatal("Socket error: WSAStart up error\n"); Log::Fatal("Socket error: WSAStart up error");
} }
if (LOBYTE(wsa_data.wVersion) != 2 || HIBYTE(wsa_data.wVersion) != 2) { if (LOBYTE(wsa_data.wVersion) != 2 || HIBYTE(wsa_data.wVersion) != 2) {
WSACleanup(); WSACleanup();
Log::Fatal("Socket error: Winsock.dll version error\n"); Log::Fatal("Socket error: Winsock.dll version error");
} }
#else #else
#endif #endif
...@@ -128,7 +128,7 @@ public: ...@@ -128,7 +128,7 @@ public:
char buffer[512]; char buffer[512];
// get hostName // get hostName
if (gethostname(buffer, sizeof(buffer)) == SOCKET_ERROR) { if (gethostname(buffer, sizeof(buffer)) == SOCKET_ERROR) {
Log::Fatal("Error code: %d, when getting local host name.\n", WSAGetLastError()); Log::Fatal("Error code: %d, when getting local host name.", WSAGetLastError());
} }
// push local ip // push local ip
PIP_ADAPTER_INFO pAdapterInfo; PIP_ADAPTER_INFO pAdapterInfo;
...@@ -137,7 +137,7 @@ public: ...@@ -137,7 +137,7 @@ public:
ULONG ulOutBufLen = sizeof(IP_ADAPTER_INFO); ULONG ulOutBufLen = sizeof(IP_ADAPTER_INFO);
pAdapterInfo = (IP_ADAPTER_INFO *)MALLOC(sizeof(IP_ADAPTER_INFO)); pAdapterInfo = (IP_ADAPTER_INFO *)MALLOC(sizeof(IP_ADAPTER_INFO));
if (pAdapterInfo == NULL) { if (pAdapterInfo == NULL) {
Log::Fatal("GetAdaptersinfo error: allocating memory \n"); Log::Fatal("GetAdaptersinfo error: allocating memory ");
} }
// Make an initial call to GetAdaptersInfo to get // Make an initial call to GetAdaptersInfo to get
// the necessary size into the ulOutBufLen variable // the necessary size into the ulOutBufLen variable
...@@ -145,7 +145,7 @@ public: ...@@ -145,7 +145,7 @@ public:
FREE(pAdapterInfo); FREE(pAdapterInfo);
pAdapterInfo = (IP_ADAPTER_INFO *)MALLOC(ulOutBufLen); pAdapterInfo = (IP_ADAPTER_INFO *)MALLOC(ulOutBufLen);
if (pAdapterInfo == NULL) { if (pAdapterInfo == NULL) {
Log::Fatal("GetAdaptersinfo error: allocating memory \n"); Log::Fatal("GetAdaptersinfo error: allocating memory ");
} }
} }
if ((dwRetVal = GetAdaptersInfo(pAdapterInfo, &ulOutBufLen)) == NO_ERROR) { if ((dwRetVal = GetAdaptersInfo(pAdapterInfo, &ulOutBufLen)) == NO_ERROR) {
...@@ -155,7 +155,7 @@ public: ...@@ -155,7 +155,7 @@ public:
pAdapter = pAdapter->Next; pAdapter = pAdapter->Next;
} }
} else { } else {
Log::Error("GetAdaptersinfo error: code %d \n", dwRetVal); Log::Error("GetAdaptersinfo error: code %d ", dwRetVal);
} }
if (pAdapterInfo) if (pAdapterInfo)
FREE(pAdapterInfo); FREE(pAdapterInfo);
......
...@@ -16,7 +16,7 @@ public: ...@@ -16,7 +16,7 @@ public:
is_unbalance_ = config.is_unbalance; is_unbalance_ = config.is_unbalance;
sigmoid_ = static_cast<score_t>(config.sigmoid); sigmoid_ = static_cast<score_t>(config.sigmoid);
if (sigmoid_ <= 0.0) { if (sigmoid_ <= 0.0) {
Log::Fatal("Sigmoid parameter %f :should greater than zero\n", sigmoid_); Log::Fatal("Sigmoid parameter %f :should greater than zero", sigmoid_);
} }
} }
~BinaryLogloss() {} ~BinaryLogloss() {}
...@@ -34,10 +34,10 @@ public: ...@@ -34,10 +34,10 @@ public:
++cnt_negative; ++cnt_negative;
} }
} }
Log::Info("Number of postive:%d, number of negative:%d\n", cnt_positive, cnt_negative); Log::Info("Number of postive:%d, number of negative:%d", cnt_positive, cnt_negative);
// cannot continue if all sample are same class // cannot continue if all sample are same class
if (cnt_positive == 0 || cnt_negative == 0) { if (cnt_positive == 0 || cnt_negative == 0) {
Log::Fatal("Input training data only contains one class\n"); Log::Fatal("Input training data only contains one class");
} }
// use -1 for negative class, and 1 for positive class // use -1 for negative class, and 1 for positive class
label_val_[0] = -1; label_val_[0] = -1;
......
...@@ -95,7 +95,7 @@ void SerialTreeLearner::Init(const Dataset* train_data) { ...@@ -95,7 +95,7 @@ void SerialTreeLearner::Init(const Dataset* train_data) {
if (has_ordered_bin_) { if (has_ordered_bin_) {
is_data_in_leaf_ = new char[num_data_]; is_data_in_leaf_ = new char[num_data_];
} }
Log::Info("Number of data:%d, Number of features:%d\n", num_data_, num_features_); Log::Info("Number of data:%d, Number of features:%d", num_data_, num_features_);
} }
...@@ -123,7 +123,7 @@ Tree* SerialTreeLearner::Train(const score_t* gradients, const score_t *hessians ...@@ -123,7 +123,7 @@ Tree* SerialTreeLearner::Train(const score_t* gradients, const score_t *hessians
const SplitInfo& best_leaf_SplitInfo = best_split_per_leaf_[best_leaf]; const SplitInfo& best_leaf_SplitInfo = best_split_per_leaf_[best_leaf];
// cannot split, quit // cannot split, quit
if (best_leaf_SplitInfo.gain <= 0.0) { if (best_leaf_SplitInfo.gain <= 0.0) {
Log::Info("cannot find more split with gain = %f , current #leaves=%d\n", Log::Info("cannot find more split with gain = %f , current #leaves=%d",
best_leaf_SplitInfo.gain, split + 1); best_leaf_SplitInfo.gain, split + 1);
break; break;
} }
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment