Commit 7f0d5358 authored by Guolin Ke's avatar Guolin Ke
Browse files

Change fatal output to stderr. Move the trailing newline into the logger.

parent 12d884ff
......@@ -18,7 +18,7 @@ namespace LightGBM {
#ifndef CHECK_NOTNULL
// Aborts via Log::Fatal when the given pointer expression is null.
// The stringized expression (#pointer) is embedded in the message so the
// failing check can be identified from the log alone.
// No trailing "\n" here: the logger appends the newline itself, so adding
// one in the message would double-space the output.
#define CHECK_NOTNULL(pointer) \
  if ((pointer) == nullptr) LightGBM::Log::Fatal(#pointer " Can't be NULL");
#endif
// A enumeration type of log message levels. The values are ordered:
......@@ -67,8 +67,12 @@ public:
// Logs a printf-style message at Fatal severity and terminates the process.
//
// Writes to stderr (not stdout) so the diagnostic survives redirected or
// buffered standard output, flushes, then calls exit(1) — this function
// never returns.
//
// @param format printf-style format string; the logger appends the
//               trailing newline, callers should not include one.
static void Fatal(const char *format, ...) {
  va_list val;
  va_start(val, format);
  // Level tag spelling fixed: was "[Fatel]".
  fprintf(stderr, "[LightGBM] [Fatal] ");
  // The va_list is consumed exactly once, here. Forwarding it to another
  // sink (e.g. Write) and then reusing it in vfprintf would be undefined
  // behavior per the C standard.
  vfprintf(stderr, format, val);
  fprintf(stderr, "\n");
  // Flush before exit so the message is not lost in the stdio buffer.
  fflush(stderr);
  va_end(val);
  exit(1);
}
private:
......@@ -78,11 +82,8 @@ private:
// write to STDOUT
printf("[LightGBM] [%s] ", level_str);
vprintf(format, val);
printf("\n");
fflush(stdout);
if (level == LogLevel::Fatal) {
exit(1);
}
}
}
......
......@@ -69,7 +69,7 @@ void Application::LoadParameters(int argc, char** argv) {
params[key] = value;
}
else {
Log::Error("Unknown parameter in command line: %s\n", argv[i]);
Log::Error("Unknown parameter in command line: %s", argv[i]);
}
}
// check for alias
......@@ -105,7 +105,7 @@ void Application::LoadParameters(int argc, char** argv) {
}
}
} else {
Log::Error("Config file: %s doesn't exist, will ignore\n",
Log::Error("Config file: %s doesn't exist, will ignore",
params["config_file"].c_str());
}
}
......@@ -113,7 +113,7 @@ void Application::LoadParameters(int argc, char** argv) {
ParameterAlias::KeyAliasTransform(&params);
// load configs
config_.Set(params);
Log::Info("Loading parameters .. finished\n");
Log::Info("Loading parameters .. finished");
}
void Application::LoadData() {
......@@ -201,7 +201,7 @@ void Application::LoadData() {
}
auto end_time = std::chrono::high_resolution_clock::now();
// output used time on each iteration
Log::Info("Finish loading data, use %f seconds \n",
Log::Info("Finish loading data, use %f seconds",
std::chrono::duration<double, std::milli>(end_time - start_time) * 1e-3);
}
......@@ -209,7 +209,7 @@ void Application::InitTrain() {
if (config_.is_parallel) {
// need init network
Network::Init(config_.network_config);
Log::Info("Finish network initialization\n");
Log::Info("Finish network initialization");
// sync global random seed for feature patition
if (config_.boosting_type == BoostingType::kGBDT) {
GBDTConfig* gbdt_config =
......@@ -240,13 +240,13 @@ void Application::InitTrain() {
boosting_->AddDataset(valid_datas_[i],
ConstPtrInVectorWarpper<Metric>(valid_metrics_[i]));
}
Log::Info("Finish training initilization.\n");
Log::Info("Finish training initilization.");
}
void Application::Train() {
Log::Info("Start train\n");
Log::Info("Start train");
boosting_->Train();
Log::Info("Finish train\n");
Log::Info("Finish train");
}
......@@ -254,14 +254,14 @@ void Application::Predict() {
// create predictor
Predictor predictor(boosting_, config_.io_config.is_sigmoid);
predictor.Predict(config_.io_config.data_filename.c_str(), config_.io_config.output_result.c_str());
Log::Info("Finish predict.\n");
Log::Info("Finish predict.");
}
void Application::InitPredict() {
boosting_ =
Boosting::CreateBoosting(config_.boosting_type, config_.boosting_config);
LoadModel();
Log::Info("Finish predict initilization.\n");
Log::Info("Finish predict initilization.");
}
void Application::LoadModel() {
......
......@@ -124,14 +124,14 @@ public:
(const char* buffer, std::vector<std::pair<int, double>>* feature) {
parser->ParseOneLine(buffer, feature, &tmp_label);
};
Log::Info("Start prediction for data %s with labels\n", data_filename);
Log::Info("Start prediction for data %s with labels", data_filename);
} else {
// parse function without label
parser_fun = [this, &parser]
(const char* buffer, std::vector<std::pair<int, double>>* feature) {
parser->ParseOneLine(buffer, feature);
};
Log::Info("Start prediction for data %s without label\n", data_filename);
Log::Info("Start prediction for data %s without label", data_filename);
}
std::function<double(const std::vector<std::pair<int, double>>&)> predict_fun;
if (is_simgoid_) {
......
......@@ -149,7 +149,7 @@ void GBDT::Bagging(int iter) {
bag_data_cnt_ = cur_left_cnt;
out_of_bag_data_cnt_ = num_data_ - bag_data_cnt_;
}
Log::Info("re-bagging, using %d data to train\n", bag_data_cnt_);
Log::Info("re-bagging, using %d data to train", bag_data_cnt_);
// set bagging data to tree learner
tree_learner_->SetBaggingData(bag_data_indices_, bag_data_cnt_);
}
......@@ -175,7 +175,7 @@ void GBDT::Train() {
Tree * new_tree = TrainOneTree();
// if cannot learn a new tree, then stop
if (new_tree->num_leaves() <= 1) {
Log::Info("Can't training anymore, there isn't any leaf meets split requirements.\n");
Log::Info("Can't training anymore, there isn't any leaf meets split requirements.");
break;
}
// shrinkage by learning rate
......@@ -204,11 +204,11 @@ void GBDT::Train() {
}
auto end_time = std::chrono::high_resolution_clock::now();
// output used time per iteration
Log::Info("%f seconds elapsed, finished %d iteration\n", std::chrono::duration<double,
Log::Info("%f seconds elapsed, finished %d iteration", std::chrono::duration<double,
std::milli>(end_time - start_time) * 1e-3, iter + 1);
if (is_early_stopping) {
// close file with an early-stopping message
Log::Info("Early stopping at iteration %d, the best iteration round is %d\n", iter + 1, iter + 1 - early_stopping_round_);
Log::Info("Early stopping at iteration %d, the best iteration round is %d", iter + 1, iter + 1 - early_stopping_round_);
fclose(output_model_file);
return;
}
......@@ -307,7 +307,7 @@ void GBDT::ModelsFromString(const std::string& model_str, int num_used_model) {
}
}
if (i == lines.size()) {
Log::Fatal("Model file doesn't contain max_feature_idx\n");
Log::Fatal("Model file doesn't contain max_feature_idx");
return;
}
// get sigmoid parameter
......@@ -346,7 +346,7 @@ void GBDT::ModelsFromString(const std::string& model_str, int num_used_model) {
}
}
Log::Info("%d models has been loaded\n\n", models_.size());
Log::Info("%d models has been loaded\n", models_.size());
}
double GBDT::PredictRaw(const double* value) const {
......
......@@ -21,7 +21,7 @@ Dataset::Dataset(const char* data_filename, const char* init_score_filename,
CheckCanLoadFromBin();
if (is_loading_from_binfile_ && predict_fun != nullptr) {
Log::Info("Cannot performing initialization of prediction by using binary file, using text file instead\n");
Log::Info("Cannot performing initialization of prediction by using binary file, using text file instead");
is_loading_from_binfile_ = false;
}
......@@ -38,7 +38,7 @@ Dataset::Dataset(const char* data_filename, const char* init_score_filename,
} else {
// only need to load initilize score, other meta data will load from bin flie
metadata_.Init(init_score_filename);
Log::Info("Loading data set from binary file\n");
Log::Info("Loading data set from binary file");
parser_ = nullptr;
text_reader_ = nullptr;
}
......@@ -209,7 +209,7 @@ void Dataset::ConstructBinMappers(int rank, int num_machines, const std::vector<
num_data_, is_enable_sparse_));
} else {
// if feature is trival(only 1 bin), free spaces
Log::Error("Feature %d only contains one value, will be ignored\n", i);
Log::Error("Feature %d only contains one value, will be ignored", i);
delete bin_mappers[i];
}
}
......@@ -489,7 +489,7 @@ void Dataset::SaveBinaryFile() {
Log::Fatal("Cannot write binary data to %s ", bin_filename.c_str());
}
Log::Info("Saving data to binary file: %s\n", data_filename_);
Log::Info("Saving data to binary file: %s", data_filename_);
// get size of header
size_t size_of_header = sizeof(global_num_data_) + sizeof(is_enable_sparse_)
......
......@@ -61,7 +61,7 @@ void Metadata::CheckOrPartition(data_size_t num_all_data, const std::vector<data
if (used_data_indices.size() == 0) {
// check weights
if (weights_ != nullptr && num_weights_ != num_data_) {
Log::Error("Initial weight size doesn't equal to data, weights will be ignored\n");
Log::Error("Initial weight size doesn't equal to data, weights will be ignored");
delete[] weights_;
num_weights_ = 0;
weights_ = nullptr;
......@@ -69,7 +69,7 @@ void Metadata::CheckOrPartition(data_size_t num_all_data, const std::vector<data
// check query boundries
if (query_boundaries_ != nullptr && query_boundaries_[num_queries_] != num_data_) {
Log::Error("Initial query size doesn't equal to data, queies will be ignored\n");
Log::Error("Initial query size doesn't equal to data, queies will be ignored");
delete[] query_boundaries_;
num_queries_ = 0;
query_boundaries_ = nullptr;
......@@ -78,21 +78,21 @@ void Metadata::CheckOrPartition(data_size_t num_all_data, const std::vector<data
// contain initial score file
if (init_score_ != nullptr && num_init_score_ != num_data_) {
delete[] init_score_;
Log::Error("Initial score size doesn't equal to data, score file will be ignored\n");
Log::Error("Initial score size doesn't equal to data, score file will be ignored");
num_init_score_ = 0;
}
} else {
data_size_t num_used_data = static_cast<data_size_t>(used_data_indices.size());
// check weights
if (weights_ != nullptr && num_weights_ != num_all_data) {
Log::Error("Initial weights size doesn't equal to data, weights will be ignored\n");
Log::Error("Initial weights size doesn't equal to data, weights will be ignored");
delete[] weights_;
num_weights_ = 0;
weights_ = nullptr;
}
// check query boundries
if (query_boundaries_ != nullptr && query_boundaries_[num_queries_] != num_all_data) {
Log::Error("Initial query size doesn't equal to data , queries will be ignored\n");
Log::Error("Initial query size doesn't equal to data , queries will be ignored");
delete[] query_boundaries_;
num_queries_ = 0;
query_boundaries_ = nullptr;
......@@ -100,7 +100,7 @@ void Metadata::CheckOrPartition(data_size_t num_all_data, const std::vector<data
// contain initial score file
if (init_score_ != nullptr && num_init_score_ != num_all_data) {
Log::Error("Initial score size doesn't equal to data , initial scores will be ignored\n");
Log::Error("Initial score size doesn't equal to data , initial scores will be ignored");
delete[] init_score_;
num_init_score_ = 0;
}
......@@ -131,10 +131,10 @@ void Metadata::CheckOrPartition(data_size_t num_all_data, const std::vector<data
used_query.push_back(qid);
data_idx += len;
} else {
Log::Fatal("Data partition error, data didn't match queies\n");
Log::Fatal("Data partition error, data didn't match queies");
}
} else {
Log::Fatal("Data partition error, data didn't match queies\n");
Log::Fatal("Data partition error, data didn't match queies");
}
}
data_size_t * old_query_boundaries = query_boundaries_;
......@@ -182,7 +182,7 @@ void Metadata::LoadWeights() {
if (reader.Lines().size() <= 0) {
return;
}
Log::Info("Start loading weights\n");
Log::Info("Start loading weights");
num_weights_ = static_cast<data_size_t>(reader.Lines().size());
weights_ = new float[num_weights_];
for (data_size_t i = 0; i < num_weights_; ++i) {
......@@ -198,7 +198,7 @@ void Metadata::LoadInitialScore() {
TextReader<size_t> reader(init_score_filename_);
reader.ReadAllLines();
Log::Info("Start loading initial scores\n");
Log::Info("Start loading initial scores");
num_init_score_ = static_cast<data_size_t>(reader.Lines().size());
init_score_ = new score_t[num_init_score_];
double tmp = 0.0f;
......@@ -218,7 +218,7 @@ void Metadata::LoadQueryBoundaries() {
if (reader.Lines().size() <= 0) {
return;
}
Log::Info("Start loading query boundries\n");
Log::Info("Start loading query boundries");
query_boundaries_ = new data_size_t[reader.Lines().size() + 1];
num_queries_ = static_cast<data_size_t>(reader.Lines().size());
query_boundaries_[0] = 0;
......@@ -233,7 +233,7 @@ void Metadata::LoadQueryWeights() {
if (weights_ == nullptr || query_boundaries_ == nullptr) {
return;
}
Log::Info("Start loading query weights\n");
Log::Info("Start loading query weights");
query_weights_ = new float[num_queries_];
for (data_size_t i = 0; i < num_queries_; ++i) {
query_weights_[i] = 0.0f;
......
......@@ -28,7 +28,7 @@ public:
: num_data_(num_data) {
default_bin_ = static_cast<VAL_T>(default_bin);
if (default_bin_ != 0) {
Log::Info("Warning: Having sparse feature with negative values. Will let negative values equal zero as well\n");
Log::Info("Warning: Having sparse feature with negative values. Will let negative values equal zero as well");
}
#pragma omp parallel
#pragma omp master
......
......@@ -72,7 +72,7 @@ public:
}
score_t loss = sum_loss / sum_weights_;
if (output_freq_ > 0 && iter % output_freq_ == 0){
Log::Info("Iteration:%d, %s's %s: %f\n", iter, name, PointWiseLossCalculator::Name(), loss);
Log::Info("Iteration:%d, %s's %s: %f", iter, name, PointWiseLossCalculator::Name(), loss);
}
return loss;
}
......@@ -230,7 +230,7 @@ public:
auc = accum / (sum_pos *(sum_weights_ - sum_pos));
}
if (output_freq_ > 0 && iter % output_freq_ == 0){
Log::Info("Iteration:%d, %s's %s: %f\n", iter, name, "auc", auc);
Log::Info("Iteration:%d, %s's %s: %f", iter, name, "auc", auc);
}
return auc;
}
......
......@@ -57,7 +57,7 @@ void DCGCalculator::CalMaxDCG(const std::vector<data_size_t>& ks,
std::vector<data_size_t> label_cnt(label_gain_.size(), 0);
// counts for all labels
for (data_size_t i = 0; i < num_data; ++i) {
if (static_cast<size_t>(label[i]) >= label_cnt.size()) { Log::Fatal("label excel %d\n", label[i]); }
if (static_cast<size_t>(label[i]) >= label_cnt.size()) { Log::Fatal("label excel %d", label[i]); }
++label_cnt[static_cast<int>(label[i])];
}
double cur_result = 0.0;
......
......@@ -135,7 +135,7 @@ public:
result_ss << "NDCG@" << eval_at_[j] << ":" << result[j] << "\t";
}
if (output_freq_ > 0 && iter % output_freq_ == 0){
Log::Info("Iteration:%d, Test:%s, %s \n", iter, name, result_ss.str().c_str());
Log::Info("Iteration:%d, Test:%s, %s ", iter, name, result_ss.str().c_str());
}
return result[0];
}
......
......@@ -73,7 +73,7 @@ Linkers::~Linkers() {
}
}
TcpSocket::Finalize();
Log::Info("Network using %f seconds\n", network_time_ * 1e-3);
Log::Info("Network using %f seconds", network_time_ * 1e-3);
}
void Linkers::ParseMachineList(const char * filename) {
......@@ -95,7 +95,7 @@ void Linkers::ParseMachineList(const char * filename) {
continue;
}
if (client_ips_.size() >= static_cast<size_t>(num_machines_)) {
Log::Error("The #machine in machine_list is larger than parameter num_machines, the redundant will ignored\n");
Log::Error("The #machine in machine_list is larger than parameter num_machines, the redundant will ignored");
break;
}
str_after_split[0] = Common::Trim(str_after_split[0]);
......@@ -104,17 +104,17 @@ void Linkers::ParseMachineList(const char * filename) {
client_ports_.push_back(atoi(str_after_split[1].c_str()));
}
if (client_ips_.size() != static_cast<size_t>(num_machines_)) {
Log::Error("The world size is bigger the #machine in machine list, change world size to %d .\n", client_ips_.size());
Log::Error("The world size is bigger the #machine in machine list, change world size to %d .", client_ips_.size());
num_machines_ = static_cast<int>(client_ips_.size());
}
}
void Linkers::TryBind(int port) {
Log::Info("try to bind port %d.\n", port);
Log::Info("try to bind port %d.", port);
if (listener_->Bind(port)) {
Log::Info("Binding port %d success.\n", port);
Log::Info("Binding port %d success.", port);
} else {
Log::Fatal("Binding port %d failed.\n", port);
Log::Fatal("Binding port %d failed.", port);
}
}
......@@ -125,7 +125,7 @@ void Linkers::SetLinker(int rank, const TcpSocket& socket) {
}
void Linkers::ListenThread(int incoming_cnt) {
Log::Info("Listening...\n");
Log::Info("Listening...");
char buffer[100];
int connected_cnt = 0;
while (connected_cnt < incoming_cnt) {
......@@ -192,7 +192,7 @@ void Linkers::Construct() {
if (cur_socket.Connect(client_ips_[out_rank].c_str(), client_ports_[out_rank])) {
break;
} else {
Log::Error("Connect to rank %d failed, wait for %d milliseconds\n", out_rank, connect_fail_delay_time);
Log::Error("Connect to rank %d failed, wait for %d milliseconds", out_rank, connect_fail_delay_time);
std::this_thread::sleep_for(std::chrono::milliseconds(connect_fail_delay_time));
}
}
......@@ -217,7 +217,7 @@ bool Linkers::CheckLinker(int rank) {
void Linkers::PrintLinkers() {
for (int i = 0; i < num_machines_; ++i) {
if (CheckLinker(i)) {
Log::Info("Connected to rank %d.\n", i);
Log::Info("Connected to rank %d.", i);
}
}
}
......
......@@ -60,7 +60,7 @@ public:
TcpSocket() {
sockfd_ = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
if (sockfd_ == INVALID_SOCKET) {
Log::Fatal("Socket construct error\n");
Log::Fatal("Socket construct error");
return;
}
ConfigSocket();
......@@ -69,7 +69,7 @@ public:
explicit TcpSocket(SOCKET socket) {
sockfd_ = socket;
if (sockfd_ == INVALID_SOCKET) {
Log::Fatal("Passed socket error\n");
Log::Fatal("Passed socket error");
return;
}
ConfigSocket();
......@@ -97,11 +97,11 @@ public:
#if defined(_WIN32)
WSADATA wsa_data;
if (WSAStartup(MAKEWORD(2, 2), &wsa_data) == -1) {
Log::Fatal("Socket error: WSAStart up error\n");
Log::Fatal("Socket error: WSAStart up error");
}
if (LOBYTE(wsa_data.wVersion) != 2 || HIBYTE(wsa_data.wVersion) != 2) {
WSACleanup();
Log::Fatal("Socket error: Winsock.dll version error\n");
Log::Fatal("Socket error: Winsock.dll version error");
}
#else
#endif
......@@ -128,7 +128,7 @@ public:
char buffer[512];
// get hostName
if (gethostname(buffer, sizeof(buffer)) == SOCKET_ERROR) {
Log::Fatal("Error code: %d, when getting local host name.\n", WSAGetLastError());
Log::Fatal("Error code: %d, when getting local host name.", WSAGetLastError());
}
// push local ip
PIP_ADAPTER_INFO pAdapterInfo;
......@@ -137,7 +137,7 @@ public:
ULONG ulOutBufLen = sizeof(IP_ADAPTER_INFO);
pAdapterInfo = (IP_ADAPTER_INFO *)MALLOC(sizeof(IP_ADAPTER_INFO));
if (pAdapterInfo == NULL) {
Log::Fatal("GetAdaptersinfo error: allocating memory \n");
Log::Fatal("GetAdaptersinfo error: allocating memory ");
}
// Make an initial call to GetAdaptersInfo to get
// the necessary size into the ulOutBufLen variable
......@@ -145,7 +145,7 @@ public:
FREE(pAdapterInfo);
pAdapterInfo = (IP_ADAPTER_INFO *)MALLOC(ulOutBufLen);
if (pAdapterInfo == NULL) {
Log::Fatal("GetAdaptersinfo error: allocating memory \n");
Log::Fatal("GetAdaptersinfo error: allocating memory ");
}
}
if ((dwRetVal = GetAdaptersInfo(pAdapterInfo, &ulOutBufLen)) == NO_ERROR) {
......@@ -155,7 +155,7 @@ public:
pAdapter = pAdapter->Next;
}
} else {
Log::Error("GetAdaptersinfo error: code %d \n", dwRetVal);
Log::Error("GetAdaptersinfo error: code %d ", dwRetVal);
}
if (pAdapterInfo)
FREE(pAdapterInfo);
......
......@@ -16,7 +16,7 @@ public:
is_unbalance_ = config.is_unbalance;
sigmoid_ = static_cast<score_t>(config.sigmoid);
if (sigmoid_ <= 0.0) {
Log::Fatal("Sigmoid parameter %f :should greater than zero\n", sigmoid_);
Log::Fatal("Sigmoid parameter %f :should greater than zero", sigmoid_);
}
}
~BinaryLogloss() {}
......@@ -34,10 +34,10 @@ public:
++cnt_negative;
}
}
Log::Info("Number of postive:%d, number of negative:%d\n", cnt_positive, cnt_negative);
Log::Info("Number of postive:%d, number of negative:%d", cnt_positive, cnt_negative);
// cannot continue if all sample are same class
if (cnt_positive == 0 || cnt_negative == 0) {
Log::Fatal("Input training data only contains one class\n");
Log::Fatal("Input training data only contains one class");
}
// use -1 for negative class, and 1 for positive class
label_val_[0] = -1;
......
......@@ -95,7 +95,7 @@ void SerialTreeLearner::Init(const Dataset* train_data) {
if (has_ordered_bin_) {
is_data_in_leaf_ = new char[num_data_];
}
Log::Info("Number of data:%d, Number of features:%d\n", num_data_, num_features_);
Log::Info("Number of data:%d, Number of features:%d", num_data_, num_features_);
}
......@@ -123,7 +123,7 @@ Tree* SerialTreeLearner::Train(const score_t* gradients, const score_t *hessians
const SplitInfo& best_leaf_SplitInfo = best_split_per_leaf_[best_leaf];
// cannot split, quit
if (best_leaf_SplitInfo.gain <= 0.0) {
Log::Info("cannot find more split with gain = %f , current #leaves=%d\n",
Log::Info("cannot find more split with gain = %f , current #leaves=%d",
best_leaf_SplitInfo.gain, split + 1);
break;
}
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment