Commit 04ccb4e8 authored by Qiwei Ye

Add verbose option for logger

parent ee97ed3d
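For context, a minimal usage sketch of the options this change introduces (the executable and file names below are hypothetical; "verbose" is registered below as an alias for "verbosity", and "log_file" is only read into IOConfig in this commit; the verbosity-to-log-level mapping itself is added in OverallConfig::Set further down):

    lightgbm config_file=train.conf verbose=2 log_file=train.log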
......@@ -93,6 +93,8 @@ public:
std::string output_result = "LightGBM_predict_result.txt";
std::string input_model = "";
std::string input_init_score = "";
int verbosity = 0;
std::string log_file = "";
int num_model_predict = -1;
bool is_pre_partition = false;
bool is_enable_sparse = true;
......@@ -310,7 +312,8 @@ struct ParameterAlias {
{ "two_round", "use_two_round_loading" },
{ "mlist", "machine_list_file" },
{ "is_save_binary", "is_save_binary_file" },
{ "save_binary", "is_save_binary_file" }
{ "save_binary", "is_save_binary_file" },
{ "verbose", "verbosity" }
});
std::unordered_map<std::string, std::string> tmp_map;
for (const auto& pair : *params) {
......
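A minimal sketch (hypothetical, not part of this commit) of how the alias added above is expected to take effect, assuming the std::unordered_map<std::string, std::string> parameter map and the KeyAliasTransform call used in Application::LoadParameters:

    std::unordered_map<std::string, std::string> params = { { "verbose", "2" } };
    ParameterAlias::KeyAliasTransform(&params);
    // "verbose" is expected to be rewritten to the canonical key "verbosity",
    // mirroring the other alias entries such as "two_round" -> "use_two_round_loading"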
......@@ -157,7 +157,7 @@ inline static const char* Atof(const char* p, double* out) {
*out = sign * 1e308;
}
else {
Log::Error("Unknow token %s in data file", tmp_str.c_str());
Log::Fatal("Unknow token %s in data file", tmp_str.c_str());
}
p += cnt;
}
......@@ -201,7 +201,7 @@ inline static std::string ArrayToString(const T* arr, int n, char delimiter) {
inline static void StringToIntArray(const std::string& str, char delimiter, size_t n, int* out) {
std::vector<std::string> strs = Split(str.c_str(), delimiter);
if (strs.size() != n) {
Log::Error("StringToIntArray error, size don't equal.");
Log::Fatal("StringToIntArray error, size doesn't matched.");
}
for (size_t i = 0; i < strs.size(); ++i) {
strs[i] = Trim(strs[i]);
......@@ -212,7 +212,7 @@ inline static void StringToIntArray(const std::string& str, char delimiter, size
inline static void StringToDoubleArray(const std::string& str, char delimiter, size_t n, double* out) {
std::vector<std::string> strs = Split(str.c_str(), delimiter);
if (strs.size() != n) {
Log::Error("StringToDoubleArray error, size don't equal");
Log::Fatal("StringToDoubleArray error, size doesn't matched.");
}
for (size_t i = 0; i < strs.size(); ++i) {
strs[i] = Trim(strs[i]);
......@@ -223,7 +223,7 @@ inline static void StringToDoubleArray(const std::string& str, char delimiter, s
inline static void StringToDoubleArray(const std::string& str, char delimiter, size_t n, float* out) {
std::vector<std::string> strs = Split(str.c_str(), delimiter);
if (strs.size() != n) {
Log::Error("StringToDoubleArray error, size don't equal");
Log::Fatal("StringToDoubleArray error, size doesn't matched.");
}
double tmp;
for (size_t i = 0; i < strs.size(); ++i) {
......
......@@ -69,7 +69,7 @@ void Application::LoadParameters(int argc, char** argv) {
params[key] = value;
}
else {
Log::Info("Warning: unknown parameter in command line: %s", argv[i]);
Log::Error("Unknown parameter in command line: %s\n", argv[i]);
}
}
// check for alias
......@@ -101,11 +101,11 @@ void Application::LoadParameters(int argc, char** argv) {
}
}
else {
Log::Info("Warning: unknown parameter in config file: %s", line.c_str());
Log::Error("Unknown parameter in config file: %s", line.c_str());
}
}
} else {
Log::Info("config file: %s doesn't exist, will ignore",
Log::Error("Config file: %s doesn't exist, will ignore\n",
params["config_file"].c_str());
}
}
......@@ -113,7 +113,7 @@ void Application::LoadParameters(int argc, char** argv) {
ParameterAlias::KeyAliasTransform(&params);
// load configs
config_.Set(params);
Log::Info("finished load parameters");
Log::Info("Loading parameters .. finished\n");
}
void Application::LoadData() {
......@@ -201,7 +201,7 @@ void Application::LoadData() {
}
auto end_time = std::chrono::high_resolution_clock::now();
// output used time on each iteration
Log::Info("Finish loading data, use %f seconds ",
Log::Info("Finish loading data, use %f seconds \n",
std::chrono::duration<double, std::milli>(end_time - start_time) * 1e-3);
}
......@@ -209,7 +209,7 @@ void Application::InitTrain() {
if (config_.is_parallel) {
// need init network
Network::Init(config_.network_config);
Log::Info("finish network initialization");
Log::Info("Finish network initialization\n");
// sync global random seed for feature patition
if (config_.boosting_type == BoostingType::kGBDT) {
GBDTConfig* gbdt_config =
......@@ -240,13 +240,13 @@ void Application::InitTrain() {
boosting_->AddDataset(valid_datas_[i],
ConstPtrInVectorWarpper<Metric>(valid_metrics_[i]));
}
Log::Info("finish training init");
Log::Info("Finish training initilization.\n");
}
void Application::Train() {
Log::Info("start train");
Log::Info("Start train\n");
boosting_->Train();
Log::Info("finish train");
Log::Info("Finish train\n");
}
......@@ -254,14 +254,14 @@ void Application::Predict() {
// create predictor
Predictor predictor(boosting_, config_.io_config.is_sigmoid);
predictor.Predict(config_.io_config.data_filename.c_str(), config_.io_config.output_result.c_str());
Log::Info("finish predict");
Log::Info("Finish predict.\n");
}
void Application::InitPredict() {
boosting_ =
Boosting::CreateBoosting(config_.boosting_type, config_.boosting_config);
LoadModel();
Log::Info("finish predict init");
Log::Info("Finish predict initilization.\n");
}
void Application::LoadModel() {
......
......@@ -106,13 +106,13 @@ public:
#endif
if (result_file == NULL) {
Log::Error("predition result file %s doesn't exists", data_filename);
Log::Fatal("Predition result file %s doesn't exists", data_filename);
}
bool has_label = false;
Parser* parser = Parser::CreateParser(data_filename, num_features_, &has_label);
if (parser == nullptr) {
Log::Error("recongnizing input data format failed, filename %s", data_filename);
Log::Fatal("Recongnizing input data format failed, filename %s", data_filename);
}
// function for parse data
......@@ -124,14 +124,14 @@ public:
(const char* buffer, std::vector<std::pair<int, double>>* feature) {
parser->ParseOneLine(buffer, feature, &tmp_label);
};
Log::Info("start prediction for data %s, and data has label", data_filename);
Log::Info("Start prediction for data %s with labels\n", data_filename);
} else {
// parse function without label
parser_fun = [this, &parser]
(const char* buffer, std::vector<std::pair<int, double>>* feature) {
parser->ParseOneLine(buffer, feature);
};
Log::Info("start prediction for data %s, and data doesn't has label", data_filename);
Log::Info("Start prediction for data %s without label\n", data_filename);
}
std::function<double(const std::vector<std::pair<int, double>>&)> predict_fun;
if (is_simgoid_) {
......
......@@ -150,7 +150,7 @@ void GBDT::Bagging(int iter) {
bag_data_cnt_ = cur_left_cnt;
out_of_bag_data_cnt_ = num_data_ - bag_data_cnt_;
}
Log::Info("re-bagging, using %d data to train", bag_data_cnt_);
Log::Info("re-bagging, using %d data to train\n", bag_data_cnt_);
// set bagging data to tree learner
tree_learner_->SetBaggingData(bag_data_indices_, bag_data_cnt_);
}
......@@ -176,7 +176,7 @@ void GBDT::Train() {
Tree * new_tree = TrainOneTree();
// if cannot learn a new tree, then stop
if (new_tree->num_leaves() <= 1) {
Log::Info("Cannot do any boosting for tree cannot split");
Log::Info("Can't training anymore, there isn't any leaf meets split requirements.\n");
break;
}
// shrinkage by learning rate
......@@ -194,7 +194,7 @@ void GBDT::Train() {
fflush(output_model_file);
auto end_time = std::chrono::high_resolution_clock::now();
// output used time per iteration
Log::Info("%f seconds elapsed, finished %d iteration", std::chrono::duration<double,
Log::Info("%f seconds elapsed, finished %d iteration\n", std::chrono::duration<double,
std::milli>(end_time - start_time) * 1e-3, iter + 1);
}
// close file
......@@ -284,7 +284,7 @@ void GBDT::ModelsFromString(const std::string& model_str, int num_used_model) {
}
}
if (i == lines.size()) {
Log::Error("The model doesn't contain max_feature_idx");
Log::Fatal("Model file doesn't contain max_feature_idx\n");
return;
}
// get sigmoid parameter
......@@ -323,7 +323,7 @@ void GBDT::ModelsFromString(const std::string& model_str, int num_used_model) {
}
}
Log::Info("Loaded %d models\n", models_.size());
Log::Info("%d models has been loaded\n\n", models_.size());
}
double GBDT::PredictRaw(const double* value) const {
......
......@@ -34,6 +34,15 @@ void OverallConfig::Set(const std::unordered_map<std::string, std::string>& para
metric_config.Set(params);
// check for conflicts
CheckParamConflict();
if (io_config.verbosity == 1)
LightGBM::Log::ResetLogLevel(LightGBM::LogLevel::Info);
else if (io_config.verbosity == 0)
LightGBM::Log::ResetLogLevel(LightGBM::LogLevel::Error);
else if (io_config.verbosity >= 2)
LightGBM::Log::ResetLogLevel(LightGBM::LogLevel::Debug);
else
LightGBM::Log::ResetLogLevel(LightGBM::LogLevel::Fatal);
}
void OverallConfig::GetBoostingType(const std::unordered_map<std::string, std::string>& params) {
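A note on the mapping above: io_config.verbosity defaults to 0, which resets the logger to LogLevel::Error; this is presumably why many warning-style Log::Info calls in this commit are promoted to Log::Error and hard failures to Log::Fatal, so they still reach the user at the default level. A minimal sketch of the expected default behaviour (hypothetical, assuming log-level filtering works as the mapping suggests):

    // hypothetical illustration of the default (verbosity == 0) behaviour
    LightGBM::Log::ResetLogLevel(LightGBM::LogLevel::Error);
    Log::Info("Loading data set from binary file\n");                  // suppressed
    Log::Error("Unknown parameter in config file: %s", line.c_str());  // still printed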
......@@ -43,7 +52,7 @@ void OverallConfig::GetBoostingType(const std::unordered_map<std::string, std::s
if (value == std::string("gbdt") || value == std::string("gbrt")) {
boosting_type = BoostingType::kGBDT;
} else {
Log::Error("boosting type %s error", value.c_str());
Log::Fatal("Boosting type %s error", value.c_str());
}
}
}
......@@ -91,7 +100,7 @@ void OverallConfig::GetTaskType(const std::unordered_map<std::string, std::strin
|| value == std::string("test")) {
task_type = TaskType::kPredict;
} else {
Log::Error("task type error");
Log::Fatal("Task type error");
}
}
}
......@@ -128,8 +137,9 @@ void IOConfig::Set(const std::unordered_map<std::string, std::string>& params) {
GetInt(params, "data_random_seed", &data_random_seed);
if (!GetString(params, "data", &data_filename)) {
Log::Error("No training/prediction data, application quit");
Log::Fatal("No training/prediction data, application quit");
}
GetInt(params, "verbose", &verbosity);
GetInt(params, "num_model_predict", &num_model_predict);
GetBool(params, "is_pre_partition", &is_pre_partition);
GetBool(params, "is_enable_sparse", &is_enable_sparse);
......@@ -140,6 +150,7 @@ void IOConfig::Set(const std::unordered_map<std::string, std::string>& params) {
GetString(params, "input_model", &input_model);
GetString(params, "output_result", &output_result);
GetString(params, "input_init_score", &input_init_score);
GetString(params, "log_file", &log_file);
std::string tmp_str = "";
if (GetString(params, "valid_data", &tmp_str)) {
valid_data_filenames = Common::Split(tmp_str.c_str(), ',');
......@@ -236,7 +247,7 @@ void GBDTConfig::GetTreeLearnerType(const std::unordered_map<std::string, std::s
tree_learner_type = TreeLearnerType::kDataParallelTreeLearner;
}
else {
Log::Error("tree learner type error");
Log::Fatal("Tree learner type error");
}
}
}
......
......@@ -21,7 +21,7 @@ Dataset::Dataset(const char* data_filename, const char* init_score_filename,
CheckCanLoadFromBin();
if (is_loading_from_binfile_ && predict_fun != nullptr) {
Log::Info("cannot perform initial prediction for binary file, will use text file instead");
Log::Info("Cannot performing initialization of prediction by using binary file, using text file instead\n");
is_loading_from_binfile_ = false;
}
......@@ -31,14 +31,14 @@ Dataset::Dataset(const char* data_filename, const char* init_score_filename,
// create text parser
parser_ = Parser::CreateParser(data_filename_, 0, nullptr);
if (parser_ == nullptr) {
Log::Error("cannot recognise input data format, filename: %s", data_filename_);
Log::Fatal("Cannot recognising input data format, filename: %s", data_filename_);
}
// create text reader
text_reader_ = new TextReader<data_size_t>(data_filename);
} else {
// only need to load initilize score, other meta data will load from bin flie
metadata_.Init(init_score_filename);
Log::Info("will load data set from binary file");
Log::Info("Loading data set from binary file\n");
parser_ = nullptr;
text_reader_ = nullptr;
}
......@@ -82,7 +82,7 @@ void Dataset::LoadDataToMemory(int rank, int num_machines, bool is_pre_partition
[this, rank, num_machines, &qid, &query_boundaries, &is_query_used, num_queries]
(data_size_t line_idx) {
if (qid >= num_queries) {
Log::Error("current query is exceed the range of query file, please ensure your query file is correct");
Log::Fatal("Current query is exceed the range of query file, please ensure your query file is correct");
}
if (line_idx >= query_boundaries[qid + 1]) {
// if is new query
......@@ -139,7 +139,7 @@ void Dataset::SampleDataFromFile(int rank, int num_machines, bool is_pre_partiti
[this, rank, num_machines, &qid, &query_boundaries, &is_query_used, num_queries]
(data_size_t line_idx) {
if (qid >= num_queries) {
Log::Error("current query is exceed the range of query file, \
Log::Fatal("Query id is exceed the range of query file, \
please ensure your query file is correct");
}
if (line_idx >= query_boundaries[qid + 1]) {
......@@ -209,7 +209,7 @@ void Dataset::ConstructBinMappers(int rank, int num_machines, const std::vector<
num_data_, is_enable_sparse_));
} else {
// if feature is trival(only 1 bin), free spaces
Log::Info("Warning: feature %d only contains one value, will ignore it", i);
Log::Error("Feature %d only contains one value, will be ignored\n", i);
delete bin_mappers[i];
}
}
......@@ -486,10 +486,10 @@ void Dataset::SaveBinaryFile() {
file = fopen(bin_filename.c_str(), "wb");
#endif
if (file == NULL) {
Log::Error("cannot write binary data to %s ", bin_filename.c_str());
Log::Fatal("Cannot write binary data to %s ", bin_filename.c_str());
}
Log::Info("start save binary file for data %s", data_filename_);
Log::Info("Saving data to binary file: %s\n", data_filename_);
// get size of header
size_t size_of_header = sizeof(global_num_data_) + sizeof(is_enable_sparse_)
......@@ -556,7 +556,7 @@ void Dataset::LoadDataFromBinFile(int rank, int num_machines, bool is_pre_partit
#endif
if (file == NULL) {
Log::Error("cannot read binary data from %s", bin_filename.c_str());
Log::Fatal("Cannot read binary data from %s", bin_filename.c_str());
}
// buffer to read binary file
......@@ -567,7 +567,7 @@ void Dataset::LoadDataFromBinFile(int rank, int num_machines, bool is_pre_partit
size_t read_cnt = fread(buffer, sizeof(size_t), 1, file);
if (read_cnt != 1) {
Log::Error("binary file format error at header size");
Log::Fatal("Binary file format error at header size");
}
size_t size_of_head = *(reinterpret_cast<size_t*>(buffer));
......@@ -582,7 +582,7 @@ void Dataset::LoadDataFromBinFile(int rank, int num_machines, bool is_pre_partit
read_cnt = fread(buffer, 1, size_of_head, file);
if (read_cnt != size_of_head) {
Log::Error("binary file format error at header");
Log::Fatal("Binary file format error at header");
}
// get header
const char* mem_ptr = buffer;
......@@ -608,7 +608,7 @@ void Dataset::LoadDataFromBinFile(int rank, int num_machines, bool is_pre_partit
read_cnt = fread(buffer, sizeof(size_t), 1, file);
if (read_cnt != 1) {
Log::Error("binary file format error at size of meta data");
Log::Fatal("Binary file format error: wrong size of meta data");
}
size_t size_of_metadata = *(reinterpret_cast<size_t*>(buffer));
......@@ -623,7 +623,7 @@ void Dataset::LoadDataFromBinFile(int rank, int num_machines, bool is_pre_partit
read_cnt = fread(buffer, 1, size_of_metadata, file);
if (read_cnt != size_of_metadata) {
Log::Error("binary file format error at meta data");
Log::Fatal("Binary file format error: wrong size of meta data");
}
// load meta data
metadata_.LoadFromMemory(buffer);
......@@ -647,7 +647,7 @@ void Dataset::LoadDataFromBinFile(int rank, int num_machines, bool is_pre_partit
bool is_query_used = false;
for (data_size_t i = 0; i < num_data_; i++) {
if (qid >= num_queries) {
Log::Error("current query is exceed the range of query file, please ensure your query file is correct");
Log::Fatal("current query is exceed the range of query file, please ensure your query file is correct");
}
if (i >= query_boundaries[qid + 1]) {
// if is new query
......@@ -670,7 +670,7 @@ void Dataset::LoadDataFromBinFile(int rank, int num_machines, bool is_pre_partit
// read feature size
read_cnt = fread(buffer, sizeof(size_t), 1, file);
if (read_cnt != 1) {
Log::Error("binary file format error at feature %d's size", i);
Log::Fatal("Binary file format error at feature %d's size", i);
}
size_t size_of_feature = *(reinterpret_cast<size_t*>(buffer));
// re-allocmate space if not enough
......@@ -683,7 +683,7 @@ void Dataset::LoadDataFromBinFile(int rank, int num_machines, bool is_pre_partit
read_cnt = fread(buffer, 1, size_of_feature, file);
if (read_cnt != size_of_feature) {
Log::Error("binary file format error at feature %d loading , read count %d", i, read_cnt);
Log::Fatal("Binary file format error at feature %d loading , read count %d", i, read_cnt);
}
features_.push_back(new Feature(buffer, static_cast<data_size_t>(global_num_data_), used_data_indices_));
}
......@@ -693,10 +693,10 @@ void Dataset::LoadDataFromBinFile(int rank, int num_machines, bool is_pre_partit
void Dataset::CheckDataset() {
if (num_data_ <= 0) {
Log::Error("data size of %s is zero", data_filename_);
Log::Fatal("Data file %s is empty", data_filename_);
}
if (features_.size() <= 0) {
Log::Error("not useful feature of data %s", data_filename_);
Log::Fatal("Usable feature of data %s is null", data_filename_);
}
}
......
......@@ -61,7 +61,7 @@ void Metadata::CheckOrPartition(data_size_t num_all_data, const std::vector<data
if (used_data_indices.size() == 0) {
// check weights
if (weights_ != nullptr && num_weights_ != num_data_) {
Log::Info("init weight size doesn't equal with data file, will ignore");
Log::Error("Initial weight size doesn't equal to data, weights will be ignored\n");
delete[] weights_;
num_weights_ = 0;
weights_ = nullptr;
......@@ -69,7 +69,7 @@ void Metadata::CheckOrPartition(data_size_t num_all_data, const std::vector<data
// check query boundries
if (query_boundaries_ != nullptr && query_boundaries_[num_queries_] != num_data_) {
Log::Info("init query size doesn't equal with data file, will ignore");
Log::Error("Initial query size doesn't equal to data, queies will be ignored\n");
delete[] query_boundaries_;
num_queries_ = 0;
query_boundaries_ = nullptr;
......@@ -78,21 +78,21 @@ void Metadata::CheckOrPartition(data_size_t num_all_data, const std::vector<data
// contain initial score file
if (init_score_ != nullptr && num_init_score_ != num_data_) {
delete[] init_score_;
Log::Info("init score size doesn't equal with data file, will ignore");
Log::Error("Initial score size doesn't equal to data, score file will be ignored\n");
num_init_score_ = 0;
}
} else {
data_size_t num_used_data = static_cast<data_size_t>(used_data_indices.size());
// check weights
if (weights_ != nullptr && num_weights_ != num_all_data) {
Log::Info("init weight size doesn't equal with data file, will ignore");
Log::Error("Initial weights size doesn't equal to data, weights will be ignored\n");
delete[] weights_;
num_weights_ = 0;
weights_ = nullptr;
}
// check query boundries
if (query_boundaries_ != nullptr && query_boundaries_[num_queries_] != num_all_data) {
Log::Info("init query size doesn't equal with data file, will ignore");
Log::Error("Initial query size doesn't equal to data , queries will be ignored\n");
delete[] query_boundaries_;
num_queries_ = 0;
query_boundaries_ = nullptr;
......@@ -100,7 +100,7 @@ void Metadata::CheckOrPartition(data_size_t num_all_data, const std::vector<data
// contain initial score file
if (init_score_ != nullptr && num_init_score_ != num_all_data) {
Log::Info("init score size doesn't equal with data file, will ignore");
Log::Error("Initial score size doesn't equal to data , initial scores will be ignored\n");
delete[] init_score_;
num_init_score_ = 0;
}
......@@ -131,10 +131,10 @@ void Metadata::CheckOrPartition(data_size_t num_all_data, const std::vector<data
used_query.push_back(qid);
data_idx += len;
} else {
Log::Error("data partition error, not according to query");
Log::Fatal("Data partition error, data didn't match queies\n");
}
} else {
Log::Error("data partition error, not according to query");
Log::Fatal("Data partition error, data didn't match queies\n");
}
}
data_size_t * old_query_boundaries = query_boundaries_;
......@@ -182,7 +182,7 @@ void Metadata::LoadWeights() {
if (reader.Lines().size() <= 0) {
return;
}
Log::Info("Start to load weights");
Log::Info("Start loading weights\n");
num_weights_ = static_cast<data_size_t>(reader.Lines().size());
weights_ = new float[num_weights_];
for (data_size_t i = 0; i < num_weights_; ++i) {
......@@ -198,7 +198,7 @@ void Metadata::LoadInitialScore() {
TextReader<size_t> reader(init_score_filename_);
reader.ReadAllLines();
Log::Info("Start to load initial score");
Log::Info("Start loading initial scores\n");
num_init_score_ = static_cast<data_size_t>(reader.Lines().size());
init_score_ = new score_t[num_init_score_];
double tmp = 0.0f;
......@@ -218,7 +218,7 @@ void Metadata::LoadQueryBoundaries() {
if (reader.Lines().size() <= 0) {
return;
}
Log::Info("Start to load query boundries");
Log::Info("Start loading query boundries\n");
query_boundaries_ = new data_size_t[reader.Lines().size() + 1];
num_queries_ = static_cast<data_size_t>(reader.Lines().size());
query_boundaries_[0] = 0;
......@@ -233,7 +233,7 @@ void Metadata::LoadQueryWeights() {
if (weights_ == nullptr || query_boundaries_ == nullptr) {
return;
}
Log::Info("Start to load query weights");
Log::Info("Start loading query weights\n");
query_weights_ = new float[num_queries_];
for (data_size_t i = 0; i < num_queries_; ++i) {
query_weights_[i] = 0.0f;
......
......@@ -55,18 +55,18 @@ Parser* Parser::CreateParser(const char* filename, int num_features, bool* has_l
std::ifstream tmp_file;
tmp_file.open(filename);
if (!tmp_file.is_open()) {
Log::Error("Data file: %s doesn't exist", filename);
Log::Fatal("Data file: %s doesn't exist", filename);
}
std::string line1, line2;
if (!tmp_file.eof()) {
std::getline(tmp_file, line1);
} else {
Log::Error("Data file: %s at least should have one line", filename);
Log::Fatal("Data file: %s at least should have one line", filename);
}
if (!tmp_file.eof()) {
std::getline(tmp_file, line2);
} else {
Log::Info("Data file: %s only have one line", filename);
Log::Error("Data file: %s only have one line", filename);
}
tmp_file.close();
int comma_cnt = 0, comma_cnt2 = 0;
......
......@@ -27,7 +27,7 @@ public:
if (*str == ',') {
++str;
} else if (*str != '\0') {
Log::Error("input format error, should be CSV");
Log::Fatal("input format error, should be CSV");
}
}
}
......@@ -38,7 +38,7 @@ public:
if (*str == ',') {
++str;
} else if (*str != '\0') {
Log::Error("input format error, should be CSV");
Log::Fatal("input format error, should be CSV");
}
return ParseOneLine(str, out_features);
}
......@@ -58,7 +58,7 @@ public:
if (*str == '\t') {
++str;
} else if (*str != '\0') {
Log::Error("input format error, should be TSV");
Log::Fatal("input format error, should be TSV");
}
}
}
......@@ -69,7 +69,7 @@ public:
if (*str == '\t') {
++str;
} else if (*str != '\0') {
Log::Error("input format error, should be TSV");
Log::Fatal("input format error, should be TSV");
}
return ParseOneLine(str, out_features);
}
......@@ -88,7 +88,7 @@ public:
str = Common::Atof(str, &val);
out_features->emplace_back(idx, val);
} else {
Log::Error("input format error, should be LibSVM");
Log::Fatal("input format error, should be LibSVM");
}
str = Common::SkipSpaceAndTab(str);
}
......
......@@ -140,7 +140,7 @@ Tree::Tree(const std::string& str) {
|| key_vals.count("split_gain") <= 0 || key_vals.count("threshold") <= 0
|| key_vals.count("left_child") <= 0 || key_vals.count("right_child") <= 0
|| key_vals.count("leaf_parent") <= 0 || key_vals.count("leaf_value") <= 0) {
Log::Error("tree model string format error");
Log::Fatal("tree model string format error");
}
Common::Atoi(key_vals["num_leaves"].c_str(), &num_leaves_);
......
......@@ -23,7 +23,7 @@ public:
the_bigger_the_better = false;
sigmoid_ = static_cast<score_t>(config.sigmoid);
if (sigmoid_ <= 0.0f) {
Log::Error("sigmoid param %f should greater than zero", sigmoid_);
Log::Fatal("Sigmoid param %f should greater than zero", sigmoid_);
}
}
......@@ -72,7 +72,7 @@ public:
}
loss = sum_loss / sum_weights_;
if (output_freq_ > 0 && iter % output_freq_ == 0){
Log::Info("Iteration:%d, %s's %s: %f", iter, name, PointWiseLossCalculator::Name(), loss);
Log::Info("Iteration:%d, %s's %s: %f\n", iter, name, PointWiseLossCalculator::Name(), loss);
}
}
}
......@@ -229,7 +229,7 @@ public:
}
loss = auc;
if (output_freq_ > 0 && iter % output_freq_ == 0){
Log::Info("iteration:%d, %s's %s: %f", iter, name, "auc", loss);
Log::Info("Iteration:%d, %s's %s: %f\n", iter, name, "auc", loss);
}
}
}
......
......@@ -57,7 +57,7 @@ void DCGCalculator::CalMaxDCG(const std::vector<data_size_t>& ks,
std::vector<data_size_t> label_cnt(label_gain_.size(), 0);
// counts for all labels
for (data_size_t i = 0; i < num_data; ++i) {
if (static_cast<size_t>(label[i]) >= label_cnt.size()) { Log::Error("label excel %d\n", label[i]); }
if (static_cast<size_t>(label[i]) >= label_cnt.size()) { Log::Fatal("label excel %d\n", label[i]); }
++label_cnt[static_cast<int>(label[i])];
}
double cur_result = 0.0;
......
......@@ -43,7 +43,7 @@ public:
// get query boundaries
query_boundaries_ = metadata.query_boundaries();
if (query_boundaries_ == nullptr) {
Log::Error("For NDCG metric, should have query information");
Log::Fatal("For NDCG metric, there should be query information");
}
num_queries_ = metadata.num_queries();
// get query weights
......@@ -136,7 +136,7 @@ public:
}
loss = result[0];
if (output_freq_ > 0 && iter % output_freq_ == 0){
Log::Info("Iteration:%d, Test:%s, %s ", iter, name, result_ss.str().c_str());
Log::Info("Iteration:%d, Test:%s, %s \n", iter, name, result_ss.str().c_str());
}
}
}
......
......@@ -44,7 +44,7 @@ Linkers::Linkers(NetworkConfig config) {
}
}
if (rank_ == -1) {
Log::Error("machine list file doesn't contain local machine, app quit");
Log::Fatal("Machine list file doesn't contain local machine");
}
// construct listener
listener_ = new TcpSocket();
......@@ -73,14 +73,14 @@ Linkers::~Linkers() {
}
}
TcpSocket::Finalize();
Log::Info("network used %f seconds", network_time_ * 1e-3);
Log::Info("Network using %f seconds\n", network_time_ * 1e-3);
}
void Linkers::ParseMachineList(const char * filename) {
TextReader<size_t> machine_list_reader(filename);
machine_list_reader.ReadAllLines();
if (machine_list_reader.Lines().size() <= 0) {
Log::Error("machine list file:%s doesn't exist", filename);
Log::Fatal("Machine list file:%s doesn't exist", filename);
}
for (auto& line : machine_list_reader.Lines()) {
......@@ -95,7 +95,7 @@ void Linkers::ParseMachineList(const char * filename) {
continue;
}
if (client_ips_.size() >= static_cast<size_t>(num_machines_)) {
Log::Info("The #machine in machine list is larger than parameter num_machines, will ignore rest");
Log::Error("The #machine in machine_list is larger than parameter num_machines, the redundant will ignored\n");
break;
}
str_after_split[0] = Common::Trim(str_after_split[0]);
......@@ -104,17 +104,17 @@ void Linkers::ParseMachineList(const char * filename) {
client_ports_.push_back(atoi(str_after_split[1].c_str()));
}
if (client_ips_.size() != static_cast<size_t>(num_machines_)) {
Log::Info("The world size is bigger the #machine in machine list, change world size to %d .", client_ips_.size());
Log::Error("The world size is bigger the #machine in machine list, change world size to %d .\n", client_ips_.size());
num_machines_ = static_cast<int>(client_ips_.size());
}
}
void Linkers::TryBind(int port) {
Log::Info("try to bind port %d.", port);
Log::Info("try to bind port %d.\n", port);
if (listener_->Bind(port)) {
Log::Info("bind port %d success.", port);
Log::Info("Binding port %d success.\n", port);
} else {
Log::Error("bind port %d failed.", port);
Log::Fatal("Binding port %d failed.\n", port);
}
}
......@@ -125,7 +125,7 @@ void Linkers::SetLinker(int rank, const TcpSocket& socket) {
}
void Linkers::ListenThread(int incoming_cnt) {
Log::Info("Listening...");
Log::Info("Listening...\n");
char buffer[100];
int connected_cnt = 0;
while (connected_cnt < incoming_cnt) {
......@@ -192,7 +192,7 @@ void Linkers::Construct() {
if (cur_socket.Connect(client_ips_[out_rank].c_str(), client_ports_[out_rank])) {
break;
} else {
Log::Info("connect to rank %d failed, wait for %d milliseconds", out_rank, connect_fail_delay_time);
Log::Error("Connect to rank %d failed, wait for %d milliseconds\n", out_rank, connect_fail_delay_time);
std::this_thread::sleep_for(std::chrono::milliseconds(connect_fail_delay_time));
}
}
......@@ -217,7 +217,7 @@ bool Linkers::CheckLinker(int rank) {
void Linkers::PrintLinkers() {
for (int i = 0; i < num_machines_; ++i) {
if (CheckLinker(i)) {
Log::Info("Connected to rank %d.", i);
Log::Info("Connected to rank %d.\n", i);
}
}
}
......
......@@ -60,7 +60,7 @@ public:
TcpSocket() {
sockfd_ = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
if (sockfd_ == INVALID_SOCKET) {
Log::Error("socket construct error");
Log::Fatal("Socket construct error\n");
return;
}
ConfigSocket();
......@@ -69,7 +69,7 @@ public:
explicit TcpSocket(SOCKET socket) {
sockfd_ = socket;
if (sockfd_ == INVALID_SOCKET) {
Log::Error("passed socket error");
Log::Fatal("Passed socket error\n");
return;
}
ConfigSocket();
......@@ -97,11 +97,11 @@ public:
#if defined(_WIN32)
WSADATA wsa_data;
if (WSAStartup(MAKEWORD(2, 2), &wsa_data) == -1) {
Log::Error("socket error: start up error");
Log::Fatal("Socket error: WSAStart up error\n");
}
if (LOBYTE(wsa_data.wVersion) != 2 || HIBYTE(wsa_data.wVersion) != 2) {
WSACleanup();
Log::Error("socket error: Winsock.dll version error");
Log::Fatal("Socket error: Winsock.dll version error\n");
}
#else
#endif
......@@ -128,7 +128,7 @@ public:
char buffer[512];
// get hostName
if (gethostname(buffer, sizeof(buffer)) == SOCKET_ERROR) {
Log::Error("Error code: %d, when getting local host name.", WSAGetLastError());
Log::Fatal("Error code: %d, when getting local host name.\n", WSAGetLastError());
}
// push local ip
PIP_ADAPTER_INFO pAdapterInfo;
......@@ -137,7 +137,7 @@ public:
ULONG ulOutBufLen = sizeof(IP_ADAPTER_INFO);
pAdapterInfo = (IP_ADAPTER_INFO *)MALLOC(sizeof(IP_ADAPTER_INFO));
if (pAdapterInfo == NULL) {
Log::Error("Error allocating memory needed to call GetAdaptersinfo\n");
Log::Fatal("GetAdaptersinfo error: allocating memory \n");
}
// Make an initial call to GetAdaptersInfo to get
// the necessary size into the ulOutBufLen variable
......@@ -145,7 +145,7 @@ public:
FREE(pAdapterInfo);
pAdapterInfo = (IP_ADAPTER_INFO *)MALLOC(ulOutBufLen);
if (pAdapterInfo == NULL) {
Log::Error("Error allocating memory needed to call GetAdaptersinfo\n");
Log::Fatal("GetAdaptersinfo error: allocating memory \n");
}
}
if ((dwRetVal = GetAdaptersInfo(pAdapterInfo, &ulOutBufLen)) == NO_ERROR) {
......@@ -155,7 +155,7 @@ public:
pAdapter = pAdapter->Next;
}
} else {
printf("GetAdaptersInfo failed with error: %d\n", dwRetVal);
Log::Error("GetAdaptersinfo error: code %d \n", dwRetVal);
}
if (pAdapterInfo)
FREE(pAdapterInfo);
......@@ -218,7 +218,7 @@ public:
inline TcpSocket Accept() {
SOCKET newfd = accept(sockfd_, NULL, NULL);
if (newfd == INVALID_SOCKET) {
Log::Error("socket accept error,error code: %d", GetLastError());
Log::Fatal("Socket accept error, code: %d", GetLastError());
}
return TcpSocket(newfd);
}
......@@ -226,7 +226,7 @@ public:
inline int Send(const char *buf_, int len, int flag = 0) {
int cur_cnt = send(sockfd_, buf_, len, flag);
if (cur_cnt == SOCKET_ERROR) {
Log::Error("socket send error, error code: %d", GetLastError());
Log::Fatal("Socket send error, code: %d", GetLastError());
}
return cur_cnt;
}
......@@ -234,7 +234,7 @@ public:
inline int Recv(char *buf_, int len, int flags = 0) {
int cur_cnt = recv(sockfd_, buf_ , len , flags);
if (cur_cnt == SOCKET_ERROR) {
Log::Error("socket recv error, error code: %d", GetLastError());
Log::Fatal("Socket recv error, code: %d", GetLastError());
}
return cur_cnt;
}
......
......@@ -16,7 +16,7 @@ public:
is_unbalance_ = config.is_unbalance;
sigmoid_ = static_cast<score_t>(config.sigmoid);
if (sigmoid_ <= 0.0) {
Log::Error("sigmoid param %f should greater than zero", sigmoid_);
Log::Fatal("Sigmoid parameter %f :should greater than zero\n", sigmoid_);
}
}
~BinaryLogloss() {}
......@@ -34,10 +34,10 @@ public:
++cnt_negative;
}
}
Log::Info("number of postive:%d number of negative:%d", cnt_positive, cnt_negative);
Log::Info("Number of postive:%d, number of negative:%d\n", cnt_positive, cnt_negative);
// cannot continue if all sample are same class
if (cnt_positive == 0 || cnt_negative == 0) {
Log::Error("input training data only contain one class");
Log::Fatal("Input training data only contains one class\n");
}
// use -1 for negative class, and 1 for positive class
label_val_[0] = -1;
......
......@@ -31,7 +31,7 @@ public:
optimize_pos_at_ = config.max_position;
sigmoid_table_ = nullptr;
if (sigmoid_ <= 0.0) {
Log::Error("sigmoid param %f should greater than zero", sigmoid_);
Log::Fatal("sigmoid param %f should greater than zero", sigmoid_);
}
}
~LambdarankNDCG() {
......@@ -47,7 +47,7 @@ public:
// get boundries
query_boundaries_ = metadata.query_boundaries();
if (query_boundaries_ == nullptr) {
Log::Error("For NDCG metric, should have query information");
Log::Fatal("For NDCG metric, should have query information");
}
num_queries_ = metadata.num_queries();
// cache inverse max DCG, avoid compution many times
......
......@@ -95,7 +95,7 @@ void SerialTreeLearner::Init(const Dataset* train_data) {
if (has_ordered_bin_) {
is_data_in_leaf_ = new char[num_data_];
}
Log::Info("#data:%d #feature:%d\n", num_data_, num_features_);
Log::Info("Number of data:%d, Number of features:%d\n", num_data_, num_features_);
}
......