Commit 8497af62 authored by Allardvm's avatar Allardvm
Browse files

Improved consistency and wording of user-facing logs and documentation

Packages that parse LightGBM’s logs will require minor changes to their
parsing logic in order to continue working correctly.
parent 4e291459
...@@ -30,7 +30,7 @@ void Network::Init(NetworkConfig config) { ...@@ -30,7 +30,7 @@ void Network::Init(NetworkConfig config) {
block_len_ = new int[num_machines_]; block_len_ = new int[num_machines_];
buffer_size_ = 1024 * 1024; buffer_size_ = 1024 * 1024;
buffer_ = new char[buffer_size_]; buffer_ = new char[buffer_size_];
Log::Info("local rank %d, total number of machines %d", rank_, num_machines_); Log::Info("Local rank: %d, total number of machines: %d", rank_, num_machines_);
} }
void Network::Dispose() { void Network::Dispose() {
......
...@@ -60,7 +60,7 @@ public: ...@@ -60,7 +60,7 @@ public:
TcpSocket() { TcpSocket() {
sockfd_ = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP); sockfd_ = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
if (sockfd_ == INVALID_SOCKET) { if (sockfd_ == INVALID_SOCKET) {
Log::Fatal("Socket construct error"); Log::Fatal("Socket construction error");
return; return;
} }
ConfigSocket(); ConfigSocket();
...@@ -97,7 +97,7 @@ public: ...@@ -97,7 +97,7 @@ public:
#if defined(_WIN32) #if defined(_WIN32)
WSADATA wsa_data; WSADATA wsa_data;
if (WSAStartup(MAKEWORD(2, 2), &wsa_data) == -1) { if (WSAStartup(MAKEWORD(2, 2), &wsa_data) == -1) {
Log::Fatal("Socket error: WSAStart up error"); Log::Fatal("Socket error: WSAStartup error");
} }
if (LOBYTE(wsa_data.wVersion) != 2 || HIBYTE(wsa_data.wVersion) != 2) { if (LOBYTE(wsa_data.wVersion) != 2 || HIBYTE(wsa_data.wVersion) != 2) {
WSACleanup(); WSACleanup();
...@@ -128,7 +128,7 @@ public: ...@@ -128,7 +128,7 @@ public:
char buffer[512]; char buffer[512];
// get hostName // get hostName
if (gethostname(buffer, sizeof(buffer)) == SOCKET_ERROR) { if (gethostname(buffer, sizeof(buffer)) == SOCKET_ERROR) {
Log::Fatal("Error code: %d, when getting local host name.", WSAGetLastError()); Log::Fatal("Error code %d, when getting local host name", WSAGetLastError());
} }
// push local ip // push local ip
PIP_ADAPTER_INFO pAdapterInfo; PIP_ADAPTER_INFO pAdapterInfo;
...@@ -137,7 +137,7 @@ public: ...@@ -137,7 +137,7 @@ public:
ULONG ulOutBufLen = sizeof(IP_ADAPTER_INFO); ULONG ulOutBufLen = sizeof(IP_ADAPTER_INFO);
pAdapterInfo = (IP_ADAPTER_INFO *)MALLOC(sizeof(IP_ADAPTER_INFO)); pAdapterInfo = (IP_ADAPTER_INFO *)MALLOC(sizeof(IP_ADAPTER_INFO));
if (pAdapterInfo == NULL) { if (pAdapterInfo == NULL) {
Log::Fatal("GetAdaptersinfo error: allocating memory "); Log::Fatal("GetAdaptersinfo error: allocating memory");
} }
// Make an initial call to GetAdaptersInfo to get // Make an initial call to GetAdaptersInfo to get
// the necessary size into the ulOutBufLen variable // the necessary size into the ulOutBufLen variable
...@@ -145,7 +145,7 @@ public: ...@@ -145,7 +145,7 @@ public:
FREE(pAdapterInfo); FREE(pAdapterInfo);
pAdapterInfo = (IP_ADAPTER_INFO *)MALLOC(ulOutBufLen); pAdapterInfo = (IP_ADAPTER_INFO *)MALLOC(ulOutBufLen);
if (pAdapterInfo == NULL) { if (pAdapterInfo == NULL) {
Log::Fatal("GetAdaptersinfo error: allocating memory "); Log::Fatal("GetAdaptersinfo error: allocating memory");
} }
} }
if ((dwRetVal = GetAdaptersInfo(pAdapterInfo, &ulOutBufLen)) == NO_ERROR) { if ((dwRetVal = GetAdaptersInfo(pAdapterInfo, &ulOutBufLen)) == NO_ERROR) {
...@@ -155,7 +155,7 @@ public: ...@@ -155,7 +155,7 @@ public:
pAdapter = pAdapter->Next; pAdapter = pAdapter->Next;
} }
} else { } else {
Log::Fatal("GetAdaptersinfo error: code %d ", dwRetVal); Log::Fatal("GetAdaptersinfo error: code %d", dwRetVal);
} }
if (pAdapterInfo) if (pAdapterInfo)
FREE(pAdapterInfo); FREE(pAdapterInfo);
......
...@@ -16,7 +16,7 @@ public: ...@@ -16,7 +16,7 @@ public:
is_unbalance_ = config.is_unbalance; is_unbalance_ = config.is_unbalance;
sigmoid_ = static_cast<score_t>(config.sigmoid); sigmoid_ = static_cast<score_t>(config.sigmoid);
if (sigmoid_ <= 0.0) { if (sigmoid_ <= 0.0) {
Log::Fatal("Sigmoid parameter %f :should greater than zero", sigmoid_); Log::Fatal("Sigmoid parameter %f should be greater than zero", sigmoid_);
} }
} }
~BinaryLogloss() {} ~BinaryLogloss() {}
...@@ -34,10 +34,10 @@ public: ...@@ -34,10 +34,10 @@ public:
++cnt_negative; ++cnt_negative;
} }
} }
Log::Info("Number of postive:%d, number of negative:%d", cnt_positive, cnt_negative); Log::Info("Number of postive: %d, number of negative: %d", cnt_positive, cnt_negative);
// cannot continue if all sample are same class // cannot continue if all sample are same class
if (cnt_positive == 0 || cnt_negative == 0) { if (cnt_positive == 0 || cnt_negative == 0) {
Log::Fatal("Input training data only contains one class"); Log::Fatal("Training data only contains one class");
} }
// use -1 for negative class, and 1 for positive class // use -1 for negative class, and 1 for positive class
label_val_[0] = -1; label_val_[0] = -1;
......
...@@ -16,20 +16,20 @@ public: ...@@ -16,20 +16,20 @@ public:
:label_int_(nullptr) { :label_int_(nullptr) {
num_class_ = config.num_class; num_class_ = config.num_class;
} }
~MulticlassLogloss() { ~MulticlassLogloss() {
if (label_int_ != nullptr) { delete[] label_int_; } if (label_int_ != nullptr) { delete[] label_int_; }
} }
void Init(const Metadata& metadata, data_size_t num_data) override { void Init(const Metadata& metadata, data_size_t num_data) override {
num_data_ = num_data; num_data_ = num_data;
label_ = metadata.label(); label_ = metadata.label();
weights_ = metadata.weights(); weights_ = metadata.weights();
label_int_ = new int[num_data_]; label_int_ = new int[num_data_];
for (int i = 0; i < num_data_; ++i){ for (int i = 0; i < num_data_; ++i){
label_int_[i] = static_cast<int>(label_[i]); label_int_[i] = static_cast<int>(label_[i]);
if (label_int_[i] < 0 || label_int_[i] >= num_class_) { if (label_int_[i] < 0 || label_int_[i] >= num_class_) {
Log::Fatal("Label must be in [0, %d), but find %d in label", num_class_, label_int_[i]); Log::Fatal("Label must be in [0, %d), but found %d in label", num_class_, label_int_[i]);
} }
} }
} }
...@@ -42,7 +42,7 @@ public: ...@@ -42,7 +42,7 @@ public:
for (int k = 0; k < num_class_; ++k){ for (int k = 0; k < num_class_; ++k){
rec[k] = static_cast<double>(score[k * num_data_ + i]); rec[k] = static_cast<double>(score[k * num_data_ + i]);
} }
Common::Softmax(&rec); Common::Softmax(&rec);
for (int k = 0; k < num_class_; ++k) { for (int k = 0; k < num_class_; ++k) {
score_t p = static_cast<score_t>(rec[k]); score_t p = static_cast<score_t>(rec[k]);
if (label_int_[i] == k) { if (label_int_[i] == k) {
...@@ -51,7 +51,7 @@ public: ...@@ -51,7 +51,7 @@ public:
gradients[k * num_data_ + i] = p; gradients[k * num_data_ + i] = p;
} }
hessians[k * num_data_ + i] = 2.0f * p * (1.0f - p); hessians[k * num_data_ + i] = 2.0f * p * (1.0f - p);
} }
} }
} else { } else {
#pragma omp parallel for schedule(static) #pragma omp parallel for schedule(static)
...@@ -59,7 +59,7 @@ public: ...@@ -59,7 +59,7 @@ public:
std::vector<double> rec(num_class_); std::vector<double> rec(num_class_);
for (int k = 0; k < num_class_; ++k){ for (int k = 0; k < num_class_; ++k){
rec[k] = static_cast<double>(score[k * num_data_ + i]); rec[k] = static_cast<double>(score[k * num_data_ + i]);
} }
Common::Softmax(&rec); Common::Softmax(&rec);
for (int k = 0; k < num_class_; ++k) { for (int k = 0; k < num_class_; ++k) {
score_t p = static_cast<score_t>(rec[k]); score_t p = static_cast<score_t>(rec[k]);
......
...@@ -31,7 +31,7 @@ public: ...@@ -31,7 +31,7 @@ public:
optimize_pos_at_ = config.max_position; optimize_pos_at_ = config.max_position;
sigmoid_table_ = nullptr; sigmoid_table_ = nullptr;
if (sigmoid_ <= 0.0) { if (sigmoid_ <= 0.0) {
Log::Fatal("sigmoid param %f should greater than zero", sigmoid_); Log::Fatal("Sigmoid param %f should be greater than zero", sigmoid_);
} }
} }
~LambdarankNDCG() { ~LambdarankNDCG() {
...@@ -47,7 +47,7 @@ public: ...@@ -47,7 +47,7 @@ public:
// get boundries // get boundries
query_boundaries_ = metadata.query_boundaries(); query_boundaries_ = metadata.query_boundaries();
if (query_boundaries_ == nullptr) { if (query_boundaries_ == nullptr) {
Log::Fatal("For lambdarank tasks, should have query information"); Log::Fatal("Lambdarank tasks require query information");
} }
num_queries_ = metadata.num_queries(); num_queries_ = metadata.num_queries();
// cache inverse max DCG, avoid computation many times // cache inverse max DCG, avoid computation many times
......
...@@ -112,7 +112,7 @@ void SerialTreeLearner::Init(const Dataset* train_data) { ...@@ -112,7 +112,7 @@ void SerialTreeLearner::Init(const Dataset* train_data) {
if (has_ordered_bin_) { if (has_ordered_bin_) {
is_data_in_leaf_ = new char[num_data_]; is_data_in_leaf_ = new char[num_data_];
} }
Log::Info("Number of data:%d, Number of features:%d", num_data_, num_features_); Log::Info("Number of data: %d, number of features: %d", num_data_, num_features_);
} }
...@@ -142,7 +142,7 @@ Tree* SerialTreeLearner::Train(const score_t* gradients, const score_t *hessians ...@@ -142,7 +142,7 @@ Tree* SerialTreeLearner::Train(const score_t* gradients, const score_t *hessians
const SplitInfo& best_leaf_SplitInfo = best_split_per_leaf_[best_leaf]; const SplitInfo& best_leaf_SplitInfo = best_split_per_leaf_[best_leaf];
// cannot split, quit // cannot split, quit
if (best_leaf_SplitInfo.gain <= 0.0) { if (best_leaf_SplitInfo.gain <= 0.0) {
Log::Info("cannot find more split with gain = %f , current #leaves=%d", Log::Info("No further splits with positive gain, best gain: %f, leaves: %d",
best_leaf_SplitInfo.gain, split + 1); best_leaf_SplitInfo.gain, split + 1);
break; break;
} }
...@@ -266,7 +266,7 @@ bool SerialTreeLearner::BeforeFindBestSplit(int left_leaf, int right_leaf) { ...@@ -266,7 +266,7 @@ bool SerialTreeLearner::BeforeFindBestSplit(int left_leaf, int right_leaf) {
if (right_leaf < 0) { if (right_leaf < 0) {
histogram_pool_.Get(left_leaf, &smaller_leaf_histogram_array_); histogram_pool_.Get(left_leaf, &smaller_leaf_histogram_array_);
larger_leaf_histogram_array_ = nullptr; larger_leaf_histogram_array_ = nullptr;
} else if (num_data_in_left_child < num_data_in_right_child) { } else if (num_data_in_left_child < num_data_in_right_child) {
smaller_leaf = left_leaf; smaller_leaf = left_leaf;
larger_leaf = right_leaf; larger_leaf = right_leaf;
......
...@@ -40,13 +40,13 @@ public: ...@@ -40,13 +40,13 @@ public:
double right_sum_hessian; double right_sum_hessian;
SplitInfo() { SplitInfo() {
// initilize with -1 and -inf gain // initialize with -1 and -inf gain
feature = -1; feature = -1;
gain = kMinScore; gain = kMinScore;
} }
inline void Reset() { inline void Reset() {
// initilize with -1 and -inf gain // initialize with -1 and -inf gain
feature = -1; feature = -1;
gain = kMinScore; gain = kMinScore;
} }
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment