Commit 5172b533 authored by Qiwei Ye, committed by GitHub

Merge pull request #64 from Allardvm/master

Improved consistency and wording of user-facing logs and documentation
parents 4e291459 8497af62
@@ -30,7 +30,7 @@ void Network::Init(NetworkConfig config) {
block_len_ = new int[num_machines_];
buffer_size_ = 1024 * 1024;
buffer_ = new char[buffer_size_];
Log::Info("local rank %d, total number of machines %d", rank_, num_machines_);
Log::Info("Local rank: %d, total number of machines: %d", rank_, num_machines_);
}
void Network::Dispose() {
@@ -60,7 +60,7 @@ public:
TcpSocket() {
sockfd_ = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
if (sockfd_ == INVALID_SOCKET) {
Log::Fatal("Socket construct error");
Log::Fatal("Socket construction error");
return;
}
ConfigSocket();
@@ -97,7 +97,7 @@ public:
#if defined(_WIN32)
WSADATA wsa_data;
if (WSAStartup(MAKEWORD(2, 2), &wsa_data) == -1) {
Log::Fatal("Socket error: WSAStart up error");
Log::Fatal("Socket error: WSAStartup error");
}
if (LOBYTE(wsa_data.wVersion) != 2 || HIBYTE(wsa_data.wVersion) != 2) {
WSACleanup();
@@ -128,7 +128,7 @@ public:
char buffer[512];
// get hostName
if (gethostname(buffer, sizeof(buffer)) == SOCKET_ERROR) {
Log::Fatal("Error code: %d, when getting local host name.", WSAGetLastError());
Log::Fatal("Error code %d, when getting local host name", WSAGetLastError());
}
// push local ip
PIP_ADAPTER_INFO pAdapterInfo;
@@ -137,7 +137,7 @@ public:
ULONG ulOutBufLen = sizeof(IP_ADAPTER_INFO);
pAdapterInfo = (IP_ADAPTER_INFO *)MALLOC(sizeof(IP_ADAPTER_INFO));
if (pAdapterInfo == NULL) {
Log::Fatal("GetAdaptersinfo error: allocating memory ");
Log::Fatal("GetAdaptersinfo error: allocating memory");
}
// Make an initial call to GetAdaptersInfo to get
// the necessary size into the ulOutBufLen variable
@@ -145,7 +145,7 @@ public:
FREE(pAdapterInfo);
pAdapterInfo = (IP_ADAPTER_INFO *)MALLOC(ulOutBufLen);
if (pAdapterInfo == NULL) {
Log::Fatal("GetAdaptersinfo error: allocating memory ");
Log::Fatal("GetAdaptersinfo error: allocating memory");
}
}
if ((dwRetVal = GetAdaptersInfo(pAdapterInfo, &ulOutBufLen)) == NO_ERROR) {
@@ -155,7 +155,7 @@ public:
pAdapter = pAdapter->Next;
}
} else {
Log::Fatal("GetAdaptersinfo error: code %d ", dwRetVal);
Log::Fatal("GetAdaptersinfo error: code %d", dwRetVal);
}
if (pAdapterInfo)
FREE(pAdapterInfo);
@@ -16,7 +16,7 @@ public:
is_unbalance_ = config.is_unbalance;
sigmoid_ = static_cast<score_t>(config.sigmoid);
if (sigmoid_ <= 0.0) {
Log::Fatal("Sigmoid parameter %f :should greater than zero", sigmoid_);
Log::Fatal("Sigmoid parameter %f should be greater than zero", sigmoid_);
}
}
~BinaryLogloss() {}
@@ -34,10 +34,10 @@ public:
++cnt_negative;
}
}
Log::Info("Number of postive:%d, number of negative:%d", cnt_positive, cnt_negative);
Log::Info("Number of postive: %d, number of negative: %d", cnt_positive, cnt_negative);
// cannot continue if all samples are in the same class
if (cnt_positive == 0 || cnt_negative == 0) {
Log::Fatal("Input training data only contains one class");
Log::Fatal("Training data only contains one class");
}
// use -1 for negative class, and 1 for positive class
label_val_[0] = -1;
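As context for the hunks above, here is a minimal, hedged sketch of why `Init()` rejects a non-positive sigmoid and maps labels to -1/+1. It assumes the conventional binary log-loss formulation; the function name `BinaryGradient` and the exact formula are illustrative, not the code this commit touches.

```cpp
#include <cmath>
#include <cstdio>

// Hedged sketch of the usual binary log-loss gradient for labels mapped to
// -1/+1 with a sigmoid scale parameter: for L = log(1 + exp(-y*sig*f)),
// dL/df = -y*sig / (1 + exp(y*sig*f)). A non-positive sigmoid would flatten
// or invert this response curve, hence the Log::Fatal guard above.
double BinaryGradient(double label, double score, double sigmoid) {
  return -label * sigmoid / (1.0 + std::exp(label * sigmoid * score));
}

int main() {
  std::printf("grad(y=+1, f=0) = %f\n", BinaryGradient(1.0, 0.0, 1.0));  // -0.5
  return 0;
}
```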
@@ -16,20 +16,20 @@ public:
:label_int_(nullptr) {
num_class_ = config.num_class;
}
~MulticlassLogloss() {
-if (label_int_ != nullptr) { delete[] label_int_; }
+if (label_int_ != nullptr) { delete[] label_int_; }
}
void Init(const Metadata& metadata, data_size_t num_data) override {
num_data_ = num_data;
label_ = metadata.label();
weights_ = metadata.weights();
label_int_ = new int[num_data_];
for (int i = 0; i < num_data_; ++i){
-label_int_[i] = static_cast<int>(label_[i]);
+label_int_[i] = static_cast<int>(label_[i]);
if (label_int_[i] < 0 || label_int_[i] >= num_class_) {
Log::Fatal("Label must be in [0, %d), but find %d in label", num_class_, label_int_[i]);
Log::Fatal("Label must be in [0, %d), but found %d in label", num_class_, label_int_[i]);
}
}
}
@@ -42,7 +42,7 @@ public:
for (int k = 0; k < num_class_; ++k){
rec[k] = static_cast<double>(score[k * num_data_ + i]);
}
-Common::Softmax(&rec);
+Common::Softmax(&rec);
for (int k = 0; k < num_class_; ++k) {
score_t p = static_cast<score_t>(rec[k]);
if (label_int_[i] == k) {
@@ -51,7 +51,7 @@ public:
gradients[k * num_data_ + i] = p;
}
hessians[k * num_data_ + i] = 2.0f * p * (1.0f - p);
-}
+}
}
} else {
#pragma omp parallel for schedule(static)
@@ -59,7 +59,7 @@ public:
std::vector<double> rec(num_class_);
for (int k = 0; k < num_class_; ++k){
rec[k] = static_cast<double>(score[k * num_data_ + i]);
-}
+}
Common::Softmax(&rec);
for (int k = 0; k < num_class_; ++k) {
score_t p = static_cast<score_t>(rec[k]);
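The multiclass hunks above show the softmax gradient/Hessian loop only in fragments, so here is a self-contained sketch of the same computation. The name `SoftmaxGradients` and the max-subtraction inside the softmax are assumptions for the sketch; the `p - 1` / `p` gradients and the `2p(1-p)` Hessian mirror the diff.

```cpp
#include <algorithm>
#include <cmath>
#include <vector>

// Sketch of the per-row multiclass log-loss update seen above: softmax over
// the raw scores, then gradient = p - 1 for the true class and p otherwise,
// with hessian = 2p(1-p). Names are illustrative, not LightGBM's.
void SoftmaxGradients(const std::vector<double>& raw_scores, int true_class,
                      std::vector<double>* gradients,
                      std::vector<double>* hessians) {
  const int num_class = static_cast<int>(raw_scores.size());
  std::vector<double> prob(raw_scores);
  // Numerically stable softmax: subtract the max before exponentiating.
  double wmax = prob[0];
  for (int k = 1; k < num_class; ++k) wmax = std::max(wmax, prob[k]);
  double wsum = 0.0;
  for (int k = 0; k < num_class; ++k) {
    prob[k] = std::exp(prob[k] - wmax);
    wsum += prob[k];
  }
  for (int k = 0; k < num_class; ++k) prob[k] /= wsum;
  gradients->resize(num_class);
  hessians->resize(num_class);
  for (int k = 0; k < num_class; ++k) {
    const double p = prob[k];
    (*gradients)[k] = (k == true_class) ? p - 1.0 : p;
    (*hessians)[k] = 2.0 * p * (1.0 - p);
  }
}
```

This is also why the earlier hunk validates `label_int_[i]` against `[0, num_class_)`: the true-class index selects which column receives the `p - 1` gradient.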
@@ -31,7 +31,7 @@ public:
optimize_pos_at_ = config.max_position;
sigmoid_table_ = nullptr;
if (sigmoid_ <= 0.0) {
Log::Fatal("sigmoid param %f should greater than zero", sigmoid_);
Log::Fatal("Sigmoid param %f should be greater than zero", sigmoid_);
}
}
~LambdarankNDCG() {
@@ -47,7 +47,7 @@ public:
// get boundaries
query_boundaries_ = metadata.query_boundaries();
if (query_boundaries_ == nullptr) {
Log::Fatal("For lambdarank tasks, should have query information");
Log::Fatal("Lambdarank tasks require query information");
}
num_queries_ = metadata.num_queries();
// cache inverse max DCG to avoid recomputing it many times
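The comment "cache inverse max DCG to avoid recomputing it many times" refers to the NDCG normalizer computed once per query. A hedged sketch follows, assuming the conventional `(2^label - 1)` gain and `1/log2(rank + 2)` discount; the truncation parameter `top_k` stands in for the `max_position`/`optimize_pos_at_` setting seen above, and the real code may differ in details.

```cpp
#include <algorithm>
#include <cmath>
#include <functional>
#include <vector>

// Sketch of the per-query inverse max DCG that a LambdaRank objective can
// cache. Assumes the conventional gain (2^label - 1) and 1/log2(rank + 2)
// discount, truncated at top_k; this is not LightGBM's exact implementation.
double InverseMaxDCG(std::vector<double> labels, int top_k) {
  // Max DCG is achieved by sorting the relevance labels in descending order.
  std::sort(labels.begin(), labels.end(), std::greater<double>());
  double dcg = 0.0;
  const int n = std::min<int>(top_k, static_cast<int>(labels.size()));
  for (int i = 0; i < n; ++i) {
    dcg += (std::pow(2.0, labels[i]) - 1.0) / std::log2(i + 2.0);
  }
  // Cache the reciprocal so each lambda update is a multiply, not a divide.
  return dcg > 0.0 ? 1.0 / dcg : 0.0;
}
```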
@@ -112,7 +112,7 @@ void SerialTreeLearner::Init(const Dataset* train_data) {
if (has_ordered_bin_) {
is_data_in_leaf_ = new char[num_data_];
}
Log::Info("Number of data:%d, Number of features:%d", num_data_, num_features_);
Log::Info("Number of data: %d, number of features: %d", num_data_, num_features_);
}
@@ -142,7 +142,7 @@ Tree* SerialTreeLearner::Train(const score_t* gradients, const score_t *hessians
const SplitInfo& best_leaf_SplitInfo = best_split_per_leaf_[best_leaf];
// cannot split, quit
if (best_leaf_SplitInfo.gain <= 0.0) {
Log::Info("cannot find more split with gain = %f , current #leaves=%d",
Log::Info("No further splits with positive gain, best gain: %f, leaves: %d",
best_leaf_SplitInfo.gain, split + 1);
break;
}
@@ -266,7 +266,7 @@ bool SerialTreeLearner::BeforeFindBestSplit(int left_leaf, int right_leaf) {
if (right_leaf < 0) {
histogram_pool_.Get(left_leaf, &smaller_leaf_histogram_array_);
larger_leaf_histogram_array_ = nullptr;
} else if (num_data_in_left_child < num_data_in_right_child) {
smaller_leaf = left_leaf;
larger_leaf = right_leaf;
@@ -40,13 +40,13 @@ public:
double right_sum_hessian;
SplitInfo() {
-// initilize with -1 and -inf gain
+// initialize with -1 and -inf gain
feature = -1;
gain = kMinScore;
}
inline void Reset() {
-// initilize with -1 and -inf gain
+// initialize with -1 and -inf gain
feature = -1;
gain = kMinScore;
}