Commit bb05a06f authored by Hui Xue

Update for typos.

parent 85e90f21
@@ -65,7 +65,7 @@ public:
   }
 private:
-  /*! \brief Output frequently */
+  /*! \brief Output frequency */
   int output_freq_;
   /*! \brief Number of data */
   data_size_t num_data_;
......
@@ -9,7 +9,7 @@
 namespace LightGBM {
-// static member defination
+// static member definition
 int Network::num_machines_;
 int Network::rank_;
 Linkers* Network::linkers_;
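The comment being fixed here documents a core C++ rule: a static data member declared inside a class is only a declaration, and it must be defined exactly once at namespace scope, which is what `int Network::num_machines_;` and friends do. A minimal standalone illustration of that pattern (the `Counter` class is hypothetical, not from LightGBM):

```cpp
#include <iostream>

class Counter {
 public:
  static int count_;  // declaration only: no storage is allocated here
};

// Definition: exactly one out-of-class definition allocates the storage,
// mirroring `int Network::num_machines_;` in the hunk above.
int Counter::count_ = 0;

int main() {
  ++Counter::count_;
  std::cout << Counter::count_ << std::endl;  // prints 1
  return 0;
}
```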
@@ -141,7 +141,7 @@ void Network::ReduceScatter(char* input, int input_size, int* block_start, int*
     // send local data to neighbor first
     linkers_->Send(recursive_halving_map_.neighbor, input, input_size);
   } else if (recursive_halving_map_.type == RecursiveHalvingNodeType::GroupLeader) {
-    // recieve neighbor data first
+    // receive neighbor data first
     int need_recv_cnt = input_size;
     linkers_->Recv(recursive_halving_map_.neighbor, output, need_recv_cnt);
     // reduce
......
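This hunk sits in the pre-step of recursive halving for machine counts that are not a power of two: a leftover node sends its whole buffer to its neighboring group leader, and the leader receives that buffer and reduces it into its own before the halving rounds begin. A self-contained simulation of that exchange, assuming element-wise sum as the reduce and a plain copy in place of the `Send`/`Recv` calls shown above:

```cpp
#include <iostream>
#include <vector>

int main() {
  std::vector<int> leader_buf = {1, 2, 3, 4};      // group leader's local data
  std::vector<int> other_buf  = {10, 20, 30, 40};  // leftover node's local data

  // The "Other" node sends its whole buffer; the leader receives it.
  // A copy stands in for the linkers_->Send / linkers_->Recv pair.
  std::vector<int> recv_buf = other_buf;

  // The leader reduces the received block into its own (element-wise sum).
  for (size_t i = 0; i < leader_buf.size(); ++i) leader_buf[i] += recv_buf[i];

  for (int v : leader_buf) std::cout << v << ' ';  // 11 22 33 44
  std::cout << '\n';
  return 0;
}
```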
@@ -50,7 +50,7 @@ public:
       Log::Stderr("For NDCG metric, should have query information");
     }
     num_queries_ = metadata.num_queries();
-    // cache inverse max DCG, avoid compution many times
+    // cache inverse max DCG, avoid computation many times
     inverse_max_dcgs_ = new score_t[num_queries_];
     for (data_size_t i = 0; i < num_queries_; ++i) {
       inverse_max_dcgs_[i] = static_cast<score_t>(
......
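The comment being corrected explains a caching optimization: a query's max DCG depends only on its sorted labels, so its inverse can be computed once per query and reused at every evaluation, turning a per-iteration sort into a single multiply. A sketch under the standard NDCG convention of a (2^label - 1) gain and 1/log2(position + 2) discount; the names are illustrative, not the metric class's real members:

```cpp
#include <algorithm>
#include <cmath>
#include <functional>
#include <iostream>
#include <vector>

// Max DCG of one query: sort labels descending, then apply the standard
// gain/discount: (2^label - 1) / log2(position + 2).
double MaxDCG(std::vector<double> labels, size_t truncation) {
  std::sort(labels.begin(), labels.end(), std::greater<double>());
  double dcg = 0.0;
  for (size_t i = 0; i < labels.size() && i < truncation; ++i) {
    dcg += (std::pow(2.0, labels[i]) - 1.0) / std::log2(i + 2.0);
  }
  return dcg;
}

int main() {
  // One "query" with graded relevance labels.
  std::vector<double> labels = {3, 2, 3, 0, 1};
  // Cache the inverse once, as the hunk above does per query; later NDCG
  // evaluations just multiply a running DCG by this factor.
  double max_dcg = MaxDCG(labels, labels.size());
  double inverse_max_dcg = max_dcg > 0.0 ? 1.0 / max_dcg : 0.0;
  std::cout << "inverse max DCG = " << inverse_max_dcg << '\n';
  return 0;
}
```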
@@ -40,7 +40,7 @@ public:
   * \brief Construct a histogram
   * \param num_data number of data in current leaf
   * \param sum_gradients sum of gradients of current leaf
-  * \param sum_hessians sum of hissians of current leaf
+  * \param sum_hessians sum of hessians of current leaf
   * \param ordered_gradients Orederd gradients
   * \param ordered_hessians Ordered hessians
   * \param data_indices data indices of current leaf
@@ -59,7 +59,7 @@ public:
   * \param leaf current leaf
   * \param num_data number of data in current leaf
   * \param sum_gradients sum of gradients of current leaf
-  * \param sum_hessians sum of hissians of current leaf
+  * \param sum_hessians sum of hessians of current leaf
   * \param gradients
   * \param hessian
   */
@@ -76,7 +76,7 @@ public:
   * \brief Set sumup information for current histogram
   * \param num_data number of data in current leaf
   * \param sum_gradients sum of gradients of current leaf
-  * \param sum_hessians sum of hissians of current leaf
+  * \param sum_hessians sum of hessians of current leaf
   */
  void SetSumup(data_size_t num_data, score_t sum_gradients, score_t sum_hessians) {
    num_data_ = num_data;
......
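The three hunks above fix the same "hissians" typo in `FeatureHistogram`'s doc comments. The documented parameters describe what the class accumulates: per-bin counts and gradient/hessian sums over the leaf's data, plus the leaf-level totals recorded by `SetSumup`. A toy sketch of that accumulation over ordered gradients and hessians (the `HistogramBin` struct is an assumption, not the real layout):

```cpp
#include <iostream>
#include <vector>

// One bin of a feature histogram: count plus gradient/hessian sums.
struct HistogramBin {
  int cnt = 0;
  double sum_gradients = 0.0;
  double sum_hessians = 0.0;
};

int main() {
  // Toy leaf: each data point has a bin index for this feature, a gradient,
  // and a hessian (the "ordered" arrays named in the hunks above).
  std::vector<int> bin_of = {0, 2, 1, 2, 0};
  std::vector<double> gradients = {0.5, -1.0, 0.2, 0.3, -0.4};
  std::vector<double> hessians = {1.0, 1.0, 1.0, 1.0, 1.0};

  std::vector<HistogramBin> hist(3);
  double sum_gradients = 0.0, sum_hessians = 0.0;  // the SetSumup totals
  for (size_t i = 0; i < bin_of.size(); ++i) {
    HistogramBin& b = hist[bin_of[i]];
    b.cnt += 1;
    b.sum_gradients += gradients[i];
    b.sum_hessians += hessians[i];
    sum_gradients += gradients[i];
    sum_hessians += hessians[i];
  }
  std::cout << "leaf totals: g=" << sum_gradients
            << " h=" << sum_hessians << '\n';
  return 0;
}
```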
@@ -26,7 +26,7 @@ public:
   }
   /*!
-  * \brief Init splits on current leaf, don't need to travesal all data
+  * \brief Init splits on current leaf, don't need to traverse all data
   * \param leaf Index of current leaf
   * \param data_partition current data partition
   * \param sum_gradients
@@ -43,7 +43,7 @@ public:
   }
   /*!
-  * \brief Init splits on current leaf, need to travesal all data to sum up
+  * \brief Init splits on current leaf, need to traverse all data to sum up
   * \param gradients
   * \param hessians
   */
......
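The two `Init` overloads documented above differ in where the leaf totals come from: a child leaf inherits its gradient/hessian sums from the split that created it, so no pass over the data is needed, while the root leaf must traverse all data to sum them up. A sketch of the two cost profiles, with free functions standing in for the overloads:

```cpp
#include <iostream>
#include <vector>

struct LeafSplits {
  int num_data = 0;
  double sum_gradients = 0.0;
  double sum_hessians = 0.0;
};

// Cheap path: totals are handed down from the parent's split, so there is
// no need to traverse the data ("don't need to traverse all data").
LeafSplits InitFromSplit(int num_data, double sum_g, double sum_h) {
  return {num_data, sum_g, sum_h};
}

// Expensive path: for the root leaf the totals are unknown, so sum over
// every data point ("need to traverse all data to sum up").
LeafSplits InitByTraversal(const std::vector<double>& gradients,
                           const std::vector<double>& hessians) {
  LeafSplits s;
  s.num_data = static_cast<int>(gradients.size());
  for (size_t i = 0; i < gradients.size(); ++i) {
    s.sum_gradients += gradients[i];
    s.sum_hessians += hessians[i];
  }
  return s;
}

int main() {
  std::vector<double> g = {0.5, -1.0, 0.2}, h = {1.0, 1.0, 1.0};
  LeafSplits root = InitByTraversal(g, h);         // root: O(num_data)
  LeafSplits child = InitFromSplit(2, -0.5, 2.0);  // child: O(1)
  std::cout << root.sum_gradients << ' ' << child.sum_hessians << '\n';
  return 0;
}
```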
@@ -77,9 +77,9 @@ private:
   int* block_start_;
   /*! \brief Block size for reduce scatter */
   int* block_len_;
-  /*! \brief Write positions for feature histgrams */
+  /*! \brief Write positions for feature histograms */
   int* buffer_write_start_pos_;
-  /*! \brief Read positions for local feature histgrams */
+  /*! \brief Read positions for local feature histograms */
   int* buffer_read_start_pos_;
   /*! \brief Size for reduce scatter */
   int reduce_scatter_size_;
......
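These members lay out the shared buffer for the data-parallel reduce scatter: each feature's local histogram is copied to a fixed offset before the exchange and read back from another offset afterwards. A sketch of deriving such write offsets as a prefix sum over per-feature histogram sizes (the sizes here are made up, and this is only the general idea, not LightGBM's exact layout):

```cpp
#include <iostream>
#include <vector>

int main() {
  // Bytes needed by each feature's histogram (hypothetical sizes).
  std::vector<int> histogram_sizes = {64, 128, 32, 96};

  // Write positions as a prefix sum: feature i's histogram is copied to
  // buffer + buffer_write_start_pos[i] before the reduce scatter runs.
  std::vector<int> buffer_write_start_pos(histogram_sizes.size());
  int offset = 0;
  for (size_t i = 0; i < histogram_sizes.size(); ++i) {
    buffer_write_start_pos[i] = offset;
    offset += histogram_sizes[i];
  }
  int reduce_scatter_size = offset;  // total bytes exchanged

  std::cout << "total size: " << reduce_scatter_size << '\n';  // 320
  for (int p : buffer_write_start_pos) std::cout << p << ' ';  // 0 64 192 224
  std::cout << '\n';
  return 0;
}
```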
@@ -239,16 +239,16 @@ bool SerialTreeLearner::BeforeFindBestSplit(int left_leaf, int right_leaf) {
     larger_leaf_histogram_array_ = nullptr;
   } else if (num_data_in_left_child < num_data_in_right_child) {
     smaller_leaf = left_leaf;
-    // put parent(left) leaf's histograms into larger leaf's histgrams
+    // put parent(left) leaf's histograms into larger leaf's histograms
     larger_leaf_histogram_array_ = historical_histogram_array_[left_leaf];
     smaller_leaf_histogram_array_ = historical_histogram_array_[right_leaf];
     // We will construc histograms for smaller leaf, and smaller_leaf=left_leaf = parent.
-    // if we don't swap the cache, we will overwrite the parent's hisogram cache.
+    // if we don't swap the cache, we will overwrite the parent's histogram cache.
     std::swap(historical_histogram_array_[left_leaf], historical_histogram_array_[right_leaf]);
   } else {
     smaller_leaf = right_leaf;
-    // put parent(left) leaf's histograms to larger leaf's histgrams
+    // put parent(left) leaf's histograms to larger leaf's histograms
     larger_leaf_histogram_array_ = historical_histogram_array_[left_leaf];
     smaller_leaf_histogram_array_ = historical_histogram_array_[right_leaf];
   }
......
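This hunk is the setup for histogram subtraction: histograms are built only for the smaller child, and the larger child's histogram is then recovered as parent minus smaller. The parent's cached histogram sits in the left leaf's slot, so when the left child is the smaller one the two cache slots must be swapped first; otherwise building the smaller child's histogram in place would overwrite the parent data the subtraction still needs. A numeric sketch of the subtraction itself (a toy bin struct, not the real `FeatureHistogram` API):

```cpp
#include <iostream>
#include <vector>

struct Bin { int cnt; double sum_g; double sum_h; };

int main() {
  // Parent histogram (already cached) and the freshly built smaller child.
  std::vector<Bin> parent  = {{5, 2.0, 5.0}, {3, -1.0, 3.0}};
  std::vector<Bin> smaller = {{2, 0.5, 2.0}, {1, -0.4, 1.0}};

  // Larger child = parent - smaller, bin by bin: one subtraction replaces
  // a full construction pass over the larger child's data.
  std::vector<Bin> larger(parent.size());
  for (size_t i = 0; i < parent.size(); ++i) {
    larger[i] = {parent[i].cnt - smaller[i].cnt,
                 parent[i].sum_g - smaller[i].sum_g,
                 parent[i].sum_h - smaller[i].sum_h};
  }
  for (const Bin& b : larger)
    std::cout << b.cnt << ' ' << b.sum_g << ' ' << b.sum_h << '\n';
  return 0;
}
```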
@@ -120,7 +120,7 @@ protected:
   DataPartition* data_partition_;
   /*! \brief used for generate used features */
   Random random_;
-  /*! \brief used for sub feature training, is_feature_used_[i] = falase means don't used feature i */
+  /*! \brief used for sub feature training, is_feature_used_[i] = false means don't used feature i */
   bool* is_feature_used_;
   /*! \brief cache historical histogram to speed up */
   FeatureHistogram** historical_histogram_array_;
......
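The fixed comment documents `is_feature_used_`, the mask behind per-tree feature subsampling: features with a `false` entry are skipped during split finding. A sketch of filling such a mask, with `std::mt19937` and a Bernoulli draw standing in for the learner's `Random` member (the real learner may sample features differently; this only shows the idea):

```cpp
#include <iostream>
#include <random>
#include <vector>

int main() {
  const int num_features = 10;
  const double feature_fraction = 0.6;  // fraction of features used per tree

  // is_feature_used[i] = false means feature i is skipped when finding splits.
  std::vector<bool> is_feature_used(num_features, false);
  std::mt19937 rng(42);  // stands in for the learner's Random member
  std::bernoulli_distribution pick(feature_fraction);
  for (int i = 0; i < num_features; ++i) is_feature_used[i] = pick(rng);

  std::cout << "features used this tree: ";
  for (int i = 0; i < num_features; ++i)
    if (is_feature_used[i]) std::cout << i << ' ';
  std::cout << '\n';
  return 0;
}
```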