Commit 85e90f21 authored by Hui Xue's avatar Hui Xue
Browse files

update for typo.

parent 1aefcd8a
...@@ -107,7 +107,7 @@ private: ...@@ -107,7 +107,7 @@ private:
*/ */
void UpdateScore(const Tree* tree); void UpdateScore(const Tree* tree);
/*! /*!
* \brief Print Metric result of current iteration * \brief Print metric result of current iteration
* \param iter Current iteration * \param iter Current iteration
*/ */
void OutputMetric(int iter); void OutputMetric(int iter);
...@@ -116,11 +116,11 @@ private: ...@@ -116,11 +116,11 @@ private:
const Dataset* train_data_; const Dataset* train_data_;
/*! \brief Config of gbdt */ /*! \brief Config of gbdt */
const GBDTConfig* gbdt_config_; const GBDTConfig* gbdt_config_;
/*! \brief Tree learner, will use tihs class to learn trees */ /*! \brief Tree learner, will use this class to learn trees */
TreeLearner* tree_learner_; TreeLearner* tree_learner_;
/*! \brief Objective function */ /*! \brief Objective function */
const ObjectiveFunction* object_function_; const ObjectiveFunction* object_function_;
/*! \brief Store and update traning data's score */ /*! \brief Store and update training data's score */
ScoreUpdater* train_score_updater_; ScoreUpdater* train_score_updater_;
/*! \brief Metrics for training data */ /*! \brief Metrics for training data */
std::vector<const Metric*> training_metrics_; std::vector<const Metric*> training_metrics_;
......
...@@ -57,8 +57,8 @@ public: ...@@ -57,8 +57,8 @@ public:
* \brief Like AddScore(const Tree* tree), but only for part of data * \brief Like AddScore(const Tree* tree), but only for part of data
* Used for prediction of training out-of-bag data * Used for prediction of training out-of-bag data
* \param tree Trained tree model * \param tree Trained tree model
* \param data_indices Indices of data that want proccess to * \param data_indices Indices of data that will be processed
* \param data_cnt Number of data that want proccess to * \param data_cnt Number of data that will be processed
*/ */
inline void AddScore(const Tree* tree, const data_size_t* data_indices, inline void AddScore(const Tree* tree, const data_size_t* data_indices,
data_size_t data_cnt) { data_size_t data_cnt) {
......
...@@ -31,12 +31,12 @@ Dataset::Dataset(const char* data_filename, const char* init_score_filename, ...@@ -31,12 +31,12 @@ Dataset::Dataset(const char* data_filename, const char* init_score_filename,
// create text parser // create text parser
parser_ = Parser::CreateParser(data_filename_, 0, nullptr); parser_ = Parser::CreateParser(data_filename_, 0, nullptr);
if (parser_ == nullptr) { if (parser_ == nullptr) {
Log::Stderr("cannot recognise input data format, filename: %s", data_filename_); Log::Stderr("cannot recognize input data format, filename: %s", data_filename_);
} }
// create text reader // create text reader
text_reader_ = new TextReader<data_size_t>(data_filename); text_reader_ = new TextReader<data_size_t>(data_filename);
} else { } else {
// only need to load initilize score, other meta data will load from bin flie // only need to load initial score, other meta data will be loaded from bin file
metadata_.Init(init_score_filename); metadata_.Init(init_score_filename);
Log::Stdout("will load data set from binary file"); Log::Stdout("will load data set from binary file");
parser_ = nullptr; parser_ = nullptr;
...@@ -613,7 +613,7 @@ void Dataset::LoadDataFromBinFile(int rank, int num_machines, bool is_pre_partit ...@@ -613,7 +613,7 @@ void Dataset::LoadDataFromBinFile(int rank, int num_machines, bool is_pre_partit
size_t size_of_metadata = *(reinterpret_cast<size_t*>(buffer)); size_t size_of_metadata = *(reinterpret_cast<size_t*>(buffer));
// re-allocmate space if not enough // re-allocate space if not enough
if (size_of_metadata > buffer_size) { if (size_of_metadata > buffer_size) {
delete[] buffer; delete[] buffer;
buffer_size = size_of_metadata; buffer_size = size_of_metadata;
...@@ -673,7 +673,7 @@ void Dataset::LoadDataFromBinFile(int rank, int num_machines, bool is_pre_partit ...@@ -673,7 +673,7 @@ void Dataset::LoadDataFromBinFile(int rank, int num_machines, bool is_pre_partit
Log::Stderr("binary file format error at feature %d's size", i); Log::Stderr("binary file format error at feature %d's size", i);
} }
size_t size_of_feature = *(reinterpret_cast<size_t*>(buffer)); size_t size_of_feature = *(reinterpret_cast<size_t*>(buffer));
// re-allocmate space if not enough // re-allocate space if not enough
if (size_of_feature > buffer_size) { if (size_of_feature > buffer_size) {
delete[] buffer; delete[] buffer;
buffer_size = size_of_feature; buffer_size = size_of_feature;
......
...@@ -10,7 +10,7 @@ ...@@ -10,7 +10,7 @@
namespace LightGBM { namespace LightGBM {
/*! /*!
* \brief Used to Store bins for dense feature * \brief Used to store bins for dense feature
* Use template to reduce memory cost * Use template to reduce memory cost
*/ */
template <typename VAL_T> template <typename VAL_T>
......
...@@ -13,7 +13,7 @@ ...@@ -13,7 +13,7 @@
namespace LightGBM { namespace LightGBM {
/*! /*!
* \brief Ordered bin for sparse feature . efficient for construct histogram, especally for sparse bin * \brief Ordered bin for sparse feature. Efficient for constructing histograms, especially for sparse bin
* There are 2 advantages for using ordered bin. * There are 2 advantages for using ordered bin.
* 1. group the data by leaf, improve the cache hit. * 1. group the data by leaf, improve the cache hit.
* 2. only store the non-zero bin, which can speed up the histogram construction for sparse feature. * 2. only store the non-zero bin, which can speed up the histogram construction for sparse feature.
......
...@@ -225,7 +225,7 @@ public: ...@@ -225,7 +225,7 @@ public:
} }
private: private:
/*! \brief Output frequently */ /*! \brief Output frequency */
int output_freq_; int output_freq_;
/*! \brief Number of data */ /*! \brief Number of data */
data_size_t num_data_; data_size_t num_data_;
......
...@@ -91,7 +91,7 @@ void SerialTreeLearner::Init(const Dataset* train_data) { ...@@ -91,7 +91,7 @@ void SerialTreeLearner::Init(const Dataset* train_data) {
// initialize ordered gradients and hessians // initialize ordered gradients and hessians
ordered_gradients_ = new score_t[num_data_]; ordered_gradients_ = new score_t[num_data_];
ordered_hessians_ = new score_t[num_data_]; ordered_hessians_ = new score_t[num_data_];
// if has ordered bin, need allocata a buffer to fast split // if has ordered bin, need to allocate a buffer for fast split
if (has_ordered_bin_) { if (has_ordered_bin_) {
is_data_in_leaf_ = new char[num_data_]; is_data_in_leaf_ = new char[num_data_];
} }
...@@ -331,7 +331,7 @@ void SerialTreeLearner::FindBestThresholds() { ...@@ -331,7 +331,7 @@ void SerialTreeLearner::FindBestThresholds() {
// only has root leaf // only has root leaf
if (larger_leaf_splits_ == nullptr || larger_leaf_splits_->LeafIndex() < 0) continue; if (larger_leaf_splits_ == nullptr || larger_leaf_splits_->LeafIndex() < 0) continue;
// construct histgroms for large leaf, we initialize larger leaf as the parent, // construct histograms for large leaf, we initialize larger leaf as the parent,
// so we can just subtract the smaller leaf's histograms // so we can just subtract the smaller leaf's histograms
larger_leaf_histogram_array_[feature_index].Subtract(smaller_leaf_histogram_array_[feature_index]); larger_leaf_histogram_array_[feature_index].Subtract(smaller_leaf_histogram_array_[feature_index]);
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment