Unverified Commit b9b8b7af authored by Nikita Titov's avatar Nikita Titov Committed by GitHub
Browse files

[docs] generate parameters description from config file. Stage 4 (#1416)

refine Objective Parameters section
parent f5570e18
...@@ -566,58 +566,74 @@ public: ...@@ -566,58 +566,74 @@ public:
#pragma endregion #pragma endregion
#pragma region Objective Parameters #pragma region Objective Parameters
// alias=num_classes
// desc=need to specify this in multi-class classification
int num_class = 1;
// check=>0 // check = >0
// desc=parameter for sigmoid function. Will be used in binary and multiclassova classification and in lambdarank // alias = num_classes
double sigmoid = 1.0; // desc = used only in ``multi-class`` classification application
int num_class = 1;
// desc=parameter for `Huber loss`_ and `Quantile regression`_. Will be used in regression task // alias = unbalanced_sets
double alpha = 0.9; // desc = used only in ``binary`` application
// desc = set this to ``true`` if training data are unbalance
// desc = **Note**: this parameter cannot be used at the same time with ``scale_pos_weight``, choose only **one** of them
bool is_unbalance = false;
// desc=parameter for `Fair loss`_. Will be used in regression task // check = >0.0
double fair_c = 1.0; // desc = used only in ``binary`` application
// desc = weight of labels with positive class
// desc = **Note**: this parameter cannot be used at the same time with ``is_unbalance``, choose only **one** of them
double scale_pos_weight = 1.0;
// desc=parameter for `Poisson regression`_ to safeguard optimization // check = >0.0
double poisson_max_delta_step = 0.7; // desc = used only in ``binary`` and ``multiclassova`` classification and in ``lambdarank`` applications
// desc = parameter for the sigmoid function
double sigmoid = 1.0;
// desc=only used in regression task // desc = used only in ``regression``, ``binary`` and ``cross-entropy`` applications
// desc=adjust initial score to the mean of labels for faster convergence // desc = adjusts initial score to the mean of labels for faster convergence
bool boost_from_average = true; bool boost_from_average = true;
// alias=unbalanced_sets // desc = used only in ``regression`` application
// desc=used in binary classification // desc = used to fit ``sqrt(label)`` instead of original values and prediction result will be also automatically converted to ``prediction^2``
// desc=set this to true if training data are unbalance // desc = might be useful in case of large-range labels
bool is_unbalance = false; bool reg_sqrt = false;
// check=>0 // check = >0.0
// desc=weight of positive class in binary classification task // check = <1.0
double scale_pos_weight = 1.0; // desc = used only in ``huber`` and ``quantile`` ``regression`` applications
// desc = parameter for `Huber loss <https://en.wikipedia.org/wiki/Huber_loss>`__ and `Quantile regression <https://en.wikipedia.org/wiki/Quantile_regression>`__
double alpha = 0.9;
// desc=only used in regression, usually works better for the large-range of labels // check = >0.0
// desc=will fit sqrt(label) instead and prediction result will be also automatically converted to pow2(prediction) // desc = used only in ``fair`` ``regression`` application
bool reg_sqrt = false; // desc = parameter for `Fair loss <https://www.kaggle.com/c/allstate-claims-severity/discussion/24520>`__
double fair_c = 1.0;
// desc=only used in tweedie regression // check = >0.0
// desc=controls the variance of the tweedie distribution // desc = used only in ``poisson`` ``regression`` application
// desc=set closer to 2 to shift towards a gamma distribution // desc = parameter for `Poisson regression <https://en.wikipedia.org/wiki/Poisson_regression>`__ to safeguard optimization
// desc=set closer to 1 to shift towards a poisson distribution double poisson_max_delta_step = 0.7;
double tweedie_variance_power = 1.5;
// default = 0, 1, 3, 7, 15, 31, 63, ..., 2 ^ 30 - 1 // check = >=1.0
// desc=used in lambdarank // check = <2.0
// desc=relevant gain for labels. For example,the gain of label 2 is 3 if using default label gains // desc = used only in ``tweedie`` ``regression`` application
// desc=separate by , // desc = used to control the variance of the tweedie distribution
std::vector<double> label_gain; // desc = set this closer to ``2`` to shift towards a **Gamma** distribution
// desc = set this closer to ``1`` to shift towards a **Poisson** distribution
double tweedie_variance_power = 1.5;
// check=>0 // check = >0
// desc=used in lambdarank // desc = used only in ``lambdarank`` application
// desc=will optimize `NDCG`_ at this position // desc = optimizes `NDCG <https://en.wikipedia.org/wiki/Discounted_cumulative_gain#Normalized_DCG>`__ at this position
int max_position = 20; int max_position = 20;
// type = multi-double
// default = 0,1,3,7,15,31,63,...,2^30-1
// desc = used only in ``lambdarank`` application
// desc = relevant gain for labels. For example, the gain of label ``2`` is ``3`` in case of default label gains
// desc = separate by ``,``
std::vector<double> label_gain;
#pragma endregion #pragma endregion
#pragma region Metric Parameters #pragma region Metric Parameters
......
...@@ -377,7 +377,7 @@ double GBDT::BoostFromAverage() { ...@@ -377,7 +377,7 @@ double GBDT::BoostFromAverage() {
} else if (std::string(objective_function_->GetName()) == std::string("regression_l1") } else if (std::string(objective_function_->GetName()) == std::string("regression_l1")
|| std::string(objective_function_->GetName()) == std::string("quantile") || std::string(objective_function_->GetName()) == std::string("quantile")
|| std::string(objective_function_->GetName()) == std::string("mape")) { || std::string(objective_function_->GetName()) == std::string("mape")) {
Log::Warning("Disable boost_from_average in %s may cause the slow convergence", objective_function_->GetName()); Log::Warning("Disabling boost_from_average in %s may cause the slow convergence", objective_function_->GetName());
} }
} }
return 0.0f; return 0.0f;
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment