"git@developer.sourcefind.cn:tianlh/lightgbm-dcu.git" did not exist on "6373356735f8e5126b6e3472f9d71d88051ce5fa"
Unverified Commit 8478020e authored by Nikita Titov's avatar Nikita Titov Committed by GitHub
Browse files

[cpp] rename is_finished to produced_empty_tree in cpp and R code (#6970)


Co-authored-by: default avatarJames Lamb <jaylamb20@gmail.com>
parent 27dcf820
......@@ -739,8 +739,8 @@ SEXP LGBM_BoosterGetNumFeature_R(SEXP handle) {
/*!
 * \brief R wrapper: run one boosting iteration on the booster held by ``handle``.
 * \param handle External pointer to a booster; must be non-NULL
 * \return R_NilValue (the out-flag from the C API is not surfaced to R here)
 */
SEXP LGBM_BoosterUpdateOneIter_R(SEXP handle) {
  R_API_BEGIN();
  _AssertBoosterHandleNotNull(handle);
  // Out-parameter for the C API: set to 1 when the produced tree(s) had no splits.
  int produced_empty_tree = 0;
  CHECK_CALL(LGBM_BoosterUpdateOneIter(R_ExternalPtrAddr(handle), &produced_empty_tree));
  return R_NilValue;
  R_API_END();
}
......@@ -751,12 +751,13 @@ SEXP LGBM_BoosterUpdateOneIterCustom_R(SEXP handle,
SEXP len) {
R_API_BEGIN();
_AssertBoosterHandleNotNull(handle);
int is_finished = 0;
int produced_empty_tree = 0;
int int_len = Rf_asInteger(len);
std::unique_ptr<float[]> tgrad(new float[int_len]), thess(new float[int_len]);
std::copy(REAL(grad), REAL(grad) + int_len, tgrad.get());
std::copy(REAL(hess), REAL(hess) + int_len, thess.get());
CHECK_CALL(LGBM_BoosterUpdateOneIterCustom(R_ExternalPtrAddr(handle), tgrad.get(), thess.get(), &is_finished));
CHECK_CALL(LGBM_BoosterUpdateOneIterCustom(R_ExternalPtrAddr(handle), tgrad.get(), thess.get(),
&produced_empty_tree));
return R_NilValue;
R_API_END();
}
......
......@@ -759,11 +759,15 @@ LIGHTGBM_C_EXPORT int LGBM_BoosterGetNumClasses(BoosterHandle handle,
/*!
 * \brief Update the model for one iteration.
 * \param handle Handle of booster
 * \param[out] produced_empty_tree 1 means the tree(s) produced by this iteration did not have any splits.
 *             This usually means that training is "finished" (calling this function again will not change the model's predictions).
 *             However, that is not always the case.
 *             For example, if you have added any randomness (like column sampling by setting ``feature_fraction_bynode < 1.0``),
 *             it is possible that another call to this function would produce a non-empty tree.
 * \return 0 when succeed, -1 when failure happens
 */
LIGHTGBM_C_EXPORT int LGBM_BoosterUpdateOneIter(BoosterHandle handle,
                                                int* produced_empty_tree);
/*!
* \brief Refit the tree model using the new data (online learning).
......@@ -787,13 +791,17 @@ LIGHTGBM_C_EXPORT int LGBM_BoosterRefit(BoosterHandle handle,
* \param handle Handle of booster
* \param grad The first order derivative (gradient) statistics
* \param hess The second order derivative (Hessian) statistics
* \param[out] is_finished 1 means the update was successfully finished (cannot split any more), 0 indicates failure
* \param[out] produced_empty_tree 1 means the tree(s) produced by this iteration did not have any splits.
* This usually means that training is "finished" (calling this function again will not change the model's predictions).
* However, that is not always the case.
* For example, if you have added any randomness (like column sampling by setting ``feature_fraction_bynode < 1.0``),
* it is possible that another call to this function would produce a non-empty tree.
* \return 0 when succeed, -1 when failure happens
*/
LIGHTGBM_C_EXPORT int LGBM_BoosterUpdateOneIterCustom(BoosterHandle handle,
const float* grad,
const float* hess,
int* is_finished);
int* produced_empty_tree);
/*!
* \brief Rollback one iteration.
......
......@@ -2059,13 +2059,13 @@ int LGBM_BoosterRefit(BoosterHandle handle, const int32_t* leaf_preds, int32_t n
API_END();
}
// Run one boosting iteration.
// Sets *produced_empty_tree to 1 when TrainOneIter() reports that the new
// tree(s) had no splits (training has effectively converged), 0 otherwise.
int LGBM_BoosterUpdateOneIter(BoosterHandle handle, int* produced_empty_tree) {
  API_BEGIN();
  Booster* ref_booster = reinterpret_cast<Booster*>(handle);
  if (ref_booster->TrainOneIter()) {
    *produced_empty_tree = 1;
  } else {
    *produced_empty_tree = 0;
  }
  API_END();
}
......@@ -2073,20 +2073,20 @@ int LGBM_BoosterUpdateOneIter(BoosterHandle handle, int* is_finished) {
// Run one boosting iteration with user-supplied gradient/Hessian statistics.
// Sets *produced_empty_tree to 1 when the new tree(s) had no splits, 0 otherwise.
// Not supported when scores are stored as double (SCORE_T_USE_DOUBLE): the
// float-typed grad/hess arrays cannot be used, so this aborts with Log::Fatal.
int LGBM_BoosterUpdateOneIterCustom(BoosterHandle handle,
                                    const float* grad,
                                    const float* hess,
                                    int* produced_empty_tree) {
  API_BEGIN();
#ifdef SCORE_T_USE_DOUBLE
  (void) handle;  // UNUSED VARIABLE
  (void) grad;  // UNUSED VARIABLE
  (void) hess;  // UNUSED VARIABLE
  (void) produced_empty_tree;  // UNUSED VARIABLE
  Log::Fatal("Don't support custom loss function when SCORE_T_USE_DOUBLE is enabled");
#else
  Booster* ref_booster = reinterpret_cast<Booster*>(handle);
  if (ref_booster->TrainOneIter(grad, hess)) {
    *produced_empty_tree = 1;
  } else {
    *produced_empty_tree = 0;
  }
#endif
  API_END();
......
......@@ -184,9 +184,9 @@ def test_booster(tmp_path):
model_path = tmp_path / "model.txt"
LIB.LGBM_BoosterCreate(train, c_str("app=binary metric=auc num_leaves=31 verbose=0"), ctypes.byref(booster))
LIB.LGBM_BoosterAddValidData(booster, test)
is_finished = ctypes.c_int(0)
produced_empty_tree = ctypes.c_int(0)
for i in range(1, 51):
LIB.LGBM_BoosterUpdateOneIter(booster, ctypes.byref(is_finished))
LIB.LGBM_BoosterUpdateOneIter(booster, ctypes.byref(produced_empty_tree))
result = np.array([0.0], dtype=np.float64)
out_len = ctypes.c_int(0)
LIB.LGBM_BoosterGetEval(
......
......@@ -25,10 +25,10 @@ void test_predict_type(int predict_type, int num_predicts) {
EXPECT_EQ(0, result) << "LGBM_BoosterCreate result code: " << result;
// Train for 51 iterations; each call must report success (return code 0).
for (int i = 0; i < 51; i++) {
  int produced_empty_tree;
  result = LGBM_BoosterUpdateOneIter(
      booster_handle,
      &produced_empty_tree);
  EXPECT_EQ(0, result) << "LGBM_BoosterUpdateOneIter result code: " << result;
}
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment