Unverified commit bef83598, authored by Nikita Titov and committed via GitHub
Browse files

[python][R-package] removed duplicated code from language wrappers (#2606)

* removed duplicated code from language wrappers

* removed check for resetting metric
parent 509c2e50
......@@ -18,15 +18,6 @@
, "early_stopping"
, "n_iter_no_change"
)
, "metric" = c(
"metric"
, "metrics"
, "metric_types"
)
, "num_class" = c(
"num_class"
, "num_classes"
)
, "num_iterations" = c(
"num_iterations"
, "num_iteration"
......
......@@ -34,21 +34,6 @@ cb.reset.parameters <- function(new_params) {
stop("Env should have a ", sQuote("model"))
}
# Some parameters are not allowed to be changed,
# since changing them would simply wreak havoc
not_allowed <- c(
.PARAMETER_ALIASES()[["num_class"]]
, .PARAMETER_ALIASES()[["metric"]]
, .PARAMETER_ALIASES()[["boosting"]]
)
if (any(pnames %in% not_allowed)) {
stop(
"Parameters "
, paste0(pnames[pnames %in% not_allowed], collapse = ", ")
, " cannot be changed during boosting"
)
}
# Store boosting rounds
nrounds <<- env$end_iteration - env$begin_iteration + 1L
......
......@@ -50,8 +50,6 @@ test_that(".PARAMETER_ALIASES() returns a named list", {
expect_true(is.character(names(param_aliases)))
expect_true(is.character(param_aliases[["boosting"]]))
expect_true(is.character(param_aliases[["early_stopping_round"]]))
expect_true(is.character(param_aliases[["metric"]]))
expect_true(is.character(param_aliases[["num_class"]]))
expect_true(is.character(param_aliases[["num_iterations"]]))
})
......
......@@ -2037,8 +2037,6 @@ class Booster(object):
self : Booster
Booster with new parameters.
"""
if any(metric_alias in params for metric_alias in _ConfigAliases.get("metric")):
self.__need_reload_eval_info = True
params_str = param_dict_to_str(params)
if params_str:
_safe_call(_LIB.LGBM_BoosterResetParameter(
......
......@@ -130,8 +130,6 @@ def reset_parameter(**kwargs):
def _callback(env):
new_parameters = {}
for key, value in kwargs.items():
if key in _ConfigAliases.get("num_class", "boosting", "metric"):
raise RuntimeError("Cannot reset {} during training".format(repr(key)))
if isinstance(value, list):
if len(value) != env.end_iteration - env.begin_iteration:
raise ValueError("Length of list {} has to equal to 'num_boost_round'."
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment