Unverified Commit 64f15005 authored by Nikita Titov, committed by GitHub

[python] deprecate `silent` and standalone `verbose` args. Prefer global `verbose` param (#4577)

* deprecate `silent` and standalone `verbose` args. Prefer global `verbose` param

* simplify code

* Rephrase warning messages
parent b7120d27
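
For context, the hunks below steer users toward LightGBM's global `verbose` parameter (set through `params` or keyword arguments) rather than the per-object `silent` flag or the standalone `verbose` argument of `model_from_string`. A minimal sketch of the preferred usage, assuming a small random NumPy dataset:

```python
import numpy as np
import lightgbm as lgb

X = np.random.rand(100, 5)
y = np.random.rand(100)

# Preferred: control logging through the global 'verbose' parameter,
# not through the deprecated silent=True flag.
train_set = lgb.Dataset(X, label=y, params={'verbose': -1})
booster = lgb.train({'objective': 'regression', 'verbose': -1},
                    train_set, num_boost_round=5)
```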
@@ -1123,7 +1123,7 @@ class Dataset:
     """Dataset in LightGBM."""
 
     def __init__(self, data, label=None, reference=None,
-                 weight=None, group=None, init_score=None, silent=False,
+                 weight=None, group=None, init_score=None, silent='warn',
                  feature_name='auto', categorical_feature='auto', params=None,
                  free_raw_data=True):
         """Initialize Dataset.
@@ -1439,6 +1439,11 @@ class Dataset:
                 _log_warning(f'{key} keyword has been found in `params` and will be ignored.\n'
                              f'Please use {key} argument of the Dataset constructor to pass this parameter.')
         # user can set verbose with params, it has higher priority
+        if silent != "warn":
+            _log_warning("'silent' argument is deprecated and will be removed in a future release of LightGBM. "
+                         "Pass 'verbose' parameter via 'params' instead.")
+        else:
+            silent = False
         if not any(verbose_alias in params for verbose_alias in _ConfigAliases.get("verbosity")) and silent:
             params["verbose"] = -1
         # get categorical features
@@ -1769,7 +1774,7 @@ class Dataset:
         return self
 
     def create_valid(self, data, label=None, weight=None, group=None,
-                     init_score=None, silent=False, params=None):
+                     init_score=None, silent='warn', params=None):
         """Create validation data align with current Dataset.
 
         Parameters
@@ -2462,7 +2467,7 @@ class Dataset:
 class Booster:
     """Booster in LightGBM."""
 
-    def __init__(self, params=None, train_set=None, model_file=None, model_str=None, silent=False):
+    def __init__(self, params=None, train_set=None, model_file=None, model_str=None, silent='warn'):
         """Initialize the Booster.
 
         Parameters
@@ -2488,6 +2493,11 @@ class Booster:
         self.best_score = {}
         params = {} if params is None else deepcopy(params)
         # user can set verbose with params, it has higher priority
+        if silent != 'warn':
+            _log_warning("'silent' argument is deprecated and will be removed in a future release of LightGBM. "
+                         "Pass 'verbose' parameter via 'params' instead.")
+        else:
+            silent = False
         if not any(verbose_alias in params for verbose_alias in _ConfigAliases.get("verbosity")) and silent:
             params["verbose"] = -1
         if train_set is not None:
@@ -2574,7 +2584,7 @@ class Booster:
             self.__num_class = out_num_class.value
             self.pandas_categorical = _load_pandas_categorical(file_name=model_file)
         elif model_str is not None:
-            self.model_from_string(model_str, not silent)
+            self.model_from_string(model_str, verbose="_silent_false")
         else:
             raise TypeError('Need at least one training dataset or model file or model string '
                             'to create Booster instance')
@@ -3255,7 +3265,7 @@ class Booster:
                 ctypes.c_int(end_iteration)))
         return self
 
-    def model_from_string(self, model_str, verbose=True):
+    def model_from_string(self, model_str, verbose='warn'):
         """Load Booster from a string.
 
         Parameters
@@ -3283,6 +3293,10 @@ class Booster:
         _safe_call(_LIB.LGBM_BoosterGetNumClasses(
             self.handle,
             ctypes.byref(out_num_class)))
+        if verbose in {'warn', '_silent_false'}:
+            verbose = verbose == 'warn'
+        else:
+            _log_warning("'verbose' argument is deprecated and will be removed in a future release of LightGBM.")
         if verbose:
             _log_info(f'Finished loading model, total used {int(out_num_iterations.value)} iterations')
         self.__num_class = out_num_class.value
...
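
The `'warn'` string default used in the hunks above is the usual sentinel trick for deprecations: it lets the code distinguish "the caller explicitly passed `silent`" from "the caller left the default", so only explicit uses emit the warning. A stripped-down, hypothetical illustration of the pattern (not LightGBM code):

```python
import warnings


def make_dataset(data, silent='warn'):
    """Toy illustration of the string-sentinel deprecation pattern."""
    if silent != 'warn':
        # the caller passed silent explicitly, so warn and honour the value
        warnings.warn("'silent' is deprecated; pass 'verbose' via params instead.",
                      FutureWarning)
    else:
        # the caller relied on the default, keep the old behaviour without a warning
        silent = False
    return {'data': data, 'silent': silent}


make_dataset([1, 2, 3])               # no warning
make_dataset([1, 2, 3], silent=True)  # emits the deprecation warning
```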
@@ -1108,7 +1108,7 @@ class DaskLGBMClassifier(LGBMClassifier, _DaskLGBMModel):
         reg_lambda: float = 0.,
         random_state: Optional[Union[int, np.random.RandomState]] = None,
         n_jobs: int = -1,
-        silent: bool = True,
+        silent: bool = "warn",
         importance_type: str = 'split',
         client: Optional[Client] = None,
         **kwargs: Any
@@ -1288,7 +1288,7 @@ class DaskLGBMRegressor(LGBMRegressor, _DaskLGBMModel):
         reg_lambda: float = 0.,
         random_state: Optional[Union[int, np.random.RandomState]] = None,
         n_jobs: int = -1,
-        silent: bool = True,
+        silent: bool = "warn",
         importance_type: str = 'split',
         client: Optional[Client] = None,
         **kwargs: Any
@@ -1448,7 +1448,7 @@ class DaskLGBMRanker(LGBMRanker, _DaskLGBMModel):
         reg_lambda: float = 0.,
         random_state: Optional[Union[int, np.random.RandomState]] = None,
         n_jobs: int = -1,
-        silent: bool = True,
+        silent: bool = "warn",
         importance_type: str = 'split',
         client: Optional[Client] = None,
         **kwargs: Any
...
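
The Dask estimators only change the `silent` default here; the actual deprecation warning comes from the shared `LGBMModel` fit path shown further below. A rough sketch of the preferred call, assuming a throwaway local Dask cluster for illustration:

```python
import lightgbm as lgb
from dask.distributed import Client

client = Client()  # assumes a local Dask cluster, for illustration only

# Pass verbosity as a keyword argument instead of the deprecated silent=True.
dask_reg = lgb.DaskLGBMRegressor(n_estimators=10, verbose=-1, client=client)
```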
@@ -299,7 +299,7 @@ def train(
         for dataset_name, eval_name, score, _ in evaluation_result_list:
             booster.best_score[dataset_name][eval_name] = score
     if not keep_training_booster:
-        booster.model_from_string(booster.model_to_string(), False).free_dataset()
+        booster.model_from_string(booster.model_to_string(), verbose='_silent_false').free_dataset()
     return booster
...
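
`train()` round-trips the finished Booster through its string form to drop dataset references; the internal `'_silent_false'` sentinel keeps that call from triggering the new deprecation warning or the "Finished loading model" log line. User code doing the same round trip can simply omit the deprecated `verbose` argument, e.g. (continuing the earlier sketch where `booster` was trained):

```python
# Serialize the trained model to a string and restore it without
# the deprecated 'verbose' argument.
model_str = booster.model_to_string()
restored = lgb.Booster(model_str=model_str)
```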
@@ -369,7 +369,7 @@ class LGBMModel(_LGBMModelBase):
         reg_lambda: float = 0.,
         random_state: Optional[Union[int, np.random.RandomState]] = None,
         n_jobs: int = -1,
-        silent: bool = True,
+        silent: Union[bool, str] = 'warn',
         importance_type: str = 'split',
         **kwargs
     ):
@@ -590,7 +590,13 @@ class LGBMModel(_LGBMModelBase):
         evals_result = {}
         params = self.get_params()
         # user can set verbose with kwargs, it has higher priority
-        if not any(verbose_alias in params for verbose_alias in _ConfigAliases.get("verbosity")) and self.silent:
+        if self.silent != "warn":
+            _log_warning("'silent' argument is deprecated and will be removed in a future release of LightGBM. "
+                         "Pass 'verbose' parameter via keyword arguments instead.")
+            silent = self.silent
+        else:
+            silent = True
+        if not any(verbose_alias in params for verbose_alias in _ConfigAliases.get("verbosity")) and silent:
             params['verbose'] = -1
         params.pop('silent', None)
         params.pop('importance_type', None)
...
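
On the scikit-learn side the same guidance applies: an explicit `silent=...` now warns, and verbosity is better passed as a keyword argument, which is forwarded into the underlying params. A brief sketch, assuming a small random classification dataset:

```python
import numpy as np
import lightgbm as lgb

X = np.random.rand(100, 5)
y = np.random.randint(0, 2, size=100)

# Preferred: verbosity via keyword arguments (forwarded into params) ...
clf = lgb.LGBMClassifier(n_estimators=10, verbose=-1).fit(X, y)

# ... instead of the deprecated flag, which now emits a warning:
# clf = lgb.LGBMClassifier(n_estimators=10, silent=True).fit(X, y)
```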