Unverified Commit 5f261c42 authored by James Lamb's avatar James Lamb Committed by GitHub
Browse files

[python-package] fix mypy errors about scikit-learn properties (#5788)

* [python-package] fix mypy errors about scikit-learn properties

* Update python-package/lightgbm/sklearn.py
parent ce0813ef
...@@ -921,7 +921,7 @@ def _predict( ...@@ -921,7 +921,7 @@ def _predict(
elif isinstance(data, dask_Array): elif isinstance(data, dask_Array):
# for multi-class classification with sparse matrices, pred_contrib predictions # for multi-class classification with sparse matrices, pred_contrib predictions
# are returned as a list of sparse matrices (one per class) # are returned as a list of sparse matrices (one per class)
num_classes = model._n_classes or -1 num_classes = model._n_classes
if ( if (
num_classes > 2 num_classes > 2
......
...@@ -535,7 +535,7 @@ class LGBMModel(_LGBMModelBase): ...@@ -535,7 +535,7 @@ class LGBMModel(_LGBMModelBase):
self._n_features: int = -1 self._n_features: int = -1
self._n_features_in: int = -1 self._n_features_in: int = -1
self._classes: Optional[np.ndarray] = None self._classes: Optional[np.ndarray] = None
self._n_classes: Optional[int] = None self._n_classes: int = -1
self.set_params(**kwargs) self.set_params(**kwargs)
def _more_tags(self) -> Dict[str, Any]: def _more_tags(self) -> Dict[str, Any]:
...@@ -641,7 +641,7 @@ class LGBMModel(_LGBMModelBase): ...@@ -641,7 +641,7 @@ class LGBMModel(_LGBMModelBase):
if isinstance(params['random_state'], np.random.RandomState): if isinstance(params['random_state'], np.random.RandomState):
params['random_state'] = params['random_state'].randint(np.iinfo(np.int32).max) params['random_state'] = params['random_state'].randint(np.iinfo(np.int32).max)
if self._n_classes is not None and self._n_classes > 2: if self._n_classes > 2:
for alias in _ConfigAliases.get('num_class'): for alias in _ConfigAliases.get('num_class'):
params.pop(alias, None) params.pop(alias, None)
params['num_class'] = self._n_classes params['num_class'] = self._n_classes
...@@ -881,9 +881,11 @@ class LGBMModel(_LGBMModelBase): ...@@ -881,9 +881,11 @@ class LGBMModel(_LGBMModelBase):
predict_params = _choose_param_value("num_threads", predict_params, self.n_jobs) predict_params = _choose_param_value("num_threads", predict_params, self.n_jobs)
predict_params["num_threads"] = self._process_n_jobs(predict_params["num_threads"]) predict_params["num_threads"] = self._process_n_jobs(predict_params["num_threads"])
return self._Booster.predict(X, raw_score=raw_score, start_iteration=start_iteration, num_iteration=num_iteration, return self._Booster.predict( # type: ignore[union-attr]
pred_leaf=pred_leaf, pred_contrib=pred_contrib, validate_features=validate_features, X, raw_score=raw_score, start_iteration=start_iteration, num_iteration=num_iteration,
**predict_params) pred_leaf=pred_leaf, pred_contrib=pred_contrib, validate_features=validate_features,
**predict_params
)
predict.__doc__ = _lgbmmodel_doc_predict.format( predict.__doc__ = _lgbmmodel_doc_predict.format(
description="Return the predicted value for each sample.", description="Return the predicted value for each sample.",
...@@ -956,7 +958,7 @@ class LGBMModel(_LGBMModelBase): ...@@ -956,7 +958,7 @@ class LGBMModel(_LGBMModelBase):
"""Booster: The underlying Booster of this model.""" """Booster: The underlying Booster of this model."""
if not self.__sklearn_is_fitted__(): if not self.__sklearn_is_fitted__():
raise LGBMNotFittedError('No booster found. Need to call fit beforehand.') raise LGBMNotFittedError('No booster found. Need to call fit beforehand.')
return self._Booster return self._Booster # type: ignore[return-value]
@property @property
def evals_result_(self) -> _EvalResultDict: def evals_result_(self) -> _EvalResultDict:
...@@ -976,14 +978,14 @@ class LGBMModel(_LGBMModelBase): ...@@ -976,14 +978,14 @@ class LGBMModel(_LGBMModelBase):
""" """
if not self.__sklearn_is_fitted__(): if not self.__sklearn_is_fitted__():
raise LGBMNotFittedError('No feature_importances found. Need to call fit beforehand.') raise LGBMNotFittedError('No feature_importances found. Need to call fit beforehand.')
return self._Booster.feature_importance(importance_type=self.importance_type) return self._Booster.feature_importance(importance_type=self.importance_type) # type: ignore[union-attr]
@property @property
def feature_name_(self) -> List[str]: def feature_name_(self) -> List[str]:
""":obj:`list` of shape = [n_features]: The names of features.""" """:obj:`list` of shape = [n_features]: The names of features."""
if not self.__sklearn_is_fitted__(): if not self.__sklearn_is_fitted__():
raise LGBMNotFittedError('No feature_name found. Need to call fit beforehand.') raise LGBMNotFittedError('No feature_name found. Need to call fit beforehand.')
return self._Booster.feature_name() return self._Booster.feature_name() # type: ignore[union-attr]
class LGBMRegressor(_LGBMRegressorBase, LGBMModel): class LGBMRegressor(_LGBMRegressorBase, LGBMModel):
...@@ -1062,7 +1064,7 @@ class LGBMClassifier(_LGBMClassifierBase, LGBMModel): ...@@ -1062,7 +1064,7 @@ class LGBMClassifier(_LGBMClassifierBase, LGBMModel):
self._class_weight = {self._class_map[k]: v for k, v in self.class_weight.items()} self._class_weight = {self._class_map[k]: v for k, v in self.class_weight.items()}
self._classes = self._le.classes_ self._classes = self._le.classes_
self._n_classes = len(self._classes) self._n_classes = len(self._classes) # type: ignore[arg-type]
# adjust eval metrics to match whether binary or multiclass # adjust eval metrics to match whether binary or multiclass
# classification is being performed # classification is being performed
...@@ -1180,7 +1182,7 @@ class LGBMClassifier(_LGBMClassifierBase, LGBMModel): ...@@ -1180,7 +1182,7 @@ class LGBMClassifier(_LGBMClassifierBase, LGBMModel):
"due to the usage of customized objective function.\n" "due to the usage of customized objective function.\n"
"Returning raw scores instead.") "Returning raw scores instead.")
return result return result
elif self._n_classes > 2 or raw_score or pred_leaf or pred_contrib: elif self._n_classes > 2 or raw_score or pred_leaf or pred_contrib: # type: ignore [operator]
return result return result
else: else:
return np.vstack((1. - result, result)).transpose() return np.vstack((1. - result, result)).transpose()
...@@ -1199,7 +1201,7 @@ class LGBMClassifier(_LGBMClassifierBase, LGBMModel): ...@@ -1199,7 +1201,7 @@ class LGBMClassifier(_LGBMClassifierBase, LGBMModel):
""":obj:`array` of shape = [n_classes]: The class label array.""" """:obj:`array` of shape = [n_classes]: The class label array."""
if not self.__sklearn_is_fitted__(): if not self.__sklearn_is_fitted__():
raise LGBMNotFittedError('No classes found. Need to call fit beforehand.') raise LGBMNotFittedError('No classes found. Need to call fit beforehand.')
return self._classes return self._classes # type: ignore[return-value]
@property @property
def n_classes_(self) -> int: def n_classes_(self) -> int:
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment