Unverified Commit f5afaba1 authored by Liwen Fan's avatar Liwen Fan Committed by GitHub
Browse files

Merge branch 'master' into 1492

parents b1ec44fa 1437ce8b
......@@ -59,14 +59,6 @@ test_script:
- IF "%COMPILER%"=="MINGW" appveyor exit # skip all further steps
- cd ../../python-package && python setup.py bdist_wheel --plat-name=win-amd64 --universal
after_test:
- cd ../.nuget
- python create_nuget.py
- nuget.exe pack LightGBM.nuspec
nuget:
project_feed: true
artifacts:
- path: Release/lib_lightgbm.dll
name: Library
......@@ -74,15 +66,13 @@ artifacts:
name: Exe
- path: python-package/dist/*
name: Pip
- path: .nuget/*.nupkg
name: Nuget
deploy:
release: $(APPVEYOR_REPO_TAG_NAME)
provider: GitHub
auth_token:
secure: KR44XwtxY0cLlVpQwY726BvC6gzT0cYTf0ahJ4cSvvS0UVoSJxkR900ICfVXHRoT
artifact: Library,Exe,Pip,Nuget
artifact: Library,Exe,Pip
force_update: true
draft: true
on:
......
......@@ -181,6 +181,7 @@ BundleArtifacts/
ClientBin/
~$*
*~
.*.swp
*.dbmdl
*.dbproj.schemaview
*.pfx
......
import os
import sys
import distutils
from distutils import file_util

# Build the directory layout referenced by the LightGBM.nuspec written below.
# The empty net40/net45 folders make NuGet treat the package as installable
# into projects targeting those .NET Framework versions, even though the
# payload is purely native binaries.
for folder in ("lib", "lib/native", "lib/net40", "lib/net45"):
    if not os.path.exists(folder):
        os.makedirs(folder)
# Copy the freshly built native artifacts into the package layout.
distutils.file_util.copy_file("../Release/lightgbm.exe", "./lib/")
distutils.file_util.copy_file("../Release/lib_lightgbm.dll", "./lib/")
# Use a context manager so the VERSION.txt handle is closed promptly
# (the previous code leaked the file object).
with open('../VERSION.txt') as version_file:
    version = version_file.read().strip()
# NuGet package manifest; %s is replaced with the LightGBM version string.
nuget_str = '''<?xml version="1.0"?>
<package xmlns="http://schemas.microsoft.com/packaging/2013/05/nuspec.xsd">
<metadata>
<id>LightGBM</id>
<version>%s</version>
<authors>Guolin Ke</authors>
<owners>Guolin Ke</owners>
<licenseUrl>https://github.com/Microsoft/LightGBM/blob/master/LICENSE</licenseUrl>
<projectUrl>https://github.com/Microsoft/LightGBM</projectUrl>
<requireLicenseAcceptance>false</requireLicenseAcceptance>
<description>A fast, distributed, high performance gradient boosting framework</description>
<copyright>Copyright 2017 @ Microsoft</copyright>
<tags>machine-learning data-mining distributed native boosting gbdt</tags>
<dependencies> </dependencies>
</metadata>
<files>
<file src="lib\**" target="lib"/>
</files>
</package>
''' % (version)
with open("LightGBM.nuspec", "w") as nuget_file:
    nuget_file.write(nuget_str)
from distutils.file_util import copy_file

if __name__ == "__main__":
    # Usage: python create_nuget.py <folder containing the native libraries>
    # The folder is populated by the per-OS CI phases (PackageAssets artifact).
    if len(sys.argv) < 2:
        sys.exit("Usage: python create_nuget.py <source folder with native libraries>")
    source = sys.argv[1]
    current_dir = os.path.abspath(os.path.dirname(__file__))
    # One runtimes/<rid>/native folder per platform, following the NuGet
    # runtime-identifier (RID) layout convention.
    for rid in ("linux-x64", "osx-x64", "win-x64"):
        native_dir = os.path.join(current_dir, "runtimes", rid, "native")
        if not os.path.exists(native_dir):
            os.makedirs(native_dir)
    copy_file(os.path.join(source, "lib_lightgbm.so"), os.path.join(current_dir, "runtimes/linux-x64/native/lib_lightgbm.so"))
    copy_file(os.path.join(source, "lib_lightgbm.dylib"), os.path.join(current_dir, "runtimes/osx-x64/native/lib_lightgbm.dylib"))
    copy_file(os.path.join(source, "lib_lightgbm.dll"), os.path.join(current_dir, "runtimes/win-x64/native/lib_lightgbm.dll"))
    # Use a context manager so the VERSION.txt handle is closed promptly
    # (the previous code leaked the file object).
    with open(os.path.join(current_dir, '../VERSION.txt')) as version_file:
        version = version_file.read().strip()
    # NuGet package manifest; %s is replaced with the LightGBM version string.
    nuget_str = '''<?xml version="1.0"?>
<package xmlns="http://schemas.microsoft.com/packaging/2013/05/nuspec.xsd">
<metadata>
<id>LightGBM</id>
<version>%s</version>
<authors>Guolin Ke</authors>
<owners>Guolin Ke</owners>
<licenseUrl>https://github.com/Microsoft/LightGBM/blob/master/LICENSE</licenseUrl>
<projectUrl>https://github.com/Microsoft/LightGBM</projectUrl>
<requireLicenseAcceptance>false</requireLicenseAcceptance>
<description>A fast, distributed, high performance gradient boosting framework</description>
<copyright>Copyright 2018 @ Microsoft</copyright>
<tags>machine-learning data-mining distributed native boosting gbdt</tags>
<dependencies> </dependencies>
</metadata>
<files>
<file src="runtimes\**" target="runtimes"/>
</files>
</package>
''' % (version)
    with open(os.path.join(current_dir, "LightGBM.nuspec"), "w") as nuget_file:
        nuget_file.write(nuget_str)
# VSTS/Azure Pipelines build definition: one phase per target OS, plus a
# final packaging phase.  Each matrix entry sets $TASK, which is dispatched
# by .vsts-ci/test.sh.
phases:
###########################################
- phase: Linux
###########################################
  variables:
    PYTHON_VERSION: 3.6
    CONDA_ENV: test-env
  queue:
    name: 'Hosted Linux Preview'
    parallel: 9
    matrix:
      regular:
        TASK: regular
      mpi:
        TASK: mpi
        PYTHON_VERSION: 2.7
      pylint:
        TASK: pylint
      inference:
        TASK: if-else
      sdist:
        TASK: sdist
        PYTHON_VERSION: 3.4
      bdist:
        TASK: bdist
        PYTHON_VERSION: 3.5
      gpu_1:
        TASK: gpu
        METHOD: source
      gpu_2:
        TASK: gpu
        METHOD: pip
  steps:
  - task: CondaEnvironment@0
    inputs:
      environmentName: $(CONDA_ENV)
      packageSpecs: 'python=$(PYTHON_VERSION)' # Optional
      createOptions: '-q'
  - script: |
      sudo apt-get update
      export LGB_VER=$(head -n 1 VERSION.txt)
      export AMDAPPSDK=$HOME/AMDAPPSDK
      export LD_LIBRARY_PATH="$AMDAPPSDK/lib/x86_64:$LD_LIBRARY_PATH"
      export OPENCL_VENDOR_PATH=$AMDAPPSDK/etc/OpenCL/vendors
      # MPI tasks need OpenMPI installed system-wide.
      if [[ $TASK == "mpi" ]]; then
        sudo apt-get install -y libopenmpi-dev openmpi-bin
      fi
      # GPU tasks need an OpenCL runtime; the AMD APP SDK provides a CPU-backed
      # OpenCL implementation usable on GPU-less hosted agents.
      if [[ $TASK == "gpu" ]]; then
        sudo apt-get install --no-install-recommends -y ocl-icd-opencl-dev libboost-dev libboost-system-dev libboost-filesystem-dev
        wget -q https://github.com/Microsoft/LightGBM/releases/download/v2.0.12/AMD-APP-SDKInstaller-v3.0.130.136-GA-linux64.tar.bz2
        tar -xjf AMD-APP-SDK*.tar.bz2
        mkdir -p $OPENCL_VENDOR_PATH
        sh AMD-APP-SDK*.sh --tar -xf -C $AMDAPPSDK
        mv $AMDAPPSDK/lib/x86_64/sdk/* $AMDAPPSDK/lib/x86_64/
        echo libamdocl64.so > $OPENCL_VENDOR_PATH/amdocl64.icd
      fi
      bash .vsts-ci/test.sh
  - task: PublishBuildArtifacts@1
    inputs:
      pathtoPublish: '$(Build.ArtifactStagingDirectory)'
      artifactName: PackageAssets
      artifactType: container
###########################################
- phase: MacOS
###########################################
  variables:
    PYTHON_VERSION: 3.6
    CONDA_ENV: test-env
  queue:
    name: 'Hosted macOS Preview'
    parallel: 3
    matrix:
      regular:
        TASK: regular
        PYTHON_VERSION: 2.7
      sdist:
        TASK: sdist
        PYTHON_VERSION: 3.4
      bdist:
        TASK: bdist
        PYTHON_VERSION: 3.5
  steps:
  - script: |
      # NOTE(review): `test -n $CC` with an unquoted, possibly-empty variable
      # always succeeds, so CC/CXX are unconditionally unset here — confirm
      # whether `test -n "$CC"` was intended.
      test -n $CC && unset CC
      test -n $CXX && unset CXX
      # Remove the conflicting symlink so the Homebrew gcc install succeeds,
      # then build with GCC 8 instead of Apple clang.
      rm '/usr/local/include/c++'
      brew install gcc@8
      export CXX=g++-8
      export CC=gcc-8
      # Install Miniconda matching the major Python version and create the env.
      wget -O conda.sh https://repo.continuum.io/miniconda/Miniconda${PYTHON_VERSION:0:1}-latest-MacOSX-x86_64.sh
      bash conda.sh -b -p $HOME/miniconda
      export PATH=$HOME/miniconda/bin:$PATH
      conda config --set always_yes yes --set changeps1 no
      conda create -q -n $CONDA_ENV python=$PYTHON_VERSION
      source activate $CONDA_ENV
      export LGB_VER=$(head -n 1 VERSION.txt)
      bash .vsts-ci/test.sh
  - task: PublishBuildArtifacts@1
    inputs:
      pathtoPublish: '$(Build.ArtifactStagingDirectory)'
      artifactName: PackageAssets
      artifactType: container
###########################################
- phase: Windows
###########################################
  variables:
    PYTHON_VERSION: 3.6
    CONDA_ENV: test-env
  queue:
    name: 'Hosted VS2017'
    parallel: 4
    matrix:
      regular:
        TASK: regular
      sdist:
        TASK: sdist
        PYTHON_VERSION: 2.7
      bdist:
        TASK: bdist
        PYTHON_VERSION: 3.5
#      mingw:
#        TASK: mingw
  steps:
  - task: CondaEnvironment@0
    inputs:
      environmentName: $(CONDA_ENV)
      packageSpecs: 'python=$(PYTHON_VERSION)' # Optional
      createOptions: '-q'
  - powershell: |
      conda install -q -y -n $env:CONDA_ENV numpy nose scipy scikit-learn pandas matplotlib python-graphviz pytest
      # "regular": full CMake/MSVC build, C API tests, publish the DLL.
      if ("$env:TASK" -eq "regular") {
        mkdir build; cd build
        cmake -DCMAKE_GENERATOR_PLATFORM=x64 .. ; cmake --build . --target ALL_BUILD --config Release
        cd ../python-package; python setup.py install -p
        cd ..
        pytest tests/c_api_test/test_.py
        cp Release/lib_lightgbm.dll $env:BUILD_ARTIFACTSTAGINGDIRECTORY
      }
      # "sdist": build a source distribution and install it from the tarball.
      elseif ("$env:TASK" -eq "sdist"){
        cd python-package; python setup.py sdist --formats gztar
        cd dist; pip install @(Get-ChildItem *.gz) -v
        cd ../..
      }
#      elseif ("$env:TASK" -eq "mingw"){
#        cd python-package; python setup.py sdist --formats gztar
#        cd dist; pip install @(Get-ChildItem *.gz) --install-option=--mingw -v
#        cd ../..
#      }
      # default ("bdist"): build a win-amd64 wheel, install and publish it.
      else {
        cd python-package
        python setup.py bdist_wheel --plat-name=win-amd64 --universal
        cd dist; pip install @(Get-ChildItem *.whl)
        cp @(Get-ChildItem *.whl) $env:BUILD_ARTIFACTSTAGINGDIRECTORY
        cd ../..
      }
      pytest tests/python_package_test
      cd examples/python-guide
      # Force the non-interactive matplotlib backend and disable graphviz
      # viewer pop-ups before running the example scripts.
      @("import matplotlib", "matplotlib.use('Agg')") + (Get-Content "plot_example.py") | Set-Content "plot_example.py"
      (Get-Content "plot_example.py").replace('graph.render(view=True)', 'graph.render(view=False)') | Set-Content "plot_example.py"
      foreach ($file in @(Get-ChildItem *.py)) {
        python $file
        if ($LastExitCode -ne 0) { $host.SetShouldExit($LastExitCode) }
      } # run all examples
  - task: PublishBuildArtifacts@1
    inputs:
      pathtoPublish: '$(Build.ArtifactStagingDirectory)'
      artifactName: PackageAssets
      artifactType: container
################################################################################
# Final phase: collect the native libraries built by the OS phases and bundle
# them into a single NuGet package via .nuget/create_nuget.py.
- phase: Package
################################################################################
  dependsOn:
  - Linux
  - MacOS
  - Windows
  queue:
    name: 'Hosted VS2017'
  steps:
  # Download all agent packages from all previous phases
  - task: DownloadBuildArtifacts@0
    displayName: Download package assets
    inputs:
      artifactName: PackageAssets
      downloadPath: $(Build.SourcesDirectory)/binaries
  - powershell: |
      # Fetch the standalone nuget.exe CLI.
      $client = new-object System.Net.WebClient
      $client.DownloadFile("https://dist.nuget.org/win-x86-commandline/latest/nuget.exe",".nuget/nuget.exe")
  - script: |
      cd .nuget
      python create_nuget.py %BUILD_SOURCESDIRECTORY%/binaries/PackageAssets
      nuget.exe pack LightGBM.nuspec
      xcopy *.nupkg %BUILD_ARTIFACTSTAGINGDIRECTORY%
  - task: PublishBuildArtifacts@1
    inputs:
      pathtoPublish: '$(Build.ArtifactStagingDirectory)'
      artifactName: Nuget
      artifactType: container
#!/bin/bash
# CI test dispatcher.  The $TASK environment variable (set by the pipeline
# matrix) selects which job runs: check-docs, pylint, if-else, sdist, bdist,
# gpu, mpi, or the default build-and-test path.  Each branch exits non-zero
# on failure so the CI phase is marked failed.
cd ${BUILD_REPOSITORY_LOCALPATH}
if [[ $TASK == "check-docs" ]]; then
    # Documentation checks: rstcheck on all reST sources, a Sphinx HTML build,
    # and (Linux agents only) linkchecker over the generated pages.
    if [[ $AGENT_OS == "Linux" ]]; then
        sudo apt-get install linkchecker -y
    fi
    if [[ ${PYTHON_VERSION} == "2.7" ]]; then
        conda install -y -n $CONDA_ENV mock
    fi
    conda install -y -n $CONDA_ENV sphinx "sphinx_rtd_theme>=0.3"  # html5validator
    pip install rstcheck
    cd ${BUILD_REPOSITORY_LOCALPATH}/python-package
    rstcheck --report warning `find . -type f -name "*.rst"` || exit -1
    cd ${BUILD_REPOSITORY_LOCALPATH}/docs
    rstcheck --report warning --ignore-directives=autoclass,autofunction `find . -type f -name "*.rst"` || exit -1
    make html || exit -1
    # Rewrite .rst cross-links to .html in the generated pages.
    find ./_build/html/ -type f -name '*.html' -exec \
    sed -i -e 's;\(\.\/[^.]*\.\)rst\([^[:space:]]*\);\1html\2;g' {} \;  # emulate js function
#    html5validator --root ./_build/html/ || exit -1
    if [[ $AGENT_OS == "Linux" ]]; then
        linkchecker --config=.linkcheckerrc ./_build/html/*.html || exit -1
    fi
    exit 0
fi
if [[ $TASK == "pylint" ]]; then
    # Style check only (pycodestyle despite the task name).
    conda install -y -n $CONDA_ENV pycodestyle
    pycodestyle --ignore=E501,W503 --exclude=./compute,./docs,./.nuget . || exit -1
    exit 0
fi
if [[ $TASK == "if-else" ]]; then
    # Verify that a model converted to C++ if-else code predicts identically
    # to the original model (compared by tests/cpp_test/test.py).
    conda install -y -n $CONDA_ENV numpy
    mkdir build && cd build && cmake .. && make lightgbm || exit -1
    cd ${BUILD_REPOSITORY_LOCALPATH}/tests/cpp_test && ../../lightgbm config=train.conf convert_model_language=cpp convert_model=../../src/boosting/gbdt_prediction.cpp && ../../lightgbm config=predict.conf output_result=origin.pred || exit -1
    cd ${BUILD_REPOSITORY_LOCALPATH}/build && make lightgbm || exit -1
    cd ${BUILD_REPOSITORY_LOCALPATH}/tests/cpp_test && ../../lightgbm config=predict.conf output_result=ifelse.pred && python test.py || exit -1
    exit 0
fi
conda install -q -y -n $CONDA_ENV numpy nose scipy scikit-learn pandas matplotlib python-graphviz pytest
if [[ $TASK == "sdist" ]]; then
    # Build and install from the source tarball, publish it as an artifact.
    cd ${BUILD_REPOSITORY_LOCALPATH}/python-package && python setup.py sdist || exit -1
    pip install ${BUILD_REPOSITORY_LOCALPATH}/python-package/dist/lightgbm-$LGB_VER.tar.gz -v || exit -1
    cp ${BUILD_REPOSITORY_LOCALPATH}/python-package/dist/lightgbm-$LGB_VER.tar.gz ${BUILD_ARTIFACTSTAGINGDIRECTORY}/lightgbm-$LGB_VER.tar.gz
    pytest ${BUILD_REPOSITORY_LOCALPATH}/tests/python_package_test || exit -1
    exit 0
elif [[ $TASK == "bdist" ]]; then
    if [[ $AGENT_OS == "Darwin" ]]; then
        # Build with a placeholder platform tag, then rename the wheel to the
        # macosx multi-version tag expected by pip on release.
        cd ${BUILD_REPOSITORY_LOCALPATH}/python-package && python setup.py bdist_wheel --plat-name=macdarwin --universal || exit -1
        cp dist/lightgbm-$LGB_VER-py2.py3-none-macdarwin.whl ${BUILD_ARTIFACTSTAGINGDIRECTORY}/lightgbm-$LGB_VER-py2.py3-none-macosx_10_6_x86_64.macosx_10_7_x86_64.macosx_10_8_x86_64.macosx_10_9_x86_64.macosx_10_10_x86_64.macosx_10_11_x86_64.macosx_10_12_x86_64.macosx_10_13_x86_64.whl
        mv dist/lightgbm-$LGB_VER-py2.py3-none-macdarwin.whl dist/lightgbm-$LGB_VER-py2.py3-none-macosx_10_6_x86_64.macosx_10_7_x86_64.macosx_10_8_x86_64.macosx_10_9_x86_64.macosx_10_10_x86_64.macosx_10_11_x86_64.macosx_10_12_x86_64.macosx_10_13_x86_64.whl
    else
        cd ${BUILD_REPOSITORY_LOCALPATH}/python-package && python setup.py bdist_wheel --plat-name=manylinux1_x86_64 --universal || exit -1
        cp dist/lightgbm-$LGB_VER-py2.py3-none-manylinux1_x86_64.whl ${BUILD_ARTIFACTSTAGINGDIRECTORY}/lightgbm-$LGB_VER-py2.py3-none-manylinux1_x86_64.whl
    fi
    pip install ${BUILD_REPOSITORY_LOCALPATH}/python-package/dist/*.whl || exit -1
    pytest ${BUILD_REPOSITORY_LOCALPATH}/tests/python_package_test || exit -1
    exit 0
fi
if [[ $TASK == "gpu" ]]; then
    # Flip the compiled-in default device to GPU, then verify the edit took.
    sed -i 's/std::string device_type = "cpu";/std::string device_type = "gpu";/' ${BUILD_REPOSITORY_LOCALPATH}/include/LightGBM/config.h
    grep -q 'std::string device_type = "gpu"' ${BUILD_REPOSITORY_LOCALPATH}/include/LightGBM/config.h || exit -1  # make sure that changes were really done
    if [[ $METHOD == "pip" ]]; then
        cd ${BUILD_REPOSITORY_LOCALPATH}/python-package && python setup.py sdist || exit -1
        pip install ${BUILD_REPOSITORY_LOCALPATH}/python-package/dist/lightgbm-$LGB_VER.tar.gz -v --install-option=--gpu --install-option="--opencl-include-dir=$AMDAPPSDK/include/" || exit -1
        pytest ${BUILD_REPOSITORY_LOCALPATH}/tests/python_package_test || exit -1
        exit 0
    fi
    # METHOD == "source" falls through to the CMake build below.
fi
mkdir build && cd build
if [[ $TASK == "mpi" ]]; then
    cd ${BUILD_REPOSITORY_LOCALPATH}/python-package && python setup.py sdist || exit -1
    pip install ${BUILD_REPOSITORY_LOCALPATH}/python-package/dist/lightgbm-$LGB_VER.tar.gz -v --install-option=--mpi || exit -1
    cd ${BUILD_REPOSITORY_LOCALPATH}/build
    cmake -DUSE_MPI=ON ..
elif [[ $TASK == "gpu" ]]; then
    cmake -DUSE_GPU=ON -DOpenCL_INCLUDE_DIR=$AMDAPPSDK/include/ ..
else
    cmake ..
fi
make _lightgbm || exit -1
cd ${BUILD_REPOSITORY_LOCALPATH}/python-package && python setup.py install --precompile || exit -1
pytest ${BUILD_REPOSITORY_LOCALPATH} || exit -1
if [[ $TASK == "regular" ]]; then
    # Publish the shared library (renamed to .dylib on macOS) and smoke-run
    # the example scripts with a non-interactive matplotlib backend.
    if [[ $AGENT_OS == "Darwin" ]]; then
        cp ${BUILD_REPOSITORY_LOCALPATH}/lib_lightgbm.so ${BUILD_ARTIFACTSTAGINGDIRECTORY}/lib_lightgbm.dylib
    else
        cp ${BUILD_REPOSITORY_LOCALPATH}/lib_lightgbm.so ${BUILD_ARTIFACTSTAGINGDIRECTORY}/lib_lightgbm.so
    fi
    cd ${BUILD_REPOSITORY_LOCALPATH}/examples/python-guide
    sed -i'.bak' '/import lightgbm as lgb/a\
import matplotlib\
matplotlib.use\(\"Agg\"\)\
' plot_example.py  # prevent interactive window mode
    sed -i 's/graph.render(view=True)/graph.render(view=False)/' plot_example.py
    for f in *.py; do python $f || exit -1; done  # run all examples
fi
......@@ -124,12 +124,12 @@ if(MSVC)
CMAKE_CXX_FLAGS_RELEASE
CMAKE_CXX_FLAGS_RELWITHDEBINFO
)
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /W4 /O2 /Ob2 /Oi /Ot /Oy /GL")
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /W4 /O2 /Ob2 /Oi /Ot /Oy /GL /MP")
else()
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fPIC")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -funroll-loops")
endif()
SET(LightGBM_HEADER_DIR ${PROJECT_SOURCE_DIR}/include)
SET(EXECUTABLE_OUTPUT_PATH ${PROJECT_SOURCE_DIR})
......
LightGBM, Light Gradient Boosting Machine
=========================================
[![Build Status](https://lightgbm.visualstudio.com/LightGBM-Build/_apis/build/status/LightGBM-Build-CI?branchName=master)]()
[![Build Status](https://travis-ci.org/Microsoft/LightGBM.svg?branch=master)](https://travis-ci.org/Microsoft/LightGBM)
[![Windows Build Status](https://ci.appveyor.com/api/projects/status/1ys5ot401m0fep6l/branch/master?svg=true)](https://ci.appveyor.com/project/guolinke/lightgbm/branch/master)
[![Documentation Status](https://readthedocs.org/projects/lightgbm/badge/?version=latest)](https://lightgbm.readthedocs.io/)
......
......@@ -201,9 +201,9 @@ LightGBM supports the following metrics:
- MAP
- Multi class log loss
- Multi-class log loss
- Multi class error rate
- Multi-class error rate
- Fair
......
This diff is collapsed.
......@@ -37,8 +37,8 @@ For the setting details, please refer to `Parameters <./Parameters.rst>`__.
Weight and Query/Group Data
~~~~~~~~~~~~~~~~~~~~~~~~~~~
LightGBM also supports weighted training, it needs an additional `weight data <./Parameters.rst#io-parameters>`__.
And it needs an additional `query data <./Parameters.rst#io-parameters>`_ for ranking task.
LightGBM also supports weighted training, it needs an additional `weight data <./Parameters.rst#weight-data>`__.
And it needs an additional `query data <./Parameters.rst#query-data>`_ for ranking task.
Also, weight and query data could be specified as columns in training data in the same manner as label.
......
......@@ -147,7 +147,7 @@ def GenParameterDescription(sections, descriptions, params_rst):
checks_str = ', constraints: ``{0} {1} {2}``'.format(name, sign, number)
else:
checks_str = ''
main_desc = '- ``{0}``, default = ``{1}``, type = {2}{3}{4}{5}'.format(name, default, param_type, options_str, aliases_str, checks_str)
main_desc = '- ``{0}`` :raw-html:`<a id="{0}" title="Permalink to this parameter" href="#{0}">&#x1F517;&#xFE0E;</a>`, default = ``{1}``, type = {2}{3}{4}{5}'.format(name, default, param_type, options_str, aliases_str, checks_str)
params_to_write.append(main_desc)
params_to_write.extend([' ' * 3 * int(desc[0][-1]) + '- ' + desc[1] for desc in param_desc['desc']])
......
......@@ -134,7 +134,7 @@ class LGBMModel(_LGBMModelBase):
min_split_gain=0., min_child_weight=1e-3, min_child_samples=20,
subsample=1., subsample_freq=0, colsample_bytree=1.,
reg_alpha=0., reg_lambda=0., random_state=None,
n_jobs=-1, silent=True, **kwargs):
n_jobs=-1, silent=True, importance_type='split', **kwargs):
"""Construct a gradient boosting model.
Parameters
......@@ -193,6 +193,10 @@ class LGBMModel(_LGBMModelBase):
Number of parallel threads.
silent : bool, optional (default=True)
Whether to print messages while running boosting.
importance_type : str, optional (default='split')
The type of feature importance to be filled into ``feature_importances_``.
If "split", result contains numbers of times the feature is used in a model.
If "gain", result contains total gains of splits which use the feature.
**kwargs : other parameters
Check http://lightgbm.readthedocs.io/en/latest/Parameters.html for more parameters.
......@@ -264,6 +268,7 @@ class LGBMModel(_LGBMModelBase):
self.random_state = random_state
self.n_jobs = n_jobs
self.silent = silent
self.importance_type = importance_type
self._Booster = None
self._evals_result = None
self._best_score = None
......@@ -399,6 +404,7 @@ class LGBMModel(_LGBMModelBase):
if 'verbose' not in params and self.silent:
params['verbose'] = 0
params.pop('silent', None)
params.pop('importance_type', None)
params.pop('n_estimators', None)
params.pop('class_weight', None)
if self._n_classes is not None and self._n_classes > 2:
......@@ -606,11 +612,13 @@ class LGBMModel(_LGBMModelBase):
Note
----
Feature importance in sklearn interface used to normalize to 1,
it's deprecated after 2.0.4 and same as Booster.feature_importance() now.
it's deprecated after 2.0.4 and is the same as Booster.feature_importance() now.
``importance_type`` attribute is passed to the function
to configure the type of importance values to be extracted.
"""
if self._n_features is None:
raise LGBMNotFittedError('No feature_importances found. Need to call fit beforehand.')
return self.booster_.feature_importance()
return self.booster_.feature_importance(importance_type=self.importance_type)
class LGBMRegressor(LGBMModel, _LGBMRegressorBase):
......
......@@ -214,15 +214,13 @@ void Config::CheckParamConflict() {
Log::Fatal("Number of classes must be 1 for non-multiclass training");
}
}
if (is_provide_training_metric || !valid.empty()) {
for (std::string metric_type : metric) {
bool metric_type_multiclass = (CheckMultiClassObjective(metric_type)
|| metric_type == std::string("multi_logloss")
|| metric_type == std::string("multi_error"));
if ((objective_type_multiclass && !metric_type_multiclass)
|| (!objective_type_multiclass && metric_type_multiclass)) {
Log::Fatal("Objective and metrics don't match");
}
for (std::string metric_type : metric) {
bool metric_type_multiclass = (CheckMultiClassObjective(metric_type)
|| metric_type == std::string("multi_logloss")
|| metric_type == std::string("multi_error"));
if ((objective_type_multiclass && !metric_type_multiclass)
|| (!objective_type_multiclass && metric_type_multiclass)) {
Log::Fatal("Multiclass qbjective and metrics don't match");
}
}
......
......@@ -166,6 +166,19 @@ class TestSklearn(unittest.TestCase):
importances = clf.feature_importances_
self.assertEqual(len(importances), 4)
def test_feature_importances_type(self):
    """``importance_type='split'`` and ``'gain'`` should produce different values."""
    model = lgb.LGBMClassifier(n_estimators=100)
    dataset = load_iris()
    model.fit(dataset.data, dataset.target)
    model.set_params(importance_type='split')
    split_importances = model.feature_importances_
    model.set_params(importance_type='gain')
    gain_importances = model.feature_importances_
    # Only the largest importance is compared: the smallest entries of the
    # two vectors may legitimately coincide (e.g. both zero), but the top
    # feature's split count and total gain should not be equal.
    top_split = sorted(split_importances, reverse=True)[0]
    top_gain = sorted(gain_importances, reverse=True)[0]
    self.assertNotEqual(top_split, top_gain)
def test_sklearn_backward_compatibility(self):
iris = load_iris()
X_train, X_test, y_train, y_test = train_test_split(iris.data, iris.target, test_size=0.2, random_state=42)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment