Unverified commit 0d3f13a3 authored by chicm-ms, committed by GitHub

Refactor builtin/customized tuner installation (#3134)

parent c48623d6
......@@ -34,8 +34,7 @@ jobs:
python3 -m pip install gym onnx peewee thop --user
python3 -m pip install sphinx==1.8.3 sphinx-argparse==0.2.5 sphinx-markdown-tables==0.0.9 sphinx-rtd-theme==0.4.2 sphinxcontrib-websupport==1.1.0 recommonmark==0.5.0 nbsphinx --user
sudo apt-get install swig -y
nnictl package install --name=SMAC
nnictl package install --name=BOHB
python3 -m pip install -e .[SMAC,BOHB]
displayName: 'Install dependencies'
- script: |
cd test
......@@ -73,8 +72,7 @@ jobs:
python3 -m pip install keras==2.1.6 --user
python3 -m pip install gym onnx peewee --user
sudo apt-get install swig -y
nnictl package install --name=SMAC
nnictl package install --name=BOHB
python3 -m pip install -e .[SMAC,BOHB]
displayName: 'Install dependencies'
- script: |
set -e
......
advisors:
- builtinName: Hyperband
classArgsValidator: nni.algorithms.hpo.hyperband_advisor.hyperband_advisor.HyperbandClassArgsValidator
className: nni.algorithms.hpo.hyperband_advisor.hyperband_advisor.Hyperband
source: nni
- builtinName: BOHB
classArgsValidator: nni.algorithms.hpo.bohb_advisor.bohb_advisor.BOHBClassArgsValidator
className: nni.algorithms.hpo.bohb_advisor.bohb_advisor.BOHB
source: nni
assessors:
- builtinName: Medianstop
classArgsValidator: nni.algorithms.hpo.medianstop_assessor.medianstop_assessor.MedianstopClassArgsValidator
className: nni.algorithms.hpo.medianstop_assessor.medianstop_assessor.MedianstopAssessor
source: nni
- builtinName: Curvefitting
classArgsValidator: nni.algorithms.hpo.curvefitting_assessor.curvefitting_assessor.CurvefittingClassArgsValidator
className: nni.algorithms.hpo.curvefitting_assessor.curvefitting_assessor.CurvefittingAssessor
source: nni
tuners:
- builtinName: PPOTuner
classArgsValidator: nni.algorithms.hpo.ppo_tuner.ppo_tuner.PPOClassArgsValidator
className: nni.algorithms.hpo.ppo_tuner.ppo_tuner.PPOTuner
source: nni
- builtinName: SMAC
classArgsValidator: nni.algorithms.hpo.smac_tuner.smac_tuner.SMACClassArgsValidator
className: nni.algorithms.hpo.smac_tuner.smac_tuner.SMACTuner
source: nni
- builtinName: TPE
classArgs:
algorithm_name: tpe
classArgsValidator: nni.algorithms.hpo.hyperopt_tuner.hyperopt_tuner.HyperoptClassArgsValidator
className: nni.algorithms.hpo.hyperopt_tuner.hyperopt_tuner.HyperoptTuner
source: nni
- acceptClassArgs: false
builtinName: Random
classArgs:
algorithm_name: random_search
classArgsValidator: nni.algorithms.hpo.hyperopt_tuner.hyperopt_tuner.HyperoptClassArgsValidator
className: nni.algorithms.hpo.hyperopt_tuner.hyperopt_tuner.HyperoptTuner
source: nni
- builtinName: Anneal
classArgs:
algorithm_name: anneal
classArgsValidator: nni.algorithms.hpo.hyperopt_tuner.hyperopt_tuner.HyperoptClassArgsValidator
className: nni.algorithms.hpo.hyperopt_tuner.hyperopt_tuner.HyperoptTuner
source: nni
- builtinName: Evolution
classArgsValidator: nni.algorithms.hpo.evolution_tuner.evolution_tuner.EvolutionClassArgsValidator
className: nni.algorithms.hpo.evolution_tuner.evolution_tuner.EvolutionTuner
source: nni
- acceptClassArgs: false
builtinName: BatchTuner
className: nni.algorithms.hpo.batch_tuner.batch_tuner.BatchTuner
source: nni
- acceptClassArgs: false
builtinName: GridSearch
className: nni.algorithms.hpo.gridsearch_tuner.gridsearch_tuner.GridSearchTuner
source: nni
- builtinName: NetworkMorphism
classArgsValidator: nni.algorithms.hpo.networkmorphism_tuner.networkmorphism_tuner.NetworkMorphismClassArgsValidator
className: nni.algorithms.hpo.networkmorphism_tuner.networkmorphism_tuner.NetworkMorphismTuner
source: nni
- builtinName: MetisTuner
classArgsValidator: nni.algorithms.hpo.metis_tuner.metis_tuner.MetisClassArgsValidator
className: nni.algorithms.hpo.metis_tuner.metis_tuner.MetisTuner
source: nni
- builtinName: GPTuner
classArgsValidator: nni.algorithms.hpo.gp_tuner.gp_tuner.GPClassArgsValidator
className: nni.algorithms.hpo.gp_tuner.gp_tuner.GPTuner
source: nni
- builtinName: PBTTuner
classArgsValidator: nni.algorithms.hpo.pbt_tuner.pbt_tuner.PBTClassArgsValidator
className: nni.algorithms.hpo.pbt_tuner.pbt_tuner.PBTTuner
source: nni
- builtinName: RegularizedEvolutionTuner
classArgsValidator: nni.algorithms.hpo.regularized_evolution_tuner.regularized_evolution_tuner.EvolutionClassArgsValidator
className: nni.algorithms.hpo.regularized_evolution_tuner.regularized_evolution_tuner.RegularizedEvolutionTuner
source: nni
......@@ -46,7 +46,7 @@ The sampling procedure (using Multidimensional KDE to guide selection) is summar
BOHB advisor requires the [ConfigSpace](https://github.com/automl/ConfigSpace) package. ConfigSpace can be installed using the following command.
```bash
nnictl package install --name=BOHB
pip install nni[BOHB]
```
To use BOHB, you should add the following spec in your experiment's YAML config file:
......
......@@ -12,7 +12,7 @@ Currently, we support the following algorithms:
|[__Random Search__](#Random)|In Random Search for Hyper-Parameter Optimization show that Random Search might be surprisingly simple and effective. We suggest that we could use Random Search as the baseline when we have no knowledge about the prior distribution of hyper-parameters. [Reference Paper](http://www.jmlr.org/papers/volume13/bergstra12a/bergstra12a.pdf)|
|[__Anneal__](#Anneal)|This simple annealing algorithm begins by sampling from the prior, but tends over time to sample from points closer and closer to the best ones observed. This algorithm is a simple variation on the random search that leverages smoothness in the response surface. The annealing rate is not adaptive.|
|[__Naïve Evolution__](#Evolution)|Naïve Evolution comes from Large-Scale Evolution of Image Classifiers. It randomly initializes a population-based on search space. For each generation, it chooses better ones and does some mutation (e.g., change a hyperparameter, add/remove one layer) on them to get the next generation. Naïve Evolution requires many trials to work, but it's very simple and easy to expand new features. [Reference paper](https://arxiv.org/pdf/1703.01041.pdf)|
|[__SMAC__](#SMAC)|SMAC is based on Sequential Model-Based Optimization (SMBO). It adapts the most prominent previously used model class (Gaussian stochastic process models) and introduces the model class of random forests to SMBO, in order to handle categorical parameters. The SMAC supported by NNI is a wrapper on the SMAC3 GitHub repo. Notice, SMAC needs to be installed by `nnictl package` command. [Reference Paper,](https://www.cs.ubc.ca/~hutter/papers/10-TR-SMAC.pdf) [GitHub Repo](https://github.com/automl/SMAC3)|
|[__SMAC__](#SMAC)|SMAC is based on Sequential Model-Based Optimization (SMBO). It adapts the most prominent previously used model class (Gaussian stochastic process models) and introduces the model class of random forests to SMBO, in order to handle categorical parameters. The SMAC supported by NNI is a wrapper on the SMAC3 GitHub repo. Notice that SMAC's dependencies need to be installed by the `pip install nni[SMAC]` command. [Reference Paper,](https://www.cs.ubc.ca/~hutter/papers/10-TR-SMAC.pdf) [GitHub Repo](https://github.com/automl/SMAC3)|
|[__Batch tuner__](#Batch)|Batch tuner allows users to simply provide several configurations (i.e., choices of hyper-parameters) for their trial code. After finishing all the configurations, the experiment is done. Batch tuner only supports the type choice in search space spec.|
|[__Grid Search__](#GridSearch)|Grid Search performs an exhaustive searching through a manually specified subset of the hyperparameter space defined in the searchspace file. Note that the only acceptable types of search space are choice, quniform, randint. |
|[__Hyperband__](#Hyperband)|Hyperband tries to use limited resources to explore as many configurations as possible and returns the most promising ones as a final result. The basic idea is to generate many configurations and run them for a small number of trials. The half least-promising configurations are thrown out, the remaining are further trained along with a selection of new configurations. The size of these populations is sensitive to resource constraints (e.g. allotted search time). [Reference Paper](https://arxiv.org/pdf/1603.06560.pdf)|
......@@ -27,7 +27,9 @@ Currently, we support the following algorithms:
Using a built-in tuner provided by the NNI SDK requires one to declare the **builtinTunerName** and **classArgs** in the `config.yml` file. In this part, we will introduce each tuner along with information about usage and suggested scenarios, classArg requirements, and an example configuration.
Note: Please follow the format when you write your `config.yml` file. Some built-in tuners need to be installed using `nnictl package`, like SMAC.
Note: Please follow the format when you write your `config.yml` file. Some built-in tuners have dependencies that need to be installed using `pip install nni[<tuner>]`; for example, SMAC's dependencies can be installed using `pip install nni[SMAC]`.
<a name="TPE"></a>
......@@ -144,10 +146,10 @@ tuner:
**Installation**
SMAC needs to be installed by following command before the first usage. As a reminder, `swig` is required for SMAC: for Ubuntu `swig` can be installed with `apt`.
SMAC has dependencies that need to be installed by the following command before the first usage. As a reminder, `swig` is required for SMAC; on Ubuntu, `swig` can be installed with `apt`.
```bash
nnictl package install --name=SMAC
pip install nni[SMAC]
```
**Suggested scenario**
......@@ -340,7 +342,7 @@ tuner:
BOHB advisor requires the [ConfigSpace](https://github.com/automl/ConfigSpace) package. ConfigSpace can be installed using the following command.
```bash
nnictl package install --name=BOHB
pip install nni[BOHB]
```
**Suggested scenario**
......
# How to install customized tuner as a builtin tuner
# How to register a customized tuner as a builtin tuner
You can following below steps to install a customized tuner in `nni/examples/tuners/customized_tuner` as a builtin tuner.
You can follow the steps below to register the customized tuner in `nni/examples/tuners/customized_tuner` as a builtin tuner.
## Prepare installation source and install package
## Install the customized tuner package into python environment
There are 2 options to install this customized tuner:
There are two options to install the package into the python environment:
### Option 1: install from directory
Step 1: From `nni/examples/tuners/customized_tuner` directory, run:
From the `nni/examples/tuners/customized_tuner` directory, run:
`python setup.py develop`
This command will build the `nni/examples/tuners/customized_tuner` directory as a pip installation source.
Step 2: Run command:
`nnictl package install ./`
### Option 2: install from whl file
......@@ -28,16 +25,22 @@ This command build a whl file which is a pip installation source.
Step 2: Run command:
`nnictl package install dist/demo_tuner-0.1-py3-none-any.whl`
`pip install dist/demo_tuner-0.1-py3-none-any.whl`
## Register the customized tuner as builtin tuner:
Run the following command:
`nnictl algo register --meta meta_file.yml`
## Check the installed package
## Check the registered builtin algorithms
Then run command `nnictl package list`, you should be able to see that demotuner is installed:
Then run the command `nnictl algo list`; you should be able to see that demotuner is registered:
```
+-----------------+------------+-----------+----------------------+------------------------------------------+
| Name | Type | Installed | Class Name | Module Name |
| Name | Type | source | Class Name | Module Name |
+-----------------+------------+-----------+----------------------+------------------------------------------+
| demotuner | tuners | Yes | DemoTuner | demo_tuner |
| demotuner | tuners | user | DemoTuner | demo_tuner |
+-----------------+------------+-----------+----------------------+------------------------------------------+
```
......
**How to install customized algorithms as builtin tuners, assessors and advisors**
**How to register customized algorithms as builtin tuners, assessors and advisors**
===
## Overview
NNI provides a lot of [builtin tuners](../Tuner/BuiltinTuner.md), [advisors](../Tuner/HyperbandAdvisor.md) and [assessors](../Assessor/BuiltinAssessor.md) can be used directly for Hyper Parameter Optimization, and some extra algorithms can be installed via `nnictl package install --name <name>` after NNI is installed. You can check these extra algorithms via `nnictl package list` command.
NNI provides many [builtin tuners](../Tuner/BuiltinTuner.md), [advisors](../Tuner/HyperbandAdvisor.md) and [assessors](../Assessor/BuiltinAssessor.md) that can be used directly for Hyper Parameter Optimization, and extra customized algorithms can be registered via `nnictl algo register --meta <path_to_meta_file>` after NNI is installed. You can check the builtin algorithms via the `nnictl algo list` command.
NNI also provides the ability to build your own customized tuners, advisors and assessors. To use a customized algorithm, users can simply follow the spec in the experiment config file to properly reference the algorithm, as illustrated in the tutorials of [customized tuners](../Tuner/CustomizeTuner.md)/[advisors](../Tuner/CustomizeAdvisor.md)/[assessors](../Assessor/CustomizeAssessor.md).
......@@ -13,8 +13,8 @@ tuner:
builtinTunerName: mytuner
```
## Install customized algorithms as builtin tuners, assessors and advisors
You can follow below steps to build a customized tuner/assessor/advisor, and install it into NNI as builtin algorithm.
## Register customized algorithms as builtin tuners, assessors and advisors
You can follow the steps below to build a customized tuner/assessor/advisor and register it into NNI as a builtin algorithm.
### 1. Create a customized tuner/assessor/advisor
Refer to the following instructions to create one:
......@@ -48,56 +48,43 @@ class MedianstopClassArgsValidator(ClassArgsValidator):
```
The validator will be invoked before the experiment is started to check whether the classArgs fields are valid for your customized algorithms.
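For illustration, a minimal validator for a customized tuner might look like the sketch below. It assumes `ClassArgsValidator` can be imported from the top-level `nni` package, as with the base class used in the example above, and it uses the `schema` package; the actual `demo_tuner.MyClassArgsValidator` referenced later may differ.

```python
from schema import Schema, Optional

# Assumption: ClassArgsValidator is exposed by the top-level nni package,
# matching the base class used by the builtin validators above.
from nni import ClassArgsValidator

class MyClassArgsValidator(ClassArgsValidator):
    def validate_class_args(self, **kwargs):
        # Raises schema.SchemaError if the classArgs in config.yml are not acceptable,
        # so the experiment is rejected before any trial is started.
        Schema({
            Optional('optimize_mode'): lambda v: v in ('maximize', 'minimize'),
        }).validate(kwargs)
```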
### 3. Prepare package installation source
In order to be installed as builtin tuners, assessors and advisors, the customized algorithms need to be packaged as installable source which can be recognized by `pip` command, under the hood nni calls `pip` command to install the package.
Besides being a common pip source, the package needs to provide meta information in the `classifiers` field.
Format of classifiers field is a following:
```
NNI Package :: <type> :: <builtin name> :: <full class name of tuner> :: <full class name of class args validator>
```
* `type`: type of algorithms, could be one of `tuner`, `assessor`, `advisor`
* `builtin name`: builtin name used in experiment configuration file
* `full class name of tuner`: tuner class name, including its module name, for example: `demo_tuner.DemoTuner`
* `full class name of class args validator`: class args validator class name, including its module name, for example: `demo_tuner.MyClassArgsValidator`
Following is an example of classfiers in package's `setup.py`:
### 3. Install your customized algorithms into python environment
Firstly, the customized algorithms need to be prepared as a python package. Then you can install the package into the python environment in either of the following ways (a minimal `setup.py` sketch follows this list):
* Run the command `python setup.py develop` from the package directory; this installs the package in development mode, which is recommended if your algorithm is still under development.
* Run the command `python setup.py bdist_wheel` from the package directory; this builds a whl file which is a pip installation source. Then run `pip install <wheel file>` to install it.
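For reference, a minimal `setup.py` for such a package might look like the following sketch. The field values are illustrative, following the `demo_tuner` example used elsewhere in these docs; the actual `nni/examples/tuners/customized_tuner/setup.py` may contain additional fields.

```python
import setuptools

setuptools.setup(
    # Illustrative values; the demo package elsewhere in these docs builds a
    # wheel named demo_tuner-0.1-py3-none-any.whl.
    name = 'demo_tuner',
    version = '0.1',
    packages = setuptools.find_packages(),
    classifiers = [
        'Programming Language :: Python :: 3',
        'License :: OSI Approved :: MIT License',
        'Operating System :: '
    ]
)
```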
```python
classifiers = [
'Programming Language :: Python :: 3',
'License :: OSI Approved :: MIT License',
'Operating System :: ',
'NNI Package :: tuner :: demotuner :: demo_tuner.DemoTuner :: demo_tuner.MyClassArgsValidator'
],
```
Once you have the meta info in `setup.py`, you can build your pip installation source via:
* Run command `python setup.py develop` from the package directory, this command will build the directory as a pip installation source.
* Run command `python setup.py bdist_wheel` from the package directory, this command build a whl file which is a pip installation source.
### 4. Prepare meta file
NNI will look for the classifier starts with `NNI Package` to retrieve the package meta information while the package being installed with `nnictl package install <source>` command.
Create a yaml file with the following keys as the meta file:
* `algoType`: type of algorithms, could be one of `tuner`, `assessor`, `advisor`
* `builtinName`: builtin name used in experiment configuration file
* `className`: tuner class name, including its module name, for example: `demo_tuner.DemoTuner`
* `classArgsValidator`: class args validator class name, including its module name, for example: `demo_tuner.MyClassArgsValidator`
Refer to the [customized tuner example](../Tuner/InstallCustomizedTuner.md) for a full example.
Following is an example of the yaml file:
### 4. Install customized algorithms package into NNI
If your installation source is prepared as a directory with `python setup.py develop`, you can install the package by following command:
`nnictl package install <installation source directory>`
```yaml
algoType: tuner
builtinName: demotuner
className: demo_tuner.DemoTuner
classArgsValidator: demo_tuner.MyClassArgsValidator
For example:
```
`nnictl package install nni/examples/tuners/customized_tuner/`
### 5. Register customized algorithms into NNI
Run the following command to register the customized algorithms as builtin algorithms in NNI:
If your installation source is prepared as a whl file with `python setup.py bdist_wheel`, you can install the package by following command:
```bash
nnictl algo register --meta <path_to_meta_file>
```
The `<path_to_meta_file>` is the path to the yaml file you created in the above section.
`nnictl package install <whl file path>`
For example:
Refer to the [customized tuner example](../Tuner/InstallCustomizedTuner.md) for a full example.
`nnictl package install nni/examples/tuners/customized_tuner/dist/demo_tuner-0.1-py3-none-any.whl`
## 5. Use the installed builtin algorithms in experiment
## 6. Use the installed builtin algorithms in experiment
Once your customized algorithm is installed, you can use it in the experiment configuration file the same way as other builtin tuners/assessors/advisors, for example:
```yaml
......@@ -109,56 +96,42 @@ tuner:
```
## Manage packages using `nnictl package`
## Manage builtin algorithms using `nnictl algo`
### List installed packages
### List builtin algorithms
Run following command to list the installed packages:
Run the following command to list the registered builtin algorithms:
```
nnictl package list
+-----------------+------------+-----------+--------=-------------+------------------------------------------+
| Name | Type | Installed | Class Name | Module Name |
+-----------------+------------+-----------+----------------------+------------------------------------------+
| demotuner | tuners | Yes | DemoTuner | demo_tuner |
| SMAC | tuners | No | SMACTuner | nni.smac_tuner.smac_tuner |
| PPOTuner | tuners | No | PPOTuner | nni.ppo_tuner.ppo_tuner |
| BOHB | advisors | Yes | BOHB | nni.bohb_advisor.bohb_advisor |
+-----------------+------------+-----------+----------------------+------------------------------------------+
```
Run following command to list all packages, including the builtin packages can not be uninstalled.
```
nnictl package list --all
```bash
nnictl algo list
+-----------------+------------+-----------+----------------------+------------------------------------------+
| Name | Type | Installed | Class Name | Module Name |
| Name | Type | Source | Class Name | Module Name |
+-----------------+------------+-----------+----------------------+------------------------------------------+
| TPE | tuners | Yes | HyperoptTuner | nni.hyperopt_tuner.hyperopt_tuner |
| Random | tuners | Yes | HyperoptTuner | nni.hyperopt_tuner.hyperopt_tuner |
| Anneal | tuners | Yes | HyperoptTuner | nni.hyperopt_tuner.hyperopt_tuner |
| Evolution | tuners | Yes | EvolutionTuner | nni.evolution_tuner.evolution_tuner |
| BatchTuner | tuners | Yes | BatchTuner | nni.batch_tuner.batch_tuner |
| GridSearch | tuners | Yes | GridSearchTuner | nni.gridsearch_tuner.gridsearch_tuner |
| NetworkMorphism | tuners | Yes | NetworkMorphismTuner | nni.networkmorphism_tuner.networkmo... |
| MetisTuner | tuners | Yes | MetisTuner | nni.metis_tuner.metis_tuner |
| GPTuner | tuners | Yes | GPTuner | nni.gp_tuner.gp_tuner |
| PBTTuner | tuners | Yes | PBTTuner | nni.pbt_tuner.pbt_tuner |
| SMAC | tuners | No | SMACTuner | nni.smac_tuner.smac_tuner |
| PPOTuner | tuners | No | PPOTuner | nni.ppo_tuner.ppo_tuner |
| Medianstop | assessors | Yes | MedianstopAssessor | nni.medianstop_assessor.medianstop_... |
| Curvefitting | assessors | Yes | CurvefittingAssessor | nni.curvefitting_assessor.curvefitt... |
| Hyperband | advisors | Yes | Hyperband | nni.hyperband_advisor.hyperband_adv... |
| BOHB | advisors | Yes | BOHB | nni.bohb_advisor.bohb_advisor |
| TPE | tuners | nni | HyperoptTuner | nni.hyperopt_tuner.hyperopt_tuner |
| Random | tuners | nni | HyperoptTuner | nni.hyperopt_tuner.hyperopt_tuner |
| Anneal | tuners | nni | HyperoptTuner | nni.hyperopt_tuner.hyperopt_tuner |
| Evolution | tuners | nni | EvolutionTuner | nni.evolution_tuner.evolution_tuner |
| BatchTuner | tuners | nni | BatchTuner | nni.batch_tuner.batch_tuner |
| GridSearch | tuners | nni | GridSearchTuner | nni.gridsearch_tuner.gridsearch_tuner |
| NetworkMorphism | tuners | nni | NetworkMorphismTuner | nni.networkmorphism_tuner.networkmo... |
| MetisTuner | tuners | nni | MetisTuner | nni.metis_tuner.metis_tuner |
| GPTuner | tuners | nni | GPTuner | nni.gp_tuner.gp_tuner |
| PBTTuner | tuners | nni | PBTTuner | nni.pbt_tuner.pbt_tuner |
| SMAC | tuners | nni | SMACTuner | nni.smac_tuner.smac_tuner |
| PPOTuner | tuners | nni | PPOTuner | nni.ppo_tuner.ppo_tuner |
| Medianstop | assessors | nni | MedianstopAssessor | nni.medianstop_assessor.medianstop_... |
| Curvefitting | assessors | nni | CurvefittingAssessor | nni.curvefitting_assessor.curvefitt... |
| Hyperband | advisors | nni | Hyperband | nni.hyperband_advisor.hyperband_adv... |
| BOHB | advisors | nni | BOHB | nni.bohb_advisor.bohb_advisor |
+-----------------+------------+-----------+----------------------+------------------------------------------+
```
### Uninstall package
### Unregister builtin algorithms
Run the following command to unregister a registered customized algorithm:
`nnictl package uninstall <builtin name>`
`nnictl algo unregister <builtin name>`
For example:
`nnictl package uninstall demotuner`
`nnictl algo unregister demotuner`
......@@ -123,7 +123,7 @@ If there is a stderr file, please check it. Two possible cases are:
### Fail to use BOHB on Windows
Make sure a C++ 14.0 compiler is installed when trying to run `nnictl package install --name=BOHB` to install the dependencies.
Make sure a C++ 14.0 compiler is installed when trying to run `pip install nni[BOHB]` to install the dependencies.
### Not supported tuner on Windows
......
......@@ -21,7 +21,7 @@ nnictl support commands:
* [nnictl log](#log)
* [nnictl webui](#webui)
* [nnictl tensorboard](#tensorboard)
* [nnictl package](#package)
* [nnictl algo](#algo)
* [nnictl ss_gen](#ss_gen)
* [nnictl --version](#version)
......@@ -769,112 +769,89 @@ Debug mode will disable version check function in Trialkeeper.
|------|------|------ |------|
|id| False| |ID of the experiment you want to set|
<a name="package"></a>
<a name="algo"></a>
### Manage package
### Manage builtin algorithms
* __nnictl package install__
* __nnictl algo register__
* Description
Install a package (customized algorithms or nni provided algorithms) as builtin tuner/assessor/advisor.
Register customized algorithms as builtin tuner/assessor/advisor.
* Usage
```bash
nnictl package install --name <package name>
nnictl algo register --meta <path_to_meta_file>
```
`<path_to_meta_file>` is the path to the meta data file in yml format, which has the following keys:
* `algoType`: type of algorithms, could be one of `tuner`, `assessor`, `advisor`
* `builtinName`: builtin name used in experiment configuration file
* `className`: tuner class name, including its module name, for example: `demo_tuner.DemoTuner`
* `classArgsValidator`: class args validator class name, including its module name, for example: `demo_tuner.MyClassArgsValidator`
The available `<package name>` can be checked via `nnictl package list` command.
or
```bash
nnictl package install <installation source>
```
Reference [Install customized algorithms](InstallCustomizedAlgos.md) to prepare the installation source.
* Example
> Install SMAC tuner
> Install a customized tuner in nni examples
```bash
nnictl package install --name SMAC
cd nni/examples/tuners/customized_tuner
python3 setup.py develop
nnictl algo register --meta meta_file.yml
```
> Install a customized tuner
```bash
nnictl package install nni/examples/tuners/customized_tuner/dist/demo_tuner-0.1-py3-none-any.whl
```
* __nnictl package show__
* __nnictl algo show__
* Description
Show the detailed information of specified packages.
Show the detailed information of the specified registered algorithm.
* Usage
```bash
nnictl package show <package name>
nnictl algo show <builtinName>
```
* Example
```bash
nnictl package show SMAC
nnictl algo show SMAC
```
* __nnictl package list__
* __nnictl algo list__
* Description
List the installed/all packages.
List the registered builtin algorithms.
* Usage
```bash
nnictl package list [OPTIONS]
nnictl algo list
```
* Options
|Name, shorthand|Required|Default|Description|
|------|------|------ |------|
|--all| False| |List all packages|
* Example
> List installed packages
```bash
nnictl package list
```
> List all packages
```bash
nnictl package list --all
nnictl algo list
```
* __nnictl package uninstall__
* __nnictl algo unregister__
* Description
Uninstall a package.
Unregister a registered customized builtin algorithm. The builtin algorithms provided by NNI can not be unregistered.
* Usage
```bash
nnictl package uninstall <package name>
nnictl algo unregister <builtinName>
```
* Example
Unregister the demotuner algorithm
```bash
nnictl package uninstall SMAC
nnictl algo unregister demotuner
```
......
algoType: tuner
builtinName: demotuner
className: demo_tuner.DemoTuner
classArgsValidator: demo_tuner.MyClassArgsValidator
......@@ -12,10 +12,8 @@ setuptools.setup(
classifiers = [
'Programming Language :: Python :: 3',
'License :: OSI Approved :: MIT License',
'Operating System :: ',
'NNI Package :: tuner :: demotuner :: demo_tuner.DemoTuner :: demo_tuner.MyClassArgsValidator'
'Operating System :: '
],
author = 'Microsoft NNI Team',
author_email = 'nni@microsoft.com',
description = 'NNI control for Neural Network Intelligence project',
......
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import os
import importlib
import json
from nni.tools.package_utils import read_registerd_algo_meta, get_registered_algo_meta, \
write_registered_algo_meta, ALGO_TYPES, parse_full_class_name
from .common_utils import print_error, print_green, get_yml_content
def read_reg_meta_list(meta_path):
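    '''Load algorithm meta data from a yaml meta file and return it as a list of meta dicts.'''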
content = get_yml_content(meta_path)
if content.get('algorithms'):
meta_list = content.get('algorithms')
else:
meta_list = [content]
for meta in meta_list:
assert 'algoType' in meta
assert meta['algoType'] in ['tuner', 'assessor', 'advisor']
assert 'builtinName' in meta
assert 'className' in meta
return meta_list
def verify_algo_import(meta):
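    '''Verify that the algorithm class and its classArgsValidator (if any) can be imported.'''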
def _do_verify_import(fullName):
module_name, class_name = parse_full_class_name(fullName)
class_module = importlib.import_module(module_name)
getattr(class_module, class_name)
_do_verify_import(meta['className'])
if meta.get('classArgsValidator'):
_do_verify_import(meta['classArgsValidator'])
def algo_reg(args):
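    '''Register the algorithms described in a meta file as builtin algorithms.'''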
meta_list = read_reg_meta_list(args.meta_path)
for meta in meta_list:
if get_registered_algo_meta(meta['builtinName']) is not None:
print_error('builtinName {} already registered'.format(meta['builtinName']))
return
verify_algo_import(meta)
save_algo_meta_data(meta)
        print_green('{} registered successfully!'.format(meta['builtinName']))
def algo_unreg(args):
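    '''Unregister a customized builtin algorithm by its builtin name.'''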
name = args.name[0]
meta = get_registered_algo_meta(name)
if meta is None:
        print_error('builtin algorithm {} not found!'.format(name))
return
if meta['source'] == 'nni':
print_error('{} is provided by nni, can not be unregistered!'.format(name))
return
if remove_algo_meta_data(name):
        print_green('{} unregistered successfully!'.format(name))
    else:
        print_error('Failed to unregister {}!'.format(name))
def algo_show(args):
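    '''Show the meta information of a registered builtin algorithm.'''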
builtin_name = args.name[0]
meta = get_registered_algo_meta(builtin_name)
if meta:
print(json.dumps(meta, indent=4))
else:
        print_error('builtin algorithm {} not found'.format(builtin_name))
def algo_list(args):
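    '''Print all registered builtin algorithms as a table.'''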
meta = read_registerd_algo_meta()
    print('+-----------------+------------+-----------+----------------------+------------------------------------------+')
print('| Name | Type | source | Class Name | Module Name |')
print('+-----------------+------------+-----------+----------------------+------------------------------------------+')
MAX_MODULE_NAME = 38
for t in ['tuners', 'assessors', 'advisors']:
for p in meta[t]:
module_name = '.'.join(p['className'].split('.')[:-1])
if len(module_name) > MAX_MODULE_NAME:
module_name = module_name[:MAX_MODULE_NAME-3] + '...'
class_name = p['className'].split('.')[-1]
print('| {:15s} | {:10s} | {:9s} | {:20s} | {:40s} |'.format(p['builtinName'], t, p['source'], class_name, module_name[:38]))
print('+-----------------+------------+-----------+----------------------+------------------------------------------+')
def save_algo_meta_data(meta_data):
meta_data['source'] = 'user'
config = read_registerd_algo_meta()
config[meta_data['algoType']+'s'].append(meta_data)
write_registered_algo_meta(config)
def remove_algo_meta_data(name):
config = read_registerd_algo_meta()
updated = False
for t in ALGO_TYPES:
for meta in config[t]:
if meta['builtinName'] == name:
config[t].remove(meta)
updated = True
if updated:
write_registered_algo_meta(config)
return True
return False
......@@ -6,7 +6,7 @@ import logging
import os
import netifaces
from schema import Schema, And, Optional, Regex, Or, SchemaError
from nni.tools.package_utils import create_validator_instance, get_all_builtin_names, get_builtin_algo_meta
from nni.tools.package_utils import create_validator_instance, get_all_builtin_names, get_registered_algo_meta
from .constants import SCHEMA_TYPE_ERROR, SCHEMA_RANGE_ERROR, SCHEMA_PATH_ERROR
from .common_utils import get_yml_content, print_warning
......@@ -75,8 +75,8 @@ class AlgoSchema:
def validate_class_args(self, class_args, algo_type, builtin_name):
if not builtin_name or not class_args:
return
meta = get_builtin_algo_meta(algo_type+'s', builtin_name)
if meta and 'accept_class_args' in meta and meta['accept_class_args'] == False:
meta = get_registered_algo_meta(builtin_name, algo_type+'s')
if meta and 'acceptClassArgs' in meta and meta['acceptClassArgs'] == False:
raise SchemaError('classArgs is not allowed.')
logging.getLogger('nni.protocol').setLevel(logging.ERROR) # we know IPC is not there, don't complain
......
......@@ -61,27 +61,6 @@ TRIAL_MONITOR_CONTENT = '%-15s %-25s %-25s %-15s'
TRIAL_MONITOR_TAIL = '-------------------------------------------------------------------------------------\n\n\n'
INSTALLABLE_PACKAGE_META = {
'SMAC': {
'type': 'tuner',
'class_name': 'nni.algorithms.hpo.smac_tuner.smac_tuner.SMACTuner',
'code_sub_dir': 'smac_tuner',
'class_args_validator': 'nni.algorithms.hpo.smac_tuner.smac_tuner.SMACClassArgsValidator'
},
'BOHB': {
'type': 'advisor',
'class_name': 'nni.algorithms.hpo.bohb_advisor.bohb_advisor.BOHB',
'code_sub_dir': 'bohb_advisor',
'class_args_validator': 'nni.algorithms.hpo.bohb_advisor.bohb_advisor.BOHBClassArgsValidator'
},
'PPOTuner': {
'type': 'tuner',
'class_name': 'nni.algorithms.hpo.ppo_tuner.ppo_tuner.PPOTuner',
'code_sub_dir': 'ppo_tuner',
'class_args_validator': 'nni.algorithms.hpo.ppo_tuner.ppo_tuner.PPOClassArgsValidator'
}
}
TUNERS_SUPPORTING_IMPORT_DATA = {
'TPE',
'Anneal',
......
......@@ -19,7 +19,7 @@ from .config_utils import Config, Experiments
from .common_utils import get_yml_content, get_json_content, print_error, print_normal, \
detect_port, get_user
from .constants import NNICTL_HOME_DIR, ERROR_INFO, REST_TIME_OUT, EXPERIMENT_SUCCESS_INFO, LOG_HEADER, INSTALLABLE_PACKAGE_META
from .constants import NNICTL_HOME_DIR, ERROR_INFO, REST_TIME_OUT, EXPERIMENT_SUCCESS_INFO, LOG_HEADER
from .command_utils import check_output_command, kill_command
from .nnictl_utils import update_experiment
......@@ -452,10 +452,9 @@ def launch_experiment(args, experiment_config, mode, experiment_id):
except CalledProcessError:
print_error('some errors happen when import package %s.' %(package_name))
print_log_content(experiment_id)
if package_name in INSTALLABLE_PACKAGE_META:
print_error('If %s is not installed, it should be installed through '\
'\'nnictl package install --name %s\'' % (package_name, package_name))
exit(1)
if package_name in ['SMAC', 'BOHB', 'PPOTuner']:
print_error(f'The dependencies for {package_name} can be installed through pip install nni[{package_name}]')
raise
log_dir = experiment_config['logDir'] if experiment_config.get('logDir') else None
log_level = experiment_config['logLevel'] if experiment_config.get('logLevel') else None
#view experiment mode do not need debug function, when view an experiment, there will be no new logs created
......
......@@ -13,7 +13,7 @@ from .nnictl_utils import stop_experiment, trial_ls, trial_kill, list_experiment
monitor_experiment, export_trials_data, trial_codegen, webui_url, \
get_config, log_stdout, log_stderr, search_space_auto_gen, webui_nas, \
save_experiment, load_experiment
from .package_management import package_install, package_uninstall, package_show, package_list
from .algo_management import algo_reg, algo_unreg, algo_show, algo_list
from .constants import DEFAULT_REST_PORT
from .tensorboard_utils import start_tensorboard, stop_tensorboard
init(autoreset=True)
......@@ -212,26 +212,43 @@ def parse_args():
parser_log_trial.add_argument('--trial_id', '-T', dest='trial_id', help='find trial log path by id')
parser_log_trial.set_defaults(func=log_trial)
#parse package command
parser_package = subparsers.add_parser('package', help='control nni tuner and assessor packages')
# add subparsers for parser_package
parser_package_subparsers = parser_package.add_subparsers()
parser_package_install = parser_package_subparsers.add_parser('install', help='install packages')
parser_package_install.add_argument('source', nargs='?', help='installation source, can be a directory or whl file')
parser_package_install.add_argument('--name', '-n', dest='name', help='package name to be installed', required=False)
parser_package_install.set_defaults(func=package_install)
#parse algo command
parser_algo = subparsers.add_parser('algo', help='control nni builtin tuner, assessor and advisor algorithms')
# add subparsers for parser_algo
parser_algo_subparsers = parser_algo.add_subparsers()
parser_algo_reg = parser_algo_subparsers.add_parser(
'register',
aliases=('reg',),
        help='''register algorithms as nni builtin algorithm, for example:
            nnictl algo reg --meta_path <path_to_meta_file>
            where <path_to_meta_file> is the path to a meta data file in yml format,
            refer to the nni document and examples/tuners/customized_tuner example
            for the format of the yml file.'''
)
parser_algo_reg.add_argument('--meta_path', '-m', dest='meta_path', help='path to the meta file', required=True)
parser_algo_reg.set_defaults(func=algo_reg)
parser_package_uninstall = parser_package_subparsers.add_parser('uninstall', help='uninstall packages')
parser_package_uninstall.add_argument('name', nargs=1, help='package name to be uninstalled')
parser_package_uninstall.set_defaults(func=package_uninstall)
parser_algo_unreg = parser_algo_subparsers.add_parser('unregister', aliases=('unreg',), help='unregister algorithm')
parser_algo_unreg.add_argument('name', nargs=1, help='builtin name of the algorithm')
parser_algo_unreg.set_defaults(func=algo_unreg)
parser_package_show = parser_package_subparsers.add_parser('show', help='show the information of packages')
parser_package_show.add_argument('name', nargs=1, help='builtin name of the package')
parser_package_show.set_defaults(func=package_show)
parser_algo_show = parser_algo_subparsers.add_parser('show', help='show the information of algorithm')
parser_algo_show.add_argument('name', nargs=1, help='builtin name of the algorithm')
parser_algo_show.set_defaults(func=algo_show)
parser_package_list = parser_package_subparsers.add_parser('list', help='list installed packages')
parser_package_list.add_argument('--all', action='store_true', help='list all builtin packages')
parser_package_list.set_defaults(func=package_list)
parser_algo_list = parser_algo_subparsers.add_parser('list', help='list registered algorithms')
parser_algo_list.set_defaults(func=algo_list)
    # To show a message that the nnictl package command is replaced by nnictl algo; to be removed in a future release.
    def show_message_for_nnictl_package(args):
        print_error('nnictl package command is replaced by nnictl algo, please run nnictl algo -h to show the usage')
    parser_package_subparsers = subparsers.add_parser('package', help='control nni tuner and assessor packages').add_subparsers()
    parser_package_subparsers.add_parser('install', help='install packages').set_defaults(func=show_message_for_nnictl_package)
    parser_package_subparsers.add_parser('uninstall', help='uninstall packages').set_defaults(func=show_message_for_nnictl_package)
    parser_package_subparsers.add_parser('show', help='show the information of packages').set_defaults(
        func=show_message_for_nnictl_package)
    parser_package_subparsers.add_parser('list', help='list installed packages').set_defaults(func=show_message_for_nnictl_package)
#parse tensorboard command
parser_tensorboard = subparsers.add_parser('tensorboard', help='manage tensorboard')
......
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import os
from collections import defaultdict
import json
import pkginfo
import nni
from nni.tools.package_utils import read_installed_package_meta, get_installed_package_meta, \
write_package_meta, get_builtin_algo_meta, get_not_installable_builtin_names, ALGO_TYPES
from .constants import INSTALLABLE_PACKAGE_META
from .common_utils import print_error, print_green
from .command_utils import install_requirements_command, call_pip_install, call_pip_uninstall
PACKAGE_TYPES = ['tuner', 'assessor', 'advisor']
def install_by_name(package_name):
if package_name not in INSTALLABLE_PACKAGE_META:
raise RuntimeError('{} is not found in installable packages!'.format(package_name))
requirements_path = os.path.join(nni.__path__[0], 'algorithms/hpo', INSTALLABLE_PACKAGE_META[package_name]['code_sub_dir'], 'requirements.txt')
assert os.path.exists(requirements_path)
return install_requirements_command(requirements_path)
def package_install(args):
'''install packages'''
installed = False
try:
if args.name:
if install_by_name(args.name) == 0:
package_meta = {}
package_meta['type'] = INSTALLABLE_PACKAGE_META[args.name]['type']
package_meta['name'] = args.name
package_meta['class_name'] = INSTALLABLE_PACKAGE_META[args.name]['class_name']
package_meta['class_args_validator'] = INSTALLABLE_PACKAGE_META[args.name]['class_args_validator']
save_package_meta_data(package_meta)
print_green('{} installed!'.format(args.name))
installed = True
else:
package_meta = get_nni_meta(args.source)
if package_meta:
if call_pip_install(args.source) == 0:
save_package_meta_data(package_meta)
print_green('{} installed!'.format(package_meta['name']))
installed = True
except Exception as e:
print_error(e)
if not installed:
print_error('installation failed!')
def package_uninstall(args):
'''uninstall packages'''
name = args.name[0]
if name in get_not_installable_builtin_names():
print_error('{} can not be uninstalled!'.format(name))
exit(1)
meta = get_installed_package_meta(None, name)
if meta is None:
print_error('package {} not found!'.format(name))
return
if 'installed_package' in meta:
call_pip_uninstall(meta['installed_package'])
if remove_package_meta_data(name):
print_green('{} uninstalled sucessfully!'.format(name))
else:
print_error('Failed to uninstall {}!'.format(name))
def package_show(args):
'''show specified packages'''
builtin_name = args.name[0]
meta = get_builtin_algo_meta(builtin_name=builtin_name)
if meta:
print(json.dumps(meta, indent=4))
else:
print_error('package {} not found'.format(builtin_name))
def print_package_list(meta):
print('+-----------------+------------+-----------+--------=-------------+------------------------------------------+')
print('| Name | Type | Installed | Class Name | Module Name |')
print('+-----------------+------------+-----------+----------------------+------------------------------------------+')
MAX_MODULE_NAME = 38
for t in ['tuners', 'assessors', 'advisors']:
for p in meta[t]:
module_name = '.'.join(p['class_name'].split('.')[:-1])
if len(module_name) > MAX_MODULE_NAME:
module_name = module_name[:MAX_MODULE_NAME-3] + '...'
class_name = p['class_name'].split('.')[-1]
print('| {:15s} | {:10s} | {:9s} | {:20s} | {:40s} |'.format(p['name'], t, p['installed'], class_name, module_name[:38]))
print('+-----------------+------------+-----------+----------------------+------------------------------------------+')
def package_list(args):
'''list all packages'''
if args.all:
meta = get_builtin_algo_meta()
else:
meta = read_installed_package_meta()
installed_names = defaultdict(list)
for t in ['tuners', 'assessors', 'advisors']:
for p in meta[t]:
p['installed'] = 'Yes'
installed_names[t].append(p['name'])
for k, v in INSTALLABLE_PACKAGE_META.items():
t = v['type']+'s'
if k not in installed_names[t]:
meta[t].append({
'name': k,
'class_name': v['class_name'],
'class_args_validator': v['class_args_validator'],
'installed': 'No'
})
print_package_list(meta)
def save_package_meta_data(meta_data):
assert meta_data['type'] in PACKAGE_TYPES
assert 'name' in meta_data
assert 'class_name' in meta_data
config = read_installed_package_meta()
if meta_data['name'] in [x['name'] for x in config[meta_data['type']+'s']]:
raise ValueError('name %s already installed' % meta_data['name'])
package_meta = {k: meta_data[k] for k in ['name', 'class_name', 'class_args_validator'] if k in meta_data}
if 'package_name' in meta_data:
package_meta['installed_package'] = meta_data['package_name']
config[meta_data['type']+'s'].append(package_meta)
write_package_meta(config)
def remove_package_meta_data(name):
config = read_installed_package_meta()
updated = False
for t in ALGO_TYPES:
for meta in config[t]:
if meta['name'] == name:
config[t].remove(meta)
updated = True
if updated:
write_package_meta(config)
return True
return False
def get_nni_meta(source):
if not os.path.exists(source):
print_error('{} does not exist'.format(source))
return None
if os.path.isdir(source):
if not os.path.exists(os.path.join(source, 'setup.py')):
print_error('setup.py not found')
return None
pkg = pkginfo.Develop(source)
else:
if not source.endswith('.whl'):
print_error('File name {} must ends with \'.whl\''.format(source))
return False
pkg = pkginfo.Wheel(source)
classifiers = pkg.classifiers
meta = parse_classifiers(classifiers)
meta['package_name'] = pkg.name
return meta
def parse_classifiers(classifiers):
parts = []
for c in classifiers:
if c.startswith('NNI Package'):
parts = [x.strip() for x in c.split('::')]
break
if len(parts) < 4 or not all(parts):
raise ValueError('Can not find correct NNI meta data in package classifiers.')
meta = {
'type': parts[1],
'name': parts[2],
'class_name': parts[3]
}
if len(parts) >= 5:
meta['class_args_validator'] = parts[4]
return meta
......@@ -6,18 +6,13 @@ import importlib
import os
from pathlib import Path
import sys
import ruamel.yaml as yaml
import nni
from .constants import BuiltinAlgorithms
ALGO_TYPES = ['tuners', 'assessors', 'advisors']
def get_all_builtin_names(algo_type):
"""Get all valid builtin names, including:
1. BuiltinAlgorithms which is pre-installed.
2. User installed packages in <nni_installation_path>/config/installed_packages.yml
"""Get all builtin names of registered algorithms of specified type
Parameters
----------
......@@ -30,109 +25,33 @@ def get_all_builtin_names(algo_type):
all builtin tuner names.
"""
assert algo_type in ALGO_TYPES
merged_dict = _get_merged_builtin_dict()
builtin_names = [x['name'] for x in merged_dict[algo_type]]
return builtin_names
return [x['builtinName'] for x in read_registerd_algo_meta()[algo_type]]
def get_not_installable_builtin_names(algo_type=None):
"""Get builtin names in BuiltinAlgorithms which do not need to be installed
and can be used once NNI is installed.
Parameters
----------
algo_type: str | None
can be one of 'tuners', 'assessors', 'advisors' or None
Returns: list of string
-------
All builtin names of specified type, for example, if algo_type is 'tuners', returns
all builtin tuner names.
If algo_type is None, returns all builtin names of all types.
"""
if algo_type is None:
meta = BuiltinAlgorithms
else:
assert algo_type in ALGO_TYPES
meta = {
algo_type: BuiltinAlgorithms[algo_type]
}
names = []
for t in ALGO_TYPES:
if t in meta:
names.extend([x['name'] for x in meta[t]])
return names
def get_builtin_algo_meta(algo_type=None, builtin_name=None):
""" Get meta information of builtin algorithms from:
1. Pre-installed BuiltinAlgorithms
2. User installed packages in <nni_installation_path>/config/installed_packages.yml
def get_registered_algo_meta(builtin_name, algo_type=None):
""" Get meta information of registered algorithms.
Parameters
----------
algo_type: str | None
can be one of 'tuners', 'assessors', 'advisors' or None
builtin_name: str | None
builtin_name: str
builtin name.
Returns: dict | list of dict | None
-------
If builtin_name is specified, returns meta information of speicified builtin
alogorithms, for example:
{
'name': 'Random',
'class_name': 'nni.hyperopt_tuner.hyperopt_tuner.HyperoptTuner',
'class_args': {
'algorithm_name': 'random_search'
},
'accept_class_args': False,
'class_args_validator': 'nni.hyperopt_tuner.hyperopt_tuner.HyperoptClassArgsValidator'
}
If builtin_name is None, returns multiple meta information in a list.
"""
merged_dict = _get_merged_builtin_dict()
if algo_type is None and builtin_name is None:
return merged_dict
if algo_type:
assert algo_type in ALGO_TYPES
metas = merged_dict[algo_type]
else:
metas = merged_dict['tuners'] + merged_dict['assessors'] + merged_dict['advisors']
if builtin_name:
for m in metas:
if m['name'] == builtin_name:
return m
else:
return metas
return None
def get_installed_package_meta(algo_type, builtin_name):
""" Get meta information of user installed algorithms from:
<nni_installation_path>/config/installed_packages.yml
Parameters
----------
algo_type: str | None
can be one of 'tuners', 'assessors', 'advisors' or None
builtin_name: str
builtin name.
Returns: dict | None
-------
    Returns meta information of the specified builtin algorithm, for example:
{
'class_args_validator': 'nni.smac_tuner.smac_tuner.SMACClassArgsValidator',
'class_name': 'nni.smac_tuner.smac_tuner.SMACTuner',
'name': 'SMAC'
'classArgsValidator': 'nni.smac_tuner.smac_tuner.SMACClassArgsValidator',
'className': 'nni.smac_tuner.smac_tuner.SMACTuner',
'builtinName': 'SMAC'
}
"""
assert builtin_name is not None
if algo_type:
assert algo_type in ALGO_TYPES
config = read_installed_package_meta()
config = read_registerd_algo_meta()
candidates = []
if algo_type:
......@@ -141,11 +60,11 @@ def get_installed_package_meta(algo_type, builtin_name):
for algo_type in ALGO_TYPES:
candidates.extend(config[algo_type])
for meta in candidates:
if meta['name'] == builtin_name:
if meta['builtinName'] == builtin_name:
return meta
return None
def _parse_full_class_name(full_class_name):
def parse_full_class_name(full_class_name):
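    """Split a full class name such as 'demo_tuner.DemoTuner' into (module name, class name)."""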
if not full_class_name:
return None, None
parts = full_class_name.split('.')
......@@ -168,10 +87,10 @@ def get_builtin_module_class_name(algo_type, builtin_name):
"""
assert algo_type in ALGO_TYPES
assert builtin_name is not None
meta = get_builtin_algo_meta(algo_type, builtin_name)
meta = get_registered_algo_meta(builtin_name, algo_type)
if not meta:
return None, None
return _parse_full_class_name(meta['class_name'])
return parse_full_class_name(meta['className'])
def create_validator_instance(algo_type, builtin_name):
"""Create instance of validator class
......@@ -190,10 +109,10 @@ def create_validator_instance(algo_type, builtin_name):
"""
assert algo_type in ALGO_TYPES
assert builtin_name is not None
meta = get_builtin_algo_meta(algo_type, builtin_name)
if not meta or 'class_args_validator' not in meta:
meta = get_registered_algo_meta(builtin_name, algo_type)
if not meta or 'classArgsValidator' not in meta:
return None
module_name, class_name = _parse_full_class_name(meta['class_args_validator'])
module_name, class_name = parse_full_class_name(meta['classArgsValidator'])
class_module = importlib.import_module(module_name)
class_constructor = getattr(class_module, class_name)
......@@ -229,17 +148,17 @@ def create_builtin_class_instance(builtin_name, input_class_args, algo_type):
2. merge user specified class args together with builtin class args.
"""
assert algo_meta
module_name, class_name = _parse_full_class_name(algo_meta['class_name'])
module_name, class_name = parse_full_class_name(algo_meta['className'])
class_args = {}
if 'class_args' in algo_meta:
class_args = algo_meta['class_args']
if 'classArgs' in algo_meta:
class_args = algo_meta['classArgs']
if input_class_args is not None:
class_args.update(input_class_args)
return module_name, class_name, class_args
algo_meta = get_builtin_algo_meta(algo_type, builtin_name)
algo_meta = get_registered_algo_meta(builtin_name, algo_type)
module_name, class_name, class_args = parse_algo_meta(algo_meta, input_class_args)
if importlib.util.find_spec(module_name) is None:
......@@ -287,15 +206,26 @@ def create_customized_class_instance(class_params):
return instance
def get_package_config_path():
# FIXME: this might not be the desired location
config_dir = Path(nni.__path__[0]).parent / 'nni_config'
if not os.path.exists(config_dir):
os.makedirs(config_dir, exist_ok=True)
return os.path.join(config_dir, 'installed_packages.yml')
def _using_conda_or_virtual_environment():
return sys.prefix != sys.base_prefix or os.path.isdir(os.path.join(sys.prefix, 'conda-meta'))
def get_registered_algo_config_path():
# Find the path for registered_algorithms.yml for this nni installation,
# the registered_algorithms.yml is copied into this location in setup.py,
# so we need to ensure that we use the same logic as setup.py to find the location.
def read_installed_package_meta():
config_file = get_package_config_path()
if _using_conda_or_virtual_environment():
nni_config_dir = os.path.join(sys.prefix, 'nni')
elif sys.platform == 'win32':
nni_config_dir = os.path.join(os.getenv('APPDATA'), 'nni')
else:
nni_config_dir = os.path.expanduser('~/.config/nni')
if not os.path.exists(nni_config_dir):
os.makedirs(nni_config_dir, exist_ok=True)
return os.path.join(nni_config_dir, 'registered_algorithms.yml')
def read_registerd_algo_meta():
config_file = get_registered_algo_config_path()
if os.path.exists(config_file):
with open(config_file, 'r') as f:
config = yaml.load(f, Loader=yaml.Loader)
......@@ -306,16 +236,7 @@ def read_installed_package_meta():
config[t] = []
return config
def write_package_meta(config):
config_file = get_package_config_path()
def write_registered_algo_meta(config):
config_file = get_registered_algo_config_path()
with open(config_file, 'w') as f:
f.write(yaml.dump(dict(config), default_flow_style=False))
def _get_merged_builtin_dict():
def merge_meta_dict(d1, d2):
res = defaultdict(list)
for t in ALGO_TYPES:
res[t] = d1[t] + d2[t]
return res
return merge_meta_dict(BuiltinAlgorithms, read_installed_package_meta())
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
BuiltinAlgorithms = {
'tuners': [
{
'name': 'TPE',
'class_name': 'nni.algorithms.hpo.hyperopt_tuner.hyperopt_tuner.HyperoptTuner',
'class_args': {
'algorithm_name': 'tpe'
},
'class_args_validator': 'nni.algorithms.hpo.hyperopt_tuner.hyperopt_tuner.HyperoptClassArgsValidator'
},
{
'name': 'Random',
'class_name': 'nni.algorithms.hpo.hyperopt_tuner.hyperopt_tuner.HyperoptTuner',
'class_args': {
'algorithm_name': 'random_search'
},
'accept_class_args': False,
'class_args_validator': 'nni.algorithms.hpo.hyperopt_tuner.hyperopt_tuner.HyperoptClassArgsValidator'
},
{
'name': 'Anneal',
'class_name': 'nni.algorithms.hpo.hyperopt_tuner.hyperopt_tuner.HyperoptTuner',
'class_args': {
'algorithm_name': 'anneal'
},
'class_args_validator': 'nni.algorithms.hpo.hyperopt_tuner.hyperopt_tuner.HyperoptClassArgsValidator'
},
{
'name': 'Evolution',
'class_name': 'nni.algorithms.hpo.evolution_tuner.evolution_tuner.EvolutionTuner',
'class_args_validator': 'nni.algorithms.hpo.evolution_tuner.evolution_tuner.EvolutionClassArgsValidator'
},
{
'name': 'BatchTuner',
'class_name': 'nni.algorithms.hpo.batch_tuner.batch_tuner.BatchTuner',
'accept_class_args': False,
},
{
'name': 'GridSearch',
'class_name': 'nni.algorithms.hpo.gridsearch_tuner.gridsearch_tuner.GridSearchTuner',
'accept_class_args': False,
},
{
'name': 'NetworkMorphism',
'class_name': 'nni.algorithms.hpo.networkmorphism_tuner.networkmorphism_tuner.NetworkMorphismTuner',
'class_args_validator': 'nni.algorithms.hpo.networkmorphism_tuner.networkmorphism_tuner.NetworkMorphismClassArgsValidator'
},
{
'name': 'MetisTuner',
'class_name': 'nni.algorithms.hpo.metis_tuner.metis_tuner.MetisTuner',
'class_args_validator': 'nni.algorithms.hpo.metis_tuner.metis_tuner.MetisClassArgsValidator'
},
{
'name': 'GPTuner',
'class_name': 'nni.algorithms.hpo.gp_tuner.gp_tuner.GPTuner',
'class_args_validator': 'nni.algorithms.hpo.gp_tuner.gp_tuner.GPClassArgsValidator'
},
{
'name': 'PBTTuner',
'class_name': 'nni.algorithms.hpo.pbt_tuner.pbt_tuner.PBTTuner',
'class_args_validator': 'nni.algorithms.hpo.pbt_tuner.pbt_tuner.PBTClassArgsValidator'
},
{
'name': 'RegularizedEvolutionTuner',
'class_name': 'nni.algorithms.hpo.regularized_evolution_tuner.regularized_evolution_tuner.RegularizedEvolutionTuner',
'class_args_validator': 'nni.algorithms.hpo.regularized_evolution_tuner.regularized_evolution_tuner.EvolutionClassArgsValidator'
}
],
'assessors': [
{
'name': 'Medianstop',
'class_name': 'nni.algorithms.hpo.medianstop_assessor.medianstop_assessor.MedianstopAssessor',
'class_args_validator': 'nni.algorithms.hpo.medianstop_assessor.medianstop_assessor.MedianstopClassArgsValidator'
},
{
'name': 'Curvefitting',
'class_name': 'nni.algorithms.hpo.curvefitting_assessor.curvefitting_assessor.CurvefittingAssessor',
'class_args_validator': 'nni.algorithms.hpo.curvefitting_assessor.curvefitting_assessor.CurvefittingClassArgsValidator'
},
],
'advisors': [
{
'name': 'Hyperband',
'class_name': 'nni.algorithms.hpo.hyperband_advisor.hyperband_advisor.Hyperband',
'class_args_validator': 'nni.algorithms.hpo.hyperband_advisor.hyperband_advisor.HyperbandClassArgsValidator'
}
]
}
......@@ -44,8 +44,7 @@ jobs:
python3 -m pip install --upgrade gym onnx peewee thop
python3 -m pip install sphinx==1.8.3 sphinx-argparse==0.2.5 sphinx-markdown-tables==0.0.9 sphinx-rtd-theme==0.4.2 sphinxcontrib-websupport==1.1.0 recommonmark==0.5.0 nbsphinx
sudo apt-get install swig -y
nnictl package install --name=SMAC
nnictl package install --name=BOHB
python3 -m pip install -e .[SMAC,BOHB]
displayName: Install extra dependencies
- script: |
......@@ -110,8 +109,7 @@ jobs:
python -m pip install keras==2.1.6
python -m pip install gym onnx peewee
sudo apt-get install swig -y
nnictl package install --name=SMAC
nnictl package install --name=BOHB
python -m pip install -e .[SMAC,BOHB]
displayName: Install extra dependencies
- script: |
......@@ -171,7 +169,7 @@ jobs:
brew install swig@3
rm -f /usr/local/bin/swig
ln -s /usr/local/opt/swig\@3/bin/swig /usr/local/bin/swig
nnictl package install --name=SMAC
python -m pip install -e .[SMAC]
displayName: Install extra dependencies
- script: |
......
......@@ -111,6 +111,14 @@ def _setup():
python_requires = '>=3.6',
install_requires = dependencies,
extras_require = {
'SMAC': [
'ConfigSpaceNNI @ git+https://github.com/QuanluZhang/ConfigSpace.git',
'smac @ git+https://github.com/QuanluZhang/SMAC3.git'
],
'BOHB': ['ConfigSpace==0.4.7', 'statsmodels==0.10.0'],
'PPOTuner': ['enum34', 'gym']
},
setup_requires = ['requests'],
entry_points = {
......@@ -158,6 +166,19 @@ def _find_node_files():
def _using_conda_or_virtual_environment():
return sys.prefix != sys.base_prefix or os.path.isdir(os.path.join(sys.prefix, 'conda-meta'))
def _copy_data_files():
    # After installation, nni needs to find this location in nni.tools.package_utils.get_registered_algo_config_path.
    # Since we can not import nni here, we need to ensure that get_registered_algo_config_path uses the same
    # logic as here to retrieve registered_algorithms.yml.
if _using_conda_or_virtual_environment():
nni_config_dir = os.path.join(sys.prefix, 'nni')
elif sys.platform == 'win32':
nni_config_dir = os.path.join(os.getenv('APPDATA'), 'nni')
else:
nni_config_dir = os.path.expanduser('~/.config/nni')
if not os.path.exists(nni_config_dir):
os.makedirs(nni_config_dir)
shutil.copyfile('./deployment/registered_algorithms.yml', os.path.join(nni_config_dir, 'registered_algorithms.yml'))
class BuildTs(Command):
description = 'build TypeScript modules'
......@@ -179,6 +200,7 @@ class Build(build):
sys.exit('Please set environment variable "NNI_RELEASE=<release_version>"')
if os.path.islink('nni_node/main.js'):
sys.exit('A development build already exists. Please uninstall NNI and run "python3 setup.py clean --all".')
_copy_data_files()
super().run()
class Develop(develop):
......@@ -204,6 +226,7 @@ class Develop(develop):
def run(self):
if not self.skip_ts:
setup_ts.build(release=None)
_copy_data_files()
super().run()
class Clean(clean):
......