"examples/trials/git@developer.sourcefind.cn:OpenDAS/nni.git" did not exist on "a0820dd5561e19b67cc93c19c4ef987a406ee14e"
Unverified commit f24c8380, authored by liuzhe-lz, committed by GitHub

Use "tuner" to config advisor (#4773)

parent c8cc5c62
@@ -38,7 +38,7 @@ As a general example, random tuner can be configured as follows:
     'x': {'_type': 'uniform', '_value': [0, 1]},
     'y': {'_type': 'choice', '_value': ['a', 'b', 'c']}
 }
-config.tuner.name = 'Random'
+config.tuner.name = 'random'
 config.tuner.class_args = {'seed': 0}
 
 Built-in Tuners
......
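With this change the documented sample uses the lower-cased builtin name. A minimal sketch of the surrounding quickstart flow, assuming NNI's Python experiment API (trial command and code directory omitted):

    from nni.experiment import Experiment

    experiment = Experiment('local')
    experiment.config.search_space = {
        'x': {'_type': 'uniform', '_value': [0, 1]},
        'y': {'_type': 'choice', '_value': ['a', 'b', 'c']}
    }
    # Builtin names are matched case-insensitively after this commit,
    # so 'random', 'Random', and 'RANDOM' all resolve to the same tuner.
    experiment.config.tuner.name = 'random'
    experiment.config.tuner.class_args = {'seed': 0}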
@@ -203,9 +203,9 @@ ExperimentConfig
    * - tunerGpuIndices
      - ``list[int]`` or ``str`` or ``int``, optional
-     - Limit the GPUs visible to tuner, assessor, and advisor.
+     - Limit the GPUs visible to tuner and assessor.
        This will be the ``CUDA_VISIBLE_DEVICES`` environment variable of tuner process.
-       Because tuner, assessor, and advisor run in the same process, this option will affect them all.
+       Because tuner and assessor run in the same process, this option will affect both of them.
    * - tuner
      - ``AlgorithmConfig``, optional
@@ -219,8 +219,7 @@ ExperimentConfig
    * - advisor
      - ``AlgorithmConfig``, optional
-     - Specify the advisor.
-       NNI provides two built-in advisors: :class:`BOHB <nni.algorithms.hpo.bohb_advisor.BOHB>` and :class:`Hyperband <nni.algorithms.hpo.hyperband_advisor.Hyperband>`.
+     - Deprecated, use ``tuner`` instead.
    * - trainingService
      - ``TrainingServiceConfig``
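Since ``advisor`` is now only a deprecated alias for ``tuner``, an existing YAML config migrates by renaming the key. An illustrative sketch (the BOHB arguments are taken from the docstring example later in this commit):

    # Deprecated spelling, still accepted and folded into "tuner":
    # advisor:
    #   name: BOHB
    #   classArgs:
    #     optimize_mode: maximize

    # Preferred spelling after this commit:
    tuner:
      name: BOHB
      classArgs:
        optimize_mode: maximize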
@@ -251,7 +250,7 @@ For customized algorithms, there are two ways to describe them:
    * - name
      - ``str`` or ``None``, optional
-     - Default: None. Name of the built-in or registered algorithm.
+     - Default: None. Name of the built-in or registered algorithm, case insensitive.
        ``str`` for the built-in and registered algorithm, ``None`` for other customized algorithms.
    * - className
......
@@ -7,8 +7,8 @@ import logging
 import json
 import base64
 
-from .runtime.common import enable_multi_thread
 from .runtime.msg_dispatcher import MsgDispatcher
+from .runtime.msg_dispatcher_base import MsgDispatcherBase
 from .tools.package_utils import create_builtin_class_instance, create_customized_class_instance
 
 logger = logging.getLogger('nni.main')
@@ -29,82 +29,50 @@ def main():
     exp_params = json.loads(exp_params_decode)
     logger.debug('exp_params json obj: [%s]', json.dumps(exp_params, indent=4))
 
-    if exp_params.get('deprecated', {}).get('multiThread'):
-        enable_multi_thread()
-
     if 'trainingServicePlatform' in exp_params:  # config schema is v1
         from .experiment.config.convert import convert_algo
-        for algo_type in ['tuner', 'assessor', 'advisor']:
+        for algo_type in ['tuner', 'assessor']:
             if algo_type in exp_params:
                 exp_params[algo_type] = convert_algo(algo_type, exp_params[algo_type])
+        if 'advisor' in exp_params:
+            exp_params['tuner'] = convert_algo('advisor', exp_params['advisor'])
 
-    if exp_params.get('advisor') is not None:
-        # advisor is enabled and starts to run
-        _run_advisor(exp_params)
-    else:
-        # tuner (and assessor) is enabled and starts to run
-        assert exp_params.get('tuner') is not None
-        tuner = _create_tuner(exp_params)
-        if exp_params.get('assessor') is not None:
-            assessor = _create_assessor(exp_params)
-        else:
-            assessor = None
-        dispatcher = MsgDispatcher(tuner, assessor)
+    assert exp_params.get('tuner') is not None
+    tuner = _create_algo(exp_params['tuner'], 'tuner')
+
+    if isinstance(tuner, MsgDispatcherBase):  # is advisor
+        logger.debug(f'Tuner {type(tuner).__name__} is advisor.')
+        if exp_params.get('assessor') is not None:
+            logger.error(f'Tuner {type(tuner).__name__} has built-in early stopping logic. Assessor is ignored.')
+        tuner.run()
+        return
+
+    if exp_params.get('assessor') is not None:
+        assessor = _create_algo(exp_params['assessor'], 'assessor')
+    else:
+        assessor = None
+    dispatcher = MsgDispatcher(tuner, assessor)
 
     try:
         dispatcher.run()
         tuner._on_exit()
         if assessor is not None:
             assessor._on_exit()
-    except Exception as exception:
-        logger.exception(exception)
+    except Exception:
         tuner._on_error()
         if assessor is not None:
             assessor._on_error()
         raise
 
-def _run_advisor(exp_params):
-    if exp_params.get('advisor').get('name'):
-        dispatcher = create_builtin_class_instance(
-            exp_params['advisor']['name'],
-            exp_params['advisor'].get('classArgs'),
-            'advisors')
-    else:
-        dispatcher = create_customized_class_instance(exp_params.get('advisor'))
-    if dispatcher is None:
-        raise AssertionError('Failed to create Advisor instance')
-    try:
-        dispatcher.run()
-    except Exception as exception:
-        logger.exception(exception)
-        raise
-
-def _create_tuner(exp_params):
-    if exp_params['tuner'].get('name'):
-        tuner = create_builtin_class_instance(
-            exp_params['tuner']['name'],
-            exp_params['tuner'].get('classArgs'),
-            'tuners')
-    else:
-        tuner = create_customized_class_instance(exp_params['tuner'])
-    if tuner is None:
-        raise AssertionError('Failed to create Tuner instance')
-    return tuner
-
-def _create_assessor(exp_params):
-    if exp_params['assessor'].get('name'):
-        assessor = create_builtin_class_instance(
-            exp_params['assessor']['name'],
-            exp_params['assessor'].get('classArgs'),
-            'assessors')
-    else:
-        assessor = create_customized_class_instance(exp_params['assessor'])
-    if assessor is None:
-        raise AssertionError('Failed to create Assessor instance')
-    return assessor
+def _create_algo(algo_config, algo_type):
+    if algo_config.get('name'):
+        algo = create_builtin_class_instance(algo_config['name'], algo_config.get('classArgs'), algo_type + 's')
+    else:
+        algo = create_customized_class_instance(algo_config)
+    if algo is None:
+        raise AssertionError(f'Failed to create {algo_type} instance')
+    return algo
 
 if __name__ == '__main__':
......
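The rewritten ``main()`` no longer branches on a separate ``advisor`` config entry; it creates whatever ``tuner`` names and inspects the resulting object's type. A small sketch of why the ``isinstance`` test works, using class paths registered elsewhere in this commit (importing them requires the corresponding optional dependencies):

    from nni.runtime.msg_dispatcher_base import MsgDispatcherBase
    from nni.algorithms.hpo.bohb_advisor import BOHB
    from nni.algorithms.hpo.evolution_tuner import EvolutionTuner

    # Advisors subclass MsgDispatcherBase and run their own message loop,
    # so main() calls tuner.run() directly and skips MsgDispatcher.
    assert issubclass(BOHB, MsgDispatcherBase)
    # Plain tuners do not, so main() wraps them in MsgDispatcher(tuner, assessor).
    assert not issubclass(EvolutionTuner, MsgDispatcherBase)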
@@ -70,7 +70,7 @@ class BatchTuner(Tuner):
             ]
         }
     }
 
-    config.tuner.name = 'BatchTuner'
+    config.tuner.name = 'Batch'
     """
 
     def __init__(self):
......
@@ -272,8 +272,8 @@ class BOHB(MsgDispatcherBase):
     .. code-block::
 
-        config.advisor.name = 'BOHB'
-        config.advisor.class_args = {
+        config.tuner.name = 'BOHB'
+        config.tuner.class_args = {
             'optimize_mode': 'maximize',
             'min_budget': 1,
             'max_budget': 27,
......
@@ -41,7 +41,7 @@ class GPClassArgsValidator(ClassArgsValidator):
 class GPTuner(Tuner):
     """
-    GPTuner is a Bayesian Optimization method where Gaussian Process
+    GP tuner is a Bayesian Optimization method where Gaussian Process
     is used for modeling loss functions.
 
     Bayesian optimization works by constructing a posterior distribution of functions
@@ -50,7 +50,7 @@ class GPTuner(Tuner):
     and the algorithm becomes more certain of which regions in parameter space
     are worth exploring and which are not.
 
-    GPTuner is designed to minimize/maximize the number of steps required to find
+    GP tuner is designed to minimize/maximize the number of steps required to find
     a combination of parameters that are close to the optimal combination.
     To do so, this method uses a proxy optimization problem (finding the maximum of
     the acquisition function) that, albeit still a hard problem, is cheaper
@@ -70,7 +70,7 @@ class GPTuner(Tuner):
     .. code-block::
 
-        config.tuner.name = 'GPTuner'
+        config.tuner.name = 'GP'
         config.tuner.class_args = {
             'optimize_mode': 'maximize',
             'utility': 'ei',
......
@@ -284,8 +284,8 @@ class Hyperband(MsgDispatcherBase):
     .. code-block::
 
-        config.advisor.name = 'Hyperband'
-        config.advisor.class_args = {
+        config.tuner.name = 'Hyperband'
+        config.tuner.class_args = {
             'optimize_mode': 'maximize',
             'R': 60,
             'eta': 3
......
@@ -81,7 +81,7 @@ class MetisTuner(Tuner):
     .. code-block::
 
-        config.tuner.name = 'MetisTuner'
+        config.tuner.name = 'Metis'
         config.tuner.class_args = {
             'optimize_mode': 'maximize'
         }
......
@@ -179,14 +179,14 @@ class PBTTuner(Tuner):
     .. image:: ../../img/pbt.jpg
 
-    PBTTuner initializes a population with several trials (i.e., ``population_size``).
+    PBT tuner initializes a population with several trials (i.e., ``population_size``).
     There are four steps in the above figure, each trial only runs by one step. How long is one step is controlled by trial code,
-    e.g., one epoch. When a trial starts, it loads a checkpoint specified by PBTTuner and continues to run one step,
-    then saves checkpoint to a directory specified by PBTTuner and exits.
+    e.g., one epoch. When a trial starts, it loads a checkpoint specified by PBT tuner and continues to run one step,
+    then saves checkpoint to a directory specified by PBT tuner and exits.
     The trials in a population run steps synchronously, that is, after all the trials finish the ``i``-th step,
     the ``(i+1)``-th step can be started. Exploitation and exploration of PBT are executed between two consecutive steps.
 
-    Two important steps to follow if you are trying to use PBTTuner:
+    Two important steps to follow if you are trying to use PBT tuner:
 
     1. **Provide checkpoint directory**. Since some trials need to load other trial's checkpoint,
     users should provide a directory (i.e., ``all_checkpoint_dir``) which is accessible by every trial.
@@ -196,7 +196,7 @@ class PBTTuner(Tuner):
     to provide a directory in a shared storage, such as NFS, Azure storage.
 
     2. **Modify your trial code**. Before running a step, a trial needs to load a checkpoint,
-    the checkpoint directory is specified in hyper-parameter configuration generated by PBTTuner,
+    the checkpoint directory is specified in hyper-parameter configuration generated by PBT tuner,
     i.e., ``params['load_checkpoint_dir']``. Similarly, the directory for saving checkpoint is also included in the configuration,
     i.e., ``params['save_checkpoint_dir']``. Here, ``all_checkpoint_dir`` is base folder of ``load_checkpoint_dir``
     and ``save_checkpoint_dir`` whose format is ``all_checkpoint_dir/<population-id>/<step>``.
@@ -238,12 +238,12 @@ class PBTTuner(Tuner):
     Examples
     --------
 
-    Below is an example of PBTTuner configuration in experiment config file.
+    Below is an example of PBT tuner configuration in experiment config file.
 
     .. code-block:: yaml
 
         tuner:
-            name: PBTTuner
+            name: PBT
             classArgs:
                 optimize_mode: maximize
                 all_checkpoint_dir: /the/path/to/store/checkpoints
@@ -251,7 +251,7 @@ class PBTTuner(Tuner):
     Notes
     -----
 
-    Assessor is not allowed if PBTTuner is used.
+    Assessor is not allowed if PBT tuner is used.
     """
 
     def __init__(self, optimize_mode="maximize", all_checkpoint_dir=None, population_size=10, factor=0.2,
......
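A hedged sketch of the trial-side protocol described in the docstring above; the checkpoint file name and the training/saving calls are placeholders, and only the two directory parameters come from PBT tuner:

    import os
    import nni

    params = nni.get_next_parameter()

    # Resume from the checkpoint PBT tuner selected (possibly another trial's).
    ckpt = os.path.join(params['load_checkpoint_dir'], 'model.ckpt')
    if os.path.exists(ckpt):
        pass  # e.g. model.load_state_dict(torch.load(ckpt))

    # ... run exactly one step (e.g. one epoch) using the remaining params ...
    nni.report_final_result(0.0)  # placeholder metric

    # Save where the next step will look for it.
    os.makedirs(params['save_checkpoint_dir'], exist_ok=True)
    # e.g. torch.save(model.state_dict(),
    #                 os.path.join(params['save_checkpoint_dir'], 'model.ckpt'))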
@@ -2,7 +2,7 @@
 # Licensed under the MIT license.
 
 """
-Config classes for tuner/assessor/advisor algorithms.
+Config classes for tuner and assessor algorithms.
 
 Use ``AlgorithmConfig`` to specify a built-in algorithm;
 use ``CustomAlgorithmConfig`` to specify a custom algorithm.
......
@@ -113,6 +113,11 @@ class ExperimentConfig(ConfigBase):
             if algo is not None and algo.name == '_none_':
                 setattr(self, algo_type, None)
 
+        if self.advisor is not None:
+            assert self.tuner is None, '"advisor" is deprecated. You should only set "tuner".'
+            self.tuner = self.advisor
+            self.advisor = None
+
         super()._canonicalize([self])
 
         if self.search_space_file is not None:
@@ -161,9 +166,8 @@ class ExperimentConfig(ConfigBase):
         utils.validate_gpu_indices(self.tuner_gpu_indices)
 
-        tuner_cnt = (self.tuner is not None) + (self.advisor is not None)
-        if tuner_cnt != 1:
-            raise ValueError('ExperimentConfig: tuner and advisor must be set one')
+        if self.tuner is None:
+            raise ValueError('ExperimentConfig: tuner must be set')
 
 def _load_search_space_file(search_space_path):
     # FIXME
......
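The folding rule added to ``_canonicalize`` above can be restated as a standalone sketch (plain dicts instead of ``AlgorithmConfig``, for illustration only):

    def fold_advisor(tuner, advisor):
        # A deprecated 'advisor' entry simply becomes the 'tuner'.
        if advisor is not None:
            assert tuner is None, '"advisor" is deprecated. You should only set "tuner".'
            return advisor, None
        return tuner, advisor

    tuner, advisor = fold_advisor(tuner=None, advisor={'name': 'BOHB'})
    assert tuner == {'name': 'BOHB'} and advisor is None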
-advisors:
-  - builtinName: Hyperband
-    classArgsValidator: nni.algorithms.hpo.hyperband_advisor.HyperbandClassArgsValidator
-    className: nni.algorithms.hpo.hyperband_advisor.Hyperband
-    source: nni
-  - builtinName: BOHB
-    classArgsValidator: nni.algorithms.hpo.bohb_advisor.BOHBClassArgsValidator
-    className: nni.algorithms.hpo.bohb_advisor.BOHB
-    source: nni
 assessors:
   - builtinName: Medianstop
     classArgsValidator: nni.algorithms.hpo.medianstop_assessor.MedianstopClassArgsValidator
@@ -17,7 +8,8 @@ assessors:
     className: nni.algorithms.hpo.curvefitting_assessor.CurvefittingAssessor
     source: nni
 tuners:
-  - builtinName: PPOTuner
+  - alias: PPOTuner
+    builtinName: PPO
     classArgsValidator: nni.algorithms.hpo.ppo_tuner.PPOClassArgsValidator
     className: nni.algorithms.hpo.ppo_tuner.PPOTuner
     source: nni
@@ -49,7 +41,8 @@ tuners:
     className: nni.algorithms.hpo.evolution_tuner.EvolutionTuner
     source: nni
   - acceptClassArgs: false
-    builtinName: BatchTuner
+    alias: BatchTuner
+    builtinName: Batch
     className: nni.algorithms.hpo.batch_tuner.BatchTuner
     source: nni
   - acceptClassArgs: false
@@ -60,15 +53,18 @@ tuners:
     classArgsValidator: nni.algorithms.hpo.networkmorphism_tuner.NetworkMorphismClassArgsValidator
     className: nni.algorithms.hpo.networkmorphism_tuner.NetworkMorphismTuner
     source: nni
-  - builtinName: MetisTuner
+  - alias: MetisTuner
+    builtinName: Metis
     classArgsValidator: nni.algorithms.hpo.metis_tuner.MetisClassArgsValidator
     className: nni.algorithms.hpo.metis_tuner.MetisTuner
     source: nni
-  - builtinName: GPTuner
+  - alias: GPTuner
+    builtinName: GP
     classArgsValidator: nni.algorithms.hpo.gp_tuner.GPClassArgsValidator
     className: nni.algorithms.hpo.gp_tuner.GPTuner
     source: nni
-  - builtinName: PBTTuner
+  - alias: PBTTuner
+    builtinName: PBT
     classArgsValidator: nni.algorithms.hpo.pbt_tuner.PBTClassArgsValidator
     className: nni.algorithms.hpo.pbt_tuner.PBTTuner
     source: nni
@@ -76,7 +72,18 @@ tuners:
     classArgsValidator: nni.algorithms.hpo.regularized_evolution_tuner.EvolutionClassArgsValidator
     className: nni.algorithms.hpo.regularized_evolution_tuner.RegularizedEvolutionTuner
     source: nni
-  - builtinName: DNGOTuner
+  - alias: DNGOTuner
+    builtinName: DNGO
     classArgsValidator: nni.algorithms.hpo.dngo_tuner.DNGOClassArgsValidator
     className: nni.algorithms.hpo.dngo_tuner.DNGOTuner
     source: nni
+  - builtinName: Hyperband
+    classArgsValidator: nni.algorithms.hpo.hyperband_advisor.HyperbandClassArgsValidator
+    className: nni.algorithms.hpo.hyperband_advisor.Hyperband
+    isAdvisor: true
+    source: nni
+  - builtinName: BOHB
+    classArgsValidator: nni.algorithms.hpo.bohb_advisor.BOHBClassArgsValidator
+    className: nni.algorithms.hpo.bohb_advisor.BOHB
+    isAdvisor: true
+    source: nni
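Effect of the new ``alias`` entries on user configs, as an illustrative YAML fragment: the legacy class-style names keep working while the short names become canonical.

    tuner:
      name: PBTTuner    # legacy alias, still accepted
    # is equivalent to
    # tuner:
    #   name: pbt       # canonical name, matched case-insensitively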
@@ -6,11 +6,7 @@ import logging
 import os
 
 from schema import And, Optional, Or, Regex, Schema, SchemaError
-from nni.tools.package_utils.tuner_factory import (
-    create_validator_instance,
-    get_all_builtin_names,
-    get_registered_algo_meta,
-)
+from nni.tools.package_utils.tuner_factory import create_validator_instance
 
 from .common_utils import get_yml_content, print_warning
 from .constants import SCHEMA_PATH_ERROR, SCHEMA_RANGE_ERROR, SCHEMA_TYPE_ERROR
@@ -73,16 +69,13 @@ class AlgoSchema:
         }
         self.builtin_name_schema = {}
         for k, n in self.builtin_keys.items():
-            self.builtin_name_schema[k] = {Optional(n): setChoice(n, *get_all_builtin_names(k+'s'))}
+            self.builtin_name_schema[k] = {Optional(n): setType(n, str)}
 
         self.customized_keys = set(['codeDir', 'classFileName', 'className'])
 
     def validate_class_args(self, class_args, algo_type, builtin_name):
         if not builtin_name or not class_args:
             return
-        meta = get_registered_algo_meta(builtin_name, algo_type+'s')
-        if meta and 'acceptClassArgs' in meta and meta['acceptClassArgs'] == False:
-            raise SchemaError('classArgs is not allowed.')
 
         logging.getLogger('nni.protocol').setLevel(logging.ERROR)  # we know IPC is not there, don't complain
         validator = create_validator_instance(algo_type+'s', builtin_name)
......
 # Copyright (c) Microsoft Corporation.
 # Licensed under the MIT license.
 
+from __future__ import annotations
+
 __all__ = ['AlgoMeta']
 
-from typing import Dict, NamedTuple, Optional
+from typing import NamedTuple
+
+from nni.typehint import Literal
 
 class AlgoMeta(NamedTuple):
     name: str
-    class_name: Optional[str]
+    alias: str | None
+    class_name: str | None
     accept_class_args: bool
-    class_args: Optional[dict]
-    validator_class_name: Optional[str]
-    algo_type: str  # 'tuner' | 'assessor' | 'advisor'
+    class_args: dict | None
+    validator_class_name: str | None
+    algo_type: Literal['tuner', 'assessor']
+    is_advisor: bool
     is_builtin: bool
-    nni_version: Optional[str]
+    nni_version: str | None
 
     @staticmethod
-    def load(meta: Dict, algo_type: Optional[str] = None) -> 'AlgoMeta':
+    def load(meta: dict, algo_type: Literal['tuner', 'assessor', 'advisor'] | None = None) -> AlgoMeta:
         if algo_type is None:
-            algo_type = meta['algoType']
+            algo_type = meta['algoType']  # type: ignore
         return AlgoMeta(
-            name=meta['builtinName'],
-            class_name=meta['className'],
-            accept_class_args=meta.get('acceptClassArgs', True),
-            class_args=meta.get('classArgs'),
-            validator_class_name=meta.get('classArgsValidator'),
-            algo_type=algo_type,
-            is_builtin=(meta.get('source') == 'nni'),
-            nni_version=meta.get('nniVersion')
+            name = meta['builtinName'],
+            alias = meta.get('alias'),
+            class_name = meta['className'],
+            accept_class_args = meta.get('acceptClassArgs', True),
+            class_args = meta.get('classArgs'),
+            validator_class_name = meta.get('classArgsValidator'),
+            algo_type = ('assessor' if algo_type == 'assessor' else 'tuner'),
+            is_advisor = meta.get('isAdvisor', algo_type == 'advisor'),
+            is_builtin = (meta.get('source') == 'nni'),
+            nni_version = meta.get('nniVersion')
         )
 
-    def dump(self) -> Dict:
+    def dump(self) -> dict:
         ret = {}
         ret['builtinName'] = self.name
+        if self.alias is not None:
+            ret['alias'] = self.alias
         ret['className'] = self.class_name
         if not self.accept_class_args:
             ret['acceptClassArgs'] = False
@@ -40,6 +50,8 @@ class AlgoMeta(NamedTuple):
         ret['classArgs'] = self.class_args
         if self.validator_class_name is not None:
             ret['classArgsValidator'] = self.validator_class_name
+        if self.is_advisor:
+            ret['isAdvisor'] = True
         ret['source'] = 'nni' if self.is_builtin else 'user'
         if self.nni_version is not None:
             ret['nniVersion'] = self.nni_version
......
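An illustrative round trip of the new ``alias``/``isAdvisor`` fields through ``AlgoMeta.load()`` and ``dump()``, using the Hyperband entry from the registry above (the module path is inferred from the imports in this diff):

    from nni.tools.package_utils.common import AlgoMeta

    meta = AlgoMeta.load({
        'builtinName': 'Hyperband',
        'classArgsValidator': 'nni.algorithms.hpo.hyperband_advisor.HyperbandClassArgsValidator',
        'className': 'nni.algorithms.hpo.hyperband_advisor.Hyperband',
        'isAdvisor': True,
        'source': 'nni',
    }, 'tuner')
    # Advisors are now just tuners with a flag, not a third algo_type.
    assert meta.algo_type == 'tuner' and meta.is_advisor and meta.is_builtin
    assert meta.dump()['isAdvisor'] is True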
 # Copyright (c) Microsoft Corporation.
 # Licensed under the MIT license.
 
+from __future__ import annotations
+
 __all__ = [
     'get_algo_meta',
     'get_all_algo_meta',
@@ -9,24 +11,26 @@ __all__ = [
 ]
 
 from collections import defaultdict
-from typing import List, Optional
 
 import yaml
 
 from nni.runtime.config import get_builtin_config_file, get_config_file
 from .common import AlgoMeta
 
-def get_algo_meta(name: AlgoMeta) -> Optional[AlgoMeta]:
+def get_algo_meta(name: str) -> AlgoMeta | None:
     """
     Get meta information of a built-in or registered algorithm.
     Return None if not found.
     """
+    name = name.lower()
     for algo in get_all_algo_meta():
-        if algo.name == name:
+        if algo.name.lower() == name:
+            return algo
+        if algo.alias is not None and algo.alias.lower() == name:
             return algo
     return None
 
-def get_all_algo_meta() -> List[AlgoMeta]:
+def get_all_algo_meta() -> list[AlgoMeta]:
     """
     Get meta information of all built-in and registered algorithms.
     """
@@ -64,7 +68,7 @@ def _load_config_file(path):
     algos = []
     for algo_type in ['tuner', 'assessor', 'advisor']:
         for algo in config.get(algo_type + 's', []):
-            algos.append(AlgoMeta.load(algo, algo_type))
+            algos.append(AlgoMeta.load(algo, algo_type))  # type: ignore
     return algos
 
 def _save_custom_config(custom_algos):
......
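With the lookup above, any registered spelling resolves to the same metadata. A sketch assuming an installed NNI that includes this commit's default registry:

    from nni.tools.package_utils.config_manager import get_algo_meta

    assert get_algo_meta('pbt') == get_algo_meta('PBTTuner')  # name vs. legacy alias
    assert get_algo_meta('bohb').is_advisor                   # case-insensitive match
    assert get_algo_meta('no-such-algo') is None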
 # Copyright (c) Microsoft Corporation.
 # Licensed under the MIT license.
 
+from __future__ import annotations
+
 __all__ = [
     'create_builtin_class_instance',
     'create_customized_class_instance',
@@ -9,38 +11,23 @@ __all__ = [
 ]
 
 import importlib
 import os
 import sys
+from typing import Any
 
+from nni.typehint import Literal
 from . import config_manager
 
-ALGO_TYPES = ['tuners', 'assessors', 'advisors']
+ALGO_TYPES = ['tuners', 'assessors']
 
-def get_all_builtin_names(algo_type):
-    """Get all builtin names of registered algorithms of specified type
-
-    Parameters
-    ----------
-    algo_type: str
-        can be one of 'tuners', 'assessors' or 'advisors'
-
-    Returns: list of string
-    -------
-    All builtin names of specified type, for example, if algo_type is 'tuners', returns
-    all builtin tuner names.
-    """
+def _get_all_builtin_names(algo_type: Literal['tuners', 'assessors']) -> list[str]:
     algos = config_manager.get_all_algo_meta()
-    return [meta.name for meta in algos if meta.algo_type == algo_type.rstrip('s')]
+    algos = [meta for meta in algos if meta.algo_type + 's' == algo_type]
+    names = [meta.name for meta in algos] + [meta.alias for meta in algos if meta.alias is not None]
+    return [name.lower() for name in names]
 
-def get_registered_algo_meta(builtin_name, algo_type=None):
+def _get_registered_algo_meta(builtin_name: str) -> dict | None:
     """ Get meta information of registered algorithms.
 
-    Parameters
-    ----------
-    builtin_name: str
-        builtin name.
-    algo_type: str | None
-        can be one of 'tuners', 'assessors', 'advisors' or None
-
-    Returns: dict | None
+    Returns
     -------
     Returns meta information of specified builtin algorithms, for example:
     {
@@ -52,8 +39,6 @@ def get_registered_algo_meta(builtin_name, algo_type=None):
     algo = config_manager.get_algo_meta(builtin_name)
     if algo is None:
         return None
-    if algo_type is not None and algo.algo_type != algo_type.rstrip('s'):
-        return None
     return algo.dump()
 
 def parse_full_class_name(full_class_name):
@@ -69,7 +54,7 @@ def get_builtin_module_class_name(algo_type, builtin_name):
     Parameters
     ----------
     algo_type: str
-        can be one of 'tuners', 'assessors', 'advisors'
+        can be one of 'tuners', 'assessors'
     builtin_name: str
         builtin name.
@@ -79,7 +64,7 @@ def get_builtin_module_class_name(algo_type, builtin_name):
     """
     assert algo_type in ALGO_TYPES
     assert builtin_name is not None
-    meta = get_registered_algo_meta(builtin_name, algo_type)
+    meta = _get_registered_algo_meta(builtin_name)
     if not meta:
         return None, None
     return parse_full_class_name(meta['className'])
@@ -90,7 +75,7 @@ def create_validator_instance(algo_type, builtin_name):
     Parameters
     ----------
     algo_type: str
-        can be one of 'tuners', 'assessors', 'advisors'
+        can be one of 'tuners', 'assessors'
     builtin_name: str
         builtin name.
@@ -101,16 +86,20 @@ def create_validator_instance(algo_type, builtin_name):
     """
     assert algo_type in ALGO_TYPES
     assert builtin_name is not None
-    meta = get_registered_algo_meta(builtin_name, algo_type)
+    meta = _get_registered_algo_meta(builtin_name)
     if not meta or 'classArgsValidator' not in meta:
         return None
     module_name, class_name = parse_full_class_name(meta['classArgsValidator'])
+    assert module_name is not None
     class_module = importlib.import_module(module_name)
     class_constructor = getattr(class_module, class_name)
     return class_constructor()
 
-def create_builtin_class_instance(builtin_name, input_class_args, algo_type):
+def create_builtin_class_instance(
+        builtin_name: str,
+        input_class_args: dict,
+        algo_type: Literal['tuners', 'assessors']) -> Any:
     """Create instance of builtin algorithms
 
     Parameters
@@ -120,14 +109,15 @@ def create_builtin_class_instance(builtin_name, input_class_args, algo_type):
     input_class_args: dict
         kwargs for builtin class constructor
     algo_type: str
-        can be one of 'tuners', 'assessors', 'advisors'
+        can be one of 'tuners', 'assessors'
 
     Returns: object
     -------
     Returns builtin class instance.
     """
     assert algo_type in ALGO_TYPES
-    if builtin_name not in get_all_builtin_names(algo_type):
+    builtin_name = builtin_name.lower()
+    if builtin_name not in _get_all_builtin_names(algo_type):
         raise RuntimeError('Builtin name is not found: {}'.format(builtin_name))
 
     def parse_algo_meta(algo_meta, input_class_args):
@@ -150,10 +140,11 @@ def create_builtin_class_instance(builtin_name, input_class_args, algo_type):
         return module_name, class_name, class_args
 
-    algo_meta = get_registered_algo_meta(builtin_name, algo_type)
+    algo_meta = _get_registered_algo_meta(builtin_name)
     module_name, class_name, class_args = parse_algo_meta(algo_meta, input_class_args)
+    assert module_name is not None
 
-    if importlib.util.find_spec(module_name) is None:
+    if importlib.util.find_spec(module_name) is None:  # type: ignore
         raise RuntimeError('Builtin module can not be loaded: {}'.format(module_name))
 
     class_module = importlib.import_module(module_name)
......
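A usage sketch of the factory after this change; builtin names may now be given in any casing or via a legacy alias:

    from nni.tools.package_utils.tuner_factory import create_builtin_class_instance

    # 'batch', 'Batch', and the legacy alias 'BatchTuner' all resolve identically;
    # Batch accepts no class args, so None is passed (as main() does for missing classArgs).
    tuner = create_builtin_class_instance('batch', None, 'tuners')
    print(type(tuner).__name__)  # BatchTuner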
@@ -8,7 +8,11 @@
     "nni/nas",
     "nni/retiarii",
     "nni/smartparam.py",
-    "nni/tools"
+    "nni/tools/annotation",
+    "nni/tools/gpu_tool",
+    "nni/tools/jupyter_extension",
+    "nni/tools/nnictl",
+    "nni/tools/trial_tool"
   ],
   "reportMissingImports": false
 }
@@ -13,8 +13,8 @@ logLevel: warning
 tunerGpuIndices: 0
 assessor:
   name: assess
-advisor:
-  className: Advisor
+tuner:
+  className: Tuner
   codeDirectory: .
   classArgs: {random_seed: 0}
 trainingService:
......
@@ -72,8 +72,8 @@ detailed_canon = {
     'assessor': {
         'name': 'assess',
     },
-    'advisor': {
-        'className': 'Advisor',
+    'tuner': {
+        'className': 'Tuner',
         'codeDirectory': expand_path('assets'),
         'classArgs': {'random_seed': 0},
     },
......