Unverified Commit 5136a86d authored by liuzhe-lz, committed by GitHub

Typehint and copyright header (#4669)

parent 68347c5e
@@ -5,6 +5,7 @@ ipython
jupyterlab
nbsphinx
pylint
pyright
pytest
pytest-azurepipelines
pytest-cov
......
@@ -19,5 +19,5 @@ scikit-learn >= 0.24.1
scipy < 1.8 ; python_version < "3.8"
scipy ; python_version >= "3.8"
typeguard
typing_extensions ; python_version < "3.8"
typing_extensions >= 4.0.0 ; python_version < "3.8"
websockets >= 10.1
Uncategorized Modules
=====================

nni.typehint
------------

.. automodule:: nni.typehint
   :members:

Others
======

nni
---

nni.common
----------

nni.utils
---------
@@ -9,4 +9,4 @@ API Reference
Model Compression <compression>
Feature Engineering <./python_api/feature_engineering>
Experiment <experiment>
Others <./python_api/others>
Others <others>
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from .bohb_advisor import BOHB, BOHBClassArgsValidator
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import logging
import warnings
......
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from .gp_tuner import GPTuner, GPClassArgsValidator
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from .metis_tuner import MetisTuner, MetisClassArgsValidator
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from .networkmorphism_tuner import NetworkMorphismTuner, NetworkMorphismClassArgsValidator
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from .ppo_tuner import PPOTuner, PPOClassArgsValidator
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import copy
import logging
import random
......
@@ -8,10 +8,13 @@ to tell whether this trial can be early stopped or not.
See :class:`Assessor`'s specification and ``docs/en_US/assessors.rst`` for details.
"""
from __future__ import annotations
from enum import Enum
import logging
from .recoverable import Recoverable
from .typehint import TrialMetric
__all__ = ['AssessResult', 'Assessor']
@@ -54,7 +57,7 @@ class Assessor(Recoverable):
:class:`~nni.algorithms.hpo.curvefitting_assessor.CurvefittingAssessor`
"""
def assess_trial(self, trial_job_id, trial_history):
def assess_trial(self, trial_job_id: str, trial_history: list[TrialMetric]) -> AssessResult:
"""
Abstract method for determining whether a trial should be killed. Must override.
@@ -91,7 +94,7 @@ class Assessor(Recoverable):
"""
raise NotImplementedError('Assessor: assess_trial not implemented')
def trial_end(self, trial_job_id, success):
def trial_end(self, trial_job_id: str, success: bool) -> None:
"""
Abstract method invoked when a trial is completed or terminated. Do nothing by default.
@@ -103,22 +106,22 @@ class Assessor(Recoverable):
True if the trial successfully completed; False if failed or terminated.
"""
def load_checkpoint(self):
def load_checkpoint(self) -> None:
"""
Internal API under revision; not recommended for end users.
"""
checkpoint_path = self.get_checkpoint_path()
_logger.info('Load checkpoint ignored by assessor, checkpoint path: %s', checkpoint_path)
def save_checkpoint(self):
def save_checkpoint(self) -> None:
"""
Internal API under revision; not recommended for end users.
"""
checkpoint_path = self.get_checkpoint_path()
_logger.info('Save checkpoint ignored by assessor, checkpoint path: %s', checkpoint_path)
def _on_exit(self):
def _on_exit(self) -> None:
pass
def _on_error(self):
def _on_error(self) -> None:
pass
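The annotated signatures pin down the subclassing contract: `assess_trial` receives a trial's metric history and returns an `AssessResult`. A minimal sketch of a custom assessor under these hints; the halving stop rule is illustrative, not one of NNI's built-in assessors:

```python
from __future__ import annotations

from nni.assessor import Assessor, AssessResult
from nni.typehint import TrialMetric

class HalvingAssessor(Assessor):
    """Illustrative rule: stop a trial whose latest metric drops below half its best."""

    def assess_trial(self, trial_job_id: str, trial_history: list[TrialMetric]) -> AssessResult:
        if len(trial_history) < 3:
            return AssessResult.Good  # too few reports to judge
        if trial_history[-1] < 0.5 * max(trial_history):
            return AssessResult.Bad   # ask NNI to early-stop this trial
        return AssessResult.Good

    def trial_end(self, trial_job_id: str, success: bool) -> None:
        pass  # no per-trial cleanup needed
```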
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from .serializer import trace, dump, load, is_traceable
@@ -2,6 +2,8 @@
# Licensed under the MIT license.
"""
Helper class and functions for tuners to deal with search space.
This script provides a more program-friendly representation of the HPO search space.
The format is considered an internal helper and is not visible to end users.
@@ -9,8 +11,16 @@ You will find this useful when you want to support nested search space.
The random tuner is an intuitive example of this utility.
You should check its code before reading the docstrings in this file.
.. attention::
This module does not guarantee forward-compatibility.
If you want to use it outside the official NNI repo, it is recommended to copy the script.
"""
from __future__ import annotations
__all__ = [
'ParameterSpec',
'deformat_parameters',
@@ -20,10 +30,16 @@ __all__ = [
import math
from types import SimpleNamespace
from typing import Any, List, NamedTuple, Optional, Tuple
from typing import Any, Dict, NamedTuple, Tuple, cast
import numpy as np
from nni.typehint import Parameters, SearchSpace
ParameterKey = Tuple['str | int', ...]
FormattedParameters = Dict[ParameterKey, 'float | int']
FormattedSearchSpace = Dict[ParameterKey, 'ParameterSpec']
class ParameterSpec(NamedTuple):
"""
Specification (aka space / range / domain) of one single parameter.
@@ -33,29 +49,31 @@ class ParameterSpec(NamedTuple):
name: str # The object key in JSON
type: str # "_type" in JSON
values: List[Any] # "_value" in JSON
values: list[Any] # "_value" in JSON
key: Tuple[str] # The "path" of this parameter
key: ParameterKey # The "path" of this parameter
categorical: bool # Whether this parameter is categorical (unordered) or numerical (ordered)
size: int = None # If it's categorical, how many candidates it has
size: int = cast(int, None) # If it's categorical, how many candidates it has
# uniform distributed
low: float = None # Lower bound of uniform parameter
high: float = None # Upper bound of uniform parameter
low: float = cast(float, None) # Lower bound of uniform parameter
high: float = cast(float, None) # Upper bound of uniform parameter
normal_distributed: bool = None # Whether this parameter is uniform or normal distributed
mu: float = None # µ of normal parameter
sigma: float = None # σ of normal parameter
normal_distributed: bool = cast(bool, None)
# Whether this parameter is uniform or normal distributed
mu: float = cast(float, None) # µ of normal parameter
sigma: float = cast(float, None)  # σ of normal parameter
q: Optional[float] = None # If not `None`, the parameter value should be an integer multiple of this
clip: Optional[Tuple[float, float]] = None
q: float | None = None # If not `None`, the parameter value should be an integer multiple of this
clip: tuple[float, float] | None = None
# For q(log)uniform, this equals "values[:2]"; for others it is None
log_distributed: bool = None # Whether this parameter is log distributed
log_distributed: bool = cast(bool, None)
# Whether this parameter is log distributed
# When true, low/high/mu/sigma describes log of parameter value (like np.lognormal)
def is_activated_in(self, partial_parameters):
def is_activated_in(self, partial_parameters: FormattedParameters) -> bool:
"""
For nested search space, check whether this parameter should be skipped for the current set of parameters.
This function must be used in a pattern similar to the random tuner's. Otherwise it will misbehave.
@@ -64,7 +82,7 @@
return True
return partial_parameters[self.key[:-2]] == self.key[-2]
def format_search_space(search_space):
def format_search_space(search_space: SearchSpace) -> FormattedSearchSpace:
"""
Convert a user-provided search space into a dict of ParameterSpec.
Each dict key is the corresponding value's `ParameterSpec.key`.
@@ -76,7 +94,9 @@ def format_search_space(search_space):
# Remove these comments when we drop 3.6 support.
return {spec.key: spec for spec in formatted}
def deformat_parameters(formatted_parameters, formatted_search_space):
def deformat_parameters(
formatted_parameters: FormattedParameters,
formatted_search_space: FormattedSearchSpace) -> Parameters:
"""
Convert internal format parameters to users' expected format.
@@ -88,10 +108,11 @@ def deformat_parameters(formatted_parameters, formatted_search_space):
3. For "q*", convert x to `round(x / q) * q`, then clip into range.
4. For nested choices, convert flattened key-value pairs into a nested structure.
"""
ret = {}
ret: Parameters = {}
for key, x in formatted_parameters.items():
spec = formatted_search_space[key]
if spec.categorical:
x = cast(int, x)
if spec.type == 'randint':
lower = min(math.ceil(float(x)) for x in spec.values)
_assign(ret, key, int(lower + x))
@@ -112,7 +133,7 @@ def deformat_parameters(formatted_parameters, formatted_search_space):
_assign(ret, key, x)
return ret
def format_parameters(parameters, formatted_search_space):
def format_parameters(parameters: Parameters, formatted_search_space: FormattedSearchSpace) -> FormattedParameters:
"""
Convert end users' parameter format back to internal format, mainly for resuming experiments.
@@ -123,7 +144,7 @@ def format_parameters(parameters, formatted_search_space):
for key, spec in formatted_search_space.items():
if not spec.is_activated_in(ret):
continue
value = parameters
value: Any = parameters
for name in key:
if isinstance(name, str):
value = value[name]
@@ -142,8 +163,8 @@ def format_parameters(parameters, formatted_search_space):
ret[key] = value
return ret
def _format_search_space(parent_key, space):
formatted = []
def _format_search_space(parent_key: ParameterKey, space: SearchSpace) -> list[ParameterSpec]:
formatted: list[ParameterSpec] = []
for name, spec in space.items():
if name == '_name':
continue
@@ -155,7 +176,7 @@ def _format_search_space(parent_key, space):
formatted += _format_search_space(key, sub_space)
return formatted
def _format_parameter(key, type_, values):
def _format_parameter(key: ParameterKey, type_: str, values: list[Any]):
spec = SimpleNamespace(
name = key[-1],
type = type_,
@@ -197,7 +218,7 @@ def _format_parameter(key, type_, values):
return ParameterSpec(**spec.__dict__)
def _is_nested_choices(values):
def _is_nested_choices(values: list[Any]) -> bool:
assert values # choices should not be empty
for value in values:
if not isinstance(value, dict):
@@ -206,9 +227,9 @@ def _is_nested_choices(values):
return False
return True
def _assign(params, key, x):
def _assign(params: Parameters, key: ParameterKey, x: Any) -> None:
if len(key) == 1:
params[key[0]] = x
params[cast(str, key[0])] = x
elif isinstance(key[0], int):
_assign(params, key[1:], x)
else:
......
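To orient readers of the new aliases (`ParameterKey`, `FormattedParameters`, `FormattedSearchSpace`), here is a hedged sketch of the round trip these helpers implement, following the pattern the docstring attributes to the random tuner. The import path is assumed from the NNI source tree, and the search space is a made-up example:

```python
import random

# Import path assumed; the file's own docstring warns that this helper
# is internal and not forward-compatible.
from nni.common.hpo_utils import format_search_space, deformat_parameters

space = {
    'lr': {'_type': 'loguniform', '_value': [1e-5, 1e-1]},
    'optimizer': {'_type': 'choice', '_value': ['sgd', 'adam']},
}

# FormattedSearchSpace: ParameterKey tuples mapped to ParameterSpec,
# e.g. {('lr',): ParameterSpec(...), ('optimizer',): ParameterSpec(...)}
formatted = format_search_space(space)

# A tuner samples in the internal format: a candidate index for
# categorical specs, a float in [low, high] for numerical ones
# (log-scale bounds when log_distributed is true).
sample = {
    key: random.randrange(spec.size) if spec.categorical
         else random.uniform(spec.low, spec.high)
    for key, spec in formatted.items()
}

# Back to the user-facing format, e.g. {'lr': 0.0031, 'optimizer': 'adam'}
params = deformat_parameters(sample, formatted)
```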
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from enum import Enum
class OptimizeMode(Enum):
......
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from __future__ import annotations
import logging
from typing import Any, List, Optional
from typing import Any
common_search_space_types = [
'choice',
@@ -19,7 +21,7 @@ common_search_space_types = [
def validate_search_space(
search_space: Any,
support_types: Optional[List[str]] = None,
support_types: list[str] | None = None,
raise_exception: bool = False # for now, in case of false positives
) -> bool:
......
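`validate_search_space` keeps its runtime behavior; only the annotation style changes. A usage sketch under the defaults shown above; the import path is an assumption, so locate the function in your checkout if it differs:

```python
# Import path is an assumption, not confirmed by the diff.
from nni.common.hpo_utils import validate_search_space

space = {'lr': {'_type': 'loguniform', '_value': [1e-5, 1e-1]}}

# With raise_exception=False (the default), problems are logged and the
# function returns False instead of raising.
ok = validate_search_space(space, support_types=['choice', 'loguniform'])
print(ok)  # True for a well-formed space
```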
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import abc
import base64
import collections.abc
......
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import logging
try:
import torch
......
@@ -47,6 +47,7 @@ class _AlgorithmConfig(ConfigBase):
else: # custom algorithm
assert self.name is None
assert self.class_name
assert self.code_directory is not None
if not Path(self.code_directory).is_dir():
raise ValueError(f'CustomAlgorithmConfig: code_directory "{self.code_directory}" is not a directory')
......
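The added assertion narrows the optional `code_directory` before the `Path(...)` call that follows, which is exactly what a type checker like the newly added pyright would flag. A hedged sketch of the custom-algorithm config this branch validates; the class name and directory are hypothetical:

```python
from nni.experiment import Experiment
from nni.experiment.config import CustomAlgorithmConfig

experiment = Experiment('local')
# CustomAlgorithmConfig is the public counterpart of the internal
# _AlgorithmConfig above: name stays unset, class_name must be given,
# and code_directory must point at an existing directory.
experiment.config.tuner = CustomAlgorithmConfig(
    class_name='my_package.my_tuner.MyTuner',  # hypothetical custom tuner
    code_directory='./my_tuner',               # hypothetical path; must exist
)
```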