Project: OpenDAS / nni
Commit 5136a86d (unverified)

Typehint and copyright header (#4669)

Authored Mar 24, 2022 by liuzhe-lz; committed via GitHub on Mar 24, 2022.
Parent: 68347c5e

Changes: 41 files in the full commit; this page (1 of 3) shows 20 changed files, with 103 additions and 45 deletions (+103 -45).
Changed files on this page:

dependencies/develop.txt (+1 -0)
dependencies/required.txt (+1 -1)
docs/source/reference/others.rst (+8 -0)
docs/source/reference/python_api/others.rst (+0 -11)
docs/source/reference/python_api_ref.rst (+1 -1)
nni/algorithms/hpo/bohb_advisor/__init__.py (+3 -0)
nni/algorithms/hpo/dngo_tuner.py (+3 -0)
nni/algorithms/hpo/gp_tuner/__init__.py (+3 -0)
nni/algorithms/hpo/metis_tuner/__init__.py (+3 -0)
nni/algorithms/hpo/networkmorphism_tuner/__init__.py (+3 -0)
nni/algorithms/hpo/ppo_tuner/__init__.py (+3 -0)
nni/algorithms/hpo/regularized_evolution_tuner.py (+3 -0)
nni/assessor.py (+9 -6)
nni/common/__init__.py (+3 -0)
nni/common/hpo_utils/formatting.py (+45 -24)
nni/common/hpo_utils/optimize_mode.py (+3 -0)
nni/common/hpo_utils/validation.py (+4 -2)
nni/common/serializer.py (+3 -0)
nni/common/version.py (+3 -0)
nni/experiment/config/algorithm.py (+1 -0)
dependencies/develop.txt

@@ -5,6 +5,7 @@ ipython
 jupyterlab
 nbsphinx
 pylint
+pyright
 pytest
 pytest-azurepipelines
 pytest-cov
dependencies/required.txt

@@ -19,5 +19,5 @@ scikit-learn >= 0.24.1
 scipy < 1.8 ; python_version < "3.8"
 scipy ; python_version >= "3.8"
 typeguard
-typing_extensions ; python_version < "3.8"
+typing_extensions >= 4.0.0 ; python_version < "3.8"
 websockets >= 10.1
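The typing_extensions requirement only applies below Python 3.8; a plausible (but assumed, not stated in the commit) reason is the usual backport pattern for typing features that the new typehint work relies on, along the lines of:

    import sys

    if sys.version_info >= (3, 8):
        from typing import Literal, TypedDict          # stdlib on Python >= 3.8
    else:
        from typing_extensions import Literal, TypedDict  # backport, hence the pinned floor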
docs/source/reference/others.rst (new file, 0 → 100644)

+Uncategorized Modules
+=====================
+
+nni.typehint
+------------
+
+.. automodule:: nni.typehint
+   :members:
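For orientation, a minimal sketch of the newly documented nni.typehint aliases in use; the alias names are taken from imports later in this diff, and the search-space dict follows NNI's standard `_type`/`_value` format:

    from nni.typehint import Parameters, SearchSpace

    # A user-visible search space and one sampled configuration, typed with the aliases.
    search_space: SearchSpace = {
        'lr': {'_type': 'loguniform', '_value': [1e-5, 1e-1]},
    }
    params: Parameters = {'lr': 1e-3}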
docs/source/reference/python_api/others.rst (deleted, 100644 → 0)

-Others
-======
-
-nni
----
-
-nni.common
-----------
-
-nni.utils
----------
docs/source/reference/python_api_ref.rst

@@ -9,4 +9,4 @@ API Reference
    Model Compression <compression>
    Feature Engineering <./python_api/feature_engineering>
    Experiment <experiment>
-   Others <./python_api/others>
+   Others <others>
nni/algorithms/hpo/bohb_advisor/__init__.py

+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT license.
+
 from .bohb_advisor import BOHB, BOHBClassArgsValidator
nni/algorithms/hpo/dngo_tuner.py

+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT license.
+
 import logging
 import warnings
nni/algorithms/hpo/gp_tuner/__init__.py

+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT license.
+
 from .gp_tuner import GPTuner, GPClassArgsValidator
nni/algorithms/hpo/metis_tuner/__init__.py

+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT license.
+
 from .metis_tuner import MetisTuner, MetisClassArgsValidator
nni/algorithms/hpo/networkmorphism_tuner/__init__.py

+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT license.
+
 from .networkmorphism_tuner import NetworkMorphismTuner, NetworkMorphismClassArgsValidator
nni/algorithms/hpo/ppo_tuner/__init__.py

+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT license.
+
 from .ppo_tuner import PPOTuner, PPOClassArgsValidator
nni/algorithms/hpo/regularized_evolution_tuner.py

+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT license.
+
 import copy
 import logging
 import random
nni/assessor.py

@@ -8,10 +8,13 @@ to tell whether this trial can be early stopped or not.
 See :class:`Assessor`' specification and ``docs/en_US/assessors.rst`` for details.
 """

+from __future__ import annotations
+
 from enum import Enum
 import logging

 from .recoverable import Recoverable
+from .typehint import TrialMetric

 __all__ = ['AssessResult', 'Assessor']

@@ -54,7 +57,7 @@ class Assessor(Recoverable):
     :class:`~nni.algorithms.hpo.curvefitting_assessor.CurvefittingAssessor`
     """

-    def assess_trial(self, trial_job_id, trial_history):
+    def assess_trial(self, trial_job_id: str, trial_history: list[TrialMetric]) -> AssessResult:
         """
         Abstract method for determining whether a trial should be killed. Must override.

@@ -91,7 +94,7 @@ class Assessor(Recoverable):
         """
         raise NotImplementedError('Assessor: assess_trial not implemented')

-    def trial_end(self, trial_job_id, success):
+    def trial_end(self, trial_job_id: str, success: bool) -> None:
         """
         Abstract method invoked when a trial is completed or terminated. Do nothing by default.

@@ -103,22 +106,22 @@ class Assessor(Recoverable):
             True if the trial successfully completed; False if failed or terminated.
         """

-    def load_checkpoint(self):
+    def load_checkpoint(self) -> None:
         """
         Internal API under revising, not recommended for end users.
         """
         checkpoin_path = self.get_checkpoint_path()
         _logger.info('Load checkpoint ignored by assessor, checkpoint path: %s', checkpoin_path)

-    def save_checkpoint(self):
+    def save_checkpoint(self) -> None:
         """
         Internal API under revising, not recommended for end users.
         """
         checkpoin_path = self.get_checkpoint_path()
         _logger.info('Save checkpoint ignored by assessor, checkpoint path: %s', checkpoin_path)

-    def _on_exit(self):
+    def _on_exit(self) -> None:
         pass

-    def _on_error(self):
+    def _on_error(self) -> None:
         pass
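To see what the new annotations buy downstream, here is a hedged sketch of a custom assessor written against the annotated base class; EarlyStopAssessor and its threshold rule are illustrative, not part of NNI:

    from __future__ import annotations  # lets list[TrialMetric] parse on Python 3.7/3.8

    from nni.assessor import Assessor, AssessResult
    from nni.typehint import TrialMetric

    class EarlyStopAssessor(Assessor):
        """Illustrative assessor: stop trials whose metric collapses."""

        def assess_trial(self, trial_job_id: str, trial_history: list[TrialMetric]) -> AssessResult:
            # Kill a trial once its latest metric drops below half of its best so far.
            if len(trial_history) >= 3 and trial_history[-1] < 0.5 * max(trial_history):
                return AssessResult.Bad
            return AssessResult.Good

        def trial_end(self, trial_job_id: str, success: bool) -> None:
            pass  # nothing to clean up in this sketch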
nni/common/__init__.py

+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT license.
+
 from .serializer import trace, dump, load, is_traceable
nni/common/hpo_utils/formatting.py

@@ -2,6 +2,8 @@
 # Licensed under the MIT license.

 """
 Helper class and functions for tuners to deal with search space.

 This script provides a more program-friendly representation of HPO search space.
 The format is considered internal helper and is not visible to end users.

@@ -9,8 +11,16 @@ You will find this useful when you want to support nested search space.
 The random tuner is an intuitive example for this utility.
 You should check its code before reading docstrings in this file.
+
+.. attention::
+
+    This module does not guarantee forward-compatibility.
+    If you want to use it outside official NNI repo, it is recommended to copy the script.
 """

+from __future__ import annotations
+
 __all__ = [
     'ParameterSpec',
     'deformat_parameters',
     ...
@@ -20,10 +30,16 @@ __all__ = [

 import math
 from types import SimpleNamespace
-from typing import Any, List, NamedTuple, Optional, Tuple
+from typing import Any, Dict, NamedTuple, Tuple, cast

 import numpy as np

+from nni.typehint import Parameters, SearchSpace
+
+ParameterKey = Tuple['str | int', ...]
+FormattedParameters = Dict[ParameterKey, 'float | int']
+FormattedSearchSpace = Dict[ParameterKey, 'ParameterSpec']
+
 class ParameterSpec(NamedTuple):
     """
     Specification (aka space / range / domain) of one single parameter.
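A hedged illustration of how the three new aliases compose; the nested-choice key shape ('optimizer', 0, 'momentum') is an assumption inferred from `_assign` and `is_activated_in` below, not spelled out in the diff:

    from typing import Dict, Tuple

    ParameterKey = Tuple['str | int', ...]
    FormattedParameters = Dict[ParameterKey, 'float | int']

    formatted: FormattedParameters = {
        ('dropout',): 0.5,                  # top-level parameter
        ('optimizer', 0, 'momentum'): 0.9,  # parameter nested under choice branch 0
    }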
@@ -33,29 +49,31 @@ class ParameterSpec(NamedTuple):

     name: str           # The object key in JSON
     type: str           # "_type" in JSON
-    values: List[Any]   # "_value" in JSON
+    values: list[Any]   # "_value" in JSON
-    key: Tuple[str]     # The "path" of this parameter
+    key: ParameterKey   # The "path" of this parameter
     categorical: bool   # Whether this paramter is categorical (unordered) or numerical (ordered)

-    size: int = None                # If it's categorical, how many candidates it has
+    size: int = cast(int, None)     # If it's categorical, how many candidates it has

     # uniform distributed
-    low: float = None               # Lower bound of uniform parameter
-    high: float = None              # Upper bound of uniform parameter
+    low: float = cast(float, None)  # Lower bound of uniform parameter
+    high: float = cast(float, None) # Upper bound of uniform parameter

-    normal_distributed: bool = None # Whether this parameter is uniform or normal distrubuted
-    mu: float = None                # µ of normal parameter
-    sigma: float = None             # σ of normal parameter
+    normal_distributed: bool = cast(bool, None)  # Whether this parameter is uniform or normal distrubuted
+    mu: float = cast(float, None)                # µ of normal parameter
+    sigma: float = cast(float, None)             # σ of normal parameter

-    q: Optional[float] = None       # If not `None`, the parameter value should be an integer multiple of this
-    clip: Optional[Tuple[float, float]] = None
+    q: float | None = None          # If not `None`, the parameter value should be an integer multiple of this
+    clip: tuple[float, float] | None = None
                                     # For q(log)uniform, this equals to "values[:2]"; for others this is None

-    log_distributed: bool = None    # Whether this parameter is log distributed
+    log_distributed: bool = cast(bool, None)     # Whether this parameter is log distributed
     # When true, low/high/mu/sigma describes log of parameter value (like np.lognormal)

-    def is_activated_in(self, partial_parameters):
+    def is_activated_in(self, partial_parameters: FormattedParameters) -> bool:
         """
         For nested search space, check whether this parameter should be skipped for current set of paremters.
         This function must be used in a pattern similar to random tuner. Otherwise it will misbehave.
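Why `cast(X, None)` rather than `Optional[X]`? A hedged reading of the pattern: the fields keep `None` as a runtime sentinel, but declaring them Optional would force None-checks at every use site, so the cast tells the checker to treat them as plain values. A minimal reproduction:

    from typing import NamedTuple, cast

    class Spec(NamedTuple):
        # Runtime default stays None (a sentinel), but type checkers see a plain int,
        # so downstream arithmetic on spec.size needs no Optional handling.
        size: int = cast(int, None)

    spec = Spec(size=10)
    print(spec.size + 1)  # 11; checkers accept this without a None guard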
@@ -64,7 +82,7 @@ class ParameterSpec(NamedTuple):
             return True
         return partial_parameters[self.key[:-2]] == self.key[-2]

-def format_search_space(search_space):
+def format_search_space(search_space: SearchSpace) -> FormattedSearchSpace:
     """
     Convert user provided search space into a dict of ParameterSpec.
     The dict key is dict value's `ParameterSpec.key`.
@@ -76,7 +94,9 @@ def format_search_space(search_space):
     # Remove these comments when we drop 3.6 support.
     return {spec.key: spec for spec in formatted}

-def deformat_parameters(formatted_parameters, formatted_search_space):
+def deformat_parameters(
+        formatted_parameters: FormattedParameters,
+        formatted_search_space: FormattedSearchSpace) -> Parameters:
     """
     Convert internal format parameters to users' expected format.
@@ -88,10 +108,11 @@ def deformat_parameters(formatted_parameters, formatted_search_space):
     3. For "q*", convert x to `round(x / q) * q`, then clip into range.
     4. For nested choices, convert flatten key-value pairs into nested structure.
     """
-    ret = {}
+    ret: Parameters = {}
     for key, x in formatted_parameters.items():
         spec = formatted_search_space[key]
         if spec.categorical:
+            x = cast(int, x)
             if spec.type == 'randint':
                 lower = min(math.ceil(float(x)) for x in spec.values)
                 _assign(ret, key, int(lower + x))
@@ -112,7 +133,7 @@ def deformat_parameters(formatted_parameters, formatted_search_space):
             _assign(ret, key, x)
     return ret

-def format_parameters(parameters, formatted_search_space):
+def format_parameters(parameters: Parameters, formatted_search_space: FormattedSearchSpace) -> FormattedParameters:
     """
     Convert end users' parameter format back to internal format, mainly for resuming experiments.
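The annotated signatures make the round-trip explicit: format_search_space builds the internal spec dict, a tuner fills in formatted values, and deformat_parameters converts them back to the user format. A hedged sketch, assuming the helpers are re-exported from nni.common.hpo_utils (otherwise import from nni.common.hpo_utils.formatting); for 'uniform' the formatted value passes through unchanged:

    import numpy as np
    from nni.common.hpo_utils import format_search_space, deformat_parameters

    space = format_search_space({'dropout': {'_type': 'uniform', '_value': [0.0, 0.8]}})
    # Sample each spec the way the random tuner does: uniform between low and high.
    formatted = {key: np.random.uniform(spec.low, spec.high) for key, spec in space.items()}
    params = deformat_parameters(formatted, space)  # e.g. {'dropout': 0.37...}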
@@ -123,7 +144,7 @@ def format_parameters(parameters, formatted_search_space):
     for key, spec in formatted_search_space.items():
         if not spec.is_activated_in(ret):
             continue
-        value = parameters
+        value: Any = parameters
         for name in key:
             if isinstance(name, str):
                 value = value[name]
@@ -142,8 +163,8 @@ def format_parameters(parameters, formatted_search_space):
         ret[key] = value
     return ret

-def _format_search_space(parent_key, space):
-    formatted = []
+def _format_search_space(parent_key: ParameterKey, space: SearchSpace) -> list[ParameterSpec]:
+    formatted: list[ParameterSpec] = []
     for name, spec in space.items():
         if name == '_name':
             continue
@@ -155,7 +176,7 @@ def _format_search_space(parent_key, space):
             formatted += _format_search_space(key, sub_space)
     return formatted

-def _format_parameter(key, type_, values):
+def _format_parameter(key: ParameterKey, type_: str, values: list[Any]):
     spec = SimpleNamespace(
         name = key[-1],
         type = type_,
@@ -197,7 +218,7 @@ def _format_parameter(key, type_, values):
     return ParameterSpec(**spec.__dict__)

-def _is_nested_choices(values):
+def _is_nested_choices(values: list[Any]) -> bool:
     assert values  # choices should not be empty
     for value in values:
         if not isinstance(value, dict):
@@ -206,9 +227,9 @@ def _is_nested_choices(values):
             return False
     return True

-def _assign(params, key, x):
+def _assign(params: Parameters, key: ParameterKey, x: Any) -> None:
     if len(key) == 1:
-        params[key[0]] = x
+        params[cast(str, key[0])] = x
     elif isinstance(key[0], int):
         _assign(params, key[1:], x)
     else:
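From the two branches visible above, `_assign`'s contract can be read off directly: a length-1 key writes the value, and a leading integer component (a choice-branch index) is simply dropped. A runnable trace of just those two branches; the elided `else` branch, which presumably builds nested dicts for string components, is an assumption and omitted here:

    from typing import Any, Tuple

    def assign_demo(params: dict, key: Tuple, x: Any) -> None:
        # Mirrors the two branches shown in the hunk above.
        if len(key) == 1:
            params[key[0]] = x
        elif isinstance(key[0], int):
            assign_demo(params, key[1:], x)

    params: dict = {}
    assign_demo(params, ('lr',), 0.001)     # base case writes directly
    assign_demo(params, (0, 'units'), 128)  # leading choice index is dropped
    print(params)  # {'lr': 0.001, 'units': 128}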
nni/common/hpo_utils/optimize_mode.py

+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT license.
+
 from enum import Enum

 class OptimizeMode(Enum):
nni/common/hpo_utils/validation.py

 # Copyright (c) Microsoft Corporation.
 # Licensed under the MIT license.

+from __future__ import annotations
+
 import logging
-from typing import Any, List, Optional
+from typing import Any

 common_search_space_types = [
     'choice',

@@ -19,7 +21,7 @@ common_search_space_types = [
 def validate_search_space(
         search_space: Any,
-        support_types: Optional[List[str]] = None,
+        support_types: list[str] | None = None,
         raise_exception: bool = False  # for now, in case false positive
     ) -> bool:
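A hedged usage sketch of the annotated validator, assuming it is re-exported from nni.common.hpo_utils, with raise_exception left at its deliberately lenient default:

    from nni.common.hpo_utils import validate_search_space

    ok = validate_search_space(
        {'lr': {'_type': 'loguniform', '_value': [1e-5, 1e-1]}},
        support_types=['uniform', 'loguniform'],  # now typed list[str] | None
    )
    print(ok)  # True if the space is well-formed and uses only supported types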
nni/common/serializer.py

+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT license.
+
 import abc
 import base64
 import collections.abc
nni/common/version.py

+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT license.
+
 import logging

 try:
     import torch
nni/experiment/config/algorithm.py

@@ -47,6 +47,7 @@ class _AlgorithmConfig(ConfigBase):
         else:
             # custom algorithm
             assert self.name is None
             assert self.class_name
+            assert self.code_directory is not None
             if not Path(self.code_directory).is_dir():
                 raise ValueError(f'CustomAlgorithmConfig: code_directory "{self.code_directory}" is not a directory')
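The added assert is the standard type-narrowing idiom rather than a behavioral change: after it, checkers treat the Optional attribute as definitely set, so the Path(...) call below it type-checks. A self-contained reproduction of the pattern (check_dir is illustrative, not NNI code):

    from pathlib import Path
    from typing import Optional

    def check_dir(code_directory: Optional[str]) -> None:
        assert code_directory is not None  # narrows Optional[str] to str for the checker
        if not Path(code_directory).is_dir():
            raise ValueError(f'code_directory "{code_directory}" is not a directory')

    check_dir('.')  # passes for the current directory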