Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
OpenDAS
nni
Commits
593a275c
Commit
593a275c
authored
Dec 14, 2020
by
Yuge Zhang
Browse files
Merge branch 'master' of
https://github.com/microsoft/nni
into dev-retiarii
parents
b3cdee85
683c458a
Changes
85
Hide whitespace changes
Inline
Side-by-side
Showing
20 changed files
with
494 additions
and
523 deletions
+494
-523
examples/trials/cifar10_pytorch/adl.Dockerfile
examples/trials/cifar10_pytorch/adl.Dockerfile
+8
-0
examples/trials/cifar10_pytorch/config_adl.yml
examples/trials/cifar10_pytorch/config_adl.yml
+2
-1
examples/tuners/customized_tuner/meta_file.yml
examples/tuners/customized_tuner/meta_file.yml
+4
-0
examples/tuners/customized_tuner/setup.py
examples/tuners/customized_tuner/setup.py
+1
-3
nni/algorithms/hpo/gridsearch_tuner/gridsearch_tuner.py
nni/algorithms/hpo/gridsearch_tuner/gridsearch_tuner.py
+2
-2
nni/nas/benchmarks/nlp/__init__.py
nni/nas/benchmarks/nlp/__init__.py
+4
-0
nni/nas/benchmarks/nlp/db_gen.py
nni/nas/benchmarks/nlp/db_gen.py
+46
-0
nni/nas/benchmarks/nlp/model.py
nni/nas/benchmarks/nlp/model.py
+92
-0
nni/nas/benchmarks/nlp/query.py
nni/nas/benchmarks/nlp/query.py
+61
-0
nni/tools/nnictl/algo_management.py
nni/tools/nnictl/algo_management.py
+101
-0
nni/tools/nnictl/config_schema.py
nni/tools/nnictl/config_schema.py
+4
-3
nni/tools/nnictl/constants.py
nni/tools/nnictl/constants.py
+0
-21
nni/tools/nnictl/launcher.py
nni/tools/nnictl/launcher.py
+4
-5
nni/tools/nnictl/nnictl.py
nni/tools/nnictl/nnictl.py
+35
-18
nni/tools/nnictl/nnictl_utils.py
nni/tools/nnictl/nnictl_utils.py
+5
-4
nni/tools/nnictl/package_management.py
nni/tools/nnictl/package_management.py
+0
-184
nni/tools/package_utils/__init__.py
nni/tools/package_utils/__init__.py
+41
-120
nni/tools/package_utils/constants.py
nni/tools/package_utils/constants.py
+0
-91
nni/utils.py
nni/utils.py
+1
-0
pipelines/fast-test.yml
pipelines/fast-test.yml
+83
-71
No files found.
examples/trials/cifar10_pytorch/adl.Dockerfile
0 → 100644
View file @
593a275c
# Dockerfile for building AdaptDL-enabled CIFAR10 image
# Set docker build context to current folder
FROM
pytorch/pytorch:1.4-cuda10.1-cudnn7-runtime
RUN
pip
install
nni adaptdl tensorboard
COPY
./ /cifar10
examples/trials/cifar10_pytorch/config_adl.yml
View file @
593a275c
...
@@ -17,10 +17,11 @@ tuner:
...
@@ -17,10 +17,11 @@ tuner:
#choice: maximize, minimize
#choice: maximize, minimize
optimize_mode
:
maximize
optimize_mode
:
maximize
trial
:
trial
:
namespace
:
default
command
:
python3 /cifar10/main_adl.py
command
:
python3 /cifar10/main_adl.py
codeDir
:
/cifar10
codeDir
:
/cifar10
gpuNum
:
1
gpuNum
:
1
image
:
{
replace_with_the_image_that_has_adaptdl_instal
le
d
}
image
:
{
image_built_by_adl.Dockerfi
le
}
# optional
# optional
imagePullSecrets
:
imagePullSecrets
:
-
name
:
{
secret
}
-
name
:
{
secret
}
...
...
examples/tuners/customized_tuner/meta_file.yml
0 → 100644
View file @
593a275c
algoType
:
tuner
builtinName
:
demotuner
className
:
demo_tuner.DemoTuner
classArgsValidator
:
demo_tuner.MyClassArgsValidator
examples/tuners/customized_tuner/setup.py
View file @
593a275c
...
@@ -12,10 +12,8 @@ setuptools.setup(
...
@@ -12,10 +12,8 @@ setuptools.setup(
classifiers
=
[
classifiers
=
[
'Programming Language :: Python :: 3'
,
'Programming Language :: Python :: 3'
,
'License :: OSI Approved :: MIT License'
,
'License :: OSI Approved :: MIT License'
,
'Operating System :: '
,
'Operating System :: '
'NNI Package :: tuner :: demotuner :: demo_tuner.DemoTuner :: demo_tuner.MyClassArgsValidator'
],
],
author
=
'Microsoft NNI Team'
,
author
=
'Microsoft NNI Team'
,
author_email
=
'nni@microsoft.com'
,
author_email
=
'nni@microsoft.com'
,
description
=
'NNI control for Neural Network Intelligence project'
,
description
=
'NNI control for Neural Network Intelligence project'
,
...
...
nni/algorithms/hpo/gridsearch_tuner/gridsearch_tuner.py
View file @
593a275c
...
@@ -169,7 +169,7 @@ class GridSearchTuner(Tuner):
...
@@ -169,7 +169,7 @@ class GridSearchTuner(Tuner):
"""
"""
self
.
count
+=
1
self
.
count
+=
1
while
self
.
count
<=
len
(
self
.
expanded_search_space
)
-
1
:
while
self
.
count
<=
len
(
self
.
expanded_search_space
)
-
1
:
_params_tuple
=
convert_dict2tuple
(
self
.
expanded_search_space
[
self
.
count
])
_params_tuple
=
convert_dict2tuple
(
copy
.
deepcopy
(
self
.
expanded_search_space
[
self
.
count
])
)
if
_params_tuple
in
self
.
supplement_data
:
if
_params_tuple
in
self
.
supplement_data
:
self
.
count
+=
1
self
.
count
+=
1
else
:
else
:
...
@@ -203,6 +203,6 @@ class GridSearchTuner(Tuner):
...
@@ -203,6 +203,6 @@ class GridSearchTuner(Tuner):
if
not
_value
:
if
not
_value
:
logger
.
info
(
"Useless trial data, value is %s, skip this trial data."
,
_value
)
logger
.
info
(
"Useless trial data, value is %s, skip this trial data."
,
_value
)
continue
continue
_params_tuple
=
convert_dict2tuple
(
_params
)
_params_tuple
=
convert_dict2tuple
(
copy
.
deepcopy
(
_params
)
)
self
.
supplement_data
[
_params_tuple
]
=
True
self
.
supplement_data
[
_params_tuple
]
=
True
logger
.
info
(
"Successfully import data to grid search tuner."
)
logger
.
info
(
"Successfully import data to grid search tuner."
)
nni/nas/benchmarks/nlp/__init__.py
0 → 100644
View file @
593a275c
from
.model
import
NlpTrialStats
,
NlpIntermediateStats
,
NlpTrialConfig
from
.query
import
query_nlp_trial_stats
nni/nas/benchmarks/nlp/db_gen.py
0 → 100644
View file @
593a275c
import
json
import
os
import
argparse
import
tqdm
from
.model
import
db
,
NlpTrialConfig
,
NlpTrialStats
,
NlpIntermediateStats
def
main
():
parser
=
argparse
.
ArgumentParser
()
parser
.
add_argument
(
'input_dir'
,
help
=
'Path to extracted NLP data dir.'
)
args
=
parser
.
parse_args
()
with
db
,
tqdm
.
tqdm
(
total
=
len
(
os
.
listdir
(
args
.
input_dir
)),
desc
=
"creating tables"
)
as
pbar
:
db
.
create_tables
([
NlpTrialConfig
,
NlpTrialStats
,
NlpIntermediateStats
])
json_files
=
os
.
listdir
(
args
.
input_dir
)
for
json_file
in
json_files
:
pbar
.
update
(
1
)
if
json_file
.
endswith
(
'.json'
):
log_path
=
os
.
path
.
join
(
args
.
input_dir
,
json_file
)
cur
=
json
.
load
(
open
(
log_path
,
'r'
))
arch
=
json
.
loads
(
cur
[
'recepie'
])
unested_arch
=
{}
for
k
in
arch
.
keys
():
# print(k)
unested_arch
[
'{}_op'
.
format
(
k
)]
=
arch
[
k
][
'op'
]
for
i
in
range
(
len
(
arch
[
k
][
'input'
])):
unested_arch
[
'{}_input_{}'
.
format
(
k
,
i
)]
=
arch
[
k
][
'input'
][
i
]
config
=
NlpTrialConfig
.
create
(
arch
=
unested_arch
,
dataset
=
cur
[
'data'
][
5
:])
if
cur
[
'status'
]
==
'OK'
:
trial_stats
=
NlpTrialStats
.
create
(
config
=
config
,
train_loss
=
cur
[
'train_losses'
][
-
1
],
val_loss
=
cur
[
'val_losses'
][
-
1
],
test_loss
=
cur
[
'test_losses'
][
-
1
],
training_time
=
cur
[
'wall_times'
][
-
1
])
epochs
=
50
intermediate_stats
=
[]
for
epoch
in
range
(
epochs
):
epoch_res
=
{
'train_loss'
:
cur
[
'train_losses'
][
epoch
],
'val_loss'
:
cur
[
'val_losses'
][
epoch
],
'test_loss'
:
cur
[
'test_losses'
][
epoch
],
'training_time'
:
cur
[
'wall_times'
][
epoch
]
}
epoch_res
.
update
(
current_epoch
=
epoch
+
1
,
trial
=
trial_stats
)
intermediate_stats
.
append
(
epoch_res
)
NlpIntermediateStats
.
insert_many
(
intermediate_stats
).
execute
(
db
)
if
__name__
==
'__main__'
:
main
()
nni/nas/benchmarks/nlp/model.py
0 → 100644
View file @
593a275c
import
os
from
peewee
import
CharField
,
FloatField
,
ForeignKeyField
,
IntegerField
,
Model
from
playhouse.sqlite_ext
import
JSONField
,
SqliteExtDatabase
from
nni.nas.benchmarks.utils
import
json_dumps
from
nni.nas.benchmarks.constants
import
DATABASE_DIR
db
=
SqliteExtDatabase
(
os
.
path
.
join
(
DATABASE_DIR
,
'nlp.db'
),
autoconnect
=
True
)
class
NlpTrialConfig
(
Model
):
"""
Trial config for NLP. epoch_num is fixed at 50.
Attributes
----------
arch: dict
aka recepie in NAS-NLP-Benchmark repo (https://github.com/fmsnew/nas-bench-nlp-release).
an arch has multiple Node, Node_input_n and Node_op.
``Node`` can be ``node_n`` or ``h_new_n`` or ``f/i/o/j(_act)`` etc. (n is an int number and need not to be consecutive)
``Node_input_n`` can be ``Node`` or ``x`` etc.
``Node_op`` can be ``linear`` or ``activation_sigm`` or ``activation_tanh`` or ``elementwise_prod``
or ``elementwise_sum`` or ``activation_leaky_relu`` ...
e.g., {"h_new_0_input_0":"node_3","h_new_0_input_1":"x","h_new_0_op":"linear","node_2_input_0":"x",
"node_2_input_1":"h_prev_0","node_2_op":"linear","node_3_input_0":"node_2","node_3_op":"activation_leaky_relu"}
dataset: str
Dataset used. Could be ``ptb`` or ``wikitext-2``.
"""
arch
=
JSONField
(
json_dumps
=
json_dumps
,
index
=
True
)
dataset
=
CharField
(
max_length
=
15
,
index
=
True
,
choices
=
[
'ptb'
,
'wikitext-2'
])
class
Meta
:
database
=
db
class
NlpTrialStats
(
Model
):
"""
Computation statistics for NAS-NLP-Benchmark.
Each corresponds to one trial result after 50 epoch.
Attributes
----------
config : NlpTrialConfig
Corresponding config for trial.
train_loss : float or None
Final loss on training data. Could be NaN (None).
val_loss : float or None
Final loss on validation data. Could be NaN (None).
test_loss : float or None
Final loss on test data. Could be NaN (None).
training_time : float
Time elapsed in seconds. aka wall_time in in NAS-NLP-Benchmark repo.
"""
config
=
ForeignKeyField
(
NlpTrialConfig
,
backref
=
'trial_stats'
,
index
=
True
)
train_loss
=
FloatField
(
null
=
True
)
val_loss
=
FloatField
(
null
=
True
)
test_loss
=
FloatField
(
null
=
True
)
training_time
=
FloatField
(
null
=
True
)
class
Meta
:
database
=
db
class
NlpIntermediateStats
(
Model
):
"""
Computation statistics for NAS-NLP-Benchmark.
Each corresponds to one trial result for 1-50 epoch.
Attributes
----------
config : NlpTrialConfig
Corresponding config for trial.
train_loss : float or None
Final loss on training data. Could be NaN (None).
val_loss : float or None
Final loss on validation data. Could be NaN (None).
test_loss : float or None
Final loss on test data. Could be NaN (None).
training_time : float
Time elapsed in seconds. aka wall_time in in NAS-NLP-Benchmark repo.
"""
trial
=
ForeignKeyField
(
NlpTrialStats
,
backref
=
'intermediates'
,
index
=
True
)
current_epoch
=
IntegerField
(
index
=
True
)
train_loss
=
FloatField
(
null
=
True
)
val_loss
=
FloatField
(
null
=
True
)
test_loss
=
FloatField
(
null
=
True
)
training_time
=
FloatField
(
null
=
True
)
class
Meta
:
database
=
db
\ No newline at end of file
nni/nas/benchmarks/nlp/query.py
0 → 100644
View file @
593a275c
import
functools
from
peewee
import
fn
from
playhouse.shortcuts
import
model_to_dict
from
.model
import
NlpTrialStats
,
NlpTrialConfig
def
query_nlp_trial_stats
(
arch
,
dataset
,
reduction
=
None
,
include_intermediates
=
False
):
"""
Query trial stats of NLP benchmark given conditions, including config(arch + dataset) and training results after 50 epoch.
Parameters
----------
arch : dict or None
If a dict, it is in the format that is described in
:class:`nni.nas.benchmark.nlp.NlpTrialConfig`. Only trial stats matched will be returned.
If none, all architectures in the database will be matched.
dataset : str or None
If specified, can be one of the dataset available in :class:`nni.nas.benchmark.nlp.NlpTrialConfig`.
Otherwise a wildcard.
reduction : str or None
If 'none' or None, all trial stats will be returned directly.
If 'mean', fields in trial stats will be averaged given the same trial config.
Please note that some trial configs have multiple runs which make "reduction" meaningful, while some may not.
include_intermediates : boolean
If true, intermediate results will be returned.
Returns
-------
generator of dict
A generator of :class:`nni.nas.benchmark.nlp.NlpTrialStats` objects,
where each of them has been converted into a dict.
"""
fields
=
[]
if
reduction
==
'none'
:
reduction
=
None
if
reduction
==
'mean'
:
for
field_name
in
NlpTrialStats
.
_meta
.
sorted_field_names
:
if
field_name
not
in
[
'id'
,
'config'
]:
fields
.
append
(
fn
.
AVG
(
getattr
(
NlpTrialStats
,
field_name
)).
alias
(
field_name
))
elif
reduction
is
None
:
fields
.
append
(
NlpTrialStats
)
else
:
raise
ValueError
(
'Unsupported reduction:
\'
%s
\'
'
%
reduction
)
query
=
NlpTrialStats
.
select
(
*
fields
,
NlpTrialConfig
).
join
(
NlpTrialConfig
)
conditions
=
[]
if
arch
is
not
None
:
conditions
.
append
(
NlpTrialConfig
.
arch
==
arch
)
if
dataset
is
not
None
:
conditions
.
append
(
NlpTrialConfig
.
dataset
==
dataset
)
for
trial
in
query
.
where
(
functools
.
reduce
(
lambda
a
,
b
:
a
&
b
,
conditions
)):
if
include_intermediates
:
data
=
model_to_dict
(
trial
)
# exclude 'trial' from intermediates as it is already available in data
data
[
'intermediates'
]
=
[
{
k
:
v
for
k
,
v
in
model_to_dict
(
t
).
items
()
if
k
!=
'trial'
}
for
t
in
trial
.
intermediates
]
yield
data
else
:
yield
model_to_dict
(
trial
)
\ No newline at end of file
nni/tools/nnictl/algo_management.py
0 → 100644
View file @
593a275c
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import
os
import
importlib
import
json
from
nni.tools.package_utils
import
read_registerd_algo_meta
,
get_registered_algo_meta
,
\
write_registered_algo_meta
,
ALGO_TYPES
,
parse_full_class_name
from
.common_utils
import
print_error
,
print_green
,
get_yml_content
def
read_reg_meta_list
(
meta_path
):
content
=
get_yml_content
(
meta_path
)
if
content
.
get
(
'algorithms'
):
meta_list
=
content
.
get
(
'algorithms'
)
else
:
meta_list
=
[
content
]
for
meta
in
meta_list
:
assert
'algoType'
in
meta
assert
meta
[
'algoType'
]
in
[
'tuner'
,
'assessor'
,
'advisor'
]
assert
'builtinName'
in
meta
assert
'className'
in
meta
return
meta_list
def
verify_algo_import
(
meta
):
def
_do_verify_import
(
fullName
):
module_name
,
class_name
=
parse_full_class_name
(
fullName
)
class_module
=
importlib
.
import_module
(
module_name
)
getattr
(
class_module
,
class_name
)
_do_verify_import
(
meta
[
'className'
])
if
meta
.
get
(
'classArgsValidator'
):
_do_verify_import
(
meta
[
'classArgsValidator'
])
def
algo_reg
(
args
):
meta_list
=
read_reg_meta_list
(
args
.
meta_path
)
for
meta
in
meta_list
:
if
get_registered_algo_meta
(
meta
[
'builtinName'
])
is
not
None
:
print_error
(
'builtinName {} already registered'
.
format
(
meta
[
'builtinName'
]))
return
verify_algo_import
(
meta
)
save_algo_meta_data
(
meta
)
print_green
(
'{} registered sucessfully!'
.
format
(
meta
[
'builtinName'
]))
def
algo_unreg
(
args
):
name
=
args
.
name
[
0
]
meta
=
get_registered_algo_meta
(
name
)
if
meta
is
None
:
print_error
(
'builtin algorithms {} not found!'
.
format
(
name
))
return
if
meta
[
'source'
]
==
'nni'
:
print_error
(
'{} is provided by nni, can not be unregistered!'
.
format
(
name
))
return
if
remove_algo_meta_data
(
name
):
print_green
(
'{} unregistered sucessfully!'
.
format
(
name
))
else
:
print_error
(
'Failed to unregistered {}!'
.
format
(
name
))
def
algo_show
(
args
):
builtin_name
=
args
.
name
[
0
]
meta
=
get_registered_algo_meta
(
builtin_name
)
if
meta
:
print
(
json
.
dumps
(
meta
,
indent
=
4
))
else
:
print_error
(
'package {} not found'
.
format
(
builtin_name
))
def
algo_list
(
args
):
meta
=
read_registerd_algo_meta
()
print
(
'+-----------------+------------+-----------+--------=-------------+------------------------------------------+'
)
print
(
'| Name | Type | source | Class Name | Module Name |'
)
print
(
'+-----------------+------------+-----------+----------------------+------------------------------------------+'
)
MAX_MODULE_NAME
=
38
for
t
in
[
'tuners'
,
'assessors'
,
'advisors'
]:
for
p
in
meta
[
t
]:
module_name
=
'.'
.
join
(
p
[
'className'
].
split
(
'.'
)[:
-
1
])
if
len
(
module_name
)
>
MAX_MODULE_NAME
:
module_name
=
module_name
[:
MAX_MODULE_NAME
-
3
]
+
'...'
class_name
=
p
[
'className'
].
split
(
'.'
)[
-
1
]
print
(
'| {:15s} | {:10s} | {:9s} | {:20s} | {:40s} |'
.
format
(
p
[
'builtinName'
],
t
,
p
[
'source'
],
class_name
,
module_name
[:
38
]))
print
(
'+-----------------+------------+-----------+----------------------+------------------------------------------+'
)
def
save_algo_meta_data
(
meta_data
):
meta_data
[
'source'
]
=
'user'
config
=
read_registerd_algo_meta
()
config
[
meta_data
[
'algoType'
]
+
's'
].
append
(
meta_data
)
write_registered_algo_meta
(
config
)
def
remove_algo_meta_data
(
name
):
config
=
read_registerd_algo_meta
()
updated
=
False
for
t
in
ALGO_TYPES
:
for
meta
in
config
[
t
]:
if
meta
[
'builtinName'
]
==
name
:
config
[
t
].
remove
(
meta
)
updated
=
True
if
updated
:
write_registered_algo_meta
(
config
)
return
True
return
False
nni/tools/nnictl/config_schema.py
View file @
593a275c
...
@@ -6,7 +6,7 @@ import logging
...
@@ -6,7 +6,7 @@ import logging
import
os
import
os
import
netifaces
import
netifaces
from
schema
import
Schema
,
And
,
Optional
,
Regex
,
Or
,
SchemaError
from
schema
import
Schema
,
And
,
Optional
,
Regex
,
Or
,
SchemaError
from
nni.tools.package_utils
import
create_validator_instance
,
get_all_builtin_names
,
get_
builtin
_algo_meta
from
nni.tools.package_utils
import
create_validator_instance
,
get_all_builtin_names
,
get_
registered
_algo_meta
from
.constants
import
SCHEMA_TYPE_ERROR
,
SCHEMA_RANGE_ERROR
,
SCHEMA_PATH_ERROR
from
.constants
import
SCHEMA_TYPE_ERROR
,
SCHEMA_RANGE_ERROR
,
SCHEMA_PATH_ERROR
from
.common_utils
import
get_yml_content
,
print_warning
from
.common_utils
import
get_yml_content
,
print_warning
...
@@ -75,8 +75,8 @@ class AlgoSchema:
...
@@ -75,8 +75,8 @@ class AlgoSchema:
def
validate_class_args
(
self
,
class_args
,
algo_type
,
builtin_name
):
def
validate_class_args
(
self
,
class_args
,
algo_type
,
builtin_name
):
if
not
builtin_name
or
not
class_args
:
if
not
builtin_name
or
not
class_args
:
return
return
meta
=
get_
builtin
_algo_meta
(
algo_type
+
's'
,
builtin_name
)
meta
=
get_
registered
_algo_meta
(
builtin_name
,
algo_type
+
's'
)
if
meta
and
'accept
_c
lass
_a
rgs'
in
meta
and
meta
[
'accept
_c
lass
_a
rgs'
]
==
False
:
if
meta
and
'accept
C
lass
A
rgs'
in
meta
and
meta
[
'accept
C
lass
A
rgs'
]
==
False
:
raise
SchemaError
(
'classArgs is not allowed.'
)
raise
SchemaError
(
'classArgs is not allowed.'
)
logging
.
getLogger
(
'nni.protocol'
).
setLevel
(
logging
.
ERROR
)
# we know IPC is not there, don't complain
logging
.
getLogger
(
'nni.protocol'
).
setLevel
(
logging
.
ERROR
)
# we know IPC is not there, don't complain
...
@@ -268,6 +268,7 @@ adl_trial_schema = {
...
@@ -268,6 +268,7 @@ adl_trial_schema = {
'command'
:
setType
(
'command'
,
str
),
'command'
:
setType
(
'command'
,
str
),
'gpuNum'
:
setNumberRange
(
'gpuNum'
,
int
,
0
,
99999
),
'gpuNum'
:
setNumberRange
(
'gpuNum'
,
int
,
0
,
99999
),
'image'
:
setType
(
'image'
,
str
),
'image'
:
setType
(
'image'
,
str
),
Optional
(
'namespace'
):
setType
(
'namespace'
,
str
),
Optional
(
'imagePullSecrets'
):
[{
Optional
(
'imagePullSecrets'
):
[{
'name'
:
setType
(
'name'
,
str
)
'name'
:
setType
(
'name'
,
str
)
}],
}],
...
...
nni/tools/nnictl/constants.py
View file @
593a275c
...
@@ -61,27 +61,6 @@ TRIAL_MONITOR_CONTENT = '%-15s %-25s %-25s %-15s'
...
@@ -61,27 +61,6 @@ TRIAL_MONITOR_CONTENT = '%-15s %-25s %-25s %-15s'
TRIAL_MONITOR_TAIL
=
'-------------------------------------------------------------------------------------
\n\n\n
'
TRIAL_MONITOR_TAIL
=
'-------------------------------------------------------------------------------------
\n\n\n
'
INSTALLABLE_PACKAGE_META
=
{
'SMAC'
:
{
'type'
:
'tuner'
,
'class_name'
:
'nni.algorithms.hpo.smac_tuner.smac_tuner.SMACTuner'
,
'code_sub_dir'
:
'smac_tuner'
,
'class_args_validator'
:
'nni.algorithms.hpo.smac_tuner.smac_tuner.SMACClassArgsValidator'
},
'BOHB'
:
{
'type'
:
'advisor'
,
'class_name'
:
'nni.algorithms.hpo.bohb_advisor.bohb_advisor.BOHB'
,
'code_sub_dir'
:
'bohb_advisor'
,
'class_args_validator'
:
'nni.algorithms.hpo.bohb_advisor.bohb_advisor.BOHBClassArgsValidator'
},
'PPOTuner'
:
{
'type'
:
'tuner'
,
'class_name'
:
'nni.algorithms.hpo.ppo_tuner.ppo_tuner.PPOTuner'
,
'code_sub_dir'
:
'ppo_tuner'
,
'class_args_validator'
:
'nni.algorithms.hpo.ppo_tuner.ppo_tuner.PPOClassArgsValidator'
}
}
TUNERS_SUPPORTING_IMPORT_DATA
=
{
TUNERS_SUPPORTING_IMPORT_DATA
=
{
'TPE'
,
'TPE'
,
'Anneal'
,
'Anneal'
,
...
...
nni/tools/nnictl/launcher.py
View file @
593a275c
...
@@ -19,7 +19,7 @@ from .config_utils import Config, Experiments
...
@@ -19,7 +19,7 @@ from .config_utils import Config, Experiments
from
.common_utils
import
get_yml_content
,
get_json_content
,
print_error
,
print_normal
,
\
from
.common_utils
import
get_yml_content
,
get_json_content
,
print_error
,
print_normal
,
\
detect_port
,
get_user
detect_port
,
get_user
from
.constants
import
NNICTL_HOME_DIR
,
ERROR_INFO
,
REST_TIME_OUT
,
EXPERIMENT_SUCCESS_INFO
,
LOG_HEADER
,
INSTALLABLE_PACKAGE_META
from
.constants
import
NNICTL_HOME_DIR
,
ERROR_INFO
,
REST_TIME_OUT
,
EXPERIMENT_SUCCESS_INFO
,
LOG_HEADER
from
.command_utils
import
check_output_command
,
kill_command
from
.command_utils
import
check_output_command
,
kill_command
from
.nnictl_utils
import
update_experiment
from
.nnictl_utils
import
update_experiment
...
@@ -452,10 +452,9 @@ def launch_experiment(args, experiment_config, mode, experiment_id):
...
@@ -452,10 +452,9 @@ def launch_experiment(args, experiment_config, mode, experiment_id):
except
CalledProcessError
:
except
CalledProcessError
:
print_error
(
'some errors happen when import package %s.'
%
(
package_name
))
print_error
(
'some errors happen when import package %s.'
%
(
package_name
))
print_log_content
(
experiment_id
)
print_log_content
(
experiment_id
)
if
package_name
in
INSTALLABLE_PACKAGE_META
:
if
package_name
in
[
'SMAC'
,
'BOHB'
,
'PPOTuner'
]:
print_error
(
'If %s is not installed, it should be installed through '
\
print_error
(
f
'The dependencies for
{
package_name
}
can be installed through pip install nni[
{
package_name
}
]'
)
'
\'
nnictl package install --name %s
\'
'
%
(
package_name
,
package_name
))
raise
exit
(
1
)
log_dir
=
experiment_config
[
'logDir'
]
if
experiment_config
.
get
(
'logDir'
)
else
None
log_dir
=
experiment_config
[
'logDir'
]
if
experiment_config
.
get
(
'logDir'
)
else
None
log_level
=
experiment_config
[
'logLevel'
]
if
experiment_config
.
get
(
'logLevel'
)
else
None
log_level
=
experiment_config
[
'logLevel'
]
if
experiment_config
.
get
(
'logLevel'
)
else
None
#view experiment mode do not need debug function, when view an experiment, there will be no new logs created
#view experiment mode do not need debug function, when view an experiment, there will be no new logs created
...
...
nni/tools/nnictl/nnictl.py
View file @
593a275c
...
@@ -13,7 +13,7 @@ from .nnictl_utils import stop_experiment, trial_ls, trial_kill, list_experiment
...
@@ -13,7 +13,7 @@ from .nnictl_utils import stop_experiment, trial_ls, trial_kill, list_experiment
monitor_experiment
,
export_trials_data
,
trial_codegen
,
webui_url
,
\
monitor_experiment
,
export_trials_data
,
trial_codegen
,
webui_url
,
\
get_config
,
log_stdout
,
log_stderr
,
search_space_auto_gen
,
webui_nas
,
\
get_config
,
log_stdout
,
log_stderr
,
search_space_auto_gen
,
webui_nas
,
\
save_experiment
,
load_experiment
save_experiment
,
load_experiment
from
.
package
_management
import
package_install
,
package_uninstall
,
package_show
,
package
_list
from
.
algo
_management
import
algo_reg
,
algo_unreg
,
algo_show
,
algo
_list
from
.constants
import
DEFAULT_REST_PORT
from
.constants
import
DEFAULT_REST_PORT
from
.tensorboard_utils
import
start_tensorboard
,
stop_tensorboard
from
.tensorboard_utils
import
start_tensorboard
,
stop_tensorboard
init
(
autoreset
=
True
)
init
(
autoreset
=
True
)
...
@@ -212,26 +212,43 @@ def parse_args():
...
@@ -212,26 +212,43 @@ def parse_args():
parser_log_trial
.
add_argument
(
'--trial_id'
,
'-T'
,
dest
=
'trial_id'
,
help
=
'find trial log path by id'
)
parser_log_trial
.
add_argument
(
'--trial_id'
,
'-T'
,
dest
=
'trial_id'
,
help
=
'find trial log path by id'
)
parser_log_trial
.
set_defaults
(
func
=
log_trial
)
parser_log_trial
.
set_defaults
(
func
=
log_trial
)
#parse package command
#parse algo command
parser_package
=
subparsers
.
add_parser
(
'package'
,
help
=
'control nni tuner and assessor packages'
)
parser_algo
=
subparsers
.
add_parser
(
'algo'
,
help
=
'control nni builtin tuner, assessor and advisor algorithms'
)
# add subparsers for parser_package
# add subparsers for parser_algo
parser_package_subparsers
=
parser_package
.
add_subparsers
()
parser_algo_subparsers
=
parser_algo
.
add_subparsers
()
parser_package_install
=
parser_package_subparsers
.
add_parser
(
'install'
,
help
=
'install packages'
)
parser_algo_reg
=
parser_algo_subparsers
.
add_parser
(
parser_package_install
.
add_argument
(
'source'
,
nargs
=
'?'
,
help
=
'installation source, can be a directory or whl file'
)
'register'
,
parser_package_install
.
add_argument
(
'--name'
,
'-n'
,
dest
=
'name'
,
help
=
'package name to be installed'
,
required
=
False
)
aliases
=
(
'reg'
,),
parser_package_install
.
set_defaults
(
func
=
package_install
)
help
=
'''register algorithms as nni builtin algorithm, for example:
nnictl reg --meta_path <path_to_meta_file>
where <path_to_meta_file> is the path to a meta data in yml format,
reference the nni document and examples/tuners/customized_tuner example
for the format of the yml file.'''
)
parser_algo_reg
.
add_argument
(
'--meta_path'
,
'-m'
,
dest
=
'meta_path'
,
help
=
'path to the meta file'
,
required
=
True
)
parser_algo_reg
.
set_defaults
(
func
=
algo_reg
)
parser_
package_uninstall
=
parser_
package
_subparsers
.
add_parser
(
'un
install'
,
help
=
'uninstall packages
'
)
parser_
algo_unreg
=
parser_
algo
_subparsers
.
add_parser
(
'un
register'
,
aliases
=
(
'unreg'
,),
help
=
'unregister algorithm
'
)
parser_
package_uninstall
.
add_argument
(
'name'
,
nargs
=
1
,
help
=
'
package name to be uninstalled
'
)
parser_
algo_unreg
.
add_argument
(
'name'
,
nargs
=
1
,
help
=
'
builtin name of the algorithm
'
)
parser_
package_uninstall
.
set_defaults
(
func
=
package_uninstall
)
parser_
algo_unreg
.
set_defaults
(
func
=
algo_unreg
)
parser_
package
_show
=
parser_
package
_subparsers
.
add_parser
(
'show'
,
help
=
'show the information of
packages
'
)
parser_
algo
_show
=
parser_
algo
_subparsers
.
add_parser
(
'show'
,
help
=
'show the information of
algorithm
'
)
parser_
package
_show
.
add_argument
(
'name'
,
nargs
=
1
,
help
=
'builtin name of the
package
'
)
parser_
algo
_show
.
add_argument
(
'name'
,
nargs
=
1
,
help
=
'builtin name of the
algorithm
'
)
parser_
package
_show
.
set_defaults
(
func
=
package
_show
)
parser_
algo
_show
.
set_defaults
(
func
=
algo
_show
)
parser_package_list
=
parser_package_subparsers
.
add_parser
(
'list'
,
help
=
'list installed packages'
)
parser_algo_list
=
parser_algo_subparsers
.
add_parser
(
'list'
,
help
=
'list registered algorithms'
)
parser_package_list
.
add_argument
(
'--all'
,
action
=
'store_true'
,
help
=
'list all builtin packages'
)
parser_algo_list
.
set_defaults
(
func
=
algo_list
)
parser_package_list
.
set_defaults
(
func
=
package_list
)
# To show message that nnictl package command is replaced by nnictl algo, to be remove in the future release.
def
show_messsage_for_nnictl_package
(
args
):
print_error
(
'nnictl package command is replaced by nnictl algo, please run nnictl algo -h to show the usage'
)
parser_package_subparsers
=
subparsers
.
add_parser
(
'package'
,
help
=
'control nni tuner and assessor packages'
).
add_subparsers
()
parser_package_subparsers
.
add_parser
(
'install'
,
help
=
'install packages'
).
set_defaults
(
func
=
show_messsage_for_nnictl_package
)
parser_package_subparsers
.
add_parser
(
'uninstall'
,
help
=
'uninstall packages'
).
set_defaults
(
func
=
show_messsage_for_nnictl_package
)
parser_package_subparsers
.
add_parser
(
'show'
,
help
=
'show the information of packages'
).
set_defaults
(
func
=
show_messsage_for_nnictl_package
)
parser_package_subparsers
.
add_parser
(
'list'
,
help
=
'list installed packages'
).
set_defaults
(
func
=
show_messsage_for_nnictl_package
)
#parse tensorboard command
#parse tensorboard command
parser_tensorboard
=
subparsers
.
add_parser
(
'tensorboard'
,
help
=
'manage tensorboard'
)
parser_tensorboard
=
subparsers
.
add_parser
(
'tensorboard'
,
help
=
'manage tensorboard'
)
...
...
nni/tools/nnictl/nnictl_utils.py
View file @
593a275c
...
@@ -345,9 +345,9 @@ def log_internal(args, filetype):
...
@@ -345,9 +345,9 @@ def log_internal(args, filetype):
'''internal function to call get_log_content'''
'''internal function to call get_log_content'''
file_name
=
get_config_filename
(
args
)
file_name
=
get_config_filename
(
args
)
if
filetype
==
'stdout'
:
if
filetype
==
'stdout'
:
file_full_path
=
os
.
path
.
join
(
NNICTL_HOME_DIR
,
file_name
,
'stdout'
)
file_full_path
=
os
.
path
.
join
(
NNICTL_HOME_DIR
,
file_name
,
'
log'
,
'nnictl_
stdout
.log
'
)
else
:
else
:
file_full_path
=
os
.
path
.
join
(
NNICTL_HOME_DIR
,
file_name
,
'stderr'
)
file_full_path
=
os
.
path
.
join
(
NNICTL_HOME_DIR
,
file_name
,
'
log'
,
'nnictl_
stderr
.log
'
)
print
(
check_output_command
(
file_full_path
,
head
=
args
.
head
,
tail
=
args
.
tail
))
print
(
check_output_command
(
file_full_path
,
head
=
args
.
head
,
tail
=
args
.
tail
))
def
log_stdout
(
args
):
def
log_stdout
(
args
):
...
@@ -854,8 +854,9 @@ def save_experiment(args):
...
@@ -854,8 +854,9 @@ def save_experiment(args):
except
IOError
:
except
IOError
:
print_error
(
'Write file to %s failed!'
%
os
.
path
.
join
(
temp_nnictl_dir
,
'.experiment'
))
print_error
(
'Write file to %s failed!'
%
os
.
path
.
join
(
temp_nnictl_dir
,
'.experiment'
))
exit
(
1
)
exit
(
1
)
nnictl_config_dir
=
os
.
path
.
join
(
NNICTL_HOME_DIR
,
args
.
id
)
nnictl_log_dir
=
os
.
path
.
join
(
NNICTL_HOME_DIR
,
args
.
id
,
'log'
)
shutil
.
copytree
(
nnictl_config_dir
,
os
.
path
.
join
(
temp_nnictl_dir
,
args
.
id
))
shutil
.
copytree
(
nnictl_log_dir
,
os
.
path
.
join
(
temp_nnictl_dir
,
args
.
id
,
'log'
))
shutil
.
copy
(
os
.
path
.
join
(
NNICTL_HOME_DIR
,
args
.
id
,
'.config'
),
os
.
path
.
join
(
temp_nnictl_dir
,
args
.
id
,
'.config'
))
# Step3. Copy code dir
# Step3. Copy code dir
if
args
.
saveCodeDir
:
if
args
.
saveCodeDir
:
...
...
nni/tools/nnictl/package_management.py
deleted
100644 → 0
View file @
b3cdee85
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import
os
from
collections
import
defaultdict
import
json
import
pkginfo
import
nni
from
nni.tools.package_utils
import
read_installed_package_meta
,
get_installed_package_meta
,
\
write_package_meta
,
get_builtin_algo_meta
,
get_not_installable_builtin_names
,
ALGO_TYPES
from
.constants
import
INSTALLABLE_PACKAGE_META
from
.common_utils
import
print_error
,
print_green
from
.command_utils
import
install_requirements_command
,
call_pip_install
,
call_pip_uninstall
PACKAGE_TYPES
=
[
'tuner'
,
'assessor'
,
'advisor'
]
def install_by_name(package_name):
    """Install the pip requirements of one of NNI's installable builtin algorithms.

    Parameters
    ----------
    package_name: str
        key into INSTALLABLE_PACKAGE_META (e.g. 'SMAC', 'BOHB').

    Returns: int
    -------
    Exit code of the pip install command (0 on success).

    Raises
    ------
    RuntimeError
        if the name is unknown or its requirements.txt cannot be found.
    """
    if package_name not in INSTALLABLE_PACKAGE_META:
        raise RuntimeError('{} is not found in installable packages!'.format(package_name))
    requirements_path = os.path.join(
        nni.__path__[0],
        'algorithms/hpo',
        INSTALLABLE_PACKAGE_META[package_name]['code_sub_dir'],
        'requirements.txt')
    if not os.path.exists(requirements_path):
        # `assert` is stripped under `python -O`, so validate explicitly.
        raise RuntimeError('requirements file not found: {}'.format(requirements_path))
    return install_requirements_command(requirements_path)
def package_install(args):
    '''install packages'''
    # Tracks whether any install path completed; used for the final error report.
    installed = False
    try:
        if args.name:
            # Named install: a builtin algorithm whose metadata NNI ships with.
            if install_by_name(args.name) == 0:
                src = INSTALLABLE_PACKAGE_META[args.name]
                package_meta = {
                    'type': src['type'],
                    'name': args.name,
                    'class_name': src['class_name'],
                    'class_args_validator': src['class_args_validator'],
                }
                save_package_meta_data(package_meta)
                print_green('{} installed!'.format(args.name))
                installed = True
        else:
            # Source install: metadata is read from the package's classifiers.
            package_meta = get_nni_meta(args.source)
            if package_meta and call_pip_install(args.source) == 0:
                save_package_meta_data(package_meta)
                print_green('{} installed!'.format(package_meta['name']))
                installed = True
    except Exception as e:
        # CLI boundary: report any failure rather than crash with a traceback.
        print_error(e)
    if not installed:
        print_error('installation failed!')
def package_uninstall(args):
    """Uninstall a previously installed algorithm package.

    Refuses to uninstall non-installable builtins, pip-uninstalls the backing
    package when one was recorded, then removes the registration metadata.
    """
    name = args.name[0]
    if name in get_not_installable_builtin_names():
        # Pre-installed builtins ship with NNI itself and cannot be removed.
        print_error('{} can not be uninstalled!'.format(name))
        exit(1)
    meta = get_installed_package_meta(None, name)
    if meta is None:
        print_error('package {} not found!'.format(name))
        return
    if 'installed_package' in meta:
        # Only source/wheel installs record a pip package to remove.
        call_pip_uninstall(meta['installed_package'])
    if remove_package_meta_data(name):
        # Fixed message typo: 'sucessfully' -> 'successfully'.
        print_green('{} uninstalled successfully!'.format(name))
    else:
        print_error('Failed to uninstall {}!'.format(name))
def package_show(args):
    '''show specified packages'''
    builtin_name = args.name[0]
    meta = get_builtin_algo_meta(builtin_name=builtin_name)
    # Guard clause: report the miss and bail out early.
    if not meta:
        print_error('package {} not found'.format(builtin_name))
        return
    print(json.dumps(meta, indent=4))
def print_package_list(meta):
    """Print an ASCII table of tuner/assessor/advisor packages.

    Parameters
    ----------
    meta: dict
        mapping of 'tuners'/'assessors'/'advisors' to lists of package dicts,
        each with at least 'name', 'installed' and 'class_name' keys.
    """
    MAX_MODULE_NAME = 38
    # One shared rule for top, header and bottom borders.  The original top
    # border contained a stray '=' making the three rules inconsistent.
    separator = '+-----------------+------------+-----------+----------------------+------------------------------------------+'
    print(separator)
    print('| Name            | Type       | Installed | Class Name           | Module Name                              |')
    print(separator)
    for t in ['tuners', 'assessors', 'advisors']:
        for p in meta[t]:
            # Module name is the dotted path minus the final class component.
            module_name = '.'.join(p['class_name'].split('.')[:-1])
            if len(module_name) > MAX_MODULE_NAME:
                module_name = module_name[:MAX_MODULE_NAME - 3] + '...'
            class_name = p['class_name'].split('.')[-1]
            print('| {:15s} | {:10s} | {:9s} | {:20s} | {:40s} |'.format(
                p['name'], t, p['installed'], class_name, module_name[:38]))
    print(separator)
def package_list(args):
    '''list all packages'''
    # With --all, include every builtin; otherwise only user-installed ones.
    meta = get_builtin_algo_meta() if args.all else read_installed_package_meta()
    installed_names = defaultdict(list)
    for algo_kind in ['tuners', 'assessors', 'advisors']:
        for entry in meta[algo_kind]:
            entry['installed'] = 'Yes'
            installed_names[algo_kind].append(entry['name'])
    # Append installable-but-not-installed packages so they show as 'No'.
    for pkg_name, pkg_info in INSTALLABLE_PACKAGE_META.items():
        algo_kind = pkg_info['type'] + 's'
        if pkg_name not in installed_names[algo_kind]:
            meta[algo_kind].append({
                'name': pkg_name,
                'class_name': pkg_info['class_name'],
                'class_args_validator': pkg_info['class_args_validator'],
                'installed': 'No',
            })
    print_package_list(meta)
def save_package_meta_data(meta_data):
    """Register one package's metadata in the installed-packages config file."""
    assert meta_data['type'] in PACKAGE_TYPES
    assert 'name' in meta_data
    assert 'class_name' in meta_data

    algo_key = meta_data['type'] + 's'
    config = read_installed_package_meta()
    existing_names = {entry['name'] for entry in config[algo_key]}
    if meta_data['name'] in existing_names:
        raise ValueError('name %s already installed' % meta_data['name'])

    # Copy over only the fields that belong in the persisted record.
    package_meta = {}
    for field in ('name', 'class_name', 'class_args_validator'):
        if field in meta_data:
            package_meta[field] = meta_data[field]
    if 'package_name' in meta_data:
        # Remember the pip package name so uninstall can pip-remove it later.
        package_meta['installed_package'] = meta_data['package_name']

    config[algo_key].append(package_meta)
    write_package_meta(config)
def remove_package_meta_data(name):
    """Remove a package's entry from the installed-packages config file.

    Parameters
    ----------
    name: str
        package name to deregister.

    Returns: bool
    -------
    True if an entry was removed and the config rewritten, False otherwise.
    """
    config = read_installed_package_meta()
    updated = False
    for t in ALGO_TYPES:
        # Rebuild the list instead of calling list.remove() while iterating
        # the same list -- the original mutation-during-iteration could skip
        # entries that follow a removed one.
        kept = [meta for meta in config[t] if meta['name'] != name]
        if len(kept) != len(config[t]):
            config[t] = kept
            updated = True
    if updated:
        write_package_meta(config)
        return True
    return False
def get_nni_meta(source):
    """Extract NNI package metadata from a source directory or wheel file.

    Parameters
    ----------
    source: str
        path to a package source directory (must contain setup.py) or a
        '.whl' file.

    Returns: dict | None
    -------
    Metadata parsed from the package classifiers plus 'package_name',
    or None when the source is invalid (an error is printed).
    """
    if not os.path.exists(source):
        print_error('{} does not exist'.format(source))
        return None
    if os.path.isdir(source):
        if not os.path.exists(os.path.join(source, 'setup.py')):
            print_error('setup.py not found')
            return None
        pkg = pkginfo.Develop(source)
    else:
        if not source.endswith('.whl'):
            # Fixed message grammar ('must ends with' -> 'must end with').
            print_error('File name {} must end with \'.whl\''.format(source))
            # Was `return False`: normalized to None so all error paths
            # return the same falsy sentinel (callers only truth-test it).
            return None
        pkg = pkginfo.Wheel(source)
    classifiers = pkg.classifiers
    meta = parse_classifiers(classifiers)
    meta['package_name'] = pkg.name
    return meta
def parse_classifiers(classifiers):
    """Parse NNI metadata out of a package's classifier strings.

    Looks for the first classifier starting with 'NNI Package', splits it on
    '::' and maps the fields to type / name / class_name (and optionally
    class_args_validator).  Raises ValueError when no valid entry is found.
    """
    nni_entry = next((c for c in classifiers if c.startswith('NNI Package')), None)
    fields = [] if nni_entry is None else [part.strip() for part in nni_entry.split('::')]
    # A valid entry has at least 4 non-empty '::'-separated fields.
    if len(fields) < 4 or not all(fields):
        raise ValueError('Can not find correct NNI meta data in package classifiers.')
    meta = {
        'type': fields[1],
        'name': fields[2],
        'class_name': fields[3],
    }
    if len(fields) >= 5:
        meta['class_args_validator'] = fields[4]
    return meta
nni/tools/package_utils/__init__.py
View file @
593a275c
...
@@ -6,18 +6,13 @@ import importlib
...
@@ -6,18 +6,13 @@ import importlib
import
os
import
os
from
pathlib
import
Path
from
pathlib
import
Path
import
sys
import
sys
import
ruamel.yaml
as
yaml
import
ruamel.yaml
as
yaml
import
nni
import
nni
from
.constants
import
BuiltinAlgorithms
ALGO_TYPES
=
[
'tuners'
,
'assessors'
,
'advisors'
]
ALGO_TYPES
=
[
'tuners'
,
'assessors'
,
'advisors'
]
def
get_all_builtin_names
(
algo_type
):
def
get_all_builtin_names
(
algo_type
):
"""Get all valid builtin names, including:
"""Get all builtin names of registered algorithms of specified type
1. BuiltinAlgorithms which is pre-installed.
2. User installed packages in <nni_installation_path>/config/installed_packages.yml
Parameters
Parameters
----------
----------
...
@@ -30,109 +25,33 @@ def get_all_builtin_names(algo_type):
...
@@ -30,109 +25,33 @@ def get_all_builtin_names(algo_type):
all builtin tuner names.
all builtin tuner names.
"""
"""
assert
algo_type
in
ALGO_TYPES
assert
algo_type
in
ALGO_TYPES
merged_dict
=
_get_merged_builtin_dict
()
builtin_names
=
[
x
[
'name'
]
for
x
in
merged_dict
[
algo_type
]]
return
builtin_names
def
get_not_installable_builtin_names
(
algo_type
=
None
):
return
[
x
[
'builtinName'
]
for
x
in
read_registerd_algo_meta
()[
algo_type
]]
"""Get builtin names in BuiltinAlgorithms which do not need to be installed
and can be used once NNI is installed.
Parameters
----------
algo_type: str | None
can be one of 'tuners', 'assessors', 'advisors' or None
Returns: list of string
-------
All builtin names of specified type, for example, if algo_type is 'tuners', returns
all builtin tuner names.
If algo_type is None, returns all builtin names of all types.
"""
if
algo_type
is
None
:
meta
=
BuiltinAlgorithms
else
:
assert
algo_type
in
ALGO_TYPES
meta
=
{
algo_type
:
BuiltinAlgorithms
[
algo_type
]
}
names
=
[]
for
t
in
ALGO_TYPES
:
if
t
in
meta
:
names
.
extend
([
x
[
'name'
]
for
x
in
meta
[
t
]])
return
names
def
get_builtin_algo_meta
(
algo_type
=
None
,
builtin_name
=
None
):
def
get_registered_algo_meta
(
builtin_name
,
algo_type
=
None
):
""" Get meta information of builtin algorithms from:
""" Get meta information of registered algorithms.
1. Pre-installed BuiltinAlgorithms
2. User installed packages in <nni_installation_path>/config/installed_packages.yml
Parameters
Parameters
----------
----------
algo_type: str | None
builtin_name: str
can be one of 'tuners', 'assessors', 'advisors' or None
builtin_name: str | None
builtin name.
builtin name.
Returns: dict | list of dict | None
-------
If builtin_name is specified, returns meta information of speicified builtin
alogorithms, for example:
{
'name': 'Random',
'class_name': 'nni.hyperopt_tuner.hyperopt_tuner.HyperoptTuner',
'class_args': {
'algorithm_name': 'random_search'
},
'accept_class_args': False,
'class_args_validator': 'nni.hyperopt_tuner.hyperopt_tuner.HyperoptClassArgsValidator'
}
If builtin_name is None, returns multiple meta information in a list.
"""
merged_dict
=
_get_merged_builtin_dict
()
if
algo_type
is
None
and
builtin_name
is
None
:
return
merged_dict
if
algo_type
:
assert
algo_type
in
ALGO_TYPES
metas
=
merged_dict
[
algo_type
]
else
:
metas
=
merged_dict
[
'tuners'
]
+
merged_dict
[
'assessors'
]
+
merged_dict
[
'advisors'
]
if
builtin_name
:
for
m
in
metas
:
if
m
[
'name'
]
==
builtin_name
:
return
m
else
:
return
metas
return
None
def
get_installed_package_meta
(
algo_type
,
builtin_name
):
""" Get meta information of user installed algorithms from:
<nni_installation_path>/config/installed_packages.yml
Parameters
----------
algo_type: str | None
algo_type: str | None
can be one of 'tuners', 'assessors', 'advisors' or None
can be one of 'tuners', 'assessors', 'advisors' or None
builtin_name: str
builtin name.
Returns: dict | None
Returns: dict | None
-------
-------
Returns meta information of specified builtin algorithms, for example:
Returns meta information of specified builtin algorithms, for example:
{
{
'class
_a
rgs
_v
alidator': 'nni.smac_tuner.smac_tuner.SMACClassArgsValidator',
'class
A
rgs
V
alidator': 'nni.smac_tuner.smac_tuner.SMACClassArgsValidator',
'class
_n
ame': 'nni.smac_tuner.smac_tuner.SMACTuner',
'class
N
ame': 'nni.smac_tuner.smac_tuner.SMACTuner',
'
n
ame': 'SMAC'
'
builtinN
ame': 'SMAC'
}
}
"""
"""
assert
builtin_name
is
not
None
assert
builtin_name
is
not
None
if
algo_type
:
if
algo_type
:
assert
algo_type
in
ALGO_TYPES
assert
algo_type
in
ALGO_TYPES
config
=
read_
installed_package
_meta
()
config
=
read_
registerd_algo
_meta
()
candidates
=
[]
candidates
=
[]
if
algo_type
:
if
algo_type
:
...
@@ -141,11 +60,11 @@ def get_installed_package_meta(algo_type, builtin_name):
...
@@ -141,11 +60,11 @@ def get_installed_package_meta(algo_type, builtin_name):
for
algo_type
in
ALGO_TYPES
:
for
algo_type
in
ALGO_TYPES
:
candidates
.
extend
(
config
[
algo_type
])
candidates
.
extend
(
config
[
algo_type
])
for
meta
in
candidates
:
for
meta
in
candidates
:
if
meta
[
'
n
ame'
]
==
builtin_name
:
if
meta
[
'
builtinN
ame'
]
==
builtin_name
:
return
meta
return
meta
return
None
return
None
def
_
parse_full_class_name
(
full_class_name
):
def
parse_full_class_name
(
full_class_name
):
if
not
full_class_name
:
if
not
full_class_name
:
return
None
,
None
return
None
,
None
parts
=
full_class_name
.
split
(
'.'
)
parts
=
full_class_name
.
split
(
'.'
)
...
@@ -168,10 +87,10 @@ def get_builtin_module_class_name(algo_type, builtin_name):
...
@@ -168,10 +87,10 @@ def get_builtin_module_class_name(algo_type, builtin_name):
"""
"""
assert
algo_type
in
ALGO_TYPES
assert
algo_type
in
ALGO_TYPES
assert
builtin_name
is
not
None
assert
builtin_name
is
not
None
meta
=
get_
builtin
_algo_meta
(
algo_type
,
builtin_name
)
meta
=
get_
registered
_algo_meta
(
builtin_nam
e
,
algo_typ
e
)
if
not
meta
:
if
not
meta
:
return
None
,
None
return
None
,
None
return
_
parse_full_class_name
(
meta
[
'class
_n
ame'
])
return
parse_full_class_name
(
meta
[
'class
N
ame'
])
def
create_validator_instance
(
algo_type
,
builtin_name
):
def
create_validator_instance
(
algo_type
,
builtin_name
):
"""Create instance of validator class
"""Create instance of validator class
...
@@ -190,10 +109,10 @@ def create_validator_instance(algo_type, builtin_name):
...
@@ -190,10 +109,10 @@ def create_validator_instance(algo_type, builtin_name):
"""
"""
assert
algo_type
in
ALGO_TYPES
assert
algo_type
in
ALGO_TYPES
assert
builtin_name
is
not
None
assert
builtin_name
is
not
None
meta
=
get_
builtin
_algo_meta
(
algo_type
,
builtin_name
)
meta
=
get_
registered
_algo_meta
(
builtin_nam
e
,
algo_typ
e
)
if
not
meta
or
'class
_a
rgs
_v
alidator'
not
in
meta
:
if
not
meta
or
'class
A
rgs
V
alidator'
not
in
meta
:
return
None
return
None
module_name
,
class_name
=
_
parse_full_class_name
(
meta
[
'class
_a
rgs
_v
alidator'
])
module_name
,
class_name
=
parse_full_class_name
(
meta
[
'class
A
rgs
V
alidator'
])
class_module
=
importlib
.
import_module
(
module_name
)
class_module
=
importlib
.
import_module
(
module_name
)
class_constructor
=
getattr
(
class_module
,
class_name
)
class_constructor
=
getattr
(
class_module
,
class_name
)
...
@@ -229,17 +148,17 @@ def create_builtin_class_instance(builtin_name, input_class_args, algo_type):
...
@@ -229,17 +148,17 @@ def create_builtin_class_instance(builtin_name, input_class_args, algo_type):
2. merge user specified class args together with builtin class args.
2. merge user specified class args together with builtin class args.
"""
"""
assert
algo_meta
assert
algo_meta
module_name
,
class_name
=
_
parse_full_class_name
(
algo_meta
[
'class
_n
ame'
])
module_name
,
class_name
=
parse_full_class_name
(
algo_meta
[
'class
N
ame'
])
class_args
=
{}
class_args
=
{}
if
'class
_a
rgs'
in
algo_meta
:
if
'class
A
rgs'
in
algo_meta
:
class_args
=
algo_meta
[
'class
_a
rgs'
]
class_args
=
algo_meta
[
'class
A
rgs'
]
if
input_class_args
is
not
None
:
if
input_class_args
is
not
None
:
class_args
.
update
(
input_class_args
)
class_args
.
update
(
input_class_args
)
return
module_name
,
class_name
,
class_args
return
module_name
,
class_name
,
class_args
algo_meta
=
get_
builtin
_algo_meta
(
algo_type
,
builtin_name
)
algo_meta
=
get_
registered
_algo_meta
(
builtin_nam
e
,
algo_typ
e
)
module_name
,
class_name
,
class_args
=
parse_algo_meta
(
algo_meta
,
input_class_args
)
module_name
,
class_name
,
class_args
=
parse_algo_meta
(
algo_meta
,
input_class_args
)
if
importlib
.
util
.
find_spec
(
module_name
)
is
None
:
if
importlib
.
util
.
find_spec
(
module_name
)
is
None
:
...
@@ -287,15 +206,26 @@ def create_customized_class_instance(class_params):
...
@@ -287,15 +206,26 @@ def create_customized_class_instance(class_params):
return
instance
return
instance
def
get_package_config_path
():
def
_using_conda_or_virtual_environment
():
# FIXME: this might not be the desired location
return
sys
.
prefix
!=
sys
.
base_prefix
or
os
.
path
.
isdir
(
os
.
path
.
join
(
sys
.
prefix
,
'conda-meta'
))
config_dir
=
Path
(
nni
.
__path__
[
0
]).
parent
/
'nni_config'
if
not
os
.
path
.
exists
(
config_dir
):
os
.
makedirs
(
config_dir
,
exist_ok
=
True
)
return
os
.
path
.
join
(
config_dir
,
'installed_packages.yml'
)
def
read_installed_package_meta
():
def
get_registered_algo_config_path
():
config_file
=
get_package_config_path
()
# Find the path for registered_algorithms.yml for this nni installation,
# the registered_algorithms.yml is copied into this location in setup.py,
# so we need to ensure that we use the same logic as setup.py to find the location.
if
_using_conda_or_virtual_environment
():
nni_config_dir
=
os
.
path
.
join
(
sys
.
prefix
,
'nni'
)
elif
sys
.
platform
==
'win32'
:
nni_config_dir
=
os
.
path
.
join
(
os
.
getenv
(
'APPDATA'
),
'nni'
)
else
:
nni_config_dir
=
os
.
path
.
expanduser
(
'~/.config/nni'
)
if
not
os
.
path
.
exists
(
nni_config_dir
):
os
.
makedirs
(
nni_config_dir
,
exist_ok
=
True
)
return
os
.
path
.
join
(
nni_config_dir
,
'registered_algorithms.yml'
)
def
read_registerd_algo_meta
():
config_file
=
get_registered_algo_config_path
()
if
os
.
path
.
exists
(
config_file
):
if
os
.
path
.
exists
(
config_file
):
with
open
(
config_file
,
'r'
)
as
f
:
with
open
(
config_file
,
'r'
)
as
f
:
config
=
yaml
.
load
(
f
,
Loader
=
yaml
.
Loader
)
config
=
yaml
.
load
(
f
,
Loader
=
yaml
.
Loader
)
...
@@ -306,16 +236,7 @@ def read_installed_package_meta():
...
@@ -306,16 +236,7 @@ def read_installed_package_meta():
config
[
t
]
=
[]
config
[
t
]
=
[]
return
config
return
config
def
write_
package
_meta
(
config
):
def
write_
registered_algo
_meta
(
config
):
config_file
=
get_
package
_config_path
()
config_file
=
get_
registered_algo
_config_path
()
with
open
(
config_file
,
'w'
)
as
f
:
with
open
(
config_file
,
'w'
)
as
f
:
f
.
write
(
yaml
.
dump
(
dict
(
config
),
default_flow_style
=
False
))
f
.
write
(
yaml
.
dump
(
dict
(
config
),
default_flow_style
=
False
))
def
_get_merged_builtin_dict
():
def
merge_meta_dict
(
d1
,
d2
):
res
=
defaultdict
(
list
)
for
t
in
ALGO_TYPES
:
res
[
t
]
=
d1
[
t
]
+
d2
[
t
]
return
res
return
merge_meta_dict
(
BuiltinAlgorithms
,
read_installed_package_meta
())
nni/tools/package_utils/constants.py
deleted
100644 → 0
View file @
b3cdee85
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
# Registry of the tuner/assessor/advisor algorithms that ship with NNI itself.
# Each entry records:
#   'name'                 -- the builtin name users reference in config files
#   'class_name'           -- dotted path of the implementation class
#   'class_args'           -- (optional) default constructor arguments
#   'accept_class_args'    -- (optional) False when user class args are rejected
#   'class_args_validator' -- (optional) dotted path of the args validator class
BuiltinAlgorithms = {
    'tuners': [
        {
            'name': 'TPE',
            'class_name': 'nni.algorithms.hpo.hyperopt_tuner.hyperopt_tuner.HyperoptTuner',
            # TPE and Random share one Hyperopt-backed class, selected by 'algorithm_name'.
            'class_args': {
                'algorithm_name': 'tpe'
            },
            'class_args_validator': 'nni.algorithms.hpo.hyperopt_tuner.hyperopt_tuner.HyperoptClassArgsValidator'
        },
        {
            'name': 'Random',
            'class_name': 'nni.algorithms.hpo.hyperopt_tuner.hyperopt_tuner.HyperoptTuner',
            'class_args': {
                'algorithm_name': 'random_search'
            },
            'accept_class_args': False,
            'class_args_validator': 'nni.algorithms.hpo.hyperopt_tuner.hyperopt_tuner.HyperoptClassArgsValidator'
        },
        {
            'name': 'Anneal',
            'class_name': 'nni.algorithms.hpo.hyperopt_tuner.hyperopt_tuner.HyperoptTuner',
            'class_args': {
                'algorithm_name': 'anneal'
            },
            'class_args_validator': 'nni.algorithms.hpo.hyperopt_tuner.hyperopt_tuner.HyperoptClassArgsValidator'
        },
        {
            'name': 'Evolution',
            'class_name': 'nni.algorithms.hpo.evolution_tuner.evolution_tuner.EvolutionTuner',
            'class_args_validator': 'nni.algorithms.hpo.evolution_tuner.evolution_tuner.EvolutionClassArgsValidator'
        },
        {
            'name': 'BatchTuner',
            'class_name': 'nni.algorithms.hpo.batch_tuner.batch_tuner.BatchTuner',
            'accept_class_args': False,
        },
        {
            'name': 'GridSearch',
            'class_name': 'nni.algorithms.hpo.gridsearch_tuner.gridsearch_tuner.GridSearchTuner',
            'accept_class_args': False,
        },
        {
            'name': 'NetworkMorphism',
            'class_name': 'nni.algorithms.hpo.networkmorphism_tuner.networkmorphism_tuner.NetworkMorphismTuner',
            'class_args_validator': 'nni.algorithms.hpo.networkmorphism_tuner.networkmorphism_tuner.NetworkMorphismClassArgsValidator'
        },
        {
            'name': 'MetisTuner',
            'class_name': 'nni.algorithms.hpo.metis_tuner.metis_tuner.MetisTuner',
            'class_args_validator': 'nni.algorithms.hpo.metis_tuner.metis_tuner.MetisClassArgsValidator'
        },
        {
            'name': 'GPTuner',
            'class_name': 'nni.algorithms.hpo.gp_tuner.gp_tuner.GPTuner',
            'class_args_validator': 'nni.algorithms.hpo.gp_tuner.gp_tuner.GPClassArgsValidator'
        },
        {
            'name': 'PBTTuner',
            'class_name': 'nni.algorithms.hpo.pbt_tuner.pbt_tuner.PBTTuner',
            'class_args_validator': 'nni.algorithms.hpo.pbt_tuner.pbt_tuner.PBTClassArgsValidator'
        },
        {
            'name': 'RegularizedEvolutionTuner',
            'class_name': 'nni.algorithms.hpo.regularized_evolution_tuner.regularized_evolution_tuner.RegularizedEvolutionTuner',
            'class_args_validator': 'nni.algorithms.hpo.regularized_evolution_tuner.regularized_evolution_tuner.EvolutionClassArgsValidator'
        }
    ],
    'assessors': [
        {
            'name': 'Medianstop',
            'class_name': 'nni.algorithms.hpo.medianstop_assessor.medianstop_assessor.MedianstopAssessor',
            'class_args_validator': 'nni.algorithms.hpo.medianstop_assessor.medianstop_assessor.MedianstopClassArgsValidator'
        },
        {
            'name': 'Curvefitting',
            'class_name': 'nni.algorithms.hpo.curvefitting_assessor.curvefitting_assessor.CurvefittingAssessor',
            'class_args_validator': 'nni.algorithms.hpo.curvefitting_assessor.curvefitting_assessor.CurvefittingClassArgsValidator'
        },
    ],
    'advisors': [
        {
            'name': 'Hyperband',
            'class_name': 'nni.algorithms.hpo.hyperband_advisor.hyperband_advisor.Hyperband',
            'class_args_validator': 'nni.algorithms.hpo.hyperband_advisor.hyperband_advisor.HyperbandClassArgsValidator'
        }
    ]
}
nni/utils.py
View file @
593a275c
...
@@ -109,6 +109,7 @@ def extract_scalar_history(trial_history, scalar_key='default'):
...
@@ -109,6 +109,7 @@ def extract_scalar_history(trial_history, scalar_key='default'):
def
convert_dict2tuple
(
value
):
def
convert_dict2tuple
(
value
):
"""
"""
convert dict type to tuple to solve unhashable problem.
convert dict type to tuple to solve unhashable problem.
NOTE: this function will change original data.
"""
"""
if
isinstance
(
value
,
dict
):
if
isinstance
(
value
,
dict
):
for
_keys
in
value
:
for
_keys
in
value
:
...
...
pipelines/fast-test.yml
View file @
593a275c
...
@@ -2,25 +2,30 @@
...
@@ -2,25 +2,30 @@
# so that a bug in any module will cause at least one platform to fail quickly.
# so that a bug in any module will cause at least one platform to fail quickly.
jobs
:
jobs
:
-
job
:
'
ubuntu_latest
'
-
job
:
ubuntu_latest
pool
:
pool
:
# FIXME: In ubuntu-20.04 Python interpreter crashed during SMAC UT
# FIXME: In ubuntu-20.04 Python interpreter crashed during SMAC UT
vmImage
:
'
ubuntu-18.04
'
vmImage
:
ubuntu-18.04
# This platform tests lint and doc first.
# This platform tests lint and doc first.
steps
:
steps
:
-
task
:
UsePythonVersion@0
inputs
:
versionSpec
:
3.6
displayName
:
Configure Python version
-
script
:
|
-
script
:
|
set -e
set -e
python3 -m pip install
-U
--upgrade pip setuptools
python3 -m pip install --upgrade pip setuptools
python3 -m pip install
-U
pytest coverage
python3 -m pip install pytest coverage
python3 -m pip install
-U
pylint flake8
python3 -m pip install pylint flake8
echo "##vso[task.setvariable variable=PATH]${HOME}/.local/bin:${PATH}"
echo "##vso[task.setvariable variable=PATH]${HOME}/.local/bin:${PATH}"
displayName
:
'
Install
Python
tools
'
displayName
:
Install Python tools
-
script
:
|
-
script
:
|
python3 setup.py develop
python3 setup.py develop
displayName
:
'
Install
NNI
'
displayName
:
Install NNI
-
script
:
|
-
script
:
|
set -e
set -e
...
@@ -28,21 +33,19 @@ jobs:
...
@@ -28,21 +33,19 @@ jobs:
yarn eslint
yarn eslint
cd ../webui
cd ../webui
yarn eslint
yarn eslint
displayName
:
'
ESLint
'
displayName
:
ESLint
-
script
:
|
-
script
:
|
set -e
set -e
sudo apt-get install -y pandoc
sudo apt-get install -y pandoc
python3 -m pip install -U --upgrade pygments
python3 -m pip install --upgrade pygments
python3 -m pip install -U torch==1.7.0+cpu torchvision==0.8.1+cpu -f https://download.pytorch.org/whl/torch_stable.html
python3 -m pip install --upgrade torch>=1.7.0+cpu torchvision>=0.8.1+cpu -f https://download.pytorch.org/whl/torch_stable.html
python3 -m pip install -U tensorflow==2.3.1
python3 -m pip install --upgrade tensorflow
python3 -m pip install -U keras==2.4.2
python3 -m pip install --upgrade gym onnx peewee thop
python3 -m pip install -U gym onnx peewee thop
python3 -m pip install sphinx==1.8.3 sphinx-argparse==0.2.5 sphinx-markdown-tables==0.0.9 sphinx-rtd-theme==0.4.2 sphinxcontrib-websupport==1.1.0 recommonmark==0.5.0 nbsphinx
python3 -m pip install -U sphinx==1.8.3 sphinx-argparse==0.2.5 sphinx-markdown-tables==0.0.9 sphinx-rtd-theme==0.4.2 sphinxcontrib-websupport==1.1.0 recommonmark==0.5.0 nbsphinx
sudo apt-get install swig -y
sudo apt-get install swig -y
nnictl package install --name=SMAC
python3 -m pip install -e .[SMAC,BOHB]
nnictl package install --name=BOHB
displayName
:
Install extra dependencies
displayName
:
'
Install
extra
dependencies'
-
script
:
|
-
script
:
|
set -e
set -e
...
@@ -50,17 +53,17 @@ jobs:
...
@@ -50,17 +53,17 @@ jobs:
python3 -m flake8 nni --count --select=E9,F63,F72,F82 --show-source --statistics
python3 -m flake8 nni --count --select=E9,F63,F72,F82 --show-source --statistics
EXCLUDES=examples/trials/mnist-nas/*/mnist*.py,examples/trials/nas_cifar10/src/cifar10/general_child.py
EXCLUDES=examples/trials/mnist-nas/*/mnist*.py,examples/trials/nas_cifar10/src/cifar10/general_child.py
python3 -m flake8 examples --count --exclude=$EXCLUDES --select=E9,F63,F72,F82 --show-source --statistics
python3 -m flake8 examples --count --exclude=$EXCLUDES --select=E9,F63,F72,F82 --show-source --statistics
displayName
:
'
pylint
and
flake8
'
displayName
:
pylint and flake8
-
script
:
|
-
script
:
|
cd docs/en_US
cd docs/en_US
sphinx-build -M html . _build -W --keep-going -T
sphinx-build -M html . _build -W --keep-going -T
displayName
:
'
Check
Sphinx
documentation
'
displayName
:
Check Sphinx documentation
-
script
:
|
-
script
:
|
cd test
cd test
python3 -m pytest ut
python3 -m pytest ut
displayName
:
'
Python
unit
test
'
displayName
:
Python unit test
-
script
:
|
-
script
:
|
set -e
set -e
...
@@ -68,52 +71,56 @@ jobs:
...
@@ -68,52 +71,56 @@ jobs:
yarn test
yarn test
cd ../nasui
cd ../nasui
CI=true yarn test
CI=true yarn test
displayName
:
'
TypeScript
unit
test
'
displayName
:
TypeScript unit test
-
script
:
|
-
script
:
|
cd test
cd test
python3 nni_test/nnitest/run_tests.py --config config/pr_tests.yml
python3 nni_test/nnitest/run_tests.py --config config/pr_tests.yml
displayName
:
'
Simple
integration
test
'
displayName
:
Simple integration test
-
job
:
'
ubuntu_legacy
'
-
job
:
ubuntu_legacy
pool
:
pool
:
vmImage
:
'
ubuntu-18.04
'
vmImage
:
ubuntu-18.04
# This platform runs integration test first.
# This platform runs integration test first.
steps
:
steps
:
-
task
:
UsePythonVersion@0
inputs
:
versionSpec
:
3.6
displayName
:
Configure Python version
-
script
:
|
-
script
:
|
set -e
set -e
python
3
-m pip install
-U
--upgrade pip setuptools
python -m pip install --upgrade pip setuptools
python
3
-m pip install
-U
pytest coverage
python -m pip install pytest coverage
echo "##vso[task.setvariable variable=PATH]${HOME}/.local/bin:${PATH}"
echo "##vso[task.setvariable variable=PATH]${HOME}/.local/bin:${PATH}"
displayName
:
'
Install
Python
tools
'
displayName
:
Install Python tools
-
script
:
|
-
script
:
|
python
3
setup.py develop
python setup.py develop
displayName
:
'
Install
NNI
'
displayName
:
Install NNI
-
script
:
|
-
script
:
|
set -e
set -e
python
3
-m pip install
-U
torch==1.5.0+cpu torchvision==0.6.0+cpu -f https://download.pytorch.org/whl/torch_stable.html
python -m pip install torch==1.5.0+cpu torchvision==0.6.0+cpu -f https://download.pytorch.org/whl/torch_stable.html
python
3
-m pip install
-U
tensorflow==1.15.
2
python -m pip install tensorflow==1.15.
4
python
3
-m pip install
-U
keras==2.1.6
python -m pip install keras==2.1.6
python
3
-m pip install
-U
gym onnx peewee
python -m pip install gym onnx peewee
sudo apt-get install swig -y
sudo apt-get install swig -y
nnictl package install --name=SMAC
python -m pip install -e .[SMAC,BOHB]
nnictl package install --name=BOHB
displayName
:
Install extra dependencies
displayName
:
'
Install
extra
dependencies'
-
script
:
|
-
script
:
|
cd test
cd test
python
3
nni_test/nnitest/run_tests.py --config config/pr_tests.yml
python nni_test/nnitest/run_tests.py --config config/pr_tests.yml
displayName
:
'
Simple
integration
test
'
displayName
:
Simple integration test
-
script
:
|
-
script
:
|
cd test
cd test
python
3
-m pytest ut
python -m pytest ut
displayName
:
'
Python
unit
test
'
displayName
:
Python unit test
-
script
:
|
-
script
:
|
set -e
set -e
...
@@ -121,12 +128,12 @@ jobs:
...
@@ -121,12 +128,12 @@ jobs:
yarn test
yarn test
cd ../nasui
cd ../nasui
CI=true yarn test
CI=true yarn test
displayName
:
'
TypeScript
unit
test
'
displayName
:
TypeScript unit test
-
job
:
'
macos
'
-
job
:
macos
pool
:
pool
:
vmImage
:
'
macOS-10.15
'
vmImage
:
macOS-10.15
# This platform runs TypeScript unit test first.
# This platform runs TypeScript unit test first.
...
@@ -134,86 +141,91 @@ jobs:
...
@@ -134,86 +141,91 @@ jobs:
-
task
:
UsePythonVersion@0
-
task
:
UsePythonVersion@0
inputs
:
inputs
:
versionSpec
:
3.8
versionSpec
:
3.8
displayName
:
Configure Python
displayName
:
Configure Python
version
-
script
:
|
-
script
:
|
set -e
set -e
echo "##vso[task.setvariable variable=PATH]${PATH}:${HOME}/.local/bin"
echo "##vso[task.setvariable variable=PATH]${PATH}:${HOME}/.local/bin"
python -m pip install
-U
--upgrade pip setuptools wheel
python -m pip install --upgrade pip setuptools wheel
python -m pip install
-U
pytest coverage
python -m pip install pytest coverage
displayName
:
'
Install
Python
tools
'
displayName
:
Install Python tools
-
script
:
|
-
script
:
|
python
3
setup.py develop
python setup.py develop
displayName
:
'
Install
NNI
'
displayName
:
Install NNI
-
script
:
|
-
script
:
|
set -e
set -e
export CI=true
export CI=true
(cd ts/nni_manager && yarn test)
(cd ts/nni_manager && yarn test)
(cd ts/nasui && yarn test)
(cd ts/nasui && yarn test)
displayName
:
'
TypeScript
unit
test
'
displayName
:
TypeScript unit test
-
script
:
|
-
script
:
|
set -e
set -e
# pytorch Mac binary does not support CUDA, default is cpu version
# pytorch Mac binary does not support CUDA, default is cpu version
python
3
-m pip install
-U
torchvision==0.6.0 torch==1.5.0
python -m pip install torchvision==0.6.0 torch==1.5.0
python
3
-m pip install
-U
tensorflow==2.3.1
python -m pip install tensorflow==2.3.1
brew install swig@3
brew install swig@3
rm -f /usr/local/bin/swig
rm -f /usr/local/bin/swig
ln -s /usr/local/opt/swig\@3/bin/swig /usr/local/bin/swig
ln -s /usr/local/opt/swig\@3/bin/swig /usr/local/bin/swig
nnictl package
install -
-name=
SMAC
python -m pip
install -
e .[
SMAC
]
displayName
:
'
Install
extra
dependencies
'
displayName
:
Install extra dependencies
-
script
:
|
-
script
:
|
cd test
cd test
python
3
-m pytest ut
python -m pytest ut
displayName
:
'
Python
unit
test
'
displayName
:
Python unit test
-
script
:
|
-
script
:
|
cd test
cd test
python
3
nni_test/nnitest/run_tests.py --config config/pr_tests.yml
python nni_test/nnitest/run_tests.py --config config/pr_tests.yml
displayName
:
'
Simple
integration
test
'
displayName
:
Simple integration test
# FIXME: Windows UT is still under debugging
# FIXME: Windows UT is still under debugging
-
job
:
'
windows
'
-
job
:
windows
pool
:
pool
:
vmImage
:
'
windows-2019
'
vmImage
:
windows-2019
# This platform runs Python unit test first.
# This platform runs Python unit test first.
steps
:
steps
:
-
task
:
UsePythonVersion@0
inputs
:
versionSpec
:
3.8
displayName
:
Configure Python version
-
script
:
|
-
script
:
|
python -m pip install
-U
--upgrade pip setuptools
python -m pip install --upgrade pip setuptools
python -m pip install
-U
pytest coverage
python -m pip install pytest coverage
displayName
:
'
Install
Python
tools
'
displayName
:
Install Python tools
-
script
:
|
-
script
:
|
python setup.py develop --no-user
python setup.py develop --no-user
displayName
:
'
Install
NNI
'
displayName
:
Install NNI
-
script
:
|
-
script
:
|
python -m pip install
-U
scikit-learn==0.23.2
python -m pip install scikit-learn==0.23.2
python -m pip install
-U
torch==1.5.0+cpu torchvision==0.6.0+cpu -f https://download.pytorch.org/whl/torch_stable.html
python -m pip install torch==1.5.0+cpu torchvision==0.6.0+cpu -f https://download.pytorch.org/whl/torch_stable.html
python -m pip install
-U
tensorflow==2.3.1
python -m pip install tensorflow==2.3.1
displayName
:
'
Install
extra
dependencies
'
displayName
:
Install extra dependencies
-
script
:
|
-
script
:
|
cd test
cd test
python -m pytest ut
python -m pytest ut
displayName
:
'
Python
unit
test
'
displayName
:
Python unit test
-
script
:
|
-
script
:
|
cd ts/nni_manager
cd ts/nni_manager
yarn test
yarn test
displayName
:
'
TypeScript
unit
test
'
displayName
:
TypeScript unit test
-
script
:
|
-
script
:
|
cd test
cd test
python nni_test/nnitest/run_tests.py --config config/pr_tests.yml
python nni_test/nnitest/run_tests.py --config config/pr_tests.yml
displayName
:
'
Simple
integration
test
'
displayName
:
Simple integration test
trigger
:
trigger
:
...
...
Prev
1
2
3
4
5
Next
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment