OpenDAS / nni · Commits

Commit 4784cc6c (unverified)
Authored Jan 14, 2021 by liuzhe-lz; committed by GitHub on Jan 14, 2021
Parents: 25db55ca, 349ead41

Merge pull request #3302 from microsoft/v2.0-merge

Merge branch v2.0 into master (no squash)

Changes: 291 files in total. This page shows 20 changed files with 100 additions and 693 deletions (+100 −693).
Files changed on this page:

test/.gitignore                                            +1   −0
test/config/examples/cifar10-pytorch-adl.yml               +23  −0
test/config/examples/cifar10_adl_search_space.json         +5   −0
test/config/integration_tests.yml                          +4   −0
test/config/integration_tests_tf2.yml                      +4   −0
test/config/training_service.yml                           +19  −0
test/nni_test/nnitest/generate_ts_config.py                +23  −1
test/nni_test/nnitest/run_tests.py                         +21  −3
test/pipelines/pipelines-it-frameworkcontroller.yml        +0   −69
test/pipelines/pipelines-it-installation.yml               +0   −41
test/pipelines/pipelines-it-kubeflow.yml                   +0   −68
test/pipelines/pipelines-it-local-tf2.yml                  +0   −37
test/pipelines/pipelines-it-local-windows.yml              +0   −28
test/pipelines/pipelines-it-local.yml                      +0   −45
test/pipelines/pipelines-it-pai-windows.yml                +0   −80
test/pipelines/pipelines-it-pai.yml                        +0   −70
test/pipelines/pipelines-it-paiYarn.yml                    +0   −61
test/pipelines/pipelines-it-remote-linux-to-linux.yml      +0   −75
test/pipelines/pipelines-it-remote-linux-to-windows.yml    +0   −55
test/pipelines/pipelines-it-remote-windows-to-linux.yml    +0   −60
test/.gitignore (+1 −0)

@@ -5,6 +5,7 @@ tuner_result.txt
assessor_result.txt
_generated_model.py
_generated_model_*.py
data
generated
test/config/examples/cifar10-pytorch-adl.yml (new file, +23 −0)

authorName: nni
experimentName: default_test
maxExecDuration: 15m
maxTrialNum: 1
trialConcurrency: 1
searchSpacePath: ./cifar10_adl_search_space.json

tuner:
  builtinTunerName: Random
assessor:
  builtinAssessorName: Medianstop
  classArgs:
    optimize_mode: maximize
trial:
  codeDir: /examples/trials/cifar10_pytorch
  command: python3 main_adl.py --epochs 1
  gpuNum: 1

useAnnotation: false
multiPhase: false
multiThread: false

trainingServicePlatform: adl
test/config/examples/cifar10_adl_search_space.json (new file, +5 −0)

{
    "lr": {"_type": "choice", "_value": [0.1, 0.01, 0.001]},
    "bs": {"_type": "choice", "_value": [64, 96, 128]},
    "model": {"_type": "choice", "_value": ["ResNet18", "SENet18", "MobileNet"]}
}
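The experiment config above runs /examples/trials/cifar10_pytorch/main_adl.py inside the adl container; that trial script itself is not part of this diff. Purely as an illustration, here is a minimal sketch of how an NNI trial consumes a search space like cifar10_adl_search_space.json (the parameter names lr, bs and model come from the JSON above; the training function is a placeholder, not the real CIFAR-10 code):

    import random
    import nni

    def train_one_epoch(model_name, lr, batch_size):
        # Placeholder for the real CIFAR-10 training loop; returns a fake accuracy.
        return random.random()

    def main():
        # The tuner (Random, per the experiment config) samples one point from
        # the search space: lr, bs and model as defined in the JSON above.
        params = nni.get_next_parameter()
        accuracy = 0.0
        for _ in range(1):  # --epochs 1, matching the trial command
            accuracy = train_one_epoch(params['model'], params['lr'], params['bs'])
            nni.report_intermediate_result(accuracy)  # consumed by the Medianstop assessor
        nni.report_final_result(accuracy)

    if __name__ == '__main__':
        main()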
test/config/integration_tests.yml (+4 −0)

@@ -75,6 +75,10 @@ testCases:
     command: python3 main.py --epochs 1 --batches 1
     gpuNum: 0
 
+- name: cifar10-pytorch-adl
+  configFile: test/config/examples/cifar10-pytorch-adl.yml
+  trainingService: adl
+
 #- name: nested-ss
 #  configFile: test/config/examples/mnist-nested-search-space.yml
test/config/integration_tests_tf2.yml (+4 −0)

@@ -52,6 +52,10 @@ testCases:
     command: python3 main.py --epochs 1 --batches 1
     gpuNum: 0
 
+- name: cifar10-pytorch-adl
+  configFile: test/config/examples/cifar10-pytorch-adl.yml
+  trainingService: adl
+
 - name: classic-nas-gen-ss
   configFile: test/config/examples/classic-nas-tf2.yml
   launchCommand: nnictl ss_gen --trial_command="python3 train.py --epochs 1" --trial_dir=../examples/nas/classic_nas-tf --file=config/examples/nni-nas-search-space-tf2.json
test/config/training_service.yml (+19 −0)

@@ -103,3 +103,22 @@ remote:
     port:
     username:
   trainingServicePlatform: remote
+
+adl:
+  maxExecDuration: 15m
+  nniManagerIp:
+  # use a small trial number to make IT faster
+  maxTrialNum: 2
+  trialConcurrency: 2
+  trial:
+    namespace: default
+    command:
+    codeDir:
+    gpuNum: 1
+    cpuNum: 1
+    image:
+    memorySize: 1Gi
+    checkpoint:
+      storageClass:
+      storageSize:
+  trainingServicePlatform: adl
test/nni_test/nnitest/generate_ts_config.py (+23 −1)

@@ -88,13 +88,28 @@ def update_training_service_config(args):
             config[args.ts]['machineList'][0]['passwd'] = args.remote_pwd
         if args.remote_reuse is not None:
             config[args.ts]['remoteConfig']['reuse'] = args.remote_reuse.lower() == 'true'
+    elif args.ts == 'adl':
+        if args.nni_docker_image is not None:
+            config[args.ts]['trial']['image'] = args.nni_docker_image
+        if args.checkpoint_storage_class is not None:
+            config[args.ts]['trial']['checkpoint']['storageClass'] = args.checkpoint_storage_class
+        if args.checkpoint_storage_size is not None:
+            config[args.ts]['trial']['checkpoint']['storageSize'] = args.checkpoint_storage_size
+        if args.adaptive is not None:
+            config[args.ts]['trial']['adaptive'] = args.adaptive
+        if args.adl_nfs_server is not None and args.adl_nfs_path is not None and args.adl_nfs_container_mount_path is not None:
+            # default keys in nfs is empty, need to initialize
+            config[args.ts]['trial']['nfs'] = {}
+            config[args.ts]['trial']['nfs']['server'] = args.adl_nfs_server
+            config[args.ts]['trial']['nfs']['path'] = args.adl_nfs_path
+            config[args.ts]['trial']['nfs']['container_mount_path'] = args.adl_nfs_container_mount_path
     dump_yml_content(TRAINING_SERVICE_FILE, config)
 
 if __name__ == '__main__':
     parser = argparse.ArgumentParser()
-    parser.add_argument("--ts", type=str, choices=['pai', 'kubeflow', 'remote', 'local', 'frameworkcontroller'], default='pai')
+    parser.add_argument("--ts", type=str, choices=['pai', 'kubeflow', 'remote', 'local', 'frameworkcontroller', 'adl'], default='pai')
     parser.add_argument("--nni_docker_image", type=str)
     parser.add_argument("--nni_manager_ip", type=str)
     # args for PAI
@@ -122,6 +137,13 @@ if __name__ == '__main__':
     parser.add_argument("--remote_host", type=str)
     parser.add_argument("--remote_port", type=int)
     parser.add_argument("--remote_reuse", type=str)
+    # args for adl
+    parser.add_argument("--checkpoint_storage_class", type=str)
+    parser.add_argument("--checkpoint_storage_size", type=str)
+    parser.add_argument("--adaptive", type=str)
+    parser.add_argument("--adl_nfs_server", type=str)
+    parser.add_argument("--adl_nfs_path", type=str)
+    parser.add_argument("--adl_nfs_container_mount_path", type=str)
     args = parser.parse_args()
     update_training_service_config(args)
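No pipeline definition for the adl training service appears on this page of the diff, so the exact invocation of the updated script is not shown here. A hypothetical invocation with the new adl options, expressed in Python (option names come from the add_argument calls above; the image, storage and NFS values are placeholders, not taken from this commit):

    import subprocess

    # Hypothetical example only: fill in the adl section of config/training_service.yml.
    subprocess.run(
        [
            'python3', 'nni_test/nnitest/generate_ts_config.py',
            '--ts', 'adl',
            '--nni_docker_image', 'example/nni:latest',        # placeholder image
            '--checkpoint_storage_class', 'standard',          # placeholder storage class
            '--checkpoint_storage_size', '1Gi',
            '--adl_nfs_server', '10.0.0.1',                    # placeholder NFS server
            '--adl_nfs_path', '/exports/nni',
            '--adl_nfs_container_mount_path', '/mnt/nfs',
        ],
        cwd='test',   # the deleted pipelines below always cd into test/ before running it
        check=True,
    )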
test/nni_test/nnitest/run_tests.py (+21 −3)

@@ -23,7 +23,7 @@ from utils import (CLEAR, EXPERIMENT_URL, GREEN, RED, REST_ENDPOINT,
 it_variables = {}
 
-def update_training_service_config(config, training_service):
+def update_training_service_config(config, training_service, config_file_path):
     it_ts_config = get_yml_content(os.path.join('config', 'training_service.yml'))
 
     # hack for kubeflow trial config
@@ -38,6 +38,20 @@ def update_training_service_config(config, training_service):
             config['trial'].pop('command')
         if 'gpuNum' in config['trial']:
             config['trial'].pop('gpuNum')
 
+    if training_service == 'adl':
+        # hack for adl trial config, codeDir in adl mode refers to path in container
+        containerCodeDir = config['trial']['codeDir']
+        # replace metric test folders to container folder
+        if config['trial']['codeDir'] == '.':
+            containerCodeDir = '/' + config_file_path[:config_file_path.rfind('/')]
+        elif config['trial']['codeDir'] == '../naive_trial':
+            containerCodeDir = '/test/config/naive_trial'
+        elif '../../../' in config['trial']['codeDir']:
+            # replace example folders to container folder
+            containerCodeDir = config['trial']['codeDir'].replace('../../../', '/')
+        it_ts_config[training_service]['trial']['codeDir'] = containerCodeDir
+        it_ts_config[training_service]['trial']['command'] = 'cd {0} && {1}'.format(containerCodeDir, config['trial']['command'])
+
     deep_update(config, it_ts_config['all'])
     deep_update(config, it_ts_config[training_service])
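deep_update is imported from the test utils module and its implementation is not part of this diff. Purely as an assumption about its behavior, a minimal sketch of a recursive merge that would produce the layering used above, where the 'all' section and then the per-training-service section of training_service.yml are merged over the test case config:

    def deep_update_sketch(base, overrides):
        # Recursively merge `overrides` into `base`: nested dicts are merged,
        # anything else (scalars, lists, empty values) is overwritten.
        for key, value in overrides.items():
            if isinstance(value, dict) and isinstance(base.get(key), dict):
                deep_update_sketch(base[key], value)
            else:
                base[key] = value
        return base

    # Hypothetical example: layer the adl section over a test case's trial config.
    case = {'trial': {'codeDir': '.', 'command': 'python3 main_adl.py --epochs 1', 'gpuNum': 1}}
    adl = {'maxTrialNum': 2, 'trial': {'namespace': 'default', 'cpuNum': 1, 'memorySize': '1Gi'}}
    print(deep_update_sketch(case, adl))
    # -> the trial dict keeps codeDir/command/gpuNum and gains namespace, cpuNum and
    #    memorySize; maxTrialNum is added at the top level.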
@@ -58,7 +72,7 @@ def prepare_config_file(test_case_config, it_config, args):
     # apply training service config
     # user's gpuNum, logCollection config is overwritten by the config in training_service.yml
     # the hack for kubeflow should be applied at last step
-    update_training_service_config(test_yml_config, args.ts)
+    update_training_service_config(test_yml_config, args.ts, test_case_config['configFile'])
 
     # generate temporary config yml file to launch experiment
     new_config_file = config_path + '.tmp'

@@ -249,6 +263,10 @@ def run(args):
             wait_for_port_available(8080, 180)
         else:
             wait_for_port_available(8080, 30)
 
+        # adl mode need more time to cleanup PVC
+        if args.ts == 'adl' and name == 'nnictl-resume-2':
+            time.sleep(30)
+
         print('## {}Testing: {}{} ##'.format(GREEN, name, CLEAR))
         begin_time = time.time()

@@ -263,7 +281,7 @@ if __name__ == '__main__':
     parser.add_argument("--cases", type=str, default=None)
     parser.add_argument("--exclude", type=str, default=None)
-    parser.add_argument("--ts", type=str, choices=['local', 'remote', 'pai', 'kubeflow', 'frameworkcontroller'], default='local')
+    parser.add_argument("--ts", type=str, choices=['local', 'remote', 'pai', 'kubeflow', 'frameworkcontroller', 'adl'], default='local')
     args = parser.parse_args()
     run(args)
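wait_for_port_available is imported from the test utils module and is not shown in this diff. As a labelled assumption about what such a helper does, a minimal sketch of a poll-until-free loop on the NNI REST port used in run() above:

    import socket
    import time

    def wait_for_port_available_sketch(port, timeout):
        # Poll until nothing is listening on `port`, or fail after `timeout` seconds.
        deadline = time.time() + timeout
        while time.time() < deadline:
            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
                if sock.connect_ex(('localhost', port)) != 0:
                    return  # connection refused: the port has been released
            time.sleep(1)
        raise RuntimeError('port {} still in use after {} seconds'.format(port, timeout))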
test/pipelines/pipelines-it-frameworkcontroller.yml (deleted, 100644 → 0, −69)

# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.

jobs:
- job: 'integration_test_frameworkController'
  timeoutInMinutes: 120
  steps:
  - script: python3 -m pip install --upgrade pip setuptools --user
    displayName: 'Install python tools'
  - script: |
      set -e
      cd deployment/pypi
      if [ -d ./dist ]
      then
        rm -rf ./dist/*
      fi
      echo 'building prerelease package...'
      make build
      ls $(Build.SourcesDirectory)/deployment/pypi/dist/
    condition: eq( variables['build_docker_img'], 'true' )
    displayName: 'build nni bdsit_wheel'
  - script: |
      source install.sh
    displayName: 'Install nni toolkit via source code'
  - script: |
      set -e
      sudo apt-get install swig -y
      PATH=$HOME/.local/bin:$PATH nnictl package install --name=SMAC
      PATH=$HOME/.local/bin:$PATH nnictl package install --name=BOHB
    displayName: 'Install dependencies for integration tests in frameworkcontroller mode'
  - script: |
      set -e
      cd examples/tuners/customized_tuner
      python3 setup.py develop --user
      PATH=$HOME/.local/bin:$PATH nnictl package install ./
    displayName: 'Install demotuner for customized_tuner test'
  - script: |
      set -e
      if [ $(build_docker_img) = 'true' ]
      then
        cd deployment/pypi
        docker login -u $(docker_hub_user) -p $(docker_hub_pwd)
        echo 'updating docker file for installing nni from local...'
        # update Dockerfile to install NNI in docker image from whl file built in last step
        sed -ie 's/RUN python3 -m pip --no-cache-dir install nni/COPY .\/dist\/* .\/\nRUN python3 -m pip install nni-*.whl/' ../docker/Dockerfile
        cat ../docker/Dockerfile
        export IMG_TAG=`date -u +%y%m%d%H%M`
        docker build -f ../docker/Dockerfile -t $(test_docker_img_name):$IMG_TAG .
        docker push $(test_docker_img_name):$IMG_TAG
        export TEST_IMG=$(test_docker_img_name):$IMG_TAG
        cd ../../
      else
        export TEST_IMG=$(existing_docker_img)
      fi
      echo "TEST_IMG:$TEST_IMG"
      cd test
      python3 nni_test/nnitest/generate_ts_config.py --ts frameworkcontroller --keyvault_vaultname $(keyVault_vaultName) --keyvault_name $(keyVault_name) \
        --azs_account $(azureStorage_accountName) --azs_share $(azureStorage_azureShare) --nni_docker_image $TEST_IMG --nni_manager_ip $(nni_manager_ip)
      cat config/training_service.yml
      PATH=$HOME/.local/bin:$PATH python3 nni_test/nnitest/run_tests.py --config config/integration_tests.yml --ts frameworkcontroller --exclude multi-phase
    displayName: 'integration test'
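The sed command in the docker-build step above rewrites ../docker/Dockerfile so the test image installs NNI from the wheel built earlier in the pipeline rather than from PyPI. For readability, the same substitution expressed in Python (operating on a one-line stand-in for the Dockerfile):

    # The original Dockerfile installs NNI from PyPI with this line:
    dockerfile = 'RUN python3 -m pip --no-cache-dir install nni\n'

    # The sed expression replaces it with a COPY of the local wheel plus a pip
    # install of that wheel, which is what the pipeline then pushes as the test image.
    patched = dockerfile.replace(
        'RUN python3 -m pip --no-cache-dir install nni',
        'COPY ./dist/* ./\nRUN python3 -m pip install nni-*.whl',
    )
    print(patched)
    # COPY ./dist/* ./
    # RUN python3 -m pip install nni-*.whl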
test/pipelines/pipelines-it-installation.yml (deleted, 100644 → 0, −41)

jobs:
- job: 'pip_install_ubuntu_python36'
  pool:
    vmImage: 'ubuntu-18.04'
  strategy:
    matrix:
      Python36:
        PYTHON_VERSION: '3.6'
  steps:
  - script: |
      python3 -V
      python3 -m pip install --upgrade pip setuptools --user
      python3 -m pip install --upgrade nni --user
    displayName: 'Install nni'

- job: 'pip_install_macOS_python3'
  pool:
    vmImage: 'macOS-latest'
  steps:
  - script: |
      python3 -V
      python3 -m pip install --upgrade pip setuptools --user
      python3 -m pip install --upgrade nni --user
    displayName: 'Install nni'

- job: 'pip_install_windows_python36'
  pool:
    vmImage: 'windows-latest'
  strategy:
    matrix:
      Python36:
        PYTHON_VERSION: '3.6'
  steps:
  - script: |
      python -V
      python -m pip install --upgrade pip setuptools --user
      python -m pip install --upgrade nni --user
    displayName: 'Install nni'
test/pipelines/pipelines-it-kubeflow.yml (deleted, 100644 → 0, −68)

# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.

jobs:
- job: 'integration_test_kubeflow'
  timeoutInMinutes: 120
  steps:
  - script: python3 -m pip install --upgrade pip setuptools --user
    displayName: 'Install python tools'
  - script: |
      set -e
      cd deployment/pypi
      if [ -d ./dist ]
      then
        rm -rf ./dist/*
      fi
      echo 'building prerelease package...'
      make build
      ls $(Build.SourcesDirectory)/deployment/pypi/dist/
    condition: eq( variables['build_docker_img'], 'true' )
    displayName: 'build nni bdsit_wheel'
  - script: |
      source install.sh
    displayName: 'Install nni toolkit via source code'
  - script: |
      sudo apt-get install swig -y
      PATH=$HOME/.local/bin:$PATH nnictl package install --name=SMAC
      PATH=$HOME/.local/bin:$PATH nnictl package install --name=BOHB
    displayName: 'Install dependencies for integration tests in Kubeflow mode'
  - script: |
      set -e
      cd examples/tuners/customized_tuner
      python3 setup.py develop --user
      PATH=$HOME/.local/bin:$PATH nnictl package install ./
    displayName: 'Install demotuner for customized_tuner test'
  - script: |
      set -e
      if [ $(build_docker_img) = 'true' ]
      then
        cd deployment/pypi
        docker login -u $(docker_hub_user) -p $(docker_hub_pwd)
        echo 'updating docker file for installing nni from local...'
        # update Dockerfile to install NNI in docker image from whl file built in last step
        sed -ie 's/RUN python3 -m pip --no-cache-dir install nni/COPY .\/dist\/* .\/\nRUN python3 -m pip install nni-*.whl/' ../docker/Dockerfile
        cat ../docker/Dockerfile
        export IMG_TAG=`date -u +%y%m%d%H%M`
        docker build -f ../docker/Dockerfile -t $(test_docker_img_name):$IMG_TAG .
        docker push $(test_docker_img_name):$IMG_TAG
        export TEST_IMG=$(test_docker_img_name):$IMG_TAG
        cd ../../
      else
        export TEST_IMG=$(existing_docker_img)
      fi
      echo "TEST_IMG:$TEST_IMG"
      cd test
      python3 nni_test/nnitest/generate_ts_config.py --ts kubeflow --keyvault_vaultname $(keyVault_vaultName) --keyvault_name $(keyVault_name) \
        --azs_account $(azureStorage_accountName) --azs_share $(azureStorage_azureShare) --nni_docker_image $TEST_IMG --nni_manager_ip $(nni_manager_ip)
      cat config/training_service.yml
      PATH=$HOME/.local/bin:$PATH python3 nni_test/nnitest/run_tests.py --config config/integration_tests.yml --ts kubeflow --exclude multi-phase
    displayName: 'integration test'
test/pipelines/pipelines-it-local-tf2.yml (deleted, 100644 → 0, −37)

jobs:
- job: 'integration_test_local_ubuntu'
  timeoutInMinutes: 120
  steps:
  - script: python3 -m pip install --upgrade pip setuptools --user
    displayName: 'Install python tools'
  - script: |
      source install.sh
    displayName: 'Install nni toolkit via source code'
  - script: |
      set -e
      python3 -m pip install scikit-learn==0.23.2 --user
      python3 -m pip install torch==1.3.1 torchvision==0.4.2 -f https://download.pytorch.org/whl/torch_stable.html --user
      python3 -m pip install tensorflow-gpu==2.2.0 tensorflow-estimator==2.2.0 --force --user
      python3 -m pip install keras==2.4.2 --user
      sudo apt-get install swig -y
      PATH=$HOME/.local/bin:$PATH nnictl package install --name=SMAC
      PATH=$HOME/.local/bin:$PATH nnictl package install --name=BOHB
      PATH=$HOME/.local/bin:$PATH nnictl package install --name=PPOTuner
    displayName: 'Install dependencies for integration tests'
  - script: |
      cd test
      source scripts/unittest.sh
    displayName: 'Unit test'
  - script: |
      cd test
      PATH=$HOME/.local/bin:$PATH python3 nni_test/nnitest/run_tests.py --config config/integration_tests_tf2.yml --ts local
    displayName: 'Integration test'
  - script: |
      cd test
      PATH=$HOME/.local/bin:$PATH source scripts/nas.sh
    displayName: 'NAS test'
  - script: |
      cd test
      source scripts/model_compression.sh
    displayName: 'Model compression test'
test/pipelines/pipelines-it-local-windows.yml (deleted, 100644 → 0, −28)

jobs:
- job: 'integration_test_local_windows'
  timeoutInMinutes: 120
  steps:
  - script: |
      powershell.exe -file install.ps1
    displayName: 'Install nni toolkit via source code'
  - script: |
      python -m pip install scikit-learn==0.23.2 --user
      python -m pip install keras==2.1.6 --user
      python -m pip install torchvision===0.4.1 torch===1.3.1 -f https://download.pytorch.org/whl/torch_stable.html --user
      python -m pip install tensorflow-gpu==1.15.2 tensorflow-estimator==1.15.1 --force --user
      nnictl package install --name=PPOTuner
    displayName: 'Install dependencies for integration tests'
  - script: |
      cd examples/tuners/customized_tuner
      python setup.py develop --user
      nnictl package install ./
    displayName: 'Install demotuner for customized_tuner test'
  - script: |
      cd test
      powershell.exe -file scripts/unittest.ps1
    displayName: 'unit test'
  - script: |
      cd test
      python nni_test/nnitest/run_tests.py --config config/integration_tests.yml --ts local
    displayName: 'Integration tests'
test/pipelines/pipelines-it-local.yml (deleted, 100644 → 0, −45)

jobs:
- job: 'integration_test_local_ubuntu'
  timeoutInMinutes: 120
  steps:
  - script: python3 -m pip install --upgrade pip setuptools --user
    displayName: 'Install python tools'
  - script: |
      source install.sh
    displayName: 'Install nni toolkit via source code'
  - script: |
      set -e
      python3 -m pip install scikit-learn==0.23.2 --user
      python3 -m pip install torchvision==0.4.2 --user
      python3 -m pip install torch==1.3.1 --user
      python3 -m pip install keras==2.1.6 --user
      python3 -m pip install tensorflow-gpu==1.15.2 tensorflow-estimator==1.15.1 --force --user
      python3 -m pip install thop --user
      sudo apt-get install swig -y
      PATH=$HOME/.local/bin:$PATH nnictl package install --name=SMAC
      PATH=$HOME/.local/bin:$PATH nnictl package install --name=BOHB
      PATH=$HOME/.local/bin:$PATH nnictl package install --name=PPOTuner
    displayName: 'Install dependencies for integration tests'
  - script: |
      set -e
      cd examples/tuners/customized_tuner
      python3 setup.py develop --user
      PATH=$HOME/.local/bin:$PATH nnictl package install ./
    displayName: 'Install demotuner for customized_tuner test'
  - script: |
      cd test
      source scripts/unittest.sh
    displayName: 'Unit test'
  - script: |
      cd test
      PATH=$HOME/.local/bin:$PATH python3 nni_test/nnitest/run_tests.py --config config/integration_tests.yml --ts local
    displayName: 'Integration test'
  - script: |
      cd test
      PATH=$HOME/.local/bin:$PATH source scripts/nas.sh
    displayName: 'NAS test'
  - script: |
      cd test
      source scripts/model_compression.sh
    displayName: 'Model compression test'
test/pipelines/pipelines-it-pai-windows.yml (deleted, 100644 → 0, −80)

jobs:
- job: 'build_docker_image'
  timeoutInMinutes: 120
  pool:
    vmImage: 'Ubuntu 16.04'
  steps:
  - script: python3 -m pip install --upgrade pip setuptools --user
    displayName: 'Install python tools'
  - script: |
      cd deployment/pypi
      if [ -d ./dist ]
      then
        rm -rf ./dist/*
      fi
      echo 'building prerelease package...'
      make build
      ls $(Build.SourcesDirectory)/deployment/pypi/dist/
    condition: eq( variables['build_docker_img'], 'true' )
    displayName: 'build nni bdsit_wheel'
  - script: |
      set -e
      if [ $(build_docker_img) = 'true' ]
      then
        cd deployment/pypi
        docker login -u $(docker_hub_user) -p $(docker_hub_pwd)
        echo 'updating docker file for installing nni from local...'
        # update Dockerfile to install NNI in docker image from whl file built in last step
        sed -ie 's/RUN python3 -m pip --no-cache-dir install nni/COPY .\/dist\/* .\/\nRUN python3 -m pip install nni-*.whl/' ../docker/Dockerfile
        cat ../docker/Dockerfile
        export IMG_TAG=`date -u +%y%m%d%H%M`
        echo 'build and upload docker image'
        docker build -f ../docker/Dockerfile -t $(test_docker_img_name):$IMG_TAG .
        docker push $(test_docker_img_name):$IMG_TAG
        export TEST_IMG=$(test_docker_img_name):$IMG_TAG
        cd ../../
      else
        export TEST_IMG=$(existing_docker_img)
      fi
      echo "##vso[task.setvariable variable=TEST_IMG]$TEST_IMG"
    displayName: 'build docker image'
  - script: |
      echo $TEST_IMG
      echo "##vso[task.setvariable variable=docker_image;isOutput=true]$TEST_IMG"
    name: setvariableStep
    displayName: 'set image variable'

- job: 'integration_test_pai'
  timeoutInMinutes: 0
  dependsOn: build_docker_image
  variables:
    docker_image: $[ dependencies.build_docker_image.outputs['setvariableStep.docker_image'] ]
  steps:
  - script: |
      set PATH=$(ENV_PATH)
      python --version
      powershell.exe -file install.ps1
    displayName: 'Install nni toolkit via source code'
  - script: |
      set PATH=$(ENV_PATH)
      python -m pip install scikit-learn==0.23.2 --user
    displayName: 'Install dependencies for integration tests'
  - script: |
      set PATH=$(ENV_PATH)
      cd examples/tuners/customized_tuner
      python setup.py develop --user
      nnictl package install ./
    displayName: 'Install demotuner for customized_tuner test'
  - script: |
      cd test
      set PATH=$(ENV_PATH)
      python --version
      mount -o anon $(pai_nfs_uri) $(local_nfs_uri)
      python nni_test/nnitest/generate_ts_config.py --ts pai --pai_token $(pai_token) --pai_host $(pai_host) --pai_user $(pai_user) --nni_docker_image $(docker_image) --pai_storage_config_name $(pai_storage_config_name) --nni_manager_nfs_mount_path $(nni_manager_nfs_mount_path) --container_nfs_mount_path $(container_nfs_mount_path) --nni_manager_ip $(nni_manager_ip) --vc $(virtual_cluster)
      python nni_test/nnitest/run_tests.py --config config/integration_tests.yml --ts pai
    displayName: 'Examples and advanced features tests on pai'
test/pipelines/pipelines-it-pai.yml (deleted, 100644 → 0, −70)

# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.

jobs:
- job: 'integration_test_pai'
  timeoutInMinutes: 120
  steps:
  - script: python3 -m pip install --upgrade pip setuptools --user
    displayName: 'Install python tools'
  - script: |
      set -e
      cd deployment/pypi
      if [ -d ./dist ]
      then
        rm -rf ./dist/*
      fi
      echo 'building prerelease package...'
      make build
      ls $(Build.SourcesDirectory)/deployment/pypi/dist/
    condition: eq( variables['build_docker_img'], 'true' )
    displayName: 'build nni bdsit_wheel'
  - script: |
      source install.sh
    displayName: 'Install nni toolkit via source code'
  - script: |
      set -e
      sudo apt-get install swig -y
      PATH=$HOME/.local/bin:$PATH nnictl package install --name=SMAC
      PATH=$HOME/.local/bin:$PATH nnictl package install --name=BOHB
    displayName: 'Install dependencies for integration tests in PAI mode'
  - script: |
      set -e
      cd examples/tuners/customized_tuner
      python3 setup.py develop --user
      PATH=$HOME/.local/bin:$PATH nnictl package install ./
    displayName: 'Install demotuner for customized_tuner test'
  - script: |
      set -e
      if [ $(build_docker_img) = 'true' ]
      then
        cd deployment/pypi
        docker login -u $(docker_hub_user) -p $(docker_hub_pwd)
        echo 'updating docker file for installing nni from local...'
        # update Dockerfile to install NNI in docker image from whl file built in last step
        sed -ie 's/RUN python3 -m pip --no-cache-dir install nni/COPY .\/dist\/* .\/\nRUN python3 -m pip install nni-*.whl/' ../docker/Dockerfile
        cat ../docker/Dockerfile
        export IMG_TAG=`date -u +%y%m%d%H%M`
        echo 'build and upload docker image'
        docker build -f ../docker/Dockerfile -t $(test_docker_img_name):$IMG_TAG .
        docker push $(test_docker_img_name):$IMG_TAG
        export TEST_IMG=$(test_docker_img_name):$IMG_TAG
        cd ../../
      else
        export TEST_IMG=$(existing_docker_img)
      fi
      echo "TEST_IMG:$TEST_IMG"
      cd test
      python3 nni_test/nnitest/generate_ts_config.py --ts pai --pai_reuse $(pai_reuse) --pai_host $(pai_host) --pai_user $(pai_user) --nni_docker_image $TEST_IMG --pai_storage_config_name $(pai_storage_config_name)\
        --pai_token $(pai_token) --nni_manager_nfs_mount_path $(nni_manager_nfs_mount_path) --container_nfs_mount_path $(container_nfs_mount_path) --nni_manager_ip $(nni_manager_ip) --vc $(virtual_cluster)
      PATH=$HOME/.local/bin:$PATH python3 nni_test/nnitest/run_tests.py --config config/integration_tests.yml --ts pai
    displayName: 'integration test'
test/pipelines/pipelines-it-paiYarn.yml (deleted, 100644 → 0, −61)

# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.

jobs:
- job: 'integration_test_paiYarn'
  timeoutInMinutes: 0
  steps:
  - script: python3 -m pip install --upgrade pip setuptools --user
    displayName: 'Install python tools'
  - script: |
      set -e
      cd deployment/pypi
      echo 'building prerelease package...'
      make build
      ls $(Build.SourcesDirectory)/deployment/pypi/dist/
    condition: eq( variables['build_docker_img'], 'true' )
    displayName: 'build nni bdsit_wheel'
  - script: |
      source install.sh
    displayName: 'Install nni toolkit via source code'
  - script: |
      set -e
      sudo apt-get install swig -y
      PATH=$HOME/.local/bin:$PATH nnictl package install --name=SMAC
      PATH=$HOME/.local/bin:$PATH nnictl package install --name=BOHB
    displayName: 'Install dependencies for integration tests in PAI mode'
  - script: |
      set -e
      if [ $(build_docker_img) = 'true' ]
      then
        cd deployment/pypi
        docker login -u $(docker_hub_user) -p $(docker_hub_pwd)
        echo 'updating docker file for installing nni from local...'
        # update Dockerfile to install NNI in docker image from whl file built in last step
        sed -ie 's/RUN python3 -m pip --no-cache-dir install nni/COPY .\/dist\/* .\nRUN python3 -m pip install nni-*.whl/' ../docker/Dockerfile
        cat ../docker/Dockerfile
        export IMG_TAG=`date -u +%y%m%d%H%M`
        echo 'build and upload docker image'
        docker build -f ../docker/Dockerfile -t $(test_docker_img_name):$IMG_TAG .
        docker push $(test_docker_img_name):$IMG_TAG
        export TEST_IMG=$(test_docker_img_name):$IMG_TAG
        cd ../../
      else
        export TEST_IMG=$(existing_docker_img)
      fi
      echo "TEST_IMG:$TEST_IMG"
      cd test
      python3 generate_ts_config.py --ts paiYarn --pai_host $(pai_host) --pai_user $(pai_user) --pai_pwd $(pai_pwd) --vc $(pai_virtual_cluster) \
        --nni_docker_image $TEST_IMG --data_dir $(data_dir) --output_dir $(output_dir) --nni_manager_ip $(nni_manager_ip)
      PATH=$HOME/.local/bin:$PATH python3 config_test.py --ts paiYarn
      PATH=$HOME/.local/bin:$PATH python3 metrics_test.py
    displayName: 'integration test'
test/pipelines/pipelines-it-remote-linux-to-linux.yml (deleted, 100644 → 0, −75)

jobs:
- job: 'integration_test_remote_linux_to_linux'
  timeoutInMinutes: 120
  steps:
  - script: python3 -m pip install --upgrade pip setuptools --user
    displayName: 'Install python tools'
  - script: |
      set -e
      cd deployment/pypi
      echo 'building prerelease package...'
      make build
      ls $(Build.SourcesDirectory)/deployment/pypi/dist/
    displayName: 'build nni bdsit_wheel'
  - script: |
      source install.sh
    displayName: 'Install nni toolkit via source code'
  - script: |
      set -e
      sudo apt-get install swig -y
      PATH=$HOME/.local/bin:$PATH nnictl package install --name=SMAC
      PATH=$HOME/.local/bin:$PATH nnictl package install --name=BOHB
    displayName: 'Install dependencies for integration tests in remote mode'
  - script: |
      set -e
      cd examples/tuners/customized_tuner
      python3 setup.py develop --user
      PATH=$HOME/.local/bin:$PATH nnictl package install ./
    displayName: 'Install demotuner for customized_tuner test'
  - task: CopyFilesOverSSH@0
    inputs:
      sshEndpoint: $(end_point)
      sourceFolder: deployment/pypi/dist/
      targetFolder: /tmp/nnitest/$(Build.BuildId)/dist
      overwrite: true
    displayName: 'Copy dist files to remote machine'
    timeoutInMinutes: 10
  - task: CopyFilesOverSSH@0
    inputs:
      sshEndpoint: $(end_point)
      sourceFolder: test
      targetFolder: /tmp/nnitest/$(Build.BuildId)/test
      overwrite: true
    displayName: 'Copy test files to remote machine'
    timeoutInMinutes: 10
  - task: SSH@0
    inputs:
      sshEndpoint: $(end_point)
      runOptions: commands
      commands: python3 /tmp/nnitest/$(Build.BuildId)/test/nni_test/nnitest/remote_docker.py --mode start --name $(Build.BuildId) --image nni/nni
    displayName: 'Start docker'
  - task: DownloadSecureFile@1
    inputs:
      secureFile: $(remote_private_key)
  - script: |
      set -e
      cp $(Agent.TempDirectory)/$(remote_private_key) test/id_rsa
      chmod 600 test/id_rsa
      scp -P $(remote_port) -i test/id_rsa $(remote_user)@$(remote_host):/tmp/nnitest/$(Build.BuildId)/port test/port
      cat test/port
    displayName: 'Get docker port'
  - script: |
      set -e
      cd test
      python3 nni_test/nnitest/generate_ts_config.py --ts remote --remote_reuse $(remote_reuse) --remote_user $(docker_user) --remote_host $(remote_host) \
        --remote_port $(cat port) --remote_pwd $(docker_pwd) --nni_manager_ip $(nni_manager_ip)
      cat config/training_service.yml
      PATH=$HOME/.local/bin:$PATH python3 nni_test/nnitest/run_tests.py --config config/integration_tests.yml --ts remote
    displayName: 'integration test'
  - task: SSH@0
    inputs:
      sshEndpoint: $(end_point)
      runOptions: commands
      commands: python3 /tmp/nnitest/$(Build.BuildId)/test/nni_test/nnitest/remote_docker.py --mode stop --name $(Build.BuildId)
    displayName: 'Stop docker'
test/pipelines/pipelines-it-remote-linux-to-windows.yml (deleted, 100644 → 0, −55)

jobs:
- job: "integration_test_remote_linux_to_windows"
  timeoutInMinutes: 120
  steps:
  - script: make clean
    displayName: "clean nni source code"
  - task: CopyFilesOverSSH@0
    inputs:
      sshEndpoint: $(end_point)
      contents: |
        **
        !**/dist/**
        !**/node_modules/**
      targetFolder: /tmp/nnitest/$(Build.BuildId)
      overwrite: true
    displayName: "Copy all files to remote machine"
    timeoutInMinutes: 10
  - task: SSH@0
    inputs:
      sshEndpoint: $(end_point)
      runOptions: commands
      commands: cd "\tmp\nnitest\$(Build.BuildId)" && powershell.exe -command "conda activate l2w | .\uninstall.ps1 | .\install.ps1"
      failOnStdErr: false
    displayName: "install on remote windows"
  - script: python3 -m pip install --upgrade pip setuptools --user
    displayName: "Install python tools"
  - script: make easy-install
    displayName: "Install nni via source code"
  - script: |
      set -e
      sudo apt-get install swig -y
      PATH=$HOME/.local/bin:$PATH nnictl package install --name=SMAC
      PATH=$HOME/.local/bin:$PATH nnictl package install --name=BOHB
    displayName: "Install dependencies for integration tests in remote mode"
  - script: |
      set -e
      cd examples/tuners/customized_tuner
      python3 setup.py develop --user
      PATH=$HOME/.local/bin:$PATH nnictl package install ./
    displayName: 'Install demotuner for customized_tuner test'
  - script: |
      set -e
      cd test
      python3 nni_test/nnitest/generate_ts_config.py --ts remote --remote_user $(remote_user) --remote_host $(remote_host) \
        --remote_port $(remote_port) --remote_pwd $(remote_pwd) --nni_manager_ip $(nni_manager_ip)
      cat config/training_service.yml
      PATH=$HOME/.local/bin:$PATH python3 nni_test/nnitest/run_tests.py --config config/integration_tests.yml --ts remote
    displayName: "integration test"
  - task: SSH@0
    inputs:
      sshEndpoint: $(end_point)
      runOptions: commands
      commands: rmdir /s /q "\\?\c:\tmp\nnitest\$(Build.BuildId)"
    condition: always()
    displayName: "clean up on remote server"
test/pipelines/pipelines-it-remote-windows-to-linux.yml (deleted, 100644 → 0, −60)

jobs:
- job: 'integration_test_remote_windows_to_linux'
  timeoutInMinutes: 120
  steps:
  - script: python -m pip install --upgrade pip setuptools
    displayName: 'Install python tools'
  - task: CopyFilesOverSSH@0
    inputs:
      sshEndpoint: $(end_point)
      targetFolder: /tmp/nnitest/$(Build.BuildId)/nni-remote
      overwrite: true
    displayName: 'Copy all files to remote machine'
    timeoutInMinutes: 10
  - script: |
      powershell.exe -file install.ps1
    displayName: 'Install nni toolkit via source code'
  - script: |
      python -m pip install scikit-learn==0.23.2 --user
    displayName: 'Install dependencies for integration tests'
  - script: |
      cd examples/tuners/customized_tuner
      python setup.py develop --user
      nnictl package install ./
    displayName: 'Install demotuner for customized_tuner test'
  - task: SSH@0
    inputs:
      sshEndpoint: $(end_point)
      runOptions: inline
      inline: cd /tmp/nnitest/$(Build.BuildId)/nni-remote/deployment/pypi;make build
      failOnStdErr: false
    continueOnError: true
    displayName: 'build nni bdsit_wheel'
  - task: SSH@0
    inputs:
      sshEndpoint: $(end_point)
      runOptions: commands
      commands: python3 /tmp/nnitest/$(Build.BuildId)/nni-remote/test/nni_test/nnitest/remote_docker.py --mode start --name $(Build.BuildId) --image nni/nni --os windows
    displayName: 'Start docker'
  - powershell: |
      Write-Host "Downloading Putty..."
      (New-Object Net.WebClient).DownloadFile("https://the.earth.li/~sgtatham/putty/latest/w64/pscp.exe", "$(Agent.TempDirectory)\pscp.exe")
      Write-Host "Download Putty success!"
      Write-Host "Connecting to host..."
      $(Agent.TempDirectory)\pscp.exe -P $(remote_port) -hostkey $(hostkey) -pw $(pscp_pwd) $(remote_user)@$(remote_host):/tmp/nnitest/$(Build.BuildId)/port test\port
      Write-Host "Get port success!"
      Get-Content test\port
    displayName: 'Get docker port'
  - powershell: |
      cd test
      python nni_test/nnitest/generate_ts_config.py --ts remote --remote_reuse $(remote_reuse) --remote_user $(docker_user) --remote_host $(remote_host) --remote_port $(Get-Content port) --remote_pwd $(docker_pwd) --nni_manager_ip $(nni_manager_ip)
      Get-Content config/training_service.yml
      python nni_test/nnitest/run_tests.py --config config/integration_tests.yml --ts remote --exclude cifar10
    displayName: 'integration test'
  - task: SSH@0
    inputs:
      sshEndpoint: $(end_point)
      runOptions: commands
      commands: python3 /tmp/nnitest/$(Build.BuildId)/nni-remote/test/nni_test/nnitest/remote_docker.py --mode stop --name $(Build.BuildId) --os windows
    displayName: 'Stop docker'