"examples/git@developer.sourcefind.cn:OpenDAS/nni.git" did not exist on "56a1575bba3344945261fc6b28ba831ed4785d5c"
Unverified commit 6c3148c7 authored by SparkSnail, committed by GitHub

Merge pull request #239 from microsoft/master

merge master
parents 0fb78620 a2e524d3
@@ -77,16 +77,15 @@ testCases:
     kwargs:
       expected_result_file: expected_metrics.json
 
-# to be enabled
-#- name: metrics-dict
-#  configFile: test/config/metrics_test/config_dict_metrics.yml
-#  config:
-#    maxTrialNum: 1
-#    trialConcurrency: 1
-#  validator:
-#    class: MetricsValidator
-#    kwargs:
-#      expected_result_file: expected_metrics_dict.json
+- name: metrics-dict
+  configFile: test/config/metrics_test/config_dict_metrics.yml
+  config:
+    maxTrialNum: 1
+    trialConcurrency: 1
+  validator:
+    class: MetricsValidator
+    kwargs:
+      expected_result_file: expected_metrics_dict.json
 
 - name: nnicli
   configFile: test/config/examples/sklearn-regression.yml
......
@@ -31,16 +31,15 @@ testCases:
     kwargs:
       expected_result_file: expected_metrics.json
 
-# to be enabled
-#- name: metrics-dict
-#  configFile: test/config/metrics_test/config_dict_metrics.yml
-#  config:
-#    maxTrialNum: 1
-#    trialConcurrency: 1
-#  validator:
-#    class: MetricsValidator
-#    kwargs:
-#      expected_result_file: expected_metrics_dict.json
+- name: metrics-dict
+  configFile: test/config/metrics_test/config_dict_metrics.yml
+  config:
+    maxTrialNum: 1
+    trialConcurrency: 1
+  validator:
+    class: MetricsValidator
+    kwargs:
+      expected_result_file: expected_metrics_dict.json
 
 - name: nnicli
   configFile: test/config/examples/sklearn-regression.yml
......
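
The two config changes above re-enable the metrics-dict test case, which validates trials that report dictionary-valued metrics rather than plain numbers. A minimal sketch of such a trial, assuming the standard nni reporting API (the metric values here are made up; the values the test actually expects live in expected_metrics_dict.json):

import nni

# Hypothetical trial reporting dict-valued metrics, as exercised by the
# metrics-dict test case; the 'default' key is the value NNI treats as
# the primary metric when a dict is reported.
for epoch in range(2):
    nni.report_intermediate_result({'default': 0.6 + 0.1 * epoch, 'loss': 0.8 - 0.2 * epoch})
nni.report_final_result({'default': 0.9, 'loss': 0.1})
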
@@ -35,8 +35,8 @@ class MetricsValidator(ITValidator):
         assert len(trial_final_result) == 1, 'there should be 1 final result'
         assert trial_final_result[0] == expected_metrics['final_result']
         # encode dict/number into json string to compare them in set
-        assert set([json.dumps(x) for x in trial_intermediate_result]) \
-            == set([json.dumps(x) for x in expected_metrics['intermediate_result']])
+        assert set([json.dumps(x, sort_keys=True) for x in trial_intermediate_result]) \
+            == set([json.dumps(x, sort_keys=True) for x in expected_metrics['intermediate_result']])
 
     def get_metric_results(self, metrics):
         intermediate_result = {}
......
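
The added sort_keys=True is what makes the set comparison above reliable for dict metrics: json.dumps serializes keys in insertion order by default, so two equal dicts can encode to different strings. A standalone illustration (not part of the diff):

import json

a = {'default': 0.9, 'loss': 0.1}
b = {'loss': 0.1, 'default': 0.9}   # same mapping, different insertion order

assert a == b
assert json.dumps(a) != json.dumps(b)                                   # unstable encoding
assert json.dumps(a, sort_keys=True) == json.dumps(b, sort_keys=True)  # stable encoding
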
@@ -26,6 +26,10 @@ jobs:
       cd test
       PATH=$HOME/.local/bin:$PATH python3 nni_test/nnitest/run_tests.py --config config/integration_tests.yml --ts local
     displayName: 'Integration test'
+  - script: |
+      cd test
+      PATH=$HOME/.local/bin:$PATH source scripts/nas.sh
+    displayName: 'NAS test'
   - script: |
       cd test
       source scripts/model_compression.sh
......
@@ -6,22 +6,14 @@ echo ""
 echo "===========================Testing: pruning and speedup==========================="
 cd ${CWD}/../examples/model_compress
 
-echo "testing slim pruning and speedup..."
-python3 model_prune_torch.py --pruner_name slim --pretrain_epochs 1 --prune_epochs 1
-python3 model_speedup.py --example_name slim --model_checkpoint ./checkpoints/pruned_vgg19_cifar10_slim.pth \
-    --masks_file ./checkpoints/mask_vgg19_cifar10_slim.pth
-
-echo "testing l1 pruning and speedup..."
-python3 model_prune_torch.py --pruner_name l1 --pretrain_epochs 1 --prune_epochs 1
-python3 model_speedup.py --example_name l1filter --model_checkpoint ./checkpoints/pruned_vgg16_cifar10_l1.pth \
-    --masks_file ./checkpoints/mask_vgg16_cifar10_l1.pth
-
-echo "testing apoz pruning and speedup..."
-python3 model_prune_torch.py --pruner_name apoz --pretrain_epochs 1 --prune_epochs 1
-python3 model_speedup.py --example_name apoz --model_checkpoint ./checkpoints/pruned_vgg16_cifar10_apoz.pth \
-    --masks_file ./checkpoints/mask_vgg16_cifar10_apoz.pth
-
-for name in level fpgm mean_activation
+for name in fpgm slim l1filter apoz
+do
+    echo "testing $name pruning and speedup..."
+    python3 model_prune_torch.py --pruner_name $name --pretrain_epochs 1 --prune_epochs 1
+    python3 model_speedup.py --example_name $name
+done
+
+for name in level mean_activation
 do
     echo "testing $name pruning..."
     python3 model_prune_torch.py --pruner_name $name --pretrain_epochs 1 --prune_epochs 1
......
#!/bin/bash
set -e
CWD=${PWD}
echo ""
echo "===========================Testing: NAS==========================="
EXAMPLE_DIR=${CWD}/../examples/nas
echo "testing classic nas..."
cd $EXAMPLE_DIR/classic_nas
SEARCH_SPACE_JSON=nni_auto_gen_search_space.json
if [ -f $SEARCH_SPACE_JSON ]; then
rm $SEARCH_SPACE_JSON
fi
nnictl ss_gen -t "python3 mnist.py"
if [ ! -f $SEARCH_SPACE_JSON ]; then
echo "Search space file not found!"
exit 1
fi
echo "testing darts..."
cd $EXAMPLE_DIR/darts
python3 search.py --epochs 1 --channels 2 --layers 4
python3 retrain.py --arc-checkpoint ./checkpoints/epoch_0.json --layers 4 --epochs 1
echo "testing enas..."
cd $EXAMPLE_DIR/enas
python3 search.py --search-for macro --epochs 1
python3 search.py --search-for micro --epochs 1
echo "testing naive..."
cd $EXAMPLE_DIR/naive
python3 train.py
echo "testing pdarts..."
cd $EXAMPLE_DIR/pdarts
python3 search.py --epochs 1 --channels 4 --nodes 2 --log-frequency 10 --add_layers 0 --add_layers 1 --dropped_ops 3 --dropped_ops 3
@@ -76,6 +76,14 @@ def get_python_dir(sitepackages_path):
     else:
         return str(Path(sitepackages_path).parents[2])
 
+def check_tensorboard_version():
+    try:
+        import tensorboard
+        return tensorboard.__version__
+    except:
+        print_error('import tensorboard error!')
+        exit(1)
+
 def get_nni_installation_path():
     ''' Find nni lib from the following locations in order
     Return nni root directory if it exists
......
@@ -153,6 +153,18 @@ tuner_schema_dict = {
         Optional('includeIntermediateResults'): setType('includeIntermediateResults', bool),
         Optional('gpuIndices'): Or(int, And(str, lambda x: len([int(i) for i in x.split(',')]) > 0), error='gpuIndex format error!'),
     },
+    'PBTTuner': {
+        'builtinTunerName': 'PBTTuner',
+        'classArgs': {
+            'optimize_mode': setChoice('optimize_mode', 'maximize', 'minimize'),
+            Optional('all_checkpoint_dir'): setType('all_checkpoint_dir', str),
+            Optional('population_size'): setNumberRange('population_size', int, 0, 99999),
+            Optional('factors'): setType('factors', tuple),
+            Optional('fraction'): setType('fraction', float),
+        },
+        Optional('includeIntermediateResults'): setType('includeIntermediateResults', bool),
+        Optional('gpuIndices'): Or(int, And(str, lambda x: len([int(i) for i in x.split(',')]) > 0), error='gpuIndex format error!'),
+    },
     'customized': {
         'codeDir': setPathCheck('codeDir'),
         'classFileName': setType('classFileName', str),
......
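
For reference, a minimal sketch (not taken from this diff) of tuner settings the new PBTTuner schema entry would accept, written as the Python dict the schema validates; the concrete values and the checkpoint path are assumptions for illustration:

# Hypothetical 'tuner' section matching the PBTTuner schema above;
# only optimize_mode is required, the remaining classArgs are optional.
tuner_config = {
    'builtinTunerName': 'PBTTuner',
    'classArgs': {
        'optimize_mode': 'maximize',
        'all_checkpoint_dir': '/tmp/nni/pbt_checkpoints',  # assumed path, any writable dir
        'population_size': 10,
        'fraction': 0.2,
    },
}
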
@@ -10,7 +10,7 @@ from .rest_utils import rest_get, check_rest_server_quick, check_response
 from .config_utils import Config, Experiments
 from .url_utils import trial_jobs_url, get_local_urls
 from .constants import COLOR_GREEN_FORMAT, REST_TIME_OUT
-from .common_utils import print_normal, print_error, detect_process, detect_port
+from .common_utils import print_normal, print_error, detect_process, detect_port, check_tensorboard_version
 from .nnictl_utils import check_experiment_id, check_experiment_id
 from .ssh_utils import create_ssh_sftp_client, copy_remote_directory_to_local
@@ -77,7 +77,8 @@ def start_tensorboard_process(args, nni_config, path_list, temp_nni_path):
         exit(1)
     with open(os.path.join(temp_nni_path, 'tensorboard_stdout'), 'a+') as stdout_file, \
         open(os.path.join(temp_nni_path, 'tensorboard_stderr'), 'a+') as stderr_file:
-        cmds = ['tensorboard', '--logdir', format_tensorboard_log_path(path_list), '--port', str(args.port)]
+        log_dir_cmd = '--logdir_spec' if check_tensorboard_version() >= '2.0' else '--logdir'
+        cmds = ['tensorboard', log_dir_cmd, format_tensorboard_log_path(path_list), '--port', str(args.port)]
         tensorboard_process = Popen(cmds, stdout=stdout_file, stderr=stderr_file)
         url_list = get_local_urls(args.port)
         print_normal(COLOR_GREEN_FORMAT % 'Start tensorboard success!\n' + 'Tensorboard urls: ' + ' '.join(url_list))
......
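
The flag switch above is needed because, to the best of my knowledge, TensorBoard 2.x moved the multi-directory "name1:path1,name2:path2" form of --logdir to a separate --logdir_spec option (this rationale is not stated in the diff). A rough sketch of the selection with a placeholder log path:

# Assumed behaviour: format_tensorboard_log_path() returns something like
# 'trial1:/tmp/logs/trial1,trial2:/tmp/logs/trial2'.
version = check_tensorboard_version()            # e.g. '2.1.0' or '1.15.0'
log_dir_cmd = '--logdir_spec' if version >= '2.0' else '--logdir'
cmds = ['tensorboard', log_dir_cmd, 'trial1:/tmp/logs/trial1,trial2:/tmp/logs/trial2', '--port', '6006']
# Note: this is a plain string comparison; it works for the '1.x' vs '2.x' version strings involved here.
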