Commit c94996c7 authored by Lee's avatar Lee Committed by chicm-ms
Browse files

Add different tuner config files for config_test (#760)

Add integration test cases for tuners.
parent 5f8ffcd5
@@ -18,6 +18,8 @@ jobs:
       python3 -m pip install torchvision==0.2.1 --user
       python3 -m pip install keras==2.1.6 --user
       python3 -m pip install tensorflow-gpu==1.10.0 --user
+      sudo apt-get install swig -y
+      nnictl package install --name=SMAC
     displayName: 'Install dependencies for integration tests'
   - script: |
       cd test
......
@@ -149,8 +149,8 @@ def main(params):
     '''
     # Import data
     mnist = input_data.read_data_sets(params['data_dir'], one_hot=True)
-    print('Mnist download data down.')
-    logger.debug('Mnist download data down.')
+    print('Mnist download data done.')
+    logger.debug('Mnist download data done.')
     # Create the model
     # Build the graph for the deep net
......
@@ -150,8 +150,8 @@ def main(params):
     '''
     # Import data
     mnist = input_data.read_data_sets(params['data_dir'], one_hot=True)
-    print('Mnist download data down.')
-    logger.debug('Mnist download data down.')
+    print('Mnist download data done.')
+    logger.debug('Mnist download data done.')
     # Create the model
     # Build the graph for the deep net
......
@@ -150,8 +150,8 @@ def main(params):
     '''
     # Import data
     mnist = input_data.read_data_sets(params['data_dir'], one_hot=True)
-    print('Mnist download data down.')
-    logger.debug('Mnist download data down.')
+    print('Mnist download data done.')
+    logger.debug('Mnist download data done.')
     # Create the model
     # Build the graph for the deep net
......
# Integration-test experiment config: Anneal tuner + Medianstop assessor.
# Runs the mnist-annotation example locally (annotation-based search space,
# so no searchSpacePath), capped at 2 trials / 5 minutes.
authorName: nni
experimentName: default_test
maxExecDuration: 5m
maxTrialNum: 2
trialConcurrency: 1
tuner:
  builtinTunerName: Anneal
  classArgs:
    optimize_mode: maximize
assessor:
  builtinAssessorName: Medianstop
  classArgs:
    optimize_mode: maximize
trial:
  codeDir: ../../../examples/trials/mnist-annotation
  command: python3 mnist.py --batch_num 100
  gpuNum: 0
useAnnotation: true
multiPhase: false
multiThread: false
trainingServicePlatform: local
# Integration-test experiment config: Evolution tuner + Medianstop assessor.
# Runs the mnist-annotation example locally (annotation-based search space),
# capped at 2 trials / 5 minutes.
authorName: nni
experimentName: default_test
maxExecDuration: 5m
maxTrialNum: 2
trialConcurrency: 1
tuner:
  builtinTunerName: Evolution
  classArgs:
    optimize_mode: maximize
assessor:
  builtinAssessorName: Medianstop
  classArgs:
    optimize_mode: maximize
trial:
  codeDir: ../../../examples/trials/mnist-annotation
  command: python3 mnist.py --batch_num 100
  gpuNum: 0
useAnnotation: true
multiPhase: false
multiThread: false
trainingServicePlatform: local
# Integration-test experiment config: Random tuner + Medianstop assessor.
# Runs the mnist-annotation example locally (annotation-based search space),
# capped at 2 trials / 5 minutes.
authorName: nni
experimentName: default_test
maxExecDuration: 5m
maxTrialNum: 2
trialConcurrency: 1
tuner:
  builtinTunerName: Random
  classArgs:
    optimize_mode: maximize
assessor:
  builtinAssessorName: Medianstop
  classArgs:
    optimize_mode: maximize
trial:
  codeDir: ../../../examples/trials/mnist-annotation
  command: python3 mnist.py --batch_num 100
  gpuNum: 0
useAnnotation: true
multiPhase: false
multiThread: false
trainingServicePlatform: local
# Integration-test experiment config: SMAC tuner + Medianstop assessor.
# SMAC is not bundled by default; the CI pipeline installs it via
# `nnictl package install --name=SMAC` (requires swig) before this runs.
# Runs the mnist-annotation example locally, capped at 2 trials / 5 minutes.
authorName: nni
experimentName: default_test
maxExecDuration: 5m
maxTrialNum: 2
trialConcurrency: 1
tuner:
  builtinTunerName: SMAC
  classArgs:
    optimize_mode: maximize
assessor:
  builtinAssessorName: Medianstop
  classArgs:
    optimize_mode: maximize
trial:
  codeDir: ../../../examples/trials/mnist-annotation
  command: python3 mnist.py --batch_num 100
  gpuNum: 0
useAnnotation: true
multiPhase: false
multiThread: false
trainingServicePlatform: local
# Integration-test experiment config: TPE tuner + Medianstop assessor.
# Runs the mnist-annotation example locally (annotation-based search space),
# capped at 2 trials / 5 minutes.
authorName: nni
experimentName: default_test
maxExecDuration: 5m
maxTrialNum: 2
trialConcurrency: 1
tuner:
  builtinTunerName: TPE
  classArgs:
    optimize_mode: maximize
assessor:
  builtinAssessorName: Medianstop
  classArgs:
    optimize_mode: maximize
trial:
  codeDir: ../../../examples/trials/mnist-annotation
  command: python3 mnist.py --batch_num 100
  gpuNum: 0
useAnnotation: true
multiPhase: false
multiThread: false
trainingServicePlatform: local
# Integration-test experiment config: BatchTuner + Medianstop assessor.
# BatchTuner enumerates the fixed parameter sets listed in
# search_space_batchtuner.json (it takes no classArgs). Runs the
# mnist-batch-tune-keras example locally, capped at 2 trials / 5 minutes.
authorName: nni
experimentName: default_test
maxExecDuration: 5m
maxTrialNum: 2
trialConcurrency: 1
searchSpacePath: search_space_batchtuner.json
tuner:
  builtinTunerName: BatchTuner
assessor:
  builtinAssessorName: Medianstop
  classArgs:
    optimize_mode: maximize
trial:
  codeDir: ../../../examples/trials/mnist-batch-tune-keras
  command: python3 mnist-keras.py --epochs 1
  gpuNum: 0
useAnnotation: false
multiPhase: false
multiThread: false
trainingServicePlatform: local
# Integration-test experiment config: TPE tuner + Curvefitting assessor.
# Curvefitting extrapolates the learning curve (epoch_num: 20, judging from
# start_step: 6 with early-stop threshold 0.95). Uses search_space.json and
# the plain mnist example, capped at 2 trials / 5 minutes.
authorName: nni
experimentName: default_test
maxExecDuration: 5m
maxTrialNum: 2
trialConcurrency: 1
searchSpacePath: search_space.json
tuner:
  builtinTunerName: TPE
  classArgs:
    optimize_mode: maximize
assessor:
  builtinAssessorName: Curvefitting
  classArgs:
    epoch_num: 20
    optimize_mode: maximize
    start_step: 6
    threshold: 0.95
trial:
  codeDir: ../../../examples/trials/mnist
  command: python3 mnist.py --batch_num 100
  gpuNum: 0
useAnnotation: false
multiPhase: false
multiThread: false
trainingServicePlatform: local
\ No newline at end of file
# Integration-test experiment config: GridSearch tuner + Medianstop assessor.
# GridSearch exhaustively walks search_space.json (it takes no classArgs).
# Runs the plain mnist example locally, capped at 2 trials / 5 minutes.
authorName: nni
experimentName: default_test
maxExecDuration: 5m
maxTrialNum: 2
trialConcurrency: 1
searchSpacePath: search_space.json
tuner:
  builtinTunerName: GridSearch
assessor:
  builtinAssessorName: Medianstop
  classArgs:
    optimize_mode: maximize
trial:
  codeDir: ../../../examples/trials/mnist
  command: python3 mnist.py --batch_num 100
  gpuNum: 0
useAnnotation: false
multiPhase: false
multiThread: false
trainingServicePlatform: local
# Integration-test experiment config: Hyperband (configured as an *advisor*,
# not a tuner, since it controls both sampling and early stopping) with
# budget R: 60 and downsampling rate eta: 3, plus a Medianstop assessor.
# Uses search_space_hyperband.json and the mnist-hyperband example locally,
# capped at 2 trials / 5 minutes.
authorName: nni
experimentName: default_test
maxExecDuration: 5m
maxTrialNum: 2
trialConcurrency: 1
searchSpacePath: search_space_hyperband.json
advisor:
  builtinAdvisorName: Hyperband
  classArgs:
    optimize_mode: maximize
    R: 60
    eta: 3
assessor:
  builtinAssessorName: Medianstop
  classArgs:
    optimize_mode: maximize
trial:
  codeDir: ../../../examples/trials/mnist-hyperband
  command: python3 mnist.py
  gpuNum: 0
useAnnotation: false
multiPhase: false
multiThread: false
trainingServicePlatform: local
# Integration-test experiment config: MetisTuner + Medianstop assessor.
# Uses search_space.json and the plain mnist example locally, capped at
# 2 trials / 5 minutes.
authorName: nni
experimentName: default_test
maxExecDuration: 5m
maxTrialNum: 2
trialConcurrency: 1
searchSpacePath: search_space.json
tuner:
  builtinTunerName: MetisTuner
  classArgs:
    optimize_mode: maximize
assessor:
  builtinAssessorName: Medianstop
  classArgs:
    optimize_mode: maximize
trial:
  codeDir: ../../../examples/trials/mnist
  command: python3 mnist.py --batch_num 100
  gpuNum: 0
useAnnotation: false
multiPhase: false
multiThread: false
trainingServicePlatform: local
{
"dropout_rate":{"_type":"quniform","_value":[0.5, 0.9, 2]},
"conv_size":{"_type":"choice","_value":[2,3,5,7]},
"hidden_size":{"_type":"choice","_value":[124, 512, 1024]},
"batch_size": {"_type":"choice", "_value": [1, 4, 8, 16, 32]},
"learning_rate":{"_type":"choice","_value":[0.0001, 0.001, 0.01, 0.1]}
}
{
"combine_params":
{
"_type" : "choice",
"_value" : [{"optimizer": "Adam", "learning_rate": 0.00001},
{"optimizer": "Adam", "learning_rate": 0.0001},
{"optimizer": "Adam", "learning_rate": 0.001},
{"optimizer": "SGD", "learning_rate": 0.01},
{"optimizer": "SGD", "learning_rate": 0.005},
{"optimizer": "SGD", "learning_rate": 0.0002}]
}
}
\ No newline at end of file
{
"dropout_rate":{"_type":"uniform","_value":[0.5,0.9]},
"conv_size":{"_type":"choice","_value":[2,3,5,7]},
"hidden_size":{"_type":"choice","_value":[124, 512, 1024]},
"batch_size": {"_type":"choice","_value":[8, 16, 32, 64]},
"learning_rate":{"_type":"choice","_value":[0.0001, 0.001, 0.01, 0.1]}
}
@@ -129,8 +129,8 @@ def main(params):
     Main function, build mnist network, run and send result to NNI.
     """
     mnist = input_data.read_data_sets(params['data_dir'], one_hot=True)
-    print('Mnist download data down.')
-    logger.debug('Mnist download data down.')
+    print('Mnist download data done.')
+    logger.debug('Mnist download data done.')
     mnist_network = MnistNetwork(channel_1_num=params['channel_1_num'],
         channel_2_num=params['channel_2_num'], conv_size=params['conv_size'
         ], hidden_size=params['hidden_size'], pool_size=params['pool_size'],
......
@@ -175,8 +175,8 @@ def main(params):
     '''
     # Import data
     mnist = input_data.read_data_sets(params['data_dir'], one_hot=True)
-    print('Mnist download data down.')
-    logger.debug('Mnist download data down.')
+    print('Mnist download data done.')
+    logger.debug('Mnist download data done.')
     # Create the model
     # Build the graph for the deep net
......
@@ -179,8 +179,8 @@ def main(params):
     '''
     # Import data
     mnist = input_data.read_data_sets(params['data_dir'], one_hot=True)
-    print('Mnist download data down.')
-    logger.debug('Mnist download data down.')
+    print('Mnist download data done.')
+    logger.debug('Mnist download data done.')
     # Create the model
     # Build the graph for the deep net
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment