OpenDAS / nni · Commits

Commit 71688b86, authored Sep 14, 2018 by xuehui
Parent: bfb4074d

    fix bug

Showing 6 changed files with 180 additions and 16 deletions (+180, -16):
    examples/trials/mnist-batch-tune-keras/config.yml         +20    -0
    examples/trials/mnist-batch-tune-keras/mnist-keras.py     +133   -0
    examples/trials/mnist-batch-tune-keras/search_space.json  +12    -0
    src/sdk/pynni/nni/__main__.py                              +1    -0
    src/sdk/pynni/nni/batch_tuner/batch_tuner.py              +13   -15
    tools/nnicmd/launcher_utils.py                             +1    -1
examples/trials/mnist-batch-tune-keras/config.yml  (new file, 0 → 100644)

authorName: default
experimentName: example_mnist-keras
trialConcurrency: 1
maxExecDuration: 1h
maxTrialNum: 10
#choice: local, remote
trainingServicePlatform: local
searchSpacePath: ~/nni/examples/trials/mnist-batch-tune-keras/search_space.json
#choice: true, false
useAnnotation: false
tuner:
  #choice: TPE, Random, Anneal, Evolution, BatchTuner
  builtinTunerName: BatchTuner
  classArgs:
    #choice: maximize, minimize
    optimize_mode: maximize
trial:
  command: python3 mnist-keras.py
  codeDir: ~/nni/examples/trials/mnist-batch-tune-keras
  gpuNum: 0
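BatchTuner hands out each entry of the search space's "_value" list exactly once, so maxTrialNum should be at least the number of candidates (six here). A minimal sketch, assuming the search-space file sits at the searchSpacePath given above:

# Sketch: count how many trials BatchTuner can generate for this experiment.
# Assumes the search-space file is at the path configured in searchSpacePath.
import json
import os

path = os.path.expanduser(
    '~/nni/examples/trials/mnist-batch-tune-keras/search_space.json')
with open(path) as f:
    space = json.load(f)

candidates = space['combine_params']['_value']
print('BatchTuner will generate at most %d trials' % len(candidates))  # 6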
examples/trials/mnist-batch-tune-keras/mnist-keras.py  (new file, 0 → 100644)

# Copyright (c) Microsoft Corporation
# All rights reserved.
#
# MIT License
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
# to permit persons to whom the Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

import argparse
import logging
import os

import keras
import numpy as np
from keras import backend as K
from keras.callbacks import TensorBoard
from keras.datasets import mnist
from keras.layers import Conv2D, Dense, Flatten, MaxPooling2D
from keras.models import Sequential

import nni

LOG = logging.getLogger('mnist_keras')

K.set_image_data_format('channels_last')
TENSORBOARD_DIR = os.environ['NNI_OUTPUT_DIR']

H, W = 28, 28
NUM_CLASSES = 10


def create_mnist_model(hyper_params, input_shape=(H, W, 1), num_classes=NUM_CLASSES):
    '''
    Create simple convolutional model
    '''
    layers = [
        Conv2D(32, kernel_size=(3, 3), activation='relu', input_shape=input_shape),
        Conv2D(64, (3, 3), activation='relu'),
        MaxPooling2D(pool_size=(2, 2)),
        Flatten(),
        Dense(100, activation='relu'),
        Dense(num_classes, activation='softmax')
    ]

    model = Sequential(layers)

    if hyper_params['optimizer'] == 'Adam':
        optimizer = keras.optimizers.Adam(lr=hyper_params['learning_rate'])
    else:
        optimizer = keras.optimizers.SGD(lr=hyper_params['learning_rate'], momentum=0.9)
    model.compile(loss=keras.losses.categorical_crossentropy,
                  optimizer=optimizer, metrics=['accuracy'])

    return model


def load_mnist_data(args):
    '''
    Load MNIST dataset
    '''
    (x_train, y_train), (x_test, y_test) = mnist.load_data()

    x_train = (np.expand_dims(x_train, -1).astype(np.float) / 255.)[:args.num_train]
    x_test = (np.expand_dims(x_test, -1).astype(np.float) / 255.)[:args.num_test]
    y_train = keras.utils.to_categorical(y_train, NUM_CLASSES)[:args.num_train]
    y_test = keras.utils.to_categorical(y_test, NUM_CLASSES)[:args.num_test]

    LOG.debug('x_train shape: %s', (x_train.shape,))
    LOG.debug('x_test shape: %s', (x_test.shape,))

    return x_train, y_train, x_test, y_test


class SendMetrics(keras.callbacks.Callback):
    '''
    Keras callback to send metrics to NNI framework
    '''
    def on_epoch_end(self, epoch, logs={}):
        '''
        Run on end of each epoch
        '''
        LOG.debug(logs)
        nni.report_intermediate_result(logs['acc'])


def train(args, params):
    '''
    Train model
    '''
    x_train, y_train, x_test, y_test = load_mnist_data(args)
    model = create_mnist_model(params)

    # nni
    model.fit(x_train, y_train, batch_size=args.batch_size, epochs=args.epochs,
              verbose=1, validation_data=(x_test, y_test),
              callbacks=[SendMetrics(), TensorBoard(log_dir=TENSORBOARD_DIR)])

    _, acc = model.evaluate(x_test, y_test, verbose=0)
    LOG.debug('Final result is: %s', acc)
    nni.report_final_result(acc)


def generate_default_params():
    '''
    Generate default hyper parameters
    '''
    return {
        'optimizer': 'Adam',
        'learning_rate': 0.001
    }


if __name__ == '__main__':
    PARSER = argparse.ArgumentParser()
    PARSER.add_argument("--batch_size", type=int, default=200,
                        help="batch size", required=False)
    PARSER.add_argument("--epochs", type=int, default=10,
                        help="Train epochs", required=False)
    PARSER.add_argument("--num_train", type=int, default=60000,
                        help="Number of train samples to be used, maximum 60000", required=False)
    PARSER.add_argument("--num_test", type=int, default=10000,
                        help="Number of test samples to be used, maximum 10000", required=False)

    ARGS, UNKNOWN = PARSER.parse_known_args()

    try:
        # get parameters from tuner
        # RECEIVED_PARAMS = {"optimizer": "Adam", "learning_rate": 0.00001}
        RECEIVED_PARAMS = nni.get_parameters()
        LOG.debug(RECEIVED_PARAMS)
        PARAMS = generate_default_params()
        PARAMS.update(RECEIVED_PARAMS)
        # train
        train(ARGS, PARAMS)
    except Exception as e:
        LOG.exception(e)
        raise
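The commented-out RECEIVED_PARAMS line in the __main__ block shows how the script can be exercised without a live tuner: hard-code a parameter dict in place of nni.get_parameters(). A minimal sketch of the merge that follows, with values copied from that example:

# Sketch of the parameter merge performed in __main__: values received from
# the tuner override the defaults from generate_default_params().
RECEIVED_PARAMS = {"optimizer": "Adam", "learning_rate": 0.00001}

PARAMS = {'optimizer': 'Adam', 'learning_rate': 0.001}  # the defaults
PARAMS.update(RECEIVED_PARAMS)
print(PARAMS)  # {'optimizer': 'Adam', 'learning_rate': 1e-05}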
examples/trials/mnist-batch-tune-keras/search_space.json  (new file, 0 → 100644)

{
    "combine_params": {
        "_type": "choice",
        "_value": [{"optimizer": "Adam", "learning_rate": 0.00001},
                   {"optimizer": "Adam", "learning_rate": 0.0001},
                   {"optimizer": "Adam", "learning_rate": 0.001},
                   {"optimizer": "SGD", "learning_rate": 0.01},
                   {"optimizer": "SGD", "learning_rate": 0.005},
                   {"optimizer": "SGD", "learning_rate": 0.0002}]
    }
}
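The BatchTuner diff below validates exactly this shape: a single top-level key whose "_type" is "choice" and whose "_value" is a list. A minimal sketch of the same check, assuming it is run from the trial's codeDir so the relative path resolves:

# Sketch mirroring BatchTuner.is_valid from the diff below: the search space
# must have exactly one key, of "_type" "choice", with a list "_value".
import json

with open('search_space.json') as f:
    search_space = json.load(f)

assert len(search_space) == 1
for param in search_space:
    assert search_space[param]['_type'] == 'choice'
    assert isinstance(search_space[param]['_value'], list)
print('valid BatchTuner search space')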
src/sdk/pynni/nni/__main__.py

@@ -30,6 +30,7 @@ import importlib
 from nni.msg_dispatcher import MsgDispatcher
 from nni.hyperopt_tuner.hyperopt_tuner import HyperoptTuner
 from nni.evolution_tuner.evolution_tuner import EvolutionTuner
+from nni.batch_tuner.batch_tuner import BatchTuner
 from nni.medianstop_assessor.medianstop_assessor import MedianstopAssessor

 logger = logging.getLogger('nni.main')
src/sdk/pynni/nni/batch_tuner/batch_tuner.py

@@ -29,8 +29,6 @@ import random
 import numpy as np

 from nni.tuner import Tuner
-from . import parameter_expressions
-
 TYPE = '_type'
 CHOICE = 'choice'
@@ -48,35 +46,35 @@ class BatchTuner(Tuner):
         }
     '''

-    def __init__(self):
+    def __init__(self, optimize_mode):
         self.count = -1
         self.values = []

-    def is_valid(self, search_space)
+    def is_valid(self, search_space):
         '''
         Check the search space is valid: only contains 'choice' type
         '''
         if not len(search_space) == 1:
-            raise RuntimeException('BatchTuner only supprt one combined-paramreters key.')
+            raise RuntimeError('BatchTuner only supprt one combined-paramreters key.')

         for param in search_space:
-            param_type = param[TYPE]
-            if param_type is not CHOICE:
-                raise RuntimeException('BatchTuner only supprt one combined-paramreters type is choice.')
+            param_type = search_space[param][TYPE]
+            if not param_type == CHOICE:
+                raise RuntimeError('BatchTuner only supprt one combined-paramreters type is choice.')
             else:
-                if isinstance(param[VALUE], list):
-                    return param[VALUE]
-                raise RuntimeException('The combined-paramreters value in BatchTuner is not a list.')
+                if isinstance(search_space[param][VALUE], list):
+                    return search_space[param][VALUE]
+                raise RuntimeError('The combined-paramreters value in BatchTuner is not a list.')
         return None

     def update_search_space(self, search_space):
-        self.values = is_valid(search_space)
+        self.values = self.is_valid(search_space)

     def generate_parameters(self, parameter_id):
-        count += 1
-        if count > len(self.value) - 1:
+        self.count += 1
+        if self.count > len(self.values) - 1:
             return None
-        return self.values[count]
+        return self.values[self.count]

     def receive_trial_result(self, parameter_id, parameters, reward):
         pass
\ No newline at end of file
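After the fix, generate_parameters walks self.values once and returns None when all candidates have been handed out (the old version read unbound locals count and self.value). A self-contained sketch of that iteration logic, an illustrative re-implementation rather than the shipped class:

# Minimal sketch of the fixed iteration logic in BatchTuner: hand out each
# candidate exactly once, then signal exhaustion with None.
class BatchSketch:
    def __init__(self):
        self.count = -1
        self.values = [{'optimizer': 'Adam', 'learning_rate': 0.001},
                       {'optimizer': 'SGD', 'learning_rate': 0.01}]

    def generate_parameters(self, parameter_id):
        self.count += 1
        if self.count > len(self.values) - 1:
            return None
        return self.values[self.count]

tuner = BatchSketch()
print(tuner.generate_parameters(0))  # first candidate
print(tuner.generate_parameters(1))  # second candidate
print(tuner.generate_parameters(2))  # None: no candidates left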
tools/nnicmd/launcher_utils.py

@@ -91,7 +91,7 @@ def parse_tuner_content(experiment_config):
                              'Random': 'HyperoptTuner',\
                              'Anneal': 'HyperoptTuner',\
                              'Evolution': 'EvolutionTuner',\
-                             'BatchTuning': 'BatchTuner'}
+                             'BatchTuner': 'BatchTuner'}

     tuner_algorithm_name_dict = {'TPE': 'tpe',\
                                  'Random': 'random_search',\
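This one-word rename matters because config.yml selects the tuner with builtinTunerName: BatchTuner, and the old key 'BatchTuning' made that lookup miss. A minimal sketch of the failure (dict names are illustrative; only the keys and values come from the hunk):

# Sketch: before the fix, the misspelled key broke the tuner-name lookup for
# the value actually used in config.yml.
old_dict = {'TPE': 'HyperoptTuner', 'Random': 'HyperoptTuner',
            'Anneal': 'HyperoptTuner', 'Evolution': 'EvolutionTuner',
            'BatchTuning': 'BatchTuner'}   # key as it was before this commit
new_dict = dict(old_dict)
del new_dict['BatchTuning']
new_dict['BatchTuner'] = 'BatchTuner'      # key after the fix

print('BatchTuner' in old_dict)   # False: config.yml's name was not found
print('BatchTuner' in new_dict)   # True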