"git@developer.sourcefind.cn:dadigang/Ventoy.git" did not exist on "2be340d2e8db0fa70fb4f132a99dfe8824ede6c8"
Unverified Commit 640e7bbf authored by QuanluZhang's avatar QuanluZhang Committed by GitHub
Browse files

Merge v0.3 to master (#339)

* Fix pypi package missing python module

* Fix pypi package missing python module

* fix bug in smartparam example (#322)

* Fix nnictl update trialnum and document (#326)

1. Fix restful server of update
2. Update nnictl document of update
3. Add tensorboard in document

* Update the version numbers from 0.3.2 to 0.3.3

* Update examples (#331)

* update mnist-annotation

* fix mnist-annotation typo

* update mnist example

* update mnist-smartparam

* update mnist-annotation

* update mnist-smartparam

* change learning rate

* update mnist assessor maxTrialNum

* update examples

* update examples

* update maxTrialNum

* fix breaking path in config_assessor.yml
parent a87517cf
......@@ -157,8 +157,8 @@ def main(params):
'''
# Import data
mnist = input_data.read_data_sets(params['data_dir'], one_hot=True)
print('Mnist download data down.')
logger.debug('Mnist download data down.')
print('Mnist download data done.')
logger.debug('Mnist download data done.')
# Create the model
# Build the graph for the deep net
......@@ -180,15 +180,15 @@ def main(params):
test_acc = 0.0
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
"""@nni.variable(nni.choice(50, 250, 500), name=batch_num)"""
batch_num = params['batch_num']
for i in range(batch_num):
batch = mnist.train.next_batch(batch_num)
"""@nni.variable(nni.choice(1, 5), name=dropout_rate)"""
"""@nni.variable(nni.choice(1, 4, 8, 16, 32), name=batch_size)"""
batch_size = params['batch_size']
for i in range(params['batch_num']):
batch = mnist.train.next_batch(batch_size)
"""@nni.variable(nni.choice(0.5, 0.9), name=dropout_rate)"""
dropout_rate = params['dropout_rate']
mnist_network.train_step.run(feed_dict={mnist_network.images: batch[0],
mnist_network.labels: batch[1],
mnist_network.keep_prob: dropout_rate}
mnist_network.keep_prob: 1 - dropout_rate}
)
if i % 100 == 0:
......@@ -224,7 +224,8 @@ def generate_defualt_params():
'pool_size': 2,
'hidden_size': 1024,
'learning_rate': 1e-4,
'batch_num': 200}
'batch_num': 2000,
'batch_size': 32}
return params
......
......@@ -180,13 +180,13 @@ def main(params):
test_acc = 0.0
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
batch_num = nni.choice(50, 250, 500, name='batch_num')
for i in range(batch_num):
batch = mnist.train.next_batch(batch_num)
dropout_rate = nni.choice(1, 5, name='dropout_rate')
batch_size = nni.choice(1, 4, 8, 16, 32, name='batch_size')
for i in range(2000):
batch = mnist.train.next_batch(batch_size)
dropout_rate = nni.choice(0.5, 0.9, name='dropout_rate')
mnist_network.train_step.run(feed_dict={mnist_network.images: batch[0],
mnist_network.labels: batch[1],
mnist_network.keep_prob: dropout_rate}
mnist_network.keep_prob: 1 - dropout_rate}
)
if i % 100 == 0:
......
......@@ -2,10 +2,10 @@ authorName: default
experimentName: example_mnist
trialConcurrency: 1
maxExecDuration: 1h
maxTrialNum: 1
maxTrialNum: 20
#choice: local, remote
trainingServicePlatform: local
searchSpacePath: ~/nni/examples/trials/mnist/search_space.json
searchSpacePath: search_space.json
#choice: true, false
useAnnotation: false
tuner:
......@@ -23,5 +23,5 @@ assessor:
optimize_mode: maximize
trial:
command: python3 mnist.py
codeDir: ~/nni/examples/trials/mnist
codeDir: .
gpuNum: 0
......@@ -173,10 +173,10 @@ def main(params):
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
for i in range(params['batch_num']):
batch = mnist.train.next_batch(params['batch_num'])
batch = mnist.train.next_batch(params['batch_size'])
mnist_network.train_step.run(feed_dict={mnist_network.images: batch[0],
mnist_network.labels: batch[1],
mnist_network.keep_prob: params['dropout_rate']}
mnist_network.keep_prob: 1 - params['dropout_rate']}
)
if i % 100 == 0:
......@@ -212,7 +212,8 @@ def generate_default_params():
'pool_size': 2,
'hidden_size': 1024,
'learning_rate': 1e-4,
'batch_num': 200}
'batch_num': 2000,
'batch_size': 32}
return params
......
{
"dropout_rate":{"_type":"uniform","_value":[0.1,0.5]},
"dropout_rate":{"_type":"uniform","_value":[0.5, 0.9]},
"conv_size":{"_type":"choice","_value":[2,3,5,7]},
"hidden_size":{"_type":"choice","_value":[124, 512, 1024]},
"batch_size": {"_type":"choice", "_value": [1, 4, 8, 16, 32]},
"learning_rate":{"_type":"choice","_value":[0.0001, 0.001, 0.01, 0.1]}
}
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment