Unverified commit 8896afa3 authored by xuehui, committed by GitHub

Update for reference (#684)

* update readme in ga_squad

* update readme

* fix typo

* Update README.md

* Update README.md

* Update README.md

* update readme

* update

* fix path

* update reference

* fix bug in config file

* update nni_arch_overview.png

* update

* update

* update

* update home page

* update for reference
parent 85d936d1
@@ -17,10 +17,10 @@
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
batch_tuner.py including:
    class BatchTuner
"""

import copy
from enum import Enum, unique

@@ -37,7 +37,7 @@ VALUE = '_value'

class BatchTuner(Tuner):
    """
    BatchTuner is a tuner that runs each configuration the user wants to try, in batch.
    Only a search space of the following form is accepted:
    {
@@ -45,16 +45,20 @@ class BatchTuner(Tuner):
            '_value': '[{...}, {...}, {...}]',
        }
    }
    """

    def __init__(self):
        self.count = -1
        self.values = []

    def is_valid(self, search_space):
        """
        Check whether the search space is valid: it may only contain the 'choice' type.

        Parameters
        ----------
        search_space : dict
        """
        if not len(search_space) == 1:
            raise RuntimeError('BatchTuner only supports one combined-parameters key.')
@@ -69,9 +73,21 @@ class BatchTuner(Tuner):
        return None

    def update_search_space(self, search_space):
        """Update the search space.

        Parameters
        ----------
        search_space : dict
        """
        self.values = self.is_valid(search_space)

    def generate_parameters(self, parameter_id):
        """Returns a dict of trial (hyper-)parameters, as a serializable object.

        Parameters
        ----------
        parameter_id : int
        """
        self.count += 1
        if self.count > len(self.values) - 1:
            raise nni.NoMoreTrialError('no more parameters now.')
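
For reference, a minimal sketch (not part of this diff) of a concrete search space of the form the BatchTuner docstring describes: a single key whose `_type` is `choice` and whose `_value` lists every configuration to run, one per trial. The key name `combine_params` and the parameter values are illustrative.

```python
# Sketch of a BatchTuner-style search space; key name and values are
# illustrative, only the single-'choice'-key shape is prescribed above.
search_space = {
    'combine_params': {
        '_type': 'choice',
        '_value': [
            {'optimizer': 'Adam', 'learning_rate': 0.001},
            {'optimizer': 'SGD', 'learning_rate': 0.01},
            {'optimizer': 'SGD', 'learning_rate': 0.1},
        ],
    }
}
```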
......
@@ -17,16 +17,12 @@
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
evolution_tuner.py including:
    class OptimizeMode
    class Individual
    class EvolutionTuner
"""

import copy
from enum import Enum, unique
@@ -40,18 +36,22 @@ from .. import parameter_expressions

@unique
class OptimizeMode(Enum):
    """Optimize Mode class

    if OptimizeMode is 'minimize', the tuner needs to minimize the reward
    received from the trial.
    if OptimizeMode is 'maximize', the tuner needs to maximize the reward
    received from the trial.
    """
    Minimize = 'minimize'
    Maximize = 'maximize'

@unique
class NodeType(Enum):
    """Node Type class
    """
    Root = 'root'
    Type = '_type'
    Value = '_value'
@@ -59,9 +59,8 @@ class NodeType(Enum):

def json2space(x, oldy=None, name=NodeType.Root.value):
    """Change search space from json format to hyperopt format
    """
    y = list()
    if isinstance(x, dict):
        if NodeType.Type.value in x.keys():
@@ -89,9 +88,8 @@ def json2space(x, oldy=None, name=NodeType.Root.value):

def json2paramater(x, is_rand, random_state, oldy=None, Rand=False, name=NodeType.Root.value):
    """Json to parameters.
    """
    if isinstance(x, dict):
        if NodeType.Type.value in x.keys():
            _type = x[NodeType.Type.value]
@@ -131,6 +129,16 @@ def json2paramater(x, is_rand, random_state, oldy=None, Rand=False, name=NodeTyp

def _split_index(params):
    """Delete index information from params

    Parameters
    ----------
    params : dict

    Returns
    -------
    result : dict
    """
    result = {}
    for key in params:
        if isinstance(params[key], dict):
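
A minimal sketch (not part of this diff) of the behaviour `_split_index` documents: nested entries carrying an internal index are collapsed to the bare value before parameters reach the trial. The `'_index'` check is an assumption based on the `INDEX = '_index'` constant that appears elsewhere in this change; the body of the loop is collapsed in the hunk above.

```python
# Hypothetical illustration of index-stripping; the '_index'/'_value'
# key names follow the constants visible in this diff.
params = {'lr': {'_index': 2, '_value': 0.01}, 'batch_size': 32}
result = {}
for key in params:
    if isinstance(params[key], dict) and '_index' in params[key]:
        result[key] = params[key]['_value']   # drop the index, keep the value
    else:
        result[key] = params[key]
assert result == {'lr': 0.01, 'batch_size': 32}
```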
@@ -142,10 +150,19 @@ def _split_index(params):

class Individual(object):
    """
    Individual class to store the individual's info.
    """

    def __init__(self, config=None, info=None, result=None, save_dir=None):
        """
        Parameters
        ----------
        config : str
        info : str
        result : float
        save_dir : str
        """
        self.config = config
        self.result = result
        self.info = info
@@ -157,6 +174,13 @@ class Individual(object):
        ", config :" + str(self.config) + ", result: " + str(self.result)

    def mutation(self, config=None, info=None, save_dir=None):
        """
        Parameters
        ----------
        config : str
        info : str
        save_dir : str
        """
        self.result = None
        self.config = config
        self.restore_dir = self.save_dir
@@ -165,13 +189,22 @@ class Individual(object):

class EvolutionTuner(Tuner):
    """
    EvolutionTuner is a tuner using the naive evolution algorithm.
    """

    def __init__(self, optimize_mode, population_size=32):
        """
        Parameters
        ----------
        optimize_mode : str
        population_size : int
            initial population size. The larger the population size,
            the better the evolution performance.
        """
        self.optimize_mode = OptimizeMode(optimize_mode)
        self.population_size = population_size
        self.trial_result = []
        self.searchspace_json = None
        self.total_data = {}
@@ -180,10 +213,13 @@ class EvolutionTuner(Tuner):
        self.space = None

    def update_search_space(self, search_space):
        """Update search space.
        search_space contains the information that the user pre-defined.

        Parameters
        ----------
        search_space : dict
        """
        self.searchspace_json = search_space
        self.space = json2space(self.searchspace_json)
@@ -198,8 +234,15 @@ class EvolutionTuner(Tuner):
        self.population.append(Individual(config=config))

    def generate_parameters(self, parameter_id):
        """Returns a dict of trial (hyper-)parameters, as a serializable object.

        Parameters
        ----------
        parameter_id : int

        Returns
        -------
        config : dict
        """
        if not self.population:
            raise RuntimeError('The population is empty')
@@ -235,10 +278,14 @@ class EvolutionTuner(Tuner):
        return config

    def receive_trial_result(self, parameter_id, parameters, value):
        """Record the result from a trial

        Parameters
        ----------
        parameters : dict
        value : dict/float
            if value is a dict, it should have a "default" key.
            value is the final metrics of the trial.
        """
        reward = self.extract_scalar_reward(value)
        if parameter_id not in self.total_data:
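
Several docstrings in this change describe the same `value` convention. For reference, a minimal sketch of how a trial reports such a value through the standard `nni.report_final_result` API; the extra `accuracy` field is illustrative.

```python
import nni

# value may be a bare float ...
nni.report_final_result(0.93)

# ... or a dict, in which case the "default" key carries the reward the
# tuner optimizes; other keys (the 'accuracy' field here is illustrative)
# are recorded but not optimized.
nni.report_final_result({'default': 0.93, 'accuracy': 0.93})
```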
......
@@ -35,9 +35,9 @@ logger = logging.getLogger('hyperopt_AutoML')

@unique
class OptimizeMode(Enum):
    """
    Optimize Mode including Minimize and Maximize
    """
    Minimize = 'minimize'
    Maximize = 'maximize'
@@ -49,9 +49,16 @@ INDEX = '_index'

def json2space(in_x, name=ROOT):
    """
    Change json to search space in hyperopt.

    Parameters
    ----------
    in_x : dict/list/str/int/float
        The part of json.
    name : str
        name could be ROOT, TYPE, VALUE or INDEX.
    """
    out_y = copy.deepcopy(in_x)
    if isinstance(in_x, dict):
        if TYPE in in_x.keys():
@@ -78,9 +85,9 @@ def json2space(in_x, name=ROOT):

def json2parameter(in_x, parameter, name=ROOT):
    """
    Change json to parameters.
    """
    out_y = copy.deepcopy(in_x)
    if isinstance(in_x, dict):
        if TYPE in in_x.keys():
@@ -133,6 +140,9 @@ def json2vals(in_x, vals, out_y, name=ROOT):

def _split_index(params):
    """
    Delete index information from params
    """
    result = {}
    for key in params:
        if isinstance(params[key], dict):
@@ -144,11 +154,18 @@ def _split_index(params):

class HyperoptTuner(Tuner):
    """
    HyperoptTuner is a tuner which uses the hyperopt algorithm.
    """

    def __init__(self, algorithm_name, optimize_mode):
        """
        Parameters
        ----------
        algorithm_name : str
            algorithm_name includes "tpe", "random_search" and "anneal".
        optimize_mode : str
        """
        self.algorithm_name = algorithm_name
        self.optimize_mode = OptimizeMode(optimize_mode)
        self.json = None
@@ -156,6 +173,12 @@ class HyperoptTuner(Tuner):
        self.rval = None

    def _choose_tuner(self, algorithm_name):
        """
        Parameters
        ----------
        algorithm_name : str
            algorithm_name includes "tpe", "random_search" and "anneal".
        """
        if algorithm_name == 'tpe':
            return hp.tpe.suggest
        if algorithm_name == 'random_search':
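
The "anneal" branch of this dispatch is collapsed in the hunk above; a minimal sketch of the full mapping under that assumption (hyperopt does ship `tpe.suggest`, `rand.suggest` and `anneal.suggest`):

```python
import hyperopt as hp

# Sketch of the complete dispatch; the 'anneal' branch is collapsed in
# the diff above, so its exact form here is an assumption.
def choose_tuner(algorithm_name):
    if algorithm_name == 'tpe':
        return hp.tpe.suggest
    if algorithm_name == 'random_search':
        return hp.rand.suggest
    if algorithm_name == 'anneal':
        return hp.anneal.suggest
    raise RuntimeError('Unsupported tuner algorithm in hyperopt.')
```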
@@ -165,11 +188,15 @@ class HyperoptTuner(Tuner):
        raise RuntimeError('Unsupported tuner algorithm in hyperopt.')

    def update_search_space(self, search_space):
        """
        Update search space definition in tuner by search_space in parameters.

        Will be called when first setting up the experiment or when the search
        space is updated in the WebUI.

        Parameters
        ----------
        search_space : dict
        """
        self.json = search_space
        search_space_instance = json2space(self.json)
        rstate = np.random.RandomState()
@@ -182,10 +209,17 @@ class HyperoptTuner(Tuner):
        self.rval.catch_eval_exceptions = False

    def generate_parameters(self, parameter_id):
        """
        Returns a set of trial (hyper-)parameters, as a serializable object.

        Parameters
        ----------
        parameter_id : int

        Returns
        -------
        params : dict
        """
        rval = self.rval
        trials = rval.trials
        algorithm = rval.algo
@@ -209,12 +243,17 @@ class HyperoptTuner(Tuner):
        return params

    def receive_trial_result(self, parameter_id, parameters, value):
        """
        Record an observation of the objective function

        Parameters
        ----------
        parameter_id : int
        parameters : dict
        value : dict/float
            if value is a dict, it should have a "default" key.
            value is the final metrics of the trial.
        """
        reward = self.extract_scalar_reward(value)
        # restore the parameters that contain '_index'
        if parameter_id not in self.total_data:
@@ -262,13 +301,14 @@ class HyperoptTuner(Tuner):
    def miscs_update_idxs_vals(self, miscs, idxs, vals,
                               assert_all_vals_used=True,
                               idxs_map=None):
        """
        Unpack the idxs-vals format into the list of dictionaries that is
        `misc`.

        idxs_map : dict
            idxs_map is a dictionary of id->id mappings so that the misc['idxs'] can
            contain different numbers than the idxs argument.
        """
        if idxs_map is None:
            idxs_map = {}
......
@@ -45,9 +45,9 @@ logger = logging.getLogger("Metis_Tuner_AutoML")

@unique
class OptimizeMode(Enum):
    """
    Optimize Mode class
    """
    Minimize = 'minimize'
    Maximize = 'maximize'
@@ -59,27 +59,38 @@ CONSTRAINT_PARAMS_IDX = []

class MetisTuner(Tuner):
    """
    Metis Tuner

    More information about the algorithm can be found here:
    https://www.microsoft.com/en-us/research/publication/metis-robustly-tuning-tail-latencies-cloud-systems/
    """

    def __init__(self, optimize_mode="maximize", no_resampling=True, no_candidates=True,
                 selection_num_starting_points=10, cold_start_num=10):
        """
        Parameters
        ----------
        optimize_mode : str
            a string with two possible modes, "maximize" and "minimize".
        no_resampling : bool
            Should Metis consider re-sampling as part of the search strategy?
            If you are confident that the training dataset is noise-free, then you do not need re-sampling.
        no_candidates : bool
            Should Metis suggest parameters for the next benchmark?
            If you do not plan to do more benchmarks, Metis can skip this step.
        selection_num_starting_points : int
            How many times should Metis try to find the global optimum in the search space?
            The higher the number, the longer it takes to output the solution.
        cold_start_num : int
            Metis needs some trial results for a cold start. When the number of trial results
            is less than cold_start_num, Metis randomly samples hyper-parameters for the trial.
        """
        self.samples_x = []
        self.samples_y = []
        self.samples_y_aggregation = []
@@ -95,9 +106,12 @@ class MetisTuner(Tuner):
    def update_search_space(self, search_space):
        """Update the self.x_bounds and self.x_types by the search_space.json

        Parameters
        ----------
        search_space : dict
        """
        self.x_bounds = [[] for i in range(len(search_space))]
        self.x_types = [NONE_TYPE for i in range(len(search_space))]
@@ -152,9 +166,16 @@ class MetisTuner(Tuner):
    def _pack_output(self, init_parameter):
        """Pack the output

        Parameters
        ----------
        init_parameter : dict

        Returns
        -------
        output : dict
        """
        output = {}
        for i, param in enumerate(init_parameter):
            output[self.key_order[i]] = param
@@ -162,12 +183,19 @@ class MetisTuner(Tuner):
    def generate_parameters(self, parameter_id):
        """Generate the next parameter for a trial.

        If the number of trial results is lower than the cold start number,
        Metis first generates some parameters at random.
        Otherwise, Metis chooses the parameters with the Gaussian Process Model and the Gaussian Mixture Model.

        Parameters
        ----------
        parameter_id : int

        Returns
        -------
        result : dict
        """
        if len(self.samples_x) < self.cold_start_num:
            init_parameter = _rand_init(self.x_bounds, self.x_types, 1)[0]
            results = self._pack_output(init_parameter)
@@ -184,11 +212,15 @@ class MetisTuner(Tuner):
    def receive_trial_result(self, parameter_id, parameters, value):
        """Tuner receives a result from a trial.

        Parameters
        ----------
        parameter_id : int
        parameters : dict
        value : dict/float
            if value is a dict, it should have a "default" key.
        """
        value = self.extract_scalar_reward(value)
        if self.optimize_mode == OptimizeMode.Maximize:
            value = -value
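
A minimal sketch (not part of this diff) of the reward handling shown above. That `extract_scalar_reward` reduces a dict result to its "default" entry is an assumption based on the "default" key convention in this change's docstrings; the negation is visible in the hunk itself.

```python
def extract_scalar_reward(value):
    # assumed behaviour: dict results are reduced to their "default" entry
    return value['default'] if isinstance(value, dict) else value

optimize_mode = 'maximize'
value = extract_scalar_reward({'default': 0.95, 'accuracy': 0.95})
if optimize_mode == 'maximize':
    value = -value  # Metis minimizes internally, so maximize negates the reward
assert value == -0.95
```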
@@ -434,9 +466,8 @@ def _rand_init(x_bounds, x_types, selection_num_starting_points):

def get_median(temp_list):
    """Return median
    """
    num = len(temp_list)
    temp_list.sort()
    print(temp_list)
......
@@ -52,20 +52,35 @@ class NetworkMorphismTuner(Tuner):
        default_model_width=Constant.MODEL_WIDTH,
    ):
        """ Initializer of the NetworkMorphismTuner.

        Parameters
        ----------
        task : str
            task mode, such as "cv", "common", etc. (default: {"cv"})
        input_width : int
            input sample shape (default: {32})
        input_channel : int
            input sample shape (default: {3})
        n_output_node : int
            output node number (default: {10})
        algorithm_name : str
            algorithm name used in the network morphism (default: {"Bayesian"})
        optimize_mode : str
            optimize mode, "minimize" or "maximize" (default: {"minimize"})
        path : str
            default path to save the model file (default: {"model_path"})
        verbose : bool
            whether to print the log (default: {True})
        beta : float
            The beta in the acquisition function. (default: {Constant.BETA})
        t_min : float
            The minimum temperature for simulated annealing. (default: {Constant.T_MIN})
        max_model_size : int
            max model size of the graph (default: {Constant.MAX_MODEL_SIZE})
        default_model_len : int
            default model length (default: {Constant.MODEL_LEN})
        default_model_width : int
            default model width (default: {Constant.MODEL_WIDTH})
        """
        if not os.path.exists(path):
@@ -92,8 +107,6 @@ class NetworkMorphismTuner(Tuner):
        self.bo = BayesianOptimizer(self, self.t_min, self.optimize_mode, self.beta)
        self.training_queue = []
        self.descriptors = []
        self.history = []
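
A minimal sketch (not part of this diff) of constructing the tuner with its documented defaults; the import path is an assumption about the NNI SDK layout of this era.

```python
# Hypothetical import path; the constructor arguments match the
# documented defaults above, overriding only task and optimize_mode.
from nni.networkmorphism_tuner.networkmorphism_tuner import NetworkMorphismTuner

tuner = NetworkMorphismTuner(task="cv", optimize_mode="maximize")
```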
@@ -112,6 +125,9 @@ class NetworkMorphismTuner(Tuner):
    def generate_parameters(self, parameter_id):
        """
        Returns a trial's neural architecture, as a serializable object.

        Parameters
        ----------
        parameter_id : int
        """
        if not self.history:
@@ -137,14 +153,14 @@ class NetworkMorphismTuner(Tuner):
    def receive_trial_result(self, parameter_id, parameters, value):
        """ Record an observation of the objective function.

        Parameters
        ----------
        parameter_id : int
        parameters : dict
        value : dict/float
            if value is a dict, it should have a "default" key.
        """
        reward = self.extract_scalar_reward(value)

        if parameter_id not in self.total_data:
@@ -176,9 +192,13 @@ class NetworkMorphismTuner(Tuner):
    def generate(self):
        """Generate the next neural architecture.

        Returns
        -------
        other_info : any object
            Anything to be saved in the training queue together with the architecture.
        generated_graph : Graph
            An instance of Graph.
        """
        generated_graph, new_father_id = self.bo.generate(self.descriptors)
        if new_father_id is None:
@@ -191,11 +211,16 @@ class NetworkMorphismTuner(Tuner):
    def update(self, other_info, graph, metric_value, model_id):
        """ Update the controller with the evaluation result of a neural architecture.

        Parameters
        ----------
        other_info : any object
            In our case it is the father ID in the search tree.
        graph : Graph
            An instance of Graph. The trained neural architecture.
        metric_value : float
            The final evaluated metric value.
        model_id : int
        """
        father_id = other_info
        self.bo.fit([graph.extract_descriptor()], [metric_value])
@@ -204,15 +229,16 @@ class NetworkMorphismTuner(Tuner):
    def add_model(self, metric_value, model_id):
        """ Add model to the history.

        Parameters
        ----------
        metric_value : float
        model_id : int

        Returns
        -------
        model : dict
        """
        if self.verbose:
            logger.info("Saving model.")
@@ -223,16 +249,10 @@ class NetworkMorphismTuner(Tuner):
        file = open(os.path.join(self.path, "best_model.txt"), "w")
        file.write("best model: " + str(model_id))
        file.close()
        return ret
    def get_best_model_id(self):
        """ Get the best model_id from history using the metric value
        """
        if self.optimize_mode is OptimizeMode.Maximize:
@@ -241,10 +261,16 @@ class NetworkMorphismTuner(Tuner):
    def load_model_by_id(self, model_id):
        """Get the model by model_id

        Parameters
        ----------
        model_id : int
            model index

        Returns
        -------
        load_model : Graph
            the model graph representation
        """
        with open(os.path.join(self.path, str(model_id) + ".json")) as fin:
@@ -255,17 +281,26 @@ class NetworkMorphismTuner(Tuner):
    def load_best_model(self):
        """ Get the best model by model id

        Returns
        -------
        load_model : Graph
            the model graph representation
        """
        return self.load_model_by_id(self.get_best_model_id())

    def get_metric_value_by_id(self, model_id):
        """ Get the model metric value by its model_id

        Parameters
        ----------
        model_id : int
            model index

        Returns
        -------
        float
            the model metric
        """
        for item in self.history:
            if item["model_id"] == model_id:
......