"vscode:/vscode.git/clone" did not exist on "5aa0ba498414731c46f5fcd55ccf0540407aff7c"
Unverified Commit 8896afa3 authored by xuehui, committed by GitHub

Update for reference (#684)

* update readme in ga_squad

* update readme

* fix typo

* Update README.md

* Update README.md

* Update README.md

* update readme

* update

* fix path

* update reference

* fix bug in config file

* update nni_arch_overview.png

* update

* update

* update

* update home page

* update for reference
parent 85d936d1
......@@ -17,10 +17,10 @@
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''
"""
batch_tuner.py including:
class BatchTuner
'''
"""
import copy
from enum import Enum, unique
......@@ -37,7 +37,7 @@ VALUE = '_value'
class BatchTuner(Tuner):
'''
"""
BatchTuner is a tuner that runs every configuration the user wants to run, in a batch.
The only accepted search space format is:
{
......@@ -45,16 +45,20 @@ class BatchTuner(Tuner):
'_value': '[{...}, {...}, {...}]',
}
}
'''
"""
def __init__(self):
self.count = -1
self.values = []
def is_valid(self, search_space):
'''
"""
Check that the search space is valid: it should only contain the 'choice' type
'''
Parameters
----------
search_space : dict
"""
if not len(search_space) == 1:
raise RuntimeError('BatchTuner only supports one combined-parameters key.')
......@@ -69,9 +73,21 @@ class BatchTuner(Tuner):
return None
def update_search_space(self, search_space):
"""Update the search space
Parameters
----------
search_space : dict
"""
self.values = self.is_valid(search_space)
def generate_parameters(self, parameter_id):
"""Returns a dict of trial (hyper-)parameters, as a serializable object.
Parameters
----------
parameter_id : int
"""
self.count += 1
if self.count > len(self.values) - 1:
raise nni.NoMoreTrialError('no more parameters now.')
......
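To make the accepted format concrete, here is a minimal sketch of a search space that the BatchTuner documented above would consume; the key name "combine_params" and the inner parameter dicts are illustrative only and not taken from the diff.
# Hypothetical search space for BatchTuner: a single combined-parameters key
# whose "_type" is "choice" and whose "_value" lists the exact configurations
# to run, one trial each.
search_space = {
    "combine_params": {
        "_type": "choice",
        "_value": [
            {"optimizer": "Adam", "learning_rate": 0.001},
            {"optimizer": "SGD", "learning_rate": 0.01},
            {"optimizer": "SGD", "learning_rate": 0.1},
        ],
    }
}
# generate_parameters() then returns these dicts one by one and raises
# NoMoreTrialError once the list is exhausted.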
......@@ -17,16 +17,12 @@
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''
"""
evolution_tuner.py including:
class OptimizeMode
class Individual
class EvolutionTuner
if OptimizeMode is 'minimize', it means the tuner need to minimize the reward
that received from Trial.
if OptimizeMode is 'maximize', it means the tuner need to maximize the reward
that received from Trial.
'''
"""
import copy
from enum import Enum, unique
......@@ -40,18 +36,22 @@ from .. import parameter_expressions
@unique
class OptimizeMode(Enum):
'''
Optimize Mode class
'''
"""Optimize Mode class
If OptimizeMode is 'minimize', the tuner needs to minimize the reward
received from the trial.
If OptimizeMode is 'maximize', the tuner needs to maximize the reward
received from the trial.
"""
Minimize = 'minimize'
Maximize = 'maximize'
@unique
class NodeType(Enum):
'''
Node Type class
'''
"""Node Type class
"""
Root = 'root'
Type = '_type'
Value = '_value'
......@@ -59,9 +59,8 @@ class NodeType(Enum):
def json2space(x, oldy=None, name=NodeType.Root.value):
'''
Change search space from json format to hyperopt format
'''
"""Change search space from json format to hyperopt format
"""
y = list()
if isinstance(x, dict):
if NodeType.Type.value in x.keys():
......@@ -89,9 +88,8 @@ def json2space(x, oldy=None, name=NodeType.Root.value):
def json2paramater(x, is_rand, random_state, oldy=None, Rand=False, name=NodeType.Root.value):
'''
Json to pramaters.
'''
"""Json to pramaters.
"""
if isinstance(x, dict):
if NodeType.Type.value in x.keys():
_type = x[NodeType.Type.value]
......@@ -131,6 +129,16 @@ def json2paramater(x, is_rand, random_state, oldy=None, Rand=False, name=NodeTyp
def _split_index(params):
"""Delete index information from params
Parameters
----------
params : dict
Returns
-------
result : dict
"""
result = {}
for key in params:
if isinstance(params[key], dict):
......@@ -142,10 +150,19 @@ def _split_index(params):
class Individual(object):
'''
"""
Individual class to store the individual's information.
'''
"""
def __init__(self, config=None, info=None, result=None, save_dir=None):
"""
Parameters
----------
config : str
info : str
result : float
save_dir : str
"""
self.config = config
self.result = result
self.info = info
......@@ -157,6 +174,13 @@ class Individual(object):
", config :" + str(self.config) + ", result: " + str(self.result)
def mutation(self, config=None, info=None, save_dir=None):
"""
Parameters
----------
config : str
info : str
save_dir : str
"""
self.result = None
self.config = config
self.restore_dir = self.save_dir
......@@ -165,13 +189,22 @@ class Individual(object):
class EvolutionTuner(Tuner):
'''
EvolutionTuner is tuner using evolution algorithm.
'''
"""
EvolutionTuner is a tuner using a naive evolution algorithm.
"""
def __init__(self, optimize_mode, population_size=32):
"""
Parameters
----------
optimize_mode : str
population_size : int
initial population size. The larger the population size,
the better the evolution performance.
"""
self.optimize_mode = OptimizeMode(optimize_mode)
self.population_size = population_size
self.trial_result = []
self.searchspace_json = None
self.total_data = {}
......@@ -180,10 +213,13 @@ class EvolutionTuner(Tuner):
self.space = None
def update_search_space(self, search_space):
'''
Update search space
search_space: search_space the json file that user pre-defined.
'''
"""Update search space.
Search_space contains the information that user pre-defined.
Parameters
----------
search_space : dict
"""
self.searchspace_json = search_space
self.space = json2space(self.searchspace_json)
......@@ -198,8 +234,15 @@ class EvolutionTuner(Tuner):
self.population.append(Individual(config=config))
def generate_parameters(self, parameter_id):
"""Returns a set of trial (hyper-)parameters, as a serializable object.
"""Returns a dict of trial (hyper-)parameters, as a serializable object.
Parameters
----------
parameter_id : int
Returns
-------
config : dict
"""
if not self.population:
raise RuntimeError('The population is empty')
......@@ -235,10 +278,14 @@ class EvolutionTuner(Tuner):
return config
def receive_trial_result(self, parameter_id, parameters, value):
'''
Record an observation of the objective function
parameters: dict of parameters
value: final metrics of the trial, including reward
'''Record the result from a trial
Parameters
----------
parameters: dict
value : dict/float
if value is a dict, it should contain the "default" key.
value is the final metric of the trial.
'''
reward = self.extract_scalar_reward(value)
if parameter_id not in self.total_data:
......
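As a usage sketch of the value convention documented above (assuming `tuner` is an already-constructed EvolutionTuner; the metric names other than "default" are made up):
# A trial may report either a bare float or a dict; per the docstring above,
# a dict must carry a "default" key, which extract_scalar_reward() turns into
# the scalar reward used by the evolution step.
tuner.receive_trial_result(parameter_id=0,
                           parameters={"learning_rate": 0.01},
                           value=0.93)
tuner.receive_trial_result(parameter_id=1,
                           parameters={"learning_rate": 0.001},
                           value={"default": 0.95, "val_loss": 0.21})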
......@@ -35,9 +35,9 @@ logger = logging.getLogger('hyperopt_AutoML')
@unique
class OptimizeMode(Enum):
'''
Oprimize Mode class
'''
"""
Optimize Mode including Minimize and Maximize
"""
Minimize = 'minimize'
Maximize = 'maximize'
......@@ -49,9 +49,16 @@ INDEX = '_index'
def json2space(in_x, name=ROOT):
'''
"""
Change json to search space in hyperopt.
'''
Parameters
----------
in_x : dict/list/str/int/float
The part of json.
name : str
name could be ROOT, TYPE, VALUE or INDEX.
"""
out_y = copy.deepcopy(in_x)
if isinstance(in_x, dict):
if TYPE in in_x.keys():
......@@ -78,9 +85,9 @@ def json2space(in_x, name=ROOT):
def json2parameter(in_x, parameter, name=ROOT):
'''
"""
Change json to parameters.
'''
"""
out_y = copy.deepcopy(in_x)
if isinstance(in_x, dict):
if TYPE in in_x.keys():
......@@ -133,6 +140,9 @@ def json2vals(in_x, vals, out_y, name=ROOT):
def _split_index(params):
"""
Delete index information from params
"""
result = {}
for key in params:
if isinstance(params[key], dict):
......@@ -144,11 +154,18 @@ def _split_index(params):
class HyperoptTuner(Tuner):
'''
"""
HyperoptTuner is a tuner that uses the hyperopt algorithm.
'''
"""
def __init__(self, algorithm_name, optimize_mode):
"""
Parameters
----------
algorithm_name : str
algorithm_name includes "tpe", "random_search" and anneal".
optimize_mode : str
"""
self.algorithm_name = algorithm_name
self.optimize_mode = OptimizeMode(optimize_mode)
self.json = None
......@@ -156,6 +173,12 @@ class HyperoptTuner(Tuner):
self.rval = None
def _choose_tuner(self, algorithm_name):
"""
Parameters
----------
algorithm_name : str
algorithm_name includes "tpe", "random_search" and anneal"
"""
if algorithm_name == 'tpe':
return hp.tpe.suggest
if algorithm_name == 'random_search':
......@@ -165,11 +188,15 @@ class HyperoptTuner(Tuner):
raise RuntimeError('Unsupported tuner algorithm in hyperopt.')
def update_search_space(self, search_space):
'''
"""
Update search space definition in tuner by search_space in parameters.
'''
#assert self.json is None
Will be called when the experiment is first set up or when the search space is updated in the WebUI.
Parameters
----------
search_space : dict
"""
self.json = search_space
search_space_instance = json2space(self.json)
rstate = np.random.RandomState()
......@@ -182,10 +209,17 @@ class HyperoptTuner(Tuner):
self.rval.catch_eval_exceptions = False
def generate_parameters(self, parameter_id):
'''
"""
Returns a set of trial (hyper-)parameters, as a serializable object.
Parameters
----------
parameter_id : int
'''
Returns
-------
params : dict
"""
rval = self.rval
trials = rval.trials
algorithm = rval.algo
......@@ -209,12 +243,17 @@ class HyperoptTuner(Tuner):
return params
def receive_trial_result(self, parameter_id, parameters, value):
'''
"""
Record an observation of the objective function
Parameters
----------
parameter_id : int
parameters : dict of parameters
value: final metrics of the trial, including reward
'''
parameters : dict
value : dict/float
if value is a dict, it should contain the "default" key.
value is the final metric of the trial.
"""
reward = self.extract_scalar_reward(value)
# restore the parameters that contain '_index'
if parameter_id not in self.total_data:
......@@ -262,13 +301,14 @@ class HyperoptTuner(Tuner):
def miscs_update_idxs_vals(self, miscs, idxs, vals,
assert_all_vals_used=True,
idxs_map=None):
'''
"""
Unpack the idxs-vals format into the list of dictionaries that is
`misc`.
idxs_map: a dictionary of id->id mappings so that the misc['idxs'] can
contain different numbers than the idxs argument. XXX CLARIFY
'''
idxs_map : dict
idxs_map is a dictionary of id->id mappings so that the misc['idxs'] can
contain different numbers than the idxs argument.
"""
if idxs_map is None:
idxs_map = {}
......
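The docstrings above describe the HyperoptTuner entry points; here is a minimal sketch of driving the tuner directly (in a real experiment NNI constructs the tuner from the experiment config instead, and the search space below is made up for illustration):
# Construct the tuner with one of the documented algorithms and feed it a
# search space, then exercise the generate/receive cycle once.
tuner = HyperoptTuner(algorithm_name="tpe", optimize_mode="maximize")
tuner.update_search_space({
    "learning_rate": {"_type": "loguniform", "_value": [1e-4, 1e-1]},
    "dropout_rate": {"_type": "uniform", "_value": [0.1, 0.5]},
})
params = tuner.generate_parameters(parameter_id=0)      # serializable dict
tuner.receive_trial_result(parameter_id=0,
                           parameters=params,
                           value={"default": 0.91})      # dict form needs a "default" key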
......@@ -45,9 +45,9 @@ logger = logging.getLogger("Metis_Tuner_AutoML")
@unique
class OptimizeMode(Enum):
'''
"""
Optimize Mode class
'''
"""
Minimize = 'minimize'
Maximize = 'maximize'
......@@ -59,27 +59,38 @@ CONSTRAINT_PARAMS_IDX = []
class MetisTuner(Tuner):
'''
"""
Metis Tuner
'''
More information about the algorithm can be found here:
https://www.microsoft.com/en-us/research/publication/metis-robustly-tuning-tail-latencies-cloud-systems/
"""
def __init__(self, optimize_mode="maximize", no_resampling=True, no_candidates=True,
selection_num_starting_points=10, cold_start_num=10):
'''
optimize_mode: is a string that including two mode "maximize" and "minimize"
no_resampling: True or False. Should Metis consider re-sampling as part of the search strategy?
"""
Parameters
----------
optimize_mode : str
optimize_mode is a string with two modes: "maximize" and "minimize"
no_resampling : bool
True or False. Should Metis consider re-sampling as part of the search strategy?
If you are confident that the training dataset is noise-free, then you do not need re-sampling.
no_candidates: True or False. Should Metis suggest parameters for the next benchmark?
no_candidates: bool
True or False. Should Metis suggest parameters for the next benchmark?
If you do not plan to do more benchmarks, Metis can skip this step.
selection_num_starting_points: how many times Metis should try to find the global optimal in the search space?
selection_num_starting_points: int
how many times Metis should try to find the global optimum in the search space.
The higher the number, the longer it takes to output the solution.
cold_start_num: Metis need some trial result to get cold start. when the number of trial result is less than
cold_start_num: int
Metis needs some trial results for the cold start. When the number of trial results is less than
cold_start_num, Metis will randomly sample hyper-parameters for trials.
'''
"""
self.samples_x = []
self.samples_y = []
self.samples_y_aggregation = []
......@@ -95,9 +106,12 @@ class MetisTuner(Tuner):
def update_search_space(self, search_space):
'''
Update the self.x_bounds and self.x_types by the search_space.json
'''
"""Update the self.x_bounds and self.x_types by the search_space.json
Parameters
----------
search_space : dict
"""
self.x_bounds = [[] for i in range(len(search_space))]
self.x_types = [NONE_TYPE for i in range(len(search_space))]
......@@ -152,9 +166,16 @@ class MetisTuner(Tuner):
def _pack_output(self, init_parameter):
'''
Pack the output
'''
"""Pack the output
Parameters
----------
init_parameter : dict
Returns
-------
output : dict
"""
output = {}
for i, param in enumerate(init_parameter):
output[self.key_order[i]] = param
......@@ -162,12 +183,19 @@ class MetisTuner(Tuner):
def generate_parameters(self, parameter_id):
'''
This function is for generate parameters to trial.
"""Generate next parameter for trial
If the number of trial results is lower than the cold start number,
Metis will first randomly generate some parameters.
Otherwise, Metis will choose the parameters using the Gaussian Process Model and the Gaussian Mixture Model.
'''
Parameters
----------
parameter_id : int
Returns
-------
result : dict
"""
if not self.samples_x or len(self.samples_x) < self.cold_start_num:
init_parameter = _rand_init(self.x_bounds, self.x_types, 1)[0]
results = self._pack_output(init_parameter)
......@@ -184,11 +212,15 @@ class MetisTuner(Tuner):
def receive_trial_result(self, parameter_id, parameters, value):
'''
Tuner receive result from trial.
An value example as follow:
value: 99.5%
'''
"""Tuner receive result from trial.
Parameters
----------
parameter_id : int
parameters : dict
value : dict/float
if value is a dict, it should contain the "default" key.
"""
value = self.extract_scalar_reward(value)
if self.optimize_mode == OptimizeMode.Maximize:
value = -value
......@@ -434,9 +466,8 @@ def _rand_init(x_bounds, x_types, selection_num_starting_points):
def get_median(temp_list):
'''
Return median
'''
"""Return median
"""
num = len(temp_list)
temp_list.sort()
print(temp_list)
......
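A hedged sketch of constructing MetisTuner with the parameters documented above; the values shown simply restate the documented defaults for illustration.
# Each argument mirrors the constructor docstring in the diff above.
tuner = MetisTuner(optimize_mode="maximize",
                   no_resampling=True,                # training data assumed noise-free
                   no_candidates=True,                # skip next-benchmark suggestions
                   selection_num_starting_points=10,  # restarts when searching for the optimum
                   cold_start_num=10)                 # random sampling until 10 results exist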
......@@ -52,20 +52,35 @@ class NetworkMorphismTuner(Tuner):
default_model_width=Constant.MODEL_WIDTH,
):
""" initilizer of the NetworkMorphismTuner.
Keyword Arguments:
task {str} -- [task mode, such as "cv","common" etc.] (default: {"cv"})
input_width {int} -- [input sample shape] (default: {32})
input_channel {int} -- [input sample shape] (default: {3})
n_output_node {int} -- [output node number] (default: {10})
algorithm_name {str} -- [algorithm name used in the network morphism] (default: {"Bayesian"})
optimize_mode {str} -- [optimize mode "minimize" or "maximize"] (default: {"minimize"})
path {str} -- [default mode path to save the model file] (default: {"model_path"})
verbose {bool} -- [verbose to print the log] (default: {True})
beta {float} -- [The beta in acquisition function. (refer to our paper)] (default: {Constant.BETA})
t_min {float} -- [The minimum temperature for simulated annealing.] (default: {Constant.T_MIN})
max_model_size {int} -- [max model size to the graph] (default: {Constant.MAX_MODEL_SIZE})
default_model_len {int} -- [default model length] (default: {Constant.MODEL_LEN})
default_model_width {int} -- [default model width] (default: {Constant.MODEL_WIDTH})
Parameters
----------
task : str
task mode, such as "cv","common" etc. (default: {"cv"})
input_width : int
input sample shape (default: {32})
input_channel : int
input sample shape (default: {3})
n_output_node : int
output node number (default: {10})
algorithm_name : str
algorithm name used in the network morphism (default: {"Bayesian"})
optimize_mode : str
optimize mode "minimize" or "maximize" (default: {"minimize"})
path : str
default mode path to save the model file (default: {"model_path"})
verbose : bool
verbose to print the log (default: {True})
beta : float
The beta in acquisition function. (default: {Constant.BETA})
t_min : float
The minimum temperature for simulated annealing. (default: {Constant.T_MIN})
max_model_size : int
max model size to the graph (default: {Constant.MAX_MODEL_SIZE})
default_model_len : int
default model length (default: {Constant.MODEL_LEN})
default_model_width : int
default model width (default: {Constant.MODEL_WIDTH})
"""
if not os.path.exists(path):
......@@ -92,8 +107,6 @@ class NetworkMorphismTuner(Tuner):
self.bo = BayesianOptimizer(self, self.t_min, self.optimize_mode, self.beta)
self.training_queue = []
# self.x_queue = []
# self.y_queue = []
self.descriptors = []
self.history = []
......@@ -112,6 +125,9 @@ class NetworkMorphismTuner(Tuner):
def generate_parameters(self, parameter_id):
"""
Returns a trial neural architecture, as a serializable object.
Parameters
----------
parameter_id : int
"""
if not self.history:
......@@ -137,14 +153,14 @@ class NetworkMorphismTuner(Tuner):
def receive_trial_result(self, parameter_id, parameters, value):
""" Record an observation of the objective function.
Arguments:
Parameters
----------
parameter_id : int
parameters : dict of parameters
value: final metrics of the trial, including reward
Raises:
RuntimeError -- Received parameter_id not in total_data.
parameters : dict
value : dict/float
if value is a dict, it should contain the "default" key.
"""
reward = self.extract_scalar_reward(value)
if parameter_id not in self.total_data:
......@@ -176,9 +192,13 @@ class NetworkMorphismTuner(Tuner):
def generate(self):
"""Generate the next neural architecture.
Returns:
other_info: Anything to be saved in the training queue together with the architecture.
generated_graph: An instance of Graph.
Returns
-------
other_info: any object
Anything to be saved in the training queue together with the architecture.
generated_graph: Graph
An instance of Graph.
"""
generated_graph, new_father_id = self.bo.generate(self.descriptors)
if new_father_id is None:
......@@ -191,11 +211,16 @@ class NetworkMorphismTuner(Tuner):
def update(self, other_info, graph, metric_value, model_id):
""" Update the controller with evaluation result of a neural architecture.
Args:
other_info: Anything. In our case it is the father ID in the search tree.
graph: An instance of Graph. The trained neural architecture.
metric_value: The final evaluated metric value.
model_id: An integer.
Parameters
----------
other_info: any object
In our case it is the father ID in the search tree.
graph: Graph
An instance of Graph. The trained neural architecture.
metric_value: float
The final evaluated metric value.
model_id: int
"""
father_id = other_info
self.bo.fit([graph.extract_descriptor()], [metric_value])
......@@ -204,15 +229,16 @@ class NetworkMorphismTuner(Tuner):
def add_model(self, metric_value, model_id):
""" Add model to the history, x_queue and y_queue
Arguments:
metric_value: int --metric_value
graph: dict -- graph
model_id: int -- model_id
Parameters
----------
metric_value : float
graph : dict
model_id : int
Returns:
model dict
Returns
-------
model : dict
"""
if self.verbose:
logger.info("Saving model.")
......@@ -223,16 +249,10 @@ class NetworkMorphismTuner(Tuner):
file = open(os.path.join(self.path, "best_model.txt"), "w")
file.write("best model: " + str(model_id))
file.close()
# descriptor = graph.extract_descriptor()
# self.x_queue.append(descriptor)
# self.y_queue.append(metric_value)
return ret
def get_best_model_id(self):
""" Get the best model_id from history using the metric value
Returns:
int -- the best model_id
"""
if self.optimize_mode is OptimizeMode.Maximize:
......@@ -241,10 +261,16 @@ class NetworkMorphismTuner(Tuner):
def load_model_by_id(self, model_id):
"""Get the model by model_id
Arguments:
model_id {int} -- model index
Returns:
Graph -- the model graph representation
Parameters
----------
model_id : int
model index
Returns
-------
load_model : Graph
the model graph representation
"""
with open(os.path.join(self.path, str(model_id) + ".json")) as fin:
......@@ -255,17 +281,26 @@ class NetworkMorphismTuner(Tuner):
def load_best_model(self):
""" Get the best model by model id
Returns:
Graph -- the best model graph representation
Returns
-------
load_model : Graph
the model graph representation
"""
return self.load_model_by_id(self.get_best_model_id())
def get_metric_value_by_id(self, model_id):
""" Get the model metric valud by its model_id
Arguments:
model_id {int} -- model index
Returns:
float -- the model metric
Parameters
----------
model_id : int
model index
Returns
-------
float
the model metric
"""
for item in self.history:
if item["model_id"] == model_id:
......
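As a usage sketch of the model-lookup helpers documented above (assuming `tuner` is a NetworkMorphismTuner whose history already holds evaluated models):
# get_best_model_id() scans the history for the best metric value;
# load_best_model() is the same lookup followed by load_model_by_id().
best_id = tuner.get_best_model_id()
best_graph = tuner.load_model_by_id(best_id)      # Graph read from "<path>/<best_id>.json"
best_metric = tuner.get_metric_value_by_id(best_id)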