Commit 0da6f470 authored by Lee, committed by xuehui

random search when suggested parameter appeared in the history (#734)

* fix some bugs in doc and log

* The learning rate focuses more on validation set accuracy than training set accuracy.

* update Dockerfile and README

* Update README.md

Merge to branch v0.5

* random search when suggested parameter occurs in the history

* avoid infinite loop when the search space is limited (see the sketch after these notes)

* follow the review comments

* add comments
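The change these notes describe follows a simple pattern: ask the configured algorithm for a suggestion and, if that exact parameter set already appears in the trial history, fall back to random sampling. Below is a minimal sketch of that pattern in plain Python, with a bounded retry so a small, exhaustible search space cannot loop forever. All names here (`SEARCH_SPACE`, `random_suggestion`, `suggest`) are hypothetical illustrations, not part of the NNI codebase; the real implementation is `get_suggestion` in the diff below.

```python
import random

# Hypothetical toy search space, for illustration only.
SEARCH_SPACE = {"lr": [0.1, 0.01, 0.001], "batch_size": [32, 64]}

def random_suggestion():
    """Sample one parameter set uniformly from the search space."""
    return {key: random.choice(values) for key, values in SEARCH_SPACE.items()}

def suggest(history, algorithm_suggestion, max_retries=10):
    """Return a parameter set, avoiding duplicates of `history` when possible.

    `algorithm_suggestion` stands in for the tuner's real algorithm (e.g. TPE).
    """
    params = algorithm_suggestion()
    retries = 0
    # Fall back to random search while the suggestion duplicates the history;
    # the retry cap keeps a limited search space from looping forever.
    while params in history and retries < max_retries:
        params = random_suggestion()
        retries += 1
    return params

# Example: the algorithm keeps proposing an already-tried point,
# so the random fallback kicks in.
history = [{"lr": 0.1, "batch_size": 32}]
print(suggest(history, algorithm_suggestion=lambda: {"lr": 0.1, "batch_size": 32}))
```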
parent d051a2f6
@@ -157,7 +157,7 @@ class HyperoptTuner(Tuner):
     """
     HyperoptTuner is a tuner which uses the hyperopt algorithm.
     """

     def __init__(self, algorithm_name, optimize_mode):
         """
         Parameters
@@ -192,7 +192,7 @@ class HyperoptTuner(Tuner):
         Update search space definition in tuner by search_space in parameters.
         Will be called when the experiment is first set up or when the search space is updated in the WebUI.

         Parameters
         ----------
         search_space : dict
@@ -211,7 +211,7 @@ class HyperoptTuner(Tuner):
     def generate_parameters(self, parameter_id):
         """
         Returns a set of trial (hyper-)parameters, as a serializable object.

         Parameters
         ----------
         parameter_id : int
@@ -220,24 +220,11 @@ class HyperoptTuner(Tuner):
         -------
         params : dict
         """
-        rval = self.rval
-        trials = rval.trials
-        algorithm = rval.algo
-        new_ids = rval.trials.new_trial_ids(1)
-        rval.trials.refresh()
-        random_state = rval.rstate.randint(2**31-1)
-        new_trials = algorithm(new_ids, rval.domain, trials, random_state)
-        rval.trials.refresh()
-        vals = new_trials[0]['misc']['vals']
-        parameter = dict()
-        for key in vals:
-            try:
-                parameter[key] = vals[key][0].item()
-            except Exception:
-                parameter[key] = None
-        # remove '_index' from json2parameter and save params-id
-        total_params = json2parameter(self.json, parameter)
+        total_params = self.get_suggestion(random_search=False)
+        # avoid generating the same parameters as concurrent trials, since hyperopt does not support parallel mode
+        if total_params in self.total_data.values():
+            # note: a duplicate parameter set can still occur, though rarely
+            total_params = self.get_suggestion(random_search=True)
         self.total_data[parameter_id] = total_params
         params = _split_index(total_params)
         return params
@@ -245,7 +232,7 @@ class HyperoptTuner(Tuner):
     def receive_trial_result(self, parameter_id, parameters, value):
         """
         Record an observation of the objective function.

         Parameters
         ----------
         parameter_id : int
@@ -305,8 +292,10 @@ class HyperoptTuner(Tuner):
         Unpack the idxs-vals format into the list of dictionaries that is
         `misc`.

+        Parameters
+        ----------
         idxs_map : dict
             idxs_map is a dictionary of id->id mappings so that the misc['idxs'] can
             contain different numbers than the idxs argument.
         """
         if idxs_map is None:
@@ -326,3 +315,40 @@ class HyperoptTuner(Tuner):
         if assert_all_vals_used or tid in misc_by_id:
             misc_by_id[tid]['idxs'][key] = [tid]
             misc_by_id[tid]['vals'][key] = [val]
+
+    def get_suggestion(self, random_search=False):
+        """Get a suggestion from hyperopt.
+
+        Parameters
+        ----------
+        random_search : bool
+            flag to indicate random search or not (default: False)
+
+        Returns
+        -------
+        total_params : dict
+            parameter suggestion
+        """
+        rval = self.rval
+        trials = rval.trials
+        algorithm = rval.algo
+        new_ids = rval.trials.new_trial_ids(1)
+        rval.trials.refresh()
+        random_state = rval.rstate.randint(2**31-1)
+        if random_search:
+            # fall back to hyperopt's plain random sampler
+            new_trials = hp.rand.suggest(new_ids, rval.domain, trials, random_state)
+        else:
+            new_trials = algorithm(new_ids, rval.domain, trials, random_state)
+        rval.trials.refresh()
+        vals = new_trials[0]['misc']['vals']
+        parameter = dict()
+        for key in vals:
+            try:
+                parameter[key] = vals[key][0].item()
+            except (KeyError, IndexError):
+                # conditional hyperparameters can yield an empty vals list
+                parameter[key] = None
+        # remove '_index' from json2parameter and save params-id
+        total_params = json2parameter(self.json, parameter)
+        return total_params
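For context on the fallback path above: in this file `hp` aliases the whole `hyperopt` package, so `hp.rand.suggest` is hyperopt's stock random-search proposal algorithm, called with the same `(new_ids, domain, trials, seed)` arguments as the configured algorithm (`rval.algo`, e.g. TPE). A minimal standalone illustration of the same `rand.suggest` entry point through hyperopt's public `fmin` API follows; it is independent of NNI, the toy space and objective are made up for the example, and note that here `hp` is hyperopt's own expression module rather than the package alias used in the diff.

```python
from hyperopt import Trials, fmin, hp, rand

# Toy search space and objective, purely for illustration.
space = {"lr": hp.loguniform("lr", -7, 0)}
trials = Trials()

# rand.suggest draws proposals uniformly at random from the space,
# the same role it plays as the duplicate-escape hatch in get_suggestion.
best = fmin(fn=lambda params: params["lr"], space=space,
            algo=rand.suggest, max_evals=5, trials=trials)
print(best)
```

Because random proposals ignore the model fitted to past trials, a single random retry usually escapes a repeated suggestion, though, as the inline comment in `generate_parameters` notes, it cannot guarantee uniqueness in a small search space.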