Commit 0da6f470 authored by Lee, committed by xuehui

random search when suggested parameter appeared in the history (#734)

* fix some bugs in doc and log

* The learning rate focuses more on validation set accuracy than on training set accuracy.

* update Dockerfile and README

* Update README.md

Merge to branch v0.5

* fall back to random search when the suggested parameter already occurs in the history

* avoid an infinite loop when the search space is limited (a toy sketch of this failure mode follows the commit metadata)

* follow the review comments

* add comments
parent d051a2f6
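The "infinite loop" bullet above points at a real hazard of de-duplicating suggestions: if a small, discrete search space has already been fully explored, a loop that keeps re-sampling until it finds an unseen configuration never terminates. The toy sketch below is for illustration only (the three-value space and the function names are made up, this is not NNI or hyperopt code); it contrasts that unbounded loop with the bounded, retry-once fallback this commit adopts.

```python
# Toy illustration only, not NNI or hyperopt code.
# Shows why "re-sample until unseen" can hang on an exhausted search space,
# and why a bounded fallback (one extra attempt, as in this commit) is safer.
import random

SEARCH_SPACE = ["a", "b", "c"]   # a tiny, fully enumerable space
history = {"a", "b", "c"}        # every candidate has already been tried

def suggest_unbounded():
    # Never returns once the space is exhausted: every candidate is in history.
    while True:
        candidate = random.choice(SEARCH_SPACE)
        if candidate not in history:
            return candidate

def suggest_bounded(max_retries=1):
    # Always terminates: at most `max_retries` extra draws,
    # accepting that the result may occasionally be a duplicate.
    candidate = random.choice(SEARCH_SPACE)
    for _ in range(max_retries):
        if candidate not in history:
            break
        candidate = random.choice(SEARCH_SPACE)
    return candidate

print(suggest_bounded())  # finishes immediately, possibly returning a duplicate
```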
@@ -220,24 +220,11 @@ class HyperoptTuner(Tuner):
         -------
         params : dict
         """
-        rval = self.rval
-        trials = rval.trials
-        algorithm = rval.algo
-        new_ids = rval.trials.new_trial_ids(1)
-        rval.trials.refresh()
-        random_state = rval.rstate.randint(2**31-1)
-        new_trials = algorithm(new_ids, rval.domain, trials, random_state)
-        rval.trials.refresh()
-        vals = new_trials[0]['misc']['vals']
-        parameter = dict()
-        for key in vals:
-            try:
-                parameter[key] = vals[key][0].item()
-            except Exception:
-                parameter[key] = None
-        # remove '_index' from json2parameter and save params-id
-        total_params = json2parameter(self.json, parameter)
+        total_params = self.get_suggestion(random_search=False)
+        # avoid generating the same parameter for concurrent trials, because hyperopt does not support parallel mode
+        if total_params in self.total_data.values():
+            # the random fallback can still rarely produce a duplicate parameter
+            total_params = self.get_suggestion(random_search=True)
         self.total_data[parameter_id] = total_params
         params = _split_index(total_params)
         return params
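The hunk above swaps the inline hyperopt plumbing in generate_parameters for a call to the new get_suggestion helper plus a duplicate check against the parameters already handed out to running trials. A minimal sketch of that check-then-retry-once shape, with hypothetical names (suggest_model_based, suggest_random, handed_out) standing in for the tuner's internals:

```python
# Sketch of the pattern introduced above; all names are hypothetical
# stand-ins, not actual HyperoptTuner attributes or methods.
def generate(parameter_id, handed_out, suggest_model_based, suggest_random):
    params = suggest_model_based()          # normal TPE/anneal suggestion
    if params in handed_out.values():       # already given to a concurrent trial?
        params = suggest_random()           # retry once with random search
    handed_out[parameter_id] = params       # remember what was handed out
    return params

# Example usage with dummy suggestion functions.
handed_out = {}
print(generate(0, handed_out,
               suggest_model_based=lambda: {"lr": 0.01},
               suggest_random=lambda: {"lr": 0.1}))
```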
@@ -305,6 +292,8 @@ class HyperoptTuner(Tuner):
         Unpack the idxs-vals format into the list of dictionaries that is
         `misc`.
+        Parameters
+        ----------
         idxs_map : dict
             idxs_map is a dictionary of id->id mappings so that the misc['idxs'] can
             contain different numbers than the idxs argument.
@@ -326,3 +315,40 @@ class HyperoptTuner(Tuner):
                 if assert_all_vals_used or tid in misc_by_id:
                     misc_by_id[tid]['idxs'][key] = [tid]
                     misc_by_id[tid]['vals'][key] = [val]
+
+    def get_suggestion(self, random_search=False):
+        """get suggestion from hyperopt
+
+        Parameters
+        ----------
+        random_search : bool
+            flag to indicate random search or not (default: {False})
+
+        Returns
+        ----------
+        total_params : dict
+            parameter suggestion
+        """
+        rval = self.rval
+        trials = rval.trials
+        algorithm = rval.algo
+        new_ids = rval.trials.new_trial_ids(1)
+        rval.trials.refresh()
+        random_state = rval.rstate.randint(2**31-1)
+        if random_search:
+            new_trials = hp.rand.suggest(new_ids, rval.domain, trials, random_state)
+        else:
+            new_trials = algorithm(new_ids, rval.domain, trials, random_state)
+        rval.trials.refresh()
+        vals = new_trials[0]['misc']['vals']
+        parameter = dict()
+        for key in vals:
+            try:
+                parameter[key] = vals[key][0].item()
+            except KeyError:
+                parameter[key] = None
+        # remove '_index' from json2parameter and save params-id
+        total_params = json2parameter(self.json, parameter)
+        return total_params
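get_suggestion drives hyperopt's suggestion functions directly through the Trials and Domain internals; judging from the call hp.rand.suggest above, hp appears to be an alias for the hyperopt package itself in this module, so that call is hyperopt's random-search algorithm. For orientation, the sketch below exercises the same two algorithms (random search and TPE) through hyperopt's public fmin API instead; it is an independent example of standard hyperopt usage, not NNI code.

```python
# Standalone hyperopt example (assumed standard usage, independent of NNI):
# the same two suggestion algorithms that get_suggestion toggles between.
from hyperopt import Trials, fmin, hp, rand, tpe

# A one-dimensional search space over the learning rate.
space = {"lr": hp.loguniform("lr", -7, 0)}

def objective(params):
    # Dummy objective: pretend smaller learning rates are simply better.
    return params["lr"]

# Model-based suggestion (the tuner's normal path).
best_tpe = fmin(objective, space, algo=tpe.suggest, max_evals=20, trials=Trials())

# Pure random search (the fallback path when a duplicate is detected).
best_rand = fmin(objective, space, algo=rand.suggest, max_evals=20, trials=Trials())

print(best_tpe, best_rand)
```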