Commit 34950963 authored by Lee's avatar Lee Committed by xuehui
Browse files

Fix for MetisTuner (#935)

* Fix Metis bug

remove unexpected logger

* Increase 'alpha' parameter

Increase the 'alpha' parameter of the GaussianProcessRegressor to ensure the covariance matrix is positive definite.

* Increase 'alpha' parameter

Increase the 'alpha' parameter of the GaussianProcessRegressor to ensure the covariance matrix is positive definite.

* Increase the probability from 0.1 to 0.9

Increase the exploration probability from 0.1 to 0.9 when selecting the next config.

* Update metis_tuner.py

Use local names instead of global names.
parent c58a7c7d
...@@ -40,7 +40,7 @@ def create_model(samples_x, samples_y_aggregation, ...@@ -40,7 +40,7 @@ def create_model(samples_x, samples_y_aggregation,
regressor = gp.GaussianProcessRegressor(kernel=kernel, regressor = gp.GaussianProcessRegressor(kernel=kernel,
n_restarts_optimizer=n_restarts_optimizer, n_restarts_optimizer=n_restarts_optimizer,
normalize_y=True, normalize_y=True,
alpha=0) alpha=1e-10)
regressor.fit(numpy.array(samples_x), numpy.array(samples_y_aggregation)) regressor.fit(numpy.array(samples_x), numpy.array(samples_y_aggregation))
model = {} model = {}
......
...@@ -65,7 +65,7 @@ class MetisTuner(Tuner): ...@@ -65,7 +65,7 @@ class MetisTuner(Tuner):
""" """
def __init__(self, optimize_mode="maximize", no_resampling=True, no_candidates=True, def __init__(self, optimize_mode="maximize", no_resampling=True, no_candidates=True,
selection_num_starting_points=600, cold_start_num=10, exploration_probability=0.1): selection_num_starting_points=600, cold_start_num=10, exploration_probability=0.9):
""" """
Parameters Parameters
---------- ----------
...@@ -126,12 +126,7 @@ class MetisTuner(Tuner): ...@@ -126,12 +126,7 @@ class MetisTuner(Tuner):
for key in search_space: for key in search_space:
key_type = search_space[key]['_type'] key_type = search_space[key]['_type']
key_range = search_space[key]['_value'] key_range = search_space[key]['_value']
try: idx = self.key_order.index(key) )
idx = self.key_order.index(key)
except Exception as ex:
logger.exception(ex)
raise RuntimeError("The format search space contains \
some key that didn't define in key_order." )
if key_type == 'quniform': if key_type == 'quniform':
if key_range[2] == 1: if key_range[2] == 1:
...@@ -271,7 +266,6 @@ class MetisTuner(Tuner): ...@@ -271,7 +266,6 @@ class MetisTuner(Tuner):
samples_size_unique = len(samples_y) samples_size_unique = len(samples_y)
# ===== STEP 1: Compute the current optimum ===== # ===== STEP 1: Compute the current optimum =====
#sys.stderr.write("[%s] Predicting the optimal configuration from the current training dataset...\n" % (os.path.basename(__file__)))
gp_model = gp_create_model.create_model(samples_x, samples_y_aggregation) gp_model = gp_create_model.create_model(samples_x, samples_y_aggregation)
lm_current = gp_selection.selection( lm_current = gp_selection.selection(
"lm", "lm",
...@@ -291,8 +285,6 @@ class MetisTuner(Tuner): ...@@ -291,8 +285,6 @@ class MetisTuner(Tuner):
'reason': "exploitation_gp"}) 'reason': "exploitation_gp"})
# ===== STEP 2: Get recommended configurations for exploration ===== # ===== STEP 2: Get recommended configurations for exploration =====
#sys.stderr.write("[%s] Getting candidates for exploration...\n"
#% \(os.path.basename(__file__)))
results_exploration = gp_selection.selection( results_exploration = gp_selection.selection(
"lc", "lc",
samples_y_aggregation, samples_y_aggregation,
...@@ -309,15 +301,11 @@ class MetisTuner(Tuner): ...@@ -309,15 +301,11 @@ class MetisTuner(Tuner):
'expected_sigma': results_exploration['expected_sigma'], 'expected_sigma': results_exploration['expected_sigma'],
'reason': "exploration"}) 'reason': "exploration"})
logger.info("DEBUG: 1 exploration candidate selected\n") logger.info("DEBUG: 1 exploration candidate selected\n")
#sys.stderr.write("[%s] DEBUG: 1 exploration candidate selected\n" % (os.path.basename(__file__)))
else: else:
logger.info("DEBUG: No suitable exploration candidates were") logger.info("DEBUG: No suitable exploration candidates were")
# sys.stderr.write("[%s] DEBUG: No suitable exploration candidates were \
# found\n" % (os.path.basename(__file__)))
# ===== STEP 3: Get recommended configurations for exploitation ===== # ===== STEP 3: Get recommended configurations for exploitation =====
if samples_size_all >= threshold_samplessize_exploitation: if samples_size_all >= threshold_samplessize_exploitation:
#sys.stderr.write("[%s] Getting candidates for exploitation...\n" % (os.path.basename(__file__)))
print("Getting candidates for exploitation...\n") print("Getting candidates for exploitation...\n")
try: try:
gmm = gmm_create_model.create_model(samples_x, samples_y_aggregation) gmm = gmm_create_model.create_model(samples_x, samples_y_aggregation)
...@@ -385,13 +373,6 @@ class MetisTuner(Tuner): ...@@ -385,13 +373,6 @@ class MetisTuner(Tuner):
temp_improvement = threads_result['expected_lowest_mu'] - lm_current['expected_mu'] temp_improvement = threads_result['expected_lowest_mu'] - lm_current['expected_mu']
if next_improvement > temp_improvement: if next_improvement > temp_improvement:
logger.info("DEBUG: \"next_candidate\" changed: \
lowest mu might reduce from %f (%s) to %f (%s), %s\n" %\
lm_current['expected_mu'], str(lm_current['hyperparameter']),\
threads_result['expected_lowest_mu'],\
str(threads_result['candidate']['hyperparameter']),\
threads_result['candidate']['reason'])
next_improvement = temp_improvement next_improvement = temp_improvement
next_candidate = threads_result['candidate'] next_candidate = threads_result['candidate']
else: else:
...@@ -415,7 +396,7 @@ class MetisTuner(Tuner): ...@@ -415,7 +396,7 @@ class MetisTuner(Tuner):
if next_candidate is not None: if next_candidate is not None:
outputs = self._pack_output(next_candidate['hyperparameter']) outputs = self._pack_output(next_candidate['hyperparameter'])
else: else:
random_parameter = _rand_init(self.x_bounds, self.x_types, 1)[0] random_parameter = _rand_init(x_bounds, x_types, 1)[0]
outputs = self._pack_output(random_parameter) outputs = self._pack_output(random_parameter)
self.history_parameters.append(outputs) self.history_parameters.append(outputs)
return outputs return outputs
...@@ -443,8 +424,6 @@ def _rand_with_constraints(x_bounds, x_types): ...@@ -443,8 +424,6 @@ def _rand_with_constraints(x_bounds, x_types):
def _calculate_lowest_mu_threaded(inputs): def _calculate_lowest_mu_threaded(inputs):
[candidate, samples_x, samples_y, x_bounds, x_types, minimize_constraints_fun, minimize_starting_points] = inputs [candidate, samples_x, samples_y, x_bounds, x_types, minimize_constraints_fun, minimize_starting_points] = inputs
sys.stderr.write("[%s] Evaluating information gain of %s (%s)...\n" % \
(os.path.basename(__file__), candidate['hyperparameter'], candidate['reason']))
outputs = {"candidate": candidate, "expected_lowest_mu": None} outputs = {"candidate": candidate, "expected_lowest_mu": None}
for expected_mu in [candidate['expected_mu'] + 1.96 * candidate['expected_sigma'], for expected_mu in [candidate['expected_mu'] + 1.96 * candidate['expected_sigma'],
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment