Unverified commit e16c4019, authored by QuanluZhang, committed by GitHub

[BUG] import dict metric data in SMAC and BOHB (#2143)

parent 5eb95c2d
@@ -641,6 +641,7 @@ class BOHB(MsgDispatcherBase):
             if not _value:
                 logger.info("Useless trial data, value is %s, skip this trial data.", _value)
                 continue
+            _value = extract_scalar_reward(_value)
             budget_exist_flag = False
             barely_params = dict()
             for keys in _params:
...
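For reference, the helper called on the added line reduces whatever a trial reported to the single scalar the advisor optimizes. Below is a minimal sketch of such a helper, assuming the dict form of a metric keeps the optimized value under a "default" key; the signature and error message are illustrative, not necessarily NNI's exact implementation.

def extract_scalar_reward(value, scalar_key='default'):
    # Trials may report either a bare number or a dict such as
    # {"default": 1.2, "tmp": 2}; only the scalar_key entry is optimized.
    if isinstance(value, (float, int)):
        return value
    if isinstance(value, dict) and isinstance(value.get(scalar_key), (float, int)):
        return value[scalar_key]
    raise RuntimeError('Incorrect metric format: expected a number or a dict '
                       'with a numeric "%s" entry, got %s' % (scalar_key, value))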
@@ -309,6 +309,7 @@ class SMACTuner(Tuner):
             if not _value:
                 self.logger.info("Useless trial data, value is %s, skip this trial data.", _value)
                 continue
+            _value = extract_scalar_reward(_value)
             # convert the keys in loguniform and categorical types
             valid_entry = True
             for key, value in _params.items():
...
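Both metric shapes that SMAC and BOHB must now accept originate on the trial side. The snippet below is illustrative only (a real trial reports once), using the public nni.report_final_result API.

import nni

# a plain scalar metric
nni.report_final_result(0.93)

# or, equivalently after this fix, a dict metric whose "default" entry
# is the value to optimize; extra keys such as "tmp" are ignored
# nni.report_final_result({"default": 0.93, "tmp": 2})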
@@ -34,7 +34,7 @@ class BuiltinTunersTestCase(TestCase):
     - [ ] save_checkpoint
     - [X] update_search_space
     - [X] generate_multiple_parameters
-    - [ ] import_data
+    - [X] import_data
     - [ ] trial_end
     - [x] receive_trial_result
     """
@@ -141,50 +141,128 @@ class BuiltinTunersTestCase(TestCase):
         logger.info("Full supported search space: %s", full_supported_search_space)
         self.search_space_test_one(tuner_factory, full_supported_search_space)

+    def import_data_test(self, tuner_factory, stype="choice_str"):
+        """
+        Import data at the beginning with a number value and a dict value,
+        import data in the middle also with number and dict values plus a duplicate record,
+        then generate parameters after the data import.
+
+        Parameters
+        ----------
+        tuner_factory : lambda
+            a lambda that instantiates a tuner
+        stype : str
+            the value type of the choice hyperparameter; supports "choice_str" and "choice_num"
+        """
+        if stype == "choice_str":
+            search_space = {
+                "choice_str": {
+                    "_type": "choice",
+                    "_value": ["cat", "dog", "elephant", "cow", "sheep", "panda"]
+                }
+            }
+        elif stype == "choice_num":
+            search_space = {
+                "choice_num": {
+                    "_type": "choice",
+                    "_value": [10, 20, 30, 40, 50, 60]
+                }
+            }
+        else:
+            raise RuntimeError("Unexpected stype")
+        tuner = tuner_factory()
+        self.assertIsInstance(tuner, Tuner)
+        tuner.update_search_space(search_space)
+        # import data at the beginning
+        if stype == "choice_str":
+            data = [{"parameter": {"choice_str": "cat"}, "value": 1.1},
+                    {"parameter": {"choice_str": "dog"}, "value": {"default": 1.2, "tmp": 2}}]
+        else:
+            data = [{"parameter": {"choice_num": 20}, "value": 1.1},
+                    {"parameter": {"choice_num": 60}, "value": {"default": 1.2, "tmp": 2}}]
+        tuner.import_data(data)
+        logger.info("Imported data successfully at the beginning")
+        # generate parameters
+        parameters = tuner.generate_multiple_parameters(list(range(3)))
+        for i in range(3):
+            tuner.receive_trial_result(i, parameters[i], random.uniform(-100, 100))
+        # import data in the middle
+        if stype == "choice_str":
+            data = [{"parameter": {"choice_str": "cat"}, "value": 1.1},
+                    {"parameter": {"choice_str": "dog"}, "value": {"default": 1.2, "tmp": 2}},
+                    {"parameter": {"choice_str": "cow"}, "value": 1.3}]
+        else:
+            data = [{"parameter": {"choice_num": 20}, "value": 1.1},
+                    {"parameter": {"choice_num": 60}, "value": {"default": 1.2, "tmp": 2}},
+                    {"parameter": {"choice_num": 50}, "value": 1.3}]
+        tuner.import_data(data)
+        logger.info("Imported data successfully in the middle")
+        # generate parameters again
+        parameters = tuner.generate_multiple_parameters([3])
+        tuner.receive_trial_result(3, parameters[0], random.uniform(-100, 100))
+
     def test_grid_search(self):
         self.exhaustive = True
-        self.search_space_test_all(lambda: GridSearchTuner(),
+        tuner_fn = lambda: GridSearchTuner()
+        self.search_space_test_all(tuner_fn,
                                    supported_types=["choice", "randint", "quniform"])
+        self.import_data_test(tuner_fn)

     def test_tpe(self):
-        self.search_space_test_all(lambda: HyperoptTuner("tpe"),
+        tuner_fn = lambda: HyperoptTuner("tpe")
+        self.search_space_test_all(tuner_fn,
                                    ignore_types=["uniform_equal", "qloguniform_equal", "loguniform_equal", "quniform_clip_2"])
         # NOTE: types are ignored because `tpe.py line 465, in adaptive_parzen_normal assert prior_sigma > 0`
+        self.import_data_test(tuner_fn)

     def test_random_search(self):
-        self.search_space_test_all(lambda: HyperoptTuner("random_search"))
+        tuner_fn = lambda: HyperoptTuner("random_search")
+        self.search_space_test_all(tuner_fn)
+        self.import_data_test(tuner_fn)

     def test_anneal(self):
-        self.search_space_test_all(lambda: HyperoptTuner("anneal"))
+        tuner_fn = lambda: HyperoptTuner("anneal")
+        self.search_space_test_all(tuner_fn)
+        self.import_data_test(tuner_fn)

     def test_smac(self):
         if sys.platform == "win32":
             return # smac doesn't work on windows
-        self.search_space_test_all(lambda: SMACTuner(),
+        tuner_fn = lambda: SMACTuner()
+        self.search_space_test_all(tuner_fn,
                                    supported_types=["choice", "randint", "uniform", "quniform", "loguniform"])
+        self.import_data_test(tuner_fn)

     def test_batch(self):
         self.exhaustive = True
-        self.search_space_test_all(lambda: BatchTuner(),
+        tuner_fn = lambda: BatchTuner()
+        self.search_space_test_all(tuner_fn,
                                    supported_types=["choice"])
+        self.import_data_test(tuner_fn)

     def test_evolution(self):
         # Needs enough population size, otherwise it will throw a runtime error
-        self.search_space_test_all(lambda: EvolutionTuner(population_size=100))
+        tuner_fn = lambda: EvolutionTuner(population_size=100)
+        self.search_space_test_all(tuner_fn)
+        self.import_data_test(tuner_fn)

     def test_gp(self):
         self.test_round = 1 # NOTE: GP tuner got hanged for multiple testing round
-        self.search_space_test_all(lambda: GPTuner(),
+        tuner_fn = lambda: GPTuner()
+        self.search_space_test_all(tuner_fn,
                                    supported_types=["choice", "randint", "uniform", "quniform", "loguniform",
                                                     "qloguniform"],
                                    ignore_types=["normal", "lognormal", "qnormal", "qlognormal"],
                                    fail_types=["choice_str", "choice_mixed"])
+        self.import_data_test(tuner_fn, "choice_num")

     def test_metis(self):
         self.test_round = 1 # NOTE: Metis tuner got hanged for multiple testing round
-        self.search_space_test_all(lambda: MetisTuner(),
+        tuner_fn = lambda: MetisTuner()
+        self.search_space_test_all(tuner_fn,
                                    supported_types=["choice", "randint", "uniform", "quniform"],
                                    fail_types=["choice_str", "choice_mixed"])
+        self.import_data_test(tuner_fn, "choice_num")

     def test_networkmorphism(self):
         pass
...
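Outside the unit test, the same flow can be exercised directly against a built-in tuner. The sketch below mirrors import_data_test for one tuner; the HyperoptTuner import path is assumed for the NNI version this change targets.

import random
from nni.hyperopt_tuner.hyperopt_tuner import HyperoptTuner  # import path assumed

tuner = HyperoptTuner("tpe")
tuner.update_search_space({"choice_str": {"_type": "choice",
                                          "_value": ["cat", "dog", "elephant", "cow", "sheep", "panda"]}})
# Both scalar and dict metric values are accepted once the fix is in place.
tuner.import_data([{"parameter": {"choice_str": "cat"}, "value": 1.1},
                   {"parameter": {"choice_str": "dog"}, "value": {"default": 1.2, "tmp": 2}}])
params = tuner.generate_multiple_parameters([0])
tuner.receive_trial_result(0, params[0], random.uniform(-100, 100))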