Unverified Commit 487230d8 authored by liuzhe-lz's avatar liuzhe-lz Committed by GitHub
Browse files

Make TPE import data more robust (#4865)

parent 0ab4916a
...@@ -24,6 +24,7 @@ import numpy as np ...@@ -24,6 +24,7 @@ import numpy as np
from scipy.special import erf # pylint: disable=no-name-in-module from scipy.special import erf # pylint: disable=no-name-in-module
from typing_extensions import Literal from typing_extensions import Literal
import nni
from nni.common.hpo_utils import Deduplicator, OptimizeMode, format_search_space, deformat_parameters, format_parameters from nni.common.hpo_utils import Deduplicator, OptimizeMode, format_search_space, deformat_parameters, format_parameters
from nni.tuner import Tuner from nni.tuner import Tuner
from nni.utils import extract_scalar_reward from nni.utils import extract_scalar_reward
...@@ -201,11 +202,17 @@ class TpeTuner(Tuner): ...@@ -201,11 +202,17 @@ class TpeTuner(Tuner):
self._running_params.pop(parameter_id, None) self._running_params.pop(parameter_id, None)
def import_data(self, data):  # for resuming experiment
    """Replay finished trials into the tuner's history when resuming.

    ``data`` may arrive either as a JSON string or as a list of trial
    records; each record may itself be a JSON string or a dict carrying
    ``'parameter'`` and ``'value'`` entries.
    """
    if isinstance(data, str):
        data = nni.load(data)
    for trial in data:
        # tolerate per-trial JSON strings as well as already-parsed dicts
        if isinstance(trial, str):
            trial = nni.load(trial)
        formatted = format_parameters(trial['parameter'], self.space)
        metric = trial['value']
        # a dict-valued metric stores its scalar under the 'default' key
        if isinstance(metric, dict) and 'default' in metric:
            metric = metric['default']
        # TPE minimizes internally, so negate the metric when maximizing
        if self.optimize_mode is OptimizeMode.Maximize:
            metric = -metric
        for name, value in formatted.items():
            self._history[name].append(Record(value, metric))
    _logger.info(f'Replayed {len(data)} trials')
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment