I was able to solve this by overriding the BayesianOptimization
and BayesianOptimizationOracle
classes. It just names each trial "0", "1", "2", etc.
But it would be nice if this were more flexible, because I will probably end up doing this for the other hypertuner methods as well.
from kerastuner.engine import trial as trial_lib
from kerastuner.tuners import BayesianOptimization
# Oracle lives in the bayesian submodule; BayesianOptimization is re-imported
# here only to mirror the original paste (same class as the line above).
from kerastuner.tuners.bayesian import (BayesianOptimization,
                                        BayesianOptimizationOracle)
class CustomBayesianOptimizationOracle(BayesianOptimizationOracle):
    """Bayesian-optimization oracle that names trials "0", "1", "2", ...

    The stock oracle generates opaque hash IDs for trials; this subclass
    keeps a running integer counter (stored as a string in
    ``self.trial_id``, since keras-tuner treats trial IDs as strings)
    and uses it as the ID of every trial it creates.
    """

    def __init__(self,
                 objective,
                 max_trials,
                 num_initial_points=None,
                 alpha=1e-4,
                 beta=2.6,
                 seed=None,
                 hyperparameters=None,
                 allow_new_entries=True,
                 tune_new_entries=True):
        super(CustomBayesianOptimizationOracle, self).__init__(
            objective=objective,
            max_trials=max_trials,
            num_initial_points=num_initial_points,
            alpha=alpha,
            beta=beta,
            seed=seed,
            hyperparameters=hyperparameters,
            tune_new_entries=tune_new_entries,
            allow_new_entries=allow_new_entries)
        # Sequential trial counter; incremented at the end of each
        # create_trial() call.
        self.trial_id = '0'

    def create_trial(self, tuner_id):
        """Create a new `Trial` to be run by the `Tuner`.

        A `Trial` corresponds to a unique set of hyperparameters to be run
        by `Tuner.run_trial`.

        Args:
            tuner_id: An ID that identifies the `Tuner` requesting a
                `Trial`. `Tuners` that should run the same trial (for
                instance, when running a multi-worker model) should have
                the same ID.

        Returns:
            A `Trial` object containing a set of hyperparameter values to
            run in a `Tuner`.
        """
        # Allow for multi-worker DistributionStrategy within a Trial.
        if tuner_id in self.ongoing_trials:
            return self.ongoing_trials[tuner_id]

        if self.max_trials and len(self.trials) >= self.max_trials:
            # Trial budget exhausted: return a STOPPED trial with no values.
            status = trial_lib.TrialStatus.STOPPED
            values = None
        else:
            response = self._populate_space(self.trial_id)
            status = response['status']
            # 'values' may be absent when the oracle cannot propose a
            # point yet (e.g. it asks the tuner to wait or stop); .get()
            # avoids the original's double dict lookup.
            values = response.get('values')

        hyperparameters = self.hyperparameters.copy()
        hyperparameters.values = values or {}
        trial = trial_lib.Trial(
            hyperparameters=hyperparameters,
            trial_id=self.trial_id,
            status=status)

        if status == trial_lib.TrialStatus.RUNNING:
            self.ongoing_trials[tuner_id] = trial

        self.trials[self.trial_id] = trial
        self._save_trial(trial)
        self.save()
        # Advance the sequential counter for the next trial, even for
        # non-RUNNING trials (mirrors the upstream oracle's bookkeeping).
        self.trial_id = str(int(self.trial_id) + 1)
        return trial
class CustomBayesianOptimization(BayesianOptimization):
    """`BayesianOptimization` tuner wired to the sequential-ID oracle.

    Identical to the stock tuner except that it builds a
    `CustomBayesianOptimizationOracle`, so trials are named "0", "1",
    "2", ... instead of hashed IDs.
    """

    def __init__(self,
                 hypermodel,
                 objective,
                 max_trials,
                 num_initial_points=2,
                 seed=None,
                 hyperparameters=None,
                 tune_new_entries=True,
                 allow_new_entries=True,
                 **kwargs):
        custom_oracle = CustomBayesianOptimizationOracle(
            objective=objective,
            max_trials=max_trials,
            num_initial_points=num_initial_points,
            seed=seed,
            hyperparameters=hyperparameters,
            tune_new_entries=tune_new_entries,
            allow_new_entries=allow_new_entries)
        # NOTE: super(BayesianOptimization, self) deliberately skips
        # BayesianOptimization.__init__ — which would construct its own
        # oracle — and initializes the underlying Tuner base directly
        # with our custom oracle instead.
        super(BayesianOptimization, self).__init__(
            oracle=custom_oracle,
            hypermodel=hypermodel,
            **kwargs)
与恶龙缠斗过久,自身亦成为恶龙;凝视深渊过久,深渊将回以凝视…