# Source code for elliot.hyperoptimization

"""
Module description:

"""

__version__ = '0.3.1'
__author__ = 'Vito Walter Anelli, Claudio Pomo'
__email__ = 'vitowalter.anelli@poliba.it, claudio.pomo@poliba.it'

from elliot.hyperoptimization.model_coordinator import ModelCoordinator
from hyperopt import tpe, atpe, mix, rand, anneal
import numpy as np

from hyperopt import pyll
from hyperopt.base import miscs_update_idxs_vals


def parse_algorithms(opt_alg):
    """Resolve an optimization-algorithm name to its hyperopt suggest callable.

    :param opt_alg: key such as ``"tpe"``, ``"atpe"``, ``"mix"``, ``"rand"``,
        ``"anneal"``, or ``"grid"``
    :return: the corresponding suggest function
    :raises KeyError: if ``opt_alg`` is not a known algorithm name
    """
    suggest_fn = _optimization_algorithms[opt_alg]
    return suggest_fn
def suggest(new_ids, domain, trials, seed, nbMaxSucessiveFailures=1000):
    """Grid-style hyperopt ``suggest`` that skips already-evaluated points.

    Randomly samples candidate configurations from ``domain`` and rejects any
    sample whose hyper-parameter values match a previous trial, so each
    returned trial document is unique.

    :param new_ids: iterable of trial ids to generate documents for
    :param domain: hyperopt ``Domain`` describing the search space
    :param trials: hyperopt ``Trials`` object holding past trials
    :param seed: seed for the NumPy random generator (reproducibility)
    :param nbMaxSucessiveFailures: give up after this many consecutive
        duplicate samples (the space is assumed exhausted)
    :return: list of new trial documents, or ``[]`` when no new sample
        could be produced
    """
    def _vals_hash(vals):
        # Hash the (name, first-value) pairs of a trial's hyper-parameter
        # assignment; an empty value list (inactive parameter) maps to None.
        return hash(frozenset(
            (key, value[0]) if len(value) > 0 else (key, None)
            for key, value in vals.items()
        ))

    # Hashes of every previously evaluated hyper-parameter assignment.
    hashset = {_vals_hash(trial['misc']['vals']) for trial in trials.trials}

    rng = np.random.RandomState(seed)
    rval = []
    for new_id in new_ids:
        newSample = False
        nbSucessiveFailures = 0
        while not newSample:
            # -- sample new specs, idxs, vals
            idxs, vals = pyll.rec_eval(
                domain.s_idxs_vals,
                memo={
                    domain.s_new_ids: [new_id],
                    domain.s_rng: rng,
                })
            new_result = domain.new_result()
            new_misc = dict(tid=new_id, cmd=domain.cmd, workdir=domain.workdir)
            miscs_update_idxs_vals([new_misc], idxs, vals)

            # Compare with previous hashes
            h = _vals_hash(vals)
            if h not in hashset:
                newSample = True
            else:
                # Duplicated sample, ignore
                nbSucessiveFailures += 1

            if nbSucessiveFailures > nbMaxSucessiveFailures:
                # No more samples to produce
                return []

        rval.extend(trials.new_trial_docs([new_id], [None],
                                          [new_result], [new_misc]))
    return rval
# Dispatch table: configuration name -> hyperopt suggest callable.
# "grid" maps to the local duplicate-aware suggest defined above.
_optimization_algorithms = {
    "tpe": tpe.suggest,
    "atpe": atpe.suggest,
    "mix": mix.suggest,
    "rand": rand.suggest,
    "anneal": anneal.suggest,
    "grid": suggest,
}