hyper-engine — Issue: "Black box optimizer example"

Opened by KOLANICH 7 years ago · 2 comments

from functools import partial

import hyperengine
import numpy as np

def rosenbrock(hyperparams):
	"""Rosenbrock-style test objective over two hyperparameters.

	Expects a mapping with keys "x" and "y"; the global minimum (value 0)
	is at x = 1, y = 1.
	"""
	x = hyperparams["x"]
	y = hyperparams["y"]
	return (x - 1) ** 2 + 10 * (x ** 2 - y) ** 2

class BlackBoxSolver:
	"""Adapts an arbitrary zero-argument loss function into an object with
	the solver interface hyper-engine drives (``train`` / ``terminate``).
	"""

	def __init__(self, func):
		# Callable evaluated once per train() call; its result is the loss.
		self.func = func
		# History of every loss observed so far.
		self._val_loss_curve = []

	def train(self):
		"""Evaluate the wrapped function once; return the best loss so far."""
		self._val_loss_curve.append(self.func())
		return self._reducer(self._val_loss_curve)

	def _reducer(self, *args, **kwargs):
		# Collapse the loss history into a single score: the minimum seen.
		return np.min(*args, **kwargs)

	def terminate(self):
		# Plain function wrapper — nothing to clean up.
		pass

def solver_generator(hyperparams):
	"""Build a solver that evaluates ``rosenbrock`` at the sampled point.

	``partial`` binds the hyperparameter dict so the solver's ``train()``
	can call the objective with no arguments.  (Fix: ``partial`` was used
	here without ever being imported, which raised NameError on first call;
	it is now imported from ``functools`` at the top of the file.)
	"""
	return BlackBoxSolver(partial(rosenbrock, hyperparams))

class IterLimitedHyperTuner(hyperengine.HyperTuner):
	"""Workaround for hyper-engine's missing iteration limit: stop tuning
	after ``iterLimit`` solver constructions by raising StopIteration from
	the solver factory, then recover the best point seen.
	"""

	def __init__(self, hyper_params_spec, solver_generator, iterLimit, *args, **kwargs):
		# j counts how many solvers have been built so far.
		j=0
		def solver_generator_limited(hyperparams):
			nonlocal j
			if j<iterLimit:
				j+=1
				return solver_generator(hyperparams)
			else:
				# Deliberately abort the tuner's loop; caught in tune() below.
				raise StopIteration()
		
		super().__init__(hyper_params_spec, solver_generator_limited, *args, **kwargs)
	
	def tune(self):
		"""Run the tuning loop; on hitting the iteration limit, return the
		hyperparameter dict with the lowest observed loss.

		NOTE(review): returns None if the base tuner finishes without the
		limit being reached.  Also relies on hyper-engine internals
		(``self.strategy.values`` / ``.points`` and ``self.parsed._spec``,
		a private attribute) — confirm these against the installed version.
		"""
		try:
			super().tune()
		except StopIteration as ex:
			# Index of the evaluated point with the smallest loss.
			minLossPointNum=np.argmin(self.strategy.values)
			# Zip spec keys with the coordinates of the best point.
			return dict(zip(self.parsed._spec.keys(), self.strategy.points[minLossPointNum]))


# NOTE(review): `hp` was never defined in the original snippet (NameError);
# hyper-engine exposes its spec builders as `hyperengine.spec` — confirm
# against the installed version.
hp = hyperengine.spec

# Search both coordinates uniformly in [-10, 10].
spec = hp.new({
	"x": hp.uniform(-10, 10),
	"y": hp.uniform(-10, 10),
})
tuner = IterLimitedHyperTuner(spec, solver_generator, iterLimit=10, strategy='bayesian')  # alternative: 'portfolio'
# tune() returns the best hyperparameters once the iteration budget is hit;
# the original discarded this result silently.
best = tuner.tune()
print(best)

KOLANICH avatar Aug 20 '18 11:08 KOLANICH

@KOLANICH Can you describe this code please?

maxim5 avatar Aug 28 '18 07:08 maxim5

The lower part is a workaround for the missing iteration limit. The upper part is an adapter that turns any plain function into a HyperSolver-compatible object.

KOLANICH avatar Aug 28 '18 15:08 KOLANICH