Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Update ScipyOptimizer for Categorical, Ordinal and Constant. #66

Open
wants to merge 2 commits into
base: master
Choose a base branch
from
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
61 changes: 44 additions & 17 deletions openbox/acq_maximizer/ei_optimization.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,11 +12,15 @@
import random
import scipy.optimize
import numpy as np
from ConfigSpace import (
Configuration, ConfigurationSpace,
UniformIntegerHyperparameter, UniformFloatHyperparameter,
CategoricalHyperparameter, OrdinalHyperparameter, Constant,
)

from openbox import logger
from openbox.acquisition_function.acquisition import AbstractAcquisitionFunction
from openbox.utils.config_space import get_one_exchange_neighbourhood, \
Configuration, ConfigurationSpace
from openbox.utils.config_space import get_one_exchange_neighbourhood
from openbox.acq_maximizer.random_configuration_chooser import ChooserNoCoolDown, ChooserProb
from openbox.utils.history import History, MultiStartHistory
from openbox.utils.util_funcs import get_types
Expand Down Expand Up @@ -354,11 +358,11 @@ def _one_iter(
if len(time_n) == 0:
time_n.append(0.0)
logger.debug("Local search took %d steps and looked at %d "
"configurations. Computing the acquisition "
"value for one configuration took %f seconds"
" on average.",
local_search_steps, neighbors_looked_at,
np.mean(time_n))
"configurations. Computing the acquisition "
"value for one configuration took %f seconds"
" on average.",
local_search_steps, neighbors_looked_at,
np.mean(time_n))
break

return acq_val_incumbent, incumbent
Expand Down Expand Up @@ -575,9 +579,7 @@ def __init__(
super().__init__(acquisition_function, config_space, rng)
self.random_chooser = ChooserProb(prob=rand_prob, rng=rng)

types, bounds = get_types(self.config_space) # todo: support constant hp in scipy optimizer
assert all(types == 0), 'Scipy optimizer (L-BFGS-B) only supports Integer and Float parameters.'
self.bounds = bounds
self.bounds, self.discrete_dims = self._get_bounds(config_space)

options = dict(disp=False, maxiter=1000)
self.scipy_config = dict(tol=None, method='L-BFGS-B', options=options)
Expand All @@ -591,7 +593,8 @@ def maximize(

def negative_acquisition(x):
# shape of x = (d,)
x = np.clip(x, 0.0, 1.0) # fix numerical problem in L-BFGS-B
x = np.clip(x, self.bounds[:, 0], self.bounds[:, 1]) # fix numerical problem in L-BFGS-B
x[self.discrete_dims] = np.round(x[self.discrete_dims]) # support Categorical, Ordinal and Constant
try:
# self.config_space._check_forbidden(x)
Configuration(self.config_space, vector=x).is_valid_configuration()
Expand All @@ -611,10 +614,13 @@ def negative_acquisition(x):
x0=init_point,
bounds=self.bounds,
**self.scipy_config)

if not result.success:
logger.debug('Scipy optimizer failed. Info:\n%s' % (result,))
try:
x = np.clip(result.x, 0.0, 1.0) # fix numerical problem in L-BFGS-B
x = np.clip(result.x, self.bounds[:, 0], self.bounds[:, 1]) # fix numerical problem in L-BFGS-B
x[self.discrete_dims] = np.round(x[self.discrete_dims])  # support Categorical, Ordinal and Constant

config = Configuration(self.config_space, vector=x)
config.is_valid_configuration()
acq = self.acquisition_function(x, convert=False)
Expand All @@ -639,6 +645,26 @@ def _maximize(
) -> Iterable[Tuple[float, Configuration]]:
raise NotImplementedError()

@staticmethod
def _get_bounds(config_space):
    """Build per-dimension box bounds for the scipy (L-BFGS-B) optimizer.

    Operates on the configuration's internal vector representation:
    float/int hyperparameters are normalized to [0, 1], while
    categorical/ordinal dimensions hold the raw choice index.

    Parameters
    ----------
    config_space : ConfigurationSpace
        Space whose hyperparameters define the search dimensions.

    Returns
    -------
    bounds : np.ndarray, shape (n_dims, 2), dtype float64
        Per-dimension [lower, upper] bounds, usable as scipy ``bounds``.
    discrete_dims : list of int
        Indices of dimensions that must be rounded to integer values
        (Categorical, Ordinal and Constant hyperparameters).

    Raises
    ------
    TypeError
        If an unsupported hyperparameter type is encountered.
    """
    bounds = []
    discrete_dims = []
    for i, param in enumerate(config_space.get_hyperparameters()):
        if isinstance(param, (CategoricalHyperparameter, OrdinalHyperparameter, Constant)):
            discrete_dims.append(i)

        if isinstance(param, CategoricalHyperparameter):
            # vector value is the choice index in [0, num_choices - 1]
            bounds.append([0, param.num_choices - 1])
        elif isinstance(param, OrdinalHyperparameter):
            # vector value is the element index in [0, num_elements - 1]
            bounds.append([0, param.num_elements - 1])
        elif isinstance(param, Constant):
            # A Constant's vector value is always 0. Pin the dimension
            # exactly (L-BFGS-B accepts equal lower/upper bounds) instead
            # of the looser [-0.01, 0.01] round-to-0 hack, so the
            # optimizer never wastes steps on this dimension.
            bounds.append([0.0, 0.0])
        elif isinstance(param, (UniformFloatHyperparameter, UniformIntegerHyperparameter)):
            # numeric hyperparameters are normalized to the unit interval
            bounds.append([0.0, 1.0])
        else:
            raise TypeError("Unknown hyperparameter type %s" % type(param))
    return np.array(bounds, dtype=np.float64), discrete_dims


class RandomScipyOptimizer(AcquisitionFunctionMaximizer):
"""
Expand Down Expand Up @@ -693,7 +719,7 @@ def maximize(
success_count = 0
for config in scipy_initial_configs:
scipy_configs = self.scipy_optimizer.maximize(runhistory, initial_config=config).challengers
if not scipy_configs: # empty
if not scipy_configs: # empty
continue
scipy_acqs = self.acquisition_function(scipy_configs)
acq_configs.extend(zip(scipy_acqs, scipy_configs))
Expand Down Expand Up @@ -775,7 +801,8 @@ def negative_acquisition(x):
pass

if not acq_configs: # empty
logger.warning('Scipy differential evolution optimizer failed. Return empty config list. Info:\n%s' % (result,))
logger.warning(
'Scipy differential evolution optimizer failed. Return empty config list. Info:\n%s' % (result,))

challengers = ChallengerList([config for _, config in acq_configs],
self.config_space,
Expand Down Expand Up @@ -850,7 +877,7 @@ def gen_initial_points(self, num_restarts, raw_samples):
return random_points[idx]

def gen_batch_scipy_points(self, initial_points: np.ndarray):
#count = 0 # todo remove
# count = 0 # todo remove
def f(X_flattened):
# nonlocal count
# count += 1
Expand All @@ -869,14 +896,14 @@ def f(X_flattened):
bounds=bounds,
options=dict(maxiter=self.scipy_max_iter),
)
#print('count=', count) # todo remove
# print('count=', count) # todo remove

# return result.x even failed. may because 'STOP: TOTAL NO. of ITERATIONS REACHED LIMIT'
# if not result.success:
# logger.warning('Scipy minimizer %s failed in this round: %s.' % (self.method, result))
# return None

#print(result.x.reshape(shapeX)) # todo remove
# print(result.x.reshape(shapeX)) # todo remove
return result.x.reshape(shapeX)

def maximize(
Expand Down