Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Parameter Typing #430

Closed
wants to merge 5 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions bayes_opt/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from .util import UtilityFunction
from .logger import ScreenLogger, JSONLogger
from .constraint import ConstraintModel
from .parameter import BayesParameter

__all__ = [
"BayesianOptimization",
Expand All @@ -12,5 +13,6 @@
"ScreenLogger",
"JSONLogger",
"SequentialDomainReductionTransformer",
"BayesParameter"
]

52 changes: 23 additions & 29 deletions bayes_opt/bayesian_optimization.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,18 @@
import warnings

from bayes_opt.constraint import ConstraintModel

from .target_space import TargetSpace
from .event import Events, DEFAULT_EVENTS
from .logger import _get_default_logger
from .util import UtilityFunction, acq_max, ensure_rng

from sklearn.gaussian_process.kernels import Matern
from sklearn.gaussian_process import GaussianProcessRegressor
from .parameter import wrap_kernel
from .domain_reduction import DomainTransformer


class Queue:

def __init__(self):
self._queue = []

Expand Down Expand Up @@ -123,15 +124,6 @@ def __init__(self,
self._allow_duplicate_points = allow_duplicate_points
self._queue = Queue()

# Internal GP regressor
self._gp = GaussianProcessRegressor(
kernel=Matern(nu=2.5),
alpha=1e-6,
normalize_y=True,
n_restarts_optimizer=5,
random_state=self._random_state,
)

if constraint is None:
# Data structure containing the function to be optimized, the
# bounds of its domain, and a record of the evaluations we have
Expand All @@ -140,26 +132,28 @@ def __init__(self,
allow_duplicate_points=self._allow_duplicate_points)
self.is_constrained = False
else:
constraint_ = ConstraintModel(
constraint.fun,
constraint.lb,
constraint.ub,
random_state=random_state
)
self._space = TargetSpace(
f,
pbounds,
constraint=constraint_,
random_state=random_state
)
self._space = TargetSpace(f,
pbounds,
constraint=constraint,
random_state=random_state)
self.is_constrained = True

# Internal GP regressor
self._gp = GaussianProcessRegressor(
kernel=wrap_kernel(Matern(nu=2.5),
transform=self._space.kernel_transform),
alpha=1e-6,
normalize_y=True,
n_restarts_optimizer=5,
random_state=self._random_state,
)

self._verbose = verbose
self._bounds_transformer = bounds_transformer
if self._bounds_transformer:
try:
self._bounds_transformer.initialize(self._space)
except (AttributeError, TypeError):
except (AttributeError, TypeError) as e:
raise TypeError('The transformer must be an instance of '
'DomainTransformer')

Expand Down Expand Up @@ -227,9 +221,8 @@ def suggest(self, utility_function):
gp=self._gp,
constraint=self.constraint,
y_max=self._space._target_max(),
bounds=self._space.bounds,
bounds=self._space.float_bounds,
random_state=self._random_state)

return self._space.array_to_params(suggestion)

def _prime_queue(self, init_points):
Expand All @@ -238,7 +231,8 @@ def _prime_queue(self, init_points):
init_points = max(init_points, 1)

for _ in range(init_points):
self._queue.add(self._space.random_sample())
self._queue.add(
self._space.array_to_params(self._space.random_sample()))

def _prime_subscriptions(self):
if not any([len(subs) for subs in self._events.values()]):
Expand Down Expand Up @@ -312,8 +306,8 @@ def maximize(self,
if self._bounds_transformer and iteration > 0:
# The bounds transformer should only modify the bounds after
# the init_points points (only for the true iterations)
self.set_bounds(
self._bounds_transformer.transform(self._space))
self.set_bounds(self._bounds_transformer.transform(
self._space))

self.dispatch(Events.OPTIMIZATION_END)

Expand Down
6 changes: 3 additions & 3 deletions bayes_opt/constraint.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
from sklearn.gaussian_process.kernels import Matern
from sklearn.gaussian_process import GaussianProcessRegressor
from scipy.stats import norm

from .parameter import wrap_kernel

class ConstraintModel():
"""
Expand Down Expand Up @@ -37,7 +37,7 @@ class ConstraintModel():
is simply the product of the individual probabilities.
"""

def __init__(self, fun, lb, ub, random_state=None):
def __init__(self, fun, lb, ub, transform=None, random_state=None):
self.fun = fun

self._lb = np.atleast_1d(lb)
Expand All @@ -48,7 +48,7 @@ def __init__(self, fun, lb, ub, random_state=None):
raise ValueError(msg)

basis = lambda: GaussianProcessRegressor(
kernel=Matern(nu=2.5),
kernel=wrap_kernel(Matern(nu=2.5), transform) if transform is not None else Matern(nu=2.5),
alpha=1e-6,
normalize_y=True,
n_restarts_optimizer=5,
Expand Down
13 changes: 6 additions & 7 deletions bayes_opt/domain_reduction.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,19 +37,19 @@ def __init__(

def initialize(self, target_space: TargetSpace) -> None:
"""Initialize all of the parameters"""
self.original_bounds = np.copy(target_space.bounds)
self.original_bounds = np.copy(target_space.float_bounds)
self.bounds = [self.original_bounds]

# Set the minimum window to an array of length bounds
if isinstance(self.minimum_window_value, list) or isinstance(self.minimum_window_value, np.ndarray):
assert len(self.minimum_window_value) == len(target_space.bounds)
assert len(self.minimum_window_value) == len(target_space.float_bounds)
self.minimum_window = self.minimum_window_value
else:
self.minimum_window = [self.minimum_window_value] * len(target_space.bounds)
self.minimum_window = [self.minimum_window_value] * len(target_space.float_bounds)

self.previous_optimal = np.mean(target_space.bounds, axis=1)
self.current_optimal = np.mean(target_space.bounds, axis=1)
self.r = target_space.bounds[:, 1] - target_space.bounds[:, 0]
self.previous_optimal = np.mean(target_space.float_bounds, axis=1)
self.current_optimal = np.mean(target_space.float_bounds, axis=1)
self.r = target_space.float_bounds[:, 1] - target_space.float_bounds[:, 0]

self.previous_d = 2.0 * \
(self.current_optimal - self.previous_optimal) / self.r
Expand Down Expand Up @@ -135,7 +135,6 @@ def _create_bounds(self, parameters: dict, bounds: np.array) -> dict:
def transform(self, target_space: TargetSpace) -> dict:

self._update(target_space)

new_bounds = np.array(
[
self.current_optimal - 0.5 * self.r,
Expand Down
24 changes: 18 additions & 6 deletions bayes_opt/logger.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,9 @@
from .observer import _Tracker
from .event import Events
from .util import Colours
from .parameter import FloatParameter, IntParameter, CategoricalParameter

import numpy as np

def _get_default_logger(verbose, is_constrained):
return ScreenLogger(verbose=verbose, is_constrained=is_constrained)
Expand Down Expand Up @@ -67,7 +70,7 @@ def _format_bool(self, x):
)
return s

def _format_key(self, key):
def _format_str(self, key):
s = "{key:^{s}}".format(
key=key,
s=self._default_cell_size
Expand All @@ -87,20 +90,29 @@ def _step(self, instance, colour=Colours.black):


for key in instance.space.keys:
cells.append(self._format_number(res["params"][key]))
val = res["params"][key]
if type(instance.space._params_config[key]) == FloatParameter:
cells.append(self._format_number(val))
elif type(instance.space._params_config[key]) == IntParameter:
cells.append(self._format_number(val))
elif type(instance.space._params_config[key]) == CategoricalParameter:
cells.append(self._format_str(str(val)))
else:
raise TypeError


return "| " + " | ".join(map(colour, cells)) + " |"

def _header(self, instance):
cells = []
cells.append(self._format_key("iter"))
cells.append(self._format_key("target"))
cells.append(self._format_str("iter"))
cells.append(self._format_str("target"))

if self._is_constrained:
cells.append(self._format_key("allowed"))
cells.append(self._format_str("allowed"))

for key in instance.space.keys:
cells.append(self._format_key(key))
cells.append(self._format_str(key))

line = "| " + " | ".join(cells) + " |"
self._header_length = len(line)
Expand Down
133 changes: 133 additions & 0 deletions bayes_opt/parameter.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,133 @@
from typing import Callable
import numpy as np
from sklearn.gaussian_process import kernels
from inspect import signature


def is_numeric(value):
    """Return True when ``value`` is of a numeric (NumPy-compatible) type."""
    value_type = type(value)
    return np.issubdtype(value_type, np.number)


class BayesParameter():
    """Base class for a typed optimization parameter.

    Subclasses map between a parameter's native representation (float, int,
    category) and the float-vector representation the optimizer works in,
    and describe how the GP kernel should see the parameter.

    Parameters
    ----------
    name : str
        Name of the parameter.
    domain
        The region the parameter lives in: a ``(lower, upper)`` pair for
        numeric parameters or a sequence of categories, depending on the
        subclass.
    """

    def __init__(self, name: str, domain) -> None:
        self.name = name
        self.domain = domain

    @property
    def float_bounds(self):
        """Bounds of the parameter in float (optimizer) space."""
        # Previously these stubs were `pass`, silently returning None if a
        # subclass forgot to override them; fail loudly instead.
        raise NotImplementedError

    def to_float(self, value) -> np.ndarray:
        """Convert a native parameter value to its float-space representation."""
        raise NotImplementedError

    def to_param(self, value):
        """Convert a float-space representation back to a native value."""
        raise NotImplementedError

    def kernel_transform(self, value):
        """Map float-space values onto the representation the kernel sees."""
        raise NotImplementedError

    @property
    def dim(self) -> int:
        """Number of float dimensions this parameter occupies."""
        raise NotImplementedError


class FloatParameter(BayesParameter):
    """A continuous, real-valued parameter on a closed interval."""

    def __init__(self, name: str, domain) -> None:
        super().__init__(name, domain)

    @property
    def float_bounds(self):
        # Continuous parameters already live in optimizer space.
        return np.array(self.domain)

    def to_float(self, value) -> np.ndarray:
        # Identity mapping: the native representation is a float.
        return value

    def to_param(self, value):
        # Collapse any numpy scalar back to a plain Python float.
        return float(value)

    def kernel_transform(self, value):
        # The kernel sees continuous values unchanged.
        return value

    @property
    def dim(self) -> int:
        # One float dimension per continuous parameter.
        return 1


class IntParameter(BayesParameter):
    """An integer-valued parameter on an inclusive range."""

    def __init__(self, name: str, domain) -> None:
        super().__init__(name, domain)

    @property
    def float_bounds(self):
        # Widen each end by just under 0.5 so that, after rounding, every
        # integer in the range is sampled with (near-)uniform probability.
        lower, upper = self.domain[0], self.domain[1]
        return np.array([lower - 0.4999999, upper + 0.4999999])

    def to_float(self, value) -> np.ndarray:
        return float(value)

    def to_param(self, value):
        # Round back to the nearest integer when leaving float space.
        return int(np.round(np.squeeze(value)))

    def kernel_transform(self, value):
        # The GP kernel should see the rounded (integer-valued) float.
        return np.round(value)

    @property
    def dim(self) -> int:
        return 1


class CategoricalParameter(BayesParameter):
    """A parameter taking one of a fixed set of unordered categories.

    Encoded in optimizer space as a one-hot float vector of length
    ``len(domain)``.
    """

    def __init__(self, name: str, domain) -> None:
        super().__init__(name, domain)

    @property
    def float_bounds(self):
        # Each one-hot component is optimized on [0, 1]; decoding by argmax
        # then yields a uniform prior over the categories.
        lower = np.zeros(self.dim)
        upper = np.ones(self.dim)
        return np.vstack((lower, upper)).T

    def to_float(self, value) -> np.ndarray:
        """One-hot encode ``value``.

        Raises
        ------
        ValueError
            If ``value`` does not match exactly one category of the domain.
        """
        res = np.zeros(len(self.domain))
        one_hot_index = [i for i, val in enumerate(self.domain) if val == value]
        if len(one_hot_index) != 1:
            raise ValueError(
                f"Value {value!r} matched {len(one_hot_index)} categories, "
                "expected exactly one.")
        res[one_hot_index] = 1
        return res.astype(float)

    def to_param(self, value):
        # Decode by taking the category with the largest component.
        return self.domain[np.argmax(value)]

    def kernel_transform(self, value):
        """Snap each row of ``value`` to an exact one-hot vector.

        Fix: the argmax must be taken along the category axis (the last
        axis) and assigned per row. The previous ``np.argmax(value, axis=0)``
        computed column-wise argmaxes and used them as row indices, setting
        entire rows to 1.
        """
        value = np.atleast_2d(value)
        res = np.zeros(value.shape)
        res[np.arange(value.shape[0]), np.argmax(value, axis=-1)] = 1
        return res

    @property
    def dim(self) -> int:
        return len(self.domain)

def wrap_kernel(kernel: kernels.Kernel, transform: Callable) -> kernels.Kernel:
    """Return a copy of ``kernel`` that applies ``transform`` to its inputs.

    The wrapper subclasses the concrete kernel type and copies the original
    ``__init__`` signature so that sklearn's cloning/introspection machinery
    (which reads the constructor signature) keeps working.

    Parameters
    ----------
    kernel : kernels.Kernel
        The kernel instance to wrap; its hyperparameters are preserved.
    transform : Callable
        Mapping applied to the input arrays before kernel evaluation.
    """
    class WrappedKernel(type(kernel)):
        @copy_signature(getattr(kernel.__class__.__init__, "deprecated_original", kernel.__class__.__init__))
        def __init__(self, **kwargs) -> None:
            super().__init__(**kwargs)

        def __call__(self, X, Y=None, eval_gradient=False):
            # Fix: both inputs must live in the transformed space. The
            # original transformed only X, so K(X_test, X_train) compared
            # transformed against raw coordinates.
            X = transform(X)
            if Y is not None:
                Y = transform(Y)
            return super().__call__(X, Y, eval_gradient)

    return WrappedKernel(**kernel.get_params())

def copy_signature(source_fct):
    """Decorator factory that stamps ``source_fct``'s signature onto the target.

    Based on https://stackoverflow.com/a/58989918/
    """
    def decorator(target_fct):
        target_fct.__signature__ = signature(source_fct)
        return target_fct
    return decorator
Loading