Update docs & linting for constraints.py, target_space.py (#440)
* Run tests on any PR

* Update docs, linting

* Update bayes_opt/constraint.py

Co-authored-by: Leandro Braga <[email protected]>

* Rename mislabelled parameters

---------

Co-authored-by: Leandro Braga <[email protected]>
till-m and leandrobbraga authored Aug 13, 2023
1 parent 201c6b2 commit b9f16f2
Showing 3 changed files with 314 additions and 143 deletions.
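For context, `ConstraintModel` (in bayes_opt/constraint.py, diffed below) is the machinery behind the library's constrained optimization. The sketch below shows how it is typically driven end to end; it is not part of this commit, and the `constraint=` keyword plus the `scipy.optimize.NonlinearConstraint` wiring are assumptions based on the library's documented usage.

import numpy as np
from scipy.optimize import NonlinearConstraint
from bayes_opt import BayesianOptimization

def target(x, y):
    # Toy objective to maximize.
    return -(x ** 2 + (y - 1) ** 2)

def constraint_fun(x, y):
    # Must accept the same argument names as the target
    # (see the ConstraintModel docstring in the diff below).
    return x ** 2 + y ** 2

# Feasible region: constraint_fun(x, y) <= 1.0.
constraint = NonlinearConstraint(constraint_fun, -np.inf, 1.0)

optimizer = BayesianOptimization(
    f=target,
    constraint=constraint,
    pbounds={"x": (-2, 2), "y": (-2, 2)},
    random_state=1,
)
optimizer.maximize(init_points=2, n_iter=5)
print(optimizer.max)  # best feasible observation found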
1 change: 0 additions & 1 deletion .github/workflows/run_tests.yml
Expand Up @@ -7,7 +7,6 @@ on:
push:
branches: [ "master" ]
pull_request:
branches: [ "master" ]

permissions:
contents: read
Expand Down
174 changes: 121 additions & 53 deletions bayes_opt/constraint.py
@@ -1,46 +1,40 @@
"""Constraint handling."""
import numpy as np
from sklearn.gaussian_process.kernels import Matern
from sklearn.gaussian_process import GaussianProcessRegressor
from scipy.stats import norm


class ConstraintModel():
"""
"""Model constraints using GPRs.
This class takes the function to optimize as well as the parameters bounds
in order to find which values for the parameters yield the maximum value
using bayesian optimization.
Parameters
----------
fun: function
Constraint function. If multiple constraints are handled, this should
return a numpy.ndarray of appropriate size.
lb: numeric or numpy.ndarray
Upper limit(s) for the constraints. The return value of `fun` should
have exactly this shape.
ub: numeric or numpy.ndarray
Upper limit(s) for the constraints. The return value of `fun` should
have exactly this shape.
random_state: int or numpy.random.RandomState, optional(default=None)
If the value is an integer, it is used as the seed for creating a
numpy.random.RandomState. Otherwise the random state provided is used.
When set to None, an unseeded random state is generated.
Note
----
In case of multiple constraints, this model assumes conditional
independence. This means that for each constraint, the probability of
fulfillment is the cdf of a univariate Gaussian. The overall probability
is a simply the product of the individual probabilities.
fun : None or Callable -> float or np.ndarray
The constraint function. Should be float-valued or array-valued (if
multiple constraints are present). Needs to take the same parameters
as the optimization target with the same argument names.
lb : float or np.ndarray
The lower bound on the constraints. Should have the same
dimensionality as the return value of the constraint function.
ub : float or np.ndarray
The upper bound on the constraints. Should have the same
dimensionality as the return value of the constraint function.
random_state : np.random.RandomState or int or None, default=None
Random state to use.
"""

def __init__(self, fun, lb, ub, random_state=None):
self.fun = fun

self._lb = np.atleast_1d(lb)
self._ub = np.atleast_1d(ub)

if np.any(self._lb >= self._ub):
@@ -58,19 +52,36 @@ def __init__(self, fun, lb, ub, random_state=None):

@property
def lb(self):
"""Return lower bounds."""
return self._lb

@property
def ub(self):
"""Return upper bounds."""
return self._ub

@property
def model(self):
"""Return GP regressors of the constraint function."""
return self._model

def eval(self, **kwargs):
"""
Evaluates the constraint function.
"""Evaluate the constraint function.
Parameters
----------
**kwargs :
Function arguments to evaluate the constraint function on.
Returns
-------
Value of the constraint function.
Raises
------
TypeError
If the kwargs keys don't match the function argument names.
"""
try:
return self.fun(**kwargs)
@@ -85,8 +96,19 @@ def eval(self, **kwargs):
raise

def fit(self, X, Y):
"""
Fits internal GaussianProcessRegressor's to the data.
"""Fit internal GPRs to the data.
Parameters
----------
X :
Parameters of the constraint function.
Y :
Values of the constraint function.
Returns
-------
None
"""
if len(self._model) == 1:
self._model[0].fit(X, Y)
@@ -95,13 +117,39 @@ def predict(self, X):
gp.fit(X, Y[:, i])

def predict(self, X):
"""
Returns the probability that the constraint is fulfilled at `X` based
on the internal Gaussian Process Regressors.
r"""Calculate the probability that the constraint is fulfilled at `X`.
Note that this does not try to approximate the values of the
constraint function (for this, see `ConstraintModel.approx()`.), but
probability that the constraint function is fulfilled. That is, this
function calculates
.. math::
p = \text{Pr}\left\{c^{\text{low}} \leq \tilde{c}(x) \leq
c^{\text{up}} \right\} = \int_{c^{\text{low}}}^{c^{\text{up}}}
\mathcal{N}(c, \mu(x), \sigma^2(x)) \, dc.
with :math:`\mu(x)`, :math:`\sigma^2(x)` the mean and variance at
:math:`x` as given by the GP and :math:`c^{\text{low}}`,
:math:`c^{\text{up}}` the lower and upper bounds of the constraint
respectively.
In case of multiple constraints, we assume conditional independence.
This means we calculate the probability of constraint fulfilment
individually, with the joint probability given as their product.
Parameters
----------
X : np.ndarray of shape (n_samples, n_features)
Parameters for which to predict the probability of constraint
fulfilment.
Returns
-------
np.ndarray of shape (n_samples,)
Probability of constraint fulfilment.
Note that this does not try to approximate the values of the constraint
function, but probability that the constraint function is fulfilled.
For the former, see `ConstraintModel.approx()`.
"""
X_shape = X.shape
X = X.reshape((-1, self._model[0].n_features_in_))
@@ -114,33 +162,53 @@ def predict(self, X):
if self._ub[0] != np.inf else np.array([1]))
result = p_upper - p_lower
return result.reshape(X_shape[:-1])

result = np.ones(X.shape[0])
for j, gp in enumerate(self._model):
y_mean, y_std = gp.predict(X, return_std=True)
p_lower = (norm(loc=y_mean, scale=y_std).cdf(self._lb[j])
if self._lb[j] != -np.inf else np.array([0]))
p_upper = (norm(loc=y_mean, scale=y_std).cdf(self._ub[j])
if self._ub[j] != np.inf else np.array([1]))
result = result * (p_upper - p_lower)
return result.reshape(X_shape[:-1])
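As an aside (not part of the diff): the probability computed by `predict` can be reproduced by hand with `scipy.stats.norm`, which makes the conditional-independence product from the docstring concrete. The `mu`/`sigma` values below are made-up stand-ins for the GP posterior at a single point `x`.

import numpy as np
from scipy.stats import norm

# Posterior mean/std of two constraint GPs at one point (illustrative values).
mu = np.array([0.3, -0.1])
sigma = np.array([0.2, 0.5])
lb = np.array([-np.inf, -1.0])
ub = np.array([1.0, 1.0])

p = 1.0
for j in range(2):
    p_lower = norm(loc=mu[j], scale=sigma[j]).cdf(lb[j])  # 0.0 when lb is -inf
    p_upper = norm(loc=mu[j], scale=sigma[j]).cdf(ub[j])  # 1.0 when ub is +inf
    p *= p_upper - p_lower  # independence: the joint probability is the product

print(p)  # Pr{lb <= c(x) <= ub} jointly over both constraints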

def approx(self, X):
"""
Returns the approximation of the constraint function using the internal
Gaussian Process Regressors.
Approximate the constraint function using the internal GPR model.
Parameters
----------
X : np.ndarray of shape (n_samples, n_features)
Parameters for which to estimate the constraint function value.
Returns
-------
np.ndarray of shape (n_samples, n_constraints)
Constraint function value estimates.
"""
X_shape = X.shape
X = X.reshape((-1, self._model[0].n_features_in_))
if len(self._model) == 1:
return self._model[0].predict(X).reshape(X_shape[:-1])

result = np.column_stack([gp.predict(X) for gp in self._model])
return result.reshape(X_shape[:-1] + (len(self._lb), ))

def allowed(self, constraint_values):
"""
Checks whether `constraint_values` are below the specified limits.
"""Check whether `constraint_values` fulfills the specified limits.
Parameters
----------
constraint_values : np.ndarray of shape (n_samples, n_constraints)
The values of the constraint function.
Returns
-------
np.ndarrray of shape (n_samples,)
Specifying wheter the constraints are fulfilled.
"""
if self._lb.size == 1:
return (np.less_equal(self._lb, constraint_values)
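Rounding off the constraint.py changes, here is a minimal round-trip through the `ConstraintModel` API documented above. This is an illustrative sketch, not part of the commit; it uses only the constructor and methods shown in this file (`eval`, `fit`, `predict`, `approx`, `allowed`).

import numpy as np
from bayes_opt.constraint import ConstraintModel

def constraint_fun(x, y):
    # A single float-valued constraint.
    return x ** 2 + y ** 2

model = ConstraintModel(fun=constraint_fun, lb=-np.inf, ub=1.0)

# Evaluate the constraint on random parameters and fit the internal GP.
X = np.random.uniform(-2, 2, size=(20, 2))
Y = np.array([model.eval(x=px, y=py) for px, py in X])
model.fit(X, Y)

print(model.predict(X))  # Pr{lb <= c(x) <= ub} for each sample
print(model.approx(X))   # GP estimate of the constraint values themselves
print(model.allowed(Y))  # boolean mask of feasible observed values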
282 changes: 193 additions & 89 deletions bayes_opt/target_space.py (diff not shown)
