
Advanced constrained optimization #344

Merged
2 changes: 2 additions & 0 deletions bayes_opt/__init__.py
@@ -2,9 +2,11 @@
from .domain_reduction import SequentialDomainReductionTransformer
from .util import UtilityFunction
from .logger import ScreenLogger, JSONLogger
from .constraint import ConstraintModel

__all__ = [
"BayesianOptimization",
"ConstraintModel"
"UtilityFunction",
"Events",
"ScreenLogger",
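With this export in place, the constraint model can be imported directly from the package namespace; a minimal sketch:

    from bayes_opt import BayesianOptimization, ConstraintModel
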
60 changes: 39 additions & 21 deletions bayes_opt/bayesian_optimization.py
@@ -1,7 +1,7 @@
import warnings
from queue import Queue, Empty

from .target_space import TargetSpace
from .target_space import TargetSpace, ConstrainedTargetSpace
from .event import Events, DEFAULT_EVENTS
from .logger import _get_default_logger
from .util import UtilityFunction, acq_max, ensure_rng
@@ -16,6 +16,7 @@ class Observable(object):
Inspired/Taken from
https://www.protechtraining.com/blog/post/879#simple-observer
"""

def __init__(self, events):
# maps event names to subscribers
# str -> dict
@@ -52,9 +53,12 @@ class BayesianOptimization(Observable):
Dictionary with parameters names as keys and a tuple with minimum
and maximum values.

constraint: A ConstraintModel. Note that the constraint function must
use the same argument names as f.

random_state: int or numpy.random.RandomState, optional(default=None)
If the value is an integer, it is used as the seed for creating a
numpy.random.RandomState. Otherwise the random state provided it is used.
numpy.random.RandomState. Otherwise the random state provided is used.
When set to None, an unseeded random state is generated.

verbose: int, optional(default=2)
@@ -76,14 +80,16 @@ class BayesianOptimization(Observable):
set_bounds()
Allows changing the lower and upper searching bounds
"""
def __init__(self, f, pbounds, random_state=None, verbose=2,

def __init__(self,
f,
pbounds,
constraint=None,
random_state=None,
verbose=2,
bounds_transformer=None):
self._random_state = ensure_rng(random_state)

# Data structure containing the function to be optimized, the bounds of
# its domain, and a record of the evaluations we have done so far
self._space = TargetSpace(f, pbounds, random_state)

self._queue = Queue()

# Internal GP regressor
@@ -95,6 +101,16 @@ def __init__(self, f, pbounds, random_state=None, verbose=2,
random_state=self._random_state,
)

if constraint is None:
# Data structure containing the function to be optimized, the
# bounds of its domain, and a record of the evaluations we have
# done so far
self._space = TargetSpace(f, pbounds, random_state)
else:
self._space = ConstrainedTargetSpace(f, constraint, pbounds,
random_state)
self.constraint = constraint
Member Author:
I notice a lot of these properties are prepended with a _ and equipped with a separate method essentially acting as a getter, presumably to avoid users setting these properties. Let me know which properties you want me to handle this way.

Collaborator:
hey, I might consider putting self.constraint as self._constraint - which would be consistent with how _gp is currently handled.
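For reference, the pattern this discussion refers to (a leading-underscore attribute exposed through a read-only property getter, as `_gp` is handled) would look roughly like this sketch; the class name is illustrative only:

    class Example:
        def __init__(self, constraint=None):
            self._constraint = constraint

        @property
        def constraint(self):
            # Read-only access: users can read the model but are
            # discouraged from rebinding it.
            return self._constraint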


self._verbose = verbose
self._bounds_transformer = bounds_transformer
if self._bounds_transformer:
@@ -136,6 +152,7 @@ def probe(self, params, lazy=True):
If True, the optimizer will evaluate the points when calling
maximize(). Otherwise it will evaluate it at the moment.
"""

if lazy:
self._queue.put(params)
else:
@@ -152,15 +169,17 @@ def suggest(self, utility_function):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
self._gp.fit(self._space.params, self._space.target)
if self.constraint is not None:
self.constraint.fit(self._space.params,
self._space._constraint_values)

# Finding argmax of the acquisition function.
suggestion = acq_max(
ac=utility_function.utility,
gp=self._gp,
y_max=self._space.target.max(),
bounds=self._space.bounds,
random_state=self._random_state
)
suggestion = acq_max(ac=utility_function.utility,
gp=self._gp,
constraint=self.constraint,
y_max=self._space.target.max(),
bounds=self._space.bounds,
random_state=self._random_state)

return self._space.array_to_params(suggestion)

@@ -211,15 +230,15 @@ def maximize(self,
kappa: float, optional(default=2.576)
Parameter to indicate how close the next parameters are sampled.
Higher value = favors spaces that are least explored.
Lower value = favors spaces where the regression function is the
highest.
Lower value = favors spaces where the regression function is
the highest.

kappa_decay: float, optional(default=1)
`kappa` is multiplied by this factor every iteration.

kappa_decay_delay: int, optional(default=0)
Number of iterations that must have passed before applying the decay
to `kappa`.
Number of iterations that must have passed before applying the
decay to `kappa`.

xi: float, optional(default=0.0)
[unused]
@@ -242,12 +261,11 @@
util.update_params()
x_probe = self.suggest(util)
iteration += 1

self.probe(x_probe, lazy=False)

if self._bounds_transformer and iteration > 0:
# The bounds transformer should only modify the bounds after the init_points points (only for the true
# iterations)
# The bounds transformer should only modify the bounds after
# the init_points points (only for the true iterations)
self.set_bounds(
self._bounds_transformer.transform(self._space))

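Putting the pieces above together: when a constraint is passed, the optimizer records evaluations in a ConstrainedTargetSpace, fits the constraint's GPs in suggest(), and hands the constraint to acq_max. A minimal end-to-end sketch using the signatures from this diff; the target and constraint functions here are hypothetical:

    import numpy as np
    from bayes_opt import BayesianOptimization, ConstraintModel

    def target(x, y):
        # Hypothetical objective to maximize.
        return -x ** 2 - (y - 1) ** 2 + 1

    def constraint_func(x, y):
        # Hypothetical constraint; it must use the same argument
        # names as the target function.
        return x + y

    # Feasible points satisfy constraint_func(x, y) <= 1.0.
    constraint = ConstraintModel(func=constraint_func, limits=1.0)

    optimizer = BayesianOptimization(
        f=target,
        pbounds={"x": (-2, 2), "y": (-2, 2)},
        constraint=constraint,
        random_state=1,
    )
    optimizer.maximize(init_points=5, n_iter=10)
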
125 changes: 125 additions & 0 deletions bayes_opt/constraint.py
@@ -0,0 +1,125 @@
import numpy as np
from sklearn.gaussian_process.kernels import Matern
from sklearn.gaussian_process import GaussianProcessRegressor
from scipy.stats import norm


class ConstraintModel():
"""
This class models the constraint function(s) with Gaussian Process
Regressors and estimates the probability that the constraint(s) are
fulfilled at a given point.

Parameters
----------
func: function
Constraint function. If multiple constraints are handled, this should
return a numpy.ndarray of appropriate size.

limits: numeric or numpy.ndarray
Upper limit(s) for the constraints. The return value of `func` should
have exactly this shape.

random_state: int or numpy.random.RandomState, optional(default=None)
If the value is an integer, it is used as the seed for creating a
numpy.random.RandomState. Otherwise the random state provided is used.
When set to None, an unseeded random state is generated.

Note
----
In case of multiple constraints, this model assumes conditional
independence. This means that for each constraint, the probability of
fulfillment is the cdf of a univariate Gaussian. The overall probability
is simply the product of the individual probabilities.
"""

def __init__(self, func, limits, random_state=None):
self.func = func

if isinstance(limits, float):
self._limits = np.array([limits])
else:
self._limits = limits

basis = lambda: GaussianProcessRegressor(
kernel=Matern(nu=2.5),
alpha=1e-6,
normalize_y=True,
n_restarts_optimizer=5,
random_state=random_state,
)
self._model = [basis() for _ in range(len(self._limits))]

@property
def limits(self):
return self._limits

def eval(self, **kwargs):
"""
Evaluates the constraint function.
"""
try:
return self.func(**kwargs)
except TypeError as e:
msg = (
"Encountered TypeError when evaluating constraint " +
"function. This could be because your constraint function " +
"doesn't use the same keyword arguments as the target " +
f"function. Original error message:\n\n{e}"
)
raise TypeError(msg) from e

def fit(self, X, Y):
"""
Fits the internal GaussianProcessRegressors to the data.
"""
if len(self._model) == 1:
self._model[0].fit(X, Y)
else:
for i, gp in enumerate(self._model):
gp.fit(X, Y[:, i])

def predict(self, X):
"""
Returns the probability that the constraint is fulfilled at `X` based
on the internal Gaussian Process Regressors.

Note that this does not try to approximate the values of the constraint
function, but the probability that the constraint is fulfilled.
For the former, see `ConstraintModel.approx()`.
"""
X_shape = X.shape
X = X.reshape((-1, self._model[0].n_features_in_))
if len(self._model) == 1:
y_mean, y_std = self._model[0].predict(X, return_std=True)
result = norm(loc=y_mean, scale=y_std).cdf(self._limits[0])
return result.reshape(X_shape[:-1])
else:
result = np.ones(X.shape[0])
for j, gp in enumerate(self._model):
y_mean, y_std = gp.predict(X, return_std=True)
result = result * norm(loc=y_mean, scale=y_std).cdf(
self._limits[j])
return result.reshape(X_shape[:-1])

def approx(self, X):
"""
Returns the approximation of the constraint function using the internal
Gaussian Process Regressors.
"""
X_shape = X.shape
X = X.reshape((-1, self._model[0].n_features_in_))
if len(self._model) == 1:
return self._model[0].predict(X).reshape(X_shape[:-1])
else:
result = np.column_stack([gp.predict(X) for gp in self._model])
return result.reshape(X_shape[:-1] + (len(self._limits), ))

def allowed(self, constraint_values):
"""
Checks whether `constraint_values` are at or below the specified limits.
"""
if self._limits.size == 1:
return np.less_equal(constraint_values, self._limits)

return np.all(constraint_values <= self._limits, axis=-1)
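To make the conditional-independence note concrete: with two constraints, predict() returns the product of two univariate Gaussian CDFs, one per constraint. A small sketch with synthetic, purely illustrative data:

    import numpy as np
    from bayes_opt import ConstraintModel

    # Two hypothetical constraints, each with its own upper limit.
    model = ConstraintModel(
        func=lambda x, y: np.array([x, y]),
        limits=np.array([0.5, 1.0]),
        random_state=1,
    )

    X = np.array([[0.1, 0.2], [0.4, 0.9], [0.8, 1.5]])  # observed params
    Y = np.array([[0.1, 0.2], [0.4, 0.9], [0.8, 1.5]])  # constraint values
    model.fit(X, Y)

    # Probability that *both* constraints hold at a new point:
    # the product of the per-constraint CDFs.
    print(model.predict(np.array([[0.2, 0.3]])))

    # Boolean feasibility check against raw constraint values.
    print(model.allowed(np.array([0.4, 0.9])))  # True: both <= limits
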
6 changes: 5 additions & 1 deletion bayes_opt/logger.py
@@ -6,7 +6,6 @@
from .event import Events
from .util import Colours


def _get_default_logger(verbose):
return ScreenLogger(verbose=verbose)

@@ -81,6 +80,11 @@ def _header(self, instance):
return line + "\n" + ("-" * self._header_length)

def _is_new_max(self, instance):
if instance.max["target"] is None:
# During constrained optimization, there might not be a maximum
# value, since the optimizer might not have encountered any points
# that fulfill the constraints.
return False
if self._previous_max is None:
self._previous_max = instance.max["target"]
return instance.max["target"] > self._previous_max
4 changes: 2 additions & 2 deletions bayes_opt/observer.py
@@ -25,8 +25,8 @@ def _update_tracker(self, event, instance):
self._iterations += 1

current_max = instance.max
if (self._previous_max is None or
current_max["target"] > self._previous_max):
if (self._previous_max is None
or current_max["target"] > self._previous_max):
self._previous_max = current_max["target"]
self._previous_max_params = current_max["params"]
