Update multistart option handling (#513)
timmens authored Aug 5, 2024
1 parent cbbdd61 commit 694b537
Showing 24 changed files with 1,403 additions and 484 deletions.
442 changes: 345 additions & 97 deletions docs/source/how_to/how_to_multistart.ipynb

Large diffs are not rendered by default.

11 changes: 5 additions & 6 deletions docs/source/how_to/how_to_visualize_histories.ipynb
@@ -177,12 +177,11 @@
     "\n",
     "\n",
     "res = om.minimize(\n",
-    "    sphere,\n",
-    "    params=np.arange(10),\n",
-    "    bounds=om.Bounds(soft_lower=np.full(10, -3), soft_upper=np.full(10, 10)),\n",
+    "    alpine,\n",
+    "    params=np.arange(7),\n",
+    "    bounds=om.Bounds(soft_lower=np.full(7, -3), soft_upper=np.full(7, 10)),\n",
     "    algorithm=\"scipy_neldermead\",\n",
-    "    multistart=True,\n",
-    "    multistart_options={\"n_samples\": 1000, \"convergence.max_discoveries\": 10},\n",
+    "    multistart=om.MultistartOptions(n_samples=100, convergence_max_discoveries=3),\n",
     "    )"
    ]
   },
@@ -193,7 +192,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "fig = om.criterion_plot(res, max_evaluations=3000)\n",
+    "fig = om.criterion_plot(res, max_evaluations=1000, monotone=True)\n",
     "fig.show(renderer=\"png\")"
    ]
   }

3 changes: 1 addition & 2 deletions docs/source/tutorials/optimization_overview.ipynb
@@ -355,8 +355,7 @@
     "    params=np.arange(10),\n",
     "    algorithm=\"scipy_neldermead\",\n",
     "    bounds=bounds,\n",
-    "    multistart=True,\n",
-    "    multistart_options={\"convergence.max_discoveries\": 5},\n",
+    "    multistart=om.MultistartOptions(convergence_max_discoveries=5),\n",
     ")\n",
     "res.params.round(5)"
    ]

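For reference, the new-style call that both notebook hunks above converge on; a minimal, self-contained sketch, assuming optimagic with this commit is installed and imported as om. The function sphere is a stand-in objective, and the option values are copied from the diffs.

import numpy as np
import optimagic as om


def sphere(x):
    # Simple quadratic test function used as a placeholder objective.
    return x @ x


res = om.minimize(
    sphere,
    params=np.arange(10),
    algorithm="scipy_neldermead",
    bounds=om.Bounds(soft_lower=np.full(10, -3), soft_upper=np.full(10, 10)),
    # Typed options object replaces multistart=True + multistart_options dict.
    multistart=om.MultistartOptions(convergence_max_discoveries=5),
)

fig = om.criterion_plot(res, max_evaluations=1000, monotone=True)
fig.show(renderer="png")
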
1 change: 0 additions & 1 deletion pyproject.toml
@@ -248,7 +248,6 @@ module = [
     "optimagic.optimization.optimization_logging",
     "optimagic.optimization.optimize_result",
     "optimagic.optimization.optimize",
-    "optimagic.optimization.process_multistart_sample",
     "optimagic.optimization.process_results",
     "optimagic.optimization.multistart",
     "optimagic.optimization.scipy_aliases",

2 changes: 2 additions & 0 deletions src/optimagic/__init__.py
@@ -8,6 +8,7 @@
 from optimagic.benchmarking.run_benchmark import run_benchmark
 from optimagic.differentiation.derivatives import first_derivative, second_derivative
 from optimagic.logging.read_log import OptimizeLogReader
+from optimagic.optimization.multistart_options import MultistartOptions
 from optimagic.optimization.optimize import maximize, minimize
 from optimagic.optimization.optimize_result import OptimizeResult
 from optimagic.parameters.bounds import Bounds
@@ -48,5 +49,6 @@
     "OptimizeResult",
     "Bounds",
     "ScalingOptions",
+    "MultistartOptions",
     "__version__",
 ]

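Since MultistartOptions is now re-exported at the package root, both import paths below refer to the same class; a minimal sketch (the option values are taken from the notebook diffs above):

import optimagic as om
from optimagic.optimization.multistart_options import MultistartOptions

# The top-level name and the module-level name are the same object.
assert om.MultistartOptions is MultistartOptions

options = om.MultistartOptions(n_samples=100, convergence_max_discoveries=3)
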
66 changes: 66 additions & 0 deletions src/optimagic/deprecations.py
@@ -1,4 +1,5 @@
 import warnings
+from dataclasses import replace

 from optimagic.parameters.bounds import Bounds

@@ -71,6 +72,15 @@ def throw_scaling_options_future_warning():
     warnings.warn(msg, FutureWarning)


+def throw_multistart_options_future_warning():
+    msg = (
+        "Specifying multistart options via the argument `multistart_options` is "
+        "deprecated and will be removed in optimagic version 0.6.0 and later. You can "
+        "pass these options directly to the `multistart` argument instead."
+    )
+    warnings.warn(msg, FutureWarning)
+
+
 def replace_and_warn_about_deprecated_algo_options(algo_options):
     if not isinstance(algo_options, dict):
         return algo_options
@@ -138,3 +148,59 @@ def replace_and_warn_about_deprecated_bounds(
         bounds = Bounds(**old_bounds)

     return bounds
+
+
+def replace_and_warn_about_deprecated_multistart_options(options):
+    """Replace deprecated multistart options and warn about them.
+
+    Args:
+        options (MultistartOptions): The multistart options to replace.
+
+    Returns:
+        MultistartOptions: The replaced multistart options.
+
+    """
+    replacements = {}
+
+    if options.share_optimization is not None:
+        msg = (
+            "The share_optimization option is deprecated and will be removed in "
+            "version 0.6.0. Use stopping_maxopt instead to specify the number of "
+            "optimizations directly."
+        )
+        warnings.warn(msg, FutureWarning)
+
+    if options.convergence_relative_params_tolerance is not None:
+        msg = (
+            "The convergence_relative_params_tolerance option is deprecated and will "
+            "be removed in version 0.6.0. Use convergence_xtol_rel instead."
+        )
+        warnings.warn(msg, FutureWarning)
+        if options.convergence_xtol_rel is None:
+            replacements["convergence_xtol_rel"] = (
+                options.convergence_relative_params_tolerance
+            )
+
+    if options.optimization_error_handling is not None:
+        msg = (
+            "The optimization_error_handling option is deprecated and will be removed "
+            "in version 0.6.0. Setting this attribute also sets the error handling "
+            "for exploration. Use the new error_handling option to set the error "
+            "handling for both optimization and exploration."
+        )
+        warnings.warn(msg, FutureWarning)
+        if options.error_handling is None:
+            replacements["error_handling"] = options.optimization_error_handling
+
+    if options.exploration_error_handling is not None:
+        msg = (
+            "The exploration_error_handling option is deprecated and will be "
+            "removed in version 0.6.0. Setting this attribute also sets the error "
+            "handling for exploration. Use the new error_handling option to set the "
+            "error handling for both optimization and exploration."
+        )
+        warnings.warn(msg, FutureWarning)
+        if options.error_handling is None:
+            replacements["error_handling"] = options.exploration_error_handling
+
+    return replace(options, **replacements)

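A short usage sketch of the shim above, assuming MultistartOptions is a dataclass whose deprecated fields can be set as keyword arguments and default to None (only attribute names that appear in this diff are used; the exact constructor is not shown in this excerpt):

import warnings

from optimagic import MultistartOptions
from optimagic.deprecations import (
    replace_and_warn_about_deprecated_multistart_options,
)

# Hypothetical options object with one deprecated field set.
old = MultistartOptions(convergence_relative_params_tolerance=0.01)

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    new = replace_and_warn_about_deprecated_multistart_options(old)

# The deprecated value is copied over to the new field name, and a
# FutureWarning is emitted for the deprecated attribute.
assert new.convergence_xtol_rel == 0.01
assert any(issubclass(w.category, FutureWarning) for w in caught)
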
4 changes: 4 additions & 0 deletions src/optimagic/exceptions.py
@@ -51,6 +51,10 @@ class InvalidScalingError(OptimagicError):
     """Exception for invalid user provided scaling."""


+class InvalidMultistartError(OptimagicError):
+    """Exception for invalid user provided multistart options."""
+
+
 class NotInstalledError(OptimagicError):
     """Exception when optional dependencies are needed but not installed."""

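This hunk only adds the exception class; where it is raised is not part of the excerpt. A purely hypothetical sketch of catching it, under the assumption that pre_process_multistart (introduced elsewhere in this commit) raises InvalidMultistartError for malformed input:

from optimagic.exceptions import InvalidMultistartError
from optimagic.optimization.multistart_options import pre_process_multistart

try:
    # A string is not a valid multistart specification (assumed behavior).
    pre_process_multistart("not-an-option")
except InvalidMultistartError as e:
    print(f"Invalid multistart options: {e}")
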
29 changes: 16 additions & 13 deletions src/optimagic/optimization/create_optimization_problem.py
@@ -15,6 +15,10 @@
 from optimagic.optimization.get_algorithm import (
     process_user_algorithm,
 )
+from optimagic.optimization.multistart_options import (
+    MultistartOptions,
+    pre_process_multistart,
+)
 from optimagic.optimization.scipy_aliases import (
     map_method_to_algorithm,
     split_fun_and_jac,
@@ -70,10 +74,7 @@ class OptimizationProblem:
     error_handling: Literal["raise", "continue"]
     error_penalty: dict[str, Any] | None
     scaling: ScalingOptions | None
-    # TODO: multistart will become None | MultistartOptions and multistart_options will
-    # be removed
-    multistart: bool
-    multistart_options: dict[str, Any] | None
+    multistart: MultistartOptions | None
     collect_history: bool
     skip_checks: bool
     direction: Literal["minimize", "maximize"]
@@ -100,7 +101,6 @@ def create_optimization_problem(
     error_penalty,
     scaling,
     multistart,
-    multistart_options,
     collect_history,
     skip_checks,
     # scipy aliases
@@ -126,6 +126,7 @@
     soft_lower_bounds,
     soft_upper_bounds,
     scaling_options,
+    multistart_options,
 ):
     # ==================================================================================
     # error handling needed as long as fun is an optional argument (i.e. until
@@ -187,7 +188,13 @@ def create_optimization_problem(

     if scaling_options is not None:
         deprecations.throw_scaling_options_future_warning()
-        scaling = scaling_options if scaling is None else scaling
+        if scaling is True and scaling_options is not None:
+            scaling = scaling_options
+
+    if multistart_options is not None:
+        deprecations.throw_multistart_options_future_warning()
+        if multistart is True and multistart_options is not None:
+            multistart = multistart_options

     algo_options = replace_and_warn_about_deprecated_algo_options(algo_options)

@@ -303,6 +310,7 @@
     # ==================================================================================
     bounds = pre_process_bounds(bounds)
     scaling = pre_process_scaling(scaling)
+    multistart = pre_process_multistart(multistart)

     fun_kwargs = {} if fun_kwargs is None else fun_kwargs
     constraints = [] if constraints is None else constraints
@@ -312,7 +320,6 @@
     numdiff_options = {} if numdiff_options is None else numdiff_options
     log_options = {} if log_options is None else log_options
     error_penalty = {} if error_penalty is None else error_penalty
-    multistart_options = {} if multistart_options is None else multistart_options
     if logging:
         logging = Path(logging)

@@ -405,11 +412,8 @@
     if not isinstance(scaling, ScalingOptions | None):
         raise ValueError("scaling must be a ScalingOptions object or None")

-    if not isinstance(multistart, bool):
-        raise ValueError("multistart must be a boolean")
-
-    if not isinstance(multistart_options, dict | None):
-        raise ValueError("multistart_options must be a dictionary or None")
+    if not isinstance(multistart, MultistartOptions | None):
+        raise ValueError("multistart must be a MultistartOptions object or None")

     if not isinstance(collect_history, bool):
         raise ValueError("collect_history must be a boolean")
@@ -446,7 +450,6 @@
         error_penalty=error_penalty,
         scaling=scaling,
         multistart=multistart,
-        multistart_options=multistart_options,
         collect_history=collect_history,
         skip_checks=skip_checks,
         direction=direction,

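The compatibility handling spread across the hunks above can be read as one small function. The sketch below restates it under the assumption that pre_process_multistart turns a legacy dict or bool into a MultistartOptions instance; the helper name _resolve_multistart is made up for illustration and does not exist in the code base.

import optimagic.deprecations as deprecations
from optimagic.optimization.multistart_options import (
    MultistartOptions,
    pre_process_multistart,
)


def _resolve_multistart(multistart, multistart_options):
    # Deprecated path: a dict passed via multistart_options together with
    # multistart=True replaces the bare boolean flag and emits a FutureWarning.
    if multistart_options is not None:
        deprecations.throw_multistart_options_future_warning()
        if multistart is True and multistart_options is not None:
            multistart = multistart_options

    # The result is then normalized (assumed behavior of pre_process_multistart)
    # and must end up as a MultistartOptions instance or None.
    multistart = pre_process_multistart(multistart)

    if not isinstance(multistart, MultistartOptions | None):
        raise ValueError("multistart must be a MultistartOptions object or None")
    return multistart
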