From d3c9713c61b6e49e075794ef613697109de05384 Mon Sep 17 00:00:00 2001 From: Sebastian Blauth Date: Tue, 6 Jun 2023 10:28:16 +0200 Subject: [PATCH] Add configuration file parameter [LineSearch][fail_if_not_converged] This parameter determines whether the line search is cancelled once the state system cannot be solved at a (trial) iterate. If this is True, then the line search is cancelled. Otherwise, the step size is "halved" and a new iterate is generated. --- CHANGELOG.rst | 6 ++++ .../line_search/armijo_line_search.py | 28 +++++++++++++++++-- cashocs/io/config.py | 4 +++ .../user/demos/optimal_control/doc_config.rst | 11 ++++++++ .../demos/shape_optimization/doc_config.rst | 12 ++++++-- 5 files changed, 56 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 6ac5028e..131b27ab 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -10,6 +10,12 @@ of the maintenance releases, please take a look at 2.1.0 (in development) ---------------------- +* New configuration file parameters: + + * Section LineSearch + + * :ini:`fail_if_not_converged` determines whether the line search is cancelled once the state system cannot be solved or if a new iterate is tried instead. 
+ 2.0.0 (May 16, 2023) diff --git a/cashocs/_optimization/line_search/armijo_line_search.py b/cashocs/_optimization/line_search/armijo_line_search.py index 7bc20474..556c50b0 100644 --- a/cashocs/_optimization/line_search/armijo_line_search.py +++ b/cashocs/_optimization/line_search/armijo_line_search.py @@ -24,6 +24,7 @@ import fenics from typing_extensions import TYPE_CHECKING +from cashocs import _exceptions from cashocs import _loggers from cashocs._optimization.line_search import line_search @@ -124,9 +125,9 @@ def search( ) current_function_value = solver.objective_value - - self.state_problem.has_solution = False - objective_step = self.cost_functional.evaluate() + objective_step = self._compute_objective_at_new_iterate( + current_function_value + ) decrease_measure = self._compute_decrease_measure(search_direction) @@ -176,3 +177,24 @@ def _compute_decrease_measure( return self.decrease_measure_w_o_step * self.stepsize else: return float("inf") + + def _compute_objective_at_new_iterate(self, current_function_value: float) -> float: + """Computes the objective value for the new (trial) iterate. + + Args: + current_function_value: The current function value. + + Returns: + The value of the cost functional at the new iterate. 
+ + """ + self.state_problem.has_solution = False + try: + objective_step = self.cost_functional.evaluate() + except (_exceptions.PETScKSPError, _exceptions.NotConvergedError) as error: + if self.config.getboolean("LineSearch", "fail_if_not_converged"): + raise error + else: + objective_step = 2.0 * current_function_value + + return objective_step diff --git a/cashocs/io/config.py b/cashocs/io/config.py index b7b6a08c..2e39a168 100644 --- a/cashocs/io/config.py +++ b/cashocs/io/config.py @@ -224,6 +224,9 @@ def __init__(self, config_file: Optional[str] = None) -> None: "attributes": ["less_than_one", "positive"], "larger_than": ("LineSearch", "factor_low"), }, + "fail_if_not_converged": { + "type": "bool", + }, }, "AlgoLBFGS": { "bfgs_memory_size": { @@ -560,6 +563,7 @@ def __init__(self, config_file: Optional[str] = None) -> None: polynomial_model = cubic factor_high = 0.5 factor_low = 0.1 +fail_if_not_converged = False [ShapeGradient] lambda_lame = 0.0 diff --git a/docs/source/user/demos/optimal_control/doc_config.rst b/docs/source/user/demos/optimal_control/doc_config.rst index 61ead36a..a6bd6d3a 100755 --- a/docs/source/user/demos/optimal_control/doc_config.rst +++ b/docs/source/user/demos/optimal_control/doc_config.rst @@ -337,6 +337,14 @@ For the polynomial models, we also have a safeguarding procedure, which ensures and the values specified here are also the default values for these parameters. +Finally, we have the parameter + +.. code-block:: ini + + fail_if_not_converged = False + +which determines whether the line search is terminated if the state system cannot be solved at the current iterate. If this is :ini:`fail_if_not_converged = True`, then an exception is raised. Otherwise, the iterate is counted as having too high a function value and the stepsize is "halved" and a new iterate is formed. + .. _config_ocp_algolbfgs: Section AlgoLBFGS @@ -688,6 +696,9 @@ in the following. 
- Safeguard for stepsize, upper bound * - :ini:`factor_low = 0.1` - Safeguard for stepsize, lower bound + * - :ini:`fail_if_not_converged = False` + - If this is :ini:`True`, then the line search fails if the state system cannot be solved at the new iterate + [AlgoLBFGS] *********** diff --git a/docs/source/user/demos/shape_optimization/doc_config.rst b/docs/source/user/demos/shape_optimization/doc_config.rst index e0d2879a..0efbc71f 100755 --- a/docs/source/user/demos/shape_optimization/doc_config.rst +++ b/docs/source/user/demos/shape_optimization/doc_config.rst @@ -356,6 +356,14 @@ For the polynomial models, we also have a safeguarding procedure, which ensures and the values specified here are also the default values for these parameters. +Finally, we have the parameter + +.. code-block:: ini + + fail_if_not_converged = False + +which determines whether the line search is terminated if the state system cannot be solved at the current iterate. If this is :ini:`fail_if_not_converged = True`, then an exception is raised. Otherwise, the iterate is counted as having too high a function value and the stepsize is "halved" and a new iterate is formed. + .. _config_shape_algolbfgs: Section AlgoLBFGS @@ -1128,7 +1136,6 @@ in the following. - :ini:`picard_verbose = True` enables verbose output of Picard iteration - [OptimizationRoutine] ********************* @@ -1178,7 +1185,8 @@ in the following. - Safeguard for stepsize, upper bound * - :ini:`factor_low = 0.1` - Safeguard for stepsize, lower bound - + * - :ini:`fail_if_not_converged = False` + - If this is :ini:`True`, then the line search fails if the state system cannot be solved at the new iterate [AlgoLBFGS] ***********