Feature/bfgs restart #93

Merged 2 commits on Sep 13, 2022
7 changes: 7 additions & 0 deletions CHANGELOG.rst
@@ -17,6 +17,13 @@ in development

* implemented a wrapper for cashocs-convert, so that this can also be used from inside Python. Simply call ``cashocs.convert(inputfile)``.

* BFGS methods can now be used in a restarted fashion, if desired

* New configuration file parameters

* Section AlgoLBFGS

* ``bfgs_periodic_restart`` is an integer parameter. If it is 0 (the default), no restarts are performed. If it is > 0, the BFGS method is restarted after the specified number of iterations (see the example below).
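
As an illustration (the value ``5`` below is only an example, not a recommended default), the new option is placed in the ``AlgoLBFGS`` section of the configuration file, alongside the existing BFGS parameters ::

    [AlgoLBFGS]
    bfgs_memory_size = 5
    use_bfgs_scaling = True
    bfgs_periodic_restart = 5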

1.8.0 (July 6, 2022)
--------------------
24 changes: 24 additions & 0 deletions cashocs/_optimization/optimization_algorithms/l_bfgs.py
@@ -58,6 +58,10 @@ def __init__(

self.bfgs_memory_size = self.config.getint("AlgoLBFGS", "bfgs_memory_size")
self.use_bfgs_scaling = self.config.getboolean("AlgoLBFGS", "use_bfgs_scaling")
self.bfgs_periodic_restart = self.config.getint(
"AlgoLBFGS", "bfgs_periodic_restart"
)
self.periodic_its = 0

self._init_helpers()

@@ -78,6 +82,7 @@ def _init_helpers(self) -> None:
def run(self) -> None:
"""Solves the optimization problem with the L-BFGS method."""
self.initialize_solver()
self.periodic_its = 0
self.compute_gradient()
self.form_handler.compute_active_sets()
self.gradient_norm = (
@@ -88,6 +93,7 @@ def run(self) -> None:

while not self.converged:
self.compute_search_direction(self.gradient)
self.check_restart()
self.check_for_ascent()

self.objective_value = self.cost_functional.evaluate()
@@ -259,3 +265,21 @@ def update_hessian_approximation(self) -> None:
self.history_s.pop()
self.history_y.pop()
self.history_rho.pop()

def check_restart(self) -> None:
"""Checks, whether a restart should be performed and does so, if necessary."""
if self.bfgs_periodic_restart > 0:
if self.periodic_its < self.bfgs_periodic_restart:
self.periodic_its += 1
else:
for i in range(len(self.gradient)):
self.search_direction[i].vector().vec().aypx(
0.0, -self.gradient[i].vector().vec()
)
self.search_direction[i].vector().apply("")
self.periodic_its = 0
self.has_curvature_info = False

self.history_s.clear()
self.history_y.clear()
self.history_rho.clear()
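
As a side note, the following self-contained NumPy sketch (illustrative code, not part of cashocs; the quadratic objective, memory size, and restart period are arbitrary choices) mirrors the bookkeeping of ``check_restart`` inside a plain L-BFGS loop: once the counter reaches the configured period, the curvature history is discarded and the next step reverts to steepest descent ::

    import numpy as np


    def lbfgs_direction(grad, hist_s, hist_y, hist_rho):
        """Standard L-BFGS two-loop recursion for the search direction."""
        q = grad.copy()
        alphas = []
        for s, y, rho in zip(reversed(hist_s), reversed(hist_y), reversed(hist_rho)):
            a = rho * s.dot(q)
            q -= a * y
            alphas.append(a)
        if hist_s:  # scale the initial Hessian approximation (cf. use_bfgs_scaling)
            q *= hist_s[-1].dot(hist_y[-1]) / hist_y[-1].dot(hist_y[-1])
        for (s, y, rho), a in zip(zip(hist_s, hist_y, hist_rho), reversed(alphas)):
            b = rho * y.dot(q)
            q += (a - b) * s
        return -q


    def solve_quadratic(A, x0, memory=5, restart_period=3, tol=1e-9, max_iter=100):
        """Minimizes f(x) = 0.5 * x^T A x with L-BFGS and periodic restarts."""
        x = x0.copy()
        hist_s, hist_y, hist_rho = [], [], []
        periodic_its = 0
        grad = A @ x
        for _ in range(max_iter):
            if np.linalg.norm(grad) <= tol:
                break
            d = lbfgs_direction(grad, hist_s, hist_y, hist_rho)
            # same bookkeeping as check_restart: count the iterations and, once
            # the period is reached, discard all curvature pairs and fall back
            # to the steepest descent direction
            if restart_period > 0:
                if periodic_its < restart_period:
                    periodic_its += 1
                else:
                    d = -grad
                    hist_s.clear()
                    hist_y.clear()
                    hist_rho.clear()
                    periodic_its = 0
            step = -grad.dot(d) / d.dot(A @ d)  # exact line search for a quadratic
            x_new = x + step * d
            grad_new = A @ x_new
            s, y = x_new - x, grad_new - grad
            if s.dot(y) > 1e-14:  # keep the pair only if the curvature is positive
                hist_s.append(s)
                hist_y.append(y)
                hist_rho.append(1.0 / s.dot(y))
                if len(hist_s) > memory:
                    hist_s.pop(0)
                    hist_y.pop(0)
                    hist_rho.pop(0)
            x, grad = x_new, grad_new
        return x


    x_opt = solve_quadratic(np.diag([1.0, 10.0, 100.0]), np.ones(3))
    print(np.linalg.norm(x_opt))  # close to zero, the minimizer of the quadratic
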
5 changes: 5 additions & 0 deletions cashocs/io/config.py
@@ -238,6 +238,10 @@ def __init__(self, config_file: Optional[str] = None) -> None:
"use_bfgs_scaling": {
"type": "bool",
},
"bfgs_periodic_restart": {
"type": "int",
"attributes": ["non_negative"],
},
},
"AlgoCG": {
"cg_method": {
@@ -589,6 +593,7 @@ def __init__(self, config_file: Optional[str] = None) -> None:
[AlgoLBFGS]
bfgs_memory_size = 5
use_bfgs_scaling = True
bfgs_periodic_restart = 0

[AlgoCG]
cg_method = DY
9 changes: 9 additions & 0 deletions docs/source/demos/optimal_control/doc_config.rst
@@ -286,6 +286,12 @@ This determines whether one should use a scaling of the initial Hessian approximation
(see `Nocedal and Wright, Numerical Optimization <https://doi.org/10.1007/978-0-387-40065-5>`_).
This is usually very beneficial and should be kept enabled, which it is by default.

Third, we have the parameter ``bfgs_periodic_restart``, which is set in the line ::

bfgs_periodic_restart = 0

This is a non-negative integer which specifies the number of BFGS iterations after which the method is reinitialized (restarted). If this is ``0`` (the default), no restarts are performed.
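
The option can also be set programmatically; the following minimal sketch mirrors the test added in this PR (the file name ``config_ocp.ini`` is a placeholder, and the commented lines assume the problem-specific objects ``F``, ``bcs``, ``J``, ``y``, ``u``, ``p`` from the demos) ::

    import cashocs

    # load a configuration and restart the BFGS method every other iteration
    config = cashocs.load_config("config_ocp.ini")
    config.set("AlgoLBFGS", "bfgs_periodic_restart", "2")

    # the configuration is then passed to the optimization problem as usual, e.g.
    # ocp = cashocs.OptimalControlProblem(F, bcs, J, y, u, p, config)
    # ocp.solve("bfgs", rtol=1e-2, atol=0.0, max_iter=20)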

.. _config_ocp_algocg:

Section AlgoCG
@@ -583,6 +589,9 @@ in the following.
* - use_bfgs_scaling
- ``True``
- if ``True``, uses a scaled identity mapping as initial guess for the inverse Hessian
* - bfgs_periodic_restart
- ``0``
- specifies after how many iterations the method is restarted; if this is ``0``, no restarting is done


[AlgoCG]
10 changes: 9 additions & 1 deletion docs/source/demos/shape_optimization/doc_config.rst
@@ -303,6 +303,12 @@ This determines whether one should use a scaling of the initial Hessian approximation
(see `Nocedal and Wright, Numerical Optimization <https://doi.org/10.1007/978-0-387-40065-5>`_).
This is usually very beneficial and should be kept enabled (which is the default).

Third, we have the parameter ``bfgs_periodic_restart``, which is set in the line ::

bfgs_periodic_restart = 0

This is a non-negative integer which specifies the number of BFGS iterations after which the method is reinitialized (restarted). If this is ``0`` (the default), no restarts are performed.

.. _config_shape_algocg:

Section AlgoCG
@@ -1011,7 +1017,9 @@ in the following.
* - use_bfgs_scaling
- ``True``
- if ``True``, uses a scaled identity mapping as initial guess for the inverse Hessian

* - bfgs_periodic_restart
- ``0``
- specifies after how many iterations the method is restarted; if this is ``0``, no restarting is done

[AlgoCG]
********
13 changes: 13 additions & 0 deletions tests/test_optimal_control.py
@@ -205,6 +205,19 @@ def test_control_bfgs():
assert ocp.solver.relative_norm <= ocp.solver.rtol


def test_control_bfgs_restarted():
u.vector().vec().set(0.0)
u.vector().apply("")

config = cashocs.load_config(dir_path + "/config_ocp.ini")

config.set("AlgoLBFGS", "bfgs_periodic_restart", "2")

ocp = cashocs.OptimalControlProblem(F, bcs, J, y, u, p, config)
ocp.solve("bfgs", rtol=1e-2, atol=0.0, max_iter=20)
assert ocp.solver.relative_norm <= ocp.solver.rtol


def test_control_newton_cg():
config = cashocs.load_config(dir_path + "/config_ocp.ini")
