Merge branch 'main' into hotfix/1.8.10
sblauth committed Oct 5, 2022
2 parents ae7b453 + 9b24745 commit ec5acdc
Showing 39 changed files with 934 additions and 309 deletions.
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
@@ -40,14 +40,14 @@ repos:


 - repo: https://github.com/PyCQA/pylint
-  rev: v2.15.0
+  rev: v2.15.3
   hooks:
   - id: pylint
     files: cashocs/


 - repo: https://github.com/pre-commit/mirrors-mypy
-  rev: v0.971
+  rev: v0.981
   hooks:
   - id: mypy
     files: cashocs/
24 changes: 23 additions & 1 deletion CHANGELOG.rst
@@ -11,12 +11,34 @@ in development

 * Added space mapping methods to cashocs. The space mapping methods can utilize parallelism via MPI.
 
+* Added polynomial-based models for computing trial stepsizes in an extended Armijo rule. This method will become the default line search in the future (a sketch of the quadratic model follows this diff).
+
+* Implemented a wrapper for cashocs-convert, so that it can be used from inside Python as well. Simply call cashocs.convert(inputfile) (a usage sketch follows this diff).
+
 * cashocs print calls now flush the output buffer, which helps when sys.stdout is a file
 
 * cashocs now uses pathlib instead of os.path
 
 * cashocs' loggers are no longer colored, which makes the log easier to read when logging to a file
 
-* implemented a wrapper for cashocs-convert, so that this can be used from inside python too. Simply call cashocs.convert(inputfile).
+* BFGS methods can now be used in a restarted fashion, if desired
+
+* New configuration file parameters (an example configuration follows this diff):
+
+  * Section AlgoLBFGS
+
+    * ``bfgs_periodic_restart`` is an integer parameter. If it is 0 (the default), no restarting is done; if it is greater than 0, the BFGS method is restarted after the given number of iterations.
+
+  * Section LineSearch is a completely new section in which the line search can be configured.
+
+    * ``method`` is a string parameter which can take the values ``armijo`` (the previous line search, which remains the default) and ``polynomial`` (the new models).
+
+    * ``polynomial_model`` is a string parameter which can be either ``quadratic`` or ``cubic``. If it is ``quadratic``, three values (the current function value, the directional derivative, and the trial function value) are used to generate a quadratic model of the one-dimensional cost functional. If it is ``cubic``, a cubic model is generated based on the last two guesses for the stepsize. These models are minimized exactly to obtain a new trial stepsize, and safeguarding is applied so that the steps remain feasible.
+
+    * ``factor_high`` is one parameter for the safeguarding: the upper bound of the search interval for the stepsize (it is multiplied with the previous stepsize).
+
+    * ``factor_low`` is the other parameter for the safeguarding: the lower bound of the search interval for the stepsize (it is multiplied with the previous stepsize).
 
 1.8.0 (July 6, 2022)
 --------------------
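As a quick illustration of the cashocs-convert wrapper mentioned in the changelog above — the file name below is a placeholder, and only the single-argument call given in the changelog entry is assumed:

    import cashocs

    # Convert a Gmsh mesh file from within Python, equivalent to running the
    # cashocs-convert command line tool ("./mesh.msh" is a placeholder path).
    cashocs.convert("./mesh.msh")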
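The new configuration file parameters could be used as sketched below. The section and parameter names are taken from the changelog entries; the concrete values are purely illustrative and not the documented defaults:

    [AlgoLBFGS]
    # Restart the BFGS method every 5 iterations (0, the default, disables restarting).
    bfgs_periodic_restart = 5

    [LineSearch]
    # Use the new polynomial models instead of the plain Armijo rule.
    method = polynomial
    # Build a cubic model from the last two stepsize guesses (or use "quadratic").
    polynomial_model = cubic
    # Safeguarding: keep each new trial stepsize within [0.1, 0.5] times the previous one.
    factor_low = 0.1
    factor_high = 0.5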
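For the ``quadratic`` polynomial model, one possible reading of the changelog entry is sketched below. This is not cashocs' actual implementation; the function is hypothetical, and the safeguarding mirrors the ``factor_low``/``factor_high`` parameters:

    def quadratic_trial_stepsize(
        f_current, derivative, alpha, f_trial, factor_low=0.1, factor_high=0.5
    ):
        """Minimize q(a) = f_current + derivative * a + c * a**2 over a.

        The model interpolates the current function value, the directional
        derivative at 0, and the trial function value at the stepsize alpha.
        """
        # Curvature coefficient from the interpolation condition q(alpha) = f_trial.
        c = (f_trial - f_current - derivative * alpha) / alpha**2
        if c <= 0.0:
            # The model is concave and has no minimizer; shrink as much as allowed.
            return factor_low * alpha
        alpha_star = -derivative / (2.0 * c)  # exact minimizer of the quadratic model
        # Safeguarding: keep the new trial inside [factor_low, factor_high] * alpha.
        return min(max(alpha_star, factor_low * alpha), factor_high * alpha)

For example, with f_current = 1.0, derivative = -2.0, alpha = 1.0, and f_trial = 0.8, the model minimizer is 2.0 / 3.6 ≈ 0.56; this exceeds factor_high * alpha = 0.5, so the safeguard returns the trial stepsize 0.5.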
5 changes: 4 additions & 1 deletion cashocs/_optimization/line_search/__init__.py
@@ -19,5 +19,8 @@

 from cashocs._optimization.line_search.armijo_line_search import ArmijoLineSearch
 from cashocs._optimization.line_search.line_search import LineSearch
+from cashocs._optimization.line_search.polynomial_line_search import (
+    PolynomialLineSearch,
+)
 
-__all__ = ["ArmijoLineSearch", "LineSearch"]
+__all__ = ["ArmijoLineSearch", "LineSearch", "PolynomialLineSearch"]
29 changes: 1 addition & 28 deletions cashocs/_optimization/line_search/armijo_line_search.py
@@ -22,7 +22,6 @@
 from typing import List
 
 import fenics
-import numpy as np
 from typing_extensions import TYPE_CHECKING
 
 from cashocs import _loggers
@@ -51,9 +50,6 @@ def __init__(
         self.epsilon_armijo: float = self.config.getfloat(
             "OptimizationRoutine", "epsilon_armijo"
         )
-        self.beta_armijo: float = self.config.getfloat(
-            "OptimizationRoutine", "beta_armijo"
-        )
         self.armijo_stepsize_initial = self.stepsize
         self.search_direction_inf = 1.0
         self.decrease_measure_w_o_step = 1.0
@@ -105,30 +101,7 @@ def search(
             (presumably) scaled.
         """
-        self.search_direction_inf = np.max(
-            [
-                search_direction[i].vector().norm("linf")
-                for i in range(len(self.gradient))
-            ]
-        )
-
-        if has_curvature_info:
-            self.stepsize = 1.0
-
-        num_decreases = (
-            self.optimization_variable_abstractions.compute_a_priori_decreases(
-                search_direction, self.stepsize
-            )
-        )
-        self.stepsize /= pow(self.beta_armijo, num_decreases)
-
-        if self.safeguard_stepsize and solver.iteration == 0:
-            search_direction_norm = np.sqrt(
-                self.form_handler.scalar_product(search_direction, search_direction)
-            )
-            self.stepsize = float(
-                np.minimum(self.stepsize, 100.0 / (1.0 + search_direction_norm))
-            )
+        self.initialize_stepsize(solver, search_direction, has_curvature_info)
 
         while True:

52 changes: 52 additions & 0 deletions cashocs/_optimization/line_search/line_search.py
@@ -23,6 +23,7 @@
 from typing import List, TYPE_CHECKING
 
 import fenics
+import numpy as np
 
 from cashocs import _utils

@@ -58,6 +59,10 @@ def __init__(self, optimization_problem: types.OptimizationProblem) -> None:
"OptimizationRoutine", "safeguard_stepsize"
)

self.beta_armijo: float = self.config.getfloat(
"OptimizationRoutine", "beta_armijo"
)

algorithm = _utils.optimization_algorithm_configuration(self.config)
self.is_newton_like = algorithm.casefold() == "lbfgs"
self.is_newton = algorithm.casefold() == "newton"
@@ -88,6 +93,53 @@ def perform(
         self.search(solver, search_direction, has_curvature_info)
         self.post_line_search()
 
+    def initialize_stepsize(
+        self,
+        solver: optimization_algorithms.OptimizationAlgorithm,
+        search_direction: List[fenics.Function],
+        has_curvature_info: bool,
+    ) -> None:
+        """Initializes the stepsize.
+
+        Performs various ways for safeguarding (can be deactivated).
+
+        Args:
+            solver: The optimization algorithm.
+            search_direction: The current search direction.
+            has_curvature_info: A flag, which indicates whether the direction is
+                (presumably) scaled.
+
+        """
+        self.search_direction_inf = np.max(
+            [
+                search_direction[i].vector().norm("linf")
+                for i in range(len(self.gradient))
+            ]
+        )
+
+        if has_curvature_info:
+            self.stepsize = 1.0
+
+        if solver.is_restarted:
+            self.stepsize = self.config.getfloat(
+                "OptimizationRoutine", "initial_stepsize"
+            )
+
+        num_decreases = (
+            self.optimization_variable_abstractions.compute_a_priori_decreases(
+                search_direction, self.stepsize
+            )
+        )
+        self.stepsize /= pow(self.beta_armijo, num_decreases)
+
+        if self.safeguard_stepsize and solver.iteration == 0:
+            search_direction_norm = np.sqrt(
+                self.form_handler.scalar_product(search_direction, search_direction)
+            )
+            self.stepsize = float(
+                np.minimum(self.stepsize, 100.0 / (1.0 + search_direction_norm))
+            )
+
     @abc.abstractmethod
     def search(
         self,
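To make the safeguards in the new initialize_stepsize method concrete, here is a small numeric walkthrough with made-up values (the numbers are illustrative and not taken from the source):

    import numpy as np

    stepsize = 1.0     # e.g. after a quasi-Newton step with curvature information
    beta_armijo = 2.0  # illustrative value of the corresponding config parameter

    # Suppose the a priori check demands two decreases to keep the step feasible.
    num_decreases = 2
    stepsize /= pow(beta_armijo, num_decreases)  # 1.0 / 2**2 = 0.25

    # In the very first iteration, a large search direction additionally caps the
    # stepsize at 100 / (1 + ||d||); here the cap (10.0) is inactive.
    search_direction_norm = 9.0
    stepsize = float(np.minimum(stepsize, 100.0 / (1.0 + search_direction_norm)))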