Merge pull request #269 from dynamicslab/cassio_branch
New optimizer for stable linear models of arbitrary dimension
akaptano authored Dec 28, 2022
2 parents 1e1b9bd + 7421bf7 commit a299cd8
Showing 17 changed files with 1,955 additions and 77 deletions.
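The new StableLinearSR3 optimizer is exported from the top-level pysindy namespace (see the pysindy/__init__.py diff below). A minimal usage sketch, assuming the default constructor and the standard SINDy fit API; the simulated system and library choice here are illustrative and not taken from this pull request:

```python
import numpy as np
from scipy.integrate import solve_ivp
import pysindy as ps

# Illustrative data: trajectory of a stable linear system dx/dt = A x
A = np.array([[-0.1, 2.0], [-2.0, -0.1]])
t = np.linspace(0, 10, 1000)
x = solve_ivp(lambda t_, x_: A @ x_, (t[0], t[-1]), [1.0, 0.5], t_eval=t).y.T

# StableLinearSR3 promotes an identified linear model that is stable
model = ps.SINDy(
    optimizer=ps.StableLinearSR3(),
    feature_library=ps.PolynomialLibrary(degree=1, include_bias=False),
)
model.fit(x, t=t)
model.print()
```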
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -14,7 +14,7 @@ repos:
rev: 22.3.0
hooks:
- id: black
- repo: https://gitlab.com/pycqa/flake8
- repo: https://github.com/PyCQA/flake8
rev: 5.0.4
hooks:
- id: flake8
1 change: 1 addition & 0 deletions docs/conf.py
@@ -25,6 +25,7 @@
"sphinx.ext.napoleon",
"sphinx.ext.mathjax",
"sphinx.ext.intersphinx",
"IPython.sphinxext.ipython_console_highlighting",
]
nb_execution_mode = "off"

4 changes: 2 additions & 2 deletions examples/13_ensembling.ipynb
@@ -1079,7 +1079,7 @@
"hash": "3ee6f1cb9fc3b265a5f24cdb7fa225f31e54d7494aa3be0e32b8f891af359708"
},
"kernelspec": {
"display_name": "Python 3.7.5 ('env': venv)",
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
@@ -1093,7 +1093,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.5"
"version": "3.7.4"
},
"toc": {
"base_numbering": 1,
556 changes: 556 additions & 0 deletions examples/18_linear_stable_models/StateSpaceModel.ipynb

Large diffs are not rendered by default.

Binary file not shown.
Binary file not shown.
287 changes: 287 additions & 0 deletions examples/18_linear_stable_models/example7_reboot.ipynb

Large diffs are not rendered by default.

245 changes: 245 additions & 0 deletions examples/18_linear_stable_models/viscoelastic_pod_models.ipynb

Large diffs are not rendered by default.

344 changes: 289 additions & 55 deletions examples/1_feature_overview.ipynb

Large diffs are not rendered by default.

4 changes: 4 additions & 0 deletions pysindy/__init__.py
@@ -44,6 +44,10 @@
from .optimizers import TrappingSR3
except ImportError:
pass
try: # Waiting on PEP 690 to lazy import CVXPY
from .optimizers import StableLinearSR3
except ImportError:
pass
from .optimizers import SINDyOptimizer
from .optimizers import SR3
from .optimizers import SSR
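Because the import above is wrapped in try/except ImportError (cvxpy is an optional dependency, and the comment notes PEP 690 lazy imports as the eventual alternative), the name is simply absent from the pysindy namespace when cvxpy is missing. A hedged sketch of how downstream code could guard for that; the STLSQ fallback is illustrative:

```python
import pysindy as ps

# StableLinearSR3 is only exported when its optional dependency (cvxpy) imports cleanly
if hasattr(ps, "StableLinearSR3"):
    optimizer = ps.StableLinearSR3()
else:
    optimizer = ps.STLSQ()  # fall back to an optimizer with no cvxpy requirement
```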
5 changes: 5 additions & 0 deletions pysindy/optimizers/__init__.py
@@ -15,6 +15,10 @@
from .sindy_pi import SINDyPI
except ImportError:
pass
try: # Waiting on PEP 690 to lazy import cvxpy
from .stable_linear_sr3 import StableLinearSR3
except ImportError:
pass
from .sindy_optimizer import SINDyOptimizer
from .sr3 import SR3
from .ssr import SSR
@@ -27,6 +31,7 @@
"SR3",
"STLSQ",
"ConstrainedSR3",
"StableLinearSR3",
"TrappingSR3",
"SSR",
"FROLS",
39 changes: 31 additions & 8 deletions pysindy/optimizers/constrained_sr3.py
@@ -162,7 +162,9 @@ def __init__(
copy_X=True,
initial_guess=None,
thresholds=None,
equality_constraints=False,
inequality_constraints=False,
constraint_separation_index=0,
verbose=False,
verbose_cvxpy=False,
):
@@ -184,29 +186,38 @@

self.verbose_cvxpy = verbose_cvxpy
self.reg = get_regularization(thresholder)
self.constraint_lhs = constraint_lhs
self.constraint_rhs = constraint_rhs
self.constraint_order = constraint_order
self.use_constraints = (constraint_lhs is not None) and (
constraint_rhs is not None
)

if (
self.use_constraints
and not equality_constraints
and not inequality_constraints
):
warnings.warn(
"constraint_lhs and constraint_rhs passed to the optimizer, "
" but user did not specify if the constraints were equality or"
" inequality constraints. Assuming equality constraints."
)
self.equality_constraints = True

if self.use_constraints:
if constraint_order not in ("feature", "target"):
raise ValueError(
"constraint_order must be either 'feature' or 'target'"
)

self.constraint_lhs = constraint_lhs
self.constraint_rhs = constraint_rhs
self.unbias = False
self.constraint_order = constraint_order

if inequality_constraints and not cvxpy_flag:
raise ValueError(
"Cannot use inequality constraints without cvxpy installed."
)

if inequality_constraints:
self.max_iter = max(10000, max_iter) # max iterations for CVXPY

if inequality_constraints and not self.use_constraints:
raise ValueError(
"Use of inequality constraints requires constraint_lhs and "
Expand All @@ -223,6 +234,8 @@ def __init__(
"Use of inequality constraints requires a convex regularizer."
)
self.inequality_constraints = inequality_constraints
self.equality_constraints = equality_constraints
self.constraint_separation_index = constraint_separation_index

def _update_full_coef_constraints(self, H, x_transpose_y, coef_sparse):
g = x_transpose_y + coef_sparse / self.nu
@@ -250,7 +263,18 @@ def _update_coef_cvxpy(self, x, y, coef_sparse):
elif self.thresholder.lower() == "weighted_l2":
cost = cost + cp.norm2(np.ravel(self.thresholds) @ xi)
if self.use_constraints:
if self.inequality_constraints:
if self.inequality_constraints and self.equality_constraints:
# Process inequality constraints then equality constraints
prob = cp.Problem(
cp.Minimize(cost),
[
self.constraint_lhs[: self.constraint_separation_index, :] @ xi
<= self.constraint_rhs[: self.constraint_separation_index],
self.constraint_lhs[self.constraint_separation_index :, :] @ xi
== self.constraint_rhs[self.constraint_separation_index :],
],
)
elif self.inequality_constraints:
prob = cp.Problem(
cp.Minimize(cost),
[self.constraint_lhs @ xi <= self.constraint_rhs],
@@ -442,7 +466,6 @@ def _reduce(self, x, y):
),
ConvergenceWarning,
)

if self.use_constraints and self.constraint_order.lower() == "target":
self.constraint_lhs = reorder_constraints(
self.constraint_lhs, n_features, output_order="target"
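The new branch in `_update_coef_cvxpy` handles the case where both equality and inequality constraints are supplied: one stacked constraint system is split at `constraint_separation_index`, with rows before the index treated as inequalities and the remaining rows as equalities. A self-contained CVXPY sketch of that splitting pattern, with an illustrative objective and hand-picked feasible constraints (not taken from this diff):

```python
import cvxpy as cp
import numpy as np

n_coef = 4
sep = 2  # analogous to constraint_separation_index: first `sep` rows are inequalities

# Stacked constraint system (rows 0-1 are inequalities, rows 2-3 are equalities)
lhs = np.eye(n_coef)
rhs = np.array([1.0, 1.0, 0.5, -0.5])

xi = cp.Variable(n_coef)
cost = cp.sum_squares(xi - np.array([2.0, 0.0, 0.0, 0.0]))  # stand-in objective

prob = cp.Problem(
    cp.Minimize(cost),
    [
        lhs[:sep, :] @ xi <= rhs[:sep],   # inequality block
        lhs[sep:, :] @ xi == rhs[sep:],   # equality block
    ],
)
prob.solve()
print(xi.value)  # xi[0] clipped to 1.0 by the inequality; xi[2:] pinned by the equalities
```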