-
Notifications
You must be signed in to change notification settings - Fork 1
/
cvar.py
79 lines (62 loc) · 2.94 KB
/
cvar.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
Closure generation for distributionally robust cvar constraints as defined in
"Distributionally Robust Infinite-horizon Control" (DRInC) by
JS Brouillon et al., 2023.
Copyright Jean-Sébastien Brouillon (2024)
"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
import numpy as np
import cvxpy as cp
from utils.data_structures import Polytope
def cvar_constraints(feasible_set: Polytope, support: Polytope,
                     radius: float, p_level: float):
    """
    This function generates the closure that defines the distributionally robust
    conditional value-at-risk constraints for a given feasible set and noise
    distribution support. The closure can be used afterward for various
    different optimization problems over the SLS closed loop map phi.
    :param feasible_set: Polytope, the feasible set of the optimization problem.
    :param support: Polytope, the support of the noise distribution.
    :param radius: Radius of the Wasserstein ball, must be strictly positive.
    :param p_level: Probability level of the cvar constraints, in (0, 1].
    :return: closure with signature (phi, xis, ps=None) -> cons, where phi is
        the SLS closed loop map, xis are the samples of the empirical
        distribution at the center of the Wasserstein ball (one column per
        sample), ps are optional per-sample probabilities (uniform 1/n when
        None; expected shape matching zeta, i.e. (n_samples, 1) — TODO confirm
        against callers), and cons is a list of linear matrix inequality
        constraints.
    :raises ValueError: if radius is not positive or p_level is not in (0, 1].
    """
    # Check that the radius is positive
    if radius <= 0:
        raise ValueError("The radius must be positive.")
    # The probability level must lie in (0, 1]: p_level is used as a
    # denominator below, so 0 must be rejected here rather than failing
    # later with a ZeroDivisionError inside the closure.
    if p_level <= 0 or p_level > 1:
        raise ValueError("The probability level must be in (0, 1].")

    # Short notations
    _H = support.h
    # Ensure the support offset is a column vector
    _h = support.g if len(support.g.shape) == 2 else support.g[:, None]
    # Append one zero row to the feasible-set matrix; it pairs with the
    # extra -tau entry stacked under feasible_set.g inside the closure.
    _G = np.vstack((feasible_set.h, 0*feasible_set.h[[0], :]))
    _j = _G.shape[0]
    _y = p_level

    def mkcons(phi, xis, ps=None):
        """Build the DR-CVaR LMI constraint list for closed loop map phi,
        samples xis (one column per sample) and optional probabilities ps."""
        # Optimization variables
        tau = cp.Variable((1, 1))
        rho = cp.Variable()
        zeta = cp.Variable((xis.shape[1], 1))

        # Short notations
        _n = xis.shape[1]  # Number of samples
        _g = cp.vstack((feasible_set.g, -tau))
        _p = ps if ps is not None else 1 / _n

        # CVaR epigraph constraint. The empirical expectation of zeta is
        # sum_i p_i * zeta_i, i.e. cp.sum(cp.multiply(_p, zeta)).
        # (Fix: the previous cp.multiply(_p, cp.sum(zeta)) only coincides
        # with this for the uniform scalar _p = 1/n and is wrong for a
        # user-supplied ps vector.)
        cons = [rho >= 0,
                rho*radius + (_y-1)/_y*tau
                + cp.sum(cp.multiply(_p, zeta)) <= 0]

        # Wasserstein ball constraints: one PSD (LMI) block per sample i
        # and per row j of the augmented feasible-set matrix _G.
        for i, xii in enumerate(xis.T):
            k_i = cp.Variable((_j, _h.shape[0]))
            cons += [k_i >= 0]
            for j, _gj in enumerate(_G):
                # Schur-complement entries of the LMI
                _s = zeta[i] + (_g[j] - _gj @ phi @ xii)/_y \
                    - (_h - _H @ xii[:, None]).T @ k_i[j, :]
                _od = phi.T @ _gj[:, None] - _H.T @ k_i[[j], :].T * _y
                _m = cp.bmat([[_s[:, None], _od.T],
                              [_od, 4*rho*_y*_y*np.eye(phi.shape[1])]])
                cons += [_m >> 0]
        return cons
    return mkcons