# ecos_example_lpgd.py
# Forked from cvxgrp/diffcp (39 lines, 29 loc, 1009 bytes).
import diffcp
import numpy as np
import utils
# Print floats compactly: 5 decimal places, no scientific notation.
np.set_printoptions(precision=5, suppress=True)

# Build a random cone program over a product cone consisting of a
# 3-d fixed (zero) cone, the 3-d nonnegative orthant, and a single
# 5-d second-order cone.
K = {'z': 3, 'l': 3, 'q': [5]}
m = K['z'] + K['l'] + sum(K['q'])  # total cone dimension: 3 + 3 + 5
n = 5
np.random.seed(0)  # deterministic problem data
A, b, c = utils.random_cone_prog(m, n, K)

# Solve the cone program with ECOS in "lpgd" mode, obtaining the
# solution along with the derivative map and its adjoint.
x, y, s, derivative, adjoint_derivative = diffcp.solve_and_derivative(
    A, b, c, K, solve_method="ECOS", verbose=False, mode="lpgd",
    derivative_kwargs={"tau": 0.1, "rho": 0.0})
for label, value in (("x", x), ("y", y), ("s", s)):
    print(label, "=", value)

# Gradient of the objective with respect to A, b, c: seed the adjoint
# with dx = c and zero perturbations for dy and ds.
dA, db, dc = adjoint_derivative(c, np.zeros(m), np.zeros(m))

# Sanity check: the gradient of the objective with respect to b should
# equal minus the dual variable y (see, e.g., page 268 of Convex
# Optimization by Boyd & Vandenberghe).
print("db =", db)
print("-y =", -y)