# cma_covmat_record.py
"""Benchmark code for comparing the cma es algorithm and the ones implemnted by us and Genetic Algorithm"""
import os
import sys
from os.path import join
import torch
import numpy as np
import pickle as pkl
from easydict import EasyDict
from core.GAN_utils import upconvGAN
from core.CNN_scorers import TorchScorer
from time import time
import warnings
import cma
from argparse import ArgumentParser
warnings.simplefilter("ignore", cma.evolution_strategy.InjectionWarning)
#%%
parser = ArgumentParser()
parser.add_argument('--units', nargs='+', type=str, required=True)
parser.add_argument('--chan_rng', nargs=2, type=int, default=[0, 10])
parser.add_argument('--rep', type=int, default=5)
parser.add_argument('--noise_lvl', type=float, default=0)
parser.add_argument('--fevalN', type=int, default=3000)
# parser.add_argument('--RFfit', action='store_true') # will be false if not specified.
# parser.add_argument('--imgsize', nargs=2, type=int, default=[227, 227])
# parser.add_argument('--corner', nargs=2, type=int, default=[0, 0])
args = parser.parse_args()
budget = args.fevalN
noise_level = args.noise_lvl
netname = args.units[0]
layername = args.units[1]
chan_rng = args.chan_rng
repetition = args.rep
if len(args.units) == 5:
    centpos = (int(args.units[3]), int(args.units[4]))
    units = (netname, layername, int(args.units[2]), int(args.units[3]), int(args.units[4]))
elif len(args.units) == 3:
    centpos = None
    units = (netname, layername, int(args.units[2]))
else:
    raise ValueError("args.units should be a 3 element or 5 element tuple!")
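# Illustrative invocations (not part of the original file): the unit spec mirrors the
# commented-out example unit further below, ("alexnet", ".features.ReLU11", 5, 6, 6);
# adjust network and layer names to whatever your TorchScorer setup supports.
#   python cma_covmat_record.py --units alexnet .features.ReLU11 5 6 6 --chan_rng 0 10 --rep 5
#   python cma_covmat_record.py --units alexnet .features.ReLU11 5 --noise_lvl 0.2 --fevalN 3000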
#%%
from core.Optimizers import Genetic, CholeskyCMAES, ZOHA_Sphere_lr_euclid, pycma_optimizer
popsize = 40
def get_optimizer(optimname, opts={}):
    if optimname == "Genetic":
        population_size = 40
        mutation_rate = 0.25
        mutation_size = 0.75
        kT_multiplier = 2
        n_conserve = 10
        parental_skew = 0.75
        optimizer = Genetic(4096, population_size, mutation_rate, mutation_size, kT_multiplier,
                            parental_skew=parental_skew, n_conserve=n_conserve)
    elif optimname == "CholeskyCMAES":
        optimizer = CholeskyCMAES(4096, population_size=40, init_sigma=3.0,
                                  Aupdate_freq=10, init_code=np.zeros([1, 4096]))
    elif optimname == "CholeskyCMAES_Aupdate1":
        optimizer = CholeskyCMAES(4096, population_size=40, init_sigma=3.0,
                                  Aupdate_freq=1, init_code=np.zeros([1, 4096]))
    elif optimname == "pycma":
        optimizer = pycma_optimizer(4096, population_size=40, sigma0=2.0,
                                    inopts={}, maximize=True)
    elif optimname == "pycmaDiagonal":
        optimizer = pycma_optimizer(4096, population_size=40, sigma0=2.0,
                                    inopts={"CMA_diagonal": True}, maximize=True)
    elif optimname == "ZOHA_Sphere_exp":
        optimizer = ZOHA_Sphere_lr_euclid(4096, population_size=40, select_size=20,
                                          lr=1.5, sphere_norm=300)
        optimizer.lr_schedule(n_gen=75, mode="exp", lim=(50, 7.33), )
    elif optimname == "ZOHA_Sphere_inv":
        optimizer = ZOHA_Sphere_lr_euclid(4096, population_size=40, select_size=20,
                                          lr=1.5, sphere_norm=300)
        optimizer.lr_schedule(n_gen=75, mode="inv", lim=(50, 7.33), )
    else:
        raise NotImplementedError
    return optimizer
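# Sketch of how the factory above is used (assumes the core.Optimizers interface seen in the
# main loop below: get_init_pop() returns an initial batch of 4096-d codes and step_simple()
# proposes the next generation given the current scores; the random scores are placeholders):
#   optim = get_optimizer("CholeskyCMAES")
#   codes = optim.get_init_pop()                                  # shape (popsize, 4096)
#   newcodes = optim.step_simple(np.random.rand(codes.shape[0]), codes, verbosity=0)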
def add_noise(clean_score, noise_level):
    if noise_level == 0.0:
        return clean_score
    else:
        noise_gain = np.maximum(0, 1 + noise_level * np.random.randn(*clean_score.shape))
        return clean_score * noise_gain
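# The noise model is multiplicative: each clean score is scaled by a gain drawn from a normal
# distribution with mean 1 and std noise_level, clipped at zero, so noise_level=0.2 perturbs
# scores by roughly 20% relative std. A quick illustration (output values are hypothetical):
#   add_noise(np.array([1.0, 2.0, 4.0]), 0.2)  ->  e.g. array([1.13, 1.87, 4.35])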
def get_CovMat_spectrum(optim):
    if isinstance(optim, CholeskyCMAES):
        Atsr = torch.tensor(optim.A).cuda()
        Ctsr = Atsr @ Atsr.T
        D2, eigvecs = torch.linalg.eigh(Ctsr)
        D2 = D2.cpu().numpy()
        eigvecs = eigvecs.cpu().numpy()
        covmat = Ctsr.cpu().numpy()
    elif isinstance(optim, pycma_optimizer):
        if optim.es.opts['CMA_diagonal']:  # Diagonal CMA
            D2 = np.array(optim.es.sigma_vec.tolist())**2
            covmat = np.diag(D2)
            eigvecs = np.eye(4096)
        else:  # normal CMA
            D2 = optim.es.D**2
            covmat = optim.es.C
            eigvecs = optim.es.B
    else:
        raise NotImplementedError
    return D2, eigvecs, covmat
def covmat_calc_stats(D2, eigvecs, covmat):
    maxeig = D2.max()
    mineig = D2.min()
    condnum = maxeig / mineig
    devi2id = np.linalg.norm(covmat - np.eye(4096), 'fro')**2 / np.linalg.norm(covmat, 'fro')**2
    summarystr = "Max 1 + %.3e Min 1 - %.4e Condition %.4e\nDistance to identity %.5e" % (maxeig - 1, 1 - mineig, condnum, devi2id)
    return EasyDict(maxeig=maxeig, mineig=mineig, condnum=condnum, devi2id=devi2id), summarystr
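# Sketch of how the two helpers above are combined after a run (mirrors the main loop below):
#   D2, eigvecs, covmat = get_CovMat_spectrum(optim)
#   stats, msg = covmat_calc_stats(D2, eigvecs, covmat)
#   # stats.condnum -> ratio of largest to smallest eigenvalue of the search covariance
#   # stats.devi2id -> squared Frobenius distance to the 4096-d identity, normalized by ||C||_F^2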
#%%
if sys.platform == "linux":
    rootdir = "/scratch1/fs1/crponce/cma_optim_covmat"
else:
    rootdir = r"D:\Github\ActMax-Optimizer-Dev\optim_log2"
os.makedirs(rootdir, exist_ok=True)
optimlist = ["CholeskyCMAES", "CholeskyCMAES_Aupdate1", "pycma", "pycmaDiagonal"]
G = upconvGAN("fc6")
G.eval().cuda()
G.requires_grad_(False)
for channel in range(chan_rng[0], chan_rng[1]):
    if len(units) == 5:
        unit = (netname, layername, channel, *centpos)
        unit_lab = "%s_%s_%03d" % (netname, unit[1], unit[2])
    elif len(units) == 3:
        unit = (netname, layername, channel,)
        unit_lab = "%s_%s_%03d" % (netname, unit[1], unit[2])
    else:
        raise ValueError
    if noise_level > 0:
        unit_lab = unit_lab + "_ns%.1f" % noise_level
    savedir = join(rootdir, unit_lab)
    os.makedirs(savedir, exist_ok=True)
    #%%
    # unit = ("alexnet", ".features.ReLU11", 5, 6, 6)
    # netname = unit[0]
    scorer = TorchScorer(netname)  # _linf8
    scorer.select_unit(unit)
    for repi in range(repetition):
        RND = np.random.randint(100000)
        log_file = open(join(savedir, "optimlog_%s_%s_%03d_%05d.txt" % (netname, layername, channel, RND)), "w+")
        optim_log_dict = {}
        for optimname in optimlist:
            optim = get_optimizer(optimname)
            codes = optim.get_init_pop()  # random initial population on the sphere.
            generations = []
            scores_col = []
            cleanscore_col = []
            codes_col = []
            nGeneration = int(budget / popsize)
            t0 = time()
            for i in range(nGeneration):
                with torch.no_grad():
                    cleanscores = scorer.score(G.visualize(torch.tensor(
                        codes, dtype=torch.float32, device="cuda")))  # .reshape([-1, 4096])
                scores = add_noise(cleanscores, noise_level)
                newcodes = optim.step_simple(scores, codes, verbosity=0)
                cleanscore_col.append(cleanscores)
                scores_col.append(scores)
                codes_col.append(codes)
                generations.extend([i] * len(scores))
                codes = newcodes
            t1 = time()
            runtime = t1 - t0
            scores_all = np.concatenate(scores_col, axis=0)
            cleanscores_all = np.concatenate(cleanscore_col, axis=0)
            codes_all = np.concatenate(codes_col, axis=0)
            generations = np.array(generations)
            # final generation codes
            codes = np.array(codes)
            final_norm = np.linalg.norm(codes, axis=1).mean()
            bestcode = codes.mean(axis=0)
            D2, eigvecs, covmat = get_CovMat_spectrum(optim)
            statsdict, spectstr = covmat_calc_stats(D2, eigvecs, covmat)
            summarystr = f"{optimname} took {t1 - t0:.3f} sec, code norm {final_norm:.2f} \n score max {scores_all.max():.3f}, final mean {scores.mean():.3f},\n" + \
                         f"clean score max {cleanscores_all.max():.3f}, clean score final mean {cleanscores.mean():.3f}.\n"
            print(summarystr + spectstr + "\n", end="")
            log_file.write(summarystr + spectstr + "\n")
            optim_log_dict[optimname] = EasyDict(maxobj=scores_all.max(), bestcode=bestcode,
                                                 codenorm=final_norm, runtime=runtime, scores_all=scores_all,
                                                 cleanscores_all=cleanscores_all, **statsdict)
            meancodes = [np.mean(codes_all[generations == i, :], axis=0) for i in range(nGeneration)]
            meancodes = np.array(meancodes)
            # if optimname == "CholeskyCMAES":
            np.savez(join(savedir, r"%s_%s_%s_%03d_%05d_covmat.npz") % (optimname, netname, layername, channel, RND),
                     generations=generations, meancodes=meancodes, D2=D2, eigvecs=eigvecs,
                     scores_all=scores_all, cleanscores_all=cleanscores_all, runtime=runtime)
        log_file.close()
        pkl.dump(optim_log_dict,
                 open(join(savedir, "summarize_%s_%s_%03d_%05d.pkl") % (netname, layername, channel, RND), "wb"))
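# Reading the recorded results back (illustrative only, intended for a separate analysis
# session; the file names below are placeholders built from the np.savez / pkl.dump patterns
# above with an example optimizer, unit, and random suffix):
#   import numpy as np, pickle as pkl
#   data = np.load("CholeskyCMAES_alexnet_.features.ReLU11_005_01234_covmat.npz")
#   D2, eigvecs = data["D2"], data["eigvecs"]          # covariance spectrum of that run
#   summary = pkl.load(open("summarize_alexnet_.features.ReLU11_005_01234.pkl", "rb"))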