xnor-amplitude.py
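# MAP-Elites search over the parameters of a KdV (Korteweg-de Vries) solver.
# Inferred from the file name and the code below: each genome encodes two
# cnoidal-wave wavenumbers (a, b), four amplitude genes (c-f) and four
# detection-time offsets (t0-t3), and the target appears to be an XNOR-like,
# amplitude-encoded response.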
import itertools
import os
from datetime import datetime
from multiprocessing import Manager
from typing import Any
from argparse import ArgumentParser
import numpy as np
from nptyping import NDArray, Float
from KDVEquation import KDVEquation
from MAPElites import MAPElites


def parser():
    parser = ArgumentParser()
    parser.add_argument('--seed', type=int, default=0, help="Random seed.")
    parser.add_argument('--n_workers', type=int, default=12,
                        help="Number of processes for parallelization.")
    return parser.parse_args()
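

# Note on the fitness below (inferred from the code, not documented here):
# the four KdV runs cover the four combinations of the two cnoidal-wave
# amplitude pairs (c, d) x (e, f), i.e. the input rows of a two-input gate.
# np.linalg.det needs a square matrix, so kdv.solve() presumably returns one
# value per detection time; |det| is large only when the four responses are
# close to linearly independent, i.e. clearly distinguishable.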
def fitness_fn(array: NDArray[Any, Float]) -> float:
    # The shared cache and lock are created by the Manager in the main block
    # and inherited by the worker processes.
    global fitness_fn_cache
    global fitness_function_lock
    x = tuple(array)
    x_hash = hash(x)
    if x_hash not in fitness_fn_cache.keys():
        a, b, c, d, e, f, t0, t1, t2, t3 = x
        # Rescale the four amplitude genes from [0, 1] to roughly [0, 0.5).
        c, d, e, f = [abs(t / 2 - 1e-10) for t in [c, d, e, f]]
        k_cnoidals = [a, b]
        eps_soliton = 1.0
        k_soliton = 0.5
        nu = 0.333
        rho2 = 1.0
        delta = 20
        t_delay = 12.75
        detection_times = [40 + 20 * t for t in [t0, t1, t2, t3]]
        results = []
        # One KdV run per combination of the two cnoidal-wave amplitude pairs.
        for eps_cnoidals in itertools.product([c, d], [e, f]):
            kdv = KDVEquation(
                eps_cnoidals=eps_cnoidals,
                k_cnoidals=k_cnoidals,
                eps_soliton=eps_soliton,
                k_soliton=k_soliton,
                nu=nu,
                rho2=rho2,
                delta=delta,
                t_delay=t_delay,
                ts_detection=detection_times)
            results.append(kdv.solve())
        # Fitness: magnitude of the determinant of the matrix whose rows are
        # the solver outputs for the four input combinations.
        with fitness_function_lock:
            fitness_fn_cache.update(
                {x_hash: (x, np.abs(np.linalg.det(np.array(results))))})
    return fitness_fn_cache[x_hash][1]


if __name__ == '__main__':
    args = parser()

    # Shared cache and lock so that parallel workers can reuse and safely
    # store fitness evaluations.
    fitness_fn_manager = Manager()
    fitness_fn_cache = fitness_fn_manager.dict()
    fitness_function_lock = fitness_fn_manager.Lock()

    def initializer(rng: np.random.Generator) -> NDArray[Any, Float]:
        return rng.uniform(lb, ub)

    def feature_fn(x: NDArray[Any, Float]) -> NDArray[Any, Float]:
        # Behaviour descriptors: mean and standard deviation of the four
        # amplitude genes.
        a, b, c, d, e, f, t0, t1, t2, t3 = tuple(x)
        return np.array([float(np.mean([c, d, e, f])),
                         float(np.std([c, d, e, f]))])

    # Feature-space bounds and archive resolution (50 x 50 grid).
    feature_lb = np.array([0., 0.])
    feature_ub = np.array([1.0, 1.0])
    feature_grid_size = np.array([50, 50])

    # Search-space bounds for the ten genome parameters.
    lb = np.array([0.] * 10)
    ub = np.array([1.] * 10)

    def mutation(x: NDArray[Any, Float],
                 rng: np.random.Generator) -> NDArray[Any, Float]:
        # Gaussian perturbation, clipped back into the search bounds.
        return np.clip(x + rng.normal(0, 0.1, x.shape[0]), lb, ub)

    algorithm = MAPElites(
        initializer=initializer,
        fitness_fn=fitness_fn,
        feature_fn=feature_fn,
        lower_bound=feature_lb,
        upper_bound=feature_ub,
        grid_size=feature_grid_size,
        mutation=mutation,
        n_jobs=args.n_workers)
    algorithm.run(g_steps=2000, e_steps=8000, seed=args.seed)

    # Write the final archive and every evaluated individual to CSV.
    time = datetime.now().strftime('%Y-%m-%d_%H-%M-%S') + "-xnor-ampl-ampl"
    os.makedirs(f'results/{time}', exist_ok=True)
    with open(f'results/{time}/archive.csv', 'w') as file:
        file.write("a,b,c,d,e,f,t0,t1,t2,t3,x1,x2,fitness\n")
        for key, value in algorithm.archive.items():
            a, b, c, d, e, f, t0, t1, t2, t3 = value[0]
            x1, x2 = key
            file.write(
                f"{a},{b},{c},{d},{e},{f},{t0},{t1},{t2},{t3},{x1},{x2},{value[1]}\n")
    with open(f'results/{time}/individuals.csv', 'w') as file:
        file.write("a,b,c,d,e,f,t0,t1,t2,t3,fitness\n")
        for key, value in fitness_fn_cache.items():
            a, b, c, d, e, f, t0, t1, t2, t3 = value[0]
            file.write(
                f"{a},{b},{c},{d},{e},{f},{t0},{t1},{t2},{t3},{value[1]}\n")