import copy
import datetime
import io
import json
import os
import re

import mako.exceptions
import mako.lookup
import mako.template
import yaml

from detail_pages_cache import build_detail_pages_cache
from detail_pages_ec2 import build_detail_pages_ec2
from detail_pages_opensearch import build_detail_pages_opensearch
from detail_pages_rds import build_detail_pages_rds
from detail_pages_redshift import build_detail_pages_redshift


def network_sort(inst):
    perf = inst["network_performance"]
    network_rank = [
        "Very Low",
        "Low",
        "Low to Moderate",
        "Moderate",
        "High",
        "Up to 5 Gigabit",
        "Up to 10 Gigabit",
        "10 Gigabit",
        "12 Gigabit",
        "20 Gigabit",
        "Up to 25 Gigabit",
        "25 Gigabit",
        "50 Gigabit",
        "75 Gigabit",
        "100 Gigabit",
    ]
    try:
        sort = network_rank.index(perf)
    except ValueError:
        sort = len(network_rank)
    sort *= 2
    if inst.get("ebs_optimized"):
        sort += 1
    return sort
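

# Illustrative doctest of the sort key (the instance dicts here are made up;
# the values follow from the ranking list above — unranked strings sort past
# the end of the list, and EBS-optimized instances break ties upward):
#
#     >>> network_sort({"network_performance": "High", "ebs_optimized": True})
#     9
#     >>> network_sort({"network_performance": "10 Gigabit"})
#     14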


def add_cpu_detail(i):
    try:
        i["ECU_per_vcpu"] = i["ECU"] / i["vCPU"]
    except (KeyError, TypeError, ZeroDivisionError):
        # These will be instances with variable/burstable ECU
        i["ECU_per_vcpu"] = "unknown"

    try:
        if "vCPU" in i:
            # Only EC2 uses the capitalized "vCPU" key
            i["memory_per_vcpu"] = round(i["memory"] / i["vCPU"], 2)
        else:
            i["memory_per_vcpu"] = round(float(i["memory"]) / float(i["vcpu"]), 2)
    except (KeyError, TypeError, ValueError, ZeroDivisionError):
        # Just to be safe...
        i["memory_per_vcpu"] = "unknown"

    if "physical_processor" in i:
        i["physical_processor"] = (i["physical_processor"] or "").replace("*", "")
        i["intel_avx"] = "Yes" if i["intel_avx"] else ""
        i["intel_avx2"] = "Yes" if i["intel_avx2"] else ""
        i["intel_avx512"] = "Yes" if i["intel_avx512"] else ""
        i["intel_turbo"] = "Yes" if i["intel_turbo"] else ""


def add_render_info(i):
    try:
        i["network_sort"] = network_sort(i)
    except KeyError:
        # This instance, probably from a non-EC2 service, does not have
        # traditional networking specs
        pass
    add_cpu_detail(i)


prices_dict = {}
prices_index = 0


def _compress_pricing(d):
    # Recursively replace every dict key with a small integer id, interning
    # each distinct key once in the module-level prices_dict.
    global prices_index

    for k, v in d.items():
        if k in prices_dict:
            nk = prices_dict[k]
        else:
            prices_dict[k] = nk = prices_index
            prices_index += 1

        if isinstance(v, dict):
            nv = dict(_compress_pricing(v))
        else:
            nv = v

        yield nk, nv


def compress_pricing(instances):
    global prices_index

    prices = {i["instance_type"]: i["pricing"] for i in instances}

    prices_dict.clear()
    prices_index = 0

    # prices_dict is populated as a side effect of draining _compress_pricing
    return json.dumps({"index": prices_dict, "data": dict(_compress_pricing(prices))})
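

# Illustrative example of the compressed layout (the instance data is made
# up): every key seen anywhere in the pricing tree is interned once in
# "index", and "data" repeats only the small integer ids:
#
#     >>> compress_pricing([{"instance_type": "m5.large",
#     ...                    "pricing": {"us-east-1": {"linux": "0.096"}}}])
#     '{"index": {"m5.large": 0, "us-east-1": 1, "linux": 2}, "data": {"0": {"1": {"2": "0.096"}}}}'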


def compress_instance_azs(instances):
    instance_type_region_availability_zones = {}
    for inst in instances:
        if "instance_type" in inst and "availability_zones" in inst:
            instance_type_region_availability_zones[inst["instance_type"]] = inst[
                "availability_zones"
            ]
    return json.dumps(instance_type_region_availability_zones)
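

# Illustrative example (made-up instance data): only the type -> zones
# mapping survives:
#
#     >>> compress_instance_azs([{"instance_type": "m5.large",
#     ...     "availability_zones": {"us-east-1": ["use1-az1", "use1-az2"]}}])
#     '{"m5.large": {"us-east-1": ["use1-az1", "use1-az2"]}}'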


def about_page(destination_file="www/about.html"):
    print("Rendering to %s..." % destination_file)

    lookup = mako.lookup.TemplateLookup(directories=["."])
    template = mako.template.Template(filename="in/about.html.mako", lookup=lookup)
    generated_at = datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S UTC")

    os.makedirs(os.path.dirname(destination_file), exist_ok=True)
    with io.open(destination_file, "w+", encoding="utf-8") as fh:
        try:
            fh.write(template.render(generated_at=generated_at))
        except Exception:
            print(mako.exceptions.text_error_template().render())

    return destination_file


def build_sitemap(sitemap):
    HOST = ""

    surls = ['<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">']
    for url in sitemap:
        # Strip the local "www/" prefix, collapse "index" pages to their
        # directory, and drop the trailing ".html" extension.
        surl = url.replace("www/", "")
        if "index" in surl:
            surl = surl.replace("index", "")
        surls.append("<url><loc>{}/{}</loc></url>".format(HOST, surl[0:-5]))
    surls.append("</urlset>")

    destination_file = "www/sitemap.xml"
    print("Rendering all URLs to %s..." % destination_file)
    with io.open(destination_file, "w+") as fp:
        fp.write("\n".join(surls))
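

# For example, a sitemap entry of "www/rds/index.html" is emitted as
# "<url><loc>/rds/</loc></url>" while HOST is left empty.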


def per_region_pricing(instances, data_file, all_regions):
    # This function splits instances.json into per-region files which are
    # written to disk and can then be loaded by the web app to reduce the
    # amount of data that needs to be sent to the client.
    init_pricing_json = ""
    init_instance_azs_json = ""
    outdir = data_file.replace("instances.json", "")

    instances_no_pricing = copy.deepcopy(instances)
    for i in instances_no_pricing:
        if "pricing" in i:
            del i["pricing"]
        if "availability_zones" in i:
            del i["availability_zones"]

    for r in all_regions:
        # per_region_out aliases instances_no_pricing; its pricing and
        # availability_zones entries are reset on every pass of the loop.
        per_region_out = instances_no_pricing
        for i, inst in enumerate(instances):
            per_region_out[i]["pricing"] = {}
            per_region_out[i]["availability_zones"] = {}

            if r in inst["pricing"]:
                per_region_out[i]["pricing"][r] = instances[i]["pricing"][r]

            if "availability_zones" in inst and r in inst["availability_zones"]:
                per_region_out[i]["availability_zones"][r] = instances[i][
                    "availability_zones"
                ][r]

        pricing_out_file = "{}pricing_{}.json".format(outdir, r)
        azs_out_file = "{}instance_azs_{}.json".format(outdir, r)

        pricing_json = compress_pricing(per_region_out)
        instance_azs_json = compress_instance_azs(per_region_out)

        if r == "us-east-1":
            init_pricing_json = pricing_json
            init_instance_azs_json = instance_azs_json

        with open(pricing_out_file, "w+") as f:
            f.write(pricing_json)
        with open(azs_out_file, "w+") as f:
            f.write(instance_azs_json)

    return init_pricing_json, init_instance_azs_json
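

# As a concrete example: called with data_file="www/instances.json" and an
# all_regions containing "us-east-1" and "eu-west-1", this writes
# www/pricing_us-east-1.json, www/instance_azs_us-east-1.json,
# www/pricing_eu-west-1.json and www/instance_azs_eu-west-1.json, and
# returns the us-east-1 pair so the initial page load needs no extra fetch.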


def regions_list(instances):
    regions = {"main": {}, "local_zone": {}, "wavelength": {}}

    for i in instances:
        for r in i["pricing"]:
            try:
                # Wavelength Zones and Local Zones are split out from the
                # main regions by their naming patterns.
                if "wl1" in r or "wl2" in r:
                    regions["wavelength"][r] = i["regions"][r]
                elif len(re.findall(r"\d+", r)) > 1:
                    regions["local_zone"][r] = i["regions"][r]
                else:
                    regions["main"][r] = i["regions"][r]
            except KeyError:
                print('ERROR: "regions" key not found in instances.json. Run scrape.py.')

    return regions
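

# Classification examples (illustrative region names): "us-east-1" has a
# single digit group and lands in "main"; "us-east-1-bos-1" has two digit
# groups and lands in "local_zone"; "us-east-1-wl1-atl-wlz-1" contains "wl1"
# and lands in "wavelength".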


def render(data_file, template_file, destination_file, detail_pages=True):
    """Build the HTML content from scraped data"""
    lookup = mako.lookup.TemplateLookup(directories=["."])
    template = mako.template.Template(filename=template_file, lookup=lookup)

    print("Loading data from %s..." % data_file)
    with open(data_file, "r") as f:
        instances = json.load(f)

    for i in instances:
        add_render_info(i)
    regions = regions_list(instances)
    sitemap = []

    # Every service renders the main regions; EC2 adds Local Zones and
    # Wavelength Zones, and RDS adds Local Zones.
    all_regions = regions["main"].copy()
    if data_file == "www/instances.json":
        all_regions.update(regions["local_zone"])
        all_regions.update(regions["wavelength"])
        if detail_pages:
            sitemap.extend(build_detail_pages_ec2(instances, all_regions))
    elif data_file == "www/rds/instances.json":
        all_regions.update(regions["local_zone"])
        if detail_pages:
            sitemap.extend(build_detail_pages_rds(instances, all_regions))
    elif data_file == "www/cache/instances.json":
        if detail_pages:
            sitemap.extend(build_detail_pages_cache(instances, all_regions))
    elif data_file == "www/opensearch/instances.json":
        if detail_pages:
            sitemap.extend(build_detail_pages_opensearch(instances, all_regions))
    elif data_file == "www/redshift/instances.json":
        if detail_pages:
            sitemap.extend(build_detail_pages_redshift(instances, all_regions))

    generated_at = datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S UTC")
    pricing_json, instance_azs_json = per_region_pricing(
        instances, data_file, all_regions
    )

    print("Rendering to %s..." % destination_file)
    os.makedirs(os.path.dirname(destination_file), exist_ok=True)
    with io.open(destination_file, "w+", encoding="utf-8") as fh:
        try:
            fh.write(
                template.render(
                    instances=instances,
                    regions=regions,
                    pricing_json=pricing_json,
                    generated_at=generated_at,
                    instance_azs_json=instance_azs_json,
                )
            )
            sitemap.append(destination_file)
        except Exception:
            print(mako.exceptions.text_error_template().render())

    return sitemap


if __name__ == "__main__":
    sitemap = []
    sitemap.extend(render("www/instances.json", "in/index.html.mako", "www/index.html"))
    sitemap.extend(
        render("www/rds/instances.json", "in/rds.html.mako", "www/rds/index.html")
    )
    sitemap.extend(
        render(
            "www/cache/instances.json",
            "in/cache.html.mako",
            "www/cache/index.html",
        )
    )
    sitemap.extend(
        render(
            "www/redshift/instances.json",
            "in/redshift.html.mako",
            "www/redshift/index.html",
        )
    )
    sitemap.extend(
        render(
            "www/opensearch/instances.json",
            "in/opensearch.html.mako",
            "www/opensearch/index.html",
        )
    )
    sitemap.append(about_page())
    build_sitemap(sitemap)