#!/usr/bin/python3
import datetime
from content_resolver.analyzer import Analyzer
from content_resolver.data_generation import generate_data_files
from content_resolver.historic_data import generate_historic_data
from content_resolver.page_generation import generate_pages
from content_resolver.query import Query
from content_resolver.utils import load_data, log, datetime_now_string, dump_data
from content_resolver.config_manager import ConfigManager

# Features of this new release
# - multiarch from the ground up!
# - more resilient
# - better internal data structure
# - user-defined views

###############################################################################
### Help ######################################################################
###############################################################################
# Configs:
#   TYPE:            KEY:               ID:
#   - repo           repos              repo_id
#   - env_conf       envs               env_id
#   - workload_conf  workloads          workload_id
#   - label          labels             label_id
#   - conf_view      views              view_id
#
# Data:
#   TYPE:            KEY:               ID:
#   - pkg            pkgs/repo_id/arch  NEVR
#   - env            envs               env_id:repo_id:arch_id
#   - workload       workloads          workload_id:env_id:repo_id:arch_id
#   - view           views              view_id:repo_id:arch_id
#
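# For orientation, a sketch of how the IDs above compose into lookup keys.
# The concrete values ("my-workload", "minimal-env", "fedora-40", ...) are
# made-up examples, not IDs shipped with any config:
#
#   # one workload result per workload/env/repo/arch combination:
#   data["workloads"]["my-workload:minimal-env:fedora-40:x86_64"]
#
#   # packages are keyed by repo, then arch, then NEVR:
#   data["pkgs"]["fedora-40"]["x86_64"]["bash-0:5.2.26-1.fc40"]
#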
###############################################################################
### Main ######################################################################
###############################################################################

def main():

    # -------------------------------------------------
    # Stage 1: Data collection and analysis using DNF
    # -------------------------------------------------

    # measuring time of execution
    time_started = datetime_now_string()

    config_manager = ConfigManager()
    settings = config_manager.settings

    settings["global_refresh_time_started"] = datetime.datetime.now().strftime("%-d %B %Y %H:%M UTC")

    if settings["use_cache"]:
        # Reuse configs and analysis results from a previous run,
        # skipping the slow DNF-based analysis entirely.
        configs = load_data("cache_configs.json")
        data = load_data("cache_data.json")
    else:
        configs = config_manager.get_configs()
        analyzer = Analyzer(configs, settings)
        data = analyzer.analyze_things()

        if settings["dev_buildroot"]:
            # Dump the fresh results so a later run can reuse them
            # via the use_cache option above.
            dump_data("cache_configs.json", configs)
            dump_data("cache_data.json", data)

    # measuring time of execution
    time_analysis_time = datetime_now_string()

    # -------------------------------------------------
    # Stage 2: Generating pages and data outputs
    # -------------------------------------------------

    query = Query(data, configs, settings)

    generate_pages(query)
    generate_data_files(query)
    generate_historic_data(query)

    # -------------------------------------------------
    # Done! Printing final summary
    # -------------------------------------------------

    # measuring time of execution
    time_ended = datetime_now_string()

    # Print extra metrics
    if not settings["use_cache"]:
        analyzer.print_metrics()

    # Print base metrics
    log("")
    log("=============================")
    log("Feedback Pipeline build done!")
    log("=============================")
    log("")
    log("  Started:        {}".format(time_started))
    log("  Analysis done:  {}".format(time_analysis_time))
    log("  Finished:       {}".format(time_ended))
    log("")

if __name__ == "__main__":
    main()
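
# A minimal invocation sketch: the script is executable, so with the configs
# expected by ConfigManager in place (their location and any CLI flags are
# defined elsewhere, not in this file), a full build is just:
#
#   ./content_resolver.py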