diff --git a/src/main/java/com/autotune/analyzer/Analyzer.java b/src/main/java/com/autotune/analyzer/Analyzer.java index b8c9ee2c5..8fd18e24a 100644 --- a/src/main/java/com/autotune/analyzer/Analyzer.java +++ b/src/main/java/com/autotune/analyzer/Analyzer.java @@ -51,6 +51,8 @@ public static void addServlets(ServletContextHandler context) { context.addServlet(ListRecommendations.class, ServerContext.RECOMMEND_RESULTS); context.addServlet(PerformanceProfileService.class, ServerContext.CREATE_PERF_PROFILE); context.addServlet(PerformanceProfileService.class, ServerContext.LIST_PERF_PROFILES); + context.addServlet(ListClusters.class, ServerContext.LIST_CLUSTER_NAMES); + context.addServlet(Summarize.class, ServerContext.SUMMARIZE); // Adding UI support API's context.addServlet(ListNamespaces.class, ServerContext.LIST_NAMESPACES); diff --git a/src/main/java/com/autotune/analyzer/recommendations/Recommendation.java b/src/main/java/com/autotune/analyzer/recommendations/Recommendation.java index c36625ea3..d61d79ab7 100644 --- a/src/main/java/com/autotune/analyzer/recommendations/Recommendation.java +++ b/src/main/java/com/autotune/analyzer/recommendations/Recommendation.java @@ -150,6 +150,8 @@ public String toString() { ", confidence_level=" + confidence_level + ", config=" + config + ", current=" + currentConfig + + ", variation=" + variation + + ", notifications=" + notifications + '}'; } } diff --git a/src/main/java/com/autotune/analyzer/recommendations/RecommendationConfigItem.java b/src/main/java/com/autotune/analyzer/recommendations/RecommendationConfigItem.java index 64be675e3..087023778 100644 --- a/src/main/java/com/autotune/analyzer/recommendations/RecommendationConfigItem.java +++ b/src/main/java/com/autotune/analyzer/recommendations/RecommendationConfigItem.java @@ -48,4 +48,21 @@ public Double getAmount() { public String getFormat() { return format; } + + public void setAmount(Double amount) { + this.amount = amount; + } + + public void setFormat(String 
format) { + this.format = format; + } + + @Override + public String toString() { + return "RecommendationConfigItem{" + + "amount=" + amount + + ", format='" + format + '\'' + + ", errorMsg='" + errorMsg + '\'' + + '}'; + } } diff --git a/src/main/java/com/autotune/analyzer/recommendations/summary/ActionSummary.java b/src/main/java/com/autotune/analyzer/recommendations/summary/ActionSummary.java new file mode 100644 index 000000000..e5ab495e4 --- /dev/null +++ b/src/main/java/com/autotune/analyzer/recommendations/summary/ActionSummary.java @@ -0,0 +1,147 @@ +/******************************************************************************* + * Copyright (c) 2023 Red Hat, IBM Corporation and others. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ *******************************************************************************/ + +package com.autotune.analyzer.recommendations.summary; + +import com.autotune.analyzer.utils.AnalyzerConstants; +import com.autotune.utils.KruizeConstants; +import com.google.gson.annotations.SerializedName; + +import java.util.HashMap; + +/** + * stores the summary of the actions to be displayed to the user + */ +public class ActionSummary { + @SerializedName(KruizeConstants.JSONKeys.IDLE) + private HashMap idle; + @SerializedName(KruizeConstants.JSONKeys.OPTIMIZED) + private HashMap optimized; + @SerializedName(KruizeConstants.JSONKeys.CRITICAL) + private HashMap critical; + @SerializedName(KruizeConstants.JSONKeys.OPTIMIZABLE) + private HashMap optimizable; + @SerializedName(KruizeConstants.JSONKeys.ERROR) + private HashMap error; + @SerializedName(KruizeConstants.JSONKeys.NO_DATA) + private HashMap noData; + @SerializedName(KruizeConstants.JSONKeys.TOTAL) + private HashMap total; + + public ActionSummary() { + this.idle = new HashMap<>(); + this.optimized = new HashMap<>(); + this.critical = new HashMap<>(); + this.optimizable = new HashMap<>(); + this.error = new HashMap<>(); + this.noData = new HashMap<>(); + this.total = new HashMap<>(); + } + + public HashMap getIdle() { + return idle; + } + + public void setIdle(HashMap idle) { + this.idle = idle; + } + + public HashMap getOptimized() { + return optimized; + } + + public void setOptimized(HashMap optimized) { + this.optimized = optimized; + } + + public HashMap getCritical() { + return critical; + } + + public void setCritical(HashMap critical) { + this.critical = critical; + } + + public HashMap getOptimizable() { + return optimizable; + } + + public void setOptimizable(HashMap optimizable) { + this.optimizable = optimizable; + } + + public HashMap getError() { + return error; + } + + public void setError(HashMap error) { + this.error = error; + } + + public HashMap getNoData() { + return noData; + } + + public void 
setNoData(HashMap noData) { + this.noData = noData; + } + + public HashMap getTotal() { + return total; + } + + public void setTotal(HashMap total) { + this.total = total; + } + + public ActionSummary merge(ActionSummary other) { + mergeMap(idle, other.getIdle()); + mergeMap(optimized, other.getOptimized()); + mergeMap(critical, other.getCritical()); + mergeMap(optimizable, other.getOptimizable()); + mergeMap(error, other.getError()); + mergeMap(noData, other.getNoData()); + mergeMap(total, other.getTotal()); + return this; + } + + private void mergeMap(HashMap original, HashMap toMerge) { + for (HashMap.Entry entry : toMerge.entrySet()) { + AnalyzerConstants.ActionSummaryRecommendationItem key = entry.getKey(); + ResourceInfo value = entry.getValue(); + + ResourceInfo originalValue = original.get(key); + if (originalValue == null) { + original.put(key, value); + } else { + originalValue.setCount(originalValue.getCount() + value.getCount()); + originalValue.getWorkloadNames().addAll(value.getWorkloadNames()); + } + } + } + @Override + public String toString() { + return "ActionSummary{" + + "idle=" + idle + + ", optimized=" + optimized + + ", critical=" + critical + + ", optimizable=" + optimizable + + ", error=" + error + + ", info=" + noData + + ", total=" + total + + '}'; + } +} \ No newline at end of file diff --git a/src/main/java/com/autotune/analyzer/recommendations/summary/NotificationsSummary.java b/src/main/java/com/autotune/analyzer/recommendations/summary/NotificationsSummary.java new file mode 100644 index 000000000..2a0e4564a --- /dev/null +++ b/src/main/java/com/autotune/analyzer/recommendations/summary/NotificationsSummary.java @@ -0,0 +1,100 @@ +/******************************************************************************* + * Copyright (c) 2023 Red Hat, IBM Corporation and others. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + *******************************************************************************/ + +package com.autotune.analyzer.recommendations.summary; + +/** + * stores the summary of all the notifications based on the summarization type + */ +public class NotificationsSummary { + + private int info; + + private int notice; + + private int warning; + + private int error; + + private int critical; + + public int getInfo() { + return info; + } + + public void setInfo(int info) { + this.info = info; + } + + public int getNotice() { + return notice; + } + + public void setNotice(int notice) { + this.notice = notice; + } + + public int getWarning() { + return warning; + } + + public void setWarning(int warning) { + this.warning = warning; + } + + public int getError() { + return error; + } + + public void setError(int error) { + this.error = error; + } + + public int getCritical() { + return critical; + } + + public void setCritical(int critical) { + this.critical = critical; + } + + public NotificationsSummary mergeNotificationsSummary(NotificationsSummary notifications1, NotificationsSummary notifications2) { + + int infoCount = notifications1.getInfo() + notifications2.getInfo(); + int noticeCount = notifications1.getNotice() + notifications2.getNotice(); + int warningCount = notifications1.getWarning() + notifications2.getWarning(); + int errorCount = notifications1.getError() + notifications2.getError(); + int criticalCount = notifications1.getCritical() + notifications2.getCritical(); + + NotificationsSummary mergedSummary = new NotificationsSummary(); + 
mergedSummary.setInfo(infoCount); + mergedSummary.setNotice(noticeCount); + mergedSummary.setWarning(warningCount); + mergedSummary.setError(errorCount); + mergedSummary.setCritical(criticalCount); + return mergedSummary; + } + @Override + public String toString() { + return "NotificationsSummary{" + + "info=" + info + + ", notice=" + notice + + ", warning=" + warning + + ", error=" + error + + ", critical=" + critical + + '}'; + } +} \ No newline at end of file diff --git a/src/main/java/com/autotune/analyzer/recommendations/summary/RecommendationEngineSummary.java b/src/main/java/com/autotune/analyzer/recommendations/summary/RecommendationEngineSummary.java new file mode 100644 index 000000000..817edec76 --- /dev/null +++ b/src/main/java/com/autotune/analyzer/recommendations/summary/RecommendationEngineSummary.java @@ -0,0 +1,102 @@ +/******************************************************************************* + * Copyright (c) 2023 Red Hat, IBM Corporation and others. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ *******************************************************************************/ +package com.autotune.analyzer.recommendations.summary; + +import com.autotune.analyzer.recommendations.RecommendationConfigItem; +import com.autotune.analyzer.services.Summarize; +import com.autotune.analyzer.utils.AnalyzerConstants; +import com.autotune.utils.KruizeConstants; +import com.google.gson.annotations.SerializedName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.HashMap; + +/** + * Storage object for recommendation engine summary + */ + +public class RecommendationEngineSummary { + private static final Logger LOGGER = LoggerFactory.getLogger(RecommendationEngineSummary.class); + @SerializedName(KruizeConstants.JSONKeys.CONFIG) + private HashMap> config; + @SerializedName(KruizeConstants.JSONKeys.CHANGE) + private HashMap>> change; + @SerializedName("engine_level_notifications_summary") + private NotificationsSummary notificationsSummary; + @SerializedName("action_summary") + private ActionSummary actionSummary; + + public ActionSummary getActionSummary() { + return actionSummary; + } + + public void setActionSummary(ActionSummary actionSummary) { + this.actionSummary = actionSummary; + } + + + public HashMap> getConfig() { + return config; + } + + public void setConfig(HashMap> config) { + this.config = config; + } + + public HashMap>> getChange() { + return change; + } + + public void setChange(HashMap>> change) { + this.change = change; + } + + public NotificationsSummary getNotificationsSummary() { + return notificationsSummary; + } + + public void setNotificationsSummary(NotificationsSummary notificationsSummary) { + this.notificationsSummary = notificationsSummary; + } + + // Merge existing values with new ones + public RecommendationEngineSummary mergeEngineSummaries(RecommendationEngineSummary existingSummary, RecommendationEngineSummary currentSummary) { + Summarize summarize = new Summarize(); + RecommendationEngineSummary 
mergedSummary = new RecommendationEngineSummary(); + try { + mergedSummary.setConfig(summarize.mergeConfigItems(existingSummary.getConfig(), currentSummary.getConfig(), mergedSummary.getConfig())); + mergedSummary.setChange(summarize.mergeChangeObjects(existingSummary, currentSummary)); + mergedSummary.setNotificationsSummary(existingSummary.getNotificationsSummary().mergeNotificationsSummary(existingSummary.getNotificationsSummary(), currentSummary.getNotificationsSummary())); + mergedSummary.setActionSummary(existingSummary.getActionSummary().merge(currentSummary.getActionSummary())); + } catch (Exception e){ + LOGGER.error("Exception occurred while merging recommendations: {}", e.getMessage()); + } + return mergedSummary; + } + + + @Override + public String toString() { + return "RecommendationEngineSummary{" + + "config=" + config + + ", change=" + change + + ", notificationsSummary=" + notificationsSummary + + ", actionSummary=" + actionSummary + + '}'; + } + +} diff --git a/src/main/java/com/autotune/analyzer/recommendations/summary/RecommendationSummary.java b/src/main/java/com/autotune/analyzer/recommendations/summary/RecommendationSummary.java new file mode 100644 index 000000000..eda5eb5ac --- /dev/null +++ b/src/main/java/com/autotune/analyzer/recommendations/summary/RecommendationSummary.java @@ -0,0 +1,72 @@ +/******************************************************************************* + * Copyright (c) 2023 Red Hat, IBM Corporation and others. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + *******************************************************************************/ +package com.autotune.analyzer.recommendations.summary; + +import com.autotune.analyzer.recommendations.RecommendationConfigItem; +import com.autotune.analyzer.services.Summarize; +import com.autotune.analyzer.utils.AnalyzerConstants; +import com.autotune.utils.KruizeConstants; +import com.google.gson.annotations.SerializedName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.HashMap; + +/** + * Storage object for recommendation summary + */ + +public class RecommendationSummary { + @SerializedName("recommendation_engines") + private HashMap recommendationEngineSummaryHashMap; + @SerializedName("notifications_summary") + private NotificationsSummary notificationsSummary; + @SerializedName("action_summary") + private ActionSummary actionSummary; + + public HashMap getRecommendationEngineSummaryHashMap() { + return recommendationEngineSummaryHashMap; + } + + public void setRecommendationEngineSummaryHashMap(HashMap recommendationEngineSummaryHashMap) { + this.recommendationEngineSummaryHashMap = recommendationEngineSummaryHashMap; + } + + public ActionSummary getActionSummary() { + return actionSummary; + } + + public void setActionSummary(ActionSummary actionSummary) { + this.actionSummary = actionSummary; + } + + public NotificationsSummary getNotificationsSummary() { + return notificationsSummary; + } + + public void setNotificationsSummary(NotificationsSummary notificationsSummary) { + this.notificationsSummary = notificationsSummary; + } + + @Override + public String toString() { + return "RecommendationSummary{" + + "recommendationEngineSummaryHashMap=" + recommendationEngineSummaryHashMap + + ", notificationsSummary=" + notificationsSummary + + ", actionSummary=" + actionSummary + + '}'; + } +} diff --git 
a/src/main/java/com/autotune/analyzer/recommendations/summary/RecommendationTimestampLevelSummary.java b/src/main/java/com/autotune/analyzer/recommendations/summary/RecommendationTimestampLevelSummary.java new file mode 100644 index 000000000..38d7c73ab --- /dev/null +++ b/src/main/java/com/autotune/analyzer/recommendations/summary/RecommendationTimestampLevelSummary.java @@ -0,0 +1,98 @@ +/******************************************************************************* + * Copyright (c) 2023 Red Hat, IBM Corporation and others. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ *******************************************************************************/ +package com.autotune.analyzer.recommendations.summary; + +import com.autotune.analyzer.recommendations.RecommendationConfigItem; +import com.autotune.analyzer.utils.AnalyzerConstants; +import com.autotune.utils.KruizeConstants; +import com.google.gson.annotations.SerializedName; + +import java.util.HashMap; + +/** + * Storage object for recommendation engine summary + */ + +public class RecommendationTimestampLevelSummary { + + @SerializedName(KruizeConstants.JSONKeys.CURRENT) + private HashMap> currentConfig; + @SerializedName("recommendation_terms") + private HashMap recommendationSummaryHashMap; + @SerializedName("timestamp_level_notifications_summary") + private NotificationsSummary notificationsSummary; + @SerializedName("action_summary") + private ActionSummary actionSummary; + + public HashMap getRecommendationSummaryHashMap() { + return recommendationSummaryHashMap; + } + + public void setRecommendationSummaryHashMap(HashMap recommendationSummaryHashMap) { + this.recommendationSummaryHashMap = recommendationSummaryHashMap; + } + + public ActionSummary getActionSummary() { + return actionSummary; + } + + public void setActionSummary(ActionSummary actionSummary) { + this.actionSummary = actionSummary; + } + + public HashMap> getCurrentConfig() { + return currentConfig; + } + + public void setCurrentConfig(HashMap> currentConfig) { + this.currentConfig = currentConfig; + } + + public NotificationsSummary getNotificationsSummary() { + return notificationsSummary; + } + + public void setNotificationsSummary(NotificationsSummary notificationsSummary) { + this.notificationsSummary = notificationsSummary; + } + + // Merge existing values with new ones +// public RecommendationEngineSummary mergeSummaries(RecommendationEngineSummary existingSummary, RecommendationEngineSummary currentSummary) { +// Summarize summarize = new Summarize(); +// RecommendationEngineSummary mergedSummary = 
new RecommendationEngineSummary(); +// try { +// mergedSummary.setCurrentConfig(summarize.mergeConfigItems(existingSummary.getCurrentConfig(), currentSummary.getCurrentConfig(), mergedSummary.getCurrentConfig())); +// mergedSummary.setConfig(summarize.mergeConfigItems(existingSummary.getConfig(), currentSummary.getConfig(), mergedSummary.getConfig())); +// mergedSummary.setChange(summarize.mergeChangeObjects(existingSummary, currentSummary)); +// mergedSummary.setNotificationsSummary(existingSummary.getNotificationsSummary().mergeNotificationsSummary(existingSummary.getNotificationsSummary(), currentSummary.getNotificationsSummary())); +// mergedSummary.setActionSummary(existingSummary.getActionSummary().merge(currentSummary.getActionSummary())); +// } catch (Exception e){ +// LOGGER.error("Exception occurred while merging recommendations: {}", e.getMessage()); +// } +// return mergedSummary; +// } + + + @Override + public String toString() { + return "RecommendationTimestampLevelSummary{" + + "currentConfig=" + currentConfig + + ", recommendationSummaryHashMap=" + recommendationSummaryHashMap + + ", notificationsSummary=" + notificationsSummary + + ", actionSummary=" + actionSummary + + '}'; + } +} diff --git a/src/main/java/com/autotune/analyzer/recommendations/summary/ResourceInfo.java b/src/main/java/com/autotune/analyzer/recommendations/summary/ResourceInfo.java new file mode 100644 index 000000000..0a53b28b6 --- /dev/null +++ b/src/main/java/com/autotune/analyzer/recommendations/summary/ResourceInfo.java @@ -0,0 +1,66 @@ +/******************************************************************************* + * Copyright (c) 2023 Red Hat, IBM Corporation and others. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + *******************************************************************************/ +package com.autotune.analyzer.recommendations.summary; + +import com.autotune.utils.KruizeConstants; +import com.google.gson.annotations.SerializedName; + +import java.util.HashSet; +import java.util.Set; + +/** + * Stores the count and list of names of various JSON objects + */ +public class ResourceInfo { + private int count; + @SerializedName(KruizeConstants.JSONKeys.WORKLOAD_NAMES) + private Set workloadNames; + + + public ResourceInfo(int count, Set workloadNames) { + this.count = count; + this.workloadNames = workloadNames; + } + + public ResourceInfo() { + this.count = 0; + this.workloadNames = new HashSet<>(); + } + + public int getCount() { + return count; + } + + public void setCount(int count) { + this.count = count; + } + + public Set getWorkloadNames() { + return workloadNames; + } + + public void setWorkloadNames(Set workloadNames) { + this.workloadNames = workloadNames; + } + + @Override + public String toString() { + return "ResourceInfo{" + + "count=" + count + + ", workloadNames=" + workloadNames + + '}'; + } +} diff --git a/src/main/java/com/autotune/analyzer/recommendations/summary/Summary.java b/src/main/java/com/autotune/analyzer/recommendations/summary/Summary.java new file mode 100644 index 000000000..6e4ad9cde --- /dev/null +++ b/src/main/java/com/autotune/analyzer/recommendations/summary/Summary.java @@ -0,0 +1,42 @@ +/******************************************************************************* + * Copyright (c) 2023 Red Hat, IBM 
Corporation and others. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + *******************************************************************************/ + +package com.autotune.analyzer.recommendations.summary; + +import java.sql.Timestamp; +import java.util.HashMap; + +/** + * stores the summarized recommendation data + */ +public class Summary { + private HashMap data; + + public HashMap getData() { + return data; + } + + public void setData(HashMap data) { + this.data = data; + } + + @Override + public String toString() { + return "Summary{" + + "data=" + data + + '}'; + } +} diff --git a/src/main/java/com/autotune/analyzer/serviceObjects/SummarizeAPIObject.java b/src/main/java/com/autotune/analyzer/serviceObjects/SummarizeAPIObject.java new file mode 100644 index 000000000..e547c2710 --- /dev/null +++ b/src/main/java/com/autotune/analyzer/serviceObjects/SummarizeAPIObject.java @@ -0,0 +1,129 @@ +/******************************************************************************* + * Copyright (c) 2023 Red Hat, IBM Corporation and others. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + *******************************************************************************/ + +package com.autotune.analyzer.serviceObjects; + +import com.autotune.analyzer.recommendations.summary.NotificationsSummary; +import com.autotune.analyzer.recommendations.summary.ResourceInfo; +import com.autotune.analyzer.recommendations.summary.Summary; +import com.autotune.analyzer.utils.AnalyzerConstants; +import com.autotune.utils.KruizeConstants; +import com.google.gson.annotations.SerializedName; + +import java.util.HashMap; + +public class SummarizeAPIObject extends BaseSO { + + @SerializedName(KruizeConstants.JSONKeys.CLUSTER_NAME) + private String clusterName; + + @SerializedName(KruizeConstants.JSONKeys.NAMESPACE) + private String namespace; + + @SerializedName(KruizeConstants.JSONKeys.SUMMARY) + private Summary summary; + + @SerializedName("notifications_summary") + private NotificationsSummary notificationsSummary; + @SerializedName("action_summary") + HashMap> actionSummaryTopLevel; + + private HashMap namespaces; + private HashMap workloads; + private HashMap clusters; + private HashMap containers; + + public String getClusterName() { + return clusterName; + } + + public void setClusterName(String clusterName) { + this.clusterName = clusterName; + } + + public String getNamespace() { + return namespace; + } + + public void setNamespace(String namespace) { + this.namespace = namespace; + } + + public Summary getSummary() { + return summary; + } + + public void setSummary(Summary summary) { + this.summary = summary; + } + + public NotificationsSummary 
getNotificationsSummary() { + return notificationsSummary; + } + + public void setNotificationsSummary(NotificationsSummary notificationsSummary) { + this.notificationsSummary = notificationsSummary; + } + + public HashMap> getActionSummaryTopLevel() { + return actionSummaryTopLevel; + } + + public void setActionSummaryTopLevel(HashMap> actionSummaryTopLevel) { + this.actionSummaryTopLevel = actionSummaryTopLevel; + } + + public HashMap getWorkloads() { + return workloads; + } + + public void setWorkloads(HashMap workloads) { + this.workloads = workloads; + } + + public HashMap getNamespaces() { + return namespaces; + } + + public void setNamespaces(HashMap namespaces) { + this.namespaces = namespaces; + } + + public HashMap getClusters() { + return clusters; + } + + public void setClusters(HashMap clusters) { + this.clusters = clusters; + } + + public HashMap getContainers() { + return containers; + } + + public void setContainers(HashMap containers) { + this.containers = containers; + } + + @Override + public String toString() { + return "SummarizeAPIObject{" + + "clusterName='" + clusterName + '\'' + + ", namespace='" + namespace + '\'' + + ", summary=" + summary + + '}'; + } +} diff --git a/src/main/java/com/autotune/analyzer/services/ListClusters.java b/src/main/java/com/autotune/analyzer/services/ListClusters.java new file mode 100644 index 000000000..2e922a9bf --- /dev/null +++ b/src/main/java/com/autotune/analyzer/services/ListClusters.java @@ -0,0 +1,89 @@ +/******************************************************************************* + * Copyright (c) 2023, 2023 Red Hat, IBM Corporation and others. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + *******************************************************************************/ +package com.autotune.analyzer.services; + +import com.autotune.database.service.ExperimentDBService; +import com.autotune.utils.MetricsConfig; +import com.google.gson.Gson; +import com.google.gson.JsonArray; +import io.micrometer.core.instrument.Timer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.servlet.ServletConfig; +import javax.servlet.ServletException; +import javax.servlet.http.HttpServlet; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import java.io.IOException; +import java.util.List; + +import static com.autotune.analyzer.utils.AnalyzerConstants.ServiceConstants.CHARACTER_ENCODING; +import static com.autotune.analyzer.utils.AnalyzerConstants.ServiceConstants.JSON_CONTENT_TYPE; + +/** + * Rest API used to list cluster names. + */ +public class ListClusters extends HttpServlet { + private static final Logger LOGGER = LoggerFactory.getLogger(ListClusters.class); + + @Override + public void init(ServletConfig config) throws ServletException { + super.init(config); + } + + /** + * Handles HTTP GET requests for retrieving a list of cluster names from the database. + * + * @param request The HttpServletRequest object representing the incoming HTTP request. + * @param response The HttpServletResponse object representing the outgoing HTTP response. + * @throws IOException If an I/O error occurs while handling the request or response. 
+ */ + @Override + protected void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException { + Timer.Sample timerListExp = Timer.start(MetricsConfig.meterRegistry()); + response.setStatus(HttpServletResponse.SC_OK); + response.setContentType(JSON_CONTENT_TYPE); + response.setCharacterEncoding(CHARACTER_ENCODING); + List clusterNamesList; + + // get cluster list from the DB and send JSON array as the response + try { + clusterNamesList = new ExperimentDBService().loadAllClusterNames(); + Gson gson = new Gson(); + JsonArray jsonArray = gson.toJsonTree(clusterNamesList).getAsJsonArray(); + response.getWriter().println(jsonArray.toString()); + response.getWriter().close(); + } catch (Exception e) { + LOGGER.error(e.getMessage()); + sendErrorResponse(response, e, HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e.getMessage()); + } finally { + if (null != timerListExp) timerListExp.stop(MetricsConfig.timerListExp); + } + + } + + public void sendErrorResponse(HttpServletResponse response, Exception e, int httpStatusCode, String errorMsg) throws + IOException { + if (null != e) { + LOGGER.error(e.getMessage()); + if (null == errorMsg) errorMsg = e.getMessage(); + } + response.sendError(httpStatusCode, errorMsg); + } + + +} diff --git a/src/main/java/com/autotune/analyzer/services/ListRecommendations.java b/src/main/java/com/autotune/analyzer/services/ListRecommendations.java index 2b6414442..0a71e01c1 100644 --- a/src/main/java/com/autotune/analyzer/services/ListRecommendations.java +++ b/src/main/java/com/autotune/analyzer/services/ListRecommendations.java @@ -23,9 +23,9 @@ import com.autotune.analyzer.utils.AnalyzerErrorConstants; import com.autotune.analyzer.utils.GsonUTCDateAdapter; import com.autotune.analyzer.utils.ServiceHelpers; -import com.autotune.common.data.result.ContainerData; import com.autotune.analyzer.kruizeObject.KruizeObject; import com.autotune.analyzer.utils.AnalyzerConstants; +import 
com.autotune.common.data.result.ContainerData; import com.autotune.database.service.ExperimentDBService; import com.autotune.utils.KruizeConstants; import com.autotune.utils.Utils; @@ -34,8 +34,6 @@ import com.google.gson.Gson; import com.google.gson.GsonBuilder; import io.micrometer.core.instrument.Timer; -import io.prometheus.client.Gauge; -import io.prometheus.client.Summary; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -65,6 +63,7 @@ public class ListRecommendations extends HttpServlet { private static final long serialVersionUID = 1L; private static final Logger LOGGER = LoggerFactory.getLogger(ListRecommendations.class); + static String statusValue = "failure"; @Override public void init(ServletConfig config) throws ServletException { @@ -73,7 +72,6 @@ public void init(ServletConfig config) throws ServletException { @Override protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { - String statusValue = "failure"; Timer.Sample timerListRec = Timer.start(MetricsConfig.meterRegistry()); response.setContentType(JSON_CONTENT_TYPE); response.setCharacterEncoding(CHARACTER_ENCODING); @@ -187,23 +185,8 @@ protected void doGet(HttpServletRequest request, HttpServletResponse response) t } } if (!error) { - List recommendationList = new ArrayList<>(); - for (KruizeObject ko : kruizeObjectList) { - try { - LOGGER.debug(ko.getKubernetes_objects().toString()); - ListRecommendationsAPIObject listRecommendationsAPIObject = Converters.KruizeObjectConverters. 
- convertKruizeObjectToListRecommendationSO( - ko, - getLatest, - checkForTimestamp, - monitoringEndTimestamp); - recommendationList.add(listRecommendationsAPIObject); - statusValue = "success"; - } catch (Exception e) { - LOGGER.error("Not able to generate recommendation for expName : {} due to {}", ko.getExperimentName(), e.getMessage()); - } - } - + List recommendationList = buildAPIResponse(kruizeObjectList, + checkForTimestamp, getLatest, monitoringEndTimestamp); ExclusionStrategy strategy = new ExclusionStrategy() { @Override public boolean shouldSkipField(FieldAttributes field) { @@ -242,6 +225,29 @@ public boolean shouldSkipClass(Class clazz) { } } + public static List buildAPIResponse(List kruizeObjectList, boolean checkForTimestamp, boolean getLatest, + Timestamp monitoringEndTimestamp) { + + List recommendationList = new ArrayList<>(); + for (KruizeObject ko : kruizeObjectList) { + try { + LOGGER.debug(ko.getKubernetes_objects().toString()); + ListRecommendationsAPIObject listRecommendationsAPIObject = Converters.KruizeObjectConverters. 
+ convertKruizeObjectToListRecommendationSO( + ko, + getLatest, + checkForTimestamp, + monitoringEndTimestamp); + recommendationList.add(listRecommendationsAPIObject); + statusValue = "success"; + } catch (Exception e) { + LOGGER.error("Not able to generate recommendation for expName : {} due to {}", ko.getExperimentName(), e.getMessage()); + } + } + + return recommendationList; + } + private void sendSuccessResponse(HttpServletResponse response) throws IOException { response.setContentType(JSON_CONTENT_TYPE); response.setCharacterEncoding(CHARACTER_ENCODING); diff --git a/src/main/java/com/autotune/analyzer/services/Summarize.java b/src/main/java/com/autotune/analyzer/services/Summarize.java new file mode 100644 index 000000000..b284b3600 --- /dev/null +++ b/src/main/java/com/autotune/analyzer/services/Summarize.java @@ -0,0 +1,986 @@ +/******************************************************************************* + * Copyright (c) 2023 Red Hat, IBM Corporation and others. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ *******************************************************************************/ +package com.autotune.analyzer.services; + +import com.autotune.analyzer.kruizeObject.KruizeObject; +import com.autotune.analyzer.recommendations.*; +import com.autotune.analyzer.recommendations.objects.MappedRecommendationForEngine; +import com.autotune.analyzer.recommendations.objects.MappedRecommendationForTimestamp; +import com.autotune.analyzer.recommendations.objects.TermRecommendations; +import com.autotune.analyzer.recommendations.summary.*; +import com.autotune.analyzer.serviceObjects.ContainerAPIObject; +import com.autotune.analyzer.serviceObjects.KubernetesAPIObject; +import com.autotune.analyzer.serviceObjects.ListRecommendationsAPIObject; +import com.autotune.analyzer.serviceObjects.SummarizeAPIObject; +import com.autotune.analyzer.utils.AnalyzerConstants; +import com.autotune.analyzer.utils.AnalyzerErrorConstants; +import com.autotune.analyzer.utils.GsonUTCDateAdapter; +import com.autotune.database.service.ExperimentDBService; +import com.autotune.utils.KruizeConstants; +import com.autotune.utils.KruizeSupportedTypes; +import com.autotune.utils.MetricsConfig; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import io.micrometer.core.instrument.Timer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.servlet.ServletConfig; +import javax.servlet.ServletException; +import javax.servlet.http.HttpServlet; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import java.io.IOException; +import java.sql.Timestamp; +import java.time.Instant; +import java.util.*; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static com.autotune.analyzer.utils.AnalyzerConstants.ServiceConstants.CHARACTER_ENCODING; +import static com.autotune.analyzer.utils.AnalyzerConstants.ServiceConstants.JSON_CONTENT_TYPE; + +/** + * Rest API to build and return the summarized response based 
on the parameters passed. + */ +public class Summarize extends HttpServlet { + private static final Logger LOGGER = LoggerFactory.getLogger(Summarize.class); + HashMap clusterSummaryCacheMap = new HashMap<>(); + HashMap namespaceSummaryCacheMap = new HashMap<>(); + HashMap> allClustersRecommendationsAPIObjMap = new HashMap<>(); + HashMap> allNamespacesRecommendationsAPIObjMap = new HashMap<>(); + + @Override + public void init(ServletConfig config) throws ServletException { + super.init(config); + } + + /** + * Handles HTTP GET requests for retrieving summarized API objects based on query parameters. + * @param request + * @param response + * @throws IOException + */ + @Override + protected void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException { + Timer.Sample timerListRec = Timer.start(MetricsConfig.meterRegistry()); + response.setStatus(HttpServletResponse.SC_OK); + response.setContentType(JSON_CONTENT_TYPE); + response.setCharacterEncoding(CHARACTER_ENCODING); + List summarizeAPIObjectList; + // Extract query parameters from the request + String summarizeType = request.getParameter(KruizeConstants.JSONKeys.SUMMARIZE_TYPE); + String clusterName = request.getParameter(KruizeConstants.JSONKeys.CLUSTER_NAME); + String namespaceName = request.getParameter(KruizeConstants.JSONKeys.NAMESPACE_NAME); + String fetchFromDB = request.getParameter(KruizeConstants.JSONKeys.FETCH_FROM_DB); + + // Validate Query params + Set invalidParams = new HashSet<>(); + for (String param : request.getParameterMap().keySet()) { + if (!KruizeSupportedTypes.SUMMARIZE_PARAMS_SUPPORTED.contains(param)) { + invalidParams.add(param); + } + } + if (invalidParams.isEmpty()) { + // Set default values if absent + if (summarizeType == null || summarizeType.isEmpty()) + summarizeType = KruizeConstants.JSONKeys.CLUSTER; + // by default, fetchFromDB will be false so the data will be fetched from cache only + if (fetchFromDB == null || fetchFromDB.isEmpty()) + 
fetchFromDB = AnalyzerConstants.BooleanString.FALSE; + if (isValidValue(summarizeType, fetchFromDB)) { + // load recommendations based on params + try { + // Reset cache maps if fetching from the database + if (fetchFromDB.equals(AnalyzerConstants.BooleanString.TRUE)) { + clusterSummaryCacheMap = new HashMap<>(); + namespaceSummaryCacheMap = new HashMap<>(); + } + // Load namespaces with clusters from the database + HashMap> clusterNamespaceAssociationMap = new ExperimentDBService().loadAllClusterNamespaceAssociationMap(); + + // fetch all the latest recommendations for all clusters and namespaces and convert it into the API object list + fetchLatestRecommendationsFromDBBasedOnClustersAndNamespaces(clusterNamespaceAssociationMap); + summarizeAPIObjectList = initiateSummarization(namespaceName,clusterName, summarizeType); + + String gsonStr = "[]"; + Gson gsonObj = new GsonBuilder() + .disableHtmlEscaping() + .setPrettyPrinting() + .enableComplexMapKeySerialization() + .registerTypeAdapter(Date.class, new GsonUTCDateAdapter()) + .create(); + gsonStr = gsonObj.toJson(summarizeAPIObjectList); + response.getWriter().println(gsonStr); + response.getWriter().close(); + } catch (Exception e) { + LOGGER.error("Loading saved recommendations failed: {} ", e.getMessage()); + sendErrorResponse(response, e, HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e.getMessage()); + } finally { + timerListRec.stop(MetricsConfig.timerListRec); + } + } else { + sendErrorResponse( + response, + new Exception(AnalyzerErrorConstants.APIErrors.ListRecommendationsAPI.INVALID_QUERY_PARAM_VALUE), + HttpServletResponse.SC_BAD_REQUEST, + String.format(AnalyzerErrorConstants.APIErrors.ListRecommendationsAPI.INVALID_QUERY_PARAM_VALUE) + ); + } + + } else { + sendErrorResponse( + response, + new Exception(AnalyzerErrorConstants.APIErrors.ListRecommendationsAPI.INVALID_QUERY_PARAM), + HttpServletResponse.SC_BAD_REQUEST, + 
String.format(AnalyzerErrorConstants.APIErrors.ListRecommendationsAPI.INVALID_QUERY_PARAM, invalidParams) + ); + } + } + + /** + * Initiates summarization based on the provided parameters. + * + * @param namespaceNameParam The name of the namespace for which summarization is requested. Can be null if cluster-level summarization is requested. + * @param clusterNameParam The name of the cluster for which summarization is requested. Can be null if namespace-level summarization is requested. + * @param summarizeType The type of summarization (e.g., cluster or namespace). + * @return A list of SummarizeAPIObject containing the summarization details for the specified namespace or cluster. + */ + private List initiateSummarization(String namespaceNameParam, String clusterNameParam, String summarizeType) { + List summaryList; + SummarizeAPIObject summarizeAPIObject; + + // Get the current system timestamp in UTC and set it for the response + Timestamp currentTimestamp = Timestamp.from(Instant.now()); + currentTimestamp.setNanos(currentTimestamp.getNanos() / 1000 * 1000); + // Convert to ISO date format + currentTimestamp.setTime(currentTimestamp.getTime() + Calendar.getInstance(TimeZone.getTimeZone("UTC")).get(Calendar.ZONE_OFFSET)); + if (summarizeType.equalsIgnoreCase(KruizeConstants.JSONKeys.CLUSTER)) { + summaryList = new ArrayList<>(); + for (String clusterName : allClustersRecommendationsAPIObjMap.keySet()) { + // check cache + if (namespaceNameParam == null) { + SummarizeAPIObject summarizeFromCache = getSummaryFromCache(clusterName, clusterSummaryCacheMap); + if (summarizeFromCache != null) { + summaryList.add(summarizeFromCache); + continue; + } + } + summarizeAPIObject = new SummarizeAPIObject(); + commonSummarization(allClustersRecommendationsAPIObjMap.get(clusterName), summarizeAPIObject, namespaceNameParam, + clusterName, currentTimestamp, summarizeType); + summarizeAPIObject.setClusterName(clusterName); + if (namespaceNameParam != null) + 
summarizeAPIObject.setNamespace(namespaceNameParam); + + summaryList.add(summarizeAPIObject); + if (namespaceNameParam == null) + clusterSummaryCacheMap.put(clusterName, summarizeAPIObject); + } + } else { + summaryList = new ArrayList<>(); + for (String namespaceName : allNamespacesRecommendationsAPIObjMap.keySet()) { + // check cache + if (clusterNameParam == null) { + SummarizeAPIObject summarizeFromCache = getSummaryFromCache(namespaceName, namespaceSummaryCacheMap); + if (summarizeFromCache != null) { + summaryList.add(summarizeFromCache); + continue; + } + } + + summarizeAPIObject = new SummarizeAPIObject(); + commonSummarization(allNamespacesRecommendationsAPIObjMap.get(namespaceName), summarizeAPIObject, namespaceName, clusterNameParam, currentTimestamp, summarizeType); + summarizeAPIObject.setNamespace(namespaceName); + if (clusterNameParam != null) + summarizeAPIObject.setClusterName(clusterNameParam); + + summaryList.add(summarizeAPIObject); + if (clusterNameParam == null) + namespaceSummaryCacheMap.put(namespaceName, summarizeAPIObject); + } + } + + return summaryList; + } + + /** + * Fetches the latest recommendations from the database based on the provided cluster and namespace associations. + * + * @param clusterNamespaceAssociationMap A HashMap containing cluster names as keys and lists of associated namespace names as values. + * @throws Exception If there is an error while fetching recommendations from the database. 
+     */
+    private void fetchLatestRecommendationsFromDBBasedOnClustersAndNamespaces(HashMap<String, List<String>> clusterNamespaceAssociationMap) throws Exception {
+
+        HashMap<String, KruizeObject> recommendationsMap = new HashMap<>();
+        ExperimentDBService experimentDBService = new ExperimentDBService();
+
+        // NOTE(review): both recommendation maps are initialized to empty HashMaps at their
+        // field declarations, so a '== null' guard here could never be true and the DB load
+        // was skipped forever (every summary came out empty). Guard on emptiness instead so
+        // the first request actually populates the caches.
+        if (allClustersRecommendationsAPIObjMap.isEmpty()) {
+            LOGGER.debug("Getting from DB");
+            for (String clusterName : clusterNamespaceAssociationMap.keySet()) {
+                for (String namespaceInCluster : clusterNamespaceAssociationMap.get(clusterName)) {
+                    experimentDBService.loadExperimentsAndRecommendationsByClusterAndNamespaceName(recommendationsMap, clusterName, namespaceInCluster);
+                }
+                allClustersRecommendationsAPIObjMap.put(clusterName, buildRecommendationsList(new ArrayList<>(recommendationsMap.values())));
+                // reset the scratch map so each cluster gets only its own experiments
+                recommendationsMap = new HashMap<>();
+            }
+        }
+        if (allNamespacesRecommendationsAPIObjMap.isEmpty()) {
+            Set<String> uniqueNamespaces = new HashSet<>();
+            for (List<String> namespaces : clusterNamespaceAssociationMap.values()) {
+                uniqueNamespaces.addAll(namespaces);
+            }
+            for (String namespaceName : uniqueNamespaces) {
+                for (String clusterInNamespace : clusterNamespaceAssociationMap.keySet()) {
+                    experimentDBService.loadExperimentsAndRecommendationsByClusterAndNamespaceName(recommendationsMap, clusterInNamespace, namespaceName);
+                }
+                allNamespacesRecommendationsAPIObjMap.put(namespaceName, buildRecommendationsList(new ArrayList<>(recommendationsMap.values())));
+                recommendationsMap = new HashMap<>();
+            }
+        }
+    }
+
+    /**
+     * Checks if the provided values for summarizeType and fetchFromDB are valid.
+     *
+     * @param summarizeTypeValue The value of the summarizeType parameter.
+     * @param fetchFromDBValue The value of the fetchFromDB parameter.
+     * @return True if both summarizeTypeValue and fetchFromDBValue are valid; otherwise, false.
+     */
+    private boolean isValidValue(String summarizeTypeValue, String fetchFromDBValue) {
+        // NOTE(review): parenthesized so BOTH parameters are validated. Previously '&&' bound
+        // tighter than '||', i.e. cluster || (namespace && fetchValid), so any fetchFromDB
+        // value was accepted whenever summarizeType was "cluster".
+        return ((summarizeTypeValue.equalsIgnoreCase(KruizeConstants.JSONKeys.CLUSTER) || summarizeTypeValue.equalsIgnoreCase(KruizeConstants.JSONKeys.NAMESPACE))
+                && (fetchFromDBValue.equals(AnalyzerConstants.BooleanString.TRUE) || fetchFromDBValue.equals(AnalyzerConstants.BooleanString.FALSE)));
+    }
+
+    /**
+     * Performs summarization of API objects based on the provided summarizeType, cluster name, and namespace name.
+     *
+     * @param summarizeType The type of summarization ("cluster" or "namespace").
+     * @param clusterName The specific cluster name to summarize, or null if not specified.
+     * @param namespaceName The specific namespace name to summarize, or null if not specified.
+     * @param summarizeAPIObject The SummarizeAPIObject to populate with summarized data.
+     * @param listRecommendationsAPIObjectList The list containing the recommendation data.
+     * @param currentTimestamp The object containing the current timestamp.
+ */ + private void commonSummarization(List listRecommendationsAPIObjectList, SummarizeAPIObject summarizeAPIObject, + String namespaceName, String clusterName, Timestamp currentTimestamp, String summarizeType) { + Summarize summarize = new Summarize(); + HashMap data = new HashMap<>(); + HashMap recommendationsCategoryMap = new HashMap<>(); + HashMap recommendationEngineSummaryHashMap = new HashMap<>(); + HashMap clusters = new HashMap<>(); + HashMap namespaces = new HashMap<>(); + HashMap workloads = new HashMap<>(); + HashMap containers = new HashMap<>(); + NotificationsSummary allOuterNotificationsSummary = null; + NotificationsSummary allTimestampLevelNotificationsSummary = null; + NotificationsSummary allTermLevelNotificationsSummary = null; + Set clustersSet = new HashSet<>(); + Set namespaceSet = new HashSet<>(); + Set workloadsSet = new HashSet<>(); + Set containersSet = new HashSet<>(); + Set workloadsWithoutRecommendation = new HashSet<>(); + Summary summary = new Summary(); + RecommendationEngineSummary recommendationEngineSummary; + RecommendationTimestampLevelSummary recommendationTimestampLevelSummary = new RecommendationTimestampLevelSummary(); + ResourceInfo resourceInfo; + HashMap> mergedCurrentConfig = null; + + for (ListRecommendationsAPIObject listRecommendationsAPIObject : listRecommendationsAPIObjectList) { + if(summarizeType.equalsIgnoreCase(KruizeConstants.JSONKeys.NAMESPACE)) { + if (clusterName != null && !clusterName.equalsIgnoreCase(listRecommendationsAPIObject.getClusterName())) + continue; + } + clustersSet.add(listRecommendationsAPIObject.getClusterName()); + for (KubernetesAPIObject kubernetesAPIObject : listRecommendationsAPIObject.getKubernetesObjects()) { + if(summarizeType.equalsIgnoreCase(KruizeConstants.JSONKeys.CLUSTER)) { + if (namespaceName != null && !namespaceName.equalsIgnoreCase(kubernetesAPIObject.getNamespace())) + continue; + } + for (ContainerAPIObject containerAPIObject : 
kubernetesAPIObject.getContainerAPIObjects()) { + ContainerRecommendations containerRecommendations = containerAPIObject.getContainerRecommendations(); + for (Map.Entry containerRecommendationMapEntry + : containerRecommendations.getData().entrySet()) { + // stores the current configs + HashMap> currentConfig; + MappedRecommendationForTimestamp mappedRecommendationForTimestamp = containerRecommendationMapEntry.getValue(); + LOGGER.debug("mappedRecommendationForTimestamp = {}", mappedRecommendationForTimestamp); + recommendationTimestampLevelSummary = new RecommendationTimestampLevelSummary(); + currentConfig = mappedRecommendationForTimestamp.getCurrentConfig(); + // check and merge the current configs with the existing one + if (mergedCurrentConfig == null) { + mergedCurrentConfig = currentConfig; + } + else { + mergeConfigObjects(mergedCurrentConfig, currentConfig); + } + + for (Map.Entry recommendationForTermMapEntry : + mappedRecommendationForTimestamp.getRecommendationForTermHashMap().entrySet()) { + RecommendationSummary recommendationSummary = new RecommendationSummary(); + String recommendationPeriod = recommendationForTermMapEntry.getKey(); + LOGGER.info("recommendationPeriod = {}", recommendationPeriod); + if (recommendationForTermMapEntry.getValue().getRecommendationForEngineHashMap() != null) { + for (Map.Entry recommendationEnginesMapEntry : + recommendationForTermMapEntry.getValue().getRecommendationForEngineHashMap().entrySet()) { + String engineName = recommendationEnginesMapEntry.getKey(); + LOGGER.debug("Recommendation engine : {}", engineName); + MappedRecommendationForEngine mappedRecommendationForEngine = recommendationEnginesMapEntry.getValue(); + RecommendationEngineSummary recommendationEngineSummaryCurrent = summarize.convertToEngineSummary(mappedRecommendationForEngine, + kubernetesAPIObject.getName()); + if (recommendationEngineSummaryHashMap.containsKey(engineName)) { + recommendationEngineSummary = 
recommendationEngineSummaryCurrent.mergeEngineSummaries( + recommendationEngineSummaryHashMap.get(engineName), recommendationEngineSummaryCurrent); + } else { + recommendationEngineSummary = recommendationEngineSummaryCurrent; + } + recommendationEngineSummaryHashMap.put(engineName, recommendationEngineSummary); + } + // get the term_level notifications summary here + NotificationsSummary currentNotificationsSummary = summarize.calculateNotificationsSummary( + containerRecommendations.getNotificationMap()); + if (allTermLevelNotificationsSummary != null) { + allTermLevelNotificationsSummary = currentNotificationsSummary.mergeNotificationsSummary(allTermLevelNotificationsSummary, currentNotificationsSummary); + } else { + allTermLevelNotificationsSummary = currentNotificationsSummary; + } + } + // set the recommendation engine summary and notifications + recommendationSummary.setRecommendationEngineSummaryHashMap(recommendationEngineSummaryHashMap); + recommendationSummary.setNotificationsSummary(allTermLevelNotificationsSummary); + LOGGER.debug("recommendationSummary = {}", recommendationSummary); + recommendationsCategoryMap.put(recommendationPeriod, recommendationSummary); + } + // get the timestamp_level notifications summary here + NotificationsSummary currentNotificationsSummary = summarize.calculateNotificationsSummary(containerRecommendationMapEntry.getValue().getHigherLevelNotificationMap()); + if (allTimestampLevelNotificationsSummary != null) { + allTimestampLevelNotificationsSummary = currentNotificationsSummary.mergeNotificationsSummary(allTimestampLevelNotificationsSummary, currentNotificationsSummary); + } else { + allTimestampLevelNotificationsSummary = currentNotificationsSummary; + } + recommendationTimestampLevelSummary.setRecommendationSummaryHashMap(recommendationsCategoryMap); + LOGGER.debug("recommendationTimestampLevelSummary = {}", recommendationTimestampLevelSummary); + } + // set current config and timestamp_level notifications summary in 
this object + recommendationTimestampLevelSummary.setCurrentConfig(mergedCurrentConfig); + recommendationTimestampLevelSummary.setNotificationsSummary(allTimestampLevelNotificationsSummary); + // get the recommendations level notifications summary + NotificationsSummary currentNotificationsSummary = summarize.calculateNotificationsSummary(containerRecommendations.getNotificationMap()); + if (allOuterNotificationsSummary != null) { + allOuterNotificationsSummary = currentNotificationsSummary.mergeNotificationsSummary(allOuterNotificationsSummary, currentNotificationsSummary); + } else { + allOuterNotificationsSummary = currentNotificationsSummary; + } + // get the top level action summary + for (RecommendationNotification recommendationNotification : containerRecommendations.getNotificationMap().values()) { + if (recommendationNotification.getCode() == 120001) { + workloadsWithoutRecommendation.add(kubernetesAPIObject.getName()); + } + } + } + workloadsSet.add(kubernetesAPIObject.getName()); + namespaceSet.add(kubernetesAPIObject.getNamespace()); + } + data.put(currentTimestamp, recommendationTimestampLevelSummary); + summary.setData(data); + + // set the recommendations level notifications summary + summarizeAPIObject.setNotificationsSummary(allOuterNotificationsSummary); + // set the namespaces object + if(summarizeType.equalsIgnoreCase(KruizeConstants.JSONKeys.CLUSTER)) { + if (namespaceName == null) { + namespaces.put(KruizeConstants.JSONKeys.COUNT, namespaceSet.size()); + namespaces.put(KruizeConstants.JSONKeys.NAMES, namespaceSet); + summarizeAPIObject.setNamespaces(namespaces); + } + } else { + // set clusters and namespaces based on params + if (clusterName != null && namespaceName != null) { + containers.put(KruizeConstants.JSONKeys.COUNT, containersSet.size()); + containers.put(KruizeConstants.JSONKeys.NAMES, containersSet); + summarizeAPIObject.setContainers(containers); + } else if (clusterName == null) { + clusters.put(KruizeConstants.JSONKeys.COUNT, 
clustersSet.size()); + clusters.put(KruizeConstants.JSONKeys.NAMES, clustersSet); + summarizeAPIObject.setClusters(clusters); + } + } + workloads.put(KruizeConstants.JSONKeys.COUNT, workloadsSet.size()); + workloads.put(KruizeConstants.JSONKeys.NAMES, workloadsSet); + summarizeAPIObject.setWorkloads(workloads); + + // set the top level action summary + resourceInfo = new ResourceInfo(workloadsWithoutRecommendation.size(), workloadsWithoutRecommendation); + HashMap noData = new HashMap<>(); + noData.put(AnalyzerConstants.ActionSummaryRecommendationItem.general, resourceInfo); + HashMap> actionSummaryTopLevel = new HashMap<>(); + actionSummaryTopLevel.put(KruizeConstants.JSONKeys.NO_DATA, noData); + + summarizeAPIObject.setActionSummaryTopLevel(actionSummaryTopLevel); + + summarizeAPIObject.setSummary(summary); + } + } + + /** + * Builds a list of ListRecommendationsAPIObject from a list of KruizeObject. + * + * @param kruizeObjects The list of KruizeObject to build recommendations from. + * @return A list of ListRecommendationsAPIObject containing recommendations based on the provided KruizeObject list. + */ + private List buildRecommendationsList(List kruizeObjects) { + return ListRecommendations.buildAPIResponse(kruizeObjects, false, true, null); + } + + /** + * Retrieves a SummarizeAPIObject from the provided cacheMap using the specified id. + * + * @param id The id used as the key to retrieve the SummarizeAPIObject from the cache. + * @param cacheMap The map representing the cache containing SummarizeAPIObject instances. + * @return The SummarizeAPIObject associated with the provided id in the cacheMap, or null if not found. + */ + private SummarizeAPIObject getSummaryFromCache(String id, Map cacheMap) { + if (cacheMap.containsKey(id) && cacheMap.get(id) != null) { + return cacheMap.get(id); + } + return null; + } + + /** + * Creates an ActionSummary object based on the provided notification map, variation map, and workload name. 
+ * + * @param notificationMap A map containing recommendation notifications. + * @param variation A map representing the variation of recommendation items. + * @param workloadName The name of the workload associated with the ActionSummary. + * @return An ActionSummary object containing summarized information about recommendations and notifications. + */ + private ActionSummary createActionSummaryObject(HashMap notificationMap, + HashMap> variation, String workloadName) { + + ActionSummary actionSummary = new ActionSummary(); + HashMap optimizable = new HashMap<>(); + HashMap idle = new HashMap<>(); + HashMap error = new HashMap<>(); + HashMap optimized = new HashMap<>(); + HashMap noData = new HashMap<>(); + HashMap critical = new HashMap<>(); + HashMap total = new HashMap<>(); + + Set commonErrorValues = new HashSet<>(); + commonErrorValues.add(RecommendationConstants.NotificationCodes.ERROR_NUM_PODS_CANNOT_BE_ZERO); + commonErrorValues.add(RecommendationConstants.NotificationCodes.ERROR_NUM_PODS_CANNOT_BE_NEGATIVE); + commonErrorValues.add(RecommendationConstants.NotificationCodes.ERROR_HOURS_CANNOT_BE_NEGATIVE); + + Set cpuErrorValues = new HashSet<>(); + cpuErrorValues.add(RecommendationConstants.NotificationCodes.ERROR_AMOUNT_MISSING_IN_CPU_SECTION); + cpuErrorValues.add(RecommendationConstants.NotificationCodes.ERROR_INVALID_FORMAT_IN_CPU_SECTION); + cpuErrorValues.add(RecommendationConstants.NotificationCodes.ERROR_INVALID_AMOUNT_IN_CPU_SECTION); + cpuErrorValues.add(RecommendationConstants.NotificationCodes.ERROR_FORMAT_MISSING_IN_CPU_SECTION); + + Set memoryErrorValues = new HashSet<>(); + memoryErrorValues.add(RecommendationConstants.NotificationCodes.ERROR_AMOUNT_MISSING_IN_MEMORY_SECTION); + memoryErrorValues.add(RecommendationConstants.NotificationCodes.ERROR_INVALID_FORMAT_IN_MEMORY_SECTION); + memoryErrorValues.add(RecommendationConstants.NotificationCodes.ERROR_INVALID_AMOUNT_IN_MEMORY_SECTION); + 
memoryErrorValues.add(RecommendationConstants.NotificationCodes.ERROR_FORMAT_MISSING_IN_MEMORY_SECTION); + + LOGGER.info("Workload = {}", workloadName); + LOGGER.info("Notifications = {}", notificationMap.values()); + // set the actionSummary as optimizable in case of no notifications + try { + if (notificationMap.isEmpty()) { + HashMap recommendationItemMap = new HashMap<>(); + for (Map.Entry> settingEntry : variation.entrySet()) { + if (settingEntry.getKey() == AnalyzerConstants.ResourceSetting.requests) { + for (Map.Entry itemEntry : settingEntry.getValue().entrySet()) { + RecommendationConfigItem configItem = itemEntry.getValue(); + if (configItem != null && configItem.getAmount() != null && configItem.getAmount() != 0) { + recommendationItemMap.put(itemEntry.getKey(), configItem.getAmount()); + } + } + } + } + + if (recommendationItemMap.containsKey(AnalyzerConstants.RecommendationItem.cpu)) { + ResourceInfo resourceInfo = new ResourceInfo(); + resourceInfo.setCount(resourceInfo.getCount() + 1); + resourceInfo.getWorkloadNames().add(workloadName); + optimizable.put(AnalyzerConstants.ActionSummaryRecommendationItem.cpu, resourceInfo); + actionSummary.setOptimizable(optimizable); + if (recommendationItemMap.containsKey(AnalyzerConstants.RecommendationItem.memory)) { + resourceInfo = new ResourceInfo(); + resourceInfo.setCount(resourceInfo.getCount() + 1); + resourceInfo.getWorkloadNames().add(workloadName); + optimizable.put(AnalyzerConstants.ActionSummaryRecommendationItem.memory, resourceInfo); + actionSummary.setOptimizable(optimizable); + + } + } + } + } catch (Exception e) { + LOGGER.error("Exception occurred while getting optimizable configs: {}", e.getMessage()); + } + + // create the actionSummary for the rest of the cases + ResourceInfo cpuSection = null; + ResourceInfo memorySection = null; + ResourceInfo generalSection; + try { + for (RecommendationNotification notification : notificationMap.values()) { + int code = notification.getCode(); + + if 
(code == RecommendationConstants.NotificationCodes.CRITICAL_CPU_REQUEST_NOT_SET) { + cpuSection = new ResourceInfo(); + cpuSection.getWorkloadNames().add(workloadName); + cpuSection.setCount(cpuSection.getWorkloadNames().size()); + critical.put(AnalyzerConstants.ActionSummaryRecommendationItem.cpu, cpuSection); + actionSummary.setCritical(critical); + } else if (code == RecommendationConstants.NotificationCodes.CRITICAL_MEMORY_REQUEST_NOT_SET || + code == RecommendationConstants.NotificationCodes.CRITICAL_MEMORY_LIMIT_NOT_SET) { + memorySection = new ResourceInfo(); + memorySection.getWorkloadNames().add(workloadName); + memorySection.setCount(memorySection.getWorkloadNames().size()); + critical.put(AnalyzerConstants.ActionSummaryRecommendationItem.memory, memorySection); + actionSummary.setCritical(critical); + } else if (code == RecommendationConstants.NotificationCodes.NOTICE_CPU_RECORDS_ARE_IDLE) { + cpuSection = new ResourceInfo(); + cpuSection.getWorkloadNames().add(workloadName); + cpuSection.setCount(cpuSection.getWorkloadNames().size()); + idle.put(AnalyzerConstants.ActionSummaryRecommendationItem.cpu, cpuSection); + actionSummary.setIdle(idle); + } else if (code == RecommendationConstants.NotificationCodes.NOTICE_CPU_REQUESTS_OPTIMISED || + code == RecommendationConstants.NotificationCodes.NOTICE_CPU_LIMITS_OPTIMISED) { + cpuSection = new ResourceInfo(); + cpuSection.getWorkloadNames().add(workloadName); + cpuSection.setCount(cpuSection.getWorkloadNames().size()); + optimized.put(AnalyzerConstants.ActionSummaryRecommendationItem.cpu, cpuSection); + actionSummary.setOptimized(optimized); + } else if (code == RecommendationConstants.NotificationCodes.WARNING_CPU_LIMIT_NOT_SET) { + cpuSection = new ResourceInfo(); + cpuSection.getWorkloadNames().add(workloadName); + cpuSection.setCount(cpuSection.getWorkloadNames().size()); + optimizable.put(AnalyzerConstants.ActionSummaryRecommendationItem.cpu, cpuSection); + actionSummary.setOptimizable(optimizable); + } 
else if (cpuErrorValues.contains(code)) { + cpuSection = new ResourceInfo(); + cpuSection.getWorkloadNames().add(workloadName); + cpuSection.setCount(cpuSection.getWorkloadNames().size()); + error.put(AnalyzerConstants.ActionSummaryRecommendationItem.cpu, cpuSection); + actionSummary.setError(error); + } else if (code == RecommendationConstants.NotificationCodes.NOTICE_MEMORY_REQUESTS_OPTIMISED || + code == RecommendationConstants.NotificationCodes.NOTICE_MEMORY_LIMITS_OPTIMISED) { + memorySection = new ResourceInfo(); + memorySection.getWorkloadNames().add(workloadName); + memorySection.setCount(memorySection.getWorkloadNames().size()); + optimized.put(AnalyzerConstants.ActionSummaryRecommendationItem.memory, memorySection); + actionSummary.setOptimized(optimized); + } else if (memoryErrorValues.contains(code)) { + memorySection = new ResourceInfo(); + memorySection.getWorkloadNames().add(workloadName); + memorySection.setCount(memorySection.getWorkloadNames().size()); + error.put(AnalyzerConstants.ActionSummaryRecommendationItem.memory, memorySection); + actionSummary.setError(error); + } else if (code == RecommendationConstants.NotificationCodes.INFO_NOT_ENOUGH_DATA || + code == RecommendationConstants.NotificationCodes.NOTICE_CPU_RECORDS_ARE_ZERO) { + generalSection = new ResourceInfo(); + generalSection.getWorkloadNames().add(workloadName); + generalSection.setCount(generalSection.getWorkloadNames().size()); + noData.put(AnalyzerConstants.ActionSummaryRecommendationItem.general, generalSection); + actionSummary.setNoData(noData); + } else if (commonErrorValues.contains(code)) { + generalSection = new ResourceInfo(); + generalSection.getWorkloadNames().add(workloadName); + generalSection.setCount(generalSection.getWorkloadNames().size()); + error.put(AnalyzerConstants.ActionSummaryRecommendationItem.general, generalSection); + actionSummary.setError(error); + } else { + LOGGER.warn("Code {} not present in the list!", code); + } + } + } catch (Exception e) { + 
LOGGER.info("Exception occurred while building actionSummary : {}", e.getMessage()); + } + + // check for case when only CPU/Memory notification is present, if yes, set the other one as optimizable + if (cpuSection == null && memorySection != null) { + cpuSection = new ResourceInfo(); + cpuSection.getWorkloadNames().add(workloadName); + cpuSection.setCount(cpuSection.getWorkloadNames().size()); + optimizable.put(AnalyzerConstants.ActionSummaryRecommendationItem.memory, cpuSection); + actionSummary.setOptimizable(optimizable); + } else if (cpuSection != null && memorySection == null) { + memorySection = new ResourceInfo(); + memorySection.getWorkloadNames().add(workloadName); + memorySection.setCount(memorySection.getWorkloadNames().size()); + optimizable.put(AnalyzerConstants.ActionSummaryRecommendationItem.memory, memorySection); + actionSummary.setOptimizable(optimizable); + } + + // set the total value + ResourceInfo totalCpuResource = new ResourceInfo(); + ResourceInfo totalMemoryResource = new ResourceInfo(); + ResourceInfo totalGeneralResource = new ResourceInfo(); + try { + Set allCpuWorkloads = Optional.of(actionSummary).stream() + .flatMap(summary -> Stream.of(summary.getIdle(), summary.getOptimized(), summary.getCritical(), + summary.getOptimizable(), summary.getError(), summary.getNoData())) + .flatMap(map -> Optional.ofNullable(map.get(AnalyzerConstants.ActionSummaryRecommendationItem.cpu)) + .stream().flatMap(item -> item.getWorkloadNames().stream())) + .collect(Collectors.toCollection(HashSet::new)); + + totalCpuResource.setWorkloadNames(allCpuWorkloads); + totalCpuResource.setCount(allCpuWorkloads.size()); + + Set allMemoryWorkloads = Optional.of(actionSummary).stream() + .flatMap(summary -> Stream.of(summary.getIdle(), summary.getOptimized(), summary.getCritical(), + summary.getOptimizable(), summary.getError(), summary.getNoData())) + .flatMap(map -> Optional.ofNullable(map.get(AnalyzerConstants.ActionSummaryRecommendationItem.memory)) + 
.stream().flatMap(item -> item.getWorkloadNames().stream())) + .collect(Collectors.toCollection(HashSet::new)); + + totalMemoryResource.setWorkloadNames(allMemoryWorkloads); + totalMemoryResource.setCount(allMemoryWorkloads.size()); + + Set allGeneralWorkloads = Optional.of(actionSummary).stream() + .flatMap(summary -> Stream.of(summary.getIdle(), summary.getOptimized(), summary.getCritical(), + summary.getOptimizable(), summary.getError(), summary.getNoData())) + .flatMap(map -> Optional.ofNullable(map.get(AnalyzerConstants.ActionSummaryRecommendationItem.general)) + .stream().flatMap(item -> item.getWorkloadNames().stream())) + .collect(Collectors.toCollection(HashSet::new)); + + totalGeneralResource.setWorkloadNames(allGeneralWorkloads); + totalGeneralResource.setCount(allGeneralWorkloads.size()); + } catch (Exception e) { + LOGGER.error("Exception occurred while computing the total value: {}", e.getMessage()); + } + total.put(AnalyzerConstants.ActionSummaryRecommendationItem.cpu, totalCpuResource); + total.put(AnalyzerConstants.ActionSummaryRecommendationItem.memory, totalMemoryResource); + total.put(AnalyzerConstants.ActionSummaryRecommendationItem.general, totalGeneralResource); + + actionSummary.setTotal(total); + + LOGGER.debug("actionSummary Final = {}", actionSummary); + return actionSummary; + } + + /** + * Converts a RecommendationEngine into a RecommendationEngineSummary object containing summarized information. + * + * @param mappedRecommendationForEngine The RecommendationEngine object to be converted. + * @param workloadName The name of the workload associated with the RecommendationEngine. + * @return A RecommendationEngineSummary object containing summarized information about the RecommendationEngine. 
+ */ + public RecommendationEngineSummary convertToEngineSummary(MappedRecommendationForEngine mappedRecommendationForEngine, String workloadName) { + RecommendationEngineSummary recommendationEngineSummary = new RecommendationEngineSummary(); + try { + mappedRecommendationForEngine.setConfig(setDefaultValuesForConfigs(mappedRecommendationForEngine.getConfig())); + mappedRecommendationForEngine.setVariation(setDefaultValuesForConfigs(mappedRecommendationForEngine.getVariation())); + recommendationEngineSummary.setConfig(mappedRecommendationForEngine.getConfig()); + recommendationEngineSummary.setChange(calculateChange(mappedRecommendationForEngine)); + recommendationEngineSummary.setNotificationsSummary(calculateNotificationsSummary(mappedRecommendationForEngine.getNotificationHashMap())); + recommendationEngineSummary.setActionSummary(createActionSummaryObject(mappedRecommendationForEngine.getNotificationHashMap(), mappedRecommendationForEngine.getVariation(), workloadName)); + } catch (Exception e) { + LOGGER.error("Exception occurred while converting recommendation to recommendationSummary: {}", e.getMessage()); + } + return recommendationEngineSummary; + } + + /** + * Calculates and populates a change map based on the provided Recommendation object's variation. + * + * @param mappedRecommendationForEngine The Recommendation object to calculate changes for. + * @return A change map containing information about increases, decreases, and variations in resource settings. 
+ */ + private HashMap>> calculateChange(MappedRecommendationForEngine mappedRecommendationForEngine) { + HashMap>> changeMap = new HashMap<>(); + changeMap.put(AnalyzerConstants.ResourceChange.increase, new HashMap<>()); + changeMap.put(AnalyzerConstants.ResourceChange.decrease, new HashMap<>()); + changeMap.put(AnalyzerConstants.ResourceChange.variation, new HashMap<>()); + + // Populate the changeMap with default values + setDefaultValuesForChangeObject(changeMap); + + HashMap> + variationConfig = mappedRecommendationForEngine.getVariation(); + + // set the increase and decrease values + for (Map.Entry> + settingEntry : variationConfig.entrySet()) { + AnalyzerConstants.ResourceSetting resourceSetting = settingEntry.getKey(); + HashMap itemMap = settingEntry.getValue(); + + for (Map.Entry itemEntry : itemMap.entrySet()) { + AnalyzerConstants.RecommendationItem recommendationItem = itemEntry.getKey(); + RecommendationConfigItem configItem = itemEntry.getValue(); + double amount = configItem.getAmount(); + + if (amount > 0) { + changeMap.get(AnalyzerConstants.ResourceChange.increase) + .computeIfAbsent(resourceSetting, k -> new HashMap<>()) + .put(recommendationItem, configItem); + } else if (amount < 0) { + changeMap.get(AnalyzerConstants.ResourceChange.decrease) + .computeIfAbsent(resourceSetting, k -> new HashMap<>()) + .put(recommendationItem, configItem); + } + } + } + + // Set the variation + changeMap.put(AnalyzerConstants.ResourceChange.variation, variationConfig); + return changeMap; + } + + /** + * Populates the provided change map with default values for the change objects. + * + * @param changeMap The change map to populate with default values. 
+ */ + private void setDefaultValuesForChangeObject(HashMap>> changeMap) { + for (AnalyzerConstants.ResourceChange change : AnalyzerConstants.ResourceChange.values()) { + HashMap> settingMap = new HashMap<>(); + changeMap.put(change, settingMap); + + for (AnalyzerConstants.ResourceSetting setting : AnalyzerConstants.ResourceSetting.values()) { + HashMap recommendationMap = new HashMap<>(); + settingMap.put(setting, recommendationMap); + + for (AnalyzerConstants.RecommendationItem item : AnalyzerConstants.RecommendationItem.values()) { + RecommendationConfigItem configItem = new RecommendationConfigItem(); + + if (setting == AnalyzerConstants.ResourceSetting.requests || setting == AnalyzerConstants.ResourceSetting.limits) { + configItem.setAmount(0.0); + configItem.setFormat(item == AnalyzerConstants.RecommendationItem.memory ? "MiB" : "cores"); + + recommendationMap.put(item, configItem); + } + } + } + } + } + + /** + * Calculates the summary of different types of recommendations notifications. + * + * @param notifications The map containing recommendation notifications. + * @return The summary of notifications categorized by type. 
+ */ + public NotificationsSummary calculateNotificationsSummary(HashMap notifications) { + NotificationsSummary summary = new NotificationsSummary(); + int infoCount = 0; + int noticeCount = 0; + int warningCount = 0; + int errorCount = 0; + int criticalCount = 0; + + for (RecommendationNotification notification : notifications.values()) { + switch (notification.getType()) { + case "info" -> infoCount++; + case "notice" -> noticeCount++; + case "warning" -> warningCount++; + case "error" -> errorCount++; + case "critical" -> criticalCount++; + } + } + summary.setInfo(infoCount); + summary.setNotice(noticeCount); + summary.setWarning(warningCount); + summary.setError(errorCount); + summary.setCritical(criticalCount); + LOGGER.debug("Notif summary = {}", notifications.values()); + return summary; + } + + /** + * Merges two configuration maps and populates a target configuration map with the merged values. + * + * @param config1 The first configuration map to be merged. + * @param config2 The second configuration map to be merged. + * @param configMap The target configuration map to populate with merged values. + * @return The merged configuration map. + */ + public HashMap> mergeConfigItems( + HashMap> config1, + HashMap> config2, + HashMap> configMap) { + if (configMap == null) { + configMap = new HashMap<>(); + } + + // if the incoming config is null, skip merging + if (config1 != null) { + mergeConfigObjects(configMap, config1); + } + if (config2 != null) { + mergeConfigObjects(configMap, config2); + } + + return configMap; + } + + /** + * Merges two configuration maps and populates a target configuration map with the merged values. + * + * @param targetMap The target configuration map to populate with merged values. + * @param sourceMap The source configuration map to merge into the target map. 
+ */ + private void mergeConfigObjects(HashMap> targetMap, HashMap> sourceMap) { + for (Map.Entry> entry : + sourceMap.entrySet()) { + AnalyzerConstants.ResourceSetting resourceSetting = entry.getKey(); + HashMap itemMap = entry.getValue(); + + for (Map.Entry itemEntry : itemMap.entrySet()) { + AnalyzerConstants.RecommendationItem recommendationItem = itemEntry.getKey(); + RecommendationConfigItem configItem = itemEntry.getValue(); + + targetMap.computeIfAbsent(resourceSetting, k -> new HashMap<>()) + .merge(recommendationItem, configItem, (existingItem, newItem) -> { + // Sum the amount values for existing and new RecommendationConfigItems + existingItem.setAmount(existingItem.getAmount() + newItem.getAmount()); + return existingItem; + }); + } + } + } + + /** + * Merges two sets of change objects representing resource change details from different recommendation summaries. + * + * @param existingSummary The existing recommendation summary to merge from. + * @param currentSummary The current recommendation summary to merge from. + * @return A merged map of resource change objects. + */ + public HashMap>> mergeChangeObjects(RecommendationEngineSummary existingSummary, RecommendationEngineSummary currentSummary) { + HashMap>> changeMapExisting = existingSummary.getChange(); + HashMap>> changeMapCurrent = currentSummary.getChange(); + + return mergeChangeObjectMaps(changeMapExisting, changeMapCurrent); + } + + /** + * Merges two maps of change objects representing resource change details from different recommendation summaries. + * + * @param changeMapExisting The map of change objects from the existing recommendation summary. + * @param changeMapCurrent The map of change objects from the current recommendation summary. + * @return A merged map of resource change objects. 
+ */ + public static HashMap>> mergeChangeObjectMaps(HashMap>> changeMapExisting, + HashMap>> changeMapCurrent) { + + HashMap>> mergedMap = new HashMap<>(); + try { + for (AnalyzerConstants.ResourceChange change : AnalyzerConstants.ResourceChange.values()) { + mergedMap.put(change, new HashMap<>()); + for (AnalyzerConstants.ResourceSetting setting : AnalyzerConstants.ResourceSetting.values()) { + mergedMap.get(change).put(setting, new HashMap<>()); + for (AnalyzerConstants.RecommendationItem item : AnalyzerConstants.RecommendationItem.values()) { + RecommendationConfigItem existingItem = changeMapExisting.get(change).get(setting).get(item); + RecommendationConfigItem currentItem = changeMapCurrent.get(change).get(setting).get(item); + if (existingItem != null && currentItem != null) { + Double mergedAmount = existingItem.getAmount() + currentItem.getAmount(); + String format = existingItem.getFormat(); + RecommendationConfigItem mergedItem = new RecommendationConfigItem(mergedAmount, format); + mergedMap.get(change).get(setting).put(item, mergedItem); + } else if (existingItem != null) { + mergedMap.get(change).get(setting).put(item, existingItem); + } else if (currentItem != null) { + mergedMap.get(change).get(setting).put(item, currentItem); + } + } + } + } + } catch (Exception e) { + LOGGER.error("Exception occurred while merging Change Maps : {}", e.getMessage()); + } + return mergedMap; + } + /** + * Sets default values for a configuration map of recommendation items. + * + * @param config The configuration map to set default values for. + * @return The configuration map with default values populated. 
+ */ + private HashMap> + setDefaultValuesForConfigs(HashMap> config) { + if (config == null) { + config = new HashMap<>(); + // Initialize inner maps + config.put(AnalyzerConstants.ResourceSetting.requests, new HashMap<>()); + config.put(AnalyzerConstants.ResourceSetting.limits, new HashMap<>()); + // Add default config items + for (AnalyzerConstants.ResourceSetting resourceSetting : AnalyzerConstants.ResourceSetting.values()) { + HashMap innerMap = config.get(resourceSetting); + + for (AnalyzerConstants.RecommendationItem key : AnalyzerConstants.RecommendationItem.values()) { + innerMap.put(key, new RecommendationConfigItem()); + } + } + } + // Check inner maps and config items for null + for (Map.Entry> + entry : config.entrySet()) { + + HashMap innerMap = entry.getValue(); + + for (AnalyzerConstants.RecommendationItem key : AnalyzerConstants.RecommendationItem.values()) { + if (!innerMap.containsKey(key)) { + // Item not present, add with defaults + if (key.equals(AnalyzerConstants.RecommendationItem.cpu)) + innerMap.put(key, new RecommendationConfigItem(0.0, "cores")); + else + innerMap.put(key, new RecommendationConfigItem(0.0, "MiB")); + } else { + RecommendationConfigItem configItem = innerMap.get(key); + if (configItem.getAmount() == null) { + configItem.setAmount(0.0); + } + if (configItem.getFormat() == null) { + if (key.equals(AnalyzerConstants.RecommendationItem.cpu)) + configItem.setFormat("cores"); + else + configItem.setFormat("MiB"); + } + } + } + + } + return config; + } + + public void sendErrorResponse(HttpServletResponse response, Exception e, int httpStatusCode, String errorMsg) throws + IOException { + if (null != e) { + LOGGER.error(e.getMessage()); + if (null == errorMsg) errorMsg = e.getMessage(); + } + response.sendError(httpStatusCode, errorMsg); + } +} diff --git a/src/main/java/com/autotune/analyzer/utils/AnalyzerConstants.java b/src/main/java/com/autotune/analyzer/utils/AnalyzerConstants.java index 0fb0bf328..ea3fc2e9c 100644 --- 
a/src/main/java/com/autotune/analyzer/utils/AnalyzerConstants.java +++ b/src/main/java/com/autotune/analyzer/utils/AnalyzerConstants.java @@ -129,6 +129,12 @@ public enum ResourceSetting { limits } + public enum ResourceChange { + increase, + decrease, + variation + } + public enum PersistenceType { LOCAL, //Store only local , Default HYBRID, //Store data both in db and local @@ -531,4 +537,10 @@ private BooleanString() { } } + + public enum ActionSummaryRecommendationItem { + cpu, + memory, + general + } } diff --git a/src/main/java/com/autotune/database/dao/ExperimentDAO.java b/src/main/java/com/autotune/database/dao/ExperimentDAO.java index 9c60d84ce..1c743f372 100644 --- a/src/main/java/com/autotune/database/dao/ExperimentDAO.java +++ b/src/main/java/com/autotune/database/dao/ExperimentDAO.java @@ -9,6 +9,7 @@ import com.autotune.database.table.KruizeResultsEntry; import java.sql.Timestamp; +import java.util.HashMap; import java.util.List; public interface ExperimentDAO { @@ -68,4 +69,19 @@ public interface ExperimentDAO { public void addPartitions(String tableName, String month, String year, int dayOfTheMonth, String partitionType) throws Exception; + List loadAllClusterNames() throws Exception; + + List loadRecommendationsFromDBByNamespaceName(String namespaceName, List entries) throws Exception; + + List loadExperimentsByClusterName(String clusterName) throws Exception; + + List loadExperimentFromDBByClusterAndNamespaceName(String clusterName, String namespaceName) throws Exception; + + List loadRecommendationsFromDBByClusterAndNamespaceName(String clusterName, String namespaceName, List entries) throws Exception; + + List loadRecommendationsFromDBByClusterName(String clusterName, List entries) throws Exception; + + List loadExperimentsByNamespaceName(String namespaceName) throws Exception; + + HashMap> loadAllClusterNamespaceAssociation() throws Exception; } diff --git a/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java 
b/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java index 369b2030b..b6185e34c 100644 --- a/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java +++ b/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java @@ -19,6 +19,8 @@ import org.hibernate.Session; import org.hibernate.Transaction; import org.hibernate.exception.ConstraintViolationException; +import jakarta.persistence.EntityManager; +import org.hibernate.*; import org.hibernate.query.Query; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -28,6 +30,7 @@ import java.time.YearMonth; import java.time.format.DateTimeFormatter; import java.util.ArrayList; +import java.util.HashMap; import java.util.List; import java.util.stream.IntStream; @@ -371,7 +374,7 @@ public List loadAllExperiments() throws Exception { statusValue = "success"; } catch (Exception e) { LOGGER.error("Not able to load experiment due to {}", e.getMessage()); - throw new Exception("Error while loading exsisting experiments from database due to : " + e.getMessage()); + throw new Exception("Error while loading existing experiments from database due to : " + e.getMessage()); } finally { if (null != timerLoadAllExp) { MetricsConfig.timerLoadAllExp = MetricsConfig.timerBLoadAllExp.tag("status", statusValue).register(MetricsConfig.meterRegistry()); @@ -602,4 +605,185 @@ public List getKruizeResultsEntry(String experiment_name, St } return kruizeResultsEntryList; } + + + + @Override + public List loadAllClusterNames() throws Exception { + List distinctClusterNames; + try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) { + Query query = session.createQuery(DBConstants.SQLQUERY.SELECT_DISTINCT_CLUSTER_NAMES_FROM_EXPERIMENTS, + String.class); + distinctClusterNames = query.getResultList(); + } catch (Exception e) { + LOGGER.error("Unable to fetch cluster names : {}", e.getMessage()); + throw new Exception("Error while fetching the cluster names from database due to : " + e.getMessage()); + 
} + return distinctClusterNames; + } + + @Override + public List loadRecommendationsFromDBByNamespaceName(String namespaceName, List entries) throws Exception { + List recommendationEntries = new ArrayList<>(); + ArrayList experimentNames = new ArrayList<>(); + for (KruizeExperimentEntry entry : entries) { + experimentNames.add(entry.getExperiment_name()); + } + try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) { + + String namespaceFilter = "[{\"namespace\": \"" + namespaceName + "\"}]"; //TODO: need to think of a better way to set up this query + + ScrollableResults results = session.createNativeQuery(SELECT_FROM_RECOMMENDATIONS_BY_NAMESPACE_NAME, KruizeRecommendationEntry.class) + .setParameter("namespaceFilter", namespaceFilter) + .setParameterList("experimentNames", experimentNames) + .setParameter("limit", experimentNames.size()) + .scroll(); + while (results.next()) { + KruizeRecommendationEntry recommendationEntry = results.get(); + Hibernate.initialize(recommendationEntry.getExtended_data()); + + recommendationEntries.add(recommendationEntry); + } + } catch (Exception e) { + LOGGER.error("Not able to load recommendations due to {}", e.getMessage()); + throw new Exception("Error while loading recommendations from database due to : " + e.getMessage()); + } + return recommendationEntries; + } + + @Override + public List loadExperimentsByClusterName(String clusterName) throws Exception { + List entries; + try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) { + entries = session.createQuery(SELECT_FROM_EXPERIMENTS_BY_CLUSTER_NAME, KruizeExperimentEntry.class) + .setParameter("clusterName", clusterName) + .list(); + } catch (Exception e) { + LOGGER.error("Not able to load experiment due to {}", e.getMessage()); + throw new Exception("Error while loading existing experiment from database due to : " + e.getMessage()); + } + return entries; + } + + @Override + public List 
loadExperimentFromDBByClusterAndNamespaceName(String clusterName, String namespaceName) throws Exception { + List entries = new ArrayList<>(); + try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) { + + String namespaceFilter = "[{\"namespace\": \"" + namespaceName + "\"}]"; //TODO: need to think of a better way to set up this query + + ScrollableResults results = session.createNativeQuery(SELECT_FROM_EXPERIMENTS_BY_NAMESPACE_AND_CLUSTER_NAME, KruizeExperimentEntry.class) + .setParameter("clusterName", clusterName) + .setParameter("namespaceFilter", namespaceFilter) + .setMaxResults(BATCH_SIZE) + .scroll(); + while (results.next()) { + KruizeExperimentEntry experimentEntry = results.get(); + Hibernate.initialize(experimentEntry.getExtended_data()); + + entries.add(experimentEntry); + } + } catch (Exception e) { + LOGGER.error("Not able to load experiment due to {}", e.getMessage()); + throw new Exception("Error while loading existing experiment from database due to : " + e.getMessage()); + } + return entries; + } + + @Override + public List loadRecommendationsFromDBByClusterAndNamespaceName(String clusterName, String namespaceName, + List entries) throws Exception { + + List recommendationEntries = new ArrayList<>(); + ArrayList experimentNames = new ArrayList<>(); + for (KruizeExperimentEntry entry : entries) { + experimentNames.add(entry.getExperiment_name()); + } + try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) { + + String namespaceFilter = "[{\"namespace\": \"" + namespaceName + "\"}]"; //TODO: need to think of a better way to set up this query + + ScrollableResults results = session.createNativeQuery(SELECT_FROM_RECOMMENDATIONS_BY_NAMESPACE_AND_CLUSTER_NAME, KruizeRecommendationEntry.class) + .setParameter("clusterName", clusterName) + .setParameter("namespaceFilter", namespaceFilter) + .setParameterList("experimentNames", experimentNames) + .setParameter("limit", experimentNames.size()) + .scroll(); + 
while (results.next()) { + KruizeRecommendationEntry recommendationEntry = results.get(); + Hibernate.initialize(recommendationEntry.getExtended_data()); + + recommendationEntries.add(recommendationEntry); + } + } catch (Exception e) { + LOGGER.error("Not able to load recommendations due to {}", e.getMessage()); + throw new Exception("Error while loading existing recommendations from database due to : " + e.getMessage()); + } + return recommendationEntries; + } + + @Override + public List loadRecommendationsFromDBByClusterName(String clusterName, List entries) throws Exception { + List recommendationEntries = new ArrayList<>(); + ArrayList experimentNames = new ArrayList<>(); + for (KruizeExperimentEntry entry : entries) { + experimentNames.add(entry.getExperiment_name()); + } + try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()){ + Query query = session.createQuery(SELECT_FROM_RECOMMENDATIONS_BY_CLUSTER_NAME, KruizeRecommendationEntry.class) + .setParameter("clusterName", clusterName) + .setParameter("experimentList", experimentNames) + .setParameter("limit", experimentNames.size()); + ScrollableResults results = query.scroll(); + + while (results.next()) { + KruizeRecommendationEntry recommendationEntry = results.get(); + Hibernate.initialize(recommendationEntry.getExtended_data()); + + recommendationEntries.add(recommendationEntry); + } + } catch (Exception e) { + LOGGER.error("Not able to load recommendations due to {}", e.getMessage()); + throw new Exception("Error while loading existing recommendations from database due to : " + e.getMessage()); + } + return recommendationEntries; + } + + @Override + public List loadExperimentsByNamespaceName(String namespaceName) throws Exception { + List entries; + try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) { + EntityManager entityManager = session.getEntityManagerFactory().createEntityManager(); + String namespaceFilter = "[{\"namespace\": \"" + namespaceName + 
"\"}]"; //TODO: need to think of a better way to set up this query + + jakarta.persistence.Query query = entityManager.createNativeQuery(SELECT_FROM_EXPERIMENTS_BY_NAMESPACE_NAME, KruizeExperimentEntry.class) + .setParameter("namespaceFilter", namespaceFilter); + entries = query.getResultList(); + } catch (Exception e) { + LOGGER.error("Not able to load experiment due to {}", e.getMessage()); + throw new Exception("Error while loading existing experiment from database due to : " + e.getMessage()); + } + return entries; + } + + @Override + public HashMap> loadAllClusterNamespaceAssociation() throws Exception { + HashMap> entries = new HashMap<>(); + try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) { + EntityManager entityManager = session.getEntityManagerFactory().createEntityManager(); + + Query query = (Query) entityManager.createNativeQuery(SELECT_CLUSTERS_AND_NAMESPACE_FROM_RECOMMENDATIONS); + List results = query.getResultList(); + + for (Object[] result : results) { + String clusterName = (String) result[0]; + List namespaces = (List) result[1]; + entries.put(clusterName, namespaces); + } + } catch (Exception e) { + LOGGER.error("Not able to load experiment due to {}", e.getMessage()); + throw new Exception("Error while loading existing experiment from database due to : " + e.getMessage()); + } + return entries; + } } diff --git a/src/main/java/com/autotune/database/helper/DBConstants.java b/src/main/java/com/autotune/database/helper/DBConstants.java index ba5341808..9a77615ec 100644 --- a/src/main/java/com/autotune/database/helper/DBConstants.java +++ b/src/main/java/com/autotune/database/helper/DBConstants.java @@ -5,8 +5,10 @@ public class DBConstants { public static final class SQLQUERY { + public static final int BATCH_SIZE = 1000; public static final String SELECT_FROM_EXPERIMENTS = "from KruizeExperimentEntry"; public static final String SELECT_FROM_EXPERIMENTS_BY_EXP_NAME = "from KruizeExperimentEntry k WHERE 
k.experiment_name = :experimentName"; + public static final String SELECT_FROM_EXPERIMENTS_BY_CLUSTER_NAME = "from KruizeExperimentEntry k WHERE k.cluster_name = :clusterName"; public static final String SELECT_FROM_RESULTS = "from KruizeResultsEntry"; public static final String SELECT_FROM_RESULTS_BY_EXP_NAME = "from KruizeResultsEntry k WHERE k.experiment_name = :experimentName"; public static final String SELECT_FROM_RESULTS_BY_EXP_NAME_AND_DATE_AND_LIMIT = String.format("from KruizeResultsEntry k " + @@ -28,6 +30,23 @@ public static final class SQLQUERY { public static final String SELECT_FROM_RECOMMENDATIONS = "from KruizeRecommendationEntry"; public static final String SELECT_FROM_PERFORMANCE_PROFILE = "from KruizePerformanceProfileEntry"; public static final String SELECT_FROM_PERFORMANCE_PROFILE_BY_NAME = "from KruizePerformanceProfileEntry k WHERE k.name = :name"; + + public static final String SELECT_DISTINCT_CLUSTER_NAMES_FROM_EXPERIMENTS = "SELECT DISTINCT cluster_name " + SELECT_FROM_EXPERIMENTS ; + public static final String SELECT_FROM_RECOMMENDATIONS_BY_CLUSTER_NAME = "from KruizeRecommendationEntry k WHERE k.cluster_name = :clusterName " + + "and k.experiment_name IN :experimentList ORDER by interval_end_time DESC LIMIT :limit"; + public static final String SELECT_FROM_RECOMMENDATIONS_BY_NAMESPACE_NAME = "SELECT * FROM public.kruize_recommendations WHERE (extended_data -> " + + "'kubernetes_objects' @> cast(:namespaceFilter as jsonb)) AND experiment_name IN (:experimentNames) ORDER by interval_end_time DESC LIMIT :limit"; + public static final String SELECT_FROM_EXPERIMENTS_BY_NAMESPACE_NAME = "SELECT * FROM public.kruize_experiments WHERE (extended_data -> " + + "'kubernetes_objects' @> cast(:namespaceFilter as jsonb))"; + public static final String SELECT_FROM_RECOMMENDATIONS_BY_NAMESPACE_AND_CLUSTER_NAME = "SELECT * FROM public.kruize_recommendations WHERE (cluster_name = :clusterName " + + "AND extended_data -> 'kubernetes_objects' @> 
cast(:namespaceFilter as jsonb)) AND experiment_name IN (:experimentNames) " + + "ORDER by interval_end_time DESC LIMIT :limit"; + public static final String SELECT_FROM_EXPERIMENTS_BY_NAMESPACE_AND_CLUSTER_NAME = "SELECT * FROM public.kruize_experiments WHERE (cluster_name = :clusterName " + + "AND extended_data -> 'kubernetes_objects' @> cast(:namespaceFilter as jsonb))"; + public static final String SELECT_CLUSTERS_AND_NAMESPACE_FROM_RECOMMENDATIONS = "SELECT cluster_name, JSON_AGG(DISTINCT extended_data->'kubernetes_objects'->0->'namespace') as namespaces " + + "FROM kruize_recommendations GROUP BY cluster_name"; + + public static final String DELETE_FROM_EXPERIMENTS_BY_EXP_NAME = "DELETE FROM KruizeExperimentEntry k WHERE k.experiment_name = :experimentName"; public static final String DELETE_FROM_RESULTS_BY_EXP_NAME = "DELETE FROM KruizeResultsEntry k WHERE k.experiment_name = :experimentName"; public static final String DELETE_FROM_RECOMMENDATIONS_BY_EXP_NAME = "DELETE FROM KruizeRecommendationEntry k WHERE k.experiment_name = :experimentName"; diff --git a/src/main/java/com/autotune/database/service/ExperimentDBService.java b/src/main/java/com/autotune/database/service/ExperimentDBService.java index 7f790e9de..a66e7ac4f 100644 --- a/src/main/java/com/autotune/database/service/ExperimentDBService.java +++ b/src/main/java/com/autotune/database/service/ExperimentDBService.java @@ -40,9 +40,7 @@ import org.slf4j.LoggerFactory; import java.sql.Timestamp; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; +import java.util.*; public class ExperimentDBService { private static final long serialVersionUID = 1L; @@ -111,22 +109,7 @@ public void loadAllRecommendations(Map mainKruizeExperimen // Load Recommendations from DB and save to local List recommendationEntries = experimentDAO.loadAllRecommendations(); - if (null != recommendationEntries && !recommendationEntries.isEmpty()) { - List recommendationsAPIObjects - = null; - try { - 
recommendationsAPIObjects = DBHelpers.Converters.KruizeObjectConverters - .convertRecommendationEntryToRecommendationAPIObject(recommendationEntries); - } catch (InvalidConversionOfRecommendationEntryException e) { - e.printStackTrace(); - } - if (null != recommendationsAPIObjects && !recommendationsAPIObjects.isEmpty()) { - - experimentInterface.addRecommendationsToLocalStorage(mainKruizeExperimentMap, - recommendationsAPIObjects, - true); - } - } + saveDBRecommendationsDataToLocal(mainKruizeExperimentMap, experimentInterface, recommendationEntries); } public void loadAllPerformanceProfiles(Map performanceProfileMap) throws Exception { @@ -173,21 +156,7 @@ public void loadRecommendationsFromDBByName(Map mainKruize ExperimentInterface experimentInterface = new ExperimentInterfaceImpl(); // Load Recommendations from DB and save to local List recommendationEntries = experimentDAO.loadRecommendationsByExperimentName(experimentName); - if (null != recommendationEntries && !recommendationEntries.isEmpty()) { - List recommendationsAPIObjects - = null; - try { - recommendationsAPIObjects = DBHelpers.Converters.KruizeObjectConverters - .convertRecommendationEntryToRecommendationAPIObject(recommendationEntries); - } catch (InvalidConversionOfRecommendationEntryException e) { - e.printStackTrace(); - } - if (null != recommendationsAPIObjects && !recommendationsAPIObjects.isEmpty()) { - experimentInterface.addRecommendationsToLocalStorage(mainKruizeExperimentMap, - recommendationsAPIObjects, - true); - } - } + saveDBRecommendationsDataToLocal(mainKruizeExperimentMap, experimentInterface, recommendationEntries); } public ValidationOutputData addExperimentToDB(CreateExperimentAPIObject createExperimentAPIObject) { @@ -355,4 +324,152 @@ public List getExperimentResultData(String experiment_name } return experimentResultDataList; } + + public List loadAllClusterNames() throws Exception { + return experimentDAO.loadAllClusterNames(); + } + + public void 
loadExperimentsAndRecommendationsByClusterAndNamespaceName(Map mKruizeExperimentMap, + String clusterName, String namespaceName) throws Exception { + ExperimentInterface experimentInterface = new ExperimentInterfaceImpl(); + List entries = experimentDAO.loadExperimentFromDBByClusterAndNamespaceName(clusterName, namespaceName); + if (null != entries && !entries.isEmpty()) { + List createExperimentAPIObjects = DBHelpers.Converters.KruizeObjectConverters.convertExperimentEntryToCreateExperimentAPIObject(entries); + if (null != createExperimentAPIObjects && !createExperimentAPIObjects.isEmpty()) { + List kruizeExpList = new ArrayList<>(); + + int failureThreshHold = createExperimentAPIObjects.size(); + int failureCount = 0; + for (CreateExperimentAPIObject createExperimentAPIObject : createExperimentAPIObjects) { + KruizeObject kruizeObject = Converters.KruizeObjectConverters.convertCreateExperimentAPIObjToKruizeObject(createExperimentAPIObject); + if (null != kruizeObject) { + kruizeExpList.add(kruizeObject); + } else { + failureCount++; + } + } + if (failureThreshHold > 0 && failureCount == failureThreshHold) { + throw new Exception("unable to load from DB."); + } + experimentInterface.addExperimentToLocalStorage(mKruizeExperimentMap, kruizeExpList); + } + } + + loadRecommendationsFromDBByClusterAndNamespaceName(mKruizeExperimentMap, clusterName, namespaceName, entries); + } + + private void loadRecommendationsFromDBByClusterAndNamespaceName(Map mKruizeExperimentMap, String clusterName, + String namespaceName, List entries) throws Exception { + ExperimentInterface experimentInterface = new ExperimentInterfaceImpl(); + + // Load Recommendations from DB and save to local + List recommendationEntries = experimentDAO.loadRecommendationsFromDBByClusterAndNamespaceName( + clusterName, namespaceName, entries); + saveDBRecommendationsDataToLocal(mKruizeExperimentMap, experimentInterface, recommendationEntries); + } + + public void 
loadExperimentsAndRecommendationsByClusterName(Map mKruizeExperimentMap, String clusterName) throws Exception { + // Load experiments from DB and save to local + ExperimentInterface experimentInterface = new ExperimentInterfaceImpl(); + List entries = null; + try { + entries = experimentDAO.loadExperimentsByClusterName(clusterName); + } catch (Exception e) { + LOGGER.error("Exception occurred while loading experiments from the DB: {}", e.getMessage()); + } + if (null != entries && !entries.isEmpty()) { + List createExperimentAPIObjects = DBHelpers.Converters.KruizeObjectConverters.convertExperimentEntryToCreateExperimentAPIObject(entries); + if (null != createExperimentAPIObjects && !createExperimentAPIObjects.isEmpty()) { + List kruizeExpList = new ArrayList<>(); + + int failureThreshHold = createExperimentAPIObjects.size(); + int failureCount = 0; + for (CreateExperimentAPIObject createExperimentAPIObject : createExperimentAPIObjects) { + KruizeObject kruizeObject = Converters.KruizeObjectConverters.convertCreateExperimentAPIObjToKruizeObject(createExperimentAPIObject); + if (null != kruizeObject) { + kruizeExpList.add(kruizeObject); + } else { + failureCount++; + } + } + if (failureThreshHold > 0 && failureCount == failureThreshHold) { + throw new Exception("Unable to load Experiment with cluster name " + clusterName + " from the DB."); + } + experimentInterface.addExperimentToLocalStorage(mKruizeExperimentMap, kruizeExpList); + } + } + + // load recommendations for the experiments fetched in the above step + loadRecommendationsFromDBByClusterName(mKruizeExperimentMap, clusterName, entries); + } + + private void loadRecommendationsFromDBByClusterName(Map mKruizeExperimentMap, String clusterName, + List entries) throws Exception { + ExperimentInterface experimentInterface = new ExperimentInterfaceImpl(); + List recommendationEntries = null; + // Load Recommendations from DB and save to local + try { + recommendationEntries = 
experimentDAO.loadRecommendationsFromDBByClusterName(clusterName, entries); + } catch (Exception e) { + LOGGER.error("Exception occurred while fetching recommendations by cluster name: {}", e.getMessage()); + } + saveDBRecommendationsDataToLocal(mKruizeExperimentMap, experimentInterface, recommendationEntries); + } + + private void saveDBRecommendationsDataToLocal(Map mKruizeExperimentMap, ExperimentInterface experimentInterface, List recommendationEntries) { + if (null != recommendationEntries && !recommendationEntries.isEmpty()) { + List recommendationsAPIObjects + = null; + try { + recommendationsAPIObjects = DBHelpers.Converters.KruizeObjectConverters + .convertRecommendationEntryToRecommendationAPIObject(recommendationEntries); + } catch (InvalidConversionOfRecommendationEntryException e) { + e.printStackTrace(); + } + if (null != recommendationsAPIObjects && !recommendationsAPIObjects.isEmpty()) { + experimentInterface.addRecommendationsToLocalStorage(mKruizeExperimentMap, + recommendationsAPIObjects, + true); + } + } + } + + public void loadExperimentsAndRecommendationsByNamespaceName(Map mKruizeExperimentMap, String namespaceName) throws Exception { + ExperimentInterface experimentInterface = new ExperimentInterfaceImpl(); + List entries = experimentDAO.loadExperimentsByNamespaceName(namespaceName); + if (null != entries && !entries.isEmpty()) { + List createExperimentAPIObjects = DBHelpers.Converters.KruizeObjectConverters.convertExperimentEntryToCreateExperimentAPIObject(entries); + if (null != createExperimentAPIObjects && !createExperimentAPIObjects.isEmpty()) { + List kruizeExpList = new ArrayList<>(); + + int failureThreshHold = createExperimentAPIObjects.size(); + int failureCount = 0; + for (CreateExperimentAPIObject createExperimentAPIObject : createExperimentAPIObjects) { + KruizeObject kruizeObject = Converters.KruizeObjectConverters.convertCreateExperimentAPIObjToKruizeObject(createExperimentAPIObject); + if (null != kruizeObject) { + 
kruizeExpList.add(kruizeObject); + } else { + failureCount++; + } + } + if (failureThreshHold > 0 && failureCount == failureThreshHold) { + throw new Exception("Unable to load Experiment with namespace name " + namespaceName + " from the DB."); + } + experimentInterface.addExperimentToLocalStorage(mKruizeExperimentMap, kruizeExpList); + } + } + + loadRecommendationsFromDBByNamespaceName(mKruizeExperimentMap, namespaceName, entries); + } + + private void loadRecommendationsFromDBByNamespaceName(Map mKruizeExperimentMap, String namespaceName, List entries) throws Exception { + ExperimentInterface experimentInterface = new ExperimentInterfaceImpl(); + // Load Recommendations from DB and save to local + List recommendationEntries = experimentDAO.loadRecommendationsFromDBByNamespaceName(namespaceName, entries); + saveDBRecommendationsDataToLocal(mKruizeExperimentMap, experimentInterface, recommendationEntries); + } + + public HashMap> loadAllClusterNamespaceAssociationMap() throws Exception { + return experimentDAO.loadAllClusterNamespaceAssociation(); + } } diff --git a/src/main/java/com/autotune/utils/KruizeConstants.java b/src/main/java/com/autotune/utils/KruizeConstants.java index 8e4012cf2..fb861aa09 100644 --- a/src/main/java/com/autotune/utils/KruizeConstants.java +++ b/src/main/java/com/autotune/utils/KruizeConstants.java @@ -207,6 +207,20 @@ public static final class JSONKeys { public static final String RECOMMENDATION_TERMS = "recommendation_terms"; public static final String RECOMMENDATION_ENGINES = "recommendation_engines"; public static final String CONFIDENCE_LEVEL = "confidence_level"; + public static final String CHANGE = "change"; + public static final String SUMMARY = "summary"; + public static final String NAMESPACE_NAME = "namespace_name"; + public static final String SUMMARIZE_TYPE = "summarize_type"; + public static final String CLUSTER = "cluster"; + public static final String FETCH_FROM_DB = "fetchFromDB"; + public static final String IDLE = 
"idle"; + public static final String OPTIMIZED = "optimized"; + public static final String CRITICAL = "critical"; + public static final String OPTIMIZABLE = "optimizable"; + public static final String TOTAL = "total"; + public static final String WORKLOAD_NAMES = "workload_names"; + public static final String NAMES = "names"; + public static final String NO_DATA = "no_data"; private JSONKeys() { } diff --git a/src/main/java/com/autotune/utils/KruizeSupportedTypes.java b/src/main/java/com/autotune/utils/KruizeSupportedTypes.java index 77a40dde9..fe51f4a8b 100644 --- a/src/main/java/com/autotune/utils/KruizeSupportedTypes.java +++ b/src/main/java/com/autotune/utils/KruizeSupportedTypes.java @@ -80,4 +80,7 @@ private KruizeSupportedTypes() { } public static final Set QUERY_PARAMS_SUPPORTED = new HashSet<>(Arrays.asList( "experiment_name", "results", "recommendations", "latest" )); + public static final Set SUMMARIZE_PARAMS_SUPPORTED = new HashSet<>(Arrays.asList( + "cluster_name", "namespace_name", "summarize_type", "fetchFromDB" + )); } diff --git a/src/main/java/com/autotune/utils/ServerContext.java b/src/main/java/com/autotune/utils/ServerContext.java index a64958e08..2fca41302 100644 --- a/src/main/java/com/autotune/utils/ServerContext.java +++ b/src/main/java/com/autotune/utils/ServerContext.java @@ -42,6 +42,8 @@ public class ServerContext { public static final String RECOMMEND_RESULTS = ROOT_CONTEXT + "listRecommendations"; public static final String CREATE_PERF_PROFILE = ROOT_CONTEXT + "createPerformanceProfile"; public static final String LIST_PERF_PROFILES = ROOT_CONTEXT + "listPerformanceProfiles"; + public static final String LIST_CLUSTER_NAMES = ROOT_CONTEXT + "listClusters"; + public static final String SUMMARIZE = ROOT_CONTEXT + "summarize"; public static final String KRUIZE_SERVER_URL = "http://localhost:" + KRUIZE_SERVER_PORT; public static final String SEARCH_SPACE_END_POINT = KRUIZE_SERVER_URL + SEARCH_SPACE;