diff --git a/generic_config_updater/generic_updater.py b/generic_config_updater/generic_updater.py index 079d7ab742..061376b032 100644 --- a/generic_config_updater/generic_updater.py +++ b/generic_config_updater/generic_updater.py @@ -3,6 +3,7 @@ from enum import Enum from .gu_common import GenericConfigUpdaterError, ConfigWrapper, \ DryRunConfigWrapper, PatchWrapper +from .patch_sorter import PatchSorter CHECKPOINTS_DIR = "/etc/sonic/checkpoints" CHECKPOINT_EXT = ".cp.json" @@ -16,11 +17,6 @@ def release_lock(self): # TODO: Implement ConfigLock pass -class PatchSorter: - def sort(self, patch): - # TODO: Implement patch sorter - raise NotImplementedError("PatchSorter.sort(patch) is not implemented yet") - class ChangeApplier: def apply(self, change): # TODO: Implement change applier @@ -36,7 +32,7 @@ def __init__(self, changeapplier=None, config_wrapper=None, patch_wrapper=None): - self.patchsorter = patchsorter if patchsorter is not None else PatchSorter() + self.patchsorter = patchsorter if patchsorter is not None else PatchSorter(config_wrapper, patch_wrapper) self.changeapplier = changeapplier if changeapplier is not None else ChangeApplier() self.config_wrapper = config_wrapper if config_wrapper is not None else ConfigWrapper() self.patch_wrapper = patch_wrapper if patch_wrapper is not None else PatchWrapper() diff --git a/generic_config_updater/gu_common.py b/generic_config_updater/gu_common.py index 2aa6a36d8a..66d9b0d7d9 100644 --- a/generic_config_updater/gu_common.py +++ b/generic_config_updater/gu_common.py @@ -1,8 +1,12 @@ import json import jsonpatch +from jsonpointer import JsonPointer import sonic_yang import subprocess +import yang as ly import copy +import re +from enum import Enum YANG_DIR = "/usr/local/yang-models" @@ -10,8 +14,26 @@ class GenericConfigUpdaterError(Exception): pass class JsonChange: - # TODO: Implement JsonChange - pass + """ + A class that describes a partial change to a JSON object. 
+ It is is similar to JsonPatch, but the order of updating the configs is unknown. + Only the final outcome of the update can be retrieved. + It provides a single function to apply the change to a given JSON object. + """ + def __init__(self, patch): + self.patch = patch + + def apply(self, config): + return self.patch.apply(config) + + def __str__(self): + return f'{self.patch}' + + def __eq__(self, other): + """Overrides the default implementation""" + if isinstance(other, JsonChange): + return self.patch == other.patch + return False class ConfigWrapper: def __init__(self, yang_dir = YANG_DIR): @@ -110,14 +132,6 @@ def crop_tables_without_yang(self, config_db_as_json): return sy.jIn - def _create_and_connect_config_db(self): - if self.default_config_db_connector != None: - return self.default_config_db_connector - - config_db = ConfigDBConnector() - config_db.connect() - return config_db - class DryRunConfigWrapper(ConfigWrapper): # TODO: implement DryRunConfigWrapper # This class will simulate all read/write operations to ConfigDB on a virtual storage unit. 
@@ -126,11 +140,12 @@ class DryRunConfigWrapper(ConfigWrapper): class PatchWrapper: def __init__(self, config_wrapper=None): self.config_wrapper = config_wrapper if config_wrapper is not None else ConfigWrapper() + self.path_addressing = PathAddressing() def validate_config_db_patch_has_yang_models(self, patch): config_db = {} for operation in patch: - tokens = operation['path'].split('/')[1:] + tokens = self.path_addressing.get_path_tokens(operation[OperationWrapper.PATH_KEYWORD]) if len(tokens) == 0: # Modifying whole config_db tables_dict = {table_name: {} for table_name in operation['value']} config_db.update(tables_dict) @@ -174,3 +189,505 @@ def convert_sonic_yang_patch_to_config_db_patch(self, patch): target_config_db = self.config_wrapper.convert_sonic_yang_to_config_db(target_yang) return self.generate_patch(current_config_db, target_config_db) + +class OperationType(Enum): + ADD = 1 + REMOVE = 2 + REPLACE = 3 + +class OperationWrapper: + OP_KEYWORD = "op" + PATH_KEYWORD = "path" + VALUE_KEYWORD = "value" + + def create(self, operation_type, path, value=None): + op_type = operation_type.name.lower() + + operation = {OperationWrapper.OP_KEYWORD: op_type, OperationWrapper.PATH_KEYWORD: path} + + if operation_type in [OperationType.ADD, OperationType.REPLACE]: + operation[OperationWrapper.VALUE_KEYWORD] = value + + return operation + +class PathAddressing: + """ + Path refers to the 'path' in JsonPatch operations: https://tools.ietf.org/html/rfc6902 + The path corresponds to JsonPointer: https://tools.ietf.org/html/rfc6901 + + All xpath operations in this class are only relevent to ConfigDb and the conversion to YANG xpath. + It is not meant to support all the xpath functionalities, just the ones relevent to ConfigDb/YANG. 
+ """ + PATH_SEPARATOR = "/" + XPATH_SEPARATOR = "/" + def get_path_tokens(self, path): + return JsonPointer(path).parts + + def create_path(self, tokens): + return JsonPointer.from_parts(tokens).path + + def get_xpath_tokens(self, xpath): + """ + Splits the given xpath into tokens by '/'. + + Example: + xpath: /sonic-vlan:sonic-vlan/VLAN_MEMBER/VLAN_MEMBER_LIST[name='Vlan1000'][port='Ethernet8']/tagging_mode + tokens: sonic-vlan:sonic-vlan, VLAN_MEMBER, VLAN_MEMBER_LIST[name='Vlan1000'][port='Ethernet8'], tagging_mode + """ + if xpath == "": + raise ValueError("xpath cannot be empty") + + if xpath == "/": + return [] + + idx = 0 + tokens = [] + while idx < len(xpath): + end = self._get_xpath_token_end(idx+1, xpath) + token = xpath[idx+1:end] + tokens.append(token) + idx = end + + return tokens + + def _get_xpath_token_end(self, start, xpath): + idx = start + while idx < len(xpath): + if xpath[idx] == PathAddressing.XPATH_SEPARATOR: + break + elif xpath[idx] == "[": + idx = self._get_xpath_predicate_end(idx, xpath) + idx = idx+1 + + return idx + + def _get_xpath_predicate_end(self, start, xpath): + idx = start + while idx < len(xpath): + if xpath[idx] == "]": + break + elif xpath[idx] == "'": + idx = self._get_xpath_single_quote_str_end(idx, xpath) + elif xpath[idx] == '"': + idx = self._get_xpath_double_quote_str_end(idx, xpath) + + idx = idx+1 + + return idx + + def _get_xpath_single_quote_str_end(self, start, xpath): + idx = start+1 # skip first single quote + while idx < len(xpath): + if xpath[idx] == "'": + break + # libyang implements XPATH 1.0 which does not escape single quotes + # libyang src: https://netopeer.liberouter.org/doc/libyang/master/html/howtoxpath.html + # XPATH 1.0 src: https://www.w3.org/TR/1999/REC-xpath-19991116/#NT-Literal + idx = idx+1 + + return idx + + def _get_xpath_double_quote_str_end(self, start, xpath): + idx = start+1 # skip first single quote + while idx < len(xpath): + if xpath[idx] == '"': + break + # libyang implements XPATH 
1.0 which does not escape double quotes + # libyang src: https://netopeer.liberouter.org/doc/libyang/master/html/howtoxpath.html + # XPATH 1.0 src: https://www.w3.org/TR/1999/REC-xpath-19991116/#NT-Literal + idx = idx+1 + + return idx + + def create_xpath(self, tokens): + """ + Creates an xpath by combining the given tokens using '/' + Example: + tokens: module, container, list[key='value'], leaf + xpath: /module/container/list[key='value']/leaf + """ + if len(tokens) == 0: + return "/" + + return f"{PathAddressing.XPATH_SEPARATOR}{PathAddressing.XPATH_SEPARATOR.join(str(t) for t in tokens)}" + + def find_ref_paths(self, path, config): + """ + Finds the paths referencing any line under the given 'path' within the given 'config'. + Example: + path: /PORT + config: + { + "VLAN_MEMBER": { + "Vlan1000|Ethernet0": {}, + "Vlan1000|Ethernet4": {} + }, + "ACL_TABLE": { + "EVERFLOW": { + "ports": [ + "Ethernet4" + ], + }, + "EVERFLOWV6": { + "ports": [ + "Ethernet4", + "Ethernet8" + ] + } + }, + "PORT": { + "Ethernet0": {}, + "Ethernet4": {}, + "Ethernet8": {} + } + } + return: + /VLAN_MEMBER/Vlan1000|Ethernet0 + /VLAN_MEMBER/Vlan1000|Ethernet4 + /ACL_TABLE/EVERFLOW/ports/0 + /ACL_TABLE/EVERFLOW6/ports/0 + /ACL_TABLE/EVERFLOW6/ports/1 + """ + # TODO: Also fetch references by must statement (check similar statements) + return self._find_leafref_paths(path, config) + + def _find_leafref_paths(self, path, config): + sy = sonic_yang.SonicYang(YANG_DIR) + sy.loadYangModel() + + sy.loadData(config) + + xpath = self.convert_path_to_xpath(path, config, sy) + + leaf_xpaths = self._get_inner_leaf_xpaths(xpath, sy) + + ref_xpaths = [] + for xpath in leaf_xpaths: + ref_xpaths.extend(sy.find_data_dependencies(xpath)) + + ref_paths = [] + for ref_xpath in ref_xpaths: + ref_path = self.convert_xpath_to_path(ref_xpath, config, sy) + ref_paths.append(ref_path) + + return set(ref_paths) + + def _get_inner_leaf_xpaths(self, xpath, sy): + if xpath == "/": # Point to Root element which contains 
all xpaths + nodes = sy.root.tree_for() + else: # Otherwise get all nodes that match xpath + nodes = sy.root.find_path(xpath).data() + + for node in nodes: + for inner_node in node.tree_dfs(): + # TODO: leaflist also can be used as the 'path' argument in 'leafref' so add support to leaflist + if self._is_leaf_node(inner_node): + yield inner_node.path() + + def _is_leaf_node(self, node): + schema = node.schema() + return ly.LYS_LEAF == schema.nodetype() + + def convert_path_to_xpath(self, path, config, sy): + """ + Converts the given JsonPatch path (i.e. JsonPointer) to XPATH. + Example: + path: /VLAN_MEMBER/Vlan1000|Ethernet8/tagging_mode + xpath: /sonic-vlan:sonic-vlan/VLAN_MEMBER/VLAN_MEMBER_LIST[name='Vlan1000'][port='Ethernet8']/tagging_mode + """ + self.convert_xpath_to_path + tokens = self.get_path_tokens(path) + if len(tokens) == 0: + return self.create_xpath(tokens) + + xpath_tokens = [] + table = tokens[0] + + cmap = sy.confDbYangMap[table] + + # getting the top level element : + xpath_tokens.append(cmap['module']+":"+cmap['topLevelContainer']) + + xpath_tokens.extend(self._get_xpath_tokens_from_container(cmap['container'], 0, tokens, config)) + + return self.create_xpath(xpath_tokens) + + def _get_xpath_tokens_from_container(self, model, token_index, path_tokens, config): + token = path_tokens[token_index] + xpath_tokens = [token] + + if len(path_tokens)-1 == token_index: + return xpath_tokens + + # check if the configdb token is referring to a list + list_model = self._get_list_model(model, token_index, path_tokens) + if list_model: + new_xpath_tokens = self._get_xpath_tokens_from_list(list_model, token_index+1, path_tokens, config[path_tokens[token_index]]) + xpath_tokens.extend(new_xpath_tokens) + return xpath_tokens + + # check if it is targetting a child container + child_container_model = self._get_model(model.get('container'), path_tokens[token_index+1]) + if child_container_model: + new_xpath_tokens = 
self._get_xpath_tokens_from_container(child_container_model, token_index+1, path_tokens, config[path_tokens[token_index]]) + xpath_tokens.extend(new_xpath_tokens) + return xpath_tokens + + new_xpath_tokens = self._get_xpath_tokens_from_leaf(model, token_index+1, path_tokens, config[path_tokens[token_index]]) + xpath_tokens.extend(new_xpath_tokens) + + return xpath_tokens + + def _get_xpath_tokens_from_list(self, model, token_index, path_tokens, config): + list_name = model['@name'] + + tableKey = path_tokens[token_index] + listKeys = model['key']['@value'] + keyDict = self._extractKey(tableKey, listKeys) + keyTokens = [f"[{key}='{keyDict[key]}']" for key in keyDict] + item_token = f"{list_name}{''.join(keyTokens)}" + + xpath_tokens = [item_token] + + # if whole list-item is needed i.e. if in the path is not referencing child leaf items + # Example: + # path: /VLAN/Vlan1000 + # xpath: /sonic-vlan:sonic-vlan/VLAN/VLAN_LIST[name='Vlan1000'] + if len(path_tokens)-1 == token_index: + return xpath_tokens + + new_xpath_tokens = self._get_xpath_tokens_from_leaf(model, token_index+1, path_tokens,config[path_tokens[token_index]]) + xpath_tokens.extend(new_xpath_tokens) + return xpath_tokens + + def _get_xpath_tokens_from_leaf(self, model, token_index, path_tokens, config): + token = path_tokens[token_index] + + # checking all leaves + leaf_model = self._get_model(model.get('leaf'), token) + if leaf_model: + return [token] + + # checking choice + choices = model.get('choice') + if choices: + for choice in choices: + cases = choice['case'] + for case in cases: + leaf_model = self._get_model(case.get('leaf'), token) + if leaf_model: + return [token] + + # checking leaf-list (i.e. 
arrays of string, number or bool) + leaf_list_model = self._get_model(model.get('leaf-list'), token) + if leaf_list_model: + # if whole-list is to be returned, just return the token without checking the list items + # Example: + # path: /VLAN/Vlan1000/dhcp_servers + # xpath: /sonic-vlan:sonic-vlan/VLAN/VLAN_LIST[name='Vlan1000']/dhcp_servers + if len(path_tokens)-1 == token_index: + return [token] + list_config = config[token] + value = list_config[int(path_tokens[token_index+1])] + # To get a leaf-list instance with the value 'val' + # /module-name:container/leaf-list[.='val'] + # Source: Check examples in https://netopeer.liberouter.org/doc/libyang/master/html/howto_x_path.html + return [f"{token}[.='{value}']"] + + raise ValueError("Token not found") + + def _extractKey(self, tableKey, keys): + keyList = keys.split() + # get the value groups + value = tableKey.split("|") + # match lens + if len(keyList) != len(value): + raise ValueError("Value not found for {} in {}".format(keys, tableKey)) + # create the keyDict + keyDict = dict() + for i in range(len(keyList)): + keyDict[keyList[i]] = value[i].strip() + + return keyDict + + def _get_list_model(self, model, token_index, path_tokens): + parent_container_name = path_tokens[token_index] + clist = model.get('list') + # Container contains a single list, just return it + # TODO: check if matching also by name is necessary + if isinstance(clist, dict): + return clist + + if isinstance(clist, list): + configdb_values_str = path_tokens[token_index+1] + # Format: "value1|value2|value|..." + configdb_values = configdb_values_str.split("|") + for list_model in clist: + yang_keys_str = list_model['key']['@value'] + # Format: "key1 key2 key3 ..." 
+ yang_keys = yang_keys_str.split() + # if same number of values and keys, this is the intended list-model + # TODO: Match also on types and not only the length of the keys/values + if len(yang_keys) == len(configdb_values): + return list_model + raise GenericConfigUpdaterError(f"Container {parent_container_name} has multiple lists, " + f"but none of them match the config_db value {configdb_values_str}") + + return None + + def convert_xpath_to_path(self, xpath, config, sy): + """ + Converts the given XPATH to JsonPatch path (i.e. JsonPointer). + Example: + xpath: /sonic-vlan:sonic-vlan/VLAN_MEMBER/VLAN_MEMBER_LIST[name='Vlan1000'][port='Ethernet8']/tagging_mode + path: /VLAN_MEMBER/Vlan1000|Ethernet8/tagging_mode + """ + tokens = self.get_xpath_tokens(xpath) + if len(tokens) == 0: + return self.create_path([]) + + if len(tokens) == 1: + raise GenericConfigUpdaterError("xpath cannot be just the module-name, there is no mapping to path") + + table = tokens[1] + cmap = sy.confDbYangMap[table] + + path_tokens = self._get_path_tokens_from_container(cmap['container'], 1, tokens, config) + return self.create_path(path_tokens) + + def _get_path_tokens_from_container(self, model, token_index, xpath_tokens, config): + token = xpath_tokens[token_index] + path_tokens = [token] + + if len(xpath_tokens)-1 == token_index: + return path_tokens + + # check child list + list_name = xpath_tokens[token_index+1].split("[")[0] + list_model = self._get_model(model.get('list'), list_name) + if list_model: + new_path_tokens = self._get_path_tokens_from_list(list_model, token_index+1, xpath_tokens, config[token]) + path_tokens.extend(new_path_tokens) + return path_tokens + + container_name = xpath_tokens[token_index+1] + container_model = self._get_model(model.get('container'), container_name) + if container_model: + new_path_tokens = self._get_path_tokens_from_container(container_model, token_index+1, xpath_tokens, config[token]) + path_tokens.extend(new_path_tokens) + return path_tokens 
+ + new_path_tokens = self._get_path_tokens_from_leaf(model, token_index+1, xpath_tokens, config[token]) + path_tokens.extend(new_path_tokens) + + return path_tokens + + def _get_path_tokens_from_list(self, model, token_index, xpath_tokens, config): + token = xpath_tokens[token_index] + key_dict = self._extract_key_dict(token) + + # If no keys specified return empty tokens, as we are already inside the correct table. + # Also note that the list name in SonicYang has no correspondence in ConfigDb and is ignored. + # Example where VLAN_MEMBER_LIST has no specific key/value: + # xpath: /sonic-vlan:sonic-vlan/VLAN_MEMBER/VLAN_MEMBER_LIST + # path: /VLAN_MEMBER + if not(key_dict): + return [] + + listKeys = model['key']['@value'] + key_list = listKeys.split() + + if len(key_list) != len(key_dict): + raise GenericConfigUpdaterError(f"Keys in configDb not matching keys in SonicYang. ConfigDb keys: {key_dict.keys()}. SonicYang keys: {key_list}") + + values = [key_dict[k] for k in key_list] + path_token = '|'.join(values) + path_tokens = [path_token] + + if len(xpath_tokens)-1 == token_index: + return path_tokens + + next_token = xpath_tokens[token_index+1] + # if the target node is a key, then it does not have a correspondene to path. + # Just return the current 'key1|key2|..' 
token as it already refers to the keys + # Example where the target node is 'name' which is a key in VLAN_MEMBER_LIST: + # xpath: /sonic-vlan:sonic-vlan/VLAN_MEMBER/VLAN_MEMBER_LIST[name='Vlan1000'][port='Ethernet8']/name + # path: /VLAN_MEMBER/Vlan1000|Ethernet8 + if next_token in key_dict: + return path_tokens + + new_path_tokens = self._get_path_tokens_from_leaf(model, token_index+1, xpath_tokens, config[path_token]) + path_tokens.extend(new_path_tokens) + return path_tokens + + def _get_path_tokens_from_leaf(self, model, token_index, xpath_tokens, config): + token = xpath_tokens[token_index] + + # checking all leaves + leaf_model = self._get_model(model.get('leaf'), token) + if leaf_model: + return [token] + + # checking choices + choices = model.get('choice') + if choices: + for choice in choices: + cases = choice['case'] + for case in cases: + leaf_model = self._get_model(case.get('leaf'), token) + if leaf_model: + return [token] + + # checking leaf-list + leaf_list_tokens = token.split("[", 1) # split once on the first '[', a regex is used later to fetch keys/values + leaf_list_name = leaf_list_tokens[0] + leaf_list_model = self._get_model(model.get('leaf-list'), leaf_list_name) + if leaf_list_model: + # if whole-list is to be returned, just return the list-name without checking the list items + # Example: + # xpath: /sonic-vlan:sonic-vlan/VLAN/VLAN_LIST[name='Vlan1000']/dhcp_servers + # path: /VLAN/Vlan1000/dhcp_servers + if len(leaf_list_tokens) == 1: + return [leaf_list_name] + leaf_list_pattern = "^[^\[]+(?:\[\.='([^']*)'\])?$" + leaf_list_regex = re.compile(leaf_list_pattern) + match = leaf_list_regex.match(token) + # leaf_list_name = match.group(1) + leaf_list_value = match.group(1) + list_config = config[leaf_list_name] + list_idx = list_config.index(leaf_list_value) + return [leaf_list_name, list_idx] + + raise Exception("no leaf") + + def _extract_key_dict(self, list_token): + # Example: VLAN_MEMBER_LIST[name='Vlan1000'][port='Ethernet8'] + # the 
groups would be ('VLAN_MEMBER'), ("[name='Vlan1000'][port='Ethernet8']") + table_keys_pattern = "^([^\[]+)(.*)$" + text = list_token + table_keys_regex = re.compile(table_keys_pattern) + match = table_keys_regex.match(text) + # list_name = match.group(1) + all_key_value = match.group(2) + + # Example: [name='Vlan1000'][port='Ethernet8'] + # the findall groups would be ('name', 'Vlan1000'), ('port', 'Ethernet8') + key_value_pattern = "\[([^=]+)='([^']*)'\]" + matches = re.findall(key_value_pattern, all_key_value) + key_dict = {} + for item in matches: + key = item[0] + value = item[1] + key_dict[key] = value + + return key_dict + + def _get_model(self, model, name): + if isinstance(model, dict) and model['@name'] == name: + return model + if isinstance(model, list): + for submodel in model: + if submodel['@name'] == name: + return submodel + + return None diff --git a/generic_config_updater/patch_sorter.py b/generic_config_updater/patch_sorter.py new file mode 100644 index 0000000000..8bf99ba004 --- /dev/null +++ b/generic_config_updater/patch_sorter.py @@ -0,0 +1,1010 @@ +import copy +import json +import jsonpatch +from collections import deque +from enum import Enum +from .gu_common import OperationWrapper, OperationType, GenericConfigUpdaterError, JsonChange, PathAddressing + +class Diff: + """ + A class that contains the diff info between current and target configs. + """ + def __init__(self, current_config, target_config): + self.current_config = current_config + self.target_config = target_config + + def __hash__(self): + cc = json.dumps(self.current_config, sort_keys=True) + tc = json.dumps(self.target_config, sort_keys=True) + return hash((cc,tc)) + + def __eq__(self, other): + """Overrides the default implementation""" + if isinstance(other, Diff): + return self.current_config == other.current_config and self.target_config == other.target_config + + return False + + # TODO: Can be optimized to apply the move in place. 
JsonPatch supports that using the option 'in_place=True' + # Check: https://python-json-patch.readthedocs.io/en/latest/tutorial.html#applying-a-patch + # NOTE: in case move is applied in place, we will need to support `undo_move` as well. + def apply_move(self, move): + new_current_config = move.apply(self.current_config) + return Diff(new_current_config, self.target_config) + + def has_no_diff(self): + return self.current_config == self.target_config + +class JsonMove: + """ + A class similar to JsonPatch operation, but it allows the path to refer to non-existing middle elements. + + JsonPatch operation fails to update json if the path in the patch refers to element that do not exist. + For example, assume json to be: + {} + The following path will be rejected: + /elem1/key1 + The reason is 'elem1' does not exist in the json + + JsonMove on the other hand allows that given the target_config_tokens i.e. the target_config path, + and current_config_tokens i.e. current_config path where the update needs to happen. 
+ """ + def __init__(self, diff, op_type, current_config_tokens, target_config_tokens=None): + operation = JsonMove._to_jsonpatch_operation(diff, op_type, current_config_tokens, target_config_tokens) + self.patch = jsonpatch.JsonPatch([operation]) + self.op_type = operation[OperationWrapper.OP_KEYWORD] + self.path = operation[OperationWrapper.PATH_KEYWORD] + self.value = operation.get(OperationWrapper.VALUE_KEYWORD, None) + + self.op_type = op_type + self.current_config_tokens = current_config_tokens + self.target_config_tokens = target_config_tokens + + @staticmethod + def _to_jsonpatch_operation(diff, op_type, current_config_tokens, target_config_tokens): + operation_wrapper = OperationWrapper() + path_addressing = PathAddressing() + + if op_type == OperationType.REMOVE: + path = path_addressing.create_path(current_config_tokens) + return operation_wrapper.create(op_type, path) + + if op_type == OperationType.REPLACE: + path = path_addressing.create_path(current_config_tokens) + value = JsonMove._get_value(diff.target_config, target_config_tokens) + return operation_wrapper.create(op_type, path, value) + + if op_type == OperationType.ADD: + return JsonMove._to_jsonpatch_add_operation(diff, current_config_tokens, target_config_tokens) + + raise ValueError(f"OperationType {op_type} is not supported") + + @staticmethod + def _get_value(config, tokens): + for token in tokens: + config = config[token] + + return copy.deepcopy(config) + + @staticmethod + def _to_jsonpatch_add_operation(diff, current_config_tokens, target_config_tokens): + """ + Check description of JsonMove class first. + + ADD operation path can refer to elements that do not exist, so to convert JsonMove to JsonPatch operation + We need to remove the non-existing tokens from the current_config path and move them to the value. 
+ + Example: + Assume Target Config: + { + "dict1":{ + "key11": "value11" + } + } + Assume Current Config: + { + } + Assume JsonMove: + op_type=add, current_config_tokens=[dict1, key11], target_config_tokens=[dict1, key11] + + Converting this to operation directly would result in: + {"op":"add", "path":"/dict1/key11", "value":"value11"} + BUT this is not correct since 'dict1' which does not exist in Current Config. + Instead we convert to: + {"op":"add", "path":"/dict1", "value":{"key11": "value11"}} + """ + operation_wrapper = OperationWrapper() + path_addressing = PathAddressing() + + # if path refers to whole config i.e. no tokens, then just create the operation + if not current_config_tokens: + path = path_addressing.create_path(current_config_tokens) + value = JsonMove._get_value(diff.target_config, target_config_tokens) + return operation_wrapper.create(OperationType.ADD, path, value) + + # Start with getting target-config that match the path all the way to the value in json format + # Example: + # Assume target-config: + # { + # "dict1":{ + # "key11": "value11", + # "list12": [ + # "value121", + # "value122" + # ] + # }, + # "dict2":{ + # "key21": "value21" + # } + # } + # Assume target config tokens: + # dict1, list12, 1 + # filtered_config will be + # { + # "dict1":{ + # "list12": [ + # "value122" + # ] + # } + # } + target_ptr = diff.target_config + filtered_config = {} + filtered_config_ptr = filtered_config + for token_index in range(len(target_config_tokens)): + token = target_config_tokens[token_index] + + # Tokens are expected to be of the correct data-type i.e. 
string, int (list-index) + # So not checking the type of the token before consuming it + target_ptr = target_ptr[token] + + # if it is the last item, then just return the last target_ptr + if token_index == len(target_config_tokens)-1: + filtered_value = target_ptr + elif isinstance(target_ptr, list): + filtered_value = [] + else: + filtered_value = {} + + if isinstance(filtered_config_ptr, list): + filtered_config_ptr.append(filtered_value) # filtered_config list will contain only 1 value + else: # otherwise it is a dict + filtered_config_ptr[token] = filtered_value + + filtered_config_ptr = filtered_value + + # Then from the filtered_config get the all the tokens that exist in current_config + # This will be the new path, and the new value will be the corresponding filtered_config + # Example: + # Assume filtered_config + # { + # "dict1":{ + # "key11": "value11" + # } + # } + # Assume current-config + # { + # "dict1":{ + # "list12": [ + # "value122" + # ] + # } + # } + # Then the JsonPatch path would be: + # /dict1/list12 + # And JsonPatch value would be: + # [ "value122" ] + current_ptr = diff.current_config + new_tokens = [] + for token in current_config_tokens: + new_tokens.append(token) + was_list = isinstance(filtered_config, list) + if was_list: + # filtered_config list can only have 1 item + filtered_config = filtered_config[0] + else: + filtered_config = filtered_config[token] + + if was_list and token >= len(current_ptr): + break + if not(was_list) and token not in current_ptr: + break + current_ptr = current_ptr[token] + + op_type = OperationType.ADD + new_path = path_addressing.create_path(new_tokens) + new_value = copy.deepcopy(filtered_config) + + return operation_wrapper.create(op_type, new_path, new_value) + + @staticmethod + def from_patch(patch): + ops = list(patch) + if len(ops) != 1: + raise GenericConfigUpdaterError( + f"Only a patch of a single operation be converted to JsonMove. 
Patch has {len(ops)} operation/s") + + return JsonMove.from_operation(ops[0]) + + @staticmethod + def from_operation(operation): + path_addressing = PathAddressing() + op_type = OperationType[operation[OperationWrapper.OP_KEYWORD].upper()] + path = operation[OperationWrapper.PATH_KEYWORD] + if op_type in [OperationType.ADD, OperationType.REPLACE]: + value = operation[OperationWrapper.VALUE_KEYWORD] + else: + value = None + + tokens = path_addressing.get_path_tokens(path) + + target_config = {} + target_config_ptr = target_config + current_config = {} + current_config_ptr = current_config + for token in tokens[:-1]: + target_config_ptr[token] = {} + current_config_ptr[token] = {} + target_config_ptr = target_config_ptr[token] + current_config_ptr = current_config_ptr[token] + + if tokens: + target_config_ptr[tokens[-1]] = value + else: + # whole-config, just use value + target_config = value + + current_config_tokens = tokens + if op_type in [OperationType.ADD, OperationType.REPLACE]: + target_config_tokens = tokens + else: + target_config_tokens = None + + diff = Diff(current_config, target_config) + + return JsonMove(diff, op_type, current_config_tokens, target_config_tokens) + + def apply(self, config): + return self.patch.apply(config) + + def __str__(self): + return str(self.patch) + + def __repr__(self): + return str(self.patch) + + def __eq__(self, other): + """Overrides the default implementation""" + if isinstance(other, JsonMove): + return self.patch == other.patch + return False + + def __hash__(self): + return hash((self.op_type, self.path, json.dumps(self.value))) + +class MoveWrapper: + def __init__(self, move_generators, move_extenders, move_validators): + self.move_generators = move_generators + self.move_extenders = move_extenders + self.move_validators = move_validators + + def generate(self, diff): + processed_moves = set() + moves = deque([]) + + for move in self._generate_moves(diff): + if move in processed_moves: + continue + 
processed_moves.add(move) + yield move + moves.extend(self._extend_moves(move, diff)) + + while moves: + move = moves.popleft() + if move in processed_moves: + continue + processed_moves.add(move) + yield move + moves.extend(self._extend_moves(move, diff)) + + def validate(self, move, diff): + for validator in self.move_validators: + if not validator.validate(move, diff): + return False + return True + + def simulate(self, move, diff): + return diff.apply_move(move) + + def _generate_moves(self, diff): + for generator in self.move_generators: + for move in generator.generate(diff): + yield move + + def _extend_moves(self, move, diff): + for extender in self.move_extenders: + for newmove in extender.extend(move, diff): + yield newmove + +class DeleteWholeConfigMoveValidator: + """ + A class to validate not deleting whole config as it is not supported by JsonPatch lib. + """ + def validate(self, move, diff): + if move.op_type == OperationType.REMOVE and move.path == "": + return False + return True + +class FullConfigMoveValidator: + """ + A class to validate that full config is valid according to YANG models after applying the move. + """ + def __init__(self, config_wrapper): + self.config_wrapper = config_wrapper + + def validate(self, move, diff): + simulated_config = move.apply(diff.current_config) + return self.config_wrapper.validate_config_db_config(simulated_config) + +# TODO: Add this validation to YANG models instead +class UniqueLanesMoveValidator: + """ + A class to validate lanes and any port are unique between all ports. 
+ """ + def validate(self, move, diff): + simulated_config = move.apply(diff.current_config) + + if "PORT" not in simulated_config: + return True + + ports = simulated_config["PORT"] + existing = set() + for port in ports: + attrs = ports[port] + if "lanes" in attrs: + lanes_str = attrs["lanes"] + lanes = lanes_str.split(", ") + for lane in lanes: + if lane in existing: + return False + existing.add(lane) + return True + +class CreateOnlyMoveValidator: + """ + A class to validate create-only fields are only added/removed but never replaced. + Parents of create-only fields are also only added/removed but never replaced when they contain + a modified create-only field. + """ + def __init__(self, path_addressing): + self.path_addressing = path_addressing + + def validate(self, move, diff): + if move.op_type != OperationType.REPLACE: + return True + + # The 'create-only' field needs to be common between current and simulated anyway but different. + # This means it is enough to just get the paths from current_config, paths that are not common can be ignored. 
+ paths = self._get_create_only_paths(diff.current_config) + simulated_config = move.apply(diff.current_config) + + for path in paths: + tokens = self.path_addressing.get_path_tokens(path) + if self._value_exist_but_different(tokens, diff.current_config, simulated_config): + return False + + return True + + # TODO: create-only fields are hard-coded for now, it should be moved to YANG models + def _get_create_only_paths(self, config): + if "PORT" not in config: + return + + ports = config["PORT"] + + for port in ports: + attrs = ports[port] + if "lanes" in attrs: + yield f"/PORT/{port}/lanes" + + def _value_exist_but_different(self, tokens, current_config_ptr, simulated_config_ptr): + for token in tokens: + mod_token = int(token) if isinstance(current_config_ptr, list) else token + + if mod_token not in current_config_ptr: + return False + + if mod_token not in simulated_config_ptr: + return False + + current_config_ptr = current_config_ptr[mod_token] + simulated_config_ptr = simulated_config_ptr[mod_token] + + return current_config_ptr != simulated_config_ptr + +class NoDependencyMoveValidator: + """ + A class to validate that the modified configs do not have dependency on each other. This should prevent + moves that update whole config in a single step where multiple changed nodes are dependent on each. This + way dependent configs are never updated together. 
+ """ + def __init__(self, path_addressing, config_wrapper): + self.path_addressing = path_addressing + self.config_wrapper = config_wrapper + + def validate(self, move, diff): + operation_type = move.op_type + path = move.path + + if operation_type == OperationType.ADD: + simulated_config = move.apply(diff.current_config) + # For add operation, we check the simulated config has no dependencies between nodes under the added path + if not self._validate_paths_config([path], simulated_config): + return False + elif operation_type == OperationType.REMOVE: + # For remove operation, we check the current config has no dependencies between nodes under the removed path + if not self._validate_paths_config([path], diff.current_config): + return False + elif operation_type == OperationType.REPLACE: + if not self._validate_replace(move, diff): + return False + + return True + + # NOTE: this function can be used for validating JsonChange as well which might have more than one move. + def _validate_replace(self, move, diff): + """ + The table below shows how mixed deletion/addition within replace affect this validation. 
+ + The table is answering the question whether the change is valid: + Y = Yes + N = No + n/a = not applicable as the change itself is not valid + + symbols meaning: + +A, -A: adding, removing config A + +refA, -refA: adding, removing a reference to A config + + + +refA|-refA|refA + --|-----|-----|---- + +A| N | n/a | n/a + -A| n/a | N | n/a + A| Y | Y | Y + + The conclusion is that: + +A, +refA is invalid because there is a dependency and a single move should not have dependency + -A, -refA is invalid because there is a dependency and a single move should not have dependency + A kept unchanged can be ignored, as it is always OK regardless of what happens to its reference + Other states are all not applicable since they are invalid to begin with + + So verification would be: + if A is deleted and refA is deleted: return False + if A is added and refA is added: return False + return True + """ + simulated_config = move.apply(diff.current_config) + deleted_paths, added_paths = self._get_paths(diff.current_config, simulated_config, []) + + if not self._validate_paths_config(deleted_paths, diff.current_config): + return False + + if not self._validate_paths_config(added_paths, diff.target_config): + return False + + return True + + def _get_paths(self, current_ptr, target_ptr, tokens): + deleted_paths = [] + added_paths = [] + + if isinstance(current_ptr, list) or isinstance(target_ptr, list): + tmp_deleted_paths, tmp_added_paths = self._get_list_paths(current_ptr, target_ptr, tokens) + deleted_paths.extend(tmp_deleted_paths) + added_paths.extend(tmp_added_paths) + return deleted_paths, added_paths + + if isinstance(current_ptr, dict): + for token in current_ptr: + tokens.append(token) + if token not in target_ptr: + deleted_paths.append(self.path_addressing.create_path(tokens)) + else: + tmp_deleted_paths, tmp_added_paths = self._get_paths(current_ptr[token], target_ptr[token], tokens) + deleted_paths.extend(tmp_deleted_paths) + added_paths.extend(tmp_added_paths) + 
tokens.pop() + + for token in target_ptr: + tokens.append(token) + if token not in current_ptr: + added_paths.append(self.path_addressing.create_path(tokens)) + tokens.pop() + + return deleted_paths, added_paths + + # current/target configs are not dict nor list, so handle them as string, int, bool, float + if current_ptr != target_ptr: + # tokens.append(token) + deleted_paths.append(self.path_addressing.create_path(tokens)) + added_paths.append(self.path_addressing.create_path(tokens)) + # tokens.pop() + + return deleted_paths, added_paths + + def _get_list_paths(self, current_list, target_list, tokens): + """ + Gets all paths within the given list, assume list items are unique + """ + deleted_paths = [] + added_paths = [] + + hashed_target = set(target_list) + for index, value in enumerate(current_list): + if value not in hashed_target: + tokens.append(index) + deleted_paths.append(self.path_addressing.create_path(tokens)) + tokens.pop() + + hashed_current = set(current_list) + for index, value in enumerate(target_list): + if value not in hashed_current: + tokens.append(index) + # added_paths refer to paths in the target config and not necessarily the current config + added_paths.append(self.path_addressing.create_path(tokens)) + tokens.pop() + + return deleted_paths, added_paths + + def _validate_paths_config(self, paths, config): + """ + validates all config under paths do not have config and its references + """ + refs = self._find_ref_paths(paths, config) + for ref in refs: + for path in paths: + if ref.startswith(path): + return False + + return True + + def _find_ref_paths(self, paths, config): + refs = [] + for path in paths: + refs.extend(self.path_addressing.find_ref_paths(path, config)) + return refs + +class LowLevelMoveGenerator: + """ + A class to generate the low level moves i.e. moves corresponding to differences between current/target config + where the path of the move does not have children. 
+ """ + def __init__(self, path_addressing): + self.path_addressing = path_addressing + def generate(self, diff): + single_run_generator = SingleRunLowLevelMoveGenerator(diff, self.path_addressing) + for move in single_run_generator.generate(): + yield move + +class SingleRunLowLevelMoveGenerator: + """ + A class that can only run once to assist LowLevelMoveGenerator with generating the moves. + """ + def __init__(self, diff, path_addressing): + self.diff = diff + self.path_addressing = path_addressing + + def generate(self): + current_ptr = self.diff.current_config + target_ptr = self.diff.target_config + current_tokens = [] + target_tokens = [] + + for move in self._traverse(current_ptr, target_ptr, current_tokens, target_tokens): + yield move + + def _traverse(self, current_ptr, target_ptr, current_tokens, target_tokens): + """ + Traverses the current/target config trees. + The given ptrs can be: + dict + list of string, number, boolean, int + string, number, boolean, int + + list of dict is not allowed + """ + if isinstance(current_ptr, list) or isinstance(target_ptr, list): + for move in self._traverse_list(current_ptr, target_ptr, current_tokens, target_tokens): + yield move + return + + if isinstance(current_ptr, dict) or isinstance(target_ptr, dict): + for key in current_ptr: + current_tokens.append(key) + if key in target_ptr: + target_tokens.append(key) + for move in self._traverse(current_ptr[key], target_ptr[key], current_tokens, target_tokens): + yield move + target_tokens.pop() + else: + for move in self._traverse_current(current_ptr[key], current_tokens): + yield move + + current_tokens.pop() + + for key in target_ptr: + if key in current_ptr: + continue # Already tried in the previous loop + + target_tokens.append(key) + current_tokens.append(key) + for move in self._traverse_target(target_ptr[key], current_tokens, target_tokens): + yield move + current_tokens.pop() + target_tokens.pop() + + return + + # The current/target ptr are neither dict nor 
list, so they might be string, int, float, bool + for move in self._traverse_value(current_ptr, target_ptr, current_tokens, target_tokens): + yield move + + def _traverse_list(self, current_ptr, target_ptr, current_tokens, target_tokens): + # if same elements different order, just sort by replacing whole list + # Example: + # current: [1, 2, 3, 4] + # target: [4, 3, 2, 1] + # returned move: REPLACE, current, target + current_dict_cnts = self._list_to_dict_with_count(current_ptr) + target_dict_cnts = self._list_to_dict_with_count(target_ptr) + if current_dict_cnts == target_dict_cnts: + for move in self._traverse_value(current_ptr, target_ptr, current_tokens, target_tokens): + yield move + return + + # Otherwise try add missing and remove additional elements + # Try remove + if current_ptr is not None: + for current_index, current_item in enumerate(current_ptr): + if current_dict_cnts[current_item] > target_dict_cnts.get(current_item, 0): + current_tokens.append(current_index) + for move in self._traverse_current_value(current_item, current_tokens): + yield move + current_tokens.pop() + # Try add + if target_ptr is not None: + current_cnt = len(current_ptr) if current_ptr is not None else 0 + for target_index, target_item in enumerate(target_ptr): + if target_dict_cnts[target_item] > current_dict_cnts.get(target_item, 0): + index = min(current_cnt, target_index) + current_tokens.append(index) + target_tokens.append(target_index) + for move in self._traverse_target_value(target_item, current_tokens, target_tokens): + yield move + target_tokens.pop() + current_tokens.pop() + + # Try replace + if current_ptr is not None and target_ptr is not None: + for current_index, current_item in enumerate(current_ptr): + for target_index, target_item in enumerate(target_ptr): + if current_dict_cnts[current_item] > target_dict_cnts.get(current_item, 0) and \ + target_dict_cnts[target_item] > current_dict_cnts.get(target_item, 0): + current_tokens.append(current_index) + 
target_tokens.append(target_index) + for move in self._traverse_value(current_item, target_item, current_tokens, target_tokens): + yield move + target_tokens.pop() + current_tokens.pop() + + def _traverse_value(self, current_value, target_value, current_tokens, target_tokens): + if current_value == target_value: + return + + yield JsonMove(self.diff, OperationType.REPLACE, current_tokens, target_tokens) + + def _traverse_current(self, ptr, current_tokens): + if isinstance(ptr, list): + for move in self._traverse_current_list(ptr, current_tokens): + yield move + return + + if isinstance(ptr, dict): + if len(ptr) == 0: + yield JsonMove(self.diff, OperationType.REMOVE, current_tokens) + return + + for key in ptr: + current_tokens.append(key) + for move in self._traverse_current(ptr[key], current_tokens): + yield move + current_tokens.pop() + + return + + # ptr is not a dict nor a list, it can be string, int, float, bool + for move in self._traverse_current_value(ptr, current_tokens): + yield move + + def _traverse_current_list(self, ptr, current_tokens): + if len(ptr) == 0: + yield JsonMove(self.diff, OperationType.REMOVE, current_tokens) + return + + for index, val in enumerate(ptr): + current_tokens.append(index) + for move in self._traverse_current_value(val, current_tokens): + yield move + current_tokens.pop() + + def _traverse_current_value(self, val, current_tokens): + yield JsonMove(self.diff, OperationType.REMOVE, current_tokens) + + def _traverse_target(self, ptr, current_tokens, target_tokens): + if isinstance(ptr, list): + for move in self._traverse_target_list(ptr, current_tokens, target_tokens): + yield move + return + + if isinstance(ptr, dict): + if len(ptr) == 0: + yield JsonMove(self.diff, OperationType.ADD, current_tokens, target_tokens) + return + + for key in ptr: + current_tokens.append(key) + target_tokens.append(key) + for move in self._traverse_target(ptr[key], current_tokens, target_tokens): + yield move + target_tokens.pop() + 
current_tokens.pop() + + return + + # target configs are not dict nor list, so handle them as string, int, bool, float + for move in self._traverse_target_value(ptr, current_tokens, target_tokens): + yield move + + def _traverse_target_list(self, ptr, current_tokens, target_tokens): + if len(ptr) == 0: + yield JsonMove(self.diff, OperationType.ADD, current_tokens, target_tokens) + return + + for index, val in enumerate(ptr): + # _traverse_target_list is called when the whole list is missing + # in such case any item should be added at first location i.e. 0 + current_tokens.append(0) + target_tokens.append(index) + for move in self._traverse_target_value(val, current_tokens, target_tokens): + yield move + target_tokens.pop() + current_tokens.pop() + + def _traverse_target_value(self, val, current_tokens, target_tokens): + yield JsonMove(self.diff, OperationType.ADD, current_tokens, target_tokens) + + def _list_to_dict_with_count(self, items): + counts = dict() + + if items is None: + return counts + + for item in items: + counts[item] = counts.get(item, 0) + 1 + + return counts + +class UpperLevelMoveExtender: + """ + A class to extend the given move by including its parent. It has 3 cases: + 1) If parent was in current and target, then replace the parent + 2) If parent was in current but not target, then delete the parent + 3) If parent was in target but not current, then add the parent + """ + def extend(self, move, diff): + # if no tokens i.e. 
whole config + if not move.current_config_tokens: + return + + upper_current_tokens = move.current_config_tokens[:-1] + operation_type = self._get_upper_operation(upper_current_tokens, diff) + + upper_target_tokens = None + if operation_type in [OperationType.ADD, OperationType.REPLACE]: + upper_target_tokens = upper_current_tokens + + yield JsonMove(diff, operation_type, upper_current_tokens, upper_target_tokens) + + # get upper operation assumes ConfigDb to not have list-of-objects, only list-of-values + def _get_upper_operation(self, tokens, diff): + current_ptr = diff.current_config + target_ptr = diff.target_config + + for token in tokens: + if token not in current_ptr: + return OperationType.ADD + current_ptr = current_ptr[token] + if token not in target_ptr: + return OperationType.REMOVE + target_ptr = target_ptr[token] + + return OperationType.REPLACE + +class DeleteInsteadOfReplaceMoveExtender: + """ + A class to extend the given REPLACE move by adding a REMOVE move. + """ + def extend(self, move, diff): + operation_type = move.op_type + + if operation_type != OperationType.REPLACE: + return + + new_move = JsonMove(diff, OperationType.REMOVE, move.current_config_tokens) + + yield new_move + +class DeleteRefsMoveExtender: + """ + A class to extend the given REMOVE move by adding REMOVE moves to configs referring to the path in the move. 
+ """ + def __init__(self, path_addressing): + self.path_addressing = path_addressing + + def extend(self, move, diff): + operation_type = move.op_type + + if operation_type != OperationType.REMOVE: + return + + for ref_path in self.path_addressing.find_ref_paths(move.path, diff.current_config): + yield JsonMove(diff, OperationType.REMOVE, self.path_addressing.get_path_tokens(ref_path)) + +class DfsSorter: + def __init__(self, move_wrapper): + self.visited = {} + self.move_wrapper = move_wrapper + + def sort(self, diff): + if diff.has_no_diff(): + return [] + + diff_hash = hash(diff) + if diff_hash in self.visited: + return None + self.visited[diff_hash] = True + + moves = self.move_wrapper.generate(diff) + + for move in moves: + if self.move_wrapper.validate(move, diff): + new_diff = self.move_wrapper.simulate(move, diff) + new_moves = self.sort(new_diff) + if new_moves is not None: + return [move] + new_moves + + return None + +class BfsSorter: + def __init__(self, move_wrapper): + self.visited = {} + self.move_wrapper = move_wrapper + + def sort(self, diff): + diff_queue = deque([]) + prv_moves_queue = deque([]) + + diff_queue.append(diff) + prv_moves_queue.append([]) + + while len(diff_queue): + diff = diff_queue.popleft() + prv_moves = prv_moves_queue.popleft() + + diff_hash = hash(diff) + if diff_hash in self.visited: + continue + self.visited[diff_hash] = True + + if diff.has_no_diff(): + return prv_moves + + moves = self.move_wrapper.generate(diff) + for move in moves: + if self.move_wrapper.validate(move, diff): + new_diff = self.move_wrapper.simulate(move, diff) + new_prv_moves = prv_moves + [move] + + diff_queue.append(new_diff) + prv_moves_queue.append(new_prv_moves) + + return None + +class MemoizationSorter: + def __init__(self, move_wrapper): + self.visited = {} + self.move_wrapper = move_wrapper + self.mem = {} + + def rec(self, diff): + if diff.has_no_diff(): + return [] + + diff_hash = hash(diff) + if diff_hash in self.mem: + return 
self.mem[diff_hash] + if diff_hash in self.visited: + return None + self.visited[diff_hash] = True + + moves = self.move_wrapper.generate(diff) + + bst_moves = None + for move in moves: + if self.move_wrapper.validate(move, diff): + new_diff = self.move_wrapper.simulate(move, diff) + new_moves = self.sort(new_diff) + if new_moves != None and (bst_moves is None or len(bst_moves) > len(new_moves)+1): + bst_moves = [move] + new_moves + + self.mem[diff_hash] = bst_moves + return bst_moves + +class Algorithm(Enum): + DFS = 1 + BFS = 2 + MEMOIZATION = 3 + +class SortAlgorithmFactory: + def __init__(self, operation_wrapper, config_wrapper, path_addressing): + self.operation_wrapper = operation_wrapper + self.config_wrapper = config_wrapper + self.path_addressing = path_addressing + + def create(self, algorithm=Algorithm.DFS): + move_generators = [LowLevelMoveGenerator(self.path_addressing)] + move_extenders = [UpperLevelMoveExtender(), + DeleteInsteadOfReplaceMoveExtender(), + DeleteRefsMoveExtender(self.path_addressing)] + move_validators = [DeleteWholeConfigMoveValidator(), + FullConfigMoveValidator(self.config_wrapper), + NoDependencyMoveValidator(self.path_addressing, self.config_wrapper), + UniqueLanesMoveValidator(), + CreateOnlyMoveValidator(self.path_addressing) ] + + move_wrapper = MoveWrapper(move_generators, move_extenders, move_validators) + + if algorithm == Algorithm.DFS: + sorter = DfsSorter(move_wrapper) + elif algorithm == Algorithm.BFS: + sorter = BfsSorter(move_wrapper) + elif algorithm == Algorithm.MEMOIZATION: + sorter = MemoizationSorter(move_wrapper) + else: + raise ValueError(f"Algorithm {algorithm} is not supported") + + return sorter + +class PatchSorter: + def __init__(self, config_wrapper, patch_wrapper, sort_algorithm_factory=None): + self.config_wrapper = config_wrapper + self.patch_wrapper = patch_wrapper + self.operation_wrapper = OperationWrapper() + self.path_addressing = PathAddressing() + self.sort_algorithm_factory = 
sort_algorithm_factory if sort_algorithm_factory else \ + SortAlgorithmFactory(self.operation_wrapper, config_wrapper, self.path_addressing) + + def sort(self, patch, algorithm=Algorithm.DFS): + current_config = self.config_wrapper.get_config_db_as_json() + target_config = self.patch_wrapper.simulate_patch(patch, current_config) + + diff = Diff(current_config, target_config) + + sort_algorithm = self.sort_algorithm_factory.create(algorithm) + moves = sort_algorithm.sort(diff) + + if moves is None: + raise GenericConfigUpdaterError("There is no possible sorting") + + changes = [JsonChange(move.patch) for move in moves] + + return changes diff --git a/setup.py b/setup.py index 6c8a349c69..24345c8528 100644 --- a/setup.py +++ b/setup.py @@ -174,6 +174,7 @@ 'jinja2>=2.11.3', 'jsondiff>=1.2.0', 'jsonpatch>=1.32.0', + 'jsonpointer>=1.9', 'm2crypto>=0.31.0', 'natsort>=6.2.1', # 6.2.1 is the last version which supports Python 2. Can update once we no longer support Python 2 'netaddr>=0.8.0', diff --git a/tests/generic_config_updater/files/any_config_db.json b/tests/generic_config_updater/files/any_config_db.json new file mode 100644 index 0000000000..2c63c08510 --- /dev/null +++ b/tests/generic_config_updater/files/any_config_db.json @@ -0,0 +1,2 @@ +{ +} diff --git a/tests/generic_config_updater/files/any_other_config_db.json b/tests/generic_config_updater/files/any_other_config_db.json new file mode 100644 index 0000000000..c258f768cf --- /dev/null +++ b/tests/generic_config_updater/files/any_other_config_db.json @@ -0,0 +1,4 @@ +{ + "VLAN": { + } +} diff --git a/tests/generic_config_updater/files/config_db_after_multi_patch.json b/tests/generic_config_updater/files/config_db_after_multi_patch.json index 042bf1d51b..39dff7d688 100644 --- a/tests/generic_config_updater/files/config_db_after_multi_patch.json +++ b/tests/generic_config_updater/files/config_db_after_multi_patch.json @@ -119,4 +119,4 @@ "key12": "value12" } } -} \ No newline at end of file +} diff --git 
a/tests/generic_config_updater/files/config_db_after_single_operation.json b/tests/generic_config_updater/files/config_db_after_single_operation.json new file mode 100644 index 0000000000..0f2f447537 --- /dev/null +++ b/tests/generic_config_updater/files/config_db_after_single_operation.json @@ -0,0 +1,83 @@ +{ + "VLAN_MEMBER": { + "Vlan1000|Ethernet0": { + "tagging_mode": "untagged" + }, + "Vlan1000|Ethernet4": { + "tagging_mode": "untagged" + } + }, + "VLAN": { + "Vlan1000": { + "vlanid": "1000", + "dhcp_servers": [ + "192.0.0.1", + "192.0.0.2", + "192.0.0.3", + "192.0.0.4" + ] + } + }, + "ACL_TABLE": { + "NO-NSW-PACL-V4": { + "type": "L3", + "policy_desc": "NO-NSW-PACL-V4", + "ports": [ + "Ethernet0" + ] + }, + "DATAACL": { + "policy_desc": "DATAACL", + "ports": [ + "Ethernet4" + ], + "stage": "ingress", + "type": "L3" + }, + "EVERFLOW": { + "policy_desc": "EVERFLOW", + "ports": [ + "Ethernet8" + ], + "stage": "ingress", + "type": "MIRROR" + }, + "EVERFLOWV6": { + "policy_desc": "EVERFLOWV6", + "ports": [ + "Ethernet4", + "Ethernet8" + ], + "stage": "ingress", + "type": "MIRRORV6" + } + }, + "PORT": { + "Ethernet0": { + "alias": "Eth1", + "lanes": "65, 66, 67, 68", + "description": "Ethernet0 100G link", + "speed": "100000" + }, + "Ethernet4": { + "admin_status": "up", + "alias": "fortyGigE0/4", + "description": "Servers0:eth0", + "index": "1", + "lanes": "29,30,31,32", + "mtu": "9100", + "pfc_asym": "off", + "speed": "40000" + }, + "Ethernet8": { + "admin_status": "up", + "alias": "fortyGigE0/8", + "description": "Servers1:eth0", + "index": "2", + "lanes": "33,34,35,36", + "mtu": "9100", + "pfc_asym": "off", + "speed": "40000" + } + } +} diff --git a/tests/generic_config_updater/files/config_db_choice.json b/tests/generic_config_updater/files/config_db_choice.json new file mode 100644 index 0000000000..eaece3248f --- /dev/null +++ b/tests/generic_config_updater/files/config_db_choice.json @@ -0,0 +1,17 @@ +{ + "ACL_RULE": { + "SSH_ONLY|RULE1": { + 
"L4_SRC_PORT":"65174-6530" + } + }, + "ACL_TABLE": { + "SSH_ONLY": { + "policy_desc": "SSH_ONLY", + "type": "CTRLPLANE", + "stage": "ingress", + "services": [ + "SSH" + ] + } + } +} diff --git a/tests/generic_config_updater/files/config_db_no_dependencies.json b/tests/generic_config_updater/files/config_db_no_dependencies.json new file mode 100644 index 0000000000..12bdd464a5 --- /dev/null +++ b/tests/generic_config_updater/files/config_db_no_dependencies.json @@ -0,0 +1,39 @@ +{ + "VLAN": { + "Vlan1000": { + "vlanid": "1000", + "dhcp_servers": [ + "192.0.0.1", + "192.0.0.2", + "192.0.0.3", + "192.0.0.4" + ] + } + }, + "ACL_TABLE": { + "EVERFLOW": { + "policy_desc": "EVERFLOW", + "ports": [ + "" + ], + "stage": "ingress", + "type": "MIRROR" + }, + "EVERFLOWV6": { + "policy_desc": "EVERFLOWV6", + "ports": [ + "" + ], + "stage": "ingress", + "type": "MIRRORV6" + } + }, + "PORT": { + "Ethernet0": { + "alias": "Eth1", + "lanes": "65, 66, 67, 68", + "description": "Ethernet0 100G link", + "speed": "100000" + } + } +} diff --git a/tests/generic_config_updater/files/config_db_with_crm.json b/tests/generic_config_updater/files/config_db_with_crm.json new file mode 100644 index 0000000000..5fd324d988 --- /dev/null +++ b/tests/generic_config_updater/files/config_db_with_crm.json @@ -0,0 +1,9 @@ +{ + "CRM": { + "Config": { + "acl_counter_high_threshold": "90", + "acl_counter_low_threshold": "70", + "acl_counter_threshold_type": "free" + } + } +} \ No newline at end of file diff --git a/tests/generic_config_updater/files/config_db_with_device_metadata.json b/tests/generic_config_updater/files/config_db_with_device_metadata.json new file mode 100644 index 0000000000..34def579f6 --- /dev/null +++ b/tests/generic_config_updater/files/config_db_with_device_metadata.json @@ -0,0 +1,16 @@ +{ + "DEVICE_METADATA": { + "localhost": { + "default_bgp_status": "up", + "default_pfcwd_status": "disable", + "bgp_asn": "65100", + "deployment_id": "1", + "docker_routing_config_mode": 
"separated", + "hostname": "vlab-01", + "hwsku": "Force10-S6000", + "type": "ToRRouter", + "platform": "x86_64-kvm_x86_64-r0", + "mac": "52:54:00:99:7e:85" + } + } +} \ No newline at end of file diff --git a/tests/generic_config_updater/files/config_db_with_interface.json b/tests/generic_config_updater/files/config_db_with_interface.json new file mode 100644 index 0000000000..2e1c488a4a --- /dev/null +++ b/tests/generic_config_updater/files/config_db_with_interface.json @@ -0,0 +1,20 @@ +{ + "INTERFACE": { + "Ethernet8": {}, + "Ethernet8|10.0.0.1/30": { + "family": "IPv4", + "scope": "global" + } + }, + "PORT": { + "Ethernet8": { + "admin_status": "up", + "alias": "eth8", + "description": "Ethernet8", + "fec": "rs", + "lanes": "65", + "mtu": "9000", + "speed": "25000" + } + } +} diff --git a/tests/generic_config_updater/files/config_db_with_portchannel_and_acl.json b/tests/generic_config_updater/files/config_db_with_portchannel_and_acl.json new file mode 100644 index 0000000000..23d33890f3 --- /dev/null +++ b/tests/generic_config_updater/files/config_db_with_portchannel_and_acl.json @@ -0,0 +1,25 @@ +{ + "PORT": { + "Ethernet0": { + "alias": "Eth1/1", + "lanes": "65", + "description": "", + "speed": "10000" + } + }, + "PORTCHANNEL": { + "PortChannel0001": { + "admin_status": "up" + } + }, + "ACL_TABLE": { + "NO-NSW-PACL-V4": { + "type": "L3", + "policy_desc": "NO-NSW-PACL-V4", + "ports": [ + "Ethernet0", + "PortChannel0001" + ] + } + } +} diff --git a/tests/generic_config_updater/files/config_db_with_portchannel_interface.json b/tests/generic_config_updater/files/config_db_with_portchannel_interface.json new file mode 100644 index 0000000000..4e05639dc5 --- /dev/null +++ b/tests/generic_config_updater/files/config_db_with_portchannel_interface.json @@ -0,0 +1,10 @@ +{ + "PORTCHANNEL": { + "PortChannel0001": { + "admin_status": "up" + } + }, + "PORTCHANNEL_INTERFACE": { + "PortChannel0001|1.1.1.1/24": {} + } +} diff --git 
a/tests/generic_config_updater/files/contrainer_with_container_config_db.json b/tests/generic_config_updater/files/contrainer_with_container_config_db.json new file mode 100644 index 0000000000..b0680b22b5 --- /dev/null +++ b/tests/generic_config_updater/files/contrainer_with_container_config_db.json @@ -0,0 +1,7 @@ +{ + "FLEX_COUNTER_TABLE": { + "BUFFER_POOL_WATERMARK": { + "FLEX_COUNTER_STATUS": "enable" + } + } +} diff --git a/tests/generic_config_updater/files/dpb_1_split_full_config.json b/tests/generic_config_updater/files/dpb_1_split_full_config.json new file mode 100644 index 0000000000..2097289606 --- /dev/null +++ b/tests/generic_config_updater/files/dpb_1_split_full_config.json @@ -0,0 +1,35 @@ +{ + "PORT": { + "Ethernet0": { + "alias": "Eth1", + "lanes": "65, 66, 67, 68", + "description": "Ethernet0 100G link", + "speed": "100000" + } + }, + "ACL_TABLE": { + "NO-NSW-PACL-V4": { + "type": "L3", + "policy_desc": "NO-NSW-PACL-V4", + "ports": [ + "Ethernet0" + ] + } + }, + "VLAN_MEMBER": { + "Vlan100|Ethernet0": { + "tagging_mode": "untagged" + } + }, + "VLAN": { + "Vlan100": { + "vlanid": "100", + "dhcp_servers": [ + "192.0.0.1", + "192.0.0.2", + "192.0.0.3", + "192.0.0.4" + ] + } + } +} diff --git a/tests/generic_config_updater/files/dpb_1_to_4.json-patch b/tests/generic_config_updater/files/dpb_1_to_4.json-patch new file mode 100644 index 0000000000..8eddd7a19d --- /dev/null +++ b/tests/generic_config_updater/files/dpb_1_to_4.json-patch @@ -0,0 +1,88 @@ +[ + { + "op": "add", + "path": "/PORT/Ethernet3", + "value": { + "alias": "Eth1/4", + "lanes": "68", + "description": "", + "speed": "10000" + } + }, + { + "op": "add", + "path": "/PORT/Ethernet1", + "value": { + "alias": "Eth1/2", + "lanes": "66", + "description": "", + "speed": "10000" + } + }, + { + "op": "add", + "path": "/PORT/Ethernet2", + "value": { + "alias": "Eth1/3", + "lanes": "67", + "description": "", + "speed": "10000" + } + }, + { + "op": "replace", + "path": "/PORT/Ethernet0/lanes", + 
"value": "65" + }, + { + "op": "replace", + "path": "/PORT/Ethernet0/alias", + "value": "Eth1/1" + }, + { + "op": "replace", + "path": "/PORT/Ethernet0/description", + "value": "" + }, + { + "op": "replace", + "path": "/PORT/Ethernet0/speed", + "value": "10000" + }, + { + "op": "add", + "path": "/VLAN_MEMBER/Vlan100|Ethernet2", + "value": { + "tagging_mode": "untagged" + } + }, + { + "op": "add", + "path": "/VLAN_MEMBER/Vlan100|Ethernet3", + "value": { + "tagging_mode": "untagged" + } + }, + { + "op": "add", + "path": "/VLAN_MEMBER/Vlan100|Ethernet1", + "value": { + "tagging_mode": "untagged" + } + }, + { + "op": "add", + "path": "/ACL_TABLE/NO-NSW-PACL-V4/ports/1", + "value": "Ethernet1" + }, + { + "op": "add", + "path": "/ACL_TABLE/NO-NSW-PACL-V4/ports/2", + "value": "Ethernet2" + }, + { + "op": "add", + "path": "/ACL_TABLE/NO-NSW-PACL-V4/ports/3", + "value": "Ethernet3" + } +] diff --git a/tests/generic_config_updater/files/dpb_4_splits_full_config.json b/tests/generic_config_updater/files/dpb_4_splits_full_config.json new file mode 100644 index 0000000000..23d1b9ecfc --- /dev/null +++ b/tests/generic_config_updater/files/dpb_4_splits_full_config.json @@ -0,0 +1,65 @@ +{ + "PORT": { + "Ethernet0": { + "alias": "Eth1/1", + "lanes": "65", + "description": "", + "speed": "10000" + }, + "Ethernet1": { + "alias": "Eth1/2", + "lanes": "66", + "description": "", + "speed": "10000" + }, + "Ethernet2": { + "alias": "Eth1/3", + "lanes": "67", + "description": "", + "speed": "10000" + }, + "Ethernet3": { + "alias": "Eth1/4", + "lanes": "68", + "description": "", + "speed": "10000" + } + }, + "ACL_TABLE": { + "NO-NSW-PACL-V4": { + "type": "L3", + "policy_desc": "NO-NSW-PACL-V4", + "ports": [ + "Ethernet0", + "Ethernet1", + "Ethernet2", + "Ethernet3" + ] + } + }, + "VLAN_MEMBER": { + "Vlan100|Ethernet0": { + "tagging_mode": "untagged" + }, + "Vlan100|Ethernet1": { + "tagging_mode": "untagged" + }, + "Vlan100|Ethernet2": { + "tagging_mode": "untagged" + }, + 
"Vlan100|Ethernet3": { + "tagging_mode": "untagged" + } + }, + "VLAN": { + "Vlan100": { + "vlanid": "100", + "dhcp_servers": [ + "192.0.0.1", + "192.0.0.2", + "192.0.0.3", + "192.0.0.4" + ] + } + } +} diff --git a/tests/generic_config_updater/files/dpb_4_to_1.json-patch b/tests/generic_config_updater/files/dpb_4_to_1.json-patch new file mode 100644 index 0000000000..33addd290d --- /dev/null +++ b/tests/generic_config_updater/files/dpb_4_to_1.json-patch @@ -0,0 +1,58 @@ +[ + { + "op": "remove", + "path": "/PORT/Ethernet2" + }, + { + "op": "remove", + "path": "/PORT/Ethernet1" + }, + { + "op": "remove", + "path": "/PORT/Ethernet3" + }, + { + "op": "replace", + "path": "/PORT/Ethernet0/alias", + "value": "Eth1" + }, + { + "op": "replace", + "path": "/PORT/Ethernet0/lanes", + "value": "65, 66, 67, 68" + }, + { + "op": "replace", + "path": "/PORT/Ethernet0/description", + "value": "Ethernet0 100G link" + }, + { + "op": "replace", + "path": "/PORT/Ethernet0/speed", + "value": "100000" + }, + { + "op": "remove", + "path": "/VLAN_MEMBER/Vlan100|Ethernet1" + }, + { + "op": "remove", + "path": "/VLAN_MEMBER/Vlan100|Ethernet3" + }, + { + "op": "remove", + "path": "/VLAN_MEMBER/Vlan100|Ethernet2" + }, + { + "op": "remove", + "path": "/ACL_TABLE/NO-NSW-PACL-V4/ports/1" + }, + { + "op": "remove", + "path": "/ACL_TABLE/NO-NSW-PACL-V4/ports/1" + }, + { + "op": "remove", + "path": "/ACL_TABLE/NO-NSW-PACL-V4/ports/1" + } +] diff --git a/tests/generic_config_updater/files/empty_config_db.json b/tests/generic_config_updater/files/empty_config_db.json new file mode 100644 index 0000000000..2c63c08510 --- /dev/null +++ b/tests/generic_config_updater/files/empty_config_db.json @@ -0,0 +1,2 @@ +{ +} diff --git a/tests/generic_config_updater/files/simple_config_db_inc_deps.json b/tests/generic_config_updater/files/simple_config_db_inc_deps.json new file mode 100644 index 0000000000..4554582103 --- /dev/null +++ b/tests/generic_config_updater/files/simple_config_db_inc_deps.json @@ -0,0 
+1,20 @@ +{ + "ACL_TABLE": { + "EVERFLOW": { + "policy_desc": "EVERFLOW", + "ports": [ + "Ethernet0" + ], + "stage": "ingress", + "type": "MIRROR" + } + }, + "PORT": { + "Ethernet0": { + "alias": "Eth1", + "lanes": "65, 66, 67, 68", + "description": "Ethernet0 100G link", + "speed": "100000" + } + } +} diff --git a/tests/generic_config_updater/gu_common_test.py b/tests/generic_config_updater/gu_common_test.py index f18ad45799..f69ec08030 100644 --- a/tests/generic_config_updater/gu_common_test.py +++ b/tests/generic_config_updater/gu_common_test.py @@ -1,15 +1,12 @@ import json import jsonpatch +import sonic_yang import unittest from unittest.mock import MagicMock, Mock -from .gutest_helpers import create_side_effect_dict, Files +from .gutest_helpers import create_side_effect_dict, Files import generic_config_updater.gu_common as gu_common -# import sys -# sys.path.insert(0,'../../generic_config_updater') -# import gu_common - class TestConfigWrapper(unittest.TestCase): def setUp(self): self.config_wrapper_mock = gu_common.ConfigWrapper() @@ -333,3 +330,306 @@ def __assert_same_patch(self, config_db_patch, sonic_yang_patch, config_wrapper, config_wrapper.convert_sonic_yang_to_config_db(after_update_sonic_yang) self.assertTrue(patch_wrapper.verify_same_json(after_update_config_db_cropped, after_update_sonic_yang_as_config_db)) + +class TestPathAddressing(unittest.TestCase): + def setUp(self): + self.path_addressing = gu_common.PathAddressing() + self.sy_only_models = sonic_yang.SonicYang(gu_common.YANG_DIR) + self.sy_only_models.loadYangModel() + + def test_get_path_tokens(self): + def check(path, tokens): + expected=tokens + actual=self.path_addressing.get_path_tokens(path) + self.assertEqual(expected, actual) + + check("", []) + check("/", [""]) + check("/token", ["token"]) + check("/more/than/one/token", ["more", "than", "one", "token"]) + check("/has/numbers/0/and/symbols/^", ["has", "numbers", "0", "and", "symbols", "^"]) + check("/~0/this/is/telda", ["~", 
"this", "is", "telda"]) + check("/~1/this/is/forward-slash", ["/", "this", "is", "forward-slash"]) + check("/\\\\/no-escaping", ["\\\\", "no-escaping"]) + check("////empty/tokens/are/ok", ["", "", "", "empty", "tokens", "are", "ok"]) + + def test_create_path(self): + def check(tokens, path): + expected=path + actual=self.path_addressing.create_path(tokens) + self.assertEqual(expected, actual) + + check([], "",) + check([""], "/",) + check(["token"], "/token") + check(["more", "than", "one", "token"], "/more/than/one/token") + check(["has", "numbers", "0", "and", "symbols", "^"], "/has/numbers/0/and/symbols/^") + check(["~", "this", "is", "telda"], "/~0/this/is/telda") + check(["/", "this", "is", "forward-slash"], "/~1/this/is/forward-slash") + check(["\\\\", "no-escaping"], "/\\\\/no-escaping") + check(["", "", "", "empty", "tokens", "are", "ok"], "////empty/tokens/are/ok") + check(["~token", "telda-not-followed-by-0-or-1"], "/~0token/telda-not-followed-by-0-or-1") + + def test_get_xpath_tokens(self): + def check(path, tokens): + expected=tokens + actual=self.path_addressing.get_xpath_tokens(path) + self.assertEqual(expected, actual) + + self.assertRaises(ValueError, check, "", []) + check("/", []) + check("/token", ["token"]) + check("/more/than/one/token", ["more", "than", "one", "token"]) + check("/multi/tokens/with/empty/last/token/", ["multi", "tokens", "with", "empty", "last", "token", ""]) + check("/has/numbers/0/and/symbols/^", ["has", "numbers", "0", "and", "symbols", "^"]) + check("/has[a='predicate']/in/the/beginning", ["has[a='predicate']", "in", "the", "beginning"]) + check("/ha/s[a='predicate']/in/the/middle", ["ha", "s[a='predicate']", "in", "the", "middle"]) + check("/ha/s[a='predicate-in-the-end']", ["ha", "s[a='predicate-in-the-end']"]) + check("/it/has[more='than'][one='predicate']/somewhere", ["it", "has[more='than'][one='predicate']", "somewhere"]) + check("/ha/s[a='predicate\"with']/double-quotes/inside", ["ha", "s[a='predicate\"with']", 
"double-quotes", "inside"]) + check('/a/predicate[with="double"]/quotes', ["a", 'predicate[with="double"]', "quotes"]) + check('/multiple["predicate"][with="double"]/quotes', ['multiple["predicate"][with="double"]', "quotes"]) + check('/multiple["predicate"][with="double"]/quotes', ['multiple["predicate"][with="double"]', "quotes"]) + check('/ha/s[a="predicate\'with"]/single-quote/inside', ["ha", 's[a="predicate\'with"]', "single-quote", "inside"]) + # XPATH 1.0 does not support single-quote within single-quoted string. str literal can be '[^']*' + # Not validating no single-quote within single-quoted string + check("/a/mix['of''quotes\"does']/not/work/well", ["a", "mix['of''quotes\"does']", "not", "work", "well"]) + # XPATH 1.0 does not support double-quotes within double-quoted string. str literal can be "[^"]*" + # Not validating no double-quotes within double-quoted string + check('/a/mix["of""quotes\'does"]/not/work/well', ["a", 'mix["of""quotes\'does"]', "not", "work", "well"]) + + def test_create_xpath(self): + def check(tokens, xpath): + expected=xpath + actual=self.path_addressing.create_xpath(tokens) + self.assertEqual(expected, actual) + + check([], "/") + check(["token"], "/token") + check(["more", "than", "one", "token"], "/more/than/one/token") + check(["multi", "tokens", "with", "empty", "last", "token", ""], "/multi/tokens/with/empty/last/token/") + check(["has", "numbers", "0", "and", "symbols", "^"], "/has/numbers/0/and/symbols/^") + check(["has[a='predicate']", "in", "the", "beginning"], "/has[a='predicate']/in/the/beginning") + check(["ha", "s[a='predicate']", "in", "the", "middle"], "/ha/s[a='predicate']/in/the/middle") + check(["ha", "s[a='predicate-in-the-end']"], "/ha/s[a='predicate-in-the-end']") + check(["it", "has[more='than'][one='predicate']", "somewhere"], "/it/has[more='than'][one='predicate']/somewhere") + check(["ha", "s[a='predicate\"with']", "double-quotes", "inside"], "/ha/s[a='predicate\"with']/double-quotes/inside") + 
check(["a", 'predicate[with="double"]', "quotes"], '/a/predicate[with="double"]/quotes') + check(['multiple["predicate"][with="double"]', "quotes"], '/multiple["predicate"][with="double"]/quotes') + check(['multiple["predicate"][with="double"]', "quotes"], '/multiple["predicate"][with="double"]/quotes') + check(["ha", 's[a="predicate\'with"]', "single-quote", "inside"], '/ha/s[a="predicate\'with"]/single-quote/inside') + # XPATH 1.0 does not support single-quote within single-quoted string. str literal can be '[^']*' + # Not validating no single-quote within single-quoted string + check(["a", "mix['of''quotes\"does']", "not", "work", "well"], "/a/mix['of''quotes\"does']/not/work/well", ) + # XPATH 1.0 does not support double-quotes within double-quoted string. str literal can be "[^"]*" + # Not validating no double-quotes within double-quoted string + check(["a", 'mix["of""quotes\'does"]', "not", "work", "well"], '/a/mix["of""quotes\'does"]/not/work/well') + + def test_find_ref_paths__ref_is_the_whole_key__returns_ref_paths(self): + # Arrange + path = "/PORT/Ethernet0" + expected = [ + "/ACL_TABLE/NO-NSW-PACL-V4/ports/0", + "/VLAN_MEMBER/Vlan1000|Ethernet0", + ] + + # Act + actual = self.path_addressing.find_ref_paths(path, Files.CROPPED_CONFIG_DB_AS_JSON) + + # Assert + self.assertCountEqual(expected, actual) + + def test_find_ref_paths__ref_is_a_part_of_key__returns_ref_paths(self): + # Arrange + path = "/VLAN/Vlan1000" + expected = [ + "/VLAN_MEMBER/Vlan1000|Ethernet0", + "/VLAN_MEMBER/Vlan1000|Ethernet4", + "/VLAN_MEMBER/Vlan1000|Ethernet8", + ] + + # Act + actual = self.path_addressing.find_ref_paths(path, Files.CROPPED_CONFIG_DB_AS_JSON) + + # Assert + self.assertCountEqual(expected, actual) + + def test_find_ref_paths__ref_is_in_multilist__returns_ref_paths(self): + # Arrange + path = "/PORT/Ethernet8" + expected = [ + "/INTERFACE/Ethernet8", + "/INTERFACE/Ethernet8|10.0.0.1~130", + ] + + # Act + actual = self.path_addressing.find_ref_paths(path, 
Files.CONFIG_DB_WITH_INTERFACE) + + # Assert + self.assertCountEqual(expected, actual) + + def test_find_ref_paths__ref_is_in_leafref_union__returns_ref_paths(self): + # Arrange + path = "/PORTCHANNEL/PortChannel0001" + expected = [ + "/ACL_TABLE/NO-NSW-PACL-V4/ports/1", + ] + + # Act + actual = self.path_addressing.find_ref_paths(path, Files.CONFIG_DB_WITH_PORTCHANNEL_AND_ACL) + + # Assert + self.assertCountEqual(expected, actual) + + def test_find_ref_paths__path_is_table__returns_ref_paths(self): + # Arrange + path = "/PORT" + expected = [ + "/ACL_TABLE/DATAACL/ports/0", + "/ACL_TABLE/EVERFLOW/ports/0", + "/ACL_TABLE/EVERFLOWV6/ports/0", + "/ACL_TABLE/EVERFLOWV6/ports/1", + "/ACL_TABLE/NO-NSW-PACL-V4/ports/0", + "/VLAN_MEMBER/Vlan1000|Ethernet0", + "/VLAN_MEMBER/Vlan1000|Ethernet4", + "/VLAN_MEMBER/Vlan1000|Ethernet8", + ] + + # Act + actual = self.path_addressing.find_ref_paths(path, Files.CROPPED_CONFIG_DB_AS_JSON) + + # Assert + self.assertCountEqual(expected, actual) + + def test_find_ref_paths__whole_config_path__returns_all_refs(self): + # Arrange + path = "" + expected = [ + "/ACL_TABLE/DATAACL/ports/0", + "/ACL_TABLE/EVERFLOW/ports/0", + "/ACL_TABLE/EVERFLOWV6/ports/0", + "/ACL_TABLE/EVERFLOWV6/ports/1", + "/ACL_TABLE/NO-NSW-PACL-V4/ports/0", + "/VLAN_MEMBER/Vlan1000|Ethernet0", + "/VLAN_MEMBER/Vlan1000|Ethernet4", + "/VLAN_MEMBER/Vlan1000|Ethernet8", + ] + + # Act + actual = self.path_addressing.find_ref_paths(path, Files.CROPPED_CONFIG_DB_AS_JSON) + + # Assert + self.assertCountEqual(expected, actual) + + def test_convert_path_to_xpath(self): + def check(path, xpath, config=None): + if not config: + config = Files.CROPPED_CONFIG_DB_AS_JSON + + expected=xpath + actual=self.path_addressing.convert_path_to_xpath(path, config, self.sy_only_models) + self.assertEqual(expected, actual) + + check(path="", xpath="/") + check(path="/VLAN_MEMBER", xpath="/sonic-vlan:sonic-vlan/VLAN_MEMBER") + check(path="/VLAN/Vlan1000/dhcp_servers", + 
xpath="/sonic-vlan:sonic-vlan/VLAN/VLAN_LIST[name='Vlan1000']/dhcp_servers") + check(path="/VLAN/Vlan1000/dhcp_servers/0", + xpath="/sonic-vlan:sonic-vlan/VLAN/VLAN_LIST[name='Vlan1000']/dhcp_servers[.='192.0.0.1']") + check(path="/PORT/Ethernet0/lanes", xpath="/sonic-port:sonic-port/PORT/PORT_LIST[name='Ethernet0']/lanes") + check(path="/ACL_TABLE/NO-NSW-PACL-V4/ports/0", + xpath="/sonic-acl:sonic-acl/ACL_TABLE/ACL_TABLE_LIST[ACL_TABLE_NAME='NO-NSW-PACL-V4']/ports[.='Ethernet0']") + check(path="/ACL_TABLE/NO-NSW-PACL-V4/ports/0", + xpath="/sonic-acl:sonic-acl/ACL_TABLE/ACL_TABLE_LIST[ACL_TABLE_NAME='NO-NSW-PACL-V4']/ports[.='Ethernet0']") + check(path="/VLAN_MEMBER/Vlan1000|Ethernet8/tagging_mode", + xpath="/sonic-vlan:sonic-vlan/VLAN_MEMBER/VLAN_MEMBER_LIST[name='Vlan1000'][port='Ethernet8']/tagging_mode") + check(path="/VLAN_MEMBER/Vlan1000|Ethernet8", + xpath="/sonic-vlan:sonic-vlan/VLAN_MEMBER/VLAN_MEMBER_LIST[name='Vlan1000'][port='Ethernet8']") + check(path="/DEVICE_METADATA/localhost/hwsku", + xpath="/sonic-device_metadata:sonic-device_metadata/DEVICE_METADATA/localhost/hwsku", + config=Files.CONFIG_DB_WITH_DEVICE_METADATA) + check(path="/FLEX_COUNTER_TABLE/BUFFER_POOL_WATERMARK/FLEX_COUNTER_STATUS", + xpath="/sonic-flex_counter:sonic-flex_counter/FLEX_COUNTER_TABLE/BUFFER_POOL_WATERMARK/FLEX_COUNTER_STATUS", + config=Files.CONTRAINER_WITH_CONTAINER_CONFIG_DB) + check(path="/ACL_RULE/SSH_ONLY|RULE1/L4_SRC_PORT", + xpath="/sonic-acl:sonic-acl/ACL_RULE/ACL_RULE_LIST[ACL_TABLE_NAME='SSH_ONLY'][RULE_NAME='RULE1']/L4_SRC_PORT", + config=Files.CONFIG_DB_CHOICE) + check(path="/INTERFACE/Ethernet8", + xpath="/sonic-interface:sonic-interface/INTERFACE/INTERFACE_LIST[name='Ethernet8']", + config=Files.CONFIG_DB_WITH_INTERFACE) + check(path="/INTERFACE/Ethernet8|10.0.0.1~130", + xpath="/sonic-interface:sonic-interface/INTERFACE/INTERFACE_IPPREFIX_LIST[name='Ethernet8'][ip-prefix='10.0.0.1/30']", + config=Files.CONFIG_DB_WITH_INTERFACE) + 
check(path="/INTERFACE/Ethernet8|10.0.0.1~130/scope", + xpath="/sonic-interface:sonic-interface/INTERFACE/INTERFACE_IPPREFIX_LIST[name='Ethernet8'][ip-prefix='10.0.0.1/30']/scope", + config=Files.CONFIG_DB_WITH_INTERFACE) + check(path="/PORTCHANNEL_INTERFACE", + xpath="/sonic-portchannel:sonic-portchannel/PORTCHANNEL_INTERFACE", + config=Files.CONFIG_DB_WITH_PORTCHANNEL_INTERFACE) + check(path="/PORTCHANNEL_INTERFACE/PortChannel0001|1.1.1.1~124", + xpath="/sonic-portchannel:sonic-portchannel/PORTCHANNEL_INTERFACE/PORTCHANNEL_INTERFACE_IPPREFIX_LIST[name='PortChannel0001'][ip_prefix='1.1.1.1/24']", + config=Files.CONFIG_DB_WITH_PORTCHANNEL_INTERFACE) + + def test_convert_xpath_to_path(self): + def check(xpath, path, config=None): + if not config: + config = Files.CROPPED_CONFIG_DB_AS_JSON + + expected=path + actual=self.path_addressing.convert_xpath_to_path(xpath, config, self.sy_only_models) + self.assertEqual(expected, actual) + + check(xpath="/",path="") + check(xpath="/sonic-vlan:sonic-vlan/VLAN_MEMBER", path="/VLAN_MEMBER") + check(xpath="/sonic-vlan:sonic-vlan/VLAN_MEMBER/VLAN_MEMBER_LIST",path="/VLAN_MEMBER") + check(xpath="/sonic-vlan:sonic-vlan/VLAN_MEMBER/VLAN_MEMBER_LIST[name='Vlan1000'][port='Ethernet8']", + path="/VLAN_MEMBER/Vlan1000|Ethernet8") + check(xpath="/sonic-vlan:sonic-vlan/VLAN_MEMBER/VLAN_MEMBER_LIST[name='Vlan1000'][port='Ethernet8']/name", + path="/VLAN_MEMBER/Vlan1000|Ethernet8") + check(xpath="/sonic-vlan:sonic-vlan/VLAN_MEMBER/VLAN_MEMBER_LIST[name='Vlan1000'][port='Ethernet8']/port", + path="/VLAN_MEMBER/Vlan1000|Ethernet8") + check(xpath="/sonic-vlan:sonic-vlan/VLAN_MEMBER/VLAN_MEMBER_LIST[name='Vlan1000'][port='Ethernet8']/tagging_mode", + path="/VLAN_MEMBER/Vlan1000|Ethernet8/tagging_mode") + check(xpath="/sonic-vlan:sonic-acl/ACL_RULE", path="/ACL_RULE") + check(xpath="/sonic-vlan:sonic-acl/ACL_RULE/ACL_RULE_LIST[ACL_TABLE_NAME='SSH_ONLY'][RULE_NAME='RULE1']", + path="/ACL_RULE/SSH_ONLY|RULE1", + config=Files.CONFIG_DB_CHOICE) + 
check(xpath="/sonic-acl:sonic-acl/ACL_RULE/ACL_RULE_LIST[ACL_TABLE_NAME='SSH_ONLY'][RULE_NAME='RULE1']/L4_SRC_PORT", + path="/ACL_RULE/SSH_ONLY|RULE1/L4_SRC_PORT", + config=Files.CONFIG_DB_CHOICE) + check(xpath="/sonic-vlan:sonic-vlan/VLAN/VLAN_LIST[name='Vlan1000']/dhcp_servers", + path="/VLAN/Vlan1000/dhcp_servers") + check(xpath="/sonic-vlan:sonic-vlan/VLAN/VLAN_LIST[name='Vlan1000']/dhcp_servers[.='192.0.0.1']", + path="/VLAN/Vlan1000/dhcp_servers/0") + check(xpath="/sonic-port:sonic-port/PORT/PORT_LIST[name='Ethernet0']/lanes", path="/PORT/Ethernet0/lanes") + check(xpath="/sonic-acl:sonic-acl/ACL_TABLE/ACL_TABLE_LIST[ACL_TABLE_NAME='NO-NSW-PACL-V4']/ports[.='Ethernet0']", + path="/ACL_TABLE/NO-NSW-PACL-V4/ports/0") + check(xpath="/sonic-vlan:sonic-vlan/VLAN_MEMBER/VLAN_MEMBER_LIST[name='Vlan1000'][port='Ethernet8']/tagging_mode", + path="/VLAN_MEMBER/Vlan1000|Ethernet8/tagging_mode") + check(xpath="/sonic-vlan:sonic-vlan/VLAN_MEMBER/VLAN_MEMBER_LIST[name='Vlan1000'][port='Ethernet8']", + path="/VLAN_MEMBER/Vlan1000|Ethernet8") + check(xpath="/sonic-device_metadata:sonic-device_metadata/DEVICE_METADATA/localhost/hwsku", + path="/DEVICE_METADATA/localhost/hwsku", + config=Files.CONFIG_DB_WITH_DEVICE_METADATA) + check(xpath="/sonic-flex_counter:sonic-flex_counter/FLEX_COUNTER_TABLE/BUFFER_POOL_WATERMARK", + path="/FLEX_COUNTER_TABLE/BUFFER_POOL_WATERMARK", + config=Files.CONTRAINER_WITH_CONTAINER_CONFIG_DB) + check(xpath="/sonic-flex_counter:sonic-flex_counter/FLEX_COUNTER_TABLE/BUFFER_POOL_WATERMARK/FLEX_COUNTER_STATUS", + path="/FLEX_COUNTER_TABLE/BUFFER_POOL_WATERMARK/FLEX_COUNTER_STATUS", + config=Files.CONTRAINER_WITH_CONTAINER_CONFIG_DB) + check(xpath="/sonic-interface:sonic-interface/INTERFACE/INTERFACE_LIST[name='Ethernet8']", + path="/INTERFACE/Ethernet8", + config=Files.CONFIG_DB_WITH_INTERFACE) + check(xpath="/sonic-interface:sonic-interface/INTERFACE/INTERFACE_IPPREFIX_LIST[name='Ethernet8'][ip-prefix='10.0.0.1/30']", + 
path="/INTERFACE/Ethernet8|10.0.0.1~130", + config=Files.CONFIG_DB_WITH_INTERFACE) + check(xpath="/sonic-interface:sonic-interface/INTERFACE/INTERFACE_IPPREFIX_LIST[name='Ethernet8'][ip-prefix='10.0.0.1/30']/scope", + path="/INTERFACE/Ethernet8|10.0.0.1~130/scope", + config=Files.CONFIG_DB_WITH_INTERFACE) + check(xpath="/sonic-portchannel:sonic-portchannel/PORTCHANNEL_INTERFACE", + path="/PORTCHANNEL_INTERFACE", + config=Files.CONFIG_DB_WITH_PORTCHANNEL_INTERFACE) + check(xpath="/sonic-portchannel:sonic-portchannel/PORTCHANNEL_INTERFACE/PORTCHANNEL_INTERFACE_IPPREFIX_LIST[name='PortChannel0001'][ip_prefix='1.1.1.1/24']", + path="/PORTCHANNEL_INTERFACE/PortChannel0001|1.1.1.1~124", + config=Files.CONFIG_DB_WITH_PORTCHANNEL_INTERFACE) + diff --git a/tests/generic_config_updater/patch_sorter_test.py b/tests/generic_config_updater/patch_sorter_test.py new file mode 100644 index 0000000000..4da9fb901b --- /dev/null +++ b/tests/generic_config_updater/patch_sorter_test.py @@ -0,0 +1,1730 @@ +import jsonpatch +import unittest +from unittest.mock import MagicMock, Mock + +import generic_config_updater.patch_sorter as ps +from .gutest_helpers import Files, create_side_effect_dict +from generic_config_updater.gu_common import ConfigWrapper, PatchWrapper, OperationWrapper, \ + GenericConfigUpdaterError, OperationType, JsonChange, PathAddressing + +class TestDiff(unittest.TestCase): + def test_apply_move__updates_current_config(self): + # Arrange + diff = ps.Diff(current_config=Files.CROPPED_CONFIG_DB_AS_JSON, target_config=Files.ANY_CONFIG_DB) + move = ps.JsonMove.from_patch(Files.SINGLE_OPERATION_CONFIG_DB_PATCH) + + expected = ps.Diff(current_config=Files.CONFIG_DB_AFTER_SINGLE_OPERATION, target_config=Files.ANY_CONFIG_DB) + + # Act + actual = diff.apply_move(move) + + # Assert + self.assertEqual(expected.current_config, actual.current_config) + self.assertEqual(expected.target_config, actual.target_config) + + def test_has_no_diff__diff_exists__returns_false(self): + # 
Arrange + diff = ps.Diff(current_config=Files.CROPPED_CONFIG_DB_AS_JSON, + target_config=Files.CONFIG_DB_AFTER_SINGLE_OPERATION) + + # Act and Assert + self.assertFalse(diff.has_no_diff()) + + def test_has_no_diff__no_diff__returns_true(self): + # Arrange + diff = ps.Diff(current_config=Files.CROPPED_CONFIG_DB_AS_JSON, + target_config=Files.CROPPED_CONFIG_DB_AS_JSON) + + # Act and Assert + self.assertTrue(diff.has_no_diff()) + + def test_hash__different_current_config__different_hashes(self): + # Arrange + diff1 = ps.Diff(current_config=Files.CROPPED_CONFIG_DB_AS_JSON, target_config=Files.ANY_CONFIG_DB) + diff2 = ps.Diff(current_config=Files.CROPPED_CONFIG_DB_AS_JSON, target_config=Files.ANY_CONFIG_DB) + diff3 = ps.Diff(current_config=Files.CONFIG_DB_AFTER_SINGLE_OPERATION, target_config=Files.ANY_CONFIG_DB) + + # Act + hash1 = hash(diff1) + hash2 = hash(diff2) + hash3 = hash(diff3) + + # Assert + self.assertEqual(hash1, hash2) # same current config + self.assertNotEqual(hash1, hash3) + + def test_hash__different_target_config__different_hashes(self): + # Arrange + diff1 = ps.Diff(current_config=Files.ANY_CONFIG_DB, target_config=Files.CROPPED_CONFIG_DB_AS_JSON) + diff2 = ps.Diff(current_config=Files.ANY_CONFIG_DB, target_config=Files.CROPPED_CONFIG_DB_AS_JSON) + diff3 = ps.Diff(current_config=Files.ANY_CONFIG_DB, target_config=Files.CONFIG_DB_AFTER_SINGLE_OPERATION) + + # Act + hash1 = hash(diff1) + hash2 = hash(diff2) + hash3 = hash(diff3) + + # Assert + self.assertEqual(hash1, hash2) # same target config + self.assertNotEqual(hash1, hash3) + + def test_hash__swapped_current_and_target_configs__different_hashes(self): + # Arrange + diff1 = ps.Diff(current_config=Files.ANY_CONFIG_DB, target_config=Files.ANY_OTHER_CONFIG_DB) + diff2 = ps.Diff(current_config=Files.ANY_OTHER_CONFIG_DB, target_config=Files.ANY_CONFIG_DB) + + # Act + hash1 = hash(diff1) + hash2 = hash(diff2) + + # Assert + self.assertNotEqual(hash1, hash2) + + def 
test_eq__different_current_config__returns_false(self): + # Arrange + diff = ps.Diff(Files.ANY_CONFIG_DB, Files.ANY_CONFIG_DB) + other_diff = ps.Diff(Files.ANY_OTHER_CONFIG_DB, Files.ANY_CONFIG_DB) + + # Act and assert + self.assertNotEqual(diff, other_diff) + self.assertFalse(diff == other_diff) + + def test_eq__different_target_config__returns_false(self): + # Arrange + diff = ps.Diff(Files.ANY_CONFIG_DB, Files.ANY_CONFIG_DB) + other_diff = ps.Diff(Files.ANY_CONFIG_DB, Files.ANY_OTHER_CONFIG_DB) + + # Act and assert + self.assertNotEqual(diff, other_diff) + self.assertFalse(diff == other_diff) + + def test_eq__different_target_config__returns_true(self): + # Arrange + diff = ps.Diff(Files.ANY_CONFIG_DB, Files.ANY_CONFIG_DB) + other_diff = ps.Diff(Files.ANY_CONFIG_DB, Files.ANY_CONFIG_DB) + + # Act and assert + self.assertEqual(diff, other_diff) + self.assertTrue(diff == other_diff) + +class TestJsonMove(unittest.TestCase): + def setUp(self): + self.operation_wrapper = OperationWrapper() + self.any_op_type = OperationType.REPLACE + self.any_tokens = ["table1", "key11"] + self.any_path = "/table1/key11" + self.any_config = { + "table1": { + "key11": "value11" + } + } + self.any_value = "value11" + self.any_operation = self.operation_wrapper.create(self.any_op_type, self.any_path, self.any_value) + self.any_diff = ps.Diff(self.any_config, self.any_config) + + def test_ctor__delete_op_whole_config__none_value_and_empty_path(self): + # Arrange + path = "" + diff = ps.Diff(current_config={}, target_config=self.any_config) + + # Act + jsonmove = ps.JsonMove(diff, OperationType.REMOVE, []) + + # Assert + self.verify_jsonmove(self.operation_wrapper.create(OperationType.REMOVE, path), + OperationType.REMOVE, + [], + None, + jsonmove) + def test_ctor__remove_op__operation_created_directly(self): + # Arrange and Act + jsonmove = ps.JsonMove(self.any_diff, OperationType.REMOVE, self.any_tokens) + + # Assert + 
self.verify_jsonmove(self.operation_wrapper.create(OperationType.REMOVE, self.any_path), + OperationType.REMOVE, + self.any_tokens, + None, + jsonmove) + + def test_ctor__replace_op_whole_config__whole_config_value_and_empty_path(self): + # Arrange + path = "" + diff = ps.Diff(current_config={}, target_config=self.any_config) + + # Act + jsonmove = ps.JsonMove(diff, OperationType.REPLACE, [], []) + + # Assert + self.verify_jsonmove(self.operation_wrapper.create(OperationType.REPLACE, path, self.any_config), + OperationType.REPLACE, + [], + [], + jsonmove) + + def test_ctor__replace_op__operation_created_directly(self): + # Arrange and Act + jsonmove = ps.JsonMove(self.any_diff, OperationType.REPLACE, self.any_tokens, self.any_tokens) + + # Assert + self.verify_jsonmove(self.operation_wrapper.create(OperationType.REPLACE, self.any_path, self.any_value), + OperationType.REPLACE, + self.any_tokens, + self.any_tokens, + jsonmove) + + def test_ctor__add_op_whole_config__whole_config_value_and_empty_path(self): + # Arrange + path = "" + diff = ps.Diff(current_config={}, target_config=self.any_config) + + # Act + jsonmove = ps.JsonMove(diff, OperationType.ADD, [], []) + + # Assert + self.verify_jsonmove(self.operation_wrapper.create(OperationType.ADD, path, self.any_config), + OperationType.ADD, + [], + [], + jsonmove) + + def test_ctor__add_op_path_exist__same_value_and_path(self): + # Arrange and Act + jsonmove = ps.JsonMove(self.any_diff, OperationType.ADD, self.any_tokens, self.any_tokens) + + # Assert + self.verify_jsonmove(self.operation_wrapper.create(OperationType.ADD, self.any_path, self.any_value), + OperationType.ADD, + self.any_tokens, + self.any_tokens, + jsonmove) + + def test_ctor__add_op_path_exist_include_list__same_value_and_path(self): + # Arrange + current_config = { + "table1": { + "list1": ["value11", "value13"] + } + } + target_config = { + "table1": { + "list1": ["value11", "value12", "value13", "value14"] + } + } + diff = ps.Diff(current_config, 
target_config) + op_type = OperationType.ADD + current_config_tokens = ["table1", "list1", 1] # Index 1 exists in both current and target config + target_config_tokens = ["table1", "list1", 1] + expected_jsonpatch_path = "/table1/list1/1" + expected_jsonpatch_value = "value12" + # NOTE: the target config can contain more diff than the given move. + + # Act + jsonmove = ps.JsonMove(diff, op_type, current_config_tokens, target_config_tokens) + + # Assert + self.verify_jsonmove(self.operation_wrapper.create(op_type, expected_jsonpatch_path, expected_jsonpatch_value), + op_type, + current_config_tokens, + target_config_tokens, + jsonmove) + + def test_ctor__add_op_path_exist_list_index_doesnot_exist_in_target___same_value_and_path(self): + # Arrange + current_config = { + "table1": { + "list1": ["value11"] + } + } + target_config = { + "table1": { + "list1": ["value12"] + } + } + diff = ps.Diff(current_config, target_config) + op_type = OperationType.ADD + current_config_tokens = ["table1", "list1", 1] # Index is 1 which does not exist in target + target_config_tokens = ["table1", "list1", 0] + expected_jsonpatch_path = "/table1/list1/1" + expected_jsonpatch_value = "value12" + # NOTE: the target config can contain more diff than the given move. 
+ + # Act + jsonmove = ps.JsonMove(diff, op_type, current_config_tokens, target_config_tokens) + + # Assert + self.verify_jsonmove(self.operation_wrapper.create(op_type, expected_jsonpatch_path, expected_jsonpatch_value), + op_type, + current_config_tokens, + target_config_tokens, + jsonmove) + + def test_ctor__add_op_path_doesnot_exist__value_and_path_of_parent(self): + # Arrange + current_config = { + } + target_config = { + "table1": { + "key11": { + "key111": "value111" + } + } + } + diff = ps.Diff(current_config, target_config) + op_type = OperationType.ADD + current_config_tokens = ["table1", "key11", "key111"] + target_config_tokens = ["table1", "key11", "key111"] + expected_jsonpatch_path = "/table1" + expected_jsonpatch_value = { + "key11": { + "key111": "value111" + } + } + # NOTE: the target config can contain more diff than the given move. + + # Act + jsonmove = ps.JsonMove(diff, op_type, current_config_tokens, target_config_tokens) + + # Assert + self.verify_jsonmove(self.operation_wrapper.create(op_type, expected_jsonpatch_path, expected_jsonpatch_value), + op_type, + current_config_tokens, + target_config_tokens, + jsonmove) + + def test_ctor__add_op_path_doesnot_exist_include_list__value_and_path_of_parent(self): + # Arrange + current_config = { + } + target_config = { + "table1": { + "list1": ["value11", "value12", "value13", "value14"] + } + } + diff = ps.Diff(current_config, target_config) + op_type = OperationType.ADD + current_config_tokens = ["table1", "list1", 0] + target_config_tokens = ["table1", "list1", 1] + expected_jsonpatch_path = "/table1" + expected_jsonpatch_value = { + "list1": ["value12"] + } + # NOTE: the target config can contain more diff than the given move. 
+ + # Act + jsonmove = ps.JsonMove(diff, op_type, current_config_tokens, target_config_tokens) + + # Assert + self.verify_jsonmove(self.operation_wrapper.create(op_type, expected_jsonpatch_path, expected_jsonpatch_value), + op_type, + current_config_tokens, + target_config_tokens, + jsonmove) + + def test_from_patch__more_than_1_op__failure(self): + # Arrange + patch = jsonpatch.JsonPatch([self.any_operation, self.any_operation]) + + # Act and Assert + self.assertRaises(GenericConfigUpdaterError, ps.JsonMove.from_patch, patch) + + def test_from_patch__delete_op__delete_jsonmove(self): + # Arrange + operation = self.operation_wrapper.create(OperationType.REMOVE, self.any_path) + patch = jsonpatch.JsonPatch([operation]) + + # Act + jsonmove = ps.JsonMove.from_patch(patch) + + # Assert + self.verify_jsonmove(operation, + OperationType.REMOVE, + self.any_tokens, + None, + jsonmove) + + def test_from_patch__replace_op__replace_jsonmove(self): + # Arrange + operation = self.operation_wrapper.create(OperationType.REPLACE, self.any_path, self.any_value) + patch = jsonpatch.JsonPatch([operation]) + + # Act + jsonmove = ps.JsonMove.from_patch(patch) + + # Assert + self.verify_jsonmove(operation, + OperationType.REPLACE, + self.any_tokens, + self.any_tokens, + jsonmove) + + def test_from_patch__add_op__add_jsonmove(self): + # Arrange + operation = self.operation_wrapper.create(OperationType.ADD, self.any_path, self.any_value) + patch = jsonpatch.JsonPatch([operation]) + + # Act + jsonmove = ps.JsonMove.from_patch(patch) + + # Assert + self.verify_jsonmove(operation, + OperationType.ADD, + self.any_tokens, + self.any_tokens, + jsonmove) + + def test_from_patch__add_op_with_list_indexes__add_jsonmove(self): + # Arrange + path = "/table1/key11/list1111/3" + value = "value11111" + # From a JsonPatch it is not possible to figure out if the '3' is an item in a list or a dictionary, + # will assume by default a dictionary for simplicity. 
+ tokens = ["table1", "key11", "list1111", "3"] + operation = self.operation_wrapper.create(OperationType.ADD, path, value) + patch = jsonpatch.JsonPatch([operation]) + + # Act + jsonmove = ps.JsonMove.from_patch(patch) + + # Assert + self.verify_jsonmove(operation, + OperationType.ADD, + tokens, + tokens, + jsonmove) + + def test_from_patch__replace_whole_config__whole_config_jsonmove(self): + # Arrange + tokens = [] + path = "" + value = {"table1": {"key1": "value1"} } + operation = self.operation_wrapper.create(OperationType.REPLACE, path, value) + patch = jsonpatch.JsonPatch([operation]) + + # Act + jsonmove = ps.JsonMove.from_patch(patch) + + # Assert + self.verify_jsonmove(operation, + OperationType.REPLACE, + tokens, + tokens, + jsonmove) + + def verify_jsonmove(self, + expected_operation, + expected_op_type, + expected_current_config_tokens, + expected_target_config_tokens, + jsonmove): + expected_patch = jsonpatch.JsonPatch([expected_operation]) + self.assertEqual(expected_patch, jsonmove.patch) + self.assertEqual(expected_op_type, jsonmove.op_type) + self.assertListEqual(expected_current_config_tokens, jsonmove.current_config_tokens) + self.assertEqual(expected_target_config_tokens, jsonmove.target_config_tokens) + +class TestMoveWrapper(unittest.TestCase): + def setUp(self): + self.any_current_config = {} + self.any_target_config = {} + self.any_diff = ps.Diff(self.any_current_config, self.any_target_config) + self.any_move = Mock() + self.any_other_move1 = Mock() + self.any_other_move2 = Mock() + self.any_extended_move = Mock() + self.any_other_extended_move1 = Mock() + self.any_other_extended_move2 = Mock() + + self.single_move_generator = Mock() + self.single_move_generator.generate.side_effect = \ + create_side_effect_dict({(str(self.any_diff),): [self.any_move]}) + + self.another_single_move_generator = Mock() + self.another_single_move_generator.generate.side_effect = \ + create_side_effect_dict({(str(self.any_diff),): [self.any_other_move1]}) + + 
self.multiple_move_generator = Mock() + self.multiple_move_generator.generate.side_effect = create_side_effect_dict( + {(str(self.any_diff),): [self.any_move, self.any_other_move1, self.any_other_move2]}) + + self.single_move_extender = Mock() + self.single_move_extender.extend.side_effect = create_side_effect_dict( + { + (str(self.any_move), str(self.any_diff)): [self.any_extended_move], + (str(self.any_extended_move), str(self.any_diff)): [], # As first extended move will be extended + (str(self.any_other_extended_move1), str(self.any_diff)): [] # Needed when mixed with other extenders + }) + + self.another_single_move_extender = Mock() + self.another_single_move_extender.extend.side_effect = create_side_effect_dict( + { + (str(self.any_move), str(self.any_diff)): [self.any_other_extended_move1], + (str(self.any_other_extended_move1), str(self.any_diff)): [], # As first extended move will be extended + (str(self.any_extended_move), str(self.any_diff)): [] # Needed when mixed with other extenders + }) + + self.multiple_move_extender = Mock() + self.multiple_move_extender.extend.side_effect = create_side_effect_dict( + { + (str(self.any_move), str(self.any_diff)): \ + [self.any_extended_move, self.any_other_extended_move1, self.any_other_extended_move2], + # All extended moves will be extended + (str(self.any_extended_move), str(self.any_diff)): [], + (str(self.any_other_extended_move1), str(self.any_diff)): [], + (str(self.any_other_extended_move2), str(self.any_diff)): [], + }) + + self.mixed_move_extender = Mock() + self.mixed_move_extender.extend.side_effect = create_side_effect_dict( + { + (str(self.any_move), str(self.any_diff)): [self.any_extended_move], + (str(self.any_other_move1), str(self.any_diff)): [self.any_other_extended_move1], + (str(self.any_extended_move), str(self.any_diff)): \ + [self.any_other_extended_move1, self.any_other_extended_move2], + # All extended moves will be extended + (str(self.any_other_extended_move1), str(self.any_diff)): [], 
+ (str(self.any_other_extended_move2), str(self.any_diff)): [], + }) + + self.fail_move_validator = Mock() + self.fail_move_validator.validate.side_effect = create_side_effect_dict( + {(str(self.any_move), str(self.any_diff)): False}) + + self.success_move_validator = Mock() + self.success_move_validator.validate.side_effect = create_side_effect_dict( + {(str(self.any_move), str(self.any_diff)): True}) + + def test_ctor__assigns_values_correctly(self): + # Arrange + move_generators = Mock() + move_extenders = Mock() + move_validators = Mock() + + # Act + move_wrapper = ps.MoveWrapper(move_generators, move_extenders, move_validators) + + # Assert + self.assertIs(move_generators, move_wrapper.move_generators) + self.assertIs(move_extenders, move_wrapper.move_extenders) + self.assertIs(move_validators, move_wrapper.move_validators) + + def test_generate__single_move_generator__single_move_returned(self): + # Arrange + move_generators = [self.single_move_generator] + move_wrapper = ps.MoveWrapper(move_generators, [], []) + expected = [self.any_move] + + # Act + actual = list(move_wrapper.generate(self.any_diff)) + + # Assert + self.assertListEqual(expected, actual) + + def test_generate__multiple_move_generator__multiple_move_returned(self): + # Arrange + move_generators = [self.multiple_move_generator] + move_wrapper = ps.MoveWrapper(move_generators, [], []) + expected = [self.any_move, self.any_other_move1, self.any_other_move2] + + # Act + actual = list(move_wrapper.generate(self.any_diff)) + + # Assert + self.assertListEqual(expected, actual) + + def test_generate__different_move_generators__different_moves_returned(self): + # Arrange + move_generators = [self.single_move_generator, self.another_single_move_generator] + move_wrapper = ps.MoveWrapper(move_generators, [], []) + expected = [self.any_move, self.any_other_move1] + + # Act + actual = list(move_wrapper.generate(self.any_diff)) + + # Assert + self.assertListEqual(expected, actual) + + def 
test_generate__duplicate_generated_moves__unique_moves_returned(self): + # Arrange + move_generators = [self.single_move_generator, self.single_move_generator] + move_wrapper = ps.MoveWrapper(move_generators, [], []) + expected = [self.any_move] + + # Act + actual = list(move_wrapper.generate(self.any_diff)) + + # Assert + self.assertListEqual(expected, actual) + + def test_generate__single_move_extender__one_extended_move_returned(self): + # Arrange + move_generators = [self.single_move_generator] + move_extenders = [self.single_move_extender] + move_wrapper = ps.MoveWrapper(move_generators, move_extenders, []) + expected = [self.any_move, self.any_extended_move] + + # Act + actual = list(move_wrapper.generate(self.any_diff)) + + # Assert + self.assertListEqual(expected, actual) + + def test_generate__multiple_move_extender__multiple_extended_move_returned(self): + # Arrange + move_generators = [self.single_move_generator] + move_extenders = [self.multiple_move_extender] + move_wrapper = ps.MoveWrapper(move_generators, move_extenders, []) + expected = [self.any_move, self.any_extended_move, self.any_other_extended_move1, self.any_other_extended_move2] + + # Act + actual = list(move_wrapper.generate(self.any_diff)) + + # Assert + self.assertListEqual(expected, actual) + + def test_generate__different_move_extenders__different_extended_moves_returned(self): + # Arrange + move_generators = [self.single_move_generator] + move_extenders = [self.single_move_extender, self.another_single_move_extender] + move_wrapper = ps.MoveWrapper(move_generators, move_extenders, []) + expected = [self.any_move, self.any_extended_move, self.any_other_extended_move1] + + # Act + actual = list(move_wrapper.generate(self.any_diff)) + + # Assert + self.assertListEqual(expected, actual) + + def test_generate__duplicate_extended_moves__unique_moves_returned(self): + # Arrange + move_generators = [self.single_move_generator] + move_extenders = [self.single_move_extender, 
self.single_move_extender] + move_wrapper = ps.MoveWrapper(move_generators, move_extenders, []) + expected = [self.any_move, self.any_extended_move] + + # Act + actual = list(move_wrapper.generate(self.any_diff)) + + # Assert + self.assertListEqual(expected, actual) + + def test_generate__mixed_extended_moves__unique_moves_returned(self): + # Arrange + move_generators = [self.single_move_generator, self.another_single_move_generator] + move_extenders = [self.mixed_move_extender] + move_wrapper = ps.MoveWrapper(move_generators, move_extenders, []) + expected = [self.any_move, + self.any_other_move1, + self.any_extended_move, + self.any_other_extended_move1, + self.any_other_extended_move2] + + # Act + actual = list(move_wrapper.generate(self.any_diff)) + + # Assert + self.assertListEqual(expected, actual) + + def test_validate__validation_fail__false_returned(self): + # Arrange + move_validators = [self.fail_move_validator] + move_wrapper = ps.MoveWrapper([], [], move_validators) + + # Act and assert + self.assertFalse(move_wrapper.validate(self.any_move, self.any_diff)) + + def test_validate__validation_succeed__true_returned(self): + # Arrange + move_validators = [self.success_move_validator] + move_wrapper = ps.MoveWrapper([], [], move_validators) + + # Act and assert + self.assertTrue(move_wrapper.validate(self.any_move, self.any_diff)) + + def test_validate__multiple_validators_last_fail___false_returned(self): + # Arrange + move_validators = [self.success_move_validator, self.success_move_validator, self.fail_move_validator] + move_wrapper = ps.MoveWrapper([], [], move_validators) + + # Act and assert + self.assertFalse(move_wrapper.validate(self.any_move, self.any_diff)) + + def test_validate__multiple_validators_succeed___true_returned(self): + # Arrange + move_validators = [self.success_move_validator, self.success_move_validator, self.success_move_validator] + move_wrapper = ps.MoveWrapper([], [], move_validators) + + # Act and assert + 
self.assertTrue(move_wrapper.validate(self.any_move, self.any_diff)) + + def test_simulate__applies_move(self): + # Arrange + diff = Mock() + diff.apply_move.side_effect = create_side_effect_dict({(str(self.any_move), ): self.any_diff}) + move_wrapper = ps.MoveWrapper(None, None, None) + + # Act + actual = move_wrapper.simulate(self.any_move, diff) + + # Assert + self.assertIs(self.any_diff, actual) + +class TestDeleteWholeConfigMoveValidator(unittest.TestCase): + def setUp(self): + self.operation_wrapper = OperationWrapper() + self.validator = ps.DeleteWholeConfigMoveValidator() + self.any_diff = Mock() + self.any_non_whole_config_path = "/table1" + self.whole_config_path = "" + + def test_validate__non_remove_op_non_whole_config__success(self): + self.verify(OperationType.REPLACE, self.any_non_whole_config_path, True) + self.verify(OperationType.ADD, self.any_non_whole_config_path, True) + + def test_validate__remove_op_non_whole_config__success(self): + self.verify(OperationType.REMOVE, self.any_non_whole_config_path, True) + + def test_validate__non_remove_op_whole_config__success(self): + self.verify(OperationType.REPLACE, self.whole_config_path, True) + self.verify(OperationType.ADD, self.whole_config_path, True) + + def test_validate__remove_op_whole_config__failure(self): + self.verify(OperationType.REMOVE, self.whole_config_path, False) + + def verify(self, operation_type, path, expected): + # Arrange + value = None + if operation_type in [OperationType.ADD, OperationType.REPLACE]: + value = Mock() + + operation = self.operation_wrapper.create(operation_type, path, value) + move = ps.JsonMove.from_operation(operation) + + # Act + actual = self.validator.validate(move, self.any_diff) + + # Assert + self.assertEqual(expected, actual) + +class TestUniqueLanesMoveValidator(unittest.TestCase): + def setUp(self): + self.validator = ps.UniqueLanesMoveValidator() + + def test_validate__no_port_table__success(self): + config = {"ACL_TABLE": {}} + 
self.validate_target_config(config) + + def test_validate__empty_port_table__success(self): + config = {"PORT": {}} + self.validate_target_config(config) + + def test_validate__single_lane__success(self): + config = {"PORT": {"Ethernet0": {"lanes": "66", "speed":"10000"}}} + self.validate_target_config(config) + + def test_validate__different_lanes_single_port___success(self): + config = {"PORT": {"Ethernet0": {"lanes": "66, 67, 68", "speed":"10000"}}} + self.validate_target_config(config) + + def test_validate__different_lanes_multi_ports___success(self): + config = {"PORT": { + "Ethernet0": {"lanes": "64, 65", "speed":"10000"}, + "Ethernet1": {"lanes": "66, 67, 68", "speed":"10000"}, + }} + self.validate_target_config(config) + + def test_validate__same_lanes_single_port___failure(self): + config = {"PORT": {"Ethernet0": {"lanes": "65, 65", "speed":"10000"}}} + self.validate_target_config(config, False) + + def validate_target_config(self, target_config, expected=True): + # Arrange + current_config = {} + diff = ps.Diff(current_config, target_config) + move = ps.JsonMove(diff, OperationType.REPLACE, [], []) + + # Act + actual = self.validator.validate(move, diff) + + # Assert + self.assertEqual(expected, actual) + +class TestFullConfigMoveValidator(unittest.TestCase): + def setUp(self): + self.any_current_config = Mock() + self.any_target_config = Mock() + self.any_simulated_config = Mock() + self.any_diff = ps.Diff(self.any_current_config, self.any_target_config) + self.any_move = Mock() + self.any_move.apply.side_effect = \ + create_side_effect_dict({(str(self.any_current_config),): self.any_simulated_config}) + + def test_validate__invalid_config_db_after_applying_move__failure(self): + # Arrange + config_wrapper = Mock() + config_wrapper.validate_config_db_config.side_effect = \ + create_side_effect_dict({(str(self.any_simulated_config),): False}) + validator = ps.FullConfigMoveValidator(config_wrapper) + + # Act and assert + 
self.assertFalse(validator.validate(self.any_move, self.any_diff)) + + def test_validate__valid_config_db_after_applying_move__success(self): + # Arrange + config_wrapper = Mock() + config_wrapper.validate_config_db_config.side_effect = \ + create_side_effect_dict({(str(self.any_simulated_config),): True}) + validator = ps.FullConfigMoveValidator(config_wrapper) + + # Act and assert + self.assertTrue(validator.validate(self.any_move, self.any_diff)) + +class TestCreateOnlyMoveValidator(unittest.TestCase): + def setUp(self): + self.validator = ps.CreateOnlyMoveValidator(ps.PathAddressing()) + self.any_diff = ps.Diff({}, {}) + + def test_validate__non_replace_operation__success(self): + # Assert + self.assertTrue(self.validator.validate( \ + ps.JsonMove(self.any_diff, OperationType.ADD, [], []), self.any_diff)) + self.assertTrue(self.validator.validate( \ + ps.JsonMove(self.any_diff, OperationType.REMOVE, [], []), self.any_diff)) + + def test_validate__no_create_only_field__success(self): + current_config = {"PORT": {}} + target_config = {"PORT": {}, "ACL_TABLE": {}} + self.verify_diff(current_config, target_config) + + def test_validate__same_create_only_field__success(self): + current_config = {"PORT": {"Ethernet0":{"lanes":"65"}}} + target_config = {"PORT": {"Ethernet0":{"lanes":"65"}}, "ACL_TABLE": {}} + self.verify_diff(current_config, target_config) + + def test_validate__different_create_only_field__failure(self): + current_config = {"PORT": {"Ethernet0":{"lanes":"65"}}} + target_config = {"PORT": {"Ethernet0":{"lanes":"66"}}, "ACL_TABLE": {}} + self.verify_diff(current_config, target_config, expected=False) + + def test_validate__different_create_only_field_directly_updated__failure(self): + current_config = {"PORT": {"Ethernet0":{"lanes":"65"}}} + target_config = {"PORT": {"Ethernet0":{"lanes":"66"}}, "ACL_TABLE": {}} + self.verify_diff(current_config, + target_config, + ["PORT", "Ethernet0", "lanes"], + ["PORT", "Ethernet0", "lanes"], + False) + + def 
test_validate__different_create_only_field_updating_parent__failure(self): + current_config = {"PORT": {"Ethernet0":{"lanes":"65"}}} + target_config = {"PORT": {"Ethernet0":{"lanes":"66"}}, "ACL_TABLE": {}} + self.verify_diff(current_config, + target_config, + ["PORT", "Ethernet0"], + ["PORT", "Ethernet0"], + False) + + def test_validate__different_create_only_field_updating_grandparent__failure(self): + current_config = {"PORT": {"Ethernet0":{"lanes":"65"}}} + target_config = {"PORT": {"Ethernet0":{"lanes":"66"}}, "ACL_TABLE": {}} + self.verify_diff(current_config, + target_config, + ["PORT"], + ["PORT"], + False) + + def test_validate__same_create_only_field_directly_updated__success(self): + current_config = {"PORT": {"Ethernet0":{"lanes":"65"}}} + target_config = {"PORT": {"Ethernet0":{"lanes":"65"}}, "ACL_TABLE": {}} + self.verify_diff(current_config, + target_config, + ["PORT", "Ethernet0", "lanes"], + ["PORT", "Ethernet0", "lanes"]) + + def test_validate__same_create_only_field_updating_parent__success(self): + current_config = {"PORT": {"Ethernet0":{"lanes":"65"}}} + target_config = {"PORT": {"Ethernet0":{"lanes":"65"}}, "ACL_TABLE": {}} + self.verify_diff(current_config, + target_config, + ["PORT", "Ethernet0"], + ["PORT", "Ethernet0"]) + + def test_validate__same_create_only_field_updating_grandparent__success(self): + current_config = {"PORT": {"Ethernet0":{"lanes":"65"}}} + target_config = {"PORT": {"Ethernet0":{"lanes":"65"}}, "ACL_TABLE": {}} + self.verify_diff(current_config, + target_config, + ["PORT"], + ["PORT"]) + + def verify_diff(self, current_config, target_config, current_config_tokens=None, target_config_tokens=None, expected=True): + # Arrange + current_config_tokens = current_config_tokens if current_config_tokens else [] + target_config_tokens = target_config_tokens if target_config_tokens else [] + diff = ps.Diff(current_config, target_config) + move = ps.JsonMove(diff, OperationType.REPLACE, current_config_tokens, target_config_tokens) 
+ + # Act + actual = self.validator.validate(move, diff) + + # Assert + self.assertEqual(expected, actual) + +class TestNoDependencyMoveValidator(unittest.TestCase): + def setUp(self): + path_addressing = ps.PathAddressing() + config_wrapper = ConfigWrapper() + self.validator = ps.NoDependencyMoveValidator(path_addressing, config_wrapper) + + def test_validate__add_full_config_has_dependencies__failure(self): + # Arrange + # CROPPED_CONFIG_DB_AS_JSON has dependencies between PORT and ACL_TABLE + diff = ps.Diff(Files.EMPTY_CONFIG_DB, Files.CROPPED_CONFIG_DB_AS_JSON) + move = ps.JsonMove(diff, OperationType.ADD, [], []) + + # Act and assert + self.assertFalse(self.validator.validate(move, diff)) + + def test_validate__add_full_config_no_dependencies__success(self): + # Arrange + diff = ps.Diff(Files.EMPTY_CONFIG_DB, Files.CONFIG_DB_NO_DEPENDENCIES) + move = ps.JsonMove(diff, OperationType.ADD, [], []) + + # Act and assert + self.assertTrue(self.validator.validate(move, diff)) + + def test_validate__add_table_has_no_dependencies__success(self): + # Arrange + target_config = Files.CROPPED_CONFIG_DB_AS_JSON + # prepare current config by removing ACL_TABLE from current config + current_config = self.prepare_config(target_config, jsonpatch.JsonPatch([ + {"op": "remove", "path":"/ACL_TABLE"} + ])) + diff = ps.Diff(current_config, target_config) + move = ps.JsonMove(diff, OperationType.ADD, ["ACL_TABLE"], ["ACL_TABLE"]) + + # Act and assert + self.assertTrue(self.validator.validate(move, diff)) + + def test_validate__remove_full_config_has_dependencies__failure(self): + # Arrange + # CROPPED_CONFIG_DB_AS_JSON has dependencies between PORT and ACL_TABLE + diff = ps.Diff(Files.CROPPED_CONFIG_DB_AS_JSON, Files.EMPTY_CONFIG_DB) + move = ps.JsonMove(diff, OperationType.REMOVE, [], []) + + # Act and assert + self.assertFalse(self.validator.validate(move, diff)) + + def test_validate__remove_full_config_no_dependencies__success(self): + # Arrange + diff = 
ps.Diff(Files.EMPTY_CONFIG_DB, Files.CONFIG_DB_NO_DEPENDENCIES) + move = ps.JsonMove(diff, OperationType.REMOVE, [], []) + + # Act and assert + self.assertTrue(self.validator.validate(move, diff)) + + def test_validate__remove_table_has_no_dependencies__success(self): + # Arrange + current_config = Files.CROPPED_CONFIG_DB_AS_JSON + target_config = self.prepare_config(current_config, jsonpatch.JsonPatch([ + {"op": "remove", "path":"/ACL_TABLE"} + ])) + diff = ps.Diff(current_config, target_config) + move = ps.JsonMove(diff, OperationType.REMOVE, ["ACL_TABLE"]) + + # Act and assert + self.assertTrue(self.validator.validate(move, diff)) + + def test_validate__replace_whole_config_item_added_ref_added__failure(self): + # Arrange + target_config = Files.SIMPLE_CONFIG_DB_INC_DEPS + # prepare current config by removing an item and its ref from target config + current_config = self.prepare_config(target_config, jsonpatch.JsonPatch([ + {"op": "replace", "path":"/ACL_TABLE/EVERFLOW/ports/0", "value":""}, + {"op": "remove", "path":"/PORT/Ethernet0"} + ])) + + diff = ps.Diff(current_config, target_config) + move = ps.JsonMove(diff, OperationType.REPLACE, [], []) + + # Act and assert + self.assertFalse(self.validator.validate(move, diff)) + + def test_validate__replace_whole_config_item_removed_ref_removed__false(self): + # Arrange + current_config = Files.SIMPLE_CONFIG_DB_INC_DEPS + # prepare target config by removing an item and its ref from current config + target_config = self.prepare_config(current_config, jsonpatch.JsonPatch([ + {"op": "replace", "path":"/ACL_TABLE/EVERFLOW/ports/0", "value":""}, + {"op": "remove", "path":"/PORT/Ethernet0"} + ])) + + diff = ps.Diff(current_config, target_config) + move = ps.JsonMove(diff, OperationType.REPLACE, [], []) + + # Act and assert + self.assertFalse(self.validator.validate(move, diff)) + + def test_validate__replace_whole_config_item_same_ref_added__true(self): + # Arrange + target_config = Files.SIMPLE_CONFIG_DB_INC_DEPS + # 
prepare current config by removing ref from target config + current_config = self.prepare_config(target_config, jsonpatch.JsonPatch([ + {"op": "replace", "path":"/ACL_TABLE/EVERFLOW/ports/0", "value":""} + ])) + + diff = ps.Diff(current_config, target_config) + move = ps.JsonMove(diff, OperationType.REPLACE, [], []) + + # Act and assert + self.assertTrue(self.validator.validate(move, diff)) + + def test_validate__replace_whole_config_item_same_ref_removed__true(self): + # Arrange + current_config= Files.SIMPLE_CONFIG_DB_INC_DEPS + # prepare target config by removing ref from current config + target_config = self.prepare_config(current_config, jsonpatch.JsonPatch([ + {"op": "replace", "path":"/ACL_TABLE/EVERFLOW/ports/0", "value":""} + ])) + + diff = ps.Diff(current_config, target_config) + move = ps.JsonMove(diff, OperationType.REPLACE, [], []) + + # Act and assert + self.assertTrue(self.validator.validate(move, diff)) + + def test_validate__replace_whole_config_item_same_ref_same__true(self): + # Arrange + current_config= Files.SIMPLE_CONFIG_DB_INC_DEPS + # prepare target config by removing ref from current config + target_config = current_config + + diff = ps.Diff(current_config, target_config) + move = ps.JsonMove(diff, OperationType.REPLACE, [], []) + + # Act and assert + self.assertTrue(self.validator.validate(move, diff)) + + def prepare_config(self, config, patch): + return patch.apply(config) + +class TestLowLevelMoveGenerator(unittest.TestCase): + def setUp(self): + path_addressing = PathAddressing() + self.generator = ps.LowLevelMoveGenerator(path_addressing) + + def test_generate__no_diff__no_moves(self): + self.verify() + + def test_generate__replace_key__replace_move(self): + self.verify(tc_ops=[{"op": "replace", 'path': '/PORT/Ethernet0/description', 'value':'any-desc'}]) + + def test_generate__leaf_key_missing__add_move(self): + self.verify( + cc_ops=[{"op": "remove", 'path': '/ACL_TABLE/EVERFLOW/policy_desc'}], + ex_ops=[{"op": "add", 'path': 
'/ACL_TABLE/EVERFLOW/policy_desc', 'value':'EVERFLOW'}] + ) + + def test_generate__leaf_key_additional__remove_move(self): + self.verify( + tc_ops=[{"op": "remove", 'path': '/ACL_TABLE/EVERFLOW/policy_desc'}] + ) + + def test_generate__table_missing__add_leafs_moves(self): + self.verify( + cc_ops=[{"op": "remove", 'path': '/VLAN'}], + ex_ops=[{'op': 'add', 'path': '/VLAN', 'value': {'Vlan1000': {'vlanid': '1000'}}}, + {'op': 'add', 'path': '/VLAN', 'value': {'Vlan1000': {'dhcp_servers': ['192.0.0.1']}}}, + {'op': 'add', 'path': '/VLAN', 'value': {'Vlan1000': {'dhcp_servers': ['192.0.0.2']}}}, + {'op': 'add', 'path': '/VLAN', 'value': {'Vlan1000': {'dhcp_servers': ['192.0.0.3']}}}, + {'op': 'add', 'path': '/VLAN', 'value': {'Vlan1000': {'dhcp_servers': ['192.0.0.4']}}}] + ) + + def test_generate__table_additional__remove_leafs_moves(self): + self.verify( + tc_ops=[{"op": "remove", 'path': '/VLAN'}], + ex_ops=[{'op': 'remove', 'path': '/VLAN/Vlan1000/vlanid'}, + {'op': 'remove', 'path': '/VLAN/Vlan1000/dhcp_servers/0'}, + {'op': 'remove', 'path': '/VLAN/Vlan1000/dhcp_servers/1'}, + {'op': 'remove', 'path': '/VLAN/Vlan1000/dhcp_servers/2'}, + {'op': 'remove', 'path': '/VLAN/Vlan1000/dhcp_servers/3'}] + ) + + def test_generate__leaf_table_missing__add_table(self): + self.verify( + tc_ops=[{"op": "add", 'path': '/NEW_TABLE', 'value':{}}] + ) + + def test_generate__leaf_table_additional__remove_table(self): + self.verify( + cc_ops=[{"op": "add", 'path': '/NEW_TABLE', 'value':{}}], + ex_ops=[{"op": "remove", 'path': '/NEW_TABLE'}] + ) + + def test_generate__replace_list_item__remove_add_replace_moves(self): + self.verify( + tc_ops=[{"op": "replace", 'path': '/ACL_TABLE/EVERFLOW/ports/0', 'value':'Ethernet0'}], + ex_ops=[ + {"op": "remove", 'path': '/ACL_TABLE/EVERFLOW/ports/0'}, + {"op": "add", 'path': '/ACL_TABLE/EVERFLOW/ports/0', 'value':'Ethernet0'}, + {"op": "replace", 'path': '/ACL_TABLE/EVERFLOW/ports/0', 'value':'Ethernet0'}, + ]) + + def 
test_generate__remove_list_item__remove_move(self): + self.verify( + tc_ops=[{"op": "remove", 'path': '/VLAN/Vlan1000/dhcp_servers/0'}]) + + def test_generate__remove_multiple_list_items__multiple_remove_moves(self): + self.verify( + tc_ops=[{"op": "remove", 'path': '/VLAN/Vlan1000/dhcp_servers/0'}, + {"op": "remove", 'path': '/VLAN/Vlan1000/dhcp_servers/0'}], + ex_ops=[{"op": "remove", 'path': '/VLAN/Vlan1000/dhcp_servers/0'}, + {"op": "remove", 'path': '/VLAN/Vlan1000/dhcp_servers/1'}] + ) + + def test_generate__remove_all_list_items__multiple_remove_moves(self): + self.verify( + tc_ops=[{"op": "replace", 'path': '/VLAN/Vlan1000/dhcp_servers', 'value':[]}], + ex_ops=[{"op": "remove", 'path': '/VLAN/Vlan1000/dhcp_servers/0'}, + {"op": "remove", 'path': '/VLAN/Vlan1000/dhcp_servers/2'}, + {"op": "remove", 'path': '/VLAN/Vlan1000/dhcp_servers/3'}, + {"op": "remove", 'path': '/VLAN/Vlan1000/dhcp_servers/1'}] + ) + + def test_generate__add_list_items__add_move(self): + self.verify( + tc_ops=[{"op": "add", 'path': '/VLAN/Vlan1000/dhcp_servers/0', 'value':'192.168.1.5'}] + ) + + def test_generate__add_multiple_list_items__multiple_add_moves(self): + self.verify( + tc_ops=[{"op": "add", 'path': '/VLAN/Vlan1000/dhcp_servers/0', 'value':'192.168.1.5'}, + {"op": "add", 'path': '/VLAN/Vlan1000/dhcp_servers/3', 'value':'192.168.1.6'}] + ) + + def test_generate__add_all_list_items__multiple_add_moves(self): + self.verify( + cc_ops=[{"op": "replace", 'path': '/VLAN/Vlan1000/dhcp_servers', 'value':[]}], + ex_ops=[{"op": "add", 'path': '/VLAN/Vlan1000/dhcp_servers/0', 'value':'192.0.0.1'}, + {"op": "add", 'path': '/VLAN/Vlan1000/dhcp_servers/0', 'value':'192.0.0.2'}, + {"op": "add", 'path': '/VLAN/Vlan1000/dhcp_servers/0', 'value':'192.0.0.3'}, + {"op": "add", 'path': '/VLAN/Vlan1000/dhcp_servers/0', 'value':'192.0.0.4'}] + ) + + def test_generate__replace_multiple_list_items__multiple_remove_add_replace_moves(self): + self.verify( + tc_ops=[{"op": "replace", 'path': 
'/VLAN/Vlan1000/dhcp_servers/0', 'value':'192.168.1.5'}, + {"op": "replace", 'path': '/VLAN/Vlan1000/dhcp_servers/3', 'value':'192.168.1.6'}], + ex_ops=[{"op": "remove", 'path': '/VLAN/Vlan1000/dhcp_servers/0'}, + {"op": "remove", 'path': '/VLAN/Vlan1000/dhcp_servers/3'}, + {"op": "add", 'path': '/VLAN/Vlan1000/dhcp_servers/0', 'value':'192.168.1.5'}, + {"op": "add", 'path': '/VLAN/Vlan1000/dhcp_servers/3', 'value':'192.168.1.6'}, + {"op": "replace", 'path': '/VLAN/Vlan1000/dhcp_servers/0', 'value':'192.168.1.5'}, + {"op": "replace", 'path': '/VLAN/Vlan1000/dhcp_servers/3', 'value':'192.168.1.6'}, + {"op": "replace", 'path': '/VLAN/Vlan1000/dhcp_servers/3', 'value':'192.168.1.5'}, + {"op": "replace", 'path': '/VLAN/Vlan1000/dhcp_servers/0', 'value':'192.168.1.6'}] + ) + + def test_generate__different_order_list_items__whole_list_replace_move(self): + self.verify( + tc_ops=[{"op": "replace", 'path': '/VLAN/Vlan1000/dhcp_servers', 'value':[ + "192.0.0.4", + "192.0.0.3", + "192.0.0.2", + "192.0.0.1" + ]}]) + + def test_generate__whole_list_missing__add_items_moves(self): + self.verify( + cc_ops=[{"op": "remove", 'path': '/VLAN/Vlan1000/dhcp_servers'}], + ex_ops=[{'op': 'add', 'path': '/VLAN/Vlan1000/dhcp_servers', 'value': ['192.0.0.1']}, + {'op': 'add', 'path': '/VLAN/Vlan1000/dhcp_servers', 'value': ['192.0.0.2']}, + {'op': 'add', 'path': '/VLAN/Vlan1000/dhcp_servers', 'value': ['192.0.0.3']}, + {'op': 'add', 'path': '/VLAN/Vlan1000/dhcp_servers', 'value': ['192.0.0.4']}]) + + def test_generate__whole_list_additional__remove_items_moves(self): + self.verify( + tc_ops=[{"op": "remove", 'path': '/VLAN/Vlan1000/dhcp_servers'}], + ex_ops=[{'op': 'remove', 'path': '/VLAN/Vlan1000/dhcp_servers/0'}, + {'op': 'remove', 'path': '/VLAN/Vlan1000/dhcp_servers/1'}, + {'op': 'remove', 'path': '/VLAN/Vlan1000/dhcp_servers/2'}, + {'op': 'remove', 'path': '/VLAN/Vlan1000/dhcp_servers/3'}]) + + def test_generate__empty_list_missing__add_whole_list(self): + self.verify( + 
tc_ops=[{"op": "replace", 'path': '/VLAN/Vlan1000/dhcp_servers', 'value':[]}], + cc_ops=[{"op": "remove", 'path': '/VLAN/Vlan1000/dhcp_servers'}], + ex_ops=[{'op': 'add', 'path': '/VLAN/Vlan1000/dhcp_servers', 'value':[]}]) + + def test_generate__empty_list_additional__remove_whole_list(self): + self.verify( + tc_ops=[{"op": "remove", 'path': '/VLAN/Vlan1000/dhcp_servers'}], + cc_ops=[{"op": "replace", 'path': '/VLAN/Vlan1000/dhcp_servers', 'value':[]}], + ex_ops=[{'op': 'remove', 'path': '/VLAN/Vlan1000/dhcp_servers'}]) + + def test_generate__dpb_1_to_4_example(self): + # Arrange + diff = ps.Diff(Files.DPB_1_SPLIT_FULL_CONFIG, Files.DPB_4_SPLITS_FULL_CONFIG) + + # Act + moves = list(self.generator.generate(diff)) + + # Assert + self.verify_moves([{'op': 'replace', 'path': '/PORT/Ethernet0/alias', 'value': 'Eth1/1'}, + {'op': 'replace', 'path': '/PORT/Ethernet0/lanes', 'value': '65'}, + {'op': 'replace', 'path': '/PORT/Ethernet0/description', 'value': ''}, + {'op': 'replace', 'path': '/PORT/Ethernet0/speed', 'value': '10000'}, + {'op': 'add', 'path': '/PORT/Ethernet1', 'value': {'alias': 'Eth1/2'}}, + {'op': 'add', 'path': '/PORT/Ethernet1', 'value': {'lanes': '66'}}, + {'op': 'add', 'path': '/PORT/Ethernet1', 'value': {'description': ''}}, + {'op': 'add', 'path': '/PORT/Ethernet1', 'value': {'speed': '10000'}}, + {'op': 'add', 'path': '/PORT/Ethernet2', 'value': {'alias': 'Eth1/3'}}, + {'op': 'add', 'path': '/PORT/Ethernet2', 'value': {'lanes': '67'}}, + {'op': 'add', 'path': '/PORT/Ethernet2', 'value': {'description': ''}}, + {'op': 'add', 'path': '/PORT/Ethernet2', 'value': {'speed': '10000'}}, + {'op': 'add', 'path': '/PORT/Ethernet3', 'value': {'alias': 'Eth1/4'}}, + {'op': 'add', 'path': '/PORT/Ethernet3', 'value': {'lanes': '68'}}, + {'op': 'add', 'path': '/PORT/Ethernet3', 'value': {'description': ''}}, + {'op': 'add', 'path': '/PORT/Ethernet3', 'value': {'speed': '10000'}}, + {'op': 'add', 'path': '/ACL_TABLE/NO-NSW-PACL-V4/ports/1', 'value': 'Ethernet1'}, 
+ {'op': 'add', 'path': '/ACL_TABLE/NO-NSW-PACL-V4/ports/1', 'value': 'Ethernet2'}, + {'op': 'add', 'path': '/ACL_TABLE/NO-NSW-PACL-V4/ports/1', 'value': 'Ethernet3'}, + {'op': 'add', 'path': '/VLAN_MEMBER/Vlan100|Ethernet1', 'value': {'tagging_mode': 'untagged'}}, + {'op': 'add', 'path': '/VLAN_MEMBER/Vlan100|Ethernet2', 'value': {'tagging_mode': 'untagged'}}, + {'op': 'add', 'path': '/VLAN_MEMBER/Vlan100|Ethernet3', 'value': {'tagging_mode': 'untagged'}}], + moves) + + def test_generate__dpb_4_to_1_example(self): + # Arrange + diff = ps.Diff(Files.DPB_4_SPLITS_FULL_CONFIG, Files.DPB_1_SPLIT_FULL_CONFIG) + + # Act + moves = list(self.generator.generate(diff)) + + # Assert + self.verify_moves([{'op': 'replace', 'path': '/PORT/Ethernet0/alias', 'value': 'Eth1'}, + {'op': 'replace', 'path': '/PORT/Ethernet0/lanes', 'value': '65, 66, 67, 68'}, + {'op': 'replace', 'path': '/PORT/Ethernet0/description', 'value': 'Ethernet0 100G link'}, + {'op': 'replace', 'path': '/PORT/Ethernet0/speed', 'value': '100000'}, + {'op': 'remove', 'path': '/PORT/Ethernet1/alias'}, + {'op': 'remove', 'path': '/PORT/Ethernet1/lanes'}, + {'op': 'remove', 'path': '/PORT/Ethernet1/description'}, + {'op': 'remove', 'path': '/PORT/Ethernet1/speed'}, + {'op': 'remove', 'path': '/PORT/Ethernet2/alias'}, + {'op': 'remove', 'path': '/PORT/Ethernet2/lanes'}, + {'op': 'remove', 'path': '/PORT/Ethernet2/description'}, + {'op': 'remove', 'path': '/PORT/Ethernet2/speed'}, + {'op': 'remove', 'path': '/PORT/Ethernet3/alias'}, + {'op': 'remove', 'path': '/PORT/Ethernet3/lanes'}, + {'op': 'remove', 'path': '/PORT/Ethernet3/description'}, + {'op': 'remove', 'path': '/PORT/Ethernet3/speed'}, + {'op': 'remove', 'path': '/ACL_TABLE/NO-NSW-PACL-V4/ports/1'}, + {'op': 'remove', 'path': '/ACL_TABLE/NO-NSW-PACL-V4/ports/2'}, + {'op': 'remove', 'path': '/ACL_TABLE/NO-NSW-PACL-V4/ports/3'}, + {'op': 'remove', 'path': '/VLAN_MEMBER/Vlan100|Ethernet1/tagging_mode'}, + {'op': 'remove', 'path': 
'/VLAN_MEMBER/Vlan100|Ethernet2/tagging_mode'}, + {'op': 'remove', 'path': '/VLAN_MEMBER/Vlan100|Ethernet3/tagging_mode'}], + moves) + + def verify(self, tc_ops=None, cc_ops=None, ex_ops=None): + """ + Generate a diff where target config is modified using the given tc_ops. + The expected low level moves should ex_ops if it is not None, otherwise tc_ops + """ + # Arrange + diff = self.get_diff(target_config_ops=tc_ops, current_config_ops=cc_ops) + expected = ex_ops if ex_ops is not None else \ + tc_ops if tc_ops is not None else \ + [] + + # Act + actual = self.generator.generate(diff) + + # Assert + self.verify_moves(expected, actual) + + def verify_moves(self, ops, moves): + moves_ops = [list(move.patch)[0] for move in moves] + self.assertCountEqual(ops, moves_ops) + + def get_diff(self, target_config_ops = None, current_config_ops = None): + current_config = Files.CROPPED_CONFIG_DB_AS_JSON + if current_config_ops: + cc_patch = jsonpatch.JsonPatch(current_config_ops) + current_config = cc_patch.apply(current_config) + + target_config = Files.CROPPED_CONFIG_DB_AS_JSON + if target_config_ops: + tc_patch = jsonpatch.JsonPatch(target_config_ops) + target_config = tc_patch.apply(target_config) + + return ps.Diff(current_config, target_config) + +class TestUpperLevelMoveExtender(unittest.TestCase): + def setUp(self): + self.extender = ps.UpperLevelMoveExtender() + self.any_diff = ps.Diff(Files.ANY_CONFIG_DB, Files.ANY_CONFIG_DB) + + def test_extend__root_level_move__no_extended_moves(self): + self.verify(OperationType.REMOVE, []) + self.verify(OperationType.ADD, [], []) + self.verify(OperationType.REPLACE, [], []) + + def test_extend__remove_key_upper_level_does_not_exist__remove_upper_level(self): + self.verify(OperationType.REMOVE, + ["ACL_TABLE", "EVERFLOW", "policy_desc"], + tc_ops=[{'op':'remove', 'path':'/ACL_TABLE/EVERFLOW'}], + ex_ops=[{'op':'remove', 'path':'/ACL_TABLE/EVERFLOW'}]) + + def 
test_extend__remove_key_upper_level_does_exist__replace_upper_level(self): + self.verify(OperationType.REMOVE, + ["ACL_TABLE", "EVERFLOW", "policy_desc"], + tc_ops=[{'op':'remove', 'path':'/ACL_TABLE/EVERFLOW/policy_desc'}], + ex_ops=[{'op':'replace', 'path':'/ACL_TABLE/EVERFLOW', 'value':{ + "ports": [ + "Ethernet8" + ], + "stage": "ingress", + "type": "MIRROR" + }}]) + + def test_extend__remove_list_item_upper_level_does_not_exist__remove_upper_level(self): + self.verify(OperationType.REMOVE, + ["VLAN", "Vlan1000", "dhcp_servers", 1], + tc_ops=[{'op':'remove', 'path':'/VLAN/Vlan1000/dhcp_servers'}], + ex_ops=[{'op':'remove', 'path':'/VLAN/Vlan1000/dhcp_servers'}]) + + def test_extend__remove_list_item_upper_level_does_exist__replace_upper_level(self): + self.verify(OperationType.REMOVE, + ["VLAN", "Vlan1000", "dhcp_servers", 1], + tc_ops=[{'op':'remove', 'path':'/VLAN/Vlan1000/dhcp_servers/1'}], + ex_ops=[{'op':'replace', 'path':'/VLAN/Vlan1000/dhcp_servers', 'value':[ + "192.0.0.1", + "192.0.0.3", + "192.0.0.4" + ]}]) + + def test_extend__add_key_upper_level_missing__add_upper_level(self): + self.verify(OperationType.ADD, + ["ACL_TABLE", "EVERFLOW", "policy_desc"], + ["ACL_TABLE", "EVERFLOW", "policy_desc"], + cc_ops=[{'op':'remove', 'path':'/ACL_TABLE/EVERFLOW'}], + ex_ops=[{'op':'add', 'path':'/ACL_TABLE/EVERFLOW', 'value':{ + "policy_desc": "EVERFLOW", + "ports": [ + "Ethernet8" + ], + "stage": "ingress", + "type": "MIRROR" + }}]) + + def test_extend__add_key_upper_level_exist__replace_upper_level(self): + self.verify(OperationType.ADD, + ["ACL_TABLE", "EVERFLOW", "policy_desc"], + ["ACL_TABLE", "EVERFLOW", "policy_desc"], + cc_ops=[{'op':'remove', 'path':'/ACL_TABLE/EVERFLOW/policy_desc'}], + ex_ops=[{'op':'replace', 'path':'/ACL_TABLE/EVERFLOW', 'value':{ + "policy_desc": "EVERFLOW", + "ports": [ + "Ethernet8" + ], + "stage": "ingress", + "type": "MIRROR" + }}]) + + def test_extend__add_list_item_upper_level_missing__add_upper_level(self): + 
self.verify(OperationType.ADD, + ["VLAN", "Vlan1000", "dhcp_servers", 1], + ["VLAN", "Vlan1000", "dhcp_servers", 1], + cc_ops=[{'op':'remove', 'path':'/VLAN/Vlan1000/dhcp_servers'}], + ex_ops=[{'op':'add', 'path':'/VLAN/Vlan1000/dhcp_servers', 'value':[ + "192.0.0.1", + "192.0.0.2", + "192.0.0.3", + "192.0.0.4" + ]}]) + + def test_extend__add_list_item_upper_level_exist__replace_upper_level(self): + self.verify(OperationType.ADD, + ["VLAN", "Vlan1000", "dhcp_servers", 1], + ["VLAN", "Vlan1000", "dhcp_servers", 1], + cc_ops=[{'op':'remove', 'path':'/VLAN/Vlan1000/dhcp_servers/1'}], + ex_ops=[{'op':'replace', 'path':'/VLAN/Vlan1000/dhcp_servers', 'value':[ + "192.0.0.1", + "192.0.0.2", + "192.0.0.3", + "192.0.0.4" + ]}]) + + def test_extend__add_table__replace_whole_config(self): + self.verify(OperationType.ADD, + ["ACL_TABLE"], + ["ACL_TABLE"], + cc_ops=[{'op':'remove', 'path':'/ACL_TABLE'}], + ex_ops=[{'op':'replace', 'path':'', 'value':Files.CROPPED_CONFIG_DB_AS_JSON}]) + + def test_extend__replace_key__replace_upper_level(self): + self.verify(OperationType.REPLACE, + ["ACL_TABLE", "EVERFLOW", "policy_desc"], + ["ACL_TABLE", "EVERFLOW", "policy_desc"], + cc_ops=[{'op':'replace', 'path':'/ACL_TABLE/EVERFLOW/policy_desc', 'value':'old_desc'}], + ex_ops=[{'op':'replace', 'path':'/ACL_TABLE/EVERFLOW', 'value':{ + "policy_desc": "EVERFLOW", + "ports": [ + "Ethernet8" + ], + "stage": "ingress", + "type": "MIRROR" + }}]) + + def test_extend__replace_list_item__replace_upper_level(self): + self.verify(OperationType.REPLACE, + ["VLAN", "Vlan1000", "dhcp_servers", 1], + ["VLAN", "Vlan1000", "dhcp_servers", 1], + cc_ops=[{'op':'replace', 'path':'/VLAN/Vlan1000/dhcp_servers/1', 'value':'192.0.0.7'}], + ex_ops=[{'op':'replace', 'path':'/VLAN/Vlan1000/dhcp_servers', 'value':[ + "192.0.0.1", + "192.0.0.2", + "192.0.0.3", + "192.0.0.4" + ]}]) + + def test_extend__replace_table__replace_whole_config(self): + self.verify(OperationType.REPLACE, + ["VLAN"], + ["VLAN"], + 
cc_ops=[{'op':'replace', 'path':'/VLAN/Vlan1000/dhcp_servers/1', 'value':'192.0.0.7'}], + ex_ops=[{'op':'replace', 'path':'', 'value':Files.CROPPED_CONFIG_DB_AS_JSON}]) + + def verify(self, op_type, ctokens, ttokens=None, cc_ops=[], tc_ops=[], ex_ops=[]): + """ + cc_ops, tc_ops are used to build the diff object. + diff, op_type, ctokens, ttokens are used to build the move. + move is extended and the result should match ex_ops. + """ + # Arrange + current_config=jsonpatch.JsonPatch(cc_ops).apply(Files.CROPPED_CONFIG_DB_AS_JSON) + target_config=jsonpatch.JsonPatch(tc_ops).apply(Files.CROPPED_CONFIG_DB_AS_JSON) + diff = ps.Diff(current_config, target_config) + move = ps.JsonMove(diff, op_type, ctokens, ttokens) + + # Act + moves = self.extender.extend(move, diff) + + # Assert + self.verify_moves(ex_ops, moves) + + def verify_moves(self, ex_ops, moves): + moves_ops = [list(move.patch)[0] for move in moves] + self.assertCountEqual(ex_ops, moves_ops) + +class TestDeleteInsteadOfReplaceMoveExtender(unittest.TestCase): + def setUp(self): + self.extender = ps.DeleteInsteadOfReplaceMoveExtender() + + def test_extend__non_replace__no_extended_moves(self): + self.verify(OperationType.REMOVE, + ["ACL_TABLE"], + tc_ops=[{'op':'remove', 'path':'/ACL_TABLE'}], + ex_ops=[]) + self.verify(OperationType.ADD, + ["ACL_TABLE"], + ["ACL_TABLE"], + cc_ops=[{'op':'remove', 'path':'/ACL_TABLE'}], + ex_ops=[]) + + def test_extend__replace_key__delete_key(self): + self.verify(OperationType.REPLACE, + ["ACL_TABLE", "EVERFLOW", "policy_desc"], + ["ACL_TABLE", "EVERFLOW", "policy_desc"], + cc_ops=[{'op':'replace', 'path':'/ACL_TABLE/EVERFLOW/policy_desc', 'value':'old_desc'}], + ex_ops=[{'op':'remove', 'path':'/ACL_TABLE/EVERFLOW/policy_desc'}]) + + def test_extend__replace_list_item__delete_list_item(self): + self.verify(OperationType.REPLACE, + ["VLAN", "Vlan1000", "dhcp_servers", 1], + ["VLAN", "Vlan1000", "dhcp_servers", 1], + cc_ops=[{'op':'replace', 'path':'/VLAN/Vlan1000/dhcp_servers/1', 
'value':'192.0.0.7'}], + ex_ops=[{'op':'remove', 'path':'/VLAN/Vlan1000/dhcp_servers/1'}]) + + def test_extend__replace_table__delete_table(self): + self.verify(OperationType.REPLACE, + ["ACL_TABLE"], + ["ACL_TABLE"], + cc_ops=[{'op':'replace', 'path':'/ACL_TABLE/EVERFLOW/policy_desc', 'value':'old_desc'}], + ex_ops=[{'op':'remove', 'path':'/ACL_TABLE'}]) + + def test_extend__replace_whole_config__delete_whole_config(self): + self.verify(OperationType.REPLACE, + [], + [], + cc_ops=[{'op':'replace', 'path':'/ACL_TABLE/EVERFLOW/policy_desc', 'value':'old_desc'}], + ex_ops=[{'op':'remove', 'path':''}]) + + def verify(self, op_type, ctokens, ttokens=None, cc_ops=[], tc_ops=[], ex_ops=[]): + """ + cc_ops, tc_ops are used to build the diff object. + diff, op_type, ctokens, ttokens are used to build the move. + move is extended and the result should match ex_ops. + """ + # Arrange + current_config=jsonpatch.JsonPatch(cc_ops).apply(Files.CROPPED_CONFIG_DB_AS_JSON) + target_config=jsonpatch.JsonPatch(tc_ops).apply(Files.CROPPED_CONFIG_DB_AS_JSON) + diff = ps.Diff(current_config, target_config) + move = ps.JsonMove(diff, op_type, ctokens, ttokens) + + # Act + moves = self.extender.extend(move, diff) + + # Assert + self.verify_moves(ex_ops, moves) + + def verify_moves(self, ex_ops, moves): + moves_ops = [list(move.patch)[0] for move in moves] + self.assertCountEqual(ex_ops, moves_ops) + +class TestDeleteRefsMoveExtender(unittest.TestCase): + def setUp(self): + self.extender = ps.DeleteRefsMoveExtender(PathAddressing()) + + def test_extend__non_delete_ops__no_extended_moves(self): + self.verify(OperationType.ADD, + ["ACL_TABLE"], + ["ACL_TABLE"], + cc_ops=[{'op':'remove', 'path':'/ACL_TABLE'}], + ex_ops=[]) + self.verify(OperationType.REPLACE, + ["ACL_TABLE"], + ["ACL_TABLE"], + cc_ops=[{'op':'remove', 'path':'/ACL_TABLE/EVERFLOW'}], + ex_ops=[]) + + def test_extend__path_with_no_refs__no_extended_moves(self): + self.verify(OperationType.REMOVE, + ["ACL_TABLE"], + 
tc_ops=[{'op':'remove', 'path':'/ACL_TABLE'}], + ex_ops=[]) + + def test_extend__path_with_direct_refs__extended_moves(self): + self.verify(OperationType.REMOVE, + ["PORT", "Ethernet0"], + tc_ops=[{'op':'remove', 'path':'/PORT/Ethernet0'}], + ex_ops=[{'op': 'remove', 'path': '/VLAN_MEMBER/Vlan1000|Ethernet0'}, + {'op': 'remove', 'path': '/ACL_TABLE/NO-NSW-PACL-V4/ports/0'}]) + + def test_extend__path_with_refs_to_children__extended_moves(self): + self.verify(OperationType.REMOVE, + ["PORT"], + tc_ops=[{'op':'remove', 'path':'/PORT/Ethernet0'}], + ex_ops=[{'op': 'remove', 'path': '/VLAN_MEMBER/Vlan1000|Ethernet0'}, + {'op': 'remove', 'path': '/ACL_TABLE/NO-NSW-PACL-V4/ports/0'}, + {'op': 'remove', 'path': '/VLAN_MEMBER/Vlan1000|Ethernet4'}, + {'op': 'remove', 'path': '/ACL_TABLE/DATAACL/ports/0'}, + {'op': 'remove', 'path': '/VLAN_MEMBER/Vlan1000|Ethernet8'}, + {'op': 'remove', 'path': '/ACL_TABLE/EVERFLOWV6/ports/0'}, + {'op': 'remove', 'path': '/ACL_TABLE/EVERFLOW/ports/0'}, + {'op': 'remove', 'path': '/ACL_TABLE/EVERFLOWV6/ports/1'}]) + + def verify(self, op_type, ctokens, ttokens=None, cc_ops=[], tc_ops=[], ex_ops=[]): + """ + cc_ops, tc_ops are used to build the diff object. + diff, op_type, ctokens, ttokens are used to build the move. + move is extended and the result should match ex_ops. 
+ """ + # Arrange + current_config=jsonpatch.JsonPatch(cc_ops).apply(Files.CROPPED_CONFIG_DB_AS_JSON) + target_config=jsonpatch.JsonPatch(tc_ops).apply(Files.CROPPED_CONFIG_DB_AS_JSON) + diff = ps.Diff(current_config, target_config) + move = ps.JsonMove(diff, op_type, ctokens, ttokens) + + # Act + moves = self.extender.extend(move, diff) + + # Assert + self.verify_moves(ex_ops, moves) + + def verify_moves(self, ex_ops, moves): + moves_ops = [list(move.patch)[0] for move in moves] + self.assertCountEqual(ex_ops, moves_ops) + +class TestSortAlgorithmFactory(unittest.TestCase): + def test_dfs_sorter(self): + self.verify(ps.Algorithm.DFS, ps.DfsSorter) + + def test_bfs_sorter(self): + self.verify(ps.Algorithm.BFS, ps.BfsSorter) + + def test_memoization_sorter(self): + self.verify(ps.Algorithm.MEMOIZATION, ps.MemoizationSorter) + + def verify(self, algo, algo_class): + # Arrange + factory = ps.SortAlgorithmFactory(OperationWrapper(), ConfigWrapper(), PathAddressing()) + expected_generators = [ps.LowLevelMoveGenerator] + expected_extenders = [ps.UpperLevelMoveExtender, ps.DeleteInsteadOfReplaceMoveExtender, ps.DeleteRefsMoveExtender] + expected_validator = [ps.DeleteWholeConfigMoveValidator, + ps.FullConfigMoveValidator, + ps.NoDependencyMoveValidator, + ps.UniqueLanesMoveValidator, + ps.CreateOnlyMoveValidator] + + # Act + sorter = factory.create(algo) + actual_generators = [type(item) for item in sorter.move_wrapper.move_generators] + actual_extenders = [type(item) for item in sorter.move_wrapper.move_extenders] + actual_validators = [type(item) for item in sorter.move_wrapper.move_validators] + + # Assert + self.assertIsInstance(sorter, algo_class) + self.assertCountEqual(expected_generators, actual_generators) + self.assertCountEqual(expected_extenders, actual_extenders) + self.assertCountEqual(expected_validator, actual_validators) + +class TestPatchSorter(unittest.TestCase): + def create_patch_sorter(self, config=None): + if config is None: + 
config=Files.CROPPED_CONFIG_DB_AS_JSON + config_wrapper = ConfigWrapper() + config_wrapper.get_config_db_as_json = MagicMock(return_value=config) + patch_wrapper = PatchWrapper(config_wrapper) + operation_wrapper = OperationWrapper() + path_addressing= ps.PathAddressing() + sort_algorithm_factory = ps.SortAlgorithmFactory(operation_wrapper, config_wrapper, path_addressing) + + return ps.PatchSorter(config_wrapper, patch_wrapper, sort_algorithm_factory) + + def test_sort__empty_patch__returns_empty_changes_list(self): + # Arrange + patch = jsonpatch.JsonPatch([]) + expected = [] + + # Act + actual = self.create_patch_sorter().sort(patch) + + # Assert + self.assertCountEqual(expected, actual) + + def test_sort__patch_with_single_simple_operation__returns_one_change(self): + # Arrange + patch = jsonpatch.JsonPatch([{"op":"remove", "path":"/VLAN/Vlan1000/dhcp_servers/0"}]) + expected = [JsonChange(patch)] + + # Act + actual = self.create_patch_sorter().sort(patch) + + # Assert + self.assertCountEqual(expected, actual) + + def test_sort__replacing_create_only_field__success(self): + # Arrange + patch = jsonpatch.JsonPatch([{"op":"replace", "path": "/PORT/Ethernet0/lanes", "value":"67"}]) + + # Act + actual = self.create_patch_sorter(Files.DPB_1_SPLIT_FULL_CONFIG).sort(patch) + + # Assert + self.assertNotEqual(None, actual) + + def test_sort__inter_dependency_within_same_table__success(self): + # Arrange + patch = jsonpatch.JsonPatch([{"op":"add", "path":"/VLAN_INTERFACE", "value": { + "Vlan1000|fc02:1000::1/64": {}, + "Vlan1000|192.168.0.1/21": {}, + "Vlan1000": {} + }}]) + expected = [ + JsonChange(jsonpatch.JsonPatch([{"op": "add", "path": "/VLAN_INTERFACE", "value": {"Vlan1000": {}}}])), + JsonChange(jsonpatch.JsonPatch([{"op": "add", "path": "/VLAN_INTERFACE/Vlan1000|fc02:1000::1~164", "value": {}}])), + JsonChange(jsonpatch.JsonPatch([{"op": "add", "path": "/VLAN_INTERFACE/Vlan1000|192.168.0.1~121", "value": {}}])) + ] + + # Act + actual = 
self.create_patch_sorter().sort(patch) + + # Assert + self.assertListEqual(expected, actual) + + def test_sort__add_table__success(self): + self.verify(cc_ops=[{"op":"remove", "path":"/ACL_TABLE"}]) + + def test_sort__remove_table__success(self): + self.verify(tc_ops=[{"op":"remove", "path":"/ACL_TABLE"}]) + + def test_sort__modify_value_in_existing_table__success(self): + self.verify(tc_ops=[{"op":"replace", "path":"/ACL_TABLE/EVERFLOW/stage", "value":"egress"}]) + + def test_sort__modify_value_in_existing_array__success(self): + self.verify(tc_ops=[{"op":"replace", "path":"/ACL_TABLE/EVERFLOWV6/ports/0", "value":"Ethernet0"}]) + + def test_sort__add_value_to_existing_array__success(self): + self.verify(tc_ops=[{"op":"add", "path":"/ACL_TABLE/EVERFLOWV6/ports/0", "value":"Ethernet0"}]) + + def test_sort__add_new_key_to_existing_table__success(self): + self.verify(cc_ops=[{"op":"remove", "path":"/ACL_TABLE/EVERFLOWV6"}]) + + def test_sort__remove_2_items_with_dependency_from_different_tables__success(self): + self.verify(tc_ops=[{"op":"remove", "path":"/PORT/Ethernet0"}, + {"op":"remove", "path":"/VLAN_MEMBER/Vlan1000|Ethernet0"}, + {"op":"remove", "path":"/ACL_TABLE/NO-NSW-PACL-V4"}], # removing ACL from current and target + cc_ops=[{"op":"remove", "path":"/ACL_TABLE/NO-NSW-PACL-V4"}]) + + def test_sort__add_2_items_with_dependency_from_different_tables__success(self): + self.verify(tc_ops=[{"op":"remove", "path":"/ACL_TABLE/NO-NSW-PACL-V4"}], # removing ACL from current and target + cc_ops=[{"op":"remove", "path":"/PORT/Ethernet0"}, + {"op":"remove", "path":"/VLAN_MEMBER/Vlan1000|Ethernet0"}, + {"op":"remove", "path":"/ACL_TABLE/NO-NSW-PACL-V4"}]) + + def test_sort__remove_2_items_with_dependency_from_same_table__success(self): + self.verify(tc_ops=[{"op":"replace", "path":"", "value":Files.CONFIG_DB_WITH_INTERFACE}, + {"op":"remove", "path":"/INTERFACE/Ethernet8"}, + {"op":"remove", "path":"/INTERFACE/Ethernet8|10.0.0.1~130"}], + cc_ops=[{"op":"replace", 
"path":"", "value":Files.CONFIG_DB_WITH_INTERFACE}]) + + def test_sort__add_2_items_with_dependency_from_same_table__success(self): + self.verify(tc_ops=[{"op":"replace", "path":"", "value":Files.CONFIG_DB_WITH_INTERFACE}], + cc_ops=[{"op":"replace", "path":"", "value":Files.CONFIG_DB_WITH_INTERFACE}, + {"op":"remove", "path":"/INTERFACE/Ethernet8"}, + {"op":"remove", "path":"/INTERFACE/Ethernet8|10.0.0.1~130"}]) + + def test_sort__replace_mandatory_item__success(self): + self.verify(tc_ops=[{"op":"replace", "path":"/ACL_TABLE/EVERFLOWV6/type", "value":"L2"}]) + + def test_sort__dpb_1_to_4__success(self): + self.verify(tc_ops=[{"op":"replace", "path":"", "value":Files.DPB_4_SPLITS_FULL_CONFIG}], + cc_ops=[{"op":"replace", "path":"", "value":Files.DPB_1_SPLIT_FULL_CONFIG}]) + + def test_sort__dpb_4_to_1__success(self): + self.verify(tc_ops=[{"op":"replace", "path":"", "value":Files.DPB_1_SPLIT_FULL_CONFIG}], + cc_ops=[{"op":"replace", "path":"", "value":Files.DPB_4_SPLITS_FULL_CONFIG}]) + + def test_sort__remove_an_item_with_default_value__success(self): + self.verify(tc_ops=[{"op":"remove", "path":"/ACL_TABLE/EVERFLOW/stage"}]) + + def test_sort__modify_items_with_dependencies_using_must__success(self): + self.verify(tc_ops=[{"op":"replace", "path":"", "value":Files.CONFIG_DB_WITH_CRM}, + {"op":"replace", "path":"/CRM/Config/acl_counter_high_threshold", "value":"60"}, + {"op":"replace", "path":"/CRM/Config/acl_counter_low_threshold", "value":"50"}], + cc_ops=[{"op":"replace", "path":"", "value":Files.CONFIG_DB_WITH_CRM}]) + + # in the following example, it is possible to start with acl_counter_high_threshold + self.verify(tc_ops=[{"op":"replace", "path":"", "value":Files.CONFIG_DB_WITH_CRM}, + {"op":"replace", "path":"/CRM/Config/acl_counter_high_threshold", "value":"80"}, + {"op":"replace", "path":"/CRM/Config/acl_counter_low_threshold", "value":"60"}], + cc_ops=[{"op":"replace", "path":"", "value":Files.CONFIG_DB_WITH_CRM}]) + + def verify(self, cc_ops=[], 
tc_ops=[]): + # Arrange + config_wrapper=ConfigWrapper() + target_config=jsonpatch.JsonPatch(tc_ops).apply(Files.CROPPED_CONFIG_DB_AS_JSON) + current_config=jsonpatch.JsonPatch(cc_ops).apply(Files.CROPPED_CONFIG_DB_AS_JSON) + patch=jsonpatch.make_patch(current_config, target_config) + + # Act + actual = self.create_patch_sorter(current_config).sort(patch) + + # Assert + simulated_config = current_config + for move in actual: + simulated_config = move.apply(simulated_config) + self.assertTrue(config_wrapper.validate_config_db_config(simulated_config)) + self.assertEqual(target_config, simulated_config)