From c5c105f4ae9ec5779758215c73cc3e7a7d8c6b42 Mon Sep 17 00:00:00 2001 From: Nazarii Hnydyn Date: Wed, 30 Mar 2022 15:21:00 +0300 Subject: [PATCH] [PBH] Implement Edit Flows (#2093) PBH Edit Flows is a second phase of PBH feature implementation. It allows user to modify the already existing objects. PBH Edit Flows offer a full entity update which assumes Config DB field ADD/UPDATE/REMOVE processing. HLD: Azure/SONiC#909 - What I did Implemented Edit Flows in scope of PBH enhancement - How I did it Implementation is done according to the PBH HLD Signed-off-by: Nazarii Hnydyn --- clear/main.py | 10 +- config/plugins/pbh.py | 1134 ++++++++++++++++++------- show/plugins/pbh.py | 58 +- tests/mock_tables/dbconnector.py | 38 +- tests/pbh_input/assert_show_output.py | 17 + tests/pbh_input/state_db.json | 26 + tests/pbh_test.py | 260 +++++- utilities_common/db.py | 4 +- 8 files changed, 1181 insertions(+), 366 deletions(-) create mode 100644 tests/pbh_input/state_db.json diff --git a/clear/main.py b/clear/main.py index 3ba0a1d735..1ad42ad786 100755 --- a/clear/main.py +++ b/clear/main.py @@ -4,10 +4,10 @@ import sys import click import utilities_common.cli as clicommon -import json from utilities_common import util_base from show.plugins.pbh import read_pbh_counters +from config.plugins.pbh import serialize_pbh_counters from . import plugins @@ -473,14 +473,8 @@ def statistics(db): pbh_rules = db.cfgdb.get_table("PBH_RULE") pbh_counters = read_pbh_counters(pbh_rules) - try: - with open('/tmp/.pbh_counters.txt', 'w') as fp: - json.dump(remap_keys(pbh_counters), fp) - except IOError as err: - pass + serialize_pbh_counters(pbh_counters) -def remap_keys(dict): - return [{'key': k, 'value': v} for k, v in dict.items()] # ("sonic-clear flowcnt-trap") @cli.command() diff --git a/config/plugins/pbh.py b/config/plugins/pbh.py index e5e5f0fdde..b6726aa154 100644 --- a/config/plugins/pbh.py +++ b/config/plugins/pbh.py @@ -7,73 +7,154 @@ """ import click +import json import ipaddress import re import utilities_common.cli as clicommon -hash_field_types = [ - 'INNER_IP_PROTOCOL', - 'INNER_L4_DST_PORT', - 'INNER_L4_SRC_PORT', - 'INNER_DST_IPV4', - 'INNER_SRC_IPV4', - 'INNER_DST_IPV6', - 'INNER_SRC_IPV6' +from show.plugins.pbh import deserialize_pbh_counters + +GRE_KEY_RE = r"^(0x){1}[a-fA-F0-9]{1,8}/(0x){1}[a-fA-F0-9]{1,8}$" + +ETHER_TYPE_RE = r"^(0x){1}[a-fA-F0-9]{1,4}$" +L4_DST_PORT_RE = ETHER_TYPE_RE +INNER_ETHER_TYPE_RE = ETHER_TYPE_RE + +IP_PROTOCOL_RE = r"^(0x){1}[a-fA-F0-9]{1,2}$" +IPV6_NEXT_HEADER_RE = IP_PROTOCOL_RE + +HASH_FIELD_VALUE_LIST = [ + "INNER_IP_PROTOCOL", + "INNER_L4_DST_PORT", + "INNER_L4_SRC_PORT", + "INNER_DST_IPV4", + "INNER_SRC_IPV4", + "INNER_DST_IPV6", + "INNER_SRC_IPV6" ] -packet_action_types = ['SET_ECMP_HASH', 'SET_LAG_HASH'] -flow_counter_state = ['DISABLED', 'ENABLED'] -gre_key_re = r"^(0x){1}[a-fA-F0-9]{1,8}/(0x){1}[a-fA-F0-9]{1,8}$" -ip_protocol_re = r"^(0x){1}[a-fA-F0-9]{1,2}$" -ipv6_next_header_re = ip_protocol_re -l4_dst_port_re = r"^(0x){1}[a-fA-F0-9]{1,4}$" -inner_ether_type_re = l4_dst_port_re -ether_type_re = l4_dst_port_re +PACKET_ACTION_VALUE_LIST = [ + "SET_ECMP_HASH", + "SET_LAG_HASH" +] -pbh_hash_field_tbl_name = 'PBH_HASH_FIELD' -pbh_hash_tbl_name = 'PBH_HASH' -pbh_table_tbl_name = 'PBH_TABLE' +FLOW_COUNTER_VALUE_LIST = [ + "DISABLED", + "ENABLED" +] +PBH_TABLE_CDB = "PBH_TABLE" +PBH_RULE_CDB = "PBH_RULE" +PBH_HASH_CDB = "PBH_HASH" +PBH_HASH_FIELD_CDB = "PBH_HASH_FIELD" -def exit_with_error(*args, **kwargs): - """ Print a message and abort CLI. 
""" +PBH_TABLE_INTERFACE_LIST = "interface_list" +PBH_TABLE_DESCRIPTION = "description" - click.secho(*args, **kwargs) - raise click.Abort() +PBH_RULE_PRIORITY = "priority" +PBH_RULE_GRE_KEY = "gre_key" +PBH_RULE_ETHER_TYPE = "ether_type" +PBH_RULE_IP_PROTOCOL = "ip_protocol" +PBH_RULE_IPV6_NEXT_HEADER = "ipv6_next_header" +PBH_RULE_L4_DST_PORT = "l4_dst_port" +PBH_RULE_INNER_ETHER_TYPE = "inner_ether_type" +PBH_RULE_HASH = "hash" +PBH_RULE_PACKET_ACTION = "packet_action" +PBH_RULE_FLOW_COUNTER = "flow_counter" + +PBH_HASH_HASH_FIELD_LIST = "hash_field_list" + +PBH_HASH_FIELD_HASH_FIELD = "hash_field" +PBH_HASH_FIELD_IP_MASK = "ip_mask" +PBH_HASH_FIELD_SEQUENCE_ID = "sequence_id" + +PBH_CAPABILITIES_SDB = "PBH_CAPABILITIES" + +PBH_TABLE_CAPABILITIES_KEY = "table" +PBH_RULE_CAPABILITIES_KEY = "rule" +PBH_HASH_CAPABILITIES_KEY = "hash" +PBH_HASH_FIELD_CAPABILITIES_KEY = "hash-field" + +PBH_ADD = "ADD" +PBH_UPDATE = "UPDATE" +PBH_REMOVE = "REMOVE" + +PBH_COUNTERS_LOCATION = "/tmp/.pbh_counters.txt" +# +# DB interface -------------------------------------------------------------------------------------------------------- +# def add_entry(db, table, key, data): """ Add new entry in table """ cfg = db.get_config() cfg.setdefault(table, {}) + if key in cfg[table]: - raise Exception("{} already exists".format(key)) + raise click.ClickException("{}{}{} already exists in Config DB".format( + table, db.TABLE_NAME_SEPARATOR, db.serialize_key(key) + ) + ) cfg[table][key] = data db.set_entry(table, key, data) -def update_entry(db, table, key, data, create_if_not_exists=False): +def update_entry(db, cap, table, key, data): """ Update entry in table and validate configuration. - If attribute value in data is None, the attribute is deleted. + If field value in data is None, the field is deleted """ + field_root = "{}{}{}".format(table, db.TABLE_NAME_SEPARATOR, db.serialize_key(key)) + cfg = db.get_config() cfg.setdefault(table, {}) - if create_if_not_exists: - cfg[table].setdefault(key, {}) - if key not in cfg[table]: - raise Exception("{} does not exist".format(key)) - - for attr, value in data.items(): - if value is None and attr in cfg[table][key]: - cfg[table][key].pop(attr) - else: - cfg[table][key][attr] = value + raise click.ClickException("{} doesn't exist in Config DB".format(field_root)) + + for field, value in data.items(): + if field not in cap: + raise click.ClickException( + "{}{}{} doesn't have a configuration capabilities".format( + field_root, db.KEY_SEPARATOR, field + ) + ) + if value is None: # HDEL + if field in cfg[table][key]: + if PBH_REMOVE in cap[field]: + cfg[table][key].pop(field) + else: + raise click.ClickException( + "Failed to remove {}{}{}: operation is prohibited".format( + field_root, db.KEY_SEPARATOR, field + ) + ) + else: + raise click.ClickException( + "Failed to remove {}{}{}: field doesn't exist".format( + field_root, db.KEY_SEPARATOR, field + ) + ) + else: # HSET + if field in cfg[table][key]: + if PBH_UPDATE not in cap[field]: + raise click.ClickException( + "Failed to update {}{}{}: operation is prohibited".format( + field_root, db.KEY_SEPARATOR, field + ) + ) + else: + if PBH_ADD not in cap[field]: + raise click.ClickException( + "Failed to add {}{}{}: operation is prohibited".format( + field_root, db.KEY_SEPARATOR, field + ) + ) + + cfg[table][key][field] = value db.set_entry(table, key, cfg[table][key]) @@ -83,41 +164,124 @@ def del_entry(db, table, key): cfg = db.get_config() cfg.setdefault(table, {}) + if key not in cfg[table]: - raise Exception("{} does not 
exist".format(key)) + raise click.ClickException("{}{}{} doesn't exist in Config DB".format( + table, db.TABLE_NAME_SEPARATOR, db.serialize_key(key) + ) + ) cfg[table].pop(key) db.set_entry(table, key, None) -def ip_address_validator(ctx, param, value): - """ Check if the given ip address is valid +def is_exist_in_db(db, table, key): + """ Check if provided hash already exists in Config DB Args: - ctx: click context, - param: click parameter context, - value: value of parameter + db: reference to Config DB + table: table to search in Config DB + key: key to search in Config DB Returns: - str: ip address + bool: The return value. True for success, False otherwise """ - if value is not None: - try: - ip = ipaddress.ip_address(value) - except Exception as e: - exit_with_error("Error: invalid value '{}' for '{}' option\n{}".format(value, param.name, e), fg="red") + if (not table) or (not key): + return False - return str(ip) + if not db.get_entry(table, key): + return False + return True -def re_match(value, param_name, regexp): - """ Regexp validation of given parameter +# +# PBH validators ------------------------------------------------------------------------------------------------------ +# + +def table_name_validator(ctx, db, table_name, is_exist=True): + if is_exist: + if not is_exist_in_db(db, str(PBH_TABLE_CDB), str(table_name)): + raise click.UsageError( + "Invalid value for \"TABLE_NAME\": {} is not a valid PBH table".format(table_name), ctx + ) + else: + if is_exist_in_db(db, str(PBH_TABLE_CDB), str(table_name)): + raise click.UsageError( + "Invalid value for \"TABLE_NAME\": {} is a valid PBH table".format(table_name), ctx + ) + + +def rule_name_validator(ctx, db, table_name, rule_name, is_exist=True): + if is_exist: + if not is_exist_in_db(db, str(PBH_RULE_CDB), (str(table_name), str(rule_name))): + raise click.UsageError( + "Invalid value for \"RULE_NAME\": {} is not a valid PBH rule".format(rule_name), ctx + ) + else: + if is_exist_in_db(db, str(PBH_RULE_CDB), (str(table_name), str(rule_name))): + raise click.UsageError( + "Invalid value for \"RULE_NAME\": {} is a valid PBH rule".format(rule_name), ctx + ) + + +def hash_name_validator(ctx, db, hash_name, is_exist=True): + if is_exist: + if not is_exist_in_db(db, str(PBH_HASH_CDB), str(hash_name)): + raise click.UsageError( + "Invalid value for \"HASH_NAME\": {} is not a valid PBH hash".format(hash_name), ctx + ) + else: + if is_exist_in_db(db, str(PBH_HASH_CDB), str(hash_name)): + raise click.UsageError( + "Invalid value for \"HASH_NAME\": {} is a valid PBH hash".format(hash_name), ctx + ) + + +def hash_field_name_validator(ctx, db, hash_field_name, is_exist=True): + if is_exist: + if not is_exist_in_db(db, str(PBH_HASH_FIELD_CDB), str(hash_field_name)): + raise click.UsageError( + "Invalid value for \"HASH_FIELD_NAME\": {} is not a valid PBH hash field".format(hash_field_name), ctx + ) + else: + if is_exist_in_db(db, str(PBH_HASH_FIELD_CDB), str(hash_field_name)): + raise click.UsageError( + "Invalid value for \"HASH_FIELD_NAME\": {} is a valid PBH hash field".format(hash_field_name), ctx + ) + + +def interface_list_validator(ctx, db, interface_list): + for intf in interface_list.split(','): + if not (clicommon.is_valid_port(db, str(intf)) or clicommon.is_valid_portchannel(db, str(intf))): + raise click.UsageError( + "Invalid value for \"--interface-list\": {} is not a valid interface".format(intf), ctx + ) + + +def hash_field_list_validator(ctx, db, hash_field_list): + for hfield in hash_field_list.split(','): + if not 
is_exist_in_db(db, str(PBH_HASH_FIELD_CDB), str(hfield)): + raise click.UsageError( + "Invalid value for \"--hash-field-list\": {} is not a valid PBH hash field".format(hfield), ctx + ) + + +def hash_validator(ctx, db, hash): + if not is_exist_in_db(db, str(PBH_HASH_CDB), str(hash)): + raise click.UsageError( + "Invalid value for \"--hash\": {} is not a valid PBH hash".format(hash), ctx + ) + + +def re_match(ctx, param, value, regexp): + """ Regexp validation of given PBH rule parameter Args: - value: value to validate, - param_name: parameter name, + ctx: click context + param: click parameter context + value: value to validate regexp: regular expression Return: @@ -125,17 +289,19 @@ def re_match(value, param_name, regexp): """ if re.match(regexp, str(value)) is None: - exit_with_error("Error: invalid value '{}' for '{}' option".format(str(value), param_name), fg="red") + raise click.UsageError( + "Invalid value for {}: {} is ill-formed".format(param.get_error_hint(ctx), value), ctx + ) return value -def pbh_re_match_validator(ctx, param, value): +def match_validator(ctx, param, value): """ Check if PBH rule options are valid Args: - ctx: click context, - param: click parameter context, + ctx: click context + param: click parameter context value: value of parameter Returns: @@ -143,133 +309,242 @@ def pbh_re_match_validator(ctx, param, value): """ if value is not None: - if param.name == 'gre_key': - return re_match(value, param.name, gre_key_re) - elif param.name == 'ip_protocol': - return re_match(value, param.name, ip_protocol_re) - elif param.name == 'ipv6_next_header': - return re_match(value, param.name, ipv6_next_header_re) - elif param.name == 'l4_dst_port': - return re_match(value, param.name, l4_dst_port_re) - elif param.name == 'inner_ether_type': - return re_match(value, param.name, inner_ether_type_re) - elif param.name == 'ether_type': - return re_match(value, param.name, ether_type_re) - - -def is_exist_in_db(db, obj_list, conf_db_key): - """ Check if provided CLI option already exist in Config DB, - i.g in case of --hash-field-list option it will check - if 'hash-field' was previously added by - 'config pbh hash-field ...' 
CLI command + if param.name == PBH_RULE_GRE_KEY: + return re_match(ctx, param, value, GRE_KEY_RE) + elif param.name == PBH_RULE_ETHER_TYPE: + return re_match(ctx, param, value, ETHER_TYPE_RE) + elif param.name == PBH_RULE_IP_PROTOCOL: + return re_match(ctx, param, value, IP_PROTOCOL_RE) + elif param.name == PBH_RULE_IPV6_NEXT_HEADER: + return re_match(ctx, param, value, IPV6_NEXT_HEADER_RE) + elif param.name == PBH_RULE_L4_DST_PORT: + return re_match(ctx, param, value, L4_DST_PORT_RE) + elif param.name == PBH_RULE_INNER_ETHER_TYPE: + return re_match(ctx, param, value, INNER_ETHER_TYPE_RE) + + +def ip_mask_validator(ctx, param, value): + """ Check if PBH hash field IP mask option is valid Args: - db: reference to Config DB, - obj_list: value of 'click' option - conf_db_key: key to search in Config DB - """ - - if obj_list is None: - return True - - table = db.cfgdb.get_table(conf_db_key) - correct_list = list(table.keys()) + ctx: click context + param: click parameter context + value: value of parameter - splited_list = obj_list.split(',') + Returns: + str: validated parameter + """ - for elem in splited_list: - if elem not in correct_list: - return False + if value is not None: + try: + ip = ipaddress.ip_address(value) + except Exception as err: + raise click.UsageError("Invalid value for {}: {}".format(param.get_error_hint(ctx), err), ctx) - return True + return str(ip) -def ip_mask_hash_field_correspondence_validator(ip_mask, hash_field): - """ Check if the --ip-mask option are correspond to - the --hash-field option +def hash_field_to_ip_mask_correspondence_validator(ctx, hash_field, ip_mask): + """ Function to validate whether --hash-field value + corresponds to the --ip-mask value Args: - ip_mask: ip address or None, - hash_field: hash field value, which was configured before + ctx: click context + hash_field: native hash field value + ip_mask: ip address or None """ - hf_v4 = ['INNER_DST_IPV4', 'INNER_SRC_IPV4'] - hf_v6 = ['INNER_DST_IPV6', 'INNER_SRC_IPV6'] - hf_v4_and_v6 = hf_v4 + hf_v6 - hf_no_ip = ['INNER_IP_PROTOCOL', 'INNER_L4_DST_PORT', 'INNER_L4_SRC_PORT'] + hf_no_ip = ["INNER_IP_PROTOCOL", "INNER_L4_DST_PORT", "INNER_L4_SRC_PORT"] - if (hash_field in hf_no_ip) and (ip_mask): - exit_with_error("Error: the value of '--hash-field'='{}' is NOT compatible with the value of '--ip-mask'='{}'".format(hash_field, ip_mask), fg='red') + if ip_mask is None: + if hash_field not in hf_no_ip: + raise click.UsageError( + "Invalid value for \"--hash-field\": invalid choice: {}. (choose from {} when no \"--ip-mask\" is provided)".format( + hash_field, ", ".join(hf_no_ip) + ), ctx + ) + return + + hf_v4 = ["INNER_DST_IPV4", "INNER_SRC_IPV4"] + hf_v6 = ["INNER_DST_IPV6", "INNER_SRC_IPV6"] - if (hash_field in hf_v4_and_v6) and (ip_mask is None): - exit_with_error("Error: the value of '--hash-field'='{}' is NOT compatible with the value of '--ip-mask'='{}'".format(hash_field, ip_mask), fg='red') + if not ((hash_field in hf_v4) or (hash_field in hf_v6)): + raise click.UsageError( + "Invalid value for \"--hash-field\": invalid choice: {}. 
(choose from {} when \"--ip-mask\" is provided)".format( + hash_field, ", ".join(hf_v4 + hf_v6) + ), ctx + ) - if (ip_mask is not None): - ip_addr_version = ipaddress.ip_address(ip_mask).version + ip_ver = ipaddress.ip_address(ip_mask).version - if (hash_field in hf_v4) and (ip_addr_version != 4): - exit_with_error("Error: the value of '--hash-field'='{}' is NOT compatible with the value of '--ip-mask'='{}'".format(hash_field, ip_mask), fg='red') + if (hash_field in hf_v4) and (ip_ver != 4): + raise click.UsageError( + "Invalid value for \"--ip-mask\": {} is not compatible with {}".format( + ip_mask, hash_field + ), ctx + ) - if (hash_field in hf_v6) and (ip_addr_version != 6): - exit_with_error("Error: the value of '--hash-field'='{}' is NOT compatible with the value of '--ip-mask'='{}'".format(hash_field, ip_mask), fg='red') + if (hash_field in hf_v6) and (ip_ver != 6): + raise click.UsageError( + "Invalid value for \"--ip-mask\": {} is not compatible with {}".format( + ip_mask, hash_field + ), ctx + ) -def ip_mask_hash_field_update_validator(db, hash_field_name, ip_mask, hash_field): - """ Function to validate --ip-mask and --hash-field - correspondence, during update flow +def hash_field_to_ip_mask_validator(ctx, db, hash_field_name, hash_field, ip_mask, is_update=True): + """ Function to validate --hash-field and --ip-mask + correspondence, during add/update flow Args: - db: reference to CONFIG DB, - hash_field_name: name of the hash-field, - ip_mask: ip address, + ctx: click context + db: reference to Config DB + hash_field_name: name of the hash-field hash_field: native hash field value + ip_mask: ip address + is_update: update flow flag """ - if (ip_mask is None) and (hash_field is None): + if not is_update: + hash_field_to_ip_mask_correspondence_validator(ctx, hash_field, ip_mask) + return + + if (hash_field is None) and (ip_mask is None): return - table = db.cfgdb.get_table(pbh_hash_field_tbl_name) - hash_field_obj = table[hash_field_name] + if (hash_field is not None) and (ip_mask is not None): + hash_field_to_ip_mask_correspondence_validator(ctx, hash_field, ip_mask) + return - if (ip_mask is None) and (hash_field is not None): + hf_obj = db.get_entry(str(PBH_HASH_FIELD_CDB), str(hash_field_name)) + if not hf_obj: + raise click.ClickException( + "Failed to validate \"--hash-field\" and \"--ip-mask\" correspondence: {} is not a valid PBH hash field".format( + hash_field_name + ) + ) + + if hash_field is None: + if PBH_HASH_FIELD_HASH_FIELD not in hf_obj: + raise click.ClickException( + "Failed to validate \"--hash-field\" and \"--ip-mask\" correspondence: {} is not a valid PBH field".format( + PBH_HASH_FIELD_HASH_FIELD + ) + ) + hash_field_to_ip_mask_correspondence_validator(ctx, hf_obj[PBH_HASH_FIELD_HASH_FIELD], ip_mask) + else: + if PBH_HASH_FIELD_IP_MASK in hf_obj: + hash_field_to_ip_mask_correspondence_validator(ctx, hash_field, hf_obj[PBH_HASH_FIELD_IP_MASK]) + else: + hash_field_to_ip_mask_correspondence_validator(ctx, hash_field, ip_mask) + +# +# PBH helpers --------------------------------------------------------------------------------------------------------- +# + +def serialize_pbh_counters(obj): + """ Helper that performs PBH counters serialization. + + in = { + ('pbh_table1', 'pbh_rule1'): {'SAI_ACL_COUNTER_ATTR_BYTES': '0', 'SAI_ACL_COUNTER_ATTR_PACKETS': '0'}, + ... 
+ ('pbh_tableN', 'pbh_ruleN'): {'SAI_ACL_COUNTER_ATTR_BYTES': '0', 'SAI_ACL_COUNTER_ATTR_PACKETS': '0'} + } + + out = [ + { + "key": ["pbh_table1", "pbh_rule1"], + "value": {"SAI_ACL_COUNTER_ATTR_BYTES": "0", "SAI_ACL_COUNTER_ATTR_PACKETS": "0"} + }, + ... + { + "key": ["pbh_tableN", "pbh_ruleN"], + "value": {"SAI_ACL_COUNTER_ATTR_BYTES": "0", "SAI_ACL_COUNTER_ATTR_PACKETS": "0"} + } + ] - try: - ip_mask = hash_field_obj['ip_mask'] - except Exception as e: - ip_mask = None + Args: + obj: counters dict. + """ + + def remap_keys(obj): + return [{'key': k, 'value': v} for k, v in obj.items()] - ip_mask_hash_field_correspondence_validator(ip_mask, hash_field) + try: + with open(PBH_COUNTERS_LOCATION, 'w') as f: + json.dump(remap_keys(obj), f) + except IOError as err: + pass - if (ip_mask is not None) and (hash_field is None): - hash_field = hash_field_obj['hash_field'] +def update_pbh_counters(table_name, rule_name): + """ Helper that performs PBH counters update """ + pbh_counters = deserialize_pbh_counters() + key_to_del = table_name, rule_name - ip_mask_hash_field_correspondence_validator(ip_mask, hash_field) + if key_to_del in pbh_counters: + del pbh_counters[key_to_del] + serialize_pbh_counters(pbh_counters) -def interfaces_list_validator(db, interface_list, is_update): - if is_update and (interface_list is None): - return +def pbh_capabilities_query(db, key): + """ Query PBH capabilities """ - is_error = False - interfaces_splited = interface_list.split(',') - - for intf in interfaces_splited: - if intf.startswith('Ethernet'): - if not clicommon.is_valid_port(db.cfgdb, intf): - is_error = True - break - elif intf.startswith('PortChannel'): - if not clicommon.is_valid_portchannel(db.cfgdb, intf): - is_error = True - break - else: - is_error = True - break + sdb_id = db.STATE_DB + sdb_sep = db.get_db_separator(sdb_id) + + cap_map = db.get_all(sdb_id, "{}{}{}".format(str(PBH_CAPABILITIES_SDB), sdb_sep, str(key))) + if not cap_map: + return None + + return cap_map + + +def pbh_match_count(db, table, key, data): + """ Count PBH rule match fields """ - if is_error: - exit_with_error("Error: invalid value '{}', for '--interface-list' option".format(interface_list), fg="red") + field_map = db.get_entry(table, key) + match_total = 0 + match_count = 0 + + if PBH_RULE_GRE_KEY in field_map: + if PBH_RULE_GRE_KEY in data: + match_count += 1 + match_total += 1 + if PBH_RULE_ETHER_TYPE in field_map: + if PBH_RULE_ETHER_TYPE in data: + match_count += 1 + match_total += 1 + if PBH_RULE_IP_PROTOCOL in field_map: + if PBH_RULE_IP_PROTOCOL in data: + match_count += 1 + match_total += 1 + if PBH_RULE_IPV6_NEXT_HEADER in field_map: + if PBH_RULE_IPV6_NEXT_HEADER in data: + match_count += 1 + match_total += 1 + if PBH_RULE_L4_DST_PORT in field_map: + if PBH_RULE_L4_DST_PORT in data: + match_count += 1 + match_total += 1 + if PBH_RULE_INNER_ETHER_TYPE in field_map: + if PBH_RULE_INNER_ETHER_TYPE in data: + match_count += 1 + match_total += 1 + + return match_total, match_count + + +def exit_with_error(*args, **kwargs): + """ Print a message and abort CLI """ + + click.secho(*args, **kwargs) + raise click.Abort() + +# +# PBH CLI ------------------------------------------------------------------------------------------------------------- +# @click.group( name='pbh', @@ -280,6 +555,9 @@ def PBH(): pass +# +# PBH hash field ------------------------------------------------------------------------------------------------------ +# @PBH.group( name="hash-field", @@ -295,43 +573,50 @@ def PBH_HASH_FIELD(): 
@click.argument( "hash-field-name", nargs=1, - required=True, + required=True ) @click.option( "--hash-field", help="Configures native hash field for this hash field", required=True, - type=click.Choice(hash_field_types) + type=click.Choice(HASH_FIELD_VALUE_LIST) ) @click.option( "--ip-mask", - help="""Configures IPv4/IPv6 address mask for this hash field, required when the value of --hash-field is - INNER_DST_IPV4 or INNER_SRC_IPV4 or INNER_SRC_IPV6 or INNER_SRC_IPV6""", - callback=ip_address_validator, + help="""Configures IPv4/IPv6 address mask for this hash field, required when the value of --hash-field is - INNER_DST_IPV4 or INNER_SRC_IPV4 or INNER_DST_IPV6 or INNER_SRC_IPV6""", + callback=ip_mask_validator ) @click.option( "--sequence-id", help="Configures in which order the fields are hashed and defines which fields should be associative", required=True, - type=click.INT, + type=click.INT ) @clicommon.pass_db def PBH_HASH_FIELD_add(db, hash_field_name, hash_field, ip_mask, sequence_id): """ Add object to PBH_HASH_FIELD table """ - ip_mask_hash_field_correspondence_validator(ip_mask, hash_field) + ctx = click.get_current_context() + + hash_field_name_validator(ctx, db.cfgdb_pipe, hash_field_name, False) + hash_field_to_ip_mask_validator(ctx, db.cfgdb_pipe, hash_field_name, hash_field, ip_mask, False) - table = pbh_hash_field_tbl_name - key = hash_field_name + table = str(PBH_HASH_FIELD_CDB) + key = str(hash_field_name) data = {} + if hash_field is not None: - data["hash_field"] = hash_field + data[PBH_HASH_FIELD_HASH_FIELD] = hash_field if ip_mask is not None: - data["ip_mask"] = ip_mask + data[PBH_HASH_FIELD_IP_MASK] = ip_mask if sequence_id is not None: - data["sequence_id"] = sequence_id + data[PBH_HASH_FIELD_SEQUENCE_ID] = sequence_id + + if not data: + exit_with_error("Error: Failed to add PBH hash field: options are not provided", fg="red") try: - add_entry(db.cfgdb, table, key, data) + add_entry(db.cfgdb_pipe, table, key, data) except Exception as err: exit_with_error("Error: {}".format(err), fg="red") @@ -340,41 +625,52 @@ def PBH_HASH_FIELD_add(db, hash_field_name, hash_field, ip_mask, sequence_id): @click.argument( "hash-field-name", nargs=1, - required=True, + required=True ) @click.option( "--hash-field", help="Configures native hash field for this hash field", - type=click.Choice(hash_field_types) + type=click.Choice(HASH_FIELD_VALUE_LIST) ) @click.option( "--ip-mask", - help="""Configures IPv4/IPv6 address mask for this hash field, required when the value of --hash-field is - INNER_DST_IPV4 or INNER_SRC_IPV4 or INNER_SRC_IPV6 or INNER_SRC_IPV6 """, - callback=ip_address_validator, + help="""Configures IPv4/IPv6 address mask for this hash field, required when the value of --hash-field is - INNER_DST_IPV4 or INNER_SRC_IPV4 or INNER_DST_IPV6 or INNER_SRC_IPV6 """, + callback=ip_mask_validator ) @click.option( "--sequence-id", help="Configures in which order the fields are hashed and defines which fields should be associative", - type=click.INT, + type=click.INT ) @clicommon.pass_db def PBH_HASH_FIELD_update(db, hash_field_name, hash_field, ip_mask, sequence_id): """ Update object in PBH_HASH_FIELD table """ - ip_mask_hash_field_update_validator(db, hash_field_name, ip_mask, hash_field) + ctx = click.get_current_context() - table = pbh_hash_field_tbl_name - key = hash_field_name + hash_field_name_validator(ctx, db.cfgdb_pipe, hash_field_name) + hash_field_to_ip_mask_validator(ctx, db.cfgdb_pipe, hash_field_name, hash_field, ip_mask) + + table = str(PBH_HASH_FIELD_CDB) + key 
= str(hash_field_name) data = {} + if hash_field is not None: - data["hash_field"] = hash_field + data[PBH_HASH_FIELD_HASH_FIELD] = hash_field if ip_mask is not None: - data["ip_mask"] = ip_mask + data[PBH_HASH_FIELD_IP_MASK] = ip_mask if sequence_id is not None: - data["sequence_id"] = sequence_id + data[PBH_HASH_FIELD_SEQUENCE_ID] = sequence_id + + if not data: + exit_with_error("Error: Failed to update PBH hash field: options are not provided", fg="red") + + cap = pbh_capabilities_query(db.db, PBH_HASH_FIELD_CAPABILITIES_KEY) + if cap is None: + exit_with_error("Error: Failed to query PBH hash field capabilities: configuration is not available", fg="red") try: - update_entry(db.cfgdb, table, key, data) + update_entry(db.cfgdb_pipe, cap, table, key, data) except Exception as err: exit_with_error("Error: {}".format(err), fg="red") @@ -383,19 +679,27 @@ def PBH_HASH_FIELD_update(db, hash_field_name, hash_field, ip_mask, sequence_id) @click.argument( "hash-field-name", nargs=1, - required=True, + required=True ) @clicommon.pass_db def PBH_HASH_FIELD_delete(db, hash_field_name): """ Delete object from PBH_HASH_FIELD table """ - table = pbh_hash_field_tbl_name - key = hash_field_name + ctx = click.get_current_context() + + hash_field_name_validator(ctx, db.cfgdb_pipe, hash_field_name) + + table = str(PBH_HASH_FIELD_CDB) + key = str(hash_field_name) + try: - del_entry(db.cfgdb, table, key) + del_entry(db.cfgdb_pipe, table, key) except Exception as err: exit_with_error("Error: {}".format(err), fg="red") +# +# PBH hash ------------------------------------------------------------------------------------------------------------ +# @PBH.group( name="hash", @@ -411,28 +715,34 @@ def PBH_HASH(): @click.argument( "hash-name", nargs=1, - required=True, + required=True ) @click.option( "--hash-field-list", help="The list of hash fields to apply with this hash", - required=True, + required=True ) @clicommon.pass_db def PBH_HASH_add(db, hash_name, hash_field_list): """ Add object to PBH_HASH table """ - if not is_exist_in_db(db, hash_field_list, pbh_hash_field_tbl_name): - exit_with_error("Error: invalid value '{}' for '--hash-field-list' option".format(hash_field_list), fg="red") + ctx = click.get_current_context() + + hash_name_validator(ctx, db.cfgdb_pipe, hash_name, False) - table = pbh_hash_tbl_name - key = hash_name + table = str(PBH_HASH_CDB) + key = str(hash_name) data = {} + if hash_field_list is not None: - data["hash_field_list"] = hash_field_list.split(",") + hash_field_list_validator(ctx, db.cfgdb_pipe, hash_field_list) + data[PBH_HASH_HASH_FIELD_LIST] = hash_field_list.split(",") + + if not data: + exit_with_error("Error: Failed to add PBH hash: options are not provided", fg="red") try: - add_entry(db.cfgdb, table, key, data) + add_entry(db.cfgdb_pipe, table, key, data) except Exception as err: exit_with_error("Error: {}".format(err), fg="red") @@ -441,27 +751,37 @@ def PBH_HASH_add(db, hash_name, hash_field_list): @click.argument( "hash-name", nargs=1, - required=True, + required=True ) @click.option( "--hash-field-list", - help="The list of hash fields to apply with this hash", + help="The list of hash fields to apply with this hash" ) @clicommon.pass_db def PBH_HASH_update(db, hash_name, hash_field_list): """ Update object in PBH_HASH table """ - if not is_exist_in_db(db, hash_field_list, pbh_hash_field_tbl_name): - exit_with_error("Error: invalid value '{}' for '--hash-field-list' option".format(hash_field_list), fg="red") + ctx = click.get_current_context() + + hash_name_validator(ctx, 
db.cfgdb_pipe, hash_name) - table = pbh_hash_tbl_name - key = hash_name + table = str(PBH_HASH_CDB) + key = str(hash_name) data = {} + if hash_field_list is not None: - data["hash_field_list"] = hash_field_list.split(",") + hash_field_list_validator(ctx, db.cfgdb_pipe, hash_field_list) + data[PBH_HASH_HASH_FIELD_LIST] = hash_field_list.split(",") + + if not data: + exit_with_error("Error: Failed to update PBH hash: options are not provided", fg="red") + + cap = pbh_capabilities_query(db.db, PBH_HASH_CAPABILITIES_KEY) + if cap is None: + exit_with_error("Error: Failed to query PBH hash capabilities: configuration is not available", fg="red") try: - update_entry(db.cfgdb, table, key, data) + update_entry(db.cfgdb_pipe, cap, table, key, data) except Exception as err: exit_with_error("Error: {}".format(err), fg="red") @@ -470,19 +790,27 @@ def PBH_HASH_update(db, hash_name, hash_field_list): @click.argument( "hash-name", nargs=1, - required=True, + required=True ) @clicommon.pass_db def PBH_HASH_delete(db, hash_name): """ Delete object from PBH_HASH table """ - table = pbh_hash_tbl_name - key = hash_name + ctx = click.get_current_context() + + hash_name_validator(ctx, db.cfgdb_pipe, hash_name) + + table = str(PBH_HASH_CDB) + key = str(hash_name) + try: - del_entry(db.cfgdb, table, key) + del_entry(db.cfgdb_pipe, table, key) except Exception as err: exit_with_error("Error: {}".format(err), fg="red") +# +# PBH rule ------------------------------------------------------------------------------------------------------------ +# @PBH.group( name="rule", @@ -498,63 +826,63 @@ def PBH_RULE(): @click.argument( "table-name", nargs=1, - required=True, + required=True ) @click.argument( "rule-name", nargs=1, - required=True, + required=True ) @click.option( "--priority", help="Configures priority for this rule", required=True, - type=click.INT, + type=click.INT ) @click.option( "--gre-key", - help="Configures packet match: GRE key (value/mask)", - callback=pbh_re_match_validator, + help="Configures packet match for this rule: GRE key (value/mask)", + callback=match_validator ) @click.option( "--ether-type", help="Configures packet match for this rule: EtherType (IANA Ethertypes)", - callback=pbh_re_match_validator, + callback=match_validator ) @click.option( "--ip-protocol", help="Configures packet match for this rule: IP protocol (IANA Protocol Numbers)", - callback=pbh_re_match_validator, + callback=match_validator ) @click.option( "--ipv6-next-header", help="Configures packet match for this rule: IPv6 Next header (IANA Protocol Numbers)", - callback=pbh_re_match_validator, + callback=match_validator ) @click.option( "--l4-dst-port", help="Configures packet match for this rule: L4 destination port", - callback=pbh_re_match_validator, + callback=match_validator ) @click.option( "--inner-ether-type", help="Configures packet match for this rule: inner EtherType (IANA Ethertypes)", - callback=pbh_re_match_validator, + callback=match_validator ) @click.option( "--hash", - required=True, help="The hash to apply with this rule", + required=True ) @click.option( "--packet-action", help="Configures packet action for this rule", - type=click.Choice(packet_action_types) + type=click.Choice(PACKET_ACTION_VALUE_LIST) ) @click.option( "--flow-counter", - help="Enables/Disables packet/byte counter", - type=click.Choice(flow_counter_state) + help="Enables/Disables packet/byte counter for this rule", + type=click.Choice(FLOW_COUNTER_VALUE_LIST) ) @clicommon.pass_db def PBH_RULE_add( @@ -574,103 +902,139 @@ def 
PBH_RULE_add( ): """ Add object to PBH_RULE table """ - if not is_exist_in_db(db, table_name, pbh_table_tbl_name): - exit_with_error("Error: invalid value '{}' for 'table-name' argument".format(table_name), fg="red") - if not is_exist_in_db(db, hash, pbh_hash_tbl_name): - exit_with_error("Error: invalid value '{}' for '--hash' option".format(hash), fg="red") + ctx = click.get_current_context() + + table_name_validator(ctx, db.cfgdb_pipe, table_name) + rule_name_validator(ctx, db.cfgdb_pipe, table_name, rule_name, False) - table = "PBH_RULE" - key = table_name, rule_name + table = str(PBH_RULE_CDB) + key = (str(table_name), str(rule_name)) data = {} + + match_count = 0 + if priority is not None: - data["priority"] = priority + data[PBH_RULE_PRIORITY] = priority if gre_key is not None: - data["gre_key"] = gre_key + data[PBH_RULE_GRE_KEY] = gre_key + match_count += 1 if ether_type is not None: - data["ether_type"] = ether_type + data[PBH_RULE_ETHER_TYPE] = ether_type + match_count += 1 if ip_protocol is not None: - data["ip_protocol"] = ip_protocol + data[PBH_RULE_IP_PROTOCOL] = ip_protocol + match_count += 1 if ipv6_next_header is not None: - data["ipv6_next_header"] = ipv6_next_header + data[PBH_RULE_IPV6_NEXT_HEADER] = ipv6_next_header + match_count += 1 if l4_dst_port is not None: - data["l4_dst_port"] = l4_dst_port + data[PBH_RULE_L4_DST_PORT] = l4_dst_port + match_count += 1 if inner_ether_type is not None: - data["inner_ether_type"] = inner_ether_type + data[PBH_RULE_INNER_ETHER_TYPE] = inner_ether_type + match_count += 1 if hash is not None: - data["hash"] = hash + hash_validator(ctx, db.cfgdb_pipe, hash) + data[PBH_RULE_HASH] = hash if packet_action is not None: - data["packet_action"] = packet_action + data[PBH_RULE_PACKET_ACTION] = packet_action if flow_counter is not None: - data["flow_counter"] = flow_counter + data[PBH_RULE_FLOW_COUNTER] = flow_counter + + if not data: + exit_with_error("Error: Failed to add PBH rule: options are not provided", fg="red") + + if match_count == 0: + exit_with_error("Error: Failed to add PBH rule: match options are not provided", fg="red") try: - add_entry(db.cfgdb, table, key, data) + add_entry(db.cfgdb_pipe, table, key, data) except Exception as err: exit_with_error("Error: {}".format(err), fg="red") -@PBH_RULE.command(name="update") +@PBH_RULE.group( + name="update", + cls=clicommon.AliasedGroup +) +def PBH_RULE_update(): + """ Update object in PBH_RULE table """ + + pass + + +@PBH_RULE_update.group( + name="field", + cls=clicommon.AliasedGroup +) +def PBH_RULE_update_field(): + """ Update object field in PBH_RULE table """ + + pass + + +@PBH_RULE_update_field.command(name="set") @click.argument( "table-name", nargs=1, - required=True, + required=True ) @click.argument( "rule-name", nargs=1, - required=True, + required=True ) @click.option( "--priority", help="Configures priority for this rule", - type=click.INT, + type=click.INT ) @click.option( "--gre-key", - help="Configures packet match: GRE key (value/mask)", - callback=pbh_re_match_validator, + help="Configures packet match for this rule: GRE key (value/mask)", + callback=match_validator ) @click.option( "--ether-type", help="Configures packet match for this rule: EtherType (IANA Ethertypes)", - callback=pbh_re_match_validator, + callback=match_validator ) @click.option( "--ip-protocol", help="Configures packet match for this rule: IP protocol (IANA Protocol Numbers)", - callback=pbh_re_match_validator, + callback=match_validator ) @click.option( "--ipv6-next-header", help="Configures 
packet match for this rule: IPv6 Next header (IANA Protocol Numbers)", - callback=pbh_re_match_validator, + callback=match_validator ) @click.option( "--l4-dst-port", help="Configures packet match for this rule: L4 destination port", - callback=pbh_re_match_validator, + callback=match_validator ) @click.option( "--inner-ether-type", help="Configures packet match for this rule: inner EtherType (IANA Ethertypes)", - callback=pbh_re_match_validator, + callback=match_validator ) @click.option( "--hash", - help="The hash to apply with this rule", + help="The hash to apply with this rule" ) @click.option( "--packet-action", help="Configures packet action for this rule", - type=click.Choice(packet_action_types) + type=click.Choice(PACKET_ACTION_VALUE_LIST) ) @click.option( "--flow-counter", - help="Enables/Disables packet/byte counter", - type=click.Choice(flow_counter_state) + help="Enables/Disables packet/byte counter for this rule", + type=click.Choice(FLOW_COUNTER_VALUE_LIST) ) @clicommon.pass_db -def PBH_RULE_update( +def PBH_RULE_update_field_set( db, table_name, rule_name, @@ -685,39 +1049,178 @@ def PBH_RULE_update( packet_action, flow_counter ): - """ Update object in PBH_RULE table """ + """ Set object field in PBH_RULE table """ - if not is_exist_in_db(db, table_name, pbh_table_tbl_name): - exit_with_error("Error: invalid value '{}' for 'table-name' argument".format(table_name), fg="red") - if not is_exist_in_db(db, hash, pbh_hash_tbl_name): - exit_with_error("Error: invalid value '{}' for '--hash' option".format(hash), fg="red") + ctx = click.get_current_context() - table = "PBH_RULE" - key = table_name, rule_name + table_name_validator(ctx, db.cfgdb_pipe, table_name) + rule_name_validator(ctx, db.cfgdb_pipe, table_name, rule_name) + + table = str(PBH_RULE_CDB) + key = (str(table_name), str(rule_name)) data = {} + if priority is not None: - data["priority"] = priority + data[PBH_RULE_PRIORITY] = priority if gre_key is not None: - data["gre_key"] = gre_key + data[PBH_RULE_GRE_KEY] = gre_key if ether_type is not None: - data["ether_type"] = ether_type + data[PBH_RULE_ETHER_TYPE] = ether_type if ip_protocol is not None: - data["ip_protocol"] = ip_protocol + data[PBH_RULE_IP_PROTOCOL] = ip_protocol if ipv6_next_header is not None: - data["ipv6_next_header"] = ipv6_next_header + data[PBH_RULE_IPV6_NEXT_HEADER] = ipv6_next_header if l4_dst_port is not None: - data["l4_dst_port"] = l4_dst_port + data[PBH_RULE_L4_DST_PORT] = l4_dst_port if inner_ether_type is not None: - data["inner_ether_type"] = inner_ether_type + data[PBH_RULE_INNER_ETHER_TYPE] = inner_ether_type if hash is not None: - data["hash"] = hash + hash_validator(ctx, db.cfgdb_pipe, hash) + data[PBH_RULE_HASH] = hash if packet_action is not None: - data["packet_action"] = packet_action + data[PBH_RULE_PACKET_ACTION] = packet_action if flow_counter is not None: - data["flow_counter"] = flow_counter + data[PBH_RULE_FLOW_COUNTER] = flow_counter + + if not data: + exit_with_error("Error: Failed to update PBH rule: options are not provided", fg="red") + + cap = pbh_capabilities_query(db.db, PBH_RULE_CAPABILITIES_KEY) + if cap is None: + exit_with_error("Error: Failed to query PBH rule capabilities: configuration is not available", fg="red") try: - update_entry(db.cfgdb, table, key, data) + update_entry(db.cfgdb_pipe, cap, table, key, data) + if data.get(PBH_RULE_FLOW_COUNTER, "") == "DISABLED": + update_pbh_counters(table_name, rule_name) + except Exception as err: + exit_with_error("Error: {}".format(err), fg="red") + + 
+@PBH_RULE_update_field.command(name="del") +@click.argument( + "table-name", + nargs=1, + required=True +) +@click.argument( + "rule-name", + nargs=1, + required=True +) +@click.option( + "--priority", + help="Deletes priority for this rule", + is_flag=True +) +@click.option( + "--gre-key", + help="Deletes packet match for this rule: GRE key (value/mask)", + is_flag=True +) +@click.option( + "--ether-type", + help="Deletes packet match for this rule: EtherType (IANA Ethertypes)", + is_flag=True +) +@click.option( + "--ip-protocol", + help="Deletes packet match for this rule: IP protocol (IANA Protocol Numbers)", + is_flag=True +) +@click.option( + "--ipv6-next-header", + help="Deletes packet match for this rule: IPv6 Next header (IANA Protocol Numbers)", + is_flag=True +) +@click.option( + "--l4-dst-port", + help="Deletes packet match for this rule: L4 destination port", + is_flag=True +) +@click.option( + "--inner-ether-type", + help="Deletes packet match for this rule: inner EtherType (IANA Ethertypes)", + is_flag=True +) +@click.option( + "--hash", + help="Deletes hash for this rule", + is_flag=True +) +@click.option( + "--packet-action", + help="Deletes packet action for this rule", + is_flag=True +) +@click.option( + "--flow-counter", + help="Deletes packet/byte counter for this rule", + is_flag=True +) +@clicommon.pass_db +def PBH_RULE_update_field_del( + db, + table_name, + rule_name, + priority, + gre_key, + ether_type, + ip_protocol, + ipv6_next_header, + l4_dst_port, + inner_ether_type, + hash, + packet_action, + flow_counter +): + """ Delete object field from PBH_RULE table """ + + ctx = click.get_current_context() + + table_name_validator(ctx, db.cfgdb_pipe, table_name) + rule_name_validator(ctx, db.cfgdb_pipe, table_name, rule_name) + + table = str(PBH_RULE_CDB) + key = (str(table_name), str(rule_name)) + data = {} + + if priority: + data[PBH_RULE_PRIORITY] = None + if gre_key: + data[PBH_RULE_GRE_KEY] = None + if ether_type: + data[PBH_RULE_ETHER_TYPE] = None + if ip_protocol: + data[PBH_RULE_IP_PROTOCOL] = None + if ipv6_next_header: + data[PBH_RULE_IPV6_NEXT_HEADER] = None + if l4_dst_port: + data[PBH_RULE_L4_DST_PORT] = None + if inner_ether_type: + data[PBH_RULE_INNER_ETHER_TYPE] = None + if hash: + data[PBH_RULE_HASH] = None + if packet_action: + data[PBH_RULE_PACKET_ACTION] = None + if flow_counter: + data[PBH_RULE_FLOW_COUNTER] = None + + if not data: + exit_with_error("Error: Failed to update PBH rule: options are not provided", fg="red") + + match_total, match_count = pbh_match_count(db.cfgdb_pipe, table, key, data) + if match_count >= match_total: + exit_with_error("Error: Failed to update PBH rule: match options are required", fg="red") + + cap = pbh_capabilities_query(db.db, PBH_RULE_CAPABILITIES_KEY) + if cap is None: + exit_with_error("Error: Failed to query PBH rule capabilities: configuration is not available", fg="red") + + try: + update_entry(db.cfgdb_pipe, cap, table, key, data) + if flow_counter: + update_pbh_counters(table_name, rule_name) except Exception as err: exit_with_error("Error: {}".format(err), fg="red") @@ -726,24 +1229,34 @@ def PBH_RULE_update( @click.argument( "table-name", nargs=1, - required=True, + required=True ) @click.argument( "rule-name", nargs=1, - required=True, + required=True ) @clicommon.pass_db def PBH_RULE_delete(db, table_name, rule_name): """ Delete object from PBH_RULE table """ - table = "PBH_RULE" - key = table_name, rule_name + ctx = click.get_current_context() + + table_name_validator(ctx, db.cfgdb_pipe, table_name) + 
rule_name_validator(ctx, db.cfgdb_pipe, table_name, rule_name) + + table = str(PBH_RULE_CDB) + key = (str(table_name), str(rule_name)) + try: - del_entry(db.cfgdb, table, key) + del_entry(db.cfgdb_pipe, table, key) + update_pbh_counters(table_name, rule_name) except Exception as err: exit_with_error("Error: {}".format(err), fg="red") +# +# PBH table ----------------------------------------------------------------------------------------------------------- +# @PBH.group( name="table", @@ -759,34 +1272,41 @@ def PBH_TABLE(): @click.argument( "table-name", nargs=1, - required=True, -) -@click.option( - "--description", - help="The description of this table", - required=True, + required=True ) @click.option( "--interface-list", help="Interfaces to which this table is applied", - required=True, + required=True +) +@click.option( + "--description", + help="The description of this table", + required=True ) @clicommon.pass_db -def PBH_TABLE_add(db, table_name, description, interface_list): +def PBH_TABLE_add(db, table_name, interface_list, description): """ Add object to PBH_TABLE table """ - interfaces_list_validator(db, interface_list, is_update=False) + ctx = click.get_current_context() + + table_name_validator(ctx, db.cfgdb_pipe, table_name, False) - table = "PBH_TABLE" - key = table_name + table = str(PBH_TABLE_CDB) + key = str(table_name) data = {} - if description is not None: - data["description"] = description + if interface_list is not None: - data["interface_list"] = interface_list.split(",") + interface_list_validator(ctx, db.cfgdb_pipe, interface_list) + data[PBH_TABLE_INTERFACE_LIST] = interface_list.split(",") + if description is not None: + data[PBH_TABLE_DESCRIPTION] = description + + if not data: + exit_with_error("Error: Failed to add PBH table: options are not provided", fg="red") try: - add_entry(db.cfgdb, table, key, data) + add_entry(db.cfgdb_pipe, table, key, data) except Exception as err: exit_with_error("Error: {}".format(err), fg="red") @@ -795,32 +1315,43 @@ def PBH_TABLE_add(db, table_name, description, interface_list): @click.argument( "table-name", nargs=1, - required=True, + required=True ) @click.option( - "--description", - help="The description of this table", + "--interface-list", + help="Interfaces to which this table is applied" ) @click.option( - "--interface-list", - help="Interfaces to which this table is applied", + "--description", + help="The description of this table", ) @clicommon.pass_db -def PBH_TABLE_update(db, table_name, description, interface_list): +def PBH_TABLE_update(db, table_name, interface_list, description): """ Update object in PBH_TABLE table """ - interfaces_list_validator(db, interface_list, is_update=True) + ctx = click.get_current_context() - table = "PBH_TABLE" - key = table_name + table_name_validator(ctx, db.cfgdb_pipe, table_name) + + table = str(PBH_TABLE_CDB) + key = str(table_name) data = {} - if description is not None: - data["description"] = description + if interface_list is not None: - data["interface_list"] = interface_list.split(",") + interface_list_validator(ctx, db.cfgdb_pipe, interface_list) + data[PBH_TABLE_INTERFACE_LIST] = interface_list.split(",") + if description is not None: + data[PBH_TABLE_DESCRIPTION] = description + + if not data: + exit_with_error("Error: Failed to update PBH table: options are not provided", fg="red") + + cap = pbh_capabilities_query(db.db, PBH_TABLE_CAPABILITIES_KEY) + if cap is None: + exit_with_error("Error: Failed to query PBH table capabilities: configuration is not available", 
fg="red") try: - update_entry(db.cfgdb, table, key, data) + update_entry(db.cfgdb_pipe, cap, table, key, data) except Exception as err: exit_with_error("Error: {}".format(err), fg="red") @@ -835,17 +1366,24 @@ def PBH_TABLE_update(db, table_name, description, interface_list): def PBH_TABLE_delete(db, table_name): """ Delete object from PBH_TABLE table """ - table = "PBH_TABLE" - key = table_name + ctx = click.get_current_context() + + table_name_validator(ctx, db.cfgdb_pipe, table_name) + + table = str(PBH_TABLE_CDB) + key = str(table_name) + try: - del_entry(db.cfgdb, table, key) + del_entry(db.cfgdb_pipe, table, key) except Exception as err: exit_with_error("Error: {}".format(err), fg="red") +# +# PBH plugin ---------------------------------------------------------------------------------------------------------- +# def register(cli): cli_node = PBH if cli_node.name in cli.commands: raise Exception("{} already exists in CLI".format(cli_node.name)) cli.add_command(PBH) - diff --git a/show/plugins/pbh.py b/show/plugins/pbh.py index 95115d976d..e50f6507a5 100644 --- a/show/plugins/pbh.py +++ b/show/plugins/pbh.py @@ -93,7 +93,7 @@ def PBH_HASH_FIELD(db): body = [] - table = db.cfgdb.get_table(pbh_hash_field_tbl_name) + table = db.cfgdb_pipe.get_table(pbh_hash_field_tbl_name) for key in natsort.natsorted(table): entry = table[key] @@ -158,7 +158,7 @@ def PBH_HASH(db): body = [] - table = db.cfgdb.get_table(pbh_hash_tbl_name) + table = db.cfgdb_pipe.get_table(pbh_hash_tbl_name) for key in natsort.natsorted(table): entry = table[key] if not isinstance(key, tuple): @@ -203,7 +203,7 @@ def PBH_RULE(db): body = [] - table = db.cfgdb.get_table(pbh_rule_tbl_name) + table = db.cfgdb_pipe.get_table(pbh_rule_tbl_name) for key in natsort.natsorted(table): entry = table[key] if not isinstance(key, tuple): @@ -322,7 +322,7 @@ def PBH_TABLE(db): body = [] - table = db.cfgdb.get_table(pbh_table_tbl_name) + table = db.cfgdb_pipe.get_table(pbh_table_tbl_name) for key in natsort.natsorted(table): entry = table[key] if not isinstance(key, tuple): @@ -374,12 +374,12 @@ def PBH_STATISTICS(db): body = [] - pbh_rules = db.cfgdb.get_table(pbh_rule_tbl_name) + pbh_rules = db.cfgdb_pipe.get_table(pbh_rule_tbl_name) pbh_counters = read_pbh_counters(pbh_rules) - saved_pbh_counters = read_saved_pbh_counters() + saved_pbh_counters = deserialize_pbh_counters() for key in pbh_rules: - if pbh_rules[key]['flow_counter'] == 'ENABLED': + if pbh_rules[key].get("flow_counter", "") == "ENABLED": row = [ key[0], key[1], @@ -403,20 +403,43 @@ def get_counter_value(pbh_counters, saved_pbh_counters, key, type): return str(pbh_counters[key][type]) -def remap_keys(obj_list): - res = {} - for e in obj_list: - res[e['key'][0], e['key'][1]] = e['value'] - return res +def deserialize_pbh_counters(): + """ Helper that performs PBH counters deserialization. + in = [ + { + "key": ["pbh_table1", "pbh_rule1"], + "value": {"SAI_ACL_COUNTER_ATTR_BYTES": "0", "SAI_ACL_COUNTER_ATTR_PACKETS": "0"} + }, + ... + { + "key": ["pbh_tableN", "pbh_ruleN"], + "value": {"SAI_ACL_COUNTER_ATTR_BYTES": "0", "SAI_ACL_COUNTER_ATTR_PACKETS": "0"} + } + ] + + out = { + ('pbh_table1', 'pbh_rule1'): {'SAI_ACL_COUNTER_ATTR_BYTES': '0', 'SAI_ACL_COUNTER_ATTR_PACKETS': '0'}, + ... + ('pbh_tableN', 'pbh_ruleN'): {'SAI_ACL_COUNTER_ATTR_BYTES': '0', 'SAI_ACL_COUNTER_ATTR_PACKETS': '0'} + } + + Returns: + obj: counters dict. 
+ """ + + def remap_keys(obj): + res = {} + for e in obj: + res[e['key'][0], e['key'][1]] = e['value'] + return res -def read_saved_pbh_counters(): if os.path.isfile(PBH_COUNTERS_LOCATION): try: - with open(PBH_COUNTERS_LOCATION) as fp: - return remap_keys(json.load(fp)) - except Exception: - return {} + with open(PBH_COUNTERS_LOCATION, 'r') as f: + return remap_keys(json.load(f)) + except Exception as err: + pass return {} @@ -484,4 +507,3 @@ def register(cli): if cli_node.name in cli.commands: raise Exception(f"{cli_node.name} already exists in CLI") cli.add_command(PBH) - diff --git a/tests/mock_tables/dbconnector.py b/tests/mock_tables/dbconnector.py index 30a9f5a653..80d74cafd7 100644 --- a/tests/mock_tables/dbconnector.py +++ b/tests/mock_tables/dbconnector.py @@ -2,6 +2,7 @@ import json import os import sys +import re from unittest import mock import mockredis @@ -119,6 +120,41 @@ def __init__(self, *args, **kwargs): for attr, value in v.items(): self.hset(k, attr, value) + # Patch mockredis/mockredis/client.py + # The offical implementation assume decode_responses=False + def _common_scan(self, values_function, cursor='0', match=None, count=10, key=None): + """ + Common scanning skeleton. + + :param key: optional function used to identify what 'match' is applied to + """ + if count is None: + count = 10 + cursor = int(cursor) + count = int(count) + if not count: + raise ValueError('if specified, count must be > 0: %s' % count) + + values = values_function() + if cursor + count >= len(values): + # we reached the end, back to zero + result_cursor = 0 + else: + result_cursor = cursor + count + + values = values[cursor:cursor+count] + + if match is not None: + if self.decode_responses: + regex = re.compile('^' + re.escape(self._encode(match)).replace('\\*', '.*') + '$') + else: + regex = re.compile(b'^' + re.escape(self._encode(match)).replace(b'\\*', b'.*') + b'$') + if not key: + key = lambda v: v + values = [v for v in values if regex.match(key(v))] + + return [result_cursor, values] + # Patch mockredis/mockredis/client.py # The offical implementation assume decode_responses=False # Here we detect the option and decode after doing encode @@ -128,7 +164,7 @@ def _encode(self, value): value = super(SwssSyncClient, self)._encode(value) if self.decode_responses: - return value.decode('utf-8') + return value.decode('utf-8') # Patch mockredis/mockredis/client.py # The official implementation will filter out keys with a slash '/' diff --git a/tests/pbh_input/assert_show_output.py b/tests/pbh_input/assert_show_output.py index 5b67403a17..7a701ba4bc 100644 --- a/tests/pbh_input/assert_show_output.py +++ b/tests/pbh_input/assert_show_output.py @@ -77,6 +77,7 @@ pbh_table2 vxlan 300 400 """ + show_pbh_statistics_updated="""\ TABLE RULE RX PACKETS COUNT RX BYTES COUNT ---------- ------ ------------------ ---------------- @@ -84,9 +85,25 @@ pbh_table2 vxlan 400 400 """ + show_pbh_statistics_after_disabling_rule="""\ TABLE RULE RX PACKETS COUNT RX BYTES COUNT ---------- ------ ------------------ ---------------- pbh_table1 nvgre 0 0 """ + +show_pbh_statistics_after_toggling_counter="""\ +TABLE RULE RX PACKETS COUNT RX BYTES COUNT +---------- ------ ------------------ ---------------- +pbh_table1 nvgre 100 200 +pbh_table2 vxlan 0 0 +""" + + +show_pbh_statistics_after_toggling_rule="""\ +TABLE RULE RX PACKETS COUNT RX BYTES COUNT +---------- ------ ------------------ ---------------- +pbh_table1 nvgre 0 0 +pbh_table2 vxlan 300 400 +""" diff --git a/tests/pbh_input/state_db.json 
b/tests/pbh_input/state_db.json new file mode 100644 index 0000000000..dccc54cf40 --- /dev/null +++ b/tests/pbh_input/state_db.json @@ -0,0 +1,26 @@ +{ + "PBH_CAPABILITIES|table": { + "interface_list": "UPDATE", + "description": "UPDATE" + }, + "PBH_CAPABILITIES|rule": { + "priority": "UPDATE", + "ether_type": "ADD,UPDATE,REMOVE", + "ip_protocol": "ADD,UPDATE,REMOVE", + "ipv6_next_header": "ADD,UPDATE,REMOVE", + "l4_dst_port": "ADD,UPDATE,REMOVE", + "gre_key": "ADD,UPDATE,REMOVE", + "inner_ether_type": "ADD,UPDATE,REMOVE", + "hash": "UPDATE", + "packet_action": "ADD,UPDATE,REMOVE", + "flow_counter": "ADD,UPDATE,REMOVE" + }, + "PBH_CAPABILITIES|hash": { + "hash_field_list": "UPDATE" + }, + "PBH_CAPABILITIES|hash-field": { + "hash_field": "", + "ip_mask": "", + "sequence_id": "" + } +} diff --git a/tests/pbh_test.py b/tests/pbh_test.py index bc4c74db73..1972747782 100644 --- a/tests/pbh_test.py +++ b/tests/pbh_test.py @@ -37,6 +37,7 @@ def teardown_class(cls): os.environ['UTILITIES_UNIT_TESTING'] = "0" os.environ["UTILITIES_UNIT_TESTING_TOPOLOGY"] = "" dbconnector.dedicated_dbs['CONFIG_DB'] = None + dbconnector.dedicated_dbs['STATE_DB'] = None dbconnector.dedicated_dbs['COUNTERS_DB'] = None @@ -116,7 +117,7 @@ def test_config_pbh_hash_field_add_mismatch_hash_field_ip_mask( logger.debug("\n" + result.output) logger.debug(result.exit_code) - assert result.exit_code == ERROR + assert result.exit_code == ERROR2 def test_config_pbh_hash_field_add_invalid_ip(self): @@ -132,7 +133,7 @@ def test_config_pbh_hash_field_add_invalid_ip(self): logger.debug("\n" + result.output) logger.debug(result.exit_code) - assert result.exit_code == ERROR + assert result.exit_code == ERROR2 @pytest.mark.parametrize("hash_field_name,hash_field", [ @@ -155,7 +156,7 @@ def test_config_pbh_hash_field_add_none_ip_mask( logger.debug("\n" + result.output) logger.debug(result.exit_code) - assert result.exit_code == ERROR + assert result.exit_code == ERROR2 @pytest.mark.parametrize("hash_field_name,hash_field,updated_hash_field,sequence_id", [ @@ -169,6 +170,8 @@ def test_config_pbh_hash_field_update_hash_field_sequence_id_no_ip( updated_hash_field, sequence_id ): + dbconnector.dedicated_dbs['STATE_DB'] = os.path.join(mock_db_path, 'state_db') + db = Db() runner = CliRunner() @@ -190,7 +193,7 @@ def test_config_pbh_hash_field_update_hash_field_sequence_id_no_ip( logger.debug("\n" + result.output) logger.debug(result.exit_code) - assert result.exit_code == SUCCESS + assert result.exit_code == ERROR @pytest.mark.parametrize("hash_field_name,hash_field,updated_hash_field,ip_mask,updated_ip_mask", [ @@ -205,6 +208,8 @@ def test_config_pbh_hash_field_update_hash_field_ip_mask( ip_mask, updated_ip_mask ): + dbconnector.dedicated_dbs['STATE_DB'] = os.path.join(mock_db_path, 'state_db') + db = Db() runner = CliRunner() @@ -227,10 +232,12 @@ def test_config_pbh_hash_field_update_hash_field_ip_mask( logger.debug("\n" + result.output) logger.debug(result.exit_code) - assert result.exit_code == SUCCESS + assert result.exit_code == ERROR def test_config_pbh_hash_field_update_invalid_hash_field(self): + dbconnector.dedicated_dbs['STATE_DB'] = os.path.join(mock_db_path, 'state_db') + db = Db() runner = CliRunner() @@ -252,10 +259,12 @@ def test_config_pbh_hash_field_update_invalid_hash_field(self): logger.debug("\n" + result.output) logger.debug(result.exit_code) - assert result.exit_code == ERROR + assert result.exit_code == ERROR2 def test_config_pbh_hash_field_update_invalid_ipv4_mask(self): + dbconnector.dedicated_dbs['STATE_DB'] = 
os.path.join(mock_db_path, 'state_db') + db = Db() runner = CliRunner() @@ -277,7 +286,7 @@ def test_config_pbh_hash_field_update_invalid_ipv4_mask(self): logger.debug("\n" + result.output) logger.debug(result.exit_code) - assert result.exit_code == ERROR + assert result.exit_code == ERROR2 @pytest.mark.parametrize("hash_field_name,hash_field,ip_mask,updated_ip_mask", [ @@ -291,6 +300,8 @@ def test_config_pbh_hash_field_update_invalid_ip_mask( ip_mask, updated_ip_mask ): + dbconnector.dedicated_dbs['STATE_DB'] = os.path.join(mock_db_path, 'state_db') + db = Db() runner = CliRunner() @@ -313,7 +324,7 @@ def test_config_pbh_hash_field_update_invalid_ip_mask( logger.debug("\n" + result.output) logger.debug(result.exit_code) - assert result.exit_code == ERROR + assert result.exit_code == ERROR2 ########## CONFIG PBH HASH ########## @@ -321,6 +332,7 @@ def test_config_pbh_hash_field_update_invalid_ip_mask( def test_config_pbh_hash_add_delete_ipv4(self): dbconnector.dedicated_dbs['CONFIG_DB'] = os.path.join(mock_db_path, 'hash_fields') + db = Db() runner = CliRunner() @@ -347,6 +359,8 @@ def test_config_pbh_hash_add_delete_ipv4(self): def test_config_pbh_hash_add_update_ipv6(self): dbconnector.dedicated_dbs['CONFIG_DB'] = os.path.join(mock_db_path, 'hash_fields') + dbconnector.dedicated_dbs['STATE_DB'] = os.path.join(mock_db_path, 'state_db') + db = Db() runner = CliRunner() @@ -373,8 +387,8 @@ def test_config_pbh_hash_add_update_ipv6(self): assert result.exit_code == SUCCESS @pytest.mark.parametrize("hash_name,hash_field_list,exit_code", [ - ("inner_v6_hash", INVALID_VALUE, ERROR), - ("inner_v6_hash", "", ERROR), + ("inner_v6_hash", INVALID_VALUE, ERROR2), + ("inner_v6_hash", "", ERROR2), ("inner_v6_hash", None, ERROR2) ]) def test_config_pbh_hash_add_invalid_hash_field_list( @@ -384,6 +398,7 @@ def test_config_pbh_hash_add_invalid_hash_field_list( exit_code ): dbconnector.dedicated_dbs['CONFIG_DB'] = os.path.join(mock_db_path, 'hash_fields') + db = Db() runner = CliRunner() @@ -403,6 +418,7 @@ def test_config_pbh_hash_add_invalid_hash_field_list( def test_config_pbh_table_add_delete_ports(self): dbconnector.dedicated_dbs['CONFIG_DB'] = os.path.join(mock_db_path, 'table') + db = Db() runner = CliRunner() @@ -428,6 +444,8 @@ def test_config_pbh_table_add_delete_ports(self): def test_config_pbh_table_add_update_portchannels(self): dbconnector.dedicated_dbs['CONFIG_DB'] = os.path.join(mock_db_path, 'table') + dbconnector.dedicated_dbs['STATE_DB'] = os.path.join(mock_db_path, 'state_db') + db = Db() runner = CliRunner() @@ -474,6 +492,7 @@ def test_config_pbh_table_add_update_portchannels(self): def test_config_pbh_table_add_port_and_portchannel(self): dbconnector.dedicated_dbs['CONFIG_DB'] = os.path.join(mock_db_path, 'table') + db = Db() runner = CliRunner() @@ -491,6 +510,7 @@ def test_config_pbh_table_add_port_and_portchannel(self): def test_config_pbh_table_add_invalid_port(self): dbconnector.dedicated_dbs['CONFIG_DB'] = os.path.join(mock_db_path, 'table') + db = Db() runner = CliRunner() @@ -502,11 +522,13 @@ def test_config_pbh_table_add_invalid_port(self): logger.debug("\n" + result.output) logger.debug(result.exit_code) - assert result.exit_code == ERROR + assert result.exit_code == ERROR2 def test_config_pbh_table_add_update_invalid_interface(self): dbconnector.dedicated_dbs['CONFIG_DB'] = os.path.join(mock_db_path, 'table') + dbconnector.dedicated_dbs['STATE_DB'] = os.path.join(mock_db_path, 'state_db') + db = Db() runner = CliRunner() @@ -528,7 +550,7 @@ def 
test_config_pbh_table_add_update_invalid_interface(self): logger.debug("\n" + result.output) logger.debug(result.exit_code) - assert result.exit_code == ERROR + assert result.exit_code == ERROR2 ########## CONFIG PBH RULE ########## @@ -536,6 +558,7 @@ def test_config_pbh_table_add_update_invalid_interface(self): def test_config_pbh_rule_add_delete_nvgre(self): dbconnector.dedicated_dbs['CONFIG_DB'] = os.path.join(mock_db_path, 'rule') + db = Db() runner = CliRunner() @@ -563,12 +586,14 @@ def test_config_pbh_rule_add_delete_nvgre(self): def test_config_pbh_rule_add_update_vxlan(self): dbconnector.dedicated_dbs['CONFIG_DB'] = os.path.join(mock_db_path, 'rule') + dbconnector.dedicated_dbs['STATE_DB'] = os.path.join(mock_db_path, 'state_db') + db = Db() runner = CliRunner() result = runner.invoke( config.config.commands["pbh"].commands["rule"]. - commands["add"], ["pbh_table1", "vxlan ", + commands["add"], ["pbh_table1", "vxlan", "--priority", "2", "--ip-protocol", "0x11", "--inner-ether-type", "0x0800","--l4-dst-port", "0x12b5", "--hash", "inner_v4_hash", "--packet-action", @@ -581,7 +606,8 @@ def test_config_pbh_rule_add_update_vxlan(self): result = runner.invoke( config.config.commands["pbh"].commands["rule"]. - commands["update"], ["pbh_table1", "vxlan ", + commands["update"].commands["field"]. + commands["set"], ["pbh_table1", "vxlan", "--priority", "3", "--inner-ether-type", "0x086d", "--packet-action", "SET_LAG_HASH", "--flow-counter", "DISABLED"], obj=db @@ -592,14 +618,61 @@ def test_config_pbh_rule_add_update_vxlan(self): assert result.exit_code == SUCCESS + def test_config_pbh_rule_update_nvgre_to_vxlan(self): + dbconnector.dedicated_dbs['CONFIG_DB'] = os.path.join(mock_db_path, 'rule') + dbconnector.dedicated_dbs['STATE_DB'] = os.path.join(mock_db_path, 'state_db') + + db = Db() + runner = CliRunner() + + result = runner.invoke( + config.config.commands["pbh"].commands["rule"]. + commands["add"],["pbh_table1", "nvgre", "--priority", "1", + "--ether-type", "0x0800", "--ip-protocol", "0x2f", + "--gre-key", "0x2500/0xffffff00", "--inner-ether-type", + "0x86dd", "--hash", "inner_v6_hash", "--packet-action", + "SET_ECMP_HASH", "--flow-counter", "DISABLED"], obj=db + ) + + logger.debug("\n" + result.output) + logger.debug(result.exit_code) + assert result.exit_code == SUCCESS + + result = runner.invoke( + config.config.commands["pbh"].commands["rule"]. + commands["update"].commands["field"]. + commands["set"], ["pbh_table1", "nvgre", + "--ether-type", "0x86dd", "--ipv6-next-header", "0x11", + "--l4-dst-port", "0x12b5", "--inner-ether-type", "0x0800", + "--hash", "inner_v4_hash"], obj=db + ) + + logger.debug("\n" + result.output) + logger.debug(result.exit_code) + assert result.exit_code == SUCCESS + + result = runner.invoke( + config.config.commands["pbh"].commands["rule"]. + commands["update"].commands["field"]. + commands["del"], ["pbh_table1", "nvgre", + "--ip-protocol", "--gre-key", + "--packet-action", "--flow-counter"], obj=db + ) + + logger.debug("\n" + result.output) + logger.debug(result.exit_code) + assert result.exit_code == SUCCESS + + def test_config_pbh_rule_update_invalid(self): dbconnector.dedicated_dbs['CONFIG_DB'] = os.path.join(mock_db_path, 'rule') + db = Db() runner = CliRunner() result = runner.invoke( config.config.commands["pbh"].commands["rule"]. 
- commands["add"], ["pbh_table1", "vxlan ", "--priority", + commands["add"], ["pbh_table1", "vxlan", "--priority", "2", "--ip-protocol", "0x11", "--inner-ether-type", "0x0800", "--l4-dst-port", "0x12b5", "--hash", "inner_v6_hash", "--packet-action", "SET_ECMP_HASH", @@ -612,7 +685,7 @@ def test_config_pbh_rule_update_invalid(self): result = runner.invoke( config.config.commands["pbh"].commands["rule"]. - commands["update"], ["pbh_table1", "vxlan ", + commands["update"], ["pbh_table1", "vxlan", "--flow-counter", INVALID_VALUE], obj=db ) @@ -623,12 +696,13 @@ def test_config_pbh_rule_update_invalid(self): def test_config_pbh_rule_add_invalid_ip_protocol(self): dbconnector.dedicated_dbs['CONFIG_DB'] = os.path.join(mock_db_path, 'rule') + db = Db() runner = CliRunner() result = runner.invoke( config.config.commands["pbh"].commands["rule"]. - commands["add"], ["pbh_table1", "vxlan ", "--priority", + commands["add"], ["pbh_table1", "vxlan", "--priority", "2", "--ip-protocol", INVALID_VALUE, "--inner-ether-type", "0x0800", "--l4-dst-port", "0x12b5", "--hash", "inner_v6_hash", "--packet-action", "SET_ECMP_HASH", "--flow-counter", @@ -637,17 +711,18 @@ def test_config_pbh_rule_add_invalid_ip_protocol(self): logger.debug("\n" + result.output) logger.debug(result.exit_code) - assert result.exit_code == ERROR + assert result.exit_code == ERROR2 def test_config_pbh_rule_add_invalid_inner_ether_type(self): dbconnector.dedicated_dbs['CONFIG_DB'] = os.path.join(mock_db_path, 'rule') + db = Db() runner = CliRunner() result = runner.invoke( config.config.commands["pbh"].commands["rule"]. - commands["add"], ["pbh_table1", "vxlan ", "--priority", + commands["add"], ["pbh_table1", "vxlan", "--priority", "2", "--ip-protocol", "0x11", "--inner-ether-type", INVALID_VALUE, "--l4-dst-port", "0x12b5", "--hash", "inner_v6_hash", "--packet-action", "SET_ECMP_HASH", @@ -656,17 +731,18 @@ def test_config_pbh_rule_add_invalid_inner_ether_type(self): logger.debug("\n" + result.output) logger.debug(result.exit_code) - assert result.exit_code == ERROR + assert result.exit_code == ERROR2 def test_config_pbh_rule_add_invalid_hash(self): dbconnector.dedicated_dbs['CONFIG_DB'] = os.path.join(mock_db_path, 'rule') + db = Db() runner = CliRunner() result = runner.invoke( config.config.commands["pbh"].commands["rule"]. - commands["add"], ["pbh_table1", "vxlan ", "--priority", + commands["add"], ["pbh_table1", "vxlan", "--priority", "2", "--ip-protocol", "0x11", "--inner-ether-type", "0x0800", "--l4-dst-port", "0x12b5", "--hash", INVALID_VALUE, "--packet-action", "SET_ECMP_HASH", "--flow-counter", @@ -675,17 +751,18 @@ def test_config_pbh_rule_add_invalid_hash(self): logger.debug("\n" + result.output) logger.debug(result.exit_code) - assert result.exit_code == ERROR + assert result.exit_code == ERROR2 def test_config_pbh_rule_add_invalid_packet_action(self): dbconnector.dedicated_dbs['CONFIG_DB'] = os.path.join(mock_db_path, 'rule') + db = Db() runner = CliRunner() result = runner.invoke( config.config.commands["pbh"].commands["rule"]. 
- commands["add"], ["pbh_table1", "vxlan ", "--priority", + commands["add"], ["pbh_table1", "vxlan", "--priority", "2", "--ip-protocol", "0x11", "--inner-ether-type", "0x0800", "--l4-dst-port", "0x12b5", "--hash", "inner_v6_hash", "--packet-action", INVALID_VALUE, @@ -699,12 +776,13 @@ def test_config_pbh_rule_add_invalid_packet_action(self): def test_config_pbh_rule_add_invalid_flow_counter(self): dbconnector.dedicated_dbs['CONFIG_DB'] = os.path.join(mock_db_path, 'rule') + db = Db() runner = CliRunner() result = runner.invoke( config.config.commands["pbh"].commands["rule"]. - commands["add"], ["pbh_table1", "vxlan ", "--priority", + commands["add"], ["pbh_table1", "vxlan", "--priority", "2", "--ip-protocol", "0x11", "--inner-ether-type", "0x0800", "--l4-dst-port", "0x12b5", "--hash", "inner_v6_hash", "--packet-action", "SET_ECMP_HASH", @@ -719,6 +797,7 @@ def test_config_pbh_rule_add_invalid_flow_counter(self): def test_show_pbh_hash_field(self): dbconnector.dedicated_dbs['CONFIG_DB'] = os.path.join(mock_db_path, 'full_pbh_config') + db = Db() runner = CliRunner() @@ -738,6 +817,7 @@ def test_show_pbh_hash_field(self): def test_show_pbh_hash(self): dbconnector.dedicated_dbs['CONFIG_DB'] = os.path.join(mock_db_path, 'full_pbh_config') + db = Db() runner = CliRunner() @@ -757,6 +837,7 @@ def test_show_pbh_hash(self): def test_show_pbh_table(self): dbconnector.dedicated_dbs['CONFIG_DB'] = os.path.join(mock_db_path, 'full_pbh_config') + db = Db() runner = CliRunner() @@ -776,6 +857,7 @@ def test_show_pbh_table(self): def test_show_pbh_rule(self): dbconnector.dedicated_dbs['CONFIG_DB'] = os.path.join(mock_db_path, 'full_pbh_config') + db = Db() runner = CliRunner() @@ -792,14 +874,19 @@ def test_show_pbh_rule(self): ########## SHOW PBH STATISTICS ########## - def test_show_pbh_statistics_on_empty_config(self): - dbconnector.dedicated_dbs['CONFIG_DB'] = None - dbconnector.dedicated_dbs['COUNTERS_DB'] = None + def remove_pbh_counters_file(self): SAVED_PBH_COUNTERS_FILE = '/tmp/.pbh_counters.txt' if os.path.isfile(SAVED_PBH_COUNTERS_FILE): os.remove(SAVED_PBH_COUNTERS_FILE) + + def test_show_pbh_statistics_on_empty_config(self): + dbconnector.dedicated_dbs['CONFIG_DB'] = None + dbconnector.dedicated_dbs['COUNTERS_DB'] = None + + self.remove_pbh_counters_file() + db = Db() runner = CliRunner() @@ -818,9 +905,7 @@ def test_show_pbh_statistics(self): dbconnector.dedicated_dbs['COUNTERS_DB'] = os.path.join(mock_db_path, 'counters_db') dbconnector.dedicated_dbs['CONFIG_DB'] = os.path.join(mock_db_path, 'full_pbh_config') - SAVED_PBH_COUNTERS_FILE = '/tmp/.pbh_counters.txt' - if os.path.isfile(SAVED_PBH_COUNTERS_FILE): - os.remove(SAVED_PBH_COUNTERS_FILE) + self.remove_pbh_counters_file() db = Db() runner = CliRunner() @@ -840,9 +925,7 @@ def test_show_pbh_statistics_after_clear(self): dbconnector.dedicated_dbs['COUNTERS_DB'] = os.path.join(mock_db_path, 'counters_db') dbconnector.dedicated_dbs['CONFIG_DB'] = os.path.join(mock_db_path, 'full_pbh_config') - SAVED_PBH_COUNTERS_FILE = '/tmp/.pbh_counters.txt' - if os.path.isfile(SAVED_PBH_COUNTERS_FILE): - os.remove(SAVED_PBH_COUNTERS_FILE) + self.remove_pbh_counters_file() db = Db() runner = CliRunner() @@ -870,9 +953,7 @@ def test_show_pbh_statistics_after_clear_and_counters_updated(self): dbconnector.dedicated_dbs['COUNTERS_DB'] = os.path.join(mock_db_path, 'counters_db') dbconnector.dedicated_dbs['CONFIG_DB'] = os.path.join(mock_db_path, 'full_pbh_config') - SAVED_PBH_COUNTERS_FILE = '/tmp/.pbh_counters.txt' - if 
os.path.isfile(SAVED_PBH_COUNTERS_FILE): - os.remove(SAVED_PBH_COUNTERS_FILE) + self.remove_pbh_counters_file() db = Db() runner = CliRunner() @@ -901,10 +982,9 @@ def test_show_pbh_statistics_after_clear_and_counters_updated(self): def test_show_pbh_statistics_after_disabling_rule(self): dbconnector.dedicated_dbs['COUNTERS_DB'] = os.path.join(mock_db_path, 'counters_db') dbconnector.dedicated_dbs['CONFIG_DB'] = os.path.join(mock_db_path, 'full_pbh_config') + dbconnector.dedicated_dbs['STATE_DB'] = os.path.join(mock_db_path, 'state_db') - SAVED_PBH_COUNTERS_FILE = '/tmp/.pbh_counters.txt' - if os.path.isfile(SAVED_PBH_COUNTERS_FILE): - os.remove(SAVED_PBH_COUNTERS_FILE) + self.remove_pbh_counters_file() db = Db() runner = CliRunner() @@ -919,7 +999,8 @@ def test_show_pbh_statistics_after_disabling_rule(self): result = runner.invoke( config.config.commands["pbh"]. - commands["rule"].commands["update"], + commands["rule"].commands["update"]. + commands["field"].commands["set"], ["pbh_table2", "vxlan", "--flow-counter", "DISABLED"], obj=db ) @@ -937,3 +1018,102 @@ def test_show_pbh_statistics_after_disabling_rule(self): assert result.exit_code == SUCCESS assert result.output == assert_show_output.show_pbh_statistics_after_disabling_rule + + def test_show_pbh_statistics_after_flow_counter_toggle(self): + dbconnector.dedicated_dbs['COUNTERS_DB'] = os.path.join(mock_db_path, 'counters_db') + dbconnector.dedicated_dbs['CONFIG_DB'] = os.path.join(mock_db_path, 'full_pbh_config') + dbconnector.dedicated_dbs['STATE_DB'] = os.path.join(mock_db_path, 'state_db') + + self.remove_pbh_counters_file() + + db = Db() + runner = CliRunner() + + result = runner.invoke( + clear.cli.commands["pbh"]. + commands["statistics"], [], obj=db + ) + + logger.debug("\n" + result.output) + logger.debug(result.exit_code) + + result = runner.invoke( + config.config.commands["pbh"]. + commands["rule"].commands["update"]. + commands["field"].commands["set"], + ["pbh_table1", "nvgre", "--flow-counter", + "DISABLED"], obj=db + ) + + logger.debug("\n" + result.output) + logger.debug(result.exit_code) + + result = runner.invoke( + config.config.commands["pbh"]. + commands["rule"].commands["update"]. + commands["field"].commands["set"], + ["pbh_table1", "nvgre", "--flow-counter", + "ENABLED"], obj=db + ) + + logger.debug("\n" + result.output) + logger.debug(result.exit_code) + + result = runner.invoke( + show.cli.commands["pbh"]. + commands["statistics"], [], obj=db + ) + + logger.debug("\n" + result.output) + logger.debug(result.exit_code) + assert result.exit_code == SUCCESS + assert result.output == assert_show_output.show_pbh_statistics_after_toggling_counter + + + def test_show_pbh_statistics_after_rule_toggle(self): + dbconnector.dedicated_dbs['COUNTERS_DB'] = os.path.join(mock_db_path, 'counters_db') + dbconnector.dedicated_dbs['CONFIG_DB'] = os.path.join(mock_db_path, 'full_pbh_config') + + self.remove_pbh_counters_file() + + db = Db() + runner = CliRunner() + + result = runner.invoke( + clear.cli.commands["pbh"]. + commands["statistics"], [], obj=db + ) + + logger.debug("\n" + result.output) + logger.debug(result.exit_code) + + result = runner.invoke( + config.config.commands["pbh"]. + commands["rule"].commands["delete"], + ["pbh_table2", "vxlan"], obj=db + ) + + logger.debug("\n" + result.output) + logger.debug(result.exit_code) + + result = runner.invoke( + config.config.commands["pbh"].commands["rule"]. 
+ commands["add"], ["pbh_table2", "vxlan", "--priority", + "2", "--ip-protocol", "0x11", "--inner-ether-type", + "0x0800", "--l4-dst-port", "0x12b5", "--hash", + "inner_v4_hash", "--packet-action", "SET_LAG_HASH", + "--flow-counter", "ENABLED"], obj=db + ) + + logger.debug("\n" + result.output) + logger.debug(result.exit_code) + + result = runner.invoke( + show.cli.commands["pbh"]. + commands["statistics"], [], obj=db + ) + + logger.debug("\n" + result.output) + logger.debug(result.exit_code) + assert result.exit_code == SUCCESS + assert result.output == assert_show_output.show_pbh_statistics_after_toggling_rule diff --git a/utilities_common/db.py b/utilities_common/db.py index d736aa1be7..136e1fc91e 100644 --- a/utilities_common/db.py +++ b/utilities_common/db.py @@ -1,5 +1,5 @@ from sonic_py_common import multi_asic, device_info -from swsscommon.swsscommon import ConfigDBConnector, SonicV2Connector +from swsscommon.swsscommon import ConfigDBConnector, ConfigDBPipeConnector, SonicV2Connector from utilities_common import constants from utilities_common.multi_asic import multi_asic_ns_choices @@ -10,6 +10,8 @@ def __init__(self): self.db_clients = {} self.cfgdb = ConfigDBConnector() self.cfgdb.connect() + self.cfgdb_pipe = ConfigDBPipeConnector() + self.cfgdb_pipe.connect() self.db = SonicV2Connector(host="127.0.0.1") # Skip connecting to chassis databases in line cards