[counters] Keep counters cache in a single directory #2232

Merged: 11 commits, Jul 27, 2022
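Reviewer note: the diffs below replace a collection of ad-hoc per-script paths (/tmp/dropstat-<uid>, /tmp/.counters_acl.p, /tmp/.pbh_counters.txt, /tmp/intfstat-<uid>, /tmp/pfcstat-<uid>, /tmp/<type>-stats-<uid>) with a single per-user cache managed by utilities_common.cli.UserCache. The helper itself is not part of this page, so the stand-in below only mirrors the calls that actually appear in the diffs (UserCache(), UserCache('pbh'), UserCache(tag=...), get_directory(), remove(), remove_all()); treat it as an illustrative sketch of the assumed behavior, not the real implementation.

import os
import shutil

CACHE_ROOT = "/tmp/cache"   # the directory scripts/fast-reboot preserves across warm reboots (see below)

class UserCacheSketch:
    """Rough, illustrative stand-in for utilities_common.cli.UserCache."""

    def __init__(self, app_name="cli", tag=None):
        # One sub-directory per user and per application; an optional tag selects a variant.
        self.user_root = os.path.join(CACHE_ROOT, str(os.getuid()))
        self.app_name = app_name
        name = app_name if tag is None else "{}-{}".format(app_name, tag)
        self.cache_dir = os.path.join(self.user_root, name)

    def get_directory(self):
        # Callers no longer mkdir by hand; asking for the directory creates it.
        os.makedirs(self.cache_dir, exist_ok=True)
        return self.cache_dir

    def remove(self):
        # Used by the per-tool clear/delete paths below.
        shutil.rmtree(self.cache_dir, ignore_errors=True)

    def remove_all(self):
        # Used by the "delete all" paths: drop every tagged variant of this app's cache.
        if not os.path.isdir(self.user_root):
            return
        for entry in os.listdir(self.user_root):
            if entry == self.app_name or entry.startswith(self.app_name + "-"):
                shutil.rmtree(os.path.join(self.user_root, entry), ignore_errors=True)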
5 changes: 0 additions & 5 deletions config/main.py
@@ -1517,11 +1517,6 @@ def reload(db, filename, yes, load_sysinfo, no_service_restart, disable_arp_cach
if multi_asic.is_multi_asic() and file_format == 'config_db':
num_cfg_file += num_asic

# Remove cached PG drop counters data
dropstat_dir_prefix = '/tmp/dropstat'
command = "rm -rf {}-*".format(dropstat_dir_prefix)
clicommon.run_command(command, display_cmd=True)

# If the user give the filename[s], extract the file names.
if filename is not None:
cfg_files = filename.split(',')
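The rm -rf of /tmp/dropstat-* can go away because dropstat no longer scatters per-uid directories under /tmp; its cache now lives under the shared per-user cache root. If config reload ever needs to clear counter caches again, a single wipe of that root would presumably cover every tool at once; a hypothetical sketch, not something this PR adds:

import shutil

# Hypothetical cleanup for illustration only: /tmp/cache is the root that
# UserCache appears to use (it is what scripts/fast-reboot preserves below),
# so removing it would clear dropstat, pfcstat, intfstat, aclshow and PBH
# caches in one step instead of per-prefix globs.
shutil.rmtree("/tmp/cache", ignore_errors=True)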
10 changes: 6 additions & 4 deletions config/plugins/pbh.py
@@ -6,13 +6,14 @@
CLI Auto-generation tool HLD - https://github.com/Azure/SONiC/pull/78
"""

import os
import click
import json
import ipaddress
import re
import utilities_common.cli as clicommon

from show.plugins.pbh import deserialize_pbh_counters
from show.plugins.pbh import deserialize_pbh_counters, PBH_COUNTERS_CACHE_FILENAME

GRE_KEY_RE = r"^(0x){1}[a-fA-F0-9]{1,8}/(0x){1}[a-fA-F0-9]{1,8}$"

@@ -79,8 +80,6 @@
PBH_UPDATE = "UPDATE"
PBH_REMOVE = "REMOVE"

PBH_COUNTERS_LOCATION = "/tmp/.pbh_counters.txt"

#
# DB interface --------------------------------------------------------------------------------------------------------
#
@@ -467,11 +466,14 @@ def serialize_pbh_counters(obj):
obj: counters dict.
"""

cache = clicommon.UserCache('pbh')
counters_cache_file = os.path.join(cache.get_directory(), PBH_COUNTERS_CACHE_FILENAME)

def remap_keys(obj):
return [{'key': k, 'value': v} for k, v in obj.items()]

try:
with open(PBH_COUNTERS_LOCATION, 'w') as f:
with open(counters_cache_file, 'w') as f:
json.dump(remap_keys(obj), f)
except IOError as err:
pass
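For context, serialize_pbh_counters writes the same JSON payload as before, just into the per-user cache directory returned by clicommon.UserCache('pbh'), under the shared PBH_COUNTERS_CACHE_FILENAME constant (its value is defined in show/plugins/pbh.py and not shown here). A self-contained sketch of the round trip, with a temporary directory standing in for the real cache directory:

import json
import os
import tempfile

cache_dir = tempfile.mkdtemp()                          # stand-in for UserCache('pbh').get_directory()
cache_file = os.path.join(cache_dir, "pbh_counters")    # stand-in for PBH_COUNTERS_CACHE_FILENAME

counters = {("pbh_table", "pbh_rule"): {"packets": "100", "bytes": "2048"}}

# Serialize: tuple keys are not valid JSON keys, so remap to a list of key/value dicts first.
with open(cache_file, "w") as f:
    json.dump([{"key": list(k), "value": v} for k, v in counters.items()], f)

# Deserialize (roughly what deserialize_pbh_counters in show/plugins/pbh.py undoes):
with open(cache_file) as f:
    restored = {tuple(entry["key"]): entry["value"] for entry in json.load(f)}
assert restored == counters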
17 changes: 9 additions & 8 deletions scripts/aclshow
@@ -20,15 +20,13 @@ optional arguments:
import argparse
import json
import os
from swsscommon.swsscommon import SonicV2Connector, ConfigDBConnector
import sys

from swsscommon.swsscommon import SonicV2Connector, ConfigDBConnector
from utilities_common.cli import UserCache

from tabulate import tabulate

### temp file to save counter positions when doing clear counter action.
### if we could have a SAI command to clear counters will be better, so no need to maintain
### counters in temp loaction for clear conter action
COUNTER_POSITION = '/tmp/.counters_acl.p'
COUNTERS = "COUNTERS"
ACL_COUNTER_RULE_MAP = "ACL_COUNTER_RULE_MAP"

@@ -38,6 +36,9 @@ ACL_HEADER = ["RULE NAME", "TABLE NAME", "PRIO", "PACKETS COUNT", "BYTES COUNT"]
COUNTER_PACKETS_ATTR = "SAI_ACL_COUNTER_ATTR_PACKETS"
COUNTER_BYTES_ATTR = "SAI_ACL_COUNTER_ATTR_BYTES"

USER_CACHE = UserCache()
COUNTERS_CACHE_DIR = USER_CACHE.get_directory()
COUNTERS_CACHE = os.path.join(COUNTERS_CACHE_DIR, 'aclstat')

class AclStat(object):
"""
@@ -78,9 +79,9 @@ class AclStat(object):
res[e['key'][0], e['key'][1]] = e['value']
return res

if os.path.isfile(COUNTER_POSITION):
if os.path.isfile(COUNTERS_CACHE):
try:
with open(COUNTER_POSITION) as fp:
with open(COUNTERS_CACHE) as fp:
self.saved_acl_counters = remap_keys(json.load(fp))
except Exception:
pass
@@ -207,7 +208,7 @@ class AclStat(object):
def remap_keys(dict):
return [{'key': k, 'value': v} for k, v in dict.items()]

with open(COUNTER_POSITION, 'w') as fp:
with open(COUNTERS_CACHE, 'w') as fp:
json.dump(remap_keys(self.acl_counters), fp)

def main():
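The saved-counters file moves from /tmp/.counters_acl.p to a file named aclstat inside the per-user cache directory; the JSON layout and the tuple-key remapping are unchanged. A small round trip matching the save/load code above (a temporary directory stands in for UserCache().get_directory()):

import json
import os
import tempfile

counters_cache = os.path.join(tempfile.mkdtemp(), "aclstat")

acl_counters = {("DATAACL", "RULE_1"): {"packets": "12", "bytes": "3400"}}

# Saving after a counters clear: remap (table, rule) tuples into JSON-friendly entries.
with open(counters_cache, "w") as fp:
    json.dump([{"key": k, "value": v} for k, v in acl_counters.items()], fp)

# Loading on the next run: rebuild the (table, rule) tuple keys.
with open(counters_cache) as fp:
    saved = {(e["key"][0], e["key"][1]): e["value"] for e in json.load(fp)}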
15 changes: 2 additions & 13 deletions scripts/dropstat
@@ -35,6 +35,7 @@ except KeyError:
pass

from swsscommon.swsscommon import SonicV2Connector, ConfigDBConnector
from utilities_common.cli import UserCache


# COUNTERS_DB Tables
@@ -80,8 +81,7 @@ std_switch_description_header = ['DEVICE']


def get_dropstat_dir():
dropstat_dir_prefix = '/tmp/dropstat'
return "{}-{}/".format(dropstat_dir_prefix, os.getuid())
return UserCache().get_directory()


class DropStat(object):
@@ -411,18 +411,7 @@
group = args.group
counter_type = args.type

dropstat_dir = get_dropstat_dir()

# Create the directory to hold clear results
if not os.path.exists(dropstat_dir):
try:
os.makedirs(dropstat_dir)
except IOError as e:
print(e)
sys.exit(e.errno)

dcstat = DropStat()

if command == 'clear':
dcstat.clear_drop_counts()
elif command == 'show':
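get_dropstat_dir() now hands back a ready-to-use directory, which is why the makedirs/IOError block could be deleted from main(): directory creation is assumed to be the cache helper's job. The before/after in isolation (the new call requires a sonic-utilities environment):

import os
from utilities_common.cli import UserCache   # import added at the top of this script

# Before: every caller built and created its own per-uid path.
old_dir = "/tmp/dropstat-{}/".format(os.getuid())
if not os.path.exists(old_dir):
    os.makedirs(old_dir)

# After: one call, with creation presumed to happen inside get_directory().
new_dir = UserCache().get_directory()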
21 changes: 7 additions & 14 deletions scripts/fast-reboot
@@ -462,21 +462,14 @@ function unload_kernel()
}

function save_counters_folder() {
debug "Saving counters folder before warmboot..."
if [[ "$REBOOT_TYPE" = "warm-reboot" ]]; then
debug "Saving counters folder before warmboot..."

counters_folder="/host/counters"
if [[ ! -d $counters_folder ]]; then
mkdir $counters_folder
fi
if [[ "$REBOOT_TYPE" = "warm-reboot" || "$REBOOT_TYPE" = "fastfast-reboot" ]]; then
modules=("portstat-0" "dropstat" "pfcstat-0" "queuestat-0" "intfstat-0")
for module in ${modules[@]}
do
statfile="/tmp/$module"
if [[ -d $statfile ]]; then
cp -rf $statfile $counters_folder
fi
done
counters_folder="/host/counters"
if [[ ! -d $counters_folder ]]; then
mkdir $counters_folder
fi
cp -rf /tmp/cache $counters_folder
fi
}

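The function now saves the entire cache root in one copy instead of cherry-picking the portstat/dropstat/pfcstat/queuestat/intfstat folders, and it also covers fastfast-reboot. For readers more at home in Python, the shell logic above amounts to roughly the following (illustrative only; the script itself stays in Bash, and $REBOOT_TYPE is a script variable, not an environment variable):

import os
import shutil

reboot_type = os.environ.get("REBOOT_TYPE", "warm-reboot")   # stand-in for the script's $REBOOT_TYPE

if reboot_type in ("warm-reboot", "fastfast-reboot"):
    os.makedirs("/host/counters", exist_ok=True)
    # cp -rf /tmp/cache /host/counters  ->  ends up as /host/counters/cache
    shutil.copytree("/tmp/cache", "/host/counters/cache", dirs_exist_ok=True)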
8 changes: 5 additions & 3 deletions scripts/flow_counters_stat
@@ -27,6 +27,7 @@ import utilities_common.multi_asic as multi_asic_util
from flow_counter_util.route import build_route_pattern, extract_route_pattern, exit_if_route_flow_counter_not_support, DEFAULT_VRF, COUNTERS_ROUTE_TO_PATTERN_MAP
from utilities_common import constants
from utilities_common.netstat import format_number_with_comma, table_as_json, ns_diff, format_prate
from utilities_common.cli import UserCache

# Flow counter meta data, new type of flow counters can extend this dictinary to reuse existing logic
flow_counter_meta = {
@@ -57,9 +58,10 @@ class FlowCounterStats(object):
meta_data = flow_counter_meta[args.type]
self.name_map = meta_data['name_map']
self.headers = meta_data['headers']
self.data_file = os.path.join('/tmp/{}-stats-{}'.format(args.type, os.getuid()))
if self.args.delete and os.path.exists(self.data_file):
os.remove(self.data_file)
self.cache = UserCache()
self.data_file = os.path.join(self.cache.get_directory(), "flow-counter-stats")
if self.args.delete:
self.cache.remove()
self.data = {}

def show(self):
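Two behavioral details in this hunk: the cache file name no longer includes args.type, so different flow counter types now share the single name flow-counter-stats inside the per-user cache directory, and the delete path calls cache.remove() unconditionally instead of removing one file only when it exists. In isolation (requires a sonic-utilities environment; remove() is assumed to drop the whole per-tool cache directory):

import os
from utilities_common.cli import UserCache   # imported at the top of the script

cache = UserCache()
data_file = os.path.join(cache.get_directory(), "flow-counter-stats")

# Old delete: os.remove('/tmp/<type>-stats-<uid>'), guarded by os.path.exists().
# New delete: the per-user cache for this tool goes away in one call.
cache.remove()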
55 changes: 12 additions & 43 deletions scripts/intfstat
@@ -28,6 +28,7 @@ from collections import namedtuple, OrderedDict
from natsort import natsorted
from tabulate import tabulate
from utilities_common.netstat import ns_diff, table_as_json, STATUS_NA, format_brate, format_prate
from utilities_common.cli import UserCache
from swsscommon.swsscommon import SonicV2Connector

nstat_fields = (
@@ -274,63 +275,34 @@ def main():
delete_saved_stats = args.delete
delete_all_stats = args.delete_all
use_json = args.json
tag_name = args.tag if args.tag else ""
uid = str(os.getuid())
tag_name = args.tag
wait_time_in_seconds = args.period
interface_name = args.interface if args.interface else ""

# fancy filename with dashes: uid-tag / uid etc
filename_components = [uid, tag_name]
cnstat_file = "intfstat"

cnstat_file = "-".join(filter(None, filename_components))
cache = UserCache(tag=tag_name)

cnstat_dir = "/tmp/intfstat-" + uid
cache_general = UserCache()
cnstat_dir = cache.get_directory()
cnstat_general_dir = cache_general.get_directory()

cnstat_fqn_general_file = cnstat_general_dir + "/" + cnstat_file
cnstat_fqn_file = cnstat_dir + "/" + cnstat_file

if delete_all_stats:
# There is nothing to delete
if not os.path.isdir(cnstat_dir):
sys.exit(0)

for file in os.listdir(cnstat_dir):
os.remove(cnstat_dir + "/" + file)

try:
os.rmdir(cnstat_dir)
sys.exit(0)
except IOError as e:
print(e.errno, e)
sys.exit(e)
cache.remove_all()

if delete_saved_stats:
try:
os.remove(cnstat_fqn_file)
except IOError as e:
if e.errno != ENOENT:
print(e.errno, e)
sys.exit(1)
finally:
if os.listdir(cnstat_dir) == []:
os.rmdir(cnstat_dir)
sys.exit(0)
cache.remove()

intfstat = Intfstat()
cnstat_dict, ratestat_dict = intfstat.get_cnstat(rif=interface_name)

# At this point, either we'll create a file or open an existing one.
if not os.path.exists(cnstat_dir):
try:
os.makedirs(cnstat_dir)
except IOError as e:
print(e.errno, e)
sys.exit(1)

if save_fresh_stats:
try:
# Add the information also to the general file - i.e. without the tag name
if tag_name != '' and tag_name in cnstat_fqn_file.split('/')[-1]:
gen_index = cnstat_fqn_file.rfind('/')
cnstat_fqn_general_file = cnstat_fqn_file[:gen_index] + cnstat_fqn_file[gen_index:].split('-')[0]
if tag_name is not None:
if os.path.isfile(cnstat_fqn_general_file):
try:
general_data = pickle.load(open(cnstat_fqn_general_file, 'rb'))
@@ -354,9 +326,6 @@ def main():
sys.exit(0)

if wait_time_in_seconds == 0:
gen_index = cnstat_fqn_file.rfind('/')
cnstat_fqn_general_file = cnstat_fqn_file[:gen_index] + cnstat_fqn_file[gen_index:].split('-')[0]

if os.path.isfile(cnstat_fqn_file) or (os.path.isfile(cnstat_fqn_general_file)):
try:
cnstat_cached_dict = {}
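The tag handling is the main simplification here: previously the tag was encoded in the file name inside one shared directory (/tmp/intfstat-<uid>/<uid>-<tag> next to the untagged /tmp/intfstat-<uid>/<uid>), which forced the rfind('/') string surgery above to recover the "general" file. Now the file name is always intfstat and the tag selects a separate cache directory, so the general file is simply the same name in the untagged cache. In isolation (requires a sonic-utilities environment; the tag value is illustrative):

import os
from utilities_common.cli import UserCache   # as imported at the top of the script

tag_name = "peak"                    # illustrative value of args.tag

cache = UserCache(tag=tag_name)      # tag-specific cache
cache_general = UserCache()          # untagged ("general") cache

cnstat_fqn_file = os.path.join(cache.get_directory(), "intfstat")
cnstat_fqn_general_file = os.path.join(cache_general.get_directory(), "intfstat")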
33 changes: 10 additions & 23 deletions scripts/pfcstat
@@ -18,9 +18,6 @@ from natsort import natsorted
from tabulate import tabulate

from sonic_py_common.multi_asic import get_external_ports
from utilities_common.netstat import ns_diff, STATUS_NA, format_number_with_comma
from utilities_common import multi_asic as multi_asic_util
from utilities_common import constants

# mock the redis for unit test purposes #
try:
@@ -37,6 +34,12 @@ try:
except KeyError:
pass

from utilities_common.netstat import ns_diff, STATUS_NA, format_number_with_comma
from utilities_common import multi_asic as multi_asic_util
from utilities_common import constants
from utilities_common.cli import UserCache


PStats = namedtuple("PStats", "pfc0, pfc1, pfc2, pfc3, pfc4, pfc5, pfc6, pfc7")
header_Rx = ['Port Rx', 'PFC0', 'PFC1', 'PFC2', 'PFC3', 'PFC4', 'PFC5', 'PFC6', 'PFC7']

@@ -224,10 +227,10 @@ Examples:
save_fresh_stats = args.clear
delete_all_stats = args.delete

uid = str(os.getuid())
cnstat_file = uid
cache = UserCache()
cnstat_file = 'pfcstat'

cnstat_dir = os.path.join(os.sep, "tmp", "pfcstat-{}".format(uid))
cnstat_dir = cache.get_directory()
cnstat_fqn_file_rx = os.path.join(cnstat_dir, "{}rx".format(cnstat_file))
cnstat_fqn_file_tx = os.path.join(cnstat_dir, "{}tx".format(cnstat_file))

@@ -239,15 +242,7 @@
pfcstat = Pfcstat(args.namespace, args.show)

if delete_all_stats:
for file in os.listdir(cnstat_dir):
os.remove(os.path.join(cnstat_dir, file))

try:
os.rmdir(cnstat_dir)
sys.exit(0)
except IOError as e:
print(e.errno, e)
sys.exit(e)
cache.remove()

"""
Get the counters of pfc rx counter
@@ -259,14 +254,6 @@
"""
cnstat_dict_tx = deepcopy(pfcstat.get_cnstat(False))

# At this point, either we'll create a file or open an existing one.
if not os.path.exists(cnstat_dir):
try:
os.makedirs(cnstat_dir)
except IOError as e:
print(e.errno, e)
sys.exit(1)

if save_fresh_stats:
try:
pickle.dump(cnstat_dict_rx, open(cnstat_fqn_file_rx, 'wb'))
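Same pattern as intfstat: the per-uid directory and uid-based file names are gone, the Rx/Tx snapshots become pfcstatrx and pfcstattx inside the per-user cache directory, and the hand-rolled listdir/remove/rmdir and makedirs blocks collapse into cache.remove() and get_directory(). The resulting save path in isolation (requires a sonic-utilities environment; empty dicts stand in for real counter snapshots):

import os
import pickle
from utilities_common.cli import UserCache   # as imported at the top of the script

cnstat_dir = UserCache().get_directory()
cnstat_fqn_file_rx = os.path.join(cnstat_dir, "pfcstatrx")   # "{}rx".format('pfcstat')
cnstat_fqn_file_tx = os.path.join(cnstat_dir, "pfcstattx")   # "{}tx".format('pfcstat')

# Clearing counters snapshots both directions, as in the save_fresh_stats branch above.
pickle.dump({}, open(cnstat_fqn_file_rx, "wb"))
pickle.dump({}, open(cnstat_fqn_file_tx, "wb"))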
4 changes: 2 additions & 2 deletions scripts/pg-drop
@@ -26,6 +26,7 @@ try:
except KeyError:
pass

from utilities_common.cli import UserCache
from swsscommon.swsscommon import ConfigDBConnector, SonicV2Connector

STATUS_NA = 'N/A'
@@ -38,8 +39,7 @@ COUNTERS_PG_PORT_MAP = "COUNTERS_PG_PORT_MAP"
COUNTERS_PG_INDEX_MAP = "COUNTERS_PG_INDEX_MAP"

def get_dropstat_dir():
dropstat_dir_prefix = '/tmp/dropstat'
return "{}-{}/".format(dropstat_dir_prefix, os.getuid())
return UserCache().get_directory()

class PgDropStat(object):

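Same one-line change as in scripts/dropstat. Note that before this PR, dropstat and pg-drop resolved to the identical /tmp/dropstat-<uid>/ directory (both old get_dropstat_dir() bodies are the same); after it, the directory comes from UserCache() with no explicit application name, so any sharing between the two tools depends on the helper's default naming. Sketch of the before paths, with an assumed, unverified layout for the after paths:

import os

uid = os.getuid()

# Before: both scripts resolved to the same location.
old_shared_dir = "/tmp/dropstat-{}/".format(uid)

# After: each gets whatever UserCache() returns; a plausible but assumed layout would be
new_dropstat_dir = "/tmp/cache/{}/dropstat".format(uid)   # assumption, not shown in this PR
new_pg_drop_dir = "/tmp/cache/{}/pg-drop".format(uid)     # assumption, not shown in this PR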