HPE Primera compatibility modification (sodafoundation#727)
yuanyu-ghca authored Jan 5, 2022
1 parent 8332cd7 commit a9c31b5
Showing 3 changed files with 50 additions and 34 deletions.
32 changes: 21 additions & 11 deletions delfin/drivers/hpe/hpe_3par/component_handler.py
@@ -100,7 +100,7 @@ def list_storage_pools(self, context):

if pools is not None:
members = pools.get('members')
for pool in members:
for pool in (members or []):
# Get pool status 1=normal 2,3=abnormal 99=offline
status = self.STATUS_MAP.get(pool.get('state'))
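A minimal sketch of what the (members or []) guard buys, assuming a Primera WSAPI response that omits the 'members' key (the response shape below is illustrative, not taken from the commit):

# Hypothetical pool listing with no 'members' key.
pools = {'total': 0}
members = pools.get('members')       # -> None

# Old form, "for pool in members", raises TypeError on None.
# New form iterates zero times instead.
for pool in (members or []):
    print(pool.get('name'))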

@@ -153,13 +153,18 @@ def handler_volume(self, volumes, pool_ids):
return
else:
members = volumes.get('members')
for volume in members:
for volume in (members or []):
status = self.STATUS_MAP.get(volume.get('state'))
orig_pool_name = volume.get('userCPG', '')

compressed = True
deduplicated = True

if volume.get('compressionState') and volume.get(
'compressionState') != 1:
compressed = False
if volume.get('deduplicationState') and volume.get(
'deduplicationState') != 1:
deduplicated = False
vol_type = self.VOL_TYPE_MAP.get(
volume.get('provisioningType'))
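A sketch of the new flag logic, assuming state value 1 means the feature is enabled (inferred from the diff, not from documentation): the flags default to True and are only cleared when the array returns an explicit state other than 1, so volumes without these fields, as Primera apparently reports, keep the default.

def capability_flag(state):
    # Hypothetical helper mirroring the inlined logic above: default to True,
    # turn the flag off only for an explicit state other than 1.
    return not (state and state != 1)

print(capability_flag(None))   # True  - field absent from the response
print(capability_flag(1))      # True  - 1 taken as "enabled"
print(capability_flag(2))      # False - any other explicit state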

@@ -197,7 +202,7 @@ def list_volumes(self, context):
pool_ids = {}
if pools is not None:
members = pools.get('members')
for pool in members:
for pool in (members or []):
pool_ids[pool.get('name')] = pool.get('id')

return self.handler_volume(volumes, pool_ids)
@@ -220,12 +225,12 @@ def list_controllers(self, storage_id):
node_cpu_map = self.ssh_handler.get_controllers_cpu()
node_version_map = self.ssh_handler.get_controllers_version()
for controller in controllers:
node_id = controller.get('node_id')
memory_size = int(controller.get('node_control_mem',
node_id = controller.get('node')
memory_size = int(controller.get('controlmem(mb)',
'0')) * units.Mi + int(
controller.get('node_data_mem', '0')) * units.Mi
controller.get('datamem(mb)', '0')) * units.Mi
cpu_info = ''
if node_cpu_map:
if node_cpu_map and node_cpu_map.get(node_id):
cpu_info_map = node_cpu_map.get(node_id)
cpu_info_keys = list(cpu_info_map.keys())
for cpu_key in cpu_info_keys:
@@ -237,11 +242,11 @@ def list_controllers(self, storage_id):
if node_version_map:
soft_version = node_version_map.get(node_id, '')
controller_model = {
'name': controller.get('node_name'),
'name': controller.get('name'),
'storage_id': storage_id,
'native_controller_id': node_id,
'status': consts.CONTROLLER_STATUS_MAP.get(
controller.get('node_state', '').upper(),
controller.get('state', '').upper(),
constants.ControllerStatus.OFFLINE),
'location': None,
'soft_version': soft_version,
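The renamed keys ('node', 'name', 'state', 'controlmem(mb)', 'datamem(mb)') follow the generic parse_node_table rewrite in ssh_handler.py below, where each key is the lower-cased column title from shownode. A sketch of the memory computation with an illustrative row (values are hypothetical; units assumed to come from oslo_utils, as elsewhere in the driver):

from oslo_utils import units

controller = {                  # illustrative parse_node_table output
    'node': '0',
    'name': '1307420-0',
    'state': 'OK',
    'controlmem(mb)': '4096',
    'datamem(mb)': '8192',
}
node_id = controller.get('node')
memory_size = (int(controller.get('controlmem(mb)', '0')) * units.Mi
               + int(controller.get('datamem(mb)', '0')) * units.Mi)
print(node_id, memory_size)     # 0 12884901888 (12 GiB expressed in bytes)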
@@ -261,7 +266,12 @@ def list_disks(self, storage_id):
status = consts.DISK_STATUS_MAP.get(
disk.get('state', '').upper(),
constants.DiskStatus.ABNORMAL)
capacity = int(float(disk.get("total", 0)) * units.Mi)
total = 0
if disk.get('total'):
total = float(disk.get("total"))
elif disk.get('size_mb'):
total = float(disk.get("size_mb"))
capacity = int(total * units.Mi)
serial_number = None
manufacturer = None
model = None
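The capacity change prefers the 3PAR-style 'total' field and falls back to 'size_mb', which Primera presumably reports instead; both are megabyte counts. A small sketch (field names from the diff, values hypothetical):

from oslo_utils import units   # assuming oslo_utils, as elsewhere in the driver

def disk_capacity(disk):
    # Prefer 'total', fall back to 'size_mb'; either way convert MB to bytes.
    total = 0
    if disk.get('total'):
        total = float(disk.get('total'))
    elif disk.get('size_mb'):
        total = float(disk.get('size_mb'))
    return int(total * units.Mi)

print(disk_capacity({'total': '1830912'}))     # 3PAR-style row
print(disk_capacity({'size_mb': '1830912'}))   # Primera-style row
print(disk_capacity({}))                       # neither key -> 0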
7 changes: 6 additions & 1 deletion delfin/drivers/hpe/hpe_3par/consts.py
@@ -626,7 +626,7 @@
'0x09f0002': 'File Persona CPG grow limit warning',
'0x0a50001': 'File Access Auditing Alerts'
}
NODE_PATTERN = "^\\s*Node\\s+[-]*Name[-]*\\s+[-]*State[-]*\\s+Master\\s+"
NODE_PATTERN = "^\\s*Node\\s+[-]*Name[-]*\\s+[-]*State[-]*\\s+"
CPU_PATTERN = "^\\s*Node\\s+CPU\\s+[-]*Manufacturer[-]*\\s+[-]*Serial[-]*" \
"\\s+CPUSpeed"
DISK_PATTERN = "^\\s*Id\\s+[-]*CagePos[-]*\\s+[-]*Type[-]*\\s+RPM\\s+State\\s+"
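The relaxed NODE_PATTERN stops requiring a Master column right after State, so the shownode header presumably still matches on Primera, whose column layout differs. A quick check against a 3PAR-style header (the header text here is illustrative, not captured from a real array):

import re

NODE_PATTERN = "^\\s*Node\\s+[-]*Name[-]*\\s+[-]*State[-]*\\s+"
header = "Node --Name--- -State- Master InCluster ---LED--- Mem(MB) Mem(MB)"
print(bool(re.match(NODE_PATTERN, header)))    # True, with or without a Master column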
@@ -700,3 +700,8 @@
'RCIP': constants.PortType.RCIP,
'RCFC': constants.PortType.OTHER
}
VERSION_PATTERN = "^\\s*[-]*Service[-]*\\s+[-]*State[-]*\\s+"
SSH_NODE_MEM_TYPE = {
1: "control",
2: "data"
}
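VERSION_PATTERN is used by the reworked get_version below to find the title row of showwsapi output, and SSH_NODE_MEM_TYPE disambiguates the two memory columns in parse_node_table. A quick regex check (the header line is illustrative, not captured from a real array):

import re

VERSION_PATTERN = "^\\s*[-]*Service[-]*\\s+[-]*State[-]*\\s+"
header = "-Service- -State- -HTTP_State- HTTP_Port -HTTPS_State- HTTPS_Port -Version-"
print(bool(re.match(VERSION_PATTERN, header)))   # True -> treated as the title row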
45 changes: 23 additions & 22 deletions delfin/drivers/hpe/hpe_3par/ssh_handler.py
@@ -58,10 +58,8 @@ def login(self, context):
version = ''
try:
re = self.exec_command(SSHHandler.HPE3PAR_COMMAND_SHOWWSAPI)
wsapi_infos = re.split('\n')
if len(wsapi_infos) > 1:
version = self.get_version(wsapi_infos)

if re:
version = self.get_version(re)
except Exception as e:
LOG.error("Login error: %s", six.text_type(e))
raise e
@@ -71,9 +69,10 @@ def get_version(self, wsapi_infos):
"""get wsapi version """
version = ''
try:
str_line = ' '.join(wsapi_infos[1].split())
wsapi_values = str_line.split(' ')
version = wsapi_values[6]
version_list = self.parse_datas_to_list(wsapi_infos,
consts.VERSION_PATTERN)
if version_list and version_list[0]:
version = version_list[0].get('version')
except Exception as e:
LOG.error("Get version error: %s, wsapi info: %s" % (
six.text_type(e), wsapi_infos))
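With this change get_version no longer indexes a fixed column; it reuses the generic table parser, so the version presumably lands under a 'version' key derived from the lower-cased '-Version-' title. A sketch of the consuming side (the parsed row below is assumed, not reproduced from the parser):

# Hypothetical parse_datas_to_list result for showwsapi: one row, keys are
# lower-cased column titles with '-' stripped.
version_list = [{'service': 'Enabled', 'state': 'Active', 'version': '1.6.4'}]

version = ''
if version_list and version_list[0]:
    version = version_list[0].get('version')
print(version)   # 1.6.4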
@@ -287,7 +286,8 @@ def parse_datas_to_list(self, resource_info, pattern_str, para_map=None):
obj_list = self.parse_node_table(cols_size,
titles_size,
str_info,
obj_list)
obj_list,
titles)
else:
if cols_size == titles_size:
obj_model = {}
@@ -340,7 +340,7 @@ def parse_datas_to_map(self, resource_info, pattern_str, para_map=None):

def parse_disk_table(self, cols_size, titles_size, str_info,
obj_list, titles):
if cols_size == titles_size:
if cols_size >= titles_size:
fw_rev_index = self.get_index_of_key(titles, 'FW_Rev')
if fw_rev_index:
inventory_map = {
@@ -353,20 +353,21 @@ def parse_disk_table(self, cols_size, titles_size, str_info,
obj_list.append(inventory_map)
return obj_list

def parse_node_table(self, cols_size, titles_size, str_info, obj_list):
def parse_node_table(self, cols_size, titles_size, str_info, obj_list,
titles):
if cols_size >= titles_size:
# Only the node name attribute may contain spaces,
# so splitting can produce several extra columns;
# the remaining fields therefore have to be read
# from the last few columns
obj_model = {
'node_id': str_info[0],
'node_name': ' '.join(str_info[1:cols_size - 8]),
'node_state': str_info[cols_size - 8],
'node_control_mem': str_info[cols_size - 3],
'node_data_mem': str_info[cols_size - 2]
}
obj_list.append(obj_model)
obj_model = {}
num_prefix = 1
for i in range(cols_size):
key_prefix = ''
key = titles[i].lower().replace('-', '')
if key == 'mem(mb)':
key_prefix = consts.SSH_NODE_MEM_TYPE.get(num_prefix)
num_prefix += 1
key = '%s%s' % (key_prefix, key)
obj_model[key] = str_info[i]
if obj_model:
obj_list.append(obj_model)
return obj_list

def parse_node_cpu(self, cols_size, titles_size, str_info, obj_map):
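A self-contained sketch of the rewritten parse_node_table on one shownode row (titles and values are illustrative; a real header has more columns): every column title becomes a lower-cased key, and the two identical Mem(MB) titles are prefixed via SSH_NODE_MEM_TYPE into controlmem(mb) and datamem(mb), which is what list_controllers in component_handler.py now reads.

SSH_NODE_MEM_TYPE = {1: 'control', 2: 'data'}   # copied from consts.py above

titles = ['Node', 'Name', 'State', 'Mem(MB)', 'Mem(MB)']    # hypothetical titles
str_info = ['0', '1307420-0', 'OK', '4096', '8192']         # hypothetical row
cols_size = len(str_info)

obj_model = {}
num_prefix = 1
for i in range(cols_size):
    key_prefix = ''
    key = titles[i].lower().replace('-', '')
    if key == 'mem(mb)':
        # First Mem(MB) column -> controlmem(mb), second -> datamem(mb).
        key_prefix = SSH_NODE_MEM_TYPE.get(num_prefix)
        num_prefix += 1
    key = '%s%s' % (key_prefix, key)
    obj_model[key] = str_info[i]

print(obj_model)
# {'node': '0', 'name': '1307420-0', 'state': 'OK',
#  'controlmem(mb)': '4096', 'datamem(mb)': '8192'}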
