Skip to content

Commit

Permalink
D.O. Rework: Topological PP (#472)
Browse files Browse the repository at this point in the history
* stash

* stash

* stash

* topologicals working

* cleanup

* review comments
  • Loading branch information
PaulTalbot-INL authored and alfoa committed Dec 15, 2017
1 parent 002944b commit cce9e67
Show file tree
Hide file tree
Showing 7 changed files with 79 additions and 45 deletions.
2 changes: 1 addition & 1 deletion framework/BaseClasses.py
Original file line number Diff line number Diff line change
Expand Up @@ -246,7 +246,7 @@ def provideExpectedMetaKeys(self):
"""
Provides the registered list of metadata keys for this entity.
@ In, None
@ Out, meta, list(str), expected keys (empty if none)
@ Out, meta, set(str), expected keys (empty if none)
"""
return self.metadataKeys

Expand Down
5 changes: 4 additions & 1 deletion framework/DataObjects/XDataSet.py
Original file line number Diff line number Diff line change
Expand Up @@ -345,10 +345,11 @@ def getVars(self,subset=None):
else:
self.raiseAnError(KeyError,'Unrecognized subset choice: "{}"'.format(subset))

def getVarValues(self,var):
def getVarValues(self,var,asDict=False):
"""
Returns the sampled values of "var"
@ In, var, str or list(str), name(s) of variable(s)
@ In, asDict, bool, optional, if True then always returns a dictionary even if only one sample requested
@ Out, res, xr.DataArray, samples (or dict of {var:xr.DataArray} if multiple variables requested)
"""
## NOTE TO DEVELOPER:
Expand All @@ -365,6 +366,8 @@ def getVarValues(self,var):
#format as dataarray
else:
res = self._data[var]
if asDict:
res = {var:res}
elif isinstance(var,list):
res = dict((v,self.getVarValues(v)) for v in var)
else:
Expand Down
15 changes: 15 additions & 0 deletions framework/Models/PostProcessor.py
Original file line number Diff line number Diff line change
Expand Up @@ -119,6 +119,21 @@ def __init__(self,runInfoDict):
self.workingDir = ''
self.printTag = 'POSTPROCESSOR MODEL'

def provideExpectedMetaKeys(self):
  """
    Overrides the base class method so that the child postprocessor interface is
    also polled for its registered metadata keys.
    @ In, None
    @ Out, keys, set(str), union of model-level and interface-level keys (empty if none)
  """
  # start from the keys registered through the Model base class
  keys = Model.provideExpectedMetaKeys(self)
  # the interface (child postprocessor) may contribute additional keys
  try:
    interfaceKeys = self.interface.provideExpectedMetaKeys()
  except AttributeError:
    # either "interface" has no method for returning meta keys, or "interface" is not established yet.
    interfaceKeys = set()
  return keys.union(interfaceKeys)

def whatDoINeed(self):
"""
This method is used mainly by the Simulation class at the Step construction stage.
Expand Down
20 changes: 20 additions & 0 deletions framework/PostProcessors/PostProcessor.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@ def __init__(self, messageHandler):
self.type = self.__class__.__name__ # pp type
self.name = self.__class__.__name__ # pp name
self.messageHandler = messageHandler
self.metadataKeys = set()

@classmethod
def getInputSpecification(cls):
Expand Down Expand Up @@ -94,3 +95,22 @@ def run(self, input):
@ Out, None
"""
pass

  ## TODO FIXME ##
  # These two methods (addMetaKeys, provideExpectedMetaKeys) are provided for consistency with BaseClasses.BaseType; on
  # the glorious day when the PostProcessors inherit from BaseType, these implementations should be removed.
def addMetaKeys(self,*args):
  """
    Registers additional keywords among the expected metadata keys.
    @ In, args, list(str), keywords to register
    @ Out, None
  """
  # build a fresh set so existing entries are kept and duplicates are ignored
  self.metadataKeys = set(args).union(self.metadataKeys)

def provideExpectedMetaKeys(self):
  """
    Provides the registered set of metadata keys for this entity.
    @ In, None
    @ Out, meta, set(str), expected keys (empty if none)
  """
  # metadataKeys is initialized as a set in __init__ and extended via addMetaKeys;
  # the docstring previously claimed list(str), which contradicted the actual type
  # (the matching docstrings in BaseClasses.py already say set(str)).
  return self.metadataKeys
78 changes: 37 additions & 41 deletions framework/PostProcessors/TopologicalDecomposition.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,47 +116,27 @@ def inputToInternal(self, currentInp):
@ In, currentInp, list or DataObjects, The input object to process
@ Out, inputDict, dict, the converted input
"""
if type(currentInp) == list:
currentInput = currentInp [-1]
else:
currentInput = currentInp
if type(currentInput) == dict:
if 'features' in currentInput.keys():
return currentInput
inputDict = {'features':{}, 'targets':{}, 'metadata':{}}
if hasattr(currentInput, 'type'):
inType = currentInput.type
elif type(currentInput).__name__ == 'list':
inType = 'list'
else:
# TODO typechecking against what currentInp can be; so far it's a length=1 list with a dataobject inside
currentInp = currentInp[0]
currentInp.asDataset()
# nowadays, our only input should be DataObject
## if no "type", then you're not a PointSet or HistorySet
if not hasattr(currentInp,'type') or currentInp.type != 'PointSet':
self.raiseAnError(IOError, self.__class__.__name__,
' postprocessor accepts files, HDF5, Data(s) only. ',
' Requested: ', type(currentInput))

if inType not in ['HDF5', 'PointSet', 'list'] and not isinstance(currentInput,Files.File):
self.raiseAnError(IOError, self, self.__class__.__name__ + ' post-processor only accepts files, HDF5, or DataObjects! Got ' + str(inType) + '!!!!')
# FIXME: implement this feature
if isinstance(currentInput,Files.File):
if currentInput.subtype == 'csv':
pass
# FIXME: implement this feature
if inType == 'HDF5':
pass # to be implemented
if inType in ['PointSet']:
for targetP in self.parameters['features']:
if targetP in currentInput.getParaKeys('input'):
inputDict['features'][targetP] = currentInput.getParam('input' , targetP)
elif targetP in currentInput.getParaKeys('output'):
inputDict['features'][targetP] = currentInput.getParam('output', targetP)
for targetP in self.parameters['targets']:
if targetP in currentInput.getParaKeys('input'):
inputDict['targets'][targetP] = currentInput.getParam('input' , targetP)
elif targetP in currentInput.getParaKeys('output'):
inputDict['targets'][targetP] = currentInput.getParam('output', targetP)
inputDict['metadata'] = currentInput.getAllMetadata()
# now we check if the sampler that generated the samples is adaptive... in case... create the grid
if 'SamplerType' in inputDict['metadata'].keys():
pass
' postprocessor only accepts PointSet DataObjects for input. ',
' Requested: ', type(currentInp))
# now we know we have a PointSet
## TODO FIXME maintaining old structure for now, in the future convert to use DataObject directly
## and not bother with inputToInternal
## This works particularly well since we only accept point sets.
data = currentInp.asDataset(outType='dict')['data']
inputDict = {'features':dict((var,data[var]) for var in self.parameters['features']),
'targets' :dict((var,data[var]) for var in self.parameters['targets' ]),
'metadata':currentInp.getMeta(general=True)}
#if 'PointProbability' in currentInp.getVars():
inputDict['metadata']['PointProbability'] = currentInp.getVarValues('PointProbability').values
#else:
# raise NotImplementedError # TODO
return inputDict

def _localReadMoreXML(self, xmlNode):
Expand All @@ -169,6 +149,8 @@ def _localReadMoreXML(self, xmlNode):
paramInput = TopologicalDecomposition.getInputSpecification()()
paramInput.parseNode(xmlNode)
self._handleInput(paramInput)
# register metadata
self.addMetaKeys('maxLabel','minLabel')

def _handleInput(self, paramInput):
"""
Expand Down Expand Up @@ -233,6 +215,20 @@ def collectOutput(self, finishedJob, output):
inputList,outputDict = evaluation

if output.type == 'PointSet':
# TODO this is a slow dict-based implementation. It should be improved on need.
# TODO can inputList ever be multiple dataobjects?
if len(inputList) > 1:
self.raiseAnError(NotImplementedError, 'Need to implement looping over all inputs.')
fromInput = inputList[0].asDataset('dict')['data']
results = dict((var,fromInput[var]) for var in output.getVars() if var in fromInput.keys())
for label in ['minLabel','maxLabel']:
results[label] = outputDict[label]
output.load(results,style='dict')
output.addMeta(self.type,{'general':{'hierarchy':outputDict['hierarchy']}})
return


#### OLD ####
requestedInput = output.getParaKeys('input')
requestedOutput = output.getParaKeys('output')
dataLength = None
Expand Down Expand Up @@ -318,7 +314,7 @@ def run(self, inputIn):
self.inputData[:, i] = myDataIn[lbl.encode('UTF-8')]

if self.weighted:
self.weights = inputIn[0].getMetadata('PointProbability')
self.weights = internalInput['metadata']['PointProbability']
else:
self.weights = None

Expand Down
2 changes: 1 addition & 1 deletion framework/Samplers/Sampler.py
Original file line number Diff line number Diff line change
Expand Up @@ -577,7 +577,7 @@ def initialize(self,externalSeeding=None,solutionExport=None):
self.entitiesToRemove.append('transformation-'+distName)

# Register expected metadata
meta = ['ProbabilityWeight','prefix']
meta = ['ProbabilityWeight','prefix','PointProbability']
self.addMetaKeys(*meta)

def localInitialize(self):
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
<?xml version="1.0" ?>
<Simulation>
<Simulation verbosity='debug'>
<TestInfo>
<name>framework/PostProcessors/TopologicalPostProcessor.topology_simple</name>
<author>maljdan</author>
Expand Down

0 comments on commit cce9e67

Please sign in to comment.