Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Handling Constraints using feasibleFirst parameterless fitness #1455

Merged
merged 22 commits into from
Jul 11, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 6 additions & 6 deletions developer_tools/XSDSchemas/Optimizers.xsd
Original file line number Diff line number Diff line change
Expand Up @@ -175,9 +175,9 @@
<xsd:attribute name="type" type="xsd:string" />
</xsd:complexType>
</xsd:element>
<xsd:element name="Constraint" type="AssemblerObjectType" minOccurs="0" maxOccurs="1"/>
<xsd:element name="ImplicitConstraint" type="AssemblerObjectType" minOccurs="0" maxOccurs="1"/>
<xsd:element name="Sampler" type="AssemblerObjectType" minOccurs="0" maxOccurs="1"/>
<xsd:element name="Constraint" type="AssemblerObjectType" minOccurs="0" maxOccurs="unbounded"/>
<xsd:element name="ImplicitConstraint" type="AssemblerObjectType" minOccurs="0" maxOccurs="unbounded"/>
<xsd:element name="Restart" type="AssemblerObjectType" minOccurs="0" maxOccurs="1"/>
<xsd:element name="restartTolerance" type="xsd:float" minOccurs="0" maxOccurs="1"/>
<xsd:element name="variableTransformation" type="variablesTransformationType" minOccurs="0" maxOccurs="1"/>
Expand Down Expand Up @@ -211,8 +211,8 @@

<xsd:complexType name="fitnessType">
<xsd:all>
<xsd:element name="a" type="xsd:float" />
<xsd:element name="b" type="xsd:float"/>
<xsd:element name="a" type="xsd:float" minOccurs="0" maxOccurs='1'/>
<xsd:element name="b" type="xsd:float" minOccurs="0" maxOccurs='1'/>
</xsd:all>
<xsd:attribute name="type" type="xsd:string" />
</xsd:complexType>
Expand Down Expand Up @@ -243,9 +243,9 @@
<xsd:attribute name="type" type="xsd:string" />
</xsd:complexType>
</xsd:element>
<xsd:element name="Constraint" type="AssemblerObjectType" minOccurs="0" maxOccurs="1"/>
<xsd:element name="ImplicitConstraint" type="AssemblerObjectType" minOccurs="0" maxOccurs="1"/>
<xsd:element name="Sampler" type="AssemblerObjectType" minOccurs="0" maxOccurs="1"/>
<xsd:element name="Constraint" type="AssemblerObjectType" minOccurs="0" maxOccurs="unbounded"/>
<xsd:element name="ImplicitConstraint" type="AssemblerObjectType" minOccurs="0" maxOccurs="unbounded"/>
<xsd:element name="Restart" type="AssemblerObjectType" minOccurs="0" maxOccurs="1"/>
<xsd:element name="restartTolerance" type="xsd:float" minOccurs="0" maxOccurs="1"/>
<xsd:element name="variableTransformation" type="variablesTransformationType" minOccurs="0" maxOccurs="1"/>
Expand Down
2 changes: 1 addition & 1 deletion framework/Functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -173,7 +173,7 @@ def __importValues(self,myInput):
@ In, myInput, object (dataObjects,dict), object from which the data need to be imported
@ Out, None
"""
if type(myInput)==dict:
if isinstance(myInput,dict):
self.__inputFromWhat['dict'](myInput)
else:
self.raiseAnError(IOError,'Unknown type of input provided to the function '+str(self.name))
Expand Down
174 changes: 117 additions & 57 deletions framework/Optimizers/GeneticAlgorithm.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@

#Internal Modules------------------------------------------------------------------------------------
from utils import mathUtils, randomUtils, InputData, InputTypes
from utils.gaUtils import dataArrayToDict, datasetToDataArray
from .RavenSampled import RavenSampled
from .parentSelectors.parentSelectors import returnInstance as parentSelectionReturnInstance
from .crossOverOperators.crossovers import returnInstance as crossoversReturnInstance
Expand Down Expand Up @@ -215,10 +216,15 @@ def getInputSpecification(cls):
contentType=InputTypes.StringType,
printPriority=108,
descr=r"""a subnode containing the implemented fitness functions.
This includes: a. invLinear: $fitness = \frac{1}{a \times obj + b \times penalty}$.
b. logistic: $fitness = \frac{1}{1+e^{a \times (obj-b)}}$""")
This includes: a. invLinear: $fitness = -a \\times obj - b \\times \\Sum_{j=1}^{nConstraint} max(0,-penalty_j)$.
b. logistic: $fitness = \\frac{1}{1+e^{a \\times (obj-b)}}$.
c. feasibleFirst: $fitness = \[ \\begin{cases}
-obj & g_j(x)\\geq 0 \\forall j \\
-obj_{worst} - \\Sigma_{j=1}^{J}<g_j(x)> & otherwise \\
\\end{cases}
\]$""")
fitness.addParam("type", InputTypes.StringType, True,
descr=r"""[invLin, logistic]""")
descr=r"""[invLin, logistic, feasibleFirst]""")
objCoeff = InputData.parameterInputFactory('a', strictMode=True,
contentType=InputTypes.FloatType,
printPriority=108,
Expand Down Expand Up @@ -315,12 +321,17 @@ def handleInput(self, paramInput):
# Fitness
fitnessNode = gaParamsNode.findFirst('fitness')
self._fitnessType = fitnessNode.parameterValues['type']
self._objCoeff = fitnessNode.findFirst('a').value
self._penaltyCoeff = fitnessNode.findFirst('b').value

# Check if the fitness requested is among the constrained optimization fitnesses
# Currently, only InvLin and feasibleFirst Fitnesses deal with constrained optimization
## TODO: @mandd, please explore the possibility to convert the logistic fitness into a constrained optimization fitness.
if 'Constraint' in self.assemblerObjects.keys() and self._fitnessType not in ['invLinear','feasibleFirst']:
self.raiseAnError(IOError, 'Currently constrained Genetic Algorithms only support invLinear and feasibleFirst fitnesses, whereas provided fitness is {}'.format(self._fitnessType))
self._objCoeff = fitnessNode.findFirst('a').value if fitnessNode.findFirst('a') is not None else None
self._penaltyCoeff = fitnessNode.findFirst('b').value if fitnessNode.findFirst('b') is not None else None
self._fitnessInstance = fitnessReturnInstance(self,name = self._fitnessType)
self._repairInstance = repairReturnInstance(self,name='replacementRepair') # currently only replacement repair is implemented,
# if other repair methods are implemented then
# ##TODO: make the repair type a user input
self._repairInstance = repairReturnInstance(self,name='replacementRepair') # currently only replacement repair is implemented.

# Convergence Criterion
convNode = paramInput.findFirst('convergence')
if convNode is not None:
Expand Down Expand Up @@ -351,7 +362,7 @@ def initialize(self, externalSeeding=None, solutionExport=None):
self.batch = self._populationSize*(self.counter==0)+self._nChildren*(self.counter>0)
if self._populationSize != len(self._initialValues):
self.raiseAnError(IOError, 'Number of initial values provided for each variable is {}, while the population size is {}'.format(len(self._initialValues),self._populationSize,self._populationSize))
for _, init in enumerate(self._initialValues): # TODO: this should be single traj
for _, init in enumerate(self._initialValues):
self._submitRun(init,0,self.getIteration(0)+1)

def initializeTrajectory(self, traj=None):
Expand Down Expand Up @@ -404,10 +415,29 @@ def _useRealization(self, info, rlz):

# 5.1 @ n-1: fitnessCalculation(rlz)
# perform fitness calculation for newly obtained children (rlz)
fitness = self._fitnessInstance(rlz, objVar=self._objectiveVar, a=self._objCoeff, b=self._penaltyCoeff, penalty=None)
population = datasetToDataArray(rlz, list(self.toBeSampled))
objectiveVal = list(np.atleast_1d(rlz[self._objectiveVar].data))
# Compute constraint function g_j(x) for all constraints (j = 1 .. J)
# and all x's (individuals) in the population
g0 = np.zeros((np.shape(population)[0],len(self._constraintFunctions)+len(self._impConstraintFunctions)))
g = xr.DataArray(g0,
dims=['chromosome','Constraint'],
coords={'chromosome':np.arange(np.shape(population)[0]),
'Constraint':[y.name for y in (self._constraintFunctions + self._impConstraintFunctions)]})
## FIXME The constraint handling is following the structure of the RavenSampled.py,
# there are many utility functions that can be simplified and/or merged with
# _check, _handle, and _apply, for explicit and implicit constraints.
# This can be simplified in the near future in GradientDescent, SimulatedAnnealing, and here in GA
for index,individual in enumerate(population):
newOpt = individual
opt = objectiveVal[index]
for constIndex,constraint in enumerate(self._constraintFunctions + self._impConstraintFunctions):
if constraint in self._constraintFunctions:
g.data[index, constIndex] = self._handleExplicitConstraints(newOpt,constraint)
else:
g.data[index, constIndex] = self._handleImplicitConstraints(newOpt, opt,constraint)
fitness = self._fitnessInstance(rlz, objVar=self._objectiveVar, a=self._objCoeff, b=self._penaltyCoeff,constraintFunction=g,type=self._minMax)
acceptable = 'first' if self.counter==1 else 'accepted'
population = self._datasetToDataArray(rlz) # TODO: rename
self._collectOptPoint(population,fitness,objectiveVal)
self._resolveNewGeneration(traj, rlz, objectiveVal, fitness, info)

Expand Down Expand Up @@ -481,21 +511,6 @@ def _useRealization(self, info, rlz):
newRlz[var] = float(daChildren.loc[i,var].values)
self._submitRun(newRlz, traj, self.getIteration(traj))

def _datasetToDataArray(self, rlzDataset):
  """
    Converts the realization DataSet to a DataArray
    @ In, rlzDataset, xr.dataset, the data set containing the batched realizations
    @ Out, dataArray, xr.dataarray, a data array containing the realizations with
          dims = ['chromosome','Gene']; chromosomes are named 0,1,2...
          and Genes are named after the variables to be sampled
  """
  geneNames = list(self.toBeSampled)
  # one row per chromosome (realization), one column per gene (sampled variable)
  values = np.atleast_2d(rlzDataset[geneNames].to_array().transpose())
  # the number of chromosomes equals the number of objective evaluations in the batch
  nChromosomes = rlzDataset[self._objectiveVar].data.size
  dataArray = xr.DataArray(values,
                           dims=['chromosome', 'Gene'],
                           coords={'chromosome': np.arange(nChromosomes),
                                   'Gene': geneNames})
  return dataArray

def _submitRun(self, point, traj, step, moreInfo=None):
"""
Submits a single run with associated info to the submission queue
Expand All @@ -511,7 +526,8 @@ def _submitRun(self, point, traj, step, moreInfo=None):
info.update({'traj': traj,
'step': step
})
# NOTE: explicit constraints have been checked before this!
# NOTE: Currently, GA treats explicit and implicit constraints similarly
# while box constraints (Boundary constraints) are automatically handled via limits of the distribution
#
self.raiseADebug('Adding run to queue: {} | {}'.format(self.denormalizeData(point), info))
self._submissionQueue.append((point, info))
Expand Down Expand Up @@ -568,7 +584,7 @@ def _collectOptPoint(self, population, fitness, objectiveVal):
"""
optPoints,fit,obj = zip(*[[x,y,z] for x,y,z in sorted(zip(np.atleast_2d(population.data),np.atleast_1d(fitness.data),objectiveVal),reverse=True,key=lambda x: (x[1]))])
point = dict((var,float(optPoints[0][i])) for i,var in enumerate(self.toBeSampled.keys()))
if (self.counter>1 and obj[0] < self.bestObjective) or self.counter == 1:
if (self.counter>1 and obj[0] <= self.bestObjective and fit[0]>=self.bestFitness) or self.counter == 1:
self.bestPoint = point
self.bestFitness = fit[0]
self.bestObjective = obj[0]
Expand All @@ -578,23 +594,8 @@ def _checkAcceptability(self, traj):
"""
This is an abstract method for all RavenSampled Optimizer, whereas for GA all children are accepted
@ In, traj, int, identifier
@ Out, (acceptable, old, rejectionReason), tuple, tuple which contains the following three items:
acceptable, str, acceptability condition for point
old, dict, old opt point
rejectReason, str, reject reason of opt point, or return None if accepted
"""
acceptable = 'accepted'
try:
old, _ = self._optPointHistory[traj][-1]
except IndexError:
# if first sample, simply assume it's better!
acceptable = 'first'
old = None
self._acceptHistory[traj].append(acceptable)
self.raiseADebug(' ... {a}!'.format(a=acceptable))
rejectionReason = None

return acceptable, old, rejectionReason
"""
return

def checkConvergence(self, traj, new, old):
"""
Expand Down Expand Up @@ -646,7 +647,7 @@ def _checkConvAHDp(self, traj, **kwargs):
@ Out, converged, bool, convergence state
"""
old = kwargs['old'].data
new = self._datasetToDataArray(kwargs['new']).data
new = datasetToDataArray(kwargs['new'], list(self.toBeSampled)).data
if ('p' not in kwargs.keys() or kwargs['p'] == None):
p = 3
else:
Expand All @@ -670,7 +671,7 @@ def _checkConvAHD(self, traj, **kwargs):
@ Out, converged, bool, convergence state
"""
old = kwargs['old'].data
new = self._datasetToDataArray(kwargs['new']).data
new = datasetToDataArray(kwargs['new'], list(self.toBeSampled)).data
ahd = self._ahd(old,new)
self.ahd = ahd
converged = (ahd < self._convergenceCriteria['AHD'])
Expand Down Expand Up @@ -768,7 +769,7 @@ def _updatePersistence(self, traj, converged, optVal):
"""
# This is not required for the genetic algorithms as it's handled in the probabilistic acceptance criteria
# But since it is an abstract method it has to exist
pass
return
Jimmy-INL marked this conversation as resolved.
Show resolved Hide resolved

def _checkForImprovement(self, new, old):
"""
Expand All @@ -779,7 +780,7 @@ def _checkForImprovement(self, new, old):
"""
# This is not required for the genetic algorithms as it's handled in the probabilistic acceptance criteria
# But since it is an abstract method it has to exist
return True
return

def _rejectOptPoint(self, traj, info, old):
"""
Expand All @@ -788,17 +789,76 @@ def _rejectOptPoint(self, traj, info, old):
@ In, info, dict, meta information about the opt point
@ In, old, dict, previous optimal point (to resubmit)
"""
pass
return

# * * * * * * * * * * * *
# Constraint Handling
def _handleExplicitConstraints(self, point, constraint):
  """
    Computes explicit (i.e. input-based) constraints
    @ In, point, xr.DataArray, the DataArray containing the chromosome (point)
    @ In, constraint, external function, explicit constraint function
    @ Out, g, float, the value g_j(x) of constraint function number j evaluated at the chromosome (point);
          if $g_j(x)<0$, then the constraint is violated
  """
  # delegate to the functional-constraint evaluator so explicit constraints
  # follow the same evaluation path as the other constraint kinds
  g = self._applyFunctionalConstraints(point, constraint)
  return g

def _handleImplicitConstraints(self, point, opt, constraint):
  """
    Computes implicit (i.e. output- or output-input-based) constraints
    @ In, point, xr.DataArray, the DataArray containing the chromosome (point)
    @ In, opt, float, the objective value at this chromosome (point)
    @ In, constraint, external function, implicit constraint function
    @ Out, g, float, the value g_j(x) of constraint function number j evaluated at the chromosome (point);
          if $g_j(x)<0$, then the constraint is violated
  """
  # implicit constraints need the objective value as well as the chromosome,
  # so they are evaluated through the implicit-constraint checker
  g = self._checkImpFunctionalConstraints(point, opt, constraint)
  return g

def _applyFunctionalConstraints(self, point, constraint):
  """
    Evaluates functional constraints of variables in "point" -> DENORMED point expected!
    @ In, point, xr.DataArray, the dataArray containing the potential point to apply constraints to
    @ In, constraint, external function, constraint function
    @ Out, g, float, the value g_j(x) of constraint function number j evaluated at the chromosome (point);
          if $g_j(x)<0$, then the constraint is violated
  """
  # check whether this point violates the functional constraint and
  # hand the raw constraint value back to the caller
  return self._checkFunctionalConstraints(point, constraint)

def _checkFunctionalConstraints(self, point, constraint):
  """
    Evaluates the provided constraint at the provided point
    @ In, point, dict, the dictionary containing the chromosome (point)
    @ In, constraint, external function, explicit constraint function
    @ Out, g, float, the value g_j(x) of constraint function number j evaluated at the chromosome (point);
          if $g_j(x)<0$, then the constraint is violated
  """
  # build the evaluation inputs from the chromosome plus the case constants
  evalInputs = dataArrayToDict(point)
  evalInputs.update(self.constants)
  # the external function is expected to expose a method named 'constrain'
  g = constraint.evaluate('constrain', evalInputs)
  return g

def _checkImpFunctionalConstraints(self, point, opt, impConstraint):
  """
    Evaluates the provided implicit constraint at the provided point
    @ In, point, dict, the dictionary containing the chromosome (point)
    @ In, opt, float, the objective value at this chromosome (point)
    @ In, impConstraint, external function, implicit constraint function
    @ Out, g, float, the value g_j(x, objVar) of constraint function number j evaluated at the chromosome (point);
          if $g_j(x, objVar)<0$, then the constraint is violated
  """
  inputs = dataArrayToDict(point)
  inputs.update(self.constants)
  # implicit constraints may depend on the objective value, so expose it
  # to the external function under the objective variable's name
  inputs[self._objectiveVar] = opt
  # the external function is expected to expose a method named 'impConstrain'
  g = impConstraint.evaluate('impConstrain', inputs)
  return g

# END constraint handling
# * * * * * * * * * * * *


def _addToSolutionExport(self, traj, rlz, acceptable):
"""
Expand Down
6 changes: 3 additions & 3 deletions framework/Optimizers/Optimizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,7 +115,7 @@ def getInputSpecification(cls):
the input space of the Model. From a practical point of view, this XML node must contain
the name of a function defined in the \xmlNode{Functions} block (see Section~\ref{sec:functions}).
This external function must contain a method called ``constrain'', which returns True for
inputs satisfying the explicit constraints and False otherwise.""")
inputs satisfying the explicit constraints and False otherwise. \nb Currently this accepts any number of constraints from the user.""")
ConstraintInput.addParam("class", InputTypes.StringType, True,
descr=r"""RAVEN class for this source. Options include \xmlString{Functions}. """)
ConstraintInput.addParam("type", InputTypes.StringType, True,
Expand Down Expand Up @@ -174,8 +174,8 @@ def __init__(self):
self._impConstraintFunctions = [] # list of implicit constraint functions
# __private
# additional methods
self.addAssemblerObject('Constraint', InputData.Quantity.zero_to_one) # Explicit (input-based) constraints
self.addAssemblerObject('ImplicitConstraint', InputData.Quantity.zero_to_one) # Implicit constraints
self.addAssemblerObject('Constraint', InputData.Quantity.zero_to_infinity) # Explicit (input-based) constraints
Jimmy-INL marked this conversation as resolved.
Show resolved Hide resolved
self.addAssemblerObject('ImplicitConstraint', InputData.Quantity.zero_to_infinity) # Implicit constraints
self.addAssemblerObject('Sampler', InputData.Quantity.zero_to_one) # This Sampler can be used to initialize the optimization initial points (e.g. partially replace the <initial> blocks for some variables)

# register adaptive sample identification criteria
Expand Down
Loading