Merge pull request #2 from guykatzz/bnn_opt
Printing of BNN layers
guykatzz authored Aug 20, 2020
Commit 637455e (2 parents: 0ac47f3 + 6450e43)
Showing 7 changed files with 107 additions and 5 deletions.
79 changes: 78 additions & 1 deletion src/engine/InputQuery.cpp
@@ -573,7 +573,8 @@ bool InputQuery::constructNetworkLevelReasoner()
unsigned newLayerIndex = 1;
// Now, repeatedly attempt to construct additional layers
while ( constructWeighedSumLayer( nlr, handledVariableToLayer, newLayerIndex ) ||
-        constructReluLayer( nlr, handledVariableToLayer, newLayerIndex ) )
+        constructReluLayer( nlr, handledVariableToLayer, newLayerIndex ) ||
+        constructSignLayer( nlr, handledVariableToLayer, newLayerIndex ) )
{
++newLayerIndex;
}
@@ -780,6 +781,82 @@ bool InputQuery::constructReluLayer( NLR::NetworkLevelReasoner *nlr,
return true;
}

bool InputQuery::constructSignLayer( NLR::NetworkLevelReasoner *nlr,
Map<unsigned, unsigned> &handledVariableToLayer,
unsigned newLayerIndex )
{
struct NeuronInformation
{
public:

NeuronInformation( unsigned variable, unsigned neuron, unsigned sourceVariable )
: _variable( variable )
, _neuron( neuron )
, _sourceVariable( sourceVariable )
{
}

unsigned _variable;
unsigned _neuron;
unsigned _sourceVariable;
};

List<NeuronInformation> newNeurons;

// Look for Signs where the b variables have already been handled
const List<PiecewiseLinearConstraint *> &plConstraints =
getPiecewiseLinearConstraints();

for ( const auto &plc : plConstraints )
{
// Only consider Signs
if ( plc->getType() != SIGN )
continue;

const SignConstraint *sign = (const SignConstraint *)plc;

// Has the b variable been handled?
unsigned b = sign->getB();
if ( !handledVariableToLayer.exists( b ) )
continue;

// If the f variable has also been handled, ignore this constraint
unsigned f = sign->getF();
if ( handledVariableToLayer.exists( f ) )
continue;

// B has been handled, f hasn't. Add f
newNeurons.append( NeuronInformation( f, newNeurons.size(), b ) );
}

// No neurons found for the new layer
if ( newNeurons.empty() )
return false;

nlr->addLayer( newLayerIndex, NLR::Layer::SIGN, newNeurons.size() );
for ( const auto &newNeuron : newNeurons )
{
handledVariableToLayer[newNeuron._variable] = newLayerIndex;

unsigned sourceLayer = handledVariableToLayer[newNeuron._sourceVariable];
unsigned sourceNeuron = nlr->getLayer( sourceLayer )->variableToNeuron( newNeuron._sourceVariable );

// Mark the layer dependency
nlr->addLayerDependency( sourceLayer, newLayerIndex );

// Add the new neuron
nlr->setNeuronVariable( NLR::NeuronIndex( newLayerIndex, newNeuron._neuron ), newNeuron._variable );

// Mark the activation connection
nlr->addActivationSource( sourceLayer,
sourceNeuron,
newLayerIndex,
newNeuron._neuron );
}

return true;
}

//
// Local Variables:
// compile-command: "make -C ../.. "
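Two details of the InputQuery.cpp change are worth spelling out. First, because || short-circuits, each iteration of the while loop above adds exactly one new layer, trying weighted-sum first, then ReLU, then sign, so mixed networks are peeled off layer by layer. Second, the new SIGN layer encodes the sign activation used in binarized neural networks (BNNs). A minimal sketch of that activation, assuming the convention that sign(x) is +1 for x >= 0 and -1 otherwise; this helper is illustrative only, not code from the commit:

    // Sketch only: the activation a SIGN layer represents, under the
    // assumed convention sign( 0 ) = +1.
    double signActivation( double b )
    {
        return ( b >= 0 ) ? 1.0 : -1.0;
    }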
4 changes: 3 additions & 1 deletion src/engine/InputQuery.h
@@ -157,7 +157,9 @@ class InputQuery
bool constructReluLayer( NLR::NetworkLevelReasoner *nlr,
Map<unsigned, unsigned> &handledVariableToLayer,
unsigned newLayerIndex );

bool constructSignLayer( NLR::NetworkLevelReasoner *nlr,
Map<unsigned, unsigned> &handledVariableToLayer,
unsigned newLayerIndex );

public:
/*
2 changes: 1 addition & 1 deletion src/engine/ReluConstraint.h
@@ -152,7 +152,7 @@ class ReluConstraint : public PiecewiseLinearConstraint
String serializeToString() const;

/*
-      Get the index of the B variable.
+      Get the index of the B and F variables.
*/
unsigned getB() const;
unsigned getF() const;
10 changes: 10 additions & 0 deletions src/engine/SignConstraint.cpp
@@ -412,3 +412,13 @@ void SignConstraint::eliminateVariable( __attribute__((unused)) unsigned variabl
// In a Sign constraint, if a variable is removed the entire constraint can be discarded.
_haveEliminatedVariables = true;
}

unsigned SignConstraint::getB() const
{
return _b;
}

unsigned SignConstraint::getF() const
{
return _f;
}
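These accessors mirror ReluConstraint::getB()/getF() and are exactly what constructSignLayer() needs to map each sign constraint to its input variable b and output variable f = sign( b ). A condensed sketch of the call site, abridged from the InputQuery.cpp hunk above:

    // Sketch, abridged from constructSignLayer(): read off the b and f
    // variables of a sign constraint.
    const SignConstraint *sign = (const SignConstraint *)plc;
    unsigned b = sign->getB(); // input of the activation
    unsigned f = sign->getF(); // output, f = sign( b )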
6 changes: 6 additions & 0 deletions src/engine/SignConstraint.h
@@ -139,6 +139,12 @@ class SignConstraint : public PiecewiseLinearConstraint
*/
String serializeToString() const;

/*
Get the index of the B and F variables.
*/
unsigned getB() const;
unsigned getF() const;

private:
unsigned _b, _f;
PhaseStatus _phaseStatus;
9 changes: 7 additions & 2 deletions src/nlr/Layer.cpp
@@ -244,15 +244,15 @@ double Layer::getBias( unsigned neuron ) const

void Layer::addActivationSource( unsigned sourceLayer, unsigned sourceNeuron, unsigned targetNeuron )
{
-    ASSERT( _type == RELU || _type == ABSOLUTE_VALUE || _type == MAX );
+    ASSERT( _type == RELU || _type == ABSOLUTE_VALUE || _type == MAX || _type == SIGN );

if ( !_neuronToActivationSources.exists( targetNeuron ) )
_neuronToActivationSources[targetNeuron] = List<NeuronIndex>();

_neuronToActivationSources[targetNeuron].append( NeuronIndex( sourceLayer, sourceNeuron ) );

DEBUG({
-        if ( _type == RELU || _type == ABSOLUTE_VALUE )
+        if ( _type == RELU || _type == ABSOLUTE_VALUE || _type == SIGN )
ASSERT( _neuronToActivationSources[targetNeuron].size() == 1 );
});
}
@@ -1281,6 +1281,10 @@ String Layer::typeToString( Type type )
return "MAX";
break;

case SIGN:
return "SIGN";
break;

default:
return "UNKNOWN TYPE";
break;
@@ -1337,6 +1341,7 @@ void Layer::dump() const
case RELU:
case ABSOLUTE_VALUE:
case MAX:
case SIGN:

for ( unsigned i = 0; i < _size; ++i )
{
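Together, the Layer.cpp changes are the "Printing of BNN layers" named in the commit title: addActivationSource() now accepts SIGN layers (and, like RELU and ABSOLUTE_VALUE, checks in debug mode that each sign neuron has exactly one source), typeToString() no longer lets SIGN fall through to "UNKNOWN TYPE", and dump() prints sign neurons alongside the other activation types. A hypothetical sanity check; how typeToString() is invoked below is an assumption, not part of the commit:

    // Sketch only: SIGN now maps to a readable name in layer dumps.
    ASSERT( Layer::typeToString( NLR::Layer::SIGN ) == String( "SIGN" ) );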
2 changes: 2 additions & 0 deletions src/nlr/Layer.h
@@ -24,6 +24,7 @@
#include "MatrixMultiplication.h"
#include "NeuronIndex.h"
#include "ReluConstraint.h"
#include "SignConstraint.h"

namespace NLR {

@@ -39,6 +40,7 @@ class Layer
RELU,
ABSOLUTE_VALUE,
MAX,
SIGN,
};

/*
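The Layer.h change is the root of the rest: adding SIGN to the Type enum (and including SignConstraint.h) is what gives every switch over Layer::Type above a SIGN case to handle, and what lets constructSignLayer() create the layer in the first place, as in this line repeated from the InputQuery.cpp hunk:

    // From constructSignLayer() above: a sign layer sized to the number
    // of newly discovered f variables.
    nlr->addLayer( newLayerIndex, NLR::Layer::SIGN, newNeurons.size() );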
