From b0dc43ae1174218b08b10bacf42078f41b76b7f5 Mon Sep 17 00:00:00 2001 From: "Joshua J. Cogliati" Date: Thu, 4 Nov 2021 11:11:29 -0600 Subject: [PATCH 1/2] Adding a KerasMLPRegression class to go with the KerasMLPClassifier. This allows regression to be done for non-timeseries data. --- doc/user_manual/kerasROM.tex | 40 +- framework/SupervisedLearning/Factory.py | 1 + framework/SupervisedLearning/KerasBase.py | 2 +- .../SupervisedLearning/KerasLSTMRegression.py | 22 + .../SupervisedLearning/KerasMLPRegression.py | 74 ++ .../SupervisedLearning/KerasRegression.py | 32 +- .../gold/LSTMRegression/out_resampled_0.csv | 202 ++--- .../gold/LSTMRegression/out_resampled_1.csv | 202 ++--- .../keras_mlp_regression/outMLPClassifier.csv | 41 + .../tensorflow_keras/keras_mlp_regression.xml | 137 ++++ .../keras_mlp_regression/test.csv | 41 + .../keras_mlp_regression/train.csv | 729 ++++++++++++++++++ tests/framework/ROM/tensorflow_keras/tests | 6 + 13 files changed, 1310 insertions(+), 219 deletions(-) create mode 100644 framework/SupervisedLearning/KerasMLPRegression.py create mode 100644 tests/framework/ROM/tensorflow_keras/gold/keras_mlp_regression/outMLPClassifier.csv create mode 100644 tests/framework/ROM/tensorflow_keras/keras_mlp_regression.xml create mode 100644 tests/framework/ROM/tensorflow_keras/keras_mlp_regression/test.csv create mode 100644 tests/framework/ROM/tensorflow_keras/keras_mlp_regression/train.csv diff --git a/doc/user_manual/kerasROM.tex b/doc/user_manual/kerasROM.tex index 2af0abe1e1..7a3aa6c647 100644 --- a/doc/user_manual/kerasROM.tex +++ b/doc/user_manual/kerasROM.tex @@ -568,9 +568,10 @@ \subsubsection{TensorFlow-Keras Deep Neural Networks} and an upper bound. 
\end{itemize} -%%%%% ROM Model - KerasMLPClassifier %%%%%%% -\paragraph{KerasMLPClassifier} +%%%%% ROM Model - KerasMLPClassifier and KerasMLPRegression %%%%%%% +\paragraph{KerasMLPClassifier and KerasMLPRegression} \label{KerasMLPClassifier} +\label{KerasMLPRegression} Multi-Layer Perceptron (MLP) (or Artificial Neural Network - ANN), a class of feedforward ANN, can be viewed as a logistic regression classifier where input is first transformed @@ -584,10 +585,10 @@ \subsubsection{TensorFlow-Keras Deep Neural Networks} relationships. The extra layers enable composition of features from lower layers, potentially modeling complex data with fewer units than a similarly performing shallow network. -\zNormalizationPerformed{KerasMLPClassifier} +\zNormalizationPerformed{KerasMLPClassifier \textup{and} KerasMLPRegression} In order to use this ROM, the \xmlNode{ROM} attribute \xmlAttr{subType} needs to -be \xmlString{KerasMLPClassifier} (see the example below). This model can be initialized with +be \xmlString{KerasMLPClassifier} or \xmlString{KerasMLPRegression} (see the examples below). This model can be initialized with the following layers: \begin{itemize} @@ -595,7 +596,7 @@ \subsubsection{TensorFlow-Keras Deep Neural Networks} \DropoutLayer \end{itemize} -\textbf{Example:} +\textbf{KerasMLPClassifier Example:} \begin{lstlisting}[style=XML,morekeywords={name,subType}] ... @@ -641,6 +642,35 @@ \subsubsection{TensorFlow-Keras Deep Neural Networks} \end{lstlisting} +\textbf{KerasMLPRegression Example:} +\begin{lstlisting}[style=XML,morekeywords={name,subType}] + + ... + + + x1,x2,x3,x4,x5,x6,x7,x8 + y + mean_squared_error + 10 + 60 + False + 0.25 + 1986 + + 30 + + + 12 + + + + layer1, layer2, outLayer + + + ... 
+ +\end{lstlisting} + %%%%% ROM Model - KerasConvNetClassifier %%%%%%% \paragraph{KerasConvNetClassifier} \label{KerasClassifier} diff --git a/framework/SupervisedLearning/Factory.py b/framework/SupervisedLearning/Factory.py index da653312be..5cd09c6952 100644 --- a/framework/SupervisedLearning/Factory.py +++ b/framework/SupervisedLearning/Factory.py @@ -36,6 +36,7 @@ ## Tensorflow-Keras Neural Network Models from .KerasMLPClassifier import KerasMLPClassifier +from .KerasMLPRegression import KerasMLPRegression from .KerasConvNetClassifier import KerasConvNetClassifier from .KerasLSTMClassifier import KerasLSTMClassifier from .KerasLSTMRegression import KerasLSTMRegression diff --git a/framework/SupervisedLearning/KerasBase.py b/framework/SupervisedLearning/KerasBase.py index 5115b56ed0..faa93e46ee 100644 --- a/framework/SupervisedLearning/KerasBase.py +++ b/framework/SupervisedLearning/KerasBase.py @@ -1893,7 +1893,7 @@ def __init__(self): self.kerasLayersList = functools.reduce(lambda x,y: x+y, list(self.kerasDict.values())) - self.kerasROMsList = ['KerasMLPClassifier', 'KerasConvNetClassifier', 'KerasLSTMClassifier', 'KerasLSTMRegression'] + self.kerasROMsList = ['KerasMLPClassifier', 'KerasMLPRegression', 'KerasConvNetClassifier', 'KerasLSTMClassifier', 'KerasLSTMRegression'] if len(self.availOptimizer) == 0: # stochastic gradient descent optimizer, includes support for momentum,learning rate decay, and Nesterov momentum diff --git a/framework/SupervisedLearning/KerasLSTMRegression.py b/framework/SupervisedLearning/KerasLSTMRegression.py index 3d0a140e10..56173576f1 100644 --- a/framework/SupervisedLearning/KerasLSTMRegression.py +++ b/framework/SupervisedLearning/KerasLSTMRegression.py @@ -19,6 +19,8 @@ """ #External Modules------------------------------------------------------------------------------------ import numpy as np +import utils.importerUtils +tf = utils.importerUtils.importModuleLazyRenamed("tf", globals(), "tensorflow") ###### #Internal 
Modules------------------------------------------------------------------------------------ from .KerasRegression import KerasRegression @@ -81,6 +83,26 @@ def _checkLayers(self): self.initOptionDict[layerName]['return_sequences'] = True self.raiseAWarning('return_sequences is resetted to True for layer',layerName) + def _getFirstHiddenLayer(self, layerInstant, layerSize, layerDict): + """ + Creates the first hidden layer + @ In, layerInstant, class, layer type from tensorflow.python.keras.layers + @ In, layerSize, int, nodes in layer + @ In, layerDict, dict, layer details + @ Out, layer, tensorflow.python.keras.layers, new layer + """ + return layerInstant(layerSize,input_shape=[None,self.featv.shape[-1]], **layerDict) + + def _getLastLayer(self, layerInstant, layerDict): + """ + Creates the last layer + @ In, layerInstant, class, layer type from tensorflow.python.keras.layers + @ In, layerSize, int, nodes in layer + @ In, layerDict, dict, layer details + @ Out, layer, tensorflow.python.keras.layers, new layer + """ + return tf.keras.layers.TimeDistributed(layerInstant(len(self.targv),**layerDict)) + def _preprocessInputs(self,featureVals): """ Perform input feature values before sending to ROM prediction diff --git a/framework/SupervisedLearning/KerasMLPRegression.py b/framework/SupervisedLearning/KerasMLPRegression.py new file mode 100644 index 0000000000..3bf6e622d0 --- /dev/null +++ b/framework/SupervisedLearning/KerasMLPRegression.py @@ -0,0 +1,74 @@ +# Copyright 2017 Battelle Energy Alliance, LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+  Created on 3-Nov-2021
+
+  @author: cogljj
+  module for Multi-layer perceptron regression
+"""
+#Internal Modules------------------------------------------------------------------------------------
+from .KerasRegression import KerasRegression
+#Internal Modules End--------------------------------------------------------------------------------
+
+class KerasMLPRegression(KerasRegression):
+  """
+    Multi-layer perceptron regressor constructed using Keras API in TensorFlow
+  """
+  info = {'problemtype':'regression', 'normalize':True}
+
+  @classmethod
+  def getInputSpecification(cls):
+    """
+      Method to get a reference to a class that specifies the input data for
+      class cls.
+      @ In, cls, the class for which we are retrieving the specification
+      @ Out, inputSpecification, InputData.ParameterInput, class to use for
+        specifying input of cls.
+    """
+    specs = super().getInputSpecification()
+    specs.description = r"""Multi-Layer Perceptron (MLP) (or Artificial Neural Network - ANN), a class of feedforward
+                        ANN, can be viewed as a logistic regression where input is first transformed
+                        using a non-linear transformation. This transformation projects the input data into a
+                        space where it becomes linearly separable. This intermediate layer is referred to as a
+                        \textbf{hidden layer}. An MLP consists of at least three layers of nodes. Except for the
+                        input nodes, each node is a neuron that uses a nonlinear \textbf{activation function}. MLP
+                        utilizes a supervised learning technique called \textbf{Backpropagation} for training.
+                        Generally, a single hidden layer is sufficient to make MLPs a universal approximator.
+                        However, many hidden layers, i.e. deep learning, can be used to model more complex nonlinear
+                        relationships.
The extra layers enable composition of features from lower layers, potentially + modeling complex data with fewer units than a similarly performing shallow network. + \\ + \zNormalizationPerformed{KerasMLPRegression} + \\ + In order to use this ROM, the \xmlNode{ROM} attribute \xmlAttr{subType} needs to + be \xmlString{KerasMLPRegression}""" + return specs + + def __init__(self): + """ + A constructor that will appropriately intialize a supervised learning object + @ In, None + @ Out, None + """ + super().__init__() + self.printTag = 'KerasMLPRegression' + self.allowedLayers = self.basicLayers + + def _handleInput(self, paramInput): + """ + Function to handle the common parts of the model parameter input. + @ In, paramInput, InputData.ParameterInput, the already parsed input. + @ Out, None + """ + super()._handleInput(paramInput) diff --git a/framework/SupervisedLearning/KerasRegression.py b/framework/SupervisedLearning/KerasRegression.py index 826774bbcf..30a758e033 100644 --- a/framework/SupervisedLearning/KerasRegression.py +++ b/framework/SupervisedLearning/KerasRegression.py @@ -71,7 +71,7 @@ def _getFirstHiddenLayer(self, layerInstant, layerSize, layerDict): @ In, layerDict, dict, layer details @ Out, layer, tensorflow.python.keras.layers, new layer """ - return layerInstant(layerSize,input_shape=[None,self.featv.shape[-1]], **layerDict) + return layerInstant(layerSize,input_shape=[self.featv.shape[-1]], **layerDict) def _getLastLayer(self, layerInstant, layerDict): """ @@ -81,7 +81,7 @@ def _getLastLayer(self, layerInstant, layerDict): @ In, layerDict, dict, layer details @ Out, layer, tensorflow.python.keras.layers, new layer """ - return tf.keras.layers.TimeDistributed(layerInstant(len(self.targv),**layerDict)) + return layerInstant(self.targv.shape[-1],**layerDict) def _getTrainingTargetValues(self, names, values): """ @@ -89,7 +89,7 @@ def _getTrainingTargetValues(self, names, values): on if this is a regression or classifier. 
@ In, names, list of names @ In, values, list of values - @ Out, targetValues, numpy.ndarray of shape (numSamples, numTimesteps, numFeatures) + @ Out, targetValues, numpy.ndarray of shape (numSamples, numTimesteps, numFeatures) or shape (numSamples, numFeatures) """ # Features must be 3d i.e. [numSamples, numTimeSteps, numFeatures] @@ -98,11 +98,17 @@ def _getTrainingTargetValues(self, names, values): self.raiseAnError(IOError,'The target '+target+' is not in the training set') firstTarget = values[names.index(self.target[0])] - targetValues = np.zeros((len(firstTarget), len(firstTarget[0]), - len(self.target))) - for i, target in enumerate(self.target): - self._localNormalizeData(values, names, target) - targetValues[:, :, i] = self._scaleToNormal(values[names.index(target)], target) + if type(firstTarget) == type(np.array(1)) and len(firstTarget.shape) == 1: + targetValues = np.zeros((len(firstTarget), len(self.target))) + for i, target in enumerate(self.target): + self._localNormalizeData(values, names, target) + targetValues[:, i] = self._scaleToNormal(values[names.index(target)], target) + else: + targetValues = np.zeros((len(firstTarget), len(firstTarget[0]), + len(self.target))) + for i, target in enumerate(self.target): + self._localNormalizeData(values, names, target) + targetValues[:, :, i] = self._scaleToNormal(values[names.index(target)], target) return targetValues @@ -145,7 +151,6 @@ def evaluate(self,edict): featureValuesShape = fval.shape if featureValuesShape != fval.shape: self.raiseAnError(IOError,'In training set, the number of values provided for feature '+feat+' are not consistent to other features!') - self._localNormalizeData(values,names,feat) fval = self._scaleToNormal(fval, feat) featureValues.append(fval) else: @@ -154,7 +159,9 @@ def evaluate(self,edict): result = self.__evaluateLocal__(featureValues) pivotParameter = self.pivotID - if type(edict[pivotParameter]) == type([]): + if pivotParameter not in edict: + pass #don't need to do 
anything + elif type(edict[pivotParameter]) == type([]): #XXX this should not be needed since sampler should just provide the numpy array. #Currently the CustomSampler provides all the pivot parameter values instead of the current one. self.raiseAWarning("Adjusting pivotParameter because incorrect type provided") @@ -175,5 +182,8 @@ def __evaluateLocal__(self,featureVals): prediction = {} outcome = self._ROM.predict(featureVals) for i, target in enumerate(self.target): - prediction[target] = self._invertScaleToNormal(outcome[0, :, i], target) + if len(outcome.shape) == 3: + prediction[target] = self._invertScaleToNormal(outcome[0, :, i], target) + else: + prediction[target] = self._invertScaleToNormal(outcome[0, i], target) return prediction diff --git a/tests/framework/ROM/tensorflow_keras/gold/LSTMRegression/out_resampled_0.csv b/tests/framework/ROM/tensorflow_keras/gold/LSTMRegression/out_resampled_0.csv index e36af7c657..749d4ce50f 100644 --- a/tests/framework/ROM/tensorflow_keras/gold/LSTMRegression/out_resampled_0.csv +++ b/tests/framework/ROM/tensorflow_keras/gold/LSTMRegression/out_resampled_0.csv @@ -1,102 +1,102 @@ index,sum,square,prev_sum,prev_square,prev_square_sum -0,0.301715016365,23.9918575287,0.0,0.0,0.0 -1,0.229419589043,24.0840988159,-7.57264440482,57.3449432818,31.7160669424 -2,0.107220843434,24.3070220947,-7.87435645452,62.0054895729,35.3035396515 -3,-0.0351829528809,24.6532649994,-8.07870057495,65.2654029798,38.1221882131 -4,-0.180599540472,25.0589580536,-8.18096117119,66.9281256846,39.9580643964 -5,-0.318931311369,25.4515113831,-8.17805981355,66.880662314,40.6626090155 -6,-0.444335013628,25.7774448395,-8.06861057073,65.1024765422,40.162889391 -7,-0.55317056179,26.0078105927,-7.85294698564,61.6687763592,38.4667574053 -8,-0.642424583435,26.1305732727,-7.53312022945,56.7479003913,35.662578878 -9,-0.708554029465,26.1403503418,-7.11286845588,50.5928976706,31.9135498803 -10,-0.746821045876,26.0320796967,-6.59755786267,43.5277697512,27.4469794863 
-11,-0.75127363205,25.8018722534,-5.99409644477,35.9291921892,22.5392568497 -12,-0.715579986572,25.4541931152,-5.31082188387,28.2048290822,17.4975106529 -13,-0.634981393814,25.0096435547,-4.55736545507,20.769579891,12.6391914008 -14,-0.509246230125,24.5072040558,-3.74449423598,14.0212370833,8.27094676353 -15,-0.34494766593,23.997209549,-2.88393426929,8.31707686959,4.66820782895 -16,-0.154685854912,23.524312973,-1.98817765084,3.95285037132,2.05685648579 -17,0.0463823080063,23.111574173,-1.0702767866,1.14549239994,0.59820444152 -18,0.243957415223,22.7607688904,-0.143629277806,0.0206293694431,0.378291958227 -19,0.426405906677,22.466884613,0.778242948154,0.605662086351,1.40222422628 -20,0.585257053375,22.2322635651,1.68191654683,2.82884327049,3.59392488599 -21,0.715216398239,22.0713863373,2.55428795712,6.52438696786,6.80132229929 -22,0.81441795826,22.0073051453,3.3827891733,11.443262591,10.8066192267 -23,0.885156273842,22.0636310577,4.15559446071,17.2689653219,15.3409551072 -24,0.934410214424,22.2549037933,4.86181453988,23.6372406202,20.1024751237 -25,0.97277545929,22.5800151825,5.4916749756,30.1584940376,24.7765914869 -26,1.01117730141,23.0251235962,6.03667577376,36.4414543975,29.0570752678 -27,1.05675590038,23.5736675262,6.48972950603,42.1165890615,32.6665615328 -28,1.11028766632,24.2094593048,6.84527564458,46.8577986502,35.3750902188 -29,1.16653037071,24.9071960449,7.09936919054,50.4010429036,37.0154374669 -30,1.21732020378,25.623966217,7.24974211359,52.5587607136,37.4942081055 -31,1.25560927391,26.3035640717,7.29583657844,53.2292313793,36.797945128 -32,1.27794837952,26.8886375427,7.23880941032,52.4003616789,34.9938471695 -33,1.2844671011,27.3308906555,7.08150773633,50.1477518197,32.2250476236 -34,1.27734398842,27.5969848633,6.8284162259,46.6272681542,28.7007747728 -35,1.25921845436,27.6726608276,6.48557683248,42.06270685,24.6820565524 -36,1.23247516155,27.5664672852,6.06048240233,36.7294469489,20.4639331805 
-37,1.19970142841,27.3110542297,5.56194595701,30.9352428288,16.3553757311 -38,1.16478705406,26.9584217072,4.99994786626,24.9994786653,12.6582630106 -39,1.13317525387,26.5675086975,4.38546350048,19.232290114,9.64683245111 -40,1.11000192165,26.1867408752,3.73027428148,13.9149462151,7.54898889913 -41,1.09694302082,25.838809967,3.04676532925,9.28277897153,6.53073029055 -42,1.09033381939,25.5171451569,2.34771312875,5.5117569349,6.68473966219 -43,1.08199059963,25.1969871521,1.64606680824,2.70953593717,8.0239128394 -44,1.06163346767,24.8520908356,0.954726728218,0.911503125574,10.4802592884 -45,1.01915383339,24.4656162262,0.286324124698,0.0819815043843,13.909252339 -46,0.946109652519,24.0320835114,-0.346994467836,0.120405160708,18.0993385547 -47,0.836756706238,23.5550632477,-0.933774369528,0.871934573188,22.7859690855 -48,0.688973903656,23.0461997986,-1.46344798302,2.14167999902,27.6692117065 -49,0.505214512348,22.5264129639,-1.92651479657,3.71145926141,32.433761368 -50,0.293485283852,22.0246429443,-2.31470312047,5.35785053591,36.7700056554 -51,0.0675588399172,21.5696907043,-2.62111082379,6.87022195059,40.3947304297 -52,-0.155188560486,21.1789131165,-2.8403226935,8.06743300321,43.0700748794 -53,-0.358226805925,20.8529014587,-2.96850243314,8.81200669555,44.6194626494 -54,-0.529858827591,20.579328537,-3.00345774698,9.0207584379,44.9394387343 -55,-0.664429545403,20.3409194946,-2.94467741044,8.67112505153,44.006616785 -56,-0.760982990265,20.1215457916,-2.7933397009,7.80274668463,41.8792699002 -57,-0.820948839188,19.9085731506,-2.55229204728,6.51419469462,38.6934577812 -58,-0.846134305,19.6928386688,-2.22600224278,4.95508598485,34.6539501417 -59,-0.837579727173,19.4684848785,-1.8204820461,3.31415488017,30.0205558078 -60,-0.795394897461,19.2338695526,-1.34318446296,1.80414450154,25.0907755299 -61,-0.719451546669,18.9935207367,-0.802876444496,0.644610585127,20.1799434156 -62,-0.610578894615,18.759979248,-0.209489154746,0.0438857059564,15.6001904824 
-63,-0.471667557955,18.5542106628,0.426051661374,0.181520018159,11.6396428011 -64,-0.308161467314,18.4038410187,1.09201234183,1.19249095471,8.54325073525 -65,-0.127842903137,18.339099884,1.77605184708,3.15436016351,6.49653596831 -66,0.0598771870136,18.3871765137,2.46543278485,6.07835881659,5.61334671855 -67,0.2450440377,18.5666713715,3.14723935195,9.90511553847,5.92844197967 -68,0.41858112812,18.8842964172,3.80859850763,14.5054225924,7.39540090496 -69,0.57379335165,19.3344478607,4.43690063784,19.6860872701,9.88999540961 -70,0.707495391369,19.9004936218,5.02001597792,25.2005604186,13.2187967651 -71,0.82030582428,20.5570163727,5.54650313235,30.7636969972,17.132435092 -72,0.915791869164,21.274061203,6.00580616285,36.0697076658,21.342617878 -73,0.998562335968,22.024187088,6.38843690887,40.8121261386,25.5417609789 -74,1.07204210758,22.7886066437,6.68613945302,44.7044607852,29.423909988 -75,1.13696241379,23.5557403564,6.89203394518,47.5001319015,32.7055431859 -76,1.19125568867,24.311460495,7.00073734674,49.010323398,35.1448553296 -77,1.23132312298,25.0295505524,7.0084590457,49.1184981952,36.5582237721 -78,1.25391471386,25.6708450317,6.91306971711,47.7905329136,36.8327479407 -79,1.25752687454,26.1906890869,6.71414225448,45.0797062135,35.9340173085 -80,1.24270105362,26.5489196777,6.412964069,41.1261081503,33.9085839082 -81,1.21143901348,26.7183036804,6.01252053609,36.1504031969,30.8809715939 -82,1.16642701626,26.6910228729,5.51744985597,30.4422529132,27.0454217042 -83,1.11066043377,26.4832611084,4.93397007671,24.3440607179,22.6529288224 -84,1.04773294926,26.1355438232,4.26977949817,18.231016963,17.9944371652 -85,0.982568979263,25.7054347992,3.53393212433,12.4886762594,13.3813264572 -86,0.921653985977,25.2521018982,2.73669025242,7.48947353771,9.12449860071 -87,0.87150812149,24.8186016083,1.88935667313,3.56966863828,5.51347075848 -88,0.835680961609,24.4230499268,1.00408930024,1.00819532285,2.79688029582 
-89,0.812774062157,24.065908432,0.0937013446953,0.0087799419977,1.16571237584 -90,0.797000527382,23.7456436157,-0.828549608133,0.686494453137,0.74037823996 -91,0.780135989189,23.4684944153,-1.74918007159,3.05963092284,1.562513608 -92,0.753383994102,23.2478427887,-2.65470300745,7.04744805774,3.59204958455 -93,0.708757460117,23.0981788635,-3.53185031282,12.4739666321,6.70975426768 -94,0.640135169029,23.0297298431,-4.36779170229,19.0776043546,10.7250757298 -95,0.544076085091,23.0478630066,-5.15034630396,26.5260670507,15.388760882 -96,0.420568585396,23.1585292816,-5.86818341083,34.4355765431,20.4094038795 -97,0.274244964123,23.3723888397,-6.51100901154,42.3932383483,25.4728137467 -98,0.115107126534,23.6951541901,-7.06973496434,49.981152466,30.2629015337 -99,-0.0439793467522,24.1101932526,-7.53662797066,56.8007611681,34.4826852946 -100,-0.192170113325,24.5745697021,-7.90543584503,62.4959158999,37.8740033557 +0,0.303927361965,23.9951114655,0.0,0.0,0.0 +1,0.226605504751,24.0882339478,-7.57264440482,57.3449432818,31.7160669424 +2,0.0941530317068,24.3212051392,-7.87435645452,62.0054895729,35.3035396515 +3,-0.0612124204636,24.6869087219,-8.07870057495,65.2654029798,38.1221882131 +4,-0.219535082579,25.1129283905,-8.18096117119,66.9281256846,39.9580643964 +5,-0.368804067373,25.5201568604,-8.17805981355,66.880662314,40.6626090155 +6,-0.502567529678,25.8541374207,-8.06861057073,65.1024765422,40.162889391 +7,-0.617612600327,26.0891437531,-7.85294698564,61.6687763592,38.4667574053 +8,-0.711839437485,26.217880249,-7.53312022945,56.7479003913,35.662578878 +9,-0.782597899437,26.238609314,-7.11286845588,50.5928976706,31.9135498803 +10,-0.825647950172,26.1464271545,-6.59755786267,43.5277697512,27.4469794863 +11,-0.834898591042,25.9327716827,-5.99409644477,35.9291921892,22.5392568497 +12,-0.803183197975,25.594171524,-5.31082188387,28.2048290822,17.4975106529 +13,-0.724315643311,25.1444606781,-4.55736545507,20.769579891,12.6391914008 
+14,-0.59641289711,24.6205978394,-3.74449423598,14.0212370833,8.27094676353 +15,-0.425060659647,24.076593399,-2.88393426929,8.31707686959,4.66820782895 +16,-0.223322421312,23.5648479462,-1.98817765084,3.95285037132,2.05685648579 +17,-0.00800274312496,23.1159591675,-1.0702767866,1.14549239994,0.59820444152 +18,0.204671755433,22.735748291,-0.143629277806,0.0206293694431,0.378291958227 +19,0.40147870779,22.4200630188,0.778242948154,0.605662086351,1.40222422628 +20,0.572872161865,22.1708583832,1.68191654683,2.82884327049,3.59392488599 +21,0.712975978851,22.0022468567,2.55428795712,6.52438696786,6.80132229929 +22,0.819815993309,21.9368915558,3.3827891733,11.443262591,10.8066192267 +23,0.896090626717,21.9972648621,4.15559446071,17.2689653219,15.3409551072 +24,0.949645400047,22.1957321167,4.86181453988,23.6372406202,20.1024751237 +25,0.992008805275,22.5299682617,5.4916749756,30.1584940376,24.7765914869 +26,1.03440916538,22.9895572662,6.03667577376,36.4414543975,29.0570752678 +27,1.08317327499,23.5658359528,6.48972950603,42.1165890615,32.6665615328 +28,1.13731682301,24.2478027344,6.84527564458,46.8577986502,35.3750902188 +29,1.18983387947,25.0063781738,7.09936919054,50.4010429036,37.0154374669 +30,1.23206317425,25.7887115479,7.24974211359,52.5587607136,37.4942081055 +31,1.25820028782,26.5296878815,7.29583657844,53.2292313793,36.797945128 +32,1.26703190804,27.167098999,7.23880941032,52.4003616789,34.9938471695 +33,1.26082658768,27.6499938965,7.08150773633,50.1477518197,32.2250476236 +34,1.24318242073,27.941608429,6.8284162259,46.6272681542,28.7007747728 +35,1.21724534035,28.022354126,6.48557683248,42.06270685,24.6820565524 +36,1.18490254879,27.8948554993,6.06048240233,36.7294469489,20.4639331805 +37,1.14729082584,27.5883083344,5.56194595701,30.9352428288,16.3553757311 +38,1.10653281212,27.1565361023,4.99994786626,24.9994786653,12.6582630106 +39,1.06726169586,26.6665992737,4.38546350048,19.232290114,9.64683245111 
+40,1.03564453125,26.18019104,3.73027428148,13.9149462151,7.54898889913 +41,1.01559150219,25.734703064,3.04676532925,9.28277897153,6.53073029055 +42,1.00522005558,25.3340568542,2.34771312875,5.5117569349,6.68473966219 +43,0.996650695801,24.9562454224,1.64606680824,2.70953593717,8.0239128394 +44,0.978643059731,24.5713157654,0.954726728218,0.911503125574,10.4802592884 +45,0.939589977264,24.1559944153,0.286324124698,0.0819815043843,13.909252339 +46,0.869584441185,23.6987800598,-0.346994467836,0.120405160708,18.0993385547 +47,0.761756539345,23.1995658875,-0.933774369528,0.871934573188,22.7859690855 +48,0.613351345062,22.6698207855,-1.46344798302,2.14167999902,27.6692117065 +49,0.426902115345,22.1337776184,-1.92651479657,3.71145926141,32.433761368 +50,0.211579784751,21.6245536804,-2.31470312047,5.35785053591,36.7700056554 +51,-0.0166815072298,21.1718902588,-2.62111082379,6.87022195059,40.3947304297 +52,-0.23844397068,20.7894744873,-2.8403226935,8.06743300321,43.0700748794 +53,-0.43647095561,20.4724311829,-2.96850243314,8.81200669555,44.6194626494 +54,-0.599930882454,20.2048568726,-3.00345774698,9.0207584379,44.9394387343 +55,-0.72488796711,19.9692058563,-2.94467741044,8.67112505153,44.006616785 +56,-0.812146782875,19.7515029907,-2.7933397009,7.80274668463,41.8792699002 +57,-0.864467978477,19.5419139862,-2.55229204728,6.51419469462,38.6934577812 +58,-0.884399294853,19.3332862854,-2.22600224278,4.95508598485,34.6539501417 +59,-0.873121142387,19.1202850342,-1.8204820461,3.31415488017,30.0205558078 +60,-0.830319285393,18.9004535675,-1.34318446296,1.80414450154,25.0907755299 +61,-0.754966616631,18.6769428253,-0.802876444496,0.644610585127,20.1799434156 +62,-0.646701335907,18.4613113403,-0.209489154746,0.0438857059564,15.6001904824 +63,-0.507231235504,18.2745018005,0.426051661374,0.181520018159,11.6396428011 +64,-0.341137081385,18.1448402405,1.09201234183,1.19249095471,8.54325073525 +65,-0.155876368284,18.1033973694,1.77605184708,3.15436016351,6.49653596831 
+66,0.0388505011797,18.177728653,2.46543278485,6.07835881659,5.61334671855 +67,0.232285305858,18.3860092163,3.14723935195,9.90511553847,5.92844197967 +68,0.414341658354,18.7337360382,3.80859850763,14.5054225924,7.39540090496 +69,0.577488064766,19.2136535645,4.43690063784,19.6860872701,9.88999540961 +70,0.718109548092,19.807559967,5.02001597792,25.2005604186,13.2187967651 +71,0.836775541306,20.4892787933,5.54650313235,30.7636969972,17.132435092 +72,0.937107682228,21.2300949097,6.00580616285,36.0697076658,21.342617878 +73,1.02348697186,22.0065994263,6.38843690887,40.8121261386,25.5417609789 +74,1.09858012199,22.8053894043,6.68613945302,44.7044607852,29.423909988 +75,1.16196894646,23.6177387238,6.89203394518,47.5001319015,32.7055431859 +76,1.2105973959,24.4268436432,7.00073734674,49.010323398,35.1448553296 +77,1.24071669579,25.1994056702,7.0084590457,49.1184981952,36.5582237721 +78,1.25005185604,25.888715744,6.91306971711,47.7905329136,36.8327479407 +79,1.23886072636,26.4449596405,6.71414225448,45.0797062135,35.9340173085 +80,1.20957386494,26.8251285553,6.412964069,41.1261081503,33.9085839082 +81,1.16568183899,26.9996757507,6.01252053609,36.1504031969,30.8809715939 +82,1.11068439484,26.9576454163,5.51744985597,30.4422529132,27.0454217042 +83,1.04757344723,26.7117996216,4.93397007671,24.3440607179,22.6529288224 +84,0.979054808617,26.3013648987,4.26977949817,18.231016963,17.9944371652 +85,0.908600449562,25.7875785828,3.53393212433,12.4886762594,13.3813264572 +86,0.841751813889,25.2399253845,2.73669025242,7.48947353771,9.12449860071 +87,0.785813808441,24.7168464661,1.88935667313,3.56966863828,5.51347075848 +88,0.746554613113,24.250585556,1.00408930024,1.00819532285,2.79688029582 +89,0.724395632744,23.8476161957,0.0937013446953,0.0087799419977,1.16571237584 +90,0.713783502579,23.5039215088,-0.828549608133,0.686494453137,0.74037823996 +91,0.705411374569,23.2194404602,-1.74918007159,3.05963092284,1.562513608 
+92,0.68885320425,23.0014648438,-2.65470300745,7.04744805774,3.59204958455 +93,0.654487609863,22.8600521088,-3.53185031282,12.4739666321,6.70975426768 +94,0.594767332077,22.802816391,-4.36779170229,19.0776043546,10.7250757298 +95,0.505023360252,22.8349323273,-5.15034630396,26.5260670507,15.388760882 +96,0.384233087301,22.9659919739,-5.86818341083,34.4355765431,20.4094038795 +97,0.236605405807,23.212972641,-6.51100901154,42.3932383483,25.4728137467 +98,0.0726844072342,23.5840129852,-7.06973496434,49.981152466,30.2629015337 +99,-0.0933926403522,24.0556869507,-7.53662797066,56.8007611681,34.4826852946 +100,-0.249267965555,24.5724411011,-7.90543584503,62.4959158999,37.8740033557 diff --git a/tests/framework/ROM/tensorflow_keras/gold/LSTMRegression/out_resampled_1.csv b/tests/framework/ROM/tensorflow_keras/gold/LSTMRegression/out_resampled_1.csv index 9e4e665e7b..75e27eeaaf 100644 --- a/tests/framework/ROM/tensorflow_keras/gold/LSTMRegression/out_resampled_1.csv +++ b/tests/framework/ROM/tensorflow_keras/gold/LSTMRegression/out_resampled_1.csv @@ -1,102 +1,102 @@ index,sum,square,prev_sum,prev_square,prev_square_sum -0,0.309352308512,24.0071716309,0.0,0.0,0.0 -1,0.396279752254,24.1006088257,5.48571680083,30.093088819,15.87093203 -2,0.500626087189,24.2868785858,5.97047046237,35.6465175421,18.0772231897 -3,0.615488409996,24.5597705841,6.4160769313,41.1660431884,20.5988298121 -4,0.736331701279,24.9189720154,6.81550301844,46.4510813944,23.2746906398 -5,0.857904314995,25.3638343811,7.16236129498,51.2994193199,25.9346461023 -6,0.973170280457,25.8886814117,7.45100221818,55.5174340554,28.409219378 -7,1.07432663441,26.4772491455,7.67659565148,58.9301207964,30.5393586838 -8,1.15498507023,27.1010398865,7.83520060653,61.3903685446,32.185582283 -9,1.21194863319,27.7230873108,7.92382220388,62.7869583187,33.2360019371 -10,1.24561607838,28.3039913177,7.94045503297,63.0508261307,33.6127648211 -11,1.25913918018,28.8067378998,7.88411228776,62.159226566,33.2765445807 
-12,1.25707554817,29.1989631653,7.75484025865,60.1375474371,32.2288240406 -13,1.24416053295,29.4533653259,7.55371797236,57.058655206,30.511838664 -14,1.22442865372,29.5477828979,7.28284198469,53.0397873739,28.20618396 -15,1.20064556599,29.4666862488,6.9452965447,48.2371440938,25.4262239445 -16,1.17401719093,29.2051181793,6.54510955954,42.8384591464,22.3135638174 -17,1.14434587955,28.7745800018,6.08719499286,37.053942881,19.0289609801 -18,1.11097931862,28.207906723,5.57728252532,31.1060803672,15.7431380431 -19,1.07459330559,27.5585842133,5.02183548873,25.2188316758,12.6270244132 -20,1.03877806664,26.8920021057,4.42795825389,19.6068142982,9.84198580776 -21,1.00962400436,26.270532608,3.80329440417,14.4650483248,7.53060173447 -22,0.992751955986,25.7368564606,3.15591715907,9.95981311492,5.80851956504 -23,0.989789128304,25.3017978668,2.49421362379,6.22110160112,4.75785211684 -24,0.997075915337,24.9453964233,1.82676452932,3.33706864556,4.42249717623 -25,1.0071901083,24.6326732635,1.16222119262,1.35075810058,4.80564722539 -26,1.01140320301,24.3321266174,0.509181466517,0.259265765844,5.86963208313 -27,1.00137078762,24.0250530243,-0.123933536936,0.0153595215775,7.53810343927 -28,0.969933271408,23.7049732208,-0.729000176437,0.531441257246,9.70043601595 -29,0.911587715149,23.3730583191,-1.29830872868,1.68560555496,12.2180930242 -30,0.823001146317,23.0348510742,-1.8246738781,3.32943476142,14.9325909838 -31,0.703559160233,22.7000770569,-2.30153766019,5.29707560125,17.6746072929 -32,0.555826425552,22.3839626312,-2.72306337785,7.41507415981,20.2737084292 -33,0.385879546404,22.1062679291,-3.08421914035,9.51240770568,22.5681410869 -34,0.203271508217,21.8856182098,-3.38084982323,11.4301455272,24.4141249485 -35,0.0200037956238,21.7316131592,-3.60973641472,13.0301969838,25.6941143584 -36,-0.151661962271,21.6405582428,-3.76864189496,14.2026617325,26.3235552764 -37,-0.301582843065,21.5975131989,-3.85634298844,14.8713812445,26.2557501427 
-38,-0.423045128584,21.5821838379,-3.8726473332,14.9973973673,25.4845517538 -39,-0.512620449066,21.5744438171,-3.81839581989,14.5801466374,24.0447316973 -40,-0.569092392921,21.5573635101,-3.69545006737,13.6563512004,22.0100022005 -41,-0.592229127884,21.5180988312,-3.506665215,12.2967009301,19.4888047636 -42,-0.581963062286,21.4479789734,-3.25584842324,10.6005489551,16.6181069675 -43,-0.538252830505,21.3428936005,-2.94770367952,8.68895698225,13.5555630284 -44,-0.461624771357,21.2041244507,-2.58776370349,6.6965209851,10.4704874671 -45,-0.35410246253,21.0389671326,-2.18230993111,4.76247663541,7.53415930252 -46,-0.219972342253,20.8601837158,-1.73828172866,3.0216233682,4.91001255501 -47,-0.0658738911152,20.6839752197,-1.26317614264,1.59561396733,2.74427536676 -48,0.0998588502407,20.5270900726,-0.764939627466,0.585132633668,1.1575943291 -49,0.268248856068,20.4040298462,-0.251853308576,0.0634300890409,0.238124134119 -50,0.430704295635,20.3251228333,0.2675865688,0.0716025718021,0.0364786705157 -51,0.579788327217,20.2959690094,0.784791285476,0.61589736176,0.562832980227 -52,0.709668397903,20.3182487488,1.29120362461,1.66720680021,1.78634219808 -53,0.816288352013,20.391242981,1.77842139518,3.16278265883,3.6369107796 -54,0.89740717411,20.51304245,2.23831872582,5.01007071836,6.00921059603 -55,0.952634811401,20.6806163788,2.66316338438,7.09243921192,8.76871756308 -56,0.983466744423,20.8886680603,3.04572843554,9.27646170306,11.7594207614 -57,0.993147611618,21.1281509399,3.37939663045,11.4203215859,14.8127621347 -58,0.986135721207,21.3860797882,3.65825602677,13.3828371574,17.7572943105 -59,0.967175483704,21.6478176117,3.8771854637,15.03256712,20.4285029274 -60,0.940330266953,21.9007835388,4.03192866236,16.2564487383,22.6782304198 -61,0.908409953117,22.1363372803,4.11915588565,16.9674452103,24.3831610641 -62,0.872915029526,22.3477401733,4.13651227065,17.1107337653,25.4518808998 -63,0.834348201752,22.5263347626,4.08265213801,16.66804848,25.8301078383 
-64,0.792746543884,22.6601047516,3.95725878493,15.6598970909,25.5037921637 -65,0.748282313347,22.7362594604,3.76104947672,14.1454931664,24.4999097096 -66,0.701722025871,22.7458229065,3.49576556508,12.220376886,22.8849022653 -67,0.654583692551,22.68724823,3.16414787491,10.0118317743,20.7608546324 -68,0.609021067619,22.5674381256,2.76989771369,7.67233334428,18.2596275034 -69,0.567573904991,22.4000968933,2.31762406383,5.37138130123,15.5352825119 -70,0.532886147499,22.2023487091,1.81277771749,3.28616305303,12.7552337081 -71,0.507344782352,21.9910583496,1.26157330097,1.59156719372,10.0906327201 -72,0.492535561323,21.7803993225,0.67090030996,0.450107225905,7.70653879158 -73,0.488579571247,21.5816078186,0.0482244351478,0.00232559614532,5.75243721768 -74,0.493670374155,21.404548645,-0.5985194029,0.358225475647,4.35364973468 -75,0.504111051559,21.2596263885,-1.2610407712,1.59022382664,3.60412928557 -76,0.514856934547,21.1585083008,-1.93081255883,3.72803713733,3.56105222974 -77,0.520292282104,21.1132774353,-2.59919321932,6.75580539136,4.24151802824 -78,0.514941394329,21.1345367432,-3.25755066309,10.6116363226,5.62154562833 -79,0.493950754404,21.2294216156,-3.897386018,15.1896177733,7.63742411617 -80,0.453333258629,21.4004669189,-4.51045547831,20.3442086218,10.1893402511 -81,0.390149712563,21.6464614868,-5.08888848962,25.8967860598,13.1470749764 -82,0.303062319756,21.9654026031,-5.62530057006,31.6440065035,16.3574423984 -83,0.19357471168,22.356098175,-6.11289914535,37.3675359613,19.6530448452 -84,0.0671241432428,22.8139724731,-6.54558087557,42.8446289986,22.8618421684 -85,-0.0676289200783,23.3233680725,-6.91801907393,47.8589879073,25.8169867818 -86,-0.201394885778,23.8544006348,-7.22573996057,52.2113179778,28.3663606943 -87,-0.326856046915,24.3685722351,-7.46518665453,55.7290117869,30.3812678805 -88,-0.439756780863,24.8293991089,-7.63376998379,58.2744441655,31.7637837509 -89,-0.53850710392,25.2108726501,-7.72990538283,59.7514372274,32.4523404646 
-90,-0.623111128807,25.4999351501,-7.75303534687,60.1095570898,32.4252279576 -91,-0.69409263134,25.6942024231,-7.70363712034,59.3460248819,31.7018100382 -92,-0.751692533493,25.7976074219,-7.58321550879,57.5051574527,30.341385883 -93,-0.795375227928,25.8158893585,-7.39428091795,54.6753902936,28.4397622445 -94,-0.82360625267,25.753112793,-7.14031293596,50.9840688235,26.1237329086 -95,-0.833904743195,25.6101722717,-6.82570998246,46.5903167647,23.5437819089 -96,-0.823221564293,25.3861408234,-6.45572574909,41.6763949475,20.8654288251 -97,-0.788672804832,25.0822181702,-6.03639334556,36.4380446223,18.2597123309 -98,-0.728544354439,24.7063045502,-5.57443824234,31.0743617177,15.8933575617 -99,-0.643340945244,24.2753868103,-5.07718126238,25.7777695711,13.9191909899 -100,-0.536521077156,23.8140258789,-4.55243301702,20.7246463745,12.4673523124 +0,0.303927361965,23.9951114655,0.0,0.0,0.0 +1,0.389233916998,24.0739383698,5.48571680083,30.093088819,15.87093203 +2,0.493053525686,24.2503185272,5.97047046237,35.6465175421,18.0772231897 +3,0.606620669365,24.511346817,6.4160769313,41.1660431884,20.5988298121 +4,0.724901080132,24.8496990204,6.81550301844,46.4510813944,23.2746906398 +5,0.843661308289,25.257982254,7.16236129498,51.2994193199,25.9346461023 +6,0.957948923111,25.7273025513,7.45100221818,55.5174340554,28.409219378 +7,1.06210184097,26.243894577,7.67659565148,58.9301207964,30.5393586838 +8,1.15088105202,26.786195755,7.83520060653,61.3903685446,32.185582283 +9,1.22079753876,27.3252544403,7.92382220388,62.7869583187,33.2360019371 +10,1.27081251144,27.8280010223,7.94045503297,63.0508261307,33.6127648211 +11,1.3021696806,28.2612361908,7.88411228776,62.159226566,33.2765445807 +12,1.31762433052,28.594783783,7.75484025865,60.1375474371,32.2288240406 +13,1.32049059868,28.8035755157,7.55371797236,57.058655206,30.511838664 +14,1.3138076067,28.8693408966,7.28284198469,53.0397873739,28.20618396 +15,1.29979896545,28.7827911377,6.9452965447,48.2371440938,25.4262239445 
+16,1.27973783016,28.5466918945,6.54510955954,42.8384591464,22.3135638174 +17,1.25435221195,28.1790275574,6.08719499286,37.053942881,19.0289609801 +18,1.22475552559,27.7141704559,5.57728252532,31.1060803672,15.7431380431 +19,1.19345092773,27.1997070312,5.02183548873,25.2188316758,12.6270244132 +20,1.16450309753,26.6879940033,4.42795825389,19.6068142982,9.84198580776 +21,1.14226937294,26.2235946655,3.80329440417,14.4650483248,7.53060173447 +22,1.12924075127,25.8302288055,3.15591715907,9.95981311492,5.80851956504 +23,1.12446570396,25.5041370392,2.49421362379,6.22110160112,4.75785211684 +24,1.12363922596,25.220205307,1.82676452932,3.33706864556,4.42249717623 +25,1.12051010132,24.9475021362,1.16222119262,1.35075810058,4.80564722539 +26,1.10840523243,24.6630935669,0.509181466517,0.259265765844,5.86963208313 +27,1.08117806911,24.3571147919,-0.123933536936,0.0153595215775,7.53810343927 +28,1.03368163109,24.0303382874,-0.729000176437,0.531441257246,9.70043601595 +29,0.962133407593,23.6893424988,-1.29830872868,1.68560555496,12.2180930242 +30,0.864563703537,23.3432331085,-1.8246738781,3.32943476142,14.9325909838 +31,0.741284489632,23.0030765533,-2.30153766019,5.29707560125,17.6746072929 +32,0.595222294331,22.682756424,-2.72306337785,7.41507415981,20.2737084292 +33,0.432025432587,22.3986854553,-3.08421914035,9.51240770568,22.5681410869 +34,0.259829610586,22.1664772034,-3.38084982323,11.4301455272,24.4141249485 +35,0.088434278965,21.9955291748,-3.60973641472,13.0301969838,25.6941143584 +36,-0.0721649229527,21.8850078583,-3.76864189496,14.2026617325,26.3235552764 +37,-0.213382810354,21.8238811493,-3.85634298844,14.8713812445,26.2557501427 +38,-0.329048722982,21.7944660187,-3.8726473332,14.9973973673,25.4845517538 +39,-0.415486425161,21.7770214081,-3.81839581989,14.5801466374,24.0447316973 +40,-0.470916062593,21.7533111572,-3.69545006737,13.6563512004,22.0100022005 +41,-0.494675010443,21.7086734772,-3.506665215,12.2967009301,19.4888047636 
+42,-0.486682444811,21.6330871582,-3.25584842324,10.6005489551,16.6181069675 +43,-0.447361320257,21.5219268799,-2.94770367952,8.68895698225,13.5555630284 +44,-0.377997368574,21.3764972687,-2.58776370349,6.6965209851,10.4704874671 +45,-0.281281083822,21.2039813995,-2.18230993111,4.76247663541,7.53415930252 +46,-0.161662042141,21.0164146423,-1.73828172866,3.0216233682,4.91001255501 +47,-0.0252401232719,20.828660965,-1.26317614264,1.59561396733,2.74427536676 +48,0.120791479945,20.6560344696,-0.764939627466,0.585132633668,1.1575943291 +49,0.26890835166,20.5121421814,-0.251853308576,0.0634300890409,0.238124134119 +50,0.411975204945,20.407459259,0.2675865688,0.0716025718021,0.0364786705157 +51,0.543795466423,20.3488311768,0.784791285476,0.61589736176,0.562832980227 +52,0.659462094307,20.339887619,1.29120362461,1.66720680021,1.78634219808 +53,0.755542874336,20.381942749,1.77842139518,3.16278265883,3.6369107796 +54,0.830168128014,20.4747066498,2.23831872582,5.01007071836,6.00921059603 +55,0.883079171181,20.616350174,2.66316338438,7.09243921192,8.76871756308 +56,0.915621161461,20.802734375,3.04572843554,9.27646170306,11.7594207614 +57,0.930563807487,21.0264129639,3.37939663045,11.4203215859,14.8127621347 +58,0.931628704071,21.276304245,3.65825602677,13.3828371574,17.7572943105 +59,0.922748565674,21.5389003754,3.8771854637,15.03256712,20.4285029274 +60,0.907297253609,21.8006267548,4.03192866236,16.2564487383,22.6782304198 +61,0.887593626976,22.0497570038,4.11915588565,16.9674452103,24.3831610641 +62,0.864804506302,22.2763519287,4.13651227065,17.1107337653,25.4518808998 +63,0.839181423187,22.470703125,4.08265213801,16.66804848,25.8301078383 +64,0.810498714447,22.6223201752,3.95725878493,15.6598970909,25.5037921637 +65,0.778564214706,22.7209606171,3.76104947672,14.1454931664,24.4999097096 +66,0.743658781052,22.7592449188,3.49576556508,12.220376886,22.8849022653 +67,0.706777572632,22.7351779938,3.16414787491,10.0118317743,20.7608546324 
+68,0.669630408287,22.6531429291,2.76989771369,7.67233334428,18.2596275034 +69,0.634434938431,22.5228939056,2.31762406383,5.37138130123,15.5352825119 +70,0.603549838066,22.3571567535,1.81277771749,3.28616305303,12.7552337081 +71,0.578987121582,22.1690502167,1.26157330097,1.59156719372,10.0906327201 +72,0.56186747551,21.9706172943,0.67090030996,0.450107225905,7.70653879158 +73,0.551974177361,21.7729606628,0.0482244351478,0.00232559614532,5.75243721768 +74,0.547583818436,21.5874328613,-0.5985194029,0.358225475647,4.35364973468 +75,0.545651674271,21.4266757965,-1.2610407712,1.59022382664,3.60412928557 +76,0.542273283005,21.3046360016,-1.93081255883,3.72803713733,3.56105222974 +77,0.533250570297,21.2354698181,-2.59919321932,6.75580539136,4.24151802824 +78,0.51459634304,21.2317886353,-3.25755066309,10.6116363226,5.62154562833 +79,0.482885867357,21.3028182983,-3.897386018,15.1896177733,7.63742411617 +80,0.435438990593,21.4529895782,-4.51045547831,20.3442086218,10.1893402511 +81,0.370468348265,21.681728363,-5.08888848962,25.8967860598,13.1470749764 +82,0.287497550249,21.9847488403,-5.62530057006,31.6440065035,16.3574423984 +83,0.188226789236,22.3552913666,-6.11289914535,37.3675359613,19.6530448452 +84,0.0772227942944,22.7829723358,-6.54558087557,42.8446289986,22.8618421684 +85,-0.0387051403522,23.2508468628,-6.91801907393,47.8589879073,25.8169867818 +86,-0.152502566576,23.7343139648,-7.22573996057,52.2113179778,28.3663606943 +87,-0.258852720261,24.2039813995,-7.46518665453,55.7290117869,30.3812678805 +88,-0.354904741049,24.63123703,-7.63376998379,58.2744441655,31.7637837509 +89,-0.439757555723,24.9934158325,-7.72990538283,59.7514372274,32.4523404646 +90,-0.513444304466,25.2760906219,-7.75303534687,60.1095570898,32.4252279576 +91,-0.576037764549,25.4723968506,-7.70363712034,59.3460248819,31.7018100382 +92,-0.627094864845,25.5807666779,-7.58321550879,57.5051574527,30.341385883 +93,-0.665424346924,25.6023349762,-7.39428091795,54.6753902936,28.4397622445 
+94,-0.68910074234,25.5390167236,-7.14031293596,50.9840688235,26.1237329086 +95,-0.695691943169,25.393037796,-6.82570998246,46.5903167647,23.5437819089 +96,-0.682686567307,25.168132782,-6.45572574909,41.6763949475,20.8654288251 +97,-0.648098945618,24.8718338013,-6.03639334556,36.4380446223,18.2597123309 +98,-0.591177940369,24.5173168182,-5.57443824234,31.0743617177,15.8933575617 +99,-0.513076424599,24.1235103607,-5.07718126238,25.7777695711,13.9191909899 +100,-0.417236655951,23.7130737305,-4.55243301702,20.7246463745,12.4673523124 diff --git a/tests/framework/ROM/tensorflow_keras/gold/keras_mlp_regression/outMLPClassifier.csv b/tests/framework/ROM/tensorflow_keras/gold/keras_mlp_regression/outMLPClassifier.csv new file mode 100644 index 0000000000..536b7ab885 --- /dev/null +++ b/tests/framework/ROM/tensorflow_keras/gold/keras_mlp_regression/outMLPClassifier.csv @@ -0,0 +1,41 @@ +x1,x2,x3,x4,x5,x6,x7,x8,y +2.0,175.0,88.0,0.0,0.0,22.9,0.326,22.0,-168.534967145 +2.0,92.0,52.0,0.0,0.0,30.1,0.141,22.0,-136.640403488 +3.0,130.0,78.0,23.0,79.0,28.4,0.323,34.0,624.101374858 +8.0,120.0,86.0,0.0,0.0,28.4,0.259,22.0,-250.096262333 +2.0,174.0,88.0,37.0,120.0,44.5,0.646,24.0,1417.05884179 +2.0,106.0,56.0,27.0,165.0,29.0,0.426,22.0,724.428347807 +2.0,105.0,75.0,0.0,0.0,23.3,0.56,53.0,-296.397438805 +4.0,95.0,60.0,32.0,0.0,35.4,0.284,28.0,1161.74621636 +0.0,126.0,86.0,27.0,120.0,27.4,0.515,21.0,684.765842826 +8.0,65.0,72.0,23.0,0.0,32.0,0.6,42.0,622.514089322 +2.0,99.0,60.0,17.0,160.0,36.6,0.453,21.0,398.730881462 +1.0,102.0,74.0,0.0,0.0,39.5,0.293,42.0,-92.226801636 +11.0,120.0,80.0,37.0,150.0,42.3,0.785,48.0,1217.16961815 +3.0,102.0,44.0,20.0,94.0,30.8,0.4,26.0,605.181799346 +1.0,109.0,58.0,18.0,116.0,28.5,0.219,22.0,422.199516942 +9.0,140.0,94.0,0.0,0.0,32.7,0.734,45.0,-175.692705443 +13.0,153.0,88.0,37.0,140.0,40.6,1.174,39.0,1270.77116024 +12.0,100.0,84.0,33.0,105.0,30.0,0.488,46.0,877.836640891 +1.0,147.0,94.0,41.0,0.0,49.3,0.358,27.0,1686.38979997 
+1.0,81.0,74.0,41.0,57.0,46.3,1.096,32.0,1508.80888951
+3.0,187.0,70.0,22.0,200.0,36.4,0.408,36.0,720.232358272
+6.0,162.0,62.0,0.0,0.0,24.3,0.178,50.0,-89.3525953073
+4.0,136.0,70.0,0.0,0.0,31.2,1.182,22.0,-61.2443783666
+1.0,121.0,78.0,39.0,74.0,39.0,0.261,28.0,1378.82910041
+3.0,108.0,62.0,24.0,0.0,26.0,0.223,25.0,752.237118717
+0.0,181.0,88.0,44.0,510.0,43.3,0.222,26.0,1175.11924589
+8.0,154.0,78.0,32.0,0.0,32.4,0.443,45.0,1186.87858501
+1.0,128.0,88.0,39.0,110.0,36.5,1.057,37.0,1291.75851705
+7.0,137.0,90.0,41.0,0.0,32.0,0.391,39.0,1438.53006794
+0.0,123.0,72.0,0.0,0.0,36.3,0.258,52.0,-71.1431638126
+1.0,106.0,76.0,0.0,0.0,37.5,0.197,26.0,-112.775338427
+6.0,190.0,92.0,0.0,0.0,35.5,0.278,66.0,-12.5909536144
+2.0,88.0,58.0,26.0,16.0,28.4,0.766,22.0,825.54933236
+9.0,170.0,74.0,31.0,0.0,44.0,0.403,43.0,1355.87012305
+9.0,89.0,62.0,0.0,0.0,22.5,0.142,33.0,-308.15859374
+10.0,101.0,76.0,48.0,180.0,32.9,0.171,63.0,1435.49974544
+2.0,122.0,70.0,27.0,0.0,36.8,0.34,27.0,1014.91176073
+5.0,121.0,72.0,23.0,112.0,26.2,0.245,30.0,551.266019966
+1.0,126.0,60.0,0.0,0.0,30.1,0.349,47.0,-84.8083653316
+1.0,93.0,70.0,31.0,0.0,30.4,0.315,23.0,1015.51785117
diff --git a/tests/framework/ROM/tensorflow_keras/keras_mlp_regression.xml
new file mode 100644
index 0000000000..8bfa9457ff
--- /dev/null
+++ b/tests/framework/ROM/tensorflow_keras/keras_mlp_regression.xml
@@ -0,0 +1,137 @@
+
+
+
+ framework/Samplers/ROM/tensorflow_keras.tf_mlpc
+ cogljj
+ 2021-11-03
+ Models.ROM
+
+ Test the capability of multilayer perceptron regression (mlpr) using TensorFlow/Keras
+ The data had an output of '=A2+2*B2-3*C2+D2^2-E2+F2+G2-H2' generated by spreadsheet with the numbers coming from the classifier test.
+ CustomSampler is used to convert the csv format data into PointSet, and it is also used to test the trained
+ mlpr with the test data.
+ + + + + keras_mlp_regression + CustomSampler,train,stats,resample,print + 1 + + + + train.csv + test.csv + + + + + train.csv + + + + + + + + + + + + test.csv + + + + + + + + + + + + + + x1,x2,x3,x4,x5,x6,x7,x8 + y + + + x1,x2,x3,x4,x5,x6,x7,x8 + y + + + x1,x2,x3,x4,x5,x6,x7,x8,y + OutputPlaceHolder + + + x1,x2,x3,x4,x5,x6,x7,x8 + OutputPlaceHolder + + + + + + + dummyIN + MyDummy + customSamplerFileTrain + trainingData + + + dummyINTest + modelUnderTest + customSamplerFileTest + outData + + + modelUnderTest + rom_stats + + + outData + rom_stats + outPrint + romXML + + + trainingData + modelUnderTest + + + + + + + x1,x2,x3,x4,x5,x6,x7,x8 + y + mean_squared_error + 10 + 60 + False + 0.25 + 1986 + + 30 + + + 12 + + + + layer1, layer2, outLayer + + + + + + csv + outData + outMLPClassifier + input,output + + + csv + rom_stats + + + diff --git a/tests/framework/ROM/tensorflow_keras/keras_mlp_regression/test.csv b/tests/framework/ROM/tensorflow_keras/keras_mlp_regression/test.csv new file mode 100644 index 0000000000..8ed30be415 --- /dev/null +++ b/tests/framework/ROM/tensorflow_keras/keras_mlp_regression/test.csv @@ -0,0 +1,41 @@ +x1,x2,x3,x4,x5,x6,x7,x8 +2,175,88,0,0,22.9,0.326,22 +2,92,52,0,0,30.1,0.141,22 +3,130,78,23,79,28.4,0.323,34 +8,120,86,0,0,28.4,0.259,22 +2,174,88,37,120,44.5,0.646,24 +2,106,56,27,165,29.0,0.426,22 +2,105,75,0,0,23.3,0.560,53 +4,95,60,32,0,35.4,0.284,28 +0,126,86,27,120,27.4,0.515,21 +8,65,72,23,0,32.0,0.600,42 +2,99,60,17,160,36.6,0.453,21 +1,102,74,0,0,39.5,0.293,42 +11,120,80,37,150,42.3,0.785,48 +3,102,44,20,94,30.8,0.400,26 +1,109,58,18,116,28.5,0.219,22 +9,140,94,0,0,32.7,0.734,45 +13,153,88,37,140,40.6,1.174,39 +12,100,84,33,105,30.0,0.488,46 +1,147,94,41,0,49.3,0.358,27 +1,81,74,41,57,46.3,1.096,32 +3,187,70,22,200,36.4,0.408,36 +6,162,62,0,0,24.3,0.178,50 +4,136,70,0,0,31.2,1.182,22 +1,121,78,39,74,39.0,0.261,28 +3,108,62,24,0,26.0,0.223,25 +0,181,88,44,510,43.3,0.222,26 +8,154,78,32,0,32.4,0.443,45 +1,128,88,39,110,36.5,1.057,37 
+7,137,90,41,0,32.0,0.391,39 +0,123,72,0,0,36.3,0.258,52 +1,106,76,0,0,37.5,0.197,26 +6,190,92,0,0,35.5,0.278,66 +2,88,58,26,16,28.4,0.766,22 +9,170,74,31,0,44.0,0.403,43 +9,89,62,0,0,22.5,0.142,33 +10,101,76,48,180,32.9,0.171,63 +2,122,70,27,0,36.8,0.340,27 +5,121,72,23,112,26.2,0.245,30 +1,126,60,0,0,30.1,0.349,47 +1,93,70,31,0,30.4,0.315,23 diff --git a/tests/framework/ROM/tensorflow_keras/keras_mlp_regression/train.csv b/tests/framework/ROM/tensorflow_keras/keras_mlp_regression/train.csv new file mode 100644 index 0000000000..c39b7133e8 --- /dev/null +++ b/tests/framework/ROM/tensorflow_keras/keras_mlp_regression/train.csv @@ -0,0 +1,729 @@ +x1,x2,x3,x4,x5,x6,x7,x8,y +6,148,72,35,0,33.6,0.627,50,1295.227 +1,85,66,29,0,26.6,0.351,31,809.951 +8,183,64,0,0,23.3,0.672,32,173.972 +1,89,66,23,94,28.1,0.167,21,423.267 +0,137,40,35,168,43.1,2.288,33,1223.388 +5,116,74,0,0,25.6,0.201,30,10.801 +3,78,50,32,88,31,0.248,26,950.248 +10,115,0,0,0,35.3,0.134,29,246.434 +2,197,70,45,543,30.5,0.158,53,1645.658 +8,125,96,0,0,0,0.232,54,-83.768 +4,110,92,0,0,37.6,0.191,30,-44.209 +10,168,74,0,0,38,0.537,34,128.537 +10,139,80,0,0,27.1,1.441,57,19.541 +1,189,60,23,846,30.1,0.398,59,-146.502 +5,166,72,19,175,25.8,0.587,51,282.387 +7,100,0,0,0,30,0.484,32,205.484 +0,118,84,47,230,45.8,0.551,31,1978.351 +7,107,74,0,0,29.6,0.254,31,-2.146 +1,103,30,38,83,43.3,0.183,33,1488.483 +1,115,70,30,96,34.6,0.529,32,828.129 +3,126,88,41,235,39.3,0.704,27,1450.004 +8,99,84,0,0,35.4,0.388,50,-60.212 +7,196,90,0,0,39.8,0.451,41,128.251 +9,119,80,35,0,29,0.263,29,1232.263 +11,143,94,33,146,36.6,0.254,51,943.854 +10,125,70,26,115,31.1,0.205,41,601.305 +7,147,76,0,0,39.4,0.257,43,69.657 +1,97,66,15,140,23.2,0.487,22,83.687 +13,145,82,19,110,22.2,0.245,57,273.445 +5,117,92,0,0,34.1,0.337,38,-40.563 +5,109,75,26,0,36,0.546,60,650.546 +3,158,76,36,245,31.6,0.851,28,1146.451 +3,88,58,11,54,24.8,0.267,22,75.067 +6,92,92,0,0,19.9,0.188,28,-93.912 +10,122,78,31,0,27.6,0.512,45,964.112 
+4,103,60,33,192,24,0.966,33,918.966 +11,138,76,0,0,33.2,0.42,35,57.62 +9,102,76,37,0,32.9,0.665,46,1341.565 +2,90,68,42,0,38.2,0.503,27,1753.703 +4,111,72,47,207,37.1,1.39,56,1994.49 +3,180,64,25,70,34,0.271,26,734.271 +7,133,84,0,0,40.2,0.696,37,24.896 +7,106,92,18,0,22.7,0.235,48,241.935 +9,171,110,24,240,45.4,0.721,54,349.121 +7,159,64,0,0,27.4,0.294,40,120.694 +0,180,66,39,0,42,1.893,25,1701.893 +1,146,56,0,0,29.7,0.564,29,126.264 +2,71,70,27,0,28,0.586,22,669.586 +7,103,66,32,0,39.1,0.344,31,1047.444 +7,105,0,0,0,0,0.305,24,193.305 +1,103,80,11,82,19.4,0.491,22,3.891 +1,101,50,15,36,24.2,0.526,26,240.726 +5,88,66,21,23,24.4,0.342,30,395.742 +8,176,90,34,300,33.7,0.467,58,922.167 +7,150,66,42,342,34.7,0.718,42,1524.418 +1,73,50,10,0,23,0.248,21,99.248 +7,187,68,39,304,37.7,0.254,41,1390.954 +0,100,88,60,110,46.8,0.962,31,3442.762 +0,146,82,0,0,40.5,1.781,44,44.281 +0,105,64,41,142,41.5,0.173,22,1576.673 +2,84,0,0,0,0,0.304,21,149.304 +8,133,72,0,0,32.9,0.27,39,52.17 +5,44,62,0,0,25,0.587,36,-103.413 +2,141,58,34,128,25.4,0.699,24,1140.099 +7,114,66,0,0,32.8,0.258,42,28.058 +5,99,74,27,0,29,0.203,32,707.203 +0,109,88,30,0,32.5,0.855,38,849.355 +2,109,92,0,0,42.7,0.845,54,-66.455 +1,95,66,13,38,19.6,0.334,25,118.934 +4,146,85,27,100,28.9,0.189,27,672.089 +2,100,66,20,90,32.9,0.867,28,319.767 +5,139,64,35,140,28.6,0.411,26,1179.011 +13,126,90,0,0,43.4,0.583,42,-3.017 +4,129,86,20,270,35.1,0.231,23,146.331 +1,79,75,30,0,32,0.396,22,844.396 +1,0,48,20,0,24.7,0.14,22,259.84 +7,62,78,0,0,32.6,0.391,41,-111.009 +5,95,72,33,0,37.7,0.37,27,1079.07 +0,131,0,0,0,43.2,0.27,26,279.47 +2,112,66,22,0,25,0.307,24,513.307 +3,113,44,13,0,22.4,0.14,22,266.54 +2,74,0,0,0,0,0.102,22,128.102 +7,83,78,26,71,29.3,0.767,36,538.067 +0,101,65,28,0,24.6,0.237,22,793.837 +5,137,108,0,0,48.8,0.227,37,-32.973 +2,110,74,29,125,32.4,0.698,27,722.098 +13,106,72,54,0,36.6,0.178,45,2916.778 +2,100,68,25,71,38.5,0.324,26,564.824 +15,136,70,32,110,37.1,0.153,43,985.253 
+1,107,68,19,0,26.5,0.165,24,374.665 +1,80,55,0,0,19.1,0.258,21,-5.642 +4,123,80,15,176,32,0.443,34,57.443 +7,81,78,40,48,46.7,0.261,42,1491.961 +4,134,72,0,0,23.8,0.277,60,20.077 +2,142,82,18,64,24.7,0.761,21,304.461 +6,144,72,27,228,33.9,0.255,40,573.155 +2,92,62,28,0,31.6,0.13,24,791.73 +1,71,48,18,76,20.4,0.323,22,245.723 +6,93,50,30,64,28.7,0.356,23,884.056 +1,122,90,51,220,49.7,0.325,31,2375.025 +1,163,72,0,0,39,1.222,33,118.222 +1,151,60,0,0,26.1,0.179,22,127.279 +0,125,96,0,0,22.5,0.262,21,-36.238 +1,81,72,18,40,26.6,0.283,24,233.883 +2,85,65,0,0,39.6,0.93,27,-9.47 +1,126,56,29,152,28.7,0.801,21,782.501 +1,96,122,0,0,22.4,0.207,27,-177.393 +4,144,58,28,140,29.5,0.287,37,754.787 +3,83,58,31,18,34.3,0.336,25,947.636 +0,95,85,25,36,37.4,0.247,24,537.647 +3,171,72,33,135,33.3,0.199,24,1092.499 +8,155,62,26,495,34,0.543,46,301.543 +1,89,76,34,37,31.2,0.192,23,1078.392 +4,76,62,0,0,34,0.391,25,-20.609 +7,160,54,32,175,30.5,0.588,39,1006.088 +4,146,92,0,0,31.2,0.539,61,-9.261 +5,124,74,0,0,34,0.22,38,27.22 +5,78,48,0,0,33.7,0.654,25,26.354 +4,97,60,23,0,28.2,0.443,22,553.643 +4,99,76,15,51,23.2,0.223,21,150.423 +0,162,76,56,100,53.2,0.759,25,3160.959 +6,111,64,39,0,34.2,0.26,24,1567.46 +2,107,74,30,100,33.6,0.404,23,805.004 +5,132,80,0,0,26.8,0.186,69,-13.014 +0,113,76,0,0,33.3,0.278,23,8.578 +1,88,30,42,99,55,0.496,26,1781.496 +3,120,70,30,135,42.9,0.452,30,811.352 +1,118,58,36,94,33.3,0.261,23,1275.561 +1,117,88,24,145,34.5,0.403,40,396.903 +0,105,84,0,0,27.9,0.741,62,-75.359 +4,173,70,14,168,29.7,0.361,33,165.061 +9,122,56,0,0,33.3,1.114,33,86.414 +3,170,64,37,225,34.5,0.356,30,1299.856 +8,84,74,31,0,38.3,0.457,39,914.757 +2,96,68,13,49,21.1,0.647,26,105.747 +2,125,60,20,140,33.8,0.088,31,334.888 +0,100,70,26,50,30.8,0.597,21,626.397 +0,93,60,25,92,28.7,0.532,22,546.232 +0,129,80,0,0,31.2,0.703,29,20.903 +5,105,72,29,325,36.9,0.159,28,524.059 +3,128,78,0,0,21.1,0.268,55,-8.632 +5,106,82,30,0,39.5,0.286,38,872.786 +2,108,52,26,63,32.5,0.318,22,685.818 
+10,108,66,0,0,32.4,0.272,42,18.672 +4,154,62,31,284,32.8,0.237,23,813.037 +0,102,75,23,0,0,0.572,21,487.572 +9,57,80,37,0,32.8,0.096,41,1243.896 +2,106,64,35,119,30.5,1.4,34,1125.9 +5,147,78,0,0,33.7,0.218,65,33.918 +2,90,70,17,0,27.3,0.085,22,266.385 +1,136,74,50,204,37.4,0.399,24,2360.799 +4,114,65,0,0,21.9,0.432,37,22.332 +9,156,86,28,155,34.3,1.189,42,685.489 +1,153,82,42,485,40.6,0.687,23,1358.287 +8,188,78,0,0,47.9,0.137,43,155.037 +7,152,88,44,0,50,0.337,36,1997.337 +2,99,52,15,94,24.6,0.637,21,179.237 +1,109,56,21,135,25.2,0.833,23,360.033 +2,88,74,19,53,29,0.229,22,271.229 +17,163,72,41,114,40.9,0.817,47,1688.717 +4,151,90,38,0,29.7,0.294,36,1473.994 +7,102,74,40,105,37.2,0.204,45,1476.404 +0,114,80,34,285,44.2,0.167,27,876.367 +2,100,64,23,0,29.7,0.368,21,548.068 +0,131,88,0,0,31.6,0.743,32,-1.657 +6,104,74,18,156,29.9,0.722,41,149.622 +3,148,66,25,0,32.5,0.256,22,736.756 +4,120,68,0,0,29.6,0.709,34,36.309 +4,110,66,0,0,31.9,0.471,29,29.371 +3,111,90,12,78,28.4,0.495,29,20.895 +6,102,82,0,0,30.8,0.18,36,-41.02 +6,134,70,23,130,35.4,0.542,29,469.942 +2,87,0,23,0,28.9,0.773,25,709.673 +1,79,60,42,48,43.5,0.678,23,1716.178 +2,75,64,24,55,29.7,0.37,33,478.07 +8,179,72,42,130,32.7,0.719,36,1781.419 +6,85,78,0,0,31.2,0.382,42,-68.418 +0,129,110,46,130,67.1,0.319,26,1955.419 +5,143,78,0,0,45,0.19,47,55.19 +5,130,82,0,0,39.1,0.956,37,22.056 +6,87,80,0,0,23.2,0.084,32,-68.716 +0,119,64,18,92,34.9,0.725,23,290.625 +1,0,74,20,23,27.7,0.299,21,162.999 +5,73,60,0,0,26.8,0.268,27,-28.932 +4,141,74,0,0,27.6,0.244,40,51.844 +7,194,68,28,0,35.9,0.745,41,970.645 +8,181,68,36,495,30.1,0.615,60,937.715 +1,128,98,41,58,32,1.321,33,1586.321 +8,109,76,39,114,27.9,0.64,31,1402.54 +5,139,80,35,160,31.6,0.361,25,1114.961 +3,111,62,0,0,22.6,0.142,21,40.742 +9,123,70,44,94,33.1,0.374,40,1880.474 +7,159,66,0,0,30.4,0.383,36,121.783 +11,135,0,0,0,52.3,0.578,40,293.878 +8,85,55,20,0,24.4,0.136,42,395.536 +5,158,84,41,210,39.4,0.395,29,1550.795 +1,105,58,0,0,24.3,0.187,21,40.487 
+3,107,62,13,48,22.9,0.678,23,152.578 +4,109,64,44,99,34.8,0.905,26,1876.705 +4,148,60,27,318,30.9,0.15,29,533.05 +0,113,80,16,0,31,0.874,21,252.874 +1,138,82,0,0,40.1,0.236,28,43.336 +0,108,68,20,0,27.3,0.787,32,408.087 +2,99,70,16,44,20.4,0.235,27,195.635 +6,103,72,32,190,37.7,0.324,55,813.024 +5,111,72,28,0,23.9,0.407,27,792.307 +8,196,76,29,280,37.5,0.605,57,714.105 +5,162,104,0,0,37.7,0.151,52,2.851 +1,96,64,27,87,33.2,0.289,21,655.489 +7,184,84,33,0,35.5,0.355,41,1206.855 +2,81,60,22,0,27.7,0.29,25,470.99 +0,147,85,54,0,42.8,0.375,24,2974.175 +7,179,95,31,0,34.2,0.164,60,1015.364 +0,140,65,26,130,42.6,0.431,24,650.031 +9,112,82,32,175,34.2,0.26,36,834.46 +12,151,70,40,271,41.8,0.742,38,1437.542 +5,109,62,41,129,35.8,0.514,25,1600.314 +6,125,68,30,120,30,0.464,32,830.464 +5,85,74,22,0,29,1.224,32,435.224 +5,112,66,0,0,37.8,0.261,41,28.061 +0,177,60,29,478,34.6,1.072,21,551.672 +2,158,90,0,0,31.6,0.805,66,14.405 +7,119,0,0,0,25.2,0.209,37,233.409 +7,142,60,33,190,28.8,0.687,61,978.487 +1,100,66,15,56,23.6,0.666,26,170.266 +1,87,78,27,32,34.6,0.101,22,650.701 +0,101,76,0,0,35.7,0.198,26,-16.102 +3,162,52,38,0,37.2,0.652,24,1628.852 +4,197,70,39,744,36.7,2.329,31,973.029 +0,117,80,31,53,45.2,0.089,24,923.289 +4,142,86,0,0,44,0.645,22,52.645 +6,134,80,37,370,46.2,0.238,46,1033.438 +1,79,80,25,37,25.4,0.583,22,510.983 +4,122,68,0,0,35,0.394,29,50.394 +3,74,68,28,45,29.7,0.293,23,692.993 +4,171,72,0,0,43.6,0.479,26,148.079 +7,181,84,21,192,35.9,0.586,51,351.486 +0,179,90,27,0,44.1,0.686,23,838.786 +9,164,84,21,0,30.8,0.831,32,525.631 +0,104,76,0,0,18.4,0.582,27,-28.018 +1,91,64,24,0,29.2,0.192,21,575.392 +4,91,70,32,88,33.1,0.446,22,923.546 +3,139,54,0,0,25.6,0.402,22,123.002 +6,119,50,22,176,27.1,1.318,33,397.418 +2,146,76,35,194,38.2,0.329,29,1106.529 +9,184,85,15,0,30,1.213,49,329.213 +10,122,68,0,0,31.2,0.258,41,40.458 +0,165,90,33,680,52.3,0.427,23,498.727 +9,124,70,33,402,35.4,0.282,34,735.682 +1,111,86,19,0,30.1,0.143,23,333.243 
+9,106,52,0,0,31.2,0.38,42,54.58 +2,129,84,0,0,28,0.284,27,9.284 +2,90,80,14,55,24.4,0.249,24,83.649 +0,86,68,32,0,35.8,0.238,25,1003.038 +12,92,62,7,258,27.6,0.926,44,-214.474 +1,113,64,35,0,33.6,0.543,21,1273.143 +3,111,56,39,0,30.1,0.557,30,1578.657 +2,114,68,22,0,28.7,0.092,25,513.792 +1,193,50,16,375,25.9,0.655,24,120.555 +11,155,76,28,150,33.3,1.353,51,710.653 +3,191,68,15,130,30.9,0.299,34,273.199 +3,141,0,0,0,30,0.761,27,288.761 +4,95,70,32,0,32.1,0.612,24,1016.712 +3,142,80,15,0,32.4,0.2,63,241.6 +4,123,62,0,0,32,0.226,35,61.226 +5,96,74,18,67,33.6,0.997,43,223.597 +0,138,0,0,0,36.3,0.933,25,288.233 +2,128,64,42,0,40,1.101,24,1847.101 +0,102,52,0,0,25.1,0.078,21,52.178 +2,146,0,0,0,27.5,0.24,28,293.74 +10,101,86,37,0,45.6,1.136,38,1331.736 +2,108,62,32,56,25.2,0.128,21,1004.328 +3,122,78,0,0,23,0.254,40,-3.746 +1,71,78,50,45,33.2,0.422,21,2376.622 +13,106,70,0,0,34.2,0.251,52,-2.549 +2,100,70,52,57,40.5,0.677,25,2655.177 +7,106,60,24,0,26.5,0.296,29,612.796 +0,104,64,23,116,27.8,0.454,23,434.254 +5,114,74,0,0,24.9,0.744,57,-20.356 +2,108,62,10,278,25.3,0.881,22,-141.819 +0,146,70,0,0,37.9,0.334,28,92.234 +10,129,76,28,122,35.9,0.28,39,699.18 +7,133,88,15,155,32.4,0.262,37,74.662 +7,161,86,0,0,30.4,0.165,47,54.565 +2,108,80,0,0,27,0.259,52,-46.741 +7,136,74,26,135,26,0.647,51,573.647 +5,155,84,44,545,38.7,0.619,34,1459.319 +1,119,86,39,220,45.6,0.808,29,1299.408 +4,96,56,17,49,20.8,0.34,26,263.14 +5,108,72,43,75,36.1,0.263,33,1782.363 +0,78,88,29,40,36.9,0.434,21,709.334 +0,107,62,30,74,36.6,0.757,25,866.357 +2,128,78,37,182,43.3,1.224,31,1224.524 +1,128,48,45,194,40.5,0.613,24,1961.113 +0,161,50,0,0,21.9,0.254,65,129.154 +6,151,62,31,120,35.5,0.692,28,971.192 +2,146,70,38,360,28,0.337,29,1167.337 +0,126,84,29,215,30.7,0.52,24,633.22 +14,100,78,25,184,36.6,0.412,46,412.012 +8,112,72,0,0,23.6,0.84,58,-17.56 +0,167,0,0,0,32.3,0.839,30,337.139 +2,144,58,33,135,31.6,0.422,25,1077.022 +5,77,82,41,42,35.8,0.156,35,1552.956 +5,115,98,0,0,52.9,0.209,28,-33.891 
+3,150,76,0,0,21,0.207,37,59.207 +2,120,76,37,105,39.7,0.215,29,1288.915 +10,161,68,23,132,25.5,0.326,47,503.826 +0,137,68,14,148,24.8,0.143,21,121.943 +0,128,68,19,180,30.5,1.391,25,239.891 +2,124,68,28,205,32.9,0.875,30,628.775 +6,80,66,30,0,26.2,0.313,41,853.513 +0,106,70,37,148,39.4,0.605,22,1241.005 +2,155,74,17,96,26.6,0.433,27,283.033 +3,113,50,10,85,29.5,0.626,25,99.126 +7,109,80,31,0,35.9,1.127,43,940.027 +2,112,68,22,94,34.1,0.315,26,420.415 +3,99,80,11,64,19.3,0.284,30,7.584 +3,182,74,0,0,30.5,0.345,29,146.845 +3,115,66,39,140,38.1,0.15,28,1426.25 +6,194,78,0,0,23.5,0.129,59,124.629 +4,129,60,12,231,27.5,0.527,31,-7.973 +3,112,74,30,0,31.6,0.197,25,911.797 +0,124,70,20,0,27.4,0.254,36,429.654 +13,152,90,33,29,26.8,0.731,43,1091.531 +2,112,75,32,0,35.7,0.148,21,1039.848 +1,157,72,21,168,25.6,0.123,24,373.723 +1,122,64,32,156,35.1,0.692,30,926.792 +10,179,70,0,0,35.1,0.2,37,156.3 +2,102,86,36,120,45.5,0.127,23,1146.627 +6,105,70,32,68,30.8,0.122,37,955.922 +8,118,72,19,0,23.1,1.476,46,367.576 +2,87,58,16,52,32.7,0.166,25,213.866 +1,180,0,0,0,43.3,0.282,41,363.582 +12,106,80,0,0,23.6,0.137,44,-36.263 +1,95,60,18,58,23.9,0.26,22,279.16 +0,165,76,43,255,47.9,0.259,26,1718.159 +0,117,0,0,0,33.8,0.932,44,224.732 +5,115,76,0,0,31.2,0.343,44,-5.457 +9,152,78,34,171,34.2,0.893,33,1066.093 +7,178,84,0,0,39.9,0.331,41,110.231 +1,130,70,13,105,25.9,0.472,22,119.372 +1,95,74,21,73,25.9,0.673,36,327.573 +1,0,68,35,0,32,0.389,22,1032.389 +5,122,86,0,0,34.7,0.29,33,-7.01 +8,95,72,0,0,36.8,0.485,57,-37.715 +8,126,88,36,108,38.5,0.349,49,1173.849 +1,139,46,19,83,28.7,0.654,22,426.354 +3,116,0,0,0,23.5,0.187,23,235.687 +3,99,62,19,74,21.8,0.279,26,298.079 +5,0,80,32,0,41,0.346,37,793.346 +4,92,80,0,0,42.2,0.237,29,-38.563 +4,137,84,0,0,31.2,0.252,30,27.452 +3,61,82,28,0,34.4,0.243,46,651.643 +1,90,62,12,43,27.2,0.58,24,99.78 +3,90,78,0,0,42.7,0.559,21,-28.741 +9,165,88,0,0,30.4,0.302,49,56.702 +1,125,50,40,167,33.3,0.962,28,1540.262 +13,129,0,30,0,39.9,0.569,44,1167.469 
+12,88,74,40,54,35.3,0.378,48,1499.678 +1,196,76,36,249,36.5,0.875,29,1220.375 +5,189,64,33,325,31.2,0.583,29,957.783 +5,158,70,0,0,29.8,0.207,63,78.007 +5,103,108,37,0,39.2,0.305,65,1230.505 +4,146,78,0,0,38.5,0.52,67,34.02 +4,147,74,25,293,34.9,0.385,30,413.285 +5,99,54,28,83,34,0.499,30,746.499 +6,124,72,0,0,27.6,0.368,29,36.968 +0,101,64,17,0,21,0.252,21,299.252 +3,81,86,16,66,27.5,0.306,22,102.806 +1,133,102,28,140,32.8,0.234,45,593.034 +3,173,82,48,465,38.4,2.137,25,1957.537 +0,118,64,23,89,0,1.731,21,464.731 +0,84,64,22,66,35.8,0.545,21,409.345 +2,105,58,40,94,34.9,0.225,25,1554.125 +2,122,52,43,158,36.2,0.816,28,1790.016 +12,140,82,43,325,39.2,0.528,58,1551.728 +0,98,82,15,84,25.2,0.299,22,94.499 +1,87,60,37,75,37.2,0.509,22,1304.709 +4,156,75,0,0,48.3,0.238,32,107.538 +0,93,100,39,72,43.4,1.021,35,1344.421 +1,107,72,30,82,30.8,0.821,24,824.621 +0,105,68,22,0,20,0.236,22,488.236 +1,109,60,8,182,25.4,0.947,21,-73.653 +1,90,62,18,59,25.1,1.268,25,261.368 +1,125,70,24,110,24.3,0.221,25,506.521 +1,119,54,13,50,22.3,0.205,24,194.505 +5,116,74,29,0,32.3,0.66,35,853.96 +8,105,100,36,0,43.3,0.239,45,1212.539 +5,144,82,26,285,32,0.452,58,412.452 +3,100,68,23,81,31.6,0.949,28,451.549 +1,100,66,29,196,32,0.444,42,638.444 +5,166,76,0,0,45.7,0.34,27,128.04 +1,131,64,14,415,23.7,0.389,21,-144.911 +4,116,72,12,87,22.1,0.463,37,62.563 +4,158,78,0,0,32.9,0.803,31,88.703 +2,127,58,24,275,27.7,1.6,25,387.3 +3,96,56,34,115,24.7,0.944,39,1054.644 +0,131,66,40,0,34.3,0.196,22,1676.496 +3,82,70,0,0,21.1,0.389,25,-46.511 +3,193,70,31,0,34.9,0.241,25,1150.141 +4,95,64,0,0,32,0.161,31,3.161 +6,137,61,0,0,24.2,0.151,55,66.351 +5,136,84,41,88,35,0.286,35,1618.286 +9,72,78,25,0,31.6,0.28,38,537.88 +5,168,64,0,0,32.9,0.135,41,141.035 +2,123,48,32,165,42.1,0.52,26,979.62 +4,115,72,0,0,28.9,0.376,46,1.276 +0,101,62,0,0,21.9,0.336,25,13.236 +8,197,74,0,0,25.9,1.191,39,168.091 +1,172,68,49,579,42.4,0.702,28,1978.102 +6,102,90,39,0,35.7,0.674,28,1469.374 
+1,112,72,30,176,34.4,0.528,25,742.928 +1,143,84,23,310,42.4,1.076,22,275.476 +1,143,74,22,61,26.2,0.256,21,493.456 +0,138,60,35,167,34.6,0.534,21,1168.134 +3,173,84,33,474,35.7,0.258,22,725.958 +1,97,68,21,0,27.2,1.095,22,438.295 +4,144,82,32,0,38.5,0.554,37,1072.054 +1,83,68,0,0,18.2,0.624,27,-45.176 +3,129,64,29,115,26.4,0.219,28,793.619 +1,119,88,41,170,45.3,0.507,26,1505.807 +2,94,68,18,76,26,0.561,21,239.561 +0,102,64,46,78,40.6,0.496,21,2070.096 +2,115,64,22,0,30.8,0.421,21,534.221 +8,151,78,32,210,42.9,0.516,36,897.416 +4,184,78,39,277,37,0.264,31,1388.264 +0,94,0,0,0,0,0.256,25,163.256 +1,181,64,30,180,34.1,0.328,38,887.428 +0,135,94,46,145,40.6,0.284,26,1973.884 +1,95,82,25,180,35,0.233,43,382.233 +2,99,0,0,0,22.2,0.108,23,199.308 +3,89,74,16,85,30.4,0.551,38,122.951 +1,80,74,11,60,30,0.527,22,8.527 +2,139,75,0,0,25.6,0.167,29,51.767 +1,90,68,8,0,24.5,1.138,36,30.638 +0,141,0,0,0,42.4,0.205,29,295.605 +12,140,85,33,0,37.4,0.244,41,1122.644 +5,147,75,0,0,29.9,0.434,28,76.334 +1,97,70,15,0,18.2,0.147,21,207.347 +6,107,88,0,0,36.8,0.727,31,-37.473 +0,189,104,25,0,34.3,0.435,41,684.735 +2,83,66,23,50,32.2,0.497,22,459.697 +4,117,64,27,120,33.2,0.23,24,664.43 +8,108,70,0,0,30.5,0.955,33,12.455 +4,117,62,12,0,29.7,0.38,30,196.08 +0,180,78,63,14,59.4,2.42,25,4117.82 +1,100,72,12,70,25.3,0.658,28,56.958 +0,95,80,45,92,36.5,0.33,26,1893.83 +0,104,64,37,64,33.6,0.51,22,1333.11 +0,120,74,18,63,30.5,0.285,26,283.785 +1,82,64,13,95,21.2,0.415,23,45.615 +2,134,70,0,0,28.9,0.542,23,66.442 +0,91,68,32,210,39.9,0.381,25,807.281 +2,119,0,0,0,19.6,0.832,72,188.432 +2,100,54,28,105,37.8,0.498,24,733.298 +14,175,62,30,0,33.6,0.212,38,1073.812 +1,135,54,0,0,26.7,0.687,62,74.387 +5,86,68,28,71,30.2,0.364,24,692.564 +10,148,84,48,237,37.6,1.001,51,2108.601 +9,134,74,33,60,25.9,0.46,81,1029.36 +9,120,72,22,56,20.8,0.733,48,434.533 +1,71,62,0,0,21.8,0.416,26,-46.784 +8,74,70,40,49,35.3,0.705,39,1494.005 +5,88,78,30,0,27.6,0.258,37,837.858 +10,115,98,0,0,24,1.022,34,-62.978 
+0,124,56,13,105,21.8,0.452,21,145.252 +0,74,52,10,36,27.8,0.269,22,62.069 +0,97,64,36,100,36.8,0.6,25,1210.4 +8,120,0,0,0,30,0.183,38,240.183 +6,154,78,41,140,46.1,0.571,27,1640.671 +1,144,82,40,0,41.3,0.607,28,1656.907 +0,137,70,38,0,33.2,0.17,22,1519.37 +0,119,66,27,0,38.8,0.259,22,786.059 +7,136,90,0,0,29.9,0.21,50,-10.89 +4,114,64,0,0,28.9,0.126,24,45.026 +0,137,84,27,0,27.3,0.231,59,719.531 +2,105,80,45,191,33.7,0.711,29,1811.411 +7,114,76,17,110,23.8,0.466,31,179.266 +8,126,74,38,75,25.9,0.162,39,1394.062 +4,132,86,31,0,28,0.419,63,936.419 +3,158,70,30,328,35.5,0.344,35,681.844 +0,123,88,37,0,35.2,0.197,29,1357.397 +4,85,58,22,49,27.8,0.306,28,435.106 +0,84,82,31,125,38.2,0.233,23,773.433 +0,145,0,0,0,44.2,0.63,31,303.83 +0,135,68,42,250,42.3,0.365,24,1598.665 +1,139,62,41,480,40.7,0.536,21,1314.236 +0,173,78,32,265,46.5,1.159,58,860.659 +4,99,72,17,0,25.6,0.294,28,272.894 +8,194,80,0,0,26.1,0.551,67,115.651 +2,83,65,28,66,36.8,0.629,24,704.429 +2,89,90,30,0,33.5,0.292,42,801.792 +4,99,68,38,0,32.8,0.145,33,1441.945 +4,125,70,18,122,28.9,1.144,45,231.044 +3,80,0,0,0,0,0.174,22,141.174 +6,166,74,0,0,26.6,0.304,66,76.904 +5,110,68,0,0,26,0.292,30,17.292 +2,81,72,15,76,30.1,0.547,25,102.647 +7,195,70,33,145,25.1,0.163,55,1101.263 +6,154,74,32,193,29.3,0.839,39,914.139 +2,117,90,19,71,25.2,0.313,21,260.513 +3,84,72,32,0,37.2,0.267,28,988.467 +6,0,68,41,0,39,0.727,41,1481.727 +7,94,64,25,79,33.3,0.738,41,542.038 +3,96,78,39,0,37.3,0.238,40,1479.538 +10,75,82,0,0,33.3,0.263,38,-90.437 +0,180,90,26,90,36.5,0.314,35,677.814 +1,130,60,23,170,28.6,0.692,21,448.292 +2,84,50,23,76,30.4,0.968,21,483.368 +8,120,78,0,0,25,0.409,64,-24.591 +12,84,72,31,0,29.7,0.297,46,908.997 +0,139,62,17,210,22.1,0.207,21,172.307 +9,91,68,0,0,24.2,0.2,58,-46.6 +2,91,62,0,0,27.3,0.525,22,3.825 +3,99,54,19,86,25.6,0.154,24,315.754 +3,163,70,18,105,31.6,0.268,28,341.868 +9,145,88,34,165,30.3,0.771,53,1004.071 +7,125,86,0,0,37.6,0.304,51,-14.096 +13,76,60,0,0,32.8,0.18,41,-23.02 
+6,129,90,7,326,19.6,0.582,60,-322.818 +2,68,70,32,66,25,0.187,25,886.187 +3,124,80,33,130,33.2,0.305,26,977.505 +6,114,0,0,0,0,0.189,26,208.189 +9,130,70,0,0,34.2,0.652,45,48.852 +3,125,58,0,0,31.6,0.151,24,86.751 +3,87,60,18,0,21.8,0.444,21,322.244 +1,97,64,19,82,18.2,0.299,21,279.499 +3,116,74,15,105,26.3,0.107,24,135.407 +0,117,66,31,188,30.8,0.493,22,818.293 +0,111,65,0,0,24.6,0.66,31,21.26 +2,122,60,18,106,29.8,0.717,22,292.517 +0,107,76,0,0,45.3,0.686,24,7.986 +1,86,66,52,65,41.3,0.917,29,2627.217 +6,91,0,0,0,29.8,0.501,31,187.301 +1,77,56,30,56,33.3,1.251,24,841.551 +4,132,0,0,0,32.9,0.302,23,278.202 +0,105,90,0,0,29.6,0.197,46,-76.203 +0,57,60,0,0,21.7,0.735,67,-110.565 +0,127,80,37,210,36.3,0.804,23,1187.104 +3,129,92,49,155,36.4,0.968,32,2236.368 +8,100,74,40,215,39.4,0.661,43,1368.061 +3,128,72,25,190,32.4,0.549,27,483.949 +10,90,85,32,0,34.9,0.825,56,938.725 +4,84,90,23,56,39.5,0.159,25,389.659 +1,88,78,29,76,32,0.365,29,711.365 +8,186,90,35,225,34.5,0.423,37,1107.923 +5,187,76,27,207,43.6,1.034,53,664.634 +4,131,68,21,166,33.1,0.16,28,342.26 +1,164,82,43,67,32.8,0.341,50,1848.141 +4,189,110,31,0,28.5,0.68,37,1005.18 +1,116,70,28,0,27.4,0.204,21,813.604 +3,84,68,30,106,31.9,0.591,25,768.491 +6,114,88,0,0,27.8,0.247,66,-67.953 +1,88,62,24,44,29.9,0.422,23,530.322 +1,84,64,23,115,36.9,0.471,28,400.371 +7,124,70,33,215,25.5,0.161,37,907.661 +1,97,70,40,0,38.1,0.218,30,1593.318 +8,110,76,0,0,27.8,0.237,58,-29.963 +11,103,68,40,0,46.2,0.126,42,1617.326 +11,85,74,0,0,30.1,0.3,35,-45.6 +6,125,76,0,0,33.8,0.121,54,7.921 +0,198,66,32,274,41.3,0.502,28,961.802 +1,87,68,34,77,37.6,0.401,24,1064.001 +6,99,60,19,54,26.9,0.497,32,326.397 +0,91,80,0,0,32.4,0.601,27,-51.999 +2,95,54,14,88,26.1,0.748,22,142.848 +1,99,72,30,18,38.6,0.412,21,883.012 +6,92,62,32,126,32,0.085,46,888.085 +4,154,72,29,126,31.3,0.338,37,805.638 +0,121,66,30,165,34.3,0.203,33,780.503 +3,78,70,0,0,32.5,0.27,39,-57.23 +2,130,96,0,0,22.6,0.268,21,-24.132 +3,111,58,31,44,29.5,0.43,22,975.93 
+2,98,60,17,120,34.7,0.198,22,199.898 +1,143,86,30,330,30.1,0.892,23,606.992 +1,119,44,47,63,35.5,0.28,25,2263.78 +6,108,44,20,130,24,0.813,35,349.813 +2,118,80,0,0,42.9,0.693,21,20.593 +10,133,68,0,0,27,0.245,36,63.245 +2,197,70,99,0,34.7,0.575,62,9960.275 +0,151,90,46,0,42.1,0.371,21,2169.471 +6,109,60,27,0,25,0.206,27,771.206 +12,121,78,17,0,26.5,0.259,62,273.759 +8,100,76,0,0,38.7,0.19,42,-23.11 +8,124,76,24,600,28.7,0.687,52,-18.613 +1,93,56,11,0,22.5,0.417,22,140.917 +8,143,66,0,0,34.9,0.129,41,90.029 +6,103,66,0,0,24.3,0.249,29,9.549 +3,176,86,27,156,33.3,1.154,52,652.454 +0,73,0,0,0,21.1,0.342,25,142.442 +11,111,84,40,0,46.8,0.925,45,1583.725 +2,112,78,50,140,39.4,0.175,24,2367.575 +3,132,80,0,0,34.4,0.402,44,17.802 +2,82,52,22,115,28.5,1.699,25,384.199 +6,123,72,45,230,33.6,0.733,34,1831.333 +0,188,82,14,185,32,0.682,22,151.682 +0,67,76,0,0,45.3,0.194,46,-94.506 +1,89,24,19,25,27.8,0.559,21,450.359 +1,173,74,0,0,36.8,0.088,38,123.888 +1,109,38,18,120,23.1,0.407,26,306.507 +1,108,88,19,0,27.1,0.4,24,317.5 +6,96,0,0,0,23.7,0.19,28,193.89 +1,124,74,36,0,27.8,0.1,30,1320.9 +7,150,78,29,126,35.2,0.692,54,769.892 +4,183,0,0,0,28.4,0.212,36,362.612 +1,124,60,32,0,35.8,0.514,21,1108.314 +1,181,78,42,293,40,1.258,22,1619.258 +1,92,62,25,41,19.5,0.482,25,577.982 +0,152,82,39,272,41.5,0.27,27,1321.77 +1,111,62,13,182,24,0.138,23,25.138 +3,106,54,21,158,30.9,0.292,24,343.192 +3,174,58,22,194,32.9,0.593,36,464.493 +7,168,88,42,321,38.2,0.787,40,1520.987 +6,105,80,28,0,32.5,0.878,26,767.378 +11,138,74,26,144,36.1,0.557,50,583.657 +3,106,72,0,0,25.8,0.207,27,-1.993 +6,117,96,0,0,28.7,0.157,30,-49.143 +2,68,62,13,15,20.1,0.257,23,103.357 +9,112,82,24,0,28.2,1.282,50,542.482 +0,119,0,0,0,32.4,0.141,24,246.541 +2,112,86,42,160,38.4,0.246,28,1582.646 +2,92,76,20,0,24.2,1.698,28,355.898 +6,183,94,0,0,40.8,1.461,45,87.261 +0,94,70,27,115,43.5,0.347,21,614.847 +2,108,64,0,0,30.8,0.158,21,35.958 +4,90,88,47,54,37.7,0.362,29,2084.062 +0,125,68,0,0,24.7,0.206,21,49.906 
+0,132,78,0,0,32.4,0.393,21,41.793 +5,128,80,0,0,34.6,0.144,45,10.744 +4,94,65,22,0,24.7,0.148,21,484.848 +7,114,64,0,0,27.4,0.732,34,37.132 +0,102,78,40,90,34.5,0.238,24,1490.738 +2,111,60,0,0,26.2,0.343,23,47.543 +1,128,82,17,183,27.5,0.115,22,122.615 +10,92,62,0,0,25.9,0.167,31,3.067 +13,104,72,0,0,31.2,0.465,38,-1.335 +5,104,74,0,0,28.8,0.153,48,-28.047 +2,94,76,18,66,31.6,0.649,23,229.249 +7,97,76,32,91,40.9,0.871,32,915.771 +1,100,74,12,46,19.5,0.149,28,68.649 +0,102,86,17,105,29.3,0.695,27,132.995 +4,128,70,0,0,34.3,0.303,24,60.603 +6,147,80,0,0,29.5,0.178,50,39.678 +4,90,0,0,0,28,0.61,31,181.61 +3,103,72,30,152,27.6,0.73,27,742.33 +2,157,74,35,440,39.4,0.134,30,888.534 +1,167,74,17,144,23.4,0.447,33,248.847 +0,179,50,36,159,37.8,0.455,22,1361.255 +11,136,84,35,130,28.3,0.26,42,1112.56 +0,107,60,25,0,26.4,0.133,23,662.533 +1,91,54,25,100,25.2,0.234,23,548.434 +1,117,60,23,106,33.8,0.466,27,485.266 +5,123,74,40,77,34.1,0.269,28,1558.369 +2,120,54,0,0,26.8,0.455,27,80.255 +1,106,70,28,135,34.2,0.142,22,664.342 +2,155,52,27,540,38.7,0.24,25,358.94 +2,101,58,35,90,21.8,0.155,22,1164.955 +1,120,80,48,200,38.9,1.162,41,2104.062 +11,127,106,0,0,39,0.19,51,-64.81 +3,80,82,31,70,34.2,1.292,27,816.492 +10,162,84,0,0,27.7,0.182,54,55.882 +1,199,76,43,0,42.9,1.394,22,2042.294 +8,167,106,46,231,37.6,0.165,43,1903.765 +9,145,80,46,130,37.9,0.637,40,2043.537 +6,115,60,39,0,33.7,0.245,40,1570.945 +1,112,80,45,132,34.8,0.217,24,1889.017 +4,145,82,18,0,32.5,0.235,70,334.735 +10,111,70,27,0,27.5,0.141,40,738.641 +6,98,58,33,190,34,0.43,43,918.43 +9,154,78,30,100,30.9,0.164,45,869.064 +6,165,68,26,168,33.6,0.631,49,625.231 +1,99,58,10,0,25.4,0.551,21,129.951 +10,68,106,23,49,35.5,0.285,47,296.785 +3,123,100,35,240,57.3,0.88,22,970.18 +8,91,82,0,0,35.6,0.587,68,-87.813 +6,195,70,0,0,30.9,0.328,31,186.228 +9,156,86,0,0,24.8,0.23,53,35.03 +0,93,60,0,0,35.3,0.263,25,16.563 +3,121,52,0,0,36,0.127,25,100.127 +2,101,58,17,265,24.2,0.614,23,55.814 +2,56,56,28,45,24.2,0.332,22,687.532 
+0,162,76,36,0,49.6,0.364,26,1415.964 +0,95,64,39,105,44.6,0.366,22,1436.966 +4,125,80,0,0,32.3,0.536,27,19.836 +5,136,82,0,0,0,0.64,69,-37.36 +2,129,74,26,205,33.2,0.591,25,517.791 +3,130,64,0,0,23.1,0.314,22,72.414 +1,107,50,19,0,28.3,0.181,29,425.481 +1,140,74,26,180,24.1,0.828,23,556.928 +1,144,82,46,180,46.1,0.335,46,1979.435 +8,107,80,0,0,24.6,0.856,34,-26.544 +13,158,114,0,0,42.3,0.257,44,-14.443 +2,121,70,32,95,39.1,0.886,23,979.986 +7,129,68,49,125,38.5,0.439,43,2332.939 +2,90,60,0,0,23.5,0.191,25,0.691 +7,142,90,24,480,30.4,0.128,43,104.528 +3,169,74,19,125,29.9,0.268,31,354.168 +0,99,0,0,0,25,0.253,22,201.253 +4,127,88,11,155,34.5,0.598,28,-32.902 +4,118,70,0,0,44.5,0.904,26,49.404 +2,122,76,27,200,35.9,0.483,26,557.383 +6,125,78,31,0,27.6,0.565,49,962.165 +1,168,88,29,0,35,0.905,52,897.905 +2,129,0,0,0,38.5,0.304,41,257.804 +4,110,76,20,100,28.4,0.118,27,297.518 +6,80,80,36,0,39.8,0.177,28,1233.977 +10,115,0,0,0,0,0.261,30,210.261 +2,127,46,21,335,34.4,0.176,22,236.576 +9,164,78,0,0,32.8,0.148,45,90.948 +2,93,64,32,160,38,0.674,23,875.674 +3,158,64,13,387,31.2,0.295,24,-83.505 +5,126,78,27,22,29.6,0.439,40,720.039 +10,129,62,36,0,41.2,0.441,38,1381.641 +0,134,58,20,291,26.4,0.352,21,208.752 +3,102,74,0,0,29.5,0.121,32,-17.379 +7,187,50,33,392,33.9,0.826,34,928.726 +3,173,78,39,185,33.8,0.97,31,1454.77 +10,94,72,18,0,23.1,0.595,56,273.695 +1,108,60,46,178,35.5,0.415,24,1986.915 +5,97,76,27,0,35.6,0.378,52,683.978 +4,83,86,19,0,29.3,0.317,34,268.617 +1,114,66,36,200,38.1,0.289,21,1144.389 +1,149,68,29,127,29.3,0.349,42,796.649 +5,117,86,30,105,39.1,0.251,42,773.351 +1,111,94,0,0,32.8,0.265,45,-70.935 +4,112,78,40,0,39.4,0.236,38,1595.636 +1,116,78,29,180,36.1,0.496,25,671.596 +0,141,84,26,0,32.4,0.433,22,716.833 \ No newline at end of file diff --git a/tests/framework/ROM/tensorflow_keras/tests b/tests/framework/ROM/tensorflow_keras/tests index 9af6379d4d..9130288592 100644 --- a/tests/framework/ROM/tensorflow_keras/tests +++ 
b/tests/framework/ROM/tensorflow_keras/tests @@ -12,6 +12,12 @@ output = 'diabetes/romXML.xml' csv = 'diabetes/outMLPClassifier.csv' [../] + [./tf_mlpr] + type = 'RavenFramework' + input = 'keras_mlp_regression.xml' + output = 'keras_mlp_regression/romXML.xml' + csv = 'keras_mlp_regression/outMLPClassifier.csv' + [../] # Disable test for pickling, currently, we could not pickle the DNN ROMs #[./tf_cnn_dump] # type = 'RavenFramework' From bb34c45714f3c0bddc03c21960b0a92edf926dfa Mon Sep 17 00:00:00 2001 From: "Joshua J. Cogliati" Date: Thu, 4 Nov 2021 12:38:11 -0600 Subject: [PATCH 2/2] Adding rel_err because of differences between computers. --- tests/framework/ROM/tensorflow_keras/tests | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/framework/ROM/tensorflow_keras/tests b/tests/framework/ROM/tensorflow_keras/tests index 9130288592..0b26eefe2d 100644 --- a/tests/framework/ROM/tensorflow_keras/tests +++ b/tests/framework/ROM/tensorflow_keras/tests @@ -17,6 +17,7 @@ input = 'keras_mlp_regression.xml' output = 'keras_mlp_regression/romXML.xml' csv = 'keras_mlp_regression/outMLPClassifier.csv' + rel_err = 0.0005 [../] # Disable test for pickling, currently, we could not pickle the DNN ROMs #[./tf_cnn_dump]