From e97a7a311ae0c5deb33ac248d10cf26a3e73038e Mon Sep 17 00:00:00 2001
From: Pieter-Jan Hoedt
Date: Thu, 14 Jan 2021 12:02:31 +0100
Subject: [PATCH] use linear layer for all models (should not hurt addition)

---
 stable_nalu/network/simple_function_recurrent.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/stable_nalu/network/simple_function_recurrent.py b/stable_nalu/network/simple_function_recurrent.py
index 892d49c..de24731 100644
--- a/stable_nalu/network/simple_function_recurrent.py
+++ b/stable_nalu/network/simple_function_recurrent.py
@@ -32,9 +32,9 @@ def __init__(self, unit_name, input_size=10, writer=None, **kwargs):
                                              name='recurrent_layer',
                                              **kwargs)
         self.output_layer = GeneralizedLayer(self.hidden_size, 1,
-                                             'linear'
-                                             if unit_name in {'GRU', 'LSTM', 'MCLSTM', 'RNN-tanh', 'RNN-ReLU'}
-                                             else unit_name,
+                                             'linear',
+                                             # if unit_name in {'GRU', 'LSTM', 'MCLSTM', 'RNN-tanh', 'RNN-ReLU'}
+                                             # else unit_name,
                                              writer=self.writer,
                                              name='output_layer',
                                              **kwargs)
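
Note (not part of the patch): after this change the read-out is always a plain
linear GeneralizedLayer, no matter which recurrent unit_name is selected; the
conditional that previously reused unit_name as the output unit is only kept as
a comment. A minimal sketch of the patched construction is shown below. The
import path stable_nalu.layer and the keyword arguments beyond those visible in
the diff are assumptions based on the surrounding stable_nalu code, not
something this patch defines.

    # Hypothetical sketch of the output layer after the patch is applied.
    # stable_nalu.layer is an assumed import path; hidden_size and writer
    # stand in for the attributes set earlier in __init__.
    from stable_nalu.layer import GeneralizedLayer  # assumed location

    output_layer = GeneralizedLayer(hidden_size, 1,
                                    'linear',          # always a linear read-out now,
                                                       # regardless of unit_name
                                    writer=writer,
                                    name='output_layer')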