Architecture changes from training
reuben committed Jul 18, 2018
1 parent 6930adf · commit 2dbb1e9
Showing 1 changed file with 10 additions and 10 deletions: DeepSpeech.py
@@ -413,22 +413,22 @@ def BiRNN(batch_x, seq_length, dropout, batch_size=None, n_steps=-1, previous_st
     # clipped RELU activation and dropout.
 
     # 1st layer
-    b1 = variable_on_worker_level('b1', [n_hidden_1], tf.random_normal_initializer(stddev=FLAGS.b1_stddev))
-    h1 = variable_on_worker_level('h1', [n_input + 2*n_input*n_context, n_hidden_1], tf.contrib.layers.xavier_initializer(uniform=False))
+    b1 = variable_on_worker_level('b1', [n_hidden_1], tf.zeros_initializer())
+    h1 = variable_on_worker_level('h1', [n_input + 2*n_input*n_context, n_hidden_1], tf.contrib.layers.xavier_initializer())
     layer_1 = tf.minimum(tf.nn.relu(tf.add(tf.matmul(batch_x, h1), b1)), FLAGS.relu_clip)
     layer_1 = tf.nn.dropout(layer_1, (1.0 - dropout[0]))
     layers['layer_1'] = layer_1
 
     # 2nd layer
-    b2 = variable_on_worker_level('b2', [n_hidden_2], tf.random_normal_initializer(stddev=FLAGS.b2_stddev))
-    h2 = variable_on_worker_level('h2', [n_hidden_1, n_hidden_2], tf.random_normal_initializer(stddev=FLAGS.h2_stddev))
+    b2 = variable_on_worker_level('b2', [n_hidden_2], tf.zeros_initializer())
+    h2 = variable_on_worker_level('h2', [n_hidden_1, n_hidden_2], tf.contrib.layers.xavier_initializer())
     layer_2 = tf.minimum(tf.nn.relu(tf.add(tf.matmul(layer_1, h2), b2)), FLAGS.relu_clip)
     layer_2 = tf.nn.dropout(layer_2, (1.0 - dropout[1]))
     layers['layer_2'] = layer_2
 
     # 3rd layer
-    b3 = variable_on_worker_level('b3', [n_hidden_3], tf.random_normal_initializer(stddev=FLAGS.b3_stddev))
-    h3 = variable_on_worker_level('h3', [n_hidden_2, n_hidden_3], tf.random_normal_initializer(stddev=FLAGS.h3_stddev))
+    b3 = variable_on_worker_level('b3', [n_hidden_3], tf.zeros_initializer())
+    h3 = variable_on_worker_level('h3', [n_hidden_2, n_hidden_3], tf.contrib.layers.xavier_initializer())
     layer_3 = tf.minimum(tf.nn.relu(tf.add(tf.matmul(layer_2, h3), b3)), FLAGS.relu_clip)
     layer_3 = tf.nn.dropout(layer_3, (1.0 - dropout[2]))
     layers['layer_3'] = layer_3
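
The change is uniform across the touched layers: bias vectors move from a tunable random-normal initializer to zeros, weight matrices move to Xavier/Glorot, and dropping `uniform=False` switches `h1` (and `h6` in the next hunk) from the normal-distribution variant of Glorot to its uniform default. A minimal TF 1.x sketch of the before/after, using plain `tf.get_variable` in place of the repo's `variable_on_worker_level` wrapper; the shapes and stddev value below are illustrative stand-ins, not the model's flags:

import tensorflow as tf  # TF 1.x; tf.contrib.layers was removed in TF 2.x

n_in, n_out = 494, 2048  # illustrative layer shapes

# Before: weights and biases drawn from a Gaussian whose stddev was a
# per-layer hyperparameter (0.05 is a stand-in for FLAGS.b1_stddev etc.).
b_old = tf.get_variable('b_old', [n_out],
                        initializer=tf.random_normal_initializer(stddev=0.05))
h_old = tf.get_variable('h_old', [n_in, n_out],
                        initializer=tf.contrib.layers.xavier_initializer(uniform=False))

# After: zeros for biases, Glorot (uniform by default) for weights,
# leaving no per-layer stddev knobs to tune.
b_new = tf.get_variable('b_new', [n_out], initializer=tf.zeros_initializer())
h_new = tf.get_variable('h_new', [n_in, n_out],
                        initializer=tf.contrib.layers.xavier_initializer())

A side effect is that the `FLAGS.*_stddev` hyperparameters are no longer read by these layers.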
@@ -455,16 +455,16 @@ def BiRNN(batch_x, seq_length, dropout, batch_size=None, n_steps=-1, previous_st
     layers['rnn_output_state'] = output_state
 
     # Now we feed `output` to the fifth hidden layer with clipped RELU activation and dropout
-    b5 = variable_on_worker_level('b5', [n_hidden_5], tf.random_normal_initializer(stddev=FLAGS.b5_stddev))
-    h5 = variable_on_worker_level('h5', [n_cell_dim, n_hidden_5], tf.random_normal_initializer(stddev=FLAGS.h5_stddev))
+    b5 = variable_on_worker_level('b5', [n_hidden_5], tf.zeros_initializer())
+    h5 = variable_on_worker_level('h5', [n_cell_dim, n_hidden_5], tf.contrib.layers.xavier_initializer())
     layer_5 = tf.minimum(tf.nn.relu(tf.add(tf.matmul(output, h5), b5)), FLAGS.relu_clip)
     layer_5 = tf.nn.dropout(layer_5, (1.0 - dropout[5]))
     layers['layer_5'] = layer_5
 
     # Now we apply the weight matrix `h6` and bias `b6` to the output of `layer_5`
     # creating `n_classes` dimensional vectors, the logits.
-    b6 = variable_on_worker_level('b6', [n_hidden_6], tf.random_normal_initializer(stddev=FLAGS.b6_stddev))
-    h6 = variable_on_worker_level('h6', [n_hidden_5, n_hidden_6], tf.contrib.layers.xavier_initializer(uniform=False))
+    b6 = variable_on_worker_level('b6', [n_hidden_6], tf.zeros_initializer())
+    h6 = variable_on_worker_level('h6', [n_hidden_5, n_hidden_6], tf.contrib.layers.xavier_initializer())
     layer_6 = tf.add(tf.matmul(layer_5, h6), b6)
     layers['layer_6'] = layer_6
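
Every dense layer the diff touches follows the same pattern: affine transform, ReLU clipped at `FLAGS.relu_clip`, then dropout. A self-contained TF 1.x sketch of that pattern with the post-commit initializers; the function name, default clip value, and shapes are illustrative assumptions, not code from this commit:

import tensorflow as tf  # TF 1.x

def clipped_relu_dense(x, n_out, dropout_rate, relu_clip=20.0, name='layer'):
    # Affine transform with the post-commit initializers.
    n_in = x.shape.as_list()[-1]
    h = tf.get_variable(name + '_h', [n_in, n_out],
                        initializer=tf.contrib.layers.xavier_initializer())
    b = tf.get_variable(name + '_b', [n_out],
                        initializer=tf.zeros_initializer())
    # Clipped ReLU: min(relu(x h + b), relu_clip) bounds the activations.
    y = tf.minimum(tf.nn.relu(tf.add(tf.matmul(x, h), b)), relu_clip)
    # TF 1.x dropout takes keep_prob, hence the 1.0 - dropout_rate.
    return tf.nn.dropout(y, 1.0 - dropout_rate)

# Hypothetical usage: 494-dim input windows into a 2048-unit first layer.
x = tf.placeholder(tf.float32, [None, 494])
layer_1 = clipped_relu_dense(x, 2048, dropout_rate=0.05, name='layer_1')

Note that `layer_6` in the diff skips the clip and dropout: it produces the raw logits.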

