-
Notifications
You must be signed in to change notification settings - Fork 1
/
trump.py
80 lines (63 loc) · 2.41 KB
/
trump.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
"""
"""
import network
import os
from basic_config import Config
import tensorflow as tf
import logging
def generate_text():
    """Generate text from the trained model, seeded by Config.Generate.seed_text.

    Restores the trained model into a TF session and runs one generation pass.
    When Config.Generate.loop is set, the user is prompted for a new seed text
    after each pass and generation repeats until interrupted.

    Side effects: mutates Config.Generate.seed_text from user input in loop
    mode; logs/prints the generated output via _generate_output.
    """
    # Minimize TF warnings which are not helpful in generate mode.
    os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
    tf.logging.set_verbosity(tf.logging.ERROR)
    net_features = network.construct()
    # Use the session as a context manager so it is always closed -- the
    # previous code only called sess.close() on the normal exit path and
    # leaked the session if generation or the input prompt raised.
    with tf.Session() as sess:
        Config.import_model(sess)
        # Enable the user to enter multiple text strings with a do-while loop
        while True:
            _generate_output(sess, net_features)
            if not Config.Generate.loop:
                break
            while True:
                print("")
                logging.info("Please supply a new seed text then press enter when complete: ")
                Config.Generate.seed_text = input("")
                # ">=" (not ">") so the check agrees with the error message
                # below, which promises "at least %d characters".
                if len(Config.Generate.seed_text) >= Config.Generate.min_seed_len:
                    print("You entered: \"" + Config.Generate.seed_text + "\"")
                    print("")
                    break
                logging.info("Invalid Seed Text. Must be at least %d characters long"
                             % Config.Generate.min_seed_len)
def _generate_output(sess, net_features):
    """Run one generation pass and log the seed text plus generated characters.

    Args:
        sess: live tf.Session with the trained model already restored.
        net_features: dict produced by network.construct() -- this function
            uses keys "X" (input placeholder), "output" (logits tensor), and
            "seq_len" (per-example sequence-length placeholder).
    """
    x = net_features["X"]
    # Softmax over the logits; [0, :] keeps only the first batch row's
    # distribution over characters (only row 0 of the batch is used).
    get_softmax = tf.nn.softmax(net_features["output"])[0, :]
    seq_len = net_features["seq_len"]
    # Effective sequence length starts at the seed length, capped at the
    # model's fixed sequence length.
    cur_seq_len = min(len(Config.Generate.seed_text), Config.sequence_length)
    # NOTE(review): input_x looks like a [batch][sequence] list of character
    # ids padded out to Config.sequence_length -- confirm against
    # Config.Generate.build_initial_x().
    input_x = Config.Generate.build_initial_x()
    generated_text = []
    # Generate the text character by character
    while len(generated_text) < Config.Generate.output_len:
        # Every batch row is fed the same current sequence length.
        phrase_seq_len = [cur_seq_len] * Config.batch_size
        softmax_out = sess.run(get_softmax, feed_dict={x: input_x, seq_len: phrase_seq_len})
        # DecisionEngine converts the softmax distribution into the next
        # character id (strategy defined in basic_config -- e.g. greedy or
        # sampling; not visible from here).
        pred_char_id = Config.DecisionEngine.function(sess, softmax_out)
        pred_char = Config.Generate.int2char()[pred_char_id]
        generated_text.append(pred_char)
        if cur_seq_len == Config.sequence_length:
            # Delete off the front of the list if it has reached the specified sequence length
            del input_x[0][0]
        else:
            # Shave last dummy element off since fixed batch size
            del input_x[0][Config.sequence_length - 1]
            cur_seq_len += 1
        # Insert the predicted id at the end of the active window so it feeds
        # the next step's input.
        input_x[0].insert(cur_seq_len - 1, pred_char_id)
        Config.Generate.prev_char = pred_char
    logging.info("Output Text: " + Config.Generate.seed_text + "".join(generated_text))
if __name__ == "__main__":
Config.parse_args()
Config.import_character_to_integer_map()
Config.Generate.build_int2char()
Config.Generate.build_seed_x()
generate_text()