# recurrent_keras.py — character-level RNN text generator that tweets its samples.
# NOTE(review): this copy was scraped from a web view; the page chrome and
# line-number gutter that preceded the code have been removed so the file parses.
from __future__ import print_function
import matplotlib.pyplot as plt
import numpy as np
import time
import csv
from keras.models import Sequential
from keras.layers.core import Dense, Activation, Dropout
from keras.layers.recurrent import LSTM, SimpleRNN
from keras.layers.wrappers import TimeDistributed
import argparse
import twitter as twit
from configparser import ConfigParser
from RNN_utils import *
# Read bot settings from the ini file and authenticate against the Twitter API.
config = ConfigParser()
config.read('DNA_Bot.ini')

# All four OAuth credentials live in the [Twitter_Settings] section.
_credential_names = (
    'consumer_key',
    'consumer_secret',
    'access_token_key',
    'access_token_secret',
)
api = twit.Api(**{name: config.get('Twitter_Settings', name)
                  for name in _credential_names})

# Sanity check: show which accounts the bot follows.
users = api.GetFriends()
print([u.name for u in users])
# Hyper-parameters and paths, all taken from the [ML_Settings] section.
_ML = 'ML_Settings'
DATA_DIR = config.get(_ML, 'data_dir')              # training corpus location
BATCH_SIZE = config.getint(_ML, 'batch_size')       # samples per gradient step
HIDDEN_DIM = config.getint(_ML, 'hidden_dim')       # LSTM units per layer
SEQ_LENGTH = config.getint(_ML, 'seq_length')       # characters per training sequence
WEIGHTS = config.get(_ML, 'weights')                # checkpoint to resume from ('' = fresh start)
GENERATE_LENGTH = config.getint(_ML, 'gen_length')  # characters to sample per generation
LAYER_NUM = config.getint(_ML, 'layer_num')         # number of stacked LSTM layers
MODE = config.get(_ML, 'mode')                      # 'train' enables the training loop
# Build the character-level training set and the stacked-LSTM network.
print('Creating Training Data')
X, y, VOCAB_SIZE, ix_to_char = load_data(DATA_DIR, SEQ_LENGTH)

print('compiling the network \n')
model = Sequential()
# The first layer fixes the input shape; every LSTM returns full sequences so
# the TimeDistributed softmax can emit one character distribution per timestep.
model.add(LSTM(HIDDEN_DIM, input_shape=(None, VOCAB_SIZE), return_sequences=True))
for _ in range(LAYER_NUM - 1):
    model.add(LSTM(HIDDEN_DIM, return_sequences=True))
model.add(TimeDistributed(Dense(VOCAB_SIZE)))
model.add(Activation('softmax'))
model.compile(loss="categorical_crossentropy", optimizer="rmsprop")

Logger = Log()  # event log for generated samples (provided by RNN_utils)
print('network compiled \n')
# Sample the untrained network once, as a baseline to compare later epochs against.
print('Gen samp txt')
sampleText = generate_text(model, GENERATE_LENGTH, VOCAB_SIZE, ix_to_char)
# Posting the untrained gibberish to Twitter is deliberately left disabled:
# status = api.PostUpdate(sampleText)
# print('Posted to twitter :: {}'.format(status.text))
print('Sample text data from before training!!!!! \n')
print(sampleText)
Logger.AddEvent(-1, sampleText)  # epoch -1 marks the pre-training sample
# Resume from a checkpoint if one was configured. The epoch counter is encoded
# in the checkpoint filename as ..._epoch_<N>.hdf5 (see the save call below).
if WEIGHTS:  # idiomatic truthiness test instead of `not WEIGHTS == ''`
    model.load_weights(WEIGHTS)
    # Parse <N> from the span between the last '_' and the last '.'.
    # rfind('.') (rather than the original find('.')) tolerates dots earlier
    # in the path, e.g. './checkpoints/run.1/..._epoch_12.hdf5'.
    nb_epoch = int(WEIGHTS[WEIGHTS.rfind('_') + 1:WEIGHTS.rfind('.')])
else:
    nb_epoch = 0
print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! \n \n')
print('Training started this is going to take a while \n\n\n')
# Train indefinitely when in 'train' mode (or when starting from scratch),
# tweeting a generated sample and saving a checkpoint after every epoch.
if MODE == 'train' or WEIGHTS == '':
    print('training started!!! This may take a while.')
    while True:  # runs until killed; a checkpoint is written each epoch
        print('\n\nEpoch: {}\n'.format(nb_epoch))
        # nb_epoch=1 is the Keras 1.x keyword for a single training pass.
        model.fit(X, y, batch_size=BATCH_SIZE, verbose=1, nb_epoch=1)
        nb_epoch += 1
        print('creating text from epoch {} \n'.format(nb_epoch))
        GenText = generate_text(model, GENERATE_LENGTH, VOCAB_SIZE, ix_to_char)
        print("---------------Generated Text-------------\n")
        print(GenText)
        status = api.PostUpdate(GenText)
        print('\n -----------------------------------------')
        Logger.AddEvent(nb_epoch, GenText)
        print('NB Epoch {}'.format(nb_epoch))
        model.save_weights('checkpoint_layer_{}_hidden_{}_epoch_{}.hdf5'.format(LAYER_NUM, HIDDEN_DIM, nb_epoch))
# Otherwise: generation-only mode using the configured trained weights.
# BUG FIX: this branch originally tested `WEIGHTS == ''`, which is unreachable
# here (that case is consumed by the `if` above), so generation-only mode
# could never run. The intended condition is `WEIGHTS != ''`.
elif WEIGHTS != '':
    # The weights were already loaded during the resume step above;
    # reloading here is redundant but harmless.
    model.load_weights(WEIGHTS)
    generate_text(model, GENERATE_LENGTH, VOCAB_SIZE, ix_to_char)
    print('\n\n')
else:
    # Defensive fallback; unreachable now that the elif condition is fixed.
    print('\n\nNothing to do!')