-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathlstm_simple.py
More file actions
53 lines (39 loc) · 1.92 KB
/
lstm_simple.py
File metadata and controls
53 lines (39 loc) · 1.92 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
import logging
import numpy as np
import time
from keras.layers import BatchNormalization, Bidirectional, Dense, Dropout, Embedding, LSTM
from keras.models import Sequential
from keras.optimizers import Adam
from keras.wrappers.scikit_learn import KerasClassifier
from memory_profiler import profile
from sklearn.metrics import classification_report
from sklearn.model_selection import GridSearchCV
from utils import prepare_sequential
MAX_LENGTH = 400
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
def build_model(embedding_matrix=None, n_units=32, fc_dim=32, lr=0.001):
    """Build and compile a Keras model: frozen pretrained embeddings -> LSTM
    -> dropout -> dense ReLU -> dropout -> 4-way softmax.

    :param embedding_matrix: 2-D array (vocab_size, embedding_dim) of
        pretrained weights; required despite the None default (kept for
        KerasClassifier compatibility).
    :param n_units: number of LSTM units.
    :param fc_dim: width of the fully-connected hidden layer.
    :param lr: Adam learning rate.
    :return: a compiled keras Sequential model.
    :raises ValueError: if embedding_matrix is not provided.
    """
    if embedding_matrix is None:
        # Fail fast with a clear message instead of an AttributeError below.
        raise ValueError('embedding_matrix is required to build the model')
    logging.info('Initializing model...')
    model = Sequential()
    # Embeddings are frozen (trainable=False): rows = vocab size,
    # columns = embedding dimension. Inputs are padded to MAX_LENGTH tokens.
    embedding_layer = Embedding(input_dim=embedding_matrix.shape[0], output_dim=embedding_matrix.shape[1],
                                input_length=MAX_LENGTH, weights=[embedding_matrix], trainable=False)
    model.add(embedding_layer)
    model.add(LSTM(n_units, activation='sigmoid', recurrent_dropout=0.2, recurrent_activation='sigmoid',
                   return_sequences=False))
    model.add(Dropout(0.2))
    model.add(Dense(fc_dim, activation='relu'))
    model.add(Dropout(0.2))
    # 4 output classes, one-hot targets -> categorical crossentropy.
    model.add(Dense(4, activation='softmax'))
    model.compile(loss='categorical_crossentropy', optimizer=Adam(lr=lr), metrics=['accuracy'])
    logging.info(model.summary())
    return model
if __name__ == '__main__':
    t0 = time.time()
    # prepare_sequential is a project helper (utils.py); presumably returns
    # padded token sequences, one-hot labels and the pretrained embedding
    # matrix -- TODO confirm against utils.prepare_sequential.
    X_train, y_train, X_test, y_test, emb_matrix = prepare_sequential(merge=True)
    model = KerasClassifier(build_fn=build_model, embedding_matrix=emb_matrix, n_units=64, fc_dim=256, lr=0.00001, verbose=1)
    # The fit history was previously bound to an unused variable; dropped.
    model.fit(X_train, y_train, batch_size=32, epochs=20)
    y_pred = model.predict(X_test)
    # NOTE(review): KerasClassifier.predict typically returns class indices,
    # not probability vectors; if so, np.argmax over each scalar is always 0
    # and every prediction collapses to class 0 -- verify y_pred's shape.
    y_pred_m = [np.argmax(y) for y in y_pred]
    y_test_m = [np.argmax(y) for y in y_test]
    print(classification_report(y_test_m, y_pred_m))
    # Sanity check: which classes actually appear in the predictions.
    print(set(y_pred_m))
    print('Time: %s seconds' % (time.time() - t0))