CuDNNLSTM example: per-timestep sequence classification with a GPU-accelerated LSTM in Keras.
from random import random
from numpy import array
from numpy import cumsum
from keras.models import Sequential
from keras.layers import CuDNNLSTM
from keras.layers import Dense
from keras.layers import TimeDistributed
def get_sequence(n_timesteps):
    """Generate one random input/output sequence pair for training.

    The input is ``n_timesteps`` uniform random values in [0, 1). The
    label at each step is 0 while the running (cumulative) sum of the
    inputs stays below ``n_timesteps / 4`` and 1 from then on, so the
    labels form a non-decreasing run of 0s followed by 1s.

    Returns:
        (X, y): both shaped (1, n_timesteps, 1) — a batch of one sample
        in the (samples, timesteps, features) layout the LSTM expects.
    """
    values = array([random() for _ in range(n_timesteps)])
    threshold = n_timesteps / 4.0
    labels = array([1 if total >= threshold else 0 for total in cumsum(values)])
    # Reshape to (samples, timesteps, features) for batch-of-one training.
    return values.reshape(1, n_timesteps, 1), labels.reshape(1, n_timesteps, 1)
# Demo script: train a per-timestep binary classifier over random sequences.
n_timesteps = 20

# Model: one GPU-accelerated LSTM layer that returns the full sequence,
# followed by a sigmoid classifier applied independently at every timestep.
model = Sequential()
model.add(CuDNNLSTM(20, input_shape=(n_timesteps, 1), return_sequences=True))
model.add(TimeDistributed(Dense(1, activation='sigmoid')))
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['acc'])

# Each outer iteration trains for one epoch on a freshly generated sequence.
for _ in range(10):
    X, y = get_sequence(n_timesteps)
    model.fit(X, y, epochs=1, batch_size=1, verbose=2)

# Evaluate on one new, unseen random sequence and compare step by step.
X, y = get_sequence(n_timesteps)
yhat = model.predict_classes(X, verbose=0)
for step in range(n_timesteps):
    print('Expected:', y[0, step], 'Predicted', yhat[0, step])
Sample training output:
Epoch 1/1
- 0s - loss: 0.6943 - acc: 0.4000
Epoch 1/1
- 0s - loss: 0.6830 - acc: 0.4500
Epoch 1/1
- 0s - loss: 0.7007 - acc: 0.3500
Epoch 1/1
- 0s - loss: 0.6893 - acc: 0.4500
Epoch 1/1
- 0s - loss: 0.6764 - acc: 0.5000
Epoch 1/1
- 0s - loss: 0.6890 - acc: 0.5000
Epoch 1/1
- 0s - loss: 0.6612 - acc: 0.6000
Epoch 1/1
- 0s - loss: 0.6621 - acc: 0.6000
Epoch 1/1
- 0s - loss: 0.6736 - acc: 0.6000
Epoch 1/1
- 0s - loss: 0.6630 - acc: 0.6000
Sample predictions:
Expected: [0] Predicted [0]
Expected: [0] Predicted [1]
Expected: [0] Predicted [1]
Expected: [0] Predicted [1]
Expected: [0] Predicted [1]
Expected: [0] Predicted [1]
Expected: [0] Predicted [1]
Expected: [0] Predicted [1]
Expected: [0] Predicted [1]
Expected: [0] Predicted [1]
Expected: [0] Predicted [1]
Expected: [0] Predicted [1]
Expected: [0] Predicted [1]
Expected: [1] Predicted [1]
Expected: [1] Predicted [1]
Expected: [1] Predicted [1]
Expected: [1] Predicted [1]
Expected: [1] Predicted [1]
Expected: [1] Predicted [1]
Expected: [1] Predicted [1]