# Multi-input, multi-output LSTM: 2 input features and a 3-class one-hot output at every time step
import numpy as np
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import TimeDistributed
from keras.layers import LSTM
# prepare sequences: 2 samples, each with `length` time steps
length = 5
# input: shape (2 samples, 5 time steps, 2 features)
X = np.array([[[0.0, 0.0],
               [0.2, 0.2],
               [0.4, 0.4],
               [0.6, 0.6],
               [0.8, 0.8]],
              [[0.0, 0.0],
               [0.2, 0.2],
               [0.4, 0.4],
               [0.6, 0.6],
               [0.8, 0.8]]])
# target: shape (2 samples, 5 time steps, 3 classes), one-hot encoded per time step
y = np.array([[[0, 1, 0],
               [0, 0, 1],
               [1, 0, 0],
               [0, 1, 0],
               [0, 0, 1]],
              [[0, 1, 0],
               [0, 0, 1],
               [1, 0, 0],
               [0, 1, 0],
               [0, 0, 1]]])
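# Shape check (added for clarity): the LSTM below expects X as
# (samples, time steps, features) and y as (samples, time steps, classes).
print(X.shape, y.shape)  # (2, 5, 2) (2, 5, 3)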
# define LSTM configuration
n_neurons = length
n_batch = 1
n_epoch = 2000
# create LSTM
model = Sequential()
# return_sequences=True emits one hidden state per time step instead of only the last one
model.add(LSTM(n_neurons, input_shape=(length, 2), return_sequences=True))
# TimeDistributed applies the same 3-class softmax Dense layer at every time step
model.add(TimeDistributed(Dense(3, activation='softmax')))
# one-hot targets at each time step -> categorical cross-entropy
model.compile(loss='categorical_crossentropy', optimizer='adam')
model.summary()
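# Sanity check (added here, not in the original script): TimeDistributed(Dense(3))
# keeps the time dimension, so the model outputs one 3-way distribution per step.
assert model.output_shape == (None, length, 3)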
# train LSTM
model.fit(X, y, epochs=n_epoch, batch_size=n_batch, verbose=2)
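# Optional readout (assumption: re-using the training data is acceptable for this
# toy example, since no validation set is defined): report the final loss.
loss = model.evaluate(X, y, batch_size=n_batch, verbose=0)
print('final training loss: %.4f' % loss)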
# evaluate: print the probability of class 0 at each time step of the first sample
result = model.predict(X, batch_size=n_batch, verbose=0)
for value in result[0, :, 0]:
    print('%.1f' % value)
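# Extra readout (not in the original script): compare the predicted class index
# per time step with the expected class from y for the first sample.
predicted = result[0].argmax(axis=-1)
expected = y[0].argmax(axis=-1)
print('predicted classes:', predicted)
print('expected  classes:', expected)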