Keras LSTM Training Data Format

I am trying to use an LSTM neural network (with Keras) to predict the opponent's next move in the game Rock-Paper-Scissors.

I encoded the inputs as Rock: [1 0 0], Paper: [0 1 0], Scissors: [0 0 1]. Now I want to train a neural network, but I'm a little confused about the structure of my training data.

I saved the game history of the opponent in a CSV file with the following structure:

1,0,0
0,1,0
0,1,0
0,0,1
1,0,0
0,1,0
0,1,0
0,0,1
1,0,0
0,0,1

I want to use every fifth row as a training label and the previous 4 rows as the training input. In other words, at each time step a vector of dimension 3 is fed to the network, and there are 4 time steps.

For example, the following four rows form one input:

1,0,0
0,1,0
0,1,0
0,0,1

And the fifth row is the corresponding training label:

1,0,0

My question is: how should I structure this training data to feed it to the Keras LSTM? Here is my incomplete code so far:

#!/usr/bin/python
from __future__ import print_function

import numpy as np

from keras.models import Sequential
from keras.layers import Dense, Activation, Dropout
from keras.layers.recurrent import LSTM
from keras.optimizers import Adam

output_dim = 3
input_dim = 3
input_length = 4
batch_size = 20   #use all the data to train in one iteration


#each input has the following structure
#Rock: [1 0 0], Paper: [0 1 0], Scissor: [0 0 1]
#4 inputs (vectors) are sent to the LSTM net and output 1 vector as the prediction

#incomplete function
def read_data():
    raw_training = np.genfromtxt('training_data.csv', delimiter=',')
    print(raw_training)

def createNet(summary=False):
    print("Start Initialzing Neural Network!")
    model = Sequential()
    model.add(LSTM(4,input_dim=input_dim,input_length=input_length,
            return_sequences=True,activation='softmax'))
    model.add(Dropout(0.1))
    model.add(LSTM(4,
            return_sequences=True,activation='softmax'))
    model.add(Dropout(0.1))
    model.add(Dense(3,activation='softmax'))
    model.add(Dropout(0.1))
    model.add(Dense(3,activation='softmax'))
    model.compile(loss='categorical_crossentropy',optimizer='Adam',metrics=['accuracy'])
    if summary:
        print(model.summary())
    return model

if __name__=='__main__':
    createNet(True)

A Keras LSTM layer expects its input with shape (sequence_length, input_dim), so in your case each input is a numpy array of shape (4, 3).

But be careful: since the network is trained on batches of examples, the training data you pass to the model must have shape (number_of_train_examples, sequence_length, input_dim), i.e. a stack of (4, 3) arrays. One such example is:

1,0,0
0,1,0
0,1,0
0,0,1

Collect all the examples in a list and stack them with np.array(list_of_train_example).
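As a minimal sketch (assuming the CSV layout from the question; the variable names here are just illustrative), the stacking could look like this:

import numpy as np

# read the opponent's history; each row is a one-hot move:
# Rock [1,0,0], Paper [0,1,0], Scissors [0,0,1]
raw = np.genfromtxt('training_data.csv', delimiter=',')

list_of_train_example = []   # each entry: a (4, 3) input sequence
list_of_labels = []          # each entry: a (3,) one-hot label

# every group of 5 rows -> 4 input time steps + 1 label
for start in range(0, len(raw) - 4, 5):
    list_of_train_example.append(raw[start:start + 4])
    list_of_labels.append(raw[start + 4])

X = np.array(list_of_train_example)   # shape (number_of_train_examples, 4, 3)
y = np.array(list_of_labels)          # shape (number_of_train_examples, 3)

A sliding window (step 1 instead of 5) would squeeze more examples out of the same history, but either way the result has the (number_of_train_examples, sequence_length, input_dim) shape the LSTM needs.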

Now, what is wrong with your LSTM layers? The first one has return_sequences=True, so it outputs a whole sequence of shape (4, 4), one vector per time step. That is fine, because the next layer is another LSTM, which needs a sequence as input. The second LSTM, however, should have return_sequences=False, so that it returns only its last output, a "flat" vector of shape (4,). As written, your stack keeps turning (4, 3) sequences into (4, 4) sequences, and the Dense layers at the end never receive a single vector, so the model can't produce one prediction per example.

Also, using softmax as the activation inside the LSTM layers is not a good idea; softmax only makes sense on the final Dense layer, where the output should be a probability distribution over Rock, Paper and Scissors. For the LSTM layers I kept the default activation, 'tanh'.

Putting it all together:

def createNet(summary=False):
    print("Start Initialzing Neural Network!")
    model = Sequential()
    model.add(LSTM(4,input_dim=input_dim,input_length=input_length,
            return_sequences=True,activation='tanh'))
    model.add(Dropout(0.1))
    # output shape : (4,4)
    model.add(LSTM(4,
            return_sequences=False,activation='tanh'))
    model.add(Dropout(0.1))
    # output shape : (4,)
    model.add(Dense(3,activation='tanh'))
    model.add(Dropout(0.1))
    # output shape : (3,)
    model.add(Dense(3,activation='softmax'))
    # output shape : (3,)
    model.compile(loss='categorical_crossentropy',optimizer='Adam',metrics=['accuracy'])
    if summary:
        print(model.summary())
    return model
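Assuming X and y have been built as in the sketch above (shapes (N, 4, 3) and (N, 3)), training and prediction would then look roughly like this; the epoch count is arbitrary and nb_epoch is the Keras 1.x argument name (Keras 2 calls it epochs):

model = createNet(summary=True)

# X: (number_of_train_examples, 4, 3), y: (number_of_train_examples, 3)
model.fit(X, y, batch_size=batch_size, nb_epoch=100)

# predict the opponent's next move from the 4 most recent moves
last_moves = X[-1:]                    # shape (1, 4, 3): a batch with a single sequence
probs = model.predict(last_moves)[0]   # probabilities for Rock, Paper, Scissors
print(['Rock', 'Paper', 'Scissors'][int(np.argmax(probs))])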

Source: https://habr.com/ru/post/1014852/

