Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
from keras.models import Sequential
from keras.layers import LSTM, Dropout, Activation
import numpy as np

# Network hyperparameters.
ils = 10  # input layer size (features per time step)
ilt = 11  # input layer time steps (sequence length)
hls = 12  # hidden layer size (LSTM units)
nhl = 2   # number of hidden layers
ols = 1   # output layer size
p = 0.2   # dropout probability
f_a = 'relu'     # activation function
opt = 'rmsprop'  # optimizing function

#
# Building the model
#
model = Sequential()
# The input layer: return_sequences=True so the stacked LSTMs below
# receive the full sequence, not just the last hidden state.
model.add(LSTM(hls, input_shape=(ilt, ils), return_sequences=True))
model.add(Activation(f_a))
model.add(Dropout(p))
# Hidden layers
for i in range(nhl - 1):
    model.add(LSTM(hls, return_sequences=True))
    model.add(Activation(f_a))
    model.add(Dropout(p))
# Output layer.
# BUG FIX: softmax over a single unit always outputs 1.0, so the model
# could never learn anything. A single-unit binary output paired with
# binary_crossentropy needs a sigmoid activation.
model.add(LSTM(ols, return_sequences=False))
model.add(Activation('sigmoid'))
model.compile(optimizer=opt, loss='binary_crossentropy')

#
# Making test data and fitting the model
#
m_train, n_class = 1000, ols
# np.random.random already returns an ndarray; no np.array() wrapper needed.
data = np.random.random((m_train, ilt, ils))
labels = np.random.randint(n_class, size=(m_train, 1))
# Make labels one-hot.
# NOTE(review): with ols == 1 these one-hot labels are never used below —
# binary_crossentropy + sigmoid trains directly on `labels`. Kept for the
# multi-class case; confirm intent before deleting.
# np.int was removed in NumPy 1.24 — use the builtin int — and the (m, 1)
# label column must be raveled so the fancy index is 1-D.
onehot_labels = np.zeros(shape=(labels.shape[0], ols))
onehot_labels[np.arange(labels.shape[0]), labels.ravel().astype(int)] = 1
# `nb_epoch` was the Keras 1 keyword; modern Keras uses `epochs`.
model.fit(data, labels, epochs=10, batch_size=32)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement