jonksar

KERAS: Recurrent network

Jul 19th, 2016
from keras.models import Sequential
from keras.layers import LSTM, Dropout, Activation
import numpy as np

ils = 10            # input layer size (features per time step)
ilt = 11            # input layer time steps
hls = 12            # hidden layer size
nhl = 2             # number of hidden layers
ols = 1             # output layer size
p = 0.2             # dropout probability
f_a = 'relu'        # activation function
opt = 'rmsprop'     # optimizing function

#
# Building the model
#
model = Sequential()

# The input layer
model.add(LSTM(hls, input_shape=(ilt, ils), return_sequences=True))
model.add(Activation(f_a))
model.add(Dropout(p))

# Hidden layers
for i in range(nhl - 1):
    model.add(LSTM(hls, return_sequences=True))
    model.add(Activation(f_a))
    model.add(Dropout(p))

# Output layer: a single unit with a sigmoid to match binary_crossentropy
# (softmax over a single unit would always output 1.0)
model.add(LSTM(ols, return_sequences=False))
model.add(Activation('sigmoid'))

model.compile(optimizer=opt, loss='binary_crossentropy')

#
# Making test data and fitting the model
#

m_train, n_class = 1000, 2
data = np.random.random((m_train, ilt, ils))
labels = np.random.randint(n_class, size=(m_train, 1))

# One-hot labels would only be needed for a multi-class softmax output
# trained with categorical_crossentropy; the single sigmoid output above
# takes the 0/1 labels directly.
onehot_labels = np.zeros(shape=(labels.shape[0], n_class))
onehot_labels[np.arange(labels.shape[0]), labels.flatten().astype(int)] = 1

# nb_epoch is the Keras 1.x argument name (renamed to epochs in Keras 2)
model.fit(data, labels, nb_epoch=10, batch_size=32)
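A quick sanity check can follow the fit. The sketch below is not part of the original paste; m_test, test_data and test_labels are assumed names. It runs the trained model on fresh random data using Keras' evaluate and predict methods.

# Sketch of a follow-up check on held-out random data (assumed names)
m_test = 100
test_data = np.random.random((m_test, ilt, ils))
test_labels = np.random.randint(n_class, size=(m_test, 1))

test_loss = model.evaluate(test_data, test_labels, batch_size=32)
preds = model.predict(test_data, batch_size=32)   # shape (m_test, ols), sigmoid probabilities
print(test_loss, preds[:5])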