# Time Series Testing
import numpy as np
import keras.callbacks
from keras.models import Sequential
from keras.layers.core import Dense, Activation, Dropout
from keras.layers.recurrent import LSTM

# Callback to capture the loss after every training batch
class LossHistory(keras.callbacks.Callback):
    def on_train_begin(self, logs={}):
        self.losses = []

    def on_batch_end(self, batch, logs={}):
        self.losses.append(logs.get('loss'))

# You should get data frames with prices somewhere, e.g. on Quandl - implementation is up to you
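# As one possible option (a hypothetical sketch, not part of the original snippet):
# read two daily price series from CSV into date-indexed DataFrames.
# The file names and column names below are placeholder assumptions.
import pandas as pd

df1 = pd.read_csv('asset1.csv', index_col='Date', parse_dates=True)[['Close']]
df2 = pd.read_csv('asset2.csv', index_col='Date', parse_dates=True)[['Close']]
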
# merge the two data frames on their shared date index and drop missing rows
merged = df1.merge(df2, left_index=True, right_index=True, how='inner').dropna()

# data prep
# use 100 days of historical data to predict 10 days in the future
data = merged.values
examples = 100
y_examples = 10
nb_samples = len(data) - examples - y_examples

# input - 2 features; each sample is a sliding window of `examples` consecutive rows
input_list = [np.expand_dims(np.atleast_2d(data[i:examples+i, :]), axis=0) for i in range(nb_samples)]
input_mat = np.concatenate(input_list, axis=0)

# target - the next y_examples values of the first column in the merged dataframe
target_list = [np.atleast_2d(data[i+examples:examples+i+y_examples, 0]) for i in range(nb_samples)]
target_mat = np.concatenate(target_list, axis=0)

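# Sanity check on the window shapes (derived from the slicing above):
# input_mat is (nb_samples, examples, n_features), target_mat is (nb_samples, y_examples)
assert input_mat.shape == (nb_samples, examples, data.shape[1])
assert target_mat.shape == (nb_samples, y_examples)
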
# set up model
trials = input_mat.shape[0]
features = input_mat.shape[2]
hidden = 64
model = Sequential()
model.add(LSTM(hidden, input_shape=(examples, features)))
model.add(Dropout(.2))
model.add(Dense(y_examples))
model.add(Activation('linear'))
model.compile(loss='mse', optimizer='rmsprop')

# Train
# (nb_epoch is the Keras 1.x argument name; in Keras 2 it was renamed to epochs)
history = LossHistory()
model.fit(input_mat, target_mat, nb_epoch=100, batch_size=400, callbacks=[history])
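
# Usage sketch after training (not in the original snippet): history.losses holds one
# loss value per batch, which can be plotted to check convergence, and model.predict
# returns a 10-step-ahead forecast for each input window.
import matplotlib.pyplot as plt

plt.plot(history.losses)
plt.xlabel('batch')
plt.ylabel('training loss (mse)')
plt.show()

predictions = model.predict(input_mat)  # shape (nb_samples, y_examples)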