Advertisement
Guest User

simple_LSTM_ed

a guest
Jan 6th, 2021
348
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
Python 4.63 KB | None | 0 0
  1. # import essentials
  2. import matplotlib.pyplot as plt
  3. import pandas as pd
  4. import numpy as np
  5.  
  6. #
  7. from sklearn.preprocessing import MinMaxScaler
  8.  
  9. # import keras
  10. from keras.models import Sequential
  11. from keras.layers import LSTM, Dense
  12.  
# read the data
# data = pd.read_csv("")

# Synthetic series to train/validate/test on: a linear ramp plus
# Gaussian noise plus two sine harmonics.
# NOTE(review): np.random.normal(-1, 1, 200) draws noise with mean -1,
# not 0 — confirm the shift is intended.
pure = np.linspace(0, 100, 200)
noise = np.random.normal(-1, 1, 200)
signal = pure + noise + np.sin(pure) + np.sin(2*pure)

plt.plot(signal, '-')

# Shape the series as an (n_samples, 1) column and rescale to [0, 1];
# `window` is the look-back length fed to the LSTM.
dataset = np.array(signal).reshape(-1, 1)
window = 10

scaler = MinMaxScaler(feature_range=(0, 1))
dataset = scaler.fit_transform(dataset)
  30. def split(_dataset, p):
  31.   train_size = int(len(_dataset) * p)
  32.   test_size = len(_dataset) - train_size
  33.   return _dataset[0:train_size,:], _dataset[train_size:len(_dataset), :]
  34.  
  35. def prepare(_dataset, _look_back=1):
  36.     data_x, data_y = [], []
  37.     for i in range(len(_dataset) - _look_back):
  38.       a = _dataset[i:(i + _look_back), 0]
  39.       data_x.append(a)
  40.       data_y.append(_dataset[i + _look_back, 0]) # increase to 1 dim
  41.     data_x = np.array(data_x)
  42.     data_x = np.reshape(data_x, (data_x.shape[0], data_x.shape[1], 1))
  43.     return data_x, np.array(data_y)
  44.  
# 80% train+val / 20% test, then 60/40 of the first part into train/val.
train, test = split(dataset, 0.8)
train, val = split(train, 0.6)

# Windowed supervised samples: each x is `window` steps, y is the next step.
train_x, train_y = prepare(train, window)
val_x, val_y = prepare(val, window)
test_x, test_y = prepare(test, window)
  51.  
# LSTM model

# Three stacked LSTM layers (width = window) feeding a single linear
# output unit; the first two return full sequences so they can stack.
model = Sequential()
model.add(LSTM(window, return_sequences=True, input_shape=(window, 1)))
model.add(LSTM(window, return_sequences=True))
model.add(LSTM(window))
model.add(Dense(1))

# MSE regression loss with the Adam optimizer.
model.compile(loss='mean_squared_error', optimizer='adam',)

# train the model
model.fit(train_x, train_y, validation_data=(val_x, val_y),
          epochs=10, batch_size=10, verbose=0)
  65.  
  66. # make predictions on concatenated set - train, validation, test together
  67. prediction = np.concatenate((train_x, val_x), axis=0)
  68. pred = model.predict(np.concatenate((prediction, test_x), axis=0))
  69.  
  70. # do the scaler inversion
  71. pred = scaler.inverse_transform(pred)
  72. dataset = scaler.inverse_transform(dataset)
  73.  
  74. # reshape dataset for plotting
  75. dataset = np.array(signal).reshape(-1, 1)
  76.  
  77. # Commented out IPython magic to ensure Python compatibility.
  78. # %matplotlib inline
  79. def to_plot(dataset, predict, start, end):
  80.   predict_plot = np.empty_like(dataset)
  81.   predict_plot[:, :] = np.nan
  82.   predict_plot[start:end, :] = predict
  83.   return predict_plot
  84.  
# Overlay region for the predictions within the full dataset.
# NOTE(review): `prepare`'s first target is dataset index `window`, so
# start = window + 1 looks off by one — confirm the intended alignment.
start = window+1
end =  window + len(pred)+1

pred_plot = to_plot(dataset, pred, start, end)

# Raw series vs. predictions, on shared x indices.
plt.plot(dataset, color='g', label='dataset')
plt.plot(pred_plot, color='m', label='prediction')
plt.legend()
plt.title("prediction for the 200 length dataset")
plt.show()

# Test-set error: unscaled test predictions minus the tail of the
# unscaled dataset (the last len(test_x) rows are the test targets).
x = scaler.inverse_transform(model.predict(test_x))
y = dataset[-len(test_x):]
p = x - y
plt.figure()
plt.plot(p[:,0],)
plt.title("error")
plt.xlabel("index")
plt.ylabel("error")
  104.  
"""Let's repeat all, but for the larger dataset"""

# create array to train, validate, and test with 2000 values
# NOTE(review): noise again drawn with mean -1 (np.random.normal(-1, 1))
# — confirm the shift is intended.
pure = np.linspace(0, 100, 2000)
noise = np.random.normal(-1, 1, 2000)
signal = pure + noise + np.sin(pure) + np.sin(2*pure)

plt.plot(signal, '-')

# set splitting functions to train, validation, and test sets
# (the scaler is re-fit on the new series; `window` is unchanged)
dataset = np.array(signal).reshape(-1, 1)
window = 10

scaler = MinMaxScaler(feature_range=(0, 1))
dataset = scaler.fit_transform(dataset)

# same 80/20 then 60/40 splits and windowing as the 200-point run
train, test = split(dataset, 0.8)
train, val = split(train, 0.6)

train_x, train_y = prepare(train, window)
val_x, val_y = prepare(val, window)
test_x, test_y = prepare(test, window)
  127.  
  128. # train the model
  129. model.fit(train_x, train_y, validation_data=(val_x, val_y),
  130.           epochs=10, batch_size=10, verbose=0)
  131.  
  132. # make predictions on concatenated set - train, validation, test together
  133. prediction = np.concatenate((train_x, val_x), axis=0)
  134. pred = model.predict(np.concatenate((prediction, test_x), axis=0))
  135.  
  136. # do the scaler inversion
  137. pred = scaler.inverse_transform(pred)
  138. dataset = scaler.inverse_transform(dataset)
  139.  
  140. # reshape dataset for plotting
  141. dataset = np.array(signal).reshape(-1, 1)
  142.  
# Commented out IPython magic to ensure Python compatibility.
# %matplotlib inline

# Overlay region for the predictions within the full dataset.
# NOTE(review): the first predicted target is dataset index `window`,
# so start = window + 1 may be off by one — confirm.
start = window+1
end =  window + len(pred)+1

pred_plot = to_plot(dataset, pred, start, end)

# Raw series vs. predictions, on shared x indices.
plt.plot(dataset, color='g', label='dataset')
plt.plot(pred_plot, color='m', label='prediction')
plt.legend()
plt.title("prediction for the 2000 length dataset")
plt.show()

# Test-set error: unscaled test predictions minus the tail of the
# unscaled dataset (the last len(test_x) rows are the test targets).
x = scaler.inverse_transform(model.predict(test_x))
y = dataset[-len(test_x):]
p = x - y
plt.figure()
plt.plot(p[:,0],)
plt.title("error")
plt.xlabel("index")
plt.ylabel("error")
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement