LSTM Experiment
a guest · Feb 8th, 2019 · Python

import seaborn as sns
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.ensemble import RandomForestRegressor
from keras.models import Sequential
from keras.layers import LSTM


dom    = range(0,11)
H      = 8
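# dom spans the 11 time steps t = 0..10 of each simulated path; H marks the
# forecast cutoff, i.e. how many leading points the models get to see.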

## Noiseless paths
# Upward exponential --> 'Real' increase
f1  = lambda x : 10 + np.exp(0.33*x) / 3
y1  = [f1(x) for x in dom]

# Downward exponential --> 'Real' decrease
f2  = lambda x : 10 - np.exp(0.33*x) / 3
y2  = [f2(x) for x in dom]

# Upward parabolic bump --> 'Fake' increase (returns to baseline by t=10)
f3  = lambda x : 10 + (-(x-5)**2 + 5**2) / 9
y3  = [f3(x) for x in dom]

# Downward parabolic dip --> 'Fake' decrease (returns to baseline by t=10)
f4  = lambda x : 10 - (-(x-5)**2 + 5**2) / 9
y4  = [f4(x) for x in dom]


## Noisy Paths
sigma = 0.75
y1_ = [f1(x) + np.random.randn()*sigma for x in dom]
y2_ = [f2(x) + np.random.randn()*sigma for x in dom]
y3_ = [f3(x) + np.random.randn()*sigma for x in dom]
y4_ = [f4(x) + np.random.randn()*sigma for x in dom]
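# i.i.d. Gaussian noise (sigma = 0.75) is added pointwise, so a single noisy
# draw of a 'fake' move can look much like a 'real' one.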


# Plots
fig, (ax1,ax2) = plt.subplots(1,2, figsize=(14,5))
colors = ['g', 'r', 'orange', 'lightgreen']

ax1.set_title("Noiseless")
ax1.axhline(10, color='k');
ax1.axvline( H, color='k',linestyle=':');
ax1.plot(dom,y1, marker='o', color=colors[0], label='Real Increase');
ax1.plot(dom,y3, marker='o', color=colors[2], label='Fake Increase');
ax1.plot(dom,y2, marker='o', color=colors[1], label='Real Decrease');
ax1.plot(dom,y4, marker='o', color=colors[3], label='Fake Decrease');
ax1.legend(frameon=True);

ax2.set_title("Noisy")
ax2.axhline(10, color='k');
ax2.axvline( H, color='k',linestyle=':');
ax2.plot(dom,y1_, marker='o', color=colors[0], label='Real Increase');
ax2.plot(dom,y3_, marker='o', color=colors[2], label='Fake Increase');
ax2.plot(dom,y2_, marker='o', color=colors[1], label='Real Decrease');
ax2.plot(dom,y4_, marker='o', color=colors[3], label='Fake Decrease');
ax2.legend(frameon=True);

plt.tight_layout();
plt.savefig("noiseless_and_noisy.png");


# Simulated noisy data
N = 100
y = []
for i in range(N):
    y1  = [f1(x) + np.random.randn()*sigma for x in dom] + [100]; y.append(y1)
    y2  = [f2(x) + np.random.randn()*sigma for x in dom] + [200]; y.append(y2)
    y3  = [f3(x) + np.random.randn()*sigma for x in dom] + [300]; y.append(y3)
    y4  = [f4(x) + np.random.randn()*sigma for x in dom] + [400]; y.append(y4)
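# 4*N = 400 labelled paths in total; the class code (100/200/300/400) is
# appended as a 12th list element so it survives into the DataFrame below.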

ds = pd.DataFrame(y) / 100                 # scales the paths; class codes become 1..4
ds.rename({11:'c'}, axis=1, inplace=True)  # column 11 is the class code 'c'
ds['y'] = ds[10] - ds[7]                   # target: change from the last observed point (t=7) to the path's end (t=10)


# Train, validation and test
N     = len(ds)
N1    = int(N * 0.6)
N2    = int(N * 0.8)

train = ds.iloc[:N1]
vali  = ds.iloc[N1:N2]
test  = ds.iloc[N2:]

lags  = np.arange(0,11)

# For Random Forest
train_X_RF = train[lags[:H]].values
train_y_RF = train['y'].values
vali_X_RF  = vali [lags[:H]].values
vali_y_RF  = vali ['y'].values
test_X_RF  = test [lags[:H]].values
test_y_RF  = test ['y'].values
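# The Random Forest treats the H observed points as flat tabular features;
# the temporal ordering is only made explicit for the LSTM below.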

# For LSTM
train_X_LS = train[lags[:H]].values.reshape(len(train), H, 1)
train_y_LS = train[['y']].values
vali_X_LS  = vali [lags[:H]].values.reshape(len(vali ), H, 1)
vali_y_LS  = vali [['y']].values
test_X_LS  = test [lags[:H]].values.reshape(len(test ), H, 1)
test_y_LS  = test [['y']].values
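# Keras LSTM layers expect 3-D input shaped (samples, timesteps, features),
# hence the reshape to (n, H, 1); the targets stay (n, 1) column vectors.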

# Copy the test set (predictions will be added here)
te = ds.iloc[N2:].copy()


# Train the Random Forest Model
RF = RandomForestRegressor(random_state=0, n_estimators=20)
RF.fit(train_X_RF, train_y_RF);

# Out-of-sample Prediction (Random Forest)
te['RF'] = RF.predict(test_X_RF)

# Train the LSTM Model
model = Sequential()
# Two stacked single-unit LSTM layers; the second returns only its final
# output, which is used directly as the scalar regression prediction.
model.add(LSTM(1, batch_input_shape=(None, H, 1), return_sequences=True))
model.add(LSTM(1, return_sequences=False))
# 'accuracy' is not meaningful for a regression loss, so no extra metric is tracked.
model.compile(loss='mean_squared_error', optimizer='adam')
history = model.fit(train_X_LS, train_y_LS, epochs=100, validation_data=(vali_X_LS, vali_y_LS), verbose=0)
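
# A quick sanity check on the fit (a sketch, not in the original paste): Keras
# records per-epoch losses in history.history under 'loss' and 'val_loss'.
plt.figure(figsize=(6,4))
plt.plot(history.history['loss'], label='train loss')
plt.plot(history.history['val_loss'], label='validation loss')
plt.legend(); plt.tight_layout(); plt.savefig("lstm_loss.png")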

# Out-of-sample Prediction (LSTM)
te['LSTM'] = model.predict(test_X_LS).ravel()  # flatten the (n, 1) output for the column assignment
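
# Headline errors to go with the scatter plots below (a sketch, not in the
# original paste); mean_squared_error comes from the sklearn already used above.
from sklearn.metrics import mean_squared_error
print("RF   test MSE:", mean_squared_error(te['y'], te['RF']))
print("LSTM test MSE:", mean_squared_error(te['y'], te['LSTM']))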


# Plot Results
fig, (ax1,ax2) = plt.subplots(1,2, figsize=(15,5), sharey=True)

ax1.set_title("Random Forest")
(te[te['c']==1]*100).plot.scatter('RF', 'y', ax=ax1, color=colors[0], s=50, alpha=0.75);
(te[te['c']==2]*100).plot.scatter('RF', 'y', ax=ax1, color=colors[1], s=50, alpha=0.75);
(te[te['c']==3]*100).plot.scatter('RF', 'y', ax=ax1, color=colors[2], s=50, alpha=0.75);
(te[te['c']==4]*100).plot.scatter('RF', 'y', ax=ax1, color=colors[3], s=50, alpha=0.75);
ax1.xaxis.set_label_text("Prediction");
ax1.yaxis.set_label_text("Target");

ax2.set_title("LSTM")
(te[te['c']==1]*100).plot.scatter('LSTM', 'y', ax=ax2, color=colors[0], s=50, alpha=0.75);
(te[te['c']==2]*100).plot.scatter('LSTM', 'y', ax=ax2, color=colors[1], s=50, alpha=0.75);
(te[te['c']==3]*100).plot.scatter('LSTM', 'y', ax=ax2, color=colors[2], s=50, alpha=0.75);
(te[te['c']==4]*100).plot.scatter('LSTM', 'y', ax=ax2, color=colors[3], s=50, alpha=0.75);
ax2.xaxis.set_label_text("Prediction");
ax2.yaxis.set_label_text("Target");

plt.tight_layout();

plt.savefig("experimental_results.png");