Untitled

a guest
Mar 21st, 2019
import numpy as np
from numpy.lib import recfunctions as rf
from sklearn import utils
from sklearn.decomposition import PCA
from keras.models import Sequential
from keras.layers import Dense
from keras.callbacks import EarlyStopping
from keras.utils import to_categorical

# data_normalization, feature_selection, nf_data and af_data are defined elsewhere in the project (not shown in this paste)
output_nfs = data_normalization(nf_data.iloc[:, 1:28], 'PCA')  # normalization via StandardScaler
output_afs = data_normalization(af_data.iloc[:, 1:28], 'PCA')
output_nfs['Fault'] = nf_data['Fault'].values
output_afs['Fault'] = af_data['Fault'].values

# convert both frames to structured record arrays so they can be concatenated and shuffled together
output_nfs_rec_array = output_nfs.to_records(index=False)
output_afs_rec_array = output_afs.to_records(index=False)

final_data = np.concatenate([output_nfs_rec_array, output_afs_rec_array])
np.random.shuffle(final_data)
y = final_data['Fault']  # target
# drop the 'Fault' field and view the remaining fields as a plain float matrix (actual data points)
X = rf.drop_fields(final_data, ['Fault'], False).view(np.float64).reshape(len(final_data), len(final_data.dtype) - 1)
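# Optional check (not in the original paste): confirm the structured record array was
# flattened into a plain float matrix with one column per non-'Fault' field.
print('X shape:', X.shape, '| feature fields:', len(final_data.dtype) - 1)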
# Combine all the faults into 3 separate categories: no faults (0), electrical (1), mechanical (2)
electrical_codes = {188, 176, 315, 485, 286, 707, 959, 958, 817, 187, 489, 632,
                    102, 648, 687, 935, 332, 846, 944, 254, 181, 317}
mechanical_codes = {604, 603, 958, 154, 162, 165, 512, 948, 151, 163, 296,
                    734, 844, 191, 560, 297, 504, 735}
y_new = []
for i in range(len(y)):
    if y[i] == 0:
        y_new.append(0)  # no fault
    elif y[i] in electrical_codes:
        y_new.append(1)  # electrical (958 appears in both lists; this branch is checked first)
    elif y[i] in mechanical_codes:
        y_new.append(2)  # mechanical
    # fault codes outside both lists are not appended
y = np.array(y_new, dtype=int)

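# Sanity-check sketch (not in the original paste): inspect the class balance before
# cross-validation. The comment inside the k-fold loop below notes that "no fault"
# dominates, which is why SMOTE is applied to each training fold.
print('samples per class (0=no fault, 1=electrical, 2=mechanical):', np.bincount(y))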
from sklearn.model_selection import KFold

# 10-fold cross-validation with shuffling (keyword arguments are required by recent scikit-learn releases)
n_folds = 10
kfold = KFold(n_splits=n_folds, shuffle=True, random_state=1)
scores, members = list(), list()

import keras_metrics as km
from keras.layers import Dropout

labels = ['no faults', 'electrical fault', 'mechanical fault']

def evaluate_model(X_train, y_train, X_test, y_test):
    # one-hot encode the integer class labels for the softmax output layer
    trainy_enc = to_categorical(y_train)
    testy_enc = to_categorical(y_test)
    # stop training once validation loss has not improved for 3 epochs
    early_stopping_monitor = EarlyStopping(monitor='val_loss', patience=3)

    # small fully connected network (35 -> 10 -> 3) with dropout for regularization
    model = Sequential()
    n_cols = X_train.shape[1]
    model.add(Dense(35, activation='relu', input_shape=(n_cols,)))
    model.add(Dropout(0.5))
    model.add(Dense(10, activation='relu'))
    model.add(Dropout(0.5))
    model.add(Dense(3, activation='softmax'))
    model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['acc'])

    model.fit(X_train, trainy_enc, batch_size=10, shuffle=True, epochs=100,
              validation_data=(X_test, testy_enc),
              callbacks=[early_stopping_monitor], verbose=1)

    # evaluate the model on the held-out fold
    _, test_acc = model.evaluate(X_test, testy_enc, verbose=1)
    return model, test_acc

from imblearn.over_sampling import SMOTE

for train_iX, test_iX in kfold.split(X):
    X_train, y_train = X[train_iX], y[train_iX]
    X_test, y_test = X[test_iX], y[test_iX]
    # Class imbalance is too severe: "no fault" prevails, so SMOTE is used to
    # oversample the electrical and mechanical fault classes in the training fold only
    sm = SMOTE(random_state=12)
    X_train, y_train = sm.fit_resample(X_train, y_train)
    X_train, y_train = utils.shuffle(X_train, y_train, random_state=42)
    # project both folds with the project's feature_selection helper (PCA variant)
    X_train, X_test = feature_selection(X_train, X_test, None, 'PCA', None)
    model, test_acc = evaluate_model(X_train, y_train, X_test, y_test)
    scores.append(test_acc)
    members.append(model)

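# Sketch (not in the original paste): summarize the per-fold accuracies collected in
# `scores` and combine the stored `members` into a simple averaging ensemble. Note that
# each member was trained on fold data transformed by feature_selection, so any input
# passed to the ensemble must go through the same transformation first.
print('mean CV accuracy: %.3f (+/- %.3f)' % (np.mean(scores), np.std(scores)))

def ensemble_predict(models, X_transformed):
    # average the softmax outputs of all fold models and take the most likely class
    probs = np.mean([m.predict(X_transformed) for m in models], axis=0)
    return np.argmax(probs, axis=1)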
# Separate PCA step: keep enough components to explain 95% of the variance,
# fitted on the training data of the last fold only
pca = PCA(0.95)
pca.fit(X_train)
X_train_pca = pca.transform(X_train)
X_test_pca = pca.transform(X_test)
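# Sketch (not in the original paste): inspect how many principal components PCA(0.95)
# retained and how much variance each one explains.
print('components retained:', pca.n_components_)
print('explained variance ratios:', pca.explained_variance_ratio_)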