Guest User

Untitled

a guest
Jan 19th, 2018
77
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 2.18 KB | None | 0 0
from __future__ import print_function

import os
from importlib import reload

import joblib
import keras
import matplotlib.pylab as plt
import numpy as np
import tensorflow as tf
from keras import backend as K
from keras.datasets import mnist
from keras.layers import Conv2D, Dense, Dropout, Flatten, MaxPooling2D
from keras.layers.advanced_activations import LeakyReLU
from keras.models import Sequential
from sklearn.model_selection import train_test_split
  11. def set_keras_backend(backend):
  12.  
  13. if K.backend() != backend:
  14. os.environ['KERAS_BACKEND'] = backend
  15. reload(K)
  16. assert K.backend() == backend
  17. set_keras_backend("tensorflow")
  18.  
  19.  
  20. DATA = joblib.load(open('Data.sav', 'rb'))
  21. LABEL = joblib.load(open('Lable.sav', 'rb'))
  22.  
  23. print(DATA.shape)
  24. print(LABEL.shape)
  25.  
  26. print(tf.__version__)
  27.  
  28.  
  29. X_train, X_test, y_train, y_test = train_test_split(DATA, LABEL, test_size=0.30, random_state=45)
  30. print(X_train.shape)
  31. print(X_test.shape)
  32. print(y_train.shape)
  33. print(y_test.shape)
  34. print(X_train[0])
  35.  
  36.  
  37. X_train = np.reshape(X_train,(X_train.shape[0],200,180,1))
  38. X_test = np.reshape(X_test,(X_test.shape[0],200,180,1))
  39.  
  40. # convert the data from binary to float
  41.  
  42. X_train = X_train.astype('float32')
  43. X_test = X_test.astype('float32')
  44. X_train /= 255
  45. X_test /= 255
  46.  
  47.  
  48. model = Sequential()
  49.  
  50. model.add(Conv2D(32, kernel_size=(5,5), strides=(1, 1),
  51. activation='relu',
  52. input_shape=([200,180,1])))
  53.  
  54. model.add(LeakyReLU(alpha=0.1))
  55. model.add(MaxPooling2D(pool_size=(2,2)))
  56.  
  57. model.add(Conv2D(64,(5,5), activation='relu'))
  58. model.add(LeakyReLU(alpha=0.1))
  59. model.add(MaxPooling2D(pool_size=(2,2)))
  60.  
  61. model.add(Conv2D(128,(5,5), activation='relu'))
  62. model.add(LeakyReLU(alpha=0.1))
  63. model.add(MaxPooling2D(pool_size=(2,2)))
  64. model.add(Dropout(0.30))
  65.  
  66.  
  67. model.add(Flatten())
  68. model.add(Dense(1000, activation='relu'))
  69. model.add(Dropout(0.5))
  70. model.add(Dense(72, activation='softmax'))
  71.  
  72. # When we compile the model, we declare the loss function and the optimizer
  73.  
  74. model.compile(loss=keras.losses.categorical_crossentropy,
  75.  
  76. optimizer=keras.optimizers.Adam(),
  77. metrics=['accuracy'])
  78.  
  79. # Train the model
  80. hist = model.fit(X_train, Y_train,batch_size=32,epochs=12, verbose=1, validation_data=(X_test, Y_test))
  81.  
  82.  
  83. score = model.evaluate(X_test, Y_test, verbose=0)
  84.  
  85. print("%s: %.2f%%" % ('Accuracy', score[1]*100))
Add Comment
Please, Sign In to add comment