import cv2
import numpy as np
import os
import matplotlib.pyplot as plt
from tensorflow import keras
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout, Flatten, Conv2D, MaxPooling2D
import tensorflow.keras.losses as losses
import tensorflow.keras.optimizers as optimizers
from tensorflow.keras import backend as K


def show_train_history(train_history, str_test_acc, train_acc='acc', test_acc='val_acc'):
    # plot the training and validation accuracy curves from the history
    plt.plot(train_history.history[train_acc])
    plt.plot(train_history.history[test_acc])
    # style the diagram
    plt.title('Train History')
    plt.ylabel('Accuracy')
    plt.xlabel('Epoch')
    plt.legend(['train', 'test'], loc='upper left')
    # save the diagram
    plt.savefig(str_test_acc + '.png')


def enumerate_files(sets, path='All_gray_1_32_32', n_poses=3, n_samples=20):
    filenames, labels = [], []
    # iterate over the set names in the sets list
    for name_set in sets:
        # iterate over the pose indices
        for i_pose in range(n_poses):
            # each pose index covers 3 of the 9 numbered directories
            for j in range(3):
                dir_name = path + '/' + name_set + \
                    '/000' + str(i_pose * 3 + j) + '/'
                # iterate over the sample indices
                for i_sample in range(n_samples):
                    d = dir_name + '%04d/' % i_sample
                    # list all files in the 'd' directory
                    for f in os.listdir(d):
                        # keep only files whose extension is 'jpg'
                        if f.endswith('jpg'):
                            # append the full path to the filenames list
                            filenames.append(d + f)
                            # append the pose label to the labels list
                            labels.append(i_pose)
    return filenames, labels
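
# A sketch of the directory layout enumerate_files() assumes, inferred from the
# path construction above (the dataset itself is not included in this paste):
#
#   All_gray_1_32_32/
#       Set1/ ... Set5/                # one directory per set
#           0000/ ... 0008/            # 9 pose directories; label = index // 3
#               0000/ ... 0019/        # 20 sample directories per pose directory
#                   *.jpg              # 32x32 gray-level images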


def read_images(files):
    imgs = []
    # read every file in the files list
    for f in files:
        # read the image as gray-level
        img = cv2.imread(f, cv2.IMREAD_GRAYSCALE)
        # scale the 0-255 pixel values to floats in the range 0-1
        img = img / 255.0
        # append the image to the 'imgs' list
        imgs.append(img)
    return imgs


def load_datasets(datasets):
    # get all file paths and labels
    files, labels = enumerate_files(datasets)
    # read all the images
    list_of_arrays = read_images(files)
    # convert the list holding all images to a numpy array
    return np.array(list_of_arrays), labels


def main():
    train_sets = ['Set1', 'Set2', 'Set3']
    test_sets = ['Set4', 'Set5']

    batch_size = 360
    num_classes = 3
    epochs = 200
    input_shape = (32, 32, 1)

    # load all data and labels of the training and testing datasets
    x_train, y_train = load_datasets(train_sets)
    x_test, y_test = load_datasets(test_sets)

    # reshape the training and testing data to (N, 32, 32, 1)
    x_train = x_train.reshape((x_train.shape[0], 32, 32, 1))
    x_test = x_test.reshape((x_test.shape[0], 32, 32, 1))

    # one-hot encode the labels
    y_train_onehot = to_categorical(y_train, num_classes)
    y_test_onehot = to_categorical(y_test, num_classes)

    # building model
    model = Sequential()
    model.add(Conv2D(32, kernel_size=(3, 3), padding='same',
                     activation='tanh', input_shape=input_shape))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Dropout(0.35))

    model.add(Conv2D(32, kernel_size=(3, 3), padding='same', activation='relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Dropout(0.4))

    model.add(Conv2D(28, kernel_size=(3, 3), padding='same', activation='relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Dropout(0.45))

    model.add(Flatten())
    model.add(Dense(128, activation='relu'))
    model.add(Dropout(0.5))
    model.add(Dense(num_classes, activation='softmax'))
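
    # Shape note for the stack above: with padding='same', each 2x2 max-pool
    # halves the spatial size, so the feature maps go 32x32x32 -> 16x16x32 ->
    # 8x8x32 -> 4x4x28, and Flatten() feeds 4*4*28 = 448 values into Dense(128).
    # Uncommenting the next line would print the layer shapes to verify this:
    # model.summary()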

    # compile with categorical cross-entropy loss and the Adam optimizer
    model.compile(loss='categorical_crossentropy',
                  optimizer='adam', metrics=['acc'])

    # train, using the test set as validation data
    train_history = model.fit(x_train, y_train_onehot, batch_size=batch_size, epochs=epochs,
                              verbose=2, validation_data=(x_test, y_test_onehot))

    # evaluate on the test set and report the results
    score = model.evaluate(x_test, y_test_onehot, verbose=0)
    print(f'Test loss: {score[0]}\nTest accuracy: {score[1]}')

    # plot and save the training history, named after the test accuracy
    show_train_history(train_history, str(score[1]))


if __name__ == '__main__':
    main()
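
# A minimal inference sketch (assumes 'model' is the network trained in main()
# and 'sample.jpg' is a hypothetical 32x32 gray-level image; neither is defined
# at module level here, so this stays commented out). A single pose prediction
# could look like:
#
#   img = cv2.imread('sample.jpg', cv2.IMREAD_GRAYSCALE) / 255.0
#   pred = model.predict(img.reshape(1, 32, 32, 1))
#   print('predicted pose:', np.argmax(pred))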