Advertisement
Guest User

Untitled

a guest
Oct 15th, 2019
96
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 3.59 KB | None | 0 0
  1. # IMPORTING ALL THE NECESSARY LIBRARIES
  2. from keras.models import Sequential
  3. from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout
  4. from keras.preprocessing.image import ImageDataGenerator
  5. from keras.callbacks import EarlyStopping
  6.  
  7. import matplotlib.pyplot as plt
  8.  
# EarlyStopping patience: number of consecutive epochs the monitored
# validation loss may fail to improve before training is halted.
VALIDATION_PATIENCE = 20
  11.  
  12. # INITIATING THE SEQUENTIAL MODEL
  13. classifier = Sequential()
  14.  
  15. '''
  16. ADDING THE CONVOLUTIONAL LAYER WITH:
  17. 1)NUMBER OF FILTERS TO BE USED FOR CONVOLUTION
  18. 2)FILTER SIZE
  19. 3)INPUT SHAPE OF THE IMAGES(NUMBER OF ROWS, NUMBER OF COLUMNS, NUMBER OF CHANNELS)
  20. 4)AN ACTIVATION FUNCTION
  21. '''
  22. classifier.add(Conv2D(32, (3, 3), input_shape = (64, 64, 3), activation = 'relu'))
  23. # ADDING A POOLING LAYER MATRIX OF SIZE 2X2
  24. classifier.add(MaxPooling2D(pool_size = (2, 2)))
  25. # DROPPING OUT RANDOM NODES TO AVOID OVERFITTING
  26. classifier.add(Dropout(0.25))
  27.  
  28. '''
  29. ADDING ONE MORE CONVOLUTIONAL LAYER, BUT THIS TIME THE INPUT OF THIS LAYER
  30. WILL BE THE OUTPUT OF THE PREVIOUS LAYER
  31. '''
  32. classifier.add(Conv2D(32, (3, 3), activation = 'relu'))
  33. # ADDING ANOTHER POOLING LAYER
  34. classifier.add(MaxPooling2D(pool_size = (2, 2)))
  35. # DROPPING SOME MORE RANDOM NODES
  36. classifier.add(Dropout(0.25))
  37.  
  38. # ADDING ONE MORE CONVOLUTIONAL LAYER WITH 64 FILTERS
  39. classifier.add(Conv2D(64, (3, 3), activation = 'relu'))
  40. # ADDING ANOTHER POOLING LAYER
  41. classifier.add(MaxPooling2D(pool_size = (2, 2)))
  42. # # DROPPING SOME MORE RANDOM NODES
  43. classifier.add(Dropout(0.25))
  44.  
  45. # ADDING ONE MORE CONVOLUTIONAL LAYER WITH 128 FILTERS
  46. classifier.add(Conv2D(128, (3, 3), activation = 'relu'))
  47. # ADDING ANOTHER POOLING LAYER
  48. classifier.add(MaxPooling2D(pool_size = (2, 2)))
  49. # DROPPING SOME MORE RANDOM NODES
  50. classifier.add(Dropout(0.25))
  51.  
  52. # FLATTENING THE OUTPUT OF THE PREVIOUS LAYER TO 1D
  53. classifier.add(Flatten())
  54.  
  55. # ADDING A FULLY CONNECTED DENSE NEURAL NETWORK WITH INPUT BEING THE FLATTEND ARRAY
  56. classifier.add(Dense(units = 128, activation = 'relu'))
  57. # DROPPING OUT RANDOM NODES TO AVOID OVERFITTING
  58. classifier.add(Dropout(0.25))
  59.  
  60. '''
  61. ADDING A FULLY CONNECTED DENSE NEURAL NETWORK, BUT THIS TIME THE NETWORK WILL ONLY
  62. CONSIST OF NODES EQUIVALENT TO NUMBER OF CATEGORIES(LABELS). NOTICE THE ACTIVATION
  63. FUNCTION IS ALSO BEEN CHANGED TO A SIGMOID INSTEAD OF A "Rectified Linear Unit"(ReLU)
  64. '''
  65. classifier.add(Dense(units = 1, activation = 'sigmoid'))
  66.  
  67. # COMPILING THE FINAL CREATED MODEL WITH AN OPTIMIZER, LOSS FUNCTION AND EVALUATION METRICS
  68. classifier.compile(
  69. optimizer = 'adam',
  70. loss = 'binary_crossentropy',
  71. metrics = ['accuracy']
  72. )
  73.  
  74. '''
  75. IMPLEMENTING A CONVERGENCE MONITOR SO THE THE MODEL AUTOMATICALLY STOPS THE ITERATION
  76. AS SOON AS THE CONVERGENCE IS ACHIEVED
  77. '''
  78. stopper = EarlyStopping(monitor='val_loss', patience=VALIDATION_PATIENCE)
  79.  
  80. '''
  81. CREATING AN "ImageDataGenerator" INSTANCE THAT WILL RANDOMLY-
  82.  
  83. 1)ZOOM
  84. 2)RESCALE
  85. 3)FLIP(HORIZONTALLY & VERTICALLY)
  86.  
  87. -THE DATA
  88. '''
  89. train_datagen = ImageDataGenerator(
  90. rescale=1./255,
  91. shear_range=0.2,
  92. zoom_range=0.2,
  93. horizontal_flip=True,
  94. vertical_flip = True
  95. )
  96. # CREATING A TRAINING DATASET
  97. training_set = train_datagen.flow_from_directory(
  98. '../dataset/training_set',
  99. target_size=(64, 64),
  100. batch_size=32,
  101. class_mode='binary'
  102. )
  103.  
  104.  
  105. # CREATING AN "ImageDataGenerator" INSTANCE THAT WILL RESCALE THE DATA
  106. test_datagen = ImageDataGenerator(rescale=1./255)
  107.  
  108. # CREATING A TESTING DATASET
  109. test_set = test_datagen.flow_from_directory(
  110. '../dataset/test_set',
  111. target_size=(64, 64),
  112. batch_size=32,
  113. class_mode='binary'
  114. )
  115.  
  116.  
  117. # TRAINS THE MODEL ON DATA GENERATED BATCH-BY-BATCH
  118. classifier.fit_generator(
  119. training_set,
  120. steps_per_epoch=8000,
  121. callbacks = [stopper],
  122. epochs=25,
  123. validation_data=test_set,
  124. validation_steps=2000,
  125. use_multiprocessing = True
  126. )
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement