Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- import numpy as np
- import os
- import numpy
- from tensorflow.python.keras.layers import Dense
- from tensorflow.python.keras import optimizers
- from keras import regularizers
- from keras.regularizers import l2
- from keras.layers import Dropout
- from keras.applications.mobilenet import MobileNet
- from keras.layers import GlobalAveragePooling2D, Dense, Dropout, Flatten, BatchNormalization
- from keras.models import Sequential
- from keras.preprocessing.image import ImageDataGenerator
- from keras.callbacks import EarlyStopping, ModelCheckpoint, LearningRateScheduler
- from keras.optimizers import Adam
- import sys
- from keras import optimizers
import math

# ---- Training configuration --------------------------------------------
# Number of target classes in the dataset.
NUM_CLASSES = 4
# Colour images -> 3 input channels.
CHANNELS = 3
# Images are resized to IMAGE_RESIZE x IMAGE_RESIZE before entering the net.
IMAGE_RESIZE = 224
# Softmax output for mutually-exclusive multi-class classification.
DENSE_LAYER_ACTIVATION = 'softmax'
# Categorical cross-entropy matches the softmax / one-hot label setup.
OBJECTIVE_FUNCTION = 'categorical_crossentropy'
# Metrics reported during training (name kept for backward compatibility).
LOSS_METRICS = ['accuracy']
NUM_EPOCHS = 1000
# Stop after this many epochs without improvement (see EarlyStopping below).
EARLY_STOP_PATIENCE = 50
BATCH_SIZE_TRAINING = 128
BATCH_SIZE_VALIDATION = 64
# Dataset sizes the step counts are derived from.
TRAINING_SET_SIZE = 5096
VALIDATION_SET_SIZE = 1456
# BUG FIX: plain '/' produced floats (39.8125 / 22.75) and effectively
# dropped the final partial batch; Keras expects an integer step count, so
# round up so every sample is seen once per epoch.
STEPS_PER_EPOCH_TRAINING = math.ceil(TRAINING_SET_SIZE / BATCH_SIZE_TRAINING)
STEPS_PER_EPOCH_VALIDATION = math.ceil(VALIDATION_SET_SIZE / BATCH_SIZE_VALIDATION)
# ---- Model definition ---------------------------------------------------
# BUG FIX: the original prepended a trainable Dense(3) layer solely to give
# the network an input shape; that layer mixed the RGB channels through a
# learned 3x3 matrix before MobileNet ever saw the image. Declare the input
# shape on MobileNet directly instead (weights=None -> trained from scratch,
# include_top=False -> we attach our own classifier head).
base_mobilenet_model = MobileNet(
    include_top=False,
    weights=None,
    input_shape=(IMAGE_RESIZE, IMAGE_RESIZE, CHANNELS),
)

model = Sequential()
model.add(base_mobilenet_model)
model.add(Dropout(0.5))
model.add(BatchNormalization())
# Collapse the spatial feature map to one feature vector per image.
model.add(GlobalAveragePooling2D())
# Classifier head: one softmax unit per class.
model.add(Dense(NUM_CLASSES, activation=DENSE_LAYER_ACTIVATION))
model.summary()

# SGD with Nesterov momentum; `lr`/`decay` keyword names match the older
# Keras API used throughout this script. NOTE(review): newer Keras spells
# this `learning_rate` — confirm against the installed version.
opt = optimizers.SGD(lr=0.001, decay=1e-6, momentum=0.5, nesterov=True)
model.compile(optimizer=opt, loss=OBJECTIVE_FUNCTION, metrics=LOSS_METRICS)
# Side length of the square images fed to the network.
image_size = IMAGE_RESIZE

# Training-time augmentation: random shifts, flips, rotation, brightness and
# zoom, on top of rescaling pixel values from [0, 255] into [0, 1].
shift_fraction = 0.2
_augmentation = dict(
    rescale=1.0 / 255.0,
    width_shift_range=shift_fraction,
    height_shift_range=shift_fraction,
    horizontal_flip=True,
    vertical_flip=True,
    rotation_range=90,
    brightness_range=[0.2, 1.0],
    zoom_range=[0.5, 1.0],
)
data_generator = ImageDataGenerator(**_augmentation)

# Validation images are only rescaled — no augmentation.
data_generator2 = ImageDataGenerator(rescale=1.0 / 255.0)
# Keyword arguments shared by both directory iterators: resize every image
# to image_size x image_size and emit one-hot ('categorical') labels.
_flow_kwargs = dict(
    target_size=(image_size, image_size),
    class_mode='categorical',
)

# Augmented training batches, one class per sub-directory of trainset/.
train_generator = data_generator.flow_from_directory(
    'trainset/',
    batch_size=BATCH_SIZE_TRAINING,
    **_flow_kwargs,
)

# Rescale-only validation batches read from testset/.
validation_generator = data_generator2.flow_from_directory(
    'testset/',
    batch_size=BATCH_SIZE_VALIDATION,
    **_flow_kwargs,
)
# BUG FIX: the original checkpoint pattern contained literal ':' characters
# ("...-vacc:{val_accuracy:.2f}-..."), which are illegal in Windows file
# names; use '-' separators instead. The {epoch}/{val_accuracy}/{accuracy}
# placeholders are filled in by ModelCheckpoint at the end of each epoch.
filepath = "weights-improvement-{epoch:02d}-vacc-{val_accuracy:.2f}-tacc-{accuracy:.2f}.hdf5"

# Stop when validation accuracy has not improved for EARLY_STOP_PATIENCE
# epochs. NOTE(review): the 'val_accuracy' metric name assumes a recent
# Keras; older releases report 'val_acc' — confirm against the installed
# version.
cb_early_stopper = EarlyStopping(monitor='val_accuracy', mode='max',
                                 verbose=1, patience=EARLY_STOP_PATIENCE)

# save_best_only=False writes a checkpoint every epoch; the epoch number in
# the filename keeps the files distinct.
cb_checkpointer = ModelCheckpoint(filepath=filepath, monitor='val_accuracy',
                                  save_best_only=False, mode='auto')
# Train the model from the directory iterators.
# NOTE(review): fit_generator is deprecated in favour of model.fit in
# TF 2.1+; kept here to match the older Keras imports this script uses —
# confirm against the installed version before migrating.
# NOTE(review): ensure steps_per_epoch / validation_steps evaluate to
# integers; Keras versions differ in whether they tolerate floats here.
fit_history = model.fit_generator(train_generator,
        steps_per_epoch=STEPS_PER_EPOCH_TRAINING,
        epochs = NUM_EPOCHS,
        validation_data=validation_generator,
        validation_steps=STEPS_PER_EPOCH_VALIDATION,
        verbose=2,
        # Per-epoch checkpointing plus early stopping on val_accuracy.
        callbacks = [cb_checkpointer, cb_early_stopper]
)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement