# DenseNet-40-12 training script (Keras) — recovered from a Pastebin paste.
- from __future__ import print_function
- import os.path
- import densenet
- import numpy as np
- import sklearn.metrics as metrics
- #from keras.datasets import cifar10
- from keras.utils import np_utils
- from keras.preprocessing.image import ImageDataGenerator
- from keras.optimizers import Adam
- from keras.callbacks import ModelCheckpoint, ReduceLROnPlateau
- from keras import backend as K
- batch_size = 8
- nb_classes = 3
- nb_epoch = 10
- from keras.callbacks import TensorBoard
- tbCallBack = TensorBoard(log_dir='./log', histogram_freq=1,
- write_graph=True,
- write_grads=True,
- batch_size=batch_size,
- write_images=True)
- img_rows, img_cols = 240, 320
- img_channels = 3
- img_dim = (img_channels, img_rows, img_cols) if K.image_dim_ordering() == "th" else (img_rows, img_cols, img_channels)
- depth = 40
- nb_dense_block = 3
- growth_rate = 12
- nb_filter = -1
- dropout_rate = 0.0 # 0.0 for data augmentation
- model = densenet.DenseNet(img_dim, classes=nb_classes, depth=depth, nb_dense_block=nb_dense_block,
- growth_rate=growth_rate, nb_filter=nb_filter, dropout_rate=dropout_rate, weights=None)
- print("Model created")
- model.summary()
- optimizer = Adam(lr=1e-3) # Using Adam instead of SGD to speed up training
- model.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=["accuracy"])
- print("Finished compiling")
- print("Building model...")
- # Load model
- weights_file="/content/gdrive/My Drive/PhD/weights/DenseNet-40-12.h5"
- if os.path.exists(weights_file):
- #model.load_weights(weights_file, by_name=True)
- print("Model loaded.")
- out_dir="/content/gdrive/My Drive/PhD/weights/"
- lr_reducer = ReduceLROnPlateau(monitor='val_acc', factor=np.sqrt(0.1),
- cooldown=0, patience=5, min_lr=1e-5)
- model_checkpoint= ModelCheckpoint(weights_file, monitor="val_acc", save_best_only=True,
- save_weights_only=True, verbose=1)
- callbacks=[lr_reducer, model_checkpoint]
- my_training_batch_generator = My_Generator(TrainPaths, TrainLabels, batch_size)
- my_validation_batch_generator = My_Generator(TestPaths, TestLabels, batch_size)
- model.fit_generator(generator=my_training_batch_generator,
- steps_per_epoch=len(TrainPaths) // batch_size, epochs=nb_epoch,
- callbacks=callbacks,
- validation_data=my_validation_batch_generator,
- validation_steps=len(TestPaths) // batch_size,
- verbose=1)