Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
# --- Training hyperparameters and model setup ---
INIT_LR = 5e-3  # initial learning rate (decayed per-epoch by lr_scheduler below)
BATCH_SIZE = 32
EPOCHS = 10  # NOTE(review): fit_generator below hard-codes epochs=100 — confirm which is intended

s = reset_tf_session()  # clear default graph
# don't call K.set_learning_phase() !!! (otherwise will enable dropout in train/test simultaneously)
model = make_model()  # define our model
# Define a Callback class that stops training once accuracy reaches 99.9%
class myCallback(tf.keras.callbacks.Callback):
    """Early-stopping callback: halts training when training accuracy exceeds 99.9%."""

    def on_epoch_end(self, epoch, logs=None):
        # `logs=None` instead of a mutable `{}` default; Keras passes the dict itself.
        logs = logs or {}
        # Key is 'acc' in older Keras and 'accuracy' in newer versions — accept both.
        acc = logs.get('acc', logs.get('accuracy'))
        # Guard against a missing metric: `None > 0.999` would raise TypeError.
        if acc is not None and acc > 0.999:
            print("\nReached 99.9% accuracy so cancelling training!")
            self.model.stop_training = True
# scheduler of learning rate (decay with epochs)
def lr_scheduler(epoch):
    """Return the learning rate for `epoch`: INIT_LR decayed by 10% each epoch."""
    decay_rate = 0.9
    return INIT_LR * decay_rate ** epoch
# callback for printing of actual learning rate used by optimizer
class LrHistory(keras.callbacks.Callback):
    """Prints the optimizer's current learning rate at the start of each epoch."""

    def on_epoch_begin(self, epoch, logs=None):
        # Use self.model (attached by Keras during fit) rather than the global
        # `model` variable, so the callback works with any model it is fitted to.
        print("Learning rate:", K.get_value(self.model.optimizer.lr))
# prepare model for fitting (loss, optimizer, etc)
model.compile(
    loss='categorical_crossentropy',  # we train 10-way classification
    optimizer=keras.optimizers.adamax(lr=INIT_LR),  # Adamax optimizer seeded with INIT_LR (the scheduler callback decays it per epoch)
    metrics=['accuracy']  # report accuracy during training
)
# NOTE(review): lowercase `adamax` is the legacy Keras alias — confirm the installed
# version still exposes it (the modern API spells it keras.optimizers.Adamax).

# we will save model checkpoints to continue training in case of kernel death
model_filename = 'cifar.{0:03d}.hdf5'
last_finished_epoch = None
#### uncomment below to continue training from model checkpoint
#### fill `last_finished_epoch` with your latest finished epoch
# from keras.models import load_model
# s = reset_tf_session()
# last_finished_epoch = 7
# model = load_model(model_filename.format(last_finished_epoch))
# Run training with all callbacks wired in. `initial_epoch` makes the
# checkpoint-resume setup above actually work: without it, reloading a
# checkpoint would still restart training (and the LR schedule) from epoch 0.
# With the default `last_finished_epoch = None` this is `initial_epoch=0`,
# i.e. identical behavior to before.
history = model.fit_generator(
    train_generator,
    validation_data=validation_generator,
    steps_per_epoch=100,
    epochs=100,
    validation_steps=50,
    verbose=2,
    callbacks=[myCallback(),
               keras.callbacks.LearningRateScheduler(lr_scheduler),
               LrHistory(),
               keras_utils.TqdmProgressCallback(),
               keras_utils.ModelSaveCallback(model_filename)],
    initial_epoch=last_finished_epoch or 0)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement