Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
# Compare optimizers (Adam vs. RectifiedAdam) by training the same frozen
# VGG16-based binary classifier under each one and recording the learning
# curves in `results`, keyed by an experiment tag.
results = {}
OPTIMIZERS = [Adam(lr=1e-3), RectifiedAdam(lr=1e-3)]

for optim in OPTIMIZERS:
    # Rebuild the model from scratch each iteration so every optimizer
    # starts from identical ImageNet-pretrained weights.
    base_model = VGG16(input_shape=(224, 224, 3), include_top=False)
    base_model.trainable = False  # freeze the convolutional backbone

    model = models.Sequential()
    model.add(base_model)
    model.add(layers.Flatten())
    model.add(layers.Dense(256, activation='relu'))
    model.add(layers.Dense(1, activation='sigmoid'))  # binary classification head
    model.summary()

    model.compile(optimizer=optim,
                  loss='binary_crossentropy',
                  metrics=['accuracy'])

    # fit_generator is deprecated (removed in TF 2.x); model.fit accepts
    # generators directly.  Step counts must be integers — use ceiling
    # division so the final partial batch of each epoch is not dropped.
    history = model.fit(train_batches,
                        steps_per_epoch=-(-train_batches.samples // 64),
                        validation_data=valid_batches,
                        validation_steps=-(-valid_batches.samples // 64),
                        epochs=100,
                        callbacks=[LR_function])

    # History keys are 'acc'/'val_acc' on older Keras and
    # 'accuracy'/'val_accuracy' on newer versions — accept either.
    hist = history.history
    train_acc = hist.get('acc', hist.get('accuracy'))
    train_loss = hist['loss']
    val_acc = hist.get('val_acc', hist.get('val_accuracy'))
    # BUG FIX: the original read 'val_acc' here, so the recorded
    # validation loss was actually a copy of the validation accuracy.
    val_loss = hist['val_loss']

    # NOTE(review): f'{optim}' renders the optimizer object's default repr,
    # which may include a memory address and so differ between runs —
    # consider optim.__class__.__name__ if stable tags are needed.
    exp_tag_name = f'exp-optimizer-{optim}'
    results[exp_tag_name] = {'train-acc': train_acc,
                             'train-loss': train_loss,
                             'val-acc': val_acc,
                             'val-loss': val_loss}
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement