# Assumed imports: tf.keras, with RectifiedAdam provided by TensorFlow Addons.
from tensorflow.keras import layers, models
from tensorflow.keras.applications import VGG16
from tensorflow.keras.optimizers import Adam
from tensorflow_addons.optimizers import RectifiedAdam

# Train the same frozen-VGG16 binary classifier with each optimizer and
# collect the learning curves for comparison.
results = {}
OPTIMIZERS = [Adam(lr=1e-3), RectifiedAdam(lr=1e-3)]

for optim in OPTIMIZERS:
    # Frozen VGG16 convolutional base plus a small trainable classifier head.
    base_model = VGG16(input_shape=(224, 224, 3), include_top=False)
    base_model.trainable = False

    model = models.Sequential()
    model.add(base_model)
    model.add(layers.Flatten())
    model.add(layers.Dense(256, activation='relu'))
    model.add(layers.Dense(1, activation='sigmoid'))
    model.summary()

    model.compile(optimizer=optim,
                  loss='binary_crossentropy',
                  metrics=['accuracy'])

    # train_batches, valid_batches and LR_function (a learning-rate callback)
    # are assumed to be defined elsewhere; batch size is 64. In newer TF versions
    # model.fit accepts generators directly and fit_generator is deprecated.
    history = model.fit_generator(train_batches,
                                  steps_per_epoch=train_batches.samples // 64,
                                  validation_data=valid_batches,
                                  validation_steps=valid_batches.samples // 64,
                                  epochs=100,
                                  callbacks=[LR_function])

    # tf.keras logs 'accuracy'/'val_accuracy'; older standalone Keras used 'acc'/'val_acc'.
    train_acc = history.history['accuracy']
    train_loss = history.history['loss']
    val_acc = history.history['val_accuracy']
    val_loss = history.history['val_loss']   # the original read 'val_acc' here by mistake

    # Tag each run by the optimizer's class name rather than its object repr.
    exp_tag_name = f'exp-optimizer-{type(optim).__name__}'
    results[exp_tag_name] = {'train-acc': train_acc,
                             'train-loss': train_loss,
                             'val-acc': val_acc,
                             'val-loss': val_loss}
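
The snippet assumes that train_batches, valid_batches and LR_function already exist. A minimal sketch of one way to create them, assuming a directory-per-class image layout (the 'data/train' and 'data/valid' paths are placeholders) and a ReduceLROnPlateau learning-rate callback:

from tensorflow.keras.callbacks import ReduceLROnPlateau
from tensorflow.keras.preprocessing.image import ImageDataGenerator

# Hypothetical data layout: data/train/<class>/*.jpg and data/valid/<class>/*.jpg.
datagen = ImageDataGenerator(rescale=1.0 / 255)

train_batches = datagen.flow_from_directory('data/train',
                                            target_size=(224, 224),
                                            class_mode='binary',
                                            batch_size=64)
valid_batches = datagen.flow_from_directory('data/valid',
                                            target_size=(224, 224),
                                            class_mode='binary',
                                            batch_size=64,
                                            shuffle=False)

# Halve the learning rate when validation loss stops improving.
LR_function = ReduceLROnPlateau(monitor='val_loss', factor=0.5,
                                patience=3, verbose=1)

With these in place, the results dict holds one learning-curve set per optimizer tag (e.g. 'exp-optimizer-Adam' vs 'exp-optimizer-RectifiedAdam'), which can then be plotted side by side to compare the two optimizers.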