jack06215

[keras] triangle lr example

May 29th, 2020
from tensorflow.keras import Sequential
from tensorflow.keras.layers import LSTM, Dense, Masking, TimeDistributed, RepeatVector
from tensorflow.keras.optimizers import RMSprop
import numpy as np

import matplotlib.pyplot as plt
import tensorflow as tf

import math

class TriangularSchedule():
    """Triangular learning-rate schedule: linear ramp up to max_lr over the
    first inc_fraction of the cycle, then linear decay back to min_lr."""
    def __init__(self, min_lr, max_lr, cycle_length, inc_fraction=0.5):
        self.min_lr = min_lr
        self.max_lr = max_lr
        self.cycle_length = cycle_length
        self.inc_fraction = inc_fraction

    def __call__(self, iteration):
        if iteration <= self.cycle_length * self.inc_fraction:
            # increasing leg of the triangle: 0 -> 1
            unit_cycle = iteration / (self.cycle_length * self.inc_fraction)
        elif iteration <= self.cycle_length:
            # decreasing leg of the triangle: 1 -> 0
            unit_cycle = (self.cycle_length - iteration) / (self.cycle_length * (1 - self.inc_fraction))
        else:
            unit_cycle = 0
        # scale the unit triangle into [min_lr, max_lr]
        return (unit_cycle * (self.max_lr - self.min_lr)) + self.min_lr

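# Quick sanity check of TriangularSchedule on its own (these numbers are
# illustrative, not from the original paste): with a 10-step cycle and the
# default inc_fraction=0.5, the rate climbs linearly to max_lr at step 5,
# then falls back to min_lr at step 10.
tri = TriangularSchedule(min_lr=0.5, max_lr=1.0, cycle_length=10)
print([round(tri(i), 2) for i in range(11)])
# -> [0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 0.9, 0.8, 0.7, 0.6, 0.5]
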
class CyclicalSchedule():
    """Repeats an inner schedule cycle after cycle, optionally growing the
    cycle length and damping the peak magnitude at each restart."""
    def __init__(self, schedule_class, cycle_length, cycle_length_decay=1, cycle_magnitude_decay=1, **kwargs):
        self.schedule_class = schedule_class
        self.length = cycle_length
        self.length_decay = cycle_length_decay
        self.magnitude_decay = cycle_magnitude_decay
        self.kwargs = kwargs

    def __call__(self, iteration):
        # walk forward cycle by cycle until we reach the one containing `iteration`
        cycle_idx = 0
        cycle_length = self.length
        idx = self.length
        while idx <= iteration:
            cycle_length = math.ceil(cycle_length * self.length_decay)
            cycle_idx += 1
            idx += cycle_length
        cycle_offset = iteration - idx + cycle_length  # position inside the current cycle

        schedule = self.schedule_class(cycle_length=cycle_length, **self.kwargs)
        return schedule(cycle_offset) * self.magnitude_decay ** cycle_idx

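# An illustrative configuration showing the two decay knobs (these numbers
# are assumptions, not from the original paste): each restart stretches the
# cycle by 1.5x and shrinks the peak by 0.8x.
decayed = CyclicalSchedule(TriangularSchedule, min_lr=0.001, max_lr=0.01,
                           cycle_length=20, cycle_length_decay=1.5,
                           cycle_magnitude_decay=0.8)
print([round(decayed(i), 4) for i in (0, 10, 20, 35, 50)])
# -> [0.001, 0.01, 0.0008, 0.008, 0.0006]
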
# Build the schedule used for training: 300 epochs, cycling every 14 epochs
iterations = 300
cycle_len = math.floor(iterations / 20) - 1
schedule = CyclicalSchedule(TriangularSchedule, min_lr=0.009, max_lr=0.011, cycle_length=cycle_len)

# Visualise the learning rate over the epochs
plt.plot([i + 1 for i in range(iterations)], [schedule(i) for i in range(iterations)])
plt.title('Learning rate for each epoch')
plt.xlabel("Epoch")
plt.ylabel("Learning Rate")
plt.show()

# Precompute the learning rate for every epoch and serve it via a callback
lr_vec = [schedule(i) for i in range(iterations)]

def lr_scheduler(epoch, lr):
    return lr_vec[epoch]

callbacks = [tf.keras.callbacks.LearningRateScheduler(lr_scheduler, verbose=0)]

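# The paste uses `model` and `X_train_padded` without defining them. Below is
# a minimal sketch consistent with the imports above (an LSTM sequence
# autoencoder, since fit() reconstructs its own input); the layer sizes and
# dummy data here are assumptions, not part of the original.
timesteps, n_features = 50, 3
X_train_padded = np.random.rand(1000, timesteps, n_features)
model = Sequential([
    Masking(mask_value=0.0, input_shape=(timesteps, n_features)),
    LSTM(32),                           # encode the padded sequence to a vector
    RepeatVector(timesteps),            # repeat it for each output timestep
    LSTM(32, return_sequences=True),    # decode back into a sequence
    TimeDistributed(Dense(n_features))  # per-timestep reconstruction
])
model.compile(optimizer=RMSprop(), loss='mse')
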
train_history = model.fit(X_train_padded,
                          X_train_padded,  # autoencoder: the input reconstructs itself
                          epochs=300,
                          verbose=2,
                          batch_size=100,
                          callbacks=callbacks).history  # `callbacks` is already a list
print(train_history.keys())
plt.plot(train_history['loss'])
plt.show()