from tensorflow.keras import Sequential
from tensorflow.keras.utils import Sequence
from tensorflow.keras.layers import LSTM, Dense, Masking, BatchNormalization, TimeDistributed, Lambda, RepeatVector
from tensorflow.keras.optimizers import RMSprop
import numpy as np
from tensorflow.keras.preprocessing.sequence import pad_sequences
import matplotlib.pyplot as plt
import tensorflow as tf
import math
class TriangularSchedule:
    """Triangular cyclical learning rate: ramps linearly from min_lr to max_lr
    over the first inc_fraction of the cycle, then linearly back down."""
    def __init__(self, min_lr, max_lr, cycle_length, inc_fraction=0.5):
        self.min_lr = min_lr
        self.max_lr = max_lr
        self.cycle_length = cycle_length
        self.inc_fraction = inc_fraction

    def __call__(self, iteration):
        if iteration <= self.cycle_length * self.inc_fraction:
            # rising edge: 0 -> 1
            unit_cycle = iteration / (self.cycle_length * self.inc_fraction)
        elif iteration <= self.cycle_length:
            # falling edge: 1 -> 0
            unit_cycle = (self.cycle_length - iteration) / (self.cycle_length * (1 - self.inc_fraction))
        else:
            unit_cycle = 0
        # scale the unit triangle into [min_lr, max_lr]
        adjusted_cycle = (unit_cycle * (self.max_lr - self.min_lr)) + self.min_lr
        return adjusted_cycle
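
# Quick sanity check (illustrative addition, not in the original paste):
# with cycle_length=10 the rate should peak at iteration 5 and return to
# min_lr at iteration 10.
tri = TriangularSchedule(min_lr=0.001, max_lr=0.01, cycle_length=10)
print([round(tri(i), 4) for i in range(11)])  # 0.001 ... 0.01 ... 0.001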
class CyclicalSchedule:
    """Repeats a one-cycle schedule (e.g. TriangularSchedule) indefinitely,
    optionally shrinking each cycle's length and magnitude."""
    def __init__(self, schedule_class, cycle_length, cycle_length_decay=1, cycle_magnitude_decay=1, **kwargs):
        self.schedule_class = schedule_class
        self.length = cycle_length
        self.length_decay = cycle_length_decay
        self.magnitude_decay = cycle_magnitude_decay
        self.kwargs = kwargs

    def __call__(self, iteration):
        # walk forward cycle by cycle until we find the one containing `iteration`
        cycle_idx = 0
        cycle_length = self.length
        idx = self.length
        while idx <= iteration:
            cycle_length = math.ceil(cycle_length * self.length_decay)
            cycle_idx += 1
            idx += cycle_length
        cycle_offset = iteration - idx + cycle_length
        schedule = self.schedule_class(cycle_length=cycle_length, **self.kwargs)
        # magnitude decay scales the schedule output once per completed cycle
        return schedule(cycle_offset) * self.magnitude_decay ** cycle_idx
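
# Illustrative sketch of the decay knobs (an addition, with assumed values):
# cycle_length_decay=0.5 halves each successive cycle's length, and
# cycle_magnitude_decay=0.5 halves the schedule's output per completed cycle.
demo = CyclicalSchedule(TriangularSchedule, cycle_length=10,
                        cycle_length_decay=0.5, cycle_magnitude_decay=0.5,
                        min_lr=0.001, max_lr=0.01)
print([round(demo(i), 5) for i in range(20)])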
iterations = 300
cycle_len = math.floor(iterations / 20) - 1  # 14 epochs per cycle

schedule = CyclicalSchedule(TriangularSchedule, min_lr=0.009, max_lr=0.011, cycle_length=cycle_len)

plt.plot([i + 1 for i in range(iterations)], [schedule(i) for i in range(iterations)])
plt.title('Learning rate for each epoch')
plt.xlabel('Epoch')
plt.ylabel('Learning Rate')
plt.show()
# precompute one learning rate per epoch and hand them to Keras
lr_vec = [schedule(i) for i in range(iterations)]

def lr_scheduler(epoch, lr):
    return lr_vec[epoch]

callbacks = [tf.keras.callbacks.LearningRateScheduler(lr_scheduler, verbose=0)]
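
# The paste trains `model` on `X_train_padded`, both defined elsewhere.
# A minimal sketch consistent with the imports above (Masking, LSTM,
# RepeatVector, TimeDistributed) is an LSTM autoencoder over zero-padded
# sequences; every shape and size below is an assumption for illustration,
# not the original author's model or data.
max_len, n_features = 50, 3  # assumed dimensions
X_train_padded = np.random.rand(500, max_len, n_features)  # stand-in data;
# real data would come from pad_sequences(sequences, padding='post', dtype='float32')

model = Sequential([
    Masking(mask_value=0.0, input_shape=(max_len, n_features)),  # skip padded steps
    LSTM(32),                            # encode each sequence into one vector
    RepeatVector(max_len),               # repeat it for every output timestep
    LSTM(32, return_sequences=True),     # decode back into a sequence
    TimeDistributed(Dense(n_features)),  # per-timestep reconstruction
])
model.compile(optimizer=RMSprop(), loss='mse')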
train_history = model.fit(X_train_padded,
                          X_train_padded,  # autoencoder target: reconstruct the input
                          epochs=iterations,  # keep epochs <= len(lr_vec)
                          verbose=2,
                          batch_size=100,
                          callbacks=callbacks).history  # was callbacks=[callbacks], a nested list
print(train_history.keys())
plt.plot(train_history['loss'])
plt.title('Training loss')
plt.show()