import keras.backend as K
from keras.layers import Layer


class KLDivergenceLayer(Layer):
    """Identity transform layer that adds KL divergence
    to the final model loss.
    """

    def __init__(self, beta=.5, *args, **kwargs):
        self.is_placeholder = True
        self.beta = beta
        super(KLDivergenceLayer, self).__init__(*args, **kwargs)

    def call(self, inputs):
        mu, log_var = inputs

        # KL divergence between N(mu, sigma^2) and the standard normal
        # prior, summed over the latent dimensions of each sample.
        kl_batch = - self.beta * K.sum(1 + log_var
                                       - K.square(mu)
                                       - K.exp(log_var), axis=-1)

        # Register the batch-mean KL term as an auxiliary loss; the layer
        # itself passes its inputs through unchanged.
        self.add_loss(K.mean(kl_batch), inputs=inputs)

        return inputs

    def get_config(self):
        # Include beta so the layer can be re-instantiated from its config.
        config = dict(beta=self.beta)
        base_config = super(KLDivergenceLayer, self).get_config()
        base_config.update(config)
        return base_config
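
# A minimal usage sketch showing where the layer might sit in a VAE
# encoder. Everything below (the network sizes, layer names, and the
# sampling helper) is an illustrative assumption, not part of the paste:
# the layer is applied to the (mu, log_var) pair as an identity
# pass-through before the reparameterisation step.
from keras.layers import Input, Dense, Lambda
from keras.models import Model

original_dim, latent_dim = 784, 2  # illustrative sizes (assumed)

x = Input(shape=(original_dim,))
h = Dense(256, activation='relu')(x)

mu = Dense(latent_dim)(h)
log_var = Dense(latent_dim)(h)

# Identity pass-through that registers the KL term as a model loss.
mu, log_var = KLDivergenceLayer()([mu, log_var])

def sample_z(args):
    # Reparameterisation trick: z = mu + sigma * eps, eps ~ N(0, I).
    mu, log_var = args
    eps = K.random_normal(shape=K.shape(mu))
    return mu + K.exp(0.5 * log_var) * eps

z = Lambda(sample_z)([mu, log_var])

encoder = Model(x, z)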