import tensorflow as tf


def lrelu1(x, a=0.2):
    with tf.name_scope("lrelu"):
        # adding these together creates the leak part and the linear part,
        # then cancels them out by subtracting/adding an absolute-value term
        # leak:   a*x/2 - a*abs(x)/2
        # linear: x/2 + abs(x)/2

        # without this, the block looks like it has 2 inputs on the graph
        x = tf.identity(x)
        return (0.5 * (1 + a)) * x + (0.5 * (1 - a)) * abs(x)


def lrelu2(x, a=0.2):
    with tf.name_scope("lrelu"):
        return tf.nn.relu(x) - a * tf.nn.relu(-x)
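
A minimal usage sketch, assuming TensorFlow 2.x eager execution: both formulations reduce to the piecewise definition of leaky ReLU (x for x > 0, a*x otherwise), so they should produce identical outputs.

# Usage sketch (assumes TensorFlow 2.x eager execution).
x = tf.constant([-3.0, -1.0, 0.0, 1.0, 3.0])
a = 0.2

y1 = lrelu1(x, a)
y2 = lrelu2(x, a)
expected = tf.where(x > 0, x, a * x)  # piecewise leaky ReLU for reference

# all three evaluate to [-0.6, -0.2, 0.0, 1.0, 3.0]
tf.debugging.assert_near(y1, expected)
tf.debugging.assert_near(y2, expected)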