Not a member of Pastebin yet?
Sign up — it unlocks many cool features!
def lrelu1(x, a=0.2):
    """Leaky ReLU computed from a single absolute-value identity.

    Uses the algebraic form 0.5*(1+a)*x + 0.5*(1-a)*|x|, which equals
    x for x > 0 and a*x for x < 0:
      leak part:   a*x/2 - a*|x|/2
      linear part: x/2   + |x|/2

    Args:
        x: input tensor.
        a: negative-slope coefficient (default 0.2).

    Returns:
        Tensor with the leaky-ReLU activation applied elementwise.
    """
    with tf.name_scope("lrelu"):
        # Without this identity, the op appears to have two inputs on
        # the graph (x feeds both terms of the sum below).
        x = tf.identity(x)
        pos_coef = 0.5 * (1 + a)
        neg_coef = 0.5 * (1 - a)
        return pos_coef * x + neg_coef * abs(x)
def lrelu2(x, a=0.2):
    """Leaky ReLU expressed as the difference of two ReLUs.

    relu(x) - a*relu(-x) equals x for x > 0 and a*x for x < 0.

    Args:
        x: input tensor.
        a: negative-slope coefficient (default 0.2).

    Returns:
        Tensor with the leaky-ReLU activation applied elementwise.
    """
    with tf.name_scope("lrelu"):
        positive_part = tf.nn.relu(x)
        negative_part = tf.nn.relu(-x)
        return positive_part - a * negative_part
Add Comment
Please sign in to add a comment