Parametric Exponential Linear Unit in TensorFlow
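PELU generalizes ELU with two learnable parameters per feature. Following the paper's definition, f(x) = (alpha / beta) * x for x >= 0 and f(x) = alpha * (exp(x / beta) - 1) for x < 0, with alpha, beta > 0. The snippet below creates one alpha and one beta per feature, both initialized to 1.0 (so the activation starts out as a standard ELU), and combines the two branches with ReLU masks so everything stays element-wise.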
import tensorflow as tf


def pelu(x):
    """Parametric Exponential Linear Unit (https://arxiv.org/abs/1605.09332v1)."""
    with tf.variable_scope(x.op.name + '_activation',
                           initializer=tf.constant_initializer(1.0)):
        shape = x.get_shape().as_list()[1:]
        # One learnable alpha and beta per feature; the paper constrains both to
        # be positive (not enforced here). Starting at 1.0 makes PELU equal ELU.
        alpha = tf.get_variable('alpha', shape)
        beta = tf.get_variable('beta', shape)
        # Positive branch: (alpha / beta) * x; the epsilon guards against division by zero.
        positive = tf.nn.relu(x) * alpha / (beta + 1e-9)
        # Negative branch: alpha * (exp(x / beta) - 1); -relu(-x) is min(x, 0), so zero for x >= 0.
        negative = alpha * (tf.exp((-tf.nn.relu(-x)) / (beta + 1e-9)) - 1)
        return negative + positive
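As a minimal usage sketch (assuming the same TF 1.x graph API; the placeholder shape, layer width, and the names inputs and hidden are illustrative, not part of the original post), pelu can wrap any layer's pre-activation output:

import tensorflow as tf

inputs = tf.placeholder(tf.float32, [None, 784], name='inputs')
hidden = tf.layers.dense(inputs, 64, name='hidden')  # linear pre-activation, no built-in activation
activated = pelu(hidden)  # creates per-feature alpha and beta of shape [64] for this layer

Because the variable scope is derived from the input tensor's op name, each layer passed through pelu gets its own pair of parameters.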