SELU (Self-Normalizing Neural Networks)

SELU
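
SELU is the activation function from Klambauer et al. (2017), "Self-Normalizing Neural Networks": selu(x) = scale * x for x > 0, and scale * alpha * (exp(x) - 1) for x <= 0, with alpha ≈ 1.6733 and scale ≈ 1.0507. These constants are chosen so that, with suitably initialized weights, activations are driven toward zero mean and unit variance from layer to layer, which is what makes the network self-normalizing. The first snippet below builds SELU by hand from relu and exp: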

import tensorflow as tf

ALPHA = 1.6732632423543772848170429916717   # SELU alpha (Klambauer et al., 2017)
LAMB  = 1.0507009873554804934193349852946   # SELU lambda (scale)

def applyselu( tensor ):
    relu = tf.nn.relu( tensor )                 # x>0: x,  x<=0: 0
    neg_relu = tf.subtract( tensor, relu )      # x>0: 0,  x<=0: x  (negative part)
    selu_neg = tf.subtract( tf.multiply( ALPHA, tf.exp( neg_relu ) ), ALPHA )  # x<=0: ALPHA*(exp(x)-1), x>0: 0
    return tf.multiply( LAMB, tf.add( relu, selu_neg ) )
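
An equivalent, more compact implementation (a standalone snippet with its own imports) leans on tf.nn.elu, which already computes exp(x) - 1 for negative inputs:
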
import tensorflow as tf
from tensorflow.python.framework import ops


def selu(x, name="selu"):
    """Scaled ELU: scale*x for x >= 0, scale*alpha*(exp(x)-1) for x < 0."""
    with ops.name_scope(name):
        alpha = 1.6732632423543772848170429916717
        scale = 1.0507009873554804934193349852946
        # tf.nn.elu(x) = exp(x)-1 for x < 0, so this matches applyselu above
        return scale * tf.where(x >= 0.0, x, alpha * tf.nn.elu(x))
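
As a quick sanity check (a minimal sketch, assuming TensorFlow 1.x graph mode and the two functions above), both implementations should agree element-wise, and large negative inputs saturate toward -scale * alpha ≈ -1.758:

x = tf.constant([-3.0, -1.0, 0.0, 0.5, 2.0])
with tf.Session() as sess:
    print(sess.run(selu(x)))        # e.g. -3.0 maps to ~ -1.670, 2.0 maps to ~ 2.101
    print(sess.run(applyselu(x)))   # should print the same values as selu(x)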