Commit dedf6c7b authored by vipulraheja's avatar vipulraheja
Browse files

Add selu activation

parent 3b3362c4
Loading
Loading
Loading
Loading
+3 −0
Original line number Diff line number Diff line
@@ -68,6 +68,9 @@ def softmax(x):
def elu(x, alpha=1.0):
  """Exponential linear unit activation.

  Delegates to the backend implementation in `model_ops`.

  Args:
    x: input tensor.
    alpha: slope of the negative section (default 1.0).

  Returns:
    A tensor, the ELU activation of `x`.
  """
  activated = model_ops.elu(x, alpha)
  return activated

def selu(x):
  """Scaled exponential linear unit activation.

  Delegates to the backend implementation in `model_ops`.

  Args:
    x: input tensor.

  Returns:
    A tensor, the SELU activation of `x`.
  """
  result = model_ops.selu(x)
  return result

def softplus(x):
  """Softplus activation: log(exp(x) + 1).

  Delegates directly to TensorFlow's `tf.nn.softplus`.

  Args:
    x: input tensor.

  Returns:
    A tensor, the softplus activation of `x`.
  """
  out = tf.nn.softplus(x)
  return out

+6 −0
Original line number Diff line number Diff line
@@ -645,6 +645,12 @@ def relu(x, alpha=0., max_value=None):
  return x


def selu(x):
  """Scaled Exponential Linear Unit (SELU) activation.

  Computes `scale * elu(x, alpha)` with the fixed constants from
  Klambauer et al., "Self-Normalizing Neural Networks" (2017).

  Args:
    x: input tensor.

  Returns:
    A tensor, the SELU activation of `x`.
  """
  # Fixed constants chosen so activations self-normalize
  # (zero mean, unit variance) under the SELU paper's assumptions.
  selu_alpha = 1.6732632423543772848170429916717
  selu_scale = 1.0507009873554804934193349852946
  elu_part = elu(x, selu_alpha)
  return selu_scale * elu_part


def hard_sigmoid(x):
  """Segment-wise linear approximation of sigmoid.
  Faster than sigmoid.