Commit d5bde688 authored by vipulraheja's avatar vipulraheja
Browse files

Adding arXiv citation

parent dedf6c7b
Loading
Loading
Loading
Loading
+19 −5
Original line number Diff line number Diff line
@@ -646,6 +646,20 @@ def relu(x, alpha=0., max_value=None):


def selu(x):
  """Scaled Exponential Linear Unit (SELU).

  Equivalent to ``scale * elu(x, alpha)`` where ``alpha`` and ``scale``
  are the fixed constants from the SELU paper, chosen so that
  activations self-normalize under the stated weight-init assumptions.

  Parameters
  ----------
  x: A tensor or variable.

  Returns
  -------
  A tensor.

  References
  ----------
  - [Self-Normalizing Neural Networks](https://arxiv.org/abs/1706.02515)
  """
  # Constants from Klambauer et al. (2017), arXiv:1706.02515.
  _selu_alpha = 1.6732632423543772848170429916717
  _selu_scale = 1.0507009873554804934193349852946
  return _selu_scale * elu(x, _selu_alpha)