Commit 0cbdeb46 authored by Atreya Majumdar's avatar Atreya Majumdar
Browse files

Removed extra test line, added formatting, doc-change

parent 716d5299
Loading
Loading
Loading
Loading
+0 −1
Original line number Diff line number Diff line
@@ -609,7 +609,6 @@ def test_DAG_gather():

@pytest.mark.pytorch
def test_layer_norm():
  assert(1 == 2)
  """Test invoking LayerNorm."""
  input_ar = torch.tensor([[1., 99., 10000.], [0.003, 999.37, 23.]])
  layer = torch_layers.LayerNorm(input_ar.shape)
+18 −1
Original line number Diff line number Diff line
@@ -8,7 +8,7 @@ except:
class LayerNorm(nn.Module):
  """Apply Layer Normalization to input.

    The layer takes input and applies layer Normalization to it.
    The layer takes input and applies layer normalization to it.

    References
    ----------
@@ -16,12 +16,29 @@ class LayerNorm(nn.Module):
    """

  def __init__(self, features, eps=1e-6):
    """Initialize a LayerNorm layer.

    Parameters
    ----------
    features: Tensor
        Tensor to be normalized.
    eps: float
        Epsilon value to be used.
    """

    super(LayerNorm, self).__init__()
    self.a_2 = nn.Parameter(torch.ones(features))
    self.b_2 = nn.Parameter(torch.zeros(features))
    self.eps = eps

  def forward(self, x):
    """Normalize the Tensor.

    Parameters
    ----------
    x: Tensor
        Tensor to be normalized.
    """
    mean = x.mean(-1, keepdim=True)
    std = x.std(-1, keepdim=True)
    return self.a_2 * (x - mean) / (std + self.eps) + self.b_2