Commit abd0b4e8 authored by alat-rights
Browse files

Merge branch 'bert_to_merge' of https://github.com/alat-rights/deepchem into bert_to_merge

parents 45d1b493 fd96570e
Loading
Loading
Loading
Loading
+87 −2
Original line number Diff line number Diff line
@@ -636,10 +636,95 @@ def test_multi_headed_mat_attention():
      dropout_p=0.0)
  input_tensor = torch.tensor([[1., 2.], [5., 6.]])
  mask = torch.tensor([[1., 1.], [1., 1.]])
  result = layer(input_tensor, input_tensor, input_tensor, mask, 0.0,
                 adj_matrix, distance_matrix)
  result = layer(input_tensor, input_tensor, input_tensor, mask, adj_matrix,
                 distance_matrix, 0.0)
  output_ar = torch.tensor([[[0.0492, -0.0792], [-0.9971, -0.3172],
                             [0.0492, -0.0792], [-0.9971, -0.3172]],
                            [[0.8671, 0.1069], [-3.4075, -0.8656],
                             [0.8671, 0.1069], [-3.4075, -0.8656]]])
  assert torch.allclose(result, output_ar, rtol=1e-3)


@pytest.mark.torch
def test_position_wise_feed_forward():
  """Test invoking PositionwiseFeedForward."""
  # Fix the RNG so the layer's randomly initialized weights are reproducible.
  torch.manual_seed(0)
  ff_layer = torch_layers.PositionwiseFeedForward(
      d_input=2,
      d_hidden=2,
      d_output=2,
      activation='relu',
      n_layers=1,
      dropout_p=0.0)
  features = torch.tensor([[1., 2.], [5., 6.]])
  # Expected values were recorded from a seeded run of the layer.
  expected = torch.tensor([[0.4810, 0.0000], [1.9771, 0.0000]])
  assert torch.allclose(ff_layer(features), expected, rtol=1e-4)


@pytest.mark.torch
def test_sub_layer_connection():
  """Test invoking SublayerConnection."""
  # Seed the RNG so the layer's initialization is deterministic.
  torch.manual_seed(0)
  features = torch.tensor([[1., 2.], [5., 6.]])
  sublayer = torch_layers.SublayerConnection(2, 0.0)
  out = sublayer(features, features)
  # Expected values were recorded from a seeded run of the layer.
  expected = torch.tensor([[2.0027e-05, 3.0000e+00],
                           [4.0000e+00, 7.0000e+00]])
  assert torch.allclose(out, expected)


@pytest.mark.torch
def test_mat_encoder_layer():
  """Test invoking MATEncoderLayer."""
  # Seed the RNG so the encoder's initialization is deterministic.
  torch.manual_seed(0)
  from rdkit import Chem
  features = torch.Tensor([[1., 2.], [5., 6.]])
  attention_mask = torch.Tensor([[1., 1.], [1., 1.]])
  # Ethane gives a minimal 2-atom adjacency/distance pair for the test.
  ethane = Chem.MolFromSmiles("CC")
  adjacency = Chem.GetAdjacencyMatrix(ethane)
  distances = Chem.GetDistanceMatrix(ethane)
  encoder = torch_layers.MATEncoderLayer(
      dist_kernel='softmax',
      lambda_attention=0.33,
      lambda_distance=0.33,
      h=2,
      sa_hsize=2,
      sa_dropout_p=0.0,
      output_bias=True,
      d_input=2,
      d_hidden=2,
      d_output=2,
      activation='relu',
      n_layers=2,
      ff_dropout_p=0.0,
      encoder_hsize=2,
      encoder_dropout_p=0.0)
  out = encoder(features, attention_mask, adjacency, distances, 0.0)
  # Expected values were recorded from a seeded run of the layer.
  expected = torch.tensor([[[0.9988, 2.0012], [-0.9999, 3.9999],
                            [0.9988, 2.0012], [-0.9999, 3.9999]],
                           [[5.0000, 6.0000], [3.0000, 8.0000],
                            [5.0000, 6.0000], [3.0000, 8.0000]]])
  assert torch.allclose(out, expected, rtol=1e-4)


@pytest.mark.torch
def test_mat_embedding():
  """Test invoking MATEmbedding."""
  # Seed the RNG so the embedding weights are reproducible.
  torch.manual_seed(0)
  embedding = torch_layers.MATEmbedding(3, 1, 0.0)
  # Detach so allclose compares plain values, not an autograd graph.
  out = embedding(torch.tensor([1., 2., 3.])).detach()
  assert torch.allclose(out, torch.tensor([-1.2353]), rtol=1e-4)


@pytest.mark.torch
def test_mat_generator():
  """Test invoking MATGenerator."""
  # Seed the RNG so the generator's initialization is deterministic.
  torch.manual_seed(0)
  generator = torch_layers.MATGenerator(3, 'mean', 1, 1, 0.0)
  features = torch.tensor([1., 2., 3.])
  mask = torch.tensor([1., 1., 1.])
  # Expected value was recorded from a seeded run of the layer.
  assert torch.allclose(generator(features, mask),
                        torch.tensor([-1.4436]),
                        rtol=1e-4)
+1 −1
Original line number Diff line number Diff line
@@ -140,7 +140,7 @@ def test_weave_model():
      batch_size=batch_size,
      mode='classification',
      dropouts=0,
      learning_rate=0.002)
      learning_rate=0.0001)
  model.fit(dataset, nb_epoch=250)
  scores = model.evaluate(dataset, [metric], transformers)
  assert scores['mean-roc_auc_score'] >= 0.9
+466 −22

File changed.

Preview size limit exceeded, changes collapsed.

+7 −12
Original line number Diff line number Diff line
@@ -2488,12 +2488,7 @@ class RxnSplitTransformer(Transformer):
  >>> # When mixed training is toggled.
  >>> import numpy as np
  >>> from deepchem.trans.transformers import RxnSplitTransformer
  >>> reactions = np.array(
    [
        "CC(C)C[Mg+].CON(C)C(=O)c1ccc(O)nc1>C1CCOC1.[Cl-]>CC(C)CC(=O)c1ccc(O)nc1",
        "CCn1cc(C(=O)O)c(=O)c2cc(F)c(-c3ccc(N)cc3)cc21.O=CO>>CCn1cc(C(=O)O)c(=O)c2cc(F)c(-c3ccc(NC=O)cc3)cc21"
    ],
    dtype=object)
  >>> reactions = np.array(["CC(C)C[Mg+].CON(C)C(=O)c1ccc(O)nc1>C1CCOC1.[Cl-]>CC(C)CC(=O)c1ccc(O)nc1","CCn1cc(C(=O)O)c(=O)c2cc(F)c(-c3ccc(N)cc3)cc21.O=CO>>CCn1cc(C(=O)O)c(=O)c2cc(F)c(-c3ccc(NC=O)cc3)cc21"], dtype=object)
  >>> trans = RxnSplitTransformer(sep_reagent=True)
  >>> split_reactions = trans.transform_array(X=reactions, y=np.array([]), w=np.array([]), ids=np.array([]))
  >>> split_reactions
+15 −1
Original line number Diff line number Diff line
@@ -125,9 +125,24 @@ Torch Layers
.. autoclass:: deepchem.models.torch_models.layers.ScaleNorm
  :members:

.. autoclass:: deepchem.models.torch_models.layers.MATEncoderLayer
  :members:

.. autoclass:: deepchem.models.torch_models.layers.MultiHeadedMATAttention
  :members:

.. autoclass:: deepchem.models.torch_models.layers.SublayerConnection
  :members:

.. autoclass:: deepchem.models.torch_models.layers.PositionwiseFeedForward
  :members:

.. autoclass:: deepchem.models.torch_models.layers.MATEmbedding
  :members:

.. autoclass:: deepchem.models.torch_models.layers.MATGenerator
  :members:

.. autofunction:: deepchem.models.layers.cosine_dist

Jax Layers
@@ -135,4 +150,3 @@ Jax Layers

.. autoclass:: deepchem.models.jax_models.layers.Linear
  :members: