Unverified Commit db47d907 authored by Vignesh Ram Somnath's avatar Vignesh Ram Somnath Committed by GitHub
Browse files

Merge pull request #6 from deepchem/master

parents 25e38a21 787bffa4
Loading
Loading
Loading
Loading
+1 −1
Original line number Diff line number Diff line
@@ -46,7 +46,7 @@ def load_images_DR(split='random', seed=None):
          (-1, 1))
  image_full_paths = [os.path.join(images_path, n) for n in image_names]

  classes, cts = np.unique(all_labels.values(), return_counts=True)
  classes, cts = np.unique(list(all_labels.values()), return_counts=True)
  weight_ratio = dict(zip(classes, np.max(cts) / cts.astype(float)))
  weights = np.array([weight_ratio[l[0]] for l in labels]).reshape((-1, 1))

+1 −0
Original line number Diff line number Diff line
@@ -2,6 +2,7 @@
from __future__ import division
import random
import string
import warnings
from collections import Sequence
from copy import deepcopy

+13 −2
Original line number Diff line number Diff line
@@ -974,16 +974,27 @@ class TensorGraph(Model):

  def get_layer_variables(self, layer):
    """Get the list of trainable variables in a layer of the graph.

    Parameters
    ----------
    layer: Layer
      the layer whose trainable variables should be returned.

    Returns
    -------
    list of the layer's trainable variables. In eager mode these are the
    variables tracked by the layer object itself; in graph mode they are
    looked up in the TRAINABLE_VARIABLES collection by variable scope.
    """
    # In eager mode variables live on the layer object, so no graph or
    # build step is needed; check this before triggering build().
    if tfe.in_eager_mode():
      return layer.variables
    if not self.built:
      self.build()
    with self._get_tf("Graph").as_default():
      # Layers that create no variables have an empty scope name.
      if layer.variable_scope == '':
        return []
      return tf.get_collection(
          tf.GraphKeys.TRAINABLE_VARIABLES, scope=layer.variable_scope)

  def get_layer_variable_values(self, layer):
    """Get the variable values associated with a given layer.

    In eager mode the values are read directly from the variables;
    in graph mode they are fetched through the model's session.
    """
    variables = self.get_layer_variables(layer)
    with self._get_tf("Graph").as_default():
      if tfe.in_eager_mode():
        return [v.numpy() for v in variables]
      if not variables:
        return []
      return self.session.run(variables)

  def get_variables(self):
    """Get the list of all trainable variables in the graph."""
    if not self.built:
+41 −1
Original line number Diff line number Diff line
@@ -12,7 +12,7 @@ import deepchem as dc
from deepchem.data import NumpyDataset
from deepchem.data.datasets import Databag
from deepchem.models.tensorgraph.layers import Dense, SoftMaxCrossEntropy, ReduceMean, ReduceSum, SoftMax, Constant, Variable
from deepchem.models.tensorgraph.layers import Feature, Label
from deepchem.models.tensorgraph.layers import Feature, Label, Input
from deepchem.models.tensorgraph.layers import ReduceSquareDifference, Add, GRU
from deepchem.models.tensorgraph.tensor_graph import TensorGraph
from deepchem.models.tensorgraph.optimizers import GradientDescent, ExponentialDecay, Adam
@@ -576,3 +576,43 @@ class TestTensorGraph(unittest.TestCase):
          (1, n_features))).flatten()
      self.assertAlmostEqual(
          pred1[task], (pred2 + norm * delta)[task], places=4)

  def test_get_layer_variable_values(self):
    """Test to get the variable values associated with a layer"""
    # Case 1: a Variable layer should report its stored values verbatim.
    graph = dc.models.TensorGraph()
    initial_values = [10.0, 12.0]
    variable_layer = Variable(initial_values)
    graph.add_output(variable_layer)
    observed = graph.get_layer_variable_values(variable_layer)[0]
    np.testing.assert_array_equal(initial_values, observed)

    # Case 2: a Dense layer's weight matrix should have shape
    # (in_channels, out_channels).
    graph = dc.models.TensorGraph()
    features = Input(shape=(10, 100))
    dense = Dense(out_channels=20, in_layers=[features])
    graph.add_output(dense)
    weights = graph.get_layer_variable_values(dense)[0]
    assert weights.shape == (100, 20)

  def test_get_layer_variable_values_eager(self):
    """Tests to get variable values associated with a layer in eager mode"""

    with context.eager_mode():
      # A Variable layer should report its stored values verbatim.
      graph = dc.models.TensorGraph()
      initial_values = [10.0, 12.0]
      variable_layer = Variable(initial_values)
      graph.add_output(variable_layer)
      observed = graph.get_layer_variable_values(variable_layer)[0]
      np.testing.assert_array_equal(initial_values, observed)

      # A Dense layer's weight matrix should have shape
      # (in_channels, out_channels).
      graph = dc.models.TensorGraph()
      features = Input(shape=(10, 100))
      dense = Dense(out_channels=20, in_layers=[features])
      graph.add_output(dense)
      weights = graph.get_layer_variable_values(dense)[0]
      assert weights.shape == (100, 20)
+1 −1
Original line number Diff line number Diff line
@@ -244,7 +244,7 @@ def load_pdbbind(featurizer="grid", split="random", subset="core", reload=True):
  # No transformations of data
  transformers = []
  if split == None:
    return tasks, (dataset, None, None), transformers
    return pdbbind_tasks, (dataset, None, None), transformers

  # TODO(rbharath): This should be modified to contain a cluster split so
  # structures of the same protein aren't in both train/test
Loading