Commit 7f63ddf3 authored by Bharath Ramsundar
Browse files

Removing more imports

parent bcd2ba85
Loading
Loading
Loading
Loading
+2 −2
Original line number | Diff line number | Diff line
@@ -7,8 +7,8 @@ from __future__ import unicode_literals

import six
import tensorflow as tf
from deepchem.nn import model_ops
from deepchem.nn.model_ops import get_ndim
from deepchem.models.tensorgraph import model_ops
from deepchem.models.tensorgraph.model_ops import get_ndim


def get_from_module(identifier,
+30 −36
Original line number | Diff line number | Diff line
@@ -21,7 +21,6 @@ from deepchem.models.tensorgraph.layers import convert_to_layers

class WeaveLayer(Layer):
  """ TensorGraph style implementation
    The same as deepchem.nn.WeaveLayer
  Note: Use WeaveLayerFactory to construct this layer
  """

@@ -126,7 +125,8 @@ class WeaveLayer(Layer):
          [self.W_AP, self.b_AP, self.W_PP, self.b_PP, self.W_P, self.b_P])

  def create_tensor(self, in_layers=None, set_tensors=True, **kwargs):
    """ description and explanation refer to deepchem.nn.WeaveLayer
    """Creates weave tensors.

    parent layers: [atom_features, pair_features], pair_split, atom_to_pair
    """
    activation = activations.get(self.activation)  # Get activations
@@ -203,7 +203,6 @@ def WeaveLayerFactory(**kwargs):

class WeaveGather(Layer):
  """ TensorGraph style implementation
    The same as deepchem.nn.WeaveGather
  """

  def __init__(self,
@@ -251,7 +250,7 @@ class WeaveGather(Layer):
      self.trainable_weights = None

  def create_tensor(self, in_layers=None, set_tensors=True, **kwargs):
    """ description and explanation refer to deepchem.nn.WeaveGather
    """ 
    parent layers: atom_features, atom_split
    """
    if in_layers is None:
@@ -307,7 +306,6 @@ class WeaveGather(Layer):

class DTNNEmbedding(Layer):
  """ TensorGraph style implementation
    The same as deepchem.nn.DTNNEmbedding
  """

  def __init__(self,
@@ -337,7 +335,7 @@ class DTNNEmbedding(Layer):
    self.trainable_weights = [self.embedding_list]

  def create_tensor(self, in_layers=None, set_tensors=True, **kwargs):
    """description and explanation refer to deepchem.nn.DTNNEmbedding
    """
    parent layers: atom_number
    """
    if in_layers is None:
@@ -364,7 +362,6 @@ class DTNNEmbedding(Layer):

class DTNNStep(Layer):
  """ TensorGraph style implementation
    The same as deepchem.nn.DTNNStep
  """

  def __init__(self,
@@ -412,7 +409,7 @@ class DTNNStep(Layer):
    ]

  def create_tensor(self, in_layers=None, set_tensors=True, **kwargs):
    """description and explanation refer to deepchem.nn.DTNNStep
    """
    parent layers: atom_features, distance, distance_membership_i, distance_membership_j
    """
    if in_layers is None:
@@ -461,7 +458,6 @@ class DTNNStep(Layer):

class DTNNGather(Layer):
  """ TensorGraph style implementation
    The same as deepchem.nn.DTNNGather
  """

  def __init__(self,
@@ -514,7 +510,7 @@ class DTNNGather(Layer):
    self.trainable_weights = self.W_list + self.b_list

  def create_tensor(self, in_layers=None, set_tensors=True, **kwargs):
    """description and explanation refer to deepchem.nn.DTNNGather
    """
    parent layers: atom_features, atom_membership
    """
    if in_layers is None:
@@ -566,7 +562,6 @@ class DTNNExtract(Layer):

class DAGLayer(Layer):
  """ TensorGraph style implementation
    The same as deepchem.nn.DAGLayer
  """

  def __init__(self,
@@ -634,7 +629,7 @@ class DAGLayer(Layer):
    self.trainable_weights = self.W_list + self.b_list

  def create_tensor(self, in_layers=None, set_tensors=True, **kwargs):
    """description and explanation refer to deepchem.nn.DAGLayer
    """
    parent layers: atom_features, parents, calculation_orders, calculation_masks, n_atoms
    """
    if in_layers is None:
@@ -725,7 +720,6 @@ class DAGLayer(Layer):

class DAGGather(Layer):
  """ TensorGraph style implementation
    The same as deepchem.nn.DAGGather
  """

  def __init__(self,
@@ -786,7 +780,7 @@ class DAGGather(Layer):
    self.trainable_weights = self.W_list + self.b_list

  def create_tensor(self, in_layers=None, set_tensors=True, **kwargs):
    """description and explanation refer to deepchem.nn.DAGGather
    """
    parent layers: atom_features, membership
    """
    if in_layers is None:
+3 −4
Original line number | Diff line number | Diff line
@@ -5,10 +5,9 @@ from __future__ import unicode_literals

import numpy as np
import tensorflow as tf
#from deepchem.nn.model_ops import variable
from deepchem.nn.model_ops import random_uniform_variable
from deepchem.nn.model_ops import random_normal_variable
from deepchem.nn.activations import get_from_module
from deepchem.models.tensorgraph.model_ops import random_uniform_variable
from deepchem.models.tensorgraph.model_ops import random_normal_variable
from deepchem.models.tensorgraph.activations import get_from_module


def get_fans(shape):
+1 −1
Original line number | Diff line number | Diff line
@@ -7,7 +7,7 @@ from copy import deepcopy
import tensorflow as tf
import numpy as np

from deepchem.nn import model_ops, initializations, regularizers, activations
from deepchem.models.tensorgraph import model_ops, initializations, regularizers, activations
import math


+2 −2
Original line number | Diff line number | Diff line
@@ -8,8 +8,8 @@ from __future__ import unicode_literals
from __future__ import absolute_import

import warnings
from deepchem.nn import model_ops
from deepchem.nn.activations import get
from deepchem.models.tensorgraph import model_ops
from deepchem.models.tensorgraph.activations import get


class Regularizer(object):