Commit b0903081 authored by Vignesh's avatar Vignesh
Browse files

Added documentation for ChemNet layers, Updated directory for Chembl25 loader

parent 69ee7cb7
Loading
Loading
Loading
Loading
+55 −0
Original line number Diff line number Diff line
@@ -14,6 +14,13 @@ from tensorflow.keras.layers import Conv2D, Concatenate, ReLU, Add, MaxPool2D


class Stem(tf.keras.layers.Layer):
  """
  Stem Layer as defined in https://arxiv.org/abs/1710.02238. The structure is
  significantly altered from the original Inception-ResNet architecture,
  (https://arxiv.org/abs/1602.07261) but the idea behind this layer is to
  downsample the image as a preprocessing step for the Inception-ResNet layers,
  and reduce computational complexity.
  """

  def __init__(self, num_filters, **kwargs):
    """
@@ -38,11 +45,22 @@ class Stem(tf.keras.layers.Layer):
    self._layers = [self.conv_layer, self.activation_layer]

  def call(self, inputs):
    """Forward pass of the stem block.

    Applies the stem convolution and then the activation, in that order.
    Invoked when the layer's ``__call__`` method is used.

    Parameters
    ----------
    inputs: tf.Tensor
      Input tensor to downsample.

    Returns
    -------
    tf.Tensor
      Activated output of the stem convolution.
    """
    features = self.conv_layer(inputs)
    activated = self.activation_layer(features)
    return activated


class InceptionResnetA(tf.keras.layers.Layer):
  """
  Variant A of the three InceptionResNet layers described in
  https://arxiv.org/abs/1710.02238. All variants use multiple
  convolutional blocks with varying kernel sizes and number of filters. This
  allows capturing patterns over different scales in the inputs. Residual
  connections are additionally used and have been shown previously to improve
  convergence and training in deep networks. A 1x1 convolution is used on the
  concatenated feature maps from the different convolutional blocks, to ensure
  shapes of inputs and feature maps are same for the residual connection.
  """

  def __init__(self, num_filters, input_dim, **kwargs):
    """
@@ -127,6 +145,7 @@ class InceptionResnetA(tf.keras.layers.Layer):
        [self.concat_layer, self.add_layer, self.activation_layer])

  def call(self, inputs):
    """Invoked when __call__ method of the layer is used."""
    conv1 = inputs
    for layer in self.conv_block1:
      conv1 = layer(conv1)
@@ -149,6 +168,16 @@ class InceptionResnetA(tf.keras.layers.Layer):


class InceptionResnetB(tf.keras.layers.Layer):
  """
  Variant B of the three InceptionResNet layers described in
  https://arxiv.org/abs/1710.02238. All variants use multiple
  convolutional blocks with varying kernel sizes and number of filters. This
  allows capturing patterns over different scales in the inputs. Residual
  connections are additionally used and have been shown previously to improve
  convergence and training in deep networks. A 1x1 convolution is used on the
  concatenated feature maps from the different convolutional blocks, to ensure
  shapes of inputs and feature maps are same for the residual connection.
  """

  def __init__(self, num_filters, input_dim, **kwargs):
    """
@@ -212,6 +241,7 @@ class InceptionResnetB(tf.keras.layers.Layer):
        [self.concat_layer, self.add_layer, self.activation_layer])

  def call(self, inputs):
    """Invoked when __call__ method of the layer is used."""
    conv1 = inputs
    for layer in self.conv_block1:
      conv1 = layer(conv1)
@@ -232,6 +262,16 @@ class InceptionResnetB(tf.keras.layers.Layer):


class InceptionResnetC(tf.keras.layers.Layer):
  """
  Variant C of the three InceptionResNet layers described in
  https://arxiv.org/abs/1710.02238. All variants use multiple
  convolutional blocks with varying kernel sizes and number of filters. This
  allows capturing patterns over different scales in the inputs. Residual
  connections are additionally used and have been shown previously to improve
  convergence and training in deep networks. A 1x1 convolution is used on the
  concatenated feature maps from the different convolutional blocks, to ensure
  shapes of inputs and feature maps are same for the residual connection.
  """

  def __init__(self, num_filters, input_dim, **kwargs):
    """
@@ -298,6 +338,7 @@ class InceptionResnetC(tf.keras.layers.Layer):
        [self.concat_layer, self.add_layer, self.activation_layer])

  def call(self, inputs):
    """Invoked when __call__ method of the layer is used."""
    conv1 = inputs
    for layer in self.conv_block1:
      conv1 = layer(conv1)
@@ -318,6 +359,12 @@ class InceptionResnetC(tf.keras.layers.Layer):


class ReductionA(tf.keras.layers.Layer):
  """
  Variant A of the two Reduction layers described in
  https://arxiv.org/abs/1710.02238. All variants use multiple convolutional
  blocks with varying kernel sizes and number of filters, to reduce the spatial
  extent of the image and reduce computational complexity for downstream layers.
  """

  def __init__(self, num_filters, **kwargs):
    """
@@ -374,6 +421,7 @@ class ReductionA(tf.keras.layers.Layer):
        [self.max_pool1, self.concat_layer, self.activation_layer])

  def call(self, inputs):
    """Invoked when __call__ method of the layer is used."""
    maxpool1 = self.max_pool1(inputs)
    conv1 = inputs
    for layer in self.conv_block1:
@@ -389,6 +437,12 @@ class ReductionA(tf.keras.layers.Layer):


class ReductionB(tf.keras.layers.Layer):
  """
  Variant B of the two Reduction layers described in
  https://arxiv.org/abs/1710.02238. All variants use multiple convolutional
  blocks with varying kernel sizes and number of filters, to reduce the spatial
  extent of the image and reduce computational complexity for downstream layers.
  """

  def __init__(self, num_filters, **kwargs):
    """
@@ -469,6 +523,7 @@ class ReductionB(tf.keras.layers.Layer):
        [self.max_pool1, self.concat_layer, self.activation_layer])

  def call(self, inputs):
    """Invoked when __call__ method of the layer is used."""
    maxpool1 = self.max_pool1(inputs)
    conv1 = inputs
    for layer in self.conv_block1:
+5 −2
Original line number Diff line number Diff line
@@ -75,6 +75,9 @@ def load_chembl25(featurizer="smiles2seq",
    save_dir = DEFAULT_DIR

  save_folder = os.path.join(save_dir, "chembl_25-featurized", str(featurizer))
  if featurizer == "smiles2img":
    img_spec = kwargs.get("img_spec", "engd")
    save_folder = os.path.join(save_folder, img_spec)

  if reload:
    if not os.path.exists(save_folder):
@@ -104,9 +107,9 @@ def load_chembl25(featurizer="smiles2seq",

  elif featurizer == "smiles2img":
    img_size = kwargs.get("img_size", 80)
    img_mode = kwargs.get("img_mode", "engd")
    img_spec = kwargs.get("img_spec", "engd")
    res = kwargs.get("res", 0.5)
    featurizer = SmilesToImage(img_size=img_size, img_mode=img_mode, res=res)
    featurizer = SmilesToImage(img_size=img_size, img_spec=img_spec, res=res)

  else:
    raise ValueError(