Commit 3f42da48 authored by Bharat123rox

Refactor code in models/ folder

Replace imports of the deprecated collections.Sequence with collections.abc.Sequence throughout the models, keeping a fallback import for Python 2. The container ABCs have lived in collections.abc since Python 3.3; importing them from collections emits a DeprecationWarning and stops working entirely on Python 3.10+.

parent 19eeac11
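
Every file touched below applies the same compatibility shim. As a standalone sketch (tightened to catch ImportError instead of the bare except used in the diff):

try:
  # Python 3.3+: the container ABCs live in collections.abc.
  from collections.abc import Sequence as SequenceCollection
except ImportError:
  # Python 2 fallback: Sequence is still importable from collections.
  from collections import Sequence as SequenceCollection

The SequenceCollection alias lets the rest of each module use one name regardless of which import succeeded.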
+9 −6
@@ -6,7 +6,10 @@ from deepchem.models.layers import SwitchedDropout
 from deepchem.metrics import to_one_hot
 from tensorflow.keras.layers import Input, Dense, Reshape, Softmax, Dropout, Activation, Lambda
 import tensorflow.keras.layers as layers
-import collections
+try:
+  from collections.abc import Sequence as SequenceCollection
+except:
+  from collections import Sequence as SequenceCollection
 
 
 class CNN(KerasModel):
@@ -128,15 +131,15 @@ class CNN(KerasModel):
     n_layers = len(layer_filters)
     if not isinstance(kernel_size, list):
       kernel_size = [kernel_size] * n_layers
-    if not isinstance(strides, collections.Sequence):
+    if not isinstance(strides, SequenceCollection):
       strides = [strides] * n_layers
-    if not isinstance(weight_init_stddevs, collections.Sequence):
+    if not isinstance(weight_init_stddevs, SequenceCollection):
       weight_init_stddevs = [weight_init_stddevs] * (n_layers + 1)
-    if not isinstance(bias_init_consts, collections.Sequence):
+    if not isinstance(bias_init_consts, SequenceCollection):
       bias_init_consts = [bias_init_consts] * (n_layers + 1)
-    if not isinstance(dropouts, collections.Sequence):
+    if not isinstance(dropouts, SequenceCollection):
       dropouts = [dropouts] * n_layers
-    if not isinstance(activation_fns, collections.Sequence):
+    if not isinstance(activation_fns, SequenceCollection):
       activation_fns = [activation_fns] * n_layers
     if weight_decay_penalty != 0.0:
       if weight_decay_penalty_type == 'l1':
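
The isinstance checks above implement a broadcast convention for constructor arguments: a scalar hyperparameter is expanded to one value per layer, while an explicit sequence is taken as given. A minimal sketch of the idea, with broadcast as a hypothetical helper rather than a deepchem function:

try:
  from collections.abc import Sequence as SequenceCollection
except ImportError:
  from collections import Sequence as SequenceCollection

def broadcast(value, n_layers):
  # Expand a scalar to one value per layer; pass sequences through
  # so callers can still supply distinct per-layer values.
  if not isinstance(value, SequenceCollection):
    return [value] * n_layers
  return list(value)

print(broadcast(0.5, 3))         # [0.5, 0.5, 0.5]
print(broadcast([0.2, 0.0], 2))  # [0.2, 0.0]

One caveat of testing against Sequence: a str is itself a Sequence, so a single string argument passes through unexpanded rather than being broadcast.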
+12 −9
@@ -6,7 +6,10 @@ import time
 import numpy as np
 import tensorflow as tf
 import threading
-import collections
+try:
+  from collections.abc import Sequence as SequenceCollection
+except:
+  from collections import Sequence as SequenceCollection
 
 import deepchem as dc
 from deepchem.models import KerasModel
@@ -94,13 +97,13 @@ class MultitaskClassifier(KerasModel):
     self.n_features = n_features
     self.n_classes = n_classes
     n_layers = len(layer_sizes)
-    if not isinstance(weight_init_stddevs, collections.Sequence):
+    if not isinstance(weight_init_stddevs, SequenceCollection):
       weight_init_stddevs = [weight_init_stddevs] * n_layers
-    if not isinstance(bias_init_consts, collections.Sequence):
+    if not isinstance(bias_init_consts, SequenceCollection):
       bias_init_consts = [bias_init_consts] * n_layers
-    if not isinstance(dropouts, collections.Sequence):
+    if not isinstance(dropouts, SequenceCollection):
       dropouts = [dropouts] * n_layers
-    if not isinstance(activation_fns, collections.Sequence):
+    if not isinstance(activation_fns, SequenceCollection):
       activation_fns = [activation_fns] * n_layers
     if weight_decay_penalty != 0.0:
       if weight_decay_penalty_type == 'l1':
@@ -240,13 +243,13 @@ class MultitaskRegressor(KerasModel):
     self.n_tasks = n_tasks
     self.n_features = n_features
     n_layers = len(layer_sizes)
-    if not isinstance(weight_init_stddevs, collections.Sequence):
+    if not isinstance(weight_init_stddevs, SequenceCollection):
       weight_init_stddevs = [weight_init_stddevs] * (n_layers + 1)
-    if not isinstance(bias_init_consts, collections.Sequence):
+    if not isinstance(bias_init_consts, SequenceCollection):
       bias_init_consts = [bias_init_consts] * (n_layers + 1)
-    if not isinstance(dropouts, collections.Sequence):
+    if not isinstance(dropouts, SequenceCollection):
       dropouts = [dropouts] * n_layers
-    if not isinstance(activation_fns, collections.Sequence):
+    if not isinstance(activation_fns, SequenceCollection):
       activation_fns = [activation_fns] * n_layers
     if weight_decay_penalty != 0.0:
       if weight_decay_penalty_type == 'l1':
+4 −1
@@ -2,6 +2,9 @@
 
 from deepchem.models import KerasModel, layers, losses
 from tensorflow.keras.layers import Input, Lambda, Layer, Softmax, Reshape, Multiply
-from collections import Sequence
+try:
+  from collections.abc import Sequence
+except:
+  from collections import Sequence
 import numpy as np
 import tensorflow as tf
+12 −9
-import collections
+try:
+  from collections.abc import Sequence as SequenceCollection
+except:
+  from collections import Sequence as SequenceCollection
 
 import deepchem as dc
 import numpy as np
@@ -181,20 +184,20 @@ class WeaveModel(KerasModel):
     if mode not in ['classification', 'regression']:
       raise ValueError("mode must be either 'classification' or 'regression'")
 
-    if not isinstance(n_atom_feat, collections.Sequence):
+    if not isinstance(n_atom_feat, SequenceCollection):
       n_atom_feat = [n_atom_feat] * n_weave
-    if not isinstance(n_pair_feat, collections.Sequence):
+    if not isinstance(n_pair_feat, SequenceCollection):
       n_pair_feat = [n_pair_feat] * n_weave
     n_layers = len(fully_connected_layer_sizes)
-    if not isinstance(conv_weight_init_stddevs, collections.Sequence):
+    if not isinstance(conv_weight_init_stddevs, SequenceCollection):
       conv_weight_init_stddevs = [conv_weight_init_stddevs] * n_weave
-    if not isinstance(weight_init_stddevs, collections.Sequence):
+    if not isinstance(weight_init_stddevs, SequenceCollection):
       weight_init_stddevs = [weight_init_stddevs] * n_layers
-    if not isinstance(bias_init_consts, collections.Sequence):
+    if not isinstance(bias_init_consts, SequenceCollection):
       bias_init_consts = [bias_init_consts] * n_layers
-    if not isinstance(dropouts, collections.Sequence):
+    if not isinstance(dropouts, SequenceCollection):
       dropouts = [dropouts] * n_layers
-    if not isinstance(activation_fns, collections.Sequence):
+    if not isinstance(activation_fns, SequenceCollection):
       activation_fns = [activation_fns] * n_layers
     if weight_decay_penalty != 0.0:
       if weight_decay_penalty_type == 'l1':
@@ -790,7 +793,7 @@ class _GraphConvKerasModel(tf.keras.Model):
     self.mode = mode
     self.uncertainty = uncertainty
 
-    if not isinstance(dropout, collections.Sequence):
+    if not isinstance(dropout, SequenceCollection):
       dropout = [dropout] * (len(graph_conv_layers) + 1)
     if len(dropout) != len(graph_conv_layers) + 1:
       raise ValueError('Wrong number of dropout probabilities provided')
+7 −4
 # -*- coding: utf-8 -*-
 import tensorflow as tf
 import numpy as np
-import collections
+try:
+  from collections.abc import Sequence as SequenceCollection
+except:
+  from collections import Sequence as SequenceCollection
 from typing import Callable, Dict, List
 from tensorflow.keras import activations, initializers, backend
 from tensorflow.keras.layers import Dropout, BatchNormalization
@@ -1098,7 +1101,7 @@ class NeighborList(tf.keras.layers.Layer):
     return config
 
   def call(self, inputs):
-    if isinstance(inputs, collections.Sequence):
+    if isinstance(inputs, SequenceCollection):
       if len(inputs) != 1:
         raise ValueError("NeighborList can only have one input")
       inputs = inputs[0]
@@ -2118,7 +2121,7 @@ class Highway(tf.keras.layers.Layer):
     return config
 
   def build(self, input_shape):
-    if isinstance(input_shape, collections.Sequence):
+    if isinstance(input_shape, SequenceCollection):
       input_shape = input_shape[0]
     out_channels = input_shape[1]
 
@@ -2140,7 +2143,7 @@ class Highway(tf.keras.layers.Layer):
     self.built = True
 
   def call(self, inputs):
-    if isinstance(inputs, collections.Sequence):
+    if isinstance(inputs, SequenceCollection):
       parent = inputs[0]
     else:
       parent = inputs
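
The layers hunks above use the same Sequence test for a different purpose: a Keras layer's call may receive either a single tensor or a list of tensors, and the check collapses the two cases. A minimal sketch of the idiom, with SingleInputLayer as an illustrative class rather than a deepchem one:

try:
  from collections.abc import Sequence as SequenceCollection
except ImportError:
  from collections import Sequence as SequenceCollection

import tensorflow as tf

class SingleInputLayer(tf.keras.layers.Layer):

  def call(self, inputs):
    # Keras may hand us one tensor or a list of tensors; accept a
    # one-element list and reject anything longer.
    if isinstance(inputs, SequenceCollection):
      if len(inputs) != 1:
        raise ValueError("SingleInputLayer can only have one input")
      inputs = inputs[0]
    return tf.identity(inputs)

A tf.Tensor is not a Sequence, so plain tensors skip the unwrapping branch entirely.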