Commit c1f2ddf2 authored by nd-02110114
Browse files

Merge branch 'master' into fix-docs-build

parents 34fa116e 12b35a1d
Loading
Loading
Loading
Loading
+5 −2
Original line number Diff line number Diff line
@@ -5,7 +5,10 @@ from deepchem.models.optimizers import Adam
from deepchem.models.tensorgraph.layers import Feature, Weights, Label, Layer
import numpy as np
import tensorflow as tf
import collections
# Compatibility shim: `collections.Sequence` moved to `collections.abc`
# (removed from the `collections` top level in Python 3.10); fall back
# only on Pythons too old to have `collections.abc`.
try:
  from collections.abc import Sequence as SequenceCollection
except ImportError:  # only an import failure should trigger the fallback
  from collections import Sequence as SequenceCollection
import copy
import time

@@ -109,7 +112,7 @@ class MCTS(object):
    self.n_search_episodes = n_search_episodes
    self.discount_factor = discount_factor
    self.value_weight = value_weight
    self._state_is_list = isinstance(env.state_shape[0], collections.Sequence)
    self._state_is_list = isinstance(env.state_shape[0], SequenceCollection)
    if optimizer is None:
      self._optimizer = Adam(learning_rate=0.001, beta1=0.9, beta2=0.999)
    else:
+10 −5
Original line number Diff line number Diff line
@@ -6,6 +6,11 @@ import warnings
import numpy as np
import tensorflow as tf

# Compatibility shim: `collections.Sequence` moved to `collections.abc`
# (removed from the `collections` top level in Python 3.10); fall back
# only on Pythons too old to have `collections.abc`.
try:
  from collections.abc import Sequence as SequenceCollection
except ImportError:  # only an import failure should trigger the fallback
  from collections import Sequence as SequenceCollection

from deepchem.nn import model_ops

class RobustMultitaskClassifier(MultiTaskClassifier):
@@ -73,15 +78,15 @@ class RobustMultitaskClassifier(MultiTaskClassifier):

    n_layers = len(layer_sizes)
    assert n_layers == len(bypass_layer_sizes)
    if not isinstance(weight_init_stddevs, collections.Sequence):
    if not isinstance(weight_init_stddevs, SequenceCollection):
      weight_init_stddevs = [weight_init_stddevs] * n_layers
    if not isinstance(bypass_weight_init_stddevs, collections.Sequence):
    if not isinstance(bypass_weight_init_stddevs, SequenceCollection):
      bypass_weight_init_stddevs = [bypass_weight_init_stddevs] * n_layers
    if not isinstance(bias_init_consts, collections.Sequence):
    if not isinstance(bias_init_consts, SequenceCollection):
      bias_init_consts = [bias_init_consts] * n_layers
    if not isinstance(dropouts, collections.Sequence):
    if not isinstance(dropouts, SequenceCollection):
      dropouts = [dropouts] * n_layers
    if not isinstance(activation_fns, collections.Sequence):
    if not isinstance(activation_fns, SequenceCollection):
      activation_fns = [activation_fns] * n_layers

    # Add the input features.
+1 −1
Original line number Diff line number Diff line
@@ -110,7 +110,7 @@ class GridHyperparamOpt(HyperparamOpt):
    hyperparams = params_dict.keys()
    hyperparam_vals = params_dict.values()
    for hyperparam_list in params_dict.values():
      assert isinstance(hyperparam_list, collections.Iterable)
      assert isinstance(hyperparam_list, collections.abc.Iterable)

    number_combinations = reduce(mul, [len(vals) for vals in hyperparam_vals])

+9 −6
Original line number Diff line number Diff line
@@ -6,7 +6,10 @@ from deepchem.models.layers import SwitchedDropout
from deepchem.metrics import to_one_hot
from tensorflow.keras.layers import Input, Dense, Reshape, Softmax, Dropout, Activation, Lambda
import tensorflow.keras.layers as layers
import collections
# Compatibility shim: `collections.Sequence` moved to `collections.abc`
# (removed from the `collections` top level in Python 3.10); fall back
# only on Pythons too old to have `collections.abc`.
try:
  from collections.abc import Sequence as SequenceCollection
except ImportError:  # only an import failure should trigger the fallback
  from collections import Sequence as SequenceCollection


class CNN(KerasModel):
@@ -128,15 +131,15 @@ class CNN(KerasModel):
    n_layers = len(layer_filters)
    if not isinstance(kernel_size, list):
      kernel_size = [kernel_size] * n_layers
    if not isinstance(strides, collections.Sequence):
    if not isinstance(strides, SequenceCollection):
      strides = [strides] * n_layers
    if not isinstance(weight_init_stddevs, collections.Sequence):
    if not isinstance(weight_init_stddevs, SequenceCollection):
      weight_init_stddevs = [weight_init_stddevs] * (n_layers + 1)
    if not isinstance(bias_init_consts, collections.Sequence):
    if not isinstance(bias_init_consts, SequenceCollection):
      bias_init_consts = [bias_init_consts] * (n_layers + 1)
    if not isinstance(dropouts, collections.Sequence):
    if not isinstance(dropouts, SequenceCollection):
      dropouts = [dropouts] * n_layers
    if not isinstance(activation_fns, collections.Sequence):
    if not isinstance(activation_fns, SequenceCollection):
      activation_fns = [activation_fns] * n_layers
    if weight_decay_penalty != 0.0:
      if weight_decay_penalty_type == 'l1':
+12 −9
Original line number Diff line number Diff line
@@ -6,7 +6,10 @@ import time
import numpy as np
import tensorflow as tf
import threading
import collections
# Compatibility shim: `collections.Sequence` moved to `collections.abc`
# (removed from the `collections` top level in Python 3.10); fall back
# only on Pythons too old to have `collections.abc`.
try:
  from collections.abc import Sequence as SequenceCollection
except ImportError:  # only an import failure should trigger the fallback
  from collections import Sequence as SequenceCollection

import deepchem as dc
from deepchem.models import KerasModel
@@ -94,13 +97,13 @@ class MultitaskClassifier(KerasModel):
    self.n_features = n_features
    self.n_classes = n_classes
    n_layers = len(layer_sizes)
    if not isinstance(weight_init_stddevs, collections.Sequence):
    if not isinstance(weight_init_stddevs, SequenceCollection):
      weight_init_stddevs = [weight_init_stddevs] * n_layers
    if not isinstance(bias_init_consts, collections.Sequence):
    if not isinstance(bias_init_consts, SequenceCollection):
      bias_init_consts = [bias_init_consts] * n_layers
    if not isinstance(dropouts, collections.Sequence):
    if not isinstance(dropouts, SequenceCollection):
      dropouts = [dropouts] * n_layers
    if not isinstance(activation_fns, collections.Sequence):
    if not isinstance(activation_fns, SequenceCollection):
      activation_fns = [activation_fns] * n_layers
    if weight_decay_penalty != 0.0:
      if weight_decay_penalty_type == 'l1':
@@ -240,13 +243,13 @@ class MultitaskRegressor(KerasModel):
    self.n_tasks = n_tasks
    self.n_features = n_features
    n_layers = len(layer_sizes)
    if not isinstance(weight_init_stddevs, collections.Sequence):
    if not isinstance(weight_init_stddevs, SequenceCollection):
      weight_init_stddevs = [weight_init_stddevs] * (n_layers + 1)
    if not isinstance(bias_init_consts, collections.Sequence):
    if not isinstance(bias_init_consts, SequenceCollection):
      bias_init_consts = [bias_init_consts] * (n_layers + 1)
    if not isinstance(dropouts, collections.Sequence):
    if not isinstance(dropouts, SequenceCollection):
      dropouts = [dropouts] * n_layers
    if not isinstance(activation_fns, collections.Sequence):
    if not isinstance(activation_fns, SequenceCollection):
      activation_fns = [activation_fns] * n_layers
    if weight_decay_penalty != 0.0:
      if weight_decay_penalty_type == 'l1':
Loading