Commit 72ab5773 authored by nd-02110114

♻️ gdbt -> gbdt

parent 84a135d0
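For context: GBDT stands for Gradient Boosting Decision Tree, and the package and class names had the acronym misspelled as gdbt/GDBT. This commit makes the corrected spelling the public name. A minimal sketch of the import change (the old path no longer exists once this commit is applied):

# New, correctly spelled import introduced by this commit
from deepchem.models.gbdt_models import GBDTModel

# Old, misspelled import removed by this commit
# from deepchem.models.gdbt_models import GDBTModel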
+2 −2
@@ -26,7 +26,7 @@ from deepchem.models.chemnet_models import Smiles2Vec, ChemCeption

# scikit-learn model
from deepchem.models.sklearn_models import SklearnModel
-from deepchem.models.gdbt_models import GDBTModel
+from deepchem.models.gbdt_models import GBDTModel

# PyTorch models
try:
@@ -40,7 +40,7 @@ except ModuleNotFoundError:
# Compatibility imports for renamed XGBoost models. Remove below with DeepChem 3.0.
#####################################################################################

-from deepchem.models.gdbt_models.gdbt_model import XGBoostModel
+from deepchem.models.gbdt_models.gbdt_model import XGBoostModel

########################################################################################
# Compatibility imports for renamed TensorGraph models. Remove below with DeepChem 3.0.
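With the two hunks above applied, both the new name and the deprecated alias resolve from the top-level models package; a small sketch, assuming a DeepChem installation that includes this commit:

# The corrected name is the supported entry point
from deepchem.models import GBDTModel

# The legacy alias stays importable until DeepChem 3.0
from deepchem.models import XGBoostModel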
+2 −0
+# flake8: noqa
+from deepchem.models.gbdt_models.gbdt_model import GBDTModel
\ No newline at end of file
+5 −5
"""
-Gradient boosting wrapper interface
+Gradient Boosting Decision Tree wrapper interface
"""

import os
@@ -18,8 +18,8 @@ from deepchem.models.sklearn_models import SklearnModel
logger = logging.getLogger(__name__)


-class GDBTModel(SklearnModel):
-  """Wrapper class that wraps GDBT models as DeepChem models.
+class GBDTModel(SklearnModel):
+  """Wrapper class that wraps GBDT models as DeepChem models.

  This class supports LightGBM/XGBoost models.
  """
@@ -145,10 +145,10 @@ class GDBTModel(SklearnModel):
#########################################


-class XGBoostModel(GDBTModel):
+class XGBoostModel(GBDTModel):

  def __init__(self, *args, **kwargs):
    warnings.warn(
        "XGBoostModel is deprecated and has been renamed to GDBTModel.",
        "XGBoostModel is deprecated and has been renamed to GBDTModel.",
        FutureWarning)
    super(XGBoostModel, self).__init__(*args, **kwargs)
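The shim above keeps old code working while nudging users toward the new name. A minimal sketch of the warning it emits (assumes xgboost is installed; constructor defaults beyond the wrapped estimator are not shown in this diff):

import warnings
import xgboost
import deepchem as dc

xgb_model = xgboost.XGBRegressor(n_estimators=50, random_state=123)

with warnings.catch_warnings(record=True) as caught:
  warnings.simplefilter("always")
  # Deprecated alias: delegates to GBDTModel but warns first
  model = dc.models.XGBoostModel(xgb_model)

assert any(issubclass(w.category, FutureWarning) for w in caught)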
+0 −2
-# flake8: noqa
-from deepchem.models.gdbt_models.gdbt_model import GDBTModel
\ No newline at end of file
+6 −6
@@ -29,7 +29,7 @@ def test_xgboost_regression():

  xgb_model = xgboost.XGBRegressor(
      n_estimators=50, random_state=123, verbose=False)
-  model = dc.models.GDBTModel(xgb_model, **esr)
+  model = dc.models.GBDTModel(xgb_model, **esr)

  # Fit trained model
  model.fit(train_dataset)
@@ -62,7 +62,7 @@ def test_xgboost_multitask_regression():

  def model_builder(model_dir):
    xgb_model = xgboost.XGBRegressor(n_estimators=50, seed=123, verbose=False)
-    return dc.models.GDBTModel(xgb_model, model_dir, **esr)
+    return dc.models.GBDTModel(xgb_model, model_dir, **esr)

  model = dc.models.SingletaskToMultitask(tasks, model_builder)

@@ -93,7 +93,7 @@ def test_xgboost_classification():
  classification_metric = dc.metrics.Metric(dc.metrics.roc_auc_score)
  esr = {'early_stopping_rounds': 50}
  xgb_model = xgboost.XGBClassifier(n_estimators=50, seed=123, verbose=False)
-  model = dc.models.GDBTModel(xgb_model, **esr)
+  model = dc.models.GBDTModel(xgb_model, **esr)

  # Fit trained model
  model.fit(train_dataset)
@@ -123,7 +123,7 @@ def test_lightgbm_regression():

  lgbm_model = lightgbm.LGBMRegressor(
      n_estimators=50, random_state=123, silent=True)
-  model = dc.models.GDBTModel(lgbm_model, **esr)
+  model = dc.models.GBDTModel(lgbm_model, **esr)

  # Fit trained model
  model.fit(train_dataset)
@@ -156,7 +156,7 @@ def test_lightgbm_multitask_regression():

  def model_builder(model_dir):
    lgbm_model = lightgbm.LGBMRegressor(n_estimators=50, seed=123, silent=True)
-    return dc.models.GDBTModel(lgbm_model, model_dir, **esr)
+    return dc.models.GBDTModel(lgbm_model, model_dir, **esr)

  model = dc.models.SingletaskToMultitask(tasks, model_builder)

@@ -187,7 +187,7 @@ def test_lightgbm_classification():
  classification_metric = dc.metrics.Metric(dc.metrics.roc_auc_score)
  esr = {'early_stopping_rounds': 50}
  lgbm_model = lightgbm.LGBMClassifier(n_estimators=50, seed=123, silent=True)
-  model = dc.models.GDBTModel(lgbm_model, **esr)
+  model = dc.models.GBDTModel(lgbm_model, **esr)

  # Fit trained model
  model.fit(train_dataset)
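The tests above exercise the renamed wrapper with both the XGBoost and LightGBM backends. A self-contained sketch of the same pattern on toy data (the random arrays, dc.data.NumpyDataset, and the early_stopping_rounds value are illustrative, not taken from this diff):

import numpy as np
import xgboost
import deepchem as dc

# Toy regression data standing in for a featurized dataset
X = np.random.rand(100, 10)
y = np.random.rand(100)
dataset = dc.data.NumpyDataset(X, y)

# Wrap any LightGBM/XGBoost estimator in the renamed GBDTModel
xgb_model = xgboost.XGBRegressor(n_estimators=50, random_state=123)
model = dc.models.GBDTModel(xgb_model, early_stopping_rounds=50)

model.fit(dataset)
scores = model.evaluate(dataset, [dc.metrics.Metric(dc.metrics.pearson_r2_score)])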