diff --git a/.github/workflows/python-pytest.yml b/.github/workflows/python-pytest.yml
index 762e622..b36f78e 100644
--- a/.github/workflows/python-pytest.yml
+++ b/.github/workflows/python-pytest.yml
@@ -41,7 +41,7 @@ jobs:
         python -m pip install --upgrade pip
         pip install tensorflow==${{ matrix.tf-version }}
         pip install git+https://github.com/DataCanvasIO/Hypernets
-        pip install -r requirements.txt "protobuf<4.0" "numpy==1.19.5" "featuretools==0.27"
+        pip install -r requirements.txt "protobuf<4.0" "numpy==1.19.5" "featuretools<=0.27"
         pip install pytest-cov==2.4.0 python-coveralls codacy-coverage
         pip list
     - name: Test with pytest
diff --git a/deeptables/tests/models/deeptable_regression_test.py b/deeptables/tests/models/deeptable_regression_test.py
index f9e0df7..d860f69 100644
--- a/deeptables/tests/models/deeptable_regression_test.py
+++ b/deeptables/tests/models/deeptable_regression_test.py
@@ -4,12 +4,12 @@
 """
 import pandas as pd
 
-from sklearn.datasets import load_boston
 from deeptables.models import deeptable
 from deeptables.tests.misc import r2_c
 from deeptables.utils import consts
 
 from hypernets.tabular import get_tool_box
+from hypernets.tabular.datasets.dsutils import load_boston
 
 
 class Test_DeepTable_Regression:
@@ -17,13 +17,10 @@ class Test_DeepTable_Regression:
     @staticmethod
     def load_data():
         print("Loading datasets...")
-
-        boston_dataset = load_boston()
-
-        df_train = pd.DataFrame(boston_dataset.data)
-        df_train.columns = boston_dataset.feature_names
-        target = pd.Series(boston_dataset.target)
-
+        df = load_boston()
+        target = df.pop("target")
+        df_train = df
+        # target = pd.Series(boston_dataset.target)
         return df_train, target
 
     def setup_class(self):
diff --git a/deeptables/tests/models/hyper_dt_regression_test.py b/deeptables/tests/models/hyper_dt_regression_test.py
index a1f64e0..46264e8 100644
--- a/deeptables/tests/models/hyper_dt_regression_test.py
+++ b/deeptables/tests/models/hyper_dt_regression_test.py
@@ -9,8 +9,9 @@
 from hypernets.core.callbacks import SummaryCallback, FileStorageLoggingCallback
 from hypernets.core.searcher import OptimizeDirection
 from hypernets.searchers import RandomSearcher
-from sklearn.datasets import load_boston
+
 from sklearn.model_selection import train_test_split
+from hypernets.tabular.datasets.dsutils import load_boston
 
 from .. import homedir
 
@@ -19,11 +20,9 @@ class Test_HyperDT_Regression():
 
     def test_boston(self):
         print("Loading datasets...")
-        boston_dataset = load_boston()
-
-        df_train = pd.DataFrame(boston_dataset.data)
-        df_train.columns = boston_dataset.feature_names
-        self.y = pd.Series(boston_dataset.target)
+        df = load_boston()
+        df_train = df
+        self.y = df.pop('target')
         self.X = df_train
 
         self.X_train, \