Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Allow sklearn to be updated to the current version. #146

Open
wants to merge 4 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
30 changes: 15 additions & 15 deletions afqinsight/_serial_bagging.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
from sklearn.ensemble._base import _partition_estimators
from sklearn.utils import check_random_state, check_array, indices_to_mask, resample
from sklearn.utils.random import sample_without_replacement
from sklearn.utils.metaestimators import if_delegate_has_method
from sklearn.utils.metaestimators import available_if
from sklearn.utils.validation import (
check_is_fitted,
_check_sample_weight,
Expand Down Expand Up @@ -103,7 +103,7 @@ def _parallel_build_estimators(
max_samples = ensemble._max_samples
bootstrap = ensemble.bootstrap
bootstrap_features = ensemble.bootstrap_features
support_sample_weight = has_fit_parameter(ensemble.base_estimator_, "sample_weight")
support_sample_weight = has_fit_parameter(ensemble.estimator_, "sample_weight")
if not support_sample_weight and sample_weight is not None:
raise ValueError("The base estimator doesn't support sample weight")

Expand Down Expand Up @@ -182,7 +182,7 @@ class SerialBaggingClassifier(BaggingClassifier):

Parameters
----------
base_estimator : object, default=None
estimator : object, default=None
The base estimator to fit on random subsets of the dataset.
If None, then the base estimator is a decision tree.

Expand Down Expand Up @@ -236,7 +236,7 @@ class SerialBaggingClassifier(BaggingClassifier):

Attributes
----------
base_estimator_ : estimator
estimator_ : estimator
The base estimator from which the ensemble is grown.

n_features_in_ : int
Expand Down Expand Up @@ -287,7 +287,7 @@ class SerialBaggingClassifier(BaggingClassifier):

def __init__(
self,
base_estimator=None,
estimator=None,
n_estimators=10,
*,
max_samples=1.0,
Expand All @@ -301,7 +301,7 @@ def __init__(
verbose=0,
):
super().__init__(
base_estimator=base_estimator,
estimator=estimator,
n_estimators=n_estimators,
max_samples=max_samples,
max_features=max_features,
Expand Down Expand Up @@ -367,7 +367,7 @@ def _fit(self, X, y, max_samples=None, max_depth=None, sample_weight=None):
self._validate_estimator()

if max_depth is not None: # pragma: no cover
self.base_estimator_.max_depth = max_depth
self.estimator_.max_depth = max_depth

# Validate max_samples
if max_samples is None: # pragma: no cover
Expand Down Expand Up @@ -568,7 +568,7 @@ def predict_log_proba(self, X):
classes corresponds to that in the attribute :term:`classes_`.
"""
check_is_fitted(self)
if hasattr(self.base_estimator_, "predict_log_proba"):
if hasattr(self.estimator_, "predict_log_proba"):
# Check data
X = check_array(
X, accept_sparse=["csr", "csc"], dtype=None, force_all_finite=False
Expand Down Expand Up @@ -610,7 +610,7 @@ def predict_log_proba(self, X):
else:
return np.log(self.predict_proba(X))

@if_delegate_has_method(delegate="base_estimator")
@available_if(lambda self: hasattr(self, "estimator"))
def decision_function(self, X):
"""Average of the decision functions of the base classifiers.

Expand Down Expand Up @@ -690,7 +690,7 @@ class SerialBaggingRegressor(BaggingRegressor):

Parameters
----------
base_estimator : object, default=None
estimator : object, default=None
The base estimator to fit on random subsets of the dataset.
If None, then the base estimator is a decision tree.

Expand Down Expand Up @@ -745,7 +745,7 @@ class SerialBaggingRegressor(BaggingRegressor):

Attributes
----------
base_estimator_ : estimator
estimator_ : estimator
The base estimator from which the ensemble is grown.

n_features_in_ : int
Expand Down Expand Up @@ -780,7 +780,7 @@ class SerialBaggingRegressor(BaggingRegressor):
>>> X, y = make_regression(n_samples=100, n_features=4,
... n_informative=2, n_targets=1,
... random_state=0, shuffle=False)
>>> regr = BaggingRegressor(base_estimator=SVR(),
>>> regr = BaggingRegressor(estimator=SVR(),
... n_estimators=10, random_state=0).fit(X, y)
>>> regr.predict([[0, 0, 0, 0]])
array([-2.8720...])
Expand All @@ -803,7 +803,7 @@ class SerialBaggingRegressor(BaggingRegressor):

def __init__(
self,
base_estimator=None,
estimator=None,
n_estimators=10,
max_samples=1.0,
max_features=1.0,
Expand All @@ -816,7 +816,7 @@ def __init__(
verbose=0,
):
super().__init__(
base_estimator=base_estimator,
estimator=estimator,
n_estimators=n_estimators,
max_samples=max_samples,
max_features=max_features,
Expand Down Expand Up @@ -881,7 +881,7 @@ def _fit(self, X, y, max_samples=None, max_depth=None, sample_weight=None):
self._validate_estimator()

if max_depth is not None: # pragma: no cover
self.base_estimator_.max_depth = max_depth
self.estimator_.max_depth = max_depth

# Validate max_samples
if max_samples is None: # pragma: no cover
Expand Down
2 changes: 1 addition & 1 deletion afqinsight/pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -273,7 +273,7 @@ def call_with_kwargs(Transformer, kwargs):
else:
ensembler_kwargs = {}

ensembler_kwargs["base_estimator"] = base_estimator
ensembler_kwargs["estimator"] = base_estimator

if isinstance(ensemble_meta_estimator, str):
if ensemble_meta_estimator.lower() == "bagging":
Expand Down
Loading
Loading