From 7dc848a24f4c8f370414a166e6b77908a761539b Mon Sep 17 00:00:00 2001
From: Jenni
Date: Sat, 26 Jun 2021 14:45:59 -0300
Subject: [PATCH 1/2] fix failing test from estimator AdaBoostClassifier

---
 maint_tools/test_docstrings.py       | 1 -
 sklearn/ensemble/_weight_boosting.py | 2 +-
 2 files changed, 1 insertion(+), 2 deletions(-)

diff --git a/maint_tools/test_docstrings.py b/maint_tools/test_docstrings.py
index cba5e8dfd2900..7ddbb0f74108e 100644
--- a/maint_tools/test_docstrings.py
+++ b/maint_tools/test_docstrings.py
@@ -10,7 +10,6 @@
 # List of modules ignored when checking for numpydoc validation.
 DOCSTRING_IGNORE_LIST = [
     "ARDRegression",
-    "AdaBoostClassifier",
     "AdaBoostRegressor",
     "AdditiveChi2Sampler",
     "AffinityPropagation",
diff --git a/sklearn/ensemble/_weight_boosting.py b/sklearn/ensemble/_weight_boosting.py
index b68b9e97b81f2..d661a50f62345 100644
--- a/sklearn/ensemble/_weight_boosting.py
+++ b/sklearn/ensemble/_weight_boosting.py
@@ -328,7 +328,7 @@ class AdaBoostClassifier(ClassifierMixin, BaseWeightBoosting):
         The maximum number of estimators at which boosting is terminated.
         In case of perfect fit, the learning procedure is stopped early.
 
-    learning_rate : float, default=1.
+    learning_rate : float, default=1
         Weight applied to each classifier at each boosting iteration. A higher
         learning rate increases the contribution of each classifier. There is
         a trade-off between the `learning_rate` and `n_estimators` parameters.

From e99692669dfc5d12d4ef414fa1aab4179017bd06 Mon Sep 17 00:00:00 2001
From: Jennifer Maldonado
Date: Sat, 26 Jun 2021 16:39:40 -0300
Subject: [PATCH 2/2] Update sklearn/ensemble/_weight_boosting.py

Co-authored-by: Guillaume Lemaitre
---
 sklearn/ensemble/_weight_boosting.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sklearn/ensemble/_weight_boosting.py b/sklearn/ensemble/_weight_boosting.py
index d661a50f62345..89ddf4ee5d50e 100644
--- a/sklearn/ensemble/_weight_boosting.py
+++ b/sklearn/ensemble/_weight_boosting.py
@@ -328,7 +328,7 @@ class AdaBoostClassifier(ClassifierMixin, BaseWeightBoosting):
         The maximum number of estimators at which boosting is terminated.
         In case of perfect fit, the learning procedure is stopped early.
 
-    learning_rate : float, default=1
+    learning_rate : float, default=1.0
         Weight applied to each classifier at each boosting iteration. A higher
         learning rate increases the contribution of each classifier. There is
         a trade-off between the `learning_rate` and `n_estimators` parameters.
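
Note for reviewers, not part of the patch: removing "AdaBoostClassifier" from
DOCSTRING_IGNORE_LIST opts the class into numpydoc validation, so its docstring
must now pass the checks that were previously skipped. A minimal sketch of how
to run that check locally, assuming numpydoc is installed in the environment:

# Validate the class docstring directly with numpydoc, which is roughly
# what the maintenance test exercises; validate() returns a report dict
# whose "errors" list holds (code, message) pairs and is empty on success.
from numpydoc.validate import validate

report = validate("sklearn.ensemble.AdaBoostClassifier")
for code, message in report["errors"]:
    print(code, message)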
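
For reference, a small usage sketch of the parameter whose documented default
is being corrected above; the toy dataset and hyperparameter values are
illustrative assumptions, not part of the patch:

from sklearn.datasets import make_classification
from sklearn.ensemble import AdaBoostClassifier
from sklearn.model_selection import train_test_split

# Illustrative toy data only.
X, y = make_classification(n_samples=200, random_state=0)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

# learning_rate defaults to 1.0, the value the docstring now spells out;
# a smaller value shrinks each classifier's contribution, which usually
# has to be offset with a larger n_estimators (the trade-off the
# docstring describes).
clf = AdaBoostClassifier(n_estimators=50, learning_rate=1.0, random_state=0)
clf.fit(X_train, y_train)
print(clf.score(X_test, y_test))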