diff --git a/doc/whats_new/v1.1.rst b/doc/whats_new/v1.1.rst
index 1a9f773ce08df..0282c83917cd5 100644
--- a/doc/whats_new/v1.1.rst
+++ b/doc/whats_new/v1.1.rst
@@ -45,6 +45,13 @@ Changelog
   `pos_label` to specify the positive class label.
   :pr:`21032` by :user:`Guillaume Lemaitre `.
 
+:mod:`sklearn.cross_decomposition`
+..................................
+
+- |Enhancement| :meth:`cross_decomposition._PLS.inverse_transform` now allows
+  reconstruction of the `Y` target when a `Y` parameter is given.
+  :pr:`19680` by :user:`Robin Thibaut `.
+
 :mod:`sklearn.ensemble`
 .......................
 
diff --git a/sklearn/cross_decomposition/_pls.py b/sklearn/cross_decomposition/_pls.py
index a539c211de906..3d4012e6050ff 100644
--- a/sklearn/cross_decomposition/_pls.py
+++ b/sklearn/cross_decomposition/_pls.py
@@ -398,7 +398,7 @@ def transform(self, X, Y=None, copy=True):
 
         return x_scores
 
-    def inverse_transform(self, X):
+    def inverse_transform(self, X, Y=None):
         """Transform data back to its original space.
 
         Parameters
@@ -407,10 +407,17 @@ def inverse_transform(self, X):
             New data, where `n_samples` is the number of samples
             and `n_components` is the number of pls components.
 
+        Y : array-like of shape (n_samples, n_components)
+            New target, where `n_samples` is the number of samples
+            and `n_components` is the number of pls components.
+
         Returns
         -------
-        self : ndarray of shape (n_samples, n_features)
-            Return the reconstructed array.
+        X_reconstructed : ndarray of shape (n_samples, n_features)
+            Return the reconstructed `X` data.
+
+        Y_reconstructed : ndarray of shape (n_samples, n_targets)
+            Return the reconstructed `Y` target. Only returned when `Y` is given.
 
         Notes
         -----
@@ -420,10 +427,19 @@ def inverse_transform(self, X):
         X = check_array(X, dtype=FLOAT_DTYPES)
         # From pls space to original space
         X_reconstructed = np.matmul(X, self.x_loadings_.T)
 
-        # Denormalize
         X_reconstructed *= self._x_std
         X_reconstructed += self._x_mean
+
+        if Y is not None:
+            Y = check_array(Y, dtype=FLOAT_DTYPES)
+            # From pls space to original space
+            Y_reconstructed = np.matmul(Y, self.y_loadings_.T)
+            # Denormalize
+            Y_reconstructed *= self._y_std
+            Y_reconstructed += self._y_mean
+            return X_reconstructed, Y_reconstructed
+
         return X_reconstructed
 
     def predict(self, X, copy=True):
diff --git a/sklearn/cross_decomposition/tests/test_pls.py b/sklearn/cross_decomposition/tests/test_pls.py
index 1ddc0d0da443f..22af03ff6fc43 100644
--- a/sklearn/cross_decomposition/tests/test_pls.py
+++ b/sklearn/cross_decomposition/tests/test_pls.py
@@ -59,6 +59,8 @@ def test_pls_canonical_basics():
     # Check that inverse_transform works
     X_back = pls.inverse_transform(Xt)
     assert_array_almost_equal(X_back, X)
+    _, Y_back = pls.inverse_transform(Xt, Yt)
+    assert_array_almost_equal(Y_back, Y)
 
 
 def test_sanity_check_pls_regression():
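
For context, a minimal usage sketch of the extended `inverse_transform` signature (not part of the patch). `PLSCanonical` and `load_linnerud` are existing scikit-learn APIs, used here purely for illustration; the two-value return requires this patch.

```python
from sklearn.cross_decomposition import PLSCanonical
from sklearn.datasets import load_linnerud

X, Y = load_linnerud(return_X_y=True)
pls = PLSCanonical(n_components=2).fit(X, Y)

# Project both blocks into the shared latent space.
Xt, Yt = pls.transform(X, Y)

# Existing behaviour: reconstruct only the X block from its scores.
X_back = pls.inverse_transform(Xt)

# With this patch: passing Y additionally reconstructs the target block.
X_back, Y_back = pls.inverse_transform(Xt, Yt)
```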