Skip to content

Commit 9f81779

Browse files
committed
improve docstring
1 parent 2b4cc23 commit 9f81779

File tree

1 file changed

+16
-6
lines changed

1 file changed

+16
-6
lines changed

sklearn/decomposition/nmf.py

+16-6
Original file line numberDiff line numberDiff line change
@@ -86,10 +86,20 @@ def _safe_compute_error(X, W, H):
8686
def beta_divergence(X, W, H, beta):
8787
"""Compute the beta-divergence of X and dot(W, H).
8888
89-
If beta == 2, this is the Frobenius squared norm
90-
If beta == 1, this is the generalized Kullback-Leibler divergence
91-
If beta == 0, this is the Itakura-Saito divergence
92-
Else, this is the general beta-divergence.
89+
Parameters
90+
----------
91+
X : float or array-like, shape (n_samples, n_features)
92+
93+
W : float or array-like, shape (n_samples, n_components)
94+
95+
H : float or array-like, shape (n_components, n_features)
96+
97+
beta : float or string in {'frobenius', 'kullback-leibler', 'itakura-saito'}
98+
Parameter of the beta-divergence.
99+
If beta == 2, this is the Frobenius squared norm.
100+
If beta == 1, this is the generalized Kullback-Leibler divergence.
101+
If beta == 0, this is the Itakura-Saito divergence.
102+
Else, this is the general beta-divergence.
93103
"""
94104
beta = _beta_loss_to_float(beta)
95105

@@ -1159,8 +1169,8 @@ def non_negative_factorization(X, W=None, H=None, n_components=None,
11591169
raise ValueError("Number of components must be a positive integer;"
11601170
" got (n_components=%r)" % n_components)
11611171
if not isinstance(max_iter, INTEGER_TYPES) or max_iter < 0:
1162-
raise ValueError("Maximum number of iterations must be a positive integer;"
1163-
" got (max_iter=%r)" % max_iter)
1172+
raise ValueError("Maximum number of iterations must be a positive "
1173+
"integer; got (max_iter=%r)" % max_iter)
11641174
if not isinstance(tol, numbers.Number) or tol < 0:
11651175
raise ValueError("Tolerance for stopping criteria must be "
11661176
"positive; got (tol=%r)" % tol)

0 commit comments

Comments
 (0)