@@ -174,16 +174,19 @@ def lars_path(X, y, Xy=None, Gram=None, max_iter=500,
174
174
swap , nrm2 = linalg .get_blas_funcs (('swap' , 'nrm2' ), (X ,))
175
175
solve_cholesky , = get_lapack_funcs (('potrs' ,), (X ,))
176
176
177
- if Gram is None :
177
+ if Gram is None or Gram is False :
178
+ Gram = None
178
179
if copy_X :
179
180
# force copy. setting the array to be fortran-ordered
180
181
# speeds up the calculation of the (partial) Gram matrix
181
182
# and allows to easily swap columns
182
183
X = X .copy ('F' )
183
- elif isinstance ( Gram , string_types ) and Gram == 'auto' :
184
- Gram = None
185
- if X .shape [0 ] > X .shape [1 ]:
184
+
185
+ elif isinstance ( Gram , string_types ) and Gram == 'auto' or Gram is True :
186
+ if Gram is True or X .shape [0 ] > X .shape [1 ]:
186
187
Gram = np .dot (X .T , X )
188
+ else :
189
+ Gram = None
187
190
elif copy_Gram :
188
191
Gram = Gram .copy ()
189
192
@@ -593,16 +596,14 @@ def __init__(self, fit_intercept=True, verbose=False, normalize=True,
593
596
self .copy_X = copy_X
594
597
self .fit_path = fit_path
595
598
596
- def _get_gram (self ):
597
- # precompute if n_samples > n_features
598
- precompute = self .precompute
599
- if hasattr (precompute , '__array__' ):
600
- Gram = precompute
601
- elif precompute == 'auto' :
602
- Gram = 'auto'
603
- else :
604
- Gram = None
605
- return Gram
599
+ def _get_gram (self , precompute , X , y ):
600
+ if (not hasattr (precompute , '__array__' )) and (
601
+ (precompute is True ) or
602
+ (precompute == 'auto' and X .shape [0 ] > X .shape [1 ]) or
603
+ (precompute == 'auto' and y .shape [1 ] > 1 )):
604
+ precompute = np .dot (X .T , X )
605
+
606
+ return precompute
606
607
607
608
def fit (self , X , y , Xy = None ):
608
609
"""Fit the model using X, y as training data.
@@ -645,14 +646,7 @@ def fit(self, X, y, Xy=None):
645
646
else :
646
647
max_iter = self .max_iter
647
648
648
- precompute = self .precompute
649
- if not hasattr (precompute , '__array__' ) and (
650
- precompute is True or
651
- (precompute == 'auto' and X .shape [0 ] > X .shape [1 ]) or
652
- (precompute == 'auto' and y .shape [1 ] > 1 )):
653
- Gram = np .dot (X .T , X )
654
- else :
655
- Gram = self ._get_gram ()
649
+ Gram = self ._get_gram (self .precompute , X , y )
656
650
657
651
self .alphas_ = []
658
652
self .n_iter_ = []
@@ -972,10 +966,10 @@ class LarsCV(Lars):
972
966
copy_X : boolean, optional, default True
973
967
If ``True``, X will be copied; else, it may be overwritten.
974
968
975
- precompute : True | False | 'auto' | array-like
969
+ precompute : True | False | 'auto'
976
970
Whether to use a precomputed Gram matrix to speed up
977
- calculations. If set to ``'auto'`` let us decide. The Gram
978
- matrix can also be passed as argument.
971
+ calculations. If set to ``'auto'`` let us decide. The Gram matrix
972
+ cannot be passed as argument since we will use only subsets of X.
979
973
980
974
max_iter: integer, optional
981
975
Maximum number of iterations to perform.
@@ -1081,7 +1075,13 @@ def fit(self, X, y):
1081
1075
# init cross-validation generator
1082
1076
cv = check_cv (self .cv , classifier = False )
1083
1077
1084
- Gram = 'auto' if self .precompute else None
1078
+ # As we use cross-validation, the Gram matrix is not precomputed here
1079
+ Gram = self .precompute
1080
+ if hasattr (Gram , '__array__' ):
1081
+ warnings .warn ("Parameter 'precompute' cannot be an array in "
1082
+ "%s. Automatically switch to 'auto' instead."
1083
+ % self .__class__ .__name__ )
1084
+ Gram = 'auto'
1085
1085
1086
1086
cv_paths = Parallel (n_jobs = self .n_jobs , verbose = self .verbose )(
1087
1087
delayed (_lars_path_residues )(
@@ -1171,10 +1171,10 @@ class LassoLarsCV(LarsCV):
1171
1171
normalize : boolean, optional, default False
1172
1172
If True, the regressors X will be normalized before regression.
1173
1173
1174
- precompute : True | False | 'auto' | array-like
1174
+ precompute : True | False | 'auto'
1175
1175
Whether to use a precomputed Gram matrix to speed up
1176
- calculations. If set to ``'auto'`` let us decide. The Gram
1177
- matrix can also be passed as argument.
1176
+ calculations. If set to ``'auto'`` let us decide. The Gram matrix
1177
+ cannot be passed as argument since we will use only subsets of X.
1178
1178
1179
1179
max_iter : integer, optional
1180
1180
Maximum number of iterations to perform.
@@ -1404,7 +1404,7 @@ def fit(self, X, y, copy_X=True):
1404
1404
X , y , self .fit_intercept , self .normalize , self .copy_X )
1405
1405
max_iter = self .max_iter
1406
1406
1407
- Gram = self ._get_gram ()
1407
+ Gram = self .precompute
1408
1408
1409
1409
alphas_ , active_ , coef_path_ , self .n_iter_ = lars_path (
1410
1410
X , y , Gram = Gram , copy_X = copy_X , copy_Gram = True , alpha_min = 0.0 ,
0 commit comments