
Commit 42173fd

MNT add isort to ruff's rules (#26649)
1 parent 4a8b4f9 commit 42173fd

771 files changed (+5515, -5563 lines changed)
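The diff hunks below are the mechanical re-orderings produced by ruff's import-sorting rules (the isort-compatible "I" rule group): imports are split into standard-library, third-party, and project groups, separated by blank lines and alphabetized within each group, and the names inside a single `from ... import (...)` statement are alphabetized as well. As a rough illustration only (this snippet is not part of the commit, and it assumes the project is configured to treat `sklearn` as first-party, which this excerpt does not show), a module header in that style looks like:

```python
# Illustrative sketch of the import layout enforced by ruff's isort-compatible
# rules under typical settings; not a file touched by this commit.

# standard library, alphabetized
import argparse
import os
from time import time

# third-party packages, alphabetized
import numpy as np
from joblib import Memory

# project (first-party) code, alphabetized
from sklearn.datasets import fetch_20newsgroups_vectorized
from sklearn.linear_model import LogisticRegression
```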


.github/scripts/label_title_regex.py

Lines changed: 3 additions & 2 deletions
@@ -1,10 +1,11 @@
 """Labels PRs based on title. Must be run in a github action with the
 pull_request_target event."""
-from github import Github
-import os
 import json
+import os
 import re
 
+from github import Github
+
 context_dict = json.loads(os.getenv("CONTEXT_GITHUB"))
 
 repo = context_dict["repository"]

.pre-commit-config.yaml

Lines changed: 1 addition & 0 deletions
@@ -14,6 +14,7 @@ repos:
   rev: v0.0.272
   hooks:
   - id: ruff
+    args: ["--fix", "--show-source"]
 - repo: https://github.com/pre-commit/mirrors-mypy
   rev: v1.3.0
   hooks:
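Note: with these args the hook runs ruff in autofix mode, so import-order violations (and any other autofixable rules) are rewritten in place at commit time, while `--show-source` prints the offending code for anything that cannot be fixed automatically. Roughly the same pass can be reproduced outside the hook with `pre-commit run ruff --all-files`, which corresponds to invoking `ruff --fix --show-source <files>` with the ruff version pinned above; `--show-source` has since been superseded in newer ruff releases, so treat the exact flags as specific to v0.0.272.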

asv_benchmarks/benchmarks/cluster.py

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 from sklearn.cluster import KMeans, MiniBatchKMeans
 
 from .common import Benchmark, Estimator, Predictor, Transformer
-from .datasets import _blobs_dataset, _20newsgroups_highdim_dataset
+from .datasets import _20newsgroups_highdim_dataset, _blobs_dataset
 from .utils import neg_mean_inertia
 
 
asv_benchmarks/benchmarks/common.py

Lines changed: 4 additions & 4 deletions
@@ -1,11 +1,11 @@
-import os
+import itertools
 import json
-import timeit
+import os
 import pickle
-import itertools
+import timeit
 from abc import ABC, abstractmethod
-from pathlib import Path
 from multiprocessing import cpu_count
+from pathlib import Path
 
 import numpy as np
 
asv_benchmarks/benchmarks/datasets.py

Lines changed: 7 additions & 6 deletions
@@ -1,21 +1,22 @@
+from pathlib import Path
+
 import numpy as np
 import scipy.sparse as sp
 from joblib import Memory
-from pathlib import Path
 
-from sklearn.decomposition import TruncatedSVD
 from sklearn.datasets import (
-    make_blobs,
     fetch_20newsgroups,
+    fetch_olivetti_faces,
     fetch_openml,
     load_digits,
-    make_regression,
+    make_blobs,
     make_classification,
-    fetch_olivetti_faces,
+    make_regression,
 )
-from sklearn.preprocessing import MaxAbsScaler, StandardScaler
+from sklearn.decomposition import TruncatedSVD
 from sklearn.feature_extraction.text import TfidfVectorizer
 from sklearn.model_selection import train_test_split
+from sklearn.preprocessing import MaxAbsScaler, StandardScaler
 
 # memory location for caching datasets
 M = Memory(location=str(Path(__file__).resolve().parent / "cache"))

asv_benchmarks/benchmarks/decomposition.py

Lines changed: 2 additions & 2 deletions
@@ -1,8 +1,8 @@
 from sklearn.decomposition import PCA, DictionaryLearning, MiniBatchDictionaryLearning
 
 from .common import Benchmark, Estimator, Transformer
-from .datasets import _olivetti_faces_dataset, _mnist_dataset
-from .utils import make_pca_scorers, make_dict_learning_scorers
+from .datasets import _mnist_dataset, _olivetti_faces_dataset
+from .utils import make_dict_learning_scorers, make_pca_scorers
 
 
 class PCABenchmark(Transformer, Estimator, Benchmark):

asv_benchmarks/benchmarks/ensemble.py

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 from sklearn.ensemble import (
-    RandomForestClassifier,
     GradientBoostingClassifier,
     HistGradientBoostingClassifier,
+    RandomForestClassifier,
 )
 
 from .common import Benchmark, Estimator, Predictor

asv_benchmarks/benchmarks/linear_model.py

Lines changed: 2 additions & 2 deletions
@@ -1,9 +1,9 @@
 from sklearn.linear_model import (
-    LogisticRegression,
-    Ridge,
     ElasticNet,
     Lasso,
     LinearRegression,
+    LogisticRegression,
+    Ridge,
     SGDRegressor,
 )
 
benchmarks/bench_20newsgroups.py

Lines changed: 10 additions & 9 deletions
@@ -1,18 +1,19 @@
-from time import time
 import argparse
-import numpy as np
+from time import time
 
-from sklearn.dummy import DummyClassifier
+import numpy as np
 
 from sklearn.datasets import fetch_20newsgroups_vectorized
-from sklearn.metrics import accuracy_score
-from sklearn.utils.validation import check_array
-
-from sklearn.ensemble import RandomForestClassifier
-from sklearn.ensemble import ExtraTreesClassifier
-from sklearn.ensemble import AdaBoostClassifier
+from sklearn.dummy import DummyClassifier
+from sklearn.ensemble import (
+    AdaBoostClassifier,
+    ExtraTreesClassifier,
+    RandomForestClassifier,
+)
 from sklearn.linear_model import LogisticRegression
+from sklearn.metrics import accuracy_score
 from sklearn.naive_bayes import MultinomialNB
+from sklearn.utils.validation import check_array
 
 ESTIMATORS = {
     "dummy": DummyClassifier(),

benchmarks/bench_covertype.py

Lines changed: 10 additions & 6 deletions
@@ -45,20 +45,24 @@
 # Arnaud Joly <arnaud.v.joly@gmail.com>
 # License: BSD 3 clause
 
+import argparse
 import os
 from time import time
-import argparse
+
 import numpy as np
 from joblib import Memory
 
 from sklearn.datasets import fetch_covtype, get_data_home
-from sklearn.svm import LinearSVC
-from sklearn.linear_model import SGDClassifier, LogisticRegression
+from sklearn.ensemble import (
+    ExtraTreesClassifier,
+    GradientBoostingClassifier,
+    RandomForestClassifier,
+)
+from sklearn.linear_model import LogisticRegression, SGDClassifier
+from sklearn.metrics import zero_one_loss
 from sklearn.naive_bayes import GaussianNB
+from sklearn.svm import LinearSVC
 from sklearn.tree import DecisionTreeClassifier
-from sklearn.ensemble import RandomForestClassifier, ExtraTreesClassifier
-from sklearn.ensemble import GradientBoostingClassifier
-from sklearn.metrics import zero_one_loss
 from sklearn.utils import check_array
 
 # Memoize the data extraction and memory map the resulting
