Skip to content

Tests for bug #25380 #26255

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 11 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
The table of contents is too big for display.
Diff view
Diff view
  •  
  •  
  •  
40 changes: 16 additions & 24 deletions .github/workflows/wheels.yml
Original file line number Diff line number Diff line change
Expand Up @@ -57,73 +57,65 @@ jobs:
# https://github.com/scikit-learn/scikit-learn/issues/22530
- os: windows-2019
python: 38
bitness: 64
platform_id: win_amd64
- os: windows-latest
python: 39
bitness: 64
platform_id: win_amd64
- os: windows-latest
python: 310
bitness: 64
platform_id: win_amd64

# Windows 32 bit
- os: windows-latest
python: 38
bitness: 32
platform_id: win32
- os: windows-latest
python: 39
bitness: 32
platform_id: win32
python: 311
platform_id: win_amd64

# Linux 64 bit manylinux2014
- os: ubuntu-latest
python: 38
bitness: 64
platform_id: manylinux_x86_64
manylinux_image: manylinux2014
- os: ubuntu-latest
python: 39
bitness: 64
platform_id: manylinux_x86_64
manylinux_image: manylinux2014

# NumPy on Python 3.10 only supports 64bit and is only available with manylinux2014
- os: ubuntu-latest
python: 310
bitness: 64
platform_id: manylinux_x86_64
manylinux_image: manylinux2014

- os: ubuntu-latest
python: 311
platform_id: manylinux_x86_64
manylinux_image: manylinux2014

# MacOS x86_64
- os: macos-latest
bitness: 64
python: 38
platform_id: macosx_x86_64
- os: macos-latest
bitness: 64
python: 39
platform_id: macosx_x86_64
- os: macos-latest
bitness: 64
python: 310
platform_id: macosx_x86_64
- os: macos-latest
python: 311
platform_id: macosx_x86_64

# MacOS arm64
- os: macos-latest
bitness: 64
python: 38
platform_id: macosx_arm64
- os: macos-latest
bitness: 64
python: 39
platform_id: macosx_arm64
- os: macos-latest
bitness: 64
python: 310
platform_id: macosx_arm64
- os: macos-latest
python: 311
platform_id: macosx_arm64

steps:
- name: Checkout scikit-learn
Expand All @@ -147,11 +139,11 @@ jobs:
CIBW_MANYLINUX_X86_64_IMAGE: ${{ matrix.manylinux_image }}
CIBW_MANYLINUX_I686_IMAGE: ${{ matrix.manylinux_image }}
CIBW_TEST_SKIP: "*-macosx_arm64"
CIBW_REPAIR_WHEEL_COMMAND_WINDOWS: bash build_tools/github/repair_windows_wheels.sh {wheel} {dest_dir} ${{ matrix.bitness }}
CIBW_BEFORE_TEST_WINDOWS: bash build_tools/github/build_minimal_windows_image.sh ${{ matrix.python }} ${{ matrix.bitness }}
CIBW_REPAIR_WHEEL_COMMAND_WINDOWS: bash build_tools/github/repair_windows_wheels.sh {wheel} {dest_dir}
CIBW_BEFORE_TEST_WINDOWS: bash build_tools/github/build_minimal_windows_image.sh ${{ matrix.python }}
CIBW_TEST_REQUIRES: pytest pandas threadpoolctl
CIBW_TEST_COMMAND: bash {project}/build_tools/github/test_wheels.sh
CIBW_TEST_COMMAND_WINDOWS: bash {project}/build_tools/github/test_windows_wheels.sh ${{ matrix.python }} ${{ matrix.bitness }}
CIBW_TEST_COMMAND_WINDOWS: bash {project}/build_tools/github/test_windows_wheels.sh ${{ matrix.python }}
CIBW_BUILD_VERBOSITY: 1

run: bash build_tools/github/build_wheels.sh
Expand Down
8 changes: 4 additions & 4 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@ sklearn/metrics/_pairwise_distances_reduction/_base.pxd
sklearn/metrics/_pairwise_distances_reduction/_base.pyx
sklearn/metrics/_pairwise_distances_reduction/_datasets_pair.pxd
sklearn/metrics/_pairwise_distances_reduction/_datasets_pair.pyx
sklearn/metrics/_pairwise_distances_reduction/_gemm_term_computer.pxd
sklearn/metrics/_pairwise_distances_reduction/_gemm_term_computer.pyx
sklearn/metrics/_pairwise_distances_reduction/_radius_neighborhood.pxd
sklearn/metrics/_pairwise_distances_reduction/_radius_neighborhood.pyx
sklearn/metrics/_pairwise_distances_reduction/_middle_term_computer.pxd
sklearn/metrics/_pairwise_distances_reduction/_middle_term_computer.pyx
sklearn/metrics/_pairwise_distances_reduction/_radius_neighbors.pxd
sklearn/metrics/_pairwise_distances_reduction/_radius_neighbors.pyx
10 changes: 10 additions & 0 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,16 @@ jobs:
- CIBW_BUILD=cp310-manylinux_aarch64
- BUILD_WHEEL=true

- os: linux
arch: arm64-graviton2
dist: focal
virt: vm
group: edge
if: type = cron or commit_message =~ /\[cd build\]/
env:
- CIBW_BUILD=cp311-manylinux_aarch64
- BUILD_WHEEL=true

install: source build_tools/travis/install.sh || travis_terminate 1
script: source build_tools/travis/script.sh || travis_terminate 1
after_success: source build_tools/travis/after_success.sh || travis_terminate 1
Expand Down
2 changes: 1 addition & 1 deletion MANIFEST.in
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
include *.rst
recursive-include doc *
recursive-include examples *
recursive-include sklearn *.c *.h *.pyx *.pxd *.pxi *.tp
recursive-include sklearn *.c *.cpp *.h *.pyx *.pxd *.pxi *.tp
recursive-include sklearn/datasets *.csv *.csv.gz *.rst *.jpg *.txt *.arff.gz *.json.gz
include COPYING
include README.rst
Expand Down
6 changes: 3 additions & 3 deletions README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -37,12 +37,12 @@
.. |SciPyMinVersion| replace:: 1.3.2
.. |JoblibMinVersion| replace:: 1.1.1
.. |ThreadpoolctlMinVersion| replace:: 2.0.0
.. |MatplotlibMinVersion| replace:: 3.1.2
.. |MatplotlibMinVersion| replace:: 3.1.3
.. |Scikit-ImageMinVersion| replace:: 0.16.2
.. |PandasMinVersion| replace:: 1.0.5
.. |SeabornMinVersion| replace:: 0.9.0
.. |PytestMinVersion| replace:: 5.0.1
.. |PlotlyMinVersion| replace:: 5.9.0
.. |PytestMinVersion| replace:: 5.3.1
.. |PlotlyMinVersion| replace:: 5.10.0

.. image:: https://raw.githubusercontent.com/scikit-learn/scikit-learn/main/doc/logos/scikit-learn-logo.png
:target: https://scikit-learn.org/
Expand Down
4 changes: 2 additions & 2 deletions SECURITY.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,8 @@

| Version | Supported |
| --------- | ------------------ |
| 1.1.2 | :white_check_mark: |
| < 1.1.2 | :x: |
| 1.1.3 | :white_check_mark: |
| < 1.1.3 | :x: |

## Reporting a Vulnerability

Expand Down
28 changes: 0 additions & 28 deletions azure-pipelines.yml
Original file line number Diff line number Diff line change
Expand Up @@ -106,27 +106,6 @@ jobs:
LOCK_FILE: './build_tools/azure/python_nogil_lock.txt'
COVERAGE: 'false'

# Check compilation with intel C++ compiler (ICC)
- template: build_tools/azure/posix.yml
parameters:
name: Linux_Nightly_ICC
vmImage: ubuntu-20.04
dependsOn: [git_commit, linting]
condition: |
and(
succeeded(),
not(contains(dependencies['git_commit']['outputs']['commit.message'], '[ci skip]')),
or(eq(variables['Build.Reason'], 'Schedule'),
contains(dependencies['git_commit']['outputs']['commit.message'], '[icc-build]')
)
)
matrix:
pylatest_conda_forge_mkl:
DISTRIB: 'conda'
LOCK_FILE: 'build_tools/azure/pylatest_conda_forge_mkl_no_coverage_linux-64_conda.lock'
COVERAGE: 'false'
BUILD_WITH_ICC: 'true'

- template: build_tools/azure/posix-docker.yml
parameters:
name: Linux_Nightly_PyPy
Expand Down Expand Up @@ -182,7 +161,6 @@ jobs:
DISTRIB: 'conda'
LOCK_FILE: './build_tools/azure/py38_conda_forge_openblas_ubuntu_2204_linux-64_conda.lock'
COVERAGE: 'false'
BUILD_WITH_ICC: 'false'
SKLEARN_TESTS_GLOBAL_RANDOM_SEED: '0' # non-default seed

- template: build_tools/azure/posix.yml
Expand Down Expand Up @@ -280,9 +258,3 @@ jobs:
COVERAGE: 'true'
SKLEARN_ENABLE_DEBUG_CYTHON_DIRECTIVES: '1'
SKLEARN_TESTS_GLOBAL_RANDOM_SEED: '7' # non-default seed
py38_pip_openblas_32bit:
DISTRIB: 'pip-windows'
PYTHON_VERSION: '3.8'
PYTHON_ARCH: '32'
LOCK_FILE: ./build_tools/azure/py38_pip_openblas_32bit_lock.txt
SKLEARN_TESTS_GLOBAL_RANDOM_SEED: '8' # non-default seed
7 changes: 7 additions & 0 deletions benchmarks/bench_hist_gradient_boosting_higgsboson.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
parser.add_argument("--max-bins", type=int, default=255)
parser.add_argument("--no-predict", action="store_true", default=False)
parser.add_argument("--cache-loc", type=str, default="/tmp")
parser.add_argument("--no-interactions", type=bool, default=False)
args = parser.parse_args()

HERE = os.path.dirname(__file__)
Expand Down Expand Up @@ -88,6 +89,11 @@ def predict(est, data_test, target_test):
n_samples, n_features = data_train.shape
print(f"Training set with {n_samples} records with {n_features} features.")

if args.no_interactions:
interaction_cst = [[i] for i in range(n_features)]
else:
interaction_cst = None

est = HistGradientBoostingClassifier(
loss="log_loss",
learning_rate=lr,
Expand All @@ -97,6 +103,7 @@ def predict(est, data_test, target_test):
early_stopping=False,
random_state=0,
verbose=1,
interaction_cst=interaction_cst,
)
fit(est, data_train, target_train, "sklearn")
predict(est, data_test, target_test)
Expand Down
4 changes: 1 addition & 3 deletions benchmarks/bench_lasso.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,9 +50,7 @@ def compute_bench(alpha, n_samples, n_features, precompute):

gc.collect()
print("- benchmarking LassoLars")
clf = LassoLars(
alpha=alpha, fit_intercept=False, normalize=False, precompute=precompute
)
clf = LassoLars(alpha=alpha, fit_intercept=False, precompute=precompute)
tstart = time()
clf.fit(X, Y)
lars_lasso_results.append(time() - tstart)
Expand Down
10 changes: 2 additions & 8 deletions build_tools/azure/debian_atlas_32bit_lock.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4,17 +4,13 @@
#
# pip-compile --output-file=build_tools/azure/debian_atlas_32bit_lock.txt build_tools/azure/debian_atlas_32bit_requirements.txt
#
atomicwrites==1.4.1
# via pytest
attrs==22.1.0
# via pytest
cython==0.29.32
# via -r build_tools/azure/debian_atlas_32bit_requirements.txt
importlib-metadata==5.0.0
# via pytest
joblib==1.1.1
# via -r build_tools/azure/debian_atlas_32bit_requirements.txt
more-itertools==8.14.0
more-itertools==9.0.0
# via pytest
packaging==21.3
# via pytest
Expand All @@ -24,11 +20,9 @@ py==1.11.0
# via pytest
pyparsing==3.0.9
# via packaging
pytest==5.0.1
pytest==5.3.1
# via -r build_tools/azure/debian_atlas_32bit_requirements.txt
threadpoolctl==2.2.0
# via -r build_tools/azure/debian_atlas_32bit_requirements.txt
wcwidth==0.2.5
# via pytest
zipp==3.9.0
# via importlib-metadata
2 changes: 1 addition & 1 deletion build_tools/azure/debian_atlas_32bit_requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4,4 +4,4 @@
cython
joblib==1.1.1 # min
threadpoolctl==2.2.0
pytest==5.0.1 # min
pytest==5.3.1 # min
16 changes: 0 additions & 16 deletions build_tools/azure/install.sh
Original file line number Diff line number Diff line change
Expand Up @@ -59,15 +59,6 @@ pre_python_environment_install() {
export PYTHON_NOGIL_PATH="${PYTHON_NOGIL_CLONE_PATH}/python"
cd $OLDPWD

elif [[ "$BUILD_WITH_ICC" == "true" ]]; then
wget https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
sudo apt-key add GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
rm GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
sudo add-apt-repository "deb https://apt.repos.intel.com/oneapi all main"
sudo apt-get update
sudo apt-get install intel-oneapi-compiler-dpcpp-cpp-and-cpp-classic
source /opt/intel/oneapi/setvars.sh

fi
}

Expand Down Expand Up @@ -122,13 +113,6 @@ scikit_learn_install() {
export LDFLAGS="$LDFLAGS -Wl,--sysroot=/"
fi

if [[ "$BUILD_WITH_ICC" == "true" ]]; then
# The "build_clib" command is implicitly used to build "libsvm-skl".
# To compile with a different compiler, we also need to specify the
# compiler for this command
python setup.py build_ext --compiler=intelem -i build_clib --compiler=intelem
fi

# TODO use a specific variable for this rather than using a particular build ...
if [[ "$DISTRIB" == "conda-pip-latest" ]]; then
# Check that pip can automatically build scikit-learn with the build
Expand Down
4 changes: 1 addition & 3 deletions build_tools/azure/install_win.sh
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,7 @@ set -x
source build_tools/shared.sh

if [[ "$DISTRIB" == "conda" ]]; then
conda update -n base conda -y
conda install pip -y
pip install "$(get_dep conda-lock min)"
conda install -c conda-forge "$(get_dep conda-lock min)" -y
conda-lock install --name $VIRTUALENV $LOCK_FILE
source activate $VIRTUALENV
else
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,11 @@ dependencies:
- cython
- joblib
- threadpoolctl=2.2.0
- matplotlib=3.1.2 # min
- matplotlib=3.1.3 # min
- pandas
- pyamg
- pytest
- pytest-xdist
- pytest-xdist=2.5.0
- pillow
- codecov
- pytest-cov
Expand Down
Loading