CI Build wheels for the ARM64 architecture #18782

Merged
35 commits merged on Dec 15, 2020

Commits
1680a0c
MNT Build wheels for the ARM64 architecture
alfaro96 Nov 7, 2020
56957bf
MNT Trigger build [cd build]
alfaro96 Nov 7, 2020
f2a9cc1
FIX Fix environment variable definition [cd build]
alfaro96 Nov 7, 2020
20c0759
FIX Use COMMIT_MSG instead of TRAVIS_COMMIT_MESSAGE environment varia…
alfaro96 Nov 7, 2020
4c4e213
MNT Increase pip debugging output [cd build]
alfaro96 Nov 7, 2020
958d363
MNT Update .travis.yml
alfaro96 Nov 10, 2020
cbef1fe
Merge remote-tracking branch 'upstream/master' into add_aarch64_build
alfaro96 Nov 10, 2020
6200b35
MNT Trigger build [cd build]
alfaro96 Nov 10, 2020
e0506c1
Merge remote-tracking branch 'upstream/master' into add_aarch64_build
alfaro96 Nov 12, 2020
00d18ab
MNT Set environment variables [cd build]
alfaro96 Nov 12, 2020
835f3c0
MNT Use arm64-gravtion2 [cd build]
alfaro96 Nov 12, 2020
8625e3e
MNT Use arm64-gravtion2 [cd build]
alfaro96 Nov 12, 2020
cb38a5d
DEBUG: show the version of BLAS used by numpy and scipy
ogrisel Nov 12, 2020
f302b6e
[cd build]
ogrisel Nov 12, 2020
dcf3e1b
MNT Minor changes
alfaro96 Nov 13, 2020
d0539fd
DEBUG [cd build]
ogrisel Nov 13, 2020
b47d0f3
MNT Use CI approach [cd build]
alfaro96 Nov 19, 2020
b7196ca
MNT Use CI approach [cd build]
alfaro96 Nov 19, 2020
00e9fe9
[cd build]
alfaro96 Nov 19, 2020
ba332e4
Merge remote-tracking branch 'upstream/master' into add_aarch64_build
alfaro96 Nov 19, 2020
72f5a2a
MNT Use arm64 for linux [cd build]
alfaro96 Nov 19, 2020
5a12f87
MNT Use arm64 for linux [cd build]
alfaro96 Nov 19, 2020
83b018a
MNT Change environment variables [cd build]
alfaro96 Nov 19, 2020
4209917
MNT Build in parallel [cd build]
alfaro96 Nov 19, 2020
c737e99
MNT Refactor code [cd build]
alfaro96 Nov 19, 2020
dcc569b
MNT Faster run of the tests [cd build]
alfaro96 Nov 19, 2020
9e450a6
MNT Minor changes [cd build]
alfaro96 Nov 19, 2020
1c27b97
CLN Apply suggested changes [cd build]
alfaro96 Nov 19, 2020
669bd8e
CLN Apply suggested changes [cd build]
alfaro96 Nov 19, 2020
7862040
[arm64]
alfaro96 Nov 20, 2020
91cf5b0
MNT Faster ARM64 [arm64]
alfaro96 Nov 20, 2020
f5abfbd
[scipy-dev]
alfaro96 Nov 20, 2020
aa2c21f
MNT Minor changes
alfaro96 Nov 20, 2020
9455008
[icc-build]
alfaro96 Nov 20, 2020
23fb380
[cd build]
alfaro96 Nov 20, 2020
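
The "[cd build]", "[arm64]", "[scipy-dev]" and "[icc-build]" tags seen in the commit messages above are what trigger the optional Travis jobs: the .travis.yml changes below only run those jobs on cron builds or when the commit message matches the corresponding pattern. As an illustration (the exact commands are not part of the PR), a wheel build can be re-triggered from the feature branch with an empty commit:

    # Re-run the ARM64 wheel-building jobs without changing any files;
    # the "[cd build]" tag matches the commit_message condition in .travis.yml.
    git commit --allow-empty -m "MNT Trigger build [cd build]"
    git push origin add_aarch64_build
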
97 changes: 70 additions & 27 deletions .travis.yml
@@ -1,58 +1,101 @@
# make it explicit that we favor the new container-based travis workers
# Make it explicit that we favor the
# new container-based Travis workers
language: python
dist: xenial

cache:
apt: true
directories:
- $HOME/.cache/pip
- $HOME/.ccache

dist: xenial
- $HOME/.cache/pip
- $HOME/.ccache

env:
global:
# Directory where tests are run from
- TEST_DIR=/tmp/sklearn
- CPU_COUNT=3
- TEST_DIR=/tmp/sklearn # Test directory for continuous integration jobs
- PYTEST_VERSION=latest
- OMP_NUM_THREADS=2
- OPENBLAS_NUM_THREADS=2
- PYTEST_VERSION=latest
- SKLEARN_BUILD_PARALLEL=3
- SKLEARN_SKIP_NETWORK_TESTS=1
# Custom environment variables for the ARM wheel builder
- CIBW_BUILD_VERBOSITY=1
- CIBW_TEST_REQUIRES="pytest pytest-xdist threadpoolctl"
- CIBW_TEST_COMMAND="bash {project}/build_tools/travis/test_wheels.sh"
- CIBW_ENVIRONMENT="CPU_COUNT=8
OMP_NUM_THREADS=2
OPENBLAS_NUM_THREADS=2
SKLEARN_BUILD_PARALLEL=8
SKLEARN_SKIP_NETWORK_TESTS=1"
# Nightly upload token and staging upload token are set in Travis settings
- SCIKIT_LEARN_NIGHTLY_UPLOAD_TOKEN=__token__
- SCIKIT_LEARN_STAGING_UPLOAD_TOKEN=__token__

matrix:
jobs:
include:
# Linux environment to test scikit-learn against numpy and scipy master
# installed from their CI wheels in a virtualenv with the Python
# interpreter provided by travis.
# Linux environment to test scikit-learn against NumPy and SciPy
# master installed from their continuous integration wheels in a
# virtual environment with Python interpreter provided by Travis.
- python: 3.7
env:
- CHECK_WARNINGS="true"
- CI_CPU_COUNT="3"
- CHECK_WARNINGS=true
if: type = cron OR commit_message =~ /\[scipy-dev\]/

# As above but build scikit-learn with Intel C compiler (ICC).
- python: 3.7
env:
- CHECK_WARNING="true"
- BUILD_WITH_ICC="true"
- CI_CPU_COUNT="3"
- CHECK_WARNING=true
- BUILD_WITH_ICC=true
if: type = cron OR commit_message =~ /\[icc-build\]/

- python: 3.7
env:
- CI_CPU_COUNT="8"
os: linux
arch: arm64
if: type = cron OR commit_message =~ /\[arm64\]/
env:
- CPU_COUNT=8

# Linux environments to build the scikit-learn wheels
# for the ARM64 architecture and Python 3.6 and newer
- python: 3.6
os: linux
arch: arm64
if: type = cron or commit_message =~ /\[cd build\]/
env:
- BUILD_WHEEL=true
- CIBW_BUILD=cp36-manylinux_aarch64

- python: 3.7
os: linux
arch: arm64
if: type = cron or commit_message =~ /\[cd build\]/
env:
- BUILD_WHEEL=true
- CIBW_BUILD=cp37-manylinux_aarch64

- python: 3.8
os: linux
arch: arm64
if: type = cron or commit_message =~ /\[cd build\]/
env:
- BUILD_WHEEL=true
- CIBW_BUILD=cp38-manylinux_aarch64

- python: 3.9
os: linux
arch: arm64
if: type = cron or commit_message =~ /\[cd build\]/
env:
- BUILD_WHEEL=true
- CIBW_BUILD=cp39-manylinux_aarch64

install: source build_tools/travis/install.sh
script:
- bash build_tools/travis/test_script.sh || travis_terminate 1
- bash build_tools/travis/test_docs.sh || travis_terminate 1
- bash build_tools/travis/test_pytest_soft_dependency.sh || travis_terminate 1
script: source build_tools/travis/script.sh
after_success: source build_tools/travis/after_success.sh

notifications:
webhooks:
urls:
- https://webhooks.gitter.im/e/4ffabb4df010b70cd624
on_success: change # options: [always|never|change] default: always
on_failure: always # options: [always|never|change] default: always
on_start: never # options: [always|never|change] default: always
on_success: change
on_failure: always
on_start: never
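
The wheel jobs delegate testing to CIBW_TEST_COMMAND, which points at build_tools/travis/test_wheels.sh; that script is not part of this diff. A minimal sketch of what it presumably does, assuming it only runs the test suite of the installed wheel with the pytest, pytest-xdist and threadpoolctl packages pulled in by CIBW_TEST_REQUIRES and the CPU_COUNT value exported through CIBW_ENVIRONMENT:

    #!/bin/bash
    # Hypothetical sketch of build_tools/travis/test_wheels.sh (not shown in
    # this diff): run the tests of the installed wheel, not of the source tree.
    set -e

    # Move away from the repository checkout so that "import sklearn" picks
    # up the wheel installed by cibuildwheel.
    cd /tmp
    python -c "import sklearn; sklearn.show_versions()"
    pytest --pyargs sklearn -n $CPU_COUNT
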
29 changes: 17 additions & 12 deletions build_tools/travis/after_success.sh
@@ -1,19 +1,24 @@
#!/bin/bash
# This script is meant to be called by the "after_success" step defined in
# .travis.yml. See https://docs.travis-ci.com/ for more details.

# License: 3-clause BSD
# This script is meant to be called by the "after_success" step
# defined in ".travis.yml". In particular, we upload the ARM64
# wheels for the continuous deployment jobs.

set -e

if [[ "$COVERAGE" == "true" ]]; then
# Need to run codecov from a git checkout, so we copy .coverage
# from TEST_DIR where pytest has been run
cp $TEST_DIR/.coverage $TRAVIS_BUILD_DIR
# The wheels cannot be uploaded on PRs
if [[ $BUILD_WHEEL == true && $TRAVIS_EVENT_TYPE != pull_request ]]; then
if [ $TRAVIS_EVENT_TYPE == cron ]; then
ANACONDA_ORG="scipy-wheels-nightly"
ANACONDA_TOKEN="$SCIKIT_LEARN_NIGHTLY_UPLOAD_TOKEN"
else
ANACONDA_ORG="scikit-learn-wheels-staging"
ANACONDA_TOKEN="$SCIKIT_LEARN_STAGING_UPLOAD_TOKEN"
fi

# Ignore codecov failures as the codecov server is not
# very reliable but we don't want travis to report a failure
# in the github UI just because the coverage report failed to
# be published.
codecov --root $TRAVIS_BUILD_DIR || echo "codecov upload failed"
pip install git+https://github.com/Anaconda-Server/anaconda-client

# Force a replacement if the remote file already exists
anaconda -t $ANACONDA_TOKEN upload --force -u $ANACONDA_ORG wheelhouse/*.whl
echo "Index: https://pypi.anaconda.org/$ANACONDA_ORG/simple"
fi
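
Once this after_success step has run on a cron build, the ARM64 wheels end up on the "scipy-wheels-nightly" anaconda.org channel whose index URL is echoed at the end of the script. As a usage example (assuming a wheel matching the local platform has been uploaded), they can be installed through pip's extra index option:

    # Install the nightly scikit-learn wheel from the index printed above.
    pip install --pre --extra-index-url \
        https://pypi.anaconda.org/scipy-wheels-nightly/simple scikit-learn
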
110 changes: 6 additions & 104 deletions build_tools/travis/install.sh
100755 → 100644
@@ -1,111 +1,13 @@
#!/bin/bash
# This script is meant to be called by the "install" step defined in
# .travis.yml. See https://docs.travis-ci.com/ for more details.
# The behavior of the script is controlled by environment variabled defined
# in the .travis.yml in the top level folder of the project.

# License: 3-clause BSD

# Travis clone scikit-learn/scikit-learn repository in to a local repository.
# We use a cached directory with three scikit-learn repositories (one for each
# matrix entry) from which we pull from local Travis repository. This allows
# us to keep build artefact for gcc + cython, and gain time
# This script is meant to be called by the "install" step
# defined in the ".travis.yml" file. In particular, it is
# important that we call the right installation script.

set -e

# Fail fast
echo "CPU Arch: ${TRAVIS_CPU_ARCH}"

# jq is used in travis_fastfail.sh, it's already pre-installed in non arm64
# environments
sudo apt-get install jq

build_tools/travis/travis_fastfail.sh

# Imports get_dep
source build_tools/shared.sh

echo "List files from cached directories"
echo "pip:"
ls $HOME/.cache/pip

export CC=/usr/lib/ccache/gcc
export CXX=/usr/lib/ccache/g++
# Useful for debugging how ccache is used
# export CCACHE_LOGFILE=/tmp/ccache.log
# ~60M is used by .ccache when compiling from scratch at the time of writing
ccache --max-size 100M --show-stats

# Deactivate the travis-provided virtual environment and setup a
# conda-based environment instead
# If Travvis has language=generic, deactivate does not exist. `|| :` will pass.
deactivate || :


# Install miniconda
if [[ "$TRAVIS_CPU_ARCH" == "arm64" ]]; then
wget https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-Linux-aarch64.sh -O miniconda.sh
if [[ $BUILD_WHEEL == true ]]; then
source build_tools/travis/install_wheels.sh
else
fname=Miniconda3-latest-Linux-x86_64.sh
wget https://repo.continuum.io/miniconda/$fname -O miniconda.sh
source build_tools/travis/install_master.sh
fi
MINICONDA_PATH=$HOME/miniconda
chmod +x miniconda.sh && ./miniconda.sh -b -p $MINICONDA_PATH
export PATH=$MINICONDA_PATH/bin:$PATH
conda update --yes conda

# Create environment and install dependencies
conda create -n testenv --yes python=3.7

source activate testenv

if [[ "$TRAVIS_CPU_ARCH" == "amd64" ]]; then
pip install --upgrade pip setuptools
echo "Installing numpy and scipy master wheels"
dev_anaconda_url=https://pypi.anaconda.org/scipy-wheels-nightly/simple
pip install --pre --upgrade --timeout=60 --extra-index $dev_anaconda_url numpy scipy pandas
pip install --pre cython
echo "Installing joblib master"
pip install https://github.com/joblib/joblib/archive/master.zip
echo "Installing pillow master"
pip install https://github.com/python-pillow/Pillow/archive/master.zip
else
conda install -y scipy numpy pandas cython
pip install joblib threadpoolctl
fi

pip install $(get_dep pytest $PYTEST_VERSION) pytest-cov pytest-xdist

# Build scikit-learn in the install.sh script to collapse the verbose
# build output in the travis output when it succeeds.
python --version
python -c "import numpy; print('numpy %s' % numpy.__version__)"
python -c "import scipy; print('scipy %s' % scipy.__version__)"

if [[ "$BUILD_WITH_ICC" == "true" ]]; then
wget https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS-2023.PUB
sudo apt-key add GPG-PUB-KEY-INTEL-SW-PRODUCTS-2023.PUB
rm GPG-PUB-KEY-INTEL-SW-PRODUCTS-2023.PUB
sudo add-apt-repository "deb https://apt.repos.intel.com/oneapi all main"
sudo apt-get update
sudo apt-get install intel-oneapi-icc
source /opt/intel/oneapi/setvars.sh

# The build_clib command is implicitly used to build libsvm-skl. To compile
# with a different compiler we also need to specify the compiler for this
# command.
python setup.py build_ext --compiler=intelem -i -j "${CI_CPU_COUNT}" build_clib --compiler=intelem
else
# Use setup.py instead of `pip install -e .` to be able to pass the -j flag
# to speed-up the building multicore CI machines.
python setup.py build_ext --inplace -j "${CI_CPU_COUNT}"
fi

python setup.py develop

ccache --show-stats
# Useful for debugging how ccache is used
# cat $CCACHE_LOGFILE

# fast fail
build_tools/travis/travis_fastfail.sh
102 changes: 102 additions & 0 deletions build_tools/travis/install_master.sh
@@ -0,0 +1,102 @@
#!/bin/bash

# Travis clones the "scikit-learn/scikit-learn" repository into
# a local repository. We use a cached directory with three
# scikit-learn repositories (one for each matrix entry of the
# non continuous deployment jobs) from which we pull the local
# Travis repository. This allows us to keep the build artifacts
# for GCC + Cython, and gain time.

set -e

echo "CPU Arch: $TRAVIS_CPU_ARCH."

# Import "get_dep"
source build_tools/shared.sh

echo "List files from cached directories."
echo "pip:"
ls $HOME/.cache/pip

export CC=/usr/lib/ccache/gcc
export CXX=/usr/lib/ccache/g++

# Useful for debugging how ccache is used
# export CCACHE_LOGFILE=/tmp/ccache.log

# 60MB are (more or less) used by .ccache, when
# compiling from scratch at the time of writing
ccache --max-size 100M --show-stats

# Deactivate the default virtual environment
# to setup a conda-based environment instead
deactivate

if [[ $TRAVIS_CPU_ARCH == arm64 ]]; then
# Different Miniconda URL for ARM64 architectures
MINICONDA_URL="https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-Linux-aarch64.sh"
else
MINICONDA_URL="https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh"
fi

# Install Miniconda
wget $MINICONDA_URL -O miniconda.sh
MINICONDA_PATH=$HOME/miniconda
chmod +x miniconda.sh && ./miniconda.sh -b -p $MINICONDA_PATH
export PATH=$MINICONDA_PATH/bin:$PATH
conda update --yes conda

# Create environment and install dependencies
conda create -n testenv --yes python=3.7

source activate testenv

if [[ $TRAVIS_CPU_ARCH == amd64 ]]; then
echo "Upgrading pip and setuptools."
pip install --upgrade pip setuptools
echo "Installing numpy, scipy and pandas master wheels."
dev_anaconda_url=https://pypi.anaconda.org/scipy-wheels-nightly/simple
pip install --pre --upgrade --timeout=60 --extra-index $dev_anaconda_url numpy scipy pandas
echo "Installing cython pre-release wheels."
pip install --pre cython
echo "Installing joblib master."
pip install https://github.com/joblib/joblib/archive/master.zip
echo "Installing pillow master."
pip install https://github.com/python-pillow/Pillow/archive/master.zip
else
conda install -y scipy numpy pandas cython
pip install joblib threadpoolctl
fi

pip install $(get_dep pytest $PYTEST_VERSION) pytest-xdist

# Build scikit-learn in this script to collapse the
# verbose build output in the Travis output when it
# succeeds
python --version
python -c "import numpy; print(f'numpy {numpy.__version__}')"
python -c "import scipy; print(f'scipy {scipy.__version__}')"

if [[ $BUILD_WITH_ICC == true ]]; then
wget https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS-2023.PUB
sudo apt-key add GPG-PUB-KEY-INTEL-SW-PRODUCTS-2023.PUB
rm GPG-PUB-KEY-INTEL-SW-PRODUCTS-2023.PUB
sudo add-apt-repository "deb https://apt.repos.intel.com/oneapi all main"
sudo apt-get update
sudo apt-get install intel-oneapi-icc
source /opt/intel/oneapi/setvars.sh

# The "build_clib" command is implicitly used to build "libsvm-skl".
# To compile with a different compiler, we also need to specify the
# compiler for this command
python setup.py build_ext --compiler=intelem -i build_clib --compiler=intelem
else
pip install -e .
fi

python setup.py develop

ccache --show-stats

# Useful for debugging how ccache is used
# cat $CCACHE_LOGFILE
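
install_master.sh relies on the get_dep helper sourced from build_tools/shared.sh, which is not touched by this PR. A rough sketch of the behaviour it is assumed to have here, namely turning a package name plus a version spec (with "latest" meaning unpinned) into a pip requirement string:

    # Hypothetical stand-in for get_dep from build_tools/shared.sh (not part
    # of this diff): build a pip requirement string from a name and a version.
    get_dep() {
        local package="$1"
        local version="$2"
        if [[ "$version" == "latest" ]]; then
            # No pin: let pip resolve the newest release.
            echo "$package"
        else
            echo "$package==$version"
        fi
    }

    # Example: get_dep pytest latest  -> "pytest"
    #          get_dep pytest 6.1.2   -> "pytest==6.1.2"
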
6 changes: 6 additions & 0 deletions build_tools/travis/install_wheels.sh
@@ -0,0 +1,6 @@
#!/bin/bash

set -e

python -m pip install cibuildwheel
python -m cibuildwheel --output-dir wheelhouse
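
install_wheels.sh itself is only the cibuildwheel invocation; everything specific to the ARM64 wheels comes from the CIBW_* variables defined in .travis.yml. A sketch of how one CI job could be reproduced locally on an aarch64 Linux machine (assuming Docker is available, since cibuildwheel builds inside the manylinux images):

    # Approximate local reproduction of the Python 3.8 ARM64 wheel job.
    export CIBW_BUILD=cp38-manylinux_aarch64
    export CIBW_BUILD_VERBOSITY=1
    export CIBW_TEST_REQUIRES="pytest pytest-xdist threadpoolctl"
    export CIBW_TEST_COMMAND="bash {project}/build_tools/travis/test_wheels.sh"
    export CIBW_ENVIRONMENT="CPU_COUNT=8 OMP_NUM_THREADS=2 OPENBLAS_NUM_THREADS=2 SKLEARN_BUILD_PARALLEL=8 SKLEARN_SKIP_NETWORK_TESTS=1"
    python -m pip install cibuildwheel
    python -m cibuildwheel --output-dir wheelhouse
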