From 87963b218893b29c3ea4dcd24c1bf24a08a16d71 Mon Sep 17 00:00:00 2001
From: Charles Bousseau
Date: Thu, 21 Sep 2023 15:24:07 -0400
Subject: [PATCH 01/45] align test_dispatcher s390x targets with _umath_tests_mtargets

---
 numpy/core/tests/test_cpu_dispatcher.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/numpy/core/tests/test_cpu_dispatcher.py b/numpy/core/tests/test_cpu_dispatcher.py
index 2f7eac7e8e90..41a60d5c395c 100644
--- a/numpy/core/tests/test_cpu_dispatcher.py
+++ b/numpy/core/tests/test_cpu_dispatcher.py
@@ -9,7 +9,8 @@ def test_dispatcher():
     targets = (
         "SSE2", "SSE41", "AVX2",
         "VSX", "VSX2", "VSX3",
-        "NEON", "ASIMD", "ASIMDHP"
+        "NEON", "ASIMD", "ASIMDHP",
+        "VX", "VXE"
     )
     highest_sfx = "" # no suffix for the baseline
     all_sfx = []

From 0b7b6e994c26d738b8bcc0abf1ded78a46a48cdd Mon Sep 17 00:00:00 2001
From: Charles Harris
Date: Sat, 14 Oct 2023 14:32:55 -0600
Subject: [PATCH 02/45] REL: prepare 1.26.x for further development

[skip actions] [skip azp] [skip cirrus]

---
 doc/source/release.rst              |  1 +
 doc/source/release/1.26.2-notes.rst | 18 ++++++++++++++++++
 pavement.py                         |  2 +-
 pyproject.toml                      |  2 +-
 pyproject.toml.setuppy              |  2 +-
 5 files changed, 22 insertions(+), 3 deletions(-)
 create mode 100644 doc/source/release/1.26.2-notes.rst

diff --git a/doc/source/release.rst b/doc/source/release.rst
index 2c2947ee6685..0e0d477ce06b 100644
--- a/doc/source/release.rst
+++ b/doc/source/release.rst
@@ -5,6 +5,7 @@ Release notes
 .. toctree::
    :maxdepth: 3
 
+   1.26.2
    1.26.1
    1.26.0
    1.25.2

diff --git a/doc/source/release/1.26.2-notes.rst b/doc/source/release/1.26.2-notes.rst
new file mode 100644
index 000000000000..ff4920dd3ff8
--- /dev/null
+++ b/doc/source/release/1.26.2-notes.rst
@@ -0,0 +1,18 @@
+.. currentmodule:: numpy
+
+==========================
+NumPy 1.26.2 Release Notes
+==========================
+
+NumPy 1.26.2 is a maintenance release that fixes bugs and regressions
+discovered after the 1.26.1 release. The 1.26.x release series is the last
+planned minor release series before NumPy 2.0. The Python versions supported by
+this release are 3.9-3.12.
+
+Contributors
+============
+
+
+Pull requests merged
+====================
+

diff --git a/pavement.py b/pavement.py
index 70a5e7657dde..41b04bc108fc 100644
--- a/pavement.py
+++ b/pavement.py
@@ -38,7 +38,7 @@
 
 #-----------------------------------
 # Path to the release notes
-RELEASE_NOTES = 'doc/source/release/1.26.1-notes.rst'
+RELEASE_NOTES = 'doc/source/release/1.26.2-notes.rst'
 
 
 #-------------------------------------------------------

diff --git a/pyproject.toml b/pyproject.toml
index ec8da79d3963..2a015e4565c6 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -17,7 +17,7 @@ requires = [
 
 [project]
 name = "numpy"
-version = "1.26.1"
+version = "1.26.2"
 # TODO: add `license-files` once PEP 639 is accepted (see meson-python#88)
 license = {file = "LICENSE.txt"}

diff --git a/pyproject.toml.setuppy b/pyproject.toml.setuppy
index bfb093ba7648..02a7d7c57de5 100644
--- a/pyproject.toml.setuppy
+++ b/pyproject.toml.setuppy
@@ -3,7 +3,7 @@
 # to avoid building with Meson (e.g., in the Emscripten/Pyodide CI job)
 [project]
 name = "numpy"
-version = "1.26.1"
+version = "1.26.2"
 
 [build-system]
 requires = [

From d3876a8765311b9aa639cf29b3d18df841414b37 Mon Sep 17 00:00:00 2001
From: William Ayd
Date: Tue, 17 Oct 2023 04:30:08 -0400
Subject: [PATCH 03/45] ENH: Add Cython enumeration for NPY_FR_GENERIC (#24923)

Add Cython enumeration for NPY_FR_GENERIC

---
 numpy/__init__.cython-30.pxd | 1 +
 numpy/__init__.pxd           | 1 +
 2 files changed, 2 insertions(+)

diff --git a/numpy/__init__.cython-30.pxd b/numpy/__init__.cython-30.pxd
index 0dd2fff2b87c..3ad186e40926 100644
--- a/numpy/__init__.cython-30.pxd
+++ b/numpy/__init__.cython-30.pxd
@@ -852,6 +852,7 @@ cdef extern from "numpy/arrayscalars.h":
         NPY_FR_ps
         NPY_FR_fs
         NPY_FR_as
+        NPY_FR_GENERIC
 
 
 #

diff --git a/numpy/__init__.pxd b/numpy/__init__.pxd
index 47d9294c1c4b..ca0a3a6c5288 100644
--- a/numpy/__init__.pxd
+++ b/numpy/__init__.pxd
@@ -810,6 +810,7 @@ cdef extern from "numpy/arrayscalars.h":
         NPY_FR_ps
         NPY_FR_fs
         NPY_FR_as
+        NPY_FR_GENERIC
 
 
 #

From 82d7657ce39c97fcfd86e1a5acee8b5d00682169 Mon Sep 17 00:00:00 2001
From: Sebastian Berg
Date: Thu, 19 Oct 2023 15:58:18 +0200
Subject: [PATCH 04/45] REL: Remove Python upper version from the release branch

While the upper version is technically correct for the released version
of NumPy (we are sure it will not work on Python 3.13), advertising it
creates some problems, mostly for locking resolvers. They try to guess
correct versions for non-released Python versions...

This is probably an ecosystem issue or just "reasons", but it seems less
useful than trouble to do the correct advertising here. See gh-24810 for
*way* too much discussion about the why (and you will still be confused
afterwards probably, so...).

This needs to be fixed or at least documented more clearly upstream by
PyPA or similar, but...

Closes gh-24810

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 2a015e4565c6..fa96cba5dcf9 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -26,7 +26,7 @@ authors = [{name = "Travis E. Oliphant et al."}]
 maintainers = [
     {name = "NumPy Developers", email="numpy-discussion@python.org"},
 ]
-requires-python = ">=3.9,<3.13"
+requires-python = ">=3.9"
 readme = "README.md"
 classifiers = [
     'Development Status :: 5 - Production/Stable',

From 18278b1a5b0020954ca49759ca374b96505b3ece Mon Sep 17 00:00:00 2001
From: Mark Mentovai
Date: Fri, 20 Oct 2023 15:20:48 -0400
Subject: [PATCH 05/45] BLD: Use the correct Python interpreter when running tempita.py

When a program located by find_program is executable directly, meson
will execute it. If it is a script, its shebang (#!) line will control
which interpreter is used. tempita.py specifies a standard shebang line
of #!/usr/bin/env python3, so that python3 can be located by PATH
lookup.

In cases where the python3 found in the PATH is not correct, such as on
systems that have multiple copies of python3 installed, when building
numpy for one that is not the python3 found by searching PATH, the wrong
interpreter will be used. When the python3 found by this mechanism
doesn't have Cython available, it will cause the numpy build to fail.

With the executable bit removed from tempita.py, meson will scan the
script when attempting to execute it, locating the shebang line and
substituting the appropriate sys.executable. See
https://github.com/numpy/meson/blob/22df45a31981874310a78dde0df59a6a7c5ebb29/mesonbuild/programs.py#L179.

---
 numpy/_build_utils/tempita.py | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 mode change 100755 => 100644 numpy/_build_utils/tempita.py

diff --git a/numpy/_build_utils/tempita.py b/numpy/_build_utils/tempita.py
old mode 100755
new mode 100644

From a0f6999600676c373a9d367eb2065191dc5b3906 Mon Sep 17 00:00:00 2001
From: Sebastian Berg
Date: Thu, 19 Oct 2023 11:27:44 +0200
Subject: [PATCH 06/45] MAINT: Remove unhelpful error replacements from `import_array()`

Replacing the original error is just not useful and actively unhelpful
since the original error may have more information. We could chain the
error, but there seems little reason to do so.

---
 numpy/core/code_generators/generate_numpy_api.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/numpy/core/code_generators/generate_numpy_api.py b/numpy/core/code_generators/generate_numpy_api.py
index bfcb0d0e5444..ae38c4efc2e3 100644
--- a/numpy/core/code_generators/generate_numpy_api.py
+++ b/numpy/core/code_generators/generate_numpy_api.py
@@ -55,7 +55,6 @@
   c_api = PyObject_GetAttrString(numpy, "_ARRAY_API");
   Py_DECREF(numpy);
   if (c_api == NULL) {
-      PyErr_SetString(PyExc_AttributeError, "_ARRAY_API not found");
       return -1;
   }

From e5b1ce1b65689e1b49b72c4017ba8538d9c297a6 Mon Sep 17 00:00:00 2001
From: Ralf Gommers
Date: Fri, 20 Oct 2023 12:34:46 +0200
Subject: [PATCH 07/45] BLD: use classic linker on macOS, the new one in XCode 15 has issues

Closes gh-24964

---
 meson.build | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/meson.build b/meson.build
index 264160d132eb..a8cec6d104d4 100644
--- a/meson.build
+++ b/meson.build
@@ -82,5 +82,11 @@ if cc_id.startswith('clang')
   endif
 endif
 
+if host_machine.system() == 'darwin' and cc.has_link_argument('-Wl,-ld_classic')
+  # New linker introduced in macOS 14 not working yet with at least OpenBLAS in Spack,
+  # see gh-24964 (and linked scipy issue from there).
+ add_project_link_arguments('-Wl,-ld_classic', language : ['c', 'cpp']) +endif + subdir('meson_cpu') subdir('numpy') From b93e52e7032886c334ccbcf1011741598b474196 Mon Sep 17 00:00:00 2001 From: Ralf Gommers Date: Fri, 20 Oct 2023 12:35:39 +0200 Subject: [PATCH 08/45] BLD: only check for scipy-openblas dependency with pkg-config This is a custom-built wheel that can only be detected through pkg-config. So trying other methods just costs time and increases the size of `meson-log.txt`. --- numpy/meson.build | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/numpy/meson.build b/numpy/meson.build index 8da83ce61643..0d17612e81d6 100644 --- a/numpy/meson.build +++ b/numpy/meson.build @@ -93,7 +93,7 @@ mkl_may_use_sdl = not use_ilp64 and _threading_opt in ['auto', 'iomp'] # First try scipy-openblas, and if found don't look for cblas or lapack, we # know what's inside the scipy-openblas wheels already. if blas_name == 'openblas' or blas_name == 'auto' - blas = dependency('scipy-openblas', required: false) + blas = dependency('scipy-openblas', method: 'pkg-config', required: false) if blas.found() blas_name = 'scipy-openblas' endif From af234197531d3b6797aeb60835722aa9c518223a Mon Sep 17 00:00:00 2001 From: Andrew Nelson Date: Wed, 25 Oct 2023 08:34:12 +1100 Subject: [PATCH 09/45] BLD: musllinux_aarch64 [wheel build] --- pyproject.toml | 2 +- tools/ci/cirrus_wheels.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index fa96cba5dcf9..b61adf36217f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -150,7 +150,7 @@ tracker = "https://github.com/numpy/numpy/issues" # Note: the below skip command doesn't do much currently, the platforms to # build wheels for in CI are controlled in `.github/workflows/wheels.yml` and # `tools/ci/cirrus_wheels.yml`. -skip = "cp36-* cp37-* cp-38* pp37-* pp38-* *-manylinux_i686 *_ppc64le *_s390x *-musllinux_aarch64" +skip = "cp36-* cp37-* cp-38* pp37-* pp38-* *-manylinux_i686 *_ppc64le *_s390x" build-verbosity = "3" before-build = "bash {project}/tools/wheels/cibw_before_build.sh {project}" config-settings = "setup-args=-Duse-ilp64=true setup-args=-Dblas=openblas setup-args=-Dlapack=openblas setup-args=-Dblas-symbol-suffix=64_" diff --git a/tools/ci/cirrus_wheels.yml b/tools/ci/cirrus_wheels.yml index ea290a8b23c8..2cc1f6f69884 100644 --- a/tools/ci/cirrus_wheels.yml +++ b/tools/ci/cirrus_wheels.yml @@ -17,7 +17,7 @@ linux_aarch64_task: image: family/docker-builder-arm64 architecture: arm64 platform: linux - cpu: 2 + cpu: 1 memory: 8G matrix: # build in a matrix because building and testing all four wheels in a From 25d8f0e25e9b9128cfc3bebf1800143603de7658 Mon Sep 17 00:00:00 2001 From: Charles Harris Date: Tue, 31 Oct 2023 10:07:35 -0600 Subject: [PATCH 10/45] MAINT: Update mailmap [skip ci] --- .mailmap | 1 - 1 file changed, 1 deletion(-) diff --git a/.mailmap b/.mailmap index c34bd81bc90a..21f4af76ecc3 100644 --- a/.mailmap +++ b/.mailmap @@ -29,7 +29,6 @@ @luzpaz @luzpaz @partev -@pierreloicq @pkubaj @pmvz @pratiklp00 From 6bb799b6a759a8cdcc5a7c9765de4a4a02f644ff Mon Sep 17 00:00:00 2001 From: Ralf Gommers Date: Sun, 13 Aug 2023 22:21:49 +0200 Subject: [PATCH 11/45] MAINT: Split up .github/workflows to match main This is a backport of #24493 and #24291. The purpose of this is to ease future backports that expect these files. - CI: move some jobs in `build_test.yml` to Meson - CI: split `build_test.yml` into three GHA jobs files Also documents better what is being run. 
See gh-24410 for the overall restructuring plan for GitHub Actions CI. - CI: merge `linux_meson.yml` into `linux_blas.yml` - TST: disable mypy tests in test suite unless an environment variable is set These tests are super slow, and they're effectively always passing in CI. Running them on all "full" test suite runs is too expensive. Note that SciPy has an XSLOW mark, NumPy does not. So use an env var for now. - CI: add new GHA CI jobs to run MyPy across OS/Python flavors --- .github/actions/action.yml | 1 - .github/workflows/linux.yml | 208 ++++++++++ .github/workflows/linux_blas.yml | 109 ++++++ .github/workflows/linux_meson.yml | 355 ------------------ .../{build_test.yml => linux_simd.yml} | 287 ++++++++++---- .github/workflows/mypy.yml | 67 ++++ numpy/typing/tests/test_typing.py | 14 + 7 files changed, 622 insertions(+), 419 deletions(-) create mode 100644 .github/workflows/linux.yml create mode 100644 .github/workflows/linux_blas.yml delete mode 100644 .github/workflows/linux_meson.yml rename .github/workflows/{build_test.yml => linux_simd.yml} (64%) create mode 100644 .github/workflows/mypy.yml diff --git a/.github/actions/action.yml b/.github/actions/action.yml index 20a239026e4f..2c271c0c5726 100644 --- a/.github/actions/action.yml +++ b/.github/actions/action.yml @@ -11,7 +11,6 @@ runs: echo DOWNLOAD_OPENBLAS $DOWNLOAD_OPENBLAS echo USE_DEBUG $USE_DEBUG echo NPY_USE_BLAS_ILP64 $NPY_USE_BLAS_ILP64 - echo NUMPY_EXPERIMENTAL_ARRAY_FUNCTION $NUMPY_EXPERIMENTAL_ARRAY_FUNCTION echo USE_ASV $USE_ASV echo PATH $PATH echo python `which python` diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml new file mode 100644 index 000000000000..d73de33c7640 --- /dev/null +++ b/.github/workflows/linux.yml @@ -0,0 +1,208 @@ +name: Linux tests + +# This file is meant for testing across supported Python versions, build types +# and interpreters (PyPy, python-dbg, a pre-release Python in summer time), +# build-via-sdist, run benchmarks, measure code coverage, and other build +# options like relaxed-strides. 
+ +on: + push: + branches: + # coverage comparison in the "full" step needs to run on main after merges + - main + pull_request: + branches: + - main + - maintenance/** + +defaults: + run: + shell: bash + +env: + DOWNLOAD_OPENBLAS: 1 + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read # to fetch code (actions/checkout) + +jobs: + lint: + if: github.repository == 'numpy/numpy' && github.event_name != 'push' + runs-on: ubuntu-latest + continue-on-error: true + steps: + - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + with: + submodules: recursive + fetch-depth: 0 + - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 + with: + python-version: '3.9' + - name: Install linter requirements + run: + python -m pip install -r linter_requirements.txt + - name: Run linter on PR diff + run: + python tools/linter.py --branch origin/${{ github.base_ref }} + + smoke_test: + if: "github.repository == 'numpy/numpy'" + runs-on: ubuntu-latest + env: + MESON_ARGS: "-Dallow-noblas=true -Dcpu-baseline=none -Dcpu-dispatch=none" + steps: + - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + with: + submodules: recursive + fetch-depth: 0 + - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 + with: + python-version: '3.9' + - uses: ./.github/meson_actions + + basic: + needs: [smoke_test] + runs-on: ubuntu-latest + if: github.event_name != 'push' + strategy: + matrix: + python-version: ["3.9", "pypy3.9-v7.3.12"] + env: + EXPECT_CPU_FEATURES: "SSE SSE2 SSE3 SSSE3 SSE41 POPCNT SSE42 AVX F16C FMA3 AVX2 AVX512F AVX512CD AVX512_KNL AVX512_KNM AVX512_SKX AVX512_CLX AVX512_CNL AVX512_ICL" + steps: + - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + with: + submodules: recursive + fetch-depth: 0 + - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 + with: + python-version: ${{ matrix.python-version }} + - uses: ./.github/actions + + debug: + needs: [smoke_test] + runs-on: ubuntu-latest + if: github.event_name != 'push' + env: + USE_DEBUG: 1 + steps: + - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + with: + submodules: recursive + fetch-depth: 0 + - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 + with: + python-version: '3.11' + + - uses: ./.github/actions + + full: + # Build a wheel, install it, then run the full test suite with code coverage + needs: [smoke_test] + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + with: + submodules: recursive + fetch-depth: 0 + - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 + with: + python-version: '3.9' + - name: Install build and test dependencies from PyPI + run: | + pip install -r build_requirements.txt + pip install -r test_requirements.txt + - name: Install gfortran and OpenBLAS (MacPython build) + run: | + set -xe + sudo apt install gfortran libgfortran5 + target=$(python tools/openblas_support.py) + sudo cp -r $target/lib/* /usr/lib + sudo cp $target/include/* /usr/include + - name: Build a wheel + run: | + python -m build --wheel --no-isolation --skip-dependency-check + pip install dist/numpy*.whl + - name: Run full test suite + run: | + cd tools + pytest --pyargs numpy --cov-report=html:build/coverage + # TODO: gcov + + benchmark: + needs: [smoke_test] + runs-on: 
ubuntu-latest + if: github.event_name != 'push' + env: + PYTHONOPTIMIZE: 2 + BLAS: None + LAPACK: None + ATLAS: None + NPY_BLAS_ORDER: mkl,blis,openblas,atlas,blas + NPY_LAPACK_ORDER: MKL,OPENBLAS,ATLAS,LAPACK + USE_ASV: 1 + steps: + - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + with: + submodules: recursive + fetch-depth: 0 + - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 + with: + python-version: '3.9' + - uses: ./.github/actions + + relaxed_strides_debug: + needs: [smoke_test] + runs-on: ubuntu-latest + if: github.event_name != 'push' + env: + CHECK_BLAS: 1 + NPY_USE_BLAS_ILP64: 1 + NPY_RELAXED_STRIDES_DEBUG: 1 + steps: + - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + with: + submodules: recursive + fetch-depth: 0 + - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 + with: + python-version: '3.11' + - uses: ./.github/actions + + sdist: + needs: [smoke_test] + runs-on: ubuntu-latest + if: github.event_name != 'push' + steps: + - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + with: + submodules: recursive + fetch-depth: 0 + - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 + with: + python-version: '3.11' + - name: Install gfortran and OpenBLAS (MacPython build) + run: | + set -xe + sudo apt install gfortran libgfortran5 + target=$(python tools/openblas_support.py) + sudo cp -r $target/lib/* /usr/lib + sudo cp $target/include/* /usr/include + - name: Build a wheel via an sdist + run: | + pip install build + python -m build + pip install dist/numpy*.whl + - name: Install test dependencies + run: | + pip install -r test_requirements.txt + pip install ninja + - name: Run test suite + run: | + cd tools + pytest --pyargs numpy -m "not slow" + diff --git a/.github/workflows/linux_blas.yml b/.github/workflows/linux_blas.yml new file mode 100644 index 000000000000..699381f6584f --- /dev/null +++ b/.github/workflows/linux_blas.yml @@ -0,0 +1,109 @@ +name: BLAS tests (Linux) + +# This file is meant for testing different BLAS/LAPACK flavors and build +# options on Linux. All other yml files for Linux will only test without BLAS +# (mostly because that's easier and faster to build) or with the same 64-bit +# OpenBLAS build that is used in the wheel jobs. +# +# Jobs and their purpose: +# +# - openblas64_setuppy: +# This job uses the default 64-bit build of OpenBLAS with the +# `numpy.distutils`-based build. It can be removed once we remove +# support for those builds. +# - openblas32_stable_nightly: +# Uses the 32-bit OpenBLAS builds, both the latest stable release and a +# nightly build. +# +# TODO: coverage here is limited, we should add non-OpenBLAS libraries and +# exercise the BLAS-related build options (see `meson_options.txt`). 
+ +on: + pull_request: + branches: + - main + - maintenance/** + +defaults: + run: + shell: bash + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read # to fetch code (actions/checkout) + +jobs: + openblas64_setuppy: + runs-on: ubuntu-latest + if: "github.repository == 'numpy/numpy'" + env: + DOWNLOAD_OPENBLAS: 1 + NPY_USE_BLAS_ILP64: 1 + steps: + - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + with: + submodules: recursive + fetch-depth: 0 + - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 + with: + python-version: '3.11' + - uses: ./.github/actions + + openblas32_stable_nightly: + if: "github.repository == 'numpy/numpy'" + runs-on: ubuntu-latest + strategy: + matrix: + USE_NIGHTLY_OPENBLAS: [false, true] + env: + USE_NIGHTLY_OPENBLAS: ${{ matrix.USE_NIGHTLY_OPENBLAS }} + name: "Test Linux (${{ matrix.USE_NIGHTLY_OPENBLAS && 'nightly' || 'stable' }} OpenBLAS)" + steps: + - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + with: + submodules: recursive + fetch-depth: 0 + - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 + with: + python-version: '3.11' + + - name: Install dependencies + run: | + pip install -r build_requirements.txt + # Install OpenBLAS + set -xe + if [[ $USE_NIGHTLY_OPENBLAS == "true" ]]; then + target=$(python tools/openblas_support.py --nightly) + else + target=$(python tools/openblas_support.py) + fi + sudo cp -r $target/lib/* /usr/lib + sudo cp $target/include/* /usr/include + + - name: Build + shell: 'script -q -e -c "bash --noprofile --norc -eo pipefail {0}"' + env: + TERM: xterm-256color + run: + spin build -- --werror + + - name: Check build-internal dependencies + run: + ninja -C build -t missingdeps + + - name: Check installed test and stub files + run: + python tools/check_installed_files.py $(find ./build-install -path '*/site-packages/numpy') + + - name: Test + shell: 'script -q -e -c "bash --noprofile --norc -eo pipefail {0}"' + env: + TERM: xterm-256color + LD_LIBRARY_PATH: "/usr/local/lib/" # to find libopenblas.so.0 + + run: | + pip install pytest pytest-xdist hypothesis typing_extensions + spin test -j auto diff --git a/.github/workflows/linux_meson.yml b/.github/workflows/linux_meson.yml deleted file mode 100644 index ebbd52997e42..000000000000 --- a/.github/workflows/linux_meson.yml +++ /dev/null @@ -1,355 +0,0 @@ -name: BLAS tests (Linux) - -# This file is meant for testing different BLAS/LAPACK flavors and build -# options on Linux. All other yml files for Linux will only test without BLAS -# (mostly because that's easier and faster to build) or with the same 64-bit -# OpenBLAS build that is used in the wheel jobs. -# -# Jobs and their purpose: -# -# - openblas32_stable_nightly: -# Uses the 32-bit OpenBLAS builds, both the latest stable release -# and a nightly build. -# - openblas_no_pkgconfig_fedora: -# Test OpenBLAS on Fedora. Fedora doesn't ship .pc files for OpenBLAS, -# hence this exercises the "system dependency" detection method. -# - flexiblas_fedora: -# Tests FlexiBLAS (the default on Fedora for its own packages), via -# pkg-config. FlexiBLAS allows runtime switching of BLAS/LAPACK -# libraries, which is a useful capability (not tested in this job). -# - openblas_cmake: -# Tests whether OpenBLAS LP64 is detected correctly when only CMake -# and not pkg-config is installed. 
-# - netlib: -# Installs vanilla blas/lapack, which is the last option tried in -# auto-detection. -# - mkl: -# Tests MKL installed from PyPI (because easiest/fastest, if broken) in -# 3 ways: both LP64 and ILP64 via pkg-config, and then using the -# Single Dynamic Library (SDL, or `libmkl_rt`). -# - blis: -# Simple test for LP64 via pkg-config -# - atlas: -# Simple test for LP64 via pkg-config - -on: - pull_request: - branches: - - main - - maintenance/** - -defaults: - run: - shell: bash - -env: - PYTHON_VERSION: 3.11 - -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} - cancel-in-progress: true - -permissions: - contents: read # to fetch code (actions/checkout) - -jobs: - meson_spin: - if: "github.repository == 'numpy/numpy'" - runs-on: ubuntu-latest - strategy: - matrix: - USE_NIGHTLY_OPENBLAS: [false, true] - env: - USE_NIGHTLY_OPENBLAS: ${{ matrix.USE_NIGHTLY_OPENBLAS }} - name: "Test Linux (${{ matrix.USE_NIGHTLY_OPENBLAS && 'nightly' || 'stable' }} OpenBLAS)" - steps: - - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 - with: - submodules: recursive - fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 - with: - python-version: ${{ env.PYTHON_VERSION }} - - name: Install dependencies - run: | - pip install -r build_requirements.txt - # Install OpenBLAS - set -xe - if [[ $USE_NIGHTLY_OPENBLAS == "true" ]]; then - target=$(python tools/openblas_support.py --nightly) - else - target=$(python tools/openblas_support.py) - fi - sudo cp -r $target/lib/* /usr/lib - sudo cp $target/include/* /usr/include - - name: Build - shell: 'script -q -e -c "bash --noprofile --norc -eo pipefail {0}"' - env: - TERM: xterm-256color - run: - spin build -- --werror - - name: Check build-internal dependencies - run: - ninja -C build -t missingdeps - - name: Check installed test and stub files - run: - python tools/check_installed_files.py $(find ./build-install -path '*/site-packages/numpy') - - name: Test - shell: 'script -q -e -c "bash --noprofile --norc -eo pipefail {0}"' - env: - TERM: xterm-256color - LD_LIBRARY_PATH: "/usr/local/lib/" # to find libopenblas.so.0 - run: | - export NPY_RUN_MYPY_IN_TESTSUITE=1 - pip install pytest pytest-xdist hypothesis typing_extensions - spin test -j auto - - - openblas_no_pkgconfig_fedora: - if: "github.repository == 'numpy/numpy'" - runs-on: ubuntu-latest - container: fedora:39 - name: "OpenBLAS (Fedora, no pkg-config, LP64/ILP64)" - steps: - - name: Install system dependencies - run: | - dnf install git gcc-gfortran g++ python3-devel openblas-devel -y - - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 - with: - submodules: recursive - fetch-depth: 0 - - - name: Install dependencies - run: | - pip install -r build_requirements.txt - pip install pytest hypothesis typing_extensions - - - name: Build (LP64) - run: spin build -- -Dblas=openblas -Dlapack=openblas -Ddisable-optimization=true - - - name: Test - run: spin test -- numpy/linalg - - - name: Build (ILP64) - run: | - rm -rf build - spin build -- -Duse-ilp64=true -Ddisable-optimization=true - - - name: Test - run: spin test -- numpy/linalg - - - flexiblas_fedora: - if: "github.repository == 'numpy/numpy'" - runs-on: ubuntu-latest - container: fedora:39 - name: "FlexiBLAS (LP64, ILP64 on Fedora)" - steps: - - name: Install system dependencies - run: | - dnf install git gcc-gfortran g++ python3-devel flexiblas-devel -y - - - uses: 
actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 - with: - submodules: recursive - fetch-depth: 0 - - - name: Install dependencies - run: | - pip install -r build_requirements.txt - pip install pytest hypothesis typing_extensions - - - name: Build - run: spin build -- -Ddisable-optimization=true - - - name: Test - run: spin test -- numpy/linalg - - - name: Build (ILP64) - run: | - rm -rf build - spin build -- -Ddisable-optimization=true -Duse-ilp64=true - - - name: Test (ILP64) - run: spin test -- numpy/linalg - - - openblas_cmake: - if: "github.repository == 'numpy/numpy'" - runs-on: ubuntu-latest - name: "OpenBLAS with CMake" - steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 - with: - submodules: recursive - fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 - with: - python-version: '3.11' - - - name: Install dependencies - run: | - pip install -r build_requirements.txt - pip install pytest pytest-xdist hypothesis typing_extensions - sudo apt-get install libopenblas-dev cmake - sudo apt-get remove pkg-config - - - name: Build - run: spin build -- -Ddisable-optimization=true - - - name: Test - run: spin test -j auto -- numpy/linalg - - - netlib: - if: "github.repository == 'numpy/numpy'" - runs-on: ubuntu-latest - name: "Netlib BLAS/LAPACK" - steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 - with: - submodules: recursive - fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 - with: - python-version: '3.11' - - - name: Install dependencies - run: | - pip install -r build_requirements.txt - sudo apt-get install liblapack-dev pkg-config - - - name: Build - run: | - spin build -- -Ddisable-optimization=true - - - name: Test - run: | - pip install pytest pytest-xdist hypothesis typing_extensions - spin test -j auto -- numpy/linalg - - - mkl: - if: "github.repository == 'numpy/numpy'" - runs-on: ubuntu-latest - name: "MKL (LP64, ILP64, SDL)" - steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 - with: - submodules: recursive - fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 - with: - python-version: '3.11' - - - name: Install dependencies - run: | - pip install -r build_requirements.txt - pip install pytest pytest-xdist hypothesis typing_extensions - pip install mkl mkl-devel - - - name: Repair MKL pkg-config files and symlinks - run: | - # MKL 2023.2 works when installed from conda-forge (except for `-iomp` - # and `-tbb` pkg-config files), Spack, or with the standalone Intel - # installer. The standalone installer is the worst option, since it's - # large and clumsy to install and requires running a setvars.sh script - # before things work. The PyPI MKL packages are broken and need the - # fixes in this step. For details, see - # https://github.com/conda-forge/intel_repack-feedstock/issues/34 - cd $Python3_ROOT_DIR/lib/pkgconfig - sed -i 's/\/intel64//g' mkl*.pc - # add the expected .so -> .so.2 symlinks to fix linking - cd .. 
- for i in $( ls libmkl*.so.2 ); do ln -s $i ${i%.*}; done - - - name: Build with defaults (LP64) - run: | - pkg-config --libs mkl-dynamic-lp64-seq # check link flags - spin build -- -Ddisable-optimization=true - - - name: Test - run: spin test -- numpy/linalg - - - name: Build with ILP64 - run: | - git clean -xdf > /dev/null - pkg-config --libs mkl-dynamic-ilp64-seq - spin build -- -Duse-ilp64=true -Ddisable-optimization=true - - - name: Test - run: spin test -- numpy/linalg - - - name: Build without pkg-config (default options, SDL) - run: | - git clean -xdf > /dev/null - pushd $Python3_ROOT_DIR/lib/pkgconfig - rm mkl*.pc - popd - export MKLROOT=$Python3_ROOT_DIR - spin build -- -Ddisable-optimization=true - - - name: Test - run: spin test -- numpy/linalg - - blis: - if: "github.repository == 'numpy/numpy'" - runs-on: ubuntu-latest - name: "BLIS" - steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 - with: - submodules: recursive - fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 - with: - python-version: '3.11' - - - name: Install dependencies - run: | - pip install -r build_requirements.txt - pip install pytest pytest-xdist hypothesis typing_extensions - sudo apt-get install libblis-dev libopenblas-dev pkg-config - - - name: Add BLIS pkg-config file - run: | - # Needed because blis.pc missing in Debian: - # https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=989076 - # The alternative here would be to use another distro or Miniforge - sudo cp tools/ci/_blis_debian.pc /usr/lib/x86_64-linux-gnu/pkgconfig/blis.pc - # Check if the patch works: - pkg-config --libs blis - pkg-config --cflags blis - - - name: Build - run: spin build -- -Dblas=blis -Ddisable-optimization=true - - - name: Test - run: spin test -- numpy/linalg - - atlas: - if: "github.repository == 'numpy/numpy'" - runs-on: ubuntu-latest - name: "ATLAS" - steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 - with: - submodules: recursive - fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 - with: - python-version: '3.11' - - - name: Install dependencies - run: | - pip install -r build_requirements.txt - pip install pytest pytest-xdist hypothesis typing_extensions - sudo apt-get install libatlas-base-dev pkg-config - - - name: Build - run: spin build -- -Dblas=blas-atlas -Dlapack=lapack-atlas -Ddisable-optimization=true - - - name: Test - run: spin test -- numpy/linalg - diff --git a/.github/workflows/build_test.yml b/.github/workflows/linux_simd.yml similarity index 64% rename from .github/workflows/build_test.yml rename to .github/workflows/linux_simd.yml index e570c5540415..2c238d66b10d 100644 --- a/.github/workflows/build_test.yml +++ b/.github/workflows/linux_simd.yml @@ -1,10 +1,38 @@ -name: Build_Test +name: SIMD tests (Linux) + +# This file is meant for testing different SIMD-related build options and +# optimization levels. See `meson_options.txt` for the available build options. +# +# Jobs and their purpose: +# +# - smoke_test: +# Meant to complete as quickly as possible, and acts as a filter for +# the other, more expensive jobs (those only start once `smoke_test` +# passes). +# - old_gcc: +# Tests the oldest supported GCC version with the default build +# settings. +# - without_optimizations: +# Completely disables both all SIMD optimizations and other compiler +# optimizations like loop unrolling. 
+# - with_baseline_only: +# Only uses the baseline SIMD settings, but no runtime dispatch based +# on compiler features detected at runtime. +# - without_avx512_avx2_fma3: +# Uses runtime SIMD dispatching, with AVX2, FMA3 and AVX512 disabled. +# - without_avx512: +# Uses runtime SIMD dispatching, with AVX512 disabled. +# - armv7_simd_test: +# Cross-compiles from x86-64 to armv7, and then runs only the +# SIMD-specific tests under QEMU. +# - sde_simd_avx512_test: +# Runs only the SIMD tests for several AVX512-xxx instruction sets +# under the Intel Software Development Emulator (SDE). +# - intel_spr_sde_test: +# Similar to the SDE test above, but for AVX512-SPR which requires some +# special-casing. on: - push: - branches: - # coverage comparison in the "full" step needs to run on main after merges - - main pull_request: branches: - main @@ -16,7 +44,6 @@ defaults: env: DOWNLOAD_OPENBLAS: 1 - PYTHON_VERSION: 3.9 concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} @@ -26,10 +53,11 @@ permissions: contents: read # to fetch code (actions/checkout) jobs: - lint: - if: github.repository == 'numpy/numpy' && github.event_name != 'push' + smoke_test: + if: "github.repository == 'numpy/numpy'" runs-on: ubuntu-latest - continue-on-error: true + env: + MESON_ARGS: "-Dallow-noblas=true -Dcpu-baseline=none -Dcpu-dispatch=none" steps: - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: @@ -37,19 +65,19 @@ jobs: fetch-depth: 0 - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: - python-version: ${{ env.PYTHON_VERSION }} - - name: Install linter requirements - run: - python -m pip install -r linter_requirements.txt - - name: Run linter on PR diff - run: - python tools/linter.py --branch origin/${{ github.base_ref }} + python-version: '3.9' + - uses: ./.github/meson_actions - smoke_test: - if: "github.repository == 'numpy/numpy'" +<<<<<<< HEAD:.github/workflows/build_test.yml + basic: + needs: [smoke_test] runs-on: ubuntu-latest + if: github.event_name != 'push' + strategy: + matrix: + python-version: ["3.9", "pypy3.9-v7.3.12"] env: - MESON_ARGS: "-Dallow-noblas=true -Dcpu-baseline=none -Dcpu-dispatch=none" + EXPECT_CPU_FEATURES: "SSE SSE2 SSE3 SSSE3 SSE41 POPCNT SSE42 AVX F16C FMA3 AVX2 AVX512F AVX512CD AVX512_KNL AVX512_KNM AVX512_SKX AVX512_CLX AVX512_CNL AVX512_ICL" steps: - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: @@ -57,20 +85,21 @@ jobs: fetch-depth: 0 - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: - python-version: ${{ env.PYTHON_VERSION }} - - uses: ./.github/meson_actions + python-version: ${{ matrix.python-version }} + - uses: ./.github/actions +||||||| parent of 1a81895ae6 (CI: split `build_test.yml` into three GHA jobs files):.github/workflows/build_test.yml basic: needs: [smoke_test] runs-on: ubuntu-latest if: github.event_name != 'push' strategy: matrix: - python-version: ["3.9", "3.10", "3.11", "pypy3.9-v7.3.12"] + python-version: ["3.9", "pypy3.9-v7.3.12"] env: EXPECT_CPU_FEATURES: "SSE SSE2 SSE3 SSSE3 SSE41 POPCNT SSE42 AVX F16C FMA3 AVX2 AVX512F AVX512CD AVX512_KNL AVX512_KNM AVX512_SKX AVX512_CLX AVX512_CNL AVX512_ICL" steps: - - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 + - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 with: submodules: recursive fetch-depth: 0 @@ -79,9 +108,11 @@ jobs: python-version: ${{ matrix.python-version }} - uses: ./.github/actions 
+======= +>>>>>>> 1a81895ae6 (CI: split `build_test.yml` into three GHA jobs files):.github/workflows/linux_simd.yml old_gcc: needs: [smoke_test] - # provides GCC 7, 8 + # provides GCC 8 runs-on: ubuntu-20.04 if: github.event_name != 'push' steps: @@ -89,7 +120,6 @@ jobs: with: submodules: recursive fetch-depth: 0 - # comes with python3.6 - name: Install Python3.9 run: | sudo apt update @@ -100,21 +130,14 @@ jobs: sudo ln -s /usr/bin/python3.9 /usr/bin/pythonx pythonx -m pip install --upgrade pip setuptools wheel pythonx -m pip install -r test_requirements.txt - - name: Install Compilers - run: sudo apt install g++-7 g++-8 -y - - name: Build gcc-7 - run: | - export CC=/usr/bin/gcc-7 - export CXX=/usr/bin/g++-7 - rm -rf build && pythonx setup.py install --user - - name: Runtests gcc-7 - run: pythonx runtests.py -n + - name: Install GCC 8 + run: sudo apt install g++-8 -y - name: Build gcc-8 run: | export CC=/usr/bin/gcc-8 export CXX=/usr/bin/g++-8 rm -rf build && pythonx setup.py install --user - - name: Runtests gcc-8 + - name: Run test suite run: pythonx runtests.py -n without_optimizations: @@ -146,7 +169,7 @@ jobs: fetch-depth: 0 - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: - python-version: ${{ env.PYTHON_VERSION }} + python-version: '3.10' - uses: ./.github/meson_actions without_avx512: @@ -162,7 +185,7 @@ jobs: fetch-depth: 0 - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: - python-version: ${{ env.PYTHON_VERSION }} + python-version: '3.10' - uses: ./.github/meson_actions without_avx512_avx2_fma3: @@ -178,9 +201,10 @@ jobs: fetch-depth: 0 - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: - python-version: ${{ env.PYTHON_VERSION }} + python-version: '3.10' - uses: ./.github/meson_actions +<<<<<<< HEAD:.github/workflows/build_test.yml debug: needs: [smoke_test] runs-on: ubuntu-latest @@ -194,7 +218,7 @@ jobs: fetch-depth: 0 - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: - python-version: ${{ env.PYTHON_VERSION }} + python-version: '3.11' - uses: ./.github/actions @@ -211,17 +235,13 @@ jobs: fetch-depth: 0 - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: - python-version: ${{ env.PYTHON_VERSION }} + python-version: '3.11' - uses: ./.github/actions full: + # Build a wheel, install it, then run the full test suite with code coverage needs: [smoke_test] runs-on: ubuntu-22.04 - env: - USE_WHEEL: 1 - RUN_FULL_TESTS: 1 - RUN_COVERAGE: 1 - INSTALL_PICKLE5: 1 steps: - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: @@ -229,8 +249,27 @@ jobs: fetch-depth: 0 - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: - python-version: ${{ env.PYTHON_VERSION }} - - uses: ./.github/actions + python-version: '3.9' + - name: Install build and test dependencies from PyPI + run: | + pip install -r build_requirements.txt + pip install -r test_requirements.txt + - name: Install gfortran and OpenBLAS (MacPython build) + run: | + set -xe + sudo apt install gfortran libgfortran5 + target=$(python tools/openblas_support.py) + sudo cp -r $target/lib/* /usr/lib + sudo cp $target/include/* /usr/include + - name: Build a wheel + run: | + python -m build --wheel --no-isolation --skip-dependency-checks + - name: Run full test suite + run: | + cd doc + pytest --pyargs numpy --cov-report=html:build/coverage + # TODO: gcov + benchmark: needs: [smoke_test] @@ -251,7 +290,7 @@ jobs: 
fetch-depth: 0 - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: - python-version: ${{ env.PYTHON_VERSION }} + python-version: '3.9' - uses: ./.github/actions relaxed_strides_debug: @@ -269,15 +308,13 @@ jobs: fetch-depth: 0 - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: - python-version: ${{ env.PYTHON_VERSION }} + python-version: '3.11' - uses: ./.github/actions - use_wheel: + sdist: needs: [smoke_test] runs-on: ubuntu-latest if: github.event_name != 'push' - env: - USE_WHEEL: 1 steps: - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: @@ -285,44 +322,168 @@ jobs: fetch-depth: 0 - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: - python-version: ${{ env.PYTHON_VERSION }} + python-version: '3.12' + - name: Install gfortran and OpenBLAS (MacPython build) + run: | + set -xe + sudo apt install gfortran libgfortran5 + target=$(python tools/openblas_support.py) + sudo cp -r $target/lib/* /usr/lib + sudo cp $target/include/* /usr/include + - name: Build a wheel via an sdist + run: | + python -m build + pip install dist/numpy*.whl + - name: Install test dependencies + run: | + pip install -r test_requirements.txt + - name: Run test suite + run: | + cd doc + pytest --pyargs numpy -m "not slow" + + +||||||| parent of 1a81895ae6 (CI: split `build_test.yml` into three GHA jobs files):.github/workflows/build_test.yml + debug: + needs: [smoke_test] + runs-on: ubuntu-latest + if: github.event_name != 'push' + env: + USE_DEBUG: 1 + steps: + - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + with: + submodules: recursive + fetch-depth: 0 + - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 + with: + python-version: '3.11' + + - uses: ./.github/actions + + blas64: + needs: [smoke_test] + runs-on: ubuntu-latest + if: github.event_name != 'push' + env: + NPY_USE_BLAS_ILP64: 1 + steps: + - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + with: + submodules: recursive + fetch-depth: 0 + - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 + with: + python-version: '3.11' - uses: ./.github/actions - no_openblas: + full: + # Build a wheel, install it, then run the full test suite with code coverage + needs: [smoke_test] + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + with: + submodules: recursive + fetch-depth: 0 + - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 + with: + python-version: '3.9' + - name: Install build and test dependencies from PyPI + run: | + pip install -r build_requirements.txt + pip install -r test_requirements.txt + - name: Install gfortran and OpenBLAS (MacPython build) + run: | + set -xe + sudo apt install gfortran libgfortran5 + target=$(python tools/openblas_support.py) + sudo cp -r $target/lib/* /usr/lib + sudo cp $target/include/* /usr/include + - name: Build a wheel + run: | + python -m build --wheel --no-isolation --skip-dependency-checks + - name: Run full test suite + run: | + cd doc + pytest --pyargs numpy --cov-report=html:build/coverage + # TODO: gcov + + + benchmark: needs: [smoke_test] runs-on: ubuntu-latest if: github.event_name != 'push' env: + PYTHONOPTIMIZE: 2 BLAS: None LAPACK: None ATLAS: None - DOWNLOAD_OPENBLAS: '' + NPY_BLAS_ORDER: mkl,blis,openblas,atlas,blas + NPY_LAPACK_ORDER: MKL,OPENBLAS,ATLAS,LAPACK + USE_ASV: 1 steps: - - uses: 
actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 + - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 with: submodules: recursive fetch-depth: 0 - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: - python-version: ${{ env.PYTHON_VERSION }} + python-version: '3.9' - uses: ./.github/actions - sdist: + relaxed_strides_debug: needs: [smoke_test] runs-on: ubuntu-latest if: github.event_name != 'push' env: - USE_SDIST: 1 + CHECK_BLAS: 1 + NPY_USE_BLAS_ILP64: 1 + NPY_RELAXED_STRIDES_DEBUG: 1 steps: - - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 + - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 with: submodules: recursive fetch-depth: 0 - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: - python-version: ${{ env.PYTHON_VERSION }} + python-version: '3.11' - uses: ./.github/actions + sdist: + needs: [smoke_test] + runs-on: ubuntu-latest + if: github.event_name != 'push' + steps: + - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + with: + submodules: recursive + fetch-depth: 0 + - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 + with: + python-version: '3.12' + - name: Install gfortran and OpenBLAS (MacPython build) + run: | + set -xe + sudo apt install gfortran libgfortran5 + target=$(python tools/openblas_support.py) + sudo cp -r $target/lib/* /usr/lib + sudo cp $target/include/* /usr/include + - name: Build a wheel via an sdist + run: | + python -m build + pip install dist/numpy*.whl + - name: Install test dependencies + run: | + pip install -r test_requirements.txt + - name: Run test suite + run: | + cd doc + pytest --pyargs numpy -m "not slow" + + +======= +>>>>>>> 1a81895ae6 (CI: split `build_test.yml` into three GHA jobs files):.github/workflows/linux_simd.yml armv7_simd_test: needs: [smoke_test] # make sure this matches the base docker image below @@ -391,7 +552,7 @@ jobs: fetch-depth: 0 - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: - python-version: ${{ env.PYTHON_VERSION }} + python-version: '3.9' - name: Install Intel SDE run: | curl -o /tmp/sde.tar.xz https://downloadmirror.intel.com/732268/sde-external-9.7.0-2022-05-09-lin.tar.xz @@ -420,7 +581,7 @@ jobs: fetch-depth: 0 - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: - python-version: ${{ env.PYTHON_VERSION }} + python-version: '3.11' - name: Install Intel SDE run: | curl -o /tmp/sde.tar.xz https://downloadmirror.intel.com/784319/sde-external-9.24.0-2023-07-13-lin.tar.xz diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml new file mode 100644 index 000000000000..c89b8e11ec95 --- /dev/null +++ b/.github/workflows/mypy.yml @@ -0,0 +1,67 @@ +name: Run MyPy + +# Mypy is too slow to run as part of regular CI. The purpose of the jobs in +# this file is to cover running Mypy across: +# +# - OSes: Linux, Windows and macOS +# - Python versions: lowest/highest supported versions, and an intermediate one +# +# The build matrix aims for sparse coverage across those two dimensions. +# Use of BLAS/LAPACK and SIMD is disabled on purpose, because those things +# don't matter for static typing and this speeds up the builds. +# +# This is a separate job file so it's easy to trigger by hand. 
+ +on: + pull_request: + branches: + - main + - maintenance/** + paths-ignore: + - 'benchmarks/' + - '.circlecl/' + - 'docs/' + - 'meson_cpu/' + - 'tools/' + workflow_dispatch: + +defaults: + run: + shell: bash + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read # to fetch code (actions/checkout) + +jobs: + mypy: + if: "github.repository == 'numpy/numpy'" + name: "MyPy" + runs-on: ${{ matrix.os_python[0] }} + strategy: + matrix: + os_python: + - [ubuntu-latest, '3.10'] # switch to 3.12-dev after mypy is upgraded (see gh-23764) + - [windows-2019, '3.11'] + - [macos-12, '3.9'] + steps: + - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + with: + submodules: recursive + fetch-depth: 0 + - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 + with: + python-version: ${{ matrix.os_python[1] }} + - name: Install dependencies + run: | + pip install -r build_requirements.txt + pip install -r test_requirements.txt + - name: Build + run: | + spin build -j2 -- -Dallow-noblas=true -Ddisable-optimization=true --vsenv + - name: Run Mypy + run: | + spin mypy diff --git a/numpy/typing/tests/test_typing.py b/numpy/typing/tests/test_typing.py index cd009bb6e7f2..68c6f5d03fab 100644 --- a/numpy/typing/tests/test_typing.py +++ b/numpy/typing/tests/test_typing.py @@ -26,6 +26,20 @@ ) +# Only trigger a full `mypy` run if this environment variable is set +# Note that these tests tend to take over a minute even on a macOS M1 CPU, +# and more than that in CI. +RUN_MYPY = "NPY_RUN_MYPY_IN_TESTSUITE" in os.environ +if RUN_MYPY and RUN_MYPY not in ('0', '', 'false'): + RUN_MYPY = True + +# Skips all functions in this file +pytestmark = pytest.mark.skipif( + not RUN_MYPY, + reason="`NPY_RUN_MYPY_IN_TESTSUITE` not set" +) + + try: from mypy import api except ImportError: From 81f1758791a384a7ae03b890bc860ba3d2f075aa Mon Sep 17 00:00:00 2001 From: Charles Harris Date: Sun, 5 Nov 2023 13:45:19 -0700 Subject: [PATCH 12/45] MAINT: Fix spelling error that slipped in somewhere typing_nptensions -> typing_extensions --- numpy/typing/tests/data/reveal/warnings_and_errors.pyi | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/numpy/typing/tests/data/reveal/warnings_and_errors.pyi b/numpy/typing/tests/data/reveal/warnings_and_errors.pyi index 16492c2fb41b..b026e4f6e3b0 100644 --- a/numpy/typing/tests/data/reveal/warnings_and_errors.pyi +++ b/numpy/typing/tests/data/reveal/warnings_and_errors.pyi @@ -5,7 +5,7 @@ import numpy as np if sys.version_info >= (3, 11): from typing import assert_type else: - from typing_nptensions import assert_type + from typing_extensions import assert_type assert_type(np.ModuleDeprecationWarning(), np.ModuleDeprecationWarning) assert_type(np.VisibleDeprecationWarning(), np.VisibleDeprecationWarning) From c40e98cc15d95b1f5fe48af691cfc005773d8a4c Mon Sep 17 00:00:00 2001 From: Charles Harris Date: Sun, 5 Nov 2023 14:54:19 -0700 Subject: [PATCH 13/45] CI: Switch SIMD tests to meson Backport of #24625. 
--- .github/meson_actions/action.yml | 36 +- .github/workflows/linux_simd.yml | 662 +++++++------------------------ 2 files changed, 155 insertions(+), 543 deletions(-) diff --git a/.github/meson_actions/action.yml b/.github/meson_actions/action.yml index aff70da169bc..fb3042c0ade2 100644 --- a/.github/meson_actions/action.yml +++ b/.github/meson_actions/action.yml @@ -3,27 +3,35 @@ description: "checkout repo, build, and test numpy" runs: using: composite steps: - - name: Install dependencies - shell: bash - run: pip install -r build_requirements.txt - name: Build shell: 'script -q -e -c "bash --noprofile --norc -eo pipefail {0}"' env: TERM: xterm-256color - run: - spin build -- ${MESON_ARGS[@]} - - name: Check build-internal dependencies - shell: bash - run: - ninja -C build -t missingdeps - - name: Check installed test and stub files + PKG_CONFIG_PATH: ./.openblas + run: | + echo "::group::Installing Build Dependencies" + pip install -r build_requirements.txt + echo "::endgroup::" + echo "::group::Building NumPy" + spin build --clean -- ${MESON_ARGS[@]} + echo "::endgroup::" + + - name: Meson Log shell: bash - run: - python tools/check_installed_files.py $(find ./build-install -path '*/site-packages/numpy') + if: always() + run: | + echo "::group::Meson Log" + cat build/meson-logs/meson-log.txt + echo "::endgroup::" + - name: Test shell: 'script -q -e -c "bash --noprofile --norc -eo pipefail {0}"' env: TERM: xterm-256color run: | - pip install pytest pytest-xdist hypothesis typing_extensions - spin test -j auto + echo "::group::Installing Test Dependencies" + pip install pytest pytest-xdist hypothesis typing_extensions setuptools + echo "::endgroup::" + echo "::group::Test NumPy" + spin test + echo "::endgroup::" diff --git a/.github/workflows/linux_simd.yml b/.github/workflows/linux_simd.yml index 2c238d66b10d..f8b694124d41 100644 --- a/.github/workflows/linux_simd.yml +++ b/.github/workflows/linux_simd.yml @@ -1,37 +1,35 @@ -name: SIMD tests (Linux) +name: Linux SIMD tests # This file is meant for testing different SIMD-related build options and # optimization levels. See `meson_options.txt` for the available build options. # -# Jobs and their purpose: +# Jobs and their purposes: +# +# - baseline_only: +# Focuses on completing as quickly as possible and acts as a filter for other, more resource-intensive jobs. +# Utilizes only the default baseline targets (e.g., SSE3 on X86_64) without enabling any runtime dispatched features. +# +# - old_gcc: +# Tests the oldest supported GCC version with default CPU/baseline/dispatch settings. +# +# - without_optimizations: +# Completely disables all SIMD optimizations and other compiler optimizations such as loop unrolling. +# +# - native: +# Tests against the host CPU features set as the baseline without enabling any runtime dispatched features. +# Intended to assess the entire NumPy codebase against host flags, even for code sections lacking handwritten SIMD intrincis. +# +# - without_avx512/avx2/fma3: +# Uses runtime SIMD dispatching but disables AVX2, FMA3, and AVX512. +# Intended to evaluate 128-bit SIMD extensions without FMA support. +# +# - without_avx512: +# Uses runtime SIMD dispatching but disables AVX512. +# Intended to evaluate 128-bit/256-bit SIMD extensions. +# +# - intel_sde: +# Executes only the SIMD tests for various AVX512 SIMD extensions under the Intel Software Development Emulator (SDE). 
# -# - smoke_test: -# Meant to complete as quickly as possible, and acts as a filter for -# the other, more expensive jobs (those only start once `smoke_test` -# passes). -# - old_gcc: -# Tests the oldest supported GCC version with the default build -# settings. -# - without_optimizations: -# Completely disables both all SIMD optimizations and other compiler -# optimizations like loop unrolling. -# - with_baseline_only: -# Only uses the baseline SIMD settings, but no runtime dispatch based -# on compiler features detected at runtime. -# - without_avx512_avx2_fma3: -# Uses runtime SIMD dispatching, with AVX2, FMA3 and AVX512 disabled. -# - without_avx512: -# Uses runtime SIMD dispatching, with AVX512 disabled. -# - armv7_simd_test: -# Cross-compiles from x86-64 to armv7, and then runs only the -# SIMD-specific tests under QEMU. -# - sde_simd_avx512_test: -# Runs only the SIMD tests for several AVX512-xxx instruction sets -# under the Intel Software Development Emulator (SDE). -# - intel_spr_sde_test: -# Similar to the SDE test above, but for AVX512-SPR which requires some -# special-casing. - on: pull_request: branches: @@ -40,10 +38,10 @@ on: defaults: run: - shell: bash + shell: 'script -q -e -c "bash --noprofile --norc -eo pipefail {0}"' env: - DOWNLOAD_OPENBLAS: 1 + TERM: xterm-256color concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} @@ -53,556 +51,162 @@ permissions: contents: read # to fetch code (actions/checkout) jobs: - smoke_test: + baseline_only: if: "github.repository == 'numpy/numpy'" runs-on: ubuntu-latest env: - MESON_ARGS: "-Dallow-noblas=true -Dcpu-baseline=none -Dcpu-dispatch=none" + MESON_ARGS: "-Dallow-noblas=true -Dcpu-dispatch=none" steps: - - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 with: submodules: recursive fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 + - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v4.7.1 with: python-version: '3.9' - uses: ./.github/meson_actions + name: Build/Test -<<<<<<< HEAD:.github/workflows/build_test.yml - basic: - needs: [smoke_test] - runs-on: ubuntu-latest + old_gcc: if: github.event_name != 'push' - strategy: - matrix: - python-version: ["3.9", "pypy3.9-v7.3.12"] - env: - EXPECT_CPU_FEATURES: "SSE SSE2 SSE3 SSSE3 SSE41 POPCNT SSE42 AVX F16C FMA3 AVX2 AVX512F AVX512CD AVX512_KNL AVX512_KNM AVX512_SKX AVX512_CLX AVX512_CNL AVX512_ICL" - steps: - - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 - with: - submodules: recursive - fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 - with: - python-version: ${{ matrix.python-version }} - - uses: ./.github/actions - -||||||| parent of 1a81895ae6 (CI: split `build_test.yml` into three GHA jobs files):.github/workflows/build_test.yml - basic: - needs: [smoke_test] + needs: [baseline_only] runs-on: ubuntu-latest - if: github.event_name != 'push' - strategy: - matrix: - python-version: ["3.9", "pypy3.9-v7.3.12"] env: - EXPECT_CPU_FEATURES: "SSE SSE2 SSE3 SSSE3 SSE41 POPCNT SSE42 AVX F16C FMA3 AVX2 AVX512F AVX512CD AVX512_KNL AVX512_KNM AVX512_SKX AVX512_CLX AVX512_CNL AVX512_ICL" + MESON_ARGS: "-Dallow-noblas=true" steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 with: submodules: recursive 
fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 + - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v4.7.1 with: - python-version: ${{ matrix.python-version }} - - uses: ./.github/actions + python-version: '3.9' -======= ->>>>>>> 1a81895ae6 (CI: split `build_test.yml` into three GHA jobs files):.github/workflows/linux_simd.yml - old_gcc: - needs: [smoke_test] - # provides GCC 8 - runs-on: ubuntu-20.04 - if: github.event_name != 'push' - steps: - - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 - with: - submodules: recursive - fetch-depth: 0 - - name: Install Python3.9 + - name: Install GCC/8/9 run: | + echo "deb http://archive.ubuntu.com/ubuntu focal main universe" | sudo tee /etc/apt/sources.list.d/focal.list sudo apt update - # for add-apt-repository - sudo apt install software-properties-common -y - sudo add-apt-repository ppa:deadsnakes/ppa -y - sudo apt install python3.9-dev ninja-build -y - sudo ln -s /usr/bin/python3.9 /usr/bin/pythonx - pythonx -m pip install --upgrade pip setuptools wheel - pythonx -m pip install -r test_requirements.txt - - name: Install GCC 8 - run: sudo apt install g++-8 -y - - name: Build gcc-8 - run: | - export CC=/usr/bin/gcc-8 - export CXX=/usr/bin/g++-8 - rm -rf build && pythonx setup.py install --user - - name: Run test suite - run: pythonx runtests.py -n - - without_optimizations: - needs: [smoke_test] - runs-on: ubuntu-latest - if: github.event_name != 'push' - env: - MESON_ARGS: "-Dallow-noblas=true -Ddisable-optimization=true" - steps: - - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 - with: - submodules: recursive - fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 - with: - python-version: ${{ env.PYTHON_VERSION }} - - uses: ./.github/meson_actions - - with_baseline_only: - needs: [smoke_test] - runs-on: ubuntu-latest - if: github.event_name != 'push' - env: - MESON_ARGS: "-Dallow-noblas=true -Dcpu-dispatch=none" - steps: - - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 - with: - submodules: recursive - fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 - with: - python-version: '3.10' - - uses: ./.github/meson_actions + sudo apt install -y g++-8 g++-9 - without_avx512: - needs: [smoke_test] - runs-on: ubuntu-latest - if: github.event_name != 'push' - env: - MESON_ARGS: "-Dallow-noblas=true -Dcpu-dispatch=SSSE3,SSE41,POPCNT,SSE42,AVX,F16C,AVX2,FMA3" - steps: - - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 - with: - submodules: recursive - fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 - with: - python-version: '3.10' - - uses: ./.github/meson_actions - - without_avx512_avx2_fma3: - needs: [smoke_test] - runs-on: ubuntu-latest - if: github.event_name != 'push' - env: - MESON_ARGS: "-Dallow-noblas=true -Dcpu-dispatch=SSSE3,SSE41,POPCNT,SSE42,AVX,F16C" - steps: - - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 - with: - submodules: recursive - fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 - with: - python-version: '3.10' - - uses: ./.github/meson_actions - -<<<<<<< HEAD:.github/workflows/build_test.yml - debug: - needs: [smoke_test] - runs-on: ubuntu-latest - if: github.event_name != 'push' - env: - USE_DEBUG: 1 - steps: - - uses: 
actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 - with: - submodules: recursive - fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 - with: - python-version: '3.11' - - - uses: ./.github/actions - - blas64: - needs: [smoke_test] - runs-on: ubuntu-latest - if: github.event_name != 'push' - env: - NPY_USE_BLAS_ILP64: 1 - steps: - - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 - with: - submodules: recursive - fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 - with: - python-version: '3.11' - - uses: ./.github/actions - - full: - # Build a wheel, install it, then run the full test suite with code coverage - needs: [smoke_test] - runs-on: ubuntu-22.04 - steps: - - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 - with: - submodules: recursive - fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 - with: - python-version: '3.9' - - name: Install build and test dependencies from PyPI + - name: Enable gcc-8 run: | - pip install -r build_requirements.txt - pip install -r test_requirements.txt - - name: Install gfortran and OpenBLAS (MacPython build) - run: | - set -xe - sudo apt install gfortran libgfortran5 - target=$(python tools/openblas_support.py) - sudo cp -r $target/lib/* /usr/lib - sudo cp $target/include/* /usr/include - - name: Build a wheel - run: | - python -m build --wheel --no-isolation --skip-dependency-checks - - name: Run full test suite - run: | - cd doc - pytest --pyargs numpy --cov-report=html:build/coverage - # TODO: gcov - - - benchmark: - needs: [smoke_test] - runs-on: ubuntu-latest - if: github.event_name != 'push' - env: - PYTHONOPTIMIZE: 2 - BLAS: None - LAPACK: None - ATLAS: None - NPY_BLAS_ORDER: mkl,blis,openblas,atlas,blas - NPY_LAPACK_ORDER: MKL,OPENBLAS,ATLAS,LAPACK - USE_ASV: 1 - steps: - - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 - with: - submodules: recursive - fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 - with: - python-version: '3.9' - - uses: ./.github/actions + sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-8 1 + sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-8 1 - relaxed_strides_debug: - needs: [smoke_test] - runs-on: ubuntu-latest - if: github.event_name != 'push' - env: - CHECK_BLAS: 1 - NPY_USE_BLAS_ILP64: 1 - NPY_RELAXED_STRIDES_DEBUG: 1 - steps: - - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 - with: - submodules: recursive - fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 - with: - python-version: '3.11' - - uses: ./.github/actions + - uses: ./.github/meson_actions + name: Build/Test against gcc-8 - sdist: - needs: [smoke_test] - runs-on: ubuntu-latest - if: github.event_name != 'push' - steps: - - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 - with: - submodules: recursive - fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 - with: - python-version: '3.12' - - name: Install gfortran and OpenBLAS (MacPython build) - run: | - set -xe - sudo apt install gfortran libgfortran5 - target=$(python tools/openblas_support.py) - sudo cp -r $target/lib/* /usr/lib - sudo cp $target/include/* /usr/include - - name: Build a wheel via an sdist - run: | - python -m build - pip install 
dist/numpy*.whl - - name: Install test dependencies + - name: Enable gcc-9 run: | - pip install -r test_requirements.txt - - name: Run test suite - run: | - cd doc - pytest --pyargs numpy -m "not slow" + sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-9 2 + sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-9 2 + - uses: ./.github/meson_actions + name: Build/Test against gcc-9 -||||||| parent of 1a81895ae6 (CI: split `build_test.yml` into three GHA jobs files):.github/workflows/build_test.yml - debug: - needs: [smoke_test] + specialize: + needs: [baseline_only] runs-on: ubuntu-latest if: github.event_name != 'push' - env: - USE_DEBUG: 1 - steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 - with: - submodules: recursive - fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 - with: - python-version: '3.11' - - - uses: ./.github/actions + continue-on-error: true + strategy: + matrix: + BUILD_PROP: + #- [ + #"without optimizations", + #"-Dallow-noblas=true -Ddisable-optimization=true", + #"3.12" + #] + - [ + "native", + "-Dallow-noblas=true -Dcpu-baseline=native -Dcpu-dispatch=none", + "3.11" + ] + - [ + "without avx512", + "-Dallow-noblas=true -Dcpu-dispatch=SSSE3,SSE41,POPCNT,SSE42,AVX,F16C,AVX2,FMA3", + "3.10" + ] + - [ + "without avx512/avx2/fma3", + "-Dallow-noblas=true -Dcpu-dispatch=SSSE3,SSE41,POPCNT,SSE42,AVX,F16C", + "3.9" + ] - blas64: - needs: [smoke_test] - runs-on: ubuntu-latest - if: github.event_name != 'push' env: - NPY_USE_BLAS_ILP64: 1 - steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 - with: - submodules: recursive - fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 - with: - python-version: '3.11' - - uses: ./.github/actions + MESON_ARGS: ${{ matrix.BUILD_PROP[1] }} - full: - # Build a wheel, install it, then run the full test suite with code coverage - needs: [smoke_test] - runs-on: ubuntu-22.04 + name: "${{ matrix.BUILD_PROP[0] }}" steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 with: submodules: recursive fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 + - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v4.7.1 with: - python-version: '3.9' - - name: Install build and test dependencies from PyPI - run: | - pip install -r build_requirements.txt - pip install -r test_requirements.txt - - name: Install gfortran and OpenBLAS (MacPython build) - run: | - set -xe - sudo apt install gfortran libgfortran5 - target=$(python tools/openblas_support.py) - sudo cp -r $target/lib/* /usr/lib - sudo cp $target/include/* /usr/include - - name: Build a wheel - run: | - python -m build --wheel --no-isolation --skip-dependency-checks - - name: Run full test suite - run: | - cd doc - pytest --pyargs numpy --cov-report=html:build/coverage - # TODO: gcov - + python-version: "${{ matrix.BUILD_PROP[2] }}" + - uses: ./.github/meson_actions + name: Build/Test - benchmark: - needs: [smoke_test] + intel_sde: + needs: [baseline_only] runs-on: ubuntu-latest - if: github.event_name != 'push' - env: - PYTHONOPTIMIZE: 2 - BLAS: None - LAPACK: None - ATLAS: None - NPY_BLAS_ORDER: mkl,blis,openblas,atlas,blas - NPY_LAPACK_ORDER: MKL,OPENBLAS,ATLAS,LAPACK - USE_ASV: 1 steps: - - uses: 
actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 with: submodules: recursive fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 - with: - python-version: '3.9' - - uses: ./.github/actions - - relaxed_strides_debug: - needs: [smoke_test] - runs-on: ubuntu-latest - if: github.event_name != 'push' - env: - CHECK_BLAS: 1 - NPY_USE_BLAS_ILP64: 1 - NPY_RELAXED_STRIDES_DEBUG: 1 - steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 - with: - submodules: recursive - fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 + - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v4.7.1 with: python-version: '3.11' - - uses: ./.github/actions - - sdist: - needs: [smoke_test] - runs-on: ubuntu-latest - if: github.event_name != 'push' - steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 - with: - submodules: recursive - fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 - with: - python-version: '3.12' - - name: Install gfortran and OpenBLAS (MacPython build) - run: | - set -xe - sudo apt install gfortran libgfortran5 - target=$(python tools/openblas_support.py) - sudo cp -r $target/lib/* /usr/lib - sudo cp $target/include/* /usr/include - - name: Build a wheel via an sdist - run: | - python -m build - pip install dist/numpy*.whl - - name: Install test dependencies - run: | - pip install -r test_requirements.txt - - name: Run test suite - run: | - cd doc - pytest --pyargs numpy -m "not slow" - - -======= ->>>>>>> 1a81895ae6 (CI: split `build_test.yml` into three GHA jobs files):.github/workflows/linux_simd.yml - armv7_simd_test: - needs: [smoke_test] - # make sure this matches the base docker image below - runs-on: ubuntu-22.04 - if: github.event_name != 'push' - steps: - - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 - with: - submodules: recursive - fetch-depth: 0 - - name: Initialize binfmt_misc for qemu-user-static - run: | - docker run --rm --privileged multiarch/qemu-user-static --reset -p yes - - name: Creates new container - run: | - # use x86_64 cross-compiler to speed up the build - sudo apt update - sudo apt install -y gcc-arm-linux-gnueabihf g++-arm-linux-gnueabihf gfortran-arm-linux-gnueabihf - - docker run --name the_container --interactive -v /:/host -v $(pwd):/numpy arm32v7/ubuntu:22.04 /bin/bash -c " - apt update && - apt install -y git python3 python3-dev python3-pip && - python3 -m pip install -r /numpy/test_requirements.txt - ln -s /host/lib64 /lib64 && - ln -s /host/lib/x86_64-linux-gnu /lib/x86_64-linux-gnu && - ln -s /host/usr/arm-linux-gnueabihf /usr/arm-linux-gnueabihf && - rm -rf /usr/lib/gcc/arm-linux-gnueabihf && ln -s /host/usr/lib/gcc-cross/arm-linux-gnueabihf /usr/lib/gcc/arm-linux-gnueabihf && - rm -f /usr/bin/arm-linux-gnueabihf-gcc && ln -s /host/usr/bin/arm-linux-gnueabihf-gcc /usr/bin/arm-linux-gnueabihf-gcc && - rm -f /usr/bin/arm-linux-gnueabihf-g++ && ln -s /host/usr/bin/arm-linux-gnueabihf-g++ /usr/bin/arm-linux-gnueabihf-g++ && - rm -f /usr/bin/arm-linux-gnueabihf-gfortran && ln -s /host/usr/bin/arm-linux-gnueabihf-gfortran /usr/bin/arm-linux-gnueabihf-gfortran && - rm -f /usr/bin/arm-linux-gnueabihf-ar && ln -s /host/usr/bin/arm-linux-gnueabihf-ar /usr/bin/arm-linux-gnueabihf-ar && - rm -f /usr/bin/arm-linux-gnueabihf-as && ln -s 
/host/usr/bin/arm-linux-gnueabihf-as /usr/bin/arm-linux-gnueabihf-as && - rm -f /usr/bin/arm-linux-gnueabihf-ld && ln -s /host/usr/bin/arm-linux-gnueabihf-ld /usr/bin/arm-linux-gnueabihf-ld && - rm -f /usr/bin/arm-linux-gnueabihf-ld.bfd && ln -s /host/usr/bin/arm-linux-gnueabihf-ld.bfd /usr/bin/arm-linux-gnueabihf-ld.bfd - " - docker commit the_container the_container - - name: Build - run: | - sudo docker run --name the_build --interactive -v $(pwd):/numpy -v /:/host the_container /bin/bash -c " - uname -a && - gcc --version && - g++ --version && - arm-linux-gnueabihf-gfortran --version && - python3 --version && - git config --global --add safe.directory /numpy - cd /numpy && - python3 setup.py install - " - docker commit the_build the_build - - name: Run SIMD Tests - run: | - docker run --rm --interactive -v $(pwd):/numpy the_build /bin/bash -c " - cd /numpy && F90=arm-linux-gnueabihf-gfortran python3 runtests.py -n -v -- -k 'test_simd or test_kind' - " - - sde_simd_avx512_test: - # Intel Software Development Emulator (SDE) is used to run a given program - # on a specific instruction set architecture and capture various performance details. - # see https://www.intel.com/content/www/us/en/developer/articles/tool/software-development-emulator.html - needs: [smoke_test] - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 - with: - submodules: recursive - fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 - with: - python-version: '3.9' - - name: Install Intel SDE - run: | - curl -o /tmp/sde.tar.xz https://downloadmirror.intel.com/732268/sde-external-9.7.0-2022-05-09-lin.tar.xz - mkdir /tmp/sde && tar -xvf /tmp/sde.tar.xz -C /tmp/sde/ - sudo mv /tmp/sde/* /opt/sde && sudo ln -s /opt/sde/sde64 /usr/bin/sde - - name: Install dependencies - run: python -m pip install -r test_requirements.txt - - name: Build - run: python setup.py build - --simd-test="\$werror AVX512F AVX512_KNL AVX512_KNM AVX512_SKX AVX512_CLX AVX512_CNL AVX512_ICL" - install - # KNM implies KNL - - name: Run SIMD tests (Xeon PHI) - run: sde -knm -- python runtests.py -n -v -- -k test_simd - # ICL implies SKX, CLX and CNL - - name: Run SIMD tests (Ice Lake) - run: sde -icl -- python runtests.py -n -v -- -k test_simd - intel_spr_sde_test: - needs: [smoke_test] - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 - with: - submodules: recursive - fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 - with: - python-version: '3.11' - name: Install Intel SDE run: | curl -o /tmp/sde.tar.xz https://downloadmirror.intel.com/784319/sde-external-9.24.0-2023-07-13-lin.tar.xz mkdir /tmp/sde && tar -xvf /tmp/sde.tar.xz -C /tmp/sde/ sudo mv /tmp/sde/* /opt/sde && sudo ln -s /opt/sde/sde64 /usr/bin/sde + - name: Install dependencies run: | - python -m pip install -r test_requirements.txt - sudo apt install gcc-12 g++-12 - - name: Build and install NumPy - run: | - export CC=/usr/bin/gcc-12 - export CXX=/usr/bin/g++-12 - python setup.py develop - - name: Show config - run: | - python -c "import numpy as np; np.show_config()" - # Run only a few tests, running everything in an SDE takes a long time - # Using pytest directly, unable to use python runtests.py -n -t ... 
- - name: Run linalg/ufunc/umath tests - run: | - python -m pytest numpy/core/tests/test_umath* numpy/core/tests/test_ufunc.py numpy/linalg/tests/test_* - # Can't run on SDE just yet: see https://github.com/numpy/numpy/issues/23545#issuecomment-1659047365 - #sde -spr -- python -m pytest numpy/core/tests/test_umath* numpy/core/tests/test_ufunc.py numpy/linalg/tests/test_* + sudo apt update + sudo apt install -y g++-13 + sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 1 + sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-13 1 + python -m pip install -r build_requirements.txt + python -m pip install pytest pytest-xdist hypothesis typing_extensions + + - name: Build + run: spin build -- -Dallow-noblas=true -Dcpu-baseline=avx512f -Dtest-simd='BASELINE,AVX512_KNL,AVX512_KNM,AVX512_SKX,AVX512_CLX,AVX512_CNL,AVX512_ICL,AVX512_SPR' + + - name: Meson Log + if: always() + run: cat build/meson-logs/meson-log.txt + + - name: SIMD tests (KNM) + run: | + export NUMPY_SITE=$(realpath build-install/usr/lib/python*/site-packages/) + export PYTHONPATH="$PYTHONPATH:$NUMPY_SITE" + cd build-install && + sde -knm -- python -c "import numpy; numpy.show_config()" && + sde -knm -- python -m pytest $NUMPY_SITE/numpy/core/tests/test_simd* + + - name: SIMD tests (SPR) + run: | + export NUMPY_SITE=$(realpath build-install/usr/lib/python*/site-packages/) + export PYTHONPATH="$PYTHONPATH:$NUMPY_SITE" + cd build-install && + sde -spr -- python -c "import numpy; numpy.show_config()" && + sde -spr -- python -m pytest $NUMPY_SITE/numpy/core/tests/test_simd* + + # Can't run on SDE just yet: see https://github.com/numpy/numpy/issues/23545#issuecomment-1659047365 + # + #- name: linalg/ufunc/umath tests (SPR) + # run: | + # export NUMPY_SITE=$(realpath build-install/usr/lib/python*/site-packages/) + # export PYTHONPATH="$PYTHONPATH:$NUMPY_SITE" + # cd build-install && + # sde -spr -- python -c "import numpy; numpy.show_config()" && + # sde -spr -- python -m pytest $NUMPY_SITE/numpy/core/tests/test_umath* \ + # $NUMPY_SITE/numpy/core/tests/test_ufunc.py \ + # $NUMPY_SITE/numpy/linalg/tests/test_* + From 39d854f62ffa65ba233e8d83ae051a2fe2a45805 Mon Sep 17 00:00:00 2001 From: Charles Harris Date: Sun, 5 Nov 2023 19:05:47 -0700 Subject: [PATCH 14/45] MAINT: Update vendored-meson/meson to ea5809096. --- vendored-meson/meson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vendored-meson/meson b/vendored-meson/meson index 66ba7dbbfe28..ea5809096404 160000 --- a/vendored-meson/meson +++ b/vendored-meson/meson @@ -1 +1 @@ -Subproject commit 66ba7dbbfe2838983f65ad8fe16da1535ebf5b9d +Subproject commit ea580909640457450e03d8b84d1fec9f035d7acb From df93b11159de275ef1c1c5b09f552a9fa34dfd66 Mon Sep 17 00:00:00 2001 From: Charles Harris Date: Mon, 6 Nov 2023 12:23:18 -0700 Subject: [PATCH 15/45] MAINT: Update meson build infrastructure. (#25049) Backports of #24969, #24979, #24968, #25068. 
* apply 24969.diff * apply 24979.diff * apply 24968.diff * TST: skip flaky test in test_histogram --------- Co-authored-by: mattip --- numpy/core/include/numpy/npy_common.h | 3 ++ numpy/core/meson.build | 46 ++++++++------------------- numpy/core/src/common/numpyos.c | 3 ++ numpy/lib/tests/test_histograms.py | 5 +-- numpy/meson.build | 19 +++++++---- 5 files changed, 35 insertions(+), 41 deletions(-) diff --git a/numpy/core/include/numpy/npy_common.h b/numpy/core/include/numpy/npy_common.h index fb976aa6ae09..9e98f8ef5edd 100644 --- a/numpy/core/include/numpy/npy_common.h +++ b/numpy/core/include/numpy/npy_common.h @@ -168,6 +168,9 @@ #define npy_ftell ftell #endif #include + #ifndef _WIN32 + #include + #endif #define npy_lseek lseek #define npy_off_t off_t diff --git a/numpy/core/meson.build b/numpy/core/meson.build index a1fc7e9d8a84..0823524d6a3f 100644 --- a/numpy/core/meson.build +++ b/numpy/core/meson.build @@ -257,37 +257,6 @@ foreach filefunc_maybe: optional_file_funcs endif endforeach -# Optional locale function -have_strtold_l = cc.has_function('strtold_l', include_directories: inc_curdir, - prefix:''' - #include - #include - #include "feature_detection_locale.h" -''') -if not have_strtold_l - # Retry with locale.h, seems to vary across Linux distros - have_strtold_l = cc.has_function('strtold_l', include_directories: inc_curdir, - prefix:''' - #include - #include - #include "feature_detection_locale.h" - ''') -endif -if have_strtold_l - cdata.set10('HAVE_STRTOLD_L', true) -else - # FIXME: this is wrong! the HAVE_ define should not exist, or it'll be - # interpreted as the function being available (true/false does nothing, see - # note on HAVE_ defines higher up). This is necessary though in order to make - # the Linux CI job pass. So either the check is wrong somehow, or this - # function is not available in CI. For the latter there is a fallback path, - # but that is broken because we don't have the exact long double - # representation checks. 
- if cc.get_argument_syntax() != 'msvc' - cdata.set10('HAVE_STRTOLD_L', false) - endif -endif - # Other optional functions optional_misc_funcs = [ 'backtrace', @@ -305,7 +274,7 @@ endforeach # SSE headers only enabled automatically on amd64/x32 builds optional_headers = [ 'features.h', # for glibc version linux - 'xlocale.h', # see GH#8367 + 'xlocale.h', # removed in glibc 2.26, but may still be useful - see gh-8367 'dlfcn.h', # dladdr 'execinfo.h', # backtrace 'libunwind.h', # backtrace for LLVM/Clang using libunwind @@ -317,6 +286,19 @@ foreach header: optional_headers endif endforeach +# Optional locale function - GNU-specific +_strtold_prefix = ''' +#define _GNU_SOURCE +#include +#include +''' +if cdata.get('HAVE_XLOCALE_H', 0) == 1 + _strtold_prefix += '#include ' +endif +if cc.has_function('strtold_l', include_directories: inc_curdir, prefix: _strtold_prefix) + cdata.set10('HAVE_STRTOLD_L', true) +endif + # Optional compiler attributes # TODO: this doesn't work with cc.has_function_attribute, see # https://github.com/mesonbuild/meson/issues/10732 diff --git a/numpy/core/src/common/numpyos.c b/numpy/core/src/common/numpyos.c index 2fec06e1c564..19f6be0d47b7 100644 --- a/numpy/core/src/common/numpyos.c +++ b/numpy/core/src/common/numpyos.c @@ -11,6 +11,9 @@ #include "npy_pycompat.h" +#if defined(HAVE_STRTOLD_L) && !defined(_GNU_SOURCE) +# define _GNU_SOURCE +#endif #include #include diff --git a/numpy/lib/tests/test_histograms.py b/numpy/lib/tests/test_histograms.py index 38b3d3dcbf3f..8c55f16db98e 100644 --- a/numpy/lib/tests/test_histograms.py +++ b/numpy/lib/tests/test_histograms.py @@ -398,8 +398,9 @@ def test_histogram_bin_edges(self): edges = histogram_bin_edges(arr, bins='auto', range=(0, 1)) assert_array_equal(edges, e) - @requires_memory(free_bytes=1e10) - @pytest.mark.slow + # @requires_memory(free_bytes=1e10) + # @pytest.mark.slow + @pytest.mark.skip(reason="Bad memory reports lead to OOM in ci testing") def test_big_arrays(self): sample = np.zeros([100000000, 3]) xbins = 400 diff --git a/numpy/meson.build b/numpy/meson.build index 0d17612e81d6..a8f810c8e559 100644 --- a/numpy/meson.build +++ b/numpy/meson.build @@ -10,15 +10,20 @@ endif # Platform detection is_windows = host_machine.system() == 'windows' -is_mingw = is_windows and cc.get_id() == 'gcc' +is_mingw = is_windows and cc.get_define('__MINGW32__') != '' if is_mingw - # For mingw-w64, link statically against the UCRT. - gcc_link_args = ['-lucrt', '-static'] - add_project_link_arguments(gcc_link_args, language: ['c', 'cpp']) - # Force gcc to float64 long doubles for compatibility with MSVC - # builds, for C only. - add_project_arguments('-mlong-double-64', language: 'c') + is_mingw_built_python = run_command( + py, ['-c', 'import sysconfig; print(sysconfig.get_platform())'], + check: true).stdout().strip().startswith('mingw') + if not is_mingw_built_python + # For mingw-w64, link statically against the UCRT. + gcc_link_args = ['-lucrt', '-static'] + add_project_link_arguments(gcc_link_args, language: ['c', 'cpp']) + # Force gcc to float64 long doubles for compatibility with MSVC + # builds, for C only. 
+ add_project_arguments('-mlong-double-64', language: 'c') + endif # Make fprintf("%zd") work (see https://github.com/rgommers/scipy/issues/118) add_project_arguments('-D__USE_MINGW_ANSI_STDIO=1', language: ['c', 'cpp']) endif From 809d00d0f6323b6a044f76e60c4f3ddda6a6310c Mon Sep 17 00:00:00 2001 From: Sayed Adel Date: Thu, 5 Oct 2023 19:17:56 +0300 Subject: [PATCH 16/45] BUG: Resolve build issue on ppc64 with Power9 or higher as baseline This fix addresses two issues: * Corrects the use of unsupported instructions by the assembler in half-precision to double-precision conversion. * Resolves a code error related to variable naming during conversion. It also: * Implement a compile-time test on PPC64 to determine support for Half/Double VSX3 instructions by the assembler * Modify half-precision tests to deal with FP HW exceptions --- meson_cpu/ppc64/meson.build | 3 +++ numpy/core/src/common/half.hpp | 23 ++++++++---------- numpy/core/tests/test_half.py | 24 ++++++++++++------- numpy/distutils/ccompiler_opt.py | 3 ++- .../distutils/checks/extra_vsx3_half_double.c | 12 ++++++++++ 5 files changed, 43 insertions(+), 22 deletions(-) create mode 100644 numpy/distutils/checks/extra_vsx3_half_double.c diff --git a/meson_cpu/ppc64/meson.build b/meson_cpu/ppc64/meson.build index d14b23703fe3..986a57ee184c 100644 --- a/meson_cpu/ppc64/meson.build +++ b/meson_cpu/ppc64/meson.build @@ -26,6 +26,9 @@ VSX3 = mod_features.new( 'VSX3', 3, implies: VSX2, args: {'val': '-mcpu=power9', 'match': '.*[mcpu=|vsx].*'}, detect: {'val': 'VSX3', 'match': 'VSX.*'}, test_code: files(source_root + '/numpy/distutils/checks/cpu_vsx3.c')[0], + extra_tests: { + 'VSX3_HALF_DOUBLE': files(source_root + '/numpy/distutils/checks/extra_vsx3_half_double.c')[0] + } ) VSX4 = mod_features.new( 'VSX4', 4, implies: VSX3, args: {'val': '-mcpu=power10', 'match': '.*[mcpu=|vsx].*'}, diff --git a/numpy/core/src/common/half.hpp b/numpy/core/src/common/half.hpp index 4d16e3bcc1c2..13dcd074283c 100644 --- a/numpy/core/src/common/half.hpp +++ b/numpy/core/src/common/half.hpp @@ -36,7 +36,7 @@ class Half final { #endif ) || ( std::is_same_v && - #if defined(NPY_HAVE_AVX512FP16) || defined(NPY_HAVE_VSX3) + #if defined(NPY_HAVE_AVX512FP16) || (defined(NPY_HAVE_VSX3) && defined(NPY_HAVE_VSX3_HALF_DOUBLE)) true #else false @@ -73,11 +73,8 @@ class Half final { #if defined(NPY_HAVE_AVX512FP16) __m128d md = _mm_load_sd(&f); bits_ = static_cast(_mm_cvtsi128_si32(_mm_castph_si128(_mm_cvtpd_ph(md)))); - #elif defined(NPY_HAVE_VSX3) && defined(NPY_HAVE_VSX_ASM) - __vector double vf64 = vec_splats(f); - __vector unsigned short vf16; - __asm__ __volatile__ ("xvcvdphp %x0,%x1" : "=wa" (vf16) : "wa" (vf64)); - bits_ = vec_extract(vf16, 0); + #elif defined(NPY_HAVE_VSX3) && defined(NPY_HAVE_VSX3_HALF_DOUBLE) + __asm__ __volatile__ ("xscvdphp %x0,%x1" : "=wa" (bits_) : "wa" (f)); #else bits_ = half_private::FromDoubleBits(BitCast(f)); #endif @@ -96,7 +93,7 @@ class Half final { __vector float vf32; __asm__ __volatile__("xvcvhpsp %x0,%x1" : "=wa"(vf32) - : "wa"(vec_splats(bits_.u))); + : "wa"(vec_splats(bits_))); return vec_extract(vf32, 0); #else return BitCast(half_private::ToFloatBits(bits_)); @@ -110,12 +107,12 @@ class Half final { double ret; _mm_store_sd(&ret, _mm_cvtph_pd(_mm_castsi128_ph(_mm_cvtsi32_si128(bits_)))); return ret; - #elif defined(NPY_HAVE_VSX3) && defined(NPY_HAVE_VSX_ASM) - __vector float vf64; - __asm__ __volatile__("xvcvhpdp %x0,%x1" - : "=wa"(vf32) - : "wa"(vec_splats(bits_))); - return vec_extract(vf64, 0); + #elif 
defined(NPY_HAVE_VSX3) && defined(NPY_HAVE_VSX3_HALF_DOUBLE) + double f64; + __asm__ __volatile__("xscvhpdp %x0,%x1" + : "=wa"(f64) + : "wa"(bits_)); + return f64; #else return BitCast(half_private::ToDoubleBits(bits_)); #endif diff --git a/numpy/core/tests/test_half.py b/numpy/core/tests/test_half.py index ca849ad52ead..3e72eba8948a 100644 --- a/numpy/core/tests/test_half.py +++ b/numpy/core/tests/test_half.py @@ -21,8 +21,11 @@ def setup_method(self): # An array of all possible float16 values self.all_f16 = np.arange(0x10000, dtype=uint16) self.all_f16.dtype = float16 - self.all_f32 = np.array(self.all_f16, dtype=float32) - self.all_f64 = np.array(self.all_f16, dtype=float64) + + # NaN value can cause an invalid FP exception if HW is been used + with np.errstate(invalid='ignore'): + self.all_f32 = np.array(self.all_f16, dtype=float32) + self.all_f64 = np.array(self.all_f16, dtype=float64) # An array of all non-NaN float16 values, in sorted order self.nonan_f16 = np.concatenate( @@ -44,14 +47,19 @@ def test_half_conversions(self): # value is preserved when converting to/from other floats. # Convert from float32 back to float16 - b = np.array(self.all_f32, dtype=float16) - assert_equal(self.all_f16.view(dtype=uint16), - b.view(dtype=uint16)) + with np.errstate(invalid='ignore'): + b = np.array(self.all_f32, dtype=float16) + # avoid testing NaNs due to differ bits wither Q/SNaNs + b_nn = b == b + assert_equal(self.all_f16[b_nn].view(dtype=uint16), + b[b_nn].view(dtype=uint16)) # Convert from float64 back to float16 - b = np.array(self.all_f64, dtype=float16) - assert_equal(self.all_f16.view(dtype=uint16), - b.view(dtype=uint16)) + with np.errstate(invalid='ignore'): + b = np.array(self.all_f64, dtype=float16) + b_nn = b == b + assert_equal(self.all_f16[b_nn].view(dtype=uint16), + b[b_nn].view(dtype=uint16)) # Convert float16 to longdouble and back # This doesn't necessarily preserve the extra NaN bits, diff --git a/numpy/distutils/ccompiler_opt.py b/numpy/distutils/ccompiler_opt.py index 1e9de3c45bc0..d7a02c07be56 100644 --- a/numpy/distutils/ccompiler_opt.py +++ b/numpy/distutils/ccompiler_opt.py @@ -301,7 +301,8 @@ class _Config: ## Power8/ISA 2.07 VSX2 = dict(interest=2, implies="VSX", implies_detect=False), ## Power9/ISA 3.00 - VSX3 = dict(interest=3, implies="VSX2", implies_detect=False), + VSX3 = dict(interest=3, implies="VSX2", implies_detect=False, + extra_checks="VSX3_HALF_DOUBLE"), ## Power10/ISA 3.1 VSX4 = dict(interest=4, implies="VSX3", implies_detect=False, extra_checks="VSX4_MMA"), diff --git a/numpy/distutils/checks/extra_vsx3_half_double.c b/numpy/distutils/checks/extra_vsx3_half_double.c new file mode 100644 index 000000000000..514a2b18f96c --- /dev/null +++ b/numpy/distutils/checks/extra_vsx3_half_double.c @@ -0,0 +1,12 @@ +/** + * Assembler may not fully support the following VSX3 scalar + * instructions, even though compilers report VSX3 support. 
+ */ +int main(void) +{ + unsigned short bits = 0xFF; + double f; + __asm__ __volatile__("xscvhpdp %x0,%x1" : "=wa"(f) : "wa"(bits)); + __asm__ __volatile__ ("xscvdphp %x0,%x1" : "=wa" (bits) : "wa" (f)); + return bits; +} From 1c21b68c5c52d686d2de5d6563131408062153fa Mon Sep 17 00:00:00 2001 From: Sayed Adel Date: Tue, 7 Nov 2023 04:15:09 +0200 Subject: [PATCH 17/45] Add VX targets for dispatch-able sources of the umath multi-targets tests --- numpy/core/src/umath/_umath_tests.dispatch.c | 1 + 1 file changed, 1 insertion(+) diff --git a/numpy/core/src/umath/_umath_tests.dispatch.c b/numpy/core/src/umath/_umath_tests.dispatch.c index 9d8df4c86d36..73a59103c1a0 100644 --- a/numpy/core/src/umath/_umath_tests.dispatch.c +++ b/numpy/core/src/umath/_umath_tests.dispatch.c @@ -5,6 +5,7 @@ * SSE2 SSE41 AVX2 * VSX VSX2 VSX3 * NEON ASIMD ASIMDHP + * VX VXE */ #define PY_SSIZE_T_CLEAN #include From 8463bc941b3417693b3351b3b0e617fb626fb1f2 Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Wed, 8 Nov 2023 22:07:27 +0100 Subject: [PATCH 18/45] BLD: Fix features.h detection for Meson builds Fixes function blocklisting for glibc<2.18, reported in issue gh-25087. Signed-off-by: Marcel Bargull --- numpy/core/config.h.in | 1 + 1 file changed, 1 insertion(+) diff --git a/numpy/core/config.h.in b/numpy/core/config.h.in index e3b559753521..cd63a59b2afc 100644 --- a/numpy/core/config.h.in +++ b/numpy/core/config.h.in @@ -12,6 +12,7 @@ #mesondefine HAVE___DECLSPEC_THREAD_ /* Optional headers */ +#mesondefine HAVE_FEATURES_H #mesondefine HAVE_XLOCALE_H #mesondefine HAVE_DLFCN_H #mesondefine HAVE_EXECINFO_H From af2c4cffe2119bf377bd1a8cb287c5ce9bbb842b Mon Sep 17 00:00:00 2001 From: Sebastian Berg Date: Thu, 9 Nov 2023 11:57:30 +0100 Subject: [PATCH 19/45] BUG: Avoid intp conversion regression in Cython 3 (backport) This is the minimal backport version of gh-25094, which simply aligns the Cython 2 and Cython 3 definitions. 
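In practical terms, the regression being avoided is roughly this: with `Py_intptr_t` declared as `Py_ssize_t` in the Cython 3 `.pxd`, arguments typed `cnp.intp_t` were converted through `__index__` and began rejecting plain floats and objects that only implement `__int__`. The test added in the following patch pins the restored behaviour down; a minimal sketch of what it checks, assuming the `checks.pyx` example module from the test suite has been compiled:

    # checks.pyx (see the next patch) defines:
    #     def conv_intp(cnp.intp_t val):
    #         return val
    import checks

    class OnlyInt:
        def __int__(self):       # deliberately no __index__
            return 3

    assert checks.conv_intp(3.0) == 3        # floats are accepted again, via __int__
    assert checks.conv_intp(OnlyInt()) == 3  # as are __int__-only objects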
--- numpy/__init__.cython-30.pxd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/numpy/__init__.cython-30.pxd b/numpy/__init__.cython-30.pxd index 3ad186e40926..1409514f7a84 100644 --- a/numpy/__init__.cython-30.pxd +++ b/numpy/__init__.cython-30.pxd @@ -21,7 +21,7 @@ cdef extern from *: cdef extern from "Python.h": - ctypedef Py_ssize_t Py_intptr_t + ctypedef int Py_intptr_t cdef extern from "numpy/arrayobject.h": ctypedef Py_intptr_t npy_intp From 09eb7d5237346033ad0953499ed112394c276662 Mon Sep 17 00:00:00 2001 From: Sebastian Berg Date: Thu, 9 Nov 2023 12:03:13 +0100 Subject: [PATCH 20/45] TST: Check that Cython intp conversion uses normal integer rules --- numpy/core/tests/examples/cython/checks.pyx | 3 +++ numpy/core/tests/test_cython.py | 11 +++++++++++ 2 files changed, 14 insertions(+) diff --git a/numpy/core/tests/examples/cython/checks.pyx b/numpy/core/tests/examples/cython/checks.pyx index e41c6d657351..c5529ee8fcaf 100644 --- a/numpy/core/tests/examples/cython/checks.pyx +++ b/numpy/core/tests/examples/cython/checks.pyx @@ -30,3 +30,6 @@ def get_dt64_unit(obj): def is_integer(obj): return isinstance(obj, (cnp.integer, int)) + +def conv_intp(cnp.intp_t val): + return val diff --git a/numpy/core/tests/test_cython.py b/numpy/core/tests/test_cython.py index 29473f5ba424..99dd57e4c62d 100644 --- a/numpy/core/tests/test_cython.py +++ b/numpy/core/tests/test_cython.py @@ -122,3 +122,14 @@ def test_abstract_scalars(install_temp): assert checks.is_integer(1) assert checks.is_integer(np.int8(1)) assert checks.is_integer(np.uint64(1)) + +def test_conv_intp(install_temp): + import checks + + class myint: + def __int__(self): + return 3 + + # These conversion passes via `__int__`, not `__index__`: + assert checks.conv_intp(3.) == 3 + assert checks.conv_intp(myint()) == 3 From 8bbc85849d36fb27149bc994be9456a2b983dc0c Mon Sep 17 00:00:00 2001 From: mattip Date: Thu, 9 Nov 2023 13:13:27 +0200 Subject: [PATCH 21/45] TST: add a test --- numpy/core/tests/test_umath.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/numpy/core/tests/test_umath.py b/numpy/core/tests/test_umath.py index 5567244d14a9..59c670ffed29 100644 --- a/numpy/core/tests/test_umath.py +++ b/numpy/core/tests/test_umath.py @@ -1712,6 +1712,9 @@ def test_arctanh(self): assert_raises(FloatingPointError, np.arctanh, np.array(value, dtype=dt)) + # Make sure glibc < 2.18 atanh is not used, issue 25087 + assert np.signbit(np.arctanh(-1j).real) + # See: https://github.com/numpy/numpy/issues/20448 @pytest.mark.xfail( _glibc_older_than("2.17"), From aadc2c7f712dcd2785cb96caba21d4f41d7ea4f9 Mon Sep 17 00:00:00 2001 From: mattip Date: Thu, 9 Nov 2023 15:01:17 +0200 Subject: [PATCH 22/45] BLD: blocklist complex trig functions on musl Signed-off-by: mattip --- numpy/core/src/common/npy_config.h | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/numpy/core/src/common/npy_config.h b/numpy/core/src/common/npy_config.h index 715b17777aed..e590366888aa 100644 --- a/numpy/core/src/common/npy_config.h +++ b/numpy/core/src/common/npy_config.h @@ -160,8 +160,29 @@ #undef HAVE_CACOSHL #endif /* __GLIBC_PREREQ(2, 18) */ -#endif /* defined(__GLIBC_PREREQ) */ +#else /* defined(__GLIBC) */ +/* musl linux?, see issue #25092 */ +#undef HAVE_CASIN +#undef HAVE_CASINF +#undef HAVE_CASINL +#undef HAVE_CASINH +#undef HAVE_CASINHF +#undef HAVE_CASINHL +#undef HAVE_CATAN +#undef HAVE_CATANF +#undef HAVE_CATANL +#undef HAVE_CATANH +#undef HAVE_CATANHF +#undef HAVE_CATANHL +#undef HAVE_CACOS +#undef 
HAVE_CACOSF +#undef HAVE_CACOSL +#undef HAVE_CACOSH +#undef HAVE_CACOSHF +#undef HAVE_CACOSHL + +#endif /* defined(__GLIBC) */ #endif /* defined(HAVE_FEATURES_H) */ #endif /* NUMPY_CORE_SRC_COMMON_NPY_CONFIG_H_ */ From 28775a680b046e27ad59ec16ed308887630b2511 Mon Sep 17 00:00:00 2001 From: Ralf Gommers Date: Thu, 24 Aug 2023 14:28:46 +0200 Subject: [PATCH 23/45] CI: remove GHA job that built with setup.py and ILP64 OpenBLAS We still have several other jobs that use ILP64, and this setup.py based job does nothing special. Hence, let's remove it. [skip cirrus] [skip circle] [skip azp] --- .github/workflows/linux_blas.yml | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/.github/workflows/linux_blas.yml b/.github/workflows/linux_blas.yml index 699381f6584f..77705178869c 100644 --- a/.github/workflows/linux_blas.yml +++ b/.github/workflows/linux_blas.yml @@ -36,22 +36,6 @@ permissions: contents: read # to fetch code (actions/checkout) jobs: - openblas64_setuppy: - runs-on: ubuntu-latest - if: "github.repository == 'numpy/numpy'" - env: - DOWNLOAD_OPENBLAS: 1 - NPY_USE_BLAS_ILP64: 1 - steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 - with: - submodules: recursive - fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 - with: - python-version: '3.11' - - uses: ./.github/actions - openblas32_stable_nightly: if: "github.repository == 'numpy/numpy'" runs-on: ubuntu-latest From 5ff4da23093a979861202f8b1b884d7401dfc10c Mon Sep 17 00:00:00 2001 From: Ralf Gommers Date: Thu, 24 Aug 2023 14:53:09 +0200 Subject: [PATCH 24/45] CI: move conda and macOS Azure job to Meson The use of conda and testing on macOS is nice to combine; no need for the separate jobs. Same for the almost 100% duplication of the two macOS Azure jobs - we can use only one here. Code coverage is removed, because it's done in another job on Linux already, and uploading to Azure isn't all that useful. Some more cleanups to comments and code structure, so that the job is reasonably readable now. 
--- azure-pipelines.yml | 96 +++++++++++++-------------------------------- 1 file changed, 27 insertions(+), 69 deletions(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index d14e73b27edc..0c2109fdf387 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -89,15 +89,13 @@ stages: pool: vmImage: 'macOS-11' strategy: - maxParallel: 3 + maxParallel: 2 matrix: Python39: PYTHON_VERSION: '3.9' - USE_OPENBLAS: '1' Python39-ILP64: PYTHON_VERSION: '3.9' NPY_USE_BLAS_ILP64: '1' - USE_OPENBLAS: '1' steps: - script: | git submodule update --init @@ -123,9 +121,8 @@ stages: source tools/wheels/gfortran_utils.sh install_gfortran displayName: 'install gfortran' - # use the pre-built openblas binary that most closely - # matches our MacOS wheel builds -- currently based - # primarily on file size / name details + # use the pre-built openblas binary that most closely matches our MacOS + # wheel builds -- currently based primarily on file size / name details - script: | set -xe target=$(python tools/openblas_support.py) @@ -135,8 +132,7 @@ stages: cp $target/include/* /usr/local/include/ otool -L /usr/local/lib/libopenblas* displayName: 'install pre-built openblas' - condition: eq(variables['USE_OPENBLAS'], '1') - - script: python -m pip install --upgrade pip 'setuptools<49.2.0' wheel + - script: python -m pip install --upgrade pip displayName: 'Install tools' - script: | python -m pip install -r test_requirements.txt @@ -149,61 +145,29 @@ stages: - script: git submodule update --init displayName: 'Fetch submodules' - # prefer usage of clang over gcc proper - # to match likely scenario on many user mac machines - - script: python setup.py build -j 4 build_src --verbose-cfg install + # TODO: pick up the correct OpenBLAS libraries once we can install those through wheels + - script: python -m pip install . -Ccompile-args="-j4" -Csetup-args="-Dallow-noblas=true" displayName: 'Build NumPy' - env: - BLAS: None - LAPACK: None - ATLAS: None - CC: /usr/bin/clang - # wait until after dev build of NumPy to pip - # install matplotlib to avoid pip install of older numpy + # only install matplotlib here, to avoid pulling in an older numpy - script: python -m pip install matplotlib displayName: 'Install matplotlib before refguide run' - - script: python runtests.py -g --refguide-check + - script: | + set -xe + cd tools + python refguide_check.py --doctests displayName: 'Run Refguide Check' - condition: eq(variables['USE_OPENBLAS'], '1') - script: | + cd tools echo LIBRARY_PATH ${LIBRARY_PATH} - python runtests.py -n --mode=full -- -rsx --junitxml=junit/test-results.xml + pytest --pyargs numpy displayName: 'Run Full NumPy Test Suite' - condition: eq(variables['USE_OPENBLAS'], '1') env: # gfortran installed above adds -lSystem, so this is needed to find it (gh-22043) LIBRARY_PATH: /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/lib - - bash: | - python -m pip install threadpoolctl - python tools/openblas_support.py --check_version - displayName: 'Verify OpenBLAS version' - condition: eq(variables['USE_OPENBLAS'], '1') - - # import doesn't work when in numpy src directory , so do a pip dev install of build lib to test - - script: | - #!/bin/bash -v - set +e - python -c "import numpy as np" > test_output.log 2>&1 - check_output_code=$? - cat test_output.log - grep "buggy Accelerate backend" test_output.log - check_message=$? 
- if [ $check_output_code == 1 ] && [ $check_message == 0 ]; then exit 0; else exit 1;fi - displayName: "Check if numpy import fails with accelerate" - condition: eq(variables['USE_OPENBLAS'], '0') - - - task: PublishTestResults@2 - condition: succeededOrFailed() - inputs: - testResultsFiles: '**/test-*.xml' - failTaskOnFailedTests: true - testRunTitle: 'Publish test results for Python 3.9 64-bit full Mac OS' - - - job: Windows pool: vmImage: 'windows-2019' @@ -242,25 +206,19 @@ stages: git submodule update --init displayName: 'Fetch submodules' - script: | - # create and activate conda environment - conda env create -f environment.yml + conda env create -f environment.yml displayName: 'Create conda environment.' - script: | - # >>> conda initialize >>> - # !! Contents within this block are 'conda init' !! - # see https://github.com/conda/conda/issues/7980 - __conda_setup="$('conda' 'shell.bash' 'hook' 2> /dev/null)" - eval "$__conda_setup" - unset __conda_setup - # <<< conda initialize <<< - conda activate numpy-dev - # Run native baseline Build / Tests - python runtests.py --show-build-log --cpu-baseline=native --cpu-dispatch=none \ - --debug-info --mode=full -- -rsx --junitxml=junit/test-results.xml - displayName: 'Run native baseline Build / Tests in conda.' - - task: PublishTestResults@2 - condition: succeededOrFailed() - inputs: - testResultsFiles: '**/test-*.xml' - failTaskOnFailedTests: true - testRunTitle: 'Publish test results for conda installation' + # >>> conda initialize >>> + # !! Contents within this block are 'conda init' !! + # see https://github.com/conda/conda/issues/7980 + __conda_setup="$('conda' 'shell.bash' 'hook' 2> /dev/null)" + eval "$__conda_setup" + unset __conda_setup + # <<< conda initialize <<< + conda activate numpy-dev + # Note: conda env activation doesn't carry over between steps, so + # build/test are both in this step + spin build -- -Dcpu-baseline=native -Dcpu-dispatch=none + spin test -m full + displayName: 'Build with native baseline, run full test suite' From 5d331312d7f2bfae3f12c3cf7342eb519b645aa2 Mon Sep 17 00:00:00 2001 From: Ralf Gommers Date: Fri, 25 Aug 2023 11:41:56 +0200 Subject: [PATCH 25/45] CI: factor out custom code checks (ninja, vulture, `test_*` installed) [skip cirrus] [skip circle] --- .github/workflows/linux.yml | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml index d73de33c7640..ebceec365d7c 100644 --- a/.github/workflows/linux.yml +++ b/.github/workflows/linux.yml @@ -206,3 +206,34 @@ jobs: cd tools pytest --pyargs numpy -m "not slow" + custom_checks: + needs: [smoke_test] + runs-on: ubuntu-latest + if: github.event_name != 'push' + steps: + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 + with: + submodules: recursive + fetch-depth: 0 + - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 + with: + python-version: '3.11' + - name: Install build and test dependencies from PyPI + run: | + pip install -r build_requirements.txt + pip install -r test_requirements.txt + pip install vulture + - name: Build and install NumPy + run: | + # Install using the fastests way to build (no BLAS, no SIMD) + spin build -j2 -- -Dallow-noblas=true -Dcpu-baseline=none -Dcpu-dispatch=none + - name: Check build-internal dependencies + run: | + ninja -C build -t missingdeps + - name: Check installed test and stub files + run: | + python tools/check_installed_files.py $(find ./build-install -path 
'*/site-packages/numpy') + - name: Check for unreachable code paths in Python modules + run: | + # Need the explicit `bash -c` here because `grep` returns exit code 1 for no matches + bash -c "! vulture . --min-confidence 100 --exclude doc/,numpy/distutils/,vendored-meson/ | grep 'unreachable'" From 967e53397ca7d92cf3eeacf8f1851d50011129df Mon Sep 17 00:00:00 2001 From: Ralf Gommers Date: Fri, 25 Aug 2023 12:39:06 +0200 Subject: [PATCH 26/45] CI: remove macOS and conda jobs from Azure [skip cirrus] [skip circle] --- azure-pipelines.yml | 110 -------------------------------------------- 1 file changed, 110 deletions(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 0c2109fdf387..ce78d07777fa 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -84,90 +84,6 @@ stages: python3 -m pytest --pyargs numpy" displayName: 'Run 32-bit manylinux2014 Docker Build / Tests' - - - job: macOS - pool: - vmImage: 'macOS-11' - strategy: - maxParallel: 2 - matrix: - Python39: - PYTHON_VERSION: '3.9' - Python39-ILP64: - PYTHON_VERSION: '3.9' - NPY_USE_BLAS_ILP64: '1' - steps: - - script: | - git submodule update --init - displayName: 'Fetch submodules' - # the @0 refers to the (major) version of the *task* on Microsoft's - # end, not the order in the build matrix nor anything to do - # with version of Python selected - - task: UsePythonVersion@0 - inputs: - versionSpec: $(PYTHON_VERSION) - addToPath: true - architecture: 'x64' - - script: | - set -xe - [ -n "$USE_XCODE_10" ] && /bin/bash -c "sudo xcode-select -s /Applications/Xcode_10.app/Contents/Developer" - clang --version - displayName: 'report clang version' - - - script: | - if [[ $PLATFORM == "macosx-arm64" ]]; then - PLAT="arm64" - fi - source tools/wheels/gfortran_utils.sh - install_gfortran - displayName: 'install gfortran' - # use the pre-built openblas binary that most closely matches our MacOS - # wheel builds -- currently based primarily on file size / name details - - script: | - set -xe - target=$(python tools/openblas_support.py) - ls -lR $target - # manually link to appropriate system paths - cp $target/lib/lib* /usr/local/lib/ - cp $target/include/* /usr/local/include/ - otool -L /usr/local/lib/libopenblas* - displayName: 'install pre-built openblas' - - script: python -m pip install --upgrade pip - displayName: 'Install tools' - - script: | - python -m pip install -r test_requirements.txt - # Don't use doc_requirements.txt since that messes up tests - python -m pip install vulture sphinx==4.3.0 numpydoc==1.4.0 ninja - displayName: 'Install dependencies; some are optional to avoid test skips' - - script: /bin/bash -c "! vulture . --min-confidence 100 --exclude doc/,numpy/distutils/ | grep 'unreachable'" - displayName: 'Check for unreachable code paths in Python modules' - - - script: git submodule update --init - displayName: 'Fetch submodules' - - # TODO: pick up the correct OpenBLAS libraries once we can install those through wheels - - script: python -m pip install . 
-Ccompile-args="-j4" -Csetup-args="-Dallow-noblas=true" - displayName: 'Build NumPy' - - # only install matplotlib here, to avoid pulling in an older numpy - - script: python -m pip install matplotlib - displayName: 'Install matplotlib before refguide run' - - - script: | - set -xe - cd tools - python refguide_check.py --doctests - displayName: 'Run Refguide Check' - - - script: | - cd tools - echo LIBRARY_PATH ${LIBRARY_PATH} - pytest --pyargs numpy - displayName: 'Run Full NumPy Test Suite' - env: - # gfortran installed above adds -lSystem, so this is needed to find it (gh-22043) - LIBRARY_PATH: /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/lib - - job: Windows pool: vmImage: 'windows-2019' @@ -196,29 +112,3 @@ stages: steps: - template: azure-steps-windows.yml - - - - job: Linux_conda - pool: - vmImage: 'ubuntu-20.04' - steps: - - script: | - git submodule update --init - displayName: 'Fetch submodules' - - script: | - conda env create -f environment.yml - displayName: 'Create conda environment.' - - script: | - # >>> conda initialize >>> - # !! Contents within this block are 'conda init' !! - # see https://github.com/conda/conda/issues/7980 - __conda_setup="$('conda' 'shell.bash' 'hook' 2> /dev/null)" - eval "$__conda_setup" - unset __conda_setup - # <<< conda initialize <<< - conda activate numpy-dev - # Note: conda env activation doesn't carry over between steps, so - # build/test are both in this step - spin build -- -Dcpu-baseline=native -Dcpu-dispatch=none - spin test -m full - displayName: 'Build with native baseline, run full test suite' From 0160b41ac15c3a7fe20610e27a488394b4e801b2 Mon Sep 17 00:00:00 2001 From: Sayed Adel Date: Mon, 21 Aug 2023 19:16:46 +0400 Subject: [PATCH 27/45] CI: Provides cross-compile builds for armhf, ppc64le, and s390x This patch implements cross-compile builds for armhf, ppc64le, and IBMZ architectures in the CI pipeline. While it might not be highly efficient due to qemu's quirks and slower performance, it still does extend testing to include umath, ufunc, and simd operations. In this setup, QEMU manages the Python interpreter, meson, and runtime tests, while ninja, the toolchain, and any binutils binaries are executed natively to speed up the build. --- .github/workflows/linux_qemu.yml | 147 +++++++++++++++++++++++++++++++ 1 file changed, 147 insertions(+) create mode 100644 .github/workflows/linux_qemu.yml diff --git a/.github/workflows/linux_qemu.yml b/.github/workflows/linux_qemu.yml new file mode 100644 index 000000000000..fe313acb7800 --- /dev/null +++ b/.github/workflows/linux_qemu.yml @@ -0,0 +1,147 @@ +# Meson's Python module doesn't support crosscompiling, +# and python dependencies may be another potential hurdle. +# There might also be a need to run runtime tests during configure time. +# +# The recommended practice is to rely on Docker to provide the x86_64 crosscompile toolchain, +# enabling native execution via binfmt. +# +# In simpler terms, everything except the crosscompile toolchain will be emulated. 
+ +name: Linux Qemu tests + +on: + pull_request: + branches: + - main + - maintenance/** + +defaults: + run: + shell: bash + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +jobs: + linux_qemu: + if: "github.repository == 'numpy/numpy'" + runs-on: ubuntu-22.04 + continue-on-error: true + strategy: + matrix: + BUILD_PROP: + - [ + "armhf", + "arm-linux-gnueabihf", + "arm32v7/ubuntu:22.04", + "-Dallow-noblas=true", + # test_unary_spurious_fpexception is currently skipped + # FIXME(@seiko2plus): Requires confirmation for the following issue: + # The presence of an FP invalid exception caused by sqrt. Unsure if this is a qemu bug or not. + "(test_kind or test_multiarray or test_simd or test_umath or test_ufunc) and not test_unary_spurious_fpexception" + ] + - [ + "ppc64le", + "powerpc64le-linux-gnu", + "ppc64le/ubuntu:22.04", + "-Dallow-noblas=true", + "test_kind or test_multiarray or test_simd or test_umath or test_ufunc", + ] + - [ + "s390x", + "s390x-linux-gnu", + "s390x/ubuntu:22.04", + "-Dallow-noblas=true", + # Skipping TestRationalFunctions.test_gcd_overflow test + # because of a possible qemu bug that appears to be related to int64 overflow in absolute operation. + # TODO(@seiko2plus): Confirm the bug and provide a minimal reproducer, then report it to upstream. + "(test_kind or test_multiarray or test_simd or test_umath or test_ufunc) and not test_gcd_overflow" + ] + - [ + "s390x - baseline(Z13)", + "s390x-linux-gnu", + "s390x/ubuntu:22.04", + "-Dallow-noblas=true -Dcpu-baseline=vx", + "(test_kind or test_multiarray or test_simd or test_umath or test_ufunc) and not test_gcd_overflow" + ] + env: + TOOLCHAIN_NAME: ${{ matrix.BUILD_PROP[1] }} + DOCKER_CONTAINER: ${{ matrix.BUILD_PROP[2] }} + MESON_OPTIONS: ${{ matrix.BUILD_PROP[3] }} + RUNTIME_TEST_FILTER: ${{ matrix.BUILD_PROP[4] }} + TERM: xterm-256color + + name: "${{ matrix.BUILD_PROP[0] }}" + steps: + - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + with: + submodules: recursive + fetch-depth: 0 + + - name: Initialize binfmt_misc for qemu-user-static + run: | + docker run --rm --privileged multiarch/qemu-user-static --reset -p yes + + - name: Install GCC cross-compilers + run: | + sudo apt update + sudo apt install -y ninja-build gcc-${TOOLCHAIN_NAME} g++-${TOOLCHAIN_NAME} gfortran-${TOOLCHAIN_NAME} + + - name: Cache docker container + uses: actions/cache@v3 + id: container-cache + with: + path: /docker_${{ matrix.BUILD_PROP[1] }} + key: container-${{ runner.os }}-${{ matrix.BUILD_PROP[1] }}-${{ matrix.BUILD_PROP[2] }}-${{ hashFiles('build_requirements.txt') }} + + - name: Creates new container + if: steps.container-cache.outputs.cache-hit != 'true' + run: | + docker run --name the_container --interactive -v /:/host -v $(pwd):/numpy ${DOCKER_CONTAINER} /bin/bash -c " + apt update && + apt install -y cmake git python3 python-is-python3 python3-dev python3-pip && + mkdir -p /lib64 && ln -s /host/lib64/ld-* /lib64/ && + ln -s /host/lib/x86_64-linux-gnu /lib/x86_64-linux-gnu && + rm -rf /usr/${TOOLCHAIN_NAME} && ln -s /host/usr/${TOOLCHAIN_NAME} /usr/${TOOLCHAIN_NAME} && + rm -rf /usr/lib/gcc/${TOOLCHAIN_NAME} && ln -s /host/usr/lib/gcc-cross/${TOOLCHAIN_NAME} /usr/lib/gcc/${TOOLCHAIN_NAME} && + rm -f /usr/bin/gcc && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-gcc /usr/bin/gcc && + rm -f /usr/bin/g++ && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-g++ /usr/bin/g++ && + rm -f /usr/bin/gfortran && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-gfortran 
/usr/bin/gfortran && + rm -f /usr/bin/ar && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-ar /usr/bin/ar && + rm -f /usr/bin/as && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-as /usr/bin/as && + rm -f /usr/bin/ld && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-ld /usr/bin/ld && + rm -f /usr/bin/ld.bfd && ln -s /host/usr/bin/${TOOLCHAIN_NAME}-ld.bfd /usr/bin/ld.bfd && + rm -f /usr/bin/ninja && ln -s /host/usr/bin/ninja /usr/bin/ninja && + git config --global --add safe.directory /numpy && + python -m pip install -r /numpy/build_requirements.txt && + python -m pip install pytest pytest-xdist hypothesis typing_extensions && + rm -f /usr/local/bin/ninja && mkdir -p /usr/local/bin && ln -s /host/usr/bin/ninja /usr/local/bin/ninja + " + docker commit the_container the_container + mkdir -p "~/docker_${TOOLCHAIN_NAME}" + docker save -o "~/docker_${TOOLCHAIN_NAME}/the_container.tar" the_container + + - name: Load container from cache + if: steps.container-cache.outputs.cache-hit == 'true' + run: docker load -i "~/docker_${TOOLCHAIN_NAME}/the_container.tar" + + - name: Meson Build + run: | + docker run --rm -e "TERM=xterm-256color" -v $(pwd):/numpy -v /:/host the_container \ + /bin/script -e -q -c "/bin/bash --noprofile --norc -eo pipefail -c ' + cd /numpy && spin build --clean -- ${MESON_OPTIONS} + '" + + - name: Meson Log + if: always() + run: 'cat build/meson-logs/meson-log.txt' + + - name: Run Tests + run: | + docker run --rm -e "TERM=xterm-256color" -v $(pwd):/numpy -v /:/host the_container \ + /bin/script -e -q -c "/bin/bash --noprofile --norc -eo pipefail -c ' + export F90=/usr/bin/gfortran + cd /numpy && spin test -- -k \"${RUNTIME_TEST_FILTER}\" + '" + From ea4a4fbe7d26c13c52bb8a1fc16e3843abc39969 Mon Sep 17 00:00:00 2001 From: Sayed Adel Date: Thu, 24 Aug 2023 09:14:17 +0400 Subject: [PATCH 28/45] CI: Fix the cache path of Linux QEMU --- .github/workflows/linux_qemu.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/linux_qemu.yml b/.github/workflows/linux_qemu.yml index fe313acb7800..6d560e0b0f44 100644 --- a/.github/workflows/linux_qemu.yml +++ b/.github/workflows/linux_qemu.yml @@ -92,7 +92,7 @@ jobs: uses: actions/cache@v3 id: container-cache with: - path: /docker_${{ matrix.BUILD_PROP[1] }} + path: ~/docker_${{ matrix.BUILD_PROP[1] }} key: container-${{ runner.os }}-${{ matrix.BUILD_PROP[1] }}-${{ matrix.BUILD_PROP[2] }}-${{ hashFiles('build_requirements.txt') }} - name: Creates new container From 882a5a07041e6c22ac6eb289686ad0e374f48c73 Mon Sep 17 00:00:00 2001 From: Charles Harris Date: Fri, 10 Nov 2023 16:38:40 -0700 Subject: [PATCH 29/45] MAINT: Remove .travis.yml. We can run the s390x tests in linux_qemu.yml. 
--- .travis.yml | 59 ----------------------------------------------------- 1 file changed, 59 deletions(-) delete mode 100644 .travis.yml diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 72845eefac09..000000000000 --- a/.travis.yml +++ /dev/null @@ -1,59 +0,0 @@ -# After changing this file, check it on: -# http://lint.travis-ci.org/ -language: python -group: travis_latest -os: linux -dist: focal - -# Travis allows these packages, additions can be requested -# https://github.com/travis-ci/apt-package-safelist -addons: - apt: - packages: &common_packages - - gfortran - - libgfortran5 - - libatlas-base-dev - # Speedup builds, particularly when USE_CHROOT=1 - - eatmydata - -# Disable clone depth -git: - depth: false - -cache: - directories: - - $HOME/.cache/pip - -jobs: - include: -# The ppc64le for these tests is usually missing, resulting in -# test failure most of the time. Let's not do that. -# - python: "3.9" -# os: linux -# arch: ppc64le -# env: -# # use OpenBLAS build, not system ATLAS -# - DOWNLOAD_OPENBLAS=1 -# # - NPY_USE_BLAS_ILP64=1 # the openblas build fails -# - ATLAS=None -# # VSX4 still not supported by ubuntu/gcc-11 -# - EXPECT_CPU_FEATURES="VSX VSX2 VSX3" - - - python: "3.9" - os: linux - arch: s390x - # fixes VX assembler ambiguous errors - # due to compiler incompatibility - install: sudo apt update && sudo apt -y --only-upgrade install binutils - env: - # use OpenBLAS build, not system ATLAS - - DOWNLOAD_OPENBLAS=1 - - NPY_USE_BLAS_ILP64=1 - - ATLAS=None - - EXPECT_CPU_FEATURES="VX VXE VXE2" - -before_install: - - ./tools/travis-before-install.sh - -script: - - ./tools/travis-test.sh From ba1e504629460c91cdfa7cdc32c9182c5829f49d Mon Sep 17 00:00:00 2001 From: Charles Harris Date: Fri, 10 Nov 2023 17:40:05 -0700 Subject: [PATCH 30/45] BUG: Fix build on ppc64 when the baseline set to Power9 or higher Backport of #24806. This backport was already made in #25083, but that didn't make use of the linux_qemu.yml action, so this tweaks it a bit. 
--- .github/workflows/linux_qemu.yml | 12 +++++++++++- meson_cpu/ppc64/meson.build | 2 +- numpy/core/src/common/half.hpp | 8 ++++---- numpy/core/tests/test_half.py | 11 ++++++----- numpy/distutils/ccompiler_opt.py | 2 +- 5 files changed, 23 insertions(+), 12 deletions(-) diff --git a/.github/workflows/linux_qemu.yml b/.github/workflows/linux_qemu.yml index 6d560e0b0f44..31ceab851553 100644 --- a/.github/workflows/linux_qemu.yml +++ b/.github/workflows/linux_qemu.yml @@ -23,6 +23,9 @@ concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} cancel-in-progress: true +permissions: + contents: read + jobs: linux_qemu: if: "github.repository == 'numpy/numpy'" @@ -48,6 +51,13 @@ jobs: "-Dallow-noblas=true", "test_kind or test_multiarray or test_simd or test_umath or test_ufunc", ] + - [ + "ppc64le - baseline(Power9)", + "powerpc64le-linux-gnu", + "ppc64le/ubuntu:22.04", + "-Dallow-noblas=true -Dcpu-baseline=vsx3", + "test_kind or test_multiarray or test_simd or test_umath or test_ufunc", + ] - [ "s390x", "s390x-linux-gnu", @@ -74,7 +84,7 @@ jobs: name: "${{ matrix.BUILD_PROP[0] }}" steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 with: submodules: recursive fetch-depth: 0 diff --git a/meson_cpu/ppc64/meson.build b/meson_cpu/ppc64/meson.build index 986a57ee184c..bad95257ca95 100644 --- a/meson_cpu/ppc64/meson.build +++ b/meson_cpu/ppc64/meson.build @@ -33,7 +33,7 @@ VSX3 = mod_features.new( VSX4 = mod_features.new( 'VSX4', 4, implies: VSX3, args: {'val': '-mcpu=power10', 'match': '.*[mcpu=|vsx].*'}, detect: {'val': 'VSX4', 'match': 'VSX.*'}, - test_code: files(source_root + '/numpy/distutils/checks/cpu_vsx3.c')[0], + test_code: files(source_root + '/numpy/distutils/checks/cpu_vsx4.c')[0], extra_tests: { 'VSX4_MMA': files(source_root + '/numpy/distutils/checks/extra_vsx4_mma.c')[0] } diff --git a/numpy/core/src/common/half.hpp b/numpy/core/src/common/half.hpp index 13dcd074283c..ff9a547766d3 100644 --- a/numpy/core/src/common/half.hpp +++ b/numpy/core/src/common/half.hpp @@ -47,7 +47,7 @@ class Half final { /// Default constructor. initialize nothing. Half() = default; - /// Constract from float + /// Construct from float /// If there are no hardware optimization available, rounding will always /// be set to ties to even. explicit Half(float f) @@ -118,7 +118,7 @@ class Half final { #endif } - /// Returns a new Half constracted from the IEEE 754 binary16. + /// Returns a new Half constructed from the IEEE 754 binary16. 
static constexpr Half FromBits(uint16_t bits) { Half h{}; @@ -131,7 +131,7 @@ class Half final { return bits_; } - /// @name Comparison operators (orderd) + /// @name Comparison operators (ordered) /// @{ constexpr bool operator==(Half r) const { @@ -155,7 +155,7 @@ class Half final { } /// @} - /// @name Comparison operators (unorderd) + /// @name Comparison operators (unordered) /// @{ constexpr bool operator!=(Half r) const { diff --git a/numpy/core/tests/test_half.py b/numpy/core/tests/test_half.py index 3e72eba8948a..fbc1bf6a0a6d 100644 --- a/numpy/core/tests/test_half.py +++ b/numpy/core/tests/test_half.py @@ -274,8 +274,8 @@ def test_half_correctness(self): if len(a32_fail) != 0: bad_index = a32_fail[0] assert_equal(self.finite_f32, a_manual, - "First non-equal is half value %x -> %g != %g" % - (self.finite_f16[bad_index], + "First non-equal is half value 0x%x -> %g != %g" % + (a_bits[bad_index], self.finite_f32[bad_index], a_manual[bad_index])) @@ -283,8 +283,8 @@ def test_half_correctness(self): if len(a64_fail) != 0: bad_index = a64_fail[0] assert_equal(self.finite_f64, a_manual, - "First non-equal is half value %x -> %g != %g" % - (self.finite_f16[bad_index], + "First non-equal is half value 0x%x -> %g != %g" % + (a_bits[bad_index], self.finite_f64[bad_index], a_manual[bad_index])) @@ -327,7 +327,8 @@ def test_half_funcs(self): a = np.array([0, 0, -1, -1/1e20, 0, 2.0**-24, 7.629e-6], dtype=float16) assert_equal(a.nonzero()[0], [2, 5, 6]) - a = a.byteswap().newbyteorder() + a = a.byteswap() + a = a.view(a.dtype.newbyteorder()) assert_equal(a.nonzero()[0], [2, 5, 6]) diff --git a/numpy/distutils/ccompiler_opt.py b/numpy/distutils/ccompiler_opt.py index d7a02c07be56..37a5368b0b82 100644 --- a/numpy/distutils/ccompiler_opt.py +++ b/numpy/distutils/ccompiler_opt.py @@ -1309,7 +1309,7 @@ def feature_names(self, names=None, force_flags=None, macros=[]): def feature_is_exist(self, name): """ Returns True if a certain feature is exist and covered within - `_Config.conf_features`. + ``_Config.conf_features``. Parameters ---------- From 19d5b69fb54ffaf2e8c4daa8b6252c1d2282a2a0 Mon Sep 17 00:00:00 2001 From: Charles Harris Date: Fri, 10 Nov 2023 20:47:45 -0700 Subject: [PATCH 31/45] MAINT: Fix flaky test and update windows.yml action. - Skip test that needs huge memory on 32-bit architectures - Update windows.yml action from main --- .github/workflows/windows.yml | 43 ++++++++++++++-------------------- numpy/core/tests/test_ufunc.py | 2 ++ 2 files changed, 20 insertions(+), 25 deletions(-) diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml index fbde86092b92..988a55b761db 100644 --- a/.github/workflows/windows.yml +++ b/.github/workflows/windows.yml @@ -23,13 +23,13 @@ jobs: compiler: ["MSVC", "Clang-cl"] steps: - name: Checkout - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 with: submodules: recursive fetch-depth: 0 - name: Setup Python - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 + uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v4.7.1 with: python-version: '3.11' @@ -37,16 +37,9 @@ jobs: run: | python -m pip install spin Cython - - name: Install OpenBLAS (MacPython build) + - name: Install pkg-config run: | - # Download and install pre-built OpenBLAS library with 32-bit - # interfaces. 
Unpack it in the pkg-config hardcoded path - choco install unzip -y - choco install wget -y choco install -y --checksum 6004DF17818F5A6DBF19CB335CC92702 pkgconfiglite - wget https://anaconda.org/multibuild-wheels-staging/openblas-libs/v0.3.21/download/openblas-v0.3.21-win_amd64-gcc_10_3_0.zip - unzip -d c:\opt openblas-v0.3.21-win_amd64-gcc_10_3_0.zip - echo "PKG_CONFIG_PATH=c:\opt\64\lib\pkgconfig;" >> $env:GITHUB_ENV - name: Install Clang-cl if: matrix.compiler == 'Clang-cl' @@ -55,26 +48,26 @@ jobs: - name: Install NumPy (MSVC) if: matrix.compiler == 'MSVC' + env: + PKG_CONFIG_PATH: ${{ github.workspace }}/.openblas run: | - spin build -j2 -- --vsenv + python -m pip install scipy-openblas32 + spin build --with-scipy-openblas=32 -j2 -- --vsenv - name: Install NumPy (Clang-cl) if: matrix.compiler == 'Clang-cl' + env: + PKG_CONFIG_PATH: ${{ github.workspace }}/.openblas run: | "[binaries]","c = 'clang-cl'","cpp = 'clang-cl'","ar = 'llvm-lib'","c_ld = 'lld-link'","cpp_ld = 'lld-link'" | Out-File $PWD/clang-cl-build.ini -Encoding ascii - spin build -j2 -- --vsenv --native-file=$PWD/clang-cl-build.ini + python -m pip install scipy-openblas32 + spin build --with-scipy-openblas=32 -j2 -- --vsenv --native-file=$PWD/clang-cl-build.ini - - name: Copy OpenBLAS DLL, write _distributor_init.py + - name: Meson Log + shell: bash + if: ${{ failure() }} run: | - # Getting the OpenBLAS DLL to the right place so it loads - $installed_path = "$PWD\build-install\usr\Lib\site-packages" - $numpy_path = "${installed_path}\numpy" - $libs_path = "${installed_path}\numpy.libs" - mkdir ${libs_path} - $ob_path = "C:/opt/64/bin/" - cp $ob_path/*.dll $libs_path - # Write _distributor_init.py to load .libs DLLs. - python -c "from tools import openblas_support; openblas_support.make_init(r'${numpy_path}')" + cat build/meson-logs/meson-log.txt - name: Install test dependencies run: | @@ -85,19 +78,19 @@ jobs: run: | spin test - msvc_32bit_python_openblas: + msvc_32bit_python_no_openblas: name: MSVC, 32-bit Python, no BLAS runs-on: windows-2019 if: "github.repository == 'numpy/numpy'" steps: - name: Checkout - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 with: submodules: recursive fetch-depth: 0 - name: Setup Python (32-bit) - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 + uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v4.7.1 with: python-version: '3.10' architecture: 'x86' diff --git a/numpy/core/tests/test_ufunc.py b/numpy/core/tests/test_ufunc.py index 02c437021fe9..9fbc4b2dc57b 100644 --- a/numpy/core/tests/test_ufunc.py +++ b/numpy/core/tests/test_ufunc.py @@ -1617,6 +1617,8 @@ def check_identityless_reduction(self, a): assert_equal(np.minimum.reduce(a, axis=()), a) @requires_memory(6 * 1024**3) + @pytest.mark.skipif(sys.maxsize < 2**32, + reason="test array too large for 32bit platform") def test_identityless_reduction_huge_array(self): # Regression test for gh-20921 (copying identity incorrectly failed) arr = np.zeros((2, 2**31), 'uint8') From 31e2c8a36801c3ef6b5c9bc8a7af4ac5c2bae5b2 Mon Sep 17 00:00:00 2001 From: Charles Harris Date: Sat, 11 Nov 2023 10:21:58 -0700 Subject: [PATCH 32/45] MAINT: Update .spin/cmds.py from main. 
[skip cirrus] [skip azp] [skip actions] [skip circle] --- .spin/cmds.py | 173 +++++++++++++++++++++++++++++++++++++++++++++++++ pyproject.toml | 8 ++- 2 files changed, 180 insertions(+), 1 deletion(-) diff --git a/.spin/cmds.py b/.spin/cmds.py index 09556e67bbce..ea44329b1346 100644 --- a/.spin/cmds.py +++ b/.spin/cmds.py @@ -7,6 +7,8 @@ import shutil import json import pathlib +import importlib +import subprocess import click from spin import util @@ -23,6 +25,61 @@ ) +def _get_numpy_tools(filename): + filepath = pathlib.Path('tools', filename) + spec = importlib.util.spec_from_file_location(filename.stem, filepath) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + return module + + +@click.command() +@click.option( + "-t", "--token", + help="GitHub access token", + required=True +) +@click.option( + "--revision-range", + help="..", + required=True +) +@click.pass_context +def changelog(ctx, token, revision_range): + """👩 Get change log for provided revision range + + \b + Example: + + \b + $ spin authors -t $GH_TOKEN --revision-range v1.25.0..v1.26.0 + """ + try: + from github.GithubException import GithubException + from git.exc import GitError + changelog = _get_numpy_tools(pathlib.Path('changelog.py')) + except ModuleNotFoundError as e: + raise click.ClickException( + f"{e.msg}. Install the missing packages to use this command." + ) + click.secho( + f"Generating change log for range {revision_range}", + bold=True, fg="bright_green", + ) + try: + changelog.main(token, revision_range) + except GithubException as e: + raise click.ClickException( + f"GithubException raised with status: {e.status} " + f"and message: {e.data['message']}" + ) + except GitError as e: + raise click.ClickException( + f"Git error in command `{' '.join(e.command)}` " + f"with error message: {e.stderr}" + ) + + @click.command() @click.option( "-j", "--jobs", @@ -263,6 +320,47 @@ def _run_asv(cmd): util.run(cmd, cwd='benchmarks', env=env) +@click.command() +@click.option( + "-b", "--branch", + metavar='branch', + default="main", +) +@click.option( + '--uncommitted', + is_flag=True, + default=False, + required=False, +) +@click.pass_context +def lint(ctx, branch, uncommitted): + """🔦 Run lint checks on diffs. + Provide target branch name or `uncommitted` to check changes before committing: + + \b + Examples: + + \b + For lint checks of your development brach with `main` or a custom branch: + + \b + $ spin lint # defaults to main + $ spin lint --branch custom_branch + + \b + To check just the uncommitted changes before committing + + \b + $ spin lint --uncommitted + """ + try: + linter = _get_numpy_tools(pathlib.Path('linter.py')) + except ModuleNotFoundError as e: + raise click.ClickException( + f"{e.msg}. 
Install using linter_requirements.txt" + ) + + linter.DiffLinter(branch).run_lint(uncommitted) @click.command() @click.option( @@ -470,3 +568,78 @@ def _config_openblas(blas_variant): os.makedirs(openblas_dir, exist_ok=True) with open(pkg_config_fname, "wt", encoding="utf8") as fid: fid.write(openblas.get_pkg_config().replace("\\", "/")) + + +@click.command() +@click.option( + "-v", "--version-override", + help="NumPy version of release", + required=False +) +@click.pass_context +def notes(ctx, version_override): + """🎉 Generate release notes and validate + + \b + Example: + + \b + $ spin notes --version-override 2.0 + + \b + To automatically pick the version + + \b + $ spin notes + """ + project_config = util.get_config() + version = version_override or project_config['project.version'] + + click.secho( + f"Generating release notes for NumPy {version}", + bold=True, fg="bright_green", + ) + + # Check if `towncrier` is installed + if not shutil.which("towncrier"): + raise click.ClickException( + f"please install `towncrier` to use this command" + ) + + click.secho( + f"Reading upcoming changes from {project_config['tool.towncrier.directory']}", + bold=True, fg="bright_yellow" + ) + # towncrier build --version 2.1 --yes + cmd = ["towncrier", "build", "--version", version, "--yes"] + try: + p = util.run( + cmd=cmd, + sys_exit=False, + output=True, + encoding="utf-8" + ) + except subprocess.SubprocessError as e: + raise click.ClickException( + f"`towncrier` failed returned {e.returncode} with error `{e.stderr}`" + ) + + output_path = project_config['tool.towncrier.filename'].format(version=version) + click.secho( + f"Release notes successfully written to {output_path}", + bold=True, fg="bright_yellow" + ) + + click.secho( + "Verifying consumption of all news fragments", + bold=True, fg="bright_green", + ) + + try: + test_notes = _get_numpy_tools(pathlib.Path('ci', 'test_all_newsfragments_used.py')) + except ModuleNotFoundError as e: + raise click.ClickException( + f"{e.msg}. Install the missing packages to use this command." 
+ ) + + test_notes.main() diff --git a/pyproject.toml b/pyproject.toml index b61adf36217f..6a3c22271b01 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -203,10 +203,16 @@ cli = 'vendored-meson/meson/meson.py' ".spin/cmds.py:test", ".spin/cmds.py:mypy", ".spin/cmds.py:config_openblas", + ".spin/cmds.py:lint", ] "Environments" = [ "spin.cmds.meson.run", ".spin/cmds.py:ipython", ".spin/cmds.py:python", "spin.cmds.meson.gdb" ] -"Documentation" = [".spin/cmds.py:docs"] +"Documentation" = [ + ".spin/cmds.py:docs", + ".spin/cmds.py:changelog", + ".spin/cmds.py:notes", +] + "Metrics" = [".spin/cmds.py:bench"] From 535a8024073d1fc457ee78b233606c22459e67a8 Mon Sep 17 00:00:00 2001 From: Stefan <96178532+stefan6419846@users.noreply.github.com> Date: Wed, 25 Oct 2023 17:26:30 +0200 Subject: [PATCH 33/45] DOC: Visually divide main license and bundled licenses in wheels --- tools/wheels/cibw_before_build.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tools/wheels/cibw_before_build.sh b/tools/wheels/cibw_before_build.sh index 23471a43285e..3a6d33543464 100644 --- a/tools/wheels/cibw_before_build.sh +++ b/tools/wheels/cibw_before_build.sh @@ -4,6 +4,9 @@ PROJECT_DIR="$1" PLATFORM=$(PYTHONPATH=tools python -c "import openblas_support; print(openblas_support.get_plat())") # Update license +echo "" >> $PROJECT_DIR/LICENSE.txt +echo ""----" >> $PROJECT_DIR/LICENSE.txt +echo "" >> $PROJECT_DIR/LICENSE.txt cat $PROJECT_DIR/LICENSES_bundled.txt >> $PROJECT_DIR/LICENSE.txt if [[ $RUNNER_OS == "Linux" ]] ; then cat $PROJECT_DIR/tools/wheels/LICENSE_linux.txt >> $PROJECT_DIR/LICENSE.txt From b92293dd5523be39f0b3fbc26ba36841ee0fc76d Mon Sep 17 00:00:00 2001 From: Stefan <96178532+stefan6419846@users.noreply.github.com> Date: Wed, 25 Oct 2023 17:44:39 +0200 Subject: [PATCH 34/45] DOC: Visually divide main license and bundled licenses in wheels --- tools/wheels/cibw_before_build.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/wheels/cibw_before_build.sh b/tools/wheels/cibw_before_build.sh index 3a6d33543464..3217ce488c1b 100644 --- a/tools/wheels/cibw_before_build.sh +++ b/tools/wheels/cibw_before_build.sh @@ -5,7 +5,7 @@ PLATFORM=$(PYTHONPATH=tools python -c "import openblas_support; print(openblas_s # Update license echo "" >> $PROJECT_DIR/LICENSE.txt -echo ""----" >> $PROJECT_DIR/LICENSE.txt +echo "----" >> $PROJECT_DIR/LICENSE.txt echo "" >> $PROJECT_DIR/LICENSE.txt cat $PROJECT_DIR/LICENSES_bundled.txt >> $PROJECT_DIR/LICENSE.txt if [[ $RUNNER_OS == "Linux" ]] ; then From c2f0c441da1bb14b218e523d1761ccbd98e06ec3 Mon Sep 17 00:00:00 2001 From: Sebastian Berg Date: Mon, 30 Oct 2023 13:42:58 +0100 Subject: [PATCH 35/45] MAINT: Add missing `noexcept` to shuffle helpers These shouldn't fail and should work without the GIL, but as cython points out (now), this doesn't work in practice, because without the `noexcept`, cython will grab the GIL to check for errors every time. 
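
For illustration, a minimal hypothetical sketch (the names below are
made up, not the actual shuffle helpers) of what declaring `noexcept`
changes for a `nogil` helper under Cython 3:

    cdef double twice(double x) nogil:
        # Cython 3 default: callers check for a raised exception after
        # each call, which can mean re-acquiring the GIL
        return 2.0 * x

    cdef double twice_fast(double x) noexcept nogil:
        # declared noexcept: no error check, the call stays GIL-free
        return 2.0 * x

    def demo(double x):
        cdef double y
        with nogil:
            y = twice_fast(x)   # runs without touching the GIL
        return y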
--- numpy/random/_generator.pyx | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/numpy/random/_generator.pyx b/numpy/random/_generator.pyx index 05061aa2dc3b..1bdba3356557 100644 --- a/numpy/random/_generator.pyx +++ b/numpy/random/_generator.pyx @@ -63,7 +63,7 @@ cdef int64_t _safe_sum_nonneg_int64(size_t num_colors, int64_t *colors): cdef inline void _shuffle_raw_wrap(bitgen_t *bitgen, np.npy_intp n, np.npy_intp first, np.npy_intp itemsize, np.npy_intp stride, - char* data, char* buf) nogil: + char* data, char* buf) noexcept nogil: # We trick gcc into providing a specialized implementation for # the most common case, yielding a ~33% performance improvement. # Note that apparently, only one branch can ever be specialized. @@ -76,7 +76,7 @@ cdef inline void _shuffle_raw_wrap(bitgen_t *bitgen, np.npy_intp n, cdef inline void _shuffle_raw(bitgen_t *bitgen, np.npy_intp n, np.npy_intp first, np.npy_intp itemsize, np.npy_intp stride, - char* data, char* buf) nogil: + char* data, char* buf) noexcept nogil: """ Parameters ---------- @@ -107,7 +107,7 @@ cdef inline void _shuffle_raw(bitgen_t *bitgen, np.npy_intp n, cdef inline void _shuffle_int(bitgen_t *bitgen, np.npy_intp n, - np.npy_intp first, int64_t* data) nogil: + np.npy_intp first, int64_t* data) noexcept nogil: """ Parameters ---------- From 81af82625e91a993ad9b41ddac4b3eb8e6728c3c Mon Sep 17 00:00:00 2001 From: Stefan <96178532+stefan6419846@users.noreply.github.com> Date: Fri, 10 Nov 2023 12:48:37 +0100 Subject: [PATCH 36/45] DOC: Fix license identifier for OpenBLAS [skip cirrus] [skip azp] [skip actions] --- tools/wheels/LICENSE_linux.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/wheels/LICENSE_linux.txt b/tools/wheels/LICENSE_linux.txt index 97596fe61f28..a5b5ae5c22e6 100644 --- a/tools/wheels/LICENSE_linux.txt +++ b/tools/wheels/LICENSE_linux.txt @@ -8,7 +8,7 @@ Name: OpenBLAS Files: numpy.libs/libopenblas*.so Description: bundled as a dynamically linked library Availability: https://github.com/OpenMathLib/OpenBLAS/ -License: BSD-3-Clause-Attribution +License: BSD-3-Clause Copyright (c) 2011-2014, The OpenBLAS Project All rights reserved. From 29d46b010364f8295d331d45cddec32bf382c056 Mon Sep 17 00:00:00 2001 From: Stefan <96178532+stefan6419846@users.noreply.github.com> Date: Fri, 10 Nov 2023 12:49:05 +0100 Subject: [PATCH 37/45] DOC: Fix license identifier for OpenBLAS --- tools/wheels/LICENSE_osx.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/wheels/LICENSE_osx.txt b/tools/wheels/LICENSE_osx.txt index dd221a34d49f..1ebd5663d02c 100644 --- a/tools/wheels/LICENSE_osx.txt +++ b/tools/wheels/LICENSE_osx.txt @@ -7,7 +7,7 @@ Name: OpenBLAS Files: numpy/.dylibs/libopenblas*.so Description: bundled as a dynamically linked library Availability: https://github.com/OpenMathLib/OpenBLAS/ -License: BSD-3-Clause-Attribution +License: BSD-3-Clause Copyright (c) 2011-2014, The OpenBLAS Project All rights reserved. 
From f6906d8983dc34b7c38cdc841b277897b7b6b952 Mon Sep 17 00:00:00 2001 From: Stefan <96178532+stefan6419846@users.noreply.github.com> Date: Fri, 10 Nov 2023 12:49:28 +0100 Subject: [PATCH 38/45] DOC: Fix license identifier for OpenBLAS --- tools/wheels/LICENSE_win32.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/wheels/LICENSE_win32.txt b/tools/wheels/LICENSE_win32.txt index ef954a6ea781..2740fc6165a7 100644 --- a/tools/wheels/LICENSE_win32.txt +++ b/tools/wheels/LICENSE_win32.txt @@ -8,7 +8,7 @@ Name: OpenBLAS Files: numpy.libs\libopenblas*.dll Description: bundled as a dynamically linked library Availability: https://github.com/OpenMathLib/OpenBLAS/ -License: BSD-3-Clause-Attribution +License: BSD-3-Clause Copyright (c) 2011-2014, The OpenBLAS Project All rights reserved. From 60edd98ed505d958a257b80c42c06b98607a454b Mon Sep 17 00:00:00 2001 From: Ralf Gommers Date: Tue, 31 Oct 2023 21:35:16 +0100 Subject: [PATCH 39/45] CI: add a split Netlib BLAS job, no pkg-config, with OpenSUSE container --- .github/workflows/linux_blas.yml | 316 ++++++++++++++++++++++++++++++- 1 file changed, 309 insertions(+), 7 deletions(-) diff --git a/.github/workflows/linux_blas.yml b/.github/workflows/linux_blas.yml index 77705178869c..5dcd93da6316 100644 --- a/.github/workflows/linux_blas.yml +++ b/.github/workflows/linux_blas.yml @@ -12,11 +12,32 @@ name: BLAS tests (Linux) # `numpy.distutils`-based build. It can be removed once we remove # support for those builds. # - openblas32_stable_nightly: -# Uses the 32-bit OpenBLAS builds, both the latest stable release and a -# nightly build. -# -# TODO: coverage here is limited, we should add non-OpenBLAS libraries and -# exercise the BLAS-related build options (see `meson_options.txt`). +# Uses the 32-bit OpenBLAS builds, both the latest stable release +# and a nightly build. +# - openblas_no_pkgconfig_fedora: +# Test OpenBLAS on Fedora. Fedora doesn't ship .pc files for OpenBLAS, +# hence this exercises the "system dependency" detection method. +# - flexiblas_fedora: +# Tests FlexiBLAS (the default on Fedora for its own packages), via +# pkg-config. FlexiBLAS allows runtime switching of BLAS/LAPACK +# libraries, which is a useful capability (not tested in this job). +# - openblas_cmake: +# Tests whether OpenBLAS LP64 is detected correctly when only CMake +# and not pkg-config is installed. +# - netlib-debian: +# Installs libblas/liblapack, which in Debian contains libcblas within +# libblas. +# - netlib-split: +# Installs vanilla Netlib blas/lapack with separate libcblas, which is +# the last option tried in auto-detection. +# - mkl: +# Tests MKL installed from PyPI (because easiest/fastest, if broken) in +# 3 ways: both LP64 and ILP64 via pkg-config, and then using the +# Single Dynamic Library (SDL, or `libmkl_rt`). 
+# - blis: +# Simple test for LP64 via pkg-config +# - atlas: +# Simple test for LP64 via pkg-config on: pull_request: @@ -46,11 +67,11 @@ jobs: USE_NIGHTLY_OPENBLAS: ${{ matrix.USE_NIGHTLY_OPENBLAS }} name: "Test Linux (${{ matrix.USE_NIGHTLY_OPENBLAS && 'nightly' || 'stable' }} OpenBLAS)" steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 with: submodules: recursive fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 + - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v4.7.1 with: python-version: '3.11' @@ -91,3 +112,284 @@ jobs: run: | pip install pytest pytest-xdist hypothesis typing_extensions spin test -j auto + + + openblas_no_pkgconfig_fedora: + if: "github.repository == 'numpy/numpy'" + runs-on: ubuntu-latest + container: fedora:39 + name: "OpenBLAS (Fedora, no pkg-config, LP64/ILP64)" + steps: + - name: Install system dependencies + run: | + dnf install git gcc-gfortran g++ python3-devel openblas-devel -y + + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 + with: + submodules: recursive + fetch-depth: 0 + + - name: Install dependencies + run: | + pip install -r build_requirements.txt + pip install pytest hypothesis typing_extensions + + - name: Build (LP64) + run: spin build -- -Dblas=openblas -Dlapack=openblas -Ddisable-optimization=true + + - name: Test + run: spin test -- numpy/linalg + + - name: Build (ILP64) + run: | + rm -rf build + spin build -- -Duse-ilp64=true -Ddisable-optimization=true + + - name: Test + run: spin test -- numpy/linalg + + + flexiblas_fedora: + if: "github.repository == 'numpy/numpy'" + runs-on: ubuntu-latest + container: fedora:39 + name: "FlexiBLAS (LP64, ILP64 on Fedora)" + steps: + - name: Install system dependencies + run: | + dnf install git gcc-gfortran g++ python3-devel flexiblas-devel -y + + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 + with: + submodules: recursive + fetch-depth: 0 + + - name: Install dependencies + run: | + pip install -r build_requirements.txt + pip install pytest hypothesis typing_extensions + + - name: Build + run: spin build -- -Ddisable-optimization=true + + - name: Test + run: spin test -- numpy/linalg + + - name: Build (ILP64) + run: | + rm -rf build + spin build -- -Ddisable-optimization=true -Duse-ilp64=true + + - name: Test (ILP64) + run: spin test -- numpy/linalg + + + openblas_cmake: + if: "github.repository == 'numpy/numpy'" + runs-on: ubuntu-latest + name: "OpenBLAS with CMake" + steps: + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 + with: + submodules: recursive + fetch-depth: 0 + - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v4.7.1 + with: + python-version: '3.11' + + - name: Install dependencies + run: | + pip install -r build_requirements.txt + pip install pytest pytest-xdist hypothesis typing_extensions + sudo apt-get install libopenblas-dev cmake + sudo apt-get remove pkg-config + + - name: Build + run: spin build -- -Ddisable-optimization=true + + - name: Test + run: spin test -j auto -- numpy/linalg + + + netlib-debian: + if: "github.repository == 'numpy/numpy'" + runs-on: ubuntu-latest + name: "Debian libblas/liblapack" + steps: + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 + with: + submodules: recursive + fetch-depth: 0 + - uses: 
actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v4.7.1 + with: + python-version: '3.11' + + - name: Install dependencies + run: | + pip install -r build_requirements.txt + sudo apt-get install liblapack-dev pkg-config + + - name: Build + run: | + spin build -- -Ddisable-optimization=true + + - name: Test + run: | + pip install pytest pytest-xdist hypothesis typing_extensions + spin test -j auto -- numpy/linalg + + + netlib-split: + if: "github.repository == 'numpy/numpy'" + runs-on: ubuntu-latest + container: opensuse/tumbleweed + name: "OpenSUSE Netlib BLAS/LAPACK" + steps: + - name: Install system dependencies + run: | + # No blas.pc on OpenSUSE as of Nov 2023, so no need to install pkg-config. + # If it is needed in the future, use install name `pkgconf-pkg-config` + zypper install -y git gcc-c++ python3-pip python3-devel blas cblas lapack + + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 + with: + submodules: recursive + fetch-depth: 0 + + - name: Install PyPI dependencies + run: | + pip install --break-system-packages -r build_requirements.txt + + - name: Build + run: | + spin build -- -Dblas=blas -Dlapack=lapack -Ddisable-optimization=true -Dallow-noblas=false + + - name: Test + run: | + pip install --break-system-packages pytest pytest-xdist hypothesis typing_extensions + spin test -j auto -- numpy/linalg + + + mkl: + if: "github.repository == 'numpy/numpy'" + runs-on: ubuntu-latest + name: "MKL (LP64, ILP64, SDL)" + steps: + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 + with: + submodules: recursive + fetch-depth: 0 + - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v4.7.1 + with: + python-version: '3.11' + + - name: Install dependencies + run: | + pip install -r build_requirements.txt + pip install pytest pytest-xdist hypothesis typing_extensions + pip install mkl mkl-devel + + - name: Repair MKL pkg-config files and symlinks + run: | + # MKL 2023.2 works when installed from conda-forge (except for `-iomp` + # and `-tbb` pkg-config files), Spack, or with the standalone Intel + # installer. The standalone installer is the worst option, since it's + # large and clumsy to install and requires running a setvars.sh script + # before things work. The PyPI MKL packages are broken and need the + # fixes in this step. For details, see + # https://github.com/conda-forge/intel_repack-feedstock/issues/34 + cd $Python3_ROOT_DIR/lib/pkgconfig + sed -i 's/\/intel64//g' mkl*.pc + # add the expected .so -> .so.2 symlinks to fix linking + cd .. 
+ for i in $( ls libmkl*.so.2 ); do ln -s $i ${i%.*}; done + + - name: Build with defaults (LP64) + run: | + pkg-config --libs mkl-dynamic-lp64-seq # check link flags + spin build -- -Ddisable-optimization=true + + - name: Test + run: spin test -- numpy/linalg + + - name: Build with ILP64 + run: | + git clean -xdf > /dev/null + pkg-config --libs mkl-dynamic-ilp64-seq + spin build -- -Duse-ilp64=true -Ddisable-optimization=true + + - name: Test + run: spin test -- numpy/linalg + + - name: Build without pkg-config (default options, SDL) + run: | + git clean -xdf > /dev/null + pushd $Python3_ROOT_DIR/lib/pkgconfig + rm mkl*.pc + popd + export MKLROOT=$Python3_ROOT_DIR + spin build -- -Ddisable-optimization=true + + - name: Test + run: spin test -- numpy/linalg + + blis: + if: "github.repository == 'numpy/numpy'" + runs-on: ubuntu-latest + name: "BLIS" + steps: + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 + with: + submodules: recursive + fetch-depth: 0 + - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v4.7.1 + with: + python-version: '3.11' + + - name: Install dependencies + run: | + pip install -r build_requirements.txt + pip install pytest pytest-xdist hypothesis typing_extensions + sudo apt-get install libblis-dev libopenblas-dev pkg-config + + - name: Add BLIS pkg-config file + run: | + # Needed because blis.pc missing in Debian: + # https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=989076 + # The alternative here would be to use another distro or Miniforge + sudo cp tools/ci/_blis_debian.pc /usr/lib/x86_64-linux-gnu/pkgconfig/blis.pc + # Check if the patch works: + pkg-config --libs blis + pkg-config --cflags blis + + - name: Build + run: spin build -- -Dblas=blis -Ddisable-optimization=true + + - name: Test + run: spin test -- numpy/linalg + + atlas: + if: "github.repository == 'numpy/numpy'" + runs-on: ubuntu-latest + name: "ATLAS" + steps: + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 + with: + submodules: recursive + fetch-depth: 0 + - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v4.7.1 + with: + python-version: '3.11' + + - name: Install dependencies + run: | + pip install -r build_requirements.txt + pip install pytest pytest-xdist hypothesis typing_extensions + sudo apt-get install libatlas-base-dev pkg-config + + - name: Build + run: spin build -- -Dblas=blas-atlas -Dlapack=lapack-atlas -Ddisable-optimization=true + + - name: Test + run: spin test -- numpy/linalg + From 2a057fde8bf5fb62e2694848e3693d7272b25e42 Mon Sep 17 00:00:00 2001 From: Ralf Gommers Date: Thu, 2 Nov 2023 13:54:05 +0100 Subject: [PATCH 40/45] CI: add blas, cblas and lapack to FreeBSD CI job Also add logging on failure, so future build issues are easier to diagnose. --- tools/ci/cirrus_arm.yml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/tools/ci/cirrus_arm.yml b/tools/ci/cirrus_arm.yml index a28f182ae8ba..7e394f55442b 100644 --- a/tools/ci/cirrus_arm.yml +++ b/tools/ci/cirrus_arm.yml @@ -127,7 +127,7 @@ freebsd_test_task: memory: 4G install_devtools_script: | - pkg install -y git bash ninja ccache + pkg install -y git bash ninja ccache blas cblas lapack pkgconf <<: *MODIFIED_CLONE @@ -149,7 +149,7 @@ freebsd_test_task: build_script: | chsh -s /usr/local/bin/bash source .venv/bin/activate - python -m pip install . --no-build-isolation -v -Csetup-args="-Dallow-noblas=true" + python -m pip install . 
--no-build-isolation -v -Csetup-args="-Dallow-noblas=false" test_script: | chsh -s /usr/local/bin/bash @@ -157,3 +157,7 @@ freebsd_test_task: cd tools python -m pytest --pyargs numpy -m "not slow" ccache -s + + on_failure: + debug_script: | + cat build/meson-logs/meson-log.txt From 2359aec3510bd9e8d0e80e91430ebb76291d6069 Mon Sep 17 00:00:00 2001 From: Sebastian Berg Date: Wed, 1 Nov 2023 20:08:54 +0100 Subject: [PATCH 41/45] MAINT: Make bitfield integers unsigned I am getting a lot of compile warnings recently. Not sure exactly why, but one source here is that the 1 we store is cast to -1 for a signed integer bitfield. Making it explicitly unsigned quenches the warning and seems easiest. --- numpy/core/src/_simd/_simd_inc.h.src | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/numpy/core/src/_simd/_simd_inc.h.src b/numpy/core/src/_simd/_simd_inc.h.src index 887545414b9b..a023848831ed 100644 --- a/numpy/core/src/_simd/_simd_inc.h.src +++ b/numpy/core/src/_simd/_simd_inc.h.src @@ -105,19 +105,19 @@ typedef struct // type name compatible with python style const char *pyname; // returns '1' if the type represent a unsigned integer - int is_unsigned:1; + unsigned int is_unsigned:1; // returns '1' if the type represent a signed integer - int is_signed:1; + unsigned int is_signed:1; // returns '1' if the type represent a single or double precision - int is_float:1; + unsigned int is_float:1; // returns '1' if the type represent a boolean - int is_bool:1; + unsigned int is_bool:1; // returns '1' if the type represent a sequence - int is_sequence:1; + unsigned int is_sequence:1; // returns '1' if the type represent a scalar - int is_scalar:1; + unsigned int is_scalar:1; // returns '1' if the type represent a vector - int is_vector:1; + unsigned int is_vector:1; // returns the len of multi-vector if the type represent x2 or x3 vector // otherwise returns 0, e.g. returns 2 if data type is simd_data_vu8x2 int is_vectorx; From 12b7b352020b1e7204430bee82af05c804b32f7d Mon Sep 17 00:00:00 2001 From: thalassemia <67928790+thalassemia@users.noreply.github.com> Date: Tue, 7 Nov 2023 17:13:12 -0800 Subject: [PATCH 42/45] BUG: Make n a long int for np.random.multinomial --- numpy/random/mtrand.pyx | 2 +- numpy/random/tests/test_randomstate.py | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/numpy/random/mtrand.pyx b/numpy/random/mtrand.pyx index eb9e3ddd1dd0..9ffaa572d87e 100644 --- a/numpy/random/mtrand.pyx +++ b/numpy/random/mtrand.pyx @@ -4253,7 +4253,7 @@ cdef class RandomState: x.shape = tuple(final_shape) return x - def multinomial(self, np.npy_intp n, object pvals, size=None): + def multinomial(self, long n, object pvals, size=None): """ multinomial(n, pvals, size=None) diff --git a/numpy/random/tests/test_randomstate.py b/numpy/random/tests/test_randomstate.py index 524ac7b7c5e0..c77bfce883ae 100644 --- a/numpy/random/tests/test_randomstate.py +++ b/numpy/random/tests/test_randomstate.py @@ -183,6 +183,9 @@ def test_multinomial_pvals_float32(self): with pytest.raises(ValueError, match=match): random.multinomial(1, pvals) + def test_multinomial_n_float(self): + # Non-index integer types should gracefully truncate floats + random.multinomial(100.5, [0.2, 0.8]) class TestSetState: def setup_method(self): From 0035b44c4276c0b72e56e52a4cf7a7048f487b70 Mon Sep 17 00:00:00 2001 From: Ralf Gommers Date: Fri, 3 Nov 2023 19:57:47 +0100 Subject: [PATCH 43/45] BLD: change default of the `allow-noblas` option to true. 
Having it set to false caused more disruption and complaints than the silent performance regressions for `true` ever did. So now that the dust has settled a bit on the transition to Meson, go back to the more permissive default. The warning that is emitted when BLAS and/or LAPACK are not found is already much more visible than it was with distutils, so this new situation is still an improvement over where we were until 1.25.x. Closes gh-24703. --- .github/workflows/linux_blas.yml | 24 ++++++++++++------------ .github/workflows/macos.yml | 6 +++--- meson_options.txt | 2 +- pyproject.toml | 2 +- 4 files changed, 17 insertions(+), 17 deletions(-) diff --git a/.github/workflows/linux_blas.yml b/.github/workflows/linux_blas.yml index 5dcd93da6316..aac37d205e9d 100644 --- a/.github/workflows/linux_blas.yml +++ b/.github/workflows/linux_blas.yml @@ -93,7 +93,7 @@ jobs: env: TERM: xterm-256color run: - spin build -- --werror + spin build -- --werror -Dallow-noblas=false - name: Check build-internal dependencies run: @@ -135,7 +135,7 @@ jobs: pip install pytest hypothesis typing_extensions - name: Build (LP64) - run: spin build -- -Dblas=openblas -Dlapack=openblas -Ddisable-optimization=true + run: spin build -- -Dblas=openblas -Dlapack=openblas -Ddisable-optimization=true -Dallow-noblas=false - name: Test run: spin test -- numpy/linalg @@ -143,7 +143,7 @@ jobs: - name: Build (ILP64) run: | rm -rf build - spin build -- -Duse-ilp64=true -Ddisable-optimization=true + spin build -- -Duse-ilp64=true -Ddisable-optimization=true -Dallow-noblas=false - name: Test run: spin test -- numpy/linalg @@ -170,7 +170,7 @@ jobs: pip install pytest hypothesis typing_extensions - name: Build - run: spin build -- -Ddisable-optimization=true + run: spin build -- -Ddisable-optimization=true -Dallow-noblas=false - name: Test run: spin test -- numpy/linalg @@ -178,7 +178,7 @@ jobs: - name: Build (ILP64) run: | rm -rf build - spin build -- -Ddisable-optimization=true -Duse-ilp64=true + spin build -- -Ddisable-optimization=true -Duse-ilp64=true -Dallow-noblas=false - name: Test (ILP64) run: spin test -- numpy/linalg @@ -205,7 +205,7 @@ jobs: sudo apt-get remove pkg-config - name: Build - run: spin build -- -Ddisable-optimization=true + run: spin build -- -Ddisable-optimization=true -Dallow-noblas=false - name: Test run: spin test -j auto -- numpy/linalg @@ -231,7 +231,7 @@ jobs: - name: Build run: | - spin build -- -Ddisable-optimization=true + spin build -- -Ddisable-optimization=true -Dallow-noblas=false - name: Test run: | @@ -307,7 +307,7 @@ jobs: - name: Build with defaults (LP64) run: | pkg-config --libs mkl-dynamic-lp64-seq # check link flags - spin build -- -Ddisable-optimization=true + spin build -- -Ddisable-optimization=true -Dallow-noblas=false - name: Test run: spin test -- numpy/linalg @@ -316,7 +316,7 @@ jobs: run: | git clean -xdf > /dev/null pkg-config --libs mkl-dynamic-ilp64-seq - spin build -- -Duse-ilp64=true -Ddisable-optimization=true + spin build -- -Duse-ilp64=true -Ddisable-optimization=true -Dallow-noblas=false - name: Test run: spin test -- numpy/linalg @@ -328,7 +328,7 @@ jobs: rm mkl*.pc popd export MKLROOT=$Python3_ROOT_DIR - spin build -- -Ddisable-optimization=true + spin build -- -Ddisable-optimization=true -Dallow-noblas=false - name: Test run: spin test -- numpy/linalg @@ -363,7 +363,7 @@ jobs: pkg-config --cflags blis - name: Build - run: spin build -- -Dblas=blis -Ddisable-optimization=true + run: spin build -- -Dblas=blis -Ddisable-optimization=true -Dallow-noblas=false - name: Test 
run: spin test -- numpy/linalg @@ -388,7 +388,7 @@ jobs: sudo apt-get install libatlas-base-dev pkg-config - name: Build - run: spin build -- -Dblas=blas-atlas -Dlapack=lapack-atlas -Ddisable-optimization=true + run: spin build -- -Dblas=blas-atlas -Dlapack=lapack-atlas -Ddisable-optimization=true -Dallow-noblas=false - name: Test run: spin test -- numpy/linalg diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml index b34c86bec901..959b51e0626b 100644 --- a/.github/workflows/macos.yml +++ b/.github/workflows/macos.yml @@ -84,7 +84,7 @@ jobs: shell: bash -l {0} run: | conda activate numpy-dev - CC="ccache $CC" spin build -j2 + CC="ccache $CC" spin build -j2 -- -Dallow-noblas=false - name: Run test suite (full) shell: bash -l {0} @@ -123,14 +123,14 @@ jobs: pip install pytest pytest-xdist hypothesis - name: Build against Accelerate (LP64) - run: spin build -- -Ddisable-optimization=true + run: spin build -- -Ddisable-optimization=true -Dallow-noblas=false - name: Test (linalg only) run: spin test -j2 -- numpy/linalg - name: Build NumPy against Accelerate (ILP64) run: | - spin build -- -Duse-ilp64=true + spin build -- -Duse-ilp64=true -Dallow-noblas=false - name: Test (fast tests) run: spin test -j2 diff --git a/meson_options.txt b/meson_options.txt index 05e9e733578e..0e8fd0b9d92d 100644 --- a/meson_options.txt +++ b/meson_options.txt @@ -2,7 +2,7 @@ option('blas', type: 'string', value: 'auto', description: 'Option for BLAS library selection. By default, try to find any in the order given by `blas-order`') option('lapack', type: 'string', value: 'auto', description: 'Option for LAPACK library selection. By default, try to find any in the order given by `lapack-order`') -option('allow-noblas', type: 'boolean', value: false, +option('allow-noblas', type: 'boolean', value: true, description: 'If set to true, allow building with (slow!) internal fallback routines') option('blas-order', type: 'array', value: ['mkl', 'accelerate', 'openblas', 'flexiblas', 'blis', 'blas']) diff --git a/pyproject.toml b/pyproject.toml index 6a3c22271b01..c8dc1a72eeba 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -153,7 +153,7 @@ tracker = "https://github.com/numpy/numpy/issues" skip = "cp36-* cp37-* cp-38* pp37-* pp38-* *-manylinux_i686 *_ppc64le *_s390x" build-verbosity = "3" before-build = "bash {project}/tools/wheels/cibw_before_build.sh {project}" -config-settings = "setup-args=-Duse-ilp64=true setup-args=-Dblas=openblas setup-args=-Dlapack=openblas setup-args=-Dblas-symbol-suffix=64_" +config-settings = "setup-args=-Duse-ilp64=true setup-args=-Dblas=openblas setup-args=-Dlapack=openblas setup-args=-Dblas-symbol-suffix=64_ setup-args=-Dallow-noblas=false" # meson has a hard dependency on ninja, and we need meson to build # c-extensions in tests. 
There is a ninja PyPI package used in # build_requirements.txt for macOS, windows, linux but it cannot be in From 766d5a83a2eb5db9a6deca4616a6e32c45955782 Mon Sep 17 00:00:00 2001 From: Nathan Goldbaum Date: Tue, 31 Oct 2023 09:48:29 -0600 Subject: [PATCH 44/45] BUG: ensure passing `np.dtype` to itself doesn't crash --- numpy/core/src/multiarray/descriptor.c | 5 +++++ numpy/core/tests/test_dtype.py | 6 ++++++ 2 files changed, 11 insertions(+) diff --git a/numpy/core/src/multiarray/descriptor.c b/numpy/core/src/multiarray/descriptor.c index 68d398d64806..f8712d4d3bc3 100644 --- a/numpy/core/src/multiarray/descriptor.c +++ b/numpy/core/src/multiarray/descriptor.c @@ -1478,6 +1478,11 @@ PyArray_DTypeOrDescrConverterRequired(PyObject *obj, npy_dtype_info *dt_info) dt_info->descr = NULL; if (PyObject_TypeCheck(obj, &PyArrayDTypeMeta_Type)) { + if (obj == (PyObject *)&PyArrayDescr_Type) { + PyErr_SetString(PyExc_TypeError, + "Cannot convert np.dtype into a dtype."); + return NPY_FAIL; + } Py_INCREF(obj); dt_info->dtype = (PyArray_DTypeMeta *)obj; dt_info->descr = NULL; diff --git a/numpy/core/tests/test_dtype.py b/numpy/core/tests/test_dtype.py index 81692015fc5e..ac155b67baf0 100644 --- a/numpy/core/tests/test_dtype.py +++ b/numpy/core/tests/test_dtype.py @@ -1898,3 +1898,9 @@ def test_result_type_integers_and_unitless_timedelta64(): td = np.timedelta64(4) result = np.result_type(0, td) assert_dtype_equal(result, td.dtype) + + +def test_creating_dtype_with_dtype_class_errors(): + # Regression test for #25031, calling `np.dtype` with itself segfaulted. + with pytest.raises(TypeError, match="Cannot convert np.dtype into a"): + np.array(np.ones(10), dtype=np.dtype) From 6961f60056be3fce2f4c1c9aa1f8840675135973 Mon Sep 17 00:00:00 2001 From: Charles Harris Date: Sun, 12 Nov 2023 11:28:35 -0700 Subject: [PATCH 45/45] REL: Prepare for the NumPy 1.26.2 release - Create the 1.26.2-changelog.rst - Update the 1.26.2-notes.rst [wheel build] --- doc/changelog/1.26.2-changelog.rst | 51 +++++++++++++++++++++++++++++ doc/source/release/1.26.2-notes.rst | 46 ++++++++++++++++++++++++++ 2 files changed, 97 insertions(+) create mode 100644 doc/changelog/1.26.2-changelog.rst diff --git a/doc/changelog/1.26.2-changelog.rst b/doc/changelog/1.26.2-changelog.rst new file mode 100644 index 000000000000..8715f2f0bb58 --- /dev/null +++ b/doc/changelog/1.26.2-changelog.rst @@ -0,0 +1,51 @@ + +Contributors +============ + +A total of 13 people contributed to this release. People with a "+" by their +names contributed a patch for the first time. + +* @stefan6419846 +* @thalassemia + +* Andrew Nelson +* Charles Bousseau + +* Charles Harris +* Marcel Bargull + +* Mark Mentovai + +* Matti Picus +* Nathan Goldbaum +* Ralf Gommers +* Sayed Adel +* Sebastian Berg +* William Ayd + + +Pull requests merged +==================== + +A total of 25 pull requests were merged for this release. + +* `#24814 `__: MAINT: align test_dispatcher s390x targets with _umath_tests_mtargets +* `#24929 `__: MAINT: prepare 1.26.x for further development +* `#24955 `__: ENH: Add Cython enumeration for NPY_FR_GENERIC +* `#24962 `__: REL: Remove Python upper version from the release branch +* `#24971 `__: BLD: Use the correct Python interpreter when running tempita.py +* `#24972 `__: MAINT: Remove unhelpful error replacements from ``import_array()`` +* `#24977 `__: BLD: use classic linker on macOS, the new one in XCode 15 has... 
+* `#25003 `__: BLD: musllinux_aarch64 [wheel build] +* `#25043 `__: MAINT: Update mailmap +* `#25049 `__: MAINT: Update meson build infrastructure. +* `#25071 `__: MAINT: Split up .github/workflows to match main +* `#25083 `__: BUG: Backport fix build on ppc64 when the baseline set to Power9... +* `#25093 `__: BLD: Fix features.h detection for Meson builds [1.26.x Backport] +* `#25095 `__: BUG: Avoid intp conversion regression in Cython 3 (backport) +* `#25107 `__: CI: remove obsolete jobs, and move macOS and conda Azure jobs... +* `#25108 `__: CI: Add linux_qemu action and remove travis testing. +* `#25112 `__: MAINT: Update .spin/cmds.py from main. +* `#25113 `__: DOC: Visually divide main license and bundled licenses in wheels +* `#25115 `__: MAINT: Add missing ``noexcept`` to shuffle helpers +* `#25116 `__: DOC: Fix license identifier for OpenBLAS +* `#25117 `__: BLD: improve detection of Netlib libblas/libcblas/liblapack +* `#25118 `__: MAINT: Make bitfield integers unsigned +* `#25119 `__: BUG: Make n a long int for np.random.multinomial +* `#25120 `__: BLD: change default of the ``allow-noblas`` option to true. +* `#25121 `__: BUG: ensure passing ``np.dtype`` to itself doesn't crash diff --git a/doc/source/release/1.26.2-notes.rst b/doc/source/release/1.26.2-notes.rst index ff4920dd3ff8..6f32bcf48206 100644 --- a/doc/source/release/1.26.2-notes.rst +++ b/doc/source/release/1.26.2-notes.rst @@ -9,10 +9,56 @@ discovered after the 1.26.1 release. The 1.26.release series is the last planned minor release series before NumPy 2.0. The Python versions supported by this release are 3.9-3.12. + Contributors ============ +A total of 13 people contributed to this release. People with a "+" by their +names contributed a patch for the first time. + +* @stefan6419846 +* @thalassemia + +* Andrew Nelson +* Charles Bousseau + +* Charles Harris +* Marcel Bargull + +* Mark Mentovai + +* Matti Picus +* Nathan Goldbaum +* Ralf Gommers +* Sayed Adel +* Sebastian Berg +* William Ayd + + Pull requests merged ==================== +A total of 25 pull requests were merged for this release. + +* `#24814 `__: MAINT: align test_dispatcher s390x targets with _umath_tests_mtargets +* `#24929 `__: MAINT: prepare 1.26.x for further development +* `#24955 `__: ENH: Add Cython enumeration for NPY_FR_GENERIC +* `#24962 `__: REL: Remove Python upper version from the release branch +* `#24971 `__: BLD: Use the correct Python interpreter when running tempita.py +* `#24972 `__: MAINT: Remove unhelpful error replacements from ``import_array()`` +* `#24977 `__: BLD: use classic linker on macOS, the new one in XCode 15 has... +* `#25003 `__: BLD: musllinux_aarch64 [wheel build] +* `#25043 `__: MAINT: Update mailmap +* `#25049 `__: MAINT: Update meson build infrastructure. +* `#25071 `__: MAINT: Split up .github/workflows to match main +* `#25083 `__: BUG: Backport fix build on ppc64 when the baseline set to Power9... +* `#25093 `__: BLD: Fix features.h detection for Meson builds [1.26.x Backport] +* `#25095 `__: BUG: Avoid intp conversion regression in Cython 3 (backport) +* `#25107 `__: CI: remove obsolete jobs, and move macOS and conda Azure jobs... +* `#25108 `__: CI: Add linux_qemu action and remove travis testing. +* `#25112 `__: MAINT: Update .spin/cmds.py from main. 
+* `#25113 `__: DOC: Visually divide main license and bundled licenses in wheels +* `#25115 `__: MAINT: Add missing ``noexcept`` to shuffle helpers +* `#25116 `__: DOC: Fix license identifier for OpenBLAS +* `#25117 `__: BLD: improve detection of Netlib libblas/libcblas/liblapack +* `#25118 `__: MAINT: Make bitfield integers unsigned +* `#25119 `__: BUG: Make n a long int for np.random.multinomial +* `#25120 `__: BLD: change default of the ``allow-noblas`` option to true. +* `#25121 `__: BUG: ensure passing ``np.dtype`` to itself doesn't crash +