
[NumPy] use NumPy 2.x in CI #158647

Draft · wants to merge 11 commits into base: gh/XuehaiPan/370/base
Changes from all commits
2 changes: 1 addition & 1 deletion .ci/aarch64_linux/aarch64_ci_setup.sh
@@ -6,7 +6,7 @@ set -eux -o pipefail

NUMPY_VERSION=2.0.2
if [[ "$DESIRED_PYTHON" == "3.13" || "$DESIRED_PYTHON" == "3.13t" ]]; then
NUMPY_VERSION=2.1.2
NUMPY_VERSION=2.1.3
fi

SCRIPTPATH="$( cd "$(dirname "$0")" ; pwd -P )"
32 changes: 13 additions & 19 deletions .ci/docker/requirements-ci.txt
@@ -111,19 +111,17 @@ ninja==1.11.1.3
#Pinned versions: 1.11.1.3
#test that import: run_test.py, test_cpp_extensions_aot.py,test_determination.py

numba==0.49.0 ; python_version < "3.9" and platform_machine != "s390x"
numba==0.55.2 ; python_version == "3.9" and platform_machine != "s390x"
numba==0.55.2 ; python_version == "3.10" and platform_machine != "s390x"
numba==0.60.0 ; python_version == "3.12" and platform_machine != "s390x"
numba==0.60.0; python_version < "3.13" and platform_machine != "s390x"
numba==0.61.2; python_version >= "3.13" and platform_machine != "s390x"
#Description: Just-In-Time Compiler for Numerical Functions
#Pinned versions: 0.54.1, 0.49.0, <=0.49.1
#Pinned versions: 0.60.0
#test that import: test_numba_integration.py
#For numba issue see https://github.com/pytorch/pytorch/issues/51511
#Need release > 0.61.2 for s390x due to https://github.com/numba/numba/pull/10073

#numpy
#Description: Provides N-dimensional arrays and linear algebra
#Pinned versions: 1.26.2
#Pinned versions: 2.0.2
#test that import: test_view_ops.py, test_unary_ufuncs.py, test_type_promotion.py,
#test_type_info.py, test_torch.py, test_tensorexpr_pybind.py, test_tensorexpr.py,
#test_tensorboard.py, test_tensor_creation_ops.py, test_static_runtime.py,
@@ -133,12 +131,10 @@ numba==0.60.0 ; python_version == "3.12" and platform_machine != "s390x"
#test_nn.py, test_namedtensor.py, test_linalg.py, test_jit_cuda_fuser.py,
#test_jit.py, test_indexing.py, test_datapipe.py, test_dataloader.py,
#test_binary_ufuncs.py
numpy==1.22.4; python_version == "3.9" or python_version == "3.10"
numpy==1.26.2; python_version == "3.11" or python_version == "3.12"
numpy==2.1.2; python_version >= "3.13"
numpy==2.0.2; python_version < "3.13"
numpy==2.1.3; python_version >= "3.13"

pandas==2.0.3; python_version < "3.13"
pandas==2.2.3; python_version >= "3.13"
pandas==2.2.3

#onnxruntime
#Description: scoring engine for Open Neural Network Exchange (ONNX) models
@@ -249,11 +245,11 @@ scikit-image==0.22.0 ; python_version >= "3.10"
#Pinned versions: 0.20.3
#test that import:

scipy==1.10.1 ; python_version <= "3.11"
scipy==1.13.1 ; python_version <= "3.11"
scipy==1.14.1 ; python_version >= "3.12"
# Pin SciPy because of failing distribution tests (see #60347)
#Description: scientific python
#Pinned versions: 1.10.1
#Pinned versions: 1.13.1
#test that import: test_unary_ufuncs.py, test_torch.py,test_tensor_creation_ops.py
#test_spectral_ops.py, test_sparse_csr.py, test_reductions.py,test_nn.py
#test_linalg.py, test_binary_ufuncs.py
@@ -314,17 +310,15 @@ z3-solver==4.15.1.0 ; platform_machine != "s390x"
#Pinned versions:
#test that import:

tensorboard==2.13.0 ; python_version < "3.13"
tensorboard==2.18.0 ; python_version >= "3.13"
tensorboard==2.20.0
#Description: Also included in .ci/docker/requirements-docs.txt
#Pinned versions:
#test that import: test_tensorboard

pywavelets==1.4.1 ; python_version < "3.12"
pywavelets==1.7.0 ; python_version >= "3.12"
pywavelets==1.6.0 ; python_version == "3.9"
pywavelets==1.7.0 ; python_version >= "3.10"
#Description: This is a requirement of scikit-image, we need to pin
# it here because 1.5.0 conflicts with numpy 1.21.2 used in CI
#Pinned versions: 1.4.1
#Pinned versions: 1.6.0
#test that import:

lxml==5.3.0
5 changes: 2 additions & 3 deletions .ci/docker/requirements-docs.txt
@@ -24,10 +24,9 @@ matplotlib==3.6.3 ; python_version >= "3.13"
#Description: This is used to generate PyTorch docs
#Pinned versions: 3.6.3 if python > 3.12. Otherwise 3.5.3.

tensorboard==2.13.0 ; python_version < "3.13"
tensorboard==2.18.0 ; python_version >= "3.13"
tensorboard==2.20.0
#Description: This is used to generate PyTorch docs
#Pinned versions: 2.13.0
#Pinned versions: 2.20.0

breathe==4.34.0
#Description: This is used to generate PyTorch C++ docs
4 changes: 2 additions & 2 deletions .ci/manywheel/build_common.sh
@@ -102,11 +102,11 @@ python setup.py clean
retry pip install -qr requirements.txt
case ${DESIRED_PYTHON} in
cp31*)
retry pip install -q --pre numpy==2.1.0
retry pip install -q numpy==2.1.3
;;
# Should catch 3.9+
*)
retry pip install -q --pre numpy==2.0.2
retry pip install -q numpy==2.0.2
;;
esac

2 changes: 1 addition & 1 deletion .ci/manywheel/build_libtorch.sh
@@ -95,7 +95,7 @@ pushd "$PYTORCH_ROOT"
retry pip install -qUr requirements-build.txt
python setup.py clean
retry pip install -qr requirements.txt
retry pip install -q numpy==2.0.1
retry pip install -q numpy==2.0.2

if [[ "$DESIRED_CUDA" == *"rocm"* ]]; then
echo "Calling build_amd.py at $(date)"
11 changes: 2 additions & 9 deletions .ci/pytorch/run_tests.sh
@@ -68,7 +68,7 @@ fi
NUMPY_PACKAGE=""
if [[ ${py_ver} == "3.10" ]]; then
PROTOBUF_PACKAGE="protobuf>=3.17.2"
NUMPY_PACKAGE="numpy>=1.21.2"
NUMPY_PACKAGE="numpy>=2.0"
else
PROTOBUF_PACKAGE="protobuf=3.14.0"
fi
@@ -80,14 +80,7 @@ if [[ "$(uname)" == Darwin ]]; then
retry pip install -q future hypothesis ${NUMPY_PACKAGE} ${PROTOBUF_PACKAGE} pytest
else
retry pip install -qr requirements.txt || true
retry pip install -q hypothesis protobuf pytest || true
numpy_ver=1.15
case "$(python --version 2>&1)" in
*2* | *3.5* | *3.6*)
numpy_ver=1.11
;;
esac
retry pip install -q "numpy==${numpy_ver}" || true
retry pip install -q hypothesis ${NUMPY_PACKAGE} protobuf pytest || true
fi

echo "Testing with:"
7 changes: 3 additions & 4 deletions .ci/pytorch/test.sh
@@ -1615,9 +1615,9 @@ if [[ "${TEST_CONFIG}" == *numpy_2* ]]; then
# Force re-install of pandas to avoid error where pandas checks numpy version from initial install and fails upon import
TMP_PANDAS_VERSION=$(python -c "import pandas; print(pandas.__version__)" 2>/dev/null)
if [ -n "$TMP_PANDAS_VERSION" ]; then
python -m pip install --pre numpy==2.0.2 scipy==1.13.1 numba==0.60.0 pandas=="$TMP_PANDAS_VERSION" --force-reinstall
python -m pip install numpy==2.0.2 scipy==1.13.1 numba==0.60.0 pandas=="$TMP_PANDAS_VERSION" --force-reinstall
else
python -m pip install --pre numpy==2.0.2 scipy==1.13.1 numba==0.60.0
python -m pip install numpy==2.0.2 scipy==1.13.1 numba==0.60.0
fi
python test/run_test.py --include dynamo/test_functions.py dynamo/test_unspec.py test_binary_ufuncs.py test_fake_tensor.py test_linalg.py test_numpy_interop.py test_tensor_creation_ops.py test_torch.py torch_np/test_basic.py
elif [[ "${BUILD_ENVIRONMENT}" == *aarch64* && "${TEST_CONFIG}" != *perf_cpu_aarch64* ]]; then
@@ -1684,8 +1684,7 @@ elif [[ "${TEST_CONFIG}" == *torchbench* ]]; then
install_torchvision
install_torchao
id=$((SHARD_NUMBER-1))
# https://github.com/opencv/opencv-python/issues/885
pip_install opencv-python==4.8.0.74
pip_install opencv-python==4.12.0.88
if [[ "${TEST_CONFIG}" == *inductor_torchbench_smoketest_perf* ]]; then
PYTHONPATH=/torchbench test_inductor_torchbench_smoketest_perf
elif [[ "${TEST_CONFIG}" == *inductor_torchbench_cpu_smoketest_perf* ]]; then
2 changes: 1 addition & 1 deletion .ci/pytorch/win-test.sh
@@ -38,7 +38,7 @@ if [[ "$BUILD_ENVIRONMENT" == *cuda* ]]; then
fi

# TODO: Move both of them to Windows AMI
python -m pip install pytest-rerunfailures==10.3 pytest-cpp==2.3.0 tensorboard==2.13.0 protobuf==5.29.4 pytest-subtests==0.13.1
python -m pip install pytest-rerunfailures==10.3 pytest-cpp==2.3.0 tensorboard==2.20.0 protobuf==5.29.4 pytest-subtests==0.13.1

# Install Z3 optional dependency for Windows builds.
python -m pip install z3-solver==4.15.1.0
4 changes: 2 additions & 2 deletions .ci/pytorch/windows/internal/smoke_test.bat
@@ -37,8 +37,8 @@ echo "install wheel package"

call "internal\install_python.bat"

if "%DESIRED_PYTHON%" == "3.13t" %PYTHON_EXEC% -m pip install --pre numpy==2.2.1 protobuf
if "%DESIRED_PYTHON%" == "3.13" %PYTHON_EXEC% -m pip install --pre numpy==2.1.2 protobuf
if "%DESIRED_PYTHON%" == "3.13t" %PYTHON_EXEC% -m pip install --pre numpy==2.2.6 protobuf
if "%DESIRED_PYTHON%" == "3.13" %PYTHON_EXEC% -m pip install --pre numpy==2.1.3 protobuf
if "%DESIRED_PYTHON%" == "3.12" %PYTHON_EXEC% -m pip install --pre numpy==2.0.2 protobuf
if "%DESIRED_PYTHON%" == "3.11" %PYTHON_EXEC% -m pip install --pre numpy==2.0.2 protobuf
if "%DESIRED_PYTHON%" == "3.10" %PYTHON_EXEC% -m pip install --pre numpy==2.0.2 protobuf
7 changes: 5 additions & 2 deletions .ci/pytorch/windows/setup_build.bat
@@ -7,8 +7,11 @@ call "internal\install_python.bat"

%PYTHON_EXEC% --version
set "PATH=%CD%\Python\Lib\site-packages\cmake\data\bin;%CD%\Python\Scripts;%CD%\Python;%PATH%"
if "%DESIRED_PYTHON%" == "3.13t" %PYTHON_EXEC% -m pip install numpy==2.2.1 cmake
if "%DESIRED_PYTHON%" == "3.13" %PYTHON_EXEC% -m pip install numpy==2.1.2 cmake

%PYTHON_EXEC% -m pip install "setuptools>=77.0.0" "packaging>=24.2"

if "%DESIRED_PYTHON%" == "3.13t" %PYTHON_EXEC% -m pip install numpy==2.2.6 cmake
if "%DESIRED_PYTHON%" == "3.13" %PYTHON_EXEC% -m pip install numpy==2.1.3 cmake
if "%DESIRED_PYTHON%" == "3.12" %PYTHON_EXEC% -m pip install numpy==2.0.2 cmake
if "%DESIRED_PYTHON%" == "3.11" %PYTHON_EXEC% -m pip install numpy==2.0.2 cmake
if "%DESIRED_PYTHON%" == "3.10" %PYTHON_EXEC% -m pip install numpy==2.0.2 cmake
16 changes: 8 additions & 8 deletions .ci/wheel/build_wheel.sh
@@ -137,7 +137,7 @@ case $desired_python in
echo "Using 3.13 deps"
SETUPTOOLS_PINNED_VERSION=">=70.1.0"
PYYAML_PINNED_VERSION=">=6.0.1"
NUMPY_PINNED_VERSION="=2.1.0"
NUMPY_PINNED_VERSION="==2.1.0"
CONDA_ENV_CREATE_FLAGS="python-freethreading"
EXTRA_CONDA_INSTALL_FLAGS="-c conda-forge"
desired_python="3.13"
@@ -147,35 +147,35 @@ case $desired_python in
echo "Using 3.13 deps"
SETUPTOOLS_PINNED_VERSION=">=70.1.0"
PYYAML_PINNED_VERSION=">=6.0.1"
NUMPY_PINNED_VERSION="=2.1.0"
NUMPY_PINNED_VERSION="==2.1.0"
;;
3.12)
echo "Using 3.12 deps"
SETUPTOOLS_PINNED_VERSION=">=70.1.0"
PYYAML_PINNED_VERSION=">=6.0.1"
NUMPY_PINNED_VERSION="=2.0.2"
NUMPY_PINNED_VERSION="==2.0.2"
;;
3.11)
echo "Using 3.11 deps"
SETUPTOOLS_PINNED_VERSION=">=70.1.0"
PYYAML_PINNED_VERSION=">=5.3"
NUMPY_PINNED_VERSION="=2.0.2"
NUMPY_PINNED_VERSION="==2.0.2"
;;
3.10)
echo "Using 3.10 deps"
SETUPTOOLS_PINNED_VERSION=">=70.1.0"
PYYAML_PINNED_VERSION=">=5.3"
NUMPY_PINNED_VERSION="=2.0.2"
NUMPY_PINNED_VERSION="==2.0.2"
;;
3.9)
echo "Using 3.9 deps"
SETUPTOOLS_PINNED_VERSION=">=70.1.0"
PYYAML_PINNED_VERSION=">=5.3"
NUMPY_PINNED_VERSION="=2.0.2"
NUMPY_PINNED_VERSION="==2.0.2"
;;
*)
echo "Using default deps"
NUMPY_PINNED_VERSION="=1.11.3"
NUMPY_PINNED_VERSION="==2.0.2"
;;
esac

Expand All @@ -185,7 +185,7 @@ conda create ${EXTRA_CONDA_INSTALL_FLAGS} -yn "$tmp_env_name" python="$desired_p
source activate "$tmp_env_name"

retry pip install -r "${pytorch_rootdir}/requirements-build.txt"
pip install "numpy=${NUMPY_PINNED_VERSION}" "pyyaml${PYYAML_PINNED_VERSION}" requests ninja "setuptools${SETUPTOOLS_PINNED_VERSION}" typing-extensions
pip install "numpy${NUMPY_PINNED_VERSION}" "pyyaml${PYYAML_PINNED_VERSION}" requests ninja "setuptools${SETUPTOOLS_PINNED_VERSION}" typing-extensions
retry pip install -r "${pytorch_rootdir}/requirements.txt" || true
retry brew install libomp

11 changes: 1 addition & 10 deletions .circleci/scripts/binary_linux_test.sh
@@ -32,22 +32,13 @@ if [[ "$PACKAGE_TYPE" != libtorch ]]; then
fi

EXTRA_CONDA_FLAGS=""
NUMPY_PIN=""
NUMPY_PIN=">=2.0.2"
PROTOBUF_PACKAGE="defaults::protobuf"

if [[ "\$python_nodot" = *310* ]]; then
# There's an issue with conda channel priority where it'll randomly pick 1.19 over 1.20
# we set a lower boundary here just to be safe
NUMPY_PIN=">=1.21.2"
PROTOBUF_PACKAGE="protobuf>=3.19.0"
fi

if [[ "\$python_nodot" = *39* ]]; then
# There's an issue with conda channel priority where it'll randomly pick 1.19 over 1.20
# we set a lower boundary here just to be safe
NUMPY_PIN=">=1.20"
fi

# Move debug wheels out of the package dir so they don't get installed
mkdir -p /tmp/debug_final_pkgs
mv /final_pkgs/debug-*.zip /tmp/debug_final_pkgs || echo "no debug packages to move"
2 changes: 1 addition & 1 deletion .github/actions/upload-utilization-stats/action.yml
@@ -57,7 +57,7 @@ runs:
retry_wait_seconds: 30
command: |
set -eu
python3 -m pip install python-dateutil==2.8.2 boto3==1.35.42 pandas==2.1.3 dataclasses_json==0.6.7
python3 -m pip install python-dateutil==2.8.2 boto3==1.35.42 pandas==2.2.3 dataclasses-json==0.6.7
- name: Upload utilizatoin stats to s3
shell: bash
run: |
10 changes: 5 additions & 5 deletions .github/requirements/pip-requirements-macOS.txt
@@ -8,8 +8,8 @@ librosa>=0.6.2
mpmath==1.3.0
networkx==2.8.7
ninja==1.10.2.4
numba==0.59.0
numpy==1.26.4
numba==0.60.0
numpy==2.0.2
opt-einsum>=3.3
optree==0.13.0
packaging==23.1
@@ -25,11 +25,11 @@ pytest-subtests==0.13.1
pytest-xdist==3.3.1
pytest==7.3.2
pyyaml==6.0.2
scipy==1.12.0
setuptools==72.1.0
scipy==1.13.1
setuptools==80.9.0
sympy==1.13.3
tlparse==0.3.30
tensorboard==2.13.0
tensorboard==2.20.0
typing-extensions==4.12.2
unittest-xml-reporting<=3.2.0,>=2.0.0
xdoctest==1.1.0
2 changes: 1 addition & 1 deletion .github/scripts/td_llm_indexer.sh
@@ -7,7 +7,7 @@ cd llm-target-determinator
pip install -q -r requirements.txt
cd ../codellama
pip install --no-build-isolation -v -e .
pip install numpy==1.26.0
pip install numpy==2.0.2

# Run indexer
cd ../llm-target-determinator
2 changes: 1 addition & 1 deletion .github/workflows/lint.yml
@@ -273,7 +273,7 @@ jobs:
- name: Install dependencies
run: |
python3 -m pip install --upgrade pip
pip install pytest-rerunfailures==11.1.* pytest-flakefinder==1.1.* pytest-xdist==3.3.* expecttest==0.3.* fbscribelogger==0.1.* numpy==1.24.*
pip install pytest-rerunfailures==11.1.* pytest-flakefinder==1.1.* pytest-xdist==3.3.* expecttest==0.3.* fbscribelogger==0.1.* numpy==2.0.*
pip install torch --pre --index-url https://download.pytorch.org/whl/nightly/cpu/
- name: Run run_test.py (nonretryable)
run: |
4 changes: 2 additions & 2 deletions .lintrunner.toml
@@ -152,8 +152,8 @@ init_command = [
'python3',
'tools/linter/adapters/pip_init.py',
'--dry-run={{DRYRUN}}',
'numpy==1.26.4 ; python_version >= "3.9" and python_version <= "3.11"',
'numpy==2.1.0 ; python_version >= "3.12"',
'numpy==2.0.2 ; python_version == "3.9"',
'numpy==2.1.3 ; python_version >= "3.10"',
'expecttest==0.3.0',
'mypy==1.16.0',
'sympy==1.13.3',
4 changes: 2 additions & 2 deletions requirements-build.txt
@@ -2,8 +2,8 @@
setuptools>=70.1.0,<80.0 # setuptools develop deprecated on 80.0
cmake>=3.27
ninja
numpy
packaging
numpy>=2.0
packaging>=24.2
pyyaml
requests
six # dependency chain: NNPACK -> PeachPy -> six
6 changes: 3 additions & 3 deletions test/test_openreg.py
@@ -416,10 +416,10 @@ def test_open_device_numpy_serialization(self):
[
(
(
np.core.multiarray._reconstruct,
"numpy.core.multiarray._reconstruct",
np._core.multiarray._reconstruct,
"numpy._core.multiarray._reconstruct",
)
if np.__version__ >= "2.1"
if hasattr(np, "_core")
else np.core.multiarray._reconstruct
),
np.ndarray,
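Note on the test_openreg.py hunk above: NumPy 2.x moved the private numpy.core package to numpy._core, so probing for the attribute is a more robust runtime check than comparing version strings. A minimal standalone sketch of that check (the helper name is illustrative and not part of this PR; it only assumes NumPy is importable):

    import numpy as np

    def reconstruct_hook():
        # NumPy 2.x ships numpy._core; NumPy 1.x only has numpy.core.
        if hasattr(np, "_core"):
            return np._core.multiarray._reconstruct, "numpy._core.multiarray._reconstruct"
        return np.core.multiarray._reconstruct, "numpy.core.multiarray._reconstruct"

    func, qualname = reconstruct_hook()
    print(qualname)  # "numpy._core.multiarray._reconstruct" on NumPy 2.x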
2 changes: 1 addition & 1 deletion tools/build/bazel/requirements.in
@@ -1,5 +1,5 @@
pyyaml==6.0.2
numpy==1.26.4
numpy==2.0.2
requests==2.32.4
setuptools==78.1.1
sympy==1.12