diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index f71214f11109f9..4ffbb428bc381d 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -267,3 +267,7 @@ Lib/test/test_interpreters/ @ericsnowcurrently
# Config Parser
Lib/configparser.py @jaraco
Lib/test/test_configparser.py @jaraco
+
+# Colorize
+Lib/_colorize.py @hugovk
+Lib/test/test__colorize.py @hugovk
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 3649ae6c207b0a..05a2382fccbe25 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -18,6 +18,9 @@ concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}-reusable
cancel-in-progress: true
+env:
+ FORCE_COLOR: 1
+
jobs:
check_source:
name: Change detection
@@ -47,6 +50,8 @@ jobs:
if: needs.check_source.outputs.run_tests == 'true'
steps:
- uses: actions/checkout@v4
+ with:
+ persist-credentials: false
- uses: actions/setup-python@v5
- name: Install dependencies
run: |
@@ -101,6 +106,7 @@ jobs:
- uses: actions/checkout@v4
with:
fetch-depth: 1
+ persist-credentials: false
- name: Runner image version
run: echo "IMAGE_VERSION=${ImageVersion}" >> $GITHUB_ENV
- name: Check Autoconf and aclocal versions
@@ -137,6 +143,8 @@ jobs:
if: needs.check_source.outputs.run_tests == 'true'
steps:
- uses: actions/checkout@v4
+ with:
+ persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: '3.x'
@@ -301,6 +309,8 @@ jobs:
LD_LIBRARY_PATH: ${{ github.workspace }}/multissl/openssl/${{ matrix.openssl_ver }}/lib
steps:
- uses: actions/checkout@v4
+ with:
+ persist-credentials: false
- name: Runner image version
run: echo "IMAGE_VERSION=${ImageVersion}" >> $GITHUB_ENV
- name: Restore config.cache
@@ -361,6 +371,8 @@ jobs:
PYTHONSTRICTEXTENSIONBUILD: 1
steps:
- uses: actions/checkout@v4
+ with:
+ persist-credentials: false
- name: Register gcc problem matcher
run: echo "::add-matcher::.github/problem-matchers/gcc.json"
- name: Install Dependencies
@@ -443,7 +455,7 @@ jobs:
#
# (GH-104097) test_sysconfig is skipped because it has tests that are
# failing when executed from inside a virtual environment.
- ${{ env.VENV_PYTHON }} -m test \
+ "${VENV_PYTHON}" -m test \
-W \
-o \
-j4 \
@@ -478,6 +490,8 @@ jobs:
ASAN_OPTIONS: detect_leaks=0:allocator_may_return_null=1:handle_segv=0
steps:
- uses: actions/checkout@v4
+ with:
+ persist-credentials: false
- name: Runner image version
run: echo "IMAGE_VERSION=${ImageVersion}" >> $GITHUB_ENV
- name: Restore config.cache
diff --git a/.github/workflows/documentation-links.yml b/.github/workflows/documentation-links.yml
index 43a7afec73884e..fdb4b9aa29a7c8 100644
--- a/.github/workflows/documentation-links.yml
+++ b/.github/workflows/documentation-links.yml
@@ -10,9 +10,6 @@ on:
- 'Doc/**'
- '.github/workflows/doc.yml'
-permissions:
- pull-requests: write
-
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
@@ -20,6 +17,9 @@ concurrency:
jobs:
documentation-links:
runs-on: ubuntu-latest
+ permissions:
+ pull-requests: write
+
steps:
- uses: readthedocs/actions/preview@v1
with:
diff --git a/.github/workflows/jit.yml b/.github/workflows/jit.yml
index 5e3ac9e9e0fada..508bb8c42bcf59 100644
--- a/.github/workflows/jit.yml
+++ b/.github/workflows/jit.yml
@@ -25,6 +25,9 @@ concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
+env:
+ FORCE_COLOR: 1
+
jobs:
interpreter:
name: Interpreter (Debug)
@@ -32,6 +35,8 @@ jobs:
timeout-minutes: 90
steps:
- uses: actions/checkout@v4
+ with:
+ persist-credentials: false
- name: Build tier two interpreter
run: |
./configure --enable-experimental-jit=interpreter --with-pydebug
@@ -103,6 +108,8 @@ jobs:
CC: ${{ matrix.compiler }}
steps:
- uses: actions/checkout@v4
+ with:
+ persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: '3.11'
@@ -110,8 +117,7 @@ jobs:
- name: Native Windows
if: runner.os == 'Windows' && matrix.architecture != 'ARM64'
run: |
- choco upgrade llvm -y
- choco install llvm --allow-downgrade --no-progress --version ${{ matrix.llvm }}
+ choco install llvm --allow-downgrade --no-progress --version ${{ matrix.llvm }}.1.0
./PCbuild/build.bat --experimental-jit ${{ matrix.debug && '-d' || '--pgo' }} -p ${{ matrix.architecture }}
./PCbuild/rt.bat ${{ matrix.debug && '-d' || '' }} -p ${{ matrix.architecture }} -q --multiprocess 0 --timeout 4500 --verbose2 --verbose3
@@ -119,8 +125,7 @@ jobs:
- name: Emulated Windows
if: runner.os == 'Windows' && matrix.architecture == 'ARM64'
run: |
- choco upgrade llvm -y
- choco install llvm --allow-downgrade --no-progress --version ${{ matrix.llvm }}
+ choco install llvm --allow-downgrade --no-progress --version ${{ matrix.llvm }}.1.0
./PCbuild/build.bat --experimental-jit ${{ matrix.debug && '-d' || '' }} -p ${{ matrix.architecture }}
- name: Native macOS
@@ -169,6 +174,8 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
+ with:
+ persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: '3.11'
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index ccde03f91983df..d74ce8fcc256dc 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -20,6 +20,8 @@ jobs:
steps:
- uses: actions/checkout@v4
+ with:
+ persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: "3.x"
diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml
index e5b05302b5ac27..5dfa8d7bcafd78 100644
--- a/.github/workflows/mypy.yml
+++ b/.github/workflows/mypy.yml
@@ -51,6 +51,8 @@ jobs:
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
+ with:
+ persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: "3.13"
diff --git a/.github/workflows/require-pr-label.yml b/.github/workflows/require-pr-label.yml
index ff5cbdf3eda749..ee91e2a39a198d 100644
--- a/.github/workflows/require-pr-label.yml
+++ b/.github/workflows/require-pr-label.yml
@@ -4,15 +4,14 @@ on:
pull_request:
types: [opened, reopened, labeled, unlabeled, synchronize]
-permissions:
- issues: write
- pull-requests: write
-
jobs:
label:
name: DO-NOT-MERGE / unresolved review
if: github.repository_owner == 'python'
runs-on: ubuntu-latest
+ permissions:
+ issues: write
+ pull-requests: write
timeout-minutes: 10
steps:
diff --git a/.github/workflows/reusable-change-detection.yml b/.github/workflows/reusable-change-detection.yml
index 5cd6fb39f1e12f..8bdcc13ae3542f 100644
--- a/.github/workflows/reusable-change-detection.yml
+++ b/.github/workflows/reusable-change-detection.yml
@@ -61,6 +61,8 @@ jobs:
- run: >-
echo '${{ github.event_name }}'
- uses: actions/checkout@v4
+ with:
+ persist-credentials: false
- name: Check for source changes
id: check
run: |
diff --git a/.github/workflows/reusable-docs.yml b/.github/workflows/reusable-docs.yml
index 39a97392e898aa..6738acc98c6565 100644
--- a/.github/workflows/reusable-docs.yml
+++ b/.github/workflows/reusable-docs.yml
@@ -22,12 +22,14 @@ jobs:
env:
branch_base: 'origin/${{ github.event.pull_request.base.ref }}'
branch_pr: 'origin/${{ github.event.pull_request.head.ref }}'
+ commits: ${{ github.event.pull_request.commits }}
refspec_base: '+${{ github.event.pull_request.base.sha }}:remotes/origin/${{ github.event.pull_request.base.ref }}'
refspec_pr: '+${{ github.event.pull_request.head.sha }}:remotes/origin/${{ github.event.pull_request.head.ref }}'
steps:
- name: 'Check out latest PR branch commit'
uses: actions/checkout@v4
with:
+ persist-credentials: false
ref: >-
${{
github.event_name == 'pull_request'
@@ -39,15 +41,15 @@ jobs:
if: github.event_name == 'pull_request'
run: |
# Fetch enough history to find a common ancestor commit (aka merge-base):
- git fetch origin ${{ env.refspec_pr }} --depth=$(( ${{ github.event.pull_request.commits }} + 1 )) \
+ git fetch origin "${refspec_pr}" --depth=$(( commits + 1 )) \
--no-tags --prune --no-recurse-submodules
# This should get the oldest commit in the local fetched history (which may not be the commit the PR branched from):
- COMMON_ANCESTOR=$( git rev-list --first-parent --max-parents=0 --max-count=1 ${{ env.branch_pr }} )
+ COMMON_ANCESTOR=$( git rev-list --first-parent --max-parents=0 --max-count=1 "${branch_pr}" )
DATE=$( git log --date=iso8601 --format=%cd "${COMMON_ANCESTOR}" )
# Get all commits since that commit date from the base branch (eg: master or main):
- git fetch origin ${{ env.refspec_base }} --shallow-since="${DATE}" \
+ git fetch origin "${refspec_base}" --shallow-since="${DATE}" \
--no-tags --prune --no-recurse-submodules
- name: 'Set up Python'
uses: actions/setup-python@v5
@@ -63,42 +65,26 @@ jobs:
continue-on-error: true
run: |
set -Eeuo pipefail
- # Build docs with the '-n' (nit-picky) option; write warnings to file
- make -C Doc/ PYTHON=../python SPHINXOPTS="-q -n -W --keep-going -w sphinx-warnings.txt" html
+ # Build docs with the nit-picky option; write warnings to file
+ make -C Doc/ PYTHON=../python SPHINXOPTS="--quiet --nitpicky --fail-on-warning --keep-going --warning-file sphinx-warnings.txt" html
- name: 'Check warnings'
if: github.event_name == 'pull_request'
run: |
python Doc/tools/check-warnings.py \
- --annotate-diff '${{ env.branch_base }}' '${{ env.branch_pr }}' \
+ --annotate-diff "${branch_base}" "${branch_pr}" \
--fail-if-regression \
--fail-if-improved \
--fail-if-new-news-nit
- # This build doesn't use problem matchers or check annotations
- build_doc_oldest_supported_sphinx:
- name: 'Docs (Oldest Sphinx)'
- runs-on: ubuntu-latest
- timeout-minutes: 60
- steps:
- - uses: actions/checkout@v4
- - name: 'Set up Python'
- uses: actions/setup-python@v5
- with:
- python-version: '3.13' # known to work with Sphinx 7.2.6
- cache: 'pip'
- cache-dependency-path: 'Doc/requirements-oldest-sphinx.txt'
- - name: 'Install build dependencies'
- run: make -C Doc/ venv REQUIREMENTS="requirements-oldest-sphinx.txt"
- - name: 'Build HTML documentation'
- run: make -C Doc/ SPHINXOPTS="-q" SPHINXERRORHANDLING="-W --keep-going" html
-
# Run "doctest" on HEAD as new syntax doesn't exist in the latest stable release
doctest:
name: 'Doctest'
- runs-on: ubuntu-22.04
+ runs-on: ubuntu-24.04
timeout-minutes: 60
steps:
- uses: actions/checkout@v4
+ with:
+ persist-credentials: false
- uses: actions/cache@v4
with:
path: ~/.cache/pip
@@ -115,4 +101,4 @@ jobs:
run: make -C Doc/ PYTHON=../python venv
# Use "xvfb-run" since some doctest tests open GUI windows
- name: 'Run documentation doctest'
- run: xvfb-run make -C Doc/ PYTHON=../python SPHINXERRORHANDLING="-W --keep-going" doctest
+ run: xvfb-run make -C Doc/ PYTHON=../python SPHINXERRORHANDLING="--fail-on-warning --keep-going" doctest
diff --git a/.github/workflows/reusable-macos.yml b/.github/workflows/reusable-macos.yml
index b26aeea82d045e..6828c0ba2dff3b 100644
--- a/.github/workflows/reusable-macos.yml
+++ b/.github/workflows/reusable-macos.yml
@@ -15,6 +15,9 @@ on:
required: true
type: string
+env:
+ FORCE_COLOR: 1
+
jobs:
build_macos:
name: build and test (${{ inputs.os }})
@@ -29,6 +32,8 @@ jobs:
runs-on: ${{ inputs.os }}
steps:
- uses: actions/checkout@v4
+ with:
+ persist-credentials: false
- name: Runner image version
run: echo "IMAGE_VERSION=${ImageVersion}" >> $GITHUB_ENV
- name: Restore config.cache
@@ -40,7 +45,7 @@ jobs:
run: |
brew install pkg-config openssl@3.0 xz gdbm tcl-tk@8
# Because alternate versions are not symlinked into place by default:
- brew link tcl-tk@8
+ brew link --overwrite tcl-tk@8
- name: Configure CPython
run: |
GDBM_CFLAGS="-I$(brew --prefix gdbm)/include" \
diff --git a/.github/workflows/reusable-tsan.yml b/.github/workflows/reusable-tsan.yml
index 042ef664d9a436..47a63be32560bb 100644
--- a/.github/workflows/reusable-tsan.yml
+++ b/.github/workflows/reusable-tsan.yml
@@ -18,13 +18,21 @@ on:
required: true
type: string
+env:
+ FORCE_COLOR: 1
+
jobs:
build_tsan_reusable:
name: 'Thread sanitizer'
runs-on: ubuntu-24.04
timeout-minutes: 60
+ env:
+ OPTIONS: ${{ inputs.options }}
+ SUPPRESSIONS_PATH: ${{ inputs.suppressions_path }}
steps:
- uses: actions/checkout@v4
+ with:
+ persist-credentials: false
- name: Runner image version
run: echo "IMAGE_VERSION=${ImageVersion}" >> $GITHUB_ENV
- name: Restore config.cache
@@ -47,7 +55,7 @@ jobs:
sudo sysctl -w vm.mmap_rnd_bits=28
- name: TSAN Option Setup
run: |
- echo "TSAN_OPTIONS=log_path=${GITHUB_WORKSPACE}/tsan_log suppressions=${GITHUB_WORKSPACE}/${{ inputs.suppressions_path }} handle_segv=0" >> $GITHUB_ENV
+ echo "TSAN_OPTIONS=log_path=${GITHUB_WORKSPACE}/tsan_log suppressions=${GITHUB_WORKSPACE}/${SUPPRESSIONS_PATH} handle_segv=0" >> $GITHUB_ENV
echo "CC=clang" >> $GITHUB_ENV
echo "CXX=clang++" >> $GITHUB_ENV
- name: Add ccache to PATH
@@ -59,7 +67,7 @@ jobs:
save: ${{ github.event_name == 'push' }}
max-size: "200M"
- name: Configure CPython
- run: ${{ inputs.options }}
+ run: "${OPTIONS}"
- name: Build CPython
run: make -j4
- name: Display build info
diff --git a/.github/workflows/reusable-ubuntu.yml b/.github/workflows/reusable-ubuntu.yml
index c9c415600d06b8..ea9e6f96bc2a8b 100644
--- a/.github/workflows/reusable-ubuntu.yml
+++ b/.github/workflows/reusable-ubuntu.yml
@@ -12,6 +12,9 @@ on:
type: boolean
default: false
+env:
+ FORCE_COLOR: 1
+
jobs:
build_ubuntu_reusable:
name: 'build and test'
@@ -20,14 +23,15 @@ jobs:
strategy:
fail-fast: false
matrix:
- os: [ubuntu-24.04, ubuntu-24.04-aarch64]
+ os: [ubuntu-24.04, ubuntu-24.04-arm]
env:
- FORCE_COLOR: 1
OPENSSL_VER: 3.0.15
PYTHONSTRICTEXTENSIONBUILD: 1
TERM: linux
steps:
- uses: actions/checkout@v4
+ with:
+ persist-credentials: false
- name: Register gcc problem matcher
run: echo "::add-matcher::.github/problem-matchers/gcc.json"
- name: Install dependencies
diff --git a/.github/workflows/reusable-wasi.yml b/.github/workflows/reusable-wasi.yml
index ca5fc16aacf9cd..e955e6932d602a 100644
--- a/.github/workflows/reusable-wasi.yml
+++ b/.github/workflows/reusable-wasi.yml
@@ -7,6 +7,9 @@ on:
required: true
type: string
+env:
+ FORCE_COLOR: 1
+
jobs:
build_wasi_reusable:
name: 'build and test'
@@ -20,6 +23,8 @@ jobs:
CROSS_BUILD_WASI: cross-build/wasm32-wasip1
steps:
- uses: actions/checkout@v4
+ with:
+ persist-credentials: false
# No problem resolver registered as one doesn't currently exist for Clang.
- name: "Install wasmtime"
uses: bytecodealliance/actions/wasmtime/setup@v1
@@ -34,9 +39,9 @@ jobs:
- name: "Install WASI SDK" # Hard-coded to x64.
if: steps.cache-wasi-sdk.outputs.cache-hit != 'true'
run: |
- mkdir ${{ env.WASI_SDK_PATH }} && \
- curl -s -S --location https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-${{ env.WASI_SDK_VERSION }}/wasi-sdk-${{ env.WASI_SDK_VERSION }}.0-x86_64-linux.tar.gz | \
- tar --strip-components 1 --directory ${{ env.WASI_SDK_PATH }} --extract --gunzip
+ mkdir "${WASI_SDK_PATH}" && \
+ curl -s -S --location "https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-${WASI_SDK_VERSION}/wasi-sdk-${WASI_SDK_VERSION}.0-x86_64-linux.tar.gz" | \
+ tar --strip-components 1 --directory "${WASI_SDK_PATH}" --extract --gunzip
- name: "Configure ccache action"
uses: hendrikmuhs/ccache-action@v1.2
with:
@@ -72,6 +77,6 @@ jobs:
- name: "Make host"
run: python3 Tools/wasm/wasi.py make-host
- name: "Display build info"
- run: make --directory ${{ env.CROSS_BUILD_WASI }} pythoninfo
+ run: make --directory "${CROSS_BUILD_WASI}" pythoninfo
- name: "Test"
- run: make --directory ${{ env.CROSS_BUILD_WASI }} test
+ run: make --directory "${CROSS_BUILD_WASI}" test
diff --git a/.github/workflows/reusable-windows-msi.yml b/.github/workflows/reusable-windows-msi.yml
index abdb1a1982fef8..bc0414d1bbcd8f 100644
--- a/.github/workflows/reusable-windows-msi.yml
+++ b/.github/workflows/reusable-windows-msi.yml
@@ -11,14 +11,21 @@ on:
permissions:
contents: read
+env:
+ FORCE_COLOR: 1
+
jobs:
build:
name: installer for ${{ inputs.arch }}
runs-on: windows-latest
timeout-minutes: 60
env:
+ ARCH: ${{ inputs.arch }}
IncludeFreethreaded: true
steps:
- uses: actions/checkout@v4
+ with:
+ persist-credentials: false
- name: Build CPython installer
- run: .\Tools\msi\build.bat --doc -${{ inputs.arch }}
+ run: ./Tools/msi/build.bat --doc -"${ARCH}"
+ shell: bash
diff --git a/.github/workflows/reusable-windows.yml b/.github/workflows/reusable-windows.yml
index 12b68d68466d62..5485a0169130b0 100644
--- a/.github/workflows/reusable-windows.yml
+++ b/.github/workflows/reusable-windows.yml
@@ -18,16 +18,21 @@ on:
default: false
env:
+ FORCE_COLOR: 1
IncludeUwp: >-
true
jobs:
build:
- name: 'build and test (${{ inputs.arch }})'
+ name: ${{ inputs.arch == 'arm64' && 'build' || 'build and test' }} (${{ inputs.arch }})
runs-on: ${{ inputs.os }}
timeout-minutes: 60
+ env:
+ ARCH: ${{ inputs.arch }}
steps:
- uses: actions/checkout@v4
+ with:
+ persist-credentials: false
- name: Register MSVC problem matcher
if: inputs.arch != 'Win32'
run: echo "::add-matcher::.github/problem-matchers/msvc.json"
@@ -35,8 +40,9 @@ jobs:
run: >-
.\\PCbuild\\build.bat
-e -d -v
- -p ${{ inputs.arch }}
+ -p "${ARCH}"
${{ fromJSON(inputs.free-threading) && '--disable-gil' || '' }}
+ shell: bash
- name: Display build info # FIXME(diegorusso): remove the `if`
if: inputs.arch != 'arm64'
run: .\\python.bat -m test.pythoninfo
@@ -44,6 +50,7 @@ jobs:
if: inputs.arch != 'arm64'
run: >-
.\\PCbuild\\rt.bat
- -p ${{ inputs.arch }}
+ -p "${ARCH}"
-d -q --fast-ci
${{ fromJSON(inputs.free-threading) && '--disable-gil' || '' }}
+ shell: bash
diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml
index f97587e68cbbe4..7578189f5d4d67 100644
--- a/.github/workflows/stale.yml
+++ b/.github/workflows/stale.yml
@@ -4,14 +4,13 @@ on:
schedule:
- cron: "0 */6 * * *"
-permissions:
- pull-requests: write
-
jobs:
stale:
if: github.repository_owner == 'python'
runs-on: ubuntu-latest
+ permissions:
+ pull-requests: write
timeout-minutes: 10
steps:
diff --git a/.github/workflows/verify-ensurepip-wheels.yml b/.github/workflows/verify-ensurepip-wheels.yml
index 83b007f1c9c2ef..463e7bf3355cc3 100644
--- a/.github/workflows/verify-ensurepip-wheels.yml
+++ b/.github/workflows/verify-ensurepip-wheels.yml
@@ -26,6 +26,8 @@ jobs:
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
+ with:
+ persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: '3'
diff --git a/.github/zizmor.yml b/.github/zizmor.yml
new file mode 100644
index 00000000000000..eeda8d9eaaf484
--- /dev/null
+++ b/.github/zizmor.yml
@@ -0,0 +1,6 @@
+# Configuration for the zizmor static analysis tool, run via pre-commit in CI
+# https://woodruffw.github.io/zizmor/configuration/
+rules:
+ dangerous-triggers:
+ ignore:
+ - documentation-links.yml
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 7e0bdd50b5141a..7f38c3e848f03d 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
- rev: v0.6.7
+ rev: v0.9.1
hooks:
- id: ruff
name: Run Ruff (lint) on Doc/
@@ -20,15 +20,14 @@ repos:
files: ^Doc/
- repo: https://github.com/psf/black-pre-commit-mirror
- rev: 24.4.2
+ rev: 24.10.0
hooks:
- id: black
name: Run Black on Tools/jit/
files: ^Tools/jit/
- language_version: python3.12
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v4.5.0
+ rev: v5.0.0
hooks:
- id: check-case-conflict
- id: check-merge-conflict
@@ -41,8 +40,13 @@ repos:
- id: trailing-whitespace
types_or: [c, inc, python, rst]
+ - repo: https://github.com/woodruffw/zizmor-pre-commit
+ rev: v1.1.1
+ hooks:
+ - id: zizmor
+
- repo: https://github.com/sphinx-contrib/sphinx-lint
- rev: v0.9.1
+ rev: v1.0.0
hooks:
- id: sphinx-lint
args: [--enable=default-role]
diff --git a/Android/android-env.sh b/Android/android-env.sh
index b93e7f21ed5b94..181fcea8f40783 100644
--- a/Android/android-env.sh
+++ b/Android/android-env.sh
@@ -1,10 +1,10 @@
# This script must be sourced with the following variables already set:
-: ${ANDROID_HOME:?} # Path to Android SDK
-: ${HOST:?} # GNU target triplet
+: "${ANDROID_HOME:?}" # Path to Android SDK
+: "${HOST:?}" # GNU target triplet
# You may also override the following:
-: ${api_level:=21} # Minimum Android API level the build will run on
-: ${PREFIX:-} # Path in which to find required libraries
+: "${api_level:=21}" # Minimum Android API level the build will run on
+: "${PREFIX:-}" # Path in which to find required libraries
# Print all messages on stderr so they're visible when running within build-wheel.
@@ -27,20 +27,20 @@ fail() {
ndk_version=27.1.12297006
ndk=$ANDROID_HOME/ndk/$ndk_version
-if ! [ -e $ndk ]; then
+if ! [ -e "$ndk" ]; then
log "Installing NDK - this may take several minutes"
- yes | $ANDROID_HOME/cmdline-tools/latest/bin/sdkmanager "ndk;$ndk_version"
+ yes | "$ANDROID_HOME/cmdline-tools/latest/bin/sdkmanager" "ndk;$ndk_version"
fi
-if [ $HOST = "arm-linux-androideabi" ]; then
+if [ "$HOST" = "arm-linux-androideabi" ]; then
clang_triplet=armv7a-linux-androideabi
else
- clang_triplet=$HOST
+ clang_triplet="$HOST"
fi
# These variables are based on BuildSystemMaintainers.md above, and
# $ndk/build/cmake/android.toolchain.cmake.
-toolchain=$(echo $ndk/toolchains/llvm/prebuilt/*)
+toolchain=$(echo "$ndk"/toolchains/llvm/prebuilt/*)
export AR="$toolchain/bin/llvm-ar"
export AS="$toolchain/bin/llvm-as"
export CC="$toolchain/bin/${clang_triplet}${api_level}-clang"
@@ -72,12 +72,12 @@ LDFLAGS="$LDFLAGS -lm"
# -mstackrealign is included where necessary in the clang launcher scripts which are
# pointed to by $CC, so we don't need to include it here.
-if [ $HOST = "arm-linux-androideabi" ]; then
+if [ "$HOST" = "arm-linux-androideabi" ]; then
CFLAGS="$CFLAGS -march=armv7-a -mthumb"
fi
if [ -n "${PREFIX:-}" ]; then
- abs_prefix=$(realpath $PREFIX)
+ abs_prefix="$(realpath "$PREFIX")"
CFLAGS="$CFLAGS -I$abs_prefix/include"
LDFLAGS="$LDFLAGS -L$abs_prefix/lib"
@@ -87,11 +87,13 @@ fi
# When compiling C++, some build systems will combine CFLAGS and CXXFLAGS, and some will
# use CXXFLAGS alone.
-export CXXFLAGS=$CFLAGS
+export CXXFLAGS="$CFLAGS"
# Use the same variable name as conda-build
-if [ $(uname) = "Darwin" ]; then
- export CPU_COUNT=$(sysctl -n hw.ncpu)
+if [ "$(uname)" = "Darwin" ]; then
+ CPU_COUNT="$(sysctl -n hw.ncpu)"
+ export CPU_COUNT
else
- export CPU_COUNT=$(nproc)
+ CPU_COUNT="$(nproc)"
+ export CPU_COUNT
fi
diff --git a/Doc/Makefile b/Doc/Makefile
index 4a704ad58b33d3..b8896da4a91869 100644
--- a/Doc/Makefile
+++ b/Doc/Makefile
@@ -14,15 +14,15 @@ PAPER =
SOURCES =
DISTVERSION = $(shell $(PYTHON) tools/extensions/patchlevel.py)
REQUIREMENTS = requirements.txt
-SPHINXERRORHANDLING = -W
+SPHINXERRORHANDLING = --fail-on-warning
# Internal variables.
-PAPEROPT_a4 = -D latex_elements.papersize=a4paper
-PAPEROPT_letter = -D latex_elements.papersize=letterpaper
+PAPEROPT_a4 = --define latex_elements.papersize=a4paper
+PAPEROPT_letter = --define latex_elements.papersize=letterpaper
-ALLSPHINXOPTS = -b $(BUILDER) \
- -d build/doctrees \
- -j $(JOBS) \
+ALLSPHINXOPTS = --builder $(BUILDER) \
+ --doctree-dir build/doctrees \
+ --jobs $(JOBS) \
$(PAPEROPT_$(PAPER)) \
$(SPHINXOPTS) $(SPHINXERRORHANDLING) \
. build/$(BUILDER) $(SOURCES)
@@ -144,7 +144,7 @@ pydoc-topics: build
.PHONY: gettext
gettext: BUILDER = gettext
-gettext: override SPHINXOPTS := -d build/doctrees-gettext $(SPHINXOPTS)
+gettext: override SPHINXOPTS := --doctree-dir build/doctrees-gettext $(SPHINXOPTS)
gettext: build
.PHONY: htmlview
@@ -172,7 +172,7 @@ venv:
else \
echo "Creating venv in $(VENVDIR)"; \
if $(UV) --version >/dev/null 2>&1; then \
- $(UV) venv $(VENVDIR); \
+ $(UV) venv --python=$(PYTHON) $(VENVDIR); \
VIRTUAL_ENV=$(VENVDIR) $(UV) pip install -r $(REQUIREMENTS); \
else \
$(PYTHON) -m venv $(VENVDIR); \
@@ -300,20 +300,20 @@ serve:
# By default, Sphinx only rebuilds pages where the page content has changed.
# This means it doesn't always pick up changes to preferred link targets, etc
# To ensure such changes are picked up, we build the published docs with
-# `-E` (to ignore the cached environment) and `-a` (to ignore already existing
-# output files)
+# ``--fresh-env`` (to ignore the cached environment) and ``--write-all``
+# (to ignore already existing output files)
# for development releases: always build
.PHONY: autobuild-dev
autobuild-dev: DISTVERSION = $(shell $(PYTHON) tools/extensions/patchlevel.py --short)
autobuild-dev:
- $(MAKE) dist-no-html SPHINXOPTS='$(SPHINXOPTS) -Ea -A daily=1' DISTVERSION=$(DISTVERSION)
+ $(MAKE) dist-no-html SPHINXOPTS='$(SPHINXOPTS) --fresh-env --write-all --html-define daily=1' DISTVERSION=$(DISTVERSION)
# for HTML-only rebuilds
.PHONY: autobuild-dev-html
autobuild-dev-html: DISTVERSION = $(shell $(PYTHON) tools/extensions/patchlevel.py --short)
autobuild-dev-html:
- $(MAKE) dist-html SPHINXOPTS='$(SPHINXOPTS) -Ea -A daily=1' DISTVERSION=$(DISTVERSION)
+ $(MAKE) dist-html SPHINXOPTS='$(SPHINXOPTS) --fresh-env --write-all --html-define daily=1' DISTVERSION=$(DISTVERSION)
# for stable releases: only build if not in pre-release stage (alpha, beta)
# release candidate downloads are okay, since the stable tree can be in that stage
diff --git a/Doc/about.rst b/Doc/about.rst
index 5e6160ff2700ed..8f635d7f743a98 100644
--- a/Doc/about.rst
+++ b/Doc/about.rst
@@ -1,10 +1,11 @@
-=====================
-About these documents
-=====================
+========================
+About this documentation
+========================
-These documents are generated from `reStructuredText`_ sources by `Sphinx`_, a
-document processor specifically written for the Python documentation.
+Python's documentation is generated from `reStructuredText`_ sources
+using `Sphinx`_, a documentation generator originally created for Python
+and now maintained as an independent project.
.. _reStructuredText: https://docutils.sourceforge.io/rst.html
.. _Sphinx: https://www.sphinx-doc.org/
@@ -20,14 +21,14 @@ volunteers are always welcome!
Many thanks go to:
* Fred L. Drake, Jr., the creator of the original Python documentation toolset
- and writer of much of the content;
+ and author of much of the content;
* the `Docutils `_ project for creating
reStructuredText and the Docutils suite;
* Fredrik Lundh for his Alternative Python Reference project from which Sphinx
got many good ideas.
-Contributors to the Python Documentation
+Contributors to the Python documentation
----------------------------------------
Many people have contributed to the Python language, the Python standard
diff --git a/Doc/c-api/arg.rst b/Doc/c-api/arg.rst
index 3201bdc82691f4..209056ef2f8bce 100644
--- a/Doc/c-api/arg.rst
+++ b/Doc/c-api/arg.rst
@@ -229,12 +229,24 @@ There are three ways strings and buffers can be converted to C:
Numbers
-------
+These formats allow representing Python numbers or single characters as C numbers.
+Formats that require :class:`int`, :class:`float` or :class:`complex` can
+also use the corresponding special methods :meth:`~object.__index__`,
+:meth:`~object.__float__` or :meth:`~object.__complex__` to convert
+the Python object to the required type.
+
+For signed integer formats, :exc:`OverflowError` is raised if the value
+is out of range for the C type.
+For unsigned integer formats, no range checking is done --- the
+most significant bits are silently truncated when the receiving field is too
+small to receive the value.
+
``b`` (:class:`int`) [unsigned char]
- Convert a nonnegative Python integer to an unsigned tiny int, stored in a C
+ Convert a nonnegative Python integer to an unsigned tiny integer, stored in a C
:c:expr:`unsigned char`.
``B`` (:class:`int`) [unsigned char]
- Convert a Python integer to a tiny int without overflow checking, stored in a C
+ Convert a Python integer to a tiny integer without overflow checking, stored in a C
:c:expr:`unsigned char`.
``h`` (:class:`int`) [short int]
@@ -307,7 +319,7 @@ Other objects
.. _o_ampersand:
-``O&`` (object) [*converter*, *anything*]
+``O&`` (object) [*converter*, *address*]
Convert a Python object to a C variable through a *converter* function. This
takes two arguments: the first is a function, the second is the address of a C
variable (of arbitrary type), converted to :c:expr:`void *`. The *converter*
@@ -321,14 +333,20 @@ Other objects
the conversion has failed. When the conversion fails, the *converter* function
should raise an exception and leave the content of *address* unmodified.
- If the *converter* returns ``Py_CLEANUP_SUPPORTED``, it may get called a
+ .. c:macro:: Py_CLEANUP_SUPPORTED
+ :no-typesetting:
+
+ If the *converter* returns :c:macro:`!Py_CLEANUP_SUPPORTED`, it may get called a
second time if the argument parsing eventually fails, giving the converter a
chance to release any memory that it had already allocated. In this second
call, the *object* parameter will be ``NULL``; *address* will have the same value
as in the original call.
+ Examples of converters: :c:func:`PyUnicode_FSConverter` and
+ :c:func:`PyUnicode_FSDecoder`.
+
.. versionchanged:: 3.1
- ``Py_CLEANUP_SUPPORTED`` was added.
+ :c:macro:`!Py_CLEANUP_SUPPORTED` was added.
``p`` (:class:`bool`) [int]
Tests the value passed in for truth (a boolean **p**\ redicate) and converts
@@ -344,12 +362,6 @@ Other objects
in *items*. The C arguments must correspond to the individual format units in
*items*. Format units for sequences may be nested.
-It is possible to pass "long" integers (integers whose value exceeds the
-platform's :c:macro:`LONG_MAX`) however no proper range checking is done --- the
-most significant bits are silently truncated when the receiving field is too
-small to receive the value (actually, the semantics are inherited from downcasts
-in C --- your mileage may vary).
-
A few other characters have a meaning in a format string. These may not occur
inside nested parentheses. They are:
diff --git a/Doc/c-api/frame.rst b/Doc/c-api/frame.rst
index 638a740e0c24da..1a52e146a69751 100644
--- a/Doc/c-api/frame.rst
+++ b/Doc/c-api/frame.rst
@@ -132,7 +132,7 @@ See also :ref:`Reflection `.
.. versionadded:: 3.11
.. versionchanged:: 3.13
- As part of :pep:`667`, return a proxy object for optimized scopes.
+ As part of :pep:`667`, return an instance of :c:var:`PyFrameLocalsProxy_Type`.
.. c:function:: int PyFrame_GetLineNumber(PyFrameObject *frame)
@@ -140,6 +140,26 @@ See also :ref:`Reflection `.
Return the line number that *frame* is currently executing.
+Frame Locals Proxies
+^^^^^^^^^^^^^^^^^^^^
+
+.. versionadded:: 3.13
+
+The :attr:`~frame.f_locals` attribute on a :ref:`frame object <frame-objects>`
+is an instance of a "frame-locals proxy". The proxy object exposes a
+write-through view of the underlying locals dictionary for the frame. This
+ensures that the variables exposed by ``f_locals`` are always up to date with
+the live local variables in the frame itself.
+
+See :pep:`667` for more information.
+
+.. c:var:: PyTypeObject PyFrameLocalsProxy_Type
+
+ The type of frame :func:`locals` proxy objects.
+
+.. c:function:: int PyFrameLocalsProxy_Check(PyObject *obj)
+
+ Return non-zero if *obj* is a frame :func:`locals` proxy.
Internal Frames
^^^^^^^^^^^^^^^
diff --git a/Doc/c-api/init.rst b/Doc/c-api/init.rst
index 385bed49511f60..130942e070ef7a 100644
--- a/Doc/c-api/init.rst
+++ b/Doc/c-api/init.rst
@@ -557,6 +557,15 @@ Initializing and finalizing the interpreter
customized Python that always runs in isolated mode using
:c:func:`Py_RunMain`.
+.. c:function:: int PyUnstable_AtExit(PyInterpreterState *interp, void (*func)(void *), void *data)
+
+ Register an :mod:`atexit` callback for the target interpreter *interp*.
+ This is similar to :c:func:`Py_AtExit`, but takes an explicit interpreter and
+ data pointer for the callback.
+
+ The :term:`GIL` must be held for *interp*.
+
+ .. versionadded:: 3.13
Process-wide parameters
=======================
@@ -1436,6 +1445,17 @@ All of the following functions must be called after :c:func:`Py_Initialize`.
.. versionadded:: 3.8
+
+.. c:function:: PyObject* PyUnstable_InterpreterState_GetMainModule(PyInterpreterState *interp)
+
+ Return a :term:`strong reference` to the ``__main__`` `module object `_
+ for the given interpreter.
+
+ The caller must hold the GIL.
+
+ .. versionadded:: 3.13
+
+
.. c:type:: PyObject* (*_PyFrameEvalFunction)(PyThreadState *tstate, _PyInterpreterFrame *frame, int throwflag)
Type of a frame evaluation function.
diff --git a/Doc/c-api/init_config.rst b/Doc/c-api/init_config.rst
index 612aa2aa711253..cd78fe18e35d1c 100644
--- a/Doc/c-api/init_config.rst
+++ b/Doc/c-api/init_config.rst
@@ -1271,6 +1271,17 @@ PyConfig
Default: ``1`` in Python config and ``0`` in isolated config.
+ .. c:member:: int use_system_logger
+
+ If non-zero, ``stdout`` and ``stderr`` will be redirected to the system
+ log.
+
+ Only available on macOS 10.12 and later, and on iOS.
+
+ Default: ``0`` (don't use system log).
+
+ .. versionadded:: 3.13.2
+
.. c:member:: int user_site_directory
If non-zero, add the user site directory to :data:`sys.path`.
diff --git a/Doc/c-api/module.rst b/Doc/c-api/module.rst
index f82a050ab75de0..f71089370152ce 100644
--- a/Doc/c-api/module.rst
+++ b/Doc/c-api/module.rst
@@ -523,9 +523,6 @@ state:
On success, return ``0``. On error, raise an exception and return ``-1``.
- Return ``-1`` if *value* is ``NULL``. It must be called with an exception
- raised in this case.
-
Example usage::
static int
@@ -540,6 +537,10 @@ state:
return res;
}
+ As a convenience, the function accepts ``NULL`` as *value* with an exception
+ set. In this case, it returns ``-1`` and leaves the raised exception
+ unchanged.
+
The example can also be written without checking explicitly if *obj* is
``NULL``::
diff --git a/Doc/c-api/object.rst b/Doc/c-api/object.rst
index 1e822fad13aa5d..8bb392af01fcd2 100644
--- a/Doc/c-api/object.rst
+++ b/Doc/c-api/object.rst
@@ -111,7 +111,8 @@ Object Protocol
.. note::
Exceptions that occur when this calls :meth:`~object.__getattr__` and
- :meth:`~object.__getattribute__` methods are silently ignored.
+ :meth:`~object.__getattribute__` methods aren't propagated,
+ but instead given to :func:`sys.unraisablehook`.
For proper error handling, use :c:func:`PyObject_HasAttrWithError`,
:c:func:`PyObject_GetOptionalAttr` or :c:func:`PyObject_GetAttr` instead.
@@ -492,6 +493,13 @@ Object Protocol
on failure. This is equivalent to the Python statement ``del o[key]``.
+.. c:function:: int PyObject_DelItemString(PyObject *o, const char *key)
+
+ This is the same as :c:func:`PyObject_DelItem`, but *key* is
+ specified as a :c:expr:`const char*` UTF-8 encoded bytes string,
+ rather than a :c:expr:`PyObject*`.
+
+
.. c:function:: PyObject* PyObject_Dir(PyObject *o)
This is equivalent to the Python expression ``dir(o)``, returning a (possibly
@@ -509,6 +517,12 @@ Object Protocol
iterated.
+.. c:function:: PyObject* PyObject_SelfIter(PyObject *obj)
+
+ This is equivalent to the Python ``__iter__(self): return self`` method.
+ It is intended for :term:`iterator` types, to be used in the :c:member:`PyTypeObject.tp_iter` slot.
+
+
.. c:function:: PyObject* PyObject_GetAIter(PyObject *o)
This is the equivalent to the Python expression ``aiter(o)``. Takes an
diff --git a/Doc/c-api/sys.rst b/Doc/c-api/sys.rst
index d6fca1a0b0a219..c688afdca8231d 100644
--- a/Doc/c-api/sys.rst
+++ b/Doc/c-api/sys.rst
@@ -426,3 +426,7 @@ Process Control
function registered last is called first. Each cleanup function will be called
at most once. Since Python's internal finalization will have completed before
the cleanup function, no Python APIs should be called by *func*.
+
+ .. seealso::
+
+ :c:func:`PyUnstable_AtExit` for passing a ``void *data`` argument.
diff --git a/Doc/c-api/typeobj.rst b/Doc/c-api/typeobj.rst
index e5a718912e3cef..34dbc699fa1c20 100644
--- a/Doc/c-api/typeobj.rst
+++ b/Doc/c-api/typeobj.rst
@@ -355,7 +355,7 @@ slot typedefs
+-----------------------------+-----------------------------+----------------------+
| :c:type:`newfunc` | .. line-block:: | :c:type:`PyObject` * |
| | | |
-| | :c:type:`PyObject` * | |
+| | :c:type:`PyTypeObject` * | |
| | :c:type:`PyObject` * | |
| | :c:type:`PyObject` * | |
+-----------------------------+-----------------------------+----------------------+
@@ -2618,7 +2618,7 @@ Slot Type typedefs
See :c:member:`~PyTypeObject.tp_free`.
-.. c:type:: PyObject *(*newfunc)(PyObject *, PyObject *, PyObject *)
+.. c:type:: PyObject *(*newfunc)(PyTypeObject *, PyObject *, PyObject *)
See :c:member:`~PyTypeObject.tp_new`.
diff --git a/Doc/c-api/unicode.rst b/Doc/c-api/unicode.rst
index d0a1b9ca1260bd..22e134c9f08ea8 100644
--- a/Doc/c-api/unicode.rst
+++ b/Doc/c-api/unicode.rst
@@ -786,16 +786,25 @@ Functions encoding to and decoding from the :term:`filesystem encoding and
error handler` (:pep:`383` and :pep:`529`).
To encode file names to :class:`bytes` during argument parsing, the ``"O&"``
-converter should be used, passing :c:func:`PyUnicode_FSConverter` as the
+converter should be used, passing :c:func:`!PyUnicode_FSConverter` as the
conversion function:
.. c:function:: int PyUnicode_FSConverter(PyObject* obj, void* result)
- ParseTuple converter: encode :class:`str` objects -- obtained directly or
+ :ref:`PyArg_Parse\* converter `: encode :class:`str` objects -- obtained directly or
through the :class:`os.PathLike` interface -- to :class:`bytes` using
:c:func:`PyUnicode_EncodeFSDefault`; :class:`bytes` objects are output as-is.
- *result* must be a :c:expr:`PyBytesObject*` which must be released when it is
- no longer used.
+ *result* must be an address of a C variable of type :c:expr:`PyObject*`
+ (or :c:expr:`PyBytesObject*`).
+ On success, set the variable to a new :term:`strong reference` to
+ a :ref:`bytes object ` which must be released
+ when it is no longer used and return a non-zero value
+ (:c:macro:`Py_CLEANUP_SUPPORTED`).
+ Embedded null bytes are not allowed in the result.
+ On failure, return ``0`` with an exception set.
+
+ If *obj* is ``NULL``, the function releases a strong reference
+ stored in the variable referred by *result* and returns ``1``.
.. versionadded:: 3.1
@@ -803,16 +812,26 @@ conversion function:
Accepts a :term:`path-like object`.
To decode file names to :class:`str` during argument parsing, the ``"O&"``
-converter should be used, passing :c:func:`PyUnicode_FSDecoder` as the
+converter should be used, passing :c:func:`!PyUnicode_FSDecoder` as the
conversion function:
.. c:function:: int PyUnicode_FSDecoder(PyObject* obj, void* result)
- ParseTuple converter: decode :class:`bytes` objects -- obtained either
+ :ref:`PyArg_Parse\* converter `: decode :class:`bytes` objects -- obtained either
directly or indirectly through the :class:`os.PathLike` interface -- to
:class:`str` using :c:func:`PyUnicode_DecodeFSDefaultAndSize`; :class:`str`
- objects are output as-is. *result* must be a :c:expr:`PyUnicodeObject*` which
- must be released when it is no longer used.
+ objects are output as-is.
+ *result* must be an address of a C variable of type :c:expr:`PyObject*`
+ (or :c:expr:`PyUnicodeObject*`).
+ On success, set the variable to a new :term:`strong reference` to
+ a :ref:`Unicode object ` which must be released
+ when it is no longer used and return a non-zero value
+ (:c:macro:`Py_CLEANUP_SUPPORTED`).
+ Embedded null characters are not allowed in the result.
+ On failure, return ``0`` with an exception set.
+
+ If *obj* is ``NULL``, release the strong reference
+ to the object referred to by *result* and return ``1``.
.. versionadded:: 3.2
@@ -1035,6 +1054,15 @@ These are the UTF-8 codec APIs:
As :c:func:`PyUnicode_AsUTF8AndSize`, but does not store the size.
+ .. warning::
+
+ This function does not have any special behavior for
+ `null characters `_ embedded within
+ *unicode*. As a result, strings containing null characters will remain in the returned
+ string, which some C functions might interpret as the end of the string, leading to
+ truncation. If truncation is an issue, it is recommended to use :c:func:`PyUnicode_AsUTF8AndSize`
+ instead.
+
.. versionadded:: 3.3
.. versionchanged:: 3.7
diff --git a/Doc/c-api/veryhigh.rst b/Doc/c-api/veryhigh.rst
index 9f02bdb5896563..1ef4181d52eb10 100644
--- a/Doc/c-api/veryhigh.rst
+++ b/Doc/c-api/veryhigh.rst
@@ -348,8 +348,20 @@ the same library that the Python runtime is using.
.. versionchanged:: 3.8
Added *cf_feature_version* field.
+ The available compiler flags are accessible as macros:
-.. c:var:: int CO_FUTURE_DIVISION
+ .. c:namespace:: NULL
- This bit can be set in *flags* to cause division operator ``/`` to be
- interpreted as "true division" according to :pep:`238`.
+ .. c:macro:: PyCF_ALLOW_TOP_LEVEL_AWAIT
+ PyCF_ONLY_AST
+ PyCF_OPTIMIZED_AST
+ PyCF_TYPE_COMMENTS
+
+ See :ref:`compiler flags ` in documentation of the
+ :py:mod:`!ast` Python module, which exports these constants under
+ the same names.
+
+ .. c:var:: int CO_FUTURE_DIVISION
+
+ This bit can be set in *flags* to cause division operator ``/`` to be
+ interpreted as "true division" according to :pep:`238`.
diff --git a/Doc/conf.py b/Doc/conf.py
index 73d7d5db26ff7b..00ff894a615536 100644
--- a/Doc/conf.py
+++ b/Doc/conf.py
@@ -9,9 +9,6 @@
import importlib
import os
import sys
-import time
-
-import sphinx
# Make our custom extensions available to Sphinx
sys.path.append(os.path.abspath('tools/extensions'))
@@ -28,8 +25,10 @@
'audit_events',
'availability',
'c_annotations',
+ 'changes',
'glossary_search',
'lexers',
+ 'misc_news',
'pyspecific',
'sphinx.ext.coverage',
'sphinx.ext.doctest',
@@ -67,10 +66,7 @@
# General substitutions.
project = 'Python'
-if sphinx.version_info[:2] >= (8, 1):
- copyright = "2001-%Y, Python Software Foundation"
-else:
- copyright = f"2001-{time.strftime('%Y')}, Python Software Foundation"
+copyright = "2001-%Y, Python Software Foundation"
# We look for the Include/patchlevel.h file in the current Python source tree
# and replace the values accordingly.
@@ -93,13 +89,12 @@
highlight_language = 'python3'
# Minimum version of sphinx required
-needs_sphinx = '7.2.6'
+# Keep this version in sync with ``Doc/requirements.txt``.
+needs_sphinx = '8.1.3'
# Create table of contents entries for domain objects (e.g. functions, classes,
# attributes, etc.). Default is True.
-toc_object_entries = True
-# Hide parents to tidy up long entries in sidebar
-toc_object_entries_show_parents = 'hide'
+toc_object_entries = False
# Ignore any .rst files in the includes/ directory;
# they're embedded in pages but not rendered as individual pages.
@@ -374,13 +369,7 @@
# This 'Last updated on:' timestamp is inserted at the bottom of every page.
html_last_updated_fmt = '%b %d, %Y (%H:%M UTC)'
-if sphinx.version_info[:2] >= (8, 1):
- html_last_updated_use_utc = True
-else:
- html_time = int(os.environ.get('SOURCE_DATE_EPOCH', time.time()))
- html_last_updated_fmt = time.strftime(
- html_last_updated_fmt, time.gmtime(html_time)
- )
+html_last_updated_use_utc = True
# Path to find HTML templates to override theme
templates_path = ['tools/templates']
@@ -564,8 +553,6 @@
r'https://github.com/python/cpython/tree/.*': 'https://github.com/python/cpython/blob/.*',
# Intentional HTTP use at Misc/NEWS.d/3.5.0a1.rst
r'http://www.python.org/$': 'https://www.python.org/$',
- # Used in license page, keep as is
- r'https://www.zope.org/': r'https://www.zope.dev/',
# Microsoft's redirects to learn.microsoft.com
r'https://msdn.microsoft.com/.*': 'https://learn.microsoft.com/.*',
r'https://docs.microsoft.com/.*': 'https://learn.microsoft.com/.*',
@@ -617,16 +604,6 @@
}
extlinks_detect_hardcoded_links = True
-if sphinx.version_info[:2] < (8, 1):
- # Sphinx 8.1 has in-built CVE and CWE roles.
- extlinks |= {
- "cve": (
- "https://www.cve.org/CVERecord?id=CVE-%s",
- "CVE-%s",
- ),
- "cwe": ("https://cwe.mitre.org/data/definitions/%s.html", "CWE-%s"),
- }
-
# Options for c_annotations extension
# -----------------------------------
diff --git a/Doc/constraints.txt b/Doc/constraints.txt
index 26ac1862dbac0b..29cd4be1d3c8db 100644
--- a/Doc/constraints.txt
+++ b/Doc/constraints.txt
@@ -13,14 +13,12 @@ packaging<25
Pygments<3
requests<3
snowballstemmer<3
-# keep lower-bounds until Sphinx 8.1 is released
-# https://github.com/sphinx-doc/sphinx/pull/12756
-sphinxcontrib-applehelp>=1.0.7,<3
-sphinxcontrib-devhelp>=1.0.6,<3
-sphinxcontrib-htmlhelp>=2.0.6,<3
-sphinxcontrib-jsmath>=1.0.1,<2
-sphinxcontrib-qthelp>=1.0.6,<3
-sphinxcontrib-serializinghtml>=1.1.9,<3
+sphinxcontrib-applehelp<3
+sphinxcontrib-devhelp<3
+sphinxcontrib-htmlhelp<3
+sphinxcontrib-jsmath<2
+sphinxcontrib-qthelp<3
+sphinxcontrib-serializinghtml<3
# Direct dependencies of Jinja2 (Jinja is a dependency of Sphinx, see above)
MarkupSafe<3
diff --git a/Doc/data/refcounts.dat b/Doc/data/refcounts.dat
index e5bb0bf78f7a69..99f4d59b558eae 100644
--- a/Doc/data/refcounts.dat
+++ b/Doc/data/refcounts.dat
@@ -1836,6 +1836,9 @@ PyObject_RichCompareBool:PyObject*:o1:0:
PyObject_RichCompareBool:PyObject*:o2:0:
PyObject_RichCompareBool:int:opid::
+PyObject_SelfIter:PyObject*::+1:
+PyObject_SelfIter:PyObject*:obj:0:
+
PyObject_SetAttr:int:::
PyObject_SetAttr:PyObject*:o:0:
PyObject_SetAttr:PyObject*:attr_name:0:
diff --git a/Doc/faq/programming.rst b/Doc/faq/programming.rst
index fa7b22bde1dc6f..776bab1ed5b779 100644
--- a/Doc/faq/programming.rst
+++ b/Doc/faq/programming.rst
@@ -1906,28 +1906,30 @@ In the standard library code, you will see several common patterns for
correctly using identity tests:
1) As recommended by :pep:`8`, an identity test is the preferred way to check
-for ``None``. This reads like plain English in code and avoids confusion with
-other objects that may have boolean values that evaluate to false.
+ for ``None``. This reads like plain English in code and avoids confusion
+ with other objects that may have boolean values that evaluate to false.
2) Detecting optional arguments can be tricky when ``None`` is a valid input
-value. In those situations, you can create a singleton sentinel object
-guaranteed to be distinct from other objects. For example, here is how
-to implement a method that behaves like :meth:`dict.pop`::
+ value. In those situations, you can create a singleton sentinel object
+ guaranteed to be distinct from other objects. For example, here is how
+ to implement a method that behaves like :meth:`dict.pop`:
- _sentinel = object()
+ .. code-block:: python
- def pop(self, key, default=_sentinel):
- if key in self:
- value = self[key]
- del self[key]
- return value
- if default is _sentinel:
- raise KeyError(key)
- return default
+ _sentinel = object()
+
+ def pop(self, key, default=_sentinel):
+ if key in self:
+ value = self[key]
+ del self[key]
+ return value
+ if default is _sentinel:
+ raise KeyError(key)
+ return default
3) Container implementations sometimes need to augment equality tests with
-identity tests. This prevents the code from being confused by objects such as
-``float('NaN')`` that are not equal to themselves.
+ identity tests. This prevents the code from being confused by objects
+ such as ``float('NaN')`` that are not equal to themselves.
For example, here is the implementation of
:meth:`!collections.abc.Sequence.__contains__`::
diff --git a/Doc/glossary.rst b/Doc/glossary.rst
index 5c56c419514da1..858c2b39ee5e33 100644
--- a/Doc/glossary.rst
+++ b/Doc/glossary.rst
@@ -110,7 +110,7 @@ Glossary
:keyword:`yield` expression.
Each :keyword:`yield` temporarily suspends processing, remembering the
- location execution state (including local variables and pending
+ execution state (including local variables and pending
try-statements). When the *asynchronous generator iterator* effectively
resumes with another awaitable returned by :meth:`~object.__anext__`, it
picks up where it left off. See :pep:`492` and :pep:`525`.
@@ -554,7 +554,7 @@ Glossary
An object created by a :term:`generator` function.
Each :keyword:`yield` temporarily suspends processing, remembering the
- location execution state (including local variables and pending
+ execution state (including local variables and pending
try-statements). When the *generator iterator* resumes, it picks up where
it left off (in contrast to functions which start fresh on every
invocation).
@@ -801,9 +801,11 @@ Glossary
processed.
loader
- An object that loads a module. It must define a method named
- :meth:`load_module`. A loader is typically returned by a
- :term:`finder`. See also:
+ An object that loads a module.
+ It must define the :meth:`!exec_module` and :meth:`!create_module` methods
+ to implement the :class:`~importlib.abc.Loader` interface.
+ A loader is typically returned by a :term:`finder`.
+ See also:
* :ref:`finders-and-loaders`
* :class:`importlib.abc.Loader`
diff --git a/Doc/howto/argparse-optparse.rst b/Doc/howto/argparse-optparse.rst
index cef2d893b28a62..b684619885b4c7 100644
--- a/Doc/howto/argparse-optparse.rst
+++ b/Doc/howto/argparse-optparse.rst
@@ -1,20 +1,14 @@
.. currentmodule:: argparse
.. _upgrading-optparse-code:
+.. _migrating-optparse-code:
-==========================
-Upgrading optparse code
-==========================
+============================================
+Migrating ``optparse`` code to ``argparse``
+============================================
-Originally, the :mod:`argparse` module had attempted to maintain compatibility
-with :mod:`optparse`. However, :mod:`optparse` was difficult to extend
-transparently, particularly with the changes required to support
-``nargs=`` specifiers and better usage messages. When most everything in
-:mod:`optparse` had either been copy-pasted over or monkey-patched, it no
-longer seemed practical to try to maintain the backwards compatibility.
-
-The :mod:`argparse` module improves on the :mod:`optparse`
-module in a number of ways including:
+The :mod:`argparse` module offers several higher level features not natively
+provided by the :mod:`optparse` module, including:
* Handling positional arguments.
* Supporting subcommands.
@@ -23,7 +17,23 @@ module in a number of ways including:
* Producing more informative usage messages.
* Providing a much simpler interface for custom ``type`` and ``action``.
-A partial upgrade path from :mod:`optparse` to :mod:`argparse`:
+Originally, the :mod:`argparse` module attempted to maintain compatibility
+with :mod:`optparse`. However, the fundamental design differences between
+supporting declarative command line option processing (while leaving positional
+argument processing to application code), and supporting both named options
+and positional arguments in the declarative interface mean that the
+API has diverged from that of ``optparse`` over time.
+
+As described in :ref:`choosing-an-argument-parser`, applications that are
+currently using :mod:`optparse` and are happy with the way it works can
+just continue to use ``optparse``.
+
+Application developers that are considering migrating should also review
+the list of intrinsic behavioural differences described in that section
+before deciding whether or not migration is desirable.
+
+For applications that do choose to migrate from :mod:`optparse` to :mod:`argparse`,
+the following suggestions should be helpful:
* Replace all :meth:`optparse.OptionParser.add_option` calls with
:meth:`ArgumentParser.add_argument` calls.
diff --git a/Doc/howto/argparse.rst b/Doc/howto/argparse.rst
index 1efbee64d60bb3..902c50de00803c 100644
--- a/Doc/howto/argparse.rst
+++ b/Doc/howto/argparse.rst
@@ -13,11 +13,16 @@ recommended command-line parsing module in the Python standard library.
.. note::
- There are two other modules that fulfill the same task, namely
- :mod:`getopt` (an equivalent for ``getopt()`` from the C
- language) and the deprecated :mod:`optparse`.
- Note also that :mod:`argparse` is based on :mod:`optparse`,
- and therefore very similar in terms of usage.
+ The standard library includes two other libraries directly related
+ to command-line parameter processing: the lower level :mod:`optparse`
+ module (which may require more code to configure for a given application,
+ but also allows an application to request behaviors that ``argparse``
+ doesn't support), and the very low level :mod:`getopt` (which specifically
+ serves as an equivalent to the :c:func:`!getopt` family of functions
+ available to C programmers).
+ While neither of those modules is covered directly in this guide, many of
+ the core concepts in ``argparse`` first originated in ``optparse``, so
+ some aspects of this tutorial will also be relevant to ``optparse`` users.
Concepts
diff --git a/Doc/howto/free-threading-python.rst b/Doc/howto/free-threading-python.rst
index b21e3287ecaa3f..cd920553a3a461 100644
--- a/Doc/howto/free-threading-python.rst
+++ b/Doc/howto/free-threading-python.rst
@@ -43,7 +43,7 @@ Identifying free-threaded Python
================================
To check if the current interpreter supports free-threading, :option:`python -VV <-V>`
-and :attr:`sys.version` contain "experimental free-threading build".
+and :data:`sys.version` contain "experimental free-threading build".
The new :func:`sys._is_gil_enabled` function can be used to check whether
the GIL is actually disabled in the running process.
diff --git a/Doc/howto/gdb_helpers.rst b/Doc/howto/gdb_helpers.rst
index 53bbf7ddaa2ab9..98ce813ca4ab02 100644
--- a/Doc/howto/gdb_helpers.rst
+++ b/Doc/howto/gdb_helpers.rst
@@ -180,7 +180,7 @@ regular machine-level integer::
(gdb) p some_python_integer
$4 = 42
-The internal structure can be revealed with a cast to :c:expr:`PyLongObject *`:
+The internal structure can be revealed with a cast to :c:expr:`PyLongObject *`::
(gdb) p *(PyLongObject*)some_python_integer
$5 = {ob_base = {ob_base = {ob_refcnt = 8, ob_type = 0x3dad39f5e0}, ob_size = 1},
diff --git a/Doc/howto/mro.rst b/Doc/howto/mro.rst
index 46db516e16dae4..0872bedcd3a2d3 100644
--- a/Doc/howto/mro.rst
+++ b/Doc/howto/mro.rst
@@ -398,7 +398,7 @@ with inheritance diagram
We see that class G inherits from F and E, with F *before* E: therefore
we would expect the attribute *G.remember2buy* to be inherited by
-*F.rembermer2buy* and not by *E.remember2buy*: nevertheless Python 2.2
+*F.remember2buy* and not by *E.remember2buy*: nevertheless Python 2.2
gives
>>> G.remember2buy # doctest: +SKIP
diff --git a/Doc/library/allos.rst b/Doc/library/allos.rst
index 0223c1054ea5d8..1aed340b2527ac 100644
--- a/Doc/library/allos.rst
+++ b/Doc/library/allos.rst
@@ -15,14 +15,9 @@ but they are available on most other systems as well. Here's an overview:
os.rst
io.rst
time.rst
- argparse.rst
logging.rst
logging.config.rst
logging.handlers.rst
- getpass.rst
- curses.rst
- curses.ascii.rst
- curses.panel.rst
platform.rst
errno.rst
ctypes.rst
diff --git a/Doc/library/argparse.rst b/Doc/library/argparse.rst
index e002212199e89b..d4adc164b1bc3d 100644
--- a/Doc/library/argparse.rst
+++ b/Doc/library/argparse.rst
@@ -11,6 +11,18 @@
**Source code:** :source:`Lib/argparse.py`
+.. note::
+
+ While :mod:`argparse` is the default recommended standard library module
+ for implementing basic command line applications, authors with more
+ exacting requirements for exactly how their command line applications
+ behave may find it doesn't provide the necessary level of control.
+ Refer to :ref:`choosing-an-argument-parser` for alternatives to
+ consider when ``argparse`` doesn't support behaviors that the application
+ requires (such as entirely disabling support for interspersed options and
+ positional arguments, or accepting option parameter values that start
+ with ``-`` even when they correspond to another defined option).
+
--------------
.. sidebar:: Tutorial
diff --git a/Doc/library/ast.rst b/Doc/library/ast.rst
index bb126b790270d6..ce261abc2a347d 100644
--- a/Doc/library/ast.rst
+++ b/Doc/library/ast.rst
@@ -1802,7 +1802,7 @@ aliases.
.. doctest::
- >>> print(ast.dump(ast.parse("type Alias[**P = (int, str)] = Callable[P, int]"), indent=4))
+ >>> print(ast.dump(ast.parse("type Alias[**P = [int, str]] = Callable[P, int]"), indent=4))
Module(
body=[
TypeAlias(
@@ -1810,7 +1810,7 @@ aliases.
type_params=[
ParamSpec(
name='P',
- default_value=Tuple(
+ default_value=List(
elts=[
Name(id='int', ctx=Load()),
Name(id='str', ctx=Load())],
diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst
index 8d7d0377c132b8..8027d3525e5999 100644
--- a/Doc/library/asyncio-eventloop.rst
+++ b/Doc/library/asyncio-eventloop.rst
@@ -236,6 +236,9 @@ Scheduling callbacks
another thread, this function *must* be used, since :meth:`call_soon` is not
thread-safe.
+ This function is safe to be called from a reentrant context or signal handler,
+ however, it is not safe or fruitful to use the returned handle in such contexts.
+
Raises :exc:`RuntimeError` if called on a loop that's been closed.
This can happen on a secondary thread when the main application is
shutting down.
@@ -957,6 +960,9 @@ Watching file descriptors
invoke *callback* with the specified arguments once *fd* is available for
reading.
+ Any preexisting callback registered for *fd* is cancelled and replaced by
+ *callback*.
+
.. method:: loop.remove_reader(fd)
Stop monitoring the *fd* file descriptor for read availability. Returns
@@ -968,6 +974,9 @@ Watching file descriptors
invoke *callback* with the specified arguments once *fd* is available for
writing.
+ Any preexisting callback registered for *fd* is cancelled and replaced by
+ *callback*.
+
Use :func:`functools.partial` :ref:`to pass keyword arguments
` to *callback*.
diff --git a/Doc/library/asyncio-queue.rst b/Doc/library/asyncio-queue.rst
index 61991bf2f4ed1d..066edd424d150e 100644
--- a/Doc/library/asyncio-queue.rst
+++ b/Doc/library/asyncio-queue.rst
@@ -115,11 +115,11 @@ Queue
.. method:: task_done()
- Indicate that a formerly enqueued task is complete.
+ Indicate that a formerly enqueued work item is complete.
Used by queue consumers. For each :meth:`~Queue.get` used to
- fetch a task, a subsequent call to :meth:`task_done` tells the
- queue that the processing on the task is complete.
+ fetch a work item, a subsequent call to :meth:`task_done` tells the
+ queue that the processing on the work item is complete.
If a :meth:`join` is currently blocking, it will resume when all
items have been processed (meaning that a :meth:`task_done`
diff --git a/Doc/library/calendar.rst b/Doc/library/calendar.rst
index eafc038d6cb722..086bfcf2e3a909 100644
--- a/Doc/library/calendar.rst
+++ b/Doc/library/calendar.rst
@@ -38,13 +38,33 @@ interpreted as prescribed by the ISO 8601 standard. Year 0 is 1 BC, year -1 is
itself. This is the job of subclasses.
- :class:`Calendar` instances have the following methods:
+ :class:`Calendar` instances have the following methods and attributes:
+
+ .. attribute:: firstweekday
+
+ The first weekday as an integer (0--6).
+
+ This property can also be set and read using
+ :meth:`~Calendar.setfirstweekday` and
+ :meth:`~Calendar.getfirstweekday` respectively.
+
+ .. method:: getfirstweekday()
+
+ Return an :class:`int` for the current first weekday (0--6).
+
+ Identical to reading the :attr:`~Calendar.firstweekday` property.
+
+ .. method:: setfirstweekday(firstweekday)
+
+ Set the first weekday to *firstweekday*, passed as an :class:`int` (0--6).
+
+ Identical to setting the :attr:`~Calendar.firstweekday` property.
.. method:: iterweekdays()
Return an iterator for the week day numbers that will be used for one
week. The first value from the iterator will be the same as the value of
- the :attr:`firstweekday` property.
+ the :attr:`~Calendar.firstweekday` property.
.. method:: itermonthdates(year, month)
@@ -138,6 +158,33 @@ interpreted as prescribed by the ISO 8601 standard. Year 0 is 1 BC, year -1 is
:class:`TextCalendar` instances have the following methods:
+
+ .. method:: formatday(theday, weekday, width)
+
+ Return a string representing a single day formatted with the given *width*.
+ If *theday* is ``0``, return a string of spaces of
+ the specified width, representing an empty day. The *weekday* parameter
+ is unused.
+
+ .. method:: formatweek(theweek, w=0)
+
+ Return a single week in a string with no newline. If *w* is provided, it
+ specifies the width of the date columns, which are centered. Depends
+ on the first weekday as specified in the constructor or set by the
+ :meth:`setfirstweekday` method.
+
+ .. method:: formatweekday(weekday, width)
+
+ Return a string representing the name of a single weekday formatted to
+ the specified *width*. The *weekday* parameter is an integer representing
+ the day of the week, where ``0`` is Monday and ``6`` is Sunday.
+
+ .. method:: formatweekheader(width)
+
+ Return a string containing the header row of weekday names, formatted
+ with the given *width* for each column. The names depend on the locale
+ settings and are padded to the specified width.
+
.. method:: formatmonth(theyear, themonth, w=0, l=0)
Return a month's calendar in a multi-line string. If *w* is provided, it
@@ -146,6 +193,12 @@ interpreted as prescribed by the ISO 8601 standard. Year 0 is 1 BC, year -1 is
on the first weekday as specified in the constructor or set by the
:meth:`setfirstweekday` method.
+ .. method:: formatmonthname(theyear, themonth, width=0, withyear=True)
+
+ Return a string representing the month's name centered within the
+ specified *width*. If *withyear* is ``True``, include the year in the
+ output. The *theyear* and *themonth* parameters specify the year
+ and month for the name to be formatted respectively.
.. method:: prmonth(theyear, themonth, w=0, l=0)
@@ -437,7 +490,7 @@ The :mod:`calendar` module exports the following data attributes:
A sequence that represents the months of the year in the current locale. This
follows normal convention of January being month number 1, so it has a length of
- 13 and ``month_name[0]`` is the empty string.
+ 13 and ``month_name[0]`` is the empty string.
>>> import calendar
>>> list(calendar.month_name)
diff --git a/Doc/library/cmdlinelibs.rst b/Doc/library/cmdlinelibs.rst
new file mode 100644
index 00000000000000..085d31af7bca1f
--- /dev/null
+++ b/Doc/library/cmdlinelibs.rst
@@ -0,0 +1,21 @@
+.. _cmdlinelibs:
+
+********************************
+Command Line Interface Libraries
+********************************
+
+The modules described in this chapter assist with implementing
+command line and terminal interfaces for applications.
+
+Here's an overview:
+
+.. toctree::
+ :maxdepth: 1
+
+ argparse.rst
+ optparse.rst
+ getpass.rst
+ fileinput.rst
+ curses.rst
+ curses.ascii.rst
+ curses.panel.rst
diff --git a/Doc/library/collections.abc.rst b/Doc/library/collections.abc.rst
index 0adbd305b468f4..850853346991b5 100644
--- a/Doc/library/collections.abc.rst
+++ b/Doc/library/collections.abc.rst
@@ -146,7 +146,8 @@ ABC Inherits from Abstract Methods Mi
:class:`Set` :class:`Collection` ``__contains__``, ``__le__``, ``__lt__``, ``__eq__``, ``__ne__``,
``__iter__``, ``__gt__``, ``__ge__``, ``__and__``, ``__or__``,
- ``__len__`` ``__sub__``, ``__xor__``, and ``isdisjoint``
+ ``__len__`` ``__sub__``, ``__rsub__``, ``__xor__``, ``__rxor__``
+ and ``isdisjoint``
:class:`MutableSet` :class:`Set` ``__contains__``, Inherited :class:`Set` methods and
``__iter__``, ``clear``, ``pop``, ``remove``, ``__ior__``,
@@ -165,7 +166,7 @@ ABC Inherits from Abstract Methods Mi
``__len__``
-:class:`MappingView` :class:`Sized` ``__len__``
+:class:`MappingView` :class:`Sized` ``__init__``, ``__len__`` and ``__repr__``
:class:`ItemsView` :class:`MappingView`, ``__contains__``,
:class:`Set` ``__iter__``
:class:`KeysView` :class:`MappingView`, ``__contains__``,
diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst
index 211e625c4303b0..4c7d3cd119dd01 100644
--- a/Doc/library/datetime.rst
+++ b/Doc/library/datetime.rst
@@ -93,7 +93,7 @@ The :mod:`!datetime` module exports the following constants:
The largest year number allowed in a :class:`date` or :class:`.datetime` object.
:const:`MAXYEAR` is 9999.
-.. attribute:: UTC
+.. data:: UTC
Alias for the UTC time zone singleton :attr:`datetime.timezone.utc`.
@@ -937,7 +937,7 @@ Other constructors, all class methods:
.. deprecated:: 3.12
- Use :meth:`datetime.now` with :attr:`UTC` instead.
+ Use :meth:`datetime.now` with :const:`UTC` instead.
.. classmethod:: datetime.fromtimestamp(timestamp, tz=None)
@@ -1009,7 +1009,7 @@ Other constructors, all class methods:
.. deprecated:: 3.12
- Use :meth:`datetime.fromtimestamp` with :attr:`UTC` instead.
+ Use :meth:`datetime.fromtimestamp` with :const:`UTC` instead.
.. classmethod:: datetime.fromordinal(ordinal)
diff --git a/Doc/library/decimal.rst b/Doc/library/decimal.rst
index 916f17cadfaa7e..2332b6fc62709d 100644
--- a/Doc/library/decimal.rst
+++ b/Doc/library/decimal.rst
@@ -1016,7 +1016,7 @@ New contexts can also be created using the :class:`Context` constructor
described below. In addition, the module provides three pre-made contexts:
-.. class:: BasicContext
+.. data:: BasicContext
This is a standard context defined by the General Decimal Arithmetic
Specification. Precision is set to nine. Rounding is set to
@@ -1027,7 +1027,7 @@ described below. In addition, the module provides three pre-made contexts:
Because many of the traps are enabled, this context is useful for debugging.
-.. class:: ExtendedContext
+.. data:: ExtendedContext
This is a standard context defined by the General Decimal Arithmetic
Specification. Precision is set to nine. Rounding is set to
@@ -1040,7 +1040,7 @@ described below. In addition, the module provides three pre-made contexts:
presence of conditions that would otherwise halt the program.
-.. class:: DefaultContext
+.. data:: DefaultContext
This context is used by the :class:`Context` constructor as a prototype for new
contexts. Changing a field (such as precision) has the effect of changing the
@@ -2245,7 +2245,7 @@ value for :attr:`~Context.prec` as well [#]_::
Decimal('904625697166532776746648320380374280103671755200316906558262375061821325312')
-For inexact results, :attr:`MAX_PREC` is far too large on 64-bit platforms and
+For inexact results, :const:`MAX_PREC` is far too large on 64-bit platforms and
the available memory will be insufficient::
>>> Decimal(1) / 3
diff --git a/Doc/library/email.contentmanager.rst b/Doc/library/email.contentmanager.rst
index a86e227429b06d..b33fe82a6e4c9f 100644
--- a/Doc/library/email.contentmanager.rst
+++ b/Doc/library/email.contentmanager.rst
@@ -157,7 +157,13 @@ Currently the email package provides only one concrete content manager,
:exc:`ValueError`.
* For ``str`` objects, if *cte* is not set use heuristics to
- determine the most compact encoding.
+ determine the most compact encoding. Prior to encoding,
+ :meth:`str.splitlines` is used to normalize all line boundaries,
+ ensuring that each line of the payload is terminated by the
+ current policy's :data:`~email.policy.Policy.linesep` property
+ (even if the original string did not end with one).
+ * For ``bytes`` objects, *cte* is taken to be base64 if not set,
+ and the aforementioned newline translation is not performed.
* For :class:`~email.message.EmailMessage`, per :rfc:`2046`, raise
an error if a *cte* of ``quoted-printable`` or ``base64`` is
requested for *subtype* ``rfc822``, and for any *cte* other than
diff --git a/Doc/library/email.policy.rst b/Doc/library/email.policy.rst
index 314767d0802a08..6b997ee784f6e4 100644
--- a/Doc/library/email.policy.rst
+++ b/Doc/library/email.policy.rst
@@ -267,7 +267,7 @@ added matters. To illustrate::
Handle a *defect* found on *obj*. When the email package calls this
method, *defect* will always be a subclass of
- :class:`~email.errors.Defect`.
+ :class:`~email.errors.MessageDefect`.
The default implementation checks the :attr:`raise_on_defect` flag. If
it is ``True``, *defect* is raised as an exception. If it is ``False``
@@ -277,7 +277,7 @@ added matters. To illustrate::
.. method:: register_defect(obj, defect)
Register a *defect* on *obj*. In the email package, *defect* will always
- be a subclass of :class:`~email.errors.Defect`.
+ be a subclass of :class:`~email.errors.MessageDefect`.
The default implementation calls the ``append`` method of the ``defects``
attribute of *obj*. When the email package calls :attr:`handle_defect`,
diff --git a/Doc/library/enum.rst b/Doc/library/enum.rst
index 2df9096c452761..24c0cf26496fed 100644
--- a/Doc/library/enum.rst
+++ b/Doc/library/enum.rst
@@ -110,6 +110,10 @@ Module Contents
``KEEP`` which allows for more fine-grained control over how invalid values
are dealt with in an enumeration.
+ :class:`EnumDict`
+
+ A subclass of :class:`dict` for use when subclassing :class:`EnumType`.
+
:class:`auto`
Instances are replaced with an appropriate value for Enum members.
@@ -152,6 +156,7 @@ Module Contents
.. versionadded:: 3.6 ``Flag``, ``IntFlag``, ``auto``
.. versionadded:: 3.11 ``StrEnum``, ``EnumCheck``, ``ReprEnum``, ``FlagBoundary``, ``property``, ``member``, ``nonmember``, ``global_enum``, ``show_flag_values``
+.. versionadded:: 3.13 ``EnumDict``
---------------
@@ -821,7 +826,27 @@ Data Types
>>> KeepFlag(2**2 + 2**4)
-.. versionadded:: 3.11
+ .. versionadded:: 3.11
+
+.. class:: EnumDict
+
+ *EnumDict* is a subclass of :class:`dict` that is used as the namespace
+ for defining enum classes (see :ref:`prepare`).
+ It is exposed to allow subclasses of :class:`EnumType` with advanced
+ behavior like having multiple values per member.
+ It should be called with the name of the enum class being created, otherwise
+ private names and internal classes will not be handled correctly.
+
+ Note that only the :class:`~collections.abc.MutableMapping` interface
+ (:meth:`~object.__setitem__` and :meth:`~dict.update`) is overridden.
+ It may be possible to bypass the checks using other :class:`!dict`
+ operations like :meth:`|= <object.__ior__>`.
+
+ .. attribute:: EnumDict.member_names
+
+ A list of member names.
+
+ .. versionadded:: 3.13
---------------
@@ -966,7 +991,6 @@ Utilities and Decorators
Should only be used when the enum members are exported
to the module global namespace (see :class:`re.RegexFlag` for an example).
-
.. versionadded:: 3.11
.. function:: show_flag_values(value)
@@ -975,6 +999,7 @@ Utilities and Decorators
.. versionadded:: 3.11
+
---------------
Notes
diff --git a/Doc/library/errno.rst b/Doc/library/errno.rst
index 4983b8961b1c3f..ffebe8ee485eb6 100644
--- a/Doc/library/errno.rst
+++ b/Doc/library/errno.rst
@@ -665,6 +665,171 @@ defined by the module. The specific list of defined symbols is available as
.. versionadded:: 3.11
+
+.. data:: ENOMEDIUM
+
+ No medium found
+
+
+.. data:: EMEDIUMTYPE
+
+ Wrong medium type
+
+
+.. data:: ENOKEY
+
+ Required key not available
+
+
+.. data:: EKEYEXPIRED
+
+ Key has expired
+
+
+.. data:: EKEYREVOKED
+
+ Key has been revoked
+
+
+.. data:: EKEYREJECTED
+
+ Key was rejected by service
+
+
+.. data:: ERFKILL
+
+ Operation not possible due to RF-kill
+
+
+.. data:: ELOCKUNMAPPED
+
+ Locked lock was unmapped
+
+
+.. data:: ENOTACTIVE
+
+ Facility is not active
+
+
+.. data:: EAUTH
+
+ Authentication error
+
+ .. versionadded:: 3.2
+
+
+.. data:: EBADARCH
+
+ Bad CPU type in executable
+
+ .. versionadded:: 3.2
+
+
+.. data:: EBADEXEC
+
+ Bad executable (or shared library)
+
+ .. versionadded:: 3.2
+
+
+.. data:: EBADMACHO
+
+ Malformed Mach-o file
+
+ .. versionadded:: 3.2
+
+
+.. data:: EDEVERR
+
+ Device error
+
+ .. versionadded:: 3.2
+
+
+.. data:: EFTYPE
+
+ Inappropriate file type or format
+
+ .. versionadded:: 3.2
+
+
+.. data:: ENEEDAUTH
+
+ Need authenticator
+
+ .. versionadded:: 3.2
+
+
+.. data:: ENOATTR
+
+ Attribute not found
+
+ .. versionadded:: 3.2
+
+
+.. data:: ENOPOLICY
+
+ Policy not found
+
+ .. versionadded:: 3.2
+
+
+.. data:: EPROCLIM
+
+ Too many processes
+
+ .. versionadded:: 3.2
+
+
+.. data:: EPROCUNAVAIL
+
+ Bad procedure for program
+
+ .. versionadded:: 3.2
+
+
+.. data:: EPROGMISMATCH
+
+ Program version wrong
+
+ .. versionadded:: 3.2
+
+
+.. data:: EPROGUNAVAIL
+
+ RPC prog. not avail
+
+ .. versionadded:: 3.2
+
+
+.. data:: EPWROFF
+
+ Device power is off
+
+ .. versionadded:: 3.2
+
+
+.. data:: EBADRPC
+
+ RPC struct is bad
+
+ .. versionadded:: 3.2
+
+
+.. data:: ERPCMISMATCH
+
+ RPC version wrong
+
+ .. versionadded:: 3.2
+
+
+.. data:: ESHLIBVERS
+
+ Shared library version mismatch
+
+ .. versionadded:: 3.2
+
+
.. data:: ENOTCAPABLE
Capabilities insufficient. This error is mapped to the exception
diff --git a/Doc/library/exceptions.rst b/Doc/library/exceptions.rst
index b5ba86f1b19223..fdbcc2176ae46a 100644
--- a/Doc/library/exceptions.rst
+++ b/Doc/library/exceptions.rst
@@ -562,9 +562,13 @@ The following exceptions are the exceptions that are usually raised.
Raised when the interpreter finds an internal error, but the situation does not
look so serious to cause it to abandon all hope. The associated value is a
- string indicating what went wrong (in low-level terms).
+ string indicating what went wrong (in low-level terms). In :term:`CPython`,
+ this could be raised by incorrectly using Python's C API, such as returning
+ a ``NULL`` value without an exception set.
- You should report this to the author or maintainer of your Python interpreter.
+ If you're confident that this exception wasn't your fault, or the fault of
+ a package you're using, you should report this to the author or maintainer
+ of your Python interpreter.
Be sure to report the version of the Python interpreter (``sys.version``; it is
also printed at the start of an interactive Python session), the exact error
message (the exception's associated value) and if possible the source of the
diff --git a/Doc/library/filecmp.rst b/Doc/library/filecmp.rst
index 282d0e0d8db5cf..abd1b8c826d170 100644
--- a/Doc/library/filecmp.rst
+++ b/Doc/library/filecmp.rst
@@ -189,7 +189,7 @@ The :class:`dircmp` class
are the same type as *self*, if *self* is a subclass of
:class:`dircmp`.
-.. attribute:: DEFAULT_IGNORES
+.. data:: DEFAULT_IGNORES
.. versionadded:: 3.4
diff --git a/Doc/library/filesys.rst b/Doc/library/filesys.rst
index 0ccf2b7bf59a0f..f1ea4761af7cb1 100644
--- a/Doc/library/filesys.rst
+++ b/Doc/library/filesys.rst
@@ -14,7 +14,6 @@ in this chapter is:
pathlib.rst
os.path.rst
- fileinput.rst
stat.rst
filecmp.rst
tempfile.rst
diff --git a/Doc/library/fnmatch.rst b/Doc/library/fnmatch.rst
index fda44923f204fc..5cb47777ae527d 100644
--- a/Doc/library/fnmatch.rst
+++ b/Doc/library/fnmatch.rst
@@ -46,9 +46,15 @@ module. See module :mod:`glob` for pathname expansion (:mod:`glob` uses
a period are not special for this module, and are matched by the ``*`` and ``?``
patterns.
-Also note that :func:`functools.lru_cache` with the *maxsize* of 32768 is used to
-cache the compiled regex patterns in the following functions: :func:`fnmatch`,
-:func:`fnmatchcase`, :func:`.filter`.
+Unless stated otherwise, "filename string" and "pattern string" either refer to
+:class:`str` or ``ISO-8859-1`` encoded :class:`bytes` objects. Note that the
+functions documented below do not allow mixing a :class:`!bytes` pattern with
+a :class:`!str` filename, and vice-versa.
+
+Finally, note that :func:`functools.lru_cache` with a *maxsize* of 32768
+is used to cache the (typed) compiled regex patterns in the following
+functions: :func:`fnmatch`, :func:`fnmatchcase`, :func:`.filter`.
+
.. function:: fnmatch(name, pat)
@@ -78,8 +84,8 @@ cache the compiled regex patterns in the following functions: :func:`fnmatch`,
.. function:: filter(names, pat)
- Construct a list from those elements of the :term:`iterable` *names*
- that match pattern *pat*.
+ Construct a list from those elements of the :term:`iterable` of filename
+ strings *names* that match the pattern string *pat*.
It is the same as ``[n for n in names if fnmatch(n, pat)]``,
but implemented more efficiently.
@@ -87,7 +93,7 @@ cache the compiled regex patterns in the following functions: :func:`fnmatch`,
.. function:: translate(pat)
Return the shell-style pattern *pat* converted to a regular expression for
- using with :func:`re.match`.
+ using with :func:`re.match`. The pattern is expected to be a :class:`str`.
Example:
diff --git a/Doc/library/getopt.rst b/Doc/library/getopt.rst
index 3ab44b9fc56108..16ebb929f46002 100644
--- a/Doc/library/getopt.rst
+++ b/Doc/library/getopt.rst
@@ -7,18 +7,13 @@
**Source code:** :source:`Lib/getopt.py`
-.. deprecated:: 3.13
- The :mod:`getopt` module is :term:`soft deprecated` and will not be
- developed further; development will continue with the :mod:`argparse`
- module.
-
.. note::
- The :mod:`getopt` module is a parser for command line options whose API is
- designed to be familiar to users of the C :c:func:`!getopt` function. Users who
- are unfamiliar with the C :c:func:`!getopt` function or who would like to write
- less code and get better help and error messages should consider using the
- :mod:`argparse` module instead.
+ This module is considered feature complete. A more declarative and
+ extensible alternative to this API is provided in the :mod:`optparse`
+ module. Further functional enhancements for command line parameter
+ processing are provided either as third party modules on PyPI,
+ or else as features in the :mod:`argparse` module.
--------------
@@ -28,6 +23,13 @@ the special meanings of arguments of the form '``-``' and '``--``'). Long
options similar to those supported by GNU software may be used as well via an
optional third argument.
+Users who are unfamiliar with the Unix :c:func:`!getopt` function should consider
+using the :mod:`argparse` module instead. Users who are familiar with the Unix
+:c:func:`!getopt` function, but would like to get equivalent behavior while
+writing less code and getting better help and error messages should consider
+using the :mod:`optparse` module. See :ref:`choosing-an-argument-parser` for
+additional details.
+
This module provides two functions and an
exception:
@@ -150,13 +152,27 @@ In a script, typical usage is something like this:
output = a
else:
assert False, "unhandled option"
- # ...
+ process(args, output=output, verbose=verbose)
if __name__ == "__main__":
main()
Note that an equivalent command line interface could be produced with less code
-and more informative help and error messages by using the :mod:`argparse` module:
+and more informative help and error messages by using the :mod:`optparse` module:
+
+.. testcode::
+
+ import optparse
+
+ if __name__ == '__main__':
+ parser = optparse.OptionParser()
+ parser.add_option('-o', '--output')
+ parser.add_option('-v', dest='verbose', action='store_true')
+ opts, args = parser.parse_args()
+ process(args, output=opts.output, verbose=opts.verbose)
+
+A roughly equivalent command line interface for this case can also be
+produced by using the :mod:`argparse` module:
.. testcode::
@@ -166,12 +182,18 @@ and more informative help and error messages by using the :mod:`argparse` module
parser = argparse.ArgumentParser()
parser.add_argument('-o', '--output')
parser.add_argument('-v', dest='verbose', action='store_true')
+ parser.add_argument('rest', nargs='*')
args = parser.parse_args()
- # ... do something with args.output ...
- # ... do something with args.verbose ..
+ process(args.rest, output=args.output, verbose=args.verbose)
+
+See :ref:`choosing-an-argument-parser` for details on how the ``argparse``
+version of this code differs in behaviour from the ``optparse`` (and
+``getopt``) version.
.. seealso::
- Module :mod:`argparse`
- Alternative command line option and argument parsing library.
+ Module :mod:`optparse`
+ Declarative command line option parsing.
+ Module :mod:`argparse`
+ More opinionated command line option and argument parsing library.
diff --git a/Doc/library/http.cookies.rst b/Doc/library/http.cookies.rst
index 4ce2e3c4f4cb42..ad37a0fca4742d 100644
--- a/Doc/library/http.cookies.rst
+++ b/Doc/library/http.cookies.rst
@@ -98,7 +98,7 @@ Cookie Objects
.. method:: BaseCookie.output(attrs=None, header='Set-Cookie:', sep='\r\n')
Return a string representation suitable to be sent as HTTP headers. *attrs* and
- *header* are sent to each :class:`Morsel`'s :meth:`output` method. *sep* is used
+ *header* are sent to each :class:`Morsel`'s :meth:`~Morsel.output` method. *sep* is used
to join the headers together, and is by default the combination ``'\r\n'``
(CRLF).
diff --git a/Doc/library/importlib.metadata.rst b/Doc/library/importlib.metadata.rst
index c38da0bd341fc9..87f40301a9953d 100644
--- a/Doc/library/importlib.metadata.rst
+++ b/Doc/library/importlib.metadata.rst
@@ -375,7 +375,7 @@ Mapping import to distribution packages
.. function:: packages_distributions()
Return a mapping from the top level module and import package
- names found via :attr:`sys.meta_path` to the names of the distribution
+ names found via :data:`sys.meta_path` to the names of the distribution
packages (if any) that provide the corresponding files.
To allow for namespace packages (which may have members provided by
diff --git a/Doc/library/importlib.resources.abc.rst b/Doc/library/importlib.resources.abc.rst
index 54995ddbfbca12..4085bdf6598d98 100644
--- a/Doc/library/importlib.resources.abc.rst
+++ b/Doc/library/importlib.resources.abc.rst
@@ -43,7 +43,7 @@
:const:`None`. An object compatible with this ABC should only be
returned when the specified module is a package.
- .. deprecated-removed:: 3.12 3.14
+ .. deprecated:: 3.12
Use :class:`importlib.resources.abc.TraversableResources` instead.
.. abstractmethod:: open_resource(resource)
diff --git a/Doc/library/importlib.rst b/Doc/library/importlib.rst
index de41104216d4fb..3d56ebd75771ee 100644
--- a/Doc/library/importlib.rst
+++ b/Doc/library/importlib.rst
@@ -380,13 +380,15 @@ ABC hierarchy::
.. class:: ResourceLoader
+ *Superseded by TraversableResources*
+
An abstract base class for a :term:`loader` which implements the optional
:pep:`302` protocol for loading arbitrary resources from the storage
back-end.
.. deprecated:: 3.7
This ABC is deprecated in favour of supporting resource loading
- through :class:`importlib.resources.abc.ResourceReader`.
+ through :class:`importlib.resources.abc.TraversableResources`.
.. abstractmethod:: get_data(path)
@@ -744,7 +746,7 @@ ABC hierarchy::
suitable for reading (same as :attr:`pathlib.Path.open`).
When opening as text, accepts encoding parameters such as those
- accepted by :attr:`io.TextIOWrapper`.
+ accepted by :class:`io.TextIOWrapper`.
.. method:: read_bytes()
@@ -792,14 +794,14 @@ ABC hierarchy::
This module contains the various objects that help :keyword:`import`
find and load modules.
-.. attribute:: SOURCE_SUFFIXES
+.. data:: SOURCE_SUFFIXES
A list of strings representing the recognized file suffixes for source
modules.
.. versionadded:: 3.3
-.. attribute:: DEBUG_BYTECODE_SUFFIXES
+.. data:: DEBUG_BYTECODE_SUFFIXES
A list of strings representing the file suffixes for non-optimized bytecode
modules.
@@ -807,9 +809,9 @@ find and load modules.
.. versionadded:: 3.3
.. deprecated:: 3.5
- Use :attr:`BYTECODE_SUFFIXES` instead.
+ Use :const:`BYTECODE_SUFFIXES` instead.
-.. attribute:: OPTIMIZED_BYTECODE_SUFFIXES
+.. data:: OPTIMIZED_BYTECODE_SUFFIXES
A list of strings representing the file suffixes for optimized bytecode
modules.
@@ -817,9 +819,9 @@ find and load modules.
.. versionadded:: 3.3
.. deprecated:: 3.5
- Use :attr:`BYTECODE_SUFFIXES` instead.
+ Use :const:`BYTECODE_SUFFIXES` instead.
-.. attribute:: BYTECODE_SUFFIXES
+.. data:: BYTECODE_SUFFIXES
A list of strings representing the recognized file suffixes for bytecode
modules (including the leading dot).
@@ -829,7 +831,7 @@ find and load modules.
.. versionchanged:: 3.5
The value is no longer dependent on ``__debug__``.
-.. attribute:: EXTENSION_SUFFIXES
+.. data:: EXTENSION_SUFFIXES
A list of strings representing the recognized file suffixes for
extension modules.
@@ -1107,7 +1109,7 @@ find and load modules.
.. method:: is_package(fullname)
Returns ``True`` if the file path points to a package's ``__init__``
- module based on :attr:`EXTENSION_SUFFIXES`.
+ module based on :const:`EXTENSION_SUFFIXES`.
.. method:: get_code(fullname)
@@ -1292,7 +1294,7 @@ find and load modules.
This module contains the various objects that help in the construction of
an :term:`importer`.
-.. attribute:: MAGIC_NUMBER
+.. data:: MAGIC_NUMBER
The bytes which represent the bytecode version number. If you need help with
loading/writing bytecode then consider :class:`importlib.abc.SourceLoader`.
diff --git a/Doc/library/index.rst b/Doc/library/index.rst
index 951fbcf13fbb13..44b218948d07e1 100644
--- a/Doc/library/index.rst
+++ b/Doc/library/index.rst
@@ -55,6 +55,7 @@ the `Python Package Index <https://pypi.org>`_.
fileformats.rst
crypto.rst
allos.rst
+ cmdlinelibs.rst
concurrency.rst
ipc.rst
netdata.rst
diff --git a/Doc/library/itertools.rst b/Doc/library/itertools.rst
index 79b729e36f931a..e487fc4d553580 100644
--- a/Doc/library/itertools.rst
+++ b/Doc/library/itertools.rst
@@ -30,11 +30,6 @@ For instance, SML provides a tabulation tool: ``tabulate(f)`` which produces a
sequence ``f(0), f(1), ...``. The same effect can be achieved in Python
by combining :func:`map` and :func:`count` to form ``map(f, count())``.
-These tools and their built-in counterparts also work well with the high-speed
-functions in the :mod:`operator` module. For example, the multiplication
-operator can be mapped across two vectors to form an efficient dot-product:
-``sum(starmap(operator.mul, zip(vec1, vec2, strict=True)))``.
-
**Infinite iterators:**
@@ -686,7 +681,7 @@ loops that truncate the stream.
consumed from the input iterator and there is no way to access it.
This could be an issue if an application wants to further consume the
input iterator after *takewhile* has been run to exhaustion. To work
- around this problem, consider using `more-iterools before_and_after()
+ around this problem, consider using `more-itertools before_and_after()
 <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.before_and_after>`_
instead.
@@ -843,12 +838,11 @@ and :term:`generators ` which incur interpreter overhead.
.. testcode::
- import collections
- import contextlib
- import functools
- import math
- import operator
- import random
+ from collections import deque
+ from contextlib import suppress
+ from functools import reduce
+ from math import sumprod, isqrt
+ from operator import itemgetter, getitem, mul, neg
def take(n, iterable):
"Return first n items of the iterable as a list."
@@ -863,11 +857,11 @@ and :term:`generators ` which incur interpreter overhead.
"Return function(0), function(1), ..."
return map(function, count(start))
- def repeatfunc(func, times=None, *args):
- "Repeat calls to func with specified arguments."
+ def repeatfunc(function, times=None, *args):
+ "Repeat calls to a function with specified arguments."
if times is None:
- return starmap(func, repeat(args))
- return starmap(func, repeat(args, times))
+ return starmap(function, repeat(args))
+ return starmap(function, repeat(args, times))
def flatten(list_of_lists):
"Flatten one level of nesting."
@@ -877,16 +871,21 @@ and :term:`generators ` which incur interpreter overhead.
"Returns the sequence elements n times."
return chain.from_iterable(repeat(tuple(iterable), n))
+ def loops(n):
+ "Loop n times. Like range(n) but without creating integers."
+ # for _ in loops(100): ...
+ return repeat(None, n)
+
def tail(n, iterable):
"Return an iterator over the last n items."
# tail(3, 'ABCDEFG') → E F G
- return iter(collections.deque(iterable, maxlen=n))
+ return iter(deque(iterable, maxlen=n))
def consume(iterator, n=None):
"Advance the iterator n-steps ahead. If n is None, consume entirely."
# Use functions that consume iterators at C speed.
if n is None:
- collections.deque(iterator, maxlen=0)
+ deque(iterator, maxlen=0)
else:
next(islice(iterator, n, n), None)
@@ -914,8 +913,8 @@ and :term:`generators ` which incur interpreter overhead.
# unique_justseen('AAAABBBCCDAABBB') → A B C D A B
# unique_justseen('ABBcCAD', str.casefold) → A B c A D
if key is None:
- return map(operator.itemgetter(0), groupby(iterable))
- return map(next, map(operator.itemgetter(1), groupby(iterable, key)))
+ return map(itemgetter(0), groupby(iterable))
+ return map(next, map(itemgetter(1), groupby(iterable, key)))
def unique_everseen(iterable, key=None):
"Yield unique elements, preserving order. Remember all elements ever seen."
@@ -936,13 +935,14 @@ and :term:`generators ` which incur interpreter overhead.
def unique(iterable, key=None, reverse=False):
"Yield unique elements in sorted order. Supports unhashable inputs."
# unique([[1, 2], [3, 4], [1, 2]]) → [1, 2] [3, 4]
- return unique_justseen(sorted(iterable, key=key, reverse=reverse), key=key)
+ sequenced = sorted(iterable, key=key, reverse=reverse)
+ return unique_justseen(sequenced, key=key)
def sliding_window(iterable, n):
"Collect data into overlapping fixed-length chunks or blocks."
# sliding_window('ABCDEFG', 4) → ABCD BCDE CDEF DEFG
iterator = iter(iterable)
- window = collections.deque(islice(iterator, n - 1), maxlen=n)
+ window = deque(islice(iterator, n - 1), maxlen=n)
for x in iterator:
window.append(x)
yield tuple(window)
@@ -976,7 +976,7 @@ and :term:`generators ` which incur interpreter overhead.
"Return all contiguous non-empty subslices of a sequence."
# subslices('ABCD') → A AB ABC ABCD B BC BCD C CD D
slices = starmap(slice, combinations(range(len(seq) + 1), 2))
- return map(operator.getitem, repeat(seq), slices)
+ return map(getitem, repeat(seq), slices)
def iter_index(iterable, value, start=0, stop=None):
"Return indices where a value occurs in a sequence or iterable."
@@ -990,19 +990,19 @@ and :term:`generators ` which incur interpreter overhead.
else:
stop = len(iterable) if stop is None else stop
i = start
- with contextlib.suppress(ValueError):
+ with suppress(ValueError):
while True:
yield (i := seq_index(value, i, stop))
i += 1
- def iter_except(func, exception, first=None):
+ def iter_except(function, exception, first=None):
"Convert a call-until-exception interface to an iterator interface."
# iter_except(d.popitem, KeyError) → non-blocking dictionary iterator
- with contextlib.suppress(exception):
+ with suppress(exception):
if first is not None:
yield first()
while True:
- yield func()
+ yield function()
The following recipes have a more mathematical flavor:
@@ -1010,19 +1010,20 @@ The following recipes have a more mathematical flavor:
.. testcode::
def powerset(iterable):
- "powerset([1,2,3]) → () (1,) (2,) (3,) (1,2) (1,3) (2,3) (1,2,3)"
+ "Subsequences of the iterable from shortest to longest."
+ # powerset([1,2,3]) → () (1,) (2,) (3,) (1,2) (1,3) (2,3) (1,2,3)
s = list(iterable)
return chain.from_iterable(combinations(s, r) for r in range(len(s)+1))
def sum_of_squares(iterable):
"Add up the squares of the input values."
# sum_of_squares([10, 20, 30]) → 1400
- return math.sumprod(*tee(iterable))
+ return sumprod(*tee(iterable))
- def reshape(matrix, cols):
+ def reshape(matrix, columns):
"Reshape a 2-D matrix to have a given number of columns."
# reshape([(0, 1), (2, 3), (4, 5)], 3) → (0, 1, 2), (3, 4, 5)
- return batched(chain.from_iterable(matrix), cols, strict=True)
+ return batched(chain.from_iterable(matrix), columns, strict=True)
def transpose(matrix):
"Swap the rows and columns of a 2-D matrix."
@@ -1033,7 +1034,7 @@ The following recipes have a more mathematical flavor:
"Multiply two matrices."
# matmul([(7, 5), (3, 5)], [(2, 5), (7, 9)]) → (49, 80), (41, 60)
n = len(m2[0])
- return batched(starmap(math.sumprod, product(m1, transpose(m2))), n)
+ return batched(starmap(sumprod, product(m1, transpose(m2))), n)
def convolve(signal, kernel):
"""Discrete linear convolution of two iterables.
@@ -1054,7 +1055,7 @@ The following recipes have a more mathematical flavor:
n = len(kernel)
padded_signal = chain(repeat(0, n-1), signal, repeat(0, n-1))
windowed_signal = sliding_window(padded_signal, n)
- return map(math.sumprod, repeat(kernel), windowed_signal)
+ return map(sumprod, repeat(kernel), windowed_signal)
def polynomial_from_roots(roots):
"""Compute a polynomial's coefficients from its roots.
@@ -1062,8 +1063,8 @@ The following recipes have a more mathematical flavor:
(x - 5) (x + 4) (x - 3) expands to: x³ -4x² -17x + 60
"""
# polynomial_from_roots([5, -4, 3]) → [1, -4, -17, 60]
- factors = zip(repeat(1), map(operator.neg, roots))
- return list(functools.reduce(convolve, factors, [1]))
+ factors = zip(repeat(1), map(neg, roots))
+ return list(reduce(convolve, factors, [1]))
def polynomial_eval(coefficients, x):
"""Evaluate a polynomial at a specific value.
@@ -1076,7 +1077,7 @@ The following recipes have a more mathematical flavor:
if not n:
return type(x)(0)
powers = map(pow, repeat(x), reversed(range(n)))
- return math.sumprod(coefficients, powers)
+ return sumprod(coefficients, powers)
def polynomial_derivative(coefficients):
"""Compute the first derivative of a polynomial.
@@ -1087,7 +1088,7 @@ The following recipes have a more mathematical flavor:
# polynomial_derivative([1, -4, -17, 60]) → [3, -8, -17]
n = len(coefficients)
powers = reversed(range(1, n))
- return list(map(operator.mul, coefficients, powers))
+ return list(map(mul, coefficients, powers))
def sieve(n):
"Primes less than n."
@@ -1095,7 +1096,7 @@ The following recipes have a more mathematical flavor:
if n > 2:
yield 2
data = bytearray((0, 1)) * (n // 2)
- for p in iter_index(data, 1, start=3, stop=math.isqrt(n) + 1):
+ for p in iter_index(data, 1, start=3, stop=isqrt(n) + 1):
data[p*p : n : p+p] = bytes(len(range(p*p, n, p+p)))
yield from iter_index(data, 1, start=3)
@@ -1104,7 +1105,7 @@ The following recipes have a more mathematical flavor:
# factor(99) → 3 3 11
# factor(1_000_000_000_000_007) → 47 59 360620266859
# factor(1_000_000_000_000_403) → 1000000000000403
- for prime in sieve(math.isqrt(n) + 1):
+ for prime in sieve(isqrt(n) + 1):
while not n % prime:
yield prime
n //= prime
@@ -1113,6 +1114,11 @@ The following recipes have a more mathematical flavor:
if n > 1:
yield n
+ def is_prime(n):
+ "Return True if n is prime."
+ # is_prime(1_000_000_000_000_403) → True
+ return n > 1 and next(factor(n)) == n
+
def totient(n):
"Count of natural numbers up to n that are coprime to n."
# https://mathworld.wolfram.com/TotientFunction.html
@@ -1202,6 +1208,16 @@ The following recipes have a more mathematical flavor:
[0, 2, 4, 6]
+ >>> for _ in loops(5):
+ ... print('hi')
+ ...
+ hi
+ hi
+ hi
+ hi
+ hi
+
+
>>> list(tail(3, 'ABCDEFG'))
['E', 'F', 'G']
>>> # Verify the input is consumed greedily
@@ -1475,6 +1491,23 @@ The following recipes have a more mathematical flavor:
True
+ >>> small_primes = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97]
+ >>> list(filter(is_prime, range(-100, 100))) == small_primes
+ True
+ >>> carmichael = {561, 1105, 1729, 2465, 2821, 6601, 8911} # https://oeis.org/A002997
+ >>> any(map(is_prime, carmichael))
+ False
+ >>> # https://www.wolframalpha.com/input?i=is+128884753939+prime
+ >>> is_prime(128_884_753_939) # large prime
+ True
+ >>> is_prime(999953 * 999983) # large semiprime
+ False
+ >>> is_prime(1_000_000_000_000_007) # factor() example
+ False
+ >>> is_prime(1_000_000_000_000_403) # factor() example
+ True
+
+
>>> list(factor(99)) # Code example 1
[3, 3, 11]
>>> list(factor(1_000_000_000_000_007)) # Code example 2
@@ -1703,7 +1736,7 @@ The following recipes have a more mathematical flavor:
# Old recipes and their tests which are guaranteed to continue to work.
- def sumprod(vec1, vec2):
+ def old_sumprod_recipe(vec1, vec2):
"Compute a sum of products."
return sum(starmap(operator.mul, zip(vec1, vec2, strict=True)))
@@ -1786,7 +1819,7 @@ The following recipes have a more mathematical flavor:
32
- >>> sumprod([1,2,3], [4,5,6])
+ >>> old_sumprod_recipe([1,2,3], [4,5,6])
32
diff --git a/Doc/library/json.rst b/Doc/library/json.rst
index bb7b1852e804a1..22636027f9dbaf 100644
--- a/Doc/library/json.rst
+++ b/Doc/library/json.rst
@@ -151,69 +151,94 @@ Basic Usage
sort_keys=False, **kw)
Serialize *obj* as a JSON formatted stream to *fp* (a ``.write()``-supporting
- :term:`file-like object`) using this :ref:`conversion table
+ :term:`file-like object`) using this :ref:`Python-to-JSON conversion table
<py-to-json-table>`.
- If *skipkeys* is true (default: ``False``), then dict keys that are not
- of a basic type (:class:`str`, :class:`int`, :class:`float`, :class:`bool`,
- ``None``) will be skipped instead of raising a :exc:`TypeError`.
-
- The :mod:`json` module always produces :class:`str` objects, not
- :class:`bytes` objects. Therefore, ``fp.write()`` must support :class:`str`
- input.
-
- If *ensure_ascii* is true (the default), the output is guaranteed to
- have all incoming non-ASCII characters escaped. If *ensure_ascii* is
- false, these characters will be output as-is.
-
- If *check_circular* is false (default: ``True``), then the circular
- reference check for container types will be skipped and a circular reference
- will result in a :exc:`RecursionError` (or worse).
+ .. note::
- If *allow_nan* is false (default: ``True``), then it will be a
- :exc:`ValueError` to serialize out of range :class:`float` values (``nan``,
- ``inf``, ``-inf``) in strict compliance of the JSON specification.
- If *allow_nan* is true, their JavaScript equivalents (``NaN``,
- ``Infinity``, ``-Infinity``) will be used.
+ Unlike :mod:`pickle` and :mod:`marshal`, JSON is not a framed protocol,
+ so trying to serialize multiple objects with repeated calls to
+ :func:`dump` using the same *fp* will result in an invalid JSON file.
- If *indent* is a non-negative integer or string, then JSON array elements and
- object members will be pretty-printed with that indent level. An indent level
- of 0, negative, or ``""`` will only insert newlines. ``None`` (the default)
- selects the most compact representation. Using a positive integer indent
- indents that many spaces per level. If *indent* is a string (such as ``"\t"``),
- that string is used to indent each level.
+ :param object obj:
+ The Python object to be serialized.
+
+ :param fp:
+ The file-like object *obj* will be serialized to.
+ The :mod:`!json` module always produces :class:`str` objects,
+ not :class:`bytes` objects,
+ therefore ``fp.write()`` must support :class:`str` input.
+ :type fp: :term:`file-like object`
+
+ :param bool skipkeys:
+ If ``True``, keys that are not of a basic type
+ (:class:`str`, :class:`int`, :class:`float`, :class:`bool`, ``None``)
+ will be skipped instead of raising a :exc:`TypeError`.
+ Default ``False``.
+
+ :param bool ensure_ascii:
+ If ``True`` (the default), the output is guaranteed to
+ have all incoming non-ASCII characters escaped.
+ If ``False``, these characters will be outputted as-is.
+
+ :param bool check_circular:
+ If ``False``, the circular reference check for container types is skipped
+ and a circular reference will result in a :exc:`RecursionError` (or worse).
+ Default ``True``.
+
+ :param bool allow_nan:
+ If ``False``, serialization of out-of-range :class:`float` values
+ (``nan``, ``inf``, ``-inf``) will result in a :exc:`ValueError`,
+ in strict compliance with the JSON specification.
+ If ``True`` (the default), their JavaScript equivalents
+ (``NaN``, ``Infinity``, ``-Infinity``) are used.
+
+ :param cls:
+ If set, a custom JSON encoder with the
+ :meth:`~JSONEncoder.default` method overridden,
+ for serializing into custom datatypes.
+ If ``None`` (the default), :class:`!JSONEncoder` is used.
+ :type cls: a :class:`JSONEncoder` subclass
+
+ :param indent:
+ If a positive integer or string, JSON array elements and
+ object members will be pretty-printed with that indent level.
+ A positive integer indents that many spaces per level;
+ a string (such as ``"\t"``) is used to indent each level.
+ If zero, negative, or ``""`` (the empty string),
+ only newlines are inserted.
+ If ``None`` (the default), the most compact representation is used.
+ :type indent: int | str | None
+
+ :param separators:
+ A two-tuple: ``(item_separator, key_separator)``.
+ If ``None`` (the default), *separators* defaults to
+ ``(', ', ': ')`` if *indent* is ``None``,
+ and ``(',', ': ')`` otherwise.
+ For the most compact JSON,
+ specify ``(',', ':')`` to eliminate whitespace.
+ :type separators: tuple | None
+
+ :param default:
+ A function that is called for objects that can't otherwise be serialized.
+ It should return a JSON encodable version of the object
+ or raise a :exc:`TypeError`.
+ If ``None`` (the default), :exc:`!TypeError` is raised.
+ :type default: :term:`callable` | None
+
+ :param bool sort_keys:
+ If ``True``, dictionaries will be outputted sorted by key.
+ Default ``False``.
.. versionchanged:: 3.2
Allow strings for *indent* in addition to integers.
- If specified, *separators* should be an ``(item_separator, key_separator)``
- tuple. The default is ``(', ', ': ')`` if *indent* is ``None`` and
- ``(',', ': ')`` otherwise. To get the most compact JSON representation,
- you should specify ``(',', ':')`` to eliminate whitespace.
-
.. versionchanged:: 3.4
Use ``(',', ': ')`` as default if *indent* is not ``None``.
- If specified, *default* should be a function that gets called for objects that
- can't otherwise be serialized. It should return a JSON encodable version of
- the object or raise a :exc:`TypeError`. If not specified, :exc:`TypeError`
- is raised.
-
- If *sort_keys* is true (default: ``False``), then the output of
- dictionaries will be sorted by key.
-
- To use a custom :class:`JSONEncoder` subclass (e.g. one that overrides the
- :meth:`~JSONEncoder.default` method to serialize additional types), specify it with the
- *cls* kwarg; otherwise :class:`JSONEncoder` is used.
-
.. versionchanged:: 3.6
All optional parameters are now :ref:`keyword-only `.
- .. note::
-
- Unlike :mod:`pickle` and :mod:`marshal`, JSON is not a framed protocol,
- so trying to serialize multiple objects with repeated calls to
- :func:`dump` using the same *fp* will result in an invalid JSON file.
.. function:: dumps(obj, *, skipkeys=False, ensure_ascii=True, \
check_circular=True, allow_nan=True, cls=None, \
@@ -233,36 +258,86 @@ Basic Usage
the original one. That is, ``loads(dumps(x)) != x`` if x has non-string
keys.
-.. function:: load(fp, *, cls=None, object_hook=None, parse_float=None, parse_int=None, parse_constant=None, object_pairs_hook=None, **kw)
-
- Deserialize *fp* (a ``.read()``-supporting :term:`text file` or
- :term:`binary file` containing a JSON document) to a Python object using
- this :ref:`conversion table `.
-
- *object_hook* is an optional function that will be called with the result of
- any object literal decoded (a :class:`dict`). The return value of
- *object_hook* will be used instead of the :class:`dict`. This feature can
- be used to implement custom decoders (e.g. `JSON-RPC
- `_ class hinting).
-
- *object_pairs_hook* is an optional function that will be called with the
- result of any object literal decoded with an ordered list of pairs. The
- return value of *object_pairs_hook* will be used instead of the
- :class:`dict`. This feature can be used to implement custom decoders. If
- *object_hook* is also defined, the *object_pairs_hook* takes priority.
+.. function:: load(fp, *, cls=None, object_hook=None, parse_float=None, \
+ parse_int=None, parse_constant=None, \
+ object_pairs_hook=None, **kw)
+
+ Deserialize *fp* to a Python object
+ using the :ref:`JSON-to-Python conversion table <json-to-py-table>`.
+
+ :param fp:
+ A ``.read()``-supporting :term:`text file` or :term:`binary file`
+ containing the JSON document to be deserialized.
+ :type fp: :term:`file-like object`
+
+ :param cls:
+ If set, a custom JSON decoder.
+ Additional keyword arguments to :func:`!load`
+ will be passed to the constructor of *cls*.
+ If ``None`` (the default), :class:`!JSONDecoder` is used.
+ :type cls: a :class:`JSONDecoder` subclass
+
+ :param object_hook:
+ If set, a function that is called with the result of
+ any object literal decoded (a :class:`dict`).
+ The return value of this function will be used
+ instead of the :class:`dict`.
+ This feature can be used to implement custom decoders,
+ for example `JSON-RPC `_ class hinting.
+ Default ``None``.
+ :type object_hook: :term:`callable` | None
+
+ :param object_pairs_hook:
+ If set, a function that is called with the result of
+ any object literal decoded with an ordered list of pairs.
+ The return value of this function will be used
+ instead of the :class:`dict`.
+ This feature can be used to implement custom decoders.
+ If *object_hook* is also set, *object_pairs_hook* takes priority.
+ Default ``None``.
+ :type object_pairs_hook: :term:`callable` | None
+
+ :param parse_float:
+ If set, a function that is called with
+ the string of every JSON float to be decoded.
+ If ``None`` (the default), it is equivalent to ``float(num_str)``.
+ This can be used to parse JSON floats into custom datatypes,
+ for example :class:`decimal.Decimal`.
+ :type parse_float: :term:`callable` | None
+
+ :param parse_int:
+ If set, a function that is called with
+ the string of every JSON int to be decoded.
+ If ``None`` (the default), it is equivalent to ``int(num_str)``.
+ This can be used to parse JSON integers into custom datatypes,
+ for example :class:`float`.
+ :type parse_int: :term:`callable` | None
+
+ :param parse_constant:
+ If set, a function that is called with one of the following strings:
+ ``'-Infinity'``, ``'Infinity'``, or ``'NaN'``.
+ This can be used to raise an exception
+ if invalid JSON numbers are encountered.
+ Default ``None``.
+ :type parse_constant: :term:`callable` | None
+
+ :raises JSONDecodeError:
+ When the data being deserialized is not a valid JSON document.
+
+ :raises UnicodeDecodeError:
+ When the data being deserialized does not contain
+ UTF-8, UTF-16 or UTF-32 encoded data.
.. versionchanged:: 3.1
- Added support for *object_pairs_hook*.
- *parse_float* is an optional function that will be called with the string of
- every JSON float to be decoded. By default, this is equivalent to
- ``float(num_str)``. This can be used to use another datatype or parser for
- JSON floats (e.g. :class:`decimal.Decimal`).
+ * Added the optional *object_pairs_hook* parameter.
+ * *parse_constant* doesn't get called on 'null', 'true', 'false' anymore.
- *parse_int* is an optional function that will be called with the string of
- every JSON int to be decoded. By default, this is equivalent to
- ``int(num_str)``. This can be used to use another datatype or parser for
- JSON integers (e.g. :class:`float`).
+ .. versionchanged:: 3.6
+
+ * All optional parameters are now :ref:`keyword-only <keyword-only_parameter>`.
+ * *fp* can now be a :term:`binary file`.
+ The input encoding should be UTF-8, UTF-16 or UTF-32.
.. versionchanged:: 3.11
The default *parse_int* of :func:`int` now limits the maximum length of
@@ -270,38 +345,13 @@ Basic Usage
conversion length limitation ` to help avoid denial
of service attacks.
- *parse_constant* is an optional function that will be called with one of the
- following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This can be
- used to raise an exception if invalid JSON numbers are encountered.
-
- .. versionchanged:: 3.1
- *parse_constant* doesn't get called on 'null', 'true', 'false' anymore.
-
- To use a custom :class:`JSONDecoder` subclass, specify it with the ``cls``
- kwarg; otherwise :class:`JSONDecoder` is used. Additional keyword arguments
- will be passed to the constructor of the class.
-
- If the data being deserialized is not a valid JSON document, a
- :exc:`JSONDecodeError` will be raised.
-
- .. versionchanged:: 3.6
- All optional parameters are now :ref:`keyword-only `.
-
- .. versionchanged:: 3.6
- *fp* can now be a :term:`binary file`. The input encoding should be
- UTF-8, UTF-16 or UTF-32.
-
.. function:: loads(s, *, cls=None, object_hook=None, parse_float=None, parse_int=None, parse_constant=None, object_pairs_hook=None, **kw)
- Deserialize *s* (a :class:`str`, :class:`bytes` or :class:`bytearray`
+ Identical to :func:`load`, but instead of a file-like object,
+ deserialize *s* (a :class:`str`, :class:`bytes` or :class:`bytearray`
instance containing a JSON document) to a Python object using this
:ref:`conversion table `.
- The other arguments have the same meaning as in :func:`load`.
-
- If the data being deserialized is not a valid JSON document, a
- :exc:`JSONDecodeError` will be raised.
-
.. versionchanged:: 3.6
*s* can now be of type :class:`bytes` or :class:`bytearray`. The
input encoding should be UTF-8, UTF-16 or UTF-32.
diff --git a/Doc/library/logging.rst b/Doc/library/logging.rst
index 8ab107d2a33e44..e930834209692e 100644
--- a/Doc/library/logging.rst
+++ b/Doc/library/logging.rst
@@ -342,7 +342,7 @@ in a module, ``__name__`` is the module's name in the Python package namespace.
If no handler is attached to this logger (or any of its ancestors,
taking into account the relevant :attr:`Logger.propagate` attributes),
- the message will be sent to the handler set on :attr:`lastResort`.
+ the message will be sent to the handler set on :data:`lastResort`.
.. versionchanged:: 3.2
The *stack_info* parameter was added.
@@ -1495,7 +1495,7 @@ functions.
Module-Level Attributes
-----------------------
-.. attribute:: lastResort
+.. data:: lastResort
A "handler of last resort" is available through this attribute. This
is a :class:`StreamHandler` writing to ``sys.stderr`` with a level of
@@ -1507,7 +1507,7 @@ Module-Level Attributes
.. versionadded:: 3.2
-.. attribute:: raiseExceptions
+.. data:: raiseExceptions
Used to see if exceptions during handling should be propagated.
diff --git a/Doc/library/math.rst b/Doc/library/math.rst
index 68878806749f28..25886da908f9a2 100644
--- a/Doc/library/math.rst
+++ b/Doc/library/math.rst
@@ -248,7 +248,8 @@ Floating point arithmetic
.. function:: fmod(x, y)
- Return ``fmod(x, y)``, as defined by the platform C library. Note that the
+ Return the floating-point remainder of ``x / y``,
+ as defined by the platform C library function ``fmod(x, y)``. Note that the
Python expression ``x % y`` may not return the same result. The intent of the C
standard is that ``fmod(x, y)`` be exactly (mathematically; to infinite
precision) equal to ``x - n*y`` for some integer *n* such that the result has
diff --git a/Doc/library/optparse.rst b/Doc/library/optparse.rst
index 74a49a8fb33666..ff327cf9162a8c 100644
--- a/Doc/library/optparse.rst
+++ b/Doc/library/optparse.rst
@@ -3,25 +3,135 @@
.. module:: optparse
:synopsis: Command-line option parsing library.
- :deprecated:
.. moduleauthor:: Greg Ward
.. sectionauthor:: Greg Ward
**Source code:** :source:`Lib/optparse.py`
-.. deprecated:: 3.2
- The :mod:`optparse` module is :term:`soft deprecated` and will not be
- developed further; development will continue with the :mod:`argparse`
- module.
-
--------------
+.. _choosing-an-argument-parser:
+
+Choosing an argument parsing library
+------------------------------------
+
+The standard library includes three argument parsing libraries:
+
+* :mod:`getopt`: a module that closely mirrors the procedural C ``getopt`` API.
+ Included in the standard library since before the initial Python 1.0 release.
+* :mod:`optparse`: a declarative replacement for ``getopt`` that
+ provides equivalent functionality without requiring each application
+ to implement its own procedural option parsing logic. Included
+ in the standard library since the Python 2.3 release.
+* :mod:`argparse`: a more opinionated alternative to ``optparse`` that
+ provides more functionality by default, at the expense of reduced application
+ flexibility in controlling exactly how arguments are processed. Included in
+ the standard library since the Python 2.7 and Python 3.2 releases.
+
+In the absence of more specific argument parsing design constraints, :mod:`argparse`
+is the recommended choice for implementing command line applications, as it offers
+the highest level of baseline functionality with the least application level code.
+
+:mod:`getopt` is retained almost entirely for backwards compatibility reasons.
+However, it also serves a niche use case as a tool for prototyping and testing
+command line argument handling in ``getopt``-based C applications.
+
+:mod:`optparse` should be considered as an alternative to :mod:`argparse` in the
+following cases:
+
+* an application is already using :mod:`optparse` and doesn't want to risk the
+ subtle behavioural changes that may arise when migrating to :mod:`argparse`
+* the application requires additional control over the way options and
+ positional parameters are interleaved on the command line (including
+ the ability to disable the interleaving feature completely)
+* the application requires additional control over the incremental parsing
+ of command line elements (while ``argparse`` does support this, the
+ exact way it works in practice is undesirable for some use cases)
+* the application requires additional control over the handling of options
+ which accept parameter values that may start with ``-`` (such as delegated
+ options to be passed to invoked subprocesses)
+* the application requires some other command line parameter processing
+ behavior which ``argparse`` does not support, but which can be implemented
+ in terms of the lower level interface offered by ``optparse``
+
+These considerations also mean that :mod:`optparse` is likely to provide a
+better foundation for library authors writing third party command line
+argument processing libraries.
+
+As a concrete example, consider the following two command line argument
+parsing configurations, the first using ``optparse``, and the second
+using ``argparse``:
+
+.. testcode::
+
+ import optparse
+
+ if __name__ == '__main__':
+ parser = optparse.OptionParser()
+ parser.add_option('-o', '--output')
+ parser.add_option('-v', dest='verbose', action='store_true')
+ opts, args = parser.parse_args()
+ process(args, output=opts.output, verbose=opts.verbose)
+
+.. testcode::
+
+ import argparse
+
+ if __name__ == '__main__':
+ parser = argparse.ArgumentParser()
+ parser.add_argument('-o', '--output')
+ parser.add_argument('-v', dest='verbose', action='store_true')
+ parser.add_argument('rest', nargs='*')
+ args = parser.parse_args()
+ process(args.rest, output=args.output, verbose=args.verbose)
+
+The most obvious difference is that in the ``optparse`` version, the non-option
+arguments are processed separately by the application after the option processing
+is complete. In the ``argparse`` version, positional arguments are declared and
+processed in the same way as the named options.
+
+However, the ``argparse`` version will also handle some parameter combinations
+differently from the way the ``optparse`` version would handle them, since
+For example (amongst other differences):
+
+* supplying ``-o -v`` gives ``output="-v"`` and ``verbose=False``
+ when using ``optparse``, but a usage error with ``argparse``
+ (complaining that no value has been supplied for ``-o/--output``,
+ since ``-v`` is interpreted as meaning the verbosity flag)
+* similarly, supplying ``-o --`` gives ``output="--"`` and ``args=()``
+ when using ``optparse``, but a usage error with ``argparse``
+ (also complaining that no value has been supplied for ``-o/--output``,
+ since ``--`` is interpreted as terminating the option processing
+ and treating all remaining values as positional arguments)
+* supplying ``-o=foo`` gives ``output="=foo"`` when using ``optparse``,
+ but gives ``output="foo"`` with ``argparse`` (since ``=`` is special
+ cased as an alternative separator for option parameter values)
+
+Whether these differing behaviors in the ``argparse`` version are
+considered desirable or a problem will depend on the specific command line
+application use case.
+
+.. seealso::
+
+ :pypi:`click` is a third party argument processing library (originally
+ based on ``optparse``), which allows command line applications to be
+ developed as a set of decorated command implementation functions.
+
+ Other third party libraries, such as :pypi:`typer` or :pypi:`msgspec-click`,
+ allow command line interfaces to be specified in ways that more effectively
+ integrate with static checking of Python type annotations.
+
+
+Introduction
+------------
+
:mod:`optparse` is a more convenient, flexible, and powerful library for parsing
-command-line options than the old :mod:`getopt` module. :mod:`optparse` uses a
-more declarative style of command-line parsing: you create an instance of
-:class:`OptionParser`, populate it with options, and parse the command
-line. :mod:`optparse` allows users to specify options in the conventional
+command-line options than the minimalist :mod:`getopt` module.
+:mod:`optparse` uses a more declarative style of command-line parsing:
+you create an instance of :class:`OptionParser`,
+populate it with options, and parse the command line.
+:mod:`optparse` allows users to specify options in the conventional
GNU/POSIX syntax, and additionally generates usage and help messages for you.
Here's an example of using :mod:`optparse` in a simple script::
@@ -82,10 +192,11 @@ Background
----------
:mod:`optparse` was explicitly designed to encourage the creation of programs
-with straightforward, conventional command-line interfaces. To that end, it
-supports only the most common command-line syntax and semantics conventionally
-used under Unix. If you are unfamiliar with these conventions, read this
-section to acquaint yourself with them.
+with straightforward command-line interfaces that follow the conventions
+established by the :c:func:`!getopt` family of functions available to C developers.
+To that end, it supports only the most common command-line syntax and semantics
+conventionally used under Unix. If you are unfamiliar with these conventions,
+reading this section will allow you to acquaint yourself with them.
.. _optparse-terminology:
diff --git a/Doc/library/os.rst b/Doc/library/os.rst
index 454e6d769fa555..bc9efd8e6ca450 100644
--- a/Doc/library/os.rst
+++ b/Doc/library/os.rst
@@ -5358,6 +5358,8 @@ information, consult your Unix manpages.
The following scheduling policies are exposed if they are supported by the
operating system.
+.. _os-scheduling-policy:
+
.. data:: SCHED_OTHER
The default scheduling policy.
@@ -5449,7 +5451,7 @@ operating system.
.. function:: sched_yield()
- Voluntarily relinquish the CPU.
+ Voluntarily relinquish the CPU. See :manpage:`sched_yield(2)` for details.
.. function:: sched_setaffinity(pid, mask, /)
diff --git a/Doc/library/pdb.rst b/Doc/library/pdb.rst
index 9478d7b7d5577d..50cafc1e2d823d 100644
--- a/Doc/library/pdb.rst
+++ b/Doc/library/pdb.rst
@@ -173,13 +173,15 @@ slightly different way:
:func:`set_trace` will enter the debugger immediately, rather than
on the next line of code to be executed.
-.. function:: post_mortem(traceback=None)
+.. function:: post_mortem(t=None)
- Enter post-mortem debugging of the given *traceback* object. If no
- *traceback* is given, it uses the one of the exception that is currently
- being handled (an exception must be being handled if the default is to be
- used).
+ Enter post-mortem debugging of the given exception or
+ :ref:`traceback object <traceback-objects>`. If no value is given, it uses
+ the exception that is currently being handled, or raises ``ValueError`` if
+ there isn’t one.
+ .. versionchanged:: 3.13
+ Support for exception objects was added.
.. function:: pm()
diff --git a/Doc/library/plistlib.rst b/Doc/library/plistlib.rst
index 2906ebe7822f52..075b974501e3da 100644
--- a/Doc/library/plistlib.rst
+++ b/Doc/library/plistlib.rst
@@ -71,7 +71,7 @@ This module defines the following functions:
When *aware_datetime* is true, fields with type ``datetime.datetime`` will
be created as :ref:`aware object `, with
- :attr:`!tzinfo` as :attr:`datetime.UTC`.
+ :attr:`!tzinfo` as :const:`datetime.UTC`.
XML data for the :data:`FMT_XML` format is parsed using the Expat parser
from :mod:`xml.parsers.expat` -- see its documentation for possible
diff --git a/Doc/library/select.rst b/Doc/library/select.rst
index f23a249f44b485..bbac9bc4bbe3ee 100644
--- a/Doc/library/select.rst
+++ b/Doc/library/select.rst
@@ -165,7 +165,7 @@ The module defines the following:
:exc:`InterruptedError`.
-.. attribute:: PIPE_BUF
+.. data:: PIPE_BUF
The minimum number of bytes which can be written without blocking to a pipe
when the pipe has been reported as ready for writing by :func:`~select.select`,
diff --git a/Doc/library/site.rst b/Doc/library/site.rst
index 4508091f679dc7..95aea779f82673 100644
--- a/Doc/library/site.rst
+++ b/Doc/library/site.rst
@@ -35,7 +35,7 @@ are skipped. For the tail part, it uses the empty string and then
:file:`lib/site-packages` (on Windows) or
:file:`lib/python{X.Y[t]}/site-packages` (on Unix and macOS). (The
optional suffix "t" indicates the :term:`free threading` build, and is
-appended if ``"t"`` is present in the :attr:`sys.abiflags` constant.)
+appended if ``"t"`` is present in the :data:`sys.abiflags` constant.)
For each
of the distinct head-tail combinations, it sees if it refers to an existing
directory, and if so, adds it to ``sys.path`` and also inspects the newly
diff --git a/Doc/library/ssl.rst b/Doc/library/ssl.rst
index b7fb1fc07d199f..4daacd7e3aa7fa 100644
--- a/Doc/library/ssl.rst
+++ b/Doc/library/ssl.rst
@@ -1931,8 +1931,8 @@ to speed up repeated connections from the same clients.
A :class:`TLSVersion` enum member representing the highest supported
TLS version. The value defaults to :attr:`TLSVersion.MAXIMUM_SUPPORTED`.
- The attribute is read-only for protocols other than :attr:`PROTOCOL_TLS`,
- :attr:`PROTOCOL_TLS_CLIENT`, and :attr:`PROTOCOL_TLS_SERVER`.
+ The attribute is read-only for protocols other than :const:`PROTOCOL_TLS`,
+ :const:`PROTOCOL_TLS_CLIENT`, and :const:`PROTOCOL_TLS_SERVER`.
The attributes :attr:`~SSLContext.maximum_version`,
:attr:`~SSLContext.minimum_version` and
@@ -1955,7 +1955,7 @@ to speed up repeated connections from the same clients.
.. attribute:: SSLContext.num_tickets
Control the number of TLS 1.3 session tickets of a
- :attr:`PROTOCOL_TLS_SERVER` context. The setting has no impact on TLS
+ :const:`PROTOCOL_TLS_SERVER` context. The setting has no impact on TLS
1.0 to 1.2 connections.
.. versionadded:: 3.8
@@ -2508,8 +2508,8 @@ thus several things you need to be aware of:
.. seealso::
The :mod:`asyncio` module supports :ref:`non-blocking SSL sockets
- ` and provides a
- higher level API. It polls for events using the :mod:`selectors` module and
+ <ssl-nonblocking>` and provides a higher level :ref:`Streams API <asyncio-streams>`.
+ It polls for events using the :mod:`selectors` module and
handles :exc:`SSLWantWriteError`, :exc:`SSLWantReadError` and
:exc:`BlockingIOError` exceptions. It runs the SSL handshake asynchronously
as well.
diff --git a/Doc/library/string.rst b/Doc/library/string.rst
index a000bb49f14800..09165c481b246e 100644
--- a/Doc/library/string.rst
+++ b/Doc/library/string.rst
@@ -59,11 +59,18 @@ The constants defined in this module are:
String of ASCII characters which are considered punctuation characters
in the ``C`` locale: ``!"#$%&'()*+,-./:;<=>?@[\]^_`{|}~``.
+
.. data:: printable
- String of ASCII characters which are considered printable. This is a
- combination of :const:`digits`, :const:`ascii_letters`, :const:`punctuation`,
- and :const:`whitespace`.
+ String of ASCII characters which are considered printable by Python.
+ This is a combination of :const:`digits`, :const:`ascii_letters`,
+ :const:`punctuation`, and :const:`whitespace`.
+
+ .. note::
+
+ By design, :meth:`string.printable.isprintable() `
+ returns :const:`False`. In particular, ``string.printable`` is not
+ printable in the POSIX sense (see :manpage:`LC_CTYPE <locale(7)>`).
.. data:: whitespace
@@ -409,7 +416,9 @@ conversions, trailing zeros are not removed from the result.
.. index:: single: , (comma); in string formatting
-The ``','`` option signals the use of a comma for a thousands separator.
+The ``','`` option signals the use of a comma for a thousands separator for
+floating-point presentation types and for integer presentation type ``'d'``.
+For other presentation types, this option is an error.
For a locale aware separator, use the ``'n'`` integer presentation type
instead.
diff --git a/Doc/library/superseded.rst b/Doc/library/superseded.rst
index 17bfa66f043302..d120c6acf621e3 100644
--- a/Doc/library/superseded.rst
+++ b/Doc/library/superseded.rst
@@ -4,12 +4,23 @@
Superseded Modules
******************
-The modules described in this chapter are deprecated or :term:`soft deprecated` and only kept for
-backwards compatibility. They have been superseded by other modules.
+The modules described in this chapter have been superseded by other modules
+for most use cases, and are retained primarily to preserve backwards compatibility.
+Modules may appear in this chapter because they only cover a limited subset of
+a problem space, and a more generally applicable solution is available elsewhere
+in the standard library (for example, :mod:`getopt` covers the very specific
+task of "mimic the C :c:func:`!getopt` API in Python", rather than the broader
+command line option parsing and argument parsing capabilities offered by
+:mod:`optparse` and :mod:`argparse`).
+
+Alternatively, modules may appear in this chapter because they are deprecated
+outright, and awaiting removal in a future release, or they are
+:term:`soft deprecated` and their use is actively discouraged in new projects.
+With the removal of various obsolete modules through :pep:`594`, there are
+currently no modules in this latter category.
.. toctree::
:maxdepth: 1
getopt.rst
- optparse.rst
diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst
index fdacf5aff8c455..7af7a7c0d7c47b 100644
--- a/Doc/library/sys.rst
+++ b/Doc/library/sys.rst
@@ -8,7 +8,7 @@
This module provides access to some variables used or maintained by the
interpreter and to functions that interact strongly with the interpreter. It is
-always available.
+always available. Unless explicitly noted otherwise, all variables are read-only.
.. data:: abiflags
diff --git a/Doc/library/sysconfig.rst b/Doc/library/sysconfig.rst
index 3921908b7c7bfc..9f018f9c8f0e50 100644
--- a/Doc/library/sysconfig.rst
+++ b/Doc/library/sysconfig.rst
@@ -388,7 +388,8 @@ Other functions
Windows will return one of:
- - win-amd64 (64bit Windows on AMD64, aka x86_64, Intel64, and EM64T)
+ - win-amd64 (64-bit Windows on AMD64, aka x86_64, Intel64, and EM64T)
+ - win-arm64 (64-bit Windows on ARM64, aka AArch64)
- win32 (all others - specifically, sys.platform is returned)
macOS can return:
diff --git a/Doc/library/time.rst b/Doc/library/time.rst
index 6265c2214eaa0d..804e2679027bd4 100644
--- a/Doc/library/time.rst
+++ b/Doc/library/time.rst
@@ -385,6 +385,8 @@ Functions
The suspension time may be longer than requested by an arbitrary amount,
because of the scheduling of other activity in the system.
+ .. rubric:: Windows implementation
+
On Windows, if *secs* is zero, the thread relinquishes the remainder of its
time slice to any other thread that is ready to run. If there are no other
threads ready to run, the function returns immediately, and the thread
@@ -393,12 +395,19 @@ Functions
`_
which provides resolution of 100 nanoseconds. If *secs* is zero, ``Sleep(0)`` is used.
- Unix implementation:
+ .. rubric:: Unix implementation
* Use ``clock_nanosleep()`` if available (resolution: 1 nanosecond);
* Or use ``nanosleep()`` if available (resolution: 1 nanosecond);
* Or use ``select()`` (resolution: 1 microsecond).
+ .. note::
+
+ To emulate a "no-op", use :keyword:`pass` instead of ``time.sleep(0)``.
+
+ To voluntarily relinquish the CPU, specify a real-time :ref:`scheduling
+ policy <os-scheduling-policy>` and use :func:`os.sched_yield` instead.
+
.. audit-event:: time.sleep secs
.. versionchanged:: 3.5
diff --git a/Doc/library/tokenize.rst b/Doc/library/tokenize.rst
index f719319a302a23..b80917eae66f8b 100644
--- a/Doc/library/tokenize.rst
+++ b/Doc/library/tokenize.rst
@@ -91,11 +91,10 @@ write back the modified script.
sequences with at least two elements, the token type and the token string.
Any additional sequence elements are ignored.
- The reconstructed script is returned as a single string. The result is
- guaranteed to tokenize back to match the input so that the conversion is
- lossless and round-trips are assured. The guarantee applies only to the
- token type and token string as the spacing between tokens (column
- positions) may change.
+ The result is guaranteed to tokenize back to match the input so that the
+ conversion is lossless and round-trips are assured. The guarantee applies
+ only to the token type and token string as the spacing between tokens
+ (column positions) may change.
It returns bytes, encoded using the :data:`~token.ENCODING` token, which
is the first token sequence output by :func:`.tokenize`. If there is no
diff --git a/Doc/library/traceback.rst b/Doc/library/traceback.rst
index 100a92b73d5497..301cf225a51d37 100644
--- a/Doc/library/traceback.rst
+++ b/Doc/library/traceback.rst
@@ -157,6 +157,13 @@ Module-Level Functions
arguments have the same meaning as for :func:`print_stack`.
+.. function:: print_list(extracted_list, file=None)
+
+ Print the list of tuples as returned by :func:`extract_tb` or
+ :func:`extract_stack` as a formatted stack trace to the given file.
+ If *file* is ``None``, the output is written to :data:`sys.stderr`.
+
+
.. function:: format_list(extracted_list)
Given a list of tuples or :class:`FrameSummary` objects as returned by
@@ -263,7 +270,7 @@ Module-Level Functions
:class:`!TracebackException` objects are created from actual exceptions to
capture data for later printing. They offer a more lightweight method of
storing this information by avoiding holding references to
-:ref:`traceback` and :ref:`frame` objects
+:ref:`traceback` and :ref:`frame` objects.
In addition, they expose more options to configure the output compared to
the module-level functions described above.
diff --git a/Doc/library/turtle.rst b/Doc/library/turtle.rst
index afda3685d606bb..38f3263b14bd9a 100644
--- a/Doc/library/turtle.rst
+++ b/Doc/library/turtle.rst
@@ -987,8 +987,8 @@ Settings for measurement
>>> turtle.heading()
90.0
- Change angle measurement unit to grad (also known as gon,
- grade, or gradian and equals 1/100-th of the right angle.)
+ >>> # Change angle measurement unit to grad (also known as gon,
+ >>> # grade, or gradian and equals 1/100-th of the right angle.)
>>> turtle.degrees(400.0)
>>> turtle.heading()
100.0
diff --git a/Doc/library/xmlrpc.client.rst b/Doc/library/xmlrpc.client.rst
index c57f433e6efd98..971e65605841e7 100644
--- a/Doc/library/xmlrpc.client.rst
+++ b/Doc/library/xmlrpc.client.rst
@@ -64,11 +64,11 @@ between conformable Python objects and XML on the wire.
The obsolete *use_datetime* flag is similar to *use_builtin_types* but it
applies only to date/time values.
-.. versionchanged:: 3.3
- The *use_builtin_types* flag was added.
+ .. versionchanged:: 3.3
+ The *use_builtin_types* flag was added.
-.. versionchanged:: 3.8
- The *headers* parameter was added.
+ .. versionchanged:: 3.8
+ The *headers* parameter was added.
Both the HTTP and HTTPS transports support the URL syntax extension for HTTP
Basic Authentication: ``http://user:pass@host:port/path``. The ``user:pass``
diff --git a/Doc/library/xmlrpc.rst b/Doc/library/xmlrpc.rst
index 5f0a2cf68d01f9..a93d08f78cfba7 100644
--- a/Doc/library/xmlrpc.rst
+++ b/Doc/library/xmlrpc.rst
@@ -1,6 +1,9 @@
:mod:`!xmlrpc` --- XMLRPC server and client modules
===================================================
+.. module:: xmlrpc
+ :synopsis: Server and client modules implementing XML-RPC.
+
XML-RPC is a Remote Procedure Call method that uses XML passed via HTTP as a
transport. With it, a client can call methods with parameters on a remote
server (the server is named by a URI) and get back structured data.
diff --git a/Doc/license.rst b/Doc/license.rst
index 674ac5f56e6f97..dd27e9009719cc 100644
--- a/Doc/license.rst
+++ b/Doc/license.rst
@@ -11,59 +11,63 @@ History of the software
=======================
Python was created in the early 1990s by Guido van Rossum at Stichting
-Mathematisch Centrum (CWI, see https://www.cwi.nl/) in the Netherlands as a
+Mathematisch Centrum (CWI, see https://www.cwi.nl) in the Netherlands as a
successor of a language called ABC. Guido remains Python's principal author,
although it includes many contributions from others.
In 1995, Guido continued his work on Python at the Corporation for National
-Research Initiatives (CNRI, see https://www.cnri.reston.va.us/) in Reston,
+Research Initiatives (CNRI, see https://www.cnri.reston.va.us) in Reston,
Virginia where he released several versions of the software.
In May 2000, Guido and the Python core development team moved to BeOpen.com to
form the BeOpen PythonLabs team. In October of the same year, the PythonLabs
-team moved to Digital Creations (now Zope Corporation; see
-https://www.zope.org/). In 2001, the Python Software Foundation (PSF, see
+team moved to Digital Creations, which became
+Zope Corporation. In 2001, the Python Software Foundation (PSF, see
https://www.python.org/psf/) was formed, a non-profit organization created
-specifically to own Python-related Intellectual Property. Zope Corporation is a
+specifically to own Python-related Intellectual Property. Zope Corporation was a
sponsoring member of the PSF.
-All Python releases are Open Source (see https://opensource.org/ for the Open
+All Python releases are Open Source (see https://opensource.org for the Open
Source Definition). Historically, most, but not all, Python releases have also
been GPL-compatible; the table below summarizes the various releases.
-+----------------+--------------+------------+------------+-----------------+
-| Release | Derived from | Year | Owner | GPL compatible? |
-+================+==============+============+============+=================+
-| 0.9.0 thru 1.2 | n/a | 1991-1995 | CWI | yes |
-+----------------+--------------+------------+------------+-----------------+
-| 1.3 thru 1.5.2 | 1.2 | 1995-1999 | CNRI | yes |
-+----------------+--------------+------------+------------+-----------------+
-| 1.6 | 1.5.2 | 2000 | CNRI | no |
-+----------------+--------------+------------+------------+-----------------+
-| 2.0 | 1.6 | 2000 | BeOpen.com | no |
-+----------------+--------------+------------+------------+-----------------+
-| 1.6.1 | 1.6 | 2001 | CNRI | no |
-+----------------+--------------+------------+------------+-----------------+
-| 2.1 | 2.0+1.6.1 | 2001 | PSF | no |
-+----------------+--------------+------------+------------+-----------------+
-| 2.0.1 | 2.0+1.6.1 | 2001 | PSF | yes |
-+----------------+--------------+------------+------------+-----------------+
-| 2.1.1 | 2.1+2.0.1 | 2001 | PSF | yes |
-+----------------+--------------+------------+------------+-----------------+
-| 2.1.2 | 2.1.1 | 2002 | PSF | yes |
-+----------------+--------------+------------+------------+-----------------+
-| 2.1.3 | 2.1.2 | 2002 | PSF | yes |
-+----------------+--------------+------------+------------+-----------------+
-| 2.2 and above | 2.1.1 | 2001-now | PSF | yes |
-+----------------+--------------+------------+------------+-----------------+
++----------------+--------------+------------+------------+---------------------+
+| Release | Derived from | Year | Owner | GPL-compatible? (1) |
++================+==============+============+============+=====================+
+| 0.9.0 thru 1.2 | n/a | 1991-1995 | CWI | yes |
++----------------+--------------+------------+------------+---------------------+
+| 1.3 thru 1.5.2 | 1.2 | 1995-1999 | CNRI | yes |
++----------------+--------------+------------+------------+---------------------+
+| 1.6 | 1.5.2 | 2000 | CNRI | no |
++----------------+--------------+------------+------------+---------------------+
+| 2.0 | 1.6 | 2000 | BeOpen.com | no |
++----------------+--------------+------------+------------+---------------------+
+| 1.6.1 | 1.6 | 2001 | CNRI | yes (2) |
++----------------+--------------+------------+------------+---------------------+
+| 2.1 | 2.0+1.6.1 | 2001 | PSF | no |
++----------------+--------------+------------+------------+---------------------+
+| 2.0.1 | 2.0+1.6.1 | 2001 | PSF | yes |
++----------------+--------------+------------+------------+---------------------+
+| 2.1.1 | 2.1+2.0.1 | 2001 | PSF | yes |
++----------------+--------------+------------+------------+---------------------+
+| 2.1.2 | 2.1.1 | 2002 | PSF | yes |
++----------------+--------------+------------+------------+---------------------+
+| 2.1.3 | 2.1.2 | 2002 | PSF | yes |
++----------------+--------------+------------+------------+---------------------+
+| 2.2 and above | 2.1.1 | 2001-now | PSF | yes |
++----------------+--------------+------------+------------+---------------------+
.. note::
- GPL-compatible doesn't mean that we're distributing Python under the GPL. All
- Python licenses, unlike the GPL, let you distribute a modified version without
- making your changes open source. The GPL-compatible licenses make it possible to
- combine Python with other software that is released under the GPL; the others
- don't.
+ (1) GPL-compatible doesn't mean that we're distributing Python under the GPL.
+ All Python licenses, unlike the GPL, let you distribute a modified version
+ without making your changes open source. The GPL-compatible licenses make
+ it possible to combine Python with other software that is released under
+ the GPL; the others don't.
+
+ (2) According to Richard Stallman, 1.6.1 is not GPL-compatible, because its license
+ has a choice of law clause. According to CNRI, however, Stallman's lawyer has
+ told CNRI's lawyer that 1.6.1 is "not incompatible" with the GPL.
Thanks to the many outside volunteers who have worked under Guido's direction to
make these releases possible.
@@ -73,10 +77,10 @@ Terms and conditions for accessing or otherwise using Python
============================================================
Python software and documentation are licensed under the
-:ref:`PSF License Agreement `.
+Python Software Foundation License Version 2.
Starting with Python 3.8.6, examples, recipes, and other code in
-the documentation are dual licensed under the PSF License Agreement
+the documentation are dual licensed under the PSF License Version 2
and the :ref:`Zero-Clause BSD license `.
Some software incorporated into Python is under different licenses.
@@ -86,39 +90,38 @@ See :ref:`OtherLicenses` for an incomplete list of these licenses.
.. _PSF-license:
-PSF LICENSE AGREEMENT FOR PYTHON |release|
-------------------------------------------
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
.. parsed-literal::
1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and
- the Individual or Organization ("Licensee") accessing and otherwise using Python
- |release| software in source or binary form and its associated documentation.
+ the Individual or Organization ("Licensee") accessing and otherwise using this
+ software ("Python") in source or binary form and its associated documentation.
2. Subject to the terms and conditions of this License Agreement, PSF hereby
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
analyze, test, perform and/or display publicly, prepare derivative works,
- distribute, and otherwise use Python |release| alone or in any derivative
+ distribute, and otherwise use Python alone or in any derivative
version, provided, however, that PSF's License Agreement and PSF's notice of
copyright, i.e., "Copyright © 2001-2024 Python Software Foundation; All Rights
- Reserved" are retained in Python |release| alone or in any derivative version
+ Reserved" are retained in Python alone or in any derivative version
prepared by Licensee.
3. In the event Licensee prepares a derivative work that is based on or
- incorporates Python |release| or any part thereof, and wants to make the
+ incorporates Python or any part thereof, and wants to make the
derivative work available to others as provided herein, then Licensee hereby
- agrees to include in any such work a brief summary of the changes made to Python
- |release|.
+ agrees to include in any such work a brief summary of the changes made to Python.
- 4. PSF is making Python |release| available to Licensee on an "AS IS" basis.
+ 4. PSF is making Python available to Licensee on an "AS IS" basis.
PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF
EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR
WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE
- USE OF PYTHON |release| WILL NOT INFRINGE ANY THIRD PARTY RIGHTS.
+ USE OF PYTHON WILL NOT INFRINGE ANY THIRD PARTY RIGHTS.
- 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON |release|
+ 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF
- MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON |release|, OR ANY DERIVATIVE
+ MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, OR ANY DERIVATIVE
THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
6. This License Agreement will automatically terminate upon a material breach of
@@ -130,7 +133,7 @@ PSF LICENSE AGREEMENT FOR PYTHON |release|
trademark sense to endorse or promote products or services of Licensee, or any
third party.
- 8. By copying, installing or otherwise using Python |release|, Licensee agrees
+ 8. By copying, installing or otherwise using Python, Licensee agrees
to be bound by the terms and conditions of this License Agreement.
@@ -205,7 +208,7 @@ CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
Agreement. This Agreement together with Python 1.6.1 may be located on the
internet using the following unique, persistent identifier (known as a handle):
1895.22/1013. This Agreement may also be obtained from a proxy server on the
- internet using the following URL: http://hdl.handle.net/1895.22/1013."
+ internet using the following URL: http://hdl.handle.net/1895.22/1013".
3. In the event Licensee prepares a derivative work that is based on or
incorporates Python 1.6.1 or any part thereof, and wants to make the derivative
@@ -273,8 +276,8 @@ CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
.. _BSD0:
-ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON |release| DOCUMENTATION
-----------------------------------------------------------------------
+ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION
+------------------------------------------------------------
.. parsed-literal::
@@ -371,7 +374,7 @@ Project, https://www.wide.ad.jp/. ::
may be used to endorse or promote products derived from this software
without specific prior written permission.
- THIS SOFTWARE IS PROVIDED BY THE PROJECT AND CONTRIBUTORS ``AS IS'' AND
+ THIS SOFTWARE IS PROVIDED BY THE PROJECT AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE PROJECT OR CONTRIBUTORS BE LIABLE
@@ -580,7 +583,7 @@ interface::
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+ THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
@@ -881,7 +884,7 @@ sources unless the build is configured ``--with-system-libffi``::
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
- ``Software''), to deal in the Software without restriction, including
+ "Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
@@ -890,7 +893,7 @@ sources unless the build is configured ``--with-system-libffi``::
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
- THE SOFTWARE IS PROVIDED ``AS IS'', WITHOUT WARRANTY OF ANY KIND,
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
@@ -1119,7 +1122,7 @@ The file is distributed under the 2-Clause BSD License::
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+ THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
diff --git a/Doc/make.bat b/Doc/make.bat
index 87d8359ef112bb..99f0d5c44f0098 100644
--- a/Doc/make.bat
+++ b/Doc/make.bat
@@ -127,16 +127,14 @@ goto end
:build
if not exist "%BUILDDIR%" mkdir "%BUILDDIR%"
-rem PY_MISC_NEWS_DIR is also used by our Sphinx extension in tools/extensions/pyspecific.py
-if not defined PY_MISC_NEWS_DIR set PY_MISC_NEWS_DIR=%BUILDDIR%\%1
-if not exist "%PY_MISC_NEWS_DIR%" mkdir "%PY_MISC_NEWS_DIR%"
+if not exist build mkdir build
if exist ..\Misc\NEWS (
- echo.Copying Misc\NEWS to %PY_MISC_NEWS_DIR%\NEWS
- copy ..\Misc\NEWS "%PY_MISC_NEWS_DIR%\NEWS" > nul
+ echo.Copying existing Misc\NEWS file to Doc\build\NEWS
+ copy ..\Misc\NEWS build\NEWS > nul
) else if exist ..\Misc\NEWS.D (
if defined BLURB (
echo.Merging Misc/NEWS with %BLURB%
- %BLURB% merge -f "%PY_MISC_NEWS_DIR%\NEWS"
+ %BLURB% merge -f build\NEWS
) else (
echo.No Misc/NEWS file and Blurb is not available.
exit /B 1
@@ -144,12 +142,12 @@ if exist ..\Misc\NEWS (
)
if defined PAPER (
- set SPHINXOPTS=-D latex_elements.papersize=%PAPER% %SPHINXOPTS%
+ set SPHINXOPTS=--define latex_elements.papersize=%PAPER% %SPHINXOPTS%
)
if "%1" EQU "htmlhelp" (
- set SPHINXOPTS=-D html_theme_options.body_max_width=none %SPHINXOPTS%
+ set SPHINXOPTS=--define html_theme_options.body_max_width=none %SPHINXOPTS%
)
-cmd /S /C "%SPHINXBUILD% %SPHINXOPTS% -b%1 -dbuild\doctrees . "%BUILDDIR%\%1" %2 %3 %4 %5 %6 %7 %8 %9"
+cmd /S /C "%SPHINXBUILD% %SPHINXOPTS% --builder %1 --doctree-dir build\doctrees . "%BUILDDIR%\%1" %2 %3 %4 %5 %6 %7 %8 %9"
if "%1" EQU "htmlhelp" (
"%HTMLHELP%" "%BUILDDIR%\htmlhelp\python%DISTVERSION:.=%.hhp"
diff --git a/Doc/reference/compound_stmts.rst b/Doc/reference/compound_stmts.rst
index 69c47686dd6784..e351c962d99bd4 100644
--- a/Doc/reference/compound_stmts.rst
+++ b/Doc/reference/compound_stmts.rst
@@ -534,15 +534,18 @@ is semantically equivalent to::
enter = type(manager).__enter__
exit = type(manager).__exit__
value = enter(manager)
+ hit_except = False
try:
TARGET = value
SUITE
except:
+ hit_except = True
if not exit(manager, *sys.exc_info()):
raise
- else:
- exit(manager, None, None, None)
+ finally:
+ if not hit_except:
+ exit(manager, None, None, None)
With more than one item, the context managers are processed as if multiple
:keyword:`with` statements were nested::
@@ -1214,8 +1217,10 @@ A function definition defines a user-defined function object (see section
: | `parameter_list_no_posonly`
parameter_list_no_posonly: `defparameter` ("," `defparameter`)* ["," [`parameter_list_starargs`]]
: | `parameter_list_starargs`
- parameter_list_starargs: "*" [`star_parameter`] ("," `defparameter`)* ["," ["**" `parameter` [","]]]
- : | "**" `parameter` [","]
+ parameter_list_starargs: "*" [`star_parameter`] ("," `defparameter`)* ["," [`parameter_star_kwargs`]]
+ : "*" ("," `defparameter`)+ ["," [`parameter_star_kwargs`]]
+ : | `parameter_star_kwargs`
+ parameter_star_kwargs: "**" `parameter` [","]
parameter: `identifier` [":" `expression`]
star_parameter: `identifier` [":" ["*"] `expression`]
defparameter: `parameter` ["=" `expression`]
diff --git a/Doc/requirements-oldest-sphinx.txt b/Doc/requirements-oldest-sphinx.txt
deleted file mode 100644
index 3483faea6b56cb..00000000000000
--- a/Doc/requirements-oldest-sphinx.txt
+++ /dev/null
@@ -1,35 +0,0 @@
-# Requirements to build the Python documentation, for the oldest supported
-# Sphinx version.
-#
-# We pin Sphinx and all of its dependencies to ensure a consistent environment.
-
-blurb
-python-docs-theme>=2022.1
-
-# Generated from:
-# pip install "Sphinx~=7.2.6"
-# pip freeze
-#
-# Sphinx 7.2.6 comes from ``needs_sphinx = '7.2.6'`` in ``Doc/conf.py``.
-
-alabaster==0.7.16
-Babel==2.16.0
-certifi==2024.8.30
-charset-normalizer==3.4.0
-docutils==0.20.1
-idna==3.10
-imagesize==1.4.1
-Jinja2==3.1.4
-MarkupSafe==3.0.1
-packaging==24.1
-Pygments==2.18.0
-requests==2.32.3
-snowballstemmer==2.2.0
-Sphinx==7.2.6
-sphinxcontrib-applehelp==2.0.0
-sphinxcontrib-devhelp==2.0.0
-sphinxcontrib-htmlhelp==2.1.0
-sphinxcontrib-jsmath==1.0.1
-sphinxcontrib-qthelp==2.0.0
-sphinxcontrib-serializinghtml==2.0.0
-urllib3==2.2.3
diff --git a/Doc/requirements.txt b/Doc/requirements.txt
index 5105786ccf283c..32ff8f74d05bb6 100644
--- a/Doc/requirements.txt
+++ b/Doc/requirements.txt
@@ -3,9 +3,10 @@
# Note that when updating this file, you will likely also have to update
# the Doc/constraints.txt file.
-# Sphinx version is pinned so that new versions that introduce new warnings
+# The Sphinx version is pinned so that new versions that introduce new warnings
# won't suddenly cause build failures. Updating the version is fine as long
# as no warnings are raised by doing so.
+# Keep this version in sync with ``Doc/conf.py``.
sphinx~=8.1.0
blurb
diff --git a/Doc/tools/.nitignore b/Doc/tools/.nitignore
index 41ca0bdb907b44..9e36087ff10c3e 100644
--- a/Doc/tools/.nitignore
+++ b/Doc/tools/.nitignore
@@ -12,7 +12,6 @@ Doc/c-api/stable.rst
Doc/c-api/type.rst
Doc/c-api/typeobj.rst
Doc/extending/extending.rst
-Doc/glossary.rst
Doc/library/ast.rst
Doc/library/asyncio-extending.rst
Doc/library/asyncio-policy.rst
@@ -24,7 +23,6 @@ Doc/library/email.charset.rst
Doc/library/email.compat32-message.rst
Doc/library/email.errors.rst
Doc/library/email.parser.rst
-Doc/library/email.policy.rst
Doc/library/exceptions.rst
Doc/library/functools.rst
Doc/library/http.cookiejar.rst
@@ -77,7 +75,6 @@ Doc/whatsnew/2.4.rst
Doc/whatsnew/2.5.rst
Doc/whatsnew/2.6.rst
Doc/whatsnew/2.7.rst
-Doc/whatsnew/3.0.rst
Doc/whatsnew/3.3.rst
Doc/whatsnew/3.4.rst
Doc/whatsnew/3.5.rst
diff --git a/Doc/tools/extensions/availability.py b/Doc/tools/extensions/availability.py
index 47833fdcb87590..1a2c7b02b44439 100644
--- a/Doc/tools/extensions/availability.py
+++ b/Doc/tools/extensions/availability.py
@@ -6,6 +6,7 @@
from docutils import nodes
from sphinx import addnodes
+from sphinx.locale import _ as sphinx_gettext
from sphinx.util import logging
from sphinx.util.docutils import SphinxDirective
@@ -55,7 +56,7 @@ class Availability(SphinxDirective):
final_argument_whitespace = True
def run(self) -> list[nodes.container]:
- title = "Availability"
+ title = sphinx_gettext("Availability")
refnode = addnodes.pending_xref(
title,
nodes.inline(title, title, classes=["xref", "std", "std-ref"]),
diff --git a/Doc/tools/extensions/c_annotations.py b/Doc/tools/extensions/c_annotations.py
index 50065d34a2c27a..089614a1f6c421 100644
--- a/Doc/tools/extensions/c_annotations.py
+++ b/Doc/tools/extensions/c_annotations.py
@@ -16,7 +16,6 @@
from pathlib import Path
from typing import TYPE_CHECKING
-import sphinx
from docutils import nodes
from docutils.statemachine import StringList
from sphinx import addnodes
@@ -285,16 +284,6 @@ def setup(app: Sphinx) -> ExtensionMetadata:
app.connect("builder-inited", init_annotations)
app.connect("doctree-read", add_annotations)
- if sphinx.version_info[:2] < (7, 2):
- from docutils.parsers.rst import directives
- from sphinx.domains.c import CObject
-
- # monkey-patch C object...
- CObject.option_spec |= {
- "no-index-entry": directives.flag,
- "no-contents-entry": directives.flag,
- }
-
return {
"version": "1.0",
"parallel_read_safe": True,
diff --git a/Doc/tools/extensions/changes.py b/Doc/tools/extensions/changes.py
new file mode 100644
index 00000000000000..8de5e7f78c6627
--- /dev/null
+++ b/Doc/tools/extensions/changes.py
@@ -0,0 +1,90 @@
+"""Support for documenting version of changes, additions, deprecations."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from sphinx.domains.changeset import (
+ VersionChange,
+ versionlabel_classes,
+ versionlabels,
+)
+from sphinx.locale import _ as sphinx_gettext
+
+if TYPE_CHECKING:
+ from docutils.nodes import Node
+ from sphinx.application import Sphinx
+ from sphinx.util.typing import ExtensionMetadata
+
+
+def expand_version_arg(argument: str, release: str) -> str:
+ """Expand "next" to the current version"""
+ if argument == "next":
+ return sphinx_gettext("{} (unreleased)").format(release)
+ return argument
+
+
+class PyVersionChange(VersionChange):
+ def run(self) -> list[Node]:
+ # Replace the 'next' special token with the current development version
+ self.arguments[0] = expand_version_arg(
+ self.arguments[0], self.config.release
+ )
+ return super().run()
+
+
+class DeprecatedRemoved(VersionChange):
+ required_arguments = 2
+
+ _deprecated_label = sphinx_gettext(
+ "Deprecated since version %s, will be removed in version %s"
+ )
+ _removed_label = sphinx_gettext(
+ "Deprecated since version %s, removed in version %s"
+ )
+
+ def run(self) -> list[Node]:
+ # Replace the first two arguments (deprecated version and removed version)
+ # with a single tuple of both versions.
+ version_deprecated = expand_version_arg(
+ self.arguments[0], self.config.release
+ )
+ version_removed = self.arguments.pop(1)
+ if version_removed == "next":
+ raise ValueError(
+ "deprecated-removed:: second argument cannot be `next`"
+ )
+ self.arguments[0] = version_deprecated, version_removed
+
+ # Set the label based on if we have reached the removal version
+ current_version = tuple(map(int, self.config.version.split(".")))
+ removed_version = tuple(map(int, version_removed.split(".")))
+ if current_version < removed_version:
+ versionlabels[self.name] = self._deprecated_label
+ versionlabel_classes[self.name] = "deprecated"
+ else:
+ versionlabels[self.name] = self._removed_label
+ versionlabel_classes[self.name] = "removed"
+ try:
+ return super().run()
+ finally:
+ # reset versionlabels and versionlabel_classes
+ versionlabels[self.name] = ""
+ versionlabel_classes[self.name] = ""
+
+
+def setup(app: Sphinx) -> ExtensionMetadata:
+ # Override Sphinx's directives with support for 'next'
+ app.add_directive("versionadded", PyVersionChange, override=True)
+ app.add_directive("versionchanged", PyVersionChange, override=True)
+ app.add_directive("versionremoved", PyVersionChange, override=True)
+ app.add_directive("deprecated", PyVersionChange, override=True)
+
+ # Register the ``.. deprecated-removed::`` directive
+ app.add_directive("deprecated-removed", DeprecatedRemoved)
+
+ return {
+ "version": "1.0",
+ "parallel_read_safe": True,
+ "parallel_write_safe": True,
+ }
diff --git a/Doc/tools/extensions/misc_news.py b/Doc/tools/extensions/misc_news.py
new file mode 100644
index 00000000000000..a24c440595ee92
--- /dev/null
+++ b/Doc/tools/extensions/misc_news.py
@@ -0,0 +1,75 @@
+"""Support for including Misc/NEWS."""
+
+from __future__ import annotations
+
+import re
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+from docutils import nodes
+from sphinx.locale import _ as sphinx_gettext
+from sphinx.util.docutils import SphinxDirective
+
+if TYPE_CHECKING:
+ from typing import Final
+
+ from docutils.nodes import Node
+ from sphinx.application import Sphinx
+ from sphinx.util.typing import ExtensionMetadata
+
+
+BLURB_HEADER = """\
++++++++++++
+Python News
++++++++++++
+"""
+
+bpo_issue_re: Final[re.Pattern[str]] = re.compile(
+ "(?:issue #|bpo-)([0-9]+)", re.ASCII
+)
+gh_issue_re: Final[re.Pattern[str]] = re.compile(
+ "gh-(?:issue-)?([0-9]+)", re.ASCII | re.IGNORECASE
+)
+whatsnew_re: Final[re.Pattern[str]] = re.compile(
+ r"^what's new in (.*?)\??$", re.ASCII | re.IGNORECASE | re.MULTILINE
+)
+
+
+class MiscNews(SphinxDirective):
+ has_content = False
+ required_arguments = 1
+ optional_arguments = 0
+ final_argument_whitespace = False
+ option_spec = {}
+
+ def run(self) -> list[Node]:
+ # Get content of NEWS file
+ source, _ = self.get_source_info()
+ news_file = Path(source).resolve().parent / self.arguments[0]
+ self.env.note_dependency(news_file)
+ try:
+ news_text = news_file.read_text(encoding="utf-8")
+ except (OSError, UnicodeError):
+ text = sphinx_gettext("The NEWS file is not available.")
+ return [nodes.strong(text, text)]
+
+ # remove first 3 lines as they are the main heading
+ news_text = news_text.removeprefix(BLURB_HEADER)
+
+ news_text = bpo_issue_re.sub(r":issue:`\1`", news_text)
+ # Fallback handling for GitHub issues
+ news_text = gh_issue_re.sub(r":gh:`\1`", news_text)
+ news_text = whatsnew_re.sub(r"\1", news_text)
+
+ self.state_machine.insert_input(news_text.splitlines(), str(news_file))
+ return []
+
+
+def setup(app: Sphinx) -> ExtensionMetadata:
+ app.add_directive("miscnews", MiscNews)
+
+ return {
+ "version": "1.0",
+ "parallel_read_safe": True,
+ "parallel_write_safe": True,
+ }
diff --git a/Doc/tools/extensions/pyspecific.py b/Doc/tools/extensions/pyspecific.py
index 1ad10d4bef6f36..b7e35fb01e2aec 100644
--- a/Doc/tools/extensions/pyspecific.py
+++ b/Doc/tools/extensions/pyspecific.py
@@ -21,7 +21,6 @@
from docutils.utils import new_document, unescape
from sphinx import addnodes
from sphinx.builders import Builder
-from sphinx.domains.changeset import VersionChange, versionlabels, versionlabel_classes
from sphinx.domains.python import PyFunction, PyMethod, PyModule
from sphinx.locale import _ as sphinx_gettext
from sphinx.util.docutils import SphinxDirective
@@ -41,16 +40,6 @@
Body.enum.converters['lowerroman'] = \
Body.enum.converters['upperroman'] = lambda x: None
-# monkey-patch the productionlist directive to allow hyphens in group names
-# https://github.com/sphinx-doc/sphinx/issues/11854
-from sphinx.domains import std
-
-std.token_re = re.compile(r'`((~?[\w-]*:)?\w+)`')
-
-# backport :no-index:
-PyModule.option_spec['no-index'] = directives.flag
-
-
# Support for marking up and linking to bugs.python.org issues
def issue_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
@@ -107,32 +96,6 @@ def run(self):
return [pnode]
-# Support for documenting decorators
-
-class PyDecoratorMixin(object):
- def handle_signature(self, sig, signode):
- ret = super(PyDecoratorMixin, self).handle_signature(sig, signode)
- signode.insert(0, addnodes.desc_addname('@', '@'))
- return ret
-
- def needs_arglist(self):
- return False
-
-
-class PyDecoratorFunction(PyDecoratorMixin, PyFunction):
- def run(self):
- # a decorator function is a function after all
- self.name = 'py:function'
- return PyFunction.run(self)
-
-
-# TODO: Use sphinx.domains.python.PyDecoratorMethod when possible
-class PyDecoratorMethod(PyDecoratorMixin, PyMethod):
- def run(self):
- self.name = 'py:method'
- return PyMethod.run(self)
-
-
class PyCoroutineMixin(object):
def handle_signature(self, sig, signode):
ret = super(PyCoroutineMixin, self).handle_signature(sig, signode)
@@ -184,97 +147,6 @@ def run(self):
return PyMethod.run(self)
-# Support for documenting version of changes, additions, deprecations
-
-def expand_version_arg(argument, release):
- """Expand "next" to the current version"""
- if argument == 'next':
- return sphinx_gettext('{} (unreleased)').format(release)
- return argument
-
-
-class PyVersionChange(VersionChange):
- def run(self):
- # Replace the 'next' special token with the current development version
- self.arguments[0] = expand_version_arg(self.arguments[0],
- self.config.release)
- return super().run()
-
-
-class DeprecatedRemoved(VersionChange):
- required_arguments = 2
-
- _deprecated_label = sphinx_gettext('Deprecated since version %s, will be removed in version %s')
- _removed_label = sphinx_gettext('Deprecated since version %s, removed in version %s')
-
- def run(self):
- # Replace the first two arguments (deprecated version and removed version)
- # with a single tuple of both versions.
- version_deprecated = expand_version_arg(self.arguments[0],
- self.config.release)
- version_removed = self.arguments.pop(1)
- if version_removed == 'next':
- raise ValueError(
- 'deprecated-removed:: second argument cannot be `next`')
- self.arguments[0] = version_deprecated, version_removed
-
- # Set the label based on if we have reached the removal version
- current_version = tuple(map(int, self.config.version.split('.')))
- removed_version = tuple(map(int, version_removed.split('.')))
- if current_version < removed_version:
- versionlabels[self.name] = self._deprecated_label
- versionlabel_classes[self.name] = 'deprecated'
- else:
- versionlabels[self.name] = self._removed_label
- versionlabel_classes[self.name] = 'removed'
- try:
- return super().run()
- finally:
- # reset versionlabels and versionlabel_classes
- versionlabels[self.name] = ''
- versionlabel_classes[self.name] = ''
-
-
-# Support for including Misc/NEWS
-
-issue_re = re.compile('(?:[Ii]ssue #|bpo-)([0-9]+)', re.I)
-gh_issue_re = re.compile('(?:gh-issue-|gh-)([0-9]+)', re.I)
-whatsnew_re = re.compile(r"(?im)^what's new in (.*?)\??$")
-
-
-class MiscNews(SphinxDirective):
- has_content = False
- required_arguments = 1
- optional_arguments = 0
- final_argument_whitespace = False
- option_spec = {}
-
- def run(self):
- fname = self.arguments[0]
- source = self.state_machine.input_lines.source(
- self.lineno - self.state_machine.input_offset - 1)
- source_dir = getenv('PY_MISC_NEWS_DIR')
- if not source_dir:
- source_dir = path.dirname(path.abspath(source))
- fpath = path.join(source_dir, fname)
- self.env.note_dependency(path.abspath(fpath))
- try:
- with io.open(fpath, encoding='utf-8') as fp:
- content = fp.read()
- except Exception:
- text = 'The NEWS file is not available.'
- node = nodes.strong(text, text)
- return [node]
- content = issue_re.sub(r':issue:`\1`', content)
- # Fallback handling for the GitHub issue
- content = gh_issue_re.sub(r':gh:`\1`', content)
- content = whatsnew_re.sub(r'\1', content)
- # remove first 3 lines as they are the main heading
- lines = ['.. default-role:: obj', ''] + content.splitlines()[3:]
- self.state_machine.insert_input(lines, fname)
- return []
-
-
# Support for building "topic help" for pydoc
pydoc_topic_labels = [
@@ -417,23 +289,14 @@ def setup(app):
app.add_role('issue', issue_role)
app.add_role('gh', gh_issue_role)
app.add_directive('impl-detail', ImplementationDetail)
- app.add_directive('versionadded', PyVersionChange, override=True)
- app.add_directive('versionchanged', PyVersionChange, override=True)
- app.add_directive('versionremoved', PyVersionChange, override=True)
- app.add_directive('deprecated', PyVersionChange, override=True)
- app.add_directive('deprecated-removed', DeprecatedRemoved)
app.add_builder(PydocTopicsBuilder)
app.add_object_type('opcode', 'opcode', '%s (opcode)', parse_opcode_signature)
app.add_object_type('pdbcommand', 'pdbcmd', '%s (pdb command)', parse_pdb_command)
app.add_object_type('monitoring-event', 'monitoring-event', '%s (monitoring event)', parse_monitoring_event)
- app.add_directive_to_domain('py', 'decorator', PyDecoratorFunction)
- app.add_directive_to_domain('py', 'decoratormethod', PyDecoratorMethod)
app.add_directive_to_domain('py', 'coroutinefunction', PyCoroutineFunction)
app.add_directive_to_domain('py', 'coroutinemethod', PyCoroutineMethod)
app.add_directive_to_domain('py', 'awaitablefunction', PyAwaitableFunction)
app.add_directive_to_domain('py', 'awaitablemethod', PyAwaitableMethod)
app.add_directive_to_domain('py', 'abstractmethod', PyAbstractMethod)
- app.add_directive('miscnews', MiscNews)
- app.add_css_file('sidebar-wrap.css')
app.connect('env-check-consistency', patch_pairindextypes)
return {'version': '1.0', 'parallel_read_safe': True}
diff --git a/Doc/tools/static/sidebar-wrap.css b/Doc/tools/static/sidebar-wrap.css
deleted file mode 100644
index 0a80f516f28349..00000000000000
--- a/Doc/tools/static/sidebar-wrap.css
+++ /dev/null
@@ -1,6 +0,0 @@
-div.sphinxsidebarwrapper {
- overflow-x: scroll;
-}
-div.sphinxsidebarwrapper li code {
- overflow-wrap: normal;
-}
diff --git a/Doc/tools/templates/dummy.html b/Doc/tools/templates/dummy.html
index 49c2a71a5e40cf..4f0f6f91436a87 100644
--- a/Doc/tools/templates/dummy.html
+++ b/Doc/tools/templates/dummy.html
@@ -7,6 +7,10 @@
{% trans %}Deprecated since version {deprecated}, will be removed in version {removed}{% endtrans %}
{% trans %}Deprecated since version {deprecated}, removed in version {removed}{% endtrans %}
+In extensions/availability.py:
+
+{% trans %}Availability{% endtrans %}
+
In extensions/c_annotations.py:
{% trans %}Part of the{% endtrans %}
diff --git a/Doc/tutorial/datastructures.rst b/Doc/tutorial/datastructures.rst
index 263b0c2e2815a1..cbe780e075baf5 100644
--- a/Doc/tutorial/datastructures.rst
+++ b/Doc/tutorial/datastructures.rst
@@ -142,8 +142,8 @@ Using Lists as Stacks
The list methods make it very easy to use a list as a stack, where the last
element added is the first element retrieved ("last-in, first-out"). To add an
-item to the top of the stack, use :meth:`!~list.append`. To retrieve an item from the
-top of the stack, use :meth:`!~list.pop` without an explicit index. For example::
+item to the top of the stack, use :meth:`!append`. To retrieve an item from the
+top of the stack, use :meth:`!pop` without an explicit index. For example::
>>> stack = [3, 4, 5]
>>> stack.append(6)
@@ -340,7 +340,7 @@ The :keyword:`!del` statement
=============================
There is a way to remove an item from a list given its index instead of its
-value: the :keyword:`del` statement. This differs from the :meth:`!~list.pop` method
+value: the :keyword:`del` statement. This differs from the :meth:`!pop` method
which returns a value. The :keyword:`!del` statement can also be used to remove
slices from a list or clear the entire list (which we did earlier by assignment
of an empty list to the slice). For example::
@@ -500,8 +500,8 @@ any immutable type; strings and numbers can always be keys. Tuples can be used
as keys if they contain only strings, numbers, or tuples; if a tuple contains
any mutable object either directly or indirectly, it cannot be used as a key.
You can't use lists as keys, since lists can be modified in place using index
-assignments, slice assignments, or methods like :meth:`!~list.append` and
-:meth:`!~list.extend`.
+assignments, slice assignments, or methods like :meth:`!append` and
+:meth:`!extend`.
It is best to think of a dictionary as a set of *key: value* pairs,
with the requirement that the keys are unique (within one dictionary). A pair of
diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst
index 8f54a0fdebe1d3..797ba29ad5476c 100644
--- a/Doc/using/cmdline.rst
+++ b/Doc/using/cmdline.rst
@@ -1197,7 +1197,7 @@ conflict.
.. envvar:: PYTHON_BASIC_REPL
- If this variable is set to ``1``, the interpreter will not attempt to
+ If this variable is set to any value, the interpreter will not attempt to
load the Python-based :term:`REPL` that requires :mod:`curses` and
:mod:`readline`, and will instead use the traditional parser-based
:term:`REPL`.
diff --git a/Doc/using/configure.rst b/Doc/using/configure.rst
index bf980b514ccd96..1a1f8ae02df0c0 100644
--- a/Doc/using/configure.rst
+++ b/Doc/using/configure.rst
@@ -314,6 +314,10 @@ General Options
By convention, ``--enable-experimental-jit`` is a shorthand for ``--enable-experimental-jit=yes``.
+ .. note::
+
+ When building CPython with JIT enabled, ensure that your system has Python 3.11 or later installed.
+
.. versionadded:: 3.13
.. option:: PKG_CONFIG
diff --git a/Doc/using/ios.rst b/Doc/using/ios.rst
index 4d4eb2031ee980..aa43f75ec35a6c 100644
--- a/Doc/using/ios.rst
+++ b/Doc/using/ios.rst
@@ -292,10 +292,12 @@ To add Python to an iOS Xcode project:
10. Add Objective C code to initialize and use a Python interpreter in embedded
mode. You should ensure that:
- * :c:member:`UTF-8 mode <PyPreConfig.utf8_mode>` is *enabled*;
- * :c:member:`Buffered stdio <PyConfig.buffered_stdio>` is *disabled*;
- * :c:member:`Writing bytecode <PyConfig.write_bytecode>` is *disabled*;
- * :c:member:`Signal handlers <PyConfig.install_signal_handlers>` are *enabled*;
+ * UTF-8 mode (:c:member:`PyPreConfig.utf8_mode`) is *enabled*;
+ * Buffered stdio (:c:member:`PyConfig.buffered_stdio`) is *disabled*;
+ * Writing bytecode (:c:member:`PyConfig.write_bytecode`) is *disabled*;
+ * Signal handlers (:c:member:`PyConfig.install_signal_handlers`) are *enabled*;
+ * System logging (:c:member:`PyConfig.use_system_logger`) is *enabled*
+ (optional, but strongly recommended);
* ``PYTHONHOME`` for the interpreter is configured to point at the
``python`` subfolder of your app's bundle; and
* The ``PYTHONPATH`` for the interpreter includes:
@@ -324,6 +326,49 @@ modules in your app, some additional steps will be required:
* If you're using a separate folder for third-party packages, ensure that folder
is included as part of the ``PYTHONPATH`` configuration in step 10.
+Testing a Python package
+------------------------
+
+The CPython source tree contains :source:`a testbed project <iOS/testbed>` that
+is used to run the CPython test suite on the iOS simulator. This testbed can also
+be used as a testbed project for running your Python library's test suite on iOS.
+
+After building or obtaining an iOS XCFramework (See :source:`iOS/README.rst`
+for details), create a clone of the Python iOS testbed project by running:
+
+.. code-block:: bash
+
+ $ python iOS/testbed clone --framework <path/to/Python.xcframework> --app <path/to/module1> --app <path/to/module2> app-testbed
+
+You will need to modify the ``iOS/testbed`` reference to point to that
+directory in the CPython source tree; any folders specified with the ``--app``
+flag will be copied into the cloned testbed project. The resulting testbed will
+be created in the ``app-testbed`` folder. In this example, the ``module1`` and
+``module2`` would be importable modules at runtime. If your project has
+additional dependencies, they can be installed into the
+``app-testbed/iOSTestbed/app_packages`` folder (using ``pip install --target
+app-testbed/iOSTestbed/app_packages`` or similar).
+
+You can then use the ``app-testbed`` folder to run the test suite for your app.
+For example, if ``module1.tests`` was the entry point to your test suite, you
+could run:
+
+.. code-block:: bash
+
+ $ python app-testbed run -- module1.tests
+
+This is the equivalent of running ``python -m module1.tests`` on a desktop
+Python build. Any arguments after the ``--`` will be passed to the testbed as
+if they were arguments to ``python -m`` on a desktop machine.
+
+You can also open the testbed project in Xcode by running:
+
+.. code-block:: bash
+
+ $ open app-testbed/iOSTestbed.xcodeproj
+
+This will allow you to use the full Xcode suite of tools for debugging.
+
App Store Compliance
====================
diff --git a/Doc/whatsnew/2.3.rst b/Doc/whatsnew/2.3.rst
index ac463f82cfb8ca..b7e4e73f4ce4aa 100644
--- a/Doc/whatsnew/2.3.rst
+++ b/Doc/whatsnew/2.3.rst
@@ -353,7 +353,7 @@ convert them to Unicode using the ``mbcs`` encoding.
Other systems also allow Unicode strings as file names but convert them to byte
strings before passing them to the system, which can cause a :exc:`UnicodeError`
to be raised. Applications can test whether arbitrary Unicode strings are
-supported as file names by checking :attr:`os.path.supports_unicode_filenames`,
+supported as file names by checking :const:`os.path.supports_unicode_filenames`,
a Boolean value.
Under MacOS, :func:`os.listdir` may now return Unicode filenames.
diff --git a/Doc/whatsnew/2.7.rst b/Doc/whatsnew/2.7.rst
index 0e4dee0bd24fb2..caed3192be871d 100644
--- a/Doc/whatsnew/2.7.rst
+++ b/Doc/whatsnew/2.7.rst
@@ -1602,7 +1602,7 @@ changes, or look through the Subversion logs for all the details.
identifier instead of the previous default value of ``'python'``.
(Changed by Sean Reifschneider; :issue:`8451`.)
-* The :attr:`sys.version_info` value is now a named tuple, with attributes
+* The :data:`sys.version_info` value is now a named tuple, with attributes
named :attr:`!major`, :attr:`!minor`, :attr:`!micro`,
:attr:`!releaselevel`, and :attr:`!serial`. (Contributed by Ross
Light; :issue:`4285`.)
diff --git a/Doc/whatsnew/3.0.rst b/Doc/whatsnew/3.0.rst
index d97f5fdd9eaa4a..26e62080e819d8 100644
--- a/Doc/whatsnew/3.0.rst
+++ b/Doc/whatsnew/3.0.rst
@@ -150,8 +150,8 @@ Some well-known APIs no longer return lists:
sorted(d)`` instead (this works in Python 2.5 too and is just
as efficient).
-* Also, the :meth:`dict.iterkeys`, :meth:`dict.iteritems` and
- :meth:`dict.itervalues` methods are no longer supported.
+* Also, the :meth:`!dict.iterkeys`, :meth:`!dict.iteritems` and
+ :meth:`!dict.itervalues` methods are no longer supported.
* :func:`map` and :func:`filter` return iterators. If you really need
a list and the input sequences are all of equal length, a quick
@@ -170,7 +170,7 @@ Some well-known APIs no longer return lists:
:func:`itertools.zip_longest`, e.g. ``map(func, *sequences)`` becomes
``list(map(func, itertools.zip_longest(*sequences)))``.
-* :func:`range` now behaves like :func:`xrange` used to behave, except
+* :func:`range` now behaves like :func:`!xrange` used to behave, except
it works with values of arbitrary size. The latter no longer
exists.
@@ -192,33 +192,33 @@ Python 3.0 has simplified the rules for ordering comparisons:
operators: objects of different incomparable types always compare
unequal to each other.
-* :meth:`builtin.sorted` and :meth:`list.sort` no longer accept the
+* :meth:`sorted` and :meth:`list.sort` no longer accept the
*cmp* argument providing a comparison function. Use the *key*
argument instead. N.B. the *key* and *reverse* arguments are now
"keyword-only".
-* The :func:`cmp` function should be treated as gone, and the :meth:`__cmp__`
- special method is no longer supported. Use :meth:`__lt__` for sorting,
- :meth:`__eq__` with :meth:`__hash__`, and other rich comparisons as needed.
- (If you really need the :func:`cmp` functionality, you could use the
+* The :func:`!cmp` function should be treated as gone, and the :meth:`!__cmp__`
+ special method is no longer supported. Use :meth:`~object.__lt__` for sorting,
+ :meth:`~object.__eq__` with :meth:`~object.__hash__`, and other rich comparisons as needed.
+ (If you really need the :func:`!cmp` functionality, you could use the
expression ``(a > b) - (a < b)`` as the equivalent for ``cmp(a, b)``.)
Integers
--------
-* :pep:`237`: Essentially, :class:`long` renamed to :class:`int`.
+* :pep:`237`: Essentially, :class:`!long` renamed to :class:`int`.
That is, there is only one built-in integral type, named
- :class:`int`; but it behaves mostly like the old :class:`long` type.
+ :class:`int`; but it behaves mostly like the old :class:`!long` type.
* :pep:`238`: An expression like ``1/2`` returns a float. Use
``1//2`` to get the truncating behavior. (The latter syntax has
existed for years, at least since Python 2.2.)
-* The :data:`sys.maxint` constant was removed, since there is no
+* The :data:`!sys.maxint` constant was removed, since there is no
longer a limit to the value of integers. However, :data:`sys.maxsize`
can be used as an integer larger than any practical list or string
index. It conforms to the implementation's "natural" integer size
- and is typically the same as :data:`sys.maxint` in previous releases
+ and is typically the same as :data:`!sys.maxint` in previous releases
on the same platform (assuming the same build options).
* The :func:`repr` of a long integer doesn't include the trailing ``L``
@@ -251,7 +251,7 @@ changed.
that uses Unicode, encodings or binary data most likely has to
change. The change is for the better, as in the 2.x world there
were numerous bugs having to do with mixing encoded and unencoded
- text. To be prepared in Python 2.x, start using :class:`unicode`
+ text. To be prepared in Python 2.x, start using :class:`!unicode`
for all unencoded text, and :class:`str` for binary or encoded data
only. Then the ``2to3`` tool will do most of the work for you.
@@ -269,7 +269,7 @@ changed.
separate *mutable* type to hold buffered binary data,
:class:`bytearray`. Nearly all APIs that accept :class:`bytes` also
accept :class:`bytearray`. The mutable API is based on
- :class:`collections.MutableSequence`.
+ :class:`collections.MutableSequence <collections.abc.MutableSequence>`.
* All backslashes in raw string literals are interpreted literally.
This means that ``'\U'`` and ``'\u'`` escapes in raw strings are not
@@ -278,11 +278,11 @@ changed.
single "euro" character. (Of course, this change only affects raw
string literals; the euro character is ``'\u20ac'`` in Python 3.0.)
-* The built-in :class:`basestring` abstract type was removed. Use
+* The built-in :class:`!basestring` abstract type was removed. Use
:class:`str` instead. The :class:`str` and :class:`bytes` types
don't have functionality enough in common to warrant a shared base
class. The ``2to3`` tool (see below) replaces every occurrence of
- :class:`basestring` with :class:`str`.
+ :class:`!basestring` with :class:`str`.
* Files opened as text files (still the default mode for :func:`open`)
always use an encoding to map between strings (in memory) and bytes
@@ -357,7 +357,7 @@ New Syntax
provides a standardized way of annotating a function's parameters
and return value. There are no semantics attached to such
annotations except that they can be introspected at runtime using
- the :attr:`~object.__annotations__` attribute. The intent is to
+ the :attr:`!__annotations__` attribute. The intent is to
encourage experimentation through metaclasses, decorators or frameworks.
* :pep:`3102`: Keyword-only arguments. Named parameters occurring
@@ -428,7 +428,7 @@ Changed Syntax
class C(metaclass=M):
...
- The module-global :data:`__metaclass__` variable is no longer
+ The module-global :data:`!__metaclass__` variable is no longer
supported. (It was a crutch to make it easier to default to
new-style classes without deriving every class from
:class:`object`.)
@@ -522,19 +522,19 @@ consulted for longer descriptions.
*encoding*, *errors*, *newline* and *closefd*. Also note that an
invalid *mode* argument now raises :exc:`ValueError`, not
:exc:`IOError`. The binary file object underlying a text file
- object can be accessed as :attr:`f.buffer` (but beware that the
+ object can be accessed as :attr:`!f.buffer` (but beware that the
text object maintains a buffer of itself in order to speed up
the encoding and decoding operations).
-* :ref:`pep-3118`. The old builtin :func:`buffer` is now really gone;
+* :ref:`pep-3118`. The old builtin :func:`!buffer` is now really gone;
the new builtin :func:`memoryview` provides (mostly) similar
functionality.
* :ref:`pep-3119`. The :mod:`abc` module and the ABCs defined in the
:mod:`collections` module plays a somewhat more prominent role in
the language now, and built-in collection types like :class:`dict`
- and :class:`list` conform to the :class:`collections.MutableMapping`
- and :class:`collections.MutableSequence` ABCs, respectively.
+ and :class:`list` conform to the :class:`collections.MutableMapping <collections.abc.MutableMapping>`
+ and :class:`collections.MutableSequence <collections.abc.MutableSequence>` ABCs, respectively.
* :ref:`pep-3127`. As mentioned above, the new octal literal
notation is the only one supported, and binary literals have been
@@ -612,7 +612,7 @@ review:
:mod:`!CGIHTTPServer`, :mod:`!SimpleHTTPServer`, :mod:`!Cookie`,
:mod:`!cookielib`).
- * :mod:`tkinter` (all :mod:`Tkinter`-related modules except
+ * :mod:`tkinter` (all ``Tkinter``-related modules except
:mod:`turtle`). The target audience of :mod:`turtle` doesn't
really care about :mod:`tkinter`. Also note that as of Python
2.6, the functionality of :mod:`turtle` has been greatly enhanced.
@@ -628,47 +628,47 @@ Some other changes to standard library modules, not covered by
* Killed :mod:`!sets`. Use the built-in :func:`set` class.
-* Cleanup of the :mod:`sys` module: removed :func:`sys.exitfunc`,
- :func:`sys.exc_clear`, :data:`sys.exc_type`, :data:`sys.exc_value`,
- :data:`sys.exc_traceback`. (Note that :data:`sys.last_type`
+* Cleanup of the :mod:`sys` module: removed :func:`!sys.exitfunc`,
+ :func:`!sys.exc_clear`, :data:`!sys.exc_type`, :data:`!sys.exc_value`,
+ :data:`!sys.exc_traceback`. (Note that :data:`sys.last_type`
etc. remain.)
-* Cleanup of the :class:`array.array` type: the :meth:`read` and
- :meth:`write` methods are gone; use :meth:`fromfile` and
- :meth:`tofile` instead. Also, the ``'c'`` typecode for array is
+* Cleanup of the :class:`array.array` type: the :meth:`!read` and
+ :meth:`!write` methods are gone; use :meth:`~array.array.fromfile` and
+ :meth:`~array.array.tofile` instead. Also, the ``'c'`` typecode for array is
gone -- use either ``'b'`` for bytes or ``'u'`` for Unicode
characters.
* Cleanup of the :mod:`operator` module: removed
- :func:`sequenceIncludes` and :func:`isCallable`.
+ :func:`!sequenceIncludes` and :func:`!isCallable`.
* Cleanup of the :mod:`!thread` module: :func:`!acquire_lock` and
:func:`!release_lock` are gone; use :meth:`~threading.Lock.acquire` and
:meth:`~threading.Lock.release` instead.
-* Cleanup of the :mod:`random` module: removed the :func:`jumpahead` API.
+* Cleanup of the :mod:`random` module: removed the :func:`!jumpahead` API.
* The :mod:`!new` module is gone.
-* The functions :func:`os.tmpnam`, :func:`os.tempnam` and
- :func:`os.tmpfile` have been removed in favor of the :mod:`tempfile`
+* The functions :func:`!os.tmpnam`, :func:`!os.tempnam` and
+ :func:`!os.tmpfile` have been removed in favor of the :mod:`tempfile`
module.
* The :mod:`tokenize` module has been changed to work with bytes. The
main entry point is now :func:`tokenize.tokenize`, instead of
generate_tokens.
-* :data:`string.letters` and its friends (:data:`string.lowercase` and
- :data:`string.uppercase`) are gone. Use
+* :data:`!string.letters` and its friends (:data:`!string.lowercase` and
+ :data:`!string.uppercase`) are gone. Use
:data:`string.ascii_letters` etc. instead. (The reason for the
- removal is that :data:`string.letters` and friends had
+ removal is that :data:`!string.letters` and friends had
locale-specific behavior, which is a bad idea for such
attractively named global "constants".)
-* Renamed module :mod:`__builtin__` to :mod:`builtins` (removing the
- underscores, adding an 's'). The :data:`__builtins__` variable
+* Renamed module :mod:`!__builtin__` to :mod:`builtins` (removing the
+ underscores, adding an 's'). The :data:`!__builtins__` variable
found in most global namespaces is unchanged. To modify a builtin,
- you should use :mod:`builtins`, not :data:`__builtins__`!
+ you should use :mod:`builtins`, not :data:`!__builtins__`!
:pep:`3101`: A New Approach To String Formatting
@@ -702,9 +702,9 @@ new powerful features added:
idiom for handling all exceptions except for this latter category is
to use :keyword:`except` :exc:`Exception`.
-* :exc:`StandardError` was removed.
+* :exc:`!StandardError` was removed.
-* Exceptions no longer behave as sequences. Use the :attr:`args`
+* Exceptions no longer behave as sequences. Use the :attr:`~BaseException.args`
attribute instead.
* :pep:`3109`: Raising exceptions. You must now use :samp:`raise
@@ -765,20 +765,20 @@ Operators And Special Methods
When referencing a method as a class attribute, you now get a plain
function object.
-* :meth:`__getslice__`, :meth:`__setslice__` and :meth:`__delslice__`
+* :meth:`!__getslice__`, :meth:`!__setslice__` and :meth:`!__delslice__`
were killed. The syntax ``a[i:j]`` now translates to
- ``a.__getitem__(slice(i, j))`` (or :meth:`__setitem__` or
- :meth:`__delitem__`, when used as an assignment or deletion target,
+ ``a.__getitem__(slice(i, j))`` (or :meth:`~object.__setitem__` or
+ :meth:`~object.__delitem__`, when used as an assignment or deletion target,
respectively).
* :pep:`3114`: the standard :meth:`next` method has been renamed to
:meth:`~iterator.__next__`.
-* The :meth:`__oct__` and :meth:`__hex__` special methods are removed
- -- :func:`oct` and :func:`hex` use :meth:`__index__` now to convert
+* The :meth:`!__oct__` and :meth:`!__hex__` special methods are removed
+ -- :func:`oct` and :func:`hex` use :meth:`~object.__index__` now to convert
the argument to an integer.
-* Removed support for :attr:`__members__` and :attr:`__methods__`.
+* Removed support for :attr:`!__members__` and :attr:`!__methods__`.
* The function attributes named :attr:`!func_X` have been renamed to
use the :attr:`!__X__` form, freeing up these names in the function
@@ -802,7 +802,7 @@ Builtins
instance will automatically be chosen. With arguments, the behavior
of :func:`super` is unchanged.
-* :pep:`3111`: :func:`raw_input` was renamed to :func:`input`. That
+* :pep:`3111`: :func:`!raw_input` was renamed to :func:`input`. That
is, the new :func:`input` function reads a line from
:data:`sys.stdin` and returns it with the trailing newline stripped.
It raises :exc:`EOFError` if the input is terminated prematurely.
@@ -820,31 +820,31 @@ Builtins
argument and a value of the same type as ``x`` when called with two
arguments.
-* Moved :func:`intern` to :func:`sys.intern`.
+* Moved :func:`!intern` to :func:`sys.intern`.
-* Removed: :func:`apply`. Instead of ``apply(f, args)`` use
+* Removed: :func:`!apply`. Instead of ``apply(f, args)`` use
``f(*args)``.
* Removed :func:`callable`. Instead of ``callable(f)`` you can use
- ``isinstance(f, collections.Callable)``. The :func:`operator.isCallable`
+ ``isinstance(f, collections.Callable)``. The :func:`!operator.isCallable`
function is also gone.
-* Removed :func:`coerce`. This function no longer serves a purpose
+* Removed :func:`!coerce`. This function no longer serves a purpose
now that classic classes are gone.
-* Removed :func:`execfile`. Instead of ``execfile(fn)`` use
+* Removed :func:`!execfile`. Instead of ``execfile(fn)`` use
``exec(open(fn).read())``.
-* Removed the :class:`file` type. Use :func:`open`. There are now several
+* Removed the :class:`!file` type. Use :func:`open`. There are now several
different kinds of streams that open can return in the :mod:`io` module.
-* Removed :func:`reduce`. Use :func:`functools.reduce` if you really
+* Removed :func:`!reduce`. Use :func:`functools.reduce` if you really
need it; however, 99 percent of the time an explicit :keyword:`for`
loop is more readable.
-* Removed :func:`reload`. Use :func:`!imp.reload`.
+* Removed :func:`!reload`. Use :func:`!imp.reload`.
-* Removed. :meth:`dict.has_key` -- use the :keyword:`in` operator
+* Removed. :meth:`!dict.has_key` -- use the :keyword:`in` operator
instead.
.. ======================================================================
diff --git a/Doc/whatsnew/3.12.rst b/Doc/whatsnew/3.12.rst
index d2b0911ed627a0..cb9f54c6897ee3 100644
--- a/Doc/whatsnew/3.12.rst
+++ b/Doc/whatsnew/3.12.rst
@@ -1467,8 +1467,8 @@ imp
``imp.NullImporter`` Insert ``None`` into ``sys.path_importer_cache``
``imp.cache_from_source()`` :func:`importlib.util.cache_from_source`
``imp.find_module()`` :func:`importlib.util.find_spec`
- ``imp.get_magic()`` :attr:`importlib.util.MAGIC_NUMBER`
- ``imp.get_suffixes()`` :attr:`importlib.machinery.SOURCE_SUFFIXES`, :attr:`importlib.machinery.EXTENSION_SUFFIXES`, and :attr:`importlib.machinery.BYTECODE_SUFFIXES`
+ ``imp.get_magic()`` :const:`importlib.util.MAGIC_NUMBER`
+ ``imp.get_suffixes()`` :const:`importlib.machinery.SOURCE_SUFFIXES`, :const:`importlib.machinery.EXTENSION_SUFFIXES`, and :const:`importlib.machinery.BYTECODE_SUFFIXES`
 ``imp.get_tag()`` :attr:`sys.implementation.cache_tag <sys.implementation>`
``imp.load_module()`` :func:`importlib.import_module`
``imp.new_module(name)`` ``types.ModuleType(name)``
diff --git a/Doc/whatsnew/3.13.rst b/Doc/whatsnew/3.13.rst
index 237b1d5f642676..69960b29f2b7c5 100644
--- a/Doc/whatsnew/3.13.rst
+++ b/Doc/whatsnew/3.13.rst
@@ -334,7 +334,7 @@ enabled at runtime using the environment variable :envvar:`PYTHON_GIL` or
the command-line option :option:`-X gil=1`.
To check if the current interpreter supports free-threading, :option:`python -VV <-V>`
-and :attr:`sys.version` contain "experimental free-threading build".
+and :data:`sys.version` contain "experimental free-threading build".
The new :func:`!sys._is_gil_enabled` function can be used to check whether
the GIL is actually disabled in the running process.
@@ -889,6 +889,13 @@ email
the :cve:`2023-27043` fix.)
+enum
+----
+
+* :class:`~enum.EnumDict` has been made public to better support subclassing
+ :class:`~enum.EnumType`.
+
+
fractions
---------
@@ -1064,7 +1071,7 @@ os
which makes the newly spawned process use the current process environment.
(Contributed by Jakub Kulik in :gh:`113119`.)
-* :func:`~os.posix_spawn` can now use the :attr:`~os.POSIX_SPAWN_CLOSEFROM`
+* :func:`~os.posix_spawn` can now use the :const:`~os.POSIX_SPAWN_CLOSEFROM`
attribute in the *file_actions* parameter on platforms that support
:c:func:`!posix_spawn_file_actions_addclosefrom_np`.
(Contributed by Jakub Kulik in :gh:`113117`.)
@@ -1501,8 +1508,20 @@ All of the following modules were deprecated in Python 3.11,
and are now removed:
* :mod:`!aifc`
+
+ * :pypi:`standard-aifc`:
+ Use the redistribution of ``aifc`` library from PyPI.
+
* :mod:`!audioop`
+
+ * :pypi:`audioop-lts`:
+ Use ``audioop-lts`` library from PyPI.
+
* :mod:`!chunk`
+
+ * :pypi:`standard-chunk`:
+ Use the redistribution of ``chunk`` library from PyPI.
+
* :mod:`!cgi` and :mod:`!cgitb`
* :class:`!cgi.FieldStorage` can typically be replaced with
@@ -1533,6 +1552,9 @@ and are now removed:
For example, the :class:`email.message.EmailMessage`
and :class:`email.message.Message` classes.
+ * :pypi:`standard-cgi`: and :pypi:`standard-cgitb`:
+ Use the redistribution of ``cgi`` and ``cgitb`` library from PyPI.
+
* :mod:`!crypt` and the private :mod:`!_crypt` extension.
The :mod:`hashlib` module may be an appropriate replacement
when simply hashing a value is required.
@@ -1551,6 +1573,8 @@ and are now removed:
Fork of the :mod:`!crypt` module,
wrapper to the :manpage:`crypt_r(3)` library call
and associated functionality.
+ * :pypi:`standard-crypt` and :pypi:`deprecated-crypt-alternative`:
+ Use the redistribution of ``crypt`` and reimplementation of ``_crypt`` libraries from PyPI.
* :mod:`!imghdr`:
The :pypi:`filetype`, :pypi:`puremagic`, or :pypi:`python-magic` libraries
@@ -1558,30 +1582,65 @@ and are now removed:
For example, the :func:`!puremagic.what` function can be used
to replace the :func:`!imghdr.what` function for all file formats
that were supported by :mod:`!imghdr`.
+
+ * :pypi:`standard-imghdr`:
+ Use the redistribution of ``imghdr`` library from PyPI.
+
* :mod:`!mailcap`:
Use the :mod:`mimetypes` module instead.
+
+ * :pypi:`standard-mailcap`:
+ Use the redistribution of ``mailcap`` library from PyPI.
+
* :mod:`!msilib`
* :mod:`!nis`
* :mod:`!nntplib`:
Use the :pypi:`pynntp` library from PyPI instead.
+
+ * :pypi:`standard-nntplib`:
+ Use the redistribution of ``nntplib`` library from PyPI.
+
* :mod:`!ossaudiodev`:
For audio playback, use the :pypi:`pygame` library from PyPI instead.
* :mod:`!pipes`:
Use the :mod:`subprocess` module instead.
Use :func:`shlex.quote` to replace the undocumented ``pipes.quote``
function.
+
+ * :pypi:`standard-pipes`:
+ Use the redistribution of ``pipes`` library from PyPI.
+
* :mod:`!sndhdr`:
The :pypi:`filetype`, :pypi:`puremagic`, or :pypi:`python-magic` libraries
should be used as replacements.
+
+ * :pypi:`standard-sndhdr`:
+ Use the redistribution of ``sndhdr`` library from PyPI.
+
* :mod:`!spwd`:
Use the :pypi:`python-pam` library from PyPI instead.
* :mod:`!sunau`
+
+ * :pypi:`standard-sunau`:
+ Use the redistribution of ``sunau`` library from PyPI.
+
 * :mod:`!telnetlib`:
Use the :pypi:`telnetlib3` or :pypi:`Exscript` libraries from PyPI instead.
+
+ * :pypi:`standard-telnetlib`:
+ Use the redistribution of ``telnetlib`` library from PyPI.
+
* :mod:`!uu`:
Use the :mod:`base64` module instead, as a modern alternative.
+
+ * :pypi:`standard-uu`:
+ Use the redistribution of ``uu`` library from PyPI.
+
* :mod:`!xdrlib`
+ * :pypi:`standard-xdrlib`:
+ Use the redistribution of ``xdrlib`` library from PyPI.
+
(Contributed by Victor Stinner and Zachary Ware in :gh:`104773` and :gh:`104780`.)
@@ -1650,6 +1709,22 @@ opcode
(Contributed by Irit Katriel in :gh:`105481`.)
+optparse
+--------
+
+* This module is no longer considered :term:`soft deprecated`.
+ While :mod:`argparse` remains preferred for new projects that
+ aren't using a third party command line argument processing
+ library, there are aspects of the way ``argparse`` works that
+ mean the lower level ``optparse`` module may provide a better
+ foundation for *writing* argument processing libraries, and
+ for implementing command line applications which adhere more
+ strictly than ``argparse`` does to various Unix command line
+ processing conventions that originate in the behaviour of the
+ C :c:func:`!getopt` function.
+ (Contributed by Alyssa Coghlan and Serhiy Storchaka in :gh:`126180`.)
+
+
pathlib
-------
@@ -1789,14 +1864,6 @@ New Deprecations
Check membership in :data:`~dis.hasarg` instead.
(Contributed by Irit Katriel in :gh:`109319`.)
-* :mod:`getopt` and :mod:`optparse`:
-
- * Both modules are now :term:`soft deprecated`,
- with :mod:`argparse` preferred for new projects.
- This is a new soft-deprecation for the :mod:`!getopt` module,
- whereas the :mod:`!optparse` module was already *de facto* soft deprecated.
- (Contributed by Victor Stinner in :gh:`106535`.)
-
* :mod:`gettext`:
* Deprecate non-integer numbers as arguments to functions and methods
@@ -2700,6 +2767,33 @@ Changes in the C API
Calling this function is redundant now that :c:func:`PyFrame_GetLocals`
 returns a write-through proxy for :term:`optimized scopes <optimized scope>`.
+* Python 3.13 removed many private functions. Some of them can be replaced using these
+ alternatives:
+
+ * ``_PyDict_Pop()``: :c:func:`PyDict_Pop` or :c:func:`PyDict_PopString`;
+ * ``_PyDict_GetItemWithError()``: :c:func:`PyDict_GetItemRef`;
+ * ``_PyErr_WriteUnraisableMsg()``: :c:func:`PyErr_FormatUnraisable`;
+ * ``_PyEval_SetTrace()``: :c:func:`PyEval_SetTrace` or :c:func:`PyEval_SetTraceAllThreads`;
+ * ``_PyList_Extend()``: :c:func:`PyList_Extend`;
+ * ``_PyLong_AsInt()``: :c:func:`PyLong_AsInt`;
+ * ``_PyMem_RawStrdup()``: ``strdup()``;
+ * ``_PyMem_Strdup()``: ``strdup()``;
+ * ``_PyObject_ClearManagedDict()``: :c:func:`PyObject_ClearManagedDict`;
+ * ``_PyObject_VisitManagedDict()``: :c:func:`PyObject_VisitManagedDict`;
+ * ``_PyThreadState_UncheckedGet()``: :c:func:`PyThreadState_GetUnchecked`;
+ * ``_PyTime_AsSecondsDouble()``: :c:func:`PyTime_AsSecondsDouble`;
+ * ``_PyTime_GetMonotonicClock()``: :c:func:`PyTime_Monotonic` or :c:func:`PyTime_MonotonicRaw`;
+ * ``_PyTime_GetPerfCounter()``: :c:func:`PyTime_PerfCounter` or :c:func:`PyTime_PerfCounterRaw`;
+ * ``_PyTime_GetSystemClock()``: :c:func:`PyTime_Time` or :c:func:`PyTime_TimeRaw`;
+ * ``_PyTime_MAX``: :c:var:`PyTime_MAX`;
+ * ``_PyTime_MIN``: :c:var:`PyTime_MIN`;
+ * ``_PyTime_t``: :c:type:`PyTime_t`;
+ * ``_Py_HashPointer()``: :c:func:`Py_HashPointer`;
+ * ``_Py_IsFinalizing()``: :c:func:`Py_IsFinalizing`.
+
+ The `pythoncapi-compat project`_ can be used to get most of these new
+ functions on Python 3.12 and older.
+
Regression Test Changes
=======================
diff --git a/Doc/whatsnew/3.3.rst b/Doc/whatsnew/3.3.rst
index f814c4e90d5719..7a8eb47cbdb354 100644
--- a/Doc/whatsnew/3.3.rst
+++ b/Doc/whatsnew/3.3.rst
@@ -1147,8 +1147,8 @@ API changes
| :const:`MIN_EMIN` | ``-425000000`` | ``-999999999999999999`` |
+-------------------+----------------+-------------------------+
-* In the context templates (:class:`~decimal.DefaultContext`,
- :class:`~decimal.BasicContext` and :class:`~decimal.ExtendedContext`)
+* In the context templates (:const:`~decimal.DefaultContext`,
+ :const:`~decimal.BasicContext` and :const:`~decimal.ExtendedContext`)
the magnitude of :attr:`~decimal.Context.Emax` and
:attr:`~decimal.Context.Emin` has changed to ``999999``.
diff --git a/Doc/whatsnew/3.4.rst b/Doc/whatsnew/3.4.rst
index 71b186aeed7359..e4f602a17ee968 100644
--- a/Doc/whatsnew/3.4.rst
+++ b/Doc/whatsnew/3.4.rst
@@ -994,7 +994,7 @@ The :func:`~importlib.reload` function has been moved from :mod:`!imp` to
:mod:`importlib` as part of the :mod:`!imp` module deprecation. (Contributed by
Berker Peksag in :issue:`18193`.)
-:mod:`importlib.util` now has a :data:`~importlib.util.MAGIC_NUMBER` attribute
+:mod:`importlib.util` now has a :const:`~importlib.util.MAGIC_NUMBER` attribute
providing access to the bytecode version number. This replaces the
:func:`!get_magic` function in the deprecated :mod:`!imp` module.
(Contributed by Brett Cannon in :issue:`18192`.)
diff --git a/Doc/whatsnew/changelog.rst b/Doc/whatsnew/changelog.rst
index b4356143659031..e796d4157cec76 100644
--- a/Doc/whatsnew/changelog.rst
+++ b/Doc/whatsnew/changelog.rst
@@ -1,5 +1,7 @@
.. _changelog:
+.. default-role:: py:obj
+
+++++++++
Changelog
+++++++++
diff --git a/Include/cpython/initconfig.h b/Include/cpython/initconfig.h
index 5da5ef9e5431b1..20f5c9ad9bb9a8 100644
--- a/Include/cpython/initconfig.h
+++ b/Include/cpython/initconfig.h
@@ -179,6 +179,9 @@ typedef struct PyConfig {
int use_frozen_modules;
int safe_path;
int int_max_str_digits;
+#ifdef __APPLE__
+ int use_system_logger;
+#endif
int cpu_count;
#ifdef Py_GIL_DISABLED
diff --git a/Include/cpython/pystats.h b/Include/cpython/pystats.h
index c4480758f48514..378c2760ec3f55 100644
--- a/Include/cpython/pystats.h
+++ b/Include/cpython/pystats.h
@@ -18,6 +18,12 @@
//
// Define _PY_INTERPRETER macro to increment interpreter_increfs and
// interpreter_decrefs. Otherwise, increment increfs and decrefs.
+//
+// The number of incref operations counted by `incref` and
+// `interpreter_incref` is the number of increment operations, which is
+// not equal to the total of all reference counts. A single increment
+// operation may increase the reference count of an object by more than
+// one. For example, see `_Py_RefcntAdd`.
#ifndef Py_CPYTHON_PYSTATS_H
# error "this header file must not be included directly"
diff --git a/Include/cpython/tracemalloc.h b/Include/cpython/tracemalloc.h
index 61a16ea9a9f3eb..6d094291ae2e90 100644
--- a/Include/cpython/tracemalloc.h
+++ b/Include/cpython/tracemalloc.h
@@ -1,6 +1,9 @@
#ifndef Py_LIMITED_API
#ifndef Py_TRACEMALLOC_H
#define Py_TRACEMALLOC_H
+#ifdef __cplusplus
+extern "C" {
+#endif
/* Track an allocated memory block in the tracemalloc module.
Return 0 on success, return -1 on error (failed to allocate memory to store
@@ -22,5 +25,8 @@ PyAPI_FUNC(int) PyTraceMalloc_Untrack(
unsigned int domain,
uintptr_t ptr);
+#ifdef __cplusplus
+}
+#endif
#endif // !Py_TRACEMALLOC_H
#endif // !Py_LIMITED_API
diff --git a/Include/internal/pycore_atexit.h b/Include/internal/pycore_atexit.h
index 507a5c03cbc792..72c66a05939500 100644
--- a/Include/internal/pycore_atexit.h
+++ b/Include/internal/pycore_atexit.h
@@ -44,6 +44,7 @@ typedef struct {
struct atexit_state {
atexit_callback *ll_callbacks;
+ // Kept for ABI compatibility--do not use! (See GH-127791.)
atexit_callback *last_ll_callback;
// XXX The rest of the state could be moved to the atexit module state
diff --git a/Include/internal/pycore_object.h b/Include/internal/pycore_object.h
index d50a688d5b752a..07b5a95ae1a037 100644
--- a/Include/internal/pycore_object.h
+++ b/Include/internal/pycore_object.h
@@ -105,6 +105,14 @@ PyAPI_FUNC(void) _Py_NO_RETURN _Py_FatalRefcountErrorFunc(
#define _Py_FatalRefcountError(message) \
_Py_FatalRefcountErrorFunc(__func__, (message))
+#define _PyReftracerTrack(obj, operation) \
+ do { \
+ struct _reftracer_runtime_state *tracer = &_PyRuntime.ref_tracer; \
+ if (tracer->tracer_func != NULL) { \
+ void *data = tracer->tracer_data; \
+ tracer->tracer_func((obj), (operation), data); \
+ } \
+ } while(0)
#ifdef Py_REF_DEBUG
/* The symbol is only exposed in the API for the sake of extensions
@@ -147,6 +155,10 @@ static inline void _Py_RefcntAdd(PyObject* op, Py_ssize_t n)
_Py_atomic_add_ssize(&op->ob_ref_shared, (n << _Py_REF_SHARED_SHIFT));
}
#endif
+ // Although the ref count was increased by `n` (which may be greater than 1)
+ // it is only a single increment (i.e. addition) operation, so only 1 refcnt
+ // increment operation is counted.
+ _Py_INCREF_STAT_INC();
}
#define _Py_RefcntAdd(op, n) _Py_RefcntAdd(_PyObject_CAST(op), n)
@@ -216,11 +228,7 @@ _Py_DECREF_SPECIALIZED(PyObject *op, const destructor destruct)
#ifdef Py_TRACE_REFS
_Py_ForgetReference(op);
#endif
- struct _reftracer_runtime_state *tracer = &_PyRuntime.ref_tracer;
- if (tracer->tracer_func != NULL) {
- void* data = tracer->tracer_data;
- tracer->tracer_func(op, PyRefTracer_DESTROY, data);
- }
+ _PyReftracerTrack(op, PyRefTracer_DESTROY);
destruct(op);
}
}
@@ -572,8 +580,52 @@ _PyObject_SetMaybeWeakref(PyObject *op)
}
}
+extern int _PyObject_ResurrectEndSlow(PyObject *op);
#endif
+// Temporarily resurrects an object during deallocation. The refcount is set
+// to one.
+static inline void
+_PyObject_ResurrectStart(PyObject *op)
+{
+ assert(Py_REFCNT(op) == 0);
+#ifdef Py_REF_DEBUG
+ _Py_IncRefTotal(_PyThreadState_GET());
+#endif
+#ifdef Py_GIL_DISABLED
+ _Py_atomic_store_uintptr_relaxed(&op->ob_tid, _Py_ThreadId());
+ _Py_atomic_store_uint32_relaxed(&op->ob_ref_local, 1);
+ _Py_atomic_store_ssize_relaxed(&op->ob_ref_shared, 0);
+#else
+ Py_SET_REFCNT(op, 1);
+#endif
+}
+
+// Undoes an object resurrection by decrementing the refcount without calling
+// _Py_Dealloc(). Returns 0 if the object is dead (the normal case), and
+// deallocation should continue. Returns 1 if the object is still alive.
+static inline int
+_PyObject_ResurrectEnd(PyObject *op)
+{
+#ifdef Py_REF_DEBUG
+ _Py_DecRefTotal(_PyThreadState_GET());
+#endif
+#ifndef Py_GIL_DISABLED
+ Py_SET_REFCNT(op, Py_REFCNT(op) - 1);
+ return Py_REFCNT(op) != 0;
+#else
+ uint32_t local = _Py_atomic_load_uint32_relaxed(&op->ob_ref_local);
+ Py_ssize_t shared = _Py_atomic_load_ssize_acquire(&op->ob_ref_shared);
+ if (_Py_IsOwnedByCurrentThread(op) && local == 1 && shared == 0) {
+ // Fast-path: object has a single refcount and is owned by this thread
+ _Py_atomic_store_uint32_relaxed(&op->ob_ref_local, 0);
+ return 0;
+ }
+ // Slow-path: object has a shared refcount or is not owned by this thread
+ return _PyObject_ResurrectEndSlow(op);
+#endif
+}
+
/* Tries to incref op and returns 1 if successful or 0 otherwise. */
static inline int
_Py_TryIncref(PyObject *op)
diff --git a/Include/internal/pycore_pyerrors.h b/Include/internal/pycore_pyerrors.h
index 15071638203457..615cc23ec93528 100644
--- a/Include/internal/pycore_pyerrors.h
+++ b/Include/internal/pycore_pyerrors.h
@@ -125,6 +125,18 @@ PyAPI_FUNC(void) _PyErr_SetString(
PyObject *exception,
const char *string);
+/*
+ * Set an exception with the error message decoded from the current locale
+ * encoding (LC_CTYPE).
+ *
+ * Exceptions occurring in decoding take priority over the desired exception.
+ *
+ * Exported for '_ctypes' shared extensions.
+ */
+PyAPI_FUNC(void) _PyErr_SetLocaleString(
+ PyObject *exception,
+ const char *string);
+
PyAPI_FUNC(PyObject*) _PyErr_Format(
PyThreadState *tstate,
PyObject *exception,
diff --git a/Include/internal/pycore_tracemalloc.h b/Include/internal/pycore_tracemalloc.h
index 7ddc5bac5d10af..f70d47074f813c 100644
--- a/Include/internal/pycore_tracemalloc.h
+++ b/Include/internal/pycore_tracemalloc.h
@@ -144,7 +144,7 @@ extern PyObject* _PyTraceMalloc_GetTraces(void);
extern PyObject* _PyTraceMalloc_GetObjectTraceback(PyObject *obj);
/* Initialize tracemalloc */
-extern int _PyTraceMalloc_Init(void);
+extern PyStatus _PyTraceMalloc_Init(void);
/* Start tracemalloc */
extern int _PyTraceMalloc_Start(int max_nframe);
diff --git a/Include/patchlevel.h b/Include/patchlevel.h
index 93b0d7b7c7984b..cf92489f67a0ca 100644
--- a/Include/patchlevel.h
+++ b/Include/patchlevel.h
@@ -18,12 +18,12 @@
/*--start constants--*/
#define PY_MAJOR_VERSION 3
#define PY_MINOR_VERSION 13
-#define PY_MICRO_VERSION 1
+#define PY_MICRO_VERSION 2
#define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_FINAL
#define PY_RELEASE_SERIAL 0
/* Version as a string */
-#define PY_VERSION "3.13.1"
+#define PY_VERSION "3.13.2"
/*--end constants--*/
/* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2.
diff --git a/Include/pymacro.h b/Include/pymacro.h
index e3e9cd13594814..e0378f9d27a048 100644
--- a/Include/pymacro.h
+++ b/Include/pymacro.h
@@ -47,7 +47,7 @@
#define Py_CHARMASK(c) ((unsigned char)((c) & 0xff))
#if (defined(__STDC_VERSION__) && __STDC_VERSION__ >= 201112L \
- && !defined(__cplusplus))
+ && !defined(__cplusplus) && !defined(_MSC_VER))
# define Py_BUILD_ASSERT_EXPR(cond) \
((void)sizeof(struct { int dummy; _Static_assert(cond, #cond); }), \
0)
diff --git a/Lib/_android_support.py b/Lib/_android_support.py
index 7572745c851847..ae506f6a4b57b8 100644
--- a/Lib/_android_support.py
+++ b/Lib/_android_support.py
@@ -6,7 +6,7 @@
# The maximum length of a log message in bytes, including the level marker and
# tag, is defined as LOGGER_ENTRY_MAX_PAYLOAD at
# https://cs.android.com/android/platform/superproject/+/android-14.0.0_r1:system/logging/liblog/include/log/log.h;l=71.
-# Messages longer than this will be be truncated by logcat. This limit has already
+# Messages longer than this will be truncated by logcat. This limit has already
# been reduced at least once in the history of Android (from 4076 to 4068 between
# API level 23 and 26), so leave some headroom.
MAX_BYTES_PER_WRITE = 4000
diff --git a/Lib/_apple_support.py b/Lib/_apple_support.py
new file mode 100644
index 00000000000000..92febdcf587070
--- /dev/null
+++ b/Lib/_apple_support.py
@@ -0,0 +1,66 @@
+import io
+import sys
+
+
+def init_streams(log_write, stdout_level, stderr_level):
+ # Redirect stdout and stderr to the Apple system log. This method is
+ # invoked by init_apple_streams() (initconfig.c) if config->use_system_logger
+ # is enabled.
+ sys.stdout = SystemLog(log_write, stdout_level, errors=sys.stderr.errors)
+ sys.stderr = SystemLog(log_write, stderr_level, errors=sys.stderr.errors)
+
+
+class SystemLog(io.TextIOWrapper):
+ def __init__(self, log_write, level, **kwargs):
+ kwargs.setdefault("encoding", "UTF-8")
+ kwargs.setdefault("line_buffering", True)
+ super().__init__(LogStream(log_write, level), **kwargs)
+
+ def __repr__(self):
+ return f"<SystemLog (level {self.buffer.level})>"
+
+ def write(self, s):
+ if not isinstance(s, str):
+ raise TypeError(
+ f"write() argument must be str, not {type(s).__name__}")
+
+ # In case `s` is a str subclass that writes itself to stdout or stderr
+ # when we call its methods, convert it to an actual str.
+ s = str.__str__(s)
+
+ # We want to emit one log message per line, so split
+ # the string before sending it to the superclass.
+ for line in s.splitlines(keepends=True):
+ super().write(line)
+
+ return len(s)
+
+
+class LogStream(io.RawIOBase):
+ def __init__(self, log_write, level):
+ self.log_write = log_write
+ self.level = level
+
+ def __repr__(self):
+ return f"<LogStream (level {self.level})>"
+
+ def writable(self):
+ return True
+
+ def write(self, b):
+ if type(b) is not bytes:
+ try:
+ b = bytes(memoryview(b))
+ except TypeError:
+ raise TypeError(
+ f"write() argument must be bytes-like, not {type(b).__name__}"
+ ) from None
+
+ # Writing an empty string to the stream should have no effect.
+ if b:
+ # Encode null bytes using "modified UTF-8" to avoid truncating the
+ # message. This should not affect the return value, as the caller
+ # may be expecting it to match the length of the input.
+ self.log_write(self.level, b.replace(b"\x00", b"\xc0\x80"))
+
+ return len(b)
diff --git a/Lib/_colorize.py b/Lib/_colorize.py
index 845fb57a90abb8..70acfd4ad0ba8f 100644
--- a/Lib/_colorize.py
+++ b/Lib/_colorize.py
@@ -24,41 +24,44 @@ class ANSIColors:
setattr(NoColors, attr, "")
-def get_colors(colorize: bool = False) -> ANSIColors:
- if colorize or can_colorize():
+def get_colors(colorize: bool = False, *, file=None) -> ANSIColors:
+ if colorize or can_colorize(file=file):
return ANSIColors()
else:
return NoColors
-def can_colorize() -> bool:
- if sys.platform == "win32":
- try:
- import nt
+def can_colorize(*, file=None) -> bool:
+ if file is None:
+ file = sys.stdout
- if not nt._supports_virtual_terminal():
- return False
- except (ImportError, AttributeError):
- return False
if not sys.flags.ignore_environment:
if os.environ.get("PYTHON_COLORS") == "0":
return False
if os.environ.get("PYTHON_COLORS") == "1":
return True
- if "NO_COLOR" in os.environ:
- return False
+ if os.environ.get("NO_COLOR"):
+ return False
if not COLORIZE:
return False
- if not sys.flags.ignore_environment:
- if "FORCE_COLOR" in os.environ:
- return True
- if os.environ.get("TERM") == "dumb":
- return False
+ if os.environ.get("FORCE_COLOR"):
+ return True
+ if os.environ.get("TERM") == "dumb":
+ return False
- if not hasattr(sys.stderr, "fileno"):
+ if not hasattr(file, "fileno"):
return False
+ if sys.platform == "win32":
+ try:
+ import nt
+
+ if not nt._supports_virtual_terminal():
+ return False
+ except (ImportError, AttributeError):
+ return False
+
try:
- return os.isatty(sys.stderr.fileno())
+ return os.isatty(file.fileno())
except io.UnsupportedOperation:
- return sys.stderr.isatty()
+ return file.isatty()
diff --git a/Lib/_pydatetime.py b/Lib/_pydatetime.py
index 34ccb2da13d0f3..e001bd3bbba4b7 100644
--- a/Lib/_pydatetime.py
+++ b/Lib/_pydatetime.py
@@ -2306,7 +2306,6 @@ def __reduce__(self):
def _isoweek1monday(year):
# Helper to calculate the day number of the Monday starting week 1
- # XXX This could be done more efficiently
THURSDAY = 3
firstday = _ymd2ord(year, 1, 1)
firstweekday = (firstday + 6) % 7 # See weekday() above
diff --git a/Lib/_pydecimal.py b/Lib/_pydecimal.py
index 75df3db262470b..ff80180a79e97a 100644
--- a/Lib/_pydecimal.py
+++ b/Lib/_pydecimal.py
@@ -97,7 +97,7 @@ class DecimalException(ArithmeticError):
Used exceptions derive from this.
If an exception derives from another exception besides this (such as
- Underflow (Inexact, Rounded, Subnormal) that indicates that it is only
+ Underflow (Inexact, Rounded, Subnormal)) that indicates that it is only
called if the others are present. This isn't actually used for
anything, though.
@@ -145,7 +145,7 @@ class InvalidOperation(DecimalException):
x ** (+-)INF
An operand is invalid
- The result of the operation after these is a quiet positive NaN,
+ The result of the operation after this is a quiet positive NaN,
except when the cause is a signaling NaN, in which case the result is
also a quiet NaN, but with the original sign, and an optional
diagnostic information.
diff --git a/Lib/_pyrepl/commands.py b/Lib/_pyrepl/commands.py
index c3fce91013b001..503ca1da329eaa 100644
--- a/Lib/_pyrepl/commands.py
+++ b/Lib/_pyrepl/commands.py
@@ -282,7 +282,7 @@ def do(self) -> None:
x, y = r.pos2xy()
new_y = y + 1
- if new_y > r.max_row():
+ if r.eol() == len(b):
if r.historyi < len(r.history):
r.select_item(r.historyi + 1)
r.pos = r.eol(0)
@@ -309,7 +309,7 @@ def do(self) -> None:
class left(MotionCommand):
def do(self) -> None:
r = self.reader
- for i in range(r.get_arg()):
+ for _ in range(r.get_arg()):
p = r.pos - 1
if p >= 0:
r.pos = p
@@ -321,7 +321,7 @@ class right(MotionCommand):
def do(self) -> None:
r = self.reader
b = r.buffer
- for i in range(r.get_arg()):
+ for _ in range(r.get_arg()):
p = r.pos + 1
if p <= len(b):
r.pos = p
@@ -459,9 +459,15 @@ def do(self) -> None:
from site import gethistoryfile # type: ignore[attr-defined]
history = os.linesep.join(self.reader.history[:])
- with self.reader.suspend():
- pager = get_pager()
- pager(history, gethistoryfile())
+ self.reader.console.restore()
+ pager = get_pager()
+ pager(history, gethistoryfile())
+ self.reader.console.prepare()
+
+ # We need to copy over the state so that it's consistent between
+ # console and reader, and console does not overwrite/append stuff
+ self.reader.console.screen = self.reader.screen.copy()
+ self.reader.console.posxy = self.reader.cxy
class paste_mode(Command):
diff --git a/Lib/_pyrepl/completing_reader.py b/Lib/_pyrepl/completing_reader.py
index 05770aaf5060cc..9a005281dab1e6 100644
--- a/Lib/_pyrepl/completing_reader.py
+++ b/Lib/_pyrepl/completing_reader.py
@@ -260,10 +260,15 @@ def after_command(self, cmd: Command) -> None:
def calc_screen(self) -> list[str]:
screen = super().calc_screen()
if self.cmpltn_menu_visible:
- ly = self.lxy[1]
+ # We display the completions menu below the current prompt
+ ly = self.lxy[1] + 1
screen[ly:ly] = self.cmpltn_menu
- self.screeninfo[ly:ly] = [(0, [])]*len(self.cmpltn_menu)
- self.cxy = self.cxy[0], self.cxy[1] + len(self.cmpltn_menu)
+ # If we're not in the middle of multiline edit, don't append to screeninfo
+ # since that screws up the position calculation in pos2xy function.
+ # This is a hack to prevent the cursor jumping
+ # into the completions menu when pressing left or down arrow.
+ if self.pos != len(self.buffer):
+ self.screeninfo[ly:ly] = [(0, [])]*len(self.cmpltn_menu)
return screen
def finish(self) -> None:
diff --git a/Lib/_pyrepl/console.py b/Lib/_pyrepl/console.py
index 03266c4dfc2dd8..0d78890b4f45d5 100644
--- a/Lib/_pyrepl/console.py
+++ b/Lib/_pyrepl/console.py
@@ -45,6 +45,7 @@ class Event:
@dataclass
class Console(ABC):
+ posxy: tuple[int, int]
screen: list[str] = field(default_factory=list)
height: int = 25
width: int = 80
diff --git a/Lib/_pyrepl/fancy_termios.py b/Lib/_pyrepl/fancy_termios.py
index 5b85cb0f52521f..0468b9a2670267 100644
--- a/Lib/_pyrepl/fancy_termios.py
+++ b/Lib/_pyrepl/fancy_termios.py
@@ -40,7 +40,9 @@ def as_list(self):
self.lflag,
self.ispeed,
self.ospeed,
- self.cc,
+ # Always return a copy of the control characters list to ensure
+ # there are not any additional references to self.cc
+ self.cc[:],
]
def copy(self):
diff --git a/Lib/_pyrepl/historical_reader.py b/Lib/_pyrepl/historical_reader.py
index 5d416f336ad5d2..c4b95fa2e81ee6 100644
--- a/Lib/_pyrepl/historical_reader.py
+++ b/Lib/_pyrepl/historical_reader.py
@@ -290,13 +290,17 @@ def get_item(self, i: int) -> str:
@contextmanager
def suspend(self) -> SimpleContextManager:
- with super().suspend():
- try:
- old_history = self.history[:]
- del self.history[:]
- yield
- finally:
- self.history[:] = old_history
+ with super().suspend(), self.suspend_history():
+ yield
+
+ @contextmanager
+ def suspend_history(self) -> SimpleContextManager:
+ try:
+ old_history = self.history[:]
+ del self.history[:]
+ yield
+ finally:
+ self.history[:] = old_history
def prepare(self) -> None:
super().prepare()
diff --git a/Lib/_pyrepl/reader.py b/Lib/_pyrepl/reader.py
index 935c520c5ad553..dc26bfd3a34ffb 100644
--- a/Lib/_pyrepl/reader.py
+++ b/Lib/_pyrepl/reader.py
@@ -587,10 +587,11 @@ def setpos_from_xy(self, x: int, y: int) -> None:
def pos2xy(self) -> tuple[int, int]:
"""Return the x, y coordinates of position 'pos'."""
# this *is* incomprehensible, yes.
- y = 0
+ p, y = 0, 0
+ l2: list[int] = []
pos = self.pos
assert 0 <= pos <= len(self.buffer)
- if pos == len(self.buffer):
+ if pos == len(self.buffer) and len(self.screeninfo) > 0:
y = len(self.screeninfo) - 1
p, l2 = self.screeninfo[y]
return p + sum(l2) + l2.count(0), y
diff --git a/Lib/_pyrepl/simple_interact.py b/Lib/_pyrepl/simple_interact.py
index 342a4b58bfd0f3..66e66eae7ead9c 100644
--- a/Lib/_pyrepl/simple_interact.py
+++ b/Lib/_pyrepl/simple_interact.py
@@ -77,7 +77,7 @@ def _clear_screen():
"exit": _sitebuiltins.Quitter('exit', ''),
"quit": _sitebuiltins.Quitter('quit' ,''),
"copyright": _sitebuiltins._Printer('copyright', sys.copyright),
- "help": "help",
+ "help": _sitebuiltins._Helper(),
"clear": _clear_screen,
"\x1a": _sitebuiltins.Quitter('\x1a', ''),
}
@@ -124,18 +124,10 @@ def maybe_run_command(statement: str) -> bool:
reader.history.pop() # skip internal commands in history
command = REPL_COMMANDS[statement]
if callable(command):
- command()
+ # Make sure that history does not change because of commands
+ with reader.suspend_history():
+ command()
return True
-
- if isinstance(command, str):
- # Internal readline commands require a prepared reader like
- # inside multiline_input.
- reader.prepare()
- reader.refresh()
- reader.do_cmd((command, [statement]))
- reader.restore()
- return True
-
return False
while 1:
diff --git a/Lib/_pyrepl/unix_console.py b/Lib/_pyrepl/unix_console.py
index 09b5094f986fed..e69c96b11598aa 100644
--- a/Lib/_pyrepl/unix_console.py
+++ b/Lib/_pyrepl/unix_console.py
@@ -240,7 +240,7 @@ def refresh(self, screen, c_xy):
self.__hide_cursor()
self.__move(0, len(self.screen) - 1)
self.__write("\n")
- self.__posxy = 0, len(self.screen)
+ self.posxy = 0, len(self.screen)
self.screen.append("")
else:
while len(self.screen) < len(screen):
@@ -250,7 +250,7 @@ def refresh(self, screen, c_xy):
self.__gone_tall = 1
self.__move = self.__move_tall
- px, py = self.__posxy
+ px, py = self.posxy
old_offset = offset = self.__offset
height = self.height
@@ -271,7 +271,7 @@ def refresh(self, screen, c_xy):
if old_offset > offset and self._ri:
self.__hide_cursor()
self.__write_code(self._cup, 0, 0)
- self.__posxy = 0, old_offset
+ self.posxy = 0, old_offset
for i in range(old_offset - offset):
self.__write_code(self._ri)
oldscr.pop(-1)
@@ -279,7 +279,7 @@ def refresh(self, screen, c_xy):
elif old_offset < offset and self._ind:
self.__hide_cursor()
self.__write_code(self._cup, self.height - 1, 0)
- self.__posxy = 0, old_offset + self.height - 1
+ self.posxy = 0, old_offset + self.height - 1
for i in range(offset - old_offset):
self.__write_code(self._ind)
oldscr.pop(0)
@@ -299,7 +299,7 @@ def refresh(self, screen, c_xy):
while y < len(oldscr):
self.__hide_cursor()
self.__move(0, y)
- self.__posxy = 0, y
+ self.posxy = 0, y
self.__write_code(self._el)
y += 1
@@ -321,7 +321,7 @@ def move_cursor(self, x, y):
self.event_queue.insert(Event("scroll", None))
else:
self.__move(x, y)
- self.__posxy = x, y
+ self.posxy = x, y
self.flushoutput()
def prepare(self):
@@ -350,7 +350,7 @@ def prepare(self):
self.__buffer = []
- self.__posxy = 0, 0
+ self.posxy = 0, 0
self.__gone_tall = 0
self.__move = self.__move_short
self.__offset = 0
@@ -449,10 +449,12 @@ def getheightwidth(self):
"""
try:
return int(os.environ["LINES"]), int(os.environ["COLUMNS"])
- except KeyError:
- height, width = struct.unpack(
- "hhhh", ioctl(self.input_fd, TIOCGWINSZ, b"\000" * 8)
- )[0:2]
+ except (KeyError, TypeError, ValueError):
+ try:
+ size = ioctl(self.input_fd, TIOCGWINSZ, b"\000" * 8)
+ except OSError:
+ return 25, 80
+ height, width = struct.unpack("hhhh", size)[0:2]
if not height:
return 25, 80
return height, width
@@ -468,7 +470,7 @@ def getheightwidth(self):
"""
try:
return int(os.environ["LINES"]), int(os.environ["COLUMNS"])
- except KeyError:
+ except (KeyError, TypeError, ValueError):
return 25, 80
def forgetinput(self):
@@ -559,7 +561,7 @@ def clear(self):
self.__write_code(self._clear)
self.__gone_tall = 1
self.__move = self.__move_tall
- self.__posxy = 0, 0
+ self.posxy = 0, 0
self.screen = []
@property
@@ -644,8 +646,8 @@ def __write_changed_line(self, y, oldline, newline, px_coord):
# if we need to insert a single character right after the first detected change
if oldline[x_pos:] == newline[x_pos + 1 :] and self.ich1:
if (
- y == self.__posxy[1]
- and x_coord > self.__posxy[0]
+ y == self.posxy[1]
+ and x_coord > self.posxy[0]
and oldline[px_pos:x_pos] == newline[px_pos + 1 : x_pos + 1]
):
x_pos = px_pos
@@ -654,7 +656,7 @@ def __write_changed_line(self, y, oldline, newline, px_coord):
self.__move(x_coord, y)
self.__write_code(self.ich1)
self.__write(newline[x_pos])
- self.__posxy = x_coord + character_width, y
+ self.posxy = x_coord + character_width, y
# if it's a single character change in the middle of the line
elif (
@@ -665,7 +667,7 @@ def __write_changed_line(self, y, oldline, newline, px_coord):
character_width = wlen(newline[x_pos])
self.__move(x_coord, y)
self.__write(newline[x_pos])
- self.__posxy = x_coord + character_width, y
+ self.posxy = x_coord + character_width, y
# if this is the last character to fit in the line and we edit in the middle of the line
elif (
@@ -677,14 +679,14 @@ def __write_changed_line(self, y, oldline, newline, px_coord):
):
self.__hide_cursor()
self.__move(self.width - 2, y)
- self.__posxy = self.width - 2, y
+ self.posxy = self.width - 2, y
self.__write_code(self.dch1)
character_width = wlen(newline[x_pos])
self.__move(x_coord, y)
self.__write_code(self.ich1)
self.__write(newline[x_pos])
- self.__posxy = character_width + 1, y
+ self.posxy = character_width + 1, y
else:
self.__hide_cursor()
@@ -692,7 +694,7 @@ def __write_changed_line(self, y, oldline, newline, px_coord):
if wlen(oldline) > wlen(newline):
self.__write_code(self._el)
self.__write(newline[x_pos:])
- self.__posxy = wlen(newline), y
+ self.posxy = wlen(newline), y
if "\x1b" in newline:
# ANSI escape characters are present, so we can't assume
@@ -711,32 +713,36 @@ def __maybe_write_code(self, fmt, *args):
self.__write_code(fmt, *args)
def __move_y_cuu1_cud1(self, y):
- dy = y - self.__posxy[1]
+ assert self._cud1 is not None
+ assert self._cuu1 is not None
+ dy = y - self.posxy[1]
if dy > 0:
self.__write_code(dy * self._cud1)
elif dy < 0:
self.__write_code((-dy) * self._cuu1)
def __move_y_cuu_cud(self, y):
- dy = y - self.__posxy[1]
+ dy = y - self.posxy[1]
if dy > 0:
self.__write_code(self._cud, dy)
elif dy < 0:
self.__write_code(self._cuu, -dy)
def __move_x_hpa(self, x: int) -> None:
- if x != self.__posxy[0]:
+ if x != self.posxy[0]:
self.__write_code(self._hpa, x)
def __move_x_cub1_cuf1(self, x: int) -> None:
- dx = x - self.__posxy[0]
+ assert self._cuf1 is not None
+ assert self._cub1 is not None
+ dx = x - self.posxy[0]
if dx > 0:
self.__write_code(self._cuf1 * dx)
elif dx < 0:
self.__write_code(self._cub1 * (-dx))
def __move_x_cub_cuf(self, x: int) -> None:
- dx = x - self.__posxy[0]
+ dx = x - self.posxy[0]
if dx > 0:
self.__write_code(self._cuf, dx)
elif dx < 0:
@@ -766,12 +772,12 @@ def __show_cursor(self):
def repaint(self):
if not self.__gone_tall:
- self.__posxy = 0, self.__posxy[1]
+ self.posxy = 0, self.posxy[1]
self.__write("\r")
ns = len(self.screen) * ["\000" * self.width]
self.screen = ns
else:
- self.__posxy = 0, self.__offset
+ self.posxy = 0, self.__offset
self.__move(0, self.__offset)
ns = self.height * ["\000" * self.width]
self.screen = ns
diff --git a/Lib/_pyrepl/utils.py b/Lib/_pyrepl/utils.py
index 0f36083b6ffa92..4651717bd7e121 100644
--- a/Lib/_pyrepl/utils.py
+++ b/Lib/_pyrepl/utils.py
@@ -16,7 +16,7 @@ def str_width(c: str) -> int:
def wlen(s: str) -> int:
- if len(s) == 1:
+ if len(s) == 1 and s != '\x1a':
return str_width(s)
length = sum(str_width(i) for i in s)
# remove lengths of any escape sequences
diff --git a/Lib/_pyrepl/windows_console.py b/Lib/_pyrepl/windows_console.py
index ea6a9b4e8adeb7..fffadd5e2ec28e 100644
--- a/Lib/_pyrepl/windows_console.py
+++ b/Lib/_pyrepl/windows_console.py
@@ -148,10 +148,10 @@ def refresh(self, screen: list[str], c_xy: tuple[int, int]) -> None:
self._hide_cursor()
self._move_relative(0, len(self.screen) - 1)
self.__write("\n")
- self.__posxy = 0, len(self.screen)
+ self.posxy = 0, len(self.screen)
self.screen.append("")
- px, py = self.__posxy
+ px, py = self.posxy
old_offset = offset = self.__offset
height = self.height
@@ -167,7 +167,7 @@ def refresh(self, screen: list[str], c_xy: tuple[int, int]) -> None:
# portion of the window. We need to scroll the visible portion and the
# entire history
self._scroll(scroll_lines, self._getscrollbacksize())
- self.__posxy = self.__posxy[0], self.__posxy[1] + scroll_lines
+ self.posxy = self.posxy[0], self.posxy[1] + scroll_lines
self.__offset += scroll_lines
for i in range(scroll_lines):
@@ -193,7 +193,7 @@ def refresh(self, screen: list[str], c_xy: tuple[int, int]) -> None:
y = len(newscr)
while y < len(oldscr):
self._move_relative(0, y)
- self.__posxy = 0, y
+ self.posxy = 0, y
self._erase_to_end()
y += 1
@@ -250,11 +250,11 @@ def __write_changed_line(
if wlen(newline) == self.width:
# If we wrapped we want to start at the next line
self._move_relative(0, y + 1)
- self.__posxy = 0, y + 1
+ self.posxy = 0, y + 1
else:
- self.__posxy = wlen(newline), y
+ self.posxy = wlen(newline), y
- if "\x1b" in newline or y != self.__posxy[1] or '\x1a' in newline:
+ if "\x1b" in newline or y != self.posxy[1] or '\x1a' in newline:
# ANSI escape characters are present, so we can't assume
# anything about the position of the cursor. Moving the cursor
# to the left margin should work to get to a known position.
@@ -316,7 +316,7 @@ def prepare(self) -> None:
self.screen = []
self.height, self.width = self.getheightwidth()
- self.__posxy = 0, 0
+ self.posxy = 0, 0
self.__gone_tall = 0
self.__offset = 0
@@ -324,9 +324,9 @@ def restore(self) -> None:
pass
def _move_relative(self, x: int, y: int) -> None:
- """Moves relative to the current __posxy"""
- dx = x - self.__posxy[0]
- dy = y - self.__posxy[1]
+ """Moves relative to the current posxy"""
+ dx = x - self.posxy[0]
+ dy = y - self.posxy[1]
if dx < 0:
self.__write(MOVE_LEFT.format(-dx))
elif dx > 0:
@@ -345,7 +345,7 @@ def move_cursor(self, x: int, y: int) -> None:
self.event_queue.insert(0, Event("scroll", ""))
else:
self._move_relative(x, y)
- self.__posxy = x, y
+ self.posxy = x, y
def set_cursor_vis(self, visible: bool) -> None:
if visible:
@@ -445,7 +445,7 @@ def beep(self) -> None:
def clear(self) -> None:
"""Wipe the screen"""
self.__write(CLEAR)
- self.__posxy = 0, 0
+ self.posxy = 0, 0
self.screen = [""]
def finish(self) -> None:
diff --git a/Lib/_strptime.py b/Lib/_strptime.py
index 4c68a6a88e5b04..8d763e5bd331e5 100644
--- a/Lib/_strptime.py
+++ b/Lib/_strptime.py
@@ -301,8 +301,6 @@ def __init__(self, locale_time=None):
'V': r"(?P5[0-3]|0[1-9]|[1-4]\d|\d)",
# W is set below by using 'U'
'y': r"(?P\d\d)",
- #XXX: Does 'Y' need to worry about having less or more than
- # 4 digits?
'Y': r"(?P\d\d\d\d)",
'z': r"(?P[+-]\d\d:?[0-5]\d(:?[0-5]\d(\.\d{1,6})?)?|(?-i:Z))",
'A': self.__seqToRE(self.locale_time.f_weekday, 'A'),
diff --git a/Lib/ast.py b/Lib/ast.py
index d7e51aba595706..2bf08c86b6e5bf 100644
--- a/Lib/ast.py
+++ b/Lib/ast.py
@@ -1275,9 +1275,14 @@ def visit_JoinedStr(self, node):
fallback_to_repr = True
break
quote_types = new_quote_types
- elif "\n" in value:
- quote_types = [q for q in quote_types if q in _MULTI_QUOTES]
- assert quote_types
+ else:
+ if "\n" in value:
+ quote_types = [q for q in quote_types if q in _MULTI_QUOTES]
+ assert quote_types
+
+ new_quote_types = [q for q in quote_types if q not in value]
+ if new_quote_types:
+ quote_types = new_quote_types
new_fstring_parts.append(value)
if fallback_to_repr:
diff --git a/Lib/asyncio/base_events.py b/Lib/asyncio/base_events.py
index 91434042685239..910fc76e884d2c 100644
--- a/Lib/asyncio/base_events.py
+++ b/Lib/asyncio/base_events.py
@@ -477,7 +477,12 @@ def create_task(self, coro, *, name=None, context=None):
task.set_name(name)
- return task
+ try:
+ return task
+ finally:
+ # gh-128552: prevent a refcycle of
+ # task.exception().__traceback__->BaseEventLoop.create_task->task
+ del task
def set_task_factory(self, factory):
"""Set a task factory that will be used by loop.create_task().
@@ -1585,7 +1590,9 @@ async def create_server(
if reuse_address:
sock.setsockopt(
socket.SOL_SOCKET, socket.SO_REUSEADDR, True)
- if reuse_port:
+ # Since Linux 6.12.9, SO_REUSEPORT is not allowed
+ # on other address families than AF_INET/AF_INET6.
+ if reuse_port and af in (socket.AF_INET, socket.AF_INET6):
_set_reuseport(sock)
if keep_alive:
sock.setsockopt(
diff --git a/Lib/asyncio/locks.py b/Lib/asyncio/locks.py
index aaee8ff0702923..3df4c693a915d5 100644
--- a/Lib/asyncio/locks.py
+++ b/Lib/asyncio/locks.py
@@ -485,7 +485,7 @@ class Barrier(mixins._LoopBoundMixin):
def __init__(self, parties):
"""Create a barrier, initialised to 'parties' tasks."""
if parties < 1:
- raise ValueError('parties must be > 0')
+ raise ValueError('parties must be >= 1')
self._cond = Condition() # notify all tasks when state changes
diff --git a/Lib/asyncio/runners.py b/Lib/asyncio/runners.py
index 1b89236599aad7..102ae78021b22f 100644
--- a/Lib/asyncio/runners.py
+++ b/Lib/asyncio/runners.py
@@ -168,6 +168,7 @@ def run(main, *, debug=None, loop_factory=None):
running in the same thread.
If debug is True, the event loop will be run in debug mode.
+ If loop_factory is passed, it is used for new event loop creation.
This function always creates a new event loop and closes it at the end.
It should be used as a main entry point for asyncio programs, and should
diff --git a/Lib/asyncio/selector_events.py b/Lib/asyncio/selector_events.py
index f94bf10b4225e7..f847d7df3f8c1a 100644
--- a/Lib/asyncio/selector_events.py
+++ b/Lib/asyncio/selector_events.py
@@ -1175,15 +1175,19 @@ def writelines(self, list_of_data):
# If the entire buffer couldn't be written, register a write handler
if self._buffer:
self._loop._add_writer(self._sock_fd, self._write_ready)
+ self._maybe_pause_protocol()
def can_write_eof(self):
return True
def _call_connection_lost(self, exc):
- super()._call_connection_lost(exc)
- if self._empty_waiter is not None:
- self._empty_waiter.set_exception(
- ConnectionError("Connection is closed by peer"))
+ try:
+ super()._call_connection_lost(exc)
+ finally:
+ self._write_ready = None
+ if self._empty_waiter is not None:
+ self._empty_waiter.set_exception(
+ ConnectionError("Connection is closed by peer"))
def _make_empty_waiter(self):
if self._empty_waiter is not None:
@@ -1198,7 +1202,6 @@ def _reset_empty_waiter(self):
def close(self):
self._read_ready_cb = None
- self._write_ready = None
super().close()
diff --git a/Lib/asyncio/staggered.py b/Lib/asyncio/staggered.py
index 0f4df8855a80b9..0afed64fdf9c0f 100644
--- a/Lib/asyncio/staggered.py
+++ b/Lib/asyncio/staggered.py
@@ -66,8 +66,27 @@ async def staggered_race(coro_fns, delay, *, loop=None):
enum_coro_fns = enumerate(coro_fns)
winner_result = None
winner_index = None
+ unhandled_exceptions = []
exceptions = []
- running_tasks = []
+ running_tasks = set()
+ on_completed_fut = None
+
+ def task_done(task):
+ running_tasks.discard(task)
+ if (
+ on_completed_fut is not None
+ and not on_completed_fut.done()
+ and not running_tasks
+ ):
+ on_completed_fut.set_result(None)
+
+ if task.cancelled():
+ return
+
+ exc = task.exception()
+ if exc is None:
+ return
+ unhandled_exceptions.append(exc)
async def run_one_coro(ok_to_start, previous_failed) -> None:
# in eager tasks this waits for the calling task to append this task
@@ -91,11 +110,11 @@ async def run_one_coro(ok_to_start, previous_failed) -> None:
this_failed = locks.Event()
next_ok_to_start = locks.Event()
next_task = loop.create_task(run_one_coro(next_ok_to_start, this_failed))
- running_tasks.append(next_task)
+ running_tasks.add(next_task)
+ next_task.add_done_callback(task_done)
# next_task has been appended to running_tasks so next_task is ok to
# start.
next_ok_to_start.set()
- assert len(running_tasks) == this_index + 2
# Prepare place to put this coroutine's exceptions if not won
exceptions.append(None)
assert len(exceptions) == this_index + 1
@@ -120,31 +139,36 @@ async def run_one_coro(ok_to_start, previous_failed) -> None:
# up as done() == True, cancelled() == False, exception() ==
# asyncio.CancelledError. This behavior is specified in
# https://bugs.python.org/issue30048
- for i, t in enumerate(running_tasks):
- if i != this_index:
+ current_task = tasks.current_task(loop)
+ for t in running_tasks:
+ if t is not current_task:
t.cancel()
- ok_to_start = locks.Event()
- first_task = loop.create_task(run_one_coro(ok_to_start, None))
- running_tasks.append(first_task)
- # first_task has been appended to running_tasks so first_task is ok to start.
- ok_to_start.set()
+ propagate_cancellation_error = None
try:
- # Wait for a growing list of tasks to all finish: poor man's version of
- # curio's TaskGroup or trio's nursery
- done_count = 0
- while done_count != len(running_tasks):
- done, _ = await tasks.wait(running_tasks)
- done_count = len(done)
+ ok_to_start = locks.Event()
+ first_task = loop.create_task(run_one_coro(ok_to_start, None))
+ running_tasks.add(first_task)
+ first_task.add_done_callback(task_done)
+ # first_task has been appended to running_tasks so first_task is ok to start.
+ ok_to_start.set()
+ propagate_cancellation_error = None
+ # Make sure no tasks are left running if we leave this function
+ while running_tasks:
+ on_completed_fut = loop.create_future()
+ try:
+ await on_completed_fut
+ except exceptions_mod.CancelledError as ex:
+ propagate_cancellation_error = ex
+ for task in running_tasks:
+ task.cancel(*ex.args)
+ on_completed_fut = None
+ if __debug__ and unhandled_exceptions:
# If run_one_coro raises an unhandled exception, it's probably a
# programming error, and I want to see it.
- if __debug__:
- for d in done:
- if d.done() and not d.cancelled() and d.exception():
- raise d.exception()
+ raise ExceptionGroup("staggered race failed", unhandled_exceptions)
+ if propagate_cancellation_error is not None:
+ raise propagate_cancellation_error
return winner_result, winner_index, exceptions
finally:
- del exceptions
- # Make sure no tasks are left running if we leave this function
- for t in running_tasks:
- t.cancel()
+ del exceptions, propagate_cancellation_error, unhandled_exceptions
diff --git a/Lib/asyncio/taskgroups.py b/Lib/asyncio/taskgroups.py
index 9fa772ca9d02cc..8fda6c8d55e16c 100644
--- a/Lib/asyncio/taskgroups.py
+++ b/Lib/asyncio/taskgroups.py
@@ -197,15 +197,18 @@ def create_task(self, coro, *, name=None, context=None):
else:
task = self._loop.create_task(coro, name=name, context=context)
- # optimization: Immediately call the done callback if the task is
+ # Always schedule the done callback even if the task is
# already done (e.g. if the coro was able to complete eagerly),
- # and skip scheduling a done callback
- if task.done():
- self._on_task_done(task)
- else:
- self._tasks.add(task)
- task.add_done_callback(self._on_task_done)
- return task
+ # otherwise if the task completes with an exception then it will cancel
+ # the current task too early. gh-128550, gh-128588
+ self._tasks.add(task)
+ task.add_done_callback(self._on_task_done)
+ try:
+ return task
+ finally:
+ # gh-128552: prevent a refcycle of
+ # task.exception().__traceback__->TaskGroup.create_task->task
+ del task
# Since Python 3.8 Tasks propagate all exceptions correctly,
# except for KeyboardInterrupt and SystemExit which are
diff --git a/Lib/bdb.py b/Lib/bdb.py
index ece0a29fe9f3b1..0a3b6dfbfc6025 100644
--- a/Lib/bdb.py
+++ b/Lib/bdb.py
@@ -3,6 +3,7 @@
import fnmatch
import sys
import os
+from contextlib import contextmanager
from inspect import CO_GENERATOR, CO_COROUTINE, CO_ASYNC_GENERATOR
__all__ = ["BdbQuit", "Bdb", "Breakpoint"]
@@ -63,6 +64,12 @@ def reset(self):
self.botframe = None
self._set_stopinfo(None, None)
+ @contextmanager
+ def set_enterframe(self, frame):
+ self.enterframe = frame
+ yield
+ self.enterframe = None
+
def trace_dispatch(self, frame, event, arg):
"""Dispatch a trace function for debugged frames based on the event.
@@ -88,28 +95,27 @@ def trace_dispatch(self, frame, event, arg):
The arg parameter depends on the previous event.
"""
- self.enterframe = frame
-
- if self.quitting:
- return # None
- if event == 'line':
- return self.dispatch_line(frame)
- if event == 'call':
- return self.dispatch_call(frame, arg)
- if event == 'return':
- return self.dispatch_return(frame, arg)
- if event == 'exception':
- return self.dispatch_exception(frame, arg)
- if event == 'c_call':
- return self.trace_dispatch
- if event == 'c_exception':
- return self.trace_dispatch
- if event == 'c_return':
+ with self.set_enterframe(frame):
+ if self.quitting:
+ return # None
+ if event == 'line':
+ return self.dispatch_line(frame)
+ if event == 'call':
+ return self.dispatch_call(frame, arg)
+ if event == 'return':
+ return self.dispatch_return(frame, arg)
+ if event == 'exception':
+ return self.dispatch_exception(frame, arg)
+ if event == 'c_call':
+ return self.trace_dispatch
+ if event == 'c_exception':
+ return self.trace_dispatch
+ if event == 'c_return':
+ return self.trace_dispatch
+ if event == 'opcode':
+ return self.dispatch_opcode(frame, arg)
+ print('bdb.Bdb.dispatch: unknown debugging event:', repr(event))
return self.trace_dispatch
- if event == 'opcode':
- return self.dispatch_opcode(frame, arg)
- print('bdb.Bdb.dispatch: unknown debugging event:', repr(event))
- return self.trace_dispatch
def dispatch_line(self, frame):
"""Invoke user function and return trace function for line event.
@@ -373,15 +379,15 @@ def set_trace(self, frame=None):
if frame is None:
frame = sys._getframe().f_back
self.reset()
- self.enterframe = frame
- while frame:
- frame.f_trace = self.trace_dispatch
- self.botframe = frame
- self.frame_trace_lines_opcodes[frame] = (frame.f_trace_lines, frame.f_trace_opcodes)
- # We need f_trace_lines == True for the debugger to work
- frame.f_trace_lines = True
- frame = frame.f_back
- self.set_stepinstr()
+ with self.set_enterframe(frame):
+ while frame:
+ frame.f_trace = self.trace_dispatch
+ self.botframe = frame
+ self.frame_trace_lines_opcodes[frame] = (frame.f_trace_lines, frame.f_trace_opcodes)
+ # We need f_trace_lines == True for the debugger to work
+ frame.f_trace_lines = True
+ frame = frame.f_back
+ self.set_stepinstr()
sys.settrace(self.trace_dispatch)
def set_continue(self):
diff --git a/Lib/configparser.py b/Lib/configparser.py
index ff7d712bed4afc..42d0ae1c0b52fb 100644
--- a/Lib/configparser.py
+++ b/Lib/configparser.py
@@ -1093,11 +1093,7 @@ def _handle_continuation_line(self, st, line, fpname):
def _handle_rest(self, st, line, fpname):
# a section header or option header?
if self._allow_unnamed_section and st.cursect is None:
- st.sectname = UNNAMED_SECTION
- st.cursect = self._dict()
- self._sections[st.sectname] = st.cursect
- self._proxies[st.sectname] = SectionProxy(self, st.sectname)
- st.elements_added.add(st.sectname)
+ self._handle_header(st, UNNAMED_SECTION, fpname)
st.indent_level = st.cur_indent_level
# is it a section header?
@@ -1106,10 +1102,10 @@ def _handle_rest(self, st, line, fpname):
if not mo and st.cursect is None:
raise MissingSectionHeaderError(fpname, st.lineno, line)
- self._handle_header(st, mo, fpname) if mo else self._handle_option(st, line, fpname)
+ self._handle_header(st, mo.group('header'), fpname) if mo else self._handle_option(st, line, fpname)
- def _handle_header(self, st, mo, fpname):
- st.sectname = mo.group('header')
+ def _handle_header(self, st, sectname, fpname):
+ st.sectname = sectname
if st.sectname in self._sections:
if self._strict and st.sectname in st.elements_added:
raise DuplicateSectionError(st.sectname, fpname,
diff --git a/Lib/dis.py b/Lib/dis.py
index 76934eb00e63f0..797e0f8a0888d0 100644
--- a/Lib/dis.py
+++ b/Lib/dis.py
@@ -1051,7 +1051,7 @@ def dis(self):
return output.getvalue()
-def main():
+def main(args=None):
import argparse
parser = argparse.ArgumentParser()
@@ -1060,7 +1060,7 @@ def main():
parser.add_argument('-O', '--show-offsets', action='store_true',
help='show instruction offsets')
parser.add_argument('infile', nargs='?', default='-')
- args = parser.parse_args()
+ args = parser.parse_args(args=args)
if args.infile == '-':
name = ''
source = sys.stdin.buffer.read()
diff --git a/Lib/doctest.py b/Lib/doctest.py
index c531e3ca6a3d5e..dd4d62a210a902 100644
--- a/Lib/doctest.py
+++ b/Lib/doctest.py
@@ -1558,7 +1558,7 @@ def out(s):
save_displayhook = sys.displayhook
sys.displayhook = sys.__displayhook__
saved_can_colorize = _colorize.can_colorize
- _colorize.can_colorize = lambda: False
+ _colorize.can_colorize = lambda *args, **kwargs: False
color_variables = {"PYTHON_COLORS": None, "FORCE_COLOR": None}
for key in color_variables:
color_variables[key] = os.environ.pop(key, None)
diff --git a/Lib/email/_header_value_parser.py b/Lib/email/_header_value_parser.py
index ec2215a5e5f33c..3d845c09d415f6 100644
--- a/Lib/email/_header_value_parser.py
+++ b/Lib/email/_header_value_parser.py
@@ -95,8 +95,16 @@
NLSET = {'\n', '\r'}
SPECIALSNL = SPECIALS | NLSET
+
+def make_quoted_pairs(value):
+ """Escape dquote and backslash for use within a quoted-string."""
+ return str(value).replace('\\', '\\\\').replace('"', '\\"')
+
+
def quote_string(value):
- return '"'+str(value).replace('\\', '\\\\').replace('"', r'\"')+'"'
+ escaped = make_quoted_pairs(value)
+ return f'"{escaped}"'
+
# Match a RFC 2047 word, looks like =?utf-8?q?someword?=
rfc2047_matcher = re.compile(r'''
@@ -2905,6 +2913,15 @@ def _refold_parse_tree(parse_tree, *, policy):
if not hasattr(part, 'encode'):
# It's not a terminal, try folding the subparts.
newparts = list(part)
+ if part.token_type == 'bare-quoted-string':
+ # To fold a quoted string we need to create a list of terminal
+ # tokens that will render the leading and trailing quotes
+ # and use quoted pairs in the value as appropriate.
+ newparts = (
+ [ValueTerminal('"', 'ptext')] +
+ [ValueTerminal(make_quoted_pairs(p), 'ptext')
+ for p in newparts] +
+ [ValueTerminal('"', 'ptext')])
if not part.as_ew_allowed:
wrap_as_ew_blocked += 1
newparts.append(end_ew_not_allowed)
diff --git a/Lib/email/message.py b/Lib/email/message.py
index 46bb8c21942af8..6b7c3a2377765a 100644
--- a/Lib/email/message.py
+++ b/Lib/email/message.py
@@ -286,8 +286,12 @@ def get_payload(self, i=None, decode=False):
if i is not None and not isinstance(self._payload, list):
raise TypeError('Expected list, got %s' % type(self._payload))
payload = self._payload
- # cte might be a Header, so for now stringify it.
- cte = str(self.get('content-transfer-encoding', '')).lower()
+ cte = self.get('content-transfer-encoding', '')
+ if hasattr(cte, 'cte'):
+ cte = cte.cte
+ else:
+ # cte might be a Header, so for now stringify it.
+ cte = str(cte).strip().lower()
# payload may be bytes here.
if not decode:
if isinstance(payload, str) and utils._has_surrogates(payload):
diff --git a/Lib/enum.py b/Lib/enum.py
index fc765643692db2..37f16976bbacde 100644
--- a/Lib/enum.py
+++ b/Lib/enum.py
@@ -343,12 +343,13 @@ class EnumDict(dict):
EnumType will use the names found in self._member_names as the
enumeration member names.
"""
- def __init__(self):
+ def __init__(self, cls_name=None):
super().__init__()
self._member_names = {} # use a dict -- faster look-up than a list, and keeps insertion order since 3.7
self._last_values = []
self._ignore = []
self._auto_called = False
+ self._cls_name = cls_name
def __setitem__(self, key, value):
"""
@@ -359,7 +360,7 @@ def __setitem__(self, key, value):
Single underscore (sunder) names are reserved.
"""
- if _is_private(self._cls_name, key):
+ if self._cls_name is not None and _is_private(self._cls_name, key):
# do nothing, name will be a normal attribute
pass
elif _is_sunder(key):
@@ -413,7 +414,7 @@ def __setitem__(self, key, value):
'old behavior', FutureWarning, stacklevel=2)
elif _is_descriptor(value):
pass
- elif _is_internal_class(self._cls_name, value):
+ elif self._cls_name is not None and _is_internal_class(self._cls_name, value):
# do nothing, name will be a normal attribute
pass
else:
@@ -485,8 +486,7 @@ def __prepare__(metacls, cls, bases, **kwds):
# check that previous enum members do not exist
metacls._check_for_existing_members_(cls, bases)
# create the namespace dict
- enum_dict = EnumDict()
- enum_dict._cls_name = cls
+ enum_dict = EnumDict(cls)
# inherit previous flags and _generate_next_value_ function
member_type, first_enum = metacls._get_mixins_(cls, bases)
if first_enum is not None:
diff --git a/Lib/functools.py b/Lib/functools.py
index e0140e84842a7d..2bc5053bd1b53f 100644
--- a/Lib/functools.py
+++ b/Lib/functools.py
@@ -351,6 +351,9 @@ def __setstate__(self, state):
self.args = args
self.keywords = kwds
+ __class_getitem__ = classmethod(GenericAlias)
+
+
try:
from _functools import partial
except ImportError:
diff --git a/Lib/http/__init__.py b/Lib/http/__init__.py
index d64741ec0dd29a..17a47b180e55c2 100644
--- a/Lib/http/__init__.py
+++ b/Lib/http/__init__.py
@@ -179,7 +179,7 @@ class HTTPMethod:
Methods from the following RFCs are all observed:
- * RFF 9110: HTTP Semantics, obsoletes 7231, which obsoleted 2616
+ * RFC 9110: HTTP Semantics, obsoletes 7231, which obsoleted 2616
* RFC 5789: PATCH Method for HTTP
"""
def __new__(cls, value, description):
diff --git a/Lib/http/client.py b/Lib/http/client.py
index a353716a8506e6..fb29923d94274c 100644
--- a/Lib/http/client.py
+++ b/Lib/http/client.py
@@ -472,7 +472,7 @@ def read(self, amt=None):
if self.chunked:
return self._read_chunked(amt)
- if amt is not None:
+ if amt is not None and amt >= 0:
if self.length is not None and amt > self.length:
# clip the read to the "end of response"
amt = self.length
@@ -590,6 +590,8 @@ def _get_chunk_left(self):
def _read_chunked(self, amt=None):
assert self.chunked != _UNKNOWN
+ if amt is not None and amt < 0:
+ amt = None
value = []
try:
while (chunk_left := self._get_chunk_left()) is not None:
diff --git a/Lib/http/cookies.py b/Lib/http/cookies.py
index 6b9ed24ad8ec78..57791c6ab0886d 100644
--- a/Lib/http/cookies.py
+++ b/Lib/http/cookies.py
@@ -424,9 +424,11 @@ def OutputString(self, attrs=None):
( # Optional group: there may not be a value.
\s*=\s* # Equal Sign
(?P # Start of group 'val'
- "(?:[^\\"]|\\.)*" # Any doublequoted string
+ "(?:[^\\"]|\\.)*" # Any double-quoted string
| # or
- \w{3},\s[\w\d\s-]{9,11}\s[\d:]{8}\sGMT # Special case for "expires" attr
+ # Special case for "expires" attr
+ (\w{3,6}day|\w{3}),\s # Day of the week or abbreviated day
+ [\w\d\s-]{9,11}\s[\d:]{8}\sGMT # Date and time in specific format
| # or
[""" + _LegalValueChars + r"""]* # Any word or empty string
) # End of group 'val'
diff --git a/Lib/idlelib/idle_test/test_configdialog.py b/Lib/idlelib/idle_test/test_configdialog.py
index 5099d093382445..0daca89084b048 100644
--- a/Lib/idlelib/idle_test/test_configdialog.py
+++ b/Lib/idlelib/idle_test/test_configdialog.py
@@ -5,6 +5,7 @@
from idlelib import configdialog
from test.support import requires
requires('gui')
+from test.support.testcase import ExtraAssertions
import unittest
from unittest import mock
from idlelib.idle_test.mock_idle import Func
@@ -59,7 +60,7 @@ def activate_config_changes(self):
pass
-class ButtonTest(unittest.TestCase):
+class ButtonTest(unittest.TestCase, ExtraAssertions):
def test_click_ok(self):
d = dialog
@@ -98,8 +99,8 @@ def test_click_help(self):
dialog.buttons['Help'].invoke()
title, contents = view.kwds['title'], view.kwds['contents']
self.assertEqual(title, 'Help for IDLE preferences')
- self.assertTrue(contents.startswith('When you click') and
- contents.endswith('a different name.\n'))
+ self.assertStartsWith(contents, 'When you click')
+ self.assertEndsWith(contents,'a different name.\n')
class FontPageTest(unittest.TestCase):
diff --git a/Lib/idlelib/idle_test/test_debugger.py b/Lib/idlelib/idle_test/test_debugger.py
index d1c9638dd5d711..f4009258e0594c 100644
--- a/Lib/idlelib/idle_test/test_debugger.py
+++ b/Lib/idlelib/idle_test/test_debugger.py
@@ -9,6 +9,7 @@
from tkinter import Tk
from test.support import requires
+from test.support.testcase import ExtraAssertions
import unittest
from unittest import mock
from unittest.mock import Mock, patch
@@ -227,7 +228,7 @@ def test_show_stack_with_frame(self):
self.idb.get_stack.assert_called_once_with(test_frame, None)
-class StackViewerTest(unittest.TestCase):
+class StackViewerTest(unittest.TestCase, ExtraAssertions):
@classmethod
def setUpClass(cls):
@@ -256,7 +257,7 @@ def test_init(self):
flist = None
master_window = self.root
sv = debugger.StackViewer(master_window, flist, gui)
- self.assertTrue(hasattr(sv, 'stack'))
+ self.assertHasAttr(sv, 'stack')
def test_load_stack(self):
# Test the .load_stack() method against a fixed test stack.
diff --git a/Lib/idlelib/idle_test/test_grep.py b/Lib/idlelib/idle_test/test_grep.py
index a0b5b69171879c..ad0a813e71d7b8 100644
--- a/Lib/idlelib/idle_test/test_grep.py
+++ b/Lib/idlelib/idle_test/test_grep.py
@@ -8,6 +8,7 @@
from idlelib import grep
import unittest
from test.support import captured_stdout
+from test.support.testcase import ExtraAssertions
from idlelib.idle_test.mock_tk import Var
import os
import re
@@ -115,7 +116,7 @@ def test_recurse(self):
self.assertIn(self.realpath, filelist)
-class Grep_itTest(unittest.TestCase):
+class Grep_itTest(unittest.TestCase, ExtraAssertions):
# Test captured reports with 0 and some hits.
# Should test file names, but Windows reports have mixed / and \ separators
# from incomplete replacement, so 'later'.
@@ -143,7 +144,7 @@ def test_found(self):
self.assertIn(pat, lines[0])
self.assertIn('py: 1:', lines[1]) # line number 1
self.assertIn('2', lines[3]) # hits found 2
- self.assertTrue(lines[4].startswith('(Hint:'))
+ self.assertStartsWith(lines[4], '(Hint:')
class Default_commandTest(unittest.TestCase):
diff --git a/Lib/idlelib/idle_test/test_multicall.py b/Lib/idlelib/idle_test/test_multicall.py
index b3a3bfb88f9c31..0c1fbfd648820f 100644
--- a/Lib/idlelib/idle_test/test_multicall.py
+++ b/Lib/idlelib/idle_test/test_multicall.py
@@ -3,10 +3,11 @@
from idlelib import multicall
import unittest
from test.support import requires
+from test.support.testcase import ExtraAssertions
from tkinter import Tk, Text
-class MultiCallTest(unittest.TestCase):
+class MultiCallTest(unittest.TestCase, ExtraAssertions):
@classmethod
def setUpClass(cls):
@@ -27,7 +28,7 @@ def tearDownClass(cls):
def test_creator(self):
mc = self.mc
self.assertIs(multicall._multicall_dict[Text], mc)
- self.assertTrue(issubclass(mc, Text))
+ self.assertIsSubclass(mc, Text)
mc2 = multicall.MultiCallCreator(Text)
self.assertIs(mc, mc2)
diff --git a/Lib/idlelib/idle_test/test_query.py b/Lib/idlelib/idle_test/test_query.py
index bb12b2b08652d5..ee368255f9b5c7 100644
--- a/Lib/idlelib/idle_test/test_query.py
+++ b/Lib/idlelib/idle_test/test_query.py
@@ -12,6 +12,7 @@
from idlelib import query
import unittest
from test.support import requires
+from test.support.testcase import ExtraAssertions
from tkinter import Tk, END
import sys
@@ -105,7 +106,7 @@ def test_good_section_name(self):
self.assertEqual(dialog.entry_error['text'], '')
-class ModuleNameTest(unittest.TestCase):
+class ModuleNameTest(unittest.TestCase, ExtraAssertions):
"Test ModuleName subclass of Query."
class Dummy_ModuleName:
@@ -134,10 +135,10 @@ def test_c_source_name(self):
def test_good_module_name(self):
dialog = self.Dummy_ModuleName('idlelib')
- self.assertTrue(dialog.entry_ok().endswith('__init__.py'))
+ self.assertEndsWith(dialog.entry_ok(), '__init__.py')
self.assertEqual(dialog.entry_error['text'], '')
dialog = self.Dummy_ModuleName('idlelib.idle')
- self.assertTrue(dialog.entry_ok().endswith('idle.py'))
+ self.assertEndsWith(dialog.entry_ok(), 'idle.py')
self.assertEqual(dialog.entry_error['text'], '')
@@ -376,7 +377,7 @@ def test_click_section_name(self):
root.destroy()
-class ModulenameGuiTest(unittest.TestCase):
+class ModulenameGuiTest(unittest.TestCase, ExtraAssertions):
@classmethod
def setUpClass(cls):
@@ -389,7 +390,7 @@ def test_click_module_name(self):
self.assertEqual(dialog.text0, 'idlelib')
self.assertEqual(dialog.entry.get(), 'idlelib')
dialog.button_ok.invoke()
- self.assertTrue(dialog.result.endswith('__init__.py'))
+ self.assertEndsWith(dialog.result, '__init__.py')
root.destroy()
diff --git a/Lib/idlelib/idle_test/test_redirector.py b/Lib/idlelib/idle_test/test_redirector.py
index a97b3002afcf12..7bd1116debc238 100644
--- a/Lib/idlelib/idle_test/test_redirector.py
+++ b/Lib/idlelib/idle_test/test_redirector.py
@@ -3,11 +3,12 @@
from idlelib.redirector import WidgetRedirector
import unittest
from test.support import requires
+from test.support.testcase import ExtraAssertions
from tkinter import Tk, Text, TclError
from idlelib.idle_test.mock_idle import Func
-class InitCloseTest(unittest.TestCase):
+class InitCloseTest(unittest.TestCase, ExtraAssertions):
@classmethod
def setUpClass(cls):
@@ -34,7 +35,7 @@ def test_close(self):
redir.register('insert', Func)
redir.close()
self.assertEqual(redir._operations, {})
- self.assertFalse(hasattr(self.text, 'widget'))
+ self.assertNotHasAttr(self.text, 'widget')
class WidgetRedirectorTest(unittest.TestCase):
diff --git a/Lib/idlelib/idle_test/test_sidebar.py b/Lib/idlelib/idle_test/test_sidebar.py
index 605e7a892570d7..3e854561a0f5b9 100644
--- a/Lib/idlelib/idle_test/test_sidebar.py
+++ b/Lib/idlelib/idle_test/test_sidebar.py
@@ -5,8 +5,8 @@
from itertools import chain
import unittest
import unittest.mock
-from test.support import requires, swap_attr
-from test import support
+from test.support import adjust_int_max_str_digits, requires, swap_attr
+from test.support.testcase import ExtraAssertions
import tkinter as tk
from idlelib.idle_test.tkinter_testing_utils import run_in_tk_mainloop
@@ -391,7 +391,7 @@ def assert_colors_are_equal(colors):
assert_colors_are_equal(orig_colors)
-class ShellSidebarTest(unittest.TestCase):
+class ShellSidebarTest(unittest.TestCase, ExtraAssertions):
root: tk.Tk = None
shell: PyShell = None
@@ -613,7 +613,7 @@ def test_interrupt_recall_undo_redo(self):
@run_in_tk_mainloop()
def test_very_long_wrapped_line(self):
- with support.adjust_int_max_str_digits(11_111), \
+ with adjust_int_max_str_digits(11_111), \
swap_attr(self.shell, 'squeezer', None):
self.do_input('x = ' + '1'*10_000 + '\n')
yield
@@ -725,7 +725,7 @@ def test_copy(self):
text.tag_add('sel', f'{first_line}.0', 'end-1c')
selected_text = text.get('sel.first', 'sel.last')
- self.assertTrue(selected_text.startswith('if True:\n'))
+ self.assertStartsWith(selected_text, 'if True:\n')
self.assertIn('\n1\n', selected_text)
text.event_generate('<>')
@@ -749,7 +749,7 @@ def test_copy_with_prompts(self):
text.tag_add('sel', f'{first_line}.3', 'end-1c')
selected_text = text.get('sel.first', 'sel.last')
- self.assertTrue(selected_text.startswith('True:\n'))
+ self.assertStartsWith(selected_text, 'True:\n')
selected_lines_text = text.get('sel.first linestart', 'sel.last')
selected_lines = selected_lines_text.split('\n')
diff --git a/Lib/idlelib/pyshell.py b/Lib/idlelib/pyshell.py
index e882c6cb3b8d19..66fbbd4a97b7af 100755
--- a/Lib/idlelib/pyshell.py
+++ b/Lib/idlelib/pyshell.py
@@ -424,7 +424,9 @@ def __init__(self, tkconsole):
def spawn_subprocess(self):
if self.subprocess_arglist is None:
self.subprocess_arglist = self.build_subprocess_arglist()
- self.rpcsubproc = subprocess.Popen(self.subprocess_arglist)
+ # gh-127060: Disable traceback colors
+ env = dict(os.environ, TERM='dumb')
+ self.rpcsubproc = subprocess.Popen(self.subprocess_arglist, env=env)
def build_subprocess_arglist(self):
assert (self.port!=0), (
diff --git a/Lib/imaplib.py b/Lib/imaplib.py
index 577b4b9b03a88d..e337fe6471069f 100644
--- a/Lib/imaplib.py
+++ b/Lib/imaplib.py
@@ -52,6 +52,9 @@
# search command can be quite large, so we now use 1M.
_MAXLINE = 1000000
+# Data larger than this will be read in chunks, to prevent extreme
+# overallocation.
+_SAFE_BUF_SIZE = 1 << 20
# Commands
@@ -315,7 +318,13 @@ def open(self, host='', port=IMAP4_PORT, timeout=None):
def read(self, size):
"""Read 'size' bytes from remote."""
- return self.file.read(size)
+ cursize = min(size, _SAFE_BUF_SIZE)
+ data = self.file.read(cursize)
+ while cursize < size and len(data) == cursize:
+ delta = min(cursize, size - cursize)
+ data += self.file.read(delta)
+ cursize += delta
+ return data
def readline(self):
diff --git a/Lib/importlib/resources/__init__.py b/Lib/importlib/resources/__init__.py
index ec4441c9116118..723c9f9eb33ce1 100644
--- a/Lib/importlib/resources/__init__.py
+++ b/Lib/importlib/resources/__init__.py
@@ -1,4 +1,11 @@
-"""Read resources contained within a package."""
+"""
+Read resources contained within a package.
+
+This codebase is shared between importlib.resources in the stdlib
+and importlib_resources in PyPI. See
+https://github.com/python/importlib_metadata/wiki/Development-Methodology
+for more detail.
+"""
from ._common import (
as_file,
diff --git a/Lib/importlib/resources/_common.py b/Lib/importlib/resources/_common.py
index ca5b06743b46a6..171a7f29249943 100644
--- a/Lib/importlib/resources/_common.py
+++ b/Lib/importlib/resources/_common.py
@@ -66,10 +66,10 @@ def get_resource_reader(package: types.ModuleType) -> Optional[ResourceReader]:
# zipimport.zipimporter does not support weak references, resulting in a
# TypeError. That seems terrible.
spec = package.__spec__
- reader = getattr(spec.loader, 'get_resource_reader', None) # type: ignore
+ reader = getattr(spec.loader, 'get_resource_reader', None) # type: ignore[union-attr]
if reader is None:
return None
- return reader(spec.name) # type: ignore
+ return reader(spec.name) # type: ignore[union-attr]
@functools.singledispatch
diff --git a/Lib/importlib/resources/readers.py b/Lib/importlib/resources/readers.py
index ccc5abbeb4e56e..70fc7e2b9c0145 100644
--- a/Lib/importlib/resources/readers.py
+++ b/Lib/importlib/resources/readers.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
import collections
import contextlib
import itertools
@@ -6,6 +8,7 @@
import re
import warnings
import zipfile
+from collections.abc import Iterator
from . import abc
@@ -135,27 +138,31 @@ class NamespaceReader(abc.TraversableResources):
def __init__(self, namespace_path):
if 'NamespacePath' not in str(namespace_path):
raise ValueError('Invalid path')
- self.path = MultiplexedPath(*map(self._resolve, namespace_path))
+ self.path = MultiplexedPath(*filter(bool, map(self._resolve, namespace_path)))
@classmethod
- def _resolve(cls, path_str) -> abc.Traversable:
+ def _resolve(cls, path_str) -> abc.Traversable | None:
r"""
Given an item from a namespace path, resolve it to a Traversable.
path_str might be a directory on the filesystem or a path to a
zipfile plus the path within the zipfile, e.g. ``/foo/bar`` or
``/foo/baz.zip/inner_dir`` or ``foo\baz.zip\inner_dir\sub``.
+
+ path_str might also be a sentinel used by editable packages to
+ trigger other behaviors (see python/importlib_resources#311).
+ In that case, return None.
"""
- (dir,) = (cand for cand in cls._candidate_paths(path_str) if cand.is_dir())
- return dir
+ dirs = (cand for cand in cls._candidate_paths(path_str) if cand.is_dir())
+ return next(dirs, None)
@classmethod
- def _candidate_paths(cls, path_str):
+ def _candidate_paths(cls, path_str: str) -> Iterator[abc.Traversable]:
yield pathlib.Path(path_str)
yield from cls._resolve_zip_path(path_str)
@staticmethod
- def _resolve_zip_path(path_str):
+ def _resolve_zip_path(path_str: str):
for match in reversed(list(re.finditer(r'[\\/]', path_str))):
with contextlib.suppress(
FileNotFoundError,
diff --git a/Lib/importlib/resources/simple.py b/Lib/importlib/resources/simple.py
index 96f117fec62c10..2e75299b13aabf 100644
--- a/Lib/importlib/resources/simple.py
+++ b/Lib/importlib/resources/simple.py
@@ -77,7 +77,7 @@ class ResourceHandle(Traversable):
def __init__(self, parent: ResourceContainer, name: str):
self.parent = parent
- self.name = name # type: ignore
+ self.name = name # type: ignore[misc]
def is_file(self):
return True
diff --git a/Lib/linecache.py b/Lib/linecache.py
index 4b38a0464d8747..8ba2df73d5a8fb 100644
--- a/Lib/linecache.py
+++ b/Lib/linecache.py
@@ -49,14 +49,17 @@ def checkcache(filename=None):
(This is not checked upon each call!)"""
if filename is None:
- filenames = list(cache.keys())
- elif filename in cache:
- filenames = [filename]
+        # Take a snapshot of the keys atomically; the cache may be
+ filenames = cache.copy().keys()
else:
- return
+ filenames = [filename]
for filename in filenames:
- entry = cache[filename]
+ try:
+ entry = cache[filename]
+ except KeyError:
+ continue
+
if len(entry) == 1:
# lazy cache entry, leave it lazy.
continue
diff --git a/Lib/multiprocessing/connection.py b/Lib/multiprocessing/connection.py
index c48e556294738f..8caddd204d7c98 100644
--- a/Lib/multiprocessing/connection.py
+++ b/Lib/multiprocessing/connection.py
@@ -846,7 +846,7 @@ def PipeClient(address):
_LEGACY_LENGTHS = (_MD5ONLY_MESSAGE_LENGTH, _MD5_DIGEST_LEN)
-def _get_digest_name_and_payload(message: bytes) -> (str, bytes):
+def _get_digest_name_and_payload(message): # type: (bytes) -> tuple[str, bytes]
"""Returns a digest name and the payload for a response hash.
If a legacy protocol is detected based on the message length
diff --git a/Lib/multiprocessing/resource_tracker.py b/Lib/multiprocessing/resource_tracker.py
index 20ddd9c50e3d88..90e036ae905afa 100644
--- a/Lib/multiprocessing/resource_tracker.py
+++ b/Lib/multiprocessing/resource_tracker.py
@@ -155,13 +155,14 @@ def ensure_running(self):
# that can make the child die before it registers signal handlers
# for SIGINT and SIGTERM. The mask is unregistered after spawning
# the child.
+ prev_sigmask = None
try:
if _HAVE_SIGMASK:
- signal.pthread_sigmask(signal.SIG_BLOCK, _IGNORED_SIGNALS)
+ prev_sigmask = signal.pthread_sigmask(signal.SIG_BLOCK, _IGNORED_SIGNALS)
pid = util.spawnv_passfds(exe, args, fds_to_pass)
finally:
- if _HAVE_SIGMASK:
- signal.pthread_sigmask(signal.SIG_UNBLOCK, _IGNORED_SIGNALS)
+ if prev_sigmask is not None:
+ signal.pthread_sigmask(signal.SIG_SETMASK, prev_sigmask)
except:
os.close(w)
raise
diff --git a/Lib/multiprocessing/synchronize.py b/Lib/multiprocessing/synchronize.py
index 0f682b9a0944b8..870c91349b9164 100644
--- a/Lib/multiprocessing/synchronize.py
+++ b/Lib/multiprocessing/synchronize.py
@@ -360,7 +360,7 @@ def wait(self, timeout=None):
return True
return False
- def __repr__(self) -> str:
+ def __repr__(self):
set_status = 'set' if self.is_set() else 'unset'
return f"<{type(self).__qualname__} at {id(self):#x} {set_status}>"
#
diff --git a/Lib/pdb.py b/Lib/pdb.py
index 9b6dffda1cfcd1..cb0a3405c58e55 100755
--- a/Lib/pdb.py
+++ b/Lib/pdb.py
@@ -383,6 +383,7 @@ def forget(self):
if hasattr(self, 'curframe') and self.curframe:
self.curframe.f_globals.pop('__pdb_convenience_variables', None)
self.curframe = None
+ self.curframe_locals = {}
self.tb_lineno.clear()
def setup(self, f, tb):
diff --git a/Lib/platform.py b/Lib/platform.py
index 5958382276e79c..8895177e326a5e 100755
--- a/Lib/platform.py
+++ b/Lib/platform.py
@@ -354,7 +354,8 @@ def _wmi_query(table, *keys):
]
_WIN32_SERVER_RELEASES = [
- ((10, 1, 0), "post2022Server"),
+ ((10, 1, 0), "post2025Server"),
+ ((10, 0, 26100), "2025Server"),
((10, 0, 20348), "2022Server"),
((10, 0, 17763), "2019Server"),
((6, 4, 0), "2016Server"),
diff --git a/Lib/poplib.py b/Lib/poplib.py
index 1a1629d175b6d9..beb93a0d57cf93 100644
--- a/Lib/poplib.py
+++ b/Lib/poplib.py
@@ -309,7 +309,7 @@ def close(self):
# optional commands:
def rpop(self, user):
- """Not sure what this does."""
+ """Send RPOP command to access the mailbox with an alternate user."""
return self._shortcmd('RPOP %s' % user)
diff --git a/Lib/pydoc.py b/Lib/pydoc.py
index 30cd0b3fa3cb4e..1a527b2c307b68 100755
--- a/Lib/pydoc.py
+++ b/Lib/pydoc.py
@@ -54,6 +54,7 @@ class or function within a module or module in a package. If the
# the current directory is changed with os.chdir(), an incorrect
# path will be displayed.
+import ast
import __future__
import builtins
import importlib._bootstrap
@@ -381,21 +382,29 @@ def ispackage(path):
return False
def source_synopsis(file):
- line = file.readline()
- while line[:1] == '#' or not line.strip():
- line = file.readline()
- if not line: break
- line = line.strip()
- if line[:4] == 'r"""': line = line[1:]
- if line[:3] == '"""':
- line = line[3:]
- if line[-1:] == '\\': line = line[:-1]
- while not line.strip():
- line = file.readline()
- if not line: break
- result = line.split('"""')[0].strip()
- else: result = None
- return result
+ """Return the one-line summary of a file object, if present"""
+
+ string = ''
+ try:
+ tokens = tokenize.generate_tokens(file.readline)
+ for tok_type, tok_string, _, _, _ in tokens:
+ if tok_type == tokenize.STRING:
+ string += tok_string
+ elif tok_type == tokenize.NEWLINE:
+ with warnings.catch_warnings():
+ # Ignore the "invalid escape sequence" warning.
+ warnings.simplefilter("ignore", SyntaxWarning)
+ docstring = ast.literal_eval(string)
+ if not isinstance(docstring, str):
+ return None
+ return docstring.strip().split('\n')[0].strip()
+ elif tok_type == tokenize.OP and tok_string in ('(', ')'):
+ string += tok_string
+ elif tok_type not in (tokenize.COMMENT, tokenize.NL, tokenize.ENCODING):
+ return None
+ except (tokenize.TokenError, UnicodeDecodeError, SyntaxError):
+ return None
+ return None
def synopsis(filename, cache={}):
"""Get the one-line summary out of a module file."""
diff --git a/Lib/pydoc_data/topics.py b/Lib/pydoc_data/topics.py
index 5b73d86718af4d..8d68b73d71203f 100644
--- a/Lib/pydoc_data/topics.py
+++ b/Lib/pydoc_data/topics.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Autogenerated by Sphinx on Tue Dec 3 18:56:45 2024
+# Autogenerated by Sphinx on Tue Feb 4 15:50:17 2025
# as part of the release process.
topics = {'assert': 'The "assert" statement\n'
'**********************\n'
@@ -2807,15 +2807,18 @@
' enter = type(manager).__enter__\n'
' exit = type(manager).__exit__\n'
' value = enter(manager)\n'
+ ' hit_except = False\n'
'\n'
' try:\n'
' TARGET = value\n'
' SUITE\n'
' except:\n'
+ ' hit_except = True\n'
' if not exit(manager, *sys.exc_info()):\n'
' raise\n'
- ' else:\n'
- ' exit(manager, None, None, None)\n'
+ ' finally:\n'
+ ' if not hit_except:\n'
+ ' exit(manager, None, None, None)\n'
'\n'
'With more than one item, the context managers are processed as '
'if\n'
@@ -3616,8 +3619,11 @@
'defparameter)* ["," [parameter_list_starargs]]\n'
' | parameter_list_starargs\n'
' parameter_list_starargs ::= "*" [star_parameter] ("," '
- 'defparameter)* ["," ["**" parameter [","]]]\n'
- ' | "**" parameter [","]\n'
+ 'defparameter)* ["," [parameter_star_kwargs]]\n'
+ ' "*" ("," defparameter)+ ["," '
+ '[parameter_star_kwargs]]\n'
+ ' | parameter_star_kwargs\n'
+ ' parameter_star_kwargs ::= "**" parameter [","]\n'
' parameter ::= identifier [":" expression]\n'
' star_parameter ::= identifier [":" ["*"] '
'expression]\n'
@@ -5226,15 +5232,16 @@
' immediately, rather than on the next line of code to be '
'executed.\n'
'\n'
- 'pdb.post_mortem(traceback=None)\n'
+ 'pdb.post_mortem(t=None)\n'
'\n'
- ' Enter post-mortem debugging of the given *traceback* object. '
- 'If no\n'
- ' *traceback* is given, it uses the one of the exception that '
- 'is\n'
- ' currently being handled (an exception must be being handled '
- 'if the\n'
- ' default is to be used).\n'
+ ' Enter post-mortem debugging of the given exception or '
+ 'traceback\n'
+ ' object. If no value is given, it uses the exception that is\n'
+ ' currently being handled, or raises "ValueError" if there '
+ 'isn’t one.\n'
+ '\n'
+ ' Changed in version 3.13: Support for exception objects was '
+ 'added.\n'
'\n'
'pdb.pm()\n'
'\n'
@@ -7067,8 +7074,12 @@
'trailing zeros are not removed from the result.\n'
'\n'
'The "\',\'" option signals the use of a comma for a '
- 'thousands separator.\n'
- 'For a locale aware separator, use the "\'n\'" integer '
+ 'thousands separator\n'
+ 'for floating-point presentation types and for integer '
+ 'presentation\n'
+ 'type "\'d\'". For other presentation types, this option is '
+ 'an error. For\n'
+ 'a locale aware separator, use the "\'n\'" integer '
'presentation type\n'
'instead.\n'
'\n'
@@ -7576,8 +7587,11 @@
'defparameter)* ["," [parameter_list_starargs]]\n'
' | parameter_list_starargs\n'
' parameter_list_starargs ::= "*" [star_parameter] ("," '
- 'defparameter)* ["," ["**" parameter [","]]]\n'
- ' | "**" parameter [","]\n'
+ 'defparameter)* ["," [parameter_star_kwargs]]\n'
+ ' "*" ("," defparameter)+ ["," '
+ '[parameter_star_kwargs]]\n'
+ ' | parameter_star_kwargs\n'
+ ' parameter_star_kwargs ::= "**" parameter [","]\n'
' parameter ::= identifier [":" expression]\n'
' star_parameter ::= identifier [":" ["*"] '
'expression]\n'
@@ -17198,15 +17212,18 @@
' enter = type(manager).__enter__\n'
' exit = type(manager).__exit__\n'
' value = enter(manager)\n'
+ ' hit_except = False\n'
'\n'
' try:\n'
' TARGET = value\n'
' SUITE\n'
' except:\n'
+ ' hit_except = True\n'
' if not exit(manager, *sys.exc_info()):\n'
' raise\n'
- ' else:\n'
- ' exit(manager, None, None, None)\n'
+ ' finally:\n'
+ ' if not hit_except:\n'
+ ' exit(manager, None, None, None)\n'
'\n'
'With more than one item, the context managers are processed as if\n'
'multiple "with" statements were nested:\n'
diff --git a/Lib/socket.py b/Lib/socket.py
index 9207101dcf9d58..35d87eff34deb1 100644
--- a/Lib/socket.py
+++ b/Lib/socket.py
@@ -931,7 +931,9 @@ def create_server(address, *, family=AF_INET, backlog=None, reuse_port=False,
# Fail later on bind(), for platforms which may not
# support this option.
pass
- if reuse_port:
+        # Since Linux 6.12.9, SO_REUSEPORT is not allowed
+        # on address families other than AF_INET/AF_INET6.
+ if reuse_port and family in (AF_INET, AF_INET6):
sock.setsockopt(SOL_SOCKET, SO_REUSEPORT, 1)
if has_ipv6 and family == AF_INET6:
if dualstack_ipv6:
diff --git a/Lib/socketserver.py b/Lib/socketserver.py
index cd028ef1c63b85..35b2723de3babe 100644
--- a/Lib/socketserver.py
+++ b/Lib/socketserver.py
@@ -468,7 +468,12 @@ def server_bind(self):
"""
if self.allow_reuse_address and hasattr(socket, "SO_REUSEADDR"):
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
- if self.allow_reuse_port and hasattr(socket, "SO_REUSEPORT"):
+            # Since Linux 6.12.9, SO_REUSEPORT is not allowed
+            # on address families other than AF_INET/AF_INET6.
+ if (
+ self.allow_reuse_port and hasattr(socket, "SO_REUSEPORT")
+ and self.address_family in (socket.AF_INET, socket.AF_INET6)
+ ):
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
self.socket.bind(self.server_address)
self.server_address = self.socket.getsockname()
diff --git a/Lib/sqlite3/__init__.py b/Lib/sqlite3/__init__.py
index 927267cf0b92ff..e3c81ffcf1c11f 100644
--- a/Lib/sqlite3/__init__.py
+++ b/Lib/sqlite3/__init__.py
@@ -22,7 +22,7 @@
"""
The sqlite3 extension module provides a DB-API 2.0 (PEP 249) compliant
-interface to the SQLite library, and requires SQLite 3.7.15 or newer.
+interface to the SQLite library, and requires SQLite 3.15.2 or newer.
To use the module, start by creating a database Connection object:
diff --git a/Lib/subprocess.py b/Lib/subprocess.py
index b2dcb1454c139e..98b98be01ac315 100644
--- a/Lib/subprocess.py
+++ b/Lib/subprocess.py
@@ -43,10 +43,8 @@
import builtins
import errno
import io
-import locale
import os
import time
-import signal
import sys
import threading
import warnings
@@ -144,6 +142,8 @@ def __init__(self, returncode, cmd, output=None, stderr=None):
def __str__(self):
if self.returncode and self.returncode < 0:
+ # Lazy import to improve module import time
+ import signal
try:
return "Command '%s' died with %r." % (
self.cmd, signal.Signals(-self.returncode))
@@ -381,12 +381,14 @@ def _text_encoding():
if sys.flags.utf8_mode:
return "utf-8"
else:
+ # Lazy import to improve module import time
+ import locale
return locale.getencoding()
def call(*popenargs, timeout=None, **kwargs):
"""Run command with arguments. Wait for command to complete or
- timeout, then return the returncode attribute.
+ for timeout seconds, then return the returncode attribute.
The arguments are the same as for the Popen constructor. Example:
@@ -523,8 +525,8 @@ def run(*popenargs,
in the returncode attribute, and output & stderr attributes if those streams
were captured.
- If timeout is given, and the process takes too long, a TimeoutExpired
- exception will be raised.
+ If timeout (seconds) is given and the process takes too long,
+ a TimeoutExpired exception will be raised.
There is an optional argument "input", allowing you to
pass bytes or a string to the subprocess's stdin. If you use this argument
@@ -1665,6 +1667,9 @@ def send_signal(self, sig):
# Don't signal a process that we know has already died.
if self.returncode is not None:
return
+
+ # Lazy import to improve module import time
+ import signal
if sig == signal.SIGTERM:
self.terminate()
elif sig == signal.CTRL_C_EVENT:
@@ -1766,6 +1771,9 @@ def _posix_spawn(self, args, executable, env, restore_signals, close_fds,
"""Execute program using os.posix_spawn()."""
kwargs = {}
if restore_signals:
+ # Lazy import to improve module import time
+ import signal
+
# See _Py_RestoreSignals() in Python/pylifecycle.c
sigset = []
for signame in ('SIGPIPE', 'SIGXFZ', 'SIGXFSZ'):
@@ -2215,9 +2223,13 @@ def send_signal(self, sig):
def terminate(self):
"""Terminate the process with SIGTERM
"""
+ # Lazy import to improve module import time
+ import signal
self.send_signal(signal.SIGTERM)
def kill(self):
"""Kill the process with SIGKILL
"""
+ # Lazy import to improve module import time
+ import signal
self.send_signal(signal.SIGKILL)
diff --git a/Lib/sysconfig/__init__.py b/Lib/sysconfig/__init__.py
index ec3b638f00766d..7bcb737ff2cca3 100644
--- a/Lib/sysconfig/__init__.py
+++ b/Lib/sysconfig/__init__.py
@@ -594,7 +594,8 @@ def get_platform():
solaris-2.6-sun4u
Windows will return one of:
- win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc)
+ win-amd64 (64-bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc)
+ win-arm64 (64-bit Windows on ARM64 (aka AArch64)
win32 (all others - specifically, sys.platform is returned)
For other non-POSIX platforms, currently just returns 'sys.platform'.
@@ -695,6 +696,9 @@ def expand_makefile_vars(s, vars):
"""
import re
+ _findvar1_rx = r"\$\(([A-Za-z][A-Za-z0-9_]*)\)"
+ _findvar2_rx = r"\${([A-Za-z][A-Za-z0-9_]*)}"
+
# This algorithm does multiple expansion, so if vars['foo'] contains
# "${bar}", it will expand ${foo} to ${bar}, and then expand
# ${bar}... and so forth. This is fine as long as 'vars' comes from
diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py
index ce93d94699ce78..84c8fa146709f6 100644
--- a/Lib/test/_test_multiprocessing.py
+++ b/Lib/test/_test_multiprocessing.py
@@ -5950,6 +5950,27 @@ def test_resource_tracker_exit_code(self):
cleanup=cleanup,
)
+ @unittest.skipUnless(hasattr(signal, "pthread_sigmask"), "pthread_sigmask is not available")
+ def test_resource_tracker_blocked_signals(self):
+ #
+        # gh-127586: Check that resource_tracker does not override blocked signals of the caller.
+ #
+ from multiprocessing.resource_tracker import ResourceTracker
+ orig_sigmask = signal.pthread_sigmask(signal.SIG_BLOCK, set())
+ signals = {signal.SIGTERM, signal.SIGINT, signal.SIGUSR1}
+
+ try:
+ for sig in signals:
+ signal.pthread_sigmask(signal.SIG_SETMASK, {sig})
+ self.assertEqual(signal.pthread_sigmask(signal.SIG_BLOCK, set()), {sig})
+ tracker = ResourceTracker()
+ tracker.ensure_running()
+ self.assertEqual(signal.pthread_sigmask(signal.SIG_BLOCK, set()), {sig})
+ tracker._stop()
+ finally:
+ # restore sigmask to what it was before executing test
+ signal.pthread_sigmask(signal.SIG_SETMASK, orig_sigmask)
+
class TestSimpleQueue(unittest.TestCase):
@classmethod
diff --git a/Lib/test/clinic.test.c b/Lib/test/clinic.test.c
index de5dbfdbe82fdd..bfc15c024de7fd 100644
--- a/Lib/test/clinic.test.c
+++ b/Lib/test/clinic.test.c
@@ -5011,6 +5011,75 @@ static int
Test_property_set_impl(TestObj *self, PyObject *value)
/*[clinic end generated code: output=e4342fe9bb1d7817 input=3bc3f46a23c83a88]*/
+/*[clinic input]
+@setter
+Test.setter_first_with_docstr
+[clinic start generated code]*/
+
+#if !defined(Test_setter_first_with_docstr_DOCSTR)
+# define Test_setter_first_with_docstr_DOCSTR NULL
+#endif
+#if defined(TEST_SETTER_FIRST_WITH_DOCSTR_GETSETDEF)
+# undef TEST_SETTER_FIRST_WITH_DOCSTR_GETSETDEF
+# define TEST_SETTER_FIRST_WITH_DOCSTR_GETSETDEF {"setter_first_with_docstr", (getter)Test_setter_first_with_docstr_get, (setter)Test_setter_first_with_docstr_set, Test_setter_first_with_docstr_DOCSTR},
+#else
+# define TEST_SETTER_FIRST_WITH_DOCSTR_GETSETDEF {"setter_first_with_docstr", NULL, (setter)Test_setter_first_with_docstr_set, NULL},
+#endif
+
+static int
+Test_setter_first_with_docstr_set_impl(TestObj *self, PyObject *value);
+
+static int
+Test_setter_first_with_docstr_set(TestObj *self, PyObject *value, void *Py_UNUSED(context))
+{
+ int return_value;
+
+ return_value = Test_setter_first_with_docstr_set_impl(self, value);
+
+ return return_value;
+}
+
+static int
+Test_setter_first_with_docstr_set_impl(TestObj *self, PyObject *value)
+/*[clinic end generated code: output=e4d76b558a4061db input=31a045ce11bbe961]*/
+
+/*[clinic input]
+@getter
+Test.setter_first_with_docstr
+
+my silly docstring
+[clinic start generated code]*/
+
+PyDoc_STRVAR(Test_setter_first_with_docstr__doc__,
+"my silly docstring");
+#if defined(Test_setter_first_with_docstr_DOCSTR)
+# undef Test_setter_first_with_docstr_DOCSTR
+#endif
+#define Test_setter_first_with_docstr_DOCSTR Test_setter_first_with_docstr__doc__
+
+#if !defined(Test_setter_first_with_docstr_DOCSTR)
+# define Test_setter_first_with_docstr_DOCSTR NULL
+#endif
+#if defined(TEST_SETTER_FIRST_WITH_DOCSTR_GETSETDEF)
+# undef TEST_SETTER_FIRST_WITH_DOCSTR_GETSETDEF
+# define TEST_SETTER_FIRST_WITH_DOCSTR_GETSETDEF {"setter_first_with_docstr", (getter)Test_setter_first_with_docstr_get, (setter)Test_setter_first_with_docstr_set, Test_setter_first_with_docstr_DOCSTR},
+#else
+# define TEST_SETTER_FIRST_WITH_DOCSTR_GETSETDEF {"setter_first_with_docstr", (getter)Test_setter_first_with_docstr_get, NULL, Test_setter_first_with_docstr_DOCSTR},
+#endif
+
+static PyObject *
+Test_setter_first_with_docstr_get_impl(TestObj *self);
+
+static PyObject *
+Test_setter_first_with_docstr_get(TestObj *self, void *Py_UNUSED(context))
+{
+ return Test_setter_first_with_docstr_get_impl(self);
+}
+
+static PyObject *
+Test_setter_first_with_docstr_get_impl(TestObj *self)
+/*[clinic end generated code: output=749a30266f9fb443 input=10af4e43b3cb34dc]*/
+
/*[clinic input]
output push
output preset buffer
diff --git a/Lib/test/libregrtest/findtests.py b/Lib/test/libregrtest/findtests.py
index 4ac95e23a56b8f..f01c1240774707 100644
--- a/Lib/test/libregrtest/findtests.py
+++ b/Lib/test/libregrtest/findtests.py
@@ -1,6 +1,7 @@
import os
import sys
import unittest
+from collections.abc import Container
from test import support
@@ -34,7 +35,7 @@ def findtestdir(path: StrPath | None = None) -> StrPath:
return path or os.path.dirname(os.path.dirname(__file__)) or os.curdir
-def findtests(*, testdir: StrPath | None = None, exclude=(),
+def findtests(*, testdir: StrPath | None = None, exclude: Container[str] = (),
split_test_dirs: set[TestName] = SPLITTESTDIRS,
base_mod: str = "") -> TestList:
"""Return a list of all applicable test modules."""
@@ -60,8 +61,9 @@ def findtests(*, testdir: StrPath | None = None, exclude=(),
return sorted(tests)
-def split_test_packages(tests, *, testdir: StrPath | None = None, exclude=(),
- split_test_dirs=SPLITTESTDIRS):
+def split_test_packages(tests, *, testdir: StrPath | None = None,
+ exclude: Container[str] = (),
+ split_test_dirs=SPLITTESTDIRS) -> list[TestName]:
testdir = findtestdir(testdir)
splitted = []
for name in tests:
@@ -75,9 +77,9 @@ def split_test_packages(tests, *, testdir: StrPath | None = None, exclude=(),
return splitted
-def _list_cases(suite):
+def _list_cases(suite: unittest.TestSuite) -> None:
for test in suite:
- if isinstance(test, unittest.loader._FailedTest):
+ if isinstance(test, unittest.loader._FailedTest): # type: ignore[attr-defined]
continue
if isinstance(test, unittest.TestSuite):
_list_cases(test)
@@ -87,7 +89,7 @@ def _list_cases(suite):
def list_cases(tests: TestTuple, *,
match_tests: TestFilter | None = None,
- test_dir: StrPath | None = None):
+ test_dir: StrPath | None = None) -> None:
support.verbose = False
set_match_tests(match_tests)
diff --git a/Lib/test/libregrtest/main.py b/Lib/test/libregrtest/main.py
index 36c45586db1b17..da63079399a372 100644
--- a/Lib/test/libregrtest/main.py
+++ b/Lib/test/libregrtest/main.py
@@ -6,6 +6,7 @@
import sysconfig
import time
import trace
+from typing import NoReturn
from test.support import (os_helper, MS_WINDOWS, flush_std_streams,
suppress_immortalization)
@@ -155,7 +156,7 @@ def __init__(self, ns: Namespace, _add_python_opts: bool = False):
self.next_single_test: TestName | None = None
self.next_single_filename: StrPath | None = None
- def log(self, line=''):
+ def log(self, line: str = '') -> None:
self.logger.log(line)
def find_tests(self, tests: TestList | None = None) -> tuple[TestTuple, TestList | None]:
@@ -233,11 +234,11 @@ def find_tests(self, tests: TestList | None = None) -> tuple[TestTuple, TestList
return (tuple(selected), tests)
@staticmethod
- def list_tests(tests: TestTuple):
+ def list_tests(tests: TestTuple) -> None:
for name in tests:
print(name)
- def _rerun_failed_tests(self, runtests: RunTests):
+ def _rerun_failed_tests(self, runtests: RunTests) -> RunTests:
# Configure the runner to re-run tests
if self.num_workers == 0 and not self.single_process:
# Always run tests in fresh processes to have more deterministic
@@ -269,7 +270,7 @@ def _rerun_failed_tests(self, runtests: RunTests):
self.run_tests_sequentially(runtests)
return runtests
- def rerun_failed_tests(self, runtests: RunTests):
+ def rerun_failed_tests(self, runtests: RunTests) -> None:
if self.python_cmd:
# Temp patch for https://github.com/python/cpython/issues/94052
self.log(
@@ -338,7 +339,7 @@ def run_bisect(self, runtests: RunTests) -> None:
if not self._run_bisect(runtests, name, progress):
return
- def display_result(self, runtests):
+ def display_result(self, runtests: RunTests) -> None:
# If running the test suite for PGO then no one cares about results.
if runtests.pgo:
return
@@ -368,7 +369,7 @@ def run_test(
return result
- def run_tests_sequentially(self, runtests) -> None:
+ def run_tests_sequentially(self, runtests: RunTests) -> None:
if self.coverage:
tracer = trace.Trace(trace=False, count=True)
else:
@@ -425,7 +426,7 @@ def run_tests_sequentially(self, runtests) -> None:
if previous_test:
print(previous_test)
- def get_state(self):
+ def get_state(self) -> str:
state = self.results.get_state(self.fail_env_changed)
if self.first_state:
state = f'{self.first_state} then {state}'
@@ -474,7 +475,7 @@ def display_summary(self) -> None:
state = self.get_state()
print(f"Result: {state}")
- def create_run_tests(self, tests: TestTuple):
+ def create_run_tests(self, tests: TestTuple) -> RunTests:
return RunTests(
tests,
fail_fast=self.fail_fast,
@@ -685,9 +686,9 @@ def _execute_python(self, cmd, environ):
f"Command: {cmd_text}")
# continue executing main()
- def _add_python_opts(self):
- python_opts = []
- regrtest_opts = []
+ def _add_python_opts(self) -> None:
+ python_opts: list[str] = []
+ regrtest_opts: list[str] = []
environ, keep_environ = self._add_cross_compile_opts(regrtest_opts)
if self.ci_mode:
@@ -728,7 +729,7 @@ def tmp_dir(self) -> StrPath:
)
return self._tmp_dir
- def main(self, tests: TestList | None = None):
+ def main(self, tests: TestList | None = None) -> NoReturn:
if self.want_add_python_opts:
self._add_python_opts()
@@ -757,7 +758,7 @@ def main(self, tests: TestList | None = None):
sys.exit(exitcode)
-def main(tests=None, _add_python_opts=False, **kwargs):
+def main(tests=None, _add_python_opts=False, **kwargs) -> NoReturn:
"""Run the Python suite."""
ns = _parse_args(sys.argv[1:], **kwargs)
Regrtest(ns, _add_python_opts=_add_python_opts).main(tests=tests)
diff --git a/Lib/test/libregrtest/pgo.py b/Lib/test/libregrtest/pgo.py
index e3a6927be5db1d..f762345c88cde3 100644
--- a/Lib/test/libregrtest/pgo.py
+++ b/Lib/test/libregrtest/pgo.py
@@ -50,7 +50,7 @@
'test_xml_etree_c',
]
-def setup_pgo_tests(cmdline_args, pgo_extended: bool):
+def setup_pgo_tests(cmdline_args, pgo_extended: bool) -> None:
if not cmdline_args and not pgo_extended:
# run default set of tests for PGO training
cmdline_args[:] = PGO_TESTS[:]
diff --git a/Lib/test/libregrtest/refleak.py b/Lib/test/libregrtest/refleak.py
index ff811ee0a4a9c2..2e49b31e253d54 100644
--- a/Lib/test/libregrtest/refleak.py
+++ b/Lib/test/libregrtest/refleak.py
@@ -263,7 +263,7 @@ def dash_R_cleanup(fs, ps, pic, zdc, abcs):
sys._clear_internal_caches()
-def warm_caches():
+def warm_caches() -> None:
# char cache
s = bytes(range(256))
for i in range(256):
diff --git a/Lib/test/libregrtest/result.py b/Lib/test/libregrtest/result.py
index 74eae40440435d..7553efe5e8abeb 100644
--- a/Lib/test/libregrtest/result.py
+++ b/Lib/test/libregrtest/result.py
@@ -149,6 +149,7 @@ def __str__(self) -> str:
case State.DID_NOT_RUN:
return f"{self.test_name} ran no tests"
case State.TIMEOUT:
+ assert self.duration is not None, "self.duration is None"
return f"{self.test_name} timed out ({format_duration(self.duration)})"
case _:
raise ValueError("unknown result state: {state!r}")
diff --git a/Lib/test/libregrtest/results.py b/Lib/test/libregrtest/results.py
index 53758bf56946f1..9eda926966dc7e 100644
--- a/Lib/test/libregrtest/results.py
+++ b/Lib/test/libregrtest/results.py
@@ -75,7 +75,7 @@ def get_state(self, fail_env_changed: bool) -> str:
return ', '.join(state)
- def get_exitcode(self, fail_env_changed, fail_rerun):
+ def get_exitcode(self, fail_env_changed: bool, fail_rerun: bool) -> int:
exitcode = 0
if self.bad:
exitcode = EXITCODE_BAD_TEST
@@ -91,7 +91,7 @@ def get_exitcode(self, fail_env_changed, fail_rerun):
exitcode = EXITCODE_BAD_TEST
return exitcode
- def accumulate_result(self, result: TestResult, runtests: RunTests):
+ def accumulate_result(self, result: TestResult, runtests: RunTests) -> None:
test_name = result.test_name
rerun = runtests.rerun
fail_env_changed = runtests.fail_env_changed
@@ -139,7 +139,7 @@ def get_coverage_results(self) -> trace.CoverageResults:
counts = {loc: 1 for loc in self.covered_lines}
return trace.CoverageResults(counts=counts)
- def need_rerun(self):
+ def need_rerun(self) -> bool:
return bool(self.rerun_results)
def prepare_rerun(self, *, clear: bool = True) -> tuple[TestTuple, FilterDict]:
@@ -162,7 +162,7 @@ def prepare_rerun(self, *, clear: bool = True) -> tuple[TestTuple, FilterDict]:
return (tuple(tests), match_tests_dict)
- def add_junit(self, xml_data: list[str]):
+ def add_junit(self, xml_data: list[str]) -> None:
import xml.etree.ElementTree as ET
for e in xml_data:
try:
@@ -171,7 +171,7 @@ def add_junit(self, xml_data: list[str]):
print(xml_data, file=sys.__stderr__)
raise
- def write_junit(self, filename: StrPath):
+ def write_junit(self, filename: StrPath) -> None:
if not self.testsuite_xml:
# Don't create empty XML file
return
@@ -196,7 +196,7 @@ def write_junit(self, filename: StrPath):
for s in ET.tostringlist(root):
f.write(s)
- def display_result(self, tests: TestTuple, quiet: bool, print_slowest: bool):
+ def display_result(self, tests: TestTuple, quiet: bool, print_slowest: bool) -> None:
if print_slowest:
self.test_times.sort(reverse=True)
print()
@@ -238,7 +238,7 @@ def display_result(self, tests: TestTuple, quiet: bool, print_slowest: bool):
print()
print("Test suite interrupted by signal SIGINT.")
- def display_summary(self, first_runtests: RunTests, filtered: bool):
+ def display_summary(self, first_runtests: RunTests, filtered: bool) -> None:
# Total tests
stats = self.stats
text = f'run={stats.tests_run:,}'
diff --git a/Lib/test/libregrtest/runtests.py b/Lib/test/libregrtest/runtests.py
index 3279c1f1aadba7..7b607d4a559d88 100644
--- a/Lib/test/libregrtest/runtests.py
+++ b/Lib/test/libregrtest/runtests.py
@@ -5,12 +5,12 @@
import shlex
import subprocess
import sys
-from typing import Any
+from typing import Any, Iterator
from test import support
from .utils import (
- StrPath, StrJSON, TestTuple, TestFilter, FilterTuple, FilterDict)
+ StrPath, StrJSON, TestTuple, TestName, TestFilter, FilterTuple, FilterDict)
class JsonFileType:
@@ -41,8 +41,8 @@ def configure_subprocess(self, popen_kwargs: dict[str, Any]) -> None:
popen_kwargs['startupinfo'] = startupinfo
@contextlib.contextmanager
- def inherit_subprocess(self):
- if self.file_type == JsonFileType.WINDOWS_HANDLE:
+ def inherit_subprocess(self) -> Iterator[None]:
+ if sys.platform == 'win32' and self.file_type == JsonFileType.WINDOWS_HANDLE:
os.set_handle_inheritable(self.file, True)
try:
yield
@@ -106,25 +106,25 @@ def copy(self, **override) -> 'RunTests':
state.update(override)
return RunTests(**state)
- def create_worker_runtests(self, **override):
+ def create_worker_runtests(self, **override) -> 'WorkerRunTests':
state = dataclasses.asdict(self)
state.update(override)
return WorkerRunTests(**state)
- def get_match_tests(self, test_name) -> FilterTuple | None:
+ def get_match_tests(self, test_name: TestName) -> FilterTuple | None:
if self.match_tests_dict is not None:
return self.match_tests_dict.get(test_name, None)
else:
return None
- def get_jobs(self):
+ def get_jobs(self) -> int | None:
# Number of run_single_test() calls needed to run all tests.
# None means that there is not bound limit (--forever option).
if self.forever:
return None
return len(self.tests)
- def iter_tests(self):
+ def iter_tests(self) -> Iterator[TestName]:
if self.forever:
while True:
yield from self.tests
diff --git a/Lib/test/libregrtest/setup.py b/Lib/test/libregrtest/setup.py
index 9e9741493e9a5b..ba57f06b4841d4 100644
--- a/Lib/test/libregrtest/setup.py
+++ b/Lib/test/libregrtest/setup.py
@@ -25,9 +25,10 @@ def setup_test_dir(testdir: str | None) -> None:
sys.path.insert(0, os.path.abspath(testdir))
-def setup_process():
+def setup_process() -> None:
fix_umask()
+ assert sys.__stderr__ is not None, "sys.__stderr__ is None"
try:
stderr_fd = sys.__stderr__.fileno()
except (ValueError, AttributeError):
@@ -35,7 +36,7 @@ def setup_process():
# and ValueError on a closed stream.
#
# Catch AttributeError for stderr being None.
- stderr_fd = None
+ pass
else:
# Display the Python traceback on fatal errors (e.g. segfault)
faulthandler.enable(all_threads=True, file=stderr_fd)
@@ -68,7 +69,7 @@ def setup_process():
for index, path in enumerate(module.__path__):
module.__path__[index] = os.path.abspath(path)
if getattr(module, '__file__', None):
- module.__file__ = os.path.abspath(module.__file__)
+ module.__file__ = os.path.abspath(module.__file__) # type: ignore[type-var]
if hasattr(sys, 'addaudithook'):
# Add an auditing hook for all tests to ensure PySys_Audit is tested
@@ -87,7 +88,7 @@ def _test_audit_hook(name, args):
os.environ.setdefault(UNICODE_GUARD_ENV, FS_NONASCII)
-def setup_tests(runtests: RunTests):
+def setup_tests(runtests: RunTests) -> None:
support.verbose = runtests.verbose
support.failfast = runtests.fail_fast
support.PGO = runtests.pgo
diff --git a/Lib/test/libregrtest/tsan.py b/Lib/test/libregrtest/tsan.py
index dd18ae2584f5d8..822ac0f4044d9e 100644
--- a/Lib/test/libregrtest/tsan.py
+++ b/Lib/test/libregrtest/tsan.py
@@ -28,6 +28,6 @@
]
-def setup_tsan_tests(cmdline_args):
+def setup_tsan_tests(cmdline_args) -> None:
if not cmdline_args:
cmdline_args[:] = TSAN_TESTS[:]
diff --git a/Lib/test/libregrtest/utils.py b/Lib/test/libregrtest/utils.py
index 521a849376d68d..2b8362e7963183 100644
--- a/Lib/test/libregrtest/utils.py
+++ b/Lib/test/libregrtest/utils.py
@@ -58,7 +58,7 @@
FilterDict = dict[TestName, FilterTuple]
-def format_duration(seconds):
+def format_duration(seconds: float) -> str:
ms = math.ceil(seconds * 1e3)
seconds, ms = divmod(ms, 1000)
minutes, seconds = divmod(seconds, 60)
@@ -92,7 +92,7 @@ def strip_py_suffix(names: list[str] | None) -> None:
names[idx] = basename
-def plural(n, singular, plural=None):
+def plural(n: int, singular: str, plural: str | None = None) -> str:
if n == 1:
return singular
elif plural is not None:
@@ -101,7 +101,7 @@ def plural(n, singular, plural=None):
return singular + 's'
-def count(n, word):
+def count(n: int, word: str) -> str:
if n == 1:
return f"{n} {word}"
else:
@@ -123,14 +123,14 @@ def printlist(x, width=70, indent=4, file=None):
file=file)
-def print_warning(msg):
+def print_warning(msg: str) -> None:
support.print_warning(msg)
-orig_unraisablehook = None
+orig_unraisablehook: Callable[..., None] | None = None
-def regrtest_unraisable_hook(unraisable):
+def regrtest_unraisable_hook(unraisable) -> None:
global orig_unraisablehook
support.environment_altered = True
support.print_warning("Unraisable exception")
@@ -138,22 +138,23 @@ def regrtest_unraisable_hook(unraisable):
try:
support.flush_std_streams()
sys.stderr = support.print_warning.orig_stderr
+ assert orig_unraisablehook is not None, "orig_unraisablehook not set"
orig_unraisablehook(unraisable)
sys.stderr.flush()
finally:
sys.stderr = old_stderr
-def setup_unraisable_hook():
+def setup_unraisable_hook() -> None:
global orig_unraisablehook
orig_unraisablehook = sys.unraisablehook
sys.unraisablehook = regrtest_unraisable_hook
-orig_threading_excepthook = None
+orig_threading_excepthook: Callable[..., None] | None = None
-def regrtest_threading_excepthook(args):
+def regrtest_threading_excepthook(args) -> None:
global orig_threading_excepthook
support.environment_altered = True
support.print_warning(f"Uncaught thread exception: {args.exc_type.__name__}")
@@ -161,13 +162,14 @@ def regrtest_threading_excepthook(args):
try:
support.flush_std_streams()
sys.stderr = support.print_warning.orig_stderr
+ assert orig_threading_excepthook is not None, "orig_threading_excepthook not set"
orig_threading_excepthook(args)
sys.stderr.flush()
finally:
sys.stderr = old_stderr
-def setup_threading_excepthook():
+def setup_threading_excepthook() -> None:
global orig_threading_excepthook
import threading
orig_threading_excepthook = threading.excepthook
@@ -476,7 +478,7 @@ def get_temp_dir(tmp_dir: StrPath | None = None) -> StrPath:
return os.path.abspath(tmp_dir)
-def fix_umask():
+def fix_umask() -> None:
if support.is_emscripten:
# Emscripten has default umask 0o777, which breaks some tests.
# see https://github.com/emscripten-core/emscripten/issues/17269
@@ -572,7 +574,8 @@ def abs_module_name(test_name: TestName, test_dir: StrPath | None) -> TestName:
'setUpModule', 'tearDownModule',
))
-def normalize_test_name(test_full_name, *, is_error=False):
+def normalize_test_name(test_full_name: str, *,
+ is_error: bool = False) -> str | None:
short_name = test_full_name.split(" ")[0]
if is_error and short_name in _TEST_LIFECYCLE_HOOKS:
if test_full_name.startswith(('setUpModule (', 'tearDownModule (')):
@@ -593,7 +596,7 @@ def normalize_test_name(test_full_name, *, is_error=False):
return short_name
-def adjust_rlimit_nofile():
+def adjust_rlimit_nofile() -> None:
"""
On macOS the default fd limit (RLIMIT_NOFILE) is sometimes too low (256)
for our test suite to succeed. Raise it to something more reasonable. 1024
@@ -619,17 +622,17 @@ def adjust_rlimit_nofile():
f"{new_fd_limit}: {err}.")
-def get_host_runner():
+def get_host_runner() -> str:
if (hostrunner := os.environ.get("_PYTHON_HOSTRUNNER")) is None:
hostrunner = sysconfig.get_config_var("HOSTRUNNER")
return hostrunner
-def is_cross_compiled():
+def is_cross_compiled() -> bool:
return ('_PYTHON_HOST_PLATFORM' in os.environ)
-def format_resources(use_resources: Iterable[str]):
+def format_resources(use_resources: Iterable[str]) -> str:
use_resources = set(use_resources)
all_resources = set(ALL_RESOURCES)
@@ -654,7 +657,7 @@ def format_resources(use_resources: Iterable[str]):
def display_header(use_resources: tuple[str, ...],
- python_cmd: tuple[str, ...] | None):
+ python_cmd: tuple[str, ...] | None) -> None:
# Print basic platform information
print("==", platform.python_implementation(), *sys.version.split())
print("==", platform.platform(aliased=True),
@@ -732,7 +735,7 @@ def display_header(use_resources: tuple[str, ...],
print(flush=True)
-def cleanup_temp_dir(tmp_dir: StrPath):
+def cleanup_temp_dir(tmp_dir: StrPath) -> None:
import glob
path = os.path.join(glob.escape(tmp_dir), TMP_PREFIX + '*')
@@ -792,5 +795,5 @@ def _sanitize_xml_replace(regs):
return ''.join(f'\\x{ord(ch):02x}' if ch <= '\xff' else ascii(ch)[1:-1]
for ch in text)
-def sanitize_xml(text):
+def sanitize_xml(text: str) -> str:
return ILLEGAL_XML_CHARS_RE.sub(_sanitize_xml_replace, text)
diff --git a/Lib/test/libregrtest/worker.py b/Lib/test/libregrtest/worker.py
index 7c801a3cbc15b8..d232ea69483277 100644
--- a/Lib/test/libregrtest/worker.py
+++ b/Lib/test/libregrtest/worker.py
@@ -98,7 +98,7 @@ def worker_process(worker_json: StrJSON) -> NoReturn:
sys.exit(0)
-def main():
+def main() -> NoReturn:
if len(sys.argv) != 2:
print("usage: python -m test.libregrtest.worker JSON")
sys.exit(1)
diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py
index 7938b46012c853..d7fa6096d375c5 100644
--- a/Lib/test/support/__init__.py
+++ b/Lib/test/support/__init__.py
@@ -60,6 +60,8 @@
"skip_on_s390x",
"without_optimizer",
"force_not_colorized",
+ "force_not_colorized_test_class",
+ "make_clean_env",
"BrokenIter",
]
@@ -2693,30 +2695,54 @@ def is_slot_wrapper(name, value):
yield name, True
+@contextlib.contextmanager
+def no_color():
+ import _colorize
+ from .os_helper import EnvironmentVarGuard
+
+ with (
+ swap_attr(_colorize, "can_colorize", lambda file=None: False),
+ EnvironmentVarGuard() as env,
+ ):
+ for var in {"FORCE_COLOR", "NO_COLOR", "PYTHON_COLORS"}:
+ env.unset(var)
+ env.set("NO_COLOR", "1")
+ yield
+
+
def force_not_colorized(func):
"""Force the terminal not to be colorized."""
@functools.wraps(func)
def wrapper(*args, **kwargs):
- import _colorize
- original_fn = _colorize.can_colorize
- variables: dict[str, str | None] = {
- "PYTHON_COLORS": None, "FORCE_COLOR": None, "NO_COLOR": None
- }
- try:
- for key in variables:
- variables[key] = os.environ.pop(key, None)
- os.environ["NO_COLOR"] = "1"
- _colorize.can_colorize = lambda: False
+ with no_color():
return func(*args, **kwargs)
- finally:
- _colorize.can_colorize = original_fn
- del os.environ["NO_COLOR"]
- for key, value in variables.items():
- if value is not None:
- os.environ[key] = value
return wrapper
+def force_not_colorized_test_class(cls):
+ """Force the terminal not to be colorized for the entire test class."""
+ original_setUpClass = cls.setUpClass
+
+ @classmethod
+ @functools.wraps(cls.setUpClass)
+ def new_setUpClass(cls):
+ cls.enterClassContext(no_color())
+ original_setUpClass()
+
+ cls.setUpClass = new_setUpClass
+ return cls
+
+
+def make_clean_env() -> dict[str, str]:
+ clean_env = os.environ.copy()
+ for k in clean_env.copy():
+ if k.startswith("PYTHON"):
+ clean_env.pop(k)
+ clean_env.pop("FORCE_COLOR", None)
+ clean_env.pop("NO_COLOR", None)
+ return clean_env
+
+
def initialized_with_pyrepl():
"""Detect whether PyREPL was used during Python initialization."""
# If the main module has a __file__ attribute it's a Python module, which means PyREPL.
diff --git a/Lib/test/support/testcase.py b/Lib/test/support/testcase.py
index fad1e4cb3499c0..fd32457d1467ca 100644
--- a/Lib/test/support/testcase.py
+++ b/Lib/test/support/testcase.py
@@ -1,6 +1,63 @@
from math import copysign, isnan
+class ExtraAssertions:
+
+ def assertIsSubclass(self, cls, superclass, msg=None):
+ if issubclass(cls, superclass):
+ return
+ standardMsg = f'{cls!r} is not a subclass of {superclass!r}'
+ self.fail(self._formatMessage(msg, standardMsg))
+
+ def assertNotIsSubclass(self, cls, superclass, msg=None):
+ if not issubclass(cls, superclass):
+ return
+ standardMsg = f'{cls!r} is a subclass of {superclass!r}'
+ self.fail(self._formatMessage(msg, standardMsg))
+
+ def assertHasAttr(self, obj, name, msg=None):
+ if not hasattr(obj, name):
+ if isinstance(obj, types.ModuleType):
+ standardMsg = f'module {obj.__name__!r} has no attribute {name!r}'
+ elif isinstance(obj, type):
+ standardMsg = f'type object {obj.__name__!r} has no attribute {name!r}'
+ else:
+ standardMsg = f'{type(obj).__name__!r} object has no attribute {name!r}'
+ self.fail(self._formatMessage(msg, standardMsg))
+
+ def assertNotHasAttr(self, obj, name, msg=None):
+ if hasattr(obj, name):
+ if isinstance(obj, types.ModuleType):
+ standardMsg = f'module {obj.__name__!r} has unexpected attribute {name!r}'
+ elif isinstance(obj, type):
+ standardMsg = f'type object {obj.__name__!r} has unexpected attribute {name!r}'
+ else:
+ standardMsg = f'{type(obj).__name__!r} object has unexpected attribute {name!r}'
+ self.fail(self._formatMessage(msg, standardMsg))
+
+ def assertStartsWith(self, s, prefix, msg=None):
+ if s.startswith(prefix):
+ return
+ standardMsg = f"{s!r} doesn't start with {prefix!r}"
+ self.fail(self._formatMessage(msg, standardMsg))
+
+ def assertNotStartsWith(self, s, prefix, msg=None):
+ if not s.startswith(prefix):
+ return
+ self.fail(self._formatMessage(msg, f"{s!r} starts with {prefix!r}"))
+
+ def assertEndsWith(self, s, suffix, msg=None):
+ if s.endswith(suffix):
+ return
+ standardMsg = f"{s!r} doesn't end with {suffix!r}"
+ self.fail(self._formatMessage(msg, standardMsg))
+
+ def assertNotEndsWith(self, s, suffix, msg=None):
+ if not s.endswith(suffix):
+ return
+ self.fail(self._formatMessage(msg, f"{s!r} ends with {suffix!r}"))
+
+
class ExceptionIsLikeMixin:
def assertExceptionIsLike(self, exc, template):
"""
diff --git a/Lib/test/test__colorize.py b/Lib/test/test__colorize.py
index d55b97ade68cef..056a5306ced183 100644
--- a/Lib/test/test__colorize.py
+++ b/Lib/test/test__colorize.py
@@ -1,58 +1,134 @@
import contextlib
+import io
import sys
import unittest
import unittest.mock
import _colorize
-from test.support import force_not_colorized
+from test.support.os_helper import EnvironmentVarGuard
-ORIGINAL_CAN_COLORIZE = _colorize.can_colorize
+@contextlib.contextmanager
+def clear_env():
+ with EnvironmentVarGuard() as mock_env:
+ for var in "FORCE_COLOR", "NO_COLOR", "PYTHON_COLORS":
+ mock_env.unset(var)
+ yield mock_env
-def setUpModule():
- _colorize.can_colorize = lambda: False
-
-def tearDownModule():
- _colorize.can_colorize = ORIGINAL_CAN_COLORIZE
+def supports_virtual_terminal():
+ if sys.platform == "win32":
+ return unittest.mock.patch("nt._supports_virtual_terminal", return_value=True)
+ else:
+ return contextlib.nullcontext()
class TestColorizeFunction(unittest.TestCase):
- @force_not_colorized
def test_colorized_detection_checks_for_environment_variables(self):
- if sys.platform == "win32":
- virtual_patching = unittest.mock.patch("nt._supports_virtual_terminal",
- return_value=True)
- else:
- virtual_patching = contextlib.nullcontext()
- with virtual_patching:
-
- flags = unittest.mock.MagicMock(ignore_environment=False)
- with (unittest.mock.patch("os.isatty") as isatty_mock,
- unittest.mock.patch("sys.flags", flags),
- unittest.mock.patch("_colorize.can_colorize", ORIGINAL_CAN_COLORIZE)):
+ def check(env, fallback, expected):
+ with (self.subTest(env=env, fallback=fallback),
+ clear_env() as mock_env):
+ mock_env.update(env)
+ isatty_mock.return_value = fallback
+ stdout_mock.isatty.return_value = fallback
+ self.assertEqual(_colorize.can_colorize(), expected)
+
+ with (unittest.mock.patch("os.isatty") as isatty_mock,
+ unittest.mock.patch("sys.stdout") as stdout_mock,
+ supports_virtual_terminal()):
+ stdout_mock.fileno.return_value = 1
+
+ for fallback in False, True:
+ check({}, fallback, fallback)
+ check({'TERM': 'dumb'}, fallback, False)
+ check({'TERM': 'xterm'}, fallback, fallback)
+ check({'TERM': ''}, fallback, fallback)
+ check({'FORCE_COLOR': '1'}, fallback, True)
+ check({'FORCE_COLOR': '0'}, fallback, True)
+ check({'FORCE_COLOR': ''}, fallback, fallback)
+ check({'NO_COLOR': '1'}, fallback, False)
+ check({'NO_COLOR': '0'}, fallback, False)
+ check({'NO_COLOR': ''}, fallback, fallback)
+
+ check({'TERM': 'dumb', 'FORCE_COLOR': '1'}, False, True)
+ check({'FORCE_COLOR': '1', 'NO_COLOR': '1'}, True, False)
+
+ for ignore_environment in False, True:
+ # Simulate running with or without `-E`.
+ flags = unittest.mock.MagicMock(ignore_environment=ignore_environment)
+ with unittest.mock.patch("sys.flags", flags):
+ check({'PYTHON_COLORS': '1'}, True, True)
+ check({'PYTHON_COLORS': '1'}, False, not ignore_environment)
+ check({'PYTHON_COLORS': '0'}, True, ignore_environment)
+ check({'PYTHON_COLORS': '0'}, False, False)
+ for fallback in False, True:
+ check({'PYTHON_COLORS': 'x'}, fallback, fallback)
+ check({'PYTHON_COLORS': ''}, fallback, fallback)
+
+ check({'TERM': 'dumb', 'PYTHON_COLORS': '1'}, False, not ignore_environment)
+ check({'NO_COLOR': '1', 'PYTHON_COLORS': '1'}, False, not ignore_environment)
+ check({'FORCE_COLOR': '1', 'PYTHON_COLORS': '0'}, True, ignore_environment)
+
+ @unittest.skipUnless(sys.platform == "win32", "requires Windows")
+ def test_colorized_detection_checks_on_windows(self):
+ with (clear_env(),
+ unittest.mock.patch("os.isatty") as isatty_mock,
+ unittest.mock.patch("sys.stdout") as stdout_mock,
+ supports_virtual_terminal() as vt_mock):
+ stdout_mock.fileno.return_value = 1
+ isatty_mock.return_value = True
+ stdout_mock.isatty.return_value = True
+
+ vt_mock.return_value = True
+ self.assertEqual(_colorize.can_colorize(), True)
+ vt_mock.return_value = False
+ self.assertEqual(_colorize.can_colorize(), False)
+ import nt
+ del nt._supports_virtual_terminal
+ self.assertEqual(_colorize.can_colorize(), False)
+
+ def test_colorized_detection_checks_for_std_streams(self):
+ with (clear_env(),
+ unittest.mock.patch("os.isatty") as isatty_mock,
+ unittest.mock.patch("sys.stdout") as stdout_mock,
+ unittest.mock.patch("sys.stderr") as stderr_mock,
+ supports_virtual_terminal()):
+ stdout_mock.fileno.return_value = 1
+ stderr_mock.fileno.side_effect = ZeroDivisionError
+ stderr_mock.isatty.side_effect = ZeroDivisionError
+
+ isatty_mock.return_value = True
+ stdout_mock.isatty.return_value = True
+ self.assertEqual(_colorize.can_colorize(), True)
+
+ isatty_mock.return_value = False
+ stdout_mock.isatty.return_value = False
+ self.assertEqual(_colorize.can_colorize(), False)
+
+ def test_colorized_detection_checks_for_file(self):
+ with clear_env(), supports_virtual_terminal():
+
+ with unittest.mock.patch("os.isatty") as isatty_mock:
+ file = unittest.mock.MagicMock()
+ file.fileno.return_value = 1
isatty_mock.return_value = True
- with unittest.mock.patch("os.environ", {'TERM': 'dumb'}):
- self.assertEqual(_colorize.can_colorize(), False)
- with unittest.mock.patch("os.environ", {'PYTHON_COLORS': '1'}):
- self.assertEqual(_colorize.can_colorize(), True)
- with unittest.mock.patch("os.environ", {'PYTHON_COLORS': '0'}):
- self.assertEqual(_colorize.can_colorize(), False)
- with unittest.mock.patch("os.environ", {'NO_COLOR': '1'}):
- self.assertEqual(_colorize.can_colorize(), False)
- with unittest.mock.patch("os.environ",
- {'NO_COLOR': '1', "PYTHON_COLORS": '1'}):
- self.assertEqual(_colorize.can_colorize(), True)
- with unittest.mock.patch("os.environ", {'FORCE_COLOR': '1'}):
- self.assertEqual(_colorize.can_colorize(), True)
- with unittest.mock.patch("os.environ",
- {'FORCE_COLOR': '1', 'NO_COLOR': '1'}):
- self.assertEqual(_colorize.can_colorize(), False)
- with unittest.mock.patch("os.environ",
- {'FORCE_COLOR': '1', "PYTHON_COLORS": '0'}):
- self.assertEqual(_colorize.can_colorize(), False)
+ self.assertEqual(_colorize.can_colorize(file=file), True)
isatty_mock.return_value = False
- with unittest.mock.patch("os.environ", {}):
- self.assertEqual(_colorize.can_colorize(), False)
+ self.assertEqual(_colorize.can_colorize(file=file), False)
+
+ # No file.fileno.
+ with unittest.mock.patch("os.isatty", side_effect=ZeroDivisionError):
+ file = unittest.mock.MagicMock(spec=['isatty'])
+ file.isatty.return_value = True
+ self.assertEqual(_colorize.can_colorize(file=file), False)
+
+ # file.fileno() raises io.UnsupportedOperation.
+ with unittest.mock.patch("os.isatty", side_effect=ZeroDivisionError):
+ file = unittest.mock.MagicMock()
+ file.fileno.side_effect = io.UnsupportedOperation
+ file.isatty.return_value = True
+ self.assertEqual(_colorize.can_colorize(file=file), True)
+ file.isatty.return_value = False
+ self.assertEqual(_colorize.can_colorize(file=file), False)
if __name__ == "__main__":
diff --git a/Lib/test/test__interpreters.py b/Lib/test/test__interpreters.py
index 533120a3221987..8f32a446572d5a 100644
--- a/Lib/test/test__interpreters.py
+++ b/Lib/test/test__interpreters.py
@@ -557,7 +557,7 @@ def setUp(self):
self.id = _interpreters.create()
def test_signatures(self):
- # for method in ['exec', 'run_string', 'run_func']:
+ # See https://github.com/python/cpython/issues/126654
msg = "expected 'shared' to be a dict"
with self.assertRaisesRegex(TypeError, msg):
_interpreters.exec(self.id, 'a', 1)
@@ -568,6 +568,17 @@ def test_signatures(self):
with self.assertRaisesRegex(TypeError, msg):
_interpreters.run_func(self.id, lambda: None, shared=1)
+ def test_invalid_shared_encoding(self):
+ # See https://github.com/python/cpython/issues/127196
+ bad_shared = {"\uD82A": 0}
+ msg = 'surrogates not allowed'
+ with self.assertRaisesRegex(UnicodeEncodeError, msg):
+ _interpreters.exec(self.id, 'a', shared=bad_shared)
+ with self.assertRaisesRegex(UnicodeEncodeError, msg):
+ _interpreters.run_string(self.id, 'a', shared=bad_shared)
+ with self.assertRaisesRegex(UnicodeEncodeError, msg):
+ _interpreters.run_func(self.id, lambda: None, shared=bad_shared)
+
class RunStringTests(TestBase):
diff --git a/Lib/test/test__locale.py b/Lib/test/test__locale.py
index 89c203250557f0..606dd297452bc4 100644
--- a/Lib/test/test__locale.py
+++ b/Lib/test/test__locale.py
@@ -102,6 +102,11 @@ def accept(loc):
# ps_AF doesn't work on Windows: see bpo-38324 (msg361830)
del known_numerics['ps_AF']
+if sys.platform == 'sunos5':
+ # On Solaris, Japanese ERAs start with the year 1927,
+ # and thus there's less of them.
+ known_era['ja_JP'] = (5, '+:1:2019/05/01:2019/12/31:令和:%EC元年')
+
class _LocaleTests(unittest.TestCase):
def setUp(self):
diff --git a/Lib/test/test_apple.py b/Lib/test/test_apple.py
new file mode 100644
index 00000000000000..ab5296afad1d3f
--- /dev/null
+++ b/Lib/test/test_apple.py
@@ -0,0 +1,155 @@
+import unittest
+from _apple_support import SystemLog
+from test.support import is_apple
+from unittest.mock import Mock, call
+
+if not is_apple:
+ raise unittest.SkipTest("Apple-specific")
+
+
+# Test redirection of stdout and stderr to the Apple system log.
+class TestAppleSystemLogOutput(unittest.TestCase):
+ maxDiff = None
+
+ def assert_writes(self, output):
+ self.assertEqual(
+ self.log_write.mock_calls,
+ [
+ call(self.log_level, line)
+ for line in output
+ ]
+ )
+
+ self.log_write.reset_mock()
+
+ def setUp(self):
+ self.log_write = Mock()
+ self.log_level = 42
+ self.log = SystemLog(self.log_write, self.log_level, errors="replace")
+
+ def test_repr(self):
+ self.assertEqual(repr(self.log), "")
+ self.assertEqual(repr(self.log.buffer), "")
+
+ def test_log_config(self):
+ self.assertIs(self.log.writable(), True)
+ self.assertIs(self.log.readable(), False)
+
+ self.assertEqual("UTF-8", self.log.encoding)
+ self.assertEqual("replace", self.log.errors)
+
+ self.assertIs(self.log.line_buffering, True)
+ self.assertIs(self.log.write_through, False)
+
+ def test_empty_str(self):
+ self.log.write("")
+ self.log.flush()
+
+ self.assert_writes([])
+
+ def test_simple_str(self):
+ self.log.write("hello world\n")
+
+ self.assert_writes([b"hello world\n"])
+
+ def test_buffered_str(self):
+ self.log.write("h")
+ self.log.write("ello")
+ self.log.write(" ")
+ self.log.write("world\n")
+ self.log.write("goodbye.")
+ self.log.flush()
+
+ self.assert_writes([b"hello world\n", b"goodbye."])
+
+ def test_manual_flush(self):
+ self.log.write("Hello")
+
+ self.assert_writes([])
+
+ self.log.write(" world\nHere for a while...\nGoodbye")
+ self.assert_writes([b"Hello world\n", b"Here for a while...\n"])
+
+ self.log.write(" world\nHello again")
+ self.assert_writes([b"Goodbye world\n"])
+
+ self.log.flush()
+ self.assert_writes([b"Hello again"])
+
+ def test_non_ascii(self):
+ # Spanish
+ self.log.write("ol\u00e9\n")
+ self.assert_writes([b"ol\xc3\xa9\n"])
+
+ # Chinese
+ self.log.write("\u4e2d\u6587\n")
+ self.assert_writes([b"\xe4\xb8\xad\xe6\x96\x87\n"])
+
+ # Printing Non-BMP emoji
+ self.log.write("\U0001f600\n")
+ self.assert_writes([b"\xf0\x9f\x98\x80\n"])
+
+ # Non-encodable surrogates are replaced
+ self.log.write("\ud800\udc00\n")
+ self.assert_writes([b"??\n"])
+
+ def test_modified_null(self):
+ # Null characters are logged using "modified UTF-8".
+ self.log.write("\u0000\n")
+ self.assert_writes([b"\xc0\x80\n"])
+ self.log.write("a\u0000\n")
+ self.assert_writes([b"a\xc0\x80\n"])
+ self.log.write("\u0000b\n")
+ self.assert_writes([b"\xc0\x80b\n"])
+ self.log.write("a\u0000b\n")
+ self.assert_writes([b"a\xc0\x80b\n"])
+
+ def test_nonstandard_str(self):
+ # String subclasses are accepted, but they should be converted
+ # to a standard str without calling any of their methods.
+ class CustomStr(str):
+ def splitlines(self, *args, **kwargs):
+ raise AssertionError()
+
+ def __len__(self):
+ raise AssertionError()
+
+ def __str__(self):
+ raise AssertionError()
+
+ self.log.write(CustomStr("custom\n"))
+ self.assert_writes([b"custom\n"])
+
+ def test_non_str(self):
+ # Non-string classes are not accepted.
+ for obj in [b"", b"hello", None, 42]:
+ with self.subTest(obj=obj):
+ with self.assertRaisesRegex(
+ TypeError,
+ fr"write\(\) argument must be str, not "
+ fr"{type(obj).__name__}"
+ ):
+ self.log.write(obj)
+
+ def test_byteslike_in_buffer(self):
+ # The underlying buffer *can* accept bytes-like objects
+ self.log.buffer.write(bytearray(b"hello"))
+ self.log.flush()
+
+ self.log.buffer.write(b"")
+ self.log.flush()
+
+ self.log.buffer.write(b"goodbye")
+ self.log.flush()
+
+ self.assert_writes([b"hello", b"goodbye"])
+
+ def test_non_byteslike_in_buffer(self):
+ for obj in ["hello", None, 42]:
+ with self.subTest(obj=obj):
+ with self.assertRaisesRegex(
+ TypeError,
+ fr"write\(\) argument must be bytes-like, not "
+ fr"{type(obj).__name__}"
+ ):
+ self.log.buffer.write(obj)
diff --git a/Lib/test/test_array.py b/Lib/test/test_array.py
index 47cbe60bfca4e4..f0e32befd6628d 100755
--- a/Lib/test/test_array.py
+++ b/Lib/test/test_array.py
@@ -1665,5 +1665,13 @@ def test_tolist(self, size):
self.assertEqual(ls[:8], list(example[:8]))
self.assertEqual(ls[-8:], list(example[-8:]))
+ def test_gh_128961(self):
+ a = array.array('i')
+ it = iter(a)
+ list(it)
+ it.__setstate__(0)
+ self.assertRaises(StopIteration, next, it)
+
+
if __name__ == "__main__":
unittest.main()
diff --git a/Lib/test/test_asyncgen.py b/Lib/test/test_asyncgen.py
index 4f2278bb263681..951c5502803789 100644
--- a/Lib/test/test_asyncgen.py
+++ b/Lib/test/test_asyncgen.py
@@ -1152,6 +1152,23 @@ async def run():
self.loop.run_until_complete(run())
+ def test_async_gen_asyncio_anext_tuple_no_exceptions(self):
+ # StopAsyncIteration exceptions should be cleared.
+ # See: https://github.com/python/cpython/issues/128078.
+
+ async def foo():
+ if False:
+ yield (1, 2)
+
+ async def run():
+ it = foo().__aiter__()
+ with self.assertRaises(StopAsyncIteration):
+ await it.__anext__()
+ res = await anext(it, ('a', 'b'))
+ self.assertTupleEqual(res, ('a', 'b'))
+
+ self.loop.run_until_complete(run())
+
def test_async_gen_asyncio_anext_stopiteration(self):
async def foo():
try:
diff --git a/Lib/test/test_asyncio/test_selector_events.py b/Lib/test/test_asyncio/test_selector_events.py
index aaeda33dd0c677..893c9e2fba3d21 100644
--- a/Lib/test/test_asyncio/test_selector_events.py
+++ b/Lib/test/test_asyncio/test_selector_events.py
@@ -805,6 +805,18 @@ def test_writelines_send_partial(self):
self.assertTrue(self.sock.send.called)
self.assertTrue(self.loop.writers)
+ def test_writelines_pauses_protocol(self):
+ data = memoryview(b'data')
+ self.sock.send.return_value = 2
+ self.sock.send.fileno.return_value = 7
+
+ transport = self.socket_transport()
+ transport._high_water = 1
+ transport.writelines([data])
+ self.assertTrue(self.protocol.pause_writing.called)
+ self.assertTrue(self.sock.send.called)
+ self.assertTrue(self.loop.writers)
+
@unittest.skipUnless(selector_events._HAS_SENDMSG, 'no sendmsg')
def test_write_sendmsg_full(self):
data = memoryview(b'data')
@@ -1014,6 +1026,48 @@ def test_transport_close_remove_writer(self, m_log):
transport.close()
remove_writer.assert_called_with(self.sock_fd)
+ def test_write_buffer_after_close(self):
+ # gh-115514: If the transport is closed while:
+ # * Transport write buffer is not empty
+ # * Transport is paused
+ # * Protocol has data in its buffer, like SSLProtocol in self._outgoing
+ # The data is still written out.
+
+ # Also tested with real SSL transport in
+ # test.test_asyncio.test_ssl.TestSSL.test_remote_shutdown_receives_trailing_data
+
+ data = memoryview(b'data')
+ self.sock.send.return_value = 2
+ self.sock.send.fileno.return_value = 7
+
+ def _resume_writing():
+ transport.write(b"data")
+ self.protocol.resume_writing.side_effect = None
+
+ self.protocol.resume_writing.side_effect = _resume_writing
+
+ transport = self.socket_transport()
+ transport._high_water = 1
+
+ transport.write(data)
+
+ self.assertTrue(transport._protocol_paused)
+ self.assertTrue(self.sock.send.called)
+ self.loop.assert_writer(7, transport._write_ready)
+
+ transport.close()
+
+ # not called, we still have data in write buffer
+ self.assertFalse(self.protocol.connection_lost.called)
+
+ self.loop.writers[7]._run()
+ # during this ^ run, the _resume_writing mock above was called and added more data
+
+ self.assertEqual(transport.get_write_buffer_size(), 2)
+ self.loop.writers[7]._run()
+
+ self.assertEqual(transport.get_write_buffer_size(), 0)
+ self.assertTrue(self.protocol.connection_lost.called)
class SelectorSocketTransportBufferedProtocolTests(test_utils.TestCase):
diff --git a/Lib/test/test_asyncio/test_ssl.py b/Lib/test/test_asyncio/test_ssl.py
index e072ede29ee3c7..e4ab5a9024c956 100644
--- a/Lib/test/test_asyncio/test_ssl.py
+++ b/Lib/test/test_asyncio/test_ssl.py
@@ -12,6 +12,7 @@
import tempfile
import threading
import time
+import unittest.mock
import weakref
import unittest
@@ -1431,6 +1432,166 @@ def wrapper(sock):
with self.tcp_server(run(eof_server)) as srv:
self.loop.run_until_complete(client(srv.addr))
+ def test_remote_shutdown_receives_trailing_data_on_slow_socket(self):
+ # This test is the same as test_remote_shutdown_receives_trailing_data,
+ # except it simulates a socket that is not able to write data in time,
+ # thus triggering a different code path in _SelectorSocketTransport.
+ # This triggers bug gh-115514, also tested using mocks in
+ # test.test_asyncio.test_selector_events.SelectorSocketTransportTests.test_write_buffer_after_close
+ # The slow path is triggered here by setting SO_SNDBUF, see code and comment below.
+
+ CHUNK = 1024 * 128
+ SIZE = 32
+
+ sslctx = self._create_server_ssl_context(
+ test_utils.ONLYCERT,
+ test_utils.ONLYKEY
+ )
+ client_sslctx = self._create_client_ssl_context()
+ future = None
+
+ def server(sock):
+ incoming = ssl.MemoryBIO()
+ outgoing = ssl.MemoryBIO()
+ sslobj = sslctx.wrap_bio(incoming, outgoing, server_side=True)
+
+ while True:
+ try:
+ sslobj.do_handshake()
+ except ssl.SSLWantReadError:
+ if outgoing.pending:
+ sock.send(outgoing.read())
+ incoming.write(sock.recv(16384))
+ else:
+ if outgoing.pending:
+ sock.send(outgoing.read())
+ break
+
+ while True:
+ try:
+ data = sslobj.read(4)
+ except ssl.SSLWantReadError:
+ incoming.write(sock.recv(16384))
+ else:
+ break
+
+ self.assertEqual(data, b'ping')
+ sslobj.write(b'pong')
+ sock.send(outgoing.read())
+
+ time.sleep(0.2) # wait for the peer to fill its backlog
+
+ # send close_notify but don't wait for response
+ with self.assertRaises(ssl.SSLWantReadError):
+ sslobj.unwrap()
+ sock.send(outgoing.read())
+
+ # should receive all data
+ data_len = 0
+ while True:
+ try:
+ chunk = len(sslobj.read(16384))
+ data_len += chunk
+ except ssl.SSLWantReadError:
+ incoming.write(sock.recv(16384))
+ except ssl.SSLZeroReturnError:
+ break
+
+ self.assertEqual(data_len, CHUNK * SIZE*2)
+
+ # verify that close_notify is received
+ sslobj.unwrap()
+
+ sock.close()
+
+ def eof_server(sock):
+ sock.starttls(sslctx, server_side=True)
+ self.assertEqual(sock.recv_all(4), b'ping')
+ sock.send(b'pong')
+
+ time.sleep(0.2) # wait for the peer to fill its backlog
+
+ # send EOF
+ sock.shutdown(socket.SHUT_WR)
+
+ # should receive all data
+ data = sock.recv_all(CHUNK * SIZE)
+ self.assertEqual(len(data), CHUNK * SIZE)
+
+ sock.close()
+
+ async def client(addr):
+ nonlocal future
+ future = self.loop.create_future()
+
+ reader, writer = await asyncio.open_connection(
+ *addr,
+ ssl=client_sslctx,
+ server_hostname='')
+ writer.write(b'ping')
+ data = await reader.readexactly(4)
+ self.assertEqual(data, b'pong')
+
+ # fill write backlog in a hacky way - renegotiation won't help
+ for _ in range(SIZE*2):
+ writer.transport._test__append_write_backlog(b'x' * CHUNK)
+
+ try:
+ data = await reader.read()
+ self.assertEqual(data, b'')
+ except (BrokenPipeError, ConnectionResetError):
+ pass
+
+ # Make sure _SelectorSocketTransport enters the delayed write
+ # path in its `write` method by wrapping socket in a fake class
+ # that acts as if there is not enough space in the socket buffer.
+ # This triggers bug gh-115514, also tested using mocks in
+ # test.test_asyncio.test_selector_events.SelectorSocketTransportTests.test_write_buffer_after_close
+ socket_transport = writer.transport._ssl_protocol._transport
+
+ class SocketWrapper:
+ def __init__(self, sock) -> None:
+ self.sock = sock
+
+ def __getattr__(self, name):
+ return getattr(self.sock, name)
+
+ def send(self, data):
+ # Fake that our write buffer is full, send only half
+ to_send = len(data)//2
+ return self.sock.send(data[:to_send])
+
+ def _fake_full_write_buffer(data):
+ if socket_transport._read_ready_cb is None and not isinstance(socket_transport._sock, SocketWrapper):
+ socket_transport._sock = SocketWrapper(socket_transport._sock)
+ return unittest.mock.DEFAULT
+
+ with unittest.mock.patch.object(
+ socket_transport, "write",
+ wraps=socket_transport.write,
+ side_effect=_fake_full_write_buffer
+ ):
+ await future
+
+ writer.close()
+ await self.wait_closed(writer)
+
+ def run(meth):
+ def wrapper(sock):
+ try:
+ meth(sock)
+ except Exception as ex:
+ self.loop.call_soon_threadsafe(future.set_exception, ex)
+ else:
+ self.loop.call_soon_threadsafe(future.set_result, None)
+ return wrapper
+
+ with self.tcp_server(run(server)) as srv:
+ self.loop.run_until_complete(client(srv.addr))
+
+ with self.tcp_server(run(eof_server)) as srv:
+ self.loop.run_until_complete(client(srv.addr))
+
def test_connect_timeout_warning(self):
s = socket.socket(socket.AF_INET)
s.bind(('127.0.0.1', 0))
diff --git a/Lib/test/test_asyncio/test_staggered.py b/Lib/test/test_asyncio/test_staggered.py
index 74941f704c4890..40455a3804e3dd 100644
--- a/Lib/test/test_asyncio/test_staggered.py
+++ b/Lib/test/test_asyncio/test_staggered.py
@@ -122,3 +122,30 @@ async def do_set():
self.assertIsNone(excs[0], None)
self.assertIsInstance(excs[1], asyncio.CancelledError)
self.assertIsInstance(excs[2], asyncio.CancelledError)
+
+
+ async def test_cancelled(self):
+ log = []
+ with self.assertRaises(TimeoutError):
+ async with asyncio.timeout(None) as cs_outer, asyncio.timeout(None) as cs_inner:
+ async def coro_fn():
+ cs_inner.reschedule(-1)
+ await asyncio.sleep(0)
+ try:
+ await asyncio.sleep(0)
+ except asyncio.CancelledError:
+ log.append("cancelled 1")
+
+ cs_outer.reschedule(-1)
+ await asyncio.sleep(0)
+ try:
+ await asyncio.sleep(0)
+ except asyncio.CancelledError:
+ log.append("cancelled 2")
+ try:
+ await staggered_race([coro_fn], delay=None)
+ except asyncio.CancelledError:
+ log.append("cancelled 3")
+ raise
+
+ self.assertListEqual(log, ["cancelled 1", "cancelled 2", "cancelled 3"])
diff --git a/Lib/test/test_asyncio/test_taskgroups.py b/Lib/test/test_asyncio/test_taskgroups.py
index 138f59ebf57ef7..ad61cb46c7c07c 100644
--- a/Lib/test/test_asyncio/test_taskgroups.py
+++ b/Lib/test/test_asyncio/test_taskgroups.py
@@ -1,6 +1,8 @@
# Adapted with permission from the EdgeDB project;
# license: PSFL.
+import weakref
+import sys
import gc
import asyncio
import contextvars
@@ -28,7 +30,25 @@ def get_error_types(eg):
return {type(exc) for exc in eg.exceptions}
-class TestTaskGroup(unittest.IsolatedAsyncioTestCase):
+def set_gc_state(enabled):
+ was_enabled = gc.isenabled()
+ if enabled:
+ gc.enable()
+ else:
+ gc.disable()
+ return was_enabled
+
+
+@contextlib.contextmanager
+def disable_gc():
+ was_enabled = set_gc_state(enabled=False)
+ try:
+ yield
+ finally:
+ set_gc_state(enabled=was_enabled)
+
+
+class BaseTestTaskGroup:
async def test_taskgroup_01(self):
@@ -822,15 +842,15 @@ async def test_taskgroup_without_parent_task(self):
with self.assertRaisesRegex(RuntimeError, "has not been entered"):
tg.create_task(coro)
- def test_coro_closed_when_tg_closed(self):
+ async def test_coro_closed_when_tg_closed(self):
async def run_coro_after_tg_closes():
async with taskgroups.TaskGroup() as tg:
pass
coro = asyncio.sleep(0)
with self.assertRaisesRegex(RuntimeError, "is finished"):
tg.create_task(coro)
- loop = asyncio.get_event_loop()
- loop.run_until_complete(run_coro_after_tg_closes())
+
+ await run_coro_after_tg_closes()
async def test_cancelling_level_preserved(self):
async def raise_after(t, e):
@@ -955,6 +975,30 @@ async def coro_fn():
self.assertIsInstance(exc, _Done)
self.assertListEqual(gc.get_referrers(exc), [])
+
+ async def test_exception_refcycles_parent_task_wr(self):
+ """Test that TaskGroup deletes self._parent_task and create_task() deletes task"""
+ tg = asyncio.TaskGroup()
+ exc = None
+
+ class _Done(Exception):
+ pass
+
+ async def coro_fn():
+ async with tg:
+ raise _Done
+
+ with disable_gc():
+ try:
+ async with asyncio.TaskGroup() as tg2:
+ task_wr = weakref.ref(tg2.create_task(coro_fn()))
+ except* _Done as excs:
+ exc = excs.exceptions[0].exceptions[0]
+
+ self.assertIsNone(task_wr())
+ self.assertIsInstance(exc, _Done)
+ self.assertListEqual(gc.get_referrers(exc), [])
+
async def test_exception_refcycles_propagate_cancellation_error(self):
"""Test that TaskGroup deletes propagate_cancellation_error"""
tg = asyncio.TaskGroup()
@@ -988,5 +1032,66 @@ class MyKeyboardInterrupt(KeyboardInterrupt):
self.assertListEqual(gc.get_referrers(exc), [])
+ async def test_cancels_task_if_created_during_creation(self):
+ # regression test for gh-128550
+ ran = False
+ class MyError(Exception):
+ pass
+
+ exc = None
+ try:
+ async with asyncio.TaskGroup() as tg:
+ async def third_task():
+ raise MyError("third task failed")
+
+ async def second_task():
+ nonlocal ran
+ tg.create_task(third_task())
+ with self.assertRaises(asyncio.CancelledError):
+ await asyncio.sleep(0) # eager tasks cancel here
+ await asyncio.sleep(0) # lazy tasks cancel here
+ ran = True
+
+ tg.create_task(second_task())
+ except* MyError as excs:
+ exc = excs.exceptions[0]
+
+ self.assertTrue(ran)
+ self.assertIsInstance(exc, MyError)
+
+
+ async def test_cancellation_does_not_leak_out_of_tg(self):
+ class MyError(Exception):
+ pass
+
+ async def throw_error():
+ raise MyError
+
+ try:
+ async with asyncio.TaskGroup() as tg:
+ tg.create_task(throw_error())
+ except* MyError:
+ pass
+ else:
+ self.fail("should have raised one MyError in group")
+
+ # if this test fails this current task will be cancelled
+ # outside the task group and inside unittest internals
+ # we yield to the event loop with sleep(0) so that
+ # cancellation happens here and error is more understandable
+ await asyncio.sleep(0)
+
+
+class TestTaskGroup(BaseTestTaskGroup, unittest.IsolatedAsyncioTestCase):
+ loop_factory = asyncio.EventLoop
+
+class TestEagerTaskTaskGroup(BaseTestTaskGroup, unittest.IsolatedAsyncioTestCase):
+ @staticmethod
+ def loop_factory():
+ loop = asyncio.EventLoop()
+ loop.set_task_factory(asyncio.eager_task_factory)
+ return loop
+
+
if __name__ == "__main__":
unittest.main()
diff --git a/Lib/test/test_capi/test_eval.py b/Lib/test/test_capi/test_eval.py
new file mode 100644
index 00000000000000..20ef2695ef3e27
--- /dev/null
+++ b/Lib/test/test_capi/test_eval.py
@@ -0,0 +1,103 @@
+import sys
+import unittest
+from test.support import import_helper
+
+_testlimitedcapi = import_helper.import_module('_testlimitedcapi')
+
+
+class Tests(unittest.TestCase):
+ def test_eval_get_func_name(self):
+ eval_get_func_name = _testlimitedcapi.eval_get_func_name
+
+ def function_example(): ...
+
+ class A:
+ def method_example(self): ...
+
+ self.assertEqual(eval_get_func_name(function_example),
+ "function_example")
+ self.assertEqual(eval_get_func_name(A.method_example),
+ "method_example")
+ self.assertEqual(eval_get_func_name(A().method_example),
+ "method_example")
+ self.assertEqual(eval_get_func_name(sum), "sum") # c function
+ self.assertEqual(eval_get_func_name(A), "type")
+
+ def test_eval_get_func_desc(self):
+ eval_get_func_desc = _testlimitedcapi.eval_get_func_desc
+
+ def function_example(): ...
+
+ class A:
+ def method_example(self): ...
+
+ self.assertEqual(eval_get_func_desc(function_example),
+ "()")
+ self.assertEqual(eval_get_func_desc(A.method_example),
+ "()")
+ self.assertEqual(eval_get_func_desc(A().method_example),
+ "()")
+ self.assertEqual(eval_get_func_desc(sum), "()") # c function
+ self.assertEqual(eval_get_func_desc(A), " object")
+
+ def test_eval_getlocals(self):
+ # Test PyEval_GetLocals()
+ x = 1
+ self.assertEqual(_testlimitedcapi.eval_getlocals(),
+ {'self': self,
+ 'x': 1})
+
+ y = 2
+ self.assertEqual(_testlimitedcapi.eval_getlocals(),
+ {'self': self,
+ 'x': 1,
+ 'y': 2})
+
+ def test_eval_getglobals(self):
+ # Test PyEval_GetGlobals()
+ self.assertEqual(_testlimitedcapi.eval_getglobals(),
+ globals())
+
+ def test_eval_getbuiltins(self):
+ # Test PyEval_GetBuiltins()
+ self.assertEqual(_testlimitedcapi.eval_getbuiltins(),
+ globals()['__builtins__'])
+
+ def test_eval_getframe(self):
+ # Test PyEval_GetFrame()
+ self.assertEqual(_testlimitedcapi.eval_getframe(),
+ sys._getframe())
+
+ def test_eval_getframe_builtins(self):
+ # Test PyEval_GetFrameBuiltins()
+ self.assertEqual(_testlimitedcapi.eval_getframe_builtins(),
+ sys._getframe().f_builtins)
+
+ def test_eval_getframe_globals(self):
+ # Test PyEval_GetFrameGlobals()
+ self.assertEqual(_testlimitedcapi.eval_getframe_globals(),
+ sys._getframe().f_globals)
+
+ def test_eval_getframe_locals(self):
+ # Test PyEval_GetFrameLocals()
+ self.assertEqual(_testlimitedcapi.eval_getframe_locals(),
+ sys._getframe().f_locals)
+
+ def test_eval_get_recursion_limit(self):
+ # Test Py_GetRecursionLimit()
+ self.assertEqual(_testlimitedcapi.eval_get_recursion_limit(),
+ sys.getrecursionlimit())
+
+ def test_eval_set_recursion_limit(self):
+ # Test Py_SetRecursionLimit()
+ old_limit = sys.getrecursionlimit()
+ try:
+ limit = old_limit + 123
+ _testlimitedcapi.eval_set_recursion_limit(limit)
+ self.assertEqual(sys.getrecursionlimit(), limit)
+ finally:
+ sys.setrecursionlimit(old_limit)
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/Lib/test/test_capi/test_file.py b/Lib/test/test_capi/test_file.py
new file mode 100644
index 00000000000000..373692d938568a
--- /dev/null
+++ b/Lib/test/test_capi/test_file.py
@@ -0,0 +1,234 @@
+import io
+import os
+import unittest
+import warnings
+from test import support
+from test.support import import_helper, os_helper, warnings_helper
+
+
+_testcapi = import_helper.import_module('_testcapi')
+_testlimitedcapi = import_helper.import_module('_testlimitedcapi')
+_io = import_helper.import_module('_io')
+NULL = None
+STDOUT_FD = 1
+
+with open(__file__, 'rb') as fp:
+ FIRST_LINE = next(fp).decode()
+FIRST_LINE_NORM = FIRST_LINE.rstrip() + '\n'
+
+
+class CAPIFileTest(unittest.TestCase):
+ def test_pyfile_fromfd(self):
+ # Test PyFile_FromFd() which is a thin wrapper to _io.open()
+ pyfile_fromfd = _testlimitedcapi.pyfile_fromfd
+ filename = __file__
+ with open(filename, "rb") as fp:
+ fd = fp.fileno()
+
+ # FileIO
+ fp.seek(0)
+ obj = pyfile_fromfd(fd, filename, "rb", 0, NULL, NULL, NULL, 0)
+ try:
+ self.assertIsInstance(obj, _io.FileIO)
+ self.assertEqual(obj.readline(), FIRST_LINE.encode())
+ finally:
+ obj.close()
+
+ # BufferedReader
+ fp.seek(0)
+ obj = pyfile_fromfd(fd, filename, "rb", 1024, NULL, NULL, NULL, 0)
+ try:
+ self.assertIsInstance(obj, _io.BufferedReader)
+ self.assertEqual(obj.readline(), FIRST_LINE.encode())
+ finally:
+ obj.close()
+
+ # TextIOWrapper
+ fp.seek(0)
+ obj = pyfile_fromfd(fd, filename, "r", 1,
+ "utf-8", "replace", NULL, 0)
+ try:
+ self.assertIsInstance(obj, _io.TextIOWrapper)
+ self.assertEqual(obj.encoding, "utf-8")
+ self.assertEqual(obj.errors, "replace")
+ self.assertEqual(obj.readline(), FIRST_LINE_NORM)
+ finally:
+ obj.close()
+
+ def test_pyfile_getline(self):
+ # Test PyFile_GetLine(file, n): call file.readline()
+ # and strip "\n" suffix if n < 0.
+ pyfile_getline = _testlimitedcapi.pyfile_getline
+
+ # Test Unicode
+ with open(__file__, "r") as fp:
+ fp.seek(0)
+ self.assertEqual(pyfile_getline(fp, -1),
+ FIRST_LINE_NORM.rstrip('\n'))
+ fp.seek(0)
+ self.assertEqual(pyfile_getline(fp, 0),
+ FIRST_LINE_NORM)
+ fp.seek(0)
+ self.assertEqual(pyfile_getline(fp, 6),
+ FIRST_LINE_NORM[:6])
+
+ # Test bytes
+ with open(__file__, "rb") as fp:
+ fp.seek(0)
+ self.assertEqual(pyfile_getline(fp, -1),
+ FIRST_LINE.rstrip('\n').encode())
+ fp.seek(0)
+ self.assertEqual(pyfile_getline(fp, 0),
+ FIRST_LINE.encode())
+ fp.seek(0)
+ self.assertEqual(pyfile_getline(fp, 6),
+ FIRST_LINE.encode()[:6])
+
+ def test_pyfile_writestring(self):
+ # Test PyFile_WriteString(str, file): call file.write(str)
+ writestr = _testlimitedcapi.pyfile_writestring
+
+ with io.StringIO() as fp:
+ self.assertEqual(writestr("a\xe9\u20ac\U0010FFFF".encode(), fp), 0)
+ with self.assertRaises(UnicodeDecodeError):
+ writestr(b"\xff", fp)
+ with self.assertRaises(UnicodeDecodeError):
+ writestr("\udc80".encode("utf-8", "surrogatepass"), fp)
+
+ text = fp.getvalue()
+ self.assertEqual(text, "a\xe9\u20ac\U0010FFFF")
+
+ with self.assertRaises(SystemError):
+ writestr(b"abc", NULL)
+
+ def test_pyfile_writeobject(self):
+ # Test PyFile_WriteObject(obj, file, flags):
+ # - Call file.write(str(obj)) if flags equals Py_PRINT_RAW.
+ # - Call file.write(repr(obj)) otherwise.
+ writeobject = _testlimitedcapi.pyfile_writeobject
+ Py_PRINT_RAW = 1
+
+ with io.StringIO() as fp:
+ # Test flags=Py_PRINT_RAW
+ self.assertEqual(writeobject("raw", fp, Py_PRINT_RAW), 0)
+ writeobject(NULL, fp, Py_PRINT_RAW)
+
+ # Test flags=0
+ self.assertEqual(writeobject("repr", fp, 0), 0)
+ writeobject(NULL, fp, 0)
+
+ text = fp.getvalue()
+ self.assertEqual(text, "raw'repr'")
+
+ # invalid file type
+ for invalid_file in (123, "abc", object()):
+ with self.subTest(file=invalid_file):
+ with self.assertRaises(AttributeError):
+ writeobject("abc", invalid_file, Py_PRINT_RAW)
+
+ with self.assertRaises(TypeError):
+ writeobject("abc", NULL, 0)
+
+ def test_pyobject_asfiledescriptor(self):
+ # Test PyObject_AsFileDescriptor(obj):
+ # - Return obj if obj is an integer.
+ # - Return obj.fileno() otherwise.
+ # File descriptor must be >= 0.
+ asfd = _testlimitedcapi.pyobject_asfiledescriptor
+
+ self.assertEqual(asfd(123), 123)
+ self.assertEqual(asfd(0), 0)
+
+ with open(__file__, "rb") as fp:
+ self.assertEqual(asfd(fp), fp.fileno())
+
+ # bool emits RuntimeWarning
+ msg = r"bool is used as a file descriptor"
+ with warnings_helper.check_warnings((msg, RuntimeWarning)):
+ self.assertEqual(asfd(True), 1)
+
+ class FakeFile:
+ def __init__(self, fd):
+ self.fd = fd
+ def fileno(self):
+ return self.fd
+
+ # file descriptor must be positive
+ with self.assertRaises(ValueError):
+ asfd(-1)
+ with self.assertRaises(ValueError):
+ asfd(FakeFile(-1))
+
+ # fileno() result must be an integer
+ with self.assertRaises(TypeError):
+ asfd(FakeFile("text"))
+
+ # unsupported types
+ for obj in ("string", ["list"], object()):
+ with self.subTest(obj=obj):
+ with self.assertRaises(TypeError):
+ asfd(obj)
+
+ # CRASHES asfd(NULL)
+
+ def test_pyfile_newstdprinter(self):
+ # Test PyFile_NewStdPrinter()
+ pyfile_newstdprinter = _testcapi.pyfile_newstdprinter
+
+ file = pyfile_newstdprinter(STDOUT_FD)
+ self.assertEqual(file.closed, False)
+ self.assertIsNone(file.encoding)
+ self.assertEqual(file.mode, "w")
+
+ self.assertEqual(file.fileno(), STDOUT_FD)
+ self.assertEqual(file.isatty(), os.isatty(STDOUT_FD))
+
+ # flush() is a no-op
+ self.assertIsNone(file.flush())
+
+ # close() is a no-op
+ self.assertIsNone(file.close())
+ self.assertEqual(file.closed, False)
+
+ support.check_disallow_instantiation(self, type(file))
+
+ def test_pyfile_newstdprinter_write(self):
+ # Test the write() method of PyFile_NewStdPrinter()
+ pyfile_newstdprinter = _testcapi.pyfile_newstdprinter
+
+ filename = os_helper.TESTFN
+ self.addCleanup(os_helper.unlink, filename)
+
+ try:
+ old_stdout = os.dup(STDOUT_FD)
+ except OSError as exc:
+ # os.dup(STDOUT_FD) is not supported on WASI
+ self.skipTest(f"os.dup() failed with {exc!r}")
+
+ try:
+ with open(filename, "wb") as fp:
+ # PyFile_NewStdPrinter() only accepts fileno(stdout)
+ # or fileno(stderr) file descriptor.
+ fd = fp.fileno()
+ os.dup2(fd, STDOUT_FD)
+
+ file = pyfile_newstdprinter(STDOUT_FD)
+ self.assertEqual(file.write("text"), 4)
+ # The surrogate character is encoded with
+ # the "surrogateescape" error handler
+ self.assertEqual(file.write("[\udc80]"), 8)
+ finally:
+ os.dup2(old_stdout, STDOUT_FD)
+ os.close(old_stdout)
+
+ with open(filename, "r") as fp:
+ self.assertEqual(fp.read(), "text[\\udc80]")
+
+ # TODO: Test Py_UniversalNewlineFgets()
+
+ # PyFile_SetOpenCodeHook() and PyFile_OpenCode() are tested by
+ # test_embed.test_open_code_hook()
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/Lib/test/test_capi/test_import.py b/Lib/test/test_capi/test_import.py
new file mode 100644
index 00000000000000..3abd1cd7470111
--- /dev/null
+++ b/Lib/test/test_capi/test_import.py
@@ -0,0 +1,322 @@
+import importlib.util
+import os.path
+import sys
+import types
+import unittest
+from test.support import os_helper
+from test.support import import_helper
+from test.support.warnings_helper import check_warnings
+
+_testlimitedcapi = import_helper.import_module('_testlimitedcapi')
+NULL = None
+
+
+class ImportTests(unittest.TestCase):
+ def test_getmagicnumber(self):
+ # Test PyImport_GetMagicNumber()
+ magic = _testlimitedcapi.PyImport_GetMagicNumber()
+ self.assertEqual(magic,
+ int.from_bytes(importlib.util.MAGIC_NUMBER, 'little'))
+
+ def test_getmagictag(self):
+ # Test PyImport_GetMagicTag()
+ tag = _testlimitedcapi.PyImport_GetMagicTag()
+ self.assertEqual(tag, sys.implementation.cache_tag)
+
+ def test_getmoduledict(self):
+ # Test PyImport_GetModuleDict()
+ modules = _testlimitedcapi.PyImport_GetModuleDict()
+ self.assertIs(modules, sys.modules)
+
+ def check_import_loaded_module(self, import_module):
+ for name in ('os', 'sys', 'test', 'unittest'):
+ with self.subTest(name=name):
+ self.assertIn(name, sys.modules)
+ old_module = sys.modules[name]
+ module = import_module(name)
+ self.assertIsInstance(module, types.ModuleType)
+ self.assertIs(module, old_module)
+
+ def check_import_fresh_module(self, import_module):
+ old_modules = dict(sys.modules)
+ try:
+ for name in ('colorsys', 'math'):
+ with self.subTest(name=name):
+ sys.modules.pop(name, None)
+ module = import_module(name)
+ self.assertIsInstance(module, types.ModuleType)
+ self.assertIs(module, sys.modules[name])
+ self.assertEqual(module.__name__, name)
+ finally:
+ sys.modules.clear()
+ sys.modules.update(old_modules)
+
+ def test_getmodule(self):
+ # Test PyImport_GetModule()
+ getmodule = _testlimitedcapi.PyImport_GetModule
+ self.check_import_loaded_module(getmodule)
+
+ nonexistent = 'nonexistent'
+ self.assertNotIn(nonexistent, sys.modules)
+ self.assertIs(getmodule(nonexistent), KeyError)
+ self.assertIs(getmodule(''), KeyError)
+ self.assertIs(getmodule(object()), KeyError)
+
+ self.assertRaises(TypeError, getmodule, []) # unhashable
+ # CRASHES getmodule(NULL)
+
+ def check_addmodule(self, add_module, accept_nonstr=False):
+ # create a new module
+ names = ['nonexistent']
+ if accept_nonstr:
+ names.append(b'\xff') # non-UTF-8
+ for name in names:
+ with self.subTest(name=name):
+ self.assertNotIn(name, sys.modules)
+ try:
+ module = add_module(name)
+ self.assertIsInstance(module, types.ModuleType)
+ self.assertEqual(module.__name__, name)
+ self.assertIs(module, sys.modules[name])
+ finally:
+ sys.modules.pop(name, None)
+
+ # get an existing module
+ self.check_import_loaded_module(add_module)
+
+ def test_addmoduleobject(self):
+ # Test PyImport_AddModuleObject()
+ addmoduleobject = _testlimitedcapi.PyImport_AddModuleObject
+ self.check_addmodule(addmoduleobject, accept_nonstr=True)
+
+ self.assertRaises(TypeError, addmoduleobject, []) # unhashable
+ # CRASHES addmoduleobject(NULL)
+
+ def test_addmodule(self):
+ # Test PyImport_AddModule()
+ addmodule = _testlimitedcapi.PyImport_AddModule
+ self.check_addmodule(addmodule)
+
+ self.assertRaises(UnicodeDecodeError, addmodule, b'\xff')
+ # CRASHES addmodule(NULL)
+
+ def test_addmoduleref(self):
+ # Test PyImport_AddModuleRef()
+ addmoduleref = _testlimitedcapi.PyImport_AddModuleRef
+ self.check_addmodule(addmoduleref)
+
+ self.assertRaises(UnicodeDecodeError, addmoduleref, b'\xff')
+ # CRASHES addmoduleref(NULL)
+
+ def check_import_func(self, import_module):
+ self.check_import_loaded_module(import_module)
+ self.check_import_fresh_module(import_module)
+ self.assertRaises(ModuleNotFoundError, import_module, 'nonexistent')
+ self.assertRaises(ValueError, import_module, '')
+
+ def test_import(self):
+ # Test PyImport_Import()
+ import_ = _testlimitedcapi.PyImport_Import
+ self.check_import_func(import_)
+
+ self.assertRaises(TypeError, import_, b'os')
+ self.assertRaises(SystemError, import_, NULL)
+
+ def test_importmodule(self):
+ # Test PyImport_ImportModule()
+ importmodule = _testlimitedcapi.PyImport_ImportModule
+ self.check_import_func(importmodule)
+
+ self.assertRaises(UnicodeDecodeError, importmodule, b'\xff')
+ # CRASHES importmodule(NULL)
+
+ def test_importmodulenoblock(self):
+ # Test deprecated PyImport_ImportModuleNoBlock()
+ importmodulenoblock = _testlimitedcapi.PyImport_ImportModuleNoBlock
+ with check_warnings(('', DeprecationWarning)):
+ self.check_import_func(importmodulenoblock)
+ self.assertRaises(UnicodeDecodeError, importmodulenoblock, b'\xff')
+
+ # CRASHES importmodulenoblock(NULL)
+
+ def check_frozen_import(self, import_frozen_module):
+ # Importing a frozen module executes its code, so start by unloading
+ # the module to execute the code in a new (temporary) module.
+ old_zipimport = sys.modules.pop('zipimport')
+ try:
+ self.assertEqual(import_frozen_module('zipimport'), 1)
+
+ # import zipimport again
+ self.assertEqual(import_frozen_module('zipimport'), 1)
+ finally:
+ sys.modules['zipimport'] = old_zipimport
+
+ # not a frozen module
+ self.assertEqual(import_frozen_module('sys'), 0)
+ self.assertEqual(import_frozen_module('nonexistent'), 0)
+ self.assertEqual(import_frozen_module(''), 0)
+
+ def test_importfrozenmodule(self):
+ # Test PyImport_ImportFrozenModule()
+ importfrozenmodule = _testlimitedcapi.PyImport_ImportFrozenModule
+ self.check_frozen_import(importfrozenmodule)
+
+ self.assertRaises(UnicodeDecodeError, importfrozenmodule, b'\xff')
+ # CRASHES importfrozenmodule(NULL)
+
+ def test_importfrozenmoduleobject(self):
+ # Test PyImport_ImportFrozenModuleObject()
+ importfrozenmoduleobject = _testlimitedcapi.PyImport_ImportFrozenModuleObject
+ self.check_frozen_import(importfrozenmoduleobject)
+ self.assertEqual(importfrozenmoduleobject(b'zipimport'), 0)
+ self.assertEqual(importfrozenmoduleobject(NULL), 0)
+
+ def test_importmoduleex(self):
+ # Test PyImport_ImportModuleEx()
+ importmoduleex = _testlimitedcapi.PyImport_ImportModuleEx
+ self.check_import_func(lambda name: importmoduleex(name, NULL, NULL, NULL))
+
+ self.assertRaises(ModuleNotFoundError, importmoduleex, 'nonexistent', NULL, NULL, NULL)
+ self.assertRaises(ValueError, importmoduleex, '', NULL, NULL, NULL)
+ self.assertRaises(UnicodeDecodeError, importmoduleex, b'\xff', NULL, NULL, NULL)
+ # CRASHES importmoduleex(NULL, NULL, NULL, NULL)
+
+ def check_importmodulelevel(self, importmodulelevel):
+ self.check_import_func(lambda name: importmodulelevel(name, NULL, NULL, NULL, 0))
+
+ self.assertRaises(ModuleNotFoundError, importmodulelevel, 'nonexistent', NULL, NULL, NULL, 0)
+ self.assertRaises(ValueError, importmodulelevel, '', NULL, NULL, NULL, 0)
+
+ if __package__:
+ self.assertIs(importmodulelevel('test_import', globals(), NULL, NULL, 1),
+ sys.modules['test.test_capi.test_import'])
+ self.assertIs(importmodulelevel('test_capi', globals(), NULL, NULL, 2),
+ sys.modules['test.test_capi'])
+ self.assertRaises(ValueError, importmodulelevel, 'os', NULL, NULL, NULL, -1)
+ with self.assertWarns(ImportWarning):
+ self.assertRaises(KeyError, importmodulelevel, 'test_import', {}, NULL, NULL, 1)
+ self.assertRaises(TypeError, importmodulelevel, 'test_import', [], NULL, NULL, 1)
+
+ def test_importmodulelevel(self):
+ # Test PyImport_ImportModuleLevel()
+ importmodulelevel = _testlimitedcapi.PyImport_ImportModuleLevel
+ self.check_importmodulelevel(importmodulelevel)
+
+ self.assertRaises(UnicodeDecodeError, importmodulelevel, b'\xff', NULL, NULL, NULL, 0)
+ # CRASHES importmodulelevel(NULL, NULL, NULL, NULL, 0)
+
+ def test_importmodulelevelobject(self):
+ # Test PyImport_ImportModuleLevelObject()
+ importmodulelevel = _testlimitedcapi.PyImport_ImportModuleLevelObject
+ self.check_importmodulelevel(importmodulelevel)
+
+ self.assertRaises(TypeError, importmodulelevel, b'os', NULL, NULL, NULL, 0)
+ self.assertRaises(ValueError, importmodulelevel, NULL, NULL, NULL, NULL, 0)
+
+ def check_executecodemodule(self, execute_code, *args):
+ name = 'test_import_executecode'
+ try:
+ # Create a temporary module where the code will be executed
+ self.assertNotIn(name, sys.modules)
+ module = _testlimitedcapi.PyImport_AddModuleRef(name)
+ self.assertFalse(hasattr(module, 'attr'))
+
+ # Execute the code
+ code = compile('attr = 1', '', 'exec')
+ module2 = execute_code(name, code, *args)
+ self.assertIs(module2, module)
+
+ # Check the function side effects
+ self.assertEqual(module.attr, 1)
+ finally:
+ sys.modules.pop(name, None)
+ return module.__spec__.origin
+
+ def test_executecodemodule(self):
+ # Test PyImport_ExecCodeModule()
+ execcodemodule = _testlimitedcapi.PyImport_ExecCodeModule
+ self.check_executecodemodule(execcodemodule)
+
+ code = compile('attr = 1', '', 'exec')
+ self.assertRaises(UnicodeDecodeError, execcodemodule, b'\xff', code)
+ # CRASHES execcodemodule(NULL, code)
+ # CRASHES execcodemodule(name, NULL)
+
+ def test_executecodemoduleex(self):
+ # Test PyImport_ExecCodeModuleEx()
+ execcodemoduleex = _testlimitedcapi.PyImport_ExecCodeModuleEx
+
+ # Test NULL path (it should not crash)
+ self.check_executecodemodule(execcodemoduleex, NULL)
+
+ # Test non-NULL path
+ pathname = b'pathname'
+ origin = self.check_executecodemodule(execcodemoduleex, pathname)
+ self.assertEqual(origin, os.path.abspath(os.fsdecode(pathname)))
+
+ pathname = os_helper.TESTFN_UNDECODABLE
+ if pathname:
+ origin = self.check_executecodemodule(execcodemoduleex, pathname)
+ self.assertEqual(origin, os.path.abspath(os.fsdecode(pathname)))
+
+ code = compile('attr = 1', '', 'exec')
+ self.assertRaises(UnicodeDecodeError, execcodemoduleex, b'\xff', code, NULL)
+ # CRASHES execcodemoduleex(NULL, code, NULL)
+ # CRASHES execcodemoduleex(name, NULL, NULL)
+
+ def check_executecode_pathnames(self, execute_code_func, object=False):
+ # Test non-NULL pathname and NULL cpathname
+
+ # Test NULL paths (it should not crash)
+ self.check_executecodemodule(execute_code_func, NULL, NULL)
+
+ pathname = 'pathname'
+ origin = self.check_executecodemodule(execute_code_func, pathname, NULL)
+ self.assertEqual(origin, os.path.abspath(os.fsdecode(pathname)))
+ origin = self.check_executecodemodule(execute_code_func, NULL, pathname)
+ if not object:
+ self.assertEqual(origin, os.path.abspath(os.fsdecode(pathname)))
+
+ pathname = os_helper.TESTFN_UNDECODABLE
+ if pathname:
+ if object:
+ pathname = os.fsdecode(pathname)
+ origin = self.check_executecodemodule(execute_code_func, pathname, NULL)
+ self.assertEqual(origin, os.path.abspath(os.fsdecode(pathname)))
+ self.check_executecodemodule(execute_code_func, NULL, pathname)
+
+ # Test NULL pathname and non-NULL cpathname
+ pyc_filename = importlib.util.cache_from_source(__file__)
+ py_filename = importlib.util.source_from_cache(pyc_filename)
+ origin = self.check_executecodemodule(execute_code_func, NULL, pyc_filename)
+ if not object:
+ self.assertEqual(origin, py_filename)
+
+ def test_executecodemodulewithpathnames(self):
+ # Test PyImport_ExecCodeModuleWithPathnames()
+ execute_code_func = _testlimitedcapi.PyImport_ExecCodeModuleWithPathnames
+ self.check_executecode_pathnames(execute_code_func)
+
+ code = compile('attr = 1', '', 'exec')
+ self.assertRaises(UnicodeDecodeError, execute_code_func, b'\xff', code, NULL, NULL)
+ # CRASHES execute_code_func(NULL, code, NULL, NULL)
+ # CRASHES execute_code_func(name, NULL, NULL, NULL)
+
+ def test_executecodemoduleobject(self):
+ # Test PyImport_ExecCodeModuleObject()
+ execute_code_func = _testlimitedcapi.PyImport_ExecCodeModuleObject
+ self.check_executecode_pathnames(execute_code_func, object=True)
+
+ code = compile('attr = 1', '', 'exec')
+ self.assertRaises(TypeError, execute_code_func, [], code, NULL, NULL)
+ # CRASHES execute_code_func(NULL, code, NULL, NULL)
+ # CRASHES execute_code_func(name, NULL, NULL, NULL)
+
+ # TODO: test PyImport_GetImporter()
+ # TODO: test PyImport_ReloadModule()
+ # TODO: test PyImport_ExtendInittab()
+ # PyImport_AppendInittab() is tested by test_embed
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/Lib/test/test_capi/test_misc.py b/Lib/test/test_capi/test_misc.py
index 080b3e65332af4..f0b872627f6f93 100644
--- a/Lib/test/test_capi/test_misc.py
+++ b/Lib/test/test_capi/test_misc.py
@@ -75,6 +75,8 @@ class InstanceMethod:
id = _testcapi.instancemethod(id)
testfunction = _testcapi.instancemethod(testfunction)
+
+@support.force_not_colorized_test_class
class CAPITest(unittest.TestCase):
def test_instancemethod(self):
@@ -892,36 +894,6 @@ def __init__(self):
_testcapi.clear_managed_dict(c)
self.assertEqual(c.__dict__, {})
- def test_eval_get_func_name(self):
- def function_example(): ...
-
- class A:
- def method_example(self): ...
-
- self.assertEqual(_testcapi.eval_get_func_name(function_example),
- "function_example")
- self.assertEqual(_testcapi.eval_get_func_name(A.method_example),
- "method_example")
- self.assertEqual(_testcapi.eval_get_func_name(A().method_example),
- "method_example")
- self.assertEqual(_testcapi.eval_get_func_name(sum), "sum") # c function
- self.assertEqual(_testcapi.eval_get_func_name(A), "type")
-
- def test_eval_get_func_desc(self):
- def function_example(): ...
-
- class A:
- def method_example(self): ...
-
- self.assertEqual(_testcapi.eval_get_func_desc(function_example),
- "()")
- self.assertEqual(_testcapi.eval_get_func_desc(A.method_example),
- "()")
- self.assertEqual(_testcapi.eval_get_func_desc(A().method_example),
- "()")
- self.assertEqual(_testcapi.eval_get_func_desc(sum), "()") # c function
- self.assertEqual(_testcapi.eval_get_func_desc(A), " object")
-
def test_function_get_code(self):
import types
@@ -1180,19 +1152,6 @@ def genf(): yield
gen = genf()
self.assertEqual(_testcapi.gen_get_code(gen), gen.gi_code)
- def test_pyeval_getlocals(self):
- # Test PyEval_GetLocals()
- x = 1
- self.assertEqual(_testcapi.pyeval_getlocals(),
- {'self': self,
- 'x': 1})
-
- y = 2
- self.assertEqual(_testcapi.pyeval_getlocals(),
- {'self': self,
- 'x': 1,
- 'y': 2})
-
@requires_limited_api
class TestHeapTypeRelative(unittest.TestCase):
diff --git a/Lib/test/test_capi/test_unicode.py b/Lib/test/test_capi/test_unicode.py
index a69f817c515ba7..f750ec1a56fef9 100644
--- a/Lib/test/test_capi/test_unicode.py
+++ b/Lib/test/test_capi/test_unicode.py
@@ -1,7 +1,7 @@
import unittest
import sys
from test import support
-from test.support import import_helper
+from test.support import threading_helper, import_helper
try:
import _testcapi
@@ -959,6 +959,24 @@ def test_asutf8(self):
self.assertRaises(TypeError, unicode_asutf8, [], 0)
# CRASHES unicode_asutf8(NULL, 0)
+ @unittest.skipIf(_testcapi is None, 'need _testcapi module')
+ @threading_helper.requires_working_threading()
+ def test_asutf8_race(self):
+ """Test that there's no race condition in PyUnicode_AsUTF8()"""
+ unicode_asutf8 = _testcapi.unicode_asutf8
+ from threading import Thread
+
+ data = "😊"
+
+ def worker():
+ for _ in range(1000):
+ self.assertEqual(unicode_asutf8(data, 5), b'\xf0\x9f\x98\x8a\0')
+
+ threads = [Thread(target=worker) for _ in range(10)]
+ with threading_helper.start_threads(threads):
+ pass
+
+
@support.cpython_only
@unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module')
def test_asutf8andsize(self):
diff --git a/Lib/test/test_cext/__init__.py b/Lib/test/test_cext/__init__.py
index ec44b0ce1f8a56..54859f9ff7622d 100644
--- a/Lib/test/test_cext/__init__.py
+++ b/Lib/test/test_cext/__init__.py
@@ -86,6 +86,8 @@ def run_cmd(operation, cmd):
cmd = [python_exe, '-X', 'dev',
'-m', 'pip', 'install', '--no-build-isolation',
os.path.abspath(pkg_dir)]
+ if support.verbose:
+ cmd.append('-v')
run_cmd('Install', cmd)
# Do a reference run. Until we test that running python
diff --git a/Lib/test/test_cext/extension.c b/Lib/test/test_cext/extension.c
index eb23dbe20353ba..b76abe1d74c628 100644
--- a/Lib/test/test_cext/extension.c
+++ b/Lib/test/test_cext/extension.c
@@ -37,7 +37,13 @@ static PyMethodDef _testcext_methods[] = {
static int
-_testcext_exec(PyObject *module)
+_testcext_exec(
+#ifdef __STDC_VERSION__
+ PyObject *module
+#else
+ PyObject *Py_UNUSED(module)
+#endif
+ )
{
#ifdef __STDC_VERSION__
if (PyModule_AddIntMacro(module, __STDC_VERSION__) < 0) {
@@ -53,7 +59,7 @@ _testcext_exec(PyObject *module)
}
static PyModuleDef_Slot _testcext_slots[] = {
- {Py_mod_exec, _testcext_exec},
+ {Py_mod_exec, (void*)_testcext_exec},
{0, NULL}
};
diff --git a/Lib/test/test_cext/setup.py b/Lib/test/test_cext/setup.py
index ccad3fa62ad086..e97749b45ea6f3 100644
--- a/Lib/test/test_cext/setup.py
+++ b/Lib/test/test_cext/setup.py
@@ -11,12 +11,16 @@
SOURCE = 'extension.c'
+
if not support.MS_WINDOWS:
# C compiler flags for GCC and clang
CFLAGS = [
# The purpose of test_cext extension is to check that building a C
# extension using the Python C API does not emit C compiler warnings.
'-Werror',
+
+ # gh-120593: Check the 'const' qualifier
+ '-Wcast-qual',
]
if not support.Py_GIL_DISABLED:
CFLAGS.append(
@@ -25,8 +29,13 @@
'-Werror=declaration-after-statement',
)
else:
- # Don't pass any compiler flag to MSVC
- CFLAGS = []
+ # MSVC compiler flags
+ CFLAGS = [
+ # Display warnings level 1 to 4
+ '/W4',
+ # Treat all compiler warnings as compiler errors
+ '/WX',
+ ]
def main():
diff --git a/Lib/test/test_clinic.py b/Lib/test/test_clinic.py
index a7ba7f3d99860e..e210103a80f399 100644
--- a/Lib/test/test_clinic.py
+++ b/Lib/test/test_clinic.py
@@ -731,6 +731,16 @@ def test_cloned_forced_text_signature_illegal(self):
err = "Cannot use @text_signature when cloning a function"
self.expect_failure(block, err, lineno=11)
+ def test_ignore_preprocessor_in_comments(self):
+ for dsl in "clinic", "python":
+ raw = dedent(f"""\
+ /*[{dsl} input]
+ # CPP directives, valid or not, should be ignored in C comments.
+ #
+ [{dsl} start generated code]*/
+ """)
+ self.clinic.parse(raw)
+
class ParseFileUnitTest(TestCase):
def expect_parsing_failure(
diff --git a/Lib/test/test_cmd_line_script.py b/Lib/test/test_cmd_line_script.py
index 3a5a8abf81e43d..1ec5e581f81d17 100644
--- a/Lib/test/test_cmd_line_script.py
+++ b/Lib/test/test_cmd_line_script.py
@@ -88,6 +88,8 @@ def _make_test_zip_pkg(zip_dir, zip_basename, pkg_name, script_basename,
importlib.invalidate_caches()
return to_return
+
+@support.force_not_colorized_test_class
class CmdLineTest(unittest.TestCase):
def _check_output(self, script_name, exit_code, data,
expected_file, expected_argv0,
diff --git a/Lib/test/test_code_module.py b/Lib/test/test_code_module.py
index 37c7bc772ed8c7..20b960ce8d1e02 100644
--- a/Lib/test/test_code_module.py
+++ b/Lib/test/test_code_module.py
@@ -5,9 +5,9 @@
from textwrap import dedent
from contextlib import ExitStack
from unittest import mock
+from test.support import force_not_colorized_test_class
from test.support import import_helper
-
code = import_helper.import_module('code')
@@ -30,6 +30,7 @@ def mock_sys(self):
del self.sysmod.ps2
+@force_not_colorized_test_class
class TestInteractiveConsole(unittest.TestCase, MockSys):
maxDiff = None
diff --git a/Lib/test/test_compileall.py b/Lib/test/test_compileall.py
index 812ff5e7f84461..21ecebc088d3df 100644
--- a/Lib/test/test_compileall.py
+++ b/Lib/test/test_compileall.py
@@ -766,6 +766,7 @@ def test_d_compile_error(self):
rc, out, err = self.assertRunNotOK('-q', '-d', 'dinsdale', self.pkgdir)
self.assertRegex(out, b'File "dinsdale')
+ @support.force_not_colorized
def test_d_runtime_error(self):
bazfn = script_helper.make_script(self.pkgdir, 'baz', 'raise Exception')
self.assertRunOK('-q', '-d', 'dinsdale', self.pkgdir)
diff --git a/Lib/test/test_configparser.py b/Lib/test/test_configparser.py
index a934e493a76391..ab86d7f1af3981 100644
--- a/Lib/test/test_configparser.py
+++ b/Lib/test/test_configparser.py
@@ -2161,6 +2161,15 @@ def test_no_section(self):
self.assertEqual('1', cfg2[configparser.UNNAMED_SECTION]['a'])
self.assertEqual('2', cfg2[configparser.UNNAMED_SECTION]['b'])
+ def test_multiple_configs(self):
+ cfg = configparser.ConfigParser(allow_unnamed_section=True)
+ cfg.read_string('a = 1')
+ cfg.read_string('b = 2')
+
+ self.assertEqual([configparser.UNNAMED_SECTION], cfg.sections())
+ self.assertEqual('1', cfg[configparser.UNNAMED_SECTION]['a'])
+ self.assertEqual('2', cfg[configparser.UNNAMED_SECTION]['b'])
+
class MiscTestCase(unittest.TestCase):
def test__all__(self):
diff --git a/Lib/test/test_cppext/__init__.py b/Lib/test/test_cppext/__init__.py
index 00a2840d49c779..d5195227308fec 100644
--- a/Lib/test/test_cppext/__init__.py
+++ b/Lib/test/test_cppext/__init__.py
@@ -41,12 +41,17 @@ def test_build_cpp11(self):
def test_build_cpp14(self):
self.check_build('_testcpp14ext', std='c++14')
- def check_build(self, extension_name, std=None):
+ @support.requires_gil_enabled('incompatible with Free Threading')
+ def test_build_limited(self):
+ self.check_build('_testcppext_limited', limited=True)
+
+ def check_build(self, extension_name, std=None, limited=False):
venv_dir = 'env'
with support.setup_venv_with_pip_setuptools_wheel(venv_dir) as python_exe:
- self._check_build(extension_name, python_exe, std=std)
+ self._check_build(extension_name, python_exe,
+ std=std, limited=limited)
- def _check_build(self, extension_name, python_exe, std):
+ def _check_build(self, extension_name, python_exe, std, limited):
pkg_dir = 'pkg'
os.mkdir(pkg_dir)
shutil.copy(SETUP, os.path.join(pkg_dir, os.path.basename(SETUP)))
@@ -56,6 +61,8 @@ def run_cmd(operation, cmd):
env = os.environ.copy()
if std:
env['CPYTHON_TEST_CPP_STD'] = std
+ if limited:
+ env['CPYTHON_TEST_LIMITED'] = '1'
env['CPYTHON_TEST_EXT_NAME'] = extension_name
if support.verbose:
print('Run:', ' '.join(map(shlex.quote, cmd)))
@@ -76,6 +83,8 @@ def run_cmd(operation, cmd):
cmd = [python_exe, '-X', 'dev',
'-m', 'pip', 'install', '--no-build-isolation',
os.path.abspath(pkg_dir)]
+ if support.verbose:
+ cmd.append('-v')
run_cmd('Install', cmd)
# Do a reference run. Until we test that running python
diff --git a/Lib/test/test_cppext/extension.cpp b/Lib/test/test_cppext/extension.cpp
index ab485b629b7788..500d5918145c00 100644
--- a/Lib/test/test_cppext/extension.cpp
+++ b/Lib/test/test_cppext/extension.cpp
@@ -62,6 +62,7 @@ test_api_casts(PyObject *Py_UNUSED(module), PyObject *Py_UNUSED(args))
Py_ssize_t refcnt = Py_REFCNT(obj);
assert(refcnt >= 1);
+#ifndef Py_LIMITED_API
// gh-92138: For backward compatibility, functions of Python C API accepts
// "const PyObject*". Check that using it does not emit C++ compiler
// warnings.
@@ -74,6 +75,7 @@ test_api_casts(PyObject *Py_UNUSED(module), PyObject *Py_UNUSED(args))
assert(PyTuple_GET_SIZE(const_obj) == 2);
PyObject *one = PyTuple_GET_ITEM(const_obj, 0);
assert(PyLong_AsLong(one) == 1);
+#endif
// gh-92898: StrongRef doesn't inherit from PyObject but has an operator to
// cast to PyObject*.
@@ -106,6 +108,12 @@ test_unicode(PyObject *Py_UNUSED(module), PyObject *Py_UNUSED(args))
}
assert(PyUnicode_Check(str));
+
+ assert(PyUnicode_GetLength(str) == 3);
+ assert(PyUnicode_ReadChar(str, 0) == 'a');
+ assert(PyUnicode_ReadChar(str, 1) == 'b');
+
+#ifndef Py_LIMITED_API
assert(PyUnicode_GET_LENGTH(str) == 3);
// gh-92800: test PyUnicode_READ()
@@ -121,6 +129,7 @@ test_unicode(PyObject *Py_UNUSED(module), PyObject *Py_UNUSED(args))
assert(PyUnicode_READ(ukind, const_data, 2) == 'c');
assert(PyUnicode_READ_CHAR(str, 1) == 'b');
+#endif
Py_DECREF(str);
Py_RETURN_NONE;
diff --git a/Lib/test/test_cppext/setup.py b/Lib/test/test_cppext/setup.py
index 80b3e0d5212f7b..019ff18446a2eb 100644
--- a/Lib/test/test_cppext/setup.py
+++ b/Lib/test/test_cppext/setup.py
@@ -10,6 +10,7 @@
SOURCE = 'extension.cpp'
+
if not support.MS_WINDOWS:
# C++ compiler flags for GCC and clang
CPPFLAGS = [
@@ -19,14 +20,20 @@
'-Werror',
]
else:
- # Don't pass any compiler flag to MSVC
- CPPFLAGS = []
+ # MSVC compiler flags
+ CPPFLAGS = [
+ # Display warnings level 1 to 4
+ '/W4',
+ # Treat all compiler warnings as compiler errors
+ '/WX',
+ ]
def main():
cppflags = list(CPPFLAGS)
std = os.environ.get("CPYTHON_TEST_CPP_STD", "")
module_name = os.environ["CPYTHON_TEST_EXT_NAME"]
+ limited = bool(os.environ.get("CPYTHON_TEST_LIMITED", ""))
cppflags = list(CPPFLAGS)
cppflags.append(f'-DMODULE_NAME={module_name}')
@@ -53,6 +60,11 @@ def main():
# CC env var overrides sysconfig CC variable in setuptools
os.environ['CC'] = cmd
+ # Define Py_LIMITED_API macro
+ if limited:
+ version = sys.hexversion
+ cppflags.append(f'-DPy_LIMITED_API={version:#x}')
+
# On Windows, add PCbuild\amd64\ to include and library directories
include_dirs = []
library_dirs = []
diff --git a/Lib/test/test_ctypes/test_as_parameter.py b/Lib/test/test_ctypes/test_as_parameter.py
index cc62b1a22a3b06..c5e1840b0eb7af 100644
--- a/Lib/test/test_ctypes/test_as_parameter.py
+++ b/Lib/test/test_ctypes/test_as_parameter.py
@@ -198,8 +198,16 @@ class A:
a = A()
a._as_parameter_ = a
- with self.assertRaises(RecursionError):
- c_int.from_param(a)
+ for c_type in (
+ ctypes.c_wchar_p,
+ ctypes.c_char_p,
+ ctypes.c_void_p,
+ ctypes.c_int, # PyCSimpleType
+ POINT, # CDataType
+ ):
+ with self.subTest(c_type=c_type):
+ with self.assertRaises(RecursionError):
+ c_type.from_param(a)
class AsParamWrapper:
diff --git a/Lib/test/test_ctypes/test_dlerror.py b/Lib/test/test_ctypes/test_dlerror.py
index 4441e30cd7a2a7..6bf492399cbf95 100644
--- a/Lib/test/test_ctypes/test_dlerror.py
+++ b/Lib/test/test_ctypes/test_dlerror.py
@@ -1,7 +1,12 @@
+import _ctypes
import os
+import platform
import sys
+import test.support
import unittest
-import platform
+from ctypes import CDLL, c_int
+from ctypes.util import find_library
+
FOO_C = r"""
#include
@@ -26,7 +31,7 @@
@unittest.skipUnless(sys.platform.startswith('linux'),
- 'Test only valid for Linux')
+ 'test requires GNU IFUNC support')
class TestNullDlsym(unittest.TestCase):
"""GH-126554: Ensure that we catch NULL dlsym return values
@@ -53,14 +58,6 @@ def test_null_dlsym(self):
import subprocess
import tempfile
- # To avoid ImportErrors on Windows, where _ctypes does not have
- # dlopen and dlsym,
- # import here, i.e., inside the test function.
- # The skipUnless('linux') decorator ensures that we're on linux
- # if we're executing these statements.
- from ctypes import CDLL, c_int
- from _ctypes import dlopen, dlsym
-
retcode = subprocess.call(["gcc", "--version"],
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL)
@@ -111,6 +108,8 @@ def test_null_dlsym(self):
self.assertEqual(os.read(pipe_r, 2), b'OK')
# Case #3: Test 'py_dl_sym' from Modules/_ctypes/callproc.c
+ dlopen = test.support.get_attribute(_ctypes, 'dlopen')
+ dlsym = test.support.get_attribute(_ctypes, 'dlsym')
L = dlopen(dstname)
with self.assertRaisesRegex(OSError, "symbol 'foo' not found"):
dlsym(L, "foo")
@@ -119,5 +118,59 @@ def test_null_dlsym(self):
self.assertEqual(os.read(pipe_r, 2), b'OK')
+@unittest.skipUnless(os.name != 'nt', 'test requires dlerror() calls')
+class TestLocalization(unittest.TestCase):
+
+ @staticmethod
+ def configure_locales(func):
+ return test.support.run_with_locale(
+ 'LC_ALL',
+ 'fr_FR.iso88591', 'ja_JP.sjis', 'zh_CN.gbk',
+ 'fr_FR.utf8', 'en_US.utf8',
+ '',
+ )(func)
+
+ @classmethod
+ def setUpClass(cls):
+ cls.libc_filename = find_library("c")
+ if cls.libc_filename is None:
+ raise unittest.SkipTest('cannot find libc')
+
+ @configure_locales
+ def test_localized_error_from_dll(self):
+ dll = CDLL(self.libc_filename)
+ with self.assertRaises(AttributeError):
+ dll.this_name_does_not_exist
+
+ @configure_locales
+ def test_localized_error_in_dll(self):
+ dll = CDLL(self.libc_filename)
+ with self.assertRaises(ValueError):
+ c_int.in_dll(dll, 'this_name_does_not_exist')
+
+ @unittest.skipUnless(hasattr(_ctypes, 'dlopen'),
+ 'test requires _ctypes.dlopen()')
+ @configure_locales
+ def test_localized_error_dlopen(self):
+ missing_filename = b'missing\xff.so'
+ # Depending whether the locale, we may encode '\xff' differently
+ # but we are only interested in avoiding a UnicodeDecodeError
+ # when reporting the dlerror() error message which contains
+ # the localized filename.
+ filename_pattern = r'missing.*?\.so'
+ with self.assertRaisesRegex(OSError, filename_pattern):
+ _ctypes.dlopen(missing_filename, 2)
+
+ @unittest.skipUnless(hasattr(_ctypes, 'dlopen'),
+ 'test requires _ctypes.dlopen()')
+ @unittest.skipUnless(hasattr(_ctypes, 'dlsym'),
+ 'test requires _ctypes.dlsym()')
+ @configure_locales
+ def test_localized_error_dlsym(self):
+ dll = _ctypes.dlopen(self.libc_filename)
+ with self.assertRaises(OSError):
+ _ctypes.dlsym(dll, 'this_name_does_not_exist')
+
+
if __name__ == "__main__":
unittest.main()
diff --git a/Lib/test/test_ctypes/test_struct_fields.py b/Lib/test/test_ctypes/test_struct_fields.py
index 7adab794809def..fd9509757a39ad 100644
--- a/Lib/test/test_ctypes/test_struct_fields.py
+++ b/Lib/test/test_ctypes/test_struct_fields.py
@@ -1,4 +1,5 @@
import unittest
+import sys
from ctypes import Structure, Union, sizeof, c_char, c_int
from ._support import (CField, Py_TPFLAGS_DISALLOW_INSTANTIATION,
Py_TPFLAGS_IMMUTABLETYPE)
@@ -75,6 +76,27 @@ def __init_subclass__(cls, **kwargs):
'ctypes state is not initialized'):
class Subclass(BrokenStructure): ...
+ def test_max_field_size_gh126937(self):
+ # Classes for big structs should be created successfully.
+ # (But they most likely can't be instantiated.)
+ # The size must fit in Py_ssize_t.
+
+ class X(Structure):
+ _fields_ = [('char', c_char),]
+ max_field_size = sys.maxsize
+
+ class Y(Structure):
+ _fields_ = [('largeField', X * max_field_size)]
+ class Z(Structure):
+ _fields_ = [('largeField', c_char * max_field_size)]
+
+ with self.assertRaises(OverflowError):
+ class TooBig(Structure):
+ _fields_ = [('largeField', X * (max_field_size + 1))]
+ with self.assertRaises(OverflowError):
+ class TooBig(Structure):
+ _fields_ = [('largeField', c_char * (max_field_size + 1))]
+
# __set__ and __get__ should raise a TypeError in case their self
# argument is not a ctype instance.
def test___set__(self):
diff --git a/Lib/test/test_dbm_gnu.py b/Lib/test/test_dbm_gnu.py
index e20addf1f04f1b..66268c42a300b5 100644
--- a/Lib/test/test_dbm_gnu.py
+++ b/Lib/test/test_dbm_gnu.py
@@ -1,10 +1,11 @@
-from test import support
-from test.support import import_helper, cpython_only
-gdbm = import_helper.import_module("dbm.gnu") #skip if not supported
-import unittest
import os
-from test.support.os_helper import TESTFN, TESTFN_NONASCII, unlink, FakePath
+import unittest
+from test import support
+from test.support import cpython_only, import_helper
+from test.support.os_helper import (TESTFN, TESTFN_NONASCII, FakePath,
+ create_empty_file, temp_dir, unlink)
+gdbm = import_helper.import_module("dbm.gnu") # skip if not supported
filename = TESTFN
@@ -205,6 +206,16 @@ def test_clear(self):
self.assertNotIn(k, db)
self.assertEqual(len(db), 0)
+ @support.run_with_locale(
+ 'LC_ALL',
+ 'fr_FR.iso88591', 'ja_JP.sjis', 'zh_CN.gbk',
+ 'fr_FR.utf8', 'en_US.utf8',
+ '',
+ )
+ def test_localized_error(self):
+ with temp_dir() as d:
+ create_empty_file(os.path.join(d, 'test'))
+ self.assertRaises(gdbm.error, gdbm.open, filename, 'r')
if __name__ == '__main__':
diff --git a/Lib/test/test_descr.py b/Lib/test/test_descr.py
index 14bd87eb9c8d84..dd1fa321ecf171 100644
--- a/Lib/test/test_descr.py
+++ b/Lib/test/test_descr.py
@@ -15,6 +15,7 @@
from copy import deepcopy
from contextlib import redirect_stdout
from test import support
+from test.support.testcase import ExtraAssertions
try:
import _testcapi
@@ -403,15 +404,7 @@ def test_wrap_lenfunc_bad_cast(self):
self.assertEqual(range(sys.maxsize).__len__(), sys.maxsize)
-class ClassPropertiesAndMethods(unittest.TestCase):
-
- def assertHasAttr(self, obj, name):
- self.assertTrue(hasattr(obj, name),
- '%r has no attribute %r' % (obj, name))
-
- def assertNotHasAttr(self, obj, name):
- self.assertFalse(hasattr(obj, name),
- '%r has unexpected attribute %r' % (obj, name))
+class ClassPropertiesAndMethods(unittest.TestCase, ExtraAssertions):
def test_python_dicts(self):
# Testing Python subclass of dict...
diff --git a/Lib/test/test_dis.py b/Lib/test/test_dis.py
index a7f6919655b94a..73807d7fa95fbd 100644
--- a/Lib/test/test_dis.py
+++ b/Lib/test/test_dis.py
@@ -4,15 +4,19 @@
import dis
import functools
import io
+import itertools
+import opcode
import re
import sys
+import tempfile
+import textwrap
import types
import unittest
from test.support import (captured_stdout, requires_debug_ranges,
- requires_specialization, cpython_only)
+ requires_specialization, cpython_only,
+ os_helper)
from test.support.bytecode_helper import BytecodeTestCase
-import opcode
CACHE = dis.opmap["CACHE"]
@@ -2281,5 +2285,91 @@ def _unroll_caches_as_Instructions(instrs, show_caches=False):
False, None, None, instr.positions)
+class TestDisCLI(unittest.TestCase):
+
+ def setUp(self):
+ self.filename = tempfile.mktemp()
+ self.addCleanup(os_helper.unlink, self.filename)
+
+ @staticmethod
+ def text_normalize(string):
+ """Dedent *string* and strip it from its surrounding whitespaces.
+
+ This method is used by the other utility functions so that any
+ string to write or to match against can be freely indented.
+ """
+ return textwrap.dedent(string).strip()
+
+ def set_source(self, content):
+ with open(self.filename, 'w') as fp:
+ fp.write(self.text_normalize(content))
+
+ def invoke_dis(self, *flags):
+ output = io.StringIO()
+ with contextlib.redirect_stdout(output):
+ dis.main(args=[*flags, self.filename])
+ return self.text_normalize(output.getvalue())
+
+ def check_output(self, source, expect, *flags):
+ with self.subTest(source=source, flags=flags):
+ self.set_source(source)
+ res = self.invoke_dis(*flags)
+ expect = self.text_normalize(expect)
+ self.assertListEqual(res.splitlines(), expect.splitlines())
+
+ def test_invocation(self):
+ # test various combinations of parameters
+ base_flags = [
+ ('-C', '--show-caches'),
+ ('-O', '--show-offsets'),
+ ]
+
+ self.set_source('''
+ def f():
+ print(x)
+ return None
+ ''')
+
+ for r in range(1, len(base_flags) + 1):
+ for choices in itertools.combinations(base_flags, r=r):
+ for args in itertools.product(*choices):
+ with self.subTest(args=args[1:]):
+ _ = self.invoke_dis(*args)
+
+ with self.assertRaises(SystemExit):
+ # suppress argparse error message
+ with contextlib.redirect_stderr(io.StringIO()):
+ _ = self.invoke_dis('--unknown')
+
+ def test_show_cache(self):
+ # test 'python -m dis -C/--show-caches'
+ source = 'print()'
+ expect = '''
+ 0 RESUME 0
+
+ 1 LOAD_NAME 0 (print)
+ PUSH_NULL
+ CALL 0
+ CACHE 0 (counter: 0)
+ CACHE 0 (func_version: 0)
+ CACHE 0
+ POP_TOP
+ RETURN_CONST 0 (None)
+ '''
+ for flag in ['-C', '--show-caches']:
+ self.check_output(source, expect, flag)
+
+ def test_show_offsets(self):
+ # test 'python -m dis -O/--show-offsets'
+ source = 'pass'
+ expect = '''
+ 0 0 RESUME 0
+
+ 1 2 RETURN_CONST 0 (None)
+ '''
+ for flag in ['-O', '--show-offsets']:
+ self.check_output(source, expect, flag)
+
+
if __name__ == "__main__":
unittest.main()
diff --git a/Lib/test/test_doctest/test_doctest.py b/Lib/test/test_doctest/test_doctest.py
index 286c3ecfbc9239..7da6b983359041 100644
--- a/Lib/test/test_doctest/test_doctest.py
+++ b/Lib/test/test_doctest/test_doctest.py
@@ -2872,7 +2872,7 @@ def test_testfile(): r"""
>>> _colorize.COLORIZE = save_colorize
"""
-class TestImporter(importlib.abc.MetaPathFinder, importlib.abc.ResourceLoader):
+class TestImporter(importlib.abc.MetaPathFinder):
def find_spec(self, fullname, path, target=None):
return importlib.util.spec_from_file_location(fullname, path, loader=self)
@@ -2881,6 +2881,12 @@ def get_data(self, path):
with open(path, mode='rb') as f:
return f.read()
+ def exec_module(self, module):
+ raise ImportError
+
+ def create_module(self, spec):
+ return None
+
class TestHook:
def __init__(self, pathdir):
diff --git a/Lib/test/test_email/test__header_value_parser.py b/Lib/test/test_email/test__header_value_parser.py
index 5413319a414a62..efd1695711d7c1 100644
--- a/Lib/test/test_email/test__header_value_parser.py
+++ b/Lib/test/test_email/test__header_value_parser.py
@@ -3082,13 +3082,40 @@ def test_address_list_with_list_separator_after_fold(self):
self._test(parser.get_address_list(to)[0],
f'{a},\n =?utf-8?q?H=C3=BCbsch?= Kaktus \n')
- a = '.' * 79
+ a = '.' * 79 # ('.' is a special, so must be in quoted-string.)
to = f'"{a}" , "Hübsch Kaktus" '
self._test(parser.get_address_list(to)[0],
- f'{a}\n'
+ f'"{a}"\n'
' , =?utf-8?q?H=C3=BCbsch?= Kaktus '
'\n')
+ def test_address_list_with_specials_in_long_quoted_string(self):
+ # Regression for gh-80222.
+ policy = self.policy.clone(max_line_length=40)
+ cases = [
+ # (to, folded)
+ ('"Exfiltrator (unclosed comment?" ',
+ '"Exfiltrator (unclosed\n'
+ ' comment?" \n'),
+ ('"Escaped \\" chars \\\\ in quoted-string stay escaped" ',
+ '"Escaped \\" chars \\\\ in quoted-string\n'
+ ' stay escaped" \n'),
+ ('This long display name does not need quotes ',
+ 'This long display name does not need\n'
+ ' quotes \n'),
+ ('"Quotes are not required but are retained here" ',
+ '"Quotes are not required but are\n'
+ ' retained here" \n'),
+ ('"A quoted-string, it can be a valid local-part"@example.com',
+ '"A quoted-string, it can be a valid\n'
+ ' local-part"@example.com\n'),
+ ('"local-part-with-specials@but-no-fws.cannot-fold"@example.com',
+ '"local-part-with-specials@but-no-fws.cannot-fold"@example.com\n'),
+ ]
+ for (to, folded) in cases:
+ with self.subTest(to=to):
+ self._test(parser.get_address_list(to)[0], folded, policy=policy)
+
# XXX Need tests with comments on various sides of a unicode token,
# and with unicode tokens in the comments. Spaces inside the quotes
# currently don't do the right thing.
diff --git a/Lib/test/test_email/test_email.py b/Lib/test/test_email/test_email.py
index 65ddbabcaa1997..925a638572d79c 100644
--- a/Lib/test/test_email/test_email.py
+++ b/Lib/test/test_email/test_email.py
@@ -810,6 +810,16 @@ def test_unicode_body_defaults_to_utf8_encoding(self):
w4kgdGVzdGFiYwo=
"""))
+ def test_string_payload_with_base64_cte(self):
+ msg = email.message_from_string(textwrap.dedent("""\
+ Content-Transfer-Encoding: base64
+
+ SGVsbG8uIFRlc3Rpbmc=
+ """), policy=email.policy.default)
+ self.assertEqual(msg.get_payload(decode=True), b"Hello. Testing")
+ self.assertDefectsEqual(msg['content-transfer-encoding'].defects, [])
+
+
# Test the email.encoders module
class TestEncoders(unittest.TestCase):
@@ -2352,6 +2362,40 @@ def test_missing_header_body_separator(self):
self.assertDefectsEqual(msg.defects,
[errors.MissingHeaderBodySeparatorDefect])
+ def test_string_payload_with_extra_space_after_cte(self):
+ # https://github.com/python/cpython/issues/98188
+ cte = "base64 "
+ msg = email.message_from_string(textwrap.dedent(f"""\
+ Content-Transfer-Encoding: {cte}
+
+ SGVsbG8uIFRlc3Rpbmc=
+ """), policy=email.policy.default)
+ self.assertEqual(msg.get_payload(decode=True), b"Hello. Testing")
+ self.assertDefectsEqual(msg['content-transfer-encoding'].defects, [])
+
+ def test_string_payload_with_extra_text_after_cte(self):
+ msg = email.message_from_string(textwrap.dedent("""\
+ Content-Transfer-Encoding: base64 some text
+
+ SGVsbG8uIFRlc3Rpbmc=
+ """), policy=email.policy.default)
+ self.assertEqual(msg.get_payload(decode=True), b"Hello. Testing")
+ cte = msg['content-transfer-encoding']
+ self.assertDefectsEqual(cte.defects, [email.errors.InvalidHeaderDefect])
+
+ def test_string_payload_with_extra_space_after_cte_compat32(self):
+ cte = "base64 "
+ msg = email.message_from_string(textwrap.dedent(f"""\
+ Content-Transfer-Encoding: {cte}
+
+ SGVsbG8uIFRlc3Rpbmc=
+ """), policy=email.policy.compat32)
+ pasted_cte = msg['content-transfer-encoding']
+ self.assertEqual(pasted_cte, cte)
+ self.assertEqual(msg.get_payload(decode=True), b"Hello. Testing")
+ self.assertDefectsEqual(msg.defects, [])
+
+
# Test RFC 2047 header encoding and decoding
class TestRFC2047(TestEmailBase):
diff --git a/Lib/test/test_email/test_headerregistry.py b/Lib/test/test_email/test_headerregistry.py
index 5a608a033c7e54..a579f9eeb7f4bf 100644
--- a/Lib/test/test_email/test_headerregistry.py
+++ b/Lib/test/test_email/test_headerregistry.py
@@ -838,6 +838,11 @@ def cte_as_value(self,
'7bit',
[errors.InvalidHeaderDefect]),
+ 'extra_space_after_cte': (
+ 'base64 ',
+ 'base64',
+ []),
+
}
diff --git a/Lib/test/test_embed.py b/Lib/test/test_embed.py
index 3b43e422f82399..ed459794952581 100644
--- a/Lib/test/test_embed.py
+++ b/Lib/test/test_embed.py
@@ -627,6 +627,8 @@ class InitConfigTests(EmbeddingTestsMixin, unittest.TestCase):
CONFIG_COMPAT.update({
'legacy_windows_stdio': 0,
})
+ if support.is_apple:
+ CONFIG_COMPAT['use_system_logger'] = False
CONFIG_PYTHON = dict(CONFIG_COMPAT,
_config_init=API_PYTHON,
@@ -1953,56 +1955,5 @@ def test_presite(self):
self.assertIn("cmd", out)
-class StdPrinterTests(EmbeddingTestsMixin, unittest.TestCase):
- # Test PyStdPrinter_Type which is used by _PySys_SetPreliminaryStderr():
- # "Set up a preliminary stderr printer until we have enough
- # infrastructure for the io module in place."
-
- STDOUT_FD = 1
-
- def create_printer(self, fd):
- ctypes = import_helper.import_module('ctypes')
- PyFile_NewStdPrinter = ctypes.pythonapi.PyFile_NewStdPrinter
- PyFile_NewStdPrinter.argtypes = (ctypes.c_int,)
- PyFile_NewStdPrinter.restype = ctypes.py_object
- return PyFile_NewStdPrinter(fd)
-
- def test_write(self):
- message = "unicode:\xe9-\u20ac-\udc80!\n"
-
- stdout_fd = self.STDOUT_FD
- stdout_fd_copy = os.dup(stdout_fd)
- self.addCleanup(os.close, stdout_fd_copy)
-
- rfd, wfd = os.pipe()
- self.addCleanup(os.close, rfd)
- self.addCleanup(os.close, wfd)
- try:
- # PyFile_NewStdPrinter() only accepts fileno(stdout)
- # or fileno(stderr) file descriptor.
- os.dup2(wfd, stdout_fd)
-
- printer = self.create_printer(stdout_fd)
- printer.write(message)
- finally:
- os.dup2(stdout_fd_copy, stdout_fd)
-
- data = os.read(rfd, 100)
- self.assertEqual(data, message.encode('utf8', 'backslashreplace'))
-
- def test_methods(self):
- fd = self.STDOUT_FD
- printer = self.create_printer(fd)
- self.assertEqual(printer.fileno(), fd)
- self.assertEqual(printer.isatty(), os.isatty(fd))
- printer.flush() # noop
- printer.close() # noop
-
- def test_disallow_instantiation(self):
- fd = self.STDOUT_FD
- printer = self.create_printer(fd)
- support.check_disallow_instantiation(self, type(printer))
-
-
if __name__ == "__main__":
unittest.main()
diff --git a/Lib/test/test_enum.py b/Lib/test/test_enum.py
index e9948de39ed599..11e95d5b88b8c9 100644
--- a/Lib/test/test_enum.py
+++ b/Lib/test/test_enum.py
@@ -15,7 +15,7 @@
from enum import Enum, EnumMeta, IntEnum, StrEnum, EnumType, Flag, IntFlag, unique, auto
from enum import STRICT, CONFORM, EJECT, KEEP, _simple_enum, _test_simple_enum
from enum import verify, UNIQUE, CONTINUOUS, NAMED_FLAGS, ReprEnum
-from enum import member, nonmember, _iter_bits_lsb
+from enum import member, nonmember, _iter_bits_lsb, EnumDict
from io import StringIO
from pickle import dumps, loads, PicklingError, HIGHEST_PROTOCOL
from test import support
@@ -5454,6 +5454,37 @@ def test_convert_repr_and_str(self):
self.assertEqual(format(test_type.CONVERT_STRING_TEST_NAME_A), '5')
+class TestEnumDict(unittest.TestCase):
+ def test_enum_dict_in_metaclass(self):
+ """Test that EnumDict is usable as a class namespace"""
+ class Meta(type):
+ @classmethod
+ def __prepare__(metacls, cls, bases, **kwds):
+ return EnumDict(cls)
+
+ class MyClass(metaclass=Meta):
+ a = 1
+
+ with self.assertRaises(TypeError):
+ a = 2 # duplicate
+
+ with self.assertRaises(ValueError):
+ _a_sunder_ = 3
+
+ def test_enum_dict_standalone(self):
+ """Test that EnumDict is usable on its own"""
+ enumdict = EnumDict()
+ enumdict['a'] = 1
+
+ with self.assertRaises(TypeError):
+ enumdict['a'] = 'other value'
+
+ # Only MutableMapping interface is overridden for now.
+ # If this stops passing, update the documentation.
+ enumdict |= {'a': 'other value'}
+ self.assertEqual(enumdict['a'], 'other value')
+
+
# helpers
def enum_dir(cls):
diff --git a/Lib/test/test_eof.py b/Lib/test/test_eof.py
index e377383450e19d..582e5b6de6e687 100644
--- a/Lib/test/test_eof.py
+++ b/Lib/test/test_eof.py
@@ -2,7 +2,7 @@
import sys
from codecs import BOM_UTF8
-from test import support
+from test.support import force_not_colorized
from test.support import os_helper
from test.support import script_helper
from test.support import warnings_helper
@@ -44,6 +44,7 @@ def test_EOFS(self):
self.assertEqual(cm.exception.text, "ä = '''thîs is ")
self.assertEqual(cm.exception.offset, 5)
+ @force_not_colorized
def test_EOFS_with_file(self):
        expect = ("(<string>, line 1)")
with os_helper.temp_dir() as temp_dir:
@@ -123,6 +124,7 @@ def test_line_continuation_EOF(self):
self.assertEqual(str(cm.exception), expect)
@unittest.skipIf(not sys.executable, "sys.executable required")
+ @force_not_colorized
def test_line_continuation_EOF_from_file_bpo2180(self):
"""Ensure tok_nextc() does not add too many ending newlines."""
with os_helper.temp_dir() as temp_dir:
diff --git a/Lib/test/test_except_star.py b/Lib/test/test_except_star.py
index c49c6008e08e8c..284907f61213f8 100644
--- a/Lib/test/test_except_star.py
+++ b/Lib/test/test_except_star.py
@@ -952,6 +952,49 @@ def derive(self, excs):
self.assertExceptionIsLike(tes, FalsyEG("eg", [TypeError(1)]))
self.assertExceptionIsLike(ves, FalsyEG("eg", [ValueError(2)]))
+ def test_exception_group_subclass_with_bad_split_func(self):
+ # see gh-128049.
+ class BadEG1(ExceptionGroup):
+ def split(self, *args):
+ return "NOT A 2-TUPLE!"
+
+ class BadEG2(ExceptionGroup):
+ def split(self, *args):
+ return ("NOT A 2-TUPLE!",)
+
+ eg_list = [
+ (BadEG1("eg", [OSError(123), ValueError(456)]),
+ r"split must return a tuple, not str"),
+ (BadEG2("eg", [OSError(123), ValueError(456)]),
+ r"split must return a 2-tuple, got tuple of size 1")
+ ]
+
+ for eg_class, msg in eg_list:
+ with self.assertRaisesRegex(TypeError, msg) as m:
+ try:
+ raise eg_class
+ except* ValueError:
+ pass
+ except* OSError:
+ pass
+
+ self.assertExceptionIsLike(m.exception.__context__, eg_class)
+
+ # we allow tuples of length > 2 for backwards compatibility
+ class WeirdEG(ExceptionGroup):
+ def split(self, *args):
+ return super().split(*args) + ("anything", 123456, None)
+
+ try:
+ raise WeirdEG("eg", [OSError(123), ValueError(456)])
+ except* OSError as e:
+ oeg = e
+ except* ValueError as e:
+ veg = e
+
+ self.assertExceptionIsLike(oeg, WeirdEG("eg", [OSError(123)]))
+ self.assertExceptionIsLike(veg, WeirdEG("eg", [ValueError(456)]))
+
class TestExceptStarCleanup(ExceptStarTest):
def test_sys_exception_restored(self):
diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py
index 6a29bc38252fdb..c6fb848b8d7fda 100644
--- a/Lib/test/test_exceptions.py
+++ b/Lib/test/test_exceptions.py
@@ -1465,6 +1465,7 @@ def gen():
@cpython_only
@unittest.skipIf(_testcapi is None, "requires _testcapi")
+ @force_not_colorized
def test_recursion_normalizing_infinite_exception(self):
# Issue #30697. Test that a RecursionError is raised when
# maximum recursion depth has been exceeded when creating
@@ -2157,6 +2158,7 @@ def test_multiline_not_highlighted(self):
self.assertEqual(result[-len(expected):], expected)
+@support.force_not_colorized_test_class
class SyntaxErrorTests(unittest.TestCase):
maxDiff = None
diff --git a/Lib/test/test_faulthandler.py b/Lib/test/test_faulthandler.py
index 60815be96e14eb..fd56dee5d842ac 100644
--- a/Lib/test/test_faulthandler.py
+++ b/Lib/test/test_faulthandler.py
@@ -7,7 +7,7 @@
import subprocess
import sys
from test import support
-from test.support import os_helper, script_helper, is_android, MS_WINDOWS
+from test.support import os_helper, script_helper, is_android, MS_WINDOWS, threading_helper
import tempfile
import unittest
from textwrap import dedent
@@ -896,6 +896,34 @@ def test_cancel_later_without_dump_traceback_later(self):
self.assertEqual(output, [])
self.assertEqual(exitcode, 0)
+ @threading_helper.requires_working_threading()
+ @unittest.skipUnless(support.Py_GIL_DISABLED, "only meaningful if the GIL is disabled")
+ def test_free_threaded_dump_traceback(self):
+ # gh-128400: Other threads need to be paused to invoke faulthandler
+ code = dedent("""
+ import faulthandler
+ from threading import Thread, Event
+
+ class Waiter(Thread):
+ def __init__(self):
+ Thread.__init__(self)
+ self.running = Event()
+ self.stop = Event()
+
+ def run(self):
+ self.running.set()
+ self.stop.wait()
+
+ for _ in range(100):
+ waiter = Waiter()
+ waiter.start()
+ waiter.running.wait()
+ faulthandler.dump_traceback(all_threads=True)
+ waiter.stop.set()
+ waiter.join()
+ """)
+ _, exitcode = self.get_output(code)
+ self.assertEqual(exitcode, 0)
if __name__ == "__main__":
unittest.main()
diff --git a/Lib/test/test_fstring.py b/Lib/test/test_fstring.py
index c747bbedc6dba8..6e217ef63be618 100644
--- a/Lib/test/test_fstring.py
+++ b/Lib/test/test_fstring.py
@@ -1650,6 +1650,14 @@ def __repr__(self):
#self.assertEqual(f'X{x =}Y', 'Xx\t='+repr(x)+'Y')
#self.assertEqual(f'X{x = }Y', 'Xx\t=\t'+repr(x)+'Y')
+ def test_debug_expressions_are_raw_strings(self):
+
+ self.assertEqual(f'{b"\N{OX}"=}', 'b"\\N{OX}"=b\'\\\\N{OX}\'')
+ self.assertEqual(f'{r"\xff"=}', 'r"\\xff"=\'\\\\xff\'')
+ self.assertEqual(f'{r"\n"=}', 'r"\\n"=\'\\\\n\'')
+ self.assertEqual(f"{'\''=}", "'\\''=\"'\"")
+ self.assertEqual(f'{'\xc5'=}', r"'\xc5'='Å'")
+
def test_walrus(self):
x = 20
# This isn't an assignment expression, it's 'x', with a format
@@ -1758,5 +1766,23 @@ def get_code(s):
for s in ["", "some string"]:
self.assertEqual(get_code(f"'{s}'"), get_code(f"f'{s}'"))
+ def test_gh129093(self):
+ self.assertEqual(f'{1==2=}', '1==2=False')
+ self.assertEqual(f'{1 == 2=}', '1 == 2=False')
+ self.assertEqual(f'{1!=2=}', '1!=2=True')
+ self.assertEqual(f'{1 != 2=}', '1 != 2=True')
+
+ self.assertEqual(f'{(1) != 2=}', '(1) != 2=True')
+ self.assertEqual(f'{(1*2) != (3)=}', '(1*2) != (3)=True')
+
+ self.assertEqual(f'{1 != 2 == 3 != 4=}', '1 != 2 == 3 != 4=False')
+ self.assertEqual(f'{1 == 2 != 3 == 4=}', '1 == 2 != 3 == 4=False')
+
+ self.assertEqual(f'{f'{1==2=}'=}', "f'{1==2=}'='1==2=False'")
+ self.assertEqual(f'{f'{1 == 2=}'=}', "f'{1 == 2=}'='1 == 2=False'")
+ self.assertEqual(f'{f'{1!=2=}'=}', "f'{1!=2=}'='1!=2=True'")
+ self.assertEqual(f'{f'{1 != 2=}'=}', "f'{1 != 2=}'='1 != 2=True'")
+
+
if __name__ == '__main__':
unittest.main()
diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py
index 1ce0f4d0aea6ee..894adb699c87f2 100644
--- a/Lib/test/test_functools.py
+++ b/Lib/test/test_functools.py
@@ -411,6 +411,12 @@ class A:
self.assertEqual(a.cmeth(3, b=4), ((1, A, 3), {'a': 2, 'b': 4}))
self.assertEqual(a.smeth(3, b=4), ((1, 3), {'a': 2, 'b': 4}))
+ def test_partial_genericalias(self):
+ alias = self.partial[int]
+ self.assertIs(alias.__origin__, self.partial)
+ self.assertEqual(alias.__args__, (int,))
+ self.assertEqual(alias.__parameters__, ())
+
@unittest.skipUnless(c_functools, 'requires the C _functools module')
class TestPartialC(TestPartial, unittest.TestCase):
diff --git a/Lib/test/test_gdb/util.py b/Lib/test/test_gdb/util.py
index 8fe9cfc543395e..54c6b2de7cc99d 100644
--- a/Lib/test/test_gdb/util.py
+++ b/Lib/test/test_gdb/util.py
@@ -7,6 +7,7 @@
import sysconfig
import unittest
from test import support
+from test.support.testcase import ExtraAssertions
GDB_PROGRAM = shutil.which('gdb') or 'gdb'
@@ -152,7 +153,7 @@ def setup_module():
print()
-class DebuggerTests(unittest.TestCase):
+class DebuggerTests(unittest.TestCase, ExtraAssertions):
"""Test that the debugger can debug Python."""
@@ -280,11 +281,6 @@ def get_stack_trace(self, source=None, script=None,
return out
- def assertEndsWith(self, actual, exp_end):
- '''Ensure that the given "actual" string ends with "exp_end"'''
- self.assertTrue(actual.endswith(exp_end),
- msg='%r did not end with %r' % (actual, exp_end))
-
def assertMultilineMatches(self, actual, pattern):
m = re.match(pattern, actual, re.DOTALL)
if not m:
diff --git a/Lib/test/test_glob.py b/Lib/test/test_glob.py
index b72640bd871ba6..b52c12f4b6cf14 100644
--- a/Lib/test/test_glob.py
+++ b/Lib/test/test_glob.py
@@ -510,6 +510,10 @@ def fn(pat):
@skip_unless_symlink
class SymlinkLoopGlobTests(unittest.TestCase):
+ # gh-109959: On Linux, glob._isdir() and glob._lexists() can return False
+ # randomly when checking the "link/" symbolic link.
+ # https://github.com/python/cpython/issues/109959#issuecomment-2577550700
+ @unittest.skip("flaky test")
def test_selflink(self):
tempdir = TESTFN + "_dir"
os.makedirs(tempdir)
diff --git a/Lib/test/test_http_cookies.py b/Lib/test/test_http_cookies.py
index 8879902a6e2f41..7b3dc0fdaedc3b 100644
--- a/Lib/test/test_http_cookies.py
+++ b/Lib/test/test_http_cookies.py
@@ -59,6 +59,52 @@ def test_basic(self):
for k, v in sorted(case['dict'].items()):
self.assertEqual(C[k].value, v)
+ def test_obsolete_rfc850_date_format(self):
+ # Test cases with different days and dates in obsolete RFC 850 format
+ test_cases = [
+ # from RFC 850, change EST to GMT
+ # https://datatracker.ietf.org/doc/html/rfc850#section-2
+ {
+ 'data': 'key=value; expires=Saturday, 01-Jan-83 00:00:00 GMT',
+ 'output': 'Saturday, 01-Jan-83 00:00:00 GMT'
+ },
+ {
+ 'data': 'key=value; expires=Friday, 19-Nov-82 16:59:30 GMT',
+ 'output': 'Friday, 19-Nov-82 16:59:30 GMT'
+ },
+ # from RFC 9110
+ # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.6.7-6
+ {
+ 'data': 'key=value; expires=Sunday, 06-Nov-94 08:49:37 GMT',
+ 'output': 'Sunday, 06-Nov-94 08:49:37 GMT'
+ },
+ # other test cases
+ {
+ 'data': 'key=value; expires=Wednesday, 09-Nov-94 08:49:37 GMT',
+ 'output': 'Wednesday, 09-Nov-94 08:49:37 GMT'
+ },
+ {
+ 'data': 'key=value; expires=Friday, 11-Nov-94 08:49:37 GMT',
+ 'output': 'Friday, 11-Nov-94 08:49:37 GMT'
+ },
+ {
+ 'data': 'key=value; expires=Monday, 14-Nov-94 08:49:37 GMT',
+ 'output': 'Monday, 14-Nov-94 08:49:37 GMT'
+ },
+ ]
+
+ for case in test_cases:
+ with self.subTest(data=case['data']):
+ C = cookies.SimpleCookie()
+ C.load(case['data'])
+
+ # Extract the cookie name from the data string
+ cookie_name = case['data'].split('=')[0]
+
+ # Check if the cookie is loaded correctly
+ self.assertIn(cookie_name, C)
+ self.assertEqual(C[cookie_name].get('expires'), case['output'])
+
def test_unquote(self):
cases = [
(r'a="b=\""', 'b="'),
diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py
index 9d853d254db7c6..0c4214addb9baa 100644
--- a/Lib/test/test_httplib.py
+++ b/Lib/test/test_httplib.py
@@ -1081,6 +1081,25 @@ def test_chunked(self):
self.assertEqual(resp.read(), expected)
resp.close()
+ # Explicit full read
+ for n in (-123, -1, None):
+ with self.subTest('full read', n=n):
+ sock = FakeSocket(chunked_start + last_chunk + chunked_end)
+ resp = client.HTTPResponse(sock, method="GET")
+ resp.begin()
+ self.assertTrue(resp.chunked)
+ self.assertEqual(resp.read(n), expected)
+ resp.close()
+
+ # Read first chunk
+ with self.subTest('read1(-1)'):
+ sock = FakeSocket(chunked_start + last_chunk + chunked_end)
+ resp = client.HTTPResponse(sock, method="GET")
+ resp.begin()
+ self.assertTrue(resp.chunked)
+ self.assertEqual(resp.read1(-1), b"hello worl")
+ resp.close()
+
# Various read sizes
for n in range(1, 12):
sock = FakeSocket(chunked_start + last_chunk + chunked_end)
diff --git a/Lib/test/test_imaplib.py b/Lib/test/test_imaplib.py
index b448227a0292fc..d462a2cda5047a 100644
--- a/Lib/test/test_imaplib.py
+++ b/Lib/test/test_imaplib.py
@@ -901,6 +901,20 @@ def handle(self):
self.assertRaises(imaplib.IMAP4.error,
self.imap_class, *server.server_address)
+ def test_truncated_large_literal(self):
+ size = 0
+ class BadHandler(SimpleIMAPHandler):
+ def handle(self):
+ self._send_textline('* OK {%d}' % size)
+ self._send_textline('IMAP4rev1')
+
+ for exponent in range(15, 64):
+ size = 1 << exponent
+            with self.subTest(f"size=2**{exponent}"):
+ with self.reaped_server(BadHandler) as server:
+ with self.assertRaises(imaplib.IMAP4.abort):
+ self.imap_class(*server.server_address)
+
@threading_helper.reap_threads
def test_simple_with_statement(self):
# simplest call
diff --git a/Lib/test/test_import/__init__.py b/Lib/test/test_import/__init__.py
index 9139d455f4a16d..d9246c0ea70a04 100644
--- a/Lib/test/test_import/__init__.py
+++ b/Lib/test/test_import/__init__.py
@@ -29,9 +29,20 @@
from test.support import os_helper
from test.support import (
- STDLIB_DIR, swap_attr, swap_item, cpython_only, is_apple_mobile, is_emscripten,
- is_wasi, run_in_subinterp, run_in_subinterp_with_config, Py_TRACE_REFS,
- requires_gil_enabled, Py_GIL_DISABLED)
+ STDLIB_DIR,
+ swap_attr,
+ swap_item,
+ cpython_only,
+ is_apple_mobile,
+ is_emscripten,
+ is_wasi,
+ run_in_subinterp,
+ run_in_subinterp_with_config,
+ Py_TRACE_REFS,
+ requires_gil_enabled,
+ Py_GIL_DISABLED,
+ force_not_colorized_test_class,
+)
from test.support.import_helper import (
forget, make_legacy_pyc, unlink, unload, ready_to_import,
DirsOnSysPath, CleanImport, import_module)
@@ -352,6 +363,7 @@ def _from_subinterp(cls, name, interpid, pipe, script_kwargs):
return cls.parse(text.decode())
+@force_not_colorized_test_class
class ImportTests(unittest.TestCase):
def setUp(self):
@@ -826,6 +838,73 @@ def test_issue105979(self):
self.assertIn("Frozen object named 'x' is invalid",
str(cm.exception))
+ def test_frozen_module_from_import_error(self):
+ with self.assertRaises(ImportError) as cm:
+ from os import this_will_never_exist
+ self.assertIn(
+ f"cannot import name 'this_will_never_exist' from 'os' ({os.__file__})",
+ str(cm.exception),
+ )
+ with self.assertRaises(ImportError) as cm:
+ from sys import this_will_never_exist
+ self.assertIn(
+ "cannot import name 'this_will_never_exist' from 'sys' (unknown location)",
+ str(cm.exception),
+ )
+
+ scripts = [
+ """
+import os
+os.__spec__.has_location = False
+os.__file__ = []
+from os import this_will_never_exist
+""",
+ """
+import os
+os.__spec__.has_location = False
+del os.__file__
+from os import this_will_never_exist
+""",
+ """
+import os
+os.__spec__.origin = []
+os.__file__ = []
+from os import this_will_never_exist
+"""
+ ]
+ for script in scripts:
+ with self.subTest(script=script):
+ expected_error = (
+ b"cannot import name 'this_will_never_exist' "
+ b"from 'os' (unknown location)"
+ )
+ popen = script_helper.spawn_python("-c", script)
+ stdout, stderr = popen.communicate()
+ self.assertIn(expected_error, stdout)
+
+ def test_non_module_from_import_error(self):
+ prefix = """
+import sys
+class NotAModule: ...
+nm = NotAModule()
+nm.symbol = 123
+sys.modules["not_a_module"] = nm
+from not_a_module import symbol
+"""
+ scripts = [
+ prefix + "from not_a_module import missing_symbol",
+ prefix + "nm.__spec__ = []\nfrom not_a_module import missing_symbol",
+ ]
+ for script in scripts:
+ with self.subTest(script=script):
+ expected_error = (
+ b"ImportError: cannot import name 'missing_symbol' from "
+ b"'' (unknown location)"
+ )
+ popen = script_helper.spawn_python("-c", script)
+ stdout, stderr = popen.communicate()
+ self.assertIn(expected_error, stdout)
+
def test_script_shadowing_stdlib(self):
script_errors = [
(
@@ -1087,7 +1166,7 @@ class substr(str):
except AttributeError as e:
print(str(e))
-fractions.__spec__.origin = 0
+fractions.__spec__.origin = []
try:
fractions.Fraction
except AttributeError as e:
@@ -1111,7 +1190,7 @@ class substr(str):
except ImportError as e:
print(str(e))
-fractions.__spec__.origin = 0
+fractions.__spec__.origin = []
try:
from fractions import Fraction
except ImportError as e:
@@ -3263,30 +3342,6 @@ def test_basic_multiple_interpreters_reset_each(self):
# * module's global state was initialized, not reset
-@cpython_only
-class CAPITests(unittest.TestCase):
- def test_pyimport_addmodule(self):
- # gh-105922: Test PyImport_AddModuleRef(), PyImport_AddModule()
- # and PyImport_AddModuleObject()
- _testcapi = import_module("_testcapi")
- for name in (
- 'sys', # frozen module
- 'test', # package
- __name__, # package.module
- ):
- _testcapi.check_pyimport_addmodule(name)
-
- def test_pyimport_addmodule_create(self):
- # gh-105922: Test PyImport_AddModuleRef(), create a new module
- _testcapi = import_module("_testcapi")
- name = 'dontexist'
- self.assertNotIn(name, sys.modules)
- self.addCleanup(unload, name)
-
- mod = _testcapi.check_pyimport_addmodule(name)
- self.assertIs(mod, sys.modules[name])
-
-
if __name__ == '__main__':
# Test needs to be a package, so we can do relative imports.
unittest.main()
diff --git a/Lib/test/test_importlib/resources/_path.py b/Lib/test/test_importlib/resources/_path.py
index 1f97c96146960d..b144628cb73c77 100644
--- a/Lib/test/test_importlib/resources/_path.py
+++ b/Lib/test/test_importlib/resources/_path.py
@@ -2,15 +2,44 @@
import functools
from typing import Dict, Union
+from typing import runtime_checkable
+from typing import Protocol
####
-# from jaraco.path 3.4.1
+# from jaraco.path 3.7.1
-FilesSpec = Dict[str, Union[str, bytes, 'FilesSpec']] # type: ignore
+class Symlink(str):
+ """
+ A string indicating the target of a symlink.
+ """
+
+
+FilesSpec = Dict[str, Union[str, bytes, Symlink, 'FilesSpec']]
+
+
+@runtime_checkable
+class TreeMaker(Protocol):
+ def __truediv__(self, *args, **kwargs): ... # pragma: no cover
+
+ def mkdir(self, **kwargs): ... # pragma: no cover
+
+ def write_text(self, content, **kwargs): ... # pragma: no cover
+
+ def write_bytes(self, content): ... # pragma: no cover
-def build(spec: FilesSpec, prefix=pathlib.Path()):
+ def symlink_to(self, target): ... # pragma: no cover
+
+
+def _ensure_tree_maker(obj: Union[str, TreeMaker]) -> TreeMaker:
+ return obj if isinstance(obj, TreeMaker) else pathlib.Path(obj) # type: ignore[return-value]
+
+
+def build(
+ spec: FilesSpec,
+ prefix: Union[str, TreeMaker] = pathlib.Path(), # type: ignore[assignment]
+):
"""
Build a set of files/directories, as described by the spec.
@@ -25,21 +54,25 @@ def build(spec: FilesSpec, prefix=pathlib.Path()):
... "__init__.py": "",
... },
... "baz.py": "# Some code",
- ... }
+ ... "bar.py": Symlink("baz.py"),
+ ... },
+ ... "bing": Symlink("foo"),
... }
>>> target = getfixture('tmp_path')
>>> build(spec, target)
>>> target.joinpath('foo/baz.py').read_text(encoding='utf-8')
'# Some code'
+ >>> target.joinpath('bing/bar.py').read_text(encoding='utf-8')
+ '# Some code'
"""
for name, contents in spec.items():
- create(contents, pathlib.Path(prefix) / name)
+ create(contents, _ensure_tree_maker(prefix) / name)
@functools.singledispatch
def create(content: Union[str, bytes, FilesSpec], path):
path.mkdir(exist_ok=True)
- build(content, prefix=path) # type: ignore
+ build(content, prefix=path) # type: ignore[arg-type]
@create.register
@@ -52,5 +85,10 @@ def _(content: str, path):
path.write_text(content, encoding='utf-8')
+@create.register
+def _(content: Symlink, path):
+ path.symlink_to(content)
+
+
# end from jaraco.path
####
diff --git a/Lib/test/test_importlib/resources/test_files.py b/Lib/test/test_importlib/resources/test_files.py
index 08b840834dfd4b..ef7b57959974ee 100644
--- a/Lib/test/test_importlib/resources/test_files.py
+++ b/Lib/test/test_importlib/resources/test_files.py
@@ -55,6 +55,26 @@ class OpenZipTests(FilesTests, util.ZipSetup, unittest.TestCase):
class OpenNamespaceTests(FilesTests, util.DiskSetup, unittest.TestCase):
MODULE = 'namespacedata01'
+ def test_non_paths_in_dunder_path(self):
+ """
+ Non-path items in a namespace package's ``__path__`` are ignored.
+
+ As reported in python/importlib_resources#311, some tools
+ like Setuptools, when creating editable packages, will inject
+ non-paths into a namespace package's ``__path__``, a
+ sentinel like
+ ``__editable__.sample_namespace-1.0.finder.__path_hook__``
+ to cause the ``PathEntryFinder`` to be called when searching
+ for packages. In that case, resources should still be loadable.
+ """
+ import namespacedata01
+
+ namespacedata01.__path__.append(
+ '__editable__.sample_namespace-1.0.finder.__path_hook__'
+ )
+
+ resources.files(namespacedata01)
+
class OpenNamespaceZipTests(FilesTests, util.ZipSetup, unittest.TestCase):
ZIP_MODULE = 'namespacedata01'
@@ -81,7 +101,7 @@ def test_module_resources(self):
"""
A module can have resources found adjacent to the module.
"""
- import mod
+ import mod # type: ignore[import-not-found]
actual = resources.files(mod).joinpath('res.txt').read_text(encoding='utf-8')
assert actual == self.spec['res.txt']
diff --git a/Lib/test/test_importlib/resources/test_functional.py b/Lib/test/test_importlib/resources/test_functional.py
index 4317abf3162c52..3fc1ade35bef5a 100644
--- a/Lib/test/test_importlib/resources/test_functional.py
+++ b/Lib/test/test_importlib/resources/test_functional.py
@@ -3,6 +3,7 @@
import importlib
from test.support import warnings_helper
+from test.support.testcase import ExtraAssertions
from importlib import resources
@@ -28,7 +29,7 @@ def anchor02(self):
return importlib.import_module('data02')
-class FunctionalAPIBase(util.DiskSetup):
+class FunctionalAPIBase(util.DiskSetup, ExtraAssertions):
def setUp(self):
super().setUp()
self.load_fixture('data02')
@@ -43,12 +44,6 @@ def _gen_resourcetxt_path_parts(self):
with self.subTest(path_parts=path_parts):
yield path_parts
- def assertEndsWith(self, string, suffix):
- """Assert that `string` ends with `suffix`.
-
- Used to ignore an architecture-specific UTF-16 byte-order mark."""
- self.assertEqual(string[-len(suffix) :], suffix)
-
def test_read_text(self):
self.assertEqual(
resources.read_text(self.anchor01, 'utf-8.file'),
diff --git a/Lib/test/test_inspect/test_inspect.py b/Lib/test/test_inspect/test_inspect.py
index 34ae951b38ad58..f30dc7affda11a 100644
--- a/Lib/test/test_inspect/test_inspect.py
+++ b/Lib/test/test_inspect/test_inspect.py
@@ -880,6 +880,7 @@ def test_getsource_stdlib_decimal(self):
self.assertEqual(src.splitlines(True), lines)
class TestGetsourceInteractive(unittest.TestCase):
+ @support.force_not_colorized
def test_getclasses_interactive(self):
# bpo-44648: simulate a REPL session;
# there is no `__file__` in the __main__ module
diff --git a/Lib/test/test_pdb.py b/Lib/test/test_pdb.py
index 5b25f514b9e772..c34046f0e5bb6f 100644
--- a/Lib/test/test_pdb.py
+++ b/Lib/test/test_pdb.py
@@ -2810,6 +2810,57 @@ def test_pdb_f_trace_lines():
(Pdb) continue
"""
+def test_pdb_frame_refleak():
+ """
+ pdb should not leak reference to frames
+
+ >>> def frame_leaker(container):
+ ... import sys
+ ... container.append(sys._getframe())
+ ... import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace()
+ ... pass
+
+ >>> def test_function():
+ ... import gc
+ ... container = []
+ ... frame_leaker(container) # c
+ ... print(len(gc.get_referrers(container[0])))
+ ... container = []
+ ... frame_leaker(container) # n c
+ ... print(len(gc.get_referrers(container[0])))
+ ... container = []
+ ... frame_leaker(container) # r c
+ ... print(len(gc.get_referrers(container[0])))
+
+ >>> with PdbTestInput([ # doctest: +NORMALIZE_WHITESPACE
+ ... 'continue',
+ ... 'next',
+ ... 'continue',
+ ... 'return',
+ ... 'continue',
+ ... ]):
+ ... test_function()
+ > (4)frame_leaker()
+ -> import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace()
+ (Pdb) continue
+ 1
+ > (4)frame_leaker()
+ -> import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace()
+ (Pdb) next
+ > (5)frame_leaker()
+ -> pass
+ (Pdb) continue
+ 1
+ > (4)frame_leaker()
+ -> import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace()
+ (Pdb) return
+ --Return--
+ > (5)frame_leaker()->None
+ -> pass
+ (Pdb) continue
+ 1
+ """
+
def test_pdb_function_break():
"""Testing the line number of break on function
diff --git a/Lib/test/test_pyclbr.py b/Lib/test/test_pyclbr.py
index d409a2d4a312e6..a65705aaf53abc 100644
--- a/Lib/test/test_pyclbr.py
+++ b/Lib/test/test_pyclbr.py
@@ -10,6 +10,7 @@
from unittest import TestCase, main as unittest_main
from test.test_importlib import util as test_importlib_util
import warnings
+from test.support.testcase import ExtraAssertions
StaticMethodType = type(staticmethod(lambda: None))
@@ -22,7 +23,7 @@
# is imperfect (as designed), testModule is called with a set of
# members to ignore.
-class PyclbrTest(TestCase):
+class PyclbrTest(TestCase, ExtraAssertions):
def assertListEq(self, l1, l2, ignore):
''' succeed iff {l1} - {ignore} == {l2} - {ignore} '''
@@ -31,14 +32,6 @@ def assertListEq(self, l1, l2, ignore):
print("l1=%r\nl2=%r\nignore=%r" % (l1, l2, ignore), file=sys.stderr)
self.fail("%r missing" % missing.pop())
- def assertHasattr(self, obj, attr, ignore):
- ''' succeed iff hasattr(obj,attr) or attr in ignore. '''
- if attr in ignore: return
- if not hasattr(obj, attr): print("???", attr)
- self.assertTrue(hasattr(obj, attr),
- 'expected hasattr(%r, %r)' % (obj, attr))
-
-
def assertHaskey(self, obj, key, ignore):
''' succeed iff key in obj or key in ignore. '''
if key in ignore: return
@@ -86,7 +79,7 @@ def ismethod(oclass, obj, name):
for name, value in dict.items():
if name in ignore:
continue
- self.assertHasattr(module, name, ignore)
+ self.assertHasAttr(module, name, ignore)
py_item = getattr(module, name)
if isinstance(value, pyclbr.Function):
self.assertIsInstance(py_item, (FunctionType, BuiltinFunctionType))
diff --git a/Lib/test/test_pydoc/test_pydoc.py b/Lib/test/test_pydoc/test_pydoc.py
index 42776562fb7fa5..9cc2252e29367d 100644
--- a/Lib/test/test_pydoc/test_pydoc.py
+++ b/Lib/test/test_pydoc/test_pydoc.py
@@ -4,6 +4,7 @@
import contextlib
import importlib.util
import inspect
+import io
import pydoc
import py_compile
import keyword
@@ -879,6 +880,82 @@ def test_synopsis(self):
synopsis = pydoc.synopsis(TESTFN, {})
self.assertEqual(synopsis, 'line 1: h\xe9')
+ def test_source_synopsis(self):
+ def check(source, expected, encoding=None):
+ if isinstance(source, str):
+ source_file = StringIO(source)
+ else:
+ source_file = io.TextIOWrapper(io.BytesIO(source), encoding=encoding)
+ with source_file:
+ result = pydoc.source_synopsis(source_file)
+ self.assertEqual(result, expected)
+
+ check('"""Single line docstring."""',
+ 'Single line docstring.')
+ check('"""First line of docstring.\nSecond line.\nThird line."""',
+ 'First line of docstring.')
+ check('"""First line of docstring.\\nSecond line.\\nThird line."""',
+ 'First line of docstring.')
+ check('""" Whitespace around docstring. """',
+ 'Whitespace around docstring.')
+ check('import sys\n"""No docstring"""',
+ None)
+ check(' \n"""Docstring after empty line."""',
+ 'Docstring after empty line.')
+ check('# Comment\n"""Docstring after comment."""',
+ 'Docstring after comment.')
+ check(' # Indented comment\n"""Docstring after comment."""',
+ 'Docstring after comment.')
+ check('""""""', # Empty docstring
+ '')
+ check('', # Empty file
+ None)
+ check('"""Embedded\0null byte"""',
+ None)
+ check('"""Embedded null byte"""\0',
+ None)
+ check('"""Café and résumé."""',
+ 'Café and résumé.')
+ check("'''Triple single quotes'''",
+ 'Triple single quotes')
+ check('"Single double quotes"',
+ 'Single double quotes')
+ check("'Single single quotes'",
+ 'Single single quotes')
+ check('"""split\\\nline"""',
+ 'splitline')
+ check('"""Unrecognized escape \\sequence"""',
+ 'Unrecognized escape \\sequence')
+ check('"""Invalid escape seq\\uence"""',
+ None)
+ check('r"""Raw \\stri\\ng"""',
+ 'Raw \\stri\\ng')
+ check('b"""Bytes literal"""',
+ None)
+ check('f"""f-string"""',
+ None)
+ check('"""Concatenated""" \\\n"string" \'literals\'',
+ 'Concatenatedstringliterals')
+ check('"""String""" + """expression"""',
+ None)
+ check('("""In parentheses""")',
+ 'In parentheses')
+ check('("""Multiple lines """\n"""in parentheses""")',
+ 'Multiple lines in parentheses')
+ check('()', # tuple
+ None)
+ check(b'# coding: iso-8859-15\n"""\xa4uro sign"""',
+ '€uro sign', encoding='iso-8859-15')
+ check(b'"""\xa4"""', # Decoding error
+ None, encoding='utf-8')
+
+ with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8') as temp_file:
+ temp_file.write('"""Real file test."""\n')
+ temp_file.flush()
+ temp_file.seek(0)
+ result = pydoc.source_synopsis(temp_file)
+ self.assertEqual(result, "Real file test.")
+
@requires_docstrings
def test_synopsis_sourceless(self):
os = import_helper.import_fresh_module('os')
diff --git a/Lib/test/test_pyrepl/support.py b/Lib/test/test_pyrepl/support.py
index 672d4896c92283..45e3bf758f17de 100644
--- a/Lib/test/test_pyrepl/support.py
+++ b/Lib/test/test_pyrepl/support.py
@@ -101,16 +101,6 @@ def handle_all_events(
)
-def make_clean_env() -> dict[str, str]:
- clean_env = os.environ.copy()
- for k in clean_env.copy():
- if k.startswith("PYTHON"):
- clean_env.pop(k)
- clean_env.pop("FORCE_COLOR", None)
- clean_env.pop("NO_COLOR", None)
- return clean_env
-
-
class FakeConsole(Console):
def __init__(self, events, encoding="utf-8") -> None:
self.events = iter(events)
diff --git a/Lib/test/test_pyrepl/test_pyrepl.py b/Lib/test/test_pyrepl/test_pyrepl.py
index e5936c0984ae9a..191fce3f7ae6d5 100644
--- a/Lib/test/test_pyrepl/test_pyrepl.py
+++ b/Lib/test/test_pyrepl/test_pyrepl.py
@@ -10,7 +10,7 @@
import tempfile
from unittest import TestCase, skipUnless, skipIf
from unittest.mock import patch
-from test.support import force_not_colorized
+from test.support import force_not_colorized, make_clean_env
from test.support import SHORT_TIMEOUT
from test.support.import_helper import import_module
from test.support.os_helper import unlink
@@ -23,7 +23,6 @@
multiline_input,
code_to_events,
clean_screen,
- make_clean_env,
)
from _pyrepl.console import Event
from _pyrepl.readline import (ReadlineAlikeReader, ReadlineConfig,
@@ -851,7 +850,7 @@ def test_global_namespace_completion(self):
output = multiline_input(reader, namespace)
self.assertEqual(output, "python")
- def test_updown_arrow_with_completion_menu(self):
+ def test_up_down_arrow_with_completion_menu(self):
"""Up arrow in the middle of unfinished tab completion when the menu is displayed
should work and trigger going back in history. Down arrow should subsequently
get us back to the incomplete command."""
@@ -861,6 +860,7 @@ def test_updown_arrow_with_completion_menu(self):
events = itertools.chain(
code_to_events(code),
[
+ Event(evt="key", data="down", raw=bytearray(b"\x1bOB")),
Event(evt="key", data="up", raw=bytearray(b"\x1bOA")),
Event(evt="key", data="down", raw=bytearray(b"\x1bOB")),
],
@@ -1324,23 +1324,35 @@ def test_readline_history_file(self):
if readline.backend != "editline":
self.skipTest("GNU readline is not affected by this issue")
- hfile = tempfile.NamedTemporaryFile()
- self.addCleanup(unlink, hfile.name)
- env = os.environ.copy()
- env["PYTHON_HISTORY"] = hfile.name
+ with tempfile.NamedTemporaryFile() as hfile:
+ env = os.environ.copy()
+ env["PYTHON_HISTORY"] = hfile.name
- env["PYTHON_BASIC_REPL"] = "1"
- output, exit_code = self.run_repl("spam \nexit()\n", env=env)
- self.assertEqual(exit_code, 0)
- self.assertIn("spam ", output)
- self.assertNotEqual(pathlib.Path(hfile.name).stat().st_size, 0)
- self.assertIn("spam\\040", pathlib.Path(hfile.name).read_text())
+ env["PYTHON_BASIC_REPL"] = "1"
+ output, exit_code = self.run_repl("spam \nexit()\n", env=env)
+ self.assertEqual(exit_code, 0)
+ self.assertIn("spam ", output)
+ self.assertNotEqual(pathlib.Path(hfile.name).stat().st_size, 0)
+ self.assertIn("spam\\040", pathlib.Path(hfile.name).read_text())
- env.pop("PYTHON_BASIC_REPL", None)
- output, exit_code = self.run_repl("exit\n", env=env)
- self.assertEqual(exit_code, 0)
- self.assertNotIn("\\040", pathlib.Path(hfile.name).read_text())
+ env.pop("PYTHON_BASIC_REPL", None)
+ output, exit_code = self.run_repl("exit\n", env=env)
+ self.assertEqual(exit_code, 0)
+ self.assertNotIn("\\040", pathlib.Path(hfile.name).read_text())
def test_keyboard_interrupt_after_isearch(self):
output, exit_code = self.run_repl(["\x12", "\x03", "exit"])
self.assertEqual(exit_code, 0)
+
+ def test_prompt_after_help(self):
+ output, exit_code = self.run_repl(["help", "q", "exit"])
+
+ # Regex pattern to remove ANSI escape sequences
+ ansi_escape = re.compile(r"(\x1B(=|>|(\[)[0-?]*[ -\/]*[@-~]))")
+ cleaned_output = ansi_escape.sub("", output)
+ self.assertEqual(exit_code, 0)
+
+ # Ensure that we don't see multiple prompts after exiting `help`
+ # Extra stuff (newline and `exit` rewrites) are necessary
+ # because of how run_repl works.
+ self.assertNotIn(">>> \n>>> >>>", cleaned_output)
diff --git a/Lib/test/test_pyrepl/test_reader.py b/Lib/test/test_pyrepl/test_reader.py
index 421545eb1f64b7..27c6d6664eda9e 100644
--- a/Lib/test/test_pyrepl/test_reader.py
+++ b/Lib/test/test_pyrepl/test_reader.py
@@ -2,10 +2,9 @@
import functools
import rlcompleter
from unittest import TestCase
-from unittest.mock import MagicMock, patch
+from unittest.mock import MagicMock
-from .support import handle_all_events, handle_events_narrow_console, code_to_events, prepare_reader
-from test.support import import_helper
+from .support import handle_all_events, handle_events_narrow_console, code_to_events, prepare_reader, prepare_console
from _pyrepl.console import Event
from _pyrepl.reader import Reader
@@ -296,8 +295,8 @@ def test_completions_updated_on_key_press(self):
actual = reader.screen
self.assertEqual(len(actual), 2)
- self.assertEqual(actual[0].rstrip(), "itertools.accumulate(")
- self.assertEqual(actual[1], f"{code}a")
+ self.assertEqual(actual[0], f"{code}a")
+ self.assertEqual(actual[1].rstrip(), "itertools.accumulate(")
def test_key_press_on_tab_press_once(self):
namespace = {"itertools": itertools}
@@ -313,3 +312,10 @@ def test_key_press_on_tab_press_once(self):
reader, _ = handle_all_events(events, prepare_reader=completing_reader)
self.assert_screen_equals(reader, f"{code}a")
+
+ def test_pos2xy_with_no_columns(self):
+ console = prepare_console([])
+ reader = prepare_reader(console)
+ # Simulate a resize to 0 columns
+ reader.screeninfo = []
+ self.assertEqual(reader.pos2xy(), (0, 0))
diff --git a/Lib/test/test_pyrepl/test_unix_console.py b/Lib/test/test_pyrepl/test_unix_console.py
index e3bbabcb0089fb..15dbf48bcf0f1c 100644
--- a/Lib/test/test_pyrepl/test_unix_console.py
+++ b/Lib/test/test_pyrepl/test_unix_console.py
@@ -1,7 +1,9 @@
import itertools
+import os
import sys
import unittest
from functools import partial
+from test.support import os_helper
from unittest import TestCase
from unittest.mock import MagicMock, call, patch, ANY
@@ -312,3 +314,14 @@ def same_console(events):
)
console.restore()
con.restore()
+
+ def test_getheightwidth_with_invalid_environ(self, _os_write):
+ # gh-128636
+ console = UnixConsole()
+ with os_helper.EnvironmentVarGuard() as env:
+ env["LINES"] = ""
+ self.assertIsInstance(console.getheightwidth(), tuple)
+ env["COLUMNS"] = ""
+ self.assertIsInstance(console.getheightwidth(), tuple)
+ os.environ = []
+ self.assertIsInstance(console.getheightwidth(), tuple)
diff --git a/Lib/test/test_pyrepl/test_windows_console.py b/Lib/test/test_pyrepl/test_windows_console.py
index 4a3b2baf64a944..07eaccd1124cd6 100644
--- a/Lib/test/test_pyrepl/test_windows_console.py
+++ b/Lib/test/test_pyrepl/test_windows_console.py
@@ -329,6 +329,20 @@ def move_right(self, cols=1):
def erase_in_line(self):
return ERASE_IN_LINE.encode("utf8")
+ def test_multiline_ctrl_z(self):
+ # see gh-126332
+ code = "abcdefghi"
+
+ events = itertools.chain(
+ code_to_events(code),
+ [
+ Event(evt="key", data='\x1a', raw=bytearray(b'\x1a')),
+ Event(evt="key", data='\x1a', raw=bytearray(b'\x1a')),
+ ],
+ )
+ reader, _ = self.handle_events_narrow(events)
+ self.assertEqual(reader.cxy, (2, 3))
+
if __name__ == "__main__":
unittest.main()
diff --git a/Lib/test/test_readline.py b/Lib/test/test_readline.py
index 50e77cbbb6be13..8b8772c66ee654 100644
--- a/Lib/test/test_readline.py
+++ b/Lib/test/test_readline.py
@@ -114,6 +114,14 @@ def test_write_read_append(self):
# write_history_file can create the target
readline.write_history_file(hfilename)
+ # Negative values should be disallowed
+ with self.assertRaises(ValueError):
+ readline.append_history_file(-42, hfilename)
+
+ # See gh-122431: using the minimum signed integer value caused a segfault
+ with self.assertRaises(ValueError):
+ readline.append_history_file(-2147483648, hfilename)
+
def test_nonascii_history(self):
readline.clear_history()
try:
diff --git a/Lib/test/test_regrtest.py b/Lib/test/test_regrtest.py
index 54b6a16a0dab05..a5c9617bb07fd6 100644
--- a/Lib/test/test_regrtest.py
+++ b/Lib/test/test_regrtest.py
@@ -789,6 +789,7 @@ def test_finds_expected_number_of_tests(self):
f'{", ".join(output.splitlines())}')
+@support.force_not_colorized_test_class
class ProgramsTestCase(BaseTestCase):
"""
Test various ways to run the Python test suite. Use options close
@@ -902,6 +903,7 @@ def test_pcbuild_rt(self):
self.run_batch(script, *rt_args, *self.regrtest_args, *self.tests)
+@support.force_not_colorized_test_class
class ArgsTestCase(BaseTestCase):
"""
Test arguments of the Python test suite.
diff --git a/Lib/test/test_repl.py b/Lib/test/test_repl.py
index e764e60560db23..356ff5b198d637 100644
--- a/Lib/test/test_repl.py
+++ b/Lib/test/test_repl.py
@@ -70,6 +70,7 @@ def run_on_interactive_mode(source):
return output
+@support.force_not_colorized_test_class
class TestInteractiveInterpreter(unittest.TestCase):
@cpython_only
@@ -273,6 +274,8 @@ def test_asyncio_repl_is_ok(self):
self.assertEqual(exit_code, 0, "".join(output))
+
+@support.force_not_colorized_test_class
class TestInteractiveModeSyntaxErrors(unittest.TestCase):
def test_interactive_syntax_error_correct_line(self):
diff --git a/Lib/test/test_runpy.py b/Lib/test/test_runpy.py
index b64383f6546f31..ada78ec8e6b0c7 100644
--- a/Lib/test/test_runpy.py
+++ b/Lib/test/test_runpy.py
@@ -12,8 +12,14 @@
import textwrap
import unittest
import warnings
-from test.support import (infinite_recursion, no_tracing, verbose,
- requires_subprocess, requires_resource)
+from test.support import (
+ force_not_colorized_test_class,
+ infinite_recursion,
+ no_tracing,
+ requires_resource,
+ requires_subprocess,
+ verbose,
+)
from test.support.import_helper import forget, make_legacy_pyc, unload
from test.support.os_helper import create_empty_file, temp_dir, FakePath
from test.support.script_helper import make_script, make_zip_script
@@ -758,6 +764,7 @@ def test_encoding(self):
self.assertEqual(result['s'], "non-ASCII: h\xe9")
+@force_not_colorized_test_class
class TestExit(unittest.TestCase):
STATUS_CONTROL_C_EXIT = 0xC000013A
EXPECTED_CODE = (
diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py
index 28764536fa0400..fefb2d42d558b0 100644
--- a/Lib/test/test_socket.py
+++ b/Lib/test/test_socket.py
@@ -521,6 +521,8 @@ def clientTearDown(self):
@unittest.skipIf(WSL, 'VSOCK does not work on Microsoft WSL')
@unittest.skipUnless(HAVE_SOCKET_VSOCK,
'VSOCK sockets required for this test.')
+@unittest.skipUnless(get_cid() != 2, # VMADDR_CID_HOST
+ "This test can only be run on a virtual guest.")
class ThreadedVSOCKSocketStreamTest(unittest.TestCase, ThreadableTest):
def __init__(self, methodName='runTest'):
@@ -548,7 +550,10 @@ def clientSetUp(self):
self.cli.connect((cid, VSOCKPORT))
def testStream(self):
- msg = self.conn.recv(1024)
+ try:
+ msg = self.conn.recv(1024)
+ except PermissionError as exc:
+ self.skipTest(repr(exc))
self.assertEqual(msg, MSG)
def _testStream(self):
diff --git a/Lib/test/test_str.py b/Lib/test/test_str.py
index a4c92a66aa1eb5..c4f59224a6fe6f 100644
--- a/Lib/test/test_str.py
+++ b/Lib/test/test_str.py
@@ -7,6 +7,7 @@
"""
import _string
import codecs
+import datetime
import itertools
import operator
import pickle
@@ -1908,6 +1909,12 @@ def test_utf8_decode_invalid_sequences(self):
self.assertRaises(UnicodeDecodeError,
(b'\xF4'+cb+b'\xBF\xBF').decode, 'utf-8')
+ def test_issue127903(self):
+ # gh-127903: ``_copy_characters`` crashes on DEBUG builds when
+ # there is nothing to copy.
+ d = datetime.datetime(2013, 11, 10, 14, 20, 59)
+ self.assertEqual(d.strftime('%z'), '')
+
def test_issue8271(self):
# Issue #8271: during the decoding of an invalid UTF-8 byte sequence,
# only the start byte and the continuation byte(s) are now considered
diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py
index 400577d36cd44d..01ce0118651b75 100644
--- a/Lib/test/test_sys.py
+++ b/Lib/test/test_sys.py
@@ -362,6 +362,36 @@ def test_setrecursionlimit_to_depth(self):
finally:
sys.setrecursionlimit(old_limit)
+ @unittest.skipUnless(support.Py_GIL_DISABLED, "only meaningful if the GIL is disabled")
+ @threading_helper.requires_working_threading()
+ def test_racing_recursion_limit(self):
+ from threading import Thread
+ def something_recursive():
+ def count(n):
+ if n > 0:
+ return count(n - 1) + 1
+ return 0
+
+ count(50)
+
+ def set_recursion_limit():
+ for limit in range(100, 200):
+ sys.setrecursionlimit(limit)
+
+ threads = []
+ for _ in range(5):
+ threads.append(Thread(target=set_recursion_limit))
+
+ for _ in range(5):
+ threads.append(Thread(target=something_recursive))
+
+ with threading_helper.catch_threading_exception() as cm:
+ with threading_helper.start_threads(threads):
+ pass
+
+ if cm.exc_value:
+ raise cm.exc_value
+
def test_getwindowsversion(self):
# Raise SkipTest if sys doesn't have getwindowsversion attribute
test.support.get_attribute(sys, "getwindowsversion")
diff --git a/Lib/test/test_threading.py b/Lib/test/test_threading.py
index c13d1bd0f81efb..c4cf3e6a14a61c 100644
--- a/Lib/test/test_threading.py
+++ b/Lib/test/test_threading.py
@@ -1192,11 +1192,12 @@ def f():
resource.setrlimit(resource.RLIMIT_NPROC, (0, hard))
try:
- _thread.start_new_thread(f, ())
+ handle = _thread.start_joinable_thread(f)
except RuntimeError:
print('ok')
else:
print('!skip!')
+ handle.join()
"""
_, out, err = assert_python_ok("-u", "-c", code)
out = out.strip()
diff --git a/Lib/test/test_time.py b/Lib/test/test_time.py
index d368f08b610870..ee17c93f54cd88 100644
--- a/Lib/test/test_time.py
+++ b/Lib/test/test_time.py
@@ -157,10 +157,19 @@ def test_conversions(self):
self.assertEqual(int(time.mktime(time.localtime(self.t))),
int(self.t))
- def test_sleep(self):
+ def test_sleep_exceptions(self):
+ self.assertRaises(TypeError, time.sleep, [])
+ self.assertRaises(TypeError, time.sleep, "a")
+ self.assertRaises(TypeError, time.sleep, complex(0, 0))
+
self.assertRaises(ValueError, time.sleep, -2)
self.assertRaises(ValueError, time.sleep, -1)
- time.sleep(1.2)
+ self.assertRaises(ValueError, time.sleep, -0.1)
+
+ def test_sleep(self):
+ for value in [-0.0, 0, 0.0, 1e-100, 1e-9, 1e-6, 1, 1.2]:
+ with self.subTest(value=value):
+ time.sleep(value)
def test_epoch(self):
# bpo-43869: Make sure that Python use the same Epoch on all platforms:
diff --git a/Lib/test/test_tkinter/test_misc.py b/Lib/test/test_tkinter/test_misc.py
index b0b9ed60040443..dbaf970161ce78 100644
--- a/Lib/test/test_tkinter/test_misc.py
+++ b/Lib/test/test_tkinter/test_misc.py
@@ -4,7 +4,8 @@
from tkinter import TclError
import enum
from test import support
-from test.test_tkinter.support import AbstractTkTest, AbstractDefaultRootTest, requires_tk
+from test.test_tkinter.support import (AbstractTkTest, AbstractDefaultRootTest,
+ requires_tk, get_tk_patchlevel)
support.requires('gui')
@@ -30,12 +31,20 @@ def test_repr(self):
self.assertEqual(repr(f), '')
def test_generated_names(self):
+ class Button2(tkinter.Button):
+ pass
+
t = tkinter.Toplevel(self.root)
f = tkinter.Frame(t)
f2 = tkinter.Frame(t)
+ self.assertNotEqual(str(f), str(f2))
b = tkinter.Button(f2)
- for name in str(b).split('.'):
+ b2 = Button2(f2)
+ for name in str(b).split('.') + str(b2).split('.'):
self.assertFalse(name.isidentifier(), msg=repr(name))
+ b3 = tkinter.Button(f2)
+ b4 = Button2(f2)
+ self.assertEqual(len({str(b), str(b2), str(b3), str(b4)}), 4)
@requires_tk(8, 6, 6)
def test_tk_busy(self):
@@ -540,6 +549,31 @@ def test_wm_attribute(self):
self.assertEqual(w.wm_attributes('alpha'),
1.0 if self.wantobjects else '1.0')
+ def test_wm_iconbitmap(self):
+ t = tkinter.Toplevel(self.root)
+ self.assertEqual(t.wm_iconbitmap(), '')
+ t.wm_iconbitmap('hourglass')
+ bug = False
+ if t._windowingsystem == 'aqua':
+ # Tk bug 13ac26b35dc55f7c37f70b39d59d7ef3e63017c8.
+ patchlevel = get_tk_patchlevel(t)
+ if patchlevel < (8, 6, 17) or (9, 0) <= patchlevel < (9, 0, 2):
+ bug = True
+ if not bug:
+ self.assertEqual(t.wm_iconbitmap(), 'hourglass')
+ self.assertEqual(self.root.wm_iconbitmap(), '')
+ t.wm_iconbitmap('')
+ self.assertEqual(t.wm_iconbitmap(), '')
+
+ if t._windowingsystem == 'win32':
+ t.wm_iconbitmap(default='hourglass')
+ self.assertEqual(t.wm_iconbitmap(), 'hourglass')
+ self.assertEqual(self.root.wm_iconbitmap(), '')
+ t.wm_iconbitmap(default='')
+ self.assertEqual(t.wm_iconbitmap(), '')
+
+ t.destroy()
+
class EventTest(AbstractTkTest, unittest.TestCase):
diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py
index 75710db7d05375..480bff743a9f8a 100644
--- a/Lib/test/test_tokenize.py
+++ b/Lib/test/test_tokenize.py
@@ -1,4 +1,5 @@
import os
+import re
import token
import tokenize
import unittest
@@ -1819,6 +1820,22 @@ def test_iter_compat(self):
self.assertEqual(tokenize.untokenize(iter(tokens)), b'Hello ')
+def contains_ambiguous_backslash(source):
+ """Return `True` if the source contains a backslash on a
+ line by itself. For example:
+
+ a = (1
+ \\
+ )
+
+ Code like this cannot be untokenized exactly. This is because
+ the tokenizer does not produce any tokens for the line containing
+ the backslash and so there is no way to know its indent.
+ """
+ pattern = re.compile(br'\n\s*\\\r?\n')
+ return pattern.search(source) is not None
+
+
class TestRoundtrip(TestCase):
def check_roundtrip(self, f):
@@ -1829,6 +1846,9 @@ def check_roundtrip(self, f):
tokenize.untokenize(), and the latter tokenized again to 2-tuples.
The test fails if the 3 pair tokenizations do not match.
+ If the source code can be untokenized unambiguously, the
+ untokenized code must match the original code exactly.
+
When untokenize bugs are fixed, untokenize with 5-tuples should
reproduce code that does not contain a backslash continuation
following spaces. A proper test should test this.
@@ -1852,6 +1872,13 @@ def check_roundtrip(self, f):
tokens2_from5 = [tok[:2] for tok in tokenize.tokenize(readline5)]
self.assertEqual(tokens2_from5, tokens2)
+ if not contains_ambiguous_backslash(code):
+ # The BOM does not produce a token so there is no way to preserve it.
+ code_without_bom = code.removeprefix(b'\xef\xbb\xbf')
+ readline = iter(code_without_bom.splitlines(keepends=True)).__next__
+ untokenized_code = tokenize.untokenize(tokenize.tokenize(readline))
+ self.assertEqual(code_without_bom, untokenized_code)
+
def check_line_extraction(self, f):
if isinstance(f, str):
code = f.encode('utf-8')
diff --git a/Lib/test/test_traceback.py b/Lib/test/test_traceback.py
index cfcd5bb21a7fe8..f89fe8b26dc1bf 100644
--- a/Lib/test/test_traceback.py
+++ b/Lib/test/test_traceback.py
@@ -21,7 +21,7 @@
from test.support.os_helper import TESTFN, unlink
from test.support.script_helper import assert_python_ok, assert_python_failure
from test.support.import_helper import forget
-from test.support import force_not_colorized
+from test.support import force_not_colorized, force_not_colorized_test_class
import json
import textwrap
@@ -376,6 +376,30 @@ def f():
' ValueError: 0\n',
])
+ def test_format_exception_group_syntax_error_with_custom_values(self):
+ # See https://github.com/python/cpython/issues/128894
+ for exc in [
+ SyntaxError('error', 'abcd'),
+ SyntaxError('error', [None] * 4),
+ SyntaxError('error', (1, 2, 3, 4)),
+ SyntaxError('error', (1, 2, 3, 4)),
+ SyntaxError('error', (1, 'a', 'b', 2)),
+ # with end_lineno and end_offset:
+ SyntaxError('error', 'abcdef'),
+ SyntaxError('error', [None] * 6),
+ SyntaxError('error', (1, 2, 3, 4, 5, 6)),
+ SyntaxError('error', (1, 'a', 'b', 2, 'c', 'd')),
+ ]:
+ with self.subTest(exc=exc):
+ err = traceback.format_exception_only(exc, show_group=True)
+ # Should not raise an exception:
+ if exc.lineno is not None:
+ self.assertEqual(len(err), 2)
+ self.assertTrue(err[0].startswith(' File'))
+ else:
+ self.assertEqual(len(err), 1)
+ self.assertEqual(err[-1], 'SyntaxError: error\n')
+
@requires_subprocess()
@force_not_colorized
def test_encoded_file(self):
@@ -1709,6 +1733,7 @@ def f():
@requires_debug_ranges()
+@force_not_colorized_test_class
class PurePythonTracebackErrorCaretTests(
PurePythonExceptionFormattingMixin,
TracebackErrorLocationCaretTestBase,
@@ -1722,6 +1747,7 @@ class PurePythonTracebackErrorCaretTests(
@cpython_only
@requires_debug_ranges()
+@force_not_colorized_test_class
class CPythonTracebackErrorCaretTests(
CAPIExceptionFormattingMixin,
TracebackErrorLocationCaretTestBase,
@@ -1733,6 +1759,7 @@ class CPythonTracebackErrorCaretTests(
@cpython_only
@requires_debug_ranges()
+@force_not_colorized_test_class
class CPythonTracebackLegacyErrorCaretTests(
CAPIExceptionFormattingLegacyMixin,
TracebackErrorLocationCaretTestBase,
@@ -2144,10 +2171,12 @@ def test_print_exception_bad_type_python(self):
boundaries = re.compile(
'(%s|%s)' % (re.escape(cause_message), re.escape(context_message)))
+@force_not_colorized_test_class
class TestTracebackFormat(unittest.TestCase, TracebackFormatMixin):
pass
@cpython_only
+@force_not_colorized_test_class
class TestFallbackTracebackFormat(unittest.TestCase, TracebackFormatMixin):
DEBUG_RANGES = False
def setUp(self) -> None:
@@ -2909,6 +2938,33 @@ def exc():
report = self.get_report(exc)
self.assertEqual(report, expected)
+ def test_exception_group_wrapped_naked(self):
+ # See gh-128799
+
+ def exc():
+ try:
+ raise Exception(42)
+ except* Exception as e:
+ raise
+
+ expected = (f' + Exception Group Traceback (most recent call last):\n'
+ f' | File "{__file__}", line {self.callable_line}, in get_exception\n'
+ f' | exception_or_callable()\n'
+ f' | ~~~~~~~~~~~~~~~~~~~~~^^\n'
+ f' | File "{__file__}", line {exc.__code__.co_firstlineno + 3}, in exc\n'
+ f' | except* Exception as e:\n'
+ f' | raise\n'
+ f' | ExceptionGroup: (1 sub-exception)\n'
+ f' +-+---------------- 1 ----------------\n'
+ f' | Traceback (most recent call last):\n'
+ f' | File "{__file__}", line {exc.__code__.co_firstlineno + 2}, in exc\n'
+ f' | raise Exception(42)\n'
+ f' | Exception: 42\n'
+ f' +------------------------------------\n')
+
+ report = self.get_report(exc)
+ self.assertEqual(report, expected)
+
def test_KeyboardInterrupt_at_first_line_of_frame(self):
# see GH-93249
def f():
@@ -2935,6 +2991,7 @@ def f():
self.assertEqual(report, expected)
+@force_not_colorized_test_class
class PyExcReportingTests(BaseExceptionReportingTests, unittest.TestCase):
#
# This checks reporting through the 'traceback' module, with both
@@ -2951,6 +3008,7 @@ def get_report(self, e):
return s
+@force_not_colorized_test_class
class CExcReportingTests(BaseExceptionReportingTests, unittest.TestCase):
#
# This checks built-in reporting by the interpreter.
diff --git a/Lib/test/test_tracemalloc.py b/Lib/test/test_tracemalloc.py
index 5755f7697de91a..0220a83d24b428 100644
--- a/Lib/test/test_tracemalloc.py
+++ b/Lib/test/test_tracemalloc.py
@@ -1,14 +1,16 @@
import contextlib
import os
import sys
+import textwrap
import tracemalloc
import unittest
from unittest.mock import patch
from test.support.script_helper import (assert_python_ok, assert_python_failure,
interpreter_requires_environment)
from test import support
-from test.support import os_helper
from test.support import force_not_colorized
+from test.support import os_helper
+from test.support import threading_helper
try:
import _testcapi
@@ -18,6 +20,7 @@
_testinternalcapi = None
+DEFAULT_DOMAIN = 0
EMPTY_STRING_SIZE = sys.getsizeof(b'')
INVALID_NFRAME = (-1, 2**30)
@@ -952,7 +955,6 @@ def check_env_var_invalid(self, nframe):
return
self.fail(f"unexpected output: {stderr!a}")
-
def test_env_var_invalid(self):
for nframe in INVALID_NFRAME:
with self.subTest(nframe=nframe):
@@ -981,6 +983,7 @@ def check_sys_xoptions_invalid(self, nframe):
return
self.fail(f"unexpected output: {stderr!a}")
+ @force_not_colorized
def test_sys_xoptions_invalid(self):
for nframe in INVALID_NFRAME:
with self.subTest(nframe=nframe):
@@ -1026,8 +1029,8 @@ def track(self, release_gil=False, nframe=1):
release_gil)
return frames
- def untrack(self):
- _testcapi.tracemalloc_untrack(self.domain, self.ptr)
+ def untrack(self, release_gil=False):
+ _testcapi.tracemalloc_untrack(self.domain, self.ptr, release_gil)
def get_traced_memory(self):
# Get the traced size in the domain
@@ -1069,7 +1072,7 @@ def test_track_already_tracked(self):
self.assertEqual(self.get_traceback(),
tracemalloc.Traceback(frames))
- def test_untrack(self):
+ def check_untrack(self, release_gil):
tracemalloc.start()
self.track()
@@ -1077,13 +1080,19 @@ def test_untrack(self):
self.assertEqual(self.get_traced_memory(), self.size)
# untrack must remove the trace
- self.untrack()
+ self.untrack(release_gil)
self.assertIsNone(self.get_traceback())
self.assertEqual(self.get_traced_memory(), 0)
# calling _PyTraceMalloc_Untrack() multiple times must not crash
- self.untrack()
- self.untrack()
+ self.untrack(release_gil)
+ self.untrack(release_gil)
+
+ def test_untrack(self):
+ self.check_untrack(False)
+
+ def test_untrack_without_gil(self):
+ self.check_untrack(True)
def test_stop_track(self):
tracemalloc.start()
@@ -1101,6 +1110,37 @@ def test_stop_untrack(self):
with self.assertRaises(RuntimeError):
self.untrack()
+ @unittest.skipIf(_testcapi is None, 'need _testcapi')
+ @threading_helper.requires_working_threading()
+ # gh-128679: Test crash on a debug build (especially on FreeBSD).
+ @unittest.skipIf(support.Py_DEBUG, 'need release build')
+ def test_tracemalloc_track_race(self):
+ # gh-128679: Test fix for tracemalloc.stop() race condition
+ _testcapi.tracemalloc_track_race()
+
+ def test_late_untrack(self):
+ code = textwrap.dedent(f"""
+ from test import support
+ import tracemalloc
+ import _testcapi
+
+ class Tracked:
+ def __init__(self, domain, size):
+ self.domain = domain
+ self.ptr = id(self)
+ self.size = size
+ _testcapi.tracemalloc_track(self.domain, self.ptr, self.size)
+
+ def __del__(self, untrack=_testcapi.tracemalloc_untrack):
+ untrack(self.domain, self.ptr, 1)
+
+ domain = {DEFAULT_DOMAIN}
+ tracemalloc.start()
+ obj = Tracked(domain, 1024 * 1024)
+ support.late_deletion(obj)
+ """)
+ assert_python_ok("-c", code)
+
if __name__ == "__main__":
unittest.main()
diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py
index 06a37c5494db5c..89a32c7a1a0d14 100644
--- a/Lib/test/test_typing.py
+++ b/Lib/test/test_typing.py
@@ -46,6 +46,7 @@
import types
from test.support import captured_stderr, cpython_only, infinite_recursion, requires_docstrings, import_helper
+from test.support.testcase import ExtraAssertions
from test.typinganndata import ann_module695, mod_generics_cache, _typed_dict_helper
@@ -54,21 +55,7 @@
CANNOT_SUBCLASS_INSTANCE = 'Cannot subclass an instance of %s'
-class BaseTestCase(TestCase):
-
- def assertIsSubclass(self, cls, class_or_tuple, msg=None):
- if not issubclass(cls, class_or_tuple):
- message = '%r is not a subclass of %r' % (cls, class_or_tuple)
- if msg is not None:
- message += ' : %s' % msg
- raise self.failureException(message)
-
- def assertNotIsSubclass(self, cls, class_or_tuple, msg=None):
- if issubclass(cls, class_or_tuple):
- message = '%r is a subclass of %r' % (cls, class_or_tuple)
- if msg is not None:
- message += ' : %s' % msg
- raise self.failureException(message)
+class BaseTestCase(TestCase, ExtraAssertions):
def clear_caches(self):
for f in typing._cleanups:
@@ -120,7 +107,7 @@ class Sub(Any): pass
def test_errors(self):
with self.assertRaises(TypeError):
- issubclass(42, Any)
+ isinstance(42, Any)
with self.assertRaises(TypeError):
Any[int] # Any is not a generic type.
@@ -135,6 +122,9 @@ class Something: pass
class MockSomething(Something, Mock): pass
self.assertTrue(issubclass(MockSomething, Any))
+ self.assertTrue(issubclass(MockSomething, MockSomething))
+ self.assertTrue(issubclass(MockSomething, Something))
+ self.assertTrue(issubclass(MockSomething, Mock))
ms = MockSomething()
self.assertIsInstance(ms, MockSomething)
self.assertIsInstance(ms, Something)
@@ -1246,10 +1236,6 @@ class Gen[*Ts]: ...
class TypeVarTupleTests(BaseTestCase):
- def assertEndsWith(self, string, tail):
- if not string.endswith(tail):
- self.fail(f"String {string!r} does not end with {tail!r}")
-
def test_name(self):
Ts = TypeVarTuple('Ts')
self.assertEqual(Ts.__name__, 'Ts')
@@ -2008,13 +1994,81 @@ def test_basics(self):
u = Union[int, float]
self.assertNotEqual(u, Union)
- def test_subclass_error(self):
+ def test_union_isinstance(self):
+ self.assertTrue(isinstance(42, Union[int, str]))
+ self.assertTrue(isinstance('abc', Union[int, str]))
+ self.assertFalse(isinstance(3.14, Union[int, str]))
+ self.assertTrue(isinstance(42, Union[int, list[int]]))
+ self.assertTrue(isinstance(42, Union[int, Any]))
+
+ def test_union_isinstance_type_error(self):
+ with self.assertRaises(TypeError):
+ isinstance(42, Union[str, list[int]])
+ with self.assertRaises(TypeError):
+ isinstance(42, Union[list[int], int])
+ with self.assertRaises(TypeError):
+ isinstance(42, Union[list[int], str])
+ with self.assertRaises(TypeError):
+ isinstance(42, Union[str, Any])
+ with self.assertRaises(TypeError):
+ isinstance(42, Union[Any, int])
+ with self.assertRaises(TypeError):
+ isinstance(42, Union[Any, str])
+
+ def test_optional_isinstance(self):
+ self.assertTrue(isinstance(42, Optional[int]))
+ self.assertTrue(isinstance(None, Optional[int]))
+ self.assertFalse(isinstance('abc', Optional[int]))
+
+ def test_optional_isinstance_type_error(self):
+ with self.assertRaises(TypeError):
+ isinstance(42, Optional[list[int]])
+ with self.assertRaises(TypeError):
+ isinstance(None, Optional[list[int]])
+ with self.assertRaises(TypeError):
+ isinstance(42, Optional[Any])
+ with self.assertRaises(TypeError):
+ isinstance(None, Optional[Any])
+
+ def test_union_issubclass(self):
+ self.assertTrue(issubclass(int, Union[int, str]))
+ self.assertTrue(issubclass(str, Union[int, str]))
+ self.assertFalse(issubclass(float, Union[int, str]))
+ self.assertTrue(issubclass(int, Union[int, list[int]]))
+ self.assertTrue(issubclass(int, Union[int, Any]))
+ self.assertFalse(issubclass(int, Union[str, Any]))
+ self.assertTrue(issubclass(int, Union[Any, int]))
+ self.assertFalse(issubclass(int, Union[Any, str]))
+
+ def test_union_issubclass_type_error(self):
with self.assertRaises(TypeError):
issubclass(int, Union)
with self.assertRaises(TypeError):
issubclass(Union, int)
with self.assertRaises(TypeError):
issubclass(Union[int, str], int)
+ with self.assertRaises(TypeError):
+ issubclass(int, Union[str, list[int]])
+ with self.assertRaises(TypeError):
+ issubclass(int, Union[list[int], int])
+ with self.assertRaises(TypeError):
+ issubclass(int, Union[list[int], str])
+
+ def test_optional_issubclass(self):
+ self.assertTrue(issubclass(int, Optional[int]))
+ self.assertTrue(issubclass(type(None), Optional[int]))
+ self.assertFalse(issubclass(str, Optional[int]))
+ self.assertTrue(issubclass(Any, Optional[Any]))
+ self.assertTrue(issubclass(type(None), Optional[Any]))
+ self.assertFalse(issubclass(int, Optional[Any]))
+
+ def test_optional_issubclass_type_error(self):
+ with self.assertRaises(TypeError):
+ issubclass(list[int], Optional[list[int]])
+ with self.assertRaises(TypeError):
+ issubclass(type(None), Optional[list[int]])
+ with self.assertRaises(TypeError):
+ issubclass(int, Optional[list[int]])
def test_union_any(self):
u = Union[Any]
@@ -5106,6 +5160,18 @@ class C(B[int]):
x = pickle.loads(z)
self.assertEqual(s, x)
+ # Test ParamSpec args and kwargs
+ global PP
+ PP = ParamSpec('PP')
+ for thing in [PP.args, PP.kwargs]:
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+ with self.subTest(thing=thing, proto=proto):
+ self.assertEqual(
+ pickle.loads(pickle.dumps(thing, proto)),
+ thing,
+ )
+ del PP
+
def test_copy_and_deepcopy(self):
T = TypeVar('T')
class Node(Generic[T]): ...
@@ -8758,13 +8824,13 @@ class Child1(Base1):
self.assertEqual(Child1.__mutable_keys__, frozenset({'b'}))
class Base2(TypedDict):
- a: ReadOnly[int]
+ a: int
class Child2(Base2):
- b: str
+ b: ReadOnly[str]
- self.assertEqual(Child1.__readonly_keys__, frozenset({'a'}))
- self.assertEqual(Child1.__mutable_keys__, frozenset({'b'}))
+ self.assertEqual(Child2.__readonly_keys__, frozenset({'b'}))
+ self.assertEqual(Child2.__mutable_keys__, frozenset({'a'}))
def test_cannot_make_mutable_key_readonly(self):
class Base(TypedDict):
@@ -9927,6 +9993,18 @@ def test_valid_uses(self):
self.assertEqual(C4.__args__, (Concatenate[int, T, P], T))
self.assertEqual(C4.__parameters__, (T, P))
+ def test_invalid_uses(self):
+ with self.assertRaisesRegex(TypeError, 'Concatenate of no types'):
+ Concatenate[()]
+ with self.assertRaisesRegex(
+ TypeError,
+ (
+ 'The last parameter to Concatenate should be a '
+ 'ParamSpec variable or ellipsis'
+ ),
+ ):
+ Concatenate[int]
+
def test_var_substitution(self):
T = TypeVar('T')
P = ParamSpec('P')
diff --git a/Lib/test/test_unicodedata.py b/Lib/test/test_unicodedata.py
index d3bf4ea7c7d437..2cf367a2cfe85b 100644
--- a/Lib/test/test_unicodedata.py
+++ b/Lib/test/test_unicodedata.py
@@ -11,8 +11,14 @@
import sys
import unicodedata
import unittest
-from test.support import (open_urlresource, requires_resource, script_helper,
- cpython_only, check_disallow_instantiation)
+from test.support import (
+ open_urlresource,
+ requires_resource,
+ script_helper,
+ cpython_only,
+ check_disallow_instantiation,
+ force_not_colorized,
+)
class UnicodeMethodsTest(unittest.TestCase):
@@ -277,6 +283,7 @@ def test_disallow_instantiation(self):
# Ensure that the type disallows instantiation (bpo-43916)
check_disallow_instantiation(self, unicodedata.UCD)
+ @force_not_colorized
def test_failed_import_during_compiling(self):
# Issue 4367
# Decoding \N escapes requires the unicodedata module. If it can't be
diff --git a/Lib/test/test_unittest/test_program.py b/Lib/test/test_unittest/test_program.py
index 7241cf59f73d4f..aa7e8b712fd763 100644
--- a/Lib/test/test_unittest/test_program.py
+++ b/Lib/test/test_unittest/test_program.py
@@ -7,6 +7,7 @@
from test.test_unittest.test_result import BufferedWriter
+@support.force_not_colorized_test_class
class Test_TestProgram(unittest.TestCase):
def test_discovery_from_dotted_path(self):
diff --git a/Lib/test/test_unittest/test_result.py b/Lib/test/test_unittest/test_result.py
index 15e3f62ef66a4b..4d552d54e9a6df 100644
--- a/Lib/test/test_unittest/test_result.py
+++ b/Lib/test/test_unittest/test_result.py
@@ -1,12 +1,11 @@
import io
import sys
import textwrap
-
-from test.support import warnings_helper, captured_stdout
-
import traceback
import unittest
from unittest.util import strclass
+from test.support import warnings_helper
+from test.support import captured_stdout, force_not_colorized_test_class
from test.test_unittest.support import BufferedWriter
@@ -34,6 +33,7 @@ def bad_cleanup2():
raise ValueError('bad cleanup2')
+@force_not_colorized_test_class
class Test_TestResult(unittest.TestCase):
# Note: there are not separate tests for TestResult.wasSuccessful(),
# TestResult.errors, TestResult.failures, TestResult.testsRun or
@@ -457,6 +457,7 @@ def test(result):
self.assertTrue(stream.getvalue().endswith('\n\nOK\n'))
+@force_not_colorized_test_class
class Test_TextTestResult(unittest.TestCase):
maxDiff = None
@@ -758,6 +759,7 @@ def testFoo(self):
runner.run(Test('testFoo'))
+@force_not_colorized_test_class
class TestOutputBuffering(unittest.TestCase):
def setUp(self):
diff --git a/Lib/test/test_unittest/test_runner.py b/Lib/test/test_unittest/test_runner.py
index 1b9cef43e3f9c5..4d3cfd60b8d9c3 100644
--- a/Lib/test/test_unittest/test_runner.py
+++ b/Lib/test/test_unittest/test_runner.py
@@ -106,6 +106,7 @@ def cleanup2(*args, **kwargs):
self.assertTrue(test.doCleanups())
self.assertEqual(cleanups, [(2, (), {}), (1, (1, 2, 3), dict(four='hello', five='goodbye'))])
+ @support.force_not_colorized
def testCleanUpWithErrors(self):
class TestableTest(unittest.TestCase):
def testNothing(self):
@@ -249,6 +250,7 @@ def testNothing(self):
self.assertEqual(test._cleanups, [])
+@support.force_not_colorized_test_class
class TestClassCleanup(unittest.TestCase):
def test_addClassCleanUp(self):
class TestableTest(unittest.TestCase):
@@ -601,6 +603,7 @@ class EmptyTest(unittest.TestCase):
self.assertIn("\nNO TESTS RAN\n", runner.stream.getvalue())
+@support.force_not_colorized_test_class
class TestModuleCleanUp(unittest.TestCase):
def test_add_and_do_ModuleCleanup(self):
module_cleanups = []
@@ -1318,6 +1321,7 @@ def MockResultClass(*args):
expectedresult = (runner.stream, DESCRIPTIONS, VERBOSITY)
self.assertEqual(runner._makeResult(), expectedresult)
+ @support.force_not_colorized
@support.requires_subprocess()
def test_warnings(self):
"""
diff --git a/Lib/test/test_unparse.py b/Lib/test/test_unparse.py
index 35394f29fbe49d..971fdb2ba32170 100644
--- a/Lib/test/test_unparse.py
+++ b/Lib/test/test_unparse.py
@@ -513,11 +513,13 @@ def test_class_bases_and_keywords(self):
self.check_src_roundtrip("class X(*args, **kwargs):\n pass")
def test_fstrings(self):
- self.check_src_roundtrip("f'-{f'*{f'+{f'.{x}.'}+'}*'}-'")
- self.check_src_roundtrip("f'\\u2028{'x'}'")
+ self.check_src_roundtrip('''f\'\'\'-{f"""*{f"+{f'.{x}.'}+"}*"""}-\'\'\'''')
+ self.check_src_roundtrip('''f\'-{f\'\'\'*{f"""+{f".{f'{x}'}."}+"""}*\'\'\'}-\'''')
+ self.check_src_roundtrip('''f\'-{f\'*{f\'\'\'+{f""".{f"{f'{x}'}"}."""}+\'\'\'}*\'}-\'''')
+ self.check_src_roundtrip('''f"\\u2028{'x'}"''')
self.check_src_roundtrip(r"f'{x}\n'")
- self.check_src_roundtrip("f'{'\\n'}\\n'")
- self.check_src_roundtrip("f'{f'{x}\\n'}\\n'")
+ self.check_src_roundtrip('''f"{'\\n'}\\n"''')
+ self.check_src_roundtrip('''f"{f'{x}\\n'}\\n"''')
def test_docstrings(self):
docstrings = (
diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py
index 6539496370116a..fbdb8548e1ad4f 100644
--- a/Lib/test/test_urllib.py
+++ b/Lib/test/test_urllib.py
@@ -476,7 +476,9 @@ def test_read_bogus(self):
Content-Type: text/html; charset=iso-8859-1
''', mock_close=True)
try:
- self.assertRaises(OSError, urlopen, "http://python.org/")
+ with self.assertRaises(urllib.error.HTTPError) as cm:
+ urllib.request.urlopen("http://python.org/")
+ cm.exception.close()
finally:
self.unfakehttp()
@@ -491,8 +493,9 @@ def test_invalid_redirect(self):
''', mock_close=True)
try:
msg = "Redirection to url 'file:"
- with self.assertRaisesRegex(urllib.error.HTTPError, msg):
- urlopen("http://python.org/")
+ with self.assertRaisesRegex(urllib.error.HTTPError, msg) as cm:
+ urllib.request.urlopen("http://python.org/")
+ cm.exception.close()
finally:
self.unfakehttp()
@@ -505,8 +508,9 @@ def test_redirect_limit_independent(self):
Connection: close
''', mock_close=True)
try:
- self.assertRaises(urllib.error.HTTPError, urlopen,
- "http://something")
+ with self.assertRaises(urllib.error.HTTPError) as cm:
+ urllib.request.urlopen("http://something")
+ cm.exception.close()
finally:
self.unfakehttp()
@@ -626,10 +630,11 @@ def setUp(self):
"QOjdAAAAAXNSR0IArs4c6QAAAA9JREFUCNdj%0AYGBg%2BP//PwAGAQL%2BCm8 "
"vHgAAAABJRU5ErkJggg%3D%3D%0A%20")
- self.text_url_resp = urllib.request.urlopen(self.text_url)
- self.text_url_base64_resp = urllib.request.urlopen(
- self.text_url_base64)
- self.image_url_resp = urllib.request.urlopen(self.image_url)
+ self.text_url_resp = self.enterContext(
+ urllib.request.urlopen(self.text_url))
+ self.text_url_base64_resp = self.enterContext(
+ urllib.request.urlopen(self.text_url_base64))
+ self.image_url_resp = self.enterContext(urllib.request.urlopen(self.image_url))
def test_interface(self):
# Make sure object returned by urlopen() has the specified methods
@@ -645,8 +650,10 @@ def test_info(self):
[('text/plain', ''), ('charset', 'ISO-8859-1')])
self.assertEqual(self.image_url_resp.info()['content-length'],
str(len(self.image)))
- self.assertEqual(urllib.request.urlopen("data:,").info().get_params(),
+ r = urllib.request.urlopen("data:,")
+ self.assertEqual(r.info().get_params(),
[('text/plain', ''), ('charset', 'US-ASCII')])
+ r.close()
def test_geturl(self):
self.assertEqual(self.text_url_resp.geturl(), self.text_url)
diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py
index 068dd859f27220..229cb9d9741210 100644
--- a/Lib/test/test_urllib2.py
+++ b/Lib/test/test_urllib2.py
@@ -789,6 +789,7 @@ def connect_ftp(self, user, passwd, host, port, dirs,
headers = r.info()
self.assertEqual(headers.get("Content-type"), mimetype)
self.assertEqual(int(headers["Content-length"]), len(data))
+ r.close()
def test_file(self):
import email.utils
@@ -1229,10 +1230,11 @@ def test_redirect(self):
try:
method(req, MockFile(), code, "Blah",
MockHeaders({"location": to_url}))
- except urllib.error.HTTPError:
+ except urllib.error.HTTPError as err:
# 307 and 308 in response to POST require user OK
self.assertIn(code, (307, 308))
self.assertIsNotNone(data)
+ err.close()
self.assertEqual(o.req.get_full_url(), to_url)
try:
self.assertEqual(o.req.get_method(), "GET")
@@ -1268,9 +1270,10 @@ def redirect(h, req, url=to_url):
while 1:
redirect(h, req, "http://example.com/")
count = count + 1
- except urllib.error.HTTPError:
+ except urllib.error.HTTPError as err:
# don't stop until max_repeats, because cookies may introduce state
self.assertEqual(count, urllib.request.HTTPRedirectHandler.max_repeats)
+ err.close()
# detect endless non-repeating chain of redirects
req = Request(from_url, origin_req_host="example.com")
@@ -1280,9 +1283,10 @@ def redirect(h, req, url=to_url):
while 1:
redirect(h, req, "http://example.com/%d" % count)
count = count + 1
- except urllib.error.HTTPError:
+ except urllib.error.HTTPError as err:
self.assertEqual(count,
urllib.request.HTTPRedirectHandler.max_redirections)
+ err.close()
def test_invalid_redirect(self):
from_url = "http://example.com/a.html"
@@ -1296,9 +1300,11 @@ def test_invalid_redirect(self):
for scheme in invalid_schemes:
invalid_url = scheme + '://' + schemeless_url
- self.assertRaises(urllib.error.HTTPError, h.http_error_302,
+ with self.assertRaises(urllib.error.HTTPError) as cm:
+ h.http_error_302(
req, MockFile(), 302, "Security Loophole",
MockHeaders({"location": invalid_url}))
+ cm.exception.close()
for scheme in valid_schemes:
valid_url = scheme + '://' + schemeless_url
@@ -1894,11 +1900,13 @@ def test_HTTPError_interface(self):
self.assertEqual(str(err), expected_errmsg)
expected_errmsg = '<HTTPError %s: %r>' % (err.code, err.msg)
self.assertEqual(repr(err), expected_errmsg)
+ err.close()
def test_gh_98778(self):
x = urllib.error.HTTPError("url", 405, "METHOD NOT ALLOWED", None, None)
self.assertEqual(getattr(x, "__notes__", ()), ())
self.assertIsInstance(x.fp.read(), bytes)
+ x.close()
def test_parse_proxy(self):
parse_proxy_test_cases = [
diff --git a/Lib/test/test_urllib2_localnet.py b/Lib/test/test_urllib2_localnet.py
index 50c491a3cfd3d0..9cb15d61c2ad4d 100644
--- a/Lib/test/test_urllib2_localnet.py
+++ b/Lib/test/test_urllib2_localnet.py
@@ -316,7 +316,9 @@ def test_basic_auth_httperror(self):
ah = urllib.request.HTTPBasicAuthHandler()
ah.add_password(self.REALM, self.server_url, self.USER, self.INCORRECT_PASSWD)
urllib.request.install_opener(urllib.request.build_opener(ah))
- self.assertRaises(urllib.error.HTTPError, urllib.request.urlopen, self.server_url)
+ with self.assertRaises(urllib.error.HTTPError) as cm:
+ urllib.request.urlopen(self.server_url)
+ cm.exception.close()
@hashlib_helper.requires_hashdigest("md5", openssl=True)
@@ -362,15 +364,15 @@ def test_proxy_with_bad_password_raises_httperror(self):
self.proxy_digest_handler.add_password(self.REALM, self.URL,
self.USER, self.PASSWD+"bad")
self.digest_auth_handler.set_qop("auth")
- self.assertRaises(urllib.error.HTTPError,
- self.opener.open,
- self.URL)
+ with self.assertRaises(urllib.error.HTTPError) as cm:
+ self.opener.open(self.URL)
+ cm.exception.close()
def test_proxy_with_no_password_raises_httperror(self):
self.digest_auth_handler.set_qop("auth")
- self.assertRaises(urllib.error.HTTPError,
- self.opener.open,
- self.URL)
+ with self.assertRaises(urllib.error.HTTPError) as cm:
+ self.opener.open(self.URL)
+ cm.exception.close()
def test_proxy_qop_auth_works(self):
self.proxy_digest_handler.add_password(self.REALM, self.URL,
diff --git a/Lib/test/test_urllib_response.py b/Lib/test/test_urllib_response.py
index b76763f4ed824f..d949fa38bfc42f 100644
--- a/Lib/test/test_urllib_response.py
+++ b/Lib/test/test_urllib_response.py
@@ -48,6 +48,7 @@ def test_addinfo(self):
info = urllib.response.addinfo(self.fp, self.test_headers)
self.assertEqual(info.info(), self.test_headers)
self.assertEqual(info.headers, self.test_headers)
+ info.close()
def test_addinfourl(self):
url = "http://www.python.org"
@@ -60,6 +61,7 @@ def test_addinfourl(self):
self.assertEqual(infourl.headers, self.test_headers)
self.assertEqual(infourl.url, url)
self.assertEqual(infourl.status, code)
+ infourl.close()
def tearDown(self):
self.sock.close()
diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py
index 818e7e93dbbe11..5e429b9259fee7 100644
--- a/Lib/test/test_urlparse.py
+++ b/Lib/test/test_urlparse.py
@@ -1273,16 +1273,51 @@ def test_invalid_bracketed_hosts(self):
self.assertRaises(ValueError, urllib.parse.urlsplit, 'Scheme://user@[0439:23af::2309::fae7:1234]/Path?Query')
self.assertRaises(ValueError, urllib.parse.urlsplit, 'Scheme://user@[0439:23af:2309::fae7:1234:2342:438e:192.0.2.146]/Path?Query')
self.assertRaises(ValueError, urllib.parse.urlsplit, 'Scheme://user@]v6a.ip[/Path')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[v6a.ip]')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[v6a.ip].suffix')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[v6a.ip]/')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[v6a.ip].suffix/')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[v6a.ip]?')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[v6a.ip].suffix?')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[::1]')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[::1].suffix')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[::1]/')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[::1].suffix/')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[::1]?')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[::1].suffix?')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[::1]:a')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[::1].suffix:a')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[::1]:a1')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[::1].suffix:a1')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[::1]:1a')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[::1].suffix:1a')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[::1]:')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[::1].suffix:/')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[::1]:?')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://user@prefix.[v6a.ip]')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://user@[v6a.ip].suffix')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[v6a.ip')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://v6a.ip]')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://]v6a.ip[')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://]v6a.ip')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://v6a.ip[')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[v6a.ip')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://v6a.ip].suffix')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix]v6a.ip[suffix')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix]v6a.ip')
+ self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://v6a.ip[suffix')
def test_splitting_bracketed_hosts(self):
- p1 = urllib.parse.urlsplit('scheme://user@[v6a.ip]/path?query')
+ p1 = urllib.parse.urlsplit('scheme://user@[v6a.ip]:1234/path?query')
self.assertEqual(p1.hostname, 'v6a.ip')
self.assertEqual(p1.username, 'user')
self.assertEqual(p1.path, '/path')
+ self.assertEqual(p1.port, 1234)
p2 = urllib.parse.urlsplit('scheme://user@[0439:23af:2309::fae7%test]/path?query')
self.assertEqual(p2.hostname, '0439:23af:2309::fae7%test')
self.assertEqual(p2.username, 'user')
self.assertEqual(p2.path, '/path')
+ self.assertIs(p2.port, None)
p3 = urllib.parse.urlsplit('scheme://user@[0439:23af:2309::fae7:1234:192.0.2.146%test]/path?query')
self.assertEqual(p3.hostname, '0439:23af:2309::fae7:1234:192.0.2.146%test')
self.assertEqual(p3.username, 'user')
diff --git a/Lib/test/test_uuid.py b/Lib/test/test_uuid.py
index e177464c00f7a6..e7e44c6413c2e2 100755
--- a/Lib/test/test_uuid.py
+++ b/Lib/test/test_uuid.py
@@ -19,7 +19,7 @@ def importable(name):
try:
__import__(name)
return True
- except:
+ except ModuleNotFoundError:
return False
diff --git a/Lib/test/test_venv.py b/Lib/test/test_venv.py
index 0b09010c69d4ea..c39c83f9d0a5c3 100644
--- a/Lib/test/test_venv.py
+++ b/Lib/test/test_venv.py
@@ -26,6 +26,7 @@
requires_resource, copy_python_src_ignore)
from test.support.os_helper import (can_symlink, EnvironmentVarGuard, rmtree,
TESTFN, FakePath)
+from test.support.testcase import ExtraAssertions
import unittest
import venv
from unittest.mock import patch, Mock
@@ -64,7 +65,7 @@ def check_output(cmd, encoding=None):
)
return out, err
-class BaseTest(unittest.TestCase):
+class BaseTest(unittest.TestCase, ExtraAssertions):
"""Base class for venv tests."""
maxDiff = 80 * 50
@@ -111,10 +112,6 @@ def get_text_file_contents(self, *args, encoding='utf-8'):
result = f.read()
return result
- def assertEndsWith(self, string, tail):
- if not string.endswith(tail):
- self.fail(f"String {string!r} does not end with {tail!r}")
-
class BasicTest(BaseTest):
"""Test venv module functionality."""
diff --git a/Lib/test/test_xml_dom_xmlbuilder.py b/Lib/test/test_xml_dom_xmlbuilder.py
new file mode 100644
index 00000000000000..5f5f2eb328df9f
--- /dev/null
+++ b/Lib/test/test_xml_dom_xmlbuilder.py
@@ -0,0 +1,88 @@
+import io
+import unittest
+from http import client
+from test.test_httplib import FakeSocket
+from unittest import mock
+from xml.dom import getDOMImplementation, minidom, xmlbuilder
+
+SMALL_SAMPLE = b"""<?xml version="1.0"?>
+<html xmlns="http://www.w3.org/1999/xhtml" xmlns:xdc="http://www.xml.com/books">
+<!-- A comment -->
+<title>Introduction to XSL</title>
+<hr/>
+<p><xdc:author xdc:attrib="prefixed attribute" attrib="other attrib">A. Namespace</xdc:author></p>
+</html>"""
+
+
+class XMLBuilderTest(unittest.TestCase):
+ def test_entity_resolver(self):
+ body = (
+ b"HTTP/1.1 200 OK\r\nContent-Type: text/xml; charset=utf-8\r\n\r\n"
+ + SMALL_SAMPLE
+ )
+
+ sock = FakeSocket(body)
+ response = client.HTTPResponse(sock)
+ response.begin()
+ attrs = {"open.return_value": response}
+ opener = mock.Mock(**attrs)
+
+ resolver = xmlbuilder.DOMEntityResolver()
+
+ with mock.patch("urllib.request.build_opener") as mock_build:
+ mock_build.return_value = opener
+ source = resolver.resolveEntity(None, "http://example.com/2000/svg")
+
+ self.assertIsInstance(source, xmlbuilder.DOMInputSource)
+ self.assertIsNone(source.publicId)
+ self.assertEqual(source.systemId, "http://example.com/2000/svg")
+ self.assertEqual(source.baseURI, "http://example.com/2000/")
+ self.assertEqual(source.encoding, "utf-8")
+ self.assertIs(source.byteStream, response)
+
+ self.assertIsNone(source.characterStream)
+ self.assertIsNone(source.stringData)
+
+ def test_builder(self):
+ imp = getDOMImplementation()
+ self.assertIsInstance(imp, xmlbuilder.DOMImplementationLS)
+
+ builder = imp.createDOMBuilder(imp.MODE_SYNCHRONOUS, None)
+ self.assertIsInstance(builder, xmlbuilder.DOMBuilder)
+
+ def test_parse_uri(self):
+ body = (
+ b"HTTP/1.1 200 OK\r\nContent-Type: text/xml; charset=utf-8\r\n\r\n"
+ + SMALL_SAMPLE
+ )
+
+ sock = FakeSocket(body)
+ response = client.HTTPResponse(sock)
+ response.begin()
+ attrs = {"open.return_value": response}
+ opener = mock.Mock(**attrs)
+
+ with mock.patch("urllib.request.build_opener") as mock_build:
+ mock_build.return_value = opener
+
+ imp = getDOMImplementation()
+ builder = imp.createDOMBuilder(imp.MODE_SYNCHRONOUS, None)
+ document = builder.parseURI("http://example.com/2000/svg")
+
+ self.assertIsInstance(document, minidom.Document)
+ self.assertEqual(len(document.childNodes), 1)
+
+ def test_parse_with_systemId(self):
+ response = io.BytesIO(SMALL_SAMPLE)
+
+ with mock.patch("urllib.request.urlopen") as mock_open:
+ mock_open.return_value = response
+
+ imp = getDOMImplementation()
+ source = imp.createDOMInputSource()
+ builder = imp.createDOMBuilder(imp.MODE_SYNCHRONOUS, None)
+ source.systemId = "http://example.com/2000/svg"
+ document = builder.parse(source)
+
+ self.assertIsInstance(document, minidom.Document)
+ self.assertEqual(len(document.childNodes), 1)
diff --git a/Lib/test/test_zipfile/test_core.py b/Lib/test/test_zipfile/test_core.py
index 36f7f542872897..4b56f6a380f219 100644
--- a/Lib/test/test_zipfile/test_core.py
+++ b/Lib/test/test_zipfile/test_core.py
@@ -1,3 +1,4 @@
+import _pyio
import array
import contextlib
import importlib.util
@@ -2327,6 +2328,18 @@ def test_read_after_seek(self):
fp.seek(1, os.SEEK_CUR)
self.assertEqual(fp.read(-1), b'men!')
+ def test_uncompressed_interleaved_seek_read(self):
+ # gh-127847: Make sure the position in the archive is correct
+ # in the special case of seeking in a ZIP_STORED entry.
+ with zipfile.ZipFile(TESTFN, "w") as zipf:
+ zipf.writestr("a.txt", "123")
+ zipf.writestr("b.txt", "456")
+ with zipfile.ZipFile(TESTFN, "r") as zipf:
+ with zipf.open("a.txt", "r") as a, zipf.open("b.txt", "r") as b:
+ self.assertEqual(a.read(1), b"1")
+ self.assertEqual(b.seek(1), 1)
+ self.assertEqual(b.read(1), b"5")
+
@requires_bz2()
def test_decompress_without_3rd_party_library(self):
data = b'PK\x05\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
@@ -3442,5 +3455,87 @@ def test_too_short(self):
b"zzz", zipfile._Extra.strip(b"zzz", (self.ZIP64_EXTRA,)))
+class StatIO(_pyio.BytesIO):
+ """Buffer which remembers the number of bytes that were read."""
+
+ def __init__(self):
+ super().__init__()
+ self.bytes_read = 0
+
+ def read(self, size=-1):
+ bs = super().read(size)
+ self.bytes_read += len(bs)
+ return bs
+
+
+class StoredZipExtFileRandomReadTest(unittest.TestCase):
+ """Tests whether an uncompressed, unencrypted zip entry can be randomly
+ seek and read without reading redundant bytes."""
+ def test_stored_seek_and_read(self):
+
+ sio = StatIO()
+ # 20000 bytes
+ txt = b'0123456789' * 2000
+
+ # The seek length must be greater than ZipExtFile.MIN_READ_SIZE
+ # as `ZipExtFile._read2()` reads in blocks of this size and we
+ # need to seek out of the buffered data
+ read_buffer_size = zipfile.ZipExtFile.MIN_READ_SIZE
+ self.assertGreaterEqual(10002, read_buffer_size) # for forward seek test
+ self.assertGreaterEqual(5003, read_buffer_size) # for backward seek test
+ # The read length must be less than MIN_READ_SIZE, since we assume that
+ # only 1 block is read in the test.
+ read_length = 100
+ self.assertGreaterEqual(read_buffer_size, read_length) # for read() calls
+
+ with zipfile.ZipFile(sio, "w", compression=zipfile.ZIP_STORED) as zipf:
+ zipf.writestr("foo.txt", txt)
+
+ # check random seek and read on a file
+ with zipfile.ZipFile(sio, "r") as zipf:
+ with zipf.open("foo.txt", "r") as fp:
+ # Test this optimized read hasn't rewound and read from the
+ # start of the file (as in the case of the unoptimized path)
+
+ # forward seek
+ old_count = sio.bytes_read
+ forward_seek_len = 10002
+ current_pos = 0
+ fp.seek(forward_seek_len, os.SEEK_CUR)
+ current_pos += forward_seek_len
+ self.assertEqual(fp.tell(), current_pos)
+ self.assertEqual(fp._left, fp._compress_left)
+ arr = fp.read(read_length)
+ current_pos += read_length
+ self.assertEqual(fp.tell(), current_pos)
+ self.assertEqual(arr, txt[current_pos - read_length:current_pos])
+ self.assertEqual(fp._left, fp._compress_left)
+ read_count = sio.bytes_read - old_count
+ self.assertLessEqual(read_count, read_buffer_size)
+
+ # backward seek
+ old_count = sio.bytes_read
+ backward_seek_len = 5003
+ fp.seek(-backward_seek_len, os.SEEK_CUR)
+ current_pos -= backward_seek_len
+ self.assertEqual(fp.tell(), current_pos)
+ self.assertEqual(fp._left, fp._compress_left)
+ arr = fp.read(read_length)
+ current_pos += read_length
+ self.assertEqual(fp.tell(), current_pos)
+ self.assertEqual(arr, txt[current_pos - read_length:current_pos])
+ self.assertEqual(fp._left, fp._compress_left)
+ read_count = sio.bytes_read - old_count
+ self.assertLessEqual(read_count, read_buffer_size)
+
+ # eof flags test
+ fp.seek(0, os.SEEK_END)
+ fp.seek(12345, os.SEEK_SET)
+ current_pos = 12345
+ arr = fp.read(read_length)
+ current_pos += read_length
+ self.assertEqual(arr, txt[current_pos - read_length:current_pos])
+
+
if __name__ == "__main__":
unittest.main()
diff --git a/Lib/threading.py b/Lib/threading.py
index 94ea2f08178369..d6b0e2b4fbf161 100644
--- a/Lib/threading.py
+++ b/Lib/threading.py
@@ -690,7 +690,7 @@ def __init__(self, parties, action=None, timeout=None):
"""
if parties < 1:
- raise ValueError("parties must be > 0")
+ raise ValueError("parties must be >= 1")
self._cond = Condition(Lock())
self._action = action
self._timeout = timeout
diff --git a/Lib/tkinter/__init__.py b/Lib/tkinter/__init__.py
index 5352276e874bf5..8b2502b4c0a165 100644
--- a/Lib/tkinter/__init__.py
+++ b/Lib/tkinter/__init__.py
@@ -2263,7 +2263,7 @@ def wm_iconbitmap(self, bitmap=None, default=None):
explicitly. DEFAULT can be the relative path to a .ico file
(example: root.iconbitmap(default='myicon.ico') ). See Tk
documentation for more information."""
- if default:
+ if default is not None:
return self.tk.call('wm', 'iconbitmap', self._w, '-default', default)
else:
return self.tk.call('wm', 'iconbitmap', self._w, bitmap)
@@ -2739,6 +2739,8 @@ def _setup(self, master, cnf):
del cnf['name']
if not name:
name = self.__class__.__name__.lower()
+ if name[-1].isdigit():
+ name += "!" # Avoid duplication when calculating names below
if master._last_child_ids is None:
master._last_child_ids = {}
count = master._last_child_ids.get(name, 0) + 1
diff --git a/Lib/tokenize.py b/Lib/tokenize.py
index 4b4c3cfe16999b..7ca552c4fc590e 100644
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -169,6 +169,7 @@ def __init__(self):
self.prev_row = 1
self.prev_col = 0
self.prev_type = None
+ self.prev_line = ""
self.encoding = None
def add_whitespace(self, start):
@@ -176,14 +177,28 @@ def add_whitespace(self, start):
if row < self.prev_row or row == self.prev_row and col < self.prev_col:
raise ValueError("start ({},{}) precedes previous end ({},{})"
.format(row, col, self.prev_row, self.prev_col))
- row_offset = row - self.prev_row
- if row_offset:
- self.tokens.append("\\\n" * row_offset)
- self.prev_col = 0
+ self.add_backslash_continuation(start)
col_offset = col - self.prev_col
if col_offset:
self.tokens.append(" " * col_offset)
+ def add_backslash_continuation(self, start):
+ """Add backslash continuation characters if the row has increased
+ without encountering a newline token.
+
+ This also inserts the correct amount of whitespace before the backslash.
+ """
+ row = start[0]
+ row_offset = row - self.prev_row
+ if row_offset == 0:
+ return
+
+ newline = '\r\n' if self.prev_line.endswith('\r\n') else '\n'
+ line = self.prev_line.rstrip('\\\r\n')
+ ws = ''.join(_itertools.takewhile(str.isspace, reversed(line)))
+ self.tokens.append(ws + f"\\{newline}" * row_offset)
+ self.prev_col = 0
+
def escape_brackets(self, token):
characters = []
consume_until_next_bracket = False
@@ -243,8 +258,6 @@ def untokenize(self, iterable):
end_line, end_col = end
extra_chars = last_line.count("{{") + last_line.count("}}")
end = (end_line, end_col + extra_chars)
- elif tok_type in (STRING, FSTRING_START) and self.prev_type in (STRING, FSTRING_END):
- self.tokens.append(" ")
self.add_whitespace(start)
self.tokens.append(token)
@@ -253,6 +266,7 @@ def untokenize(self, iterable):
self.prev_row += 1
self.prev_col = 0
self.prev_type = tok_type
+ self.prev_line = line
return "".join(self.tokens)
def compat(self, token, iterable):
@@ -318,16 +332,10 @@ def untokenize(iterable):
with at least two elements, a token number and token value. If
only two tokens are passed, the resulting output is poor.
- Round-trip invariant for full input:
- Untokenized source will match input source exactly
-
- Round-trip invariant for limited input:
- # Output bytes will tokenize back to the input
- t1 = [tok[:2] for tok in tokenize(f.readline)]
- newcode = untokenize(t1)
- readline = BytesIO(newcode).readline
- t2 = [tok[:2] for tok in tokenize(readline)]
- assert t1 == t2
+ The result is guaranteed to tokenize back to match the input so
+ that the conversion is lossless and round-trips are assured.
+ The guarantee applies only to the token type and token string as
+ the spacing between tokens (column positions) may change.
"""
ut = Untokenizer()
out = ut.untokenize(iterable)
diff --git a/Lib/traceback.py b/Lib/traceback.py
index f73149271b9bc9..f5e054190eaf1e 100644
--- a/Lib/traceback.py
+++ b/Lib/traceback.py
@@ -135,7 +135,7 @@ def print_exception(exc, /, value=_sentinel, tb=_sentinel, limit=None, \
def _print_exception_bltin(exc, /):
file = sys.stderr if sys.stderr is not None else sys.__stderr__
- colorize = _colorize.can_colorize()
+ colorize = _colorize.can_colorize(file=file)
return print_exception(exc, limit=BUILTIN_EXCEPTION_LIMIT, file=file, colorize=colorize)
@@ -1283,7 +1283,7 @@ def _format_syntax_error(self, stype, **kwargs):
filename_suffix = ' ({})'.format(self.filename)
text = self.text
- if text is not None:
+ if isinstance(text, str):
# text = " foo\n"
# rtext = " foo"
# ltext = "foo"
@@ -1292,10 +1292,17 @@ def _format_syntax_error(self, stype, **kwargs):
spaces = len(rtext) - len(ltext)
if self.offset is None:
yield ' {}\n'.format(ltext)
- else:
+ elif isinstance(self.offset, int):
offset = self.offset
if self.lineno == self.end_lineno:
- end_offset = self.end_offset if self.end_offset not in {None, 0} else offset
+ end_offset = (
+ self.end_offset
+ if (
+ isinstance(self.end_offset, int)
+ and self.end_offset != 0
+ )
+ else offset
+ )
else:
end_offset = len(rtext) + 1
diff --git a/Lib/turtledemo/__main__.py b/Lib/turtledemo/__main__.py
index df94ebc10c01e8..8dbda474bb3efd 100755
--- a/Lib/turtledemo/__main__.py
+++ b/Lib/turtledemo/__main__.py
@@ -107,7 +107,6 @@
DONE = 4
EVENTDRIVEN = 5
-menufont = ("Arial", 12, NORMAL)
btnfont = ("Arial", 12, 'bold')
txtfont = ['Lucida Console', 10, 'normal']
@@ -299,23 +298,21 @@ def makeLoadDemoMenu(self, master):
for entry in getExampleEntries():
def load(entry=entry):
self.loadfile(entry)
- menu.add_command(label=entry, underline=0,
- font=menufont, command=load)
+ menu.add_command(label=entry, underline=0, command=load)
return menu
def makeFontMenu(self, master):
menu = Menu(master, tearoff=0)
- menu.add_command(label="Decrease (C-'-')", command=self.decrease_size,
- font=menufont)
- menu.add_command(label="Increase (C-'+')", command=self.increase_size,
- font=menufont)
+ menu.add_command(label="Decrease", command=self.decrease_size,
+ accelerator=f"{'Command' if darwin else 'Ctrl'}+-")
+ menu.add_command(label="Increase", command=self.increase_size,
+ accelerator=f"{'Command' if darwin else 'Ctrl'}+=")
menu.add_separator()
for size in font_sizes:
def resize(size=size):
self.set_txtsize(size)
- menu.add_command(label=str(size), underline=0,
- font=menufont, command=resize)
+ menu.add_command(label=str(size), underline=0, command=resize)
return menu
def makeHelpMenu(self, master):
@@ -324,7 +321,7 @@ def makeHelpMenu(self, master):
for help_label, help_file in help_entries:
def show(help_label=help_label, help_file=help_file):
view_text(self.root, help_label, help_file)
- menu.add_command(label=help_label, font=menufont, command=show)
+ menu.add_command(label=help_label, command=show)
return menu
def refreshCanvas(self):
diff --git a/Lib/typing.py b/Lib/typing.py
index bba29db8559da2..fe939a2e68b9bd 100644
--- a/Lib/typing.py
+++ b/Lib/typing.py
@@ -1783,12 +1783,16 @@ def __repr__(self):
return super().__repr__()
def __instancecheck__(self, obj):
- return self.__subclasscheck__(type(obj))
+ for arg in self.__args__:
+ if isinstance(obj, arg):
+ return True
+ return False
def __subclasscheck__(self, cls):
for arg in self.__args__:
if issubclass(cls, arg):
return True
+ return False
def __reduce__(self):
func, (origin, args) = super().__reduce__()
diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py
index 24815952037fef..c72138a33ca6d4 100644
--- a/Lib/urllib/parse.py
+++ b/Lib/urllib/parse.py
@@ -436,6 +436,23 @@ def _checknetloc(netloc):
raise ValueError("netloc '" + netloc + "' contains invalid " +
"characters under NFKC normalization")
+def _check_bracketed_netloc(netloc):
+ # Note that this function must mirror the splitting
+ # done in NetlocResultMixins._hostinfo().
+ hostname_and_port = netloc.rpartition('@')[2]
+ before_bracket, have_open_br, bracketed = hostname_and_port.partition('[')
+ if have_open_br:
+ # No data is allowed before a bracket.
+ if before_bracket:
+ raise ValueError("Invalid IPv6 URL")
+ hostname, _, port = bracketed.partition(']')
+ # No data is allowed after the bracket but before the port delimiter.
+ if port and not port.startswith(":"):
+ raise ValueError("Invalid IPv6 URL")
+ else:
+ hostname, _, port = hostname_and_port.partition(':')
+ _check_bracketed_host(hostname)
+
# Valid bracketed hosts are defined in
# https://www.rfc-editor.org/rfc/rfc3986#page-49 and https://url.spec.whatwg.org/
def _check_bracketed_host(hostname):
@@ -496,8 +513,7 @@ def urlsplit(url, scheme='', allow_fragments=True):
(']' in netloc and '[' not in netloc)):
raise ValueError("Invalid IPv6 URL")
if '[' in netloc and ']' in netloc:
- bracketed_host = netloc.partition('[')[2].partition(']')[0]
- _check_bracketed_host(bracketed_host)
+ _check_bracketed_netloc(netloc)
if allow_fragments and '#' in url:
url, fragment = url.split('#', 1)
if '?' in url:
diff --git a/Lib/urllib/request.py b/Lib/urllib/request.py
index b4882f4129e3a6..a2504a2b9c783c 100644
--- a/Lib/urllib/request.py
+++ b/Lib/urllib/request.py
@@ -878,9 +878,9 @@ def find_user_password(self, realm, authuri):
class HTTPPasswordMgrWithPriorAuth(HTTPPasswordMgrWithDefaultRealm):
- def __init__(self, *args, **kwargs):
+ def __init__(self):
self.authenticated = {}
- super().__init__(*args, **kwargs)
+ super().__init__()
def add_password(self, realm, uri, user, passwd, is_authenticated=False):
self.update_authenticated(uri, is_authenticated)
diff --git a/Lib/urllib/robotparser.py b/Lib/urllib/robotparser.py
index c58565e3945146..409f2b2e48de6e 100644
--- a/Lib/urllib/robotparser.py
+++ b/Lib/urllib/robotparser.py
@@ -11,6 +11,7 @@
"""
import collections
+import urllib.error
import urllib.parse
import urllib.request
@@ -65,6 +66,7 @@ def read(self):
self.disallow_all = True
elif err.code >= 400 and err.code < 500:
self.allow_all = True
+ err.close()
else:
raw = f.read()
self.parse(raw.decode("utf-8").splitlines())
diff --git a/Lib/xml/dom/xmlbuilder.py b/Lib/xml/dom/xmlbuilder.py
index 8a200263497b89..a8852625a2f9a2 100644
--- a/Lib/xml/dom/xmlbuilder.py
+++ b/Lib/xml/dom/xmlbuilder.py
@@ -189,7 +189,7 @@ def parse(self, input):
options.filter = self.filter
options.errorHandler = self.errorHandler
fp = input.byteStream
- if fp is None and options.systemId:
+ if fp is None and input.systemId:
import urllib.request
fp = urllib.request.urlopen(input.systemId)
return self._parse_bytestream(fp, options)
@@ -247,10 +247,12 @@ def _create_opener(self):
def _guess_media_encoding(self, source):
info = source.byteStream.info()
- if "Content-Type" in info:
- for param in info.getplist():
- if param.startswith("charset="):
- return param.split("=", 1)[1].lower()
+ # import email.message
+ # assert isinstance(info, email.message.Message)
+ charset = info.get_param('charset')
+ if charset is not None:
+ return charset.lower()
+ return None
class DOMInputSource(object):
diff --git a/Lib/zipfile/__init__.py b/Lib/zipfile/__init__.py
index 05e917afe487fd..82e307f78e8e3d 100644
--- a/Lib/zipfile/__init__.py
+++ b/Lib/zipfile/__init__.py
@@ -817,7 +817,10 @@ def seek(self, offset, whence=0):
raise ValueError("Can't reposition in the ZIP file while "
"there is an open writing handle on it. "
"Close the writing handle before trying to read.")
- self._file.seek(offset, whence)
+ if whence == os.SEEK_CUR:
+ self._file.seek(self._pos + offset)
+ else:
+ self._file.seek(offset, whence)
self._pos = self._file.tell()
return self._pos
@@ -1160,13 +1163,15 @@ def seek(self, offset, whence=os.SEEK_SET):
self._offset = buff_offset
read_offset = 0
# Fast seek uncompressed unencrypted file
- elif self._compress_type == ZIP_STORED and self._decrypter is None and read_offset > 0:
+ elif self._compress_type == ZIP_STORED and self._decrypter is None and read_offset != 0:
# disable CRC checking after first seeking - it would be invalid
self._expected_crc = None
# seek actual file taking already buffered data into account
read_offset -= len(self._readbuffer) - self._offset
self._fileobj.seek(read_offset, os.SEEK_CUR)
self._left -= read_offset
+ self._compress_left -= read_offset
+ self._eof = self._left <= 0
read_offset = 0
# flush read buffer
self._readbuffer = b''
diff --git a/Makefile.pre.in b/Makefile.pre.in
index 03ca4cb635bd38..46a37ded970573 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -2061,7 +2061,6 @@ testuniversal: all
# This must be run *after* a `make install` has completed the build. The
# `--with-framework-name` argument *cannot* be used when configuring the build.
XCFOLDER:=iOSTestbed.$(MULTIARCH).$(shell date +%s)
-XCRESULT=$(XCFOLDER)/$(MULTIARCH).xcresult
.PHONY: testios
testios:
@if test "$(MACHDEP)" != "ios"; then \
@@ -2080,29 +2079,12 @@ testios:
echo "Cannot find a finalized iOS Python.framework. Have you run 'make install' to finalize the framework build?"; \
exit 1;\
fi
- # Copy the testbed project into the build folder
- cp -r $(srcdir)/iOS/testbed $(XCFOLDER)
- # Copy the framework from the install location to the testbed project.
- cp -r $(PYTHONFRAMEWORKPREFIX)/* $(XCFOLDER)/Python.xcframework/ios-arm64_x86_64-simulator
-
- # Run the test suite for the Xcode project, targeting the iOS simulator.
- # If the suite fails, touch a file in the test folder as a marker
- if ! xcodebuild test -project $(XCFOLDER)/iOSTestbed.xcodeproj -scheme "iOSTestbed" -destination "platform=iOS Simulator,name=iPhone SE (3rd Generation)" -resultBundlePath $(XCRESULT) -derivedDataPath $(XCFOLDER)/DerivedData ; then \
- touch $(XCFOLDER)/failed; \
- fi
- # Regardless of success or failure, extract and print the test output
- xcrun xcresulttool get --path $(XCRESULT) \
- --id $$( \
- xcrun xcresulttool get --path $(XCRESULT) --format json | \
- $(PYTHON_FOR_BUILD) -c "import sys, json; result = json.load(sys.stdin); print(result['actions']['_values'][0]['actionResult']['logRef']['id']['_value'])" \
- ) \
- --format json | \
- $(PYTHON_FOR_BUILD) -c "import sys, json; result = json.load(sys.stdin); print(result['subsections']['_values'][1]['subsections']['_values'][0]['emittedOutput']['_value'])"
+ # Clone the testbed project into the XCFOLDER
+ $(PYTHON_FOR_BUILD) $(srcdir)/iOS/testbed clone --framework $(PYTHONFRAMEWORKPREFIX) "$(XCFOLDER)"
- @if test -e $(XCFOLDER)/failed ; then \
- exit 1; \
- fi
+ # Run the testbed project
+ $(PYTHON_FOR_BUILD) "$(XCFOLDER)" run --verbose -- test -uall --single-process --rerun -W
# Like test, but using --slow-ci which enables all test resources and use
# longer timeout. Run an optional pybuildbot.identify script to include
diff --git a/Misc/ACKS b/Misc/ACKS
index bed3e028d0a18d..54e6076773793b 100644
--- a/Misc/ACKS
+++ b/Misc/ACKS
@@ -188,6 +188,7 @@ Stéphane Blondon
Eric Blossom
Sergey Bobrov
Finn Bock
+Vojtěch Boček
Paul Boddie
Matthew Boedicker
Robin Boerdijk
@@ -1122,6 +1123,7 @@ Gregor Lingl
Everett Lipman
Mirko Liss
Alexander Liu
+Hui Liu
Yuan Liu
Nick Lockwood
Stephanie Lockwood
diff --git a/Misc/NEWS.d/3.10.0b1.rst b/Misc/NEWS.d/3.10.0b1.rst
index 25c6b827146e82..406a5d7853edc0 100644
--- a/Misc/NEWS.d/3.10.0b1.rst
+++ b/Misc/NEWS.d/3.10.0b1.rst
@@ -941,7 +941,7 @@ result from ``entry_points()`` as deprecated.
..
-.. gh: 47383
+.. gh-issue: 47383
.. date: 2021-04-08-19-32-26
.. nonce: YI1hdL
.. section: Library
diff --git a/Misc/NEWS.d/3.11.0a4.rst b/Misc/NEWS.d/3.11.0a4.rst
index 64e2f39ad9db18..a2d36202045887 100644
--- a/Misc/NEWS.d/3.11.0a4.rst
+++ b/Misc/NEWS.d/3.11.0a4.rst
@@ -775,8 +775,8 @@ Ensure that :func:`math.expm1` does not raise on underflow.
.. nonce: s9PuyF
.. section: Library
-Adding :attr:`F_DUP2FD` and :attr:`F_DUP2FD_CLOEXEC` constants from FreeBSD
-into the fcntl module.
+Adding :const:`!F_DUP2FD` and :const:`!F_DUP2FD_CLOEXEC` constants from FreeBSD
+into the :mod:`fcntl` module.
..
diff --git a/Misc/NEWS.d/3.11.0b1.rst b/Misc/NEWS.d/3.11.0b1.rst
index 85cb0f1b5cffbd..87442dbbbd17f5 100644
--- a/Misc/NEWS.d/3.11.0b1.rst
+++ b/Misc/NEWS.d/3.11.0b1.rst
@@ -570,7 +570,7 @@ planned). Patch by Alex Waygood.
..
-.. gh: 78157
+.. gh-issue: 78157
.. date: 2022-05-05-20-40-45
.. nonce: IA_9na
.. section: Library
@@ -1289,7 +1289,7 @@ Deprecate the chunk module.
..
-.. gh: 91498
+.. gh-issue: 91498
.. date: 2022-04-10-08-39-44
.. nonce: 8oII92
.. section: Library
diff --git a/Misc/NEWS.d/3.13.2.rst b/Misc/NEWS.d/3.13.2.rst
new file mode 100644
index 00000000000000..21c97c85427e6d
--- /dev/null
+++ b/Misc/NEWS.d/3.13.2.rst
@@ -0,0 +1,935 @@
+.. date: 2024-12-22-08-54-30
+.. gh-issue: 127592
+.. nonce: iyuFCC
+.. release date: 2025-02-04
+.. section: macOS
+
+Usage of the unified Apple System Log APIs was disabled when the minimum
+macOS version is earlier than 10.12.
+
+..
+
+.. date: 2024-11-28-15-55-48
+.. gh-issue: 127353
+.. nonce: i-XOXg
+.. section: Windows
+
+Allow to force color output on Windows using environment variables. Patch by
+Andrey Efremov.
+
+..
+
+.. date: 2025-01-24-14-49-40
+.. gh-issue: 129248
+.. nonce: JAapG2
+.. section: Tools/Demos
+
+The iOS test runner now strips the log prefix from each line output by the
+test suite.
+
+..
+
+.. date: 2025-01-03-23-51-07
+.. gh-issue: 128152
+.. nonce: IhzElS
+.. section: Tools/Demos
+
+Fix a bug where Argument Clinic's C pre-processor parser tried to parse
+pre-processor directives inside C comments. Patch by Erlend Aasland.
+
+..
+
+.. date: 2024-12-13-13-41-34
+.. gh-issue: 127906
+.. nonce: NuRHlB
+.. section: Tests
+
+Test the limited C API in test_cppext. Patch by Victor Stinner.
+
+..
+
+.. date: 2024-12-09-12-35-44
+.. gh-issue: 127637
+.. nonce: KLx-9I
+.. section: Tests
+
+Add tests for the :mod:`dis` command-line interface. Patch by Bénédikt Tran.
+
+..
+
+.. date: 2024-12-04-15-03-24
+.. gh-issue: 126925
+.. nonce: uxAMK-
+.. section: Tests
+
+iOS test results are now streamed during test execution, and the deprecated
+xcresulttool is no longer used.
+
+..
+
+.. date: 2025-01-28-14-08-03
+.. gh-issue: 105704
+.. nonce: EnhHxu
+.. section: Security
+
+When using :func:`urllib.parse.urlsplit` and :func:`urllib.parse.urlparse`
+host parsing would not reject domain names containing square brackets (``[``
+and ``]``). Square brackets are only valid for IPv6 and IPvFuture hosts
+according to `RFC 3986 Section 3.2.2
+<https://datatracker.ietf.org/doc/html/rfc3986#section-3.2.2>`__.
+
+..
+
+.. date: 2024-12-05-21-35-19
+.. gh-issue: 127655
+.. nonce: xpPoOf
+.. section: Security
+
+Fixed the :class:`!asyncio.selector_events._SelectorSocketTransport`
+transport not pausing writes for the protocol when the buffer reaches the
+high water mark when using :meth:`asyncio.WriteTransport.writelines`.
+
+..
+
+.. date: 2024-10-29-09-15-10
+.. gh-issue: 126108
+.. nonce: eTIjHY
+.. section: Security
+
+Fix a possible ``NULL`` pointer dereference in
+:c:func:`!PySys_AddWarnOptionUnicode`.
+
+..
+
+.. date: 2024-08-06-11-43-08
+.. gh-issue: 80222
+.. nonce: wfR4BU
+.. section: Security
+
+Fix bug in the folding of quoted strings when flattening an email message
+using a modern email policy. Previously when a quoted string was folded so
+that it spanned more than one line, the surrounding quotes and internal
+escapes would be omitted. This could theoretically be used to spoof header
+lines using a carefully constructed quoted string if the resulting rendered
+email was transmitted or re-parsed.
+
+..
+
+.. date: 2024-05-24-21-00-52
+.. gh-issue: 119511
+.. nonce: jKrXQ8
+.. section: Security
+
+Fix a potential denial of service in the :mod:`imaplib` module. When
+connecting to a malicious server, it could cause an arbitrary amount of
+memory to be allocated. On many systems this is harmless as unused virtual
+memory is only a mapping, but if this hit a virtual address size limit it
+could lead to a :exc:`MemoryError` or other process crash. On unusual
+systems or builds where all allocated memory is touched and backed by actual
+ram or storage it could've consumed resources doing so until similarly
+crashing.
+
+..
+
+.. date: 2025-01-31-11-14-05
+.. gh-issue: 129502
+.. nonce: j_ArNo
+.. section: Library
+
+Unlikely errors in preparing arguments for :mod:`ctypes` callback are now
+handled in the same way as errors raised in the callback or in converting
+the result of the callback -- using :func:`sys.unraisablehook` instead of
+:func:`sys.excepthook` and not setting :data:`sys.last_exc` and other
+variables.
+
+..
+
+.. date: 2025-01-29-17-10-00
+.. gh-issue: 129403
+.. nonce: 314159
+.. section: Library
+
+Corrected :exc:`ValueError` message for :class:`asyncio.Barrier` and
+:class:`threading.Barrier`.
+
+..
+
+.. date: 2025-01-29-14-30-54
+.. gh-issue: 129409
+.. nonce: JZbOE6
+.. section: Library
+
+Fix an integer overflow in the :mod:`csv` module when writing a data field
+larger than 2GB.
+
+..
+
+.. date: 2025-01-29-10-53-32
+.. gh-issue: 118761
+.. nonce: i8wjpV
+.. section: Library
+
+Improve import time of :mod:`subprocess` by lazy importing ``locale`` and
+``signal``. Patch by Taneli Hukkinen.
+
+..
+
+.. date: 2025-01-27-14-05-19
+.. gh-issue: 129346
+.. nonce: gZRd3g
+.. section: Library
+
+In :mod:`sqlite3`, handle out-of-memory when creating user-defined SQL
+functions.
+
+..
+
+.. date: 2025-01-21-18-52-32
+.. gh-issue: 129061
+.. nonce: 4idD_B
+.. section: Library
+
+Fix FORCE_COLOR and NO_COLOR when empty strings. Patch by Hugo van Kemenade.
+
+..
+
+.. date: 2025-01-20-13-12-39
+.. gh-issue: 128550
+.. nonce: AJ5TOL
+.. section: Library
+
+Removed an incorrect optimization relating to eager tasks in
+:class:`asyncio.TaskGroup` that resulted in cancellations being missed.
+
+..
+
+.. date: 2025-01-18-16-58-10
+.. gh-issue: 128991
+.. nonce: EzJit9
+.. section: Library
+
+Release the enter frame reference within :mod:`bdb` callback
+
+..
+
+.. date: 2025-01-18-11-04-44
+.. gh-issue: 128978
+.. nonce: hwg7-w
+.. section: Library
+
+Fix a :exc:`NameError` in :func:`!sysconfig.expand_makefile_vars`. Patch by
+Bénédikt Tran.
+
+..
+
+.. date: 2025-01-17-21-33-11
+.. gh-issue: 128961
+.. nonce: XwvyIZ
+.. section: Library
+
+Fix a crash when setting state on an exhausted :class:`array.array`
+iterator.
+
+..
+
+.. date: 2025-01-17-17-20-51
+.. gh-issue: 128894
+.. nonce: gX1-8J
+.. section: Library
+
+Fix ``traceback.TracebackException._format_syntax_error`` not to fail on
+exceptions with custom metadata.
+
+..
+
+.. date: 2025-01-17-11-46-16
+.. gh-issue: 128916
+.. nonce: GEePbO
+.. section: Library
+
+Do not attempt to set ``SO_REUSEPORT`` on sockets of address families other
+than ``AF_INET`` and ``AF_INET6``, as it is meaningless with these address
+families, and the call will fail with Linux kernel 6.12.9 and newer.
+
+..
+
+.. date: 2025-01-10-15-43-52
+.. gh-issue: 128679
+.. nonce: KcfVVR
+.. section: Library
+
+Fix :func:`tracemalloc.stop` race condition. Fix :mod:`tracemalloc` to
+support calling :func:`tracemalloc.stop` in one thread, while another thread
+is tracing memory allocations. Patch by Victor Stinner.
+
+..
+
+.. date: 2025-01-08-22-30-38
+.. gh-issue: 128636
+.. nonce: jQfWXj
+.. section: Library
+
+Fix PyREPL failure when :data:`os.environ` is overwritten with an invalid
+value.
+
+..
+
+.. date: 2025-01-08-03-09-29
+.. gh-issue: 128562
+.. nonce: Mlv-yO
+.. section: Library
+
+Fix possible conflicts in generated :mod:`tkinter` widget names if the
+widget class name ends with a digit.
+
+..
+
+.. date: 2025-01-07-21-48-32
+.. gh-issue: 128498
+.. nonce: n6jtlW
+.. section: Library
+
+Default to stdout isatty for color detection instead of stderr. Patch by
+Hugo van Kemenade.
+
+..
+
+.. date: 2025-01-06-18-41-08
+.. gh-issue: 128552
+.. nonce: fV-f8j
+.. section: Library
+
+Fix cyclic garbage introduced by :meth:`asyncio.loop.create_task` and
+:meth:`asyncio.TaskGroup.create_task` holding a reference to the created
+task if it is eager.
+
+..
+
+.. date: 2025-01-04-11-10-04
+.. gh-issue: 128479
+.. nonce: jvOrF-
+.. section: Library
+
+Fix :func:`!asyncio.staggered.staggered_race` leaking tasks and issuing an
+unhandled exception.
+
+..
+
+.. date: 2025-01-02-13-05-16
+.. gh-issue: 128400
+.. nonce: 5N43fF
+.. section: Library
+
+Fix crash when using :func:`faulthandler.dump_traceback` while other threads
+are active on the :term:`free threaded <free threading>` build.
+
+..
+
+.. date: 2024-12-30-20-48-28
+.. gh-issue: 88834
+.. nonce: RIvgwc
+.. section: Library
+
+Unify the instance check for :class:`typing.Union` and
+:class:`types.UnionType`: :class:`!Union` now uses the instance checks
+against its parameters instead of the subclass checks.
+
+..
+
+.. date: 2024-12-29-13-49-46
+.. gh-issue: 128302
+.. nonce: psRpPN
+.. section: Library
+
+Fix :meth:`!xml.dom.xmlbuilder.DOMEntityResolver.resolveEntity`, which was
+broken by the Python 3.0 transition.
+
+..
+
+.. date: 2024-12-27-16-28-57
+.. gh-issue: 128302
+.. nonce: 2GMvyl
+.. section: Library
+
+Allow :meth:`!xml.dom.xmlbuilder.DOMParser.parse` to correctly handle
+:class:`!xml.dom.xmlbuilder.DOMInputSource` instances that only have a
+:attr:`!systemId` attribute set.
+
+..
+
+.. date: 2024-12-26-11-00-03
+.. gh-issue: 112064
+.. nonce: mCcw3B
+.. section: Library
+
+Fix incorrect handling of negative read sizes in :meth:`HTTPResponse.read
+<http.client.HTTPResponse.read>`. Patch by Yury Manushkin.
+
+..
+
+.. date: 2024-12-23-02-09-44
+.. gh-issue: 58956
+.. nonce: 4OdMdT
+.. section: Library
+
+Fixed a frame reference leak in :mod:`bdb`.
+
+..
+
+.. date: 2024-12-21-03-20-12
+.. gh-issue: 128131
+.. nonce: QpPmNt
+.. section: Library
+
+Completely support random access of uncompressed unencrypted read-only zip
+files obtained by :meth:`ZipFile.open <zipfile.ZipFile.open>`.
+
+..
+
+.. date: 2024-12-20-15-19-38
+.. gh-issue: 112328
+.. nonce: d9GfLR
+.. section: Library
+
+:class:`enum.EnumDict` can now be used without resorting to private API.
+
+..
+
+.. date: 2024-12-20-08-44-12
+.. gh-issue: 127975
+.. nonce: 8HJwu9
+.. section: Library
+
+Avoid reusing quote types in :func:`ast.unparse` if not needed.
+
+..
+
+.. date: 2024-12-18-10-18-55
+.. gh-issue: 128062
+.. nonce: E9oU7-
+.. section: Library
+
+Revert the font of :mod:`turtledemo`'s menu bar to its default value and
+display the shortcut keys in the correct position.
+
+..
+
+.. date: 2024-12-18-00-07-50
+.. gh-issue: 128014
+.. nonce: F3aUbz
+.. section: Library
+
+Fix resetting the default window icon by passing ``default=''`` to the
+:mod:`tkinter` method :meth:`!wm_iconbitmap`.
+
+..
+
+.. date: 2024-12-17-16-48-02
+.. gh-issue: 115514
+.. nonce: 1yOJ7T
+.. section: Library
+
+Fix exceptions and incomplete writes after
+:class:`!asyncio._SelectorTransport` is closed before writes are completed.
+
+..
+
+.. date: 2024-12-17-15-23-40
+.. gh-issue: 41872
+.. nonce: 31LjKY
+.. section: Library
+
+Fix quick extraction of module docstrings from a file in :mod:`pydoc`. It
+now supports docstrings with single quotes, escape sequences, raw string
+literals, and other Python syntax.
+
+..
+
+.. date: 2024-12-17-13-21-52
+.. gh-issue: 127060
+.. nonce: mv2bX6
+.. section: Library
+
+Set TERM environment variable to "dumb" to disable traceback colors in IDLE,
+since IDLE doesn't understand ANSI escape sequences. Patch by Victor
+Stinner.
+
+..
+
+.. date: 2024-12-17-12-41-07
+.. gh-issue: 126742
+.. nonce: l07qvT
+.. section: Library
+
+Fix support of localized error messages reported by :manpage:`dlerror(3)`
+and :manpage:`gdbm_strerror <gdbm(3)>` in :mod:`ctypes` and :mod:`dbm.gnu`
+functions respectively. Patch by Bénédikt Tran.
+
+..
+
+.. date: 2024-12-12-18-25-50
+.. gh-issue: 127873
+.. nonce: WJRwfz
+.. section: Library
+
+When ``-E`` is set, only ignore ``PYTHON_COLORS`` and not
+``FORCE_COLOR``/``NO_COLOR``/``TERM`` when colourising output. Patch by Hugo
+van Kemenade.
+
+..
+
+.. date: 2024-12-12-16-59-42
+.. gh-issue: 127870
+.. nonce: _NFG-3
+.. section: Library
+
+Detect recursive calls in ctypes ``_as_parameter_`` handling. Patch by
+Victor Stinner.
+
+..
+
+.. date: 2024-12-12-07-27-51
+.. gh-issue: 127847
+.. nonce: ksfNKM
+.. section: Library
+
+Fix the position when doing interleaved seeks and reads in uncompressed,
+unencrypted zip files returned by :meth:`zipfile.ZipFile.open`.
+
+..
+
+.. date: 2024-12-08-08-36-18
+.. gh-issue: 127732
+.. nonce: UEKxoa
+.. section: Library
+
+The :mod:`platform` module now correctly detects Windows Server 2025.
+
+..
+
+.. date: 2024-12-04-15-04-12
+.. gh-issue: 126821
+.. nonce: lKCLVV
+.. section: Library
+
+macOS and iOS apps can now choose to redirect stdout and stderr to the
+system log during interpreter configuration.
+
+..
+
+.. date: 2024-12-04-11-01-16
+.. gh-issue: 93312
+.. nonce: 9sB-Qw
+.. section: Library
+
+Include ``<sys/pidfd.h>`` to get ``os.PIDFD_NONBLOCK`` constant. Patch by
+Victor Stinner.
+
+..
+
+.. date: 2024-12-04-10-39-29
+.. gh-issue: 83662
+.. nonce: CG1s3m
+.. section: Library
+
+Add missing ``__class_getitem__`` method to the Python implementation of
+:func:`functools.partial`, to make it compatible with the C version. This is
+mainly relevant for alternative Python implementations like PyPy and
+GraalPy, because CPython will usually use the C-implementation of that
+function.
+
+..
+
+.. date: 2024-12-03-20-28-08
+.. gh-issue: 127586
+.. nonce: zgotYF
+.. section: Library
+
+:class:`multiprocessing.pool.Pool` now properly restores blocked signal
+handlers of the parent thread when creating processes via either *spawn* or
+*forkserver*.
+
+..
+
+.. date: 2024-12-03-14-45-16
+.. gh-issue: 98188
+.. nonce: GX9i2b
+.. section: Library
+
+Fix an issue in :meth:`email.message.Message.get_payload` where data cannot
+be decoded if the Content Transfer Encoding mechanism contains trailing
+whitespaces or additional junk text. Patch by Hui Liu.
+
+..
+
+.. date: 2024-11-28-14-14-46
+.. gh-issue: 127257
+.. nonce: n6-jU9
+.. section: Library
+
+In :mod:`ssl`, system call failures that OpenSSL reports using
+``ERR_LIB_SYS`` are now raised as :exc:`OSError`.
+
+..
+
+.. date: 2024-11-24-22-06-42
+.. gh-issue: 127096
+.. nonce: R7LLpQ
+.. section: Library
+
+Do not recreate unnamed section on every read in
+:class:`configparser.ConfigParser`. Patch by Andrey Efremov.
+
+..
+
+.. date: 2024-11-24-14-53-35
+.. gh-issue: 127196
+.. nonce: 8CBkUa
+.. section: Library
+
+Fix crash when dict with keys in invalid encoding were passed to several
+functions in ``_interpreters`` module.
+
+..
+
+.. date: 2024-11-13-10-44-25
+.. gh-issue: 126775
+.. nonce: a3ubjh
+.. section: Library
+
+Make :func:`linecache.checkcache` thread safe and GC re-entrancy safe.
+
+..
+
+.. date: 2024-11-10-19-45-01
+.. gh-issue: 126332
+.. nonce: WCCKoH
+.. section: Library
+
+Fix _pyrepl crash when entering a double CTRL-Z on an overflowing line.
+
+..
+
+.. date: 2024-10-31-14-31-36
+.. gh-issue: 126225
+.. nonce: vTxGXm
+.. section: Library
+
+:mod:`getopt` and :mod:`optparse` are no longer marked as deprecated. There
+are legitimate reasons to use one of these modules in preference to
+:mod:`argparse`, and none of these modules are at risk of being removed from
+the standard library. Of the three, ``argparse`` remains the recommended
+default choice, *unless* one of the concerns noted at the top of the
+``optparse`` module documentation applies.
+
+..
+
+.. date: 2024-10-26-16-59-02
+.. gh-issue: 125553
+.. nonce: 4pDLzt
+.. section: Library
+
+Fix round-trip invariance for backslash continuations in
+:func:`tokenize.untokenize`.
+
+..
+
+.. date: 2024-09-12-14-24-25
+.. gh-issue: 123987
+.. nonce: 7_OD1p
+.. section: Library
+
+Fixed issue in NamespaceReader where a non-path item in a namespace path,
+such as a sentinel added by an editable installer, would break resource
+loading.
+
+..
+
+.. date: 2024-08-27-18-58-01
+.. gh-issue: 123401
+.. nonce: t4-FpI
+.. section: Library
+
+The :mod:`http.cookies` module now supports parsing obsolete :rfc:`850` date
+formats, in accordance with :rfc:`9110` requirements. Patch by Nano Zheng.
+
+..
+
+.. date: 2024-07-30-11-37-40
+.. gh-issue: 122431
+.. nonce: lAzVtu
+.. section: Library
+
+:func:`readline.append_history_file` now raises a :exc:`ValueError` when
+given a negative value.
+
+..
+
+.. date: 2024-07-14-23-19-20
+.. gh-issue: 119257
+.. nonce: 9OEzcN
+.. section: Library
+
+Show tab completions menu below the current line, which results in less
+janky behaviour, and fixes a cursor movement bug. Patch by Daniel Hollas.
+
+..
+
+.. date: 2025-01-16-18-59-11
+.. gh-issue: 125722
+.. nonce: eHHRga
+.. section: Documentation
+
+Require Sphinx 8.1.3 or later to build the Python documentation. Patch by
+Adam Turner.
+
+..
+
+.. date: 2025-01-14-11-06-41
+.. gh-issue: 67206
+.. nonce: LYKmi5
+.. section: Documentation
+
+Document that :const:`string.printable` is not printable in the POSIX sense.
+In particular, :meth:`string.printable.isprintable() <str.isprintable>`
+returns :const:`False`. Patch by Bénédikt Tran.
+
+..
+
+.. date: 2025-01-28-06-23-59
+.. gh-issue: 129345
+.. nonce: uOjkML
+.. section: Core and Builtins
+
+Fix null pointer dereference in :func:`syslog.openlog` when an audit hook
+raises an exception.
+
+..
+
+.. date: 2025-01-21-23-35-41
+.. gh-issue: 129093
+.. nonce: 0rvETC
+.. section: Core and Builtins
+
+Fix f-strings such as ``f'{expr=}'`` sometimes not displaying the full
+expression when the expression contains ``!=``.
+
+..
+
+.. date: 2025-01-21-19-48-30
+.. gh-issue: 124363
+.. nonce: vOFhHW
+.. section: Core and Builtins
+
+Treat debug expressions in f-string as raw strings. Patch by Pablo Galindo
+
+..
+
+.. date: 2025-01-18-01-06-58
+.. gh-issue: 128799
+.. nonce: vSNagk
+.. section: Core and Builtins
+
+Add frame of ``except*`` to traceback when it wraps a naked exception.
+
+..
+
+.. date: 2025-01-13-12-48-30
+.. gh-issue: 128078
+.. nonce: qOsl9B
+.. section: Core and Builtins
+
+Fix a :exc:`SystemError` when using :func:`anext` with a default tuple
+value. Patch by Bénédikt Tran.
+
+..
+
+.. date: 2025-01-11-12-39-17
+.. gh-issue: 128717
+.. nonce: i65d06
+.. section: Core and Builtins
+
+Fix a crash when setting the recursion limit while other threads are active
+on the :term:`free threaded <free threading>` build.
+
+..
+
+.. date: 2024-12-29-15-09-21
+.. gh-issue: 128330
+.. nonce: IaYL7G
+.. section: Core and Builtins
+
+Restore terminal control characters on REPL exit.
+
+..
+
+.. date: 2024-12-18-14-22-48
+.. gh-issue: 128079
+.. nonce: SUD5le
+.. section: Core and Builtins
+
+Fix a bug where :keyword:`except* ` does not properly check the
+return value of an :exc:`ExceptionGroup`'s :meth:`~BaseExceptionGroup.split`
+function, leading to a crash in some cases. Now when
+:meth:`~BaseExceptionGroup.split` returns an invalid object,
+:keyword:`except* ` raises a :exc:`TypeError` with the original
+raised :exc:`ExceptionGroup` object chained to it.
+
+..
+
+.. date: 2024-12-17-22-28-15
+.. gh-issue: 128030
+.. nonce: H1ptOD
+.. section: Core and Builtins
+
+Avoid error from calling ``PyModule_GetFilenameObject`` on a non-module
+object when importing a non-existent symbol from a non-module object.
+
+..
+
+.. date: 2024-12-13-14-17-24
+.. gh-issue: 127903
+.. nonce: vemHSl
+.. section: Core and Builtins
+
+``Objects/unicodeobject.c``: fix a crash on DEBUG builds in
+``_copy_characters`` when there is nothing to copy.
+
+..
+
+.. date: 2024-12-07-13-06-09
+.. gh-issue: 127599
+.. nonce: tXCZb_
+.. section: Core and Builtins
+
+Fix statistics for increments of object reference counts (in particular,
+when a reference count was increased by more than 1 in a single operation).
+
+..
+
+.. date: 2024-12-06-01-09-40
+.. gh-issue: 127651
+.. nonce: 80cm6j
+.. section: Core and Builtins
+
+When raising :exc:`ImportError` for missing symbols in ``from`` imports, use
+``__file__`` in the error message if ``__spec__.origin`` is not a location
+
+..
+
+.. date: 2024-12-05-19-25-00
+.. gh-issue: 127582
+.. nonce: ogUY2a
+.. section: Core and Builtins
+
+Fix non-thread-safe object resurrection when calling finalizers and watcher
+callbacks in the free threading build.
+
+..
+
+.. date: 2024-12-04-09-52-08
+.. gh-issue: 127434
+.. nonce: RjkGT_
+.. section: Core and Builtins
+
+The iOS compiler shims can now accept arguments with spaces.
+
+..
+
+.. date: 2024-12-03-21-07-06
+.. gh-issue: 127536
+.. nonce: 3jMMrT
+.. section: Core and Builtins
+
+Add missing locks around some list assignment operations in the free
+threading build.
+
+..
+
+.. date: 2024-12-02-18-15-37
+.. gh-issue: 126862
+.. nonce: fdIK7T
+.. section: Core and Builtins
+
+Fix a possible overflow when a class inherits from an absurd number of
+super-classes. Reported by Valery Fedorenko. Patch by Bénédikt Tran.
+
+..
+
+.. date: 2024-11-30-16-13-31
+.. gh-issue: 127349
+.. nonce: ssYd6n
+.. section: Core and Builtins
+
+Fixed the error when resizing terminal in Python REPL. Patch by Semyon
+Moroz.
+
+..
+
+.. date: 2024-10-28-13-18-16
+.. gh-issue: 126076
+.. nonce: MebZuS
+.. section: Core and Builtins
+
+Relocated objects such as ``tuple``, ``bytes`` and ``str`` objects are
+properly tracked by :mod:`tracemalloc` and its associated hooks. Patch by
+Pablo Galindo.
+
+..
+
+.. date: 2024-12-10-14-25-22
+.. gh-issue: 127791
+.. nonce: YRw4GU
+.. section: C API
+
+Fix loss of callbacks after more than one call to
+:c:func:`PyUnstable_AtExit`.
+
+..
+
+.. date: 2025-02-02-09-11-45
+.. gh-issue: 129539
+.. nonce: SYXXCg
+.. section: Build
+
+Don't redefine ``EX_OK`` when the system has the ``sysexits.h`` header.
+
+..
+
+.. date: 2025-01-04-22-39-10
+.. gh-issue: 128472
+.. nonce: Wt5E6M
+.. section: Build
+
+Skip BOLT optimization of functions using computed gotos, fixing errors on
+build with LLVM 19.
+
+..
+
+.. date: 2025-01-02-11-02-45
+.. gh-issue: 123925
+.. nonce: TLlyUi
+.. section: Build
+
+Fix building the :mod:`curses` module on platforms with libncurses but
+without libncursesw.
+
+..
+
+.. date: 2024-12-28-21-05-19
+.. gh-issue: 128321
+.. nonce: 0UvbXw
+.. section: Build
+
+Set ``LIBS`` instead of ``LDFLAGS`` when checking if :mod:`sqlite3` library
+functions are available. This fixes the ordering of linked libraries during
+checks, which was incorrect when using a statically linked ``libsqlite3``.
+
+..
+
+.. date: 2024-12-12-17-21-45
+.. gh-issue: 127865
+.. nonce: 30GDzs
+.. section: Build
+
+Fix build failure on systems without thread-locals support.
diff --git a/Misc/NEWS.d/3.5.3rc1.rst b/Misc/NEWS.d/3.5.3rc1.rst
index 2424604249a65c..cfc729dd82556f 100644
--- a/Misc/NEWS.d/3.5.3rc1.rst
+++ b/Misc/NEWS.d/3.5.3rc1.rst
@@ -1146,7 +1146,7 @@ after a commit.
.. section: Library
A new version of typing.py from https://github.com/python/typing:
-Collection (only for 3.6) (Issue #27598). Add FrozenSet to __all__
+Collection (only for 3.6) (issue #27598). Add FrozenSet to __all__
(upstream #261). Fix crash in _get_type_vars() (upstream #259). Remove the
dict constraint in ForwardRef._eval_type (upstream #252).
diff --git a/Misc/NEWS.d/3.6.0a4.rst b/Misc/NEWS.d/3.6.0a4.rst
index 3abbdecb57038b..6f3f5262e5749d 100644
--- a/Misc/NEWS.d/3.6.0a4.rst
+++ b/Misc/NEWS.d/3.6.0a4.rst
@@ -177,7 +177,7 @@ Support keyword arguments to zlib.decompress(). Patch by Xiang Zhang.
.. section: Library
Prevent segfault after interpreter re-initialization due to ref count
-problem introduced in code for Issue #27038 in 3.6.0a3. Patch by Xiang
+problem introduced in code for issue #27038 in 3.6.0a3. Patch by Xiang
Zhang.
..
diff --git a/Misc/NEWS.d/3.6.0b1.rst b/Misc/NEWS.d/3.6.0b1.rst
index bd54cf601d053b..1e2dcdd6c642bb 100644
--- a/Misc/NEWS.d/3.6.0b1.rst
+++ b/Misc/NEWS.d/3.6.0b1.rst
@@ -1137,7 +1137,7 @@ chunked transfer-encoding.
.. section: Library
A new version of typing.py from https://github.com/python/typing: -
-Collection (only for 3.6) (Issue #27598) - Add FrozenSet to __all__
+Collection (only for 3.6) (issue #27598) - Add FrozenSet to __all__
(upstream #261) - fix crash in _get_type_vars() (upstream #259) - Remove the
dict constraint in ForwardRef._eval_type (upstream #252)
diff --git a/Modules/Setup.stdlib.in b/Modules/Setup.stdlib.in
index 06b30feef43e40..57b90101bbe4a6 100644
--- a/Modules/Setup.stdlib.in
+++ b/Modules/Setup.stdlib.in
@@ -164,7 +164,7 @@
@MODULE__TESTBUFFER_TRUE@_testbuffer _testbuffer.c
@MODULE__TESTINTERNALCAPI_TRUE@_testinternalcapi _testinternalcapi.c _testinternalcapi/test_lock.c _testinternalcapi/pytime.c _testinternalcapi/set.c _testinternalcapi/test_critical_sections.c
@MODULE__TESTCAPI_TRUE@_testcapi _testcapimodule.c _testcapi/vectorcall.c _testcapi/heaptype.c _testcapi/abstract.c _testcapi/unicode.c _testcapi/dict.c _testcapi/set.c _testcapi/list.c _testcapi/tuple.c _testcapi/getargs.c _testcapi/datetime.c _testcapi/docstring.c _testcapi/mem.c _testcapi/watchers.c _testcapi/long.c _testcapi/float.c _testcapi/complex.c _testcapi/numbers.c _testcapi/structmember.c _testcapi/exceptions.c _testcapi/code.c _testcapi/buffer.c _testcapi/pyatomic.c _testcapi/run.c _testcapi/file.c _testcapi/codec.c _testcapi/immortal.c _testcapi/gc.c _testcapi/hash.c _testcapi/time.c _testcapi/bytes.c _testcapi/object.c _testcapi/monitoring.c
-@MODULE__TESTLIMITEDCAPI_TRUE@_testlimitedcapi _testlimitedcapi.c _testlimitedcapi/abstract.c _testlimitedcapi/bytearray.c _testlimitedcapi/bytes.c _testlimitedcapi/complex.c _testlimitedcapi/dict.c _testlimitedcapi/float.c _testlimitedcapi/heaptype_relative.c _testlimitedcapi/list.c _testlimitedcapi/long.c _testlimitedcapi/object.c _testlimitedcapi/pyos.c _testlimitedcapi/set.c _testlimitedcapi/sys.c _testlimitedcapi/tuple.c _testlimitedcapi/unicode.c _testlimitedcapi/vectorcall_limited.c
+@MODULE__TESTLIMITEDCAPI_TRUE@_testlimitedcapi _testlimitedcapi.c _testlimitedcapi/abstract.c _testlimitedcapi/bytearray.c _testlimitedcapi/bytes.c _testlimitedcapi/complex.c _testlimitedcapi/dict.c _testlimitedcapi/eval.c _testlimitedcapi/float.c _testlimitedcapi/heaptype_relative.c _testlimitedcapi/import.c _testlimitedcapi/list.c _testlimitedcapi/long.c _testlimitedcapi/object.c _testlimitedcapi/pyos.c _testlimitedcapi/set.c _testlimitedcapi/sys.c _testlimitedcapi/tuple.c _testlimitedcapi/unicode.c _testlimitedcapi/vectorcall_limited.c _testlimitedcapi/file.c
@MODULE__TESTCLINIC_TRUE@_testclinic _testclinic.c
@MODULE__TESTCLINIC_LIMITED_TRUE@_testclinic_limited _testclinic_limited.c
diff --git a/Modules/_csv.c b/Modules/_csv.c
index 0d315088c7a55a..f9387abe0e663a 100644
--- a/Modules/_csv.c
+++ b/Modules/_csv.c
@@ -1116,7 +1116,7 @@ join_append_data(WriterObj *self, int field_kind, const void *field_data,
int copy_phase)
{
DialectObj *dialect = self->dialect;
- int i;
+ Py_ssize_t i;
Py_ssize_t rec_len;
#define INCLEN \
diff --git a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c
index 0ac5458ea2d349..3d7cb1b1164843 100644
--- a/Modules/_ctypes/_ctypes.c
+++ b/Modules/_ctypes/_ctypes.c
@@ -986,15 +986,8 @@ CDataType_in_dll_impl(PyObject *type, PyTypeObject *cls, PyObject *dll,
#ifdef USE_DLERROR
const char *dlerr = dlerror();
if (dlerr) {
- PyObject *message = PyUnicode_DecodeLocale(dlerr, "surrogateescape");
- if (message) {
- PyErr_SetObject(PyExc_ValueError, message);
- Py_DECREF(message);
- return NULL;
- }
- // Ignore errors from PyUnicode_DecodeLocale,
- // fall back to the generic error below.
- PyErr_Clear();
+ _PyErr_SetLocaleString(PyExc_ValueError, dlerr);
+ return NULL;
}
#endif
#undef USE_DLERROR
@@ -1054,8 +1047,13 @@ CDataType_from_param_impl(PyObject *type, PyTypeObject *cls, PyObject *value)
return NULL;
}
if (as_parameter) {
+ if (_Py_EnterRecursiveCall(" while processing _as_parameter_")) {
+ Py_DECREF(as_parameter);
+ return NULL;
+ }
value = CDataType_from_param_impl(type, cls, as_parameter);
Py_DECREF(as_parameter);
+ _Py_LeaveRecursiveCall();
return value;
}
PyErr_Format(PyExc_TypeError,
@@ -1842,8 +1840,13 @@ c_wchar_p_from_param_impl(PyObject *type, PyTypeObject *cls, PyObject *value)
return NULL;
}
if (as_parameter) {
+ if (_Py_EnterRecursiveCall(" while processing _as_parameter_")) {
+ Py_DECREF(as_parameter);
+ return NULL;
+ }
value = c_wchar_p_from_param_impl(type, cls, as_parameter);
Py_DECREF(as_parameter);
+ _Py_LeaveRecursiveCall();
return value;
}
PyErr_Format(PyExc_TypeError,
@@ -1926,8 +1929,13 @@ c_char_p_from_param_impl(PyObject *type, PyTypeObject *cls, PyObject *value)
return NULL;
}
if (as_parameter) {
+ if (_Py_EnterRecursiveCall(" while processing _as_parameter_")) {
+ Py_DECREF(as_parameter);
+ return NULL;
+ }
value = c_char_p_from_param_impl(type, cls, as_parameter);
Py_DECREF(as_parameter);
+ _Py_LeaveRecursiveCall();
return value;
}
PyErr_Format(PyExc_TypeError,
@@ -2078,8 +2086,13 @@ c_void_p_from_param_impl(PyObject *type, PyTypeObject *cls, PyObject *value)
return NULL;
}
if (as_parameter) {
+ if (_Py_EnterRecursiveCall(" while processing _as_parameter_")) {
+ Py_DECREF(as_parameter);
+ return NULL;
+ }
value = c_void_p_from_param_impl(type, cls, as_parameter);
Py_DECREF(as_parameter);
+ _Py_LeaveRecursiveCall();
return value;
}
PyErr_Format(PyExc_TypeError,
@@ -2435,9 +2448,9 @@ PyCSimpleType_from_param_impl(PyObject *type, PyTypeObject *cls,
return NULL;
}
value = PyCSimpleType_from_param_impl(type, cls, as_parameter);
- _Py_LeaveRecursiveCall();
Py_DECREF(as_parameter);
Py_XDECREF(exc);
+ _Py_LeaveRecursiveCall();
return value;
}
if (exc) {
@@ -3789,21 +3802,14 @@ PyCFuncPtr_FromDll(PyTypeObject *type, PyObject *args, PyObject *kwds)
address = (PPROC)dlsym(handle, name);
if (!address) {
- #ifdef USE_DLERROR
+ #ifdef USE_DLERROR
const char *dlerr = dlerror();
if (dlerr) {
- PyObject *message = PyUnicode_DecodeLocale(dlerr, "surrogateescape");
- if (message) {
- PyErr_SetObject(PyExc_AttributeError, message);
- Py_DECREF(ftuple);
- Py_DECREF(message);
- return NULL;
- }
- // Ignore errors from PyUnicode_DecodeLocale,
- // fall back to the generic error below.
- PyErr_Clear();
+ _PyErr_SetLocaleString(PyExc_AttributeError, dlerr);
+ Py_DECREF(ftuple);
+ return NULL;
}
- #endif
+ #endif
PyErr_Format(PyExc_AttributeError, "function '%s' not found", name);
Py_DECREF(ftuple);
return NULL;
diff --git a/Modules/_ctypes/callbacks.c b/Modules/_ctypes/callbacks.c
index 7b9f6437c7d55f..675a82577fa9b8 100644
--- a/Modules/_ctypes/callbacks.c
+++ b/Modules/_ctypes/callbacks.c
@@ -81,22 +81,6 @@ PyType_Spec cthunk_spec = {
/**************************************************************/
-static void
-PrintError(const char *msg, ...)
-{
- char buf[512];
- PyObject *f = PySys_GetObject("stderr");
- va_list marker;
-
- va_start(marker, msg);
- PyOS_vsnprintf(buf, sizeof(buf), msg, marker);
- va_end(marker);
- if (f != NULL && f != Py_None)
- PyFile_WriteString(buf, f);
- PyErr_Print();
-}
-
-
#ifdef MS_WIN32
/*
* We must call AddRef() on non-NULL COM pointers we receive as arguments
@@ -108,26 +92,23 @@ PrintError(const char *msg, ...)
* after checking for PyObject_IsTrue(), but this would probably be somewhat
* slower.
*/
-static void
+static int
TryAddRef(PyObject *cnv, CDataObject *obj)
{
IUnknown *punk;
PyObject *attrdict = _PyType_GetDict((PyTypeObject *)cnv);
if (!attrdict) {
- return;
+ return 0;
}
int r = PyDict_Contains(attrdict, &_Py_ID(_needs_com_addref_));
if (r <= 0) {
- if (r < 0) {
- PrintError("getting _needs_com_addref_");
- }
- return;
+ return r;
}
punk = *(IUnknown **)obj->b_ptr;
if (punk)
punk->lpVtbl->AddRef(punk);
- return;
+ return 0;
}
#endif
@@ -162,14 +143,13 @@ static void _CallPythonObject(ctypes_state *st,
StgInfo *info;
if (PyStgInfo_FromType(st, cnv, &info) < 0) {
- goto Done;
+ goto Error;
}
if (info && info->getfunc && !_ctypes_simple_instance(st, cnv)) {
PyObject *v = info->getfunc(*pArgs, info->size);
if (!v) {
- PrintError("create argument %zd:\n", i);
- goto Done;
+ goto Error;
}
args[i] = v;
/* XXX XXX XX
@@ -182,24 +162,25 @@ static void _CallPythonObject(ctypes_state *st,
/* Hm, shouldn't we use PyCData_AtAddress() or something like that instead? */
CDataObject *obj = (CDataObject *)_PyObject_CallNoArgs(cnv);
if (!obj) {
- PrintError("create argument %zd:\n", i);
- goto Done;
+ goto Error;
}
if (!CDataObject_Check(st, obj)) {
+ PyErr_Format(PyExc_TypeError,
+ "%R returned unexpected result of type %T", cnv, obj);
Py_DECREF(obj);
- PrintError("unexpected result of create argument %zd:\n", i);
- goto Done;
+ goto Error;
}
memcpy(obj->b_ptr, *pArgs, info->size);
args[i] = (PyObject *)obj;
#ifdef MS_WIN32
- TryAddRef(cnv, obj);
+ if (TryAddRef(cnv, obj) < 0) {
+ goto Error;
+ }
#endif
} else {
- PyErr_SetString(PyExc_TypeError,
- "cannot build parameter");
- PrintError("Parsing argument %zd\n", i);
- goto Done;
+ PyErr_Format(PyExc_TypeError,
+ "cannot build parameter of type %R", cnv);
+ goto Error;
}
/* XXX error handling! */
pArgs++;
@@ -207,8 +188,13 @@ static void _CallPythonObject(ctypes_state *st,
if (flags & (FUNCFLAG_USE_ERRNO | FUNCFLAG_USE_LASTERROR)) {
error_object = _ctypes_get_errobj(st, &space);
- if (error_object == NULL)
+ if (error_object == NULL) {
+ PyErr_FormatUnraisable(
+ "Exception ignored while setting error for "
+ "ctypes callback function %R",
+ callable);
goto Done;
+ }
if (flags & FUNCFLAG_USE_ERRNO) {
int temp = space[0];
space[0] = errno;
@@ -295,6 +281,14 @@ static void _CallPythonObject(ctypes_state *st,
for (j = 0; j < i; j++) {
Py_DECREF(args[j]);
}
+ return;
+
+ Error:
+ PyErr_FormatUnraisable(
+ "Exception ignored while creating argument %zd for "
+ "ctypes callback function %R",
+ i, callable);
+ goto Done;
}
static void closure_fcn(ffi_cif *cif,
diff --git a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c
index b5511f06e3904a..f9864ebb735ddf 100644
--- a/Modules/_ctypes/callproc.c
+++ b/Modules/_ctypes/callproc.c
@@ -1579,10 +1579,11 @@ static PyObject *py_dl_open(PyObject *self, PyObject *args)
Py_XDECREF(name2);
if (!handle) {
const char *errmsg = dlerror();
- if (!errmsg)
- errmsg = "dlopen() error";
- PyErr_SetString(PyExc_OSError,
- errmsg);
+ if (errmsg) {
+ _PyErr_SetLocaleString(PyExc_OSError, errmsg);
+ return NULL;
+ }
+ PyErr_SetString(PyExc_OSError, "dlopen() error");
return NULL;
}
return PyLong_FromVoidPtr(handle);
@@ -1595,8 +1596,12 @@ static PyObject *py_dl_close(PyObject *self, PyObject *args)
if (!PyArg_ParseTuple(args, "O&:dlclose", &_parse_voidp, &handle))
return NULL;
if (dlclose(handle)) {
- PyErr_SetString(PyExc_OSError,
- dlerror());
+ const char *errmsg = dlerror();
+ if (errmsg) {
+ _PyErr_SetLocaleString(PyExc_OSError, errmsg);
+ return NULL;
+ }
+ PyErr_SetString(PyExc_OSError, "dlclose() error");
return NULL;
}
Py_RETURN_NONE;
@@ -1630,21 +1635,14 @@ static PyObject *py_dl_sym(PyObject *self, PyObject *args)
if (ptr) {
return PyLong_FromVoidPtr(ptr);
}
- #ifdef USE_DLERROR
- const char *dlerr = dlerror();
- if (dlerr) {
- PyObject *message = PyUnicode_DecodeLocale(dlerr, "surrogateescape");
- if (message) {
- PyErr_SetObject(PyExc_OSError, message);
- Py_DECREF(message);
- return NULL;
- }
- // Ignore errors from PyUnicode_DecodeLocale,
- // fall back to the generic error below.
- PyErr_Clear();
+ #ifdef USE_DLERROR
+ const char *errmsg = dlerror();
+ if (errmsg) {
+ _PyErr_SetLocaleString(PyExc_OSError, errmsg);
+ return NULL;
}
- #endif
- #undef USE_DLERROR
+ #endif
+ #undef USE_DLERROR
PyErr_Format(PyExc_OSError, "symbol '%s' not found", name);
return NULL;
}
diff --git a/Modules/_cursesmodule.c b/Modules/_cursesmodule.c
index 55038cf09c2876..a7cbe81226eb19 100644
--- a/Modules/_cursesmodule.c
+++ b/Modules/_cursesmodule.c
@@ -137,7 +137,7 @@ typedef chtype attr_t; /* No attr_t type is available */
#define STRICT_SYSV_CURSES
#endif
-#if NCURSES_EXT_FUNCS+0 >= 20170401 && NCURSES_EXT_COLORS+0 >= 20170401
+#if defined(HAVE_NCURSESW) && NCURSES_EXT_FUNCS+0 >= 20170401 && NCURSES_EXT_COLORS+0 >= 20170401
#define _NCURSES_EXTENDED_COLOR_FUNCS 1
#else
#define _NCURSES_EXTENDED_COLOR_FUNCS 0
diff --git a/Modules/_gdbmmodule.c b/Modules/_gdbmmodule.c
index df7fba67810ed0..ea4fe247987e9d 100644
--- a/Modules/_gdbmmodule.c
+++ b/Modules/_gdbmmodule.c
@@ -8,10 +8,11 @@
#endif
#include "Python.h"
+#include "pycore_pyerrors.h" // _PyErr_SetLocaleString()
#include "gdbm.h"
#include
-#include // free()
+#include // free()
#include
#include
@@ -33,6 +34,24 @@ get_gdbm_state(PyObject *module)
return (_gdbm_state *)state;
}
+/*
+ * Set the gdbm error obtained by gdbm_strerror(gdbm_errno).
+ *
+ * If no error message exists, a generic (UTF-8) error message
+ * is used instead.
+ */
+static void
+set_gdbm_error(_gdbm_state *state, const char *generic_error)
+{
+ const char *gdbm_errmsg = gdbm_strerror(gdbm_errno);
+ if (gdbm_errmsg) {
+ _PyErr_SetLocaleString(state->gdbm_error, gdbm_errmsg);
+ }
+ else {
+ PyErr_SetString(state->gdbm_error, generic_error);
+ }
+}
+
/*[clinic input]
module _gdbm
class _gdbm.gdbm "gdbmobject *" "&Gdbmtype"
@@ -91,7 +110,7 @@ newgdbmobject(_gdbm_state *state, const char *file, int flags, int mode)
PyErr_SetFromErrnoWithFilename(state->gdbm_error, file);
}
else {
- PyErr_SetString(state->gdbm_error, gdbm_strerror(gdbm_errno));
+ set_gdbm_error(state, "gdbm_open() error");
}
Py_DECREF(dp);
return NULL;
@@ -136,7 +155,7 @@ gdbm_length(gdbmobject *dp)
PyErr_SetFromErrno(state->gdbm_error);
}
else {
- PyErr_SetString(state->gdbm_error, gdbm_strerror(gdbm_errno));
+ set_gdbm_error(state, "gdbm_count() error");
}
return -1;
}
@@ -286,7 +305,7 @@ gdbm_ass_sub(gdbmobject *dp, PyObject *v, PyObject *w)
PyErr_SetObject(PyExc_KeyError, v);
}
else {
- PyErr_SetString(state->gdbm_error, gdbm_strerror(gdbm_errno));
+ set_gdbm_error(state, "gdbm_delete() error");
}
return -1;
}
@@ -297,11 +316,12 @@ gdbm_ass_sub(gdbmobject *dp, PyObject *v, PyObject *w)
}
errno = 0;
if (gdbm_store(dp->di_dbm, krec, drec, GDBM_REPLACE) < 0) {
- if (errno != 0)
+ if (errno != 0) {
PyErr_SetFromErrno(state->gdbm_error);
- else
- PyErr_SetString(state->gdbm_error,
- gdbm_strerror(gdbm_errno));
+ }
+ else {
+ set_gdbm_error(state, "gdbm_store() error");
+ }
return -1;
}
}
@@ -534,10 +554,12 @@ _gdbm_gdbm_reorganize_impl(gdbmobject *self, PyTypeObject *cls)
check_gdbmobject_open(self, state->gdbm_error);
errno = 0;
if (gdbm_reorganize(self->di_dbm) < 0) {
- if (errno != 0)
+ if (errno != 0) {
PyErr_SetFromErrno(state->gdbm_error);
- else
- PyErr_SetString(state->gdbm_error, gdbm_strerror(gdbm_errno));
+ }
+ else {
+ set_gdbm_error(state, "gdbm_reorganize() error");
+ }
return NULL;
}
Py_RETURN_NONE;
diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c
index 14d9c186151232..d2e7630c22e7e9 100644
--- a/Modules/_hashopenssl.c
+++ b/Modules/_hashopenssl.c
@@ -320,6 +320,7 @@ _setException(PyObject *exc, const char* altmsg, ...)
va_end(vargs);
ERR_clear_error();
+ /* ERR_ERROR_STRING(3) ensures that the messages below are ASCII */
lib = ERR_lib_error_string(errcode);
func = ERR_func_error_string(errcode);
reason = ERR_reason_error_string(errcode);
diff --git a/Modules/_interpretersmodule.c b/Modules/_interpretersmodule.c
index 706316234c7d24..c968b33939c420 100644
--- a/Modules/_interpretersmodule.c
+++ b/Modules/_interpretersmodule.c
@@ -462,7 +462,12 @@ _run_in_interpreter(PyInterpreterState *interp,
// Prep and switch interpreters.
if (_PyXI_Enter(&session, interp, shareables) < 0) {
- assert(!PyErr_Occurred());
+ if (PyErr_Occurred()) {
+ // If an error occurred at this step, it means that interp
+ // was not prepared and switched.
+ return -1;
+ }
+ // Now, apply the error from another interpreter:
PyObject *excinfo = _PyXI_ApplyError(session.error);
if (excinfo != NULL) {
*p_excinfo = excinfo;
diff --git a/Modules/_io/clinic/textio.c.h b/Modules/_io/clinic/textio.c.h
index 5d042df5def511..efa992fca71022 100644
--- a/Modules/_io/clinic/textio.c.h
+++ b/Modules/_io/clinic/textio.c.h
@@ -205,6 +205,9 @@ PyDoc_STRVAR(_io__TextIOBase_encoding__doc__,
"Encoding of the text stream.\n"
"\n"
"Subclasses should override.");
+#if defined(_io__TextIOBase_encoding_DOCSTR)
+# undef _io__TextIOBase_encoding_DOCSTR
+#endif
#define _io__TextIOBase_encoding_DOCSTR _io__TextIOBase_encoding__doc__
#if !defined(_io__TextIOBase_encoding_DOCSTR)
@@ -232,6 +235,9 @@ PyDoc_STRVAR(_io__TextIOBase_newlines__doc__,
"Only line endings translated during reading are considered.\n"
"\n"
"Subclasses should override.");
+#if defined(_io__TextIOBase_newlines_DOCSTR)
+# undef _io__TextIOBase_newlines_DOCSTR
+#endif
#define _io__TextIOBase_newlines_DOCSTR _io__TextIOBase_newlines__doc__
#if !defined(_io__TextIOBase_newlines_DOCSTR)
@@ -257,6 +263,9 @@ PyDoc_STRVAR(_io__TextIOBase_errors__doc__,
"The error setting of the decoder or encoder.\n"
"\n"
"Subclasses should override.");
+#if defined(_io__TextIOBase_errors_DOCSTR)
+# undef _io__TextIOBase_errors_DOCSTR
+#endif
#define _io__TextIOBase_errors_DOCSTR _io__TextIOBase_errors__doc__
#if !defined(_io__TextIOBase_errors_DOCSTR)
@@ -1274,4 +1283,4 @@ _io_TextIOWrapper__CHUNK_SIZE_set(textio *self, PyObject *value, void *Py_UNUSED
return return_value;
}
-/*[clinic end generated code: output=8074cba93ae39cf7 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=6f78a6d99fad4893 input=a9049054013a1b77]*/
diff --git a/Modules/_multiprocessing/semaphore.c b/Modules/_multiprocessing/semaphore.c
index 4de4ee6c78fbd1..8c6a4a279205d3 100644
--- a/Modules/_multiprocessing/semaphore.c
+++ b/Modules/_multiprocessing/semaphore.c
@@ -27,6 +27,8 @@ typedef struct {
char *name;
} SemLockObject;
+#define _SemLockObject_CAST(op) ((SemLockObject *)(op))
+
/*[python input]
class SEM_HANDLE_converter(CConverter):
type = "SEM_HANDLE"
@@ -575,8 +577,9 @@ _multiprocessing_SemLock__rebuild_impl(PyTypeObject *type, SEM_HANDLE handle,
}
static void
-semlock_dealloc(SemLockObject* self)
+semlock_dealloc(PyObject *op)
{
+ SemLockObject *self = _SemLockObject_CAST(op);
PyTypeObject *tp = Py_TYPE(self);
PyObject_GC_UnTrack(self);
if (self->handle != SEM_FAILED)
@@ -717,7 +720,7 @@ _multiprocessing_SemLock___exit___impl(SemLockObject *self,
}
static int
-semlock_traverse(SemLockObject *s, visitproc visit, void *arg)
+semlock_traverse(PyObject *s, visitproc visit, void *arg)
{
Py_VISIT(Py_TYPE(s));
return 0;
diff --git a/Modules/_sqlite/connection.c b/Modules/_sqlite/connection.c
index fc03e4a085c179..62598ecc864120 100644
--- a/Modules/_sqlite/connection.c
+++ b/Modules/_sqlite/connection.c
@@ -958,6 +958,11 @@ step_callback(sqlite3_context *context, int argc, sqlite3_value **params)
assert(ctx != NULL);
aggregate_instance = (PyObject**)sqlite3_aggregate_context(context, sizeof(PyObject*));
+ if (aggregate_instance == NULL) {
+ (void)PyErr_NoMemory();
+ set_sqlite_error(context, "unable to allocate SQLite aggregate context");
+ goto error;
+ }
if (*aggregate_instance == NULL) {
*aggregate_instance = PyObject_CallNoArgs(ctx->callable);
if (!*aggregate_instance) {
diff --git a/Modules/_sqlite/util.c b/Modules/_sqlite/util.c
index 9e8613ef67916e..b0622e66928f47 100644
--- a/Modules/_sqlite/util.c
+++ b/Modules/_sqlite/util.c
@@ -134,6 +134,7 @@ _pysqlite_seterror(pysqlite_state *state, sqlite3 *db)
/* Create and set the exception. */
int extended_errcode = sqlite3_extended_errcode(db);
+ // sqlite3_errmsg() always returns a UTF-8 encoded message
const char *errmsg = sqlite3_errmsg(db);
raise_exception(exc_class, extended_errcode, errmsg);
return extended_errcode;
diff --git a/Modules/_ssl.c b/Modules/_ssl.c
index a7a278de5dd493..8d72779fd63a46 100644
--- a/Modules/_ssl.c
+++ b/Modules/_ssl.c
@@ -661,6 +661,11 @@ PySSL_SetError(PySSLSocket *sslsock, const char *filename, int lineno)
ERR_GET_REASON(e) == SSL_R_CERTIFICATE_VERIFY_FAILED) {
type = state->PySSLCertVerificationErrorObject;
}
+ if (ERR_GET_LIB(e) == ERR_LIB_SYS) {
+ // A system error is being reported; reason is set to errno
+ errno = ERR_GET_REASON(e);
+ return PyErr_SetFromErrno(PyExc_OSError);
+ }
p = PY_SSL_ERROR_SYSCALL;
}
break;
@@ -686,6 +691,11 @@ PySSL_SetError(PySSLSocket *sslsock, const char *filename, int lineno)
errstr = "EOF occurred in violation of protocol";
}
#endif
+ if (ERR_GET_LIB(e) == ERR_LIB_SYS) {
+ // A system error is being reported; reason is set to errno
+ errno = ERR_GET_REASON(e);
+ return PyErr_SetFromErrno(PyExc_OSError);
+ }
break;
}
default:
diff --git a/Modules/_testcapi/clinic/file.c.h b/Modules/_testcapi/clinic/file.c.h
new file mode 100644
index 00000000000000..2a01a63caf7ff3
--- /dev/null
+++ b/Modules/_testcapi/clinic/file.c.h
@@ -0,0 +1,31 @@
+/*[clinic input]
+preserve
+[clinic start generated code]*/
+
+PyDoc_STRVAR(_testcapi_pyfile_newstdprinter__doc__,
+"pyfile_newstdprinter($module, fd, /)\n"
+"--\n"
+"\n");
+
+#define _TESTCAPI_PYFILE_NEWSTDPRINTER_METHODDEF \
+ {"pyfile_newstdprinter", (PyCFunction)_testcapi_pyfile_newstdprinter, METH_O, _testcapi_pyfile_newstdprinter__doc__},
+
+static PyObject *
+_testcapi_pyfile_newstdprinter_impl(PyObject *module, int fd);
+
+static PyObject *
+_testcapi_pyfile_newstdprinter(PyObject *module, PyObject *arg)
+{
+ PyObject *return_value = NULL;
+ int fd;
+
+ fd = PyLong_AsInt(arg);
+ if (fd == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ return_value = _testcapi_pyfile_newstdprinter_impl(module, fd);
+
+exit:
+ return return_value;
+}
+/*[clinic end generated code: output=44002184a5d9dbb9 input=a9049054013a1b77]*/
diff --git a/Modules/_testcapi/exceptions.c b/Modules/_testcapi/exceptions.c
index 42a9915143e6fa..d73625c453f6c4 100644
--- a/Modules/_testcapi/exceptions.c
+++ b/Modules/_testcapi/exceptions.c
@@ -3,6 +3,7 @@
#include "parts.h"
#include "util.h"
+
#include "clinic/exceptions.c.h"
diff --git a/Modules/_testcapi/file.c b/Modules/_testcapi/file.c
index 634563f6ea12cb..bfb794d9ceaf77 100644
--- a/Modules/_testcapi/file.c
+++ b/Modules/_testcapi/file.c
@@ -1,17 +1,37 @@
#include "parts.h"
#include "util.h"
+#include "clinic/file.c.h"
+
+
+/*[clinic input]
+module _testcapi
+[clinic start generated code]*/
+/*[clinic end generated code: output=da39a3ee5e6b4b0d input=6361033e795369fc]*/
+
+
+/*[clinic input]
+_testcapi.pyfile_newstdprinter
+
+ fd: int
+ /
+
+[clinic start generated code]*/
+
+static PyObject *
+_testcapi_pyfile_newstdprinter_impl(PyObject *module, int fd)
+/*[clinic end generated code: output=8a2d1c57b6892db3 input=442f1824142262ea]*/
+{
+ return PyFile_NewStdPrinter(fd);
+}
static PyMethodDef test_methods[] = {
+ _TESTCAPI_PYFILE_NEWSTDPRINTER_METHODDEF
{NULL},
};
int
_PyTestCapi_Init_File(PyObject *m)
{
- if (PyModule_AddFunctions(m, test_methods) < 0){
- return -1;
- }
-
- return 0;
+ return PyModule_AddFunctions(m, test_methods);
}
diff --git a/Modules/_testcapi/mem.c b/Modules/_testcapi/mem.c
index ab4ad934644c38..ecae5ba26226a6 100644
--- a/Modules/_testcapi/mem.c
+++ b/Modules/_testcapi/mem.c
@@ -557,8 +557,9 @@ tracemalloc_untrack(PyObject *self, PyObject *args)
{
unsigned int domain;
PyObject *ptr_obj;
+ int release_gil = 0;
- if (!PyArg_ParseTuple(args, "IO", &domain, &ptr_obj)) {
+ if (!PyArg_ParseTuple(args, "IO|i", &domain, &ptr_obj, &release_gil)) {
return NULL;
}
void *ptr = PyLong_AsVoidPtr(ptr_obj);
@@ -566,7 +567,15 @@ tracemalloc_untrack(PyObject *self, PyObject *args)
return NULL;
}
- int res = PyTraceMalloc_Untrack(domain, (uintptr_t)ptr);
+ int res;
+ if (release_gil) {
+ Py_BEGIN_ALLOW_THREADS
+ res = PyTraceMalloc_Untrack(domain, (uintptr_t)ptr);
+ Py_END_ALLOW_THREADS
+ }
+ else {
+ res = PyTraceMalloc_Untrack(domain, (uintptr_t)ptr);
+ }
if (res < 0) {
PyErr_SetString(PyExc_RuntimeError, "PyTraceMalloc_Untrack error");
return NULL;
diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c
index 01b6bd89d1371e..fd6ca815f22efa 100644
--- a/Modules/_testcapimodule.c
+++ b/Modules/_testcapimodule.c
@@ -2646,18 +2646,6 @@ test_frame_getvarstring(PyObject *self, PyObject *args)
}
-static PyObject *
-eval_get_func_name(PyObject *self, PyObject *func)
-{
- return PyUnicode_FromString(PyEval_GetFuncName(func));
-}
-
-static PyObject *
-eval_get_func_desc(PyObject *self, PyObject *func)
-{
- return PyUnicode_FromString(PyEval_GetFuncDesc(func));
-}
-
static PyObject *
gen_get_code(PyObject *self, PyObject *gen)
{
@@ -3081,52 +3069,6 @@ function_set_closure(PyObject *self, PyObject *args)
Py_RETURN_NONE;
}
-static PyObject *
-check_pyimport_addmodule(PyObject *self, PyObject *args)
-{
- const char *name;
- if (!PyArg_ParseTuple(args, "s", &name)) {
- return NULL;
- }
-
- // test PyImport_AddModuleRef()
- PyObject *module = PyImport_AddModuleRef(name);
- if (module == NULL) {
- return NULL;
- }
- assert(PyModule_Check(module));
- // module is a strong reference
-
- // test PyImport_AddModule()
- PyObject *module2 = PyImport_AddModule(name);
- if (module2 == NULL) {
- goto error;
- }
- assert(PyModule_Check(module2));
- assert(module2 == module);
- // module2 is a borrowed ref
-
- // test PyImport_AddModuleObject()
- PyObject *name_obj = PyUnicode_FromString(name);
- if (name_obj == NULL) {
- goto error;
- }
- PyObject *module3 = PyImport_AddModuleObject(name_obj);
- Py_DECREF(name_obj);
- if (module3 == NULL) {
- goto error;
- }
- assert(PyModule_Check(module3));
- assert(module3 == module);
- // module3 is a borrowed ref
-
- return module;
-
-error:
- Py_DECREF(module);
- return NULL;
-}
-
static PyObject *
test_weakref_capi(PyObject *Py_UNUSED(module), PyObject *Py_UNUSED(args))
@@ -3332,10 +3274,151 @@ test_critical_sections(PyObject *module, PyObject *Py_UNUSED(args))
Py_RETURN_NONE;
}
+struct atexit_data {
+ int called;
+ PyThreadState *tstate;
+ PyInterpreterState *interp;
+};
+
+static void
+atexit_callback(void *data)
+{
+ struct atexit_data *at_data = (struct atexit_data *)data;
+ // Ensure that the callback is from the same interpreter
+ assert(PyThreadState_Get() == at_data->tstate);
+ assert(PyInterpreterState_Get() == at_data->interp);
+ ++at_data->called;
+}
+
+static PyObject *
+test_atexit(PyObject *self, PyObject *Py_UNUSED(args))
+{
+ PyThreadState *oldts = PyThreadState_Swap(NULL);
+ PyThreadState *tstate = Py_NewInterpreter();
+
+ struct atexit_data data = {0};
+ data.tstate = PyThreadState_Get();
+ data.interp = PyInterpreterState_Get();
+
+ int amount = 10;
+ for (int i = 0; i < amount; ++i)
+ {
+ int res = PyUnstable_AtExit(tstate->interp, atexit_callback, (void *)&data);
+ if (res < 0) {
+ Py_EndInterpreter(tstate);
+ PyThreadState_Swap(oldts);
+ PyErr_SetString(PyExc_RuntimeError, "atexit callback failed");
+ return NULL;
+ }
+ }
+
+ Py_EndInterpreter(tstate);
+ PyThreadState_Swap(oldts);
+
+ if (data.called != amount) {
+ PyErr_SetString(PyExc_RuntimeError, "atexit callback not called");
+ return NULL;
+ }
+ Py_RETURN_NONE;
+}
+
+
+static void
+tracemalloc_track_race_thread(void *data)
+{
+ PyTraceMalloc_Track(123, 10, 1);
+ PyTraceMalloc_Untrack(123, 10);
+
+ PyThread_type_lock lock = (PyThread_type_lock)data;
+ PyThread_release_lock(lock);
+}
+
+// gh-128679: Test fix for tracemalloc.stop() race condition
static PyObject *
-pyeval_getlocals(PyObject *module, PyObject *Py_UNUSED(args))
+tracemalloc_track_race(PyObject *self, PyObject *args)
{
- return Py_XNewRef(PyEval_GetLocals());
+#define NTHREAD 50
+ PyObject *tracemalloc = NULL;
+ PyObject *stop = NULL;
+ PyThread_type_lock locks[NTHREAD];
+ memset(locks, 0, sizeof(locks));
+
+ // Call tracemalloc.start()
+ tracemalloc = PyImport_ImportModule("tracemalloc");
+ if (tracemalloc == NULL) {
+ goto error;
+ }
+ PyObject *start = PyObject_GetAttrString(tracemalloc, "start");
+ if (start == NULL) {
+ goto error;
+ }
+ PyObject *res = PyObject_CallNoArgs(start);
+ Py_DECREF(start);
+ if (res == NULL) {
+ goto error;
+ }
+ Py_DECREF(res);
+
+ stop = PyObject_GetAttrString(tracemalloc, "stop");
+ Py_CLEAR(tracemalloc);
+ if (stop == NULL) {
+ goto error;
+ }
+
+ // Start threads
+ for (size_t i = 0; i < NTHREAD; i++) {
+ PyThread_type_lock lock = PyThread_allocate_lock();
+ if (!lock) {
+ PyErr_NoMemory();
+ goto error;
+ }
+ locks[i] = lock;
+ PyThread_acquire_lock(lock, 1);
+
+ unsigned long thread;
+ thread = PyThread_start_new_thread(tracemalloc_track_race_thread,
+ (void*)lock);
+ if (thread == (unsigned long)-1) {
+ PyErr_SetString(PyExc_RuntimeError, "can't start new thread");
+ goto error;
+ }
+ }
+
+ // Call tracemalloc.stop() while threads are running
+ res = PyObject_CallNoArgs(stop);
+ Py_CLEAR(stop);
+ if (res == NULL) {
+ goto error;
+ }
+ Py_DECREF(res);
+
+ // Wait until threads complete with the GIL released
+ Py_BEGIN_ALLOW_THREADS
+ for (size_t i = 0; i < NTHREAD; i++) {
+ PyThread_type_lock lock = locks[i];
+ PyThread_acquire_lock(lock, 1);
+ PyThread_release_lock(lock);
+ }
+ Py_END_ALLOW_THREADS
+
+ // Free threads locks
+ for (size_t i=0; i < NTHREAD; i++) {
+ PyThread_type_lock lock = locks[i];
+ PyThread_free_lock(lock);
+ }
+ Py_RETURN_NONE;
+
+error:
+ Py_CLEAR(tracemalloc);
+ Py_CLEAR(stop);
+ for (size_t i=0; i < NTHREAD; i++) {
+ PyThread_type_lock lock = locks[i];
+ if (lock) {
+ PyThread_free_lock(lock);
+ }
+ }
+ return NULL;
+#undef NTHREAD
}
static PyMethodDef TestMethods[] = {
@@ -3460,8 +3543,6 @@ static PyMethodDef TestMethods[] = {
{"frame_new", frame_new, METH_VARARGS, NULL},
{"frame_getvar", test_frame_getvar, METH_VARARGS, NULL},
{"frame_getvarstring", test_frame_getvarstring, METH_VARARGS, NULL},
- {"eval_get_func_name", eval_get_func_name, METH_O, NULL},
- {"eval_get_func_desc", eval_get_func_desc, METH_O, NULL},
{"gen_get_code", gen_get_code, METH_O, NULL},
{"get_feature_macros", get_feature_macros, METH_NOARGS, NULL},
{"test_code_api", test_code_api, METH_NOARGS, NULL},
@@ -3478,11 +3559,11 @@ static PyMethodDef TestMethods[] = {
{"function_set_kw_defaults", function_set_kw_defaults, METH_VARARGS, NULL},
{"function_get_closure", function_get_closure, METH_O, NULL},
{"function_set_closure", function_set_closure, METH_VARARGS, NULL},
- {"check_pyimport_addmodule", check_pyimport_addmodule, METH_VARARGS},
{"test_weakref_capi", test_weakref_capi, METH_NOARGS},
{"function_set_warning", function_set_warning, METH_NOARGS},
{"test_critical_sections", test_critical_sections, METH_NOARGS},
- {"pyeval_getlocals", pyeval_getlocals, METH_NOARGS},
+ {"test_atexit", test_atexit, METH_NOARGS},
+ {"tracemalloc_track_race", tracemalloc_track_race, METH_NOARGS},
{NULL, NULL} /* sentinel */
};
diff --git a/Modules/_testinternalcapi.c b/Modules/_testinternalcapi.c
index 6185fa313daa09..dd0fe61d42d25e 100644
--- a/Modules/_testinternalcapi.c
+++ b/Modules/_testinternalcapi.c
@@ -1234,39 +1234,6 @@ unicode_transformdecimalandspacetoascii(PyObject *self, PyObject *arg)
return _PyUnicode_TransformDecimalAndSpaceToASCII(arg);
}
-
-struct atexit_data {
- int called;
-};
-
-static void
-callback(void *data)
-{
- ((struct atexit_data *)data)->called += 1;
-}
-
-static PyObject *
-test_atexit(PyObject *self, PyObject *Py_UNUSED(args))
-{
- PyThreadState *oldts = PyThreadState_Swap(NULL);
- PyThreadState *tstate = Py_NewInterpreter();
-
- struct atexit_data data = {0};
- int res = PyUnstable_AtExit(tstate->interp, callback, (void *)&data);
- Py_EndInterpreter(tstate);
- PyThreadState_Swap(oldts);
- if (res < 0) {
- return NULL;
- }
-
- if (data.called == 0) {
- PyErr_SetString(PyExc_RuntimeError, "atexit callback not called");
- return NULL;
- }
- Py_RETURN_NONE;
-}
-
-
static PyObject *
test_pyobject_is_freed(const char *test_name, PyObject *op)
{
@@ -2065,7 +2032,6 @@ static PyMethodDef module_functions[] = {
{"_PyTraceMalloc_GetTraceback", tracemalloc_get_traceback, METH_VARARGS},
{"test_tstate_capi", test_tstate_capi, METH_NOARGS, NULL},
{"_PyUnicode_TransformDecimalAndSpaceToASCII", unicode_transformdecimalandspacetoascii, METH_O},
- {"test_atexit", test_atexit, METH_NOARGS},
{"check_pyobject_forbidden_bytes_is_freed",
check_pyobject_forbidden_bytes_is_freed, METH_NOARGS},
{"check_pyobject_freed_is_freed", check_pyobject_freed_is_freed, METH_NOARGS},
diff --git a/Modules/_testlimitedcapi.c b/Modules/_testlimitedcapi.c
index ec19da217d8223..b183df7751d8db 100644
--- a/Modules/_testlimitedcapi.c
+++ b/Modules/_testlimitedcapi.c
@@ -44,12 +44,18 @@ PyInit__testlimitedcapi(void)
if (_PyTestLimitedCAPI_Init_Dict(mod) < 0) {
return NULL;
}
+ if (_PyTestLimitedCAPI_Init_Eval(mod) < 0) {
+ return NULL;
+ }
if (_PyTestLimitedCAPI_Init_Float(mod) < 0) {
return NULL;
}
if (_PyTestLimitedCAPI_Init_HeaptypeRelative(mod) < 0) {
return NULL;
}
+ if (_PyTestLimitedCAPI_Init_Import(mod) < 0) {
+ return NULL;
+ }
if (_PyTestLimitedCAPI_Init_List(mod) < 0) {
return NULL;
}
@@ -77,5 +83,8 @@ PyInit__testlimitedcapi(void)
if (_PyTestLimitedCAPI_Init_VectorcallLimited(mod) < 0) {
return NULL;
}
+ if (_PyTestLimitedCAPI_Init_File(mod) < 0) {
+ return NULL;
+ }
return mod;
}
diff --git a/Modules/_testlimitedcapi/clinic/file.c.h b/Modules/_testlimitedcapi/clinic/file.c.h
new file mode 100644
index 00000000000000..663619eead2a3a
--- /dev/null
+++ b/Modules/_testlimitedcapi/clinic/file.c.h
@@ -0,0 +1,81 @@
+/*[clinic input]
+preserve
+[clinic start generated code]*/
+
+PyDoc_STRVAR(_testcapi_pyfile_getline__doc__,
+"pyfile_getline($module, file, n, /)\n"
+"--\n"
+"\n");
+
+#define _TESTCAPI_PYFILE_GETLINE_METHODDEF \
+ {"pyfile_getline", (PyCFunction)(void(*)(void))_testcapi_pyfile_getline, METH_FASTCALL, _testcapi_pyfile_getline__doc__},
+
+static PyObject *
+_testcapi_pyfile_getline_impl(PyObject *module, PyObject *file, int n);
+
+static PyObject *
+_testcapi_pyfile_getline(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
+{
+ PyObject *return_value = NULL;
+ PyObject *file;
+ int n;
+
+ if (nargs != 2) {
+ PyErr_Format(PyExc_TypeError, "pyfile_getline expected 2 arguments, got %zd", nargs);
+ goto exit;
+ }
+ file = args[0];
+ n = PyLong_AsInt(args[1]);
+ if (n == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ return_value = _testcapi_pyfile_getline_impl(module, file, n);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(_testcapi_pyfile_writeobject__doc__,
+"pyfile_writeobject($module, obj, file, flags, /)\n"
+"--\n"
+"\n");
+
+#define _TESTCAPI_PYFILE_WRITEOBJECT_METHODDEF \
+ {"pyfile_writeobject", (PyCFunction)(void(*)(void))_testcapi_pyfile_writeobject, METH_FASTCALL, _testcapi_pyfile_writeobject__doc__},
+
+static PyObject *
+_testcapi_pyfile_writeobject_impl(PyObject *module, PyObject *obj,
+ PyObject *file, int flags);
+
+static PyObject *
+_testcapi_pyfile_writeobject(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
+{
+ PyObject *return_value = NULL;
+ PyObject *obj;
+ PyObject *file;
+ int flags;
+
+ if (nargs != 3) {
+ PyErr_Format(PyExc_TypeError, "pyfile_writeobject expected 3 arguments, got %zd", nargs);
+ goto exit;
+ }
+ obj = args[0];
+ file = args[1];
+ flags = PyLong_AsInt(args[2]);
+ if (flags == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ return_value = _testcapi_pyfile_writeobject_impl(module, obj, file, flags);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(_testcapi_pyobject_asfiledescriptor__doc__,
+"pyobject_asfiledescriptor($module, obj, /)\n"
+"--\n"
+"\n");
+
+#define _TESTCAPI_PYOBJECT_ASFILEDESCRIPTOR_METHODDEF \
+ {"pyobject_asfiledescriptor", (PyCFunction)_testcapi_pyobject_asfiledescriptor, METH_O, _testcapi_pyobject_asfiledescriptor__doc__},
+/*[clinic end generated code: output=ea572aaaa01aec7b input=a9049054013a1b77]*/
diff --git a/Modules/_testlimitedcapi/eval.c b/Modules/_testlimitedcapi/eval.c
new file mode 100644
index 00000000000000..28f5746dfb1783
--- /dev/null
+++ b/Modules/_testlimitedcapi/eval.c
@@ -0,0 +1,95 @@
+#include "parts.h"
+#include "util.h"
+
+static PyObject *
+eval_get_func_name(PyObject *self, PyObject *func)
+{
+    return PyUnicode_FromString(PyEval_GetFuncName(func));  /* exercises PyEval_GetFuncName() */
+}
+
+static PyObject *
+eval_get_func_desc(PyObject *self, PyObject *func)
+{
+    return PyUnicode_FromString(PyEval_GetFuncDesc(func));  /* exercises PyEval_GetFuncDesc() */
+}
+
+static PyObject *
+eval_getlocals(PyObject *module, PyObject *Py_UNUSED(args))
+{
+    return Py_XNewRef(PyEval_GetLocals());  /* Py_XNewRef: the call may return NULL */
+}
+
+static PyObject *
+eval_getglobals(PyObject *module, PyObject *Py_UNUSED(args))
+{
+    return Py_XNewRef(PyEval_GetGlobals());  /* Py_XNewRef: the call may return NULL */
+}
+
+static PyObject *
+eval_getbuiltins(PyObject *module, PyObject *Py_UNUSED(args))
+{
+    return Py_XNewRef(PyEval_GetBuiltins());  /* Py_XNewRef: the call may return NULL */
+}
+
+static PyObject *
+eval_getframe(PyObject *module, PyObject *Py_UNUSED(args))
+{
+    return Py_XNewRef(PyEval_GetFrame());  /* Py_XNewRef: no current frame -> NULL */
+}
+
+static PyObject *
+eval_getframe_builtins(PyObject *module, PyObject *Py_UNUSED(args))
+{
+    return PyEval_GetFrameBuiltins();  /* returned as-is (presumably a new ref) — confirm in C API docs */
+}
+
+static PyObject *
+eval_getframe_globals(PyObject *module, PyObject *Py_UNUSED(args))
+{
+    return PyEval_GetFrameGlobals();  /* returned as-is (presumably a new ref) — confirm in C API docs */
+}
+
+static PyObject *
+eval_getframe_locals(PyObject *module, PyObject *Py_UNUSED(args))
+{
+    return PyEval_GetFrameLocals();  /* returned as-is (presumably a new ref) — confirm in C API docs */
+}
+
+static PyObject *
+eval_get_recursion_limit(PyObject *module, PyObject *Py_UNUSED(args))
+{
+    int limit = Py_GetRecursionLimit();
+    return PyLong_FromLong(limit);
+}
+
+static PyObject *
+eval_set_recursion_limit(PyObject *module, PyObject *args)
+{
+    int limit;
+    if (!PyArg_ParseTuple(args, "i", &limit)) {  /* "i": plain C int */
+        return NULL;
+    }
+    Py_SetRecursionLimit(limit);
+    Py_RETURN_NONE;
+}
+
+static PyMethodDef test_methods[] = {
+    {"eval_get_func_name", eval_get_func_name, METH_O, NULL},
+    {"eval_get_func_desc", eval_get_func_desc, METH_O, NULL},
+    {"eval_getlocals", eval_getlocals, METH_NOARGS},
+    {"eval_getglobals", eval_getglobals, METH_NOARGS},
+    {"eval_getbuiltins", eval_getbuiltins, METH_NOARGS},
+    {"eval_getframe", eval_getframe, METH_NOARGS},
+    {"eval_getframe_builtins", eval_getframe_builtins, METH_NOARGS},
+    {"eval_getframe_globals", eval_getframe_globals, METH_NOARGS},
+    {"eval_getframe_locals", eval_getframe_locals, METH_NOARGS},
+    {"eval_get_recursion_limit", eval_get_recursion_limit, METH_NOARGS},
+    {"eval_set_recursion_limit", eval_set_recursion_limit, METH_VARARGS},
+    {NULL},  /* sentinel */
+};
+
+int
+_PyTestLimitedCAPI_Init_Eval(PyObject *m)
+{
+    return PyModule_AddFunctions(m, test_methods);  /* register the table above on module m */
+}
diff --git a/Modules/_testlimitedcapi/file.c b/Modules/_testlimitedcapi/file.c
new file mode 100644
index 00000000000000..e082e3c6700ee7
--- /dev/null
+++ b/Modules/_testlimitedcapi/file.c
@@ -0,0 +1,128 @@
+#include "pyconfig.h"   // Py_GIL_DISABLED
+#ifndef Py_GIL_DISABLED
+   // Need limited C API 3.13 for PyLong_AsInt()
+#  define Py_LIMITED_API 0x030d0000
+#endif
+
+#include "parts.h"
+#include "util.h"
+#include "clinic/file.c.h"
+
+
+/*[clinic input]
+module _testcapi
+[clinic start generated code]*/
+/*[clinic end generated code: output=da39a3ee5e6b4b0d input=6361033e795369fc]*/
+
+
+static PyObject *
+pyfile_fromfd(PyObject *module, PyObject *args)  /* exercises PyFile_FromFd() */
+{
+    int fd;
+    const char *name;
+    Py_ssize_t size;  /* shared length slot for every z# below; the value is unused */
+    const char *mode;
+    int buffering;
+    const char *encoding;
+    const char *errors;
+    const char *newline;
+    int closefd;
+    if (!PyArg_ParseTuple(args,
+                          "iz#z#"
+                          "iz#z#"
+                          "z#i",
+                          &fd, &name, &size, &mode, &size,
+                          &buffering, &encoding, &size, &errors, &size,
+                          &newline, &size, &closefd)) {
+        return NULL;
+    }
+
+    return PyFile_FromFd(fd, name, mode, buffering,
+                         encoding, errors, newline, closefd);
+}
+
+
+/*[clinic input]
+_testcapi.pyfile_getline
+
+    file: object
+    n: int
+    /
+
+[clinic start generated code]*/
+
+static PyObject *
+_testcapi_pyfile_getline_impl(PyObject *module, PyObject *file, int n)
+/*[clinic end generated code: output=137fde2774563266 input=df26686148b3657e]*/
+{
+    return PyFile_GetLine(file, n);  /* exercises PyFile_GetLine() */
+}
+
+
+/*[clinic input]
+_testcapi.pyfile_writeobject
+
+    obj: object
+    file: object
+    flags: int
+    /
+
+[clinic start generated code]*/
+
+static PyObject *
+_testcapi_pyfile_writeobject_impl(PyObject *module, PyObject *obj,
+                                  PyObject *file, int flags)
+/*[clinic end generated code: output=ebb4d802e3db489c input=64a34a3e75b9935a]*/
+{
+    NULLABLE(obj);  /* NULLABLE()/RETURN_INT() come from util.h; presumably maps the NULL-marker arg to NULL — confirm there */
+    NULLABLE(file);
+    RETURN_INT(PyFile_WriteObject(obj, file, flags));
+}
+
+
+static PyObject *
+pyfile_writestring(PyObject *module, PyObject *args)  /* exercises PyFile_WriteString() */
+{
+    const char *str;
+    Py_ssize_t size;  /* z# length; unused */
+    PyObject *file;
+    if (!PyArg_ParseTuple(args, "z#O", &str, &size, &file)) {  /* "z#": str or None (-> NULL) */
+        return NULL;
+    }
+    NULLABLE(file);
+
+    RETURN_INT(PyFile_WriteString(str, file));
+}
+
+
+/*[clinic input]
+_testcapi.pyobject_asfiledescriptor
+
+    obj: object
+    /
+
+[clinic start generated code]*/
+
+static PyObject *
+_testcapi_pyobject_asfiledescriptor(PyObject *module, PyObject *obj)
+/*[clinic end generated code: output=2d640c6a1970c721 input=45fa1171d62b18d7]*/
+{
+    NULLABLE(obj);
+    RETURN_INT(PyObject_AsFileDescriptor(obj));  /* exercises PyObject_AsFileDescriptor() */
+}
+
+
+static PyMethodDef test_methods[] = {
+    {"pyfile_fromfd", pyfile_fromfd, METH_VARARGS},
+    _TESTCAPI_PYFILE_GETLINE_METHODDEF
+    _TESTCAPI_PYFILE_WRITEOBJECT_METHODDEF
+    {"pyfile_writestring", pyfile_writestring, METH_VARARGS},
+    _TESTCAPI_PYOBJECT_ASFILEDESCRIPTOR_METHODDEF
+    {NULL},  /* sentinel */
+};
+
+int
+_PyTestLimitedCAPI_Init_File(PyObject *m)
+{
+    return PyModule_AddFunctions(m, test_methods);
+}
diff --git a/Modules/_testlimitedcapi/import.c b/Modules/_testlimitedcapi/import.c
new file mode 100644
index 00000000000000..3707dbedeea0d9
--- /dev/null
+++ b/Modules/_testlimitedcapi/import.c
@@ -0,0 +1,306 @@
+// Need limited C API version 3.13 for PyImport_AddModuleRef()
+#include "pyconfig.h"   // Py_GIL_DISABLED
+#if !defined(Py_GIL_DISABLED) && !defined(Py_LIMITED_API)
+#  define Py_LIMITED_API 0x030d0000
+#endif
+
+#include "parts.h"
+#include "util.h"
+
+
+/* Test PyImport_GetMagicNumber() */
+static PyObject *
+pyimport_getmagicnumber(PyObject *Py_UNUSED(module), PyObject *Py_UNUSED(args))
+{
+    long magic = PyImport_GetMagicNumber();
+    return PyLong_FromLong(magic);
+}
+
+
+/* Test PyImport_GetMagicTag() */
+static PyObject *
+pyimport_getmagictag(PyObject *Py_UNUSED(module), PyObject *Py_UNUSED(args))
+{
+    const char *tag = PyImport_GetMagicTag();
+    return PyUnicode_FromString(tag);
+}
+
+
+/* Test PyImport_GetModuleDict() */
+static PyObject *
+pyimport_getmoduledict(PyObject *Py_UNUSED(module), PyObject *Py_UNUSED(args))
+{
+    return Py_XNewRef(PyImport_GetModuleDict());  /* Py_XNewRef: result may be NULL */
+}
+
+
+/* Test PyImport_GetModule() */
+static PyObject *
+pyimport_getmodule(PyObject *Py_UNUSED(module), PyObject *name)
+{
+    assert(!PyErr_Occurred());
+    NULLABLE(name);  /* NULLABLE() from util.h; presumably maps the NULL-marker arg to NULL — confirm there */
+    PyObject *module = PyImport_GetModule(name);
+    if (module == NULL && !PyErr_Occurred()) {
+        return Py_NewRef(PyExc_KeyError);  /* sentinel: module absent but no error was set */
+    }
+    return module;
+}
+
+
+/* Test PyImport_AddModuleObject() */
+static PyObject *
+pyimport_addmoduleobject(PyObject *Py_UNUSED(module), PyObject *name)
+{
+    NULLABLE(name);
+    return Py_XNewRef(PyImport_AddModuleObject(name));  /* borrowed result -> take our own ref */
+}
+
+
+/* Test PyImport_AddModule() */
+static PyObject *
+pyimport_addmodule(PyObject *Py_UNUSED(module), PyObject *args)
+{
+    const char *name;
+    Py_ssize_t size;  /* z# length; unused */
+    if (!PyArg_ParseTuple(args, "z#", &name, &size)) {  /* "z#": str or None (-> NULL) */
+        return NULL;
+    }
+
+    return Py_XNewRef(PyImport_AddModule(name));
+}
+
+
+/* Test PyImport_AddModuleRef() */
+static PyObject *
+pyimport_addmoduleref(PyObject *Py_UNUSED(module), PyObject *args)
+{
+    const char *name;
+    Py_ssize_t size;
+    if (!PyArg_ParseTuple(args, "z#", &name, &size)) {
+        return NULL;
+    }
+
+    return PyImport_AddModuleRef(name);  /* returns a strong reference (no Py_XNewRef needed) */
+}
+
+
+/* Test PyImport_Import() */
+static PyObject *
+pyimport_import(PyObject *Py_UNUSED(module), PyObject *name)
+{
+    NULLABLE(name);
+    return PyImport_Import(name);
+}
+
+
+/* Test PyImport_ImportModule() */
+static PyObject *
+pyimport_importmodule(PyObject *Py_UNUSED(module), PyObject *args)
+{
+    const char *name;
+    Py_ssize_t size;
+    if (!PyArg_ParseTuple(args, "z#", &name, &size)) {
+        return NULL;
+    }
+
+    return PyImport_ImportModule(name);
+}
+
+
+/* Test PyImport_ImportModuleNoBlock() */
+static PyObject *
+pyimport_importmodulenoblock(PyObject *Py_UNUSED(module), PyObject *args)
+{
+    const char *name;
+    Py_ssize_t size;
+    if (!PyArg_ParseTuple(args, "z#", &name, &size)) {
+        return NULL;
+    }
+
+    _Py_COMP_DIAG_PUSH
+    _Py_COMP_DIAG_IGNORE_DEPR_DECLS  /* the NoBlock variant is deprecated; keep testing it anyway */
+    return PyImport_ImportModuleNoBlock(name);
+    _Py_COMP_DIAG_POP
+}
+
+
+/* Test PyImport_ImportModuleEx() */
+static PyObject *
+pyimport_importmoduleex(PyObject *Py_UNUSED(module), PyObject *args)
+{
+    const char *name;
+    Py_ssize_t size;
+    PyObject *globals, *locals, *fromlist;
+    if (!PyArg_ParseTuple(args, "z#OOO",
+                          &name, &size, &globals, &locals, &fromlist)) {
+        return NULL;
+    }
+    NULLABLE(globals);
+    NULLABLE(locals);
+    NULLABLE(fromlist);
+
+    return PyImport_ImportModuleEx(name, globals, locals, fromlist);
+}
+
+
+/* Test PyImport_ImportModuleLevel() */
+static PyObject *
+pyimport_importmodulelevel(PyObject *Py_UNUSED(module), PyObject *args)
+{
+    const char *name;
+    Py_ssize_t size;
+    PyObject *globals, *locals, *fromlist;
+    int level;
+    if (!PyArg_ParseTuple(args, "z#OOOi",
+                          &name, &size, &globals, &locals, &fromlist, &level)) {
+        return NULL;
+    }
+    NULLABLE(globals);
+    NULLABLE(locals);
+    NULLABLE(fromlist);
+
+    return PyImport_ImportModuleLevel(name, globals, locals, fromlist, level);
+}
+
+
+/* Test PyImport_ImportModuleLevelObject() */
+static PyObject *
+pyimport_importmodulelevelobject(PyObject *Py_UNUSED(module), PyObject *args)
+{
+    PyObject *name, *globals, *locals, *fromlist;
+    int level;
+    if (!PyArg_ParseTuple(args, "OOOOi",
+                          &name, &globals, &locals, &fromlist, &level)) {
+        return NULL;
+    }
+    NULLABLE(name);
+    NULLABLE(globals);
+    NULLABLE(locals);
+    NULLABLE(fromlist);
+
+    return PyImport_ImportModuleLevelObject(name, globals, locals, fromlist, level);
+}
+
+
+/* Test PyImport_ImportFrozenModule() */
+static PyObject *
+pyimport_importfrozenmodule(PyObject *Py_UNUSED(module), PyObject *args)
+{
+    const char *name;
+    Py_ssize_t size;
+    if (!PyArg_ParseTuple(args, "z#", &name, &size)) {
+        return NULL;
+    }
+
+    RETURN_INT(PyImport_ImportFrozenModule(name));  /* RETURN_INT() from util.h */
+}
+
+
+/* Test PyImport_ImportFrozenModuleObject() */
+static PyObject *
+pyimport_importfrozenmoduleobject(PyObject *Py_UNUSED(module), PyObject *name)
+{
+    NULLABLE(name);
+    RETURN_INT(PyImport_ImportFrozenModuleObject(name));
+}
+
+
+/* Test PyImport_ExecCodeModule() */
+static PyObject *
+pyimport_executecodemodule(PyObject *Py_UNUSED(module), PyObject *args)
+{
+    const char *name;
+    Py_ssize_t size;
+    PyObject *code;
+    if (!PyArg_ParseTuple(args, "z#O", &name, &size, &code)) {
+        return NULL;
+    }
+    NULLABLE(code);
+
+    return PyImport_ExecCodeModule(name, code);
+}
+
+
+/* Test PyImport_ExecCodeModuleEx() */
+static PyObject *
+pyimport_executecodemoduleex(PyObject *Py_UNUSED(module), PyObject *args)
+{
+    const char *name;
+    Py_ssize_t size;  /* shared z# length slot; unused */
+    PyObject *code;
+    const char *pathname;
+    if (!PyArg_ParseTuple(args, "z#Oz#", &name, &size, &code, &pathname, &size)) {
+        return NULL;
+    }
+    NULLABLE(code);
+
+    return PyImport_ExecCodeModuleEx(name, code, pathname);
+}
+
+
+/* Test PyImport_ExecCodeModuleWithPathnames() */
+static PyObject *
+pyimport_executecodemodulewithpathnames(PyObject *Py_UNUSED(module), PyObject *args)
+{
+    const char *name;
+    Py_ssize_t size;
+    PyObject *code;
+    const char *pathname;
+    const char *cpathname;
+    if (!PyArg_ParseTuple(args, "z#Oz#z#", &name, &size, &code, &pathname, &size, &cpathname, &size)) {
+        return NULL;
+    }
+    NULLABLE(code);
+
+    return PyImport_ExecCodeModuleWithPathnames(name, code,
+                                                pathname, cpathname);
+}
+
+
+/* Test PyImport_ExecCodeModuleObject() */
+static PyObject *
+pyimport_executecodemoduleobject(PyObject *Py_UNUSED(module), PyObject *args)
+{
+    PyObject *name, *code, *pathname, *cpathname;
+    if (!PyArg_ParseTuple(args, "OOOO", &name, &code, &pathname, &cpathname)) {
+        return NULL;
+    }
+    NULLABLE(name);
+    NULLABLE(code);
+    NULLABLE(pathname);
+    NULLABLE(cpathname);
+
+    return PyImport_ExecCodeModuleObject(name, code, pathname, cpathname);
+}
+
+
+static PyMethodDef test_methods[] = {
+    {"PyImport_GetMagicNumber", pyimport_getmagicnumber, METH_NOARGS},
+    {"PyImport_GetMagicTag", pyimport_getmagictag, METH_NOARGS},
+    {"PyImport_GetModuleDict", pyimport_getmoduledict, METH_NOARGS},
+    {"PyImport_GetModule", pyimport_getmodule, METH_O},
+    {"PyImport_AddModuleObject", pyimport_addmoduleobject, METH_O},
+    {"PyImport_AddModule", pyimport_addmodule, METH_VARARGS},
+    {"PyImport_AddModuleRef", pyimport_addmoduleref, METH_VARARGS},
+    {"PyImport_Import", pyimport_import, METH_O},
+    {"PyImport_ImportModule", pyimport_importmodule, METH_VARARGS},
+    {"PyImport_ImportModuleNoBlock", pyimport_importmodulenoblock, METH_VARARGS},
+    {"PyImport_ImportModuleEx", pyimport_importmoduleex, METH_VARARGS},
+    {"PyImport_ImportModuleLevel", pyimport_importmodulelevel, METH_VARARGS},
+    {"PyImport_ImportModuleLevelObject", pyimport_importmodulelevelobject, METH_VARARGS},
+    {"PyImport_ImportFrozenModule", pyimport_importfrozenmodule, METH_VARARGS},
+    {"PyImport_ImportFrozenModuleObject", pyimport_importfrozenmoduleobject, METH_O},
+    {"PyImport_ExecCodeModule", pyimport_executecodemodule, METH_VARARGS},
+    {"PyImport_ExecCodeModuleEx", pyimport_executecodemoduleex, METH_VARARGS},
+    {"PyImport_ExecCodeModuleWithPathnames", pyimport_executecodemodulewithpathnames, METH_VARARGS},
+    {"PyImport_ExecCodeModuleObject", pyimport_executecodemoduleobject, METH_VARARGS},
+    {NULL},  /* sentinel */
+};
+
+
+int
+_PyTestLimitedCAPI_Init_Import(PyObject *module)
+{
+    return PyModule_AddFunctions(module, test_methods);
+}
diff --git a/Modules/_testlimitedcapi/parts.h b/Modules/_testlimitedcapi/parts.h
index 140396d6b990ff..11b2e5c6b833bb 100644
--- a/Modules/_testlimitedcapi/parts.h
+++ b/Modules/_testlimitedcapi/parts.h
@@ -27,8 +27,10 @@ int _PyTestLimitedCAPI_Init_ByteArray(PyObject *module);
int _PyTestLimitedCAPI_Init_Bytes(PyObject *module);
int _PyTestLimitedCAPI_Init_Complex(PyObject *module);
int _PyTestLimitedCAPI_Init_Dict(PyObject *module);
+int _PyTestLimitedCAPI_Init_Eval(PyObject *module);
int _PyTestLimitedCAPI_Init_Float(PyObject *module);
int _PyTestLimitedCAPI_Init_HeaptypeRelative(PyObject *module);
+int _PyTestLimitedCAPI_Init_Import(PyObject *module);
int _PyTestLimitedCAPI_Init_Object(PyObject *module);
int _PyTestLimitedCAPI_Init_List(PyObject *module);
int _PyTestLimitedCAPI_Init_Long(PyObject *module);
@@ -38,5 +40,6 @@ int _PyTestLimitedCAPI_Init_Sys(PyObject *module);
int _PyTestLimitedCAPI_Init_Tuple(PyObject *module);
int _PyTestLimitedCAPI_Init_Unicode(PyObject *module);
int _PyTestLimitedCAPI_Init_VectorcallLimited(PyObject *module);
+int _PyTestLimitedCAPI_Init_File(PyObject *module);
#endif // Py_TESTLIMITEDCAPI_PARTS_H
diff --git a/Modules/_tracemalloc.c b/Modules/_tracemalloc.c
index 887a1e820e250e..0b85187e5fce07 100644
--- a/Modules/_tracemalloc.c
+++ b/Modules/_tracemalloc.c
@@ -223,10 +223,5 @@ PyInit__tracemalloc(void)
PyUnstable_Module_SetGIL(m, Py_MOD_GIL_NOT_USED);
#endif
- if (_PyTraceMalloc_Init() < 0) {
- Py_DECREF(m);
- return NULL;
- }
-
return m;
}
diff --git a/Modules/_winapi.c b/Modules/_winapi.c
index bd80c5c94fe36d..d51586e60257d2 100644
--- a/Modules/_winapi.c
+++ b/Modules/_winapi.c
@@ -1048,7 +1048,7 @@ getenvironment(PyObject* environment)
}
normalized_environment = normalize_environment(environment);
- if (normalize_environment == NULL) {
+ if (normalized_environment == NULL) {
return NULL;
}
diff --git a/Modules/arraymodule.c b/Modules/arraymodule.c
index 679222c3f03d23..600302e1183f99 100644
--- a/Modules/arraymodule.c
+++ b/Modules/arraymodule.c
@@ -3074,11 +3074,16 @@ array_arrayiterator___setstate__(arrayiterobject *self, PyObject *state)
Py_ssize_t index = PyLong_AsSsize_t(state);
if (index == -1 && PyErr_Occurred())
return NULL;
- if (index < 0)
- index = 0;
- else if (index > Py_SIZE(self->ao))
- index = Py_SIZE(self->ao); /* iterator exhausted */
- self->index = index;
+ arrayobject *ao = self->ao;
+ if (ao != NULL) {
+ if (index < 0) {
+ index = 0;
+ }
+ else if (index > Py_SIZE(ao)) {
+ index = Py_SIZE(ao); /* iterator exhausted */
+ }
+ self->index = index;
+ }
Py_RETURN_NONE;
}
diff --git a/Modules/atexitmodule.c b/Modules/atexitmodule.c
index 297a8d74ba3bf4..c009235b7a36c2 100644
--- a/Modules/atexitmodule.c
+++ b/Modules/atexitmodule.c
@@ -27,7 +27,10 @@ int
PyUnstable_AtExit(PyInterpreterState *interp,
atexit_datacallbackfunc func, void *data)
{
- assert(interp == _PyInterpreterState_GET());
+ PyThreadState *tstate = _PyThreadState_GET();
+ _Py_EnsureTstateNotNULL(tstate);
+ assert(tstate->interp == interp);
+
atexit_callback *callback = PyMem_Malloc(sizeof(atexit_callback));
if (callback == NULL) {
PyErr_NoMemory();
@@ -38,12 +41,13 @@ PyUnstable_AtExit(PyInterpreterState *interp,
callback->next = NULL;
struct atexit_state *state = &interp->atexit;
- if (state->ll_callbacks == NULL) {
+ atexit_callback *top = state->ll_callbacks;
+ if (top == NULL) {
state->ll_callbacks = callback;
- state->last_ll_callback = callback;
}
else {
- state->last_ll_callback->next = callback;
+ callback->next = top;
+ state->ll_callbacks = callback;
}
return 0;
}
diff --git a/Modules/clinic/_ssl.c.h b/Modules/clinic/_ssl.c.h
index deac4e192c3bad..7c1ea6ad1798eb 100644
--- a/Modules/clinic/_ssl.c.h
+++ b/Modules/clinic/_ssl.c.h
@@ -264,6 +264,9 @@ PyDoc_STRVAR(_ssl__SSLSocket_context__doc__,
"This is typically used from within a callback function set by the sni_callback\n"
"on the SSLContext to change the certificate information associated with the\n"
"SSLSocket before the cryptographic exchange handshake messages.");
+#if defined(_ssl__SSLSocket_context_DOCSTR)
+# undef _ssl__SSLSocket_context_DOCSTR
+#endif
#define _ssl__SSLSocket_context_DOCSTR _ssl__SSLSocket_context__doc__
#if !defined(_ssl__SSLSocket_context_DOCSTR)
@@ -318,6 +321,9 @@ _ssl__SSLSocket_context_set(PySSLSocket *self, PyObject *value, void *Py_UNUSED(
PyDoc_STRVAR(_ssl__SSLSocket_server_side__doc__,
"Whether this is a server-side socket.");
+#if defined(_ssl__SSLSocket_server_side_DOCSTR)
+# undef _ssl__SSLSocket_server_side_DOCSTR
+#endif
#define _ssl__SSLSocket_server_side_DOCSTR _ssl__SSLSocket_server_side__doc__
#if !defined(_ssl__SSLSocket_server_side_DOCSTR)
@@ -347,6 +353,9 @@ _ssl__SSLSocket_server_side_get(PySSLSocket *self, void *Py_UNUSED(context))
PyDoc_STRVAR(_ssl__SSLSocket_server_hostname__doc__,
"The currently set server hostname (for SNI).");
+#if defined(_ssl__SSLSocket_server_hostname_DOCSTR)
+# undef _ssl__SSLSocket_server_hostname_DOCSTR
+#endif
#define _ssl__SSLSocket_server_hostname_DOCSTR _ssl__SSLSocket_server_hostname__doc__
#if !defined(_ssl__SSLSocket_server_hostname_DOCSTR)
@@ -378,6 +387,9 @@ PyDoc_STRVAR(_ssl__SSLSocket_owner__doc__,
"The Python-level owner of this object.\n"
"\n"
"Passed as \"self\" in servername callback.");
+#if defined(_ssl__SSLSocket_owner_DOCSTR)
+# undef _ssl__SSLSocket_owner_DOCSTR
+#endif
#define _ssl__SSLSocket_owner_DOCSTR _ssl__SSLSocket_owner__doc__
#if !defined(_ssl__SSLSocket_owner_DOCSTR)
@@ -667,6 +679,9 @@ _ssl__SSLSocket_verify_client_post_handshake(PySSLSocket *self, PyObject *Py_UNU
PyDoc_STRVAR(_ssl__SSLSocket_session__doc__,
"The underlying SSLSession object.");
+#if defined(_ssl__SSLSocket_session_DOCSTR)
+# undef _ssl__SSLSocket_session_DOCSTR
+#endif
#define _ssl__SSLSocket_session_DOCSTR _ssl__SSLSocket_session__doc__
#if !defined(_ssl__SSLSocket_session_DOCSTR)
@@ -721,6 +736,9 @@ _ssl__SSLSocket_session_set(PySSLSocket *self, PyObject *value, void *Py_UNUSED(
PyDoc_STRVAR(_ssl__SSLSocket_session_reused__doc__,
"Was the client session reused during handshake?");
+#if defined(_ssl__SSLSocket_session_reused_DOCSTR)
+# undef _ssl__SSLSocket_session_reused_DOCSTR
+#endif
#define _ssl__SSLSocket_session_reused_DOCSTR _ssl__SSLSocket_session_reused__doc__
#if !defined(_ssl__SSLSocket_session_reused_DOCSTR)
@@ -1076,6 +1094,9 @@ _ssl__SSLContext_maximum_version_set(PySSLContext *self, PyObject *value, void *
PyDoc_STRVAR(_ssl__SSLContext_num_tickets__doc__,
"Control the number of TLSv1.3 session tickets.");
+#if defined(_ssl__SSLContext_num_tickets_DOCSTR)
+# undef _ssl__SSLContext_num_tickets_DOCSTR
+#endif
#define _ssl__SSLContext_num_tickets_DOCSTR _ssl__SSLContext_num_tickets__doc__
#if !defined(_ssl__SSLContext_num_tickets_DOCSTR)
@@ -1130,6 +1151,9 @@ _ssl__SSLContext_num_tickets_set(PySSLContext *self, PyObject *value, void *Py_U
PyDoc_STRVAR(_ssl__SSLContext_security_level__doc__,
"The current security level.");
+#if defined(_ssl__SSLContext_security_level_DOCSTR)
+# undef _ssl__SSLContext_security_level_DOCSTR
+#endif
#define _ssl__SSLContext_security_level_DOCSTR _ssl__SSLContext_security_level__doc__
#if !defined(_ssl__SSLContext_security_level_DOCSTR)
@@ -1773,6 +1797,9 @@ PyDoc_STRVAR(_ssl__SSLContext_sni_callback__doc__,
"with the SSLSocket, the server name as a string, and the SSLContext object.\n"
"\n"
"See RFC 6066 for details of the SNI extension.");
+#if defined(_ssl__SSLContext_sni_callback_DOCSTR)
+# undef _ssl__SSLContext_sni_callback_DOCSTR
+#endif
#define _ssl__SSLContext_sni_callback_DOCSTR _ssl__SSLContext_sni_callback__doc__
#if !defined(_ssl__SSLContext_sni_callback_DOCSTR)
@@ -2092,6 +2119,9 @@ _ssl_MemoryBIO(PyTypeObject *type, PyObject *args, PyObject *kwargs)
PyDoc_STRVAR(_ssl_MemoryBIO_pending__doc__,
"The number of bytes pending in the memory BIO.");
+#if defined(_ssl_MemoryBIO_pending_DOCSTR)
+# undef _ssl_MemoryBIO_pending_DOCSTR
+#endif
#define _ssl_MemoryBIO_pending_DOCSTR _ssl_MemoryBIO_pending__doc__
#if !defined(_ssl_MemoryBIO_pending_DOCSTR)
@@ -2121,6 +2151,9 @@ _ssl_MemoryBIO_pending_get(PySSLMemoryBIO *self, void *Py_UNUSED(context))
PyDoc_STRVAR(_ssl_MemoryBIO_eof__doc__,
"Whether the memory BIO is at EOF.");
+#if defined(_ssl_MemoryBIO_eof_DOCSTR)
+# undef _ssl_MemoryBIO_eof_DOCSTR
+#endif
#define _ssl_MemoryBIO_eof_DOCSTR _ssl_MemoryBIO_eof__doc__
#if !defined(_ssl_MemoryBIO_eof_DOCSTR)
@@ -2254,6 +2287,9 @@ _ssl_MemoryBIO_write_eof(PySSLMemoryBIO *self, PyObject *Py_UNUSED(ignored))
PyDoc_STRVAR(_ssl_SSLSession_time__doc__,
"Session creation time (seconds since epoch).");
+#if defined(_ssl_SSLSession_time_DOCSTR)
+# undef _ssl_SSLSession_time_DOCSTR
+#endif
#define _ssl_SSLSession_time_DOCSTR _ssl_SSLSession_time__doc__
#if !defined(_ssl_SSLSession_time_DOCSTR)
@@ -2283,6 +2319,9 @@ _ssl_SSLSession_time_get(PySSLSession *self, void *Py_UNUSED(context))
PyDoc_STRVAR(_ssl_SSLSession_timeout__doc__,
"Session timeout (delta in seconds).");
+#if defined(_ssl_SSLSession_timeout_DOCSTR)
+# undef _ssl_SSLSession_timeout_DOCSTR
+#endif
#define _ssl_SSLSession_timeout_DOCSTR _ssl_SSLSession_timeout__doc__
#if !defined(_ssl_SSLSession_timeout_DOCSTR)
@@ -2312,6 +2351,9 @@ _ssl_SSLSession_timeout_get(PySSLSession *self, void *Py_UNUSED(context))
PyDoc_STRVAR(_ssl_SSLSession_ticket_lifetime_hint__doc__,
"Ticket life time hint.");
+#if defined(_ssl_SSLSession_ticket_lifetime_hint_DOCSTR)
+# undef _ssl_SSLSession_ticket_lifetime_hint_DOCSTR
+#endif
#define _ssl_SSLSession_ticket_lifetime_hint_DOCSTR _ssl_SSLSession_ticket_lifetime_hint__doc__
#if !defined(_ssl_SSLSession_ticket_lifetime_hint_DOCSTR)
@@ -2341,6 +2383,9 @@ _ssl_SSLSession_ticket_lifetime_hint_get(PySSLSession *self, void *Py_UNUSED(con
PyDoc_STRVAR(_ssl_SSLSession_id__doc__,
"Session ID.");
+#if defined(_ssl_SSLSession_id_DOCSTR)
+# undef _ssl_SSLSession_id_DOCSTR
+#endif
#define _ssl_SSLSession_id_DOCSTR _ssl_SSLSession_id__doc__
#if !defined(_ssl_SSLSession_id_DOCSTR)
@@ -2370,6 +2415,9 @@ _ssl_SSLSession_id_get(PySSLSession *self, void *Py_UNUSED(context))
PyDoc_STRVAR(_ssl_SSLSession_has_ticket__doc__,
"Does the session contain a ticket?");
+#if defined(_ssl_SSLSession_has_ticket_DOCSTR)
+# undef _ssl_SSLSession_has_ticket_DOCSTR
+#endif
#define _ssl_SSLSession_has_ticket_DOCSTR _ssl_SSLSession_has_ticket__doc__
#if !defined(_ssl_SSLSession_has_ticket_DOCSTR)
@@ -2819,4 +2867,4 @@ _ssl_enum_crls(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje
#ifndef _SSL_ENUM_CRLS_METHODDEF
#define _SSL_ENUM_CRLS_METHODDEF
#endif /* !defined(_SSL_ENUM_CRLS_METHODDEF) */
-/*[clinic end generated code: output=1ed5d1c707ad352e input=a9049054013a1b77]*/
+/*[clinic end generated code: output=8c4a1e44702afeb7 input=a9049054013a1b77]*/
diff --git a/Modules/clinic/posixmodule.c.h b/Modules/clinic/posixmodule.c.h
index 6a3aacf13e74ad..b69817ae9bb482 100644
--- a/Modules/clinic/posixmodule.c.h
+++ b/Modules/clinic/posixmodule.c.h
@@ -306,7 +306,7 @@ os_access(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *k
return return_value;
}
-#if defined(HAVE_TTYNAME)
+#if defined(HAVE_TTYNAME_R)
PyDoc_STRVAR(os_ttyname__doc__,
"ttyname($module, fd, /)\n"
@@ -339,7 +339,7 @@ os_ttyname(PyObject *module, PyObject *arg)
return return_value;
}
-#endif /* defined(HAVE_TTYNAME) */
+#endif /* defined(HAVE_TTYNAME_R) */
#if defined(HAVE_CTERMID)
@@ -12879,4 +12879,4 @@ os__is_inputhook_installed(PyObject *module, PyObject *Py_UNUSED(ignored))
#ifndef OS__SUPPORTS_VIRTUAL_TERMINAL_METHODDEF
#define OS__SUPPORTS_VIRTUAL_TERMINAL_METHODDEF
#endif /* !defined(OS__SUPPORTS_VIRTUAL_TERMINAL_METHODDEF) */
-/*[clinic end generated code: output=a4410a686958d8e6 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=a75be356cd4abca5 input=a9049054013a1b77]*/
diff --git a/Modules/faulthandler.c b/Modules/faulthandler.c
index b62362f277797e..2d16028a5232d0 100644
--- a/Modules/faulthandler.c
+++ b/Modules/faulthandler.c
@@ -237,7 +237,12 @@ faulthandler_dump_traceback_py(PyObject *self,
return NULL;
if (all_threads) {
+ PyInterpreterState *interp = _PyInterpreterState_GET();
+    /* gh-128400: Accessing other thread states isn't safe while those
+     * threads are running, so pause them all first. */
+ _PyEval_StopTheWorld(interp);
errmsg = _Py_DumpTracebackThreads(fd, NULL, tstate);
+ _PyEval_StartTheWorld(interp);
if (errmsg != NULL) {
PyErr_SetString(PyExc_RuntimeError, errmsg);
return NULL;
diff --git a/Modules/main.c b/Modules/main.c
index 15ea49a1bad19e..3bf2241f2837a3 100644
--- a/Modules/main.c
+++ b/Modules/main.c
@@ -374,6 +374,7 @@ pymain_run_file_obj(PyObject *program_name, PyObject *filename,
if (fp == NULL) {
// Ignore the OSError
PyErr_Clear();
+ // TODO(picnixz): strerror() is locale dependent but not PySys_FormatStderr().
PySys_FormatStderr("%S: can't open file %R: [Errno %d] %s\n",
program_name, filename, errno, strerror(errno));
return 2;
diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c
index a09305c91ecbba..40d0f86e6aecdd 100644
--- a/Modules/posixmodule.c
+++ b/Modules/posixmodule.c
@@ -52,10 +52,6 @@
# include "winreparse.h"
#endif
-#if !defined(EX_OK) && defined(EXIT_SUCCESS)
-# define EX_OK EXIT_SUCCESS
-#endif
-
#ifdef __APPLE__
/* Needed for the implementation of os.statvfs */
# include
@@ -73,6 +69,9 @@
#ifdef HAVE_SYS_TIME_H
# include // futimes()
#endif
+#ifdef HAVE_SYS_PIDFD_H
+# include // PIDFD_NONBLOCK
+#endif
// SGI apparently needs this forward declaration
@@ -289,6 +288,10 @@ corresponding Unix manual entries for more information on calls.");
# include
#endif
+#if !defined(EX_OK) && defined(EXIT_SUCCESS)
+# define EX_OK EXIT_SUCCESS
+#endif
+
#ifdef HAVE_SYS_LOADAVG_H
# include
#endif
@@ -3295,7 +3298,7 @@ os_access_impl(PyObject *module, path_t *path, int mode, int dir_fd,
#endif
-#ifdef HAVE_TTYNAME
+#ifdef HAVE_TTYNAME_R
/*[clinic input]
os.ttyname
@@ -9530,42 +9533,33 @@ os_kill_impl(PyObject *module, pid_t pid, Py_ssize_t signal)
Py_RETURN_NONE;
#else /* !MS_WINDOWS */
- PyObject *result;
DWORD sig = (DWORD)signal;
- DWORD err;
- HANDLE handle;
#ifdef HAVE_WINDOWS_CONSOLE_IO
/* Console processes which share a common console can be sent CTRL+C or
CTRL+BREAK events, provided they handle said events. */
if (sig == CTRL_C_EVENT || sig == CTRL_BREAK_EVENT) {
if (GenerateConsoleCtrlEvent(sig, (DWORD)pid) == 0) {
- err = GetLastError();
- PyErr_SetFromWindowsErr(err);
- }
- else {
- Py_RETURN_NONE;
+ return PyErr_SetFromWindowsErr(0);
}
+ Py_RETURN_NONE;
}
#endif /* HAVE_WINDOWS_CONSOLE_IO */
/* If the signal is outside of what GenerateConsoleCtrlEvent can use,
attempt to open and terminate the process. */
- handle = OpenProcess(PROCESS_ALL_ACCESS, FALSE, (DWORD)pid);
+ HANDLE handle = OpenProcess(PROCESS_ALL_ACCESS, FALSE, (DWORD)pid);
if (handle == NULL) {
- err = GetLastError();
- return PyErr_SetFromWindowsErr(err);
+ return PyErr_SetFromWindowsErr(0);
}
- if (TerminateProcess(handle, sig) == 0) {
- err = GetLastError();
- result = PyErr_SetFromWindowsErr(err);
- } else {
- result = Py_NewRef(Py_None);
+ BOOL res = TerminateProcess(handle, sig);
+ CloseHandle(handle);
+ if (res == 0) {
+ return PyErr_SetFromWindowsErr(0);
}
- CloseHandle(handle);
- return result;
+ Py_RETURN_NONE;
#endif /* !MS_WINDOWS */
}
#endif /* HAVE_KILL */
diff --git a/Modules/pyexpat.c b/Modules/pyexpat.c
index fafb5ce4106111..e131389805bf3d 100644
--- a/Modules/pyexpat.c
+++ b/Modules/pyexpat.c
@@ -1782,7 +1782,12 @@ add_error(PyObject *errors_module, PyObject *codes_dict,
* with the other uses of the XML_ErrorString function
* elsewhere within this file. pyexpat's copy of the messages
* only acts as a fallback in case of outdated runtime libexpat,
- * where it returns NULL. */
+ * where it returns NULL.
+ *
+ * In addition, XML_ErrorString is assumed to return UTF-8 encoded
+ * strings (in conv_string_to_unicode, we decode them using 'strict'
+ * error handling).
+ */
const char *error_string = XML_ErrorString(error_code);
if (error_string == NULL) {
error_string = error_info_of[error_index].description;
diff --git a/Modules/readline.c b/Modules/readline.c
index 35655c70a4618f..7d1f703f7dbdde 100644
--- a/Modules/readline.c
+++ b/Modules/readline.c
@@ -351,6 +351,12 @@ readline_append_history_file_impl(PyObject *module, int nelements,
PyObject *filename_obj)
/*[clinic end generated code: output=5df06fc9da56e4e4 input=784b774db3a4b7c5]*/
{
+ if (nelements < 0)
+ {
+ PyErr_SetString(PyExc_ValueError, "nelements must be positive");
+ return NULL;
+ }
+
PyObject *filename_bytes;
const char *filename;
int err;
diff --git a/Modules/syslogmodule.c b/Modules/syslogmodule.c
index 14e7ca591a076b..aa1bc9da91dfb9 100644
--- a/Modules/syslogmodule.c
+++ b/Modules/syslogmodule.c
@@ -176,7 +176,7 @@ syslog_openlog_impl(PyObject *module, PyObject *ident, long logopt,
}
}
if (PySys_Audit("syslog.openlog", "Oll", ident ? ident : Py_None, logopt, facility) < 0) {
- Py_DECREF(ident);
+ Py_XDECREF(ident);
return NULL;
}
@@ -258,7 +258,7 @@ syslog_closelog_impl(PyObject *module)
// Since the sys.closelog changes the process level state of syslog library,
// this operation is only allowed for the main interpreter.
if (!is_main_interpreter()) {
- PyErr_SetString(PyExc_RuntimeError, "sunbinterpreter can't use syslog.closelog()");
+ PyErr_SetString(PyExc_RuntimeError, "subinterpreter can't use syslog.closelog()");
return NULL;
}
diff --git a/Objects/bytesobject.c b/Objects/bytesobject.c
index b8bcef27cf1f18..32cb9966ff1ae4 100644
--- a/Objects/bytesobject.c
+++ b/Objects/bytesobject.c
@@ -3171,6 +3171,7 @@ _PyBytes_Resize(PyObject **pv, Py_ssize_t newsize)
#ifdef Py_TRACE_REFS
_Py_ForgetReference(v);
#endif
+ _PyReftracerTrack(v, PyRefTracer_DESTROY);
*pv = (PyObject *)
PyObject_Realloc(v, PyBytesObject_SIZE + newsize);
if (*pv == NULL) {
diff --git a/Objects/codeobject.c b/Objects/codeobject.c
index 83b477e19c7a5b..6c4eef8e0116a3 100644
--- a/Objects/codeobject.c
+++ b/Objects/codeobject.c
@@ -1845,14 +1845,11 @@ free_monitoring_data(_PyCoMonitoringData *data)
static void
code_dealloc(PyCodeObject *co)
{
- assert(Py_REFCNT(co) == 0);
- Py_SET_REFCNT(co, 1);
+ _PyObject_ResurrectStart((PyObject *)co);
notify_code_watchers(PY_CODE_EVENT_DESTROY, co);
- if (Py_REFCNT(co) > 1) {
- Py_SET_REFCNT(co, Py_REFCNT(co) - 1);
+ if (_PyObject_ResurrectEnd((PyObject *)co)) {
return;
}
- Py_SET_REFCNT(co, 0);
#ifdef Py_GIL_DISABLED
PyObject_GC_UnTrack(co);
diff --git a/Objects/dictobject.c b/Objects/dictobject.c
index f1f9110ff73e6d..ff16a41b9a5d4e 100644
--- a/Objects/dictobject.c
+++ b/Objects/dictobject.c
@@ -624,11 +624,14 @@ estimate_log2_keysize(Py_ssize_t n)
/* This immutable, empty PyDictKeysObject is used for PyDict_Clear()
* (which cannot fail and thus can do no allocation).
+ *
+ * See https://github.com/python/cpython/pull/127568#discussion_r1868070614
+ * for the rationale of using dk_log2_index_bytes=3 instead of 0.
*/
static PyDictKeysObject empty_keys_struct = {
_Py_IMMORTAL_REFCNT, /* dk_refcnt */
0, /* dk_log2_size */
- 0, /* dk_log2_index_bytes */
+ 3, /* dk_log2_index_bytes */
DICT_KEYS_UNICODE, /* dk_kind */
#ifdef Py_GIL_DISABLED
{0}, /* dk_mutex */
@@ -3150,14 +3153,11 @@ dict_dealloc(PyObject *self)
{
PyDictObject *mp = (PyDictObject *)self;
PyInterpreterState *interp = _PyInterpreterState_GET();
- assert(Py_REFCNT(mp) == 0);
- Py_SET_REFCNT(mp, 1);
+ _PyObject_ResurrectStart(self);
_PyDict_NotifyEvent(interp, PyDict_EVENT_DEALLOCATED, mp, NULL, NULL);
- if (Py_REFCNT(mp) > 1) {
- Py_SET_REFCNT(mp, Py_REFCNT(mp) - 1);
+ if (_PyObject_ResurrectEnd(self)) {
return;
}
- Py_SET_REFCNT(mp, 0);
PyDictValues *values = mp->ma_values;
PyDictKeysObject *keys = mp->ma_keys;
Py_ssize_t i, n;
diff --git a/Objects/frameobject.c b/Objects/frameobject.c
index 44afda0c755ab0..8c596ede70ca3d 100644
--- a/Objects/frameobject.c
+++ b/Objects/frameobject.c
@@ -263,6 +263,10 @@ framelocalsproxy_merge(PyObject* self, PyObject* other)
Py_DECREF(iter);
+ if (PyErr_Occurred()) {
+ return -1;
+ }
+
return 0;
}
diff --git a/Objects/funcobject.c b/Objects/funcobject.c
index 8a30213888ef87..12d60f991534ab 100644
--- a/Objects/funcobject.c
+++ b/Objects/funcobject.c
@@ -986,14 +986,11 @@ func_clear(PyFunctionObject *op)
static void
func_dealloc(PyFunctionObject *op)
{
- assert(Py_REFCNT(op) == 0);
- Py_SET_REFCNT(op, 1);
+ _PyObject_ResurrectStart((PyObject *)op);
handle_func_event(PyFunction_EVENT_DESTROY, op, NULL);
- if (Py_REFCNT(op) > 1) {
- Py_SET_REFCNT(op, Py_REFCNT(op) - 1);
+ if (_PyObject_ResurrectEnd((PyObject *)op)) {
return;
}
- Py_SET_REFCNT(op, 0);
_PyObject_GC_UNTRACK(op);
if (op->func_weakreflist != NULL) {
PyObject_ClearWeakRefs((PyObject *) op);
diff --git a/Objects/genobject.c b/Objects/genobject.c
index 92cd8c61e7e9ca..3a9af4d4c182a3 100644
--- a/Objects/genobject.c
+++ b/Objects/genobject.c
@@ -604,30 +604,19 @@ gen_iternext(PyGenObject *gen)
int
_PyGen_SetStopIterationValue(PyObject *value)
{
- PyObject *e;
-
- if (value == NULL ||
- (!PyTuple_Check(value) && !PyExceptionInstance_Check(value)))
- {
- /* Delay exception instantiation if we can */
- PyErr_SetObject(PyExc_StopIteration, value);
- return 0;
- }
- /* Construct an exception instance manually with
- * PyObject_CallOneArg and pass it to PyErr_SetObject.
- *
- * We do this to handle a situation when "value" is a tuple, in which
- * case PyErr_SetObject would set the value of StopIteration to
- * the first element of the tuple.
- *
- * (See PyErr_SetObject/_PyErr_CreateException code for details.)
- */
- e = PyObject_CallOneArg(PyExc_StopIteration, value);
- if (e == NULL) {
+ assert(!PyErr_Occurred());
+ // Construct an exception instance manually with PyObject_CallOneArg()
+ // but use PyErr_SetRaisedException() instead of PyErr_SetObject() as
+ // PyErr_SetObject(exc_type, value) has a fast path when 'value'
+ // is a tuple, where the value of the StopIteration exception would be
+ // set to 'value[0]' instead of 'value'.
+ PyObject *exc = value == NULL
+ ? PyObject_CallNoArgs(PyExc_StopIteration)
+ : PyObject_CallOneArg(PyExc_StopIteration, value);
+ if (exc == NULL) {
return -1;
}
- PyErr_SetObject(PyExc_StopIteration, e);
- Py_DECREF(e);
+ PyErr_SetRaisedException(exc /* stolen */);
return 0;
}
diff --git a/Objects/iterobject.c b/Objects/iterobject.c
index 135ced9ea1f268..ebb342ff109222 100644
--- a/Objects/iterobject.c
+++ b/Objects/iterobject.c
@@ -384,6 +384,7 @@ anextawaitable_iternext(anextawaitableobject *obj)
return result;
}
if (PyErr_ExceptionMatches(PyExc_StopAsyncIteration)) {
+ PyErr_Clear();
_PyGen_SetStopIterationValue(obj->default_value);
}
return NULL;
@@ -407,6 +408,7 @@ anextawaitable_proxy(anextawaitableobject *obj, char *meth, PyObject *arg) {
* exception we replace it with a `StopIteration(default)`, as if
* it was the return value of `__anext__()` coroutine.
*/
+ PyErr_Clear();
_PyGen_SetStopIterationValue(obj->default_value);
}
return NULL;
diff --git a/Objects/listobject.c b/Objects/listobject.c
index 89abbda155a8b6..31ec8d5e05cf50 100644
--- a/Objects/listobject.c
+++ b/Objects/listobject.c
@@ -3,6 +3,7 @@
#include "Python.h"
#include "pycore_abstract.h" // _PyIndex_Check()
#include "pycore_ceval.h" // _PyEval_GetBuiltin()
+#include "pycore_critical_section.h" // _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED()
#include "pycore_dict.h" // _PyDictViewObject
#include "pycore_pyatomic_ft_wrappers.h"
#include "pycore_interp.h" // PyInterpreterState.list
@@ -81,6 +82,11 @@ static void
ensure_shared_on_resize(PyListObject *self)
{
#ifdef Py_GIL_DISABLED
+ // We can't use _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED here because
+ // the `CALL_LIST_APPEND` bytecode handler may lock the list without
+ // a critical section.
+ assert(Py_REFCNT(self) == 1 || PyMutex_IsLocked(&_PyObject_CAST(self)->ob_mutex));
+
// Ensure that the list array is freed using QSBR if we are not the
// owning thread.
if (!_Py_IsOwnedByCurrentThread((PyObject *)self) &&
@@ -367,11 +373,7 @@ list_item_impl(PyListObject *self, Py_ssize_t idx)
if (!valid_index(idx, size)) {
goto exit;
}
-#ifdef Py_GIL_DISABLED
item = _Py_NewRefWithLock(self->ob_item[idx]);
-#else
- item = Py_NewRef(self->ob_item[idx]);
-#endif
exit:
Py_END_CRITICAL_SECTION();
return item;
@@ -995,10 +997,12 @@ list_ass_slice(PyListObject *a, Py_ssize_t ilow, Py_ssize_t ihigh, PyObject *v)
Py_ssize_t n = PyList_GET_SIZE(a);
PyObject *copy = list_slice_lock_held(a, 0, n);
if (copy == NULL) {
- return -1;
+ ret = -1;
+ }
+ else {
+ ret = list_ass_slice_lock_held(a, ilow, ihigh, copy);
+ Py_DECREF(copy);
}
- ret = list_ass_slice_lock_held(a, ilow, ihigh, copy);
- Py_DECREF(copy);
Py_END_CRITICAL_SECTION();
}
else if (v != NULL && PyList_CheckExact(v)) {
@@ -1475,7 +1479,9 @@ PyList_Clear(PyObject *self)
PyErr_BadInternalCall();
return -1;
}
+ Py_BEGIN_CRITICAL_SECTION(self);
list_clear((PyListObject*)self);
+ Py_END_CRITICAL_SECTION();
return 0;
}
@@ -3446,7 +3452,9 @@ list___init___impl(PyListObject *self, PyObject *iterable)
/* Empty previous contents */
if (self->ob_item != NULL) {
+ Py_BEGIN_CRITICAL_SECTION(self);
list_clear(self);
+ Py_END_CRITICAL_SECTION();
}
if (iterable != NULL) {
if (_list_extend(self, iterable) < 0) {
@@ -3619,8 +3627,10 @@ adjust_slice_indexes(PyListObject *lst,
}
static int
-list_ass_subscript(PyObject* _self, PyObject* item, PyObject* value)
+list_ass_subscript_lock_held(PyObject *_self, PyObject *item, PyObject *value)
{
+ _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(_self);
+
PyListObject *self = (PyListObject *)_self;
if (_PyIndex_Check(item)) {
Py_ssize_t i = PyNumber_AsSsize_t(item, PyExc_IndexError);
@@ -3628,7 +3638,7 @@ list_ass_subscript(PyObject* _self, PyObject* item, PyObject* value)
return -1;
if (i < 0)
i += PyList_GET_SIZE(self);
- return list_ass_item((PyObject *)self, i, value);
+ return list_ass_item_lock_held(self, i, value);
}
else if (PySlice_Check(item)) {
Py_ssize_t start, stop, step;
@@ -3648,7 +3658,7 @@ list_ass_subscript(PyObject* _self, PyObject* item, PyObject* value)
step);
if (step == 1)
- return list_ass_slice(self, start, stop, value);
+ return list_ass_slice_lock_held(self, start, stop, value);
if (slicelength <= 0)
return 0;
@@ -3714,10 +3724,8 @@ list_ass_subscript(PyObject* _self, PyObject* item, PyObject* value)
/* protect against a[::-1] = a */
if (self == (PyListObject*)value) {
- Py_BEGIN_CRITICAL_SECTION(value);
- seq = list_slice_lock_held((PyListObject*)value, 0,
+ seq = list_slice_lock_held((PyListObject *)value, 0,
Py_SIZE(value));
- Py_END_CRITICAL_SECTION();
}
else {
seq = PySequence_Fast(value,
@@ -3731,7 +3739,7 @@ list_ass_subscript(PyObject* _self, PyObject* item, PyObject* value)
step);
if (step == 1) {
- int res = list_ass_slice(self, start, stop, seq);
+ int res = list_ass_slice_lock_held(self, start, stop, seq);
Py_DECREF(seq);
return res;
}
@@ -3787,6 +3795,24 @@ list_ass_subscript(PyObject* _self, PyObject* item, PyObject* value)
}
}
+static int
+list_ass_subscript(PyObject *self, PyObject *item, PyObject *value)
+{
+ int res;
+#ifdef Py_GIL_DISABLED
+ if (PySlice_Check(item) && value != NULL && PyList_CheckExact(value)) {
+ Py_BEGIN_CRITICAL_SECTION2(self, value);
+ res = list_ass_subscript_lock_held(self, item, value);
+ Py_END_CRITICAL_SECTION2();
+ return res;
+ }
+#endif
+ Py_BEGIN_CRITICAL_SECTION(self);
+ res = list_ass_subscript_lock_held(self, item, value);
+ Py_END_CRITICAL_SECTION();
+ return res;
+}
+
static PyMappingMethods list_as_mapping = {
list_length,
list_subscript,
diff --git a/Objects/namespaceobject.c b/Objects/namespaceobject.c
index 5b7547103a2b3f..4ef3bd92f5a569 100644
--- a/Objects/namespaceobject.c
+++ b/Objects/namespaceobject.c
@@ -141,6 +141,10 @@ namespace_repr(PyObject *ns)
goto error;
}
+ if (PyErr_Occurred()) {
+ goto error;
+ }
+
separator = PyUnicode_FromString(", ");
if (separator == NULL)
goto error;
diff --git a/Objects/object.c b/Objects/object.c
index a03c0cc55b4ae2..a80d20c182ae0b 100644
--- a/Objects/object.c
+++ b/Objects/object.c
@@ -360,8 +360,10 @@ is_dead(PyObject *o)
}
# endif
-void
-_Py_DecRefSharedDebug(PyObject *o, const char *filename, int lineno)
+// Decrement the shared reference count of an object. Return 1 if the object
+// is dead and should be deallocated, 0 otherwise.
+static int
+_Py_DecRefSharedIsDead(PyObject *o, const char *filename, int lineno)
{
// Should we queue the object for the owning thread to merge?
int should_queue;
@@ -402,6 +404,15 @@ _Py_DecRefSharedDebug(PyObject *o, const char *filename, int lineno)
}
else if (new_shared == _Py_REF_MERGED) {
// refcount is zero AND merged
+ return 1;
+ }
+ return 0;
+}
+
+void
+_Py_DecRefSharedDebug(PyObject *o, const char *filename, int lineno)
+{
+ if (_Py_DecRefSharedIsDead(o, filename, lineno)) {
_Py_Dealloc(o);
}
}
@@ -470,6 +481,26 @@ _Py_ExplicitMergeRefcount(PyObject *op, Py_ssize_t extra)
&shared, new_shared));
return refcnt;
}
+
+// The more complicated "slow" path for undoing the resurrection of an object.
+int
+_PyObject_ResurrectEndSlow(PyObject *op)
+{
+ if (_Py_IsImmortal(op)) {
+ return 1;
+ }
+ if (_Py_IsOwnedByCurrentThread(op)) {
+ // If the object is owned by the current thread, give up ownership and
+ // merge the refcount. This isn't necessary in all cases, but it
+ // simplifies the implementation.
+ Py_ssize_t refcount = _Py_ExplicitMergeRefcount(op, -1);
+ return refcount != 0;
+ }
+ int is_dead = _Py_DecRefSharedIsDead(op, NULL, 0);
+ return !is_dead;
+}
+
+
#endif /* Py_GIL_DISABLED */
@@ -548,7 +579,7 @@ PyObject_CallFinalizerFromDealloc(PyObject *self)
}
/* Temporarily resurrect the object. */
- Py_SET_REFCNT(self, 1);
+ _PyObject_ResurrectStart(self);
PyObject_CallFinalizer(self);
@@ -558,8 +589,7 @@ PyObject_CallFinalizerFromDealloc(PyObject *self)
/* Undo the temporary resurrection; can't use DECREF here, it would
* cause a recursive call. */
- Py_SET_REFCNT(self, Py_REFCNT(self) - 1);
- if (Py_REFCNT(self) == 0) {
+ if (!_PyObject_ResurrectEnd(self)) {
return 0; /* this is the normal path out */
}
@@ -2437,11 +2467,7 @@ new_reference(PyObject *op)
#ifdef Py_TRACE_REFS
_Py_AddToAllObjects(op);
#endif
- struct _reftracer_runtime_state *tracer = &_PyRuntime.ref_tracer;
- if (tracer->tracer_func != NULL) {
- void* data = tracer->tracer_data;
- tracer->tracer_func(op, PyRefTracer_CREATE, data);
- }
+ _PyReftracerTrack(op, PyRefTracer_CREATE);
}
void
@@ -2513,10 +2539,6 @@ _Py_ResurrectReference(PyObject *op)
#ifdef Py_TRACE_REFS
_Py_AddToAllObjects(op);
#endif
- if (_PyRuntime.ref_tracer.tracer_func != NULL) {
- void* data = _PyRuntime.ref_tracer.tracer_data;
- _PyRuntime.ref_tracer.tracer_func(op, PyRefTracer_CREATE, data);
- }
}
@@ -2906,15 +2928,10 @@ _Py_Dealloc(PyObject *op)
Py_INCREF(type);
#endif
- struct _reftracer_runtime_state *tracer = &_PyRuntime.ref_tracer;
- if (tracer->tracer_func != NULL) {
- void* data = tracer->tracer_data;
- tracer->tracer_func(op, PyRefTracer_DESTROY, data);
- }
-
#ifdef Py_TRACE_REFS
_Py_ForgetReference(op);
#endif
+ _PyReftracerTrack(op, PyRefTracer_DESTROY);
(*dealloc)(op);
#ifdef Py_DEBUG
diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c
index 371fb074387bbd..818814b663d534 100644
--- a/Objects/tupleobject.c
+++ b/Objects/tupleobject.c
@@ -942,6 +942,7 @@ _PyTuple_Resize(PyObject **pv, Py_ssize_t newsize)
for (i = newsize; i < oldsize; i++) {
Py_CLEAR(v->ob_item[i]);
}
+ _PyReftracerTrack((PyObject *)v, PyRefTracer_DESTROY);
sv = PyObject_GC_Resize(PyTupleObject, v, newsize);
if (sv == NULL) {
*pv = NULL;
diff --git a/Objects/typeobject.c b/Objects/typeobject.c
index 20b99816593cbe..bd79676b8e925f 100644
--- a/Objects/typeobject.c
+++ b/Objects/typeobject.c
@@ -996,9 +996,15 @@ type_modified_unlocked(PyTypeObject *type)
We don't assign new version tags eagerly, but only as
needed.
*/
+#ifdef Py_GIL_DISABLED
+ if (_Py_atomic_load_uint_relaxed(&type->tp_version_tag) == 0) {
+ return;
+ }
+#else
if (type->tp_version_tag == 0) {
return;
}
+#endif
// Cannot modify static builtin types.
assert((type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) == 0);
@@ -2649,7 +2655,7 @@ vectorcall_maybe(PyThreadState *tstate, PyObject *name,
*/
static int
-tail_contains(PyObject *tuple, int whence, PyObject *o)
+tail_contains(PyObject *tuple, Py_ssize_t whence, PyObject *o)
{
Py_ssize_t j, size;
size = PyTuple_GET_SIZE(tuple);
@@ -2712,7 +2718,7 @@ check_duplicates(PyObject *tuple)
*/
static void
-set_mro_error(PyObject **to_merge, Py_ssize_t to_merge_size, int *remain)
+set_mro_error(PyObject **to_merge, Py_ssize_t to_merge_size, Py_ssize_t *remain)
{
Py_ssize_t i, n, off;
char buf[1000];
@@ -2767,13 +2773,13 @@ pmerge(PyObject *acc, PyObject **to_merge, Py_ssize_t to_merge_size)
{
int res = 0;
Py_ssize_t i, j, empty_cnt;
- int *remain;
+ Py_ssize_t *remain;
/* remain stores an index into each sublist of to_merge.
remain[i] is the index of the next base in to_merge[i]
that is not included in acc.
*/
- remain = PyMem_New(int, to_merge_size);
+ remain = PyMem_New(Py_ssize_t, to_merge_size);
if (remain == NULL) {
PyErr_NoMemory();
return -1;
diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c
index 7ea058b9730b4c..434cb5ffb61c0e 100644
--- a/Objects/unicodeobject.c
+++ b/Objects/unicodeobject.c
@@ -111,20 +111,42 @@ NOTE: In the interpreter's initialization phase, some globals are currently
# define _PyUnicode_CHECK(op) PyUnicode_Check(op)
#endif
-#define _PyUnicode_UTF8(op) \
- (_PyCompactUnicodeObject_CAST(op)->utf8)
-#define PyUnicode_UTF8(op) \
- (assert(_PyUnicode_CHECK(op)), \
- PyUnicode_IS_COMPACT_ASCII(op) ? \
- ((char*)(_PyASCIIObject_CAST(op) + 1)) : \
- _PyUnicode_UTF8(op))
-#define _PyUnicode_UTF8_LENGTH(op) \
- (_PyCompactUnicodeObject_CAST(op)->utf8_length)
-#define PyUnicode_UTF8_LENGTH(op) \
- (assert(_PyUnicode_CHECK(op)), \
- PyUnicode_IS_COMPACT_ASCII(op) ? \
- _PyASCIIObject_CAST(op)->length : \
- _PyUnicode_UTF8_LENGTH(op))
+static inline char* _PyUnicode_UTF8(PyObject *op)
+{
+ return FT_ATOMIC_LOAD_PTR_ACQUIRE(_PyCompactUnicodeObject_CAST(op)->utf8);
+}
+
+static inline char* PyUnicode_UTF8(PyObject *op)
+{
+ assert(_PyUnicode_CHECK(op));
+ if (PyUnicode_IS_COMPACT_ASCII(op)) {
+ return ((char*)(_PyASCIIObject_CAST(op) + 1));
+ }
+ else {
+ return _PyUnicode_UTF8(op);
+ }
+}
+
+static inline void PyUnicode_SET_UTF8(PyObject *op, char *utf8)
+{
+ FT_ATOMIC_STORE_PTR_RELEASE(_PyCompactUnicodeObject_CAST(op)->utf8, utf8);
+}
+
+static inline Py_ssize_t PyUnicode_UTF8_LENGTH(PyObject *op)
+{
+ assert(_PyUnicode_CHECK(op));
+ if (PyUnicode_IS_COMPACT_ASCII(op)) {
+ return _PyASCIIObject_CAST(op)->length;
+ }
+ else {
+ return _PyCompactUnicodeObject_CAST(op)->utf8_length;
+ }
+}
+
+static inline void PyUnicode_SET_UTF8_LENGTH(PyObject *op, Py_ssize_t length)
+{
+ _PyCompactUnicodeObject_CAST(op)->utf8_length = length;
+}
#define _PyUnicode_LENGTH(op) \
(_PyASCIIObject_CAST(op)->length)
@@ -132,26 +154,37 @@ NOTE: In the interpreter's initialization phase, some globals are currently
(_PyASCIIObject_CAST(op)->state)
#define _PyUnicode_HASH(op) \
(_PyASCIIObject_CAST(op)->hash)
-#define _PyUnicode_KIND(op) \
- (assert(_PyUnicode_CHECK(op)), \
- _PyASCIIObject_CAST(op)->state.kind)
-#define _PyUnicode_GET_LENGTH(op) \
- (assert(_PyUnicode_CHECK(op)), \
- _PyASCIIObject_CAST(op)->length)
+
+static inline Py_hash_t PyUnicode_HASH(PyObject *op)
+{
+ assert(_PyUnicode_CHECK(op));
+ return FT_ATOMIC_LOAD_SSIZE_RELAXED(_PyASCIIObject_CAST(op)->hash);
+}
+
+static inline void PyUnicode_SET_HASH(PyObject *op, Py_hash_t hash)
+{
+ FT_ATOMIC_STORE_SSIZE_RELAXED(_PyASCIIObject_CAST(op)->hash, hash);
+}
+
#define _PyUnicode_DATA_ANY(op) \
(_PyUnicodeObject_CAST(op)->data.any)
-#define _PyUnicode_SHARE_UTF8(op) \
- (assert(_PyUnicode_CHECK(op)), \
- assert(!PyUnicode_IS_COMPACT_ASCII(op)), \
- (_PyUnicode_UTF8(op) == PyUnicode_DATA(op)))
+static inline int _PyUnicode_SHARE_UTF8(PyObject *op)
+{
+ assert(_PyUnicode_CHECK(op));
+ assert(!PyUnicode_IS_COMPACT_ASCII(op));
+ return (_PyUnicode_UTF8(op) == PyUnicode_DATA(op));
+}
/* true if the Unicode object has an allocated UTF-8 memory block
(not shared with other data) */
-#define _PyUnicode_HAS_UTF8_MEMORY(op) \
- ((!PyUnicode_IS_COMPACT_ASCII(op) \
- && _PyUnicode_UTF8(op) \
- && _PyUnicode_UTF8(op) != PyUnicode_DATA(op)))
+static inline int _PyUnicode_HAS_UTF8_MEMORY(PyObject *op)
+{
+ return (!PyUnicode_IS_COMPACT_ASCII(op)
+ && _PyUnicode_UTF8(op) != NULL
+ && _PyUnicode_UTF8(op) != PyUnicode_DATA(op));
+}
+
/* Generic helper macro to convert characters of different types.
from_type and to_type have to be valid type names, begin and end
@@ -650,7 +683,7 @@ _PyUnicode_CheckConsistency(PyObject *op, int check_content)
|| kind == PyUnicode_2BYTE_KIND
|| kind == PyUnicode_4BYTE_KIND);
CHECK(ascii->state.ascii == 0);
- CHECK(compact->utf8 != data);
+ CHECK(_PyUnicode_UTF8(op) != data);
}
else {
PyUnicodeObject *unicode = _PyUnicodeObject_CAST(op);
@@ -662,16 +695,17 @@ _PyUnicode_CheckConsistency(PyObject *op, int check_content)
CHECK(ascii->state.compact == 0);
CHECK(data != NULL);
if (ascii->state.ascii) {
- CHECK(compact->utf8 == data);
+ CHECK(_PyUnicode_UTF8(op) == data);
CHECK(compact->utf8_length == ascii->length);
}
else {
- CHECK(compact->utf8 != data);
+ CHECK(_PyUnicode_UTF8(op) != data);
}
}
-
- if (compact->utf8 == NULL)
+#ifndef Py_GIL_DISABLED
+ if (_PyUnicode_UTF8(op) == NULL)
CHECK(compact->utf8_length == 0);
+#endif
}
/* check that the best kind is used: O(n) operation */
@@ -1115,12 +1149,13 @@ resize_compact(PyObject *unicode, Py_ssize_t length)
if (_PyUnicode_HAS_UTF8_MEMORY(unicode)) {
PyMem_Free(_PyUnicode_UTF8(unicode));
- _PyUnicode_UTF8(unicode) = NULL;
- _PyUnicode_UTF8_LENGTH(unicode) = 0;
+ PyUnicode_SET_UTF8_LENGTH(unicode, 0);
+ PyUnicode_SET_UTF8(unicode, NULL);
}
#ifdef Py_TRACE_REFS
_Py_ForgetReference(unicode);
#endif
+ _PyReftracerTrack(unicode, PyRefTracer_DESTROY);
new_unicode = (PyObject *)PyObject_Realloc(unicode, new_size);
if (new_unicode == NULL) {
@@ -1168,8 +1203,8 @@ resize_inplace(PyObject *unicode, Py_ssize_t length)
if (!share_utf8 && _PyUnicode_HAS_UTF8_MEMORY(unicode))
{
PyMem_Free(_PyUnicode_UTF8(unicode));
- _PyUnicode_UTF8(unicode) = NULL;
- _PyUnicode_UTF8_LENGTH(unicode) = 0;
+ PyUnicode_SET_UTF8_LENGTH(unicode, 0);
+ PyUnicode_SET_UTF8(unicode, NULL);
}
data = (PyObject *)PyObject_Realloc(data, new_size);
@@ -1179,8 +1214,8 @@ resize_inplace(PyObject *unicode, Py_ssize_t length)
}
_PyUnicode_DATA_ANY(unicode) = data;
if (share_utf8) {
- _PyUnicode_UTF8(unicode) = data;
- _PyUnicode_UTF8_LENGTH(unicode) = length;
+ PyUnicode_SET_UTF8_LENGTH(unicode, length);
+ PyUnicode_SET_UTF8(unicode, data);
}
_PyUnicode_LENGTH(unicode) = length;
PyUnicode_WRITE(PyUnicode_KIND(unicode), data, length, 0);
@@ -1410,12 +1445,12 @@ unicode_convert_wchar_to_ucs4(const wchar_t *begin, const wchar_t *end,
assert(unicode != NULL);
assert(_PyUnicode_CHECK(unicode));
- assert(_PyUnicode_KIND(unicode) == PyUnicode_4BYTE_KIND);
+ assert(PyUnicode_KIND(unicode) == PyUnicode_4BYTE_KIND);
ucs4_out = PyUnicode_4BYTE_DATA(unicode);
for (iter = begin; iter < end; ) {
assert(ucs4_out < (PyUnicode_4BYTE_DATA(unicode) +
- _PyUnicode_GET_LENGTH(unicode)));
+ PyUnicode_GET_LENGTH(unicode)));
if (Py_UNICODE_IS_HIGH_SURROGATE(iter[0])
&& (iter+1) < end
&& Py_UNICODE_IS_LOW_SURROGATE(iter[1]))
@@ -1429,7 +1464,7 @@ unicode_convert_wchar_to_ucs4(const wchar_t *begin, const wchar_t *end,
}
}
assert(ucs4_out == (PyUnicode_4BYTE_DATA(unicode) +
- _PyUnicode_GET_LENGTH(unicode)));
+ PyUnicode_GET_LENGTH(unicode)));
}
#endif
@@ -1460,11 +1495,14 @@ _copy_characters(PyObject *to, Py_ssize_t to_start,
assert(PyUnicode_Check(from));
assert(from_start + how_many <= PyUnicode_GET_LENGTH(from));
- assert(PyUnicode_Check(to));
- assert(to_start + how_many <= PyUnicode_GET_LENGTH(to));
+ assert(to == NULL || PyUnicode_Check(to));
- if (how_many == 0)
+ if (how_many == 0) {
return 0;
+ }
+
+ assert(to != NULL);
+ assert(to_start + how_many <= PyUnicode_GET_LENGTH(to));
from_kind = PyUnicode_KIND(from);
from_data = PyUnicode_DATA(from);
@@ -1800,7 +1838,7 @@ unicode_modifiable(PyObject *unicode)
assert(_PyUnicode_CHECK(unicode));
if (Py_REFCNT(unicode) != 1)
return 0;
- if (FT_ATOMIC_LOAD_SSIZE_RELAXED(_PyUnicode_HASH(unicode)) != -1)
+ if (PyUnicode_HASH(unicode) != -1)
return 0;
if (PyUnicode_CHECK_INTERNED(unicode))
return 0;
@@ -4051,6 +4089,21 @@ PyUnicode_FSDecoder(PyObject* arg, void* addr)
static int unicode_fill_utf8(PyObject *unicode);
+
+static int
+unicode_ensure_utf8(PyObject *unicode)
+{
+ int err = 0;
+ if (PyUnicode_UTF8(unicode) == NULL) {
+ Py_BEGIN_CRITICAL_SECTION(unicode);
+ if (PyUnicode_UTF8(unicode) == NULL) {
+ err = unicode_fill_utf8(unicode);
+ }
+ Py_END_CRITICAL_SECTION();
+ }
+ return err;
+}
+
const char *
PyUnicode_AsUTF8AndSize(PyObject *unicode, Py_ssize_t *psize)
{
@@ -4062,13 +4115,11 @@ PyUnicode_AsUTF8AndSize(PyObject *unicode, Py_ssize_t *psize)
return NULL;
}
- if (PyUnicode_UTF8(unicode) == NULL) {
- if (unicode_fill_utf8(unicode) == -1) {
- if (psize) {
- *psize = -1;
- }
- return NULL;
+ if (unicode_ensure_utf8(unicode) == -1) {
+ if (psize) {
+ *psize = -1;
}
+ return NULL;
}
if (psize) {
@@ -5400,6 +5451,7 @@ unicode_encode_utf8(PyObject *unicode, _Py_error_handler error_handler,
static int
unicode_fill_utf8(PyObject *unicode)
{
+ _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(unicode);
/* the string cannot be ASCII, or PyUnicode_UTF8() would be set */
assert(!PyUnicode_IS_ASCII(unicode));
@@ -5441,10 +5493,10 @@ unicode_fill_utf8(PyObject *unicode)
PyErr_NoMemory();
return -1;
}
- _PyUnicode_UTF8(unicode) = cache;
- _PyUnicode_UTF8_LENGTH(unicode) = len;
memcpy(cache, start, len);
cache[len] = '\0';
+ PyUnicode_SET_UTF8_LENGTH(unicode, len);
+ PyUnicode_SET_UTF8(unicode, cache);
_PyBytesWriter_Dealloc(&writer);
return 0;
}
@@ -10995,9 +11047,9 @@ _PyUnicode_EqualToASCIIId(PyObject *left, _Py_Identifier *right)
return 0;
}
- Py_hash_t right_hash = FT_ATOMIC_LOAD_SSIZE_RELAXED(_PyUnicode_HASH(right_uni));
+ Py_hash_t right_hash = PyUnicode_HASH(right_uni);
assert(right_hash != -1);
- Py_hash_t hash = FT_ATOMIC_LOAD_SSIZE_RELAXED(_PyUnicode_HASH(left));
+ Py_hash_t hash = PyUnicode_HASH(left);
if (hash != -1 && hash != right_hash) {
return 0;
}
@@ -11483,14 +11535,14 @@ unicode_hash(PyObject *self)
#ifdef Py_DEBUG
assert(_Py_HashSecret_Initialized);
#endif
- Py_hash_t hash = FT_ATOMIC_LOAD_SSIZE_RELAXED(_PyUnicode_HASH(self));
+ Py_hash_t hash = PyUnicode_HASH(self);
if (hash != -1) {
return hash;
}
x = _Py_HashBytes(PyUnicode_DATA(self),
PyUnicode_GET_LENGTH(self) * PyUnicode_KIND(self));
- FT_ATOMIC_STORE_SSIZE_RELAXED(_PyUnicode_HASH(self), x);
+ PyUnicode_SET_HASH(self, x);
return x;
}
@@ -14887,8 +14939,8 @@ unicode_subtype_new(PyTypeObject *type, PyObject *unicode)
_PyUnicode_STATE(self).compact = 0;
_PyUnicode_STATE(self).ascii = _PyUnicode_STATE(unicode).ascii;
_PyUnicode_STATE(self).statically_allocated = 0;
- _PyUnicode_UTF8_LENGTH(self) = 0;
- _PyUnicode_UTF8(self) = NULL;
+ PyUnicode_SET_UTF8_LENGTH(self, 0);
+ PyUnicode_SET_UTF8(self, NULL);
_PyUnicode_DATA_ANY(self) = NULL;
share_utf8 = 0;
@@ -14918,8 +14970,8 @@ unicode_subtype_new(PyTypeObject *type, PyObject *unicode)
_PyUnicode_DATA_ANY(self) = data;
if (share_utf8) {
- _PyUnicode_UTF8_LENGTH(self) = length;
- _PyUnicode_UTF8(self) = data;
+ PyUnicode_SET_UTF8_LENGTH(self, length);
+ PyUnicode_SET_UTF8(self, data);
}
memcpy(data, PyUnicode_DATA(unicode), kind * (length + 1));
diff --git a/PCbuild/_testlimitedcapi.vcxproj b/PCbuild/_testlimitedcapi.vcxproj
index 7e5809fec31791..a5e0be93ab9390 100644
--- a/PCbuild/_testlimitedcapi.vcxproj
+++ b/PCbuild/_testlimitedcapi.vcxproj
@@ -99,8 +99,10 @@
+
+
@@ -110,6 +112,7 @@
+
diff --git a/PCbuild/_testlimitedcapi.vcxproj.filters b/PCbuild/_testlimitedcapi.vcxproj.filters
index 47f059040bed91..4b3521afc06158 100644
--- a/PCbuild/_testlimitedcapi.vcxproj.filters
+++ b/PCbuild/_testlimitedcapi.vcxproj.filters
@@ -14,8 +14,10 @@
+
+
@@ -26,6 +28,7 @@
+
diff --git a/Parser/action_helpers.c b/Parser/action_helpers.c
index 8dfc919da71b5a..98f9091ee43f90 100644
--- a/Parser/action_helpers.c
+++ b/Parser/action_helpers.c
@@ -961,8 +961,6 @@ _PyPegen_check_fstring_conversion(Parser *p, Token* conv_token, expr_ty conv)
return result_token_with_metadata(p, conv, conv_token->metadata);
}
-static asdl_expr_seq *
-unpack_top_level_joined_strs(Parser *p, asdl_expr_seq *raw_expressions);
ResultTokenWithMetadata *
_PyPegen_setup_full_format_spec(Parser *p, Token *colon, asdl_expr_seq *spec, int lineno, int col_offset,
int end_lineno, int end_col_offset, PyArena *arena)
@@ -1271,9 +1269,9 @@ _PyPegen_decode_fstring_part(Parser* p, int is_raw, expr_ty constant, Token* tok
p->arena);
}
-static asdl_expr_seq *
-unpack_top_level_joined_strs(Parser *p, asdl_expr_seq *raw_expressions)
-{
+expr_ty
+_PyPegen_joined_str(Parser *p, Token* a, asdl_expr_seq* expr, Token*b) {
+
/* The parser might put multiple f-string values into an individual
* JoinedStr node at the top level due to stuff like f-string debugging
* expressions. This function flattens those and promotes them to the
@@ -1281,44 +1279,14 @@ unpack_top_level_joined_strs(Parser *p, asdl_expr_seq *raw_expressions)
* of the regular output, so this is not necessary if you are not going
* to expose the output AST to Python level. */
- Py_ssize_t i, req_size, raw_size;
-
- req_size = raw_size = asdl_seq_LEN(raw_expressions);
- expr_ty expr;
- for (i = 0; i < raw_size; i++) {
- expr = asdl_seq_GET(raw_expressions, i);
- if (expr->kind == JoinedStr_kind) {
- req_size += asdl_seq_LEN(expr->v.JoinedStr.values) - 1;
- }
- }
-
- asdl_expr_seq *expressions = _Py_asdl_expr_seq_new(req_size, p->arena);
- if (expressions == NULL) {
- return NULL;
- }
-
- Py_ssize_t raw_index, req_index = 0;
- for (raw_index = 0; raw_index < raw_size; raw_index++) {
- expr = asdl_seq_GET(raw_expressions, raw_index);
- if (expr->kind == JoinedStr_kind) {
- asdl_expr_seq *values = expr->v.JoinedStr.values;
- for (Py_ssize_t n = 0; n < asdl_seq_LEN(values); n++) {
- asdl_seq_SET(expressions, req_index, asdl_seq_GET(values, n));
- req_index++;
- }
- } else {
- asdl_seq_SET(expressions, req_index, expr);
- req_index++;
+ Py_ssize_t n_items = asdl_seq_LEN(expr);
+ Py_ssize_t total_items = n_items;
+ for (Py_ssize_t i = 0; i < n_items; i++) {
+ expr_ty item = asdl_seq_GET(expr, i);
+ if (item->kind == JoinedStr_kind) {
+ total_items += asdl_seq_LEN(item->v.JoinedStr.values) - 1;
}
}
- return expressions;
-}
-
-expr_ty
-_PyPegen_joined_str(Parser *p, Token* a, asdl_expr_seq* raw_expressions, Token*b) {
-
- asdl_expr_seq *expr = unpack_top_level_joined_strs(p, raw_expressions);
- Py_ssize_t n_items = asdl_seq_LEN(expr);
const char* quote_str = PyBytes_AsString(a->bytes);
if (quote_str == NULL) {
@@ -1326,7 +1294,7 @@ _PyPegen_joined_str(Parser *p, Token* a, asdl_expr_seq* raw_expressions, Token*b
}
int is_raw = strpbrk(quote_str, "rR") != NULL;
- asdl_expr_seq *seq = _Py_asdl_expr_seq_new(n_items, p->arena);
+ asdl_expr_seq *seq = _Py_asdl_expr_seq_new(total_items, p->arena);
if (seq == NULL) {
return NULL;
}
@@ -1334,6 +1302,31 @@ _PyPegen_joined_str(Parser *p, Token* a, asdl_expr_seq* raw_expressions, Token*b
Py_ssize_t index = 0;
for (Py_ssize_t i = 0; i < n_items; i++) {
expr_ty item = asdl_seq_GET(expr, i);
+
+ // This should correspond to a JoinedStr node of two elements
+ // created by _PyPegen_formatted_value. This situation can only be the result of
+ // an f-string debug expression where the first element is a constant with the text and the second
+ // a formatted value with the expression.
+ if (item->kind == JoinedStr_kind) {
+ asdl_expr_seq *values = item->v.JoinedStr.values;
+ if (asdl_seq_LEN(values) != 2) {
+ PyErr_Format(PyExc_SystemError,
+ "unexpected JoinedStr node without debug data in f-string at line %d",
+ item->lineno);
+ return NULL;
+ }
+
+ expr_ty first = asdl_seq_GET(values, 0);
+ assert(first->kind == Constant_kind);
+ asdl_seq_SET(seq, index++, first);
+
+ expr_ty second = asdl_seq_GET(values, 1);
+ assert(second->kind == FormattedValue_kind);
+ asdl_seq_SET(seq, index++, second);
+
+ continue;
+ }
+
if (item->kind == Constant_kind) {
item = _PyPegen_decode_fstring_part(p, is_raw, item, b);
if (item == NULL) {
@@ -1352,7 +1345,7 @@ _PyPegen_joined_str(Parser *p, Token* a, asdl_expr_seq* raw_expressions, Token*b
}
asdl_expr_seq *resized_exprs;
- if (index != n_items) {
+ if (index != total_items) {
resized_exprs = _Py_asdl_expr_seq_new(index, p->arena);
if (resized_exprs == NULL) {
return NULL;
diff --git a/Parser/lexer/lexer.c b/Parser/lexer/lexer.c
index 8c868593f944c8..3ced1be0b003b8 100644
--- a/Parser/lexer/lexer.c
+++ b/Parser/lexer/lexer.c
@@ -212,9 +212,7 @@ _PyLexer_update_fstring_expr(struct tok_state *tok, char cur)
case '}':
case '!':
case ':':
- if (tok_mode->last_expr_end == -1) {
- tok_mode->last_expr_end = strlen(tok->start);
- }
+ tok_mode->last_expr_end = strlen(tok->start);
break;
default:
Py_UNREACHABLE();
diff --git a/Python/ceval.c b/Python/ceval.c
index d970ffa0309a8d..763c8688266f9d 100644
--- a/Python/ceval.c
+++ b/Python/ceval.c
@@ -27,6 +27,7 @@
#include "pycore_setobject.h" // _PySet_Update()
#include "pycore_sliceobject.h" // _PyBuildSlice_ConsumeRefs
#include "pycore_sysmodule.h" // _PySys_Audit()
+#include "pycore_traceback.h" // _PyTraceBack_FromFrame
#include "pycore_tuple.h" // _PyTuple_ITEMS()
#include "pycore_typeobject.h" // _PySuper_Lookup()
#include "pycore_uop_ids.h" // Uops
@@ -95,11 +96,7 @@
} \
_Py_DECREF_STAT_INC(); \
if (--op->ob_refcnt == 0) { \
- struct _reftracer_runtime_state *tracer = &_PyRuntime.ref_tracer; \
- if (tracer->tracer_func != NULL) { \
- void* data = tracer->tracer_data; \
- tracer->tracer_func(op, PyRefTracer_DESTROY, data); \
- } \
+ _PyReftracerTrack(op, PyRefTracer_DESTROY); \
destructor d = (destructor)(dealloc); \
d(op); \
} \
@@ -269,12 +266,16 @@ void
Py_SetRecursionLimit(int new_limit)
{
PyInterpreterState *interp = _PyInterpreterState_GET();
+ _PyEval_StopTheWorld(interp);
+ HEAD_LOCK(interp->runtime);
interp->ceval.recursion_limit = new_limit;
for (PyThreadState *p = interp->threads.head; p != NULL; p = p->next) {
int depth = p->py_recursion_limit - p->py_recursion_remaining;
p->py_recursion_limit = new_limit;
p->py_recursion_remaining = new_limit - depth;
}
+ HEAD_UNLOCK(interp->runtime);
+ _PyEval_StartTheWorld(interp);
}
/* The function _Py_EnterRecursiveCallTstate() only calls _Py_CheckRecursiveCall()
@@ -2018,6 +2019,17 @@ _PyEval_ExceptionGroupMatch(PyObject* exc_value, PyObject *match_type,
if (wrapped == NULL) {
return -1;
}
+ PyThreadState *tstate = _PyThreadState_GET();
+ _PyInterpreterFrame *frame = _PyThreadState_GetFrame(tstate);
+ PyFrameObject *f = _PyFrame_GetFrameObject(frame);
+ if (f != NULL) {
+ PyObject *tb = _PyTraceBack_FromFrame(NULL, f);
+ if (tb == NULL) {
+ return -1;
+ }
+ PyException_SetTraceback(wrapped, tb);
+ Py_DECREF(tb);
+ }
*match = wrapped;
}
*rest = Py_NewRef(Py_None);
@@ -2033,8 +2045,25 @@ _PyEval_ExceptionGroupMatch(PyObject* exc_value, PyObject *match_type,
if (pair == NULL) {
return -1;
}
- assert(PyTuple_CheckExact(pair));
- assert(PyTuple_GET_SIZE(pair) == 2);
+
+ if (!PyTuple_CheckExact(pair)) {
+ PyErr_Format(PyExc_TypeError,
+ "%.200s.split must return a tuple, not %.200s",
+ Py_TYPE(exc_value)->tp_name, Py_TYPE(pair)->tp_name);
+ Py_DECREF(pair);
+ return -1;
+ }
+
+ // allow tuples of length > 2 for backwards compatibility
+ if (PyTuple_GET_SIZE(pair) < 2) {
+ PyErr_Format(PyExc_TypeError,
+ "%.200s.split must return a 2-tuple, "
+ "got tuple of size %zd",
+ Py_TYPE(exc_value)->tp_name, PyTuple_GET_SIZE(pair));
+ Py_DECREF(pair);
+ return -1;
+ }
+
*match = Py_NewRef(PyTuple_GET_ITEM(pair, 0));
*rest = Py_NewRef(PyTuple_GET_ITEM(pair, 1));
Py_DECREF(pair);
@@ -2785,6 +2814,20 @@ import_from(PyThreadState *tstate, PyObject *v, PyObject *name)
}
}
+ if (origin == NULL && PyModule_Check(v)) {
+ // Fall back to __file__ for diagnostics if we don't have
+ // an origin that is a location
+ origin = PyModule_GetFilenameObject(v);
+ if (origin == NULL) {
+ if (!PyErr_ExceptionMatches(PyExc_SystemError)) {
+ goto done;
+ }
+ // PyModule_GetFilenameObject raised "module filename missing"
+ _PyErr_Clear(tstate);
+ }
+ assert(origin == NULL || PyUnicode_Check(origin));
+ }
+
if (is_possibly_shadowing_stdlib) {
assert(origin);
errmsg = PyUnicode_FromFormat(
@@ -2845,9 +2888,11 @@ import_from(PyThreadState *tstate, PyObject *v, PyObject *name)
}
done_with_errmsg:
- /* NULL checks for errmsg, mod_name, origin done by PyErr_SetImportError. */
- _PyErr_SetImportErrorWithNameFrom(errmsg, mod_name, origin, name);
- Py_DECREF(errmsg);
+ if (errmsg != NULL) {
+ /* NULL checks for mod_name and origin done by _PyErr_SetImportErrorWithNameFrom */
+ _PyErr_SetImportErrorWithNameFrom(errmsg, mod_name, origin, name);
+ Py_DECREF(errmsg);
+ }
done:
Py_XDECREF(origin);
diff --git a/Python/errors.c b/Python/errors.c
index bf1ddfa8cfb4dc..f19c030b1be114 100644
--- a/Python/errors.c
+++ b/Python/errors.c
@@ -299,6 +299,15 @@ PyErr_SetString(PyObject *exception, const char *string)
_PyErr_SetString(tstate, exception, string);
}
+void
+_PyErr_SetLocaleString(PyObject *exception, const char *string)
+{
+ PyObject *value = PyUnicode_DecodeLocale(string, "surrogateescape");
+ if (value != NULL) {
+ PyErr_SetObject(exception, value);
+ Py_DECREF(value);
+ }
+}
PyObject* _Py_HOT_FUNCTION
PyErr_Occurred(void)
diff --git a/Python/import.c b/Python/import.c
index ea5a3e4a7622d0..755a6e45e6afa3 100644
--- a/Python/import.c
+++ b/Python/import.c
@@ -747,7 +747,7 @@ const char *
_PyImport_ResolveNameWithPackageContext(const char *name)
{
#ifndef HAVE_THREAD_LOCAL
- PyThread_acquire_lock(EXTENSIONS.mutex, WAIT_LOCK);
+ PyMutex_Lock(&EXTENSIONS.mutex);
#endif
if (PKGCONTEXT != NULL) {
const char *p = strrchr(PKGCONTEXT, '.');
@@ -757,7 +757,7 @@ _PyImport_ResolveNameWithPackageContext(const char *name)
}
}
#ifndef HAVE_THREAD_LOCAL
- PyThread_release_lock(EXTENSIONS.mutex);
+ PyMutex_Unlock(&EXTENSIONS.mutex);
#endif
return name;
}
@@ -766,12 +766,12 @@ const char *
_PyImport_SwapPackageContext(const char *newcontext)
{
#ifndef HAVE_THREAD_LOCAL
- PyThread_acquire_lock(EXTENSIONS.mutex, WAIT_LOCK);
+ PyMutex_Lock(&EXTENSIONS.mutex);
#endif
const char *oldcontext = PKGCONTEXT;
PKGCONTEXT = newcontext;
#ifndef HAVE_THREAD_LOCAL
- PyThread_release_lock(EXTENSIONS.mutex);
+ PyMutex_Unlock(&EXTENSIONS.mutex);
#endif
return oldcontext;
}
diff --git a/Python/initconfig.c b/Python/initconfig.c
index 84717b4e3c934b..5746416c826522 100644
--- a/Python/initconfig.c
+++ b/Python/initconfig.c
@@ -129,6 +129,10 @@ static const PyConfigSpec PYCONFIG_SPEC[] = {
#ifdef Py_DEBUG
SPEC(run_presite, WSTR_OPT),
#endif
+#ifdef __APPLE__
+ SPEC(use_system_logger, BOOL),
+#endif
+
{NULL, 0, 0},
};
@@ -744,6 +748,9 @@ config_check_consistency(const PyConfig *config)
assert(config->cpu_count != 0);
// config->use_frozen_modules is initialized later
// by _PyConfig_InitImportConfig().
+#ifdef __APPLE__
+ assert(config->use_system_logger >= 0);
+#endif
#ifdef Py_STATS
assert(config->_pystats >= 0);
#endif
@@ -846,6 +853,9 @@ _PyConfig_InitCompatConfig(PyConfig *config)
config->_is_python_build = 0;
config->code_debug_ranges = 1;
config->cpu_count = -1;
+#ifdef __APPLE__
+ config->use_system_logger = 0;
+#endif
#ifdef Py_GIL_DISABLED
config->enable_gil = _PyConfig_GIL_DEFAULT;
#endif
@@ -874,6 +884,9 @@ config_init_defaults(PyConfig *config)
#ifdef MS_WINDOWS
config->legacy_windows_stdio = 0;
#endif
+#ifdef __APPLE__
+ config->use_system_logger = 0;
+#endif
}
@@ -909,6 +922,9 @@ PyConfig_InitIsolatedConfig(PyConfig *config)
#ifdef MS_WINDOWS
config->legacy_windows_stdio = 0;
#endif
+#ifdef __APPLE__
+ config->use_system_logger = 0;
+#endif
}
diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c
index 8fe5bb8b3007d9..ba835ef4c848e8 100644
--- a/Python/pylifecycle.c
+++ b/Python/pylifecycle.c
@@ -43,7 +43,26 @@
#endif
#if defined(__APPLE__)
+# include
+# include
# include
+// The os_log unified logging APIs were introduced in macOS 10.12, iOS 10.0,
+// tvOS 10.0, and watchOS 3.0;
+# if defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE
+# define HAS_APPLE_SYSTEM_LOG 1
+# elif defined(TARGET_OS_OSX) && TARGET_OS_OSX
+# if defined(MAC_OS_X_VERSION_10_12) && MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_12
+# define HAS_APPLE_SYSTEM_LOG 1
+# else
+# define HAS_APPLE_SYSTEM_LOG 0
+# endif
+# else
+# define HAS_APPLE_SYSTEM_LOG 0
+# endif
+
+# if HAS_APPLE_SYSTEM_LOG
+# include
+# endif
#endif
#ifdef HAVE_SIGNAL_H
@@ -73,6 +92,9 @@ static PyStatus init_sys_streams(PyThreadState *tstate);
#ifdef __ANDROID__
static PyStatus init_android_streams(PyThreadState *tstate);
#endif
+#if defined(__APPLE__) && HAS_APPLE_SYSTEM_LOG
+static PyStatus init_apple_streams(PyThreadState *tstate);
+#endif
static void wait_for_thread_shutdown(PyThreadState *tstate);
static void finalize_subinterpreters(void);
static void call_ll_exitfuncs(_PyRuntimeState *runtime);
@@ -684,6 +706,11 @@ pycore_create_interpreter(_PyRuntimeState *runtime,
return _PyStatus_NO_MEMORY();
}
+ status = _PyTraceMalloc_Init();
+ if (_PyStatus_EXCEPTION(status)) {
+ return status;
+ }
+
PyThreadState *tstate = _PyThreadState_New(interp,
_PyThreadState_WHENCE_INIT);
if (tstate == NULL) {
@@ -1253,6 +1280,14 @@ init_interp_main(PyThreadState *tstate)
return status;
}
#endif
+#if defined(__APPLE__) && HAS_APPLE_SYSTEM_LOG
+ if (config->use_system_logger) {
+ status = init_apple_streams(tstate);
+ if (_PyStatus_EXCEPTION(status)) {
+ return status;
+ }
+ }
+#endif
#ifdef Py_DEBUG
run_presite(tstate);
@@ -2121,7 +2156,7 @@ _Py_Finalize(_PyRuntimeState *runtime)
/* Disable tracemalloc after all Python objects have been destroyed,
so it is possible to use tracemalloc in objects destructor. */
- _PyTraceMalloc_Fini();
+ _PyTraceMalloc_Stop();
/* Finalize any remaining import state */
// XXX Move these up to where finalize_modules() is currently.
@@ -2174,6 +2209,8 @@ _Py_Finalize(_PyRuntimeState *runtime)
finalize_interp_clear(tstate);
+ _PyTraceMalloc_Fini();
+
#ifdef Py_TRACE_REFS
/* Display addresses (& refcnts) of all objects still alive.
* An address can be used to find the repr of the object, printed
@@ -2920,6 +2957,69 @@ init_android_streams(PyThreadState *tstate)
#endif // __ANDROID__
+#if defined(__APPLE__) && HAS_APPLE_SYSTEM_LOG
+
+static PyObject *
+apple_log_write_impl(PyObject *self, PyObject *args)
+{
+ int logtype = 0;
+ const char *text = NULL;
+ if (!PyArg_ParseTuple(args, "iy", &logtype, &text)) {
+ return NULL;
+ }
+
+ // Pass the user-provided text through explicit %s formatting
+ // to avoid % literals being interpreted as a formatting directive.
+ os_log_with_type(OS_LOG_DEFAULT, logtype, "%s", text);
+ Py_RETURN_NONE;
+}
+
+
+static PyMethodDef apple_log_write_method = {
+ "apple_log_write", apple_log_write_impl, METH_VARARGS
+};
+
+
+static PyStatus
+init_apple_streams(PyThreadState *tstate)
+{
+ PyStatus status = _PyStatus_OK();
+ PyObject *_apple_support = NULL;
+ PyObject *apple_log_write = NULL;
+ PyObject *result = NULL;
+
+ _apple_support = PyImport_ImportModule("_apple_support");
+ if (_apple_support == NULL) {
+ goto error;
+ }
+
+ apple_log_write = PyCFunction_New(&apple_log_write_method, NULL);
+ if (apple_log_write == NULL) {
+ goto error;
+ }
+
+ // Initialize the logging streams, sending stdout -> Default; stderr -> Error
+ result = PyObject_CallMethod(
+ _apple_support, "init_streams", "Oii",
+ apple_log_write, OS_LOG_TYPE_DEFAULT, OS_LOG_TYPE_ERROR);
+ if (result == NULL) {
+ goto error;
+ }
+ goto done;
+
+error:
+ _PyErr_Print(tstate);
+ status = _PyStatus_ERR("failed to initialize Apple log streams");
+
+done:
+ Py_XDECREF(result);
+ Py_XDECREF(apple_log_write);
+ Py_XDECREF(_apple_support);
+ return status;
+}
+
+#endif // __APPLE__ && HAS_APPLE_SYSTEM_LOG
+
static void
_Py_FatalError_DumpTracebacks(int fd, PyInterpreterState *interp,
diff --git a/Python/pythonrun.c b/Python/pythonrun.c
index 5891d50cd3f598..f9daf8168f1c52 100644
--- a/Python/pythonrun.c
+++ b/Python/pythonrun.c
@@ -1485,6 +1485,7 @@ Py_CompileStringObject(const char *str, PyObject *filename, int start,
if (flags && (flags->cf_flags & PyCF_ONLY_AST)) {
if ((flags->cf_flags & PyCF_OPTIMIZED_AST) == PyCF_OPTIMIZED_AST) {
if (_PyCompile_AstOptimize(mod, filename, flags, optimize, arena) < 0) {
+ _PyArena_Free(arena);
return NULL;
}
}
diff --git a/Python/specialize.c b/Python/specialize.c
index 1a2043d0e8dc11..ad166ea091be9d 100644
--- a/Python/specialize.c
+++ b/Python/specialize.c
@@ -841,7 +841,10 @@ specialize_dict_access(
return 0;
}
_PyAttrCache *cache = (_PyAttrCache *)(instr + 1);
- if (type->tp_flags & Py_TPFLAGS_INLINE_VALUES && _PyObject_InlineValues(owner)->valid) {
+ if (type->tp_flags & Py_TPFLAGS_INLINE_VALUES &&
+ _PyObject_InlineValues(owner)->valid &&
+ !(base_op == STORE_ATTR && _PyObject_GetManagedDict(owner) != NULL))
+ {
PyDictKeysObject *keys = ((PyHeapTypeObject *)type)->ht_cached_keys;
assert(PyUnicode_CheckExact(name));
Py_ssize_t index = _PyDictKeys_StringLookup(keys, name);
diff --git a/Python/stdlib_module_names.h b/Python/stdlib_module_names.h
index faeed0b7125808..dfe0fa2acd8d6c 100644
--- a/Python/stdlib_module_names.h
+++ b/Python/stdlib_module_names.h
@@ -6,6 +6,7 @@ static const char* _Py_stdlib_module_names[] = {
"_abc",
"_aix_support",
"_android_support",
+"_apple_support",
"_ast",
"_asyncio",
"_bisect",
diff --git a/Python/sysmodule.c b/Python/sysmodule.c
index 3f170fff156fcd..9cf4a580d4408f 100644
--- a/Python/sysmodule.c
+++ b/Python/sysmodule.c
@@ -2841,6 +2841,7 @@ PySys_ResetWarnOptions(void)
static int
_PySys_AddWarnOptionWithError(PyThreadState *tstate, PyObject *option)
{
+ assert(tstate != NULL);
PyObject *warnoptions = get_warnoptions(tstate);
if (warnoptions == NULL) {
return -1;
@@ -2856,11 +2857,11 @@ PyAPI_FUNC(void)
PySys_AddWarnOptionUnicode(PyObject *option)
{
PyThreadState *tstate = _PyThreadState_GET();
+ _Py_EnsureTstateNotNULL(tstate);
+ assert(!_PyErr_Occurred(tstate));
if (_PySys_AddWarnOptionWithError(tstate, option) < 0) {
/* No return value, therefore clear error state if possible */
- if (tstate) {
- _PyErr_Clear(tstate);
- }
+ _PyErr_Clear(tstate);
}
}
diff --git a/Python/tracemalloc.c b/Python/tracemalloc.c
index e58b60ddd5e484..99ba7876acced7 100644
--- a/Python/tracemalloc.c
+++ b/Python/tracemalloc.c
@@ -2,6 +2,7 @@
#include "pycore_fileutils.h" // _Py_write_noraise()
#include "pycore_gc.h" // PyGC_Head
#include "pycore_hashtable.h" // _Py_hashtable_t
+#include "pycore_initconfig.h" // _PyStatus_NO_MEMORY()
#include "pycore_object.h" // _PyType_PreHeaderSize()
#include "pycore_pymem.h" // _Py_tracemalloc_config
#include "pycore_runtime.h" // _Py_ID()
@@ -538,12 +539,16 @@ tracemalloc_alloc(int use_calloc, void *ctx, size_t nelem, size_t elsize)
return NULL;
TABLES_LOCK();
- if (ADD_TRACE(ptr, nelem * elsize) < 0) {
- /* Failed to allocate a trace for the new memory block */
- TABLES_UNLOCK();
- alloc->free(alloc->ctx, ptr);
- return NULL;
+
+ if (tracemalloc_config.tracing) {
+ if (ADD_TRACE(ptr, nelem * elsize) < 0) {
+ /* Failed to allocate a trace for the new memory block */
+ alloc->free(alloc->ctx, ptr);
+ ptr = NULL;
+ }
}
+ // else: gh-128679: tracemalloc.stop() was called by another thread
+
TABLES_UNLOCK();
return ptr;
}
@@ -559,11 +564,15 @@ tracemalloc_realloc(void *ctx, void *ptr, size_t new_size)
if (ptr2 == NULL)
return NULL;
+ TABLES_LOCK();
+ if (!tracemalloc_config.tracing) {
+ // gh-128679: tracemalloc.stop() was called by another thread
+ goto done;
+ }
+
if (ptr != NULL) {
/* an existing memory block has been resized */
- TABLES_LOCK();
-
/* tracemalloc_add_trace() updates the trace if there is already
a trace at address ptr2 */
if (ptr2 != ptr) {
@@ -582,20 +591,19 @@ tracemalloc_realloc(void *ctx, void *ptr, size_t new_size)
allocating memory. */
Py_FatalError("tracemalloc_realloc() failed to allocate a trace");
}
- TABLES_UNLOCK();
}
else {
/* new allocation */
- TABLES_LOCK();
if (ADD_TRACE(ptr2, new_size) < 0) {
/* Failed to allocate a trace for the new memory block */
- TABLES_UNLOCK();
alloc->free(alloc->ctx, ptr2);
- return NULL;
+ ptr2 = NULL;
}
- TABLES_UNLOCK();
}
+
+done:
+ TABLES_UNLOCK();
return ptr2;
}
@@ -614,7 +622,12 @@ tracemalloc_free(void *ctx, void *ptr)
alloc->free(alloc->ctx, ptr);
TABLES_LOCK();
- REMOVE_TRACE(ptr);
+
+ if (tracemalloc_config.tracing) {
+ REMOVE_TRACE(ptr);
+ }
+ // else: gh-128679: tracemalloc.stop() was called by another thread
+
TABLES_UNLOCK();
}
@@ -673,7 +686,9 @@ tracemalloc_realloc_gil(void *ctx, void *ptr, size_t new_size)
ptr2 = alloc->realloc(alloc->ctx, ptr, new_size);
if (ptr2 != NULL && ptr != NULL) {
TABLES_LOCK();
- REMOVE_TRACE(ptr);
+ if (tracemalloc_config.tracing) {
+ REMOVE_TRACE(ptr);
+ }
TABLES_UNLOCK();
}
return ptr2;
@@ -748,7 +763,9 @@ tracemalloc_raw_realloc(void *ctx, void *ptr, size_t new_size)
if (ptr2 != NULL && ptr != NULL) {
TABLES_LOCK();
- REMOVE_TRACE(ptr);
+ if (tracemalloc_config.tracing) {
+ REMOVE_TRACE(ptr);
+ }
TABLES_UNLOCK();
}
return ptr2;
@@ -779,46 +796,36 @@ tracemalloc_clear_filename(void *value)
/* reentrant flag must be set to call this function and GIL must be held */
static void
-tracemalloc_clear_traces(void)
+tracemalloc_clear_traces_unlocked(void)
{
+ set_reentrant(1);
+
/* The GIL protects variables against concurrent access */
assert(PyGILState_Check());
- TABLES_LOCK();
_Py_hashtable_clear(tracemalloc_traces);
_Py_hashtable_clear(tracemalloc_domains);
tracemalloc_traced_memory = 0;
tracemalloc_peak_traced_memory = 0;
- TABLES_UNLOCK();
_Py_hashtable_clear(tracemalloc_tracebacks);
_Py_hashtable_clear(tracemalloc_filenames);
+
+ set_reentrant(0);
}
-int
+PyStatus
_PyTraceMalloc_Init(void)
{
- if (tracemalloc_config.initialized == TRACEMALLOC_FINALIZED) {
- PyErr_SetString(PyExc_RuntimeError,
- "the tracemalloc module has been unloaded");
- return -1;
- }
-
- if (tracemalloc_config.initialized == TRACEMALLOC_INITIALIZED)
- return 0;
+ assert(tracemalloc_config.initialized == TRACEMALLOC_NOT_INITIALIZED);
PyMem_GetAllocator(PYMEM_DOMAIN_RAW, &allocators.raw);
#ifdef REENTRANT_THREADLOCAL
if (PyThread_tss_create(&tracemalloc_reentrant_key) != 0) {
-#ifdef MS_WINDOWS
- PyErr_SetFromWindowsErr(0);
-#else
- PyErr_SetFromErrno(PyExc_OSError);
-#endif
- return -1;
+ return _PyStatus_NO_MEMORY();
}
#endif
@@ -826,8 +833,7 @@ _PyTraceMalloc_Init(void)
if (tables_lock == NULL) {
tables_lock = PyThread_allocate_lock();
if (tables_lock == NULL) {
- PyErr_SetString(PyExc_RuntimeError, "cannot allocate lock");
- return -1;
+ return _PyStatus_NO_MEMORY();
}
}
#endif
@@ -844,9 +850,9 @@ _PyTraceMalloc_Init(void)
tracemalloc_domains = tracemalloc_create_domains_table();
if (tracemalloc_filenames == NULL || tracemalloc_tracebacks == NULL
- || tracemalloc_traces == NULL || tracemalloc_domains == NULL) {
- PyErr_NoMemory();
- return -1;
+ || tracemalloc_traces == NULL || tracemalloc_domains == NULL)
+ {
+ return _PyStatus_NO_MEMORY();
}
tracemalloc_empty_traceback.nframe = 1;
@@ -857,7 +863,7 @@ _PyTraceMalloc_Init(void)
tracemalloc_empty_traceback.hash = traceback_hash(&tracemalloc_empty_traceback);
tracemalloc_config.initialized = TRACEMALLOC_INITIALIZED;
- return 0;
+ return _PyStatus_OK();
}
@@ -902,10 +908,6 @@ _PyTraceMalloc_Start(int max_nframe)
return -1;
}
- if (_PyTraceMalloc_Init() < 0) {
- return -1;
- }
-
if (PyRefTracer_SetTracer(_PyTraceMalloc_TraceRef, NULL) < 0) {
return -1;
}
@@ -960,8 +962,13 @@ _PyTraceMalloc_Start(int max_nframe)
void
_PyTraceMalloc_Stop(void)
{
- if (!tracemalloc_config.tracing)
- return;
+ // Lock to synchronize with tracemalloc_free() which checks
+ // 'tracing' while holding the lock.
+ TABLES_LOCK();
+
+ if (!tracemalloc_config.tracing) {
+ goto done;
+ }
/* stop tracing Python memory allocations */
tracemalloc_config.tracing = 0;
@@ -973,11 +980,16 @@ _PyTraceMalloc_Stop(void)
PyMem_SetAllocator(PYMEM_DOMAIN_MEM, &allocators.mem);
PyMem_SetAllocator(PYMEM_DOMAIN_OBJ, &allocators.obj);
- tracemalloc_clear_traces();
+ tracemalloc_clear_traces_unlocked();
/* release memory */
raw_free(tracemalloc_traceback);
tracemalloc_traceback = NULL;
+
+ (void)PyRefTracer_SetTracer(NULL, NULL);
+
+done:
+ TABLES_UNLOCK();
}
@@ -1227,23 +1239,17 @@ tracemalloc_pyobject_decref(void *value)
static traceback_t*
-tracemalloc_get_traceback(unsigned int domain, uintptr_t ptr)
+tracemalloc_get_traceback_unlocked(unsigned int domain, uintptr_t ptr)
{
-
- if (!tracemalloc_config.tracing)
+ if (!tracemalloc_config.tracing) {
return NULL;
+ }
- trace_t *trace;
- TABLES_LOCK();
_Py_hashtable_t *traces = tracemalloc_get_traces_table(domain);
+ trace_t *trace = NULL;
if (traces) {
trace = _Py_hashtable_get(traces, TO_PTR(ptr));
}
- else {
- trace = NULL;
- }
- TABLES_UNLOCK();
-
if (!trace) {
return NULL;
}
@@ -1272,13 +1278,20 @@ _PyMem_DumpTraceback(int fd, const void *ptr)
traceback_t *traceback;
int i;
- if (!tracemalloc_config.tracing) {
+ TABLES_LOCK();
+
+ if (tracemalloc_config.tracing) {
+ traceback = tracemalloc_get_traceback_unlocked(DEFAULT_DOMAIN,
+ (uintptr_t)ptr);
+ }
+ else {
+ traceback = NULL;
PUTS(fd, "Enable tracemalloc to get the memory block "
"allocation traceback\n\n");
- return;
}
- traceback = tracemalloc_get_traceback(DEFAULT_DOMAIN, (uintptr_t)ptr);
+ TABLES_UNLOCK();
+
if (traceback == NULL)
return;
@@ -1307,38 +1320,62 @@ int
PyTraceMalloc_Track(unsigned int domain, uintptr_t ptr,
size_t size)
{
- int res;
- PyGILState_STATE gil_state;
+ PyGILState_STATE gil_state = PyGILState_Ensure();
+ int result;
+ // gh-129185: Check before TABLES_LOCK() to support calls after
+ // _PyTraceMalloc_Fini().
if (!tracemalloc_config.tracing) {
- /* tracemalloc is not tracing: do nothing */
- return -2;
+ result = -2;
+ goto done;
}
- gil_state = PyGILState_Ensure();
-
TABLES_LOCK();
- res = tracemalloc_add_trace(domain, ptr, size);
- TABLES_UNLOCK();
+ if (tracemalloc_config.tracing) {
+ result = tracemalloc_add_trace(domain, ptr, size);
+ }
+ else {
+ // gh-128679: tracemalloc.stop() was called by another thread
+ result = -2;
+ }
+
+ TABLES_UNLOCK();
+done:
PyGILState_Release(gil_state);
- return res;
+ return result;
}
int
PyTraceMalloc_Untrack(unsigned int domain, uintptr_t ptr)
{
+ // Need the GIL to prevent races on the first 'tracing' test
+ PyGILState_STATE gil_state = PyGILState_Ensure();
+ int result;
+
+ // gh-129185: Check before TABLES_LOCK() to support calls after
+ // _PyTraceMalloc_Fini()
if (!tracemalloc_config.tracing) {
- /* tracemalloc is not tracing: do nothing */
- return -2;
+ result = -2;
+ goto done;
}
TABLES_LOCK();
- tracemalloc_remove_trace(domain, ptr);
- TABLES_UNLOCK();
- return 0;
+ if (tracemalloc_config.tracing) {
+ tracemalloc_remove_trace(domain, ptr);
+ result = 0;
+ }
+ else {
+ /* tracemalloc is not tracing: do nothing */
+ result = -2;
+ }
+
+ TABLES_UNLOCK();
+done:
+ PyGILState_Release(gil_state);
+ return result;
}
@@ -1376,6 +1413,12 @@ _PyTraceMalloc_TraceRef(PyObject *op, PyRefTracerEvent event, void* Py_UNUSED(ig
int res = -1;
TABLES_LOCK();
+
+ if (!tracemalloc_config.tracing) {
+ // gh-128679: tracemalloc.stop() was called by another thread
+ goto done;
+ }
+
trace_t *trace = _Py_hashtable_get(tracemalloc_traces, TO_PTR(ptr));
if (trace != NULL) {
/* update the traceback of the memory block */
@@ -1386,6 +1429,8 @@ _PyTraceMalloc_TraceRef(PyObject *op, PyRefTracerEvent event, void* Py_UNUSED(ig
}
}
/* else: cannot track the object, its memory block size is unknown */
+
+done:
TABLES_UNLOCK();
return res;
@@ -1397,7 +1442,9 @@ _PyTraceMalloc_GetTraceback(unsigned int domain, uintptr_t ptr)
{
traceback_t *traceback;
- traceback = tracemalloc_get_traceback(domain, ptr);
+ TABLES_LOCK();
+ traceback = tracemalloc_get_traceback_unlocked(domain, ptr);
+ TABLES_UNLOCK();
if (traceback == NULL)
Py_RETURN_NONE;
@@ -1407,19 +1454,20 @@ _PyTraceMalloc_GetTraceback(unsigned int domain, uintptr_t ptr)
int
_PyTraceMalloc_IsTracing(void)
{
- return tracemalloc_config.tracing;
+ TABLES_LOCK();
+ int tracing = tracemalloc_config.tracing;
+ TABLES_UNLOCK();
+ return tracing;
}
void
_PyTraceMalloc_ClearTraces(void)
{
-
- if (!tracemalloc_config.tracing) {
- return;
+ TABLES_LOCK();
+ if (tracemalloc_config.tracing) {
+ tracemalloc_clear_traces_unlocked();
}
- set_reentrant(1);
- tracemalloc_clear_traces();
- set_reentrant(0);
+ TABLES_UNLOCK();
}
PyObject *
@@ -1506,19 +1554,10 @@ PyObject *
_PyTraceMalloc_GetObjectTraceback(PyObject *obj)
/*[clinic end generated code: output=41ee0553a658b0aa input=29495f1b21c53212]*/
{
- PyTypeObject *type;
- traceback_t *traceback;
-
- type = Py_TYPE(obj);
+ PyTypeObject *type = Py_TYPE(obj);
const size_t presize = _PyType_PreHeaderSize(type);
uintptr_t ptr = (uintptr_t)((char *)obj - presize);
-
- traceback = tracemalloc_get_traceback(DEFAULT_DOMAIN, ptr);
- if (traceback == NULL) {
- Py_RETURN_NONE;
- }
-
- return traceback_to_pyobject(traceback, NULL);
+ return _PyTraceMalloc_GetTraceback(DEFAULT_DOMAIN, ptr);
}
int _PyTraceMalloc_GetTracebackLimit(void) {
@@ -1530,14 +1569,19 @@ _PyTraceMalloc_GetMemory(void) {
size_t size;
- size = _Py_hashtable_size(tracemalloc_tracebacks);
- size += _Py_hashtable_size(tracemalloc_filenames);
-
TABLES_LOCK();
- size += _Py_hashtable_size(tracemalloc_traces);
- _Py_hashtable_foreach(tracemalloc_domains,
- tracemalloc_get_tracemalloc_memory_cb, &size);
+ if (tracemalloc_config.tracing) {
+ size = _Py_hashtable_size(tracemalloc_tracebacks);
+ size += _Py_hashtable_size(tracemalloc_filenames);
+ size += _Py_hashtable_size(tracemalloc_traces);
+ _Py_hashtable_foreach(tracemalloc_domains,
+ tracemalloc_get_tracemalloc_memory_cb, &size);
+ }
+ else {
+ size = 0;
+ }
TABLES_UNLOCK();
+
return size;
}
@@ -1547,12 +1591,15 @@ _PyTraceMalloc_GetTracedMemory(void)
{
Py_ssize_t size, peak_size;
- if (!tracemalloc_config.tracing)
- return Py_BuildValue("ii", 0, 0);
-
TABLES_LOCK();
- size = tracemalloc_traced_memory;
- peak_size = tracemalloc_peak_traced_memory;
+ if (tracemalloc_config.tracing) {
+ size = tracemalloc_traced_memory;
+ peak_size = tracemalloc_peak_traced_memory;
+ }
+ else {
+ size = 0;
+ peak_size = 0;
+ }
TABLES_UNLOCK();
return Py_BuildValue("nn", size, peak_size);
@@ -1561,10 +1608,9 @@ _PyTraceMalloc_GetTracedMemory(void)
void
_PyTraceMalloc_ResetPeak(void)
{
- if (!tracemalloc_config.tracing) {
- return;
- }
TABLES_LOCK();
- tracemalloc_peak_traced_memory = tracemalloc_traced_memory;
+ if (tracemalloc_config.tracing) {
+ tracemalloc_peak_traced_memory = tracemalloc_traced_memory;
+ }
TABLES_UNLOCK();
}
diff --git a/README.rst b/README.rst
index dfb321505ef412..ec89d56378fa1d 100644
--- a/README.rst
+++ b/README.rst
@@ -1,4 +1,4 @@
-This is Python version 3.13.1
+This is Python version 3.13.2
=============================
.. image:: https://github.com/python/cpython/workflows/Tests/badge.svg
diff --git a/Tools/build/mypy.ini b/Tools/build/mypy.ini
index cf1dac7fde5ac5..0e5d6e874a72e5 100644
--- a/Tools/build/mypy.ini
+++ b/Tools/build/mypy.ini
@@ -8,6 +8,6 @@ python_version = 3.10
# ...And be strict:
strict = True
-strict_concatenate = True
+extra_checks = True
enable_error_code = ignore-without-code,redundant-expr,truthy-bool,possibly-undefined
warn_unreachable = True
diff --git a/Tools/cases_generator/mypy.ini b/Tools/cases_generator/mypy.ini
index 8e5a31851c596e..e54349bf54a954 100644
--- a/Tools/cases_generator/mypy.ini
+++ b/Tools/cases_generator/mypy.ini
@@ -8,7 +8,7 @@ python_version = 3.10
# ...And be strict:
strict = True
-strict_concatenate = True
+extra_checks = True
enable_error_code = ignore-without-code,redundant-expr,truthy-bool,possibly-undefined
warn_unreachable = True
allow_redefinition = True
diff --git a/Tools/clinic/libclinic/cpp.py b/Tools/clinic/libclinic/cpp.py
index e115d65a88e1b6..3cfe99b712641d 100644
--- a/Tools/clinic/libclinic/cpp.py
+++ b/Tools/clinic/libclinic/cpp.py
@@ -132,6 +132,9 @@ def pop_stack() -> TokenAndCondition:
if line_comment:
line = before.rstrip()
+ if self.in_comment:
+ return
+
if not line.startswith('#'):
return
diff --git a/Tools/clinic/libclinic/parse_args.py b/Tools/clinic/libclinic/parse_args.py
index 93b4b592b27f97..1e6ebdd68367b5 100644
--- a/Tools/clinic/libclinic/parse_args.py
+++ b/Tools/clinic/libclinic/parse_args.py
@@ -146,6 +146,9 @@ def declare_parser(
GETSET_DOCSTRING_PROTOTYPE_STRVAR: Final[str] = libclinic.normalize_snippet("""
PyDoc_STRVAR({getset_basename}__doc__,
{docstring});
+ #if defined({getset_basename}_DOCSTR)
+ # undef {getset_basename}_DOCSTR
+ #endif
#define {getset_basename}_DOCSTR {getset_basename}__doc__
""")
IMPL_DEFINITION_PROTOTYPE: Final[str] = libclinic.normalize_snippet("""
diff --git a/Tools/clinic/mypy.ini b/Tools/clinic/mypy.ini
index b1fdad673c61a1..6520e05db0bc31 100644
--- a/Tools/clinic/mypy.ini
+++ b/Tools/clinic/mypy.ini
@@ -7,6 +7,6 @@ python_version = 3.10
# and be strict!
strict = True
-strict_concatenate = True
+extra_checks = True
enable_error_code = ignore-without-code,redundant-expr,truthy-bool
warn_unreachable = True
diff --git a/Tools/jit/README.md b/Tools/jit/README.md
index bc6f793b296f12..42d4fcd48e6b41 100644
--- a/Tools/jit/README.md
+++ b/Tools/jit/README.md
@@ -3,6 +3,8 @@ The JIT Compiler
This version of CPython can be built with an experimental just-in-time compiler[^pep-744]. While most everything you already know about building and using CPython is unchanged, you will probably need to install a compatible version of LLVM first.
+Python 3.11 or newer is required to build the JIT.
+
## Installing LLVM
The JIT compiler does not require end users to install any third-party dependencies, but part of it must be *built* using LLVM[^why-llvm]. You are *not* required to build the rest of CPython using LLVM, or even the same version of LLVM (in fact, this is uncommon).
@@ -57,7 +59,7 @@ For `PCbuild`-based builds, pass the new `--experimental-jit` option to `build.b
For all other builds, pass the new `--enable-experimental-jit` option to `configure`.
-Otherwise, just configure and build as you normally would. Cross-compiling "just works", since the JIT is built for the host platform.
+Otherwise, just configure and build as you normally would. Cross-compiling "just works", since the JIT is built for the host platform.
The JIT can also be enabled or disabled using the `PYTHON_JIT` environment variable, even on builds where it is enabled or disabled by default. More details about configuring CPython with the JIT and optional values for `--enable-experimental-jit` can be found [here](https://docs.python.org/dev/whatsnew/3.13.html#experimental-jit-compiler).
diff --git a/Tools/requirements-dev.txt b/Tools/requirements-dev.txt
index a4261ff0a38d1b..b1d0e0235418fe 100644
--- a/Tools/requirements-dev.txt
+++ b/Tools/requirements-dev.txt
@@ -1,6 +1,6 @@
# Requirements file for external linters and checks we run on
# Tools/clinic, Tools/cases_generator/, and Tools/peg_generator/ in CI
-mypy==1.12
+mypy==1.13
# needed for peg_generator:
types-psutil==5.9.5.20240423
diff --git a/configure b/configure
index ae70f02f70e5d1..1cd1f690f7b9c1 100755
--- a/configure
+++ b/configure
@@ -912,6 +912,7 @@ CFLAGS_ALIASING
OPT
BOLT_APPLY_FLAGS
BOLT_INSTRUMENT_FLAGS
+BOLT_COMMON_FLAGS
BOLT_BINARIES
MERGE_FDATA
LLVM_BOLT
@@ -1146,6 +1147,7 @@ CPPFLAGS
CPP
HOSTRUNNER
PROFILE_TASK
+BOLT_COMMON_FLAGS
BOLT_INSTRUMENT_FLAGS
BOLT_APPLY_FLAGS
LIBUUID_CFLAGS
@@ -1969,6 +1971,8 @@ Some influential environment variables:
HOSTRUNNER Program to run CPython for the host platform
PROFILE_TASK
Python args for PGO generation task
+ BOLT_COMMON_FLAGS
+ Common arguments to llvm-bolt when instrumenting and applying
BOLT_INSTRUMENT_FLAGS
Arguments to llvm-bolt when instrumenting binaries
BOLT_APPLY_FLAGS
@@ -9326,11 +9330,21 @@ then :
fi
+
+{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking BOLT_COMMON_FLAGS" >&5
+printf %s "checking BOLT_COMMON_FLAGS... " >&6; }
+if test -z "${BOLT_COMMON_FLAGS}"
+then
+ BOLT_COMMON_FLAGS=" -update-debug-sections -skip-funcs=_PyEval_EvalFrameDefault,sre_ucs1_match/1,sre_ucs2_match/1,sre_ucs4_match/1 "
+
+fi
+
+
{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking BOLT_INSTRUMENT_FLAGS" >&5
printf %s "checking BOLT_INSTRUMENT_FLAGS... " >&6; }
if test -z "${BOLT_INSTRUMENT_FLAGS}"
then
- BOLT_INSTRUMENT_FLAGS=
+ BOLT_INSTRUMENT_FLAGS="${BOLT_COMMON_FLAGS}"
fi
{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $BOLT_INSTRUMENT_FLAGS" >&5
printf "%s\n" "$BOLT_INSTRUMENT_FLAGS" >&6; }
@@ -9340,7 +9354,7 @@ printf "%s\n" "$BOLT_INSTRUMENT_FLAGS" >&6; }
printf %s "checking BOLT_APPLY_FLAGS... " >&6; }
if test -z "${BOLT_APPLY_FLAGS}"
then
- BOLT_APPLY_FLAGS=" -update-debug-sections -reorder-blocks=ext-tsp -reorder-functions=hfsort+ -split-functions -icf=1 -inline-all -split-eh -reorder-functions-use-hot-size -peepholes=none -jump-tables=aggressive -inline-ap -indirect-call-promotion=all -dyno-stats -use-gnu-stack -frame-opt=hot "
+ BOLT_APPLY_FLAGS=" ${BOLT_COMMON_FLAGS} -reorder-blocks=ext-tsp -reorder-functions=cdsort -split-functions -icf=1 -inline-all -split-eh -reorder-functions-use-hot-size -peepholes=none -jump-tables=aggressive -inline-ap -indirect-call-promotion=all -dyno-stats -use-gnu-stack -frame-opt=hot "
fi
{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $BOLT_APPLY_FLAGS" >&5
@@ -10980,6 +10994,12 @@ if test "x$ac_cv_header_sys_param_h" = xyes
then :
printf "%s\n" "#define HAVE_SYS_PARAM_H 1" >>confdefs.h
+fi
+ac_fn_c_check_header_compile "$LINENO" "sys/pidfd.h" "ac_cv_header_sys_pidfd_h" "$ac_includes_default"
+if test "x$ac_cv_header_sys_pidfd_h" = xyes
+then :
+ printf "%s\n" "#define HAVE_SYS_PIDFD_H 1" >>confdefs.h
+
fi
ac_fn_c_check_header_compile "$LINENO" "sys/poll.h" "ac_cv_header_sys_poll_h" "$ac_includes_default"
if test "x$ac_cv_header_sys_poll_h" = xyes
@@ -13461,7 +13481,7 @@ save_LIBS=$LIBS
CPPFLAGS="$CPPFLAGS $LIBUUID_CFLAGS"
- LDFLAGS="$LDFLAGS $LIBUUID_LIBS"
+ LIBS="$LIBS $LIBUUID_LIBS"
for ac_header in uuid/uuid.h
do :
ac_fn_c_check_header_compile "$LINENO" "uuid/uuid.h" "ac_cv_header_uuid_uuid_h" "$ac_includes_default"
@@ -13587,7 +13607,7 @@ save_LIBS=$LIBS
CPPFLAGS="$CPPFLAGS $LIBUUID_CFLAGS"
- LDFLAGS="$LDFLAGS $LIBUUID_LIBS"
+ LIBS="$LIBS $LIBUUID_LIBS"
for ac_header in uuid/uuid.h
do :
ac_fn_c_check_header_compile "$LINENO" "uuid/uuid.h" "ac_cv_header_uuid_uuid_h" "$ac_includes_default"
@@ -14357,7 +14377,7 @@ save_LIBS=$LIBS
CPPFLAGS="$CPPFLAGS $LIBFFI_CFLAGS"
- LDFLAGS="$LDFLAGS $LIBFFI_LIBS"
+ LIBS="$LIBS $LIBFFI_LIBS"
ac_fn_c_check_header_compile "$LINENO" "ffi.h" "ac_cv_header_ffi_h" "$ac_includes_default"
if test "x$ac_cv_header_ffi_h" = xyes
then :
@@ -14430,7 +14450,7 @@ save_LIBS=$LIBS
CPPFLAGS="$CPPFLAGS $LIBFFI_CFLAGS"
- LDFLAGS="$LDFLAGS $LIBFFI_LIBS"
+ LIBS="$LIBS $LIBFFI_LIBS"
ac_fn_c_check_header_compile "$LINENO" "ffi.h" "ac_cv_header_ffi_h" "$ac_includes_default"
if test "x$ac_cv_header_ffi_h" = xyes
then :
@@ -14541,8 +14561,8 @@ save_LDFLAGS=$LDFLAGS
save_LIBS=$LIBS
- CFLAGS="$LIBFFI_CFLAGS $CFLAGS"
- LDFLAGS="$LIBFFI_LIBS $LDFLAGS"
+ CFLAGS="$CFLAGS $LIBFFI_CFLAGS"
+ LIBS="$LIBS $LIBFFI_LIBS"
@@ -14781,8 +14801,8 @@ save_LDFLAGS=$LDFLAGS
save_LIBS=$LIBS
- CPPFLAGS="$LIBMPDEC_CFLAGS $CPPFLAGS"
- LIBS="$LIBMPDEC_LIBS $LIBS"
+ CPPFLAGS="$CPPFLAGS $LIBMPDEC_CFLAGS"
+ LIBS="$LIBS $LIBMPDEC_LIBS"
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
@@ -15049,7 +15069,7 @@ save_LIBS=$LIBS
CPPFLAGS="$CPPFLAGS $LIBSQLITE3_CFLAGS"
- LDFLAGS="$LIBSQLITE3_LIBS $LDFLAGS"
+ LIBS="$LIBS $LIBSQLITE3_LIBS"
ac_fn_c_check_header_compile "$LINENO" "sqlite3.h" "ac_cv_header_sqlite3_h" "$ac_includes_default"
if test "x$ac_cv_header_sqlite3_h" = xyes
@@ -15929,7 +15949,7 @@ save_LIBS=$LIBS
CPPFLAGS="$CPPFLAGS $TCLTK_CFLAGS"
- LIBS="$TCLTK_LIBS $LDFLAGS"
+ LIBS="$LIBS $TCLTK_LIBS"
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
@@ -15995,7 +16015,7 @@ save_LIBS=$LIBS
CPPFLAGS="$CPPFLAGS $GDBM_CFLAGS"
- LDFLAGS="$GDBM_LIBS $LDFLAGS"
+ LIBS="$LIBS $GDBM_LIBS"
for ac_header in gdbm.h
do :
ac_fn_c_check_header_compile "$LINENO" "gdbm.h" "ac_cv_header_gdbm_h" "$ac_includes_default"
@@ -18923,10 +18943,10 @@ then :
printf "%s\n" "#define HAVE_TRUNCATE 1" >>confdefs.h
fi
-ac_fn_c_check_func "$LINENO" "ttyname" "ac_cv_func_ttyname"
-if test "x$ac_cv_func_ttyname" = xyes
+ac_fn_c_check_func "$LINENO" "ttyname_r" "ac_cv_func_ttyname_r"
+if test "x$ac_cv_func_ttyname_r" = xyes
then :
- printf "%s\n" "#define HAVE_TTYNAME 1" >>confdefs.h
+ printf "%s\n" "#define HAVE_TTYNAME_R 1" >>confdefs.h
fi
ac_fn_c_check_func "$LINENO" "umask" "ac_cv_func_umask"
@@ -20272,7 +20292,7 @@ save_LIBS=$LIBS
CPPFLAGS="$CPPFLAGS $ZLIB_CFLAGS"
- LDFLAGS="$LDFLAGS $ZLIB_LIBS"
+ LIBS="$LIBS $ZLIB_LIBS"
for ac_header in zlib.h
do :
ac_fn_c_check_header_compile "$LINENO" "zlib.h" "ac_cv_header_zlib_h" "$ac_includes_default"
@@ -20401,7 +20421,7 @@ save_LIBS=$LIBS
CPPFLAGS="$CPPFLAGS $ZLIB_CFLAGS"
- LDFLAGS="$LDFLAGS $ZLIB_LIBS"
+ LIBS="$LIBS $ZLIB_LIBS"
for ac_header in zlib.h
do :
ac_fn_c_check_header_compile "$LINENO" "zlib.h" "ac_cv_header_zlib_h" "$ac_includes_default"
@@ -20620,7 +20640,7 @@ save_LIBS=$LIBS
CPPFLAGS="$CPPFLAGS $BZIP2_CFLAGS"
- LDFLAGS="$LDFLAGS $BZIP2_LIBS"
+ LIBS="$LIBS $BZIP2_LIBS"
for ac_header in bzlib.h
do :
ac_fn_c_check_header_compile "$LINENO" "bzlib.h" "ac_cv_header_bzlib_h" "$ac_includes_default"
@@ -20702,7 +20722,7 @@ save_LIBS=$LIBS
CPPFLAGS="$CPPFLAGS $BZIP2_CFLAGS"
- LDFLAGS="$LDFLAGS $BZIP2_LIBS"
+ LIBS="$LIBS $BZIP2_LIBS"
for ac_header in bzlib.h
do :
ac_fn_c_check_header_compile "$LINENO" "bzlib.h" "ac_cv_header_bzlib_h" "$ac_includes_default"
@@ -20848,7 +20868,7 @@ save_LIBS=$LIBS
CPPFLAGS="$CPPFLAGS $LIBLZMA_CFLAGS"
- LDFLAGS="$LDFLAGS $LIBLZMA_LIBS"
+ LIBS="$LIBS $LIBLZMA_LIBS"
for ac_header in lzma.h
do :
ac_fn_c_check_header_compile "$LINENO" "lzma.h" "ac_cv_header_lzma_h" "$ac_includes_default"
@@ -20930,7 +20950,7 @@ save_LIBS=$LIBS
CPPFLAGS="$CPPFLAGS $LIBLZMA_CFLAGS"
- LDFLAGS="$LDFLAGS $LIBLZMA_LIBS"
+ LIBS="$LIBS $LIBLZMA_LIBS"
for ac_header in lzma.h
do :
ac_fn_c_check_header_compile "$LINENO" "lzma.h" "ac_cv_header_lzma_h" "$ac_includes_default"
@@ -24856,7 +24876,7 @@ save_LIBS=$LIBS
CPPFLAGS="$CPPFLAGS $LIBREADLINE_CFLAGS"
- LDFLAGS="$LDFLAGS $LIBREADLINE_LIBS"
+ LIBS="$LIBS $LIBREADLINE_LIBS"
for ac_header in readline/readline.h
do :
ac_fn_c_check_header_compile "$LINENO" "readline/readline.h" "ac_cv_header_readline_readline_h" "$ac_includes_default"
@@ -24935,7 +24955,7 @@ save_LIBS=$LIBS
CPPFLAGS="$CPPFLAGS $LIBREADLINE_CFLAGS"
- LDFLAGS="$LDFLAGS $LIBREADLINE_LIBS"
+ LIBS="$LIBS $LIBREADLINE_LIBS"
for ac_header in readline/readline.h
do :
ac_fn_c_check_header_compile "$LINENO" "readline/readline.h" "ac_cv_header_readline_readline_h" "$ac_includes_default"
@@ -25087,7 +25107,7 @@ save_LIBS=$LIBS
CPPFLAGS="$CPPFLAGS $LIBEDIT_CFLAGS"
- LDFLAGS="$LDFLAGS $LIBEDIT_LIBS"
+ LIBS="$LIBS $LIBEDIT_LIBS"
for ac_header in editline/readline.h
do :
ac_fn_c_check_header_compile "$LINENO" "editline/readline.h" "ac_cv_header_editline_readline_h" "$ac_includes_default"
@@ -25168,7 +25188,7 @@ save_LIBS=$LIBS
CPPFLAGS="$CPPFLAGS $LIBEDIT_CFLAGS"
- LDFLAGS="$LDFLAGS $LIBEDIT_LIBS"
+ LIBS="$LIBS $LIBEDIT_LIBS"
for ac_header in editline/readline.h
do :
ac_fn_c_check_header_compile "$LINENO" "editline/readline.h" "ac_cv_header_editline_readline_h" "$ac_includes_default"
@@ -25276,7 +25296,7 @@ save_LIBS=$LIBS
CPPFLAGS="$CPPFLAGS $READLINE_CFLAGS"
- LIBS="$READLINE_LIBS $LIBS"
+ LIBS="$LIBS $READLINE_LIBS"
LIBS_SAVE=$LIBS
diff --git a/configure.ac b/configure.ac
index a764028e49fcfd..3fcb18922c5330 100644
--- a/configure.ac
+++ b/configure.ac
@@ -2214,6 +2214,27 @@ AS_VAR_IF([enable_shared], [yes], [
BOLT_BINARIES="${BOLT_BINARIES} \$(INSTSONAME)"
])
+AC_ARG_VAR(
+ [BOLT_COMMON_FLAGS],
+ [Common arguments to llvm-bolt when instrumenting and applying]
+)
+
+AC_MSG_CHECKING([BOLT_COMMON_FLAGS])
+if test -z "${BOLT_COMMON_FLAGS}"
+then
+ AS_VAR_SET(
+ [BOLT_COMMON_FLAGS],
+ [m4_normalize("
+ [-update-debug-sections]
+
+ dnl At least LLVM 19.x doesn't support computed gotos in PIC compiled code.
+ dnl Exclude functions containing computed gotos.
+ dnl TODO this may be fixed in LLVM 20.x via https://github.com/llvm/llvm-project/pull/120267.
+ [-skip-funcs=_PyEval_EvalFrameDefault,sre_ucs1_match/1,sre_ucs2_match/1,sre_ucs4_match/1]
+ ")]
+ )
+fi
+
AC_ARG_VAR(
[BOLT_INSTRUMENT_FLAGS],
[Arguments to llvm-bolt when instrumenting binaries]
@@ -2221,7 +2242,7 @@ AC_ARG_VAR(
AC_MSG_CHECKING([BOLT_INSTRUMENT_FLAGS])
if test -z "${BOLT_INSTRUMENT_FLAGS}"
then
- BOLT_INSTRUMENT_FLAGS=
+ BOLT_INSTRUMENT_FLAGS="${BOLT_COMMON_FLAGS}"
fi
AC_MSG_RESULT([$BOLT_INSTRUMENT_FLAGS])
@@ -2235,9 +2256,9 @@ then
AS_VAR_SET(
[BOLT_APPLY_FLAGS],
[m4_normalize("
- -update-debug-sections
+ ${BOLT_COMMON_FLAGS}
-reorder-blocks=ext-tsp
- -reorder-functions=hfsort+
+ -reorder-functions=cdsort
-split-functions
-icf=1
-inline-all
@@ -2985,7 +3006,7 @@ AC_CHECK_HEADERS([ \
linux/tipc.h linux/wait.h netdb.h net/ethernet.h netinet/in.h netpacket/packet.h poll.h process.h pthread.h pty.h \
sched.h setjmp.h shadow.h signal.h spawn.h stropts.h sys/audioio.h sys/bsdtty.h sys/devpoll.h \
sys/endian.h sys/epoll.h sys/event.h sys/eventfd.h sys/file.h sys/ioctl.h sys/kern_control.h \
- sys/loadavg.h sys/lock.h sys/memfd.h sys/mkdev.h sys/mman.h sys/modem.h sys/param.h sys/poll.h \
+ sys/loadavg.h sys/lock.h sys/memfd.h sys/mkdev.h sys/mman.h sys/modem.h sys/param.h sys/pidfd.h sys/poll.h \
sys/random.h sys/resource.h sys/select.h sys/sendfile.h sys/socket.h sys/soundcard.h sys/stat.h \
sys/statvfs.h sys/sys_domain.h sys/syscall.h sys/sysmacros.h sys/termio.h sys/time.h sys/times.h sys/timerfd.h \
sys/types.h sys/uio.h sys/un.h sys/utsname.h sys/wait.h sys/xattr.h sysexits.h syslog.h \
@@ -3744,7 +3765,7 @@ AS_VAR_IF([have_uuid], [missing], [
], [
WITH_SAVE_ENV([
CPPFLAGS="$CPPFLAGS $LIBUUID_CFLAGS"
- LDFLAGS="$LDFLAGS $LIBUUID_LIBS"
+ LIBS="$LIBS $LIBUUID_LIBS"
AC_CHECK_HEADERS([uuid/uuid.h], [
PY_CHECK_LIB([uuid], [uuid_generate_time], [have_uuid=yes])
PY_CHECK_LIB([uuid], [uuid_generate_time_safe],
@@ -3978,7 +3999,7 @@ AS_VAR_IF([have_libffi], [missing], [
PKG_CHECK_MODULES([LIBFFI], [libffi], [have_libffi=yes], [
WITH_SAVE_ENV([
CPPFLAGS="$CPPFLAGS $LIBFFI_CFLAGS"
- LDFLAGS="$LDFLAGS $LIBFFI_LIBS"
+ LIBS="$LIBS $LIBFFI_LIBS"
AC_CHECK_HEADER([ffi.h], [
AC_CHECK_LIB([ffi], [ffi_call], [
have_libffi=yes
@@ -4012,8 +4033,8 @@ AS_VAR_IF([have_libffi], [yes], [
AS_VAR_IF([ac_cv_lib_dl_dlopen], [yes], [AS_VAR_APPEND([LIBFFI_LIBS], [" -ldl"])])
WITH_SAVE_ENV([
- CFLAGS="$LIBFFI_CFLAGS $CFLAGS"
- LDFLAGS="$LIBFFI_LIBS $LDFLAGS"
+ CFLAGS="$CFLAGS $LIBFFI_CFLAGS"
+ LIBS="$LIBS $LIBFFI_LIBS"
+ PY_CHECK_FUNC([ffi_prep_cif_var], [@%:@include <ffi.h>])
+ PY_CHECK_FUNC([ffi_prep_closure_loc], [@%:@include <ffi.h>])
@@ -4051,8 +4072,8 @@ AS_VAR_IF(
AS_VAR_IF([with_system_libmpdec], [yes],
[WITH_SAVE_ENV([
- CPPFLAGS="$LIBMPDEC_CFLAGS $CPPFLAGS"
- LIBS="$LIBMPDEC_LIBS $LIBS"
+ CPPFLAGS="$CPPFLAGS $LIBMPDEC_CFLAGS"
+ LIBS="$LIBS $LIBMPDEC_LIBS"
AC_LINK_IFELSE([
AC_LANG_PROGRAM([
@@ -4185,7 +4206,7 @@ WITH_SAVE_ENV([
dnl bpo-45774/GH-29507: The CPP check in AC_CHECK_HEADER can fail on FreeBSD,
dnl hence CPPFLAGS instead of CFLAGS.
CPPFLAGS="$CPPFLAGS $LIBSQLITE3_CFLAGS"
- LDFLAGS="$LIBSQLITE3_LIBS $LDFLAGS"
+ LIBS="$LIBS $LIBSQLITE3_LIBS"
AC_CHECK_HEADER([sqlite3.h], [
have_sqlite3=yes
@@ -4288,7 +4309,7 @@ AS_CASE([$ac_sys_system],
WITH_SAVE_ENV([
CPPFLAGS="$CPPFLAGS $TCLTK_CFLAGS"
- LIBS="$TCLTK_LIBS $LDFLAGS"
+ LIBS="$LIBS $TCLTK_LIBS"
AC_LINK_IFELSE([
AC_LANG_PROGRAM([
@@ -4330,7 +4351,7 @@ AC_ARG_VAR([GDBM_CFLAGS], [C compiler flags for gdbm])
AC_ARG_VAR([GDBM_LIBS], [additional linker flags for gdbm])
WITH_SAVE_ENV([
CPPFLAGS="$CPPFLAGS $GDBM_CFLAGS"
- LDFLAGS="$GDBM_LIBS $LDFLAGS"
+ LIBS="$LIBS $GDBM_LIBS"
AC_CHECK_HEADERS([gdbm.h], [
AC_CHECK_LIB([gdbm], [gdbm_open], [
have_gdbm=yes
@@ -5113,7 +5134,7 @@ AC_CHECK_FUNCS([ \
sigfillset siginterrupt sigpending sigrelse sigtimedwait sigwait \
sigwaitinfo snprintf splice strftime strlcpy strsignal symlinkat sync \
sysconf tcgetpgrp tcsetpgrp tempnam timegm times tmpfile \
- tmpnam tmpnam_r truncate ttyname umask uname unlinkat unlockpt utimensat utimes vfork \
+ tmpnam tmpnam_r truncate ttyname_r umask uname unlinkat unlockpt utimensat utimes vfork \
wait wait3 wait4 waitid waitpid wcscoll wcsftime wcsxfrm wmemcmp writev \
])
@@ -5288,7 +5309,7 @@ PKG_CHECK_MODULES([ZLIB], [zlib >= 1.2.0], [
], [
WITH_SAVE_ENV([
CPPFLAGS="$CPPFLAGS $ZLIB_CFLAGS"
- LDFLAGS="$LDFLAGS $ZLIB_LIBS"
+ LIBS="$LIBS $ZLIB_LIBS"
AC_CHECK_HEADERS([zlib.h], [
PY_CHECK_LIB([z], [gzread], [have_zlib=yes], [have_zlib=no])
], [have_zlib=no])
@@ -5312,7 +5333,7 @@ PY_CHECK_EMSCRIPTEN_PORT([BZIP2], [-sUSE_BZIP2])
PKG_CHECK_MODULES([BZIP2], [bzip2], [have_bzip2=yes], [
WITH_SAVE_ENV([
CPPFLAGS="$CPPFLAGS $BZIP2_CFLAGS"
- LDFLAGS="$LDFLAGS $BZIP2_LIBS"
+ LIBS="$LIBS $BZIP2_LIBS"
AC_CHECK_HEADERS([bzlib.h], [
AC_CHECK_LIB([bz2], [BZ2_bzCompress], [have_bzip2=yes], [have_bzip2=no])
], [have_bzip2=no])
@@ -5326,7 +5347,7 @@ PKG_CHECK_MODULES([BZIP2], [bzip2], [have_bzip2=yes], [
PKG_CHECK_MODULES([LIBLZMA], [liblzma], [have_liblzma=yes], [
WITH_SAVE_ENV([
CPPFLAGS="$CPPFLAGS $LIBLZMA_CFLAGS"
- LDFLAGS="$LDFLAGS $LIBLZMA_LIBS"
+ LIBS="$LIBS $LIBLZMA_LIBS"
AC_CHECK_HEADERS([lzma.h], [
AC_CHECK_LIB([lzma], [lzma_easy_encoder], [have_liblzma=yes], [have_liblzma=no])
], [have_liblzma=no])
@@ -6318,7 +6339,7 @@ AS_VAR_IF([with_readline], [readline], [
], [
WITH_SAVE_ENV([
CPPFLAGS="$CPPFLAGS $LIBREADLINE_CFLAGS"
- LDFLAGS="$LDFLAGS $LIBREADLINE_LIBS"
+ LIBS="$LIBS $LIBREADLINE_LIBS"
AC_CHECK_HEADERS([readline/readline.h], [
AC_CHECK_LIB([readline], [readline], [
LIBREADLINE=readline
@@ -6339,7 +6360,7 @@ AS_VAR_IF([with_readline], [edit], [
], [
WITH_SAVE_ENV([
CPPFLAGS="$CPPFLAGS $LIBEDIT_CFLAGS"
- LDFLAGS="$LDFLAGS $LIBEDIT_LIBS"
+ LIBS="$LIBS $LIBEDIT_LIBS"
AC_CHECK_HEADERS([editline/readline.h], [
AC_CHECK_LIB([edit], [readline], [
LIBREADLINE=edit
@@ -6363,7 +6384,7 @@ AS_VAR_IF([with_readline], [no], [
WITH_SAVE_ENV([
CPPFLAGS="$CPPFLAGS $READLINE_CFLAGS"
- LIBS="$READLINE_LIBS $LIBS"
+ LIBS="$LIBS $READLINE_LIBS"
LIBS_SAVE=$LIBS
m4_define([readline_includes], [
diff --git a/iOS/README.rst b/iOS/README.rst
index e33455eef8f44a..13b885144932e4 100644
--- a/iOS/README.rst
+++ b/iOS/README.rst
@@ -285,52 +285,42 @@ This will:
* Install the Python iOS framework into the copy of the testbed project; and
* Run the test suite on an "iPhone SE (3rd generation)" simulator.
-While the test suite is running, Xcode does not display any console output.
-After showing some Xcode build commands, the console output will print ``Testing
-started``, and then appear to stop. It will remain in this state until the test
-suite completes. On a 2022 M1 MacBook Pro, the test suite takes approximately 12
-minutes to run; a couple of extra minutes is required to boot and prepare the
-iOS simulator.
-
On success, the test suite will exit and report successful completion of the
-test suite. No output of the Python test suite will be displayed.
-
-On failure, the output of the Python test suite *will* be displayed. This will
-show the details of the tests that failed.
+test suite. On a 2022 M1 MacBook Pro, the test suite takes approximately 15
+minutes to run; a couple of extra minutes is required to compile the testbed
+project, and then boot and prepare the iOS simulator.
Debugging test failures
-----------------------
-The easiest way to diagnose a single test failure is to open the testbed project
-in Xcode and run the tests from there using the "Product > Test" menu item.
-
-To test in Xcode, you must ensure the testbed project has a copy of a compiled
-framework. If you've configured your build with the default install location of
-``iOS/Frameworks``, you can copy from that location into the test project. To
-test on an ARM64 simulator, run::
-
- $ rm -rf iOS/testbed/Python.xcframework/ios-arm64_x86_64-simulator/*
- $ cp -r iOS/Frameworks/arm64-iphonesimulator/* iOS/testbed/Python.xcframework/ios-arm64_x86_64-simulator
+Running ``make test`` generates a standalone version of the ``iOS/testbed``
+project, and runs the full test suite. It does this using ``iOS/testbed``
+itself - the folder is an executable module that can be used to create and run
+a clone of the testbed project.
-To test on an x86-64 simulator, run::
+You can generate your own standalone testbed instance by running::
- $ rm -rf iOS/testbed/Python.xcframework/ios-arm64_x86_64-simulator/*
- $ cp -r iOS/Frameworks/x86_64-iphonesimulator/* iOS/testbed/Python.xcframework/ios-arm64_x86_64-simulator
+ $ python iOS/testbed clone --framework iOS/Frameworks/arm64-iphonesimulator my-testbed
-To test on a physical device::
+This invocation assumes that ``iOS/Frameworks/arm64-iphonesimulator`` is the
+path to the iOS simulator framework for your platform (ARM64 in this case);
+``my-testbed`` is the name of the folder for the new testbed clone.
- $ rm -rf iOS/testbed/Python.xcframework/ios-arm64/*
- $ cp -r iOS/Frameworks/arm64-iphoneos/* iOS/testbed/Python.xcframework/ios-arm64
+You can then use the ``my-testbed`` folder to run the Python test suite,
+passing in any command line arguments you may require. For example, if you're
+trying to diagnose a failure in the ``os`` module, you might run::
-Alternatively, you can configure your build to install directly into the
-testbed project. For a simulator, use::
+ $ python my-testbed run -- test -W test_os
- --enable-framework=$(pwd)/iOS/testbed/Python.xcframework/ios-arm64_x86_64-simulator
+This is the equivalent of running ``python -m test -W test_os`` on a desktop
+Python build. Any arguments after the ``--`` will be passed to testbed as if
+they were arguments to ``python -m`` on a desktop machine.
-For a physical device, use::
+You can also open the testbed project in Xcode by running::
- --enable-framework=$(pwd)/iOS/testbed/Python.xcframework/ios-arm64
+ $ open my-testbed/iOSTestbed.xcodeproj
+This will allow you to use the full Xcode suite of tools for debugging.
Testing on an iOS device
^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/iOS/Resources/bin/arm64-apple-ios-ar b/iOS/Resources/bin/arm64-apple-ios-ar
index 8122332b9c1de0..3cf3eb218741fa 100755
--- a/iOS/Resources/bin/arm64-apple-ios-ar
+++ b/iOS/Resources/bin/arm64-apple-ios-ar
@@ -1,2 +1,2 @@
#!/bin/sh
-xcrun --sdk iphoneos${IOS_SDK_VERSION} ar $@
+xcrun --sdk iphoneos${IOS_SDK_VERSION} ar "$@"
diff --git a/iOS/Resources/bin/arm64-apple-ios-clang b/iOS/Resources/bin/arm64-apple-ios-clang
index 4d525751eba798..c39519cd1f8c94 100755
--- a/iOS/Resources/bin/arm64-apple-ios-clang
+++ b/iOS/Resources/bin/arm64-apple-ios-clang
@@ -1,2 +1,2 @@
#!/bin/sh
-xcrun --sdk iphoneos${IOS_SDK_VERSION} clang -target arm64-apple-ios $@
+xcrun --sdk iphoneos${IOS_SDK_VERSION} clang -target arm64-apple-ios "$@"
diff --git a/iOS/Resources/bin/arm64-apple-ios-clang++ b/iOS/Resources/bin/arm64-apple-ios-clang++
index f24bec11268f7e..d9b12925f384b9 100755
--- a/iOS/Resources/bin/arm64-apple-ios-clang++
+++ b/iOS/Resources/bin/arm64-apple-ios-clang++
@@ -1,2 +1,2 @@
#!/bin/sh
-xcrun --sdk iphoneos${IOS_SDK_VERSION} clang++ -target arm64-apple-ios $@
+xcrun --sdk iphoneos${IOS_SDK_VERSION} clang++ -target arm64-apple-ios "$@"
diff --git a/iOS/Resources/bin/arm64-apple-ios-cpp b/iOS/Resources/bin/arm64-apple-ios-cpp
index 891bb25bb4318c..24da23d3448ae0 100755
--- a/iOS/Resources/bin/arm64-apple-ios-cpp
+++ b/iOS/Resources/bin/arm64-apple-ios-cpp
@@ -1,2 +1,2 @@
#!/bin/sh
-xcrun --sdk iphoneos${IOS_SDK_VERSION} clang -target arm64-apple-ios -E $@
+xcrun --sdk iphoneos${IOS_SDK_VERSION} clang -target arm64-apple-ios -E "$@"
diff --git a/iOS/Resources/bin/arm64-apple-ios-simulator-ar b/iOS/Resources/bin/arm64-apple-ios-simulator-ar
index 74ed3bc6df1c2b..b836b6db9025bb 100755
--- a/iOS/Resources/bin/arm64-apple-ios-simulator-ar
+++ b/iOS/Resources/bin/arm64-apple-ios-simulator-ar
@@ -1,2 +1,2 @@
#!/bin/sh
-xcrun --sdk iphonesimulator${IOS_SDK_VERSION} ar $@
+xcrun --sdk iphonesimulator${IOS_SDK_VERSION} ar "$@"
diff --git a/iOS/Resources/bin/arm64-apple-ios-simulator-clang b/iOS/Resources/bin/arm64-apple-ios-simulator-clang
index 32574cad284441..92e8d853d6ebc3 100755
--- a/iOS/Resources/bin/arm64-apple-ios-simulator-clang
+++ b/iOS/Resources/bin/arm64-apple-ios-simulator-clang
@@ -1,2 +1,2 @@
#!/bin/sh
-xcrun --sdk iphonesimulator${IOS_SDK_VERSION} clang -target arm64-apple-ios-simulator $@
+xcrun --sdk iphonesimulator${IOS_SDK_VERSION} clang -target arm64-apple-ios-simulator "$@"
diff --git a/iOS/Resources/bin/arm64-apple-ios-simulator-clang++ b/iOS/Resources/bin/arm64-apple-ios-simulator-clang++
index ef37d05b512959..076469cc70cf98 100755
--- a/iOS/Resources/bin/arm64-apple-ios-simulator-clang++
+++ b/iOS/Resources/bin/arm64-apple-ios-simulator-clang++
@@ -1,2 +1,2 @@
#!/bin/sh
-xcrun --sdk iphonesimulator${IOS_SDK_VERSION} clang++ -target arm64-apple-ios-simulator $@
+xcrun --sdk iphonesimulator${IOS_SDK_VERSION} clang++ -target arm64-apple-ios-simulator "$@"
diff --git a/iOS/Resources/bin/arm64-apple-ios-simulator-cpp b/iOS/Resources/bin/arm64-apple-ios-simulator-cpp
index 6aaf6fbe188c32..c57f28cee5bcfe 100755
--- a/iOS/Resources/bin/arm64-apple-ios-simulator-cpp
+++ b/iOS/Resources/bin/arm64-apple-ios-simulator-cpp
@@ -1,2 +1,2 @@
#!/bin/sh
-xcrun --sdk iphonesimulator${IOS_SDK_VERSION} clang -target arm64-apple-ios-simulator -E $@
+xcrun --sdk iphonesimulator${IOS_SDK_VERSION} clang -target arm64-apple-ios-simulator -E "$@"
diff --git a/iOS/Resources/bin/x86_64-apple-ios-simulator-ar b/iOS/Resources/bin/x86_64-apple-ios-simulator-ar
index 74ed3bc6df1c2b..b836b6db9025bb 100755
--- a/iOS/Resources/bin/x86_64-apple-ios-simulator-ar
+++ b/iOS/Resources/bin/x86_64-apple-ios-simulator-ar
@@ -1,2 +1,2 @@
#!/bin/sh
-xcrun --sdk iphonesimulator${IOS_SDK_VERSION} ar $@
+xcrun --sdk iphonesimulator${IOS_SDK_VERSION} ar "$@"
diff --git a/iOS/Resources/bin/x86_64-apple-ios-simulator-clang b/iOS/Resources/bin/x86_64-apple-ios-simulator-clang
index bcbe91f6061e16..17cbe0c8a1e213 100755
--- a/iOS/Resources/bin/x86_64-apple-ios-simulator-clang
+++ b/iOS/Resources/bin/x86_64-apple-ios-simulator-clang
@@ -1,2 +1,2 @@
#!/bin/sh
-xcrun --sdk iphonesimulator${IOS_SDK_VERSION} clang -target x86_64-apple-ios-simulator $@
+xcrun --sdk iphonesimulator${IOS_SDK_VERSION} clang -target x86_64-apple-ios-simulator "$@"
diff --git a/iOS/Resources/bin/x86_64-apple-ios-simulator-clang++ b/iOS/Resources/bin/x86_64-apple-ios-simulator-clang++
index 86f03ea32bc2fd..565d47b24c214b 100755
--- a/iOS/Resources/bin/x86_64-apple-ios-simulator-clang++
+++ b/iOS/Resources/bin/x86_64-apple-ios-simulator-clang++
@@ -1,2 +1,2 @@
#!/bin/sh
-xcrun --sdk iphonesimulator${IOS_SDK_VERSION} clang++ -target x86_64-apple-ios-simulator $@
+xcrun --sdk iphonesimulator${IOS_SDK_VERSION} clang++ -target x86_64-apple-ios-simulator "$@"
diff --git a/iOS/Resources/bin/x86_64-apple-ios-simulator-cpp b/iOS/Resources/bin/x86_64-apple-ios-simulator-cpp
index e6a42d9b85dec7..63fc8e8de2d38d 100755
--- a/iOS/Resources/bin/x86_64-apple-ios-simulator-cpp
+++ b/iOS/Resources/bin/x86_64-apple-ios-simulator-cpp
@@ -1,2 +1,2 @@
#!/bin/sh
-xcrun --sdk iphonesimulator${IOS_SDK_VERSION} clang -target x86_64-apple-ios-simulator -E $@
+xcrun --sdk iphonesimulator${IOS_SDK_VERSION} clang -target x86_64-apple-ios-simulator -E "$@"
diff --git a/iOS/testbed/__main__.py b/iOS/testbed/__main__.py
new file mode 100644
index 00000000000000..b4499f5ac171a8
--- /dev/null
+++ b/iOS/testbed/__main__.py
@@ -0,0 +1,410 @@
+import argparse
+import asyncio
+import json
+import plistlib
+import re
+import shutil
+import subprocess
+import sys
+from contextlib import asynccontextmanager
+from datetime import datetime
+from pathlib import Path
+
+
+DECODE_ARGS = ("UTF-8", "backslashreplace")
+
+# The system log prefixes each line:
+# 2025-01-17 16:14:29.090 Df iOSTestbed[23987:1fd393b4] (Python) ...
+# 2025-01-17 16:14:29.090 E iOSTestbed[23987:1fd393b4] (Python) ...
+
+LOG_PREFIX_REGEX = re.compile(
+ r"^\d{4}-\d{2}-\d{2}" # YYYY-MM-DD
+ r"\s+\d+:\d{2}:\d{2}\.\d+" # HH:MM:SS.sss
+ r"\s+\w+" # Df/E
+ r"\s+iOSTestbed\[\d+:\w+\]" # Process/thread ID
+ r"\s+\(Python\)\s" # Logger name
+)
+
+
+# Work around a bug involving sys.exit and TaskGroups
+# (https://github.com/python/cpython/issues/101515).
+def exit(*args):
+ raise MySystemExit(*args)
+
+
+class MySystemExit(Exception):
+ pass
+
+
+# All subprocesses are executed through this context manager so that no matter
+# what happens, they can always be cancelled from another task, and they will
+# always be cleaned up on exit.
+@asynccontextmanager
+async def async_process(*args, **kwargs):
+ process = await asyncio.create_subprocess_exec(*args, **kwargs)
+ try:
+ yield process
+ finally:
+ if process.returncode is None:
+ # Allow a reasonably long time for Xcode to clean itself up,
+ # because we don't want stale emulators left behind.
+ timeout = 10
+ process.terminate()
+ try:
+ await asyncio.wait_for(process.wait(), timeout)
+ except TimeoutError:
+ print(
+ f"Command {args} did not terminate after {timeout} seconds "
+ f" - sending SIGKILL"
+ )
+ process.kill()
+
+ # Even after killing the process we must still wait for it,
+ # otherwise we'll get the warning "Exception ignored in __del__".
+ await asyncio.wait_for(process.wait(), timeout=1)
+
+
+async def async_check_output(*args, **kwargs):
+ async with async_process(
+ *args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs
+ ) as process:
+ stdout, stderr = await process.communicate()
+ if process.returncode == 0:
+ return stdout.decode(*DECODE_ARGS)
+ else:
+ raise subprocess.CalledProcessError(
+ process.returncode,
+ args,
+ stdout.decode(*DECODE_ARGS),
+ stderr.decode(*DECODE_ARGS),
+ )
+
+
+# Return a list of UDIDs associated with booted simulators
+async def list_devices():
+ # List the testing simulators, in JSON format
+ raw_json = await async_check_output(
+ "xcrun", "simctl", "--set", "testing", "list", "-j"
+ )
+ json_data = json.loads(raw_json)
+
+ # Filter out the booted iOS simulators
+ return [
+ simulator["udid"]
+ for runtime, simulators in json_data["devices"].items()
+ for simulator in simulators
+ if runtime.split(".")[-1].startswith("iOS") and simulator["state"] == "Booted"
+ ]
+
+
+async def find_device(initial_devices):
+ while True:
+ new_devices = set(await list_devices()).difference(initial_devices)
+ if len(new_devices) == 0:
+ await asyncio.sleep(1)
+ elif len(new_devices) == 1:
+ udid = new_devices.pop()
+ print(f"{datetime.now():%Y-%m-%d %H:%M:%S}: New test simulator detected")
+ print(f"UDID: {udid}")
+ return udid
+ else:
+ exit(f"Found more than one new device: {new_devices}")
+
+
+async def log_stream_task(initial_devices):
+ # Wait up to 5 minutes for the build to complete and the simulator to boot.
+ udid = await asyncio.wait_for(find_device(initial_devices), 5 * 60)
+
+ # Stream the iOS device's logs, filtering out messages that come from the
+ # XCTest test suite (catching NSLog messages from the test method), or
+ # Python itself (catching stdout/stderr content routed to the system log
+ # with config->use_system_logger).
+ args = [
+ "xcrun",
+ "simctl",
+ "--set",
+ "testing",
+ "spawn",
+ udid,
+ "log",
+ "stream",
+ "--style",
+ "compact",
+ "--predicate",
+ (
+ 'senderImagePath ENDSWITH "/iOSTestbedTests.xctest/iOSTestbedTests"'
+ ' OR senderImagePath ENDSWITH "/Python.framework/Python"'
+ ),
+ ]
+
+ async with async_process(
+ *args,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ ) as process:
+ suppress_dupes = False
+ while line := (await process.stdout.readline()).decode(*DECODE_ARGS):
+ # Strip the prefix from each log line
+ line = LOG_PREFIX_REGEX.sub("", line)
+ # The iOS log streamer can sometimes lag; when it does, it outputs
+ # a warning about messages being dropped... often multiple times.
+ # Only print the first of these duplicated warnings.
+ if line.startswith("=== Messages dropped "):
+ if not suppress_dupes:
+ suppress_dupes = True
+ sys.stdout.write(line)
+ else:
+ suppress_dupes = False
+ sys.stdout.write(line)
+ sys.stdout.flush()
+
+
+async def xcode_test(location, simulator, verbose):
+ # Run the test suite on the named simulator
+ print("Starting xcodebuild...")
+ args = [
+ "xcodebuild",
+ "test",
+ "-project",
+ str(location / "iOSTestbed.xcodeproj"),
+ "-scheme",
+ "iOSTestbed",
+ "-destination",
+ f"platform=iOS Simulator,name={simulator}",
+ "-resultBundlePath",
+ str(location / f"{datetime.now():%Y%m%d-%H%M%S}.xcresult"),
+ "-derivedDataPath",
+ str(location / "DerivedData"),
+ ]
+ if not verbose:
+ args += ["-quiet"]
+
+ async with async_process(
+ *args,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ ) as process:
+ while line := (await process.stdout.readline()).decode(*DECODE_ARGS):
+ sys.stdout.write(line)
+ sys.stdout.flush()
+
+ status = await asyncio.wait_for(process.wait(), timeout=1)
+ exit(status)
+
+
+def clone_testbed(
+ source: Path,
+ target: Path,
+ framework: Path,
+ apps: list[Path],
+) -> None:
+ if target.exists():
+ print(f"{target} already exists; aborting without creating project.")
+ sys.exit(10)
+
+ if framework is None:
+ if not (
+ source / "Python.xcframework/ios-arm64_x86_64-simulator/bin"
+ ).is_dir():
+ print(
+ f"The testbed being cloned ({source}) does not contain "
+ f"a simulator framework. Re-run with --framework"
+ )
+ sys.exit(11)
+ else:
+ if not framework.is_dir():
+ print(f"{framework} does not exist.")
+ sys.exit(12)
+ elif not (
+ framework.suffix == ".xcframework"
+ or (framework / "Python.framework").is_dir()
+ ):
+ print(
+ f"{framework} is not an XCframework, "
+ f"or a simulator slice of a framework build."
+ )
+ sys.exit(13)
+
+ print("Cloning testbed project:")
+ print(f" Cloning {source}...", end="", flush=True)
+ shutil.copytree(source, target, symlinks=True)
+ print(" done")
+
+ if framework is not None:
+ if framework.suffix == ".xcframework":
+ print(" Installing XCFramework...", end="", flush=True)
+ xc_framework_path = (target / "Python.xcframework").resolve()
+ if xc_framework_path.is_dir():
+ shutil.rmtree(xc_framework_path)
+ else:
+ xc_framework_path.unlink()
+ xc_framework_path.symlink_to(
+ framework.relative_to(xc_framework_path.parent, walk_up=True)
+ )
+ print(" done")
+ else:
+ print(" Installing simulator framework...", end="", flush=True)
+ sim_framework_path = (
+ target / "Python.xcframework" / "ios-arm64_x86_64-simulator"
+ ).resolve()
+ if sim_framework_path.is_dir():
+ shutil.rmtree(sim_framework_path)
+ else:
+ sim_framework_path.unlink()
+ sim_framework_path.symlink_to(
+ framework.relative_to(sim_framework_path.parent, walk_up=True)
+ )
+ print(" done")
+ else:
+ print(" Using pre-existing iOS framework.")
+
+ for app_src in apps:
+ print(f" Installing app {app_src.name!r}...", end="", flush=True)
+ app_target = target / f"iOSTestbed/app/{app_src.name}"
+ if app_target.is_dir():
+ shutil.rmtree(app_target)
+ shutil.copytree(app_src, app_target)
+ print(" done")
+
+ print(f"Successfully cloned testbed: {target.resolve()}")
+
+
+def update_plist(testbed_path, args):
+ # Add the test runner arguments to the testbed's Info.plist file.
+ info_plist = testbed_path / "iOSTestbed" / "iOSTestbed-Info.plist"
+ with info_plist.open("rb") as f:
+ info = plistlib.load(f)
+
+ info["TestArgs"] = args
+
+ with info_plist.open("wb") as f:
+ plistlib.dump(info, f)
+
+
+async def run_testbed(simulator: str, args: list[str], verbose: bool=False):
+ location = Path(__file__).parent
+ print("Updating plist...", end="", flush=True)
+ update_plist(location, args)
+ print(" done.")
+
+ # Get the list of devices that are booted at the start of the test run.
+ # The simulator started by the test suite will be detected as the new
+ # entry that appears on the device list.
+ initial_devices = await list_devices()
+
+ try:
+ async with asyncio.TaskGroup() as tg:
+ tg.create_task(log_stream_task(initial_devices))
+ tg.create_task(xcode_test(location, simulator=simulator, verbose=verbose))
+ except* MySystemExit as e:
+ raise SystemExit(*e.exceptions[0].args) from None
+ except* subprocess.CalledProcessError as e:
+ # Extract it from the ExceptionGroup so it can be handled by `main`.
+ raise e.exceptions[0]
+
+
+def main():
+ parser = argparse.ArgumentParser(
+ description=(
+ "Manages the process of testing a Python project in the iOS simulator."
+ ),
+ )
+
+ subcommands = parser.add_subparsers(dest="subcommand")
+
+ clone = subcommands.add_parser(
+ "clone",
+ description=(
+ "Clone the testbed project, copying in an iOS Python framework and "
+ "any specified application code."
+ ),
+ help="Clone a testbed project to a new location.",
+ )
+ clone.add_argument(
+ "--framework",
+ help=(
+ "The location of the XCFramework (or simulator-only slice of an "
+ "XCFramework) to use when running the testbed"
+ ),
+ )
+ clone.add_argument(
+ "--app",
+ dest="apps",
+ action="append",
+ default=[],
+ help="The location of any code to include in the testbed project",
+ )
+ clone.add_argument(
+ "location",
+ help="The path where the testbed will be cloned.",
+ )
+
+ run = subcommands.add_parser(
+ "run",
+ usage="%(prog)s [-h] [--simulator SIMULATOR] -- <test arg> [<test arg> ...]",
+ description=(
+ "Run a testbed project. The arguments provided after `--` will be "
+ "passed to the running iOS process as if they were arguments to "
+ "`python -m`."
+ ),
+ help="Run a testbed project",
+ )
+ run.add_argument(
+ "--simulator",
+ default="iPhone SE (3rd Generation)",
+ help="The name of the simulator to use (default: 'iPhone SE (3rd Generation)')",
+ )
+ run.add_argument(
+ "-v", "--verbose",
+ action="store_true",
+ help="Enable verbose output",
+ )
+
+ try:
+ pos = sys.argv.index("--")
+ testbed_args = sys.argv[1:pos]
+ test_args = sys.argv[pos + 1 :]
+ except ValueError:
+ testbed_args = sys.argv[1:]
+ test_args = []
+
+ context = parser.parse_args(testbed_args)
+
+ if context.subcommand == "clone":
+ clone_testbed(
+ source=Path(__file__).parent,
+ target=Path(context.location),
+ framework=Path(context.framework).resolve() if context.framework else None,
+ apps=[Path(app) for app in context.apps],
+ )
+ elif context.subcommand == "run":
+ if test_args:
+ if not (
+ Path(__file__).parent / "Python.xcframework/ios-arm64_x86_64-simulator/bin"
+ ).is_dir():
+ print(
+ f"Testbed does not contain a compiled iOS framework. Use "
+ f"`python {sys.argv[0]} clone ...` to create a runnable "
+ f"clone of this testbed."
+ )
+ sys.exit(20)
+
+ asyncio.run(
+ run_testbed(
+ simulator=context.simulator,
+ verbose=context.verbose,
+ args=test_args,
+ )
+ )
+ else:
+ print(f"Must specify test arguments (e.g., {sys.argv[0]} run -- test)")
+ print()
+ parser.print_help(sys.stderr)
+ sys.exit(21)
+ else:
+ parser.print_help(sys.stderr)
+ sys.exit(1)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/iOS/testbed/iOSTestbed.xcodeproj/project.pbxproj b/iOS/testbed/iOSTestbed.xcodeproj/project.pbxproj
index 6819ac0eeed95f..c7d63909ee2453 100644
--- a/iOS/testbed/iOSTestbed.xcodeproj/project.pbxproj
+++ b/iOS/testbed/iOSTestbed.xcodeproj/project.pbxproj
@@ -263,6 +263,7 @@
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "set -e\n\nmkdir -p \"$CODESIGNING_FOLDER_PATH/python/lib\"\nif [ \"$EFFECTIVE_PLATFORM_NAME\" = \"-iphonesimulator\" ]; then\n echo \"Installing Python modules for iOS Simulator\"\n rsync -au --delete \"$PROJECT_DIR/Python.xcframework/ios-arm64_x86_64-simulator/lib/\" \"$CODESIGNING_FOLDER_PATH/python/lib/\" \nelse\n echo \"Installing Python modules for iOS Device\"\n rsync -au --delete \"$PROJECT_DIR/Python.xcframework/ios-arm64/lib/\" \"$CODESIGNING_FOLDER_PATH/python/lib/\" \nfi\n";
+ showEnvVarsInLog = 0;
};
607A66562B0F06200010BFC8 /* Prepare Python Binary Modules */ = {
isa = PBXShellScriptBuildPhase;
@@ -282,6 +283,7 @@
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "set -e\n\ninstall_dylib () {\n INSTALL_BASE=$1\n FULL_EXT=$2\n\n # The name of the extension file\n EXT=$(basename \"$FULL_EXT\")\n # The location of the extension file, relative to the bundle\n RELATIVE_EXT=${FULL_EXT#$CODESIGNING_FOLDER_PATH/} \n # The path to the extension file, relative to the install base\n PYTHON_EXT=${RELATIVE_EXT/$INSTALL_BASE/}\n # The full dotted name of the extension module, constructed from the file path.\n FULL_MODULE_NAME=$(echo $PYTHON_EXT | cut -d \".\" -f 1 | tr \"/\" \".\"); \n # A bundle identifier; not actually used, but required by Xcode framework packaging\n FRAMEWORK_BUNDLE_ID=$(echo $PRODUCT_BUNDLE_IDENTIFIER.$FULL_MODULE_NAME | tr \"_\" \"-\")\n # The name of the framework folder.\n FRAMEWORK_FOLDER=\"Frameworks/$FULL_MODULE_NAME.framework\"\n\n # If the framework folder doesn't exist, create it.\n if [ ! -d \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER\" ]; then\n echo \"Creating framework for $RELATIVE_EXT\" \n mkdir -p \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER\"\n cp \"$CODESIGNING_FOLDER_PATH/dylib-Info-template.plist\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/Info.plist\"\n plutil -replace CFBundleExecutable -string \"$FULL_MODULE_NAME\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/Info.plist\"\n plutil -replace CFBundleIdentifier -string \"$FRAMEWORK_BUNDLE_ID\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/Info.plist\"\n fi\n \n echo \"Installing binary for $FRAMEWORK_FOLDER/$FULL_MODULE_NAME\" \n mv \"$FULL_EXT\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/$FULL_MODULE_NAME\"\n # Create a placeholder .fwork file where the .so was\n echo \"$FRAMEWORK_FOLDER/$FULL_MODULE_NAME\" > ${FULL_EXT%.so}.fwork\n # Create a back reference to the .so file location in the framework\n echo \"${RELATIVE_EXT%.so}.fwork\" > \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/$FULL_MODULE_NAME.origin\" \n}\n\nPYTHON_VER=$(ls -1 \"$CODESIGNING_FOLDER_PATH/python/lib\")\necho \"Install Python $PYTHON_VER standard library 
extension modules...\"\nfind \"$CODESIGNING_FOLDER_PATH/python/lib/$PYTHON_VER/lib-dynload\" -name \"*.so\" | while read FULL_EXT; do\n install_dylib python/lib/$PYTHON_VER/lib-dynload/ \"$FULL_EXT\"\ndone\necho \"Install app package extension modules...\"\nfind \"$CODESIGNING_FOLDER_PATH/app_packages\" -name \"*.so\" | while read FULL_EXT; do\n install_dylib app_packages/ \"$FULL_EXT\"\ndone\necho \"Install app extension modules...\"\nfind \"$CODESIGNING_FOLDER_PATH/app\" -name \"*.so\" | while read FULL_EXT; do\n install_dylib app/ \"$FULL_EXT\"\ndone\n\n# Clean up dylib template \nrm -f \"$CODESIGNING_FOLDER_PATH/dylib-Info-template.plist\"\necho \"Signing frameworks as $EXPANDED_CODE_SIGN_IDENTITY_NAME ($EXPANDED_CODE_SIGN_IDENTITY)...\"\nfind \"$CODESIGNING_FOLDER_PATH/Frameworks\" -name \"*.framework\" -exec /usr/bin/codesign --force --sign \"$EXPANDED_CODE_SIGN_IDENTITY\" ${OTHER_CODE_SIGN_FLAGS:-} -o runtime --timestamp=none --preserve-metadata=identifier,entitlements,flags --generate-entitlement-der \"{}\" \\; \n";
+ showEnvVarsInLog = 0;
};
/* End PBXShellScriptBuildPhase section */
diff --git a/iOS/testbed/iOSTestbedTests/iOSTestbedTests.m b/iOS/testbed/iOSTestbedTests/iOSTestbedTests.m
index db00d43da85cbc..6db38253396c8d 100644
--- a/iOS/testbed/iOSTestbedTests/iOSTestbedTests.m
+++ b/iOS/testbed/iOSTestbedTests/iOSTestbedTests.m
@@ -24,8 +24,11 @@ - (void)testPython {
NSString *resourcePath = [[NSBundle mainBundle] resourcePath];
- // Disable all color, as the Xcode log can't display color
+ // Set some other common environment indicators to disable color, as the
+ // Xcode log can't display color. Stdout will report that it is *not* a
+ // TTY.
setenv("NO_COLOR", "1", true);
+ setenv("PY_COLORS", "0", true);
// Arguments to pass into the test suite runner.
// argv[0] must identify the process; any subsequent arg
@@ -50,6 +53,8 @@ - (void)testPython {
// Enforce UTF-8 encoding for stderr, stdout, file-system encoding and locale.
// See https://docs.python.org/3/library/os.html#python-utf-8-mode.
preconfig.utf8_mode = 1;
+ // Use the system logger for stdout/err
+ config.use_system_logger = 1;
// Don't buffer stdio. We want output to appears in the log immediately
config.buffered_stdio = 0;
// Don't write bytecode; we can't modify the app bundle
diff --git a/pyconfig.h.in b/pyconfig.h.in
index 4531dadee384a4..3c16c694c84599 100644
--- a/pyconfig.h.in
+++ b/pyconfig.h.in
@@ -1385,6 +1385,9 @@
/* Define to 1 if you have the <sys/param.h> header file. */
#undef HAVE_SYS_PARAM_H
+/* Define to 1 if you have the <sys/pidfd.h> header file. */
+#undef HAVE_SYS_PIDFD_H
+
/* Define to 1 if you have the <sys/poll.h> header file. */
#undef HAVE_SYS_POLL_H
@@ -1491,8 +1494,8 @@
/* Define to 1 if you have the `truncate' function. */
#undef HAVE_TRUNCATE
-/* Define to 1 if you have the `ttyname' function. */
-#undef HAVE_TTYNAME
+/* Define to 1 if you have the `ttyname_r' function. */
+#undef HAVE_TTYNAME_R
/* Define to 1 if you don't have `tm_zone' but do have the external array
`tzname'. */