diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 4badbef9c..25330d632 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -51,7 +51,7 @@ jobs: # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@48ab28a6f5dbc2a99bf1e0131198dd8f1df78169 # v3 + uses: github/codeql-action/init@5f8171a638ada777af81d42b55959a643bb29017 # v3 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. @@ -62,7 +62,7 @@ jobs: # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild - uses: github/codeql-action/autobuild@48ab28a6f5dbc2a99bf1e0131198dd8f1df78169 # v3 + uses: github/codeql-action/autobuild@5f8171a638ada777af81d42b55959a643bb29017 # v3 # ℹ️ Command-line programs to run using the OS shell. # 📚 https://git.io/JvXDl @@ -76,4 +76,4 @@ jobs: # make release - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@48ab28a6f5dbc2a99bf1e0131198dd8f1df78169 # v3 + uses: github/codeql-action/analyze@5f8171a638ada777af81d42b55959a643bb29017 # v3 diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index 91d5c0096..9a3cc59a9 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -84,7 +84,7 @@ jobs: persist-credentials: false - name: "Set up Python" - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0 + uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 with: python-version: "${{ matrix.python-version }}" allow-prereleases: true @@ -125,7 +125,7 @@ jobs: mv .metacov .metacov.$MATRIX_ID - name: "Upload coverage data" - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: metacov-${{ env.MATRIX_ID }} path: .metacov.* @@ -147,7 +147,7 @@ jobs: persist-credentials: false - name: "Set up Python" - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0 + uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 with: python-version: "3.9" # Minimum of PYVERSIONS # At a certain point, installing dependencies failed on pypy 3.9 and @@ -170,7 +170,7 @@ jobs: python igor.py zip_mods - name: "Download coverage data" - uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 + uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1 with: pattern: metacov-* merge-multiple: true @@ -184,7 +184,7 @@ jobs: python igor.py combine_html - name: "Upload HTML report" - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: html_report path: htmlcov @@ -239,7 +239,7 @@ jobs: - name: "Download coverage HTML report" if: ${{ github.ref == 'refs/heads/master' }} - uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 + uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1 with: name: html_report path: reports_repo/${{ env.report_dir }} diff --git a/.github/workflows/kit.yml b/.github/workflows/kit.yml index b9ecbbf57..f5f45ef92 100644 --- a/.github/workflows/kit.yml +++ b/.github/workflows/kit.yml @@ -37,6 +37,9 @@ 
defaults: env: PIP_DISABLE_PIP_VERSION_CHECK: 1 + # PYVERSIONS: changing the list of versions will change the number of + # expected distributions. + EXPECTED: 63 permissions: contents: read @@ -99,6 +102,9 @@ jobs: # } # if the_os == "macos": # them["os-version"] = "13" + # if the_arch == "aarch64": + # # https://github.com/pypa/cibuildwheel/issues/2257 + # them["os-version"] = "22.04-arm" # print(f"- {json.dumps(them)}") # ]]] - {"os": "ubuntu", "py": "cp39", "arch": "x86_64"} @@ -111,11 +117,11 @@ jobs: - {"os": "ubuntu", "py": "cp311", "arch": "i686"} - {"os": "ubuntu", "py": "cp312", "arch": "i686"} - {"os": "ubuntu", "py": "cp313", "arch": "i686"} - - {"os": "ubuntu", "py": "cp39", "arch": "aarch64"} - - {"os": "ubuntu", "py": "cp310", "arch": "aarch64"} - - {"os": "ubuntu", "py": "cp311", "arch": "aarch64"} - - {"os": "ubuntu", "py": "cp312", "arch": "aarch64"} - - {"os": "ubuntu", "py": "cp313", "arch": "aarch64"} + - {"os": "ubuntu", "py": "cp39", "arch": "aarch64", "os-version": "22.04-arm"} + - {"os": "ubuntu", "py": "cp310", "arch": "aarch64", "os-version": "22.04-arm"} + - {"os": "ubuntu", "py": "cp311", "arch": "aarch64", "os-version": "22.04-arm"} + - {"os": "ubuntu", "py": "cp312", "arch": "aarch64", "os-version": "22.04-arm"} + - {"os": "ubuntu", "py": "cp313", "arch": "aarch64", "os-version": "22.04-arm"} - {"os": "macos", "py": "cp39", "arch": "arm64", "os-version": "13"} - {"os": "macos", "py": "cp310", "arch": "arm64", "os-version": "13"} - {"os": "macos", "py": "cp311", "arch": "arm64", "os-version": "13"} @@ -136,23 +142,17 @@ jobs: - {"os": "windows", "py": "cp311", "arch": "AMD64"} - {"os": "windows", "py": "cp312", "arch": "AMD64"} - {"os": "windows", "py": "cp313", "arch": "AMD64"} - # [[[end]]] (checksum: 38b83d67f00c838e5e7f69f803b7536c) + # [[[end]]] (checksum: 7c3758a4ca41df53d7ebcad68f12d0d0) fail-fast: false steps: - - name: "Setup QEMU" - if: matrix.os == 'ubuntu' - uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf # v3.2.0 - with: - platforms: arm64 - - name: "Check out the repo" uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Install Python" - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0 + uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 with: python-version: "3.9" # Minimum of PYVERSIONS cache: pip @@ -162,7 +162,7 @@ jobs: run: | python -m pip install -r requirements/kit.pip - - name: "Build wheels" + - name: "Build binary wheels" env: CIBW_BUILD: ${{ matrix.py }}*-* CIBW_ARCHS: ${{ matrix.arch }} @@ -173,7 +173,7 @@ jobs: run: | python -m cibuildwheel --output-dir wheelhouse - - name: "List wheels" + - name: "List binary wheels" run: | ls -al wheelhouse/ @@ -181,15 +181,15 @@ jobs: run: | python -m twine check wheelhouse/* - - name: "Upload wheels" - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0 + - name: "Upload binary wheels" + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: dist-${{ env.MATRIX_ID }} path: wheelhouse/*.whl retention-days: 7 - sdist: - name: "Source distribution" + non-binary: + name: "Non-binary artifacts" runs-on: ubuntu-latest steps: - name: "Check out the repo" @@ -198,7 +198,7 @@ jobs: persist-credentials: false - name: "Install Python" - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0 + uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 with: 
python-version: "3.9" # Minimum of PYVERSIONS cache: pip @@ -208,23 +208,25 @@ jobs: run: | python -m pip install -r requirements/kit.pip - - name: "Build sdist" + - name: "Build non-binary artifacts" + env: + COVERAGE_DISABLE_EXTENSION: 1 run: | python -m build - - name: "List sdist" + - name: "List non-binary artifacts" run: | ls -al dist/ - - name: "Check sdist" + - name: "Check non-binary artifacts" run: | python -m twine check dist/* - - name: "Upload sdist" - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0 + - name: "Upload non-binary artifacts" + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: - name: dist-sdist - path: dist/*.tar.gz + name: dist-non-binary + path: dist/* retention-days: 7 pypy: @@ -237,7 +239,7 @@ jobs: persist-credentials: false - name: "Install PyPy" - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0 + uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 with: python-version: "pypy-3.9" # Minimum of PyPy PYVERSIONS cache: pip @@ -253,7 +255,7 @@ jobs: run: | # One wheel works for all PyPy versions. PYVERSIONS # yes, this is weird syntax: https://github.com/pypa/build/issues/202 - echo -e "[bdist_wheel]\npython_tag=pp39.pp310" > $DIST_EXTRA_CONFIG + echo -e "[bdist_wheel]\npython_tag=pp39.pp310.pp311" > $DIST_EXTRA_CONFIG pypy3 -m build -w - name: "List wheels" @@ -265,7 +267,7 @@ jobs: python -m twine check dist/* - name: "Upload wheels" - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: dist-pypy path: dist/*.whl @@ -277,14 +279,14 @@ jobs: name: "Sign artifacts" needs: - wheels - - sdist + - non-binary - pypy runs-on: ubuntu-latest permissions: id-token: write steps: - name: "Download artifacts" - uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 + uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1 with: pattern: dist-* merge-multiple: true @@ -292,8 +294,9 @@ jobs: - name: "List distributions" run: | ls -alR - echo "Number of dists, there should be 72:" + echo "Number of dists, there should be $EXPECTED:" ls -1 coverage-* | wc -l + files=$(ls coverage-* 2>/dev/null | wc -l) && [ "$files" -eq $EXPECTED ] || exit 1 - name: "Sign artifacts" uses: sigstore/gh-action-sigstore-python@f514d46b907ebcd5bedc05145c03b69c1edd8b46 # v3.0.0 @@ -305,7 +308,7 @@ jobs: ls -alR - name: "Upload signatures" - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: signatures path: "*.sigstore.json" diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index c87677169..4e6032605 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -14,6 +14,11 @@ defaults: run: shell: bash +env: + # PYVERSIONS: changing the list of versions will change the number of + # expected distributions. + EXPECTED: 63 + permissions: contents: read @@ -37,9 +42,12 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: "Record run id" + - name: "Record run id" # zizmor: ignore[template-injection] id: run-id run: | + # There must be a shorter way to write this... 
+ [ "${{ fromJson(steps.runs.outputs.data).workflow_runs[0].status}}" = "completed" ] || exit 1 + [ "${{ fromJson(steps.runs.outputs.data).workflow_runs[0].conclusion}}" = "success" ] || exit 1 echo "run-id=${{ fromJson(steps.runs.outputs.data).workflow_runs[0].id }}" >> "$GITHUB_OUTPUT" publish-to-test-pypi: @@ -56,7 +64,7 @@ jobs: steps: - name: "Download dists" - uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 + uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1 with: repository: "nedbat/coveragepy" run-id: ${{ needs.find-run.outputs.run-id }} @@ -68,16 +76,17 @@ jobs: - name: "What did we get?" run: | ls -alR - echo "Number of dists, should be 72:" + echo "Number of dists, should be $EXPECTED:" ls -1 dist | wc -l + files=$(ls dist 2>/dev/null | wc -l) && [ "$files" -eq $EXPECTED ] || exit 1 - name: "Generate attestations" - uses: actions/attest-build-provenance@7668571508540a607bdfd90a87a560489fe372eb # v2.1.0 + uses: actions/attest-build-provenance@c074443f1aee8d4aeeae555aebba3282517141b2 # v2.2.3 with: subject-path: "dist/*" - name: "Publish dists to Test PyPI" - uses: pypa/gh-action-pypi-publish@67339c736fd9354cd4f8cb0b744f2b82a74b5c70 # v1.12.3 + uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc # v1.12.4 with: repository-url: https://test.pypi.org/legacy/ @@ -95,7 +104,7 @@ jobs: steps: - name: "Download dists" - uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 + uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1 with: repository: "nedbat/coveragepy" run-id: ${{ needs.find-run.outputs.run-id }} @@ -107,13 +116,14 @@ jobs: - name: "What did we get?" run: | ls -alR - echo "Number of dists, should be 72:" + echo "Number of dists, should be $EXPECTED:" ls -1 dist | wc -l + files=$(ls dist 2>/dev/null | wc -l) && [ "$files" -eq $EXPECTED ] || exit 1 - name: "Generate attestations" - uses: actions/attest-build-provenance@7668571508540a607bdfd90a87a560489fe372eb # v2.1.0 + uses: actions/attest-build-provenance@c074443f1aee8d4aeeae555aebba3282517141b2 # v2.2.3 with: subject-path: "dist/*" - name: "Publish dists to PyPI" - uses: pypa/gh-action-pypi-publish@67339c736fd9354cd4f8cb0b744f2b82a74b5c70 # v1.12.3 + uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc # v1.12.4 diff --git a/.github/workflows/python-nightly.yml b/.github/workflows/python-nightly.yml index 4b73e09cc..99e19d091 100644 --- a/.github/workflows/python-nightly.yml +++ b/.github/workflows/python-nightly.yml @@ -92,7 +92,7 @@ jobs: nogil: "${{ matrix.nogil || false }}" - name: "Install ${{ matrix.python-version }} with setup-python" - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0 + uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 if: "startsWith(matrix.python-version, 'pypy-')" with: python-version: "${{ matrix.python-version }}" diff --git a/.github/workflows/quality.yml b/.github/workflows/quality.yml index 54d3a3418..56f063c0d 100644 --- a/.github/workflows/quality.yml +++ b/.github/workflows/quality.yml @@ -42,7 +42,7 @@ jobs: persist-credentials: false - name: "Install Python" - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0 + uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 with: python-version: "3.9" # Minimum of PYVERSIONS cache: pip @@ -67,7 +67,7 @@ jobs: persist-credentials: false - name: "Install Python" - uses: 
actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0 + uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 with: python-version: "3.9" # Minimum of PYVERSIONS cache: pip @@ -92,7 +92,7 @@ jobs: persist-credentials: false - name: "Install Python" - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0 + uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 with: python-version: "3.11" # Doc version from PYVERSIONS cache: pip diff --git a/.github/workflows/testsuite.yml b/.github/workflows/testsuite.yml index 1f85ca745..0f831e81d 100644 --- a/.github/workflows/testsuite.yml +++ b/.github/workflows/testsuite.yml @@ -54,15 +54,14 @@ jobs: - "3.14" - "pypy-3.9" - "pypy-3.10" - exclude: - # Windows pypy 3.9 and 3.10 get stuck with PyPy 7.3.15. I hope to - # unstick them, but I don't want that to block all other progress, so - # skip them for now. These excludes can be removed once GitHub uses - # PyPy 7.3.16 on Windows. https://github.com/pypy/pypy/issues/4876 - - os: windows - python-version: "pypy-3.9" - - os: windows - python-version: "pypy-3.10" + - "pypy-3.11" + # + # If we need to exclude any combinations, do it like this: + # exclude: + # # Windows pypy 3.9 and 3.10 get stuck with PyPy 7.3.15. + # - os: windows + # python-version: "pypy-3.10" + # # If we need to tweak the os version we can do it with an include like # this: # include: @@ -79,7 +78,7 @@ jobs: persist-credentials: false - name: "Set up Python" - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0 + uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 with: python-version: "${{ matrix.python-version }}" allow-prereleases: true diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 466833a9e..7e20a1034 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -14,3 +14,18 @@ repos: exclude: "(status\\.json|\\.min\\.js)$" - id: trailing-whitespace exclude: "stress_phystoken|\\.py,cover$" + + - repo: https://github.com/pre-commit/pygrep-hooks + rev: v1.10.0 + hooks: + - id: rst-directive-colons + - id: rst-inline-touching-normal + + - repo: https://github.com/sphinx-contrib/sphinx-lint + rev: v1.0.0 + hooks: + - id: sphinx-lint + + - repo: meta + hooks: + - id: check-useless-excludes diff --git a/CHANGES.rst b/CHANGES.rst index e5bfd2f0b..a2b172dc6 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -22,6 +22,90 @@ upgrading your version of coverage.py. .. start-releases +.. _changes_7-8-0: + +Version 7.8.0 — 2025-03-30 +-------------------------- + +- Added a new ``source_dirs`` setting for symmetry with the existing + ``source_pkgs`` setting. It's preferable to the existing ``source`` setting, + because you'll get a clear error when directories don't exist. Fixes `issue + 1942`_. Thanks, `Jeremy Fleischman `_. + +- Fix: the PYTHONSAFEPATH environment variable new in Python 3.11 is properly + supported, closing `issue 1696`_. Thanks, `Philipp A. `_. This + works properly except for a detail when using the ``coverage`` command on + Windows. There you can use ``python -m coverage`` instead if you need exact + emulation. + +.. _issue 1696: https://github.com/nedbat/coveragepy/issues/1696 +.. _pull 1700: https://github.com/nedbat/coveragepy/pull/1700 +.. _issue 1942: https://github.com/nedbat/coveragepy/issues/1942 +.. _pull 1943: https://github.com/nedbat/coveragepy/pull/1943 + +.. 
_changes_7-7-1: + +Version 7.7.1 — 2025-03-21 +-------------------------- + +- A few small tweaks to the sys.monitoring support for Python 3.14. Please + test! + + +.. _changes_7-7-0: + +Version 7.7.0 — 2025-03-16 +-------------------------- + +- The Coverage object has a new method, :meth:`.Coverage.branch_stats` for + getting simple branch information for a module. Closes `issue 1888`_. + +- The :class:`Coverage constructor<.Coverage>` now has a ``plugins`` parameter + for passing in plugin objects directly, thanks to `Alex Gaynor `_. + +- Many constant tests in if statements are now recognized as being optimized + away. For example, previously ``if 13:`` would have been considered a branch + with one path not taken. Now it is understood as always true and no coverage + is missing. + +- The experimental sys.monitoring support now works for branch coverage if you + are using Python 3.14.0 alpha 6 or newer. This should reduce the overhead + coverage.py imposes on your test suite. Set the environment variable + ``COVERAGE_CORE=sysmon`` to try it out. + +- Confirmed support for PyPy 3.11. Thanks Michał Górny. + +.. _issue 1888: https://github.com/nedbat/coveragepy/issues/1888 +.. _pull 1919: https://github.com/nedbat/coveragepy/pull/1919 + + +.. _changes_7-6-12: + +Version 7.6.12 — 2025-02-11 +--------------------------- + +- Fix: some aarch64 distributions were missing (`issue 1927`_). These are now + building reliably. + +.. _issue 1927: https://github.com/nedbat/coveragepy/issues/1927 + +.. _changes_7-6-11: + +Version 7.6.11 — 2025-02-08 +--------------------------- + +- Fix: a memory leak in CTracer has been fixed. The details are in `issue + 1924`_ and `pytest-dev 676`_. This should reduce the memory footprint for + everyone even if it hadn't caused a problem before. + +- We now ship a py3-none-any.whl wheel file. Thanks, `Russell Keith-Magee + `_. + +.. _pull 1914: https://github.com/nedbat/coveragepy/pull/1914 +.. _issue 1924: https://github.com/nedbat/coveragepy/issues/1924 +.. _pytest-dev 676: https://github.com/pytest-dev/pytest-cov/issues/676 + .. _changes_7-6-10: Version 7.6.10 — 2024-12-26 @@ -1669,7 +1753,7 @@ Version 5.4 — 2021-01-24 - Combining files on Windows across drives now works properly, fixing `issue 577`_. Thanks, `Valentin Lab `_. -- Fix an obscure warning from deep in the _decimal module, as reported in +- Fix an obscure warning from deep in the decimal module, as reported in `issue 1084`_. - Update to support Python 3.10 alphas in progress, including `PEP 626: Precise diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 1b51c9e0e..12fc1dab5 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -110,6 +110,7 @@ James Valleroy Jan Kühle Jan Rusak Janakarajan Natarajan +Jeremy Fleischman Jerin Peter George Jessamyn Smith Joanna Ejzel @@ -174,6 +175,7 @@ Naveen Srinivasan Naveen Yadav Neil Pilgrim Nicholas Nadeau +Nick Drozd Nikita Bloshchanevich Nikita Sobolev Nils Kattenbeck diff --git a/Makefile b/Makefile index 943d40ec4..e03d5e5f7 100644 --- a/Makefile +++ b/Makefile @@ -235,7 +235,7 @@ update_rtd: #: Update ReadTheDocs with the versions to show bump_version: #: Edit sources to bump the version after a release (see howto.txt). 
git switch -c nedbat/bump-version python igor.py bump_version - git commit -a -m "build: bump version" + git commit -a -m "build: bump version to $$(python setup.py --version | sed 's/a.*//')" git push -u origin @ @@ -288,6 +288,7 @@ RELNOTES_JSON = tmp/relnotes.json $(CHANGES_MD): CHANGES.rst $(DOCBIN) $(SPHINXBUILD) -b rst doc tmp/rst_rst + pandoc --version pandoc -frst -tmarkdown_strict --markdown-headings=atx --wrap=none tmp/rst_rst/changes.rst > $(CHANGES_MD) relnotes_json: $(RELNOTES_JSON) ## Convert changelog to JSON for further parsing. diff --git a/NOTICE.txt b/NOTICE.txt index 7376ffdda..05dd87e01 100644 --- a/NOTICE.txt +++ b/NOTICE.txt @@ -1,5 +1,5 @@ Copyright 2001 Gareth Rees. All rights reserved. -Copyright 2004-2024 Ned Batchelder. All rights reserved. +Copyright 2004-2025 Ned Batchelder. All rights reserved. Except where noted otherwise, this software is licensed under the Apache License, Version 2.0 (the "License"); you may not use this work except in diff --git a/README.rst b/README.rst index a4eb7ed52..cf1e856f5 100644 --- a/README.rst +++ b/README.rst @@ -25,8 +25,8 @@ Coverage.py runs on these versions of Python: .. PYVERSIONS -* Python 3.9 through 3.14 alpha 2, including free-threading. -* PyPy3 versions 3.9 and 3.10. +* Python 3.9 through 3.14 alpha 6, including free-threading. +* PyPy3 versions 3.9, 3.10, and 3.11. Documentation is on `Read the Docs`_. Code repository and issue tracker are on `GitHub`_. @@ -35,6 +35,8 @@ Documentation is on `Read the Docs`_. Code repository and issue tracker are on .. _GitHub: https://github.com/nedbat/coveragepy **New in 7.x:** +``[run] source_dirs`` setting; +``Coverage.branch_stats()``; multi-line exclusion patterns; function/class reporting; experimental support for sys.monitoring; diff --git a/benchmark/README.rst b/benchmark/README.rst index 3e1588547..2372a7c6c 100644 --- a/benchmark/README.rst +++ b/benchmark/README.rst @@ -50,6 +50,15 @@ for the table. There will be a row for each combination of the two dimensions. The `column` argument is the remaining dimension that is used to add columns to the table, one for each item in that dimension. +To run a benchmark, create a Python file with a run_experiment call in it. +Many are in run.py, guarded by ``if 0:`` and ``if 1:`` clauses. In the +benchmark directory, run your Python file. If you haven't provided the +``num_runs`` argument to run_experiment, put the number of runs on the command +line:: + + % cd benchmark + % python3 run.py 3 + For example:: run_experiment( diff --git a/benchmark/benchmark.py b/benchmark/benchmark.py index 0a17218db..35cf938c7 100644 --- a/benchmark/benchmark.py +++ b/benchmark/benchmark.py @@ -191,11 +191,15 @@ class ProjectToTest: # Where can we clone the project from? 
git_url: str = "" + local_git: str = "" slug: str = "" env_vars: Env_VarsType = {} def __init__(self) -> None: - url_must_exist(self.git_url) + if self.git_url: + url_must_exist(self.git_url) + if self.local_git: + file_must_exist(self.local_git) if not self.slug: if self.git_url: self.slug = self.git_url.split("/")[-1] @@ -211,12 +215,13 @@ def make_dir(self) -> None: def get_source(self, shell: ShellSession, retries: int = 5) -> None: """Get the source of the project.""" + git_source = self.local_git or self.git_url for retry in range(retries): try: - shell.run_command(f"git clone {self.git_url} {self.dir}") + shell.run_command(f"git clone {git_source} {self.dir}") return except Exception as e: - print(f"Retrying to clone {self.git_url} due to error:\n{e}") + print(f"Retrying to clone {git_source} due to error:\n{e}") if retry == retries - 1: raise e @@ -423,6 +428,38 @@ def __init__(self, more_pytest_args: str = ""): self.slug = "mashbranch" +class ProjectPillow(ProjectToTest): + git_url = "https://github.com/python-pillow/Pillow" + local_git = "/src/Pillow" + + def __init__(self, more_pytest_args: str = ""): + super().__init__() + self.more_pytest_args = more_pytest_args + + def prep_environment(self, env: Env) -> None: + env.shell.run_command(f"{env.python} -m pip install '.[tests]'") + + def run_no_coverage(self, env: Env) -> float: + env.shell.run_command(f"{env.python} -m pytest {self.more_pytest_args}") + return env.shell.last_duration + + def run_with_coverage(self, env: Env, cov_ver: Coverage) -> float: + env.shell.run_command(f"{env.python} -m pip install {cov_ver.pip_args}") + env.shell.run_command( + f"{env.python} -m pytest --cov=PIL --cov=Tests {self.more_pytest_args}" + ) + duration = env.shell.last_duration + report = env.shell.run_command(f"{env.python} -m coverage report --precision=6") + print("Results:", report.splitlines()[-1]) + return duration + + +class ProjectPillowBranch(ProjectPillow): + def __init__(self, more_pytest_args: str = ""): + super().__init__(more_pytest_args="--cov-branch " + more_pytest_args) + self.slug = "Pilbranch" + + class ProjectOperator(ProjectToTest): git_url = "https://github.com/nedbat/operator" @@ -823,7 +860,9 @@ def __init__( tweaks: TweaksType = None, env_vars: Env_VarsType = None, ): + # Check that it really is a coverage source directory. 
directory = file_must_exist(directory_name, "coverage directory") + file_must_exist(str(directory / "igor.py")) super().__init__( slug=slug, pip_args=str(directory), diff --git a/benchmark/run.py b/benchmark/run.py index e606cade9..075196a6d 100644 --- a/benchmark/run.py +++ b/benchmark/run.py @@ -72,7 +72,7 @@ ], ) -if 1: +if 0: # Compare N Python versions vers = [10, 11, 12, 13] run_experiment( @@ -93,12 +93,13 @@ ], ) -if 0: +if 1: # Compare sysmon on many projects run_experiment( py_versions=[ - Python(3, 12), + # Python(3, 12), + AdHocPython("/usr/local/cpython", "main"), ], cov_versions=[ NoCoverage("nocov"), @@ -106,15 +107,17 @@ CoverageSource(slug="sysmon", env_vars={"COVERAGE_CORE": "sysmon"}), ], projects=[ + ProjectPillow(), #"-k test_pickle"), + ProjectPillowBranch(), #"-k test_pickle"), # ProjectSphinx(), # Works, slow - ProjectPygments(), # Works + # ProjectPygments(), # Doesn't work on 3.14 # ProjectRich(), # Doesn't work # ProjectTornado(), # Works, tests fail # ProjectDulwich(), # Works # ProjectBlack(), # Works, slow # ProjectMpmath(), # Works, slow - ProjectMypy(), # Works, slow - # ProjectHtml5lib(), # Works + # ProjectMypy(), # Works, slow + # ProjectHtml5lib(), # Doesn't work on 3.14 # ProjectUrllib3(), # Works ], rows=["pyver", "proj"], diff --git a/coverage/bytecode.py b/coverage/bytecode.py index 764b29b80..bea039c87 100644 --- a/coverage/bytecode.py +++ b/coverage/bytecode.py @@ -1,13 +1,18 @@ # Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 # For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt -"""Bytecode manipulation for coverage.py""" +"""Bytecode analysis for coverage.py""" from __future__ import annotations +import dis + from types import CodeType +from typing import Iterable, Optional from collections.abc import Iterator +from coverage.types import TArc, TOffset + def code_objects(code: CodeType) -> Iterator[CodeType]: """Iterate over all the code objects in `code`.""" @@ -20,3 +25,138 @@ def code_objects(code: CodeType) -> Iterator[CodeType]: if isinstance(c, CodeType): stack.append(c) yield code + + +def op_set(*op_names: str) -> set[int]: + """Make a set of opcodes from instruction names. + + The names might not exist in this version of Python, skip those if not. + """ + return {op for name in op_names if (op := dis.opmap.get(name))} + + +# Opcodes that are unconditional jumps elsewhere. +ALWAYS_JUMPS = op_set( + "JUMP_BACKWARD", + "JUMP_BACKWARD_NO_INTERRUPT", + "JUMP_FORWARD", +) + +# Opcodes that exit from a function. +RETURNS = op_set("RETURN_VALUE", "RETURN_GENERATOR") + + +class InstructionWalker: + """Utility to step through trails of instructions. + + We have two reasons to need sequences of instructions from a code object: + First, in strict sequence to visit all the instructions in the object. + This is `walk(follow_jumps=False)`. Second, we want to follow jumps to + understand how execution will flow: `walk(follow_jumps=True)`. + + """ + + def __init__(self, code: CodeType) -> None: + self.code = code + self.insts: dict[TOffset, dis.Instruction] = {} + + inst = None + for inst in dis.get_instructions(code): + self.insts[inst.offset] = inst + + assert inst is not None + self.max_offset = inst.offset + + def walk( + self, *, start_at: TOffset = 0, follow_jumps: bool = True + ) -> Iterable[dis.Instruction]: + """ + Yield instructions starting from `start_at`. Follow unconditional + jumps if `follow_jumps` is true. 
+ """ + seen = set() + offset = start_at + while offset < self.max_offset + 1: + if offset in seen: + break + seen.add(offset) + if inst := self.insts.get(offset): + yield inst + if follow_jumps and inst.opcode in ALWAYS_JUMPS: + offset = inst.jump_target + continue + offset += 2 + + +TBranchTrail = tuple[set[TOffset], Optional[TArc]] +TBranchTrails = dict[TOffset, list[TBranchTrail]] + + +def branch_trails(code: CodeType) -> TBranchTrails: + """ + Calculate branch trails for `code`. + + Instructions can have a jump_target, where they might jump to next. Some + instructions with a jump_target are unconditional jumps (ALWAYS_JUMPS), so + they aren't interesting to us, since they aren't the start of a branch + possibility. + + Instructions that might or might not jump somewhere else are branch + possibilities. For each of those, we track a trail of instructions. These + are lists of instruction offsets, the next instructions that can execute. + We follow the trail until we get to a new source line. That gives us the + arc from the original instruction's line to the new source line. + + """ + the_trails: TBranchTrails = {} + iwalker = InstructionWalker(code) + for inst in iwalker.walk(follow_jumps=False): + if not inst.jump_target: + # We only care about instructions with jump targets. + continue + if inst.opcode in ALWAYS_JUMPS: + # We don't care about unconditional jumps. + continue + + from_line = inst.line_number + if from_line is None: + continue + + def walk_one_branch(start_at: TOffset) -> TBranchTrail: + # pylint: disable=cell-var-from-loop + inst_offsets: set[TOffset] = set() + to_line = None + for inst2 in iwalker.walk(start_at=start_at): + inst_offsets.add(inst2.offset) + if inst2.line_number and inst2.line_number != from_line: + to_line = inst2.line_number + break + elif inst2.jump_target and (inst2.opcode not in ALWAYS_JUMPS): + break + elif inst2.opcode in RETURNS: + to_line = -code.co_firstlineno + break + if to_line is not None: + return inst_offsets, (from_line, to_line) + else: + return set(), None + + # Calculate two trails: one from the next instruction, and one from the + # jump_target instruction. + trails = [ + walk_one_branch(start_at=inst.offset + 2), + walk_one_branch(start_at=inst.jump_target), + ] + the_trails[inst.offset] = trails + + # Sometimes we get BRANCH_RIGHT or BRANCH_LEFT events from instructions + # other than the original jump possibility instruction. Register each + # trail under all of their offsets so we can pick up in the middle of a + # trail if need be. 
+ for trail in trails: + for offset in trail[0]: + if offset not in the_trails: + the_trails[offset] = [] + the_trails[offset].append(trail) + + return the_trails diff --git a/coverage/cmdline.py b/coverage/cmdline.py index 7c01ccbf0..783345e01 100644 --- a/coverage/cmdline.py +++ b/coverage/cmdline.py @@ -6,7 +6,7 @@ from __future__ import annotations import glob -import optparse # pylint: disable=deprecated-module +import optparse import os import os.path import shlex diff --git a/coverage/collector.py b/coverage/collector.py index 53fa6871c..3f1519a98 100644 --- a/coverage/collector.py +++ b/coverage/collector.py @@ -367,7 +367,7 @@ def pause(self) -> None: tracer.stop() stats = tracer.get_stats() if stats: - print("\nCoverage.py tracer stats:") + print(f"\nCoverage.py {tracer.__class__.__name__} stats:") for k, v in human_sorted_items(stats.items()): print(f"{k:>20}: {v}") if self.threading: diff --git a/coverage/config.py b/coverage/config.py index 357fc5af0..94831e070 100644 --- a/coverage/config.py +++ b/coverage/config.py @@ -13,7 +13,7 @@ import re from typing import ( - Any, Callable, Union, + Any, Callable, Final, Mapping, Union, ) from collections.abc import Iterable @@ -211,6 +211,7 @@ def __init__(self) -> None: self.sigterm = False self.source: list[str] | None = None self.source_pkgs: list[str] = [] + self.source_dirs: list[str] = [] self.timid = False self._crash: str | None = None @@ -360,7 +361,9 @@ def copy(self) -> CoverageConfig: """Return a copy of the configuration.""" return copy.deepcopy(self) - CONCURRENCY_CHOICES = {"thread", "gevent", "greenlet", "eventlet", "multiprocessing"} + CONCURRENCY_CHOICES: Final[set[str]] = { + "thread", "gevent", "greenlet", "eventlet", "multiprocessing" + } CONFIG_FILE_OPTIONS = [ # These are *args for _set_attr_from_config_option: @@ -390,6 +393,7 @@ def copy(self) -> CoverageConfig: ("sigterm", "run:sigterm", "boolean"), ("source", "run:source", "list"), ("source_pkgs", "run:source_pkgs", "list"), + ("source_dirs", "run:source_dirs", "list"), ("timid", "run:timid", "boolean"), ("_crash", "run:_crash"), @@ -447,7 +451,7 @@ def _set_attr_from_config_option( """ section, option = where.split(":") if cp.has_option(section, option): - method = getattr(cp, "get" + type_) + method = getattr(cp, f"get{type_}") setattr(self, attr, method(section, option)) return True return False @@ -468,7 +472,13 @@ def set_option(self, option_name: str, value: TConfigValueIn | TConfigSectionIn) """ # Special-cased options. if option_name == "paths": - self.paths = value # type: ignore[assignment] + # This is ugly, but type-checks and ensures the values are close + # to right. + self.paths = {} + assert isinstance(value, Mapping) + for k, v in value.items(): + assert isinstance(v, Iterable) + self.paths[k] = list(v) return # Check all the hard-coded options. @@ -499,7 +509,7 @@ def get_option(self, option_name: str) -> TConfigValueOut | None: """ # Special-cased options. if option_name == "paths": - return self.paths # type: ignore[return-value] + return self.paths # Check all the hard-coded options. 
for option_spec in self.CONFIG_FILE_OPTIONS: diff --git a/coverage/context.py b/coverage/context.py index 977e9b4ef..2921372a8 100644 --- a/coverage/context.py +++ b/coverage/context.py @@ -6,7 +6,6 @@ from __future__ import annotations from types import FrameType -from typing import cast from collections.abc import Sequence from coverage.types import TShouldStartContextFn @@ -65,11 +64,11 @@ def qualname_from_frame(frame: FrameType) -> str | None: func = frame.f_globals.get(fname) if func is None: return None - return cast(str, func.__module__ + "." + fname) + return f"{func.__module__}.{fname}" func = getattr(method, "__func__", None) if func is None: cls = self.__class__ - return cast(str, cls.__module__ + "." + cls.__name__ + "." + fname) + return f"{cls.__module__}.{cls.__name__}.{fname}" - return cast(str, func.__module__ + "." + func.__qualname__) + return f"{func.__module__}.{func.__qualname__}" diff --git a/coverage/control.py b/coverage/control.py index 54b90aa28..16c99f7f0 100644 --- a/coverage/control.py +++ b/coverage/control.py @@ -19,7 +19,7 @@ import warnings from types import FrameType -from typing import cast, Any, Callable, IO +from typing import cast, Any, Callable, IO, Union from collections.abc import Iterable, Iterator from coverage import env @@ -43,14 +43,14 @@ from coverage.misc import DefaultValue, ensure_dir_for_file, isolate_module from coverage.multiproc import patch_multiprocessing from coverage.plugin import FileReporter -from coverage.plugin_support import Plugins +from coverage.plugin_support import Plugins, TCoverageInit from coverage.python import PythonFileReporter from coverage.report import SummaryReporter from coverage.report_core import render_report from coverage.results import Analysis, analysis_from_file_reporter from coverage.types import ( FilePath, TConfigurable, TConfigSectionIn, TConfigValueIn, TConfigValueOut, - TFileDisposition, TLineNo, TMorf, + TFileDisposition, TLineNo, TMorf ) from coverage.xmlreport import XmlReporter @@ -131,6 +131,7 @@ def __init__( # pylint: disable=too-many-arguments config_file: FilePath | bool = True, source: Iterable[str] | None = None, source_pkgs: Iterable[str] | None = None, + source_dirs: Iterable[str] | None = None, omit: str | Iterable[str] | None = None, include: str | Iterable[str] | None = None, debug: Iterable[str] | None = None, @@ -138,6 +139,7 @@ def __init__( # pylint: disable=too-many-arguments check_preimported: bool = False, context: str | None = None, messages: bool = False, + plugins: Iterable[Callable[..., None]] | None = None, ) -> None: """ Many of these arguments duplicate and override values that can be @@ -187,6 +189,10 @@ def __init__( # pylint: disable=too-many-arguments `source`, but can be used to name packages where the name can also be interpreted as a file path. + `source_dirs` is a list of file paths. It works the same as + `source`, but raises an error if the path doesn't exist, rather + than being treated as a package name. + `include` and `omit` are lists of file name patterns. Files that match `include` will be measured, files that match `omit` will not. Each will also accept a single string argument. @@ -211,6 +217,11 @@ def __init__( # pylint: disable=too-many-arguments If `messages` is true, some messages will be printed to stdout indicating what is happening. + If `plugins` are passed, they are an iterable of function objects + accepting a `reg` object to register plugins, as described in + :ref:`api_plugin`. 
When they are provided, they will override the + plugins found in the coverage configuration file. + .. versionadded:: 4.0 The `concurrency` parameter. @@ -226,6 +237,11 @@ def __init__( # pylint: disable=too-many-arguments .. versionadded:: 6.0 The `messages` parameter. + .. versionadded:: 7.7 + The `plugins` parameter. + + .. versionadded:: 7.8 + The `source_dirs` parameter. """ # Start self.config as a usable default configuration. It will soon be # replaced with the real configuration. @@ -249,7 +265,7 @@ def __init__( # pylint: disable=too-many-arguments self._warn_no_data = True self._warn_unimported_source = True self._warn_preimported_source = check_preimported - self._no_warn_slugs: list[str] = [] + self._no_warn_slugs: set[str] = set() self._messages = messages # A record of all the warnings that have been issued. @@ -260,6 +276,7 @@ def __init__( # pylint: disable=too-many-arguments self._debug: DebugControl = NoDebugging() self._inorout: InOrOut | None = None self._plugins: Plugins = Plugins() + self._plugin_override = cast(Union[Iterable[TCoverageInit], None], plugins) self._data: CoverageData | None = None self._core: Core | None = None self._collector: Collector | None = None @@ -292,6 +309,7 @@ def __init__( # pylint: disable=too-many-arguments parallel=bool_or_none(data_suffix), source=source, source_pkgs=source_pkgs, + source_dirs=source_dirs, run_omit=omit, run_include=include, debug=debug, @@ -340,7 +358,11 @@ def _init(self) -> None: self._file_mapper = relative_filename # Load plugins - self._plugins = Plugins.load_plugins(self.config.plugins, self.config, self._debug) + self._plugins = Plugins(self._debug) + if self._plugin_override: + self._plugins.load_from_callables(self._plugin_override) + else: + self._plugins.load_from_config(self.config.plugins, self.config) # Run configuring plugins. for plugin in self._plugins.configurers: @@ -424,7 +446,7 @@ def _warn(self, msg: str, slug: str | None = None, once: bool = False) -> None: """ if not self._no_warn_slugs: - self._no_warn_slugs = list(self.config.disable_warnings) + self._no_warn_slugs = set(self.config.disable_warnings) if slug in self._no_warn_slugs: # Don't issue the warning @@ -439,7 +461,7 @@ def _warn(self, msg: str, slug: str | None = None, once: bool = False) -> None: if once: assert slug is not None - self._no_warn_slugs.append(slug) + self._no_warn_slugs.add(slug) def _message(self, msg: str) -> None: """Write a message to the user, if configured to do so.""" @@ -534,7 +556,8 @@ def _init_for_start(self) -> None: self._core = Core( warn=self._warn, - timid=self.config.timid, + config=self.config, + dynamic_contexts=(should_start_context is not None), metacov=self._metacov, ) self._collector = Collector( @@ -935,6 +958,7 @@ def analysis2( analysis.missing_formatted(), ) + @functools.lru_cache(maxsize=1) def _analyze(self, morf: TMorf) -> Analysis: """Analyze a module or file. Private for now.""" self._init() @@ -945,6 +969,20 @@ def _analyze(self, morf: TMorf) -> Analysis: filename = self._file_mapper(file_reporter.filename) return analysis_from_file_reporter(data, self.config.precision, file_reporter, filename) + def branch_stats(self, morf: TMorf) -> dict[TLineNo, tuple[int, int]]: + """Get branch statistics about a module. + + `morf` is a module or a file name. + + Returns a dict mapping line numbers to a tuple: + (total_exits, taken_exits). + + .. 
versionadded:: 7.7 + + """ + analysis = self._analyze(morf) + return analysis.branch_stats() + @functools.lru_cache(maxsize=1) def _get_file_reporter(self, morf: TMorf) -> FileReporter: """Get a FileReporter for a module or file name.""" diff --git a/coverage/core.py b/coverage/core.py index b19ecd532..38c27578b 100644 --- a/coverage/core.py +++ b/coverage/core.py @@ -10,6 +10,7 @@ from typing import Any from coverage import env +from coverage.config import CoverageConfig from coverage.disposition import FileDisposition from coverage.exceptions import ConfigError from coverage.misc import isolate_module @@ -17,8 +18,8 @@ from coverage.sysmon import SysMonitor from coverage.types import ( TFileDisposition, - Tracer, TWarnFn, + Tracer, ) @@ -52,36 +53,47 @@ class Core: packed_arcs: bool systrace: bool - def __init__(self, + def __init__( + self, warn: TWarnFn, - timid: bool, + config: CoverageConfig, + dynamic_contexts: bool, metacov: bool, ) -> None: - # Defaults - self.tracer_kwargs = {} + # Check the conditions that preclude us from using sys.monitoring. + reason_no_sysmon = "" + if not env.PYBEHAVIOR.pep669: + reason_no_sysmon = "isn't available in this version" + elif config.branch and not env.PYBEHAVIOR.branch_right_left: + reason_no_sysmon = "can't measure branches in this version" + elif dynamic_contexts: + reason_no_sysmon = "doesn't yet support dynamic contexts" - core_name: str | None - if timid: + core_name: str | None = None + if config.timid: core_name = "pytrace" - else: + + if core_name is None: core_name = os.getenv("COVERAGE_CORE") - if core_name == "sysmon" and not env.PYBEHAVIOR.pep669: - warn("sys.monitoring isn't available, using default core", slug="no-sysmon") - core_name = None + if core_name == "sysmon" and reason_no_sysmon: + warn(f"sys.monitoring {reason_no_sysmon}, using default core", slug="no-sysmon") + core_name = None + + if core_name is None: + # Someday we will default to sysmon, but it's still experimental: + # if not reason_no_sysmon: + # core_name = "sysmon" + if HAS_CTRACER: + core_name = "ctrace" + else: + core_name = "pytrace" - if not core_name: - # Once we're comfortable with sysmon as a default: - # if env.PYBEHAVIOR.pep669 and self.should_start_context is None: - # core_name = "sysmon" - if HAS_CTRACER: - core_name = "ctrace" - else: - core_name = "pytrace" + self.tracer_kwargs = {} if core_name == "sysmon": self.tracer_class = SysMonitor - self.tracer_kwargs = {"tool_id": 3 if metacov else 1} + self.tracer_kwargs["tool_id"] = 3 if metacov else 1 self.file_disposition_class = FileDisposition self.supports_plugins = False self.packed_arcs = False diff --git a/coverage/ctracer/tracer.c b/coverage/ctracer/tracer.c index 8deedb37f..4fca2fee7 100644 --- a/coverage/ctracer/tracer.c +++ b/coverage/ctracer/tracer.c @@ -288,6 +288,17 @@ CTracer_set_pdata_stack(CTracer *self) return ret; } +// Thanks for the idea, memray! +inline PyCodeObject* +MyFrame_BorrowCode(PyFrameObject* frame) +{ + // Return a borrowed reference. + PyCodeObject* pCode = PyFrame_GetCode(frame); + assert(Py_REFCNT(pCode) >= 2); + Py_DECREF(pCode); + return pCode; +} + /* * Parts of the trace function. */ @@ -303,7 +314,7 @@ CTracer_handle_call(CTracer *self, PyFrameObject *frame) PyObject * plugin = NULL; PyObject * plugin_name = NULL; PyObject * next_tracename = NULL; -#ifdef RESUME +#ifdef RESUME // >=3.11 PyObject * pCode = NULL; #endif @@ -359,7 +370,7 @@ CTracer_handle_call(CTracer *self, PyFrameObject *frame) } /* Check if we should trace this line. 
*/ - filename = MyFrame_GetCode(frame)->co_filename; + filename = MyFrame_BorrowCode(frame)->co_filename; disposition = PyDict_GetItem(self->should_trace_cache, filename); if (disposition == NULL) { if (PyErr_Occurred()) { @@ -554,7 +565,7 @@ CTracer_handle_call(CTracer *self, PyFrameObject *frame) * The current opcode is guaranteed to be RESUME. The argument * determines what kind of resume it is. */ - pCode = MyCode_GetCode(MyFrame_GetCode(frame)); + pCode = MyCode_GetCode(MyFrame_BorrowCode(frame)); real_call = (PyBytes_AS_STRING(pCode)[MyFrame_GetLasti(frame) + 1] == 0); #else // f_lasti is -1 for a true call, and a real byte offset for a generator re-entry. @@ -562,7 +573,7 @@ CTracer_handle_call(CTracer *self, PyFrameObject *frame) #endif if (real_call) { - self->pcur_entry->last_line = -MyFrame_GetCode(frame)->co_firstlineno; + self->pcur_entry->last_line = -MyFrame_BorrowCode(frame)->co_firstlineno; } else { self->pcur_entry->last_line = PyFrame_GetLineNumber(frame); @@ -649,7 +660,7 @@ CTracer_handle_line(CTracer *self, PyFrameObject *frame) STATS( self->stats.lines++; ) if (self->pdata_stack->depth >= 0) { - SHOWLOG(PyFrame_GetLineNumber(frame), MyFrame_GetCode(frame)->co_filename, "line"); + SHOWLOG(PyFrame_GetLineNumber(frame), MyFrame_BorrowCode(frame)->co_filename, "line"); if (self->pcur_entry->file_data) { int lineno_from = -1; int lineno_to = -1; @@ -727,7 +738,7 @@ CTracer_handle_return(CTracer *self, PyFrameObject *frame) self->pcur_entry = &self->pdata_stack->stack[self->pdata_stack->depth]; if (self->tracing_arcs && self->pcur_entry->file_data) { BOOL real_return = FALSE; - pCode = MyCode_GetCode(MyFrame_GetCode(frame)); + pCode = MyCode_GetCode(MyFrame_BorrowCode(frame)); int lasti = MyFrame_GetLasti(frame); Py_ssize_t code_size = PyBytes_GET_SIZE(pCode); unsigned char * code_bytes = (unsigned char *)PyBytes_AS_STRING(pCode); @@ -759,7 +770,7 @@ CTracer_handle_return(CTracer *self, PyFrameObject *frame) real_return = !(is_yield || is_yield_from); #endif if (real_return) { - int first = MyFrame_GetCode(frame)->co_firstlineno; + int first = MyFrame_BorrowCode(frame)->co_firstlineno; if (CTracer_record_pair(self, self->pcur_entry->last_line, -first) < 0) { goto error; } @@ -782,7 +793,7 @@ CTracer_handle_return(CTracer *self, PyFrameObject *frame) } /* Pop the stack. 
*/ - SHOWLOG(PyFrame_GetLineNumber(frame), MyFrame_GetCode(frame)->co_filename, "return"); + SHOWLOG(PyFrame_GetLineNumber(frame), MyFrame_BorrowCode(frame)->co_filename, "return"); self->pdata_stack->depth--; self->pcur_entry = &self->pdata_stack->stack[self->pdata_stack->depth]; } @@ -824,13 +835,13 @@ CTracer_trace(CTracer *self, PyFrameObject *frame, int what, PyObject *arg_unuse if (what <= (int)(sizeof(what_sym)/sizeof(const char *))) { w = what_sym[what]; } - ascii = PyUnicode_AsASCIIString(MyFrame_GetCode(frame)->co_filename); + ascii = PyUnicode_AsASCIIString(MyFrame_BorrowCode(frame)->co_filename); printf("%x trace: f:%x %s @ %s %d\n", (int)self, (int)frame, what_sym[what], PyBytes_AS_STRING(ascii), PyFrame_GetLineNumber(frame)); Py_DECREF(ascii); #endif #if TRACE_LOG - ascii = PyUnicode_AsASCIIString(MyFrame_GetCode(frame)->co_filename); + ascii = PyUnicode_AsASCIIString(MyFrame_BorrowCode(frame)->co_filename); if (strstr(PyBytes_AS_STRING(ascii), start_file) && PyFrame_GetLineNumber(frame) == start_line) { logging = TRUE; } @@ -926,7 +937,7 @@ CTracer_call(CTracer *self, PyObject *args, PyObject *kwds) } #if WHAT_LOG - ascii = PyUnicode_AsASCIIString(MyFrame_GetCode(frame)->co_filename); + ascii = PyUnicode_AsASCIIString(MyFrame_BorrowCode(frame)->co_filename); printf("pytrace: %s @ %s %d\n", what_sym[what], PyBytes_AS_STRING(ascii), PyFrame_GetLineNumber(frame)); Py_DECREF(ascii); #endif diff --git a/coverage/ctracer/util.h b/coverage/ctracer/util.h index 473db2080..94591e0d2 100644 --- a/coverage/ctracer/util.h +++ b/coverage/ctracer/util.h @@ -41,13 +41,6 @@ #define MyFrame_SetTrace(f, obj) {Py_INCREF(obj); Py_XSETREF((f)->f_trace, (PyObject*)(obj));} #endif -// Access f_code should be done through a helper starting in 3.9. -#if PY_VERSION_HEX >= 0x03090000 -#define MyFrame_GetCode(f) (PyFrame_GetCode(f)) -#else -#define MyFrame_GetCode(f) ((f)->f_code) -#endif - #if PY_VERSION_HEX >= 0x030B00B1 #define MyCode_GetCode(co) (PyCode_GetCode(co)) #define MyCode_FreeCode(code) Py_XDECREF(code) diff --git a/coverage/data.py b/coverage/data.py index 9baab8edd..8f4346007 100644 --- a/coverage/data.py +++ b/coverage/data.py @@ -23,7 +23,7 @@ from coverage.exceptions import CoverageException, NoDataError from coverage.files import PathAliases from coverage.misc import Hasher, file_be_gone, human_sorted, plural -from coverage.sqldata import CoverageData +from coverage.sqldata import CoverageData as CoverageData # pylint: disable=useless-import-alias def line_counts(data: CoverageData, fullpath: bool = False) -> dict[str, int]: diff --git a/coverage/debug.py b/coverage/debug.py index cf9310dc5..73e842f99 100644 --- a/coverage/debug.py +++ b/coverage/debug.py @@ -21,7 +21,7 @@ from typing import ( overload, - Any, Callable, IO, + Any, Callable, Final, IO, ) from collections.abc import Iterable, Iterator, Mapping @@ -119,6 +119,11 @@ def should(self, option: str) -> bool: """Should we write debug messages? 
Never.""" return False + @contextlib.contextmanager + def without_callers(self) -> Iterator[None]: + """A dummy context manager to satisfy the api.""" + yield + def write(self, msg: str, *, exc: BaseException | None = None) -> None: """This will never be called.""" raise AssertionError("NoDebugging.write should never be called.") @@ -357,7 +362,7 @@ def filter(self, text: str) -> str: """Add a cwd message for each new cwd.""" cwd = os.getcwd() if cwd != self.cwd: - text = f"cwd is now {cwd!r}\n" + text + text = f"cwd is now {cwd!r}\n{text}" self.cwd = cwd return text @@ -399,7 +404,7 @@ def filter(self, text: str) -> str: """Add a message when the pytest test changes.""" test_name = os.getenv("PYTEST_CURRENT_TEST") if test_name != self.test_name: - text = f"Pytest context: {test_name}\n" + text + text = f"Pytest context: {test_name}\n{text}" self.test_name = test_name return text @@ -467,8 +472,8 @@ def get_one( # a process-wide singleton. So stash it in sys.modules instead of # on a class attribute. Yes, this is aggressively gross. - SYS_MOD_NAME = "$coverage.debug.DebugOutputFile.the_one" - SINGLETON_ATTR = "the_one_and_is_interim" + SYS_MOD_NAME: Final[str] = "$coverage.debug.DebugOutputFile.the_one" + SINGLETON_ATTR: Final[str] = "the_one_and_is_interim" @classmethod def _set_singleton_data(cls, the_one: DebugOutputFile, interim: bool) -> None: diff --git a/coverage/env.py b/coverage/env.py index 0fb8683c5..a88161b38 100644 --- a/coverage/env.py +++ b/coverage/env.py @@ -9,7 +9,7 @@ import platform import sys -from typing import Any +from typing import Any, Final from collections.abc import Iterable # debug_info() at the bottom wants to show all the globals, but not imports. @@ -22,7 +22,7 @@ # Operating systems. WINDOWS = sys.platform == "win32" LINUX = sys.platform.startswith("linux") -OSX = sys.platform == "darwin" +MACOS = sys.platform == "darwin" # Python implementations. CPYTHON = (platform.python_implementation() == "CPython") @@ -53,10 +53,7 @@ class PYBEHAVIOR: # Is "if not __debug__" optimized away? The exact details have changed # across versions. - if pep626: - optimize_if_not_debug = 1 - else: - optimize_if_not_debug = 2 + optimize_if_not_debug = 1 if pep626 else 2 # 3.7 changed how functions with only docstrings are numbered. docstring_only_function = (not PYPY) and (PYVERSION <= (3, 10)) @@ -148,7 +145,7 @@ class PYBEHAVIOR: soft_keywords = (PYVERSION >= (3, 10)) # PEP669 Low Impact Monitoring: https://peps.python.org/pep-0669/ - pep669 = bool(getattr(sys, "monitoring", None)) + pep669: Final[bool] = bool(getattr(sys, "monitoring", None)) # Where does frame.f_lasti point when yielding from a generator? # It used to point at the YIELD, in 3.13 it points at the RESUME, @@ -159,6 +156,11 @@ class PYBEHAVIOR: # PEP649 and PEP749: Deferred annotations deferred_annotations = (PYVERSION >= (3, 14)) + # Does sys.monitoring support BRANCH_RIGHT and BRANCH_LEFT? The names + # were added in early 3.14 alphas, but didn't work entirely correctly until + # after 3.14.0a5. + branch_right_left = (pep669 and (PYVERSION > (3, 14, 0, "alpha", 5, 0))) + # Coverage.py specifics, about testing scenarios. See tests/testenv.py also. 
diff --git a/coverage/execfile.py b/coverage/execfile.py index cbecec847..b44c95280 100644 --- a/coverage/execfile.py +++ b/coverage/execfile.py @@ -17,6 +17,7 @@ from types import CodeType, ModuleType from typing import Any +from coverage import env from coverage.exceptions import CoverageException, _ExceptionDuringRun, NoCode, NoSource from coverage.files import canonical_filename, python_reported_file from coverage.misc import isolate_module @@ -89,7 +90,10 @@ def prepare(self) -> None: This needs to happen before any importing, and without importing anything. """ path0: str | None - if self.as_module: + if env.PYVERSION >= (3, 11) and getattr(sys.flags, "safe_path"): + # See https://docs.python.org/3/using/cmdline.html#cmdoption-P + path0 = None + elif self.as_module: path0 = os.getcwd() elif os.path.isdir(self.arg0): # Running a directory means running the __main__.py file in that @@ -142,7 +146,7 @@ def _prepare2(self) -> None: # Running a directory means running the __main__.py file in that # directory. for ext in [".py", ".pyc", ".pyo"]: - try_filename = os.path.join(self.arg0, "__main__" + ext) + try_filename = os.path.join(self.arg0, f"__main__{ext}") # 3.8.10 changed how files are reported when running a # directory. try_filename = os.path.abspath(try_filename) diff --git a/coverage/files.py b/coverage/files.py index 15d39acbd..21ba3f167 100644 --- a/coverage/files.py +++ b/coverage/files.py @@ -332,7 +332,7 @@ def _glob_to_regex(pattern: str) -> str: # Turn all backslashes into slashes to simplify the tokenizer. pattern = pattern.replace("\\", "/") if "/" not in pattern: - pattern = "**/" + pattern + pattern = f"**/{pattern}" path_rx = [] pos = 0 while pos < len(pattern): diff --git a/coverage/inorout.py b/coverage/inorout.py index e2b4c8ca3..8a5a1e27d 100644 --- a/coverage/inorout.py +++ b/coverage/inorout.py @@ -24,7 +24,7 @@ from coverage import env from coverage.disposition import FileDisposition, disposition_init -from coverage.exceptions import CoverageException, PluginError +from coverage.exceptions import ConfigError, CoverageException, PluginError from coverage.files import TreeMatcher, GlobMatcher, ModuleMatcher from coverage.files import prep_patterns, find_python_files, canonical_filename from coverage.misc import isolate_module, sys_modules_saved @@ -36,26 +36,18 @@ from coverage.plugin_support import Plugins -# Pypy has some unusual stuff in the "stdlib". Consider those locations -# when deciding where the stdlib is. These modules are not used for anything, -# they are modules importable from the pypy lib directories, so that we can -# find those directories. modules_we_happen_to_have: list[ModuleType] = [ inspect, itertools, os, platform, re, sysconfig, traceback, ] if env.PYPY: - try: - import _structseq - modules_we_happen_to_have.append(_structseq) - except ImportError: - pass - - try: - import _pypy_irc_topic - modules_we_happen_to_have.append(_pypy_irc_topic) - except ImportError: - pass + # Pypy has some unusual stuff in the "stdlib". Consider those locations + # when deciding where the stdlib is. These modules are not used for anything, + # they are modules importable from the pypy lib directories, so that we can + # find those directories. 
+ import _pypy_irc_topic # pylint: disable=import-error + import _structseq # pylint: disable=import-error + modules_we_happen_to_have.extend([_structseq, _pypy_irc_topic]) os = isolate_module(os) @@ -191,14 +183,23 @@ def __init__( self.debug = debug self.include_namespace_packages = include_namespace_packages - self.source: list[str] = [] self.source_pkgs: list[str] = [] self.source_pkgs.extend(config.source_pkgs) + self.source_dirs: list[str] = [] + self.source_dirs.extend(config.source_dirs) for src in config.source or []: if os.path.isdir(src): - self.source.append(canonical_filename(src)) + self.source_dirs.append(src) else: self.source_pkgs.append(src) + + # Canonicalize everything in `source_dirs`. + # Also confirm that they actually are directories. + for i, src in enumerate(self.source_dirs): + if not os.path.isdir(src): + raise ConfigError(f"Source dir is not a directory: {src!r}") + self.source_dirs[i] = canonical_filename(src) + self.source_pkgs_unmatched = self.source_pkgs[:] self.include = prep_patterns(config.run_include) @@ -233,10 +234,10 @@ def _debug(msg: str) -> None: self.pylib_match = None self.include_match = self.omit_match = None - if self.source or self.source_pkgs: + if self.source_dirs or self.source_pkgs: against = [] - if self.source: - self.source_match = TreeMatcher(self.source, "source") + if self.source_dirs: + self.source_match = TreeMatcher(self.source_dirs, "source") against.append(f"trees {self.source_match!r}") if self.source_pkgs: self.source_pkgs_match = ModuleMatcher(self.source_pkgs, "source_pkgs") @@ -285,7 +286,7 @@ def _debug(msg: str) -> None: ) self.source_in_third_paths.add(pathdir) - for src in self.source: + for src in self.source_dirs: if self.third_match.match(src): _debug(f"Source in third-party: source directory {src!r}") self.source_in_third_paths.add(src) @@ -457,12 +458,12 @@ def check_include_omit_etc(self, filename: str, frame: FrameType | None) -> str def warn_conflicting_settings(self) -> None: """Warn if there are settings that conflict.""" if self.include: - if self.source or self.source_pkgs: + if self.source_dirs or self.source_pkgs: self.warn("--include is ignored because --source is set", slug="include-ignored") def warn_already_imported_files(self) -> None: """Warn if files have already been imported that we will be measuring.""" - if self.include or self.source or self.source_pkgs: + if self.include or self.source_dirs or self.source_pkgs: warned = set() for mod in list(sys.modules.values()): filename = getattr(mod, "__file__", None) @@ -535,7 +536,7 @@ def find_possibly_unexecuted_files(self) -> Iterable[tuple[str, str | None]]: pkg_file = source_for_file(cast(str, sys.modules[pkg].__file__)) yield from self._find_executable_files(canonical_path(pkg_file)) - for src in self.source: + for src in self.source_dirs: yield from self._find_executable_files(src) def _find_plugin_files(self, src_dir: str) -> Iterable[tuple[str, str]]: diff --git a/coverage/parser.py b/coverage/parser.py index fb74ea9e0..306123b47 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -466,7 +466,7 @@ def _line_numbers(self) -> Iterable[TLineNo]: byte_increments = self.code.co_lnotab[0::2] line_increments = self.code.co_lnotab[1::2] - last_line_num = None + last_line_num: TLineNo | None = None line_num = self.code.co_firstlineno byte_num = 0 for byte_incr, line_incr in zip(byte_increments, line_increments): @@ -654,6 +654,30 @@ def __init__(self, body: Sequence[ast.AST]) -> None: # TODO: Shouldn't the cause messages join with "and" instead 
of "or"? +def is_constant_test_expr(node: ast.AST) -> tuple[bool, bool]: + """Is this a compile-time constant test expression? + + We don't try to mimic all of CPython's optimizations. We just have to + handle the kinds of constant expressions people might actually use. + + """ + if isinstance(node, ast.Constant): + return True, bool(node.value) + elif isinstance(node, ast.Name): + if node.id in ["True", "False", "None", "__debug__"]: + return True, eval(node.id) # pylint: disable=eval-used + elif isinstance(node, ast.UnaryOp) and isinstance(node.op, ast.Not): + is_constant, val = is_constant_test_expr(node.operand) + return is_constant, not val + elif isinstance(node, ast.BoolOp): + rets = [is_constant_test_expr(v) for v in node.values] + is_constant = all(is_const for is_const, _ in rets) + if is_constant: + op = any if isinstance(node.op, ast.Or) else all + return True, op(v for _, v in rets) + return False, False + + class AstArcAnalyzer: """Analyze source text with an AST to find executable code paths. @@ -717,7 +741,7 @@ def analyze(self) -> None: """Examine the AST tree from `self.root_node` to determine possible arcs.""" for node in ast.walk(self.root_node): node_name = node.__class__.__name__ - code_object_handler = getattr(self, "_code_object__" + node_name, None) + code_object_handler = getattr(self, f"_code_object__{node_name}", None) if code_object_handler is not None: code_object_handler(node) @@ -808,7 +832,7 @@ def line_for_node(self, node: ast.AST) -> TLineNo: node_name = node.__class__.__name__ handler = cast( Optional[Callable[[ast.AST], TLineNo]], - getattr(self, "_line__" + node_name, None), + getattr(self, f"_line__{node_name}", None), ) if handler is not None: line = handler(node) @@ -889,7 +913,7 @@ def node_exits(self, node: ast.AST) -> set[ArcStart]: node_name = node.__class__.__name__ handler = cast( Optional[Callable[[ast.AST], set[ArcStart]]], - getattr(self, "_handle__" + node_name, None), + getattr(self, f"_handle__{node_name}", None), ) if handler is not None: arc_starts = handler(node) @@ -965,7 +989,7 @@ def find_non_missing_node(self, node: ast.AST) -> ast.AST | None: missing_fn = cast( Optional[Callable[[ast.AST], Optional[ast.AST]]], - getattr(self, "_missing__" + node.__class__.__name__, None), + getattr(self, f"_missing__{node.__class__.__name__}", None), ) if missing_fn is not None: ret_node = missing_fn(node) @@ -1022,16 +1046,6 @@ def _missing__While(self, node: ast.While) -> ast.AST | None: new_while.orelse = [] return new_while - def is_constant_expr(self, node: ast.AST) -> str | None: - """Is this a compile-time constant?""" - node_name = node.__class__.__name__ - if node_name in ["Constant", "NameConstant", "Num"]: - return "Num" - elif isinstance(node, ast.Name): - if node.id in ["True", "False", "None", "__debug__"]: - return "Name" - return None - # In the fullness of time, these might be good tests to write: # while EXPR: # while False: @@ -1097,8 +1111,8 @@ def _handle_decorated(self, node: ast.FunctionDef) -> set[ArcStart]: last = None for dec_node in decs: dec_start = self.line_for_node(dec_node) - if last is not None and dec_start != last: # type: ignore[unreachable] - self.add_arc(last, dec_start) # type: ignore[unreachable] + if last is not None and dec_start != last: + self.add_arc(last, dec_start) last = dec_start assert last is not None self.add_arc(last, main_line) @@ -1147,10 +1161,14 @@ def _handle__For(self, node: ast.For) -> set[ArcStart]: def _handle__If(self, node: ast.If) -> set[ArcStart]: start = 
self.line_for_node(node.test) - from_start = ArcStart(start, cause="the condition on line {lineno} was never true") - exits = self.process_body(node.body, from_start=from_start) - from_start = ArcStart(start, cause="the condition on line {lineno} was always true") - exits |= self.process_body(node.orelse, from_start=from_start) + constant_test, val = is_constant_test_expr(node.test) + exits = set() + if not constant_test or val: + from_start = ArcStart(start, cause="the condition on line {lineno} was never true") + exits |= self.process_body(node.body, from_start=from_start) + if not constant_test or not val: + from_start = ArcStart(start, cause="the condition on line {lineno} was always true") + exits |= self.process_body(node.orelse, from_start=from_start) return exits if sys.version_info >= (3, 10): @@ -1262,7 +1280,7 @@ def _handle__Try(self, node: ast.Try) -> set[ArcStart]: def _handle__While(self, node: ast.While) -> set[ArcStart]: start = to_top = self.line_for_node(node.test) - constant_test = self.is_constant_expr(node.test) + constant_test, _ = is_constant_test_expr(node.test) top_is_body0 = False if constant_test: top_is_body0 = True diff --git a/coverage/plugin_support.py b/coverage/plugin_support.py index 99e3bc22b..127e375eb 100644 --- a/coverage/plugin_support.py +++ b/coverage/plugin_support.py @@ -10,14 +10,14 @@ import sys from types import FrameType -from typing import Any +from typing import Any, Callable from collections.abc import Iterable, Iterator from coverage.exceptions import PluginError from coverage.misc import isolate_module from coverage.plugin import CoveragePlugin, FileTracer, FileReporter from coverage.types import ( - TArc, TConfigurable, TDebugCtl, TLineNo, TPluginConfig, TSourceTokenLines, + TArc, TConfigurable, TDebugCtl, TLineNo, TPluginConfig, TSourceTokenLines ) os = isolate_module(os) @@ -26,7 +26,7 @@ class Plugins: """The currently loaded collection of coverage.py plugins.""" - def __init__(self) -> None: + def __init__(self, debug: TDebugCtl | None = None) -> None: self.order: list[CoveragePlugin] = [] self.names: dict[str, CoveragePlugin] = {} self.file_tracers: list[CoveragePlugin] = [] @@ -34,25 +34,17 @@ def __init__(self) -> None: self.context_switchers: list[CoveragePlugin] = [] self.current_module: str | None = None - self.debug: TDebugCtl | None + self.debug = debug - @classmethod - def load_plugins( - cls, + def load_from_config( + self, modules: Iterable[str], config: TPluginConfig, - debug: TDebugCtl | None = None, - ) -> Plugins: - """Load plugins from `modules`. - - Returns a Plugins object with the loaded and configured plugins. - - """ - plugins = cls() - plugins.debug = debug + ) -> None: + """Load plugin modules, and read their settings from configuration.""" for module in modules: - plugins.current_module = module + self.current_module = module __import__(module) mod = sys.modules[module] @@ -63,10 +55,17 @@ def load_plugins( ) options = config.get_plugin_options(module) - coverage_init(plugins, options) + coverage_init(self, options) - plugins.current_module = None - return plugins + self.current_module = None + + def load_from_callables( + self, + plugin_inits: Iterable[TCoverageInit], + ) -> None: + """Load plugins from callables provided.""" + for fn in plugin_inits: + fn(self) def add_file_tracer(self, plugin: CoveragePlugin) -> None: """Add a file tracer plugin. 
@@ -138,6 +137,9 @@ def get(self, plugin_name: str) -> CoveragePlugin: return self.names[plugin_name] +TCoverageInit = Callable[[Plugins], None] + + class LabelledDebug: """A Debug writer, but with labels for prepending to the messages.""" diff --git a/coverage/results.py b/coverage/results.py index a9bde97c3..6d28e73f7 100644 --- a/coverage/results.py +++ b/coverage/results.py @@ -231,6 +231,7 @@ def branch_stats(self) -> dict[TLineNo, tuple[int, int]]: Returns a dict mapping line numbers to a tuple: (total_exits, taken_exits). + """ missing_arcs = self.missing_branch_arcs() @@ -343,7 +344,7 @@ def _line_ranges( lines = sorted(lines) pairs = [] - start = None + start: TLineNo | None = None lidx = 0 for stmt in statements: if lidx >= len(lines): diff --git a/coverage/sqldata.py b/coverage/sqldata.py index 76b569285..169649f3a 100644 --- a/coverage/sqldata.py +++ b/coverage/sqldata.py @@ -266,7 +266,7 @@ def _choose_filename(self) -> None: self._filename = self._basename suffix = filename_suffix(self._suffix) if suffix: - self._filename += "." + suffix + self._filename += f".{suffix}" def _reset(self) -> None: """Reset our attributes.""" diff --git a/coverage/sysmon.py b/coverage/sysmon.py index 2809aa087..8e5376cf0 100644 --- a/coverage/sysmon.py +++ b/coverage/sysmon.py @@ -3,8 +3,6 @@ """Callback functions and support for sys.monitoring data collection.""" -# TODO: https://github.com/python/cpython/issues/111963#issuecomment-2386584080 - from __future__ import annotations import functools @@ -16,21 +14,24 @@ import traceback from dataclasses import dataclass -from types import CodeType, FrameType +from types import CodeType from typing import ( Any, Callable, - TYPE_CHECKING, + NewType, + Optional, cast, ) +from coverage import env +from coverage.bytecode import TBranchTrails, branch_trails from coverage.debug import short_filename, short_stack from coverage.misc import isolate_module from coverage.types import ( AnyCallable, - TArc, TFileDisposition, TLineNo, + TOffset, TShouldStartContextFn, TShouldTraceFn, TTraceData, @@ -43,17 +44,19 @@ # pylint: disable=unused-argument -LOG = False +# $set_env.py: COVERAGE_SYSMON_LOG - Log sys.monitoring activity +LOG = bool(int(os.getenv("COVERAGE_SYSMON_LOG", 0))) + +# $set_env.py: COVERAGE_SYSMON_STATS - Collect sys.monitoring stats +COLLECT_STATS = bool(int(os.getenv("COVERAGE_SYSMON_STATS", 0))) # This module will be imported in all versions of Python, but only used in 3.12+ # It will be type-checked for 3.12, but not for earlier versions. 
sys_monitoring = getattr(sys, "monitoring", None) -if TYPE_CHECKING: - assert sys_monitoring is not None - # I want to say this but it's not allowed: - # MonitorReturn = Literal[sys.monitoring.DISABLE] | None - MonitorReturn = Any +DISABLE_TYPE = NewType("DISABLE_TYPE", object) +MonitorReturn = Optional[DISABLE_TYPE] +DISABLE = cast(MonitorReturn, getattr(sys_monitoring, "DISABLE", None)) if LOG: # pragma: debugging @@ -76,7 +79,10 @@ def _wrapped(*args: Any, **kwargs: Any) -> Any: assert sys_monitoring is not None short_stack = functools.partial( - short_stack, full=True, short_filenames=True, frame_ids=True, + short_stack, + full=True, + short_filenames=True, + frame_ids=True, ) seen_threads: set[int] = set() @@ -99,7 +105,10 @@ def log(msg: str) -> None: # f"{root}-{pid}-{tslug}.out", ]: with open(filename, "a") as f: - print(f"{pid}:{tslug}: {msg}", file=f, flush=True) + try: + print(f"{pid}:{tslug}: {msg}", file=f, flush=True) + except UnicodeError: + print(f"{pid}:{tslug}: {ascii(msg)}", file=f, flush=True) def arg_repr(arg: Any) -> str: """Make a customized repr for logged values.""" @@ -130,7 +139,9 @@ def _wrapped(self: Any, *args: Any) -> Any: return ret except Exception as exc: log(f"!!{exc.__class__.__name__}: {exc}") - log("".join(traceback.format_exception(exc))) # pylint: disable=[no-value-for-parameter] + if 1: + # pylint: disable=no-value-for-parameter + log("".join(traceback.format_exception(exc))) try: assert sys_monitoring is not None sys_monitoring.set_events(sys.monitoring.COVERAGE_ID, 0) @@ -163,11 +174,19 @@ class CodeInfo: tracing: bool file_data: TTraceFileData | None - # TODO: what is byte_to_line for? - byte_to_line: dict[int, int] | None + byte_to_line: dict[TOffset, TLineNo] | None + # Keys are start instruction offsets for branches. + # Values are lists: + # [ + # ([offset, offset, ...], (from_line, to_line)), + # ([offset, offset, ...], (from_line, to_line)), + # ] + # Two possible trails from the branch point, left and right. + branch_trails: TBranchTrails -def bytes_to_lines(code: CodeType) -> dict[int, int]: + +def bytes_to_lines(code: CodeType) -> dict[TOffset, TLineNo]: """Make a dict mapping byte code offsets to line numbers.""" b2l = {} for bstart, bend, lineno in code.co_lines(): @@ -204,15 +223,15 @@ def __init__(self, tool_id: int) -> None: # A list of code_objects, just to keep them alive so that id's are # useful as identity. 
self.code_objects: list[CodeType] = [] - self.last_lines: dict[FrameType, int] = {} - # Map id(code_object) -> code_object - self.local_event_codes: dict[int, CodeType] = {} self.sysmon_on = False self.lock = threading.Lock() - self.stats = { - "starts": 0, - } + self.stats: dict[str, int] | None = None + if COLLECT_STATS: + self.stats = dict.fromkeys( + "starts start_tracing returns line_lines line_arcs branches branch_trails".split(), + 0, + ) self.stopped = False self._activity = False @@ -230,20 +249,23 @@ def start(self) -> None: assert sys_monitoring is not None sys_monitoring.use_tool_id(self.myid, "coverage.py") register = functools.partial(sys_monitoring.register_callback, self.myid) - events = sys_monitoring.events + events = sys.monitoring.events + + sys_monitoring.set_events(self.myid, events.PY_START) + register(events.PY_START, self.sysmon_py_start) if self.trace_arcs: - sys_monitoring.set_events( - self.myid, - events.PY_START | events.PY_UNWIND, - ) - register(events.PY_START, self.sysmon_py_start) - register(events.PY_RESUME, self.sysmon_py_resume_arcs) - register(events.PY_RETURN, self.sysmon_py_return_arcs) - register(events.PY_UNWIND, self.sysmon_py_unwind_arcs) + register(events.PY_RETURN, self.sysmon_py_return) register(events.LINE, self.sysmon_line_arcs) + if env.PYBEHAVIOR.branch_right_left: + register( + events.BRANCH_RIGHT, # type:ignore[attr-defined] + self.sysmon_branch_either, + ) + register( + events.BRANCH_LEFT, # type:ignore[attr-defined] + self.sysmon_branch_either, + ) else: - sys_monitoring.set_events(self.myid, events.PY_START) - register(events.PY_START, self.sysmon_py_start) register(events.LINE, self.sysmon_line_lines) sys_monitoring.restart_events() self.sysmon_on = True @@ -257,11 +279,7 @@ def stop(self) -> None: return assert sys_monitoring is not None sys_monitoring.set_events(self.myid, 0) - with self.lock: - self.sysmon_on = False - for code in self.local_event_codes.values(): - sys_monitoring.set_local_events(self.myid, code, 0) - self.local_event_codes = {} + self.sysmon_on = False sys_monitoring.free_tool_id(self.myid) @panopticon() @@ -279,29 +297,17 @@ def reset_activity(self) -> None: def get_stats(self) -> dict[str, int] | None: """Return a dictionary of statistics, or None.""" - return None - - # The number of frames in callers_frame takes @panopticon into account. - if LOG: - - def callers_frame(self) -> FrameType: - """Get the frame of the Python code we're monitoring.""" - return ( - inspect.currentframe().f_back.f_back.f_back # type: ignore[union-attr,return-value] - ) - - else: - - def callers_frame(self) -> FrameType: - """Get the frame of the Python code we're monitoring.""" - return inspect.currentframe().f_back.f_back # type: ignore[union-attr,return-value] + return self.stats @panopticon("code", "@") - def sysmon_py_start(self, code: CodeType, instruction_offset: int) -> MonitorReturn: + def sysmon_py_start( + self, code: CodeType, instruction_offset: TOffset + ) -> MonitorReturn: """Handle sys.monitoring.events.PY_START events.""" # Entering a new frame. Decide if we should trace in this file. 
self._activity = True - self.stats["starts"] += 1 + if self.stats is not None: + self.stats["starts"] += 1 code_info = self.code_infos.get(id(code)) tracing_code: bool | None = None @@ -337,102 +343,116 @@ def sysmon_py_start(self, code: CodeType, instruction_offset: int) -> MonitorRet file_data = None b2l = None - self.code_infos[id(code)] = CodeInfo( + code_info = CodeInfo( tracing=tracing_code, file_data=file_data, byte_to_line=b2l, + branch_trails={}, ) + self.code_infos[id(code)] = code_info self.code_objects.append(code) if tracing_code: + if self.stats is not None: + self.stats["start_tracing"] += 1 events = sys.monitoring.events with self.lock: if self.sysmon_on: assert sys_monitoring is not None - sys_monitoring.set_local_events( - self.myid, - code, - events.PY_RETURN - # - | events.PY_RESUME - # | events.PY_YIELD - | events.LINE, - # | events.BRANCH - # | events.JUMP - ) - self.local_event_codes[id(code)] = code - - if tracing_code and self.trace_arcs: - frame = self.callers_frame() - self.last_lines[frame] = -code.co_firstlineno - return None - else: - return sys.monitoring.DISABLE + local_events = events.PY_RETURN | events.PY_RESUME | events.LINE + if self.trace_arcs: + assert env.PYBEHAVIOR.branch_right_left + local_events |= ( + events.BRANCH_RIGHT # type:ignore[attr-defined] + | events.BRANCH_LEFT # type:ignore[attr-defined] + ) + sys_monitoring.set_local_events(self.myid, code, local_events) - @panopticon("code", "@") - def sysmon_py_resume_arcs( - self, code: CodeType, instruction_offset: int, - ) -> MonitorReturn: - """Handle sys.monitoring.events.PY_RESUME events for branch coverage.""" - frame = self.callers_frame() - self.last_lines[frame] = frame.f_lineno + return DISABLE @panopticon("code", "@", None) - def sysmon_py_return_arcs( - self, code: CodeType, instruction_offset: int, retval: object, + def sysmon_py_return( + self, + code: CodeType, + instruction_offset: TOffset, + retval: object, ) -> MonitorReturn: """Handle sys.monitoring.events.PY_RETURN events for branch coverage.""" - frame = self.callers_frame() - code_info = self.code_infos.get(id(code)) - if code_info is not None and code_info.file_data is not None: - last_line = self.last_lines.get(frame) - if last_line is not None: - arc = (last_line, -code.co_firstlineno) - # log(f"adding {arc=}") - cast(set[TArc], code_info.file_data).add(arc) - - # Leaving this function, no need for the frame any more. - self.last_lines.pop(frame, None) - - @panopticon("code", "@", "exc") - def sysmon_py_unwind_arcs( - self, code: CodeType, instruction_offset: int, exception: BaseException, - ) -> MonitorReturn: - """Handle sys.monitoring.events.PY_UNWIND events for branch coverage.""" - frame = self.callers_frame() - # Leaving this function. - last_line = self.last_lines.pop(frame, None) - if isinstance(exception, GeneratorExit): - # We don't want to count generator exits as arcs. - return + if self.stats is not None: + self.stats["returns"] += 1 code_info = self.code_infos.get(id(code)) - if code_info is not None and code_info.file_data is not None: - if last_line is not None: - arc = (last_line, -code.co_firstlineno) - # log(f"adding {arc=}") - cast(set[TArc], code_info.file_data).add(arc) - + # code_info is not None and code_info.file_data is not None, since we + # wouldn't have enabled this event if they were. 
+ last_line = code_info.byte_to_line[instruction_offset] # type: ignore + if last_line is not None: + arc = (last_line, -code.co_firstlineno) + code_info.file_data.add(arc) # type: ignore + # log(f"adding {arc=}") + return DISABLE @panopticon("code", "line") - def sysmon_line_lines(self, code: CodeType, line_number: int) -> MonitorReturn: + def sysmon_line_lines(self, code: CodeType, line_number: TLineNo) -> MonitorReturn: """Handle sys.monitoring.events.LINE events for line coverage.""" - code_info = self.code_infos[id(code)] - if code_info.file_data is not None: - cast(set[TLineNo], code_info.file_data).add(line_number) - # log(f"adding {line_number=}") - return sys.monitoring.DISABLE + if self.stats is not None: + self.stats["line_lines"] += 1 + code_info = self.code_infos.get(id(code)) + # It should be true that code_info is not None and code_info.file_data + # is not None, since we wouldn't have enabled this event if they were. + # But somehow code_info can be None here, so we have to check. + if code_info is not None and code_info.file_data is not None: + code_info.file_data.add(line_number) # type: ignore + # log(f"adding {line_number=}") + return DISABLE @panopticon("code", "line") - def sysmon_line_arcs(self, code: CodeType, line_number: int) -> MonitorReturn: + def sysmon_line_arcs(self, code: CodeType, line_number: TLineNo) -> MonitorReturn: """Handle sys.monitoring.events.LINE events for branch coverage.""" + if self.stats is not None: + self.stats["line_arcs"] += 1 code_info = self.code_infos[id(code)] - ret = None - if code_info.file_data is not None: - frame = self.callers_frame() - last_line = self.last_lines.get(frame) - if last_line is not None: - arc = (last_line, line_number) - cast(set[TArc], code_info.file_data).add(arc) - # log(f"adding {arc=}") - self.last_lines[frame] = line_number - return ret + # code_info is not None and code_info.file_data is not None, since we + # wouldn't have enabled this event if they were. + arc = (line_number, line_number) + code_info.file_data.add(arc) # type: ignore + # log(f"adding {arc=}") + return DISABLE + + @panopticon("code", "@", "@") + def sysmon_branch_either( + self, code: CodeType, instruction_offset: TOffset, destination_offset: TOffset + ) -> MonitorReturn: + """Handle BRANCH_RIGHT and BRANCH_LEFT events.""" + if self.stats is not None: + self.stats["branches"] += 1 + code_info = self.code_infos[id(code)] + # code_info is not None and code_info.file_data is not None, since we + # wouldn't have enabled this event if they were. + if not code_info.branch_trails: + if self.stats is not None: + self.stats["branch_trails"] += 1 + code_info.branch_trails = branch_trails(code) + # log(f"branch_trails for {code}:\n {code_info.branch_trails}") + added_arc = False + dest_info = code_info.branch_trails.get(instruction_offset) + # log(f"{dest_info = }") + if dest_info is not None: + for offsets, arc in dest_info: + if arc is None: + continue + if destination_offset in offsets: + code_info.file_data.add(arc) # type: ignore + # log(f"adding {arc=}") + added_arc = True + break + + if not added_arc: + # This could be an exception jumping from line to line. 
+ assert code_info.byte_to_line is not None + l1 = code_info.byte_to_line[instruction_offset] + l2 = code_info.byte_to_line[destination_offset] + if l1 != l2: + arc = (l1, l2) + code_info.file_data.add(arc) # type: ignore + # log(f"adding unforeseen {arc=}") + + return DISABLE diff --git a/coverage/types.py b/coverage/types.py index bcf8396d6..8b919a89b 100644 --- a/coverage/types.py +++ b/coverage/types.py @@ -53,6 +53,9 @@ def __call__( # Line numbers are pervasive enough that they deserve their own type. TLineNo = int +# Bytecode offsets are pervasive enough that they deserve their own type. +TOffset = int + TArc = tuple[TLineNo, TLineNo] class TFileDisposition(Protocol): @@ -125,8 +128,8 @@ def get_stats(self) -> dict[str, int] | None: ## Configuration # One value read from a config file. -TConfigValueIn = Optional[Union[bool, int, float, str, Iterable[str]]] -TConfigValueOut = Optional[Union[bool, int, float, str, list[str]]] +TConfigValueIn = Optional[Union[bool, int, float, str, Iterable[str], Mapping[str, Iterable[str]]]] +TConfigValueOut = Optional[Union[bool, int, float, str, list[str], dict[str, list[str]]]] # An entire config section, mapping option names to values. TConfigSectionIn = Mapping[str, TConfigValueIn] TConfigSectionOut = Mapping[str, TConfigValueOut] diff --git a/coverage/version.py b/coverage/version.py index a76371e77..fe08b5f98 100644 --- a/coverage/version.py +++ b/coverage/version.py @@ -8,7 +8,7 @@ # version_info: same semantics as sys.version_info. # _dev: the .devN suffix if any. -version_info = (7, 6, 10, "final", 0) +version_info = (7, 8, 0, "final", 0) _dev = 0 diff --git a/doc/cmd.rst b/doc/cmd.rst index fa6565678..74113549b 100644 --- a/doc/cmd.rst +++ b/doc/cmd.rst @@ -273,14 +273,26 @@ Conflicting dynamic contexts (dynamic-conflict) :meth:`.Coverage.switch_context` function to change the context. Only one of these mechanisms should be in use at a time. -sys.monitoring isn't available, using default core (no-sysmon) +sys.monitoring isn't available in this version, using default core (no-sysmon) You requested to use the sys.monitoring measurement core, but are running on Python 3.11 or lower where it isn't available. A default core will be used instead. +sys.monitoring can't measure branches in this version, using default core (no-sysmon) + You requested the sys.monitoring measurement core and also branch coverage. + This isn't supported until the later alphas of Python 3.14. A default core + will be used instead. + +sys.monitoring doesn't yet support dynamic contexts, using default core (no-sysmon) + You requested the sys.monitoring measurement core and also dynamic contexts. + This isn't supported by coverage.py yet. A default core will be used + instead. + Individual warnings can be disabled with the :ref:`disable_warnings -` configuration setting. To silence "No data was -collected," add this to your configuration file: +` configuration setting. It is a list of the +short parenthetical nicknames in the warning messages. For example, to silence +"No data was collected (no-data-collected)", add this to your configuration +file: .. [[[cog show_configs( @@ -588,6 +600,12 @@ detail the missed branches:: --------------------------------------------------------------------- TOTAL 91 12 18 3 87% +Ranges of lines are shown with a dash: "17-23" means all lines from 17 to 23 +inclusive are missing coverage. Missed branches are shown with an arrow: +"40->45" means the branch from line 40 to line 45 is missing. 
A branch can go +backwards in a file, so you might see a branch from a later line to an earlier +line, like "55->50". + You can restrict the report to only certain files by naming them on the command line:: diff --git a/doc/conf.py b/doc/conf.py index 9756049e0..57a1ffd00 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -65,13 +65,13 @@ # built documents. # @@@ editable -copyright = "2009–2024, Ned Batchelder" # pylint: disable=redefined-builtin +copyright = "2009–2025, Ned Batchelder" # pylint: disable=redefined-builtin # The short X.Y.Z version. -version = "7.6.10" +version = "7.8.0" # The full version, including alpha/beta/rc tags. -release = "7.6.10" +release = "7.8.0" # The date of release, in "monthname day, year" format. -release_date = "December 26, 2024" +release_date = "March 30, 2025" # @@@ end rst_epilog = f""" diff --git a/doc/config.rst b/doc/config.rst index 87cbdd108..7a02d6a04 100644 --- a/doc/config.rst +++ b/doc/config.rst @@ -476,6 +476,18 @@ ambiguities between packages and directories. .. versionadded:: 5.3 +.. _config_run_source_dirs: + +[run] source_dirs +................. + +(multi-string) A list of directories, the source to measure during execution. +Operates the same as ``source``, but only names directories, for resolving +ambiguities between packages and directories. + +.. versionadded:: 7.8 + + .. _config_run_timid: [run] timid diff --git a/doc/excluding.rst b/doc/excluding.rst index a3481fb5f..034627b55 100644 --- a/doc/excluding.rst +++ b/doc/excluding.rst @@ -104,7 +104,7 @@ all of them by adding a regex to the exclusion list: [tool.coverage.report] exclude_also = [ "def __repr__", - ] + ] """, ) .. ]]] @@ -124,7 +124,7 @@ all of them by adding a regex to the exclusion list: [tool.coverage.report] exclude_also = [ "def __repr__", - ] + ] .. code-tab:: ini :caption: setup.cfg or tox.ini @@ -133,7 +133,7 @@ all of them by adding a regex to the exclusion list: exclude_also = def __repr__ -.. [[[end]]] (checksum: e3194120285bcbac38a92b109edaa20c) +.. [[[end]]] (checksum: f3e70ebf128fbef4087efe75dcfadcb8) For example, here's a list of exclusions I've used: @@ -156,17 +156,17 @@ For example, here's a list of exclusions I've used: toml=r""" [tool.coverage.report] exclude_also = [ - "def __repr__", - "if self.debug:", - "if settings.DEBUG", - "raise AssertionError", - "raise NotImplementedError", - "if 0:", - "if __name__ == .__main__.:", - "if TYPE_CHECKING:", - "class .*\\bProtocol\\):", - "@(abc\\.)?abstractmethod", - ] + 'def __repr__', + 'if self.debug:', + 'if settings.DEBUG', + 'raise AssertionError', + 'raise NotImplementedError', + 'if 0:', + 'if __name__ == .__main__.:', + 'if TYPE_CHECKING:', + 'class .*\bProtocol\):', + '@(abc\.)?abstractmethod', + ] """, ) .. ]]] @@ -194,17 +194,17 @@ For example, here's a list of exclusions I've used: [tool.coverage.report] exclude_also = [ - "def __repr__", - "if self.debug:", - "if settings.DEBUG", - "raise AssertionError", - "raise NotImplementedError", - "if 0:", - "if __name__ == .__main__.:", - "if TYPE_CHECKING:", - "class .*\\bProtocol\\):", - "@(abc\\.)?abstractmethod", - ] + 'def __repr__', + 'if self.debug:', + 'if settings.DEBUG', + 'raise AssertionError', + 'raise NotImplementedError', + 'if 0:', + 'if __name__ == .__main__.:', + 'if TYPE_CHECKING:', + 'class .*\bProtocol\):', + '@(abc\.)?abstractmethod', + ] .. code-tab:: ini :caption: setup.cfg or tox.ini @@ -222,7 +222,7 @@ For example, here's a list of exclusions I've used: class .*\bProtocol\): @(abc\.)?abstractmethod -.. 
[[[end]]] (checksum: 91f09828a1e6d0e92543e14a8ea3ba39) +.. [[[end]]] (checksum: 650b209edd27112381b5f0a8d2ee0c45) The :ref:`config_report_exclude_also` option adds regexes to the built-in default list so that you can add your own exclusions. The older @@ -270,12 +270,12 @@ Here are some examples: [tool.coverage.report] exclude_also = [ # 1. Exclude an except clause of a specific form: - "except ValueError:\\n\\s*assume\\(False\\)", + 'except ValueError:\n\s*assume\(False\)', # 2. Comments to turn coverage on and off: - "no cover: start(?s:.)*?no cover: stop", + 'no cover: start(?s:.)*?no cover: stop', # 3. A pragma comment that excludes an entire file: - "\\A(?s:.*# pragma: exclude file.*)\\Z", - ] + '\A(?s:.*# pragma: exclude file.*)\Z', + ] """, ) .. ]]] @@ -300,12 +300,12 @@ Here are some examples: [tool.coverage.report] exclude_also = [ # 1. Exclude an except clause of a specific form: - "except ValueError:\\n\\s*assume\\(False\\)", + 'except ValueError:\n\s*assume\(False\)', # 2. Comments to turn coverage on and off: - "no cover: start(?s:.)*?no cover: stop", + 'no cover: start(?s:.)*?no cover: stop', # 3. A pragma comment that excludes an entire file: - "\\A(?s:.*# pragma: exclude file.*)\\Z", - ] + '\A(?s:.*# pragma: exclude file.*)\Z', + ] .. code-tab:: ini :caption: setup.cfg or tox.ini @@ -319,7 +319,7 @@ Here are some examples: ; 3. A pragma comment that excludes an entire file: \A(?s:.*# pragma: exclude file.*)\Z -.. [[[end]]] (checksum: ee3ef14b5a5d73f987b924df623a4927) +.. [[[end]]] (checksum: c46e819ad9a1d3a8e37037a89d28cfde) The first regex matches a specific except line followed by a specific function call. Both lines must be present for the exclusion to take effect. Note that diff --git a/doc/index.rst b/doc/index.rst index f835c3b4e..ea3486e12 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -18,8 +18,8 @@ supported on: .. PYVERSIONS -* Python 3.9 through 3.14 alpha 2, including free-threading. -* PyPy3 versions 3.9 and 3.10. +* Python 3.9 through 3.14 alpha 6, including free-threading. +* PyPy3 versions 3.9, 3.10, and 3.11. .. 
ifconfig:: prerelease diff --git a/doc/requirements.pip b/doc/requirements.pip index 20b13366e..ae5de5420 100644 --- a/doc/requirements.pip +++ b/doc/requirements.pip @@ -6,13 +6,13 @@ # alabaster==1.0.0 # via sphinx -anyio==4.7.0 +anyio==4.8.0 # via # starlette # watchfiles -babel==2.16.0 +babel==2.17.0 # via sphinx -certifi==2024.12.14 +certifi==2025.1.31 # via requests charset-normalizer==3.4.1 # via requests @@ -38,13 +38,13 @@ idna==3.10 # requests imagesize==1.4.1 # via sphinx -jinja2==3.1.5 +jinja2==3.1.6 # via sphinx markupsafe==3.0.2 # via jinja2 packaging==24.2 # via sphinx -pbr==6.1.0 +pbr==6.1.1 # via stevedore polib==1.2.0 # via sphinx-lint @@ -52,7 +52,7 @@ pyenchant==3.2.2 # via # -r doc/requirements.in # sphinxcontrib-spelling -pygments==2.18.0 +pygments==2.19.1 # via # doc8 # sphinx @@ -64,11 +64,13 @@ requests==2.32.3 # sphinxcontrib-spelling restructuredtext-lint==1.4.0 # via doc8 +roman-numerals-py==3.1.0 + # via sphinx sniffio==1.3.1 # via anyio snowballstemmer==2.2.0 # via sphinx -sphinx==8.1.3 +sphinx==8.2.3 # via # -r doc/requirements.in # sphinx-autobuild @@ -103,9 +105,9 @@ sphinxcontrib-serializinghtml==2.0.0 # via sphinx sphinxcontrib-spelling==8.0.1 # via -r doc/requirements.in -starlette==0.43.0 +starlette==0.46.1 # via sphinx-autobuild -stevedore==5.4.0 +stevedore==5.4.1 # via doc8 typing-extensions==4.12.2 # via anyio @@ -113,7 +115,11 @@ urllib3==2.3.0 # via requests uvicorn==0.34.0 # via sphinx-autobuild -watchfiles==1.0.3 +watchfiles==1.0.4 # via sphinx-autobuild -websockets==14.1 +websockets==15.0.1 # via sphinx-autobuild + +# The following packages are considered to be unsafe in a requirements file: +setuptools==76.0.0 + # via pbr diff --git a/doc/sample_html/class_index.html b/doc/sample_html/class_index.html index dcd0c2515..c4f4afb5e 100644 --- a/doc/sample_html/class_index.html +++ b/doc/sample_html/class_index.html @@ -56,8 +56,8 @@

Classes
- coverage.py v7.6.10, - created at 2024-12-26 11:29 -0500 + coverage.py v7.8.0, + created at 2025-03-30 15:44 -0400
@@ -537,8 +537,8 @@
- coverage.py v7.6.10, - created at 2024-12-26 11:29 -0500 + coverage.py v7.8.0, + created at 2025-03-30 15:44 -0400
diff --git a/doc/sample_html/z_7b071bdc2a35fa80_makefiles_py.html b/doc/sample_html/z_7b071bdc2a35fa80_makefiles_py.html index 3c0f62ea4..43a29b1e3 100644 --- a/doc/sample_html/z_7b071bdc2a35fa80_makefiles_py.html +++ b/doc/sample_html/z_7b071bdc2a35fa80_makefiles_py.html @@ -66,8 +66,8 @@

^ index » next - coverage.py v7.6.10, - created at 2024-12-26 11:29 -0500 + coverage.py v7.8.0, + created at 2025-03-30 15:44 -0400
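The regenerated sample-report footers above reflect the version bump made in coverage/version.py earlier in this diff. As a rough, hypothetical illustration (not coverage.py's own formatting code) of how a version_info tuple with sys.version_info semantics maps to the short string shown in those footers:

    def short_version(info: tuple[int, int, int, str, int]) -> str:
        """Render a version_info tuple as a display string."""
        # (7, 8, 0, "final", 0) -> "7.8.0"; a non-final level would get a
        # suffix, e.g. (7, 9, 0, "beta", 1) -> "7.9.0b1".
        major, minor, micro, releaselevel, serial = info
        version = f"{major}.{minor}.{micro}"
        if releaselevel != "final":
            version += f"{releaselevel[0]}{serial}"
        return version

    assert short_version((7, 8, 0, "final", 0)) == "7.8.0"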