diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index 5eddd8137..25330d632 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -51,7 +51,7 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
- uses: github/codeql-action/init@6bb031afdd8eb862ea3fc1848194185e076637e5 # v3
+ uses: github/codeql-action/init@5f8171a638ada777af81d42b55959a643bb29017 # v3
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -62,7 +62,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
- uses: github/codeql-action/autobuild@6bb031afdd8eb862ea3fc1848194185e076637e5 # v3
+ uses: github/codeql-action/autobuild@5f8171a638ada777af81d42b55959a643bb29017 # v3
# ℹ️ Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
@@ -76,4 +76,4 @@ jobs:
# make release
- name: Perform CodeQL Analysis
- uses: github/codeql-action/analyze@6bb031afdd8eb862ea3fc1848194185e076637e5 # v3
+ uses: github/codeql-action/analyze@5f8171a638ada777af81d42b55959a643bb29017 # v3
diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml
index d94227f04..9a3cc59a9 100644
--- a/.github/workflows/coverage.yml
+++ b/.github/workflows/coverage.yml
@@ -125,7 +125,7 @@ jobs:
mv .metacov .metacov.$MATRIX_ID
- name: "Upload coverage data"
- uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
+ uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: metacov-${{ env.MATRIX_ID }}
path: .metacov.*
@@ -170,7 +170,7 @@ jobs:
python igor.py zip_mods
- name: "Download coverage data"
- uses: actions/download-artifact@cc203385981b70ca67e1cc392babf9cc229d5806 # v4.1.9
+ uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
with:
pattern: metacov-*
merge-multiple: true
@@ -184,7 +184,7 @@ jobs:
python igor.py combine_html
- name: "Upload HTML report"
- uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
+ uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: html_report
path: htmlcov
@@ -239,7 +239,7 @@ jobs:
- name: "Download coverage HTML report"
if: ${{ github.ref == 'refs/heads/master' }}
- uses: actions/download-artifact@cc203385981b70ca67e1cc392babf9cc229d5806 # v4.1.9
+ uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
with:
name: html_report
path: reports_repo/${{ env.report_dir }}
diff --git a/.github/workflows/kit.yml b/.github/workflows/kit.yml
index 421ea5af0..f5f45ef92 100644
--- a/.github/workflows/kit.yml
+++ b/.github/workflows/kit.yml
@@ -182,7 +182,7 @@ jobs:
python -m twine check wheelhouse/*
- name: "Upload binary wheels"
- uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
+ uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: dist-${{ env.MATRIX_ID }}
path: wheelhouse/*.whl
@@ -223,7 +223,7 @@ jobs:
python -m twine check dist/*
- name: "Upload non-binary artifacts"
- uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
+ uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: dist-non-binary
path: dist/*
@@ -267,7 +267,7 @@ jobs:
python -m twine check dist/*
- name: "Upload wheels"
- uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
+ uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: dist-pypy
path: dist/*.whl
@@ -286,7 +286,7 @@ jobs:
id-token: write
steps:
- name: "Download artifacts"
- uses: actions/download-artifact@cc203385981b70ca67e1cc392babf9cc229d5806 # v4.1.9
+ uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
with:
pattern: dist-*
merge-multiple: true
@@ -308,7 +308,7 @@ jobs:
ls -alR
- name: "Upload signatures"
- uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
+ uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: signatures
path: "*.sigstore.json"
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
index e6098e027..4e6032605 100644
--- a/.github/workflows/publish.yml
+++ b/.github/workflows/publish.yml
@@ -64,7 +64,7 @@ jobs:
steps:
- name: "Download dists"
- uses: actions/download-artifact@cc203385981b70ca67e1cc392babf9cc229d5806 # v4.1.9
+ uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
with:
repository: "nedbat/coveragepy"
run-id: ${{ needs.find-run.outputs.run-id }}
@@ -104,7 +104,7 @@ jobs:
steps:
- name: "Download dists"
- uses: actions/download-artifact@cc203385981b70ca67e1cc392babf9cc229d5806 # v4.1.9
+ uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
with:
repository: "nedbat/coveragepy"
run-id: ${{ needs.find-run.outputs.run-id }}
diff --git a/CHANGES.rst b/CHANGES.rst
index 4d3e07702..a2b172dc6 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -22,6 +22,27 @@ upgrading your version of coverage.py.
.. start-releases
+.. _changes_7-8-0:
+
+Version 7.8.0 — 2025-03-30
+--------------------------
+
+- Added a new ``source_dirs`` setting for symmetry with the existing
+ ``source_pkgs`` setting. It's preferable to the existing ``source`` setting,
+ because you'll get a clear error when directories don't exist. Fixes `issue
+  1942`_. Thanks, `Jeremy Fleischman <pull 1943_>`_.
+
+- Fix: the ``PYTHONSAFEPATH`` environment variable, new in Python 3.11, is
+  now properly supported, closing `issue 1696`_. Thanks,
+  `Philipp A. <pull 1700_>`_. This works correctly except for one detail when
+  using the ``coverage`` command on Windows; there you can use
+  ``python -m coverage`` instead if you need exact emulation.
+
+.. _issue 1696: https://github.com/nedbat/coveragepy/issues/1696
+.. _pull 1700: https://github.com/nedbat/coveragepy/pull/1700
+.. _issue 1942: https://github.com/nedbat/coveragepy/issues/1942
+.. _pull 1943: https://github.com/nedbat/coveragepy/pull/1943
+
.. _changes_7-7-1:
Version 7.7.1 — 2025-03-21
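
A minimal sketch of the ``source_dirs`` behavior described in the changelog
entry above, assuming coverage 7.8.0 is installed; ``no_such_dir`` is a
made-up path used only to trigger the error:

    # Sketch: a missing directory named in source_dirs is reported as a
    # ConfigError instead of being silently treated as a package name.
    import coverage
    from coverage.exceptions import ConfigError

    cov = coverage.Coverage(source_dirs=["no_such_dir"])
    try:
        cov.start()   # the configuration is checked when measurement starts
    except ConfigError as exc:
        print(f"configuration problem: {exc}")
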
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt
index 186608d1b..12fc1dab5 100644
--- a/CONTRIBUTORS.txt
+++ b/CONTRIBUTORS.txt
@@ -110,6 +110,7 @@ James Valleroy
Jan Kühle
Jan Rusak
Janakarajan Natarajan
+Jeremy Fleischman
Jerin Peter George
Jessamyn Smith
Joanna Ejzel
diff --git a/README.rst b/README.rst
index cb5f41b2d..cf1e856f5 100644
--- a/README.rst
+++ b/README.rst
@@ -35,6 +35,7 @@ Documentation is on `Read the Docs`_. Code repository and issue tracker are on
.. _GitHub: https://github.com/nedbat/coveragepy
**New in 7.x:**
+``[run] source_dirs`` setting;
``Coverage.branch_stats()``;
multi-line exclusion patterns;
function/class reporting;
diff --git a/coverage/bytecode.py b/coverage/bytecode.py
index 764b29b80..bea039c87 100644
--- a/coverage/bytecode.py
+++ b/coverage/bytecode.py
@@ -1,13 +1,18 @@
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
-"""Bytecode manipulation for coverage.py"""
+"""Bytecode analysis for coverage.py"""
from __future__ import annotations
+import dis
+
from types import CodeType
+from typing import Iterable, Optional
from collections.abc import Iterator
+from coverage.types import TArc, TOffset
+
def code_objects(code: CodeType) -> Iterator[CodeType]:
"""Iterate over all the code objects in `code`."""
@@ -20,3 +25,138 @@ def code_objects(code: CodeType) -> Iterator[CodeType]:
if isinstance(c, CodeType):
stack.append(c)
yield code
+
+
+def op_set(*op_names: str) -> set[int]:
+ """Make a set of opcodes from instruction names.
+
+    The names might not exist in this version of Python; skip any that don't.
+ """
+ return {op for name in op_names if (op := dis.opmap.get(name))}
+
+
+# Opcodes that are unconditional jumps elsewhere.
+ALWAYS_JUMPS = op_set(
+ "JUMP_BACKWARD",
+ "JUMP_BACKWARD_NO_INTERRUPT",
+ "JUMP_FORWARD",
+)
+
+# Opcodes that exit from a function.
+RETURNS = op_set("RETURN_VALUE", "RETURN_GENERATOR")
+
+
+class InstructionWalker:
+ """Utility to step through trails of instructions.
+
+ We have two reasons to need sequences of instructions from a code object:
+    First, to visit all the instructions in the object in strict sequence;
+    this is `walk(follow_jumps=False)`. Second, to follow jumps and understand
+    how execution will flow; this is `walk(follow_jumps=True)`.
+
+ """
+
+ def __init__(self, code: CodeType) -> None:
+ self.code = code
+ self.insts: dict[TOffset, dis.Instruction] = {}
+
+ inst = None
+ for inst in dis.get_instructions(code):
+ self.insts[inst.offset] = inst
+
+ assert inst is not None
+ self.max_offset = inst.offset
+
+ def walk(
+ self, *, start_at: TOffset = 0, follow_jumps: bool = True
+ ) -> Iterable[dis.Instruction]:
+ """
+ Yield instructions starting from `start_at`. Follow unconditional
+ jumps if `follow_jumps` is true.
+ """
+ seen = set()
+ offset = start_at
+ while offset < self.max_offset + 1:
+ if offset in seen:
+ break
+ seen.add(offset)
+ if inst := self.insts.get(offset):
+ yield inst
+ if follow_jumps and inst.opcode in ALWAYS_JUMPS:
+ offset = inst.jump_target
+ continue
+ offset += 2
+
+
+TBranchTrail = tuple[set[TOffset], Optional[TArc]]
+TBranchTrails = dict[TOffset, list[TBranchTrail]]
+
+
+def branch_trails(code: CodeType) -> TBranchTrails:
+ """
+ Calculate branch trails for `code`.
+
+ Instructions can have a jump_target, where they might jump to next. Some
+ instructions with a jump_target are unconditional jumps (ALWAYS_JUMPS), so
+ they aren't interesting to us, since they aren't the start of a branch
+ possibility.
+
+ Instructions that might or might not jump somewhere else are branch
+ possibilities. For each of those, we track a trail of instructions. These
+    are sets of instruction offsets: the next instructions that can execute.
+ We follow the trail until we get to a new source line. That gives us the
+ arc from the original instruction's line to the new source line.
+
+ """
+ the_trails: TBranchTrails = {}
+ iwalker = InstructionWalker(code)
+ for inst in iwalker.walk(follow_jumps=False):
+ if not inst.jump_target:
+ # We only care about instructions with jump targets.
+ continue
+ if inst.opcode in ALWAYS_JUMPS:
+ # We don't care about unconditional jumps.
+ continue
+
+ from_line = inst.line_number
+ if from_line is None:
+ continue
+
+ def walk_one_branch(start_at: TOffset) -> TBranchTrail:
+ # pylint: disable=cell-var-from-loop
+ inst_offsets: set[TOffset] = set()
+ to_line = None
+ for inst2 in iwalker.walk(start_at=start_at):
+ inst_offsets.add(inst2.offset)
+ if inst2.line_number and inst2.line_number != from_line:
+ to_line = inst2.line_number
+ break
+ elif inst2.jump_target and (inst2.opcode not in ALWAYS_JUMPS):
+ break
+ elif inst2.opcode in RETURNS:
+ to_line = -code.co_firstlineno
+ break
+ if to_line is not None:
+ return inst_offsets, (from_line, to_line)
+ else:
+ return set(), None
+
+ # Calculate two trails: one from the next instruction, and one from the
+ # jump_target instruction.
+ trails = [
+ walk_one_branch(start_at=inst.offset + 2),
+ walk_one_branch(start_at=inst.jump_target),
+ ]
+ the_trails[inst.offset] = trails
+
+ # Sometimes we get BRANCH_RIGHT or BRANCH_LEFT events from instructions
+ # other than the original jump possibility instruction. Register each
+ # trail under all of their offsets so we can pick up in the middle of a
+ # trail if need be.
+ for trail in trails:
+ for offset in trail[0]:
+ if offset not in the_trails:
+ the_trails[offset] = []
+ the_trails[offset].append(trail)
+
+ return the_trails
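
An exploratory sketch of what the new module computes, not part of the patch:
it assumes coverage 7.8.0 and a recent Python (3.13+, where dis.Instruction
exposes the jump_target and line_number attributes that branch_trails uses):

    # Inspect the branch trails computed for a tiny function. The exact
    # offsets and arcs vary by Python version.
    import dis
    from coverage.bytecode import branch_trails

    def example(x):
        if x > 0:            # the conditional jump here is a branch possibility
            return "yes"
        return "no"

    trails = branch_trails(example.__code__)
    for offset, branches in sorted(trails.items()):
        for inst_offsets, arc in branches:
            # arc is (from_line, to_line); a negative to_line marks a return.
            print(f"@{offset}: {sorted(inst_offsets)} -> {arc}")
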
diff --git a/coverage/config.py b/coverage/config.py
index 75f314816..94831e070 100644
--- a/coverage/config.py
+++ b/coverage/config.py
@@ -211,6 +211,7 @@ def __init__(self) -> None:
self.sigterm = False
self.source: list[str] | None = None
self.source_pkgs: list[str] = []
+ self.source_dirs: list[str] = []
self.timid = False
self._crash: str | None = None
@@ -392,6 +393,7 @@ def copy(self) -> CoverageConfig:
("sigterm", "run:sigterm", "boolean"),
("source", "run:source", "list"),
("source_pkgs", "run:source_pkgs", "list"),
+ ("source_dirs", "run:source_dirs", "list"),
("timid", "run:timid", "boolean"),
("_crash", "run:_crash"),
diff --git a/coverage/control.py b/coverage/control.py
index d79c97ace..16c99f7f0 100644
--- a/coverage/control.py
+++ b/coverage/control.py
@@ -131,6 +131,7 @@ def __init__( # pylint: disable=too-many-arguments
config_file: FilePath | bool = True,
source: Iterable[str] | None = None,
source_pkgs: Iterable[str] | None = None,
+ source_dirs: Iterable[str] | None = None,
omit: str | Iterable[str] | None = None,
include: str | Iterable[str] | None = None,
debug: Iterable[str] | None = None,
@@ -188,6 +189,10 @@ def __init__( # pylint: disable=too-many-arguments
`source`, but can be used to name packages where the name can also be
interpreted as a file path.
+    `source_dirs` is a list of directory paths. It works the same as
+    `source`, but a path that doesn't exist is reported as an error rather
+    than being treated as a package name.
+
`include` and `omit` are lists of file name patterns. Files that match
`include` will be measured, files that match `omit` will not. Each
will also accept a single string argument.
@@ -235,6 +240,8 @@ def __init__( # pylint: disable=too-many-arguments
.. versionadded:: 7.7
The `plugins` parameter.
+ .. versionadded:: 7.8
+ The `source_dirs` parameter.
"""
# Start self.config as a usable default configuration. It will soon be
# replaced with the real configuration.
@@ -302,6 +309,7 @@ def __init__( # pylint: disable=too-many-arguments
parallel=bool_or_none(data_suffix),
source=source,
source_pkgs=source_pkgs,
+ source_dirs=source_dirs,
run_omit=omit,
run_include=include,
debug=debug,
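
For the ordinary case, the new keyword mirrors the ``[run] source_dirs``
setting. A hedged sketch of the API form; the directory name ``src`` and the
module ``myprog`` are assumptions about your project layout:

    # Measure only code under ./src, named by directory rather than by package.
    import coverage

    cov = coverage.Coverage(source_dirs=["src"], branch=True)
    cov.start()
    import myprog            # hypothetical module that lives in ./src
    myprog.main()
    cov.stop()
    cov.save()
    print(cov.report())      # total percentage covered, as a float
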
diff --git a/coverage/execfile.py b/coverage/execfile.py
index 0affda498..b44c95280 100644
--- a/coverage/execfile.py
+++ b/coverage/execfile.py
@@ -17,6 +17,7 @@
from types import CodeType, ModuleType
from typing import Any
+from coverage import env
from coverage.exceptions import CoverageException, _ExceptionDuringRun, NoCode, NoSource
from coverage.files import canonical_filename, python_reported_file
from coverage.misc import isolate_module
@@ -89,7 +90,10 @@ def prepare(self) -> None:
This needs to happen before any importing, and without importing anything.
"""
path0: str | None
- if self.as_module:
+ if env.PYVERSION >= (3, 11) and getattr(sys.flags, "safe_path"):
+ # See https://docs.python.org/3/using/cmdline.html#cmdoption-P
+ path0 = None
+ elif self.as_module:
path0 = os.getcwd()
elif os.path.isdir(self.arg0):
# Running a directory means running the __main__.py file in that
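
A standalone sketch of the path0 decision above; it needs Python 3.11+ for
sys.flags.safe_path, choose_path0 is an invented name, and only the cases
visible in this hunk are reproduced:

    import os
    import sys

    def choose_path0(as_module: bool, arg0: str) -> str | None:
        """Return the entry to prepend to sys.path, or None for no entry."""
        if sys.version_info >= (3, 11) and sys.flags.safe_path:
            # -P / PYTHONSAFEPATH: leave sys.path alone, as the user asked.
            return None
        if as_module:
            return os.getcwd()
        # The directory and file cases are elided here; see prepare() above.
        raise NotImplementedError
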
diff --git a/coverage/inorout.py b/coverage/inorout.py
index e2b4c8ca3..8a5a1e27d 100644
--- a/coverage/inorout.py
+++ b/coverage/inorout.py
@@ -24,7 +24,7 @@
from coverage import env
from coverage.disposition import FileDisposition, disposition_init
-from coverage.exceptions import CoverageException, PluginError
+from coverage.exceptions import ConfigError, CoverageException, PluginError
from coverage.files import TreeMatcher, GlobMatcher, ModuleMatcher
from coverage.files import prep_patterns, find_python_files, canonical_filename
from coverage.misc import isolate_module, sys_modules_saved
@@ -36,26 +36,18 @@
from coverage.plugin_support import Plugins
-# Pypy has some unusual stuff in the "stdlib". Consider those locations
-# when deciding where the stdlib is. These modules are not used for anything,
-# they are modules importable from the pypy lib directories, so that we can
-# find those directories.
modules_we_happen_to_have: list[ModuleType] = [
inspect, itertools, os, platform, re, sysconfig, traceback,
]
if env.PYPY:
- try:
- import _structseq
- modules_we_happen_to_have.append(_structseq)
- except ImportError:
- pass
-
- try:
- import _pypy_irc_topic
- modules_we_happen_to_have.append(_pypy_irc_topic)
- except ImportError:
- pass
+ # Pypy has some unusual stuff in the "stdlib". Consider those locations
+    # when deciding where the stdlib is. These modules are not used for
+    # anything; they are simply importable from the PyPy lib directories, so
+    # that we can find those directories.
+ import _pypy_irc_topic # pylint: disable=import-error
+ import _structseq # pylint: disable=import-error
+ modules_we_happen_to_have.extend([_structseq, _pypy_irc_topic])
os = isolate_module(os)
@@ -191,14 +183,23 @@ def __init__(
self.debug = debug
self.include_namespace_packages = include_namespace_packages
- self.source: list[str] = []
self.source_pkgs: list[str] = []
self.source_pkgs.extend(config.source_pkgs)
+ self.source_dirs: list[str] = []
+ self.source_dirs.extend(config.source_dirs)
for src in config.source or []:
if os.path.isdir(src):
- self.source.append(canonical_filename(src))
+ self.source_dirs.append(src)
else:
self.source_pkgs.append(src)
+
+ # Canonicalize everything in `source_dirs`.
+ # Also confirm that they actually are directories.
+ for i, src in enumerate(self.source_dirs):
+ if not os.path.isdir(src):
+ raise ConfigError(f"Source dir is not a directory: {src!r}")
+ self.source_dirs[i] = canonical_filename(src)
+
self.source_pkgs_unmatched = self.source_pkgs[:]
self.include = prep_patterns(config.run_include)
@@ -233,10 +234,10 @@ def _debug(msg: str) -> None:
self.pylib_match = None
self.include_match = self.omit_match = None
- if self.source or self.source_pkgs:
+ if self.source_dirs or self.source_pkgs:
against = []
- if self.source:
- self.source_match = TreeMatcher(self.source, "source")
+ if self.source_dirs:
+ self.source_match = TreeMatcher(self.source_dirs, "source")
against.append(f"trees {self.source_match!r}")
if self.source_pkgs:
self.source_pkgs_match = ModuleMatcher(self.source_pkgs, "source_pkgs")
@@ -285,7 +286,7 @@ def _debug(msg: str) -> None:
)
self.source_in_third_paths.add(pathdir)
- for src in self.source:
+ for src in self.source_dirs:
if self.third_match.match(src):
_debug(f"Source in third-party: source directory {src!r}")
self.source_in_third_paths.add(src)
@@ -457,12 +458,12 @@ def check_include_omit_etc(self, filename: str, frame: FrameType | None) -> str
def warn_conflicting_settings(self) -> None:
"""Warn if there are settings that conflict."""
if self.include:
- if self.source or self.source_pkgs:
+ if self.source_dirs or self.source_pkgs:
self.warn("--include is ignored because --source is set", slug="include-ignored")
def warn_already_imported_files(self) -> None:
"""Warn if files have already been imported that we will be measuring."""
- if self.include or self.source or self.source_pkgs:
+ if self.include or self.source_dirs or self.source_pkgs:
warned = set()
for mod in list(sys.modules.values()):
filename = getattr(mod, "__file__", None)
@@ -535,7 +536,7 @@ def find_possibly_unexecuted_files(self) -> Iterable[tuple[str, str | None]]:
pkg_file = source_for_file(cast(str, sys.modules[pkg].__file__))
yield from self._find_executable_files(canonical_path(pkg_file))
- for src in self.source:
+ for src in self.source_dirs:
yield from self._find_executable_files(src)
def _find_plugin_files(self, src_dir: str) -> Iterable[tuple[str, str]]:
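
The splitting and validation above can be summarized standalone. A sketch
under the assumption that os.path.abspath is an acceptable stand-in for
coverage's canonical_filename, with a plain ValueError in place of
ConfigError:

    import os

    def split_and_check(source, source_pkgs, source_dirs):
        """Split ambiguous `source` entries, then validate all directories."""
        pkgs = list(source_pkgs)
        dirs = list(source_dirs)
        for src in source or []:
            # Ambiguous "source" entries: directories stay directories,
            # anything else is assumed to be an importable package.
            (dirs if os.path.isdir(src) else pkgs).append(src)
        checked = []
        for src in dirs:
            if not os.path.isdir(src):
                raise ValueError(f"Source dir is not a directory: {src!r}")
            checked.append(os.path.abspath(src))
        return pkgs, checked
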
diff --git a/coverage/sysmon.py b/coverage/sysmon.py
index e7b5659a6..8e5376cf0 100644
--- a/coverage/sysmon.py
+++ b/coverage/sysmon.py
@@ -5,7 +5,6 @@
from __future__ import annotations
-import dis
import functools
import inspect
import os
@@ -19,20 +18,20 @@
from typing import (
Any,
Callable,
- Iterable,
NewType,
Optional,
cast,
)
from coverage import env
+from coverage.bytecode import TBranchTrails, branch_trails
from coverage.debug import short_filename, short_stack
from coverage.misc import isolate_module
from coverage.types import (
AnyCallable,
- TArc,
TFileDisposition,
TLineNo,
+ TOffset,
TShouldStartContextFn,
TShouldTraceFn,
TTraceData,
@@ -58,18 +57,6 @@
DISABLE_TYPE = NewType("DISABLE_TYPE", object)
MonitorReturn = Optional[DISABLE_TYPE]
DISABLE = cast(MonitorReturn, getattr(sys_monitoring, "DISABLE", None))
-TOffset = int
-
-ALWAYS_JUMPS: set[int] = set()
-RETURNS: set[int] = set()
-
-if env.PYBEHAVIOR.branch_right_left:
- ALWAYS_JUMPS.update(
- dis.opmap[name]
- for name in ["JUMP_FORWARD", "JUMP_BACKWARD", "JUMP_BACKWARD_NO_INTERRUPT"]
- )
-
- RETURNS.update(dis.opmap[name] for name in ["RETURN_VALUE", "RETURN_GENERATOR"])
if LOG: # pragma: debugging
@@ -181,131 +168,6 @@ def _decorator(meth: AnyCallable) -> AnyCallable:
return _decorator
-class InstructionWalker:
- """Utility to step through trails of instructions.
-
- We have two reasons to need sequences of instructions from a code object:
- First, in strict sequence to visit all the instructions in the object.
- This is `walk(follow_jumps=False)`. Second, we want to follow jumps to
- understand how execution will flow: `walk(follow_jumps=True)`.
-
- """
-
- def __init__(self, code: CodeType) -> None:
- self.code = code
- self.insts: dict[TOffset, dis.Instruction] = {}
-
- inst = None
- for inst in dis.get_instructions(code):
- self.insts[inst.offset] = inst
-
- assert inst is not None
- self.max_offset = inst.offset
-
- def walk(
- self, *, start_at: TOffset = 0, follow_jumps: bool = True
- ) -> Iterable[dis.Instruction]:
- """
- Yield instructions starting from `start_at`. Follow unconditional
- jumps if `follow_jumps` is true.
- """
- seen = set()
- offset = start_at
- while offset < self.max_offset + 1:
- if offset in seen:
- break
- seen.add(offset)
- if inst := self.insts.get(offset):
- yield inst
- if follow_jumps and inst.opcode in ALWAYS_JUMPS:
- offset = inst.jump_target
- continue
- offset += 2
-
-
-def populate_branch_trails(code: CodeType, code_info: CodeInfo) -> None:
- """
- Populate the `branch_trails` attribute on `code_info`.
-
- Instructions can have a jump_target, where they might jump to next. Some
- instructions with a jump_target are unconditional jumps (ALWAYS_JUMPS), so
- they aren't interesting to us, since they aren't the start of a branch
- possibility.
-
- Instructions that might or might not jump somewhere else are branch
- possibilities. For each of those, we track a trail of instructions. These
- are lists of instruction offsets, the next instructions that can execute.
- We follow the trail until we get to a new source line. That gives us the
- arc from the original instruction's line to the new source line.
-
- """
- # log(f"populate_branch_trails: {code}")
- iwalker = InstructionWalker(code)
- for inst in iwalker.walk(follow_jumps=False):
- # log(f"considering {inst=}")
- if not inst.jump_target:
- # We only care about instructions with jump targets.
- # log("no jump_target")
- continue
- if inst.opcode in ALWAYS_JUMPS:
- # We don't care about unconditional jumps.
- # log("always jumps")
- continue
-
- from_line = inst.line_number
- if from_line is None:
- continue
-
- def walk_one_branch(
- start_at: TOffset, branch_kind: str
- ) -> tuple[list[TOffset], TArc | None]:
- # pylint: disable=cell-var-from-loop
- inst_offsets: list[TOffset] = []
- to_line = None
- for inst2 in iwalker.walk(start_at=start_at):
- inst_offsets.append(inst2.offset)
- if inst2.line_number and inst2.line_number != from_line:
- to_line = inst2.line_number
- break
- elif inst2.jump_target and (inst2.opcode not in ALWAYS_JUMPS):
- # log(
- # f"stop: {inst2.jump_target=}, "
- # + f"{inst2.opcode=} ({dis.opname[inst2.opcode]}), "
- # + f"{ALWAYS_JUMPS=}"
- # )
- break
- elif inst2.opcode in RETURNS:
- to_line = -code.co_firstlineno
- break
- if to_line is not None:
- # log(
- # f"possible branch from @{start_at}: "
- # + f"{inst_offsets}, {(from_line, to_line)} {code}"
- # )
- return inst_offsets, (from_line, to_line)
- else:
- # log(f"no possible branch from @{start_at}: {inst_offsets}")
- return [], None
-
- # Calculate two trails: one from the next instruction, and one from the
- # jump_target instruction.
- trails = [
- walk_one_branch(start_at=inst.offset + 2, branch_kind="not-taken"),
- walk_one_branch(start_at=inst.jump_target, branch_kind="taken"),
- ]
- code_info.branch_trails[inst.offset] = trails
-
- # Sometimes we get BRANCH_RIGHT or BRANCH_LEFT events from instructions
- # other than the original jump possibility instruction. Register each
- # trail under all of their offsets so we can pick up in the middle of a
- # trail if need be.
- for trail in trails:
- for offset in trail[0]:
- if offset not in code_info.branch_trails:
- code_info.branch_trails[offset] = []
- code_info.branch_trails[offset].append(trail)
-
-
@dataclass
class CodeInfo:
"""The information we want about each code object."""
@@ -321,10 +183,7 @@ class CodeInfo:
# ([offset, offset, ...], (from_line, to_line)),
# ]
# Two possible trails from the branch point, left and right.
- branch_trails: dict[
- TOffset,
- list[tuple[list[TOffset], TArc | None]],
- ]
+ branch_trails: TBranchTrails
def bytes_to_lines(code: CodeType) -> dict[TOffset, TLineNo]:
@@ -571,7 +430,7 @@ def sysmon_branch_either(
if not code_info.branch_trails:
if self.stats is not None:
self.stats["branch_trails"] += 1
- populate_branch_trails(code, code_info)
+ code_info.branch_trails = branch_trails(code)
# log(f"branch_trails for {code}:\n {code_info.branch_trails}")
added_arc = False
dest_info = code_info.branch_trails.get(instruction_offset)
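
The tracer now fills CodeInfo.branch_trails lazily, only when the first
branch event arrives for a code object. A generic sketch of that pattern;
TrailCache is an invented name, not part of sysmon.py:

    from types import CodeType
    from coverage.bytecode import TBranchTrails, branch_trails

    class TrailCache:
        """Compute branch trails per code object on first use, then reuse them."""

        def __init__(self) -> None:
            self._trails: dict[CodeType, TBranchTrails] = {}

        def trails_for(self, code: CodeType) -> TBranchTrails:
            if code not in self._trails:
                self._trails[code] = branch_trails(code)
            return self._trails[code]
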
diff --git a/coverage/types.py b/coverage/types.py
index ac1fc4c59..8b919a89b 100644
--- a/coverage/types.py
+++ b/coverage/types.py
@@ -53,6 +53,9 @@ def __call__(
# Line numbers are pervasive enough that they deserve their own type.
TLineNo = int
+# Bytecode offsets are pervasive enough that they deserve their own type.
+TOffset = int
+
TArc = tuple[TLineNo, TLineNo]
class TFileDisposition(Protocol):
diff --git a/coverage/version.py b/coverage/version.py
index b14eab49f..fe08b5f98 100644
--- a/coverage/version.py
+++ b/coverage/version.py
@@ -8,7 +8,7 @@
# version_info: same semantics as sys.version_info.
# _dev: the .devN suffix if any.
-version_info = (7, 7, 1, "final", 0)
+version_info = (7, 8, 0, "final", 0)
_dev = 0
diff --git a/doc/conf.py b/doc/conf.py
index 80fc2cca8..57a1ffd00 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -67,11 +67,11 @@
# @@@ editable
copyright = "2009–2025, Ned Batchelder" # pylint: disable=redefined-builtin
# The short X.Y.Z version.
-version = "7.7.1"
+version = "7.8.0"
# The full version, including alpha/beta/rc tags.
-release = "7.7.1"
+release = "7.8.0"
# The date of release, in "monthname day, year" format.
-release_date = "March 21, 2025"
+release_date = "March 30, 2025"
# @@@ end
rst_epilog = f"""
diff --git a/doc/config.rst b/doc/config.rst
index 87cbdd108..7a02d6a04 100644
--- a/doc/config.rst
+++ b/doc/config.rst
@@ -476,6 +476,18 @@ ambiguities between packages and directories.
.. versionadded:: 5.3
+.. _config_run_source_dirs:
+
+[run] source_dirs
+.................
+
+(multi-string) A list of directories, the source to measure during execution.
+Operates the same as ``source``, but only names directories, for resolving
+ambiguities between packages and directories. A missing directory is an error.
+
+.. versionadded:: 7.8
+
+
.. _config_run_timid:
[run] timid
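
For projects configured through pyproject.toml, the setting documented above
should look like the table in this hedged sketch, parsed here with the
standard library just to show the resulting structure; the directory names
are placeholders:

    # Requires Python 3.11+ for tomllib.
    import tomllib

    PYPROJECT = """
    [tool.coverage.run]
    source_dirs = ["src", "tools"]
    """
    config = tomllib.loads(PYPROJECT)
    print(config["tool"]["coverage"]["run"]["source_dirs"])  # ['src', 'tools']
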
diff --git a/doc/sample_html/class_index.html b/doc/sample_html/class_index.html
index 796683503..c4f4afb5e 100644
--- a/doc/sample_html/class_index.html
+++ b/doc/sample_html/class_index.html
@@ -56,8 +56,8 @@