diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index a2172ab84..0c0d2bd8c 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -51,7 +51,7 @@ jobs: # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@df559355d593797519d70b90fc8edd5db049e7a2 # v3 + uses: github/codeql-action/init@3c3833e0f8c1c83d449a7478aa59c036a9165498 # v3 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. @@ -62,7 +62,7 @@ jobs: # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild - uses: github/codeql-action/autobuild@df559355d593797519d70b90fc8edd5db049e7a2 # v3 + uses: github/codeql-action/autobuild@3c3833e0f8c1c83d449a7478aa59c036a9165498 # v3 # ℹī¸ Command-line programs to run using the OS shell. 
# 📚 https://git.io/JvXDl @@ -76,4 +76,4 @@ jobs: # make release - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@df559355d593797519d70b90fc8edd5db049e7a2 # v3 + uses: github/codeql-action/analyze@3c3833e0f8c1c83d449a7478aa59c036a9165498 # v3 diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index ba81d4ad1..2841c55d7 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -54,6 +54,7 @@ jobs: - "**.c" - ".github/workflows/coverage.yml" - "tox.ini" + - "metacov.ini" - "requirements/*.pip" - "tests/gold/**" diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml index 99c70bbb9..312c23b13 100644 --- a/.github/workflows/dependency-review.yml +++ b/.github/workflows/dependency-review.yml @@ -29,7 +29,7 @@ jobs: persist-credentials: false - name: 'Dependency Review' - uses: actions/dependency-review-action@da24556b548a50705dd671f47852072ea4c105d9 # v4.7.1 + uses: actions/dependency-review-action@bc41886e18ea39df68b1b1245f4184881938e050 # v4.7.2 with: base-ref: ${{ github.event.pull_request.base.sha || 'master' }} head-ref: ${{ github.event.pull_request.head.sha || github.ref }} diff --git a/.github/workflows/quality.yml b/.github/workflows/quality.yml index 67c7608ef..94e4243e0 100644 --- a/.github/workflows/quality.yml +++ b/.github/workflows/quality.yml @@ -173,7 +173,7 @@ jobs: persist-credentials: false - name: Install the latest version of uv - uses: astral-sh/setup-uv@d9e0f98d3fc6adb07d1e3d37f3043649ddad06a1 #v6.5.0 + uses: astral-sh/setup-uv@4959332f0f014c5280e7eac8b70c90cb574c9f9b #v6.6.0 with: enable-cache: false diff --git a/CHANGES.rst b/CHANGES.rst index f264538ee..a115f44cb 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -22,6 +22,22 @@ upgrading your version of coverage.py. .. start-releases +.. 
_changes_7-10-6: + +Version 7.10.6 — 2025-08-29 +--------------------------- + +- Fix: ``source`` directories were not properly communicated to subprocesses + that ran in different directories, as reported in `issue 1499`_. This is now + fixed. + +- Performance: `Alex Gaynor continues fine-tuning <pull 2038_>`_ the speed of + combination, especially with many contexts. + +.. _issue 1499: https://github.com/nedbat/coveragepy/issues/1499 +.. _pull 2038: https://github.com/nedbat/coveragepy/pull/2038 + + .. _changes_7-10-5: Version 7.10.5 — 2025-08-23 diff --git a/coverage/config.py b/coverage/config.py index 7cde8ec42..82e56fd3b 100644 --- a/coverage/config.py +++ b/coverage/config.py @@ -274,10 +274,15 @@ def __init__(self) -> None: "patch", } + # File paths to make absolute during serialization. + # The pairs are (config_key, must_exist). SERIALIZE_ABSPATH = { - "data_file", - "debug_file", - "source_dirs", + ("data_file", False), + ("debug_file", False), + # `source` can be directories or modules, so don't abspath it if it + # doesn't exist. + ("source", True), + ("source_dirs", False), } def from_args(self, **kwargs: TConfigValueIn) -> None: @@ -569,12 +574,13 @@ def serialize(self) -> str: deserialized config will refer to the same files. 
""" data = {k: v for k, v in self.__dict__.items() if not k.startswith("_")} - for k in self.SERIALIZE_ABSPATH: + for k, must_exist in self.SERIALIZE_ABSPATH: + abs_fn = abs_path_if_exists if must_exist else os.path.abspath v = data[k] if isinstance(v, list): - v = list(map(os.path.abspath, v)) + v = list(map(abs_fn, v)) elif isinstance(v, str): - v = os.path.abspath(v) + v = abs_fn(v) data[k] = v return base64.b64encode(json.dumps(data).encode()).decode() @@ -584,6 +590,14 @@ def process_file_value(path: str) -> str: return os.path.expanduser(path) +def abs_path_if_exists(path: str) -> str: + """os.path.abspath, but only if the path exists.""" + if os.path.exists(path): + return os.path.abspath(path) + else: + return path + + def process_regexlist(name: str, option: str, values: list[str]) -> list[str]: """Check the values in a regex list and keep the non-blank ones.""" value_list = [] diff --git a/coverage/control.py b/coverage/control.py index 7ce36af56..fece86c0f 100644 --- a/coverage/control.py +++ b/coverage/control.py @@ -793,7 +793,7 @@ def switch_context(self, new_context: str) -> None: def clear_exclude(self, which: str = "exclude") -> None: """Clear the exclude list.""" self._init() - setattr(self.config, which + "_list", []) + setattr(self.config, f"{which}_list", []) self._exclude_regex_stale() def exclude(self, regex: str, which: str = "exclude") -> None: @@ -812,7 +812,7 @@ def exclude(self, regex: str, which: str = "exclude") -> None: """ self._init() - excl_list = getattr(self.config, which + "_list") + excl_list = getattr(self.config, f"{which}_list") excl_list.append(regex) self._exclude_regex_stale() @@ -823,7 +823,7 @@ def _exclude_regex_stale(self) -> None: def _exclude_regex(self, which: str) -> str: """Return a regex string for the given exclusion list.""" if which not in self._exclude_re: - excl_list = getattr(self.config, which + "_list") + excl_list = getattr(self.config, f"{which}_list") self._exclude_re[which] = join_regex(excl_list) 
return self._exclude_re[which] @@ -835,7 +835,7 @@ def get_exclude_list(self, which: str = "exclude") -> list[str]: """ self._init() - return cast(list[str], getattr(self.config, which + "_list")) + return cast(list[str], getattr(self.config, f"{which}_list")) def save(self) -> None: """Save the collected coverage data to the data file.""" @@ -1064,6 +1064,7 @@ def _get_file_reporters( if not isinstance(morfs, (list, tuple, set)): morfs = [morfs] # type: ignore[list-item] + morfs = sorted(morfs, key=lambda m: m if isinstance(m, str) else m.__name__) return [(self._get_file_reporter(morf), morf) for morf in morfs] def _prepare_data_for_reporting(self) -> None: @@ -1242,8 +1243,7 @@ def html_report( precision=precision, ): reporter = HtmlReporter(self) - ret = reporter.report(morfs) - return ret + return reporter.report(morfs) def xml_report( self, diff --git a/coverage/debug.py b/coverage/debug.py index b15f3ea36..21ac1d826 100644 --- a/coverage/debug.py +++ b/coverage/debug.py @@ -149,21 +149,20 @@ def info_formatter(info: Iterable[tuple[str, Any]]) -> Iterator[str]: info = list(info) if not info: return - label_len = 30 - assert all(len(l) < label_len for l, _ in info) + LABEL_LEN = 30 + assert all(len(l) < LABEL_LEN for l, _ in info) for label, data in info: if data == []: data = "-none-" - if isinstance(data, tuple) and len(repr(tuple(data))) < 30: - # Convert to tuple to scrub namedtuples. 
- yield "%*s: %r" % (label_len, label, tuple(data)) + prefix = f"{label:>{LABEL_LEN}}: " + if isinstance(data, tuple) and len(str(data)) < 30: + yield f"{prefix}{data}" elif isinstance(data, (list, set, tuple)): - prefix = "%*s:" % (label_len, label) for e in data: - yield "%*s %s" % (label_len + 1, prefix, e) - prefix = "" + yield f"{prefix}{e}" + prefix = " " * (LABEL_LEN + 2) else: - yield "%*s: %s" % (label_len, label, data) + yield f"{prefix}{data}" def write_formatted_info( diff --git a/coverage/env.py b/coverage/env.py index fcd8f3a36..ccee36e83 100644 --- a/coverage/env.py +++ b/coverage/env.py @@ -35,7 +35,7 @@ if PYPY: # Minimum now is 7.3.16 - PYPYVERSION = sys.pypy_version_info # type: ignore[attr-defined] + PYPYVERSION = tuple(sys.pypy_version_info) # type: ignore[attr-defined] else: PYPYVERSION = (0,) diff --git a/coverage/misc.py b/coverage/misc.py index 399bf1ba0..f310ed0eb 100644 --- a/coverage/misc.py +++ b/coverage/misc.py @@ -109,9 +109,9 @@ def nice_pair(pair: TArc) -> str: """ start, end = pair if start == end: - return "%d" % start + return f"{start}" else: - return "%d-%d" % (start, end) + return f"{start}-{end}" def bool_or_none(b: Any) -> bool | None: diff --git a/coverage/plugin_support.py b/coverage/plugin_support.py index b5ca302d0..8402ba0ae 100644 --- a/coverage/plugin_support.py +++ b/coverage/plugin_support.py @@ -210,10 +210,8 @@ def __init__(self, tracer: FileTracer, debug: LabelledDebug) -> None: def _show_frame(self, frame: FrameType) -> str: """A short string identifying a frame, for debug messages.""" - return "%s@%d" % ( - os.path.basename(frame.f_code.co_filename), - frame.f_lineno, - ) + filename = os.path.basename(frame.f_code.co_filename) + return f"{filename}@{frame.f_lineno}" def source_filename(self) -> str: sfilename = self.tracer.source_filename() @@ -292,10 +290,10 @@ def arcs(self) -> set[TArc]: def source(self) -> str: ret = self.reporter.source() - self.debug.write("source() --> %d chars" % (len(ret),)) + 
self.debug.write(f"source() --> {len(ret)} chars") return ret def source_token_lines(self) -> TSourceTokenLines: ret = list(self.reporter.source_token_lines()) - self.debug.write("source_token_lines() --> %d tokens" % (len(ret),)) + self.debug.write(f"source_token_lines() --> {len(ret)} tokens") return ret diff --git a/coverage/report.py b/coverage/report.py index 7322d3c89..7c1f9860e 100644 --- a/coverage/report.py +++ b/coverage/report.py @@ -10,7 +10,7 @@ from typing import IO, TYPE_CHECKING, Any from coverage.exceptions import ConfigError, NoDataError -from coverage.misc import human_sorted_items +from coverage.misc import human_sorted_items, plural from coverage.plugin import FileReporter from coverage.report_core import get_analysis_to_report from coverage.results import Analysis, Numbers @@ -31,7 +31,7 @@ def __init__(self, coverage: Coverage) -> None: self.output_format = self.config.format or "text" if self.output_format not in {"text", "markdown", "total"}: raise ConfigError(f"Unknown report format choice: {self.output_format!r}") - self.fr_analysis: list[tuple[FileReporter, Analysis]] = [] + self.fr_analyses: list[tuple[FileReporter, Analysis]] = [] self.skipped_count = 0 self.empty_count = 0 self.total = Numbers(precision=self.config.precision) @@ -46,7 +46,7 @@ def write_items(self, items: Iterable[str]) -> None: """Write a list of strings, joined together.""" self.write("".join(items)) - def _report_text( + def report_text( self, header: list[str], lines_values: list[list[Any]], @@ -82,29 +82,36 @@ def _report_text( self.write(header_str) self.write(rule) - formats.update(dict(Cover="{:>{n}}%"), Missing=" {:9}") + # Write the data lines + formats.update( + dict( + Cover="{:>{n}}%", + Missing=" {:9}", + ) + ) for values in lines_values: - # build string with line values - line_items = [ - formats[item].format(str(value), name_len=max_name, n=max_n - 1) - for item, value in zip(header, values) - ] - self.write_items(line_items) + self.write_items( + ( + 
formats[item].format(str(value), name_len=max_name, n=max_n - 1) + for item, value in zip(header, values) + ) + ) # Write a TOTAL line if lines_values: self.write(rule) - line_items = [ - formats[item].format(str(value), name_len=max_name, n=max_n - 1) - for item, value in zip(header, total_line) - ] - self.write_items(line_items) + self.write_items( + ( + formats[item].format(str(value), name_len=max_name, n=max_n - 1) + for item, value in zip(header, total_line) + ) + ) for end_line in end_lines: self.write(end_line) - def _report_markdown( + def report_markdown( self, header: list[str], lines_values: list[list[Any]], @@ -143,17 +150,29 @@ def _report_markdown( self.write(header_str) self.write(rule_str) + # Write the data lines for values in lines_values: - # build string with line values - formats.update(dict(Cover="{:>{n}}% |")) - line_items = [ - formats[item].format(str(value).replace("_", "\\_"), name_len=max_name, n=max_n - 1) - for item, value in zip(header, values) - ] - self.write_items(line_items) + formats.update( + dict( + Cover="{:>{n}}% |", + ) + ) + self.write_items( + ( + formats[item].format( + str(value).replace("_", "\\_"), name_len=max_name, n=max_n - 1 + ) + for item, value in zip(header, values) + ) + ) # Write the TOTAL line - formats.update(dict(Name="|{:>{name_len}} |", Cover="{:>{n}} |")) + formats.update( + dict( + Name="|{:>{name_len}} |", + Cover="{:>{n}} |", + ), + ) total_line_items: list[str] = [] for item, value in zip(header, total_line): if value == "": @@ -164,6 +183,7 @@ def _report_markdown( insert = f" **{value}**" total_line_items += formats[item].format(insert, name_len=max_name, n=max_n) self.write_items(total_line_items) + for end_line in end_lines: self.write(end_line) @@ -206,9 +226,8 @@ def tabular_report(self) -> None: # `lines_values` is list of lists of sortable values. 
lines_values = [] - for fr, analysis in self.fr_analysis: + for fr, analysis in self.fr_analyses: nums = analysis.numbers - args = [fr.relative_filename(), nums.n_statements, nums.n_missing] if self.branches: args += [nums.n_branches, nums.n_partial_branches] @@ -248,18 +267,18 @@ def tabular_report(self) -> None: # Create other final lines. end_lines = [] if self.config.skip_covered and self.skipped_count: - file_suffix = "s" if self.skipped_count > 1 else "" + files = plural(self.skipped_count, "file") end_lines.append( - f"\n{self.skipped_count} file{file_suffix} skipped due to complete coverage.", + f"\n{self.skipped_count} {files} skipped due to complete coverage.", ) if self.config.skip_empty and self.empty_count: - file_suffix = "s" if self.empty_count > 1 else "" - end_lines.append(f"\n{self.empty_count} empty file{file_suffix} skipped.") + files = plural(self.empty_count, "file") + end_lines.append(f"\n{self.empty_count} empty {files} skipped.") if self.output_format == "markdown": - formatter = self._report_markdown + formatter = self.report_markdown else: - formatter = self._report_text + formatter = self.report_text formatter(header, lines_values, total_line, end_lines) def report_one_file(self, fr: FileReporter, analysis: Analysis) -> None: @@ -276,4 +295,4 @@ def report_one_file(self, fr: FileReporter, analysis: Analysis) -> None: # Don't report on empty files. 
self.empty_count += 1 else: - self.fr_analysis.append((fr, analysis)) + self.fr_analyses.append((fr, analysis)) diff --git a/coverage/results.py b/coverage/results.py index 163fc902e..86f6fcc15 100644 --- a/coverage/results.py +++ b/coverage/results.py @@ -325,7 +325,7 @@ def display_covered(pc: float, precision: int) -> str: pc = 100.0 - near0 else: pc = round(pc, precision) - return "%.*f" % (precision, pc) + return f"{pc:.{precision}f}" def _line_ranges( diff --git a/coverage/sqldata.py b/coverage/sqldata.py index 6de07c593..693b6e14c 100644 --- a/coverage/sqldata.py +++ b/coverage/sqldata.py @@ -614,7 +614,7 @@ def add_file_tracers(self, file_tracers: Mapping[str, str]) -> None: """ if self._debug.should("dataop"): - self._debug.write("Adding file tracers: %d files" % (len(file_tracers),)) + self._debug.write(f"Adding file tracers: {len(file_tracers)} files") if not file_tracers: return self._start_using() @@ -793,18 +793,28 @@ def update( # Handle arcs if present in other_db if has_arcs: self._choose_lines_or_arcs(arcs=True) + + # Create context mapping table for faster lookups + con.execute_void(""" + CREATE TEMP TABLE context_mapping AS + SELECT + other_context.id as other_id, + main_context.id as main_id + FROM other_db.context AS other_context + INNER JOIN main.context AS main_context ON other_context.context = main_context.context + """) + con.execute_void(""" INSERT OR IGNORE INTO main.arc (file_id, context_id, fromno, tono) SELECT main_file.id, - main_context.id, + context_mapping.main_id, other_arc.fromno, other_arc.tono FROM other_db.arc AS other_arc INNER JOIN other_file_mapped ON other_arc.file_id = other_file_mapped.other_file_id - INNER JOIN other_db.context AS other_context ON other_arc.context_id = other_context.id + INNER JOIN context_mapping ON other_arc.context_id = context_mapping.other_id INNER JOIN main.file AS main_file ON other_file_mapped.mapped_path = main_file.path - INNER JOIN main.context AS main_context ON other_context.context = 
main_context.context """) # Handle line_bits if present in other_db diff --git a/coverage/version.py b/coverage/version.py index a7cd7a5ff..a9d77c510 100644 --- a/coverage/version.py +++ b/coverage/version.py @@ -8,7 +8,7 @@ # version_info: same semantics as sys.version_info. # _dev: the .devN suffix if any. -version_info = (7, 10, 5, "final", 0) +version_info = (7, 10, 6, "final", 0) _dev = 0 @@ -22,7 +22,7 @@ def _make_version( ) -> str: """Create a readable version string from version_info tuple components.""" assert releaselevel in ["alpha", "beta", "candidate", "final"] - version = "%d.%d.%d" % (major, minor, micro) + version = f"{major}.{minor}.{micro}" if releaselevel != "final": short = {"alpha": "a", "beta": "b", "candidate": "rc"}[releaselevel] version += f"{short}{serial}" diff --git a/coverage/xmlreport.py b/coverage/xmlreport.py index 87055f27c..97db7f4f8 100644 --- a/coverage/xmlreport.py +++ b/coverage/xmlreport.py @@ -36,7 +36,7 @@ def rate(hit: int, num: int) -> str: if num == 0: return "1" else: - return "%.4g" % (hit / num) + return f"{hit / num:.4g}" @dataclass @@ -226,7 +226,7 @@ def xml_file(self, fr: FileReporter, analysis: Analysis, has_arcs: bool) -> None xline.setAttribute("branch", "true") xline.setAttribute( "condition-coverage", - "%d%% (%d/%d)" % (100 * taken // total, taken, total), + f"{100 * taken // total}% ({taken}/{total})", ) if line in missing_branch_arcs: annlines = ["exit" if b < 0 else str(b) for b in missing_branch_arcs[line]] diff --git a/doc/conf.py b/doc/conf.py index 75a012e73..b1fa60438 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -67,11 +67,11 @@ # @@@ editable copyright = "2009–2025, Ned Batchelder" # pylint: disable=redefined-builtin # The short X.Y.Z version. -version = "7.10.5" +version = "7.10.6" # The full version, including alpha/beta/rc tags. -release = "7.10.5" +release = "7.10.6" # The date of release, in "monthname day, year" format. 
-release_date = "August 23, 2025" +release_date = "August 29, 2025" # @@@ end rst_epilog = f""" @@ -250,4 +250,4 @@ def setup(app): """Configure Sphinx""" app.add_css_file("coverage.css") app.add_config_value("prerelease", False, "env") - print("** Prerelease = %r" % prerelease) + print(f"** Prerelease = {prerelease!r}") diff --git a/doc/sample_html/class_index.html b/doc/sample_html/class_index.html index e03b7540e..277ef81f7 100644 --- a/doc/sample_html/class_index.html +++ b/doc/sample_html/class_index.html @@ -56,8 +56,8 @@

Classes

- coverage.py v7.10.5, - created at 2025-08-23 08:07 -0400 + coverage.py v7.10.6, + created at 2025-08-29 10:20 -0400

@@ -567,8 +567,8 @@

- coverage.py v7.10.5, - created at 2025-08-23 08:07 -0400 + coverage.py v7.10.6, + created at 2025-08-29 10:20 -0400

diff --git a/doc/sample_html/z_7b071bdc2a35fa80_hashhandler_py.html b/doc/sample_html/z_7b071bdc2a35fa80_hashhandler_py.html index 1c64be138..b187aaf79 100644 --- a/doc/sample_html/z_7b071bdc2a35fa80_hashhandler_py.html +++ b/doc/sample_html/z_7b071bdc2a35fa80_hashhandler_py.html @@ -66,8 +66,8 @@

^ index     » next       - coverage.py v7.10.5, - created at 2025-08-23 08:07 -0400 + coverage.py v7.10.6, + created at 2025-08-29 10:20 -0400