diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
index 433310b17..5c7bfc9d2 100644
--- a/.github/ISSUE_TEMPLATE/config.yml
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -8,9 +8,6 @@ contact_links:
- name: Frequently Asked Questions
url: https://coverage.readthedocs.io/en/latest/faq.html
about: Some common problems are described here.
- - name: Testing in Python mailing list
- url: http://lists.idyll.org/listinfo/testing-in-python
- about: Ask questions about using coverage.py here.
- name: Tidelift security contact
url: https://tidelift.com/security
about: Please report security vulnerabilities here.
diff --git a/.readthedocs.yml b/.readthedocs.yml
index 8c96c02fd..48d6b434d 100644
--- a/.readthedocs.yml
+++ b/.readthedocs.yml
@@ -17,6 +17,7 @@ formats:
- pdf
python:
+ # PYVERSIONS
version: 3.7
install:
- requirements: doc/requirements.pip
diff --git a/CHANGES.rst b/CHANGES.rst
index 628999113..fa01b701e 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -17,6 +17,28 @@ development at the same time, such as 4.5.x and 5.0.
.. Version 9.8.1 — 2027-07-27
.. --------------------------
+.. scriv-start-here
+
+.. _changes_7-1-0:
+
+Version 7.1.0 — 2023-01-24
+--------------------------
+
+- Added: the debug output file can now be specified with ``[run] debug_file``
+ in the configuration file. Closes `issue 1319`_.
+
+- Performance: fixed a slowdown with dynamic contexts that's been around since
+ 6.4.3. The fix closes `issue 1538`_. Thankfully this doesn't break the
+ `Cython change`_ that fixed `issue 972`_. Thanks to Mathieu Kniewallner for
+ the deep investigative work and comprehensive issue report.
+
+- Typing: all product and test code has type annotations.
+
+.. _Cython change: https://github.com/nedbat/coveragepy/pull/1347
+.. _issue 972: https://github.com/nedbat/coveragepy/issues/972
+.. _issue 1319: https://github.com/nedbat/coveragepy/issues/1319
+.. _issue 1538: https://github.com/nedbat/coveragepy/issues/1538
+
.. _changes_7-0-5:
Version 7.0.5 — 2023-01-10
@@ -959,7 +981,7 @@ Version 5.3 — 2020-09-13
.. _issue 1011: https://github.com/nedbat/coveragepy/issues/1011
-.. endchangesinclude
+.. scriv-end-here
Older changes
-------------
diff --git a/Makefile b/Makefile
index 5bca7c53d..a3028b8bf 100644
--- a/Makefile
+++ b/Makefile
@@ -99,7 +99,13 @@ upgrade: ## Update the *.pip files with the latest packages satisfying *.in
$(PIP_COMPILE) -o requirements/mypy.pip requirements/mypy.in
diff_upgrade: ## Summarize the last `make upgrade`
- @git diff -U0 | grep -v '^@' | grep == | sort -k1.2,1.99 -k1.1,1.1r -u
+	@# The sort flags sort by the package name first, then by the -/+ sign,
+	@# and then by version numbers, so we get a summary with lines like this:
+ @# -bashlex==0.16
+ @# +bashlex==0.17
+ @# -build==0.9.0
+ @# +build==0.10.0
+ @git diff -U0 | grep -v '^@' | grep == | sort -k1.2,1.99 -k1.1,1.1r -u -V
##@ Pre-builds for prepping the code
@@ -247,8 +253,8 @@ relnotes_json: $(RELNOTES_JSON) ## Convert changelog to JSON for further parsin
$(RELNOTES_JSON): $(CHANGES_MD)
$(DOCBIN)/python ci/parse_relnotes.py tmp/rst_rst/changes.md $(RELNOTES_JSON)
-github_releases: $(RELNOTES_JSON) ## Update GitHub releases.
- $(DOCBIN)/python ci/github_releases.py $(RELNOTES_JSON) $(REPO_OWNER)
+github_releases: $(DOCBIN) ## Update GitHub releases.
+ $(DOCBIN)/python -m scriv github-release
comment_on_fixes: $(RELNOTES_JSON) ## Add a comment to issues that were fixed.
python ci/comment_on_fixes.py $(REPO_OWNER)
diff --git a/ci/ghrel_template.md.j2 b/ci/ghrel_template.md.j2
new file mode 100644
index 000000000..9d626bcab
--- /dev/null
+++ b/ci/ghrel_template.md.j2
@@ -0,0 +1,5 @@
+
+{{body}}
+
+:arrow_right:&nbsp; PyPI page: [coverage {{version}}](https://pypi.org/project/coverage/{{version}}).
+:arrow_right:&nbsp; To install: `python3 -m pip install coverage=={{version}}`
diff --git a/ci/github_releases.py b/ci/github_releases.py
deleted file mode 100644
index 5ba3d5229..000000000
--- a/ci/github_releases.py
+++ /dev/null
@@ -1,146 +0,0 @@
-# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
-# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
-
-"""Upload release notes into GitHub releases."""
-
-import json
-import shlex
-import subprocess
-import sys
-
-import pkg_resources
-import requests
-
-
-RELEASES_URL = "https://api.github.com/repos/{repo}/releases"
-
-def run_command(cmd):
- """
- Run a command line (with no shell).
-
- Returns a tuple:
- bool: true if the command succeeded.
- str: the output of the command.
-
- """
- proc = subprocess.run(
- shlex.split(cmd),
- shell=False,
- check=False,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT,
- )
- output = proc.stdout.decode("utf-8")
- succeeded = proc.returncode == 0
- return succeeded, output
-
-def does_tag_exist(tag_name):
- """
- Does `tag_name` exist as a tag in git?
- """
- return run_command(f"git rev-parse --verify {tag_name}")[0]
-
-def check_ok(resp):
- """
- Check that the Requests response object was successful.
-
- Raise an exception if not.
- """
- if not resp:
- print(f"text: {resp.text!r}")
- resp.raise_for_status()
-
-def github_paginated(session, url):
- """
- Get all the results from a paginated GitHub url.
- """
- while True:
- resp = session.get(url)
- check_ok(resp)
- yield from resp.json()
- next_link = resp.links.get("next", None)
- if not next_link:
- break
- url = next_link["url"]
-
-def get_releases(session, repo):
- """
- Get all the releases from a name/project repo.
-
- Returns:
- A dict mapping tag names to release dictionaries.
- """
- url = RELEASES_URL.format(repo=repo)
- releases = { r['tag_name']: r for r in github_paginated(session, url) }
- return releases
-
-RELEASE_BODY_FMT = """\
-{relnote_text}
-
-:arrow_right:\xa0 PyPI page: [coverage {version}](https://pypi.org/project/coverage/{version}).
-:arrow_right:\xa0 To install: `python3 -m pip install coverage=={version}`
-"""
-
-def release_for_relnote(relnote):
- """
- Turn a release note dict into the data needed by GitHub for a release.
- """
- relnote_text = relnote["text"]
- tag = version = relnote["version"]
- body = RELEASE_BODY_FMT.format(relnote_text=relnote_text, version=version)
- return {
- "tag_name": tag,
- "name": version,
- "body": body,
- "draft": False,
- "prerelease": relnote["prerelease"],
- }
-
-def create_release(session, repo, release_data):
- """
- Create a new GitHub release.
- """
- print(f"Creating {release_data['name']}")
- resp = session.post(RELEASES_URL.format(repo=repo), json=release_data)
- check_ok(resp)
-
-def update_release(session, url, release_data):
- """
- Update an existing GitHub release.
- """
- print(f"Updating {release_data['name']}")
- resp = session.patch(url, json=release_data)
- check_ok(resp)
-
-def update_github_releases(json_filename, repo):
- """
- Read the json file, and create or update releases in GitHub.
- """
- gh_session = requests.Session()
- releases = get_releases(gh_session, repo)
- if 0: # if you need to delete all the releases!
- for release in releases.values():
- print(release["tag_name"])
- resp = gh_session.delete(release["url"])
- check_ok(resp)
- return
-
- with open(json_filename) as jf:
- relnotes = json.load(jf)
- relnotes.sort(key=lambda rel: pkg_resources.parse_version(rel["version"]))
- for relnote in relnotes:
- tag = relnote["version"]
- if not does_tag_exist(tag):
- continue
- release_data = release_for_relnote(relnote)
- exists = tag in releases
- if not exists:
- create_release(gh_session, repo, release_data)
- else:
- release = releases[tag]
- if release["body"] != release_data["body"]:
- url = release["url"]
- update_release(gh_session, url, release_data)
-
-if __name__ == "__main__":
- update_github_releases(*sys.argv[1:3])
diff --git a/coverage/collector.py b/coverage/collector.py
index ab743ee38..22471504f 100644
--- a/coverage/collector.py
+++ b/coverage/collector.py
@@ -232,7 +232,7 @@ def _clear_data(self) -> None:
def reset(self) -> None:
"""Clear collected data, and prepare to collect more."""
# The trace data we are collecting.
- self.data: TTraceData = {} # type: ignore[assignment]
+ self.data: TTraceData = {}
# A dictionary mapping file names to file tracer plugin names that will
# handle them.
@@ -310,12 +310,12 @@ def _start_tracer(self) -> TTraceFn:
#
# New in 3.12: threading.settrace_all_threads: https://github.com/python/cpython/pull/96681
- def _installation_trace(self, frame: FrameType, event: str, arg: Any) -> TTraceFn:
+ def _installation_trace(self, frame: FrameType, event: str, arg: Any) -> Optional[TTraceFn]:
"""Called on new threads, installs the real tracer."""
# Remove ourselves as the trace function.
sys.settrace(None)
# Install the real tracer.
- fn = self._start_tracer()
+ fn: Optional[TTraceFn] = self._start_tracer()
# Invoke the real trace function with the current event, to be sure
# not to lose an event.
if fn:
diff --git a/coverage/config.py b/coverage/config.py
index ee30b8a43..e15d2affc 100644
--- a/coverage/config.py
+++ b/coverage/config.py
@@ -20,7 +20,8 @@
from coverage.misc import isolate_module, human_sorted_items, substitute_variables
from coverage.tomlconfig import TomlConfigParser, TomlDecodeError
from coverage.types import (
- TConfigurable, TConfigSectionIn, TConfigValueIn, TConfigSectionOut, TConfigValueOut,
+ TConfigurable, TConfigSectionIn, TConfigValueIn, TConfigSectionOut,
+ TConfigValueOut, TPluginConfig,
)
os = isolate_module(os)
@@ -166,7 +167,7 @@ def getregexlist(self, section: str, option: str) -> List[str]:
]
-class CoverageConfig(TConfigurable):
+class CoverageConfig(TConfigurable, TPluginConfig):
"""Coverage.py configuration.
The attributes of this class are the various settings that control the
@@ -198,6 +199,7 @@ def __init__(self) -> None:
self.cover_pylib = False
self.data_file = ".coverage"
self.debug: List[str] = []
+ self.debug_file: Optional[str] = None
self.disable_warnings: List[str] = []
self.dynamic_context: Optional[str] = None
self.parallel = False
@@ -374,6 +376,7 @@ def copy(self) -> CoverageConfig:
('cover_pylib', 'run:cover_pylib', 'boolean'),
('data_file', 'run:data_file'),
('debug', 'run:debug', 'list'),
+ ('debug_file', 'run:debug_file'),
('disable_warnings', 'run:disable_warnings', 'list'),
('dynamic_context', 'run:dynamic_context'),
('parallel', 'run:parallel', 'boolean'),
diff --git a/coverage/control.py b/coverage/control.py
index d37c77e38..78e0c70e6 100644
--- a/coverage/control.py
+++ b/coverage/control.py
@@ -303,10 +303,8 @@ def _init(self) -> None:
self._inited = True
- # Create and configure the debugging controller. COVERAGE_DEBUG_FILE
- # is an environment variable, the name of a file to append debug logs
- # to.
- self._debug = DebugControl(self.config.debug, self._debug_file)
+ # Create and configure the debugging controller.
+ self._debug = DebugControl(self.config.debug, self._debug_file, self.config.debug_file)
if "multiprocessing" in (self.config.concurrency or ()):
# Multi-processing uses parallel for the subprocesses, so also use
diff --git a/coverage/data.py b/coverage/data.py
index ee4f007dd..c737d5939 100644
--- a/coverage/data.py
+++ b/coverage/data.py
@@ -35,6 +35,7 @@ def line_counts(data: CoverageData, fullpath: bool = False) -> Dict[str, int]:
"""
summ = {}
+ filename_fn: Callable[[str], str]
if fullpath:
# pylint: disable=unnecessary-lambda-assignment
filename_fn = lambda f: f
diff --git a/coverage/debug.py b/coverage/debug.py
index 29dd1e7f3..122339597 100644
--- a/coverage/debug.py
+++ b/coverage/debug.py
@@ -39,7 +39,12 @@ class DebugControl:
show_repr_attr = False # For AutoReprMixin
- def __init__(self, options: Iterable[str], output: Optional[IO[str]]) -> None:
+ def __init__(
+ self,
+ options: Iterable[str],
+ output: Optional[IO[str]],
+ file_name: Optional[str] = None,
+ ) -> None:
"""Configure the options and output file for debugging."""
self.options = list(options) + FORCED_DEBUG
self.suppress_callers = False
@@ -49,6 +54,7 @@ def __init__(self, options: Iterable[str], output: Optional[IO[str]]) -> None:
filters.append(add_pid_and_tid)
self.output = DebugOutputFile.get_one(
output,
+ file_name=file_name,
show_process=self.should('process'),
filters=filters,
)
@@ -273,7 +279,7 @@ def filter_text(text: str, filters: Iterable[Callable[[str], str]]) -> str:
return text + ending
-class CwdTracker: # pragma: debugging
+class CwdTracker:
"""A class to add cwd info to debug messages."""
def __init__(self) -> None:
self.cwd: Optional[str] = None
@@ -287,7 +293,7 @@ def filter(self, text: str) -> str:
return text
-class DebugOutputFile: # pragma: debugging
+class DebugOutputFile:
"""A file-like object that includes pid and cwd information."""
def __init__(
self,
@@ -306,13 +312,11 @@ def __init__(
if hasattr(os, 'getppid'):
self.write(f"New process: pid: {os.getpid()!r}, parent pid: {os.getppid()!r}\n")
- SYS_MOD_NAME = '$coverage.debug.DebugOutputFile.the_one'
- SINGLETON_ATTR = 'the_one_and_is_interim'
-
@classmethod
def get_one(
cls,
fileobj: Optional[IO[str]] = None,
+ file_name: Optional[str] = None,
show_process: bool = True,
filters: Iterable[Callable[[str], str]] = (),
interim: bool = False,
@@ -321,9 +325,9 @@ def get_one(
If `fileobj` is provided, then a new DebugOutputFile is made with it.
- If `fileobj` isn't provided, then a file is chosen
- (COVERAGE_DEBUG_FILE, or stderr), and a process-wide singleton
- DebugOutputFile is made.
+ If `fileobj` isn't provided, then a file is chosen (`file_name` if
+ provided, or COVERAGE_DEBUG_FILE, or stderr), and a process-wide
+ singleton DebugOutputFile is made.
`show_process` controls whether the debug file adds process-level
information, and filters is a list of other message filters to apply.
@@ -338,27 +342,49 @@ def get_one(
# Make DebugOutputFile around the fileobj passed.
return cls(fileobj, show_process, filters)
- # Because of the way igor.py deletes and re-imports modules,
- # this class can be defined more than once. But we really want
- # a process-wide singleton. So stash it in sys.modules instead of
- # on a class attribute. Yes, this is aggressively gross.
- singleton_module = sys.modules.get(cls.SYS_MOD_NAME)
- the_one, is_interim = getattr(singleton_module, cls.SINGLETON_ATTR, (None, True))
+ the_one, is_interim = cls._get_singleton_data()
if the_one is None or is_interim:
- if fileobj is None:
- debug_file_name = os.environ.get("COVERAGE_DEBUG_FILE", FORCED_DEBUG_FILE)
- if debug_file_name in ("stdout", "stderr"):
- fileobj = getattr(sys, debug_file_name)
- elif debug_file_name:
- fileobj = open(debug_file_name, "a")
+ if file_name is not None:
+ fileobj = open(file_name, "a", encoding="utf-8")
+ else:
+ file_name = os.environ.get("COVERAGE_DEBUG_FILE", FORCED_DEBUG_FILE)
+ if file_name in ("stdout", "stderr"):
+ fileobj = getattr(sys, file_name)
+ elif file_name:
+ fileobj = open(file_name, "a", encoding="utf-8")
else:
fileobj = sys.stderr
the_one = cls(fileobj, show_process, filters)
- singleton_module = types.ModuleType(cls.SYS_MOD_NAME)
- setattr(singleton_module, cls.SINGLETON_ATTR, (the_one, interim))
- sys.modules[cls.SYS_MOD_NAME] = singleton_module
+ cls._set_singleton_data(the_one, interim)
return the_one
+ # Because of the way igor.py deletes and re-imports modules,
+ # this class can be defined more than once. But we really want
+ # a process-wide singleton. So stash it in sys.modules instead of
+ # on a class attribute. Yes, this is aggressively gross.
+
+ SYS_MOD_NAME = '$coverage.debug.DebugOutputFile.the_one'
+ SINGLETON_ATTR = 'the_one_and_is_interim'
+
+ @classmethod
+ def _set_singleton_data(cls, the_one: DebugOutputFile, interim: bool) -> None:
+ """Set the one DebugOutputFile to rule them all."""
+ singleton_module = types.ModuleType(cls.SYS_MOD_NAME)
+ setattr(singleton_module, cls.SINGLETON_ATTR, (the_one, interim))
+ sys.modules[cls.SYS_MOD_NAME] = singleton_module
+
+ @classmethod
+ def _get_singleton_data(cls) -> Tuple[Optional[DebugOutputFile], bool]:
+ """Get the one DebugOutputFile."""
+ singleton_module = sys.modules.get(cls.SYS_MOD_NAME)
+ return getattr(singleton_module, cls.SINGLETON_ATTR, (None, True))
+
+ @classmethod
+ def _del_singleton_data(cls) -> None:
+ """Delete the one DebugOutputFile, just for tests to use."""
+ if cls.SYS_MOD_NAME in sys.modules:
+ del sys.modules[cls.SYS_MOD_NAME]
+
def write(self, text: str) -> None:
"""Just like file.write, but filter through all our filters."""
assert self.outfile is not None
diff --git a/coverage/html.py b/coverage/html.py
index b10bab245..9e1b11b20 100644
--- a/coverage/html.py
+++ b/coverage/html.py
@@ -12,10 +12,10 @@
import shutil
from dataclasses import dataclass
-from typing import Iterable, List, Optional, TYPE_CHECKING
+from typing import Any, Dict, Iterable, List, Optional, Tuple, TYPE_CHECKING, cast
import coverage
-from coverage.data import add_data_to_hash
+from coverage.data import CoverageData, add_data_to_hash
from coverage.exceptions import NoDataError
from coverage.files import flat_rootname
from coverage.misc import ensure_dir, file_be_gone, Hasher, isolate_module, format_local_datetime
@@ -25,10 +25,27 @@
from coverage.templite import Templite
from coverage.types import TLineNo, TMorf
+
if TYPE_CHECKING:
+ # To avoid circular imports:
from coverage import Coverage
from coverage.plugins import FileReporter
+ # To be able to use 3.8 typing features, and still run on 3.7:
+ from typing import TypedDict
+
+ class IndexInfoDict(TypedDict):
+ """Information for each file, to render the index file."""
+ nums: Numbers
+ html_filename: str
+ relative_filename: str
+
+ class FileInfoDict(TypedDict):
+ """Summary of the information from last rendering, to avoid duplicate work."""
+ hash: str
+ index: IndexInfoDict
+
+
os = isolate_module(os)
@@ -56,7 +73,7 @@ def write_html(fname: str, html: str) -> None:
@dataclass
class LineData:
"""The data for each source line of HTML output."""
- tokens: str
+ tokens: List[Tuple[str, str]]
number: TLineNo
category: str
statement: bool
@@ -65,6 +82,10 @@ class LineData:
context_list: List[str]
short_annotations: List[str]
long_annotations: List[str]
+ html: str = ""
+ annotate: Optional[str] = None
+ annotate_long: Optional[str] = None
+ css_class: str = ""
@dataclass
@@ -201,8 +222,8 @@ def __init__(self, cov: Coverage) -> None:
self.data = self.coverage.get_data()
self.has_arcs = self.data.has_arcs()
- self.file_summaries = []
- self.all_files_nums = []
+ self.file_summaries: List[IndexInfoDict] = []
+ self.all_files_nums: List[Numbers] = []
self.incr = IncrementalChecker(self.directory)
self.datagen = HtmlDataGeneration(self.coverage)
self.totals = Numbers(precision=self.config.precision)
@@ -274,7 +295,7 @@ def report(self, morfs: Optional[Iterable[TMorf]]) -> float:
if not self.all_files_nums:
raise NoDataError("No data to report.")
- self.totals = sum(self.all_files_nums)
+ self.totals = cast(Numbers, sum(self.all_files_nums))
# Write the index file.
if files_to_report:
@@ -308,9 +329,10 @@ def make_local_static_report_files(self) -> None:
# The user may have extra CSS they want copied.
if self.extra_css:
+ assert self.config.extra_css is not None
shutil.copyfile(self.config.extra_css, os.path.join(self.directory, self.extra_css))
- def should_report_file(self, ftr):
+ def should_report_file(self, ftr: FileToReport) -> bool:
"""Determine if we'll report this file."""
# Get the numbers for this file.
nums = ftr.analysis.numbers
@@ -333,7 +355,7 @@ def should_report_file(self, ftr):
return True
- def write_html_file(self, ftr, prev_html, next_html):
+ def write_html_file(self, ftr: FileToReport, prev_html: str, next_html: str) -> None:
"""Generate an HTML file for one source file."""
self.make_directory()
@@ -346,16 +368,16 @@ def write_html_file(self, ftr, prev_html, next_html):
file_data = self.datagen.data_for_file(ftr.fr, ftr.analysis)
for ldata in file_data.lines:
# Build the HTML for the line.
- html = []
+ html_parts = []
for tok_type, tok_text in ldata.tokens:
if tok_type == "ws":
- html.append(escape(tok_text))
+ html_parts.append(escape(tok_text))
else:
tok_html = escape(tok_text) or '&nbsp;'
- html.append(
+ html_parts.append(
f'<span class="{tok_type}">{tok_html}</span>'
)
- ldata.html = ''.join(html)
+ ldata.html = ''.join(html_parts)
if ldata.short_annotations:
# 202F is NARROW NO-BREAK SPACE.
@@ -384,7 +406,9 @@ def write_html_file(self, ftr, prev_html, next_html):
css_classes = []
if ldata.category:
- css_classes.append(self.template_globals['category'][ldata.category])
+ css_classes.append(
+ self.template_globals['category'][ldata.category] # type: ignore[index]
+ )
ldata.css_class = ' '.join(css_classes) or "pln"
html_path = os.path.join(self.directory, ftr.html_filename)
@@ -396,7 +420,7 @@ def write_html_file(self, ftr, prev_html, next_html):
write_html(html_path, html)
# Save this file's information for the index file.
- index_info = {
+ index_info: IndexInfoDict = {
'nums': ftr.analysis.numbers,
'html_filename': ftr.html_filename,
'relative_filename': ftr.fr.relative_filename(),
@@ -404,7 +428,7 @@ def write_html_file(self, ftr, prev_html, next_html):
self.file_summaries.append(index_info)
self.incr.set_index_info(ftr.rootname, index_info)
- def index_file(self, first_html, final_html):
+ def index_file(self, first_html: str, final_html: str) -> None:
"""Write the index.html file for this report."""
self.make_directory()
index_tmpl = Templite(read_data("index.html"), self.template_globals)
@@ -440,7 +464,6 @@ class IncrementalChecker:
STATUS_FILE = "status.json"
STATUS_FORMAT = 2
- # pylint: disable=wrong-spelling-in-comment,useless-suppression
# The data looks like:
#
# {
@@ -468,14 +491,14 @@ class IncrementalChecker:
# }
# }
- def __init__(self, directory):
+ def __init__(self, directory: str) -> None:
self.directory = directory
self.reset()
def reset(self) -> None:
"""Initialize to empty. Causes all files to be reported."""
self.globals = ''
- self.files = {}
+ self.files: Dict[str, FileInfoDict] = {}
def read(self) -> None:
"""Read the information we stored last time."""
@@ -507,7 +530,8 @@ def write(self) -> None:
status_file = os.path.join(self.directory, self.STATUS_FILE)
files = {}
for filename, fileinfo in self.files.items():
- fileinfo['index']['nums'] = fileinfo['index']['nums'].init_args()
+ index = fileinfo['index']
+ index['nums'] = index['nums'].init_args() # type: ignore[typeddict-item]
files[filename] = fileinfo
status = {
@@ -519,7 +543,7 @@ def write(self) -> None:
with open(status_file, "w") as fout:
json.dump(status, fout, separators=(',', ':'))
- def check_global_data(self, *data):
+ def check_global_data(self, *data: Any) -> None:
"""Check the global data that can affect incremental reporting."""
m = Hasher()
for d in data:
@@ -529,7 +553,7 @@ def check_global_data(self, *data):
self.reset()
self.globals = these_globals
- def can_skip_file(self, data, fr, rootname):
+ def can_skip_file(self, data: CoverageData, fr: FileReporter, rootname: str) -> bool:
"""Can we skip reporting this file?
`data` is a CoverageData object, `fr` is a `FileReporter`, and
@@ -549,26 +573,26 @@ def can_skip_file(self, data, fr, rootname):
self.set_file_hash(rootname, this_hash)
return False
- def file_hash(self, fname):
+ def file_hash(self, fname: str) -> str:
"""Get the hash of `fname`'s contents."""
- return self.files.get(fname, {}).get('hash', '')
+ return self.files.get(fname, {}).get('hash', '') # type: ignore[call-overload]
- def set_file_hash(self, fname, val):
+ def set_file_hash(self, fname: str, val: str) -> None:
"""Set the hash of `fname`'s contents."""
- self.files.setdefault(fname, {})['hash'] = val
+ self.files.setdefault(fname, {})['hash'] = val # type: ignore[typeddict-item]
- def index_info(self, fname):
+ def index_info(self, fname: str) -> IndexInfoDict:
"""Get the information for index.html for `fname`."""
- return self.files.get(fname, {}).get('index', {})
+ return self.files.get(fname, {}).get('index', {}) # type: ignore
- def set_index_info(self, fname, info):
+ def set_index_info(self, fname: str, info: IndexInfoDict) -> None:
"""Set the information for index.html for `fname`."""
- self.files.setdefault(fname, {})['index'] = info
+ self.files.setdefault(fname, {})['index'] = info # type: ignore[typeddict-item]
# Helpers for templates and generating HTML
-def escape(t):
+def escape(t: str) -> str:
"""HTML-escape the text in `t`.
This is only suitable for HTML text, not attributes.
@@ -578,6 +602,6 @@ def escape(t):
return t.replace("&", "&amp;").replace("<", "&lt;")
-def pair(ratio):
+def pair(ratio: Tuple[int, int]) -> str:
"""Format a pair of numbers so JavaScript can read them in an attribute."""
return "%s %s" % ratio
diff --git a/coverage/inorout.py b/coverage/inorout.py
index d5ca938f7..babaa3d80 100644
--- a/coverage/inorout.py
+++ b/coverage/inorout.py
@@ -313,7 +313,7 @@ def nope(disp: TFileDisposition, reason: str) -> TFileDisposition:
return disp
if original_filename.startswith('<'):
- return nope(disp, "not a real original file name")
+ return nope(disp, "original file name is not real")
if frame is not None:
# Compiled Python files have two file names: frame.f_code.co_filename is
@@ -345,7 +345,7 @@ def nope(disp: TFileDisposition, reason: str) -> TFileDisposition:
# file names like "<string>", "<doctest readme.txt[0]>", or
# "<exec_function>". Don't ever trace these executions, since we
# can't do anything with the data later anyway.
- return nope(disp, "not a real file name")
+ return nope(disp, "file name is not real")
canonical = canonical_filename(filename)
disp.canonical_filename = canonical
diff --git a/coverage/plugin_support.py b/coverage/plugin_support.py
index 62985a066..4ed02c5c0 100644
--- a/coverage/plugin_support.py
+++ b/coverage/plugin_support.py
@@ -12,11 +12,12 @@
from types import FrameType
from typing import Any, Dict, Iterable, Iterator, List, Optional, Set, Tuple, Union
-from coverage.config import CoverageConfig
from coverage.exceptions import PluginError
from coverage.misc import isolate_module
from coverage.plugin import CoveragePlugin, FileTracer, FileReporter
-from coverage.types import TArc, TConfigurable, TDebugCtl, TLineNo, TSourceTokenLines
+from coverage.types import (
+ TArc, TConfigurable, TDebugCtl, TLineNo, TPluginConfig, TSourceTokenLines,
+)
os = isolate_module(os)
@@ -38,7 +39,7 @@ def __init__(self) -> None:
def load_plugins(
cls,
modules: Iterable[str],
- config: CoverageConfig,
+ config: TPluginConfig,
debug: Optional[TDebugCtl] = None,
) -> Plugins:
"""Load plugins from `modules`.
diff --git a/coverage/pytracer.py b/coverage/pytracer.py
index 94d2ecdcd..326c50ba8 100644
--- a/coverage/pytracer.py
+++ b/coverage/pytracer.py
@@ -8,12 +8,16 @@
import atexit
import dis
import sys
+import threading
-from types import FrameType
-from typing import Any, Callable, Dict, Optional
+from types import FrameType, ModuleType
+from typing import Any, Callable, Dict, List, Optional, Set, Tuple, cast
from coverage import env
-from coverage.types import TFileDisposition, TTraceData, TTraceFn, TTracer, TWarnFn
+from coverage.types import (
+ TArc, TFileDisposition, TLineNo, TTraceData, TTraceFileData, TTraceFn,
+ TTracer, TWarnFn,
+)
# We need the YIELD_VALUE opcode below, in a comparison-friendly form.
RESUME = dis.opmap.get('RESUME')
@@ -59,16 +63,16 @@ def __init__(self) -> None:
self.warn: TWarnFn
# The threading module to use, if any.
- self.threading = None
+ self.threading: Optional[ModuleType] = None
- self.cur_file_data = None
- self.last_line = 0 # int, but uninitialized.
+ self.cur_file_data: Optional[TTraceFileData] = None
+ self.last_line: TLineNo = 0
self.cur_file_name: Optional[str] = None
self.context: Optional[str] = None
self.started_context = False
- self.data_stack = []
- self.thread = None
+ self.data_stack: List[Tuple[Optional[TTraceFileData], Optional[str], TLineNo, bool]] = []
+ self.thread: Optional[threading.Thread] = None
self.stopped = False
self._activity = False
@@ -78,7 +82,7 @@ def __init__(self) -> None:
# Cache a bound method on the instance, so that we don't have to
# re-create a bound method object all the time.
- self._cached_bound_method_trace = self._trace
+ self._cached_bound_method_trace: TTraceFn = self._trace
def __repr__(self) -> str:
me = id(self)
@@ -109,7 +113,13 @@ def log(self, marker: str, *args: Any) -> None:
f.write(stack)
f.write("\n")
- def _trace(self, frame: FrameType, event: str, arg_unused: Any) -> Optional[TTraceFn]:
+ def _trace(
+ self,
+ frame: FrameType,
+ event: str,
+ arg: Any, # pylint: disable=unused-argument
+ lineno: Optional[TLineNo] = None, # pylint: disable=unused-argument
+ ) -> Optional[TTraceFn]:
"""The trace function passed to sys.settrace."""
if THIS_FILE in frame.f_code.co_filename:
@@ -164,7 +174,7 @@ def _trace(self, frame: FrameType, event: str, arg_unused: Any) -> Optional[TTra
# Improve tracing performance: when calling a function, both caller
# and callee are often within the same file. if that's the case, we
# don't have to re-check whether to trace the corresponding
- # function (which is a little bit espensive since it involves
+ # function (which is a little bit expensive since it involves
# dictionary lookups). This optimization is only correct if we
# didn't start a context.
filename = frame.f_code.co_filename
@@ -180,7 +190,7 @@ def _trace(self, frame: FrameType, event: str, arg_unused: Any) -> Optional[TTra
tracename = disp.source_filename
assert tracename is not None
if tracename not in self.data:
- self.data[tracename] = set()
+ self.data[tracename] = set() # type: ignore[assignment]
self.cur_file_data = self.data[tracename]
else:
frame.f_trace_lines = False
@@ -206,13 +216,13 @@ def _trace(self, frame: FrameType, event: str, arg_unused: Any) -> Optional[TTra
elif event == 'line':
# Record an executed line.
if self.cur_file_data is not None:
- lineno = frame.f_lineno
+ flineno: TLineNo = frame.f_lineno
if self.trace_arcs:
- self.cur_file_data.add((self.last_line, lineno))
+ cast(Set[TArc], self.cur_file_data).add((self.last_line, flineno))
else:
- self.cur_file_data.add(lineno)
- self.last_line = lineno
+ cast(Set[TLineNo], self.cur_file_data).add(flineno)
+ self.last_line = flineno
elif event == 'return':
if self.trace_arcs and self.cur_file_data:
@@ -240,7 +250,7 @@ def _trace(self, frame: FrameType, event: str, arg_unused: Any) -> Optional[TTra
real_return = True
if real_return:
first = frame.f_code.co_firstlineno
- self.cur_file_data.add((self.last_line, -first))
+ cast(Set[TArc], self.cur_file_data).add((self.last_line, -first))
# Leaving this function, pop the filename stack.
self.cur_file_data, self.cur_file_name, self.last_line, self.started_context = (
@@ -248,6 +258,7 @@ def _trace(self, frame: FrameType, event: str, arg_unused: Any) -> Optional[TTra
)
# Leaving a context?
if self.started_context:
+ assert self.switch_context is not None
self.context = None
self.switch_context(None)
return self._cached_bound_method_trace
@@ -284,12 +295,14 @@ def stop(self) -> None:
# right thread.
self.stopped = True
- if self.threading and self.thread.ident != self.threading.current_thread().ident:
- # Called on a different thread than started us: we can't unhook
- # ourselves, but we've set the flag that we should stop, so we
- # won't do any more tracing.
- #self.log("~", "stopping on different threads")
- return
+ if self.threading:
+ assert self.thread is not None
+ if self.thread.ident != self.threading.current_thread().ident:
+ # Called on a different thread than started us: we can't unhook
+ # ourselves, but we've set the flag that we should stop, so we
+ # won't do any more tracing.
+ #self.log("~", "stopping on different threads")
+ return
if self.warn:
# PyPy clears the trace function before running atexit functions,
diff --git a/coverage/sqldata.py b/coverage/sqldata.py
index da66ad099..1cb8abe47 100644
--- a/coverage/sqldata.py
+++ b/coverage/sqldata.py
@@ -528,6 +528,8 @@ def add_arcs(self, arc_data: Mapping[str, Collection[TArc]]) -> None:
with self._connect() as con:
self._set_context_id()
for filename, arcs in arc_data.items():
+ if not arcs:
+ continue
file_id = self._file_id(filename, add=True)
data = [(file_id, self._current_context_id, fromno, tono) for fromno, tono in arcs]
con.executemany_void(
@@ -571,12 +573,7 @@ def add_file_tracers(self, file_tracers: Mapping[str, str]) -> None:
self._start_using()
with self._connect() as con:
for filename, plugin_name in file_tracers.items():
- file_id = self._file_id(filename)
- if file_id is None:
- raise DataError(
- f"Can't add file tracer data for unmeasured file '{filename}'"
- )
-
+ file_id = self._file_id(filename, add=True)
existing_plugin = self.file_tracer(filename)
if existing_plugin:
if existing_plugin != plugin_name:
@@ -1213,10 +1210,9 @@ def execute_one(self, sql: str, parameters: Iterable[Any] = ()) -> Optional[Tupl
else:
raise AssertionError(f"SQL {sql!r} shouldn't return {len(rows)} rows")
- def _executemany(self, sql: str, data: Iterable[Any]) -> sqlite3.Cursor:
+ def _executemany(self, sql: str, data: List[Any]) -> sqlite3.Cursor:
"""Same as :meth:`python:sqlite3.Connection.executemany`."""
if self.debug.should("sql"):
- data = list(data)
final = ":" if self.debug.should("sqldata") else ""
self.debug.write(f"Executing many {sql!r} with {len(data)} rows{final}")
if self.debug.should("sqldata"):
@@ -1233,7 +1229,9 @@ def _executemany(self, sql: str, data: Iterable[Any]) -> sqlite3.Cursor:
def executemany_void(self, sql: str, data: Iterable[Any]) -> None:
"""Same as :meth:`python:sqlite3.Connection.executemany` when you don't need the cursor."""
- self._executemany(sql, data).close()
+ data = list(data)
+ if data:
+ self._executemany(sql, data).close()
def executescript(self, script: str) -> None:
"""Same as :meth:`python:sqlite3.Connection.executescript`."""
diff --git a/coverage/types.py b/coverage/types.py
index a45b831e8..3d21ac9d0 100644
--- a/coverage/types.py
+++ b/coverage/types.py
@@ -32,8 +32,8 @@ def __call__(
frame: FrameType,
event: str,
arg: Any,
- lineno: Optional[int] = None # Our own twist, see collector.py
- ) -> TTraceFn:
+ lineno: Optional[TLineNo] = None # Our own twist, see collector.py
+ ) -> Optional[TTraceFn]:
...
## Coverage.py tracing
@@ -63,11 +63,9 @@ class TFileDisposition(Protocol):
# - If measuring arcs in the C tracer, the values are sets of packed arcs (two
# line numbers combined into one integer).
-TTraceData = Union[
- Dict[str, Set[TLineNo]],
- Dict[str, Set[TArc]],
- Dict[str, Set[int]],
-]
+TTraceFileData = Union[Set[TLineNo], Set[TArc], Set[int]]
+
+TTraceData = Dict[str, TTraceFileData]
class TTracer(Protocol):
"""Either CTracer or PyTracer."""
@@ -138,6 +136,13 @@ def set_option(self, option_name: str, value: Union[TConfigValueIn, TConfigSecti
"""
+class TPluginConfig(Protocol):
+ """Something that can provide options to a plugin."""
+
+ def get_plugin_options(self, plugin: str) -> TConfigSectionOut:
+ """Get the options for a plugin."""
+
+
## Parsing
TMorf = Union[ModuleType, str]
diff --git a/coverage/version.py b/coverage/version.py
index b20b5568f..6f6375b67 100644
--- a/coverage/version.py
+++ b/coverage/version.py
@@ -8,7 +8,7 @@
# version_info: same semantics as sys.version_info.
# _dev: the .devN suffix if any.
-version_info = (7, 0, 5, "final", 0)
+version_info = (7, 1, 0, "final", 0)
_dev = 0
diff --git a/coverage/xmlreport.py b/coverage/xmlreport.py
index 86fdc18f2..6867f2e92 100644
--- a/coverage/xmlreport.py
+++ b/coverage/xmlreport.py
@@ -12,7 +12,7 @@
import xml.dom.minidom
from dataclasses import dataclass
-from typing import Dict, IO, Iterable, Optional, TYPE_CHECKING, cast
+from typing import Any, Dict, IO, Iterable, Optional, TYPE_CHECKING, cast
from coverage import __url__, __version__, files
from coverage.misc import isolate_module, human_sorted, human_sorted_items
@@ -48,6 +48,11 @@ class PackageData:
branches: int
+def appendChild(parent: Any, child: Any) -> None:
+ """Append a child to a parent, in a way mypy will shut up about."""
+ parent.appendChild(child)
+
+
class XmlReporter:
"""A reporter for writing Cobertura-style XML coverage results."""
@@ -103,9 +108,9 @@ def report(self, morfs: Optional[Iterable[TMorf]], outfile: Optional[IO[str]] =
# Populate the XML DOM with the source info.
for path in human_sorted(self.source_paths):
xsource = self.xml_out.createElement("source")
- xsources.appendChild(xsource)
+ appendChild(xsources, xsource)
txt = self.xml_out.createTextNode(path)
- xsource.appendChild(txt)
+ appendChild(xsource, txt)
lnum_tot, lhits_tot = 0, 0
bnum_tot, bhits_tot = 0, 0
@@ -116,11 +121,11 @@ def report(self, morfs: Optional[Iterable[TMorf]], outfile: Optional[IO[str]] =
# Populate the XML DOM with the package info.
for pkg_name, pkg_data in human_sorted_items(self.packages.items()):
xpackage = self.xml_out.createElement("package")
- xpackages.appendChild(xpackage)
+ appendChild(xpackages, xpackage)
xclasses = self.xml_out.createElement("classes")
- xpackage.appendChild(xclasses)
+ appendChild(xpackage, xclasses)
for _, class_elt in human_sorted_items(pkg_data.elements.items()):
- xclasses.appendChild(class_elt)
+ appendChild(xclasses, class_elt)
xpackage.setAttribute("name", pkg_name.replace(os.sep, '.'))
xpackage.setAttribute("line-rate", rate(pkg_data.hits, pkg_data.lines))
if has_arcs:
@@ -187,10 +192,10 @@ def xml_file(self, fr: FileReporter, analysis: Analysis, has_arcs: bool) -> None
xclass: xml.dom.minidom.Element = self.xml_out.createElement("class")
- xclass.appendChild(self.xml_out.createElement("methods"))
+ appendChild(xclass, self.xml_out.createElement("methods"))
xlines = self.xml_out.createElement("lines")
- xclass.appendChild(xlines)
+ appendChild(xclass, xlines)
xclass.setAttribute("name", os.path.relpath(rel_name, dirname))
xclass.setAttribute("filename", rel_name.replace("\\", "/"))
@@ -219,7 +224,7 @@ def xml_file(self, fr: FileReporter, analysis: Analysis, has_arcs: bool) -> None
if line in missing_branch_arcs:
annlines = ["exit" if b < 0 else str(b) for b in missing_branch_arcs[line]]
xline.setAttribute("missing-branches", ",".join(annlines))
- xlines.appendChild(xline)
+ appendChild(xlines, xline)
class_lines = len(analysis.statements)
class_hits = class_lines - len(analysis.missing)
diff --git a/doc/changes.rst b/doc/changes.rst
index da0f45aef..7f2df6181 100644
--- a/doc/changes.rst
+++ b/doc/changes.rst
@@ -6,7 +6,7 @@
.. The recent changes from the top-level file:
.. include:: ../CHANGES.rst
- :end-before: endchangesinclude
+ :end-before: scriv-end-here
.. Older changes here:
diff --git a/doc/cmd.rst b/doc/cmd.rst
index c1f52ee74..0704e940a 100644
--- a/doc/cmd.rst
+++ b/doc/cmd.rst
@@ -1056,7 +1056,9 @@ Debug options can also be set with the ``COVERAGE_DEBUG`` environment variable,
a comma-separated list of these options, or in the :ref:`config_run_debug`
section of the .coveragerc file.
-The debug output goes to stderr, unless the ``COVERAGE_DEBUG_FILE`` environment
-variable names a different file, which will be appended to.
-``COVERAGE_DEBUG_FILE`` accepts the special names ``stdout`` and ``stderr`` to
-write to those destinations.
+The debug output goes to stderr, unless the :ref:`config_run_debug_file`
+setting or the ``COVERAGE_DEBUG_FILE`` environment variable names a different
+file, which will be appended to. This can be useful because many test runners
+capture output, which could hide important details. ``COVERAGE_DEBUG_FILE``
+accepts the special names ``stdout`` and ``stderr`` to write to those
+destinations.
diff --git a/doc/conf.py b/doc/conf.py
index b321144e9..39601fab2 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -65,11 +65,11 @@
# @@@ editable
copyright = "2009–2023, Ned Batchelder" # pylint: disable=redefined-builtin
# The short X.Y.Z version.
-version = "7.0.5"
+version = "7.1.0"
# The full version, including alpha/beta/rc tags.
-release = "7.0.5"
+release = "7.1.0"
# The date of release, in "monthname day, year" format.
-release_date = "January 10, 2023"
+release_date = "January 24, 2023"
# @@@ end
rst_epilog = """
diff --git a/doc/config.rst b/doc/config.rst
index 90949506a..8e3d885be 100644
--- a/doc/config.rst
+++ b/doc/config.rst
@@ -203,6 +203,15 @@ include a short string at the end, the name of the warning. See
:ref:`cmd_warnings <cmd_warnings>` for details.
+.. _config_run_debug_file:
+
+[run] debug_file
+................
+
+(string) A file name to write debug output to. See :ref:`the run --debug
+option <cmd_run_debug>` for details.
+
+
.. _config_run_dynamic_context:
[run] dynamic_context
diff --git a/doc/faq.rst b/doc/faq.rst
index b8c2758c5..8252eeb98 100644
--- a/doc/faq.rst
+++ b/doc/faq.rst
@@ -11,6 +11,22 @@ FAQ and other help
Frequently asked questions
--------------------------
+Q: Why are some of my files not measured?
+.........................................
+
+Coverage.py has a number of mechanisms for deciding which files to measure and
+which to skip. If your files aren't being measured, use the ``--debug=trace``
+:ref:`option <cmd_run_debug>`, also settable as ``[run] debug=trace`` in the
+:ref:`settings file <config_run_debug>`, or as ``COVERAGE_DEBUG=trace`` in an
+environment variable.
+
+This will write a line for each file considered, indicating whether it is
+traced or not, and if not, why not. Be careful though: the output might be
+swallowed by your test runner. If so, a ``COVERAGE_DEBUG_FILE=/tmp/cov.out``
+environment variable can direct the output to a file instead to ensure you see
+everything.
+
+
Q: Why do unexecutable lines show up as executed?
.................................................
@@ -130,9 +146,9 @@ __ https://nedbatchelder.com/blog/200710/flaws_in_coverage_measurement.html
.. _trialcoverage: https://pypi.org/project/trialcoverage/
- - `pytest-coverage`_
+ - `pytest-cov`_
- .. _pytest-coverage: https://pypi.org/project/pytest-coverage/
+ .. _pytest-cov: https://pypi.org/project/pytest-cov/
- `django-coverage`_ for use with Django.
@@ -142,10 +158,11 @@ __ https://nedbatchelder.com/blog/200710/flaws_in_coverage_measurement.html
Q: Where can I get more help with coverage.py?
..............................................
-You can discuss coverage.py or get help using it on the `Testing In Python`_
-mailing list.
+You can discuss coverage.py or get help using it on the `Python discussion
+forums`_. If you ping me (``@nedbat``), there's a higher chance I'll see the
+post.
-.. _Testing In Python: http://lists.idyll.org/listinfo/testing-in-python
+.. _Python discussion forums: https://discuss.python.org/
Bug reports are gladly accepted at the `GitHub issue tracker`_.
@@ -164,6 +181,6 @@ Coverage.py was originally written by `Gareth Rees`_.
Since 2004, `Ned Batchelder`_ has extended and maintained it with the help of
`many others`_. The :ref:`change history ` has all the details.
-.. _Gareth Rees: http://garethrees.org/
+.. _Gareth Rees: http://garethrees.org/
.. _Ned Batchelder: https://nedbatchelder.com
-.. _many others: https://github.com/nedbat/coveragepy/blob/master/CONTRIBUTORS.txt
+.. _many others: https://github.com/nedbat/coveragepy/blob/master/CONTRIBUTORS.txt
diff --git a/doc/index.rst b/doc/index.rst
index 47fe4f1f0..62acaebb9 100644
--- a/doc/index.rst
+++ b/doc/index.rst
@@ -61,11 +61,17 @@ Getting started is easy:
For more details, see :ref:`install`.
#. Use ``coverage run`` to run your test suite and gather data. However you
- normally run your test suite, you can run your test runner under coverage.
- If your test runner command starts with "python", just replace the initial
- "python" with "coverage run".
+ normally run your test suite, you can use your test runner under coverage.
- Instructions for specific test runners:
+ .. tip::
+ If your test runner command starts with "python", just replace the initial
+ "python" with "coverage run".
+
+ ``python something.py`` becomes ``coverage run something.py``
+
+ ``python -m amodule`` becomes ``coverage run -m amodule``
+
+ Other instructions for specific test runners:
- **pytest**
@@ -182,9 +188,10 @@ Getting help
------------
If the :ref:`FAQ ` doesn't answer your question, you can discuss
-coverage.py or get help using it on the `Testing In Python`_ mailing list.
+coverage.py or get help using it on the `Python discussion forums`_. If you
+ping me (``@nedbat``), there's a higher chance I'll see the post.
-.. _Testing In Python: http://lists.idyll.org/listinfo/testing-in-python
+.. _Python discussion forums: https://discuss.python.org/
Bug reports are gladly accepted at the `GitHub issue tracker`_.
GitHub also hosts the `code repository`_.
diff --git a/doc/requirements.in b/doc/requirements.in
index 12aba6cc1..42eca4052 100644
--- a/doc/requirements.in
+++ b/doc/requirements.in
@@ -9,6 +9,7 @@
cogapp
#doc8
pyenchant
+scriv # for writing GitHub releases
sphinx
sphinx-autobuild
sphinx_rtd_theme
diff --git a/doc/requirements.pip b/doc/requirements.pip
index c8a729e2f..c084ea8ca 100644
--- a/doc/requirements.pip
+++ b/doc/requirements.pip
@@ -4,10 +4,14 @@
#
# make upgrade
#
-alabaster==0.7.12 \
- --hash=sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359 \
- --hash=sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02
+alabaster==0.7.13 \
+ --hash=sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3 \
+ --hash=sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2
# via sphinx
+attrs==22.2.0 \
+ --hash=sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836 \
+ --hash=sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99
+ # via scriv
babel==2.11.0 \
--hash=sha256:1ad3eca1c885218f6dce2ab67291178944f810a10a9b5f3cb8382a5a232b64fe \
--hash=sha256:5ef4b3226b0180dedded4229651c8b0e1a3a6a2837d45a073272f313e4cf97f6
@@ -16,10 +20,106 @@ certifi==2022.12.7 \
--hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \
--hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18
# via requests
-charset-normalizer==2.1.1 \
- --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \
- --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f
+charset-normalizer==3.0.1 \
+ --hash=sha256:00d3ffdaafe92a5dc603cb9bd5111aaa36dfa187c8285c543be562e61b755f6b \
+ --hash=sha256:024e606be3ed92216e2b6952ed859d86b4cfa52cd5bc5f050e7dc28f9b43ec42 \
+ --hash=sha256:0298eafff88c99982a4cf66ba2efa1128e4ddaca0b05eec4c456bbc7db691d8d \
+ --hash=sha256:02a51034802cbf38db3f89c66fb5d2ec57e6fe7ef2f4a44d070a593c3688667b \
+ --hash=sha256:083c8d17153ecb403e5e1eb76a7ef4babfc2c48d58899c98fcaa04833e7a2f9a \
+ --hash=sha256:0a11e971ed097d24c534c037d298ad32c6ce81a45736d31e0ff0ad37ab437d59 \
+ --hash=sha256:0bf2dae5291758b6f84cf923bfaa285632816007db0330002fa1de38bfcb7154 \
+ --hash=sha256:0c0a590235ccd933d9892c627dec5bc7511ce6ad6c1011fdf5b11363022746c1 \
+ --hash=sha256:0f438ae3532723fb6ead77e7c604be7c8374094ef4ee2c5e03a3a17f1fca256c \
+ --hash=sha256:109487860ef6a328f3eec66f2bf78b0b72400280d8f8ea05f69c51644ba6521a \
+ --hash=sha256:11b53acf2411c3b09e6af37e4b9005cba376c872503c8f28218c7243582df45d \
+ --hash=sha256:12db3b2c533c23ab812c2b25934f60383361f8a376ae272665f8e48b88e8e1c6 \
+ --hash=sha256:14e76c0f23218b8f46c4d87018ca2e441535aed3632ca134b10239dfb6dadd6b \
+ --hash=sha256:16a8663d6e281208d78806dbe14ee9903715361cf81f6d4309944e4d1e59ac5b \
+ --hash=sha256:292d5e8ba896bbfd6334b096e34bffb56161c81408d6d036a7dfa6929cff8783 \
+ --hash=sha256:2c03cc56021a4bd59be889c2b9257dae13bf55041a3372d3295416f86b295fb5 \
+ --hash=sha256:2e396d70bc4ef5325b72b593a72c8979999aa52fb8bcf03f701c1b03e1166918 \
+ --hash=sha256:2edb64ee7bf1ed524a1da60cdcd2e1f6e2b4f66ef7c077680739f1641f62f555 \
+ --hash=sha256:31a9ddf4718d10ae04d9b18801bd776693487cbb57d74cc3458a7673f6f34639 \
+ --hash=sha256:356541bf4381fa35856dafa6a965916e54bed415ad8a24ee6de6e37deccf2786 \
+ --hash=sha256:358a7c4cb8ba9b46c453b1dd8d9e431452d5249072e4f56cfda3149f6ab1405e \
+ --hash=sha256:37f8febc8ec50c14f3ec9637505f28e58d4f66752207ea177c1d67df25da5aed \
+ --hash=sha256:39049da0ffb96c8cbb65cbf5c5f3ca3168990adf3551bd1dee10c48fce8ae820 \
+ --hash=sha256:39cf9ed17fe3b1bc81f33c9ceb6ce67683ee7526e65fde1447c772afc54a1bb8 \
+ --hash=sha256:3ae1de54a77dc0d6d5fcf623290af4266412a7c4be0b1ff7444394f03f5c54e3 \
+ --hash=sha256:3b590df687e3c5ee0deef9fc8c547d81986d9a1b56073d82de008744452d6541 \
+ --hash=sha256:3e45867f1f2ab0711d60c6c71746ac53537f1684baa699f4f668d4c6f6ce8e14 \
+ --hash=sha256:3fc1c4a2ffd64890aebdb3f97e1278b0cc72579a08ca4de8cd2c04799a3a22be \
+ --hash=sha256:4457ea6774b5611f4bed5eaa5df55f70abde42364d498c5134b7ef4c6958e20e \
+ --hash=sha256:44ba614de5361b3e5278e1241fda3dc1838deed864b50a10d7ce92983797fa76 \
+ --hash=sha256:4a8fcf28c05c1f6d7e177a9a46a1c52798bfe2ad80681d275b10dcf317deaf0b \
+ --hash=sha256:4b0d02d7102dd0f997580b51edc4cebcf2ab6397a7edf89f1c73b586c614272c \
+ --hash=sha256:502218f52498a36d6bf5ea77081844017bf7982cdbe521ad85e64cabee1b608b \
+ --hash=sha256:503e65837c71b875ecdd733877d852adbc465bd82c768a067badd953bf1bc5a3 \
+ --hash=sha256:5995f0164fa7df59db4746112fec3f49c461dd6b31b841873443bdb077c13cfc \
+ --hash=sha256:59e5686dd847347e55dffcc191a96622f016bc0ad89105e24c14e0d6305acbc6 \
+ --hash=sha256:601f36512f9e28f029d9481bdaf8e89e5148ac5d89cffd3b05cd533eeb423b59 \
+ --hash=sha256:608862a7bf6957f2333fc54ab4399e405baad0163dc9f8d99cb236816db169d4 \
+ --hash=sha256:62595ab75873d50d57323a91dd03e6966eb79c41fa834b7a1661ed043b2d404d \
+ --hash=sha256:70990b9c51340e4044cfc394a81f614f3f90d41397104d226f21e66de668730d \
+ --hash=sha256:71140351489970dfe5e60fc621ada3e0f41104a5eddaca47a7acb3c1b851d6d3 \
+ --hash=sha256:72966d1b297c741541ca8cf1223ff262a6febe52481af742036a0b296e35fa5a \
+ --hash=sha256:74292fc76c905c0ef095fe11e188a32ebd03bc38f3f3e9bcb85e4e6db177b7ea \
+ --hash=sha256:761e8904c07ad053d285670f36dd94e1b6ab7f16ce62b9805c475b7aa1cffde6 \
+ --hash=sha256:772b87914ff1152b92a197ef4ea40efe27a378606c39446ded52c8f80f79702e \
+ --hash=sha256:79909e27e8e4fcc9db4addea88aa63f6423ebb171db091fb4373e3312cb6d603 \
+ --hash=sha256:7e189e2e1d3ed2f4aebabd2d5b0f931e883676e51c7624826e0a4e5fe8a0bf24 \
+ --hash=sha256:7eb33a30d75562222b64f569c642ff3dc6689e09adda43a082208397f016c39a \
+ --hash=sha256:81d6741ab457d14fdedc215516665050f3822d3e56508921cc7239f8c8e66a58 \
+ --hash=sha256:8499ca8f4502af841f68135133d8258f7b32a53a1d594aa98cc52013fff55678 \
+ --hash=sha256:84c3990934bae40ea69a82034912ffe5a62c60bbf6ec5bc9691419641d7d5c9a \
+ --hash=sha256:87701167f2a5c930b403e9756fab1d31d4d4da52856143b609e30a1ce7160f3c \
+ --hash=sha256:88600c72ef7587fe1708fd242b385b6ed4b8904976d5da0893e31df8b3480cb6 \
+ --hash=sha256:8ac7b6a045b814cf0c47f3623d21ebd88b3e8cf216a14790b455ea7ff0135d18 \
+ --hash=sha256:8b8af03d2e37866d023ad0ddea594edefc31e827fee64f8de5611a1dbc373174 \
+ --hash=sha256:8c7fe7afa480e3e82eed58e0ca89f751cd14d767638e2550c77a92a9e749c317 \
+ --hash=sha256:8eade758719add78ec36dc13201483f8e9b5d940329285edcd5f70c0a9edbd7f \
+ --hash=sha256:911d8a40b2bef5b8bbae2e36a0b103f142ac53557ab421dc16ac4aafee6f53dc \
+ --hash=sha256:93ad6d87ac18e2a90b0fe89df7c65263b9a99a0eb98f0a3d2e079f12a0735837 \
+ --hash=sha256:95dea361dd73757c6f1c0a1480ac499952c16ac83f7f5f4f84f0658a01b8ef41 \
+ --hash=sha256:9ab77acb98eba3fd2a85cd160851816bfce6871d944d885febf012713f06659c \
+ --hash=sha256:9cb3032517f1627cc012dbc80a8ec976ae76d93ea2b5feaa9d2a5b8882597579 \
+ --hash=sha256:9cf4e8ad252f7c38dd1f676b46514f92dc0ebeb0db5552f5f403509705e24753 \
+ --hash=sha256:9d9153257a3f70d5f69edf2325357251ed20f772b12e593f3b3377b5f78e7ef8 \
+ --hash=sha256:a152f5f33d64a6be73f1d30c9cc82dfc73cec6477ec268e7c6e4c7d23c2d2291 \
+ --hash=sha256:a16418ecf1329f71df119e8a65f3aa68004a3f9383821edcb20f0702934d8087 \
+ --hash=sha256:a60332922359f920193b1d4826953c507a877b523b2395ad7bc716ddd386d866 \
+ --hash=sha256:a8d0fc946c784ff7f7c3742310cc8a57c5c6dc31631269876a88b809dbeff3d3 \
+ --hash=sha256:ab5de034a886f616a5668aa5d098af2b5385ed70142090e2a31bcbd0af0fdb3d \
+ --hash=sha256:c22d3fe05ce11d3671297dc8973267daa0f938b93ec716e12e0f6dee81591dc1 \
+ --hash=sha256:c2ac1b08635a8cd4e0cbeaf6f5e922085908d48eb05d44c5ae9eabab148512ca \
+ --hash=sha256:c512accbd6ff0270939b9ac214b84fb5ada5f0409c44298361b2f5e13f9aed9e \
+ --hash=sha256:c75ffc45f25324e68ab238cb4b5c0a38cd1c3d7f1fb1f72b5541de469e2247db \
+ --hash=sha256:c95a03c79bbe30eec3ec2b7f076074f4281526724c8685a42872974ef4d36b72 \
+ --hash=sha256:cadaeaba78750d58d3cc6ac4d1fd867da6fc73c88156b7a3212a3cd4819d679d \
+ --hash=sha256:cd6056167405314a4dc3c173943f11249fa0f1b204f8b51ed4bde1a9cd1834dc \
+ --hash=sha256:db72b07027db150f468fbada4d85b3b2729a3db39178abf5c543b784c1254539 \
+ --hash=sha256:df2c707231459e8a4028eabcd3cfc827befd635b3ef72eada84ab13b52e1574d \
+ --hash=sha256:e62164b50f84e20601c1ff8eb55620d2ad25fb81b59e3cd776a1902527a788af \
+ --hash=sha256:e696f0dd336161fca9adbb846875d40752e6eba585843c768935ba5c9960722b \
+ --hash=sha256:eaa379fcd227ca235d04152ca6704c7cb55564116f8bc52545ff357628e10602 \
+ --hash=sha256:ebea339af930f8ca5d7a699b921106c6e29c617fe9606fa7baa043c1cdae326f \
+ --hash=sha256:f4c39b0e3eac288fedc2b43055cfc2ca7a60362d0e5e87a637beac5d801ef478 \
+ --hash=sha256:f5057856d21e7586765171eac8b9fc3f7d44ef39425f85dbcccb13b3ebea806c \
+ --hash=sha256:f6f45710b4459401609ebebdbcfb34515da4fc2aa886f95107f556ac69a9147e \
+ --hash=sha256:f97e83fa6c25693c7a35de154681fcc257c1c41b38beb0304b9c4d2d9e164479 \
+ --hash=sha256:f9d0c5c045a3ca9bedfc35dca8526798eb91a07aa7a2c0fee134c6c6f321cbd7 \
+ --hash=sha256:ff6f3db31555657f3163b15a6b7c6938d08df7adbfc9dd13d9d19edad678f1e8
# via requests
+click==8.1.3 \
+ --hash=sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e \
+ --hash=sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48
+ # via
+ # click-log
+ # scriv
+click-log==0.4.0 \
+ --hash=sha256:3970f8570ac54491237bcdb3d8ab5e3eef6c057df29f8c3d1151a51a9c23b975 \
+ --hash=sha256:a43e394b528d52112af599f2fc9e4b7cf3c15f94e53581f74fa6867e68c91756
+ # via scriv
cogapp==3.3.0 \
--hash=sha256:1be95183f70282422d594fa42426be6923070a4bd8335621f6347f3aeee81db0 \
--hash=sha256:8b5b5f6063d8ee231961c05da010cb27c30876b2279e23ad0eae5f8f09460d50
@@ -42,65 +142,78 @@ imagesize==1.4.1 \
--hash=sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b \
--hash=sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a
# via sphinx
-importlib-metadata==5.2.0 \
- --hash=sha256:0eafa39ba42bf225fc00e67f701d71f85aead9f878569caf13c3724f704b970f \
- --hash=sha256:404d48d62bba0b7a77ff9d405efd91501bef2e67ff4ace0bed40a0cf28c3c7cd
+importlib-metadata==6.0.0 \
+ --hash=sha256:7efb448ec9a5e313a57655d35aa54cd3e01b7e1fbcf72dce1bf06119420f5bad \
+ --hash=sha256:e354bedeb60efa6affdcc8ae121b73544a7aa74156d047311948f6d711cd378d
# via
+ # click
# sphinx
# sphinxcontrib-spelling
jinja2==3.1.2 \
--hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \
--hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61
- # via sphinx
+ # via
+ # scriv
+ # sphinx
livereload==2.6.3 \
--hash=sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869 \
--hash=sha256:ad4ac6f53b2d62bb6ce1a5e6e96f1f00976a32348afedcb4b6d68df2a1d346e4
# via sphinx-autobuild
-markupsafe==2.1.1 \
- --hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \
- --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \
- --hash=sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5 \
- --hash=sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7 \
- --hash=sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a \
- --hash=sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603 \
- --hash=sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1 \
- --hash=sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135 \
- --hash=sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247 \
- --hash=sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6 \
- --hash=sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601 \
- --hash=sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77 \
- --hash=sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02 \
- --hash=sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e \
- --hash=sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63 \
- --hash=sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f \
- --hash=sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980 \
- --hash=sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b \
- --hash=sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812 \
- --hash=sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff \
- --hash=sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96 \
- --hash=sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1 \
- --hash=sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925 \
- --hash=sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a \
- --hash=sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6 \
- --hash=sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e \
- --hash=sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f \
- --hash=sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4 \
- --hash=sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f \
- --hash=sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3 \
- --hash=sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c \
- --hash=sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a \
- --hash=sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417 \
- --hash=sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a \
- --hash=sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a \
- --hash=sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37 \
- --hash=sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452 \
- --hash=sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933 \
- --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \
- --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7
+markupsafe==2.1.2 \
+ --hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \
+ --hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \
+ --hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2 \
+ --hash=sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460 \
+ --hash=sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7 \
+ --hash=sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0 \
+ --hash=sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1 \
+ --hash=sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa \
+ --hash=sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03 \
+ --hash=sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323 \
+ --hash=sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65 \
+ --hash=sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013 \
+ --hash=sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036 \
+ --hash=sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f \
+ --hash=sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4 \
+ --hash=sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419 \
+ --hash=sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2 \
+ --hash=sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619 \
+ --hash=sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a \
+ --hash=sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a \
+ --hash=sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd \
+ --hash=sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7 \
+ --hash=sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666 \
+ --hash=sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65 \
+ --hash=sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859 \
+ --hash=sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625 \
+ --hash=sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff \
+ --hash=sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156 \
+ --hash=sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd \
+ --hash=sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba \
+ --hash=sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f \
+ --hash=sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1 \
+ --hash=sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094 \
+ --hash=sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a \
+ --hash=sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513 \
+ --hash=sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed \
+ --hash=sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d \
+ --hash=sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3 \
+ --hash=sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147 \
+ --hash=sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c \
+ --hash=sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603 \
+ --hash=sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601 \
+ --hash=sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a \
+ --hash=sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1 \
+ --hash=sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d \
+ --hash=sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3 \
+ --hash=sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54 \
+ --hash=sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2 \
+ --hash=sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6 \
+ --hash=sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58
# via jinja2
-packaging==22.0 \
- --hash=sha256:2198ec20bd4c017b8f9717e00f0c8714076fc2fd93816750ab48e2c41de2cfd3 \
- --hash=sha256:957e2148ba0e1a3b282772e791ef1d8083648bc131c8ab0c1feba110ce1146c3
+packaging==23.0 \
+ --hash=sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2 \
+ --hash=sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97
# via sphinx
pyenchant==3.2.2 \
--hash=sha256:1cf830c6614362a78aab78d50eaf7c6c93831369c52e1bb64ffae1df0341e637 \
@@ -110,18 +223,24 @@ pyenchant==3.2.2 \
# via
# -r doc/requirements.in
# sphinxcontrib-spelling
-pygments==2.13.0 \
- --hash=sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1 \
- --hash=sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42
+pygments==2.14.0 \
+ --hash=sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297 \
+ --hash=sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717
# via sphinx
-pytz==2022.7 \
- --hash=sha256:7ccfae7b4b2c067464a6733c6261673fdb8fd1be905460396b97a073e9fa683a \
- --hash=sha256:93007def75ae22f7cd991c84e02d434876818661f8df9ad5df9e950ff4e52cfd
+pytz==2022.7.1 \
+ --hash=sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0 \
+ --hash=sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a
# via babel
-requests==2.28.1 \
- --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \
- --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349
- # via sphinx
+requests==2.28.2 \
+ --hash=sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa \
+ --hash=sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf
+ # via
+ # scriv
+ # sphinx
+scriv==1.1.0 \
+ --hash=sha256:1064101623e318d906d91f7e1405c97af414c67f0c7da8ee4d08eaa523b735eb \
+ --hash=sha256:f2670624b2c44cdf34224c8b032b71a00a41b78e9f587140da6dd0b010e66b75
+ # via -r doc/requirements.in
six==1.16.0 \
--hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
--hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
@@ -196,9 +315,9 @@ typing-extensions==4.4.0 \
--hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \
--hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e
# via importlib-metadata
-urllib3==1.26.13 \
- --hash=sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc \
- --hash=sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8
+urllib3==1.26.14 \
+ --hash=sha256:076907bf8fd355cde77728471316625a4d2f7e713c125f51953bb5b3eecf4f72 \
+ --hash=sha256:75edcdc2f7d85b137124a6c3c9fc3933cdeaa12ecb9a6a959f22797a0feca7e1
# via requests
zipp==3.11.0 \
--hash=sha256:83a28fcb75844b5c0cdaf5aa4003c2d728c77e05f5aeabe8e95e56727005fbaa \
diff --git a/doc/sample_html/d_7b071bdc2a35fa80___init___py.html b/doc/sample_html/d_7b071bdc2a35fa80___init___py.html
index 18bdc7adb..7d2c3e89e 100644
--- a/doc/sample_html/d_7b071bdc2a35fa80___init___py.html
+++ b/doc/sample_html/d_7b071bdc2a35fa80___init___py.html
@@ -66,8 +66,8 @@