diff --git a/.flake8 b/.flake8 deleted file mode 100644 index e14b09a0543..00000000000 --- a/.flake8 +++ /dev/null @@ -1,38 +0,0 @@ -[flake8] -max-line-length = 95 -ignore = - E116, - E203, - E241, - E251, - E501, - E741, - W503, - W504, - I101, - SIM102, - SIM103, - SIM105, - SIM114, - SIM115, - SIM117, - SIM223, - SIM401, - SIM907, - SIM910, -exclude = - .git, - .tox, - .venv, - venv, - node_modules/*, - tests/roots/*, - build/*, - doc/_build/*, - sphinx/search/*, - doc/usage/extensions/example*.py, -per-file-ignores = - doc/conf.py:W605 - sphinx/events.py:E704, - tests/test_extensions/ext_napoleon_pep526_data_google.py:MLL001, - tests/test_extensions/ext_napoleon_pep526_data_numpy.py:MLL001, diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 00000000000..d0f6ad06464 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,65 @@ +# Binary data types +*.gif binary +*.jpg binary +*.mo binary +*.pdf binary +*.png binary +*.zip binary + +# Unix-style line endings +[attr]unix text eol=lf + +*.conf unix +*.css unix +*.cls unix +*.csv unix +*.dot unix +*.html unix +*.inc unix +*.ini unix +*.jinja unix +*.js unix +*.md unix +*.mjs unix +*.py unix +*.rst unix +*.sty unix +*.tex unix +*.toml unix +*.txt unix +*.svg unix +*.xml unix +*.yml unix + +# CRLF files +[attr]dos text eol=crlf + +*.bat dos +*.bat.jinja dos +*.stp dos +tests/roots/test-pycode/cp_1251_coded.py dos + +# Language aware diff headers +*.c diff=cpp +*.h diff=cpp +*.css diff=css +*.html diff=html +*.md diff=markdown +*.py diff=python +# *.rst diff=reStructuredText +*.tex diff=tex + +# Non UTF-8 encodings +tests/roots/test-pycode/cp_1251_coded.py working-tree-encoding=windows-1251 + +# Generated files +# https://github.com/github/linguist/blob/master/docs/overrides.md +# +# To always hide generated files in local diffs, mark them as binary: +# $ git config diff.generated.binary true +# +[attr]generated linguist-generated=true diff=generated + +tests/js/fixtures/**/*.js generated +sphinx/search/minified-js/*.js generated +sphinx/themes/bizstyle/static/css3-mediaqueries.js generated diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 226532b79bc..b4fdfdf70b9 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -5,5 +5,5 @@ contact_links: url: https://stackoverflow.com/questions/tagged/python-sphinx about: For Q&A purpose, please use Stackoverflow with the tag python-sphinx - name: Discussion - url: https://groups.google.com/forum/#!forum/sphinx-users - about: For general discussion, please use sphinx-users mailing list. + url: https://github.com/sphinx-doc/sphinx/discussions + about: For general discussion, please use GitHub Discussions. 
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 2b0c121c5e4..5df17861b08 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,19 +1,33 @@ -Subject: + -### Feature or Bugfix - -- Feature -- Bugfix -- Refactoring -### Purpose -- -- +## Purpose -### Detail -- -- + + +## References + + + +- <...> +- <...> +- <...> diff --git a/.github/workflows/builddoc.yml b/.github/workflows/builddoc.yml index f03a4ec3833..7f8471deecb 100644 --- a/.github/workflows/builddoc.yml +++ b/.github/workflows/builddoc.yml @@ -31,11 +31,10 @@ jobs: - name: Install graphviz run: sudo apt-get install --no-install-recommends --yes graphviz - name: Install uv - run: > - curl --no-progress-meter --location --fail - --proto '=https' --tlsv1.2 - "https://astral.sh/uv/install.sh" - | sh + uses: astral-sh/setup-uv@v5 + with: + version: latest + enable-cache: false - name: Install dependencies run: uv pip install .[docs] - name: Render the documentation diff --git a/.github/workflows/create-release.yml b/.github/workflows/create-release.yml index adbbc5e03db..878b4237bce 100644 --- a/.github/workflows/create-release.yml +++ b/.github/workflows/create-release.yml @@ -35,17 +35,14 @@ jobs: with: python-version: "3" - name: Install uv - run: > - curl --no-progress-meter --location --fail - --proto '=https' --tlsv1.2 - "https://astral.sh/uv/install.sh" - | sh + uses: astral-sh/setup-uv@v5 + with: + version: latest + enable-cache: false - name: Install build dependencies (pypa/build, twine) run: | uv pip install build "twine>=5.1" - # resolution fails without betterproto and protobuf-specs - uv pip install "pypi-attestations~=0.0.12" "sigstore-protobuf-specs==0.3.2" "betterproto==2.0.0b6" - name: Build distribution run: python -m build @@ -65,7 +62,7 @@ jobs: - name: Convert attestations to PEP 740 run: > - python utils/convert_attestations.py + uv run utils/convert_attestations.py "$BUNDLE_PATH" "$SIGNER_IDENTITY" env: diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index f7503a5afce..c8444c6a14f 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -31,41 +31,17 @@ jobs: RUFF_VERSION=$(awk -F'[="]' '/\[project\.optional-dependencies\]/ {p=1} /ruff/ {if (p) print $4}' pyproject.toml) echo "RUFF_VERSION=$RUFF_VERSION" >> $GITHUB_ENV - - name: Install Ruff - run: > - curl --no-progress-meter --location --fail - --proto '=https' --tlsv1.2 - --write-out "%{stderr}Downloaded: %{url}\n" - "https://astral.sh/ruff/$RUFF_VERSION/install.sh" - | sh + - name: Install Ruff ${{ env.RUFF_VERSION }} + uses: astral-sh/ruff-action@v3 + with: + args: --version + version: ${{ env.RUFF_VERSION }} - name: Lint with Ruff - run: ruff check . --output-format github + run: ruff check --output-format=github - name: Format with Ruff - run: ruff format . --diff - - flake8: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: "3" - - name: Install uv - run: > - curl --no-progress-meter --location --fail - --proto '=https' --tlsv1.2 - "https://astral.sh/uv/install.sh" - | sh - - name: Install dependencies - run: uv pip install --upgrade "flake8>=6.0" - - name: Lint with flake8 - run: flake8 . 
+ run: ruff format --diff mypy: runs-on: ubuntu-latest @@ -79,11 +55,10 @@ jobs: with: python-version: "3" - name: Install uv - run: > - curl --no-progress-meter --location --fail - --proto '=https' --tlsv1.2 - "https://astral.sh/uv/install.sh" - | sh + uses: astral-sh/setup-uv@v5 + with: + version: latest + enable-cache: false - name: Install dependencies run: uv pip install ".[lint,test]" - name: Type check with mypy @@ -101,11 +76,10 @@ jobs: with: python-version: "3" - name: Install uv - run: > - curl --no-progress-meter --location --fail - --proto '=https' --tlsv1.2 - "https://astral.sh/uv/install.sh" - | sh + uses: astral-sh/setup-uv@v5 + with: + version: latest + enable-cache: false - name: Install dependencies run: uv pip install ".[lint,test]" - name: Type check with pyright @@ -123,11 +97,10 @@ jobs: with: python-version: "3" - name: Install uv - run: > - curl --no-progress-meter --location --fail - --proto '=https' --tlsv1.2 - "https://astral.sh/uv/install.sh" - | sh + uses: astral-sh/setup-uv@v5 + with: + version: latest + enable-cache: false - name: Install dependencies run: uv pip install --upgrade sphinx-lint - name: Lint documentation with sphinx-lint @@ -145,11 +118,10 @@ jobs: with: python-version: "3" - name: Install uv - run: > - curl --no-progress-meter --location --fail - --proto '=https' --tlsv1.2 - "https://astral.sh/uv/install.sh" - | sh + uses: astral-sh/setup-uv@v5 + with: + version: latest + enable-cache: false - name: Install dependencies run: uv pip install --upgrade twine build - name: Lint with twine diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index caf633d3785..4b3b3283536 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -58,11 +58,10 @@ jobs: - name: Install graphviz run: sudo apt-get install --no-install-recommends --yes graphviz - name: Install uv - run: > - curl --no-progress-meter --location --fail - --proto '=https' --tlsv1.2 - "https://astral.sh/uv/install.sh" - | sh + uses: astral-sh/setup-uv@v5 + with: + version: latest + enable-cache: false - name: Install dependencies run: uv pip install .[test] - name: Install Docutils ${{ matrix.docutils }} @@ -198,9 +197,10 @@ jobs: - name: Install graphviz run: choco install --no-progress graphviz - name: Install uv - run: > - Invoke-WebRequest -Uri "https://astral.sh/uv/install.ps1" - | Invoke-Expression + uses: astral-sh/setup-uv@v5 + with: + version: latest + enable-cache: false - name: Install dependencies run: uv pip install .[test] - name: Test with pytest @@ -232,11 +232,10 @@ jobs: - name: Install graphviz run: sudo apt-get install --no-install-recommends --yes graphviz - name: Install uv - run: > - curl --no-progress-meter --location --fail - --proto '=https' --tlsv1.2 - "https://astral.sh/uv/install.sh" - | sh + uses: astral-sh/setup-uv@v5 + with: + version: latest + enable-cache: false - name: Install dependencies run: uv pip install .[test] - name: Install Docutils' HEAD @@ -266,11 +265,10 @@ jobs: - name: Install graphviz run: sudo apt-get install --no-install-recommends --yes graphviz - name: Install uv - run: > - curl --no-progress-meter --location --fail - --proto '=https' --tlsv1.2 - "https://astral.sh/uv/install.sh" - | sh + uses: astral-sh/setup-uv@v5 + with: + version: latest + enable-cache: false - name: Install dependencies run: | uv pip install .[test] --resolution lowest-direct @@ -298,11 +296,10 @@ jobs: - name: Check Python version run: python --version --version - name: Install uv - run: > - curl --no-progress-meter --location 
--fail - --proto '=https' --tlsv1.2 - "https://astral.sh/uv/install.sh" - | sh + uses: astral-sh/setup-uv@v5 + with: + version: latest + enable-cache: false - name: Install dependencies run: uv pip install .[test] - name: Test with pytest @@ -330,11 +327,10 @@ jobs: - name: Install graphviz run: sudo apt-get install --no-install-recommends --yes graphviz - name: Install uv - run: > - curl --no-progress-meter --location --fail - --proto '=https' --tlsv1.2 - "https://astral.sh/uv/install.sh" - | sh + uses: astral-sh/setup-uv@v5 + with: + version: latest + enable-cache: false - name: Install dependencies run: uv pip install .[test] pytest-cov - name: Test with pytest diff --git a/.github/workflows/transifex.yml b/.github/workflows/transifex.yml index f240d69a8a1..9f4698ead52 100644 --- a/.github/workflows/transifex.yml +++ b/.github/workflows/transifex.yml @@ -27,8 +27,13 @@ jobs: mkdir -p /tmp/tx_cli && cd $_ curl -o- https://raw.githubusercontent.com/transifex/cli/master/install.sh | bash shell: bash + - name: Install uv + uses: astral-sh/setup-uv@v5 + with: + version: latest + enable-cache: false - name: Install dependencies - run: pip install --upgrade babel jinja2 + run: uv pip install --upgrade babel jinja2 - name: Extract translations from source code run: python utils/babel_runner.py extract - name: Push translations to transifex.com @@ -58,8 +63,13 @@ jobs: mkdir -p /tmp/tx_cli && cd $_ curl -o- https://raw.githubusercontent.com/transifex/cli/master/install.sh | bash shell: bash + - name: Install uv + uses: astral-sh/setup-uv@v5 + with: + version: latest + enable-cache: false - name: Install dependencies - run: pip install --upgrade babel jinja2 + run: uv pip install --upgrade babel jinja2 - name: Extract translations from source code run: python utils/babel_runner.py extract - name: Pull translations from transifex.com diff --git a/.ruff.toml b/.ruff.toml index 5efb7d35ac6..ce194986213 100644 --- a/.ruff.toml +++ b/.ruff.toml @@ -3,13 +3,16 @@ line-length = 88 output-format = "full" extend-exclude = [ - "tests/roots/*", - "tests/js/roots/*", "build/*", "doc/_build/*", - "doc/usage/extensions/example*.py", + "tests/roots/test-directive-code/target.py", # Tests break if formatted + "tests/roots/test-pycode/cp_1251_coded.py", # Not UTF-8 ] +[format] +preview = true +quote-style = "single" + [lint] preview = true ignore = [ @@ -68,9 +71,17 @@ ignore = [ # pyupgrade "UP031", # Use format specifiers instead of percent format "UP032", # Use f-string instead of `format` call + # Ruff-specific rules ('RUF') + "RUF001", # String contains ambiguous {}. Did you mean {}? 
+ "RUF012", # Mutable class attributes should be annotated with `typing.ClassVar` + "RUF021", # Parenthesize `a and b` expressions when chaining `and` and `or` together, to make the precedence clear + "RUF022", # `__all__` is not sorted + "RUF023", # `{}.__slots__` is not sorted + "RUF027", # Possible f-string without an `f` prefix + "RUF039", # First argument to {call} is not raw string + "RUF052", # Local dummy variable `{}` is accessed ] external = [ # Whitelist for RUF100 unknown code warnings - "E704", "SIM113", ] select = [ @@ -203,15 +214,11 @@ select = [ "PYI", # Stub files are not used in Sphinx # flake8-quotes ('Q') -# "Q000", # Double quotes found but single quotes preferred -# "Q001", # Single quote multiline found but double quotes preferred - "Q002", # Single quote docstring found but double quotes preferred - "Q003", # Change outer quotes to avoid escaping inner quotes - "Q004", # Unnecessary escape on inner quote character + "Q", # flake8-return ('RET') "RET501", # Do not explicitly `return None` in function if it is the only possible return value "RET502", # Do not implicitly `return None` in function able to return non-`None` value -# "RET503", # Missing explicit `return` at the end of function able to return non-`None` value + "RET503", # Missing explicit `return` at the end of function able to return non-`None` value # "RET504", # Unnecessary assignment to `{name}` before `return` statement # "RET505", # Unnecessary `{branch}` after `return` statement # "RET506", # Unnecessary `{branch}` after `raise` statement @@ -219,48 +226,8 @@ select = [ "RET508", # Unnecessary `{branch}` after `break` statement # flake8-raise ('RSE') "RSE", - # ruff-specific rules ('RUF') -# "RUF001", # String contains ambiguous {}. Did you mean {}? - "RUF002", # Docstring contains ambiguous {}. Did you mean {}? - "RUF003", # Comment contains ambiguous {}. Did you mean {}? - "RUF005", # Consider `{expression}` instead of concatenation - "RUF006", # Store a reference to the return value of `{expr}.{method}` - "RUF007", # Prefer `itertools.pairwise()` over `zip()` when iterating over successive pairs - "RUF008", # Do not use mutable default values for dataclass attributes - "RUF009", # Do not perform function call `{name}` in dataclass defaults - "RUF010", # Use explicit conversion flag -# "RUF012", # Mutable class attributes should be annotated with `typing.ClassVar` - "RUF013", # PEP 484 prohibits implicit `Optional` - "RUF015", # Prefer `next({iterable})` over single element slice - "RUF016", # Slice in indexed access to type `{value_type}` uses type `{index_type}` instead of an integer - "RUF017", # Avoid quadratic list summation - "RUF018", # Avoid assignment expressions in `assert` statements - "RUF019", # Unnecessary key check before dictionary access - "RUF020", # `{never_like} | T` is equivalent to `T` -# "RUF021", # Parenthesize `a and b` expressions when chaining `and` and `or` together, to make the precedence clear -# "RUF022", # `__all__` is not sorted -# "RUF023", # `{}.__slots__` is not sorted - "RUF024", # Do not pass mutable objects as values to `dict.fromkeys` - "RUF026", # `default_factory` is a positional-only argument to `defaultdict` -# "RUF027", # Possible f-string without an `f` prefix - "RUF028", # This suppression comment is invalid because {} - "RUF029", # Function `{name}` is declared `async`, but doesn't `await` or use `async` features. 
- "RUF030", # `print()` call in `assert` statement is likely unintentional - "RUF031", # Use parentheses for tuples in subscripts - "RUF032", # `Decimal()` called with float literal argument - "RUF033", # `__post_init__` method with argument defaults - "RUF034", # Useless `if`-`else` condition - "RUF035", # Unsafe use of `{name}` detected - "RUF036", # `None` not at the end of the type annotation. - "RUF038", # `Literal[True, False, ...]` can be replaced with `Literal[...] | bool` -# "RUF039", # First argument to {call} is not raw string - "RUF040", # Non-string literal used as assert message - "RUF041", # Unnecessary nested `Literal` -# "RUF048", # `__version__` may contain non-integral-like elements - "RUF055", # Plain string pattern passed to `re` function -# "RUF100", # Unused `noqa` directive - "RUF101", # `{original}` is a redirect to `{target}` - "RUF200", # Failed to parse pyproject.toml: {message} + # Ruff-specific rules ('RUF') + "RUF", # flake8-bandit ('S') "S", # flake8-simplify ('SIM') @@ -309,8 +276,12 @@ select = [ "FURB118", "T201" ] -"sphinx/domains/**" = ["FURB113"] -"tests/test_domains/test_domain_cpp.py" = ["FURB113"] +"doc/usage/extensions/example_{google,numpy}.py" = [ + "D416", # Section name should end with a colon ("{name}") + "I002", # Missing required import: {name} + "INP001", # File {filename} is part of an implicit namespace package. Add an __init__.py. + "PLW3201", # Dunder method {name} has no special meaning in Python 3 +] # from .flake8 "sphinx/*" = ["E241"] @@ -366,7 +337,17 @@ select = [ "T201", # whitelist ``print`` for tests ] +# test roots are not packages +"tests/js/roots/*" = ["I002", "INP001"] +"tests/roots/*" = [ + "D403", # permit uncapitalised docstrings + "F401", # names may be unused in test roots + "I002", # we don't need the annotations future + "INP001", # test roots are not packages +] + # these tests need old ``typing`` generic aliases +"tests/roots/test-ext-autodoc/target/genericalias.py" = ["UP006", "UP007", "UP035"] "tests/test_util/test_util_typing.py" = ["RUF036", "UP006", "UP007", "UP035"] "tests/test_util/typing_test_data.py" = ["FA100", "I002", "PYI030", "UP006", "UP007", "UP035"] @@ -388,58 +369,3 @@ forced-separate = [ required-imports = [ "from __future__ import annotations", ] - -[format] -preview = true -quote-style = "single" -exclude = [ - "sphinx/addnodes.py", - "sphinx/application.py", - "sphinx/builders/latex/constants.py", - "sphinx/config.py", - "sphinx/domains/__init__.py", - "sphinx/domains/c/_parser.py", - "sphinx/domains/c/_ids.py", - "sphinx/domains/c/__init__.py", - "sphinx/domains/c/_symbol.py", - "sphinx/domains/c/_ast.py", - "sphinx/domains/changeset.py", - "sphinx/domains/citation.py", - "sphinx/domains/cpp/_parser.py", - "sphinx/domains/cpp/_ids.py", - "sphinx/domains/cpp/__init__.py", - "sphinx/domains/cpp/_symbol.py", - "sphinx/domains/cpp/_ast.py", - "sphinx/domains/index.py", - "sphinx/domains/javascript.py", - "sphinx/domains/math.py", - "sphinx/domains/python/_annotations.py", - "sphinx/domains/python/__init__.py", - "sphinx/domains/python/_object.py", - "sphinx/domains/rst.py", - "sphinx/domains/std/__init__.py", - "sphinx/ext/autodoc/__init__.py", - "sphinx/ext/autodoc/directive.py", - "sphinx/ext/autodoc/importer.py", - "sphinx/ext/autodoc/mock.py", - "sphinx/ext/autodoc/preserve_defaults.py", - "sphinx/ext/autodoc/type_comment.py", - "sphinx/ext/autodoc/typehints.py", - "sphinx/ext/autosectionlabel.py", - "sphinx/ext/autosummary/__init__.py", - "sphinx/ext/coverage.py", - "sphinx/ext/doctest.py", - 
"sphinx/ext/duration.py", - "sphinx/ext/extlinks.py", - "sphinx/ext/githubpages.py", - "sphinx/ext/graphviz.py", - "sphinx/ext/ifconfig.py", - "sphinx/ext/imgconverter.py", - "sphinx/ext/imgmath.py", - "sphinx/ext/inheritance_diagram.py", - "sphinx/ext/linkcode.py", - "sphinx/ext/mathjax.py", - "sphinx/ext/todo.py", - "sphinx/ext/viewcode.py", - "sphinx/registry.py", -] diff --git a/AUTHORS.rst b/AUTHORS.rst index 29759d1df7a..091a87ebdba 100644 --- a/AUTHORS.rst +++ b/AUTHORS.rst @@ -28,13 +28,14 @@ Contributors * Antonio Valentino -- qthelp builder, docstring inheritance * Antti Kaihola -- doctest extension (skipif option) * Barry Warsaw -- setup command improvements -* Ben Egan -- Napoleon improvements +* Ben Egan -- Napoleon improvements & viewcode improvements * Benjamin Peterson -- unittests * Blaise Laflamme -- pyramid theme * Brecht Machiels -- builder entry-points * Bruce Mitchener -- Minor epub improvement * Buck Evan -- dummy builder * Charles Duffy -- original graphviz extension +* Chris Barrick -- Napoleon type preprocessing logic * Chris Holdgraf -- improved documentation structure * Chris Lamb -- reproducibility fixes * Christopher Perkins -- autosummary integration diff --git a/CHANGES.rst b/CHANGES.rst index e20c1e4571e..f9a587f3181 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -9,6 +9,13 @@ Dependencies Incompatible changes -------------------- +* #13044: Remove the internal and undocumented ``has_equations`` data + from the :py:class:`!MathDomain`` domain. + The undocumented :py:meth:`!MathDomain.has_equations` method + now unconditionally returns ``True``. + These are replaced by the ``has_maths_elements`` key of the page context dict. + Patch by Adam Turner. + Deprecated ---------- @@ -18,6 +25,18 @@ Deprecated Features added -------------- +* Add a new ``duplicate_declaration`` warning type, + with ``duplicate_declaration.c`` and ``duplicate_declaration.cpp`` subtypes. + Patch by Julien Lecomte and Adam Turner. +* #11824: linkcode: Allow extensions to add support for a domain by defining + the keys that should be present. + Patch by Nicolas Peugnet. +* #13144: Add a ``class`` option to the :rst:dir:`autosummary` directive. + Patch by Tim Hoffmann. +* #13146: Napoleon: Unify the type preprocessing logic to allow + Google-style docstrings to use the optional and default keywords. + Patch by Chris Barrick. + Bugs fixed ---------- @@ -29,6 +48,13 @@ Bugs fixed * LaTeX: fix a ``7.4.0`` typo in a default for ``\sphinxboxsetup`` (refs: PR #13152). Patch by Jean-François B. +* #13096: HTML Search: check that query terms exist as properties in + term indices before accessing them. +* #11233: linkcheck: match redirect URIs against :confval:`linkcheck_ignore` by + overriding session-level ``requests.get_redirect_target``. +* #13195: viewcode: Fix issue where import paths differ from the directory + structure. + Patch by Ben Egan and Adam Turner. Testing ------- diff --git a/LICENSE.rst b/LICENSE.rst index 79c6aee4bc9..de3688cd2c6 100644 --- a/LICENSE.rst +++ b/LICENSE.rst @@ -4,7 +4,7 @@ License for Sphinx Unless otherwise indicated, all code in the Sphinx project is licenced under the two clause BSD licence below. -Copyright (c) 2007-2024 by the Sphinx team (see AUTHORS file). +Copyright (c) 2007-2025 by the Sphinx team (see AUTHORS file). All rights reserved. 
Redistribution and use in source and binary forms, with or without diff --git a/Makefile b/Makefile index 54dd66dc888..a0fa6314dc2 100644 --- a/Makefile +++ b/Makefile @@ -45,7 +45,6 @@ clean: clean .PHONY: style-check style-check: - @echo '[+] running flake8' ; flake8 . @echo '[+] running ruff' ; ruff check . .PHONY: format diff --git a/doc/_static/translation.svg b/doc/_static/translation.svg index 4e3ab5ab47d..599a0fc2395 100644 --- a/doc/_static/translation.svg +++ b/doc/_static/translation.svg @@ -13,22 +13,22 @@ link .pot to .po-->msgfmtsphinx-build -Dlanguage=' % depth) -def generate_latex_macro(image_format: str, - math: str, - config: Config, - confdir: str | os.PathLike[str] = '') -> str: +def generate_latex_macro( + image_format: str, + math: str, + config: Config, + confdir: str | os.PathLike[str] = '', +) -> str: """Generate LaTeX macro.""" variables = { 'fontsize': config.imgmath_font_size, - 'baselineskip': int(round(config.imgmath_font_size * 1.2)), + 'baselineskip': round(config.imgmath_font_size * 1.2), 'preamble': config.imgmath_latex_preamble, # the dvips option is important when imgmath_latex in {"xelatex", "tectonic"}, # it has no impact when imgmath_latex="latex" @@ -109,7 +108,9 @@ def generate_latex_macro(image_format: str, for template_dir in config.templates_path: for template_suffix in ('.jinja', '_t'): - template = os.path.join(confdir, template_dir, template_name + template_suffix) + template = os.path.join( + confdir, template_dir, template_name + template_suffix + ) if os.path.exists(template): return LaTeXRenderer().render(template, variables) @@ -149,16 +150,21 @@ def compile_math(latex: str, builder: Builder) -> str: command.append('math.tex') try: - subprocess.run(command, capture_output=True, cwd=tempdir, check=True, - encoding='ascii') + subprocess.run( + command, capture_output=True, cwd=tempdir, check=True, encoding='ascii' + ) if imgmath_latex_name in {'xelatex', 'tectonic'}: return os.path.join(tempdir, 'math.xdv') else: return os.path.join(tempdir, 'math.dvi') except OSError as exc: - logger.warning(__('LaTeX command %r cannot be run (needed for math ' - 'display), check the imgmath_latex setting'), - builder.config.imgmath_latex) + logger.warning( + __( + 'LaTeX command %r cannot be run (needed for math ' + 'display), check the imgmath_latex setting' + ), + builder.config.imgmath_latex, + ) raise InvokeError from exc except CalledProcessError as exc: msg = 'latex exited with error' @@ -171,12 +177,19 @@ def convert_dvi_to_image(command: list[str], name: str) -> tuple[str, str]: ret = subprocess.run(command, capture_output=True, check=True, encoding='ascii') return ret.stdout, ret.stderr except OSError as exc: - logger.warning(__('%s command %r cannot be run (needed for math ' - 'display), check the imgmath_%s setting'), - name, command[0], name) + logger.warning( + __( + '%s command %r cannot be run (needed for math ' + 'display), check the imgmath_%s setting' + ), + name, + command[0], + name, + ) raise InvokeError from exc except CalledProcessError as exc: - raise MathExtError('%s exited with error' % name, exc.stderr, exc.stdout) from exc + msg = f'{name} exited with error' + raise MathExtError(msg, exc.stderr, exc.stdout) from exc def convert_dvi_to_png(dvipath: str, builder: Builder, out_path: str) -> int | None: @@ -245,13 +258,16 @@ def render_math( unsupported_format_msg = 'imgmath_image_format must be either "png" or "svg"' raise MathExtError(unsupported_format_msg) - latex = generate_latex_macro(image_format, - math, - 
self.builder.config, - self.builder.confdir) + latex = generate_latex_macro( + image_format, math, self.builder.config, self.builder.confdir + ) - filename = f"{sha1(latex.encode(), usedforsecurity=False).hexdigest()}.{image_format}" - generated_path = os.path.join(self.builder.outdir, self.builder.imagedir, 'math', filename) + filename = ( + f'{sha1(latex.encode(), usedforsecurity=False).hexdigest()}.{image_format}' + ) + generated_path = os.path.join( + self.builder.outdir, self.builder.imagedir, 'math', filename + ) ensuredir(os.path.dirname(generated_path)) if os.path.isfile(generated_path): if image_format == 'png': @@ -261,8 +277,9 @@ def render_math( return generated_path, depth # if latex or dvipng (dvisvgm) has failed once, don't bother to try again - if hasattr(self.builder, '_imgmath_warned_latex') or \ - hasattr(self.builder, '_imgmath_warned_image_translator'): + latex_failed = hasattr(self.builder, '_imgmath_warned_latex') + trans_failed = hasattr(self.builder, '_imgmath_warned_image_translator') + if latex_failed or trans_failed: return None, None # .tex -> .dvi @@ -286,8 +303,9 @@ def render_math( def render_maths_to_base64(image_format: str, generated_path: str) -> str: - with open(generated_path, "rb") as f: - encoded = base64.b64encode(f.read()).decode(encoding='utf-8') + with open(generated_path, 'rb') as f: + content = f.read() + encoded = base64.b64encode(content).decode(encoding='utf-8') if image_format == 'png': return f'data:image/png;base64,{encoded}' if image_format == 'svg': @@ -308,12 +326,14 @@ def clean_up_files(app: Sphinx, exc: Exception) -> None: # in embed mode, the images are still generated in the math output dir # to be shared across workers, but are not useful to the final document with contextlib.suppress(Exception): - shutil.rmtree(os.path.join(app.builder.outdir, app.builder.imagedir, 'math')) + shutil.rmtree( + os.path.join(app.builder.outdir, app.builder.imagedir, 'math') + ) def get_tooltip(self: HTML5Translator, node: Element) -> str: if self.builder.config.imgmath_add_tooltips: - return ' alt="%s"' % self.encode(node.astext()).strip() + return f' alt="{self.encode(node.astext()).strip()}"' return '' @@ -322,16 +342,18 @@ def html_visit_math(self: HTML5Translator, node: nodes.math) -> None: rendered_path, depth = render_math(self, '$' + node.astext() + '$') except MathExtError as exc: msg = str(exc) - sm = nodes.system_message(msg, type='WARNING', level=2, - backrefs=[], source=node.astext()) + sm = nodes.system_message( + msg, type='WARNING', level=2, backrefs=[], source=node.astext() + ) sm.walkabout(self) logger.warning(__('display latex %r: %s'), node.astext(), msg) raise nodes.SkipNode from exc if rendered_path is None: # something failed -- use text-only as a bad substitute - self.body.append('%s' % - self.encode(node.astext()).strip()) + self.body.append( + f'{self.encode(node.astext()).strip()}' + ) else: if self.builder.config.imgmath_embed: image_format = self.builder.config.imgmath_image_format.lower() @@ -340,10 +362,10 @@ def html_visit_math(self: HTML5Translator, node: nodes.math) -> None: bname = os.path.basename(rendered_path) relative_path = os.path.join(self.builder.imgpath, 'math', bname) img_src = relative_path.replace(os.path.sep, '/') - c = f'') + align = f' style="vertical-align: {-depth:d}px"' if depth is not None else '' + self.body.append( + f'' + ) raise nodes.SkipNode @@ -356,8 +378,9 @@ def html_visit_displaymath(self: HTML5Translator, node: nodes.math_block) -> Non rendered_path, depth = render_math(self, latex) 
     except MathExtError as exc:
         msg = str(exc)
-        sm = nodes.system_message(msg, type='WARNING', level=2,
-                                  backrefs=[], source=node.astext())
+        sm = nodes.system_message(
+            msg, type='WARNING', level=2, backrefs=[], source=node.astext()
+        )
         sm.walkabout(self)
         logger.warning(__('inline latex %r: %s'), node.astext(), msg)
         raise nodes.SkipNode from exc
@@ -371,8 +394,9 @@ def html_visit_displaymath(self: HTML5Translator, node: nodes.math_block) -> Non

     if rendered_path is None:
         # something failed -- use text-only as a bad substitute
-        self.body.append('<span class="math">%s</span></p>\n</div>' %
-                         self.encode(node.astext()).strip())
+        self.body.append(
+            f'<span class="math">{self.encode(node.astext()).strip()}</span></p>\n</div>'
+        )
     else:
         if self.builder.config.imgmath_embed:
             image_format = self.builder.config.imgmath_image_format.lower()
@@ -381,24 +405,27 @@ def html_visit_displaymath(self: HTML5Translator, node: nodes.math_block) -> Non
             bname = os.path.basename(rendered_path)
             relative_path = os.path.join(self.builder.imgpath, 'math', bname)
             img_src = relative_path.replace(os.path.sep, '/')
-        self.body.append(f'

\n') + self.body.append(f'

\n') raise nodes.SkipNode def setup(app: Sphinx) -> ExtensionMetadata: - app.add_html_math_renderer('imgmath', - (html_visit_math, None), - (html_visit_displaymath, None)) + app.add_html_math_renderer( + 'imgmath', + inline_renderers=(html_visit_math, None), + block_renderers=(html_visit_displaymath, None), + ) app.add_config_value('imgmath_image_format', 'png', 'html') app.add_config_value('imgmath_dvipng', 'dvipng', 'html') app.add_config_value('imgmath_dvisvgm', 'dvisvgm', 'html') app.add_config_value('imgmath_latex', 'latex', 'html') app.add_config_value('imgmath_use_preview', False, 'html') - app.add_config_value('imgmath_dvipng_args', - ['-gamma', '1.5', '-D', '110', '-bg', 'Transparent'], - 'html') + app.add_config_value( + 'imgmath_dvipng_args', + ['-gamma', '1.5', '-D', '110', '-bg', 'Transparent'], + 'html', + ) app.add_config_value('imgmath_dvisvgm_args', ['--no-fonts'], 'html') app.add_config_value('imgmath_latex_args', [], 'html') app.add_config_value('imgmath_latex_preamble', '', 'html') @@ -406,4 +433,7 @@ def setup(app: Sphinx) -> ExtensionMetadata: app.add_config_value('imgmath_font_size', 12, 'html') app.add_config_value('imgmath_embed', False, 'html', bool) app.connect('build-finished', clean_up_files) - return {'version': sphinx.__display_version__, 'parallel_read_safe': True} + return { + 'version': sphinx.__display_version__, + 'parallel_read_safe': True, + } diff --git a/sphinx/ext/inheritance_diagram.py b/sphinx/ext/inheritance_diagram.py index 1bdbc2513ab..4b42532e7d4 100644 --- a/sphinx/ext/inheritance_diagram.py +++ b/sphinx/ext/inheritance_diagram.py @@ -36,7 +36,7 @@ class E(B): pass import os.path import re from importlib import import_module -from typing import TYPE_CHECKING, Any, ClassVar, cast +from typing import TYPE_CHECKING, Any, ClassVar, Final, cast from docutils import nodes from docutils.parsers.rst import directives @@ -58,19 +58,22 @@ class E(B): pass from docutils.nodes import Node from sphinx.application import Sphinx - from sphinx.environment import BuildEnvironment + from sphinx.config import Config from sphinx.util.typing import ExtensionMetadata, OptionSpec from sphinx.writers.html5 import HTML5Translator from sphinx.writers.latex import LaTeXTranslator from sphinx.writers.texinfo import TexinfoTranslator -module_sig_re = re.compile(r'''^(?:([\w.]*)\.)? # module names - (\w+) \s* $ # class/final module name - ''', re.VERBOSE) +module_sig_re = re.compile( + r"""^ + (?:([\w.]*)\.)? 
# module names + (\w+) \s* $ # class/final module name + """, + re.VERBOSE, +) -py_builtins = [obj for obj in vars(builtins).values() - if inspect.isclass(obj)] +PY_BUILTINS: Final = frozenset(filter(inspect.isclass, vars(builtins).values())) def try_import(objname: str) -> Any: @@ -117,17 +120,21 @@ def import_classes(name: str, currmodule: str) -> Any: if target is None: raise InheritanceException( 'Could not import class or module %r specified for ' - 'inheritance diagram' % name) + 'inheritance diagram' % name + ) if inspect.isclass(target): # If imported object is a class, just return it return [target] elif inspect.ismodule(target): # If imported object is a module, return classes defined on it - return [cls for cls in target.__dict__.values() - if inspect.isclass(cls) and cls.__module__ == target.__name__] - raise InheritanceException('%r specified for inheritance diagram is ' - 'not a class or module' % name) + return [ + cls + for cls in target.__dict__.values() + if inspect.isclass(cls) and cls.__module__ == target.__name__ + ] + msg = f'{name!r} specified for inheritance diagram is not a class or module' + raise InheritanceException(msg) class InheritanceException(Exception): @@ -141,10 +148,16 @@ class InheritanceGraph: graphviz dot graph from them. """ - def __init__(self, class_names: list[str], currmodule: str, show_builtins: bool = False, - private_bases: bool = False, parts: int = 0, - aliases: dict[str, str] | None = None, top_classes: Sequence[Any] = (), - ) -> None: + def __init__( + self, + class_names: list[str], + currmodule: str, + show_builtins: bool = False, + private_bases: bool = False, + parts: int = 0, + aliases: dict[str, str] | None = None, + top_classes: Sequence[Any] = (), + ) -> None: """*class_names* is a list of child classes to show bases from. If *show_builtins* is True, then Python builtins will be shown @@ -152,8 +165,9 @@ def __init__(self, class_names: list[str], currmodule: str, show_builtins: bool """ self.class_names = class_names classes = self._import_classes(class_names, currmodule) - self.class_info = self._class_info(classes, show_builtins, - private_bases, parts, aliases, top_classes) + self.class_info = self._class_info( + classes, show_builtins, private_bases, parts, aliases, top_classes + ) if not self.class_info: msg = 'No classes found for inheritance diagram' raise InheritanceException(msg) @@ -165,9 +179,15 @@ def _import_classes(self, class_names: list[str], currmodule: str) -> list[Any]: classes.extend(import_classes(name, currmodule)) return classes - def _class_info(self, classes: list[Any], show_builtins: bool, private_bases: bool, - parts: int, aliases: dict[str, str] | None, top_classes: Sequence[Any], - ) -> list[tuple[str, str, Sequence[str], str | None]]: + def _class_info( + self, + classes: list[Any], + show_builtins: bool, + private_bases: bool, + parts: int, + aliases: dict[str, str] | None, + top_classes: Sequence[Any], + ) -> list[tuple[str, str, Sequence[str], str | None]]: """Return name and bases for all classes that are ancestors of *classes*. 
@@ -186,7 +206,7 @@ def _class_info(self, classes: list[Any], show_builtins: bool, private_bases: bo all_classes = {} def recurse(cls: Any) -> None: - if not show_builtins and cls in py_builtins: + if not show_builtins and cls in PY_BUILTINS: return if not private_bases and cls.__name__.startswith('_'): return @@ -198,7 +218,7 @@ def recurse(cls: Any) -> None: tooltip = None try: if cls.__doc__: - doc = cls.__doc__.strip().split("\n")[0] + doc = cls.__doc__.strip().split('\n')[0] if doc: tooltip = '"%s"' % doc.replace('"', '\\"') except Exception: # might raise AttributeError for strange classes @@ -211,7 +231,7 @@ def recurse(cls: Any) -> None: return for base in cls.__bases__: - if not show_builtins and base in py_builtins: + if not show_builtins and base in PY_BUILTINS: continue if not private_bases and base.__name__.startswith('_'): continue @@ -224,12 +244,11 @@ def recurse(cls: Any) -> None: return [ (cls_name, fullname, tuple(bases), tooltip) - for (cls_name, fullname, bases, tooltip) - in all_classes.values() + for (cls_name, fullname, bases, tooltip) in all_classes.values() ] def class_name( - self, cls: Any, parts: int = 0, aliases: dict[str, str] | None = None, + self, cls: Any, parts: int = 0, aliases: dict[str, str] | None = None ) -> str: """Given a class object, return a fully-qualified name. @@ -264,8 +283,7 @@ def get_all_class_names(self) -> list[str]: 'shape': 'box', 'fontsize': 10, 'height': 0.25, - 'fontname': '"Vera Sans, DejaVu Sans, Liberation Sans, ' - 'Arial, Helvetica, sans"', + 'fontname': '"Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans"', 'style': '"setlinewidth(0.5),filled"', 'fillcolor': 'white', } @@ -280,12 +298,15 @@ def _format_node_attrs(self, attrs: dict[str, Any]) -> str: def _format_graph_attrs(self, attrs: dict[str, Any]) -> str: return ''.join(f'{k}={v};\n' for k, v in sorted(attrs.items())) - def generate_dot(self, name: str, urls: dict[str, str] | None = None, - env: BuildEnvironment | None = None, - graph_attrs: dict | None = None, - node_attrs: dict | None = None, - edge_attrs: dict | None = None, - ) -> str: + def generate_dot( + self, + name: str, + urls: dict[str, str] | None = None, + config: Config | None = None, + graph_attrs: dict | None = None, + node_attrs: dict | None = None, + edge_attrs: dict | None = None, + ) -> str: """Generate a graphviz dot graph from the classes that were passed in to __init__. 
@@ -307,10 +328,10 @@ def generate_dot(self, name: str, urls: dict[str, str] | None = None, n_attrs.update(node_attrs) if edge_attrs is not None: e_attrs.update(edge_attrs) - if env: - g_attrs.update(env.config.inheritance_graph_attrs) - n_attrs.update(env.config.inheritance_node_attrs) - e_attrs.update(env.config.inheritance_edge_attrs) + if config: + g_attrs.update(config.inheritance_graph_attrs) + n_attrs.update(config.inheritance_node_attrs) + e_attrs.update(config.inheritance_edge_attrs) res: list[str] = [ f'digraph {name} {{\n', @@ -321,19 +342,21 @@ def generate_dot(self, name: str, urls: dict[str, str] | None = None, # Write the node this_node_attrs = n_attrs.copy() if fullname in urls: - this_node_attrs["URL"] = f'"{urls[fullname]}"' - this_node_attrs["target"] = '"_top"' + this_node_attrs['URL'] = f'"{urls[fullname]}"' + this_node_attrs['target'] = '"_top"' if tooltip: - this_node_attrs["tooltip"] = tooltip - res.append(f' "{cls_name}" [{self._format_node_attrs(this_node_attrs)}];\n') + this_node_attrs['tooltip'] = tooltip + res.append( + f' "{cls_name}" [{self._format_node_attrs(this_node_attrs)}];\n' + ) # Write the edges res.extend( f' "{base_name}" -> "{cls_name}" [{self._format_node_attrs(e_attrs)}];\n' for base_name in bases ) - res.append("}\n") - return "".join(res) + res.append('}\n') + return ''.join(res) class inheritance_diagram(graphviz): @@ -377,11 +400,13 @@ def run(self) -> list[Node]: # Create a graph starting with the list of classes try: graph = InheritanceGraph( - class_names, self.env.ref_context.get('py:module'), # type: ignore[arg-type] + class_names, + self.env.ref_context.get('py:module'), # type: ignore[arg-type] parts=node['parts'], private_bases='private-bases' in self.options, aliases=self.config.inheritance_alias, - top_classes=node['top-classes']) + top_classes=node['top-classes'], + ) except InheritanceException as err: return [node.document.reporter.warning(err, line=self.lineno)] @@ -391,7 +416,8 @@ def run(self) -> list[Node]: # removed from the doctree after we're done with them. for name in graph.get_all_class_names(): refnodes, x = class_role( # type: ignore[misc] - 'class', ':class:`%s`' % name, name, 0, self.state.inliner) + 'class', f':class:`{name}`', name, 0, self.state.inliner + ) node.extend(refnodes) # Store the graph object so we can use it to generate the # dot file later @@ -411,7 +437,9 @@ def get_graph_hash(node: inheritance_diagram) -> str: return hashlib.md5(encoded, usedforsecurity=False).hexdigest()[-10:] -def html_visit_inheritance_diagram(self: HTML5Translator, node: inheritance_diagram) -> None: +def html_visit_inheritance_diagram( + self: HTML5Translator, node: inheritance_diagram +) -> None: """ Output the graph for HTML. This will insert a PNG with clickable image map. @@ -422,8 +450,10 @@ def html_visit_inheritance_diagram(self: HTML5Translator, node: inheritance_diag name = 'inheritance%s' % graph_hash # Create a mapping from fully-qualified class names to URLs. 
- graphviz_output_format = self.builder.env.config.graphviz_output_format.upper() - current_filename = os.path.basename(self.builder.current_docname + self.builder.out_suffix) + graphviz_output_format = self.config.graphviz_output_format.upper() + current_filename = os.path.basename( + self.builder.current_docname + self.builder.out_suffix + ) urls = {} pending_xrefs = cast('Iterable[addnodes.pending_xref]', node) for child in pending_xrefs: @@ -441,13 +471,22 @@ def html_visit_inheritance_diagram(self: HTML5Translator, node: inheritance_diag else: urls[child['reftitle']] = '#' + child.get('refid') - dotcode = graph.generate_dot(name, urls, env=self.builder.env) - render_dot_html(self, node, dotcode, {}, 'inheritance', 'inheritance', - alt='Inheritance diagram of ' + node['content']) + dotcode = graph.generate_dot(name, urls, config=self.config) + render_dot_html( + self, + node, + dotcode, + {}, + 'inheritance', + 'inheritance', + alt='Inheritance diagram of ' + node['content'], + ) raise nodes.SkipNode -def latex_visit_inheritance_diagram(self: LaTeXTranslator, node: inheritance_diagram) -> None: +def latex_visit_inheritance_diagram( + self: LaTeXTranslator, node: inheritance_diagram +) -> None: """ Output the graph for LaTeX. This will insert a PDF. """ @@ -456,14 +495,17 @@ def latex_visit_inheritance_diagram(self: LaTeXTranslator, node: inheritance_dia graph_hash = get_graph_hash(node) name = 'inheritance%s' % graph_hash - dotcode = graph.generate_dot(name, env=self.builder.env, - graph_attrs={'size': '"6.0,6.0"'}) + dotcode = graph.generate_dot( + name, config=self.config, graph_attrs={'size': '"6.0,6.0"'} + ) render_dot_latex(self, node, dotcode, {}, 'inheritance') raise nodes.SkipNode -def texinfo_visit_inheritance_diagram(self: TexinfoTranslator, node: inheritance_diagram, - ) -> None: +def texinfo_visit_inheritance_diagram( + self: TexinfoTranslator, + node: inheritance_diagram, +) -> None: """ Output the graph for Texinfo. This will insert a PNG. 
""" @@ -472,8 +514,9 @@ def texinfo_visit_inheritance_diagram(self: TexinfoTranslator, node: inheritance graph_hash = get_graph_hash(node) name = 'inheritance%s' % graph_hash - dotcode = graph.generate_dot(name, env=self.builder.env, - graph_attrs={'size': '"6.0,6.0"'}) + dotcode = graph.generate_dot( + name, config=self.config, graph_attrs={'size': '"6.0,6.0"'} + ) render_dot_texinfo(self, node, dotcode, {}, 'inheritance') raise nodes.SkipNode @@ -490,10 +533,14 @@ def setup(app: Sphinx) -> ExtensionMetadata: html=(html_visit_inheritance_diagram, None), text=(skip, None), man=(skip, None), - texinfo=(texinfo_visit_inheritance_diagram, None)) + texinfo=(texinfo_visit_inheritance_diagram, None), + ) app.add_directive('inheritance-diagram', InheritanceDiagram) app.add_config_value('inheritance_graph_attrs', {}, '') app.add_config_value('inheritance_node_attrs', {}, '') app.add_config_value('inheritance_edge_attrs', {}, '') app.add_config_value('inheritance_alias', {}, '') - return {'version': sphinx.__display_version__, 'parallel_read_safe': True} + return { + 'version': sphinx.__display_version__, + 'parallel_read_safe': True, + } diff --git a/sphinx/ext/intersphinx/_cli.py b/sphinx/ext/intersphinx/_cli.py index 04ac2876291..552b537efec 100644 --- a/sphinx/ext/intersphinx/_cli.py +++ b/sphinx/ext/intersphinx/_cli.py @@ -3,8 +3,9 @@ from __future__ import annotations import sys +from pathlib import Path -from sphinx.ext.intersphinx._load import _fetch_inventory +from sphinx.ext.intersphinx._load import _fetch_inventory, _InvConfig def inspect_main(argv: list[str], /) -> int: @@ -17,19 +18,21 @@ def inspect_main(argv: list[str], /) -> int: ) return 1 - class MockConfig: - intersphinx_timeout: int | None = None - tls_verify = False - tls_cacerts: str | dict[str, str] | None = None - user_agent: str = '' + filename = argv[0] + config = _InvConfig( + intersphinx_cache_limit=5, + intersphinx_timeout=None, + tls_verify=False, + tls_cacerts=None, + user_agent='', + ) try: - filename = argv[0] inv_data = _fetch_inventory( target_uri='', inv_location=filename, - config=MockConfig(), # type: ignore[arg-type] - srcdir='', # type: ignore[arg-type] + config=config, + srcdir=Path(''), ) for key in sorted(inv_data or {}): print(key) diff --git a/sphinx/ext/intersphinx/_load.py b/sphinx/ext/intersphinx/_load.py index eb8b46be807..a81c6bbb2be 100644 --- a/sphinx/ext/intersphinx/_load.py +++ b/sphinx/ext/intersphinx/_load.py @@ -3,6 +3,7 @@ from __future__ import annotations import concurrent.futures +import dataclasses import os.path import posixpath import time @@ -20,8 +21,6 @@ if TYPE_CHECKING: from pathlib import Path - from urllib3.response import HTTPResponse - from sphinx.application import Sphinx from sphinx.config import Config from sphinx.ext.intersphinx._shared import ( @@ -31,7 +30,7 @@ InventoryName, InventoryURI, ) - from sphinx.util.typing import Inventory, _ReadableStream + from sphinx.util.typing import Inventory def validate_intersphinx_mapping(app: Sphinx, config: Config) -> None: @@ -140,8 +139,9 @@ def load_mappings(app: Sphinx) -> None: The intersphinx mappings are expected to be normalized. 
""" + env = app.env now = int(time.time()) - inventories = InventoryAdapter(app.builder.env) + inventories = InventoryAdapter(env) intersphinx_cache: dict[InventoryURI, InventoryCacheEntry] = inventories.cache intersphinx_mapping: IntersphinxMapping = app.config.intersphinx_mapping @@ -170,6 +170,7 @@ def load_mappings(app: Sphinx) -> None: # This happens when the URI in `intersphinx_mapping` is changed. del intersphinx_cache[uri] + inv_config = _InvConfig.from_config(app.config) with concurrent.futures.ThreadPoolExecutor() as pool: futures = [ pool.submit( @@ -177,7 +178,7 @@ def load_mappings(app: Sphinx) -> None: project=project, cache=intersphinx_cache, now=now, - config=app.config, + config=inv_config, srcdir=app.srcdir, ) for project in projects @@ -202,12 +203,31 @@ def load_mappings(app: Sphinx) -> None: inventories.main_inventory.setdefault(objtype, {}).update(objects) +@dataclasses.dataclass(frozen=True, kw_only=True, slots=True) +class _InvConfig: + intersphinx_cache_limit: int + intersphinx_timeout: int | float | None + tls_verify: bool + tls_cacerts: str | dict[str, str] | None + user_agent: str + + @classmethod + def from_config(cls, config: Config) -> _InvConfig: + return cls( + intersphinx_cache_limit=config.intersphinx_cache_limit, + intersphinx_timeout=config.intersphinx_timeout, + tls_verify=config.tls_verify, + tls_cacerts=config.tls_cacerts, + user_agent=config.user_agent, + ) + + def _fetch_inventory_group( *, project: _IntersphinxProject, cache: dict[InventoryURI, InventoryCacheEntry], now: int, - config: Config, + config: _InvConfig, srcdir: Path, ) -> bool: if config.intersphinx_cache_limit >= 0: @@ -284,26 +304,50 @@ def fetch_inventory(app: Sphinx, uri: InventoryURI, inv: str) -> Inventory: return _fetch_inventory( target_uri=uri, inv_location=inv, - config=app.config, + config=_InvConfig.from_config(app.config), srcdir=app.srcdir, ) def _fetch_inventory( - *, target_uri: InventoryURI, inv_location: str, config: Config, srcdir: Path + *, target_uri: InventoryURI, inv_location: str, config: _InvConfig, srcdir: Path ) -> Inventory: """Fetch, parse and return an intersphinx inventory file.""" # both *target_uri* (base URI of the links to generate) # and *inv_location* (actual location of the inventory file) # can be local or remote URIs if '://' in target_uri: - # case: inv URI points to remote resource; strip any existing auth + # inv URI points to remote resource; strip any existing auth target_uri = _strip_basic_auth(target_uri) + if '://' in inv_location: + raw_data, target_uri = _fetch_inventory_url( + target_uri=target_uri, inv_location=inv_location, config=config + ) + else: + raw_data = _fetch_inventory_file(inv_location=inv_location, srcdir=srcdir) + try: - if '://' in inv_location: - f: _ReadableStream[bytes] = _read_from_url(inv_location, config=config) - else: - f = open(os.path.join(srcdir, inv_location), 'rb') # NoQA: SIM115 + invdata = InventoryFile.loads(raw_data, uri=target_uri) + except ValueError as exc: + msg = f'unknown or unsupported inventory version: {exc!r}' + raise ValueError(msg) from exc + return invdata + + +def _fetch_inventory_url( + *, target_uri: InventoryURI, inv_location: str, config: _InvConfig +) -> tuple[bytes, str]: + try: + with requests.get( + inv_location, + stream=True, + timeout=config.intersphinx_timeout, + _user_agent=config.user_agent, + _tls_info=(config.tls_verify, config.tls_cacerts), + ) as r: + r.raise_for_status() + raw_data = r.content + new_inv_location = r.url except Exception as err: err.args = ( 'intersphinx 
inventory %r not fetchable due to %s: %s', @@ -312,25 +356,25 @@ def _fetch_inventory( str(err), ) raise + + if inv_location != new_inv_location: + msg = __('intersphinx inventory has moved: %s -> %s') + LOGGER.info(msg, inv_location, new_inv_location) + + if target_uri in { + inv_location, + os.path.dirname(inv_location), + os.path.dirname(inv_location) + '/', + }: + target_uri = os.path.dirname(new_inv_location) + + return raw_data, target_uri + + +def _fetch_inventory_file(*, inv_location: str, srcdir: Path) -> bytes: try: - if hasattr(f, 'url'): - new_inv_location = f.url - if inv_location != new_inv_location: - msg = __('intersphinx inventory has moved: %s -> %s') - LOGGER.info(msg, inv_location, new_inv_location) - - if target_uri in { - inv_location, - os.path.dirname(inv_location), - os.path.dirname(inv_location) + '/', - }: - target_uri = os.path.dirname(new_inv_location) - with f: - try: - invdata = InventoryFile.load(f, target_uri, posixpath.join) - except ValueError as exc: - msg = f'unknown or unsupported inventory version: {exc!r}' - raise ValueError(msg) from exc + with open(srcdir / inv_location, 'rb') as f: + raw_data = f.read() except Exception as err: err.args = ( 'intersphinx inventory %r not readable due to %s: %s', @@ -339,8 +383,7 @@ def _fetch_inventory( str(err), ) raise - else: - return invdata + return raw_data def _get_safe_url(url: str) -> str: @@ -387,37 +430,3 @@ def _strip_basic_auth(url: str) -> str: if '@' in frags[1]: frags[1] = frags[1].split('@')[1] return urlunsplit(frags) - - -def _read_from_url(url: str, *, config: Config) -> HTTPResponse: - """Reads data from *url* with an HTTP *GET*. - - This function supports fetching from resources which use basic HTTP auth as - laid out by RFC1738 § 3.1. See § 5 for grammar definitions for URLs. - - .. seealso: - - https://www.ietf.org/rfc/rfc1738.txt - - :param url: URL of an HTTP resource - :type url: ``str`` - - :return: data read from resource described by *url* - :rtype: ``file``-like object - """ - r = requests.get( - url, - stream=True, - timeout=config.intersphinx_timeout, - _user_agent=config.user_agent, - _tls_info=(config.tls_verify, config.tls_cacerts), - ) - r.raise_for_status() - - # For inv_location / new_inv_location - r.raw.url = r.url # type: ignore[union-attr] - - # Decode content-body based on the header. 
- # xref: https://github.com/psf/requests/issues/2155 - r.raw.decode_content = True - return r.raw diff --git a/sphinx/ext/intersphinx/_resolve.py b/sphinx/ext/intersphinx/_resolve.py index 94101588bfa..09a7a057cc8 100644 --- a/sphinx/ext/intersphinx/_resolve.py +++ b/sphinx/ext/intersphinx/_resolve.py @@ -398,7 +398,7 @@ def run(self) -> tuple[list[Node], list[system_message]]: # the user did not specify a domain, # so we check first the default (if available) then standard domains domains: list[Domain] = [] - if default_domain := self.env.temp_data.get('default_domain'): + if default_domain := self.env.current_document.default_domain: domains.append(default_domain) if ( std_domain := self.env.domains.standard_domain @@ -505,7 +505,7 @@ def get_role_name(self, name: str) -> tuple[str, str] | None: names = name.split(':') if len(names) == 1: # role - default_domain = self.env.temp_data.get('default_domain') + default_domain = self.env.current_document.default_domain domain = default_domain.name if default_domain else None role = names[0] elif len(names) == 2: diff --git a/sphinx/ext/linkcode.py b/sphinx/ext/linkcode.py index 6b722b483ce..05523794348 100644 --- a/sphinx/ext/linkcode.py +++ b/sphinx/ext/linkcode.py @@ -18,12 +18,29 @@ from sphinx.util.typing import ExtensionMetadata +_DOMAIN_KEYS = { + 'py': ['module', 'fullname'], + 'c': ['names'], + 'cpp': ['names'], + 'js': ['object', 'fullname'], +} + + +def add_linkcode_domain(domain: str, keys: list[str], override: bool = False) -> None: + """Register a new list of keys to use for a domain. + + .. versionadded:: 8.2 + """ + if override or domain not in _DOMAIN_KEYS: + _DOMAIN_KEYS[domain] = list(keys) + + class LinkcodeError(SphinxError): - category = "linkcode error" + category = 'linkcode error' def doctree_read(app: Sphinx, doctree: Node) -> None: - env = app.builder.env + env = app.env resolve_target = getattr(env.config, 'linkcode_resolve', None) if not callable(env.config.linkcode_resolve): @@ -37,13 +54,6 @@ def doctree_read(app: Sphinx, doctree: Node) -> None: # ``supported_linkcode`` attribute. 
node_only_expr = getattr(app.builder, 'supported_linkcode', 'html') - domain_keys = { - 'py': ['module', 'fullname'], - 'c': ['names'], - 'cpp': ['names'], - 'js': ['object', 'fullname'], - } - for objnode in list(doctree.findall(addnodes.desc)): domain = objnode.get('domain') uris: set[str] = set() @@ -53,7 +63,7 @@ def doctree_read(app: Sphinx, doctree: Node) -> None: # Convert signode to a specified format info = {} - for key in domain_keys.get(domain, []): + for key in _DOMAIN_KEYS.get(domain, ()): value = signode.get(key) if not value: value = '' @@ -81,4 +91,7 @@ def doctree_read(app: Sphinx, doctree: Node) -> None: def setup(app: Sphinx) -> ExtensionMetadata: app.connect('doctree-read', doctree_read) app.add_config_value('linkcode_resolve', None, '') - return {'version': sphinx.__display_version__, 'parallel_read_safe': True} + return { + 'version': sphinx.__display_version__, + 'parallel_read_safe': True, + } diff --git a/sphinx/ext/mathjax.py b/sphinx/ext/mathjax.py index e9357fee604..92756b7ee3f 100644 --- a/sphinx/ext/mathjax.py +++ b/sphinx/ext/mathjax.py @@ -31,10 +31,15 @@ def html_visit_math(self: HTML5Translator, node: nodes.math) -> None: - self.body.append(self.starttag(node, 'span', '', CLASS='math notranslate nohighlight')) - self.body.append(self.builder.config.mathjax_inline[0] + - self.encode(node.astext()) + - self.builder.config.mathjax_inline[1] + '') + self.body.append( + self.starttag(node, 'span', '', CLASS='math notranslate nohighlight') + ) + self.body.append( + self.builder.config.mathjax_inline[0] + + self.encode(node.astext()) + + self.builder.config.mathjax_inline[1] + + '' + ) raise nodes.SkipNode @@ -70,26 +75,31 @@ def html_visit_displaymath(self: HTML5Translator, node: nodes.math_block) -> Non raise nodes.SkipNode -def install_mathjax(app: Sphinx, pagename: str, templatename: str, context: dict[str, Any], - event_arg: Any) -> None: - if ( - app.builder.format != 'html' or - app.builder.math_renderer_name != 'mathjax' # type: ignore[attr-defined] - ): +def install_mathjax( + app: Sphinx, + pagename: str, + templatename: str, + context: dict[str, Any], + event_arg: Any, +) -> None: + if app.builder.format != 'html': + return + if app.builder.math_renderer_name != 'mathjax': # type: ignore[attr-defined] return if not app.config.mathjax_path: msg = 'mathjax_path config value must be set for the mathjax extension to work' raise ExtensionError(msg) - domain = app.env.domains.math_domain builder = cast('StandaloneHTMLBuilder', app.builder) - if app.registry.html_assets_policy == 'always' or domain.has_equations(pagename): + page_has_equations = context.get('has_maths_elements', False) + if app.registry.html_assets_policy == 'always' or page_has_equations: # Enable mathjax only if equations exists if app.config.mathjax2_config: if app.config.mathjax_path == MATHJAX_URL: logger.warning( 'mathjax_config/mathjax2_config does not work ' - 'for the current MathJax version, use mathjax3_config instead') + 'for the current MathJax version, use mathjax3_config instead' + ) body = 'MathJax.Hub.Config(%s)' % json.dumps(app.config.mathjax2_config) builder.add_js_file('', type='text/x-mathjax-config', body=body) if app.config.mathjax3_config: @@ -110,9 +120,11 @@ def install_mathjax(app: Sphinx, pagename: str, templatename: str, context: dict def setup(app: Sphinx) -> ExtensionMetadata: - app.add_html_math_renderer('mathjax', - (html_visit_math, None), - (html_visit_displaymath, None)) + app.add_html_math_renderer( + 'mathjax', + inline_renderers=(html_visit_math, None), 
+ block_renderers=(html_visit_displaymath, None), + ) app.add_config_value('mathjax_path', MATHJAX_URL, 'html') app.add_config_value('mathjax_options', {}, 'html') @@ -123,4 +135,7 @@ def setup(app: Sphinx) -> ExtensionMetadata: app.add_config_value('mathjax3_config', None, 'html') app.connect('html-page-context', install_mathjax) - return {'version': sphinx.__display_version__, 'parallel_read_safe': True} + return { + 'version': sphinx.__display_version__, + 'parallel_read_safe': True, + } diff --git a/sphinx/ext/napoleon/__init__.py b/sphinx/ext/napoleon/__init__.py index b2a85815d6f..9af1fa09b7a 100644 --- a/sphinx/ext/napoleon/__init__.py +++ b/sphinx/ext/napoleon/__init__.py @@ -265,7 +265,7 @@ def __unicode__(self): Use the type annotations of class attributes that are documented in the docstring but do not have a type in the docstring. - """ # NoQA: D301 + """ _config_values: dict[str, tuple[Any, _ConfigRebuild]] = { 'napoleon_google_docstring': (True, 'env'), @@ -317,7 +317,10 @@ def setup(app: Sphinx) -> ExtensionMetadata: """ if not isinstance(app, Sphinx): # probably called by tests - return {'version': sphinx.__display_version__, 'parallel_read_safe': True} + return { + 'version': sphinx.__display_version__, + 'parallel_read_safe': True, + } _patch_python_domain() @@ -327,7 +330,10 @@ def setup(app: Sphinx) -> ExtensionMetadata: for name, (default, rebuild) in Config._config_values.items(): app.add_config_value(name, default, rebuild) - return {'version': sphinx.__display_version__, 'parallel_read_safe': True} + return { + 'version': sphinx.__display_version__, + 'parallel_read_safe': True, + } def _patch_python_domain() -> None: diff --git a/sphinx/ext/napoleon/docstring.py b/sphinx/ext/napoleon/docstring.py index d9f1eb357dd..90a5bf57ca3 100644 --- a/sphinx/ext/napoleon/docstring.py +++ b/sphinx/ext/napoleon/docstring.py @@ -51,7 +51,7 @@ _default_regex = re.compile( r'^default[^_0-9A-Za-z].*$', ) -_SINGLETONS = ('None', 'True', 'False', 'Ellipsis') +_SINGLETONS = frozenset({'None', 'True', 'False', 'Ellipsis', '...'}) class Deque(collections.deque[Any]): @@ -77,13 +77,183 @@ def next(self) -> Any: raise StopIteration -def _convert_type_spec(_type: str, translations: dict[str, str] | None = None) -> str: - """Convert type specification to reference in reST.""" - if translations is not None and _type in translations: - return translations[_type] - if _type == 'None': - return ':py:obj:`None`' - return f':py:class:`{_type}`' +def _recombine_set_tokens(tokens: list[str]) -> list[str]: + token_queue = collections.deque(tokens) + keywords = ('optional', 'default') + + def takewhile_set(tokens: collections.deque[str]) -> Iterator[str]: + open_braces = 0 + previous_token = None + while True: + try: + token = tokens.popleft() + except IndexError: + break + + if token == ', ': + previous_token = token + continue + + if not token.strip(): + continue + + if token in keywords: + tokens.appendleft(token) + if previous_token is not None: + tokens.appendleft(previous_token) + break + + if previous_token is not None: + yield previous_token + previous_token = None + + if token == '{': + open_braces += 1 + elif token == '}': + open_braces -= 1 + + yield token + + if open_braces == 0: + break + + def combine_set(tokens: collections.deque[str]) -> Iterator[str]: + while True: + try: + token = tokens.popleft() + except IndexError: + break + + if token == '{': + tokens.appendleft('{') + yield ''.join(takewhile_set(tokens)) + else: + yield token + + return list(combine_set(token_queue)) + + +def 
_tokenize_type_spec(spec: str) -> list[str]: + def postprocess(item: str) -> list[str]: + if _default_regex.match(item): + default = item[:7] + # can't be separated by anything other than a single space + # for now + other = item[8:] + + return [default, ' ', other] + else: + return [item] + + tokens = [ + item + for raw_token in _token_regex.split(spec) + for item in postprocess(raw_token) + if item + ] + return tokens + + +def _token_type(token: str, debug_location: str | None = None) -> str: + def is_numeric(token: str) -> bool: + try: + # use complex to make sure every numeric value is detected as literal + complex(token) + except ValueError: + return False + else: + return True + + if token.startswith(' ') or token.endswith(' '): + type_ = 'delimiter' + elif ( + is_numeric(token) + or (token.startswith('{') and token.endswith('}')) + or (token.startswith('"') and token.endswith('"')) + or (token.startswith("'") and token.endswith("'")) + ): + type_ = 'literal' + elif token.startswith('{'): + logger.warning( + __('invalid value set (missing closing brace): %s'), + token, + location=debug_location, + ) + type_ = 'literal' + elif token.endswith('}'): + logger.warning( + __('invalid value set (missing opening brace): %s'), + token, + location=debug_location, + ) + type_ = 'literal' + elif token.startswith(("'", '"')): + logger.warning( + __('malformed string literal (missing closing quote): %s'), + token, + location=debug_location, + ) + type_ = 'literal' + elif token.endswith(("'", '"')): + logger.warning( + __('malformed string literal (missing opening quote): %s'), + token, + location=debug_location, + ) + type_ = 'literal' + elif token in {'optional', 'default'}: + # default is not a official keyword (yet) but supported by the + # reference implementation (numpydoc) and widely used + type_ = 'control' + elif _xref_regex.match(token): + type_ = 'reference' + else: + type_ = 'obj' + + return type_ + + +def _convert_type_spec( + _type: str, + translations: dict[str, str] | None = None, + debug_location: str | None = None, +) -> str: + if translations is None: + translations = {} + + tokens = _tokenize_type_spec(_type) + combined_tokens = _recombine_set_tokens(tokens) + types = [(token, _token_type(token, debug_location)) for token in combined_tokens] + + converters = { + 'literal': lambda x: f'``{x}``', + 'obj': lambda x: _convert_type_spec_obj(x, translations), + 'control': lambda x: f'*{x}*', + 'delimiter': lambda x: x, + 'reference': lambda x: x, + } + + converted = ''.join( + converters.get(type_)(token) # type: ignore[misc] + for token, type_ in types + ) + + return converted + + +def _convert_type_spec_obj(obj: str, translations: dict[str, str]) -> str: + translation = translations.get(obj, obj) + + if _xref_regex.match(translation) is not None: + return translation + + # use :py:obj: if obj is a standard singleton + if translation in _SINGLETONS: + if translation == '...': # allow referencing the builtin ... + return ':py:obj:`... 
`' + return f':py:obj:`{translation}`' + + return f':py:class:`{translation}`' class GoogleDocstring: @@ -252,6 +422,20 @@ def __str__(self) -> str: """ return '\n'.join(self.lines()) + def _get_location(self) -> str | None: + try: + filepath = inspect.getfile(self._obj) if self._obj is not None else None + except TypeError: + filepath = None + name = self._name + + if filepath is None and name is None: + return None + elif filepath is None: + filepath = '' + + return f'{filepath}:docstring of {name}' + def lines(self) -> list[str]: """Return the parsed lines of the docstring in reStructuredText format. @@ -309,7 +493,11 @@ def _consume_field( _type, _name = _name, _type if _type and self._config.napoleon_preprocess_types: - _type = _convert_type_spec(_type, self._config.napoleon_type_aliases or {}) + _type = _convert_type_spec( + _type, + translations=self._config.napoleon_type_aliases or {}, + debug_location=self._get_location(), + ) indent = self._get_indent(line) + 1 _descs = [_desc, *self._dedent(self._consume_indented_block(indent))] @@ -357,7 +545,9 @@ def _consume_returns_section( if _type and preprocess_types and self._config.napoleon_preprocess_types: _type = _convert_type_spec( - _type, self._config.napoleon_type_aliases or {} + _type, + translations=self._config.napoleon_type_aliases or {}, + debug_location=self._get_location(), ) _desc = self.__class__(_desc, self._config).lines() @@ -428,9 +618,9 @@ def _format_admonition(self, admonition: str, lines: list[str]) -> list[str]: return [f'.. {admonition}:: {lines[0].strip()}', ''] elif lines: lines = self._indent(self._dedent(lines), 3) - return ['.. %s::' % admonition, '', *lines, ''] + return [f'.. {admonition}::', '', *lines, ''] else: - return ['.. %s::' % admonition, ''] + return [f'.. 
{admonition}::', ''] def _format_block( self, @@ -507,7 +697,7 @@ def _format_fields( field_type: str, fields: list[tuple[str, str, list[str]]], ) -> list[str]: - field_type = ':%s:' % field_type.strip() + field_type = f':{field_type.strip()}:' padding = ' ' * len(field_type) multi = len(fields) > 1 lines: list[str] = [] @@ -558,7 +748,7 @@ def _indent(self, lines: list[str], n: int = 4) -> list[str]: return [(' ' * n) + line for line in lines] def _is_indented(self, line: str, indent: int = 1) -> bool: - for i, s in enumerate(line): # NoQA: SIM110 + for i, s in enumerate(line): if i >= indent: return True elif not s.isspace(): @@ -672,7 +862,7 @@ def _parse_attribute_docstring(self) -> list[str]: _type, _desc = self._consume_inline_attribute() lines = self._format_field('', '', _desc) if _type: - lines.extend(['', ':type: %s' % _type]) + lines.extend(['', f':type: {_type}']) return lines def _parse_attributes_section(self, section: str) -> list[str]: @@ -681,7 +871,7 @@ def _parse_attributes_section(self, section: str) -> list[str]: if not _type: _type = self._lookup_annotation(_name) if self._config.napoleon_use_ivar: - field = ':ivar %s: ' % _name + field = f':ivar {_name}: ' lines.extend(self._format_block(field, _desc)) if _type: lines.append(f':vartype {_name}: {_type}') @@ -696,7 +886,7 @@ def _parse_attributes_section(self, section: str) -> list[str]: lines.extend(self._indent(fields, 3)) if _type: lines.append('') - lines.extend(self._indent([':type: %s' % _type], 3)) + lines.extend(self._indent([f':type: {_type}'], 3)) lines.append('') if self._config.napoleon_use_ivar: lines.append('') @@ -733,10 +923,10 @@ def _parse_generic_section(self, section: str, use_admonition: bool) -> list[str lines = self._strip_empty(self._consume_to_next_section()) lines = self._dedent(lines) if use_admonition: - header = '.. admonition:: %s' % section + header = f'.. admonition:: {section}' lines = self._indent(lines, 3) else: - header = '.. rubric:: %s' % section + header = f'.. rubric:: {section}' if lines: return [header, '', *lines, ''] else: @@ -754,7 +944,7 @@ def _parse_keyword_arguments_section(self, section: str) -> list[str]: def _parse_methods_section(self, section: str) -> list[str]: lines: list[str] = [] for _name, _type, _desc in self._consume_fields(parse_type=False): - lines.append('.. method:: %s' % _name) + lines.append(f'.. 
method:: {_name}') if self._opt: if 'no-index' in self._opt or 'noindex' in self._opt: lines.append(' :no-index:') @@ -837,7 +1027,7 @@ def _parse_returns_section(self, section: str) -> list[str]: if any(field): # only add :returns: if there's something to say lines.extend(self._format_block(':returns: ', field)) if _type and use_rtype: - lines.extend([':rtype: %s' % _type, '']) + lines.extend([f':rtype: {_type}', '']) if lines and lines[-1]: lines.append('') return lines @@ -914,187 +1104,6 @@ def _lookup_annotation(self, _name: str) -> str: return '' -def _recombine_set_tokens(tokens: list[str]) -> list[str]: - token_queue = collections.deque(tokens) - keywords = ('optional', 'default') - - def takewhile_set(tokens: collections.deque[str]) -> Iterator[str]: - open_braces = 0 - previous_token = None - while True: - try: - token = tokens.popleft() - except IndexError: - break - - if token == ', ': - previous_token = token - continue - - if not token.strip(): - continue - - if token in keywords: - tokens.appendleft(token) - if previous_token is not None: - tokens.appendleft(previous_token) - break - - if previous_token is not None: - yield previous_token - previous_token = None - - if token == '{': - open_braces += 1 - elif token == '}': - open_braces -= 1 - - yield token - - if open_braces == 0: - break - - def combine_set(tokens: collections.deque[str]) -> Iterator[str]: - while True: - try: - token = tokens.popleft() - except IndexError: - break - - if token == '{': - tokens.appendleft('{') - yield ''.join(takewhile_set(tokens)) - else: - yield token - - return list(combine_set(token_queue)) - - -def _tokenize_type_spec(spec: str) -> list[str]: - def postprocess(item: str) -> list[str]: - if _default_regex.match(item): - default = item[:7] - # can't be separated by anything other than a single space - # for now - other = item[8:] - - return [default, ' ', other] - else: - return [item] - - tokens = [ - item - for raw_token in _token_regex.split(spec) - for item in postprocess(raw_token) - if item - ] - return tokens - - -def _token_type(token: str, location: str | None = None) -> str: - def is_numeric(token: str) -> bool: - try: - # use complex to make sure every numeric value is detected as literal - complex(token) - except ValueError: - return False - else: - return True - - if token.startswith(' ') or token.endswith(' '): - type_ = 'delimiter' - elif ( - is_numeric(token) - or (token.startswith('{') and token.endswith('}')) - or (token.startswith('"') and token.endswith('"')) - or (token.startswith("'") and token.endswith("'")) - ): - type_ = 'literal' - elif token.startswith('{'): - logger.warning( - __('invalid value set (missing closing brace): %s'), - token, - location=location, - ) - type_ = 'literal' - elif token.endswith('}'): - logger.warning( - __('invalid value set (missing opening brace): %s'), - token, - location=location, - ) - type_ = 'literal' - elif token.startswith(("'", '"')): - logger.warning( - __('malformed string literal (missing closing quote): %s'), - token, - location=location, - ) - type_ = 'literal' - elif token.endswith(("'", '"')): - logger.warning( - __('malformed string literal (missing opening quote): %s'), - token, - location=location, - ) - type_ = 'literal' - elif token in {'optional', 'default'}: - # default is not a official keyword (yet) but supported by the - # reference implementation (numpydoc) and widely used - type_ = 'control' - elif _xref_regex.match(token): - type_ = 'reference' - else: - type_ = 'obj' - - return type_ - - -def 
_convert_numpy_type_spec( - _type: str, - location: str | None = None, - translations: dict[str, str] | None = None, -) -> str: - if translations is None: - translations = {} - - def convert_obj( - obj: str, translations: dict[str, str], default_translation: str - ) -> str: - translation = translations.get(obj, obj) - - # use :class: (the default) only if obj is not a standard singleton - if translation in _SINGLETONS and default_translation == ':class:`%s`': - default_translation = ':obj:`%s`' - elif translation == '...' and default_translation == ':class:`%s`': - # allow referencing the builtin ... - default_translation = ':obj:`%s `' - - if _xref_regex.match(translation) is None: - translation = default_translation % translation - - return translation - - tokens = _tokenize_type_spec(_type) - combined_tokens = _recombine_set_tokens(tokens) - types = [(token, _token_type(token, location)) for token in combined_tokens] - - converters = { - 'literal': lambda x: '``%s``' % x, - 'obj': lambda x: convert_obj(x, translations, ':class:`%s`'), - 'control': lambda x: '*%s*' % x, - 'delimiter': lambda x: x, - 'reference': lambda x: x, - } - - converted = ''.join( - converters.get(type_)(token) # type: ignore[misc] - for token, type_ in types - ) - - return converted - - class NumpyDocstring(GoogleDocstring): """Convert NumPy style docstrings to reStructuredText. @@ -1202,20 +1211,6 @@ def __init__( self._directive_sections = ['.. index::'] super().__init__(docstring, config, app, what, name, obj, options) - def _get_location(self) -> str | None: - try: - filepath = inspect.getfile(self._obj) if self._obj is not None else None - except TypeError: - filepath = None - name = self._name - - if filepath is None and name is None: - return None - elif filepath is None: - filepath = '' - - return f'{filepath}:docstring of {name}' - def _escape_args_and_kwargs(self, name: str) -> str: func = super()._escape_args_and_kwargs @@ -1242,10 +1237,10 @@ def _consume_field( _type, _name = _name, _type if self._config.napoleon_preprocess_types: - _type = _convert_numpy_type_spec( + _type = _convert_type_spec( _type, - location=self._get_location(), translations=self._config.napoleon_type_aliases or {}, + debug_location=self._get_location(), ) indent = self._get_indent(line) + 1 @@ -1349,7 +1344,8 @@ def parse_item_name(text: str) -> tuple[str, str | None]: return g[3], None else: return g[2], g[1] - raise ValueError('%s is not a item name' % text) + msg = f'{text} is not a item name' + raise ValueError(msg) def push_item(name: str | None, rest: list[str]) -> None: if not name: @@ -1417,12 +1413,12 @@ def translate( if role: link = f':{role}:`{name}`' else: - link = ':obj:`%s`' % name + link = f':py:obj:`{name}`' if desc or last_had_desc: lines += [''] lines += [link] else: - lines[-1] += ', %s' % link + lines[-1] += f', {link}' if desc: lines += self._indent([' '.join(desc)]) last_had_desc = True diff --git a/sphinx/ext/todo.py b/sphinx/ext/todo.py index cec0599b4aa..924211d3515 100644 --- a/sphinx/ext/todo.py +++ b/sphinx/ext/todo.py @@ -93,16 +93,18 @@ def merge_domaindata(self, docnames: Set[str], otherdata: dict[str, Any]) -> Non for docname in docnames: self.todos[docname] = otherdata['todos'][docname] - def process_doc(self, env: BuildEnvironment, docname: str, - document: nodes.document) -> None: + def process_doc( + self, env: BuildEnvironment, docname: str, document: nodes.document + ) -> None: todos = self.todos.setdefault(docname, []) for todo in document.findall(todo_node): 
env.events.emit('todo-defined', todo) todos.append(todo) if env.config.todo_emit_warnings: - logger.warning(__("TODO entry found: %s"), todo[1].astext(), - location=todo) + logger.warning( + __('TODO entry found: %s'), todo[1].astext(), location=todo + ) class TodoList(SphinxDirective): @@ -134,7 +136,8 @@ def __init__(self, app: Sphinx, doctree: nodes.document, docname: str) -> None: def process(self, doctree: nodes.document, docname: str) -> None: todos: list[todo_node] = functools.reduce( - operator.iadd, self.domain.todos.values(), []) + operator.iadd, self.domain.todos.values(), [] + ) for node in list(doctree.findall(todolist)): if not self.config.todo_include_todos: node.parent.remove(node) @@ -162,11 +165,13 @@ def create_todo_reference(self, todo: todo_node, docname: str) -> nodes.paragrap if self.config.todo_link_only: description = _('<<original entry>>') else: - description = (_('(The <<original entry>> is located in %s, line %d.)') % - (todo.source, todo.line)) + description = _('(The <<original entry>> is located in %s, line %d.)') % ( + todo.source, + todo.line, + ) - prefix = description[:description.find('<<')] - suffix = description[description.find('>>') + 2:] + prefix = description[: description.find('<<')] + suffix = description[description.find('>>') + 2 :] para = nodes.paragraph(classes=['todo-source']) para += nodes.Text(prefix) @@ -175,7 +180,9 @@ def create_todo_reference(self, todo: todo_node, docname: str) -> nodes.paragrap linktext = nodes.emphasis(_('original entry'), _('original entry')) reference = nodes.reference('', '', linktext, internal=True) try: - reference['refuri'] = self.builder.get_relative_uri(docname, todo['docname']) + reference['refuri'] = self.builder.get_relative_uri( + docname, todo['docname'] + ) reference['refuri'] += '#' + todo['ids'][0] except NoUri: # ignore if no URI can be determined, e.g.
for LaTeX output @@ -237,12 +244,14 @@ def setup(app: Sphinx) -> ExtensionMetadata: app.add_config_value('todo_emit_warnings', False, 'html') app.add_node(todolist) - app.add_node(todo_node, - html=(visit_todo_node, depart_todo_node), - latex=(latex_visit_todo_node, latex_depart_todo_node), - text=(visit_todo_node, depart_todo_node), - man=(visit_todo_node, depart_todo_node), - texinfo=(visit_todo_node, depart_todo_node)) + app.add_node( + todo_node, + html=(visit_todo_node, depart_todo_node), + latex=(latex_visit_todo_node, latex_depart_todo_node), + text=(visit_todo_node, depart_todo_node), + man=(visit_todo_node, depart_todo_node), + texinfo=(visit_todo_node, depart_todo_node), + ) app.add_directive('todo', Todo) app.add_directive('todolist', TodoList) diff --git a/sphinx/ext/viewcode.py b/sphinx/ext/viewcode.py index afc4b06f5cc..dc182a91329 100644 --- a/sphinx/ext/viewcode.py +++ b/sphinx/ext/viewcode.py @@ -2,11 +2,11 @@ from __future__ import annotations +import importlib.util import operator import os.path import posixpath import traceback -from importlib import import_module from typing import TYPE_CHECKING, Any, cast from docutils import nodes @@ -23,7 +23,7 @@ from sphinx.util.osutil import _last_modified_time if TYPE_CHECKING: - from collections.abc import Iterable, Iterator + from collections.abc import Iterator, Set from sphinx.application import Sphinx from sphinx.builders import Builder @@ -48,12 +48,30 @@ class viewcode_anchor(Element): def _get_full_modname(modname: str, attribute: str) -> str | None: + if modname is None: + # Prevents a TypeError: if the last getattr() call will return None + # then it's better to return it directly + return None + try: - if modname is None: - # Prevents a TypeError: if the last getattr() call will return None - # then it's better to return it directly + # Attempt to find full path of module + module_path = modname.split('.') + num_parts = len(module_path) + for i in range(num_parts, 0, -1): + mod_root = '.'.join(module_path[:i]) + module_spec = importlib.util.find_spec(mod_root) + if module_spec is not None: + break + else: return None - module = import_module(modname) + # Load and execute the module + module = importlib.util.module_from_spec(module_spec) + if module_spec.loader is None: + return None + module_spec.loader.exec_module(module) + if i != num_parts: + for mod in module_path[i:]: + module = getattr(module, mod) # Allow an attribute to have multiple parts and incidentally allow # repeated .s in the attribute. 
@@ -79,15 +97,15 @@ def _get_full_modname(modname: str, attribute: str) -> str | None: def is_supported_builder(builder: Builder) -> bool: - if builder.format != 'html': - return False - if builder.name == 'singlehtml': - return False - return not (builder.name.startswith('epub') and not builder.config.viewcode_enable_epub) + return ( + builder.format == 'html' + and builder.name != 'singlehtml' + and (not builder.name.startswith('epub') or builder.config.viewcode_enable_epub) + ) def doctree_read(app: Sphinx, doctree: Node) -> None: - env = app.builder.env + env = app.env if not hasattr(env, '_viewcode_modules'): env._viewcode_modules = {} # type: ignore[attr-defined] @@ -132,7 +150,7 @@ def has_tag(modname: str, fullname: str, docname: str, refname: str) -> bool: refname = modname if env.config.viewcode_follow_imported_members: new_modname = app.emit_firstresult( - 'viewcode-follow-imported', modname, fullname, + 'viewcode-follow-imported', modname, fullname ) if not new_modname: new_modname = _get_full_modname(modname, fullname) @@ -147,11 +165,14 @@ def has_tag(modname: str, fullname: str, docname: str, refname: str) -> bool: continue names.add(fullname) pagename = posixpath.join(OUTPUT_DIRNAME, modname.replace('.', '/')) - signode += viewcode_anchor(reftarget=pagename, refid=fullname, refdoc=env.docname) + signode += viewcode_anchor( + reftarget=pagename, refid=fullname, refdoc=env.docname + ) -def env_merge_info(app: Sphinx, env: BuildEnvironment, docnames: Iterable[str], - other: BuildEnvironment) -> None: +def env_merge_info( + app: Sphinx, env: BuildEnvironment, docnames: Set[str], other: BuildEnvironment +) -> None: if not hasattr(other, '_viewcode_modules'): return # create a _viewcode_modules dict on the main environment @@ -199,8 +220,13 @@ def run(self, **kwargs: Any) -> None: def convert_viewcode_anchors(self) -> None: for node in self.document.findall(viewcode_anchor): anchor = nodes.inline('', _('[source]'), classes=['viewcode-link']) - refnode = make_refnode(self.app.builder, node['refdoc'], node['reftarget'], - node['refid'], anchor) + refnode = make_refnode( + self.app.builder, + node['refdoc'], + node['reftarget'], + node['refid'], + anchor, + ) node.replace_self(refnode) def remove_viewcode_anchors(self) -> None: @@ -243,7 +269,7 @@ def should_generate_module_page(app: Sphinx, modname: str) -> bool: def collect_pages(app: Sphinx) -> Iterator[tuple[str, dict[str, Any], str]]: - env = app.builder.env + env = app.env if not hasattr(env, '_viewcode_modules'): return if not is_supported_builder(app.builder): @@ -254,10 +280,13 @@ def collect_pages(app: Sphinx) -> Iterator[tuple[str, dict[str, Any], str]]: modnames = set(env._viewcode_modules) for modname, entry in status_iterator( - sorted(env._viewcode_modules.items()), - __('highlighting module code... '), "blue", - len(env._viewcode_modules), - app.verbosity, operator.itemgetter(0)): + sorted(env._viewcode_modules.items()), + __('highlighting module code... '), + 'blue', + len(env._viewcode_modules), + app.verbosity, + operator.itemgetter(0), + ): if not entry: continue if not should_generate_module_page(app, modname): @@ -287,9 +316,11 @@ def collect_pages(app: Sphinx) -> Iterator[tuple[str, dict[str, Any], str]]: for name, docname in used.items(): type, start, end = tags[name] backlink = urito(pagename, docname) + '#' + refname + '.' + name - lines[start] = (f'
<div class="viewcode-block" id="{name}">\n' - f'<a class="viewcode-back" href="{backlink}">{link_text}</a>\n' - + lines[start]) + lines[start] = ( + f'<div class="viewcode-block" id="{name}">
\n' + f'<a class="viewcode-back" href="{backlink}">{link_text}</a>\n' + + lines[start] + ) lines[min(end, max_index)] += '</div>
\n' # try to find parents (for submodules) @@ -299,18 +330,22 @@ def collect_pages(app: Sphinx) -> Iterator[tuple[str, dict[str, Any], str]]: parent = parent.rsplit('.', 1)[0] if parent in modnames: parents.append({ - 'link': urito(pagename, - posixpath.join(OUTPUT_DIRNAME, parent.replace('.', '/'))), - 'title': parent}) - parents.append({'link': urito(pagename, posixpath.join(OUTPUT_DIRNAME, 'index')), - 'title': _('Module code')}) + 'link': urito( + pagename, + posixpath.join(OUTPUT_DIRNAME, parent.replace('.', '/')), + ), + 'title': parent, + }) + parents.append({ + 'link': urito(pagename, posixpath.join(OUTPUT_DIRNAME, 'index')), + 'title': _('Module code'), + }) parents.reverse() # putting it all together context = { 'parents': parents, 'title': modname, - 'body': (_('

<h1>Source code for %s</h1>
') % modname + - '\n'.join(lines)), + 'body': (_('

<h1>Source code for %s</h1>
') % modname + '\n'.join(lines)), } yield pagename, context, 'page.html' @@ -330,14 +365,15 @@ def collect_pages(app: Sphinx) -> Iterator[tuple[str, dict[str, Any], str]]: stack.pop() html.append('</ul>') stack.append(modname + '.') - relative_uri = urito(posixpath.join(OUTPUT_DIRNAME, 'index'), - posixpath.join(OUTPUT_DIRNAME, modname.replace('.', '/'))) + relative_uri = urito( + posixpath.join(OUTPUT_DIRNAME, 'index'), + posixpath.join(OUTPUT_DIRNAME, modname.replace('.', '/')), + ) html.append(f'<li><a href="{relative_uri}">{modname}</a></li>\n') html.append('</ul>' * (len(stack) - 1)) context = { 'title': _('Overview: module code'), - 'body': (_('<h1>All modules for which code is available</h1>
    ') + - ''.join(html)), + 'body': (_('

<h1>All modules for which code is available</h1>
    ') + ''.join(html)), } yield posixpath.join(OUTPUT_DIRNAME, 'index'), context, 'page.html' diff --git a/sphinx/highlighting.py b/sphinx/highlighting.py index c7ae156689d..f49c884ef71 100644 --- a/sphinx/highlighting.py +++ b/sphinx/highlighting.py @@ -31,8 +31,8 @@ from pygments.lexer import Lexer from pygments.style import Style -if tuple(map(int, pygments.__version__.split('.')))[:2] < (2, 18): - from pygments.formatter import Formatter # NoQA: F811 +if tuple(map(int, pygments.__version__.split('.')[:2])) < (2, 18): + from pygments.formatter import Formatter Formatter.__class_getitem__ = classmethod(lambda cls, name: cls) # type: ignore[attr-defined] diff --git a/sphinx/locale/__init__.py b/sphinx/locale/__init__.py index 4379ea10367..0fce1dd5d6a 100644 --- a/sphinx/locale/__init__.py +++ b/sphinx/locale/__init__.py @@ -207,7 +207,7 @@ def setup(app): def gettext(message: str) -> str: if not is_translator_registered(catalog, namespace): # not initialized yet - return _TranslationProxy(catalog, namespace, message) # type: ignore[return-value] # NoQA: E501 + return _TranslationProxy(catalog, namespace, message) # type: ignore[return-value] else: translator = get_translator(catalog, namespace) return translator.gettext(message) diff --git a/sphinx/pycode/ast.py b/sphinx/pycode/ast.py index f6bcbda9759..ffb1a4e9c26 100644 --- a/sphinx/pycode/ast.py +++ b/sphinx/pycode/ast.py @@ -29,11 +29,11 @@ @overload -def unparse(node: None, code: str = '') -> None: ... # NoQA: E704 +def unparse(node: None, code: str = '') -> None: ... @overload -def unparse(node: ast.AST, code: str = '') -> str: ... # NoQA: E704 +def unparse(node: ast.AST, code: str = '') -> str: ... def unparse(node: ast.AST | None, code: str = '') -> str | None: diff --git a/sphinx/registry.py b/sphinx/registry.py index b4c2478653b..49b121f826a 100644 --- a/sphinx/registry.py +++ b/sphinx/registry.py @@ -48,9 +48,9 @@ # list of deprecated extensions. Keys are extension name. # Values are Sphinx version that merge the extension. 
EXTENSION_BLACKLIST = { - "sphinxjp.themecore": "1.2", + 'sphinxjp.themecore': '1.2', 'sphinxcontrib-napoleon': '1.3', - "sphinxprettysearchresults": "2.0.0", + 'sphinxprettysearchresults': '2.0.0', } @@ -93,10 +93,10 @@ def __init__(self) -> None: #: HTML inline and block math renderers #: a dict of name -> tuple of visit function and depart function - self.html_inline_math_renderers: dict[str, - tuple[Callable, Callable | None]] = {} - self.html_block_math_renderers: dict[str, - tuple[Callable, Callable | None]] = {} + self.html_inline_math_renderers: dict[ + str, tuple[Callable, Callable | None] + ] = {} + self.html_block_math_renderers: dict[str, tuple[Callable, Callable | None]] = {} #: HTML assets self.html_assets_policy: str = 'per_page' @@ -126,7 +126,9 @@ def __init__(self) -> None: #: custom handlers for translators #: a dict of builder name -> dict of node name -> visitor and departure functions - self.translation_handlers: dict[str, dict[str, tuple[Callable, Callable | None]]] = {} + self.translation_handlers: dict[ + str, dict[str, tuple[Callable, Callable | None]] + ] = {} #: additional transforms; list of transforms self.transforms: list[type[Transform]] = [] @@ -141,10 +143,14 @@ def autodoc_attrgettrs(self) -> dict[type, Callable[[Any, str, Any], Any]]: def add_builder(self, builder: type[Builder], override: bool = False) -> None: logger.debug('[app] adding builder: %r', builder) if not hasattr(builder, 'name'): - raise ExtensionError(__('Builder class %s has no "name" attribute') % builder) + raise ExtensionError( + __('Builder class %s has no "name" attribute') % builder + ) if builder.name in self.builders and not override: - raise ExtensionError(__('Builder %r already exists (in module %s)') % - (builder.name, self.builders[builder.name].__module__)) + raise ExtensionError( + __('Builder %r already exists (in module %s)') + % (builder.name, self.builders[builder.name].__module__) + ) self.builders[builder.name] = builder def preload_builder(self, app: Sphinx, name: str) -> None: @@ -156,8 +162,13 @@ def preload_builder(self, app: Sphinx, name: str) -> None: try: entry_point = builder_entry_points[name] except KeyError as exc: - raise SphinxError(__('Builder name %s not registered or available' - ' through entry point') % name) from exc + raise SphinxError( + __( + 'Builder name %s not registered or available' + ' through entry point' + ) + % name + ) from exc self.load_extension(app, entry_point.module) @@ -189,39 +200,52 @@ def create_domains(self, env: BuildEnvironment) -> Iterator[Domain]: yield domain - def add_directive_to_domain(self, domain: str, name: str, - cls: type[Directive], override: bool = False) -> None: + def add_directive_to_domain( + self, domain: str, name: str, cls: type[Directive], override: bool = False + ) -> None: logger.debug('[app] adding directive to domain: %r', (domain, name, cls)) if domain not in self.domains: raise ExtensionError(__('domain %s not yet registered') % domain) - directives: dict[str, type[Directive]] = self.domain_directives.setdefault(domain, {}) + directives: dict[str, type[Directive]] = self.domain_directives.setdefault( + domain, {} + ) if name in directives and not override: - raise ExtensionError(__('The %r directive is already registered to domain %s') % - (name, domain)) + raise ExtensionError( + __('The %r directive is already registered to domain %s') + % (name, domain) + ) directives[name] = cls - def add_role_to_domain(self, domain: str, name: str, - role: RoleFunction | XRefRole, override: bool = False, - ) -> 
None: + def add_role_to_domain( + self, + domain: str, + name: str, + role: RoleFunction | XRefRole, + override: bool = False, + ) -> None: logger.debug('[app] adding role to domain: %r', (domain, name, role)) if domain not in self.domains: raise ExtensionError(__('domain %s not yet registered') % domain) roles = self.domain_roles.setdefault(domain, {}) if name in roles and not override: - raise ExtensionError(__('The %r role is already registered to domain %s') % - (name, domain)) + raise ExtensionError( + __('The %r role is already registered to domain %s') % (name, domain) + ) roles[name] = role - def add_index_to_domain(self, domain: str, index: type[Index], - override: bool = False) -> None: + def add_index_to_domain( + self, domain: str, index: type[Index], override: bool = False + ) -> None: logger.debug('[app] adding index to domain: %r', (domain, index)) if domain not in self.domains: raise ExtensionError(__('domain %s not yet registered') % domain) indices = self.domain_indices.setdefault(domain, []) if index in indices and not override: - raise ExtensionError(__('The %r index is already registered to domain %s') % - (index.name, domain)) + raise ExtensionError( + __('The %r index is already registered to domain %s') + % (index.name, domain) + ) indices.append(index) def add_object_type( @@ -235,24 +259,38 @@ def add_object_type( doc_field_types: Sequence = (), override: bool = False, ) -> None: - logger.debug('[app] adding object type: %r', - (directivename, rolename, indextemplate, parse_node, - ref_nodeclass, objname, doc_field_types)) + logger.debug( + '[app] adding object type: %r', + ( + directivename, + rolename, + indextemplate, + parse_node, + ref_nodeclass, + objname, + doc_field_types, + ), + ) # create a subclass of GenericObject as the new directive - directive = type(directivename, - (GenericObject, object), - {'indextemplate': indextemplate, - 'parse_node': parse_node and staticmethod(parse_node), - 'doc_field_types': doc_field_types}) + directive = type( + directivename, + (GenericObject, object), + { + 'indextemplate': indextemplate, + 'parse_node': parse_node and staticmethod(parse_node), + 'doc_field_types': doc_field_types, + }, + ) self.add_directive_to_domain('std', directivename, directive) self.add_role_to_domain('std', rolename, XRefRole(innernodeclass=ref_nodeclass)) object_types = self.domain_object_types.setdefault('std', {}) if directivename in object_types and not override: - raise ExtensionError(__('The %r object_type is already registered') % - directivename) + raise ExtensionError( + __('The %r object_type is already registered') % directivename + ) object_types[directivename] = ObjType(objname or directivename, rolename) def add_crossref_type( @@ -264,24 +302,31 @@ def add_crossref_type( objname: str = '', override: bool = False, ) -> None: - logger.debug('[app] adding crossref type: %r', - (directivename, rolename, indextemplate, ref_nodeclass, objname)) + logger.debug( + '[app] adding crossref type: %r', + (directivename, rolename, indextemplate, ref_nodeclass, objname), + ) # create a subclass of Target as the new directive - directive = type(directivename, - (Target, object), - {'indextemplate': indextemplate}) + directive = type( + directivename, + (Target, object), + {'indextemplate': indextemplate}, + ) self.add_directive_to_domain('std', directivename, directive) self.add_role_to_domain('std', rolename, XRefRole(innernodeclass=ref_nodeclass)) object_types = self.domain_object_types.setdefault('std', {}) if directivename in object_types 
and not override: - raise ExtensionError(__('The %r crossref_type is already registered') % - directivename) + raise ExtensionError( + __('The %r crossref_type is already registered') % directivename + ) object_types[directivename] = ObjType(objname or directivename, rolename) - def add_source_suffix(self, suffix: str, filetype: str, override: bool = False) -> None: + def add_source_suffix( + self, suffix: str, filetype: str, override: bool = False + ) -> None: logger.debug('[app] adding source_suffix: %r, %r', suffix, filetype) if suffix in self.source_suffix and not override: raise ExtensionError(__('source_suffix %r is already registered') % suffix) @@ -293,15 +338,18 @@ def add_source_parser(self, parser: type[Parser], override: bool = False) -> Non # create a map from filetype to parser for filetype in parser.supported: if filetype in self.source_parsers and not override: - raise ExtensionError(__('source_parser for %r is already registered') % - filetype) + raise ExtensionError( + __('source_parser for %r is already registered') % filetype + ) self.source_parsers[filetype] = parser def get_source_parser(self, filetype: str) -> type[Parser]: try: return self.source_parsers[filetype] except KeyError as exc: - raise SphinxError(__('Source parser for %s not registered') % filetype) from exc + raise SphinxError( + __('Source parser for %s not registered') % filetype + ) from exc def get_source_parsers(self) -> dict[str, type[Parser]]: return self.source_parsers @@ -313,8 +361,9 @@ def create_source_parser(self, app: Sphinx, filename: str) -> Parser: parser.set_application(app) return parser - def add_translator(self, name: str, translator: type[nodes.NodeVisitor], - override: bool = False) -> None: + def add_translator( + self, name: str, translator: type[nodes.NodeVisitor], override: bool = False + ) -> None: logger.debug('[app] Change of translator for the %s builder.', name) if name in self.translators and not override: raise ExtensionError(__('Translator for %r already exists') % name) @@ -327,14 +376,19 @@ def add_translation_handlers( ) -> None: logger.debug('[app] adding translation_handlers: %r, %r', node, kwargs) for builder_name, handlers in kwargs.items(): - translation_handlers = self.translation_handlers.setdefault(builder_name, {}) + translation_handlers = self.translation_handlers.setdefault( + builder_name, {} + ) try: visit, depart = handlers # unpack once for assertion translation_handlers[node.__name__] = (visit, depart) except ValueError as exc: raise ExtensionError( - __('kwargs for add_node() must be a (visit, depart) ' - 'function tuple: %r=%r') % (builder_name, handlers), + __( + 'kwargs for add_node() must be a (visit, depart) ' + 'function tuple: %r=%r' + ) + % (builder_name, handlers), ) from exc def get_translator_class(self, builder: Builder) -> type[nodes.NodeVisitor]: @@ -381,8 +435,9 @@ def get_post_transforms(self) -> list[type[Transform]]: def add_documenter(self, objtype: str, documenter: type[Documenter]) -> None: self.documenters[objtype] = documenter - def add_autodoc_attrgetter(self, typ: type, - attrgetter: Callable[[Any, str, Any], Any]) -> None: + def add_autodoc_attrgetter( + self, typ: type, attrgetter: Callable[[Any, str, Any], Any] + ) -> None: self.autodoc_attrgetters[typ] = attrgetter def add_css_files(self, filename: str, **attributes: Any) -> None: @@ -397,7 +452,7 @@ def has_latex_package(self, name: str) -> bool: return bool([x for x in packages if x[0] == name]) def add_latex_package( - self, name: str, options: str | None, 
after_hyperref: bool = False, + self, name: str, options: str | None, after_hyperref: bool = False ) -> None: if self.has_latex_package(name): logger.warning("latex package '%s' already included", name) @@ -412,9 +467,12 @@ def add_enumerable_node( self, node: type[Node], figtype: str, - title_getter: TitleGetter | None = None, override: bool = False, + title_getter: TitleGetter | None = None, + override: bool = False, ) -> None: - logger.debug('[app] adding enumerable node: (%r, %r, %r)', node, figtype, title_getter) + logger.debug( + '[app] adding enumerable node: (%r, %r, %r)', node, figtype, title_getter + ) if node in self.enumerable_nodes and not override: raise ExtensionError(__('enumerable_node %r already registered') % node) self.enumerable_nodes[node] = (figtype, title_getter) @@ -425,8 +483,12 @@ def add_html_math_renderer( inline_renderers: tuple[Callable, Callable | None] | None, block_renderers: tuple[Callable, Callable | None] | None, ) -> None: - logger.debug('[app] adding html_math_renderer: %s, %r, %r', - name, inline_renderers, block_renderers) + logger.debug( + '[app] adding html_math_renderer: %s, %r, %r', + name, + inline_renderers, + block_renderers, + ) if name in self.html_inline_math_renderers: raise ExtensionError(__('math renderer %s is already registered') % name) @@ -443,9 +505,14 @@ def load_extension(self, app: Sphinx, extname: str) -> None: if extname in app.extensions: # already loaded return if extname in EXTENSION_BLACKLIST: - logger.warning(__('the extension %r was already merged with Sphinx since ' - 'version %s; this extension is ignored.'), - extname, EXTENSION_BLACKLIST[extname]) + logger.warning( + __( + 'the extension %r was already merged with Sphinx since ' + 'version %s; this extension is ignored.' + ), + extname, + EXTENSION_BLACKLIST[extname], + ) return # update loading context @@ -455,13 +522,19 @@ def load_extension(self, app: Sphinx, extname: str) -> None: mod = import_module(extname) except ImportError as err: logger.verbose(__('Original exception:\n') + traceback.format_exc()) - raise ExtensionError(__('Could not import extension %s') % extname, - err) from err + raise ExtensionError( + __('Could not import extension %s') % extname, err + ) from err setup: _ExtensionSetupFunc | None = getattr(mod, 'setup', None) if setup is None: - logger.warning(__('extension %r has no setup() function; is it really ' - 'a Sphinx extension module?'), extname) + logger.warning( + __( + 'extension %r has no setup() function; is it really ' + 'a Sphinx extension module?' + ), + extname, + ) metadata: ExtensionMetadata = {} else: try: @@ -469,25 +542,37 @@ def load_extension(self, app: Sphinx, extname: str) -> None: except VersionRequirementError as err: # add the extension name to the version required raise VersionRequirementError( - __('The %s extension used by this project needs at least ' - 'Sphinx v%s; it therefore cannot be built with this ' - 'version.') % (extname, err), + __( + 'The %s extension used by this project needs at least ' + 'Sphinx v%s; it therefore cannot be built with this ' + 'version.' 
+ ) + % (extname, err), ) from err if metadata is None: metadata = {} elif not isinstance(metadata, dict): - logger.warning(__('extension %r returned an unsupported object from ' - 'its setup() function; it should return None or a ' - 'metadata dictionary'), extname) + logger.warning( + __( + 'extension %r returned an unsupported object from ' + 'its setup() function; it should return None or a ' + 'metadata dictionary' + ), + extname, + ) metadata = {} app.extensions[extname] = Extension(extname, mod, **metadata) def get_envversion(self, app: Sphinx) -> dict[str, int]: from sphinx.environment import ENV_VERSION - envversion = {ext.name: ext.metadata['env_version'] for ext in app.extensions.values() - if ext.metadata.get('env_version')} + + envversion = { + ext.name: ext.metadata['env_version'] + for ext in app.extensions.values() + if ext.metadata.get('env_version') + } envversion['sphinx'] = ENV_VERSION return envversion @@ -504,7 +589,7 @@ def get_publisher(self, app: Sphinx, filetype: str) -> Publisher: def merge_source_suffix(app: Sphinx, config: Config) -> None: """Merge any user-specified source_suffix with any added by extensions.""" for suffix, filetype in app.registry.source_suffix.items(): - if suffix not in app.config.source_suffix: # NoQA: SIM114 + if suffix not in app.config.source_suffix: app.config.source_suffix[suffix] = filetype elif app.config.source_suffix[suffix] == 'restructuredtext': # The filetype is not specified (default filetype). diff --git a/sphinx/search/__init__.py b/sphinx/search/__init__.py index 3f19d3663a0..683df0b6bbf 100644 --- a/sphinx/search/__init__.py +++ b/sphinx/search/__init__.py @@ -272,7 +272,8 @@ class IndexBuilder: def __init__( self, env: BuildEnvironment, lang: str, options: dict[str, str], scoring: str ) -> None: - self.env = env + self._domains = env.domains + self._env_version = env.version # docname -> title self._titles: dict[str, str | None] = env._search_index_titles # docname -> filename @@ -323,7 +324,10 @@ def load(self, stream: IO, format: Any) -> None: format = self.formats[format] frozen = format.load(stream) # if an old index is present, we treat it as not existing. 
- if not isinstance(frozen, dict) or frozen.get('envversion') != self.env.version: + if ( + not isinstance(frozen, dict) + or frozen.get('envversion') != self._env_version + ): msg = 'old format' raise ValueError(msg) index2fn = frozen['docnames'] @@ -362,7 +366,7 @@ def get_objects( rv: dict[str, list[tuple[int, int, int, str, str]]] = {} otypes = self._objtypes onames = self._objnames - for domain in self.env.domains.sorted(): + for domain in self._domains.sorted(): sorted_objects = sorted(domain.get_objects()) for fullname, dispname, type, docname, anchor, prio in sorted_objects: if docname not in fn2index: @@ -452,7 +456,7 @@ def freeze(self) -> dict[str, Any]: 'objtypes': objtypes, 'objnames': objnames, 'titleterms': title_terms, - 'envversion': self.env.version, + 'envversion': self._env_version, 'alltitles': alltitles, 'indexentries': index_entries, } diff --git a/sphinx/search/zh.py b/sphinx/search/zh.py index 342b566512a..59c575b4adb 100644 --- a/sphinx/search/zh.py +++ b/sphinx/search/zh.py @@ -260,7 +260,7 @@ def stem(self, word: str) -> str: stemmed = self.stemmer.stemWord(word.lower()) should_not_be_stemmed = ( len(word) >= 3 > len(stemmed) and word in self.latin_terms - ) # fmt: skip + ) if should_not_be_stemmed: return word.lower() return stemmed diff --git a/sphinx/templates/htmlhelp/project.stp b/sphinx/templates/htmlhelp/project.stp index bae1f90c4e4..16c49b01daa 100644 --- a/sphinx/templates/htmlhelp/project.stp +++ b/sphinx/templates/htmlhelp/project.stp @@ -1,33 +1,33 @@ -a -and -are -as -at -be -but -by -for -if -in -into -is -it -near -no -not -of -on -or -such -that -the -their -then -there -these -they -this -to -was -will -with +a +and +are +as +at +be +but +by +for +if +in +into +is +it +near +no +not +of +on +or +such +that +the +their +then +there +these +they +this +to +was +will +with diff --git a/sphinx/testing/fixtures.py b/sphinx/testing/fixtures.py index 2bf7a76594f..2e09c1ea896 100644 --- a/sphinx/testing/fixtures.py +++ b/sphinx/testing/fixtures.py @@ -222,7 +222,7 @@ def _shared_result_cache() -> None: @pytest.fixture -def if_graphviz_found(app: SphinxTestApp) -> None: # NoQA: PT004 +def if_graphviz_found(app: SphinxTestApp) -> None: """ The test will be skipped when using 'if_graphviz_found' fixture and graphviz dot command is not found. @@ -246,7 +246,7 @@ def sphinx_test_tempdir(tmp_path_factory: pytest.TempPathFactory) -> Path: @pytest.fixture -def rollback_sysmodules() -> Iterator[None]: # NoQA: PT004 +def rollback_sysmodules() -> Iterator[None]: """ Rollback sys.modules to its value before testing to unload modules during tests. 
diff --git a/sphinx/testing/path.py b/sphinx/testing/path.py index 3419ccbc893..b41df34bcc0 100644 --- a/sphinx/testing/path.py +++ b/sphinx/testing/path.py @@ -157,7 +157,7 @@ def utime(self, arg: Any) -> None: os.utime(self, arg) def open(self, mode: str = 'r', **kwargs: Any) -> IO[str]: - return open(self, mode, **kwargs) # NoQA: SIM115 + return open(self, mode, **kwargs) def write_text(self, text: str, encoding: str = 'utf-8', **kwargs: Any) -> None: """ diff --git a/sphinx/testing/restructuredtext.py b/sphinx/testing/restructuredtext.py index 7231c464c72..b04b61a4021 100644 --- a/sphinx/testing/restructuredtext.py +++ b/sphinx/testing/restructuredtext.py @@ -16,20 +16,21 @@ def parse(app: Sphinx, text: str, docname: str = 'index') -> nodes.document: """Parse a string as reStructuredText with Sphinx application.""" + env = app.env try: - app.env.temp_data['docname'] = docname + app.env.current_document.docname = docname reader = SphinxStandaloneReader() reader.setup(app) parser = RSTParser() parser.set_application(app) - with sphinx_domains(app.env): + with sphinx_domains(env): return publish_doctree( text, str(app.srcdir / f'{docname}.rst'), reader=reader, parser=parser, settings_overrides={ - 'env': app.env, + 'env': env, 'gettext_compact': True, 'input_encoding': 'utf-8', 'output_encoding': 'unicode', @@ -37,4 +38,4 @@ def parse(app: Sphinx, text: str, docname: str = 'index') -> nodes.document: }, ) finally: - app.env.temp_data.pop('docname', None) + env.current_document.docname = '' diff --git a/sphinx/themes/basic/static/searchtools.js b/sphinx/themes/basic/static/searchtools.js index aaf078d2b91..91f4be57fc8 100644 --- a/sphinx/themes/basic/static/searchtools.js +++ b/sphinx/themes/basic/static/searchtools.js @@ -513,9 +513,11 @@ const Search = { // perform the search on the required terms searchTerms.forEach((word) => { const files = []; + // find documents, if any, containing the query word in their text/title term indices + // use Object.hasOwnProperty to avoid mismatching against prototype properties const arr = [ - { files: terms[word], score: Scorer.term }, - { files: titleTerms[word], score: Scorer.title }, + { files: terms.hasOwnProperty(word) ? terms[word] : undefined, score: Scorer.term }, + { files: titleTerms.hasOwnProperty(word) ? titleTerms[word] : undefined, score: Scorer.title }, ]; // add support for partial matches if (word.length > 2) { diff --git a/sphinx/transforms/post_transforms/__init__.py b/sphinx/transforms/post_transforms/__init__.py index de880c9c27f..011b3081356 100644 --- a/sphinx/transforms/post_transforms/__init__.py +++ b/sphinx/transforms/post_transforms/__init__.py @@ -277,7 +277,7 @@ def run(self, **kwargs: Any) -> None: # result in a "Losing ids" exception if there is a target node before # the only node, so we make sure docutils can transfer the id to # something, even if it's just a comment and will lose the id anyway... 
- process_only_nodes(self.document, self.app.builder.tags) + process_only_nodes(self.document, self.app.tags) class SigElementFallbackTransform(SphinxPostTransform): diff --git a/sphinx/transforms/post_transforms/images.py b/sphinx/transforms/post_transforms/images.py index 7040952ac14..6a2da874a28 100644 --- a/sphinx/transforms/post_transforms/images.py +++ b/sphinx/transforms/post_transforms/images.py @@ -83,7 +83,7 @@ def _download_image(self, node: nodes.image, path: Path) -> None: timestamp: float = ceil(path.stat().st_mtime) headers['If-Modified-Since'] = epoch_to_rfc1123(timestamp) - config = self.app.config + config = self.config r = requests.get( node['uri'], headers=headers, @@ -94,7 +94,7 @@ def _download_image(self, node: nodes.image, path: Path) -> None: msg = __('Could not fetch remote image: %s [%d]') logger.warning(msg, node['uri'], r.status_code) else: - self.app.env.original_image_uri[_StrPath(path)] = node['uri'] + self.env.original_image_uri[_StrPath(path)] = node['uri'] if r.status_code == 200: path.write_bytes(r.content) @@ -106,7 +106,7 @@ def _download_image(self, node: nodes.image, path: Path) -> None: def _process_image(self, node: nodes.image, path: Path) -> None: str_path = _StrPath(path) - self.app.env.original_image_uri[str_path] = node['uri'] + self.env.original_image_uri[str_path] = node['uri'] mimetype = guess_mimetype(path, default='*') if mimetype != '*' and not path.suffix: @@ -114,14 +114,14 @@ def _process_image(self, node: nodes.image, path: Path) -> None: ext = get_image_extension(mimetype) or '' with_ext = path.with_name(path.name + ext) os.replace(path, with_ext) - self.app.env.original_image_uri.pop(str_path) - self.app.env.original_image_uri[_StrPath(with_ext)] = node['uri'] + self.env.original_image_uri.pop(str_path) + self.env.original_image_uri[_StrPath(with_ext)] = node['uri'] path = with_ext path_str = str(path) node['candidates'].pop('?') node['candidates'][mimetype] = path_str node['uri'] = path_str - self.app.env.images.add_file(self.env.docname, path_str) + self.env.images.add_file(self.env.docname, path_str) class DataURIExtractor(BaseImageConverter): @@ -145,7 +145,7 @@ def handle(self, node: nodes.image) -> None: ensuredir(os.path.join(self.imagedir, 'embeded')) digest = sha1(image.data, usedforsecurity=False).hexdigest() path = _StrPath(self.imagedir, 'embeded', digest + ext) - self.app.env.original_image_uri[path] = node['uri'] + self.env.original_image_uri[path] = node['uri'] with open(path, 'wb') as f: f.write(image.data) @@ -154,7 +154,7 @@ def handle(self, node: nodes.image) -> None: node['candidates'].pop('?') node['candidates'][image.mimetype] = path_str node['uri'] = path_str - self.app.env.images.add_file(self.env.docname, path_str) + self.env.images.add_file(self.env.docname, path_str) def get_filename_for(filename: str, mimetype: str) -> str: diff --git a/sphinx/util/_files.py b/sphinx/util/_files.py index 85f63359e98..97233ec260c 100644 --- a/sphinx/util/_files.py +++ b/sphinx/util/_files.py @@ -2,7 +2,10 @@ import hashlib import os.path -from typing import Any +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from collections.abc import Set class FilenameUniqDict(dict[str, tuple[set[str], str]]): @@ -37,7 +40,7 @@ def purge_doc(self, docname: str) -> None: self._existing.discard(unique) def merge_other( - self, docnames: set[str], other: dict[str, tuple[set[str], Any]] + self, docnames: Set[str], other: dict[str, tuple[set[str], Any]] ) -> None: for filename, (docs, _unique) in other.items(): for doc in docs 
& set(docnames): @@ -73,7 +76,7 @@ def purge_doc(self, docname: str) -> None: del self[filename] def merge_other( - self, docnames: set[str], other: dict[str, tuple[set[str], Any]] + self, docnames: Set[str], other: dict[str, tuple[set[str], Any]] ) -> None: for filename, (docs, _dest) in other.items(): for docname in docs & set(docnames): diff --git a/sphinx/util/_inventory_file_reader.py b/sphinx/util/_inventory_file_reader.py new file mode 100644 index 00000000000..d19faa87ea9 --- /dev/null +++ b/sphinx/util/_inventory_file_reader.py @@ -0,0 +1,76 @@ +from __future__ import annotations + +import zlib +from typing import TYPE_CHECKING + +from sphinx.util import logging + +BUFSIZE = 16 * 1024 +logger = logging.getLogger(__name__) + +if TYPE_CHECKING: + from collections.abc import Iterator + from typing import Protocol + + # Readable file stream for inventory loading + class _SupportsRead(Protocol): + def read(self, size: int = ...) -> bytes: ... + + +__all__ = ('InventoryFileReader',) + + +class InventoryFileReader: + """A file reader for an inventory file. + + This reader supports mixture of texts and compressed texts. + """ + + def __init__(self, stream: _SupportsRead) -> None: + self.stream = stream + self.buffer = b'' + self.eof = False + + def read_buffer(self) -> None: + chunk = self.stream.read(BUFSIZE) + if chunk == b'': + self.eof = True + self.buffer += chunk + + def readline(self) -> str: + pos = self.buffer.find(b'\n') + if pos != -1: + line = self.buffer[:pos].decode() + self.buffer = self.buffer[pos + 1 :] + elif self.eof: + line = self.buffer.decode() + self.buffer = b'' + else: + self.read_buffer() + line = self.readline() + + return line + + def readlines(self) -> Iterator[str]: + while not self.eof: + line = self.readline() + if line: + yield line + + def read_compressed_chunks(self) -> Iterator[bytes]: + decompressor = zlib.decompressobj() + while not self.eof: + self.read_buffer() + yield decompressor.decompress(self.buffer) + self.buffer = b'' + yield decompressor.flush() + + def read_compressed_lines(self) -> Iterator[str]: + buf = b'' + for chunk in self.read_compressed_chunks(): + buf += chunk + pos = buf.find(b'\n') + while pos != -1: + yield buf[:pos].decode() + buf = buf[pos + 1 :] + pos = buf.find(b'\n') diff --git a/sphinx/util/_io.py b/sphinx/util/_io.py index 9a36097bcf4..b85f49ded26 100644 --- a/sphinx/util/_io.py +++ b/sphinx/util/_io.py @@ -8,7 +8,7 @@ from typing import Protocol class SupportsWrite(Protocol): - def write(self, text: str, /) -> int | None: ... # NoQA: E704 + def write(self, text: str, /) -> int | None: ... class TeeStripANSI: diff --git a/sphinx/util/_pathlib.py b/sphinx/util/_pathlib.py index 1f3209cef48..079615a389d 100644 --- a/sphinx/util/_pathlib.py +++ b/sphinx/util/_pathlib.py @@ -143,10 +143,10 @@ def __set_name__(self, owner: object, name: str) -> None: self.instance_attr = f'_{name}' # i.e. '_srcdir' @overload - def __get__(self, obj: None, objtype: None) -> _StrPathProperty: ... # NoQA: E704 + def __get__(self, obj: None, objtype: None) -> _StrPathProperty: ... @overload - def __get__(self, obj: object, objtype: type[object]) -> _StrPath: ... # NoQA: E704 + def __get__(self, obj: object, objtype: type[object]) -> _StrPath: ... 
def __get__( self, obj: object | None, objtype: type[object] | None = None diff --git a/sphinx/util/cfamily.py b/sphinx/util/cfamily.py index 05b5b89222f..b2350a73940 100644 --- a/sphinx/util/cfamily.py +++ b/sphinx/util/cfamily.py @@ -4,7 +4,7 @@ import re from copy import deepcopy -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, NoReturn from docutils import nodes @@ -33,7 +33,7 @@ | (@[a-zA-Z0-9_]) # our extension for names of anonymous entities ) [a-zA-Z0-9_]*\b -""", + """, flags=re.VERBOSE, ) integer_literal_re = re.compile(r'[1-9][0-9]*(\'[0-9]+)*') @@ -50,7 +50,7 @@ )\b # the ending word boundary is important for distinguishing # between suffixes and UDLs in C++ -""", + """, flags=re.VERBOSE, ) float_literal_re = re.compile( @@ -66,7 +66,7 @@ [0-9a-fA-F]+(\'[0-9a-fA-F]+)*([pP][+-]?[0-9a-fA-F]+(\'[0-9a-fA-F]+)*)?) | (0[xX][0-9a-fA-F]+(\'[0-9a-fA-F]+)*\.([pP][+-]?[0-9a-fA-F]+(\'[0-9a-fA-F]+)*)?) ) -""", + """, flags=re.VERBOSE, ) float_literal_suffix_re = re.compile(r'[fFlL]\b') @@ -84,7 +84,7 @@ | (?:U[0-9a-fA-F]{8}) )) )' -""", + """, flags=re.VERBOSE, ) @@ -341,7 +341,7 @@ def status(self, msg: str) -> None: indicator = '-' * self.pos + '^' logger.debug(f'{msg}\n{self.definition}\n{indicator}') # NoQA: G004 - def fail(self, msg: str) -> None: + def fail(self, msg: str) -> NoReturn: errors = [] indicator = '-' * self.pos + '^' msg = ( @@ -436,7 +436,7 @@ def paren_attributes(self) -> Sequence[str]: def _parse_balanced_token_seq(self, end: list[str]) -> str: # TODO: add handling of string literals and similar brackets = {'(': ')', '[': ']', '{': '}'} - startPos = self.pos + start_pos = self.pos symbols: list[str] = [] while not self.eof: if len(symbols) == 0 and self.current_char in end: @@ -450,17 +450,17 @@ def _parse_balanced_token_seq(self, end: list[str]) -> str: self.pos += 1 if self.eof: self.fail( - f'Could not find end of balanced-token-seq starting at {startPos}.' + f'Could not find end of balanced-token-seq starting at {start_pos}.' ) - return self.definition[startPos : self.pos] + return self.definition[start_pos : self.pos] def _parse_attribute(self) -> ASTAttribute | None: self.skip_ws() # try C++11 style - startPos = self.pos + start_pos = self.pos if self.skip_string_and_ws('['): if not self.skip_string('['): - self.pos = startPos + self.pos = start_pos else: # TODO: actually implement the correct grammar arg = self._parse_balanced_token_seq(end=[']']) diff --git a/sphinx/util/console.py b/sphinx/util/console.py index 86e4223782a..327dc7afb45 100644 --- a/sphinx/util/console.py +++ b/sphinx/util/console.py @@ -12,30 +12,30 @@ from typing import Final # fmt: off - def reset(text: str) -> str: ... # NoQA: E704 - def bold(text: str) -> str: ... # NoQA: E704 - def faint(text: str) -> str: ... # NoQA: E704 - def standout(text: str) -> str: ... # NoQA: E704 - def underline(text: str) -> str: ... # NoQA: E704 - def blink(text: str) -> str: ... # NoQA: E704 - - def black(text: str) -> str: ... # NoQA: E704 - def white(text: str) -> str: ... # NoQA: E704 - def red(text: str) -> str: ... # NoQA: E704 - def green(text: str) -> str: ... # NoQA: E704 - def yellow(text: str) -> str: ... # NoQA: E704 - def blue(text: str) -> str: ... # NoQA: E704 - def fuchsia(text: str) -> str: ... # NoQA: E704 - def teal(text: str) -> str: ... # NoQA: E704 - - def darkgray(text: str) -> str: ... # NoQA: E704 - def lightgray(text: str) -> str: ... # NoQA: E704 - def darkred(text: str) -> str: ... # NoQA: E704 - def darkgreen(text: str) -> str: ... 
# NoQA: E704 - def brown(text: str) -> str: ... # NoQA: E704 - def darkblue(text: str) -> str: ... # NoQA: E704 - def purple(text: str) -> str: ... # NoQA: E704 - def turquoise(text: str) -> str: ... # NoQA: E704 + def reset(text: str) -> str: ... + def bold(text: str) -> str: ... + def faint(text: str) -> str: ... + def standout(text: str) -> str: ... + def underline(text: str) -> str: ... + def blink(text: str) -> str: ... + + def black(text: str) -> str: ... + def white(text: str) -> str: ... + def red(text: str) -> str: ... + def green(text: str) -> str: ... + def yellow(text: str) -> str: ... + def blue(text: str) -> str: ... + def fuchsia(text: str) -> str: ... + def teal(text: str) -> str: ... + + def darkgray(text: str) -> str: ... + def lightgray(text: str) -> str: ... + def darkred(text: str) -> str: ... + def darkgreen(text: str) -> str: ... + def brown(text: str) -> str: ... + def darkblue(text: str) -> str: ... + def purple(text: str) -> str: ... + def turquoise(text: str) -> str: ... # fmt: on try: diff --git a/sphinx/util/docfields.py b/sphinx/util/docfields.py index 1bfcd7451e4..2f14de03ffa 100644 --- a/sphinx/util/docfields.py +++ b/sphinx/util/docfields.py @@ -410,7 +410,7 @@ def transform(self, node: nodes.field_list) -> None: self.directive.domain or '', target, contnode=content[0], - env=self.directive.state.document.settings.env, + env=self.directive.env, ) if _is_single_paragraph(field_body): paragraph = cast('nodes.paragraph', field_body[0]) @@ -477,7 +477,7 @@ def transform(self, node: nodes.field_list) -> None: else: fieldtype, items, location = entry fieldtypes = types.get(fieldtype.name, {}) - env = self.directive.state.document.settings.env + env = self.directive.env inliner = self.directive.state.inliner domain = self.directive.domain or '' new_list += fieldtype.make_field( diff --git a/sphinx/util/docutils.py b/sphinx/util/docutils.py index ed8ebdb3544..0c91f4df195 100644 --- a/sphinx/util/docutils.py +++ b/sphinx/util/docutils.py @@ -19,7 +19,7 @@ from docutils.utils import Reporter, unescape from sphinx.errors import SphinxError -from sphinx.locale import _, __ +from sphinx.locale import __ from sphinx.util import logging from sphinx.util.parsing import nested_parse_to_nodes @@ -29,7 +29,7 @@ ) if TYPE_CHECKING: - from collections.abc import Callable, Iterator # NoQA: TC003 + from collections.abc import Callable, Iterator from types import ModuleType, TracebackType from docutils.frontend import Values @@ -263,51 +263,44 @@ class sphinx_domains(CustomReSTDispatcher): """ def __init__(self, env: BuildEnvironment) -> None: - self.env = env + self.domains = env.domains + self.current_document = env.current_document super().__init__() - def lookup_domain_element(self, type: str, name: str) -> Any: - """Lookup a markup element (directive or role), given its name which can - be a full name (with domain). - """ - name = name.lower() + def directive( + self, + directive_name: str, + language_module: ModuleType, + document: nodes.document, + ) -> tuple[type[Directive] | None, list[system_message]]: + """Lookup a directive, given its name which can include a domain.""" + directive_name = directive_name.lower() # explicit domain given? 
- if ':' in name: - domain_name, name = name.split(':', 1) - if domain_name in self.env.domains: - domain = self.env.get_domain(domain_name) - element = getattr(domain, type)(name) + if ':' in directive_name: + domain_name, _, name = directive_name.partition(':') + try: + domain = self.domains[domain_name] + except KeyError: + logger.warning(__('unknown directive name: %s'), directive_name) + else: + element = domain.directive(name) if element is not None: return element, [] - else: - logger.warning( - _('unknown directive or role name: %s:%s'), domain_name, name - ) # else look in the default domain else: - def_domain = self.env.temp_data.get('default_domain') - if def_domain is not None: - element = getattr(def_domain, type)(name) + name = directive_name + default_domain = self.current_document.default_domain + if default_domain is not None: + element = default_domain.directive(name) if element is not None: return element, [] # always look in the std domain - element = getattr(self.env.domains.standard_domain, type)(name) + element = self.domains.standard_domain.directive(name) if element is not None: return element, [] - raise ElementLookupError - - def directive( - self, - directive_name: str, - language_module: ModuleType, - document: nodes.document, - ) -> tuple[type[Directive] | None, list[system_message]]: - try: - return self.lookup_domain_element('directive', directive_name) - except ElementLookupError: - return super().directive(directive_name, language_module, document) + return super().directive(directive_name, language_module, document) def role( self, @@ -316,10 +309,34 @@ def role( lineno: int, reporter: Reporter, ) -> tuple[RoleFunction, list[system_message]]: - try: - return self.lookup_domain_element('role', role_name) - except ElementLookupError: - return super().role(role_name, language_module, lineno, reporter) + """Lookup a role, given its name which can include a domain.""" + role_name = role_name.lower() + # explicit domain given? 
+ if ':' in role_name: + domain_name, _, name = role_name.partition(':') + try: + domain = self.domains[domain_name] + except KeyError: + logger.warning(__('unknown role name: %s'), role_name) + else: + element = domain.role(name) + if element is not None: + return element, [] + # else look in the default domain + else: + name = role_name + default_domain = self.current_document.default_domain + if default_domain is not None: + element = default_domain.role(name) + if element is not None: + return element, [] + + # always look in the std domain + element = self.domains.standard_domain.role(name) + if element is not None: + return element, [] + + return super().role(role_name, language_module, lineno, reporter) class WarningStream: @@ -378,7 +395,7 @@ def switch_source_input(state: State, content: StringList) -> Iterator[None]: # replace it by new one state_machine: StateMachine[None] = StateMachine([], None) # type: ignore[arg-type] state_machine.input_lines = content - state.memo.reporter.get_source_and_line = state_machine.get_source_and_line # type: ignore[attr-defined] # NoQA: E501 + state.memo.reporter.get_source_and_line = state_machine.get_source_and_line # type: ignore[attr-defined] yield finally: @@ -403,9 +420,10 @@ def write(self, data: str) -> str: and os.path.exists(self.destination_path) ): with open(self.destination_path, encoding=self.encoding) as f: - # skip writing: content not changed - if f.read() == data: - return data + on_disk = f.read() + # skip writing: content not changed + if on_disk == data: + return data return super().write(data) @@ -587,7 +605,7 @@ def __call__( if name: self.name = name.lower() else: - self.name = self.env.temp_data.get('default_role', '') + self.name = self.env.current_document.default_role if not self.name: self.name = self.env.config.default_role if not self.name: @@ -708,6 +726,7 @@ def __init__(self, document: nodes.document, builder: Builder) -> None: self.builder = builder self.config = builder.config self.settings = document.settings + self._domains = builder.env.domains def dispatch_visit(self, node: Node) -> None: """ diff --git a/sphinx/util/exceptions.py b/sphinx/util/exceptions.py index f12732558b5..c25a9ac7fdd 100644 --- a/sphinx/util/exceptions.py +++ b/sphinx/util/exceptions.py @@ -2,65 +2,10 @@ import sys import traceback -from tempfile import NamedTemporaryFile -from typing import TYPE_CHECKING -from sphinx.errors import SphinxParallelError -from sphinx.util.console import strip_escape_sequences +from sphinx._cli.util.errors import save_traceback -if TYPE_CHECKING: - from sphinx.application import Sphinx - - -def save_traceback(app: Sphinx | None, exc: BaseException) -> str: - """Save the given exception's traceback in a temporary file.""" - import platform - - import docutils - import jinja2 - import pygments - - import sphinx - - if isinstance(exc, SphinxParallelError): - exc_format = '(Error in parallel process)\n' + exc.traceback - else: - exc_format = traceback.format_exc() - - if app is None: - last_msgs = exts_list = '' - else: - extensions = app.extensions.values() - last_msgs = '\n'.join( - f'# {strip_escape_sequences(s).strip()}' for s in app.messagelog - ) - exts_list = '\n'.join( - f'# {ext.name} ({ext.version})' - for ext in extensions - if ext.version != 'builtin' - ) - - with NamedTemporaryFile( - 'w', encoding='utf-8', suffix='.log', prefix='sphinx-err-', delete=False - ) as f: - f.write(f"""\ -# Platform: {sys.platform}; ({platform.platform()}) -# Sphinx version: {sphinx.__display_version__} -# Python 
version: {platform.python_version()} ({platform.python_implementation()}) -# Docutils version: {docutils.__version__} -# Jinja2 version: {jinja2.__version__} -# Pygments version: {pygments.__version__} - -# Last messages: -{last_msgs} - -# Loaded extensions: -{exts_list} - -# Traceback: -{exc_format} -""") - return f.name +__all__ = 'save_traceback', 'format_exception_cut_frames' def format_exception_cut_frames(x: int = 1) -> str: diff --git a/sphinx/util/i18n.py b/sphinx/util/i18n.py index 48425a03385..b4953032dc4 100644 --- a/sphinx/util/i18n.py +++ b/sphinx/util/i18n.py @@ -32,7 +32,7 @@ from sphinx.environment import BuildEnvironment class DateFormatter(Protocol): - def __call__( # NoQA: E704 + def __call__( self, date: dt.date | None = ..., format: str = ..., @@ -40,7 +40,7 @@ def __call__( # NoQA: E704 ) -> str: ... class TimeFormatter(Protocol): - def __call__( # NoQA: E704 + def __call__( self, time: dt.time | dt.datetime | float | None = ..., format: str = ..., @@ -49,7 +49,7 @@ def __call__( # NoQA: E704 ) -> str: ... class DatetimeFormatter(Protocol): - def __call__( # NoQA: E704 + def __call__( self, datetime: dt.date | dt.time | float | None = ..., format: str = ..., @@ -249,6 +249,9 @@ def format_date( source_date_epoch = os.getenv('SOURCE_DATE_EPOCH') if source_date_epoch is not None: date = datetime.fromtimestamp(float(source_date_epoch), tz=UTC) + # If SOURCE_DATE_EPOCH is set, users likely want a reproducible result, + # so enforce GMT/UTC for consistency. + local_time = False else: date = datetime.now(tz=UTC) diff --git a/sphinx/util/images.py b/sphinx/util/images.py index f1e7344eb7a..b43a0705d36 100644 --- a/sphinx/util/images.py +++ b/sphinx/util/images.py @@ -56,11 +56,11 @@ def get_image_size(filename: str | PathLike[str]) -> tuple[int, int] | None: @overload -def guess_mimetype(filename: PathLike[str] | str, default: str) -> str: ... # NoQA: E704 +def guess_mimetype(filename: PathLike[str] | str, default: str) -> str: ... @overload -def guess_mimetype( # NoQA: E704 +def guess_mimetype( filename: PathLike[str] | str, default: None = None ) -> str | None: ... diff --git a/sphinx/util/inspect.py b/sphinx/util/inspect.py index e2bc8b1e6a7..1b06f97d8ec 100644 --- a/sphinx/util/inspect.py +++ b/sphinx/util/inspect.py @@ -32,15 +32,15 @@ from typing_extensions import TypeIs class _SupportsGet(Protocol): - def __get__(self, instance: Any, owner: type | None = ..., /) -> Any: ... # NoQA: E704 + def __get__(self, instance: Any, owner: type | None = ..., /) -> Any: ... class _SupportsSet(Protocol): # instance and value are contravariants but we do not need that precision - def __set__(self, instance: Any, value: Any, /) -> None: ... # NoQA: E704 + def __set__(self, instance: Any, value: Any, /) -> None: ... class _SupportsDelete(Protocol): # instance is contravariant but we do not need that precision - def __delete__(self, instance: Any, /) -> None: ... # NoQA: E704 + def __delete__(self, instance: Any, /) -> None: ... 
_RoutineType: TypeAlias = ( types.FunctionType diff --git a/sphinx/util/inventory.py b/sphinx/util/inventory.py index 507d7a1d8ed..9da94f85d4b 100644 --- a/sphinx/util/inventory.py +++ b/sphinx/util/inventory.py @@ -2,6 +2,7 @@ from __future__ import annotations +import posixpath import re import zlib from typing import TYPE_CHECKING @@ -14,127 +15,96 @@ if TYPE_CHECKING: import os - from collections.abc import Callable, Iterator + from collections.abc import Callable, Sequence + from typing import Protocol from sphinx.builders import Builder from sphinx.environment import BuildEnvironment - from sphinx.util.typing import Inventory, InventoryItem, _ReadableStream - - -class InventoryFileReader: - """A file reader for an inventory file. - - This reader supports mixture of texts and compressed texts. - """ - - def __init__(self, stream: _ReadableStream[bytes]) -> None: - self.stream = stream - self.buffer = b'' - self.eof = False - - def read_buffer(self) -> None: - chunk = self.stream.read(BUFSIZE) - if chunk == b'': - self.eof = True - self.buffer += chunk - - def readline(self) -> str: - pos = self.buffer.find(b'\n') - if pos != -1: - line = self.buffer[:pos].decode() - self.buffer = self.buffer[pos + 1 :] - elif self.eof: - line = self.buffer.decode() - self.buffer = b'' - else: - self.read_buffer() - line = self.readline() - - return line - - def readlines(self) -> Iterator[str]: - while not self.eof: - line = self.readline() - if line: - yield line - - def read_compressed_chunks(self) -> Iterator[bytes]: - decompressor = zlib.decompressobj() - while not self.eof: - self.read_buffer() - yield decompressor.decompress(self.buffer) - self.buffer = b'' - yield decompressor.flush() - - def read_compressed_lines(self) -> Iterator[str]: - buf = b'' - for chunk in self.read_compressed_chunks(): - buf += chunk - pos = buf.find(b'\n') - while pos != -1: - yield buf[:pos].decode() - buf = buf[pos + 1 :] - pos = buf.find(b'\n') + from sphinx.util.typing import Inventory, InventoryItem + + # Readable file stream for inventory loading + class _SupportsRead(Protocol): + def read(self, size: int = ...) -> bytes: ... 
+ + _JoinFunc = Callable[[str, str], str] + + +def __getattr__(name: str) -> object: + if name == 'InventoryFileReader': + from sphinx.util._inventory_file_reader import InventoryFileReader + + return InventoryFileReader + msg = f'module {__name__!r} has no attribute {name!r}' + raise AttributeError(msg) class InventoryFile: @classmethod - def load( - cls: type[InventoryFile], - stream: _ReadableStream[bytes], + def loads( + cls, + content: bytes, + *, uri: str, - joinfunc: Callable[[str, str], str], ) -> Inventory: - reader = InventoryFileReader(stream) - line = reader.readline().rstrip() - if line == '# Sphinx inventory version 1': - return cls.load_v1(reader, uri, joinfunc) - elif line == '# Sphinx inventory version 2': - return cls.load_v2(reader, uri, joinfunc) - else: - raise ValueError('invalid inventory header: %s' % line) + format_line, _, content = content.partition(b'\n') + format_line = format_line.rstrip() # remove trailing \r or spaces + if format_line == b'# Sphinx inventory version 2': + return cls._loads_v2(content, uri=uri) + if format_line == b'# Sphinx inventory version 1': + lines = content.decode().splitlines() + return cls._loads_v1(lines, uri=uri) + if format_line.startswith(b'# Sphinx inventory version '): + unknown_version = format_line[27:].decode() + msg = f'unknown or unsupported inventory version: {unknown_version!r}' + raise ValueError(msg) + msg = f'invalid inventory header: {format_line.decode()}' + raise ValueError(msg) @classmethod - def load_v1( - cls: type[InventoryFile], - stream: InventoryFileReader, - uri: str, - join: Callable[[str, str], str], - ) -> Inventory: + def load(cls, stream: _SupportsRead, uri: str, joinfunc: _JoinFunc) -> Inventory: + return cls.loads(stream.read(), uri=uri) + + @classmethod + def _loads_v1(cls, lines: Sequence[str], *, uri: str) -> Inventory: + if len(lines) < 2: + msg = 'invalid inventory header: missing project name or version' + raise ValueError(msg) invdata: Inventory = {} - projname = stream.readline().rstrip()[11:] - version = stream.readline().rstrip()[11:] - for line in stream.readlines(): - name, type, location = line.rstrip().split(None, 2) - location = join(uri, location) + projname = lines[0].rstrip()[11:] # Project name + version = lines[1].rstrip()[11:] # Project version + for line in lines[2:]: + name, item_type, location = line.rstrip().split(None, 2) + location = posixpath.join(uri, location) # version 1 did not add anchors to the location - if type == 'mod': - type = 'py:module' - location += '#module-' + name + if item_type == 'mod': + item_type = 'py:module' + location += f'#module-{name}' else: - type = 'py:' + type - location += '#' + name - invdata.setdefault(type, {})[name] = (projname, version, location, '-') + item_type = f'py:{item_type}' + location += f'#{name}' + inv_item: InventoryItem = projname, version, location, '-' + invdata.setdefault(item_type, {})[name] = inv_item return invdata @classmethod - def load_v2( - cls: type[InventoryFile], - stream: InventoryFileReader, - uri: str, - join: Callable[[str, str], str], - ) -> Inventory: + def _loads_v2(cls, inv_data: bytes, *, uri: str) -> Inventory: + try: + line_1, line_2, check_line, compressed = inv_data.split(b'\n', maxsplit=3) + except ValueError: + msg = 'invalid inventory header: missing project name or version' + raise ValueError(msg) from None invdata: Inventory = {} - projname = stream.readline().rstrip()[11:] - version = stream.readline().rstrip()[11:] + projname = line_1.rstrip()[11:].decode() # Project name + version = 
line_2.rstrip()[11:].decode() # Project version # definition -> priority, location, display name potential_ambiguities: dict[str, tuple[str, str, str]] = {} actual_ambiguities = set() - line = stream.readline() - if 'zlib' not in line: - raise ValueError('invalid inventory header (not compressed): %s' % line) + if b'zlib' not in check_line: # '... compressed using zlib' + msg = f'invalid inventory header (not compressed): {check_line.decode()}' + raise ValueError(msg) - for line in stream.read_compressed_lines(): + decompressed_content = zlib.decompress(compressed) + for line in decompressed_content.decode().splitlines(): # be careful to handle names with embedded spaces correctly m = re.match( r'(.+?)\s+(\S+)\s+(-?\d+)\s+?(\S*)\s+(.*)', @@ -177,7 +147,7 @@ def load_v2( potential_ambiguities[lowercase_definition] = content if location.endswith('$'): location = location[:-1] + name - location = join(uri, location) + location = posixpath.join(uri, location) inv_item: InventoryItem = projname, version, location, dispname invdata.setdefault(type, {})[name] = inv_item for ambiguity in actual_ambiguities: @@ -192,10 +162,7 @@ def load_v2( @classmethod def dump( - cls: type[InventoryFile], - filename: str | os.PathLike[str], - env: BuildEnvironment, - builder: Builder, + cls, filename: str | os.PathLike[str], env: BuildEnvironment, builder: Builder ) -> None: def escape(string: str) -> str: return re.sub('\\s+', ' ', string) diff --git a/sphinx/util/nodes.py b/sphinx/util/nodes.py index 19c21986d98..9cc62bc1ee5 100644 --- a/sphinx/util/nodes.py +++ b/sphinx/util/nodes.py @@ -110,7 +110,7 @@ def get_full_module_name(node: Node) -> str: def repr_domxml(node: Node, length: int = 80) -> str: """ - return DOM XML representation of the specified node like: + Return DOM XML representation of the specified node like: 'Added in version...' 
:param nodes.Node node: target node diff --git a/sphinx/util/osutil.py b/sphinx/util/osutil.py index c3f5aaa139a..d24bbf55b2d 100644 --- a/sphinx/util/osutil.py +++ b/sphinx/util/osutil.py @@ -227,8 +227,8 @@ def close(self) -> None: try: with open(self._path, encoding='utf-8') as old_f: old_content = old_f.read() - if old_content == buf: - return + if old_content == buf: + return except OSError: pass diff --git a/sphinx/util/parallel.py b/sphinx/util/parallel.py index 27ea0d591cc..03debd6ed25 100644 --- a/sphinx/util/parallel.py +++ b/sphinx/util/parallel.py @@ -61,7 +61,7 @@ def __init__(self, nproc: int) -> None: # list of receiving pipe connections of running subprocesses self._precvs: dict[int, Any] = {} # list of receiving pipe connections of waiting subprocesses - self._precvsWaiting: dict[int, Any] = {} + self._precvs_waiting: dict[int, Any] = {} # number of working subprocesses self._pworking = 0 # task number of each subprocess @@ -94,7 +94,7 @@ def add_task( context: Any = multiprocessing.get_context('fork') proc = context.Process(target=self._process, args=(psend, task_func, arg)) self._procs[tid] = proc - self._precvsWaiting[tid] = precv + self._precvs_waiting[tid] = precv try: self._join_one() except Exception: @@ -135,8 +135,8 @@ def _join_one(self) -> bool: joined_any = True break - while self._precvsWaiting and self._pworking < self.nproc: - newtid, newprecv = self._precvsWaiting.popitem() + while self._precvs_waiting and self._pworking < self.nproc: + newtid, newprecv = self._precvs_waiting.popitem() self._precvs[newtid] = newprecv self._procs[newtid].start() self._pworking += 1 diff --git a/sphinx/util/requests.py b/sphinx/util/requests.py index a647bf5ba50..b439ce437e8 100644 --- a/sphinx/util/requests.py +++ b/sphinx/util/requests.py @@ -3,20 +3,34 @@ from __future__ import annotations import warnings -from typing import Any -from urllib.parse import urlsplit +from typing import TYPE_CHECKING +from urllib.parse import urljoin, urlsplit import requests from urllib3.exceptions import InsecureRequestWarning import sphinx +if TYPE_CHECKING: + import re + from collections.abc import Sequence + from typing import Any + + _USER_AGENT = ( f'Mozilla/5.0 (X11; Linux x86_64; rv:100.0) Gecko/20100101 Firefox/100.0 ' f'Sphinx/{sphinx.__version__}' ) +class _IgnoredRedirection(Exception): + """Sphinx-internal exception raised when an HTTP redirect is ignored""" + + def __init__(self, destination: str, status_code: int) -> None: + self.destination = destination + self.status_code = status_code + + def _get_tls_cacert(url: str, certs: str | dict[str, str] | None) -> str | bool: """Get additional CA cert for a specific URL.""" if not certs: @@ -50,6 +64,23 @@ def head(url: str, **kwargs: Any) -> requests.Response: class _Session(requests.Session): + _ignored_redirects: Sequence[re.Pattern[str]] + + def __init__(self, *args: Any, **kwargs: Any) -> None: + self._ignored_redirects = kwargs.pop('_ignored_redirects', ()) + super().__init__(*args, **kwargs) + + def get_redirect_target(self, resp: requests.Response) -> str | None: + """Overrides the default requests.Session.get_redirect_target""" + # do not follow redirections that match ignored URI patterns + if resp.is_redirect: + destination = urljoin(resp.url, resp.headers['location']) + if any(pat.match(destination) for pat in self._ignored_redirects): + raise _IgnoredRedirection( + destination=destination, status_code=resp.status_code + ) + return super().get_redirect_target(resp) + def request( # type: ignore[override] self, method: 
str, diff --git a/sphinx/util/typing.py b/sphinx/util/typing.py index 3bac26e30ec..6b72ad9e811 100644 --- a/sphinx/util/typing.py +++ b/sphinx/util/typing.py @@ -95,7 +95,7 @@ def is_invalid_builtin_class(obj: Any) -> bool: if TYPE_CHECKING: class RoleFunction(Protocol): - def __call__( # NoQA: E704 + def __call__( self, name: str, rawtext: str, @@ -119,26 +119,6 @@ def __call__( # NoQA: E704 # title getter functions for enumerable nodes (see sphinx.domains.std) TitleGetter: TypeAlias = Callable[[nodes.Node], str] -# Readable file stream for inventory loading -if TYPE_CHECKING: - from types import TracebackType - from typing import Self - - _T_co = TypeVar('_T_co', str, bytes, covariant=True) - - class _ReadableStream(Protocol[_T_co]): # NoQA: PYI046 (false positive) - def read(self, size: int = ...) -> _T_co: ... # NoQA: E704 - - def __enter__(self) -> Self: ... # NoQA: E704 - - def __exit__( # NoQA: E704 - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: ... - - # inventory data on memory InventoryItem: TypeAlias = tuple[ str, # project name diff --git a/sphinx/writers/html.py b/sphinx/writers/html.py index e9640dc2925..e2c04ca32db 100644 --- a/sphinx/writers/html.py +++ b/sphinx/writers/html.py @@ -27,6 +27,7 @@ class HTMLWriter(Writer): # type: ignore[misc] def __init__(self, builder: StandaloneHTMLBuilder) -> None: super().__init__() self.builder = builder + self._has_maths_elements: bool = False def translate(self) -> None: # sadly, this is mostly copied from parent class @@ -57,3 +58,4 @@ def translate(self) -> None: ): setattr(self, attr, getattr(visitor, attr, None)) self.clean_meta = ''.join(self.visitor.meta[2:]) + self._has_maths_elements = getattr(visitor, '_has_maths_elements', False) diff --git a/sphinx/writers/html5.py b/sphinx/writers/html5.py index d5c77fee8fe..7b7f04290c3 100644 --- a/sphinx/writers/html5.py +++ b/sphinx/writers/html5.py @@ -66,6 +66,7 @@ def __init__(self, document: nodes.document, builder: Builder) -> None: self._table_row_indices = [0] self._fieldlist_row_indices = [0] self.required_params_left = 0 + self._has_maths_elements: bool = False def visit_start_of_file(self, node: Element) -> None: # only occurs in the single-file builder @@ -232,7 +233,7 @@ def depart_desc_parameter(self, node: Element) -> None: next_is_required = ( not is_last_group and self.list_is_required_param[self.param_group_index + 1] - ) # fmt: skip + ) opt_param_left_at_level = self.params_left_at_level > 0 if ( opt_param_left_at_level @@ -419,9 +420,7 @@ def append_fignumber(figtype: str, figure_id: str) -> None: self.body.append(prefix % '.'.join(map(str, numbers)) + ' ') self.body.append('') - figtype = self.builder.env.domains.standard_domain.get_enumerable_node_type( - node - ) + figtype = self._domains.standard_domain.get_enumerable_node_type(node) if figtype: if len(node['ids']) == 0: msg = __('Any IDs not assigned for %s node') % node.tagname @@ -957,6 +956,8 @@ def visit_field(self, node: Element) -> None: node['classes'].append('field-odd') def visit_math(self, node: Element, math_env: str = '') -> None: + self._has_maths_elements = True + # see validate_math_renderer name: str = self.builder.math_renderer_name # type: ignore[assignment] visit, _ = self.builder.app.registry.html_inline_math_renderers[name] @@ -970,6 +971,8 @@ def depart_math(self, node: Element, math_env: str = '') -> None: depart(self, node) def visit_math_block(self, node: Element, math_env: str = '') -> None: + 
self._has_maths_elements = True + # see validate_math_renderer name: str = self.builder.math_renderer_name # type: ignore[assignment] visit, _ = self.builder.app.registry.html_block_math_renderers[name] diff --git a/sphinx/writers/latex.py b/sphinx/writers/latex.py index cb07c0bf615..ea6ab38222f 100644 --- a/sphinx/writers/latex.py +++ b/sphinx/writers/latex.py @@ -560,7 +560,7 @@ def generate( indices_config = frozenset(indices_config) else: check_names = False - for domain in self.builder.env.domains.sorted(): + for domain in self._domains.sorted(): for index_cls in domain.indices: index_name = f'{domain.name}-{index_cls.name}' if check_names and index_name not in indices_config: @@ -1816,7 +1816,7 @@ def add_target(id: str) -> None: while isinstance(next_node, nodes.target): next_node = next_node.next_node(ascend=True) - domain = self.builder.env.domains.standard_domain + domain = self._domains.standard_domain if isinstance(next_node, HYPERLINK_SUPPORT_NODES): return if ( diff --git a/sphinx/writers/texinfo.py b/sphinx/writers/texinfo.py index bae5714425e..4294c8eabb2 100644 --- a/sphinx/writers/texinfo.py +++ b/sphinx/writers/texinfo.py @@ -10,7 +10,6 @@ from docutils import nodes, writers from sphinx import __display_version__, addnodes -from sphinx.errors import ExtensionError from sphinx.locale import _, __, admonitionlabels from sphinx.util import logging from sphinx.util.docutils import SphinxTranslator @@ -493,7 +492,7 @@ def generate( indices_config = frozenset(indices_config) else: check_names = False - for domain in self.builder.env.domains.sorted(): + for domain in self._domains.sorted(): for index_cls in domain.indices: index_name = f'{domain.name}-{index_cls.name}' if check_names and index_name not in indices_config: @@ -507,7 +506,7 @@ def generate( generate(content, collapsed), )) # only add the main Index if it's not empty - domain = self.builder.env.domains.index_domain + domain = self._domains.index_domain for docname in self.builder.docnames: if domain.entries[docname]: self.indices.append((_('Index'), '\n@printindex ge\n')) @@ -1172,7 +1171,7 @@ def depart_decoration(self, node: Element) -> None: def visit_topic(self, node: Element) -> None: # ignore TOC's since we have to have a "menu" anyway - if 'contents' in node.get('classes', []): + if 'contents' in node.get('classes', ()): raise nodes.SkipNode title = cast('nodes.title', node[0]) self.visit_rubric(title) @@ -1420,11 +1419,11 @@ def visit_desc_signature(self, node: Element) -> None: self.add_anchor(id, node) # use the full name of the objtype for the category try: - domain = self.builder.env.get_domain(node.parent['domain']) + domain = self._domains[node.parent['domain']] name = domain.get_type_name( domain.object_types[objtype], self.config.primary_domain == domain.name ) - except (KeyError, ExtensionError): + except KeyError: name = objtype # by convention, the deffn category should be capitalized like a title category = self.escape_arg(smart_capwords(name)) diff --git a/sphinx/writers/xml.py b/sphinx/writers/xml.py index 825f6da5ca7..f909e3406f9 100644 --- a/sphinx/writers/xml.py +++ b/sphinx/writers/xml.py @@ -16,10 +16,11 @@ class XMLWriter(BaseXMLWriter): # type: ignore[misc] def __init__(self, builder: Builder) -> None: super().__init__() self.builder = builder + self._config = builder.config def translate(self, *args: Any, **kwargs: Any) -> None: self.document.settings.newlines = self.document.settings.indents = ( - self.builder.env.config.xml_pretty + self._config.xml_pretty ) 
self.document.settings.xml_declaration = True self.document.settings.doctype_declaration = True diff --git a/tests/js/roots/titles/relevance.py b/tests/js/roots/titles/relevance.py index c4d0eec557f..b9ebc2b00e8 100644 --- a/tests/js/roots/titles/relevance.py +++ b/tests/js/roots/titles/relevance.py @@ -1,7 +1,8 @@ class Example: """Example class""" + num_attribute = 5 - text_attribute = "string" + text_attribute = 'string' - relevance = "testing" + relevance = 'testing' """attribute docstring""" diff --git a/tests/js/searchtools.spec.js b/tests/js/searchtools.spec.js index cfe5fdcf7ed..2ee55ad00ac 100644 --- a/tests/js/searchtools.spec.js +++ b/tests/js/searchtools.spec.js @@ -209,6 +209,19 @@ describe('Basic html theme search', function() { }); + describe('can handle edge-case search queries', function() { + + it('does not find the javascript prototype property in unrelated documents', function() { + eval(loadFixture("partial/searchindex.js")); + + searchParameters = Search._parseQuery('__proto__'); + + hits = []; + expect(Search._performSearch(...searchParameters)).toEqual(hits); + }); + + }); + }); describe("htmlToText", function() { diff --git a/tests/roots/test-add_enumerable_node/enumerable_node.py b/tests/roots/test-add_enumerable_node/enumerable_node.py index 782365e655b..2cf93e9104f 100644 --- a/tests/roots/test-add_enumerable_node/enumerable_node.py +++ b/tests/roots/test-add_enumerable_node/enumerable_node.py @@ -37,7 +37,7 @@ def visit_numbered_text(self, node): raise nodes.SkipNode -def get_title(node): +def get_title(node): # NoQA: FURB118 return node['title'] @@ -51,12 +51,14 @@ def run(self): def setup(app): # my-figure - app.add_enumerable_node(my_figure, 'figure', - html=(visit_my_figure, depart_my_figure)) + app.add_enumerable_node( + my_figure, 'figure', html=(visit_my_figure, depart_my_figure) + ) app.add_directive('my-figure', MyFigure) # numbered_label - app.add_enumerable_node(numbered_text, 'original', get_title, - html=(visit_numbered_text, None)) + app.add_enumerable_node( + numbered_text, 'original', get_title, html=(visit_numbered_text, None) + ) app.add_directive('numbered-text', NumberedText) app.config.numfig_format.setdefault('original', 'No.%s') diff --git a/tests/roots/test-add_source_parser-conflicts-with-users-setting/conf.py b/tests/roots/test-add_source_parser-conflicts-with-users-setting/conf.py index 5f979c70918..25d1e76be4d 100644 --- a/tests/roots/test-add_source_parser-conflicts-with-users-setting/conf.py +++ b/tests/roots/test-add_source_parser-conflicts-with-users-setting/conf.py @@ -16,5 +16,5 @@ class DummyTestParser(Parser): '.test': 'restructuredtext', } source_parsers = { - '.test': DummyTestParser + '.test': DummyTestParser, } diff --git a/tests/roots/test-apidoc-pep420/a/b/c/__init__.py b/tests/roots/test-apidoc-pep420/a/b/c/__init__.py deleted file mode 100644 index 5b727c1139b..00000000000 --- a/tests/roots/test-apidoc-pep420/a/b/c/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"Package C" diff --git a/tests/roots/test-apidoc-pep420/a/b/c/d.py b/tests/roots/test-apidoc-pep420/a/b/c/d.py deleted file mode 100644 index 63b0e3436b8..00000000000 --- a/tests/roots/test-apidoc-pep420/a/b/c/d.py +++ /dev/null @@ -1 +0,0 @@ -"Module d" diff --git a/tests/roots/test-apidoc-pep420/a/b/e/f.py b/tests/roots/test-apidoc-pep420/a/b/e/f.py deleted file mode 100644 index a09affe861f..00000000000 --- a/tests/roots/test-apidoc-pep420/a/b/e/f.py +++ /dev/null @@ -1 +0,0 @@ -"Module f" diff --git a/tests/roots/test-apidoc-pep420/a/b/x/y.py 
b/tests/roots/test-apidoc-pep420/a/b/x/y.py deleted file mode 100644 index 46bc245051b..00000000000 --- a/tests/roots/test-apidoc-pep420/a/b/x/y.py +++ /dev/null @@ -1 +0,0 @@ -"Module y" diff --git a/tests/roots/test-apidoc-subpackage-in-toc/parent/child/foo.py b/tests/roots/test-apidoc-subpackage-in-toc/parent/child/foo.py deleted file mode 100644 index 810c96eeeb7..00000000000 --- a/tests/roots/test-apidoc-subpackage-in-toc/parent/child/foo.py +++ /dev/null @@ -1 +0,0 @@ -"foo" diff --git a/tests/roots/test-apidoc-toc/mypackage/something/__init__.py b/tests/roots/test-apidoc-toc/mypackage/something/__init__.py deleted file mode 100644 index 6401e43ec46..00000000000 --- a/tests/roots/test-apidoc-toc/mypackage/something/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"Subpackage Something" diff --git a/tests/roots/test-apidoc-trailing-underscore/package_/__init__.py b/tests/roots/test-apidoc-trailing-underscore/package_/__init__.py deleted file mode 100644 index b09612b8326..00000000000 --- a/tests/roots/test-apidoc-trailing-underscore/package_/__init__.py +++ /dev/null @@ -1 +0,0 @@ -""" A package with trailing underscores """ diff --git a/tests/roots/test-apidoc-trailing-underscore/package_/module_.py b/tests/roots/test-apidoc-trailing-underscore/package_/module_.py deleted file mode 100644 index e16461c21ca..00000000000 --- a/tests/roots/test-apidoc-trailing-underscore/package_/module_.py +++ /dev/null @@ -1,9 +0,0 @@ -""" A module with a trailing underscore """ - - -class SomeClass_: - """ A class with a trailing underscore """ - - -def some_function_(some_arg_): - """ A function with a trailing underscore in name and argument """ diff --git a/tests/roots/test-basic/conf.py b/tests/roots/test-basic/conf.py index 69a316101c9..c4fb1abdda2 100644 --- a/tests/roots/test-basic/conf.py +++ b/tests/roots/test-basic/conf.py @@ -1,4 +1,10 @@ html_theme = 'basic' latex_documents = [ - ('index', 'test.tex', 'The basic Sphinx documentation for testing', 'Sphinx', 'report') + ( + 'index', + 'test.tex', + 'The basic Sphinx documentation for testing', + 'Sphinx', + 'report', + ) ] diff --git a/tests/roots/test-build-text/conf.py b/tests/roots/test-build-text/conf.py index b0fdaf8d231..b20b895493f 100644 --- a/tests/roots/test-build-text/conf.py +++ b/tests/roots/test-build-text/conf.py @@ -1,4 +1,4 @@ source_suffix = { - '.txt': 'restructuredtext' + '.txt': 'restructuredtext', } exclude_patterns = ['_build'] diff --git a/tests/roots/test-directive-code/emphasize.rst b/tests/roots/test-directive-code/emphasize.rst index 95db574cebe..ae444e9a89a 100644 --- a/tests/roots/test-directive-code/emphasize.rst +++ b/tests/roots/test-directive-code/emphasize.rst @@ -3,5 +3,4 @@ Literal Includes with Highlighted Lines .. literalinclude:: target.py :language: python - :emphasize-lines: 5-6, 13-15, 24- - + :emphasize-lines: 6-7, 16-19, 29- diff --git a/tests/roots/test-directive-code/python.rst b/tests/roots/test-directive-code/python.rst index 794c190f107..17e3d7d0e49 100644 --- a/tests/roots/test-directive-code/python.rst +++ b/tests/roots/test-directive-code/python.rst @@ -1,13 +1,13 @@ -=========================== -Literal Includes for python -=========================== - -block start with blank or comment -================================= - -.. literalinclude:: target.py - :pyobject: block_start_with_comment - -.. 
literalinclude:: target.py - :pyobject: block_start_with_blank - +=========================== +Literal Includes for python +=========================== + +block start with blank or comment +================================= + +.. literalinclude:: target.py + :pyobject: block_start_with_comment + +.. literalinclude:: target.py + :pyobject: block_start_with_blank + diff --git a/tests/roots/test-directive-code/target.py b/tests/roots/test-directive-code/target.py index b95dffbf9ef..31f3822ac0b 100644 --- a/tests/roots/test-directive-code/target.py +++ b/tests/roots/test-directive-code/target.py @@ -1,21 +1,26 @@ # Literally included file using Python highlighting -foo = "Including Unicode characters: üöä" +foo = 'Including Unicode characters: üöä' + class Foo: pass + class Bar: def baz(): pass + # comment after Bar class definition def bar(): pass + def block_start_with_comment(): # Comment return 1 + def block_start_with_blank(): return 1 diff --git a/tests/roots/test-domain-c-c_maximum_signature_line_length/conf.py b/tests/roots/test-domain-c-c_maximum_signature_line_length/conf.py index ba480ed2884..817983754b6 100644 --- a/tests/roots/test-domain-c-c_maximum_signature_line_length/conf.py +++ b/tests/roots/test-domain-c-c_maximum_signature_line_length/conf.py @@ -1 +1 @@ -c_maximum_signature_line_length = len("str hello(str name)") - 1 +c_maximum_signature_line_length = len('str hello(str name)') - 1 diff --git a/tests/roots/test-domain-c-intersphinx/conf.py b/tests/roots/test-domain-c-intersphinx/conf.py index c176af77528..896cad799b9 100644 --- a/tests/roots/test-domain-c-intersphinx/conf.py +++ b/tests/roots/test-domain-c-intersphinx/conf.py @@ -1,4 +1,4 @@ exclude_patterns = ['_build'] extensions = [ - 'sphinx.ext.intersphinx', + 'sphinx.ext.intersphinx', ] diff --git a/tests/roots/test-domain-cpp-cpp_maximum_signature_line_length/conf.py b/tests/roots/test-domain-cpp-cpp_maximum_signature_line_length/conf.py index 1eb3a64bfc4..b75c1418f1a 100644 --- a/tests/roots/test-domain-cpp-cpp_maximum_signature_line_length/conf.py +++ b/tests/roots/test-domain-cpp-cpp_maximum_signature_line_length/conf.py @@ -1 +1 @@ -cpp_maximum_signature_line_length = len("str hello(str name)") - 1 +cpp_maximum_signature_line_length = len('str hello(str name)') - 1 diff --git a/tests/roots/test-domain-cpp-intersphinx/conf.py b/tests/roots/test-domain-cpp-intersphinx/conf.py index c176af77528..896cad799b9 100644 --- a/tests/roots/test-domain-cpp-intersphinx/conf.py +++ b/tests/roots/test-domain-cpp-intersphinx/conf.py @@ -1,4 +1,4 @@ exclude_patterns = ['_build'] extensions = [ - 'sphinx.ext.intersphinx', + 'sphinx.ext.intersphinx', ] diff --git a/tests/roots/test-epub-anchor-id/conf.py b/tests/roots/test-epub-anchor-id/conf.py index 2a56f1f6689..eb614a04051 100644 --- a/tests/roots/test-epub-anchor-id/conf.py +++ b/tests/roots/test-epub-anchor-id/conf.py @@ -1,2 +1,2 @@ def setup(app): - app.add_crossref_type(directivename="setting", rolename="setting") + app.add_crossref_type(directivename='setting', rolename='setting') diff --git a/tests/roots/test-apidoc-custom-templates/_templates/module.rst.jinja b/tests/roots/test-ext-apidoc-custom-templates/_templates/module.rst.jinja similarity index 100% rename from tests/roots/test-apidoc-custom-templates/_templates/module.rst.jinja rename to tests/roots/test-ext-apidoc-custom-templates/_templates/module.rst.jinja diff --git a/tests/roots/test-apidoc-custom-templates/_templates/module.rst_t b/tests/roots/test-ext-apidoc-custom-templates/_templates/module.rst_t 
similarity index 100% rename from tests/roots/test-apidoc-custom-templates/_templates/module.rst_t rename to tests/roots/test-ext-apidoc-custom-templates/_templates/module.rst_t diff --git a/tests/roots/test-apidoc-custom-templates/_templates/package.rst_t b/tests/roots/test-ext-apidoc-custom-templates/_templates/package.rst_t similarity index 100% rename from tests/roots/test-apidoc-custom-templates/_templates/package.rst_t rename to tests/roots/test-ext-apidoc-custom-templates/_templates/package.rst_t diff --git a/tests/roots/test-apidoc-custom-templates/mypackage/__init__.py b/tests/roots/test-ext-apidoc-custom-templates/mypackage/__init__.py similarity index 100% rename from tests/roots/test-apidoc-custom-templates/mypackage/__init__.py rename to tests/roots/test-ext-apidoc-custom-templates/mypackage/__init__.py diff --git a/tests/roots/test-apidoc-custom-templates/mypackage/mymodule.py b/tests/roots/test-ext-apidoc-custom-templates/mypackage/mymodule.py old mode 100755 new mode 100644 similarity index 100% rename from tests/roots/test-apidoc-custom-templates/mypackage/mymodule.py rename to tests/roots/test-ext-apidoc-custom-templates/mypackage/mymodule.py diff --git a/tests/roots/test-apidoc-duplicates/fish_licence/halibut.cpython-38-x86_64-linux-gnu.so b/tests/roots/test-ext-apidoc-duplicates/fish_licence/halibut.cpython-38-x86_64-linux-gnu.so similarity index 100% rename from tests/roots/test-apidoc-duplicates/fish_licence/halibut.cpython-38-x86_64-linux-gnu.so rename to tests/roots/test-ext-apidoc-duplicates/fish_licence/halibut.cpython-38-x86_64-linux-gnu.so diff --git a/tests/roots/test-apidoc-duplicates/fish_licence/halibut.pyx b/tests/roots/test-ext-apidoc-duplicates/fish_licence/halibut.pyx similarity index 100% rename from tests/roots/test-apidoc-duplicates/fish_licence/halibut.pyx rename to tests/roots/test-ext-apidoc-duplicates/fish_licence/halibut.pyx diff --git a/tests/roots/test-ext-apidoc-pep420/a/b/c/__init__.py b/tests/roots/test-ext-apidoc-pep420/a/b/c/__init__.py new file mode 100644 index 00000000000..0dda7cf32df --- /dev/null +++ b/tests/roots/test-ext-apidoc-pep420/a/b/c/__init__.py @@ -0,0 +1 @@ +"""Package C""" diff --git a/tests/roots/test-ext-apidoc-pep420/a/b/c/d.py b/tests/roots/test-ext-apidoc-pep420/a/b/c/d.py new file mode 100644 index 00000000000..7566ec33bc2 --- /dev/null +++ b/tests/roots/test-ext-apidoc-pep420/a/b/c/d.py @@ -0,0 +1 @@ +"""Module d""" diff --git a/tests/roots/test-apidoc-pep420/a/b/e/__init__.py b/tests/roots/test-ext-apidoc-pep420/a/b/e/__init__.py similarity index 100% rename from tests/roots/test-apidoc-pep420/a/b/e/__init__.py rename to tests/roots/test-ext-apidoc-pep420/a/b/e/__init__.py diff --git a/tests/roots/test-ext-apidoc-pep420/a/b/e/f.py b/tests/roots/test-ext-apidoc-pep420/a/b/e/f.py new file mode 100644 index 00000000000..1a33f3970b4 --- /dev/null +++ b/tests/roots/test-ext-apidoc-pep420/a/b/e/f.py @@ -0,0 +1 @@ +"""Module f""" diff --git a/tests/roots/test-ext-apidoc-pep420/a/b/x/y.py b/tests/roots/test-ext-apidoc-pep420/a/b/x/y.py new file mode 100644 index 00000000000..14beabd289b --- /dev/null +++ b/tests/roots/test-ext-apidoc-pep420/a/b/x/y.py @@ -0,0 +1 @@ +"""Module y""" diff --git a/tests/roots/test-apidoc-subpackage-in-toc/parent/__init__.py b/tests/roots/test-ext-apidoc-subpackage-in-toc/parent/__init__.py similarity index 100% rename from tests/roots/test-apidoc-subpackage-in-toc/parent/__init__.py rename to tests/roots/test-ext-apidoc-subpackage-in-toc/parent/__init__.py diff --git 
a/tests/roots/test-apidoc-subpackage-in-toc/parent/child/__init__.py b/tests/roots/test-ext-apidoc-subpackage-in-toc/parent/child/__init__.py similarity index 100% rename from tests/roots/test-apidoc-subpackage-in-toc/parent/child/__init__.py rename to tests/roots/test-ext-apidoc-subpackage-in-toc/parent/child/__init__.py diff --git a/tests/roots/test-ext-apidoc-subpackage-in-toc/parent/child/foo.py b/tests/roots/test-ext-apidoc-subpackage-in-toc/parent/child/foo.py new file mode 100644 index 00000000000..6d3c1bc67e3 --- /dev/null +++ b/tests/roots/test-ext-apidoc-subpackage-in-toc/parent/child/foo.py @@ -0,0 +1 @@ +"""foo""" diff --git a/tests/roots/test-apidoc-toc/mypackage/__init__.py b/tests/roots/test-ext-apidoc-toc/mypackage/__init__.py similarity index 100% rename from tests/roots/test-apidoc-toc/mypackage/__init__.py rename to tests/roots/test-ext-apidoc-toc/mypackage/__init__.py diff --git a/tests/roots/test-apidoc-toc/mypackage/main.py b/tests/roots/test-ext-apidoc-toc/mypackage/main.py old mode 100755 new mode 100644 similarity index 50% rename from tests/roots/test-apidoc-toc/mypackage/main.py rename to tests/roots/test-ext-apidoc-toc/mypackage/main.py index 1f6d1376cbb..aacbfdde76e --- a/tests/roots/test-apidoc-toc/mypackage/main.py +++ b/tests/roots/test-ext-apidoc-toc/mypackage/main.py @@ -1,13 +1,11 @@ -#!/usr/bin/env python3 - from pathlib import Path import mod_resource import mod_something -if __name__ == "__main__": - print(f"Hello, world! -> something returns: {mod_something.something()}") +if __name__ == '__main__': + print(f'Hello, world! -> something returns: {mod_something.something()}') res_path = Path(mod_resource.__file__).parent / 'resource.txt' text = res_path.read_text(encoding='utf-8') - print(f"From mod_resource:resource.txt -> {text}") + print(f'From mod_resource:resource.txt -> {text}') diff --git a/tests/roots/test-apidoc-toc/mypackage/no_init/foo.py b/tests/roots/test-ext-apidoc-toc/mypackage/no_init/foo.py similarity index 100% rename from tests/roots/test-apidoc-toc/mypackage/no_init/foo.py rename to tests/roots/test-ext-apidoc-toc/mypackage/no_init/foo.py diff --git a/tests/roots/test-apidoc-toc/mypackage/resource/__init__.py b/tests/roots/test-ext-apidoc-toc/mypackage/resource/__init__.py similarity index 100% rename from tests/roots/test-apidoc-toc/mypackage/resource/__init__.py rename to tests/roots/test-ext-apidoc-toc/mypackage/resource/__init__.py diff --git a/tests/roots/test-apidoc-toc/mypackage/resource/resource.txt b/tests/roots/test-ext-apidoc-toc/mypackage/resource/resource.txt similarity index 100% rename from tests/roots/test-apidoc-toc/mypackage/resource/resource.txt rename to tests/roots/test-ext-apidoc-toc/mypackage/resource/resource.txt diff --git a/tests/roots/test-ext-apidoc-toc/mypackage/something/__init__.py b/tests/roots/test-ext-apidoc-toc/mypackage/something/__init__.py new file mode 100644 index 00000000000..a8cbeecd923 --- /dev/null +++ b/tests/roots/test-ext-apidoc-toc/mypackage/something/__init__.py @@ -0,0 +1 @@ +"""Subpackage Something""" diff --git a/tests/roots/test-ext-apidoc-trailing-underscore/package_/__init__.py b/tests/roots/test-ext-apidoc-trailing-underscore/package_/__init__.py new file mode 100644 index 00000000000..ce09465758b --- /dev/null +++ b/tests/roots/test-ext-apidoc-trailing-underscore/package_/__init__.py @@ -0,0 +1 @@ +"""A package with trailing underscores""" diff --git a/tests/roots/test-ext-apidoc-trailing-underscore/package_/module_.py 
b/tests/roots/test-ext-apidoc-trailing-underscore/package_/module_.py new file mode 100644 index 00000000000..25d311ca4cf --- /dev/null +++ b/tests/roots/test-ext-apidoc-trailing-underscore/package_/module_.py @@ -0,0 +1,9 @@ +"""A module with a trailing underscore""" + + +class SomeClass_: + """A class with a trailing underscore""" + + +def some_function_(some_arg_): + """A function with a trailing underscore in name and argument""" diff --git a/tests/roots/test-ext-autodoc/autodoc_dummy_bar.py b/tests/roots/test-ext-autodoc/autodoc_dummy_bar.py index 3b5bbfdd1cb..c66a33741d9 100644 --- a/tests/roots/test-ext-autodoc/autodoc_dummy_bar.py +++ b/tests/roots/test-ext-autodoc/autodoc_dummy_bar.py @@ -3,4 +3,5 @@ class Bar: """Dummy class Bar with alias.""" + my_name = Foo diff --git a/tests/roots/test-ext-autodoc/autodoc_dummy_module.py b/tests/roots/test-ext-autodoc/autodoc_dummy_module.py index c05d96e0d6d..21380310662 100644 --- a/tests/roots/test-ext-autodoc/autodoc_dummy_module.py +++ b/tests/roots/test-ext-autodoc/autodoc_dummy_module.py @@ -1,6 +1,6 @@ -from dummy import * +from dummy import * # NoQA: F403 def test(): """Dummy function using dummy.*""" - dummy_function() + dummy_function() # NoQA: F405 diff --git a/tests/roots/test-ext-autodoc/bug2437/autodoc_dummy_foo.py b/tests/roots/test-ext-autodoc/bug2437/autodoc_dummy_foo.py index 9c954d80a52..227c28f3d25 100644 --- a/tests/roots/test-ext-autodoc/bug2437/autodoc_dummy_foo.py +++ b/tests/roots/test-ext-autodoc/bug2437/autodoc_dummy_foo.py @@ -1,3 +1,4 @@ class Foo: """Dummy class Foo.""" + pass diff --git a/tests/roots/test-ext-autodoc/conf.py b/tests/roots/test-ext-autodoc/conf.py index abaea1c996b..f134359a32a 100644 --- a/tests/roots/test-ext-autodoc/conf.py +++ b/tests/roots/test-ext-autodoc/conf.py @@ -6,7 +6,7 @@ extensions = ['sphinx.ext.autodoc'] autodoc_mock_imports = [ - 'dummy' + 'dummy', ] nitpicky = True diff --git a/tests/roots/test-ext-autodoc/target/TYPE_CHECKING.py b/tests/roots/test-ext-autodoc/target/TYPE_CHECKING.py index 85aea3a090e..08068a49734 100644 --- a/tests/roots/test-ext-autodoc/target/TYPE_CHECKING.py +++ b/tests/roots/test-ext-autodoc/target/TYPE_CHECKING.py @@ -1,6 +1,6 @@ from __future__ import annotations -from gettext import NullTranslations +from gettext import NullTranslations # NoQA: TC003 from typing import TYPE_CHECKING if TYPE_CHECKING: diff --git a/tests/roots/test-ext-autodoc/target/__init__.py b/tests/roots/test-ext-autodoc/target/__init__.py index d7ee4ac0f37..8b9c445c5be 100644 --- a/tests/roots/test-ext-autodoc/target/__init__.py +++ b/tests/roots/test-ext-autodoc/target/__init__.py @@ -22,11 +22,15 @@ def f(self): def _funky_classmethod(name, b, c, d, docstring=None): - """Generates a classmethod for a class from a template by filling out - some arguments.""" + """ + Generates a classmethod for a class from a template by filling out some arguments. 
+ """ + def template(cls, a, b, c, d=4, e=5, f=6): return a, b, c, d, e, f + from functools import partial + function = partial(template, b=b, c=c, d=d) function.__name__ = name function.__doc__ = docstring @@ -64,10 +68,11 @@ def excludemeth(self): mdocattr = StringIO() """should be documented as well - süß""" - roger = _funky_classmethod("roger", 2, 3, 4) + roger = _funky_classmethod('roger', 2, 3, 4) - moore = _funky_classmethod("moore", 9, 8, 7, - docstring="moore(a, e, f) -> happiness") + moore = _funky_classmethod( + 'moore', 9, 8, 7, docstring='moore(a, e, f) -> happiness' + ) def __init__(self, arg): self.inst_attr_inline = None #: an inline documented instance attr @@ -77,15 +82,15 @@ def __init__(self, arg): """a documented instance attribute""" self._private_inst_attr = None #: a private instance attribute - def __special1__(self): + def __special1__(self): # NoQA: PLW3201 """documented special method""" - def __special2__(self): + def __special2__(self): # NoQA: PLW3201 # undocumented special method pass -class CustomDict(dict): +class CustomDict(dict): # NoQA: FURB189 """Docstring.""" @@ -116,21 +121,21 @@ class InnerChild(Outer.Inner): class DocstringSig: def __new__(cls, *new_args, **new_kwargs): """__new__(cls, d, e=1) -> DocstringSig -First line of docstring + First line of docstring rest of docstring """ def __init__(self, *init_args, **init_kwargs): """__init__(self, a, b=1) -> None -First line of docstring + First line of docstring rest of docstring """ def meth(self): """meth(FOO, BAR=1) -> BAZ -First line of docstring + First line of docstring rest of docstring """ @@ -157,7 +162,7 @@ def prop2(self): return 456 -class StrRepr(str): +class StrRepr(str): # NoQA: FURB189,SLOT000 """docstring""" def __repr__(self): @@ -176,7 +181,7 @@ class InstAttCls: #: It can have multiple lines. ca1 = 'a' - ca2 = 'b' #: Doc comment for InstAttCls.ca2. One line only. + ca2 = 'b' #: Doc comment for InstAttCls.ca2. One line only. ca3 = 'c' """Docstring for class attribute InstAttCls.ca3.""" @@ -197,8 +202,8 @@ def __init__(self): def __iter__(self): """Iterate squares of each value.""" for i in self.values: - yield i ** 2 + yield i**2 def snafucate(self): """Makes this snafucated.""" - print("snafucated") + print('snafucated') diff --git a/tests/roots/test-ext-autodoc/target/_functions_to_import.py b/tests/roots/test-ext-autodoc/target/_functions_to_import.py index 7663e979842..5e96c9f9ff6 100644 --- a/tests/roots/test-ext-autodoc/target/_functions_to_import.py +++ b/tests/roots/test-ext-autodoc/target/_functions_to_import.py @@ -4,5 +4,5 @@ from sphinx.application import Sphinx -def function_to_be_imported(app: Optional["Sphinx"]) -> str: +def function_to_be_imported(app: Optional['Sphinx']) -> str: """docstring""" diff --git a/tests/roots/test-ext-autodoc/target/annotated.py b/tests/roots/test-ext-autodoc/target/annotated.py index 7adc3e0f152..d9cdb83b9e9 100644 --- a/tests/roots/test-ext-autodoc/target/annotated.py +++ b/tests/roots/test-ext-autodoc/target/annotated.py @@ -24,7 +24,7 @@ def validate(value: str) -> str: ValidatedString = Annotated[str, FuncValidator(validate)] -def hello(name: Annotated[str, "attribute"]) -> None: +def hello(name: Annotated[str, 'attribute']) -> None: """docstring""" pass @@ -33,7 +33,7 @@ class AnnotatedAttributes: """docstring""" #: Docstring about the ``name`` attribute. - name: Annotated[str, "attribute"] + name: Annotated[str, 'attribute'] #: Docstring about the ``max_len`` attribute. 
max_len: list[Annotated[str, MaxLen(10, ['word_one', 'word_two'])]] diff --git a/tests/roots/test-ext-autodoc/target/autoclass_content.py b/tests/roots/test-ext-autodoc/target/autoclass_content.py index 52b98064a14..d5900ccf237 100644 --- a/tests/roots/test-ext-autodoc/target/autoclass_content.py +++ b/tests/roots/test-ext-autodoc/target/autoclass_content.py @@ -4,30 +4,35 @@ class A: class B: """A class having __init__(no docstring), no __new__""" + def __init__(self): pass class C: """A class having __init__, no __new__""" + def __init__(self): """__init__ docstring""" class D: """A class having no __init__, __new__(no docstring)""" + def __new__(cls): pass class E: """A class having no __init__, __new__""" + def __new__(cls): """__new__ docstring""" class F: """A class having both __init__ and __new__""" + def __init__(self): """__init__ docstring""" @@ -37,11 +42,13 @@ def __new__(cls): class G(C): """A class inherits __init__ without docstring.""" + def __init__(self): pass class H(E): """A class inherits __new__ without docstring.""" + def __init__(self): pass diff --git a/tests/roots/test-ext-autodoc/target/autodoc_type_aliases.py b/tests/roots/test-ext-autodoc/target/autodoc_type_aliases.py index f2c07a0c7cc..4ba37706799 100644 --- a/tests/roots/test-ext-autodoc/target/autodoc_type_aliases.py +++ b/tests/roots/test-ext-autodoc/target/autodoc_type_aliases.py @@ -1,6 +1,6 @@ from __future__ import annotations -import io +import io # NoQA: TC003 from typing import Optional, overload myint = int @@ -12,7 +12,7 @@ variable2 = None # type: myint #: docstring -variable3: Optional[myint] +variable3: Optional[myint] # NoQA: UP007 def read(r: io.BytesIO) -> io.StringIO: @@ -25,13 +25,11 @@ def sum(x: myint, y: myint) -> myint: @overload -def mult(x: myint, y: myint) -> myint: - ... +def mult(x: myint, y: myint) -> myint: ... @overload -def mult(x: float, y: float) -> float: - ... +def mult(x: float, y: float) -> float: ... 
def mult(x, y): diff --git a/tests/roots/test-ext-autodoc/target/classes.py b/tests/roots/test-ext-autodoc/target/classes.py index e5cce7a69de..fd36ded9525 100644 --- a/tests/roots/test-ext-autodoc/target/classes.py +++ b/tests/roots/test-ext-autodoc/target/classes.py @@ -1,7 +1,7 @@ from __future__ import annotations from inspect import Parameter, Signature -from typing import List, Union +from typing import List, Union # NoQA: UP035 class Foo: @@ -19,15 +19,20 @@ def __new__(cls, x, y): class Qux: - __signature__ = Signature(parameters=[Parameter('foo', Parameter.POSITIONAL_OR_KEYWORD), - Parameter('bar', Parameter.POSITIONAL_OR_KEYWORD)]) + __signature__ = Signature( + parameters=[ + Parameter('foo', Parameter.POSITIONAL_OR_KEYWORD), + Parameter('bar', Parameter.POSITIONAL_OR_KEYWORD), + ] + ) def __init__(self, x, y): pass -class Quux(List[Union[int, float]]): +class Quux(List[Union[int, float]]): # NoQA: UP006,UP007 """A subclass of List[Union[int, float]]""" + pass diff --git a/tests/roots/test-ext-autodoc/target/coroutine.py b/tests/roots/test-ext-autodoc/target/coroutine.py index f977b6e77e3..d1355ed4a8e 100644 --- a/tests/roots/test-ext-autodoc/target/coroutine.py +++ b/tests/roots/test-ext-autodoc/target/coroutine.py @@ -22,8 +22,8 @@ async def do_asyncgen(self): yield -async def _other_coro_func(): - return "run" +async def _other_coro_func(): # NoQA: RUF029 + return 'run' def myawait(f): @@ -31,6 +31,7 @@ def myawait(f): def wrapper(*args, **kwargs): awaitable = f(*args, **kwargs) return asyncio.run(awaitable) + return wrapper diff --git a/tests/roots/test-ext-autodoc/target/decorator.py b/tests/roots/test-ext-autodoc/target/decorator.py index faad3fff954..444a70f6f0f 100644 --- a/tests/roots/test-ext-autodoc/target/decorator.py +++ b/tests/roots/test-ext-autodoc/target/decorator.py @@ -3,6 +3,7 @@ def deco1(func): """docstring for deco1""" + @wraps(func) def wrapper(): return func() @@ -12,11 +13,13 @@ def wrapper(): def deco2(condition, message): """docstring for deco2""" + def decorator(func): def wrapper(): return func() return wrapper + return decorator diff --git a/tests/roots/test-ext-autodoc/target/descriptor.py b/tests/roots/test-ext-autodoc/target/descriptor.py index 2857c99f9d4..2c7d7389b4e 100644 --- a/tests/roots/test-ext-autodoc/target/descriptor.py +++ b/tests/roots/test-ext-autodoc/target/descriptor.py @@ -11,7 +11,7 @@ def __get__(self, obj, type=None): def meth(self): """Function.""" - return "The Answer" + return 'The Answer' class CustomDataDescriptorMeta(type): @@ -20,11 +20,12 @@ class CustomDataDescriptorMeta(type): class CustomDataDescriptor2(CustomDataDescriptor): """Descriptor class with custom metaclass docstring.""" + __metaclass__ = CustomDataDescriptorMeta class Class: - descr = CustomDataDescriptor("Descriptor instance docstring.") + descr = CustomDataDescriptor('Descriptor instance docstring.') @property def prop(self): diff --git a/tests/roots/test-ext-autodoc/target/docstring_signature.py b/tests/roots/test-ext-autodoc/target/docstring_signature.py index 981d936cd13..a6c5aa504c5 100644 --- a/tests/roots/test-ext-autodoc/target/docstring_signature.py +++ b/tests/roots/test-ext-autodoc/target/docstring_signature.py @@ -4,12 +4,14 @@ class A: class B: """B(foo, bar)""" + def __init__(self): """B(foo, bar, baz)""" class C: """C(foo, bar)""" + def __new__(cls): """C(foo, bar, baz)""" @@ -21,13 +23,13 @@ def __init__(self): class E: def __init__(self): - """E(foo: int, bar: int, baz: int) -> None \\ - E(foo: str, bar: str, baz: str) -> None \\ - 
E(foo: float, bar: float, baz: float)""" + r"""E(foo: int, bar: int, baz: int) -> None \ + E(foo: str, bar: str, baz: str) -> None \ + E(foo: float, bar: float, baz: float)""" # NoQA: D209 class F: def __init__(self): """F(foo: int, bar: int, baz: int) -> None F(foo: str, bar: str, baz: str) -> None - F(foo: float, bar: float, baz: float)""" + F(foo: float, bar: float, baz: float)""" # NoQA: D209 diff --git a/tests/roots/test-ext-autodoc/target/empty_all.py b/tests/roots/test-ext-autodoc/target/empty_all.py index c094cff70fe..b01855aceaa 100644 --- a/tests/roots/test-ext-autodoc/target/empty_all.py +++ b/tests/roots/test-ext-autodoc/target/empty_all.py @@ -1,6 +1,7 @@ """ docsting of empty_all module. """ + __all__ = [] diff --git a/tests/roots/test-ext-autodoc/target/functions.py b/tests/roots/test-ext-autodoc/target/functions.py index 0265fb34612..54c8803a745 100644 --- a/tests/roots/test-ext-autodoc/target/functions.py +++ b/tests/roots/test-ext-autodoc/target/functions.py @@ -9,14 +9,16 @@ async def coroutinefunc(): pass -async def asyncgenerator(): +async def asyncgenerator(): # NoQA: RUF029 yield + partial_func = partial(func) partial_coroutinefunc = partial(coroutinefunc) builtin_func = print partial_builtin_func = partial(print) -def slice_arg_func(arg: 'float64[:, :]'): + +def slice_arg_func(arg: 'float64[:, :]'): # NoQA: F821 pass diff --git a/tests/roots/test-ext-autodoc/target/generic_class.py b/tests/roots/test-ext-autodoc/target/generic_class.py index 1ec80584db3..957681ae485 100644 --- a/tests/roots/test-ext-autodoc/target/generic_class.py +++ b/tests/roots/test-ext-autodoc/target/generic_class.py @@ -9,5 +9,6 @@ # __init__ signature. class A(Generic[T]): """docstring for A""" + def __init__(self, a, b=None): pass diff --git a/tests/roots/test-ext-autodoc/target/genericalias.py b/tests/roots/test-ext-autodoc/target/genericalias.py index 06026fbbc12..fee22881b26 100644 --- a/tests/roots/test-ext-autodoc/target/genericalias.py +++ b/tests/roots/test-ext-autodoc/target/genericalias.py @@ -12,5 +12,6 @@ class Class: #: A list of int T = List[int] + #: A list of Class L = List[Class] diff --git a/tests/roots/test-ext-autodoc/target/inheritance.py b/tests/roots/test-ext-autodoc/target/inheritance.py index e06f7a842b2..5c65aa65afd 100644 --- a/tests/roots/test-ext-autodoc/target/inheritance.py +++ b/tests/roots/test-ext-autodoc/target/inheritance.py @@ -10,7 +10,7 @@ def inheritedclassmeth(cls): """Inherited class method.""" @staticmethod - def inheritedstaticmeth(cls): + def inheritedstaticmeth(cls): # NoQA: PLW0211 """Inherited static method.""" @@ -20,6 +20,6 @@ def inheritedmeth(self): pass -class MyList(list): +class MyList(list): # NoQA: FURB189 def meth(self): """docstring""" diff --git a/tests/roots/test-ext-autodoc/target/inherited_annotations.py b/tests/roots/test-ext-autodoc/target/inherited_annotations.py index 3ae58a852e4..896cc1fb83d 100644 --- a/tests/roots/test-ext-autodoc/target/inherited_annotations.py +++ b/tests/roots/test-ext-autodoc/target/inherited_annotations.py @@ -1,17 +1,19 @@ """ - Test case for #11387 corner case involving inherited - members with type annotations on python 3.9 and earlier +Test case for #11387 corner case involving inherited +members with type annotations on python 3.9 and earlier """ + class HasTypeAnnotatedMember: inherit_me: int """Inherited""" + class NoTypeAnnotation(HasTypeAnnotatedMember): a = 1 """Local""" + class NoTypeAnnotation2(HasTypeAnnotatedMember): a = 1 """Local""" - diff --git 
a/tests/roots/test-ext-autodoc/target/name_conflict/__init__.py b/tests/roots/test-ext-autodoc/target/name_conflict/__init__.py index 0a6f4965305..6ed930c9335 100644 --- a/tests/roots/test-ext-autodoc/target/name_conflict/__init__.py +++ b/tests/roots/test-ext-autodoc/target/name_conflict/__init__.py @@ -3,4 +3,5 @@ class foo: """docstring of target.name_conflict::foo.""" + pass diff --git a/tests/roots/test-ext-autodoc/target/need_mocks.py b/tests/roots/test-ext-autodoc/target/need_mocks.py index 881220bd09e..1b8af7055d6 100644 --- a/tests/roots/test-ext-autodoc/target/need_mocks.py +++ b/tests/roots/test-ext-autodoc/target/need_mocks.py @@ -2,16 +2,16 @@ import missing_package1.missing_module1 from missing_module import missing_name from missing_package2 import missing_module2 -from missing_package3.missing_module3 import missing_name +from missing_package3.missing_module3 import missing_name # NoQA: F811 import sphinx.missing_module4 from sphinx.missing_module4 import missing_name2 @missing_name(int) -def decoratedFunction(): - """decoratedFunction docstring""" - return None +def decorated_function(): + """decorated_function docstring""" + return None # NoQA: RET501 def func(arg: missing_module.Class): @@ -26,13 +26,14 @@ class TestAutodoc: Alias = missing_module2.Class @missing_name - def decoratedMethod(self): - """TestAutodoc::decoratedMethod docstring""" - return None + def decorated_method(self): + """TestAutodoc::decorated_method docstring""" + return None # NoQA: RET501 class Inherited(missing_module.Class): """docstring""" + pass diff --git a/tests/roots/test-ext-autodoc/target/overload.py b/tests/roots/test-ext-autodoc/target/overload.py index 4bcb6ea3cad..e8826993fcf 100644 --- a/tests/roots/test-ext-autodoc/target/overload.py +++ b/tests/roots/test-ext-autodoc/target/overload.py @@ -4,18 +4,15 @@ @overload -def sum(x: int, y: int = 0) -> int: - ... +def sum(x: int, y: int = 0) -> int: ... @overload -def sum(x: float, y: float = 0.0) -> float: - ... +def sum(x: float, y: float = 0.0) -> float: ... @overload -def sum(x: str, y: str = ...) -> str: - ... +def sum(x: str, y: str = ...) -> str: ... def sum(x, y=None): @@ -27,16 +24,13 @@ class Math: """docstring""" @overload - def sum(self, x: int, y: int = 0) -> int: - ... + def sum(self, x: int, y: int = 0) -> int: ... @overload - def sum(self, x: float, y: float = 0.0) -> float: - ... + def sum(self, x: float, y: float = 0.0) -> float: ... @overload - def sum(self, x: str, y: str = ...) -> str: - ... + def sum(self, x: str, y: str = ...) -> str: ... def sum(self, x, y=None): """docstring""" @@ -47,12 +41,10 @@ class Foo: """docstring""" @overload - def __new__(cls, x: int, y: int) -> Foo: - ... + def __new__(cls, x: int, y: int) -> Foo: ... @overload - def __new__(cls, x: str, y: str) -> Foo: - ... + def __new__(cls, x: str, y: str) -> Foo: ... def __new__(cls, x, y): pass @@ -62,12 +54,10 @@ class Bar: """docstring""" @overload - def __init__(cls, x: int, y: int) -> None: - ... + def __init__(cls, x: int, y: int) -> None: ... @overload - def __init__(cls, x: str, y: str) -> None: - ... + def __init__(cls, x: str, y: str) -> None: ... def __init__(cls, x, y): pass @@ -75,12 +65,10 @@ def __init__(cls, x, y): class Meta(type): @overload - def __call__(cls, x: int, y: int) -> Any: - ... + def __call__(cls, x: int, y: int) -> Any: ... @overload - def __call__(cls, x: str, y: str) -> Any: - ... + def __call__(cls, x: str, y: str) -> Any: ... 
def __call__(cls, x, y): pass diff --git a/tests/roots/test-ext-autodoc/target/partialfunction.py b/tests/roots/test-ext-autodoc/target/partialfunction.py index 3be63eeee6e..30ba045ad26 100644 --- a/tests/roots/test-ext-autodoc/target/partialfunction.py +++ b/tests/roots/test-ext-autodoc/target/partialfunction.py @@ -8,5 +8,5 @@ def func1(a, b, c): func2 = partial(func1, 1) func3 = partial(func2, 2) -func3.__doc__ = "docstring of func3" +func3.__doc__ = 'docstring of func3' func4 = partial(func3, 3) diff --git a/tests/roots/test-ext-autodoc/target/preserve_defaults.py b/tests/roots/test-ext-autodoc/target/preserve_defaults.py index 86e103840d2..8985edae6a8 100644 --- a/tests/roots/test-ext-autodoc/target/preserve_defaults.py +++ b/tests/roots/test-ext-autodoc/target/preserve_defaults.py @@ -7,32 +7,48 @@ SENTINEL = object() -def foo(name: str = CONSTANT, - sentinel: Any = SENTINEL, - now: datetime = datetime.now(), - color: int = 0xFFFFFF, - *, - kwarg1, - kwarg2 = 0xFFFFFF) -> None: +def foo( + name: str = CONSTANT, + sentinel: Any = SENTINEL, + now: datetime = datetime.now(), # NoQA: B008,DTZ005 + color: int = 0xFFFFFF, + *, + kwarg1, + kwarg2=0xFFFFFF, +) -> None: """docstring""" class Class: """docstring""" - def meth(self, name: str = CONSTANT, sentinel: Any = SENTINEL, - now: datetime = datetime.now(), color: int = 0xFFFFFF, - *, kwarg1, kwarg2 = 0xFFFFFF) -> None: + def meth( + self, + name: str = CONSTANT, + sentinel: Any = SENTINEL, + now: datetime = datetime.now(), # NoQA: B008,DTZ005 + color: int = 0xFFFFFF, + *, + kwarg1, + kwarg2=0xFFFFFF, + ) -> None: """docstring""" @classmethod - def clsmeth(cls, name: str = CONSTANT, sentinel: Any = SENTINEL, - now: datetime = datetime.now(), color: int = 0xFFFFFF, - *, kwarg1, kwarg2 = 0xFFFFFF) -> None: + def clsmeth( + cls, + name: str = CONSTANT, + sentinel: Any = SENTINEL, + now: datetime = datetime.now(), # NoQA: B008,DTZ005 + color: int = 0xFFFFFF, + *, + kwarg1, + kwarg2=0xFFFFFF, + ) -> None: """docstring""" -get_sentinel = lambda custom=SENTINEL: custom +get_sentinel = lambda custom=SENTINEL: custom # NoQA: E731 """docstring""" @@ -44,17 +60,19 @@ class MultiLine: # only prop3 will not fail because it's on a single line whereas the others # will fail to parse. 
+ # fmt: off prop1 = property( - lambda self: 1, doc="docstring") + lambda self: 1, doc='docstring') prop2 = property( - lambda self: 2, doc="docstring" + lambda self: 2, doc='docstring' ) - prop3 = property(lambda self: 3, doc="docstring") + prop3 = property(lambda self: 3, doc='docstring') prop4 = (property - (lambda self: 4, doc="docstring")) + (lambda self: 4, doc='docstring')) prop5 = property\ - (lambda self: 5, doc="docstring") + (lambda self: 5, doc='docstring') # NoQA: E211 + # fmt: on diff --git a/tests/roots/test-ext-autodoc/target/preserve_defaults_special_constructs.py b/tests/roots/test-ext-autodoc/target/preserve_defaults_special_constructs.py index 0fdb11ac874..ba397f86711 100644 --- a/tests/roots/test-ext-autodoc/target/preserve_defaults_special_constructs.py +++ b/tests/roots/test-ext-autodoc/target/preserve_defaults_special_constructs.py @@ -9,7 +9,7 @@ #: docstring -ze_lambda = lambda z=SENTINEL: None +ze_lambda = lambda z=SENTINEL: None # NoQA: E731 def foo(x, y, z=SENTINEL): @@ -19,6 +19,7 @@ def foo(x, y, z=SENTINEL): @dataclass class DataClass: """docstring""" + a: int b: object = SENTINEL c: list[int] = field(default_factory=lambda: [1, 2, 3]) @@ -27,6 +28,7 @@ class DataClass: @dataclass(init=False) class DataClassNoInit: """docstring""" + a: int b: object = SENTINEL c: list[int] = field(default_factory=lambda: [1, 2, 3]) @@ -34,6 +36,7 @@ class DataClassNoInit: class MyTypedDict(TypedDict): """docstring""" + a: int b: object c: list[int] @@ -41,10 +44,11 @@ class MyTypedDict(TypedDict): class MyNamedTuple1(NamedTuple): """docstring""" + a: int b: object = object() c: list[int] = [1, 2, 3] -class MyNamedTuple2(namedtuple('Base', ('a', 'b'), defaults=(0, SENTINEL))): +class MyNamedTuple2(namedtuple('Base', ('a', 'b'), defaults=(0, SENTINEL))): # NoQA: PYI024,SLOT002 """docstring""" diff --git a/tests/roots/test-ext-autodoc/target/private.py b/tests/roots/test-ext-autodoc/target/private.py index e46344818a7..de8a43c0b47 100644 --- a/tests/roots/test-ext-autodoc/target/private.py +++ b/tests/roots/test-ext-autodoc/target/private.py @@ -4,6 +4,7 @@ def private_function(name): :meta private: """ + def _public_function(name): """public_function is a docstring(). 
diff --git a/tests/roots/test-ext-autodoc/target/singledispatch.py b/tests/roots/test-ext-autodoc/target/singledispatch.py index 3dd5aaf388a..718504e5273 100644 --- a/tests/roots/test-ext-autodoc/target/singledispatch.py +++ b/tests/roots/test-ext-autodoc/target/singledispatch.py @@ -33,4 +33,3 @@ def _func_dict(arg: dict, kwarg=None): """A function for dict.""" # This function tests for specifying type through annotations pass - diff --git a/tests/roots/test-ext-autodoc/target/slots.py b/tests/roots/test-ext-autodoc/target/slots.py index 75c7a4a5227..3fa3f0798c4 100644 --- a/tests/roots/test-ext-autodoc/target/slots.py +++ b/tests/roots/test-ext-autodoc/target/slots.py @@ -7,9 +7,11 @@ class Foo: class Bar: """docstring""" - __slots__ = {'attr1': 'docstring of attr1', - 'attr2': 'docstring of attr2', - 'attr3': None} + __slots__ = { + 'attr1': 'docstring of attr1', + 'attr2': 'docstring of attr2', + 'attr3': None, + } __annotations__ = {'attr1': int} def __init__(self): @@ -19,4 +21,4 @@ def __init__(self): class Baz: """docstring""" - __slots__ = 'attr' + __slots__ = 'attr' # NoQA: PLC0205 diff --git a/tests/roots/test-ext-autodoc/target/typed_vars.py b/tests/roots/test-ext-autodoc/target/typed_vars.py index 0fe7468c84f..a87bd6accd3 100644 --- a/tests/roots/test-ext-autodoc/target/typed_vars.py +++ b/tests/roots/test-ext-autodoc/target/typed_vars.py @@ -8,8 +8,9 @@ class _Descriptor: def __init__(self, name): - self.__doc__ = f"This is {name}" - def __get__(self): + self.__doc__ = f'This is {name}' + + def __get__(self): # NoQA: PLE0302 pass @@ -18,12 +19,14 @@ class Class: attr2: int attr3 = 0 # type: int - descr4: int = _Descriptor("descr4") + descr4: int = _Descriptor('descr4') def __init__(self): + # fmt: off self.attr4: int = 0 #: attr4 self.attr5: int #: attr5 self.attr6 = 0 # type: int + # fmt: on """attr6""" diff --git a/tests/roots/test-ext-autodoc/target/typehints.py b/tests/roots/test-ext-autodoc/target/typehints.py index 90715945f14..589325cbf13 100644 --- a/tests/roots/test-ext-autodoc/target/typehints.py +++ b/tests/roots/test-ext-autodoc/target/typehints.py @@ -1,22 +1,22 @@ from __future__ import annotations import pathlib -from typing import Any, Tuple, TypeVar, Union +from typing import Any, Tuple, TypeVar, Union # NoQA: UP035 CONST1: int #: docstring CONST2: int = 1 #: docstring -CONST3: pathlib.PurePosixPath = pathlib.PurePosixPath("/a/b/c") +CONST3: pathlib.PurePosixPath = pathlib.PurePosixPath('/a/b/c') #: docstring -T = TypeVar("T", bound=pathlib.PurePosixPath) +T = TypeVar('T', bound=pathlib.PurePosixPath) def incr(a: int, b: int = 1) -> int: return a + b -def decr(a, b = 1): +def decr(a, b=1): # type: (int, int) -> int return a - b @@ -24,7 +24,7 @@ def decr(a, b = 1): class Math: CONST1: int CONST2: int = 1 - CONST3: pathlib.PurePosixPath = pathlib.PurePosixPath("/a/b/c") + CONST3: pathlib.PurePosixPath = pathlib.PurePosixPath('/a/b/c') def __init__(self, s: str, o: Any = None) -> None: pass @@ -32,7 +32,7 @@ def __init__(self, s: str, o: Any = None) -> None: def incr(self, a: int, b: int = 1) -> int: return a + b - def decr(self, a, b = 1): + def decr(self, a, b=1): # type: (int, int) -> int return a - b @@ -40,10 +40,11 @@ def nothing(self): # type: () -> None pass - def horse(self, - a, # type: str - b, # type: int - ): + def horse( + self, + a, # type: str + b, # type: int + ): # type: (...) 
-> None return @@ -53,7 +54,7 @@ def prop(self) -> int: @property def path(self) -> pathlib.PurePosixPath: - return pathlib.PurePosixPath("/a/b/c") + return pathlib.PurePosixPath('/a/b/c') def tuple_args(x: tuple[int, int | str]) -> tuple[int, int]: @@ -61,7 +62,7 @@ def tuple_args(x: tuple[int, int | str]) -> tuple[int, int]: class NewAnnotation: - def __new__(cls, i: int) -> NewAnnotation: + def __new__(cls, i: int) -> NewAnnotation: # NoQA: PYI034 pass @@ -85,12 +86,13 @@ def complex_func(arg1, arg2, arg3=None, *args, **kwargs): pass -def missing_attr(c, - a, # type: str - b=None # type: Optional[str] - ): +def missing_attr( + c, + a, # type: str + b=None, # type: Optional[str] +): # type: (...) -> str - return a + (b or "") + return a + (b or '') class _ClassWithDocumentedInit: diff --git a/tests/roots/test-ext-autodoc/target/typevar.py b/tests/roots/test-ext-autodoc/target/typevar.py index 1a02f3e2e76..6cc088a8219 100644 --- a/tests/roots/test-ext-autodoc/target/typevar.py +++ b/tests/roots/test-ext-autodoc/target/typevar.py @@ -4,29 +4,29 @@ from typing import NewType, TypeVar #: T1 -T1 = TypeVar("T1") +T1 = TypeVar('T1') -T2 = TypeVar("T2") # A TypeVar not having doc comment +T2 = TypeVar('T2') # A TypeVar not having doc comment #: T3 -T3 = TypeVar("T3", int, str) +T3 = TypeVar('T3', int, str) #: T4 -T4 = TypeVar("T4", covariant=True) +T4 = TypeVar('T4', covariant=True) # NoQA: PLC0105 #: T5 -T5 = TypeVar("T5", contravariant=True) +T5 = TypeVar('T5', contravariant=True) # NoQA: PLC0105 #: T6 -T6 = NewType("T6", date) +T6 = NewType('T6', date) #: T7 -T7 = TypeVar("T7", bound=int) +T7 = TypeVar('T7', bound=int) class Class: #: T1 - T1 = TypeVar("T1") + T1 = TypeVar('T1') #: T6 - T6 = NewType("T6", date) + T6 = NewType('T6', date) diff --git a/tests/roots/test-ext-autodoc/target/wrappedfunction.py b/tests/roots/test-ext-autodoc/target/wrappedfunction.py index 064d7774247..95b0ece0e6b 100644 --- a/tests/roots/test-ext-autodoc/target/wrappedfunction.py +++ b/tests/roots/test-ext-autodoc/target/wrappedfunction.py @@ -2,10 +2,10 @@ from contextlib import contextmanager from functools import lru_cache -from typing import Generator +from typing import Generator # NoQA: UP035 -@lru_cache(maxsize=None) +@lru_cache(maxsize=None) # NoQA: UP033 def slow_function(message, timeout): """This function is slow.""" print(message) diff --git a/tests/roots/test-autosummary/conf.py b/tests/roots/test-ext-autosummary-ext/conf.py similarity index 100% rename from tests/roots/test-autosummary/conf.py rename to tests/roots/test-ext-autosummary-ext/conf.py diff --git a/tests/roots/test-autosummary/dummy_module.py b/tests/roots/test-ext-autosummary-ext/dummy_module.py similarity index 81% rename from tests/roots/test-autosummary/dummy_module.py rename to tests/roots/test-ext-autosummary-ext/dummy_module.py index 4adc0313ecd..ac261c62805 100644 --- a/tests/roots/test-autosummary/dummy_module.py +++ b/tests/roots/test-ext-autosummary-ext/dummy_module.py @@ -10,27 +10,27 @@ """ -def withSentence(): - '''I have a sentence which +def with_sentence(): + """I have a sentence which spans multiple lines. Then I have more stuff - ''' + """ pass -def noSentence(): - '''this doesn't start with a +def no_sentence(): + """this doesn't start with a capital. so it's not considered a sentence - ''' + """ pass -def emptyLine(): - '''This is the real summary +def empty_line(): + """This is the real summary However, it did't end with a period. 
- ''' + """ pass @@ -41,11 +41,11 @@ def emptyLine(): class C: - ''' + """ My C class with class_attr attribute - ''' + """ #: This is a class attribute #: @@ -56,7 +56,7 @@ def __init__(self): #: This is an instance attribute #: #: value is a string - self.instance_attr = "42" + self.instance_attr = '42' def _prop_attr_get(self): """ @@ -76,9 +76,9 @@ def _prop_attr_get(self): """ class C2: - ''' + """ This is a nested inner class docstring - ''' + """ def func(arg_, *args, **kwargs): diff --git a/tests/roots/test-autosummary/index.rst b/tests/roots/test-ext-autosummary-ext/index.rst similarity index 100% rename from tests/roots/test-autosummary/index.rst rename to tests/roots/test-ext-autosummary-ext/index.rst diff --git a/tests/roots/test-autosummary/sphinx.rst b/tests/roots/test-ext-autosummary-ext/sphinx.rst similarity index 100% rename from tests/roots/test-autosummary/sphinx.rst rename to tests/roots/test-ext-autosummary-ext/sphinx.rst diff --git a/tests/roots/test-autosummary/underscore_module_.py b/tests/roots/test-ext-autosummary-ext/underscore_module_.py similarity index 68% rename from tests/roots/test-autosummary/underscore_module_.py rename to tests/roots/test-ext-autosummary-ext/underscore_module_.py index 8584e60787b..9ef28259075 100644 --- a/tests/roots/test-autosummary/underscore_module_.py +++ b/tests/roots/test-ext-autosummary-ext/underscore_module_.py @@ -4,12 +4,13 @@ class class_: - """ Class """ + """Class""" + def method_(_arg): - """ Method """ + """Method""" pass def function_(_arg): - """ Function """ + """Function""" pass diff --git a/tests/roots/test-ext-autosummary-filename-map/autosummary_dummy_module.py b/tests/roots/test-ext-autosummary-filename-map/autosummary_dummy_module.py index b88e33520b3..9ed0ce66877 100644 --- a/tests/roots/test-ext-autosummary-filename-map/autosummary_dummy_module.py +++ b/tests/roots/test-ext-autosummary-filename-map/autosummary_dummy_module.py @@ -5,7 +5,7 @@ class Foo: - class Bar: + class Bar: # NoQA: D106 pass def __init__(self): diff --git a/tests/roots/test-ext-autosummary-filename-map/conf.py b/tests/roots/test-ext-autosummary-filename-map/conf.py index ea64caec23d..7ef1c6fd7fb 100644 --- a/tests/roots/test-ext-autosummary-filename-map/conf.py +++ b/tests/roots/test-ext-autosummary-filename-map/conf.py @@ -6,6 +6,6 @@ extensions = ['sphinx.ext.autosummary'] autosummary_generate = True autosummary_filename_map = { - "autosummary_dummy_module": "module_mangled", - "autosummary_dummy_module.bar": "bar" + 'autosummary_dummy_module': 'module_mangled', + 'autosummary_dummy_module.bar': 'bar', } diff --git a/tests/roots/test-ext-autosummary-import_cycle/spam/eggs.py b/tests/roots/test-ext-autosummary-import_cycle/spam/eggs.py index 12122e88de8..9bef80f06a7 100644 --- a/tests/roots/test-ext-autosummary-import_cycle/spam/eggs.py +++ b/tests/roots/test-ext-autosummary-import_cycle/spam/eggs.py @@ -5,6 +5,7 @@ class Ham: """``spam.eggs.Ham`` class docstring.""" + a = 1 b = 2 c = 3 diff --git a/tests/roots/test-ext-autosummary-imported_members/autosummary_dummy_package/autosummary_dummy_module.py b/tests/roots/test-ext-autosummary-imported_members/autosummary_dummy_package/autosummary_dummy_module.py index 9c93f064e03..837a617093a 100644 --- a/tests/roots/test-ext-autosummary-imported_members/autosummary_dummy_package/autosummary_dummy_module.py +++ b/tests/roots/test-ext-autosummary-imported_members/autosummary_dummy_package/autosummary_dummy_module.py @@ -1,5 +1,6 @@ class Bar: """Bar class""" + pass diff --git 
a/tests/roots/test-ext-autosummary-mock_imports/foo.py b/tests/roots/test-ext-autosummary-mock_imports/foo.py index ab4460ef0af..36b8b4de5c2 100644 --- a/tests/roots/test-ext-autosummary-mock_imports/foo.py +++ b/tests/roots/test-ext-autosummary-mock_imports/foo.py @@ -3,4 +3,5 @@ class Foo(unknown.Class): """Foo class""" + pass diff --git a/tests/roots/test-ext-autosummary-module_all/autosummary_dummy_package_all/__init__.py b/tests/roots/test-ext-autosummary-module_all/autosummary_dummy_package_all/__init__.py index 82f2060fb58..e1ba34e63cf 100644 --- a/tests/roots/test-ext-autosummary-module_all/autosummary_dummy_package_all/__init__.py +++ b/tests/roots/test-ext-autosummary-module_all/autosummary_dummy_package_all/__init__.py @@ -10,4 +10,4 @@ def public_baz(): """Public Baz function""" -__all__ = ["PublicBar", "public_foo", "public_baz", "extra_dummy_module"] +__all__ = ['PublicBar', 'public_foo', 'public_baz', 'extra_dummy_module'] # NoQA: F822 diff --git a/tests/roots/test-ext-autosummary-recursive/package/module.py b/tests/roots/test-ext-autosummary-recursive/package/module.py index c76e7330246..30e6cb819e6 100644 --- a/tests/roots/test-ext-autosummary-recursive/package/module.py +++ b/tests/roots/test-ext-autosummary-recursive/package/module.py @@ -1,4 +1,4 @@ -from os import * +from os import * # NoQA: F403 class Foo: diff --git a/tests/roots/test-ext-autosummary-recursive/package/package/module.py b/tests/roots/test-ext-autosummary-recursive/package/package/module.py index c76e7330246..30e6cb819e6 100644 --- a/tests/roots/test-ext-autosummary-recursive/package/package/module.py +++ b/tests/roots/test-ext-autosummary-recursive/package/package/module.py @@ -1,4 +1,4 @@ -from os import * +from os import * # NoQA: F403 class Foo: diff --git a/tests/roots/test-ext-autosummary-recursive/package2/module.py b/tests/roots/test-ext-autosummary-recursive/package2/module.py index c76e7330246..30e6cb819e6 100644 --- a/tests/roots/test-ext-autosummary-recursive/package2/module.py +++ b/tests/roots/test-ext-autosummary-recursive/package2/module.py @@ -1,4 +1,4 @@ -from os import * +from os import * # NoQA: F403 class Foo: diff --git a/tests/roots/test-ext-autosummary-skip-member/conf.py b/tests/roots/test-ext-autosummary-skip-member/conf.py index f409bdc5c17..ff6cd3ccd00 100644 --- a/tests/roots/test-ext-autosummary-skip-member/conf.py +++ b/tests/roots/test-ext-autosummary-skip-member/conf.py @@ -13,6 +13,7 @@ def skip_member(app, what, name, obj, skip, options): return True elif name == '_privatemeth': return False + return None def setup(app): diff --git a/tests/roots/test-ext-autosummary/autosummary_class_module.py b/tests/roots/test-ext-autosummary/autosummary_class_module.py index 2b1f40419d6..050e8a09605 100644 --- a/tests/roots/test-ext-autosummary/autosummary_class_module.py +++ b/tests/roots/test-ext-autosummary/autosummary_class_module.py @@ -1,2 +1,2 @@ class Class: - pass + pass diff --git a/tests/roots/test-ext-autosummary/autosummary_dummy_inherited_module.py b/tests/roots/test-ext-autosummary/autosummary_dummy_inherited_module.py index 2b3d2da84ca..3672c13b400 100644 --- a/tests/roots/test-ext-autosummary/autosummary_dummy_inherited_module.py +++ b/tests/roots/test-ext-autosummary/autosummary_dummy_inherited_module.py @@ -2,12 +2,11 @@ class InheritedAttrClass(Foo): - def __init__(self): #: other docstring - self.subclassattr = "subclassattr" + self.subclassattr = 'subclassattr' super().__init__() -__all__ = ["InheritedAttrClass"] +__all__ = ['InheritedAttrClass'] diff --git 
a/tests/roots/test-ext-autosummary/autosummary_dummy_module.py b/tests/roots/test-ext-autosummary/autosummary_dummy_module.py index 2d8829a2375..c8575d3a122 100644 --- a/tests/roots/test-ext-autosummary/autosummary_dummy_module.py +++ b/tests/roots/test-ext-autosummary/autosummary_dummy_module.py @@ -4,13 +4,13 @@ from autosummary_class_module import Class __all__ = [ - "CONSTANT1", - "Exc", - "Foo", - "_Baz", - "bar", - "qux", - "path", + 'CONSTANT1', + 'Exc', + 'Foo', + '_Baz', + 'bar', + 'qux', + 'path', ] #: module variable @@ -23,7 +23,7 @@ class Foo: CONSTANT3 = None CONSTANT4 = None - class Bar: + class Bar: # NoQA: D106 pass def __init__(self): @@ -42,7 +42,7 @@ class _Baz: pass -def bar(x: Union[int, str], y: int = 1) -> None: +def bar(x: Union[int, str], y: int = 1) -> None: # NoQA: UP007 pass diff --git a/tests/roots/test-ext-coverage/grog/coverage_missing.py b/tests/roots/test-ext-coverage/grog/coverage_missing.py index 2fe44338caa..b737f0d3776 100644 --- a/tests/roots/test-ext-coverage/grog/coverage_missing.py +++ b/tests/roots/test-ext-coverage/grog/coverage_missing.py @@ -1,5 +1,6 @@ """This module is intentionally not documented.""" + class Missing: """An undocumented class.""" diff --git a/tests/roots/test-ext-doctest-skipif/conf.py b/tests/roots/test-ext-doctest-skipif/conf.py index ae00e35407f..fc999b43494 100644 --- a/tests/roots/test-ext-doctest-skipif/conf.py +++ b/tests/roots/test-ext-doctest-skipif/conf.py @@ -3,16 +3,16 @@ project = 'test project for the doctest :skipif: directive' root_doc = 'skipif' source_suffix = { - '.txt': 'restructuredtext' + '.txt': 'restructuredtext', } exclude_patterns = ['_build'] -doctest_global_setup = ''' +doctest_global_setup = """ from tests.test_extensions.test_ext_doctest import record record('doctest_global_setup', 'body', True) -''' +""" -doctest_global_cleanup = ''' +doctest_global_cleanup = """ record('doctest_global_cleanup', 'body', True) -''' +""" diff --git a/tests/roots/test-ext-doctest/conf.py b/tests/roots/test-ext-doctest/conf.py index 57fc40607b6..ce73258e5b6 100644 --- a/tests/roots/test-ext-doctest/conf.py +++ b/tests/roots/test-ext-doctest/conf.py @@ -3,6 +3,6 @@ project = 'test project for doctest' root_doc = 'doctest' source_suffix = { - '.txt': 'restructuredtext' + '.txt': 'restructuredtext', } exclude_patterns = ['_build'] diff --git a/tests/roots/test-ext-graphviz/conf.py b/tests/roots/test-ext-graphviz/conf.py index 317457ff95b..1a12f2c2b54 100644 --- a/tests/roots/test-ext-graphviz/conf.py +++ b/tests/roots/test-ext-graphviz/conf.py @@ -1,3 +1,3 @@ extensions = ['sphinx.ext.graphviz'] exclude_patterns = ['_build'] -html_static_path = ["_static"] +html_static_path = ['_static'] diff --git a/tests/roots/test-ext-imgmockconverter/mocksvgconverter.py b/tests/roots/test-ext-imgmockconverter/mocksvgconverter.py index 234732ddc0c..97b08a9a3a7 100644 --- a/tests/roots/test-ext-imgmockconverter/mocksvgconverter.py +++ b/tests/roots/test-ext-imgmockconverter/mocksvgconverter.py @@ -1,6 +1,4 @@ -""" - Does foo.svg --> foo.pdf with no change to the file. 
-""" +"""Does foo.svg --> foo.pdf with no change to the file.""" from __future__ import annotations diff --git a/tests/roots/test-ext-inheritance_diagram/test.py b/tests/roots/test-ext-inheritance_diagram/test.py index efb1c2a7f6e..0146c5d0f39 100644 --- a/tests/roots/test-ext-inheritance_diagram/test.py +++ b/tests/roots/test-ext-inheritance_diagram/test.py @@ -18,5 +18,5 @@ class DocMainLevel(Foo): pass -class Alice(object): +class Alice(object): # NoQA: UP004 pass diff --git a/tests/roots/test-ext-math-compat/conf.py b/tests/roots/test-ext-math-compat/conf.py index 85e3950a5d0..82cc265bc9f 100644 --- a/tests/roots/test-ext-math-compat/conf.py +++ b/tests/roots/test-ext-math-compat/conf.py @@ -4,7 +4,7 @@ extensions = ['sphinx.ext.mathjax'] -def my_math_role(role, rawtext, text, lineno, inliner, options={}, content=[]): +def my_math_role(role, rawtext, text, lineno, inliner, options={}, content=[]): # NoQA: B006 text = 'E = mc^2' return [nodes.math(text, text)], [] diff --git a/tests/roots/test-ext-napoleon-paramtype/conf.py b/tests/roots/test-ext-napoleon-paramtype/conf.py index 70d30ce0a65..1eb7bb0b5c1 100644 --- a/tests/roots/test-ext-napoleon-paramtype/conf.py +++ b/tests/roots/test-ext-napoleon-paramtype/conf.py @@ -6,7 +6,7 @@ extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.napoleon', - 'sphinx.ext.intersphinx' + 'sphinx.ext.intersphinx', ] # Python inventory is manually created in the test diff --git a/tests/roots/test-ext-napoleon-paramtype/pkg/bar.py b/tests/roots/test-ext-napoleon-paramtype/pkg/bar.py index e1ae794c799..a1560f99141 100644 --- a/tests/roots/test-ext-napoleon-paramtype/pkg/bar.py +++ b/tests/roots/test-ext-napoleon-paramtype/pkg/bar.py @@ -1,5 +1,6 @@ class Bar: """The bar.""" + def list(self) -> None: """A list method.""" @@ -7,4 +8,3 @@ def list(self) -> None: def int() -> float: """An int method.""" return 1.0 - diff --git a/tests/roots/test-ext-napoleon-paramtype/pkg/foo.py b/tests/roots/test-ext-napoleon-paramtype/pkg/foo.py index 6979f9e4a19..a6116f6d619 100644 --- a/tests/roots/test-ext-napoleon-paramtype/pkg/foo.py +++ b/tests/roots/test-ext-napoleon-paramtype/pkg/foo.py @@ -1,5 +1,6 @@ class Foo: """The foo.""" + def do( self, *, diff --git a/tests/roots/test-ext-viewcode-find-package/conf.py b/tests/roots/test-ext-viewcode-find-package/conf.py new file mode 100644 index 00000000000..cad4c5597de --- /dev/null +++ b/tests/roots/test-ext-viewcode-find-package/conf.py @@ -0,0 +1,24 @@ +import os +import sys + +source_dir = os.path.abspath('.') +if source_dir not in sys.path: + sys.path.insert(0, source_dir) +extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode'] +exclude_patterns = ['_build'] + + +if 'test_linkcode' in tags: # NoQA: F821 (tags is injected into conf.py) + extensions.remove('sphinx.ext.viewcode') + extensions.append('sphinx.ext.linkcode') + + def linkcode_resolve(domain, info): + if domain == 'py': + fn = info['module'].replace('.', '/') + return 'http://foobar/source/%s.py' % fn + elif domain == 'js': + return 'http://foobar/js/' + info['fullname'] + elif domain in {'c', 'cpp'}: + return 'http://foobar/%s/%s' % (domain, ''.join(info['names'])) + else: + raise AssertionError diff --git a/tests/roots/test-ext-viewcode-find-package/index.rst b/tests/roots/test-ext-viewcode-find-package/index.rst new file mode 100644 index 00000000000..b40d1cd06c5 --- /dev/null +++ b/tests/roots/test-ext-viewcode-find-package/index.rst @@ -0,0 +1,10 @@ +viewcode +======== + +.. currentmodule:: main_package.subpackage.submodule + +.. 
autofunction:: func1 + +.. autoclass:: Class1 + +.. autoclass:: Class3 diff --git a/tests/roots/test-ext-viewcode-find-package/main_package/__init__.py b/tests/roots/test-ext-viewcode-find-package/main_package/__init__.py new file mode 100644 index 00000000000..7654a53caa9 --- /dev/null +++ b/tests/roots/test-ext-viewcode-find-package/main_package/__init__.py @@ -0,0 +1 @@ +from main_package import subpackage diff --git a/tests/roots/test-ext-viewcode-find-package/main_package/subpackage/__init__.py b/tests/roots/test-ext-viewcode-find-package/main_package/subpackage/__init__.py new file mode 100644 index 00000000000..a1e31add516 --- /dev/null +++ b/tests/roots/test-ext-viewcode-find-package/main_package/subpackage/__init__.py @@ -0,0 +1,3 @@ +from main_package.subpackage._subpackage2 import submodule + +__all__ = ['submodule'] diff --git a/tests/roots/test-ext-viewcode-find-package/main_package/subpackage/_subpackage2/__init__.py b/tests/roots/test-ext-viewcode-find-package/main_package/subpackage/_subpackage2/__init__.py new file mode 100644 index 00000000000..8b137891791 --- /dev/null +++ b/tests/roots/test-ext-viewcode-find-package/main_package/subpackage/_subpackage2/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/roots/test-ext-viewcode-find-package/main_package/subpackage/_subpackage2/submodule.py b/tests/roots/test-ext-viewcode-find-package/main_package/subpackage/_subpackage2/submodule.py new file mode 100644 index 00000000000..a8f8acc8317 --- /dev/null +++ b/tests/roots/test-ext-viewcode-find-package/main_package/subpackage/_subpackage2/submodule.py @@ -0,0 +1,32 @@ +""" +submodule +""" +# raise RuntimeError('This module should not get imported') + + +def decorator(f): + return f + + +@decorator +def func1(a, b): + """ + this is func1 + """ + return a, b + + +@decorator +class Class1: + """ + this is Class1 + """ + + +class Class3: + """ + this is Class3 + """ + + class_attr = 42 + """this is the class attribute class_attr""" diff --git a/tests/roots/test-ext-viewcode-find/not_a_package/submodule.py b/tests/roots/test-ext-viewcode-find/not_a_package/submodule.py index ba8be78de5f..9b6d09166c1 100644 --- a/tests/roots/test-ext-viewcode-find/not_a_package/submodule.py +++ b/tests/roots/test-ext-viewcode-find/not_a_package/submodule.py @@ -1,7 +1,8 @@ """ submodule """ -raise RuntimeError('This module should not get imported') + +raise RuntimeError('This module should not get imported') # NoQA: EM101 def decorator(f): @@ -27,5 +28,6 @@ class Class3: """ this is Class3 """ + class_attr = 42 """this is the class attribute class_attr""" diff --git a/tests/roots/test-ext-viewcode/conf.py b/tests/roots/test-ext-viewcode/conf.py index 0a5d2fcb06b..a15e2eab586 100644 --- a/tests/roots/test-ext-viewcode/conf.py +++ b/tests/roots/test-ext-viewcode/conf.py @@ -1,23 +1,30 @@ import sys from pathlib import Path +from sphinx.ext.linkcode import add_linkcode_domain + sys.path.insert(0, str(Path.cwd().resolve())) extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode'] exclude_patterns = ['_build'] -if 'test_linkcode' in tags: +if 'test_linkcode' in tags: # NoQA: F821 (tags is injected into conf.py) extensions.remove('sphinx.ext.viewcode') extensions.append('sphinx.ext.linkcode') def linkcode_resolve(domain, info): if domain == 'py': fn = info['module'].replace('.', '/') - return "https://foobar/source/%s.py" % fn - elif domain == "js": - return "https://foobar/js/" + info['fullname'] - elif domain in ("c", "cpp"): - return f"https://foobar/{domain}/{''.join(info['names'])}" + return 
'https://foobar/source/%s.py' % fn + elif domain == 'js': + return 'https://foobar/js/' + info['fullname'] + elif domain in {'c', 'cpp'}: + return f'https://foobar/{domain}/{"".join(info["names"])}' + elif domain == 'rst': + return 'http://foobar/rst/{fullname}'.format(**info) else: - raise AssertionError() + raise AssertionError + + def setup(app): + add_linkcode_domain('rst', ['fullname']) diff --git a/tests/roots/test-ext-viewcode/objects.rst b/tests/roots/test-ext-viewcode/objects.rst index 114adbf2eb1..bf492e2630d 100644 --- a/tests/roots/test-ext-viewcode/objects.rst +++ b/tests/roots/test-ext-viewcode/objects.rst @@ -167,3 +167,10 @@ CPP domain .. cpp:function:: T& operator[]( unsigned j ) const T& operator[]( unsigned j ) const + +rST domain +========== + +.. rst:role:: foo + + Foo description. diff --git a/tests/roots/test-ext-viewcode/spam/mod1.py b/tests/roots/test-ext-viewcode/spam/mod1.py index a078328c283..13f43282202 100644 --- a/tests/roots/test-ext-viewcode/spam/mod1.py +++ b/tests/roots/test-ext-viewcode/spam/mod1.py @@ -26,5 +26,6 @@ class Class3: """ this is Class3 """ + class_attr = 42 """this is the class attribute class_attr""" diff --git a/tests/roots/test-extensions/read_parallel.py b/tests/roots/test-extensions/read_parallel.py index a3e052f9570..08770105543 100644 --- a/tests/roots/test-extensions/read_parallel.py +++ b/tests/roots/test-extensions/read_parallel.py @@ -1,4 +1,4 @@ def setup(app): return { - 'parallel_read_safe': True + 'parallel_read_safe': True, } diff --git a/tests/roots/test-extensions/read_serial.py b/tests/roots/test-extensions/read_serial.py index c55570a5c44..4910d4f7faf 100644 --- a/tests/roots/test-extensions/read_serial.py +++ b/tests/roots/test-extensions/read_serial.py @@ -1,4 +1,4 @@ def setup(app): return { - 'parallel_read_safe': False + 'parallel_read_safe': False, } diff --git a/tests/roots/test-extensions/write_serial.py b/tests/roots/test-extensions/write_serial.py index 75494ce7772..6e2910870cb 100644 --- a/tests/roots/test-extensions/write_serial.py +++ b/tests/roots/test-extensions/write_serial.py @@ -1,4 +1,4 @@ def setup(app): return { - 'parallel_write_safe': False + 'parallel_write_safe': False, } diff --git a/tests/roots/test-highlight_options/conf.py b/tests/roots/test-highlight_options/conf.py index 90997d44482..a8ee7730db9 100644 --- a/tests/roots/test-highlight_options/conf.py +++ b/tests/roots/test-highlight_options/conf.py @@ -1,4 +1,4 @@ highlight_options = { 'default': {'default_option': True}, - 'python': {'python_option': True} + 'python': {'python_option': True}, } diff --git a/tests/roots/test-html_assets/conf.py b/tests/roots/test-html_assets/conf.py index 7f94bbbce73..7212d4ac687 100644 --- a/tests/roots/test-html_assets/conf.py +++ b/tests/roots/test-html_assets/conf.py @@ -3,10 +3,18 @@ html_static_path = ['static', 'subdir'] html_extra_path = ['extra', 'subdir'] -html_css_files = ['css/style.css', - ('https://example.com/custom.css', - {'title': 'title', 'media': 'print', 'priority': 400})] -html_js_files = ['js/custom.js', - ('https://example.com/script.js', - {'async': 'async', 'priority': 400})] +html_css_files = [ + 'css/style.css', + ( + 'https://example.com/custom.css', + {'title': 'title', 'media': 'print', 'priority': 400}, + ), +] +html_js_files = [ + 'js/custom.js', + ( + 'https://example.com/script.js', + {'async': 'async', 'priority': 400}, + ), +] exclude_patterns = ['**/_build', '**/.htpasswd'] diff --git a/tests/roots/test-image-in-parsed-literal/conf.py 
b/tests/roots/test-image-in-parsed-literal/conf.py index 5d06da63366..69ad26aa4e9 100644 --- a/tests/roots/test-image-in-parsed-literal/conf.py +++ b/tests/roots/test-image-in-parsed-literal/conf.py @@ -1,9 +1,9 @@ exclude_patterns = ['_build'] -rst_epilog = ''' +rst_epilog = """ .. |picture| image:: pic.png :height: 1cm :scale: 200% :align: middle :alt: alternative_text -''' +""" diff --git a/tests/roots/test-image-in-section/conf.py b/tests/roots/test-image-in-section/conf.py index 9cb250c1aef..08b11db4f63 100644 --- a/tests/roots/test-image-in-section/conf.py +++ b/tests/roots/test-image-in-section/conf.py @@ -1,8 +1,8 @@ exclude_patterns = ['_build'] -rst_epilog = ''' +rst_epilog = """ .. |picture| image:: pic.png :width: 15pt :height: 15pt :alt: alternative_text -''' +""" diff --git a/tests/roots/test-inheritance/dummy/test.py b/tests/roots/test-inheritance/dummy/test.py index 12fe8d900f3..75c65cd540f 100644 --- a/tests/roots/test-inheritance/dummy/test.py +++ b/tests/roots/test-inheritance/dummy/test.py @@ -8,7 +8,7 @@ / \ / \ E D F -""" +""" # NoQA: D208 class A: diff --git a/tests/roots/test-inheritance/dummy/test_nested.py b/tests/roots/test-inheritance/dummy/test_nested.py index 4b6801892ec..ccbde523d11 100644 --- a/tests/roots/test-inheritance/dummy/test_nested.py +++ b/tests/roots/test-inheritance/dummy/test_nested.py @@ -1,9 +1,8 @@ -"""Test with nested classes. -""" +"""Test with nested classes.""" class A: - class B: + class B: # NoQA: D106 pass diff --git a/tests/roots/test-intl/conf.py b/tests/roots/test-intl/conf.py index 09c47bb0637..2b49de56e7a 100644 --- a/tests/roots/test-intl/conf.py +++ b/tests/roots/test-intl/conf.py @@ -1,6 +1,6 @@ project = 'Sphinx intl ' source_suffix = { - '.txt': 'restructuredtext' + '.txt': 'restructuredtext', } keep_warnings = True templates_path = ['_templates'] diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/definition_terms.po b/tests/roots/test-intl/xx/LC_MESSAGES/definition_terms.po index a19c9d19ff9..9c7ab7d2a38 100644 --- a/tests/roots/test-intl/xx/LC_MESSAGES/definition_terms.po +++ b/tests/roots/test-intl/xx/LC_MESSAGES/definition_terms.po @@ -1,47 +1,47 @@ -# SOME DESCRIPTIVE TITLE. -# Copyright (C) 2012, foof -# This file is distributed under the same license as the foo package. -# FIRST AUTHOR , YEAR. -# -#, fuzzy -msgid "" -msgstr "" -"Project-Id-Version: sphinx 1.0\n" -"Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2013-01-01 05:00+0000\n" -"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" -"Last-Translator: FULL NAME \n" -"Language-Team: LANGUAGE \n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" - -msgid "i18n with definition terms" -msgstr "I18N WITH DEFINITION TERMS" - -msgid "Some term" -msgstr "SOME TERM" - -msgid "The corresponding definition" -msgstr "THE CORRESPONDING DEFINITION" - -msgid "Some *term* `with link `__" -msgstr "SOME *TERM* `WITH LINK `__" - -msgid "The corresponding definition #2" -msgstr "THE CORRESPONDING DEFINITION #2" - -msgid "Some **term** with" -msgstr "SOME **TERM** WITH" - -msgid "classifier1" -msgstr "CLASSIFIER1" - -msgid "classifier2" -msgstr "CLASSIFIER2" - -msgid "Some term with" -msgstr "SOME TERM WITH" - -msgid "classifier[]" -msgstr "CLASSIFIER[]" +# SOME DESCRIPTIVE TITLE. +# Copyright (C) 2012, foof +# This file is distributed under the same license as the foo package. +# FIRST AUTHOR , YEAR. 
+# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: sphinx 1.0\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2013-01-01 05:00+0000\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME \n" +"Language-Team: LANGUAGE \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "i18n with definition terms" +msgstr "I18N WITH DEFINITION TERMS" + +msgid "Some term" +msgstr "SOME TERM" + +msgid "The corresponding definition" +msgstr "THE CORRESPONDING DEFINITION" + +msgid "Some *term* `with link `__" +msgstr "SOME *TERM* `WITH LINK `__" + +msgid "The corresponding definition #2" +msgstr "THE CORRESPONDING DEFINITION #2" + +msgid "Some **term** with" +msgstr "SOME **TERM** WITH" + +msgid "classifier1" +msgstr "CLASSIFIER1" + +msgid "classifier2" +msgstr "CLASSIFIER2" + +msgid "Some term with" +msgstr "SOME TERM WITH" + +msgid "classifier[]" +msgstr "CLASSIFIER[]" diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/glossary_terms_inconsistency.po b/tests/roots/test-intl/xx/LC_MESSAGES/glossary_terms_inconsistency.po index 048b81f9555..1dfb4bec8dd 100644 --- a/tests/roots/test-intl/xx/LC_MESSAGES/glossary_terms_inconsistency.po +++ b/tests/roots/test-intl/xx/LC_MESSAGES/glossary_terms_inconsistency.po @@ -1,26 +1,26 @@ -# SOME DESCRIPTIVE TITLE. -# Copyright (C) 2012, foof -# This file is distributed under the same license as the foo package. -# FIRST AUTHOR , YEAR. -# -#, fuzzy -msgid "" -msgstr "" -"Project-Id-Version: sphinx 1.0\n" -"Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2013-01-29 14:10+0000\n" -"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" -"Last-Translator: FULL NAME \n" -"Language-Team: LANGUAGE \n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" - -msgid "i18n with glossary terms inconsistency" -msgstr "I18N WITH GLOSSARY TERMS INCONSISTENCY" - -msgid "link to :term:`Some term` and :term:`Some other term`." -msgstr "LINK TO :term:`SOME NEW TERM`." - -msgid "link to :term:`Some term`." -msgstr "LINK TO :term:`TERM NOT IN GLOSSARY`." +# SOME DESCRIPTIVE TITLE. +# Copyright (C) 2012, foof +# This file is distributed under the same license as the foo package. +# FIRST AUTHOR , YEAR. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: sphinx 1.0\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2013-01-29 14:10+0000\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME \n" +"Language-Team: LANGUAGE \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "i18n with glossary terms inconsistency" +msgstr "I18N WITH GLOSSARY TERMS INCONSISTENCY" + +msgid "link to :term:`Some term` and :term:`Some other term`." +msgstr "LINK TO :term:`SOME NEW TERM`." + +msgid "link to :term:`Some term`." +msgstr "LINK TO :term:`TERM NOT IN GLOSSARY`." diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/rubric.po b/tests/roots/test-intl/xx/LC_MESSAGES/rubric.po index 91376236d82..e4b097a68dc 100644 --- a/tests/roots/test-intl/xx/LC_MESSAGES/rubric.po +++ b/tests/roots/test-intl/xx/LC_MESSAGES/rubric.po @@ -1,29 +1,29 @@ -# SOME DESCRIPTIVE TITLE. -# Copyright (C) 2012, foof -# This file is distributed under the same license as the foo package. -# FIRST AUTHOR , YEAR. 
-# -#, fuzzy -msgid "" -msgstr "" -"Project-Id-Version: sphinx 1.0\n" -"Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2013-11-12 07:00+0000\n" -"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" -"Last-Translator: FULL NAME \n" -"Language-Team: LANGUAGE \n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" - -msgid "i18n with rubric" -msgstr "I18N WITH RUBRIC" - -msgid "rubric title" -msgstr "RUBRIC TITLE" - -msgid "rubric in the block" -msgstr "RUBRIC IN THE BLOCK" - -msgid "block" -msgstr "BLOCK" +# SOME DESCRIPTIVE TITLE. +# Copyright (C) 2012, foof +# This file is distributed under the same license as the foo package. +# FIRST AUTHOR , YEAR. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: sphinx 1.0\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2013-11-12 07:00+0000\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME \n" +"Language-Team: LANGUAGE \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "i18n with rubric" +msgstr "I18N WITH RUBRIC" + +msgid "rubric title" +msgstr "RUBRIC TITLE" + +msgid "rubric in the block" +msgstr "RUBRIC IN THE BLOCK" + +msgid "block" +msgstr "BLOCK" diff --git a/tests/roots/test-latex-includegraphics/conf.py b/tests/roots/test-latex-includegraphics/conf.py index 65c19ab859c..86570bdfea3 100644 --- a/tests/roots/test-latex-includegraphics/conf.py +++ b/tests/roots/test-latex-includegraphics/conf.py @@ -1,7 +1,7 @@ exclude_patterns = ['_build'] latex_elements = { - 'preamble': r''' + 'preamble': r""" \makeatletter \def\dividetwolengths#1#2{\the\dimexpr \numexpr65536*\dimexpr#1\relax/\dimexpr#2\relax sp}% @@ -43,5 +43,5 @@ } \def\sphinxincludegraphics#1#{\tempincludegraphics#1} \makeatother -''', +""", } diff --git a/tests/roots/test-latex-labels-before-module/automodule1.py b/tests/roots/test-latex-labels-before-module/automodule1.py index 0545aa42705..fb1e2b64861 100644 --- a/tests/roots/test-latex-labels-before-module/automodule1.py +++ b/tests/roots/test-latex-labels-before-module/automodule1.py @@ -1,2 +1 @@ """docstring""" - diff --git a/tests/roots/test-latex-labels-before-module/automodule2a.py b/tests/roots/test-latex-labels-before-module/automodule2a.py index 0545aa42705..fb1e2b64861 100644 --- a/tests/roots/test-latex-labels-before-module/automodule2a.py +++ b/tests/roots/test-latex-labels-before-module/automodule2a.py @@ -1,2 +1 @@ """docstring""" - diff --git a/tests/roots/test-latex-labels-before-module/automodule2b.py b/tests/roots/test-latex-labels-before-module/automodule2b.py index 0545aa42705..fb1e2b64861 100644 --- a/tests/roots/test-latex-labels-before-module/automodule2b.py +++ b/tests/roots/test-latex-labels-before-module/automodule2b.py @@ -1,2 +1 @@ """docstring""" - diff --git a/tests/roots/test-latex-labels-before-module/automodule3.py b/tests/roots/test-latex-labels-before-module/automodule3.py index 0545aa42705..fb1e2b64861 100644 --- a/tests/roots/test-latex-labels-before-module/automodule3.py +++ b/tests/roots/test-latex-labels-before-module/automodule3.py @@ -1,2 +1 @@ """docstring""" - diff --git a/tests/roots/test-latex-numfig/conf.py b/tests/roots/test-latex-numfig/conf.py index 287bd1c9b6e..7d92eddc256 100644 --- a/tests/roots/test-latex-numfig/conf.py +++ b/tests/roots/test-latex-numfig/conf.py @@ -1,8 +1,6 @@ extensions = ['sphinx.ext.imgmath'] # for math_numfig latex_documents = [ - ('indexmanual', 'SphinxManual.tex', 'Test numfig manual', - 'Sphinx', 'manual'), - 
('indexhowto', 'SphinxHowTo.tex', 'Test numfig howto', - 'Sphinx', 'howto'), + ('indexmanual', 'SphinxManual.tex', 'Test numfig manual', 'Sphinx', 'manual'), + ('indexhowto', 'SphinxHowTo.tex', 'Test numfig howto', 'Sphinx', 'howto'), ] diff --git a/tests/roots/test-latex-title/conf.py b/tests/roots/test-latex-title/conf.py index 64433165b73..2059c9f994d 100644 --- a/tests/roots/test-latex-title/conf.py +++ b/tests/roots/test-latex-title/conf.py @@ -1,4 +1,4 @@ # set empty string to the third column to use the first section title to document title latex_documents = [ - ('index', 'test.tex', '', 'Sphinx', 'report') + ('index', 'test.tex', '', 'Sphinx', 'report'), ] diff --git a/tests/roots/test-local-logo/conf.py b/tests/roots/test-local-logo/conf.py index 1a166c13058..580424fc77e 100644 --- a/tests/roots/test-local-logo/conf.py +++ b/tests/roots/test-local-logo/conf.py @@ -1,4 +1,10 @@ latex_documents = [ - ('index', 'test.tex', 'The basic Sphinx documentation for testing', 'Sphinx', 'report') + ( + 'index', + 'test.tex', + 'The basic Sphinx documentation for testing', + 'Sphinx', + 'report', + ) ] -html_logo = "images/img.png" +html_logo = 'images/img.png' diff --git a/tests/roots/test-markup-citation/conf.py b/tests/roots/test-markup-citation/conf.py index e274bde806b..facde90889e 100644 --- a/tests/roots/test-markup-citation/conf.py +++ b/tests/roots/test-markup-citation/conf.py @@ -1,3 +1,9 @@ latex_documents = [ - ('index', 'test.tex', 'The basic Sphinx documentation for testing', 'Sphinx', 'report') + ( + 'index', + 'test.tex', + 'The basic Sphinx documentation for testing', + 'Sphinx', + 'report', + ) ] diff --git a/tests/roots/test-markup-rubric/conf.py b/tests/roots/test-markup-rubric/conf.py index eccdbf78895..78d95a237d6 100644 --- a/tests/roots/test-markup-rubric/conf.py +++ b/tests/roots/test-markup-rubric/conf.py @@ -1,4 +1,10 @@ latex_documents = [ - ('index', 'test.tex', 'The basic Sphinx documentation for testing', 'Sphinx', 'report') + ( + 'index', + 'test.tex', + 'The basic Sphinx documentation for testing', + 'Sphinx', + 'report', + ) ] latex_toplevel_sectioning = 'section' diff --git a/tests/roots/test-pycode/cp_1251_coded.py b/tests/roots/test-pycode/cp_1251_coded.py index 43d98f354d0..ca849762241 100644 --- a/tests/roots/test-pycode/cp_1251_coded.py +++ b/tests/roots/test-pycode/cp_1251_coded.py @@ -1,4 +1,4 @@ -#!python -# -*- coding: windows-1251 -*- - -X="Õ" #:It MUST look like X="Õ" \ No newline at end of file +#!python +# -*- coding: windows-1251 -*- + +X="Ð¥" #:It MUST look like X="Ð¥" \ No newline at end of file diff --git a/tests/roots/test-remote-logo/conf.py b/tests/roots/test-remote-logo/conf.py index 07949ba91fc..b96edae1de7 100644 --- a/tests/roots/test-remote-logo/conf.py +++ b/tests/roots/test-remote-logo/conf.py @@ -1,5 +1,11 @@ latex_documents = [ - ('index', 'test.tex', 'The basic Sphinx documentation for testing', 'Sphinx', 'report') + ( + 'index', + 'test.tex', + 'The basic Sphinx documentation for testing', + 'Sphinx', + 'report', + ) ] -html_logo = "https://www.python.org/static/img/python-logo.png" -html_favicon = "https://www.python.org/static/favicon.ico" +html_logo = 'https://www.python.org/static/img/python-logo.png' +html_favicon = 'https://www.python.org/static/favicon.ico' diff --git a/tests/roots/test-roles-download/conf.py b/tests/roots/test-roles-download/conf.py index e274bde806b..facde90889e 100644 --- a/tests/roots/test-roles-download/conf.py +++ b/tests/roots/test-roles-download/conf.py @@ -1,3 +1,9 @@ latex_documents = [ - 
('index', 'test.tex', 'The basic Sphinx documentation for testing', 'Sphinx', 'report') + ( + 'index', + 'test.tex', + 'The basic Sphinx documentation for testing', + 'Sphinx', + 'report', + ) ] diff --git a/tests/roots/test-root/autodoc_target.py b/tests/roots/test-root/autodoc_target.py index 59f6c74d64c..e320bddbb4f 100644 --- a/tests/roots/test-root/autodoc_target.py +++ b/tests/roots/test-root/autodoc_target.py @@ -32,7 +32,7 @@ def __get__(self, obj, type=None): def meth(self): """Function.""" - return "The Answer" + return 'The Answer' class CustomDataDescriptorMeta(type): @@ -41,15 +41,20 @@ class CustomDataDescriptorMeta(type): class CustomDataDescriptor2(CustomDataDescriptor): """Descriptor class with custom metaclass docstring.""" + __metaclass__ = CustomDataDescriptorMeta def _funky_classmethod(name, b, c, d, docstring=None): - """Generates a classmethod for a class from a template by filling out - some arguments.""" + """ + Generates a classmethod for a class from a template by filling out some arguments. + """ + def template(cls, a, b, c, d=4, e=5, f=6): return a, b, c, d, e, f + from functools import partial + function = partial(template, b=b, c=c, d=d) function.__name__ = name function.__doc__ = docstring @@ -70,7 +75,7 @@ def inheritedmeth(self): class Class(Base): """Class to document.""" - descr = CustomDataDescriptor("Descriptor instance docstring.") + descr = CustomDataDescriptor('Descriptor instance docstring.') def meth(self): """Function.""" @@ -104,10 +109,11 @@ def prop(self): mdocattr = StringIO() """should be documented as well - süß""" - roger = _funky_classmethod("roger", 2, 3, 4) + roger = _funky_classmethod('roger', 2, 3, 4) - moore = _funky_classmethod("moore", 9, 8, 7, - docstring="moore(a, e, f) -> happiness") + moore = _funky_classmethod( + 'moore', 9, 8, 7, docstring='moore(a, e, f) -> happiness' + ) def __init__(self, arg): self.inst_attr_inline = None #: an inline documented instance attr @@ -117,15 +123,15 @@ def __init__(self, arg): """a documented instance attribute""" self._private_inst_attr = None #: a private instance attribute - def __special1__(self): + def __special1__(self): # NoQA: PLW3201 """documented special method""" - def __special2__(self): + def __special2__(self): # NoQA: PLW3201 # undocumented special method pass -class CustomDict(dict): +class CustomDict(dict): # NoQA: FURB189 """Docstring.""" @@ -152,7 +158,7 @@ def meth(self): class DocstringSig: def meth(self): """meth(FOO, BAR=1) -> BAZ -First line of docstring + First line of docstring rest of docstring """ @@ -179,7 +185,7 @@ def prop2(self): return 456 -class StrRepr(str): +class StrRepr(str): # NoQA: FURB189,SLOT000 def __repr__(self): return self @@ -196,7 +202,7 @@ class InstAttCls: #: It can have multiple lines. ca1 = 'a' - ca2 = 'b' #: Doc comment for InstAttCls.ca2. One line only. + ca2 = 'b' #: Doc comment for InstAttCls.ca2. One line only. 
ca3 = 'c' """Docstring for class attribute InstAttCls.ca3.""" diff --git a/tests/roots/test-root/conf.py b/tests/roots/test-root/conf.py index 21ec2922e97..0a750789128 100644 --- a/tests/roots/test-root/conf.py +++ b/tests/roots/test-root/conf.py @@ -8,10 +8,12 @@ from sphinx import addnodes -extensions = ['sphinx.ext.autodoc', - 'sphinx.ext.todo', - 'sphinx.ext.coverage', - 'sphinx.ext.extlinks'] +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.todo', + 'sphinx.ext.coverage', + 'sphinx.ext.extlinks', +] jsmath_path = 'dummy.js' @@ -34,9 +36,16 @@ show_authors = True numfig = True -html_sidebars = {'**': ['localtoc.html', 'relations.html', 'sourcelink.html', - 'customsb.html', 'searchbox.html'], - 'index': ['contentssb.html', 'localtoc.html', 'globaltoc.html']} +html_sidebars = { + '**': [ + 'localtoc.html', + 'relations.html', + 'sourcelink.html', + 'customsb.html', + 'searchbox.html', + ], + 'index': ['contentssb.html', 'localtoc.html', 'globaltoc.html'], +} html_last_updated_fmt = '%b %d, %Y' html_context = {'hckey': 'hcval', 'hckey_co': 'wrong_hcval_co'} @@ -122,11 +131,13 @@ coverage_c_path = ['special/*.h'] coverage_c_regexes = {'function': r'^PyAPI_FUNC\(.*\)\s+([^_][\w_]+)'} -extlinks = {'issue': ('https://bugs.python.org/issue%s', 'issue %s'), - 'pyurl': ('https://python.org/%s', None)} +extlinks = { + 'issue': ('https://bugs.python.org/issue%s', 'issue %s'), + 'pyurl': ('https://python.org/%s', None), +} # modify tags from conf.py -tags.add('confpytag') +tags.add('confpytag') # NoQA: F821 (tags is injected into conf.py) # -- extension API @@ -149,8 +160,9 @@ def setup(app): import parsermod app.add_directive('clsdir', ClassDirective) - app.add_object_type('userdesc', 'userdescrole', '%s (userdesc)', - userdesc_parse, objname='user desc') + app.add_object_type( + 'userdesc', 'userdescrole', '%s (userdesc)', userdesc_parse, objname='user desc' + ) app.add_js_file('file://moo.js') app.add_source_suffix('.foo', 'foo') app.add_source_parser(parsermod.Parser) diff --git a/tests/roots/test-root/special/code.py b/tests/roots/test-root/special/code.py index b7934b2312c..624b494459e 100644 --- a/tests/roots/test-root/special/code.py +++ b/tests/roots/test-root/special/code.py @@ -1,2 +1,2 @@ -print("line 1") -print("line 2") +print('line 1') +print('line 2') diff --git a/tests/roots/test-stylesheets/conf.py b/tests/roots/test-stylesheets/conf.py index fa37130a5c9..d3c6f62d5e0 100644 --- a/tests/roots/test-stylesheets/conf.py +++ b/tests/roots/test-stylesheets/conf.py @@ -4,6 +4,6 @@ def setup(app): app.add_css_file('persistent.css') - app.add_css_file('default.css', title="Default") - app.add_css_file('alternate1.css', title="Alternate", rel="alternate stylesheet") - app.add_css_file('alternate2.css', rel="alternate stylesheet") + app.add_css_file('default.css', title='Default') + app.add_css_file('alternate1.css', title='Alternate', rel='alternate stylesheet') + app.add_css_file('alternate2.css', rel='alternate stylesheet') diff --git a/tests/roots/test-templating/conf.py b/tests/roots/test-templating/conf.py index 7a2baeda29e..52c2526911d 100644 --- a/tests/roots/test-templating/conf.py +++ b/tests/roots/test-templating/conf.py @@ -1,6 +1,6 @@ project = 'Sphinx templating ' source_suffix = { - '.txt': 'restructuredtext' + '.txt': 'restructuredtext', } keep_warnings = True templates_path = ['_templates'] diff --git a/tests/roots/test-util-copyasset_overwrite/myext.py b/tests/roots/test-util-copyasset_overwrite/myext.py index 5ef9e69e645..e1e6c8f89b3 100644 --- 
a/tests/roots/test-util-copyasset_overwrite/myext.py +++ b/tests/roots/test-util-copyasset_overwrite/myext.py @@ -6,14 +6,16 @@ def _copy_asset_overwrite_hook(app): css = app.outdir / '_static' / 'custom-styles.css' # html_static_path is copied by default - assert css.read_text(encoding='utf-8') == '/* html_static_path */\n', 'invalid default text' + css_content = css.read_text(encoding='utf-8') + assert css_content == '/* html_static_path */\n', 'invalid default text' # warning generated by here copy_asset( Path(__file__).resolve().parent.joinpath('myext_static', 'custom-styles.css'), app.outdir / '_static', ) # This demonstrates that no overwriting occurs - assert css.read_text(encoding='utf-8') == '/* html_static_path */\n', 'file overwritten!' + css_content = css.read_text(encoding='utf-8') + assert css_content == '/* html_static_path */\n', 'file overwritten!' return [] diff --git a/tests/roots/test-versioning/conf.py b/tests/roots/test-versioning/conf.py index d52d1f2746c..5520b516eee 100644 --- a/tests/roots/test-versioning/conf.py +++ b/tests/roots/test-versioning/conf.py @@ -1,5 +1,5 @@ project = 'versioning test root' source_suffix = { - '.txt': 'restructuredtext' + '.txt': 'restructuredtext', } exclude_patterns = ['_build'] diff --git a/tests/test_builders/test_build_html_code.py b/tests/test_builders/test_build_html_code.py index 349e1d1641a..02684b22e04 100644 --- a/tests/test_builders/test_build_html_code.py +++ b/tests/test_builders/test_build_html_code.py @@ -1,5 +1,6 @@ from __future__ import annotations +import pygments import pytest @@ -34,11 +35,16 @@ def test_html_codeblock_linenos_style_inline(app): @pytest.mark.sphinx('html', testroot='reST-code-role') def test_html_code_role(app): + if tuple(map(int, pygments.__version__.split('.')[:2])) >= (2, 19): + sp = ' ' + else: + sp = ' ' + app.build() content = (app.outdir / 'index.html').read_text(encoding='utf8') common_content = ( - 'def foo' + f'def{sp}foo' '(' '1 ' '+ ' diff --git a/tests/test_builders/test_build_html_maths.py b/tests/test_builders/test_build_html_maths.py index 9ab04922607..f3a96fccfa1 100644 --- a/tests/test_builders/test_build_html_maths.py +++ b/tests/test_builders/test_build_html_maths.py @@ -37,7 +37,7 @@ def test_html_math_renderer_is_duplicated(make_app, app_params): args, kwargs = app_params with pytest.raises( ConfigError, - match='Many math_renderers are registered. But no math_renderer is selected.', + match=r'Many math_renderers are registered\. 
But no math_renderer is selected\.', ): make_app(*args, **kwargs) @@ -74,5 +74,8 @@ def test_html_math_renderer_is_chosen(app): ) def test_html_math_renderer_is_mismatched(make_app, app_params): args, kwargs = app_params - with pytest.raises(ConfigError, match="Unknown math_renderer 'imgmath' is given."): + with pytest.raises( + ConfigError, + match=r"Unknown math_renderer 'imgmath' is given\.", + ): make_app(*args, **kwargs) diff --git a/tests/test_builders/test_build_latex.py b/tests/test_builders/test_build_latex.py index 71141d573d7..4626b7fbe47 100644 --- a/tests/test_builders/test_build_latex.py +++ b/tests/test_builders/test_build_latex.py @@ -11,6 +11,7 @@ from shutil import copyfile from subprocess import CalledProcessError +import pygments import pytest from sphinx.builders.latex import default_latex_documents @@ -100,7 +101,7 @@ def do_GET(self): if self.path == '/sphinx.png': with open('tests/roots/test-local-logo/images/img.png', 'rb') as f: content = f.read() - content_type = 'image/png' + content_type = 'image/png' if content: self.send_response(200, 'OK') @@ -2115,12 +2116,16 @@ def test_latex_container(app): @pytest.mark.sphinx('latex', testroot='reST-code-role') def test_latex_code_role(app): + if tuple(map(int, pygments.__version__.split('.')[:2])) >= (2, 19): + sp = r'\PYG{+w}{ }' + else: + sp = ' ' + app.build() content = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8') common_content = ( - r'\PYG{k}{def} ' - r'\PYG{n+nf}{foo}' + r'\PYG{k}{def}' + sp + r'\PYG{n+nf}{foo}' r'\PYG{p}{(}' r'\PYG{l+m+mi}{1} ' r'\PYG{o}{+} ' @@ -2136,7 +2141,7 @@ def test_latex_code_role(app): r'\PYG{k}{pass}' ) assert ( - r'Inline \sphinxcode{\sphinxupquote{%' # NoQA: ISC003 + r'Inline \sphinxcode{\sphinxupquote{%' + '\n' + common_content + '%\n}} code block' diff --git a/tests/test_builders/test_build_linkcheck.py b/tests/test_builders/test_build_linkcheck.py index c27ad189f3e..f1cc1c88ae1 100644 --- a/tests/test_builders/test_build_linkcheck.py +++ b/tests/test_builders/test_build_linkcheck.py @@ -500,7 +500,7 @@ def custom_handler( def authenticated( method: Callable[[CustomHandler], None], ) -> Callable[[CustomHandler], None]: - def method_if_authenticated(self): + def method_if_authenticated(self: CustomHandler) -> None: if expected_token is None: return method(self) elif not self.headers['Authorization']: @@ -512,6 +512,7 @@ def method_if_authenticated(self): self.send_response(403, 'Forbidden') self.send_header('Content-Length', '0') self.end_headers() + return None return method_if_authenticated @@ -925,7 +926,7 @@ class InfiniteRedirectOnHeadHandler(BaseHTTPRequestHandler): def do_HEAD(self): self.send_response(302, 'Found') - self.send_header('Location', '/') + self.send_header('Location', '/redirected') self.send_header('Content-Length', '0') self.end_headers() @@ -965,6 +966,55 @@ def test_TooManyRedirects_on_HEAD(app, monkeypatch): } +@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver') +def test_ignore_local_redirection(app): + with serve_application(app, InfiniteRedirectOnHeadHandler) as address: + app.config.linkcheck_ignore = [f'http://{address}/redirected'] + app.build() + + with open(app.outdir / 'output.json', encoding='utf-8') as fp: + content = json.load(fp) + assert content == { + 'code': 302, + 'status': 'ignored', + 'filename': 'index.rst', + 'lineno': 1, + 'uri': f'http://{address}/', + 'info': f'ignored redirect: http://{address}/redirected', + } + + +class RemoteDomainRedirectHandler(InfiniteRedirectOnHeadHandler): + 
protocol_version = 'HTTP/1.1' + + def do_GET(self): + self.send_response(301, 'Found') + if self.path == '/': + self.send_header('Location', '/local') + elif self.path == '/local': + self.send_header('Location', 'http://example.test/migrated') + self.send_header('Content-Length', '0') + self.end_headers() + + +@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver') +def test_ignore_remote_redirection(app): + with serve_application(app, RemoteDomainRedirectHandler) as address: + app.config.linkcheck_ignore = ['http://example.test'] + app.build() + + with open(app.outdir / 'output.json', encoding='utf-8') as fp: + content = json.load(fp) + assert content == { + 'code': 301, + 'status': 'ignored', + 'filename': 'index.rst', + 'lineno': 1, + 'uri': f'http://{address}/', + 'info': 'ignored redirect: http://example.test/migrated', + } + + def make_retry_after_handler( responses: list[tuple[int, str | None]], ) -> type[BaseHTTPRequestHandler]: diff --git a/tests/test_directives/test_directive_code.py b/tests/test_directives/test_directive_code.py index 5acebd08d60..d54b320ce16 100644 --- a/tests/test_directives/test_directive_code.py +++ b/tests/test_directives/test_directive_code.py @@ -2,6 +2,7 @@ from __future__ import annotations +import pygments import pytest from docutils import nodes @@ -358,7 +359,7 @@ def test_code_block_emphasize_latex(app): .read_text(encoding='utf8') .replace('\r\n', '\n') ) - includes = '\\fvset{hllines={, 5, 6, 13, 14, 15, 24, 25, 26,}}%\n' + includes = '\\fvset{hllines={, 6, 7, 16, 17, 18, 19, 29, 30, 31,}}%\n' assert includes in latex includes = '\\end{sphinxVerbatim}\n\\sphinxresetverbatimhllines\n' assert includes in latex @@ -394,6 +395,11 @@ def test_literal_include_block_start_with_comment_or_brank(app): @pytest.mark.sphinx('html', testroot='directive-code') def test_literal_include_linenos(app): + if tuple(map(int, pygments.__version__.split('.')[:2])) >= (2, 19): + sp = ' ' + else: + sp = ' ' + app.build(filenames=[app.srcdir / 'linenos.rst']) html = (app.outdir / 'linenos.html').read_text(encoding='utf8') @@ -411,7 +417,7 @@ def test_literal_include_linenos(app): # :lines: 5-9 assert ( - '5class ' + f'5class{sp}' 'Foo:' ) in html @@ -556,12 +562,17 @@ def test_code_block_highlighted(app): @pytest.mark.sphinx('html', testroot='directive-code') def test_linenothreshold(app): + if tuple(map(int, pygments.__version__.split('.')[:2])) >= (2, 19): + sp = ' ' + else: + sp = ' ' + app.build(filenames=[app.srcdir / 'linenothreshold.rst']) html = (app.outdir / 'linenothreshold.html').read_text(encoding='utf8') # code-block using linenothreshold assert ( - '1class ' + f'1class{sp}' 'Foo:' ) in html diff --git a/tests/test_directives/test_directive_object_description.py b/tests/test_directives/test_directive_object_description.py index 7f3ffba633a..501a6c9772a 100644 --- a/tests/test_directives/test_directive_object_description.py +++ b/tests/test_directives/test_directive_object_description.py @@ -14,21 +14,24 @@ from sphinx.util.docutils import sphinx_domains if TYPE_CHECKING: - from sphinx.builders import Builder + from sphinx.application import Sphinx + from sphinx.environment import BuildEnvironment -def _doctree_for_test(builder: Builder, docname: str) -> nodes.document: - builder.env.prepare_settings(docname) - publisher = create_publisher(builder.app, 'restructuredtext') - with sphinx_domains(builder.env): - publisher.set_source(source_path=str(builder.env.doc2path(docname))) +def _doctree_for_test( + app: Sphinx, env: BuildEnvironment, docname: str 
+) -> nodes.document: + env.prepare_settings(docname) + publisher = create_publisher(app, 'restructuredtext') + with sphinx_domains(env): + publisher.set_source(source_path=str(env.doc2path(docname))) publisher.publish() return publisher.document @pytest.mark.sphinx('text', testroot='object-description-sections') def test_object_description_sections(app): - doctree = _doctree_for_test(app.builder, 'index') + doctree = _doctree_for_test(app, app.env, 'index') # # # diff --git a/tests/test_domains/test_domain_c.py b/tests/test_domains/test_domain_c.py index 339938010f9..d9f432315b8 100644 --- a/tests/test_domains/test_domain_c.py +++ b/tests/test_domains/test_domain_c.py @@ -22,7 +22,7 @@ desc_signature_line, pending_xref, ) -from sphinx.domains.c._ids import _id_prefix, _macroKeywords, _max_id +from sphinx.domains.c._ids import _id_prefix, _macro_keywords, _max_id from sphinx.domains.c._parser import DefinitionParser from sphinx.domains.c._symbol import Symbol from sphinx.ext.intersphinx import load_mappings, validate_intersphinx_mapping @@ -38,7 +38,7 @@ class Config: c_id_attributes = ['id_attr', 'LIGHTGBM_C_EXPORT'] c_paren_attributes = ['paren_attr'] - c_extra_keywords = _macroKeywords + c_extra_keywords = _macro_keywords def parse(name, string): @@ -49,62 +49,62 @@ def parse(name, string): return ast -def _check(name, input, idDict, output, key, asTextOutput): +def _check(name, input, id_dict, output, key, as_text_output): if key is None: key = name key += ' ' if name in {'function', 'member'}: - inputActual = input - outputAst = output - outputAsText = output + input_actual = input + output_ast = output + output_as_text = output else: - inputActual = input.format(key='') - outputAst = output.format(key='') - outputAsText = output.format(key=key) - if asTextOutput is not None: - outputAsText = asTextOutput + input_actual = input.format(key='') + output_ast = output.format(key='') + output_as_text = output.format(key=key) + if as_text_output is not None: + output_as_text = as_text_output # first a simple check of the AST - ast = parse(name, inputActual) + ast = parse(name, input_actual) res = str(ast) - if res != outputAst: + if res != output_ast: print() print('Input: ', input) print('Result: ', res) - print('Expected: ', outputAst) + print('Expected: ', output_ast) raise DefinitionError - rootSymbol = Symbol(None, None, None, None, None) - symbol = rootSymbol.add_declaration(ast, docname='TestDoc', line=42) - parentNode = addnodes.desc() + root_symbol = Symbol(None, None, None, None, None) + symbol = root_symbol.add_declaration(ast, docname='TestDoc', line=42) + parent_node = addnodes.desc() signode = addnodes.desc_signature(input, '') - parentNode += signode + parent_node += signode ast.describe_signature(signode, 'lastIsName', symbol, options={}) - resAsText = parentNode.astext() - if resAsText != outputAsText: + res_as_text = parent_node.astext() + if res_as_text != output_as_text: print() print('Input: ', input) - print('astext(): ', resAsText) - print('Expected: ', outputAsText) + print('astext(): ', res_as_text) + print('Expected: ', output_as_text) raise DefinitionError - idExpected = [None] + id_expected = [None] for i in range(1, _max_id + 1): - if i in idDict: - idExpected.append(idDict[i]) + if i in id_dict: + id_expected.append(id_dict[i]) else: - idExpected.append(idExpected[i - 1]) - idActual = [None] + id_expected.append(id_expected[i - 1]) + id_actual = [None] for i in range(1, _max_id + 1): # try: id = ast.get_id(version=i) assert id is not None - 
idActual.append(id[len(_id_prefix[i]) :]) + id_actual.append(id[len(_id_prefix[i]) :]) # except NoOldIdError: - # idActual.append(None) + # id_actual.append(None) res = [True] for i in range(1, _max_id + 1): - res.append(idExpected[i] == idActual[i]) + res.append(id_expected[i] == id_actual[i]) if not all(res): print('input: %s' % input.rjust(20)) @@ -112,31 +112,31 @@ def _check(name, input, idDict, output, key, asTextOutput): if res[i]: continue print('Error in id version %d.' % i) - print('result: %s' % idActual[i]) - print('expected: %s' % idExpected[i]) - # print(rootSymbol.dump(0)) + print('result: %s' % id_actual[i]) + print('expected: %s' % id_expected[i]) + # print(root_symbol.dump(0)) raise DefinitionError -def check(name, input, idDict, output=None, key=None, asTextOutput=None): +def check(name, input, id_dict, output=None, key=None, as_text_output=None): if output is None: output = input # First, check without semicolon - _check(name, input, idDict, output, key, asTextOutput) + _check(name, input, id_dict, output, key, as_text_output) if name != 'macro': # Second, check with semicolon _check( name, input + ' ;', - idDict, + id_dict, output + ';', key, - asTextOutput + ';' if asTextOutput is not None else None, + as_text_output + ';' if as_text_output is not None else None, ) def test_domain_c_ast_expressions(): - def exprCheck(expr, output=None): + def expr_check(expr, output=None): parser = DefinitionParser(expr, location=None, config=Config()) parser.allowFallbackExpressionParsing = False ast = parser.parse_expression() @@ -151,30 +151,30 @@ def exprCheck(expr, output=None): print('Result: ', res) print('Expected: ', output) raise DefinitionError - displayString = ast.get_display_string() - if res != displayString: + display_string = ast.get_display_string() + if res != display_string: # note: if the expression contains an anon name then this will trigger a falsely print() print('Input: ', expr) print('Result: ', res) - print('Display: ', displayString) + print('Display: ', display_string) raise DefinitionError # type expressions - exprCheck('int*') - exprCheck('int *const*') - exprCheck('int *volatile*') - exprCheck('int *restrict*') - exprCheck('int *(*)(double)') - exprCheck('const int*') - exprCheck('__int64') - exprCheck('unsigned __int64') + expr_check('int*') + expr_check('int *const*') + expr_check('int *volatile*') + expr_check('int *restrict*') + expr_check('int *(*)(double)') + expr_check('const int*') + expr_check('__int64') + expr_check('unsigned __int64') # actual expressions # primary - exprCheck('true') - exprCheck('false') + expr_check('true') + expr_check('false') ints = [ '5', '0', @@ -188,15 +188,15 @@ def exprCheck(expr, output=None): "0x0'1'2", "1'2'3", ] - unsignedSuffix = ['', 'u', 'U'] - longSuffix = ['', 'l', 'L', 'll', 'LL'] + unsigned_suffix = ['', 'u', 'U'] + long_suffix = ['', 'l', 'L', 'll', 'LL'] for i in ints: - for u in unsignedSuffix: - for l in longSuffix: + for u in unsigned_suffix: + for l in long_suffix: expr = i + u + l - exprCheck(expr) + expr_check(expr) expr = i + l + u - exprCheck(expr) + expr_check(expr) for suffix in ('', 'f', 'F', 'l', 'L'): for e in ( '5e42', @@ -220,7 +220,7 @@ def exprCheck(expr, output=None): "1'2'3.4'5'6e7'8'9", ): expr = e + suffix - exprCheck(expr) + expr_check(expr) for e in ( 'ApF', 'Ap+F', @@ -243,94 +243,94 @@ def exprCheck(expr, output=None): "A'B'C.D'E'Fp1'2'3", ): expr = '0x' + e + suffix - exprCheck(expr) - exprCheck('"abc\\"cba"') # string + expr_check(expr) + expr_check('"abc\\"cba"') # string # 
character literals for p in ('', 'u8', 'u', 'U', 'L'): - exprCheck(p + "'a'") - exprCheck(p + "'\\n'") - exprCheck(p + "'\\012'") - exprCheck(p + "'\\0'") - exprCheck(p + "'\\x0a'") - exprCheck(p + "'\\x0A'") - exprCheck(p + "'\\u0a42'") - exprCheck(p + "'\\u0A42'") - exprCheck(p + "'\\U0001f34c'") - exprCheck(p + "'\\U0001F34C'") - - exprCheck('(5)') - exprCheck('C') + expr_check(p + "'a'") + expr_check(p + "'\\n'") + expr_check(p + "'\\012'") + expr_check(p + "'\\0'") + expr_check(p + "'\\x0a'") + expr_check(p + "'\\x0A'") + expr_check(p + "'\\u0a42'") + expr_check(p + "'\\u0A42'") + expr_check(p + "'\\U0001f34c'") + expr_check(p + "'\\U0001F34C'") + + expr_check('(5)') + expr_check('C') # postfix - exprCheck('A(2)') - exprCheck('A[2]') - exprCheck('a.b.c') - exprCheck('a->b->c') - exprCheck('i++') - exprCheck('i--') + expr_check('A(2)') + expr_check('A[2]') + expr_check('a.b.c') + expr_check('a->b->c') + expr_check('i++') + expr_check('i--') # unary - exprCheck('++5') - exprCheck('--5') - exprCheck('*5') - exprCheck('&5') - exprCheck('+5') - exprCheck('-5') - exprCheck('!5') - exprCheck('not 5') - exprCheck('~5') - exprCheck('compl 5') - exprCheck('sizeof(T)') - exprCheck('sizeof -42') - exprCheck('alignof(T)') + expr_check('++5') + expr_check('--5') + expr_check('*5') + expr_check('&5') + expr_check('+5') + expr_check('-5') + expr_check('!5') + expr_check('not 5') + expr_check('~5') + expr_check('compl 5') + expr_check('sizeof(T)') + expr_check('sizeof -42') + expr_check('alignof(T)') # cast - exprCheck('(int)2') + expr_check('(int)2') # binary op - exprCheck('5 || 42') - exprCheck('5 or 42') - exprCheck('5 && 42') - exprCheck('5 and 42') - exprCheck('5 | 42') - exprCheck('5 bitor 42') - exprCheck('5 ^ 42') - exprCheck('5 xor 42') - exprCheck('5 & 42') - exprCheck('5 bitand 42') + expr_check('5 || 42') + expr_check('5 or 42') + expr_check('5 && 42') + expr_check('5 and 42') + expr_check('5 | 42') + expr_check('5 bitor 42') + expr_check('5 ^ 42') + expr_check('5 xor 42') + expr_check('5 & 42') + expr_check('5 bitand 42') # ['==', '!='] - exprCheck('5 == 42') - exprCheck('5 != 42') - exprCheck('5 not_eq 42') + expr_check('5 == 42') + expr_check('5 != 42') + expr_check('5 not_eq 42') # ['<=', '>=', '<', '>'] - exprCheck('5 <= 42') - exprCheck('5 >= 42') - exprCheck('5 < 42') - exprCheck('5 > 42') + expr_check('5 <= 42') + expr_check('5 >= 42') + expr_check('5 < 42') + expr_check('5 > 42') # ['<<', '>>'] - exprCheck('5 << 42') - exprCheck('5 >> 42') + expr_check('5 << 42') + expr_check('5 >> 42') # ['+', '-'] - exprCheck('5 + 42') - exprCheck('5 - 42') + expr_check('5 + 42') + expr_check('5 - 42') # ['*', '/', '%'] - exprCheck('5 * 42') - exprCheck('5 / 42') - exprCheck('5 % 42') + expr_check('5 * 42') + expr_check('5 / 42') + expr_check('5 % 42') # ['.*', '->*'] # conditional # TODO # assignment - exprCheck('a = 5') - exprCheck('a *= 5') - exprCheck('a /= 5') - exprCheck('a %= 5') - exprCheck('a += 5') - exprCheck('a -= 5') - exprCheck('a >>= 5') - exprCheck('a <<= 5') - exprCheck('a &= 5') - exprCheck('a and_eq 5') - exprCheck('a ^= 5') - exprCheck('a xor_eq 5') - exprCheck('a |= 5') - exprCheck('a or_eq 5') + expr_check('a = 5') + expr_check('a *= 5') + expr_check('a /= 5') + expr_check('a %= 5') + expr_check('a += 5') + expr_check('a -= 5') + expr_check('a >>= 5') + expr_check('a <<= 5') + expr_check('a &= 5') + expr_check('a and_eq 5') + expr_check('a ^= 5') + expr_check('a xor_eq 5') + expr_check('a |= 5') + expr_check('a or_eq 5') def test_domain_c_ast_fundamental_types(): @@ 
-588,29 +588,29 @@ def test_domain_c_ast_enum_definitions(): def test_domain_c_ast_anon_definitions(): - check('struct', '@a', {1: '@a'}, asTextOutput='struct [anonymous]') - check('union', '@a', {1: '@a'}, asTextOutput='union [anonymous]') - check('enum', '@a', {1: '@a'}, asTextOutput='enum [anonymous]') - check('struct', '@1', {1: '@1'}, asTextOutput='struct [anonymous]') - check('struct', '@a.A', {1: '@a.A'}, asTextOutput='struct [anonymous].A') + check('struct', '@a', {1: '@a'}, as_text_output='struct [anonymous]') + check('union', '@a', {1: '@a'}, as_text_output='union [anonymous]') + check('enum', '@a', {1: '@a'}, as_text_output='enum [anonymous]') + check('struct', '@1', {1: '@1'}, as_text_output='struct [anonymous]') + check('struct', '@a.A', {1: '@a.A'}, as_text_output='struct [anonymous].A') def test_domain_c_ast_initializers(): - idsMember = {1: 'v'} - idsFunction = {1: 'f'} + ids_member = {1: 'v'} + ids_function = {1: 'f'} # no init - check('member', 'T v', idsMember) - check('function', 'void f(T v)', idsFunction) + check('member', 'T v', ids_member) + check('function', 'void f(T v)', ids_function) # with '=', assignment-expression - check('member', 'T v = 42', idsMember) - check('function', 'void f(T v = 42)', idsFunction) + check('member', 'T v = 42', ids_member) + check('function', 'void f(T v = 42)', ids_function) # with '=', braced-init - check('member', 'T v = {}', idsMember) - check('function', 'void f(T v = {})', idsFunction) - check('member', 'T v = {42, 42, 42}', idsMember) - check('function', 'void f(T v = {42, 42, 42})', idsFunction) - check('member', 'T v = {42, 42, 42,}', idsMember) - check('function', 'void f(T v = {42, 42, 42,})', idsFunction) + check('member', 'T v = {}', ids_member) + check('function', 'void f(T v = {})', ids_function) + check('member', 'T v = {42, 42, 42}', ids_member) + check('function', 'void f(T v = {42, 42, 42})', ids_function) + check('member', 'T v = {42, 42, 42,}', ids_member) + check('function', 'void f(T v = {42, 42, 42,})', ids_function) # TODO: designator-list @@ -723,9 +723,9 @@ def extract_role_links(app, filename): entries = [] for l in lis: li = ET.fromstring(l) # NoQA: S314 # using known data in tests - aList = list(li.iter('a')) - assert len(aList) == 1 - a = aList[0] + a_list = list(li.iter('a')) + assert len(a_list) == 1 + a = a_list[0] target = a.attrib['href'].lstrip('#') title = a.attrib['title'] assert len(a) == 1 @@ -808,12 +808,12 @@ def test_domain_c_build_field_role(app): assert len(ws) == 0 -def _get_obj(app, queryName): +def _get_obj(app, query_name): domain = app.env.domains.c_domain - for name, _dispname, objectType, docname, anchor, _prio in domain.get_objects(): - if name == queryName: - return docname, anchor, objectType - return queryName, 'not', 'found' + for name, _dispname, object_type, docname, anchor, _prio in domain.get_objects(): + if name == query_name: + return docname, anchor, object_type + return query_name, 'not', 'found' @pytest.mark.sphinx( @@ -822,7 +822,7 @@ def _get_obj(app, queryName): def test_domain_c_build_intersphinx(tmp_path, app): # a splitting of test_ids_vs_tags0 into the primary directives in a remote project, # and then the references in the test project - origSource = """\ + orig_source = """\ .. c:member:: int _member .. c:var:: int _var .. c:function:: void _function() @@ -835,7 +835,7 @@ def test_domain_c_build_intersphinx(tmp_path, app): .. c:type:: _type .. 
c:function:: void _functionParam(int param) -""" # NoQA: F841 +""" inv_file = tmp_path / 'inventory' inv_file.write_bytes( b"""\ @@ -858,7 +858,7 @@ def test_domain_c_build_intersphinx(tmp_path, app): _union c:union 1 index.html#c.$ - _var c:member 1 index.html#c.$ - """) - ) # NoQA: W291 + ) app.config.intersphinx_mapping = { 'local': ('https://localhost/intersphinx/c/', str(inv_file)), } diff --git a/tests/test_domains/test_domain_cpp.py b/tests/test_domains/test_domain_cpp.py index 96208bf7409..09fd0140875 100644 --- a/tests/test_domains/test_domain_cpp.py +++ b/tests/test_domains/test_domain_cpp.py @@ -51,63 +51,63 @@ class Config: return ast -def _check(name, input, idDict, output, key, asTextOutput): +def _check(name, input, id_dict, output, key, as_text_output): if key is None: key = name key += ' ' if name in {'function', 'member'}: - inputActual = input - outputAst = output - outputAsText = output + input_actual = input + output_ast = output + output_as_text = output else: - inputActual = input.format(key='') - outputAst = output.format(key='') - outputAsText = output.format(key=key) - if asTextOutput is not None: - outputAsText = asTextOutput + input_actual = input.format(key='') + output_ast = output.format(key='') + output_as_text = output.format(key=key) + if as_text_output is not None: + output_as_text = as_text_output # first a simple check of the AST - ast = parse(name, inputActual) + ast = parse(name, input_actual) res = str(ast) - if res != outputAst: + if res != output_ast: print() print('Input: ', input) print('Result: ', res) - print('Expected: ', outputAst) + print('Expected: ', output_ast) raise DefinitionError - rootSymbol = Symbol(None, None, None, None, None, None, None) - symbol = rootSymbol.add_declaration(ast, docname='TestDoc', line=42) - parentNode = addnodes.desc() + root_symbol = Symbol(None, None, None, None, None, None, None) + symbol = root_symbol.add_declaration(ast, docname='TestDoc', line=42) + parent_node = addnodes.desc() signode = addnodes.desc_signature(input, '') - parentNode += signode + parent_node += signode ast.describe_signature(signode, 'lastIsName', symbol, options={}) - resAsText = parentNode.astext() - if resAsText != outputAsText: + res_as_text = parent_node.astext() + if res_as_text != output_as_text: print() print('Input: ', input) - print('astext(): ', resAsText) - print('Expected: ', outputAsText) - print('Node:', parentNode) + print('astext(): ', res_as_text) + print('Expected: ', output_as_text) + print('Node:', parent_node) raise DefinitionError - idExpected = [None] + id_expected = [None] for i in range(1, _max_id + 1): - if i in idDict: - idExpected.append(idDict[i]) + if i in id_dict: + id_expected.append(id_dict[i]) else: - idExpected.append(idExpected[i - 1]) - idActual = [None] + id_expected.append(id_expected[i - 1]) + id_actual = [None] for i in range(1, _max_id + 1): try: id = ast.get_id(version=i) assert id is not None - idActual.append(id[len(_id_prefix[i]) :]) + id_actual.append(id[len(_id_prefix[i]) :]) except NoOldIdError: - idActual.append(None) + id_actual.append(None) res = [True] for i in range(1, _max_id + 1): - res.append(idExpected[i] == idActual[i]) + res.append(id_expected[i] == id_actual[i]) if not all(res): print('input: %s' % input.rjust(20)) @@ -115,25 +115,25 @@ def _check(name, input, idDict, output, key, asTextOutput): if res[i]: continue print('Error in id version %d.' 
% i) - print('result: %s' % idActual[i]) - print('expected: %s' % idExpected[i]) - print(rootSymbol.dump(0)) + print('result: %s' % id_actual[i]) + print('expected: %s' % id_expected[i]) + print(root_symbol.dump(0)) raise DefinitionError -def check(name, input, idDict, output=None, key=None, asTextOutput=None): +def check(name, input, id_dict, output=None, key=None, as_text_output=None): if output is None: output = input # First, check without semicolon - _check(name, input, idDict, output, key, asTextOutput) + _check(name, input, id_dict, output, key, as_text_output) # Second, check with semicolon _check( name, input + ' ;', - idDict, + id_dict, output + ';', key, - asTextOutput + ';' if asTextOutput is not None else None, + as_text_output + ';' if as_text_output is not None else None, ) @@ -177,13 +177,13 @@ def make_id_v2(): def test_domain_cpp_ast_expressions(): - def exprCheck(expr, id, id4=None): + def expr_check(expr, id, id4=None): ids = 'IE1CIA%s_1aE' # call .format() on the expr to unescape double curly braces - idDict = {2: ids % expr.format(), 3: ids % id} + id_dict = {2: ids % expr.format(), 3: ids % id} if id4 is not None: - idDict[4] = ids % id4 - check('class', 'template<> {key}C' % expr, idDict) + id_dict[4] = ids % id4 + check('class', 'template<> {key}C' % expr, id_dict) class Config: cpp_id_attributes = ['id_attr'] @@ -198,19 +198,19 @@ class Config: print('Input: ', expr) print('Result: ', res) raise DefinitionError - displayString = ast.get_display_string() - if res != displayString: + display_string = ast.get_display_string() + if res != display_string: # note: if the expression contains an anon name then this will trigger a falsely print() print('Input: ', expr) print('Result: ', res) - print('Display: ', displayString) + print('Display: ', display_string) raise DefinitionError # primary - exprCheck('nullptr', 'LDnE') - exprCheck('true', 'L1E') - exprCheck('false', 'L0E') + expr_check('nullptr', 'LDnE') + expr_check('true', 'L1E') + expr_check('false', 'L0E') ints = [ '5', '0', @@ -224,16 +224,16 @@ class Config: "0x0'1'2", "1'2'3", ] - unsignedSuffix = ['', 'u', 'U'] - longSuffix = ['', 'l', 'L', 'll', 'LL'] + unsigned_suffix = ['', 'u', 'U'] + long_suffix = ['', 'l', 'L', 'll', 'LL'] for i in ints: - for u in unsignedSuffix: - for l in longSuffix: + for u in unsigned_suffix: + for l in long_suffix: expr = i + u + l - exprCheck(expr, 'L' + expr.replace("'", '') + 'E') + expr_check(expr, 'L' + expr.replace("'", '') + 'E') expr = i + l + u - exprCheck(expr, 'L' + expr.replace("'", '') + 'E') - decimalFloats = [ + expr_check(expr, 'L' + expr.replace("'", '') + 'E') + decimal_floats = [ '5e42', '5e+42', '5e-42', @@ -254,7 +254,7 @@ class Config: ".4'5'6e7'8'9", "1'2'3.4'5'6e7'8'9", ] - hexFloats = [ + hex_floats = [ 'ApF', 'Ap+F', 'Ap-F', @@ -276,16 +276,16 @@ class Config: "A'B'C.D'E'Fp1'2'3", ] for suffix in ('', 'f', 'F', 'l', 'L'): - for e in decimalFloats: + for e in decimal_floats: expr = e + suffix - exprCheck(expr, 'L' + expr.replace("'", '') + 'E') - for e in hexFloats: + expr_check(expr, 'L' + expr.replace("'", '') + 'E') + for e in hex_floats: expr = '0x' + e + suffix - exprCheck(expr, 'L' + expr.replace("'", '') + 'E') - exprCheck('"abc\\"cba"', 'LA8_KcE') # string - exprCheck('this', 'fpT') + expr_check(expr, 'L' + expr.replace("'", '') + 'E') + expr_check('"abc\\"cba"', 'LA8_KcE') # string + expr_check('this', 'fpT') # character literals - charPrefixAndIds = [('', 'c'), ('u8', 'c'), ('u', 'Ds'), ('U', 'Di'), ('L', 'w')] + char_prefix_and_ids = [('', 'c'), 
('u8', 'c'), ('u', 'Ds'), ('U', 'Di'), ('L', 'w')] chars = [ ('a', '97'), ('\\n', '10'), @@ -298,156 +298,156 @@ class Config: ('\\U0001f34c', '127820'), ('\\U0001F34C', '127820'), ] - for p, t in charPrefixAndIds: + for p, t in char_prefix_and_ids: for c, val in chars: - exprCheck(f"{p}'{c}'", t + val) + expr_check(f"{p}'{c}'", t + val) # user-defined literals for i in ints: - exprCheck(i + '_udl', 'clL_Zli4_udlEL' + i.replace("'", '') + 'EE') - exprCheck(i + 'uludl', 'clL_Zli5uludlEL' + i.replace("'", '') + 'EE') - for f in decimalFloats: - exprCheck(f + '_udl', 'clL_Zli4_udlEL' + f.replace("'", '') + 'EE') - exprCheck(f + 'fudl', 'clL_Zli4fudlEL' + f.replace("'", '') + 'EE') - for f in hexFloats: - exprCheck('0x' + f + '_udl', 'clL_Zli4_udlEL0x' + f.replace("'", '') + 'EE') - for p, t in charPrefixAndIds: + expr_check(i + '_udl', 'clL_Zli4_udlEL' + i.replace("'", '') + 'EE') + expr_check(i + 'uludl', 'clL_Zli5uludlEL' + i.replace("'", '') + 'EE') + for f in decimal_floats: + expr_check(f + '_udl', 'clL_Zli4_udlEL' + f.replace("'", '') + 'EE') + expr_check(f + 'fudl', 'clL_Zli4fudlEL' + f.replace("'", '') + 'EE') + for f in hex_floats: + expr_check('0x' + f + '_udl', 'clL_Zli4_udlEL0x' + f.replace("'", '') + 'EE') + for p, t in char_prefix_and_ids: for c, val in chars: - exprCheck(f"{p}'{c}'_udl", 'clL_Zli4_udlE' + t + val + 'E') - exprCheck('"abc"_udl', 'clL_Zli4_udlELA3_KcEE') + expr_check(f"{p}'{c}'_udl", 'clL_Zli4_udlE' + t + val + 'E') + expr_check('"abc"_udl', 'clL_Zli4_udlELA3_KcEE') # from issue #7294 - exprCheck('6.62607015e-34q_J', 'clL_Zli3q_JEL6.62607015e-34EE') + expr_check('6.62607015e-34q_J', 'clL_Zli3q_JEL6.62607015e-34EE') # fold expressions, paren, name - exprCheck('(... + Ns)', '(... + Ns)', id4='flpl2Ns') - exprCheck('(Ns + ...)', '(Ns + ...)', id4='frpl2Ns') - exprCheck('(Ns + ... + 0)', '(Ns + ... + 0)', id4='fLpl2NsL0E') - exprCheck('(5)', 'L5E') - exprCheck('C', '1C') + expr_check('(... + Ns)', '(... + Ns)', id4='flpl2Ns') + expr_check('(Ns + ...)', '(Ns + ...)', id4='frpl2Ns') + expr_check('(Ns + ... + 0)', '(Ns + ... 
+ 0)', id4='fLpl2NsL0E') + expr_check('(5)', 'L5E') + expr_check('C', '1C') # postfix - exprCheck('A(2)', 'cl1AL2EE') - exprCheck('A[2]', 'ix1AL2E') - exprCheck('a.b.c', 'dtdt1a1b1c') - exprCheck('a->b->c', 'ptpt1a1b1c') - exprCheck('i++', 'pp1i') - exprCheck('i--', 'mm1i') - exprCheck('dynamic_cast(i)++', 'ppdcR1T1i') - exprCheck('static_cast(i)++', 'ppscR1T1i') - exprCheck('reinterpret_cast(i)++', 'pprcR1T1i') - exprCheck('const_cast(i)++', 'ppccR1T1i') - exprCheck('typeid(T).name', 'dtti1T4name') - exprCheck('typeid(a + b).name', 'dttepl1a1b4name') + expr_check('A(2)', 'cl1AL2EE') + expr_check('A[2]', 'ix1AL2E') + expr_check('a.b.c', 'dtdt1a1b1c') + expr_check('a->b->c', 'ptpt1a1b1c') + expr_check('i++', 'pp1i') + expr_check('i--', 'mm1i') + expr_check('dynamic_cast(i)++', 'ppdcR1T1i') + expr_check('static_cast(i)++', 'ppscR1T1i') + expr_check('reinterpret_cast(i)++', 'pprcR1T1i') + expr_check('const_cast(i)++', 'ppccR1T1i') + expr_check('typeid(T).name', 'dtti1T4name') + expr_check('typeid(a + b).name', 'dttepl1a1b4name') # unary - exprCheck('++5', 'pp_L5E') - exprCheck('--5', 'mm_L5E') - exprCheck('*5', 'deL5E') - exprCheck('&5', 'adL5E') - exprCheck('+5', 'psL5E') - exprCheck('-5', 'ngL5E') - exprCheck('!5', 'ntL5E') - exprCheck('not 5', 'ntL5E') - exprCheck('~5', 'coL5E') - exprCheck('compl 5', 'coL5E') - exprCheck('sizeof...(a)', 'sZ1a') - exprCheck('sizeof(T)', 'st1T') - exprCheck('sizeof -42', 'szngL42E') - exprCheck('alignof(T)', 'at1T') - exprCheck('noexcept(-42)', 'nxngL42E') + expr_check('++5', 'pp_L5E') + expr_check('--5', 'mm_L5E') + expr_check('*5', 'deL5E') + expr_check('&5', 'adL5E') + expr_check('+5', 'psL5E') + expr_check('-5', 'ngL5E') + expr_check('!5', 'ntL5E') + expr_check('not 5', 'ntL5E') + expr_check('~5', 'coL5E') + expr_check('compl 5', 'coL5E') + expr_check('sizeof...(a)', 'sZ1a') + expr_check('sizeof(T)', 'st1T') + expr_check('sizeof -42', 'szngL42E') + expr_check('alignof(T)', 'at1T') + expr_check('noexcept(-42)', 'nxngL42E') # new-expression - exprCheck('new int', 'nw_iE') - exprCheck('new volatile int', 'nw_ViE') - exprCheck('new int[42]', 'nw_AL42E_iE') - exprCheck('new int()', 'nw_ipiE') - exprCheck('new int(5, 42)', 'nw_ipiL5EL42EE') - exprCheck('::new int', 'nw_iE') - exprCheck('new int{{}}', 'nw_iilE') - exprCheck('new int{{5, 42}}', 'nw_iilL5EL42EE') + expr_check('new int', 'nw_iE') + expr_check('new volatile int', 'nw_ViE') + expr_check('new int[42]', 'nw_AL42E_iE') + expr_check('new int()', 'nw_ipiE') + expr_check('new int(5, 42)', 'nw_ipiL5EL42EE') + expr_check('::new int', 'nw_iE') + expr_check('new int{{}}', 'nw_iilE') + expr_check('new int{{5, 42}}', 'nw_iilL5EL42EE') # delete-expression - exprCheck('delete p', 'dl1p') - exprCheck('delete [] p', 'da1p') - exprCheck('::delete p', 'dl1p') - exprCheck('::delete [] p', 'da1p') + expr_check('delete p', 'dl1p') + expr_check('delete [] p', 'da1p') + expr_check('::delete p', 'dl1p') + expr_check('::delete [] p', 'da1p') # cast - exprCheck('(int)2', 'cviL2E') + expr_check('(int)2', 'cviL2E') # binary op - exprCheck('5 || 42', 'ooL5EL42E') - exprCheck('5 or 42', 'ooL5EL42E') - exprCheck('5 && 42', 'aaL5EL42E') - exprCheck('5 and 42', 'aaL5EL42E') - exprCheck('5 | 42', 'orL5EL42E') - exprCheck('5 bitor 42', 'orL5EL42E') - exprCheck('5 ^ 42', 'eoL5EL42E') - exprCheck('5 xor 42', 'eoL5EL42E') - exprCheck('5 & 42', 'anL5EL42E') - exprCheck('5 bitand 42', 'anL5EL42E') + expr_check('5 || 42', 'ooL5EL42E') + expr_check('5 or 42', 'ooL5EL42E') + expr_check('5 && 42', 'aaL5EL42E') + expr_check('5 and 42', 
'aaL5EL42E') + expr_check('5 | 42', 'orL5EL42E') + expr_check('5 bitor 42', 'orL5EL42E') + expr_check('5 ^ 42', 'eoL5EL42E') + expr_check('5 xor 42', 'eoL5EL42E') + expr_check('5 & 42', 'anL5EL42E') + expr_check('5 bitand 42', 'anL5EL42E') # ['==', '!='] - exprCheck('5 == 42', 'eqL5EL42E') - exprCheck('5 != 42', 'neL5EL42E') - exprCheck('5 not_eq 42', 'neL5EL42E') + expr_check('5 == 42', 'eqL5EL42E') + expr_check('5 != 42', 'neL5EL42E') + expr_check('5 not_eq 42', 'neL5EL42E') # ['<=', '>=', '<', '>', '<=>'] - exprCheck('5 <= 42', 'leL5EL42E') - exprCheck('A <= 42', 'le1AL42E') - exprCheck('5 >= 42', 'geL5EL42E') - exprCheck('5 < 42', 'ltL5EL42E') - exprCheck('A < 42', 'lt1AL42E') - exprCheck('5 > 42', 'gtL5EL42E') - exprCheck('A > 42', 'gt1AL42E') - exprCheck('5 <=> 42', 'ssL5EL42E') - exprCheck('A <=> 42', 'ss1AL42E') + expr_check('5 <= 42', 'leL5EL42E') + expr_check('A <= 42', 'le1AL42E') + expr_check('5 >= 42', 'geL5EL42E') + expr_check('5 < 42', 'ltL5EL42E') + expr_check('A < 42', 'lt1AL42E') + expr_check('5 > 42', 'gtL5EL42E') + expr_check('A > 42', 'gt1AL42E') + expr_check('5 <=> 42', 'ssL5EL42E') + expr_check('A <=> 42', 'ss1AL42E') # ['<<', '>>'] - exprCheck('5 << 42', 'lsL5EL42E') - exprCheck('A << 42', 'ls1AL42E') - exprCheck('5 >> 42', 'rsL5EL42E') + expr_check('5 << 42', 'lsL5EL42E') + expr_check('A << 42', 'ls1AL42E') + expr_check('5 >> 42', 'rsL5EL42E') # ['+', '-'] - exprCheck('5 + 42', 'plL5EL42E') - exprCheck('5 - 42', 'miL5EL42E') + expr_check('5 + 42', 'plL5EL42E') + expr_check('5 - 42', 'miL5EL42E') # ['*', '/', '%'] - exprCheck('5 * 42', 'mlL5EL42E') - exprCheck('5 / 42', 'dvL5EL42E') - exprCheck('5 % 42', 'rmL5EL42E') + expr_check('5 * 42', 'mlL5EL42E') + expr_check('5 / 42', 'dvL5EL42E') + expr_check('5 % 42', 'rmL5EL42E') # ['.*', '->*'] - exprCheck('5 .* 42', 'dsL5EL42E') - exprCheck('5 ->* 42', 'pmL5EL42E') + expr_check('5 .* 42', 'dsL5EL42E') + expr_check('5 ->* 42', 'pmL5EL42E') # conditional - exprCheck('5 ? 7 : 3', 'quL5EL7EL3E') + expr_check('5 ? 7 : 3', 'quL5EL7EL3E') # assignment - exprCheck('a = 5', 'aS1aL5E') - exprCheck('a *= 5', 'mL1aL5E') - exprCheck('a /= 5', 'dV1aL5E') - exprCheck('a %= 5', 'rM1aL5E') - exprCheck('a += 5', 'pL1aL5E') - exprCheck('a -= 5', 'mI1aL5E') - exprCheck('a >>= 5', 'rS1aL5E') - exprCheck('a <<= 5', 'lS1aL5E') - exprCheck('a &= 5', 'aN1aL5E') - exprCheck('a and_eq 5', 'aN1aL5E') - exprCheck('a ^= 5', 'eO1aL5E') - exprCheck('a xor_eq 5', 'eO1aL5E') - exprCheck('a |= 5', 'oR1aL5E') - exprCheck('a or_eq 5', 'oR1aL5E') - exprCheck('a = {{1, 2, 3}}', 'aS1ailL1EL2EL3EE') + expr_check('a = 5', 'aS1aL5E') + expr_check('a *= 5', 'mL1aL5E') + expr_check('a /= 5', 'dV1aL5E') + expr_check('a %= 5', 'rM1aL5E') + expr_check('a += 5', 'pL1aL5E') + expr_check('a -= 5', 'mI1aL5E') + expr_check('a >>= 5', 'rS1aL5E') + expr_check('a <<= 5', 'lS1aL5E') + expr_check('a &= 5', 'aN1aL5E') + expr_check('a and_eq 5', 'aN1aL5E') + expr_check('a ^= 5', 'eO1aL5E') + expr_check('a xor_eq 5', 'eO1aL5E') + expr_check('a |= 5', 'oR1aL5E') + expr_check('a or_eq 5', 'oR1aL5E') + expr_check('a = {{1, 2, 3}}', 'aS1ailL1EL2EL3EE') # complex assignment and conditional - exprCheck('5 = 6 = 7', 'aSL5EaSL6EL7E') - exprCheck('5 = 6 ? 7 = 8 : 3', 'aSL5EquL6EaSL7EL8EL3E') + expr_check('5 = 6 = 7', 'aSL5EaSL6EL7E') + expr_check('5 = 6 ? 
7 = 8 : 3', 'aSL5EquL6EaSL7EL8EL3E') # comma operator - exprCheck('a, 5', 'cm1aL5E') + expr_check('a, 5', 'cm1aL5E') # Additional tests # a < expression that starts with something that could be a template - exprCheck('A < 42', 'lt1AL42E') + expr_check('A < 42', 'lt1AL42E') check( 'function', 'template<> void f(A &v)', {2: 'IE1fR1AI1BX2EE', 3: 'IE1fR1AI1BXL2EEE', 4: 'IE1fvR1AI1BXL2EEE'}, ) - exprCheck('A<1>::value', 'N1AIXL1EEE5valueE') + expr_check('A<1>::value', 'N1AIXL1EEE5valueE') check('class', 'template {key}A', {2: 'I_iE1A'}) check('enumerator', '{key}A = std::numeric_limits::max()', {2: '1A'}) - exprCheck('operator()()', 'clclE') - exprCheck('operator()()', 'clclIiEE') + expr_check('operator()()', 'clclE') + expr_check('operator()()', 'clclIiEE') # pack expansion - exprCheck('a(b(c, 1 + d...)..., e(f..., g))', 'cl1aspcl1b1cspplL1E1dEcl1esp1f1gEE') + expr_check('a(b(c, 1 + d...)..., e(f..., g))', 'cl1aspcl1b1cspplL1E1dEcl1esp1f1gEE') def test_domain_cpp_ast_type_definitions(): @@ -1062,17 +1062,17 @@ def test_domain_cpp_ast_enum_definitions(): def test_domain_cpp_ast_anon_definitions(): - check('class', '@a', {3: 'Ut1_a'}, asTextOutput='class [anonymous]') - check('union', '@a', {3: 'Ut1_a'}, asTextOutput='union [anonymous]') - check('enum', '@a', {3: 'Ut1_a'}, asTextOutput='enum [anonymous]') - check('class', '@1', {3: 'Ut1_1'}, asTextOutput='class [anonymous]') - check('class', '@a::A', {3: 'NUt1_a1AE'}, asTextOutput='class [anonymous]::A') + check('class', '@a', {3: 'Ut1_a'}, as_text_output='class [anonymous]') + check('union', '@a', {3: 'Ut1_a'}, as_text_output='union [anonymous]') + check('enum', '@a', {3: 'Ut1_a'}, as_text_output='enum [anonymous]') + check('class', '@1', {3: 'Ut1_1'}, as_text_output='class [anonymous]') + check('class', '@a::A', {3: 'NUt1_a1AE'}, as_text_output='class [anonymous]::A') check( 'function', 'int f(int @a)', {1: 'f__i', 2: '1fi'}, - asTextOutput='int f(int [anonymous])', + as_text_output='int f(int [anonymous])', ) @@ -1370,37 +1370,37 @@ def test_domain_cpp_ast_template_args(): def test_domain_cpp_ast_initializers(): - idsMember = {1: 'v__T', 2: '1v'} - idsFunction = {1: 'f__T', 2: '1f1T'} - idsTemplate = {2: 'I_1TE1fv', 4: 'I_1TE1fvv'} + ids_member = {1: 'v__T', 2: '1v'} + ids_function = {1: 'f__T', 2: '1f1T'} + ids_template = {2: 'I_1TE1fv', 4: 'I_1TE1fvv'} # no init - check('member', 'T v', idsMember) - check('function', 'void f(T v)', idsFunction) - check('function', 'template void f()', idsTemplate) + check('member', 'T v', ids_member) + check('function', 'void f(T v)', ids_function) + check('function', 'template void f()', ids_template) # with '=', assignment-expression - check('member', 'T v = 42', idsMember) - check('function', 'void f(T v = 42)', idsFunction) - check('function', 'template void f()', idsTemplate) + check('member', 'T v = 42', ids_member) + check('function', 'void f(T v = 42)', ids_function) + check('function', 'template void f()', ids_template) # with '=', braced-init - check('member', 'T v = {}', idsMember) - check('function', 'void f(T v = {})', idsFunction) - check('function', 'template void f()', idsTemplate) - check('member', 'T v = {42, 42, 42}', idsMember) - check('function', 'void f(T v = {42, 42, 42})', idsFunction) - check('function', 'template void f()', idsTemplate) - check('member', 'T v = {42, 42, 42,}', idsMember) - check('function', 'void f(T v = {42, 42, 42,})', idsFunction) - check('function', 'template void f()', idsTemplate) - check('member', 'T v = {42, 42, args...}', idsMember) - check('function', 
'void f(T v = {42, 42, args...})', idsFunction) - check('function', 'template void f()', idsTemplate) + check('member', 'T v = {}', ids_member) + check('function', 'void f(T v = {})', ids_function) + check('function', 'template void f()', ids_template) + check('member', 'T v = {42, 42, 42}', ids_member) + check('function', 'void f(T v = {42, 42, 42})', ids_function) + check('function', 'template void f()', ids_template) + check('member', 'T v = {42, 42, 42,}', ids_member) + check('function', 'void f(T v = {42, 42, 42,})', ids_function) + check('function', 'template void f()', ids_template) + check('member', 'T v = {42, 42, args...}', ids_member) + check('function', 'void f(T v = {42, 42, args...})', ids_function) + check('function', 'template void f()', ids_template) # without '=', braced-init - check('member', 'T v{}', idsMember) - check('member', 'T v{42, 42, 42}', idsMember) - check('member', 'T v{42, 42, 42,}', idsMember) - check('member', 'T v{42, 42, args...}', idsMember) + check('member', 'T v{}', ids_member) + check('member', 'T v{42, 42, 42}', ids_member) + check('member', 'T v{42, 42, 42,}', ids_member) + check('member', 'T v{42, 42, args...}', ids_member) # other - check('member', 'T v = T{}', idsMember) + check('member', 'T v = T{}', ids_member) def test_domain_cpp_ast_attributes(): @@ -1606,7 +1606,7 @@ def test_domain_cpp_build_misuse_of_roles(app): ws = filter_warnings(app.warning, 'roles-targets-warn') # the roles that should be able to generate warnings: - allRoles = [ + all_roles = [ 'class', 'struct', 'union', @@ -1618,7 +1618,7 @@ def test_domain_cpp_build_misuse_of_roles(app): 'enum', 'enumerator', ] - ok = [ # targetType, okRoles + ok = [ # target_type, ok_roles ('class', ['class', 'struct', 'type']), ('union', ['union', 'type']), ('func', ['func', 'type']), @@ -1631,14 +1631,16 @@ def test_domain_cpp_build_misuse_of_roles(app): ('templateParam', ['class', 'struct', 'union', 'member', 'var', 'type']), ] warn = [] - for targetType, roles in ok: - txtTargetType = 'function' if targetType == 'func' else targetType - for r in allRoles: + for target_type, roles in ok: + txt_target_type = 'function' if target_type == 'func' else target_type + for r in all_roles: if r not in roles: - warn.append(f'WARNING: cpp:{r} targets a {txtTargetType} (') - if targetType == 'templateParam': - warn.append(f'WARNING: cpp:{r} targets a {txtTargetType} (') - warn.append(f'WARNING: cpp:{r} targets a {txtTargetType} (') + warn.append(f'WARNING: cpp:{r} targets a {txt_target_type} (') + if target_type == 'templateParam': + warn.extend(( + f'WARNING: cpp:{r} targets a {txt_target_type} (', + f'WARNING: cpp:{r} targets a {txt_target_type} (', + )) warn = sorted(warn) for w in ws: assert 'targets a' in w @@ -1665,14 +1667,14 @@ def test_domain_cpp_build_misuse_of_roles(app): def test_domain_cpp_build_with_add_function_parentheses_is_True(app): app.build(force_all=True) - rolePatterns = [ + role_patterns = [ 'Sphinx', 'Sphinx::version', 'version', 'List', 'MyEnum', ] - parenPatterns = [ + paren_patterns = [ ('ref function without parens ', r'paren_1\(\)'), ('ref function with parens ', r'paren_2\(\)'), ('ref function without parens, explicit title ', 'paren_3_title'), @@ -1684,19 +1686,19 @@ def test_domain_cpp_build_with_add_function_parentheses_is_True(app): ] text = (app.outdir / 'roles.html').read_text(encoding='utf8') - for ref_text in rolePatterns: + for ref_text in role_patterns: pattern = ( f'
<li><p><a .*?><code .*?><span .*?>{ref_text}</span></code></a></p></li>' ) match = re.search(pattern, text) assert match is not None, f'Pattern not found in roles.html:\n\t{pattern}' - for desc_text, ref_text in parenPatterns: + for desc_text, ref_text in paren_patterns: pattern = f'
<li><p>{desc_text}<a .*?><code .*?><span .*?>{ref_text}</span></code></a></p></li>' match = re.search(pattern, text) assert match is not None, f'Pattern not found in roles.html:\n\t{pattern}' text = (app.outdir / 'any-role.html').read_text(encoding='utf8') - for desc_text, ref_text in parenPatterns: + for desc_text, ref_text in paren_patterns: pattern = f'
  • {desc_text}{ref_text}

  • ' match = re.search(pattern, text) assert match is not None, f'Pattern not found in any-role.html:\n\t{pattern}' @@ -1710,14 +1712,14 @@ def test_domain_cpp_build_with_add_function_parentheses_is_True(app): def test_domain_cpp_build_with_add_function_parentheses_is_False(app): app.build(force_all=True) - rolePatterns = [ + role_patterns = [ 'Sphinx', 'Sphinx::version', 'version', 'List', 'MyEnum', ] - parenPatterns = [ + paren_patterns = [ ('ref function without parens ', 'paren_1'), ('ref function with parens ', 'paren_2'), ('ref function without parens, explicit title ', 'paren_3_title'), @@ -1729,19 +1731,19 @@ def test_domain_cpp_build_with_add_function_parentheses_is_False(app): ] text = (app.outdir / 'roles.html').read_text(encoding='utf8') - for ref_text in rolePatterns: + for ref_text in role_patterns: pattern = ( f'
<li><p><a .*?><code .*?><span .*?>{ref_text}</span></code></a></p></li>' ) match = re.search(pattern, text) assert match is not None, f'Pattern not found in roles.html:\n\t{pattern}' - for desc_text, ref_text in parenPatterns: + for desc_text, ref_text in paren_patterns: pattern = f'
<li><p>{desc_text}<a .*?><code .*?><span .*?>{ref_text}</span></code></a></p></li>' match = re.search(pattern, text) assert match is not None, f'Pattern not found in roles.html:\n\t{pattern}' text = (app.outdir / 'any-role.html').read_text(encoding='utf8') - for desc_text, ref_text in parenPatterns: + for desc_text, ref_text in paren_patterns: pattern = f'
  • {desc_text}{ref_text}

  • ' match = re.search(pattern, text) assert match is not None, f'Pattern not found in any-role.html:\n\t{pattern}' @@ -1843,7 +1845,7 @@ def test_domain_cpp_build_operator_lookup(app): 'html', testroot='domain-cpp-intersphinx', confoverrides={'nitpicky': True} ) def test_domain_cpp_build_intersphinx(tmp_path, app): - origSource = """\ + orig_source = """\ .. cpp:class:: _class .. cpp:struct:: _struct .. cpp:union:: _union @@ -1863,7 +1865,7 @@ def test_domain_cpp_build_intersphinx(tmp_path, app): .. cpp:enum-class:: _enumClass .. cpp:function:: void _functionParam(int param) .. cpp:function:: template void _templateParam() -""" # NoQA: F841 +""" inv_file = tmp_path / 'inventory' inv_file.write_bytes( b"""\ @@ -1893,7 +1895,7 @@ def test_domain_cpp_build_intersphinx(tmp_path, app): _union cpp:union 1 index.html#_CPPv46$ - _var cpp:member 1 index.html#_CPPv44$ - """) - ) # NoQA: W291 + ) app.config.intersphinx_mapping = { 'test': ('https://localhost/intersphinx/cpp/', str(inv_file)), } diff --git a/tests/test_domains/test_domain_std.py b/tests/test_domains/test_domain_std.py index eb01c8e7093..b253f28e3d7 100644 --- a/tests/test_domains/test_domain_std.py +++ b/tests/test_domains/test_domain_std.py @@ -522,8 +522,8 @@ def test_productionlist(app): assert len(code_list) == 1 span = code_list[0] assert span.tag == 'span' - linkText = span.text.strip() - cases.append((text, link, linkText)) + link_text = span.text.strip() + cases.append((text, link, link_text)) assert cases == [ ('A', 'Bare.html#grammar-token-A', 'A'), ('B', 'Bare.html#grammar-token-B', 'B'), diff --git a/tests/test_environment/test_environment.py b/tests/test_environment/test_environment.py index c50dc21c484..a32e6999146 100644 --- a/tests/test_environment/test_environment.py +++ b/tests/test_environment/test_environment.py @@ -185,13 +185,13 @@ def test_env_relfn2path(app): assert absfn == str(app.srcdir / 'logo.jpg') # omit docname (w/ current docname) - app.env.temp_data['docname'] = 'subdir/document' + app.env.current_document.docname = 'subdir/document' relfn, absfn = app.env.relfn2path('images/logo.jpg') assert Path(relfn) == Path('subdir/images/logo.jpg') assert absfn == str(app.srcdir / 'subdir' / 'images' / 'logo.jpg') # omit docname (w/o current docname) - app.env.temp_data.clear() + app.env.current_document.clear() with pytest.raises(KeyError, match=r"^'docname'$"): app.env.relfn2path('images/logo.jpg') diff --git a/tests/test_environment/test_environment_toctree.py b/tests/test_environment/test_environment_toctree.py index 0020fb9a161..890fd596bf8 100644 --- a/tests/test_environment/test_environment_toctree.py +++ b/tests/test_environment/test_environment_toctree.py @@ -452,7 +452,7 @@ def test_domain_objects_document_scoping(app): @pytest.mark.test_params(shared_result='test_environment_toctree_basic') def test_document_toc(app): app.build() - toctree = document_toc(app.env, 'index', app.builder.tags) + toctree = document_toc(app.env, 'index', app.tags) assert_node( toctree, @@ -502,8 +502,8 @@ def test_document_toc(app): @pytest.mark.test_params(shared_result='test_environment_toctree_basic') def test_document_toc_only(app): app.build() - builder = StandaloneHTMLBuilder(app, app.env) - toctree = document_toc(app.env, 'index', builder.tags) + StandaloneHTMLBuilder(app, app.env) # adds format/builder tags + toctree = document_toc(app.env, 'index', app.tags) assert_node( toctree, @@ -561,7 +561,7 @@ def test_document_toc_only(app): @pytest.mark.test_params(shared_result='test_environment_toctree_basic') def 
test_document_toc_tocdepth(app): app.build() - toctree = document_toc(app.env, 'tocdepth', app.builder.tags) + toctree = document_toc(app.env, 'tocdepth', app.tags) assert_node( toctree, diff --git a/tests/test_extensions/autodoc_util.py b/tests/test_extensions/autodoc_util.py index 7c4da07970e..3d08c739300 100644 --- a/tests/test_extensions/autodoc_util.py +++ b/tests/test_extensions/autodoc_util.py @@ -22,7 +22,8 @@ def do_autodoc( options: dict[str, Any] | None = None, ) -> StringList: options = {} if options is None else options.copy() - app.env.temp_data.setdefault('docname', 'index') # set dummy docname + if not app.env.current_document.docname: + app.env.current_document.docname = 'index' # set dummy docname doccls = app.registry.documenters[objtype] docoptions = process_documenter_options(doccls, app.config, options) state = Mock() diff --git a/tests/test_extensions/test_ext_apidoc.py b/tests/test_extensions/test_ext_apidoc.py index 1e191de9a7b..61550bea529 100644 --- a/tests/test_extensions/test_ext_apidoc.py +++ b/tests/test_extensions/test_ext_apidoc.py @@ -7,8 +7,8 @@ import pytest -import sphinx.ext.apidoc -from sphinx.ext.apidoc import main as apidoc_main +import sphinx.ext.apidoc._generate +from sphinx.ext.apidoc._cli import main as apidoc_main if TYPE_CHECKING: from pathlib import Path @@ -60,10 +60,10 @@ def test_simple(make_app, apidoc): @pytest.mark.apidoc( - coderoot='test-apidoc-custom-templates', + coderoot='test-ext-apidoc-custom-templates', options=[ '--separate', - '--templatedir=tests/roots/test-apidoc-custom-templates/_templates', + '--templatedir=tests/roots/test-ext-apidoc-custom-templates/_templates', ], ) def test_custom_templates(make_app, apidoc): @@ -86,16 +86,16 @@ def test_custom_templates(make_app, apidoc): # Assert that the legacy filename is discovered with open(builddir / 'mypackage.txt', encoding='utf-8') as f: txt = f.read() - assert 'The legacy package template was found!' in txt + assert 'The legacy package template was found!' in txt # Assert that the new filename is preferred with open(builddir / 'mypackage.mymodule.txt', encoding='utf-8') as f: txt = f.read() - assert 'The Jinja module template was found!' in txt + assert 'The Jinja module template was found!' 
in txt @pytest.mark.apidoc( - coderoot='test-apidoc-pep420/a', + coderoot='test-ext-apidoc-pep420/a', options=['--implicit-namespaces'], ) def test_pep_0420_enabled(make_app, apidoc): @@ -107,17 +107,17 @@ def test_pep_0420_enabled(make_app, apidoc): with open(outdir / 'a.b.c.rst', encoding='utf-8') as f: rst = f.read() - assert 'automodule:: a.b.c.d\n' in rst - assert 'automodule:: a.b.c\n' in rst + assert 'automodule:: a.b.c.d\n' in rst + assert 'automodule:: a.b.c\n' in rst with open(outdir / 'a.b.e.rst', encoding='utf-8') as f: rst = f.read() - assert 'automodule:: a.b.e.f\n' in rst + assert 'automodule:: a.b.e.f\n' in rst with open(outdir / 'a.b.x.rst', encoding='utf-8') as f: rst = f.read() - assert 'automodule:: a.b.x.y\n' in rst - assert 'automodule:: a.b.x\n' not in rst + assert 'automodule:: a.b.x.y\n' in rst + assert 'automodule:: a.b.x\n' not in rst app = make_app('text', srcdir=outdir) app.build() @@ -131,19 +131,19 @@ def test_pep_0420_enabled(make_app, apidoc): with open(builddir / 'a.b.c.txt', encoding='utf-8') as f: txt = f.read() - assert 'a.b.c package\n' in txt + assert 'a.b.c package\n' in txt with open(builddir / 'a.b.e.txt', encoding='utf-8') as f: txt = f.read() - assert 'a.b.e.f module\n' in txt + assert 'a.b.e.f module\n' in txt with open(builddir / 'a.b.x.txt', encoding='utf-8') as f: txt = f.read() - assert 'a.b.x namespace\n' in txt + assert 'a.b.x namespace\n' in txt @pytest.mark.apidoc( - coderoot='test-apidoc-pep420/a', + coderoot='test-ext-apidoc-pep420/a', options=['--implicit-namespaces', '--separate'], ) def test_pep_0420_enabled_separate(make_app, apidoc): @@ -157,15 +157,15 @@ def test_pep_0420_enabled_separate(make_app, apidoc): with open(outdir / 'a.b.c.rst', encoding='utf-8') as f: rst = f.read() - assert '.. toctree::\n :maxdepth: 4\n\n a.b.c.d\n' in rst + assert '.. toctree::\n :maxdepth: 4\n\n a.b.c.d\n' in rst with open(outdir / 'a.b.e.rst', encoding='utf-8') as f: rst = f.read() - assert '.. toctree::\n :maxdepth: 4\n\n a.b.e.f\n' in rst + assert '.. toctree::\n :maxdepth: 4\n\n a.b.e.f\n' in rst with open(outdir / 'a.b.x.rst', encoding='utf-8') as f: rst = f.read() - assert '.. toctree::\n :maxdepth: 4\n\n a.b.x.y\n' in rst + assert '.. 
toctree::\n :maxdepth: 4\n\n a.b.x.y\n' in rst app = make_app('text', srcdir=outdir) app.build() @@ -181,18 +181,18 @@ def test_pep_0420_enabled_separate(make_app, apidoc): with open(builddir / 'a.b.c.txt', encoding='utf-8') as f: txt = f.read() - assert 'a.b.c package\n' in txt + assert 'a.b.c package\n' in txt with open(builddir / 'a.b.e.f.txt', encoding='utf-8') as f: txt = f.read() - assert 'a.b.e.f module\n' in txt + assert 'a.b.e.f module\n' in txt with open(builddir / 'a.b.x.txt', encoding='utf-8') as f: txt = f.read() - assert 'a.b.x namespace\n' in txt + assert 'a.b.x namespace\n' in txt -@pytest.mark.apidoc(coderoot='test-apidoc-pep420/a') +@pytest.mark.apidoc(coderoot='test-ext-apidoc-pep420/a') def test_pep_0420_disabled(make_app, apidoc): outdir = apidoc.outdir assert (outdir / 'conf.py').is_file() @@ -205,7 +205,7 @@ def test_pep_0420_disabled(make_app, apidoc): print(app._warning.getvalue()) -@pytest.mark.apidoc(coderoot='test-apidoc-pep420/a/b') +@pytest.mark.apidoc(coderoot='test-ext-apidoc-pep420/a/b') def test_pep_0420_disabled_top_level_verify(make_app, apidoc): outdir = apidoc.outdir assert (outdir / 'conf.py').is_file() @@ -214,9 +214,9 @@ def test_pep_0420_disabled_top_level_verify(make_app, apidoc): with open(outdir / 'c.rst', encoding='utf-8') as f: rst = f.read() - assert 'c package\n' in rst - assert 'automodule:: c.d\n' in rst - assert 'automodule:: c\n' in rst + assert 'c package\n' in rst + assert 'automodule:: c.d\n' in rst + assert 'automodule:: c\n' in rst app = make_app('text', srcdir=outdir) app.build() @@ -224,7 +224,7 @@ def test_pep_0420_disabled_top_level_verify(make_app, apidoc): print(app._warning.getvalue()) -@pytest.mark.apidoc(coderoot='test-apidoc-trailing-underscore') +@pytest.mark.apidoc(coderoot='test-ext-apidoc-trailing-underscore') def test_trailing_underscore(make_app, apidoc): outdir = apidoc.outdir assert (outdir / 'conf.py').is_file() @@ -238,12 +238,12 @@ def test_trailing_underscore(make_app, apidoc): builddir = outdir / '_build' / 'text' with open(builddir / 'package_.txt', encoding='utf-8') as f: rst = f.read() - assert 'package_ package\n' in rst - assert 'package_.module_ module\n' in rst + assert 'package_ package\n' in rst + assert 'package_.module_ module\n' in rst @pytest.mark.apidoc( - coderoot='test-apidoc-pep420/a', + coderoot='test-ext-apidoc-pep420/a', excludes=['b/c/d.py', 'b/e/f.py', 'b/e/__init__.py'], options=['--implicit-namespaces', '--separate'], ) @@ -261,7 +261,7 @@ def test_excludes(apidoc): @pytest.mark.apidoc( - coderoot='test-apidoc-pep420/a', + coderoot='test-ext-apidoc-pep420/a', excludes=['b/e'], options=['--implicit-namespaces', '--separate'], ) @@ -278,7 +278,7 @@ def test_excludes_subpackage_should_be_skipped(apidoc): @pytest.mark.apidoc( - coderoot='test-apidoc-pep420/a', + coderoot='test-ext-apidoc-pep420/a', excludes=['b/e/f.py'], options=['--implicit-namespaces', '--separate'], ) @@ -295,7 +295,7 @@ def test_excludes_module_should_be_skipped(apidoc): @pytest.mark.apidoc( - coderoot='test-apidoc-pep420/a', + coderoot='test-ext-apidoc-pep420/a', excludes=[], options=['--implicit-namespaces', '--separate'], ) @@ -349,11 +349,11 @@ def test_extension_parsed(apidoc): with open(outdir / 'conf.py', encoding='utf-8') as f: rst = f.read() - assert 'sphinx.ext.mathjax' in rst + assert 'sphinx.ext.mathjax' in rst @pytest.mark.apidoc( - coderoot='test-apidoc-toc/mypackage', + coderoot='test-ext-apidoc-toc/mypackage', options=['--implicit-namespaces'], ) def 
test_toc_all_references_should_exist_pep420_enabled(apidoc): @@ -385,7 +385,7 @@ def test_toc_all_references_should_exist_pep420_enabled(apidoc): @pytest.mark.apidoc( - coderoot='test-apidoc-toc/mypackage', + coderoot='test-ext-apidoc-toc/mypackage', ) def test_toc_all_references_should_exist_pep420_disabled(apidoc): """All references in toc should exist. This test doesn't say if @@ -432,7 +432,7 @@ def extract_toc(path): @pytest.mark.apidoc( - coderoot='test-apidoc-subpackage-in-toc', + coderoot='test-ext-apidoc-subpackage-in-toc', options=['--separate'], ) def test_subpackage_in_toc(apidoc): @@ -507,8 +507,8 @@ def test_module_file(tmp_path): '\n' '.. automodule:: example\n' ' :members:\n' - ' :undoc-members:\n' ' :show-inheritance:\n' + ' :undoc-members:\n' ) @@ -522,8 +522,8 @@ def test_module_file_noheadings(tmp_path): assert content == ( '.. automodule:: example\n' ' :members:\n' - ' :undoc-members:\n' ' :show-inheritance:\n' + ' :undoc-members:\n' ) @@ -560,24 +560,24 @@ def test_package_file(tmp_path): '\n' '.. automodule:: testpkg.hello\n' ' :members:\n' - ' :undoc-members:\n' ' :show-inheritance:\n' + ' :undoc-members:\n' '\n' 'testpkg.world module\n' '--------------------\n' '\n' '.. automodule:: testpkg.world\n' ' :members:\n' - ' :undoc-members:\n' ' :show-inheritance:\n' + ' :undoc-members:\n' '\n' 'Module contents\n' '---------------\n' '\n' '.. automodule:: testpkg\n' ' :members:\n' - ' :undoc-members:\n' ' :show-inheritance:\n' + ' :undoc-members:\n' ) content = (outdir / 'testpkg.subpkg.rst').read_text(encoding='utf8') @@ -590,8 +590,8 @@ def test_package_file(tmp_path): '\n' '.. automodule:: testpkg.subpkg\n' ' :members:\n' - ' :undoc-members:\n' ' :show-inheritance:\n' + ' :undoc-members:\n' ) @@ -622,8 +622,8 @@ def test_package_file_separate(tmp_path): '\n' '.. automodule:: testpkg\n' ' :members:\n' - ' :undoc-members:\n' ' :show-inheritance:\n' + ' :undoc-members:\n' ) content = (outdir / 'testpkg.example.rst').read_text(encoding='utf8') @@ -633,8 +633,8 @@ def test_package_file_separate(tmp_path): '\n' '.. automodule:: testpkg.example\n' ' :members:\n' - ' :undoc-members:\n' ' :show-inheritance:\n' + ' :undoc-members:\n' ) @@ -652,8 +652,8 @@ def test_package_file_module_first(tmp_path): '\n' '.. automodule:: testpkg\n' ' :members:\n' - ' :undoc-members:\n' ' :show-inheritance:\n' + ' :undoc-members:\n' '\n' 'Submodules\n' '----------\n' @@ -663,8 +663,8 @@ def test_package_file_module_first(tmp_path): '\n' '.. automodule:: testpkg.example\n' ' :members:\n' - ' :undoc-members:\n' ' :show-inheritance:\n' + ' :undoc-members:\n' ) @@ -685,8 +685,8 @@ def test_package_file_without_submodules(tmp_path): '\n' '.. automodule:: testpkg\n' ' :members:\n' - ' :undoc-members:\n' ' :show-inheritance:\n' + ' :undoc-members:\n' ) @@ -717,8 +717,8 @@ def test_namespace_package_file(tmp_path): '\n' '.. 
automodule:: testpkg.example\n' ' :members:\n' - ' :undoc-members:\n' ' :show-inheritance:\n' + ' :undoc-members:\n' ) @@ -728,12 +728,12 @@ def test_no_duplicates(rootdir, tmp_path): We can't use pytest.mark.apidoc here as we use a different set of arguments to apidoc_main """ - original_suffixes = sphinx.ext.apidoc.PY_SUFFIXES + original_suffixes = sphinx.ext.apidoc._generate.PY_SUFFIXES try: # Ensure test works on Windows - sphinx.ext.apidoc.PY_SUFFIXES += ('.so',) + sphinx.ext.apidoc._generate.PY_SUFFIXES += ('.so',) - package = rootdir / 'test-apidoc-duplicates' / 'fish_licence' + package = rootdir / 'test-ext-apidoc-duplicates' / 'fish_licence' outdir = tmp_path / 'out' apidoc_main(['-o', str(outdir), '-T', str(package), '--implicit-namespaces']) @@ -746,7 +746,7 @@ def test_no_duplicates(rootdir, tmp_path): assert count_submodules == 1 finally: - sphinx.ext.apidoc.PY_SUFFIXES = original_suffixes + sphinx.ext.apidoc._generate.PY_SUFFIXES = original_suffixes def test_remove_old_files(tmp_path: Path): diff --git a/tests/test_extensions/test_ext_autodoc.py b/tests/test_extensions/test_ext_autodoc.py index eb775aba081..63f7f090ec5 100644 --- a/tests/test_extensions/test_ext_autodoc.py +++ b/tests/test_extensions/test_ext_autodoc.py @@ -10,17 +10,18 @@ import itertools import operator import sys -from types import SimpleNamespace from typing import TYPE_CHECKING from unittest.mock import Mock from warnings import catch_warnings import pytest -from docutils.statemachine import ViewList from sphinx import addnodes from sphinx.ext.autodoc import ALL, ModuleLevelDocumenter, Options +# NEVER import these objects from sphinx.ext.autodoc directly +from sphinx.ext.autodoc.directive import DocumenterBridge + from tests.test_extensions.autodoc_util import do_autodoc try: @@ -34,8 +35,10 @@ if TYPE_CHECKING: from typing import Any + from sphinx.environment import BuildEnvironment + -def make_directive_bridge(env): +def make_directive_bridge(env: BuildEnvironment) -> DocumenterBridge: options = Options( inherited_members=False, undoc_members=False, @@ -54,11 +57,11 @@ def make_directive_bridge(env): ignore_module_all=False, ) - directive = SimpleNamespace( + directive = DocumenterBridge( env=env, - genopt=options, - result=ViewList(), - record_dependencies=set(), + reporter=None, + options=options, + lineno=0, state=Mock(), ) directive.state.document.settings.tab_width = 8 @@ -95,9 +98,10 @@ def verify(objtype, name, result): 'test_ext_autodoc.raises(exc) -> None', ('test_ext_autodoc', ['raises'], 'exc', 'None'), ) - directive.env.temp_data['autodoc:module'] = 'test_ext_autodoc' + directive.env.current_document.autodoc_module = 'test_ext_autodoc' verify('function', 'raises', ('test_ext_autodoc', ['raises'], None, None)) - del directive.env.temp_data['autodoc:module'] + directive.env.current_document.autodoc_module = '' + directive.env.ref_context['py:module'] = 'test_ext_autodoc' verify('function', 'raises', ('test_ext_autodoc', ['raises'], None, None)) verify('class', 'Base', ('test_ext_autodoc', ['Base'], None, None)) @@ -111,7 +115,7 @@ def verify(objtype, name, result): ) directive.env.ref_context['py:module'] = 'sphinx.testing.util' directive.env.ref_context['py:class'] = 'Foo' - directive.env.temp_data['autodoc:class'] = 'SphinxTestApp' + directive.env.current_document.autodoc_class = 'SphinxTestApp' verify( 'method', 'cleanup', @@ -526,7 +530,7 @@ def test_autodoc_exception(app): @pytest.mark.sphinx('html', testroot='ext-autodoc') def test_autodoc_warnings(app): - app.env.temp_data['docname'] 
= 'dummy' + app.env.current_document.docname = 'dummy' # can't import module do_autodoc(app, 'module', 'unknown') @@ -1299,7 +1303,7 @@ def test_autodoc_module_member_order(app): @pytest.mark.sphinx('html', testroot='ext-autodoc') def test_autodoc_module_scope(app): - app.env.temp_data['autodoc:module'] = 'target' + app.env.current_document.autodoc_module = 'target' actual = do_autodoc(app, 'attribute', 'Class.mdocattr') assert list(actual) == [ '', @@ -1314,8 +1318,8 @@ def test_autodoc_module_scope(app): @pytest.mark.sphinx('html', testroot='ext-autodoc') def test_autodoc_class_scope(app): - app.env.temp_data['autodoc:module'] = 'target' - app.env.temp_data['autodoc:class'] = 'Class' + app.env.current_document.autodoc_module = 'target' + app.env.current_document.autodoc_class = 'Class' actual = do_autodoc(app, 'attribute', 'mdocattr') assert list(actual) == [ '', diff --git a/tests/test_extensions/test_ext_autodoc_configs.py b/tests/test_extensions/test_ext_autodoc_configs.py index 155cbc36347..348e84b2bda 100644 --- a/tests/test_extensions/test_ext_autodoc_configs.py +++ b/tests/test_extensions/test_ext_autodoc_configs.py @@ -17,11 +17,6 @@ from collections.abc import Iterator from pathlib import Path -skip_py314_segfault = pytest.mark.skipif( - sys.version_info[:2] >= (3, 14), - reason='Segmentation fault: https://github.com/python/cpython/issues/125017', -) - IS_PYPY = platform.python_implementation() == 'PyPy' @@ -194,7 +189,6 @@ def test_autodoc_class_signature_separated_init(app): ] -@skip_py314_segfault @pytest.mark.sphinx('html', testroot='ext-autodoc') def test_autodoc_class_signature_separated_new(app): app.config.autodoc_class_signature = 'separated' @@ -378,7 +372,6 @@ def test_autodoc_inherit_docstrings_for_inherited_members(app): ] -@skip_py314_segfault @pytest.mark.sphinx('html', testroot='ext-autodoc') def test_autodoc_docstring_signature(app): options = {'members': None, 'special-members': '__init__, __new__'} @@ -455,8 +448,7 @@ def test_autodoc_docstring_signature(app): ' __init__(self, a, b=1) -> None', ' First line of docstring', '', - ' rest of docstring', - '', + ' rest of docstring', '', '', ' .. py:method:: DocstringSig.__new__(cls, *new_args, **new_kwargs)', @@ -466,8 +458,7 @@ def test_autodoc_docstring_signature(app): ' __new__(cls, d, e=1) -> DocstringSig', ' First line of docstring', '', - ' rest of docstring', - '', + ' rest of docstring', '', '', ' .. py:method:: DocstringSig.meth()', @@ -476,8 +467,7 @@ def test_autodoc_docstring_signature(app): ' meth(FOO, BAR=1) -> BAZ', ' First line of docstring', '', - ' rest of docstring', - '', + ' rest of docstring', '', '', ' .. py:method:: DocstringSig.meth2()', @@ -640,7 +630,7 @@ def test_mocked_module_imports(app): sys.modules.pop('target', None) # unload target module to clear the module cache # no autodoc_mock_imports - options = {'members': 'TestAutodoc,decoratedFunction,func,Alias'} + options = {'members': 'TestAutodoc,decorated_function,func,Alias'} actual = do_autodoc(app, 'module', 'target.need_mocks', options) assert list(actual) == [] assert "autodoc: failed to import module 'need_mocks'" in app.warning.getvalue() @@ -679,16 +669,16 @@ def test_mocked_module_imports(app): ' docstring', '', '', - ' .. py:method:: TestAutodoc.decoratedMethod()', + ' .. py:method:: TestAutodoc.decorated_method()', ' :module: target.need_mocks', '', - ' TestAutodoc::decoratedMethod docstring', + ' TestAutodoc::decorated_method docstring', '', '', - '.. py:function:: decoratedFunction()', + '.. 
py:function:: decorated_function()', ' :module: target.need_mocks', '', - ' decoratedFunction docstring', + ' decorated_function docstring', '', '', '.. py:function:: func(arg: missing_module.Class)', @@ -1705,7 +1695,7 @@ def test_autodoc_default_options(app): if (3, 11, 7) <= sys.version_info < (3, 12) or sys.version_info >= (3, 12, 1): list_of_weak_references = ' list of weak references to the object' else: - list_of_weak_references = " list of weak references to the object (if defined)" # fmt: skip + list_of_weak_references = ' list of weak references to the object (if defined)' # fmt: skip # no settings actual = do_autodoc(app, 'class', 'target.enums.EnumCls') @@ -1784,7 +1774,7 @@ def test_autodoc_default_options_with_values(app): if (3, 11, 7) <= sys.version_info < (3, 12) or sys.version_info >= (3, 12, 1): list_of_weak_references = ' list of weak references to the object' else: - list_of_weak_references = " list of weak references to the object (if defined)" # fmt: skip + list_of_weak_references = ' list of weak references to the object (if defined)' # fmt: skip # with :members: app.config.autodoc_default_options = {'members': 'val1,val2'} diff --git a/tests/test_extensions/test_ext_autosummary.py b/tests/test_extensions/test_ext_autosummary.py index 175abc4e21c..901edb29915 100644 --- a/tests/test_extensions/test_ext_autosummary.py +++ b/tests/test_extensions/test_ext_autosummary.py @@ -147,7 +147,11 @@ def test_extract_summary(capsys): assert err == '' -@pytest.mark.sphinx('dummy', testroot='autosummary', confoverrides=defaults.copy()) +@pytest.mark.sphinx( + 'dummy', + testroot='ext-autosummary-ext', + confoverrides=defaults.copy(), +) def test_get_items_summary(make_app, app_params): import sphinx.ext.autosummary import sphinx.ext.autosummary.generate @@ -186,9 +190,9 @@ def handler(app, what, name, obj, options, lines): assert html_warnings == '' expected_values = { - 'withSentence': 'I have a sentence which spans multiple lines.', - 'noSentence': "this doesn't start with a capital.", - 'emptyLine': 'This is the real summary', + 'with_sentence': 'I have a sentence which spans multiple lines.', + 'no_sentence': "this doesn't start with a capital.", + 'empty_line': 'This is the real summary', 'module_attr': 'This is a module attribute', 'C.class_attr': 'This is a class attribute', 'C.instance_attr': 'This is an instance attribute', @@ -219,7 +223,11 @@ def str_content(elem: Element) -> str: return ''.join(str_content(e) for e in elem) -@pytest.mark.sphinx('xml', testroot='autosummary', confoverrides=defaults.copy()) +@pytest.mark.sphinx( + 'xml', + testroot='ext-autosummary-ext', + confoverrides=defaults.copy(), +) def test_escaping(app): app.build(force_all=True) @@ -744,7 +752,11 @@ def test_autosummary_filename_map(app): assert html_warnings == '' -@pytest.mark.sphinx('latex', testroot='autosummary', confoverrides=defaults.copy()) +@pytest.mark.sphinx( + 'latex', + testroot='ext-autosummary-ext', + confoverrides=defaults.copy(), +) def test_autosummary_latex_table_colspec(app): app.build(force_all=True) result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8') diff --git a/tests/test_extensions/test_ext_intersphinx.py b/tests/test_extensions/test_ext_intersphinx.py index 5aaa91d9365..090510400c4 100644 --- a/tests/test_extensions/test_ext_intersphinx.py +++ b/tests/test_extensions/test_ext_intersphinx.py @@ -25,6 +25,7 @@ _fetch_inventory, _fetch_inventory_group, _get_safe_url, + _InvConfig, _strip_basic_auth, ) from sphinx.ext.intersphinx._shared import 
_IntersphinxProject @@ -67,35 +68,37 @@ def set_config(app, mapping): app.config.intersphinx_mapping = mapping.copy() app.config.intersphinx_cache_limit = 0 app.config.intersphinx_disabled_reftypes = [] + app.config.intersphinx_timeout = None @mock.patch('sphinx.ext.intersphinx._load.InventoryFile') -@mock.patch('sphinx.ext.intersphinx._load._read_from_url') +@mock.patch('sphinx.ext.intersphinx._load.requests.get') @pytest.mark.sphinx('html', testroot='root') -def test_fetch_inventory_redirection(_read_from_url, InventoryFile, app): # NoQA: PT019 +def test_fetch_inventory_redirection(get_request, InventoryFile, app): + mocked_get = get_request.return_value.__enter__.return_value intersphinx_setup(app) - _read_from_url().readline.return_value = b'# Sphinx inventory version 2' + mocked_get.content = b'# Sphinx inventory version 2' # same uri and inv, not redirected - _read_from_url().url = 'https://hostname/' + INVENTORY_FILENAME + mocked_get.url = 'https://hostname/' + INVENTORY_FILENAME _fetch_inventory( target_uri='https://hostname/', inv_location='https://hostname/' + INVENTORY_FILENAME, - config=app.config, + config=_InvConfig.from_config(app.config), srcdir=app.srcdir, ) assert 'intersphinx inventory has moved' not in app.status.getvalue() - assert InventoryFile.load.call_args[0][1] == 'https://hostname/' + assert InventoryFile.loads.call_args[1]['uri'] == 'https://hostname/' # same uri and inv, redirected app.status.seek(0) app.status.truncate(0) - _read_from_url().url = 'https://hostname/new/' + INVENTORY_FILENAME + mocked_get.url = 'https://hostname/new/' + INVENTORY_FILENAME _fetch_inventory( target_uri='https://hostname/', inv_location='https://hostname/' + INVENTORY_FILENAME, - config=app.config, + config=_InvConfig.from_config(app.config), srcdir=app.srcdir, ) assert app.status.getvalue() == ( @@ -103,31 +106,31 @@ def test_fetch_inventory_redirection(_read_from_url, InventoryFile, app): # NoQ 'https://hostname/%s -> https://hostname/new/%s\n' % (INVENTORY_FILENAME, INVENTORY_FILENAME) ) - assert InventoryFile.load.call_args[0][1] == 'https://hostname/new' + assert InventoryFile.loads.call_args[1]['uri'] == 'https://hostname/new' # different uri and inv, not redirected app.status.seek(0) app.status.truncate(0) - _read_from_url().url = 'https://hostname/new/' + INVENTORY_FILENAME + mocked_get.url = 'https://hostname/new/' + INVENTORY_FILENAME _fetch_inventory( target_uri='https://hostname/', inv_location='https://hostname/new/' + INVENTORY_FILENAME, - config=app.config, + config=_InvConfig.from_config(app.config), srcdir=app.srcdir, ) assert 'intersphinx inventory has moved' not in app.status.getvalue() - assert InventoryFile.load.call_args[0][1] == 'https://hostname/' + assert InventoryFile.loads.call_args[1]['uri'] == 'https://hostname/' # different uri and inv, redirected app.status.seek(0) app.status.truncate(0) - _read_from_url().url = 'https://hostname/other/' + INVENTORY_FILENAME + mocked_get.url = 'https://hostname/other/' + INVENTORY_FILENAME _fetch_inventory( target_uri='https://hostname/', inv_location='https://hostname/new/' + INVENTORY_FILENAME, - config=app.config, + config=_InvConfig.from_config(app.config), srcdir=app.srcdir, ) assert app.status.getvalue() == ( @@ -135,7 +138,7 @@ def test_fetch_inventory_redirection(_read_from_url, InventoryFile, app): # NoQ 'https://hostname/new/%s -> https://hostname/other/%s\n' % (INVENTORY_FILENAME, INVENTORY_FILENAME) ) - assert InventoryFile.load.call_args[0][1] == 'https://hostname/' + assert 
InventoryFile.loads.call_args[1]['uri'] == 'https://hostname/' @pytest.mark.sphinx('html', testroot='root') @@ -760,6 +763,7 @@ def test_intersphinx_cache_limit(app, monkeypatch, cache_limit, expected_expired app.config.intersphinx_mapping = { 'inv': (url, None), } + app.config.intersphinx_timeout = None # load the inventory and check if it's done correctly intersphinx_cache: dict[str, InventoryCacheEntry] = { url: ('inv', 0, {}), # Timestamp of last cache write is zero. @@ -784,7 +788,7 @@ def test_intersphinx_cache_limit(app, monkeypatch, cache_limit, expected_expired project=project, cache=intersphinx_cache, now=now, - config=app.config, + config=_InvConfig.from_config(app.config), srcdir=app.srcdir, ) # If we hadn't mocked `_fetch_inventory`, it would've made diff --git a/tests/test_extensions/test_ext_napoleon_docstring.py b/tests/test_extensions/test_ext_napoleon_docstring.py index cf78e98f6ab..393aeac7de8 100644 --- a/tests/test_extensions/test_ext_napoleon_docstring.py +++ b/tests/test_extensions/test_ext_napoleon_docstring.py @@ -1,4 +1,4 @@ -"""Tests for :mod:`sphinx.ext.napoleon.docstring` module.""" +"""Tests for :py:mod:`sphinx.ext.napoleon.docstring` module.""" from __future__ import annotations @@ -17,7 +17,7 @@ from sphinx.ext.napoleon.docstring import ( GoogleDocstring, NumpyDocstring, - _convert_numpy_type_spec, + _convert_type_spec, _recombine_set_tokens, _token_type, _tokenize_type_spec, @@ -120,9 +120,9 @@ def test_class_data_member_inline_no_type(self): assert actual == [source] def test_class_data_member_inline_ref_in_type(self): - source = f':class:`int`: {self.inline_google_docstring}' + source = f':py:class:`int`: {self.inline_google_docstring}' actual = self._docstring(source).splitlines() - assert actual == [self.inline_google_docstring, '', ':type: :class:`int`'] + assert actual == [self.inline_google_docstring, '', ':type: :py:class:`int`'] class TestGoogleDocstring: @@ -467,14 +467,14 @@ def test_parameters_with_class_reference(self): This class should only be used by runtimes. Arguments: - runtime (:class:`~typing.Dict`\\[:class:`int`,:class:`str`\\]): Use it to + runtime (:py:class:`~typing.Dict`\\[:py:class:`int`,:py:class:`str`\\]): Use it to access the environment. It is available in XBlock code as ``self.runtime``. - field_data (:class:`FieldData`): Interface used by the XBlock + field_data (:py:class:`FieldData`): Interface used by the XBlock fields to access their data from wherever it is persisted. - scope_ids (:class:`ScopeIds`): Identifiers needed to resolve scopes. + scope_ids (:py:class:`ScopeIds`): Identifiers needed to resolve scopes. """ @@ -487,19 +487,19 @@ def test_parameters_with_class_reference(self): :param runtime: Use it to access the environment. It is available in XBlock code as ``self.runtime``. -:type runtime: :class:`~typing.Dict`\\[:class:`int`,:class:`str`\\] +:type runtime: :py:class:`~typing.Dict`\\[:py:class:`int`,:py:class:`str`\\] :param field_data: Interface used by the XBlock fields to access their data from wherever it is persisted. -:type field_data: :class:`FieldData` +:type field_data: :py:class:`FieldData` :param scope_ids: Identifiers needed to resolve scopes. 
-:type scope_ids: :class:`ScopeIds` +:type scope_ids: :py:class:`ScopeIds` """ assert str(actual) == expected def test_attributes_with_class_reference(self): docstring = """\ Attributes: - in_attr(:class:`numpy.ndarray`): super-dooper attribute + in_attr(:py:class:`numpy.ndarray`): super-dooper attribute """ actual = GoogleDocstring(docstring) @@ -508,7 +508,7 @@ def test_attributes_with_class_reference(self): super-dooper attribute - :type: :class:`numpy.ndarray` + :type: :py:class:`numpy.ndarray` """ assert str(actual) == expected @@ -583,14 +583,14 @@ def test_xrefs_in_return_type(self): docstring = """Example Function Returns: - :class:`numpy.ndarray`: A :math:`n \\times 2` array containing + :py:class:`numpy.ndarray`: A :math:`n \\times 2` array containing a bunch of math items """ expected = """Example Function :returns: A :math:`n \\times 2` array containing a bunch of math items -:rtype: :class:`numpy.ndarray` +:rtype: :py:class:`numpy.ndarray` """ actual = GoogleDocstring(docstring) assert str(actual) == expected @@ -612,7 +612,7 @@ def test_raises_types(self): If the dimensions couldn't be parsed. `InvalidArgumentsError` If the arguments are invalid. - :exc:`~ValueError` + :py:exc:`~ValueError` If the arguments are wrong. """, @@ -723,7 +723,7 @@ def test_raises_types(self): Example Function Raises: - :class:`exc.InvalidDimensionsError` + :py:class:`exc.InvalidDimensionsError` """, """ @@ -738,7 +738,7 @@ def test_raises_types(self): Example Function Raises: - :class:`exc.InvalidDimensionsError`: If the dimensions couldn't be parsed. + :py:class:`exc.InvalidDimensionsError`: If the dimensions couldn't be parsed. """, """ @@ -753,15 +753,15 @@ def test_raises_types(self): Example Function Raises: - :class:`exc.InvalidDimensionsError`: If the dimensions couldn't be parsed, - then a :class:`exc.InvalidDimensionsError` will be raised. + :py:class:`exc.InvalidDimensionsError`: If the dimensions couldn't be parsed, + then a :py:class:`exc.InvalidDimensionsError` will be raised. """, """ Example Function :raises exc.InvalidDimensionsError: If the dimensions couldn't be parsed, - then a :class:`exc.InvalidDimensionsError` will be raised. + then a :py:class:`exc.InvalidDimensionsError` will be raised. """, ), ################################ @@ -770,8 +770,8 @@ def test_raises_types(self): Example Function Raises: - :class:`exc.InvalidDimensionsError`: If the dimensions couldn't be parsed. - :class:`exc.InvalidArgumentsError`: If the arguments are invalid. + :py:class:`exc.InvalidDimensionsError`: If the dimensions couldn't be parsed. + :py:class:`exc.InvalidArgumentsError`: If the arguments are invalid. 
""", """ @@ -787,8 +787,8 @@ def test_raises_types(self): Example Function Raises: - :class:`exc.InvalidDimensionsError` - :class:`exc.InvalidArgumentsError` + :py:class:`exc.InvalidDimensionsError` + :py:class:`exc.InvalidArgumentsError` """, """ @@ -1221,7 +1221,7 @@ def test_custom_generic_sections(self): ), ) - testConfig = Config( + test_config = Config( napoleon_custom_sections=[ 'Really Important Details', ('Sooper Warning', 'warns'), @@ -1231,7 +1231,7 @@ def test_custom_generic_sections(self): ) for docstring, expected in docstrings: - actual = GoogleDocstring(docstring, testConfig) + actual = GoogleDocstring(docstring, test_config) assert str(actual) == expected def test_noindex(self): @@ -1369,7 +1369,7 @@ class TestNumpyDocstring: """ Single line summary - :Parameters: **arg1** (:class:`str`) -- Extended + :Parameters: **arg1** (:py:class:`str`) -- Extended description of arg1 """, ), @@ -1398,14 +1398,14 @@ class TestNumpyDocstring: """ Single line summary - :Parameters: * **arg1** (:class:`str`) -- Extended + :Parameters: * **arg1** (:py:class:`str`) -- Extended description of arg1 - * **arg2** (:class:`int`) -- Extended + * **arg2** (:py:class:`int`) -- Extended description of arg2 - :Keyword Arguments: * **kwarg1** (:class:`str`) -- Extended + :Keyword Arguments: * **kwarg1** (:py:class:`str`) -- Extended description of kwarg1 - * **kwarg2** (:class:`int`) -- Extended + * **kwarg2** (:py:class:`int`) -- Extended description of kwarg2 """, ), @@ -1422,7 +1422,7 @@ class TestNumpyDocstring: """ Single line summary - :returns: :class:`str` -- Extended + :returns: :py:class:`str` -- Extended description of return value """, ), @@ -1439,7 +1439,7 @@ class TestNumpyDocstring: """ Single line summary - :returns: :class:`str` -- Extended + :returns: :py:class:`str` -- Extended description of return value """, ), @@ -1459,7 +1459,7 @@ class TestNumpyDocstring: """ Single line summary - :Parameters: * **arg1** (:class:`str`) -- Extended description of arg1 + :Parameters: * **arg1** (:py:class:`str`) -- Extended description of arg1 * **\\*args** -- Variable length argument list. * **\\*\\*kwargs** -- Arbitrary keyword arguments. """, @@ -1478,7 +1478,7 @@ class TestNumpyDocstring: """ Single line summary - :Parameters: * **arg1** (:class:`str`) -- Extended description of arg1 + :Parameters: * **arg1** (:py:class:`str`) -- Extended description of arg1 * **\\*args, \\*\\*kwargs** -- Variable length argument list and arbitrary keyword arguments. """, ), @@ -1498,9 +1498,9 @@ class TestNumpyDocstring: """ Single line summary - :Receives: * **arg1** (:class:`str`) -- Extended + :Receives: * **arg1** (:py:class:`str`) -- Extended description of arg1 - * **arg2** (:class:`int`) -- Extended + * **arg2** (:py:class:`int`) -- Extended description of arg2 """, ), @@ -1520,9 +1520,9 @@ class TestNumpyDocstring: """ Single line summary - :Receives: * **arg1** (:class:`str`) -- Extended + :Receives: * **arg1** (:py:class:`str`) -- Extended description of arg1 - * **arg2** (:class:`int`) -- Extended + * **arg2** (:py:class:`int`) -- Extended description of arg2 """, ), @@ -1539,7 +1539,7 @@ class TestNumpyDocstring: """ Single line summary - :Yields: :class:`str` -- Extended + :Yields: :py:class:`str` -- Extended description of yielded value """, ), @@ -1556,7 +1556,7 @@ class TestNumpyDocstring: """ Single line summary - :Yields: :class:`str` -- Extended + :Yields: :py:class:`str` -- Extended description of yielded value """, ), @@ -1737,9 +1737,9 @@ def test_see_also_refs(self): .. 
seealso:: - :obj:`some`, :obj:`other`, :obj:`funcs` + :py:obj:`some`, :py:obj:`other`, :py:obj:`funcs` \n\ - :obj:`otherfunc` + :py:obj:`otherfunc` relationship """ assert str(actual) == expected @@ -1763,9 +1763,9 @@ def test_see_also_refs(self): .. seealso:: - :obj:`some`, :obj:`other`, :obj:`funcs` + :py:obj:`some`, :py:obj:`other`, :py:obj:`funcs` \n\ - :obj:`otherfunc` + :py:obj:`otherfunc` relationship """ assert str(actual) == expected @@ -1792,7 +1792,7 @@ def test_see_also_refs(self): .. seealso:: - :obj:`some`, :obj:`MyClass.other`, :func:`funcs` + :py:obj:`some`, :py:obj:`MyClass.other`, :func:`funcs` \n\ :func:`~my_package.otherfunc` relationship @@ -1872,7 +1872,7 @@ def test_return_types(self): """) expected = dedent(""" :returns: a dataframe - :rtype: :class:`~pandas.DataFrame` + :rtype: :py:class:`~pandas.DataFrame` """) translations = { 'DataFrame': '~pandas.DataFrame', @@ -1898,11 +1898,11 @@ def test_yield_types(self): expected = dedent(""" Example Function - :Yields: :term:`scalar` or :class:`array-like ` -- The result of the computation + :Yields: :term:`scalar` or :py:class:`array-like ` -- The result of the computation """) translations = { 'scalar': ':term:`scalar`', - 'array-like': ':class:`array-like `', + 'array-like': ':py:class:`array-like `', } config = Config( napoleon_type_aliases=translations, napoleon_preprocess_types=True @@ -2454,60 +2454,60 @@ def test_list_in_parameter_description(self): expected = """One line summary. -:Parameters: * **no_list** (:class:`int`) - * **one_bullet_empty** (:class:`int`) -- +:Parameters: * **no_list** (:py:class:`int`) + * **one_bullet_empty** (:py:class:`int`) -- * - * **one_bullet_single_line** (:class:`int`) -- + * **one_bullet_single_line** (:py:class:`int`) -- - first line - * **one_bullet_two_lines** (:class:`int`) -- + * **one_bullet_two_lines** (:py:class:`int`) -- + first line continued - * **two_bullets_single_line** (:class:`int`) -- + * **two_bullets_single_line** (:py:class:`int`) -- - first line - second line - * **two_bullets_two_lines** (:class:`int`) -- + * **two_bullets_two_lines** (:py:class:`int`) -- * first line continued * second line continued - * **one_enumeration_single_line** (:class:`int`) -- + * **one_enumeration_single_line** (:py:class:`int`) -- 1. first line - * **one_enumeration_two_lines** (:class:`int`) -- + * **one_enumeration_two_lines** (:py:class:`int`) -- 1) first line continued - * **two_enumerations_one_line** (:class:`int`) -- + * **two_enumerations_one_line** (:py:class:`int`) -- (iii) first line (iv) second line - * **two_enumerations_two_lines** (:class:`int`) -- + * **two_enumerations_two_lines** (:py:class:`int`) -- a. first line continued b. 
second line continued - * **one_definition_one_line** (:class:`int`) -- + * **one_definition_one_line** (:py:class:`int`) -- item 1 first line - * **one_definition_two_lines** (:class:`int`) -- + * **one_definition_two_lines** (:py:class:`int`) -- item 1 first line continued - * **two_definitions_one_line** (:class:`int`) -- + * **two_definitions_one_line** (:py:class:`int`) -- item 1 first line item 2 second line - * **two_definitions_two_lines** (:class:`int`) -- + * **two_definitions_two_lines** (:py:class:`int`) -- item 1 first line @@ -2515,14 +2515,14 @@ def test_list_in_parameter_description(self): item 2 second line continued - * **one_definition_blank_line** (:class:`int`) -- + * **one_definition_blank_line** (:py:class:`int`) -- item 1 first line extra first line - * **two_definitions_blank_lines** (:class:`int`) -- + * **two_definitions_blank_lines** (:py:class:`int`) -- item 1 @@ -2535,7 +2535,7 @@ def test_list_in_parameter_description(self): second line extra second line - * **definition_after_normal_text** (:class:`int`) -- text line + * **definition_after_normal_text** (:py:class:`int`) -- text line item 1 first line @@ -2666,18 +2666,18 @@ def test_convert_numpy_type_spec(self): converted = ( '', '*optional*', - ':class:`str`, *optional*', - ':class:`int` or :class:`float` or :obj:`None`, *default*: :obj:`None`', - ':class:`list` of :class:`tuple` of :class:`str`, *optional*', - ':class:`int`, *default* :obj:`None`', + ':py:class:`str`, *optional*', + ':py:class:`int` or :py:class:`float` or :py:obj:`None`, *default*: :py:obj:`None`', + ':py:class:`list` of :py:class:`tuple` of :py:class:`str`, *optional*', + ':py:class:`int`, *default* :py:obj:`None`', '``{"F", "C", "N"}``', "``{'F', 'C', 'N'}``, *default*: ``'N'``", "``{'F', 'C', 'N'}``, *default* ``'N'``", - ':class:`pandas.DataFrame`, *optional*', + ':py:class:`pandas.DataFrame`, *optional*', ) for spec, expected in zip(specs, converted, strict=True): - actual = _convert_numpy_type_spec(spec, translations=translations) + actual = _convert_type_spec(spec, translations=translations) assert actual == expected def test_parameter_types(self): @@ -2705,23 +2705,23 @@ def test_parameter_types(self): """) expected = dedent("""\ :param param1: the data to work on - :type param1: :class:`DataFrame` + :type param1: :py:class:`DataFrame` :param param2: a parameter with different types - :type param2: :class:`int` or :class:`float` or :obj:`None`, *optional* + :type param2: :py:class:`int` or :py:class:`float` or :py:obj:`None`, *optional* :param param3: a optional mapping :type param3: :term:`dict-like `, *optional* :param param4: a optional parameter with different types - :type param4: :class:`int` or :class:`float` or :obj:`None`, *optional* + :type param4: :py:class:`int` or :py:class:`float` or :py:obj:`None`, *optional* :param param5: a optional parameter with fixed values :type param5: ``{"F", "C", "N"}``, *optional* :param param6: different default format - :type param6: :class:`int`, *default* :obj:`None` + :type param6: :py:class:`int`, *default* :py:obj:`None` :param param7: a optional mapping - :type param7: :term:`mapping` of :term:`hashable` to :class:`str`, *optional* + :type param7: :term:`mapping` of :term:`hashable` to :py:class:`str`, *optional* :param param8: ellipsis - :type param8: :obj:`... ` or :obj:`Ellipsis` + :type param8: :py:obj:`... 
` or :py:obj:`Ellipsis` :param param9: a parameter with tuple of list of int - :type param9: :class:`tuple` of :class:`list` of :class:`int` + :type param9: :py:class:`tuple` of :py:class:`list` of :py:class:`int` """) translations = { 'dict-like': ':term:`dict-like `', @@ -2881,7 +2881,7 @@ def test_napoleon_keyword_and_paramtype(app, tmp_path): list py:class 1 list.html - int py:class 1 int.html - """) - ) # NoQA: W291 + ) app.config.intersphinx_mapping = {'python': ('127.0.0.1:5555', str(inv_file))} validate_intersphinx_mapping(app, app.config) load_mappings(app) diff --git a/tests/test_extensions/test_ext_viewcode.py b/tests/test_extensions/test_ext_viewcode.py index eeef391c1e4..92455ee5ad5 100644 --- a/tests/test_extensions/test_ext_viewcode.py +++ b/tests/test_extensions/test_ext_viewcode.py @@ -6,6 +6,7 @@ import shutil from typing import TYPE_CHECKING +import pygments import pytest if TYPE_CHECKING: @@ -13,6 +14,11 @@ def check_viewcode_output(app: SphinxTestApp) -> str: + if tuple(map(int, pygments.__version__.split('.')[:2])) >= (2, 19): + sp = ' ' + else: + sp = ' ' + warnings = re.sub(r'\\+', '/', app.warning.getvalue()) assert re.findall( r"index.rst:\d+: WARNING: Object named 'func1' not found in include " @@ -41,7 +47,7 @@ def check_viewcode_output(app: SphinxTestApp) -> str: '[docs]\n' ) in result assert '@decorator\n' in result - assert 'class Class1:\n' in result + assert f'class{sp}Class1:\n' in result assert ' """\n' in result assert ' this is Class1\n' in result assert ' """\n' in result @@ -116,6 +122,7 @@ def test_linkcode(app): assert 'https://foobar/js/' in stuff assert 'https://foobar/c/' in stuff assert 'https://foobar/cpp/' in stuff + assert 'http://foobar/rst/' in stuff @pytest.mark.sphinx('html', testroot='ext-viewcode-find', freshenv=True) @@ -161,3 +168,24 @@ def find_source(app, modname): 'This is the class attribute class_attr', ): assert result.count(needle) == 1 + + +@pytest.mark.sphinx('html', testroot='ext-viewcode-find-package', freshenv=True) +def test_find_local_package_import_path(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'index.html').read_text(encoding='utf8') + + count_func1 = result.count( + 'href="_modules/main_package/subpackage/_subpackage2/submodule.html#func1"' + ) + assert count_func1 == 1 + + count_class1 = result.count( + 'href="_modules/main_package/subpackage/_subpackage2/submodule.html#Class1"' + ) + assert count_class1 == 1 + + count_class3 = result.count( + 'href="_modules/main_package/subpackage/_subpackage2/submodule.html#Class3"' + ) + assert count_class3 == 1 diff --git a/tests/test_highlighting.py b/tests/test_highlighting.py index c32c1887ebe..141de97020d 100644 --- a/tests/test_highlighting.py +++ b/tests/test_highlighting.py @@ -12,7 +12,7 @@ from sphinx.highlighting import PygmentsBridge -if tuple(map(int, pygments.__version__.split('.')))[:2] < (2, 18): +if tuple(map(int, pygments.__version__.split('.')[:2])) < (2, 18): from pygments.formatter import Formatter Formatter.__class_getitem__ = classmethod(lambda cls, name: cls) # type: ignore[attr-defined] diff --git a/tests/test_intl/test_intl.py b/tests/test_intl/test_intl.py index ab104b55600..1280a3d04c0 100644 --- a/tests/test_intl/test_intl.py +++ b/tests/test_intl/test_intl.py @@ -11,6 +11,7 @@ import time from typing import TYPE_CHECKING +import pygments import pytest from babel.messages import mofile, pofile from babel.messages.catalog import Catalog @@ -1487,6 +1488,11 @@ def test_xml_strange_markup(app): @pytest.mark.sphinx('html', 
testroot='intl') @pytest.mark.test_params(shared_result='test_intl_basic') def test_additional_targets_should_not_be_translated(app): + if tuple(map(int, pygments.__version__.split('.')[:2])) >= (2, 19): + sp = ' ' + else: + sp = ' ' + app.build() # [literalblock.txt] result = (app.outdir / 'literalblock.html').read_text(encoding='utf8') @@ -1525,7 +1531,7 @@ def test_additional_targets_should_not_be_translated(app): # doctest block should not be translated but be highlighted expected_expr = ( """>>> """ - """import sys """ + f"""import{sp}sys """ """# sys importing""" ) assert_count(expected_expr, result, 1) @@ -1570,6 +1576,11 @@ def test_additional_targets_should_not_be_translated(app): }, ) def test_additional_targets_should_be_translated(app): + if tuple(map(int, pygments.__version__.split('.')[:2])) >= (2, 19): + sp = ' ' + else: + sp = ' ' + app.build() # [literalblock.txt] result = (app.outdir / 'literalblock.html').read_text(encoding='utf8') @@ -1619,7 +1630,7 @@ def test_additional_targets_should_be_translated(app): # doctest block should not be translated but be highlighted expected_expr = ( """>>> """ - """import sys """ + f"""import{sp}sys """ """# SYS IMPORTING""" ) assert_count(expected_expr, result, 1) diff --git a/tests/test_markup/test_markup.py b/tests/test_markup/test_markup.py index fa99fa352dd..09deb90ff0f 100644 --- a/tests/test_markup/test_markup.py +++ b/tests/test_markup/test_markup.py @@ -26,6 +26,7 @@ @pytest.fixture def settings(app): + env = app.env texescape.init() # otherwise done by the latex builder with warnings.catch_warnings(): warnings.filterwarnings('ignore', category=DeprecationWarning) @@ -37,10 +38,10 @@ def settings(app): ) settings = optparser.get_default_values() settings.smart_quotes = True - settings.env = app.builder.env - settings.env.temp_data['docname'] = 'dummy' + settings.env = env + settings.env.current_document.docname = 'dummy' settings.contentsname = 'dummy' - domain_context = sphinx_domains(settings.env) + domain_context = sphinx_domains(env) domain_context.enable() yield settings domain_context.disable() diff --git a/tests/test_markup/test_parser.py b/tests/test_markup/test_parser.py index 3f40a21693a..eb8ccf24f1d 100644 --- a/tests/test_markup/test_parser.py +++ b/tests/test_markup/test_parser.py @@ -29,7 +29,7 @@ def test_RSTParser_prolog_epilog(RSTStateMachine, app): ] # with rst_prolog - app.env.config.rst_prolog = 'this is rst_prolog\nhello reST!' + app.config.rst_prolog = 'this is rst_prolog\nhello reST!' parser.parse(text, document) (content, _), _ = RSTStateMachine().run.call_args assert list(content.xitems()) == [ @@ -41,8 +41,8 @@ def test_RSTParser_prolog_epilog(RSTStateMachine, app): ] # with rst_epilog - app.env.config.rst_prolog = None - app.env.config.rst_epilog = 'this is rst_epilog\ngood-bye reST!' + app.config.rst_prolog = None + app.config.rst_epilog = 'this is rst_epilog\ngood-bye reST!' 
parser.parse(text, document) (content, _), _ = RSTStateMachine().run.call_args assert list(content.xitems()) == [ @@ -54,8 +54,8 @@ def test_RSTParser_prolog_epilog(RSTStateMachine, app): ] # expandtabs / convert whitespaces - app.env.config.rst_prolog = None - app.env.config.rst_epilog = None + app.config.rst_prolog = None + app.config.rst_epilog = None text = '\thello Sphinx world\n\v\fSphinx is a document generator' parser.parse(text, document) (content, _), _ = RSTStateMachine().run.call_args diff --git a/tests/test_transforms/test_transforms_post_transforms_code.py b/tests/test_transforms/test_transforms_post_transforms_code.py index a4243fb85bc..73bfcf49a8b 100644 --- a/tests/test_transforms/test_transforms_post_transforms_code.py +++ b/tests/test_transforms/test_transforms_post_transforms_code.py @@ -15,6 +15,8 @@ def test_trim_doctest_flags_html(app): assert 'QUUX' not in result assert 'CORGE' not in result assert 'GRAULT' in result + assert 'now() \n' not in result + assert 'now()\n' in result @pytest.mark.sphinx( diff --git a/tests/test_util/test_util_i18n.py b/tests/test_util/test_util_i18n.py index b6e865c331d..9040083a844 100644 --- a/tests/test_util/test_util_i18n.py +++ b/tests/test_util/test_util_i18n.py @@ -7,15 +7,12 @@ import time from pathlib import Path -import babel import pytest from babel.messages.mofile import read_mo from sphinx.errors import SphinxError from sphinx.util import i18n -BABEL_VERSION = tuple(map(int, babel.__version__.split('.'))) - def test_catalog_info_for_file_and_path(): cat = i18n.CatalogInfo('path', 'domain', 'utf-8') @@ -119,10 +116,10 @@ def test_format_date_timezone(): @pytest.mark.sphinx('html', testroot='root') def test_get_filename_for_language(app): get_filename = i18n.get_image_filename_for_language - app.env.temp_data['docname'] = 'index' + app.env.current_document.docname = 'index' # language is en - app.env.config.language = 'en' + app.config.language = 'en' assert get_filename('foo.png', app.env) == 'foo.en.png' assert get_filename('foo.bar.png', app.env) == 'foo.bar.en.png' assert get_filename('dir/foo.png', app.env) == 'dir/foo.en.png' @@ -130,8 +127,8 @@ def test_get_filename_for_language(app): assert get_filename('foo', app.env) == 'foo.en' # modify figure_language_filename and language is 'en' - app.env.config.language = 'en' - app.env.config.figure_language_filename = 'images/{language}/{root}{ext}' + app.config.language = 'en' + app.config.figure_language_filename = 'images/{language}/{root}{ext}' assert get_filename('foo.png', app.env) == 'images/en/foo.png' assert get_filename('foo.bar.png', app.env) == 'images/en/foo.bar.png' assert get_filename('subdir/foo.png', app.env) == 'images/en/subdir/foo.png' @@ -139,8 +136,8 @@ def test_get_filename_for_language(app): assert get_filename('foo', app.env) == 'images/en/foo' # new path and basename tokens - app.env.config.language = 'en' - app.env.config.figure_language_filename = '{path}{language}/{basename}{ext}' + app.config.language = 'en' + app.config.figure_language_filename = '{path}{language}/{basename}{ext}' assert get_filename('foo.png', app.env) == 'en/foo.png' assert get_filename('foo.bar.png', app.env) == 'en/foo.bar.png' assert get_filename('subdir/foo.png', app.env) == 'subdir/en/foo.png' @@ -148,17 +145,17 @@ def test_get_filename_for_language(app): assert get_filename('foo', app.env) == 'en/foo' # invalid figure_language_filename - app.env.config.figure_language_filename = '{root}.{invalid}{ext}' + app.config.figure_language_filename = '{root}.{invalid}{ext}' with 
pytest.raises(SphinxError): get_filename('foo.png', app.env) # docpath (for a document in the top of source directory) - app.env.config.language = 'en' - app.env.config.figure_language_filename = '/{docpath}{language}/{basename}{ext}' + app.config.language = 'en' + app.config.figure_language_filename = '/{docpath}{language}/{basename}{ext}' assert get_filename('foo.png', app.env) == '/en/foo.png' # docpath (for a document in the sub directory) - app.env.temp_data['docname'] = 'subdir/index' + app.env.current_document.docname = 'subdir/index' assert get_filename('foo.png', app.env) == '/subdir/en/foo.png' diff --git a/tests/test_util/test_util_inspect.py b/tests/test_util/test_util_inspect.py index 8e6c1e01bcb..68e40ec7882 100644 --- a/tests/test_util/test_util_inspect.py +++ b/tests/test_util/test_util_inspect.py @@ -811,7 +811,7 @@ def test_isproperty(): def test_isgenericalias(): #: A list of int T = List[int] # NoQA: UP006 - S = list[Union[str, None]] # NoQA: UP006, UP007 + S = list[Union[str, None]] # NoQA: UP007 C = Callable[[int], None] # a generic alias not having a doccomment @@ -843,7 +843,7 @@ class Foo: @classmethod def meth(self): """ - docstring + Docstring indented text """ @@ -861,7 +861,7 @@ def test_getdoc_inherited_decorated_method(): class Foo: def meth(self): """ - docstring + Docstring indented text """ diff --git a/tests/test_util/test_util_inventory.py b/tests/test_util/test_util_inventory.py index b09c2ec2dd8..77267496c01 100644 --- a/tests/test_util/test_util_inventory.py +++ b/tests/test_util/test_util_inventory.py @@ -3,8 +3,6 @@ from __future__ import annotations import os -import posixpath -from io import BytesIO from typing import TYPE_CHECKING import pytest @@ -25,8 +23,7 @@ def test_read_inventory_v1(): - f = BytesIO(INVENTORY_V1) - invdata = InventoryFile.load(f, '/util', posixpath.join) + invdata = InventoryFile.loads(INVENTORY_V1, uri='/util') assert invdata['py:module']['module'] == ( 'foo', '1.0', @@ -42,8 +39,7 @@ def test_read_inventory_v1(): def test_read_inventory_v2(): - f = BytesIO(INVENTORY_V2) - invdata = InventoryFile.load(f, '/util', posixpath.join) + invdata = InventoryFile.loads(INVENTORY_V2, uri='/util') assert len(invdata['py:module']) == 2 assert invdata['py:module']['module1'] == ( @@ -69,8 +65,7 @@ def test_read_inventory_v2(): def test_read_inventory_v2_not_having_version(): - f = BytesIO(INVENTORY_V2_NO_VERSION) - invdata = InventoryFile.load(f, '/util', posixpath.join) + invdata = InventoryFile.loads(INVENTORY_V2_NO_VERSION, uri='/util') assert invdata['py:module']['module1'] == ( 'foo', '', @@ -81,8 +76,7 @@ def test_read_inventory_v2_not_having_version(): @pytest.mark.sphinx('html', testroot='root') def test_ambiguous_definition_warning(app): - f = BytesIO(INVENTORY_V2_AMBIGUOUS_TERMS) - InventoryFile.load(f, '/util', posixpath.join) + InventoryFile.loads(INVENTORY_V2_AMBIGUOUS_TERMS, uri='/util') def _multiple_defs_notice_for(entity: str) -> str: return f'contains multiple definitions for {entity}' diff --git a/tox.ini b/tox.ini index 572647c1196..674013fdc08 100644 --- a/tox.ini +++ b/tox.ini @@ -37,7 +37,6 @@ extras = # GitHub Workflow step commands = ruff check . --output-format github - flake8 . 
     mypy
     pyright
diff --git a/utils/babel_runner.py b/utils/babel_runner.py
index c3a2d030a35..3bb42efc308 100644
--- a/utils/babel_runner.py
+++ b/utils/babel_runner.py
@@ -84,11 +84,12 @@ def run_extract() -> None:
     log = _get_logger()
 
     with open('sphinx/__init__.py', encoding='utf-8') as f:
-        for line in f.read().splitlines():
-            if line.startswith('__version__ = '):
-                # remove prefix; strip whitespace; remove quotation marks
-                sphinx_version = line[14:].strip()[1:-1]
-                break
+        lines = f.readlines()
+    for line in lines:
+        if line.startswith('__version__ = '):
+            # remove prefix; strip whitespace; remove quotation marks
+            sphinx_version = line[14:].strip()[1:-1]
+            break
 
     catalogue = Catalog(project='Sphinx', version=sphinx_version, charset='utf-8')
 
diff --git a/utils/convert_attestations.py b/utils/convert_attestations.py
index b8bd19a4a02..46697677d34 100644
--- a/utils/convert_attestations.py
+++ b/utils/convert_attestations.py
@@ -3,6 +3,16 @@
 See https://github.com/trailofbits/pypi-attestations.
 """
 
+# resolution fails without betterproto and protobuf-specs
+# /// script
+# requires-python = ">=3.11"
+# dependencies = [
+#     "pypi-attestations~=0.0.12",
+#     "sigstore-protobuf-specs==0.3.2",
+#     "betterproto==2.0.0b6",
+# ]
+# ///
+
 from __future__ import annotations
 
 import json